diff --git a/src/tokenize.cpp b/src/tokenize.cpp
index 18eac1b2a..9d8e9035f 100644
--- a/src/tokenize.cpp
+++ b/src/tokenize.cpp
@@ -3699,9 +3699,8 @@ void Tokenizer::syntaxError(const Token *tok, char c)
 }
 
 
-bool Tokenizer::simplifyMathFunctions()
+void Tokenizer::simplifyMathFunctions()
 {
-    bool result = false;
     for (Token *tok = _tokens; tok; tok = tok->next())
     {
         if (Token::Match(tok, "atol ( %str% )"))
@@ -3730,11 +3729,8 @@ bool Tokenizer::simplifyMathFunctions()
 
             // Delete remaining )
             tok->deleteNext();
-            result = true;
         }
     }
-
-    return result;
 }
 
 void Tokenizer::simplifyComma()
diff --git a/src/tokenize.h b/src/tokenize.h
index e50397502..0fb66852f 100644
--- a/src/tokenize.h
+++ b/src/tokenize.h
@@ -281,10 +281,8 @@ private:
 
     /**
      * Simplify e.g. 'atol("0")' into '0'
-     * @return true if modifications to token-list are done.
-     *         false if no modifications are done.
      */
-    bool simplifyMathFunctions();
+    void simplifyMathFunctions();
 
     void insertTokens(Token *dest, const Token *src, unsigned int n);
 
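
For context, the function touched here walks the token list and collapses a call such as atol("123") into the literal 123, matching the pattern with Token::Match("atol ( %str% )") and dropping the consumed tokens with deleteNext(), as the hunks above show. Below is a minimal, self-contained sketch of the same idea on a tiny hand-rolled token list; SimpleToken and simplifyAtol are hypothetical names invented for this illustration and are not part of cppcheck's real Token/Tokenizer API.

// Minimal standalone sketch (assumptions, not cppcheck's real code):
// collapse the four tokens  atol ( "N" )  into the single literal token N.
#include <cstdlib>
#include <iostream>
#include <string>
#include <vector>

struct SimpleToken {            // hypothetical stand-in for cppcheck's Token
    std::string str;
    SimpleToken *next;
};

static void simplifyAtol(SimpleToken *head)
{
    for (SimpleToken *tok = head; tok; tok = tok->next) {
        SimpleToken *open  = tok->next;
        SimpleToken *arg   = open ? open->next : nullptr;
        SimpleToken *close = arg  ? arg->next  : nullptr;
        if (tok->str == "atol" && open && open->str == "(" &&
            arg && arg->str.size() >= 2 && arg->str.front() == '"' &&
            close && close->str == ")") {
            // strip the quotes, convert, and rewrite the call token in place
            const std::string digits = arg->str.substr(1, arg->str.size() - 2);
            tok->str = std::to_string(std::atol(digits.c_str()));
            // unlink and free the three consumed tokens: (  "N"  )
            tok->next = close->next;
            delete open;
            delete arg;
            delete close;
        }
    }
}

int main()
{
    // build the token list:  atol ( "123" ) ;
    const std::vector<std::string> parts = { "atol", "(", "\"123\"", ")", ";" };
    SimpleToken *head = nullptr, *tail = nullptr;
    for (const std::string &p : parts) {
        SimpleToken *t = new SimpleToken{ p, nullptr };
        if (!head) head = t; else tail->next = t;
        tail = t;
    }

    simplifyAtol(head);

    for (const SimpleToken *t = head; t; t = t->next)
        std::cout << t->str << ' ';
    std::cout << '\n';          // prints: 123 ;

    // free the remaining tokens
    while (head) {
        SimpleToken *n = head->next;
        delete head;
        head = n;
    }
}

With the change in this diff, the pass simply performs the rewrite and no longer reports whether anything was modified, which is why the result/return bookkeeping disappears from both tokenize.cpp and the declaration in tokenize.h.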