diff --git a/lib/tokenize.cpp b/lib/tokenize.cpp index 066b9bd22..3cd7e423f 100644 --- a/lib/tokenize.cpp +++ b/lib/tokenize.cpp @@ -6885,7 +6885,7 @@ bool Tokenizer::simplifyRedundantParentheses() ret = true; } - while (Token::Match(tok->previous(), "[;{}[]().,!*] ( %name% .")) { + while (Token::Match(tok->previous(), "[;{}[(,!*] ( %name% .")) { Token *tok2 = tok->tokAt(2); while (Token::Match(tok2, ". %name%")) { tok2 = tok2->tokAt(2); diff --git a/test/testtokenize.cpp b/test/testtokenize.cpp index 7a31401a5..3352d8468 100644 --- a/test/testtokenize.cpp +++ b/test/testtokenize.cpp @@ -236,6 +236,7 @@ private: TEST_CASE(removeParentheses21); // Don't "simplify" casts TEST_CASE(removeParentheses22); TEST_CASE(removeParentheses23); // Ticket #6103 - Infinite loop upon valid input + TEST_CASE(removeParentheses24); // Ticket #7040 TEST_CASE(tokenize_double); TEST_CASE(tokenize_strings); @@ -3276,6 +3277,12 @@ private: } } + void removeParentheses24() { // Ticket #7040 + static char code[] = "std::hash<decltype(t._data)>()(t._data);"; + static char exp[] = "std :: hash < decltype ( t . _data ) > ( ) ( t . _data ) ;"; + ASSERT_EQUALS(exp, tokenizeAndStringify(code)); + } + void tokenize_double() { const char code[] = "void f() {\n" " double a = 4.2;\n"