diff --git a/lib/tokenize.cpp b/lib/tokenize.cpp
index be64337b5..90287bb39 100644
--- a/lib/tokenize.cpp
+++ b/lib/tokenize.cpp
@@ -4712,7 +4712,7 @@ void Tokenizer::simplifyCompoundAssignment()
     // "a+=b" => "a = a + b"
     for (Token *tok = _tokens; tok; tok = tok->next())
     {
-        if (Token::Match(tok, "[;{}:] *| (| %var%"))
+        if (Token::Match(tok, "[;{}] (") || Token::Match(tok, "[;{}:] *| (| %var%"))
         {
             if (tok->str() == ":")
             {
diff --git a/test/testtokenize.cpp b/test/testtokenize.cpp
index 76b3b7a40..978ec46d7 100644
--- a/test/testtokenize.cpp
+++ b/test/testtokenize.cpp
@@ -5196,6 +5196,7 @@ private:
         ASSERT_EQUALS("{ x = x >> y ; }", tokenizeAndStringify("{ x >>= y;}"));
         ASSERT_EQUALS("; * p = * p + y ;", tokenizeAndStringify("; *p += y;"));
+        ASSERT_EQUALS("; ( * p ) = ( * p ) + y ;", tokenizeAndStringify("; (*p) += y;"));
         ASSERT_EQUALS("; * ( p [ 0 ] ) = * ( p [ 0 ] ) + y ;", tokenizeAndStringify("; *(p[0]) += y;"));
         ASSERT_EQUALS("case 0 : x = x + y ; break ;", tokenizeAndStringify("case 0: x += y; break;"));
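
For context: the extra alternative "[;{}] (" lets simplifyCompoundAssignment also fire when a statement begins with a parenthesized left-hand side, so "; (*p) += y;" now expands to "; ( * p ) = ( * p ) + y ;" as exercised by the new test. Below is a minimal standalone sketch of that rewrite, not cppcheck's actual Token/Token::Match machinery; the string-based token list and the helper name expandCompoundAssignment are assumptions made for this illustration, and only the two-character compound operators are handled.

// Illustrative sketch only: expand "lhs op= rhs ;" into "lhs = lhs op rhs ;"
// when the statement starts after ";", "{" or "}" with a parenthesized lhs,
// e.g. "; (*p) += y;" becomes "; (*p) = (*p) + y;".
#include <cstddef>
#include <iostream>
#include <string>
#include <vector>

std::vector<std::string> expandCompoundAssignment(std::vector<std::string> tokens)
{
    for (std::size_t i = 0; i + 1 < tokens.size(); ++i) {
        const std::string &prev = tokens[i];
        if ((prev == ";" || prev == "{" || prev == "}") && tokens[i + 1] == "(") {
            // Collect the parenthesized lhs, e.g. "( * p )".
            std::size_t j = i + 1;
            int depth = 0;
            std::vector<std::string> lhs;
            do {
                if (tokens[j] == "(")
                    ++depth;
                else if (tokens[j] == ")")
                    --depth;
                lhs.push_back(tokens[j]);
                ++j;
            } while (j < tokens.size() && depth > 0);

            // Two-character compound operators only ("<<=" / ">>=" omitted for brevity).
            if (j < tokens.size() && tokens[j].size() == 2 && tokens[j][1] == '=' &&
                std::string("+-*/%&|^").find(tokens[j][0]) != std::string::npos) {
                const std::string op(1, tokens[j][0]);   // "+" from "+="
                tokens[j] = "=";                         // "(*p) = ..."
                // Re-insert the lhs and the plain operator after the "=".
                std::vector<std::string> expansion = lhs;
                expansion.push_back(op);
                tokens.insert(tokens.begin() + j + 1, expansion.begin(), expansion.end());
            }
        }
    }
    return tokens;
}

int main()
{
    const std::vector<std::string> input = {";", "(", "*", "p", ")", "+=", "y", ";"};
    for (const std::string &tok : expandCompoundAssignment(input))
        std::cout << tok << ' ';
    std::cout << '\n';   // prints: ; ( * p ) = ( * p ) + y ;
}

The real patch expresses the same "statement starts here and the next token is '('" check through Token::Match, then reuses the existing expansion code, which is why only the match pattern changes.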