diff --git a/lib/tokenize.cpp b/lib/tokenize.cpp
index 84e92d884..eced0ac30 100644
--- a/lib/tokenize.cpp
+++ b/lib/tokenize.cpp
@@ -5038,7 +5038,22 @@ void Tokenizer::simplifyCompoundAssignment()
             tok->insertToken(op);
 
             std::stack<Token *> tokend;
-            for (const Token *tok2 = tok->previous(); tok2 && tok2 != tok1; tok2 = tok2->previous()) {
+            for (Token *tok2 = tok->previous(); tok2 && tok2 != tok1; tok2 = tok2->previous()) {
+                // Don't duplicate ++ and --. Put preincrement in lhs. Put
+                // postincrement in rhs.
+                if (tok2->str() == "++" || tok2->str() == "--") {
+                    // pre increment/decrement => don't copy
+                    if (tok2->next()->isName()) {
+                        continue;
+                    }
+
+                    // post increment/decrement => move from lhs to rhs
+                    tok->insertToken(tok2->str());
+                    tok2->deleteThis();
+                    continue;
+                }
+
+                // Copy token from lhs to rhs
                 tok->insertToken(tok2->str());
                 tok->next()->varId(tok2->varId());
                 if (Token::Match(tok->next(), "]|)"))
diff --git a/test/testtokenize.cpp b/test/testtokenize.cpp
index e79384679..2c5369f2d 100644
--- a/test/testtokenize.cpp
+++ b/test/testtokenize.cpp
@@ -5796,6 +5796,8 @@ private:
         ASSERT_EQUALS("; x [ 0 ] = x [ 0 ] + 1 ;", tokenizeAndStringify("; x[0] += 1;"));
         ASSERT_EQUALS("; x [ y - 1 ] = x [ y - 1 ] + 1 ;", tokenizeAndStringify("; x[y-1] += 1;"));
+        ASSERT_EQUALS("; x [ y ] = x [ y ++ ] + 1 ;", tokenizeAndStringify("; x[y++] += 1;"));
+        ASSERT_EQUALS("; x [ ++ y ] = x [ y ] + 1 ;", tokenizeAndStringify("; x[++y] += 1;"));
         ASSERT_EQUALS(";", tokenizeAndStringify(";x += 0;"));
         ASSERT_EQUALS(";", tokenizeAndStringify(";x += '\\0';"));
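
Note (not part of the patch): the special-casing above exists because "x[y++] += 1" must increment y exactly once, so when the tokenizer expands the compound assignment it may keep the "++" on only one side; a post-increment moves to the rhs, a pre-increment stays in the lhs, exactly as the two new ASSERT_EQUALS cases check. The following is a minimal, self-contained C++ sketch written for this review, not cppcheck code, that demonstrates the single-increment semantics being preserved:

    #include <cassert>

    int main()
    {
        int x[3] = {0, 0, 0};
        int y = 0;

        // The compound assignment increments y exactly once and updates
        // x[0], matching the expanded token form
        // "x [ y ] = x [ y ++ ] + 1", where "++" occurs only on the rhs.
        x[y++] += 1;
        assert(y == 1 && x[0] == 1);

        // Had the tokenizer naively copied the whole lhs, the expansion
        // would contain two "++" tokens and model two increments of y,
        // which is the misbehavior the patch prevents.
        return 0;
    }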