diff --git a/src/tokenize.cpp b/src/tokenize.cpp
index 4de0e4013..d3bf163e2 100644
--- a/src/tokenize.cpp
+++ b/src/tokenize.cpp
@@ -2976,11 +2976,22 @@ bool Tokenizer::simplifyCommaNearKeyWords()
 
         // We must not accept just any keyword, e.g. accepting int
         // would cause function parameters to corrupt.
-        if (!Token::Match(tok->next(), "delete"))
-            continue;
-
-        tok->str(";");
-        ret = true;
+        if (Token::Match(tok->next(), "delete"))
+        {
+            // Handle "delete a, delete b;"
+            tok->str(";");
+            ret = true;
+        }
+        // defensive: "," can be the last token ("delete a ,") - check tok->next() before dereferencing
+        else if (tok->next() && tok->previous() &&
+                 Token::Match(tok->previous()->previous(), "delete") &&
+                 tok->next()->varId() != 0)
+        {
+            // Handle "delete a, b;"
+            tok->str(";");
+            tok->insertToken("delete");
+            ret = true;
+        }
     }
 
     return ret;
diff --git a/test/testsimplifytokens.cpp b/test/testsimplifytokens.cpp
index c715604b1..8808b601d 100644
--- a/test/testsimplifytokens.cpp
+++ b/test/testsimplifytokens.cpp
@@ -978,12 +978,23 @@ private:
     void comma_keyword()
     {
-        const char code[] = "void foo()\n"
-                            "{\n"
-                            " char *a, *b;\n"
-                            " delete a, delete b;\n"
-                            "}\n";
-        ASSERT_EQUALS(" void foo ( ) { char * a ; char * b ; delete a ; delete b ; }", sizeof_(code));
+        {
+            const char code[] = "void foo()\n"
+                                "{\n"
+                                " char *a, *b;\n"
+                                " delete a, delete b;\n"
+                                "}\n";
+            ASSERT_EQUALS(" void foo ( ) { char * a ; char * b ; delete a ; delete b ; }", sizeof_(code));
+        }
+
+        {
+            const char code[] = "void foo()\n"
+                                "{\n"
+                                " char *a, *b;\n"
+                                " delete a, b;\n"
+                                "}\n";
+            ASSERT_EQUALS(" void foo ( ) { char * a ; char * b ; delete a ; delete b ; }", sizeof_(code));
+        }
     }
 
 };
 