diff --git a/lib/tokenize.cpp b/lib/tokenize.cpp
index 7c7ed8840..c5924bab4 100644
--- a/lib/tokenize.cpp
+++ b/lib/tokenize.cpp
@@ -8176,10 +8176,18 @@ void Tokenizer::simplifyWhile0()
 {
     for (Token *tok = _tokens; tok; tok = tok->next())
     {
-        if (!Token::simpleMatch(tok, "while ( 0 )"))
+        // while (0)
+        const bool while0(Token::Match(tok, "while ( 0|false )"));
+
+        // for (0)
+        const bool for0(Token::Match(tok, "for ( %var% = %num% ; %var% < %num% ;") &&
+                        tok->strAt(2) == tok->strAt(6) &&
+                        tok->strAt(4) == tok->strAt(8));
+
+        if (!while0 && !for0)
             continue;
 
-        if (Token::simpleMatch(tok->previous(), "}"))
+        if (while0 && Token::simpleMatch(tok->previous(), "}"))
         {
             // find "do"
             Token *tok2 = tok->previous()->link();
@@ -8203,9 +8211,9 @@ void Tokenizer::simplifyWhile0()
         }
 
         // remove "while (0) { .. }"
-        if (Token::simpleMatch(tok->tokAt(4), "{"))
+        if (Token::simpleMatch(tok->next()->link(), ") {"))
         {
-            const Token *end = tok->tokAt(4)->link();
+            const Token *end = tok->next()->link()->next()->link();
             if (!findmatch(tok, end, "continue|break"))
             {
                 Token::eraseTokens(tok, end ? end->next() : 0);
diff --git a/test/testsimplifytokens.cpp b/test/testsimplifytokens.cpp
index 5cfa32bba..7838ca42e 100644
--- a/test/testsimplifytokens.cpp
+++ b/test/testsimplifytokens.cpp
@@ -5469,6 +5469,10 @@ private:
         ASSERT_EQUALS("; x = 1 ;", tok("; do { x = 1 ; } while (0);"));
         ASSERT_EQUALS("; do { continue ; } while ( false ) ;", tok("; do { continue ; } while (0);"));
         ASSERT_EQUALS("; do { break ; } while ( false ) ;", tok("; do { break; } while (0);"));
+        ASSERT_EQUALS(";", tok("; while (false) { a; }"));
+
+        // for (condition is always false)
+        ASSERT_EQUALS("void f ( ) { ; }", tok("void f() { int i; for (i = 0; i < 0; i++) { a; } }"));
     }
 
     void while1()
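
For reference, a minimal C++ sketch (not part of the patch) of the loops the updated simplifyWhile0() is meant to erase, derived from the Token::Match patterns and the new test strings above. The function f and the counter a are illustrative only.

// Token indices matched by "for ( %var% = %num% ; %var% < %num% ;":
//   strAt(2) = init variable,      strAt(4) = init value,
//   strAt(6) = condition variable, strAt(8) = bound.
// The loop is dead when strAt(2) == strAt(6) and strAt(4) == strAt(8),
// i.e. the same variable is compared with '<' against its own start value.
void f()
{
    int a = 0;
    int i;

    // "for (0)": i starts at 0 and the condition is i < 0, so the body
    // never runs; the tokenizer erases the whole loop ("void f ( ) { ; }").
    for (i = 0; i < 0; i++) {
        ++a;
    }

    // "while (false)" / "while (0)": also erased, provided the body
    // contains no continue or break (see the findmatch() check above).
    while (false) {
        ++a;
    }
}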