diff --git a/lib/tokenize.cpp b/lib/tokenize.cpp
index 54b103b51..5c85ff874 100644
--- a/lib/tokenize.cpp
+++ b/lib/tokenize.cpp
@@ -7832,14 +7832,14 @@ bool Tokenizer::simplifyCalculations()
             ret = true;
         }
         else if (Token::Match(tok->previous(), "[=([,] 0 [+|]") ||
-                 Token::Match(tok->previous(), "return 0 [+|]"))
+                 Token::Match(tok->previous(), "return|case 0 [+|]"))
         {
             tok->deleteThis();
             tok->deleteThis();
             ret = true;
         }
         else if (Token::Match(tok->previous(), "[=[(,] 0 * %any% ,|]|)|;|=|%op%") ||
-                 Token::Match(tok->previous(), "return 0 * %any% ,|;|=|%op%"))
+                 Token::Match(tok->previous(), "return|case 0 * %any% ,|:|;|=|%op%"))
        {
             tok->deleteNext();
             if (tok->next()->str() == "(")
@@ -7942,7 +7942,7 @@ bool Tokenizer::simplifyCalculations()
             Token::Match(tok, "<< %num% [+-*/] %num% <<") ||
             Token::Match(tok, "[(,[] %num% [|&^] %num% [];,);]") ||
             Token::Match(tok, "(|%op% %num% [+-*/] %num% )|%op%") ||
-            Token::Match(tok,"return %num% [+-*/] %num% ;|,|=|%op%"))
+            Token::Match(tok,"return|case %num% [+-*/] %num% ;|,|=|:|%op%"))
         {
             tok = tok->next();
 
diff --git a/test/testtokenize.cpp b/test/testtokenize.cpp
index c29ce1317..b8eed846d 100644
--- a/test/testtokenize.cpp
+++ b/test/testtokenize.cpp
@@ -5687,6 +5687,11 @@ private:
                       tokenizeAndStringify("int f() { return a * 1; }", true));
         ASSERT_EQUALS("int f ( int a ) { return 0 ; }",
                       tokenizeAndStringify("int f(int a) { return 0 * a; }", true));
+        ASSERT_EQUALS("bool f ( int i ) { switch ( i ) { case 15 : ; return true ; } }",
+                      tokenizeAndStringify("bool f(int i) { switch (i) { case 10 + 5: return true; } }", true));
+        TODO_ASSERT_EQUALS("bool f ( int i ) { ; switch ( i ) { case 15 : ; return true ; } }",
+                           "bool f ( int i ) { int a ; a = 10 ; int b ; b = 5 ; switch ( i ) { case a + b : return true ; } }",
+                           tokenizeAndStringify("bool f(int i) { int a = 10; int b = 5; switch (i) { case a + b: return true; } }", true));
     }
 
     void simplifyCompoundAssignment()
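
As a minimal sketch (not part of the patch), this is the kind of source the extended patterns now fold: the "case 10 + 5:" label comes from the new test above, while the surrounding function body is only illustrative.

    // Illustrative C++ input for simplifyCalculations():
    // the constant expression in the case label is now folded to "case 15 :",
    // just as literal arithmetic after "return" was already folded.
    bool f(int i)
    {
        switch (i) {
        case 10 + 5:        // becomes "case 15 :" after simplification
            return true;
        }
        return 10 - 10;     // already folded to "return 0 ;" by the return pattern
    }

The TODO_ASSERT_EQUALS case documents a known limitation: when the case expression uses variables ("case a + b:"), the tokenizer does not yet substitute their constant values, so the expression is left unfolded.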