diff --git a/lib/tokenize.cpp b/lib/tokenize.cpp
index 768f381a4..0d975c2a1 100644
--- a/lib/tokenize.cpp
+++ b/lib/tokenize.cpp
@@ -3259,14 +3259,17 @@ bool Tokenizer::simplifyTokenList()
 
     // Replace constants..
     for (Token *tok = list.front(); tok; tok = tok->next()) {
-        if (Token::Match(tok, "const %type% %var% = %num% ;")) {
-            unsigned int varId = tok->tokAt(2)->varId();
+        if (Token::Match(tok, "const static| %type% %var% = %num% ;")) {
+            unsigned int offset = 0;
+            if (tok->strAt(1) == "static")
+                offset = 1;
+            const unsigned int varId(tok->tokAt(2 + offset)->varId());
             if (varId == 0) {
                 tok = tok->tokAt(5);
                 continue;
             }
 
-            const std::string& num = tok->strAt(4);
+            const std::string& num = tok->strAt(4 + offset);
             int indent = 1;
             for (Token *tok2 = tok->tokAt(6); tok2; tok2 = tok2->next()) {
                 if (tok2->str() == "{") {
diff --git a/test/testsimplifytokens.cpp b/test/testsimplifytokens.cpp
index e20cd2d0a..f61f2ea10 100644
--- a/test/testsimplifytokens.cpp
+++ b/test/testsimplifytokens.cpp
@@ -591,6 +591,11 @@ private:
             const char code2[] = " void f ( ) { if ( aa ) { a = 0 ; } else { { a = 2 ; } } }";
             ASSERT_EQUALS(tok(code2), tok(code1));
         }
+
+        {
+            const char code1[] = "static const int x=1; void f() { if(x) { a=0; } }";
+            ASSERT_EQUALS("void f ( ) { a = 0 ; }", tok(code1));
+        }
     }
 
     void combine_strings() {
diff --git a/test/testtokenize.cpp b/test/testtokenize.cpp
index e282e3fd7..a81a3b688 100644
--- a/test/testtokenize.cpp
+++ b/test/testtokenize.cpp
@@ -5092,9 +5092,9 @@ private:
     void simplify_constants4() {
         const char code[] = "static const int bSize = 4;\n"
                             "static const int aSize = 50;\n"
-                            "const int x = bSize;\n"
-                            "const int y = aSize;\n";
-        ASSERT_EQUALS("const int x = 4 ;\nconst int y = 50 ;", tokenizeAndStringify(code,true));
+                            "x = bSize;\n"
+                            "y = aSize;\n";
+        ASSERT_EQUALS("x = 4 ;\ny = 50 ;", tokenizeAndStringify(code,true));
     }
 
     void simplify_null() {
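
Note on the offset handling above (an illustrative sketch only, not part of the patch): the standalone C++ snippet below mimics the token-index arithmetic that the tokenize.cpp hunk introduces for the pattern "const static| %type% %var% = %num% ;". The check() helper and the literal token vectors are hypothetical stand-ins, used only to show why tokAt(2 + offset) lands on the variable name and strAt(4 + offset) on the numeric initializer for both accepted declaration forms.

    // Illustrative sketch only: mirrors the offset arithmetic of the new
    // "const static| %type% %var% = %num% ;" match on plain token vectors.
    #include <cassert>
    #include <string>
    #include <vector>

    // Hypothetical helper: 'toks' stands in for the tokens matched by the pattern.
    static void check(const std::vector<std::string> &toks)
    {
        // offset is 1 when the optional "static" follows "const", else 0,
        // as in the new code in Tokenizer::simplifyTokenList().
        const unsigned int offset = (toks[1] == "static") ? 1 : 0;

        assert(toks[2 + offset] == "x");  // tok->tokAt(2 + offset): the %var% token whose varId is read
        assert(toks[4 + offset] == "1");  // tok->strAt(4 + offset): the %num% value propagated to later uses
    }

    int main()
    {
        check({"const", "int", "x", "=", "1", ";"});            // offset == 0
        check({"const", "static", "int", "x", "=", "1", ";"});  // offset == 1
        return 0;
    }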