diff --git a/src/tokenize.cpp b/src/tokenize.cpp
index c4b1b5b81..470bf818a 100644
--- a/src/tokenize.cpp
+++ b/src/tokenize.cpp
@@ -1700,7 +1700,8 @@ void Tokenizer::simplifyTokenList()
     for (Token *tok = _tokens; tok; tok = tok->next())
     {
         if (tok->str() == "NULL" ||
-            tok->str() == "'\\0'")
+            tok->str() == "'\\0'" ||
+            tok->str() == "0L")
             tok->str("0");
     }
 
diff --git a/test/testtokenize.cpp b/test/testtokenize.cpp
index 490acb39a..ea25058e8 100644
--- a/test/testtokenize.cpp
+++ b/test/testtokenize.cpp
@@ -381,6 +381,7 @@ private:
             "    if (p1 != NULL || p2 == NULL) { ; }\n"
             "    if (p1 != NULL && p2 == NULL) { ; }\n"
             "    if (p == '\\0');\n"
+            "    if (p == 0L);\n"
             "}\n";
 
         ASSERT_EQUALS("void f ( )\n"
@@ -412,6 +413,7 @@ private:
                       "if ( p1 || ! p2 ) { ; }\n"
                       "if ( p1 && ! p2 ) { ; }\n"
                       "if ( ! p ) { ; }\n"
+                      "if ( ! p ) { ; }\n"
                       "}", tokenizeAndStringify(code, true));
     }