diff --git a/lib/tokenize.cpp b/lib/tokenize.cpp
index 8ad9cca83..06012f33f 100644
--- a/lib/tokenize.cpp
+++ b/lib/tokenize.cpp
@@ -2289,7 +2289,7 @@ void Tokenizer::simplifyFileAndLineMacro()
 {
     for (Token *tok = list.front(); tok; tok = tok->next()) {
         if (tok->str() == "__FILE__")
-            tok->str(list.file(tok));
+            tok->str("\"" + list.file(tok) + "\"");
         else if (tok->str() == "__LINE__")
             tok->str(MathLib::longToString(tok->linenr()));
     }
diff --git a/test/testtokenize.cpp b/test/testtokenize.cpp
index 7bbbc2b07..a9967e445 100644
--- a/test/testtokenize.cpp
+++ b/test/testtokenize.cpp
@@ -921,6 +921,7 @@ private:
     }
 
     void simplifyFileAndLineMacro() { // tokenize 'return - __LINE__' correctly
+        ASSERT_EQUALS("\"test.cpp\"", tokenizeAndStringify("__FILE__"));
         ASSERT_EQUALS("return -1 ;", tokenizeAndStringify("return - __LINE__;"));
     }
 
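
For context (not part of the patch): the change makes the simplified __FILE__ token match what the standard preprocessor produces, a string literal with the quotes included, while __LINE__ stays a plain integer constant. A minimal standalone sketch of that standard behaviour, independent of cppcheck's tokenizer, is:

// Sketch only, not part of the patch: shows that __FILE__ expands to a
// string literal (quotes and all), while __LINE__ expands to an integer.
#include <iostream>

int main() {
    // String-literal concatenation compiles only because __FILE__ is itself
    // a string literal, i.e. the expanded token carries the quotes.
    const char *where = "expanded in " __FILE__;
    int line = __LINE__;  // an integer constant, no quotes involved

    std::cout << where << ":" << line << '\n';
    return 0;
}

This is why the tokenizer wraps list.file(tok) in escaped quotes before storing it, and why the new test expects "\"test.cpp\"" rather than a bare test.cpp.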