diff --git a/src/tokenize.cpp b/src/tokenize.cpp
index cd59b2621..745d41df1 100644
--- a/src/tokenize.cpp
+++ b/src/tokenize.cpp
@@ -255,6 +255,8 @@ void Tokenizer::tokenize(std::istream &code, const char FileName[])
                 ch = (char)code.get();
                 if (chPrev != '\\' && ch == '\n')
                     break;
+                if (chPrev == '\\')
+                    line += chPrev;
                 if (chPrev == '#' && ch == '#')
                 {
                     addtoken("##", lineno, FileIndex);
diff --git a/test/testtokenize.cpp b/test/testtokenize.cpp
index 4d36bb93a..70031a00c 100644
--- a/test/testtokenize.cpp
+++ b/test/testtokenize.cpp
@@ -98,6 +98,7 @@ private:
 
         TEST_CASE(file1);
         TEST_CASE(file2);
+        TEST_CASE(file3);
 
         TEST_CASE(doublesharp);
 
@@ -840,6 +841,24 @@ private:
 
 
 
+
+    void file3()
+    {
+        const char code[] = "#file \"c:\\a.h\"\n"
+                            "123\n"
+                            "#endfile\n";
+
+        // tokenize..
+        Tokenizer tokenizer;
+        std::istringstream istr(code);
+        tokenizer.tokenize(istr, "a.cpp");
+
+
+        ASSERT_EQUALS("[c:\\a.h:1]", tokenizer.fileLine(tokenizer.tokens()));
+    }
+
+
+
     void doublesharp()
     {
         const char code[] = "TEST(var,val) var##_##val = val\n";
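
Why the fix works: the directive reader holds a just-read '\' back in chPrev so it can peek at the next character. A '\' followed by '\n' is a line continuation and both characters are dropped, but a '\' followed by anything else (as in the c:\a.h path exercised by the new file3 test) must be appended to line, which is what the added "line += chPrev;" does. Below is a minimal standalone sketch of that logic, illustrative only and not the actual Tokenizer::tokenize() loop; readLogicalLine and its structure are invented for this sketch.

// Sketch (not cppcheck code): read one logical line, honoring "\<newline>"
// continuations while preserving lone backslashes such as those in "c:\a.h".
#include <cassert>
#include <sstream>
#include <string>

static std::string readLogicalLine(std::istream &code)
{
    std::string line;
    char chPrev = 0;
    while (code.good())
    {
        const char ch = (char)code.get();
        if (!code.good())
            break;
        if (chPrev == '\\' && ch == '\n')
        {
            // Line continuation: drop both the backslash and the newline.
            chPrev = 0;
            continue;
        }
        if (chPrev == '\\')
            line += chPrev;     // held-back backslash was not a continuation; keep it
        if (ch == '\n')
            break;
        if (ch != '\\')
            line += ch;         // backslashes are held back until the next char is seen
        chPrev = ch;
    }
    if (chPrev == '\\')
        line += chPrev;         // stream ended right after a backslash
    return line;
}

int main()
{
    // Backslashes inside a path survive intact (the bug this patch fixes).
    std::istringstream istr1("#file \"c:\\a.h\"\n");
    assert(readLogicalLine(istr1) == "#file \"c:\\a.h\"");

    // A backslash-newline pair still splices the two physical lines.
    std::istringstream istr2("#define X 1 \\\n + 2\n");
    assert(readLogicalLine(istr2) == "#define X 1  + 2");
    return 0;
}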