diff --git a/src/token.cpp b/src/token.cpp index 5c866a090..db5748382 100644 --- a/src/token.cpp +++ b/src/token.cpp @@ -24,6 +24,7 @@ #include #include #include +#include <map> Token::Token() : _str(""), @@ -475,13 +476,35 @@ std::string Token::stringifyList(const bool varid, const char *title) const ret << "\n### " << title << " ###\n"; unsigned int linenr = 0; + int fileIndex = -1; + std::map<int, unsigned int> lineNumbers; for (const Token *tok = this; tok; tok = tok->next()) { - while (linenr < tok->linenr()) + bool fileChange = false; + if (static_cast<int>(tok->_fileIndex) != fileIndex) { - ++linenr; - ret << "\n" << linenr << ":"; + if (fileIndex != -1) + { + lineNumbers[fileIndex] = tok->_fileIndex; + } + + fileIndex = static_cast<int>(tok->_fileIndex); + ret << "\n\n##file " << fileIndex << ""; + + linenr = lineNumbers[fileIndex]; + fileChange = true; } + + if (linenr != tok->linenr() || fileChange) + { + while (linenr < tok->linenr()) + { + ++linenr; + ret << "\n" << linenr << ":"; + } + linenr = tok->linenr(); + } + ret << " " << tok->str(); if (varid && tok->varId() > 0) ret << "@" << tok->varId(); diff --git a/test/testsimplifytokens.cpp b/test/testsimplifytokens.cpp index 9f8bfc37f..215bd22f5 100644 --- a/test/testsimplifytokens.cpp +++ b/test/testsimplifytokens.cpp @@ -340,7 +340,7 @@ private: void elseif1() { const char code[] = "else if(ab) { cd } else { ef }gh"; - ASSERT_EQUALS("\n1: else { if ( ab ) { cd } else { ef } } gh\n", elseif(code)); + ASSERT_EQUALS("\n\n##file 0\n1: else { if ( ab ) { cd } else { ef } } gh\n", elseif(code)); } diff --git a/test/testtokenize.cpp b/test/testtokenize.cpp index c7e4e295c..8f0f90466 100644 --- a/test/testtokenize.cpp +++ b/test/testtokenize.cpp @@ -735,7 +735,7 @@ private: // result.. const std::string actual(tokenizer.tokens()->stringifyList(true)); - const std::string expected("\n" + const std::string expected("\n\n##file 0\n" "1: static int i@1 = 1 ;\n" "2: void f ( )\n" "3: {\n" @@ -765,7 +765,7 @@ private: // result..
const std::string actual(tokenizer.tokens()->stringifyList(true)); - const std::string expected("\n" + const std::string expected("\n\n##file 0\n" "1: void f ( )\n" "2: {\n" "3: struct ABC abc@1 ;\n" @@ -793,7 +793,7 @@ private: // result.. const std::string actual(tokenizer.tokens()->stringifyList(true)); - const std::string expected("\n" + const std::string expected("\n\n##file 0\n" "1: static char str@1 [ 4 ] ;\n" "2: void f ( )\n" "3: {\n" @@ -819,7 +819,7 @@ private: // result.. const std::string actual(tokenizer.tokens()->stringifyList(true)); - const std::string expected("\n" + const std::string expected("\n\n##file 0\n" "1: void f ( const int a@1 [ ] )\n" "2: {\n" "3: int i@2 ; i@2 = a@1 [ 10 ] ;\n" @@ -844,7 +844,7 @@ private: // result.. const std::string actual(tokenizer.tokens()->stringifyList(true)); - const std::string expected("\n" + const std::string expected("\n\n##file 0\n" "1: void f ( )\n" "2: {\n" "3: int a@1 ; int b@2 ;\n" @@ -869,7 +869,7 @@ private: // result.. const std::string actual(tokenizer.tokens()->stringifyList(true)); - const std::string expected("\n" + const std::string expected("\n\n##file 0\n" "1: void f ( int a@1 , int b@2 )\n" "2: {\n" "3: return a@1 + b@2 ;\n" @@ -897,7 +897,7 @@ private: // result.. const std::string actual(tokenizer.tokens()->stringifyList(true)); - const std::string expected("\n" + const std::string expected("\n\n##file 0\n" "1: void func ( )\n" "2: {\n" "3: char a@1 [ 256 ] = \"test\" ;\n" @@ -926,7 +926,7 @@ private: // result.. const std::string actual(tokenizer.tokens()->stringifyList(true)); - const std::string expected("\n" + const std::string expected("\n\n##file 0\n" "1: int f ( )\n" "2: {\n" "3: int a@1 ;\n"