From dee18d10a8c974070a2642d07af53a01b4e73e5a Mon Sep 17 00:00:00 2001
From: Kamil Dudka
Date: Tue, 28 Jan 2014 17:15:07 +0100
Subject: [PATCH] Fixed #5423 (fix parsing of #line NNNN "file.c")

---
 lib/tokenlist.cpp     | 47 +++++++++++++++++++++++++------------------
 lib/tokenlist.h       |  3 +++
 test/testtokenize.cpp | 17 ++++++++++++++++
 3 files changed, 47 insertions(+), 20 deletions(-)

diff --git a/lib/tokenlist.cpp b/lib/tokenlist.cpp
index 58ab099d7..054459012 100644
--- a/lib/tokenlist.cpp
+++ b/lib/tokenlist.cpp
@@ -54,6 +54,18 @@ void TokenList::deallocateTokens()
     _files.clear();
 }
 
+unsigned int TokenList::appendFileIfNew(const std::string &fileName)
+{
+    // Has this file been tokenized already?
+    for (unsigned int i = 0; i < _files.size(); ++i)
+        if (Path::sameFileName(_files[i], fileName))
+            return i;
+
+    // The "_files" vector remembers what files have been tokenized..
+    _files.push_back(Path::simplifyPath(fileName.c_str()));
+    return static_cast<unsigned int>(_files.size() - 1);
+}
+
 void TokenList::deleteTokens(Token *tok)
 {
     while (tok) {
@@ -236,24 +248,9 @@ bool TokenList::createTokens(std::istream &code, const std::string& file0)
                 // Extract the filename
                 line = line.substr(1, line.length() - 2);
 
-                // Has this file been tokenized already?
                 ++lineno;
-                bool foundOurfile = false;
                 fileIndexes.push(FileIndex);
-                for (unsigned int i = 0; i < _files.size(); ++i) {
-                    if (Path::sameFileName(_files[i], line)) {
-                        // Use this index
-                        foundOurfile = true;
-                        FileIndex = i;
-                    }
-                }
-
-                if (!foundOurfile) {
-                    // The "_files" vector remembers what files have been tokenized..
-                    _files.push_back(Path::simplifyPath(line.c_str()));
-                    FileIndex = static_cast<unsigned int>(_files.size() - 1);
-                }
-
+                FileIndex = appendFileIfNew(line);
                 lineNumbers.push(lineno);
                 lineno = 0;
             } else {
@@ -297,12 +294,22 @@ bool TokenList::createTokens(std::istream &code, const std::string& file0)
 
                 std::getline(code, line);
 
-                // Update the current line number
                 unsigned int row;
-                if (!(std::stringstream(line) >> row))
-                    ++lineno;
-                else
+                std::istringstream fiss(line);
+                if (fiss >> row) {
+                    // Update the current line number
                     lineno = row;
+
+                    std::string line;
+                    if (std::getline(fiss, line) && line.length() > 4U) {
+                        // _"file_name" -> file_name
+                        line = line.substr(2, line.length() - 3);
+
+                        // Update the current file
+                        FileIndex = appendFileIfNew(line);
+                    }
+                } else
+                    ++lineno;
                 CurrentToken.clear();
                 continue;
             } else if (CurrentToken == "#endfile") {
diff --git a/lib/tokenlist.h b/lib/tokenlist.h
index 7cf0e0e91..9da274ecd 100644
--- a/lib/tokenlist.h
+++ b/lib/tokenlist.h
@@ -65,6 +65,9 @@ public:
     /** Deallocate list */
     void deallocateTokens();
 
+    /** append file name if seen the first time; return its index in any case */
+    unsigned int appendFileIfNew(const std::string &file);
+
     /** get first token of list */
     const Token *front() const {
         return _front;
diff --git a/test/testtokenize.cpp b/test/testtokenize.cpp
index e122427fa..5997b039b 100644
--- a/test/testtokenize.cpp
+++ b/test/testtokenize.cpp
@@ -319,6 +319,7 @@ private:
         TEST_CASE(file3);
 
         TEST_CASE(line1); // Ticket #4408
+        TEST_CASE(line2); // Ticket #5423
 
         TEST_CASE(doublesharp);
 
@@ -5159,6 +5160,22 @@ private:
         }
     }
 
+    void line2() {
+        const char code[] = "#line 8 \"c:\\a.h\"\n"
+                            "123\n";
+
+        errout.str("");
+
+        Settings settings;
+
+        // tokenize..
+        Tokenizer tokenizer(&settings, this);
+        std::istringstream istr(code);
+        tokenizer.tokenize(istr, "a.cpp");
+
+        ASSERT_EQUALS(Path::toNativeSeparators("[c:\\a.h:8]"), tokenizer.list.fileLine(tokenizer.tokens()));
+    }
+
     void doublesharp() {
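
Note on the approach (not part of the patch itself): the new #line handling first reads the
line number with operator>> and, only if that succeeds, peels the quoted file name off the
remainder of the directive. Below is a minimal standalone sketch of that parsing step under
the same assumptions as the patch (the directive text looks like: NNNN "file_name"). The
helper name parseLineDirective and the surrounding main() are made up for illustration and
do not exist in cppcheck.

    #include <iostream>
    #include <sstream>
    #include <string>

    // Parse the text following "#line"; returns true when a line number was read.
    // On success, row holds the number and file the (optional) quoted file name.
    static bool parseLineDirective(const std::string &rest, unsigned int &row, std::string &file)
    {
        std::istringstream iss(rest);
        if (!(iss >> row))
            return false;               // no number -> not a usable #line directive

        std::string tail;
        if (std::getline(iss, tail) && tail.length() > 4U) {
            // tail is ' "file_name"' -> drop the leading space and the quotes
            file = tail.substr(2, tail.length() - 3);
        }
        return true;
    }

    int main()
    {
        unsigned int row = 0;
        std::string file;
        if (parseLineDirective("8 \"c:\\a.h\"", row, file))
            std::cout << "[" << file << ":" << row << "]\n";   // prints [c:\a.h:8]
    }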