Fixes test case "file2", fixes bug with include file handling

This commit is contained in:
Reijo Tomperi 2009-01-20 22:25:49 +00:00
parent 5d8f506d6b
commit 6d8387914b
2 changed files with 17 additions and 8 deletions

View File

@ -167,9 +167,15 @@ void Tokenizer::tokenize(std::istream &code, const char FileName[])
// The current token being parsed // The current token being parsed
std::string CurrentToken; std::string CurrentToken;
// fileIndexes is a vector that holds linenumbers for file indexes // lineNumbers holds line numbers for files in fileIndexes
// every time an include file is completely parsed, last item in the vector
// is removed and lineno is set to point to that value.
std::vector<unsigned int> lineNumbers;
// fileIndexes holds index for _files vector about currently parsed files
// every time an include file is completely parsed, last item in the vector
// is removed and FileIndex is set to point to that value.
std::vector<unsigned int> fileIndexes; std::vector<unsigned int> fileIndexes;
fileIndexes.push_back(0);
// FileIndex. What file in the _files vector is read now? // FileIndex. What file in the _files vector is read now?
unsigned int FileIndex = 0; unsigned int FileIndex = 0;
@ -287,13 +293,14 @@ void Tokenizer::tokenize(std::istream &code, const char FileName[])
// Has this file been tokenized already? // Has this file been tokenized already?
++lineno; ++lineno;
bool foundOurfile = false; bool foundOurfile = false;
fileIndexes.push_back(FileIndex);
for (unsigned int i = 0; i < _files.size(); i++) for (unsigned int i = 0; i < _files.size(); i++)
{ {
if (SameFileName(_files[i].c_str(), line.c_str())) if (SameFileName(_files[i].c_str(), line.c_str()))
{ {
// Use this index // Use this index
foundOurfile = true; foundOurfile = true;
fileIndexes.push_back(lineno); FileIndex = i;
} }
} }
@ -301,19 +308,21 @@ void Tokenizer::tokenize(std::istream &code, const char FileName[])
{ {
// The "_files" vector remembers what files have been tokenized.. // The "_files" vector remembers what files have been tokenized..
_files.push_back(FileLister::simplifyPath(line.c_str())); _files.push_back(FileLister::simplifyPath(line.c_str()));
fileIndexes.push_back(lineno); FileIndex = _files.size() - 1;
} }
lineNumbers.push_back(lineno);
lineno = 1; lineno = 1;
FileIndex = fileIndexes.size() - 1;
continue; continue;
} }
else if (strncmp(line.c_str(), "#endfile", 8) == 0) else if (strncmp(line.c_str(), "#endfile", 8) == 0)
{ {
lineno = fileIndexes.back(); lineno = lineNumbers.back();
lineNumbers.pop_back();
FileIndex = fileIndexes.back();
fileIndexes.pop_back(); fileIndexes.pop_back();
FileIndex = fileIndexes.size() - 1;
continue; continue;
} }

View File

@ -68,7 +68,7 @@ private:
TEST_CASE(varid2); TEST_CASE(varid2);
TEST_CASE(file1); TEST_CASE(file1);
// TODO TEST_CASE(file2); TEST_CASE(file2);
TEST_CASE(doublesharp); TEST_CASE(doublesharp);
} }