tokenizer: removed some preprocessing => fewer false positives

Daniel Marjamäki 2007-10-23 06:36:29 +00:00
parent e7e9402ab0
commit 5070bfd87d
1 changed file with 6 additions and 1 deletion


@@ -258,19 +258,23 @@ void Tokenize(const char FileName[])
        return;
    }

    // Open file..
    std::ifstream fin(FileName);
    if (!fin.is_open())
        return;

    // The "Files" vector remembers what files have been tokenized..
    unsigned int CurrentFile = Files.size();
    Files.push_back(FileName);

    // Tokenize the file.
    unsigned int lineno = 1;
    char CurrentToken[1000];
    memset(CurrentToken, 0, sizeof(CurrentToken));
    char *pToken = CurrentToken;
    for (char ch = (char)fin.get(); !fin.eof(); ch = (char)fin.get())
    {
        // Preprocessor stuff?
        if (ch == '#' && !CurrentToken[0])
        {
            std::string line;
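The hunk above shows the character-by-character tokenizer loop and its special case for preprocessor input: when a '#' arrives while no token is in progress, the rest of the line is handled as one preprocessor directive instead of being tokenized further. Below is a minimal standalone sketch of that pattern; it is hypothetical illustration code, not the cppcheck implementation, and it simply prints tokens and directives instead of building a token list.

#include <cctype>
#include <fstream>
#include <iostream>
#include <string>

// Hypothetical sketch: read a file character by character and treat a line
// that starts with '#' at a token boundary as a single preprocessor directive.
void TokenizeSketch(const char FileName[])
{
    std::ifstream fin(FileName);
    if (!fin.is_open())
        return;

    std::string CurrentToken;
    for (char ch = (char)fin.get(); !fin.eof(); ch = (char)fin.get())
    {
        // Preprocessor stuff? '#' only counts if no token is in progress.
        if (ch == '#' && CurrentToken.empty())
        {
            std::string line;
            std::getline(fin, line);          // consume the rest of the line
            std::cout << "#" << line << "\n"; // keep the directive as one unit
            continue;
        }

        if (std::isspace((unsigned char)ch))
        {
            if (!CurrentToken.empty())
                std::cout << CurrentToken << "\n"; // emit the finished token
            CurrentToken.clear();
        }
        else
        {
            CurrentToken += ch;
        }
    }
}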
@@ -540,7 +544,7 @@ void SimplifyTokenList()
    }

    /*
    // typedefs..
    TOKEN *prev = NULL;
    for (TOKEN *tok = tokens; tok; tok = tok->next)
@@ -639,6 +643,7 @@ void SimplifyTokenList()
        }
        prev = tok;
    }
    */

    // Fill the map TypeSize..
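The last context line refers to a "TypeSize" map that maps type names to sizes. The diff does not show how the map is filled, so the following is only a hypothetical sketch of what such a table might look like; the keys and values used by the actual tool may differ.

#include <map>
#include <string>

// Hypothetical sketch of a type-name -> size table like the "TypeSize" map
// mentioned in the comment above; the entries are illustrative assumptions.
static std::map<std::string, unsigned int> TypeSize;

static void FillTypeSize()
{
    TypeSize["char"]   = sizeof(char);
    TypeSize["short"]  = sizeof(short);
    TypeSize["int"]    = sizeof(int);
    TypeSize["long"]   = sizeof(long);
    TypeSize["float"]  = sizeof(float);
    TypeSize["double"] = sizeof(double);
    TypeSize["*"]      = sizeof(void *);   // pointer size, as an assumption
}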