Refactoring: Moved copy/paste code in Tokenizer

Daniel Marjamäki 2010-04-03 09:54:10 +02:00
parent f8367da1fd
commit a4144b9d18
2 changed files with 24 additions and 41 deletions
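
The change moves the "##" token splitting that was duplicated in two places
in createTokens() into addtoken() itself, behind a new split parameter. The
recursive calls pass split=false so that the emitted "##" token is not split
again (passing true there would recurse forever), and the default of false
in the header leaves all other existing callers unchanged.

A minimal standalone sketch of the splitting behavior, for illustration only:
splitToken() and the vector output are hypothetical stand-ins, not the actual
cppcheck API.

#include <cstring>
#include <iostream>
#include <string>
#include <vector>

// Sketch of the "##" splitting now centralized in Tokenizer::addtoken().
// splitToken() is a hypothetical stand-in that collects tokens into a
// vector instead of appending them to the Tokenizer's token list.
static void splitToken(const char str[], std::vector<std::string> &out)
{
    std::string temp;
    for (unsigned int i = 0; str[i]; ++i)
    {
        if (std::strncmp(&str[i], "##", 2) == 0)
        {
            if (!temp.empty())      // addtoken() ignores empty strings
                out.push_back(temp);
            temp.clear();
            out.push_back("##");
            ++i;                    // skip the second '#'
        }
        else
            temp += str[i];
    }
    if (!temp.empty())
        out.push_back(temp);
}

int main()
{
    std::vector<std::string> tokens;
    splitToken("a##b##c", tokens);
    for (const std::string &t : tokens)
        std::cout << t << '\n';     // prints a, ##, b, ##, c (one per line)
}

For "a##b##c" this yields the token stream a ## b ## c, the same stream the
two old inline loops in createTokens() produced.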

tokenize.cpp

@@ -85,11 +85,31 @@ const std::vector<std::string> *Tokenizer::getFiles() const
 // add a token. Used by 'Tokenizer'
 //---------------------------------------------------------------------------
 
-void Tokenizer::addtoken(const char str[], const unsigned int lineno, const unsigned int fileno)
+void Tokenizer::addtoken(const char str[], const unsigned int lineno, const unsigned int fileno, bool split)
 {
     if (str[0] == 0)
         return;
 
+    // If token contains # characters, split it up
+    if (split && strstr(str, "##"))
+    {
+        std::string temp;
+        for (unsigned int i = 0; str[i]; ++i)
+        {
+            if (strncmp(&str[i], "##", 2) == 0)
+            {
+                addtoken(temp.c_str(), lineno, fileno, false);
+                temp.clear();
+                addtoken("##", lineno, fileno, false);
+                ++i;
+            }
+            else
+                temp += str[i];
+        }
+        addtoken(temp.c_str(), lineno, fileno, false);
+        return;
+    }
+
     // Replace hexadecimal value with decimal
     std::ostringstream str2;
     if (strncmp(str, "0x", 2) == 0)
@@ -338,26 +358,7 @@ void Tokenizer::createTokens(std::istream &code)
                 continue;
             }
 
-            // If token contains # characters, split it up
-            if (CurrentToken.find("##") == std::string::npos)
-                addtoken(CurrentToken.c_str(), lineno, FileIndex);
-            else
-            {
-                std::string temp;
-                for (std::string::size_type i = 0; i < CurrentToken.length(); ++i)
-                {
-                    if (CurrentToken[i] == '#' && CurrentToken.length() + 1 > i && CurrentToken[i+1] == '#')
-                    {
-                        addtoken(temp.c_str(), lineno, FileIndex);
-                        temp.clear();
-                        addtoken("##", lineno, FileIndex);
-                        ++i;
-                    }
-                    else
-                        temp += CurrentToken[i];
-                }
-                addtoken(temp.c_str(), lineno, FileIndex);
-            }
+            addtoken(CurrentToken.c_str(), lineno, FileIndex, true);
 
             CurrentToken.clear();
@@ -383,25 +384,7 @@ void Tokenizer::createTokens(std::istream &code)
         CurrentToken += ch;
     }
 
-    if (CurrentToken.find("##") == std::string::npos)
-        addtoken(CurrentToken.c_str(), lineno, FileIndex);
-    else
-    {
-        std::string temp;
-        for (std::string::size_type i = 0; i < CurrentToken.length(); ++i)
-        {
-            if (CurrentToken[i] == '#' && CurrentToken.length() + 1 > i && CurrentToken[i+1] == '#')
-            {
-                addtoken(temp.c_str(), lineno, FileIndex);
-                temp.clear();
-                addtoken("##", lineno, FileIndex);
-                ++i;
-            }
-            else
-                temp += CurrentToken[i];
-        }
-        addtoken(temp.c_str(), lineno, FileIndex);
-    }
+    addtoken(CurrentToken.c_str(), lineno, FileIndex, true);
 }
 
 void Tokenizer::duplicateTypedefError(const Token *tok1, const Token *tok2, const std::string &type)

tokenize.h

@@ -262,7 +262,7 @@ public:
     std::vector<const Token *> _functionList;
 
-    void addtoken(const char str[], const unsigned int lineno, const unsigned int fileno);
+    void addtoken(const char str[], const unsigned int lineno, const unsigned int fileno, bool split=false);
     void addtoken(const Token *tok, const unsigned int lineno, const unsigned int fileno);
 
     /**