Tokenizer: refactor duplicated token copy code into a function

Author: Robert Reif
Date:   2011-02-09 22:02:17 -05:00
Parent: 90495d52d2
Commit: e6848aef98

2 changed files with 52 additions and 233 deletions
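
In short, the change lifts the loop that copies a token range and re-pairs bracket links, previously duplicated across simplifyTypedef() and simplifyEnum(), into a single helper, Tokenizer::copyTokens(). The standalone sketch below is illustrative only: it models the copy-and-relink algorithm with a stripped-down Tok struct, not cppcheck's real Token class, so the names Tok and insertAfter are assumptions made for this example.

    // Minimal model (illustrative only) of the algorithm behind Tokenizer::copyTokens():
    // copy a range of doubly linked tokens and rebuild ()/[]/{} pair links with a stack.
    #include <cassert>
    #include <stack>
    #include <string>

    struct Tok
    {
        std::string str;
        Tok *prev = nullptr;
        Tok *next = nullptr;
        Tok *link = nullptr;   // matching bracket, if any

        // Insert a new token directly after this one and return it.
        Tok *insertAfter(const std::string &s)
        {
            Tok *t = new Tok{s};
            t->prev = this;
            t->next = next;
            if (next)
                next->prev = t;
            next = t;
            return t;
        }
    };

    // Copy the range [first, last] after dest; return the last token inserted.
    Tok *copyTokens(Tok *dest, const Tok *first, const Tok *last)
    {
        std::stack<Tok *> links;
        Tok *tok2 = dest;
        for (const Tok *tok = first; tok != last->next; tok = tok->next)
        {
            tok2 = tok2->insertAfter(tok->str);
            if (tok2->str == "(" || tok2->str == "[" || tok2->str == "{")
                links.push(tok2);              // remember the opening bracket
            else if (tok2->str == ")" || tok2->str == "]" || tok2->str == "}")
            {
                Tok *open = links.top();       // pair with the most recent opener
                links.pop();
                tok2->link = open;
                open->link = tok2;
            }
        }
        return tok2;
    }

    int main()
    {
        // Build "f ( a )" and copy "( a )" after a fresh token "g".
        Tok *f = new Tok{"f"};
        Tok *open = f->insertAfter("(");
        Tok *a = open->insertAfter("a");
        Tok *close = a->insertAfter(")");

        Tok *g = new Tok{"g"};
        Tok *lastCopied = copyTokens(g, open, close);
        assert(lastCopied->str == ")");
        assert(lastCopied->link && lastCopied->link->str == "(");   // links rebuilt
        return 0;   // allocations are intentionally leaked in this sketch
    }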


@@ -246,6 +246,36 @@ void Tokenizer::insertTokens(Token *dest, const Token *src, unsigned int n)
 }
 //---------------------------------------------------------------------------
+Token *Tokenizer::copyTokens(Token *dest, const Token *first, const Token *last)
+{
+    std::stack<Token *> links;
+    Token *tok2 = dest;
+    for (const Token *tok = first; tok != last->next(); tok = tok->next())
+    {
+        tok2->insertToken(tok->str());
+        tok2 = tok2->next();
+        tok2->fileIndex(dest->fileIndex());
+        tok2->linenr(dest->linenr());
+        tok2->isUnsigned(tok->isUnsigned());
+        tok2->isSigned(tok->isSigned());
+        tok2->isLong(tok->isLong());
+        // Check for links and fix them up
+        if (tok2->str() == "(" || tok2->str() == "[" || tok2->str() == "{")
+            links.push(tok2);
+        else if (tok2->str() == ")" || tok2->str() == "]" || tok2->str() == "}")
+        {
+            Token * link = links.top();
+            tok2->link(link);
+            link->link(tok2);
+            links.pop();
+        }
+    }
+    return tok2;
+}
 //---------------------------------------------------------------------------
 // Tokenize - tokenizes a given file.
 //---------------------------------------------------------------------------
@@ -1412,26 +1442,7 @@ void Tokenizer::simplifyTypedef()
 // start substituting at the typedef name by replacing it with the type
 tok2->str(typeStart->str());
-Token * nextToken;
-std::stack<Token *> links;
-for (nextToken = typeStart->next(); nextToken != typeEnd->next(); nextToken = nextToken->next())
-{
-    tok2->insertToken(nextToken->str());
-    tok2 = tok2->next();
-    // Check for links and fix them up
-    if (tok2->str() == "(" || tok2->str() == "[")
-        links.push(tok2);
-    if (tok2->str() == ")" || tok2->str() == "]")
-    {
-        Token * link = links.top();
-        tok2->link(link);
-        link->link(tok2);
-        links.pop();
-    }
-}
+tok2 = copyTokens(tok2, typeStart->next(), typeEnd);
 if (!pointers.empty())
 {
@@ -1448,25 +1459,7 @@ void Tokenizer::simplifyTypedef()
 tok2->insertToken("(");
 tok2 = tok2->next();
 Token *tok3 = tok2;
-Token *nextTok;
-for (nextTok = funcStart; nextTok != funcEnd->next(); nextTok = nextTok->next())
-{
-    tok2->insertToken(nextTok->str());
-    tok2 = tok2->next();
-    // Check for links and fix them up
-    if (tok2->str() == "(" || tok2->str() == "[")
-        links.push(tok2);
-    if (tok2->str() == ")" || tok2->str() == "]")
-    {
-        Token * link = links.top();
-        tok2->link(link);
-        link->link(tok2);
-        links.pop();
-    }
-}
+tok2 = copyTokens(tok2, funcStart, funcEnd);
 if (!inCast)
 {
@@ -1517,30 +1510,7 @@ void Tokenizer::simplifyTypedef()
 tok2 = tok2->next();
 Token::createMutualLinks(tok2, tok3);
-tok2->insertToken("(");
-tok2 = tok2->next();
-tok3 = tok2;
-for (nextTok = argStart->next(); nextTok != argEnd; nextTok = nextTok->next())
-{
-    tok2->insertToken(nextTok->str());
-    tok2 = tok2->next();
-    // Check for links and fix them up
-    if (tok2->str() == "(" || tok2->str() == "[")
-        links.push(tok2);
-    if (tok2->str() == ")" || tok2->str() == "]")
-    {
-        Token * link = links.top();
-        tok2->link(link);
-        link->link(tok2);
-        links.pop();
-    }
-}
-tok2->insertToken(")");
-tok2 = tok2->next();
-Token::createMutualLinks(tok2, tok3);
+tok2 = copyTokens(tok2, argStart, argEnd);
 if (specStart)
 {
@@ -1652,32 +1622,7 @@ void Tokenizer::simplifyTypedef()
     Token::createMutualLinks(tok2, tok3);
 }
-tok2->insertToken("(");
-tok2 = tok2->next();
-tok3 = tok2;
-Token * nextArgTok;
-std::stack<Token *> argLinks;
-for (nextArgTok = argStart->next(); nextArgTok != argEnd; nextArgTok = nextArgTok->next())
-{
-    tok2->insertToken(nextArgTok->str());
-    tok2 = tok2->next();
-    // Check for links and fix them up
-    if (tok2->str() == "(" || tok2->str() == "[")
-        argLinks.push(tok2);
-    if (tok2->str() == ")" || tok2->str() == "]")
-    {
-        Token * link = argLinks.top();
-        tok2->link(link);
-        link->link(tok2);
-        argLinks.pop();
-    }
-}
-tok2->insertToken(")");
-tok2 = tok2->next();
-Token::createMutualLinks(tok2, tok3);
+tok2 = copyTokens(tok2, argStart, argEnd);
 if (inTemplate)
     tok2 = tok2->next();
@@ -1727,64 +1672,13 @@ void Tokenizer::simplifyTypedef()
         Token::createMutualLinks(tok2, tok4);
     }
-    tok2->insertToken("(");
-    tok2 = tok2->next();
-    Token *tok5 = tok2;
-    Token *nextArgTok;
-    std::stack<Token *> argLinks;
-    for (nextArgTok = argStart->next(); nextArgTok != argEnd; nextArgTok = nextArgTok->next())
-    {
-        tok2->insertToken(nextArgTok->str());
-        tok2 = tok2->next();
-        // Check for links and fix them up
-        if (tok2->str() == "(" || tok2->str() == "[")
-            argLinks.push(tok2);
-        if (tok2->str() == ")" || tok2->str() == "]")
-        {
-            Token * link = argLinks.top();
-            tok2->link(link);
-            link->link(tok2);
-            argLinks.pop();
-        }
-    }
-    tok2->insertToken(")");
-    tok2 = tok2->next();
-    Token::createMutualLinks(tok2, tok5);
+    tok2 = copyTokens(tok2, argStart, argEnd);
     tok2->insertToken(")");
     tok2 = tok2->next();
     Token::createMutualLinks(tok2, tok3);
-    tok2->insertToken("(");
-    tok2 = tok2->next();
-    Token *tok6 = tok2;
-    for (nextArgTok = argFuncRetStart->next(); nextArgTok != argFuncRetEnd; nextArgTok = nextArgTok->next())
-    {
-        tok2->insertToken(nextArgTok->str());
-        tok2 = tok2->next();
-        // Check for links and fix them up
-        if (tok2->str() == "(" || tok2->str() == "[")
-            argLinks.push(tok2);
-        if (tok2->str() == ")" || tok2->str() == "]")
-        {
-            Token * link = argLinks.top();
-            tok2->link(link);
-            link->link(tok2);
-            argLinks.pop();
-        }
-    }
-    tok2->insertToken(")");
-    tok2 = tok2->next();
-    Token::createMutualLinks(tok2, tok6);
+    tok2 = copyTokens(tok2, argFuncRetStart, argFuncRetEnd);
 }
 else if (ptrToArray || refToArray)
 {
@@ -1857,33 +1751,7 @@ void Tokenizer::simplifyTypedef()
 }
 else if (typeOf)
 {
-    tok2->insertToken("(");
-    tok2 = tok2->next();
-    Token *tok3 = tok2;
-    Token *nextArgTok;
-    std::stack<Token *> argLinks;
-    for (nextArgTok = argStart->next(); nextArgTok != argEnd; nextArgTok = nextArgTok->next())
-    {
-        tok2->insertToken(nextArgTok->str());
-        tok2 = tok2->next();
-        // Check for links and fix them up
-        if (tok2->str() == "(" || tok2->str() == "[")
-            argLinks.push(tok2);
-        if (tok2->str() == ")" || tok2->str() == "]")
-        {
-            Token * link = argLinks.top();
-            tok2->link(link);
-            link->link(tok2);
-            argLinks.pop();
-        }
-    }
-    tok2->insertToken(")");
-    tok2 = tok2->next();
-    Token::createMutualLinks(tok2, tok3);
+    tok2 = copyTokens(tok2, argStart, argEnd);
 }
 else if (tok2->tokAt(2) && tok2->tokAt(2)->str() == "[")
 {
@@ -1897,27 +1765,7 @@ void Tokenizer::simplifyTypedef()
 {
     if (!inCast && !inSizeof)
         tok2 = tok2->next();
-    Token * nextArrTok;
-    std::stack<Token *> arrLinks;
-    for (nextArrTok = arrayStart; nextArrTok != arrayEnd->next(); nextArrTok = nextArrTok->next())
-    {
-        tok2->insertToken(nextArrTok->str());
-        tok2 = tok2->next();
-        // Check for links and fix them up
-        if (tok2->str() == "(" || tok2->str() == "[")
-            arrLinks.push(tok2);
-        if (tok2->str() == ")" || tok2->str() == "]")
-        {
-            Token * link = arrLinks.top();
-            tok2->link(link);
-            link->link(tok2);
-            arrLinks.pop();
-        }
-    }
+    tok2 = copyTokens(tok2, arrayStart, arrayEnd);
     tok2 = tok2->next();
     if (tok2->str() == "=")
@@ -7691,26 +7539,8 @@ void Tokenizer::simplifyEnum()
 if (lastEnumValueStart && lastEnumValueEnd)
 {
     // previous value was an expression
-    Token * valueStart = tok1;
-    std::stack<Token *> links;
-    for (Token *tok2 = lastEnumValueStart; tok2 != lastEnumValueEnd->next(); tok2 = tok2->next())
-    {
-        tok1->insertToken(tok2->str());
-        tok1 = tok1->next();
-        // Check for links and fix them up
-        if (tok1->str() == "(" || tok1->str() == "[" || tok1->str() == "{")
-            links.push(tok1);
-        else if (tok1->str() == ")" || tok1->str() == "]" || tok1->str() == "}")
-        {
-            Token * link = links.top();
-            tok1->link(link);
-            link->link(tok1);
-            links.pop();
-        }
-    }
+    Token *valueStart = tok1;
+    tok1 = copyTokens(tok1, lastEnumValueStart, lastEnumValueEnd);
     // value is previous expression + 1
     tok1->insertToken("+");
@@ -7852,29 +7682,9 @@ void Tokenizer::simplifyEnum()
     tok2->str(enumValue->str());
 else
 {
-    std::stack<Token *> links;
-    tok2->str(enumValueStart->str());
-    if (tok2->str() == "(" || tok2->str() == "[" || tok2->str() == "{")
-        links.push(tok2);
-    Token * nextToken = enumValueStart->next();
-    for (; nextToken != enumValueEnd->next(); nextToken = nextToken->next())
-    {
-        tok2->insertToken(nextToken->str());
-        tok2 = tok2->next();
-        // Check for links and fix them up
-        if (tok2->str() == "(" || tok2->str() == "[" || tok2->str() == "{")
-            links.push(tok2);
-        else if (tok2->str() == ")" || tok2->str() == "]" || tok2->str() == "}")
-        {
-            Token * link = links.top();
-            tok2->link(link);
-            link->link(tok2);
-            links.pop();
-        }
-    }
+    tok2 = tok2->previous();
+    tok2->deleteNext();
+    tok2 = copyTokens(tok2, enumValueStart, enumValueEnd);
 }
 if (hasClass)


@@ -450,6 +450,15 @@ public:
     void insertTokens(Token *dest, const Token *src, unsigned int n);

+    /**
+     * Copy tokens.
+     * @param dest destination token where copied tokens will be inserted after
+     * @param first first token to copy
+     * @param last last token to copy
+     * @return new location of last token copied
+     */
+    Token *copyTokens(Token *dest, const Token *first, const Token *last);
+
     /**
     * Send error message to error logger about internal bug.
    * @param tok the token that this bug concerns.
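
A note on the @return documented above: callers rely on copyTokens() returning the last token it inserted, so they can keep appending after the copied range. That is the pattern visible in the simplifyTypedef() hunks earlier in this commit, for example:

    tok2 = copyTokens(tok2, argStart, argEnd);   // tok2 now points at the last copied token
    tok2->insertToken(")");                      // so the closing ")" lands right after the range
    tok2 = tok2->next();
    Token::createMutualLinks(tok2, tok3);        // pair it with the "(" inserted earlier (tok3)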