TOKEN: Renamed TOKEN::setstr to TOKEN::str
commit 9a3696cb6f
parent cf355c9e75
@@ -344,7 +344,7 @@ TOKEN *CheckMemoryLeakClass::getcode(const TOKEN *tok, std::list<const TOKEN *>
 { \
 rethead = new TOKEN; \
 rettail = rethead; \
-rettail->setstr(_str); \
+rettail->str(_str); \
 } \
 \
 rettail->linenr( tok->linenr() ); \
@@ -632,7 +632,7 @@ void CheckMemoryLeakClass::simplifycode(TOKEN *tok)
 else if ( trylevel == -1 && tok2->str() == "try" )
 trylevel = indentlevel;
 else if ( trylevel == -1 && tok2->str() == "throw" )
-tok2->setstr("return");
+tok2->str("return");
 }

 // reduce the code..
@@ -653,7 +653,7 @@ void CheckMemoryLeakClass::simplifycode(TOKEN *tok)
 // Replace "{ }" with ";"
 if ( TOKEN::Match(tok2->next(), "{ }") )
 {
-tok2->next()->setstr(";");
+tok2->next()->str(";");
 erase(tok2->next(), tok2->tokAt(3));
 done = false;
 }
@@ -1026,14 +1026,14 @@ void CheckMemoryLeakClass::simplifycode(TOKEN *tok)
 if ( !incase && valid )
 {
 done = false;
-tok2->setstr(";");
+tok2->str(";");
 erase( tok2, tok2->tokAt(2) );
 tok2 = tok2->next();
 bool first = true;
 while (TOKEN::Match(tok2,"case") || TOKEN::Match(tok2,"default"))
 {
 bool def = TOKEN::Match(tok2, "default");
-tok2->setstr(first ? "if" : "}");
+tok2->str(first ? "if" : "}");
 if ( first )
 {
 first = false;
@@ -1051,7 +1051,7 @@ void CheckMemoryLeakClass::simplifycode(TOKEN *tok)
 tok2 = tok2->next();
 if (TOKEN::Match(tok2,"break ;"))
 {
-tok2->setstr(";");
+tok2->str(";");
 tok2 = tok2->next()->next();
 }
 }
@@ -1095,7 +1095,7 @@ void CheckMemoryLeakClass::CheckMemoryLeak_CheckScope( const TOKEN *Tok1, const
 for ( TOKEN *tok2 = tok; tok2; tok2 = tok2->next() )
 {
 if (tok2->str() == "&use")
-tok2->setstr("use");
+tok2->str("use");
 }

 simplifycode( tok );
@@ -38,7 +38,7 @@ private:
 void nextprevious()
 {
 TOKEN *token = new TOKEN;
-token->setstr( "1" );
+token->str( "1" );
 token->insertToken( "2" );
 token->next()->insertToken( "3" );
 TOKEN *last = token->next()->next();
@@ -45,7 +45,7 @@ TOKEN::~TOKEN()
 std::free(_cstr);
 }

-void TOKEN::setstr( const char s[] )
+void TOKEN::str( const char s[] )
 {
 _str = s;
 std::free(_cstr);
@@ -392,7 +392,7 @@ void TOKEN::previous( TOKEN *previous )
 void TOKEN::insertToken( const char str[] )
 {
 TOKEN *newToken = new TOKEN;
-newToken->setstr( str );
+newToken->str( str );
 newToken->_linenr = _linenr;
 newToken->_fileIndex = _fileIndex;
 if( this->next() )

token.h (2 changes)
@@ -26,7 +26,7 @@ class TOKEN
 public:
 TOKEN();
 ~TOKEN();
-void setstr( const char s[] );
+void str( const char s[] );

 const std::string &str() const
 { return _str; }
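After this rename the setter and the existing getter are overloads of the same name: `tok->str()` with no arguments returns the token text, `tok->str("x")` with an argument replaces it, so call sites simply drop the "set" prefix, as every hunk in this commit shows. The snippet below is a minimal sketch of that pattern, not the project's actual class; in particular the cached `_cstr` copy and its allocation strategy are assumptions inferred from the `std::free(_cstr)` calls visible in the destructor and setter hunks above.

```cpp
// Minimal sketch (not the real TOKEN class): str() acts as getter with no
// arguments and as setter with one argument.
#include <cstdlib>
#include <cstring>
#include <iostream>
#include <string>

class Token
{
public:
    Token() : _cstr(nullptr) {}
    ~Token() { std::free(_cstr); }

    // Setter overload: replaces the token text.
    void str(const char s[])
    {
        _str = s;
        std::free(_cstr);  // drop the previously cached C string
        _cstr = static_cast<char *>(std::malloc(_str.size() + 1));
        if (_cstr)
            std::memcpy(_cstr, _str.c_str(), _str.size() + 1);
    }

    // Getter overload: reads the token text.
    const std::string &str() const { return _str; }

private:
    std::string _str;
    char *_cstr;  // assumed: malloc'd copy kept in sync with _str
};

int main()
{
    Token tok;
    tok.str("setstr");              // old call sites wrote tok.setstr("setstr")
    tok.str("str");                 // after the rename they just call str()
    std::cout << tok.str() << "\n"; // prints "str"
    return 0;
}
```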

tokenize.cpp (48 changes)
@@ -165,7 +165,7 @@ void Tokenizer::addtoken(const char str[], const unsigned int lineno, const unsi
 {
 _tokens = new TOKEN;
 _tokensBack = _tokens;
-_tokensBack->setstr( str2.str().c_str() );
+_tokensBack->str( str2.str().c_str() );
 }

 _tokensBack->linenr( lineno );
@@ -176,7 +176,7 @@ void Tokenizer::addtoken(const char str[], const unsigned int lineno, const unsi
 {
 if (strcmp(str,sym->name)==0)
 {
-_tokensBack->setstr(sym->value);
+_tokensBack->str(sym->value);
 break;
 }
 }
@@ -526,7 +526,7 @@ void Tokenizer::tokenizeCode(std::istream &code, const unsigned int FileIndex)
 {
 if ( tok->str() == combineWithNext[ui][0] && tok->next()->str() == combineWithNext[ui][1] )
 {
-tok->setstr(combineWithNext[ui][2]);
+tok->str(combineWithNext[ui][2]);
 tok->deleteNext();
 }
 }
@@ -543,7 +543,7 @@ void Tokenizer::tokenizeCode(std::istream &code, const unsigned int FileIndex)
 for ( TOKEN *tok2 = tok; tok2; tok2 = tok2->next() )
 {
 if ( tok2->str() == type2 )
-tok2->setstr(type1);
+tok2->str(type1);
 }
 continue;
 }
@@ -558,7 +558,7 @@ void Tokenizer::tokenizeCode(std::istream &code, const unsigned int FileIndex)
 {
 if ( tok2->str() == type3 )
 {
-tok2->setstr(type1);
+tok2->str(type1);
 tok2->insertToken(type2);
 tok2 = tok2->next();
 }
@@ -708,7 +708,7 @@ void Tokenizer::simplifyTokenList()
 {
 if (tok2->str() == sym)
 {
-tok2->setstr(num);
+tok2->str(num);
 }
 }
 }
@@ -748,7 +748,7 @@ void Tokenizer::simplifyTokenList()
 std::ostringstream str;
 // 'sizeof(type *)' has the same size as 'sizeof(char *)'
 str << sizeof(char *);
-tok->setstr( str.str().c_str() );
+tok->str( str.str().c_str() );

 for (int i = 0; i < 4; i++)
 {
@@ -764,7 +764,7 @@ void Tokenizer::simplifyTokenList()
 {
 std::ostringstream str;
 str << size;
-tok->setstr( str.str().c_str() );
+tok->str( str.str().c_str() );
 for (int i = 0; i < 3; i++)
 {
 tok->deleteNext();
@@ -774,7 +774,7 @@ void Tokenizer::simplifyTokenList()

 else if (TOKEN::Match(tok, "sizeof ( * %var% )"))
 {
-tok->setstr("100");
+tok->str("100");
 for ( int i = 0; i < 4; ++i )
 tok->deleteNext();
 }
@@ -817,7 +817,7 @@ void Tokenizer::simplifyTokenList()
 {
 std::ostringstream str;
 str << total_size;
-tok2->setstr(str.str().c_str());
+tok2->str(str.str().c_str());
 // Delete the other tokens..
 for (int i = 0; i < 3; i++)
 {
@@ -863,7 +863,7 @@ void Tokenizer::simplifyTokenList()
 tok = tok->next();
 std::ostringstream str;
 str << i1;
-tok->setstr(str.str().c_str());
+tok->str(str.str().c_str());
 for (int i = 0; i < 2; i++)
 {
 tok->deleteNext();
@@ -894,7 +894,7 @@ void Tokenizer::simplifyTokenList()
 for (int i = 0; i < 4; i++)
 {
 tok = tok->next();
-tok->setstr(str[i]);
+tok->str(str[i]);
 }

 tok->deleteNext();
@@ -966,7 +966,7 @@ void Tokenizer::simplifyTokenList()
 {
 if (tok2->str() == ",")
 {
-tok2->setstr(";");
+tok2->str(";");
 InsertTokens(tok2, type0, typelen);
 }

@@ -996,12 +996,12 @@ void Tokenizer::simplifyTokenList()
 if (VarTok->aaaa0()=='*')
 VarTok = VarTok->next();
 InsertTokens(eq, VarTok, 2);
-eq->setstr(";");
+eq->str(";");

 // "= x, " => "= x; type "
 if (tok2->str() == ",")
 {
-tok2->setstr(";");
+tok2->str(";");
 InsertTokens( tok2, type0, typelen );
 }
 break;
@@ -1017,7 +1017,7 @@ void Tokenizer::simplifyTokenList()
 for ( TOKEN *tok = _tokens; tok; tok = tok->next() )
 {
 if ( tok->str() == "NULL" )
-tok->setstr("0");
+tok->str("0");
 }

 // Replace pointer casts of 0.. "(char *)0" => "0"
@@ -1144,7 +1144,7 @@ bool Tokenizer::removeReduntantConditions()
 if( tok->previous() )
 tok = tok->previous();
 else
-tok->setstr( ";" );
+tok->str( ";" );

 TOKEN::eraseTokens( tok, elseTag->tokAt( 1 ) );
 }
@@ -1168,7 +1168,7 @@ bool Tokenizer::removeReduntantConditions()
 if( tok->previous() )
 tok = tok->previous();
 else
-tok->setstr( ";" );
+tok->str( ";" );

 TOKEN::eraseTokens( tok, tok->tokAt(5) );
 }
@@ -1186,7 +1186,7 @@ bool Tokenizer::removeReduntantConditions()
 if( tok->previous() )
 tok = tok->previous();
 else
-tok->setstr( ";" );
+tok->str( ";" );

 TOKEN::eraseTokens( tok, elseTag );
 }
@@ -1196,7 +1196,7 @@ bool Tokenizer::removeReduntantConditions()
 if( tok->previous() )
 tok = tok->previous();
 else
-tok->setstr( ";" );
+tok->str( ";" );

 TOKEN::eraseTokens( tok, tok->tokAt( 5 ) );
 }
@@ -1314,7 +1314,7 @@ bool Tokenizer::simplifyConditions()
 tok2 &&
 (tok2->str()==")" || tok2->str()=="&&" || tok2->str()=="||"))
 {
-tok->next()->setstr((tok->next()->str() != "0") ? "true" : "false");
+tok->next()->str((tok->next()->str() != "0") ? "true" : "false");
 ret = true;
 }

@@ -1352,7 +1352,7 @@ bool Tokenizer::simplifyConditions()
 tok->deleteNext();
 tok->deleteNext();

-tok->setstr( result ? "true" : "false" );
+tok->str( result ? "true" : "false" );
 ret = true;
 }
 }
@@ -1425,7 +1425,7 @@ bool Tokenizer::simplifyFunctionReturn()
 if ( TOKEN::Match(tok2, pattern.str().c_str()) )
 {
 tok2 = tok2->next();
-tok2->setstr( tok->strAt(5) );
+tok2->str( tok->strAt(5) );
 tok2->deleteNext();
 tok2->deleteNext();
 ret = true;
@@ -1482,7 +1482,7 @@ bool Tokenizer::simplifyKnownVariables()
 if ( TOKEN::Match(tok3, "if ( %varid% )", 0, varid) )
 {
 tok3 = tok3->next()->next();
-tok3->setstr( tok2->strAt(2) );
+tok3->str( tok2->strAt(2) );
 ret = true;
 }
 }