Ticket #1228: Handle tokensBack in the Token class. When adding and removing tokens, the Token class can make sure that this pointer is updated accordingly. It is very important that tokensBack has the same scope as the token list; otherwise there will be a dangling-pointer problem.
This commit is contained in:
parent
485f3c7708
commit
57d1da3910
|
@ -769,7 +769,7 @@ void CheckBufferOverrun::checkGlobalAndLocalVariable()
|
||||||
if (varid == 0)
|
if (varid == 0)
|
||||||
continue;
|
continue;
|
||||||
|
|
||||||
Token sizeTok;
|
Token sizeTok(0);
|
||||||
sizeTok.str(type);
|
sizeTok.str(type);
|
||||||
int total_size = size * _tokenizer->sizeOfType(&sizeTok);
|
int total_size = size * _tokenizer->sizeOfType(&sizeTok);
|
||||||
if (total_size == 0)
|
if (total_size == 0)
|
||||||
|
|
|
@ -660,7 +660,7 @@ Token *CheckMemoryLeakInFunction::getcode(const Token *tok, std::list<const Toke
|
||||||
} \
|
} \
|
||||||
else \
|
else \
|
||||||
{ \
|
{ \
|
||||||
rethead = new Token; \
|
rethead = new Token(0); \
|
||||||
rettail = rethead; \
|
rettail = rethead; \
|
||||||
rettail->str(_str); \
|
rettail->str(_str); \
|
||||||
} \
|
} \
|
||||||
|
|
|
@ -26,7 +26,8 @@
|
||||||
#include <sstream>
|
#include <sstream>
|
||||||
#include <map>
|
#include <map>
|
||||||
|
|
||||||
Token::Token() :
|
Token::Token(Token **t) :
|
||||||
|
tokensBack(t),
|
||||||
_str(""),
|
_str(""),
|
||||||
_isName(false),
|
_isName(false),
|
||||||
_isNumber(false),
|
_isNumber(false),
|
||||||
|
@ -87,6 +88,8 @@ void Token::deleteNext()
|
||||||
delete n;
|
delete n;
|
||||||
if (_next)
|
if (_next)
|
||||||
_next->previous(this);
|
_next->previous(this);
|
||||||
|
else if (tokensBack)
|
||||||
|
*tokensBack = this;
|
||||||
}
|
}
|
||||||
|
|
||||||
void Token::deleteThis()
|
void Token::deleteThis()
|
||||||
|
@ -139,6 +142,13 @@ void Token::replace(Token *replaceThis, Token *start, Token *end)
|
||||||
start->previous(replaceThis->previous());
|
start->previous(replaceThis->previous());
|
||||||
end->next(replaceThis->next());
|
end->next(replaceThis->next());
|
||||||
|
|
||||||
|
if (end->tokensBack && *(end->tokensBack) == replaceThis)
|
||||||
|
{
|
||||||
|
while (end->next())
|
||||||
|
end = end->next();
|
||||||
|
*(end->tokensBack) = end;
|
||||||
|
}
|
||||||
|
|
||||||
// Delete old token, which is replaced
|
// Delete old token, which is replaced
|
||||||
delete replaceThis;
|
delete replaceThis;
|
||||||
}
|
}
|
||||||
|
@ -569,7 +579,7 @@ const Token *Token::findmatch(const Token *tok, const char pattern[], unsigned i
|
||||||
|
|
||||||
void Token::insertToken(const char str[])
|
void Token::insertToken(const char str[])
|
||||||
{
|
{
|
||||||
Token *newToken = new Token;
|
Token *newToken = new Token(tokensBack);
|
||||||
newToken->str(str);
|
newToken->str(str);
|
||||||
newToken->_linenr = _linenr;
|
newToken->_linenr = _linenr;
|
||||||
newToken->_fileIndex = _fileIndex;
|
newToken->_fileIndex = _fileIndex;
|
||||||
|
@ -578,6 +588,10 @@ void Token::insertToken(const char str[])
|
||||||
newToken->next(this->next());
|
newToken->next(this->next());
|
||||||
newToken->next()->previous(newToken);
|
newToken->next()->previous(newToken);
|
||||||
}
|
}
|
||||||
|
else if (tokensBack)
|
||||||
|
{
|
||||||
|
*tokensBack = newToken;
|
||||||
|
}
|
||||||
|
|
||||||
this->next(newToken);
|
this->next(newToken);
|
||||||
newToken->previous(this);
|
newToken->previous(this);
|
||||||
|
|
|
@ -37,8 +37,14 @@
|
||||||
*/
|
*/
|
||||||
class Token
|
class Token
|
||||||
{
|
{
|
||||||
public:
|
private:
|
||||||
|
Token **tokensBack;
|
||||||
|
|
||||||
|
// Not implemented..
|
||||||
Token();
|
Token();
|
||||||
|
|
||||||
|
public:
|
||||||
|
Token(Token **tokensBack);
|
||||||
~Token();
|
~Token();
|
||||||
|
|
||||||
void str(const std::string &s);
|
void str(const std::string &s);
|
||||||
|
|
|
@ -104,11 +104,10 @@ void Tokenizer::addtoken(const char str[], const unsigned int lineno, const unsi
|
||||||
if (_tokensBack)
|
if (_tokensBack)
|
||||||
{
|
{
|
||||||
_tokensBack->insertToken(str2.str().c_str());
|
_tokensBack->insertToken(str2.str().c_str());
|
||||||
_tokensBack = _tokensBack->next();
|
|
||||||
}
|
}
|
||||||
else
|
else
|
||||||
{
|
{
|
||||||
_tokens = new Token;
|
_tokens = new Token(&_tokensBack);
|
||||||
_tokensBack = _tokens;
|
_tokensBack = _tokens;
|
||||||
_tokensBack->str(str2.str());
|
_tokensBack->str(str2.str());
|
||||||
}
|
}
|
||||||
|
@ -1929,7 +1928,7 @@ void Tokenizer::simplifySizeof()
|
||||||
else if (Token::Match(tok->tokAt(-1), "%type% %var% [ %num% ] [,)]") ||
|
else if (Token::Match(tok->tokAt(-1), "%type% %var% [ %num% ] [,)]") ||
|
||||||
Token::Match(tok->tokAt(-2), "%type% * %var% [ %num% ] [,)]"))
|
Token::Match(tok->tokAt(-2), "%type% * %var% [ %num% ] [,)]"))
|
||||||
{
|
{
|
||||||
Token tempTok;
|
Token tempTok(0);
|
||||||
tempTok.str("*");
|
tempTok.str("*");
|
||||||
sizeOfVar[varId] = MathLib::toString<long>(sizeOfType(&tempTok));
|
sizeOfVar[varId] = MathLib::toString<long>(sizeOfType(&tempTok));
|
||||||
}
|
}
|
||||||
|
|
|
@ -1385,7 +1385,7 @@ private:
|
||||||
ASSERT_EQUALS(10, CheckBufferOverrun::countSprintfLength("\\\\\\\\Hello%d \\0Text\\\\\\\\", unknownParameter));
|
ASSERT_EQUALS(10, CheckBufferOverrun::countSprintfLength("\\\\\\\\Hello%d \\0Text\\\\\\\\", unknownParameter));
|
||||||
ASSERT_EQUALS(4, CheckBufferOverrun::countSprintfLength("%%%%%d", unknownParameter));
|
ASSERT_EQUALS(4, CheckBufferOverrun::countSprintfLength("%%%%%d", unknownParameter));
|
||||||
|
|
||||||
Token strTok;
|
Token strTok(0);
|
||||||
strTok.str("\"12345\"");
|
strTok.str("\"12345\"");
|
||||||
std::list<const Token*> stringAsParameter;
|
std::list<const Token*> stringAsParameter;
|
||||||
stringAsParameter.push_back(&strTok);
|
stringAsParameter.push_back(&strTok);
|
||||||
|
@ -1400,7 +1400,7 @@ private:
|
||||||
ASSERT_EQUALS(7, CheckBufferOverrun::countSprintfLength("%6.6s", stringAsParameter));
|
ASSERT_EQUALS(7, CheckBufferOverrun::countSprintfLength("%6.6s", stringAsParameter));
|
||||||
|
|
||||||
std::list<const Token*> intAsParameter;
|
std::list<const Token*> intAsParameter;
|
||||||
Token numTok;
|
Token numTok(0);
|
||||||
numTok.str("12345");
|
numTok.str("12345");
|
||||||
intAsParameter.push_back(&numTok);
|
intAsParameter.push_back(&numTok);
|
||||||
ASSERT_EQUALS(6, CheckBufferOverrun::countSprintfLength("%02ld", intAsParameter));
|
ASSERT_EQUALS(6, CheckBufferOverrun::countSprintfLength("%02ld", intAsParameter));
|
||||||
|
@ -1416,7 +1416,7 @@ private:
|
||||||
ASSERT_EQUALS(6, CheckBufferOverrun::countSprintfLength("%5.1x", intAsParameter));
|
ASSERT_EQUALS(6, CheckBufferOverrun::countSprintfLength("%5.1x", intAsParameter));
|
||||||
|
|
||||||
std::list<const Token*> floatAsParameter;
|
std::list<const Token*> floatAsParameter;
|
||||||
Token floatTok;
|
Token floatTok(0);
|
||||||
floatTok.str("1.12345f");
|
floatTok.str("1.12345f");
|
||||||
floatAsParameter.push_back(&floatTok);
|
floatAsParameter.push_back(&floatTok);
|
||||||
TODO_ASSERT_EQUALS(5, CheckBufferOverrun::countSprintfLength("%.2f", floatAsParameter));
|
TODO_ASSERT_EQUALS(5, CheckBufferOverrun::countSprintfLength("%.2f", floatAsParameter));
|
||||||
|
@ -1424,7 +1424,7 @@ private:
|
||||||
TODO_ASSERT_EQUALS(5, CheckBufferOverrun::countSprintfLength("%2.2f", floatAsParameter));
|
TODO_ASSERT_EQUALS(5, CheckBufferOverrun::countSprintfLength("%2.2f", floatAsParameter));
|
||||||
|
|
||||||
std::list<const Token*> floatAsParameter2;
|
std::list<const Token*> floatAsParameter2;
|
||||||
Token floatTok2;
|
Token floatTok2(0);
|
||||||
floatTok2.str("100.12345f");
|
floatTok2.str("100.12345f");
|
||||||
floatAsParameter2.push_back(&floatTok2);
|
floatAsParameter2.push_back(&floatTok2);
|
||||||
TODO_ASSERT_EQUALS(7, CheckBufferOverrun::countSprintfLength("%2.2f", floatAsParameter2));
|
TODO_ASSERT_EQUALS(7, CheckBufferOverrun::countSprintfLength("%2.2f", floatAsParameter2));
|
||||||
|
|
|
@ -632,7 +632,7 @@ private:
|
||||||
std::istringstream istr("");
|
std::istringstream istr("");
|
||||||
tokenizer.tokenize(istr, "test.cpp");
|
tokenizer.tokenize(istr, "test.cpp");
|
||||||
tokenizer.simplifyTokenList();
|
tokenizer.simplifyTokenList();
|
||||||
Token tok;
|
Token tok(0);
|
||||||
tok.str(type);
|
tok.str(type);
|
||||||
return tokenizer.sizeOfType(&tok);
|
return tokenizer.sizeOfType(&tok);
|
||||||
}
|
}
|
||||||
|
|
|
@ -37,11 +37,13 @@ private:
|
||||||
TEST_CASE(multiCompare);
|
TEST_CASE(multiCompare);
|
||||||
TEST_CASE(getStrLength);
|
TEST_CASE(getStrLength);
|
||||||
TEST_CASE(strValue);
|
TEST_CASE(strValue);
|
||||||
|
|
||||||
|
TEST_CASE(deleteLast);
|
||||||
}
|
}
|
||||||
|
|
||||||
void nextprevious()
|
void nextprevious()
|
||||||
{
|
{
|
||||||
Token *token = new Token;
|
Token *token = new Token(0);
|
||||||
token->str("1");
|
token->str("1");
|
||||||
token->insertToken("2");
|
token->insertToken("2");
|
||||||
token->next()->insertToken("3");
|
token->next()->insertToken("3");
|
||||||
|
@ -84,7 +86,7 @@ private:
|
||||||
|
|
||||||
void getStrLength()
|
void getStrLength()
|
||||||
{
|
{
|
||||||
Token tok;
|
Token tok(0);
|
||||||
|
|
||||||
tok.str("\"\"");
|
tok.str("\"\"");
|
||||||
ASSERT_EQUALS(0, Token::getStrLength(&tok));
|
ASSERT_EQUALS(0, Token::getStrLength(&tok));
|
||||||
|
@ -101,13 +103,24 @@ private:
|
||||||
|
|
||||||
void strValue()
|
void strValue()
|
||||||
{
|
{
|
||||||
Token tok;
|
Token tok(0);
|
||||||
tok.str("\"\"");
|
tok.str("\"\"");
|
||||||
ASSERT_EQUALS(std::string(""), tok.strValue());
|
ASSERT_EQUALS(std::string(""), tok.strValue());
|
||||||
|
|
||||||
tok.str("\"0\"");
|
tok.str("\"0\"");
|
||||||
ASSERT_EQUALS(std::string("0"), tok.strValue());
|
ASSERT_EQUALS(std::string("0"), tok.strValue());
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
||||||
|
void deleteLast()
|
||||||
|
{
|
||||||
|
Token *tokensBack = 0;
|
||||||
|
Token tok(&tokensBack);
|
||||||
|
tok.insertToken("aba");
|
||||||
|
ASSERT_EQUALS((unsigned int)tok.next(), (unsigned int)tokensBack);
|
||||||
|
tok.deleteNext();
|
||||||
|
ASSERT_EQUALS((unsigned int)&tok, (unsigned int)tokensBack);
|
||||||
|
}
|
||||||
};
|
};
|
||||||
|
|
||||||
REGISTER_TEST(TestTOKEN)
|
REGISTER_TEST(TestTOKEN)
|
||||||
|
|
Loading…
Reference in New Issue