* Added declaration for deletePrevious function
* Added definition for deletePrevious function
* Fixed crash from deleteThis invalidating pointers
  The crash was caused by deleteThis() invalidating the pointer to a constant variable usage. This happened when a usage followed an assignment. This fixes bug #8579.
* Added tokensFront to match tokensBack
  This means deletePrevious can set the list's front if necessary.
* Initialised tokensFront in appropriate places
* Switched to using default Token constructor
* Switched to using Token default constructor
* Switched to using default constructor for Token
* Added missing argument to Token constructor
* Changed to use default constructor for Tokens
* Switched to using default constructor for Tokens
* Switched to using default constructor for Token
* Added new test for deleting front Token
  Also made sure to use the correct constructor for Token in other tests.
* Syntax error
* Replaced tokensFront and tokensBack with a struct
  This decreases the size of the Token class for performance purposes.
* Replaced tokensFront and tokensBack with a struct
* Added tokensFrontBack to destructor
* Reworked to use TokensFrontBack struct
  Also ran astyle.
* Reworked to use TokenList's TokensFrontBack member
* Reworked to use TokensFrontBack struct
* Reworked to use TokensFrontBack struct
* Reworked to work with TokensFrontBack struct
* Removed unnecessary scope operator
* Added missing parentheses
* Fixed syntax error
* Removed unnecessary constructor
* Default constructor now 0-initialises everything
  This is safer for not using a temporary TokensFrontBack object, and doesn't use delegating constructors which aren't supported yet.
* Fixed unsafe null check
* Added missing explicit keyword
* Fixing stylistic nits
  Removed default constructor as it has been superseded by the single-argument constructor with a default argument value. Renamed listEnds to tokensFrontBack. Fixed if statement that was supposed to be adding safety but would actually cause a crash if tokensFrontBack was null.
* Fixing stylistic nits
  Removed default constructor and replaced it with a single-argument constructor with a default value.
* Fixing stylistic nits
  Renamed _listEnds to _tokensFrontBack.
* Fixing stylistic nits
  Renamed _listEnds to _tokensFrontBack.
parent 1af983dd95
commit 42a65c5160
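Before the diff: a minimal sketch (not code from this commit; the helper name removeTokenSafely is invented for illustration) of the deletion pattern the #8579 fix relies on, assuming the Token interface introduced here (TokensFrontBack, deleteNext, deletePrevious, deleteThis). deleteThis() rewrites a token in place with its neighbour's contents, so an outside pointer to that token, such as the constant-variable usage in #8579, silently ends up referring to different data; deleting through a neighbouring token leaves such pointers intact, and deletePrevious() can repair the list's front pointer via TokensFrontBack when the first token is removed.

#include "token.h"  // cppcheck's Token and the new TokensFrontBack struct

// Sketch of the pattern used by Tokenizer::simplifyKnownVariables in the diff below:
// prefer removing 'startTok' through a neighbour so other pointers to it stay valid,
// and fall back to deleteThis() only when the token has no neighbours at all.
static void removeTokenSafely(Token *startTok)
{
    if (startTok->previous()) {
        startTok->previous()->deleteNext();    // unlink and delete startTok from the left
    } else if (startTok->next()) {
        startTok->next()->deletePrevious();    // startTok is the list front; this path also
                                               // updates TokensFrontBack::front
    } else {
        startTok->deleteThis();                // lone token: no safer alternative exists
    }
}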
@@ -1329,7 +1329,7 @@ CheckIO::ArgumentInfo::ArgumentInfo(const Token * arg, const Settings *settings,
 top = top->astParent();
 const ValueType *valuetype = top->argumentType();
 if (valuetype && valuetype->type >= ValueType::Type::BOOL) {
-typeToken = tempToken = new Token(nullptr);
+typeToken = tempToken = new Token();
 if (valuetype->pointer && valuetype->constness & 1) {
 tempToken->str("const");
 tempToken->insertToken("a");
@@ -1406,7 +1406,7 @@ CheckIO::ArgumentInfo::ArgumentInfo(const Token * arg, const Settings *settings,
 if (function->retType->classScope->enumType)
 typeToken = function->retType->classScope->enumType;
 else {
-tempToken = new Token(nullptr);
+tempToken = new Token();
 tempToken->fileIndex(tok1->fileIndex());
 tempToken->linenr(tok1->linenr());
 tempToken->str("int");
@@ -1427,7 +1427,7 @@ CheckIO::ArgumentInfo::ArgumentInfo(const Token * arg, const Settings *settings,
 if (function->retType->classScope->enumType)
 typeToken = function->retType->classScope->enumType;
 else {
-tempToken = new Token(nullptr);
+tempToken = new Token();
 tempToken->fileIndex(tok1->fileIndex());
 tempToken->linenr(tok1->linenr());
 tempToken->str("int");
@@ -1452,7 +1452,7 @@ CheckIO::ArgumentInfo::ArgumentInfo(const Token * arg, const Settings *settings,
 // check for some common well known functions
 else if (isCPP && ((Token::Match(tok1->previous(), "%var% . size|empty|c_str ( ) [,)]") && isStdContainer(tok1->previous())) ||
 (Token::Match(tok1->previous(), "] . size|empty|c_str ( ) [,)]") && isStdContainer(tok1->previous()->link()->previous())))) {
-tempToken = new Token(nullptr);
+tempToken = new Token();
 tempToken->fileIndex(tok1->fileIndex());
 tempToken->linenr(tok1->linenr());
 if (tok1->next()->str() == "size") {
@@ -1513,7 +1513,7 @@ CheckIO::ArgumentInfo::ArgumentInfo(const Token * arg, const Settings *settings,
 if (variableInfo->type() && variableInfo->type()->classScope && variableInfo->type()->classScope->enumType)
 typeToken = variableInfo->type()->classScope->enumType;
 else {
-tempToken = new Token(nullptr);
+tempToken = new Token();
 tempToken->fileIndex(tok1->fileIndex());
 tempToken->linenr(tok1->linenr());
 tempToken->str("int");
@@ -1552,7 +1552,7 @@ bool CheckIO::ArgumentInfo::isStdVectorOrString()
 _template = true;
 return true;
 } else if (variableInfo->isStlType(stl_string)) {
-tempToken = new Token(nullptr);
+tempToken = new Token();
 tempToken->fileIndex(variableInfo->typeStartToken()->fileIndex());
 tempToken->linenr(variableInfo->typeStartToken()->linenr());
 if (variableInfo->typeStartToken()->strAt(2) == "string")
@@ -1570,7 +1570,7 @@ bool CheckIO::ArgumentInfo::isStdVectorOrString()
 _template = true;
 return true;
 } else if (Token::Match(nameTok, "std :: string|wstring")) {
-tempToken = new Token(nullptr);
+tempToken = new Token();
 tempToken->fileIndex(variableInfo->typeStartToken()->fileIndex());
 tempToken->linenr(variableInfo->typeStartToken()->linenr());
 if (nameTok->strAt(2) == "string")
@@ -688,7 +688,7 @@ Token *CheckMemoryLeakInFunction::getcode(const Token *tok, std::list<const Toke
 std::set<unsigned int> extravar;

 // The first token should be ";"
-Token* rethead = new Token(nullptr);
+Token* rethead = new Token();
 rethead->str(";");
 rethead->linenr(tok->linenr());
 rethead->fileIndex(tok->fileIndex());
@@ -35,8 +35,8 @@

 const std::list<ValueFlow::Value> Token::emptyValueList;

-Token::Token(Token **tokens) :
-tokensBack(tokens),
+Token::Token(TokensFrontBack *tokensFrontBack) :
+tokensFrontBack(tokensFrontBack),
 _next(nullptr),
 _previous(nullptr),
 _link(nullptr),
@@ -219,8 +219,28 @@ void Token::deleteNext(unsigned long index)

 if (_next)
 _next->previous(this);
-else if (tokensBack)
-*tokensBack = this;
+else if (tokensFrontBack)
+tokensFrontBack->back = this;
 }

+void Token::deletePrevious(unsigned long index)
+{
+while (_previous && index) {
+Token *p = _previous;
+
+// #8154 we are about to be unknown -> destroy the link to us
+if (p->_link && p->_link->_link == p)
+p->_link->link(nullptr);
+
+_previous = p->previous();
+delete p;
+--index;
+}
+
+if (_previous)
+_previous->next(this);
+else if (tokensFrontBack)
+tokensFrontBack->front = this;
+}
+
 void Token::swapWithNext()
@@ -312,10 +332,10 @@ void Token::replace(Token *replaceThis, Token *start, Token *end)
 start->previous(replaceThis->previous());
 end->next(replaceThis->next());

-if (end->tokensBack && *(end->tokensBack) == end) {
+if (end->tokensFrontBack && end->tokensFrontBack->back == end) {
 while (end->next())
 end = end->next();
-*(end->tokensBack) = end;
+end->tokensFrontBack->back = end;
 }

 // Update _progressValue, fileIndex and linenr
@@ -906,7 +926,7 @@ void Token::insertToken(const std::string &tokenStr, const std::string &original
 if (_str.empty())
 newToken = this;
 else
-newToken = new Token(tokensBack);
+newToken = new Token(tokensFrontBack);
 newToken->str(tokenStr);
 if (!originalNameStr.empty())
 newToken->originalName(originalNameStr);
@@ -929,8 +949,8 @@ void Token::insertToken(const std::string &tokenStr, const std::string &original
 if (this->next()) {
 newToken->next(this->next());
 newToken->next()->previous(newToken);
-} else if (tokensBack) {
-*tokensBack = newToken;
+} else if (tokensFrontBack) {
+tokensFrontBack->back = newToken;
 }
 this->next(newToken);
 newToken->previous(this);
lib/token.h

@@ -39,6 +39,14 @@ class Type;
 class ValueType;
 class Variable;

+/**
+* @brief This struct stores pointers to the front and back tokens of the list this token is in.
+*/
+struct TokensFrontBack {
+Token *front;
+Token *back;
+};
+
 /// @addtogroup Core
 /// @{

@@ -54,10 +62,9 @@ class Variable;
 */
 class CPPCHECKLIB Token {
 private:
-Token **tokensBack;
+TokensFrontBack* tokensFrontBack;

 // Not implemented..
-Token();
 Token(const Token &);
 Token operator=(const Token &);

@@ -71,7 +78,7 @@ public:
 eNone
 };

-explicit Token(Token **tokens);
+explicit Token(TokensFrontBack *tokensFrontBack = nullptr);
 ~Token();

 template<typename T>
@@ -97,6 +104,11 @@ public:
 */
 void deleteNext(unsigned long index = 1);

+/**
+* Unlink and delete the previous 'index' tokens.
+*/
+void deletePrevious(unsigned long index = 1);
+
 /**
 * Swap the contents of this token with the next token.
 */
@@ -3372,7 +3372,7 @@ bool Tokenizer::simplifySizeof()

 else if (Token::Match(tok->previous(), "%type% %name% [ %num% ] [,)]") ||
 Token::Match(tok->tokAt(-2), "%type% * %name% [ %num% ] [,)]")) {
-Token tempTok(nullptr);
+Token tempTok;
 tempTok.str("*");
 sizeOfVar[varId] = sizeOfType(&tempTok);
 declTokOfVar[varId] = tok;
@@ -6445,7 +6445,16 @@ bool Tokenizer::simplifyKnownVariables()
 while (startTok->next()->str() != ";")
 startTok->deleteNext();
 startTok->deleteNext();
+
+// #8579 if we can we want another token to delete startTok. if we can't it doesn't matter
+if (startTok->previous()) {
+startTok->previous()->deleteNext();
+} else if (startTok->next()) {
+startTok->next()->deletePrevious();
+} else {
 startTok->deleteThis();
+}
 startTok = nullptr;
+
 constantVar->second = nullptr;
 ret = true;
@@ -38,8 +38,7 @@ static const unsigned int AST_MAX_DEPTH = 50U;


 TokenList::TokenList(const Settings* settings) :
-_front(nullptr),
-_back(nullptr),
+_tokensFrontBack(),
 _settings(settings),
 _isC(false),
 _isCPP(false)
@@ -66,9 +65,9 @@ const std::string& TokenList::getSourceFilePath() const
 // Deallocate lists..
 void TokenList::deallocateTokens()
 {
-deleteTokens(_front);
-_front = nullptr;
-_back = nullptr;
+deleteTokens(_tokensFrontBack.front);
+_tokensFrontBack.front = nullptr;
+_tokensFrontBack.back = nullptr;
 _files.clear();
 }

@@ -142,18 +141,18 @@ void TokenList::addtoken(std::string str, const unsigned int lineno, const unsig
 str = MathLib::value(str).str() + suffix;
 }

-if (_back) {
-_back->insertToken(str);
+if (_tokensFrontBack.back) {
+_tokensFrontBack.back->insertToken(str);
 } else {
-_front = new Token(&_back);
-_back = _front;
-_back->str(str);
+_tokensFrontBack.front = new Token(&_tokensFrontBack);
+_tokensFrontBack.back = _tokensFrontBack.front;
+_tokensFrontBack.back->str(str);
 }

 if (isCPP() && str == "delete")
-_back->isKeyword(true);
-_back->linenr(lineno);
-_back->fileIndex(fileno);
+_tokensFrontBack.back->isKeyword(true);
+_tokensFrontBack.back->linenr(lineno);
+_tokensFrontBack.back->fileIndex(fileno);
 }

 void TokenList::addtoken(const Token * tok, const unsigned int lineno, const unsigned int fileno)
@@ -161,19 +160,19 @@ void TokenList::addtoken(const Token * tok, const unsigned int lineno, const uns
 if (tok == nullptr)
 return;

-if (_back) {
-_back->insertToken(tok->str(), tok->originalName());
+if (_tokensFrontBack.back) {
+_tokensFrontBack.back->insertToken(tok->str(), tok->originalName());
 } else {
-_front = new Token(&_back);
-_back = _front;
-_back->str(tok->str());
+_tokensFrontBack.front = new Token(&_tokensFrontBack);
+_tokensFrontBack.back = _tokensFrontBack.front;
+_tokensFrontBack.back->str(tok->str());
 if (!tok->originalName().empty())
-_back->originalName(tok->originalName());
+_tokensFrontBack.back->originalName(tok->originalName());
 }

-_back->linenr(lineno);
-_back->fileIndex(fileno);
-_back->flags(tok->flags());
+_tokensFrontBack.back->linenr(lineno);
+_tokensFrontBack.back->fileIndex(fileno);
+_tokensFrontBack.back->flags(tok->flags());
 }


@@ -305,20 +304,20 @@ void TokenList::createTokens(const simplecpp::TokenList *tokenList)
 if (str.size() > 1 && str[0] == '.' && std::isdigit(str[1]))
 str = '0' + str;

-if (_back) {
-_back->insertToken(str);
+if (_tokensFrontBack.back) {
+_tokensFrontBack.back->insertToken(str);
 } else {
-_front = new Token(&_back);
-_back = _front;
-_back->str(str);
+_tokensFrontBack.front = new Token(&_tokensFrontBack);
+_tokensFrontBack.back = _tokensFrontBack.front;
+_tokensFrontBack.back->str(str);
 }

-if (isCPP() && _back->str() == "delete")
-_back->isKeyword(true);
-_back->fileIndex(tok->location.fileIndex);
-_back->linenr(tok->location.line);
-_back->col(tok->location.col);
-_back->isExpandedMacro(!tok->macro.empty());
+if (isCPP() && _tokensFrontBack.back->str() == "delete")
+_tokensFrontBack.back->isKeyword(true);
+_tokensFrontBack.back->fileIndex(tok->location.fileIndex);
+_tokensFrontBack.back->linenr(tok->location.line);
+_tokensFrontBack.back->col(tok->location.col);
+_tokensFrontBack.back->isExpandedMacro(!tok->macro.empty());
 }

 if (_settings && _settings->relativePaths) {
@@ -326,7 +325,7 @@ void TokenList::createTokens(const simplecpp::TokenList *tokenList)
 _files[i] = Path::getRelativePath(_files[i], _settings->basePaths);
 }

-Token::assignProgressValues(_front);
+Token::assignProgressValues(_tokensFrontBack.front);
 }

 //---------------------------------------------------------------------------
@@ -1164,7 +1163,7 @@ static Token * createAstAtToken(Token *tok, bool cpp)

 void TokenList::createAst()
 {
-for (Token *tok = _front; tok; tok = tok ? tok->next() : nullptr) {
+for (Token *tok = _tokensFrontBack.front; tok; tok = tok ? tok->next() : nullptr) {
 tok = createAstAtToken(tok, isCPP());
 }
 }
@@ -1173,7 +1172,7 @@ void TokenList::validateAst() const
 {
 // Check for some known issues in AST to avoid crash/hang later on
 std::set < const Token* > safeAstTokens; // list of "safe" AST tokens without endless recursion
-for (const Token *tok = _front; tok; tok = tok->next()) {
+for (const Token *tok = _tokensFrontBack.front; tok; tok = tok->next()) {
 // Syntax error if binary operator only has 1 operand
 if ((tok->isAssignmentOp() || tok->isComparisonOp() || Token::Match(tok,"[|^/%]")) && tok->astOperand1() && !tok->astOperand2())
 throw InternalError(tok, "Syntax Error: AST broken, binary operator has only one operand.", InternalError::AST);
@@ -1217,7 +1216,7 @@ bool TokenList::validateToken(const Token* tok) const
 {
 if (!tok)
 return true;
-for (const Token *t = _front; t; t = t->next()) {
+for (const Token *t = _tokensFrontBack.front; t; t = t->next()) {
 if (tok==t)
 return true;
 }
@@ -22,6 +22,7 @@
 //---------------------------------------------------------------------------

 #include "config.h"
+#include "token.h"

 #include <string>
 #include <vector>
@@ -104,18 +105,18 @@ public:

 /** get first token of list */
 const Token *front() const {
-return _front;
+return _tokensFrontBack.front;
 }
 Token *front() {
-return _front;
+return _tokensFrontBack.front;
 }

 /** get last token of list */
 const Token *back() const {
-return _back;
+return _tokensFrontBack.back;
 }
 Token *back() {
-return _back;
+return _tokensFrontBack.back;
 }

 /**
@@ -181,7 +182,7 @@ private:
 TokenList &operator=(const TokenList &);

 /** Token list */
-Token *_front, *_back;
+TokensFrontBack _tokensFrontBack;

 /** filenames for the tokenized source code (source + included) */
 std::vector<std::string> _files;
@@ -3149,7 +3149,7 @@ private:
 ASSERT_EQUALS(10, CheckBufferOverrun::countSprintfLength("\\\\\\\\Hello%d \\0Text\\\\\\\\", unknownParameter));
 ASSERT_EQUALS(4, CheckBufferOverrun::countSprintfLength("%%%%%d", unknownParameter));

-Token strTok(0);
+Token strTok;
 std::list<const Token*> stringAsParameter(1, &strTok);
 strTok.str("\"\"");
 ASSERT_EQUALS(4, CheckBufferOverrun::countSprintfLength("str%s", stringAsParameter));
@@ -3164,7 +3164,7 @@ private:
 ASSERT_EQUALS(6, CheckBufferOverrun::countSprintfLength("%5.6s", stringAsParameter));
 ASSERT_EQUALS(7, CheckBufferOverrun::countSprintfLength("%6.6s", stringAsParameter));

-Token numTok(0);
+Token numTok;
 numTok.str("12345");
 std::list<const Token*> intAsParameter(1, &numTok);
 ASSERT_EQUALS(6, CheckBufferOverrun::countSprintfLength("%02ld", intAsParameter));
@@ -3179,14 +3179,14 @@ private:
 ASSERT_EQUALS(6, CheckBufferOverrun::countSprintfLength("%1.5x", intAsParameter));
 ASSERT_EQUALS(6, CheckBufferOverrun::countSprintfLength("%5.1x", intAsParameter));

-Token floatTok(0);
+Token floatTok;
 floatTok.str("1.12345f");
 std::list<const Token*> floatAsParameter(1, &floatTok);
 TODO_ASSERT_EQUALS(5, 3, CheckBufferOverrun::countSprintfLength("%.2f", floatAsParameter));
 ASSERT_EQUALS(9, CheckBufferOverrun::countSprintfLength("%8.2f", floatAsParameter));
 TODO_ASSERT_EQUALS(5, 3, CheckBufferOverrun::countSprintfLength("%2.2f", floatAsParameter));

-Token floatTok2(0);
+Token floatTok2;
 floatTok2.str("100.12345f");
 std::list<const Token*> floatAsParameter2(1, &floatTok2);
 TODO_ASSERT_EQUALS(7, 3, CheckBufferOverrun::countSprintfLength("%2.2f", floatAsParameter2));
@@ -858,7 +858,7 @@ private:
 unsigned int sizeofFromTokenizer(const char type[]) {
 Tokenizer tokenizer(&settings0, this);
 tokenizer.fillTypeSizes();
-Token tok1(0);
+Token tok1;
 tok1.str(type);
 return tokenizer.sizeOfType(&tok1);
 }
@@ -61,6 +61,7 @@ private:
 TEST_CASE(strValue);

 TEST_CASE(deleteLast);
+TEST_CASE(deleteFirst);
 TEST_CASE(nextArgument);
 TEST_CASE(eraseTokens);

@@ -107,7 +108,7 @@ private:
 }

 void nextprevious() const {
-Token *token = new Token(0);
+Token *token = new Token();
 token->str("1");
 token->insertToken("2");
 token->next()->insertToken("3");
@@ -139,49 +140,49 @@ private:

 void multiCompare() const {
 // Test for found
-Token one(0);
+Token one;
 one.str("one");
 ASSERT_EQUALS(1, Token::multiCompare(&one, "one|two", 0));

-Token two(0);
+Token two;
 two.str("two");
 ASSERT_EQUALS(1, Token::multiCompare(&two, "one|two", 0));
 ASSERT_EQUALS(1, Token::multiCompare(&two, "verybig|two|", 0));

 // Test for empty string found
-Token notfound(0);
+Token notfound;
 notfound.str("notfound");
 ASSERT_EQUALS(0, Token::multiCompare(&notfound, "one|two|", 0));

 // Test for not found
 ASSERT_EQUALS(static_cast<unsigned int>(-1), static_cast<unsigned int>(Token::multiCompare(&notfound, "one|two", 0)));

-Token s(0);
+Token s;
 s.str("s");
 ASSERT_EQUALS(static_cast<unsigned int>(-1), static_cast<unsigned int>(Token::multiCompare(&s, "verybig|two", 0)));

-Token ne(0);
+Token ne;
 ne.str("ne");
 ASSERT_EQUALS(static_cast<unsigned int>(-1), static_cast<unsigned int>(Token::multiCompare(&ne, "one|two", 0)));

-Token a(0);
+Token a;
 a.str("a");
 ASSERT_EQUALS(static_cast<unsigned int>(-1), static_cast<unsigned int>(Token::multiCompare(&a, "abc|def", 0)));

-Token abcd(0);
+Token abcd;
 abcd.str("abcd");
 ASSERT_EQUALS(static_cast<unsigned int>(-1), static_cast<unsigned int>(Token::multiCompare(&abcd, "abc|def", 0)));

-Token def(0);
+Token def;
 def.str("default");
 ASSERT_EQUALS(static_cast<unsigned int>(-1), static_cast<unsigned int>(Token::multiCompare(&def, "abc|def", 0)));

 // %op%
-Token plus(0);
+Token plus;
 plus.str("+");
 ASSERT_EQUALS(1, Token::multiCompare(&plus, "one|%op%", 0));
 ASSERT_EQUALS(1, Token::multiCompare(&plus, "%op%|two", 0));
-Token x(0);
+Token x;
 x.str("x");
 ASSERT_EQUALS(-1, Token::multiCompare(&x, "one|%op%", 0));
 ASSERT_EQUALS(-1, Token::multiCompare(&x, "%op%|two", 0));
@@ -257,13 +258,13 @@ private:
 }

 void multiCompare5() const {
-Token tok(0);
+Token tok;
 tok.str("||");
 ASSERT_EQUALS(true, Token::multiCompare(&tok, "+|%or%|%oror%", 0) >= 0);
 }

 void getStrLength() const {
-Token tok(0);
+Token tok;

 tok.str("\"\"");
 ASSERT_EQUALS(0, (int)Token::getStrLength(&tok));
@@ -279,7 +280,7 @@ private:
 }

 void getStrSize() const {
-Token tok(0);
+Token tok;

 tok.str("\"abc\"");
 ASSERT_EQUALS(sizeof("abc"), Token::getStrSize(&tok));
@@ -292,7 +293,7 @@ private:
 }

 void strValue() const {
-Token tok(0);
+Token tok;

 tok.str("\"\"");
 ASSERT_EQUALS("", tok.strValue());
@@ -319,12 +320,25 @@ private:


 void deleteLast() const {
-Token *tokensBack = 0;
-Token tok(&tokensBack);
+TokensFrontBack listEnds{ 0 };
+Token **tokensBack = &(listEnds.back);
+Token tok(&listEnds);
 tok.insertToken("aba");
-ASSERT_EQUALS(true, tokensBack == tok.next());
+ASSERT_EQUALS(true, *tokensBack == tok.next());
 tok.deleteNext();
-ASSERT_EQUALS(true, tokensBack == &tok);
+ASSERT_EQUALS(true, *tokensBack == &tok);
 }

+void deleteFirst() const {
+TokensFrontBack listEnds{ 0 };
+Token **tokensFront = &(listEnds.front);
+Token tok(&listEnds);
+
+tok.insertToken("aba");
+
+ASSERT_EQUALS(true, *tokensFront == tok.previous());
+tok.deletePrevious();
+ASSERT_EQUALS(true, *tokensFront == &tok);
+}
+
 void nextArgument() const {
@@ -362,7 +376,7 @@ private:
 ASSERT_EQUALS(true, Token::Match(singleChar.tokens(), "[a|bc]"));
 ASSERT_EQUALS(false, Token::Match(singleChar.tokens(), "[d|ef]"));

-Token multiChar(0);
+Token multiChar;
 multiChar.str("[ab");
 ASSERT_EQUALS(false, Token::Match(&multiChar, "[ab|def]"));
 }
@@ -601,7 +615,7 @@ private:
 void isArithmeticalOp() const {
 std::vector<std::string>::const_iterator test_op, test_ops_end = arithmeticalOps.end();
 for (test_op = arithmeticalOps.begin(); test_op != test_ops_end; ++test_op) {
-Token tok(nullptr);
+Token tok;
 tok.str(*test_op);
 ASSERT_EQUALS(true, tok.isArithmeticalOp());
 }
@@ -616,7 +630,7 @@ private:

 std::vector<std::string>::const_iterator other_op, other_ops_end = other_ops.end();
 for (other_op = other_ops.begin(); other_op != other_ops_end; ++other_op) {
-Token tok(nullptr);
+Token tok;
 tok.str(*other_op);
 ASSERT_EQUALS_MSG(false, tok.isArithmeticalOp(), "Failing arithmetical operator: " + *other_op);
 }
@@ -632,7 +646,7 @@ private:

 std::vector<std::string>::const_iterator test_op, test_ops_end = test_ops.end();
 for (test_op = test_ops.begin(); test_op != test_ops_end; ++test_op) {
-Token tok(nullptr);
+Token tok;
 tok.str(*test_op);
 ASSERT_EQUALS(true, tok.isOp());
 }
@@ -643,7 +657,7 @@ private:

 std::vector<std::string>::const_iterator other_op, other_ops_end = other_ops.end();
 for (other_op = other_ops.begin(); other_op != other_ops_end; ++other_op) {
-Token tok(nullptr);
+Token tok;
 tok.str(*other_op);
 ASSERT_EQUALS_MSG(false, tok.isOp(), "Failing normal operator: " + *other_op);
 }
@@ -658,7 +672,7 @@ private:

 std::vector<std::string>::const_iterator test_op, test_ops_end = test_ops.end();
 for (test_op = test_ops.begin(); test_op != test_ops_end; ++test_op) {
-Token tok(nullptr);
+Token tok;
 tok.str(*test_op);
 ASSERT_EQUALS(true, tok.isConstOp());
 }
@@ -670,7 +684,7 @@ private:

 std::vector<std::string>::const_iterator other_op, other_ops_end = other_ops.end();
 for (other_op = other_ops.begin(); other_op != other_ops_end; ++other_op) {
-Token tok(nullptr);
+Token tok;
 tok.str(*other_op);
 ASSERT_EQUALS_MSG(false, tok.isConstOp(), "Failing normal operator: " + *other_op);
 }
@@ -686,7 +700,7 @@ private:

 std::vector<std::string>::const_iterator test_op, test_ops_end = test_ops.end();
 for (test_op = test_ops.begin(); test_op != test_ops_end; ++test_op) {
-Token tok(nullptr);
+Token tok;
 tok.str(*test_op);
 ASSERT_EQUALS(true, tok.isExtendedOp());
 }
@@ -694,7 +708,7 @@ private:
 // Negative test against assignment operators
 std::vector<std::string>::const_iterator other_op, other_ops_end = assignmentOps.end();
 for (other_op = assignmentOps.begin(); other_op != other_ops_end; ++other_op) {
-Token tok(nullptr);
+Token tok;
 tok.str(*other_op);
 ASSERT_EQUALS_MSG(false, tok.isExtendedOp(), "Failing assignment operator: " + *other_op);
 }
@@ -703,7 +717,7 @@ private:
 void isAssignmentOp() const {
 std::vector<std::string>::const_iterator test_op, test_ops_end = assignmentOps.end();
 for (test_op = assignmentOps.begin(); test_op != test_ops_end; ++test_op) {
-Token tok(nullptr);
+Token tok;
 tok.str(*test_op);
 ASSERT_EQUALS(true, tok.isAssignmentOp());
 }
@@ -718,7 +732,7 @@ private:

 std::vector<std::string>::const_iterator other_op, other_ops_end = other_ops.end();
 for (other_op = other_ops.begin(); other_op != other_ops_end; ++other_op) {
-Token tok(nullptr);
+Token tok;
 tok.str(*other_op);
 ASSERT_EQUALS_MSG(false, tok.isAssignmentOp(), "Failing assignment operator: " + *other_op);
 }
@@ -727,26 +741,26 @@ private:
 void operators() const {
 std::vector<std::string>::const_iterator test_op;
 for (test_op = extendedOps.begin(); test_op != extendedOps.end(); ++test_op) {
-Token tok(nullptr);
+Token tok;
 tok.str(*test_op);
 ASSERT_EQUALS(Token::eExtendedOp, tok.tokType());
 }
 for (test_op = logicalOps.begin(); test_op != logicalOps.end(); ++test_op) {
-Token tok(nullptr);
+Token tok;
 tok.str(*test_op);
 ASSERT_EQUALS(Token::eLogicalOp, tok.tokType());
 }
 for (test_op = bitOps.begin(); test_op != bitOps.end(); ++test_op) {
-Token tok(nullptr);
+Token tok;
 tok.str(*test_op);
 ASSERT_EQUALS(Token::eBitOp, tok.tokType());
 }
 for (test_op = comparisonOps.begin(); test_op != comparisonOps.end(); ++test_op) {
-Token tok(nullptr);
+Token tok;
 tok.str(*test_op);
 ASSERT_EQUALS(Token::eComparisonOp, tok.tokType());
 }
-Token tok(nullptr);
+Token tok;
 tok.str("++");
 ASSERT_EQUALS(Token::eIncDecOp, tok.tokType());
 tok.str("--");
@@ -754,7 +768,7 @@ private:
 }

 void literals() const {
-Token tok(nullptr);
+Token tok;

 tok.str("\"foo\"");
 ASSERT(tok.tokType() == Token::eString);
@@ -785,13 +799,13 @@ private:

 std::vector<std::string>::const_iterator test_op, test_ops_end = standard_types.end();
 for (test_op = standard_types.begin(); test_op != test_ops_end; ++test_op) {
-Token tok(nullptr);
+Token tok;
 tok.str(*test_op);
 ASSERT_EQUALS_MSG(true, tok.isStandardType(), "Failing standard type: " + *test_op);
 }

 // Negative test
-Token tok(nullptr);
+Token tok;
 tok.str("string");
 ASSERT_EQUALS(false, tok.isStandardType());

@@ -807,7 +821,7 @@ private:
 }

 void updateProperties() const {
-Token tok(nullptr);
+Token tok;
 tok.str("foobar");

 ASSERT_EQUALS(true, tok.isName());
@@ -820,7 +834,7 @@ private:
 }

 void updatePropertiesConcatStr() const {
-Token tok(nullptr);
+Token tok;
 tok.str("true");

 ASSERT_EQUALS(true, tok.isBoolean());
@@ -832,39 +846,39 @@ private:
 }

 void isNameGuarantees1() const {
-Token tok(nullptr);
+Token tok;
 tok.str("Name");
 ASSERT_EQUALS(true, tok.isName());
 }

 void isNameGuarantees2() const {
-Token tok(nullptr);
+Token tok;
 tok.str("_name");
 ASSERT_EQUALS(true, tok.isName());
 }

 void isNameGuarantees3() const {
-Token tok(nullptr);
+Token tok;
 tok.str("_123");
 ASSERT_EQUALS(true, tok.isName());
 }

 void isNameGuarantees4() const {
-Token tok(nullptr);
+Token tok;
 tok.str("123456");
 ASSERT_EQUALS(false, tok.isName());
 ASSERT_EQUALS(true, tok.isNumber());
 }

 void isNameGuarantees5() const {
-Token tok(nullptr);
+Token tok;
 tok.str("a123456");
 ASSERT_EQUALS(true, tok.isName());
 ASSERT_EQUALS(false, tok.isNumber());
 }

 void isNameGuarantees6() const {
-Token tok(nullptr);
+Token tok;
 tok.str("$f");
 ASSERT_EQUALS(true, tok.isName());
 }