Refactorings in tokenize.cpp and testsimplifytokens.cpp:
- Avoided const_cast in testsimplifytokens.cpp - Removed redundant null-check (VS11 code analysis) - Fixed MSVC compiler warning - Replaced some indentation counters
This commit is contained in:
parent
9b5c245899
commit
c4b881f844
|
@ -178,13 +178,9 @@ bool Tokenizer::duplicateTypedef(Token **tokPtr, const Token *name, const Token
|
||||||
return false;
|
return false;
|
||||||
|
|
||||||
// find end of definition
|
// find end of definition
|
||||||
int level = 0;
|
while (end && end->next() && !Token::Match(end->next(), ";|)|>")) {
|
||||||
while (end && end->next() && (!Token::Match(end->next(), ";|)|>") ||
|
|
||||||
(end->next()->str() == ")" && level == 0))) {
|
|
||||||
if (end->next()->str() == "(")
|
if (end->next()->str() == "(")
|
||||||
++level;
|
end = end->linkAt(1);
|
||||||
else if (end->next()->str() == ")")
|
|
||||||
--level;
|
|
||||||
|
|
||||||
end = end->next();
|
end = end->next();
|
||||||
}
|
}
|
||||||
|
@ -249,8 +245,7 @@ bool Tokenizer::duplicateTypedef(Token **tokPtr, const Token *name, const Token
|
||||||
(!Token::Match(tok->previous(), "return|new|const|friend|public|private|protected|throw|extern") &&
|
(!Token::Match(tok->previous(), "return|new|const|friend|public|private|protected|throw|extern") &&
|
||||||
!Token::simpleMatch(tok->tokAt(-2), "friend class")))) {
|
!Token::simpleMatch(tok->tokAt(-2), "friend class")))) {
|
||||||
// scan backwards for the end of the previous statement
|
// scan backwards for the end of the previous statement
|
||||||
int level = (tok->previous()->str() == "}") ? 1 : 0;
|
while (tok && tok->previous() && !Token::Match(tok->previous(), ";|{")) {
|
||||||
while (tok && tok->previous() && (!Token::Match(tok->previous(), ";|{") || (level != 0))) {
|
|
||||||
if (tok->previous()->str() == "}") {
|
if (tok->previous()->str() == "}") {
|
||||||
tok = tok->previous()->link();
|
tok = tok->previous()->link();
|
||||||
} else if (tok->previous()->str() == "typedef") {
|
} else if (tok->previous()->str() == "typedef") {
|
||||||
|
@ -299,8 +294,7 @@ bool Tokenizer::duplicateTypedef(Token **tokPtr, const Token *name, const Token
|
||||||
duplicateDeclarationError(*tokPtr, name, "class");
|
duplicateDeclarationError(*tokPtr, name, "class");
|
||||||
return false;
|
return false;
|
||||||
}
|
}
|
||||||
} else if (tok->previous()->str() == "{")
|
}
|
||||||
--level;
|
|
||||||
|
|
||||||
tok = tok->previous();
|
tok = tok->previous();
|
||||||
}
|
}
|
||||||
|
@ -426,7 +420,7 @@ static Token *splitDefinitionFromTypedef(Token *tok)
|
||||||
tok1->insertToken(";");
|
tok1->insertToken(";");
|
||||||
tok1 = tok1->next();
|
tok1 = tok1->next();
|
||||||
|
|
||||||
if (tok1->next() && tok1->next()->str() == ";" && tok1 && tok1->previous()->str() == "}") {
|
if (tok1->next() && tok1->next()->str() == ";" && tok1->previous()->str() == "}") {
|
||||||
tok->deleteThis();
|
tok->deleteThis();
|
||||||
tok1->deleteThis();
|
tok1->deleteThis();
|
||||||
return NULL;
|
return NULL;
|
||||||
|
@ -4601,7 +4595,7 @@ void Tokenizer::simplifyCasts()
|
||||||
unsigned int bits = 8 * _typeSize[tok->next()->link()->previous()->str()];
|
unsigned int bits = 8 * _typeSize[tok->next()->link()->previous()->str()];
|
||||||
if (!tok->tokAt(2)->isUnsigned())
|
if (!tok->tokAt(2)->isUnsigned())
|
||||||
bits--;
|
bits--;
|
||||||
if (bits < 31 && value >= 0 && value < (1 << bits)) {
|
if (bits < 31 && value >= 0 && value < (1LL << bits)) {
|
||||||
Token::eraseTokens(tok, tok->next()->link()->next());
|
Token::eraseTokens(tok, tok->next()->link()->next());
|
||||||
}
|
}
|
||||||
continue;
|
continue;
|
||||||
|
|
|
@ -7206,7 +7206,7 @@ private:
|
||||||
Tokenizer tokenizer(&settings, NULL);
|
Tokenizer tokenizer(&settings, NULL);
|
||||||
std::istringstream istr("x ; return a not_eq x;");
|
std::istringstream istr("x ; return a not_eq x;");
|
||||||
tokenizer.tokenize(istr, "test.c");
|
tokenizer.tokenize(istr, "test.c");
|
||||||
Token *x_token = const_cast<Token *>(tokenizer.tokens()->tokAt(5));
|
Token *x_token = tokenizer.list.front()->tokAt(5);
|
||||||
ASSERT_EQUALS(false, tokenizer.duplicateDefinition(&x_token, tokenizer.tokens()));
|
ASSERT_EQUALS(false, tokenizer.duplicateDefinition(&x_token, tokenizer.tokens()));
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
Loading…
Reference in New Issue