Refactored syntaxError handling: print debug output before throwing; removed return values that are no longer required

PKEuS 2014-03-27 18:41:52 +01:00
parent ab25741fee
commit efe3f834be
4 changed files with 105 additions and 148 deletions
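
In effect, the commit switches the tokenizer from bool-based error propagation to exception-based reporting: the error reporters print their debug dump themselves and then throw, so helper functions no longer need to bubble up success flags. The sketch below only illustrates that general pattern under stated assumptions; MiniTokenizer and its bracket check are made-up stand-ins, not cppcheck's actual classes or API.

    #include <iostream>
    #include <stdexcept>
    #include <string>

    // Illustrative stand-in for the refactoring pattern: the error reporter
    // prints its debug output itself and then throws, so helpers return void
    // instead of propagating 'return false' up the call chain.
    class MiniTokenizer {
    public:
        void createLinks(const std::string &code) {     // was: bool createLinks()
            int depth = 0;
            for (char c : code) {
                if (c == '(')
                    ++depth;
                else if (c == ')' && --depth < 0)
                    syntaxError("unmatched ')'");       // throws instead of 'return false'
            }
            if (depth != 0)
                syntaxError("unmatched '('");
        }

    private:
        void printDebugOutput() const {
            // stand-in for dumping tokenizer state (token list, symbol database, ...)
            std::cerr << "[debug] dumping tokenizer state before aborting\n";
        }

        void syntaxError(const std::string &what) const {
            printDebugOutput();                         // debug output happens in one place
            throw std::runtime_error("syntax error: " + what);
        }
    };

    int main() {
        MiniTokenizer t;
        try {
            t.createLinks("(a + b))");                  // caller catches, no bool checking
        } catch (const std::runtime_error &e) {
            std::cerr << e.what() << '\n';
            return 1;
        }
        return 0;
    }

In the real code below, the reporters throw InternalError and the debug dump in the new Tokenizer::printDebugOutput() is gated on _settings->debug and _settings->debugwarnings.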


@@ -1649,10 +1649,7 @@ bool Tokenizer::tokenizeCondition(const std::string &code)
     // Concatenate double sharp: 'a ## b' -> 'ab'
     concatenateDoubleSharp();
-    if (!createLinks()) {
-        // Source has syntax errors, can't proceed
-        return false;
-    }
+    createLinks();
     // replace 'NULL' and similar '0'-defined macros with '0'
     simplifyNull();
@@ -1690,15 +1687,11 @@ bool Tokenizer::tokenizeCondition(const std::string &code)
     return true;
 }
-bool Tokenizer::hasComplicatedSyntaxErrorsInTemplates()
+void Tokenizer::findComplicatedSyntaxErrorsInTemplates()
 {
     const Token *tok = TemplateSimplifier::hasComplicatedSyntaxErrorsInTemplates(list.front());
-    if (tok) {
+    if (tok)
         syntaxError(tok);
-        return true;
-    }
-    return false;
 }
 bool Tokenizer::hasEnumsWithTypedef()
@@ -2088,7 +2081,7 @@ static Token *skipTernaryOp(Token *tok)
 /** simplify labels and case|default in the code: add a ";" if not already in.*/
-bool Tokenizer::simplifyLabelsCaseDefault()
+void Tokenizer::simplifyLabelsCaseDefault()
 {
     bool executablescope = false;
     unsigned int indentlevel = 0;
@@ -2126,7 +2119,6 @@ bool Tokenizer::simplifyLabelsCaseDefault()
             Token *tok1 = skipTernaryOp(tok);
             if (!tok1) {
                 syntaxError(tok);
-                return false;
             }
             tok = tok1;
         }
@@ -2139,14 +2131,12 @@ bool Tokenizer::simplifyLabelsCaseDefault()
                 tok->insertToken(";");
             } else {
                 syntaxError(tok);
-                return false;
             }
         } else if (Token::Match(tok, "[;{}] %var% : !!;")) {
             tok = tok->tokAt(2);
             tok->insertToken(";");
         }
     }
-    return true;
 }
@@ -2624,7 +2614,7 @@ void Tokenizer::setVarId()
     }
 }
-static bool linkBrackets(Tokenizer* tokenizer, std::stack<const Token*>& type, std::stack<Token*>& links, Token* token, char open, char close)
+static void linkBrackets(Tokenizer* tokenizer, std::stack<const Token*>& type, std::stack<Token*>& links, Token* token, char open, char close)
 {
     if (token->str()[0] == open) {
         links.push(token);
@@ -2633,21 +2623,18 @@ static bool linkBrackets(Tokenizer* tokenizer, std::stack<const Token*>& type, s
         if (links.empty()) {
             // Error, { and } don't match.
             tokenizer->syntaxError(token, open);
-            return false;
         }
         if (type.top()->str()[0] != open) {
             tokenizer->syntaxError(type.top(), type.top()->str()[0]);
-            return false;
         }
         type.pop();
         Token::createMutualLinks(links.top(), token);
         links.pop();
     }
-    return (true);
 }
-bool Tokenizer::createLinks()
+void Tokenizer::createLinks()
 {
     std::stack<const Token*> type;
     std::stack<Token*> links1;
@@ -2658,38 +2645,27 @@ bool Tokenizer::createLinks()
             token->link(0);
         }
-        bool validSyntax = linkBrackets(this, type, links1, token, '{', '}');
-        if (!validSyntax)
-            return false;
+        linkBrackets(this, type, links1, token, '{', '}');
-        validSyntax = linkBrackets(this, type, links2, token, '(', ')');
-        if (!validSyntax)
-            return false;
+        linkBrackets(this, type, links2, token, '(', ')');
-        validSyntax = linkBrackets(this, type, links3, token, '[', ']');
-        if (!validSyntax)
-            return false;
+        linkBrackets(this, type, links3, token, '[', ']');
     }
     if (!links1.empty()) {
         // Error, { and } don't match.
         syntaxError(links1.top(), '{');
-        return false;
     }
     if (!links2.empty()) {
         // Error, ( and ) don't match.
         syntaxError(links2.top(), '(');
-        return false;
     }
     if (!links3.empty()) {
         // Error, [ and ] don't match.
         syntaxError(links3.top(), '[');
-        return false;
     }
-    return true;
 }
 void Tokenizer::createLinks2()
@@ -3052,10 +3028,7 @@ bool Tokenizer::simplifyTokenList1(const char FileName[])
     // Concatenate double sharp: 'a ## b' -> 'ab'
     concatenateDoubleSharp();
-    if (!createLinks()) {
-        // Source has syntax errors, can't proceed
-        return false;
-    }
+    createLinks();
     // if (x) MACRO() ..
     for (const Token *tok = list.front(); tok; tok = tok->next()) {
@@ -3130,15 +3103,13 @@ bool Tokenizer::simplifyTokenList1(const char FileName[])
     // Convert K&R function declarations to modern C
     simplifyVarDecl(true);
-    if (!simplifyFunctionParameters())
-        return false;
+    simplifyFunctionParameters();
     // specify array size..
     arraySize();
     // simplify labels and 'case|default'-like syntaxes
-    if (!simplifyLabelsCaseDefault())
-        return false;
+    simplifyLabelsCaseDefault();
     // simplify '[;{}] * & ( %any% ) =' to '%any% ='
     simplifyMulAndParens();
@@ -3146,10 +3117,8 @@ bool Tokenizer::simplifyTokenList1(const char FileName[])
     // ";a+=b;" => ";a=a+b;"
     simplifyCompoundAssignment();
-    if (!_settings->library.markupFile(FileName)
-        && hasComplicatedSyntaxErrorsInTemplates()) {
-        list.deallocateTokens();
-        return false;
+    if (!_settings->library.markupFile(FileName)) {
+        findComplicatedSyntaxErrorsInTemplates();
     }
     if (_settings->terminated())
@@ -3228,10 +3197,7 @@ bool Tokenizer::simplifyTokenList1(const char FileName[])
     // to reproduce bad typedef, download upx-ucl from:
    // http://packages.debian.org/sid/upx-ucl
    // analyse the file src/stub/src/i386-linux.elf.interp-main.c
-    if (!validate()) {
-        // Source has syntax errors, can't proceed
-        return false;
-    }
+    validate();
     // enum..
     simplifyEnum();
@@ -3431,7 +3397,8 @@ bool Tokenizer::simplifyTokenList1(const char FileName[])
     elseif();
-    return validate();
+    validate();
+    return true;
 }
 bool Tokenizer::simplifyTokenList2()
@@ -3629,68 +3596,68 @@ bool Tokenizer::simplifyTokenList2()
     while (simplifyMathFunctions()) {};
-    const bool bValidate = validate();
-    if (bValidate || // either anything is fine here...
-        (_settings->debug && _settings->_verbose) // or it could be dangerous to proceed, so we demand this combination of flags
-       ) {
-        list.front()->assignProgressValues();
+    validate();
+    list.front()->assignProgressValues();
     // Create symbol database and then remove const keywords
     createSymbolDatabase();
     for (Token *tok = list.front(); tok; tok = tok->next()) {
         if (Token::simpleMatch(tok, "* const"))
             tok->deleteNext();
-        }
-        list.createAst();
-        ValueFlow::setValues(&list, _errorLogger, _settings);
-        if (_settings->terminated())
-            return false;
-        if (_settings->debug) {
-            list.front()->printOut(0, list.getFiles());
-            if (_settings->_verbose)
-                _symbolDatabase->printOut("Symbol database");
-            list.front()->printAst(_settings->_verbose);
-            list.front()->printValueFlow();
-        }
-        if (_settings->debugwarnings) {
-            printUnknownTypes();
-            // #5054 - the typeStartToken() should come before typeEndToken()
-            for (const Token *tok = tokens(); tok; tok = tok->next()) {
-                if (tok->varId() == 0U)
-                    continue;
-                const Variable *var = tok->variable();
-                if (!var)
-                    continue;
-                const Token * typetok = var->typeStartToken();
-                while (typetok && typetok != var->typeEndToken())
-                    typetok = typetok->next();
-                if (typetok != var->typeEndToken()) {
-                    reportError(tok,
-                                Severity::debug,
-                                "debug",
-                                "Variable::typeStartToken() is not located before Variable::typeEndToken(). The location of the typeStartToken() is '" + var->typeStartToken()->str() + "' at line " + MathLib::toString(var->typeStartToken()->linenr()));
-                }
-            }
-        }
     }
-    return bValidate;
+    list.createAst();
+    ValueFlow::setValues(&list, _errorLogger, _settings);
+    if (_settings->terminated())
+        return false;
+    printDebugOutput();
+    return true;
 }
 //---------------------------------------------------------------------------
+void Tokenizer::printDebugOutput() const
+{
+    if (_settings->debug) {
+        list.front()->printOut(0, list.getFiles());
+        if (_settings->_verbose && _symbolDatabase)
+            _symbolDatabase->printOut("Symbol database");
+        list.front()->printAst(_settings->_verbose);
+        list.front()->printValueFlow();
+    }
+    if (_settings->debugwarnings) {
+        printUnknownTypes();
+        // #5054 - the typeStartToken() should come before typeEndToken()
+        for (const Token *tok = tokens(); tok; tok = tok->next()) {
+            if (tok->varId() == 0U)
+                continue;
+            const Variable *var = tok->variable();
+            if (!var)
+                continue;
+            const Token * typetok = var->typeStartToken();
+            while (typetok && typetok != var->typeEndToken())
+                typetok = typetok->next();
+            if (typetok != var->typeEndToken()) {
+                reportError(tok,
+                            Severity::debug,
+                            "debug",
+                            "Variable::typeStartToken() is not located before Variable::typeEndToken(). The location of the typeStartToken() is '" + var->typeStartToken()->str() + "' at line " + MathLib::toString(var->typeStartToken()->linenr()));
+            }
+        }
+    }
+}
 void Tokenizer::removeMacrosInGlobalScope()
 {
     for (Token *tok = list.front(); tok; tok = tok->next()) {
@@ -4845,7 +4812,7 @@ void Tokenizer::simplifyCasts()
 }
-bool Tokenizer::simplifyFunctionParameters()
+void Tokenizer::simplifyFunctionParameters()
 {
     for (Token *tok = list.front(); tok; tok = tok->next()) {
         if (tok->str() == "{" || tok->str() == "[" || tok->str() == "(") {
@@ -4941,7 +4908,6 @@ bool Tokenizer::simplifyFunctionParameters()
             if (tok1->str() == ";") {
                 if (tokparam) {
                     syntaxError(tokparam);
-                    return false;
                 }
                 Token *tok2 = tok1->previous();
                 while (tok2->str() == "]")
@@ -4956,7 +4922,6 @@ bool Tokenizer::simplifyFunctionParameters()
                 if (argumentNames2.find(tok2->str()) != argumentNames2.end()) {
                     //same parameter names...
                     syntaxError(tok1);
-                    return false;
                 } else
                     argumentNames2[tok2->str()] = tok2;
@@ -5014,7 +4979,6 @@ bool Tokenizer::simplifyFunctionParameters()
             tok = tok->next()->link();
         }
     }
-    return true;
 }
 void Tokenizer::simplifyPointerToStandardType()
@@ -8158,11 +8122,13 @@ void Tokenizer::eraseDeadCode(Token *begin, const Token *end)
 void Tokenizer::syntaxError(const Token *tok) const
 {
+    printDebugOutput();
     throw InternalError(tok, "syntax error", InternalError::SYNTAX);
 }
 void Tokenizer::syntaxError(const Token *tok, char c) const
 {
+    printDebugOutput();
     throw InternalError(tok,
                         std::string("Invalid number of character (") + c + ") " +
                         "when these macros are defined: '" + _configuration + "'.",
@@ -8183,6 +8149,7 @@ void Tokenizer::unhandled_macro_class_x_y(const Token *tok) const
 void Tokenizer::cppcheckError(const Token *tok) const
 {
+    printDebugOutput();
     throw InternalError(tok, "Analysis failed. If the code is valid then please report this failure.", InternalError::INTERNAL);
 }
 // ------------------------------------------------------------------------
@@ -8659,63 +8626,45 @@ void Tokenizer::removeExceptionSpecifications()
-bool Tokenizer::validate() const
+void Tokenizer::validate() const
 {
     std::stack<const Token *> linktok;
     const Token *lastTok = nullptr;
     for (const Token *tok = tokens(); tok; tok = tok->next()) {
         lastTok = tok;
         if (Token::Match(tok, "[{([]") || (tok->str() == "<" && tok->link())) {
-            if (tok->link() == nullptr) {
+            if (tok->link() == nullptr)
                 cppcheckError(tok);
-                return false;
-            }
             linktok.push(tok);
         }
         else if (Token::Match(tok, "[})]]") || (tok->str() == ">" && tok->link())) {
-            if (tok->link() == nullptr) {
+            if (tok->link() == nullptr)
                 cppcheckError(tok);
-                return false;
-            }
-            if (linktok.empty() == true) {
+            if (linktok.empty() == true)
                 cppcheckError(tok);
-                return false;
-            }
-            if (tok->link() != linktok.top()) {
+            if (tok->link() != linktok.top())
                 cppcheckError(tok);
-                return false;
-            }
-            if (tok != tok->link()->link()) {
+            if (tok != tok->link()->link())
                 cppcheckError(tok);
-                return false;
-            }
             linktok.pop();
         }
-        else if (tok->link() != nullptr) {
+        else if (tok->link() != nullptr)
             cppcheckError(tok);
-            return false;
-        }
     }
-    if (!linktok.empty()) {
+    if (!linktok.empty())
         cppcheckError(linktok.top());
-        return false;
-    }
     // Validate that the Tokenizer::list.back() is updated correctly during simplifications
-    if (lastTok != list.back()) {
+    if (lastTok != list.back())
         cppcheckError(lastTok);
-        return false;
-    }
-    return true;
 }
std::string Tokenizer::simplifyString(const std::string &source) std::string Tokenizer::simplifyString(const std::string &source)
@@ -10210,8 +10159,11 @@ void Tokenizer::simplifyReturnStrncat()
     }
 }
-void Tokenizer::printUnknownTypes()
+void Tokenizer::printUnknownTypes() const
 {
+    if (!_symbolDatabase)
+        return;
     std::multimap<std::string, const Token *> unknowns;
     for (unsigned int i = 1; i <= _varId; ++i) {


@@ -190,7 +190,7 @@ public:
      * @return true if found nothing or the syntax is correct.
      * false if syntax is found to be wrong.
      */
-    bool simplifyLabelsCaseDefault();
+    void simplifyLabelsCaseDefault();
     /** Remove macros in global scope */
     void removeMacrosInGlobalScope();
@@ -452,7 +452,7 @@ public:
      * into "void f(int x) {"
      * @return false only if there's a syntax error
      */
-    bool simplifyFunctionParameters();
+    void simplifyFunctionParameters();
     /**
      * Simplify templates
@@ -493,7 +493,7 @@ public:
     void simplifyDefaultAndDeleteInsideClass();
-    bool hasComplicatedSyntaxErrorsInTemplates();
+    void findComplicatedSyntaxErrorsInTemplates();
     /**
      * Simplify e.g. 'atol("0")' into '0'
@@ -568,7 +568,7 @@ public:
      * @return false if there was a mismatch with tokens, this
      * should mean that source code was not valid.
      */
-    bool createLinks();
+    void createLinks();
     /**
      * Setup links between < and >.
@@ -589,7 +589,7 @@ public:
      * to catch problems in simplifyTokenList.
      * @return always true.
      */
-    bool validate() const;
+    void validate() const;
     /**
      * Remove __declspec()
@@ -719,6 +719,8 @@ public:
     void createSymbolDatabase();
     void deleteSymbolDatabase();
+    void printDebugOutput() const;
     Token *deleteInvalidTypedef(Token *typeDef);
     /**
@@ -738,7 +740,7 @@ public:
     /**
     * Output list of unknown types.
    */
-    void printUnknownTypes();
+    void printUnknownTypes() const;
     /**


@@ -3853,7 +3853,7 @@ private:
         tokenizer.simplifyTokenList2();
-        ASSERT_EQUALS(true, tokenizer.validate());
+        tokenizer.validate();
     }
     void simplifyTypedef19() {
@@ -3916,7 +3916,7 @@ private:
         tokenizer.simplifyTokenList2();
-        ASSERT_EQUALS(true, tokenizer.validate());
+        tokenizer.validate();
     }
     void simplifyTypedef21() {


@@ -891,8 +891,11 @@ private:
                       " )\n"
                       "}";
+        Settings settings;
+        Tokenizer tokenizer(&settings, this);
+        std::istringstream istr(code);
         try {
-            tokenizeAndStringify(code);
+            tokenizer.tokenize(istr, "test.cpp");
             assertThrowFail(__FILE__, __LINE__);
         } catch (InternalError& e) {
             ASSERT_EQUALS("Analysis failed. If the code is valid then please report this failure.", e.errorMessage);
@@ -5829,7 +5832,7 @@ private:
         Tokenizer tokenizer(&settings, this);
         std::istringstream istr(code);
         tokenizer.tokenize(istr, "test.cpp", "");
-        ASSERT_EQUALS(true, tokenizer.validate());
+        tokenizer.validate();
     }
     void vardecl_par2() {
@@ -5840,7 +5843,7 @@ private:
         Tokenizer tokenizer(&settings, this);
         std::istringstream istr(code);
         tokenizer.tokenize(istr, "test.cpp", "");
-        ASSERT_EQUALS(true, tokenizer.validate());
+        tokenizer.validate();
     }
     void vardec_static() {