Add missing types to Platform::platformString(). Add another pattern to Tokenizer::findGarbageCode() to avoid potential crash in Valueflow

Author: Alexander Mai, 2017-06-06 22:15:11 +02:00
parent 9da28d5f4c
commit ce13b75967
2 changed files with 23 additions and 16 deletions


@@ -111,16 +111,20 @@ namespace cppcheck {
         const char *platformString() const {
             switch (platformType) {
-            case Unix32:
-                return "unix32";
-            case Unix64:
-                return "unix64";
+            case Unspecified:
+                return "Unspecified";
+            case Native:
+                return "Native";
             case Win32A:
                 return "win32A";
             case Win32W:
                 return "win32W";
             case Win64:
                 return "win64";
+            case Unix32:
+                return "unix32";
+            case Unix64:
+                return "unix64";
             case AVR8:
                 return "avr8";
             default:
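For readers skimming the hunk above: platformString() maps each PlatformType enumerator to the name used on the command line, and any enumerator without its own case falls through to the default branch. Judging from the hunk, Unspecified and Native previously hit that default, which is what the commit message's "missing types" refers to. The standalone sketch below (hypothetical names and main(), not the project's actual header) illustrates the pattern and why every newly added platform type needs a matching case.

```cpp
// Minimal standalone sketch, not cppcheck's real Platform class: an
// enum-to-string switch in the same style as platformString(). Any
// enumerator left out of the switch is reported as "unknown".
#include <cstdio>

enum PlatformType { Unspecified, Native, Win32A, Win32W, Win64, Unix32, Unix64, AVR8 };

static const char *platformString(PlatformType type)
{
    switch (type) {
    case Unspecified: return "Unspecified";
    case Native:      return "Native";
    case Win32A:      return "win32A";
    case Win32W:      return "win32W";
    case Win64:       return "win64";
    case Unix32:      return "unix32";
    case Unix64:      return "unix64";
    case AVR8:        return "avr8";
    default:          return "unknown"; // only reachable if a case is missing
    }
}

int main()
{
    std::printf("%s\n", platformString(Native)); // prints "Native"
    return 0;
}
```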


@@ -8130,6 +8130,18 @@ void Tokenizer::validate() const
         cppcheckError(lastTok);
 }

+static const std::set<std::string> controlFlowKeywords = make_container< std::set<std::string> > () <<
+    "goto" <<
+    "do" <<
+    "if" <<
+    "else" <<
+    "for" <<
+    "while" <<
+    "switch" <<
+    "case" <<
+    "break" <<
+    "continue" <<
+    "return";

 const Token * Tokenizer::findGarbageCode() const
 {
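The make_container< std::set<std::string> >() << ... idiom above builds the keyword set element by element through operator<<; with a C++11 compiler the same set can be written with an initializer list. Below is a small self-contained sketch of that set and the membership test findGarbageCode() performs on it (illustrative only, not the project's helper). Note that "case" is part of the newly placed set, while the old definition removed in the next hunk did not contain it.

```cpp
// Illustrative equivalent of the controlFlowKeywords set using a C++11
// initializer list, plus the kind of membership test the garbage check does.
#include <iostream>
#include <set>
#include <string>

static const std::set<std::string> controlFlowKeywords = {
    "goto", "do", "if", "else", "for", "while",
    "switch", "case", "break", "continue", "return"
};

int main()
{
    std::cout << controlFlowKeywords.count("case") << '\n'; // 1: control-flow keyword
    std::cout << controlFlowKeywords.count("foo")  << '\n'; // 0: ordinary identifier
    return 0;
}
```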
@@ -8178,23 +8190,14 @@ const Token * Tokenizer::findGarbageCode() const
         return list.back();
     if (list.back()->str() == ")" && !Token::Match(list.back()->link()->previous(), "%name% ("))
         return list.back();
-    if (Token::Match(list.back(), "void|char|short|int|long|float|double|const|volatile|static|inline|struct|class|enum|union|template|sizeof|break|continue|typedef"))
+    if (Token::Match(list.back(), "void|char|short|int|long|float|double|const|volatile|static|inline|struct|class|enum|union|template|sizeof|case|break|continue|typedef"))
         return list.back();
+    if ((list.back()->str()==")"||list.back()->str()=="}") && list.back()->previous() && controlFlowKeywords.find(list.back()->previous()->str()) != controlFlowKeywords.end())
+        return list.back()->previous();
     return nullptr;
 }

-static const std::set<std::string> controlFlowKeywords = make_container< std::set<std::string> > () <<
-    "goto" <<
-    "do" <<
-    "if" <<
-    "else" <<
-    "for" <<
-    "while" <<
-    "switch" <<
-    "break" <<
-    "continue" <<
-    "return";

 bool Tokenizer::isGarbageExpr(const Token *start, const Token *end)
 {
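Besides adding "case" to the Token::Match alternation, the hunk above introduces the pattern that guards ValueFlow: a token list whose final ")" or "}" is directly preceded by a control-flow keyword is rejected as garbage before any value-flow analysis runs. A minimal standalone model of that two-token lookback (a hypothetical helper on plain strings, not cppcheck's Token or Token::Match API):

```cpp
// Standalone model of the added garbage pattern, not cppcheck's Token API:
// flag a token stream whose last token is ")" or "}" and whose previous
// token is a control-flow keyword.
#include <iostream>
#include <set>
#include <string>
#include <vector>

static const std::set<std::string> controlFlowKeywords = {
    "goto", "do", "if", "else", "for", "while",
    "switch", "case", "break", "continue", "return"
};

static bool looksLikeGarbage(const std::vector<std::string> &tokens)
{
    if (tokens.size() < 2)
        return false;
    const std::string &last = tokens.back();
    const std::string &prev = tokens[tokens.size() - 2];
    return (last == ")" || last == "}") && controlFlowKeywords.count(prev) != 0;
}

int main()
{
    // Hypothetical truncated input "void f ( ) { if }" ends in "if }" -> garbage.
    std::cout << looksLikeGarbage({"void", "f", "(", ")", "{", "if", "}"}) << '\n'; // 1
    // Well-formed "void f ( ) { }" ends in "{ }" -> not matched by this rule.
    std::cout << looksLikeGarbage({"void", "f", "(", ")", "{", "}"}) << '\n';       // 0
    return 0;
}
```

The real check additionally returns the offending token so the caller can report it; the example inputs are illustrative guesses, not the original crash reproducer.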