Fixed #1492 (false negatives: array index out of bounds)
This commit is contained in:
parent
7fb5b9b67b
commit
ef57d01f7a
|
@ -566,6 +566,15 @@ bool Token::isStandardType() const
|
|||
return ret;
|
||||
}
|
||||
|
||||
bool Token::isIntegerType() const
|
||||
{
|
||||
bool ret = false;
|
||||
const char *type[] = {"char", "short", "int", "long", "size_t", "__int64", 0};
|
||||
for (int i = 0; type[i]; i++)
|
||||
ret |= (_str == type[i]);
|
||||
return ret;
|
||||
}
|
||||
|
||||
void Token::move(Token *srcStart, Token *srcEnd, Token *newLocation)
|
||||
{
|
||||
/**[newLocation] -> b -> c -> [srcStart] -> [srcEnd] -> f */
|
||||
|
|
|
@ -174,6 +174,7 @@ public:
|
|||
_isLong = size;
|
||||
}
|
||||
bool isStandardType() const;
|
||||
bool isIntegerType() const;
|
||||
|
||||
static const Token *findmatch(const Token *tok, const char pattern[], unsigned int varId = 0);
|
||||
|
||||
|
|
197
lib/tokenize.cpp
197
lib/tokenize.cpp
|
@ -115,6 +115,40 @@ void Tokenizer::addtoken(const char str[], const unsigned int lineno, const unsi
|
|||
_tokensBack->linenr(lineno);
|
||||
_tokensBack->fileIndex(fileno);
|
||||
}
|
||||
|
||||
void Tokenizer::addtoken(const Token * tok, const unsigned int lineno, const unsigned int fileno)
|
||||
{
|
||||
if (tok == 0)
|
||||
return;
|
||||
|
||||
// Replace hexadecimal value with decimal
|
||||
std::ostringstream str2;
|
||||
if (strncmp(tok->str().c_str(), "0x", 2) == 0)
|
||||
{
|
||||
str2 << std::strtoul(tok->str().c_str() + 2, NULL, 16);
|
||||
}
|
||||
else
|
||||
{
|
||||
str2 << tok->str();
|
||||
}
|
||||
|
||||
if (_tokensBack)
|
||||
{
|
||||
_tokensBack->insertToken(str2.str().c_str());
|
||||
}
|
||||
else
|
||||
{
|
||||
_tokens = new Token(&_tokensBack);
|
||||
_tokensBack = _tokens;
|
||||
_tokensBack->str(str2.str());
|
||||
}
|
||||
|
||||
_tokensBack->linenr(lineno);
|
||||
_tokensBack->fileIndex(fileno);
|
||||
_tokensBack->isUnsigned(tok->isUnsigned());
|
||||
_tokensBack->isSigned(tok->isSigned());
|
||||
_tokensBack->isLong(tok->isLong());
|
||||
}
|
||||
//---------------------------------------------------------------------------
|
||||
|
||||
//---------------------------------------------------------------------------
|
||||
|
@ -1238,9 +1272,6 @@ bool Tokenizer::tokenize(std::istream &code, const char FileName[], const std::s
|
|||
}
|
||||
}
|
||||
|
||||
// replace "unsigned i" with "unsigned int i"
|
||||
unsignedint();
|
||||
|
||||
// collapse compound standard types into a single token
|
||||
// unsigned long long int => long _isUnsigned=true,_isLong=true
|
||||
simplifyStdType();
|
||||
|
@ -1649,13 +1680,12 @@ void Tokenizer::simplifyTemplates()
|
|||
for (std::list<Token *>::iterator iter1 = templates.begin(); iter1 != templates.end(); ++iter1)
|
||||
{
|
||||
Token *tok = *iter1;
|
||||
std::vector<std::string> type;
|
||||
std::vector<const Token *> type;
|
||||
for (tok = tok->tokAt(2); tok && tok->str() != ">"; tok = tok->next())
|
||||
{
|
||||
if (Token::Match(tok, "%var% ,|>"))
|
||||
type.push_back(tok->str());
|
||||
type.push_back(tok);
|
||||
}
|
||||
|
||||
// bail out if the end of the file was reached
|
||||
if (!tok)
|
||||
break;
|
||||
|
@ -1732,8 +1762,9 @@ void Tokenizer::simplifyTemplates()
|
|||
continue;
|
||||
|
||||
// New type..
|
||||
std::vector<std::string> types2;
|
||||
std::vector<Token> types2;
|
||||
s = "";
|
||||
std::string s1(name + " < ");
|
||||
for (const Token *tok3 = tok2->tokAt(2); tok3 && tok3->str() != ">"; tok3 = tok3->next())
|
||||
{
|
||||
if (!tok3->next())
|
||||
|
@ -1741,11 +1772,20 @@ void Tokenizer::simplifyTemplates()
|
|||
s.clear();
|
||||
break;
|
||||
}
|
||||
|
||||
s1 += tok3->str();
|
||||
s1 += " ";
|
||||
if (tok3->str() != ",")
|
||||
types2.push_back(tok3->str());
|
||||
types2.push_back(*tok3);
|
||||
// add additional type information
|
||||
if (tok3->isUnsigned())
|
||||
s += "unsigned";
|
||||
else if (tok3->isSigned())
|
||||
s += "signed";
|
||||
if (tok3->isLong())
|
||||
s += "long";
|
||||
s += tok3->str();
|
||||
}
|
||||
s1 += ">";
|
||||
const std::string type2(s);
|
||||
|
||||
if (type2.empty() || type.size() != types2.size())
|
||||
|
@ -1838,13 +1878,13 @@ void Tokenizer::simplifyTemplates()
|
|||
{
|
||||
// search for this token in the type vector
|
||||
unsigned int itype = 0;
|
||||
while (itype < type.size() && type[itype] != tok3->str())
|
||||
while (itype < type.size() && type[itype]->str() != tok3->str())
|
||||
++itype;
|
||||
|
||||
// replace type with given type..
|
||||
if (itype < type.size())
|
||||
{
|
||||
addtoken(types2[itype].c_str(), tok3->linenr(), tok3->fileIndex());
|
||||
addtoken(&types2[itype], tok3->linenr(), tok3->fileIndex());
|
||||
continue;
|
||||
}
|
||||
}
|
||||
|
@ -1857,7 +1897,7 @@ void Tokenizer::simplifyTemplates()
|
|||
}
|
||||
|
||||
// copy
|
||||
addtoken(tok3->str().c_str(), tok3->linenr(), tok3->fileIndex());
|
||||
addtoken(tok3, tok3->linenr(), tok3->fileIndex());
|
||||
if (Token::Match(tok3, "%type% <"))
|
||||
{
|
||||
if (!Token::Match(tok3, (name + " <").c_str()))
|
||||
|
@ -1905,24 +1945,40 @@ void Tokenizer::simplifyTemplates()
|
|||
}
|
||||
|
||||
// Replace all these template usages..
|
||||
s = name + " < " + type2 + " >";
|
||||
for (std::string::size_type pos = s.find(","); pos != std::string::npos; pos = s.find(",", pos + 2))
|
||||
{
|
||||
s.insert(pos + 1, " ");
|
||||
s.insert(pos, " ");
|
||||
}
|
||||
for (Token *tok4 = tok2; tok4; tok4 = tok4->next())
|
||||
{
|
||||
if (Token::simpleMatch(tok4, s.c_str()))
|
||||
if (Token::simpleMatch(tok4, s1.c_str()))
|
||||
{
|
||||
tok4->str(name2);
|
||||
while (tok4->next()->str() != ">")
|
||||
bool match = true;
|
||||
Token * tok5 = tok4->tokAt(2);
|
||||
unsigned int count = 0;
|
||||
while (tok5->str() != ">")
|
||||
{
|
||||
if (tok5->str() != ",")
|
||||
{
|
||||
if (tok5->isUnsigned() != types2[count].isUnsigned() ||
|
||||
tok5->isSigned() != types2[count].isSigned() ||
|
||||
tok5->isLong() != types2[count].isLong())
|
||||
{
|
||||
match = false;
|
||||
break;
|
||||
}
|
||||
count++;
|
||||
}
|
||||
tok5 = tok5->next();
|
||||
}
|
||||
|
||||
if (match)
|
||||
{
|
||||
tok4->str(name2);
|
||||
while (tok4->next()->str() != ">")
|
||||
{
|
||||
used.remove(tok4->next());
|
||||
tok4->deleteNext();
|
||||
}
|
||||
used.remove(tok4->next());
|
||||
tok4->deleteNext();
|
||||
}
|
||||
used.remove(tok4->next());
|
||||
tok4->deleteNext();
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -4324,7 +4380,7 @@ void Tokenizer::simplifyStdType()
|
|||
for (Token *tok = _tokens; tok; tok = tok->next())
|
||||
{
|
||||
// long unsigned => unsigned long
|
||||
if (Token::Match(tok, "long|short unsigned|signed"))
|
||||
if (Token::Match(tok, "long|short|int|char|_int64 unsigned|signed"))
|
||||
{
|
||||
std::string temp = tok->str();
|
||||
tok->str(tok->next()->str());
|
||||
|
@ -4338,7 +4394,12 @@ void Tokenizer::simplifyStdType()
|
|||
if (Token::Match(tok, "unsigned|signed"))
|
||||
{
|
||||
bool isUnsigned = tok->str() == "unsigned";
|
||||
tok->deleteThis();
|
||||
|
||||
// unsigned i => unsigned int i
|
||||
if (!tok->next()->isIntegerType())
|
||||
tok->str("int");
|
||||
else
|
||||
tok->deleteThis();
|
||||
tok->isUnsigned(isUnsigned);
|
||||
tok->isSigned(!isUnsigned);
|
||||
}
|
||||
|
@ -4373,92 +4434,6 @@ void Tokenizer::simplifyStdType()
|
|||
}
|
||||
}
|
||||
|
||||
void Tokenizer::unsignedint()
{
    // Normalise "unsigned"/"signed" in declarations by editing the token
    // list in place:
    //   "unsigned i"   => "unsigned int i"
    //   "signed int i" => "int i"
    //   "signed i"     => "int i"
    // A second pass simplifies template arguments such as "< unsigned >".
    for (Token *tok = _tokens; tok; tok = tok->next())
    {
        // Only "unsigned"/"signed" tokens are of interest
        if (!Token::Match(tok, "unsigned|signed"))
            continue;

        // "%type% signed|unsigned %var%" where %type% is a standard type
        if (Token::Match(tok->previous(), "%type% unsigned|signed %var% [;,=)]") &&
            tok->previous()->isStandardType())
        {
            if (tok->str() == "signed")
            {
                // int signed a; -> int a;
                tok = tok->previous();
                tok->deleteNext();
            }
            else
            {
                // int unsigned a; -> unsigned int a;
                // swap the two tokens so "unsigned" comes first
                std::string temp = tok->str();
                tok->str(tok->previous()->str());
                tok->previous()->str(temp);
            }

            continue;
        }

        // "extern unsigned x;" — the type itself was left out; use "int"
        if (Token::Match(tok->previous(), "extern unsigned|signed *| %var% [;[]"))
        {
            tok->str("int");
            continue;
        }

        // signed int a; -> int a;
        if (Token::Match(tok, "signed %type% %var% [;,=)]"))
        {
            if (tok->next()->isStandardType())
            {
                tok->str(tok->next()->str());
                tok->deleteNext();
                continue;
            }
        }

        // A variable declaration where the "int" is left out?
        // NOTE: this "else if" pairs with the Match("signed %type% ...")
        // above — when that pattern matched but the next token was not a
        // standard type, control falls through to the checks below.
        else if (!Token::Match(tok, "unsigned|signed %var% [;,=)]") &&
                 !Token::Match(tok->previous(), "( unsigned|signed )"))
            continue;

        // Previous token should either be a symbol or one of "{};(,"
        if (tok->previous() &&
            !tok->previous()->isName() &&
            !Token::Match(tok->previous(), "[{};(,]"))
            continue;

        // next token should not be a standard type?
        if (tok->next()->isStandardType())
        {
            if (tok->str() == "signed")
            {
                // "signed <type>" -> "<type>" (drop the redundant keyword)
                tok->str(tok->next()->str());
                tok->deleteNext();
            }

            continue;
        }

        // The "int" is missing.. add it
        if (tok->str() == "signed")
            tok->str("int");
        else
            tok->insertToken("int");
    }

    // simplify template arguments..
    for (Token *tok = _tokens; tok; tok = tok->next())
    {
        if (Token::simpleMatch(tok, "< unsigned >"))
            tok->next()->str("int");
        else if (Token::Match(tok, "< unsigned %type% >") &&
                 tok->tokAt(2)->isStandardType())
            tok->deleteNext();
    }
}
|
||||
|
||||
|
||||
void Tokenizer::simplifyIfAssign()
|
||||
{
|
||||
for (Token *tok = _tokens; tok; tok = tok->next())
|
||||
|
|
|
@ -162,14 +162,6 @@ public:
|
|||
*/
|
||||
void simplifyInitVar();
|
||||
|
||||
/**
|
||||
* insert an "int" after "unsigned" if needed:
|
||||
* "unsigned i" => "unsigned int i"
|
||||
* "signed int i" => "int i"
|
||||
* "signed i" => "int i"
|
||||
*/
|
||||
void unsignedint();
|
||||
|
||||
/**
|
||||
* Collapse compound standard types into a single token.
|
||||
* unsigned long long int => long _isUnsigned=true,_isLong=true
|
||||
|
@ -267,6 +259,7 @@ public:
|
|||
std::vector<const Token *> _functionList;
|
||||
|
||||
void addtoken(const char str[], const unsigned int lineno, const unsigned int fileno);
|
||||
void addtoken(const Token *tok, const unsigned int lineno, const unsigned int fileno);
|
||||
|
||||
/**
|
||||
* Simplify the operator "?:"
|
||||
|
|
|
@ -65,6 +65,7 @@ private:
|
|||
TEST_CASE(division6);
|
||||
TEST_CASE(division7);
|
||||
TEST_CASE(division8);
|
||||
TEST_CASE(division9);
|
||||
}
|
||||
|
||||
void division1()
|
||||
|
@ -199,6 +200,38 @@ private:
|
|||
"}\n", true, true);
|
||||
ASSERT_EQUALS("", errout.str());
|
||||
}
|
||||
|
||||
void division9()
{
    // Regression test for ticket #1492 (false negatives): signed/unsigned
    // division must be reported when the unsigned operand is a compound
    // type ("unsigned long", "unsigned long long"), and also when the
    // operand types arrive via template instantiation.
    check("void f()\n"
          "{\n"
          " int ivar = -2;\n"
          " unsigned long uvar = 2;\n"
          " return ivar / uvar;\n"
          "}\n");
    ASSERT_EQUALS("[test.cpp:5]: (possible style) Division with signed and unsigned operators\n", errout.str());

    check("void f()\n"
          "{\n"
          " int ivar = -2;\n"
          " unsigned long long uvar = 2;\n"
          " return ivar / uvar;\n"
          "}\n");
    ASSERT_EQUALS("[test.cpp:5]: (possible style) Division with signed and unsigned operators\n", errout.str());

    // One warning per instantiation of the template — the division in
    // foo() is checked for each of the three C<...> instances.
    check("template<class A, class B> class C\n"
          "{\n"
          " A a;\n"
          " B b;\n"
          " void foo() { a / b; }\n"
          "};\n"
          "C<int, unsigned int> c1;\n"
          "C<int, unsigned long> c2;\n"
          "C<int, unsigned long long> c3;\n");
    ASSERT_EQUALS("[test.cpp:5]: (possible style) Division with signed and unsigned operators\n"
                  "[test.cpp:5]: (possible style) Division with signed and unsigned operators\n"
                  "[test.cpp:5]: (possible style) Division with signed and unsigned operators\n", errout.str());
}
|
||||
};
|
||||
|
||||
REGISTER_TEST(TestDivision)
|
||||
|
|
|
@ -2639,51 +2639,48 @@ private:
|
|||
|
||||
|
||||
/**
|
||||
* tokenize "signed i" => "int i"
|
||||
* tokenize "signed i" => "signed int i"
|
||||
*/
|
||||
void signed1()
|
||||
{
|
||||
{
|
||||
const char code[] = "void foo ( signed int , signed float ) ;";
|
||||
const char code2[] = "void foo ( int , float ) ;";
|
||||
ASSERT_EQUALS(code2, tokenizeAndStringify(code));
|
||||
const char code1[] = "void foo ( signed int , float ) ;";
|
||||
ASSERT_EQUALS(code1, tokenizeAndStringify(code1));
|
||||
}
|
||||
|
||||
{
|
||||
const char code1[] = "signed i ;";
|
||||
const char code2[] = "int i ;";
|
||||
const char code2[] = "signed int i ;";
|
||||
ASSERT_EQUALS(code2, tokenizeAndStringify(code1));
|
||||
}
|
||||
|
||||
{
|
||||
const char code1[] = "signed int i ;";
|
||||
const char code2[] = "int i ;";
|
||||
ASSERT_EQUALS(code2, tokenizeAndStringify(code1));
|
||||
ASSERT_EQUALS(code1, tokenizeAndStringify(code1));
|
||||
}
|
||||
|
||||
{
|
||||
const char code1[] = "int signed i ;";
|
||||
const char code2[] = "int i ;";
|
||||
const char code2[] = "signed int i ;";
|
||||
ASSERT_EQUALS(code2, tokenizeAndStringify(code1));
|
||||
}
|
||||
|
||||
{
|
||||
const char code1[] = "for (signed i=0; i<10; i++)";
|
||||
const char code2[] = "for ( int i = 0 ; i < 10 ; i ++ )";
|
||||
const char code2[] = "for ( signed int i = 0 ; i < 10 ; i ++ )";
|
||||
ASSERT_EQUALS(code2, tokenizeAndStringify(code1));
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
/**
|
||||
* tokenize "unsigned i" => "unsigned int i"
|
||||
* tokenize "unsigned int" => "unsigned int"
|
||||
* tokenize "unsigned" => "unsigned int"
|
||||
*/
|
||||
void unsigned1()
|
||||
{
|
||||
// No changes..
|
||||
{
|
||||
const char code[] = "void foo ( unsigned int , unsigned float ) ;";
|
||||
const char code[] = "void foo ( unsigned int , float ) ;";
|
||||
ASSERT_EQUALS(code, tokenizeAndStringify(code));
|
||||
}
|
||||
|
||||
|
@ -2710,7 +2707,7 @@ private:
|
|||
// "extern unsigned x;" => "extern int x;"
|
||||
{
|
||||
const char code1[] = "; extern unsigned x;";
|
||||
const char code2[] = "; extern int x ;";
|
||||
const char code2[] = "; extern unsigned int x ;";
|
||||
ASSERT_EQUALS(code2, tokenizeAndStringify(code1));
|
||||
}
|
||||
}
|
||||
|
|
Loading…
Reference in New Issue