Use simplecpp lexer in test cases

This commit is contained in:
Daniel Marjamäki 2017-05-18 21:52:31 +02:00
parent d89baee1f6
commit 040d2f0012
16 changed files with 299 additions and 441 deletions

View File

@@ -1815,23 +1815,6 @@ void Tokenizer::combineOperators()
tok->deleteNext();
continue;
}
// simplify "->"
else if (c1 == '-' && c2 == '>') {
// If the preceding sequence is "( & %name% )", replace it by "%name%"
Token *t = tok->tokAt(-4);
if (Token::Match(t, "( & %name% )")) {
t->deleteThis();
t->deleteThis();
t->deleteNext();
tok = t->next();
}
// Replace "->" with "."
tok->str(".");
tok->originalName("->");
tok->deleteNext();
continue;
}
} else if (tok->next()->str() == "=") {
if (tok->str() == ">>") {
tok->str(">>=");
@@ -1869,6 +1852,14 @@ void Tokenizer::combineOperators()
tok->deleteNext();
}
} else if (tok->str() == "->") {
// If the preceding sequence is "( & %name% )", replace it by "%name%"
Token *t = tok->tokAt(-4);
if (Token::Match(t, "( & %name% )")) {
t->deleteThis();
t->deleteThis();
t->deleteNext();
}
tok->str(".");
tok->originalName("->");
}

View File

@@ -214,198 +214,12 @@ bool TokenList::createTokens(std::istream &code, const std::string& file0)
{
appendFileIfNew(file0);
// line number in parsed code
unsigned int lineno = 1;
simplecpp::OutputList outputList;
simplecpp::TokenList tokens(code, _files, file0, &outputList);
// The current token being parsed
std::string CurrentToken;
createTokens(&tokens);
// lineNumbers holds line numbers for files in fileIndexes
// every time an include file is completely parsed, last item in the vector
// is removed and lineno is set to point to that value.
std::stack<unsigned int> lineNumbers;
// fileIndexes holds index for _files vector about currently parsed files
// every time an include file is completely parsed, last item in the vector
// is removed and FileIndex is set to point to that value.
std::stack<unsigned int> fileIndexes;
// FileIndex. What file in the _files vector is read now?
unsigned int FileIndex = 0;
bool expandedMacro = false;
// Read one byte at a time from code and create tokens
for (char ch = (char)code.get(); code.good() && ch; ch = (char)code.get()) {
if (ch == Preprocessor::macroChar) {
while (code.peek() == Preprocessor::macroChar)
code.get();
if (!CurrentToken.empty()) {
addtoken(CurrentToken, lineno, FileIndex, true);
_back->isExpandedMacro(expandedMacro);
CurrentToken.clear();
}
expandedMacro = true;
continue;
}
// char/string..
// multiline strings are not handled. The preprocessor should handle that for us.
else if (ch == '\'' || ch == '\"') {
std::string line;
// read char
bool special = false;
char c = ch;
do {
// Append token..
line += c;
// Special sequence '\.'
if (special)
special = false;
else
special = (c == '\\');
// Get next character
c = (char)code.get();
} while (code.good() && (special || c != ch));
line += ch;
// Handle #file "file.h"
if (CurrentToken == "#file") {
// Extract the filename
line = line.substr(1, line.length() - 2);
++lineno;
fileIndexes.push(FileIndex);
FileIndex = appendFileIfNew(line);
lineNumbers.push(lineno);
lineno = 0;
} else {
// Add previous token
addtoken(CurrentToken, lineno, FileIndex);
if (!CurrentToken.empty())
_back->isExpandedMacro(expandedMacro);
// Add content of the string
addtoken(line, lineno, FileIndex);
if (!line.empty())
_back->isExpandedMacro(expandedMacro);
}
CurrentToken.clear();
continue;
}
if (ch == '.' &&
!CurrentToken.empty() &&
std::isdigit((unsigned char)CurrentToken[0])) {
// Don't separate doubles "5.4"
} else if (std::strchr("+-", ch) &&
CurrentToken.length() > 0 &&
std::isdigit((unsigned char)CurrentToken[0]) &&
(endsWith(CurrentToken,'e') ||
endsWith(CurrentToken,'E')) &&
!MathLib::isIntHex(CurrentToken)) {
// Don't separate doubles "4.2e+10"
} else if (CurrentToken.empty() && ch == '.' && std::isdigit((unsigned char)code.peek())) {
// tokenize .125 into 0.125
CurrentToken = "0";
} else if (std::strchr("+-*/%&|^?!=<>[](){};:,.~\n ", ch)) {
if (CurrentToken == "#file") {
// Handle this where strings are handled
continue;
} else if (CurrentToken == "#line") {
// Read to end of line
std::string line;
std::getline(code, line);
unsigned int row=0;
std::istringstream fiss(line);
if (fiss >> row) {
// Update the current line number
lineno = row;
std::string line2;
if (std::getline(fiss, line2) && line2.length() > 4U) {
// _"file_name" -> file_name
line2 = line2.substr(2, line2.length() - 3);
// Update the current file
FileIndex = appendFileIfNew(line2);
}
} else
++lineno;
CurrentToken.clear();
continue;
} else if (CurrentToken == "#endfile") {
if (lineNumbers.empty() || fileIndexes.empty()) { // error
deallocateTokens();
return false;
}
lineno = lineNumbers.top();
lineNumbers.pop();
FileIndex = fileIndexes.top();
fileIndexes.pop();
CurrentToken.clear();
continue;
}
addtoken(CurrentToken, lineno, FileIndex, true);
if (!CurrentToken.empty()) {
_back->isExpandedMacro(expandedMacro);
expandedMacro = false;
CurrentToken.clear();
}
if (ch == '\n') {
if (_settings->terminated())
return false;
++lineno;
continue;
} else if (ch == ' ') {
continue;
}
CurrentToken += ch;
// Add "++", "--", ">>" or ... token
if (std::strchr("+-<>=:&|", ch) && (code.peek() == ch))
CurrentToken += (char)code.get();
addtoken(CurrentToken, lineno, FileIndex);
_back->isExpandedMacro(expandedMacro);
CurrentToken.clear();
expandedMacro = false;
continue;
}
CurrentToken += ch;
}
addtoken(CurrentToken, lineno, FileIndex, true);
if (!CurrentToken.empty())
_back->isExpandedMacro(expandedMacro);
// Split up ++ and --..
for (Token *tok = _front; tok; tok = tok->next()) {
if (!Token::Match(tok, "++|--"))
continue;
if (Token::Match(tok->previous(), "%num% ++|--") ||
Token::Match(tok, "++|-- %num%")) {
tok->str(tok->str()[0]);
tok->insertToken(tok->str());
}
}
Token::assignProgressValues(_front);
for (std::size_t i = 1; i < _files.size(); i++)
_files[i] = Path::getRelativePath(_files[i], _settings->basePaths);
return true;
return outputList.empty();
}
//---------------------------------------------------------------------------
@@ -428,12 +242,34 @@ void TokenList::createTokens(const simplecpp::TokenList *tokenList)
}
for (const simplecpp::Token *tok = tokenList->cfront(); tok; tok = tok->next) {
std::string str = tok->str;
// Replace hexadecimal value with decimal
// TODO: Remove this
const bool isHex = MathLib::isIntHex(str) ;
if (isHex || MathLib::isOct(str) || MathLib::isBin(str)) {
// TODO: It would be better if TokenList didn't simplify hexadecimal numbers
std::string suffix;
if (isHex &&
str.size() == (2 + _settings->int_bit / 4) &&
(str[2] >= '8') && // includes A-F and a-f
MathLib::getSuffix(str).empty()
)
suffix = "U";
str = MathLib::value(str).str() + suffix;
}
// Float literal
if (str.size() > 1 && str[0] == '.' && std::isdigit(str[1]))
str = '0' + str;
if (_back) {
_back->insertToken(tok->str);
_back->insertToken(str);
} else {
_front = new Token(&_back);
_back = _front;
_back->str(tok->str);
_back->str(str);
}
if (isCPP() && _back->str() == "delete")

View File

@@ -1475,7 +1475,7 @@ private:
check("void bufferAccessOutOfBounds2() {\n"
" char *buffer[]={\"a\",\"b\",\"c\"};\n"
" for(int i=3; i--;) {\n"
" printf(\"files(%i): %s\n\", 3-i, buffer[3-i]);\n"
" printf(\"files(%i): %s\", 3-i, buffer[3-i]);\n"
" }\n"
"}");
TODO_ASSERT_EQUALS("[test.cpp:4]: (error) Array 'buffer[3]' accessed at index 3, which is out of bounds.\n", "", errout.str());

View File

@@ -20,7 +20,7 @@
#include "checkcondition.h"
#include "testsuite.h"
#include <tinyxml2.h>
#include <simplecpp.h>
class TestCondition : public TestFixture {
public:
@@ -92,12 +92,24 @@ private:
settings0.inconclusive = inconclusive;
CheckCondition checkCondition;
// Tokenize..
Tokenizer tokenizer(&settings0, this);
// Raw tokens..
std::vector<std::string> files;
files.push_back(filename);
std::istringstream istr(code);
tokenizer.tokenize(istr, filename);
const simplecpp::TokenList tokens1(istr, files, files[0]);
// Preprocess..
simplecpp::TokenList tokens2(files);
std::map<std::string, simplecpp::TokenList*> filedata;
simplecpp::preprocess(tokens2, tokens1, files, filedata, simplecpp::DUI());
// Tokenizer..
Tokenizer tokenizer(&settings0, this);
tokenizer.createTokens(&tokens2);
tokenizer.simplifyTokens1("");
// Run checks..
CheckCondition checkCondition;
checkCondition.runChecks(&tokenizer, &settings0, this);
tokenizer.simplifyTokenList2();
checkCondition.runSimplifiedChecks(&tokenizer, &settings0, this);
@@ -1033,8 +1045,9 @@ private:
}
void incorrectLogicOperator4() {
check("void f(int x) {\n"
" if (x && x != $0) {}\n"
check("#define ZERO 0\n"
"void f(int x) {\n"
" if (x && x != ZERO) {}\n"
"}");
ASSERT_EQUALS("", errout.str());
}
@@ -1791,22 +1804,25 @@ private:
errout.str());
// Avoid FP when condition comes from macro
check("void f() {\n"
check("#define NOT !\n"
"void f() {\n"
" int x = 0;\n"
" if (a) { return; }\n" // <- this is just here to fool simplifyKnownVariabels
" if ($!x) {}\n"
" if (NOT x) {}\n"
"}");
ASSERT_EQUALS("", errout.str());
check("void f() {\n"
check("#define M x != 0\n"
"void f() {\n"
" int x = 0;\n"
" if (a) { return; }\n" // <- this is just here to fool simplifyKnownVariabels
" if ($x != $0) {}\n"
" if (M) {}\n"
"}");
ASSERT_EQUALS("", errout.str());
check("void f() {\n"
" $if $( 1 $&& $x()) {}\n"
check("#define IF(X) if (X && x())\n"
"void f() {\n"
" IF(1) {}\n"
"}");
ASSERT_EQUALS("", errout.str());

View File

@@ -361,7 +361,7 @@ private:
// Passed as function argument
check("int f()\n"
"{\n"
" printf(\"Magic guess: %d\n\", getpwent());\n"
" printf(\"Magic guess: %d\", getpwent());\n"
"}");
ASSERT_EQUALS("[test.cpp:3]: (portability) Non reentrant function 'getpwent' called. For threadsafe applications it is recommended to use the reentrant replacement function 'getpwent_r'.\n", errout.str());

View File

@@ -19,7 +19,7 @@
#include "testsuite.h"
#include "tokenize.h"
#include "checkother.h"
#include <simplecpp.h>
class TestIncompleteStatement : public TestFixture {
public:
@@ -33,10 +33,22 @@ private:
// Clear the error buffer..
errout.str("");
// Raw tokens..
std::vector<std::string> files;
files.push_back("test.cpp");
std::istringstream istr(code);
const simplecpp::TokenList tokens1(istr, files, files[0]);
// Preprocess..
simplecpp::TokenList tokens2(files);
std::map<std::string, simplecpp::TokenList*> filedata;
simplecpp::preprocess(tokens2, tokens1, files, filedata, simplecpp::DUI());
// Tokenize..
Tokenizer tokenizer(&settings, this);
std::istringstream istr(code);
tokenizer.tokenize(istr, "test.cpp");
tokenizer.createTokens(&tokens2);
tokenizer.simplifyTokens1("");
tokenizer.simplifyTokenList2();
// Check for incomplete statements..
@@ -148,7 +160,8 @@ private:
check("void f() { (void*)0; }");
ASSERT_EQUALS("", errout.str());
check("void f() { $0; }");
check("#define X 0\n"
"void f() { X; }");
ASSERT_EQUALS("", errout.str());
}

View File

@@ -5963,17 +5963,19 @@ private:
// Clear the error buffer..
errout.str("");
std::istringstream istr(code);
std::vector<std::string> files;
files.push_back("test.cpp");
const simplecpp::TokenList tokens1(istr, files, files[0]);
// Preprocess...
Preprocessor preprocessor(settings, this);
std::istringstream istrpreproc(code);
std::map<std::string, std::string> actual;
preprocessor.preprocess(istrpreproc, actual, "test.cpp");
const simplecpp::TokenList &tokens2 = preprocessor.preprocess(tokens1, "", files);
// Tokenize..
// Tokenizer..
Tokenizer tokenizer(&settings, this);
std::istringstream istr(actual[""]);
tokenizer.tokenize(istr, "test.cpp");
tokenizer.createTokens(&tokens2);
tokenizer.simplifyTokenList1(files[0].c_str());
tokenizer.simplifyTokenList2();
// Check for memory leaks..

View File

@@ -20,7 +20,7 @@
#include "checknullpointer.h"
#include "testsuite.h"
#include <tinyxml2.h>
#include <simplecpp.h>
class TestNullPointer : public TestFixture {
public:
@@ -120,6 +120,38 @@ private:
checkNullPointer.runSimplifiedChecks(&tokenizer, &settings, this);
}
void checkP(const char code[]) {
// Clear the error buffer..
errout.str("");
settings.inconclusive = false;
// Raw tokens..
std::vector<std::string> files;
files.push_back("test.cpp");
std::istringstream istr(code);
const simplecpp::TokenList tokens1(istr, files, files[0]);
// Preprocess..
simplecpp::TokenList tokens2(files);
std::map<std::string, simplecpp::TokenList*> filedata;
simplecpp::preprocess(tokens2, tokens1, files, filedata, simplecpp::DUI());
// Tokenizer..
Tokenizer tokenizer(&settings, this);
tokenizer.createTokens(&tokens2);
tokenizer.simplifyTokens1("");
// Check for null pointer dereferences..
CheckNullPointer checkNullPointer;
checkNullPointer.runChecks(&tokenizer, &settings, this);
tokenizer.simplifyTokenList2();
checkNullPointer.runSimplifiedChecks(&tokenizer, &settings, this);
}
void nullpointerAfterLoop() {
check("int foo(const Token *tok)\n"
@@ -507,10 +539,11 @@ private:
}
// #3425 - false positives when there are macros
check("void f(struct FRED *fred) {\n"
" fred->x = 0;\n"
" $if(!fred){}\n"
"}");
checkP("#define IF if\n"
"void f(struct FRED *fred) {\n"
" fred->x = 0;\n"
" IF(!fred){}\n"
"}");
ASSERT_EQUALS("", errout.str());
}
@@ -831,10 +864,11 @@ private:
ASSERT_EQUALS("", errout.str());
// #3425 - false positives when there are macros
check("void f(int *p) {\n"
" *p = 0;\n"
" $if(!p){}\n"
"}");
checkP("#define IF if\n"
"void f(int *p) {\n"
" *p = 0;\n"
" IF(!p){}\n"
"}");
ASSERT_EQUALS("", errout.str());
check("void f() {\n" // #3914 - false positive

View File

@@ -23,7 +23,7 @@
#include "testsuite.h"
#include "testutils.h"
#include <tinyxml2.h>
#include <simplecpp.h>
class TestOther : public TestFixture {
public:
@@ -227,6 +227,43 @@ private:
}
}
void checkP(const char code[], const char *filename = "test.cpp", Settings* settings = 0) {
// Clear the error buffer..
errout.str("");
settings = &_settings;
settings->addEnabled("style");
settings->addEnabled("warning");
settings->addEnabled("portability");
settings->addEnabled("performance");
settings->standards.c = Standards::CLatest;
settings->standards.cpp = Standards::CPPLatest;
settings->inconclusive = true;
settings->experimental = false;
// Raw tokens..
std::vector<std::string> files;
files.push_back(filename);
std::istringstream istr(code);
const simplecpp::TokenList tokens1(istr, files, files[0]);
// Preprocess..
simplecpp::TokenList tokens2(files);
std::map<std::string, simplecpp::TokenList*> filedata;
simplecpp::preprocess(tokens2, tokens1, files, filedata, simplecpp::DUI());
// Tokenizer..
Tokenizer tokenizer(settings, this);
tokenizer.createTokens(&tokens2);
tokenizer.simplifyTokens1("");
// Check..
CheckOther checkOther(&tokenizer, settings, this);
checkOther.runChecks(&tokenizer, settings, this);
tokenizer.simplifyTokenList2();
checkOther.runSimplifiedChecks(&tokenizer, settings, this);
}
void checkposix(const char code[]) {
static Settings settings;
settings.addEnabled("warning");
@@ -579,7 +616,7 @@ private:
check("void f()\n"
"{\n"
" double x = 3.0 / 0.0 + 1.0\n"
" printf(\"%f\n\", x);\n"
" printf(\"%f\", x);\n"
"}");
ASSERT_EQUALS(
"[test.cpp:3]: (style) Using NaN/Inf in a computation.\n", errout.str());
@@ -587,7 +624,7 @@ private:
check("void f()\n"
"{\n"
" double x = 3.0 / 0.0 - 1.0\n"
" printf(\"%f\n\", x);\n"
" printf(\"%f\", x);\n"
"}");
ASSERT_EQUALS(
"[test.cpp:3]: (style) Using NaN/Inf in a computation.\n", errout.str());
@@ -595,7 +632,7 @@ private:
check("void f()\n"
"{\n"
" double x = 1.0 + 3.0 / 0.0\n"
" printf(\"%f\n\", x);\n"
" printf(\"%f\", x);\n"
"}");
ASSERT_EQUALS(
"[test.cpp:3]: (style) Using NaN/Inf in a computation.\n", errout.str());
@@ -603,7 +640,7 @@ private:
check("void f()\n"
"{\n"
" double x = 1.0 - 3.0 / 0.0\n"
" printf(\"%f\n\", x);\n"
" printf(\"%f\", x);\n"
"}");
ASSERT_EQUALS(
"[test.cpp:3]: (style) Using NaN/Inf in a computation.\n", errout.str());
@@ -611,7 +648,7 @@ private:
check("void f()\n"
"{\n"
" double x = 3.0 / 0.0\n"
" printf(\"%f\n\", x);\n"
" printf(\"%f\", x);\n"
"}");
ASSERT_EQUALS("", errout.str());
@@ -2709,17 +2746,17 @@ private:
ASSERT_EQUALS("", errout.str());
check("void foo(int c) {\n"
" printf(\"%i\n\", ({x==0;}));\n"
" printf(\"%i\", ({x==0;}));\n"
"}");
ASSERT_EQUALS("", errout.str());
check("void foo(int x) {\n"
" printf(\"%i\n\", ({int x = do_something(); x == 0;}));\n"
" printf(\"%i\", ({int x = do_something(); x == 0;}));\n"
"}");
ASSERT_EQUALS("", errout.str());
check("void foo(int x) {\n"
" printf(\"%i\n\", ({x == 0; x > 0 ? 10 : 20}));\n"
" printf(\"%i\", ({x == 0; x > 0 ? 10 : 20}));\n"
"}");
ASSERT_EQUALS("[test.cpp:2]: (warning, inconclusive) Found suspicious equality comparison. Did you intend to assign a value instead?\n", errout.str());
@@ -3332,12 +3369,14 @@ private:
}
void duplicateBranch2() {
check("void f(int x) {\n" // #4329
" if (x)\n"
" $;\n"
" else\n"
" $;\n"
"}");
checkP("#define DOSTUFF1 ;\n"
"#define DOSTUFF2 ;\n"
"void f(int x) {\n" // #4329
" if (x)\n"
" DOSTUFF1\n"
" else\n"
" DOSTUFF2\n"
"}");
ASSERT_EQUALS("", errout.str());
}
@@ -6014,10 +6053,11 @@ private:
void testEvaluationOrderMacro() {
// macro, don't bailout (#7233)
check((std::string("void f(int x) {\n"
" return x + ") + Preprocessor::macroChar + "x++;\n"
"}").c_str(), "test.c");
ASSERT_EQUALS("[test.c:2]: (error) Expression 'x+x++' depends on order of evaluation of side effects\n", errout.str());
checkP("#define X x\n"
"void f(int x) {\n"
" return x + X++;\n"
"}", "test.c");
ASSERT_EQUALS("[test.c:3]: (error) Expression 'x+x++' depends on order of evaluation of side effects\n", errout.str());
}
void testEvaluationOrderSequencePointsFunctionCall() {

View File

@@ -3470,12 +3470,12 @@ private:
void simplifyCharAt() { // ticket #4481
ASSERT_EQUALS("'h' ;", tok("\"hello\"[0] ;"));
ASSERT_EQUALS("'\n' ;", tok("\"\n\"[0] ;"));
ASSERT_EQUALS("'\\n' ;", tok("\"\\n\"[0] ;"));
ASSERT_EQUALS("'\\0' ;", tok("\"hello\"[5] ;"));
ASSERT_EQUALS("'\\0' ;", tok("\"\"[0] ;"));
ASSERT_EQUALS("'\\0' ;", tok("\"\\0\"[0] ;"));
ASSERT_EQUALS("'\\n' ;", tok("\"hello\\nworld\"[5] ;"));
ASSERT_EQUALS("'w' ;", tok("\"hello\nworld\"[6] ;"));
ASSERT_EQUALS("'w' ;", tok("\"hello world\"[6] ;"));
ASSERT_EQUALS("\"hello\" [ 7 ] ;", tok("\"hello\"[7] ;"));
ASSERT_EQUALS("\"hello\" [ -1 ] ;", tok("\"hello\"[-1] ;"));
}
@@ -3485,11 +3485,11 @@ private:
" int c, t;\n"
"again:\n"
" do {\n"
" if ((c = macroid(c)) == EOF_CHAR || c == '\n') {\n"
" if ((c = macroid(c)) == EOF_CHAR || c == '\\n') {\n"
" }\n"
" } while ((t = type[c]) == LET && catenate());\n"
"}\n";
ASSERT_EQUALS("int evallex ( ) { int c ; int t ; again : ; do { c = macroid ( c ) ; if ( c == EOF_CHAR || c == '\n' ) { } t = type [ c ] ; } while ( t == LET && catenate ( ) ) ; }",
ASSERT_EQUALS("int evallex ( ) { int c ; int t ; again : ; do { c = macroid ( c ) ; if ( c == EOF_CHAR || c == '\\n' ) { } t = type [ c ] ; } while ( t == LET && catenate ( ) ) ; }",
tok(code, true));
}

View File

@@ -19,7 +19,7 @@
#include "tokenize.h"
#include "checksizeof.h"
#include "testsuite.h"
#include <simplecpp.h>
class TestSizeof : public TestFixture {
public:
@@ -60,6 +60,31 @@ private:
checkSizeof.runChecks(&tokenizer, &settings, this);
}
void checkP(const char code[]) {
// Clear the error buffer..
errout.str("");
// Raw tokens..
std::vector<std::string> files;
files.push_back("test.cpp");
std::istringstream istr(code);
const simplecpp::TokenList tokens1(istr, files, files[0]);
// Preprocess..
simplecpp::TokenList tokens2(files);
std::map<std::string, simplecpp::TokenList*> filedata;
simplecpp::preprocess(tokens2, tokens1, files, filedata, simplecpp::DUI());
// Tokenize..
Tokenizer tokenizer(&settings, this);
tokenizer.createTokens(&tokens2);
tokenizer.simplifyTokens1("");
// Check...
CheckSizeof checkSizeof(&tokenizer, &settings, this);
checkSizeof.runChecks(&tokenizer, &settings, this);
}
void sizeofsizeof() {
check("void foo()\n"
"{\n"
@@ -109,23 +134,28 @@ private:
ASSERT_EQUALS("[test.cpp:1]: (warning) Found calculation inside sizeof().\n", errout.str());
// #6888
check("int f(int i) {\n"
" $($void$)$sizeof$($i $!= $2$);\n" // '$' sets Token::isExpandedMacro() to true
" $($void$)$($($($($sizeof$($i $!= $2$)$)$)$)$);\n"
" $static_cast<void>$($sizeof($i $!= $2$)$);\n"
" $static_cast<void>$($($($($($sizeof$($i $!= $2$)$)$)$)$)$);\n"
" return i + foo(1);\n"
"}");
checkP("#define SIZEOF1 sizeof(i != 2)\n"
"#define SIZEOF2 ((sizeof(i != 2)))\n"
"#define VOIDCAST1 (void)\n"
"#define VOIDCAST2(SZ) static_cast<void>(SZ)\n"
"int f(int i) {\n"
" VOIDCAST1 SIZEOF1;\n"
" VOIDCAST1 SIZEOF2;\n"
" VOIDCAST2(SIZEOF1);\n"
" VOIDCAST2(SIZEOF2);\n"
" return i + foo(1);\n"
"}");
ASSERT_EQUALS("", errout.str());
check("int f(int i) {\n"
" $sizeof$($i $!= $2$);\n"
" $($($sizeof($i $!= 2$)$)$);\n"
" return i + foo(1);\n"
"}");
ASSERT_EQUALS("[test.cpp:2]: (warning, inconclusive) Found calculation inside sizeof().\n"
"[test.cpp:3]: (warning, inconclusive) Found calculation inside sizeof().\n", errout.str());
checkP("#define SIZEOF1 sizeof(i != 2)\n"
"#define SIZEOF2 ((sizeof(i != 2)))\n"
"int f(int i) {\n"
" SIZEOF1;\n"
" SIZEOF2;\n"
" return i + foo(1);\n"
"}");
ASSERT_EQUALS("[test.cpp:4]: (warning, inconclusive) Found calculation inside sizeof().\n"
"[test.cpp:5]: (warning, inconclusive) Found calculation inside sizeof().\n", errout.str());
}
void sizeofForArrayParameter() {
@@ -202,7 +232,7 @@ private:
check("typedef char Fixname[1000];\n"
"int f2(Fixname& f2v) {\n"
" int i = sizeof(f2v);\n"
" printf(\"sizeof f2v %d\n\", i);\n"
" printf(\"sizeof f2v %d\", i);\n"
" }\n"
);
ASSERT_EQUALS("", errout.str());

View File

@@ -200,10 +200,6 @@ private:
TEST_CASE(simplifyExternC);
TEST_CASE(simplifyKeyword); // #5842 - remove C99 static keyword between []
TEST_CASE(file1);
TEST_CASE(file2);
TEST_CASE(file3);
TEST_CASE(isZeroNumber);
TEST_CASE(isOneNumber);
TEST_CASE(isTwoNumber);
@@ -736,10 +732,10 @@ private:
}
void tokenize22() { // tokenize special marker $ from preprocessor
ASSERT_EQUALS("a $b", tokenizeAndStringify("a$b"));
ASSERT_EQUALS("a$b", tokenizeAndStringify("a$b"));
ASSERT_EQUALS("a $b\nc", tokenizeAndStringify("a $b\nc"));
ASSERT_EQUALS("a = $0 ;", tokenizeAndStringify("a = $0;"));
ASSERT_EQUALS("a $++ ;", tokenizeAndStringify("a$++;"));
ASSERT_EQUALS("a$ ++ ;", tokenizeAndStringify("a$++;"));
ASSERT_EQUALS("$if ( ! p )", tokenizeAndStringify("$if(!p)"));
}
@@ -2985,78 +2981,6 @@ private:
ASSERT_EQUALS("int foo ( ) ;", tokenizeAndStringify("extern \"C\" { int foo(); }"));
}
void file1() {
const char code[] = "a1\n"
"#file \"b\"\n"
"b1\n"
"b2\n"
"#endfile\n"
"a3\n";
errout.str("");
// tokenize..
Tokenizer tokenizer(&settings0, this);
std::istringstream istr(code);
tokenizer.tokenize(istr, "a");
for (const Token *tok = tokenizer.tokens(); tok; tok = tok->next()) {
std::ostringstream ostr;
ostr << char('a' + tok->fileIndex()) << tok->linenr();
ASSERT_EQUALS(tok->str(), ostr.str());
}
}
void file2() {
const char code[] = "a1\n"
"#file \"b\"\n"
"b1\n"
"b2\n"
"#file \"c\"\n"
"c1\n"
"c2\n"
"#endfile\n"
"b4\n"
"#endfile\n"
"a3\n"
"#file \"d\"\n"
"d1\n"
"#endfile\n"
"a5\n";
errout.str("");
// tokenize..
Tokenizer tokenizer(&settings0, this);
std::istringstream istr(code);
tokenizer.tokenize(istr, "a");
for (const Token *tok = tokenizer.tokens(); tok; tok = tok->next()) {
std::ostringstream ostr;
ostr << char('a' + tok->fileIndex()) << tok->linenr();
ASSERT_EQUALS(tok->str(), ostr.str());
}
}
void file3() {
const char code[] = "#file \"c:\\a.h\"\n"
"123 ;\n"
"#endfile\n";
errout.str("");
// tokenize..
Tokenizer tokenizer(&settings0, this);
std::istringstream istr(code);
tokenizer.tokenize(istr, "a.cpp");
ASSERT_EQUALS(Path::toNativeSeparators("[c:\\a.h:1]"), tokenizer.list.fileLine(tokenizer.tokens()));
}
void simplifyFunctionParameters() {
{
const char code[] = "char a [ ABC ( DEF ) ] ;";
@@ -6228,9 +6152,9 @@ private:
" dst[0] = 0;"
" _tcscat(dst, src);"
" LPTSTR d = _tcsdup(str);"
" _tprintf(_T(\"Hello world!\n\"));"
" _stprintf(dst, _T(\"Hello!\n\"));"
" _sntprintf(dst, sizeof(dst) / sizeof(TCHAR), _T(\"Hello world!\n\"));"
" _tprintf(_T(\"Hello world!\"));"
" _stprintf(dst, _T(\"Hello!\"));"
" _sntprintf(dst, sizeof(dst) / sizeof(TCHAR), _T(\"Hello world!\"));"
" _tscanf(_T(\"%s\"), dst);"
" _stscanf(dst, _T(\"%s\"), dst);"
"}"
@@ -6249,9 +6173,9 @@ private:
"dst [ 0 ] = 0 ; "
"strcat ( dst , src ) ; "
"char * d ; d = strdup ( str ) ; "
"printf ( \"Hello world!\n\" ) ; "
"sprintf ( dst , \"Hello!\n\" ) ; "
"_snprintf ( dst , sizeof ( dst ) / sizeof ( char ) , \"Hello world!\n\" ) ; "
"printf ( \"Hello world!\" ) ; "
"sprintf ( dst , \"Hello!\" ) ; "
"_snprintf ( dst , sizeof ( dst ) / sizeof ( char ) , \"Hello world!\" ) ; "
"scanf ( \"%s\" , dst ) ; "
"sscanf ( dst , \"%s\" , dst ) ; "
"} "
@@ -6275,9 +6199,9 @@ private:
" dst[0] = 0;"
" _tcscat(dst, src);"
" LPTSTR d = _tcsdup(str);"
" _tprintf(_T(\"Hello world!\n\"));"
" _stprintf(dst, _T(\"Hello!\n\"));"
" _sntprintf(dst, sizeof(dst) / sizeof(TCHAR), _T(\"Hello world!\n\"));"
" _tprintf(_T(\"Hello world!\"));"
" _stprintf(dst, _T(\"Hello!\"));"
" _sntprintf(dst, sizeof(dst) / sizeof(TCHAR), _T(\"Hello world!\"));"
" _tscanf(_T(\"%s\"), dst);"
" _stscanf(dst, _T(\"%s\"), dst);"
"}";
@@ -6296,9 +6220,9 @@ private:
"dst [ 0 ] = 0 ; "
"wcscat ( dst , src ) ; "
"wchar_t * d ; d = wcsdup ( str ) ; "
"wprintf ( L\"Hello world!\n\" ) ; "
"swprintf ( dst , L\"Hello!\n\" ) ; "
"_snwprintf ( dst , sizeof ( dst ) / sizeof ( wchar_t ) , L\"Hello world!\n\" ) ; "
"wprintf ( L\"Hello world!\" ) ; "
"swprintf ( dst , L\"Hello!\" ) ; "
"_snwprintf ( dst , sizeof ( dst ) / sizeof ( wchar_t ) , L\"Hello world!\" ) ; "
"wscanf ( L\"%s\" , dst ) ; "
"swscanf ( dst , L\"%s\" , dst ) ; "
"}";

View File

@@ -34,8 +34,6 @@ private:
Settings settings;
void run() {
TEST_CASE(line1); // Ticket #4408
TEST_CASE(line2); // Ticket #5423
TEST_CASE(testaddtoken1);
TEST_CASE(testaddtoken2);
TEST_CASE(inc);
@@ -64,54 +62,6 @@ private:
ASSERT_EQUALS("4026531840U", tokenlist.front()->str());
}
void line1() const {
// Test for Ticket #4408
const char code[] = "#file \"c:\\a.h\"\n"
"first\n"
"#line 5\n"
"second\n"
"#line not-a-number\n"
"third\n"
"#line 100 \"i.h\"\n"
"fourth\n"
"fifth\n"
"#endfile\n";
errout.str("");
TokenList tokenList(&settings);
std::istringstream istr(code);
bool res = tokenList.createTokens(istr, "a.cpp");
ASSERT_EQUALS(res, true);
for (const Token *tok = tokenList.front(); tok; tok = tok->next()) {
if (tok->str() == "first")
ASSERT_EQUALS(1, tok->linenr());
if (tok->str() == "second")
ASSERT_EQUALS(5, tok->linenr());
if (tok->str() == "third")
ASSERT_EQUALS(7, tok->linenr());
if (tok->str() == "fourth")
ASSERT_EQUALS(100, tok->linenr());
if (tok->str() == "fifth")
ASSERT_EQUALS(101, tok->linenr());
}
}
void line2() const {
const char code[] = "#line 8 \"c:\\a.h\"\n"
"123\n";
errout.str("");
// tokenize..
TokenList tokenlist(&settings);
std::istringstream istr(code);
tokenlist.createTokens(istr, "a.cpp");
ASSERT_EQUALS(Path::toNativeSeparators("[c:\\a.h:8]"), tokenlist.fileLine(tokenlist.front()));
}
void inc() const {
const char code[] = "a++1;1++b;";

View File

@@ -20,7 +20,7 @@
#include "tokenize.h"
#include "checkclass.h"
#include "testsuite.h"
#include <simplecpp.h>
class TestUnusedPrivateFunction : public TestFixture {
public:
@@ -85,10 +85,21 @@ private:
settings.platform(platform);
// Raw tokens..
std::vector<std::string> files;
files.push_back("test.cpp");
std::istringstream istr(code);
const simplecpp::TokenList tokens1(istr, files, files[0]);
// Preprocess..
simplecpp::TokenList tokens2(files);
std::map<std::string, simplecpp::TokenList*> filedata;
simplecpp::preprocess(tokens2, tokens1, files, filedata, simplecpp::DUI());
// Tokenize..
Tokenizer tokenizer(&settings, this);
std::istringstream istr(code);
tokenizer.tokenize(istr, "test.cpp");
tokenizer.createTokens(&tokens2);
tokenizer.simplifyTokens1("");
tokenizer.simplifyTokenList2();
// Check for unused private functions..
@@ -115,7 +126,7 @@ private:
ASSERT_EQUALS("[test.cpp:4]: (style) Unused private function: 'Fred::f'\n", errout.str());
check("#file \"p.h\"\n"
check("#line 1 \"p.h\"\n"
"class Fred\n"
"{\n"
"private:\n"
@@ -124,7 +135,7 @@ private:
" Fred();\n"
"};\n"
"\n"
"#endfile\n"
"#line 1 \"p.cpp\"\n"
"Fred::Fred()\n"
"{ }\n"
"\n"
@@ -133,7 +144,7 @@ private:
ASSERT_EQUALS("[p.h:4]: (style) Unused private function: 'Fred::f'\n", errout.str());
check("#file \"p.h\"\n"
check("#line 1 \"p.h\"\n"
"class Fred\n"
"{\n"
"private:\n"
@@ -141,7 +152,7 @@ private:
"};\n"
"\n"
"\n"
"#endfile\n"
"#line 1 \"p.cpp\"\n"
"\n"
"void Fred::f()\n"
"{\n"
@@ -149,7 +160,7 @@ private:
ASSERT_EQUALS("[p.h:4]: (style) Unused private function: 'Fred::f'\n", errout.str());
// Don't warn about include files which implementation we don't see
check("#file \"p.h\"\n"
check("#line 1 \"p.h\"\n"
"class Fred\n"
"{\n"
"private:\n"
@@ -157,7 +168,7 @@ private:
"void g() {}\n"
"};\n"
"\n"
"#endfile\n"
"#line 1 \"p.cpp\"\n"
"\n"
"int main()\n"
"{\n"

View File

@@ -419,7 +419,7 @@ private:
"} var = {0};\n"
"int main(int argc, char *argv[])\n"
"{\n"
" printf(\"var.struct1.a = %d\n\", var.struct1.a);\n"
" printf(\"var.struct1.a = %d\", var.struct1.a);\n"
" return 1;\n"
"}\n");
ASSERT_EQUALS("", errout.str());

View File

@@ -22,6 +22,7 @@
#include "tokenize.h"
#include "token.h"
#include <simplecpp.h>
#include <vector>
#include <string>
#include <cmath>
@@ -167,12 +168,21 @@ private:
void bailout(const char code[]) {
settings.debugwarnings = true;
errout.str("");
std::vector<std::string> files;
files.push_back("test.cpp");
std::istringstream istr(code);
const simplecpp::TokenList tokens1(istr, files, files[0]);
simplecpp::TokenList tokens2(files);
std::map<std::string, simplecpp::TokenList*> filedata;
simplecpp::preprocess(tokens2, tokens1, files, filedata, simplecpp::DUI());
// Tokenize..
Tokenizer tokenizer(&settings, this);
std::istringstream istr(code);
errout.str("");
tokenizer.tokenize(istr, "test.cpp");
tokenizer.createTokens(&tokens2);
tokenizer.simplifyTokens1("");
settings.debugwarnings = false;
}
@@ -893,11 +903,12 @@ private:
void valueFlowBeforeConditionMacro() {
// bailout: condition is a expanded macro
bailout("void f(int x) {\n"
bailout("#define M if (x==123) {}\n"
"void f(int x) {\n"
" a = x;\n"
" $if ($x==$123){}\n"
" M;\n"
"}");
ASSERT_EQUALS("[test.cpp:3]: (debug) ValueFlow bailout: variable x, condition is defined in macro\n", errout.str());
ASSERT_EQUALS("[test.cpp:4]: (debug) ValueFlow bailout: variable x, condition is defined in macro\n", errout.str());
}
void valueFlowBeforeConditionGoto() {