diff --git a/CommonCheck.cpp b/CommonCheck.cpp index 800539329..cc6accadf 100644 --- a/CommonCheck.cpp +++ b/CommonCheck.cpp @@ -5,6 +5,7 @@ #include <iostream> //--------------------------------------------------------------------------- bool HasErrors; +std::ostringstream errout; //--------------------------------------------------------------------------- std::string FileLine(TOKEN *tok) @@ -17,7 +18,7 @@ std::string FileLine(TOKEN *tok) void ReportErr(const std::string errmsg) { - std::cerr << errmsg << std::endl; + errout << errmsg << std::endl; HasErrors = true; } //--------------------------------------------------------------------------- diff --git a/CommonCheck.h b/CommonCheck.h index c6e66d6d8..1f57b6f99 100644 --- a/CommonCheck.h +++ b/CommonCheck.h @@ -4,12 +4,14 @@ //--------------------------------------------------------------------------- #include <string> +#include <sstream> struct TOKEN; std::string FileLine(TOKEN *tok); void ReportErr(const std::string errmsg); +extern std::ostringstream errout; bool IsName(const char str[]); diff --git a/Makefile b/Makefile index 52a465f81..a8b51a9db 100644 --- a/Makefile +++ b/Makefile @@ -3,11 +3,11 @@ OBJS=$(SRCS:%.cpp=%.o) %.o: %.cpp - g++ -Wall -pedantic -I. -o $@ -c $^ + g++ -Wall -pedantic -g -I. 
-o $@ -c $^ all: ${OBJS} main.o - g++ -o cppcheck $^ -test: ${OBJS} TestTok.o - g++ -o cppcheck_test $^ + g++ -Wall -g -o cppcheck $^ +test: ${OBJS} tests.o + g++ -Wall -g -o cppcheck_test $^ clean: rm -f *.o cppcheck_test cppcheck diff --git a/main.cpp b/main.cpp index 9a18db156..cb7926bde 100644 --- a/main.cpp +++ b/main.cpp @@ -4,6 +4,7 @@ #include "tokenize.h" // <- Tokenizer #include "Statements.h" // <- Statement list +#include "CommonCheck.h" #include "CheckMemoryLeak.h" #include "CheckBufferOverrun.h" @@ -62,6 +63,8 @@ int main(int argc, char* argv[]) CppCheck(fname); + std::cerr << errout.str(); + return 0; } diff --git a/tests.cpp b/tests.cpp new file mode 100644 index 000000000..1d6771690 --- /dev/null +++ b/tests.cpp @@ -0,0 +1,56 @@ + +// Unit-testing cppcheck + +#include "tokenize.h" // <- Tokenizer +#include "CommonCheck.h" +#include "CheckBufferOverrun.h" + +#include <iostream> +#include <sstream> + +#define assert_equal(A,B) if (A!=B) { std::cerr << "Failed at line " << __LINE__ << "\n"; FailCount++; } else { SuccessCount++; } + +bool Debug = false; +bool ShowAll = false; +bool CheckCodingStyle = false; + +static unsigned int FailCount, SuccessCount; + +static void buffer_overrun_1(); + +int main() +{ + Files.push_back( std::string("test.cpp") ); + buffer_overrun_1(); + std::cout << "Success Rate: " + << SuccessCount + << " / " + << (SuccessCount + FailCount) + << std::endl; + return 0; +} + +static void buffer_overrun_1() +{ + const char code[] = "void f()\n" + "{\n" + " char str[0x10];\n" + " str[15] = 0;\n" + " str[16] = 0;\n" + "}\n"; + + // Tokenize.. 
+ tokens = tokens_back = NULL; + std::istringstream istr(code); + TokenizeCode( istr ); + SimplifyTokenList(); + + errout.str(""); + CheckBufferOverrun(); + + std::string err = errout.str(); + assert_equal( errout.str(), "[test.cpp:5]: Array index out of bounds\n" ); +} + + + diff --git a/tokenize.cpp b/tokenize.cpp index d381b9ed2..40eee932e 100644 --- a/tokenize.cpp +++ b/tokenize.cpp @@ -264,21 +264,34 @@ void Tokenize(const char FileName[]) return; // The "Files" vector remembers what files have been tokenized.. - unsigned int CurrentFile = Files.size(); Files.push_back(FileName); + // Tokenize the file.. + TokenizeCode( fin, Files.size() - 1 ); +} +//--------------------------------------------------------------------------- + + + + + +//--------------------------------------------------------------------------- +// Tokenize - tokenizes input stream +//--------------------------------------------------------------------------- + +void TokenizeCode(std::istream &code, const unsigned int FileIndex) +{ // Tokenize the file. unsigned int lineno = 1; - char CurrentToken[1000]; - memset(CurrentToken, 0, sizeof(CurrentToken)); + char CurrentToken[1000] = {0}; char *pToken = CurrentToken; - for (char ch = (char)fin.get(); !fin.eof(); ch = (char)fin.get()) + for (char ch = (char)code.get(); !code.eof(); ch = (char)code.get()) { // Preprocessor stuff? if (ch == '#' && !CurrentToken[0]) { std::string line; - getline(fin,line); + getline(code,line); line = "#" + line; if (strncmp(line.c_str(),"#include",8)==0 && line.find("\"") != std::string::npos) @@ -288,17 +301,15 @@ void Tokenize(const char FileName[]) line.erase(line.find("\"")); // Relative path.. 
- if (strchr(FileName,'\\')) + if (Files.back().find_first_of("\\/") != std::string::npos) { - char path[1000]; - memset(path,0,sizeof(path)); - const char *p = strrchr(FileName, '\\'); - memcpy(path, FileName, p-FileName+1); + std::string path = Files.back(); + path.erase( 1 + path.find_last_of("\\/") ); line = path + line; } - addtoken("#include", lineno, CurrentFile); - addtoken(line.c_str(), lineno, CurrentFile); + addtoken("#include", lineno, FileIndex); + addtoken(line.c_str(), lineno, FileIndex); Tokenize(line.c_str()); } @@ -332,9 +343,9 @@ void Tokenize(const char FileName[]) if (State==Value) { - addtoken("def", lineno, CurrentFile); - addtoken(strId, lineno, CurrentFile); - addtoken(";", lineno, CurrentFile); + addtoken("def", lineno, FileIndex); + addtoken(strId, lineno, FileIndex); + addtoken(";", lineno, FileIndex); Define(strId, CurrentToken); } @@ -345,8 +356,8 @@ void Tokenize(const char FileName[]) else { - addtoken("#", lineno, CurrentFile); - addtoken(";", lineno, CurrentFile); + addtoken("#", lineno, FileIndex); + addtoken(";", lineno, FileIndex); } lineno++; @@ -356,27 +367,27 @@ void Tokenize(const char FileName[]) if (ch == '\n') { // Add current token.. - addtoken(CurrentToken, lineno++, CurrentFile); + addtoken(CurrentToken, lineno++, FileIndex); memset(CurrentToken, 0, sizeof(CurrentToken)); pToken = CurrentToken; continue; } // Comments.. - if (ch == '/' && !fin.eof()) + if (ch == '/' && !code.eof()) { // Add current token.. - addtoken(CurrentToken, lineno, CurrentFile); + addtoken(CurrentToken, lineno, FileIndex); memset(CurrentToken, 0, sizeof(CurrentToken)); pToken = CurrentToken; // Read next character.. - ch = (char)fin.get(); + ch = (char)code.get(); // If '//'.. 
if (ch == '/') { - while (!fin.eof() && (char)fin.get()!='\n'); + while (!code.eof() && (char)code.get()!='\n'); lineno++; continue; } @@ -386,10 +397,10 @@ void Tokenize(const char FileName[]) { char chPrev; ch = chPrev = 'A'; - while (!fin.eof() && (chPrev!='*' || ch!='/')) + while (!code.eof() && (chPrev!='*' || ch!='/')) { chPrev = ch; - ch = (char)fin.get(); + ch = (char)code.get(); if (ch == '\n') lineno++; } @@ -397,25 +408,25 @@ void Tokenize(const char FileName[]) } // Not a comment.. add token.. - addtoken("/", lineno, CurrentFile); + addtoken("/", lineno, FileIndex); } // char.. if (ch == '\'') { // Add previous token - addtoken(CurrentToken, lineno, CurrentFile); + addtoken(CurrentToken, lineno, FileIndex); memset(CurrentToken, 0, sizeof(CurrentToken)); // Read this .. CurrentToken[0] = ch; - CurrentToken[1] = (char)fin.get(); - CurrentToken[2] = (char)fin.get(); + CurrentToken[1] = (char)code.get(); + CurrentToken[2] = (char)code.get(); if (CurrentToken[1] == '\\') - CurrentToken[3] = (char)fin.get(); + CurrentToken[3] = (char)code.get(); // Add token and start on next.. - addtoken(CurrentToken, lineno, CurrentFile); + addtoken(CurrentToken, lineno, FileIndex); memset(CurrentToken, 0, sizeof(CurrentToken)); pToken = CurrentToken; @@ -425,7 +436,7 @@ void Tokenize(const char FileName[]) // String.. 
if (ch == '\"') { - addtoken(CurrentToken, lineno, CurrentFile); + addtoken(CurrentToken, lineno, FileIndex); memset(CurrentToken, 0, sizeof(CurrentToken)); pToken = CurrentToken; bool special = false; @@ -446,11 +457,11 @@ void Tokenize(const char FileName[]) special = (c == '\\'); // Get next character - c = (char)fin.get(); + c = (char)code.get(); } while (special || c != '\"'); *pToken = '\"'; - addtoken(CurrentToken, lineno, CurrentFile); + addtoken(CurrentToken, lineno, FileIndex); memset(CurrentToken, 0, sizeof(CurrentToken)); pToken = CurrentToken; continue; @@ -458,10 +469,10 @@ void Tokenize(const char FileName[]) if (strchr("+-*/%&|^?!=<>[](){};:,.",ch)) { - addtoken(CurrentToken, lineno, CurrentFile); + addtoken(CurrentToken, lineno, FileIndex); memset(CurrentToken, 0, sizeof(CurrentToken)); CurrentToken[0] = ch; - addtoken(CurrentToken, lineno, CurrentFile); + addtoken(CurrentToken, lineno, FileIndex); memset(CurrentToken, 0, sizeof(CurrentToken)); pToken = CurrentToken; continue; @@ -470,7 +481,7 @@ void Tokenize(const char FileName[]) if (std::isspace(ch) || std::iscntrl(ch)) { - addtoken(CurrentToken, lineno, CurrentFile); + addtoken(CurrentToken, lineno, FileIndex); pToken = CurrentToken; memset(CurrentToken, 0, sizeof(CurrentToken)); continue; diff --git a/tokenize.h b/tokenize.h index 5fd20c889..ad48deb43 100644 --- a/tokenize.h +++ b/tokenize.h @@ -20,6 +20,8 @@ extern struct TOKEN *tokens, *tokens_back; void Tokenize(const char FileName[]); +void TokenizeCode(std::istream &code, const unsigned int FileIndex=0); + // Return size. int SizeOfType(const char type[]);