Unit Testing: Start

This commit is contained in:
Daniel Marjamäki 2008-02-16 15:46:32 +00:00
parent a8e6a4c343
commit 4cb3c7b96b
7 changed files with 116 additions and 41 deletions

View File

@ -5,6 +5,7 @@
#include <sstream> #include <sstream>
//--------------------------------------------------------------------------- //---------------------------------------------------------------------------
bool HasErrors; bool HasErrors;
std::ostringstream errout;
//--------------------------------------------------------------------------- //---------------------------------------------------------------------------
std::string FileLine(TOKEN *tok) std::string FileLine(TOKEN *tok)
@ -17,7 +18,7 @@ std::string FileLine(TOKEN *tok)
void ReportErr(const std::string errmsg) void ReportErr(const std::string errmsg)
{ {
std::cerr << errmsg << std::endl; errout << errmsg << std::endl;
HasErrors = true; HasErrors = true;
} }
//--------------------------------------------------------------------------- //---------------------------------------------------------------------------

View File

@ -4,12 +4,14 @@
//--------------------------------------------------------------------------- //---------------------------------------------------------------------------
#include <string> #include <string>
#include <sstream>
struct TOKEN; struct TOKEN;
std::string FileLine(TOKEN *tok); std::string FileLine(TOKEN *tok);
void ReportErr(const std::string errmsg); void ReportErr(const std::string errmsg);
extern std::ostringstream errout;
bool IsName(const char str[]); bool IsName(const char str[]);

View File

@ -3,11 +3,11 @@ OBJS=$(SRCS:%.cpp=%.o)
%.o: %.cpp %.o: %.cpp
g++ -Wall -pedantic -I. -o $@ -c $^ g++ -Wall -pedantic -g -I. -o $@ -c $^
all: ${OBJS} main.o all: ${OBJS} main.o
g++ -o cppcheck $^ g++ -Wall -g -o cppcheck $^
test: ${OBJS} TestTok.o test: ${OBJS} tests.o
g++ -o cppcheck_test $^ g++ -Wall -g -o cppcheck_test $^
clean: clean:
rm -f *.o cppcheck_test cppcheck rm -f *.o cppcheck_test cppcheck

View File

@ -4,6 +4,7 @@
#include "tokenize.h" // <- Tokenizer #include "tokenize.h" // <- Tokenizer
#include "Statements.h" // <- Statement list #include "Statements.h" // <- Statement list
#include "CommonCheck.h"
#include "CheckMemoryLeak.h" #include "CheckMemoryLeak.h"
#include "CheckBufferOverrun.h" #include "CheckBufferOverrun.h"
@ -62,6 +63,8 @@ int main(int argc, char* argv[])
CppCheck(fname); CppCheck(fname);
std::cerr << errout.str();
return 0; return 0;
} }

56
tests.cpp Normal file
View File

@ -0,0 +1,56 @@
// Unit-testing cppcheck
#include "tokenize.h" // <- Tokenizer
#include "CommonCheck.h"
#include "CheckBufferOverrun.h"
#include <iostream>
#include <sstream>
// Counts one test assertion: on a match SuccessCount is bumped, otherwise
// the failing __LINE__ is reported on stderr and FailCount is bumped.
// The operands are parenthesized so expressions with low-precedence
// operators compare correctly, and the whole body is wrapped in
// do { } while (0) so the macro acts as a single statement and is safe
// inside an if/else without braces.
#define assert_equal(A,B) do { if ((A)!=(B)) { std::cerr << "Failed at line " << __LINE__ << "\n"; FailCount++; } else { SuccessCount++; } } while (0)

// Globals that the checkers read; normally defined in main.cpp, the test
// binary provides its own copies with checking switched to the defaults.
bool Debug = false;
bool ShowAll = false;
bool CheckCodingStyle = false;

// Running tally of failed / passed assert_equal invocations.
static unsigned int FailCount, SuccessCount;

// Test case: writing one element past the end of a local array.
static void buffer_overrun_1();
// Test driver: registers a dummy file name, runs each test case and
// prints a pass/fail summary.
// Returns nonzero when any assertion failed, so the exit status of the
// test binary can be checked from scripts / CI (the original always
// returned 0, hiding failures from callers).
int main()
{
    // Error messages are reported as "[<file>:<line>]"; the checkers look
    // the file name up in the global "Files" list, so seed it here.
    Files.push_back( std::string("test.cpp") );

    buffer_overrun_1();

    std::cout << "Success Rate: "
              << SuccessCount
              << " / "
              << (SuccessCount + FailCount)
              << std::endl;

    return FailCount ? 1 : 0;
}
// Verifies that CheckBufferOverrun flags an out-of-bounds write:
// "str" has 0x10 (16) elements, so str[15] is the last legal index and
// str[16] is one past the end; the checker must report code line 5.
static void buffer_overrun_1()
{
    const char code[] = "void f()\n"
                        "{\n"
                        " char str[0x10];\n"
                        " str[15] = 0;\n"
                        " str[16] = 0;\n"
                        "}\n";

    // Tokenize the snippet from an in-memory stream (no file access).
    tokens = tokens_back = NULL;
    std::istringstream istr(code);
    TokenizeCode( istr );
    SimplifyTokenList();

    // Clear output left over from earlier tests, then run the check.
    errout.str("");
    CheckBufferOverrun();

    // (Removed an unused local that copied errout.str(); the assertion
    // reads the stream directly.)
    assert_equal( errout.str(), "[test.cpp:5]: Array index out of bounds\n" );
}

View File

@ -264,21 +264,34 @@ void Tokenize(const char FileName[])
return; return;
// The "Files" vector remembers what files have been tokenized.. // The "Files" vector remembers what files have been tokenized..
unsigned int CurrentFile = Files.size();
Files.push_back(FileName); Files.push_back(FileName);
// Tokenize the file..
TokenizeCode( fin, Files.size() - 1 );
}
//---------------------------------------------------------------------------
//---------------------------------------------------------------------------
// Tokenize - tokenizes input stream
//---------------------------------------------------------------------------
void TokenizeCode(std::istream &code, const unsigned int FileIndex)
{
// Tokenize the file. // Tokenize the file.
unsigned int lineno = 1; unsigned int lineno = 1;
char CurrentToken[1000]; char CurrentToken[1000] = {0};
memset(CurrentToken, 0, sizeof(CurrentToken));
char *pToken = CurrentToken; char *pToken = CurrentToken;
for (char ch = (char)fin.get(); !fin.eof(); ch = (char)fin.get()) for (char ch = (char)code.get(); !code.eof(); ch = (char)code.get())
{ {
// Preprocessor stuff? // Preprocessor stuff?
if (ch == '#' && !CurrentToken[0]) if (ch == '#' && !CurrentToken[0])
{ {
std::string line; std::string line;
getline(fin,line); getline(code,line);
line = "#" + line; line = "#" + line;
if (strncmp(line.c_str(),"#include",8)==0 && if (strncmp(line.c_str(),"#include",8)==0 &&
line.find("\"") != std::string::npos) line.find("\"") != std::string::npos)
@ -288,17 +301,15 @@ void Tokenize(const char FileName[])
line.erase(line.find("\"")); line.erase(line.find("\""));
// Relative path.. // Relative path..
if (strchr(FileName,'\\')) if (Files.back().find_first_of("\\/") != std::string::npos)
{ {
char path[1000]; std::string path = Files.back();
memset(path,0,sizeof(path)); path.erase( 1 + path.find_last_of("\\/") );
const char *p = strrchr(FileName, '\\');
memcpy(path, FileName, p-FileName+1);
line = path + line; line = path + line;
} }
addtoken("#include", lineno, CurrentFile); addtoken("#include", lineno, FileIndex);
addtoken(line.c_str(), lineno, CurrentFile); addtoken(line.c_str(), lineno, FileIndex);
Tokenize(line.c_str()); Tokenize(line.c_str());
} }
@ -332,9 +343,9 @@ void Tokenize(const char FileName[])
if (State==Value) if (State==Value)
{ {
addtoken("def", lineno, CurrentFile); addtoken("def", lineno, FileIndex);
addtoken(strId, lineno, CurrentFile); addtoken(strId, lineno, FileIndex);
addtoken(";", lineno, CurrentFile); addtoken(";", lineno, FileIndex);
Define(strId, CurrentToken); Define(strId, CurrentToken);
} }
@ -345,8 +356,8 @@ void Tokenize(const char FileName[])
else else
{ {
addtoken("#", lineno, CurrentFile); addtoken("#", lineno, FileIndex);
addtoken(";", lineno, CurrentFile); addtoken(";", lineno, FileIndex);
} }
lineno++; lineno++;
@ -356,27 +367,27 @@ void Tokenize(const char FileName[])
if (ch == '\n') if (ch == '\n')
{ {
// Add current token.. // Add current token..
addtoken(CurrentToken, lineno++, CurrentFile); addtoken(CurrentToken, lineno++, FileIndex);
memset(CurrentToken, 0, sizeof(CurrentToken)); memset(CurrentToken, 0, sizeof(CurrentToken));
pToken = CurrentToken; pToken = CurrentToken;
continue; continue;
} }
// Comments.. // Comments..
if (ch == '/' && !fin.eof()) if (ch == '/' && !code.eof())
{ {
// Add current token.. // Add current token..
addtoken(CurrentToken, lineno, CurrentFile); addtoken(CurrentToken, lineno, FileIndex);
memset(CurrentToken, 0, sizeof(CurrentToken)); memset(CurrentToken, 0, sizeof(CurrentToken));
pToken = CurrentToken; pToken = CurrentToken;
// Read next character.. // Read next character..
ch = (char)fin.get(); ch = (char)code.get();
// If '//'.. // If '//'..
if (ch == '/') if (ch == '/')
{ {
while (!fin.eof() && (char)fin.get()!='\n'); while (!code.eof() && (char)code.get()!='\n');
lineno++; lineno++;
continue; continue;
} }
@ -386,10 +397,10 @@ void Tokenize(const char FileName[])
{ {
char chPrev; char chPrev;
ch = chPrev = 'A'; ch = chPrev = 'A';
while (!fin.eof() && (chPrev!='*' || ch!='/')) while (!code.eof() && (chPrev!='*' || ch!='/'))
{ {
chPrev = ch; chPrev = ch;
ch = (char)fin.get(); ch = (char)code.get();
if (ch == '\n') if (ch == '\n')
lineno++; lineno++;
} }
@ -397,25 +408,25 @@ void Tokenize(const char FileName[])
} }
// Not a comment.. add token.. // Not a comment.. add token..
addtoken("/", lineno, CurrentFile); addtoken("/", lineno, FileIndex);
} }
// char.. // char..
if (ch == '\'') if (ch == '\'')
{ {
// Add previous token // Add previous token
addtoken(CurrentToken, lineno, CurrentFile); addtoken(CurrentToken, lineno, FileIndex);
memset(CurrentToken, 0, sizeof(CurrentToken)); memset(CurrentToken, 0, sizeof(CurrentToken));
// Read this .. // Read this ..
CurrentToken[0] = ch; CurrentToken[0] = ch;
CurrentToken[1] = (char)fin.get(); CurrentToken[1] = (char)code.get();
CurrentToken[2] = (char)fin.get(); CurrentToken[2] = (char)code.get();
if (CurrentToken[1] == '\\') if (CurrentToken[1] == '\\')
CurrentToken[3] = (char)fin.get(); CurrentToken[3] = (char)code.get();
// Add token and start on next.. // Add token and start on next..
addtoken(CurrentToken, lineno, CurrentFile); addtoken(CurrentToken, lineno, FileIndex);
memset(CurrentToken, 0, sizeof(CurrentToken)); memset(CurrentToken, 0, sizeof(CurrentToken));
pToken = CurrentToken; pToken = CurrentToken;
@ -425,7 +436,7 @@ void Tokenize(const char FileName[])
// String.. // String..
if (ch == '\"') if (ch == '\"')
{ {
addtoken(CurrentToken, lineno, CurrentFile); addtoken(CurrentToken, lineno, FileIndex);
memset(CurrentToken, 0, sizeof(CurrentToken)); memset(CurrentToken, 0, sizeof(CurrentToken));
pToken = CurrentToken; pToken = CurrentToken;
bool special = false; bool special = false;
@ -446,11 +457,11 @@ void Tokenize(const char FileName[])
special = (c == '\\'); special = (c == '\\');
// Get next character // Get next character
c = (char)fin.get(); c = (char)code.get();
} }
while (special || c != '\"'); while (special || c != '\"');
*pToken = '\"'; *pToken = '\"';
addtoken(CurrentToken, lineno, CurrentFile); addtoken(CurrentToken, lineno, FileIndex);
memset(CurrentToken, 0, sizeof(CurrentToken)); memset(CurrentToken, 0, sizeof(CurrentToken));
pToken = CurrentToken; pToken = CurrentToken;
continue; continue;
@ -458,10 +469,10 @@ void Tokenize(const char FileName[])
if (strchr("+-*/%&|^?!=<>[](){};:,.",ch)) if (strchr("+-*/%&|^?!=<>[](){};:,.",ch))
{ {
addtoken(CurrentToken, lineno, CurrentFile); addtoken(CurrentToken, lineno, FileIndex);
memset(CurrentToken, 0, sizeof(CurrentToken)); memset(CurrentToken, 0, sizeof(CurrentToken));
CurrentToken[0] = ch; CurrentToken[0] = ch;
addtoken(CurrentToken, lineno, CurrentFile); addtoken(CurrentToken, lineno, FileIndex);
memset(CurrentToken, 0, sizeof(CurrentToken)); memset(CurrentToken, 0, sizeof(CurrentToken));
pToken = CurrentToken; pToken = CurrentToken;
continue; continue;
@ -470,7 +481,7 @@ void Tokenize(const char FileName[])
if (std::isspace(ch) || std::iscntrl(ch)) if (std::isspace(ch) || std::iscntrl(ch))
{ {
addtoken(CurrentToken, lineno, CurrentFile); addtoken(CurrentToken, lineno, FileIndex);
pToken = CurrentToken; pToken = CurrentToken;
memset(CurrentToken, 0, sizeof(CurrentToken)); memset(CurrentToken, 0, sizeof(CurrentToken));
continue; continue;

View File

@ -20,6 +20,8 @@ extern struct TOKEN *tokens, *tokens_back;
void Tokenize(const char FileName[]); void Tokenize(const char FileName[]);
void TokenizeCode(std::istream &code, const unsigned int FileIndex=0);
// Return size. // Return size.
int SizeOfType(const char type[]); int SizeOfType(const char type[]);