diff --git a/TestTok.cpp b/TestTok.cpp
index c5c5eb173..881a97fef 100644
--- a/TestTok.cpp
+++ b/TestTok.cpp
@@ -14,6 +14,7 @@ int main(int argc, char* argv[])
 
     tokens = tokens_back = NULL;
     Tokenize(argv[1]);
+    SimplifyTokenList();
 
     unsigned int linenr = 0;
     for (TOKEN *tok = tokens; tok; tok = tok->next)
diff --git a/internaltesting/testdecl.out b/internaltesting/testdecl.out
index 877bdabf9..c3a59f52d 100644
--- a/internaltesting/testdecl.out
+++ b/internaltesting/testdecl.out
@@ -1,9 +1,9 @@
 4 : {
 5 : decl a
 6 : decl b
+6 : assign b
 6 : decl c
 6 : decl d
-6 : assign b
 6 : assign d
 6 : use NULL
 7 : decl e
diff --git a/main.cpp b/main.cpp
index 18fc49bfd..59887ba75 100644
--- a/main.cpp
+++ b/main.cpp
@@ -64,6 +64,16 @@ static void CppCheck(const char FileName[])
 
     Files.clear();
     Tokenize(FileName);
+
+    // Check that the memsets are valid.
+    // This function can do dangerous things if used wrong.
+    // Important: The checking doesn't work on simplified tokens list.
+    CheckMemset();
+
+
+    SimplifyTokenList();
+
+
     // Create a statement list. It's used by for example 'CheckMemoryLeak'
     CreateStatementList();
 
@@ -75,19 +85,10 @@ static void CppCheck(const char FileName[])
 
     CheckBufferOverrun();
 
-    //std::ofstream f("tokens.txt");
-    //for (TOKEN *tok = tokens; tok; tok = tok->next)
-    //    f << "[" << Files[tok->FileIndex] << ":" << tok->linenr << "]:" << tok->str << '\n';
-    //f.close();
-
     // Check that all private functions are called.
     // Temporarily inactivated to avoid any false positives
     CheckUnusedPrivateFunctions();
 
-    // Check that the memsets are valid.
-    // This function can do dangerous things if used wrong.
-    CheckMemset();
-
     // Warnings
 
     if (ShowWarnings)
diff --git a/tok.bpr b/tok.bpr
index 7e03032cc..98bce6f18 100644
--- a/tok.bpr
+++ b/tok.bpr
@@ -164,7 +164,7 @@ Item6=bjornb.cod bjornb.920
 DebugSourceDirs=$(BCB)\source\vcl
 
 [Parameters]
-RunParams=testbufferoverrun6\testbufferoverrun6.cpp
+RunParams=test.cpp
 Launcher=
 UseLauncher=0
 DebugCWD=
diff --git a/tokenize.cpp b/tokenize.cpp
index 80e88cbcd..b7f473b1a 100644
--- a/tokenize.cpp
+++ b/tokenize.cpp
@@ -214,6 +214,33 @@ static void DeleteNextToken(TOKEN *tok)
 
 
 
+//---------------------------------------------------------------------------
+// InsertTokens - Copy and insert tokens
+//---------------------------------------------------------------------------
+
+void InsertTokens(TOKEN *dest, TOKEN *src, unsigned int n)
+{
+    while (n > 0)
+    {
+        TOKEN *NewToken = new TOKEN;
+        NewToken->FileIndex = src->FileIndex;
+        NewToken->linenr = src->linenr;
+        NewToken->str = strdup(src->str);
+
+        NewToken->next = dest->next;
+        dest->next = NewToken;
+
+        dest = dest->next;
+        src = src->next;
+        n--;
+    }
+}
+//---------------------------------------------------------------------------
+
+
+
+
+
 //---------------------------------------------------------------------------
 // Tokenize - tokenizes a given file.
 //---------------------------------------------------------------------------
@@ -461,8 +488,22 @@ void Tokenize(const char FileName[])
         combine_2tokens(tok, "protected", ":");
         combine_2tokens(tok, "public", ":");
     }
+}
+//---------------------------------------------------------------------------
 
-
+
+
+
+
+
+
+
+//---------------------------------------------------------------------------
+// Simplify token list
+//---------------------------------------------------------------------------
+
+void SimplifyTokenList()
+{
     // Replace constants..
     for (TOKEN *tok = tokens; tok; tok = tok->next)
     {
@@ -483,6 +524,7 @@ void Tokenize(const char FileName[])
     }
 
 
+
     // Fill the map TypeSize..
     TypeSize.clear();
     TypeSize["char"] = sizeof(char);
@@ -591,6 +633,8 @@ void Tokenize(const char FileName[])
     }
 
 
+
+
     // Simple calculations..
     bool done = false;
 
@@ -665,6 +709,107 @@ void Tokenize(const char FileName[])
             DeleteNextToken(tok);
         }
     }
+
+
+
+    // Split up variable declarations if possible..
+    for (TOKEN *tok = tokens; tok; tok = tok->next)
+    {
+        if ( ! strchr("{};", tok->str[0]) )
+            continue;
+
+        TOKEN *type0 = tok->next;
+
+        TOKEN *tok2 = NULL;
+        unsigned int typelen = 0;
+
+        if ( match(type0, "type var ,") )
+        {
+            tok2 = gettok(type0, 2);    // The ',' token
+            typelen = 1;
+        }
+
+        else if ( match(type0, "type * var ,") )
+        {
+            tok2 = gettok(type0, 3);    // The ',' token
+            typelen = 2;
+        }
+
+        else if ( match(type0, "type var [ num ] ,") )
+        {
+            tok2 = gettok(type0, 5);    // The ',' token
+            typelen = 1;
+        }
+
+        else if ( match(type0, "type * var [ num ] ,") )
+        {
+            tok2 = gettok(type0, 6);    // The ',' token
+            typelen = 2;
+        }
+
+
+        else if ( match(type0, "type var =") )
+        {
+            tok2 = gettok(type0, 2);
+            typelen = 1;
+        }
+
+        else if ( match(type0, "type * var =") )
+        {
+            tok2 = gettok(type0, 3);
+            typelen = 2;
+        }
+
+        if (tok2)
+        {
+            if (tok2->str[0] == ',')
+            {
+                free(tok2->str);
+                tok2->str = strdup(";");
+                InsertTokens(tok2, type0, typelen);
+            }
+
+            else
+            {
+                TOKEN *eq = tok2;
+
+                int parlevel = 0;
+                while (tok2)
+                {
+                    if ( strchr("{(", tok2->str[0]) )
+                    {
+                        parlevel++;
+                    }
+
+                    else if ( strchr("})", tok2->str[0]) )
+                    {
+                        if (parlevel<0)
+                            break;
+                        parlevel--;
+                    }
+
+                    else if ( parlevel==0 && strchr(";,",tok2->str[0]) )
+                    {
+                        // "type var =" => "type var; var ="
+                        InsertTokens(eq, gettok(type0,typelen), 2);
+                        free(eq->str);
+                        eq->str = strdup(";");
+
+                        // "= x, " => "= x; type "
+                        if (tok2->str[0] == ',')
+                        {
+                            free(tok2->str);
+                            tok2->str = strdup(";");
+                            InsertTokens( tok2, type0, typelen );
+                        }
+                        break;
+                    }
+
+                    tok2 = tok2->next;
+                }
+            }
+        }
+    }
 }
 //---------------------------------------------------------------------------
 
@@ -674,6 +819,10 @@
 
 
 
+
+
+
+
 //---------------------------------------------------------------------------
 // Helper functions for handling the tokens list
 //---------------------------------------------------------------------------
@@ -762,3 +911,6 @@ const char *getstr(TOKEN *tok, int index)
 //---------------------------------------------------------------------------
 
 
+
+
+
diff --git a/tokenize.h b/tokenize.h
index 0b46dd55b..c5f4e09b0 100644
--- a/tokenize.h
+++ b/tokenize.h
@@ -20,6 +20,10 @@ extern struct TOKEN *tokens, *tokens_back;
 
 void Tokenize(const char FileName[]);
 
+// Simplify tokenlist
+// -----------------------------
+void SimplifyTokenList();
+
 // Helper functions for handling the tokens list..
 TOKEN *findtoken(TOKEN *tok1, const char *tokenstr[]);
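
Not part of the patch, but as a quick illustration of what the new declaration-splitting pass in SimplifyTokenList() is meant to do: a ',' inside a declaration is rewritten to ';' and a copy of the type token(s) is inserted after it, so the token list for "int a , b ;" becomes "int a ; int b ;". The sketch below is a minimal, self-contained mock under that assumption -- Tok and InsertAfter are simplified stand-ins, not the project's real TOKEN struct or InsertTokens().

// Illustration only (not part of the patch). 'Tok' and 'InsertAfter' are
// simplified stand-ins that mimic TOKEN / InsertTokens() from tokenize.cpp.
#include <iostream>
#include <string>

struct Tok
{
    std::string str;
    Tok *next;
};

// Copy 'n' tokens starting at 'src' and insert the copies right after 'dest'.
static void InsertAfter(Tok *dest, const Tok *src, unsigned int n)
{
    while (n > 0)
    {
        Tok *copy = new Tok{src->str, dest->next};
        dest->next = copy;
        dest = copy;
        src = src->next;
        n--;
    }
}

int main()
{
    // Build the token list for:  int a , b ;
    const char *words[] = { "int", "a", ",", "b", ";" };
    Tok *head = nullptr, *tail = nullptr;
    for (const char *w : words)
    {
        Tok *t = new Tok{w, nullptr};
        if (!head)
            head = tail = t;
        else
            tail = tail->next = t;
    }

    // Apply the "type var ," rule: the ',' becomes ';' and a copy of the
    // type token is inserted after it, so each variable ends up in its
    // own declaration.
    Tok *type0 = head;               // "int"
    Tok *comma = type0->next->next;  // ","
    comma->str = ";";
    InsertAfter(comma, type0, 1);    // typelen == 1 for a plain type

    // Prints: int a ; int b ;
    for (const Tok *t = head; t; t = t->next)
        std::cout << t->str << " ";
    std::cout << std::endl;
    return 0;
}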