Tokenize: Simplify declarations

Daniel Marjamäki 2007-05-29 06:24:36 +00:00
parent a2df9a5647
commit ae46002607
6 changed files with 170 additions and 12 deletions

View File

@@ -14,6 +14,7 @@ int main(int argc, char* argv[])
tokens = tokens_back = NULL;
Tokenize(argv[1]);
SimplifyTokenList();
unsigned int linenr = 0;
for (TOKEN *tok = tokens; tok; tok = tok->next)

View File

@@ -1,9 +1,9 @@
4 : {
5 : decl a
6 : decl b
6 : assign b
6 : decl c
6 : decl d
6 : assign b
6 : assign d
6 : use NULL
7 : decl e

View File

@@ -64,6 +64,16 @@ static void CppCheck(const char FileName[])
Files.clear();
Tokenize(FileName);
// Check that the memsets are valid.
// This function can do dangerous things if used wrong.
// Important: The checking doesn't work on simplified tokens list.
CheckMemset();
SimplifyTokenList();
// Create a statement list. It's used by, for example, 'CheckMemoryLeak'
CreateStatementList();
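A plausible reading of this reordering (the memset check itself is not shown in this commit): CheckMemset presumably matches calls of the form

    memset(&s, 0, sizeof(s));

and SimplifyTokenList, as seen below, fills the TypeSize map and folds simple calculations, so a sizeof expression may already have been replaced by a plain number on the simplified list. Running CheckMemset first keeps the original tokens the check needs.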
@@ -75,19 +85,10 @@ static void CppCheck(const char FileName[])
CheckBufferOverrun();
//std::ofstream f("tokens.txt");
//for (TOKEN *tok = tokens; tok; tok = tok->next)
// f << "[" << Files[tok->FileIndex] << ":" << tok->linenr << "]:" << tok->str << '\n';
//f.close();
// Check that all private functions are called.
// Temporarily inactivated to avoid any false positives
CheckUnusedPrivateFunctions();
// Check that the memsets are valid.
// This function can do dangerous things if used wrong.
CheckMemset();
// Warnings
if (ShowWarnings)

View File

@@ -164,7 +164,7 @@ Item6=bjornb.cod bjornb.920
DebugSourceDirs=$(BCB)\source\vcl
[Parameters]
RunParams=testbufferoverrun6\testbufferoverrun6.cpp
RunParams=test.cpp
Launcher=
UseLauncher=0
DebugCWD=

View File

@@ -214,6 +214,33 @@ static void DeleteNextToken(TOKEN *tok)
//---------------------------------------------------------------------------
// InsertTokens - Copy and insert tokens
//---------------------------------------------------------------------------
void InsertTokens(TOKEN *dest, TOKEN *src, unsigned int n)
{
while (n > 0)
{
TOKEN *NewToken = new TOKEN;
NewToken->FileIndex = src->FileIndex;
NewToken->linenr = src->linenr;
NewToken->str = strdup(src->str);
NewToken->next = dest->next;
dest->next = NewToken;
dest = dest->next;
src = src->next;
n--;
}
}
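A minimal standalone sketch of the same splice, for illustration only; TOKEN is reduced to the two fields the loop actually manipulates, and insert_copies plus the demo tokens are made-up names, not part of the commit (cleanup omitted):

    #include <cstdio>
    #include <cstring>

    struct TOKEN { char *str; TOKEN *next; };

    static TOKEN *newtok(const char *s, TOKEN *next)
    {
        TOKEN *t = new TOKEN;
        t->str = strdup(s);
        t->next = next;
        return t;
    }

    // Copy n tokens starting at 'src' and link the copies in after 'dest',
    // exactly as InsertTokens does above (minus FileIndex/linenr bookkeeping).
    static void insert_copies(TOKEN *dest, TOKEN *src, unsigned int n)
    {
        while (n-- > 0)
        {
            TOKEN *NewToken = new TOKEN;
            NewToken->str = strdup(src->str);
            NewToken->next = dest->next;
            dest->next = NewToken;
            dest = NewToken;
            src = src->next;
        }
    }

    int main()
    {
        // Build the list: int a ; b
        TOKEN *list = newtok("int", newtok("a", newtok(";", newtok("b", NULL))));
        // Copy the one-token type "int" in after the ";" => int a ; int b
        insert_copies(list->next->next, list, 1);
        for (TOKEN *t = list; t; t = t->next)
            printf("%s ", t->str);
        printf("\n");    // prints: int a ; int b
        return 0;
    }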
//---------------------------------------------------------------------------
//---------------------------------------------------------------------------
// Tokenize - tokenizes a given file.
//---------------------------------------------------------------------------
@@ -461,8 +488,22 @@ void Tokenize(const char FileName[])
combine_2tokens(tok, "protected", ":");
combine_2tokens(tok, "public", ":");
}
}
//---------------------------------------------------------------------------
//---------------------------------------------------------------------------
// Simplify token list
//---------------------------------------------------------------------------
void SimplifyTokenList()
{
// Replace constants..
for (TOKEN *tok = tokens; tok; tok = tok->next)
{
@@ -483,6 +524,7 @@ void Tokenize(const char FileName[])
}
// Fill the map TypeSize..
TypeSize.clear();
TypeSize["char"] = sizeof(char);
@@ -591,6 +633,8 @@ void Tokenize(const char FileName[])
}
// Simple calculations..
bool done = false;
@@ -665,6 +709,107 @@ void Tokenize(const char FileName[])
DeleteNextToken(tok);
}
}
// Split up variable declarations if possible..
for (TOKEN *tok = tokens; tok; tok = tok->next)
{
if ( ! strchr("{};", tok->str[0]) )
continue;
TOKEN *type0 = tok->next;
TOKEN *tok2 = NULL;
unsigned int typelen = 0;
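// tok2 will be set to the ',' or '=' that ends the first declarator;
// typelen is the number of tokens in the type ("int" = 1, "int *" = 2)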
if ( match(type0, "type var ,") )
{
tok2 = gettok(type0, 2); // The ',' token
typelen = 1;
}
else if ( match(type0, "type * var ,") )
{
tok2 = gettok(type0, 3); // The ',' token
typelen = 2;
}
else if ( match(type0, "type var [ num ] ,") )
{
tok2 = gettok(type0, 5); // The ',' token
typelen = 1;
}
else if ( match(type0, "type * var [ num ] ,") )
{
tok2 = gettok(type0, 6); // The ',' token
typelen = 2;
}
else if ( match(type0, "type var =") )
{
tok2 = gettok(type0, 2);
typelen = 1;
}
else if ( match(type0, "type * var =") )
{
tok2 = gettok(type0, 3);
typelen = 2;
}
if (tok2)
{
if (tok2->str[0] == ',')
{
free(tok2->str);
tok2->str = strdup(";");
InsertTokens(tok2, type0, typelen);
}
else
{
TOKEN *eq = tok2;
int parlevel = 0;
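// Scan past the initializer; parlevel tracks '(' / '{' nesting so that
// commas inside function calls or initializer lists are not taken as
// declarator separators.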
while (tok2)
{
if ( strchr("{(", tok2->str[0]) )
{
parlevel++;
}
else if ( strchr("})", tok2->str[0]) )
{
if (parlevel<0)
break;
parlevel--;
}
else if ( parlevel==0 && strchr(";,",tok2->str[0]) )
{
// "type var =" => "type var; var ="
InsertTokens(eq, gettok(type0,typelen), 2);
free(eq->str);
eq->str = strdup(";");
// "= x, " => "= x; type "
if (tok2->str[0] == ',')
{
free(tok2->str);
tok2->str = strdup(";");
InsertTokens( tok2, type0, typelen );
}
break;
}
tok2 = tok2->next;
}
}
}
}
}
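Net effect of the new pass on some illustrative inputs (not taken from the commit's test files; spacing shows token boundaries after the split):

    int a, b;            =>  int a ; int b ;
    int *p, *q;          =>  int * p ; int * q ;
    int buf[10], n;      =>  int buf [ 10 ] ; int n ;
    int x = f(1, 2), y;  =>  int x ; x = f ( 1 , 2 ) ; int y ;

The parlevel counter in the '=' branch is what keeps the comma inside f(1, 2) from being treated as a declarator separator.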
//---------------------------------------------------------------------------
@@ -674,6 +819,10 @@ void Tokenize(const char FileName[])
//---------------------------------------------------------------------------
// Helper functions for handling the tokens list
//---------------------------------------------------------------------------
@@ -762,3 +911,6 @@ const char *getstr(TOKEN *tok, int index)
//---------------------------------------------------------------------------

View File

@@ -20,6 +20,10 @@ extern struct TOKEN *tokens, *tokens_back;
void Tokenize(const char FileName[]);
// Simplify tokenlist
// -----------------------------
void SimplifyTokenList();
// Helper functions for handling the tokens list..
TOKEN *findtoken(TOKEN *tok1, const char *tokenstr[]);