tokenizer: Added DeallocateTokens to the destructor so it's not necessary to cleanup manually

This commit is contained in:
Daniel Marjamäki 2008-11-22 09:44:02 +00:00
parent 9ce8918895
commit 2db69e6072
10 changed files with 220 additions and 242 deletions

View File

@@ -57,8 +57,6 @@ private:
// Check for memory leaks.. // Check for memory leaks..
CheckBufferOverrunClass checkBufferOverrun( &tokenizer, this ); CheckBufferOverrunClass checkBufferOverrun( &tokenizer, this );
checkBufferOverrun.CheckBufferOverrun(); checkBufferOverrun.CheckBufferOverrun();
tokenizer.DeallocateTokens();
} }
void run() void run()

View File

@@ -60,8 +60,6 @@ private:
// Check for memory leaks.. // Check for memory leaks..
CheckOther checkOther( &tokenizer, this ); CheckOther checkOther( &tokenizer, this );
checkOther.CheckCharVariable(); checkOther.CheckCharVariable();
tokenizer.DeallocateTokens();
} }
void array_index() void array_index()

View File

@@ -51,8 +51,6 @@ private:
settings._checkCodingStyle = true; settings._checkCodingStyle = true;
CheckClass checkClass( &tokenizer, settings, this ); CheckClass checkClass( &tokenizer, settings, this );
checkClass.CheckConstructors(); checkClass.CheckConstructors();
tokenizer.DeallocateTokens();
} }
void run() void run()

View File

@@ -51,8 +51,6 @@ private:
// Check for memory leaks.. // Check for memory leaks..
CheckOther checkOther( &tokenizer, this ); CheckOther checkOther( &tokenizer, this );
checkOther.CheckUnsignedDivision(); checkOther.CheckUnsignedDivision();
tokenizer.DeallocateTokens();
} }
void run() void run()

View File

@@ -50,8 +50,6 @@ private:
// Check for unused variables.. // Check for unused variables..
CheckOther checkOther( &tokenizer, this ); CheckOther checkOther( &tokenizer, this );
checkOther.CheckIncompleteStatement(); checkOther.CheckIncompleteStatement();
tokenizer.DeallocateTokens();
} }
void run() void run()

View File

@@ -54,8 +54,6 @@ private:
tokenizer.FillFunctionList(0); tokenizer.FillFunctionList(0);
CheckMemoryLeakClass checkMemoryLeak( &tokenizer, settings, this ); CheckMemoryLeakClass checkMemoryLeak( &tokenizer, settings, this );
checkMemoryLeak.CheckMemoryLeak(); checkMemoryLeak.CheckMemoryLeak();
tokenizer.DeallocateTokens();
} }
void run() void run()

View File

@@ -82,8 +82,6 @@ private:
// Compare.. // Compare..
ASSERT_EQUALS( true, cmptok(expected, tokenizer.tokens()) ); ASSERT_EQUALS( true, cmptok(expected, tokenizer.tokens()) );
tokenizer.DeallocateTokens();
} }
@@ -99,8 +97,6 @@ private:
// Expected result.. // Expected result..
ASSERT_EQUALS( std::string(10000,'a'), std::string(tokenizer.tokens()->str) ); ASSERT_EQUALS( std::string(10000,'a'), std::string(tokenizer.tokens()->str) );
tokenizer.DeallocateTokens();
} }
@@ -135,8 +131,6 @@ private:
// Compare.. // Compare..
ASSERT_EQUALS( true, cmptok(expected, tokenizer.tokens()) ); ASSERT_EQUALS( true, cmptok(expected, tokenizer.tokens()) );
tokenizer.DeallocateTokens();
} }

View File

@@ -57,8 +57,6 @@ private:
settings._checkCodingStyle = true; settings._checkCodingStyle = true;
CheckClass checkClass( &tokenizer, settings, this ); CheckClass checkClass( &tokenizer, settings, this );
checkClass.CheckUnusedPrivateFunctions(); checkClass.CheckUnusedPrivateFunctions();
tokenizer.DeallocateTokens();
} }

View File

@@ -49,8 +49,6 @@ private:
// Check for unused variables.. // Check for unused variables..
CheckOther checkOther( &tokenizer, this ); CheckOther checkOther( &tokenizer, this );
checkOther.CheckStructMemberUsage(); checkOther.CheckStructMemberUsage();
tokenizer.DeallocateTokens();
} }
void run() void run()

View File

@@ -28,8 +28,8 @@
#include <string> #include <string>
#include <cstring> #include <cstring>
#include <iostream> #include <iostream>
#include <sstream> #include <sstream>
#include <list> #include <list>
#include <algorithm> #include <algorithm>
#include <stdlib.h> // <- strtoul #include <stdlib.h> // <- strtoul
@@ -52,13 +52,13 @@ Tokenizer::Tokenizer(ErrorLogger *errorLogger)
{ {
_tokens = 0; _tokens = 0;
tokens_back = 0; tokens_back = 0;
dsymlist = 0; dsymlist = 0;
_errorLogger = errorLogger; _errorLogger = errorLogger;
} }
Tokenizer::~Tokenizer() Tokenizer::~Tokenizer()
{ {
DeallocateTokens();
} }
//--------------------------------------------------------------------------- //---------------------------------------------------------------------------
@@ -1315,219 +1315,219 @@ void Tokenizer::settings( const Settings &settings )
{ {
_settings = settings; _settings = settings;
} }
// Deallocate lists.. // Deallocate lists..
void Tokenizer::DeallocateTokens() void Tokenizer::DeallocateTokens()
{ {
deleteTokens( _tokens ); deleteTokens( _tokens );
_tokens = 0; _tokens = 0;
tokens_back = 0; tokens_back = 0;
while (dsymlist) while (dsymlist)
{ {
struct DefineSymbol *next = dsymlist->next; struct DefineSymbol *next = dsymlist->next;
free(dsymlist->name); free(dsymlist->name);
free(dsymlist->value); free(dsymlist->value);
delete dsymlist; delete dsymlist;
dsymlist = next; dsymlist = next;
} }
Files.clear(); Files.clear();
} }
void Tokenizer::deleteTokens(TOKEN *tok) void Tokenizer::deleteTokens(TOKEN *tok)
{ {
while (tok) while (tok)
{ {
TOKEN *next = tok->next; TOKEN *next = tok->next;
delete tok; delete tok;
tok = next; tok = next;
} }
} }
//--------------------------------------------------------------------------- //---------------------------------------------------------------------------
//--------------------------------------------------------------------------- //---------------------------------------------------------------------------
const char *Tokenizer::getParameterName( const TOKEN *ftok, int par ) const char *Tokenizer::getParameterName( const TOKEN *ftok, int par )
{ {
int _par = 1; int _par = 1;
for ( ; ftok; ftok = ftok->next) for ( ; ftok; ftok = ftok->next)
{ {
if ( Tokenizer::Match(ftok, ",") ) if ( Tokenizer::Match(ftok, ",") )
++_par; ++_par;
if ( par==_par && Tokenizer::Match(ftok, "%var% [,)]") ) if ( par==_par && Tokenizer::Match(ftok, "%var% [,)]") )
return ftok->str; return ftok->str;
} }
return NULL; return NULL;
} }
//--------------------------------------------------------------------------- //---------------------------------------------------------------------------
const TOKEN *Tokenizer::findmatch(const TOKEN *tok, const char pattern[], const char *varname1[], const char *varname2[]) const TOKEN *Tokenizer::findmatch(const TOKEN *tok, const char pattern[], const char *varname1[], const char *varname2[])
{ {
for ( ; tok; tok = tok->next) for ( ; tok; tok = tok->next)
{ {
if ( Tokenizer::Match(tok, pattern, varname1, varname2) ) if ( Tokenizer::Match(tok, pattern, varname1, varname2) )
return tok; return tok;
} }
return 0; return 0;
} }
//--------------------------------------------------------------------------- //---------------------------------------------------------------------------
std::string Tokenizer::fileLine( const TOKEN *tok ) std::string Tokenizer::fileLine( const TOKEN *tok )
{ {
std::ostringstream ostr; std::ostringstream ostr;
ostr << "[" << Files.at(tok->FileIndex) << ":" << tok->linenr << "]"; ostr << "[" << Files.at(tok->FileIndex) << ":" << tok->linenr << "]";
return ostr.str(); return ostr.str();
} }
bool Tokenizer::Match(const TOKEN *tok, const char pattern[], const char *varname1[], const char *varname2[]) bool Tokenizer::Match(const TOKEN *tok, const char pattern[], const char *varname1[], const char *varname2[])
{ {
if (!tok) if (!tok)
return false; return false;
const char *p = pattern; const char *p = pattern;
while (*p) while (*p)
{ {
// Skip spaces in pattern.. // Skip spaces in pattern..
while ( *p == ' ' ) while ( *p == ' ' )
p++; p++;
// Extract token from pattern.. // Extract token from pattern..
char str[50]; char str[50];
char *s = str; char *s = str;
while (*p && *p!=' ') while (*p && *p!=' ')
{ {
*s = *p; *s = *p;
s++; s++;
p++; p++;
} }
*s = 0; *s = 0;
// No token => Success! // No token => Success!
if (str[0] == 0) if (str[0] == 0)
return true; return true;
// Any symbolname.. // Any symbolname..
if (strcmp(str,"%var%")==0 || strcmp(str,"%type%")==0) if (strcmp(str,"%var%")==0 || strcmp(str,"%type%")==0)
{ {
if (!Tokenizer::IsName(tok->str)) if (!Tokenizer::IsName(tok->str))
return false; return false;
} }
// Variable name.. // Variable name..
else if (strcmp(str,"%var1%")==0 || strcmp(str,"%var2%")==0) else if (strcmp(str,"%var1%")==0 || strcmp(str,"%var2%")==0)
{ {
const char **varname = (strcmp(str,"%var1%")==0) ? varname1 : varname2; const char **varname = (strcmp(str,"%var1%")==0) ? varname1 : varname2;
if ( ! varname ) if ( ! varname )
return false; return false;
if (strcmp(tok->str, varname[0]) != 0) if (strcmp(tok->str, varname[0]) != 0)
return false; return false;
for ( int i = 1; varname[i]; i++ ) for ( int i = 1; varname[i]; i++ )
{ {
if ( ! Tokenizer::gettok(tok, 2) ) if ( ! Tokenizer::gettok(tok, 2) )
return false; return false;
if ( strcmp(Tokenizer::getstr(tok, 1), ".") ) if ( strcmp(Tokenizer::getstr(tok, 1), ".") )
return false; return false;
if ( strcmp(Tokenizer::getstr(tok, 2), varname[i]) ) if ( strcmp(Tokenizer::getstr(tok, 2), varname[i]) )
return false; return false;
tok = Tokenizer::gettok(tok, 2); tok = Tokenizer::gettok(tok, 2);
} }
} }
else if (strcmp(str,"%num%")==0) else if (strcmp(str,"%num%")==0)
{ {
if ( ! Tokenizer::IsNumber(tok->str) ) if ( ! Tokenizer::IsNumber(tok->str) )
return false; return false;
} }
else if (strcmp(str,"%str%")==0) else if (strcmp(str,"%str%")==0)
{ {
if ( tok->str[0] != '\"' ) if ( tok->str[0] != '\"' )
return false; return false;
} }
// [.. => search for a one-character token.. // [.. => search for a one-character token..
else if (str[0]=='[' && strchr(str, ']') && tok->str[1] == 0) else if (str[0]=='[' && strchr(str, ']') && tok->str[1] == 0)
{ {
*strrchr(str, ']') = 0; *strrchr(str, ']') = 0;
if ( strchr( str + 1, tok->str[0] ) == 0 ) if ( strchr( str + 1, tok->str[0] ) == 0 )
return false; return false;
} }
else if (strcmp(str, tok->str) != 0) else if (strcmp(str, tok->str) != 0)
return false; return false;
tok = tok->next; tok = tok->next;
if (!tok && *p) if (!tok && *p)
return false; return false;
} }
// The end of the pattern has been reached and nothing wrong has been found // The end of the pattern has been reached and nothing wrong has been found
return true; return true;
} }
//--------------------------------------------------------------------------- //---------------------------------------------------------------------------
bool Tokenizer::SameFileName( const char fname1[], const char fname2[] ) bool Tokenizer::SameFileName( const char fname1[], const char fname2[] )
{ {
#ifdef __linux__ #ifdef __linux__
return bool( strcmp(fname1, fname2) == 0 ); return bool( strcmp(fname1, fname2) == 0 );
#endif #endif
#ifdef __GNUC__ #ifdef __GNUC__
return bool( strcasecmp(fname1, fname2) == 0 ); return bool( strcasecmp(fname1, fname2) == 0 );
#endif #endif
#ifdef __BORLANDC__ #ifdef __BORLANDC__
return bool( stricmp(fname1, fname2) == 0 ); return bool( stricmp(fname1, fname2) == 0 );
#endif #endif
#ifdef _MSC_VER #ifdef _MSC_VER
return bool( _stricmp(fname1, fname2) == 0 ); return bool( _stricmp(fname1, fname2) == 0 );
#endif #endif
} }
bool Tokenizer::IsName(const char str[]) bool Tokenizer::IsName(const char str[])
{ {
return bool(str[0]=='_' || isalpha(str[0])); return bool(str[0]=='_' || isalpha(str[0]));
} }
//--------------------------------------------------------------------------- //---------------------------------------------------------------------------
bool Tokenizer::IsNumber(const char str[]) bool Tokenizer::IsNumber(const char str[])
{ {
return bool(isdigit(str[0]) != 0); return bool(isdigit(str[0]) != 0);
} }
//--------------------------------------------------------------------------- //---------------------------------------------------------------------------
bool Tokenizer::IsStandardType(const char str[]) bool Tokenizer::IsStandardType(const char str[])
{ {
if (!str) if (!str)
return false; return false;
bool Ret = false; bool Ret = false;
const char *type[] = {"bool","char","short","int","long","float","double",0}; const char *type[] = {"bool","char","short","int","long","float","double",0};
for (int i = 0; type[i]; i++) for (int i = 0; type[i]; i++)
Ret |= (strcmp(str,type[i])==0); Ret |= (strcmp(str,type[i])==0);
return Ret; return Ret;
} }
//--------------------------------------------------------------------------- //---------------------------------------------------------------------------
bool Tokenizer::alwaysTrue( const TOKEN *tok ) bool Tokenizer::alwaysTrue( const TOKEN *tok )
{ {
return (Match(tok,"( 1 [|)]") | Match(tok,"( 1 ||") | return (Match(tok,"( 1 [|)]") | Match(tok,"( 1 ||") |
Match(tok,"( true [|)]") | Match(tok,"( true ||")); Match(tok,"( true [|)]") | Match(tok,"( true ||"));
} }
//--------------------------------------------------------------------------- //---------------------------------------------------------------------------
bool Tokenizer::alwaysFalse( const TOKEN *tok ) bool Tokenizer::alwaysFalse( const TOKEN *tok )
{ {
return (Match(tok,"( 0 [&)]") | Match(tok,"( 0 &&") | return (Match(tok,"( 0 [&)]") | Match(tok,"( 0 &&") |
Match(tok,"( false [&)]") | Match(tok,"( false &&")); Match(tok,"( false [&)]") | Match(tok,"( false &&"));
} }
//--------------------------------------------------------------------------- //---------------------------------------------------------------------------