Refactoring: Moved token creation and token deletion into TOKEN class. Added previous() (not tested yet)

parent a4a55553f8
commit 2ef955eb65
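The diff below removes hand-rolled `new TOKEN` / pointer-rewiring code from the call sites and routes it through the new `TOKEN::insertToken()`, `TOKEN::eraseToken()` and `TOKEN::eraseTokens()` members. As a rough, self-contained sketch of the pattern being centralized (the `MiniToken` class, its member names and `main()` are invented for illustration and are not part of this repository's sources), the insert/erase bookkeeping on a doubly linked token list looks roughly like this:

```cpp
#include <iostream>
#include <string>

// MiniToken is a hypothetical, simplified stand-in for the TOKEN class:
// one node in a doubly linked list of tokens.
class MiniToken
{
public:
    explicit MiniToken(const std::string &s) : _str(s), _next(0), _previous(0) {}

    // Insert a new token directly after this one and fix up the links in
    // both directions, the same bookkeeping TOKEN::insertToken() centralizes.
    void insertToken(const std::string &s)
    {
        MiniToken *newToken = new MiniToken(s);
        if (_next)
        {
            newToken->_next = _next;
            _next->_previous = newToken;
        }
        newToken->_previous = this;
        _next = newToken;
    }

    // Delete the tokens strictly between begin and end, the same job
    // TOKEN::eraseTokens() takes over from the checker code.
    static void eraseTokens(MiniToken *begin, const MiniToken *end)
    {
        if (!begin)
            return;
        while (begin->_next && begin->_next != end)
        {
            MiniToken *doomed = begin->_next;
            begin->_next = doomed->_next;
            if (begin->_next)
                begin->_next->_previous = begin;
            delete doomed;
        }
    }

    const std::string &str() const { return _str; }

private:
    std::string _str;
    MiniToken *_next;
    MiniToken *_previous;
};

int main()
{
    MiniToken head(";");
    head.insertToken("}");     // ; }
    head.insertToken("free");  // ; free }
    head.insertToken("if");    // ; if free }

    MiniToken::eraseTokens(&head, 0);      // drop everything after the head
    std::cout << head.str() << std::endl;  // prints ";"
    return 0;
}
```

The point of the refactoring is that this bookkeeping lives in one place: callers such as CheckMemoryLeakClass::instoken() below shrink to a single insertToken() call.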
@@ -287,11 +287,8 @@ void CheckMemoryLeakClass::MemoryLeak( const TOKEN *tok, const char varname[] )
 //---------------------------------------------------------------------------
 
 void CheckMemoryLeakClass::instoken(TOKEN *tok, const char str[])
 {
-    TOKEN *newtok = new TOKEN;
-    newtok->setstr(str);
-    newtok->next( tok->next() );
-    tok->next( newtok );
+    tok->insertToken( str );
 }
 //---------------------------------------------------------------------------
 
@@ -313,20 +310,23 @@ TOKEN *CheckMemoryLeakClass::getcode(const TOKEN *tok, std::list<const TOKEN *>
 
     TOKEN *rethead = 0, *rettail = 0;
 #define addtoken(_str) \
     { \
-        TOKEN *newtok = new TOKEN; \
-        newtok->setstr(_str); \
-        newtok->linenr( tok->linenr() ); \
-        newtok->fileIndex( tok->fileIndex() ); \
-        newtok->next( 0 ); \
-        if (rettail) \
-            rettail->next( newtok ); \
-        else \
-            rethead = newtok; \
-        rettail=newtok; \
+        if (rettail) \
+        { \
+            rettail->insertToken(_str); \
+            rettail = rettail->next(); \
+        } \
+        else \
+        { \
+            rethead = new TOKEN; \
+            rettail = rethead; \
+            rettail->setstr(_str); \
+        } \
+        \
+        rettail->linenr( tok->linenr() ); \
+        rettail->fileIndex( tok->fileIndex() ); \
     }
 
 
     // The first token should be ";"
     addtoken(";");
 
@@ -554,16 +554,8 @@ TOKEN *CheckMemoryLeakClass::getcode(const TOKEN *tok, std::list<const TOKEN *>
 }
 
 void CheckMemoryLeakClass::erase(TOKEN *begin, const TOKEN *end)
 {
-    if ( ! begin )
-        return;
-
-    while ( begin->next() && begin->next() != end )
-    {
-        TOKEN *next = begin->next();
-        begin->next( begin->next()->next() );
-        delete next;
-    }
+    TOKEN::eraseTokens( begin, end );
 }
 
 void CheckMemoryLeakClass::simplifycode(TOKEN *tok)
token.cpp (48 changed lines)
@@ -31,10 +31,11 @@ TOKEN::TOKEN()
     _cstr = 0;
     _str = "";
     _linenr = 0;
     _next = 0;
+    _previous = 0;
     _varId = 0;
     _isName = false;
     _isNumber = false;
 }
 
 TOKEN::~TOKEN()
@@ -335,6 +336,49 @@ void TOKEN::next( TOKEN *next )
 {
     _next = next;
 }
+
+TOKEN *TOKEN::previous() const
+{
+    return _previous;
+}
+
+void TOKEN::previous( TOKEN *previous )
+{
+    _previous = previous;
+}
+
+void TOKEN::insertToken( const char *str )
+{
+    TOKEN *newToken = new TOKEN;
+    newToken->setstr( str );
+    if( this->next() )
+    {
+        newToken->next( this->next() );
+        newToken->next()->previous( newToken );
+    }
+
+    this->next( newToken );
+    newToken->previous( this );
+}
+
+void TOKEN::eraseTokens( TOKEN *begin, const TOKEN *end )
+{
+    if ( ! begin )
+        return;
+
+    while ( begin->next() && begin->next() != end )
+    {
+        begin->eraseToken();
+    }
+}
+
+void TOKEN::eraseToken()
+{
+    TOKEN *next = this->next();
+    this->next( next->next() );
+    next->next()->previous( this );
+    delete next;
+}
 
 unsigned int TOKEN::fileIndex() const
 {
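With previous(), insertToken(), eraseToken() and eraseTokens() in place, the call sites in this commit shrink to one-liners. A minimal sketch of the new call pattern, assuming the token.h interface added here (the two helper function names are invented for illustration):

```cpp
#include "token.h"

// insertAfter() mirrors what CheckMemoryLeakClass::instoken() now does,
// dropRange() mirrors CheckMemoryLeakClass::erase(); both names are
// hypothetical and only illustrate the pattern.
static void insertAfter(TOKEN *tok, const char str[])
{
    tok->insertToken( str );            // maintains next() and previous() links
}

static void dropRange(TOKEN *begin, const TOKEN *end)
{
    TOKEN::eraseTokens( begin, end );   // deletes the tokens strictly between
}
```

One caveat, consistent with the "not tested yet" in the commit message: eraseToken() above calls next->next()->previous( this ) without a null check, so erasing a token that has no successor (the last token in the list) would dereference a null pointer.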
|
token.h (30 changed lines)
@@ -122,18 +122,42 @@ public:
     void fileIndex( unsigned int fileIndex );
 
     TOKEN *next() const;
-    void next( TOKEN *next );
+
+    /**
+     * Delete tokens between begin and end. E.g. if begin = 1
+     * and end = 5, tokens 2,3 and 4 would be erased.
+     *
+     * @param begin Tokens after this will be erased.
+     * @param end Tokens before this will be erased.
+     */
+    static void eraseTokens( TOKEN *begin, const TOKEN *end );
+
+    void eraseToken();
+
+    /**
+     * Insert new token after this token. This function will handle
+     * relations between next and previous token also.
+     * @param str String for the new token.
+     */
+    void insertToken( const char *str );
+
+    TOKEN *previous() const;
 
     unsigned int varId() const;
     void varId( unsigned int id );
 
 private:
+    void next( TOKEN *next );
+    void previous( TOKEN *previous );
     std::string _str;
     char * _cstr;
     bool _isName;
     bool _isNumber;
     unsigned int _varId;
     TOKEN *_next;
+    TOKEN *_previous;
     unsigned int _fileIndex;
     unsigned int _linenr;
 };
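The eraseTokens() documentation above ("if begin = 1 and end = 5, tokens 2,3 and 4 would be erased") maps to code roughly as follows. This is a sketch only, assuming the TOKEN interface from this commit; the function name and token strings are arbitrary:

```cpp
#include "token.h"

// Hypothetical example: build the list "1 2 3 4 5", then erase the
// tokens strictly between "1" and "5".
void eraseTokensExample()
{
    TOKEN *one = new TOKEN;
    one->setstr( "1" );
    one->insertToken( "5" );          // 1 5
    one->insertToken( "4" );          // 1 4 5
    one->insertToken( "3" );          // 1 3 4 5
    one->insertToken( "2" );          // 1 2 3 4 5

    const TOKEN *five = one->next()->next()->next()->next();
    TOKEN::eraseTokens( one, five );  // 1 5 : "2", "3" and "4" are deleted

    delete one->next();               // "5"
    delete one;                       // "1"
}
```

begin and end themselves are not deleted; eraseTokens() only unlinks and frees what lies between them.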
|
tokenize.cpp (52 changed lines)
@@ -159,26 +159,27 @@ void Tokenizer::addtoken(const char str[], const unsigned int lineno, const unsi
         str2 << str;
     }
 
-    TOKEN *newtoken = new TOKEN;
-    newtoken->setstr(str2.str().c_str());
-    newtoken->linenr( lineno );
-    newtoken->fileIndex( fileno );
     if (_tokensBack)
     {
-        _tokensBack->next( newtoken );
-        _tokensBack = newtoken;
+        _tokensBack->insertToken( str2.str().c_str() );
+        _tokensBack = _tokensBack->next();
     }
     else
     {
-        _tokens = _tokensBack = newtoken;
+        _tokens = new TOKEN;
+        _tokensBack = _tokens;
+        _tokensBack->setstr( str2.str().c_str() );
     }
 
+    _tokensBack->linenr( lineno );
+    _tokensBack->fileIndex( fileno );
+
     // Check if str is defined..
     for (DefineSymbol *sym = _dsymlist; sym; sym = sym->next)
    {
         if (strcmp(str,sym->name)==0)
         {
-            newtoken->setstr(sym->value);
+            _tokensBack->setstr(sym->value);
             break;
         }
     }
@@ -211,15 +212,10 @@ int Tokenizer::SizeOfType(const char type[]) const
 void Tokenizer::InsertTokens(TOKEN *dest, TOKEN *src, unsigned int n)
 {
     while (n > 0)
     {
-        TOKEN *NewToken = new TOKEN;
-        NewToken->fileIndex( src->fileIndex() );
-        NewToken->linenr( src->linenr() );
-        NewToken->setstr(src->aaaa());
-
-        NewToken->next( dest->next() );
-        dest->next( NewToken );
-
+        dest->insertToken( src->aaaa() );
+        dest->next()->fileIndex( src->fileIndex() );
+        dest->next()->linenr( src->linenr() );
         dest = dest->next();
         src = src->next();
         n--;
@@ -566,14 +562,10 @@ void Tokenizer::tokenizeCode(std::istream &code, const unsigned int FileIndex)
             if (tok2->aaaa()!=type3 && (tok2->str() == type3))
             {
                 tok2->setstr(type1);
-
-                TOKEN *newtok = new TOKEN;
-                newtok->setstr(type2);
-                newtok->fileIndex( tok2->fileIndex() );
-                newtok->linenr( tok2->linenr() );
-                newtok->next( tok2->next() );
-                tok2->next( newtok );
-                tok2 = newtok;
+                tok2->insertToken( type2 );
+                tok2->next()->fileIndex( tok2->fileIndex() );
+                tok2->next()->linenr( tok2->linenr() );
+                tok2 = tok2->next();
             }
         }
     }
@@ -589,10 +581,8 @@ void Tokenizer::tokenizeCode(std::istream &code, const unsigned int FileIndex)
     {
         bool last = TOKEN::Match( tok->next(), "}" );
 
         // Unlink and delete tok->next()
-        TOKEN *next = tok->next();
-        tok->next( tok->next()->next() );
-        delete next;
+        tok->eraseToken();
 
         // break if this was the last token to delete..
         if (last)