Refactoring: Rest of the public variables in TOKEN moved to private area.

This commit is contained in:
Reijo Tomperi 2008-12-08 22:02:37 +00:00
parent 51024235a3
commit 440203bd5c
7 changed files with 47 additions and 22 deletions
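For reference, the pattern applied throughout this commit is plain member encapsulation: the public FileIndex and linenr fields of TOKEN become private _fileIndex and _linenr, exposed through a const getter and a setter that share the same name. A minimal sketch of the idiom, reduced to just these two members (not the full TOKEN class):

class TOKEN
{
public:
    TOKEN() : _fileIndex(0), _linenr(0) { }

    // The const member function reads the value; the overload taking an argument writes it.
    unsigned int fileIndex() const { return _fileIndex; }
    void fileIndex( unsigned int fileIndex ) { _fileIndex = fileIndex; }

    unsigned int linenr() const { return _linenr; }
    void linenr( unsigned int linenr ) { _linenr = linenr; }

private:
    unsigned int _fileIndex;   // was: public FileIndex
    unsigned int _linenr;      // was: public linenr
};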

View File

@@ -46,7 +46,7 @@ void CheckFunctionUsage::parseTokens( const Tokenizer &tokenizer )
// Function declarations..
for ( const TOKEN *tok = tokenizer.tokens(); tok; tok = tok->next() )
{
-if ( tok->FileIndex != 0 )
+if ( tok->fileIndex() != 0 )
continue;
const TOKEN *funcname = 0;

View File

@@ -51,7 +51,7 @@ void CheckHeaders::WarningHeaderWithImplementation()
for ( const TOKEN *tok = _tokenizer->tokens(); tok; tok = tok->next())
{
// Only interested in included file
-if (tok->FileIndex == 0)
+if (tok->fileIndex() == 0)
continue;
if (TOKEN::Match(tok, ") {"))
@@ -61,8 +61,8 @@ void CheckHeaders::WarningHeaderWithImplementation()
_errorLogger->reportErr(ostr.str());
// Goto next file..
-unsigned int fileindex = tok->FileIndex;
-while ( tok->next() && tok->FileIndex == fileindex )
+unsigned int fileindex = tok->fileIndex();
+while ( tok->next() && tok->fileIndex() == fileindex )
tok = tok->next();
}
}
@@ -114,7 +114,7 @@ void CheckHeaders::WarningIncludeHeader()
int indentlevel = 0;
for ( const TOKEN *tok1 = _tokenizer->tokens(); tok1; tok1 = tok1->next() )
{
-if ( tok1->FileIndex != hfile )
+if ( tok1->fileIndex() != hfile )
continue;
// I'm only interested in stuff that is declared at indentlevel 0
@@ -208,7 +208,7 @@ void CheckHeaders::WarningIncludeHeader()
bool NeedDeclaration = false;
for ( const TOKEN *tok1 = _tokenizer->tokens(); tok1; tok1 = tok1->next())
{
-if (tok1->FileIndex != includetok->FileIndex)
+if (tok1->fileIndex() != includetok->fileIndex())
continue;
if ( TOKEN::Match(tok1, ": %var% {") || TOKEN::Match(tok1, ": %type% %var% {") )
@@ -236,7 +236,7 @@ void CheckHeaders::WarningIncludeHeader()
// Not a header file?
-if (includetok->FileIndex == 0)
+if (includetok->fileIndex() == 0)
Needed |= NeedDeclaration;
// Not needed!

View File

@@ -316,8 +316,8 @@ TOKEN *CheckMemoryLeakClass::getcode(const TOKEN *tok, std::list<const TOKEN *>
{ \
TOKEN *newtok = new TOKEN; \
newtok->setstr(_str); \
-newtok->linenr = tok->linenr; \
-newtok->FileIndex = tok->FileIndex; \
+newtok->linenr( tok->linenr() ); \
+newtok->fileIndex( tok->fileIndex() ); \
newtok->next( 0 ); \
if (rettail) \
rettail->next( newtok ); \

View File

@@ -644,7 +644,7 @@ void CheckOther::CheckStructMemberUsage()
for ( const TOKEN *tok = _tokenizer->tokens(); tok; tok = tok->next() )
{
-if ( tok->FileIndex != 0 )
+if ( tok->fileIndex() != 0 )
continue;
if ( tok->str() == "}" )
structname = 0;
@@ -671,7 +671,7 @@ void CheckOther::CheckStructMemberUsage()
bool used = false;
for ( const TOKEN *tok2 = _tokenizer->tokens(); tok2; tok2 = tok2->next() )
{
-if ( tok->FileIndex != 0 )
+if ( tok->fileIndex() != 0 )
continue;
if (TOKEN::Match(tok2, ". %var%", varnames))

View File

@@ -27,10 +27,10 @@
TOKEN::TOKEN()
{
-FileIndex = 0;
+_fileIndex = 0;
_cstr = 0;
_str = "";
-linenr = 0;
+_linenr = 0;
_next = 0;
_varId = 0;
_isName = false;
@@ -329,3 +329,22 @@ void TOKEN::next( TOKEN *next )
_next = next;
}
+unsigned int TOKEN::fileIndex() const
+{
+return _fileIndex;
+}
+
+void TOKEN::fileIndex( unsigned int fileIndex )
+{
+_fileIndex = fileIndex;
+}
+
+unsigned int TOKEN::linenr() const
+{
+return _linenr;
+}
+
+void TOKEN::linenr( unsigned int linenr )
+{
+_linenr = linenr;
+}

token.h
View File

@@ -111,8 +111,12 @@ public:
*/
static int multiCompare( const char *needle, const char *haystack );
-unsigned int FileIndex;
-unsigned int linenr;
+unsigned int linenr() const;
+void linenr( unsigned int linenr );
+unsigned int fileIndex() const;
+void fileIndex( unsigned int fileIndex );
TOKEN *next() const;
void next( TOKEN *next );
@@ -127,6 +131,8 @@
bool _isNumber;
unsigned int _varId;
TOKEN *_next;
+unsigned int _fileIndex;
+unsigned int _linenr;
};
#endif // TOKEN_H

View File

@@ -161,8 +161,8 @@ void Tokenizer::addtoken(const char str[], const unsigned int lineno, const unsi
TOKEN *newtoken = new TOKEN;
newtoken->setstr(str2.str().c_str());
-newtoken->linenr = lineno;
-newtoken->FileIndex = fileno;
+newtoken->linenr( lineno );
+newtoken->fileIndex( fileno );
if (_tokensBack)
{
_tokensBack->next( newtoken );
@@ -213,8 +213,8 @@ void Tokenizer::InsertTokens(TOKEN *dest, TOKEN *src, unsigned int n)
while (n > 0)
{
TOKEN *NewToken = new TOKEN;
-NewToken->FileIndex = src->FileIndex;
-NewToken->linenr = src->linenr;
+NewToken->fileIndex( src->fileIndex() );
+NewToken->linenr( src->linenr() );
NewToken->setstr(src->aaaa());
NewToken->next( dest->next() );
@@ -569,8 +569,8 @@ void Tokenizer::tokenizeCode(std::istream &code, const unsigned int FileIndex)
TOKEN *newtok = new TOKEN;
newtok->setstr(type2);
-newtok->FileIndex = tok2->FileIndex;
-newtok->linenr = tok2->linenr;
+newtok->fileIndex( tok2->fileIndex() );
+newtok->linenr( tok2->linenr() );
newtok->next( tok2->next() );
tok2->next( newtok );
tok2 = newtok;
@@ -1267,7 +1267,7 @@ const char *Tokenizer::getParameterName( const TOKEN *ftok, int par )
std::string Tokenizer::fileLine( const TOKEN *tok ) const
{
std::ostringstream ostr;
ostr << "[" << _files.at(tok->FileIndex) << ":" << tok->linenr << "]";
ostr << "[" << _files.at(tok->fileIndex()) << ":" << tok->linenr() << "]";
return ostr.str();
}
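As an illustrative usage sketch (example values only, not part of the commit), the new setters and getters are exercised the same way the tokenizer code above uses them:

TOKEN *newtoken = new TOKEN;
newtoken->setstr( "example" );   // existing TOKEN API, as used in addtoken()
newtoken->linenr( 42 );          // setter replaces the old 'newtoken->linenr = 42;'
newtoken->fileIndex( 0 );        // setter replaces the old 'newtoken->FileIndex = 0;'
// Reading the values back goes through the const getters:
// newtoken->linenr() == 42 and newtoken->fileIndex() == 0
delete newtoken;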