From 440203bd5c1ebc914d5c7d2f81451ac6187c8a19 Mon Sep 17 00:00:00 2001
From: Reijo Tomperi
Date: Mon, 8 Dec 2008 22:02:37 +0000
Subject: [PATCH] Refactoring: Rest of the public variables in TOKEN moved to
 private area.

---
 CheckFunctionUsage.cpp |  2 +-
 CheckHeaders.cpp       | 12 ++++++------
 CheckMemoryLeak.cpp    |  4 ++--
 CheckOther.cpp         |  4 ++--
 token.cpp              | 23 +++++++++++++++++++++--
 token.h                | 10 ++++++++--
 tokenize.cpp           | 14 +++++++-------
 7 files changed, 47 insertions(+), 22 deletions(-)

diff --git a/CheckFunctionUsage.cpp b/CheckFunctionUsage.cpp
index 57cae577a..549bee59b 100644
--- a/CheckFunctionUsage.cpp
+++ b/CheckFunctionUsage.cpp
@@ -46,7 +46,7 @@ void CheckFunctionUsage::parseTokens( const Tokenizer &tokenizer )
     // Function declarations..
     for ( const TOKEN *tok = tokenizer.tokens(); tok; tok = tok->next() )
     {
-        if ( tok->FileIndex != 0 )
+        if ( tok->fileIndex() != 0 )
             continue;
 
         const TOKEN *funcname = 0;
diff --git a/CheckHeaders.cpp b/CheckHeaders.cpp
index ceb5cb16f..35564c734 100644
--- a/CheckHeaders.cpp
+++ b/CheckHeaders.cpp
@@ -51,7 +51,7 @@ void CheckHeaders::WarningHeaderWithImplementation()
     for ( const TOKEN *tok = _tokenizer->tokens(); tok; tok = tok->next())
     {
         // Only interested in included file
-        if (tok->FileIndex == 0)
+        if (tok->fileIndex() == 0)
             continue;
 
         if (TOKEN::Match(tok, ") {"))
@@ -61,8 +61,8 @@ void CheckHeaders::WarningHeaderWithImplementation()
             _errorLogger->reportErr(ostr.str());
 
             // Goto next file..
-            unsigned int fileindex = tok->FileIndex;
-            while ( tok->next() && tok->FileIndex == fileindex )
+            unsigned int fileindex = tok->fileIndex();
+            while ( tok->next() && tok->fileIndex() == fileindex )
                 tok = tok->next();
         }
     }
@@ -114,7 +114,7 @@ void CheckHeaders::WarningIncludeHeader()
     int indentlevel = 0;
     for ( const TOKEN *tok1 = _tokenizer->tokens(); tok1; tok1 = tok1->next() )
     {
-        if ( tok1->FileIndex != hfile )
+        if ( tok1->fileIndex() != hfile )
             continue;
 
         // I'm only interested in stuff that is declared at indentlevel 0
@@ -208,7 +208,7 @@ void CheckHeaders::WarningIncludeHeader()
         bool NeedDeclaration = false;
         for ( const TOKEN *tok1 = _tokenizer->tokens(); tok1; tok1 = tok1->next())
         {
-            if (tok1->FileIndex != includetok->FileIndex)
+            if (tok1->fileIndex() != includetok->fileIndex())
                 continue;
 
             if ( TOKEN::Match(tok1, ": %var% {") || TOKEN::Match(tok1, ": %type% %var% {") )
@@ -236,7 +236,7 @@ void CheckHeaders::WarningIncludeHeader()
 
 
         // Not a header file?
-        if (includetok->FileIndex == 0)
+        if (includetok->fileIndex() == 0)
             Needed |= NeedDeclaration;
 
         // Not needed!
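The caller-side changes above are mechanical: every read of the formerly public FileIndex member becomes a call to the new fileIndex() getter. A minimal, self-contained sketch of that pattern follows; the Tok class and the sample token list are illustrative stand-ins, not cppcheck code.

    #include <iostream>
    #include <vector>

    // Simplified stand-in for cppcheck's TOKEN: the file index is private and
    // reachable only through an accessor, as in the refactored token.h.
    class Tok
    {
    public:
        explicit Tok(unsigned int file) : _fileIndex(file) {}
        unsigned int fileIndex() const { return _fileIndex; }
    private:
        unsigned int _fileIndex;
    };

    int main()
    {
        // Tokens from the main file (index 0) and an included header (index 1).
        std::vector<Tok> tokens;
        tokens.push_back(Tok(0));
        tokens.push_back(Tok(1));
        tokens.push_back(Tok(0));

        // After the refactoring, "tok->FileIndex != 0" reads "tok->fileIndex() != 0".
        for (std::vector<Tok>::const_iterator it = tokens.begin(); it != tokens.end(); ++it)
        {
            if (it->fileIndex() != 0)
                continue;               // skip tokens that came from included files
            std::cout << "token from the main file\n";
        }
        return 0;
    }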
diff --git a/CheckMemoryLeak.cpp b/CheckMemoryLeak.cpp
index 35e4eab18..0104ba8d0 100644
--- a/CheckMemoryLeak.cpp
+++ b/CheckMemoryLeak.cpp
@@ -316,8 +316,8 @@ TOKEN *CheckMemoryLeakClass::getcode(const TOKEN *tok, std::list
     { \
         TOKEN *newtok = new TOKEN; \
         newtok->setstr(_str); \
-        newtok->linenr = tok->linenr; \
-        newtok->FileIndex = tok->FileIndex; \
+        newtok->linenr( tok->linenr() ); \
+        newtok->fileIndex( tok->fileIndex() ); \
         newtok->next( 0 ); \
         if (rettail) \
             rettail->next( newtok ); \
diff --git a/CheckOther.cpp b/CheckOther.cpp
index 36b15d6bb..f80fc4259 100644
--- a/CheckOther.cpp
+++ b/CheckOther.cpp
@@ -644,7 +644,7 @@ void CheckOther::CheckStructMemberUsage()
 
     for ( const TOKEN *tok = _tokenizer->tokens(); tok; tok = tok->next() )
     {
-        if ( tok->FileIndex != 0 )
+        if ( tok->fileIndex() != 0 )
             continue;
         if ( tok->str() == "}" )
             structname = 0;
@@ -671,7 +671,7 @@ void CheckOther::CheckStructMemberUsage()
         bool used = false;
         for ( const TOKEN *tok2 = _tokenizer->tokens(); tok2; tok2 = tok2->next() )
         {
-            if ( tok->FileIndex != 0 )
+            if ( tok->fileIndex() != 0 )
                 continue;
 
             if (TOKEN::Match(tok2, ". %var%", varnames))
diff --git a/token.cpp b/token.cpp
index 7bf09fb9b..beb4ad61b 100644
--- a/token.cpp
+++ b/token.cpp
@@ -27,10 +27,10 @@
 
 TOKEN::TOKEN()
 {
-    FileIndex = 0;
+    _fileIndex = 0;
     _cstr = 0;
     _str = "";
-    linenr = 0;
+    _linenr = 0;
     _next = 0;
     _varId = 0;
     _isName = false;
@@ -329,3 +329,22 @@ void TOKEN::next( TOKEN *next )
     _next = next;
 }
 
+unsigned int TOKEN::fileIndex() const
+{
+    return _fileIndex;
+}
+
+void TOKEN::fileIndex( unsigned int fileIndex )
+{
+    _fileIndex = fileIndex;
+}
+
+unsigned int TOKEN::linenr() const
+{
+    return _linenr;
+}
+
+void TOKEN::linenr( unsigned int linenr )
+{
+    _linenr = linenr;
+}
diff --git a/token.h b/token.h
index 64a4ffb88..8b2ad6e8f 100644
--- a/token.h
+++ b/token.h
@@ -111,8 +111,12 @@ public:
      */
     static int multiCompare( const char *needle, const char *haystack );
 
-    unsigned int FileIndex;
-    unsigned int linenr;
+
+    unsigned int linenr() const;
+    void linenr( unsigned int linenr );
+
+    unsigned int fileIndex() const;
+    void fileIndex( unsigned int fileIndex );
 
     TOKEN *next() const;
     void next( TOKEN *next );
@@ -127,6 +131,8 @@ private:
     bool _isNumber;
     unsigned int _varId;
     TOKEN *_next;
+    unsigned int _fileIndex;
+    unsigned int _linenr;
 };
 
 #endif // TOKEN_H
diff --git a/tokenize.cpp b/tokenize.cpp
index c868d512f..f1d320fee 100644
--- a/tokenize.cpp
+++ b/tokenize.cpp
@@ -161,8 +161,8 @@ void Tokenizer::addtoken(const char str[], const unsigned int lineno, const unsi
 
     TOKEN *newtoken = new TOKEN;
     newtoken->setstr(str2.str().c_str());
-    newtoken->linenr = lineno;
-    newtoken->FileIndex = fileno;
+    newtoken->linenr( lineno );
+    newtoken->fileIndex( fileno );
     if (_tokensBack)
     {
         _tokensBack->next( newtoken );
@@ -213,8 +213,8 @@ void Tokenizer::InsertTokens(TOKEN *dest, TOKEN *src, unsigned int n)
     while (n > 0)
     {
         TOKEN *NewToken = new TOKEN;
-        NewToken->FileIndex = src->FileIndex;
-        NewToken->linenr = src->linenr;
+        NewToken->fileIndex( src->fileIndex() );
+        NewToken->linenr( src->linenr() );
         NewToken->setstr(src->aaaa());
 
         NewToken->next( dest->next() );
@@ -569,8 +569,8 @@ void Tokenizer::tokenizeCode(std::istream &code, const unsigned int FileIndex)
 
                 TOKEN *newtok = new TOKEN;
                 newtok->setstr(type2);
-                newtok->FileIndex = tok2->FileIndex;
-                newtok->linenr = tok2->linenr;
+                newtok->fileIndex( tok2->fileIndex() );
+                newtok->linenr( tok2->linenr() );
                 newtok->next( tok2->next() );
                 tok2->next( newtok );
                 tok2 = newtok;
@@ -1267,7 +1267,7 @@ const char *Tokenizer::getParameterName( const TOKEN *ftok, int par )
 std::string Tokenizer::fileLine( const TOKEN *tok ) const
 {
     std::ostringstream ostr;
-    ostr << "[" << _files.at(tok->FileIndex) << ":" << tok->linenr << "]";
+    ostr << "[" << _files.at(tok->fileIndex()) << ":" << tok->linenr() << "]";
     return ostr.str();
 }
 
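Taken together, token.h and token.cpp now expose linenr and fileIndex through overloaded getter/setter pairs over private members. The sketch below shows just that pattern in isolation; the Token class and main() are illustrative only, since the real TOKEN carries many more members and helpers.

    #include <iostream>

    // Getter/setter overloads over private state, mirroring the new
    // TOKEN::linenr()/TOKEN::fileIndex() interface introduced by this patch.
    class Token
    {
    public:
        Token() : _fileIndex(0), _linenr(0) {}

        unsigned int fileIndex() const { return _fileIndex; }              // getter
        void fileIndex(unsigned int fileIndex) { _fileIndex = fileIndex; } // setter

        unsigned int linenr() const { return _linenr; }
        void linenr(unsigned int linenr) { _linenr = linenr; }

    private:
        unsigned int _fileIndex;
        unsigned int _linenr;
    };

    int main()
    {
        Token tok;
        tok.fileIndex(2);   // was: tok.FileIndex = 2;
        tok.linenr(42);     // was: tok.linenr = 42;

        // Print the stored position; Tokenizer::fileLine() does much the same,
        // except it maps the file index to a file name first.
        std::cout << "[" << tok.fileIndex() << ":" << tok.linenr() << "]" << std::endl;
        return 0;
    }

Keeping the data private means later changes, such as validation or a different internal representation, stay local to token.cpp rather than rippling through the checkers.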