Improved Tokens and Tokenizer:

- Better handling of deleteThis() as last element of token list
- Code simplified
This commit is contained in:
PKEuS 2012-02-13 17:44:08 +01:00
parent da8abeb63c
commit bb319c1c96
3 changed files with 59 additions and 63 deletions

View File

@@ -118,7 +118,7 @@ std::string Token::strValue() const
{
assert(_str.length() >= 2);
assert(_str[0] == '"');
assert(_str[_str.length()-1] == '"');
assert(_str.back() == '"');
return _str.substr(1, _str.length() - 2);
}
@@ -138,7 +138,7 @@ void Token::deleteNext(unsigned long index)
void Token::deleteThis()
{
if (_next) {
if (_next) { // Copy next to this and delete next
_str = _next->_str;
_isName = _next->_isName;
_isNumber = _next->_isNumber;
@@ -158,14 +158,34 @@ void Token::deleteThis()
_link->link(this);
deleteNext();
} else if (_previous) {
// This should never be used for tokens
// at the end of the list
str(";");
} else if (_previous && _previous->_previous) { // Copy previous to this and delete previous
_str = _previous->_str;
_isName = _previous->_isName;
_isNumber = _previous->_isNumber;
_isBoolean = _previous->_isBoolean;
_isUnsigned = _previous->_isUnsigned;
_isSigned = _previous->_isSigned;
_isPointerCompare = _previous->_isPointerCompare;
_isLong = _previous->_isLong;
_isUnused = _previous->_isUnused;
_isStandardType = _previous->_isStandardType;
_isExpandedMacro = _previous->_isExpandedMacro;
_varId = _previous->_varId;
_fileIndex = _previous->_fileIndex;
_linenr = _previous->_linenr;
_link = _previous->_link;
if (_link)
_link->link(this);
Token* toDelete = _previous;
_previous = _previous->_previous;
_previous->_next = this;
delete toDelete;
} else {
// We are the last token in the list, we can't delete
// ourselves, so just make us ;
str(";");
// ourselves, so just make us empty
str("");
}
}
@@ -839,25 +859,31 @@ const Token *Token::findmatch(const Token *tok, const char pattern[], const Toke
void Token::insertToken(const std::string &tokenStr)
{
Token *newToken = new Token(tokensBack);
Token *newToken;
if (_str == "")
newToken = this;
else
newToken = new Token(tokensBack);
newToken->str(tokenStr);
newToken->_linenr = _linenr;
newToken->_fileIndex = _fileIndex;
newToken->_progressValue = _progressValue;
if (this->next()) {
newToken->next(this->next());
newToken->next()->previous(newToken);
} else if (tokensBack) {
*tokensBack = newToken;
}
this->next(newToken);
newToken->previous(this);
if (newToken != this) {
if (this->next()) {
newToken->next(this->next());
newToken->next()->previous(newToken);
} else if (tokensBack) {
*tokensBack = newToken;
}
this->next(newToken);
newToken->previous(this);
}
}
void Token::eraseTokens(Token *begin, const Token *end)
{
if (!begin)
if (!begin || begin == end)
return;
while (begin->next() && begin->next() != end) {
@@ -901,6 +927,8 @@ std::string Token::stringify(const Token* end) const
ret << str();
for (const Token *tok = this->next(); tok && tok != end; tok = tok->next()) {
if (tok->str() == "")
continue;
if (tok->isUnsigned())
ret << " unsigned";
else if (tok->isSigned())

View File

@@ -381,32 +381,6 @@ void Tokenizer::createTokens(std::istream &code)
} else if (CurrentToken.empty() && ch == '.' && std::isdigit(code.peek())) {
// tokenize .125 into 0.125
CurrentToken = "0";
} else if (ch=='&' && code.peek() == '&') {
if (!CurrentToken.empty()) {
addtoken(CurrentToken.c_str(), lineno, FileIndex, true);
if (!CurrentToken.empty())
_tokensBack->setExpandedMacro(expandedMacro);
CurrentToken.clear();
}
// &&
ch = (char)code.get();
addtoken("&&", lineno, FileIndex, true);
_tokensBack->setExpandedMacro(expandedMacro);
continue;
} else if (ch==':' && CurrentToken.empty() && code.peek() == ' ') {
// :
addtoken(":", lineno, FileIndex, true);
_tokensBack->setExpandedMacro(expandedMacro);
CurrentToken.clear();
continue;
} else if (ch==':' && CurrentToken.empty() && code.peek() == ':') {
// ::
ch = (char)code.get();
addtoken("::", lineno, FileIndex, true);
_tokensBack->setExpandedMacro(expandedMacro);
CurrentToken.clear();
continue;
} else if (strchr("+-*/%&|^?!=<>[](){};:,.~\n ", ch)) {
if (CurrentToken == "#file") {
// Handle this where strings are handled
@@ -440,8 +414,8 @@ void Tokenizer::createTokens(std::istream &code)
}
CurrentToken += ch;
// Add "++", "--" or ">>" token
if ((ch == '+' || ch == '-' || ch == '>') && (code.peek() == ch))
// Add "++", "--", ">>" or ... token
if (strchr("+-<>=:&|", ch) && (code.peek() == ch))
CurrentToken += (char)code.get();
addtoken(CurrentToken.c_str(), lineno, FileIndex);
_tokensBack->setExpandedMacro(expandedMacro);
@@ -2027,19 +2001,8 @@ bool Tokenizer::tokenize(std::istream &code,
if (tok->str().length() == 1 && tok->next()->str().length() == 1) {
const char c2 = tok->next()->str()[0];
// combine equal tokens..
if (c1 == c2 && (c1 == '<' || c1 == '|' || c1 == ':')) {
tok->str(tok->str() + c2);
tok->deleteNext();
if (c1 == '<' && Token::simpleMatch(tok->next(), "=")) {
tok->str("<<=");
tok->deleteNext();
}
continue;
}
// combine +-*/ and =
else if (c2 == '=' && (strchr("+-*/%&|^=!<>", c1))) {
if (c2 == '=' && (strchr("+-*/%&|^=!<>", c1))) {
tok->str(tok->str() + c2);
tok->deleteNext();
continue;
@@ -2058,6 +2021,11 @@ bool Tokenizer::tokenize(std::istream &code,
tok->deleteNext();
}
else if (tok->str() == "<<" && tok->next()->str() == "=") {
tok->str("<<=");
tok->deleteNext();
}
else if ((c1 == 'p' || c1 == '_') && tok->next()->str() == ":" && tok->strAt(2) != ":") {
if (tok->str() == "private" || tok->str() == "protected" || tok->str() == "public" || tok->str() == "__published") {
tok->str(tok->str() + ":");

View File

@@ -1763,7 +1763,7 @@ private:
"\n"
"template<typename T> inline B<T> h() { return B<T>(); }\n";
ASSERT_EQUALS(";", sizeof_(code));
ASSERT_EQUALS("", sizeof_(code));
ASSERT_EQUALS("class A { } ;", sizeof_("class A{ template<typename T> int foo(T d);};"));
}
@@ -2279,7 +2279,7 @@ private:
"{ }";
// The expected result..
const std::string expected(";");
const std::string expected("");
ASSERT_EQUALS(expected, sizeof_(code));
}
@@ -4487,7 +4487,7 @@ private:
void simplifyTypedef39() {
const char code[] = "typedef int A;\n"
"template <const A, volatile A>::value;";
const char expected[] = ";";
const char expected[] = "";
ASSERT_EQUALS(expected, tok(code, false));
checkSimplifyTypedef(code);
@@ -5253,7 +5253,7 @@ private:
void simplifyTypedef75() { // ticket #2426
const char code[] = "typedef _Packed struct S { long l; }; \n";
const std::string expected = ";";
const std::string expected = "";
ASSERT_EQUALS(expected, sizeof_(code));
ASSERT_EQUALS("", errout.str());
}
@@ -6950,7 +6950,7 @@ private:
void enum20() { // ticket #2600 segmentation fault
const char code[] = "enum { const }\n";
ASSERT_EQUALS(";", tok(code, false));
ASSERT_EQUALS("", tok(code, false));
}
void enum21() { // ticket #2720 syntax error