diff --git a/src/token.cpp b/src/token.cpp
index a13806d18..25dfaae72 100644
--- a/src/token.cpp
+++ b/src/token.cpp
@@ -90,6 +90,9 @@ void Token::deleteThis()
         _fileIndex = _next->_fileIndex;
         _linenr = _next->_linenr;
         _link = _next->_link;
+        if (_link)
+            _link->link(this);
+
         deleteNext();
     }
     else if (_previous)
diff --git a/src/tokenize.cpp b/src/tokenize.cpp
index 799fe8d42..fee82d34f 100644
--- a/src/tokenize.cpp
+++ b/src/tokenize.cpp
@@ -2160,8 +2160,6 @@ bool Tokenizer::simplifyQuestionMark()
 
 bool Tokenizer::simplifyCasts()
 {
-    createLinks();
-
     bool ret = false;
     for (Token *tok = _tokens; tok; tok = tok->next())
     {
@@ -3237,6 +3235,8 @@ void Tokenizer::simplifyGoto()
 
                 // Insert the statements..
                 bool ret = false;
+
+                std::list<Token *> links;
                 for (const Token *tok2 = tok; tok2; tok2 = tok2->next())
                 {
                     if (tok2->str() == "}")
@@ -3245,7 +3245,24 @@ void Tokenizer::simplifyGoto()
                         ret = true;
                     token->insertToken(tok2->str().c_str());
                     token = token->next();
+                    if (token->str() == "(")
+                    {
+                        links.push_back(token);
+                    }
+                    else if (token->str() == ")")
+                    {
+                        if (links.size() == 0)
+                        {
+                            // This should never happen at this point
+                            syntaxError(token, ')');
+                            return;
+                        }
+
+                        Token::createMutualLinks(links.back(), token);
+                        links.pop_back();
+                    }
                 }
+
                 if (!ret)
                 {
                     token->insertToken("return");
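
For reference, the pattern added in Tokenizer::simplifyGoto() is ordinary stack-based bracket matching: each copied "(" token is remembered, and each copied ")" is paired with the most recently opened one through Token::createMutualLinks(). Below is a minimal, self-contained sketch of that idea; the Tok struct and the local createMutualLinks helper are simplified stand-ins of my own, not cppcheck's real Token API.

#include <cassert>
#include <list>
#include <string>
#include <vector>

// Simplified stand-in for a token: its text plus, for "(" / ")" tokens,
// a pointer to the matching partner.
struct Tok {
    std::string str;
    Tok *link;
    explicit Tok(const std::string &s) : str(s), link(nullptr) {}
};

// Mirrors the idea of Token::createMutualLinks(): make both tokens point
// at each other so later passes can jump between matching parentheses.
static void createMutualLinks(Tok *open, Tok *close) {
    open->link = close;
    close->link = open;
}

int main() {
    // Token stream for: f ( a , ( b ) )
    std::vector<Tok> tokens = {Tok("f"), Tok("("), Tok("a"), Tok(","),
                               Tok("("), Tok("b"), Tok(")"), Tok(")")};

    // Same pattern as the patch: keep a stack of still-open "(" tokens
    // and pair each ")" with the most recent one.
    std::list<Tok *> links;
    for (Tok &tok : tokens) {
        if (tok.str == "(") {
            links.push_back(&tok);
        } else if (tok.str == ")") {
            // The real code reports a syntax error for an unbalanced ")".
            assert(!links.empty());
            createMutualLinks(links.back(), &tok);
            links.pop_back();
        }
    }

    // The outer "(" at index 1 is linked to the outer ")" at index 7,
    // and the inner pair at indices 4 and 6 likewise.
    assert(tokens[1].link == &tokens[7]);
    assert(tokens[4].link == &tokens[6]);
    return 0;
}

Using std::list as the stack simply mirrors the patch; a std::vector or std::stack would serve equally well here.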