Tokenizer: Fixed memory corruption

Daniel Marjamäki 2011-01-05 19:38:22 +01:00
parent 157498e694
commit a0d62e041f
1 changed file with 2 additions and 1 deletion


@@ -4898,7 +4898,8 @@ bool Tokenizer::simplifyConditions()
        else if (Token::simpleMatch(tok, "|| true )") ||
                 Token::simpleMatch(tok, "&& false )"))
        {
-            Token::eraseTokens(tok->tokAt(2)->link(), tok->next());
+            tok = tok->next();
+            Token::eraseTokens(tok->next()->link(), tok);
            ret = true;
        }
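
Note (a reading of the hunk, not text from the commit): the pattern "|| true )" / "&& false )" is matched with tok on the "||" or "&&" token. The old call erased everything between the opening "(" (reached through tok->tokAt(2)->link()) and tok->next(), a range that contains tok itself, so tok was left dangling and the surrounding loop presumably touched freed memory on its next step. The new code first advances tok onto the "true"/"false" token and only then erases up to it, so the token tok points at survives. Below is a minimal, self-contained sketch of the same keep-the-pointer-alive ordering; the Tok struct, the eraseTokens helper, and the "delete strictly between begin and end" semantics are assumptions for illustration, not cppcheck's real Token API.

// Minimal sketch (hypothetical Tok/eraseTokens, not cppcheck's Token class).
#include <cassert>
#include <string>

struct Tok {
    std::string str;
    Tok* prev = nullptr;
    Tok* next = nullptr;
};

// Assumed semantics from the call site: delete every token strictly
// between begin and end; begin and end themselves are kept.
static void eraseTokens(Tok* begin, Tok* end)
{
    while (begin && begin->next != end) {
        Tok* doomed = begin->next;
        begin->next = doomed->next;
        if (doomed->next)
            doomed->next->prev = begin;
        delete doomed;
    }
}

int main()
{
    // Build "( x || true )"; tok starts on "||" as in the diff.
    const char* strs[] = { "(", "x", "||", "true", ")" };
    Tok* head = nullptr;
    Tok* tail = nullptr;
    for (const char* s : strs) {
        Tok* t = new Tok;
        t->str = s;
        t->prev = tail;
        if (tail)
            tail->next = t;
        else
            head = t;
        tail = t;
    }
    Tok* lparen = head;             // "("
    Tok* tok = head->next->next;    // "||"

    // Fixed ordering: move tok onto "true" first, then erase "(" .. "true".
    // Erasing first would delete the node tok points at (it lies inside the
    // erased range), leaving a dangling pointer -- the reported corruption.
    tok = tok->next;                // now on "true"
    eraseTokens(lparen, tok);       // removes "x" and "||"

    assert(lparen->next == tok && tok->str == "true");

    // Tear down what remains: "(", "true", ")".
    for (Tok* t = head; t != nullptr; ) {
        Tok* n = t->next;
        delete t;
        t = n;
    }
    return 0;
}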