From 0984a0be4766bcffab9acbf2517d3840cf4f9dcc Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Daniel=20Marjam=C3=A4ki?=
Date: Thu, 4 Nov 2010 19:38:19 +0100
Subject: [PATCH] Tokenizer: Fixed 'a[b-1]+=1'

---
 lib/tokenize.cpp | 7 +++++--
 1 file changed, 5 insertions(+), 2 deletions(-)

diff --git a/lib/tokenize.cpp b/lib/tokenize.cpp
index 71e861afb..0a0014c5f 100644
--- a/lib/tokenize.cpp
+++ b/lib/tokenize.cpp
@@ -4424,12 +4424,15 @@ void Tokenizer::simplifyCompoundAssignment()
             // modify the token list..
             tok->str("=");
             tok->insertToken(op);
+            Token *tokend = 0;
             for (const Token *tok2 = tok->previous(); tok2 && tok2 != tok1; tok2 = tok2->previous())
             {
                 tok->insertToken(tok2->str());
                 tok->next()->varId(tok2->varId());
-                if (Token::Match(tok->next(), "[ %any% ]"))
-                    Token::createMutualLinks(tok->next(), tok->next()->next()->next());
+                if (Token::simpleMatch(tok->next(), "]"))
+                    tokend = tok->next();
+                else if (Token::simpleMatch(tok->next(), "["))
+                    Token::createMutualLinks(tok->next(), tokend);
             }
         }
     }
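
Background note on the fix (not part of the patch): simplifyCompoundAssignment() copies the left-hand side of "a[b-1] += 1" in reverse order after the inserted "=", so the "]" is copied before its matching "[". The old pattern "[ %any% ]" only linked brackets when the subscript was a single token; the patch instead remembers the "]" (tokend) and links it once the matching "[" arrives. Below is a minimal, self-contained C++ sketch of that idea, using plain std::string tokens rather than cppcheck's Token class; all names in it are illustrative, not cppcheck's API.

    #include <iostream>
    #include <string>
    #include <vector>

    int main()
    {
        // Tokens of the left-hand side "a [ b - 1 ]".
        const std::vector<std::string> lhs = {"a", "[", "b", "-", "1", "]"};

        std::vector<std::string> copied;   // tokens rebuilt after the "="
        bool sawClosingBracket = false;    // plays the role of "tokend" in the patch

        // Walk the left-hand side backwards, inserting each token at the front
        // of the copy, just as the tokenizer inserts each copied token directly
        // after the "=".
        for (auto it = lhs.rbegin(); it != lhs.rend(); ++it)
        {
            copied.insert(copied.begin(), *it);

            if (*it == "]")
                sawClosingBracket = true;  // remember the "]" for later
            else if (*it == "[" && sawClosingBracket)
                std::cout << "link this [ with the remembered ]\n";  // createMutualLinks() in the patch
        }

        // The copy now reads "a [ b - 1 ]", ready to follow "=" in "a[b-1] = a[b-1] + 1".
        for (const std::string &token : copied)
            std::cout << token << ' ';
        std::cout << '\n';
    }

Built with any C++11 compiler, the sketch prints the linking message when the "[" is reached and then the rebuilt sequence "a [ b - 1 ]"; with a single-token subscript the old "[ %any% ]" pattern would also have worked, which is why the bug only showed up for expressions like "a[b-1]+=1".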