diff --git a/lib/tokenize.cpp b/lib/tokenize.cpp index ef5e8c208..682938209 100644 --- a/lib/tokenize.cpp +++ b/lib/tokenize.cpp @@ -8510,15 +8510,15 @@ void Tokenizer::findGarbageCode() const // keyword keyword const std::set<std::string> nonConsecutiveKeywords{"break", - "continue", - "for", - "goto", - "if", - "return", - "switch", - "throw", - "typedef", - "while"}; + "continue", + "for", + "goto", + "if", + "return", + "switch", + "throw", + "typedef", + "while"}; for (const Token *tok = tokens(); tok; tok = tok->next()) { if (Token::Match(tok, "%name% %name%") && nonConsecutiveKeywords.count(tok->str()) == 1 && nonConsecutiveKeywords.count(tok->next()->str()) == 1) syntaxError(tok);