diff --git a/src/tokenize.cpp b/src/tokenize.cpp
index 630b1d740..5e5f94ee2 100644
--- a/src/tokenize.cpp
+++ b/src/tokenize.cpp
@@ -1054,7 +1054,7 @@ void Tokenizer::simplifyTokenList()
     }
 
     // Remove unwanted keywords
-    static const char* unwantedWords[] = { "unsigned", "unlikely", "likely" };
+    static const char * const unwantedWords[] = { "unsigned", "unlikely", "likely" };
     for (Token *tok = _tokens; tok; tok = tok->next())
     {
         for (unsigned ui = 0; ui < sizeof(unwantedWords) / sizeof(unwantedWords[0]) && tok->next(); ui++)
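
For context (not part of the patch itself): the extra top-level const makes the array slots immutable as well, not just the characters they point to, so the table can typically be placed in read-only storage. A minimal standalone sketch of the difference, using hypothetical names rather than anything from tokenize.cpp:

    // Illustrative only; the names here are hypothetical, not from the patch.
    int main()
    {
        // Pointer-to-const: the characters are read-only, but each array slot
        // can still be reassigned, so the table has to live in writable memory.
        static const char *words[] = { "alpha", "beta" };
        words[0] = "gamma";              // compiles

        // With the additional top-level const (as in the patch above) the slots
        // are frozen too, and the table can go into read-only storage.
        static const char * const fixedWords[] = { "alpha", "beta" };
        // fixedWords[0] = "gamma";      // would not compile

        // Same sizeof idiom for the element count as the loop bound in the diff.
        return static_cast<int>(sizeof(fixedWords) / sizeof(fixedWords[0])); // 2
    }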