diff --git a/lib/tokenize.cpp b/lib/tokenize.cpp
index 96e225f82..822ee1c60 100644
--- a/lib/tokenize.cpp
+++ b/lib/tokenize.cpp
@@ -3635,7 +3635,7 @@ void Tokenizer::elseif()
 
             if (indent == 0 && Token::Match(tok2, "}|;"))
             {
-                if (tok2->next()->str() != "else")
+                if (tok2->next() && tok2->next()->str() != "else")
                 {
                     tok->insertToken("{");
                     tok2->insertToken("}");
diff --git a/test/testsimplifytokens.cpp b/test/testsimplifytokens.cpp
index c36caf0d1..aab0abaf9 100644
--- a/test/testsimplifytokens.cpp
+++ b/test/testsimplifytokens.cpp
@@ -530,6 +530,9 @@ private:
     {
         const char code[] = "else if(ab) { cd } else { ef }gh";
         ASSERT_EQUALS("\n\n##file 0\n1: else { if ( ab ) { cd } else { ef } } gh\n", elseif(code));
+
+        // syntax error: assert there is no segmentation fault
+        ASSERT_EQUALS("\n\n##file 0\n1: else if ( x ) { }\n", elseif("else if (x) { }"));
     }