diff --git a/lib/tokenize.cpp b/lib/tokenize.cpp
index 41c5effb7..4ed2fb182 100644
--- a/lib/tokenize.cpp
+++ b/lib/tokenize.cpp
@@ -8037,13 +8037,8 @@ std::string Tokenizer::simplifyString(const std::string &source)
                 sz++;
             std::istringstream istr(str.substr(i+1, sz-1));
             istr >> std::oct >> c;
-            if (sz == 2) {
-                if (c == 0) {
-                    str = str.substr(0,i) + "\"";
-                    continue;
-                } else
-                    str[i+1] = (char)c;
-            }
+            str = str.substr(0,i) + (char)c + str.substr(i+sz);
+            continue;
         }
 
         if (sz <= 2)
diff --git a/test/testsimplifytokens.cpp b/test/testsimplifytokens.cpp
index 805a0aa48..9a0849b48 100644
--- a/test/testsimplifytokens.cpp
+++ b/test/testsimplifytokens.cpp
@@ -848,8 +848,11 @@ private:
 
         const char code2[] = "char str [ ] = \"\\x00\";";
         const char expected2[] = "char str [ 2 ] = \"\\0\" ;";
-        std::string actual = tok(code2);
-        ASSERT_EQUALS(expected2, actual);
+        ASSERT_EQUALS(expected2, tok(code2));
+
+        const char code3[] = "char str [ ] = \"\\0\";";
+        const char expected3[] = "char str [ 2 ] = \"\\0\" ;";
+        ASSERT_EQUALS(expected3, tok(code3));
     }
 
     void dontRemoveIncrement() {
diff --git a/test/testtokenize.cpp b/test/testtokenize.cpp
index b09580835..6986d4090 100644
--- a/test/testtokenize.cpp
+++ b/test/testtokenize.cpp
@@ -6005,7 +6005,7 @@ private:
         ASSERT_EQUALS("\" 7\"", tokenizer.simplifyString("\"\\0407\""));
 
         // terminate a string at null character.
-        ASSERT_EQUALS("\"a\"", tokenizer.simplifyString("\"a\\0\""));
+        ASSERT_EQUALS(std::string("\"a") + '\0' + "\"", tokenizer.simplifyString("\"a\\0\""));
     }
 
     void simplifyConst() {
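
For context, the behavioral core of the patch is the splice `str = str.substr(0,i) + (char)c + str.substr(i+sz);`: an octal escape spanning `sz` characters is replaced by the single byte it encodes, so `"\0"` now produces an embedded null byte instead of truncating the simplified string. Below is a minimal standalone sketch of that logic, assuming a hypothetical `simplifyOctalEscape` helper and a hand-rolled octal-digit scan; neither is part of cppcheck's API, and the digit-scanning loop is an assumption about code outside the hunk shown above.

```cpp
#include <cassert>
#include <iostream>
#include <sstream>
#include <string>

// Hypothetical helper, not part of cppcheck: decode the octal escape that
// starts at position i (str[i] == '\\') and splice the resulting character
// back into the string, keeping embedded '\0' bytes instead of cutting the
// string short.
static std::string simplifyOctalEscape(std::string str, std::string::size_type i)
{
    int c = 0;
    std::string::size_type sz = 2;                 // "\N" is at least backslash + one digit
    while (sz < 4 && i + sz < str.size()
           && str[i + sz] >= '0' && str[i + sz] <= '7')
        sz++;                                      // consume up to three octal digits
    std::istringstream istr(str.substr(i + 1, sz - 1));
    istr >> std::oct >> c;                         // same decoding as in the patch
    return str.substr(0, i) + (char)c + str.substr(i + sz);
}

int main()
{
    // "\101" is 'A'.
    assert(simplifyOctalEscape("a\\101b", 1) == "aAb");

    // "\0" becomes an embedded null byte; the trailing 'b' is preserved.
    const std::string s = simplifyOctalEscape("a\\0b", 1);
    assert(s.size() == 3 && s[1] == '\0' && s[2] == 'b');

    std::cout << "ok\n";
}
```

The updated test in testtokenize.cpp mirrors the second assertion: the expected value is built with `std::string("\"a") + '\0' + "\""` because a string literal containing a raw `\0` would otherwise be cut off at the null byte when constructing the expected result.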