diff --git a/lib/token.cpp b/lib/token.cpp
index 5adcfc748..57019a1f7 100644
--- a/lib/token.cpp
+++ b/lib/token.cpp
@@ -976,7 +976,17 @@ void Token::stringify(std::ostream& os, bool varid, bool attributes) const
         if (isLong())
             os << "long ";
     }
-    os << _str;
+    // Escape embedded NUL bytes: use find('\0') (char overload) — find("\0") searches an empty needle and never returns npos.
+    if (_str[0] != '\"' || _str.find('\0') == std::string::npos)
+        os << _str;
+    else {
+        for (std::size_t i = 0U; i < _str.size(); ++i) {
+            if (_str[i] == '\0')
+                os << "\\0";
+            else
+                os << _str[i];
+        }
+    }
     if (varid && _varId != 0)
         os << '@' << _varId;
 }
diff --git a/lib/tokenize.cpp b/lib/tokenize.cpp
index 1d7348009..41c5effb7 100644
--- a/lib/tokenize.cpp
+++ b/lib/tokenize.cpp
@@ -8048,12 +8048,9 @@ std::string Tokenizer::simplifyString(const std::string &source)
 
         if (sz <= 2)
             i++;
-        else if (i+sz < str.size()) {
-            if (c == 0)
-                str = str.substr(0,i) + "\"";
-            else
-                str.replace(i, sz, std::string(1U, (char)c));
-        } else
+        else if (i+sz < str.size())
+            str.replace(i, sz, std::string(1U, (char)c));
+        else
             str.replace(i, str.size() - i - 1U, "a");
     }
 
diff --git a/test/testsimplifytokens.cpp b/test/testsimplifytokens.cpp
index 547db3746..805a0aa48 100644
--- a/test/testsimplifytokens.cpp
+++ b/test/testsimplifytokens.cpp
@@ -842,9 +842,14 @@ private:
     }
 
     void declareArray() {
-        const char code[] = "void f ( ) { char str [ ] = \"100\" ; }";
-        const char expected[] = "void f ( ) { char str [ 4 ] = \"100\" ; }";
-        ASSERT_EQUALS(expected, tok(code));
+        const char code1[] = "void f ( ) { char str [ ] = \"100\" ; }";
+        const char expected1[] = "void f ( ) { char str [ 4 ] = \"100\" ; }";
+        ASSERT_EQUALS(expected1, tok(code1));
+
+        const char code2[] = "char str [ ] = \"\\x00\";";
+        const char expected2[] = "char str [ 2 ] = \"\\0\" ;";
+        std::string actual = tok(code2);
+        ASSERT_EQUALS(expected2, actual);
     }
 
     void dontRemoveIncrement() {