Fixed #8226 (Tokenizer: Wrong handling of string literal u"abc")

Daniel Marjamäki 2017-09-21 13:33:14 +02:00
parent fbb5c3b942
commit 71d207c034
2 changed files with 21 additions and 22 deletions
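
Background on #8226: combineStringAndCharLiterals() merged the wide prefix in L"..." with the following literal into a single string token, but a u prefix was left as two tokens ("u" followed by the literal), so UTF-16 literals such as u"abc" could be mishandled by later tokenizer stages (the exact symptom is described in the ticket). A minimal C++11 snippet that exercises both prefixes; the file and variable names are made up for illustration:

    // u_literal_example.cpp - hypothetical reproducer; any code containing a
    // u"..." literal goes through the same combineStringAndCharLiterals() path.
    const wchar_t  *w = L"abc";   // wide literal: already merged into one token before this commit
    const char16_t *s = u"abc";   // UTF-16 literal: only merged after this commit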

@@ -1896,10 +1896,8 @@ void Tokenizer::combineOperators()
 void Tokenizer::combineStringAndCharLiterals()
 {
     // Combine wide strings and wide characters
-    for (Token *tok = list.front();
-         tok;
-         tok = tok->next()) {
-        if (tok->str() == "L" && tok->next() && (tok->next()->tokType() == Token::eString || tok->next()->tokType() == Token::eChar)) {
+    for (Token *tok = list.front(); tok; tok = tok->next()) {
+        if (Token::Match(tok, "[Lu] %char%|%string%")) {
             // Combine 'L "string"' and 'L 'c''
             tok->str(tok->next()->str());
             tok->deleteNext();
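
The new Token::Match pattern "[Lu] %char%|%string%" accepts either prefix token, L or u, directly followed by a character or string literal; the prefix token then takes over the literal's text and the literal token is deleted, as the unchanged lines above show. A rough, self-contained sketch of that merging step, using a made-up token representation (plain strings) rather than cppcheck's Token class:

    #include <cassert>
    #include <cstddef>
    #include <string>
    #include <vector>

    // Stand-in for the token list: one string per token.
    static bool isLiteral(const std::string &s) {
        return !s.empty() && (s.front() == '"' || s.front() == '\'');
    }

    // Mimics the merging step: an "L" or "u" token directly followed by a
    // string/char literal is replaced by the literal itself.
    static void combinePrefixedLiterals(std::vector<std::string> &toks) {
        for (std::size_t i = 0; i + 1 < toks.size();) {
            if ((toks[i] == "L" || toks[i] == "u") && isLiteral(toks[i + 1])) {
                toks[i] = toks[i + 1];               // prefix token takes the literal's text
                toks.erase(toks.begin() + i + 1);    // drop the consumed literal token
            } else {
                ++i;
            }
        }
    }

    int main() {
        std::vector<std::string> toks = {"abc", "=", "u", "\"abc\"", ";"};
        combinePrefixedLiterals(toks);
        assert((toks == std::vector<std::string>{"abc", "=", "\"abc\"", ";"}));
    }

The tests below additionally assert that the merged token is flagged isLong(); that flag is presumably set further down in combineStringAndCharLiterals(), outside the lines shown in this hunk.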

@@ -114,6 +114,7 @@ private:
         TEST_CASE(test_4881); // similar to doWhileAssign (#4911), taken from #4881 with full code
         TEST_CASE(combine_wstrings);
+        TEST_CASE(combine_ustrings);

         // Simplify "not" to "!" (#345)
         TEST_CASE(not1);
@@ -488,29 +489,29 @@ private:
     }

     void combine_wstrings() {
-        const char code1[] = "void foo()\n"
-                             "{\n"
-                             "const wchar_t *a =\n"
-                             "{\n"
-                             "L\"hello \"\n"
-                             "L\"world\"\n"
-                             "};\n"
-                             "}\n";
+        const char code[] = "a = L\"hello \" L\"world\" ;\n";

-        const char code2[] = "void foo()\n"
-                             "{\n"
-                             "const wchar_t *a =\n"
-                             "{\n"
-                             "\"hello world\"\n"
-                             "};\n"
-                             "}\n";
+        const char expected[] = "a = \"hello world\" ;";

         Tokenizer tokenizer(&settings0, this);
-        std::istringstream istr(code1);
+        std::istringstream istr(code);
         tokenizer.tokenize(istr, "test.cpp");
-        ASSERT_EQUALS(tok(code2), tokenizer.tokens()->stringifyList(0, false));
-        ASSERT_EQUALS(true, tokenizer.tokens()->tokAt(13) && tokenizer.tokens()->tokAt(13)->isLong());
+        ASSERT_EQUALS(expected, tokenizer.tokens()->stringifyList(0, false));
+        ASSERT_EQUALS(true, tokenizer.tokens()->tokAt(2)->isLong());
     }

+    void combine_ustrings() {
+        const char code[] = "abc = u\"abc\";";
+        const char expected[] = "abc = \"abc\" ;";
+        Tokenizer tokenizer(&settings0, this);
+        std::istringstream istr(code);
+        tokenizer.tokenize(istr, "test.cpp");
+        ASSERT_EQUALS(expected, tokenizer.tokens()->stringifyList(0, false));
+        ASSERT_EQUALS(true, tokenizer.tokens()->tokAt(2)->isLong());
+    }
+
     void double_plus() {
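
A hedged reading of the test changes: combine_wstrings now feeds the tokenizer a single statement instead of a whole function, so the merged literal sits at token index 2 rather than 13, and the new combine_ustrings checks the same two things for the u prefix: the stringified token list matches the expected string, and the merged literal at index 2 is flagged isLong(). A tiny standalone check of that expected token layout (indices written out by hand, not produced by cppcheck):

    #include <cassert>
    #include <string>
    #include <vector>

    int main() {
        // Token stream expected after combining, as asserted by combine_ustrings.
        const std::vector<std::string> expected = {"abc", "=", "\"abc\"", ";"};
        assert(expected[2] == "\"abc\"");   // tokAt(2) in the test: the merged literal
    }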