Fixed #4142 (false positive (error) Uninitialized variable: b)
parent c31ce68520
commit 2722f53edd
@@ -1971,6 +1971,8 @@ bool Tokenizer::tokenize(std::istream &code,
 
     // Remove redundant parentheses
     simplifyRedundantParenthesis();
+    for (Token *tok = list.front(); tok; tok = tok->next())
+        while (TemplateSimplifier::simplifyNumericCalculations(tok));
 
     // Handle templates..
     simplifyTemplates();
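The added loop visits every token, and the inner while keeps re-applying TemplateSimplifier::simplifyNumericCalculations until a pass changes nothing, so constant subexpressions are folded to a fixed point before template handling. A minimal standalone sketch of that fixed-point idea, using plain strings in place of cppcheck's Token list (isNum and foldOnce are made-up helpers for illustration, not cppcheck API):

// Standalone sketch only: folds "number op number" triples in a string
// token list until nothing changes, mimicking the fixed-point loop above.
#include <cctype>
#include <iostream>
#include <string>
#include <vector>

static bool isNum(const std::string &s) {
    return !s.empty() && std::isdigit(static_cast<unsigned char>(s[0]));
}

// Fold the first "number op number" triple; return true if anything changed.
// (Hypothetical helper; handles only + - * and ignores operator precedence.)
static bool foldOnce(std::vector<std::string> &toks) {
    for (std::size_t i = 0; i + 2 < toks.size(); ++i) {
        if (!isNum(toks[i]) || !isNum(toks[i + 2]))
            continue;
        const long a = std::stol(toks[i]);
        const long b = std::stol(toks[i + 2]);
        long r;
        if (toks[i + 1] == "+")      r = a + b;
        else if (toks[i + 1] == "-") r = a - b;
        else if (toks[i + 1] == "*") r = a * b;
        else continue;
        toks[i] = std::to_string(r);
        toks.erase(toks.begin() + i + 1, toks.begin() + i + 3);
        return true;
    }
    return false;
}

int main() {
    std::vector<std::string> toks = {"100", "+", "1", "+", "1"};
    while (foldOnce(toks)) {} // iterate to a fixed point, like the added loop
    for (const auto &t : toks)
        std::cout << t << ' ';
    std::cout << '\n'; // prints: 102
}

Folding one triple per pass and restarting keeps the sketch simple; the real simplifier works in place on the token list and covers far more cases.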
@@ -6840,11 +6840,10 @@ private:
         const char expected[] = "int sum ; sum = "
                                 "sizeof ( int ) + "
                                 "1 + sizeof ( int ) + "
-                                "1 + sizeof ( int ) + 100 + "
-                                "1 + sizeof ( int ) + 100 + 1 + "
-                                "1 + sizeof ( int ) + 100 + 2 + "
-                                "90 + "
-                                "91 ;";
+                                "1 + sizeof ( int ) + 101 + " // 101 = 100 + 1
+                                "sizeof ( int ) + 102 + "     // 102 = 100 + 1 + 1
+                                "sizeof ( int ) + 283 "       // 283 = 100+2+90+91
+                                ";";
 
         ASSERT_EQUALS(expected, tok(code, false));
         ASSERT_EQUALS("int sum ; sum = 508 ;", tok(code, true));
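A quick check of the fully simplified expectation: the new expected string contains five sizeof ( int ) terms and the constants 1, 1, 101, 102 and 283. Assuming the test platform's 4-byte int, that gives 5 * 4 + (1 + 1 + 101 + 102 + 283) = 20 + 488 = 508, matching the tok(code, true) assert.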
@@ -4573,9 +4573,7 @@ private:
 
     void removeParentheses15() {
         ASSERT_EQUALS("a = b ? c : 123 ;", tokenizeAndStringify("a = b ? c : (123);", false));
-        TODO_ASSERT_EQUALS("a = b ? c : 579 ;",
-                           "a = b ? c : 123 + 456 ;",
-                           tokenizeAndStringify("a = b ? c : ((123)+(456));", false));
+        ASSERT_EQUALS("a = b ? c : 579 ;", tokenizeAndStringify("a = b ? c : ((123)+(456));", false));
     }
 
     void tokenize_double() {
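Since ( 123 ) + ( 456 ) is now folded to 579 during tokenization itself, the TODO_ASSERT_EQUALS, which recorded the wanted output "a = b ? c : 579 ;" next to the then-actual "a = b ? c : 123 + 456 ;", is promoted to a plain ASSERT_EQUALS.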