Fixed tokenizer: don't change '*(str+num)' when the '*' is a multiplication by a preceding variable or number.
Ditto for '&str[num]' (still commented out).
parent a546fc6e9d
commit 3cc9a333c9
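For context on the change below: the "* ( %var% + %num% )" pattern is only a dereference when the token in front of the '*' cannot be the left operand of a multiplication. A minimal sketch of that guard, assuming plain string tokens rather than cppcheck's Token class (isMultiplicationContext and the main() driver are illustrative names, not code from this commit):

    // Sketch only: string tokens instead of cppcheck's Token class.
    #include <cctype>
    #include <iostream>
    #include <string>

    // True if 'prev' (the token just before '*') could be the left operand of
    // a multiplication: a name, a number, ']' or ')'.
    static bool isMultiplicationContext(const std::string &prev)
    {
        if (prev == "]" || prev == ")")
            return true;                        // "y [ 10 ] * (...)", "( a + m ) * (...)"
        if (prev.empty())
            return false;                       // start of expression => dereference
        const unsigned char c = prev[0];
        return std::isalnum(c) || c == '_';     // "y * (...)", "10 * (...)"
    }

    int main()
    {
        // "; x = * ( p + 1 ) ;"   -> token before '*' is "=" -> safe to rewrite as "p [ 1 ]"
        std::cout << isMultiplicationContext("=") << '\n';   // prints 0
        // "; x = y * ( p + n ) ;" -> token before '*' is "y" -> multiplication, leave untouched
        std::cout << isMultiplicationContext("y") << '\n';   // prints 1
    }

The new condition in the hunk below expresses the same check with Token::Match on the token in front of the pattern ("%var%", "%num%", "]|)").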
@@ -4154,8 +4154,10 @@ bool Tokenizer::simplifyTokenList()
     // Replace "*(str + num)" => "str[num]"
     for (Token *tok = _tokens; tok; tok = tok->next()) {
-        if (Token::Match(tok->next(), "* ( %var% + %num% )") ||
-            Token::Match(tok->next(), "* ( %var% + %var% )")) {
+        if (!Token::Match(tok, "%var%") && !Token::Match(tok, "%num%")
+            && !Token::Match(tok, "]|)")
+            && (Token::Match(tok->next(), "* ( %var% + %num% )") ||
+                Token::Match(tok->next(), "* ( %var% + %var% )"))) {
             // remove '* ('
             tok->deleteNext();
             tok->deleteNext();
@@ -4178,15 +4180,11 @@ bool Tokenizer::simplifyTokenList()
     //"[test.cpp:7]: (error) Invalid pointer 'first' after push_back / push_front\n".
     //Actual:
     //"".
-    //2)
-    //test/testautovariables.cpp:279: Assertion failed.
-    //Expected:
-    //"[test.cpp:4]: (error) Return of the address of an auto-variable\n".
-    //Actual:
-    //"".
     /*for (Token *tok = _tokens; tok; tok = tok->next()) {
-        if ((Token::Match(tok->next(), "& %var% [ %num% ]") ||
-             Token::Match(tok->next(), "& %var% [ %var% ]"))) {
+        if (!Token::Match(tok, "%var%") && !Token::Match(tok, "%num%")
+            && !Token::Match(tok, "]|)")
+            && (Token::Match(tok->next(), "& %var% [ %num% ]") ||
+                Token::Match(tok->next(), "& %var% [ %var% ]"))) {
             tok = tok->next();
             // '&' => '('
             tok->str("(");
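The '&str[num]' variant stays commented out, apparently because of the test regressions noted in the comments above, but the ambiguity it guards against is the same: '&' is a bitwise AND rather than an address-of whenever a name, number, ']' or ')' precedes it. A minimal sketch under the same string-token assumption (isBitwiseAndContext is a hypothetical name):

    // Sketch only: same string-token assumption as above, not cppcheck code.
    #include <cctype>
    #include <string>

    static bool isBitwiseAndContext(const std::string &prev)
    {
        // "; x = y & p [ n ] ;" or "; x = 10 & p [ n ] ;" must keep the '&';
        // "; x = & p [ n ] ;" (previous token "=") could become "p + n".
        if (prev == "]" || prev == ")")
            return true;
        return !prev.empty() &&
               (std::isalnum(static_cast<unsigned char>(prev[0])) || prev[0] == '_');
    }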
@@ -475,9 +475,21 @@ private:
     }

     void tokenize6() {
+        // "&p[1]" => "p+1"
+        /*
+        ASSERT_EQUALS("; x = p + n ;", tokenizeAndStringify("; x = & p [ n ] ;", true));
+        ASSERT_EQUALS("; x = ( p + n ) [ m ] ;", tokenizeAndStringify("; x = & p [ n ] [ m ] ;", true));
+        ASSERT_EQUALS("; x = y & p [ n ] ;", tokenizeAndStringify("; x = y & p [ n ] ;", true));
+        ASSERT_EQUALS("; x = 10 & p [ n ] ;", tokenizeAndStringify("; x = 10 & p [ n ] ;", true));
+        ASSERT_EQUALS("; x = y [ 10 ] & p [ n ] ;", tokenizeAndStringify("; x = y [ 10 ] & p [ n ] ;", true));
+        ASSERT_EQUALS("; x = ( a + m ) & p [ n ] ;", tokenizeAndStringify("; x = ( a + m ) & p [ n ] ;", true));*/
         // "*(p+1)" => "p[1]"
         ASSERT_EQUALS("; x = p [ 1 ] ;", tokenizeAndStringify("; x = * ( p + 1 ) ;", true));
         ASSERT_EQUALS("; x = p [ n ] ;", tokenizeAndStringify("; x = * ( p + n ) ;", true));
+        ASSERT_EQUALS("; x = y * ( p + n ) ;", tokenizeAndStringify("; x = y * ( p + n ) ;", true));
+        ASSERT_EQUALS("; x = 10 * ( p + n ) ;", tokenizeAndStringify("; x = 10 * ( p + n ) ;", true));
+        ASSERT_EQUALS("; x = y [ 10 ] * ( p + n ) ;", tokenizeAndStringify("; x = y [ 10 ] * ( p + n ) ;", true));
+        ASSERT_EQUALS("; x = ( a + m ) * ( p + n ) ;", tokenizeAndStringify("; x = ( a + m ) * ( p + n ) ;", true));
     }

     void tokenize7() {