diff --git a/lib/tokenize.cpp b/lib/tokenize.cpp
index b07df25af..76fd73d74 100644
--- a/lib/tokenize.cpp
+++ b/lib/tokenize.cpp
@@ -4963,7 +4963,7 @@ void Tokenizer::removeRedundantCodeAfterReturn()
                     } else {
-                        if (indentcase >= indentret && indentlevel > indentlabel)
+                        if (indentcase > indentret && indentlevel > indentlabel)
                         {
                             tok = tok->previous();
                             tok->deleteNext();
diff --git a/test/testtokenize.cpp b/test/testtokenize.cpp
index dc1232703..3af465e2b 100644
--- a/test/testtokenize.cpp
+++ b/test/testtokenize.cpp
@@ -5929,6 +5929,8 @@ private:
         ASSERT_EQUALS("int f ( int n ) { switch ( n ) { case 0 : return 0 ; default : ; return n ; } return -1 ; }",
                       tokenizeAndStringify("int f(int n) { switch (n) {case 0: return 0; n*=2; default: return n; n*=6;} return -1; foo();}"));
+        //ticket #3132
+        ASSERT_EQUALS("void f ( int i ) { goto label ; switch ( i ) { label : ; return ; } }",tokenizeAndStringify("void f (int i) { goto label; switch(i) { label: return; } }"));
         {
             const char code[] = "void f(){ "
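
Note (not part of the patch): the new test is the pattern from ticket #3132, where a goto target label sits inside a switch body and must not be treated like a case label when Tokenizer::removeRedundantCodeAfterReturn strips unreachable code. A minimal standalone reproduction of that input, taken directly from the test string, is sketched below; it compiles as ordinary C++ and the expected simplification keeps the label and the return.

    // Reproduction of the #3132 input (copied from the new ASSERT_EQUALS case).
    // Expected tokenization after simplification:
    //   void f ( int i ) { goto label ; switch ( i ) { label : ; return ; } }
    void f(int i)
    {
        goto label;          // jumps to the label inside the switch body
        switch (i) {
        label:               // plain label, not a case label
            return;
        }
    }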