From 1fe1d7ea4a1df3bbfc3f4d6c14176b5e9c68f775 Mon Sep 17 00:00:00 2001
From: PKEuS
Date: Fri, 6 Nov 2015 18:46:43 +0100
Subject: [PATCH] Extended Tokenizer::simplifyCaseRange() to support also ranges of chars.

---
 lib/tokenize.cpp      | 14 ++++++++++++++
 test/testtokenize.cpp |  3 +++
 2 files changed, 17 insertions(+)

diff --git a/lib/tokenize.cpp b/lib/tokenize.cpp
index c4160c00e..02709955d 100644
--- a/lib/tokenize.cpp
+++ b/lib/tokenize.cpp
@@ -2397,6 +2397,20 @@ void Tokenizer::simplifyCaseRange()
                     tok->insertToken("case");
                 }
             }
+        } else if (Token::Match(tok, "case %char% . . . %char% :")) {
+            char start = tok->strAt(1)[1];
+            char end = tok->strAt(5)[1];
+            if (start < end) {
+                tok = tok->tokAt(2);
+                tok->str(":");
+                tok->deleteNext();
+                tok->next()->str("case");
+                for (char i = end - 1; i > start; i--) {
+                    tok->insertToken(":");
+                    tok->insertToken(std::string(1, '\'') + i + '\'');
+                    tok->insertToken("case");
+                }
+            }
         }
     }
 }
diff --git a/test/testtokenize.cpp b/test/testtokenize.cpp
index 4d9df9751..c42928891 100644
--- a/test/testtokenize.cpp
+++ b/test/testtokenize.cpp
@@ -7996,6 +7996,9 @@ private:
     void simplifyCaseRange() {
         ASSERT_EQUALS("void f ( ) { case 1 : ; case 2 : ; case 3 : ; case 4 : ; }", tokenizeAndStringify("void f() { case 1 ... 4: }"));
         ASSERT_EQUALS("void f ( ) { case 4 . . . 1 : ; }", tokenizeAndStringify("void f() { case 4 ... 1: }"));
+
+        ASSERT_EQUALS("void f ( ) { case 'a' : ; case 'b' : ; case 'c' : ; }", tokenizeAndStringify("void f() { case 'a' ... 'c': }"));
+        ASSERT_EQUALS("void f ( ) { case 'c' . . . 'a' : ; }", tokenizeAndStringify("void f() { case 'c' ... 'a': }"));
     }
 
     void prepareTernaryOpForAST() {
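
Note (illustration, not part of the patch): the new branch mirrors the existing numeric branch. The first "." token becomes the ":" of the first label, the last "." becomes a new "case" token, and the intermediate labels are inserted after that first ":" in descending order, so they end up ascending in the token list. A reversed range (start >= end) is deliberately left untouched, which the second new test asserts. The standalone sketch below only shows the resulting source-level expansion; expandCharCaseRange() is a hypothetical helper written for this note and is not part of cppcheck's API.

// Sketch of the expansion the tokenizer performs for char case ranges.
// expandCharCaseRange() is hypothetical; cppcheck operates on its Token
// list, not on strings.
#include <iostream>
#include <string>

static std::string expandCharCaseRange(char start, char end)
{
    // As in the patch, a reversed or empty range is not expanded.
    if (start >= end)
        return std::string("case '") + start + "' . . . '" + end + "' :";

    std::string out;
    for (int c = start; c <= end; ++c) { // int loop variable avoids char overflow at the upper bound
        if (!out.empty())
            out += ' ';
        out += std::string("case '") + static_cast<char>(c) + "' :";
    }
    return out;
}

int main()
{
    std::cout << expandCharCaseRange('a', 'c') << '\n'; // case 'a' : case 'b' : case 'c' :
    std::cout << expandCharCaseRange('c', 'a') << '\n'; // kept as a range, like the second new test
}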