From fad65663e5cf402b2c09be00035fd6fcdec8c4cd Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Daniel=20Marjam=C3=A4ki?=
Date: Tue, 20 Jan 2009 17:26:16 +0000
Subject: [PATCH] tokenizer: tokenize ## better

---
 src/tokenize.cpp      |  9 +++++++++
 test/testtokenize.cpp | 21 +++++++++++++++++++++
 2 files changed, 30 insertions(+)

diff --git a/src/tokenize.cpp b/src/tokenize.cpp
index 1ace705b4..9465abc78 100644
--- a/src/tokenize.cpp
+++ b/src/tokenize.cpp
@@ -243,6 +243,13 @@ void Tokenizer::tokenize(std::istream &code, const char FileName[])
 
         if (ch == '#' && CurrentToken.empty())
         {
+            // If previous token was "#" then append this to create a "##" token
+            if (Token::simpleMatch(_tokensBack, "#"))
+            {
+                _tokensBack->str("##");
+                continue;
+            }
+
             std::string line("#");
             {
                 char chPrev = '#';
@@ -254,7 +261,9 @@ void Tokenizer::tokenize(std::istream &code, const char FileName[])
                 if (ch != ' ')
                     chPrev = ch;
                 if (ch != '\\' && ch != '\n')
+                {
                     line += ch;
+                }
                 if (ch == '\n')
                     ++lineno;
             }
diff --git a/test/testtokenize.cpp b/test/testtokenize.cpp
index 383258df0..b8ba1aee3 100644
--- a/test/testtokenize.cpp
+++ b/test/testtokenize.cpp
@@ -69,6 +69,8 @@ private:
 
         TEST_CASE(file1);
         // TODO TEST_CASE(file2);
+
+        TEST_CASE(doublesharp);
     }
 
 
@@ -687,6 +689,25 @@ private:
             ASSERT_EQUALS(tok->str(), ostr.str());
         }
     }
+
+
+
+    void doublesharp()
+    {
+        const char code[] = "TEST(var,val) var##_##val = val\n";
+
+        // Tokenize..
+        Tokenizer tokenizer;
+        std::istringstream istr(code);
+        tokenizer.tokenize(istr, "");
+
+        // Stringify the tokens..
+        std::ostringstream ostr;
+        for (const Token *tok = tokenizer.tokens(); tok; tok = tok->next())
+            ostr << tok->str() << " ";
+
+        ASSERT_EQUALS("TEST ( var , val ) var ## _ ## val = val ", ostr.str());
+    }
 };
 
 REGISTER_TEST(TestTokenizer)
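
For context: "##" is the preprocessor token-pasting operator in C and C++, which is why the tokenizer must emit it as a single token rather than two consecutive "#" tokens; the change above achieves this by rewriting the previous "#" token to "##" when another "#" immediately follows it. A minimal, illustrative program showing the kind of pasting the new doublesharp test exercises is given below; the TEST macro mirrors the test's input text, while the surrounding program is an assumption added purely for demonstration:

    // Token pasting: var##_##val glues the expanded arguments into
    // the single identifier abc_123 during preprocessing.
    #define TEST(var, val) var##_##val = val

    int abc_123;

    int main()
    {
        TEST(abc, 123);   // expands to: abc_123 = 123;
        return abc_123;   // returns 123
    }

Note that the doublesharp test feeds this macro-style text straight to the Tokenizer without preprocessing, so the expected token stream keeps "##" intact as one token: "TEST ( var , val ) var ## _ ## val = val".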