From 7b90246198ebfcf8a681a8b8a4dec05ccd1a4ccf Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Daniel=20Marjam=C3=A4ki?= <daniel.marjamaki@gmail.com>
Date: Mon, 19 Jan 2009 18:40:24 +0000
Subject: [PATCH] testtokenize: Added test "file1". It currently fails.

---
 test/testtokenize.cpp | 25 +++++++++++++++++++++++++
 1 file changed, 25 insertions(+)

diff --git a/test/testtokenize.cpp b/test/testtokenize.cpp
index 2d2494209..8cd076ded 100644
--- a/test/testtokenize.cpp
+++ b/test/testtokenize.cpp
@@ -66,6 +66,8 @@ private:
 
         TEST_CASE(varid1);
         TEST_CASE(varid2);
+
+        // TODO TEST_CASE(file1);
     }
 
 
@@ -629,6 +631,29 @@ private:
             ASSERT_EQUALS(0, tok->varId());
         }
     }
+
+
+    void file1()
+    {
+        const char code[] = "a1\n"
+                            "#file \"b\"\n"
+                            "b1\n"
+                            "b2\n"
+                            "#endfile\n"
+                            "a3\n";
+
+        // tokenize..
+        Tokenizer tokenizer;
+        std::istringstream istr(code);
+        tokenizer.tokenize(istr, "a");
+
+        for (const Token *tok = tokenizer.tokens(); tok; tok = tok->next())
+        {
+            std::ostringstream ostr;
+            ostr << char('a' + tok->fileIndex()) << tok->linenr();
+            ASSERT_EQUALS(tok->str(), ostr.str());
+        }
+    }
 };
 
 REGISTER_TEST(TestTokenizer)