testtokenize: Added test "file1". It currently fails.

Daniel Marjamäki 2009-01-19 18:40:24 +00:00
parent e90857ce0d
commit 7b90246198
1 changed file with 25 additions and 0 deletions

@@ -66,6 +66,8 @@ private:
        TEST_CASE(varid1);
        TEST_CASE(varid2);
        // TODO TEST_CASE(file1);
    }
@@ -629,6 +631,29 @@ private:
            ASSERT_EQUALS(0, tok->varId());
        }
    }

    void file1()
    {
        const char code[] = "a1\n"
                            "#file \"b\"\n"
                            "b1\n"
                            "b2\n"
                            "#endfile\n"
                            "a3\n";

        // tokenize..
        Tokenizer tokenizer;
        std::istringstream istr(code);
        tokenizer.tokenize(istr, "a");

        for (const Token *tok = tokenizer.tokens(); tok; tok = tok->next())
        {
            std::ostringstream ostr;
            ostr << char('a' + tok->fileIndex()) << tok->linenr();
            ASSERT_EQUALS(tok->str(), ostr.str());
        }
    }
};
REGISTER_TEST(TestTokenizer)
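
For reference, the assertion loop in file1() encodes each token's expected location in the token text itself: char('a' + fileIndex()) followed by linenr(). The standalone sketch below tabulates the mapping the test expects, assuming cppcheck's #file "b" / #endfile markers give the enclosed tokens a second file index (1, printed as 'b') while the surrounding tokens keep file index 0 and the outer file's line numbering. The Expected struct and the main() harness are illustrative only and are not part of the commit.

    #include <cassert>
    #include <string>
    #include <vector>

    int main()
    {
        // Expected (token, file index, line) triples for the input used in file1().
        struct Expected { std::string token; int fileIndex; int line; };
        const std::vector<Expected> expected = {
            { "a1", 0, 1 },  // before #file: file "a", line 1
            { "b1", 1, 1 },  // inside #file "b": line 1 of the included file
            { "b2", 1, 2 },  // inside #file "b": line 2 of the included file
            { "a3", 0, 3 },  // after #endfile: back in file "a", line 3
        };

        // Same check the test performs: each token string must equal
        // char('a' + fileIndex) followed by the line number.
        for (const Expected &e : expected)
        {
            const std::string encoded =
                std::string(1, char('a' + e.fileIndex)) + std::to_string(e.line);
            assert(encoded == e.token);
        }
        return 0;
    }

As the commit message and the TODO comment note, the real test does not pass yet, so it is registered only as a commented-out TEST_CASE.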