diff --git a/src/tokenize.cpp b/src/tokenize.cpp
index 4e258c6f8..db1a7bfa6 100644
--- a/src/tokenize.cpp
+++ b/src/tokenize.cpp
@@ -433,7 +433,7 @@ void Tokenizer::tokenize(std::istream &code, const char FileName[])
     // Remove __asm..
     for (Token *tok = _tokens; tok; tok = tok->next())
     {
-        if (Token::simpleMatch(tok->next(), "__asm {"))
+        if (Token::Match(tok->next(), "__asm|asm {"))
         {
             while (tok->next())
             {
diff --git a/test/testtokenize.cpp b/test/testtokenize.cpp
index ff8bcd77c..5541130b6 100644
--- a/test/testtokenize.cpp
+++ b/test/testtokenize.cpp
@@ -225,34 +225,15 @@ private:
     void inlineasm()
     {
-        const char filedata[] = "void foo()\n"
-                                "{\n"
-                                "    __asm\n"
-                                "    {\n"
-                                "        jmp $jump1\n"
-                                "        $jump1:\n"
-                                "    }\n"
-                                "}\n";
-
-        // tokenize..
-        Tokenizer tokenizer;
-        std::istringstream istr(filedata);
-        tokenizer.tokenize(istr, "test.cpp");
-
-        // Expected result..
-        const char *expected[] = {
-            "void",
-            "foo",
-            "(",
-            ")",
-            "{",
-            "}",
-            0
-        };
+        {
+            const char code[] = "abc asm { mov ax,bx } def";
+            ASSERT_EQUALS("abc def", tokenizeAndStringify(code));
+        }
 
-        // Compare..
-        ASSERT_EQUALS(true, cmptok(expected, tokenizer.tokens()));
+        {
+            const char code[] = "abc __asm { mov ax,bx } def";
+            ASSERT_EQUALS("abc def", tokenizeAndStringify(code));
+        }
     }
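
For context, the new tests expect an asm { ... } or __asm { ... } block to vanish from the token stream entirely, so "abc asm { mov ax,bx } def" stringifies to "abc def". Below is a minimal standalone sketch of that behaviour, assuming a plain whitespace-tokenized string rather than cppcheck's Token list; stripInlineAsm is a hypothetical helper written for illustration, not project code. The patch itself does the equivalent on the Token list via Token::Match(tok->next(), "__asm|asm {").

#include <cassert>
#include <sstream>
#include <string>

// Hypothetical helper (not cppcheck code): drop "asm { ... }" and
// "__asm { ... }" blocks from a whitespace-separated token string.
static std::string stripInlineAsm(const std::string &code)
{
    std::istringstream in(code);
    std::string tok, result;
    while (in >> tok)
    {
        std::string next;
        if ((tok == "asm" || tok == "__asm") && in >> next && next == "{")
        {
            // Skip everything up to and including the matching "}".
            int depth = 1;
            while (depth > 0 && in >> tok)
            {
                if (tok == "{")
                    ++depth;
                else if (tok == "}")
                    --depth;
            }
            continue;
        }
        // Ordinary token: keep it (and the lookahead token, if one was read).
        if (!result.empty())
            result += ' ';
        result += tok;
        if (!next.empty())
            result += ' ' + next;
    }
    return result;
}

int main()
{
    // The same inputs the new unit tests feed to tokenizeAndStringify().
    assert(stripInlineAsm("abc asm { mov ax,bx } def") == "abc def");
    assert(stripInlineAsm("abc __asm { mov ax,bx } def") == "abc def");
    return 0;
}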