diff --git a/src/tokenize.cpp b/src/tokenize.cpp
index db1a7bfa6..eacd46069 100644
--- a/src/tokenize.cpp
+++ b/src/tokenize.cpp
@@ -433,7 +433,7 @@ void Tokenizer::tokenize(std::istream &code, const char FileName[])
     // Remove __asm..
     for (Token *tok = _tokens; tok; tok = tok->next())
     {
-        if (Token::Match(tok->next(), "__asm|asm {"))
+        if (Token::Match(tok->next(), "__asm|_asm|asm {"))
         {
             while (tok->next())
             {
diff --git a/test/testtokenize.cpp b/test/testtokenize.cpp
index 5541130b6..5d16e981f 100644
--- a/test/testtokenize.cpp
+++ b/test/testtokenize.cpp
@@ -230,6 +230,11 @@ private:
             ASSERT_EQUALS("abc def", tokenizeAndStringify(code));
         }
 
+        {
+            const char code[] = "abc _asm { mov ax,bx } def";
+            ASSERT_EQUALS("abc def", tokenizeAndStringify(code));
+        }
+
         {
             const char code[] = "abc __asm { mov ax,bx } def";
             ASSERT_EQUALS("abc def", tokenizeAndStringify(code));