diff --git a/lib/tokenize.cpp b/lib/tokenize.cpp
index 5b25a2aeb..b0c8f88a2 100644
--- a/lib/tokenize.cpp
+++ b/lib/tokenize.cpp
@@ -1693,14 +1693,14 @@ bool Tokenizer::tokenize(std::istream &code, const char FileName[], const std::s
         Token::createMutualLinks(tok->tokAt(2), tok->tokAt(3));
     }

-    // Remove "volatile", "inline" and register
-    while (Token::Match(_tokens, "volatile|inline|register"))
+    // Remove "volatile", "inline", "__inline", "__forceinline" and "register"
+    while (Token::Match(_tokens, "volatile|inline|__inline|__forceinline|register"))
    {
        _tokens->deleteThis();
    }

    for (Token *tok = _tokens; tok; tok = tok->next())
    {
-        while (Token::Match(tok->next(), "volatile|inline|register"))
+        while (Token::Match(tok->next(), "volatile|inline|__inline|__forceinline|register"))
        {
            tok->deleteNext();
        }
diff --git a/test/testsimplifytokens.cpp b/test/testsimplifytokens.cpp
index 4f7389400..63cd5f472 100644
--- a/test/testsimplifytokens.cpp
+++ b/test/testsimplifytokens.cpp
@@ -5089,6 +5089,8 @@ private:
         ASSERT_EQUALS("int var ;", tok("register int var ;", true));
         ASSERT_EQUALS("short var ;", tok("register short int var ;", true));
         ASSERT_EQUALS("int foo ( ) { }", tok("inline int foo ( ) { }", true));
+        ASSERT_EQUALS("int foo ( ) { }", tok("__inline int foo ( ) { }", true));
+        ASSERT_EQUALS("int foo ( ) { }", tok("__forceinline int foo ( ) { }", true));
         ASSERT_EQUALS("if ( a ) { }", tok("if ( likely ( a ) ) { }", true));
         ASSERT_EQUALS("if ( a ) { }", tok("if ( unlikely ( a ) ) { }", true));
     }