diff --git a/src/token.cpp b/src/token.cpp
index 2b28f6854..593c6fc55 100644
--- a/src/token.cpp
+++ b/src/token.cpp
@@ -104,12 +104,19 @@ void Token::deleteThis()
 void Token::replace(Token *replaceThis, Token *start, Token *end)
 {
     // Fix the whole in the old location of start and end
-    start->previous()->next(end->next());
-    end->next()->previous(start->previous());
+    if (start->previous())
+        start->previous()->next(end->next());
+
+    if (end->next())
+        end->next()->previous(start->previous());
 
     // Move start and end to their new location
-    replaceThis->previous()->next(start);
-    replaceThis->next()->previous(end);
+    if (replaceThis->previous())
+        replaceThis->previous()->next(start);
+
+    if (replaceThis->next())
+        replaceThis->next()->previous(end);
+
     start->previous(replaceThis->previous());
     end->next(replaceThis->next());
 
diff --git a/test/testtokenize.cpp b/test/testtokenize.cpp
index 7a20b43af..f7d1c0210 100644
--- a/test/testtokenize.cpp
+++ b/test/testtokenize.cpp
@@ -1735,6 +1735,16 @@ private:
             ASSERT_EQUALS(false, tokenizer.tokenize(istr, "test.cpp"));
             ASSERT_EQUALS(std::string("[test.cpp:1]: (error) Invalid number of character ((). Can't process file.\n"), errout.str());
         }
+
+        {
+            errout.str("");
+            const char code[] = "namespace extract{\nB(weighted_moment)\n}\nusing extract::weighted_moment;\n";
+            Tokenizer tokenizer(s, this);
+            std::istringstream istr(code);
+            ASSERT_EQUALS(true, tokenizer.tokenize(istr, "test.cpp"));
+            tokenizer.simplifyTokenList();
+            ASSERT_EQUALS(std::string(""), errout.str());
+        }
     }
 };
 
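
For context, the sketch below reproduces the same splice-with-null-guards pattern on a simplified standalone node type. It is only an illustration of the failure mode the patch guards against (the replaced token or the spliced range sitting at the head or tail of the list, where previous()/next() legitimately return null); Node, replaceRange and the sample lists are assumptions for this sketch, not the real cppcheck Token API.

// Minimal sketch, not the real cppcheck Token class: a simplified doubly
// linked node type showing why the added null checks are needed when the
// operation touches the head or tail of a list.
#include <cassert>
#include <cstddef>

struct Node {
    Node *prev;
    Node *next;
    int value;
};

// Replace the single node 'replaceThis' with the range [start, end],
// guarding every neighbour access so head/tail positions do not crash.
static void replaceRange(Node *replaceThis, Node *start, Node *end)
{
    // Close the hole left behind at the old location of [start, end].
    if (start->prev)
        start->prev->next = end->next;
    if (end->next)
        end->next->prev = start->prev;

    // Hook [start, end] into the neighbours of 'replaceThis'.
    if (replaceThis->prev)
        replaceThis->prev->next = start;
    if (replaceThis->next)
        replaceThis->next->prev = end;

    start->prev = replaceThis->prev;
    end->next = replaceThis->next;
}

int main()
{
    // Main list: a <-> b <-> c. Range to splice in: x <-> y.
    Node a{nullptr, nullptr, 1};
    Node b{nullptr, nullptr, 2};
    Node c{nullptr, nullptr, 3};
    a.next = &b; b.prev = &a;
    b.next = &c; c.prev = &b;

    Node x{nullptr, nullptr, 10};
    Node y{nullptr, nullptr, 11};
    x.next = &y; y.prev = &x;

    // Replace the head node 'a'. Without the guards, a.prev and y.next are
    // null and the unconditional writes would dereference a null pointer.
    replaceRange(&a, &x, &y);

    assert(x.prev == nullptr);            // x is the new head
    assert(y.next == &b && b.prev == &y); // y is linked to the old tail part
    return 0;
}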