Fixed #1396 (false positive: Resource leak)

This commit is contained in:
Daniel Marjamäki 2010-02-21 09:47:41 +01:00
parent e4a685c6e9
commit 459a3bac50
3 changed files with 52 additions and 0 deletions

View File

@ -1062,8 +1062,12 @@ bool Tokenizer::tokenize(std::istream &code, const char FileName[], const std::s
return false;
}
// specify array size..
arraySize();
// simplify labels..
labels();
simplifyDoWhileAddBraces();
simplifyIfAddBraces();
@ -1247,6 +1251,36 @@ void Tokenizer::arraySize()
}
}
/** simplify labels in the code.. add an ";" */
void Tokenizer::labels()
{
    for (Token *tok = _tokens; tok; tok = tok->next())
    {
        // A ") {" or ") const {" sequence marks the start of an executable
        // (function body) scope — labels only need simplification there.
        if (Token::Match(tok, ") const| {"))
        {
            // Simplify labels in the executable scope..
            unsigned int indentlevel = 0;
            while (0 != (tok = tok->next()))
            {
                // indentations..
                if (tok->str() == "{")
                    ++indentlevel;
                else if (tok->str() == "}")
                {
                    // leaving the function body => stop scanning this scope
                    if (indentlevel <= 1)
                        break;
                    --indentlevel;
                }

                // simplify label.. a "label : statement" gets an explicit
                // ";" inserted after the ":" so later passes see the label
                // as a separate statement.
                if (Token::Match(tok, "[;{}] %var% : %var%"))
                    tok->tokAt(2)->insertToken(";");
            }

            // The token list can end inside an unbalanced scope, leaving
            // tok null here; guard the outer loop's tok->next() increment
            // against a null-pointer dereference.
            if (!tok)
                break;
        }
    }
}
/**
* is the token pointing at a template parameters block..
@ -5059,6 +5093,8 @@ void Tokenizer::simplifyGoto()
tok->deleteThis();
tok->deleteThis();
if (Token::Match(tok, "; %any%"))
tok->deleteThis();
// This label is at the end of the function.. replace all matching goto statements..
for (std::list<Token *>::iterator it = gotos.begin(); it != gotos.end(); ++it)

View File

@ -142,6 +142,9 @@ private:
/** Insert array size where it isn't given */
void arraySize();
/** Simplify labels */
void labels();
/** Remove redundant assignment */
void removeRedundantAssignment();

View File

@ -198,6 +198,8 @@ private:
TEST_CASE(cpp0xtemplate);
TEST_CASE(arraySize);
TEST_CASE(labels);
}
@ -3075,6 +3077,17 @@ private:
{
ASSERT_EQUALS("; int a[3]={1,2,3};", arraySize_(";int a[]={1,2,3};"));
}
/** Tokenize the given code and return its stringified token list. */
std::string labels_(const std::string &code)
{
    // arraySize_ already performs exactly the tokenize+stringify we need,
    // so delegate to it for now.
    const std::string tokenized = arraySize_(code);
    return tokenized;
}
void labels()
{
    // a label statement such as "ab:" must get a terminating ";" inserted
    const char code[] = "void f() { ab: a=0; }";
    ASSERT_EQUALS(" void f(){ ab:; a=0;}", labels_(code));
}
};
REGISTER_TEST(TestTokenizer)