Try to address some Coverity issues. Add TODO testcase for #5783. Introduce TODO_ASSERT macro.

Alexander Mai 2015-07-19 15:03:02 +02:00
parent f1f46611d6
commit 8b97f04de4
4 changed files with 36 additions and 8 deletions

View File

@@ -595,21 +595,21 @@ static void PrintCallstack(FILE* f, PEXCEPTION_POINTERS ex)
static void writeMemoryErrorDetails(FILE* f, PEXCEPTION_POINTERS ex, const char* description)
{
fputs(description, f);
fprintf(f, " (instruction: 0x%p) ", ex->ExceptionRecord->ExceptionAddress);
fprintf(f, " (instruction: 0x%X) ", ex->ExceptionRecord->ExceptionAddress);
// Using %p for ULONG_PTR later on, so it must have size identical to size of pointer
// This is not the universally portable solution but good enough for Win32/64
C_ASSERT(sizeof(void*) == sizeof(ex->ExceptionRecord->ExceptionInformation[1]));
switch (ex->ExceptionRecord->ExceptionInformation[0]) {
case 0:
fprintf(f, "reading from 0x%p",
fprintf(f, "reading from 0x%X",
ex->ExceptionRecord->ExceptionInformation[1]);
break;
case 1:
fprintf(f, "writing to 0x%p",
fprintf(f, "writing to 0x%X",
ex->ExceptionRecord->ExceptionInformation[1]);
break;
case 8:
fprintf(f, "data execution prevention at 0x%p",
fprintf(f, "data execution prevention at 0x%X",
ex->ExceptionRecord->ExceptionInformation[1]);
break;
default:
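
Note on the format specifiers in this hunk: ExceptionRecord->ExceptionInformation[1] is a ULONG_PTR, an integer wide enough to hold a pointer, which is the assumption the C_ASSERT above pins down before the value is printed with a pointer-style specifier. A standalone sketch of the same idea in portable C++, using uintptr_t and PRIxPTR instead of the Win32 types, so it is an illustration rather than the commit's code:

    #include <cinttypes> // PRIxPTR
    #include <cstdint>   // uintptr_t
    #include <cstdio>

    // Print an address carried in an integer variable. PRIxPTR matches
    // uintptr_t exactly, avoiding the mismatch that "%X" (expects
    // unsigned int) or a bare "%p" (expects void*) would cause for a
    // pointer-sized integer argument.
    static void printFaultAddress(std::FILE* f, std::uintptr_t addr)
    {
        static_assert(sizeof(std::uintptr_t) >= sizeof(void*),
                      "uintptr_t must be able to hold a pointer value");
        std::fprintf(f, "reading from 0x%" PRIxPTR "\n", addr);
    }

    int main()
    {
        int local = 0;
        printFaultAddress(stdout, reinterpret_cast<std::uintptr_t>(&local));
    }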

View File

@@ -1730,7 +1730,7 @@ bool Tokenizer::tokenize(std::istream &code,
_configuration = configuration;
if (!list.createTokens(code, Path::getRelativePath(Path::simplifyPath(FileName), _settings->_basePaths))) {
cppcheckError(0);
cppcheckError(nullptr);
return false;
}
@@ -1768,7 +1768,7 @@ bool Tokenizer::tokenizeCondition(const std::string &code)
{
std::istringstream istr(code);
if (!list.createTokens(istr)) {
cppcheckError(0);
cppcheckError(nullptr);
return false;
}
}
@@ -8773,9 +8773,10 @@ void Tokenizer::simplifyComma()
break;
}
}
if (!startFrom)
// to be very sure...
return;
std::size_t commaCounter = 0;
for (Token *tok2 = startFrom->next(); tok2; tok2 = tok2->next()) {
if (tok2->str() == ";") {
endAt = tok2;
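
Note on the startFrom guard shown above: static analyzers such as Coverity warn when a pointer that may still be null reaches a dereference, and startFrom->next() in the following loop is exactly such a spot, so the early return makes the precondition explicit. A minimal standalone illustration of the pattern with made-up names (not Tokenizer code):

    // Hypothetical singly linked list node, used only for this sketch.
    struct Node {
        const Node* next;
        int value;
    };

    // Sum the values that follow 'startFrom'. The early return avoids
    // dereferencing a null 'startFrom' in the loop below, which is the
    // kind of path a static analyzer flags.
    static int sumAfter(const Node* startFrom)
    {
        if (!startFrom)
            return 0; // to be very sure, as the hunk above puts it
        int sum = 0;
        for (const Node* n = startFrom->next; n; n = n->next)
            sum += n->value;
        return sum;
    }

    int main()
    {
        const Node tail = {nullptr, 2};
        const Node head = {&tail, 1};
        return sumAfter(&head) == 2 ? 0 : 1; // only 'tail' is counted
    }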

View File

@@ -90,6 +90,7 @@ extern std::ostringstream warnings;
#define ASSERT_EQUALS_DOUBLE( EXPECTED , ACTUAL ) assertEqualsDouble(__FILE__, __LINE__, EXPECTED, ACTUAL)
#define ASSERT_EQUALS_MSG( EXPECTED , ACTUAL, MSG ) assertEquals(__FILE__, __LINE__, EXPECTED, ACTUAL, MSG)
#define ASSERT_THROW( CMD, EXCEPTION ) try { CMD ; assertThrowFail(__FILE__, __LINE__); } catch (const EXCEPTION&) { } catch (...) { assertThrowFail(__FILE__, __LINE__); }
#define TODO_ASSERT( CONDITION ) { bool condition=CONDITION; todoAssertEquals(__FILE__, __LINE__, true, false, condition); }
#define TODO_ASSERT_EQUALS( WANTED , CURRENT , ACTUAL ) todoAssertEquals(__FILE__, __LINE__, WANTED, CURRENT, ACTUAL)
#define REGISTER_TEST( CLASSNAME ) namespace { CLASSNAME instance; }
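
Note on the new TODO_ASSERT macro: like TODO_ASSERT_EQUALS it records an expectation that is known not to hold yet, so a still-failing condition is counted as a TODO instead of an error, and a condition that unexpectedly starts holding gets reported so the check can be promoted to a plain ASSERT. A self-contained sketch of that behaviour, with a simplified stand-in for todoAssertEquals rather than cppcheck's real TestFixture:

    #include <cstdio>

    static int todoCounter = 0;

    // Simplified stand-in: 'wanted' is the eventual goal, 'current' the
    // value expected today, 'actual' the value just observed.
    static void todoAssertEquals(const char* file, int line,
                                 bool wanted, bool current, bool actual)
    {
        if (actual == wanted)
            std::printf("%s:%d: TODO condition now holds, promote it to ASSERT\n", file, line);
        else if (actual == current)
            ++todoCounter; // known failure: counted as TODO, not as an error
    }

    #define TODO_ASSERT(CONDITION) \
        { bool condition = (CONDITION); todoAssertEquals(__FILE__, __LINE__, true, false, condition); }

    int main()
    {
        TODO_ASSERT(1 + 1 == 3); // known-broken expectation, counted as TODO
        TODO_ASSERT(1 + 1 == 2); // already true, reported for promotion
        std::printf("TODO count: %d\n", todoCounter);
    }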

View File

@@ -474,6 +474,7 @@ private:
TEST_CASE(sizeofAddParentheses);
TEST_CASE(incompleteTernary); // #6659
TEST_CASE(noreturn); // #5783
}
std::string tokenizeAndStringify(const char code[], bool simplify = false, bool expand = true, Settings::PlatformType platform = Settings::Unspecified, const char* filename = "test.cpp", bool cpp11 = true) {
@@ -8772,6 +8773,31 @@ private:
tokenizeAndStringify(code, true);
}
// see #5783
void noreturn() {
const char code[] = "void myassert() {\n"
" exit(1);\n"
"}\n"
"void f(char *buf) {\n"
" if(i==0) {\n"
" free(buf);\n"
" myassert();\n"
" }\n"
" free(buf);\n"
"}\n";
Settings settings;
// tokenize..
Tokenizer tokenizer(&settings, this);
std::istringstream istr(code);
tokenizer.tokenize(istr, "test.cpp");
const Token * func = Token::findsimplematch(tokenizer.tokens(), "myassert");
TODO_ASSERT(func && func->isAttributeNoreturn());
}
};
REGISTER_TEST(TestTokenizer)