Update addons structure. Make Y2038 addon work. (#2024)
* Update addons structure. Make Y2038 addon work. All addons are now located in the same directory, where cppcheck-gui looks for them. The Y2038 addon has been updated to the latest cppcheck version. Its output is the same as in commit 303622f01c, as described in 303622f01c/addons/y2038/README.
* Clean up .travis.yml
* Update travis.yml paths.
* misra.py: Bring back -P argument for backward compatibility
* Fix paths
* Normalize dumpfile paths to cppcheck format
* Fix up 3 tests.
* Add arguments regression test.
* Fix travis build.
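For reference, the reworked Y2038 addon is now driven per dump file rather than per source tree; the usage lines below are taken from the updated header comment in y2038.py further down in this diff, with an illustrative file name:

$ cppcheck --dump path-to-src/test.c
$ y2038.py path-to-src/test.c.dump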
This commit is contained in: parent 3458692cc7, commit fbd7b5180b

.travis.yml (20 lines changed)
@@ -111,16 +111,16 @@ matrix:
 - cd ../../
 # check addons/misra.py
 - cd addons/test
-- ${CPPCHECK} --dump misra-test.c
+- ${CPPCHECK} --dump misra/misra-test.c
-- python3 ../misra.py -verify misra-test.c.dump
+- python3 ../misra.py -verify misra/misra-test.c.dump
-- ${CPPCHECK} --dump misra-test.cpp
+- ${CPPCHECK} --dump misra/misra-test.cpp
-- python3 ../misra.py -verify misra-test.cpp.dump
+- python3 ../misra.py -verify misra/misra-test.cpp.dump
-- python ../misra.py --rule-texts=misra2012_rules_dummy_ascii.txt -verify misra-test.cpp.dump
+- python ../misra.py --rule-texts=misra/misra2012_rules_dummy_ascii.txt -verify misra/misra-test.cpp.dump
-- python3 ../misra.py --rule-texts=misra2012_rules_dummy_ascii.txt -verify misra-test.cpp.dump
+- python3 ../misra.py --rule-texts=misra/misra2012_rules_dummy_ascii.txt -verify misra/misra-test.cpp.dump
-- python ../misra.py --rule-texts=misra2012_rules_dummy_utf8.txt -verify misra-test.cpp.dump
+- python ../misra.py --rule-texts=misra/misra2012_rules_dummy_utf8.txt -verify misra/misra-test.cpp.dump
-- python3 ../misra.py --rule-texts=misra2012_rules_dummy_utf8.txt -verify misra-test.cpp.dump
+- python3 ../misra.py --rule-texts=misra/misra2012_rules_dummy_utf8.txt -verify misra/misra-test.cpp.dump
-- python ../misra.py --rule-texts=misra2012_rules_dummy_windows1250.txt -verify misra-test.cpp.dump
+- python ../misra.py --rule-texts=misra/misra2012_rules_dummy_windows1250.txt -verify misra/misra-test.cpp.dump
-- python3 ../misra.py --rule-texts=misra2012_rules_dummy_windows1250.txt -verify misra-test.cpp.dump
+- python3 ../misra.py --rule-texts=misra/misra2012_rules_dummy_windows1250.txt -verify misra/misra-test.cpp.dump
 - cd ../../
 # check addons/naming.py
 - cd addons/test
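The relocated MISRA test files can be exercised locally the same way the Travis job above does; a rough sketch, where ../../cppcheck standing in for ${CPPCHECK} is an assumption about where the local binary was built:

cd addons/test
../../cppcheck --dump misra/misra-test.c
python3 ../misra.py -verify misra/misra-test.c.dump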
misra-test.h (new file)
@@ -0,0 +1,4 @@
+#ifndef MISRA_TEST_H
+#define MISRA_TEST_H
+struct misra_h_s { int foo; };
+#endif // MISRA_TEST_H
addons/test/test-misra.py
@@ -1,29 +1,14 @@
 # python -m pytest addons/test/test-misra.py
-import json
 import pytest
 import re
 import sys
-try:
-    from cStringIO import StringIO
-except ImportError:
-    from io import StringIO
 import subprocess

+from .util import dump_create, dump_remove, convert_json_output

-TEST_SOURCE_FILES = ['./addons/test/misra-test.c']
-
-
-def dump_create(fpath, *argv):
-    cmd = ["./cppcheck", "--dump", "--quiet", fpath] + list(argv)
-    p = subprocess.Popen(cmd)
-    p.communicate()
-    if p.returncode != 0:
-        raise OSError("cppcheck returns error code: %d" % p.returncode)
-    subprocess.Popen(["sync"])
-
-
-def dump_remove(fpath):
-    subprocess.Popen(["rm", "-f", fpath + ".dump"])
+TEST_SOURCE_FILES = ['./addons/test/misra/misra-test.c']


 def setup_module(module):
@@ -45,20 +30,20 @@ def checker():


 def test_loadRuleTexts_structure(checker):
-    checker.loadRuleTexts("./addons/test/assets/misra_rules_structure.txt")
+    checker.loadRuleTexts("./addons/test/misra/misra_rules_structure.txt")
     assert(checker.ruleTexts.get(101, None) is None)
     assert(checker.ruleTexts[102].text == "Rule text.")
     assert(checker.ruleTexts.get(103, None) is None)


 def test_loadRuleTexts_empty_lines(checker):
-    checker.loadRuleTexts("./addons/test/assets/misra_rules_empty_lines.txt")
+    checker.loadRuleTexts("./addons/test/misra/misra_rules_empty_lines.txt")
     assert(len(checker.ruleTexts) == 3)
     assert(len(checker.ruleTexts[102].text) == len("Rule text."))


 def test_loadRuleTexts_mutiple_lines(checker):
-    checker.loadRuleTexts("./addons/test/assets/misra_rules_multiple_lines.txt")
+    checker.loadRuleTexts("./addons/test/misra/misra_rules_multiple_lines.txt")
     assert(checker.ruleTexts[101].text == "Multiple lines text.")
     assert(checker.ruleTexts[102].text == "Multiple lines text.")
     assert(checker.ruleTexts[103].text == "Multiple lines text.")
@@ -68,7 +53,7 @@ def test_loadRuleTexts_mutiple_lines(checker):


 def test_verifyRuleTexts(checker, capsys):
-    checker.loadRuleTexts("./addons/test/assets/misra_rules_dummy.txt")
+    checker.loadRuleTexts("./addons/test/misra/misra_rules_dummy.txt")
     checker.verifyRuleTexts()
     captured = capsys.readouterr().out
     assert("21.3" not in captured)
@@ -76,7 +61,7 @@ def test_verifyRuleTexts(checker, capsys):


 def test_rules_misra_severity(checker):
-    checker.loadRuleTexts("./addons/test/assets/misra_rules_dummy.txt")
+    checker.loadRuleTexts("./addons/test/misra/misra_rules_dummy.txt")
     assert(checker.ruleTexts[1004].misra_severity == 'Mandatory')
     assert(checker.ruleTexts[401].misra_severity == 'Required')
     assert(checker.ruleTexts[1505].misra_severity == 'Advisory')
@@ -85,26 +70,20 @@ def test_rules_misra_severity(checker):

 def test_json_out(checker, capsys):
     sys.argv.append("--cli")
-    checker.loadRuleTexts("./addons/test/assets/misra_rules_dummy.txt")
+    checker.loadRuleTexts("./addons/test/misra/misra_rules_dummy.txt")
-    checker.parseDump("./addons/test/misra-test.c.dump")
+    checker.parseDump("./addons/test/misra/misra-test.c.dump")
     captured = capsys.readouterr()
     captured = captured.out.splitlines()
     sys.argv.remove("--cli")
-    json_output = {}
-    for line in captured:
-        try:
-            json_line = json.loads(line)
-            json_output[json_line['errorId']] = json_line
-        except ValueError:
-            pass
-    assert("Mandatory" in json_output['c2012-10.4']['extra'])
-    assert("Required" in json_output['c2012-21.3']['extra'])
-    assert("Advisory" in json_output['c2012-20.1']['extra'])
+    json_output = convert_json_output(captured)
+    assert("Mandatory" in json_output['c2012-10.4'][0]['extra'])
+    assert("Required" in json_output['c2012-21.3'][0]['extra'])
+    assert("Advisory" in json_output['c2012-20.1'][0]['extra'])


 def test_rules_cppcheck_severity(checker, capsys):
-    checker.loadRuleTexts("./addons/test/assets/misra_rules_dummy.txt")
+    checker.loadRuleTexts("./addons/test/misra/misra_rules_dummy.txt")
-    checker.parseDump("./addons/test/misra-test.c.dump")
+    checker.parseDump("./addons/test/misra/misra-test.c.dump")
     captured = capsys.readouterr().err
     assert("(error)" not in captured)
     assert("(warning)" not in captured)
@@ -112,13 +91,13 @@ def test_rules_cppcheck_severity(checker, capsys):


 def test_rules_suppression(checker, capsys):
-    test_sources = ["addons/test/misra-suppressions1-test.c",
-                    "addons/test/misra-suppressions2-test.c"]
+    test_sources = ["addons/test/misra/misra-suppressions1-test.c",
+                    "addons/test/misra/misra-suppressions2-test.c"]

     for src in test_sources:
         re_suppressed= r"\[%s\:[0-9]+\]" % src
         dump_remove(src)
-        dump_create(src, "--suppressions-list=addons/test/suppressions.txt")
+        dump_create(src, "--suppressions-list=addons/test/misra/suppressions.txt")
         checker.parseDump(src + ".dump")
         captured = capsys.readouterr().err
         found = re.search(re_suppressed, captured)
addons/test/test-y2038.py (new file)
@@ -0,0 +1,105 @@
# python -m pytest addons/test/test-y2038.py

import sys
import pytest

from addons.y2038 import check_y2038_safe

from .util import dump_create, dump_remove, convert_json_output


TEST_SOURCE_FILES = ['./addons/test/y2038/y2038-test-1-bad-time-bits.c',
                     './addons/test/y2038/y2038-test-2-no-time-bits.c',
                     './addons/test/y2038/y2038-test-3-no-use-time-bits.c',
                     './addons/test/y2038/y2038-test-4-good.c']


def setup_module(module):
    sys.argv.append("--cli")
    for f in TEST_SOURCE_FILES:
        dump_create(f)


def teardown_module(module):
    sys.argv.remove("--cli")
    for f in TEST_SOURCE_FILES:
        dump_remove(f)


def test_1_bad_time_bits(capsys):
    check_y2038_safe('./addons/test/y2038/y2038-test-1-bad-time-bits.c.dump', quiet=True)
    captured = capsys.readouterr()
    captured = captured.out.splitlines()
    json_output = convert_json_output(captured)

    # Has exactly one warning each of the _TIME_BITS and _USE_TIME_BITS64 kind.
    assert(len(json_output['type-bits-undef']) == 1)
    assert(len(json_output['type-bits-not-64']) == 1)

    # There are 2 unsafe calls in the test source and 3 in y2038-in.h
    unsafe_calls = json_output['unsafe-call']
    assert(len([c for c in unsafe_calls if c['file'].endswith('h')]) == 3)
    assert(len([c for c in unsafe_calls if c['file'].endswith('c')]) == 0)


def test_2_no_time_bits(capsys):
    check_y2038_safe('./addons/test/y2038/y2038-test-2-no-time-bits.c.dump', quiet=True)
    captured = capsys.readouterr()
    captured = captured.out.splitlines()
    json_output = convert_json_output(captured)

    # _USE_TIME_BITS64 is defined in the y2038-inc.h header, but there is no
    # _TIME_BITS definition. An appropriate warning must be emitted here.
    assert(len(json_output['type-bits-undef']) == 1)
    assert(json_output.get('type-bits-not-64') is None)

    # y2038-in.h still has y2038-unsafe calls.
    unsafe_calls = json_output['unsafe-call']
    assert(len([c for c in unsafe_calls if c['file'].endswith('h')]) == 3)


def test_3_no_use_time_bits(capsys):
    check_y2038_safe('./addons/test/y2038/y2038-test-3-no-use-time-bits.c.dump', quiet=True)
    captured = capsys.readouterr()
    captured = captured.out.splitlines()
    json_output = convert_json_output(captured)

    # The included bad _USE_TIME_BITS64 definition must trigger the errors.
    unsafe_calls = json_output['unsafe-call']
    assert(len(unsafe_calls) == 2)


def test_4_good(capsys):
    check_y2038_safe('./addons/test/y2038/y2038-test-4-good.c.dump', quiet=True)
    captured = capsys.readouterr()
    captured = captured.out.splitlines()
    json_output = convert_json_output(captured)

    # _TIME_BITS is defined equal to 64, so glibc knows we want Y2038 support.
    # There are no warnings from the C sources.
    unsafe_calls = json_output['unsafe-call']
    assert(len([c for c in unsafe_calls if c['file'].endswith('.c')]) == 0)


def test_arguments_regression():
    args_ok = ["-t=foo", "--template=foo",
               "-q", "--quiet",
               "--cli"]
    # Arguments with expected SystemExit
    args_exit = ["--non-exists", "--non-exists-param=42", "-h", "--help"]

    from addons.y2038 import get_args

    for arg in args_exit:
        sys.argv.append(arg)
        with pytest.raises(SystemExit):
            get_args()
        sys.argv.remove(arg)

    for arg in args_ok:
        sys.argv.append(arg)
        try:
            get_args()
        except SystemExit:
            pytest.fail("Unexpected SystemExit with '%s'" % arg)
        sys.argv.remove(arg)
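Note that setup_module() above creates the dump files through dump_create(), which (see addons/test/util.py below) runs ./cppcheck relative to the current working directory; the suite therefore assumes a cppcheck binary built in the repository root and is invoked from there, e.g.:

python -m pytest addons/test/test-y2038.py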
addons/test/util.py (new file)
@@ -0,0 +1,28 @@
# Helpers for pytest tests
import subprocess
import json


def dump_create(fpath, *argv):
    cmd = ["./cppcheck", "--dump", "--quiet", fpath] + list(argv)
    p = subprocess.Popen(cmd)
    p.communicate()
    if p.returncode != 0:
        raise OSError("cppcheck returns error code: %d" % p.returncode)
    subprocess.Popen(["sync"])


def dump_remove(fpath):
    subprocess.Popen(["rm", "-f", fpath + ".dump"])


def convert_json_output(raw_json_strings):
    """Convert raw stdout/stderr cppcheck JSON output to python dict."""
    json_output = {}
    for line in raw_json_strings:
        try:
            json_line = json.loads(line)
            # json_output[json_line['errorId']] = json_line
            json_output.setdefault(json_line['errorId'], []).append(json_line)
        except ValueError:
            pass
    return json_output
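A small sketch of how convert_json_output() is used by the tests above; the JSON line is illustrative and shows only the errorId, file and extra fields the tests rely on (real addon output carries additional fields):

lines = ['{"file": "misra-test.c", "errorId": "c2012-10.4", "extra": "Mandatory"}', 'Checking ...']
out = convert_json_output(lines)
# out == {'c2012-10.4': [{'file': 'misra-test.c', 'errorId': 'c2012-10.4', 'extra': 'Mandatory'}]}
# The non-JSON 'Checking ...' line is silently skipped by the except ValueError branch.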
y2038.py
@@ -9,53 +9,17 @@
 # 3. Any Y2038-unsafe symbol when _USE_TIME_BITS64 is not defined.
 #
 # Example usage:
-# $ cppcheck --dump path-to-src/
-# $ y2038.py path-to-src/
+# $ cppcheck --dump path-to-src/test.c
+# $ y2038.py path-to-src/test.c.dump
 #
 # y2038.py will walk the source tree for .dump files.

+from __future__ import print_function
+
 import cppcheckdata
 import sys
 import os
 import re

-# --------------
-# Error handling
-# --------------
-
-diagnostics = {}
-
-
-def reportDiagnostic(template, configuration, file, line, severity, message):
-    # collect diagnostics by configuration
-    if configuration not in diagnostics:
-        diagnostics[configuration] = []
-    # add error to this configuration
-    diagnostics[configuration].append(
-        cppcheckdata.reportError(template, [[file, line]], severity, message))
-
-
-def printDiagnostics():
-    for cfg in diagnostics:
-        sys.stderr.write('# Configuration "' + cfg + '":\n')
-        for diag in diagnostics[cfg]:
-            sys.stderr.write(diag + '\n')
-
-
-def reportDirDiag(template, cfg, filename, linenr, directive, severity, msg):
-    reportDiagnostic(template, cfg.name,
-                     directive.file, directive.linenr,
-                     severity, msg)
-    if (filename != directive.file) or (linenr != directive.linenr):
-        reportDiagnostic(template, cfg.name,
-                         filename, linenr, 'information',
-                         directive.file + ' was included from here')
-
-
-def reportTokDiag(template, cfg, token, severity, msg):
-    reportDiagnostic(template, cfg.name,
-                     token.file, token.linenr,
-                     severity, msg)
-
 # --------------------------------------------
 # #define/#undef detection regular expressions
@@ -186,57 +150,29 @@ id_Y2038 = {
 }


-# return all files ending in .dump among or under the given paths
-def find_dump_files(paths):
-    dumpfiles = []
-    for path in paths:
-        if path.endswith('.dump'):
-            if path not in dumpfiles:
-                dumpfiles.append(path)
-        else:
-            for (top, subdirs, files) in os.walk(path):
-                for file in files:
-                    if file.endswith('.dump'):
-                        f = top + '/' + file
-                        if f not in dumpfiles:
-                            dumpfiles.append(f)
-    dumpfiles.sort()
-    return dumpfiles
-
-
-# -----------------
-# Let's get to work
-# -----------------
-
-# extend cppcheck parser with our own options
-parser = cppcheckdata.ArgumentParser()
-parser.add_argument('-q', '--quiet', action='store_true',
-                    help='do not print "Checking ..." lines')
-parser.add_argument('paths', nargs='+', metavar='path',
-                    help='path to dump file or directory')
-
-# parse command line
-args = parser.parse_args()
-
-# now operate on each file in turn
-dumpfiles = find_dump_files(args.paths)
-
-for dumpfile in dumpfiles:
-    if not args.quiet:
-        print('Checking ' + dumpfile + '...')
-    srcfile = dumpfile.rstrip('.dump')
+def check_y2038_safe(dumpfile, quiet=False):
     # at the start of the check, we don't know if code is Y2038 safe
     y2038safe = False
     # load XML from .dump file
     data = cppcheckdata.parsedump(dumpfile)

+    # Convert dump file path to source file in format generated by cppcheck.
+    # For example after the following call:
+    # cppcheck ./src/my-src.c --dump
+    # We got 'src/my-src.c' value for 'file' field in cppcheckdata.
+    srcfile = dumpfile.rstrip('.dump')
+    srcfile = os.path.expanduser(srcfile)
+    srcfile = os.path.normpath(srcfile)
+
     # go through each configuration
     for cfg in data.configurations:
-        if not args.quiet:
-            print('Checking ' + dumpfile + ', config "' + cfg.name + '"...')
+        if not quiet:
+            print('Checking ' + srcfile + ', config "' + cfg.name + '"...')
         safe_ranges = []
         safe = -1
         time_bits_defined = False
         srclinenr = '0'

         for directive in cfg.directives:
             # track source line number
             if directive.file == srcfile:
@@ -245,10 +181,12 @@ for dumpfile in dumpfiles:
             if re_define_time_bits_64.match(directive.str):
                 time_bits_defined = True
             elif re_define_time_bits.match(directive.str):
-                reportDirDiag(args.template, cfg, srcfile, srclinenr,
-                              directive, 'error',
-                              '_TIME_BITS must be defined equal to 64')
+                cppcheckdata.reportError(directive, 'error',
+                                         '_TIME_BITS must be defined equal to 64',
+                                         'y2038',
+                                         'type-bits-not-64')
                 time_bits_defined = False
+                y2038safe = False
             elif re_undef_time_bits.match(directive.str):
                 time_bits_defined = False
             # check for _USE_TIME_BITS64 (un)definition
@@ -256,27 +194,66 @@ for dumpfile in dumpfiles:
                 safe = int(srclinenr)
                 # warn about _TIME_BITS not being defined
                 if not time_bits_defined:
-                    reportDirDiag(args.template,
-                                  cfg, srcfile, srclinenr, directive, 'warning',
-                                  '_USE_TIME_BITS64 is defined but _TIME_BITS was not')
+                    cppcheckdata.reportError(directive, 'warning',
+                                             '_USE_TIME_BITS64 is defined but _TIME_BITS was not',
+                                             'y2038',
+                                             'type-bits-undef')
             elif re_undef_use_time_bits64.match(directive.str):
                 unsafe = int(srclinenr)
                 # do we have a safe..unsafe area?
                 if unsafe > safe > 0:
                     safe_ranges.append((safe, unsafe))
                     safe = -1

         # check end of source beyond last directive
         if len(cfg.tokenlist) > 0:
             unsafe = int(cfg.tokenlist[-1].linenr)
             if unsafe > safe > 0:
                 safe_ranges.append((safe, unsafe))

         # go through all tokens
         for token in cfg.tokenlist:
             if token.str in id_Y2038:
                 if not any(lower <= int(token.linenr) <= upper
                            for (lower, upper) in safe_ranges):
-                    reportTokDiag(args.template, cfg, token, 'warning',
-                                  token.str + ' is Y2038-unsafe')
+                    cppcheckdata.reportError(token, 'warning',
+                                             token.str + ' is Y2038-unsafe',
+                                             'y2038',
+                                             'unsafe-call')
+                    y2038safe = False
             token = token.next

-printDiagnostics()
+    return y2038safe
+
+
+def get_args():
+    parser = cppcheckdata.ArgumentParser()
+    parser.add_argument("dumpfile", nargs='*', help="Path of dump file from cppcheck")
+    parser.add_argument('-q', '--quiet', action='store_true',
+                        help='do not print "Checking ..." lines')
+    parser.add_argument('--cli', help='Addon is executed from Cppcheck', action='store_true')
+    return parser.parse_args()
+
+
+if __name__ == '__main__':
+    args = get_args()
+
+    exit_code = 0
+    quiet = not any((args.quiet, args.cli))
+
+    if args.dumpfile:
+        for dumpfile in args.dumpfile:
+            if not os.path.isfile(dumpfile):
+                print("Error: File not found: %s" % dumpfile)
+                sys.exit(127)
+            if not os.access(dumpfile, os.R_OK):
+                print("Error: Permission denied: %s" % dumpfile)
+                sys.exit(13)
+            if not args.quiet:
+                print('Checking ' + dumpfile + '...')
+
+            y2038safe = check_y2038_safe(dumpfile, quiet)
+            if not y2038safe and exit_code == 0:
+                exit_code = 1
+
+        sys.exit(exit_code)