2017-06-04 22:51:48 +02:00
|
|
|
#!/usr/bin/env python
|
2013-10-12 14:02:03 +02:00
|
|
|
#
|
|
|
|
# 1. Create a folder daca2 in your HOME folder
|
|
|
|
# 2. Put cppcheck-O2 in daca2. It should be built with all optimisations.
|
2013-10-19 13:46:15 +02:00
|
|
|
# 3. Optional: Put a file called "suppressions.txt" in the daca2 folder.
|
|
|
|
# 4. Optional: tweak FTPSERVER and FTPPATH in this script below.
|
|
|
|
# 5. Run the daca2 script: python daca2.py FOLDER
|
2013-10-12 14:02:03 +02:00
|
|
|
|
2016-11-28 04:22:30 +01:00
|
|
|
import argparse
|
2013-10-12 14:02:03 +02:00
|
|
|
import subprocess
|
|
|
|
import sys
|
|
|
|
import shutil
|
|
|
|
import glob
|
|
|
|
import os
|
2013-10-13 11:47:51 +02:00
|
|
|
import datetime
|
2013-10-13 14:13:10 +02:00
|
|
|
import time
|
2016-11-28 05:13:36 +01:00
|
|
|
import logging
|
2013-10-12 14:02:03 +02:00
|
|
|
|
2017-06-04 22:51:48 +02:00
|
|
|
# Debian mirrors to download from, tried in order (see wget() below).
DEBIAN = ('ftp://ftp.se.debian.org/debian/',
          'ftp://ftp.debian.org/debian/')
|
2013-10-26 11:47:15 +02:00
|
|
|
|
|
|
|
|
|
|
|
def wget(filepath):
    """Download *filepath* from the Debian mirrors into the current folder.

    Each mirror in DEBIAN is tried in turn; the file is saved under its
    basename. Returns True as soon as one mirror delivers the file,
    False when all mirrors failed.
    """
    # wget -O writes to the basename only, so strip any directory part.
    filename = filepath.rsplit('/', 1)[-1]
    for mirror in DEBIAN:
        subprocess.call(
            ['nice', 'wget', '--tries=10', '--timeout=300', '-O', filename, mirror + filepath])
        if os.path.isfile(filename):
            return True
        # Pause briefly before falling back to the next mirror.
        print('Sleep for 10 seconds..')
        time.sleep(10)
    return False
|
2013-10-19 13:46:15 +02:00
|
|
|
|
2013-10-20 12:45:05 +02:00
|
|
|
|
2013-10-19 13:46:15 +02:00
|
|
|
def getpackages(folder):
    """Return the list of .orig.tar.* archive paths for a debian pool folder.

    Downloads and parses the mirror's ls-lR index; *folder* is the
    subfolder of pool/main/ to collect (e.g. 'a', 'libz').
    Returns an empty list when the index could not be downloaded.
    """
    if not wget('ls-lR.gz'):
        return []
    subprocess.call(['nice', 'gunzip', 'ls-lR.gz'])
    # Use a context manager so the handle is closed even if reading fails
    # (the original leaked the handle on an exception between open/close).
    with open('ls-lR', 'rt') as f:
        lines = f.readlines()
    # os.remove instead of spawning an external 'rm' process.
    os.remove('ls-lR')

    # ls-lR format: a './path:' header line, then one entry per file,
    # separated by blank lines. Track the current path and the last
    # .orig.tar.* filename seen inside it.
    path = None
    archives = []
    filename = None
    for line in lines:
        line = line.strip()
        if len(line) < 4:
            # Blank separator: flush the pending archive, if any.
            if filename:
                archives.append(path + '/' + filename)
            path = None
            filename = None
        elif line[:13 + len(folder)] == './pool/main/' + folder + '/':
            # Directory header, e.g. './pool/main/a/abc:' -> 'pool/main/a/abc'
            path = line[2:-1]
        elif path and '.orig.tar.' in line:
            # Entry line: the filename is the last whitespace-separated field.
            filename = line[1 + line.rfind(' '):]

    for a in archives:
        print(a)

    return archives
|
2013-10-12 16:55:21 +02:00
|
|
|
|
2013-10-18 17:35:59 +02:00
|
|
|
|
2013-10-14 15:49:11 +02:00
|
|
|
def handleRemoveReadonly(func, path, exc):
    """shutil.rmtree onerror callback: clear a read-only bit and retry.

    Only acts when the failure looks like an access error (the path is
    not writable); otherwise it does nothing.
    """
    import stat
    if os.access(path, os.W_OK):
        return
    # Make the path writable by the owner, then retry the failed operation.
    os.chmod(path, stat.S_IWUSR)
    func(path)
|
2013-10-12 14:02:03 +02:00
|
|
|
|
2013-10-18 17:35:59 +02:00
|
|
|
|
2013-10-12 14:02:03 +02:00
|
|
|
def removeAllExceptResults():
    """Delete everything in the current directory except RESULTS_FILENAME.

    Each entry is retried up to 5 times with a 30 second pause, because
    (on Windows) files can stay locked for a while; a final failure is
    logged but does not abort the run.
    """
    filenames = []
    filenames.extend(glob.glob('[A-Za-z0-9]*'))
    # Hidden files/folders (e.g. '.pc' from quilt) as well.
    filenames.extend(glob.glob('.[a-z]*'))

    for filename in filenames:
        count = 5
        while count > 0:
            count -= 1

            try:
                if os.path.isdir(filename):
                    shutil.rmtree(filename, onerror=handleRemoveReadonly)
                elif filename != RESULTS_FILENAME:
                    os.remove(filename)
                break
            # The original caught WindowsError first, which is a NameError
            # on non-Windows platforms; on Windows, WindowsError is an
            # alias of OSError, so one handler covers both.
            except OSError as err:
                time.sleep(30)
                if count == 0:
                    logging.error('Failed to cleanup {}: {}'.format(filename, err))
|
2013-10-12 14:02:03 +02:00
|
|
|
|
2013-10-18 17:35:59 +02:00
|
|
|
|
2013-10-18 04:53:14 +02:00
|
|
|
def removeLargeFiles(path):
    """Recursively prune the extracted source tree under *path*.

    Removes test-suite folders, doc/example folders, and any regular
    file larger than 1 MB that is not a .txt file. Symlinks are left
    alone. *path* is expected to end with '/' (or be '').
    """
    for entry in glob.glob(path + '*'):
        if entry in {'.', '..'}:
            continue
        if os.path.islink(entry):
            continue
        if os.path.isdir(entry):
            if entry.endswith(('/testsuite', '/clang/INPUTS')):
                # Remove test code
                shutil.rmtree(entry, onerror=handleRemoveReadonly)
            elif entry.endswith(('/doc', '/examples')):
                # Remove docs and examples ... that might be garbage
                shutil.rmtree(entry, onerror=handleRemoveReadonly)
            else:
                removeLargeFiles(entry + '/')
        elif os.path.isfile(entry) and not entry.endswith('.txt'):
            if os.stat(entry).st_size > 1000000:
                try:
                    os.remove(entry)
                except OSError as err:
                    logging.error('Failed to remove {}: {}'.format(entry, err))
|
2013-10-13 08:07:39 +02:00
|
|
|
|
2017-06-04 22:51:48 +02:00
|
|
|
|
2016-07-22 08:31:00 +02:00
|
|
|
def strfCurrTime(fmt):
    """Return the current local time-of-day formatted per *fmt*."""
    return datetime.datetime.now().time().strftime(fmt)
|
2013-10-18 17:35:59 +02:00
|
|
|
|
2017-06-04 22:51:48 +02:00
|
|
|
|
2016-09-20 12:46:15 +02:00
|
|
|
def scanarchive(filepath, jobs, cpulimit):
    """Download, unpack and run cppcheck over one debian source archive.

    filepath -- archive path relative to the mirror root
    jobs     -- cppcheck jobs argument (e.g. '-j1'), inserted verbatim
    cpulimit -- percentage string for the 'cpulimit' tool, or falsy to
                run cppcheck under 'nice' instead
    Output and errors are written via the logging module (results file).
    """
    # remove all files/folders except RESULTS_FILENAME
    removeAllExceptResults()

    logging.info(DEBIAN[0] + filepath)

    # Retry the download once before giving up on this package.
    if not wget(filepath):
        if not wget(filepath):
            logging.error('wget failed at {}'.format(filepath))
            return

    # Unpack according to the tarball's compression suffix.
    filename = filepath[filepath.rfind('/') + 1:]
    if filename[-3:] == '.gz':
        subprocess.call(['tar', 'xzvf', filename])
    elif filename[-3:] == '.xz':
        subprocess.call(['tar', 'xJvf', filename])
    elif filename[-4:] == '.bz2':
        subprocess.call(['tar', 'xjvf', filename])

    # Prune test suites, docs and big non-.txt files before scanning.
    removeLargeFiles('')

    print(strfCurrTime('[%H:%M] cppcheck ') + filename)

    # Throttle cppcheck either via the external cpulimit tool or by
    # running it at a very low priority.
    if cpulimit:
        cmd = 'cpulimit --limit=' + cpulimit
    else:
        cmd = 'nice --adjustment=1000'
    # TODO: The --exception-handling=stderr is skipped right now because it hangs (#8589)
    cmd = cmd + ' ../cppcheck-O2 -D__GCC__ --enable=style --inconclusive --error-exitcode=0 ' +\
        jobs + ' --template=daca2 .'
    cmds = cmd.split()

    # NOTE(review): communicate() returns bytes on Python 3, which would
    # break the 'in comm[0]' substring test below — this script appears
    # to assume Python 2 str pipes; confirm before porting.
    p = subprocess.Popen(cmds, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
    comm = p.communicate()

    if p.returncode == 0:
        # Success: log captured stderr (comm[1]) plus a timestamp.
        logging.info(comm[1] + strfCurrTime('[%H:%M]'))
    elif 'cppcheck: error: could not find or open any of the paths given.' not in comm[0]:
        # Non-zero exit that is not the harmless "nothing to check" case:
        # log the last 'Checking ...' stdout chunk (crash suspect) and stderr.
        stdout = comm[0]
        pos1 = stdout.rfind('Checking ')
        if pos1 > 0:
            logging.error(stdout[pos1:])
        logging.error(comm[1] + strfCurrTime('[%H:%M]'))
        logging.error('Exit code is not zero! Crash?\n')
|
|
|
|
|
2013-10-13 11:10:22 +02:00
|
|
|
|
2016-11-28 04:22:30 +01:00
|
|
|
# Command-line interface. FOLDER is the pool/main/ subfolder to scan.
parser = argparse.ArgumentParser(description='Checks debian source code')
parser.add_argument('folder', metavar='FOLDER')
# Cppcheck git revision; written into the report header when given.
parser.add_argument('--rev')
parser.add_argument('--workdir', default='~/daca2')
# Passed verbatim to cppcheck (e.g. '-j2').
parser.add_argument('-j', '--jobs', default='-j1')
# May be given multiple times; matching package names are skipped.
parser.add_argument('--skip', default=[], action='append')
# Percentage for the external 'cpulimit' tool; 'nice' is used when unset.
parser.add_argument('--cpulimit')

args = parser.parse_args()
|
2013-10-20 11:28:16 +02:00
|
|
|
|
2016-11-28 04:22:30 +01:00
|
|
|
# The base workdir must already exist (created manually, see the header).
workdir = os.path.expanduser(args.workdir)
if not os.path.isdir(workdir):
    logging.critical('workdir \'' + workdir + '\' is not a folder')
    sys.exit(1)

# Per-FOLDER subdirectory, created on demand.
workdir = os.path.join(workdir, args.folder)
if not os.path.isdir(workdir):
    os.makedirs(workdir)

# Everything except this file is wiped between packages.
RESULTS_FILENAME = 'results.txt'
RESULTS_FILE = os.path.join(workdir, RESULTS_FILENAME)

# All progress and errors are appended to the results file.
logging.basicConfig(
    filename=RESULTS_FILE,
    level=logging.INFO,
    format='%(message)s')

print(workdir)

archives = getpackages(args.folder)
if len(archives) == 0:
    logging.critical('failed to load packages')
    sys.exit(1)

if not os.path.isdir(workdir):
    os.makedirs(workdir)
os.chdir(workdir)
|
2013-10-12 14:02:03 +02:00
|
|
|
|
2013-10-13 11:10:22 +02:00
|
|
|
try:
    # Report header: start date/time and (optionally) the git revision.
    logging.info('STARTDATE ' + str(datetime.date.today()))
    logging.info('STARTTIME ' + strfCurrTime('%H:%M:%S'))
    if args.rev:
        logging.info('GIT-REVISION ' + args.rev + '\n')
    logging.info('')

    for archive in archives:
        if len(args.skip) > 0:
            # Extract the bare package name from
            # 'pool/main/<f>/<package>/<file>' and honour --skip.
            a = archive[:archive.rfind('/')]
            a = a[a.rfind('/') + 1:]
            if a in args.skip:
                continue
        scanarchive(archive, args.jobs, args.cpulimit)

    # Report footer: when the scan finished.
    logging.info('DATE {}'.format(datetime.date.today()))
    logging.info('TIME {}'.format(strfCurrTime('%H:%M:%S')))

except EOFError:
    pass

# remove all files/folders except RESULTS_FILENAME
removeAllExceptResults()
|