cppcheck/tools/daca2.py

231 lines
6.7 KiB
Python
Raw Normal View History

2017-06-04 22:51:48 +02:00
#!/usr/bin/env python
#
# 1. Create a folder daca2 in your HOME folder
# 2. Put cppcheck-O2 in daca2. It should be built with all optimisations.
2013-10-19 13:46:15 +02:00
# 3. Optional: Put a file called "suppressions.txt" in the daca2 folder.
# 4. Optional: tweak FTPSERVER and FTPPATH in this script below.
# 5. Run the daca2 script: python daca2.py FOLDER
import argparse
import subprocess
import sys
import shutil
import glob
import os
2013-10-13 11:47:51 +02:00
import datetime
import time
import logging
2017-06-04 22:51:48 +02:00
# Debian mirrors, tried in order when downloading files.
DEBIAN = (
    'ftp://ftp.se.debian.org/debian/',
    'ftp://ftp.debian.org/debian/',
)
def wget(filepath):
    """Download *filepath*, trying each Debian mirror in turn.

    The file is saved in the current directory under its basename.
    Returns True as soon as the file exists locally, False if every
    mirror failed.
    """
    # Basename of the remote path; unchanged when there is no '/'.
    filename = filepath.rsplit('/', 1)[-1]
    for mirror in DEBIAN:
        subprocess.call(
            ['nice', 'wget', '--tries=10', '--timeout=300', '-O', filename,
             mirror + filepath])
        if os.path.isfile(filename):
            return True
        print('Sleep for 10 seconds..')
        time.sleep(10)
    return False
2013-10-19 13:46:15 +02:00
2013-10-20 12:45:05 +02:00
2013-10-19 13:46:15 +02:00
def getpackages(folder):
    """Return the .orig.tar.* archive paths for every package under
    ./pool/main/<folder>/ on the Debian mirror, parsed from the mirror's
    ls-lR index.  Returns [] if the index cannot be downloaded.
    """
    if not wget('ls-lR.gz'):
        return []
    subprocess.call(['nice', 'gunzip', 'ls-lR.gz'])
    # Context manager guarantees the handle is closed even if reading
    # fails (the original leaked it on exception); os.remove avoids
    # spawning an external 'rm' process for a single file.
    with open('ls-lR', 'rt') as f:
        lines = f.readlines()
    os.remove('ls-lR')

    wanted_prefix = './pool/main/' + folder + '/'
    path = None
    archives = []
    filename = None
    for line in lines:
        line = line.strip()
        if len(line) < 4:
            # Short/blank line terminates a directory listing: flush the
            # pending match, if any.
            if filename:
                archives.append(path + '/' + filename)
            path = None
            filename = None
        elif line.startswith(wanted_prefix):
            # Directory header such as './pool/main/a/apt:' -> drop the
            # leading './' and the trailing ':'.
            path = line[2:-1]
        elif path and '.orig.tar.' in line:
            # 'ls -l' detail line; the filename is the last field.
            filename = line[1 + line.rfind(' '):]

    for a in archives:
        print(a)

    return archives
2013-10-12 16:55:21 +02:00
2013-10-18 17:35:59 +02:00
2013-10-14 15:49:11 +02:00
def handleRemoveReadonly(func, path, exc):
    """shutil.rmtree onerror hook: if the failure looks like a
    permission problem, make *path* owner-writable and retry the
    failed operation *func*."""
    import stat
    if os.access(path, os.W_OK):
        # Not an access error -- leave the exception to the caller.
        return
    os.chmod(path, stat.S_IWUSR)
    func(path)
2013-10-18 17:35:59 +02:00
def removeAllExceptResults():
    """Delete every file/directory in the current working directory
    except RESULTS_FILENAME, retrying each entry a few times to ride
    out transient filesystem errors."""
    filenames = []
    filenames.extend(glob.glob('[A-Za-z0-9]*'))
    filenames.extend(glob.glob('.[a-z]*'))

    for filename in filenames:
        count = 5
        while count > 0:
            count -= 1
            try:
                if os.path.isdir(filename):
                    shutil.rmtree(filename, onerror=handleRemoveReadonly)
                elif filename != RESULTS_FILENAME:
                    os.remove(filename)
                break
            # OSError covers WindowsError on Windows; naming WindowsError
            # in its own except clause raised NameError on non-Windows
            # platforms the moment a real OSError occurred.
            except OSError as err:
                time.sleep(30)
                if count == 0:
                    logging.error(
                        'Failed to cleanup {}: {}'.format(filename, err))
2013-10-18 17:35:59 +02:00
2013-10-18 04:53:14 +02:00
def removeLargeFiles(path):
    """Recursively prune the unpacked source tree below *path*:
    drop test suites, docs/examples directories and any non-.txt
    file larger than 1000000 bytes."""
    for entry in glob.glob(path + '*'):
        if entry in {'.', '..'} or os.path.islink(entry):
            continue
        if os.path.isdir(entry):
            # Test code, docs and examples might be garbage for the
            # scan -- remove those subtrees wholesale.
            if entry.endswith(('/testsuite', '/clang/INPUTS',
                               '/doc', '/examples')):
                shutil.rmtree(entry, onerror=handleRemoveReadonly)
            else:
                removeLargeFiles(entry + '/')
        elif os.path.isfile(entry) and not entry.endswith('.txt'):
            if os.stat(entry).st_size > 1000000:
                try:
                    os.remove(entry)
                except OSError as err:
                    logging.error(
                        'Failed to remove {}: {}'.format(entry, err))
2013-10-13 08:07:39 +02:00
2017-06-04 22:51:48 +02:00
2016-07-22 08:31:00 +02:00
def strfCurrTime(fmt):
    """Format the current local time of day according to *fmt*."""
    return datetime.datetime.now().time().strftime(fmt)
2013-10-18 17:35:59 +02:00
2017-06-04 22:51:48 +02:00
2016-09-20 12:46:15 +02:00
def scanarchive(filepath, jobs, cpulimit):
    """Download one source archive, unpack it, prune large/irrelevant
    files and run cppcheck over the tree, logging the output.

    filepath -- archive path relative to the Debian mirror root
    jobs     -- cppcheck jobs argument, e.g. '-j1'
    cpulimit -- if set, percentage passed to cpulimit; otherwise the
                scan runs under 'nice'
    """
    # remove all files/folders except RESULTS_FILENAME
    removeAllExceptResults()

    logging.info(DEBIAN[0] + filepath)
    # Mirrors are flaky; retry the whole mirror list once more.
    if not wget(filepath):
        if not wget(filepath):
            logging.error('wget failed at {}'.format(filepath))
            return

    filename = filepath[filepath.rfind('/') + 1:]
    if filename[-3:] == '.gz':
        subprocess.call(['tar', 'xzvf', filename])
    elif filename[-3:] == '.xz':
        subprocess.call(['tar', 'xJvf', filename])
    elif filename[-4:] == '.bz2':
        subprocess.call(['tar', 'xjvf', filename])

    removeLargeFiles('')

    print(strfCurrTime('[%H:%M] cppcheck ') + filename)

    if cpulimit:
        cmd = 'cpulimit --limit=' + cpulimit
    else:
        cmd = 'nice --adjustment=1000'
    cmd = cmd + ' ../cppcheck-O2 -D__GCC__ --enable=style --inconclusive --error-exitcode=0 ' +\
        '--exception-handling=stderr ' + jobs + ' --template=daca2 .'
    cmds = cmd.split()
    # universal_newlines=True makes stdout/stderr str instead of bytes,
    # so the substring test and the str concatenation in the logging
    # calls below also work on Python 3 (they raised TypeError before).
    p = subprocess.Popen(cmds, stdout=subprocess.PIPE,
                         stderr=subprocess.PIPE, universal_newlines=True)
    comm = p.communicate()

    if p.returncode == 0:
        logging.info(comm[1] + strfCurrTime('[%H:%M]'))
    elif 'cppcheck: error: could not find or open any of the paths given.' not in comm[0]:
        # Nonzero exit that is not the benign "no files" case: possibly
        # a cppcheck crash worth investigating.
        logging.error(comm[1] + strfCurrTime('[%H:%M]'))
        logging.error('Exit code is not zero! Crash?\n')
2013-10-13 11:10:22 +02:00
# ---------------------------------------------------------------------------
# Main script: parse arguments, set up the per-folder work directory and
# results log, fetch the package index and scan every archive in it.
# ---------------------------------------------------------------------------
parser = argparse.ArgumentParser(description='Checks debian source code')
parser.add_argument('folder', metavar='FOLDER')
parser.add_argument('--rev')
parser.add_argument('--workdir', default='~/daca2')
parser.add_argument('-j', '--jobs', default='-j1')
parser.add_argument('--skip', default=[], action='append')
parser.add_argument('--cpulimit')
args = parser.parse_args()

workdir = os.path.expanduser(args.workdir)
if not os.path.isdir(workdir):
    logging.critical('workdir \'' + workdir + '\' is not a folder')
    sys.exit(1)

# Each pool folder gets its own sub-directory holding its results file.
workdir = os.path.join(workdir, args.folder)
if not os.path.isdir(workdir):
    os.makedirs(workdir)

RESULTS_FILENAME = 'results.txt'
RESULTS_FILE = os.path.join(workdir, RESULTS_FILENAME)

logging.basicConfig(
    filename=RESULTS_FILE,
    level=logging.INFO,
    format='%(message)s')

print(workdir)

archives = getpackages(args.folder)
if len(archives) == 0:
    logging.critical('failed to load packages')
    sys.exit(1)

# workdir was already created above, so the second isdir/makedirs check
# that used to sit here was redundant and has been dropped.
os.chdir(workdir)

try:
    logging.info('STARTDATE ' + str(datetime.date.today()))
    logging.info('STARTTIME ' + strfCurrTime('%H:%M:%S'))
    if args.rev:
        logging.info('GIT-REVISION ' + args.rev + '\n')
    logging.info('')

    for archive in archives:
        if len(args.skip) > 0:
            # Package name = last path component before the archive
            # filename; compare it against the --skip list.
            a = archive[:archive.rfind('/')]
            a = a[a.rfind('/') + 1:]
            if a in args.skip:
                continue
        scanarchive(archive, args.jobs, args.cpulimit)

    logging.info('DATE {}'.format(datetime.date.today()))
    logging.info('TIME {}'.format(strfCurrTime('%H:%M:%S')))
except EOFError:
    pass

# remove all files/folders except RESULTS_FILENAME
removeAllExceptResults()