Remove deprecated daca scripts
parent 441d50df25
commit 12c81ac0bf
daca2-addons.py
@@ -1,249 +0,0 @@
#!/usr/bin/env python
#
# 1. Create the folder ~/daca2-addons
# 2. Put cppcheck-O2 in ~/daca2-addons. It should be built with all optimisations.
# 3. Optional: tweak the DEBIAN mirror list in this script below.
# 4. Run the daca2-addons script: python daca2-addons.py FOLDER
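#
# A minimal example session (illustrative; 'a' is just one Debian pool
# subfolder and the -j/--rev values are arbitrary):
#   mkdir -p ~/daca2-addons
#   cp cppcheck ~/daca2-addons/cppcheck-O2    (an optimised cppcheck build)
#   python daca2-addons.py a -j2 --rev=0123abc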

import subprocess
import sys
import shutil
import glob
import os
import datetime
import time

DEBIAN = ('ftp://ftp.se.debian.org/debian/',
          'ftp://ftp.debian.org/debian/')


def wget(filepath):
    filename = filepath
    if '/' in filepath:
        filename = filename[filename.rfind('/') + 1:]
    for d in DEBIAN:
        subprocess.call(
            ['nice', 'wget', '--tries=10', '--timeout=300', '-O', filename, d + filepath])
        if os.path.isfile(filename):
            return True
        print('Sleep for 10 seconds..')
        time.sleep(10)
    return False


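# For reference, the ls-lR entries parsed by getpackages() below look roughly
# like this (illustrative excerpt, not real data):
#   ./pool/main/a/anacron:
#   -rw-r--r-- ... 45678 Jan 01 12:00 anacron_2.3.orig.tar.gz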
def getpackages(folder):
    if not wget('ls-lR.gz'):
        return []
    subprocess.call(['nice', 'gunzip', 'ls-lR.gz'])
    f = open('ls-lR', 'rt')
    lines = f.readlines()
    f.close()
    subprocess.call(['rm', 'ls-lR'])

    path = None
    archives = []
    filename = None
    for line in lines:
        line = line.strip()
        if len(line) < 4:
            if filename:
                archives.append(path + '/' + filename)
            path = None
            filename = None
        elif line[:13 + len(folder)] == './pool/main/' + folder + '/':
            path = line[2:-1]
        elif path and '.orig.tar.' in line:
            filename = line[1 + line.rfind(' '):]

    for a in archives:
        print(a)

    return archives


def handleRemoveReadonly(func, path, exc):
    import stat
    if not os.access(path, os.W_OK):
        # Is the error an access error ?
        os.chmod(path, stat.S_IWUSR)
        func(path)


def removeAllExceptResults():
    count = 5
    while count > 0:
        count -= 1

        filenames = []
        filenames.extend(glob.glob('[A-Za-z0-9]*'))
        filenames.extend(glob.glob('.[a-z]*'))

        try:
            for filename in filenames:
                if os.path.isdir(filename):
                    shutil.rmtree(filename, onerror=handleRemoveReadonly)
                elif filename != 'results.txt':
                    os.remove(filename)
        except WindowsError as err:
            time.sleep(30)
            if count == 0:
                print('Failed to cleanup files/folders')
                print(err)
                sys.exit(1)
            continue
        except OSError as err:
            time.sleep(30)
            if count == 0:
                print('Failed to cleanup files/folders')
                print(err)
                sys.exit(1)
            continue
        count = 0


def removeLargeFiles(path):
    for g in glob.glob(path + '*'):
        if g == '.' or g == '..':
            continue
        if os.path.islink(g):
            continue
        if os.path.isdir(g):
            removeLargeFiles(g + '/')
        elif os.path.isfile(g) and g[-4:] != '.txt':
            statinfo = os.stat(g)
            if '/clang/INPUTS/' in path or statinfo.st_size > 100000:
                os.remove(g)


def dumpfiles(path):
    ret = []
    for g in glob.glob(path + '*'):
        if os.path.islink(g):
            continue
        if os.path.isdir(g):
            ret.extend(dumpfiles(path + g + '/'))
        elif os.path.isfile(g) and g[-5:] == '.dump':
            ret.append(g)
    return ret


def scanarchive(filepath, jobs):
    # remove all files/folders except results.txt
    removeAllExceptResults()

    results = open('results.txt', 'at')
    results.write(DEBIAN[0] + filepath + '\n')
    results.close()

    if not wget(filepath):
        if not wget(filepath):
            results = open('results.txt', 'at')
            results.write('wget failed\n')
            results.close()
            return

    filename = filepath[filepath.rfind('/') + 1:]
    if filename[-3:] == '.gz':
        subprocess.call(['tar', 'xzvf', filename])
    elif filename[-3:] == '.xz':
        subprocess.call(['tar', 'xJvf', filename])
    elif filename[-4:] == '.bz2':
        subprocess.call(['tar', 'xjvf', filename])

    #
    # List of skipped packages - which trigger known yet unresolved problems with cppcheck.
    # The issues on trac (http://trac.cppcheck.net) are given for reference
    # boost #3654 (?)
    # flite #5975
    # insight #5184
    # valgrind #6151
    # gcc-arm - no ticket. Reproducible timeout in daca2 though as of 1.73/early 2016.
    #

    if filename[:5] == 'flite' or filename[:5] == 'boost' or filename[:7] == 'insight' or\
       filename[:8] == 'valgrind' or filename[:7] == 'gcc-arm':
        results = open('results.txt', 'at')
        results.write('fixme: skipped package to avoid hang\n')
        results.close()
        return

    removeLargeFiles('')

    print('cppcheck ' + filename)

    p = subprocess.Popen(
        ['nice',
         '../cppcheck-O2',
         '--dump',
         '-D__GCC__',
         '--enable=style',
         '--error-exitcode=0',
         jobs,
         '.'],
        stdout=subprocess.PIPE,
        stderr=subprocess.PIPE)
    p.communicate()

    results = open('results.txt', 'at')

    addons = sorted(glob.glob(os.path.expanduser('~/cppcheck/addons/*.py')))
    for dumpfile in sorted(dumpfiles('')):
        for addon in addons:
            if 'cppcheckdata.py' in addon:
                continue

            p2 = subprocess.Popen(['nice',
                                   'python',
                                   addon,
                                   dumpfile],
                                  stdout=subprocess.PIPE,
                                  stderr=subprocess.PIPE)
            comm = p2.communicate()
            results.write(comm[1])
    results.close()


FOLDER = None
JOBS = '-j1'
REV = None
for arg in sys.argv[1:]:
    if arg[:6] == '--rev=':
        REV = arg[6:]
    elif arg[:2] == '-j':
        JOBS = arg
    else:
        FOLDER = arg

if not FOLDER:
    print('no folder given')
    sys.exit(1)

workdir = os.path.expanduser('~/daca2-addons/')
if not os.path.isdir(workdir + FOLDER):
    os.makedirs(workdir + FOLDER)
os.chdir(workdir + FOLDER)

archives = getpackages(FOLDER)
if len(archives) == 0:
    print('failed to load packages')
    sys.exit(1)

try:
    results = open('results.txt', 'wt')
    results.write('STARTDATE ' + str(datetime.date.today()) + '\n')
    if REV:
        results.write('GIT-REVISION ' + REV + '\n')
    results.write('\n')
    results.close()

    for archive in archives:
        scanarchive(archive, JOBS)

    results = open('results.txt', 'at')
    results.write('DATE ' + str(datetime.date.today()) + '\n')
    results.close()

except EOFError:
    pass

# remove all files/folders except results.txt
removeAllExceptResults()
@@ -1,88 +0,0 @@
#!/usr/bin/python

# cgi-script for searching the results
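#
# Example queries (illustrative; actual report paths and message ids depend on
# the server setup):
#   /cgi-bin/daca2-search.cgi                 -> summary table of message ids
#   /cgi-bin/daca2-search.cgi?id=nullPointer  -> all results with that id
#   /cgi-bin/daca2-search.cgi?id=nullPointer&folder=a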

import sys
import glob
import os
import cgi
import cgitb
import re


def getfiles(path, arguments):
    files = []
    if 'folder' in arguments:
        files.append(path + '/daca2-' + arguments['folder'].value + '.html')
    else:
        files.extend(sorted(glob.glob(path + '/daca2-?.html')))
        files.extend(sorted(glob.glob(path + '/daca2-lib?.html')))
    return files


def readlines(filename):
    if not os.path.isfile(filename):
        return []
    f = open(filename, 'rt')
    lines = f.readlines()
    f.close()
    return lines


def trimline(line):
    while len(line) > 1 and (line[-1] == '\r' or line[-1] == '\n'):
        line = line[:-1]
    return line


def matchline(line, id):
    return line.endswith('[' + id + ']')


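# The report lines searched below are daca2/cppcheck results; a matching line
# looks roughly like this (illustrative, not taken from a real report):
#   src/foo.c:123: error: Null pointer dereference [nullPointer]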
def doSearch(path, arguments):
    id = arguments['id'].value
    for g in getfiles(path, arguments):
        ftp = ''
        found = False
        for line in readlines(g):
            line = trimline(line)
            if line.startswith('ftp://'):
                ftp = line
            if matchline(line, id):
                found = True
                sys.stdout.write(ftp + '\n')
            elif line.find(': note:') < 0:
                found = False
            if found:
                sys.stdout.write(line + '\n')


def summary(path, arguments):
    count = {}
    pattern = re.compile(r'.*: (error|warning|style|performance|portability):.*\[([a-zA-Z0-9]+)\]$')
    for g in getfiles(path, arguments):
        for line in readlines(g):
            res = pattern.match(trimline(line))
            if res is None:
                continue
            id = res.group(2)
            if id in count:
                count[id] += 1
            else:
                count[id] = 1
    print('<table>')
    for id in sorted(count.keys()):
        print('<tr><td>' + id + '</td><td><a href="/cgi-bin/daca2-search.cgi?id=' + id + '">' + str(count[id]) + '</a></td></tr>')
    print('</table>')


sys.stdout.write('Content-type: text/html\r\n\r\n'
                 '<html><body>\n')

cgitb.enable()
arguments = cgi.FieldStorage()
if 'id' in arguments:
    id = arguments['id'].value
    #id = 'oppositeInnerCondition'
    print(id)
    sys.stdout.write('<pre>\n')
    doSearch('../htdocs/devinfo/daca2-report', arguments)
    #doSearch(os.path.expanduser('~/temp'), id)
    sys.stdout.write('</pre>\n')
else:
    summary('../htdocs/devinfo/daca2-report', arguments)
    #summary(os.path.expanduser('~/temp'), arguments)
sys.stdout.write('</body></html>\n')
tools/daca2.py
@@ -1,239 +0,0 @@
#!/usr/bin/env python
#
# 1. Create a folder daca2 in your HOME folder
# 2. Put cppcheck-head in daca2. It should be built with all optimisations.
# 3. Optional: Put a file called "suppressions.txt" in the daca2 folder.
# 4. Optional: tweak the DEBIAN mirror list in this script below.
# 5. Run the daca2 script: python daca2.py FOLDER
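#
# A minimal example run (illustrative; the folder name and option values are
# arbitrary):
#   python daca2.py a -j2 --rev=0123abc --skip=boost --baseversion=1.84
# With --baseversion given, results are written to results-head.txt and
# results-1.84.txt instead of results.txt.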

import argparse
import logging
import subprocess
import sys
import shutil
import glob
import os
import datetime
import time

DEBIAN = ('ftp://ftp.se.debian.org/debian/',
          'ftp://ftp.debian.org/debian/')

RESULTS_FILES = ['results.txt']


def wget(filepath):
    filename = filepath
    if '/' in filepath:
        filename = filename[filename.rfind('/') + 1:]
    for d in DEBIAN:
        subprocess.call(
            ['nice', 'wget', '--tries=10', '--timeout=300', '-O', filename, d + filepath])
        if os.path.isfile(filename):
            return True
        print('Sleep for 10 seconds..')
        time.sleep(10)
    return False


def getpackages(folder):
    if not wget('ls-lR.gz'):
        return []
    subprocess.call(['nice', 'gunzip', 'ls-lR.gz'])
    f = open('ls-lR', 'rt')
    lines = f.readlines()
    f.close()
    subprocess.call(['rm', 'ls-lR'])

    path = None
    archives = []
    filename = None
    for line in lines:
        line = line.strip()
        if len(line) < 4:
            if filename:
                archives.append(path + '/' + filename)
            path = None
            filename = None
        elif line[:13 + len(folder)] == './pool/main/' + folder + '/':
            path = line[2:-1]
        elif path and '.orig.tar.' in line:
            filename = line[1 + line.rfind(' '):]

    for a in archives:
        print(a)

    return archives


def handleRemoveReadonly(func, path, exc):
    import stat
    if not os.access(path, os.W_OK):
        # Is the error an access error ?
        os.chmod(path, stat.S_IWUSR)
        func(path)


def removeAllExceptResults():
    filenames = []
    filenames.extend(glob.glob('[A-Za-z0-9]*'))
    filenames.extend(glob.glob('.[a-z]*'))

    for filename in filenames:
        count = 5
        while count > 0:
            count -= 1

            try:
                if os.path.isdir(filename):
                    shutil.rmtree(filename, onerror=handleRemoveReadonly)
                elif filename not in RESULTS_FILES:
                    os.remove(filename)
                break
            except WindowsError as err:
                time.sleep(30)
                if count == 0:
                    logging.error('Failed to cleanup {}: {}'.format(filename, err))
            except OSError as err:
                time.sleep(30)
                if count == 0:
                    logging.error('Failed to cleanup {}: {}'.format(filename, err))


def removeLargeFiles(path):
    for g in glob.glob(path + '*'):
        if g in {'.', '..'}:
            continue
        if os.path.islink(g):
            continue
        if os.path.isdir(g):
            # Remove test code
            if g.endswith('/testsuite') or g.endswith('/clang/INPUTS'):
                shutil.rmtree(g, onerror=handleRemoveReadonly)
            # Remove docs and examples ... that might be garbage
            elif g.endswith('/doc') or g.endswith('/examples'):
                shutil.rmtree(g, onerror=handleRemoveReadonly)
            else:
                removeLargeFiles(g + '/')
        elif os.path.isfile(g) and not g.endswith('.txt'):
            statinfo = os.stat(g)
            if statinfo.st_size > 1000000:
                try:
                    os.remove(g)
                except OSError as err:
                    logging.error('Failed to remove {}: {}'.format(g, err))


def strfCurrTime(fmt):
    return datetime.time.strftime(datetime.datetime.now().time(), fmt)


def scanarchive(filepath, args, run, resultsFile):
    # remove all files/folders except RESULTS_FILENAME
    removeAllExceptResults()

    resultsFile.write(DEBIAN[0] + filepath + '\n')

    if not wget(filepath):
        if not wget(filepath):
            resultsFile.write('wget failed at {}'.format(filepath))
            return

    filename = filepath[filepath.rfind('/') + 1:]
    if filename[-3:] == '.gz':
        subprocess.call(['tar', 'xzvf', filename])
    elif filename[-3:] == '.xz':
        subprocess.call(['tar', 'xJvf', filename])
    elif filename[-4:] == '.bz2':
        subprocess.call(['tar', 'xjvf', filename])

    removeLargeFiles('')

    print(strfCurrTime('[%H:%M] cppcheck ') + filename)

    if args.cpulimit:
        cmd = 'cpulimit --limit=' + args.cpulimit
    else:
        cmd = 'nice --adjustment=1000'
    # TODO: The --exception-handling=stderr is skipped right now because it hangs (#8589)
    cppcheck = '../cppcheck-' + run
    cmd = cmd + ' ' + cppcheck + ' -D__GCC__ --enable=style --inconclusive --error-exitcode=0 ' +\
        args.jobs + ' --template=daca2 .'
    cmds = cmd.split()

    p = subprocess.Popen(cmds, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
    comm = p.communicate()

    if p.returncode == 0:
        resultsFile.write(comm[1] + strfCurrTime('[%H:%M]') + '\n')
    elif 'cppcheck: error: could not find or open any of the paths given.' not in comm[0]:
        stdout = comm[0]
        pos1 = stdout.rfind('Checking ')
        if pos1 > 0:
            resultsFile.write(stdout[pos1:] + '\n')
        resultsFile.write(comm[1] + strfCurrTime('[%H:%M]') + '\n')
        resultsFile.write('Exit code is not zero! Crash?\n')
    resultsFile.write('\n')


parser = argparse.ArgumentParser(description='Checks debian source code')
parser.add_argument('folder', metavar='FOLDER')
parser.add_argument('--rev')
parser.add_argument('--workdir', default='~/daca2')
parser.add_argument('-j', '--jobs', default='-j1')
parser.add_argument('--skip', default=[], action='append')
parser.add_argument('--cpulimit')
parser.add_argument('--baseversion')

args = parser.parse_args()

workdir = os.path.expanduser(args.workdir)
if not os.path.isdir(workdir):
    print('workdir \'' + workdir + '\' is not a folder')
    sys.exit(1)
os.chdir(workdir)

archives = getpackages(args.folder)
if len(archives) == 0:
    print('failed to load packages')
    sys.exit(1)

workdir = os.path.join(workdir, args.folder)
if not os.path.isdir(workdir):
    os.makedirs(workdir)
os.chdir(workdir)

versions = ['head']
if args.baseversion:
    versions.append(args.baseversion)
    RESULTS_FILES = ['results-head.txt', 'results-' + args.baseversion + '.txt']

for run in versions:
    try:
        f = None
        if args.baseversion:
            f = open('results-' + run + '.txt', 'wt')
        else:
            f = open('results.txt', 'wt')
        f.write('STARTDATE ' + str(datetime.date.today()) + '\n')
        f.write('STARTTIME ' + strfCurrTime('%H:%M:%S') + '\n')
        if args.rev:
            f.write('GIT-REVISION ' + args.rev + '\n')
        f.write('\n')

        for archive in archives:
            if len(args.skip) > 0:
                a = archive[:archive.rfind('/')]
                a = a[a.rfind('/') + 1:]
                if a in args.skip:
                    continue
            scanarchive(archive, args, run, f)

        f.write('DATE {}'.format(datetime.date.today()) + '\n')
        f.write('TIME {}'.format(strfCurrTime('%H:%M:%S')) + '\n')
        f.close()

    except EOFError:
        pass

# remove all files/folders except RESULTS_FILENAME
removeAllExceptResults()
rundaca2.py
@@ -1,121 +0,0 @@
#!/usr/bin/env python

import subprocess
import pexpect
import os
import sys
import time

BASE = '1.84'
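
# Overall flow (summary of the code below): pick the Debian pool folder whose
# results are oldest or missing, rebuild cppcheck from git HEAD, run daca2.py
# on that folder (and its "lib" counterpart) against both HEAD and the BASE
# release, rebuild the HTML report and upload it, then move on to the next
# folder.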


def compilecppcheck(CPPFLAGS):
    subprocess.call(['nice', 'make', 'clean'])
    subprocess.call(['nice', 'make', 'SRCDIR=build', 'CFGDIR=' +
                     os.path.expanduser('~/cppcheck/cfg'), 'CXXFLAGS=-g -O2', 'CPPFLAGS=' + CPPFLAGS])
    subprocess.call(['cp', 'cppcheck', os.path.expanduser('~/daca2/cppcheck-head')])


def runcppcheck(rev, folder):
    subprocess.call(['rm', '-rf', os.path.expanduser('~/daca2/' + folder)])
    subprocess.call(['nice', '--adjustment=19', 'python',
                     os.path.expanduser('~/cppcheck/tools/daca2.py'), folder, '--rev=' + rev,
                     '--baseversion=' + BASE, '--skip=hashdeep', '--skip=lice'])


def daca2report(reportfolder):
    subprocess.call(['rm', '-rf', reportfolder])
    subprocess.call(['mkdir', reportfolder])
    subprocess.call(['python', os.path.expanduser('~/cppcheck/tools/daca2-report.py'), reportfolder])


# Upload file to sourceforge server using scp
def upload(localfolder, webfolder, password):
    if len(password) < 3:
        return
    tries = 1
    while tries <= 5:
        try:
            child = pexpect.spawn(
                'scp -r ' + localfolder + ' danielmarjamaki,cppcheck@web.sf.net:htdocs/' + webfolder)
            # child.expect('upload@trac.cppcheck.net\'s password:')
            child.expect('Password:')
            child.sendline(password)
            child.interact()
            return
        except (IOError, OSError, pexpect.TIMEOUT, pexpect.EOF):
            print('rundaca2.py: Upload failed. Sleep for 10 seconds..')
            time.sleep(10)
            tries = tries + 1


def getDate(filename):
    for line in open(filename):
        if line.startswith('DATE '):
            return line[5:]
    return None


def getFolderNum():
    folders = '0123456789abcdefghijklmnopqrstuvwxyz'
    oldestDate = None
    oldestFolderNum = 0
    for folderNum in range(len(folders)):
        folder = folders[folderNum]
        path = os.path.expanduser('~/daca2/' + folder)
        if not os.path.isdir(path):
            if folder == '0' or folder >= 'a':
                return folderNum
            continue
        if not os.path.isfile(path + '/results-head.txt'):
            return folderNum
        if not os.path.isfile(path + '/results-' + BASE + '.txt'):
            return folderNum
        d1 = getDate(path + '/results-head.txt')
        if d1 is None:  # results are unfinished so they need to be recreated
            return folderNum
        d2 = getDate(path + '/results-' + BASE + '.txt')
        if d2 is None:  # results are unfinished so they need to be recreated
            return folderNum
        if oldestDate is None or d1 < oldestDate:
            oldestDate = d1
            oldestFolderNum = folderNum
        if d2 < oldestDate:
            oldestDate = d2
            oldestFolderNum = folderNum

    return oldestFolderNum


def daca2(folderNum, password):
    folders = '0123456789abcdefghijklmnopqrstuvwxyz'
    folder = folders[folderNum % len(folders)]

    print('Daca2 folder=' + folder)

    os.chdir(os.path.expanduser('~/cppcheck'))
    subprocess.call(['git', 'pull'])
    p = subprocess.Popen(['git', 'show', '--format=%h'],
                         stdout=subprocess.PIPE, stderr=subprocess.PIPE)
    comm = p.communicate()
    rev = comm[0]
    rev = rev[:rev.find('\n')]

    print('rundaca2.py: compile cppcheck')
    compilecppcheck('-DMAXTIME=600 -DDACA2')

    print('rundaca2.py: runcppcheck')
    runcppcheck(rev, folder)
    runcppcheck(rev, 'lib' + folder)

    print('rundaca2.py: daca2 report')
    daca2report(os.path.expanduser('~/daca2-report'))

    print('rundaca2.py: upload')
    upload(os.path.expanduser('~/daca2-report'), 'devinfo/', password)


print('enter password:')
password = sys.stdin.readline().rstrip()
folderNum = getFolderNum()
while True:
    daca2(folderNum, password)
    folderNum = folderNum + 1