# Donate CPU
#
# A script a user can run to donate CPU to cppcheck project
#
# Syntax: donate-cpu.py [-jN] [--package=url] [--stop-time=HH:MM] [--work-path=path] [--test] [--bandwidth-limit=limit]
#  -jN                  Use N threads in compilation/analysis. Default is 1.
#  --package=url        Check a specific package and then stop. Can be useful if you want to reproduce some warning/crash/exception/etc..
#  --stop-time=HH:MM    Stop analysis when time has passed. Default is that you must terminate the script.
#  --work-path=path     Work folder path. Default path is cppcheck-donate-cpu-workfolder in your home folder.
#  --test               Connect to a donate-cpu-server that is running locally on port 8001 for testing.
#  --bandwidth-limit=limit Limit download rate for packages. Format for limit is the same that wget uses.
#  Examples:             --bandwidth-limit=250k => max. 250 kilobytes per second
#                        --bandwidth-limit=2m => max. 2 megabytes per second
#  --max-packages=N     Process N packages and then exit. A value of 0 means infinitely.
#
# What this script does:
# 1. Check requirements
# 2. Pull & compile Cppcheck
# 3. Select a package
# 4. Download package
# 5. Analyze source code
# 6. Upload results
# 7. Repeat from step 2
#
# Quick start: just run this script without any arguments
# Standard-library imports only; the client must run on a plain Python 2/3 install.
import shutil
import os
import subprocess
import sys
import socket
import time
import re
import tarfile
import platform


# Version scheme (MAJOR.MINOR.PATCH) should orientate on "Semantic Versioning" https://semver.org/
# Every change in this script should result in increasing the version number accordingly (exceptions may be cosmetic
# changes)
CLIENT_VERSION = "1.1.11"
2018-08-23 21:31:02 +02:00
|
|
|
def checkRequirements():
    """Verify that all external tools this client needs are installed.

    Each tool is probed by spawning '<tool> --version'; a missing binary
    raises OSError.  Returns True when everything is available.
    """
    ok = True
    for tool in ['g++', 'git', 'make', 'wget']:
        try:
            subprocess.call([tool, '--version'])
        except OSError:
            print(tool + ' is required')
            ok = False
    return ok
|
|
|
|
2019-01-09 16:01:53 +01:00
|
|
|
|
2018-08-24 18:49:11 +02:00
|
|
|
def getCppcheck(cppcheckPath):
    # Clone the cppcheck git repository into cppcheckPath, or update an
    # existing clone.  Retries up to 5 times (waiting 10 minutes between
    # failed clone attempts).  Returns True on success, False if all
    # attempts failed.
    print('Get Cppcheck..')
    for i in range(5):
        if os.path.exists(cppcheckPath):
            # Existing clone: discard local changes and pull latest master.
            os.chdir(cppcheckPath)
            subprocess.call(['git', 'checkout', '-f'])
            subprocess.call(['git', 'pull'])
        else:
            subprocess.call(['git', 'clone', 'https://github.com/danmar/cppcheck.git', cppcheckPath])
            if not os.path.exists(cppcheckPath):
                # Clone failed (network down?); wait and retry.
                print('Failed to clone, will try again in 10 minutes..')
                time.sleep(600)
                continue
        # Short pause so the checkout settles before the build starts.
        time.sleep(2)
        return True
    return False
2018-08-24 18:49:11 +02:00
|
|
|
|
2018-08-23 21:31:02 +02:00
|
|
|
|
2018-08-31 14:28:42 +02:00
|
|
|
def compile_version(workPath, jobs, version):
    """Build a specific released cppcheck `version` and cache the binary.

    The binary and its cfg/ folder are copied into workPath/<version>/ so
    the build is done only once per version.  `jobs` is the make -jN flag.
    Returns True if a working binary exists afterwards, False otherwise.
    """
    if os.path.isfile(workPath + '/' + version + '/cppcheck'):
        # Already built and cached earlier.
        return True
    os.chdir(workPath + '/cppcheck')
    subprocess.call(['git', 'checkout', version])
    subprocess.call(['make', 'clean'])
    subprocess.call(['make', jobs, 'SRCDIR=build', 'CXXFLAGS=-O2'])
    if os.path.isfile(workPath + '/cppcheck/cppcheck'):
        # BUGFIX: this previously used the module-level global 'workpath'
        # instead of the 'workPath' parameter, so the function only worked
        # by accident when both happened to be equal.
        os.mkdir(workPath + '/' + version)
        destPath = workPath + '/' + version + '/'
        subprocess.call(['cp', '-R', workPath + '/cppcheck/cfg', destPath])
        subprocess.call(['cp', 'cppcheck', destPath])
    subprocess.call(['git', 'checkout', 'master'])
    try:
        # Smoke-test the cached binary; OSError means it is missing/broken.
        subprocess.call([workPath + '/' + version + '/cppcheck', '--version'])
    except OSError:
        return False
    return True
2018-08-24 21:07:50 +02:00
|
|
|
|
|
|
|
|
2018-08-31 14:28:42 +02:00
|
|
|
def compile(cppcheckPath, jobs):
    """Build the cppcheck head checkout in cppcheckPath.

    `jobs` is the make -jN flag.  Returns False when the directory is
    missing or the freshly built binary cannot be executed, True otherwise.
    """
    print('Compiling Cppcheck..')
    try:
        os.chdir(cppcheckPath)
        build_steps = (
            ['make', jobs, 'SRCDIR=build', 'CXXFLAGS=-O2'],
            [cppcheckPath + '/cppcheck', '--version'],
        )
        for step in build_steps:
            subprocess.call(step)
    except OSError:
        return False
    return True
2018-08-23 21:31:02 +02:00
|
|
|
|
2018-08-24 18:49:11 +02:00
|
|
|
|
2019-01-03 13:42:33 +01:00
|
|
|
def getCppcheckVersions(server_address):
    """Ask the server which cppcheck versions should be compared.

    Returns a list of version strings (e.g. ['head', '1.86']) or None when
    the server cannot be reached.
    """
    print('Connecting to server to get Cppcheck versions..')
    sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
    try:
        sock.connect(server_address)
        sock.send(b'GetCppcheckVersions\n')
        versions = sock.recv(256)
    except socket.error:
        return None
    finally:
        # BUGFIX: the socket was previously leaked on the error path
        # (close() was only reached after a successful exchange).
        sock.close()
    return versions.decode('utf-8').split()
|
|
|
|
|
|
|
|
2019-01-03 13:42:33 +01:00
|
|
|
def getPackage(server_address):
    """Ask the server for the URL of the next package to analyze.

    Returns the URL as a string, or '' when the server is unreachable
    (callers retry on an empty string).
    """
    print('Connecting to server to get assigned work..')
    package = b''
    sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
    try:
        sock.connect(server_address)
        sock.send(b'get\n')
        package = sock.recv(256)
    except socket.error:
        # BUGFIX: this used to assign the *str* '' here, which made the
        # .decode() below raise AttributeError on Python 3; keep it bytes.
        package = b''
    sock.close()
    return package.decode('utf-8')
2018-08-23 21:31:02 +02:00
|
|
|
|
2018-08-24 18:49:11 +02:00
|
|
|
|
2018-09-01 17:04:34 +02:00
|
|
|
def handleRemoveReadonly(func, path, exc):
    """shutil.rmtree onerror callback: clear read-only flags and retry.

    If `path` is already writable the failure was something else and is
    left for the caller; otherwise add the user write bit and re-run the
    failed operation `func`.
    """
    import stat
    if os.access(path, os.W_OK):
        return
    os.chmod(path, stat.S_IWUSR)
    func(path)
|
|
|
|
|
|
|
|
|
|
|
def removeTree(folderName):
    """Recursively delete folderName, retrying up to five times.

    Each failed attempt sleeps 30 seconds (files may be transiently
    locked); after the final failure the script exits with status 1.
    A missing folder is silently accepted.
    """
    if not os.path.exists(folderName):
        return
    for attempts_left in range(4, -1, -1):
        try:
            shutil.rmtree(folderName, onerror=handleRemoveReadonly)
            return
        except OSError as err:
            time.sleep(30)
            if attempts_left == 0:
                print('Failed to cleanup {}: {}'.format(folderName, err))
                sys.exit(1)
|
|
|
|
|
|
|
|
2019-01-18 21:36:03 +01:00
|
|
|
def wget(url, destfile, bandwidth_limit):
    """Download `url` to `destfile` using wget.

    An existing destfile is removed first (the script exits if the path
    exists but is not a regular file).  `bandwidth_limit` is an optional
    wget --limit-rate value such as '250k'.  Returns True when the file
    exists after the download, otherwise sleeps 10 seconds (back-off for
    the caller's retry) and returns False.
    """
    if os.path.exists(destfile):
        if os.path.isfile(destfile):
            os.remove(destfile)
        else:
            print('Error: ' + destfile + ' exists but it is not a file! Please check the path and delete it manually.')
            sys.exit(1)
    # BUGFIX: build the argument list conditionally. Previously an empty
    # string was always passed as an argument when no limit was set, and
    # wget treats '' as an extra (invalid) URL.
    wget_call = ['wget', '--tries=10', '--timeout=300']
    if bandwidth_limit and isinstance(bandwidth_limit, str):
        wget_call.append('--limit-rate=' + bandwidth_limit)
    wget_call += ['-O', destfile, url]
    subprocess.call(wget_call)
    if os.path.isfile(destfile):
        return True
    print('Sleep for 10 seconds..')
    time.sleep(10)
    return False
|
|
|
|
2018-08-24 21:07:50 +02:00
|
|
|
|
2019-01-18 21:36:03 +01:00
|
|
|
def downloadPackage(workPath, package, bandwidth_limit):
    """Download the package tarball into workPath/temp.tgz.

    Makes up to two wget attempts; returns the tarball path on success or
    None when both attempts failed.
    """
    print('Download package ' + package)
    destfile = workPath + '/temp.tgz'
    for _attempt in range(2):
        if wget(package, destfile, bandwidth_limit):
            return destfile
    return None
|
|
|
|
|
|
|
|
|
|
|
def unpackPackage(workPath, tgz):
    # Extract the downloaded tarball into a fresh workPath/temp folder.
    # Only C/C++/QML source and header files are extracted; members whose
    # names start with '/' or '..' are skipped so a malicious archive
    # cannot write outside the temp folder.
    print('Unpacking..')
    tempPath = workPath + '/temp'
    # Start from a clean slate - remove leftovers of the previous package.
    removeTree(tempPath)
    os.mkdir(tempPath)
    os.chdir(tempPath)
    if tarfile.is_tarfile(tgz):
        tf = tarfile.open(tgz)
        for member in tf:
            if member.name.startswith(('/', '..')):
                # Skip dangerous file names
                continue
            elif member.name.lower().endswith(('.c', '.cpp', '.cxx', '.cc', '.c++', '.h', '.hpp',
                                               '.h++', '.hxx', '.hh', '.tpp', '.txx', '.qml')):
                try:
                    tf.extract(member.name)
                except OSError:
                    # e.g. a file where a directory of the same name is needed
                    pass
                except AttributeError:
                    # broken/odd archive member metadata - best effort, skip it
                    pass
        tf.close()
    os.chdir(workPath)
2018-08-24 21:07:50 +02:00
|
|
|
|
|
|
|
|
2019-01-25 21:41:28 +01:00
|
|
|
def hasInclude(path, includes):
    """Return True if any file below `path` #includes one of `includes`.

    `includes` are literal include targets such as '<boost/' or '"wx/';
    they are matched (escaped) at the start of an #include directive.
    Unreadable files are skipped.
    """
    pattern = re.compile(
        '^[ \t]*#[ \t]*include[ \t]*(' +
        '|'.join(re.escape(inc) for inc in includes) + ')',
        re.MULTILINE)
    for root, _, files in os.walk(path):
        for name in files:
            filename = os.path.join(root, name)
            try:
                # Python 2's open() has no 'errors' argument.
                if sys.version_info.major < 3:
                    handle = open(filename, 'rt')
                else:
                    handle = open(filename, 'rt', errors='ignore')
                text = handle.read()
                try:
                    # Python2 needs to decode the data first
                    text = text.decode(encoding='utf-8', errors='ignore')
                except AttributeError:
                    # Python3 directly reads the data into a string object that has no decode()
                    pass
                handle.close()
                if pattern.search(text):
                    return True
            except IOError:
                pass
    return False
|
|
|
|
|
|
|
|
2019-02-09 22:05:12 +01:00
|
|
|
def scanPackage(workPath, cppcheckPath, jobs, fast):
    # Run cppcheck over the unpacked sources in workPath/temp.
    # Returns a 5-tuple:
    #   (issue count, or -1 on crash;
    #    concatenated issue messages;
    #    concatenated 'information' messages;
    #    elapsed wall-clock seconds, or -1 on crash;
    #    the cppcheck options string that was used)
    # When `fast` is true, '--experimental-fast' is prepended to the options.
    print('Analyze..')
    os.chdir(workPath)
    libraries = ' --library=posix --library=gnu'

    # Map of optional cppcheck library configs to the #include lines that
    # indicate a package uses that library.  A --library flag is added only
    # when the cfg file exists AND the sources actually include the header.
    libraryIncludes = {'boost': ['<boost/'],
                       'googletest': ['<gtest/gtest.h>'],
                       'gtk': ['<gtk/gtk.h>', '<glib.h>', '<glib/'],
                       # 'libcerror': ['<libcerror.h>'], <- Enable after release of 1.88
                       'motif': ['<X11/', '<Xm/'],
                       'python': ['<Python.h>'],
                       'qt': ['<QApplication>', '<QString>', '<QWidget>', '<QtWidgets>', '<QtGui'],
                       'sdl': ['<SDL.h>'],
                       'tinyxml2': ['<tinyxml2', '"tinyxml2'],
                       'wxwidgets': ['<wx/', '"wx/'],
                       'zlib': ['<zlib.h>'],
                       }
    for library, includes in libraryIncludes.items():
        if os.path.exists(os.path.join(cppcheckPath, 'cfg', library + '.cfg')) and hasInclude('temp', includes):
            libraries += ' --library=' + library

    # Reference for GNU C: https://gcc.gnu.org/onlinedocs/cpp/Common-Predefined-Macros.html
    options = jobs + libraries + ' -D__GNUC__ --check-library --inconclusive --enable=style,information --platform=unix64 --template=daca2 -rp=temp temp'
    if fast:
        options = '--experimental-fast ' + options
    cmd = 'nice ' + cppcheckPath + '/cppcheck' + ' ' + options
    print(cmd)
    startTime = time.time()
    p = subprocess.Popen(cmd.split(), stdout=subprocess.PIPE, stderr=subprocess.PIPE)
    comm = p.communicate()
    stopTime = time.time()
    stdout = comm[0].decode(encoding='utf-8', errors='ignore')
    stderr = comm[1].decode(encoding='utf-8', errors='ignore')
    # A nonzero exit is a crash unless cppcheck just found nothing to scan.
    if p.returncode != 0 and 'cppcheck: error: could not find or open any of the paths given.' not in stdout:
        # Crash!
        print('Crash!')
        return -1, '', '', -1, options
    # A crashed child checker also counts as a crash of the run.
    if stderr.find('Internal error: Child process crashed with signal 11 [cppcheckError]') > 0:
        # Crash!
        print('Crash!')
        return -1, '', '', -1, options
    elapsedTime = stopTime - startTime
    information_messages_list = []
    issue_messages_list = []
    count = 0
    # Separate 'information' diagnostics from real findings; only lines
    # that look like 'file:line: ... [id]' are counted as issues.
    for line in stderr.split('\n'):
        if ': information: ' in line:
            information_messages_list.append(line + '\n')
        elif line:
            issue_messages_list.append(line + '\n')
            if re.match(r'.*:[0-9]+:.*\]$', line):
                count += 1
    print('Number of issues: ' + str(count))
    return count, ''.join(issue_messages_list), ''.join(information_messages_list), elapsedTime, options
2018-08-23 21:31:02 +02:00
|
|
|
|
2018-08-24 21:07:50 +02:00
|
|
|
|
2018-11-18 16:26:56 +01:00
|
|
|
def splitResults(results):
    """Split cppcheck output into one string per warning.

    A new warning starts at every line that ends with ']' and contains a
    severity tag; continuation lines (notes, stack traces) are kept with
    their warning, each line indented by five spaces, and the whole
    warning is stripped.  Lines before the first warning are discarded.
    """
    warnings = []
    current = None
    for line in results.split('\n'):
        starts_warning = (line.endswith(']') and
                          re.search(r': (error|warning|style|performance|portability|information|debug):', line))
        if starts_warning:
            if current is not None:
                warnings.append(current.strip())
            current = ''
        if current is not None:
            current += ' ' * 5 + line + '\n'
    if current is not None:
        warnings.append(current.strip())
    return warnings
|
|
|
|
2019-01-09 16:01:53 +01:00
|
|
|
|
2018-08-26 13:42:01 +02:00
|
|
|
def diffResults(workPath, ver1, results1, ver2, results2):
    """Return the warnings unique to each result set, tagged with version.

    Both outputs are split into warnings, sorted, and merge-compared;
    warnings present in both sets are dropped.  `workPath` is unused but
    kept for interface compatibility with callers.
    """
    print('Diff results..')
    r1 = sorted(splitResults(results1))
    r2 = sorted(splitResults(results2))
    diff_lines = []
    i1 = 0
    i2 = 0
    # Classic sorted-merge: emit whichever side is smaller, skip equals.
    while i1 < len(r1) or i2 < len(r2):
        if i2 >= len(r2) or (i1 < len(r1) and r1[i1] < r2[i2]):
            diff_lines.append(ver1 + ' ' + r1[i1] + '\n')
            i1 += 1
        elif i1 >= len(r1) or r2[i2] < r1[i1]:
            diff_lines.append(ver2 + ' ' + r2[i2] + '\n')
            i2 += 1
        else:
            i1 += 1
            i2 += 1

    return ''.join(diff_lines)
|
|
|
|
|
|
|
|
2018-08-25 10:25:05 +02:00
|
|
|
def sendAll(connection, data):
    """Send all of `data` over `connection`.

    The string is encoded as ASCII (unencodable characters dropped) and
    re-sent until the socket has accepted every byte.
    """
    payload = data.encode('ascii', 'ignore')
    while payload:
        sent = connection.send(payload)
        if sent < len(payload):
            payload = payload[sent:]
        else:
            payload = None
2018-08-25 10:25:05 +02:00
|
|
|
|
|
|
|
|
2019-01-03 13:42:33 +01:00
|
|
|
def uploadResults(package, results, server_address):
    """Upload the analysis results for `package` to the server.

    Uses the 'write-fast' command for --experimental-fast result blobs
    (results starting with 'FAST'), 'write' otherwise.  Makes up to four
    attempts, sleeping 30 seconds between failures.  Returns True on
    success, False when all attempts failed.
    """
    print('Uploading results..')
    for retry in range(4):
        sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
        try:
            sock.connect(server_address)
            if results.startswith('FAST'):
                cmd = 'write-fast\n'
            else:
                cmd = 'write\n'
            sendAll(sock, cmd + package + '\n' + results + '\nDONE')
        except socket.error:
            print('Upload failed, retry in 30 seconds')
            time.sleep(30)
            continue
        finally:
            # BUGFIX: the socket was previously leaked on every failed attempt.
            sock.close()
        print('Results have been successfully uploaded.')
        return True
    print('Upload permanently failed!')
    return False
|
|
|
|
|
|
|
|
|
|
|
def uploadInfo(package, info_output, server_address):
    """Upload the 'information' messages for `package` to the server.

    Makes up to three attempts, sleeping 30 seconds between failures.
    Returns True on success, False when all attempts failed.
    """
    print('Uploading information output..')
    for retry in range(3):
        sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
        try:
            sock.connect(server_address)
            sendAll(sock, 'write_info\n' + package + '\n' + info_output + '\nDONE')
        except socket.error:
            print('Upload failed, retry in 30 seconds')
            time.sleep(30)
            continue
        finally:
            # BUGFIX: the socket was previously leaked on every failed attempt.
            sock.close()
        print('Information output has been successfully uploaded.')
        return True
    print('Upload permanently failed!')
    return False
2018-08-23 21:31:02 +02:00
|
|
|
|
2019-01-09 16:01:53 +01:00
|
|
|
|
2018-08-31 14:28:42 +02:00
|
|
|
# ---- Script entry point: defaults, may be overridden on the command line ----
jobs = '-j1'                   # make/cppcheck -jN flag
stopTime = None                # 'HH:MM'; stop the main loop after this time of day
workpath = os.path.expanduser('~/cppcheck-donate-cpu-workfolder')  # work folder
packageUrl = None              # analyze only this package, then stop
server_address = ('cppcheck.osuosl.org', 8000)  # daca@home server
bandwidth_limit = None         # wget --limit-rate value, e.g. '250k'
max_packages = None            # process this many packages then exit (None = unlimited)
2018-08-29 11:08:56 +02:00
|
|
|
# Parse the command line; each recognized option overrides one of the
# defaults above.  Unknown arguments abort the script.
for arg in sys.argv[1:]:
    # --stop-time=12:00 => run until ~12:00 and then stop
    if arg.startswith('--stop-time='):
        stopTime = arg[-5:]
        print('Stop time:' + stopTime)
    elif arg.startswith('-j'):
        jobs = arg
        print('Jobs:' + jobs[2:])
    elif arg.startswith('--package='):
        packageUrl = arg[arg.find('=')+1:]
        print('Package:' + packageUrl)
    elif arg.startswith('--work-path='):
        workpath = arg[arg.find('=')+1:]
        print('workpath:' + workpath)
        if not os.path.exists(workpath):
            print('work path does not exist!')
            sys.exit(1)
    elif arg == '--test':
        server_address = ('localhost', 8001)
    elif arg.startswith('--bandwidth-limit='):
        bandwidth_limit = arg[arg.find('=')+1:]
    elif arg.startswith('--max-packages='):
        arg_value = arg[arg.find('=')+1:]
        try:
            max_packages = int(arg_value)
            # BUGFIX: the range check must happen inside the try block.
            # Previously a non-numeric value set max_packages to None and
            # the following 'max_packages < 0' raised TypeError on
            # Python 3 instead of printing the error message below.
            if max_packages < 0:
                max_packages = None
        except ValueError:
            max_packages = None
        if max_packages is None:
            print('Error: Max. packages value "{}" is invalid. Must be a positive number or 0.'.format(arg_value))
            sys.exit(1)
        # 0 means infinitely, no counting needed.
        if max_packages == 0:
            max_packages = None
    elif arg == '--help':
        print('Donate CPU to Cppcheck project')
        print('')
        print('Syntax: donate-cpu.py [-jN] [--stop-time=HH:MM] [--work-path=path]')
        print(' -jN                  Use N threads in compilation/analysis. Default is 1.')
        print(' --package=url        Check a specific package and then stop. Can be useful if you want to reproduce')
        print('                      some warning/crash/exception/etc..')
        print(' --stop-time=HH:MM    Stop analysis when time has passed. Default is that you must terminate the script.')
        print(' --work-path=path     Work folder path. Default path is ' + workpath)
        print(' --bandwidth-limit=limit Limit download rate for packages. Format for limit is the same that wget uses.')
        print(' Examples:             --bandwidth-limit=250k => max. 250 kilobytes per second')
        print('                       --bandwidth-limit=2m => max. 2 megabytes per second')
        print(' --max-packages=N     Process N packages and then exit. A value of 0 means infinitely.')
        print('')
        print('Quick start: just run this script without any arguments')
        sys.exit(0)
    else:
        print('Unhandled argument: ' + arg)
        sys.exit(1)
|
|
|
|
2018-08-23 21:31:02 +02:00
|
|
|
# ---- Startup checks before entering the work loop ----
print('Thank you!')
if not checkRequirements():
    sys.exit(1)
if bandwidth_limit and isinstance(bandwidth_limit, str):
    # Let wget validate the limit value itself (--spider downloads nothing);
    # wget exit status 2 means a command-line/parse error.
    # BUGFIX: compare with '==', not 'is' - identity comparison with an int
    # literal is implementation dependent (and a SyntaxWarning on newer Pythons).
    if subprocess.call(['wget', '--limit-rate=' + bandwidth_limit, '-q', '--spider', 'cppcheck.osuosl.org']) == 2:
        print('Error: Bandwidth limit value "' + bandwidth_limit + '" is invalid.')
        sys.exit(1)
    else:
        print('Bandwidth-limit: ' + bandwidth_limit)
if max_packages:
    print('Maximum number of packages to download and analyze: {}'.format(max_packages))
if not os.path.exists(workpath):
    os.mkdir(workpath)
cppcheckPath = workpath + '/cppcheck'
packages_processed = 0
2018-08-23 21:31:02 +02:00
|
|
|
# ---- Main work loop: update/build cppcheck, fetch a package, analyze it,
# upload the results, repeat ----
while True:
    # Honor --max-packages: stop once the requested count is reached.
    if max_packages:
        if packages_processed >= max_packages:
            print('Processed the specified number of {} package(s). Exiting now.'.format(max_packages))
            break
        else:
            print('Processing package {} of the specified {} package(s).'.format(packages_processed + 1, max_packages))
        packages_processed += 1
    # Honor --stop-time: exit once the wall clock passes the given HH:MM.
    if stopTime:
        print('stopTime:' + stopTime + '. Time:' + time.strftime('%H:%M') + '.')
        if stopTime < time.strftime('%H:%M'):
            print('Stopping. Thank you!')
            sys.exit(0)
    if not getCppcheck(cppcheckPath):
        print('Failed to clone Cppcheck, retry later')
        sys.exit(1)
    # The server decides which cppcheck versions are compared (e.g. head vs release).
    cppcheckVersions = getCppcheckVersions(server_address)
    if cppcheckVersions is None:
        print('Failed to communicate with server, retry later')
        sys.exit(1)
    for ver in cppcheckVersions:
        if ver == 'head':
            if not compile(cppcheckPath, jobs):
                print('Failed to compile Cppcheck, retry later')
                sys.exit(1)
        elif not compile_version(workpath, jobs, ver):
            print('Failed to compile Cppcheck-{}, retry later'.format(ver))
            sys.exit(1)
    if packageUrl:
        package = packageUrl
    else:
        package = getPackage(server_address)
        # An empty string means the server/network is down; poll until it recovers.
        while len(package) == 0:
            print("network or server might be temporarily down.. will try again in 30 seconds..")
            time.sleep(30)
            package = getPackage(server_address)
    tgz = downloadPackage(workpath, package, bandwidth_limit)
    unpackPackage(workpath, tgz)
    crash = False
    count = ''           # space-separated per-version issue counts (or 'Crash!')
    elapsedTime = ''     # space-separated per-version scan durations
    resultsToDiff = []   # per-version issue message blobs, in cppcheckVersions order
    cppcheck_options = ''
    head_info_msg = ''
    for ver in cppcheckVersions:
        if ver == 'head':
            current_cppcheck_dir = 'cppcheck'
        else:
            current_cppcheck_dir = ver
        c, errout, info, t, cppcheck_options = scanPackage(workpath, current_cppcheck_dir, jobs, False)
        if c < 0:
            crash = True
            count += ' Crash!'
        else:
            count += ' ' + str(c)
        elapsedTime += " {:.1f}".format(t)
        resultsToDiff.append(errout)
        if ver == 'head':
            head_info_msg = info

            # Fast results
            # NOTE(review): the --experimental-fast comparison scan appears to
            # run only for the 'head' version - confirm nesting against upstream.
            fast_c, fast_errout, fast_info, fast_t, fast_cppcheck_options = scanPackage(workpath, current_cppcheck_dir, jobs, True)
            if c > 0 and errout and fast_errout:
                output = 'FAST\n'
                output += 'elapsed-time: %.1f %.1f' % (t, fast_t)
                output += '\ndiff:\n'
                output += diffResults(workpath, 'head', errout, 'fast', fast_errout)
                uploadResults(package, output, server_address)

    # Decide what (if anything) is worth uploading for this package.
    results_exist = True
    if len(resultsToDiff[0]) + len(resultsToDiff[1]) == 0:
        results_exist = False
    info_exists = True
    if len(head_info_msg) == 0:
        info_exists = False
    if not crash and not results_exist and not info_exists:
        print('No results')
        continue
    # Assemble the report header shared by both uploads.
    output = 'cppcheck-options: ' + cppcheck_options + '\n'
    output += 'platform: ' + platform.platform() + '\n'
    output += 'python: ' + platform.python_version() + '\n'
    output += 'client-version: ' + CLIENT_VERSION + '\n'
    output += 'cppcheck: ' + ' '.join(cppcheckVersions) + '\n'
    output += 'count:' + count + '\n'
    output += 'elapsed-time:' + elapsedTime + '\n'
    info_output = output
    info_output += 'info messages:\n' + head_info_msg
    if 'head' in cppcheckVersions:
        output += 'head results:\n' + resultsToDiff[cppcheckVersions.index('head')]
    if not crash:
        output += 'diff:\n' + diffResults(workpath, cppcheckVersions[0], resultsToDiff[0], cppcheckVersions[1], resultsToDiff[1]) + '\n'
    if packageUrl:
        # --package mode: print the report locally and stop after one package.
        print('=========================================================')
        print(output)
        print('=========================================================')
        break
    if crash or results_exist:
        uploadResults(package, output, server_address)
    if info_exists:
        uploadInfo(package, info_output, server_address)
    # Be nice to the server between packages.
    if not max_packages or packages_processed < max_packages:
        print('Sleep 5 seconds..')
        time.sleep(5)