#!/usr/bin/env python3
# Donate CPU
#
# A script a user can run to donate CPU to cppcheck project
#
# Syntax: donate-cpu.py [-jN] [--package=url] [--stop-time=HH:MM] [--work-path=path] [--test] [--bandwidth-limit=limit]
#  -jN                      Use N threads in compilation/analysis. Default is 1.
#  --package=url            Check a specific package and then stop. Can be useful if you want to reproduce
#                           some warning/crash/exception/etc..
#  --stop-time=HH:MM        Stop analysis when time has passed. Default is that you must terminate the script.
#  --work-path=path         Work folder path. Default path is cppcheck-donate-cpu-workfolder in your home folder.
#  --test                   Connect to a donate-cpu-server that is running locally on port 8001 for testing.
#  --bandwidth-limit=limit  Limit download rate for packages. Format for limit is the same that wget uses.
#                           Examples: --bandwidth-limit=250k => max. 250 kilobytes per second
#                                     --bandwidth-limit=2m   => max. 2 megabytes per second
#  --max-packages=N         Process N packages and then exit. A value of 0 means infinitely.
#  --no-upload              Do not upload anything. Defaults to False.
#
# What this script does:
# 1. Check requirements
# 2. Pull & compile Cppcheck
# 3. Select a package
# 4. Download package
# 5. Analyze source code
# 6. Upload results
# 7. Repeat from step 2
#
# Quick start: just run this script without any arguments
|
|
import os
import platform
import re
import shutil
import socket
import subprocess
import sys
import tarfile
import time

from donate_cpu_lib import *
|
2018-08-23 21:31:02 +02:00
|
|
|
|
2018-08-29 11:08:56 +02:00
|
|
|
# Parse command-line arguments. The settings assigned here (stop_time, jobs,
# package_url, work_path, server_address, bandwidth_limit, max_packages,
# do_upload) have their defaults provided by donate_cpu_lib (star-imported above).
for arg in sys.argv[1:]:
    # --stop-time=12:00 => run until ~12:00 and then stop
    if arg.startswith('--stop-time='):
        # Take everything after '=' instead of the last 5 characters so that
        # single-digit hours (e.g. --stop-time=9:30) are parsed correctly.
        stop_time = arg[arg.find('=')+1:]
        print('Stop time:' + stop_time)
    elif arg.startswith('-j'):
        # Anchor the pattern at the end too: re.match() only anchors at the
        # start, so r'-j\d+' alone would accept trailing junk like '-j2x'.
        if not re.match(r'-j\d+$', arg):
            print('Argument "{}" is invalid.'.format(arg))
            print('"-j" must be followed by a positive number.')
            sys.exit(1)
        # Keep the whole '-jN' string; it is passed verbatim to the compile
        # and scan helpers.
        jobs = arg
        print('Jobs:' + jobs[2:])
    elif arg.startswith('--package='):
        package_url = arg[arg.find('=')+1:]
        print('Package:' + package_url)
    elif arg.startswith('--work-path='):
        work_path = arg[arg.find('=')+1:]
        print('work_path:' + work_path)
        if not os.path.exists(work_path):
            print('work path does not exist!')
            sys.exit(1)
    elif arg == '--test':
        # Talk to a donate-cpu-server running locally on port 8001.
        server_address = ('localhost', 8001)
    elif arg.startswith('--bandwidth-limit='):
        # Validated later by letting wget parse the value.
        bandwidth_limit = arg[arg.find('=')+1:]
    elif arg.startswith('--max-packages='):
        arg_value = arg[arg.find('=')+1:]
        try:
            max_packages = int(arg_value)
            # Negative values are just as invalid as non-numeric ones.
            # (Checking inside the try block avoids comparing None < 0,
            # which would raise TypeError on Python 3.)
            if max_packages < 0:
                max_packages = None
        except ValueError:
            max_packages = None
        if max_packages is None:
            print('Error: Max. packages value "{}" is invalid. Must be a positive number or 0.'.format(arg_value))
            sys.exit(1)
        # 0 means infinitely, no counting needed.
        if max_packages == 0:
            max_packages = None
    elif arg == '--no-upload':
        # Exact match: startswith() would silently accept '--no-uploadXYZ'.
        do_upload = False
    elif arg == '--help':
        print('Donate CPU to Cppcheck project')
        print('')
        print('Syntax: donate-cpu.py [-jN] [--stop-time=HH:MM] [--work-path=path]')
        print(' -jN Use N threads in compilation/analysis. Default is 1.')
        print(' --package=url Check a specific package and then stop. Can be useful if you want to reproduce')
        print(' some warning/crash/exception/etc..')
        print(' --stop-time=HH:MM Stop analysis when time has passed. Default is that you must terminate the script.')
        print(' --work-path=path Work folder path. Default path is ' + work_path)
        print(' --bandwidth-limit=limit Limit download rate for packages. Format for limit is the same that wget uses.')
        print(' Examples: --bandwidth-limit=250k => max. 250 kilobytes per second')
        print(' --bandwidth-limit=2m => max. 2 megabytes per second')
        print(' --max-packages=N Process N packages and then exit. A value of 0 means infinitely.')
        print(' --no-upload Do not upload anything. Defaults to False.')
        print('')
        print('Quick start: just run this script without any arguments')
        sys.exit(0)
    else:
        print('Unhandled argument: ' + arg)
        sys.exit(1)
|
|
|
|
|
2020-01-17 12:23:07 +01:00
|
|
|
# Refuse to run on interpreters older than Python 3.4; tuple comparison of
# sys.version_info is equivalent to checking major/minor individually.
if sys.version_info < (3, 4):
    banner = "#" * 80
    print(banner)
    print("IMPORTANT")
    print("Please run the client with at least Python 3.4, thanks!")
    print(banner)
    # Give the user a moment to notice the message before exiting.
    time.sleep(2)
    sys.exit(1)
|
2019-11-09 18:18:57 +01:00
|
|
|
|
2018-08-23 21:31:02 +02:00
|
|
|
# Greet the donor, then verify the environment before doing any real work.
print('Thank you!')
if not check_requirements():
    sys.exit(1)
# Validate a user-supplied bandwidth limit by letting wget parse it; wget
# exits with status 2 when it rejects its own command line.
if isinstance(bandwidth_limit, str) and bandwidth_limit:
    limit_check = subprocess.call(['wget', '--limit-rate=' + bandwidth_limit, '-q', '--spider', 'cppcheck1.osuosl.org'])
    if limit_check == 2:
        print('Error: Bandwidth limit value "' + bandwidth_limit + '" is invalid.')
        sys.exit(1)
    print('Bandwidth-limit: ' + bandwidth_limit)
|
2019-06-30 12:43:05 +02:00
|
|
|
# A specific --package implies exactly one package run.
if package_url:
    max_packages = 1
if max_packages:
    print('Maximum number of packages to download and analyze: {}'.format(max_packages))
# Create the work folder on first run.
if not os.path.exists(work_path):
    os.mkdir(work_path)
packages_processed = 0
cppcheck_path = os.path.join(work_path, 'cppcheck')
|
2018-08-23 21:31:02 +02:00
|
|
|
# Main processing loop: fetch a package, build/refresh the requested cppcheck
# versions, scan the package with each of them, and report the results.
# Runs until interrupted, a fatal error occurs, or a configured stop
# condition (--package / --max-packages / --stop-time) is reached.
while True:
    # Honour --max-packages: every iteration counts as one processed
    # package, even if a later download/unpack step fails and 'continue's.
    if max_packages:
        if packages_processed >= max_packages:
            print('Processed the specified number of {} package(s). Exiting now.'.format(max_packages))
            break
        print('Processing package {} of the specified {} package(s).'.format(packages_processed + 1, max_packages))
        packages_processed += 1
    # Honour --stop-time via a plain HH:MM string comparison with local time.
    if stop_time:
        print('stop_time:' + stop_time + '. Time:' + time.strftime('%H:%M') + '.')
        if stop_time < time.strftime('%H:%M'):
            print('Stopping. Thank you!')
            sys.exit(0)
    # Update the cppcheck checkout inside the work folder.
    if not get_cppcheck(cppcheck_path, work_path):
        print('Failed to clone Cppcheck, retry later')
        sys.exit(1)
    # Ask the server which cppcheck versions to compare (e.g. 'head' plus a
    # release version).
    cppcheck_versions = get_cppcheck_versions(server_address)
    if cppcheck_versions is None:
        print('Failed to communicate with server, retry later')
        sys.exit(1)
    if len(cppcheck_versions) == 0:
        print('Did not get any cppcheck versions from server, retry later')
        sys.exit(1)
    # Make sure a binary exists for every requested version.
    for ver in cppcheck_versions:
        if ver == 'head':
            if not compile(cppcheck_path, jobs):
                print('Failed to compile Cppcheck, retry later')
                sys.exit(1)
        elif not compile_version(work_path, jobs, ver):
            print('Failed to compile Cppcheck-{}, retry later'.format(ver))
            sys.exit(1)
    # Select the package: either the one given via --package or one assigned
    # by the server, retrying while the server/network is down.
    if package_url:
        package = package_url
    else:
        package = get_package(server_address)
    while len(package) == 0:
        print("network or server might be temporarily down.. will try again in 30 seconds..")
        time.sleep(30)
        package = get_package(server_address)
    tgz = download_package(work_path, package, bandwidth_limit)
    if tgz is None:
        print("No package downloaded")
        continue
    if not unpack_package(work_path, tgz):
        print("No files to process")
        continue
    # Per-package accumulators; 'count' and 'elapsed_time' collect one
    # space-separated entry per scanned cppcheck version.
    crash = False
    count = ''
    elapsed_time = ''
    results_to_diff = []
    cppcheck_options = ''
    head_info_msg = ''
    head_timing_info = ''
    old_timing_info = ''
    cppcheck_head_info = ''
    libraries = get_libraries()

    # Scan the unpacked package once per cppcheck version.
    for ver in cppcheck_versions:
        if ver == 'head':
            current_cppcheck_dir = 'cppcheck'
            cppcheck_head_info = get_cppcheck_info(work_path + '/cppcheck')
        else:
            current_cppcheck_dir = ver
        c, errout, info, t, cppcheck_options, timing_info = scan_package(work_path, current_cppcheck_dir, jobs, libraries)
        # A negative return code signals a failed scan.
        if c < 0:
            if c == -101 and 'error: could not find or open any of the paths given.' in errout:
                # No sourcefile found (for example only headers present)
                count += ' 0'
            else:
                crash = True
                count += ' Crash!'
        else:
            count += ' ' + str(c)
        elapsed_time += " {:.1f}".format(t)
        results_to_diff.append(errout)
        if ver == 'head':
            head_info_msg = info
            head_timing_info = timing_info
        else:
            old_timing_info = timing_info

    # Assemble the reports. 'output' is the main result upload;
    # 'info_output' shares the same header but carries the info messages
    # instead of the results/diff.
    output = 'cppcheck-options: ' + cppcheck_options + '\n'
    output += 'platform: ' + platform.platform() + '\n'
    output += 'python: ' + platform.python_version() + '\n'
    output += 'client-version: ' + CLIENT_VERSION + '\n'
    output += 'cppcheck: ' + ' '.join(cppcheck_versions) + '\n'
    output += 'head-info: ' + cppcheck_head_info + '\n'
    output += 'count:' + count + '\n'
    output += 'elapsed-time:' + elapsed_time + '\n'
    output += 'head-timing-info:\n' + head_timing_info + '\n'
    output += 'old-timing-info:\n' + old_timing_info + '\n'
    info_output = output
    info_output += 'info messages:\n' + head_info_msg
    if 'head' in cppcheck_versions:
        output += 'head results:\n' + results_to_diff[cppcheck_versions.index('head')]
    # Only diff the result sets when no version crashed.
    # NOTE(review): indexing [0] and [1] assumes at least two versions were
    # scanned — presumably guaranteed by the server; confirm against
    # get_cppcheck_versions().
    if not crash:
        output += 'diff:\n' + diff_results(work_path, cppcheck_versions[0], results_to_diff[0], cppcheck_versions[1], results_to_diff[1]) + '\n'
    # In --package mode print the full report locally as well.
    if package_url:
        print('=========================================================')
        print(output)
        print('=========================================================')
        print(info_output)
        print('=========================================================')
    if do_upload:
        upload_results(package, output, server_address)
        upload_info(package, info_output, server_address)
    # Small pause between packages, skipped after the final one.
    if not max_packages or packages_processed < max_packages:
        print('Sleep 5 seconds..')
        time.sleep(5)
|