cleaned up global `donate_cpu_lib.py` configuration variables (#4532)

Author: Oliver Stöneberg, 2022-10-06 20:49:47 +02:00 (committed by GitHub)
parent 7ead32f96e
commit 12afb9bbf4
3 changed files with 87 additions and 72 deletions
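
In short, this commit moves the client-side configuration (work path, package URLs, upload flag, bandwidth limit, stop time) into donate_cpu.py itself and turns the remaining donate_cpu_lib.py settings into module-private state that is changed only through setter functions. A minimal sketch of the resulting library-side pattern, based on the hunks below (simplified excerpt, not the full module):

    # donate_cpu_lib.py: configuration is kept in module-private variables ...
    __jobs = '-j1'                                     # build/scan parallelism, e.g. '-j4'
    __server_address = ('cppcheck1.osuosl.org', 8000)  # daca@home server

    # ... and is modified only through dedicated setters.
    def set_jobs(jobs: str):
        global __jobs
        __jobs = jobs

    def set_server_address(server_address):
        global __server_address
        __server_address = server_address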

View File

@ -31,12 +31,22 @@
# Quick start: just run this script without any arguments
import platform
import os
import sys
import re
import time
import subprocess
import donate_cpu_lib as lib
from packaging.version import Version
from donate_cpu_lib import *
__my_script_name = os.path.splitext(os.path.basename(sys.argv[0]))[0]
work_path = os.path.expanduser(os.path.join('~', 'cppcheck-' + __my_script_name + '-workfolder'))
max_packages = None
package_urls = []
do_upload = True
bandwidth_limit = None
stop_time = None
for arg in sys.argv[1:]:
# --stop-time=12:00 => run until ~12:00 and then stop
@ -48,8 +58,8 @@ for arg in sys.argv[1:]:
print('Argument "{}" is invalid.'.format(arg))
print('"-j" must be followed by a positive number.')
sys.exit(1)
jobs = arg
print('Jobs:' + jobs[2:])
print('Jobs:' + arg[2:])
lib.set_jobs(arg)
elif arg.startswith('--package='):
pkg = arg[arg.find('=')+1:]
package_urls.append(pkg)
@ -70,7 +80,7 @@ for arg in sys.argv[1:]:
print('work path does not exist!')
sys.exit(1)
elif arg == '--test':
server_address = ('localhost', 8001)
lib.set_server_address(('localhost', 8001))
elif arg.startswith('--bandwidth-limit='):
bandwidth_limit = arg[arg.find('=')+1:]
elif arg.startswith('--max-packages='):
@ -90,7 +100,7 @@ for arg in sys.argv[1:]:
elif arg.startswith('--no-upload'):
do_upload = False
elif arg == '--version':
print(get_client_version())
print(lib.get_client_version())
sys.exit(0)
elif arg == '--help':
print('Donate CPU to Cppcheck project')
@ -124,7 +134,7 @@ if sys.version_info.major < 3 or (sys.version_info.major == 3 and sys.version_in
sys.exit(1)
print('Thank you!')
if not check_requirements():
if not lib.check_requirements():
sys.exit(1)
if bandwidth_limit and isinstance(bandwidth_limit, str):
if subprocess.call(['wget', '--limit-rate=' + bandwidth_limit, '-q', '--spider', 'cppcheck1.osuosl.org']) == 2:
@ -146,9 +156,9 @@ packages_processed = 0
print('Get Cppcheck..')
try:
try_retry(clone_cppcheck, fargs=(repo_path, migrate_repo_path))
except:
print('Error: Failed to clone Cppcheck, retry later')
lib.try_retry(lib.clone_cppcheck, fargs=(repo_path, migrate_repo_path))
except Exception as e:
print('Error: Failed to clone Cppcheck ({}), retry later'.format(e))
sys.exit(1)
while True:
@ -163,7 +173,7 @@ while True:
if stop_time < time.strftime('%H:%M'):
print('Stopping. Thank you!')
sys.exit(0)
cppcheck_versions = get_cppcheck_versions(server_address)
cppcheck_versions = lib.get_cppcheck_versions()
if cppcheck_versions is None:
print('Failed to communicate with server, retry later')
sys.exit(1)
@ -176,26 +186,26 @@ while True:
current_cppcheck_dir = os.path.join(work_path, 'tree-'+ver)
try:
print('Fetching Cppcheck-{}..'.format(ver))
try_retry(checkout_cppcheck_version, fargs=(repo_path, ver, current_cppcheck_dir))
lib.try_retry(lib.checkout_cppcheck_version, fargs=(repo_path, ver, current_cppcheck_dir))
except KeyboardInterrupt as e:
# Passthrough for user abort
raise e
except:
print('Failed to update Cppcheck, retry later')
except Exception as e:
print('Failed to update Cppcheck ({}), retry later'.format(e))
sys.exit(1)
if ver == 'main':
if not compile_cppcheck(current_cppcheck_dir, jobs):
if not lib.compile_cppcheck(current_cppcheck_dir):
print('Failed to compile Cppcheck-{}, retry later'.format(ver))
sys.exit(1)
else:
if not compile_version(current_cppcheck_dir, jobs):
if not lib.compile_version(current_cppcheck_dir):
print('Failed to compile Cppcheck-{}, retry later'.format(ver))
sys.exit(1)
if package_urls:
package = package_urls[packages_processed-1]
else:
package = get_package(server_address)
tgz = download_package(work_path, package, bandwidth_limit)
package = lib.get_package()
tgz = lib.download_package(work_path, package, bandwidth_limit)
if tgz is None:
print("No package downloaded")
continue
@ -203,7 +213,7 @@ while True:
if package.find('/qtcreator/') > 0:
# macro_pounder_fn.c is a preprocessor torture test that takes time to finish
skip_files = ('macro_pounder_fn.c',)
source_path, source_found = unpack_package(work_path, tgz, skip_files=skip_files)
source_path, source_found = lib.unpack_package(work_path, tgz, skip_files=skip_files)
if not source_found:
print("No files to process")
continue
@ -218,14 +228,14 @@ while True:
old_timing_info = ''
cppcheck_head_info = ''
client_version_head = ''
libraries = library_includes.get_libraries(source_path)
libraries = lib.library_includes.get_libraries(source_path)
for ver in cppcheck_versions:
tree_path = os.path.join(work_path, 'tree-'+ver)
capture_callstack = False
if ver == 'head':
tree_path = os.path.join(work_path, 'tree-main')
cppcheck_head_info = get_cppcheck_info(tree_path)
cppcheck_head_info = lib.get_cppcheck_info(tree_path)
capture_callstack = True
def get_client_version_head():
@ -238,12 +248,12 @@ while True:
return None
client_version_head = get_client_version_head()
c, errout, info, t, cppcheck_options, timing_info = scan_package(tree_path, source_path, jobs, libraries, capture_callstack)
c, errout, info, t, cppcheck_options, timing_info = lib.scan_package(tree_path, source_path, libraries, capture_callstack)
if c < 0:
if c == -101 and 'error: could not find or open any of the paths given.' in errout:
# No sourcefile found (for example only headers present)
count += ' 0'
elif c == RETURN_CODE_TIMEOUT:
elif c == lib.RETURN_CODE_TIMEOUT:
# Timeout
count += ' TO!'
timeout = True
@ -263,8 +273,8 @@ while True:
output = 'cppcheck-options: ' + cppcheck_options + '\n'
output += 'platform: ' + platform.platform() + '\n'
output += 'python: ' + platform.python_version() + '\n'
output += 'client-version: ' + get_client_version() + '\n'
output += 'compiler: ' + get_compiler_version() + '\n'
output += 'client-version: ' + lib.get_client_version() + '\n'
output += 'compiler: ' + lib.get_compiler_version() + '\n'
output += 'cppcheck: ' + ' '.join(cppcheck_versions) + '\n'
output += 'head-info: ' + cppcheck_head_info + '\n'
output += 'count:' + count + '\n'
@ -276,7 +286,7 @@ while True:
if 'head' in cppcheck_versions:
output += 'head results:\n' + results_to_diff[cppcheck_versions.index('head')]
if not crash and not timeout:
output += 'diff:\n' + diff_results(cppcheck_versions[0], results_to_diff[0], cppcheck_versions[1], results_to_diff[1]) + '\n'
output += 'diff:\n' + lib.diff_results(cppcheck_versions[0], results_to_diff[0], cppcheck_versions[1], results_to_diff[1]) + '\n'
if package_urls:
print('=========================================================')
print(output)
@ -284,10 +294,10 @@ while True:
print(info_output)
print('=========================================================')
if do_upload:
if upload_results(package, output, server_address):
upload_info(package, info_output, server_address)
if lib.upload_results(package, output):
lib.upload_info(package, info_output)
if not max_packages or packages_processed < max_packages:
print('Sleep 5 seconds..')
if (client_version_head is not None) and (Version(client_version_head) > Version(get_client_version())):
if (client_version_head is not None) and (Version(client_version_head) > Version(lib.get_client_version())):
print("ATTENTION: A newer client version ({}) is available - please update!".format(client_version_head))
time.sleep(5)
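
With that diff applied, donate_cpu.py configures the library once up front instead of threading jobs and server_address through every call. A rough sketch of the resulting call pattern, with argument parsing and error handling omitted and the work folder name assumed for illustration:

    import os
    import donate_cpu_lib as lib

    work_path = os.path.expanduser(os.path.join('~', 'cppcheck-donate-cpu-workfolder'))  # assumed name
    lib.set_jobs('-j4')                          # e.g. from a '-j4' command-line argument
    lib.set_server_address(('localhost', 8001))  # only when --test is given

    versions = lib.get_cppcheck_versions()       # no server_address parameter any more
    package = lib.get_package()
    tgz = lib.download_package(work_path, package, None)  # no bandwidth limit in this sketch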

View File

@ -15,7 +15,7 @@ import shlex
# Version scheme (MAJOR.MINOR.PATCH) should orientate on "Semantic Versioning" https://semver.org/
# Every change in this script should result in increasing the version number accordingly (exceptions may be cosmetic
# changes)
CLIENT_VERSION = "1.3.36"
CLIENT_VERSION = "1.3.37"
# Timeout for analysis with Cppcheck in seconds
CPPCHECK_TIMEOUT = 30 * 60
@ -25,6 +25,8 @@ CPPCHECK_REPO_URL = "https://github.com/danmar/cppcheck.git"
# Return code that is used to mark a timed out analysis
RETURN_CODE_TIMEOUT = -999
__jobs = '-j1'
__server_address = ('cppcheck1.osuosl.org', 8000)
__make_cmd = None
def detect_make():
@ -146,7 +148,7 @@ def get_cppcheck_info(cppcheck_path):
return ''
def compile_version(cppcheck_path, jobs):
def compile_version(cppcheck_path):
if __make_cmd == "msbuild.exe":
if os.path.isfile(os.path.join(cppcheck_path, 'bin', 'cppcheck.exe')):
return True
@ -156,7 +158,7 @@ def compile_version(cppcheck_path, jobs):
elif os.path.isfile(os.path.join(cppcheck_path, 'cppcheck')):
return True
# Build
ret = compile_cppcheck(cppcheck_path, jobs)
ret = compile_cppcheck(cppcheck_path)
# Clean intermediate build files
if __make_cmd == "msbuild.exe":
exclude_bin = 'bin'
@ -169,18 +171,18 @@ def compile_version(cppcheck_path, jobs):
return ret
def compile_cppcheck(cppcheck_path, jobs):
def compile_cppcheck(cppcheck_path):
print('Compiling {}'.format(os.path.basename(cppcheck_path)))
try:
if __make_cmd == 'msbuild.exe':
subprocess.check_call(['python3', os.path.join('tools', 'matchcompiler.py'), '--write-dir', 'lib'], cwd=cppcheck_path)
build_env = os.environ
# append to cl.exe options - need to omit dash or slash since a dash is being prepended
build_env["_CL_"] = jobs.replace('j', 'MP', 1)
build_env["_CL_"] = __jobs.replace('j', 'MP', 1)
# TODO: processes still exhaust all threads of the system
subprocess.check_call([__make_cmd, '-t:cli', os.path.join(cppcheck_path, 'cppcheck.sln'), '/property:Configuration=Release;Platform=x64'], cwd=cppcheck_path, env=build_env)
else:
build_cmd = [__make_cmd, jobs, 'MATCHCOMPILER=yes', 'CXXFLAGS=-O2 -g -w']
build_cmd = [__make_cmd, __jobs, 'MATCHCOMPILER=yes', 'CXXFLAGS=-O2 -g -w']
build_env = os.environ
if __make_cmd == 'mingw32-make':
# TODO: MinGW will always link even if no changes are present
@ -206,11 +208,11 @@ def compile_cppcheck(cppcheck_path, jobs):
return True
def get_cppcheck_versions(server_address):
def get_cppcheck_versions():
print('Connecting to server to get Cppcheck versions..')
try:
with socket.socket(socket.AF_INET, socket.SOCK_STREAM) as sock:
sock.connect(server_address)
sock.connect(__server_address)
sock.send(b'GetCppcheckVersions\n')
versions = sock.recv(256)
except socket.error as err:
@ -219,11 +221,11 @@ def get_cppcheck_versions(server_address):
return versions.decode('utf-8').split()
def get_packages_count(server_address):
def get_packages_count():
print('Connecting to server to get count of packages..')
try:
with socket.socket(socket.AF_INET, socket.SOCK_STREAM) as sock:
sock.connect(server_address)
sock.connect(__server_address)
sock.send(b'getPackagesCount\n')
packages = int(sock.recv(64))
except socket.error as err:
@ -232,13 +234,13 @@ def get_packages_count(server_address):
return packages
def get_package(server_address, package_index=None):
def get_package(package_index=None):
package = b''
while not package:
print('Connecting to server to get assigned work..')
try:
with socket.socket(socket.AF_INET, socket.SOCK_STREAM) as sock:
sock.connect(server_address)
sock.connect(__server_address)
if package_index is None:
sock.send(b'get\n')
else:
@ -362,7 +364,7 @@ def unpack_package(work_path, tgz, cpp_only=False, c_only=False, skip_files=None
def __run_command(cmd, print_cmd=True):
if print_cmd:
print(cmd)
start_time = time.time()
time_start = time.time()
comm = None
if sys.platform == 'win32':
p = subprocess.Popen(shlex.split(cmd, comments=False, posix=False), stdout=subprocess.PIPE, stderr=subprocess.PIPE, universal_newlines=True)
@ -390,13 +392,13 @@ def __run_command(cmd, print_cmd=True):
if p:
os.killpg(os.getpgid(p.pid), signal.SIGTERM) # Send the signal to all the process groups
comm = p.communicate()
stop_time = time.time()
time_stop = time.time()
stdout, stderr = comm
elapsed_time = stop_time - start_time
elapsed_time = time_stop - time_start
return return_code, stdout, stderr, elapsed_time
def scan_package(cppcheck_path, source_path, jobs, libraries, capture_callstack=True):
def scan_package(cppcheck_path, source_path, libraries, capture_callstack=True):
print('Analyze..')
libs = ''
for library in libraries:
@ -412,13 +414,13 @@ def scan_package(cppcheck_path, source_path, jobs, libraries, capture_callstack=
options_rp = options + ' -rp={}'.format(dir_to_scan)
if __make_cmd == 'msbuild.exe':
cppcheck_cmd = os.path.join(cppcheck_path, 'bin', 'cppcheck.exe') + ' ' + options_rp
cmd = cppcheck_cmd + ' ' + jobs + ' ' + dir_to_scan
cmd = cppcheck_cmd + ' ' + __jobs + ' ' + dir_to_scan
else:
nice_cmd = 'nice'
if __make_cmd == 'mingw32-make':
nice_cmd = ''
cppcheck_cmd = os.path.join(cppcheck_path, 'cppcheck') + ' ' + options_rp
cmd = nice_cmd + ' ' + cppcheck_cmd + ' ' + jobs + ' ' + dir_to_scan
cmd = nice_cmd + ' ' + cppcheck_cmd + ' ' + __jobs + ' ' + dir_to_scan
returncode, stdout, stderr, elapsed_time = __run_command(cmd)
# collect messages
@ -471,7 +473,7 @@ def scan_package(cppcheck_path, source_path, jobs, libraries, capture_callstack=
break
print('cppcheck finished with ' + str(returncode) + ('' if sig_num == -1 else ' (signal ' + str(sig_num) + ')'))
options_j = options + ' ' + jobs
options_j = options + ' ' + __jobs
if returncode == RETURN_CODE_TIMEOUT:
print('Timeout!')
@ -584,7 +586,7 @@ def __send_all(connection, data):
bytes_ = None
def upload_results(package, results, server_address):
def upload_results(package, results):
if not __make_cmd == 'make':
print('Error: Result upload not performed - only make build binaries are currently fully supported')
return False
@ -594,7 +596,7 @@ def upload_results(package, results, server_address):
for retry in range(max_retries):
try:
with socket.socket(socket.AF_INET, socket.SOCK_STREAM) as sock:
sock.connect(server_address)
sock.connect(__server_address)
cmd = 'write\n'
__send_all(sock, cmd + package + '\n' + results + '\nDONE')
print('Results have been successfully uploaded.')
@ -608,7 +610,7 @@ def upload_results(package, results, server_address):
return False
def upload_info(package, info_output, server_address):
def upload_info(package, info_output):
if not __make_cmd == 'make':
print('Error: Information upload not performed - only make build binaries are currently fully supported')
return False
@ -618,7 +620,7 @@ def upload_info(package, info_output, server_address):
for retry in range(max_retries):
try:
with socket.socket(socket.AF_INET, socket.SOCK_STREAM) as sock:
sock.connect(server_address)
sock.connect(__server_address)
__send_all(sock, 'write_info\n' + package + '\n' + info_output + '\nDONE')
print('Information output has been successfully uploaded.')
return True
@ -721,13 +723,13 @@ def get_client_version():
return CLIENT_VERSION
my_script_name = os.path.splitext(os.path.basename(sys.argv[0]))[0]
jobs = '-j1'
stop_time = None
work_path = os.path.expanduser(os.path.join('~', 'cppcheck-' + my_script_name + '-workfolder'))
package_url = None
server_address = ('cppcheck1.osuosl.org', 8000)
bandwidth_limit = None
max_packages = None
do_upload = True
def set_server_address(server_address):
global __server_address
__server_address = server_address
def set_jobs(jobs: str):
global __jobs
__jobs = jobs
library_includes = LibraryIncludes()
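
To recap the library-side API change recorded above: the former jobs and server_address parameters are gone from the public functions, which now read the module-private values set via set_jobs() and set_server_address(). Signature changes in this diff, summarized in comment form for reference:

    # donate_cpu_lib.py signature changes (before -> after)
    # compile_cppcheck(cppcheck_path, jobs)              -> compile_cppcheck(cppcheck_path)
    # compile_version(cppcheck_path, jobs)               -> compile_version(cppcheck_path)
    # scan_package(cppcheck_path, source_path, jobs, libraries, capture_callstack)
    #                                                    -> scan_package(cppcheck_path, source_path, libraries, capture_callstack)
    # get_cppcheck_versions(server_address)              -> get_cppcheck_versions()
    # get_packages_count(server_address)                 -> get_packages_count()
    # get_package(server_address, package_index=None)    -> get_package(package_index=None)
    # upload_results(package, results, server_address)   -> upload_results(package, results)
    # upload_info(package, info_output, server_address)  -> upload_info(package, info_output)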

View File

@ -20,6 +20,9 @@ def format_float(a, b=1):
if __name__ == "__main__":
__my_script_name = os.path.splitext(os.path.basename(sys.argv[0]))[0]
__work_path = os.path.expanduser(os.path.join('~', 'cppcheck-' + __my_script_name + '-workfolder'))
parser = argparse.ArgumentParser(description='Run this script from your branch with proposed Cppcheck patch to verify your patch against current main. It will compare output of testing bunch of opensource packages')
parser.add_argument('-j', default=1, type=int, help='Concurency execution threads')
group = parser.add_mutually_exclusive_group()
@ -28,7 +31,7 @@ if __name__ == "__main__":
parser.add_argument('-o', default='my_check_diff.log', help='Filename of result inside a working path dir')
parser.add_argument('--c-only', dest='c_only', help='Only process c packages', action='store_true')
parser.add_argument('--cpp-only', dest='cpp_only', help='Only process c++ packages', action='store_true')
parser.add_argument('--work-path', '--work-path=', default=lib.work_path, type=str, help='Working directory for reference repo')
parser.add_argument('--work-path', '--work-path=', default=__work_path, type=str, help='Working directory for reference repo')
args = parser.parse_args()
print(args)
@ -44,7 +47,7 @@ if __name__ == "__main__":
old_repo_dir = os.path.join(work_path, 'cppcheck')
main_dir = os.path.join(work_path, 'tree-main')
jobs = '-j' + str(args.j)
lib.set_jobs('-j' + str(args.j))
result_file = os.path.join(work_path, args.o)
(f, ext) = os.path.splitext(result_file)
timing_file = f + '_timing' + ext
@ -58,15 +61,15 @@ if __name__ == "__main__":
try:
lib.clone_cppcheck(repo_dir, old_repo_dir)
pass
except:
print('Failed to clone Cppcheck repository, retry later')
except Exception as e:
print('Failed to clone Cppcheck repository ({}), retry later'.format(e))
sys.exit(1)
try:
lib.checkout_cppcheck_version(repo_dir, 'main', main_dir)
pass
except:
print('Failed to checkout main, retry later')
except Exception as e:
print('Failed to checkout main ({}), retry later'.format(e))
sys.exit(1)
try:
@ -88,12 +91,12 @@ if __name__ == "__main__":
print('Failed to switch to common ancestor of your branch and main')
sys.exit(1)
if not lib.compile_cppcheck(main_dir, jobs):
if not lib.compile_cppcheck(main_dir):
print('Failed to compile main of Cppcheck')
sys.exit(1)
print('Testing your PR from directory: ' + your_repo_dir)
if not lib.compile_cppcheck(your_repo_dir, jobs):
if not lib.compile_cppcheck(your_repo_dir):
print('Failed to compile your version of Cppcheck')
sys.exit(1)
@ -101,7 +104,7 @@ if __name__ == "__main__":
args.p = len(args.packages)
packages_idxs = []
else:
packages_count = lib.get_packages_count(lib.server_address)
packages_count = lib.get_packages_count()
if not packages_count:
print("network or server might be temporarily down..")
sys.exit(1)
@ -117,7 +120,7 @@ if __name__ == "__main__":
if args.packages:
package = args.packages.pop()
else:
package = lib.get_package(lib.server_address, packages_idxs.pop())
package = lib.get_package(packages_idxs.pop())
tgz = lib.download_package(work_path, package, None)
if tgz is None:
@ -138,7 +141,7 @@ if __name__ == "__main__":
your_timeout = False
libraries = lib.library_includes.get_libraries(source_path)
c, errout, info, time_main, cppcheck_options, timing_info = lib.scan_package(main_dir, source_path, jobs, libraries)
c, errout, info, time_main, cppcheck_options, timing_info = lib.scan_package(main_dir, source_path, libraries)
if c < 0:
if c == -101 and 'error: could not find or open any of the paths given.' in errout:
# No sourcefile found (for example only headers present)
@ -151,7 +154,7 @@ if __name__ == "__main__":
main_crashed = True
results_to_diff.append(errout)
c, errout, info, time_your, cppcheck_options, timing_info = lib.scan_package(your_repo_dir, source_path, jobs, libraries)
c, errout, info, time_your, cppcheck_options, timing_info = lib.scan_package(your_repo_dir, source_path, libraries)
if c < 0:
if c == -101 and 'error: could not find or open any of the paths given.' in errout:
# No sourcefile found (for example only headers present)
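
One more pattern that recurs in this commit, in both donate_cpu.py and the script above: bare except: clauses around the git operations were replaced by handlers that catch Exception and include the error in the message. A minimal sketch of that pattern, with the repository paths assumed for illustration:

    import sys
    import donate_cpu_lib as lib

    repo_dir = '/tmp/cppcheck'            # assumed path for this sketch
    old_repo_dir = '/tmp/cppcheck-old'    # assumed path for this sketch
    try:
        lib.clone_cppcheck(repo_dir, old_repo_dir)
    except Exception as e:                # previously a bare 'except:' with no detail
        print('Failed to clone Cppcheck repository ({}), retry later'.format(e))
        sys.exit(1)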