2019-10-26 21:10:21 +02:00
|
|
|
#!/usr/bin/env python3
|
2018-08-23 21:31:02 +02:00
|
|
|
|
|
|
|
# Server for 'donate-cpu.py'
|
2019-10-26 21:10:21 +02:00
|
|
|
# Runs only under Python 3.
|
2018-08-23 21:31:02 +02:00
|
|
|
|
2021-01-31 12:01:01 +01:00
|
|
|
import collections
|
2018-08-25 08:49:40 +02:00
|
|
|
import glob
|
2019-06-30 14:02:51 +02:00
|
|
|
import json
|
2018-08-23 21:31:02 +02:00
|
|
|
import os
|
|
|
|
import socket
|
|
|
|
import re
|
2018-08-24 13:04:25 +02:00
|
|
|
import datetime
|
2018-08-25 08:49:40 +02:00
|
|
|
import time
|
2021-02-14 16:16:28 +01:00
|
|
|
import traceback
|
2018-08-25 18:38:51 +02:00
|
|
|
from threading import Thread
|
2018-08-27 18:21:16 +02:00
|
|
|
import sys
|
2019-10-26 21:10:21 +02:00
|
|
|
import urllib.request
|
|
|
|
import urllib.parse
|
|
|
|
import urllib.error
|
2019-02-09 12:41:02 +01:00
|
|
|
import logging
|
|
|
|
import logging.handlers
|
2019-02-28 13:34:23 +01:00
|
|
|
import operator
|
2021-02-09 22:17:14 +01:00
|
|
|
import html as html_lib
|
2022-09-29 21:55:44 +02:00
|
|
|
from urllib.parse import urlparse
|
2018-08-24 13:04:25 +02:00
|
|
|
|
2019-03-29 12:12:16 +01:00
|
|
|
# Version scheme (MAJOR.MINOR.PATCH) should orientate on "Semantic Versioning" https://semver.org/
# Every change in this script should result in increasing the version number accordingly (exceptions may be cosmetic
# changes)
SERVER_VERSION = "1.3.47"

# The fixed cppcheck version whose results are compared against 'head' results
# throughout the reports below.
OLD_VERSION = '2.13.0'

# Markers that introduce the respective result sections inside uploaded
# package result files.
HEAD_MARKER = 'head results:'
INFO_MARKER = 'info messages:'


# Set up logging
logger = logging.getLogger()
logger.setLevel(logging.INFO)
# Logging to console
handler_stream = logging.StreamHandler()
logger.addHandler(handler_stream)
# Log errors to a rotating file
logfile = sys.path[0]  # directory containing this script; may be '' in some contexts
if logfile:
    logfile += '/'
logfile += 'donate-cpu-server.log'
handler_file = logging.handlers.RotatingFileHandler(filename=logfile, maxBytes=100*1024, backupCount=1)
handler_file.setFormatter(logging.Formatter('%(asctime)s %(message)s'))
handler_file.setLevel(logging.ERROR)
logger.addHandler(handler_file)
|
|
|
|
|
|
|
|
|
2022-09-29 21:55:44 +02:00
|
|
|
def print_ts(msg) -> None:
    """Print *msg* prefixed with a microsecond-resolution local timestamp."""
    stamp = datetime.datetime.now().strftime('%Y-%m-%d %H:%M:%S.%f')
    print('[{}] {}'.format(stamp, msg))
|
2022-09-22 21:20:38 +02:00
|
|
|
|
|
|
|
|
2019-02-09 12:41:02 +01:00
|
|
|
# Set up an exception hook for all uncaught exceptions so they can be logged
def handle_uncaught_exception(exc_type, exc_value, exc_traceback):
    """Log every uncaught exception; delegate KeyboardInterrupt to the default hook."""
    if not issubclass(exc_type, KeyboardInterrupt):
        logging.error("Uncaught exception", exc_info=(exc_type, exc_value, exc_traceback))
        return
    # Ctrl-C should behave as usual (print traceback to stderr and exit)
    sys.__excepthook__(exc_type, exc_value, exc_traceback)


sys.excepthook = handle_uncaught_exception
|
|
|
|
|
|
|
|
|
2019-10-26 21:10:21 +02:00
|
|
|
def strDateTime() -> str:
    """Return the current local time formatted as 'YYYY-MM-DD HH:MM'."""
    return '{:%Y-%m-%d %H:%M}'.format(datetime.datetime.now())
|
2019-03-29 12:12:16 +01:00
|
|
|
|
2018-08-24 13:04:25 +02:00
|
|
|
|
2019-10-26 21:10:21 +02:00
|
|
|
def dateTimeFromStr(datestr: str) -> datetime.datetime:
    """Parse a 'YYYY-MM-DD HH:MM' string (as produced by strDateTime())."""
    parsed = datetime.datetime.strptime(datestr, '%Y-%m-%d %H:%M')
    return parsed
|
2019-01-09 18:39:42 +01:00
|
|
|
|
2019-10-18 12:57:51 +02:00
|
|
|
|
2019-10-26 21:10:21 +02:00
|
|
|
def overviewReport() -> str:
    """Build the HTML start page linking to every available daca@home report."""
    lines = [
        '<!DOCTYPE html>',
        '<html><head><title>daca@home</title></head><body>',
        '<h1>daca@home</h1>',
        '<a href="crash.html">Crash report</a> - <a href="crash.html?pkgs=1">packages.txt</a><br>',
        '<a href="timeout.html">Timeout report</a><br>',
        '<a href="stale.html">Stale report</a><br>',
        '<a href="diff.html">Diff report</a><br>',
        '<a href="head.html">HEAD report</a><br>',
        '<a href="headinfo.html">HEAD (information) report</a><br>',
        '<a href="latest.html">Latest results</a><br>',
        '<a href="time_lt.html">Time report (improved)</a><br>',
        '<a href="time_gt.html">Time report (regressed)</a> - <a href="time_gt.html?pkgs=1">packages.txt</a><br>',
        '<a href="time_slow.html">Time report (slowest)</a><br>',
        '<br>',
        '--check-library:<br>',
        '<a href="check_library_function_report.html">checkLibraryFunction report</a><br>',
        '<a href="check_library_noreturn_report.html">checkLibraryNoReturn report</a><br>',
        '<a href="check_library_use_ignore_report.html">checkLibraryUseIgnore report</a><br>',
        '<a href="check_library_check_type_report.html">checkLibraryCheckType report</a><br>',
        '<br>',
        'Debug warnings:<br>',
        '<a href="head-debug">debug</a><br>',
        '<a href="head-varid0">varid0</a><br>',
        '<a href="head-valueType">valueType</a><br>',
        '<a href="head-noparamend">noparamend</a><br>',
        '<a href="head-simplifyTypedef">simplifyTypedef</a><br>',
        '<a href="head-simplifyUsingUnmatchedBodyEnd">simplifyUsingUnmatchedBodyEnd</a><br>',
        '<a href="head-simplifyUsing">simplifyUsing</a><br>',
        '<a href="head-valueFlowMaxIterations">valueFlowMaxIterations</a><br>',
        '<a href="head-templateInstantiation">templateInstantiation</a><br>',
        # Currently disabled debug-warning links:
        # '<a href="head-autoNoType">autoNoType</a><br>',
        # '<a href="head-valueFlowBailout">valueFlowBailout</a><br>',
        # '<a href="head-bailoutUninitVar">bailoutUninitVar</a><br>',
        # '<a href="head-symbolDatabaseWarning">symbolDatabaseWarning</a><br>',
        '<br>',
        'Custom reports:<br>',
        '<a href="value_flow_bailout_incomplete_var.html">valueFlowBailoutIncompleteVar report</a><br>',
        '<a href="unknown_macro.html">unknownMacro report</a><br>',
        '<br>',
        'Important errors:<br>',
        '<a href="head-cppcheckError">cppcheckError</a><br>',
        '<a href="head-internalError">internalError</a><br>',
        '<a href="head-internalAstError">internalAstError</a><br>',
        '<a href="head-syntaxError">syntaxError</a><br>',
        '<a href="head-DacaWrongData">DacaWrongData</a><br>',
        '<a href="head-dacaWrongSplitTemplateRightAngleBrackets">dacaWrongSplitTemplateRightAngleBrackets</a><br>',
        '<br>',
        'version ' + SERVER_VERSION,
        '</body></html>',
    ]
    return '\n'.join(lines)
|
|
|
|
|
2019-01-09 18:39:42 +01:00
|
|
|
|
2020-02-19 09:42:32 +01:00
|
|
|
def fmt(a: str, b: str, c: str = None, d: str = None, e: str = None, link: bool = True, column_width=None) -> str:
    """Format one fixed-width table row for the <pre> report tables.

    a: package name (becomes a link when `link` is True)
    b: date/time string; first 10 chars go in the date column, last 5 (when
       longer than 10) into the time column
    c, d, e: optional right-justified count columns
    """
    if column_width is None:
        column_width = [40, 10, 5, 7, 7, 8]
    ret = a.ljust(column_width[0])
    # Only append the date when the name actually fit into its column
    if len(ret) == column_width[0]:
        ret += ' ' + b[:10]
    ret = ret.ljust(column_width[0] + 1 + column_width[1])
    ret += ' '
    if len(b) > 10:
        ret += b[-5:].rjust(column_width[2]) + ' '
    if c is not None:
        ret += c.rjust(column_width[3]) + ' '
    if d is not None:
        ret += d.rjust(column_width[4]) + ' '
    if e is not None:
        ret += e.rjust(column_width[5])
    if link:
        # Replace the leading package name with a hyperlink to it
        pos = ret.find(' ')
        ret = '<a href="' + a + '">' + a + '</a>' + ret[pos:]
    return ret
|
|
|
|
|
|
|
|
|
2019-10-26 21:10:21 +02:00
|
|
|
def latestReport(latestResults: list) -> str:
    """Build the HTML 'latest results' page.

    latestResults: list of result-file paths, newest uploads; for each file
    the first date line, the 'count:' line, and the number of findings lost
    (only in OLD_VERSION) / added (only in head) are shown.
    """
    html = '<!DOCTYPE html>\n'
    html += '<html><head><title>Latest daca@home results</title></head><body>\n'
    html += '<h1>Latest daca@home results</h1>\n'
    html += '<pre>\n<b>' + fmt('Package', 'Date Time', OLD_VERSION, 'Head', 'Diff', link=False) + '</b>\n'

    # Write report for latest results
    current_year = datetime.date.today().year  # hoisted: invariant over the loop
    for filename in latestResults:
        if not os.path.isfile(filename):
            continue
        package = filename[filename.rfind('/')+1:]

        datestr = None
        count = ['0', '0']
        lost = 0
        added = 0
        for line in open(filename, 'rt'):
            line = line.strip()
            # BUGFIX: parenthesized the 'or' — previously 'and' bound tighter
            # than 'or', so a line starting with last year's prefix overwrote
            # an already-found date; only the first date line should be kept.
            if datestr is None and (line.startswith(str(current_year) + '-') or line.startswith(str(current_year - 1) + '-')):
                datestr = line
            #elif line.startswith('cppcheck:'):
            #    cppcheck = line[9:]
            elif line.startswith('count: '):
                count = line.split(' ')[1:]
            elif line.startswith('head ') and not line.startswith('head results:'):
                added += 1
            elif line.startswith(OLD_VERSION + ' '):
                lost += 1
        diff = ''
        if lost > 0:
            diff += '-' + str(lost)
        if added > 0:
            diff += '+' + str(added)
        html += fmt(package, datestr, count[1], count[0], diff) + '\n'

    html += '</pre></body></html>\n'
    return html
|
|
|
|
|
2018-08-25 18:38:51 +02:00
|
|
|
|
2022-09-29 21:55:44 +02:00
|
|
|
def crashReport(results_path: str, query_params: dict):
    """Build the crash report.

    Scans all result files in results_path for packages whose 'count:' line
    contains 'Crash' and collects the crash locations and stack traces.

    Returns a (content, mime_type) tuple: the HTML page and 'text/html', or —
    when query_params has pkgs=1 — a plain-text list of package URLs and
    'text/plain'.
    """
    pkgs = '' if query_params.get('pkgs') == '1' else None

    html = '<!DOCTYPE html>\n'
    html += '<html><head><title>Crash report</title></head><body>\n'
    html += '<h1>Crash report</h1>\n'
    html += '<pre>\n'
    html += '<b>' + fmt('Package', 'Date Time', OLD_VERSION, 'Head', link=False) + '</b>\n'
    current_year = datetime.date.today().year
    stack_traces = {}
    for filename in sorted(glob.glob(os.path.expanduser(results_path + '/*'))):
        if not os.path.isfile(filename) or filename.endswith('.diff'):
            continue
        with open(filename, 'rt') as file_:
            datestr = None
            package_url = None
            for line in file_:
                line = line.strip()
                if line.startswith('cppcheck: '):
                    if OLD_VERSION not in line:
                        # Package results seem to be too old, skip
                        break
                    else:
                        # Current package, parse on
                        continue
                # BUGFIX: parenthesized the 'or' — previously 'and' bound
                # tighter than 'or', so a later last-year date line could
                # overwrite an already-found date.
                if datestr is None and (line.startswith(str(current_year) + '-') or line.startswith(str(current_year - 1) + '-')):
                    datestr = line
                elif pkgs is not None and package_url is None and line.startswith('ftp://'):
                    package_url = line
                elif line.startswith('count:'):
                    if line.find('Crash') < 0:
                        break
                    package = filename[filename.rfind('/')+1:]
                    counts = line.split(' ')
                    c_version = ''
                    if counts[2] == 'Crash!':
                        c_version = 'Crash'
                    c_head = ''
                    if counts[1] == 'Crash!':
                        c_head = 'Crash'
                    html += fmt(package, datestr, c_version, c_head) + '\n'
                    if c_head != 'Crash':
                        # Only head crashes provide a stack trace further down
                        break
                    if package_url is not None:
                        pkgs += '{}\n'.format(package_url)
                elif line.find(' received signal ') != -1:
                    crash_line = next(file_, '').strip()
                    location_index = crash_line.rfind(' at ')
                    if location_index > 0:
                        code_line = next(file_, '').strip()
                    else:
                        code_line = ''
                    stack_trace = []
                    while True:
                        frame_line = next(file_, '')  # renamed from 'l' (ambiguous name)
                        if not frame_line.strip():
                            break
                        # #0 0x00007ffff71cbf67 in raise () from /lib64/libc.so.6
                        m = re.search(r'(?P<number>#\d+) .* in (?P<function>.+)\(.*\) from (?P<binary>.*)$', frame_line)
                        if m:
                            stack_trace.append(m.group('number') + ' ' + m.group('function') + '(...) from ' + m.group('binary'))
                            continue
                        # #11 0x00000000006f2414 in valueFlowNumber (tokenlist=tokenlist@entry=0x7fffffffc610) at build/valueflow.cpp:2503
                        m = re.search(r'(?P<number>#\d+) .* in (?P<function>.+?) \(.*\) at (?P<location>.*)$', frame_line)
                        if m:
                            stack_trace.append(m.group('number') + ' ' + m.group('function') + '(...) at ' + m.group('location'))
                            continue
                        # #18 ForwardTraversal::updateRecursive (this=0x7fffffffb3c0, tok=0x14668a0) at build/forwardanalyzer.cpp:415
                        m = re.search(r'(?P<number>#\d+) (?P<function>.+)\(.*\) at (?P<location>.*)$', frame_line)
                        if m:
                            stack_trace.append(m.group('number') + ' ' + m.group('function') + '(...) at ' + m.group('location'))
                            continue

                        print_ts('{} - unmatched stack frame - {}'.format(package, frame_line))
                        break
                    # Group identical stack traces; hash() is only used as a
                    # within-run grouping key, never persisted.
                    key = hash(' '.join(stack_trace))

                    if key in stack_traces:
                        stack_traces[key]['code_line'] = code_line
                        stack_traces[key]['stack_trace'] = stack_trace
                        stack_traces[key]['n'] += 1
                        stack_traces[key]['packages'].append(package)
                    else:
                        stack_traces[key] = {'stack_trace': stack_trace, 'n': 1, 'code_line': code_line, 'packages': [package], 'crash_line': crash_line}
                    break

    html += '</pre>\n'
    html += '<pre>\n'
    html += '<b>Stack traces</b>\n'
    # Most frequent stack traces first
    for stack_trace in sorted(list(stack_traces.values()), key=lambda x: x['n'], reverse=True):
        html += 'Packages: ' + ' '.join(['<a href="' + p + '">' + p + '</a>' for p in stack_trace['packages']]) + '\n'
        html += html_lib.escape(stack_trace['crash_line']) + '\n'
        html += html_lib.escape(stack_trace['code_line']) + '\n'
        html += html_lib.escape('\n'.join(stack_trace['stack_trace'])) + '\n\n'
    html += '</pre>\n'

    html += '</body></html>\n'
    if pkgs is not None:
        return pkgs, 'text/plain'
    return html, 'text/html'
|
2019-03-29 12:12:16 +01:00
|
|
|
|
|
|
|
|
2020-01-24 12:33:51 +01:00
|
|
|
def timeoutReport(results_path: str) -> str:
    """Build the HTML timeout report: packages whose 'count:' line contains 'TO!'."""
    html = '<!DOCTYPE html>\n'
    html += '<html><head><title>Timeout report</title></head><body>\n'
    html += '<h1>Timeout report</h1>\n'
    html += '<pre>\n'
    html += '<b>' + fmt('Package', 'Date Time', OLD_VERSION, 'Head', link=False) + '</b>\n'
    current_year = datetime.date.today().year
    for filename in sorted(glob.glob(os.path.expanduser(results_path + '/*'))):
        if not os.path.isfile(filename) or filename.endswith('.diff'):
            continue
        with open(filename, 'rt') as file_:
            datestr = None
            for line in file_:
                line = line.strip()
                if line.startswith('cppcheck: '):
                    if OLD_VERSION not in line:
                        # Package results seem to be too old, skip
                        break
                    else:
                        # Current package, parse on
                        continue
                # BUGFIX: parenthesized the 'or' — previously 'and' bound
                # tighter than 'or', so a later last-year date line could
                # overwrite an already-found date.
                if datestr is None and (line.startswith(str(current_year) + '-') or line.startswith(str(current_year - 1) + '-')):
                    datestr = line
                elif line.startswith('count:'):
                    if line.find('TO!') < 0:
                        break
                    package = filename[filename.rfind('/')+1:]
                    counts = line.split(' ')
                    c2 = ''
                    if counts[2] == 'TO!':
                        c2 = 'Timeout'
                    c1 = ''
                    if counts[1] == 'TO!':
                        c1 = 'Timeout'
                    html += fmt(package, datestr, c2, c1) + '\n'
                    break

    html += '</pre>\n'
    html += '</body></html>\n'
    return html
|
|
|
|
|
|
|
|
|
2019-10-26 21:10:21 +02:00
|
|
|
def staleReport(results_path: str) -> str:
    # Build the HTML stale report: packages whose most recent result date is
    # at least 30 days old.
    html = '<!DOCTYPE html>\n'
    html += '<html><head><title>Stale report</title></head><body>\n'
    html += '<h1>Stale report</h1>\n'
    html += '<pre>\n'
    html += '<b>' + fmt('Package', 'Date Time', link=False) + '</b>\n'
    current_year = datetime.date.today().year
    for filename in sorted(glob.glob(os.path.expanduser(results_path + '/*'))):
        # Skip directories and the generated *.diff statistics files
        if not os.path.isfile(filename) or filename.endswith('.diff'):
            continue
        for line in open(filename, 'rt'):
            line = line.strip()
            # Only date lines (this year or last year) matter here
            if line.startswith(str(current_year) + '-') or line.startswith(str(current_year - 1) + '-'):
                datestr = line
            else:
                continue
            dt = dateTimeFromStr(datestr)
            diff = datetime.datetime.now() - dt
            if diff.days < 30:
                # Recent enough — not stale; check next line/file
                continue
            package = filename[filename.rfind('/')+1:]
            html += fmt(package, datestr) + '\n'
            break
    html += '</pre>\n'

    html += '</body></html>\n'
    return html
|
|
|
|
|
|
|
|
|
2019-10-26 21:10:21 +02:00
|
|
|
def diffReportFromDict(out: dict, today: str) -> str:
    """Render a <pre> table of per-messageId diff counts (OLD_VERSION vs head).

    out maps messageId -> [old_count, head_count]; `today` is appended to the
    per-messageId link target ('diff<today>-<messageId>').
    """
    rows = ['<pre>\n', '<b>MessageID ' + OLD_VERSION + ' Head</b>\n']
    total_old = 0
    total_head = 0
    for message_id in sorted(out.keys()):
        counts_pair = out[message_id]
        total_old += counts_pair[0]
        total_head += counts_pair[1]
        row = message_id + ' '
        if counts_pair[0] > 0:
            text = str(counts_pair[0])
            row = row.ljust(40 - len(text)) + text + ' '
        if counts_pair[1] > 0:
            text = str(counts_pair[1])
            row = row.ljust(48 - len(text)) + text
        # Turn the leading messageId into a link to the per-id report
        row = '<a href="diff' + today + '-' + message_id + '">' + message_id + '</a>' + row[row.find(' '):]
        rows.append(row + '\n')

    # Totals row
    rows.append('================================================\n')
    sum_row = ''.ljust(40 - len(str(total_old))) + str(total_old) + ' '
    sum_row = sum_row.ljust(48 - len(str(total_head))) + str(total_head)
    rows.append(sum_row + '\n')
    rows.append('</pre>\n')
    return ''.join(rows)
|
|
|
|
|
2018-09-06 06:53:40 +02:00
|
|
|
|
2019-10-26 21:10:21 +02:00
|
|
|
def diffReport(resultsPath: str) -> str:
    # Aggregate all per-package *.diff JSON statistics into the HTML diff
    # page: one table for results uploaded today, one for all results.
    out = {}
    outToday = {}
    today = strDateTime()[:10]

    for filename in sorted(glob.glob(resultsPath + '/*.diff')):
        if not os.path.isfile(filename):
            continue
        with open(filename, 'rt') as f:
            # Each .diff file is JSON: {'date': 'YYYY-MM-DD', 'sums': {...}}
            data = json.loads(f.read())
        uploadedToday = data['date'] == today
        for messageId in data['sums']:
            sums = data['sums'][messageId]
            # Skip entries from other (older) cppcheck versions
            if OLD_VERSION not in sums:
                continue
            if messageId not in out:
                out[messageId] = [0, 0]
            out[messageId][0] += sums[OLD_VERSION]
            out[messageId][1] += sums['head']
            if uploadedToday:
                if messageId not in outToday:
                    outToday[messageId] = [0, 0]
                outToday[messageId][0] += sums[OLD_VERSION]
                outToday[messageId][1] += sums['head']

    html = '<!DOCTYPE html>\n'
    html += '<html><head><title>Diff report</title></head><body>\n'
    html += '<h1>Diff report</h1>\n'
    html += '<h2>Uploaded today</h2>'
    html += diffReportFromDict(outToday, 'today')
    html += '<h2>All</h2>'
    html += diffReportFromDict(out, '')

    return html
|
|
|
|
|
|
|
|
|
2019-10-26 21:10:21 +02:00
|
|
|
def generate_package_diff_statistics(filename: str) -> None:
    # Parse the 'diff:' section of one package result file and write the
    # per-messageId counts as JSON to '<filename>.diff'. A stale .diff file
    # is removed when the package no longer has diff findings.
    is_diff = False

    sums = {}

    for line in open(filename, 'rt'):
        line = line.strip()
        if line == 'diff:':
            is_diff = True
            continue
        elif not is_diff:
            # Everything before the 'diff:' marker is ignored
            continue
        if not line.endswith(']'):
            # Finding lines end with the '[messageId]' suffix
            continue

        if line.startswith(OLD_VERSION + ' '):
            version = OLD_VERSION
        elif line.startswith('head '):
            version = 'head'
        else:
            continue

        messageId = line[line.rfind('[')+1:len(line)-1]

        if messageId not in sums:
            sums[messageId] = {OLD_VERSION: 0, 'head': 0}

        sums[messageId][version] += 1

    output = {'date': strDateTime()[:10], 'sums': sums}

    filename_diff = filename + '.diff'
    if sums:
        with open(filename_diff, 'wt') as f:
            f.write(json.dumps(output))
    elif os.path.isfile(filename_diff):
        os.remove(filename_diff)
|
|
|
|
|
|
|
|
|
2019-10-26 21:10:21 +02:00
|
|
|
def diffMessageIdReport(resultPath: str, messageId: str) -> str:
    # Plain-text report: every diff finding for one message id, preceded by
    # the package download URL of the result file it came from.
    text = messageId + '\n'
    e = '[' + messageId + ']\n'
    for filename in sorted(glob.glob(resultPath + '/*.diff')):
        if not os.path.isfile(filename):
            continue
        with open(filename, 'rt') as f:
            diff_stats = f.read()
        # Cheap pre-filter on the JSON statistics before opening the
        # (larger) full result file
        if messageId not in diff_stats:
            continue
        url = None
        diff = False
        # filename[:-5] strips the '.diff' suffix -> the full result file
        for line in open(filename[:-5], 'rt'):
            if line.startswith('ftp://'):
                url = line
            elif line == 'diff:\n':
                diff = True
            elif not diff:
                continue
            elif line.endswith(e):
                if url:
                    # Print the package URL once, before its first match
                    text += url
                    url = None
                text += line
    return text
|
|
|
|
|
2018-09-06 17:31:07 +02:00
|
|
|
|
2019-10-26 21:10:21 +02:00
|
|
|
def diffMessageIdTodayReport(resultPath: str, messageId: str) -> str:
    # Like diffMessageIdReport() but restricted to packages whose results
    # were uploaded today.
    text = messageId + '\n'
    e = '[' + messageId + ']\n'
    today = strDateTime()[:10]
    for filename in sorted(glob.glob(resultPath + '/*.diff')):
        if not os.path.isfile(filename):
            continue
        with open(filename, 'rt') as f:
            diff_stats = f.read()
        # Cheap pre-filter on the JSON statistics before opening the
        # (larger) full result file
        if messageId not in diff_stats:
            continue
        if today not in diff_stats:
            continue
        url = None
        diff = False
        firstLine = True
        # filename[:-5] strips the '.diff' suffix -> the full result file
        for line in open(filename[:-5], 'rt'):
            if firstLine:
                firstLine = False
                # The first line of a result file is its upload date
                if not line.startswith(today):
                    break
            if line.startswith('ftp://'):
                url = line
            elif line == 'diff:\n':
                diff = True
            elif not diff:
                continue
            elif line.endswith(e):
                if url:
                    # Print the package URL once, before its first match
                    text += url
                    url = None
                text += line
    return text
|
|
|
|
|
|
|
|
|
2023-04-28 15:33:25 +02:00
|
|
|
def summaryReportFromDict(out: dict, prefix: str, today: str) -> str:
    """Render a <pre> table of messageId -> count with a totals row.

    Each messageId links to '<prefix><today>-<messageId>'.
    """
    rows = ['<pre>\n', '<b>MessageID Count</b>\n']
    total = 0
    for message_id in sorted(out.keys()):
        count = out[message_id]
        total += count
        row = message_id + ' '
        if count > 0:
            text = str(count)
            row = row.ljust(48 - len(text)) + text + ' '
        # Turn the leading messageId into a link to the per-id report
        row = '<a href="' + prefix + today + '-' + message_id + '">' + message_id + '</a>' + row[row.find(' '):]
        rows.append(row + '\n')

    # Totals row
    rows.append('================================================\n')
    total_text = str(total)
    rows.append(''.ljust(48 - len(total_text)) + total_text + ' \n')
    rows.append('</pre>\n')
    return ''.join(rows)
|
|
|
|
|
|
|
|
|
2023-04-28 15:33:25 +02:00
|
|
|
def summaryReport(resultsPath: str, name: str, prefix: str, marker: str) -> str:
    # Count message ids in the result section introduced by `marker` across
    # all result files and render two summary tables: results uploaded today
    # and all results.
    out = {}
    outToday = {}
    today = strDateTime()[:10]

    for filename in sorted(glob.glob(resultsPath + '/*')):
        # Skip directories and the generated *.diff statistics files
        if not os.path.isfile(filename) or filename.endswith('.diff'):
            continue
        uploadedToday = False
        firstLine = True
        inResults = False
        for line in open(filename, 'rt'):
            if firstLine:
                # The first line of a result file is its upload date
                if line.startswith(today):
                    uploadedToday = True
                firstLine = False
                continue
            line = line.strip()
            if line.startswith('cppcheck: '):
                if OLD_VERSION not in line:
                    # Package results seem to be too old, skip
                    break
                else:
                    # Current package, parse on
                    continue
            if line.startswith(marker):
                inResults = True
                continue
            if not inResults:
                continue
            if line.startswith('diff:'):
                # The diff section follows the results section — stop here
                break
            if not line.endswith(']'):
                continue
            if ': note: ' in line:
                # notes normally do not contain message ids but can end with ']'
                continue
            message_id_start_pos = line.rfind('[')
            if message_id_start_pos <= 0:
                continue
            messageId = line[message_id_start_pos+1:len(line)-1]
            if ' ' in messageId:
                # skip invalid messageIds
                continue

            if messageId not in out:
                out[messageId] = 0
            out[messageId] += 1
            if uploadedToday:
                if messageId not in outToday:
                    outToday[messageId] = 0
                outToday[messageId] += 1

    html = '<!DOCTYPE html>\n'
    html += '<html><head><title>{} report</title></head><body>\n'.format(name)
    # NOTE(review): the <h1> is hard-coded to 'HEAD report' although the
    # <title> uses `name` — possibly an oversight for the info report; confirm
    # before changing.
    html += '<h1>HEAD report</h1>\n'
    html += '<h2>Uploaded today</h2>'
    html += summaryReportFromDict(outToday, prefix, 'today')
    html += '<h2>All</h2>'
    html += summaryReportFromDict(out, prefix, '')

    return html
|
|
|
|
|
|
|
|
|
2023-04-28 15:33:25 +02:00
|
|
|
def headReport(resultsPath: str) -> str:
    """Summary report over the 'head results:' sections of all result files."""
    report = summaryReport(resultsPath, 'HEAD', 'head', HEAD_MARKER)
    return report
|
|
|
|
|
|
|
|
|
|
|
|
def infoReport(resultsPath: str) -> str:
    """Summary report over the 'info messages:' sections of all result files."""
    report = summaryReport(resultsPath, 'HEAD (information)', 'headinfo', INFO_MARKER)
    return report
|
2023-04-28 15:33:25 +02:00
|
|
|
|
|
|
|
|
|
|
|
def messageIdReport(resultPath: str, marker: str, messageId: str, query_params: dict) -> str:
    """Collect all result lines for a given message ID.

    Scans every result file below resultPath. Inside the section introduced
    by `marker`, every line ending with '[messageId]' is collected, preceded
    by the ftp:// package URL it belongs to (each URL emitted at most once).
    Results older than OLD_VERSION and everything after a 'diff:' section
    are skipped.

    If query_params contains pkgs='1', only the newline-terminated package
    URLs are returned instead of the full report.
    """
    # pkgs stays None unless the plain package-list output was requested
    pkgs = '' if query_params.get('pkgs') == '1' else None
    text = messageId + '\n'
    e = '[' + messageId + ']\n'
    for filename in sorted(glob.glob(resultPath + '/*')):
        if not os.path.isfile(filename) or filename.endswith('.diff'):
            continue
        url = None
        inResults = False
        # context manager closes the handle deterministically (the old code
        # leaked one open file per result file until GC)
        with open(filename, 'rt') as f:
            for line in f:
                if line.startswith('cppcheck: '):
                    if OLD_VERSION not in line:
                        # Package results seem to be too old, skip
                        break
                    else:
                        # Current package, parse on
                        continue
                if line.startswith('ftp://'):
                    url = line
                    continue
                if not inResults:
                    if line.startswith(marker):
                        inResults = True
                    continue
                if line.startswith('diff:'):
                    break
                if line.endswith(e):
                    if url:
                        text += url
                        if pkgs is not None:
                            pkgs += url
                        # emit the URL only once per package
                        url = None
                    text += line
    if pkgs is not None:
        return pkgs
    return text
|
|
|
|
|
|
|
|
|
2023-04-28 15:33:25 +02:00
|
|
|
def headMessageIdReport(resultPath: str, messageId: str, query_params: dict) -> str:
    """List all HEAD results carrying the given message ID."""
    report = messageIdReport(resultPath, HEAD_MARKER, messageId, query_params)
    return report
|
|
|
|
|
|
|
|
|
|
|
|
def infoMessageIdReport(resultPath: str, messageId: str, query_params: dict) -> str:
    """List all HEAD information messages carrying the given message ID."""
    report = messageIdReport(resultPath, INFO_MARKER, messageId, query_params)
    return report
|
|
|
|
|
|
|
|
|
|
|
|
def messageIdTodayReport(resultPath: str, messageId: str, marker: str) -> str:
    """Collect all result lines for a message ID, restricted to today's uploads.

    Like messageIdReport(), but a result file is only considered when its
    first line starts with today's date (as produced by strDateTime()).
    Lines inside the `marker` section that end with '[messageId]' are
    collected, preceded by the package URL they belong to.
    """
    text = messageId + '\n'
    e = '[' + messageId + ']\n'
    today = strDateTime()[:10]
    for filename in sorted(glob.glob(resultPath + '/*')):
        if not os.path.isfile(filename) or filename.endswith('.diff'):
            continue
        url = None
        inResults = False
        firstLine = True
        # context manager closes the handle deterministically (the old code
        # leaked one open file per result file until GC)
        with open(filename, 'rt') as f:
            for line in f:
                if firstLine:
                    firstLine = False
                    # upload timestamp is the first line; skip stale files
                    if not line.startswith(today):
                        break
                if line.startswith('ftp://'):
                    url = line
                    continue
                if not inResults:
                    if line.startswith(marker):
                        inResults = True
                    continue
                if line.startswith('diff:'):
                    break
                if line.endswith(e):
                    if url:
                        text += url
                        # emit the URL only once per package
                        url = None
                    text += line
    return text
|
|
|
|
|
|
|
|
|
2023-04-28 15:33:25 +02:00
|
|
|
def headMessageIdTodayReport(resultPath: str, messageId: str) -> str:
    """List today's HEAD results carrying the given message ID."""
    report = messageIdTodayReport(resultPath, messageId, HEAD_MARKER)
    return report
|
|
|
|
|
|
|
|
|
|
|
|
def infoMessageIdTodayReport(resultPath: str, messageId: str) -> str:
    """List today's HEAD information messages carrying the given message ID."""
    report = messageIdTodayReport(resultPath, messageId, INFO_MARKER)
    return report
|
|
|
|
|
|
|
|
|
|
|
|
# TODO: needs to indicate that it returns 'tuple[str, str]' but that isn't supported until Python 3.9
|
|
|
|
def timeReport(resultPath: str, show_gt: bool, query_params: dict):
    """Report packages whose runtime changed suspiciously between base and HEAD.

    show_gt selects regressions (HEAD slower than base by more than `factor`,
    default 2.0, overridable via the 'factor' query parameter); otherwise
    improvements (factor inverted) are listed. Returns a (body, mime-type)
    tuple: HTML normally, or a plain-text package-URL list when show_gt and
    the 'pkgs' query parameter is '1'.
    """
    # no need for package report support in "improved" report
    pkgs = '' if show_gt and query_params and query_params.get('pkgs') == '1' else None
    factor = float(query_params.get('factor')) if query_params and 'factor' in query_params else 2.0
    if not show_gt:
        factor = 1.0 / factor

    title = 'Time report ({})'.format('regressed' if show_gt else 'improved')
    html = '<!DOCTYPE html>\n'
    html += '<html><head><title>{}</title></head><body>\n'.format(title)
    html += '<h1>{}</h1>\n'.format(title)
    html += '<pre>\n'
    column_width = [40, 10, 10, 10, 10, 10]
    html += '<b>'
    html += fmt('Package', 'Date Time', OLD_VERSION, 'Head', 'Factor', link=False, column_width=column_width)
    html += '</b>\n'

    current_year = datetime.date.today().year

    data = {}

    total_time_base = 0.0
    total_time_head = 0.0
    for filename in glob.glob(resultPath + '/*'):
        if not os.path.isfile(filename) or filename.endswith('.diff'):
            continue
        datestr = None
        package_url = None
        # context manager closes the handle deterministically
        with open(filename, 'rt') as f:
            for line in f:
                line = line.strip()
                if line.startswith('cppcheck: '):
                    if OLD_VERSION not in line:
                        # Package results seem to be too old, skip
                        break
                    else:
                        # Current package, parse on
                        continue
                # BUGFIX: parentheses added — 'and' binds tighter than 'or', so the
                # old condition let a later previous-year line overwrite datestr.
                if datestr is None and (line.startswith(str(current_year) + '-') or line.startswith(str(current_year - 1) + '-')):
                    datestr = line
                    continue
                elif pkgs is not None and package_url is None and line.startswith('ftp://'):
                    package_url = line
                if not line.startswith('elapsed-time:'):
                    continue
                split_line = line.split()
                time_base = float(split_line[2])
                time_head = float(split_line[1])
                if time_base < 0.0 or time_head < 0.0:
                    # ignore results with crashes / errors for the time report
                    break
                if time_base == 0.0 and time_head == 0.0:
                    # no difference possible
                    break
                total_time_base += time_base
                total_time_head += time_head
                if time_base == time_head:
                    # no difference
                    break
                if time_base > 0.0 and time_head > 0.0:
                    time_factor = time_head / time_base
                elif time_base == 0.0:
                    # the smallest possible value is 0.1 so treat that as an increase of 100%
                    # on top of the existing 100% (treating the base 0.0 as such).
                    time_factor = 1.0 + (time_head * 10)
                else:
                    time_factor = 0.0
                suspicious_time_difference = False
                if show_gt and time_factor > factor:
                    suspicious_time_difference = True
                elif not show_gt and time_factor < factor:
                    suspicious_time_difference = True
                if suspicious_time_difference:
                    pkg_name = filename[len(resultPath)+1:]
                    data[pkg_name] = (datestr, split_line[2], split_line[1], time_factor)

                    if package_url is not None:
                        pkgs += '{}\n'.format(package_url)
                break

    # sort by the computed factor; worst offenders first for regressions
    sorted_data = sorted(data.items(), key=lambda kv: kv[1][3], reverse=show_gt)
    sorted_dict = collections.OrderedDict(sorted_data)
    for key in sorted_dict:
        html += fmt(key, sorted_dict[key][0], sorted_dict[key][1], sorted_dict[key][2], '{:.2f}'.format(sorted_dict[key][3]),
                    column_width=column_width) + '\n'

    html += '\n'
    html += '(listed above are all suspicious timings with a factor '
    html += '>' if show_gt else '<'
    html += ' {}'.format(format(factor, '.2f'))
    html += ')\n'
    html += '\n'
    if total_time_base > 0.0:
        total_time_factor = total_time_head / total_time_base
    else:
        total_time_factor = 0.0
    html += 'Time for all packages (not just the ones listed above):\n'
    html += fmt('Total time:',
                '',
                '{:.1f}'.format(total_time_base),
                '{:.1f}'.format(total_time_head),
                '{:.2f}'.format(total_time_factor), link=False, column_width=column_width)

    html += '\n'
    html += '</pre>\n'
    html += '</body></html>\n'

    if pkgs is not None:
        return pkgs, 'text/plain'
    return html, 'text/html'
|
2018-09-02 09:35:38 +02:00
|
|
|
|
2019-01-09 18:39:42 +01:00
|
|
|
|
2021-02-21 08:47:58 +01:00
|
|
|
def timeReportSlow(resultPath: str) -> str:
    """Generate an HTML report of the packages with the longest HEAD runtime.

    Results older than OLD_VERSION, results with timeouts ('count: TO!') and
    results with crashes (negative elapsed times) are skipped.
    """
    title = 'Time report (slowest)'
    html = '<!DOCTYPE html>\n'
    html += '<html><head><title>{}</title></head><body>\n'.format(title)
    html += '<h1>{}</h1>\n'.format(title)
    html += '<pre>\n'
    html += '<b>'
    html += fmt('Package', 'Date Time', OLD_VERSION, 'Head', link=False)
    html += '</b>\n'

    current_year = datetime.date.today().year

    data = {}

    for filename in glob.glob(resultPath + '/*'):
        if not os.path.isfile(filename) or filename.endswith('.diff'):
            continue
        datestr = None
        # context manager closes the handle deterministically
        with open(filename, 'rt') as f:
            for line in f:
                line = line.strip()
                if line.startswith('cppcheck: '):
                    if OLD_VERSION not in line:
                        # Package results seem to be too old, skip
                        break
                    else:
                        # Current package, parse on
                        continue
                # BUGFIX: parentheses added — 'and' binds tighter than 'or', so the
                # old condition let a later previous-year line overwrite datestr.
                if datestr is None and (line.startswith(str(current_year) + '-') or line.startswith(str(current_year - 1) + '-')):
                    datestr = line
                    continue
                elif line.startswith('count:'):
                    count_head = line.split()[1]
                    if count_head == 'TO!':
                        # ignore results with timeouts
                        break
                    continue
                if not line.startswith('elapsed-time:'):
                    continue
                split_line = line.split()
                time_base = float(split_line[2])
                time_head = float(split_line[1])
                if time_base < 0.0 or time_head < 0.0:
                    # ignore results with crashes / errors
                    break
                pkg_name = filename[len(resultPath)+1:]
                data[pkg_name] = (datestr, split_line[2], split_line[1], time_head)
                break

    sorted_data = sorted(data.items(), key=lambda kv: kv[1][3])
    if len(data) > 100:
        first_key, _ = sorted_data[0]
        # remove the entry with the lowest run-time
        del data[first_key]

    # slowest packages first
    sorted_data = sorted(data.items(), key=lambda kv: kv[1][3], reverse=True)
    sorted_dict = collections.OrderedDict(sorted_data)
    for key in sorted_dict:
        html += fmt(key, sorted_dict[key][0], sorted_dict[key][1], sorted_dict[key][2]) + '\n'
    html += '</pre>\n'
    html += '</body></html>\n'

    return html
|
|
|
|
|
|
|
|
|
2019-10-26 21:10:21 +02:00
|
|
|
def check_library_report(result_path: str, message_id: str) -> str:
    """Generate an HTML report of the most frequent names flagged by message_id.

    Counts, per function/type/variable/macro name, how often the given
    message ID occurs across all result files and renders the top entries
    as links to the per-name detail pages. Returns an error message string
    for an unsupported message_id.
    """
    if message_id not in ('checkLibraryNoReturn', 'checkLibraryFunction', 'checkLibraryUseIgnore', 'checkLibraryCheckType', 'valueFlowBailoutIncompleteVar', 'unknownMacro'):
        error_message = 'Invalid value ' + message_id + ' for message_id parameter.'
        print_ts(error_message)
        return error_message

    # choose wording, link target and which result section to scan
    if message_id == 'unknownMacro':
        metric = 'macros'
        m_column = 'macro'
        metric_link = 'unknown_macro'
        start_marker = HEAD_MARKER
    elif message_id == 'valueFlowBailoutIncompleteVar':
        metric = 'variables'
        m_column = 'Variable'
        metric_link = 'incomplete_var'
        start_marker = HEAD_MARKER
    elif message_id == 'checkLibraryCheckType':
        metric = 'types'
        m_column = 'Type'
        metric_link = 'check_library'
        start_marker = INFO_MARKER
    else:
        metric = 'functions'
        m_column = 'Function'
        metric_link = 'check_library'
        start_marker = INFO_MARKER

    functions_shown_max = 5000
    html = '<!DOCTYPE html>\n'
    html += '<html><head><title>' + message_id + ' report</title></head><body>\n'
    html += '<h1>' + message_id + ' report</h1>\n'
    html += 'Top ' + str(functions_shown_max) + ' ' + metric + ' are shown.'
    html += '<pre>\n'
    column_widths = [10, 100]
    html += '<b>'
    html += 'Count'.rjust(column_widths[0]) + ' ' + m_column
    html += '</b>\n'

    function_counts = {}
    for filename in glob.glob(result_path + '/*'):
        if not os.path.isfile(filename) or filename.endswith('.diff'):
            continue
        in_results = False
        # context manager closes the handle deterministically (the old code
        # leaked one open file per result file until GC)
        with open(filename, 'rt') as f:
            for line in f:
                if line.startswith('cppcheck: '):
                    if OLD_VERSION not in line:
                        # Package results seem to be too old, skip
                        break
                    else:
                        # Current package, parse on
                        continue
                if not in_results:
                    if line.startswith(start_marker):
                        in_results = True
                    continue
                if line.startswith('diff:'):
                    break
                if line.endswith('[' + message_id + ']\n'):
                    # extract the offending name from the message text; the
                    # slice bounds depend on the exact message wording
                    if message_id == 'unknownMacro':
                        marker = 'required. If '
                        function_name = line[(line.find(marker) + len(marker)):line.rfind('is a macro') - 1]
                    elif message_id == 'valueFlowBailoutIncompleteVar':
                        marker = 'incomplete variable '
                        function_name = line[(line.find(marker) + len(marker)):line.rfind('[') - 1]
                    elif message_id == 'checkLibraryFunction':
                        marker = 'for function '
                        function_name = line[(line.find(marker) + len(marker)):line.rfind('[') - 1]
                    elif message_id == 'checkLibraryCheckType':
                        marker = 'configuration for '
                        function_name = line[(line.find(marker) + len(marker)):line.rfind('[') - 1]
                    else:
                        marker = ': Function '
                        function_name = line[(line.find(marker) + len(marker)):line.rfind('should have') - 1]
                    function_counts[function_name] = function_counts.setdefault(function_name, 0) + 1

    function_details_list = []
    for function_name, count in sorted(list(function_counts.items()), key=operator.itemgetter(1), reverse=True):
        if len(function_details_list) >= functions_shown_max:
            break
        function_details_list.append(str(count).rjust(column_widths[0]) + ' ' +
                                     '<a href="' + metric_link + '-' + urllib.parse.quote_plus(function_name) + '">' + function_name + '</a>\n')

    html += ''.join(function_details_list)
    html += '</pre>\n'
    html += '</body></html>\n'

    return html
|
|
|
|
|
|
|
|
|
|
|
|
# Lists all checkLibrary* messages regarding the given function name
|
2023-09-21 15:25:18 +02:00
|
|
|
# Lists all checkLibrary* messages regarding the given function name
def check_library_function_name(result_path: str, function_name: str, query_params: dict, nonfunc_id: str='') -> str:
    """List all result lines that mention the given (URL-encoded) name.

    With nonfunc_id set, HEAD results are scanned for that specific message
    ID; otherwise the info results are scanned for checkLibrary* messages.
    Each matching line is preceded by its package URL and cppcheck options.
    With query parameter pkgs='1', only one package URL per package is
    returned instead.
    """
    pkgs = '' if query_params.get('pkgs') == '1' else None
    function_name = urllib.parse.unquote_plus(function_name)
    # renamed from 'id' — avoid shadowing the builtin
    if nonfunc_id:
        # deliberately no closing ']': the prefix matches the whole id family
        msg_id = '[' + nonfunc_id
        marker = HEAD_MARKER
    else:
        if function_name.endswith('()'):
            msg_id = '[checkLibrary'
        else:
            msg_id = '[checkLibraryCheckType]'
        marker = INFO_MARKER
    output_lines_list = []
    for filename in glob.glob(result_path + '/*'):
        if not os.path.isfile(filename) or filename.endswith('.diff'):
            continue
        in_results = False
        package_url = None
        cppcheck_options = None
        # context manager closes the handle deterministically (the old code
        # leaked one open file per result file until GC)
        with open(filename, 'rt') as f:
            for line in f:
                if line.startswith('cppcheck: '):
                    if OLD_VERSION not in line:
                        # Package results seem to be too old, skip
                        break
                    else:
                        # Current package, parse on
                        continue
                if line.startswith('ftp://'):
                    package_url = line
                    continue
                if line.startswith('cppcheck-options:'):
                    cppcheck_options = line
                    continue
                if not in_results:
                    if line.startswith(marker):
                        in_results = True
                    continue
                if line.startswith('diff:'):
                    break
                if msg_id not in line:
                    continue
                if not (' ' + function_name + ' ') in line:
                    continue
                if pkgs is not None and package_url is not None:
                    # package-list mode: one URL per package is enough
                    pkgs += '{}\n'.format(package_url.strip())
                    break
                if package_url:
                    output_lines_list.append(package_url)
                    package_url = None
                if cppcheck_options:
                    output_lines_list.append(cppcheck_options)
                    cppcheck_options = None
                output_lines_list.append(line)

    if pkgs is not None:
        return pkgs
    return ''.join(output_lines_list)
|
2019-01-22 15:27:13 +01:00
|
|
|
|
|
|
|
|
2019-10-26 21:10:21 +02:00
|
|
|
def sendAll(connection: socket.socket, text: str) -> None:
    """Send the entire UTF-8 encoding of text over the connection.

    socket.send() may transmit only part of the buffer, so keep sending
    the unsent remainder until everything has been written.
    """
    remaining = text.encode('utf-8', 'ignore')
    while remaining:
        sent = connection.send(remaining)
        remaining = remaining[sent:] if sent < len(remaining) else None
|
2018-08-23 21:31:02 +02:00
|
|
|
|
2018-08-25 18:38:51 +02:00
|
|
|
|
2019-10-26 21:10:21 +02:00
|
|
|
def httpGetResponse(connection: socket.socket, data: str, contentType: str) -> None:
    """Send a complete HTTP 200 response with the given body and content type.

    The response is transmitted UTF-8 encoded by sendAll(), so the
    Content-length header must count encoded bytes — len(data) on the str
    undercounts whenever the body contains non-ASCII characters, which made
    clients truncate the body.
    """
    body_length = len(data.encode('utf-8', 'ignore'))
    resp = 'HTTP/1.1 200 OK\r\n'
    resp += 'Connection: close\r\n'
    resp += 'Content-length: ' + str(body_length) + '\r\n'
    resp += 'Content-type: ' + contentType + '\r\n\r\n'
    resp += data
    sendAll(connection, resp)
|
|
|
|
|
2018-08-23 21:31:02 +02:00
|
|
|
|
2018-08-25 18:38:51 +02:00
|
|
|
class HttpClientThread(Thread):
    """Handle a single HTTP GET request from a browser on its own thread.

    The request URL is dispatched to the matching report generator and the
    result is sent back via httpGetResponse(); the connection is always
    closed when the handler finishes.
    """
    def __init__(self, connection: socket.socket, cmd: str, resultPath: str, latestResults: list) -> None:
        # connection: the already-accepted client socket
        # cmd: raw request data; only the first line (up to CRLF) is kept
        # resultPath: directory holding the per-package result files
        # latestResults: filenames of the most recently uploaded results
        Thread.__init__(self)
        self.connection = connection
        self.cmd = cmd[:cmd.find('\r\n')]
        self.resultPath = resultPath
        # 'info_output' subdirectory holds the information-message results
        # used by the *info* and check_library reports
        self.infoPath = os.path.join(self.resultPath, 'info_output')
        self.latestResults = latestResults

    # TODO: use a proper parser
    @staticmethod
    def parse_req(cmd):
        """Parse a 'GET <url> HTTP/x.y' request line.

        Returns (path, query-parameter dict), or (None, None) when the line
        is not a well-formed GET request.
        """
        req_parts = cmd.split(' ')
        if len(req_parts) != 3 or req_parts[0] != 'GET' or not req_parts[2].startswith('HTTP'):
            return None, None
        url_obj = urlparse(req_parts[1])
        return url_obj.path, dict(urllib.parse.parse_qsl(url_obj.query))

    def run(self):
        """Dispatch the request URL to the matching report and send the response."""
        try:
            cmd = self.cmd
            print_ts(cmd)
            url, queryParams = self.parse_req(cmd)
            if url is None:
                print_ts('invalid request: {}'.format(cmd))
                self.connection.close()
                return
            if url == '/':
                html = overviewReport()
                httpGetResponse(self.connection, html, 'text/html')
            elif url == '/latest.html':
                html = latestReport(self.latestResults)
                httpGetResponse(self.connection, html, 'text/html')
            elif url == '/crash.html':
                text, mime = crashReport(self.resultPath, queryParams)
                httpGetResponse(self.connection, text, mime)
            elif url == '/timeout.html':
                html = timeoutReport(self.resultPath)
                httpGetResponse(self.connection, html, 'text/html')
            elif url == '/stale.html':
                html = staleReport(self.resultPath)
                httpGetResponse(self.connection, html, 'text/html')
            elif url == '/diff.html':
                html = diffReport(self.resultPath)
                httpGetResponse(self.connection, html, 'text/html')
            # prefix routes: the rest of the URL is the message/function name
            elif url.startswith('/difftoday-'):
                messageId = url[len('/difftoday-'):]
                text = diffMessageIdTodayReport(self.resultPath, messageId)
                httpGetResponse(self.connection, text, 'text/plain')
            elif url.startswith('/diff-'):
                messageId = url[len('/diff-'):]
                text = diffMessageIdReport(self.resultPath, messageId)
                httpGetResponse(self.connection, text, 'text/plain')
            elif url == '/head.html':
                html = headReport(self.resultPath)
                httpGetResponse(self.connection, html, 'text/html')
            elif url == '/headinfo.html':
                html = infoReport(self.infoPath)
                httpGetResponse(self.connection, html, 'text/html')
            elif url.startswith('/headtoday-'):
                messageId = url[len('/headtoday-'):]
                text = headMessageIdTodayReport(self.resultPath, messageId)
                httpGetResponse(self.connection, text, 'text/plain')
            elif url.startswith('/headinfotoday-'):
                messageId = url[len('/headinfotoday-'):]
                text = infoMessageIdTodayReport(self.infoPath, messageId)
                httpGetResponse(self.connection, text, 'text/plain')
            # NOTE: '/headtoday-'/'/headinfotoday-' must be tested before the
            # shorter '/head-'/'/headinfo-' prefixes below
            elif url.startswith('/head-'):
                messageId = url[len('/head-'):]
                text = headMessageIdReport(self.resultPath, messageId, queryParams)
                httpGetResponse(self.connection, text, 'text/plain')
            elif url.startswith('/headinfo-'):
                messageId = url[len('/headinfo-'):]
                text = infoMessageIdReport(self.infoPath, messageId, queryParams)
                httpGetResponse(self.connection, text, 'text/plain')
            elif url == '/time_lt.html':
                text, mime = timeReport(self.resultPath, False, queryParams)
                httpGetResponse(self.connection, text, mime)
            elif url == '/time_gt.html':
                text, mime = timeReport(self.resultPath, True, queryParams)
                httpGetResponse(self.connection, text, mime)
            elif url == '/time_slow.html':
                text = timeReportSlow(self.resultPath)
                httpGetResponse(self.connection, text, 'text/html')
            elif url == '/check_library_function_report.html':
                text = check_library_report(self.infoPath, message_id='checkLibraryFunction')
                httpGetResponse(self.connection, text, 'text/html')
            elif url == '/check_library_noreturn_report.html':
                text = check_library_report(self.infoPath, message_id='checkLibraryNoReturn')
                httpGetResponse(self.connection, text, 'text/html')
            elif url == '/check_library_use_ignore_report.html':
                text = check_library_report(self.infoPath, message_id='checkLibraryUseIgnore')
                httpGetResponse(self.connection, text, 'text/html')
            elif url == '/check_library_check_type_report.html':
                text = check_library_report(self.infoPath, message_id='checkLibraryCheckType')
                httpGetResponse(self.connection, text, 'text/html')
            elif url.startswith('/check_library-'):
                function_name = url[len('/check_library-'):]
                text = check_library_function_name(self.infoPath, function_name, queryParams)
                httpGetResponse(self.connection, text, 'text/plain')
            elif url == '/value_flow_bailout_incomplete_var.html':
                text = check_library_report(self.resultPath, message_id='valueFlowBailoutIncompleteVar')
                httpGetResponse(self.connection, text, 'text/html')
            elif url == '/unknown_macro.html':
                text = check_library_report(self.resultPath, message_id='unknownMacro')
                httpGetResponse(self.connection, text, 'text/html')
            elif url.startswith('/incomplete_var-'):
                var_name = url[len('/incomplete_var-'):]
                text = check_library_function_name(self.resultPath, var_name, queryParams, nonfunc_id='valueFlowBailoutIncompleteVar')
                httpGetResponse(self.connection, text, 'text/plain')
            elif url.startswith('/unknown_macro-'):
                var_name = url[len('/unknown_macro-'):]
                text = check_library_function_name(self.resultPath, var_name, queryParams, nonfunc_id='unknownMacro')
                httpGetResponse(self.connection, text, 'text/plain')
            else:
                # fallback: serve a raw result file from the results directory
                # NOTE(review): uses the global resultPath rather than
                # self.resultPath — presumably the same value; verify
                filename = resultPath + url
                if not os.path.isfile(filename):
                    print_ts('HTTP/1.1 404 Not Found')
                    self.connection.send(b'HTTP/1.1 404 Not Found\r\n\r\n')
                else:
                    with open(filename, 'rt') as f:
                        data = f.read()
                    httpGetResponse(self.connection, data, 'text/plain')
        # bare except kept: any internal error is reported back to the client
        except:
            tb = "".join(traceback.format_exception(sys.exc_info()[0], sys.exc_info()[1], sys.exc_info()[2]))
            print_ts(tb)
            httpGetResponse(self.connection, tb, 'text/plain')
        finally:
            time.sleep(1)
            self.connection.close()
|
|
|
|
|
2018-11-17 19:32:10 +01:00
|
|
|
|
2022-11-20 13:10:43 +01:00
|
|
|
def read_data(connection, cmd, pos_nl, max_data_size, check_done, cmd_name, timeout=10):
|
|
|
|
data = cmd[pos_nl+1:]
|
2023-04-28 15:33:25 +02:00
|
|
|
t = 0.0
|
2022-11-20 13:10:43 +01:00
|
|
|
try:
|
|
|
|
while (len(data) < max_data_size) and (not check_done or not data.endswith('\nDONE')) and (timeout > 0 and t < timeout):
|
|
|
|
bytes_received = connection.recv(1024)
|
|
|
|
if bytes_received:
|
|
|
|
try:
|
|
|
|
text_received = bytes_received.decode('utf-8', 'ignore')
|
|
|
|
except UnicodeDecodeError as e:
|
|
|
|
print_ts('Error: Decoding failed ({}): {}'.format(cmd_name, e))
|
|
|
|
data = None
|
|
|
|
break
|
|
|
|
t = 0.0
|
|
|
|
data += text_received
|
|
|
|
elif not check_done:
|
|
|
|
break
|
|
|
|
else:
|
|
|
|
time.sleep(0.2)
|
|
|
|
t += 0.2
|
|
|
|
connection.close()
|
|
|
|
except socket.error as e:
|
2023-01-10 15:15:27 +01:00
|
|
|
print_ts('Socket error occurred ({}): {}'.format(cmd_name, e))
|
2022-11-20 13:10:43 +01:00
|
|
|
data = None
|
|
|
|
|
2023-04-28 15:33:25 +02:00
|
|
|
if timeout > 0 and t >= timeout:
|
2022-11-20 13:10:43 +01:00
|
|
|
print_ts('Timeout occurred ({}).'.format(cmd_name))
|
|
|
|
data = None
|
|
|
|
|
|
|
|
return data
|
|
|
|
|
2019-10-26 21:10:21 +02:00
|
|
|
def server(server_address_port: int, packages: list, packageIndex: int, resultPath: str) -> None:
    """Main server loop: accept connections and dispatch donate-cpu client commands.

    server_address_port -- TCP port to listen on
    packages -- package URLs to distribute; entries are set to None by 'write_nodata'
    packageIndex -- index of the next package handed out by the 'get' command
    resultPath -- directory where donated result files are written
    """
    socket.setdefaulttimeout(30)
    sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
    # allow quick restarts of the server without waiting for TIME_WAIT to expire
    sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
    server_address = ('', server_address_port)
    sock.bind(server_address)
    sock.listen(1)
    # most recently written result files, persisted across restarts in latest.txt
    latestResults = []
    if os.path.isfile('latest.txt'):
        with open('latest.txt', 'rt') as f:
            latestResults = f.read().strip().split(' ')

    print_ts('version ' + SERVER_VERSION)
    print_ts('listening on port ' + str(server_address_port))

    while True:
        # wait for a connection
        print_ts('waiting for a connection')
        connection, client_address = sock.accept()
        try:
            # the first 128 bytes are enough to identify the command
            bytes_received = connection.recv(128)
            cmd = bytes_received.decode('utf-8', 'ignore')
        except socket.error:
            connection.close()
            continue
        except UnicodeDecodeError as e:
            # NOTE(review): decode() with errors='ignore' should not raise - defensive
            connection.close()
            print_ts('Error: Decoding failed: ' + str(e))
            continue
        pos_nl = cmd.find('\n')
        if pos_nl < 1:
            # NOTE(review): this path leaves the connection open - confirm intended
            print_ts('No newline found in data.')
            continue
        firstLine = cmd[:pos_nl]
        # reject command lines containing unexpected characters
        if re.match('[a-zA-Z0-9./ ]+', firstLine) is None:
            print_ts('Unsupported characters found in command: {}'.format(firstLine))
            connection.close()
            continue
        if cmd.startswith('GET /'):
            # HTTP request: serve results via a worker thread; the thread closes the connection
            newThread = HttpClientThread(connection, cmd, resultPath, latestResults)
            newThread.start()
        elif cmd == 'GetCppcheckVersions\n':
            reply = 'head ' + OLD_VERSION
            print_ts('GetCppcheckVersions: ' + reply)
            connection.send(reply.encode('utf-8', 'ignore'))
            connection.close()
        elif cmd == 'get\n':
            # hand out the next package (round robin), skipping None entries
            while True:
                pkg = packages[packageIndex]
                packageIndex += 1
                if packageIndex >= len(packages):
                    packageIndex = 0
                if pkg is not None:
                    break

            # persist the index so a restart continues where we left off
            with open('package-index.txt', 'wt') as f:
                f.write(str(packageIndex) + '\n')

            print_ts('get:' + pkg)
            connection.send(pkg.encode('utf-8', 'ignore'))
            connection.close()
        elif cmd.startswith('write\nftp://') or cmd.startswith('write\nhttp://'):
            # client uploads scan results; read_data closes the connection
            data = read_data(connection, cmd, pos_nl, max_data_size=2 * 1024 * 1024, check_done=True, cmd_name='write')
            if data is None:
                continue

            # the first line of the payload is the package URL
            pos = data.find('\n')
            if pos == -1:
                print_ts('No newline found in data. Ignoring result data.')
                continue
            if pos < 10:
                print_ts('Data is less than 10 characters. Ignoring result data.')
                continue
            url = data[:pos]
            print_ts('write:' + url)

            # save data
            # only accept URLs that look like debian pool tarballs or our own test tarballs
            res = re.match(r'ftp://.*pool/main/[^/]+/([^/]+)/[^/]*tar.(gz|bz2|xz)', url)
            if res is None:
                res = re.match(r'https?://cppcheck\.sf\.net/([a-z]+).tgz', url)
            if res is None:
                print_ts('res is None. Ignoring result data.')
                continue
            if url not in packages:
                print_ts('Url is not in packages. Ignoring result data.')
                continue
            # Verify that head was compared to correct OLD_VERSION
            versions_found = False
            old_version_wrong = False
            # the 'cppcheck: ' version line is expected within the first ~20 lines
            for line in data.split('\n', 20):
                if line.startswith('cppcheck: '):
                    versions_found = True
                    if OLD_VERSION not in line.split():
                        print_ts('Compared to wrong old version. Should be ' + OLD_VERSION + '. Versions compared: ' +
                                 line + '. Ignoring result data.')
                        old_version_wrong = True
                    break
            if not versions_found:
                print_ts('Cppcheck versions missing in result data. Ignoring result data.')
                continue
            if old_version_wrong:
                print_ts('Unexpected old version. Ignoring result data.')
                continue
            print_ts('results added for package ' + res.group(1))
            filename = os.path.join(resultPath, res.group(1))
            with open(filename, 'wt') as f:
                f.write(strDateTime() + '\n' + data)
            # track latest added results..
            if len(latestResults) >= 20:
                latestResults = latestResults[1:]
            latestResults.append(filename)
            with open('latest.txt', 'wt') as f:
                f.write(' '.join(latestResults))
            # generate package.diff..
            generate_package_diff_statistics(filename)
        elif cmd.startswith('write_info\nftp://') or cmd.startswith('write_info\nhttp://'):
            # client uploads 'information' output; read_data closes the connection
            data = read_data(connection, cmd, pos_nl, max_data_size=1024 * 1024, check_done=True, cmd_name='write_info')
            if data is None:
                continue

            # the first line of the payload is the package URL
            pos = data.find('\n')
            if pos == -1:
                print_ts('No newline found in data. Ignoring information data.')
                continue
            if pos < 10:
                print_ts('Data is less than 10 characters. Ignoring information data.')
                continue
            url = data[:pos]
            print_ts('write_info:' + url)

            # save data
            res = re.match(r'ftp://.*pool/main/[^/]+/([^/]+)/[^/]*tar.(gz|bz2|xz)', url)
            if res is None:
                # NOTE(review): the 'write' branch accepts https? here - this one is https-only; confirm intended
                res = re.match(r'https://cppcheck\.sf\.net/([a-z]+).tgz', url)
            if res is None:
                print_ts('res is None. Ignoring information data.')
                continue
            if url not in packages:
                print_ts('Url is not in packages. Ignoring information data.')
                continue
            print_ts('adding info output for package ' + res.group(1))
            info_path = resultPath + '/' + 'info_output'
            if not os.path.exists(info_path):
                os.mkdir(info_path)
            filename = info_path + '/' + res.group(1)
            with open(filename, 'wt') as f:
                f.write(strDateTime() + '\n' + data)
        elif cmd == 'getPackagesCount\n':
            packages_count = str(len(packages))
            connection.send(packages_count.encode('utf-8', 'ignore'))
            connection.close()
            print_ts('getPackagesCount: ' + packages_count)
            continue
        elif cmd.startswith('getPackageIdx'):
            # fetch a specific package by index, e.g. 'getPackageIdx:42'
            request_idx = abs(int(cmd[len('getPackageIdx:'):]))
            if request_idx < len(packages):
                pkg = packages[request_idx]
                connection.send(pkg.encode('utf-8', 'ignore'))
                connection.close()
                print_ts('getPackageIdx: ' + pkg)
            else:
                connection.close()
                print_ts('getPackageIdx: index is out of range')
            continue
        elif cmd.startswith('write_nodata\nftp://'):
            # client reports a package with no files to process
            data = read_data(connection, cmd, pos_nl, max_data_size=8 * 1024, check_done=False, cmd_name='write_nodata')
            if data is None:
                continue

            pos = data.find('\n')
            if pos == -1:
                print_ts('No newline found in data. Ignoring no-data data.')
                continue
            if pos < 10:
                print_ts('Data is less than 10 characters ({}). Ignoring no-data data.'.format(pos))
                continue
            url = data[:pos]

            # search backwards from the current index - the reported package was
            # most likely handed out recently
            startIdx = packageIndex
            currentIdx = packageIndex
            while True:
                if packages[currentIdx] == url:
                    # mark the package as exhausted and remember it persistently
                    packages[currentIdx] = None
                    print_ts('write_nodata:' + url)

                    with open('packages_nodata.txt', 'at') as f:
                        f.write(url + '\n')
                    break
                if currentIdx == 0:
                    currentIdx = len(packages) - 1
                else:
                    currentIdx -= 1
                if currentIdx == startIdx:
                    # searched the whole list without a match
                    print_ts('write_nodata:' + url + ' - package not found')
                    break

            connection.close()
        else:
            if pos_nl < 0:
                print_ts('invalid command: "' + firstLine + '"')
            else:
                # log at most the first two lines of the unknown command
                lines = cmd.split('\n')
                s = '\\n'.join(lines[:2])
                if len(lines) > 2:
                    s += '...'
                print_ts('invalid command: "' + s + '"')
            connection.close()
|
2018-08-27 18:21:16 +02:00
|
|
|
|
2019-01-09 18:39:42 +01:00
|
|
|
|
2018-08-27 18:21:16 +02:00
|
|
|
if __name__ == "__main__":
    # prefer the system-wide work directory; fall back to a per-user one
    workPath = '/var/daca@home'
    if not os.path.isdir(workPath):
        workPath = os.path.expanduser('~/daca@home')
    os.chdir(workPath)
    print_ts('work path: ' + workPath)
    resultPath = workPath + '/donated-results'
    if not os.path.isdir(resultPath):
        print_ts("fatal: result path '{}' is missing".format(resultPath))
        sys.exit(1)

    # full list of package URLs to distribute to clients
    with open('packages.txt', 'rt') as f:
        packages = [val.strip() for val in f.readlines()]

    print_ts('packages: {}'.format(len(packages)))

    # drop packages previously reported as having no files to process
    if os.path.isfile('packages_nodata.txt'):
        with open('packages_nodata.txt', 'rt') as f:
            packages_nodata = [val.strip() for val in f.readlines()]
        packages_nodata.sort()

        print_ts('packages_nodata: {}'.format(len(packages_nodata)))

        # NOTE(review): .format() on a placeholder-free string is a no-op here
        print_ts('removing packages with no files to process'.format(len(packages_nodata)))
        packages_nodata_clean = []
        for pkg_n in packages_nodata:
            if pkg_n in packages:
                packages.remove(pkg_n)
                packages_nodata_clean.append(pkg_n)

        # rewrite packages_nodata.txt without stale entries (no longer in packages.txt)
        packages_nodata_diff = len(packages_nodata) - len(packages_nodata_clean)
        if packages_nodata_diff:
            with open('packages_nodata.txt', 'wt') as f:
                for pkg in packages_nodata_clean:
                    f.write(pkg + '\n')

            print_ts('removed {} packages from packages_nodata.txt'.format(packages_nodata_diff))

        print_ts('packages: {}'.format(len(packages)))

    if len(packages) == 0:
        print_ts('fatal: there are no packages')
        sys.exit(1)

    # resume distribution from the persisted index, if it is still valid
    packageIndex = 0
    if os.path.isfile('package-index.txt'):
        with open('package-index.txt', 'rt') as f:
            packageIndex = int(f.read())
        if packageIndex < 0 or packageIndex >= len(packages):
            packageIndex = 0

    # --test switches to an alternative port so a test instance can run
    # alongside the production server
    server_address_port = 8000
    if '--test' in sys.argv[1:]:
        server_address_port = 8001

    try:
        server(server_address_port, packages, packageIndex, resultPath)
    except socket.timeout:
        print_ts('Timeout!')
|