2019-10-26 21:10:21 +02:00
|
|
|
#!/usr/bin/env python3
|
2018-08-23 21:31:02 +02:00
|
|
|
|
|
|
|
# Server for 'donate-cpu.py'
|
2019-10-26 21:10:21 +02:00
|
|
|
# Runs only under Python 3.
|
2018-08-23 21:31:02 +02:00
|
|
|
|
2018-08-25 08:49:40 +02:00
|
|
|
import glob
|
2019-06-30 14:02:51 +02:00
|
|
|
import json
|
2018-08-23 21:31:02 +02:00
|
|
|
import os
|
|
|
|
import socket
|
|
|
|
import re
|
2018-08-24 13:04:25 +02:00
|
|
|
import datetime
|
2018-08-25 08:49:40 +02:00
|
|
|
import time
|
2018-08-25 18:38:51 +02:00
|
|
|
from threading import Thread
|
2018-08-27 18:21:16 +02:00
|
|
|
import sys
|
2019-10-26 21:10:21 +02:00
|
|
|
import urllib.request
|
|
|
|
import urllib.parse
|
|
|
|
import urllib.error
|
2019-02-09 12:41:02 +01:00
|
|
|
import logging
|
|
|
|
import logging.handlers
|
2019-02-28 13:34:23 +01:00
|
|
|
import operator
|
2018-08-24 13:04:25 +02:00
|
|
|
|
2019-03-29 12:12:16 +01:00
|
|
|
# Version scheme (MAJOR.MINOR.PATCH) should orientate on "Semantic Versioning" https://semver.org/
# Every change in this script should result in increasing the version number accordingly (exceptions may be cosmetic
# changes)
SERVER_VERSION = "1.3.0"

# Client version that results are compared against; results produced by
# older clients/cppcheck versions are skipped by the report generators.
OLD_VERSION = '1.89'
|
2018-12-08 11:39:44 +01:00
|
|
|
|
2019-01-09 18:39:42 +01:00
|
|
|
|
2019-02-09 12:41:02 +01:00
|
|
|
# Set up logging
logger = logging.getLogger()
logger.setLevel(logging.INFO)
# Logging to console
handler_stream = logging.StreamHandler()
logger.addHandler(handler_stream)
# Log errors to a rotating file
# sys.path[0] is the directory containing this script; the log file is
# placed next to the script itself.
logfile = sys.path[0]
if logfile:
    logfile += '/'
logfile += 'donate-cpu-server.log'
# Keep at most 100 KiB plus one backup so the log cannot grow unbounded.
handler_file = logging.handlers.RotatingFileHandler(filename=logfile, maxBytes=100*1024, backupCount=1)
handler_file.setLevel(logging.ERROR)
logger.addHandler(handler_file)
|
|
|
|
|
|
|
|
|
|
|
|
# Set up an exception hook for all uncaught exceptions so they can be logged
def handle_uncaught_exception(exc_type, exc_value, exc_traceback):
    """Log any uncaught exception via the root logger.

    KeyboardInterrupt is passed through to the default hook so that
    Ctrl-C still terminates the server with the usual traceback
    instead of being recorded as an error.
    """
    if issubclass(exc_type, KeyboardInterrupt):
        sys.__excepthook__(exc_type, exc_value, exc_traceback)
        return

    logging.error("Uncaught exception", exc_info=(exc_type, exc_value, exc_traceback))


sys.excepthook = handle_uncaught_exception
|
|
|
|
|
|
|
|
|
2019-10-26 21:10:21 +02:00
|
|
|
def strDateTime() -> str:
    """Return the current local date and time as 'YYYY-MM-DD hh:mm'."""
    now = datetime.datetime.now()
    return now.strftime('%Y-%m-%d %H:%M')
|
|
|
|
|
2018-08-24 13:04:25 +02:00
|
|
|
|
2019-10-26 21:10:21 +02:00
|
|
|
def dateTimeFromStr(datestr: str) -> datetime.datetime:
    """Parse a 'YYYY-MM-DD hh:mm' string (as written by strDateTime)."""
    date_format = '%Y-%m-%d %H:%M'
    return datetime.datetime.strptime(datestr, date_format)
|
2019-01-09 18:39:42 +01:00
|
|
|
|
2019-10-18 12:57:51 +02:00
|
|
|
|
2019-10-26 21:10:21 +02:00
|
|
|
def overviewReport() -> str:
    """Render the daca@home front page with links to every sub-report."""
    parts = [
        '<html><head><title>daca@home</title></head><body>\n',
        '<h1>daca@home</h1>\n',
        '<a href="crash.html">Crash report</a><br>\n',
        '<a href="stale.html">Stale report</a><br>\n',
        '<a href="diff.html">Diff report</a><br>\n',
        '<a href="head.html">HEAD report</a><br>\n',
        '<a href="latest.html">Latest results</a><br>\n',
        '<a href="time.html">Time report</a><br>\n',
        '<a href="check_library_function_report.html">checkLibraryFunction report</a><br>\n',
        '<a href="check_library_noreturn_report.html">checkLibraryNoReturn report</a><br>\n',
        '<a href="check_library_use_ignore_report.html">checkLibraryUseIgnore report</a><br>\n',
        '<br>\n',
        'version ' + SERVER_VERSION + '\n',
        '</body></html>',
    ]
    return ''.join(parts)
|
|
|
|
|
2019-01-09 18:39:42 +01:00
|
|
|
|
2019-10-26 21:10:21 +02:00
|
|
|
def fmt(a: str, b: str, c: str = None, d: str = None, e: str = None, link: bool = True) -> str:
    """Format one fixed-width table row for the <pre> reports.

    a: package name (optionally wrapped in a link), b: 'date time' string
    (date in the first 10 chars, time in the last 5), c/d/e: optional
    count columns, right-justified. Returns the assembled row.
    """
    column_width = [40, 10, 5, 6, 6, 8]
    ret = a.ljust(column_width[0])
    # Append the date only when the name did not overflow its column.
    if len(ret) == column_width[0]:
        ret += ' ' + b[:10]
    ret = ret.ljust(column_width[0] + 1 + column_width[1])
    ret += ' '
    # A full 'date time' string also carries a time in its last 5 chars.
    if len(b) > 10:
        ret += b[-5:].rjust(column_width[2]) + ' '
    if c is not None:
        ret += c.rjust(column_width[3]) + ' '
    if d is not None:
        ret += d.rjust(column_width[4]) + ' '
    if e is not None:
        ret += e.rjust(column_width[5])
    if link:
        # Replace the leading package name with a hyperlink to it.
        pos = ret.find(' ')
        ret = '<a href="' + a + '">' + a + '</a>' + ret[pos:]
    return ret
|
|
|
|
|
|
|
|
|
2019-10-26 21:10:21 +02:00
|
|
|
def latestReport(latestResults: list) -> str:
    """Render the 'latest results' page.

    latestResults: list of result-file paths, one per package, in the
    order they should appear. For each file the package name, upload
    date, issue counts and the lost/added diff between the OLD_VERSION
    and HEAD runs are shown.

    Fix: the result files were opened with `for line in open(...)`,
    leaking the file handles; they are now closed via `with`.
    """
    html = '<html><head><title>Latest daca@home results</title></head><body>\n'
    html += '<h1>Latest daca@home results</h1>\n'
    html += '<pre>\n<b>' + fmt('Package', 'Date Time', OLD_VERSION, 'Head', 'Diff', link=False) + '</b>\n'

    # Write report for latest results
    for filename in latestResults:
        if not os.path.isfile(filename):
            continue
        package = filename[filename.rfind('/')+1:]
        current_year = datetime.date.today().year

        datestr = ''
        # count: [head_count, old_version_count] as written by the client
        count = ['0', '0']
        lost = 0
        added = 0
        with open(filename, 'rt') as f:
            for line in f:
                line = line.strip()
                # Date lines from this or last year mark the upload time.
                if line.startswith(str(current_year) + '-') or line.startswith(str(current_year - 1) + '-'):
                    datestr = line
                elif line.startswith('count: '):
                    count = line.split(' ')[1:]
                elif line.startswith('head ') and not line.startswith('head results:'):
                    # diff finding only present in HEAD -> newly added
                    added += 1
                elif line.startswith(OLD_VERSION + ' '):
                    # diff finding only present in OLD_VERSION -> lost
                    lost += 1
        diff = ''
        if lost > 0:
            diff += '-' + str(lost)
        if added > 0:
            diff += '+' + str(added)
        html += fmt(package, datestr, count[1], count[0], diff) + '\n'

    html += '</pre></body></html>\n'
    return html
|
|
|
|
|
2018-08-25 18:38:51 +02:00
|
|
|
|
2019-10-26 21:10:21 +02:00
|
|
|
def crashReport(results_path: str) -> str:
    """Render the crash report: packages whose cppcheck run crashed, plus
    deduplicated stack traces sorted by number of affected packages.

    Fixes:
    - crash_line.rindex(' at ') raised ValueError when ' at ' was absent;
      the `> 0` guard shows rfind() (returns -1) was intended.
    - code_line could be unbound (or stale from a previous file) when the
      crash location was not found; it is now initialized per crash.
    """
    html = '<html><head><title>Crash report</title></head><body>\n'
    html += '<h1>Crash report</h1>\n'
    html += '<pre>\n'
    html += '<b>' + fmt('Package', 'Date Time', OLD_VERSION, 'Head', link=False) + '</b>\n'
    current_year = datetime.date.today().year
    # key: hash of the joined stack trace -> details + affected packages
    stack_traces = {}
    for filename in sorted(glob.glob(os.path.expanduser(results_path + '/*'))):
        if not os.path.isfile(filename):
            continue
        datestr = ''
        with open(filename, 'rt') as file_:
            for line in file_:
                line = line.strip()
                if line.startswith('cppcheck: '):
                    if OLD_VERSION not in line:
                        # Package results seem to be too old, skip
                        break
                    else:
                        # Current package, parse on
                        continue
                if line.startswith(str(current_year) + '-') or line.startswith(str(current_year - 1) + '-'):
                    datestr = line
                if line.startswith('count:'):
                    if line.find('Crash') < 0:
                        # no crash in this package - nothing to report
                        break
                    package = filename[filename.rfind('/')+1:]
                    counts = line.strip().split(' ')
                    c2 = ''
                    if counts[2] == 'Crash!':
                        c2 = 'Crash'
                    c1 = ''
                    if counts[1] == 'Crash!':
                        c1 = 'Crash'
                    html += fmt(package, datestr, c2, c1) + '\n'
                    if c1 != 'Crash':
                        # only OLD_VERSION crashed; no HEAD trace follows
                        break
                if line.find(' received signal ') != -1:
                    # The following lines contain the crash location,
                    # the offending source line and the gdb backtrace.
                    crash_line = next(file_, '').strip()
                    code_line = ''
                    location_index = crash_line.rfind(' at ')
                    if location_index > 0:
                        code_line = next(file_, '').strip()
                    stack_trace = []
                    while True:
                        frame_line = next(file_, '')
                        m = re.search(r'(?P<number>#\d+) .* (?P<function>.+)\(.*\) at (?P<location>.*)$', frame_line)
                        if not m:
                            break
                        # Normalize arguments to '(...)' so identical
                        # traces with different values dedupe together.
                        stack_trace.append(m.group('number') + ' ' + m.group('function') + '(...) at ' + m.group('location'))
                    key = hash(' '.join(stack_trace))

                    if key in stack_traces:
                        stack_traces[key]['code_line'] = code_line
                        stack_traces[key]['stack_trace'] = stack_trace
                        stack_traces[key]['n'] += 1
                        stack_traces[key]['packages'].append(package)
                    else:
                        stack_traces[key] = {'stack_trace': stack_trace, 'n': 1, 'code_line': code_line, 'packages': [package], 'crash_line': crash_line}
                    break

    html += '</pre>\n'
    html += '<pre>\n'
    html += '<b>Stack traces</b>\n'
    # Most frequent crash signatures first
    for stack_trace in sorted(list(stack_traces.values()), key=lambda x: x['n'], reverse=True):
        html += 'Packages: ' + ' '.join(['<a href="' + p + '">' + p + '</a>' for p in stack_trace['packages']]) + '\n'
        html += stack_trace['crash_line'] + '\n'
        html += stack_trace['code_line'] + '\n'
        html += '\n'.join(stack_trace['stack_trace']) + '\n\n'
    html += '</pre>\n'

    html += '</body></html>\n'
    return html
|
|
|
|
|
|
|
|
|
2019-10-26 21:10:21 +02:00
|
|
|
def staleReport(results_path: str) -> str:
    """Render the stale report: packages whose newest result upload is
    older than 30 days.

    Fix: result files were opened with `for line in open(...)`, leaking
    the file handles; they are now closed via `with`.
    """
    html = '<html><head><title>Stale report</title></head><body>\n'
    html += '<h1>Stale report</h1>\n'
    html += '<pre>\n'
    html += '<b>' + fmt('Package', 'Date Time', link=False) + '</b>\n'
    current_year = datetime.date.today().year
    for filename in sorted(glob.glob(os.path.expanduser(results_path + '/*'))):
        if not os.path.isfile(filename):
            continue
        with open(filename, 'rt') as f:
            for line in f:
                line = line.strip()
                # Only date lines are of interest; skip everything else.
                if line.startswith(str(current_year) + '-') or line.startswith(str(current_year - 1) + '-'):
                    datestr = line
                else:
                    continue
                dt = dateTimeFromStr(datestr)
                diff = datetime.datetime.now() - dt
                if diff.days < 30:
                    continue
                package = filename[filename.rfind('/')+1:]
                html += fmt(package, datestr) + '\n'
                # first stale date line is enough for this package
                break
    html += '</pre>\n'

    html += '</body></html>\n'
    return html
|
|
|
|
|
|
|
|
|
2019-10-26 21:10:21 +02:00
|
|
|
def diffReportFromDict(out: dict, today: str) -> str:
    """Render one diff table as a <pre> block.

    out: messageId -> [old_version_count, head_count].
    today: link suffix ('today' or '') used in the per-messageId links.
    Returns the HTML fragment including column sums.
    """
    html = '<pre>\n'
    html += '<b>MessageID ' + OLD_VERSION + ' Head</b>\n'
    sum0 = 0
    sum1 = 0
    for messageId in sorted(out.keys()):
        line = messageId + ' '
        counts = out[messageId]
        sum0 += counts[0]
        sum1 += counts[1]
        if counts[0] > 0:
            # right-align the OLD_VERSION count at column 40
            c = str(counts[0])
            while len(line) < 40 - len(c):
                line += ' '
            line += c + ' '
        if counts[1] > 0:
            # right-align the HEAD count at column 48
            c = str(counts[1])
            while len(line) < 48 - len(c):
                line += ' '
            line += c
        # turn the leading messageId into a link to the detail page
        line = '<a href="diff' + today + '-' + messageId + '">' + messageId + '</a>' + line[line.find(' '):]
        html += line + '\n'

    # Sum
    html += '================================================\n'
    line = ''
    while len(line) < 40 - len(str(sum0)):
        line += ' '
    line += str(sum0) + ' '
    while len(line) < 48 - len(str(sum1)):
        line += ' '
    line += str(sum1)
    html += line + '\n'
    html += '</pre>\n'

    return html
|
|
|
|
|
2018-09-06 06:53:40 +02:00
|
|
|
|
2019-10-26 21:10:21 +02:00
|
|
|
def diffReport(resultsPath: str) -> str:
    """Render the full diff report page.

    Aggregates the pre-computed *.diff JSON files (written by
    generate_package_diff_statistics) into per-messageId totals for all
    uploads and for today's uploads, then renders both tables.
    """
    out = {}
    outToday = {}
    today = strDateTime()[:10]

    for filename in sorted(glob.glob(resultsPath + '/*.diff')):
        if not os.path.isfile(filename):
            continue
        with open(filename, 'rt') as f:
            data = json.loads(f.read())
        uploadedToday = data['date'] == today
        for messageId in data['sums']:
            sums = data['sums'][messageId]
            # skip entries produced by a different cppcheck version
            if OLD_VERSION not in sums:
                continue
            if messageId not in out:
                out[messageId] = [0, 0]
            out[messageId][0] += sums[OLD_VERSION]
            out[messageId][1] += sums['head']
            if uploadedToday:
                if messageId not in outToday:
                    outToday[messageId] = [0, 0]
                outToday[messageId][0] += sums[OLD_VERSION]
                outToday[messageId][1] += sums['head']

    html = '<html><head><title>Diff report</title></head><body>\n'
    html += '<h1>Diff report</h1>\n'
    html += '<h2>Uploaded today</h2>'
    html += diffReportFromDict(outToday, 'today')
    html += '<h2>All</h2>'
    html += diffReportFromDict(out, '')

    return html
|
|
|
|
|
|
|
|
|
2019-10-26 21:10:21 +02:00
|
|
|
def generate_package_diff_statistics(filename: str) -> None:
    """Parse the 'diff:' section of one result file and write the
    per-messageId counts as JSON to <filename>.diff.

    If the file contains no diff findings, a stale .diff file is removed
    instead. Fix: the result file was opened with `for line in open(...)`
    and never closed; it is now closed via `with`.
    """
    is_diff = False

    # messageId -> {OLD_VERSION: count, 'head': count}
    sums = {}

    with open(filename, 'rt') as f:
        for line in f:
            line = line.strip()
            if line == 'diff:':
                is_diff = True
                continue
            elif not is_diff:
                continue
            # diff finding lines end with '[messageId]'
            if not line.endswith(']'):
                continue

            version = None
            if line.startswith(OLD_VERSION + ' '):
                version = OLD_VERSION
            elif line.startswith('head '):
                version = 'head'
            else:
                continue

            messageId = line[line.rfind('[')+1:len(line)-1]

            if messageId not in sums:
                sums[messageId] = {OLD_VERSION: 0, 'head': 0}

            sums[messageId][version] += 1

    output = {'date': strDateTime()[:10], 'sums': sums}

    filename_diff = filename + '.diff'
    if sums:
        with open(filename_diff, 'wt') as f:
            f.write(json.dumps(output))
    elif os.path.isfile(filename_diff):
        # no diff findings anymore - drop the outdated statistics file
        os.remove(filename_diff)
|
|
|
|
|
|
|
|
|
2019-10-26 21:10:21 +02:00
|
|
|
def diffMessageIdReport(resultPath: str, messageId: str) -> str:
    """List every diff finding with the given messageId across all
    packages, preceded by the package download URL.

    Uses the *.diff statistics files as a cheap pre-filter before
    scanning the (much larger) full result files.
    """
    text = messageId + '\n'
    e = '[' + messageId + ']\n'
    for filename in sorted(glob.glob(resultPath + '/*.diff')):
        if not os.path.isfile(filename):
            continue
        with open(filename, 'rt') as f:
            diff_stats = f.read()
            if messageId not in diff_stats:
                continue
        url = None
        diff = False
        # filename[:-5] strips the '.diff' suffix -> full result file
        for line in open(filename[:-5], 'rt'):
            if line.startswith('ftp://'):
                url = line
            elif line == 'diff:\n':
                diff = True
            elif not diff:
                continue
            elif line.endswith(e):
                if url:
                    # print the package URL once before its findings
                    text += url
                    url = None
                text += line
    return text
|
|
|
|
|
2018-09-06 17:31:07 +02:00
|
|
|
|
2019-10-26 21:10:21 +02:00
|
|
|
def diffMessageIdTodayReport(resultPath: str, messageId: str) -> str:
    """Like diffMessageIdReport, but restricted to packages whose results
    were uploaded today (first line of the result file carries the date).
    """
    text = messageId + '\n'
    e = '[' + messageId + ']\n'
    today = strDateTime()[:10]
    for filename in sorted(glob.glob(resultPath + '/*.diff')):
        if not os.path.isfile(filename):
            continue
        with open(filename, 'rt') as f:
            diff_stats = f.read()
            if messageId not in diff_stats:
                continue
            if today not in diff_stats:
                continue
        url = None
        diff = False
        firstLine = True
        # filename[:-5] strips the '.diff' suffix -> full result file
        for line in open(filename[:-5], 'rt'):
            if firstLine:
                firstLine = False
                if not line.startswith(today):
                    # not uploaded today after all - skip this package
                    break
            if line.startswith('ftp://'):
                url = line
            elif line == 'diff:\n':
                diff = True
            elif not diff:
                continue
            elif line.endswith(e):
                if url:
                    # print the package URL once before its findings
                    text += url
                    url = None
                text += line
    return text
|
|
|
|
|
|
|
|
|
2019-10-26 21:10:21 +02:00
|
|
|
def headReportFromDict(out: dict, today: str) -> str:
    """Render one HEAD count table as a <pre> block.

    out: messageId -> count. today: link suffix ('today' or '') used in
    the per-messageId detail links. Returns the HTML fragment with a
    trailing total line.
    """
    parts = ['<pre>\n', '<b>MessageID Count</b>\n']
    sumTotal = 0
    for messageId in sorted(out.keys()):
        count = out[messageId]
        sumTotal += count
        row = messageId + ' '
        if count > 0:
            # right-align the count at column 48
            count_text = str(count)
            row = row.ljust(48 - len(count_text)) + count_text + ' '
        # turn the leading messageId into a link to the detail page
        row = '<a href="head' + today + '-' + messageId + '">' + messageId + '</a>' + row[row.find(' '):]
        parts.append(row + '\n')

    # Sum
    parts.append('================================================\n')
    total_text = str(sumTotal)
    parts.append(''.ljust(48 - len(total_text)) + total_text + ' ' + '\n')
    parts.append('</pre>\n')

    return ''.join(parts)
|
|
|
|
|
|
|
|
|
2019-10-26 21:10:21 +02:00
|
|
|
def headReport(resultsPath: str) -> str:
    """Render the HEAD report page.

    Scans every result file, counts messageIds found in the
    'head results:' section, and renders two tables: findings from
    packages uploaded today and findings from all packages.
    """
    out = {}
    outToday = {}
    today = strDateTime()[:10]

    for filename in sorted(glob.glob(resultsPath + '/*')):
        if not os.path.isfile(filename):
            continue
        uploadedToday = False
        firstLine = True
        headResults = False
        for line in open(filename, 'rt'):
            if firstLine:
                # the first line carries the upload date
                if line.startswith(today):
                    uploadedToday = True
                firstLine = False
                continue
            line = line.strip()
            if line.startswith('cppcheck: '):
                if OLD_VERSION not in line:
                    # Package results seem to be too old, skip
                    break
                else:
                    # Current package, parse on
                    continue
            if line.startswith('head results:'):
                headResults = True
                continue
            if line.startswith('diff:'):
                if headResults:
                    # end of the head results section
                    break
            if not headResults:
                continue
            if not line.endswith(']'):
                continue
            if ': note: ' in line:
                # notes normally do not contain message ids but can end with ']'
                continue
            message_id_start_pos = line.rfind('[')
            if message_id_start_pos <= 0:
                continue
            messageId = line[message_id_start_pos+1:len(line)-1]
            if ' ' in messageId:
                # skip invalid messageIds
                continue

            if messageId not in out:
                out[messageId] = 0
            out[messageId] += 1
            if uploadedToday:
                if messageId not in outToday:
                    outToday[messageId] = 0
                outToday[messageId] += 1

    html = '<html><head><title>HEAD report</title></head><body>\n'
    html += '<h1>HEAD report</h1>\n'
    html += '<h2>Uploaded today</h2>'
    html += headReportFromDict(outToday, 'today')
    html += '<h2>All</h2>'
    html += headReportFromDict(out, '')

    return html
|
|
|
|
|
|
|
|
|
2019-10-26 21:10:21 +02:00
|
|
|
def headMessageIdReport(resultPath: str, messageId: str) -> str:
    """List every HEAD finding with the given messageId across all
    packages, each group preceded by the package download URL.
    """
    text = messageId + '\n'
    e = '[' + messageId + ']\n'
    for filename in sorted(glob.glob(resultPath + '/*')):
        if not os.path.isfile(filename):
            continue
        url = None
        headResults = False
        for line in open(filename, 'rt'):
            if line.startswith('ftp://'):
                url = line
            elif line.startswith('head results:'):
                headResults = True
            elif not headResults:
                continue
            elif headResults and line.startswith('diff:'):
                # end of the head results section
                break
            elif line.endswith(e):
                if url:
                    # print the package URL once before its findings
                    text += url
                    url = None
                text += line
    return text
|
|
|
|
|
|
|
|
|
2019-10-26 21:10:21 +02:00
|
|
|
def headMessageIdTodayReport(resultPath: str, messageId: str) -> str:
    """Like headMessageIdReport, but restricted to packages whose results
    were uploaded today (first line of the result file carries the date).
    """
    text = messageId + '\n'
    e = '[' + messageId + ']\n'
    today = strDateTime()[:10]
    for filename in sorted(glob.glob(resultPath + '/*')):
        if not os.path.isfile(filename):
            continue
        url = None
        headResults = False
        firstLine = True
        for line in open(filename, 'rt'):
            if firstLine:
                firstLine = False
                if not line.startswith(today):
                    # not uploaded today - skip this package
                    break
            if line.startswith('ftp://'):
                url = line
            elif line.startswith('head results:'):
                headResults = True
            elif not headResults:
                continue
            elif headResults and line.startswith('diff:'):
                # end of the head results section
                break
            elif line.endswith(e):
                if url:
                    # print the package URL once before its findings
                    text += url
                    url = None
                text += line
    return text
|
|
|
|
|
|
|
|
|
2019-10-26 21:10:21 +02:00
|
|
|
def timeReport(resultPath: str) -> str:
    """Render the time report page.

    Sums the OLD_VERSION and HEAD elapsed times over all packages and
    lists packages whose runtimes differ suspiciously (one version more
    than twice as slow as the other).
    """
    html = '<html><head><title>Time report</title></head><body>\n'
    html += '<h1>Time report</h1>\n'
    html += '<pre>\n'
    column_widths = [25, 10, 10, 10]
    html += '<b>'
    html += 'Package '.ljust(column_widths[0]) + ' ' + \
            OLD_VERSION.rjust(column_widths[1]) + ' ' + \
            'Head'.rjust(column_widths[2]) + ' ' + \
            'Factor'.rjust(column_widths[3])
    html += '</b>\n'

    total_time_base = 0.0
    total_time_head = 0.0
    for filename in glob.glob(resultPath + '/*'):
        if not os.path.isfile(filename):
            continue
        for line in open(filename, 'rt'):
            if line.startswith('cppcheck: '):
                if OLD_VERSION not in line:
                    # Package results seem to be too old, skip
                    break
                else:
                    # Current package, parse on
                    continue
            if not line.startswith('elapsed-time:'):
                continue
            # 'elapsed-time: <head> <old>' - note the reversed order
            split_line = line.strip().split()
            time_base = float(split_line[2])
            time_head = float(split_line[1])
            if time_base < 0.0 or time_head < 0.0:
                # ignore results with crashes / errors for the time report
                break
            total_time_base += time_base
            total_time_head += time_head
            suspicious_time_difference = False
            if time_base > 1 and time_base*2 < time_head:
                suspicious_time_difference = True
            elif time_head > 1 and time_head*2 < time_base:
                suspicious_time_difference = True
            if suspicious_time_difference:
                if time_base > 0.0:
                    time_factor = time_head / time_base
                else:
                    time_factor = 0.0
                html += filename[len(resultPath)+1:].ljust(column_widths[0]) + ' ' + \
                        split_line[2].rjust(column_widths[1]) + ' ' + \
                        split_line[1].rjust(column_widths[2]) + ' ' + \
                        '{:.2f}'.format(time_factor).rjust(column_widths[3]) + '\n'
            # only the first elapsed-time line per package is relevant
            break

    html += '\n'
    if total_time_base > 0.0:
        total_time_factor = total_time_head / total_time_base
    else:
        total_time_factor = 0.0
    html += 'Time for all packages (not just the ones listed above):\n'
    html += 'Total time: '.ljust(column_widths[0]) + ' ' + \
            '{:.1f}'.format(total_time_base).rjust(column_widths[1]) + ' ' + \
            '{:.1f}'.format(total_time_head).rjust(column_widths[2]) + ' ' + \
            '{:.2f}'.format(total_time_factor).rjust(column_widths[3])

    html += '\n'
    html += '</pre>\n'
    html += '</body></html>\n'

    return html
|
2018-09-02 09:35:38 +02:00
|
|
|
|
2019-01-09 18:39:42 +01:00
|
|
|
|
2019-10-26 21:10:21 +02:00
|
|
|
def check_library_report(result_path: str, message_id: str) -> str:
    """Render a report of functions triggering the given checkLibrary*
    message, sorted by occurrence count (descending).

    message_id must be one of the three known checkLibrary ids; anything
    else returns (and prints) an error string.
    """
    if message_id not in ('checkLibraryNoReturn', 'checkLibraryFunction', 'checkLibraryUseIgnore'):
        error_message = 'Invalid value ' + message_id + ' for message_id parameter.'
        print(error_message)
        return error_message

    functions_shown_max = 50000
    html = '<html><head><title>' + message_id + ' report</title></head><body>\n'
    html += '<h1>' + message_id + ' report</h1>\n'
    html += 'Top ' + str(functions_shown_max) + ' functions are shown.'
    html += '<pre>\n'
    column_widths = [10, 100]
    html += '<b>'
    html += 'Count'.rjust(column_widths[0]) + ' ' + \
            'Function'
    html += '</b>\n'

    # function name -> number of occurrences across all packages
    function_counts = {}
    for filename in glob.glob(result_path + '/*'):
        if not os.path.isfile(filename):
            continue
        info_messages = False
        for line in open(filename, 'rt'):
            if line.startswith('cppcheck: '):
                if OLD_VERSION not in line:
                    # Package results seem to be too old, skip
                    break
                else:
                    # Current package, parse on
                    continue
            if line == 'info messages:\n':
                info_messages = True
            if not info_messages:
                continue
            if line.endswith('[' + message_id + ']\n'):
                # extract the function name from the message text; the
                # two message kinds use different wording
                if message_id == 'checkLibraryFunction':
                    function_name = line[(line.find('for function ') + len('for function ')):line.rfind('[') - 1]
                else:
                    function_name = line[(line.find(': Function ') + len(': Function ')):line.rfind('should have') - 1]
                function_counts[function_name] = function_counts.setdefault(function_name, 0) + 1

    function_details_list = []
    for function_name, count in sorted(list(function_counts.items()), key=operator.itemgetter(1), reverse=True):
        if len(function_details_list) >= functions_shown_max:
            break
        function_details_list.append(str(count).rjust(column_widths[0]) + ' ' +
                                     '<a href="check_library-' + urllib.parse.quote_plus(function_name) + '">' + function_name + '</a>\n')

    html += ''.join(function_details_list)
    html += '</pre>\n'
    html += '</body></html>\n'

    return html
|
|
|
|
|
|
|
|
|
|
|
|
# Lists all checkLibrary* messages regarding the given function name
|
2019-10-26 21:10:21 +02:00
|
|
|
# Lists all checkLibrary* messages regarding the given function name
def check_library_function_name(result_path: str, function_name: str) -> str:
    """Collect all checkLibrary* messages mentioning *function_name*
    (URL-encoded), each group preceded by the package URL and the
    cppcheck options used.
    """
    print('check_library_function_name')
    function_name = urllib.parse.unquote_plus(function_name)
    output_lines_list = []
    for filename in glob.glob(result_path + '/*'):
        if not os.path.isfile(filename):
            continue
        info_messages = False
        url = None
        cppcheck_options = None
        for line in open(filename, 'rt'):
            if line.startswith('ftp://'):
                url = line
            elif line.startswith('cppcheck-options:'):
                cppcheck_options = line
            elif line == 'info messages:\n':
                info_messages = True
            if not info_messages:
                continue
            if '[checkLibrary' in line:
                # match on ' <name>' to avoid substring false positives
                if (' ' + function_name) in line:
                    if url:
                        output_lines_list.append(url)
                        url = None
                    if cppcheck_options:
                        output_lines_list.append(cppcheck_options)
                        cppcheck_options = None
                    output_lines_list.append(line)

    return ''.join(output_lines_list)
|
2019-01-22 15:27:13 +01:00
|
|
|
|
|
|
|
|
2019-10-26 21:10:21 +02:00
|
|
|
def sendAll(connection: socket.socket, text: str) -> None:
    """Send the complete UTF-8 encoding of *text* over *connection*.

    Keeps calling send() until every byte has been transmitted, since a
    single send() may deliver only part of the buffer.
    """
    remaining = text.encode('utf-8', 'ignore')
    while remaining:
        sent = connection.send(remaining)
        if sent < len(remaining):
            remaining = remaining[sent:]
        else:
            remaining = None
|
2018-08-23 21:31:02 +02:00
|
|
|
|
2018-08-25 18:38:51 +02:00
|
|
|
|
2019-10-26 21:10:21 +02:00
|
|
|
def httpGetResponse(connection: socket.socket, data: str, contentType: str) -> None:
    """Send *data* as the body of an HTTP 200 response over *connection*."""
    header_lines = [
        'HTTP/1.1 200 OK',
        'Connection: close',
        'Content-length: ' + str(len(data)),
        'Content-type: ' + contentType,
    ]
    sendAll(connection, '\r\n'.join(header_lines) + '\r\n\r\n' + data)
|
|
|
|
|
2018-08-23 21:31:02 +02:00
|
|
|
|
2018-08-25 18:38:51 +02:00
|
|
|
class HttpClientThread(Thread):
    """Answers a single HTTP GET request on its own thread.

    The request URL is mapped to one of the module's report generator
    functions; any unrecognized URL is served as a plain text file from
    the results directory.
    """

    def __init__(self, connection: socket.socket, cmd: str, resultPath: str, latestResults: list) -> None:
        Thread.__init__(self)
        self.connection = connection
        # Only the HTTP request line (first line) is needed.
        self.cmd = cmd[:cmd.find('\n')]
        self.resultPath = resultPath
        self.latestResults = latestResults

    def run(self):
        try:
            cmd = self.cmd
            print('[' + strDateTime() + '] ' + cmd)
            res = re.match(r'GET /([a-zA-Z0-9_\-\.\+%]*) HTTP', cmd)
            if res is None:
                self.connection.close()
                return
            url = res.group(1)
            if url == '':
                html = overviewReport()
                httpGetResponse(self.connection, html, 'text/html')
            elif url == 'latest.html':
                html = latestReport(self.latestResults)
                httpGetResponse(self.connection, html, 'text/html')
            elif url == 'crash.html':
                html = crashReport(self.resultPath)
                httpGetResponse(self.connection, html, 'text/html')
            elif url == 'stale.html':
                html = staleReport(self.resultPath)
                httpGetResponse(self.connection, html, 'text/html')
            elif url == 'diff.html':
                html = diffReport(self.resultPath)
                httpGetResponse(self.connection, html, 'text/html')
            elif url.startswith('difftoday-'):
                messageId = url[10:]
                text = diffMessageIdTodayReport(self.resultPath, messageId)
                httpGetResponse(self.connection, text, 'text/plain')
            elif url.startswith('diff-'):
                messageId = url[5:]
                text = diffMessageIdReport(self.resultPath, messageId)
                httpGetResponse(self.connection, text, 'text/plain')
            elif url == 'head.html':
                html = headReport(self.resultPath)
                httpGetResponse(self.connection, html, 'text/html')
            elif url.startswith('headtoday-'):
                messageId = url[10:]
                text = headMessageIdTodayReport(self.resultPath, messageId)
                httpGetResponse(self.connection, text, 'text/plain')
            elif url.startswith('head-'):
                messageId = url[5:]
                text = headMessageIdReport(self.resultPath, messageId)
                httpGetResponse(self.connection, text, 'text/plain')
            elif url == 'time.html':
                text = timeReport(self.resultPath)
                httpGetResponse(self.connection, text, 'text/html')
            elif url == 'check_library_function_report.html':
                text = check_library_report(self.resultPath + '/' + 'info_output', message_id='checkLibraryFunction')
                httpGetResponse(self.connection, text, 'text/html')
            elif url == 'check_library_noreturn_report.html':
                text = check_library_report(self.resultPath + '/' + 'info_output', message_id='checkLibraryNoReturn')
                httpGetResponse(self.connection, text, 'text/html')
            elif url == 'check_library_use_ignore_report.html':
                text = check_library_report(self.resultPath + '/' + 'info_output', message_id='checkLibraryUseIgnore')
                httpGetResponse(self.connection, text, 'text/html')
            elif url.startswith('check_library-'):
                print('check library function !')
                function_name = url[len('check_library-'):]
                text = check_library_function_name(self.resultPath + '/' + 'info_output', function_name)
                httpGetResponse(self.connection, text, 'text/plain')
            else:
                # Fall back to serving a raw result file.
                # Fix: use self.resultPath instead of the module-level
                # 'resultPath' global that only happened to be in scope.
                filename = self.resultPath + '/' + url
                if not os.path.isfile(filename):
                    print('HTTP/1.1 404 Not Found')
                    self.connection.send(b'HTTP/1.1 404 Not Found\r\n\r\n')
                else:
                    # Fix: close the file deterministically via a context manager.
                    with open(filename, 'rt') as f:
                        data = f.read()
                    httpGetResponse(self.connection, data, 'text/plain')
        finally:
            time.sleep(1)
            self.connection.close()
|
|
|
|
|
2018-11-17 19:32:10 +01:00
|
|
|
|
2019-10-26 21:10:21 +02:00
|
|
|
def server(server_address_port: int, packages: list, packageIndex: int, resultPath: str) -> None:
    """Main accept loop of the daca@home server.

    Listens on *server_address_port* and dispatches each connection by the
    first line of the received command:
      - 'GET /...'            -> answered asynchronously by a HttpClientThread
      - 'GetCppcheckVersions', 'get', 'getPackagesCount', 'getPackageIdx:N'
                              -> answered synchronously on this thread
      - 'write' / 'write_info' -> receive result data and store it below
                                  *resultPath* (resp. its info_output subdir)
    Runs forever; only a socket timeout from accept() propagates to the caller.
    """
    socket.setdefaulttimeout(30)
    sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
    sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
    server_address = ('', server_address_port)
    sock.bind(server_address)
    sock.listen(1)
    # Restore the list of most recently written result files from disk so the
    # 'latest.html' report survives server restarts.
    latestResults = []
    if os.path.isfile('latest.txt'):
        with open('latest.txt', 'rt') as f:
            latestResults = f.read().strip().split(' ')
    print('[' + strDateTime() + '] version ' + SERVER_VERSION)
    print('[' + strDateTime() + '] listening on port ' + str(server_address_port))
    while True:
        # wait for a connection
        print('[' + strDateTime() + '] waiting for a connection')
        connection, client_address = sock.accept()
        try:
            # 128 bytes is enough for the command line; for write commands the
            # remainder of the payload is received further below.
            bytes_received = connection.recv(128)
            cmd = bytes_received.decode('utf-8', 'ignore')
        except socket.error:
            connection.close()
            continue
        # NOTE(review): decode() with errors='ignore' should not raise; this
        # handler looks defensive — confirm before removing.
        except UnicodeDecodeError as e:
            connection.close()
            print('Error: Decoding failed: ' + str(e))
            continue
        if cmd.find('\n') < 1:
            continue
        firstLine = cmd[:cmd.find('\n')]
        # Reject commands containing unexpected characters.
        if re.match('[a-zA-Z0-9./ ]+', firstLine) is None:
            connection.close()
            continue
        if cmd.startswith('GET /'):
            # HTTP requests are handled in a worker thread; the thread owns
            # and closes the connection.
            newThread = HttpClientThread(connection, cmd, resultPath, latestResults)
            newThread.start()
        elif cmd == 'GetCppcheckVersions\n':
            reply = 'head ' + OLD_VERSION
            print('[' + strDateTime() + '] GetCppcheckVersions: ' + reply)
            connection.send(reply.encode('utf-8', 'ignore'))
            connection.close()
        elif cmd == 'get\n':
            # Hand out the next package round-robin and persist the index so a
            # restart continues where the last run stopped.
            pkg = packages[packageIndex]
            packageIndex += 1
            if packageIndex >= len(packages):
                packageIndex = 0

            f = open('package-index.txt', 'wt')
            f.write(str(packageIndex) + '\n')
            f.close()

            print('[' + strDateTime() + '] get:' + pkg)
            connection.send(pkg.encode('utf-8', 'ignore'))
            connection.close()
        elif cmd.startswith('write\nftp://'):
            # read data
            data = cmd[cmd.find('ftp'):]
            try:
                # Keep receiving until the '\nDONE' terminator arrives, the
                # 2 MiB size cap is reached, or 10 seconds pass without data.
                t = 0.0
                max_data_size = 2 * 1024 * 1024
                while (len(data) < max_data_size) and (not data.endswith('\nDONE')) and (t < 10):
                    bytes_received = connection.recv(1024)
                    if bytes_received:
                        try:
                            text_received = bytes_received.decode('utf-8', 'ignore')
                        except UnicodeDecodeError as e:
                            print('Error: Decoding failed: ' + str(e))
                            data = ''
                            break
                        t = 0.0
                        data += text_received
                    else:
                        time.sleep(0.2)
                        t += 0.2
                connection.close()
            except socket.error as e:
                # Best effort: whatever arrived so far is validated below.
                pass

            pos = data.find('\n')
            if pos < 10:
                continue
            url = data[:pos]
            print('[' + strDateTime() + '] write:' + url)

            # save data
            res = re.match(r'ftp://.*pool/main/[^/]+/([^/]+)/[^/]*tar.(gz|bz2)', url)
            if res is None:
                print('results not written. res is None.')
                continue
            # Only accept results for known package URLs; this also prevents
            # clients from choosing arbitrary file names.
            if url not in packages:
                print('results not written. url is not in packages.')
                continue
            # Verify that head was compared to correct OLD_VERSION
            versions_found = False
            old_version_wrong = False
            for line in data.split('\n', 20):
                if line.startswith('cppcheck: '):
                    versions_found = True
                    if OLD_VERSION not in line.split():
                        print('Compared to wrong old version. Should be ' + OLD_VERSION + '. Versions compared: ' +
                              line)
                        print('Ignoring data.')
                        old_version_wrong = True
                    break
            if not versions_found:
                print('Cppcheck versions missing in result data. Ignoring data.')
                continue
            if old_version_wrong:
                continue
            print('results added for package ' + res.group(1))
            filename = os.path.join(resultPath, res.group(1))
            with open(filename, 'wt') as f:
                f.write(strDateTime() + '\n' + data)
            # track latest added results..
            if len(latestResults) >= 20:
                latestResults = latestResults[1:]
            latestResults.append(filename)
            with open('latest.txt', 'wt') as f:
                f.write(' '.join(latestResults))
            # generate package.diff..
            generate_package_diff_statistics(filename)
        elif cmd.startswith('write_info\nftp://'):
            # read data
            # Same receive protocol as 'write' above, but with a 1 MiB cap and
            # results stored in the info_output subdirectory.
            data = cmd[11:]
            try:
                t = 0.0
                max_data_size = 1024 * 1024
                while (len(data) < max_data_size) and (not data.endswith('\nDONE')) and (t < 10):
                    bytes_received = connection.recv(1024)
                    if bytes_received:
                        try:
                            text_received = bytes_received.decode('utf-8', 'ignore')
                        except UnicodeDecodeError as e:
                            print('Error: Decoding failed: ' + str(e))
                            data = ''
                            break
                        t = 0.0
                        data += text_received
                    else:
                        time.sleep(0.2)
                        t += 0.2
                connection.close()
            except socket.error as e:
                pass

            pos = data.find('\n')
            if pos < 10:
                continue
            url = data[:pos]
            print('[' + strDateTime() + '] write_info:' + url)

            # save data
            res = re.match(r'ftp://.*pool/main/[^/]+/([^/]+)/[^/]*tar.(gz|bz2)', url)
            if res is None:
                print('info output not written. res is None.')
                continue
            if url not in packages:
                print('info output not written. url is not in packages.')
                continue
            print('adding info output for package ' + res.group(1))
            info_path = resultPath + '/' + 'info_output'
            if not os.path.exists(info_path):
                os.mkdir(info_path)
            filename = info_path + '/' + res.group(1)
            with open(filename, 'wt') as f:
                f.write(strDateTime() + '\n' + data)
        elif cmd == 'getPackagesCount\n':
            packages_count = str(len(packages))
            connection.send(packages_count.encode('utf-8', 'ignore'))
            connection.close()
            print('[' + strDateTime() + '] getPackagesCount: ' + packages_count)
            continue
        elif cmd.startswith('getPackageIdx'):
            # 'getPackageIdx:N' requests a specific package by index.
            request_idx = abs(int(cmd[len('getPackageIdx:'):]))
            if request_idx < len(packages):
                pkg = packages[request_idx]
                connection.send(pkg.encode('utf-8', 'ignore'))
                connection.close()
                print('[' + strDateTime() + '] getPackageIdx: ' + pkg)
            else:
                connection.close()
                print('[' + strDateTime() + '] getPackageIdx: index is out of range')
            continue
        else:
            print('[' + strDateTime() + '] invalid command: ' + firstLine)
            connection.close()
|
2018-08-27 18:21:16 +02:00
|
|
|
|
2019-01-09 18:39:42 +01:00
|
|
|
|
2018-08-27 18:21:16 +02:00
|
|
|
if __name__ == "__main__":
    # Prefer the system-wide work directory; fall back to the user's home.
    workPath = '/var/daca@home'
    if not os.path.isdir(workPath):
        workPath = os.path.expanduser('~/daca@home')
    os.chdir(workPath)
    print('work path: ' + workPath)
    resultPath = workPath + '/donated-results'

    # Load the list of package URLs that are distributed to clients.
    # Fix: use context managers so the file handles are closed even if
    # reading or parsing raises (the originals used open()/close() pairs).
    with open('packages.txt', 'rt') as f:
        packages = [val.strip() for val in f.readlines()]

    print('packages: ' + str(len(packages)))

    if len(packages) == 0:
        print('fatal: there are no packages')
        sys.exit(1)

    # Resume the round-robin package distribution where the last run stopped.
    packageIndex = 0
    if os.path.isfile('package-index.txt'):
        with open('package-index.txt', 'rt') as f:
            packageIndex = int(f.read())
        if packageIndex < 0 or packageIndex >= len(packages):
            packageIndex = 0

    # Port 8001 is used when running a test instance next to production.
    server_address_port = 8000
    if '--test' in sys.argv[1:]:
        server_address_port = 8001

    try:
        server(server_address_port, packages, packageIndex, resultPath)
    except socket.timeout:
        print('Timeout!')
|