refs #10700 - donate-cpu-server.py: added query parameter `pkgs` to some reports to request a list of affected packages (#3743)
Parent: 083efe6361
Commit: 0ab7116891
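Usage note (a hedged sketch; the host and port below are placeholders, the real report-server address depends on the deployment): with this change, appending ?pkgs=1 to the crash report or to a head-<messageId> report makes the server answer with a plain-text list of download URLs for the affected packages instead of the usual report body. A client could fetch the lists like this:

import urllib.request

BASE = 'http://localhost:8000'  # placeholder address for the donate-cpu report server

# plain-text list of packages that currently crash cppcheck
with urllib.request.urlopen(BASE + '/crash.html?pkgs=1') as resp:
    print(resp.read().decode())

# plain-text list of packages affected by one message id
with urllib.request.urlopen(BASE + '/head-uninitvar?pkgs=1') as resp:
    print(resp.read().decode())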
@@ -148,6 +148,13 @@ jobs:
       env:
         PYTHONPATH: ./tools

+    - name: test donate_cpu_server
+      if: matrix.python-version != '2.7'
+      run: |
+        python -m pytest -v tools/test_donate_cpu_server.py
+      env:
+        PYTHONPATH: ./tools
+
     - name: dmake
       if: matrix.python-version == '3.10'
       run: |
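Locally, the same check can be reproduced from the repository root with PYTHONPATH=./tools python -m pytest -v tools/test_donate_cpu_server.py, mirroring the CI step above; the PYTHONPATH entry lets the test find donate-cpu-server.py under tools/.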
@@ -21,11 +21,12 @@ import logging
 import logging.handlers
 import operator
 import html as html_lib
+from urllib.parse import urlparse

 # Version scheme (MAJOR.MINOR.PATCH) should orientate on "Semantic Versioning" https://semver.org/
 # Every change in this script should result in increasing the version number accordingly (exceptions may be cosmetic
 # changes)
-SERVER_VERSION = "1.3.29"
+SERVER_VERSION = "1.3.30"

 OLD_VERSION = '2.9'

@@ -42,12 +43,13 @@ if logfile:
         logfile += '/'
     logfile += 'donate-cpu-server.log'
     handler_file = logging.handlers.RotatingFileHandler(filename=logfile, maxBytes=100*1024, backupCount=1)
+    handler_file.setFormatter(logging.Formatter('%(asctime)s %(message)s'))
     handler_file.setLevel(logging.ERROR)
     logger.addHandler(handler_file)


-def print_ts(msg):
-    print('[' + strDateTime() + '] ' + msg)
+def print_ts(msg) -> None:
+    print('[{}] {}'.format(strDateTime(), msg))


 # Set up an exception hook for all uncaught exceptions so they can be logged
@@ -179,7 +181,9 @@ def latestReport(latestResults: list) -> str:
     return html


-def crashReport(results_path: str) -> str:
+def crashReport(results_path: str, query_params: dict):
+    pkgs = '' if query_params.get('pkgs') == '1' else None
+
     html = '<html><head><title>Crash report</title></head><body>\n'
     html += '<h1>Crash report</h1>\n'
     html += '<pre>\n'
@@ -191,6 +195,7 @@ def crashReport(results_path: str) -> str:
             continue
         with open(filename, 'rt') as file_:
             datestr = None
+            package_url = None
             for line in file_:
                 line = line.strip()
                 if line.startswith('cppcheck: '):
@@ -202,6 +207,8 @@ def crashReport(results_path: str) -> str:
                     continue
                 if datestr is None and line.startswith(str(current_year) + '-') or line.startswith(str(current_year - 1) + '-'):
                     datestr = line
+                elif pkgs is not None and package_url is None and line.startswith('ftp://'):
+                    package_url = line
                 elif line.startswith('count:'):
                     if line.find('Crash') < 0:
                         break
@@ -214,6 +221,8 @@ def crashReport(results_path: str) -> str:
                     if counts[1] == 'Crash!':
                         c_head = 'Crash'
                     html += fmt(package, datestr, c_version, c_head) + '\n'
+                    if package_url is not None:
+                        pkgs += '{}\n'.format(package_url)
                     if c_head != 'Crash':
                         break
                 elif line.find(' received signal ') != -1:
@@ -271,7 +280,9 @@ def crashReport(results_path: str) -> str:
     html += '</pre>\n'

     html += '</body></html>\n'
-    return html
+    if pkgs is not None:
+        return pkgs, 'text/plain'
+    return html, 'text/html'


 def timeoutReport(results_path: str) -> str:
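To summarize the new contract: crashReport() now returns a (body, content-type) pair rather than a bare HTML string, and the HTTP handler further down passes that content type straight to httpGetResponse(). A minimal illustration, using a placeholder results path:

# '/path/to/results' is a placeholder; query_params mirrors the dict produced by parse_req()
body, mime = crashReport('/path/to/results', {})
assert mime == 'text/html'   # full HTML crash report

body, mime = crashReport('/path/to/results', {'pkgs': '1'})
assert mime == 'text/plain'  # newline-separated package URLs only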
@@ -609,7 +620,8 @@ def headReport(resultsPath: str) -> str:
     return html


-def headMessageIdReport(resultPath: str, messageId: str) -> str:
+def headMessageIdReport(resultPath: str, messageId: str, query_params: dict) -> str:
+    pkgs = '' if query_params.get('pkgs') == '1' else None
     text = messageId + '\n'
     e = '[' + messageId + ']\n'
     for filename in sorted(glob.glob(resultPath + '/*')):
@@ -629,8 +641,12 @@ def headMessageIdReport(resultPath: str, messageId: str) -> str:
                 elif line.endswith(e):
                     if url:
                         text += url
+                        if pkgs is not None:
+                            pkgs += url
                         url = None
                     text += line
+    if pkgs is not None:
+        return pkgs
     return text


@@ -938,83 +954,91 @@ class HttpClientThread(Thread):
     def __init__(self, connection: socket.socket, cmd: str, resultPath: str, latestResults: list) -> None:
         Thread.__init__(self)
         self.connection = connection
-        self.cmd = cmd[:cmd.find('\n')]
+        self.cmd = cmd[:cmd.find('\r\n')]
         self.resultPath = resultPath
         self.latestResults = latestResults

+    # TODO: use a proper parser
+    def parse_req(cmd):
+        req_parts = cmd.split(' ')
+        if len(req_parts) != 3 or req_parts[0] != 'GET' or not req_parts[2].startswith('HTTP'):
+            return None, None
+        url_obj = urlparse(req_parts[1])
+        return url_obj.path, dict(urllib.parse.parse_qsl(url_obj.query))
+
     def run(self):
         try:
             cmd = self.cmd
             print_ts(cmd)
-            res = re.match(r'GET /([a-zA-Z0-9_\-\.\+%]*) HTTP', cmd)
-            if res is None:
+            url, queryParams = HttpClientThread.parse_req(cmd)
+            if url is None:
+                print_ts('invalid request: {}'.format(cmd))
                 self.connection.close()
                 return
-            url = res.group(1)
-            if url == '':
+            if url == '/':
                 html = overviewReport()
                 httpGetResponse(self.connection, html, 'text/html')
-            elif url == 'latest.html':
+            elif url == '/latest.html':
                 html = latestReport(self.latestResults)
                 httpGetResponse(self.connection, html, 'text/html')
-            elif url == 'crash.html':
-                html = crashReport(self.resultPath)
-                httpGetResponse(self.connection, html, 'text/html')
+            elif url == '/crash.html':
+                text, mime = crashReport(self.resultPath, queryParams)
+                httpGetResponse(self.connection, text, mime)
-            elif url == 'timeout.html':
+            elif url == '/timeout.html':
                 html = timeoutReport(self.resultPath)
                 httpGetResponse(self.connection, html, 'text/html')
-            elif url == 'stale.html':
+            elif url == '/stale.html':
                 html = staleReport(self.resultPath)
                 httpGetResponse(self.connection, html, 'text/html')
-            elif url == 'diff.html':
+            elif url == '/diff.html':
                 html = diffReport(self.resultPath)
                 httpGetResponse(self.connection, html, 'text/html')
-            elif url.startswith('difftoday-'):
-                messageId = url[10:]
+            elif url.startswith('/difftoday-'):
+                messageId = url[len('/difftoday-'):]
                 text = diffMessageIdTodayReport(self.resultPath, messageId)
                 httpGetResponse(self.connection, text, 'text/plain')
-            elif url.startswith('diff-'):
-                messageId = url[5:]
+            elif url.startswith('/diff-'):
+                messageId = url[len('/diff-'):]
                 text = diffMessageIdReport(self.resultPath, messageId)
                 httpGetResponse(self.connection, text, 'text/plain')
-            elif url == 'head.html':
+            elif url == '/head.html':
                 html = headReport(self.resultPath)
                 httpGetResponse(self.connection, html, 'text/html')
-            elif url.startswith('headtoday-'):
-                messageId = url[10:]
+            elif url.startswith('/headtoday-'):
+                messageId = url[len('/headtoday-'):]
                 text = headMessageIdTodayReport(self.resultPath, messageId)
                 httpGetResponse(self.connection, text, 'text/plain')
-            elif url.startswith('head-'):
-                messageId = url[5:]
-                text = headMessageIdReport(self.resultPath, messageId)
+            elif url.startswith('/head-'):
+                messageId = url[len('/head-'):]
+                text = headMessageIdReport(self.resultPath, messageId, queryParams)
                 httpGetResponse(self.connection, text, 'text/plain')
-            elif url == 'time_lt.html':
+            elif url == '/time_lt.html':
                 text = timeReport(self.resultPath, False)
                 httpGetResponse(self.connection, text, 'text/html')
-            elif url == 'time_gt.html':
+            elif url == '/time_gt.html':
                 text = timeReport(self.resultPath, True)
                 httpGetResponse(self.connection, text, 'text/html')
-            elif url == 'time_slow.html':
+            elif url == '/time_slow.html':
                 text = timeReportSlow(self.resultPath)
                 httpGetResponse(self.connection, text, 'text/html')
-            elif url == 'check_library_function_report.html':
+            elif url == '/check_library_function_report.html':
                 text = check_library_report(self.resultPath + '/' + 'info_output', message_id='checkLibraryFunction')
                 httpGetResponse(self.connection, text, 'text/html')
-            elif url == 'check_library_noreturn_report.html':
+            elif url == '/check_library_noreturn_report.html':
                 text = check_library_report(self.resultPath + '/' + 'info_output', message_id='checkLibraryNoReturn')
                 httpGetResponse(self.connection, text, 'text/html')
-            elif url == 'check_library_use_ignore_report.html':
+            elif url == '/check_library_use_ignore_report.html':
                 text = check_library_report(self.resultPath + '/' + 'info_output', message_id='checkLibraryUseIgnore')
                 httpGetResponse(self.connection, text, 'text/html')
-            elif url == 'check_library_check_type_report.html':
+            elif url == '/check_library_check_type_report.html':
                 text = check_library_report(self.resultPath + '/' + 'info_output', message_id='checkLibraryCheckType')
                 httpGetResponse(self.connection, text, 'text/html')
-            elif url.startswith('check_library-'):
-                function_name = url[len('check_library-'):]
+            elif url.startswith('/check_library-'):
+                function_name = url[len('/check_library-'):]
                 text = check_library_function_name(self.resultPath + '/' + 'info_output', function_name)
                 httpGetResponse(self.connection, text, 'text/plain')
             else:
-                filename = resultPath + '/' + url
+                filename = resultPath + url
                 if not os.path.isfile(filename):
                     print_ts('HTTP/1.1 404 Not Found')
                     self.connection.send(b'HTTP/1.1 404 Not Found\r\n\r\n')
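A standalone sketch of what the new parse_req() does (split the request line, take the target, decode the query string with the standard library):

from urllib.parse import urlparse, parse_qsl

request_line = 'GET /head-uninitvar?pkgs=1 HTTP/1.1'
target = request_line.split(' ')[1]
url_obj = urlparse(target)
print(url_obj.path)                    # '/head-uninitvar'
print(dict(parse_qsl(url_obj.query)))  # {'pkgs': '1'}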
@@ -0,0 +1,17 @@
+from importlib import import_module
+
+donate_cpu_server = import_module('donate-cpu-server')
+
+def _test_parse_req(req_str, url_exp, queryParams_exp):
+    url, queryParams = donate_cpu_server.HttpClientThread.parse_req(req_str)
+    assert url == url_exp and queryParams == queryParams_exp
+
+def test_parse_req():
+    _test_parse_req("", None, None)
+    _test_parse_req("GET / HTTP/1.1", '/', {})
+    _test_parse_req("GET /crash.html HTTP/1.1", '/crash.html', {})
+    _test_parse_req("GET /head-uninitvar HTTP/1.1", '/head-uninitvar', {})
+    _test_parse_req("GET /check_library-std%3A%3Aunordered_set%3A%3Ainsert%28%29 HTTP/1.1", '/check_library-std%3A%3Aunordered_set%3A%3Ainsert%28%29', {})
+    _test_parse_req("GET /head-uninitvar?pkgs=1 HTTP/1.1", '/head-uninitvar', {'pkgs': '1'})
+    _test_parse_req("GET /crash.html?pkgs=1 HTTP/1.1", '/crash.html', {'pkgs': '1'})
+    _test_parse_req("GET /head-uninitvar?pkgs=1&pkgs2=2 HTTP/1.1", '/head-uninitvar', {'pkgs': '1', 'pkgs2': '2'})
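The new test module (tools/test_donate_cpu_server.py, the file exercised by the CI step above) loads the server through importlib because 'donate-cpu-server' contains hyphens and therefore cannot be imported with a plain import statement; with ./tools on PYTHONPATH, import_module('donate-cpu-server') resolves the script and the parse_req cases above run against the real implementation.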