2012-09-07 18:44:46 +02:00
|
|
|
#!/usr/bin/env python
|
2013-10-23 15:27:27 +02:00
|
|
|
|
|
|
|
from __future__ import unicode_literals
|
|
|
|
|
2021-08-09 15:03:11 +02:00
|
|
|
import datetime
|
2013-10-23 15:27:27 +02:00
|
|
|
import io
|
2021-08-09 15:03:11 +02:00
|
|
|
import locale
|
|
|
|
import operator
|
2009-12-08 21:09:21 +01:00
|
|
|
import optparse
|
|
|
|
import os
|
2021-08-09 15:03:11 +02:00
|
|
|
import sys
|
|
|
|
import subprocess
|
2013-10-23 15:27:27 +02:00
|
|
|
|
2015-04-20 09:17:45 +02:00
|
|
|
from collections import Counter
|
2009-12-08 21:09:21 +01:00
|
|
|
from pygments import highlight
|
2019-10-30 18:01:39 +01:00
|
|
|
from pygments.lexers import guess_lexer, guess_lexer_for_filename
|
2019-11-04 15:44:13 +01:00
|
|
|
from pygments.formatters import HtmlFormatter # pylint: disable=no-name-in-module
|
2019-10-30 18:01:39 +01:00
|
|
|
from pygments.util import ClassNotFound
|
2009-12-08 21:09:21 +01:00
|
|
|
from xml.sax import parse as xml_parse
|
|
|
|
from xml.sax import SAXParseException as XmlParseException
|
|
|
|
from xml.sax.handler import ContentHandler as XmlContentHandler
|
2014-10-15 21:49:22 +02:00
|
|
|
from xml.sax.saxutils import escape
|
2009-12-08 21:09:21 +01:00
|
|
|
"""
|
|
|
|
Turns a cppcheck xml file into a browsable html report along
|
|
|
|
with syntax highlighted source code.
|
|
|
|
"""
|
|
|
|
|
2009-12-11 12:39:39 +01:00
|
|
|
STYLE_FILE = """
|
2013-10-13 10:03:09 +02:00
|
|
|
body {
|
2020-01-20 15:25:12 +01:00
|
|
|
font-family: -apple-system, BlinkMacSystemFont, "Segoe UI", Roboto, "Helvetica Neue", Arial, "Noto Sans", sans-serif;
|
|
|
|
font-size: 13px;
|
|
|
|
line-height: 1.5;
|
2012-12-23 11:50:52 +01:00
|
|
|
margin: 0;
|
2014-06-05 22:39:44 +02:00
|
|
|
width: auto;
|
|
|
|
}
|
|
|
|
|
|
|
|
h1 {
|
2014-07-29 13:56:31 +02:00
|
|
|
margin: 10px;
|
2014-06-05 22:39:44 +02:00
|
|
|
}
|
|
|
|
|
2020-01-20 15:25:12 +01:00
|
|
|
.header {
|
|
|
|
border-bottom: thin solid #aaa;
|
|
|
|
}
|
|
|
|
|
|
|
|
.footer {
|
|
|
|
border-top: thin solid #aaa;
|
|
|
|
font-size: 90%;
|
|
|
|
margin-top: 5px;
|
|
|
|
}
|
|
|
|
|
|
|
|
.footer ul {
|
|
|
|
list-style-type: none;
|
|
|
|
padding-left: 0;
|
|
|
|
}
|
|
|
|
|
|
|
|
.footer > p {
|
2014-07-29 13:56:31 +02:00
|
|
|
margin: 4px;
|
2009-12-11 12:39:39 +01:00
|
|
|
}
|
|
|
|
|
2020-01-20 15:25:12 +01:00
|
|
|
.wrapper {
|
|
|
|
display: -webkit-box;
|
|
|
|
display: -ms-flexbox;
|
|
|
|
display: flex;
|
|
|
|
-webkit-box-pack: justify;
|
|
|
|
-ms-flex-pack: justify;
|
|
|
|
justify-content: space-between;
|
|
|
|
}
|
|
|
|
|
|
|
|
#menu,
|
|
|
|
#menu_index {
|
|
|
|
text-align: left;
|
|
|
|
width: 350px;
|
|
|
|
height: 90vh;
|
|
|
|
min-height: 200px;
|
|
|
|
overflow: auto;
|
|
|
|
position: -webkit-sticky;
|
|
|
|
position: sticky;
|
|
|
|
top: 0;
|
|
|
|
padding: 0 15px 15px 15px;
|
|
|
|
}
|
|
|
|
|
|
|
|
#menu > a {
|
|
|
|
display: block;
|
|
|
|
margin-left: 10px;
|
|
|
|
font-size: 12px;
|
|
|
|
z-index: 1;
|
|
|
|
}
|
|
|
|
|
|
|
|
#content,
|
|
|
|
#content_index {
|
|
|
|
background-color: #fff;
|
|
|
|
-webkit-box-sizing: content-box;
|
|
|
|
-moz-box-sizing: content-box;
|
|
|
|
box-sizing: content-box;
|
|
|
|
padding: 0 15px 15px 15px;
|
|
|
|
width: calc(100% - 350px);
|
|
|
|
height: 100%;
|
|
|
|
overflow-x: auto;
|
|
|
|
}
|
|
|
|
|
|
|
|
#filename {
|
|
|
|
margin-left: 10px;
|
|
|
|
font-size: 12px;
|
|
|
|
z-index: 1;
|
|
|
|
}
|
|
|
|
|
2012-09-15 22:41:56 +02:00
|
|
|
.error {
|
2012-09-28 20:25:46 +02:00
|
|
|
background-color: #ffb7b7;
|
2012-09-15 22:41:56 +02:00
|
|
|
}
|
|
|
|
|
2013-10-17 14:49:48 +02:00
|
|
|
.error2 {
|
|
|
|
background-color: #faa;
|
2013-10-24 07:12:21 +02:00
|
|
|
display: inline-block;
|
2013-10-17 14:49:48 +02:00
|
|
|
margin-left: 4px;
|
2013-10-24 07:12:21 +02:00
|
|
|
}
|
|
|
|
|
2014-07-17 23:05:00 +02:00
|
|
|
.inconclusive {
|
2020-01-18 01:49:46 +01:00
|
|
|
background-color: #b6b6b4;
|
2014-07-17 23:05:00 +02:00
|
|
|
}
|
|
|
|
|
|
|
|
.inconclusive2 {
|
2020-01-18 01:49:46 +01:00
|
|
|
background-color: #b6b6b4;
|
2014-07-17 23:05:00 +02:00
|
|
|
display: inline-block;
|
|
|
|
margin-left: 4px;
|
|
|
|
}
|
2014-10-15 21:49:22 +02:00
|
|
|
|
2020-01-20 15:25:12 +01:00
|
|
|
.verbose {
|
2014-10-15 21:49:22 +02:00
|
|
|
display: inline-block;
|
|
|
|
vertical-align: top;
|
|
|
|
cursor: help;
|
|
|
|
}
|
|
|
|
|
2020-01-20 15:25:12 +01:00
|
|
|
.verbose .content {
|
2014-10-15 21:49:22 +02:00
|
|
|
display: none;
|
|
|
|
position: absolute;
|
|
|
|
padding: 10px;
|
|
|
|
margin: 4px;
|
|
|
|
max-width: 40%;
|
|
|
|
white-space: pre-wrap;
|
2020-01-18 01:49:46 +01:00
|
|
|
border: 1px solid #000;
|
|
|
|
background-color: #ffffcc;
|
2014-10-15 21:49:22 +02:00
|
|
|
cursor: auto;
|
|
|
|
}
|
|
|
|
|
2013-10-24 07:12:21 +02:00
|
|
|
.highlight .hll {
|
|
|
|
padding: 1px;
|
2013-10-17 14:49:48 +02:00
|
|
|
}
|
2009-12-11 12:39:39 +01:00
|
|
|
|
2014-06-01 11:28:02 +02:00
|
|
|
.highlighttable {
|
2020-01-18 01:49:46 +01:00
|
|
|
background-color: #fff;
|
2014-06-01 11:28:02 +02:00
|
|
|
z-index: 10;
|
|
|
|
position: relative;
|
2020-01-20 15:25:12 +01:00
|
|
|
margin: -10px;
|
2014-07-23 12:23:52 +02:00
|
|
|
}
|
|
|
|
|
2013-10-13 00:42:17 +02:00
|
|
|
.linenos {
|
2013-10-17 14:49:48 +02:00
|
|
|
border-right: thin solid #aaa;
|
2020-01-18 01:49:46 +01:00
|
|
|
color: #d3d3d3;
|
2013-10-17 14:49:48 +02:00
|
|
|
padding-right: 6px;
|
2013-10-13 00:42:17 +02:00
|
|
|
}
|
|
|
|
|
2020-01-20 15:25:12 +01:00
|
|
|
.d-none {
|
|
|
|
display: none;
|
2009-12-11 12:39:39 +01:00
|
|
|
}
|
|
|
|
"""
|
|
|
|
|
2009-12-08 21:09:21 +01:00
|
|
|
HTML_HEAD = """
|
2020-01-18 01:49:46 +01:00
|
|
|
<!doctype html>
|
2013-10-13 10:03:09 +02:00
|
|
|
<html lang="en">
|
2009-12-08 21:09:21 +01:00
|
|
|
<head>
|
2013-10-13 10:03:09 +02:00
|
|
|
<meta charset="utf-8">
|
|
|
|
<title>Cppcheck - HTML report - %s</title>
|
2013-10-17 14:49:48 +02:00
|
|
|
<link rel="stylesheet" href="style.css">
|
2014-06-05 22:09:28 +02:00
|
|
|
<style>
|
|
|
|
%s
|
2014-10-15 21:49:22 +02:00
|
|
|
</style>
|
2020-01-18 01:49:46 +01:00
|
|
|
<script>
|
|
|
|
function getStyle(el, styleProp) {
|
|
|
|
var y;
|
2020-01-20 15:25:12 +01:00
|
|
|
|
|
|
|
if (el.currentStyle) {
|
2020-01-18 01:49:46 +01:00
|
|
|
y = el.currentStyle[styleProp];
|
2020-01-20 15:25:12 +01:00
|
|
|
} else if (window.getComputedStyle) {
|
2020-01-18 01:49:46 +01:00
|
|
|
y = document.defaultView.getComputedStyle(el, null).getPropertyValue(styleProp);
|
2020-01-20 15:25:12 +01:00
|
|
|
}
|
|
|
|
|
2014-10-15 21:49:22 +02:00
|
|
|
return y;
|
|
|
|
}
|
2020-01-18 01:49:46 +01:00
|
|
|
|
2014-10-15 21:49:22 +02:00
|
|
|
function toggle() {
|
|
|
|
var el = this.expandable_content;
|
|
|
|
var mark = this.expandable_marker;
|
2020-01-18 01:49:46 +01:00
|
|
|
|
|
|
|
if (el.style.display === "block") {
|
2014-10-15 21:49:22 +02:00
|
|
|
el.style.display = "none";
|
2020-01-18 01:49:46 +01:00
|
|
|
mark.textContent = "[+]";
|
2014-10-15 21:49:22 +02:00
|
|
|
} else {
|
|
|
|
el.style.display = "block";
|
2020-01-18 01:49:46 +01:00
|
|
|
mark.textContent = "[-]";
|
2014-10-15 21:49:22 +02:00
|
|
|
}
|
|
|
|
}
|
2020-01-18 01:49:46 +01:00
|
|
|
|
2020-01-20 15:25:12 +01:00
|
|
|
function initExpandables() {
|
|
|
|
var elements = document.querySelectorAll(".expandable");
|
2020-01-18 01:49:46 +01:00
|
|
|
|
2020-01-20 15:25:12 +01:00
|
|
|
for (var i = 0, len = elements.length; i < len; i++) {
|
|
|
|
var el = elements[i];
|
2020-01-18 01:49:46 +01:00
|
|
|
var clickable = el.querySelector("span");
|
|
|
|
var marker = clickable.querySelector(".marker");
|
|
|
|
var content = el.querySelector(".content");
|
2014-10-15 21:49:22 +02:00
|
|
|
var width = clickable.clientWidth - parseInt(getStyle(content, "padding-left")) - parseInt(getStyle(content, "padding-right"));
|
|
|
|
content.style.width = width + "px";
|
|
|
|
clickable.expandable_content = content;
|
|
|
|
clickable.expandable_marker = marker;
|
2020-01-18 01:49:46 +01:00
|
|
|
clickable.addEventListener("click", toggle);
|
2014-10-15 21:49:22 +02:00
|
|
|
}
|
|
|
|
}
|
2020-01-18 01:49:46 +01:00
|
|
|
|
2020-01-20 15:25:12 +01:00
|
|
|
function toggleDisplay(id) {
|
|
|
|
var elements = document.querySelectorAll("." + id);
|
2020-01-18 01:49:46 +01:00
|
|
|
|
|
|
|
for (var i = 0, len = elements.length; i < len; i++) {
|
2020-01-20 15:25:12 +01:00
|
|
|
elements[i].classList.toggle("d-none");
|
2016-03-11 06:20:44 +01:00
|
|
|
}
|
|
|
|
}
|
2020-01-18 01:49:46 +01:00
|
|
|
|
2020-01-20 15:25:12 +01:00
|
|
|
function toggleAll() {
|
|
|
|
var elements = document.querySelectorAll("input");
|
2020-01-18 01:49:46 +01:00
|
|
|
|
2020-01-20 15:25:12 +01:00
|
|
|
// starting from 1 since 0 is the "toggle all" input
|
|
|
|
for (var i = 1, len = elements.length; i < len; i++) {
|
|
|
|
var el = elements[i];
|
2020-01-18 01:49:46 +01:00
|
|
|
|
|
|
|
if (el.checked) {
|
|
|
|
el.checked = false;
|
|
|
|
} else {
|
|
|
|
el.checked = true;
|
|
|
|
}
|
2020-01-20 15:25:12 +01:00
|
|
|
|
|
|
|
toggleDisplay(el.id);
|
2020-01-18 01:49:46 +01:00
|
|
|
}
|
2019-05-24 10:45:45 +02:00
|
|
|
}
|
2020-01-20 15:25:12 +01:00
|
|
|
window.addEventListener("load", initExpandables);
|
2014-10-15 21:49:22 +02:00
|
|
|
</script>
|
2009-12-08 21:09:21 +01:00
|
|
|
</head>
|
2020-01-18 01:49:46 +01:00
|
|
|
<body>
|
2020-01-20 15:25:12 +01:00
|
|
|
<div id="header" class="header">
|
|
|
|
<h1>Cppcheck report - %s: %s</h1>
|
2020-01-18 01:49:46 +01:00
|
|
|
</div>
|
2020-01-20 15:25:12 +01:00
|
|
|
<div class="wrapper">
|
|
|
|
<div id="menu">
|
|
|
|
<p id="filename"><a href="index.html">Defects:</a> %s</p>
|
2014-06-01 00:12:58 +02:00
|
|
|
"""
|
|
|
|
|
|
|
|
HTML_HEAD_END = """
|
2020-01-18 01:49:46 +01:00
|
|
|
</div>
|
|
|
|
<div id="content">
|
2009-12-08 21:09:21 +01:00
|
|
|
"""
|
|
|
|
|
|
|
|
HTML_FOOTER = """
|
2020-01-20 15:25:12 +01:00
|
|
|
</div> <!-- /.wrapper -->
|
2020-01-18 01:49:46 +01:00
|
|
|
</div>
|
2020-01-20 15:25:12 +01:00
|
|
|
<div id="footer" class="footer">
|
2020-01-18 01:49:46 +01:00
|
|
|
<p>
|
2020-01-20 15:25:12 +01:00
|
|
|
Cppcheck %s - a tool for static C/C++ code analysis<br>
|
|
|
|
<br>
|
2021-08-28 12:46:54 +02:00
|
|
|
Internet: <a href="https://cppcheck.sourceforge.io">https://cppcheck.sourceforge.io</a><br>
|
2020-01-20 15:25:12 +01:00
|
|
|
IRC: <a href="irc://irc.freenode.net/cppcheck">irc://irc.freenode.net/cppcheck</a><br>
|
2020-01-18 01:49:46 +01:00
|
|
|
</p>
|
|
|
|
</div>
|
2009-12-08 21:09:21 +01:00
|
|
|
</body>
|
|
|
|
</html>
|
|
|
|
"""
|
|
|
|
|
2020-01-18 01:49:46 +01:00
|
|
|
# Inline annotations appended to a highlighted source line by
# AnnotateCodeFormatter.  Each takes the error message via %-formatting.
HTML_ERROR = "<span class=\"error2\"><--- %s</span>\n"
HTML_INCONCLUSIVE = "<span class=\"inconclusive2\"><--- %s</span>\n"

# Expandable variants take (message, verbose message); the [+]/[-] marker and
# show/hide behaviour come from the JavaScript embedded in HTML_HEAD.
# NOTE(review): the trailing "" after the closing quote is a no-op string
# concatenation ("...\n" + "") left over from an earlier edit.
HTML_EXPANDABLE_ERROR = "<div class=\"verbose expandable\"><span class=\"error2\"><--- %s <span class=\"marker\">[+]</span></span><div class=\"content\">%s</div></div>\n"""
HTML_EXPANDABLE_INCONCLUSIVE = "<div class=\"verbose expandable\"><span class=\"inconclusive2\"><--- %s <span class=\"marker\">[+]</span></span><div class=\"content\">%s</div></div>\n"""
|
2014-10-15 21:49:22 +02:00
|
|
|
|
|
|
|
# xml.sax.saxutils.escape() already covers &, < and >; this table adds the
# two quote characters so escaped text is also safe inside HTML attributes.
html_escape_table = {
    '"': "&quot;",
    "'": "&#39;"
}
# Reverse mapping, kept available for callers that need to undo html_escape().
html_unescape_table = {v: k for k, v in html_escape_table.items()}


def html_escape(text):
    """Return *text* with &, <, >, double and single quotes HTML-escaped."""
    return escape(text, html_escape_table)
|
2014-07-29 13:56:31 +02:00
|
|
|
|
2015-03-20 01:29:22 +01:00
|
|
|
|
2021-08-09 15:03:11 +02:00
|
|
|
def git_blame(line, path, file, blame_options):
    """Return git blame data (author, author-mail, author-time, ...) for one line.

    line          -- 1-based line number in *file* to blame.
    path          -- directory to run git in; overridden by the directory
                     part of *file* when present.
    file          -- source file path (possibly relative).
    blame_options -- string that may contain '-w' and/or '-M' to forward to
                     git blame.

    Returns a dict of porcelain header fields with 'author-time' reformatted
    as DD/MM/YYYY, or an empty dict when blame information is unavailable.
    Note: changes the process working directory as a side effect.
    """
    git_blame_dict = {}
    head, tail = os.path.split(file)
    if head != "":
        path = head

    try:
        os.chdir(path)
    except OSError:
        return {}

    # Build the command as an argument list: passing a single formatted
    # string to check_output() without shell=True fails on POSIX systems
    # (the whole string is treated as the executable name).
    cmd = ['git', 'blame', '-L', str(line)]
    if '-w' in blame_options:
        cmd.append('-w')
    if '-M' in blame_options:
        cmd.append('-M')
    cmd += ['--porcelain', '--', file]

    try:
        result = subprocess.check_output(cmd)
        result = result.decode(locale.getpreferredencoding())
    except (OSError, subprocess.CalledProcessError, UnicodeDecodeError):
        # git missing, not a repository, file untracked, or undecodable output.
        return {}

    if result.startswith('fatal'):
        return {}

    disallowed_characters = '<>'
    for blame_line in result.split('\n')[1:]:
        space_pos = blame_line.find(' ')
        if space_pos > 30:
            # Porcelain header keys are short; a long first token means we
            # reached the actual source line, so stop parsing.
            break
        key = blame_line[:space_pos]
        val = blame_line[space_pos + 1:]

        # Strip characters that would break the generated HTML.
        for character in disallowed_characters:
            val = val.replace(character, "")
        git_blame_dict[key] = val

    # Guard against truncated output instead of raising KeyError below.
    if 'author-time' not in git_blame_dict:
        return {}

    datetime_object = datetime.date.fromtimestamp(float(git_blame_dict['author-time']))
    year = datetime_object.strftime("%Y")
    month = datetime_object.strftime("%m")
    day = datetime_object.strftime("%d")

    git_blame_dict['author-time'] = '%s/%s/%s' % (day, month, year)

    return git_blame_dict
|
|
|
|
|
|
|
|
|
|
|
|
def tr_str(td_th, line, id, cwe, severity, message, author, author_mail, date, add_author, tr_class=None, htmlfile=None, message_class=None):
    """Render one HTML table row for a defect.

    td_th         -- cell tag to use: 'td' for data rows, 'th' for the header.
    line, id, cwe, severity, message -- defect fields for the row cells.
    author, author_mail, date        -- blame columns, emitted only when
                                        add_author is true.
    tr_class      -- optional CSS class for the <tr> element.
    htmlfile      -- when given, the line cell becomes a link into that file.
    message_class -- optional CSS class for the message cell.
    """
    cells = []
    if htmlfile:
        # Line number links to the annotated source page.
        cells.append('<%s><a href="%s#line-%d">%d</a></%s>' % (td_th, htmlfile, line, line, td_th))
        plain_items = (id, cwe, severity)
    else:
        plain_items = (line, id, cwe, severity)
    cells.extend('<%s>%s</%s>' % (td_th, item, td_th) for item in plain_items)

    message_attribute = ' class="%s"' % message_class if message_class else ''
    cells.append('<%s%s>%s</%s>' % (td_th, message_attribute, html_escape(message), td_th))

    if add_author:
        cells.extend('<%s>%s</%s>' % (td_th, item, td_th) for item in (author, author_mail, date))

    tr_attributes = ' class="%s"' % tr_class if tr_class else ''
    return '<tr%s>%s</tr>' % (tr_attributes, ''.join(cells))
|
|
|
|
|
|
|
|
|
2010-01-12 21:35:54 +01:00
|
|
|
class AnnotateCodeFormatter(HtmlFormatter):
    """Pygments HTML formatter that appends cppcheck annotations to lines.

    The caller assigns the file's error dictionaries to ``errors`` before
    highlighting; every source line whose number matches an error gets an
    inline message span spliced into its markup.
    """

    # Error dicts for the file being highlighted; reassigned per formatter
    # instance by the caller (htmlFormatter.errors = errors) before use.
    errors = []

    def wrap(self, source, outfile):
        """Wrap highlighted output, injecting error annotation markup.

        Yields the same (is_code_line, text) tuples as HtmlFormatter.wrap(),
        with annotation spans added to lines that carry errors.
        """
        line_no = 1
        for i, t in HtmlFormatter.wrap(self, source, outfile):
            # If this is a source code line we want to add a span tag at the
            # end.
            if i == 1:
                for error in self.errors:
                    if error['line'] == line_no:
                        try:
                            if error['inconclusive'] == 'true':
                                # only print verbose msg if it really differs
                                # from actual message
                                if error.get('verbose') and (error['verbose'] != error['msg']):
                                    # Splice the expandable block in before the
                                    # line's trailing newline.
                                    index = t.rfind('\n')
                                    t = t[:index] + HTML_EXPANDABLE_INCONCLUSIVE % (error['msg'], html_escape(error['verbose'].replace("\\012", '\n'))) + t[index + 1:]
                                else:
                                    t = t.replace('\n', HTML_INCONCLUSIVE % error['msg'])
                        except KeyError:
                            # No 'inconclusive' key: treat as a definite error.
                            if error.get('verbose') and (error['verbose'] != error['msg']):
                                index = t.rfind('\n')
                                t = t[:index] + HTML_EXPANDABLE_ERROR % (error['msg'], html_escape(error['verbose'].replace("\\012", '\n'))) + t[index + 1:]
                            else:
                                t = t.replace('\n', HTML_ERROR % error['msg'])

                line_no = line_no + 1
            yield i, t
|
|
|
|
|
2013-03-02 16:45:26 +01:00
|
|
|
|
2009-12-08 21:09:21 +01:00
|
|
|
class CppCheckHandler(XmlContentHandler):

    """SAX handler that collects every error from a cppcheck XML report."""

    def __init__(self):
        XmlContentHandler.__init__(self)
        self.errors = []            # accumulated error dictionaries
        self.version = '1'          # report format version; updated by <results>
        self.versionCppcheck = ''   # cppcheck program version (format 2 only)

    def startElement(self, name, attributes):
        """Dispatch each element to the handler for the detected format."""
        if name == 'results':
            self.version = attributes.get('version', self.version)

        dispatch = self.handleVersion1 if self.version == '1' else self.handleVersion2
        dispatch(name, attributes)

    def handleVersion1(self, name, attributes):
        """Handle a version-1 report element (flat <error> with inline location)."""
        if name != 'error':
            return

        where = attributes.get('file', '')
        at_line = int(attributes.get('line', 0))
        self.errors.append({
            'file': where,
            'line': at_line,
            'locations': [{
                'file': where,
                'line': at_line,
            }],
            'id': attributes['id'],
            'severity': attributes['severity'],
            'msg': attributes['msg']
        })

    def handleVersion2(self, name, attributes):
        """Handle a version-2 report element (<error> with nested <location>s)."""
        if name == 'cppcheck':
            self.versionCppcheck = attributes['version']

        if name == 'error':
            new_error = {
                'locations': [],
                'file': '',
                'line': 0,
                'id': attributes['id'],
                'severity': attributes['severity'],
                'msg': attributes['msg'],
                'verbose': attributes.get('verbose')
            }

            # Optional attributes are copied through only when present.
            for optional in ('inconclusive', 'cwe'):
                if optional in attributes:
                    new_error[optional] = attributes[optional]

            self.errors.append(new_error)
        elif name == 'location':
            assert self.errors
            current = self.errors[-1]
            location_file = attributes['file']
            location_line = int(attributes['line'])

            # The first location becomes the error's primary file/line.
            if not current['locations']:
                current['file'] = location_file
                current['line'] = location_line

            current['locations'].append({
                'file': location_file,
                'line': location_line,
                'info': attributes.get('info')
            })
|
2013-10-05 17:03:31 +02:00
|
|
|
|
2009-12-08 21:09:21 +01:00
|
|
|
if __name__ == '__main__':
|
|
|
|
# Configure all the options this little utility is using.
|
|
|
|
parser = optparse.OptionParser()
|
2013-10-24 14:17:09 +02:00
|
|
|
parser.add_option('--title', dest='title',
|
|
|
|
help='The title of the project.',
|
|
|
|
default='[project name]')
|
2020-09-25 20:12:41 +02:00
|
|
|
parser.add_option('--file', dest='file', action="append",
|
2013-10-24 14:17:09 +02:00
|
|
|
help='The cppcheck xml output file to read defects '
|
2020-09-25 20:12:41 +02:00
|
|
|
'from. You can combine results from several '
|
|
|
|
'xml reports i.e. "--file file1.xml --file file2.xml ..". '
|
|
|
|
'Default is reading from stdin.')
|
2013-10-24 14:17:09 +02:00
|
|
|
parser.add_option('--report-dir', dest='report_dir',
|
|
|
|
help='The directory where the HTML report content is '
|
|
|
|
'written.')
|
|
|
|
parser.add_option('--source-dir', dest='source_dir',
|
|
|
|
help='Base directory where source code files can be '
|
|
|
|
'found.')
|
2021-08-09 15:03:11 +02:00
|
|
|
parser.add_option('--add-author-information', dest='add_author_information',
|
|
|
|
help='Initially set to false'
|
|
|
|
'Adds author, author-mail and time to htmlreport')
|
2013-10-24 14:17:09 +02:00
|
|
|
parser.add_option('--source-encoding', dest='source_encoding',
|
|
|
|
help='Encoding of source code.', default='utf-8')
|
2021-08-09 15:03:11 +02:00
|
|
|
parser.add_option('--blame-options', dest='blame_options',
|
|
|
|
help='[-w, -M] blame options which you can use to get author and author mail '
|
|
|
|
'-w --> not including white spaces and returns original author of the line '
|
|
|
|
'-M --> not including moving of lines and returns original author of the line')
|
2009-12-08 21:09:21 +01:00
|
|
|
|
|
|
|
# Parse options and make sure that we have an output directory set.
|
|
|
|
options, args = parser.parse_args()
|
2014-07-22 01:12:27 +02:00
|
|
|
|
|
|
|
try:
|
|
|
|
sys.argv[1]
|
2014-07-29 13:56:31 +02:00
|
|
|
except IndexError: # no arguments give, print --help
|
2014-07-22 01:12:27 +02:00
|
|
|
parser.print_help()
|
|
|
|
quit()
|
|
|
|
|
2009-12-08 21:09:21 +01:00
|
|
|
if not options.report_dir:
|
2013-10-24 14:17:09 +02:00
|
|
|
parser.error('No report directory set.')
|
2009-12-08 21:09:21 +01:00
|
|
|
|
|
|
|
# Get the directory where source code files are located.
|
2021-08-09 15:03:11 +02:00
|
|
|
cwd = os.getcwd()
|
2009-12-08 21:09:21 +01:00
|
|
|
source_dir = os.getcwd()
|
|
|
|
if options.source_dir:
|
|
|
|
source_dir = options.source_dir
|
|
|
|
|
2021-08-09 15:03:11 +02:00
|
|
|
add_author_information = False
|
|
|
|
if options.add_author_information:
|
|
|
|
add_author_information = True
|
|
|
|
|
|
|
|
blame_options = ''
|
|
|
|
if options.blame_options:
|
|
|
|
blame_options = options.blame_options
|
|
|
|
add_author_information = True
|
2020-09-25 20:12:41 +02:00
|
|
|
# Parse the xml from all files defined in file argument
|
|
|
|
# or from stdin. If no input is provided, stdin is used
|
|
|
|
# Produce a simple list of errors.
|
2013-10-24 14:17:09 +02:00
|
|
|
print('Parsing xml report.')
|
2009-12-08 21:09:21 +01:00
|
|
|
try:
|
|
|
|
contentHandler = CppCheckHandler()
|
2020-09-25 20:12:41 +02:00
|
|
|
for fname in options.file or [sys.stdin]:
|
|
|
|
xml_parse(fname, contentHandler)
|
|
|
|
except (XmlParseException, ValueError) as msg:
|
2013-10-24 14:17:09 +02:00
|
|
|
print('Failed to parse cppcheck xml file: %s' % msg)
|
2009-12-08 21:09:21 +01:00
|
|
|
sys.exit(1)
|
|
|
|
|
|
|
|
# We have a list of errors. But now we want to group them on
|
|
|
|
# each source code file. Lets create a files dictionary that
|
|
|
|
# will contain a list of all the errors in that file. For each
|
2013-10-13 10:03:09 +02:00
|
|
|
# file we will also generate a HTML filename to use.
|
2009-12-08 21:09:21 +01:00
|
|
|
files = {}
|
|
|
|
file_no = 0
|
|
|
|
for error in contentHandler.errors:
|
2013-10-24 14:17:09 +02:00
|
|
|
filename = error['file']
|
2009-12-08 21:09:21 +01:00
|
|
|
if filename not in files.keys():
|
2013-10-24 14:17:09 +02:00
|
|
|
files[filename] = {
|
|
|
|
'errors': [], 'htmlfile': str(file_no) + '.html'}
|
2009-12-08 21:09:21 +01:00
|
|
|
file_no = file_no + 1
|
2013-10-24 14:17:09 +02:00
|
|
|
files[filename]['errors'].append(error)
|
2009-12-08 21:09:21 +01:00
|
|
|
|
|
|
|
# Make sure that the report directory is created if it doesn't exist.
|
2013-10-24 14:17:09 +02:00
|
|
|
print('Creating %s directory' % options.report_dir)
|
2009-12-08 21:09:21 +01:00
|
|
|
if not os.path.exists(options.report_dir):
|
2019-05-11 09:50:28 +02:00
|
|
|
os.makedirs(options.report_dir)
|
2009-12-08 21:09:21 +01:00
|
|
|
|
2013-10-13 10:03:09 +02:00
|
|
|
# Generate a HTML file with syntax highlighted source code for each
|
2009-12-08 21:09:21 +01:00
|
|
|
# file that contains one or more errors.
|
2013-10-24 14:17:09 +02:00
|
|
|
print('Processing errors')
|
2015-04-20 09:17:45 +02:00
|
|
|
|
2014-07-21 01:28:29 +02:00
|
|
|
decode_errors = []
|
2014-05-25 17:43:09 +02:00
|
|
|
for filename, data in sorted(files.items()):
|
2013-10-24 14:17:09 +02:00
|
|
|
htmlfile = data['htmlfile']
|
2018-11-23 11:58:19 +01:00
|
|
|
errors = []
|
|
|
|
|
|
|
|
for error in data['errors']:
|
|
|
|
for location in error['locations']:
|
|
|
|
if filename == location['file']:
|
|
|
|
newError = dict(error)
|
|
|
|
|
|
|
|
del newError['locations']
|
|
|
|
newError['line'] = location['line']
|
|
|
|
if location.get('info'):
|
|
|
|
newError['msg'] = location['info']
|
|
|
|
newError['severity'] = 'information'
|
|
|
|
del newError['verbose']
|
|
|
|
|
|
|
|
errors.append(newError)
|
2013-10-23 15:27:27 +02:00
|
|
|
|
|
|
|
lines = []
|
|
|
|
for error in errors:
|
2013-10-24 14:17:09 +02:00
|
|
|
lines.append(error['line'])
|
2013-10-23 15:27:27 +02:00
|
|
|
|
2013-10-24 14:17:09 +02:00
|
|
|
if filename == '':
|
2013-10-23 15:27:27 +02:00
|
|
|
continue
|
|
|
|
|
|
|
|
source_filename = os.path.join(source_dir, filename)
|
2013-10-24 14:17:09 +02:00
|
|
|
try:
|
2014-06-18 22:12:31 +02:00
|
|
|
with io.open(source_filename, 'r', encoding=options.source_encoding) as input_file:
|
2013-10-24 14:17:09 +02:00
|
|
|
content = input_file.read()
|
|
|
|
except IOError:
|
2021-01-31 14:27:11 +01:00
|
|
|
if error['id'] == 'unmatchedSuppression':
|
2015-03-20 01:29:22 +01:00
|
|
|
continue # file not found, bail out
|
2015-03-05 17:09:19 +01:00
|
|
|
else:
|
|
|
|
sys.stderr.write("ERROR: Source file '%s' not found.\n" %
|
2015-03-20 01:29:22 +01:00
|
|
|
source_filename)
|
2013-10-23 15:27:27 +02:00
|
|
|
continue
|
2014-07-21 01:28:29 +02:00
|
|
|
except UnicodeDecodeError:
|
|
|
|
sys.stderr.write("WARNING: Unicode decode error in '%s'.\n" %
|
|
|
|
source_filename)
|
2014-07-29 13:56:31 +02:00
|
|
|
decode_errors.append(source_filename[2:]) # "[2:]" gets rid of "./" at beginning
|
2014-07-21 01:28:29 +02:00
|
|
|
continue
|
2013-10-23 15:27:27 +02:00
|
|
|
|
2013-10-24 14:17:09 +02:00
|
|
|
htmlFormatter = AnnotateCodeFormatter(linenos=True,
|
|
|
|
style='colorful',
|
|
|
|
hl_lines=lines,
|
|
|
|
lineanchors='line',
|
|
|
|
encoding=options.source_encoding)
|
2013-10-23 15:27:27 +02:00
|
|
|
htmlFormatter.errors = errors
|
2014-10-15 21:49:22 +02:00
|
|
|
|
2017-05-19 15:47:31 +02:00
|
|
|
with io.open(os.path.join(options.report_dir, htmlfile), 'w', encoding='utf-8') as output_file:
|
2013-10-23 15:27:27 +02:00
|
|
|
output_file.write(HTML_HEAD %
|
|
|
|
(options.title,
|
2014-06-05 22:09:28 +02:00
|
|
|
htmlFormatter.get_style_defs('.highlight'),
|
2014-06-01 14:17:15 +02:00
|
|
|
options.title,
|
2015-03-10 13:49:10 +01:00
|
|
|
filename,
|
|
|
|
filename.split('/')[-1]))
|
2014-06-01 14:17:15 +02:00
|
|
|
|
2014-07-21 21:33:55 +02:00
|
|
|
for error in sorted(errors, key=lambda k: k['line']):
|
2020-01-18 01:49:46 +01:00
|
|
|
output_file.write("<a href=\"%s#line-%d\"> %s %s</a>" % (data['htmlfile'], error['line'], error['id'], error['line']))
|
2014-06-01 01:26:23 +02:00
|
|
|
|
2014-06-01 00:12:58 +02:00
|
|
|
output_file.write(HTML_HEAD_END)
|
2014-07-22 14:20:01 +02:00
|
|
|
try:
|
2020-01-21 09:13:55 +01:00
|
|
|
lexer = guess_lexer_for_filename(source_filename, '', stripnl=False)
|
2019-10-30 18:01:39 +01:00
|
|
|
except ClassNotFound:
|
|
|
|
try:
|
2020-01-21 09:13:55 +01:00
|
|
|
lexer = guess_lexer(content, stripnl=False)
|
2019-10-30 18:01:39 +01:00
|
|
|
except ClassNotFound:
|
|
|
|
sys.stderr.write("ERROR: Couldn't determine lexer for the file' " + source_filename + " '. Won't be able to syntax highlight this file.")
|
2020-01-18 01:49:46 +01:00
|
|
|
output_file.write("\n <tr><td colspan=\"5\"> Could not generate content because pygments failed to determine the code type.</td></tr>")
|
|
|
|
output_file.write("\n <tr><td colspan=\"5\"> Sorry about this.</td></tr>")
|
2019-10-30 18:01:39 +01:00
|
|
|
continue
|
2014-07-22 14:20:01 +02:00
|
|
|
|
2011-04-16 11:22:44 +02:00
|
|
|
if options.source_encoding:
|
|
|
|
lexer.encoding = options.source_encoding
|
2010-01-14 09:47:38 +01:00
|
|
|
|
2013-10-23 15:27:27 +02:00
|
|
|
output_file.write(
|
|
|
|
highlight(content, lexer, htmlFormatter).decode(
|
|
|
|
options.source_encoding))
|
|
|
|
|
2014-06-23 19:07:58 +02:00
|
|
|
output_file.write(HTML_FOOTER % contentHandler.versionCppcheck)
|
2013-10-23 15:27:27 +02:00
|
|
|
|
2013-10-24 14:17:09 +02:00
|
|
|
print(' ' + filename)
|
2009-12-08 21:09:21 +01:00
|
|
|
|
|
|
|
# Generate a master index.html file that will contain a list of
|
|
|
|
# all the errors created.
|
2013-10-24 14:17:09 +02:00
|
|
|
print('Creating index.html')
|
2014-07-23 12:23:52 +02:00
|
|
|
|
2013-10-24 14:17:09 +02:00
|
|
|
with io.open(os.path.join(options.report_dir, 'index.html'),
|
|
|
|
'w') as output_file:
|
2014-07-23 12:23:52 +02:00
|
|
|
|
2014-07-30 01:08:16 +02:00
|
|
|
stats_count = 0
|
2014-07-23 12:23:52 +02:00
|
|
|
stats = []
|
|
|
|
for filename, data in sorted(files.items()):
|
|
|
|
for error in data['errors']:
|
2014-07-29 13:56:31 +02:00
|
|
|
stats.append(error['id']) # get the stats
|
2014-07-30 01:08:16 +02:00
|
|
|
stats_count += 1
|
2014-07-23 12:23:52 +02:00
|
|
|
|
2016-03-11 05:55:35 +01:00
|
|
|
counter = Counter(stats)
|
|
|
|
|
2014-07-23 12:23:52 +02:00
|
|
|
stat_html = []
|
2014-07-24 16:15:29 +02:00
|
|
|
# the following lines sort the stat primary by value (occurrences),
|
|
|
|
# but if two IDs occur equally often, then we sort them alphabetically by warning ID
|
|
|
|
try:
|
2016-03-11 05:55:35 +01:00
|
|
|
cnt_max = counter.most_common()[0][1]
|
2014-07-24 16:15:29 +02:00
|
|
|
except IndexError:
|
|
|
|
cnt_max = 0
|
|
|
|
|
|
|
|
try:
|
2016-03-11 05:55:35 +01:00
|
|
|
cnt_min = counter.most_common()[-1][1]
|
2014-07-24 16:15:29 +02:00
|
|
|
except IndexError:
|
|
|
|
cnt_min = 0
|
|
|
|
|
2020-01-20 15:25:12 +01:00
|
|
|
stat_fmt = "\n <tr><td><input type=\"checkbox\" onclick=\"toggleDisplay(this.id)\" id=\"{}\" name=\"{}\" checked></td><td>{}</td><td>{}</td></tr>"
|
2014-07-21 16:10:04 +02:00
|
|
|
for occurrences in reversed(range(cnt_min, cnt_max + 1)):
|
2016-03-11 05:55:35 +01:00
|
|
|
for _id in [k for k, v in sorted(counter.items()) if v == occurrences]:
|
2016-03-11 06:20:44 +01:00
|
|
|
stat_html.append(stat_fmt.format(_id, _id, dict(counter.most_common())[_id], _id))
|
2014-07-23 12:23:52 +02:00
|
|
|
|
2020-01-20 15:25:12 +01:00
|
|
|
output_file.write(HTML_HEAD.replace('id="menu"', 'id="menu_index"', 1).replace("Defects:", "Defect summary;", 1) % (options.title, '', options.title, '', ''))
|
|
|
|
output_file.write('\n <label><input type="checkbox" onclick="toggleAll()" checked> Toggle all</label>')
|
2019-11-01 15:06:44 +01:00
|
|
|
output_file.write('\n <table>')
|
|
|
|
output_file.write('\n <tr><th>Show</th><th>#</th><th>Defect ID</th></tr>')
|
2016-03-11 06:20:44 +01:00
|
|
|
output_file.write(''.join(stat_html))
|
2019-11-01 15:06:44 +01:00
|
|
|
output_file.write('\n <tr><td></td><td>' + str(stats_count) + '</td><td>total</td></tr>')
|
|
|
|
output_file.write('\n </table>')
|
|
|
|
output_file.write('\n <p><a href="stats.html">Statistics</a></p>')
|
2014-07-23 12:23:52 +02:00
|
|
|
output_file.write(HTML_HEAD_END.replace("content", "content_index", 1))
|
2016-02-28 09:56:56 +01:00
|
|
|
|
2019-11-01 15:06:44 +01:00
|
|
|
output_file.write('\n <table>')
|
2021-08-09 15:03:11 +02:00
|
|
|
output_file.write(
|
|
|
|
'\n %s' %
|
|
|
|
tr_str('th', 'Line', 'Id', 'CWE', 'Severity', 'Message', 'Author', 'Author mail', 'Date (DD/MM/YYYY)', add_author=add_author_information))
|
|
|
|
|
2014-05-25 17:43:09 +02:00
|
|
|
# Write one table section per source file: a heading row (a link for
# real files, a plain note for decode failures and unmatched
# suppressions) followed by one row per reported defect.
for filename, data in sorted(files.items()):
    if filename in decode_errors:  # don't print a link but a note
        output_file.write("\n <tr><td colspan=\"5\">%s</td></tr>" % filename)
        # BUG FIX: the message previously read "Could not generated".
        output_file.write("\n <tr><td colspan=\"5\"> Could not be generated due to UnicodeDecodeError</td></tr>")
    else:
        if filename.endswith('*'):  # assume unmatched suppression
            output_file.write(
                "\n <tr><td colspan=\"5\">%s</td></tr>" %
                filename)
        else:
            output_file.write(
                "\n <tr><td colspan=\"5\"><a href=\"%s\">%s</a></td></tr>" %
                (data['htmlfile'], filename))

        for error in sorted(data['errors'], key=lambda k: k['line']):
            # Author information is optional because running `git blame`
            # per finding is expensive.
            if add_author_information:
                git_blame_dict = git_blame(error['line'], source_dir, error['file'], blame_options)
            else:
                git_blame_dict = {}
            message_class = None
            try:
                if error['inconclusive'] == 'true':
                    message_class = 'inconclusive'
                    error['severity'] += ", inconcl."
            except KeyError:
                pass

            # BUG FIX: reset cwe_url on every iteration.  Previously a
            # present-but-falsy 'cwe' value took neither the assignment
            # branch nor the KeyError handler, so the link from the
            # preceding defect leaked into this row.
            cwe_url = ""
            try:
                if error['cwe']:
                    cwe_url = "<a href=\"https://cwe.mitre.org/data/definitions/" + error['cwe'] + ".html\">" + error['cwe'] + "</a>"
            except KeyError:
                pass

            if error['severity'] == 'error':
                message_class = 'error'

            # Unmatched suppressions ('*') and file-less global messages
            # have no source page to link to, so line/htmlfile are blanked.
            is_file = filename != '' and not filename.endswith('*')
            line = error["line"] if is_file else ""
            htmlfile = data.get('htmlfile') if is_file else None

            output_file.write(
                '\n %s' %
                tr_str('td', line, error["id"], cwe_url, error["severity"], error["msg"],
                       git_blame_dict.get('author', 'Unknown'), git_blame_dict.get('author-mail', '---'),
                       git_blame_dict.get('author-time', '---'),
                       tr_class=error["id"],
                       message_class=message_class,
                       add_author=add_author_information,
                       htmlfile=htmlfile))
# Close the defect table and finish index.html with the shared footer,
# which embeds the cppcheck version that produced the report.
for chunk in ('\n </table>', HTML_FOOTER % contentHandler.versionCppcheck):
    output_file.write(chunk)
# Warn on stderr about source files that could not be decoded, and
# point the user at the --source-encoding option as a likely fix.
if decode_errors:
    warning = (
        "\nGenerating html failed for the following files: " + ' '.join(decode_errors) +
        "\nConsider changing source-encoding (for example: \"htmlreport ... --source-encoding=\"iso8859-1\"\"\n")
    sys.stderr.write(warning)
# Recreate style.css next to the generated report.  Return to the
# original working directory first so a relative report_dir resolves.
print('Creating style.css file')
os.chdir(cwd)  # going back to the cwd to find style.css
stylesheet_path = os.path.join(options.report_dir, 'style.css')
with io.open(stylesheet_path, 'w') as stylesheet:
    stylesheet.write(STYLE_FILE)
# Build a per-file tally of defect severities for stats.html:
# stats_countlist maps filename -> {severity: count}.
print("Creating stats.html (statistics)\n")
stats_countlist = {}

for filename, data in sorted(files.items()):
    # Skip file-less (global) messages; they have no page to link to.
    if filename == '':
        continue
    # NOTE: the previous version sorted the errors by line before
    # tallying, but a Counter tally is independent of iteration order,
    # so the sort was redundant work and has been dropped.
    stats_countlist[filename] = dict(Counter(error['severity'] for error in data['errors']))

# get top ten for each severity
SEVERITIES = "error", "warning", "portability", "performance", "style", "unusedFunction", "information", "missingInclude", "internal"
# Generate stats.html: for each severity, list the ten files with the
# most findings of that severity, largest count first.
with io.open(os.path.join(options.report_dir, 'stats.html'), 'w') as stats_file:
    stats_file.write(HTML_HEAD.replace('id="menu"', 'id="menu_index"', 1).replace("Defects:", "Back to summary", 1) % (options.title, '', options.title, 'Statistics', ''))
    stats_file.write(HTML_HEAD_END.replace("content", "content_index", 1))

    for sev in SEVERITIES:
        _sum = 0
        stats_templist = {}

        # Not every file has findings of every severity; skip missing
        # entries instead of raising.  (The previous version wrapped this
        # loop in a second try/except KeyError that was unreachable: the
        # inner handler already catches every KeyError the loop can
        # raise, so the dead outer handler has been removed.)
        for filename in stats_countlist:
            try:  # also bail out if we have a file with no sev-results
                _sum += stats_countlist[filename][sev]
                stats_templist[filename] = int(stats_countlist[filename][sev])  # file : amount
            except KeyError:
                continue
        # don't print "0 style" etc, if no style warnings were found
        if _sum == 0:
            continue

        stats_file.write("<p>Top 10 files for " + sev + " severity, total findings: " + str(_sum) + "<br>\n")

        # sort, so that the file with the most severities per type is first
        stats_list_sorted = sorted(stats_templist.items(), key=operator.itemgetter(1, 0), reverse=True)

        # Width of the widest count.  After the descending sort the first
        # entry holds the largest count, so its string is the longest;
        # every other number is padded to align with it.
        LENGTH = len(str(stats_list_sorted[0][1])) if stats_list_sorted else 0

        for name, count in stats_list_sorted[:10]:  # print only the top 10
            stats_file.write(" " * 3 + str(count) + " " * (1 + LENGTH - len(str(count))) + "<a href=\"" + files[name]['htmlfile'] + "\"> " + name + "</a><br>\n")
        stats_file.write("</p>\n")

    stats_file.write(HTML_FOOTER % contentHandler.versionCppcheck)

print("\nOpen '" + options.report_dir + "/index.html' to see the results.")