#!/usr/bin/env python

from __future__ import unicode_literals

import io
import sys
import optparse
import os
import operator

from collections import Counter

from pygments import highlight
from pygments.lexers import guess_lexer_for_filename
from pygments.formatters import HtmlFormatter

from xml.sax import parse as xml_parse
from xml.sax import SAXParseException as XmlParseException
from xml.sax.handler import ContentHandler as XmlContentHandler
from xml.sax.saxutils import escape

"""
Turns a cppcheck xml file into a browsable html report along
with syntax highlighted source code.
"""

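# The input is cppcheck's XML report. A minimal sketch of the version 2
# format handled below (produced by "cppcheck --xml-version=2"; element
# values shown here are only illustrative):
#
#   <results version="2">
#       <cppcheck version="1.69"/>
#       <errors>
#           <error id="nullPointer" severity="error" msg="..." verbose="..."
#                  inconclusive="true">
#               <location file="src/foo.c" line="42"/>
#           </error>
#       </errors>
#   </results>
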
STYLE_FILE = """
body {
    font: 13px Arial, Verdana, Sans-Serif;
    margin: 0;
    width: auto;
}

h1 {
    margin: 10px;
}

#footer > p {
    margin: 4px;
}

.error {
    background-color: #ffb7b7;
}

.error2 {
    background-color: #faa;
    border: 1px dotted black;
    display: inline-block;
    margin-left: 4px;
}

.inconclusive {
    background-color: #B6B6B4;
}

.inconclusive2 {
    background-color: #B6B6B4;
    border: 1px dotted black;
    display: inline-block;
    margin-left: 4px;
}

div.verbose {
    display: inline-block;
    vertical-align: top;
    cursor: help;
}

div.verbose div.content {
    display: none;
    position: absolute;
    padding: 10px;
    margin: 4px;
    max-width: 40%;
    white-space: pre-wrap;
    border: 1px solid black;
    background-color: #FFFFCC;
    cursor: auto;
}

.highlight .hll {
    padding: 1px;
}

#header {
    border-bottom: thin solid #aaa;
}

#menu {
    float: left;
    margin-top: 5px;
    text-align: left;
    width: 150px;
    height: 75%;
    position: fixed;
    overflow: auto;
    z-index: 1;
}

#menu_index {
    float: left;
    margin-top: 5px;
    padding-left: 5px;
    text-align: left;
    width: 200px;
    height: 75%;
    position: fixed;
    overflow: auto;
    z-index: 1;
}

#menu > a {
    display: block;
    margin-left: 10px;
    font-size: 12px;
    z-index: 1;
}

#filename {
    margin-left: 10px;
    font-size: 12px;
    z-index: 1;
}

.highlighttable {
    background-color: white;
    z-index: 10;
    position: relative;
    margin: -10px;
}

#content {
    background-color: white;
    -webkit-box-sizing: content-box;
    -moz-box-sizing: content-box;
    box-sizing: content-box;
    float: left;
    margin: 5px;
    margin-left: 10px;
    padding: 0 10px 10px 10px;
    width: 80%;
    padding-left: 150px;
}

#content_index {
    background-color: white;
    -webkit-box-sizing: content-box;
    -moz-box-sizing: content-box;
    box-sizing: content-box;
    float: left;
    margin: 5px;
    margin-left: 10px;
    padding: 0 10px 10px 10px;
    width: 80%;
    padding-left: 200px;
}

.linenos {
    border-right: thin solid #aaa;
    color: lightgray;
    padding-right: 6px;
}

#footer {
    border-top: thin solid #aaa;
    clear: both;
    font-size: 90%;
    margin-top: 5px;
}

#footer ul {
    list-style-type: none;
    padding-left: 0;
}
"""

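# HTML_HEAD is expanded with five %s values by the HTML_HEAD % (...) calls
# further down: the page title, the inline Pygments CSS, the project title and
# file path shown in the heading, and the file's base name shown in the menu.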
HTML_HEAD = """
<!DOCTYPE html>
<html lang="en">
<head>
    <meta charset="utf-8">
    <title>Cppcheck - HTML report - %s</title>
    <link rel="stylesheet" href="style.css">
    <style>
%s
    </style>
    <script language="javascript">
      function getStyle(el, styleProp) {
        if (el.currentStyle)
          var y = el.currentStyle[styleProp];
        else if (window.getComputedStyle)
          var y = document.defaultView.getComputedStyle(el, null).getPropertyValue(styleProp);
        return y;
      }
      function toggle() {
        var el = this.expandable_content;
        var mark = this.expandable_marker;
        if (el.style.display == "block") {
          el.style.display = "none";
          mark.innerHTML = "[+]";
        } else {
          el.style.display = "block";
          mark.innerHTML = "[-]";
        }
      }
      function init_expandables() {
        var elts = document.getElementsByClassName("expandable");
        for (var i = 0; i < elts.length; i++) {
          var el = elts[i];
          var clickable = el.getElementsByTagName("span")[0];
          var marker = clickable.getElementsByClassName("marker")[0];
          var content = el.getElementsByClassName("content")[0];
          var width = clickable.clientWidth - parseInt(getStyle(content, "padding-left")) - parseInt(getStyle(content, "padding-right"));
          content.style.width = width + "px";
          clickable.expandable_content = content;
          clickable.expandable_marker = marker;
          clickable.onclick = toggle;
        }
      }
    </script>
</head>
<body onload="init_expandables()">
<div id="header">
  <h1>Cppcheck report - %s: %s</h1>
</div>
<div id="menu" dir="rtl">
  <p id="filename"><a href="index.html">Defects:</a> %s</p>
"""

HTML_HEAD_END = """
</div>
<div id="content">
"""

HTML_FOOTER = """
</div>
<div id="footer">
  <p>
    Cppcheck %s - a tool for static C/C++ code analysis<br>
    <br>
    Internet: <a href="http://cppcheck.sourceforge.net">http://cppcheck.sourceforge.net</a><br>
    IRC: <a href="irc://irc.freenode.net/cppcheck">irc://irc.freenode.net/cppcheck</a><br>
  </p>
</div>
</body>
</html>
"""

HTML_ERROR = "<span class='error2'>&lt;--- %s</span>\n"
HTML_INCONCLUSIVE = "<span class='inconclusive2'>&lt;--- %s</span>\n"

HTML_EXPANDABLE_ERROR = "<div class='verbose expandable'><span class='error2'>&lt;--- %s <span class='marker'>[+]</span></span><div class='content'>%s</div></div>\n"
HTML_EXPANDABLE_INCONCLUSIVE = "<div class='verbose expandable'><span class='inconclusive2'>&lt;--- %s <span class='marker'>[+]</span></span><div class='content'>%s</div></div>\n"

# escape() and unescape() take care of &, < and >; the table below adds the
# quote characters.
html_escape_table = {
    '"': "&quot;",
    "'": "&apos;"
}
html_unescape_table = {v: k for k, v in html_escape_table.items()}


def html_escape(text):
    return escape(text, html_escape_table)


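# AnnotateCodeFormatter hooks into Pygments by overriding HtmlFormatter.wrap().
# wrap() yields (is_code_line, text) pairs; for every real code line
# (is_code_line == 1) any cppcheck message attached to that line number is
# spliced in before the trailing newline. A rough usage sketch (names and
# values here are illustrative, not taken from this script):
#
#   formatter = AnnotateCodeFormatter(linenos=True, hl_lines=[3])
#   formatter.errors = [{'line': 3, 'msg': 'example message'}]
#   html = highlight(source_code, CLexer(), formatter)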
class AnnotateCodeFormatter(HtmlFormatter):
    errors = []

    def wrap(self, source, outfile):
        line_no = 1
        for i, t in HtmlFormatter.wrap(self, source, outfile):
            # If this is a source code line we want to add a span tag at the
            # end.
            if i == 1:
                for error in self.errors:
                    if error['line'] == line_no:
                        try:
                            if error['inconclusive'] == 'true':
                                # only print verbose msg if it really differs
                                # from actual message
                                if error.get('verbose') and (error['verbose'] != error['msg']):
                                    index = t.rfind('\n')
                                    t = t[:index] + HTML_EXPANDABLE_INCONCLUSIVE % (error['msg'], html_escape(error['verbose'].replace("\\012", '\n'))) + t[index + 1:]
                                else:
                                    t = t.replace('\n', HTML_INCONCLUSIVE % error['msg'])
                        except KeyError:
                            if error.get('verbose') and (error['verbose'] != error['msg']):
                                index = t.rfind('\n')
                                t = t[:index] + HTML_EXPANDABLE_ERROR % (error['msg'], html_escape(error['verbose'].replace("\\012", '\n'))) + t[index + 1:]
                            else:
                                t = t.replace('\n', HTML_ERROR % error['msg'])

                line_no = line_no + 1
            yield i, t


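# CppCheckHandler below is a plain SAX content handler: xml.sax.parse()
# (imported above as xml_parse) reads the XML stream and calls startElement()
# once per tag, and the handler flattens each <error>/<location> pair into a
# dict. A rough sketch (the file name is hypothetical):
#
#   handler = CppCheckHandler()
#   xml_parse('cppcheck-result.xml', handler)
#   for err in handler.errors:
#       print(err['file'], err['line'], err['id'], err['msg'])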
class CppCheckHandler(XmlContentHandler):

    """Parses the cppcheck xml file and produces a list of all its errors."""

    def __init__(self):
        XmlContentHandler.__init__(self)
        self.errors = []
        self.version = '1'
        self.versionCppcheck = ''

    def startElement(self, name, attributes):
        if name == 'results':
            self.version = attributes.get('version', self.version)

        if self.version == '1':
            self.handleVersion1(name, attributes)
        else:
            self.handleVersion2(name, attributes)

    def handleVersion1(self, name, attributes):
        if name != 'error':
            return

        self.errors.append({
            'file': attributes.get('file', ''),
            'line': int(attributes.get('line', 0)),
            'id': attributes['id'],
            'severity': attributes['severity'],
            'msg': attributes['msg']
        })

    def handleVersion2(self, name, attributes):
        if name == 'cppcheck':
            self.versionCppcheck = attributes['version']
        if name == 'error':
            try:
                self.errors.append({
                    'file': '',
                    'line': 0,
                    'id': attributes['id'],
                    'severity': attributes['severity'],
                    'msg': attributes['msg'],
                    'verbose': attributes.get('verbose'),
                    'inconclusive': attributes['inconclusive']
                })
            except KeyError:
                self.errors.append({
                    'file': '',
                    'line': 0,
                    'id': attributes['id'],
                    'severity': attributes['severity'],
                    'msg': attributes['msg'],
                    'verbose': attributes.get('verbose')
                })
        elif name == 'location':
            assert self.errors
            self.errors[-1]['file'] = attributes['file']
            self.errors[-1]['line'] = int(attributes['line'])


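# Typical command line use of this report generator (cppcheck writes its XML
# report to stderr, so it is redirected into a file first; paths here are
# examples only):
#
#   cppcheck --xml-version=2 src/ 2> cppcheck.xml
#   cppcheck-htmlreport --file=cppcheck.xml --report-dir=report --source-dir=.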
if __name__ == '__main__':
    # Configure all the options this little utility is using.
    parser = optparse.OptionParser()
    parser.add_option('--title', dest='title',
                      help='The title of the project.',
                      default='[project name]')
    parser.add_option('--file', dest='file',
                      help='The cppcheck xml output file to read defects '
                           'from. Default is reading from stdin.')
    parser.add_option('--report-dir', dest='report_dir',
                      help='The directory where the HTML report content is '
                           'written.')
    parser.add_option('--source-dir', dest='source_dir',
                      help='Base directory where source code files can be '
                           'found.')
    parser.add_option('--source-encoding', dest='source_encoding',
                      help='Encoding of source code.', default='utf-8')

    # Parse options and make sure that we have an output directory set.
    options, args = parser.parse_args()

    try:
        sys.argv[1]
    except IndexError:  # no arguments given, print --help
        parser.print_help()
        quit()

    if not options.report_dir:
        parser.error('No report directory set.')

    # Get the directory where source code files are located.
    source_dir = os.getcwd()
    if options.source_dir:
        source_dir = options.source_dir

    # Get the stream that we read cppcheck errors from.
    input_file = sys.stdin
    if options.file:
        if not os.path.exists(options.file):
            parser.error('cppcheck xml file: %s not found.' % options.file)
        input_file = io.open(options.file, 'r')
    else:
        parser.error('No cppcheck xml file specified. (--file=)')

    # Parse the xml file and produce a simple list of errors.
    print('Parsing xml report.')
    try:
        contentHandler = CppCheckHandler()
        xml_parse(input_file, contentHandler)
    except XmlParseException as msg:
        print('Failed to parse cppcheck xml file: %s' % msg)
        sys.exit(1)

    # We have a list of errors. But now we want to group them on
    # each source code file. Let's create a files dictionary that
    # will contain a list of all the errors in that file. For each
    # file we will also generate an HTML filename to use.
    files = {}
    file_no = 0
    for error in contentHandler.errors:
        filename = error['file']
        if filename not in files.keys():
            files[filename] = {
                'errors': [], 'htmlfile': str(file_no) + '.html'}
            file_no = file_no + 1
        files[filename]['errors'].append(error)

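    # At this point `files` maps each reported file name to its errors and the
    # page that will be generated for it, e.g. (illustrative values):
    #   {'src/foo.c': {'errors': [...], 'htmlfile': '0.html'},
    #    'src/bar.c': {'errors': [...], 'htmlfile': '1.html'}}
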
    # Make sure that the report directory is created if it doesn't exist.
    print('Creating %s directory' % options.report_dir)
    if not os.path.exists(options.report_dir):
        os.mkdir(options.report_dir)

    # Generate an HTML file with syntax highlighted source code for each
    # file that contains one or more errors.
    print('Processing errors')

    decode_errors = []
    for filename, data in sorted(files.items()):
        htmlfile = data['htmlfile']
        errors = data['errors']

        lines = []
        for error in errors:
            lines.append(error['line'])

        if filename == '':
            continue

        source_filename = os.path.join(source_dir, filename)
        try:
            with io.open(source_filename, 'r', encoding=options.source_encoding) as input_file:
                content = input_file.read()
        except IOError:
            if (error['id'] == 'unmatchedSuppression'):
                continue  # file not found, bail out
            else:
                sys.stderr.write("ERROR: Source file '%s' not found.\n" %
                                 source_filename)
            continue
        except UnicodeDecodeError:
            sys.stderr.write("WARNING: Unicode decode error in '%s'.\n" %
                             source_filename)
            decode_errors.append(source_filename[2:])  # "[2:]" gets rid of "./" at the beginning
            continue

        htmlFormatter = AnnotateCodeFormatter(linenos=True,
                                              style='colorful',
                                              hl_lines=lines,
                                              lineanchors='line',
                                              encoding=options.source_encoding)
        htmlFormatter.errors = errors

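        # lineanchors='line' makes Pygments emit per-line anchors of the form
        # "line-<N>", which is what the "#line-%d" links written below and in
        # index.html point at.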
        with io.open(os.path.join(options.report_dir, htmlfile),
                     'w') as output_file:
            output_file.write(HTML_HEAD %
                              (options.title,
                               htmlFormatter.get_style_defs('.highlight'),
                               options.title,
                               filename,
                               filename.split('/')[-1]))

            for error in sorted(errors, key=lambda k: k['line']):
                output_file.write("<a href='%s#line-%d'> %s %s</a>" % (data['htmlfile'], error['line'], error['id'], error['line']))

            output_file.write(HTML_HEAD_END)
            try:
                lexer = guess_lexer_for_filename(source_filename, '')
            except:
                sys.stderr.write("ERROR: Couldn't determine lexer for the file '" + source_filename + "'. Won't be able to syntax highlight this file.")
                output_file.write("\n <tr><td colspan='4'> Could not generate content because pygments failed to determine the code type.</td></tr>")
                output_file.write("\n <tr><td colspan='4'> Sorry about this.</td></tr>")
                continue

            if options.source_encoding:
                lexer.encoding = options.source_encoding

            output_file.write(
                highlight(content, lexer, htmlFormatter).decode(
                    options.source_encoding))

            output_file.write(HTML_FOOTER % contentHandler.versionCppcheck)

        print(' ' + filename)

    # Generate a master index.html file that will contain a list of
    # all the errors created.
    print('Creating index.html')

    with io.open(os.path.join(options.report_dir, 'index.html'),
                 'w') as output_file:

        stats_count = 0
        stats = []
        for filename, data in sorted(files.items()):
            for error in data['errors']:
                stats.append(error['id'])  # get the stats
                stats_count += 1

        stat_html = []
        # The following lines sort the stats primarily by value (occurrences);
        # if two IDs occur equally often, they are sorted alphabetically by
        # warning ID.
        try:
            cnt_max = Counter(stats).most_common()[0][1]
        except IndexError:
            cnt_max = 0

        try:
            cnt_min = Counter(stats).most_common()[-1][1]
        except IndexError:
            cnt_min = 0

        for occurrences in reversed(range(cnt_min, cnt_max + 1)):
            for _id in [k for k, v in sorted(Counter(stats).items()) if v == occurrences]:
                stat_html.append(" " + str(dict(Counter(stats).most_common())[_id]) + " " + str(_id) + "<br/>\n")

        output_file.write(HTML_HEAD.replace('id="menu" dir="rtl"', 'id="menu_index"', 1).replace("Defects:", "Defect summary;", 1) % (options.title, '', options.title, '', ''))
        output_file.write(' <p>\n' + ' ' + str(stats_count) + ' total<br/><br/>\n' + ''.join(stat_html) + '<br/><br/><a href="stats.html">Statistics</a></p>')
        output_file.write(HTML_HEAD_END.replace("content", "content_index", 1))
        output_file.write(' <table>\n')
        output_file.write(
            ' <tr><th>Line</th><th>Id</th><th>Severity</th><th>Message</th></tr>')
        for filename, data in sorted(files.items()):
            if filename in decode_errors:  # don't print a link but a note
                output_file.write("\n <tr><td colspan='4'>%s</td></tr>" % (filename))
                output_file.write("\n <tr><td colspan='4'> Could not be generated due to a UnicodeDecodeError</td></tr>")
            else:
                if filename.endswith('*'):  # assume unmatched suppression
                    output_file.write(
                        "\n <tr><td colspan='4'>%s</td></tr>" %
                        (filename))
                else:
                    output_file.write(
                        "\n <tr><td colspan='4'><a href='%s'>%s</a></td></tr>" %
                        (data['htmlfile'], filename))

                for error in sorted(data['errors'], key=lambda k: k['line']):
                    error_class = ''
                    try:
                        if error['inconclusive'] == 'true':
                            error_class = 'class="inconclusive"'
                            error['severity'] += ", inconcl."
                    except KeyError:
                        pass

                    if error['severity'] == 'error':
                        error_class = 'class="error"'
                    if error['id'] == 'missingInclude':
                        output_file.write(
                            '\n <tr><td></td><td>%s</td><td>%s</td><td>%s</td></tr>' %
                            (error['id'], error['severity'], error['msg']))
                    elif (error['id'] == 'unmatchedSuppression') and filename.endswith('*'):
                        output_file.write(
                            "\n <tr><td></td><td>%s</td><td>%s</td><td %s>%s</td></tr>" %
                            (error['id'], error['severity'], error_class,
                             error['msg']))
                    else:
                        output_file.write(
                            "\n <tr><td><a href='%s#line-%d'>%d</a></td><td>%s</td><td>%s</td><td %s>%s</td></tr>" %
                            (data['htmlfile'], error['line'], error['line'],
                             error['id'], error['severity'], error_class,
                             error['msg']))

        output_file.write('\n </table>')
        output_file.write(HTML_FOOTER % contentHandler.versionCppcheck)

    if decode_errors:
        sys.stderr.write("\nGenerating html failed for the following files: " + ' '.join(decode_errors))
        sys.stderr.write("\nConsider changing source-encoding (for example: htmlreport ... --source-encoding=\"iso8859-1\")\n")

    print('Creating style.css file')
    with io.open(os.path.join(options.report_dir, 'style.css'),
                 'w') as css_file:
        css_file.write(STYLE_FILE)

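    # stats.html lists, per severity, the ten files with the most findings.
    # stats_countlist built below maps each file to a severity histogram,
    # e.g. (illustrative values): {'src/foo.c': {'error': 2, 'style': 5}}.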
print("Creating stats.html (statistics)\n")
|
|
|
|
stats_countlist={}
|
|
|
|
|
|
|
|
for filename, data in sorted(files.items()):
|
|
|
|
if (filename == ''):
|
|
|
|
continue
|
|
|
|
stats_tmplist=[]
|
|
|
|
for error in sorted(data['errors'], key=lambda k: k['line']):
|
|
|
|
stats_tmplist.append(error['severity'])
|
|
|
|
|
|
|
|
stats_countlist[filename] = dict(Counter(stats_tmplist))
|
|
|
|
|
|
|
|
# get top ten for each severity
|
|
|
|
SEVERITIES = "error", "warning", "portability", "performance", "style", "unusedFunction", "information", "missingInclude", "internal"
|
|
|
|
|
|
|
|
with io.open(os.path.join(options.report_dir, 'stats.html'), 'w') as stats_file:
|
|
|
|
|
|
|
|
stats_file.write(HTML_HEAD.replace('id="menu" dir="rtl"', 'id="menu_index"', 1).replace("Defects:", "Back to summary", 1) % (options.title, '', options.title, 'Statistics', ''))
|
|
|
|
stats_file.write(HTML_HEAD_END.replace("content", "content_index", 1))
|
|
|
|
|
|
|
|
for sev in SEVERITIES:
|
|
|
|
_sum = 0
|
|
|
|
stats_templist={}
|
|
|
|
|
|
|
|
try: # if the we have an style warning but we are checking for portability, we have to skip it to prevent KeyError
|
|
|
|
for filename in stats_countlist:
|
|
|
|
try: # also bail out if we have a file with no sev-results
|
|
|
|
_sum += stats_countlist[filename][sev]
|
|
|
|
stats_templist[filename] = (int)(stats_countlist[filename][sev]) # file : amount,
|
|
|
|
except KeyError:
|
|
|
|
continue
|
|
|
|
if (_sum == 0): # don't print "0 style" etc, if no style warnings were found
|
|
|
|
break
|
|
|
|
except KeyError:
|
|
|
|
continue
|
|
|
|
stats_file.write("<p>Top 10 files for " + sev + " severity, total findings: " + str(_sum) + "</br>\n")
|
|
|
|
|
|
|
|
|
|
|
|
# sort, so that the file with the most severities per type is first
|
|
|
|
stats_list_sorted = sorted(stats_templist.items(), key=operator.itemgetter(1,0), reverse=True)
|
|
|
|
it = 0
|
|
|
|
LENGTH = 0
|
|
|
|
|
|
|
|
for i in stats_list_sorted: # printing loop
|
|
|
|
# for aesthetics: if it's the first iteration of the loop, get the max length of the number string
|
|
|
|
if (it == 0):
|
|
|
|
LENGTH = len(str(i[1])) # <- length of longest number, now get the difference and try to make other numbers align to it
|
|
|
|
|
|
|
|
stats_file.write(" "*3 + str(i[1]) + " "*(1 + LENGTH - len(str(i[1]))) + "<a href=\"" + files[i[0]]['htmlfile'] + "\"> " + i[0] + "</a></br>\n")
|
|
|
|
it += 1
|
|
|
|
if (it == 10): # print only the top 10
|
|
|
|
break
|
|
|
|
stats_file.write("</p>\n")
|
|
|
|
|
2014-07-22 01:12:27 +02:00
|
|
|
print("\nOpen '" + options.report_dir + "/index.html' to see the results.")
|
2015-04-20 09:17:45 +02:00
|
|
|
|