2018-11-06 20:46:07 +01:00
|
|
|
#!/usr/bin/env python3
|
|
|
|
#
|
|
|
|
# cppcheck addon for naming conventions
|
|
|
|
# An enhanced version. Configuration is taken from a json file
|
|
|
|
# It supports checking for type-based prefixes in function or variable names.
|
2024-01-02 15:01:02 +01:00
|
|
|
# Aside from include guard naming, include guard presence can also be tested.
|
2018-11-06 20:46:07 +01:00
|
|
|
#
|
|
|
|
# Example usage (variable name must start with lowercase, function name must start with uppercase):
|
|
|
|
# $ cppcheck --dump path-to-src/
|
2019-01-09 18:16:51 +01:00
|
|
|
# $ python namingng.py test.c.dump
|
2018-11-06 20:46:07 +01:00
|
|
|
#
|
|
|
|
# JSON format:
|
|
|
|
#
|
|
|
|
# {
|
2024-01-02 15:01:02 +01:00
|
|
|
# "RE_VARNAME": ["[a-z]*[a-zA-Z0-9_]*\\Z"],
|
2018-11-06 20:46:07 +01:00
|
|
|
# "RE_PRIVATE_MEMBER_VARIABLE": null,
|
2024-01-02 15:01:02 +01:00
|
|
|
# "RE_FUNCTIONNAME": ["[a-z0-9A-Z]*\\Z"],
|
|
|
|
# "_comment": "comments can be added to the config with underscore-prefixed keys",
|
|
|
|
# "include_guard": {
|
|
|
|
# "input": "path",
|
|
|
|
# "prefix": "GUARD_",
|
|
|
|
# "case": "upper",
|
|
|
|
# "max_linenr": 5,
|
|
|
|
# "RE_HEADERFILE": "[^/].*\\.h\\Z",
|
|
|
|
# "required": true
|
|
|
|
# },
|
2018-11-06 20:46:07 +01:00
|
|
|
# "var_prefixes": {"uint32_t": "ui32"},
|
|
|
|
# "function_prefixes": {"uint16_t": "ui16",
|
|
|
|
# "uint32_t": "ui32"}
|
|
|
|
# }
|
|
|
|
#
|
2019-03-26 15:20:32 +01:00
|
|
|
# RE_VARNAME, RE_PRIVATE_MEMBER_VARIABLE and RE_FUNCTIONNAME are regular expressions to cover the basic names
|
2018-11-06 20:46:07 +01:00
|
|
|
# In var_prefixes and function_prefixes there are the variable-type/prefix pairs
|
|
|
|
|
2019-12-30 17:30:17 +01:00
|
|
|
import cppcheckdata
|
2018-11-06 20:46:07 +01:00
|
|
|
import sys
|
addons/namingng.py: Improve file name checking feature. (#5802)
(note: comment updated after force push; initial PR was incomplete)
namingng.py attempted to derive the source filename from the name of the
dumpfile. However, the dumpfile is not necessarily named according to
this pattern, e.g. cppcheck will add the pid to the filename, making
RE_FILE rules
fail. Taking the first item of data.files seems to be more robust.
To get the basename of the file, `os.path.basename()` is used. This
solves (theoretical) issues on platforms with a different path
separator.
With this patch, all filenames are checked, not just those provided on
the cppcheck command line. This is useful as header files will now also
be part of this check, even if not explicitly specified on the command
line.
The "RE_FILE" key of the configuration JSON may contain a list of
regular expressions, where any match will lead to acceptance of the
filename.
Both the full path and the basename of the files are tested.
One use case for this combination of features is:
```
"RE_FILE":[
"/.*\\.h\\Z",
"[a-z][a-z0-9_]*[a-z0-9]\\.[ch]\\Z"
]
```
This will accept any file naming convention of the platform used
(assuming platform files are all referenced using an absolute path),
while enforcing a particular naming scheme for project files.
2023-12-27 18:56:29 +01:00
|
|
|
import os
|
2018-11-06 20:46:07 +01:00
|
|
|
import re
|
|
|
|
import argparse
|
|
|
|
import json
|
|
|
|
|
2019-11-12 09:47:48 +01:00
|
|
|
# Auxiliary class
|
|
|
|
class DataStruct:
    """Minimal token-like record for reporting findings at a source location.

    Mirrors the attributes (file, linenr, str, column) that cppcheckdata
    tokens expose, so instances can be handed to the reporting helpers in
    place of a real token.
    """
    def __init__(self, file, linenr, string, column=0):
        # column defaults to 0 for synthetic locations (e.g. whole-file errors)
        self.column = column
        self.str = string
        self.linenr = linenr
        self.file = file
|
2018-11-06 20:46:07 +01:00
|
|
|
|
2024-01-03 14:00:47 +01:00
|
|
|
def reportNamingError(location,message,errorId='namingConvention',severity='style',extra='',column=None):
    """Emit one naming finding through cppcheckdata's reporting machinery.

    location may be a real cppcheckdata token/directive or a DataStruct mock;
    column, when given, overrides the column taken from location.
    """
    cppcheckdata.reportError(location,severity,message,'namingng',errorId,extra,columnOverride=column)
|
2019-11-12 09:47:48 +01:00
|
|
|
|
2024-01-02 15:01:02 +01:00
|
|
|
def configError(error, fatal=True):
    """Print a configuration error; terminate unless fatal is False.

    Non-fatal mode lets callers accumulate several errors before exiting.
    """
    print('config error: %s'%error)
    if not fatal:
        return
    sys.exit(1)
|
|
|
|
|
2024-01-04 16:26:54 +01:00
|
|
|
def validateConfigREs(list_or_dict, json_key):
    """Validate the regular expressions of one config entry.

    list_or_dict is either a list of RE strings, or a dict mapping RE strings
    to [report_as_error, message] pairs. Every invalid entry produces a
    non-fatal config error. Returns True when at least one error was found.
    """
    have_error = False
    is_dict = isinstance(list_or_dict, dict)
    for entry in list_or_dict:
        try:
            re.compile(entry)
        except re.error as err:
            configError("item '%s' of '%s' is not a valid regular expression: %s"%(entry,json_key,err),fatal=False)
            have_error = True
            continue
        if not is_dict:
            continue
        # entry is a dict key; its value must be a [bool, str] pair
        pair = list_or_dict[entry]
        pair_ok = (isinstance(pair, list) and len(pair) == 2
                   and isinstance(pair[0], bool) and isinstance(pair[1], str))
        if not pair_ok:
            configError("item '%s' of '%s' must be an array [bool,string]"%(entry,json_key),fatal=False)
            have_error = True

    return have_error
|
|
|
|
|
2018-11-06 20:46:07 +01:00
|
|
|
def loadConfig(configfile):
    """Load and validate the JSON naming configuration.

    Returns a Config object whose attributes (file, namespace, variable, ...)
    hold the validated settings, or None when a key is absent from the JSON.
    Exits the process on any fatal or accumulated configuration error.
    """
    if not os.path.exists(configfile):
        configError("cannot find config file '%s'"%configfile)

    try:
        with open(configfile) as fh:
            data = json.load(fh)
    except json.JSONDecodeError as e:
        configError("error parsing config file as JSON at line %d: %s"%(e.lineno,e.msg))
    except Exception as e:
        configError("error opening config file '%s': %s"%(configfile,e))

    if not isinstance(data, dict):
        configError('config file must contain a JSON object at the top level')

    # All errors are emitted before bailing out, to make the unit test more
    # effective.
    have_error = False

    # Put config items in a class, so that settings can be accessed using
    # config.feature
    class Config:
        pass
    config = Config()

    # Maps config attribute name -> (JSON key, accepted types[, default]).
    # A missing third element means the default is None (feature disabled).
    mapping = {
        'file': ('RE_FILE', (list,)),
        'namespace': ('RE_NAMESPACE', (list,dict)),
        'include_guard': ('include_guard', (dict,)),
        'variable': ('RE_VARNAME', (list,dict)),
        'variable_prefixes': ('var_prefixes', (dict,), {}),
        'private_member': ('RE_PRIVATE_MEMBER_VARIABLE', (list,dict)),
        'public_member': ('RE_PUBLIC_MEMBER_VARIABLE', (list,dict)),
        'global_variable': ('RE_GLOBAL_VARNAME', (list,dict)),
        'function_name': ('RE_FUNCTIONNAME', (list,dict)),
        'function_prefixes': ('function_prefixes', (dict,), {}),
        'class_name': ('RE_CLASS_NAME', (list,dict)),
        'skip_one_char_variables': ('skip_one_char_variables', (bool,)),
    }

    # parse defined keys and store as members of config object
    for key,opts in mapping.items():
        json_key = opts[0]
        req_type = opts[1]
        default = None if len(opts)<3 else opts[2]

        # pop() so that whatever remains in data afterwards is unknown keys
        value = data.pop(json_key,default)
        if value is not None and type(value) not in req_type:
            req_typename = ' or '.join([tp.__name__ for tp in req_type])
            got_typename = type(value).__name__
            configError('%s must be %s (not %s), or not set'%(json_key,req_typename,got_typename),fatal=False)
            have_error = True
            continue

        # type list implies that this is either a list of REs or a dict with RE keys
        if list in req_type and value is not None:
            re_error = validateConfigREs(value,json_key)
            if re_error:
                have_error = True

        setattr(config,key,value)

    # check remaining keys, only accept underscore-prefixed comments
    for key,value in data.items():
        if key == '' or key[0] != '_':
            configError("unknown config key '%s'"%key,fatal=False)
            have_error = True

    if have_error:
        sys.exit(1)

    return config
|
2018-11-06 20:46:07 +01:00
|
|
|
|
2019-11-12 09:47:48 +01:00
|
|
|
|
2023-12-30 20:54:03 +01:00
|
|
|
def evalExpr(conf, exp, mockToken, msgType):
    """Match one configured RE against a token name and report violations.

    With a list config, failing to match exp is a violation. With a dict
    config, the [report_as_error, message] value decides whether a match
    (True) or a mismatch (False) is the violation, and message is appended
    to the report.
    """
    msg = msgType + ' ' + mockToken.str + ' violates naming convention'
    if isinstance(conf, dict):
        report_as_error = conf[exp][0]
        msg += ': ' + conf[exp][1]
    else:
        report_as_error = False

    matched = re.match(exp, mockToken.str) is not None
    if matched == report_as_error:
        reportNamingError(mockToken, msg)
|
2018-11-06 20:46:07 +01:00
|
|
|
|
2024-01-04 16:26:54 +01:00
|
|
|
def check_include_guard_name(conf, directive):
    """Check the guard symbol of one '#ifndef NAME' directive.

    The expected guard name is derived from the file name according to the
    'include_guard' configuration (input/case/prefix/suffix); a mismatch is
    reported as a naming violation. Returns (guard_name, guard_column), or
    (None, None) when the directive is malformed.
    """
    parts = directive.str.split()
    if len(parts) != 2:
        reportNamingError(directive, 'syntax error', 'syntax')
        return None, None
    guard_name = parts[1]
    # 1-based column of the guard symbol inside the directive text
    guard_column = 1 + directive.str.find(guard_name)

    guard_conf = conf.include_guard
    filename = directive.file
    if guard_conf.get('input', 'path') == 'basename':
        filename = os.path.basename(filename)
    use_case = guard_conf.get('case', 'upper')
    if use_case == 'upper':
        filename = filename.upper()
    elif use_case == 'lower':
        filename = filename.lower()
    elif use_case != 'keep':  # 'keep' leaves the filename case as-is
        print("invalid config value for 'case': '%s'"%use_case,file=sys.stderr)
        sys.exit(1)

    # reduce the filename to an identifier-safe bare name
    barename = re.sub('[^A-Za-z0-9]','_',filename).strip('_')
    expect_guard_name = guard_conf.get('prefix','') + barename + guard_conf.get('suffix','')
    if expect_guard_name != guard_name:
        msg = 'include guard naming violation; %s != %s'%(guard_name,expect_guard_name)
        reportNamingError(directive,msg,'includeGuardName',column=guard_column)

    return guard_name, guard_column
|
2024-01-02 15:01:02 +01:00
|
|
|
|
2024-01-04 16:26:54 +01:00
|
|
|
def check_include_guards(conf,cfg,unguarded_include_files):
    """Check include guard naming, consistency and presence.

    Scans for '#ifndef FILE_H' as the first directive, in the first N lines,
    then tests whether the next directive #defines the found name. Files whose
    guard is detected are removed from unguarded_include_files (shared across
    configurations by the caller). Various tests are done:
    - check include guards for their naming and consistency
    - test whether include guards are in place
    """
    max_linenr = conf.include_guard.get('max_linenr', 5)

    # BUGFIX: the helper previously had no 'severity' parameter, so the
    # includeGuardAwayFromDuty report below (which passes severity='warning')
    # raised a TypeError at runtime. Forward it to reportNamingError.
    def report(directive,msg,errorId,severity='style',column=0):
        reportNamingError(directive,msg,errorId,severity=severity,column=column)

    def report_pending_ifndef(directive,column):
        report(directive,'include guard #ifndef is not followed by #define','includeGuardIncomplete',column=column)

    last_fn = None
    pending_ifndef = None
    # phase: 0 = expecting #ifndef, 1 = expecting matching #define,
    # -1 = done with / ignoring the current file
    phase = 0
    for directive in cfg.directives:
        if last_fn != directive.file:
            # crossed a file boundary; a dangling #ifndef is incomplete
            if pending_ifndef:
                report_pending_ifndef(pending_ifndef,guard_column)
                pending_ifndef = None
            last_fn = directive.file
            phase = 0
        if phase == -1:
            # ignore (the remainder of) this file
            continue
        if not re.match(include_guard_header_re,directive.file):
            # not a header file per configuration; skip it entirely
            phase = -1
            continue

        if directive.linenr > max_linenr:
            if phase == 0 and conf.include_guard.get('required',1):
                report(directive,'include guard not found before line %d'%max_linenr,'includeGuardMissing')
            phase = -1
            continue

        if phase == 0:
            # looking for '#ifndef FILE_H'
            if not directive.str.startswith('#ifndef'):
                if conf.include_guard.get('required',1):
                    report(directive,'first preprocessor directive should be include guard #ifndef','includeGuardMissing')
                phase = -1
                continue
            guard_name,guard_column = check_include_guard_name(conf,directive)
            if guard_name is None:
                phase = -1
                continue
            pending_ifndef = directive
            phase = 1
        elif phase == 1:
            pending_ifndef = None
            # looking for '#define FILE_H'
            if not directive.str.startswith('#define'):
                report(directive,'second preprocessor directive should be include guard #define','includeGuardIncomplete')
                phase = -1
                continue
            parts = directive.str.split()
            if len(parts) == 1:
                report(directive,'syntax error','syntax')
                phase = -1
                continue
            if guard_name != parts[1]:
                report(directive,'include guard does not guard; %s != %s'%(guard_name,parts[1]),'includeGuardAwayFromDuty',severity='warning',column=guard_column)

            # BUGFIX: the file may not be in the list (guards not 'required'
            # makes it empty, or the file was already removed while checking
            # a previous configuration); an unconditional remove() raised
            # ValueError in those cases.
            if directive.file in unguarded_include_files:
                unguarded_include_files.remove(directive.file)

            phase = -1
    if pending_ifndef:
        report_pending_ifndef(pending_ifndef,guard_column)
|
2019-11-12 09:47:48 +01:00
|
|
|
|
2024-01-04 16:26:54 +01:00
|
|
|
def process(dumpfiles, configfile):
    """Load the naming configuration and check every given dump file."""
    conf = loadConfig(configfile)

    if conf.include_guard:
        # publish the header-file RE as a module global for the guard checks
        global include_guard_header_re
        include_guard_header_re = conf.include_guard.get('RE_HEADERFILE',"[^/].*\\.h\\Z")

    for afile in dumpfiles:
        if afile[-5:] != '.dump':
            continue
        if not args.cli:
            print('Checking ' + afile + '...')
        process_data(conf, cppcheckdata.CppcheckData(afile))
|
|
|
|
|
|
|
|
def check_file_naming(conf, data):
    """Check every file name in the dump against the RE_FILE patterns.

    A file is accepted when any configured RE matches either its full path
    or its basename; otherwise a naming violation is reported.
    """
    for source_file in data.files:
        basename = os.path.basename(source_file)
        accepted = any(re.match(exp, source_file) or re.match(exp, basename)
                       for exp in conf.file)
        if not accepted:
            mockToken = DataStruct(source_file, 0, basename)
            reportNamingError(mockToken, 'File name ' + basename + ' violates naming convention')
|
|
|
|
|
|
|
|
def check_namespace_naming(conf, data):
    """Check the identifier following each 'namespace' keyword token."""
    for tk in data.rawTokens:
        if tk.str != 'namespace':
            continue
        # the token right after 'namespace' is the namespace name
        name_tok = tk.next
        mockToken = DataStruct(name_tok.file, name_tok.linenr, name_tok.str, name_tok.column)
        for exp in conf.namespace:
            evalExpr(conf.namespace, exp, mockToken, 'Namespace')
|
|
|
|
|
|
|
|
def check_variable_naming(conf, cfg):
    """Check local variable names against type prefixes and RE_VARNAME.

    Global, Public and Private variables are handled by check_gpp_naming
    and are skipped here.
    """
    for var in cfg.variables:
        if not var.nameToken:
            continue
        if var.access in ('Global', 'Public', 'Private'):
            continue
        # walk backwards over pure-'*' tokens to rebuild the pointer type
        prev = var.nameToken.previous
        varType = prev.str
        while "*" in varType and len(varType.replace("*", "")) == 0:
            prev = prev.previous
            varType = prev.str + varType

        if args.debugprint:
            print("Variable Name: " + str(var.nameToken.str))
            print("original Type Name: " + str(var.nameToken.valueType.originalTypeName))
            print("Type Name: " + var.nameToken.valueType.type)
            print("Sign: " + str(var.nameToken.valueType.sign))
            print("variable type: " + varType)
            print("\n")
            print("\t-- {} {}".format(varType, str(var.nameToken.str)))

        varName = var.nameToken.str
        if conf.skip_one_char_variables and len(varName) == 1:
            continue
        if varType in conf.variable_prefixes:
            prefix = conf.variable_prefixes[varType]
            if not varName.startswith(prefix):
                reportNamingError(var.typeStartToken,
                                  'Variable ' + varName + ' violates naming convention',
                                  column=var.nameToken.column)

        mockToken = DataStruct(var.typeStartToken.file, var.typeStartToken.linenr, varName, var.nameToken.column)
        for exp in conf.variable:
            evalExpr(conf.variable, exp, mockToken, 'Variable')
|
|
|
|
|
|
|
|
# Naming check for Global, Private and Public member variables
|
|
|
|
def check_gpp_naming(conf_list, cfg, access, message):
    """Check Global, Private or Public member variable names.

    Only variables whose access level equals 'access' are tested against
    the REs in conf_list; 'message' names the variable kind in reports.
    """
    for var in cfg.variables:
        if var.access != access:
            continue
        name = var.nameToken.str
        mockToken = DataStruct(var.typeStartToken.file, var.typeStartToken.linenr, name, var.nameToken.column)
        for exp in conf_list:
            evalExpr(conf_list, exp, mockToken, message)
|
2019-01-09 18:16:51 +01:00
|
|
|
|
2024-01-04 16:26:54 +01:00
|
|
|
def check_function_naming(conf, cfg):
    """Check function names against return-type prefixes and RE_FUNCTIONNAME.

    Constructors/destructors are class names and are checked by
    check_class_naming instead.
    """
    for token in cfg.tokenlist:
        if not token.function:
            continue
        if token.function.type in ('Constructor', 'Destructor', 'CopyConstructor', 'MoveConstructor'):
            continue
        # walk backwards over pure-'*' tokens to rebuild the return type
        prev = token.previous
        retval = prev.str
        while "*" in retval and len(retval.replace("*", "")) == 0:
            prev = prev.previous
            retval = prev.str + retval
        if args.debugprint:
            print("\t:: {} {}".format(retval, token.function.name))

        if retval and retval in conf.function_prefixes:
            if not token.function.name.startswith(conf.function_prefixes[retval]):
                reportNamingError(token, 'Function ' + token.function.name + ' violates naming convention', column=token.column)
        mockToken = DataStruct(token.file, token.linenr, token.function.name, token.column)
        for exp in conf.function_name:
            evalExpr(conf.function_name, exp, mockToken, 'Function')
|
|
|
|
|
|
|
|
def check_class_naming(conf, cfg):
    """Check class names via their constructor/destructor definitions."""
    for fnc in cfg.functions:
        if fnc.type not in ('Constructor', 'Destructor'):
            continue
        mockToken = DataStruct(fnc.tokenDef.file, fnc.tokenDef.linenr, fnc.name, fnc.tokenDef.column)
        for exp in conf.class_name:
            evalExpr(conf.class_name, exp, mockToken, 'Class ' + fnc.type)
|
|
|
|
|
|
|
|
def process_data(conf,data):
    """Run every configured naming check over one parsed dump file.

    conf is the Config object from loadConfig(); data is a
    cppcheckdata.CppcheckData instance. Checks whose config entry is unset
    (None) are skipped entirely.
    """
    # file and namespace checks operate on the whole dump, not per configuration
    if conf.file:
        check_file_naming(conf,data)

    if conf.namespace:
        check_namespace_naming(conf,data)

    # Collect header files that must carry an include guard; files are removed
    # from this list by check_include_guards as their guards are found.
    # include_guard_header_re is a module global set in process().
    unguarded_include_files = []
    if conf.include_guard and conf.include_guard.get('required',1):
        unguarded_include_files = [fn for fn in data.files if re.match(include_guard_header_re,fn)]

    for cfg in data.configurations:
        if not args.cli:
            print('Checking config %s...' % cfg.name)
        if conf.variable:
            check_variable_naming(conf,cfg)
        if conf.private_member:
            check_gpp_naming(conf.private_member,cfg,'Private','Private member variable')
        if conf.public_member:
            check_gpp_naming(conf.public_member,cfg,'Public','Public member variable')
        if conf.global_variable:
            check_gpp_naming(conf.global_variable,cfg,'Global','Global variable')
        if conf.function_name:
            check_function_naming(conf,cfg)
        if conf.class_name:
            check_class_naming(conf,cfg)
        if conf.include_guard:
            check_include_guards(conf,cfg,unguarded_include_files)

    # anything still listed here never had its guard detected in any config
    for fn in unguarded_include_files:
        mockToken = DataStruct(fn,0,os.path.basename(fn))
        reportNamingError(mockToken,'Missing include guard','includeGuardMissing')
|
2019-11-12 09:47:48 +01:00
|
|
|
|
2018-11-06 20:46:07 +01:00
|
|
|
if __name__ == "__main__":
    # cppcheckdata.ArgumentParser supplies the common addon options
    # (dumpfile arguments, --cli, ...) shared by all cppcheck addons.
    parser = cppcheckdata.ArgumentParser()
    parser.add_argument("--debugprint", action="store_true", default=False,
                        help="Add debug prints")
    parser.add_argument("--configfile", type=str, default="namingng.config.json",
                        help="Naming check config file")

    # NOTE: 'args' is read as a module global by several check functions.
    args = parser.parse_args()
    process(args.dumpfile, args.configfile)

    sys.exit(0)
|