#!/usr/bin/python
#
# Cppcheck - A tool for static C/C++ code analysis
# Copyright (C) 2007-2016 Cppcheck team.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.

import io
import os
import sys
import re
import glob
import argparse


class MatchCompiler:
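    """Compile Token::Match(), Token::simpleMatch(), Token::findmatch() and
    Token::findsimplematch() calls found in Cppcheck source files into
    dedicated static C++ functions, so the string patterns do not have to be
    parsed again and again at runtime."""
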
    def __init__(self, verify_mode=False, show_skipped=False):
        self._verifyMode = verify_mode
        self._showSkipped = show_skipped
        self._reset()

    def _reset(self):
        self._rawMatchFunctions = []
        self._matchFunctionCache = {}

    def _generateCacheSignature(
            self, pattern, endToken=None, varId=None, isFindMatch=False):
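        """Build the cache key used to detect previously compiled patterns.

        The signature encodes the pattern plus whether an end token, a varid
        and findmatch-style matching are involved. Illustrative example:
        _generateCacheSignature('%num% (', varId='varid') returns
        '%num% (|NO-ENDTOKEN|VARID|NORMALMATCH'.
        """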
        sig = pattern

        if endToken:
            sig += '|ENDTOKEN'
        else:
            sig += '|NO-ENDTOKEN'

        if varId:
            sig += '|VARID'
        else:
            sig += '|NO-VARID'

        if isFindMatch:
            sig += '|ISFINDMATCH'
        else:
            sig += '|NORMALMATCH'

        return sig

    def _lookupMatchFunctionId(
            self, pattern, endToken=None, varId=None, isFindMatch=False):
        signature = self._generateCacheSignature(
            pattern, endToken, varId, isFindMatch)

        if signature in self._matchFunctionCache:
            return self._matchFunctionCache[signature]

        return None

    def _insertMatchFunctionId(
            self, id, pattern, endToken=None, varId=None, isFindMatch=False):
        signature = self._generateCacheSignature(
            pattern, endToken, varId, isFindMatch)

        # function signature should not be in the cache
        assert(
            self._lookupMatchFunctionId(
                pattern,
                endToken,
                varId,
                isFindMatch) is None)

        self._matchFunctionCache[signature] = id

    def _compileCmd(self, tok):
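        """Translate a single pattern token into a C++ condition on 'tok'.

        Illustrative examples of the generated C++ snippets:
            '%num%'   -> 'tok->isNumber()'
            '%varid%' -> '(tok->isName() && tok->varId()==varid)'
            ';'       -> '(tok->str()==MatchCompiler::makeConstString(";"))'
        """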
        if tok == '%any%':
            return 'true'
        elif tok == '%assign%':
            return 'tok->isAssignmentOp()'
        elif tok == '%bool%':
            return 'tok->isBoolean()'
        elif tok == '%char%':
            return '(tok->tokType()==Token::eChar)'
        elif tok == '%comp%':
            return 'tok->isComparisonOp()'
        elif tok == '%num%':
            return 'tok->isNumber()'
        elif tok == '%cop%':
            return 'tok->isConstOp()'
        elif tok == '%op%':
            return 'tok->isOp()'
        elif tok == '%or%':
            return '(tok->tokType() == Token::eBitOp && tok->str()==MatchCompiler::makeConstString("|"))'
        elif tok == '%oror%':
            return '(tok->tokType() == Token::eLogicalOp && tok->str()==MatchCompiler::makeConstString("||"))'
        elif tok == '%str%':
            return '(tok->tokType()==Token::eString)'
        elif tok == '%type%':
            return '(tok->isName() && tok->varId()==0U && !tok->isKeyword())'
        elif tok == '%name%':
            return 'tok->isName()'
        elif tok == '%var%':
            return '(tok->varId() != 0)'
        elif tok == '%varid%':
            return '(tok->isName() && tok->varId()==varid)'
        elif (len(tok) > 2) and (tok[0] == "%"):
            print("unhandled:" + tok)

        return (
            '(tok->str()==MatchCompiler::makeConstString("' + tok + '"))'
        )

    def _compilePattern(self, pattern, nr, varid,
                        isFindMatch=False, tokenType="const Token"):
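        """Compile one Token::Match pattern into a C++ match function.

        For a pattern such as "%num% +" and nr=1 the generated code looks
        roughly like this (illustrative):

            // pattern: %num% +
            static bool match1(const Token* tok) {
                if (!tok || !tok->isNumber())
                    return false;
                tok = tok->next();
                if (!tok || !(tok->str()==MatchCompiler::makeConstString("+")))
                    return false;
                return true;
            }

        When isFindMatch is True only the loop body for a findmatch function
        is emitted and 'continue;' is used instead of 'return false;'.
        """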
        ret = ''
        returnStatement = ''

        if isFindMatch:
            ret = '\n    ' + tokenType + ' * tok = start_tok;\n'
            returnStatement = 'continue;\n'
        else:
            arg2 = ''
            if varid:
                arg2 = ', const unsigned int varid'

            ret = '// pattern: ' + pattern + '\n'
            ret += 'static bool match' + \
                str(nr) + '(' + tokenType + '* tok' + arg2 + ') {\n'
            returnStatement = 'return false;\n'

        tokens = pattern.split(' ')
        gotoNextToken = ''
        checked_varid = False
        for tok in tokens:
            if tok == '':
                continue
            ret += gotoNextToken
            gotoNextToken = '    tok = tok->next();\n'

            # if varid is provided, check that it's non-zero on first use
            if varid and tok.find('%varid%') != -1 and checked_varid is False:
                ret += '    if (varid==0U)\n'
                ret += '        throw InternalError(tok, "Internal error. Token::Match called with varid 0. Please report this to Cppcheck developers");\n'
                checked_varid = True

            # [abc]
            if (len(tok) > 2) and (tok[0] == '[') and (tok[-1] == ']'):
                ret += '    if (!tok || tok->str().size()!=1U || !strchr("' + tok[1:-1] + '", tok->str()[0]))\n'
                ret += '        ' + returnStatement

            # a|b|c
            elif tok.find('|') > 0:
                tokens2 = tok.split('|')
                logicalOp = ' || '
                if "" in tokens2:
                    ret += '    if (tok && ('
                else:
                    ret += '    if (!tok || !('
                first = True
                for tok2 in tokens2:
                    if tok2 == '':
                        continue
                    if not first:
                        ret += logicalOp
                    first = False
                    ret += self._compileCmd(tok2)

                ret += '))\n'
                if "" in tokens2:
                    ret += '        tok = tok->next();\n'
                    gotoNextToken = ''
                else:
                    ret += '        ' + returnStatement

            # !!a
            elif tok[0:2] == "!!":
                ret += '    if (tok && tok->str() == MatchCompiler::makeConstString("' + tok[2:] + '"))\n'
                ret += '        ' + returnStatement
                gotoNextToken = '    tok = tok ? tok->next() : NULL;\n'

            else:
                ret += '    if (!tok || !' + self._compileCmd(tok) + ')\n'
                ret += '        ' + returnStatement

        if isFindMatch:
            ret += '    return start_tok;\n'
        else:
            ret += '    return true;\n'
            ret += '}\n'

        return ret

    def _compileFindPattern(self, pattern, findmatchnr, endToken, varId):
        more_args = ''
        endCondition = ''
        if endToken:
            more_args += ', const Token * end'
            endCondition = ' && start_tok != end'
        if varId:
            more_args += ', unsigned int varid'

        ret = '// pattern: ' + pattern + '\n'
        ret += 'template<class T> static T * findmatch' + \
            str(findmatchnr) + '(T * start_tok' + more_args + ') {\n'
        ret += '    for (; start_tok' + endCondition + \
            '; start_tok = start_tok->next()) {\n'

        ret += self._compilePattern(pattern, -1, varId, True, 'T')
        ret += '    }\n'
        ret += '    return NULL;\n}\n'

        return ret

    def parseMatch(self, line, pos1):
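        """Parse the function call starting at line[pos1:] into its pieces.

        Returns None if no complete call is found, otherwise a list whose
        first element is the whole call text, followed by one element per
        argument. Illustrative example:
        parseMatch('Token::Match(tok, "foo")', 0) returns
        ['Token::Match(tok, "foo")', 'tok', ' "foo"'].
        """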
        parlevel = 0
        args = []
        argstart = 0
        pos = pos1
        inString = False
        while pos < len(line):
            if inString:
                if line[pos] == '\\':
                    pos += 1
                elif line[pos] == '"':
                    inString = False
            elif line[pos] == '"':
                inString = True
            elif line[pos] == '(':
                parlevel += 1
                if parlevel == 1:
                    argstart = pos + 1
            elif line[pos] == ')':
                parlevel -= 1
                if parlevel == 0:
                    ret = []
                    ret.append(line[pos1:pos + 1])
                    for arg in args:
                        ret.append(arg)
                    ret.append(line[argstart:pos])
                    return ret
            elif line[pos] == ',' and parlevel == 1:
                args.append(line[argstart:pos])
                argstart = pos + 1
            pos += 1

        return None

    def _isInString(self, line, pos1):
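        """Return True if the character at line[pos1] lies inside a string
        literal, judging by the unescaped double quotes seen before it."""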
        pos = 0
        inString = False
        while pos != pos1:
            if line[pos] == '\\':
                pos += 1
            elif line[pos] == '"':
                inString = not inString
            pos += 1
        return inString

    def _parseStringComparison(self, line, pos1):
        startPos = 0
        endPos = 0
        pos = pos1
        inString = False
        while pos < len(line):
            if inString:
                if line[pos] == '\\':
                    pos += 1
                elif line[pos] == '"':
                    inString = False
                    endPos = pos + 1
                    return (startPos, endPos)
            elif line[pos] == '"':
                startPos = pos
                inString = True
            pos += 1

        return None

    def _compileVerifyTokenMatch(
            self, is_simplematch, verifyNumber, pattern, patternNumber, varId):
        more_args = ''
        if varId:
            more_args = ', const unsigned int varid'

        ret = 'static bool match_verify' + \
            str(verifyNumber) + '(const Token *tok' + more_args + ') {\n'

        origMatchName = 'Match'
        if is_simplematch:
            origMatchName = 'simpleMatch'
            assert(varId is None)

        ret += '    bool res_compiled_match = match' + \
            str(patternNumber) + '(tok'
        if varId:
            ret += ', varid'
        ret += ');\n'

        ret += '    bool res_parsed_match = Token::' + \
            origMatchName + '(tok, "' + pattern + '"'
        if varId:
            ret += ', varid'
        ret += ');\n'

        ret += '\n'
        # Don't use assert() here, it's disabled for optimized builds.
        # We also need to verify builds in 'release' mode
        ret += '    if (res_parsed_match != res_compiled_match) {\n'
        # ret += '        std::cout << "res_parsed_match' + str(verifyNumber) + ': " << res_parsed_match << ", res_compiled_match: " << res_compiled_match << "\\n";\n'
        # ret += '        if (tok)\n'
        # ret += '            std::cout << "tok: " << tok->str();\n'
        # ret += '        if (tok->next())\n'
        # ret += '            std::cout << "tok next: " << tok->next()->str();\n'
        ret += '        throw InternalError(tok, "Internal error. compiled match returned different result than parsed match: ' + pattern + '");\n'
        ret += '    }\n'
        ret += '    return res_compiled_match;\n'
        ret += '}\n'

        return ret

    def _replaceSpecificTokenMatch(
            self, is_simplematch, line, start_pos, end_pos, pattern, tok, varId):
        more_args = ''
        if varId:
            more_args = ',' + varId

        # Compile function or use previously compiled one
        patternNumber = self._lookupMatchFunctionId(
            pattern, None, varId, False)

        if patternNumber is None:
            patternNumber = len(self._rawMatchFunctions) + 1
            self._insertMatchFunctionId(
                patternNumber,
                pattern,
                None,
                varId,
                False)
            self._rawMatchFunctions.append(
                self._compilePattern(pattern, patternNumber, varId))

        functionName = "match"
        if self._verifyMode:
            verifyNumber = len(self._rawMatchFunctions) + 1
            self._rawMatchFunctions.append(
                self._compileVerifyTokenMatch(
                    is_simplematch,
                    verifyNumber,
                    pattern,
                    patternNumber,
                    varId))

            # inject verify function
            functionName = "match_verify"
            patternNumber = verifyNumber

        return (
            line[:start_pos] + functionName + str(
                patternNumber) + '(' + tok + more_args + ')' + line[start_pos + end_pos:]
        )

    def _replaceTokenMatch(self, line, linenr, filename):
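        """Replace Token::Match()/Token::simpleMatch() calls in one line.

        Each call with a constant pattern is replaced by a call to a compiled
        function, e.g. (illustrative, the number depends on how many patterns
        have been compiled so far):
        'if (Token::Match(tok, "foo ("))' becomes 'if (match1(tok))'.
        Calls whose pattern is not a plain string literal are left unchanged.
        """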
        while True:
            is_simplematch = False
            pos1 = line.find('Token::Match(')
            if pos1 == -1:
                is_simplematch = True
                pos1 = line.find('Token::simpleMatch(')
                if pos1 == -1:
                    break

            res = self.parseMatch(line, pos1)
            if res is None:
                break

            # assert that Token::Match has either 2 or 3 arguments
            assert(len(res) == 3 or len(res) == 4)

            end_pos = len(res[0])
            tok = res[1]
            raw_pattern = res[2]
            varId = None
            if len(res) == 4:
                varId = res[3]

            res = re.match(r'\s*"((?:.|\\")*?)"\s*$', raw_pattern)
            if res is None:
                if self._showSkipped:
                    print(filename + ":" + str(linenr) + " skipping match pattern:" + raw_pattern)
                break  # Non-const pattern - bailout

            pattern = res.group(1)
            line = self._replaceSpecificTokenMatch(
                is_simplematch,
                line,
                pos1,
                end_pos,
                pattern,
                tok,
                varId)

        return line

    def _compileVerifyTokenFindMatch(
            self, is_findsimplematch, verifyNumber, pattern, patternNumber, endToken, varId):
        more_args = ''
        if endToken:
            more_args += ', const Token * endToken'
        if varId:
            more_args += ', const unsigned int varid'

        ret = 'template < class T > static T * findmatch_verify' + \
            str(verifyNumber) + '(T * tok' + more_args + ') {\n'

        origFindMatchName = 'findmatch'
        if is_findsimplematch:
            origFindMatchName = 'findsimplematch'
            assert(varId is None)

        ret += '    T * res_compiled_findmatch = findmatch' + \
            str(patternNumber) + '(tok'
        if endToken:
            ret += ', endToken'
        if varId:
            ret += ', varid'
        ret += ');\n'

        ret += '    T * res_parsed_findmatch = Token::' + \
            origFindMatchName + '(tok, "' + pattern + '"'
        if endToken:
            ret += ', endToken'
        if varId:
            ret += ', varid'
        ret += ');\n'

        ret += '\n'
        # Don't use assert() here, it's disabled for optimized builds.
        # We also need to verify builds in 'release' mode
        ret += '    if (res_parsed_findmatch != res_compiled_findmatch) {\n'
        ret += '        throw InternalError(tok, "Internal error. compiled findmatch returned different result than parsed findmatch: ' + pattern + '");\n'
        ret += '    }\n'
        ret += '    return res_compiled_findmatch;\n'
        ret += '}\n'

        return ret

    def _replaceSpecificFindTokenMatch(
            self, is_findsimplematch, line, start_pos, end_pos, pattern, tok, endToken, varId):
        more_args = ''
        if endToken:
            more_args += ',' + endToken
        if varId:
            more_args += ',' + varId

        # Compile function or use previously compiled one
        findMatchNumber = self._lookupMatchFunctionId(
            pattern, endToken, varId, True)

        if findMatchNumber is None:
            findMatchNumber = len(self._rawMatchFunctions) + 1
            self._insertMatchFunctionId(
                findMatchNumber,
                pattern,
                endToken,
                varId,
                True)
            self._rawMatchFunctions.append(
                self._compileFindPattern(
                    pattern,
                    findMatchNumber,
                    endToken,
                    varId))

        functionName = "findmatch"
        if self._verifyMode:
            verifyNumber = len(self._rawMatchFunctions) + 1
            self._rawMatchFunctions.append(
                self._compileVerifyTokenFindMatch(
                    is_findsimplematch,
                    verifyNumber,
                    pattern,
                    findMatchNumber,
                    endToken,
                    varId))

            # inject verify function
            functionName = "findmatch_verify"
            findMatchNumber = verifyNumber

        return (
            line[:start_pos] + functionName + str(
                findMatchNumber) + '(' + tok + more_args + ') ' + line[start_pos + end_pos:]
        )

    def _replaceTokenFindMatch(self, line, linenr, filename):
        pos1 = 0
        while True:
            is_findsimplematch = True
            pos1 = line.find('Token::findsimplematch(')
            if pos1 == -1:
                is_findsimplematch = False
                pos1 = line.find('Token::findmatch(')
                if pos1 == -1:
                    break

            res = self.parseMatch(line, pos1)
            if res is None:
                break

            # assert that Token::find(simple)match has either 2, 3 or 4
            # arguments
            assert(len(res) >= 3 and len(res) < 6)

            g0 = res[0]
            tok = res[1]
            pattern = res[2]

            # Check for varId
            varId = None
            if not is_findsimplematch and g0.find("%varid%") != -1:
                if len(res) == 5:
                    varId = res[4]
                else:
                    varId = res[3]

            # endToken support. We resolve the overloaded type by checking if varId is used or not.
            # Function prototypes:
            # Token *findsimplematch(const Token *tok, const char pattern[]);
            # Token *findsimplematch(const Token *tok, const char pattern[], const Token *end);
            # Token *findmatch(const Token *tok, const char pattern[], unsigned int varId = 0);
            # Token *findmatch(const Token *tok, const char pattern[], const
            # Token *end, unsigned int varId = 0);
            endToken = None
            if ((is_findsimplematch and len(res) == 4) or
                    (not is_findsimplematch and varId and (len(res) == 5)) or
                    (not is_findsimplematch and varId is None and len(res) == 4)):
                endToken = res[3]

            res = re.match(r'\s*"((?:.|\\")*?)"\s*$', pattern)
            if res is None:
                if self._showSkipped:
                    print(filename + ":" + str(linenr) + " skipping findmatch pattern:" + pattern)
                break  # Non-const pattern - bailout

            pattern = res.group(1)
            line = self._replaceSpecificFindTokenMatch(
                is_findsimplematch,
                line,
                pos1,
                len(g0),
                pattern,
                tok,
                endToken,
                varId)

        return line

    def _replaceCStrings(self, line):
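        """Wrap plain C string literals used in == / != comparisons.

        Illustrative example:
        'tok->str() == "abc"' becomes
        'tok->str() == MatchCompiler::makeConstString("abc")'.
        """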
        while True:
            match = re.search('(==|!=) *"', line)
            if not match:
                break

            if self._isInString(line, match.start()):
                break

            res = self._parseStringComparison(line, match.start())
            if res is None:
                break

            startPos = res[0]
            endPos = res[1]
            text = line[startPos + 1:endPos - 1]
            line = line[:startPos] + 'MatchCompiler::makeConstStringBegin' + text + 'MatchCompiler::makeConstStringEnd' + line[endPos:]
        line = line.replace('MatchCompiler::makeConstStringBegin', 'MatchCompiler::makeConstString("')
        line = line.replace('MatchCompiler::makeConstStringEnd', '")')
        return line

    def convertFile(self, srcname, destname, line_directive):
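        """Convert one source file.

        Reads srcname, compiles the Token::Match/simpleMatch/findmatch/
        findsimplematch calls it contains, prepends the required #include
        lines together with the generated match functions, and writes the
        result to destname. If line_directive is True, a '#line 1 "srcname"'
        directive is emitted so diagnostics point back at the original file.
        """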
        self._reset()

        fin = io.open(srcname, "rt", encoding="utf-8")
        srclines = fin.readlines()
        fin.close()

        header = '#include "token.h"\n'
        header += '#include "errorlogger.h"\n'
        header += '#include "matchcompiler.h"\n'
        header += '#include <string>\n'
        header += '#include <cstring>\n'
        # header += '#include <iostream>\n'
        code = ''

        linenr = 0
        for line in srclines:
            linenr += 1
            # Compile Token::Match and Token::simpleMatch
            line = self._replaceTokenMatch(line, linenr, srcname)

            # Compile Token::findmatch and Token::findsimplematch
            line = self._replaceTokenFindMatch(line, linenr, srcname)

            # Cache plain C-strings in C++ strings
            line = self._replaceCStrings(line)

            code += line

        # Compute matchFunctions
        strFunctions = ''
        for function in self._rawMatchFunctions:
            strFunctions += function

        lineno = ''
        if line_directive:
            lineno = '#line 1 "' + srcname + '"\n'

        fout = io.open(destname, 'wt', encoding="utf-8")
        fout.write(header + strFunctions + lineno + code)
        fout.close()


def main():
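    """Command line entry point.

    Typical invocation (illustrative), run from the Cppcheck source root:
        python matchcompiler.py --read-dir=lib --write-dir=build
    compiles every lib/*.cpp file and writes the result to build/.
    """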
    # Main program

    # Argument handling
    parser = argparse.ArgumentParser(
        description='Compile Token::Match() calls into native C++ code')
    parser.add_argument('--verify', action='store_true', default=False,
                        help='verify compiled matches against on-the-fly parser. Slow!')
    parser.add_argument('--show-skipped', action='store_true', default=False,
                        help='show skipped (non-static) patterns')
    parser.add_argument('--read-dir', default="lib",
                        help='directory from which files are read')
    parser.add_argument('--write-dir', default="build",
                        help='directory into which files are written')
    parser.add_argument('--prefix', default="",
                        help='prefix for build files')
    parser.add_argument('--line', action='store_true', default=False,
                        help='add line directives pointing at the input files to the build files')
    parser.add_argument('file', nargs='*',
                        help='file to compile')
    args = parser.parse_args()
    lib_dir = args.read_dir
    build_dir = args.write_dir
    line_directive = args.line
    files = args.file

    # Check if we are invoked from the right place
    if not os.path.exists(lib_dir):
        print('Directory "' + lib_dir + '" not found.')
        sys.exit(-1)

    # Create build directory if needed
    if not os.path.exists(build_dir):
        os.makedirs(build_dir)
    if not os.path.isdir(build_dir):
        raise Exception(build_dir + ' is not a directory')

    mc = MatchCompiler(verify_mode=args.verify,
                       show_skipped=args.show_skipped)

    if not files:
        # select all *.cpp files in lib_dir
        for f in glob.glob(lib_dir + '/*.cpp'):
            files.append(f[len(lib_dir) + 1:])

    # convert files
    for fi in files:
        pi = lib_dir + '/' + fi
        fo = args.prefix + fi
        po = build_dir + '/' + fo
        print(pi + ' => ' + po)
        mc.convertFile(pi, po, line_directive)


if __name__ == '__main__':
    main()