#!/usr/bin/env python
#
# Cppcheck - A tool for static C/C++ code analysis
# Copyright (C) 2007-2021 Cppcheck team.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.

import io
import os
import sys
import re
import glob
import argparse
import errno

tokTypes = {
    '+': ['eArithmeticalOp'],
    '-': ['eArithmeticalOp'],
    '*': ['eArithmeticalOp'],
    '/': ['eArithmeticalOp'],
    '%': ['eArithmeticalOp'],
    '>>': ['eArithmeticalOp'],
    '<<': ['eArithmeticalOp'],
    '=': ['eAssignmentOp'],
    '+=': ['eAssignmentOp'],
    '-=': ['eAssignmentOp'],
    '*=': ['eAssignmentOp'],
    '/=': ['eAssignmentOp'],
    '%=': ['eAssignmentOp'],
    '&=': ['eAssignmentOp'],
    '|=': ['eAssignmentOp'],
    '^=': ['eAssignmentOp'],
    '&': ['eBitOp'],
    '^': ['eBitOp'],
    '~': ['eBitOp'],
    'true': ['eBoolean'],
    'false': ['eBoolean'],
    '{': ['eBracket'],
    '}': ['eBracket'],
    '<': ['eBracket', 'eComparisonOp'],
    '>': ['eBracket', 'eComparisonOp'],
    '==': ['eComparisonOp'],
    '!=': ['eComparisonOp'],
    '<=': ['eComparisonOp'],
    '>=': ['eComparisonOp'],
    '<=>': ['eComparisonOp'],
    '...': ['eEllipsis'],
    ',': ['eExtendedOp'],
    '?': ['eExtendedOp'],
    ':': ['eExtendedOp'],
    '(': ['eExtendedOp'],
    ')': ['eExtendedOp'],
    '[': ['eExtendedOp', 'eLambda'],
    ']': ['eExtendedOp', 'eLambda'],
    '++': ['eIncDecOp'],
    '--': ['eIncDecOp'],
    'asm': ['eKeyword'],
    'auto': ['eKeyword', 'eType'],
    'break': ['eKeyword'],
    'case': ['eKeyword'],
    'const': ['eKeyword'],
    'continue': ['eKeyword'],
    'default': ['eKeyword'],
    'do': ['eKeyword'],
    'else': ['eKeyword'],
    'enum': ['eKeyword'],
    'extern': ['eKeyword'],
    'for': ['eKeyword'],
    'goto': ['eKeyword'],
    'if': ['eKeyword'],
    'inline': ['eKeyword'],
    'register': ['eKeyword'],
    'restrict': ['eKeyword'],
    'return': ['eKeyword'],
    'sizeof': ['eKeyword'],
    'static': ['eKeyword'],
    'struct': ['eKeyword'],
    'switch': ['eKeyword'],
    'typedef': ['eKeyword'],
    'union': ['eKeyword'],
    'volatile': ['eKeyword'],
    'while': ['eKeyword'],
    'void': ['eKeyword', 'eType'],
    '&&': ['eLogicalOp'],
    '!': ['eLogicalOp']
}
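
# Note (illustration only, not executed): tokens that map to more than one type,
# e.g. tokTypes['<'] == ['eBracket', 'eComparisonOp'], get their alternatives
# OR-ed together by _compileCmd() below, presumably because the tokenizer may
# classify such tokens either way.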


class MatchCompiler:
    """Rewrite Token::Match()/findmatch() calls in Cppcheck sources into generated C++ match functions."""

    def __init__(self, verify_mode=False, show_skipped=False):
        self._verifyMode = verify_mode
        self._showSkipped = show_skipped
        self._reset()

    def _reset(self):
        self._rawMatchFunctions = []
        self._matchFunctionCache = {}

    @staticmethod
    def _generateCacheSignature(
            pattern, endToken=None, varId=None, isFindMatch=False):
        sig = pattern

        if endToken:
            sig += '|ENDTOKEN'
        else:
            sig += '|NO-ENDTOKEN'

        if varId:
            sig += '|VARID'
        else:
            sig += '|NO-VARID'

        if isFindMatch:
            sig += '|ISFINDMATCH'
        else:
            sig += '|NORMALMATCH'

        return sig
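
    # For illustration (hypothetical values): _generateCacheSignature("%var% ( )", varId="tok->varId()")
    # returns "%var% ( )|NO-ENDTOKEN|VARID|NORMALMATCH", which is the key used by the
    # match-function cache handled below.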

    def _lookupMatchFunctionId(
            self, pattern, endToken=None, varId=None, isFindMatch=False):
        signature = self._generateCacheSignature(
            pattern, endToken, varId, isFindMatch)

        if signature in self._matchFunctionCache:
            return self._matchFunctionCache[signature]

        return None

    def _insertMatchFunctionId(
            self, id, pattern, endToken=None, varId=None, isFindMatch=False):
        signature = self._generateCacheSignature(
            pattern, endToken, varId, isFindMatch)

        # function signature should not be in the cache
        assert(
            self._lookupMatchFunctionId(
                pattern,
                endToken,
                varId,
                isFindMatch) is None)

        self._matchFunctionCache[signature] = id

    @staticmethod
    def _compileCmd(tok):
        if tok == '%any%':
            return 'true'
        elif tok == '%assign%':
            return 'tok->isAssignmentOp()'
        elif tok == '%bool%':
            return 'tok->isBoolean()'
        elif tok == '%char%':
            return '(tok->tokType() == Token::eChar)'
        elif tok == '%comp%':
            return 'tok->isComparisonOp()'
        elif tok == '%num%':
            return 'tok->isNumber()'
        elif tok == '%cop%':
            return 'tok->isConstOp()'
        elif tok == '%op%':
            return 'tok->isOp()'
        elif tok == '%or%':
            return '(tok->tokType() == Token::eBitOp && tok->str() == MatchCompiler::makeConstString("|") )'
        elif tok == '%oror%':
            return '(tok->tokType() == Token::eLogicalOp && tok->str() == MatchCompiler::makeConstString("||"))'
        elif tok == '%str%':
            return '(tok->tokType() == Token::eString)'
        elif tok == '%type%':
            return '(tok->isName() && tok->varId() == 0U && (tok->str() != MatchCompiler::makeConstString("delete") || !tok->isKeyword()))'
        elif tok == '%name%':
            return 'tok->isName()'
        elif tok == '%var%':
            return '(tok->varId() != 0)'
        elif tok == '%varid%':
            return '(tok->isName() && tok->varId() == varid)'
        elif (len(tok) > 2) and (tok[0] == "%"):
            print("unhandled:" + tok)
        elif tok in tokTypes:
            cond = ' || '.join(['tok->tokType() == Token::{}'.format(tokType) for tokType in tokTypes[tok]])
            return '(({cond}) && tok->str() == MatchCompiler::makeConstString("{tok}"))'.format(cond=cond, tok=tok)

        return (
            '(tok->str() == MatchCompiler::makeConstString("' + tok + '"))'
        )
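
    # For example (illustration only): _compileCmd('%num%') returns the C++ fragment
    # 'tok->isNumber()', and _compileCmd('if') returns
    # '((tok->tokType() == Token::eKeyword) && tok->str() == MatchCompiler::makeConstString("if"))'.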

    def _compilePattern(self, pattern, nr, varid,
                        isFindMatch=False, tokenType="const Token"):
        if isFindMatch:
            ret = '\n    ' + tokenType + ' * tok = start_tok;\n'
            returnStatement = 'continue;\n'
        else:
            arg2 = ''
            if varid:
                arg2 = ', const int varid'

            ret = '// pattern: ' + pattern + '\n'
            ret += 'static inline bool match' + \
                str(nr) + '(' + tokenType + '* tok' + arg2 + ') {\n'
            returnStatement = 'return false;\n'

        tokens = pattern.split(' ')
        gotoNextToken = ''
        checked_varid = False
        for tok in tokens:
            if tok == '':
                continue
            ret += gotoNextToken
            gotoNextToken = '    tok = tok->next();\n'

            # if varid is provided, check that it's non-zero on first use
            if varid and '%varid%' in tok and not checked_varid:
                ret += '    if (varid==0U)\n'
                ret += '        throw InternalError(tok, "Internal error. Token::Match called with varid 0. ' +\
                    'Please report this to Cppcheck developers");\n'
                checked_varid = True

            # [abc]
            if (len(tok) > 2) and (tok[0] == '[') and (tok[-1] == ']'):
                ret += '    if (!tok || tok->str().size() != 1U || !strchr("' + tok[1:-1] + '", tok->str()[0]))\n'
                ret += '        ' + returnStatement

            # a|b|c
            elif tok.find('|') > 0:
                tokens2 = tok.split('|')
                logicalOp = ' || '
                if "" in tokens2:
                    ret += '    if (tok && ('
                else:
                    ret += '    if (!tok || !('
                first = True
                for tok2 in tokens2:
                    if tok2 == '':
                        continue
                    if not first:
                        ret += logicalOp
                    first = False
                    ret += self._compileCmd(tok2)

                ret += '))\n'
                if "" in tokens2:
                    ret += '        tok = tok->next();\n'
                    gotoNextToken = ''
                else:
                    ret += '        ' + returnStatement

            # !!a
            elif tok[0:2] == "!!":
                ret += '    if (tok && tok->str() == MatchCompiler::makeConstString("' + tok[2:] + '"))\n'
                ret += '        ' + returnStatement
                gotoNextToken = '    tok = tok ? tok->next() : nullptr;\n'

            else:
                negatedTok = "!" + self._compileCmd(tok)
                # fold !true => false ; !false => true
                # this avoids cppcheck warnings about condition always being true/false
                if negatedTok == "!false":
                    negatedTok = "true"
                elif negatedTok == "!true":
                    negatedTok = "false"
                ret += '    if (!tok || ' + negatedTok + ')\n'
                ret += '        ' + returnStatement

        if isFindMatch:
            ret += '    return start_tok;\n'
        else:
            ret += '    return true;\n'
        ret += '}\n'

        return ret
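
    # For illustration, _compilePattern("if (", 1, None) produces roughly:
    #
    #   // pattern: if (
    #   static inline bool match1(const Token* tok) {
    #       if (!tok || !((tok->tokType() == Token::eKeyword) && tok->str() == MatchCompiler::makeConstString("if")))
    #           return false;
    #       tok = tok->next();
    #       if (!tok || !((tok->tokType() == Token::eExtendedOp) && tok->str() == MatchCompiler::makeConstString("(")))
    #           return false;
    #       return true;
    #   }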

    def _compileFindPattern(self, pattern, findmatchnr, endToken, varId):
        more_args = ''
        endCondition = ''
        if endToken:
            more_args += ', const Token * end'
            endCondition = ' && start_tok != end'
        if varId:
            more_args += ', int varid'

        ret = '// pattern: ' + pattern + '\n'
        ret += 'template<class T> static inline T * findmatch' + \
            str(findmatchnr) + '(T * start_tok' + more_args + ') {\n'
        ret += '    for (; start_tok' + endCondition + \
            '; start_tok = start_tok->next()) {\n'

        ret += self._compilePattern(pattern, -1, varId, True, 'T')
        ret += '    }\n'
        ret += '    return nullptr;\n}\n'

        return ret
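
    # The emitted findmatchN() helper loops start_tok forward (optionally stopping at
    # 'end'), re-using the compiled pattern body with 'continue;' as its bail-out, and
    # returns the first matching token or nullptr.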

    @staticmethod
    def parseMatch(line, pos1):
        parlevel = 0
        args = []
        argstart = 0
        pos = pos1
        inString = False
        while pos < len(line):
            if inString:
                if line[pos] == '\\':
                    pos += 1
                elif line[pos] == '"':
                    inString = False
            elif line[pos] == '"':
                inString = True
            elif line[pos] == '(':
                parlevel += 1
                if parlevel == 1:
                    argstart = pos + 1
            elif line[pos] == ')':
                parlevel -= 1
                if parlevel == 0:
                    ret = [line[pos1:pos + 1]]
                    ret.extend(args)
                    ret.append(line[argstart:pos])
                    return ret
            elif line[pos] == ',' and parlevel == 1:
                args.append(line[argstart:pos])
                argstart = pos + 1
            pos += 1

        return None
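
    # For example (illustration only):
    #   parseMatch('Token::Match(tok, "foo")', 0)
    # returns ['Token::Match(tok, "foo")', 'tok', ' "foo"'] - the full call text,
    # followed by each argument (note the leading space kept on the last argument).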

    @staticmethod
    def _isInString(line, pos1):
        pos = 0
        inString = False
        while pos != pos1:
            if line[pos] == '\\':
                pos += 1
            elif line[pos] == '"':
                inString = not inString
            pos += 1
        return inString

    @staticmethod
    def _parseStringComparison(line, pos1):
        startPos = 0
        pos = pos1
        inString = False
        while pos < len(line):
            if inString:
                if line[pos] == '\\':
                    pos += 1
                elif line[pos] == '"':
                    inString = False
                    endPos = pos + 1
                    return startPos, endPos
            elif line[pos] == '"':
                startPos = pos
                inString = True
            pos += 1

        return None

    @staticmethod
    def _compileVerifyTokenMatch(
            is_simplematch, verifyNumber, pattern, patternNumber, varId):
        more_args = ''
        if varId:
            more_args = ', const int varid'

        ret = 'static inline bool match_verify' + \
            str(verifyNumber) + '(const Token *tok' + more_args + ') {\n'

        origMatchName = 'Match'
        if is_simplematch:
            origMatchName = 'simpleMatch'
            assert(varId is None)

        ret += '    bool res_compiled_match = match' + \
            str(patternNumber) + '(tok'
        if varId:
            ret += ', varid'
        ret += ');\n'

        ret += '    bool res_parsed_match = Token::' + \
            origMatchName + '(tok, "' + pattern + '"'
        if varId:
            ret += ', varid'
        ret += ');\n'

        ret += '\n'
        # Don't use assert() here, it's disabled for optimized builds.
        # We also need to verify builds in 'release' mode
        ret += '    if (res_parsed_match != res_compiled_match) {\n'
        # ret += '        std::cout << "res_parsed_match' + str(verifyNumber) +\
        #     ': " << res_parsed_match << ", res_compiled_match: " << res_compiled_match << "\\n";\n'
        # ret += '        if (tok)\n'
        # ret += '            std::cout << "tok: " << tok->str();\n'
        # ret += '        if (tok->next())\n'
        # ret += '            std::cout << "tok next: " << tok->next()->str();\n'
        ret += '        throw InternalError(tok, "Internal error. ' +\
            'Compiled match returned different result than parsed match: ' + pattern + '");\n'
        ret += '    }\n'
        ret += '    return res_compiled_match;\n'
        ret += '}\n'

        return ret
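
    # In --verify mode (see main() below) the emitted match_verifyN() wrapper runs both
    # the compiled matcher and the original Token::Match()/simpleMatch() parser and
    # throws InternalError whenever the two disagree.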

    def _replaceSpecificTokenMatch(
            self, is_simplematch, line, start_pos, end_pos, pattern, tok, varId):
        more_args = ''
        if varId:
            more_args = ',' + varId

        # Compile function or use previously compiled one
        patternNumber = self._lookupMatchFunctionId(
            pattern, None, varId, False)

        if patternNumber is None:
            patternNumber = len(self._rawMatchFunctions) + 1
            self._insertMatchFunctionId(
                patternNumber,
                pattern,
                None,
                varId,
                False)
            self._rawMatchFunctions.append(
                self._compilePattern(pattern, patternNumber, varId))

        functionName = "match"
        if self._verifyMode:
            verifyNumber = len(self._rawMatchFunctions) + 1
            self._rawMatchFunctions.append(
                self._compileVerifyTokenMatch(
                    is_simplematch,
                    verifyNumber,
                    pattern,
                    patternNumber,
                    varId))

            # inject verify function
            functionName = "match_verify"
            patternNumber = verifyNumber

        return (
            line[:start_pos] + functionName + str(
                patternNumber) + '(' + tok + more_args + ')' + line[start_pos + end_pos:]
        )
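
    # For illustration (pattern number hypothetical): a source line such as
    #   if (Token::Match(tok, "foo ("))
    # is rewritten to
    #   if (match1(tok))
    # and match1() is appended to self._rawMatchFunctions for emission later.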

    def _replaceTokenMatch(self, line, linenr, filename):
        for func in ('Match', 'simpleMatch'):
            is_simplematch = func == 'simpleMatch'
            pattern_start = 0
            while True:
                # skip comments
                if line.strip().startswith('//'):
                    break

                pos1 = line.find('Token::' + func + '(', pattern_start)
                if pos1 == -1:
                    break

                res = self.parseMatch(line, pos1)
                if res is None:
                    break

                # assert that Token::Match has either 2 or 3 arguments
                assert(len(res) == 3 or len(res) == 4)

                end_pos = len(res[0])
                tok = res[1]
                raw_pattern = res[2]
                varId = None
                if len(res) == 4:
                    varId = res[3]

                pattern_start = pos1 + end_pos
                res = re.match(r'\s*"((?:.|\\")*?)"\s*$', raw_pattern)
                if res is None:
                    if self._showSkipped:
                        print(filename + ":" + str(linenr) + " skipping match pattern:" + raw_pattern)
                    continue  # Non-const pattern - bailout

                pattern = res.group(1)
                orig_len = len(line)
                line = self._replaceSpecificTokenMatch(
                    is_simplematch,
                    line,
                    pos1,
                    end_pos,
                    pattern,
                    tok,
                    varId)
                pattern_start += len(line) - orig_len

        return line

    @staticmethod
    def _compileVerifyTokenFindMatch(
            is_findsimplematch, verifyNumber, pattern, patternNumber, endToken, varId):
        more_args = ''
        if endToken:
            more_args += ', const Token * endToken'
        if varId:
            more_args += ', const int varid'

        ret = 'template < class T > static inline T * findmatch_verify' + \
            str(verifyNumber) + '(T * tok' + more_args + ') {\n'

        origFindMatchName = 'findmatch'
        if is_findsimplematch:
            origFindMatchName = 'findsimplematch'
            assert(varId is None)

        ret += '    T * res_compiled_findmatch = findmatch' + \
            str(patternNumber) + '(tok'
        if endToken:
            ret += ', endToken'
        if varId:
            ret += ', varid'
        ret += ');\n'

        ret += '    T * res_parsed_findmatch = Token::' + \
            origFindMatchName + '(tok, "' + pattern + '"'
        if endToken:
            ret += ', endToken'
        if varId:
            ret += ', varid'
        ret += ');\n'

        ret += '\n'
        # Don't use assert() here, it's disabled for optimized builds.
        # We also need to verify builds in 'release' mode
        ret += '    if (res_parsed_findmatch != res_compiled_findmatch) {\n'
        ret += '        throw InternalError(tok, "Internal error. ' +\
            'Compiled findmatch returned different result than parsed findmatch: ' + pattern + '");\n'
        ret += '    }\n'
        ret += '    return res_compiled_findmatch;\n'
        ret += '}\n'

        return ret

    def _replaceSpecificFindTokenMatch(
            self, is_findsimplematch, line, start_pos, end_pos, pattern, tok, endToken, varId):
        more_args = ''
        if endToken:
            more_args += ',' + endToken
        if varId:
            more_args += ',' + varId

        # Compile function or use previously compiled one
        findMatchNumber = self._lookupMatchFunctionId(
            pattern, endToken, varId, True)

        if findMatchNumber is None:
            findMatchNumber = len(self._rawMatchFunctions) + 1
            self._insertMatchFunctionId(
                findMatchNumber,
                pattern,
                endToken,
                varId,
                True)
            self._rawMatchFunctions.append(
                self._compileFindPattern(
                    pattern,
                    findMatchNumber,
                    endToken,
                    varId))

        functionName = "findmatch"
        if self._verifyMode:
            verifyNumber = len(self._rawMatchFunctions) + 1
            self._rawMatchFunctions.append(
                self._compileVerifyTokenFindMatch(
                    is_findsimplematch,
                    verifyNumber,
                    pattern,
                    findMatchNumber,
                    endToken,
                    varId))

            # inject verify function
            functionName = "findmatch_verify"
            findMatchNumber = verifyNumber

        return (
            line[:start_pos] + functionName + str(
                findMatchNumber) + '(' + tok + more_args + ') ' + line[start_pos + end_pos:]
        )

    def _replaceTokenFindMatch(self, line, linenr, filename):
        while True:
            is_findsimplematch = True
            pos1 = line.find('Token::findsimplematch(')
            if pos1 == -1:
                is_findsimplematch = False
                pos1 = line.find('Token::findmatch(')
            if pos1 == -1:
                break

            res = self.parseMatch(line, pos1)
            if res is None:
                break

            # assert that Token::find(simple)match has either 2, 3 or 4 arguments
            assert(len(res) >= 3 and len(res) < 6)

            g0 = res[0]
            tok = res[1]
            pattern = res[2]

            # Check for varId
            varId = None
            if not is_findsimplematch and "%varid%" in g0:
                if len(res) == 5:
                    varId = res[4]
                else:
                    varId = res[3]

            # endToken support. We resolve the overloaded type by checking if varId is used or not.
            # Function prototypes:
            # Token *findsimplematch(const Token *tok, const char pattern[]);
            # Token *findsimplematch(const Token *tok, const char pattern[], const Token *end);
            # Token *findmatch(const Token *tok, const char pattern[], int varId = 0);
            # Token *findmatch(const Token *tok, const char pattern[], const
            # Token *end, int varId = 0);
            endToken = None
            if ((is_findsimplematch and len(res) == 4) or
                    (not is_findsimplematch and varId and (len(res) == 5)) or
                    (not is_findsimplematch and varId is None and len(res) == 4)):
                endToken = res[3]

            res = re.match(r'\s*"((?:.|\\")*?)"\s*$', pattern)
            if res is None:
                if self._showSkipped:
                    print(filename + ":" + str(linenr) + " skipping findmatch pattern:" + pattern)
                break  # Non-const pattern - bailout

            pattern = res.group(1)
            line = self._replaceSpecificFindTokenMatch(
                is_findsimplematch,
                line,
                pos1,
                len(g0),
                pattern,
                tok,
                endToken,
                varId)

        return line
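
    # For illustration (function number hypothetical): Token::findsimplematch(tok, "foo", end)
    # becomes roughly findmatch1(tok,end), with the templated findmatch1() emitted into
    # the generated file.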

    def _replaceCStrings(self, line):
        while True:
            match = re.search('(==|!=) *"', line)
            if not match:
                break

            if self._isInString(line, match.start()):
                break

            res = self._parseStringComparison(line, match.start())
            if res is None:
                break

            startPos = res[0]
            endPos = res[1]
            text = line[startPos + 1:endPos - 1]
            line = line[:startPos] + 'MatchCompiler::makeConstStringBegin' +\
                text + 'MatchCompiler::makeConstStringEnd' + line[endPos:]
            line = line.replace('MatchCompiler::makeConstStringBegin', 'MatchCompiler::makeConstString("')
            line = line.replace('MatchCompiler::makeConstStringEnd', '")')
        return line
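
    # For example (illustration only): the C++ comparison
    #   tok->str() == "abc"
    # is rewritten to
    #   tok->str() == MatchCompiler::makeConstString("abc")
    # (the "Cache plain C-strings in C++ strings" step driven from convertFile() below).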

    def convertFile(self, srcname, destname, line_directive):
        self._reset()

        fin = io.open(srcname, "rt", encoding="utf-8")
        srclines = fin.readlines()
        fin.close()

        code = u''

        modified = False

        linenr = 0
        for line in srclines:
            if not modified:
                line_orig = line

            linenr += 1
            # Compile Token::Match and Token::simpleMatch
            line = self._replaceTokenMatch(line, linenr, srcname)

            # Compile Token::findsimplematch
            line = self._replaceTokenFindMatch(line, linenr, srcname)

            # Cache plain C-strings in C++ strings
            line = self._replaceCStrings(line)

            if not modified and not line_orig == line:
                modified = True

            code += line

        # Compute matchFunctions
        strFunctions = u''
        for function in self._rawMatchFunctions:
            strFunctions += function

        lineno = u''
        if line_directive:
            lineno = u'#line 1 "' + srcname + '"\n'

        header = u'#include "matchcompiler.h"\n'
        header += u'#include <string>\n'
        header += u'#include <cstring>\n'
        if len(self._rawMatchFunctions):
            header += u'#include "errorlogger.h"\n'
            header += u'#include "token.h"\n'

        fout = io.open(destname, 'wt', encoding="utf-8")
        if modified or len(self._rawMatchFunctions):
            fout.write(header)
            fout.write(strFunctions)
            fout.write(lineno)
            fout.write(code)
        fout.close()
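
    # The generated file therefore consists of: the fixed header includes, all compiled
    # matchN()/findmatchN() helpers, an optional '#line 1 "<src>"' directive, and then
    # the original source with its Token::Match()/findmatch() calls rewritten.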


def main():
    # Main program

    # Argument handling
    parser = argparse.ArgumentParser(
        description='Compile Token::Match() calls into native C++ code')
    parser.add_argument('--verify', action='store_true', default=False,
                        help='verify compiled matches against on-the-fly parser. Slow!')
    parser.add_argument('--show-skipped', action='store_true', default=False,
                        help='show skipped (non-static) patterns')
    parser.add_argument('--read-dir', default="lib",
                        help='directory from which files are read')
    parser.add_argument('--write-dir', default="build",
                        help='directory into which files are written')
    parser.add_argument('--prefix', default="",
                        help='prefix for build files')
    parser.add_argument('--line', action='store_true', default=False,
                        help='add line directives pointing back to the input files')
    parser.add_argument('file', nargs='*',
                        help='file to compile')
    args = parser.parse_args()
    lib_dir = args.read_dir
    build_dir = args.write_dir
    line_directive = args.line
    files = args.file

    # Check if we are invoked from the right place
    if not os.path.exists(lib_dir):
        print('Directory "' + lib_dir + '" not found.')
        sys.exit(-1)

    # Create build directory if needed
    try:
        os.makedirs(build_dir)
    except OSError as e:
        # Due to a race condition in parallel builds, makedirs may fail.
        # Ignore that; if there's an actual problem with directory creation,
        # it will be caught by the isdir check below.
        if e.errno != errno.EEXIST:
            raise

    if not os.path.isdir(build_dir):
        raise Exception(build_dir + ' is not a directory')

    mc = MatchCompiler(verify_mode=args.verify,
                       show_skipped=args.show_skipped)

    if not files:
        # select all *.cpp files in lib_dir
        for f in glob.glob(lib_dir + '/*.cpp'):
            files.append(f[len(lib_dir) + 1:])

    # convert files
    for fi in files:
        pi = lib_dir + '/' + fi
        fo = args.prefix + fi
        po = build_dir + '/' + fo
        print(pi + ' => ' + po)
        mc.convertFile(pi, po, line_directive)


if __name__ == '__main__':
    main()
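
# Example invocation (paths and prefix are illustrative; the file argument is relative
# to --read-dir):
#   python matchcompiler.py --read-dir lib --write-dir build --prefix mc_ --line token.cpp
# which prints 'lib/token.cpp => build/mc_token.cpp' and writes the compiled source there.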