tokenizer: made 'SizeOfType' public. Bug fix in the tokenizer (strings longer than 1000 characters).

Daniel Marjamäki 2007-05-30 20:08:05 +00:00
parent 000d5c0249
commit f3bbdbf144
2 changed files with 12 additions and 4 deletions

@@ -183,7 +183,7 @@ static void combine_2tokens(TOKEN *tok, const char str1[], const char str2[])
 std::map<std::string, unsigned int> TypeSize;
-static int SizeOfType(const char type[])
+int SizeOfType(const char type[])
 {
     if (!type)
         return 0;
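
This is the change named in the commit message: dropping 'static' gives SizeOfType external linkage, and a matching declaration is added to the header (second file below). A minimal caller sketch, assuming the tokenizer has already filled the TypeSize map; the main() here is purely illustrative and not part of the commit:

#include <cstdio>

int SizeOfType(const char type[]);   // declaration from the header below

int main()
{
    // SizeOfType returns 0 for a null pointer; the value for "int" is
    // whatever the tokenizer stored in the TypeSize map.
    std::printf("size of 'int' according to the tokenizer: %d\n", SizeOfType("int"));
    return 0;
}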
@@ -416,8 +416,11 @@ void Tokenize(const char FileName[])
     do
     {
         // Append token..
-        *pToken = c;
-        pToken++;
+        if ( pToken < &CurrentToken[sizeof(CurrentToken)-10] )
+        {
+            *pToken = c;
+            pToken++;
+        }
 
         // Special sequence '\.'
         if (special)
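
This is the bug fix from the commit message. The old code appended each character unconditionally, so a single token longer than the fixed CurrentToken buffer (the reported case: string constants over 1000 characters) wrote past its end. The new guard simply drops the excess characters. A self-contained sketch of the same pattern; the buffer size and the helper name are assumptions, since the real definitions are outside this diff:

static char CurrentToken[1000];      // assumed size; the real definition is not shown
static char *pToken = CurrentToken;

// Hypothetical helper showing the guarded append from the diff.
static void AppendChar(char c)
{
    // Keep 10 bytes of headroom so there is always room for a terminating '\0'.
    if (pToken < &CurrentToken[sizeof(CurrentToken) - 10])
    {
        *pToken = c;
        pToken++;
    }
    // Characters past the limit are silently discarded: the token is
    // truncated instead of overflowing the buffer.
}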
@@ -526,6 +529,7 @@ void SimplifyTokenList()
     // typedefs..
+/*
     TOKEN *prev = NULL;
     for (TOKEN *tok = tokens; tok; tok = tok->next)
     {
@@ -557,6 +561,7 @@ void SimplifyTokenList()
                 {
                     DeleteNextToken(tok2);
                     InsertTokens( tok2, type0, len );
+                    tok2 = gettok(tok2, len);
                 }
             }
         }
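
Although the whole typedef block is being disabled by the new /* ... */ pair (see the next hunk), the added line records a fix inside it: after InsertTokens splices the 'len' replacement tokens in after tok2, the loop would otherwise re-examine those freshly inserted tokens on its next step. gettok is not defined in this diff; a plausible reading of what the call does:

struct TOKEN { TOKEN *next; /* other fields omitted */ };

// Assumed behaviour: return the token 'index' steps after 'tok', or NULL
// if the list ends first.
static TOKEN *gettok(TOKEN *tok, int index)
{
    while (tok && index > 0)
    {
        tok = tok->next;
        index--;
    }
    return tok;
}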
@@ -584,7 +589,7 @@ void SimplifyTokenList()
         }
         prev = tok;
     }
-
+*/
 
     // Fill the map TypeSize..
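
The context line above marks where TypeSize is populated. The entries themselves are outside this diff, so the following initialisation is only a guess at its shape; the exact type names and byte sizes are assumptions:

TypeSize["char"]   = 1;
TypeSize["short"]  = 2;
TypeSize["int"]    = 4;
TypeSize["long"]   = 4;
TypeSize["float"]  = 4;
TypeSize["double"] = 8;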

@@ -20,6 +20,9 @@ extern struct TOKEN *tokens, *tokens_back;
 void Tokenize(const char FileName[]);
+// Return size.
+int SizeOfType(const char type[]);
+
 // Simplify tokenlist
 // -----------------------------
 void SimplifyTokenList();