From f3bbdbf144be3bd670c3be267de8dad74d1c843b Mon Sep 17 00:00:00 2001
From: Daniel Marjamäki
Date: Wed, 30 May 2007 20:08:05 +0000
Subject: [PATCH] tokenizer: made the 'SizeOfType' public. bug fix in the tokenizer (strings longer than 1000 characters).

---
 tokenize.cpp | 13 +++++++++----
 tokenize.h   |  3 +++
 2 files changed, 12 insertions(+), 4 deletions(-)

diff --git a/tokenize.cpp b/tokenize.cpp
index 86b7c0fc0..e588db78d 100644
--- a/tokenize.cpp
+++ b/tokenize.cpp
@@ -183,7 +183,7 @@ static void combine_2tokens(TOKEN *tok, const char str1[], const char str2[])
 
 std::map TypeSize;
 
-static int SizeOfType(const char type[])
+int SizeOfType(const char type[])
 {
     if (!type)
         return 0;
@@ -416,8 +416,11 @@ void Tokenize(const char FileName[])
         do
         {
             // Append token..
-            *pToken = c;
-            pToken++;
+            if ( pToken < &CurrentToken[sizeof(CurrentToken)-10] )
+            {
+                *pToken = c;
+                pToken++;
+            }
 
             // Special sequence '\.'
             if (special)
@@ -526,6 +529,7 @@ void SimplifyTokenList()
 
 
     // typedefs..
+    /*
     TOKEN *prev = NULL;
     for (TOKEN *tok = tokens; tok; tok = tok->next)
     {
@@ -557,6 +561,7 @@ void SimplifyTokenList()
                 {
                     DeleteNextToken(tok2);
                     InsertTokens( tok2, type0, len );
+                    tok2 = gettok(tok2, len);
                 }
             }
         }
@@ -584,7 +589,7 @@ void SimplifyTokenList()
         }
         prev = tok;
     }
-
+    */
 
 
     // Fill the map TypeSize..
diff --git a/tokenize.h b/tokenize.h
index 0abb50b20..5fd20c889 100644
--- a/tokenize.h
+++ b/tokenize.h
@@ -20,6 +20,9 @@ extern struct TOKEN *tokens, *tokens_back;
 void Tokenize(const char FileName[]);
 
+// Return size.
+int SizeOfType(const char type[]);
+
 // Simplify tokenlist
 // -----------------------------
 void SimplifyTokenList();
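
The bug fix in the Tokenize() hunk above bounds every write into the fixed-size token buffer, so a string literal longer than the buffer is truncated instead of overrunning it. Below is a minimal, self-contained sketch of that guarded-append pattern, not cppcheck code: the names CurrentToken and pToken follow the diff, the 1000-character buffer size is inferred from the commit message, and main() with its input loop is purely illustrative.

#include <cstdio>
#include <cstring>

int main()
{
    // Fixed-size token buffer; the 1000-character size is an assumption
    // taken from the commit message, not from the diff itself.
    char CurrentToken[1000] = {0};
    char *pToken = CurrentToken;

    // Simulate reading a string literal that is longer than the buffer.
    for (int i = 0; i < 5000; ++i)
    {
        const char c = 'a';

        // Append the character only while there is room left, mirroring the
        // guard added in the patch; excess characters are silently dropped.
        if (pToken < &CurrentToken[sizeof(CurrentToken) - 10])
        {
            *pToken = c;
            pToken++;
        }
    }

    *pToken = '\0';
    printf("stored %zu characters\n", strlen(CurrentToken));
    return 0;
}

The -10 margin mirrors the patch and presumably leaves headroom for characters appended after the loop. With SizeOfType() now declared in tokenize.h rather than being static, other translation units can call it directly, e.g. SizeOfType("int").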