28 weOwnTokenBuffer (
false)
30 #if defined(_LogStream_) 32 logger1 (
"C:\\Temp\\XmlTokenizer-1.txt"),
33 logger2 (
"C:\\Temp\\XmlTokenizer-2.txt")
46 weOwnTokenBuffer (
false)
47 #if defined(_LogStream_) 49 logger1 (
"C:\\Temp\\XmlTokenizer-1.txt"),
50 logger2 (
"C:\\Temp\\XmlTokenizer-2.txt")
54 weOwnTokenBuffer =
true;
67 weOwnTokenBuffer (
false)
68 #if defined(_LogStream_) 70 logger1 (
"C:\\Temp\\XmlTokenizer-1.txt"),
71 logger2 (
"C:\\Temp\\XmlTokenizer-2.txt")
78 weOwnTokenBuffer =
true;
100 entityMap.insert (pair<KKStr,
char> (
"quot",
'"'));
101 entityMap.insert (pair<KKStr,
char> (
"amp",
'&'));
102 entityMap.insert (pair<KKStr,
char> (
"apos",
'\''));
103 entityMap.insert (pair<KKStr,
char> (
"lt",
'<'));
104 entityMap.insert (pair<KKStr,
char> (
"gt",
'>'));
105 entityMap.insert (pair<KKStr,
char> (
"tab",
'\t'));
106 entityMap.insert (pair<KKStr,
char> (
"lf",
'\n'));
107 entityMap.insert (pair<KKStr,
char> (
"cr",
'\r'));
113 while ((tokenList.size () < tokenListLen) && (!atEndOfFile))
115 ReadInNextLogicalToken ();
124 map<KKStr,
char>::const_iterator idx;
125 idx = entityMap.find (entityName);
126 if (idx == entityMap.end ())
137 while ((tokenList.size () < 1) && (!atEndOfFile))
138 ReadInNextLogicalToken ();
140 if (tokenList.size () < 1)
142 #if defined(_LogStream_) 143 logger2 <<
"GetNextToken return NULL" << endl;
151 KKStrPtr t = tokenList.front ();
152 tokenList.pop_front ();
154 #if defined(_LogStream_) 155 logger2 <<
"GetNextToken size[" << s <<
"] :" << (t ? (*t) :
"NULL") << endl;
172 if (atEndOfFile && (tokenList.size () < 1))
180 while ((t != NULL) && (*t
!= delToken))
182 tokens->PushOnBack (t);
187 tokens->PushOnBack (t);
196 tokenList.push_front (t);
204 while ((tokenList.size () < (idx + 1)) && !atEndOfFile)
205 ReadInNextLogicalToken ();
207 if (idx >= tokenList.size ())
209 #if defined(_LogStream_) 210 logger2 <<
"Peek idx[" << idx <<
"] returning NULL" << endl;
215 #if defined(_LogStream_) 216 logger2 <<
"Peek idx[" << idx <<
"] :" << *(tokenList[idx]) << endl;
219 return tokenList[idx];
228 while ((tokenList.size () < 1) && (!atEndOfFile))
229 ReadInNextLogicalToken ();
231 return (tokenList.size () < 1);
246 #if defined(_LogStream_) 247 logger1 << endl <<
"GetNextChar atEndOfFile = true;" << endl;
257 #if defined(_LogStream_) 258 logger1 << endl <<
"GetNextChar atEndOfFile = true;" << endl;
263 #if defined(_LogStream_) 264 logger1 << firstChar;
266 if (firstChar ==
'\r')
274 #if defined(_LogStream_) 287 KKStrPtr t = GetNextTokenRaw ();
295 tokenList.push_back (t);
298 #if defined(_LogStream_) 299 logger2 <<
"ReadInNextLogicalToken size[" << tokenList.size () <<
"] :" << (t ? (*t) :
"NULL RETURNED") << endl;
308 if (strchr (
" ", c) == NULL)
323 while (WhiteSpaceChar (firstChar) && (!atEndOfFile))
330 KKStrPtr nextRawToken = NULL;
332 if (firstChar ==
'<')
335 nextRawToken = ProcessTagToken ();
340 nextRawToken = ProcessBodyToken ();
352 KKStrPtr token =
new KKStr(100
);
356 while ((!atEndOfFile) && (firstChar !=
'>'))
358 if ((firstChar ==
'"') || (firstChar ==
'\''))
361 char endingQuoteChar = firstChar;
365 while ((!atEndOfFile) && (firstChar != endingQuoteChar))
367 if (firstChar ==
'\\')
372 case 't': firstChar =
'\t';
break;
373 case 'n': firstChar =
'\n';
break;
374 case 'r': firstChar =
'\r';
break;
375 case '0': firstChar =
'\0';
break;
376 case '\\': firstChar =
'\\';
break;
377 case '"': firstChar =
'"';
break;
384 if (firstChar == endingQuoteChar)
403 if (firstChar ==
'\r') GetNextChar ();
404 if (firstChar ==
'\n') GetNextChar ();
438 while (entityName
.Len () > 0)
446 char ch = LookUpEntity (entityName);
456 KKStrPtr token =
new KKStr(512
);
458 while ((!atEndOfFile) && (firstChar !=
'<') && (firstChar !=
'\n'))
460 if (firstChar ==
'&')
468 if ((firstChar ==
'\n') && (!atEndOfFile))
void PushTokenOnFront(KKStrPtr t)
Places the token at the current position so that it becomes the next token extracted from the stream...
KKStr(kkint32 size)
Creates a KKStr object that pre-allocates space for 'size' characters.
TokenBufferStr(const KKStr &_buff)
virtual void UnGetNextChar()=0
Manages the breaking down of a stream into a set of logical tokens compatible with the XML format...
TokenBufferStream(const KKStr &_fileName)
virtual char PeekNextChar()=0
char ExtractLastChar()
Removes the last character from the string and returns it to the caller.
XmlTokenizer(const KKStr &_fileName, bool &_fileOpened)
unsigned __int32 kkuint32
virtual bool EndOfFile()=0
XmlTokenizer(TokenBufferPtr _in)
Constructs an XmlTokenizer using the provided [[TokenBuffer]] _in as the data stream source...
kkuint32 Len() const
Returns the number of characters in the string.
static KKStr Concat(const std::vector< std::string > &values)
Concatenates the list of 'std::string' strings.
XmlTokenizer(const KKStr &_str)
Manages the extraction of xml tokens from a KKStr instance; accomplishes this by building a [[TokenBu...
KKStrConstPtr Peek(kkuint32 idx)
Allows you to look at future tokens in the stream; index of 0 would be the next token to be extracted...
bool operator!=(const KKStr &right) const
TokenBuffer * TokenBufferPtr
KKStrConstPtr operator[](kkuint32 idx)
KKStrListPtr GetNextTokens(const KKStr &delToken)
Returns a list of tokens up to and including the first occurrence of 'delToken'.
virtual char GetNextChar()=0
KKStrPtr GetNextToken()
Will retrieve the next token in the stream which will be either a tag token or up to one line of the ...