
Tokenizer and analyzer package supporting precedence prioritized rules

1 Jan 2002
A library allowing you to conveniently build a custom tokenizer and analyzer supporting precedence prioritized rules.
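
Usage follows the pattern of the (commented-out) self-test near the end of the source shown below: build a cxTokenizerMap, an input stream and a listener, hand them to a cxTokenizer, load the separator/token/rule definitions into the map, and push characters through the tokenizer until the stream is exhausted. The sketch below is assembled from exactly those calls only; it is not taken from the download, the header name "cxTokenizer.h" is an assumption, and the listener is modeled on the self-test's cxTestListener, assuming no further members need to be overridden.

// Minimal usage sketch, modeled on the self-test below (assumptions noted above).
#include "cxTokenizer.h"                    // assumed header name
#include "cxTokenizerTextInputStream.h"     // header named in the source below

class cxPrintListener : public cxTokenizerListener
	{
public:
	virtual bool	fCheckValid() const		{ return true; }
	virtual bool	fShouldDelete() const	{ return false; }
	virtual void	vRegisterToken(const std::tstring& strToken,
								   cxTokenizerTokenRule* pptrRule)
		{
		// Called once per recognized token; pptrRule identifies the rule
		// that matched and may be NULL for some tokens.
		TRACE(_T("token: %s\n"), strToken.data());
		}
	};

void	vTokenizeExample()
	{
	// Separator, token and rule definitions in the stream format used by
	// the self-test below ("seperators" spelled as in the original).
	std::tstringstream	strmRules(
		_T("[seperators]\n") _T("-\n") _T("--\n")
		_T("[tokens]\n") _T("test\n")
		_T("[rules]\n") _T("strings\n") _T("numbers\n") );

	cxTokenizerMap				map;
	cxTokenizerTextInputStream	tis(_T("test--1234-\"abc\""));
	cxPrintListener				lis;
	cxTokenizer					lexxer(&tis, &map, &lis);

	map.vLoadFromStream(strmRules);		// initialize the rule map

	while(!tis.fIsEofReached())			// feed the input one character at a
		lexxer.vParseCharacter();		// time; tokens arrive at the listener
	}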

Downloads:

cxtpackagetut_win32vc.zip
    cxTokenizer
        cxTokenizer.dsp
        cxTokenizer.plg
    cxtPackage
        cxtPackage.plg
        cxtPackage.dsp
    mathTok
        mathTok.plg
        mathTok.dsp
    simpleCalc
        simpleCalc.bmp
        simpleCalc.dsp
        simpleCalc.plg
    tkCommon
    cxAnalyzer
        cxAnalyzer.dsp
        cxAnalyzer.plg
cxtpackage_win32vc.zip
    cxTokenizer.dsp
    cxTokenizer.plg
    Debug
    cxAnalyzer.dsp
    cxAnalyzer.plg
    cxtPackage.plg
    cxtPackage.dsp
grammaride.zip
pkgcomplete.zip
    cxTokenizerMatchTokenRule.inl
    cxTokenizer.dsp
    cxtPackage.dsp
    emptyTestApp
        emptyTestApp.clw
        res
            emptyTestApp.ico
        emptyTestApp.dsp
    grammarIDE
        grammarIDE.clw
        res
            grammarIDEDoc.ico
            icon1.ico
            zoomable.ico
            Toolbar.bmp
            grammarIDE.ico
            vssver.scc
        grammarIDE.dsp
    simpleCalc.bmp
    simpleCalc.dsp
    vssver.scc
    cxaToken.inl
    cxAnalyzer.dsp
pkgsrconly.zip
    cxTokenizerMatchTokenRule.inl
    cxTokenizer.dsp
    cxtPackage.dsp
    emptyTestApp.clw
    emptyTestApp.ico
    emptyTestApp.dsp
    vssver.scc
    cxaToken.inl
    cxAnalyzer.dsp
The source shown below is the debug-only portion of the cxTokenizer implementation, compiled only when _DEBUG is defined. It contains a consistency check, fCheckValid(), and a small self-test, fRunDiagnostics().

#ifdef _DEBUG
#include "cxTokenizerTextInputStream.h"

// Validates the tokenizer's internal state and returns false (after a
// TRACE message) on the first inconsistency found.
bool	cxTokenizer::fCheckValid() const
	{
	if(!m_tcContextInfo.fCheckValid())
		{
		TRACE(_T("cxTokenizer::fCheckValid() - context information corrupted.\n"));
		return false;
		}

	if(!m_stkPtisData.empty())
		{
		ASSERT(!m_setPtisData.empty());

		// Iterate the stack's underlying container (exposed by the
		// implementation-specific _Get_c() accessor) to validate every
		// pending input stream.
		const stktis_type::container_type& c = m_stkPtisData._Get_c();
		stktis_type::container_type::const_iterator it;
		for(it=c.begin();it!=c.end();it++)
			{
			if( !(*it)->fCheckValid())
				{
				TRACE(_T("cxTokenizer::fCheckValid() - input stream is not valid.\n"));
				return false;
				}
			if( m_setPtisData.find( (*it) )==m_setPtisData.end() )
				{
				TRACE(_T("cxTokenizer::fCheckValid() - input stream is contained in stack but not in set.\n"));
				return false;
				}
			}
		}

	if(m_ptlReceiver==NULL)
		{
		TRACE(_T("cxTokenizer::fCheckValid() - receiver is NULL.\n"));
		return false;
		}

	if(!m_ptlReceiver->fCheckValid())
		{
		TRACE(_T("cxTokenizer::fCheckValid() - receiver is not valid.\n"));
		return false;
		}

	if(m_ptmLexxerMap==NULL)
		{
		TRACE(_T("cxTokenizer::fCheckValid() - lexxer map is NULL.\n"));
		return false;
		}

	if(!m_ptmLexxerMap->fCheckValid())
		{
		TRACE(_T("cxTokenizer::fCheckValid() - lexxer map is not valid.\n"));
		return false;
		}

	return true;
	}

// Runs a small self-test: tokenizes a fixed input with a temporary rule map
// and checks that the expected token sequence reaches a test listener.
bool	cxTokenizer::fRunDiagnostics()
	{
	class	cxTestListener : public cxTokenizerListener
	{
	// Attributes
	public:
		int		nTokenNumber;
		bool	fValid;

	// Operations
	public:
		virtual	bool			fCheckValid() const	{ return true; };
		virtual bool			fShouldDelete() const { return false; };
		virtual	void			vRegisterToken(const std::tstring& strToken, cxTokenizerTokenRule* pptrRule)
			{
			TRACE(_T("Token #%02d: %s\n"),nTokenNumber, strToken.data());
			switch(nTokenNumber)
				{
				case 0:
					if(strToken.compare(_T("test"))!=0 ||
						pptrRule==NULL)
						fValid=false;
					break;
				case 1:
					if(strToken.compare(_T("--"))!=0)
						fValid=false;
					break;
				case 2:
					if(strToken.compare(_T("-"))!=0)
						fValid=false;
					break;
				case 3:
					if(	strToken.compare(_T("test1"))!=0 ||
						pptrRule!=NULL)
						fValid=false;
					break;
				case 4:
					if(strToken.compare(_T("--"))!=0)
						fValid=false;
					break;
				case 5:
					if(	strToken.compare(_T("1234"))!=0 || 
						pptrRule->fIsFlagSet(ttrf_character_rule))
						fValid=false;
					break;
				case 6:
					if(strToken.compare(_T("-"))!=0)
						fValid=false;
					break;
				case 7:
					if(strToken.compare(_T("test"))!=0)
						fValid=false;
					break;
				};
			nTokenNumber++;
			};
	};

	std::tstringstream	strm(
		_T("[seperators]\n")
		_T("-\n")
		_T("--\n")
		_T("[tokens]\n")
		_T("test\n")
		_T("[rules]\n")
		_T("strings\n")
		_T("numbers\n") );

	// NOTE: the test body below is commented out, so this function
	// currently always returns false.
/*	cxTokenizerMap				map;
	cxTokenizerTextInputStream	tis(_T("test---test1--1234-\"test\""));
	cxTestListener				lis;
	cxTokenizer					lexxer(&tis,&map,&lis);

	lis.nTokenNumber	=0;
	lis.fValid			=true;

	// Initialize map
	map.vLoadFromStream(strm);

	while(!tis.fIsEofReached())
		lexxer.vParseCharacter();

	return lis.fValid?lexxer.fCheckValid():false;*/
	return false;
	}
#endif


License

This article has no explicit license attached to it but may contain usage terms in the article text or the download files themselves. If in doubt please contact the author via the discussion board below.


About the Author

Alexander Berthold
Web Developer
Germany
No Biography provided

Article Copyright 2001 by Alexander Berthold