Click here to Skip to main content
15,885,875 members
Articles / Programming Languages / C++

Tokenizer and analyzer package supporting precedence prioritized rules

Rate me:
Please Sign up or sign in to vote.
5.00/5 (4 votes)
1 Jan 2002 · 3 min read 181.5K   2.8K   54  
A library allowing you to conveniently build a custom tokenizer and analyzer supporting precedence prioritized rules
/*********************************************************************
	Copyright (C) 2001 by

		Alexander Berthold, alexander-berthold@web.de.
		Hoegestr. 54
		79108 Freiburg i. Breisgau
		Germany

    -- This file is part of cxtPackage --

    "cxtPackage" is free software; you can redistribute it and/or 
	modify it under the terms of the GNU Lesser General Public 
	License as published by the Free Software Foundation; either 
	version 2 of the License, or any later version.

    "cxtPackage" is distributed in the hope that it will be useful,
    but WITHOUT ANY WARRANTY; without even the implied warranty of
    MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
    GNU Lesser General Public License for more details.

    You should have received a copy of the GNU Lesser General Public
	License along with "cxtPackage"; if not, write to the Free 
	Software  Foundation, Inc., 59 Temple Place, Suite 330, 
	Boston, MA  02111-1307  USA

    ---------------------------------------------------------------
      If you find any bugs or if you make other corrections/
	  enhancements, i'd appreciate if you'd let me know about 
	  that. My email is
  
       alexander-berthold@web.de
  
      If you share this code, do not remove this text.
    ---------------------------------------------------------------
		
*********************************************************************/

// cxtPackage.cpp: implementation of the cxtPackage class.
//
//////////////////////////////////////////////////////////////////////

#include "stdafx.h"
#include "cxtPackage.h"
//#include "cxTokenizerPreprocessorTokenRules.h"

//////////////////////////////////////////////////////////////////////
// Construction/Destruction
//////////////////////////////////////////////////////////////////////

	// Constructor. Builds the complete tokenizer/analyzer machinery from
	// the given initialization stream:
	//   - the tokenizer map is loaded directly from 'init',
	//   - the rule map is derived from the tokenizer map,
	//   - the analyzer (parser) loads its rules from 'init',
	//   - the tokenizer and the (empty) token stream are created last.
	// If initialization fails, all partially constructed helper objects
	// are released and the cxAnalyzerException is rethrown.
	cxtPackage::cxtPackage(std::tstringstream& init)
		:	m_pxtTokenizer(NULL),
			m_patmRuleMap(NULL),
			m_pamParser(NULL),
			m_patsTokens(NULL),
			m_pnBreakIDValues(NULL),
			m_pxtpDelimClass(NULL)
		{
		// Keep the break-ID count consistent with the (empty) break-ID
		// array; it was previously left uninitialized here and is read in
		// vRegisterToken().
		m_nCountBreakIDValues = 0;

		// Initialize the tokenizer map
		m_xtmTokMap.vLoadFromStream(init);

		try {
			// Create + initialize the rule map
			m_patmRuleMap	=new cxAnalyzerTypeMap();
			m_patmRuleMap->vInitFromTokenizerMap(&m_xtmTokMap);

			// Create + initialize the analyzer
			m_pamParser		=new cxAnalyzerMain(m_patmRuleMap);
			m_pamParser->fLoadFromStream(init);

			// Create the tokenizer
			m_pxtTokenizer	=new cxTokenizer(&m_xtmTokMap,this);

			// Create the token stream
			m_patsTokens	=new cxaTokenStream(m_patmRuleMap);

			// Initialize the current token position
			m_itCurrentToken=m_patsTokens->end();
			}
		catch(cxAnalyzerException&)
			{
			// Release everything created so far — the destructor will not
			// run for a partially constructed object — then rethrow the
			// *original* exception. A bare 'throw' avoids the copy (and
			// potential slicing) that 'throw e' performed.
			delete m_patsTokens;
			delete m_pxtTokenizer;
			delete m_pamParser;
			delete m_patmRuleMap;
			throw;
			}
		}

	// Destructor: releases all owned helper objects.
	// Requires that no nReadUntilDelimeter(cxtpDelimeterClassBase*) call
	// is still in progress (m_pxtpDelimClass must have been reset).
	cxtPackage::~cxtPackage()
		{
		ASSERT(m_pxtpDelimClass==NULL);
		delete[] m_pnBreakIDValues;	// array allocated in vSetDelimeterIDs()
		delete m_pxtTokenizer;
		delete m_pamParser;
		delete m_patmRuleMap;
		delete m_patsTokens;
		}

	// Resets tokenizer and analyzer to their initial state and discards
	// all buffered tokens (via vFlush). Must not be called while a
	// delimiter-class read is in progress.
	void cxtPackage::vReset()
		{
		ASSERT(m_pxtpDelimClass==NULL);
		ASSERT(m_pxtTokenizer->fCheckValid());
		ASSERT(m_pamParser->fCheckValid());
		m_pxtTokenizer->vReset();
		m_pamParser->vReset();
		vFlush();
		}

//////////////////////////////////////////////////////////////////////
// Operations
//////////////////////////////////////////////////////////////////////
	// Intended to copy the preprocessor's collected error messages into
	// 'dest'. The implementation is currently disabled (it depended on
	// cxTokenizerPreprocessorTokenRules, see the commented-out #include at
	// the top of this file), so the function always reports failure and
	// leaves 'dest' untouched.
	bool cxtPackage::fPreprocessorGetErrors(std::vector<std::tstring>& dest)
		{
/*		ASSERT(fCheckValid());
		ASSERT(m_pxtTokenizer!=NULL);
		ASSERT(m_pxtTokenizer->fCheckValid());

		const cxTokenizerContext *pctx = m_pxtTokenizer->ptcGetContext();
		const cxTokenizerTokenRule *prule = NULL;
		const cxTokenizerPreprocessorGlobalContextCookie *pCookie = NULL;
		prule = pxtmGetTokenizerMap()->pttrGetRuleForString("preprocessor");
		if(prule==NULL)
			return FALSE;
		VERIFY(pctx->fGetGlobalConstCookie(prule,&pCookie));

		const std::vector<std::tstring>& errorVec = pCookie->getErrorVec();
		if(errorVec.empty()) dest.clear();
		else dest = errorVec;

		return true;*/
		return false;
		}

	// Moves the current token position back to the first buffered token.
	void cxtPackage::vSetStartFromBeginning()
		{
		m_itCurrentToken = m_patsTokens->begin();
		}

	// Sets the current token position to an arbitrary iterator; the
	// iterator must refer into this package's own token stream.
	void cxtPackage::vSetStartFrom(const cxaTokenStream::const_iterator itStartPos)
		{
		m_itCurrentToken = itStartPos;
		}

	// Discards all buffered tokens and invalidates the current position
	// (set to end(), so the next rule check restarts from begin()).
	void cxtPackage::vFlush()
		{
		m_patsTokens->clear();
		m_itCurrentToken=m_patsTokens->end();
		}

	// Discards all tokens before itEndPos and makes itEndPos the new
	// current token position.
	void cxtPackage::vFlush(cxaTokenStream::const_iterator itEndPos)
		{
		// NOTE(review): this pointer cast from const_iterator to iterator
		// assumes both iterator types have identical object layout — true
		// for the era's MSVC STL, but implementation-specific and not
		// portable. Modern containers accept a const_iterator in erase()
		// directly; verify cxaTokenStream's API before changing this.
		cxaTokenStream::iterator nc_itEndPos = *((cxaTokenStream::iterator*)&itEndPos);
		m_patsTokens->erase(m_patsTokens->begin(),nc_itEndPos);
		m_itCurrentToken=itEndPos;
		}

	// Attaches a new input stream to the tokenizer, or — when pxtis is
	// NULL — detaches and cleans up all currently attached streams.
	void cxtPackage::vSetInputStream(cxTokenizerInputStream *pxtis)
		{
#ifdef _DEBUG
		if(pxtis!=NULL)
			{
			ASSERT(pxtis->fCheckValid());
			}
#endif
		ASSERT(m_pxtTokenizer!=NULL);

		if(pxtis!=NULL)
			{
			// Push the new stream onto the tokenizer's input stack.
			m_pxtTokenizer->vPushInputStream(pxtis);
			}
		else
			{
			// NULL acts as a "detach everything" request.
			m_pxtTokenizer->vDetachInputStream(NULL);
			m_pxtTokenizer->vCleanupInputStreams();
			}

		ASSERT(m_pxtTokenizer->fCheckValid());
		}

	// Replaces the set of token IDs that terminate nReadUntilDelimeter().
	// Passing (NULL, 0) clears the set. The ID array is copied, so the
	// caller keeps ownership of pnIDValues.
	void cxtPackage::vSetDelimeterIDs(const int *pnIDValues, int nCount)
		{
#ifdef _DEBUG
		// A NULL array must go together with a zero count and vice versa.
		ASSERT((nCount==0) ? (pnIDValues==NULL) : (pnIDValues!=NULL));
#endif

		// Drop any previously stored delimiter set first.
		delete[] m_pnBreakIDValues;
		m_pnBreakIDValues = NULL;
		m_nCountBreakIDValues = 0;

		if(nCount!=0)
			{
			// Take a private copy of the caller's ID array.
			m_pnBreakIDValues = new int[nCount];
			m_nCountBreakIDValues = nCount;
			int nIdx = 0;
			while(nIdx<nCount)
				{
				m_pnBreakIDValues[nIdx] = pnIDValues[nIdx];
				++nIdx;
				}
			}
		}

	// Tokenizes every attached input stream completely, detaching each
	// stream once its EOF is reached. Despite the name, this overload
	// does not stop at delimiter tokens — it reads until all input is
	// consumed (the break flag is reset but not checked here; breaking
	// happens only in the delimiter-class overload below).
	// Returns the number of tokens registered during this call.
	int	cxtPackage::nReadUntilDelimeter()
		{
		ASSERT(m_pxtTokenizer->ptiGetInputStream()!=NULL);
		ASSERT(m_pxtTokenizer->ptiGetInputStream()->fCheckValid());

		// Reset the break flag and the token counter
		m_fBreakRead	=false;
		m_nTokenCount	=0;

		while(m_pxtTokenizer->ptiGetInputStream())
			{
			// Consume the topmost stream completely ...
			// (the previously assigned-but-unused 'ptisCur' local has been
			// removed)
			while(!m_pxtTokenizer->ptiGetInputStream()->fIsEofReached())
				m_pxtTokenizer->vParseCharacter();
			// ... then detach it and continue with the next one.
			m_pxtTokenizer->vDetachInputStream(m_pxtTokenizer->ptiGetInputStream());
			}

		return m_nTokenCount;
		}

	// Tokenizes the current input stream until either EOF is reached or
	// the given delimiter class flags a token as a delimiter (it is
	// consulted from vRegisterToken(), which sets m_fBreakRead).
	// Returns the number of tokens registered during this call.
	int	cxtPackage::nReadUntilDelimeter(cxtpDelimeterClassBase *pxtpDelimClass)
		{
		ASSERT(m_pxtTokenizer->ptiGetInputStream()!=NULL);
		ASSERT(m_pxtTokenizer->ptiGetInputStream()->fCheckValid());

		// Reset the break flag and the token counter
		m_fBreakRead	=false;
		m_nTokenCount	=0;
		m_pxtpDelimClass=pxtpDelimClass;

		try {
			while(!m_pxtTokenizer->ptiGetInputStream()->fIsEofReached() && !m_fBreakRead)
				m_pxtTokenizer->vParseCharacter();
			}
		catch(...)
			{
			// Exception safety: without this, a throw out of
			// vParseCharacter() would leave m_pxtpDelimClass attached and
			// later trip the ASSERT(m_pxtpDelimClass==NULL) in the
			// destructor and in vReset().
			m_pxtpDelimClass=NULL;
			throw;
			}

		m_pxtpDelimClass=NULL;

		return m_nTokenCount;
		}

	// Checks whether the rule identified by the public ID value nIDValue
	// matches the buffered token stream at the current position.
	//   fTestOnly==true : no parse tree is built; the return value is the
	//                     CXTCFR_VALID/CXTCFR_INVALID status code.
	//   fTestOnly==false: on success, returns a detached parse branch the
	//                     caller owns; NULL on mismatch.
	// pend (optional) receives the end position of the match; pascCondition
	// is forwarded to the analyzer.
	cxaParseBranch *cxtPackage::papbCheckForRule(
						int nIDValue, 
						cxaTokenStream::const_iterator *pend, 
						cxaStatusCookie* pascCondition,
						bool fTestOnly)
		{
		m_patsTokens->vDump();

		// Map the public ID onto the internal atm type and delegate to the
		// atm-typed variant. This removes the previous wholesale code
		// duplication with papbCheckForRuleAtm() and mirrors the
		// vRebalance()/vRebalanceAtm() pair.
		return papbCheckForRuleAtm(
					m_patmRuleMap->nGetAtmTypeFor(nIDValue),
					pend,pascCondition,fTestOnly);
		}

	// Checks whether the rule with the internal atm type nAtmType matches
	// the buffered token stream at the current position.
	//   fTestOnly==true : no parse tree is built; the return value is the
	//                     CXTCFR_VALID/CXTCFR_INVALID status code.
	//   fTestOnly==false: on success, returns a detached parse branch the
	//                     caller owns; NULL on mismatch.
	cxaParseBranch *cxtPackage::papbCheckForRuleAtm(
						int nAtmType, 
						cxaTokenStream::const_iterator *pend, 
						cxaStatusCookie* pascCondition,
						bool fTestOnly)
		{
		// If no current position is set, start from the first token.
		if(m_itCurrentToken==m_patsTokens->end())
			m_itCurrentToken = m_patsTokens->begin();

#ifndef ANALYZER_NO_OPTIMIZATION
			m_pamParser->vClearCache();
#endif

		if(fTestOnly)
			{
			// Test-only mode: no tree requested, result is a status code.
			bool	fMatched = m_pamParser->fCheckRule(
						nAtmType,
						m_patsTokens,pascCondition,
						m_itCurrentToken,pend,NULL);
			return fMatched ? CXTCFR_VALID : CXTCFR_INVALID;
			}

		cxAnalyzerTree	atTree;
		bool	fMatched = m_pamParser->fCheckRule(
					nAtmType,
					m_patsTokens,pascCondition,
					m_itCurrentToken,pend,&atTree);

		if(!fMatched)
			return NULL;

		// Build the externally visible parse tree, detach its root branch
		// and hand ownership of that branch to the caller.
		cxaParseTree	*paptTree = atTree.paptBuildTreeInternal(m_pamParser);
		cxaParseBranch	*papbResult = paptTree->papbDetach();
		delete paptTree;
		return papbResult;
		}

	// Rebalances a parse branch identified by its public ID value by
	// translating the ID into the internal atm type and delegating.
	void cxtPackage::vRebalance(cxaParseBranch *papbBranch, int nIDValue) const
		{
		vRebalanceAtm(papbBranch, m_patmRuleMap->nGetAtmTypeFor(nIDValue));
		}

	// Rebalances a parse branch for the given internal atm type by
	// forwarding to cxaParseTree::vRebalance; validity is asserted both
	// before and after the operation (debug builds only).
	void cxtPackage::vRebalanceAtm(cxaParseBranch *papbBranch, int nAtmType) const
		{
		ASSERT(papbBranch->fCheckValid());
		cxaParseTree::vRebalance(papbBranch,nAtmType);
		ASSERT(papbBranch->fCheckValid());
		}

/*	bool cxtPackage::fIsImplicitRule(int nAtmTypeTestFor, int nAtmTypeSource) const
		{
		if(nAtmTypeTestFor == nAtmTypeSource)
			return true;

		if(m_pamParser->fIsImplicitRule(nAtmTypeTestFor, nAtmTypeSource, NULL))
			return true;

		if(m_pamParser->fIsImplicitRule(nAtmTypeSource, nAtmTypeTestFor, NULL))
			return true;

		return false;
		}
*/
//////////////////////////////////////////////////////////////////////
// cxTokenizerListener operations
//////////////////////////////////////////////////////////////////////

	// cxTokenizerListener callback: invoked by the tokenizer for every
	// completed token. Decides whether the token terminates the current
	// nReadUntilDelimeter() call (sets m_fBreakRead), then appends the
	// token to the stream and counts it.
	void cxtPackage::vRegisterToken(const std::tstring& strTokenText, const cxTokenizerTokenRule* pttrRule,
									const cxTokenizerInputStream *ptisStream)
		{
		int		nIDValue = TOKEN_ID_INVALID;
		void	*pvData = NULL;

		// Resolve the token's public ID value, if a rule produced it.
		if(pttrRule!=NULL)
			nIDValue = pttrRule->nGetIDValue();

		if(m_pxtpDelimClass!=NULL)
			{
			// A delimiter class is active (delimiter-class overload of
			// nReadUntilDelimeter): ask it whether this token ends the read.
			if(m_pxtpDelimClass->fIsDelimeter(nIDValue, strTokenText))
				m_fBreakRead = true;
			}
		else if(m_pnBreakIDValues!=NULL)
			{
			// Otherwise scan the break-ID list set via vSetDelimeterIDs();
			// stop at the first match instead of scanning the whole list.
			for(int i=0;i<m_nCountBreakIDValues;i++)
				{
				if(m_pnBreakIDValues[i]==nIDValue)
					{
					m_fBreakRead = true;
					break;
					}
				}
			}

		// Append the token to the stream and count it for the caller.
		m_patsTokens->vRegisterToken(strTokenText,pttrRule,nIDValue,pvData,ptisStream);

		m_nTokenCount++;
		}

//////////////////////////////////////////////////////////////////////
// ctkCheckValid operations
//////////////////////////////////////////////////////////////////////

#ifdef _DEBUG
	// Debug-only validity check. This class maintains no invariants that
	// can be verified independently of its helpers, so it always reports
	// success.
	bool cxtPackage::fCheckValid() const
		{
		return true;
		}

	// Debug-only diagnostics hook; not implemented — always reports
	// failure.
	bool cxtPackage::fRunDiagnostics()
		{
		return false;
		}
#endif

By viewing downloads associated with this article you agree to the Terms of Service and the article's licence.

If a file you wish to view isn't highlighted, and is a text file (not binary), please let us know and we'll add colourisation support for it.

License

This article has no explicit license attached to it but may contain usage terms in the article text or the download files themselves. If in doubt please contact the author via the discussion board below.

A list of licenses authors might use can be found here


Written By
Web Developer
Germany Germany
This member has not yet provided a Biography. Assume it's interesting and varied, and probably something to do with programming.

Comments and Discussions