
OgreCompiler2Pass.h

/*
-----------------------------------------------------------------------------
This source file is part of OGRE
(Object-oriented Graphics Rendering Engine)
For the latest info, see http://www.ogre3d.org

Copyright (c) 2000-2006 Torus Knot Software Ltd
Also see acknowledgements in Readme.html

This program is free software; you can redistribute it and/or modify it under
the terms of the GNU Lesser General Public License as published by the Free Software
Foundation; either version 2 of the License, or (at your option) any later
version.

This program is distributed in the hope that it will be useful, but WITHOUT
ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more details.

You should have received a copy of the GNU Lesser General Public License along with
this program; if not, write to the Free Software Foundation, Inc., 59 Temple
Place - Suite 330, Boston, MA 02111-1307, USA, or go to
http://www.gnu.org/copyleft/lesser.txt.

You may alternatively use this source under the terms of a specific version of
the OGRE Unrestricted License provided you have obtained such a license from
Torus Knot Software Ltd.
-----------------------------------------------------------------------------
*/


#ifndef __Compiler2Pass_H__
#define __Compiler2Pass_H__

#include "OgrePrerequisites.h"
#include "OgreStdHeaders.h"

namespace Ogre {

    /** Abstract base class for a two-pass compiler driven by a BNF grammar:
        pass 1 scans the source against the grammar's rule paths and builds a
        token queue, pass 2 walks the queue and executes the actions attached
        to the tokens.  Subclasses supply the grammar text, the token
        definitions and the token actions.
    */
    class _OgreExport Compiler2Pass
    {

    protected:

        // BNF operation types
        enum OperationType {otUNKNOWN, otRULE, otAND, otOR, otOPTIONAL,
                            otREPEAT, otDATA, otNOT_TEST, otINSERT_TOKEN, otEND};

        /** structure used to build rule paths */
        struct TokenRule
        {
            OperationType operation;
            size_t tokenID;

            TokenRule(void) : operation(otUNKNOWN), tokenID(0) {}
            TokenRule(const OperationType ot, const size_t token)
                : operation(ot), tokenID(token) {}
        };

        typedef std::vector<TokenRule> TokenRuleContainer;
        typedef TokenRuleContainer::iterator TokenRuleIterator;

        static const size_t SystemTokenBase = 1000;
        enum SystemRuleToken {
            _no_token_ = SystemTokenBase,
            _character_,
            _value_,
            _no_space_skip_
        };

        enum BNF_ID {BNF_UNKOWN = 0,
            BNF_SYNTAX, BNF_RULE, BNF_IDENTIFIER, BNF_IDENTIFIER_RIGHT, BNF_IDENTIFIER_CHARACTERS, BNF_ID_BEGIN, BNF_ID_END,
            BNF_CONSTANT_BEGIN, BNF_SET_RULE, BNF_EXPRESSION,
            BNF_AND_TERM, BNF_OR_TERM, BNF_TERM, BNF_TERM_ID, BNF_CONSTANT, BNF_OR, BNF_TERMINAL_SYMBOL, BNF_TERMINAL_START,
            BNF_REPEAT_EXPRESSION, BNF_REPEAT_BEGIN, BNF_REPEAT_END, BNF_SET, BNF_SET_BEGIN, BNF_SET_END,
            BNF_NOT_TEST, BNF_NOT_TEST_BEGIN, BNF_CONDITIONAL_TOKEN_INSERT, BNF_OPTIONAL_EXPRESSION,
            BNF_NOT_EXPRESSION, BNF_NOT_CHK,
            BNF_OPTIONAL_BEGIN, BNF_OPTIONAL_END, BNF_NO_TOKEN_START, BNF_SINGLEQUOTE, BNF_SINGLE_QUOTE_EXC, BNF_SET_END_EXC,
            BNF_ANY_CHARACTER, BNF_SPECIAL_CHARACTERS1,
            BNF_SPECIAL_CHARACTERS2, BNF_WHITE_SPACE_CHK,

            BNF_LETTER, BNF_LETTER_DIGIT, BNF_DIGIT, BNF_WHITE_SPACE,
            BNF_ALPHA_SET, BNF_NUMBER_SET, BNF_SPECIAL_CHARACTER_SET1,
            BNF_SPECIAL_CHARACTER_SET2, BNF_SPECIAL_CHARACTER_SET3, BNF_NOT_CHARS,

            // do not remove - this indicates where manually defined tokens end and where auto-gen ones start
            BNF_AUTOTOKENSTART
        };


        /** structure holding the definition of a lexeme token */
        struct LexemeTokenDef
        {
            size_t ID;                  /// token ID
            bool hasAction;             /// true if an action is associated with this token
            bool isNonTerminal;         /// true if the token is a non-terminal (has a rule path)
            size_t ruleID;              /// rule ID of the non-terminal's rule path
            bool isCaseSensitive;       /// true if the lexeme must match case exactly
            String lexeme;              /// text of the lexeme

            LexemeTokenDef(void) : ID(0), hasAction(false), isNonTerminal(false), ruleID(0), isCaseSensitive(false) {}
            LexemeTokenDef( const size_t ID, const String& lexeme, const bool hasAction = false, const bool caseSensitive = false )
                : ID(ID)
                , hasAction(hasAction)
                , isNonTerminal(false)
                , ruleID(0)
                , isCaseSensitive(caseSensitive)
                , lexeme(lexeme)
            {
            }

        };

        typedef std::vector<LexemeTokenDef> LexemeTokenDefContainer;
        typedef LexemeTokenDefContainer::iterator LexemeTokenDefIterator;

        typedef std::map<std::string, size_t> LexemeTokenMap;
        typedef LexemeTokenMap::iterator TokenKeyIterator;

        /** structure for a token instance generated during pass 1 */
        struct TokenInst
        {
            size_t NTTRuleID;           /// non-terminal token rule ID that generated this token
            size_t tokenID;             /// expected token ID
            size_t line;                /// line in the source where the token was found
            size_t pos;                 /// character position in the source where the token was found
            bool found;                 /// true if the expected token was found
        };

        typedef std::vector<TokenInst> TokenInstContainer;
        typedef TokenInstContainer::iterator TokenInstIterator;

        // token queue, definitions, rules
        struct TokenState
        {
            TokenInstContainer       tokenQue;
            LexemeTokenDefContainer  lexemeTokenDefinitions;
            TokenRuleContainer       rootRulePath;
            LexemeTokenMap           lexemeTokenMap;
        };

        /// token state of the client grammar being compiled
        TokenState* mClientTokenState;

        /// active token queue, definitions and rules currently used by the parser
        TokenState* mActiveTokenState;
        /// current position in the token queue during pass 2
        mutable size_t mPass2TokenQuePosition;
        /// queue position of the previous token that had an action
        size_t mPreviousActionQuePosition;
        /// queue position of the next token that has an action
        size_t mNextActionQuePosition;

        /// pointer to the source text being compiled
        const String* mSource;
        /// name of the source, used for error reporting
        String mSourceName;
        size_t mEndOfSource;

        size_t mCurrentLine;  /// current line number in the source being tokenised
        size_t mCharPos;      /// current character position in the source
        size_t mErrorCharPos; /// character position where the last error occurred

        /// float constants found in the source, keyed by token queue position
        std::map<size_t, float> mConstants;
        /// string labels found in the source, keyed by token queue position
        typedef std::map<size_t, String> LabelContainer;
        LabelContainer mLabels;
        /// true while a label is being parsed
        bool mLabelIsActive;
        /// key of the label currently being built
        size_t mActiveLabelKey;
        /// label string currently receiving characters during pass 1
        String* mActiveLabel;
        /// true when spaces are not to be skipped between lexemes
        bool mNoSpaceSkip;
        /// true when the next terminal token found is not to be added to the token queue
        bool mNoTerminalToken;
        /// token ID to insert when the next terminal token is found in the rule path
        size_t mInsertTokenID;

        /// bit pattern of the contexts currently active during pass 1
        uint mActiveContexts;

        /** perform pass 1: scan the source against the active rule paths and
            build the token queue */
        bool doPass1();

        /** perform pass 2: walk the token queue and execute the actions
            attached to the tokens */
        bool doPass2();

        /// called during pass 2 for each token that has an action; implemented by the subclass
        virtual void executeTokenAction(const size_t tokenID) = 0;
        /// gets the token ID at which auto-generated token IDs start; implemented by the subclass
        virtual size_t getAutoTokenIDStart() const = 0;
        /// sets up the client's lexeme token definitions; implemented by the subclass
        virtual void setupTokenDefinitions(void) = 0;
        /// advance to and return the next token in the pass 2 queue
        const TokenInst& getNextToken(const size_t expectedTokenID = 0) const
        {
            skipToken();
            return getCurrentToken(expectedTokenID);
        }
        /// return the current token in the pass 2 queue
        const TokenInst& getCurrentToken(const size_t expectedTokenID = 0) const;
        /// test whether the next token's ID matches the expected ID without consuming it
        bool testNextTokenID(const size_t expectedTokenID) const;

        /// test whether the current token's ID matches the expected ID
        bool testCurrentTokenID(const size_t expectedTokenID) const
        {
            return mActiveTokenState->tokenQue[mPass2TokenQuePosition].tokenID == expectedTokenID;
        }
        /// advance the pass 2 queue position by one token
        void skipToken(void) const;
        /// step the pass 2 queue position back by one token
        void replaceToken(void);
        /// advance to the next token and return its float constant
        float getNextTokenValue(void) const
        {
            skipToken();
            return getCurrentTokenValue();
        }
        /// return the float constant associated with the current token
        float getCurrentTokenValue(void) const;
        /// advance to the next token and return its label
        const String& getNextTokenLabel(void) const
        {
            skipToken();
            return getCurrentTokenLabel();
        }
        /// return the label associated with the current token
        const String& getCurrentTokenLabel(void) const;
        /// advance to the next token and return its token ID
        size_t getNextTokenID(void) const { return getNextToken().tokenID; }
        /// return the current token's ID
        size_t getCurrentTokenID(void) const { return getCurrentToken().tokenID; }
        /// advance to the next token and return its lexeme text
        const String& getNextTokenLexeme(void) const
        {
            skipToken();
            return getCurrentTokenLexeme();
        }
        /// return the lexeme text of the current token
        const String& getCurrentTokenLexeme(void) const;
        /// number of tokens in the pass 2 queue
        size_t getPass2TokenQueCount(void) const;
        /// number of tokens remaining before the next token that has an action
        size_t getRemainingTokensForAction(void) const;
        /// set the pass 2 queue position, optionally activating the associated action
        void setPass2TokenQuePosition(size_t pos, const bool activateAction = false);
        /// current pass 2 queue position
        size_t getPass2TokenQuePosition(void) const { return mPass2TokenQuePosition; }
        /// set the queue position of the next token action to be triggered
        bool setNextActionQuePosition(size_t pos, const bool search = false);
        /// add a lexeme token definition and return its token ID
        size_t addLexemeToken(const String& lexeme, const size_t token, const bool hasAction = false, const bool caseSensitive = false);

        /** set up the client's token state: compiles the grammar returned by
            getClientBNFGrammer() into rule paths.  This is only done once per
            grammar name; subsequent instances reuse the cached rule base. */
        void setClientBNFGrammer(void);



        /// find the end of the current line in the source
        void findEOL();

        /// check whether the text at the current position is a float; returns its value and character length
        bool isFloatValue(float& fvalue, size_t& charsize) const;

        /// check whether the source at the current position is part of a character label
        bool isCharacterLabel(const size_t rulepathIDX);
        /// check whether the lexeme matches the source at the current position
        bool isLexemeMatch(const String& lexeme, const bool caseSensitive) const;
        /// true when the character position has reached the end of the source
        bool isEndOfSource() const { return mCharPos >= mEndOfSource; }
        /// advance the character position to the start of the next lexeme
        bool positionToNextLexeme();
        /// process a rule path during pass 1; returns true if the rule was matched
        bool processRulePath( size_t rulepathIDX);


        /// set the active context bit pattern
        void setActiveContexts(const uint contexts){ mActiveContexts = contexts; }

        /// skip over comments in the source
        void skipComments();

        /// skip over end-of-line characters
        void skipEOL();

        /// skip over white space
        void skipWhiteSpace();


        /// check whether the token in the rule path is valid at the current source position
        bool ValidateToken(const size_t rulepathIDX, const size_t activeRuleID);

        /// verify that every non-terminal token in the grammar has a rule path
        void verifyTokenRuleLinks(const String& grammerName);
        /// check whether a token action should be triggered at the current pass 2 position
        void checkTokenActionTrigger(void);
        /// regenerate BNF grammar text from a rule path (useful for debugging)
        String getBNFGrammerTextFromRulePath(size_t ruleID, const size_t level = 0);


    private:
        // used for interpreting BNF script
        // keep it as static so that only one structure is created
        // no matter how many times this class is instantiated.
        static TokenState mBNFTokenState;
        // maintain a map of client BNF grammars: one token state per grammar name
        typedef std::map<String, TokenState> TokenStateContainer;
        static TokenStateContainer mClientTokenStates;
        /// trigger the action associated with the previous action token
        void activatePreviousTokenAction(void);
        /// initialise the token state used to interpret BNF grammar scripts
        void initBNFCompiler(void);
        /// build the client's rule paths from its BNF grammar text
        void buildClientBNFRulePaths(void);
        void modifyLastRule(const OperationType pendingRuleOp, const size_t tokenID);
        /// get the token ID of a client lexeme, adding it if it does not yet exist
        size_t getClientLexemeTokenID(const String& lexeme, const bool isCaseSensitive = false);
        /// extract a non-terminal token from the BNF grammar being interpreted
        void extractNonTerminal(const OperationType pendingRuleOp);
        /// extract a terminal token from the BNF grammar being interpreted
        void extractTerminal(const OperationType pendingRuleOp, const bool notoken = false);
        /// extract a set construct from the BNF grammar being interpreted
        void extractSet(const OperationType pendingRuleOp);
        /// extract a numeric constant from the BNF grammar being interpreted
        void extractNumericConstant(const OperationType pendingRuleOp);
        void setConditionalTokenInsert(void);
        String getLexemeText(size_t& ruleID, const size_t level = 0);


    public:

        /// constructor
        Compiler2Pass();
        virtual ~Compiler2Pass() {}

        /** compile the source text against the client grammar; returns true if
            both passes succeed */
        bool compile(const String& source, const String& sourceName);
        /// gets the BNF grammar text that defines the client script syntax; implemented by the subclass
        virtual const String& getClientBNFGrammer(void) const = 0;

        /// gets a unique name for the client grammar; implemented by the subclass
        virtual const String& getClientGrammerName(void) const = 0;

    };

}

#endif
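For orientation, here is a minimal sketch of how a client compiler might derive from Compiler2Pass, based only on the declarations above. It is not part of the OGRE sources: the class name MyScriptCompiler, the token IDs ID_MYTOKEN and ID_AUTOTOKENSTART, the lexeme "mytoken" and the grammar string are all hypothetical illustrations. Real clients such as OGRE's script compilers follow the same pattern of implementing the pure virtual methods and then calling compile().

// Hypothetical example, not from the OGRE sources: a minimal Compiler2Pass client.
#include "OgreCompiler2Pass.h"

namespace Ogre {

    class MyScriptCompiler : public Compiler2Pass
    {
    public:
        MyScriptCompiler()
            : mGrammarName("MyScript")
            // illustrative grammar: a script is a repetition of 'mytoken' statements
            , mGrammar("<Script> ::= {<Statement>}\n<Statement> ::= 'mytoken'\n")
        {
            // build (or reuse) the rule paths for this grammar name
            setClientBNFGrammer();
        }

        const String& getClientBNFGrammer(void) const { return mGrammar; }
        const String& getClientGrammerName(void) const { return mGrammarName; }

    protected:
        // client token IDs must start after the internal BNF token IDs
        enum TokenID { ID_MYTOKEN = BNF_AUTOTOKENSTART, ID_AUTOTOKENSTART };

        size_t getAutoTokenIDStart(void) const { return ID_AUTOTOKENSTART; }

        void setupTokenDefinitions(void)
        {
            // register the lexeme and attach an action to it
            addLexemeToken("mytoken", ID_MYTOKEN, true);
        }

        void executeTokenAction(const size_t tokenID)
        {
            // called during pass 2 for every token that has an action
            switch (tokenID)
            {
            case ID_MYTOKEN:
                // react to the token here
                break;
            }
        }

    private:
        String mGrammarName;
        String mGrammar;
    };
}

A caller would then construct MyScriptCompiler once and call compile(scriptText, "myscript.script") for each script: the base class runs pass 1 to build the token queue from the rule paths and pass 2 to invoke executeTokenAction() for every token that was registered with an action.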
