/*
-----------------------------------------------------------------------------
This source file is part of OGRE
(Object-oriented Graphics Rendering Engine)
For the latest info, see http://www.stevestreeting.com/ogre/

Copyright (c) 2000-2005 The OGRE Team
Also see acknowledgements in Readme.html

This program is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free Software
Foundation; either version 2 of the License, or (at your option) any later
version.

This program is distributed in the hope that it will be useful, but WITHOUT
ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details.

You should have received a copy of the GNU General Public License along with
this program; if not, write to the Free Software Foundation, Inc., 59 Temple
Place - Suite 330, Boston, MA 02111-1307, USA, or go to
http://www.gnu.org/copyleft/gpl.html.
-----------------------------------------------------------------------------
*/


#ifndef __Compiler2Pass_H__
#define __Compiler2Pass_H__

#include <vector>
#include <map>
#include "OgrePrerequisites.h"

namespace Ogre {

    class _OgreExport Compiler2Pass
    {

    protected:

        // BNF operation types
        enum OperationType {otUNKNOWN, otRULE, otAND, otOR, otOPTIONAL,
            otREPEAT, otDATA, otNOT_TEST, otINSERT_TOKEN, otEND};

        /// A single node in a BNF rule path: an operation and the token it applies to.
        struct TokenRule
        {
            OperationType operation;
            size_t tokenID;

            TokenRule(void) : operation(otUNKNOWN), tokenID(0) {}
            TokenRule(const OperationType ot, const size_t token)
                : operation(ot), tokenID(token) {}
        };

        typedef std::vector<TokenRule> TokenRuleContainer;
        typedef TokenRuleContainer::iterator TokenRuleIterator;

        static const size_t SystemTokenBase = 1000;
        enum SystemRuleToken {
            _no_token_ = SystemTokenBase,
            _character_,
            _value_,
            _no_space_skip_
        };

        // Token IDs for the built-in BNF grammar used to interpret client grammars.
        enum BNF_ID {BNF_UNKOWN = 0,
            BNF_SYNTAX, BNF_RULE, BNF_IDENTIFIER, BNF_IDENTIFIER_RIGHT, BNF_IDENTIFIER_CHARACTERS, BNF_ID_BEGIN, BNF_ID_END,
            BNF_CONSTANT_BEGIN, BNF_SET_RULE, BNF_EXPRESSION,
            BNF_AND_TERM, BNF_OR_TERM, BNF_TERM, BNF_TERM_ID, BNF_CONSTANT, BNF_OR, BNF_TERMINAL_SYMBOL, BNF_TERMINAL_START,
            BNF_REPEAT_EXPRESSION, BNF_REPEAT_BEGIN, BNF_REPEAT_END, BNF_SET, BNF_SET_BEGIN, BNF_SET_END,
            BNF_NOT_TEST, BNF_NOT_TEST_BEGIN, BNF_CONDITIONAL_TOKEN_INSERT, BNF_OPTIONAL_EXPRESSION,
            BNF_NOT_EXPRESSION, BNF_NOT_CHK,
            BNF_OPTIONAL_BEGIN, BNF_OPTIONAL_END, BNF_NO_TOKEN_START, BNF_SINGLEQUOTE, BNF_SINGLE_QUOTE_EXC, BNF_SET_END_EXC,
            BNF_ANY_CHARACTER, BNF_SPECIAL_CHARACTERS1,
            BNF_SPECIAL_CHARACTERS2, BNF_WHITE_SPACE_CHK,

            BNF_LETTER, BNF_LETTER_DIGIT, BNF_DIGIT, BNF_WHITE_SPACE,
            BNF_ALPHA_SET, BNF_NUMBER_SET, BNF_SPECIAL_CHARACTER_SET1,
            BNF_SPECIAL_CHARACTER_SET2, BNF_SPECIAL_CHARACTER_SET3, BNF_NOT_CHARS
        };


        /// Associates a lexeme with a token ID, an optional action and, for non-terminals, a rule.
        struct LexemeTokenDef
        {
            size_t ID;
            bool hasAction;
            bool isNonTerminal;
            size_t ruleID;
            bool isCaseSensitive;
            String lexeme;

            LexemeTokenDef(void) : ID(0), hasAction(false), isNonTerminal(false), ruleID(0), isCaseSensitive(false) {}
            LexemeTokenDef( const size_t ID, const String& lexeme, const bool hasAction = false, const bool caseSensitive = false )
                : ID(ID)
                , hasAction(hasAction)
                , isNonTerminal(false)
                , ruleID(0)
                , isCaseSensitive(caseSensitive)
                , lexeme(lexeme)
            {
            }

        };

        typedef std::vector<LexemeTokenDef> LexemeTokenDefContainer;
        typedef LexemeTokenDefContainer::iterator LexemeTokenDefIterator;

        typedef std::map<std::string, size_t> LexemeTokenMap;
        typedef LexemeTokenMap::iterator TokenKeyIterator;


        /// A token instance generated in pass 1 and consumed in pass 2.
        struct TokenInst
        {
            size_t NTTRuleID;
            size_t tokenID;
            size_t line;
            size_t pos;
            bool found;
        };

        typedef std::vector<TokenInst> TokenInstContainer;
        typedef TokenInstContainer::iterator TokenInstIterator;

        // token queue, definitions, rules
        struct TokenState
        {
            TokenInstContainer tokenQue;
            LexemeTokenDefContainer lexemeTokenDefinitions;
            TokenRuleContainer rootRulePath;
            LexemeTokenMap lexemeTokenMap;
        };

        /// token state of the client grammar
        TokenState* mClientTokenState;

        /// currently active token state (the built-in BNF state or the client state)
        TokenState* mActiveTokenState;
        mutable size_t mPass2TokenQuePosition;
        size_t mPreviousActionQuePosition;
        size_t mNextActionQuePosition;

        /// source text currently being compiled
        const String* mSource;
        /// name of the source, used in error reporting
        String mSourceName;
        size_t mEndOfSource;

        size_t mCurrentLine;
        size_t mCharPos;
        size_t mErrorCharPos;

        std::map<size_t, float> mConstants;
        std::map<size_t, String> mLabels;
        bool mLabelIsActive;
        size_t mActiveLabelKey;
        bool mNoSpaceSkip;
        bool mNoTerminalToken;
        size_t mInsertTokenID;

        uint mActiveContexts;

        /// Pass 1: scan the source against the root rule path and build the token queue.
        bool doPass1();

        /// Pass 2: walk the token queue and call executeTokenAction() for action tokens.
        bool doPass2();

        /// Client hook: take action on the given token during pass 2.
        virtual void executeTokenAction(const size_t tokenID) = 0;
        /// Client hook: register lexeme/token definitions (see addLexemeToken).
        virtual void setupTokenDefinitions(void) = 0;
        const TokenInst& getNextToken(const size_t expectedTokenID = 0) const
        {
            skipToken();
            return getCurrentToken(expectedTokenID);
        }
        const TokenInst& getCurrentToken(const size_t expectedTokenID = 0) const;
        bool testNextTokenID(const size_t expectedTokenID) const;

        bool testCurrentTokenID(const size_t expectedTokenID) const
        {
            return mActiveTokenState->tokenQue[mPass2TokenQuePosition].tokenID == expectedTokenID;
        }
        void skipToken(void) const;
        void replaceToken(void);
        float getNextTokenValue(void) const
        {
            skipToken();
            return getCurrentTokenValue();
        }
        float getCurrentTokenValue(void) const;
        const String& getNextTokenLabel(void) const
        {
            skipToken();
            return getCurrentTokenLabel();
        }
        const String& getCurrentTokenLabel(void) const;
        size_t getNextTokenID(void) const { return getNextToken().tokenID; }
        size_t getCurrentTokenID(void) const { return getCurrentToken().tokenID; }
        const String& getNextTokenLexeme(void) const
        {
            skipToken();
            return getCurrentTokenLexeme();
        }
        const String& getCurrentTokenLexeme(void) const;
        size_t getPass2TokenQueCount(void) const;
        size_t getRemainingTokensForAction(void) const;
        void setPass2TokenQuePosition(size_t pos, const bool activateAction = false);
        size_t getPass2TokenQuePosition(void) const { return mPass2TokenQuePosition; }
        bool setNextActionQuePosition(size_t pos, const bool search = false);
        void addLexemeToken(const String& lexeme, const size_t token, const bool hasAction = false, const bool caseSensitive = false);

        void setClientBNFGrammer(void);


        void findEOL();

        bool isFloatValue(float& fvalue, size_t& charsize) const;

        bool isCharacterLabel(const size_t rulepathIDX);
        bool isLexemeMatch(const String& lexeme, const bool caseSensitive) const;
        bool positionToNextLexeme();
        bool processRulePath( size_t rulepathIDX);


        void setActiveContexts(const uint contexts){ mActiveContexts = contexts; }

        void skipComments();

        void skipEOL();

        void skipWhiteSpace();


        bool ValidateToken(const size_t rulepathIDX, const size_t activeRuleID);

        void verifyTokenRuleLinks(const String& grammerName);
        void checkTokenActionTrigger(void);
        String getBNFGrammerTextFromRulePath(size_t ruleID, const size_t level = 0);


    private:
        // used for interpreting BNF script
        // keep it as static so that only one structure is created
        // no matter how many times this class is instantiated.
        static TokenState mBNFTokenState;
        // maintain a map of client BNF grammars
        typedef std::map<String, TokenState> TokenStateContainer;
        static TokenStateContainer mClientTokenStates;
        void activatePreviousTokenAction(void);
        void initBNFCompiler(void);
        void buildClientBNFRulePaths(void);
        void modifyLastRule(const OperationType pendingRuleOp, const size_t tokenID);
        size_t getClientLexemeTokenID(const String& lexeme, const bool isCaseSensitive = false);
        void extractNonTerminal(const OperationType pendingRuleOp);
        void extractTerminal(const OperationType pendingRuleOp, const bool notoken = false);
        void extractSet(const OperationType pendingRuleOp);
        void extractNumericConstant(const OperationType pendingRuleOp);
        void setConditionalTokenInsert(void);
        String getLexemeText(size_t& ruleID, const size_t level = 0);

    public:

        Compiler2Pass();
        virtual ~Compiler2Pass() {}

        /// Compile the source text: runs pass 1 and, if it succeeds, pass 2.
        bool compile(const String& source, const String& sourceName);
        /// Client hook: return the BNF grammar text for the client language.
        virtual const String& getClientBNFGrammer(void) = 0;

        /// Client hook: return the name identifying the client grammar.
        virtual const String& getClientGrammerName(void) const = 0;

    };

}

#endif
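
Usage sketch (not part of the header above). The class below is a hypothetical example of how a client compiler might plug into Compiler2Pass: it implements the four pure virtual methods, registers its lexemes through addLexemeToken(), and hands source text to compile(), which is assumed to drive both passes and to invoke setupTokenDefinitions() and getClientBNFGrammer() as needed. The class name, token IDs, and grammar fragment are invented for illustration; the exact BNF dialect accepted is defined by the built-in grammar (the BNF_* rules) declared above.

// ---------------------------------------------------------------------------
// Hypothetical usage sketch: ExampleCompiler, its token IDs and its grammar
// strings are invented for illustration and are not part of OGRE.
// ---------------------------------------------------------------------------
#include "OgreCompiler2Pass.h"

namespace Ogre {

    class ExampleCompiler : public Compiler2Pass
    {
    public:
        ExampleCompiler()
            : mGrammarName("ExampleLanguage")
            , mGrammar("<Program> ::= {<Statement>}\n")   // placeholder BNF fragment only
        {}

        // Public entry point: Compiler2Pass::compile() drives both passes.
        bool parse(const String& script, const String& name)
        {
            return compile(script, name);
        }

        // The framework queries the client for its grammar text and name.
        const String& getClientBNFGrammer(void) { return mGrammar; }
        const String& getClientGrammerName(void) const { return mGrammarName; }

    protected:
        // Token IDs for the example language; kept below SystemTokenBase so
        // they cannot collide with the system rule tokens declared above.
        enum TokenID { ID_UNKNOWN = 0, ID_PRINT, ID_OPENBRACE, ID_CLOSEBRACE };

        // Register the lexemes the tokenizer should recognise.  The third
        // argument flags tokens whose appearance in the pass 2 queue should
        // trigger executeTokenAction().
        void setupTokenDefinitions(void)
        {
            addLexemeToken("print", ID_PRINT, true);
            addLexemeToken("{", ID_OPENBRACE);
            addLexemeToken("}", ID_CLOSEBRACE);
        }

        // Called during pass 2 for each action token in the queue; token
        // queue accessors such as getNextTokenLabel() or getNextTokenValue()
        // would be used here to read the token's operands.
        void executeTokenAction(const size_t tokenID)
        {
            switch (tokenID)
            {
            case ID_PRINT:
                // handle a 'print' statement
                break;
            default:
                break;
            }
        }

    private:
        String mGrammarName;
        String mGrammar;
    };

}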