diff --git a/btparser/keywords.h b/btparser/keywords.h
index 7a56efb..0bb0898 100644
--- a/btparser/keywords.h
+++ b/btparser/keywords.h
@@ -23,15 +23,12 @@ DEF_KEYWORD(typedef)
 DEF_KEYWORD(sizeof)
 DEF_KEYWORD(void)
 DEF_KEYWORD(union)
+DEF_KEYWORD(local)
 
 DEF_KEYWORD(signed)
 DEF_KEYWORD(unsigned)
-DEF_KEYWORD(local)
 DEF_KEYWORD(bool)
-DEF_KEYWORD(true)
-DEF_KEYWORD(false)
-
 DEF_KEYWORD(char)
 DEF_KEYWORD(uchar)
 DEF_KEYWORD(wchar_t)
 
@@ -79,5 +76,7 @@ DEF_KEYWORD(ULONG_PTR)
 DEF_KEYWORD(VQUAD)
 DEF_KEYWORD(UINT32)
+DEF_KEYWORD(true)
+DEF_KEYWORD(false)
 
 DEF_KEYWORD(nullptr)
 DEF_KEYWORD(NULL)
\ No newline at end of file
diff --git a/btparser/lexer.cpp b/btparser/lexer.cpp
index dd7daf5..efa7e53 100644
--- a/btparser/lexer.cpp
+++ b/btparser/lexer.cpp
@@ -45,6 +45,13 @@ bool Lexer::ReadInputFile(const std::string & filename)
     return FileHelper::ReadAllData(filename, mInput);
 }
 
+void Lexer::SetInputData(const std::string & data)
+{
+    resetLexerState();
+    for(auto & ch : data)
+        mInput.push_back(ch);
+}
+
 bool Lexer::DoLexing(std::vector<TokenState> & tokens, std::string & error)
 {
     while (true)
@@ -84,7 +91,7 @@ bool Lexer::Test(const std::function<void(const std::string & line)> & lexEnum, bool output)
             sprintf_s(newlineText, "\n%d: ", line + 1);
             toks.append(newlineText);
         }
-        toks.append(tokString(tok));
+        toks.append(TokString(tok));
         appendCh(toks, ' ');
         lexEnum(toks);
     } while (tok != tok_eof && tok != tok_error);
@@ -426,7 +433,32 @@ void Lexer::setupTokenMaps()
 #undef DEF_OP_SINGLE
 }
 
-std::string Lexer::tokString(Token tok)
+std::string Lexer::TokString(const TokenState & ts)
+{
+    switch(ts.Token)
+    {
+    case tok_eof: return "tok_eof";
+    case tok_error: return StringUtils::sprintf("error(line %d, col %d, \"%s\")", ts.CurLine + 1, ts.LineIndex, mError.c_str());
+    case tok_identifier: return ts.IdentifierStr;
+    case tok_number: return StringUtils::sprintf(mIsHexNumberVal ? "0x%llX" : "%llu", ts.NumberVal);
+    case tok_stringlit: return StringUtils::sprintf("\"%s\"", StringUtils::Escape(ts.StringLit).c_str());
+    case tok_charlit:
+    {
+        std::string s;
+        s = ts.CharLit;
+        return StringUtils::sprintf("'%s'", StringUtils::Escape(s).c_str());
+    }
+    default:
+    {
+        auto found = mReverseTokenMap.find(ts.Token);
+        if(found != mReverseTokenMap.end())
+            return found->second;
+        return "";
+    }
+    }
+}
+
+std::string Lexer::TokString(Token tok)
 {
     switch (tok)
     {
diff --git a/btparser/lexer.h b/btparser/lexer.h
index 11c55aa..c5046c6 100644
--- a/btparser/lexer.h
+++ b/btparser/lexer.h
@@ -45,12 +45,20 @@ public:
         size_t CurLine = 0;
         size_t LineIndex = 0;
+
+        bool IsType() const
+        {
+            return Token >= tok_signed && Token <= tok_UINT32;
+        }
     };
 
     explicit Lexer();
 
     bool ReadInputFile(const std::string & filename);
+    void SetInputData(const std::string & data);
     bool DoLexing(std::vector<TokenState> & tokens, std::string & error);
     bool Test(const std::function<void(const std::string & line)> & lexEnum, bool output = true);
+    std::string TokString(Token tok);
+    std::string TokString(const TokenState & ts);
 
 private:
     TokenState mState;
@@ -71,7 +79,6 @@ private:
     void setupTokenMaps();
     Token reportError(const std::string & error);
     void reportWarning(const std::string & warning);
-    std::string tokString(Token tok);
     int peekChar(size_t distance = 0);
     int readChar();
     bool checkString(const std::string & expected);