mirror of https://github.com/x64dbg/btparser

slight keyword reorder + helper functions

parent 119546444d
commit 5c666ffce9
@@ -23,15 +23,12 @@ DEF_KEYWORD(typedef)
 DEF_KEYWORD(sizeof)
 DEF_KEYWORD(void)
 DEF_KEYWORD(union)
+DEF_KEYWORD(local)
 
 DEF_KEYWORD(signed)
 DEF_KEYWORD(unsigned)
-DEF_KEYWORD(local)
-
 DEF_KEYWORD(bool)
-DEF_KEYWORD(true)
-DEF_KEYWORD(false)
 
 DEF_KEYWORD(char)
 DEF_KEYWORD(uchar)
 DEF_KEYWORD(wchar_t)
@@ -79,5 +76,7 @@ DEF_KEYWORD(ULONG_PTR)
 DEF_KEYWORD(VQUAD)
 DEF_KEYWORD(UINT32)
 
+DEF_KEYWORD(true)
+DEF_KEYWORD(false)
 DEF_KEYWORD(nullptr)
 DEF_KEYWORD(NULL)
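
The two hunks above appear to come from btparser's keyword list, an X-macro header where each DEF_KEYWORD(x) entry expands into a token. The reorder is not cosmetic: DEF_KEYWORD(local) moves out of the signed..UINT32 block and DEF_KEYWORD(true)/DEF_KEYWORD(false) move to the tail of the list, so the type keywords form one contiguous run of enum values and a plain range check can classify them (which is what the new TokenState::IsType() in the header hunk further down relies on). A minimal, self-contained sketch of the idea; the KEYWORDS list and enum layout here are illustrative stand-ins, not btparser's actual definitions:

    // Illustrative X-macro expansion; only the DEF_KEYWORD pattern and the
    // tok_signed..tok_UINT32 range test are taken from the commit, the rest
    // is assumed for the sake of a runnable example.
    #include <cstdio>

    #define KEYWORDS(X) \
        X(void) X(union) X(local) \
        X(signed) X(unsigned) X(bool) X(char) X(UINT32) \
        X(true) X(false)

    enum Token
    {
        tok_eof,
        tok_identifier,
        // Each keyword becomes tok_<keyword>, in list order, so the order of
        // the DEF_KEYWORD lines decides which tokens fall inside a range.
    #define DEF_KEYWORD(keyword) tok_##keyword,
        KEYWORDS(DEF_KEYWORD)
    #undef DEF_KEYWORD
    };

    static bool isTypeToken(Token t)
    {
        // Valid only because the type keywords are contiguous in the list.
        return t >= tok_signed && t <= tok_UINT32;
    }

    int main()
    {
        std::printf("%d %d %d\n",
                    isTypeToken(tok_unsigned), // 1: inside the type block
                    isTypeToken(tok_local),    // 0: moved before the block
                    isTypeToken(tok_true));    // 0: moved after the block
    }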
@@ -45,6 +45,13 @@ bool Lexer::ReadInputFile(const std::string & filename)
     return FileHelper::ReadAllData(filename, mInput);
 }
 
+void Lexer::SetInputData(const std::string & data)
+{
+    resetLexerState();
+    for(auto & ch : data)
+        mInput.push_back(ch);
+}
+
 bool Lexer::DoLexing(std::vector<TokenState> & tokens, std::string & error)
 {
     while (true)
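
The new Lexer::SetInputData resets the lexer state and copies the given string into mInput, so a caller can tokenize an in-memory buffer without going through ReadInputFile. A hedged usage sketch, assuming the header shown further down is btparser's lexer.h and that DoLexing reports failure through its error string:

    #include "lexer.h"   // assumed btparser header exposing Lexer
    #include <cstdio>
    #include <string>
    #include <vector>

    static void lexFromMemory(const std::string & source)
    {
        Lexer lexer;
        lexer.SetInputData(source);   // instead of lexer.ReadInputFile(path)

        std::vector<Lexer::TokenState> tokens;
        std::string error;
        if(!lexer.DoLexing(tokens, error))
        {
            std::printf("lex error: %s\n", error.c_str());
            return;
        }
        std::printf("lexed %zu tokens\n", tokens.size());
    }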
@@ -84,7 +91,7 @@ bool Lexer::Test(const std::function<void(const std::string & line)> & lexEnum,
             sprintf_s(newlineText, "\n%d: ", line + 1);
             toks.append(newlineText);
         }
-        toks.append(tokString(tok));
+        toks.append(TokString(tok));
         appendCh(toks, ' ');
         lexEnum(toks);
     } while (tok != tok_eof && tok != tok_error);
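
The only functional change in this hunk is the call site: the token dump now goes through the public TokString instead of the old private tokString (whose declaration is removed in the last hunk below). Test drives the lexer over the loaded input and reports formatted token text through the lexEnum callback; a possible caller, with the callback body invented for illustration:

    #include "lexer.h"   // assumed btparser header
    #include <cstdio>
    #include <string>

    static void printTokenDump(Lexer & lexer)
    {
        lexer.Test([](const std::string & line)
        {
            std::puts(line.c_str());   // print whatever Test reports
        }, /*output=*/false);
    }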
@@ -426,7 +433,32 @@ void Lexer::setupTokenMaps()
 #undef DEF_OP_SINGLE
 }
 
-std::string Lexer::tokString(Token tok)
+std::string Lexer::TokString(const TokenState & ts)
+{
+    switch(ts.Token)
+    {
+    case tok_eof: return "tok_eof";
+    case tok_error: return StringUtils::sprintf("error(line %d, col %d, \"%s\")", ts.CurLine + 1, ts.LineIndex, mError.c_str());
+    case tok_identifier: return ts.IdentifierStr;
+    case tok_number: return StringUtils::sprintf(mIsHexNumberVal ? "0x%llX" : "%llu", ts.NumberVal);
+    case tok_stringlit: return StringUtils::sprintf("\"%s\"", StringUtils::Escape(ts.StringLit).c_str());
+    case tok_charlit:
+    {
+        std::string s;
+        s = ts.CharLit;
+        return StringUtils::sprintf("'%s'", StringUtils::Escape(s).c_str());
+    }
+    default:
+    {
+        auto found = mReverseTokenMap.find(ts.Token);
+        if(found != mReverseTokenMap.end())
+            return found->second;
+        return "<UNKNOWN TOKEN>";
+    }
+    }
+}
+
+std::string Lexer::TokString(Token tok)
 {
     switch (tok)
     {
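
The new TokString(const TokenState & ts) overload renders a token together with its captured payload: identifier text, a decimal or hex number, escaped string/char literals, a positional error message, or the reverse-mapped keyword/operator spelling. A sketch of using it to print a positional token listing; the position fields come from the TokenState definition in the header hunk below, while the assumption that DoLexing fills them per token is mine:

    #include "lexer.h"   // assumed btparser header
    #include <cstdio>
    #include <string>
    #include <vector>

    static void listTokens(Lexer & lexer)
    {
        std::vector<Lexer::TokenState> tokens;
        std::string error;
        if(!lexer.DoLexing(tokens, error))
            return;

        for(const auto & ts : tokens)
        {
            // CurLine/LineIndex are the 0-based position members of TokenState;
            // TokString(ts) renders the token's textual form.
            std::printf("%zu:%zu %s\n", ts.CurLine + 1, ts.LineIndex, lexer.TokString(ts).c_str());
        }
    }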
@@ -45,12 +45,20 @@ public:
 
         size_t CurLine = 0;
         size_t LineIndex = 0;
+
+        bool IsType() const
+        {
+            return Token >= tok_signed && Token <= tok_UINT32;
+        }
     };
 
     explicit Lexer();
     bool ReadInputFile(const std::string & filename);
+    void SetInputData(const std::string & data);
     bool DoLexing(std::vector<TokenState> & tokens, std::string & error);
     bool Test(const std::function<void(const std::string & line)> & lexEnum, bool output = true);
+    std::string TokString(Token tok);
+    std::string TokString(const TokenState & ts);
 
 private:
     TokenState mState;
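
Taken together, the header changes promote both TokString overloads to the public interface, add SetInputData next to ReadInputFile, and give TokenState an IsType() helper whose range test (tok_signed through tok_UINT32) is exactly what the keyword reorder at the top of the commit makes valid. A small consumer-side sketch of IsType(); the counting logic is invented for illustration:

    #include "lexer.h"   // assumed btparser header
    #include <cstddef>
    #include <cstdio>
    #include <vector>

    static void countTypeKeywords(const std::vector<Lexer::TokenState> & tokens)
    {
        std::size_t typeCount = 0;
        for(const auto & ts : tokens)
        {
            // IsType() is true exactly for tokens in the contiguous
            // tok_signed..tok_UINT32 block of the keyword list.
            if(ts.IsType())
                typeCount++;
        }
        std::printf("type keywords seen: %zu\n", typeCount);
    }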
@@ -71,7 +79,6 @@ private:
     void setupTokenMaps();
     Token reportError(const std::string & error);
     void reportWarning(const std::string & warning);
-    std::string tokString(Token tok);
     int peekChar(size_t distance = 0);
     int readChar();
     bool checkString(const std::string & expected);