slight keyword reorder + helper functions

This commit is contained in:
mrexodia 2016-11-21 23:40:49 +01:00
parent 119546444d
commit 5c666ffce9
No known key found for this signature in database
GPG Key ID: FC89E0AAA0C1AAD8
3 changed files with 45 additions and 7 deletions

View File

@ -23,15 +23,12 @@ DEF_KEYWORD(typedef)
DEF_KEYWORD(sizeof)
DEF_KEYWORD(void)
DEF_KEYWORD(union)
DEF_KEYWORD(local)
DEF_KEYWORD(signed)
DEF_KEYWORD(unsigned)
DEF_KEYWORD(local)
DEF_KEYWORD(bool)
DEF_KEYWORD(true)
DEF_KEYWORD(false)
DEF_KEYWORD(char)
DEF_KEYWORD(uchar)
DEF_KEYWORD(wchar_t)
@ -79,5 +76,7 @@ DEF_KEYWORD(ULONG_PTR)
DEF_KEYWORD(VQUAD)
DEF_KEYWORD(UINT32)
DEF_KEYWORD(true)
DEF_KEYWORD(false)
DEF_KEYWORD(nullptr)
DEF_KEYWORD(NULL)

View File

@ -45,6 +45,13 @@ bool Lexer::ReadInputFile(const std::string & filename)
return FileHelper::ReadAllData(filename, mInput);
}
void Lexer::SetInputData(const std::string & data)
{
    // Reset all per-run lexer state, then load the new input in one shot.
    resetLexerState();
    // Range insert is equivalent to pushing each character individually.
    mInput.insert(mInput.end(), data.begin(), data.end());
}
bool Lexer::DoLexing(std::vector<TokenState> & tokens, std::string & error)
{
while (true)
@ -84,7 +91,7 @@ bool Lexer::Test(const std::function<void(const std::string & line)> & lexEnum,
sprintf_s(newlineText, "\n%d: ", line + 1);
toks.append(newlineText);
}
toks.append(tokString(tok));
toks.append(TokString(tok));
appendCh(toks, ' ');
lexEnum(toks);
} while (tok != tok_eof && tok != tok_error);
@ -426,7 +433,32 @@ void Lexer::setupTokenMaps()
#undef DEF_OP_SINGLE
}
std::string Lexer::tokString(Token tok)
std::string Lexer::TokString(const TokenState & ts)
{
switch(ts.Token)
{
case tok_eof: return "tok_eof";
case tok_error: return StringUtils::sprintf("error(line %d, col %d, \"%s\")", ts.CurLine + 1, ts.LineIndex, mError.c_str());
case tok_identifier: return ts.IdentifierStr;
case tok_number: return StringUtils::sprintf(mIsHexNumberVal ? "0x%llX" : "%llu", ts.NumberVal);
case tok_stringlit: return StringUtils::sprintf("\"%s\"", StringUtils::Escape(ts.StringLit).c_str());
case tok_charlit:
{
std::string s;
s = ts.CharLit;
return StringUtils::sprintf("'%s'", StringUtils::Escape(s).c_str());
}
default:
{
auto found = mReverseTokenMap.find(ts.Token);
if(found != mReverseTokenMap.end())
return found->second;
return "<UNKNOWN TOKEN>";
}
}
}
std::string Lexer::TokString(Token tok)
{
switch (tok)
{

View File

@ -45,12 +45,20 @@ public:
size_t CurLine = 0;
size_t LineIndex = 0;
bool IsType() const
{
    // True when Token lies in the contiguous run of type keywords.
    // Relies on the token enum ordering tok_signed .. tok_UINT32.
    return !(Token < tok_signed || Token > tok_UINT32);
}
};
explicit Lexer();
bool ReadInputFile(const std::string & filename);
void SetInputData(const std::string & data);
bool DoLexing(std::vector<TokenState> & tokens, std::string & error);
bool Test(const std::function<void(const std::string & line)> & lexEnum, bool output = true);
std::string TokString(Token tok);
std::string TokString(const TokenState & ts);
private:
TokenState mState;
@ -71,7 +79,6 @@ private:
void setupTokenMaps();
Token reportError(const std::string & error);
void reportWarning(const std::string & warning);
std::string tokString(Token tok);
int peekChar(size_t distance = 0);
int readChar();
bool checkString(const std::string & expected);