// BoxScript lexer implementation — tokenizes source text into LexToken streams.
#include "lexer.hpp"
|
|
namespace BoxScript::Lexer
|
|
{
|
|
LexToken::LexToken()
|
|
{
|
|
this->text="";
|
|
this->type = ERROR;
|
|
}
|
|
// Classify `_text` by its first character: a leading ASCII digit makes the
// token a NUMBER, anything else an IDENTIFER (spelling matches the enum
// declared in lexer.hpp); an empty string yields ERROR.
LexToken::LexToken(std::string _text)
{
    text = _text;
    if (_text.empty())
    {
        type = ERROR;
        return;
    }
    const char first = _text[0];
    type = (first >= '0' && first <= '9') ? NUMBER : IDENTIFER;
}
class Lexer
|
|
{
|
|
private:
|
|
std::string text;
|
|
int filePos;
|
|
void read_char(char* chr,bool* esc)
|
|
{
|
|
int txt = text[filePos++];
|
|
if(txt == '\\')
|
|
{
|
|
txt = text[filePos++];
|
|
if(txt == 'x')
|
|
{
|
|
std::string txt="0x";
|
|
filePos+=2;
|
|
for(int i = 0;i<2;i++)
|
|
{
|
|
txt += text[filePos++];
|
|
}
|
|
*chr= (char)std::stol(txt,0,0);
|
|
}else
|
|
{
|
|
if(txt == 'n')
|
|
{
|
|
*chr= '\n';
|
|
}else
|
|
if(txt == 'r')
|
|
{
|
|
*chr='\r';
|
|
}else
|
|
if(txt == 't')
|
|
{
|
|
*chr='\t';
|
|
}else{
|
|
*chr = (char)txt;
|
|
}
|
|
}
|
|
*esc=true;
|
|
}
|
|
else{
|
|
*esc=false;
|
|
*chr=(char)txt;
|
|
}
|
|
}
|
|
std::string read_string()
|
|
{
|
|
std::string b="";
|
|
while(true)
|
|
{
|
|
char chr='\0';
|
|
bool esc=false;
|
|
read_char(&chr,&esc);
|
|
if(chr == '\"' && !esc)
|
|
{
|
|
break;
|
|
}
|
|
b+=chr;
|
|
}
|
|
return b;
|
|
}
|
|
public:
|
|
Lexer(std::string _text)
|
|
{
|
|
this->text=_text;
|
|
this->filePos=0;
|
|
}
|
|
std::vector<LexToken> Lex()
|
|
{
|
|
std::vector<LexToken> tokens;
|
|
std::string b="";
|
|
while(filePos < text.length())
|
|
{
|
|
char c = text[filePos++];
|
|
if(c == '\"')
|
|
{
|
|
if(b.length() > 0)
|
|
{
|
|
tokens.push_back(LexToken(b));
|
|
b="";
|
|
}
|
|
std::string str = read_string();
|
|
LexToken token;
|
|
token.text = str;
|
|
token.type = STRING;
|
|
tokens.push_back(token);
|
|
}else if(c == '#')
|
|
{
|
|
while(text[filePos++] != '\n');
|
|
}
|
|
else
|
|
if(c == '\'')
|
|
{
|
|
if(b.length() > 0)
|
|
{
|
|
tokens.push_back(LexToken(b));
|
|
b="";
|
|
}
|
|
char chr='\0';
|
|
bool esc;
|
|
read_char(&chr,&esc);
|
|
LexToken token;
|
|
token.text="";
|
|
token.text += chr;
|
|
token.type = NUMBER;
|
|
tokens.push_back(token);
|
|
|
|
}else if(c == '+')
|
|
{
|
|
if(b.length() > 0)
|
|
{
|
|
tokens.push_back(LexToken(b));
|
|
b="";
|
|
}
|
|
LexToken token;
|
|
token.text="+";
|
|
token.type = PLUS;
|
|
tokens.push_back(token);
|
|
}else if(c == '-')
|
|
{
|
|
if(b.length() > 0)
|
|
{
|
|
tokens.push_back(LexToken(b));
|
|
b="";
|
|
}
|
|
LexToken token;
|
|
token.text="-";
|
|
token.type = MINUS;
|
|
tokens.push_back(token);
|
|
}else if(c == '+')
|
|
{
|
|
if(b.length() > 0)
|
|
{
|
|
tokens.push_back(LexToken(b));
|
|
b="";
|
|
}
|
|
LexToken token;
|
|
token.text="*";
|
|
token.type = MULTIPLY;
|
|
tokens.push_back(token);
|
|
}
|
|
else if(c == '/')
|
|
{
|
|
if(b.length() > 0)
|
|
{
|
|
tokens.push_back(LexToken(b));
|
|
b="";
|
|
}
|
|
LexToken token;
|
|
token.text="/";
|
|
token.type = DIVIDE;
|
|
tokens.push_back(token);
|
|
}else if(c == '%')
|
|
{
|
|
if(b.length() > 0)
|
|
{
|
|
tokens.push_back(LexToken(b));
|
|
b="";
|
|
}
|
|
LexToken token;
|
|
token.text="%";
|
|
token.type = MOD;
|
|
tokens.push_back(token);
|
|
}
|
|
else if(c == '{')
|
|
{
|
|
if(b.length() > 0)
|
|
{
|
|
tokens.push_back(LexToken(b));
|
|
b="";
|
|
}
|
|
LexToken token;
|
|
token.text="{";
|
|
token.type = LBRACE;
|
|
tokens.push_back(token);
|
|
}
|
|
else if(c == '}')
|
|
{
|
|
if(b.length() > 0)
|
|
{
|
|
tokens.push_back(LexToken(b));
|
|
b="";
|
|
}
|
|
LexToken token;
|
|
token.text="}";
|
|
token.type = RBRACE;
|
|
tokens.push_back(token);
|
|
}
|
|
else if(c == '(')
|
|
{
|
|
if(b.length() > 0)
|
|
{
|
|
tokens.push_back(LexToken(b));
|
|
b="";
|
|
}
|
|
LexToken token;
|
|
token.text="(";
|
|
token.type = LPAREN;
|
|
tokens.push_back(token);
|
|
}
|
|
else if(c == ')')
|
|
{
|
|
if(b.length() > 0)
|
|
{
|
|
tokens.push_back(LexToken(b));
|
|
b="";
|
|
}
|
|
LexToken token;
|
|
token.text=")";
|
|
token.type = RPAREN;
|
|
tokens.push_back(token);
|
|
}
|
|
else if(c == '=')
|
|
{
|
|
if(b.length() > 0)
|
|
{
|
|
tokens.push_back(LexToken(b));
|
|
b="";
|
|
}
|
|
LexToken token;
|
|
token.text="=";
|
|
token.type = EQUALS;
|
|
tokens.push_back(token);
|
|
}
|
|
else if(c == ',')
|
|
{
|
|
if(b.length() > 0)
|
|
{
|
|
tokens.push_back(LexToken(b));
|
|
b="";
|
|
}
|
|
LexToken token;
|
|
token.text=",";
|
|
token.type = COMMA;
|
|
tokens.push_back(token);
|
|
}
|
|
else if(c == ';')
|
|
{
|
|
if(b.length() > 0)
|
|
{
|
|
tokens.push_back(LexToken(b));
|
|
b="";
|
|
}
|
|
LexToken token;
|
|
token.text=";";
|
|
token.type = SEMI;
|
|
tokens.push_back(token);
|
|
}
|
|
else if(c == '\n' || c == ' ' || c == '\t')
|
|
{
|
|
if(b.length() > 0)
|
|
{
|
|
tokens.push_back(LexToken(b));
|
|
b="";
|
|
}
|
|
|
|
}else{
|
|
b += c;
|
|
}
|
|
}
|
|
if(b.length() > 0)
|
|
{
|
|
tokens.push_back(LexToken(b));
|
|
b="";
|
|
}
|
|
return tokens;
|
|
}
|
|
|
|
};
|
|
std::vector<LexToken> Lex(std::string text)
|
|
{
|
|
Lexer l(text);
|
|
return l.Lex();
|
|
}
|
|
}; |