// tlang-runtime-compiler/TLang.Lexer/LexContext.cs

using System.Collections.Generic;

namespace TLang.Lexer
{
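    /// <summary>
    /// A read cursor over an immutable list of lexed tokens. The match helpers
    /// return a LexEntryContext and can optionally consume what they matched.
    /// </summary>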
    public class LexContext
    {
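        /// <summary>
        /// Concatenates the token streams of two contexts; the combined
        /// context keeps the left operand's offset.
        /// </summary>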
        public static LexContext operator +(LexContext left, LexContext right)
        {
            List<LexToken> tokens = new List<LexToken>();
            tokens.AddRange(left.Tokens);
            tokens.AddRange(right.Tokens);
            return new LexContext(tokens) { Offset = left.Offset };
        }
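
        /// <summary>Creates a context positioned at the start of the given tokens.</summary>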
        public LexContext(IReadOnlyList<LexToken> tokens)
        {
            Tokens = tokens;
        }

        /// <summary>The token stream this context reads from.</summary>
        public IReadOnlyList<LexToken> Tokens { get; }

        /// <summary>Index of the next token to read.</summary>
        public int Offset { get; set; } = 0;
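
        /// <summary>
        /// Returns a new context over the same tokens at the given offset;
        /// the parameterless overload clones the context at its current offset.
        /// </summary>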
        public LexContext FromOffset(int offset)
        {
            return new LexContext(Tokens) { Offset = offset };
        }

        public LexContext FromOffset()
        {
            return FromOffset(Offset);
        }
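
        /// <summary>Advances the cursor by <paramref name="i"/> tokens.</summary>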
        public void Add(int i = 1)
        {
            Offset += i;
        }
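
        /// <summary>
        /// Matches the given token texts starting at the cursor, requiring that
        /// no whitespace follows any matched token (including the last one);
        /// documentation, char and string tokens never match. Optionally
        /// consumes the sequence on success.
        /// </summary>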
        public LexEntryContext NextEntriesNoSpaces(bool consumeIfTrue, params string[] tokenText)
        {
            List<LexToken> tokens = new List<LexToken>();
            int offset = Offset;
            // Not enough tokens left to match the whole sequence.
            if (Offset + tokenText.Length > Tokens.Count) return new LexEntryContext(tokens, offset, false);
            for (int i = 0; i < tokenText.Length; i++)
            {
                LexToken current = Tokens[i + offset];
                // Documentation, char/string literals, trailing whitespace,
                // or a text mismatch all abort the match.
                if (current.IsDocumentation || current.SpacesBetweenThisAndNext || current.IsChar || current.IsString || current.Text != tokenText[i])
                    return new LexEntryContext(tokens, offset, false);
                tokens.Add(current);
            }
            if (consumeIfTrue) Offset += tokenText.Length;
            return new LexEntryContext(tokens, offset, true);
        }
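
        /// <summary>
        /// Matches the given token texts starting at the cursor, allowing
        /// whitespace between tokens. Optionally consumes the sequence on success.
        /// </summary>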
        public LexEntryContext NextEntries(bool consumeIfTrue, params string[] tokenText)
        {
            List<LexToken> tokens = new List<LexToken>();
            int offset = Offset;
            if (Offset + tokenText.Length > Tokens.Count) return new LexEntryContext(tokens, offset, false);
            for (int i = 0; i < tokenText.Length; i++)
            {
                LexToken current = Tokens[i + offset];
                // Same checks as NextEntriesNoSpaces, minus the whitespace rule.
                if (current.IsDocumentation || current.IsChar || current.IsString || current.Text != tokenText[i])
                    return new LexEntryContext(tokens, offset, false);
                tokens.Add(current);
            }
            if (consumeIfTrue) Offset += tokenText.Length;
            return new LexEntryContext(tokens, offset, true);
        }
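
        /// <summary>
        /// Tests whether the current token's text equals any of the given
        /// candidates, optionally consuming it on success.
        /// </summary>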
        public bool NextEntryIsAnyOf(bool consumeIfTrue, out LexToken token, params string[] tokenText)
        {
            token = new LexToken();
            if (Offset >= Tokens.Count) return false;
            LexToken current = Tokens[Offset];
            // Literals and documentation never match keyword/operator text.
            if (current.IsDocumentation || current.IsChar || current.IsString) return false;
            foreach (var item in tokenText)
            {
                if (current.Text == item)
                {
                    token = current;
                    if (consumeIfTrue) Offset++;
                    return true;
                }
            }
            return false;
        }
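
        /// <summary>Returns the current token and advances, or LexToken.Empty at end of stream.</summary>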
        public LexToken NextEntry()
        {
            if (Offset < Tokens.Count)
            {
                return Tokens[Offset++];
            }
            return LexToken.Empty;
        }
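
        /// <summary>The current token without advancing, or LexToken.Empty at end of stream.</summary>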
        public LexToken PeekEntry
        {
            get
            {
                if (Offset < Tokens.Count)
                {
                    return Tokens[Offset];
                }
                return LexToken.Empty;
            }
        }

        /// <summary>Source position of the token at the cursor.</summary>
        public LexLineInfo CurrentLineInfo => PeekEntry.Position;

        /// <summary>True while unread tokens remain.</summary>
        public bool MoreTokens => Offset < Tokens.Count;
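
        /// <summary>
        /// Consumes and returns the current token's text if it is a documentation
        /// token; otherwise returns "" and leaves the cursor unchanged.
        /// </summary>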
        public string PopDocumentation()
        {
            if (Offset >= Tokens.Count || !Tokens[Offset].IsDocumentation) return "";
            return Tokens[Offset++].Text;
        }
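
        /// <summary>
        /// Matches a sequence of expected tokens (compared with SameToken)
        /// starting at the cursor, optionally consuming it on success.
        /// </summary>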
        public LexEntryContext NextEntries(bool consumeIfTrue, params LexToken[] tokens)
        {
            List<LexToken> _tokens = new List<LexToken>();
            int offset = Offset;
            if (Offset + tokens.Length > Tokens.Count) return new LexEntryContext(_tokens, offset, false);
            for (int i = 0; i < tokens.Length; i++)
            {
                // Abort on the first token that does not match the expected one.
                if (!Tokens[i + offset].SameToken(tokens[i])) return new LexEntryContext(_tokens, offset, false);
                _tokens.Add(Tokens[i + offset]);
            }
            if (consumeIfTrue) Offset += tokens.Length;
            return new LexEntryContext(_tokens, offset, true);
        }
    }
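
    // A minimal usage sketch, not part of the original file. It assumes only
    // the LexContext members defined above plus an already-lexed token list;
    // how that list is produced is out of scope here, and the class and
    // method names below are hypothetical.
    public static class LexContextUsageExample
    {
        public static void Walk(IReadOnlyList<LexToken> tokens)
        {
            var ctx = new LexContext(tokens);
            while (ctx.MoreTokens)
            {
                // Documentation tokens come off first ("" when there is none)...
                string doc = ctx.PopDocumentation();

                // ...operators can be matched by text and consumed on success...
                if (ctx.NextEntryIsAnyOf(true, out LexToken op, "+", "-", "*", "/"))
                {
                    continue;
                }

                // ...and anything else is consumed one token at a time.
                LexToken tok = ctx.NextEntry();
            }
        }
    }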
}