First Commit

Mike Nolan 2023-07-26 22:31:32 -05:00
commit c3afc99003
59 changed files with 2504 additions and 0 deletions

.gitignore (vendored, new file)

@@ -0,0 +1,398 @@
## Ignore Visual Studio temporary files, build results, and
## files generated by popular Visual Studio add-ons.
##
## Get latest from https://github.com/github/gitignore/blob/main/VisualStudio.gitignore
# User-specific files
*.rsuser
*.suo
*.user
*.userosscache
*.sln.docstates
# User-specific files (MonoDevelop/Xamarin Studio)
*.userprefs
# Mono auto generated files
mono_crash.*
# Build results
[Dd]ebug/
[Dd]ebugPublic/
[Rr]elease/
[Rr]eleases/
x64/
x86/
[Ww][Ii][Nn]32/
[Aa][Rr][Mm]/
[Aa][Rr][Mm]64/
bld/
[Bb]in/
[Oo]bj/
[Ll]og/
[Ll]ogs/
# Visual Studio 2015/2017 cache/options directory
.vs/
# Uncomment if you have tasks that create the project's static files in wwwroot
#wwwroot/
# Visual Studio 2017 auto generated files
Generated\ Files/
# MSTest test Results
[Tt]est[Rr]esult*/
[Bb]uild[Ll]og.*
# NUnit
*.VisualState.xml
TestResult.xml
nunit-*.xml
# Build Results of an ATL Project
[Dd]ebugPS/
[Rr]eleasePS/
dlldata.c
# Benchmark Results
BenchmarkDotNet.Artifacts/
# .NET Core
project.lock.json
project.fragment.lock.json
artifacts/
# ASP.NET Scaffolding
ScaffoldingReadMe.txt
# StyleCop
StyleCopReport.xml
# Files built by Visual Studio
*_i.c
*_p.c
*_h.h
*.ilk
*.meta
*.obj
*.iobj
*.pch
*.pdb
*.ipdb
*.pgc
*.pgd
*.rsp
*.sbr
*.tlb
*.tli
*.tlh
*.tmp
*.tmp_proj
*_wpftmp.csproj
*.log
*.tlog
*.vspscc
*.vssscc
.builds
*.pidb
*.svclog
*.scc
# Chutzpah Test files
_Chutzpah*
# Visual C++ cache files
ipch/
*.aps
*.ncb
*.opendb
*.opensdf
*.sdf
*.cachefile
*.VC.db
*.VC.VC.opendb
# Visual Studio profiler
*.psess
*.vsp
*.vspx
*.sap
# Visual Studio Trace Files
*.e2e
# TFS 2012 Local Workspace
$tf/
# Guidance Automation Toolkit
*.gpState
# ReSharper is a .NET coding add-in
_ReSharper*/
*.[Rr]e[Ss]harper
*.DotSettings.user
# TeamCity is a build add-in
_TeamCity*
# DotCover is a Code Coverage Tool
*.dotCover
# AxoCover is a Code Coverage Tool
.axoCover/*
!.axoCover/settings.json
# Coverlet is a free, cross platform Code Coverage Tool
coverage*.json
coverage*.xml
coverage*.info
# Visual Studio code coverage results
*.coverage
*.coveragexml
# NCrunch
_NCrunch_*
.*crunch*.local.xml
nCrunchTemp_*
# MightyMoose
*.mm.*
AutoTest.Net/
# Web workbench (sass)
.sass-cache/
# Installshield output folder
[Ee]xpress/
# DocProject is a documentation generator add-in
DocProject/buildhelp/
DocProject/Help/*.HxT
DocProject/Help/*.HxC
DocProject/Help/*.hhc
DocProject/Help/*.hhk
DocProject/Help/*.hhp
DocProject/Help/Html2
DocProject/Help/html
# Click-Once directory
publish/
# Publish Web Output
*.[Pp]ublish.xml
*.azurePubxml
# Note: Comment the next line if you want to checkin your web deploy settings,
# but database connection strings (with potential passwords) will be unencrypted
*.pubxml
*.publishproj
# Microsoft Azure Web App publish settings. Comment the next line if you want to
# checkin your Azure Web App publish settings, but sensitive information contained
# in these scripts will be unencrypted
PublishScripts/
# NuGet Packages
*.nupkg
# NuGet Symbol Packages
*.snupkg
# The packages folder can be ignored because of Package Restore
**/[Pp]ackages/*
# except build/, which is used as an MSBuild target.
!**/[Pp]ackages/build/
# Uncomment if necessary however generally it will be regenerated when needed
#!**/[Pp]ackages/repositories.config
# NuGet v3's project.json files produces more ignorable files
*.nuget.props
*.nuget.targets
# Microsoft Azure Build Output
csx/
*.build.csdef
# Microsoft Azure Emulator
ecf/
rcf/
# Windows Store app package directories and files
AppPackages/
BundleArtifacts/
Package.StoreAssociation.xml
_pkginfo.txt
*.appx
*.appxbundle
*.appxupload
# Visual Studio cache files
# files ending in .cache can be ignored
*.[Cc]ache
# but keep track of directories ending in .cache
!?*.[Cc]ache/
# Others
ClientBin/
~$*
*~
*.dbmdl
*.dbproj.schemaview
*.jfm
*.pfx
*.publishsettings
orleans.codegen.cs
# Including strong name files can present a security risk
# (https://github.com/github/gitignore/pull/2483#issue-259490424)
#*.snk
# Since there are multiple workflows, uncomment next line to ignore bower_components
# (https://github.com/github/gitignore/pull/1529#issuecomment-104372622)
#bower_components/
# RIA/Silverlight projects
Generated_Code/
# Backup & report files from converting an old project file
# to a newer Visual Studio version. Backup files are not needed,
# because we have git ;-)
_UpgradeReport_Files/
Backup*/
UpgradeLog*.XML
UpgradeLog*.htm
ServiceFabricBackup/
*.rptproj.bak
# SQL Server files
*.mdf
*.ldf
*.ndf
# Business Intelligence projects
*.rdl.data
*.bim.layout
*.bim_*.settings
*.rptproj.rsuser
*- [Bb]ackup.rdl
*- [Bb]ackup ([0-9]).rdl
*- [Bb]ackup ([0-9][0-9]).rdl
# Microsoft Fakes
FakesAssemblies/
# GhostDoc plugin setting file
*.GhostDoc.xml
# Node.js Tools for Visual Studio
.ntvs_analysis.dat
node_modules/
# Visual Studio 6 build log
*.plg
# Visual Studio 6 workspace options file
*.opt
# Visual Studio 6 auto-generated workspace file (contains which files were open etc.)
*.vbw
# Visual Studio 6 auto-generated project file (contains which files were open etc.)
*.vbp
# Visual Studio 6 workspace and project file (working project files containing files to include in project)
*.dsw
*.dsp
# Visual Studio 6 technical files
*.ncb
*.aps
# Visual Studio LightSwitch build output
**/*.HTMLClient/GeneratedArtifacts
**/*.DesktopClient/GeneratedArtifacts
**/*.DesktopClient/ModelManifest.xml
**/*.Server/GeneratedArtifacts
**/*.Server/ModelManifest.xml
_Pvt_Extensions
# Paket dependency manager
.paket/paket.exe
paket-files/
# FAKE - F# Make
.fake/
# CodeRush personal settings
.cr/personal
# Python Tools for Visual Studio (PTVS)
__pycache__/
*.pyc
# Cake - Uncomment if you are using it
# tools/**
# !tools/packages.config
# Tabs Studio
*.tss
# Telerik's JustMock configuration file
*.jmconfig
# BizTalk build output
*.btp.cs
*.btm.cs
*.odx.cs
*.xsd.cs
# OpenCover UI analysis results
OpenCover/
# Azure Stream Analytics local run output
ASALocalRun/
# MSBuild Binary and Structured Log
*.binlog
# NVidia Nsight GPU debugger configuration file
*.nvuser
# MFractors (Xamarin productivity tool) working folder
.mfractor/
# Local History for Visual Studio
.localhistory/
# Visual Studio History (VSHistory) files
.vshistory/
# BeatPulse healthcheck temp database
healthchecksdb
# Backup folder for Package Reference Convert tool in Visual Studio 2017
MigrationBackup/
# Ionide (cross platform F# VS Code tools) working folder
.ionide/
# Fody - auto-generated XML schema
FodyWeavers.xsd
# VS Code files for those working on multiple tools
.vscode/*
!.vscode/settings.json
!.vscode/tasks.json
!.vscode/launch.json
!.vscode/extensions.json
*.code-workspace
# Local History for Visual Studio Code
.history/
# Windows Installer files from build outputs
*.cab
*.msi
*.msix
*.msm
*.msp
# JetBrains Rider
*.sln.iml

TLang.Lexer/Class1.cs (new file)

@@ -0,0 +1,415 @@
using System;
using System.Collections.Generic;
using System.IO;
using System.Text;
namespace TLang.Lexer
{
public class Lex
{
TextReader reader;
private Lex(TextReader reader)
{
this.reader = reader;
}
private LexLineInfo lineInfo = new LexLineInfo();
private LexLineInfo beginLInfo = new LexLineInfo();
private List<LexToken> tokens = new List<LexToken>();
private StringBuilder builder = new StringBuilder();
private void Flush()
{
if(builder.Length > 0)
{
tokens.Add(LexToken.Token(builder.ToString()).WithLineInfo(beginLInfo));
builder.Clear();
}
}
private void Append(int c)
{
if(c == -1) return;
if(builder.Length == 0)
beginLInfo = lineInfo.Clone();
builder.Append((char)c);
}
public static LexContext GetTokens(TextReader reader,string filename="memory.tlang")
{
Lex lex = new Lex(reader);
// record the file name so token positions report it instead of the default "memory.tlang"
lex.lineInfo.FileName = filename;
lex.StartLexing();
return new LexContext(lex.tokens);
}
private void ParseChar()
{
var lineInfo = this.lineInfo.Clone();
// a char literal of the form 'c'; the trailing Next below consumes the closing quote
var (mChar,Escaped,EOF) = ReadChar();
tokens.Add(LexToken.Char(mChar.ToString()).WithLineInfo(lineInfo));
_ = Next;
}
private void ParseString(bool interpolated)
{
var lineInfo = this.lineInfo.Clone();
//$"Demi Lovato is { variable + 5 } years Old"; -> String.Concat("Demi Lovato is ",( variable + 5 )," years Old");
StringBuilder b = new StringBuilder();
var (mChar,Escaped,EOF) = ReadChar();
while((mChar != '\"' || Escaped) && !EOF)
{
b.Append(mChar);
(mChar,Escaped,EOF) = ReadChar();
}
if(interpolated)
{
tokens.Add(LexToken.Token("String").WithLineInfo(lineInfo));
tokens.Add(LexToken.Token(".").WithLineInfo(lineInfo));
tokens.Add(LexToken.Token("Concat").WithLineInfo(lineInfo));
tokens.Add(LexToken.Token("(").WithLineInfo(lineInfo));
int e = 0;
int escapeI = 0;
StringBuilder b2 = new StringBuilder();
for(int i = 0;i< b.Length;i++)
{
if(b[i] == '{' )
{
if((i+1 < b.Length && b[i+1] != '{') || escapeI >= 1)
{
if(b2.Length > 0 && escapeI < 1)
{
if(e > 0) tokens.Add(LexToken.Token(",").WithLineInfo(lineInfo));
tokens.Add(LexToken.String(b2.ToString()).WithLineInfo(lineInfo));
b2.Clear();
e++;
}
escapeI++;
if(escapeI > 1)
{
b2.Append('{');
}
}
else
{
b2.Append('{');
i++;
}
}
else if(b[i] == '}')
{
if(escapeI >= 1)
{
escapeI--;
if(b2.Length > 0 && escapeI == 0)
{
if(e > 0) tokens.Add(LexToken.Token(",").WithLineInfo(lineInfo));
tokens.Add(LexToken.Token("(").WithLineInfo(lineInfo));
tokens.AddRange(Lex.GetTokensFromString(b2.ToString(),"compilerGenerated.tlang").Tokens);
tokens.Add(LexToken.Token(")").WithLineInfo(lineInfo));
b2.Clear();
e++;
}
if(escapeI >= 1)
{
b2.Append("}");
}
}
}
else {
b2.Append(b[i]);
}
}
if(b2.Length > 0)
{
if(escapeI > 0)
{
if(e > 0) tokens.Add(LexToken.Token(",").WithLineInfo(lineInfo));
tokens.Add(LexToken.Token("(").WithLineInfo(lineInfo));
tokens.AddRange(Lex.GetTokensFromString(b2.ToString(),"compilerGenerated.tlang").Tokens);
tokens.Add(LexToken.Token(")").WithLineInfo(lineInfo));
b2.Clear();
e++;
}
else
{
if(e > 0) tokens.Add(LexToken.Token(",").WithLineInfo(lineInfo));
tokens.Add(LexToken.String(b2.ToString()).WithLineInfo(lineInfo));
b2.Clear();
e++;
}
}
tokens.Add(LexToken.Token(")").WithLineInfo(lineInfo));
}
else
{
tokens.Add(LexToken.String(b.ToString()).WithLineInfo(lineInfo));
}
}
private (char Char, bool Escaped,bool EOF) ReadChar()
{
int next = Next;
if(next == -1) return (' ',false,true);
if(next == '\\')
{
next = Next;
if(next == 'n')
{
return ('\n',true,false);
}
else if(next == 'r')
{
return ('\r',true,false);
}
else if(next == 't')
{
return ('\t',true,false);
}
else if(next == 'x')
{
string hexCode = new string(new char[]{(char)Next,(char)Next});
int val= int.Parse(hexCode,System.Globalization.NumberStyles.HexNumber);
return ((char)val,true,false);
}
else
{
return ((char)next,true,false);
}
}
return ((char)next,false,false);
}
private void StartLexing()
{
int c;
while((c=Next) != -1)
{
var peek = Peek;
var lineInfo = this.lineInfo.Clone();
switch(c)
{
case '/':
if(peek == '/')
{
_ = Next;
//single line comment
Flush();
// skip to end of line, stopping at EOF so an unterminated comment cannot loop forever
int eol;
while((eol = Next) != '\n' && eol != -1);
}
else if(peek == '*')
{
var next = Next;
while((next = Next) != -1)
{
if(next == '*')
{
if(Peek == '/')
{
_ = Next;
break;
}
}
}
}
else if(peek == '^')
{
var next = Next;
StringBuilder b = new StringBuilder();
while((next = Next) != -1)
{
if(next == '^')
{
if(Peek == '/')
{
_ = Next;
break;
}
}
b.Append((char)next);
}
tokens.Add(LexToken.Documentation(b.ToString()).WithLineInfo(lineInfo));
}
else if(peek == '=')
{
_ = Next;
tokens.Add(LexToken.Token($"{(char)c}=").WithLineInfo(lineInfo));
}
else
{
tokens.Add(LexToken.Token($"{(char)c}").WithLineInfo(lineInfo));
}
break;
case '\"':
Flush();
ParseString(false);
break;
case '\'':
Flush();
ParseChar();
break;
case '$':
Flush();
if(peek == '\"')
{
_ = Next;
ParseString(true);
}
break;
case '^':
{
Flush();
if(peek == c)
{
_ = Next;
peek=Peek;
if(peek == '=')
{
_ = Next;
tokens.Add(LexToken.Token($"{(char)c}{(char)c}=").WithLineInfo(lineInfo));
}
else
{
tokens.Add(LexToken.Token($"{(char)c}{(char)c}").WithLineInfo(lineInfo));
}
}
else if(peek == '=')
{
_ = Next;
tokens.Add(LexToken.Token($"{(char)c}=").WithLineInfo(lineInfo));
}
else {
tokens.Add(LexToken.Token($"{(char)c}").WithLineInfo(lineInfo));
}
}
break;
case '+':
case '-':
case '<':
case '>':
case '|':
case '&':
{
Flush();
if(peek == c)
{
_ = Next;
tokens.Add(LexToken.Token($"{(char)c}{(char)c}").WithLineInfo(lineInfo));
}
else if(peek == '=')
{
_ = Next;
tokens.Add(LexToken.Token($"{(char)c}=").WithLineInfo(lineInfo));
}
else {
tokens.Add(LexToken.Token($"{(char)c}").WithLineInfo(lineInfo));
}
}
break;
case '*':
case '%':
case '!':
{
Flush();
if(peek == '=')
{
_ = Next;
tokens.Add(LexToken.Token($"{(char)c}=").WithLineInfo(lineInfo));
}
else {
tokens.Add(LexToken.Token($"{(char)c}").WithLineInfo(lineInfo));
}
}
break;
case '[':
case ']':
case '(':
case ')':
case ':':
case '{':
case '}':
case ';':
case '?':
case '.':
case ',':
{
Flush();
tokens.Add(LexToken.Token($"{(char)c}").WithLineInfo(lineInfo));
break;
}
case '=':
{
Flush();
if(peek == c)
{
_ = Next;
tokens.Add(LexToken.Token($"{(char)c}{(char)c}").WithLineInfo(lineInfo));
}
else if(peek == '>')
{
_ = Next;
tokens.Add(LexToken.Token($"{(char)c}>").WithLineInfo(lineInfo));
}
else {
tokens.Add(LexToken.Token($"{(char)c}").WithLineInfo(lineInfo));
}
}
break;
case '\t':
case '\n':
case '\r':
case ' ':
Flush();
SetSpacesBetweenTrue();
break;
default:
Append(c);
break;
}
}
Flush();
}
private void SetSpacesBetweenTrue()
{
if(tokens.Count > 0)
{
tokens[tokens.Count-1].SpacesBetweenThisAndNext = true;
}
}
private int Next {
get{
int read= reader.Read();
if(read != -1)
lineInfo.AppendChar((char)read);
return read;
}
}
private int Peek => reader.Peek();
public static LexContext GetTokensFromFile(string str)
{
using(var f = File.OpenText(str))
{
return GetTokens(f,str);
}
}
public static LexContext GetTokensFromString(string str,string filename="memory.tlang")
{
return GetTokens(new StringReader(str),filename);
}
}
}

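For orientation, a minimal sketch of driving the lexer above through its static entry points. This is an editorial sketch, not part of the diff, and assumes only the API shown in this file (Lex.GetTokensFromString, LexContext.Tokens, LexToken.Text and Position); the TLang source string and file name are made up.

    // Hypothetical usage of the lexer above.
    var ctx = TLang.Lexer.Lex.GetTokensFromString("x = 1 + 2;", "example.tlang");
    foreach (var token in ctx.Tokens)
    {
        // Position.ToString() reports file, line, offset and column
        System.Console.WriteLine($"{token.Text} @ {token.Position}");
    }
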
TLang.Lexer/LexContext.cs (new file)

@@ -0,0 +1,139 @@
using System.Collections.Generic;
namespace TLang.Lexer
{
public class LexContext
{
public static LexContext operator+(LexContext left,LexContext right)
{
List<LexToken> tokens=new List<LexToken>();
tokens.AddRange(left.Tokens);
tokens.AddRange(right.Tokens);
LexContext ctx=new LexContext(tokens);
ctx.Offset = left.Offset;
return ctx;
}
public LexContext(IReadOnlyList<LexToken> tokens)
{
Tokens = tokens;
}
public IReadOnlyList<LexToken> Tokens {get;}
public int Offset {get;set;} = 0;
public LexContext FromOffset(int offset)
{
return new LexContext(Tokens){Offset = offset};
}
public LexContext FromOffset()
{
return FromOffset(Offset);
}
public LexToken Next(int i = 0)
{
if(Offset + i < Tokens.Count) return Tokens[Offset + i];
return new LexToken();
}
public void Add(int i=1)
{
Offset += i;
}
public LexEntryContext NextEntriesNoSpaces(bool consumeIfTrue,params string[] tokenText)
{
List<LexToken> tokens = new List<LexToken>();
int offset = Offset;
if(Offset + tokenText.Length > Tokens.Count) return new LexEntryContext(tokens,offset,false);
for(int i = 0; i<tokenText.Length; i++)
{
if(Tokens[i+offset].IsDocumentation || Tokens[i+offset].SpacesBetweenThisAndNext || Tokens[i+offset].IsChar || Tokens[i+offset].IsString || Tokens[i+offset].Text != tokenText[i]) return new LexEntryContext(tokens,offset,false);
tokens.Add(Tokens[i+offset]);
}
if(consumeIfTrue) Offset += tokenText.Length;
return new LexEntryContext(tokens,offset,true);
}
public LexEntryContext NextEntries(bool consumeIfTrue,params string[] tokenText)
{
List<LexToken> tokens = new List<LexToken>();
int offset = Offset;
if(Offset + tokenText.Length > Tokens.Count) return new LexEntryContext(tokens,offset,false);
for(int i = 0; i<tokenText.Length; i++)
{
if(Tokens[i+offset].IsDocumentation || Tokens[i+offset].IsChar || Tokens[i+offset].IsString || Tokens[i+offset].Text != tokenText[i]) return new LexEntryContext(tokens,offset,false);
tokens.Add(Tokens[i+offset]);
}
if(consumeIfTrue) Offset += tokenText.Length;
return new LexEntryContext(tokens,offset,true);
}
public bool NextEntryIsAnyOf(bool consumeIfTrue,out LexToken token, params string[] tokenText)
{
token = new LexToken();
if(Offset >= Tokens.Count) return false;
foreach(var item in tokenText)
{
// match only plain tokens; documentation, char and string literals never match by text
if(!Tokens[Offset].IsDocumentation && !Tokens[Offset].IsChar && !Tokens[Offset].IsString && Tokens[Offset].Text == item)
{
token = Tokens[Offset];
if(consumeIfTrue) Offset++;
return true;
}
}
return false;
}
public LexToken NextEntry
{
get {
if(Offset < Tokens.Count)
{
return Tokens[Offset++];
}
return LexToken.Empty;
}
}
public LexToken PeekEntry
{
get
{
if(Offset < Tokens.Count)
{
return Tokens[Offset];
}
return LexToken.Empty;
}
}
public LexLineInfo CurrentLineInfo => PeekEntry.Position;
public bool MoreTokens => Offset < Tokens.Count;
public string PopDocumentation()
{
if(Offset >= Tokens.Count || !Tokens[Offset].IsDocumentation) return "";
return Tokens[Offset++].Text;
}
public LexEntryContext NextEntries(bool consumeIfTrue,params LexToken[] tokens)
{
List<LexToken> _tokens = new List<LexToken>();
int offset = Offset;
if(Offset + tokens.Length > Tokens.Count) return new LexEntryContext(_tokens,offset,false);
for(int i = 0; i<tokens.Length; i++)
{
if(!Tokens[i+offset].SameToken(tokens[i])) return new LexEntryContext(_tokens,offset,false);
_tokens.Add(Tokens[i+offset]);
}
if(consumeIfTrue) Offset += tokens.Length;
return new LexEntryContext(_tokens,offset,true);
}
}
}

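A sketch of how a consumer can walk a LexContext, assuming only the members defined above (MoreTokens, NextEntries, NextEntry); this is an editorial example and the input string is hypothetical.

    var ctx = TLang.Lexer.Lex.GetTokensFromString("a = ( b )");
    while (ctx.MoreTokens)
    {
        // consume "(" when it is the next plain token, otherwise consume and print whatever comes next
        if (ctx.NextEntries(true, "(").Success)
        {
            System.Console.WriteLine("entered parenthesised group");
            continue;
        }
        System.Console.WriteLine(ctx.NextEntry.Text);
    }
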
TLang.Lexer/LexEntryContext.cs (new file)

@@ -0,0 +1,19 @@
using System.Collections.Generic;
namespace TLang.Lexer
{
public class LexEntryContext
{
public LexEntryContext(List<LexToken> tokens,int offset,bool success)
{
Tokens = tokens;
Offset = offset;
Success = success;
}
public IReadOnlyList<LexToken> Tokens {get;}
public int Offset {get;}
public bool Success {get;}
}
}

TLang.Lexer/LexLineInfo.cs (new file)

@@ -0,0 +1,64 @@
using System;
namespace TLang.Lexer
{
public class LexLineInfo : IEquatable<LexLineInfo>
{
public LexLineInfo()
{
Line = 1;
Position = 0;
Column = 1;
FileName="memory.tlang";
}
public string FileName {get;set;}
public int Line {get;set;}
public int Column {get;set;}
public int Position {get;set;}
public void AppendChar(char c)
{
Position++;
if(c == '\t')
{
Column += 4;
}
else if(c == '\r')
{
Column = 1;
}
else if(c == '\n')
{
Column = 1;
Line += 1;
}
else {
Column++;
}
}
public LexLineInfo Clone()
{
LexLineInfo lineInfo=new LexLineInfo();
lineInfo.Column = Column;
lineInfo.FileName = FileName;
lineInfo.Line = Line;
lineInfo.Position = Position;
return lineInfo;
}
public bool Equals(LexLineInfo other)
{
return Line == other.Line && Column == other.Column && FileName == other.FileName && Position == other.Position;
}
public override string ToString()
{
return $"in file: {FileName}:{Line} offset: {Position}, col: {Column}";
}
}
}

TLang.Lexer/LexToken.cs (new file)

@@ -0,0 +1,78 @@
using System;
namespace TLang.Lexer
{
public class LexToken : IEquatable<LexToken>
{
public static LexToken Empty => new LexToken();
public bool IsEmpty => Text.Length == 0 && !IsDocumentation && !IsChar && !IsString;
public static LexToken Documentation(string text)
{
LexToken token = new LexToken();
token.IsDocumentation =true;
token.IsChar = false;
token.IsString = false;
token.Text = text;
return token;
}
public static LexToken Token(string text)
{
LexToken token = new LexToken();
token.IsDocumentation=false;
token.IsChar = false;
token.IsString = false;
token.Text = text;
return token;
}
public static LexToken String(string text)
{
LexToken token = new LexToken();
token.IsDocumentation=false;
token.IsChar = false;
token.IsString = true;
token.Text = text;
return token;
}
public static LexToken Char(string text)
{
LexToken token = new LexToken();
token.IsDocumentation=false;
token.IsChar = true;
token.IsString = false;
token.Text = text;
return token;
}
public bool SpacesBetweenThisAndNext {get;set;} = false;
public LexToken WithLineInfo(LexLineInfo lineInfo)
{
Position = lineInfo.Clone();
return this;
}
public LexToken WithLineInfo(int line,int col,int offset,string filename="memory.tlang")
{
Position = new LexLineInfo(){Line = line,Column = col,Position = offset,FileName = filename};
return this;
}
public LexLineInfo Position {get;set;} = new LexLineInfo();
public string Text {get;set;}="";
public bool IsString {get;set;}=false;
public bool IsChar {get;set;}=false;
public bool IsDocumentation {get;set;}=false;
public bool SameToken(LexToken token)
{
return Text == token.Text && IsChar == token.IsChar && IsString == token.IsString && IsDocumentation == token.IsDocumentation;
}
public bool Equals(LexToken token)
{
return Text == token.Text && IsChar == token.IsChar && IsString == token.IsString && IsDocumentation == token.IsDocumentation && SpacesBetweenThisAndNext == token.SpacesBetweenThisAndNext && Position.Equals(token.Position);
}
public bool IsTokenWith(string text)
{
return !IsChar && !IsDocumentation && !IsString && Text == text;
}
}
}

TLang.Lexer/TLang.Lexer.csproj (new file)

@@ -0,0 +1,7 @@
<Project Sdk="Microsoft.NET.Sdk">
<PropertyGroup>
<TargetFramework>netstandard2.0</TargetFramework>
</PropertyGroup>
</Project>

TLang.Parser/AddNode.cs (new file)

@@ -0,0 +1,14 @@
namespace TLang.Parser
{
public class AddNode : Node
{
private Node Left {get;set;}
private Node Right {get;set;}
public AddNode(Node left, Node right)
{
Left = left;
Right = right;
}
}
}

TLang.Parser/BAndNode.cs (new file)

@@ -0,0 +1,14 @@
namespace TLang.Parser
{
public class BAndNode : Node
{
private Node Left {get;set;}
private Node Right {get;set;}
public BAndNode(Node left, Node right)
{
Left = left;
Right = right;
}
}
}

TLang.Parser/BOrNode.cs (new file)

@@ -0,0 +1,14 @@
namespace TLang.Parser
{
public class BOrNode : Node
{
private Node Left {get;set;}
private Node Right {get;set;}
public BOrNode(Node left, Node right)
{
Left = left;
Right = right;
}
}
}

TLang.Parser/BitwiseNotNode.cs (new file)

@@ -0,0 +1,16 @@
using TLang.Lexer;
namespace TLang.Parser
{
public class BitwiseNotNode : Node
{
public Node Node {get;set;}
public BitwiseNotNode(Node node)
{
Node = node;
}
}
}

TLang.Parser/BreakNode.cs (new file)

@@ -0,0 +1,6 @@
namespace TLang.Parser
{
public class BreakNode : Node
{
}
}

TLang.Parser/CallNode.cs (new file)

@@ -0,0 +1,9 @@
using System.Collections.Generic;
namespace TLang.Parser
{
public class CallNode : SymbolNode
{
public List<Node> Arguments {get;set;}
}
}

TLang.Parser/CaseNode.cs (new file)

@@ -0,0 +1,18 @@
using TLang.Lexer;
namespace TLang.Parser
{
public class CaseNode : Node
{
public Node Variable {get;set;}
public Node Body {get;set;}
public CaseNode(Node expr, Node node)
{
Variable = expr;
Body = node;
}
}
}

TLang.Parser/CharNode.cs (new file)

@@ -0,0 +1,16 @@
using TLang.Lexer;
namespace TLang.Parser
{
public class CharNode : Node
{
public string Text {get;set;}
public CharNode(string text)
{
Text = text;
}
}
}

TLang.Parser/Class1.cs (new file)

@@ -0,0 +1,653 @@
using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using TLang.Lexer;
namespace TLang.Parser
{
public class Parse
{
private Parse(LexContext ctx)
{
Context = ctx;
}
private LexContext Context {get;set;}
private Node Node {get;set;}
private void ParseNodeRoot()
{
Node = ParseNode(true);
}
public static Node ParseFromLexContext(params LexContext[] ctx)
{
if(ctx.Length > 0)
{
var ctx2 = ctx[0];
for(int i = 1;i<ctx.Length;i++)
{
ctx2 += ctx[i];
}
Parse p=new Parse(ctx2);
p.ParseNodeRoot();
return p.Node;
}
return new Node();
}
public static Node ParseFromTextReader(TextReader reader,string filename="memory.tlang")
{
var lexed = Lex.GetTokens(reader,filename);
return ParseFromLexContext(lexed);
}
public static Node ParseFromFiles(params string[] filenames)
{
return ParseFromLexContext(filenames.Select<string,LexContext>(e=>Lex.GetTokensFromFile(e)).ToArray());
}
public static Node ParseFromString(string str,string filename="memory.tlang")
{
var lexed=Lex.GetTokensFromString(str,filename);
return ParseFromLexContext(lexed);
}
private Node ParseNode(bool isRoot=false,bool inCase=false)
{
ScopeNode node = new ScopeNode();
node.LineInfo = Context.CurrentLineInfo;
while(Context.Offset < Context.Tokens.Count && (isRoot || !Context.NextEntries(true,"}").Success))
{
if(Context.NextEntries(true,"{").Success)
{
node.Add(ParseNode());
}
node.Add(ParseAssigable());
}
return node;
}
private Node ParseAssigable()
{
Node expr = ParseLOr();
while(Context.NextEntryIsAnyOf(true,out var token,"=","+=","-=","*=","/=","%=","|=","&=","^=","^^="))
{
if(token.Text == "=")
{
var symbolNode = expr as SymbolNode;
if(symbolNode == null) throw new CompilerError("Assigning variable to something that is not a symbol",expr);
expr = new SetVariableNode(symbolNode.Name,ParseLOr()){LineInfo=token.Position};
}
if(token.Text == "+=")
{
var symbolNode = expr as SymbolNode;
if(symbolNode == null) throw new CompilerError("Assigning variable to something that is not a symbol",expr);
expr = new SetVariableNode(symbolNode.Name,new AddNode(new GetVariableNode(symbolNode.Name),ParseLOr())){LineInfo=token.Position};
}
if(token.Text == "-=")
{
var symbolNode = expr as SymbolNode;
if(symbolNode == null) throw new CompilerError("Assigning variable to something that is not a symbol",expr);
expr = new SetVariableNode(symbolNode.Name,new SubtractNode(new GetVariableNode(symbolNode.Name),ParseLOr())){LineInfo=token.Position};
}
if(token.Text == "*=")
{
var symbolNode = expr as SymbolNode;
if(symbolNode == null) throw new CompilerError("Assigning variable to something that is not a symbol",expr);
expr = new SetVariableNode(symbolNode.Name,new MultiplyNode(new GetVariableNode(symbolNode.Name),ParseLOr())){LineInfo=token.Position};
}
if(token.Text == "/=")
{
var symbolNode = expr as SymbolNode;
if(symbolNode == null) throw new CompilerError("Assigning variable to something that is not a symbol",expr);
expr = new SetVariableNode(symbolNode.Name,new DivideNode(new GetVariableNode(symbolNode.Name),ParseLOr())){LineInfo=token.Position};
}
if(token.Text == "%=")
{
var symbolNode = expr as SymbolNode;
if(symbolNode == null) throw new CompilerError("Assigning variable to something that is not a symbol",expr);
expr = new SetVariableNode(symbolNode.Name,new ModuloNode(new GetVariableNode(symbolNode.Name),ParseLOr())){LineInfo=token.Position};
}
if(token.Text == "|=")
{
var symbolNode = expr as SymbolNode;
if(symbolNode == null) throw new CompilerError("Assigning variable to something that is not a symbol",expr);
expr = new SetVariableNode(symbolNode.Name,new BOrNode(new GetVariableNode(symbolNode.Name),ParseLOr())){LineInfo=token.Position};
}
if(token.Text == "&=")
{
var symbolNode = expr as SymbolNode;
if(symbolNode == null) throw new CompilerError("Assigning variable to something that is not a symbol",expr);
expr = new SetVariableNode(symbolNode.Name,new BAndNode(new GetVariableNode(symbolNode.Name),ParseLOr())){LineInfo=token.Position};
}
if(token.Text == "^=")
{
var symbolNode = expr as SymbolNode;
if(symbolNode == null) throw new CompilerError("Assigning variable to something that is not a symbol",expr);
expr = new SetVariableNode(symbolNode.Name,new XOrNode(new GetVariableNode(symbolNode.Name),ParseLOr())){LineInfo=token.Position};
}
if(token.Text == "^^=")
{
var symbolNode = expr as SymbolNode;
if(symbolNode == null) throw new CompilerError("Assigning variable to something that is not a symbol",expr);
expr = new SetVariableNode(symbolNode.Name,new PowNode(new GetVariableNode(symbolNode.Name),ParseLOr())){LineInfo=token.Position};
}
}
return expr;
}
private Node ParseLOr()
{
Node expr = ParseLAnd();
while(Context.NextEntryIsAnyOf(true,out var token,"||"))
{
expr = new LOrNode(expr,ParseLAnd()){LineInfo=token.Position};
}
return expr;
}
private Node ParseLAnd()
{
Node expr = ParseBOr();
while(Context.NextEntryIsAnyOf(true,out var token,"&&"))
{
expr = new LAndNode(expr,ParseBOr()){LineInfo=token.Position};
}
return expr;
}
private Node ParseBOr()
{
Node expr = ParseXOr();
while(Context.NextEntryIsAnyOf(true,out var token,"|"))
{
expr = new BOrNode(expr,ParseXOr()){LineInfo=token.Position};
}
return expr;
}
private Node ParseXOr()
{
Node expr = ParseBAnd();
while(Context.NextEntryIsAnyOf(true,out var token,"^"))
{
expr = new XOrNode(expr,ParseBAnd()){LineInfo=token.Position};
}
return expr;
}
private Node ParseBAnd()
{
Node expr = ParseEq();
while(Context.NextEntryIsAnyOf(true,out var token,"&"))
{
expr = new BAndNode(expr,ParseEq()){LineInfo=token.Position};
}
return expr;
}
private Node ParseEq()
{
Node expr = ParseRo();
while(Context.NextEntryIsAnyOf(true,out var token,"==","!="))
{
if(token.Text == "==")
{
expr = new EqualsNode(expr,ParseRo()){LineInfo=token.Position};
}
if(token.Text == "!=")
{
expr = new NotEqualsNode(expr,ParseRo()){LineInfo=token.Position};
}
}
return expr;
}
private Node ParseRo()
{
Node expr = ParseShift();
while(Context.NextEntryIsAnyOf(true,out var token,"<","<=",">",">="))
{
if(token.Text == "<")
{
expr = new LessThanNode(expr,ParseShift()){LineInfo=token.Position};
}
if(token.Text == "<=")
{
expr = new LessThanEqualsNode(expr,ParseShift()){LineInfo=token.Position};
}
if(token.Text == ">")
{
expr = new GreaterThanNode(expr,ParseShift()){LineInfo=token.Position};
}
if(token.Text == ">=")
{
expr = new GreaterThanEqualsNode(expr,ParseShift()){LineInfo=token.Position};
}
}
return expr;
}
private Node ParseShift()
{
Node expr = ParseSum();
while(Context.NextEntryIsAnyOf(true,out var token,"<<",">>"))
{
if(token.Text == "<<")
{
expr = new LeftShiftNode(expr,ParseSum()){LineInfo=token.Position};
}
if(token.Text == ">>")
{
expr = new RightShiftNode(expr,ParseSum()){LineInfo=token.Position};
}
}
return expr;
}
private Node ParseSum()
{
Node expr = ParseFactor();
while(Context.NextEntryIsAnyOf(true,out var token,"+","-"))
{
if(token.Text == "+")
{
expr = new AddNode(expr,ParseFactor()){LineInfo=token.Position};
}
if(token.Text == "-")
{
expr = new SubtractNode(expr,ParseFactor()){LineInfo=token.Position};
}
}
return expr;
}
private Node ParseFactor()
{
Node expr = ParseExpo();
while(Context.NextEntryIsAnyOf(true,out var token,"*","/","%"))
{
if(token.Text == "*")
{
expr = new MultiplyNode(expr,ParseExpo()){LineInfo=token.Position};
}
if(token.Text == "/")
{
expr = new DivideNode(expr,ParseExpo()){LineInfo=token.Position};
}
if(token.Text == "%")
{
expr = new ModuloNode(expr,ParseExpo()){LineInfo=token.Position};
}
}
return expr;
}
private Node ParseExpo()
{
Node expr = ParseValue();
while(Context.NextEntryIsAnyOf(true,out var token,"^^"))
{
expr = new PowNode(expr,ParseValue());
}
return expr;
}
private Node ParseValue()
{
var doc = Context.PopDocumentation();
var token = Context.NextEntry;
if(token.IsString) return new StringNode(token.Text){LineInfo = token.Position};
if(token.IsChar) return new CharNode(token.Text){LineInfo = token.Position};
if(token.Text == "(")
{
var assignable = ParseAssigable();
assignable.LineInfo = token.Position;
if(!Context.NextEntries(true,")").Success) throw new CompilerError("symbol must be a ')'",assignable);
return assignable;
}
if(token.Text == "class")
{
ClassNode classInstance = new ClassNode();
classInstance.Name = Context.NextEntry.Text;
classInstance.Documentation = doc;
if(Context.NextEntries(true,":").Success)
{
classInstance.InheritsFrom = Context.NextEntry.Text;
}
Context.NextEntries(true,"{");
while(Context.MoreTokens && !Context.NextEntries(true,"}").Success)
{
var doc2 = Context.PopDocumentation();
var token2 = Context.NextEntry;
var modifier = token2.Text;
if(Context.NextEntryIsAnyOf(true,out var token3,"func","abst"))
{
var name = Context.NextEntry.Text;
if(Context.NextEntries(true,"(").Success)
{
var pos = Context.PeekEntry.Position;
List<string> args=new List<string>();
while(!Context.NextEntries(true,")").Success)
{
if(Context.NextEntries(true,",").Success) continue;
if(Context.Offset < Context.Tokens.Count)
{
var n0 = ParseNode();
var n= n0 as GetVariableNode;
if(n != null)
{
args.Add(n.Name);
}
else
{
throw new CompilerError("Argument must be a GetVariableNode",n0);
}
}
}
ClassEntryNode nod=new ClassEntryNode(){LineInfo = token2.Position};
nod.Abstract = token3.Text == "abst";
nod.Documentation = doc2;
nod.Modifier = modifier;
nod.Name = name;
nod.InitialValue = new ClosureNode(args,ParseNode()){LineInfo = pos};
classInstance.Entries.Add(nod);
}
}
else
{
ClassEntryNode nod = new ClassEntryNode(){LineInfo = token2.Position};
nod.Documentation = doc2;
nod.Name = Context.NextEntry.Text;
if(!Context.NextEntries(true,"=").Success) throw new CompilerError("Invalid member",classInstance);
nod.Modifier = modifier;
nod.InitialValue = ParseNode();
classInstance.Entries.Add(nod);
}
}
return classInstance;
}
if(token.Text == "brk" || token.Text == "break")
{
return new BreakNode(){LineInfo = token.Position};
}
if(token.Text == "cont" || token.Text == "continue")
{
return new ContinueNode(){LineInfo = token.Position};
}
if(token.Text == "ret" || token.Text == "return")
{
return new ReturnNode(ParseNode()){LineInfo = token.Position};
}
if(token.Text == "case")
{
var expr = ParseNode();
if(!Context.NextEntries(true,":").Success) throw new CompilerError("Missing :",expr);
return new CaseNode(expr,ParseNode(true,true)){LineInfo = token.Position};
}
if(token.Text == "default")
{
var cur = Context.CurrentLineInfo;
var r=Context.NextEntries(true,":");
if(!r.Success) throw new CompilerError("Missing :",new Node(){LineInfo = cur});
return new DefaultNode(ParseNode(true,true)){LineInfo = token.Position};
}
if(token.Text == "-")
{
return new NegativeNode(ParseNode());
}
if(token.Text == "~")
{
return new BitwiseNotNode(ParseNode());
}
if(token.Text == "!")
{
return new NotNode(ParseNode());
}
if(long.TryParse(token.Text,out var number))
{
if(Context.NextEntries(true,".").Success)
{
var entry=Context.NextEntry;
if(!entry.IsChar && !entry.IsDocumentation && !entry.IsEmpty && !entry.IsString && double.TryParse($"{number}.{entry.Text}",out var number2))
{
return new ConstNumberNode(number2);
}
}
//just a long
return new ConstNumberNode(number);
}
else
{
if(Context.NextEntries(true,"(").Success)
{
if(token.Text == "func")
{
List<string> args=new List<string>();
while(!Context.NextEntries(true,")").Success)
{
if(Context.NextEntries(true,",").Success) continue;
if(Context.Offset < Context.Tokens.Count)
{
var n0 = ParseNode();
var n= n0 as GetVariableNode;
if(n != null)
{
args.Add(n.Name);
}
else
{
throw new CompilerError("Argument must be a GetVariableNode",n0);
}
}
}
return new ClosureNode(args,ParseNode());
}
else if(token.Text == "for")
{
Node init=new Node();
Node condition = new Node();
Node inc = new Node();
Node body = new Node();
if(Context.MoreTokens && Context.PeekEntry.Text != ")" && Context.PeekEntry.Text != ";")
{
init = ParseNode();
}
Context.NextEntries(true,";");
if(Context.MoreTokens && Context.PeekEntry.Text != ")" && Context.PeekEntry.Text != ";")
{
condition = ParseNode();
}
Context.NextEntries(true,";");
if(Context.MoreTokens && Context.PeekEntry.Text != ")" && Context.PeekEntry.Text != ";")
{
inc = ParseNode();
}
Context.NextEntries(true,")");
if(Context.MoreTokens)
body = ParseNode();
return new ForLoopNode(init,condition,inc,body);
}
else if(token.Text == "each")
{
SymbolNode varNode = new GetVariableNode("item"){LineInfo = token.Position};
Node expr = ParseNode();
if(Context.NextEntries(true,":").Success)
{
var vNode = expr as SymbolNode;
if(vNode != null)
{
varNode = vNode;
}
expr = ParseNode();
}
Context.NextEntries(true,")");
if(Context.MoreTokens)
{
return new EachLoopNode(varNode,expr,ParseNode()){LineInfo = token.Position};
}
}
else if(token.Text == "switch")
{
Node cond = ParseNode();
Context.NextEntries(true,")");
Node body = new Node();
if(!Context.NextEntries(true,";").Success)
{
body = ParseNode();
}
var scopeBody = body as ScopeNode;
if(scopeBody != null)
{
scopeBody.IsSwitchScope=true;
scopeBody.SwitchCondition = cond;
}
return body;
}
else if(token.Text == "while" || token.Text == "do")
{
Node cond = ParseNode();
Context.NextEntries(true,")");
Node body = new Node();
if(!Context.NextEntries(true,";").Success)
{
body = ParseNode();
}
return new WhileLoop(cond,body,token.Text == "do");
}
else if(token.Text == "if")
{
Node cond = ParseNode();
Context.NextEntries(true,")");
Node yes = new Node();
Node no = new Node();
if(Context.MoreTokens && !Context.NextEntries(false,"else").Success)
{
yes = ParseNode();
}
if(Context.MoreTokens && Context.NextEntries(true,"else").Success)
{
no = ParseNode();
}
return new IfNode(cond,yes,no);
}
else
{
var fcall = new FunctionCallNode(token.Text){LineInfo = token.Position};
SymbolNode ret = fcall;
while(Context.MoreTokens && !Context.NextEntries(true,")").Success)
{
if(Context.NextEntries(true,",").Success) continue;
fcall.Arguments.Add(ParseNode());
}
while(Context.MoreTokens && Context.NextEntries(true,"[").Success)
{
var p = ParseNode();
ret = new GetArrayNode(ret,p){LineInfo = p.LineInfo};
Context.NextEntries(true,"]");
}
while(Context.MoreTokens && Context.NextEntries(true,".").Success)
{
var token2 = Context.NextEntry;
if(Context.MoreTokens && Context.NextEntries(true,"(").Success)
{
var mCall = new MethodCallNode(ret,token2.Text){LineInfo=token2.Position};
ret = mCall;
while(Context.MoreTokens && !Context.NextEntries(true,")").Success)
{
if(Context.NextEntries(true,",").Success) continue;
mCall.Arguments.Add(ParseNode());
}
}
else if(Context.MoreTokens && Context.NextEntries(false,"[").Success)
{
while(Context.MoreTokens && Context.NextEntries(true,"[").Success)
{
var p = ParseNode();
ret = new GetArrayNode(ret,p){LineInfo = p.LineInfo};
Context.NextEntries(true,"]");
}
}
else
{
ret = new GetMemberNode(ret,token2.Text){LineInfo = token2.Position};
}
}
if(Context.MoreTokens && Context.NextEntries(true,"++").Success)
{
var r = new PostFixIncrementNode(ret){LineInfo = ret.LineInfo};
ret = r;
}
if(Context.MoreTokens && Context.NextEntries(true,"--").Success)
{
var r = new PostFixDecrementNode(ret){LineInfo = ret.LineInfo};
ret = r;
}
return ret;
}
}
else{
Node ret = new Node();
if(token.Text == "func")
{
var res = new GetVariableNode(Context.NextEntry.Text);
}
}
}
throw new CompilerError("End of ParseValue func",new Node(){LineInfo = token.Position});
}
}
}

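A sketch of calling the parser entry points defined above. Grammar coverage in this first commit is still partial, so failures surface as CompilerError; this is an editorial example and the TLang snippet and file name are hypothetical.

    try
    {
        TLang.Parser.Node root = TLang.Parser.Parse.ParseFromString("print(\"hello\")", "example.tlang");
        if (root is TLang.Parser.ScopeNode scope)
            System.Console.WriteLine($"parsed {scope.Nodes.Count} top-level node(s)");
    }
    catch (TLang.Parser.CompilerError err)
    {
        // positions in the message come from the LexLineInfo attached to each token
        System.Console.WriteLine(err.Message);
    }
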
TLang.Parser/ClassEntryNode.cs (new file)

@@ -0,0 +1,12 @@
namespace TLang.Parser
{
public class ClassEntryNode : Node
{
public string Name {get;set;}="";
public string Modifier {get;set;}="";
public string Documentation {get;set;}="";
public Node InitialValue {get;set;}=new Node();
public bool Abstract { get; set; }=false;
}
}

TLang.Parser/ClassNode.cs (new file)

@@ -0,0 +1,13 @@
using System.Collections.Generic;
namespace TLang.Parser
{
public class ClassNode : Node
{
public string Documentation {get;set;}
public string Name {get;set;}
public string InheritsFrom {get;set;}="object";
public List<ClassEntryNode> Entries {get;set;}=new List<ClassEntryNode>();
}
}

TLang.Parser/ClosureNode.cs (new file)

@@ -0,0 +1,16 @@
using System.Collections.Generic;
namespace TLang.Parser
{
public class ClosureNode : Node
{
public List<string> Arguments {get;set;}
public Node Node {get;set;}
public ClosureNode(List<string> args, Node node)
{
Arguments = args;
Node = node;
}
}
}

TLang.Parser/CompilerError.cs (new file)

@@ -0,0 +1,39 @@
using System;
using System.Runtime.Serialization;
namespace TLang.Parser
{
[Serializable]
public class CompilerError : Exception
{
private string ErrorMessage {get;set;}
private Node CurrentNode {get;set;}
public CompilerError()
{
}
public CompilerError(string message) : base(message)
{
}
public CompilerError(string v, Node node) : base(GenerateMessage(v,node))
{
this.ErrorMessage = v;
this.CurrentNode = node;
}
private static string GenerateMessage(string v, Node node)
{
return $"Compiler Error {node.LineInfo}: {v}";
}
public CompilerError(string message, Exception innerException) : base(message, innerException)
{
}
protected CompilerError(SerializationInfo info, StreamingContext context) : base(info, context)
{
}
}
}

TLang.Parser/ConstNumberNode.cs (new file)

@@ -0,0 +1,12 @@
namespace TLang.Parser
{
public class ConstNumberNode : Node
{
public double Value {get;set;}
public ConstNumberNode(double number2)
{
this.Value = number2;
}
}
}

TLang.Parser/ContinueNode.cs (new file)

@@ -0,0 +1,6 @@
namespace TLang.Parser
{
public class ContinueNode : Node
{
}
}

TLang.Parser/DefaultNode.cs (new file)

@@ -0,0 +1,16 @@
using TLang.Lexer;
namespace TLang.Parser
{
public class DefaultNode : Node
{
public Node Node {get;set;}
public DefaultNode(Node node)
{
Node = node;
}
}
}

TLang.Parser/DivideNode.cs (new file)

@@ -0,0 +1,14 @@
namespace TLang.Parser
{
public class DivideNode : Node
{
private Node Left {get;set;}
private Node Right {get;set;}
public DivideNode(Node left, Node right)
{
Left = left;
Right = right;
}
}
}

TLang.Parser/EachLoopNode.cs (new file)

@@ -0,0 +1,16 @@
namespace TLang.Parser
{
public class EachLoopNode : Node
{
public SymbolNode Name {get;set;}
public Node Itterator {get;set;}
public Node Body {get;set;}
public EachLoopNode(SymbolNode varNode, Node expr, Node node)
{
Name = varNode;
Itterator = expr;
Body = node;
}
}
}

TLang.Parser/EqualsNode.cs (new file)

@@ -0,0 +1,14 @@
namespace TLang.Parser
{
public class EqualsNode : Node
{
private Node Left {get;set;}
private Node Right {get;set;}
public EqualsNode(Node left, Node right)
{
Left = left;
Right = right;
}
}
}

TLang.Parser/ForLoopNode.cs (new file)

@@ -0,0 +1,18 @@
namespace TLang.Parser
{
public class ForLoopNode : Node
{
public Node Init {get;set;}
public Node Condition {get;set;}
public Node Increment {get;set;}
public Node Body {get;set;}
public ForLoopNode(Node init, Node condition, Node inc, Node body)
{
Init = init;
Condition = condition;
Increment = inc;
Body = body;
}
}
}

TLang.Parser/FunctionCallNode.cs (new file)

@@ -0,0 +1,12 @@
namespace TLang.Parser
{
public class FunctionCallNode : CallNode
{
public FunctionCallNode(string text)
{
Name = text;
}
}
}

TLang.Parser/GetArrayNode.cs (new file)

@@ -0,0 +1,15 @@
namespace TLang.Parser
{
public class GetArrayNode : SymbolNode
{
public SymbolNode Symbol {get;set;}
public Node Expression {get;set;}
public GetArrayNode(SymbolNode sym, Node expr)
{
Symbol = sym;
this.Name = sym.Name;
Expression = expr;
}
}
}

TLang.Parser/GetMemberNode.cs (new file)

@@ -0,0 +1,17 @@
using TLang.Lexer;
namespace TLang.Parser
{
internal class GetMemberNode : SymbolNode
{
public SymbolNode Symbol {get;set;}
public GetMemberNode(SymbolNode symbol, string name)
{
Symbol = symbol;
Name = name;
}
}
}

TLang.Parser/GetVariableNode.cs (new file)

@@ -0,0 +1,12 @@
namespace TLang.Parser
{
public class GetVariableNode : SymbolNode
{
public GetVariableNode(string name)
{
Name = name;
}
}
}

TLang.Parser/GreaterThanEqualsNode.cs (new file)

@@ -0,0 +1,14 @@
namespace TLang.Parser
{
public class GreaterThanEqualsNode : Node
{
private Node Left {get;set;}
private Node Right {get;set;}
public GreaterThanEqualsNode(Node left, Node right)
{
Left = left;
Right = right;
}
}
}

TLang.Parser/GreaterThanNode.cs (new file)

@@ -0,0 +1,14 @@
namespace TLang.Parser
{
public class GreaterThanNode : Node
{
private Node Left {get;set;}
private Node Right {get;set;}
public GreaterThanNode(Node left, Node right)
{
Left = left;
Right = right;
}
}
}

TLang.Parser/IfNode.cs (new file)

@@ -0,0 +1,16 @@
namespace TLang.Parser
{
internal class IfNode : Node
{
public Node Condition {get;set;}
public Node Yes {get;set;}
public Node No {get;set;}
public IfNode(Node cond, Node yes, Node no)
{
Condition = cond;
Yes = yes;
No = no;
}
}
}

TLang.Parser/LAndNode.cs (new file)

@@ -0,0 +1,14 @@
namespace TLang.Parser
{
public class LAndNode : Node
{
private Node Left {get;set;}
private Node Right {get;set;}
public LAndNode(Node left, Node right)
{
Left = left;
Right = right;
}
}
}

TLang.Parser/LOrNode.cs (new file)

@@ -0,0 +1,14 @@
namespace TLang.Parser
{
public class LOrNode : Node
{
private Node Left {get;set;}
private Node Right {get;set;}
public LOrNode(Node left, Node right)
{
Left = left;
Right = right;
}
}
}

TLang.Parser/LeftShiftNode.cs (new file)

@@ -0,0 +1,14 @@
namespace TLang.Parser
{
public class LeftShiftNode : Node
{
private Node Left {get;set;}
private Node Right {get;set;}
public LeftShiftNode(Node left, Node right)
{
Left = left;
Right = right;
}
}
}

TLang.Parser/LessThanEqualsNode.cs (new file)

@@ -0,0 +1,14 @@
namespace TLang.Parser
{
public class LessThanEqualsNode : Node
{
private Node Left {get;set;}
private Node Right {get;set;}
public LessThanEqualsNode(Node left, Node right)
{
Left = left;
Right = right;
}
}
}

TLang.Parser/LessThanNode.cs (new file)

@@ -0,0 +1,14 @@
namespace TLang.Parser
{
public class LessThanNode : Node
{
private Node Left {get;set;}
private Node Right {get;set;}
public LessThanNode(Node left, Node right)
{
Left = left;
Right = right;
}
}
}

TLang.Parser/MethodCallNode.cs (new file)

@@ -0,0 +1,19 @@
using System.Collections.Generic;
using TLang.Lexer;
namespace TLang.Parser
{
public class MethodCallNode : SymbolNode
{
public SymbolNode Symbol {get;set;}
public List<Node> Arguments {get;set;}=new List<Node>();
public MethodCallNode(SymbolNode symbol, string name)
{
Symbol = symbol;
Name = name;
}
}
}

TLang.Parser/ModuloNode.cs (new file)

@@ -0,0 +1,14 @@
namespace TLang.Parser
{
public class ModuloNode : Node
{
private Node Left {get;set;}
private Node Right {get;set;}
public ModuloNode(Node left, Node right)
{
Left = left;
Right = right;
}
}
}

TLang.Parser/MultiplyNode.cs (new file)

@@ -0,0 +1,14 @@
namespace TLang.Parser
{
public class MultiplyNode : Node
{
private Node Left {get;set;}
private Node Right {get;set;}
public MultiplyNode(Node left, Node right)
{
Left = left;
Right = right;
}
}
}

TLang.Parser/NegativeNode.cs (new file)

@@ -0,0 +1,16 @@
using TLang.Lexer;
namespace TLang.Parser
{
public class NegativeNode : Node
{
public Node Node {get;set;}
public NegativeNode(Node node)
{
Node = node;
}
}
}

TLang.Parser/Node.cs (new file)

@@ -0,0 +1,10 @@
using TLang.Lexer;
namespace TLang.Parser
{
public class Node
{
// defaults to a fresh LexLineInfo so error reporting never dereferences a null position
public LexLineInfo LineInfo {get;set;} = new LexLineInfo();
}
}

TLang.Parser/NotEqualsNode.cs (new file)

@@ -0,0 +1,14 @@
namespace TLang.Parser
{
public class NotEqualsNode : Node
{
private Node Left {get;set;}
private Node Right {get;set;}
public NotEqualsNode(Node left, Node right)
{
Left = left;
Right = right;
}
}
}

TLang.Parser/NotNode.cs (new file)

@@ -0,0 +1,16 @@
using TLang.Lexer;
namespace TLang.Parser
{
public class NotNode : Node
{
public Node Node {get;set;}
public NotNode(Node node)
{
Node = node;
}
}
}

TLang.Parser/PostFixDecrementNode.cs (new file)

@@ -0,0 +1,15 @@
namespace TLang.Parser
{
public class PostFixDecrementNode : SymbolNode
{
public SymbolNode SymbolNode {get;set;}
public PostFixDecrementNode(SymbolNode symbol)
{
SymbolNode = symbol;
Name = symbol.Name;
}
}
}

TLang.Parser/PostFixIncrementNode.cs (new file)

@@ -0,0 +1,15 @@
namespace TLang.Parser
{
public class PostFixIncrementNode : SymbolNode
{
public SymbolNode SymbolNode {get;set;}
public PostFixIncrementNode(SymbolNode symbol)
{
SymbolNode = symbol;
Name = symbol.Name;
}
}
}

TLang.Parser/PowNode.cs (new file)

@@ -0,0 +1,14 @@
namespace TLang.Parser
{
public class PowNode : Node
{
private Node Left {get;set;}
private Node Right {get;set;}
public PowNode(Node left, Node right)
{
Left = left;
Right = right;
}
}
}

TLang.Parser/ReturnNode.cs (new file)

@@ -0,0 +1,12 @@
namespace TLang.Parser
{
public class ReturnNode : Node
{
public Node Expression {get;set;}
public ReturnNode(Node node)
{
Expression = node;
}
}
}

TLang.Parser/RightShiftNode.cs (new file)

@@ -0,0 +1,14 @@
namespace TLang.Parser
{
public class RightShiftNode : Node
{
private Node Left {get;set;}
private Node Right {get;set;}
public RightShiftNode(Node left, Node right)
{
Left = left;
Right = right;
}
}
}

TLang.Parser/ScopeNode.cs (new file)

@@ -0,0 +1,16 @@
using System;
using System.Collections.Generic;
namespace TLang.Parser
{
public class ScopeNode : Node
{
public bool IsSwitchScope {get;set;}
public Node SwitchCondition {get;set;}=new Node();
public List<Node> Nodes {get;set;}=new List<Node>();
public void Add(Node node)
{
Nodes.Add(node);
}
}
}

TLang.Parser/SetVariableNode.cs (new file)

@@ -0,0 +1,14 @@
namespace TLang.Parser
{
public class SetVariableNode : SymbolNode
{
public Node Expression {get;set;}
public SetVariableNode(string name, Node expr)
{
Name = name;
Expression = expr;
}
}
}

TLang.Parser/StringNode.cs (new file)

@@ -0,0 +1,12 @@
namespace TLang.Parser
{
public class StringNode : Node
{
public string Text {get;set;}
public StringNode(string text)
{
Text = text;
}
}
}

TLang.Parser/SubtractNode.cs (new file)

@@ -0,0 +1,14 @@
namespace TLang.Parser
{
public class SubtractNode : Node
{
private Node Left {get;set;}
private Node Right {get;set;}
public SubtractNode(Node left, Node right)
{
Left = left;
Right = right;
}
}
}

TLang.Parser/SymbolNode.cs (new file)

@@ -0,0 +1,7 @@
namespace TLang.Parser
{
public class SymbolNode : Node
{
public string Name {get;set;}
}
}

TLang.Parser/TLang.Parser.csproj (new file)

@@ -0,0 +1,11 @@
<Project Sdk="Microsoft.NET.Sdk">
<ItemGroup>
<ProjectReference Include="..\TLang.Lexer\TLang.Lexer.csproj" />
</ItemGroup>
<PropertyGroup>
<TargetFramework>netstandard2.0</TargetFramework>
</PropertyGroup>
</Project>

TLang.Parser/WhileLoop.cs (new file)

@@ -0,0 +1,17 @@
namespace TLang.Parser
{
public class WhileLoop : Node
{
public Node Condition {get;set;}
public Node Body {get;set;}
public bool IsDo {get;set;}
public WhileLoop(Node cond, Node body, bool v)
{
Condition = cond;
Body = body;
IsDo = v;
}
}
}

TLang.Parser/XOrNode.cs (new file)

@@ -0,0 +1,14 @@
namespace TLang.Parser
{
public class XOrNode : Node
{
private Node Left {get;set;}
private Node Right {get;set;}
public XOrNode(Node left, Node right)
{
Left = left;
Right = right;
}
}
}