5using System.Collections.Generic;
6using System.Collections.ObjectModel;
7using System.Globalization;
20 #region Private Variables
29 #region Private Methods
38 Dictionary<string, bool> included =
new Dictionary<string, bool>();
45 Lexer lexer =
new Lexer(
new List<string> { line });
47 List<Token> tokenStream =
null;
58 if (tokenStream.Count == 0)
61 if (tokenStream[0].Type !=
TokenType.Include)
64 if (tokenStream.Count < 2)
67 if (tokenStream[1].Type !=
TokenType.String)
68 throw new ParserException(
"Nach einem 'include' Befehl wird ein String (Pfad) erwartet.");
70 if (tokenStream.Count < 3)
71 throw new ParserException(
"Semicolon ';' am Ende eines 'include' Statement erwartet.");
73 if (tokenStream[2].Type !=
TokenType.SemiColon)
74 throw new ParserException(
"Semicolon ';' am Ende eines 'include' Statement erwartet.");
76 if (tokenStream.Count > 3)
77 throw new ParserException(
"Es wird nichts nach dem Semicolon ';' am Ende eines 'include' Statement erwartet.");
79 string include = (string)tokenStream[1].Lexeme;
83 if (included.ContainsKey(include))
90 included[include] =
true;
100 #region Public Methods
112 return int.Parse(st.
Lexeme, CultureInfo.InvariantCulture);
116 return float.Parse(st.
Lexeme, CultureInfo.InvariantCulture);
120 return double.Parse(st.
Lexeme, CultureInfo.InvariantCulture);
124 return bool.Parse(st.
Lexeme);
129 if (st.
Lexeme.Length == 1)
133 if (st.
Lexeme.Length >= 3 &&
140 throw new FormatException($
"Ungültiges Char-Lexeme: '{st.Lexeme}'");
151 if (st.
Lexeme.Length >= 2 &&
171 if (lexemeStr ==
null)
177 return int.Parse(lexemeStr, CultureInfo.InvariantCulture);
180 return float.Parse(lexemeStr, CultureInfo.InvariantCulture);
183 return double.Parse(lexemeStr, CultureInfo.InvariantCulture);
186 return bool.Parse(lexemeStr);
190 if (lexemeStr.Length == 1)
194 if (lexemeStr.Length >= 3 &&
195 lexemeStr[0] ==
'\'' &&
196 lexemeStr[^1] ==
'\'')
201 throw new FormatException($
"Ungültiges Char-Lexeme: '{lexemeStr}'");
205 if (lexemeStr.Length >= 2 &&
206 lexemeStr[0] ==
'"' &&
207 lexemeStr[^1] ==
'"')
209 return lexemeStr.Substring(1, lexemeStr.Length - 2);
234 List<Token> tokenStream = lexer.
GetTokens();
238 parser.DebugMode = this.manager.
Debug;
245 Optimizer optimizer =
new Optimizer(
executable);
247 optimizer.OptimizerInfo =
false;
249 optimizer.Optimize();
255 catch (Exception exception)
272 List<Token> tokenStream =
null;
275 tokenStream =
new List<Token>();
276 using (var fs =
new FileStream(
scriptName, FileMode.Open, FileAccess.Read))
277 using (var br =
new BinaryReader(fs))
279 int count = br.ReadInt32();
281 for (
int i = 0; i < count; i++)
283 var typeInt = br.ReadInt32();
284 var lexemeStr = br.ReadString();
285 var line = br.ReadInt32();
286 var column = br.ReadInt32();
292 tokenStream.Add(
new Token(type, lexeme, line, column, text));
298 string json = File.ReadAllText(
scriptName, Encoding.UTF8);
300 var serializableTokens = JsonSerializer.Deserialize<List<SerializableToken>>(json);
303 tokenStream = serializableTokens
304 .Select(st =>
new Token(
315 parser.DebugMode = this.manager.
Debug;
323 Optimizer optimizer =
new Optimizer(
executable);
325 optimizer.OptimizerInfo =
false;
327 optimizer.Optimize();
333 catch (Exception exception)
350 List<Token> tokenStream = lexer.
GetTokens();
352 using (var fs =
new FileStream(fileName, FileMode.Create, FileAccess.Write))
353 using (var bw =
new BinaryWriter(fs))
356 bw.Write(tokenStream.Count);
358 foreach (var t
in tokenStream)
360 bw.Write((
int)t.Type);
361 bw.Write(t.Lexeme?.ToString() ??
"");
369 catch (Exception exception)
385 List<Token> tokenStream = lexer.
GetTokens();
390 Lexeme = t.Lexeme?.ToString(),
396 var options =
new JsonSerializerOptions
401 string json = JsonSerializer.Serialize(serializableTokens, options);
404 File.WriteAllText(fileName, json, Encoding.UTF8);
407 catch (Exception exception)
420 #region Public Properties
441 StringBuilder sb =
new StringBuilder();
447 return sb.ToString();
The lexical analyzer (Lexer) breaks code (written in sentences) into a series of known Token and pass...
List< Token > GetTokens()
The parser builds a ScriptStack.Runtime.Executable out of the Token stream returned from the ScriptS...
Executable Parse()
Parse the token stream into an executable.
A lexical token or simply token is a string with an assigned and thus identified meaning.
A function, forward declared in a script.
List< String > sourceCode
static object ConvertLexeme(SerializableToken st)
Script(Manager manager, string scriptName, bool binary)
static object ConvertLexeme(TokenType type, string lexemeStr)
void CompileJSON(string fileName)
void Scan(string scriptName)
Script(Manager manager, string scriptName)
void CompileBinary(string fileName)
ScriptStack.Collections.ReadOnlyDictionary< String, Function > Functions
ReadOnlyCollection< String > SourceLines
An interface to modify the default process of reading text files into Scripts.
List< String > Scan(String strResourceName)
TokenType
Known types of Token.