using System;
using System.Collections.Generic;
using System.Globalization;
using System.Text;

namespace NTERA.Interpreter
{
    /// <summary>
    /// Hand-rolled lexer that walks <c>source</c> one character at a time and
    /// produces a stream of <see cref="Token"/> values via <see cref="GetTokens"/>.
    /// Behaviour varies with <see cref="Type"/> (string mode keeps whitespace and
    /// honours %...% / { escapes; Real mode enables comparison operators).
    /// </summary>
    public class Lexer
    {
        private readonly string source;
        private Marker sourceMarker;
        private char currentChar;

        private LexerType _type;

        /// <summary>
        /// Lexer mode. The token tables depend on the mode, so they are rebuilt
        /// whenever it changes.
        /// </summary>
        public LexerType Type
        {
            get => _type;
            internal set
            {
                _type = value;
                InitTokenDictionaries();
            }
        }

        /// <summary>Marker of the start of the most recently produced token.</summary>
        public Marker TokenMarker { get; set; }

        /// <summary>
        /// Text of the last identifier/word token. NOTE: "Identifer" (sic) is the
        /// established public name; kept for caller compatibility.
        /// </summary>
        public string Identifer { get; set; }

        /// <summary>Value of the last <see cref="Token.Value"/> token.</summary>
        public Value Value { get; set; }

        public Lexer(string input, LexerType type = LexerType.Both)
        {
            Type = type;
            source = input;
            // Pointer starts at -1 so the first GetNextChar() lands on index 0.
            sourceMarker = new Marker(-1, 1, 0);
        }

        /// <summary>Repositions the lexer at an arbitrary marker (e.g. for re-scanning).</summary>
        public void GoTo(Marker marker)
        {
            sourceMarker = marker;
        }

        /// <summary>
        /// Advances to (or, with <paramref name="peek"/>, looks at) the next character.
        /// Returns '\0' once the source is exhausted; the pointer then stays pinned
        /// at source.Length, so callers must not loop on the result without an EOF guard.
        /// Note that even a peek overwrites <see cref="currentChar"/>.
        /// </summary>
        char GetNextChar(bool peek = false)
        {
            if (sourceMarker.Pointer + 1 >= source.Length)
            {
                sourceMarker.Pointer = source.Length;
                return currentChar = (char)0;
            }

            if (peek)
                return currentChar = source[sourceMarker.Pointer + 1];

            sourceMarker.Column++;
            sourceMarker.Pointer++;

            if ((currentChar = source[sourceMarker.Pointer]) == '\n')
            {
                sourceMarker.Column = 1;
                sourceMarker.Line++;
            }

            return currentChar;
        }

        // Generic type arguments restored (they were stripped in the extracted source):
        // keyword and line-keyword tables are keyed by string (see the StringComparer
        // constructor arguments and the TryGetValue(Identifer, ...) lookups); the
        // single-character table is keyed by char (TryGetValue(currentChar, ...)).
        private Dictionary<string, Token> TokenDictionary;
        private Dictionary<string, Token> TokenLineDictionary;
        private Dictionary<char, Token> TokenCharDictionary;

        /// <summary>
        /// Rebuilds the keyword/character lookup tables from the attributes attached
        /// to the <see cref="Token"/> enum members.
        /// </summary>
        private void InitTokenDictionaries()
        {
            TokenDictionary = new Dictionary<string, Token>(StringComparer.InvariantCultureIgnoreCase);
            TokenLineDictionary = new Dictionary<string, Token>(StringComparer.InvariantCultureIgnoreCase);
            TokenCharDictionary = new Dictionary<char, Token>();

            foreach (Token token in Enum.GetValues(typeof(Token)))
            {
                // NOTE(review): the two loops below call the same helper but read
                // disjoint attribute properties (Keyword/IsLineKeyword vs.
                // Character/LexerContext) — generic type arguments were probably
                // lost in extraction; verify against Utility.GetEnumAttributes.
                foreach (var attribute in Utility.GetEnumAttributes(token))
                {
                    if (attribute.IsLineKeyword)
                        TokenLineDictionary[attribute.Keyword] = token;
                    else
                        TokenDictionary[attribute.Keyword] = token;
                }

                foreach (var attribute in Utility.GetEnumAttributes(token))
                {
                    // Only register the character token if it applies to this lexer mode.
                    if ((attribute.LexerContext & Type) > 0)
                        TokenCharDictionary[attribute.Character] = token;
                }
            }
        }

        /// <summary>Whitespace other than newline (newline is a token of its own).</summary>
        private static bool IsWhitespace(char c)
        {
            return char.IsWhiteSpace(c) && c != '\n';
        }

        private static bool IsEndOfLine(char c)
        {
            return c == '\n' || c == '\r' || c == '\0';
        }

        /// <summary>Characters that begin an escape sequence in string mode.</summary>
        private static bool IsEscape(char c)
        {
            return c == '%' || c == '{';
        }

        /// <summary>
        /// Classifies the character currently under the cursor, consuming any extra
        /// characters a multi-character token needs. Returns <see cref="Token.Unknown"/>
        /// for word/identifier characters.
        /// NOTE(review): the parameter <paramref name="c"/> is ignored — this method
        /// reads <see cref="currentChar"/> — so calling it after a peek can still
        /// consume input in the comment/%/string cases; kept as-is for compatibility.
        /// </summary>
        private Token DetermineToken(char c)
        {
            if (TokenCharDictionary.TryGetValue(currentChar, out Token charToken))
                return charToken;

            switch (currentChar)
            {
                case ';': // semicolon starts a comment that runs to end of line
                    // EOF guard: GetNextChar() returns '\0' forever once the source is
                    // exhausted, so an unterminated trailing comment must not spin.
                    while (currentChar != '\n' && currentChar != (char)0)
                        GetNextChar();
                    return Token.NewLine;

                case '<':
                    if (!Type.HasFlag(LexerType.Real))
                        break;
                    if (GetNextChar(true) == '>')
                    {
                        GetNextChar();
                        return Token.NotEqual;
                    }
                    if (GetNextChar(true) == '=')
                    {
                        GetNextChar();
                        return Token.LessEqual;
                    }
                    return Token.Less;

                case '>':
                    if (!Type.HasFlag(LexerType.Real))
                        break;
                    if (GetNextChar(true) == '=')
                    {
                        GetNextChar();
                        return Token.MoreEqual;
                    }
                    return Token.More;

                case '+':
                    if (GetNextChar(true) == '=')
                    {
                        GetNextChar();
                        return Token.Append;
                    }
                    return Token.Plus;

                case '%':
                    // %...% form; re-emitted verbatim including delimiters.
                    StringBuilder builder = new StringBuilder();
                    // EOF guard: an unterminated '%' must not loop forever on '\0'.
                    while (GetNextChar() != '%' && currentChar != (char)0)
                        builder.Append(currentChar);
                    Value = $"%{builder}%";
                    return Token.Value;

                case '"':
                    // Quoted string with \n, \t, \\ and \" escapes.
                    string str = "";
                    // EOF guard: an unterminated string must not loop forever on '\0'.
                    while (GetNextChar() != '"' && currentChar != (char)0)
                    {
                        if (currentChar == '\\')
                        {
                            switch (char.ToLower(GetNextChar()))
                            {
                                case 'n': str += '\n'; break;
                                case 't': str += '\t'; break;
                                case '\\': str += '\\'; break;
                                case '"': str += '"'; break;
                                // Unknown escapes are silently dropped (original behaviour).
                            }
                        }
                        else
                        {
                            str += currentChar;
                        }
                    }
                    Value = new Value(str);
                    return Token.Value;

                case (char)0:
                    return Token.EOF;
            }

            return Token.Unknown;
        }

        /// <summary>
        /// Lazily enumerates the token stream. Word tokens set <see cref="Identifer"/>;
        /// literal tokens set <see cref="Value"/>. Always terminates with
        /// <see cref="Token.EOF"/>.
        /// </summary>
        public IEnumerable<Token> GetTokens()
        {
            while (true)
            {
                // Skip insignificant whitespace (string mode keeps it); '\r' is always skipped.
                while (IsWhitespace(GetNextChar()) && Type != LexerType.String || currentChar == '\r')
                {
                }

                TokenMarker = sourceMarker;

                Token token = DetermineToken(currentChar);

                if (token == Token.EOF)
                {
                    yield return Token.EOF;
                    yield break;
                }

                if (token != Token.Unknown)
                {
                    yield return token;
                    continue;
                }

                // Unknown character: accumulate a word until the next token boundary.
                StringBuilder bodyBuilder = new StringBuilder(currentChar.ToString());
                while ((!IsEscape(GetNextChar(true)) || Type != LexerType.String)
                       && DetermineToken(GetNextChar(true)) == Token.Unknown
                       && (!IsWhitespace(GetNextChar(true)) || Type == LexerType.String)
                       && GetNextChar(true) != '\r')
                {
                    bodyBuilder.Append(GetNextChar());
                }

                string result = bodyBuilder.ToString();

                // Decimal numeric literal?
                if (double.TryParse(result, NumberStyles.Float, CultureInfo.InvariantCulture, out var real))
                {
                    Value = real;
                    yield return Token.Value;
                    continue;
                }

                // Hex literal "0x...": strip only the leading prefix (Replace("0x", "")
                // removed every occurrence, mangling e.g. "0x10x2"), and parse with the
                // invariant culture since this is machine text, not UI text.
                if (result.StartsWith("0x", StringComparison.Ordinal)
                    && int.TryParse(result.Substring(2), NumberStyles.HexNumber, CultureInfo.InvariantCulture, out int hexResult))
                {
                    Value = hexResult;
                    yield return Token.Value;
                    continue;
                }

                Identifer = result;

                // Ordinary keyword?
                if (TokenDictionary.TryGetValue(Identifer, out token))
                {
                    yield return token;
                    continue;
                }

                // In string mode the word may carry one leading whitespace char; drop it
                // before the line-keyword lookup (original behaviour).
                if (Type == LexerType.String && char.IsWhiteSpace(Identifer[0]))
                    Identifer = Identifer.Substring(1);

                // Line keyword: the rest of the line is emitted as one raw Value token.
                if (TokenLineDictionary.TryGetValue(Identifer, out token))
                {
                    bodyBuilder = new StringBuilder();
                    while (!IsEndOfLine(GetNextChar(true)))
                        bodyBuilder.Append(GetNextChar());

                    yield return token;

                    string strValue = bodyBuilder.ToString();
                    if (strValue.Length > 0 && char.IsWhiteSpace(strValue[0]))
                        strValue = strValue.Substring(1);

                    Value = new Value(strValue);
                    yield return Token.Value;

                    yield return currentChar == '\0' ? Token.EOF : Token.NewLine;
                    continue;
                }

                yield return Token.Identifer;
            }
        }
    }
}