From d0a4b28308a4e3228159f5051085e45a61d88e67 Mon Sep 17 00:00:00 2001 From: olivier Date: Thu, 8 Mar 2018 15:25:31 +0100 Subject: [PATCH 01/31] fsm lexer typing : remove unused type parameter --- ParserTests/GenericLexerTests.cs | 2 +- samples/ParserExample/Program.cs | 22 ++--- sly/lexer/GenericLexer.cs | 44 +++++----- sly/lexer/Lexer.cs | 2 +- sly/lexer/LexerException.cs | 2 +- sly/lexer/fsm/FSMLexer.cs | 18 ++--- sly/lexer/fsm/FSMLexerBuilder.cs | 134 +++++++++++++++---------------- sly/lexer/fsm/FSMTransition.cs | 6 +- sly/parser/parser/Parser.cs | 4 +- 9 files changed, 115 insertions(+), 119 deletions(-) diff --git a/ParserTests/GenericLexerTests.cs b/ParserTests/GenericLexerTests.cs index 52eef79b..1702bf54 100644 --- a/ParserTests/GenericLexerTests.cs +++ b/ParserTests/GenericLexerTests.cs @@ -386,7 +386,7 @@ public void TestLexerError() Assert.False(lexerRes.IsError); var lexer = lexerRes.Result; string source = "hello world 2 + 2 "; - var errException = Assert.Throws>(() => lexer.Tokenize(source).ToList()); + var errException = Assert.Throws(() => lexer.Tokenize(source).ToList()); var error = errException.Error; Assert.Equal(0, error.Line); Assert.Equal(13, error.Column); diff --git a/samples/ParserExample/Program.cs b/samples/ParserExample/Program.cs index 861f7003..2d311e3c 100644 --- a/samples/ParserExample/Program.cs +++ b/samples/ParserExample/Program.cs @@ -120,7 +120,7 @@ static void TestFactorial() static void testLexerBuilder() { - var builder = new FSMLexerBuilder(); + var builder = new FSMLexerBuilder(); // conf @@ -135,13 +135,13 @@ static void testLexerBuilder() // string literal - builder.Transition('\"', JsonToken.STRING) + builder.Transition('\"') .Mark("in_string") - .ExceptTransitionTo(new char[] { '\"', '\\' }, "in_string", JsonToken.STRING) - .Transition('\\',JsonToken.STRING) + .ExceptTransitionTo(new char[] { '\"', '\\' }, "in_string") + .Transition('\\') .Mark("escape") - .AnyTransitionTo(' ',"in_string",JsonToken.STRING) - .Transition('\"', JsonToken.STRING) + .AnyTransitionTo(' ',"in_string") + .Transition('\"') .End(JsonToken.STRING) .Mark("string_end") .CallBack((FSMMatch match) => { @@ -179,15 +179,15 @@ static void testLexerBuilder() //numeric builder.GoTo("start") - .RangeTransition('0', '9', JsonToken.INT, JsonToken.DOUBLE) + .RangeTransition('0', '9') .Mark("in_int") - .RangeTransitionTo('0', '9', "in_int", JsonToken.INT, JsonToken.DOUBLE) + .RangeTransitionTo('0', '9', "in_int") .End(JsonToken.INT) - .Transition('.', JsonToken.DOUBLE) + .Transition('.') .Mark("start_double") - .RangeTransition('0', '9', JsonToken.INT, JsonToken.INT, JsonToken.DOUBLE) + .RangeTransition('0', '9') .Mark("in_double") - .RangeTransitionTo('0', '9', "in_double", JsonToken.INT, JsonToken.DOUBLE) + .RangeTransitionTo('0', '9', "in_double") .End(JsonToken.DOUBLE); diff --git a/sly/lexer/GenericLexer.cs b/sly/lexer/GenericLexer.cs index 26a23b1c..edfd4286 100644 --- a/sly/lexer/GenericLexer.cs +++ b/sly/lexer/GenericLexer.cs @@ -61,7 +61,7 @@ public class GenericLexer : ILexer where IN : struct public static string multi_line_comment_start = "multi_line_comment_start"; - protected FSMLexer LexerFsm; + protected FSMLexer LexerFsm; protected BuildExtension ExtensionBuilder; @@ -69,7 +69,7 @@ public class GenericLexer : ILexer where IN : struct protected IN identifierDerivedToken; protected IN intDerivedToken; protected IN doubleDerivedToken; - public FSMLexerBuilder FSMBuilder; + public FSMLexerBuilder FSMBuilder; protected char StringDelimiterChar; @@ -91,7 +91,7 @@ public 
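A hedged sketch (not part of the patch) of how this error path looks from calling code, based on the TestLexerError assertions above: Tokenize throws a LexerException whose Error property, a LexicalError, carries the line, column and offending character. The lexer instance and its token enum are assumed to be built as in the test; any generic parameters remaining on LexerException after this patch are omitted here.

    using System;
    using System.Linq;
    using sly.lexer;

    public static class LexerErrorDemo
    {
        // Tokenizes `source` and reports the first lexical error, if any.
        public static void Report<T>(ILexer<T> lexer, string source) where T : struct
        {
            try
            {
                var tokens = lexer.Tokenize(source).ToList();
                Console.WriteLine($"{tokens.Count} tokens");
            }
            catch (LexerException e)
            {
                // Line, Column and UnexpectedChar are the members asserted in TestLexerError.
                LexicalError error = e.Error;
                Console.WriteLine($"unexpected '{error.UnexpectedChar}' at line {error.Line}, column {error.Column}");
            }
        }
    }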
GenericLexer(IdentifierType idType = IdentifierType.Alpha, BuildExtension private void InitializeStaticLexer(IdentifierType idType = IdentifierType.Alpha, params GenericToken[] staticTokens) { - FSMBuilder = new FSMLexerBuilder(); + FSMBuilder = new FSMLexerBuilder(); StringCounter = 0; // conf @@ -113,17 +113,17 @@ private void InitializeStaticLexer(IdentifierType idType = IdentifierType.Alpha, if (staticTokens.ToList().Contains(GenericToken.Int) || staticTokens.ToList().Contains(GenericToken.Double)) { FSMBuilder = FSMBuilder.GoTo(start) - .RangeTransition('0', '9', GenericToken.Int, GenericToken.Double) + .RangeTransition('0', '9') .Mark(in_int) - .RangeTransitionTo('0', '9', in_int, GenericToken.Int, GenericToken.Double) + .RangeTransitionTo('0', '9', in_int) .End(GenericToken.Int); if (staticTokens.ToList().Contains(GenericToken.Double)) { - FSMBuilder.Transition('.', GenericToken.Double) + FSMBuilder.Transition('.') .Mark(start_double) - .RangeTransition('0', '9', GenericToken.Int, GenericToken.Int, GenericToken.Double) + .RangeTransition('0', '9') .Mark(in_double) - .RangeTransitionTo('0', '9', in_double, GenericToken.Int, GenericToken.Double) + .RangeTransitionTo('0', '9', in_double) .End(GenericToken.Double); } } @@ -137,28 +137,28 @@ private void InitializeIdentifier(IdentifierType idType = IdentifierType.Alpha) // identifier FSMBuilder.GoTo(start). - RangeTransition('a', 'z', GenericToken.Identifier). + RangeTransition('a', 'z'). Mark(in_identifier) .End(GenericToken.Identifier); FSMBuilder.GoTo(start). - RangeTransitionTo('A', 'Z', in_identifier, GenericToken.Identifier, GenericToken.Identifier). - RangeTransitionTo('a', 'z', in_identifier, GenericToken.Identifier). - RangeTransitionTo('A', 'Z', in_identifier, GenericToken.Identifier). + RangeTransitionTo('A', 'Z', in_identifier). + RangeTransitionTo('a', 'z', in_identifier). + RangeTransitionTo('A', 'Z', in_identifier). End(GenericToken.Identifier); if (idType == IdentifierType.AlphaNumeric || idType == IdentifierType.AlphaNumericDash) { FSMBuilder.GoTo(in_identifier). - RangeTransitionTo('0', '9', in_identifier, GenericToken.Identifier, GenericToken.Identifier); + RangeTransitionTo('0', '9', in_identifier); } if (idType == IdentifierType.AlphaNumericDash) { FSMBuilder.GoTo(in_identifier). - TransitionTo('-', in_identifier, GenericToken.Identifier, GenericToken.Identifier). - TransitionTo('_', in_identifier, GenericToken.Identifier, GenericToken.Identifier); + TransitionTo('-', in_identifier). + TransitionTo('_', in_identifier); FSMBuilder.GoTo(start). 
- TransitionTo('_', in_identifier, GenericToken.Identifier, GenericToken.Identifier); + TransitionTo('_', in_identifier); } } @@ -332,13 +332,13 @@ public void AddStringLexem(IN token, string stringDelimiter, string escapeDelimi { FSMBuilder.GoTo(start); - FSMBuilder.Transition(StringDelimiterChar, GenericToken.String) + FSMBuilder.Transition(StringDelimiterChar) .Mark(in_string+StringCounter) - .ExceptTransitionTo(new char[] { StringDelimiterChar, EscapeStringDelimiterChar }, in_string+StringCounter, GenericToken.String) - .Transition(EscapeStringDelimiterChar, GenericToken.String) + .ExceptTransitionTo(new char[] { StringDelimiterChar, EscapeStringDelimiterChar }, in_string+StringCounter) + .Transition(EscapeStringDelimiterChar) .Mark(escape_string+StringCounter) - .AnyTransitionTo(' ', in_string+StringCounter, GenericToken.String) - .Transition(StringDelimiterChar, GenericToken.String) + .AnyTransitionTo(' ', in_string+StringCounter) + .Transition(StringDelimiterChar) .End(GenericToken.String) .Mark(string_end+StringCounter) .CallBack(callback); @@ -394,7 +394,7 @@ public void AddSugarLexem(IN token, string specialValue) FSMBuilder.GoTo(start); for (int i = 0; i < specialValue.Length; i++) { - FSMBuilder.SafeTransition(specialValue[i], GenericToken.SugarToken); + FSMBuilder.SafeTransition(specialValue[i]); } FSMBuilder.End(GenericToken.SugarToken) .CallBack(callback); diff --git a/sly/lexer/Lexer.cs b/sly/lexer/Lexer.cs index 5ef26789..1d394d13 100644 --- a/sly/lexer/Lexer.cs +++ b/sly/lexer/Lexer.cs @@ -53,7 +53,7 @@ public IEnumerable> Tokenize(string source) if (matchedDefinition == null) { - throw new LexerException(new LexicalError(currentLine,currentColumn, source[currentIndex])); + throw new LexerException(new LexicalError(currentLine,currentColumn, source[currentIndex])); } else { diff --git a/sly/lexer/LexerException.cs b/sly/lexer/LexerException.cs index e68b6440..f1447597 100644 --- a/sly/lexer/LexerException.cs +++ b/sly/lexer/LexerException.cs @@ -7,7 +7,7 @@ namespace sly.lexer { - public class LexerException : Exception + public class LexerException : Exception { public LexicalError Error { get; set; } diff --git a/sly/lexer/fsm/FSMLexer.cs b/sly/lexer/fsm/FSMLexer.cs index 6d7721d9..cd24e5a2 100644 --- a/sly/lexer/fsm/FSMLexer.cs +++ b/sly/lexer/fsm/FSMLexer.cs @@ -27,10 +27,10 @@ public class FSMMatch } } - public class FSMLexer + public class FSMLexer { - private Dictionary>> Transitions; + private Dictionary> Transitions; private Dictionary> Nodes; @@ -52,7 +52,7 @@ public class FSMLexer public FSMLexer() { Nodes = new Dictionary>(); - Transitions = new Dictionary>>(); + Transitions = new Dictionary>(); Callbacks = new Dictionary>(); Actions = new Dictionary(); IgnoreWhiteSpace = false; @@ -111,24 +111,24 @@ internal void SetAction(int nodeId, NodeAction action) #region build - public FSMTransition GetTransition(int nodeId, char token) + public FSMTransition GetTransition(int nodeId, char token) { - FSMTransition transition = null; + FSMTransition transition = null; if (HasState(nodeId)) { if (Transitions.ContainsKey(nodeId)) { var leavingTransitions = Transitions[nodeId]; - transition = leavingTransitions.FirstOrDefault((FSMTransition t) => t.Match(token)); + transition = leavingTransitions.FirstOrDefault((FSMTransition t) => t.Match(token)); } } return transition; } - public void AddTransition(FSMTransition transition) + public void AddTransition(FSMTransition transition) { - var transitions = new List>(); + var transitions = new List(); if 
(Transitions.ContainsKey(transition.FromNode)) { transitions = Transitions[transition.FromNode]; @@ -273,7 +273,7 @@ public FSMMatch Run(string source, int start) if (lastNode == 0 && !tokenStarted && !successes.Any() && CurrentPosition < source.Length) { - throw new LexerException(new LexicalError(CurrentLine, CurrentColumn, source[CurrentPosition])); + throw new LexerException(new LexicalError(CurrentLine, CurrentColumn, source[CurrentPosition])); } ; } diff --git a/sly/lexer/fsm/FSMLexerBuilder.cs b/sly/lexer/fsm/FSMLexerBuilder.cs index b464b3f9..054ccef9 100644 --- a/sly/lexer/fsm/FSMLexerBuilder.cs +++ b/sly/lexer/fsm/FSMLexerBuilder.cs @@ -12,10 +12,10 @@ namespace sly.lexer.fsm public delegate string NodeAction(string value); public delegate bool TransitionPrecondition(string value); - public class FSMLexerBuilder + public class FSMLexerBuilder { - public FSMLexer Fsm { get; private set; } + public FSMLexer Fsm { get; private set; } private int CurrentState; @@ -27,7 +27,7 @@ public class FSMLexerBuilder public FSMLexerBuilder() { - Fsm = new FSMLexer(); + Fsm = new FSMLexer(); CurrentState = 0; Marks = new Dictionary(); Fsm.AddNode(default(N)); @@ -36,7 +36,7 @@ public FSMLexerBuilder() #region MARKS - public FSMLexerBuilder GoTo(int state) + public FSMLexerBuilder GoTo(int state) { if (Fsm.HasState(state)) { @@ -48,7 +48,7 @@ public FSMLexerBuilder GoTo(int state) } return this; } - public FSMLexerBuilder GoTo(string mark) + public FSMLexerBuilder GoTo(string mark) { if (Marks.ContainsKey(mark)) { @@ -62,7 +62,7 @@ public FSMLexerBuilder GoTo(string mark) } - public FSMLexerBuilder Mark(string mark) + public FSMLexerBuilder Mark(string mark) { Marks[mark] = CurrentState; Fsm.GetNode(CurrentState).Mark = mark; @@ -87,13 +87,13 @@ public FSMNode GetNode(string mark) #region special chars - public FSMLexerBuilder IgnoreWS() + public FSMLexerBuilder IgnoreWS() { Fsm.IgnoreWhiteSpace = true; return this; } - public FSMLexerBuilder IgnoreEOL() + public FSMLexerBuilder IgnoreEOL() { Fsm.IgnoreEOL = true; return this; @@ -101,7 +101,7 @@ public FSMLexerBuilder IgnoreEOL() - public FSMLexerBuilder WhiteSpace(char spacechar) + public FSMLexerBuilder WhiteSpace(char spacechar) { Fsm.WhiteSpaces.Add(spacechar); return this; @@ -116,7 +116,7 @@ public FSMLexerBuilder WhiteSpace(char spacechar) #region NODES - public FSMLexerBuilder End(N nodeValue) + public FSMLexerBuilder End(N nodeValue) { if (Fsm.HasState(CurrentState)) { @@ -129,7 +129,7 @@ public FSMLexerBuilder End(N nodeValue) return this; } - public FSMLexerBuilder CallBack(NodeCallback callback) + public FSMLexerBuilder CallBack(NodeCallback callback) { if (Fsm.HasState(CurrentState)) { @@ -139,7 +139,7 @@ public FSMLexerBuilder CallBack(NodeCallback callback) return this; } - public FSMLexerBuilder Action(NodeAction action) { + public FSMLexerBuilder Action(NodeAction action) { if (Fsm.HasState(CurrentState)) { Fsm.SetAction(CurrentState, action); @@ -155,49 +155,47 @@ public FSMLexerBuilder Action(NodeAction action) { - public FSMLexerBuilder SafeTransition(char input, params T[] transitionData) + public FSMLexerBuilder SafeTransition(char input) { var transition = Fsm.GetTransition(CurrentState, input); if (transition != null) { - transition.TransitionValues.AddRange(transitionData); CurrentState = transition.ToNode; } else { - return TransitionTo(input, Fsm.NewNodeId, transitionData); + return TransitionTo(input, Fsm.NewNodeId); } return this; } - public FSMLexerBuilder SafeTransition(char input, TransitionPrecondition 
precondition, params T[] transitionData) + public FSMLexerBuilder SafeTransition(char input, TransitionPrecondition precondition) { var transition = Fsm.GetTransition(CurrentState, input); if (transition != null) { - transition.TransitionValues.AddRange(transitionData); CurrentState = transition.ToNode; } else { - return TransitionTo(input, Fsm.NewNodeId, precondition, transitionData); + return TransitionTo(input, Fsm.NewNodeId, precondition); } return this; } - public FSMLexerBuilder Transition(char input, params T[] transitionData) + public FSMLexerBuilder Transition(char input) { - return TransitionTo(input, Fsm.NewNodeId, transitionData); + return TransitionTo(input, Fsm.NewNodeId); } - public FSMLexerBuilder Transition(char input, TransitionPrecondition precondition, params T[] transitionData) + public FSMLexerBuilder Transition(char input, TransitionPrecondition precondition) { - return TransitionTo(input, Fsm.NewNodeId, precondition, transitionData); + return TransitionTo(input, Fsm.NewNodeId, precondition); } - public FSMLexerBuilder ConstantTransition(string constant, TransitionPrecondition precondition = null) + public FSMLexerBuilder ConstantTransition(string constant, TransitionPrecondition precondition = null) { char c = constant[0]; this.SafeTransition(c, precondition); @@ -209,7 +207,7 @@ public FSMLexerBuilder ConstantTransition(string constant, TransitionPreco return this; } - public FSMLexerBuilder RepetitionTransition(int count, string pattern, TransitionPrecondition precondition = null) + public FSMLexerBuilder RepetitionTransition(int count, string pattern, TransitionPrecondition precondition = null) { if (count > 0 && !string.IsNullOrEmpty(pattern)) { @@ -237,62 +235,62 @@ public FSMLexerBuilder RepetitionTransition(int count, string pattern, Tra } - public FSMLexerBuilder RangeTransition(char start, char end, params T[] transitionData) + public FSMLexerBuilder RangeTransition(char start, char end) { - return RangeTransitionTo(start, end, Fsm.NewNodeId, transitionData); + return RangeTransitionTo(start, end, Fsm.NewNodeId); } - public FSMLexerBuilder RangeTransition(char start, char end, TransitionPrecondition precondition, params T[] transitionData) + public FSMLexerBuilder RangeTransition(char start, char end, TransitionPrecondition precondition) { - return RangeTransitionTo(start, end, Fsm.NewNodeId, precondition, transitionData); + return RangeTransitionTo(start, end, Fsm.NewNodeId, precondition); } - public FSMLexerBuilder ExceptTransition(char[] exceptions, params T[] transitionData) + public FSMLexerBuilder ExceptTransition(char[] exceptions) { - return ExceptTransitionTo(exceptions, Fsm.NewNodeId, transitionData); + return ExceptTransitionTo(exceptions, Fsm.NewNodeId); } - public FSMLexerBuilder ExceptTransition(char[] exceptions, TransitionPrecondition precondition, params T[] transitionData) + public FSMLexerBuilder ExceptTransition(char[] exceptions, TransitionPrecondition precondition) { - return ExceptTransitionTo(exceptions, Fsm.NewNodeId, precondition, transitionData); + return ExceptTransitionTo(exceptions, Fsm.NewNodeId, precondition); } - public FSMLexerBuilder AnyTransition(char input, params T[] transitionData) + public FSMLexerBuilder AnyTransition(char input) { - return AnyTransitionTo(input, Fsm.NewNodeId, transitionData); + return AnyTransitionTo(input, Fsm.NewNodeId); } - public FSMLexerBuilder AnyTransition(char input, TransitionPrecondition precondition, params T[] transitionData) + public FSMLexerBuilder AnyTransition(char input, 
TransitionPrecondition precondition) { - return AnyTransitionTo(input, Fsm.NewNodeId, precondition, transitionData); + return AnyTransitionTo(input, Fsm.NewNodeId, precondition); } - public FSMLexerBuilder TransitionTo(char input, int toNode, params T[] transitionData) + public FSMLexerBuilder TransitionTo(char input, int toNode) { AbstractTransitionCheck checker = new TransitionSingle(input); if (!Fsm.HasState(toNode)) { Fsm.AddNode(); } - var transition = new FSMTransition(checker, CurrentState, toNode, transitionData.ToList()); + var transition = new FSMTransition(checker, CurrentState, toNode); Fsm.AddTransition(transition); CurrentState = toNode; return this; } - public FSMLexerBuilder TransitionTo(char input, int toNode, TransitionPrecondition precondition, params T[] transitionData) + public FSMLexerBuilder TransitionTo(char input, int toNode, TransitionPrecondition precondition) { AbstractTransitionCheck checker = new TransitionSingle(input, precondition); if (!Fsm.HasState(toNode)) { Fsm.AddNode(); } - var transition = new FSMTransition(checker, CurrentState, toNode, transitionData.ToList()); + var transition = new FSMTransition(checker, CurrentState, toNode); Fsm.AddTransition(transition); CurrentState = toNode; return this; @@ -300,132 +298,132 @@ public FSMLexerBuilder TransitionTo(char input, int toNode, TransitionPrec - public FSMLexerBuilder RangeTransitionTo(char start, char end, int toNode, params T[] transitionData) + public FSMLexerBuilder RangeTransitionTo(char start, char end, int toNode) { AbstractTransitionCheck checker = new TransitionRange(start, end); if (!Fsm.HasState(toNode)) { Fsm.AddNode(); } - var transition = new FSMTransition(checker, CurrentState, toNode, transitionData.ToList()); + var transition = new FSMTransition(checker, CurrentState, toNode); Fsm.AddTransition(transition); CurrentState = toNode; return this; } - public FSMLexerBuilder RangeTransitionTo(char start, char end, int toNode, TransitionPrecondition precondition, params T[] transitionData) + public FSMLexerBuilder RangeTransitionTo(char start, char end, int toNode, TransitionPrecondition precondition) { AbstractTransitionCheck checker = new TransitionRange(start, end, precondition); if (!Fsm.HasState(toNode)) { Fsm.AddNode(); } - var transition = new FSMTransition(checker, CurrentState, toNode, transitionData.ToList()); + var transition = new FSMTransition(checker, CurrentState, toNode); Fsm.AddTransition(transition); CurrentState = toNode; return this; } - public FSMLexerBuilder ExceptTransitionTo(char[] exceptions, int toNode, params T[] transitionData) + public FSMLexerBuilder ExceptTransitionTo(char[] exceptions, int toNode) { AbstractTransitionCheck checker = new TransitionAnyExcept(exceptions); if (!Fsm.HasState(toNode)) { Fsm.AddNode(); } - var transition = new FSMTransition(checker, CurrentState, toNode, transitionData.ToList()); + var transition = new FSMTransition(checker, CurrentState, toNode); Fsm.AddTransition(transition); CurrentState = toNode; return this; } - public FSMLexerBuilder ExceptTransitionTo(char[] exceptions, int toNode, TransitionPrecondition precondition, params T[] transitionData) + public FSMLexerBuilder ExceptTransitionTo(char[] exceptions, int toNode, TransitionPrecondition precondition) { AbstractTransitionCheck checker = new TransitionAnyExcept(precondition, exceptions); if (!Fsm.HasState(toNode)) { Fsm.AddNode(); } - var transition = new FSMTransition(checker, CurrentState, toNode, transitionData.ToList()); + var transition = new FSMTransition(checker, 
CurrentState, toNode); Fsm.AddTransition(transition); CurrentState = toNode; return this; } - public FSMLexerBuilder AnyTransitionTo(char input, int toNode, params T[] transitionData) + public FSMLexerBuilder AnyTransitionTo(char input, int toNode) { AbstractTransitionCheck checker = new TransitionAny(input); if (!Fsm.HasState(toNode)) { Fsm.AddNode(); } - var transition = new FSMTransition(checker, CurrentState, toNode, transitionData.ToList()); + var transition = new FSMTransition(checker, CurrentState, toNode); Fsm.AddTransition(transition); CurrentState = toNode; return this; } - public FSMLexerBuilder AnyTransitionTo(char input, int toNode, TransitionPrecondition precondition, params T[] transitionData) + public FSMLexerBuilder AnyTransitionTo(char input, int toNode, TransitionPrecondition precondition) { AbstractTransitionCheck checker = new TransitionAny(input, precondition); if (!Fsm.HasState(toNode)) { Fsm.AddNode(); } - var transition = new FSMTransition(checker, CurrentState, toNode, transitionData.ToList()); + var transition = new FSMTransition(checker, CurrentState, toNode); Fsm.AddTransition(transition); CurrentState = toNode; return this; } - public FSMLexerBuilder TransitionTo(char input, string toNodeMark, params T[] transitionData) + public FSMLexerBuilder TransitionTo(char input, string toNodeMark) { int toNode = Marks[toNodeMark]; - return TransitionTo(input, toNode, transitionData); + return TransitionTo(input, toNode); } - public FSMLexerBuilder TransitionTo(char input, string toNodeMark, TransitionPrecondition precondition, params T[] transitionData) + public FSMLexerBuilder TransitionTo(char input, string toNodeMark, TransitionPrecondition precondition) { int toNode = Marks[toNodeMark]; - return TransitionTo(input, toNode, precondition, transitionData); + return TransitionTo(input, toNode, precondition); } - public FSMLexerBuilder RangeTransitionTo(char start, char end, string toNodeMark, params T[] transitionData) + public FSMLexerBuilder RangeTransitionTo(char start, char end, string toNodeMark) { int toNode = Marks[toNodeMark]; - return RangeTransitionTo(start, end, toNode, transitionData); + return RangeTransitionTo(start, end, toNode); } - public FSMLexerBuilder RangeTransitionTo(char start, char end, string toNodeMark, TransitionPrecondition precondition, params T[] transitionData) + public FSMLexerBuilder RangeTransitionTo(char start, char end, string toNodeMark, TransitionPrecondition precondition) { int toNode = Marks[toNodeMark]; - return RangeTransitionTo(start, end, toNode, precondition, transitionData); + return RangeTransitionTo(start, end, toNode, precondition); } - public FSMLexerBuilder ExceptTransitionTo(char[] exceptions, string toNodeMark, params T[] transitionData) + public FSMLexerBuilder ExceptTransitionTo(char[] exceptions, string toNodeMark) { int toNode = Marks[toNodeMark]; - return ExceptTransitionTo(exceptions, toNode, transitionData); + return ExceptTransitionTo(exceptions, toNode); } - public FSMLexerBuilder ExceptTransitionTo(char[] exceptions, string toNodeMark, TransitionPrecondition precondition, params T[] transitionData) + public FSMLexerBuilder ExceptTransitionTo(char[] exceptions, string toNodeMark, TransitionPrecondition precondition) { int toNode = Marks[toNodeMark]; - return ExceptTransitionTo(exceptions, toNode, precondition, transitionData); + return ExceptTransitionTo(exceptions, toNode, precondition); } - public FSMLexerBuilder AnyTransitionTo(char input, string toNodeMark, params T[] transitionData) + public 
FSMLexerBuilder AnyTransitionTo(char input, string toNodeMark) { int toNode = Marks[toNodeMark]; - return AnyTransitionTo(input, toNode, transitionData); + return AnyTransitionTo(input, toNode); } - public FSMLexerBuilder AnyTransitionTo(char input, string toNodeMark, TransitionPrecondition precondition, params T[] transitionData) + public FSMLexerBuilder AnyTransitionTo(char input, string toNodeMark, TransitionPrecondition precondition) { int toNode = Marks[toNodeMark]; - return AnyTransitionTo(input, toNode, precondition, transitionData); + return AnyTransitionTo(input, toNode, precondition); } diff --git a/sly/lexer/fsm/FSMTransition.cs b/sly/lexer/fsm/FSMTransition.cs index 2d36e1ca..486e388d 100644 --- a/sly/lexer/fsm/FSMTransition.cs +++ b/sly/lexer/fsm/FSMTransition.cs @@ -5,20 +5,18 @@ namespace sly.lexer.fsm { - public class FSMTransition + public class FSMTransition { public AbstractTransitionCheck Check { get; set; } - public List TransitionValues { get; set; } public int FromNode; public int ToNode; - internal FSMTransition(AbstractTransitionCheck check, int from ,int to, List values ) + internal FSMTransition(AbstractTransitionCheck check, int from ,int to) { Check = check; - TransitionValues = values; FromNode = from; ToNode = to; } diff --git a/sly/parser/parser/Parser.cs b/sly/parser/parser/Parser.cs index 653230d4..4c4c092a 100644 --- a/sly/parser/parser/Parser.cs +++ b/sly/parser/parser/Parser.cs @@ -60,12 +60,12 @@ public ParseResult Parse(string source, string startingNonTerminal = nul IList> tokens = Lexer.Tokenize(source).ToList>(); result = Parse(tokens, startingNonTerminal); } - catch(LexerException e) + catch(LexerException e) { result = new ParseResult(); result.IsError = true; result.Errors = new List(); - result.Errors.Add((e as LexerException).Error); + result.Errors.Add((e as LexerException).Error); } return result; } From 1674e6e34ddd34d58776ea2bf7546724a860551e Mon Sep 17 00:00:00 2001 From: olivier Date: Thu, 8 Mar 2018 15:51:30 +0100 Subject: [PATCH 02/31] remove unused code --- ParserTests/GenericLexerTests.cs | 1 + sly/utils/EnumUtil.cs | 39 -------------------------------- 2 files changed, 1 insertion(+), 39 deletions(-) delete mode 100644 sly/utils/EnumUtil.cs diff --git a/ParserTests/GenericLexerTests.cs b/ParserTests/GenericLexerTests.cs index 52eef79b..33756dda 100644 --- a/ParserTests/GenericLexerTests.cs +++ b/ParserTests/GenericLexerTests.cs @@ -391,6 +391,7 @@ public void TestLexerError() Assert.Equal(0, error.Line); Assert.Equal(13, error.Column); Assert.Equal('2', error.UnexpectedChar); + Assert.Contains("Unrecognized symbol",error.ToString()); diff --git a/sly/utils/EnumUtil.cs b/sly/utils/EnumUtil.cs deleted file mode 100644 index 322ce689..00000000 --- a/sly/utils/EnumUtil.cs +++ /dev/null @@ -1,39 +0,0 @@ -using System; -using System.Collections.Generic; -using System.Text; - -namespace sly.utils -{ - public class EnumUtil - { - - public static IEnumerable GetValues() where IN : struct - { - return (IN[])(Enum.GetValues(typeof(IN))); - } - - private static Dictionary GetValuesByLabel() where IN : struct - { - var dic = new Dictionary(); - var values = GetValues(); - foreach(IN v in values) - { - dic[v.ToString()] = v; - } - return dic; - } - - public static bool TryGetValue(string label, out IN v ) where IN: struct { - var found = false; - v = default(IN); - var dict = GetValuesByLabel(); - if (found = dict.ContainsKey(label)) - { - found = true; - v = dict[label]; - } - return found; - } - - } -} From 
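To make the effect of the first patch concrete, here is a hedged sketch of the builder chain once transitions no longer carry token data and only End() assigns the recognized token; it mirrors the JsonToken number recognizer from the sample program above. The explicit Mark("start") in the configuration chain is an assumption, since the sample's configuration block is elided in this diff.

    // Sketch of the simplified FSMLexerBuilder API: one generic parameter,
    // no per-transition token data, the token id is declared once via End().
    var builder = new FSMLexerBuilder<JsonToken>();

    builder.IgnoreWS()
           .WhiteSpace(' ')
           .WhiteSpace('\t')
           .IgnoreEOL()
           .Mark("start");              // assumed: the sample marks its start node like this

    builder.GoTo("start")
           .RangeTransition('0', '9')              // first digit
           .Mark("in_int")
           .RangeTransitionTo('0', '9', "in_int")  // loop on further digits
           .End(JsonToken.INT)
           .Transition('.')
           .Mark("start_double")
           .RangeTransition('0', '9')
           .Mark("in_double")
           .RangeTransitionTo('0', '9', "in_double")
           .End(JsonToken.DOUBLE);

    var fsm = builder.Fsm;   // FSMLexer<JsonToken>, ready for Run(source, position)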
948a1967daa5c33d86fc58f5c29f0e02a663312c Mon Sep 17 00:00:00 2001 From: olivier Date: Thu, 8 Mar 2018 16:03:26 +0100 Subject: [PATCH 03/31] mixed end of lines test in comments --- ParserTests/CommentsTests.cs | 202 +++++++++++++++++++++-------------- 1 file changed, 122 insertions(+), 80 deletions(-) diff --git a/ParserTests/CommentsTests.cs b/ParserTests/CommentsTests.cs index a6c49aac..67c9413f 100644 --- a/ParserTests/CommentsTests.cs +++ b/ParserTests/CommentsTests.cs @@ -53,84 +53,125 @@ public void TestGenericSingleLineComment() Assert.Equal(4, tokens.Count); - var token1 = tokens[0]; + var token1 = tokens[0]; var token2 = tokens[1]; var token3 = tokens[2]; var token4 = tokens[3]; - - Assert.Equal(CommentsToken.INT,token1.TokenID); - Assert.Equal("1",token1.Value); - Assert.Equal(0,token1.Position.Line); - Assert.Equal(0,token1.Position.Column); - Assert.Equal(CommentsToken.INT,token2.TokenID); - Assert.Equal("2",token2.Value); - Assert.Equal(1,token2.Position.Line); - Assert.Equal(0,token2.Position.Column); - Assert.Equal(CommentsToken.COMMENT,token3.TokenID); - Assert.Equal(" single line comment",token3.Value); - Assert.Equal(1,token3.Position.Line); - Assert.Equal(2,token3.Position.Column); - Assert.Equal(CommentsToken.DOUBLE,token4.TokenID); - Assert.Equal("3.0",token4.Value); - Assert.Equal(2,token4.Position.Line); - Assert.Equal(0,token4.Position.Column); + + Assert.Equal(CommentsToken.INT, token1.TokenID); + Assert.Equal("1", token1.Value); + Assert.Equal(0, token1.Position.Line); + Assert.Equal(0, token1.Position.Column); + Assert.Equal(CommentsToken.INT, token2.TokenID); + Assert.Equal("2", token2.Value); + Assert.Equal(1, token2.Position.Line); + Assert.Equal(0, token2.Position.Column); + Assert.Equal(CommentsToken.COMMENT, token3.TokenID); + Assert.Equal(" single line comment", token3.Value); + Assert.Equal(1, token3.Position.Line); + Assert.Equal(2, token3.Position.Column); + Assert.Equal(CommentsToken.DOUBLE, token4.TokenID); + Assert.Equal("3.0", token4.Value); + Assert.Equal(2, token4.Position.Line); + Assert.Equal(0, token4.Position.Column); } -[Fact] + [Fact] public void TestGenericMultiLineComment() { var lexerRes = LexerBuilder.BuildLexer(new BuildResult>()); Assert.False(lexerRes.IsError); var lexer = lexerRes.Result as GenericLexer; - + string dump = lexer.ToString(); string code = @"1 2 /* multi line comment on 2 lines */ 3.0"; - + var tokens = lexer.Tokenize(code).ToList(); Assert.Equal(4, tokens.Count); - var token1 = tokens[0]; + var token1 = tokens[0]; var token2 = tokens[1]; var token3 = tokens[2]; var token4 = tokens[3]; - Assert.Equal(CommentsToken.INT,token1.TokenID); - Assert.Equal("1",token1.Value); - Assert.Equal(0,token1.Position.Line); - Assert.Equal(0,token1.Position.Column); + Assert.Equal(CommentsToken.INT, token1.TokenID); + Assert.Equal("1", token1.Value); + Assert.Equal(0, token1.Position.Line); + Assert.Equal(0, token1.Position.Column); - Assert.Equal(CommentsToken.INT,token2.TokenID); - Assert.Equal("2",token2.Value); - Assert.Equal(1,token2.Position.Line); - Assert.Equal(0,token2.Position.Column); - Assert.Equal(CommentsToken.COMMENT,token3.TokenID); + Assert.Equal(CommentsToken.INT, token2.TokenID); + Assert.Equal("2", token2.Value); + Assert.Equal(1, token2.Position.Line); + Assert.Equal(0, token2.Position.Column); + Assert.Equal(CommentsToken.COMMENT, token3.TokenID); Assert.Equal(@" multi line -comment on 2 lines ",token3.Value); - Assert.Equal(1,token3.Position.Line); - Assert.Equal(2,token3.Position.Column); - 
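The CommentsToken enum that these comment tests rely on is not part of the patch. For readability, the following is only a guessed sketch of the kind of declaration the tests assume; the Lexeme and Comment attribute usages shown here are assumptions about the library's generic-lexer attributes, not something taken from this diff.

    // Assumed declaration (NOT in this patch): int/double lexemes plus
    // single-line and multi-line comment delimiters mapped to one COMMENT token.
    public enum CommentsToken
    {
        [Lexeme(GenericToken.Int)]    INT = 1,
        [Lexeme(GenericToken.Double)] DOUBLE = 2,
        [Comment("//", "/*", "*/")]   COMMENT = 3
    }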
Assert.Equal(CommentsToken.DOUBLE,token4.TokenID); - Assert.Equal("3.0",token4.Value); - Assert.Equal(2,token4.Position.Line); - Assert.Equal(22,token4.Position.Column); +comment on 2 lines ", token3.Value); + Assert.Equal(1, token3.Position.Line); + Assert.Equal(2, token3.Position.Column); + Assert.Equal(CommentsToken.DOUBLE, token4.TokenID); + Assert.Equal("3.0", token4.Value); + Assert.Equal(2, token4.Position.Line); + Assert.Equal(22, token4.Position.Column); + + } + + [Fact] + public void TestMixedEOLComment() + { + var lexerRes = LexerBuilder.BuildLexer(new BuildResult>()); + Assert.False(lexerRes.IsError); + var lexer = lexerRes.Result as GenericLexer; + + + string dump = lexer.ToString(); + string code = "1\n2\r\n/* multi line \rcomment on 2 lines */ 3.0"; + List> tokens = null; + tokens = lexer.Tokenize(code).ToList(); + + Assert.Equal(4, tokens.Count); + + var token1 = tokens[0]; + var token2 = tokens[1]; + var token3 = tokens[2]; + var token4 = tokens[3]; + + Assert.Equal(CommentsToken.INT, token1.TokenID); + Assert.Equal("1", token1.Value); + Assert.Equal(0, token1.Position.Line); + Assert.Equal(0, token1.Position.Column); + + Assert.Equal(CommentsToken.INT, token2.TokenID); + Assert.Equal("2", token2.Value); + Assert.Equal(1, token2.Position.Line); + Assert.Equal(0, token2.Position.Column); + Assert.Equal(CommentsToken.COMMENT, token3.TokenID); + Assert.Equal(" multi line \rcomment on 2 lines ", token3.Value); + Assert.Equal(2, token3.Position.Line); + Assert.Equal(0, token3.Position.Column); + Assert.Equal(CommentsToken.DOUBLE, token4.TokenID); + Assert.Equal("3.0", token4.Value); + Assert.Equal(3, token4.Position.Line); + Assert.Equal(22, token4.Position.Column); } [Fact] - public void TestInnerMultiComment() { + public void TestInnerMultiComment() + { var lexerRes = LexerBuilder.BuildLexer(new BuildResult>()); Assert.False(lexerRes.IsError); var lexer = lexerRes.Result as GenericLexer; - + string dump = lexer.ToString(); string code = @"1 @@ -138,82 +179,83 @@ public void TestInnerMultiComment() { 4 "; - var tokens = lexer.Tokenize(code).ToList(); + var tokens = lexer.Tokenize(code).ToList(); Assert.Equal(5, tokens.Count); - var token1 = tokens[0]; + var token1 = tokens[0]; var token2 = tokens[1]; var token3 = tokens[2]; var token4 = tokens[3]; var token5 = tokens[4]; - Assert.Equal(CommentsToken.INT,token1.TokenID); - Assert.Equal("1",token1.Value); - Assert.Equal(0,token1.Position.Line); - Assert.Equal(0,token1.Position.Column); + Assert.Equal(CommentsToken.INT, token1.TokenID); + Assert.Equal("1", token1.Value); + Assert.Equal(0, token1.Position.Line); + Assert.Equal(0, token1.Position.Column); - Assert.Equal(CommentsToken.INT,token2.TokenID); - Assert.Equal("2",token2.Value); - Assert.Equal(1,token2.Position.Line); - Assert.Equal(0,token2.Position.Column); + Assert.Equal(CommentsToken.INT, token2.TokenID); + Assert.Equal("2", token2.Value); + Assert.Equal(1, token2.Position.Line); + Assert.Equal(0, token2.Position.Column); - Assert.Equal(CommentsToken.COMMENT,token3.TokenID); - Assert.Equal(@" inner ",token3.Value); - Assert.Equal(1,token3.Position.Line); - Assert.Equal(2,token3.Position.Column); + Assert.Equal(CommentsToken.COMMENT, token3.TokenID); + Assert.Equal(@" inner ", token3.Value); + Assert.Equal(1, token3.Position.Line); + Assert.Equal(2, token3.Position.Column); - Assert.Equal(CommentsToken.INT,token4.TokenID); - Assert.Equal("3",token4.Value); - Assert.Equal(1,token4.Position.Line); - Assert.Equal(14,token4.Position.Column); + 
Assert.Equal(CommentsToken.INT, token4.TokenID); + Assert.Equal("3", token4.Value); + Assert.Equal(1, token4.Position.Line); + Assert.Equal(14, token4.Position.Column); - Assert.Equal(CommentsToken.INT,token5.TokenID); - Assert.Equal("4",token5.Value); - Assert.Equal(2,token5.Position.Line); - Assert.Equal(0,token5.Position.Column); + Assert.Equal(CommentsToken.INT, token5.TokenID); + Assert.Equal("4", token5.Value); + Assert.Equal(2, token5.Position.Line); + Assert.Equal(0, token5.Position.Column); } [Fact] - public void NotEndingMultiComment() { + public void NotEndingMultiComment() + { var lexerRes = LexerBuilder.BuildLexer(new BuildResult>()); Assert.False(lexerRes.IsError); var lexer = lexerRes.Result as GenericLexer; - + string dump = lexer.ToString(); string code = @"1 2 /* not ending comment"; - var tokens = lexer.Tokenize(code).ToList(); + var tokens = lexer.Tokenize(code).ToList(); Assert.Equal(3, tokens.Count); - var token1 = tokens[0]; + var token1 = tokens[0]; var token2 = tokens[1]; var token3 = tokens[2]; - - Assert.Equal(CommentsToken.INT,token1.TokenID); - Assert.Equal("1",token1.Value); - Assert.Equal(0,token1.Position.Line); - Assert.Equal(0,token1.Position.Column); - Assert.Equal(CommentsToken.INT,token2.TokenID); - Assert.Equal("2",token2.Value); - Assert.Equal(1,token2.Position.Line); - Assert.Equal(0,token2.Position.Column); + Assert.Equal(CommentsToken.INT, token1.TokenID); + Assert.Equal("1", token1.Value); + Assert.Equal(0, token1.Position.Line); + Assert.Equal(0, token1.Position.Column); + + Assert.Equal(CommentsToken.INT, token2.TokenID); + Assert.Equal("2", token2.Value); + Assert.Equal(1, token2.Position.Line); + Assert.Equal(0, token2.Position.Column); - Assert.Equal(CommentsToken.COMMENT,token3.TokenID); + Assert.Equal(CommentsToken.COMMENT, token3.TokenID); Assert.Equal(@" not ending -comment",token3.Value); - Assert.Equal(1,token3.Position.Line); - Assert.Equal(2,token3.Position.Column); +comment", token3.Value); + Assert.Equal(1, token3.Position.Line); + Assert.Equal(2, token3.Position.Column); + - } From c7d1575d1115c5602a44e491c8e96044df4b8086 Mon Sep 17 00:00:00 2001 From: Olivier Duhart Date: Sun, 18 Mar 2018 13:06:44 +0100 Subject: [PATCH 04/31] debug code cleaning --- sly/parser/syntax/ISyntaxNode.cs | 1 - sly/parser/syntax/ManySyntaxNode.cs | 9 +-------- sly/parser/syntax/SyntaxEpsilon.cs | 6 +----- sly/parser/syntax/SyntaxLeaf.cs | 10 ++-------- sly/parser/syntax/SyntaxNode.cs | 25 ++----------------------- 5 files changed, 6 insertions(+), 45 deletions(-) diff --git a/sly/parser/syntax/ISyntaxNode.cs b/sly/parser/syntax/ISyntaxNode.cs index f891f350..1cf7ac9b 100644 --- a/sly/parser/syntax/ISyntaxNode.cs +++ b/sly/parser/syntax/ISyntaxNode.cs @@ -10,7 +10,6 @@ public interface ISyntaxNode where IN : struct { bool IsTerminal(); - string Dump(string tab); } } diff --git a/sly/parser/syntax/ManySyntaxNode.cs b/sly/parser/syntax/ManySyntaxNode.cs index 2a5335d7..094f94a1 100644 --- a/sly/parser/syntax/ManySyntaxNode.cs +++ b/sly/parser/syntax/ManySyntaxNode.cs @@ -45,13 +45,6 @@ public override string ToString() } - public new string Dump(string tab) - { - StringBuilder dump = new StringBuilder(); - dump.AppendLine($"{tab}(<{Name}>* {Children.Count} ["); - Children.ForEach(c => dump.AppendLine($"{c.Dump(tab + "\t")},")); - dump.AppendLine($"{tab}]"); - return dump.ToString(); - } + } } \ No newline at end of file diff --git a/sly/parser/syntax/SyntaxEpsilon.cs b/sly/parser/syntax/SyntaxEpsilon.cs index ac206afa..712f81a0 100644 --- 
a/sly/parser/syntax/SyntaxEpsilon.cs +++ b/sly/parser/syntax/SyntaxEpsilon.cs @@ -16,11 +16,7 @@ public bool IsTerminal() { return true; } - public string Dump(string tab) - { - - return $"{tab}(e)"; - } + } } \ No newline at end of file diff --git a/sly/parser/syntax/SyntaxLeaf.cs b/sly/parser/syntax/SyntaxLeaf.cs index 7d1053b1..747f6a13 100644 --- a/sly/parser/syntax/SyntaxLeaf.cs +++ b/sly/parser/syntax/SyntaxLeaf.cs @@ -15,19 +15,13 @@ public SyntaxLeaf(Token token) this.Token = token; } - public override string ToString() - { - return $"<{this.Token.TokenID}>{this.Token.Value}"; - } + public bool IsTerminal() { return true; } - public string Dump(string tab) - { - return $"{tab}({this.Token.TokenID} : {this.Token.Value})"; - } + } } \ No newline at end of file diff --git a/sly/parser/syntax/SyntaxNode.cs b/sly/parser/syntax/SyntaxNode.cs index 335d21dc..12a84dcd 100644 --- a/sly/parser/syntax/SyntaxNode.cs +++ b/sly/parser/syntax/SyntaxNode.cs @@ -117,12 +117,7 @@ public SyntaxNode(string name, List> children = null, MethodInfo this.Visitor = visitor; } - public override string ToString() - { - string r = Name+"(\n"; - Children.ForEach(c => r += c.ToString() + ",\n"); - return r+"\n)"; - } + public void AddChildren(List> children) { @@ -140,23 +135,7 @@ public bool IsTerminal() { - public string Dump(string tab) - { - StringBuilder dump = new StringBuilder(); - string bypass = IsByPassNode ? "#BYPASS#" : ""; - string precedence = Operation != null ? $"@{Operation.Precedence}@" : ""; - if (IsExpressionNode) - { - dump.AppendLine($"{tab}(operation:>{Operator}< {bypass} {precedence} ["); - } - else - { - dump.AppendLine($"{tab}({Name} {bypass} {precedence} ["); - } - Children.ForEach(c => dump.AppendLine($"{c.Dump(tab + "\t")},")); - dump.AppendLine($"{tab}]"); - return dump.ToString(); - } + } } \ No newline at end of file From 5c035b8c36401c48ad172d93bb45cd661e1cc6e3 Mon Sep 17 00:00:00 2001 From: Olivier Duhart Date: Sun, 18 Mar 2018 13:17:59 +0100 Subject: [PATCH 05/31] code cleaning --- sly/parser/syntax/ClauseSequence.cs | 13 +------------ sly/parser/syntax/EmptyClause.cs | 18 ------------------ sly/parser/syntax/ManySyntaxNode.cs | 20 -------------------- sly/parser/syntax/Rule.cs | 1 - sly/parser/syntax/SyntaxNode.cs | 24 ------------------------ 5 files changed, 1 insertion(+), 75 deletions(-) delete mode 100644 sly/parser/syntax/EmptyClause.cs diff --git a/sly/parser/syntax/ClauseSequence.cs b/sly/parser/syntax/ClauseSequence.cs index d5f4dfb5..c836c4b4 100644 --- a/sly/parser/syntax/ClauseSequence.cs +++ b/sly/parser/syntax/ClauseSequence.cs @@ -16,18 +16,7 @@ public ClauseSequence(IClause item) Clauses.Add(item); } - - public ClauseSequence(List> items) - { - Clauses = new List>(); - Clauses.AddRange(items); - } - - public void Add(IClause clause) - { - Clauses.Add(clause); - } - + public void AddRange(List> clauses) { Clauses.AddRange(clauses); diff --git a/sly/parser/syntax/EmptyClause.cs b/sly/parser/syntax/EmptyClause.cs deleted file mode 100644 index 6b6f5462..00000000 --- a/sly/parser/syntax/EmptyClause.cs +++ /dev/null @@ -1,18 +0,0 @@ -using System; - -namespace sly.parser.syntax -{ - - public class EmptyClause : IClause - { - - public bool Check(T nextToken) { - return true; - } - - public bool MayBeEmpty() - { - return true; - } - } -} \ No newline at end of file diff --git a/sly/parser/syntax/ManySyntaxNode.cs b/sly/parser/syntax/ManySyntaxNode.cs index 094f94a1..056d5ff9 100644 --- a/sly/parser/syntax/ManySyntaxNode.cs +++ b/sly/parser/syntax/ManySyntaxNode.cs @@ 
-16,13 +16,6 @@ public class ManySyntaxNode : SyntaxNode where IN : struct public ManySyntaxNode(string name) : base(name, new List>()) { } - - public ManySyntaxNode(string name, List> children) : base(name,children) - { - this.Name = name; - this.AddChildren(children); - } - public void Add(ISyntaxNode child) @@ -30,19 +23,6 @@ public void Add(ISyntaxNode child) Children.Add(child); } - public void AddRange(List> children) - { - Children.AddRange(children); - } - - - - public override string ToString() - { - string r = Name+"(\n"; - Children.ForEach(c => r += c.ToString() + ",\n"); - return r+"\n)"; - } diff --git a/sly/parser/syntax/Rule.cs b/sly/parser/syntax/Rule.cs index c75cd539..7eecd207 100644 --- a/sly/parser/syntax/Rule.cs +++ b/sly/parser/syntax/Rule.cs @@ -95,7 +95,6 @@ public bool MayBeEmpty { get { return Clauses == null || Clauses.Count == 0 - || (Clauses.Count == 1 && Clauses[0] is EmptyClause) || (Clauses.Count == 1 && Clauses[0].MayBeEmpty()); } } diff --git a/sly/parser/syntax/SyntaxNode.cs b/sly/parser/syntax/SyntaxNode.cs index 12a84dcd..98eb31ef 100644 --- a/sly/parser/syntax/SyntaxNode.cs +++ b/sly/parser/syntax/SyntaxNode.cs @@ -38,30 +38,6 @@ public class SyntaxNode : ISyntaxNode where IN : struct public bool IsRightAssociative => Associativity == Associativity.Right; - public SyntaxLeaf Operator { get { - SyntaxLeaf oper = null; - if (IsExpressionNode) - { - int operatorIndex = -1; - if (IsBinaryOperationNode) { - operatorIndex = 1; - } - else if (IsUnaryOperationNode) - { - operatorIndex = 0; - } - - if (operatorIndex > 0 && Children[operatorIndex] is SyntaxLeaf leaf) - { - oper = leaf; - } - } - return oper; - } - } - - - public ISyntaxNode Left { get { From 0b229e51ea4831b7ea8bfa49b114e50bf8dd9811 Mon Sep 17 00:00:00 2001 From: Olivier Duhart Date: Mon, 19 Mar 2018 08:10:25 +0100 Subject: [PATCH 06/31] code cleaning --- sly/parser/generator/EBNFSyntaxTreeVisitor.cs | 6 +---- sly/parser/generator/SyntaxTreeVisitor.cs | 6 +---- sly/parser/syntax/Clause.cs | 2 -- sly/parser/syntax/ClauseSequence.cs | 6 ++--- sly/parser/syntax/ISyntaxNode.cs | 4 +--- sly/parser/syntax/NonTerminalClause.cs | 4 ---- sly/parser/syntax/OneOrMoreClause.cs | 4 ---- sly/parser/syntax/SyntaxEpsilon.cs | 22 ------------------- sly/parser/syntax/SyntaxLeaf.cs | 9 -------- sly/parser/syntax/SyntaxNode.cs | 11 +--------- sly/parser/syntax/ZeroOrMoreClause.cs | 4 ---- 11 files changed, 6 insertions(+), 72 deletions(-) delete mode 100644 sly/parser/syntax/SyntaxEpsilon.cs diff --git a/sly/parser/generator/EBNFSyntaxTreeVisitor.cs b/sly/parser/generator/EBNFSyntaxTreeVisitor.cs index e3995428..5200cac3 100644 --- a/sly/parser/generator/EBNFSyntaxTreeVisitor.cs +++ b/sly/parser/generator/EBNFSyntaxTreeVisitor.cs @@ -24,11 +24,7 @@ protected override SyntaxVisitorResult Visit(ISyntaxNode n) if (n is SyntaxLeaf) { return Visit(n as SyntaxLeaf); - } - else if (n is SyntaxEpsilon) - { - return Visit(n as SyntaxEpsilon); - } + } else if (n is ManySyntaxNode) { return Visit(n as ManySyntaxNode); diff --git a/sly/parser/generator/SyntaxTreeVisitor.cs b/sly/parser/generator/SyntaxTreeVisitor.cs index bad1114d..be8fdf07 100644 --- a/sly/parser/generator/SyntaxTreeVisitor.cs +++ b/sly/parser/generator/SyntaxTreeVisitor.cs @@ -112,11 +112,7 @@ protected virtual SyntaxVisitorResult Visit(ISyntaxNode n) if (n is SyntaxLeaf) { return Visit(n as SyntaxLeaf); - } - else if (n is SyntaxEpsilon) - { - return Visit(n as SyntaxEpsilon); - } + } else if (n is SyntaxNode) { return Visit(n as SyntaxNode); diff --git 
a/sly/parser/syntax/Clause.cs b/sly/parser/syntax/Clause.cs index 6364b351..00a17651 100644 --- a/sly/parser/syntax/Clause.cs +++ b/sly/parser/syntax/Clause.cs @@ -11,8 +11,6 @@ namespace sly.parser.syntax /// public interface IClause : GrammarNode { - bool Check(T nextToken); - bool MayBeEmpty(); } diff --git a/sly/parser/syntax/ClauseSequence.cs b/sly/parser/syntax/ClauseSequence.cs index c836c4b4..4edb2684 100644 --- a/sly/parser/syntax/ClauseSequence.cs +++ b/sly/parser/syntax/ClauseSequence.cs @@ -26,14 +26,12 @@ public void AddRange(ClauseSequence seq) { AddRange(seq.Clauses); } - - public bool Check(T nextToken) { - return true; - } public bool MayBeEmpty() { return true; } + + } } \ No newline at end of file diff --git a/sly/parser/syntax/ISyntaxNode.cs b/sly/parser/syntax/ISyntaxNode.cs index 1cf7ac9b..0275cdaf 100644 --- a/sly/parser/syntax/ISyntaxNode.cs +++ b/sly/parser/syntax/ISyntaxNode.cs @@ -8,8 +8,6 @@ namespace sly.parser.syntax { public interface ISyntaxNode where IN : struct { - bool IsTerminal(); - - + } } diff --git a/sly/parser/syntax/NonTerminalClause.cs b/sly/parser/syntax/NonTerminalClause.cs index 752b4f51..f8b6abdf 100644 --- a/sly/parser/syntax/NonTerminalClause.cs +++ b/sly/parser/syntax/NonTerminalClause.cs @@ -8,10 +8,6 @@ public NonTerminalClause(string name) { NonTerminalName = name; } - public bool Check(T nextToken) - { - return true; - } public override string ToString() { diff --git a/sly/parser/syntax/OneOrMoreClause.cs b/sly/parser/syntax/OneOrMoreClause.cs index 9855ccda..7a86d8eb 100644 --- a/sly/parser/syntax/OneOrMoreClause.cs +++ b/sly/parser/syntax/OneOrMoreClause.cs @@ -10,10 +10,6 @@ public OneOrMoreClause(IClause clause) { Clause = clause; } - public bool Check(T nextToken) - { - return true; - } public override string ToString() diff --git a/sly/parser/syntax/SyntaxEpsilon.cs b/sly/parser/syntax/SyntaxEpsilon.cs deleted file mode 100644 index 712f81a0..00000000 --- a/sly/parser/syntax/SyntaxEpsilon.cs +++ /dev/null @@ -1,22 +0,0 @@ - -using sly.parser.syntax; -using sly.lexer; - -namespace sly.parser.syntax -{ - - public class SyntaxEpsilon : ISyntaxNode where IN : struct { - - - public override string ToString() - { - return $"_"; - } - public bool IsTerminal() { - return true; - } - - - - } -} \ No newline at end of file diff --git a/sly/parser/syntax/SyntaxLeaf.cs b/sly/parser/syntax/SyntaxLeaf.cs index 747f6a13..ff2a5d5a 100644 --- a/sly/parser/syntax/SyntaxLeaf.cs +++ b/sly/parser/syntax/SyntaxLeaf.cs @@ -14,14 +14,5 @@ public SyntaxLeaf(Token token) { this.Token = token; } - - - - public bool IsTerminal() { - return true; - } - - - } } \ No newline at end of file diff --git a/sly/parser/syntax/SyntaxNode.cs b/sly/parser/syntax/SyntaxNode.cs index 98eb31ef..c3e42031 100644 --- a/sly/parser/syntax/SyntaxNode.cs +++ b/sly/parser/syntax/SyntaxNode.cs @@ -36,8 +36,6 @@ public class SyntaxNode : ISyntaxNode where IN : struct public bool IsLeftAssociative => Associativity == Associativity.Left; - public bool IsRightAssociative => Associativity == Associativity.Right; - public ISyntaxNode Left { get { @@ -104,14 +102,7 @@ public void AddChild(ISyntaxNode child) { this.Children.Add(child); } - - public bool IsTerminal() { - return false; - } - - - - + } } \ No newline at end of file diff --git a/sly/parser/syntax/ZeroOrMoreClause.cs b/sly/parser/syntax/ZeroOrMoreClause.cs index 59114a2d..09aaa625 100644 --- a/sly/parser/syntax/ZeroOrMoreClause.cs +++ b/sly/parser/syntax/ZeroOrMoreClause.cs @@ -10,10 +10,6 @@ public ZeroOrMoreClause(IClause 
clause) { Clause = clause; } - public bool Check(T nextToken) - { - return true; - } public override string ToString() { From ba0eb60dd50728f515216f364d51c1574ca2e538 Mon Sep 17 00:00:00 2001 From: Olivier Duhart Date: Fri, 23 Mar 2018 10:46:36 +0100 Subject: [PATCH 07/31] discarded tokens (EBNF notation) --- samples/while/parser/WhileParser.cs | 32 +++++++++---------- sly/lexer/Lexer.cs | 6 ++-- sly/lexer/Token.cs | 7 ++-- sly/parser/generator/EBNFParserBuilder.cs | 2 +- sly/parser/generator/EBNFSyntaxTreeVisitor.cs | 9 ++++-- sly/parser/generator/EbnfToken.cs | 10 +++--- sly/parser/generator/RuleParser.cs | 17 ++++++++-- sly/parser/generator/SyntaxTreeVisitor.cs | 4 ++- .../EBNFRecursiveDescentSyntaxParser.cs | 6 +--- .../llparser/RecursiveDescentSyntaxParser.cs | 9 ++---- sly/parser/syntax/TerminalClause.cs | 8 +++++ 11 files changed, 66 insertions(+), 44 deletions(-) diff --git a/samples/while/parser/WhileParser.cs b/samples/while/parser/WhileParser.cs index ca27da03..85488993 100644 --- a/samples/while/parser/WhileParser.cs +++ b/samples/while/parser/WhileParser.cs @@ -12,8 +12,8 @@ public class WhileParser #region statements - [Production("statement : LPAREN statement RPAREN ")] - public WhileAST block(Token discardLpar, Statement statement, Token discardRpar) + [Production("statement : LPAREN [d] statement RPAREN [d]")] + public WhileAST block( Statement statement) { return statement; } @@ -32,47 +32,47 @@ public WhileAST sequenceStatements(WhileAST first, List next) return seq; } - [Production("additionalStatements : SEMICOLON statementPrim")] - public WhileAST additional(Token semi, WhileAST statement) + [Production("additionalStatements : SEMICOLON [d] statementPrim")] + public WhileAST additional(WhileAST statement) { return statement; } - [Production("statementPrim: IF WhileParser_expressions THEN statement ELSE statement")] - public WhileAST ifStmt(Token discardIf, WhileAST cond, Token dicardThen, WhileAST thenStmt, Token dicardElse, Statement elseStmt) + [Production("statementPrim: IF [d] WhileParser_expressions THEN [d] statement ELSE [d] statement")] + public WhileAST ifStmt(WhileAST cond, WhileAST thenStmt, Statement elseStmt) { IfStatement stmt = new IfStatement(cond as Expression, thenStmt as Statement, elseStmt); return stmt; } - [Production("statementPrim: WHILE WhileParser_expressions DO statement")] - public WhileAST whileStmt(Token discardWhile, WhileAST cond, Token dicardDo, WhileAST blockStmt) + [Production("statementPrim: WHILE [d] WhileParser_expressions DO [d] statement")] + public WhileAST whileStmt(WhileAST cond, WhileAST blockStmt) { WhileStatement stmt = new WhileStatement(cond as Expression, blockStmt as Statement); return stmt; } - [Production("statementPrim: IDENTIFIER ASSIGN WhileParser_expressions")] - public WhileAST assignStmt(Token variable, Token discardAssign, Expression value) + [Production("statementPrim: IDENTIFIER ASSIGN [d] WhileParser_expressions")] + public WhileAST assignStmt(Token variable, Expression value) { AssignStatement assign = new AssignStatement(variable.StringWithoutQuotes, value); return assign; } - [Production("statementPrim: SKIP")] - public WhileAST skipStmt(Token discard) + [Production("statementPrim: SKIP [d]")] + public WhileAST skipStmt() { return new SkipStatement(); } - [Production("statementPrim: RETURN WhileParser_expressions")] - public WhileAST returnStmt(Token discard, WhileAST expression) + [Production("statementPrim: RETURN [d] WhileParser_expressions")] + public WhileAST returnStmt( WhileAST expression) { 
return new ReturnStatement(expression as Expression); } - [Production("statementPrim: PRINT WhileParser_expressions")] - public WhileAST skipStmt(Token discard, WhileAST expression) + [Production("statementPrim: PRINT [d] WhileParser_expressions")] + public WhileAST skipStmt(WhileAST expression) { return new PrintStatement(expression as Expression); } diff --git a/sly/lexer/Lexer.cs b/sly/lexer/Lexer.cs index 1d394d13..6dba8064 100644 --- a/sly/lexer/Lexer.cs +++ b/sly/lexer/Lexer.cs @@ -38,12 +38,12 @@ public IEnumerable> Tokenize(string source) currentColumn = currentIndex - currentLineStartIndex+1; TokenDefinition matchedDefinition = null; int matchLength = 0; - + foreach (var rule in tokenDefinitions) { - var match = rule.Regex.Match(source, currentIndex); + var match = rule.Regex.Match(source.Substring(currentIndex)); - if (match.Success && (match.Index - currentIndex) == 0) + if (match.Success && match.Index == 0) { matchedDefinition = rule; matchLength = match.Length; diff --git a/sly/lexer/Token.cs b/sly/lexer/Token.cs index 519dffe6..10bd3d7b 100644 --- a/sly/lexer/Token.cs +++ b/sly/lexer/Token.cs @@ -5,7 +5,8 @@ namespace sly.lexer public enum CommentType { Single, - Multi + Multi, + No } public class Token { @@ -20,7 +21,9 @@ public class Token public T TokenID { get; set; } public bool IsComment { get; set; } - public CommentType CommentType {get; set;} = CommentType.Single; + public bool Discarded { get; set; } = false; + + public CommentType CommentType {get; set;} = CommentType.No; public bool IsMultiLineComment => CommentType == CommentType.Multi; diff --git a/sly/parser/generator/EBNFParserBuilder.cs b/sly/parser/generator/EBNFParserBuilder.cs index 00241fa3..3b206bde 100644 --- a/sly/parser/generator/EBNFParserBuilder.cs +++ b/sly/parser/generator/EBNFParserBuilder.cs @@ -130,7 +130,7 @@ protected virtual ParserConfiguration ExtractEbnfParserConfiguration(Typ foreach (ProductionAttribute attr in attributes) { - string ruleString = attr.RuleString; + string ruleString = attr.RuleString; ParseResult> parseResult = grammarParser.Parse(ruleString); if (!parseResult.IsError) { diff --git a/sly/parser/generator/EBNFSyntaxTreeVisitor.cs b/sly/parser/generator/EBNFSyntaxTreeVisitor.cs index 5200cac3..6215f37a 100644 --- a/sly/parser/generator/EBNFSyntaxTreeVisitor.cs +++ b/sly/parser/generator/EBNFSyntaxTreeVisitor.cs @@ -40,7 +40,7 @@ protected override SyntaxVisitorResult Visit(ISyntaxNode n) } private SyntaxVisitorResult Visit(SyntaxNode node) - { + { SyntaxVisitorResult < IN, OUT > result = SyntaxVisitorResult.NoneResult(); if (node.Visitor != null || node.IsByPassNode) { @@ -54,7 +54,10 @@ private SyntaxVisitorResult Visit(SyntaxNode node) if (v.IsToken) { - args.Add(v.TokenResult); + if (!v.Discarded) + { + args.Add(v.TokenResult); + } } else if (v.IsValue) { @@ -140,7 +143,7 @@ private SyntaxVisitorResult Visit(ManySyntaxNode node) private SyntaxVisitorResult Visit(SyntaxLeaf leaf) - { + { return SyntaxVisitorResult.NewToken(leaf.Token); } } diff --git a/sly/parser/generator/EbnfToken.cs b/sly/parser/generator/EbnfToken.cs index 01a9cb4a..9f9b7699 100644 --- a/sly/parser/generator/EbnfToken.cs +++ b/sly/parser/generator/EbnfToken.cs @@ -8,7 +8,7 @@ namespace sly.parser.generator public enum EbnfToken { - [Lexeme("[A-Za-z][A-Za-z0-9_]*") ] + [Lexeme("^[A-Za-z][A-Za-z0-9_]*") ] IDENTIFIER = 1, [Lexeme(":")] COLON = 2, @@ -17,9 +17,11 @@ public enum EbnfToken [Lexeme("\\+")] ONEORMORE = 4, [Lexeme("[ \\t]+",true)] - WS = 5, - [Lexeme("\\n\\r]+",true,true)] - EOL = 6 + WS = 
5, + [LexemeAttribute("^\\[d\\]")] + DISCARD = 6, + [Lexeme("[\\n\\r]+",true,true)] + EOL = 7 } diff --git a/sly/parser/generator/RuleParser.cs b/sly/parser/generator/RuleParser.cs index 6c0de107..6cece16a 100644 --- a/sly/parser/generator/RuleParser.cs +++ b/sly/parser/generator/RuleParser.cs @@ -47,7 +47,7 @@ public ClauseSequence SingleClause(IClause clause) [Production("clause : IDENTIFIER ZEROORMORE")] public IClause ZeroMoreClause(Token id, Token discarded) { - IClause innerClause = BuildTerminalOrNonTerimal(id.Value); + IClause innerClause = BuildTerminalOrNonTerimal(id.Value,true); return new ZeroOrMoreClause(innerClause); } @@ -58,6 +58,13 @@ public IClause OneMoreClause(Token id, Token discarded return new OneOrMoreClause(innerClause); } + [Production("clause : IDENTIFIER DISCARD ")] + public IClause SimpleDiscardedClause(Token id, Token discard) + { + IClause clause = BuildTerminalOrNonTerimal(id.Value, true); + return clause; + } + [Production("clause : IDENTIFIER ")] public IClause SimpleClause(Token id) { @@ -65,7 +72,11 @@ public IClause SimpleClause(Token id) return clause; } - private IClause BuildTerminalOrNonTerimal(string name) + + + + + private IClause BuildTerminalOrNonTerimal(string name, bool discard = false) { IN token = default(IN); @@ -79,7 +90,7 @@ private IClause BuildTerminalOrNonTerimal(string name) } if (isTerminal) { - clause = new TerminalClause(token); + clause = new TerminalClause(token,discard); } else { diff --git a/sly/parser/generator/SyntaxTreeVisitor.cs b/sly/parser/generator/SyntaxTreeVisitor.cs index be8fdf07..b528fb64 100644 --- a/sly/parser/generator/SyntaxTreeVisitor.cs +++ b/sly/parser/generator/SyntaxTreeVisitor.cs @@ -22,7 +22,9 @@ public class SyntaxVisitorResult where IN : struct private bool isTok; - public bool IsToken => isTok; + public bool IsToken => isTok; + + public bool Discarded => IsToken && TokenResult != null && TokenResult.Discarded; private bool isVal; diff --git a/sly/parser/parser/llparser/EBNFRecursiveDescentSyntaxParser.cs b/sly/parser/parser/llparser/EBNFRecursiveDescentSyntaxParser.cs index 14d1611c..81482580 100644 --- a/sly/parser/parser/llparser/EBNFRecursiveDescentSyntaxParser.cs +++ b/sly/parser/parser/llparser/EBNFRecursiveDescentSyntaxParser.cs @@ -11,11 +11,7 @@ namespace sly.parser.llparser { public class EBNFRecursiveDescentSyntaxParser : RecursiveDescentSyntaxParser where IN : struct { - //public ParserConfiguration Configuration { get; set; } - - //public string StartingNonTerminal { get; set; } - - + public EBNFRecursiveDescentSyntaxParser(ParserConfiguration configuration, string startingNonTerminal) : base(configuration,startingNonTerminal) { Configuration = configuration; diff --git a/sly/parser/parser/llparser/RecursiveDescentSyntaxParser.cs b/sly/parser/parser/llparser/RecursiveDescentSyntaxParser.cs index 3ef5241d..de0f5815 100644 --- a/sly/parser/parser/llparser/RecursiveDescentSyntaxParser.cs +++ b/sly/parser/parser/llparser/RecursiveDescentSyntaxParser.cs @@ -288,7 +288,9 @@ public SyntaxParseResult ParseTerminal(IList> tokens, TerminalClau SyntaxParseResult result = new SyntaxParseResult(); result.IsError = !term.Check(tokens[position].TokenID); result.EndingPosition = !result.IsError ? 
position + 1 : position; - result.Root = new SyntaxLeaf(tokens[position]); + Token token = tokens[position]; + token.Discarded = term.Discarded; + result.Root = new SyntaxLeaf(token); return result; } @@ -296,11 +298,6 @@ public SyntaxParseResult ParseTerminal(IList> tokens, TerminalClau public SyntaxParseResult ParseNonTerminal(IList> tokens, NonTerminalClause nonTermClause, int currentPosition) { - if (nonTermClause.NonTerminalName == "members") - { - ; - } - NonTerminal nt = Configuration.NonTerminals[nonTermClause.NonTerminalName]; bool found = false; bool isError = false; diff --git a/sly/parser/syntax/TerminalClause.cs b/sly/parser/syntax/TerminalClause.cs index 872bb0a6..13e7e05a 100644 --- a/sly/parser/syntax/TerminalClause.cs +++ b/sly/parser/syntax/TerminalClause.cs @@ -7,9 +7,17 @@ namespace sly.parser.syntax public class TerminalClause : IClause { public T ExpectedToken {get; set;} + + public bool Discarded { get; set; } + public TerminalClause(T token) { ExpectedToken = token; } + + public TerminalClause(T token, bool discard) : this(token) + { + Discarded = discard; + } public bool Check(T nextToken) { return nextToken.Equals(ExpectedToken); } From 97a27dcf09f6174446aa0978423712185066e618 Mon Sep 17 00:00:00 2001 From: Olivier Duhart Date: Fri, 23 Mar 2018 10:59:23 +0100 Subject: [PATCH 08/31] discarded tokens (BNF notation) --- samples/expressionParser/ExpressionParser.cs | 4 ++-- sly/parser/generator/ParserBuilder.cs | 7 +++++++ sly/parser/generator/SyntaxTreeVisitor.cs | 5 ++++- 3 files changed, 13 insertions(+), 3 deletions(-) diff --git a/samples/expressionParser/ExpressionParser.cs b/samples/expressionParser/ExpressionParser.cs index 1c7154fc..50b5189d 100644 --- a/samples/expressionParser/ExpressionParser.cs +++ b/samples/expressionParser/ExpressionParser.cs @@ -20,8 +20,8 @@ public int Primary(Token intToken) return intToken.IntValue; } - [Production("primary: LPAREN expression RPAREN")] - public int Group(object discaredLParen, int groupValue ,object discardedRParen) + [Production("primary: LPAREN [d] expression RPAREN [d]")] + public int Group(int groupValue) { return groupValue; } diff --git a/sly/parser/generator/ParserBuilder.cs b/sly/parser/generator/ParserBuilder.cs index 0ba85fab..dec62fd7 100644 --- a/sly/parser/generator/ParserBuilder.cs +++ b/sly/parser/generator/ParserBuilder.cs @@ -214,6 +214,13 @@ private Rule BuildNonTerminal(Tuple ntAndRule) { clause = new TerminalClause(token); } + else if (item == "[d]") + { + if (clauses.Last() is TerminalClause discardedTerminal) + { + discardedTerminal.Discarded = true; + } + } else { clause = new NonTerminalClause(item); diff --git a/sly/parser/generator/SyntaxTreeVisitor.cs b/sly/parser/generator/SyntaxTreeVisitor.cs index b528fb64..3f01db87 100644 --- a/sly/parser/generator/SyntaxTreeVisitor.cs +++ b/sly/parser/generator/SyntaxTreeVisitor.cs @@ -141,7 +141,10 @@ private SyntaxVisitorResult Visit(SyntaxNode node) if (v.IsToken) { - args.Add(v.TokenResult); + if (!v.Discarded) + { + args.Add(v.TokenResult); + } } else if (v.IsValue) { From c201af42e9f5d659a3cf7b6595b2f57d2bf3f984 Mon Sep 17 00:00:00 2001 From: Olivier Duhart Date: Fri, 6 Apr 2018 09:07:45 +0200 Subject: [PATCH 09/31] vs code integration --- .vscode/launch.json | 28 ++++++++++++++++++++++++++++ .vscode/settings.json | 3 +++ .vscode/tasks.json | 38 ++++++++++++++++++++++++++++++++++++++ 3 files changed, 69 insertions(+) create mode 100644 .vscode/launch.json create mode 100644 .vscode/settings.json create mode 100644 .vscode/tasks.json diff --git 
a/.vscode/launch.json b/.vscode/launch.json new file mode 100644 index 00000000..28b2a3a7 --- /dev/null +++ b/.vscode/launch.json @@ -0,0 +1,28 @@ +{ + // Use IntelliSense to find out which attributes exist for C# debugging + // Use hover for the description of the existing attributes + // For further information visit https://github.com/OmniSharp/omnisharp-vscode/blob/master/debugger-launchjson.md + "version": "0.2.0", + "configurations": [ + { + "name": ".NET Core Launch (console)", + "type": "coreclr", + "request": "launch", + "preLaunchTask": "build", + // If you have changed target frameworks, make sure to update the program path. + "program": "${workspaceRoot}/samples/ParserExample/bin/Debug/netcoreapp2.0/ParserExample.dll", + "args": [], + "cwd": "${workspaceRoot}/samples/ParserExample", + // For more information about the 'console' field, see https://github.com/OmniSharp/omnisharp-vscode/blob/master/debugger-launchjson.md#console-terminal-window + "console": "internalConsole", + "stopAtEntry": false, + "internalConsoleOptions": "openOnSessionStart" + }, + { + "name": ".NET Core Attach", + "type": "coreclr", + "request": "attach", + "processId": "${command:pickProcess}" + } + ] +} \ No newline at end of file diff --git a/.vscode/settings.json b/.vscode/settings.json new file mode 100644 index 00000000..4a8b391b --- /dev/null +++ b/.vscode/settings.json @@ -0,0 +1,3 @@ +{ + "dotnet-test-explorer.testProjectPath": "./ParserTests" +} \ No newline at end of file diff --git a/.vscode/tasks.json b/.vscode/tasks.json new file mode 100644 index 00000000..68809aa9 --- /dev/null +++ b/.vscode/tasks.json @@ -0,0 +1,38 @@ +{ + "version": "0.1.0", + "command": "dotnet", + "isShellCommand": true, + "args": [], + "tasks": [ + { + "taskName": "build", + "command": "dotnet", + "args": [ + "build", + "${workspaceFolder}/ParserTests/ParserTests.csproj" + ], + "problemMatcher": "$msCompile", + }, + { + "taskName": "unit tests", + "command": "dotnet", + "args": [ + "test", + "${workspaceFolder}/ParserTests/ParserTests.csproj" + ], + "problemMatcher": "$msCompile", + }, + { + "taskName": "test with coverage", + "command": "dotnet", + "args": [ + "test", + "/p:CollectCoverage=true", + "/p:CoverletOutputFormat=lcov", + "/p:CoverletOutput=./lcov.info", + "${workspaceFolder}/ParserTests/ParserTests.csproj" + ], + "problemMatcher": "$msCompile", + }, + ] +} \ No newline at end of file From 1484453981bf489cbe0465b0dce9f9f828471ca2 Mon Sep 17 00:00:00 2001 From: Olivier Duhart Date: Fri, 6 Apr 2018 13:37:43 +0200 Subject: [PATCH 10/31] vscode & coverlet integration --- .vscode/tasks.json | 5 ++++- ParserTests/ParserTests.csproj | 10 ++++++---- samples/ParserExample/ParserExample.csproj | 2 +- .../SimpleExpressionParser.csproj | 2 +- samples/expressionParser/expressionParser.csproj | 2 +- samples/jsonparser/jsonparser.csproj | 2 +- samples/while/while.csproj | 2 +- sly/sly.csproj | 2 +- 8 files changed, 16 insertions(+), 11 deletions(-) diff --git a/.vscode/tasks.json b/.vscode/tasks.json index 68809aa9..fdb8ff81 100644 --- a/.vscode/tasks.json +++ b/.vscode/tasks.json @@ -7,6 +7,7 @@ { "taskName": "build", "command": "dotnet", + "isBuildCommand": true, "args": [ "build", "${workspaceFolder}/ParserTests/ParserTests.csproj" @@ -16,6 +17,7 @@ { "taskName": "unit tests", "command": "dotnet", + "isTestCommand": true, "args": [ "test", "${workspaceFolder}/ParserTests/ParserTests.csproj" @@ -24,7 +26,8 @@ }, { "taskName": "test with coverage", - "command": "dotnet", + "command": "dotnet", + "isTestCommand": true, 
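                // the /p: switches below are coverlet.msbuild properties: CollectCoverage turns
                // collection on, CoverletOutputFormat=lcov picks the lcov report format, and
                // CoverletOutput=./lcov.info drops the report into the ParserTests folder
                // (the same lcov.info file a later commit adds to .gitignore)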
"args": [ "test", "/p:CollectCoverage=true", diff --git a/ParserTests/ParserTests.csproj b/ParserTests/ParserTests.csproj index 2742f701..90037ba4 100644 --- a/ParserTests/ParserTests.csproj +++ b/ParserTests/ParserTests.csproj @@ -1,15 +1,17 @@  - netcoreapp2.0;net45 + netcoreapp2.0 b3b00 - - - + + + + + diff --git a/samples/ParserExample/ParserExample.csproj b/samples/ParserExample/ParserExample.csproj index 001cd0bf..eadb006b 100644 --- a/samples/ParserExample/ParserExample.csproj +++ b/samples/ParserExample/ParserExample.csproj @@ -2,7 +2,7 @@ Exe - netcoreapp2.0;net45 + netcoreapp2.0 b3b00 diff --git a/samples/SimpleExpressionParser/SimpleExpressionParser.csproj b/samples/SimpleExpressionParser/SimpleExpressionParser.csproj index da5a89af..106a01ce 100644 --- a/samples/SimpleExpressionParser/SimpleExpressionParser.csproj +++ b/samples/SimpleExpressionParser/SimpleExpressionParser.csproj @@ -1,6 +1,6 @@ - netcoreapp2.0;net45 + netcoreapp2.0 b3b00 diff --git a/samples/expressionParser/expressionParser.csproj b/samples/expressionParser/expressionParser.csproj index 89a882b8..466aecfd 100644 --- a/samples/expressionParser/expressionParser.csproj +++ b/samples/expressionParser/expressionParser.csproj @@ -1,6 +1,6 @@  - netcoreapp2.0;net45 + netcoreapp2.0 b3b00 diff --git a/samples/jsonparser/jsonparser.csproj b/samples/jsonparser/jsonparser.csproj index bac780f3..b644cfd9 100644 --- a/samples/jsonparser/jsonparser.csproj +++ b/samples/jsonparser/jsonparser.csproj @@ -1,6 +1,6 @@  - netcoreapp2.0;net45 + netcoreapp2.0 b3b00 diff --git a/samples/while/while.csproj b/samples/while/while.csproj index 386d8f84..3de1370b 100644 --- a/samples/while/while.csproj +++ b/samples/while/while.csproj @@ -1,6 +1,6 @@  - netcoreapp2.0;net45 + netcoreapp2.0 b3b00 csly.whileLang diff --git a/sly/sly.csproj b/sly/sly.csproj index b4ee278c..763cbb34 100644 --- a/sly/sly.csproj +++ b/sly/sly.csproj @@ -1,6 +1,6 @@  - netcoreapp2.0;net45 + netcoreapp2.0 #LY is a parser generator halfway between parser combinators and parser generator like ANTLR b3b00 2.0.7 From a9673cc68c187a2d2da2d0d9243186bb6fa78078 Mon Sep 17 00:00:00 2001 From: Olivier Duhart Date: Mon, 9 Apr 2018 08:19:23 +0200 Subject: [PATCH 11/31] upgrade coverlet version --- .gitignore | 4 ++++ ParserTests/ParserTests.csproj | 2 +- 2 files changed, 5 insertions(+), 1 deletion(-) diff --git a/.gitignore b/.gitignore index 4bc9d78b..337afaa7 100644 --- a/.gitignore +++ b/.gitignore @@ -27,3 +27,7 @@ packages/ *.orig dotnet.psess +my_app_coverage.xml +sly.xml +sly_coverage.xml +ParserTests/lcov.info diff --git a/ParserTests/ParserTests.csproj b/ParserTests/ParserTests.csproj index 90037ba4..275351ff 100644 --- a/ParserTests/ParserTests.csproj +++ b/ParserTests/ParserTests.csproj @@ -11,7 +11,7 @@ - + From 998f1fbbf6d5323d2a972bc23298a32525d22f4d Mon Sep 17 00:00:00 2001 From: Olivier Duhart Date: Mon, 9 Apr 2018 08:48:36 +0200 Subject: [PATCH 12/31] xunit warnings --- ParserTests/ParserConfigurationTests.cs | 8 ++-- ParserTests/WhileTests.cs | 59 +++++++++++++------------ 2 files changed, 34 insertions(+), 33 deletions(-) diff --git a/ParserTests/ParserConfigurationTests.cs b/ParserTests/ParserConfigurationTests.cs index 9573055e..2d05dbbf 100644 --- a/ParserTests/ParserConfigurationTests.cs +++ b/ParserTests/ParserConfigurationTests.cs @@ -57,9 +57,9 @@ public void TestGrammarBuildErrors() Assert.Equal(2, result.Errors.Count); var warnerrors = result.Errors.Where(e => e.Level == sly.buildresult.ErrorLevel.WARN).ToList(); var errorerrors = 
result.Errors.Where(e => e.Level == sly.buildresult.ErrorLevel.ERROR).ToList(); - Assert.Equal(1, warnerrors.Count); + Assert.Single(warnerrors); Assert.True(warnerrors[0].Message.Contains("R3") && warnerrors[0].Message.Contains("never used")); - Assert.Equal(1, errorerrors.Count); + Assert.Single(errorerrors); Assert.True(errorerrors[0].Message.Contains("R2") && errorerrors[0].Message.Contains("not exist")); } @@ -73,10 +73,10 @@ public void TestLexerBuildErrors() Assert.Equal(2, result.Errors.Count); var errors = result.Errors.Where(e => e.Level == ErrorLevel.ERROR).ToList(); var warnings = result.Errors.Where(e => e.Level == ErrorLevel.WARN).ToList(); - Assert.Equal(1, errors.Count); + Assert.Single(errors); var errorMessage = errors[0].Message; Assert.True(errorMessage.Contains(BadTokens.BadRegex.ToString()) && errorMessage.Contains("BadRegex")); - Assert.Equal(1, warnings.Count); + Assert.Single(warnings); var warnMessage = warnings[0].Message; Assert.True(warnMessage.Contains(BadTokens.MissingLexeme.ToString()) && warnMessage.Contains("not have Lexeme")); ; diff --git a/ParserTests/WhileTests.cs b/ParserTests/WhileTests.cs index 0082c6eb..2daad83f 100644 --- a/ParserTests/WhileTests.cs +++ b/ParserTests/WhileTests.cs @@ -59,13 +59,13 @@ public void TestAssignAdd() Assert.False(result.IsError); Assert.NotNull(result.Result); - Assert.IsAssignableFrom(typeof(SequenceStatement), result.Result); + Assert.IsType(result.Result); SequenceStatement seq = result.Result as SequenceStatement; - Assert.IsAssignableFrom(typeof(AssignStatement), seq.Get(0)); + Assert.IsType(seq.Get(0)); AssignStatement assign = seq.Get(0) as AssignStatement; Assert.Equal("a", assign.VariableName); Expression val = assign.Value; - Assert.IsAssignableFrom(typeof(BinaryOperation), val); + Assert.IsType(val); BinaryOperation bin = val as BinaryOperation; Assert.Equal(BinaryOperator.ADD, bin.Operator); Assert.Equal(1, (bin.Left as IntegerConstant)?.Value); @@ -84,9 +84,9 @@ public void TestSkip() Assert.False(result.IsError); Assert.NotNull(result.Result); - Assert.IsAssignableFrom(typeof(SequenceStatement), result.Result); + Assert.IsType(result.Result); SequenceStatement seq = result.Result as SequenceStatement; - Assert.IsAssignableFrom(typeof(SkipStatement), seq.Get(0)); + Assert.IsType(seq.Get(0)); } [Fact] @@ -100,12 +100,12 @@ public void TestPrintBoolExpression() Assert.False(result.IsError); Assert.NotNull(result.Result); - Assert.IsAssignableFrom(typeof(SequenceStatement), result.Result); + Assert.IsType(result.Result); SequenceStatement seq = result.Result as SequenceStatement; - Assert.IsAssignableFrom(typeof(PrintStatement), seq.Get(0)); + Assert.IsType(seq.Get(0)); PrintStatement print = seq.Get(0) as PrintStatement; Expression expr = print.Value; - Assert.IsAssignableFrom(typeof(BinaryOperation), expr); + Assert.IsType(expr); BinaryOperation bin = expr as BinaryOperation; Assert.Equal(BinaryOperator.AND, bin.Operator); Assert.Equal(true, (bin.Left as BoolConstant)?.Value); @@ -123,18 +123,18 @@ public void TestInfiniteWhile() Assert.False(result.IsError); Assert.NotNull(result.Result); - Assert.IsAssignableFrom(typeof(SequenceStatement), result.Result); + Assert.IsType(result.Result); SequenceStatement seq = result.Result as SequenceStatement; - Assert.IsAssignableFrom(typeof(WhileStatement), seq.Get(0)); + Assert.IsType(seq.Get(0)); WhileStatement whil = seq.Get(0) as WhileStatement; Expression cond = whil.Condition; - Assert.IsAssignableFrom(typeof(BoolConstant), cond); + Assert.IsType(cond); 
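            // Assert.IsType<BoolConstant>(cond) asserts the exact runtime type, a slightly
            // stricter check than the IsAssignableFrom(typeof(BoolConstant), ...) call it
            // replaces, and it clears the xUnit analyzer warning this commit is about;
            // the (cond as BoolConstant) cast just below relies on that type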
Assert.Equal(true, (cond as BoolConstant).Value); Statement s = whil.BlockStmt; - Assert.IsAssignableFrom(typeof(SequenceStatement), whil.BlockStmt); + Assert.IsType( whil.BlockStmt); SequenceStatement seqBlock = whil.BlockStmt as SequenceStatement; Assert.Equal(1, seqBlock.Count); - Assert.IsAssignableFrom(typeof(SkipStatement), seqBlock.Get(0)); + Assert.IsType(seqBlock.Get(0)); ; } @@ -149,32 +149,32 @@ public void TestIfThenElse() Assert.False(result.IsError); Assert.NotNull(result.Result); - Assert.IsAssignableFrom(typeof(SequenceStatement), result.Result); + Assert.IsType(result.Result); SequenceStatement seq = result.Result as SequenceStatement; - Assert.IsAssignableFrom(typeof(IfStatement), seq.Get(0)); + Assert.IsType(seq.Get(0)); IfStatement si = seq.Get(0) as IfStatement; Expression cond = si.Condition; - Assert.IsAssignableFrom(typeof(BoolConstant), cond); + Assert.IsType(cond); Assert.Equal(true, (cond as BoolConstant).Value); Statement s = si.ThenStmt; - Assert.IsAssignableFrom(typeof(SequenceStatement), si.ThenStmt); + Assert.IsType(si.ThenStmt); SequenceStatement thenBlock = si.ThenStmt as SequenceStatement; Assert.Equal(1, thenBlock.Count); - Assert.IsAssignableFrom(typeof(AssignStatement), thenBlock.Get(0)); + Assert.IsType(thenBlock.Get(0)); AssignStatement thenAssign = thenBlock.Get(0) as AssignStatement; Assert.Equal("a", thenAssign.VariableName); - Assert.IsAssignableFrom(typeof(StringConstant), thenAssign.Value); + Assert.IsType(thenAssign.Value); Assert.Equal("hello", (thenAssign.Value as StringConstant).Value); ; - Assert.IsAssignableFrom(typeof(SequenceStatement), si.ElseStmt); + Assert.IsType(si.ElseStmt); SequenceStatement elseBlock = si.ElseStmt as SequenceStatement; Assert.Equal(1, elseBlock.Count); - Assert.IsAssignableFrom(typeof(AssignStatement), elseBlock.Get(0)); + Assert.IsType(elseBlock.Get(0)); AssignStatement elseAssign = elseBlock.Get(0) as AssignStatement; Assert.Equal("b", elseAssign.VariableName); - Assert.IsAssignableFrom(typeof(StringConstant), elseAssign.Value); + Assert.IsType(elseAssign.Value); Assert.Equal("world", (elseAssign.Value as StringConstant).Value); } @@ -189,14 +189,15 @@ public void TestSkipSkipSequence() ParseResult result = parser.Parse("(skip; skip; skip)"); Assert.False(result.IsError); Assert.NotNull(result.Result); - Assert.IsAssignableFrom(typeof(SequenceStatement), result.Result); + Assert.IsType(result.Result); SequenceStatement seq = result.Result as SequenceStatement; Assert.Equal(3, seq.Count); - Assert.IsAssignableFrom(typeof(SkipStatement), seq.Get(0)); - Assert.IsAssignableFrom(typeof(SkipStatement), seq.Get(1)); - Assert.IsAssignableFrom(typeof(SkipStatement), seq.Get(2)); + Assert.IsType(seq.Get(0)); + Assert.IsType(seq.Get(1)); + Assert.IsType(seq.Get(2)); } + [Fact] public void TestSkipAssignSequence() { @@ -206,14 +207,14 @@ public void TestSkipAssignSequence() ParseResult result = parser.Parse("(a:=1; b:=2; c:=3)"); Assert.False(result.IsError); Assert.NotNull(result.Result); - Assert.IsAssignableFrom(typeof(SequenceStatement), result.Result); + Assert.IsType(result.Result); SequenceStatement seq = result.Result as SequenceStatement; Assert.Equal(3, seq.Count); string[] names = new string[] { "a", "b", "c" }; for (int i = 0; i < names.Length; i++) { - Assert.IsAssignableFrom(typeof(AssignStatement), seq.Get(i)); + Assert.IsType(seq.Get(i)); AssignStatement assign = seq.Get(i) as AssignStatement; Assert.Equal(names[i], assign.VariableName); Assert.Equal(i + 1, (assign.Value as IntegerConstant).Value); @@ 
-277,7 +278,7 @@ public void TestFactorialProgramExec() Assert.False(result.IsError); Assert.NotNull(result.Result); Interpreter interpreter = new Interpreter(); - var context = interpreter.Interprete(result.Result); + var context = interpreter.Interprete(result.Result, true); Assert.Equal(2, context.variables.Count); Assert.True(CheckIntVariable(context, "i", 11)); Assert.True(CheckIntVariable(context, "r", 3628800)); From 60e96c8878a488f2e9cbbc09fa9c9866d745c3c0 Mon Sep 17 00:00:00 2001 From: Olivier Duhart Date: Mon, 9 Apr 2018 08:52:31 +0200 Subject: [PATCH 13/31] xunit warnings --- ParserTests/JsonGenericTests.cs | 2 +- ParserTests/JsonTests.cs | 4 ++-- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/ParserTests/JsonGenericTests.cs b/ParserTests/JsonGenericTests.cs index 4bac9d74..758a89f8 100644 --- a/ParserTests/JsonGenericTests.cs +++ b/ParserTests/JsonGenericTests.cs @@ -83,7 +83,7 @@ public void TestFalseBooleanValue() Assert.True(r.Result.IsValue); JValue val = ((JValue)r.Result); Assert.True(val.IsBool); - Assert.Equal(false, val.GetValue()); + Assert.False(val.GetValue()); } [Fact] diff --git a/ParserTests/JsonTests.cs b/ParserTests/JsonTests.cs index e86981f0..7181ea99 100644 --- a/ParserTests/JsonTests.cs +++ b/ParserTests/JsonTests.cs @@ -71,7 +71,7 @@ public void TestTrueBooleanValue() JValue val = ((JValue)r.Result); Assert.True(val.IsBool); Assert.True(val.IsBool); - Assert.Equal(true, val.GetValue()); + Assert.True(val.GetValue()); } [Fact] @@ -83,7 +83,7 @@ public void TestFalseBooleanValue() Assert.True(r.Result.IsValue); JValue val = ((JValue)r.Result); Assert.True(val.IsBool); - Assert.Equal(false, val.GetValue()); + Assert.False(val.GetValue()); } [Fact] From 2d7efb6b3d7c66d75c62399752daaa31e3dd7aec Mon Sep 17 00:00:00 2001 From: Olivier Duhart Date: Mon, 9 Apr 2018 09:03:06 +0200 Subject: [PATCH 14/31] warnings cleaning --- ParserTests/EBNFTests.cs | 12 ++++++------ ParserTests/GenericLexerTests.cs | 30 +++++++++++++++--------------- samples/ParserExample/Program.cs | 21 +-------------------- 3 files changed, 22 insertions(+), 41 deletions(-) diff --git a/ParserTests/EBNFTests.cs b/ParserTests/EBNFTests.cs index f4e150f8..e8b88c4c 100644 --- a/ParserTests/EBNFTests.cs +++ b/ParserTests/EBNFTests.cs @@ -132,15 +132,15 @@ public void TestParseBuild() NonTerminal nt = Parser.Configuration.NonTerminals["R"]; Assert.Equal(2,nt.Rules.Count); nt = Parser.Configuration.NonTerminals["A"]; - Assert.Equal(1,nt.Rules.Count); + Assert.Single(nt.Rules); Rule rule = nt.Rules[0]; - Assert.Equal(1,rule.Clauses.Count); - Assert.IsType(typeof(OneOrMoreClause), rule.Clauses[0]); + Assert.Single(rule.Clauses); + Assert.IsType(rule.Clauses[0]); nt = Parser.Configuration.NonTerminals["B"]; - Assert.Equal(1,nt.Rules.Count); + Assert.Single(nt.Rules); rule = nt.Rules[0]; - Assert.Equal(1, rule.Clauses.Count); - Assert.IsType(typeof(ZeroOrMoreClause), rule.Clauses[0]); + Assert.Single(rule.Clauses); + Assert.IsType< ZeroOrMoreClause>(rule.Clauses[0]); ; } diff --git a/ParserTests/GenericLexerTests.cs b/ParserTests/GenericLexerTests.cs index 4aa69662..6c8f807c 100644 --- a/ParserTests/GenericLexerTests.cs +++ b/ParserTests/GenericLexerTests.cs @@ -202,13 +202,13 @@ public void TestExtensions() Assert.Equal("3.14", tokens[1].Value); tokens = lexer.Tokenize("'that''s it'").ToList(); - Assert.Equal(1,tokens.Count); + Assert.Single(tokens); Token tok = tokens[0]; Assert.Equal(Extensions.CHAINE,tok.TokenID); Assert.Equal("'that's it'",tokens[0].Value); tokens = lexer.Tokenize("'et 
voilà'").ToList(); - Assert.Equal(1,tokens.Count); + Assert.Single(tokens); tok = tokens[0]; Assert.Equal(Extensions.CHAINE,tok.TokenID); Assert.Equal("'et voilà'",tokens[0].Value); @@ -223,7 +223,7 @@ public void TestAlphaId() Assert.False(lexerRes.IsError); var lexer = lexerRes.Result; var r = lexer.Tokenize("alpha").ToList(); - Assert.Equal(1, r.Count); + Assert.Single(r); Token tok = r[0]; Assert.Equal(AlphaId.ID, tok.TokenID); Assert.Equal("alpha", tok.StringWithoutQuotes); @@ -239,7 +239,7 @@ public void TestAlphaNumId() Assert.False(lexerRes.IsError); var lexer = lexerRes.Result; var r = lexer.Tokenize("alpha123").ToList(); - Assert.Equal(1, r.Count); + Assert..Single(r); Token tok = r[0]; Assert.Equal(AlphaNumId.ID, tok.TokenID); Assert.Equal("alpha123", tok.StringWithoutQuotes); @@ -253,7 +253,7 @@ public void TestAlphaNumDashId() Assert.False(lexerRes.IsError); var lexer = lexerRes.Result; var r = lexer.Tokenize("alpha-123_").ToList(); - Assert.Equal(1, r.Count); + Assert..Single(r); Token tok = r[0]; Assert.Equal(AlphaNumDashId.ID, tok.TokenID); Assert.Equal("alpha-123_", tok.StringWithoutQuotes); @@ -267,7 +267,7 @@ public void TestAlphaNumDashIdStartsWithUnderscore() Assert.False(lexerRes.IsError); var lexer = lexerRes.Result; var r = lexer.Tokenize("_alpha-123_").ToList(); - Assert.Equal(1, r.Count); + Assert.Single(r); Token tok = r[0]; Assert.Equal(AlphaNumDashId.ID, tok.TokenID); Assert.Equal("_alpha-123_", tok.StringWithoutQuotes); @@ -282,7 +282,7 @@ public void TestDoubleQuotedString() var lexer = lexerRes.Result; string source = "hello \\\"world "; var r = lexer.Tokenize($"\"{source}\"").ToList(); - Assert.Equal(1, r.Count); + Assert..Single(r); Token tok = r[0]; Assert.Equal(DoubleQuotedString.DoubleString, tok.TokenID); Assert.Equal(source, tok.StringWithoutQuotes); @@ -296,7 +296,7 @@ public void TestSingleQuotedString() var lexer = lexerRes.Result; string source = "hello \\'world "; var r = lexer.Tokenize($"'{source}'").ToList(); - Assert.Equal(1, r.Count); + Assert.Single(r); Token tok = r[0]; Assert.Equal(SingleQuotedString.SingleString, tok.TokenID); Assert.Equal(source, tok.StringWithoutQuotes); @@ -310,7 +310,7 @@ public void TestDefaultQuotedString() var lexer = lexerRes.Result; string source = "hello \\\"world "; var r = lexer.Tokenize($"\"{source}\"").ToList(); - Assert.Equal(1, r.Count); + Assert.Single(r); Token tok = r[0]; Assert.Equal(DefaultQuotedString.DefaultString, tok.TokenID); Assert.Equal(source, tok.StringWithoutQuotes); @@ -324,13 +324,13 @@ public void TestSelfEscapedString() var lexer = lexerRes.Result as GenericLexer; Assert.NotNull(lexer); var tokens = lexer.Tokenize("'that''s it'").ToList(); - Assert.Equal(1,tokens.Count); + Assert.Single(tokens); Token tok = tokens[0]; Assert.Equal(SelfEscapedString.STRING,tok.TokenID); Assert.Equal("'that's it'",tokens[0].Value); tokens = lexer.Tokenize("'et voilà'").ToList(); - Assert.Equal(1,tokens.Count); + Assert.Single(tokens); tok = tokens[0]; Assert.Equal(SelfEscapedString.STRING,tok.TokenID); Assert.Equal("'et voilà'",tokens[0].Value); @@ -362,10 +362,10 @@ public void TestBadLetterStringDelimiter() { var lexerRes = LexerBuilder.BuildLexer(new BuildResult>()); Assert.True(lexerRes.IsError); - Assert.Equal(1, lexerRes.Errors.Count); + Assert.Single(lexerRes.Errors); var error = lexerRes.Errors[0]; Assert.Equal(ErrorLevel.FATAL, error.Level); - Assert.True(error.Message.Contains("can not start with a letter")); + Assert.Contains("can not start with a letter",error.Message); } [Fact] @@ -373,10 
+373,10 @@ public void TestBadEmptyStringDelimiter() { var lexerRes = LexerBuilder.BuildLexer(new BuildResult>()); Assert.True(lexerRes.IsError); - Assert.Equal(1, lexerRes.Errors.Count); + Assert.Single(lexerRes.Errors); var error = lexerRes.Errors[0]; Assert.Equal(ErrorLevel.FATAL, error.Level); - Assert.True(error.Message.Contains("must be 1 character length")); + Assert.Contains("must be 1 character length", error.Message); } [Fact] diff --git a/samples/ParserExample/Program.cs b/samples/ParserExample/Program.cs index 2d311e3c..fb2f3b96 100644 --- a/samples/ParserExample/Program.cs +++ b/samples/ParserExample/Program.cs @@ -227,26 +227,7 @@ static void testGenericLexerWhile() i := i + 1 ) )"; - //WhileParser wp = new WhileParser(); - //sw.Reset(); - //sw.Start(); - //ParserBuilder wbuilder = new ParserBuilder(); - //var buildResult = wbuilder.BuildParser(wp, ParserType.EBNF_LL_RECURSIVE_DESCENT, "statement"); - //var parser = buildResult.Result; - //var r = parser.Parse(source); - //sw.Stop(); - //Console.WriteLine($"regex parser : {sw.ElapsedMilliseconds} ms"); - //if (!r.IsError) - //{ - // var interpreter = new Interpreter(); - // var ctx = interpreter.Interprete(r.Result); - // ; - - //} - //else - //{ - // r.Errors.ForEach(e => Console.WriteLine(e.ToString())); - //} + sw.Reset(); sw.Start(); From e6f8797c2f1032b46e1d9ad62310029dc924a2ba Mon Sep 17 00:00:00 2001 From: Olivier Duhart Date: Mon, 9 Apr 2018 09:03:16 +0200 Subject: [PATCH 15/31] qiuet while interpreter --- samples/while/interpreter/Interpreter.cs | 18 ++++++++++++++++-- 1 file changed, 16 insertions(+), 2 deletions(-) diff --git a/samples/while/interpreter/Interpreter.cs b/samples/while/interpreter/Interpreter.cs index e4916470..917f7747 100644 --- a/samples/while/interpreter/Interpreter.cs +++ b/samples/while/interpreter/Interpreter.cs @@ -115,8 +115,17 @@ public override string ToString() public class Interpreter { + private bool IsQuiet = false; + private ExpressionEvaluator evaluator; + public InterpreterContext Interprete(WhileAST ast, bool quiet = false) + { + IsQuiet = false; + evaluator = new ExpressionEvaluator(quiet); + return Interprete(ast, new InterpreterContext()); + } + public InterpreterContext Interprete(WhileAST ast) { evaluator = new ExpressionEvaluator(); @@ -161,7 +170,9 @@ private void Interprete(AssignStatement ast, InterpreterContext context) private void Interprete(PrintStatement ast, InterpreterContext context) { TypedValue val = evaluator.Evaluate(ast.Value, context); - Console.WriteLine(val.StringValue); + if (!IsQuiet) { + Console.WriteLine(val.StringValue); + } } private void Interprete(SequenceStatement ast, InterpreterContext context) @@ -236,10 +247,13 @@ public bool Match(WhileType l, WhileType r) class ExpressionEvaluator { + bool IsQuiet {get; set;} = false; + private Dictionary> binaryOperationSignatures; - public ExpressionEvaluator() + public ExpressionEvaluator(bool quiet = false) { + IsQuiet = quiet; binaryOperationSignatures = new Dictionary>() { { BinaryOperator.ADD,new List { From c2301760d3f9e685c03810ea8ccc771e133b37fb Mon Sep 17 00:00:00 2001 From: Olivier Duhart Date: Mon, 9 Apr 2018 13:03:30 +0200 Subject: [PATCH 16/31] compilation fix --- ParserTests/GenericLexerTests.cs | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/ParserTests/GenericLexerTests.cs b/ParserTests/GenericLexerTests.cs index 6c8f807c..cfcaa9d6 100644 --- a/ParserTests/GenericLexerTests.cs +++ b/ParserTests/GenericLexerTests.cs @@ -239,7 +239,7 @@ public void TestAlphaNumId() 
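        // this commit only reverts the stray double dots ("Assert..Single") that the earlier
        // warnings-cleanup commit left in GenericLexerTests.cs and that broke compilation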
Assert.False(lexerRes.IsError); var lexer = lexerRes.Result; var r = lexer.Tokenize("alpha123").ToList(); - Assert..Single(r); + Assert.Single(r); Token tok = r[0]; Assert.Equal(AlphaNumId.ID, tok.TokenID); Assert.Equal("alpha123", tok.StringWithoutQuotes); @@ -253,7 +253,7 @@ public void TestAlphaNumDashId() Assert.False(lexerRes.IsError); var lexer = lexerRes.Result; var r = lexer.Tokenize("alpha-123_").ToList(); - Assert..Single(r); + Assert.Single(r); Token tok = r[0]; Assert.Equal(AlphaNumDashId.ID, tok.TokenID); Assert.Equal("alpha-123_", tok.StringWithoutQuotes); @@ -282,7 +282,7 @@ public void TestDoubleQuotedString() var lexer = lexerRes.Result; string source = "hello \\\"world "; var r = lexer.Tokenize($"\"{source}\"").ToList(); - Assert..Single(r); + Assert.Single(r); Token tok = r[0]; Assert.Equal(DoubleQuotedString.DoubleString, tok.TokenID); Assert.Equal(source, tok.StringWithoutQuotes); From fcbf3fe5c2315f1d2002fc9be00f4d8d351cf92a Mon Sep 17 00:00:00 2001 From: Olivier Duhart Date: Mon, 9 Apr 2018 13:06:19 +0200 Subject: [PATCH 17/31] quiet test while lang --- ParserTests/WhileTests.cs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/ParserTests/WhileTests.cs b/ParserTests/WhileTests.cs index 2daad83f..dbde66a1 100644 --- a/ParserTests/WhileTests.cs +++ b/ParserTests/WhileTests.cs @@ -249,7 +249,7 @@ public void TestCounterProgramExec() Assert.False(result.IsError); Assert.NotNull(result.Result); Interpreter interpreter = new Interpreter(); - var context = interpreter.Interprete(result.Result); + var context = interpreter.Interprete(result.Result,true); Assert.Equal(1,context.variables.Count); Assert.True(CheckIntVariable(context, "a", 10)); From a2c12aca4a97f83bdef9c82d339579cd49d6285e Mon Sep 17 00:00:00 2001 From: Olivier Duhart Date: Mon, 9 Apr 2018 13:14:11 +0200 Subject: [PATCH 18/31] UT fix --- ParserTests/EBNFTests.cs | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/ParserTests/EBNFTests.cs b/ParserTests/EBNFTests.cs index e8b88c4c..d876f08c 100644 --- a/ParserTests/EBNFTests.cs +++ b/ParserTests/EBNFTests.cs @@ -135,12 +135,12 @@ public void TestParseBuild() Assert.Single(nt.Rules); Rule rule = nt.Rules[0]; Assert.Single(rule.Clauses); - Assert.IsType(rule.Clauses[0]); + Assert.IsType>(rule.Clauses[0]); nt = Parser.Configuration.NonTerminals["B"]; Assert.Single(nt.Rules); rule = nt.Rules[0]; Assert.Single(rule.Clauses); - Assert.IsType< ZeroOrMoreClause>(rule.Clauses[0]); + Assert.IsType>(rule.Clauses[0]); ; } From 4a507c3079a514ebb04a18ec699ac8dce0f004e0 Mon Sep 17 00:00:00 2001 From: Olivier Duhart Date: Wed, 18 Apr 2018 13:34:55 +0200 Subject: [PATCH 19/31] upgrade coverlet --- ParserTests/ParserTests.csproj | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/ParserTests/ParserTests.csproj b/ParserTests/ParserTests.csproj index 275351ff..c1212a14 100644 --- a/ParserTests/ParserTests.csproj +++ b/ParserTests/ParserTests.csproj @@ -11,7 +11,7 @@ - + From 8f7d76d58bd34c99d468f71bcb5fc1e2e8856e80 Mon Sep 17 00:00:00 2001 From: Olivier Duhart Date: Fri, 20 Apr 2018 09:00:17 +0200 Subject: [PATCH 20/31] ajout de lcov.info --- .gitignore | 1 + 1 file changed, 1 insertion(+) diff --git a/.gitignore b/.gitignore index 337afaa7..72fdbdf2 100644 --- a/.gitignore +++ b/.gitignore @@ -31,3 +31,4 @@ my_app_coverage.xml sly.xml sly_coverage.xml ParserTests/lcov.info +ParserTests/lcov.info.info From 6beef428a8dd0801d8ca5842311e462337847641 Mon Sep 17 00:00:00 2001 From: Olivier Duhart Date: Fri, 20 Apr 2018 09:01:54 +0200 
Subject: [PATCH 21/31] upgrade coverlet --- ParserTests/ParserTests.csproj | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/ParserTests/ParserTests.csproj b/ParserTests/ParserTests.csproj index c1212a14..3978e349 100644 --- a/ParserTests/ParserTests.csproj +++ b/ParserTests/ParserTests.csproj @@ -11,7 +11,7 @@ - + From dd15529761bc3c97398b703aefd71102372da132 Mon Sep 17 00:00:00 2001 From: Olivier Duhart Date: Fri, 4 May 2018 13:35:34 +0200 Subject: [PATCH 22/31] xunit warnings removal --- ParserTests/CommentsTests.cs | 1 + ParserTests/ErrorTests.cs | 4 +- ParserTests/ExpressionGeneratorTests.cs | 31 ++++--------- ParserTests/ExpressionTests.cs | 11 ----- ParserTests/JsonGenericTests.cs | 2 +- ParserTests/WhileTests.cs | 10 ++-- .../SimpleExpressionToken.cs | 46 +++++++++++++++++++ 7 files changed, 63 insertions(+), 42 deletions(-) create mode 100644 samples/SimpleExpressionParser/SimpleExpressionToken.cs diff --git a/ParserTests/CommentsTests.cs b/ParserTests/CommentsTests.cs index 67c9413f..2d4fe038 100644 --- a/ParserTests/CommentsTests.cs +++ b/ParserTests/CommentsTests.cs @@ -47,6 +47,7 @@ public void TestGenericSingleLineComment() string dump = lexer.ToString(); + var tokens = lexer.Tokenize(@"1 2 // single line comment 3.0").ToList(); diff --git a/ParserTests/ErrorTests.cs b/ParserTests/ErrorTests.cs index 6a34881c..6655e8f0 100644 --- a/ParserTests/ErrorTests.cs +++ b/ParserTests/ErrorTests.cs @@ -29,7 +29,7 @@ public void TestJsonSyntaxError() Assert.Null(r.Result); Assert.NotNull(r.Errors); Assert.True(r.Errors.Count > 0); - Assert.IsAssignableFrom(typeof(UnexpectedTokenSyntaxError), r.Errors[0]); + Assert.IsType>(r.Errors[0]); UnexpectedTokenSyntaxError error = r.Errors[0] as UnexpectedTokenSyntaxError; Assert.Equal(JsonToken.COMMA, error?.UnexpectedToken.TokenID); @@ -69,7 +69,7 @@ public void TestLexicalError() Assert.True(r.IsError); Assert.NotNull(r.Errors); Assert.True(r.Errors.Count > 0); - Assert.IsAssignableFrom(typeof(LexicalError), r.Errors[0]); + Assert.IsType(r.Errors[0]); LexicalError error = r.Errors[0] as LexicalError; Assert.Equal(1, error.Line); Assert.Equal(3, error.Column); diff --git a/ParserTests/ExpressionGeneratorTests.cs b/ParserTests/ExpressionGeneratorTests.cs index c819454e..0bbccfd9 100644 --- a/ParserTests/ExpressionGeneratorTests.cs +++ b/ParserTests/ExpressionGeneratorTests.cs @@ -41,7 +41,7 @@ public void TestBuild() nonterminals.Add(pair.Value); } NonTerminal nt = nonterminals[0]; // operan - Assert.Equal(1, nt.Rules.Count); + Assert.Single(nt.Rules); Assert.Equal("operand", nt.Name); nt = nonterminals[1]; Assert.Equal(2, nt.Rules.Count); @@ -61,9 +61,9 @@ public void TestBuild() Assert.Contains("100", nt.Name); Assert.Contains("MINUS", nt.Name); nt = nonterminals[5]; - Assert.Equal(1, nt.Rules.Count); + Assert.Single(nt.Rules); Assert.Equal(StartingRule, nt.Name); - Assert.Equal(1, nt.Rules[0].Clauses.Count); + Assert.Single(nt.Rules[0].Clauses); } @@ -73,7 +73,6 @@ public void TestSingleValue() BuildParser(); ParseResult r = Parser.Result.Parse("1",StartingRule); Assert.False(r.IsError); - Assert.NotNull(r.Result); Assert.Equal(1, r.Result); } @@ -82,8 +81,7 @@ public void TestSingleNegativeValue() { BuildParser(); ParseResult r = Parser.Result.Parse("-1", StartingRule); - Assert.False(r.IsError); - Assert.NotNull(r.Result); + Assert.False(r.IsError); Assert.Equal(-1, r.Result); } @@ -92,9 +90,8 @@ public void TestTermPlus() { BuildParser(); ParseResult r = Parser.Result.Parse("1 + 1", StartingRule); - Assert.False(r.IsError); 
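            // r.Result is a plain int here, so the Assert.NotNull calls removed below could
            // never fail and only trigger the analyzer warnings this commit cleans up;
            // Assert.IsType<int> plus the value check keep the meaningful assertions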
- Assert.NotNull(r.Result); - Assert.IsAssignableFrom(typeof(int), r.Result); + Assert.False(r.IsError); + Assert.IsType < int>(r.Result); Assert.Equal(2, (int)r.Result); } @@ -104,7 +101,6 @@ public void TestTermMinus() BuildParser(); ParseResult r = Parser.Result.Parse("1 - 1", StartingRule); Assert.False(r.IsError); - Assert.NotNull(r.Result); Assert.Equal(0, r.Result); } @@ -113,9 +109,8 @@ public void TestFactorTimes() { BuildParser(); ParseResult r = Parser.Result.Parse("2*2", StartingRule); - Assert.False(r.IsError); - Assert.NotNull(r.Result); - Assert.IsAssignableFrom(typeof(int), r.Result); + Assert.False(r.IsError); + Assert.IsType < int>(r.Result); Assert.Equal(4, r.Result); } @@ -125,7 +120,6 @@ public void TestFactorDivide() BuildParser(); ParseResult r = Parser.Result.Parse("42/2", StartingRule); Assert.False(r.IsError); - Assert.NotNull(r.Result); Assert.Equal(21, r.Result); } @@ -135,7 +129,6 @@ public void TestUnaryPrecedence() BuildParser(); ParseResult r = Parser.Result.Parse("-1 * 2", StartingRule); Assert.False(r.IsError); - Assert.NotNull(r.Result); Assert.Equal(-2, r.Result); } @@ -146,7 +139,6 @@ public void TestPrecedence() BuildParser(); ParseResult r = Parser.Result.Parse("-1 + 2 * 3", StartingRule); Assert.False(r.IsError); - Assert.NotNull(r.Result); Assert.Equal(5, r.Result); } @@ -156,7 +148,6 @@ public void TestGroup() BuildParser(); ParseResult r = Parser.Result.Parse("(-1 + 2) * 3", StartingRule); Assert.False(r.IsError); - Assert.NotNull(r.Result); Assert.Equal(3, r.Result); } @@ -166,19 +157,16 @@ public void TestAssociativityTerm() BuildParser(); ParseResult r = Parser.Result.Parse("1 - 2 - 3", StartingRule); Assert.False(r.IsError); - Assert.NotNull(r.Result); Assert.Equal(1-2-3, r.Result); r = Parser.Result.Parse("1 - 2 - 3 - 4", StartingRule); Assert.False(r.IsError); - Assert.NotNull(r.Result); Assert.Equal(1 - 2 - 3 - 4, r.Result); r = Parser.Result.Parse("1 - 2 + 3", StartingRule); Assert.False(r.IsError); - Assert.NotNull(r.Result); Assert.Equal(1 - 2 + 3, r.Result); } @@ -188,19 +176,16 @@ public void TestAssociativityFactor() BuildParser(); ParseResult r = Parser.Result.Parse("1 / 2 / 3", StartingRule); Assert.False(r.IsError); - Assert.NotNull(r.Result); Assert.Equal(1 / 2 / 3, r.Result); r = Parser.Result.Parse("1 / 2 / 3 / 4", StartingRule); Assert.False(r.IsError); - Assert.NotNull(r.Result); Assert.Equal(1 / 2 / 3 / 4, r.Result); r = Parser.Result.Parse("1 / 2 * 3", StartingRule); Assert.False(r.IsError); - Assert.NotNull(r.Result); Assert.Equal(1 / 2 * 3, r.Result); } diff --git a/ParserTests/ExpressionTests.cs b/ParserTests/ExpressionTests.cs index 5e0e277a..21b8e83e 100644 --- a/ParserTests/ExpressionTests.cs +++ b/ParserTests/ExpressionTests.cs @@ -27,7 +27,6 @@ public void TestSingleValue() { ParseResult r = Parser.Parse("1"); Assert.False(r.IsError); - Assert.NotNull(r.Result); Assert.Equal(1, r.Result); } @@ -36,7 +35,6 @@ public void TestSingleNegativeValue() { ParseResult r = Parser.Parse("-1"); Assert.False(r.IsError); - Assert.NotNull(r.Result); Assert.Equal(-1, r.Result); } @@ -45,8 +43,6 @@ public void TestTermPlus() { ParseResult r = Parser.Parse("1 + 1"); Assert.False(r.IsError); - Assert.NotNull(r.Result); - Assert.IsAssignableFrom(typeof(int), r.Result); Assert.Equal(2, (int)r.Result); } @@ -55,7 +51,6 @@ public void TestTermMinus() { ParseResult r = Parser.Parse("1 - 1"); Assert.False(r.IsError); - Assert.NotNull(r.Result); Assert.Equal(0, r.Result); } @@ -64,8 +59,6 @@ public void TestFactorTimes() { ParseResult r = 
Parser.Parse("2*2"); Assert.False(r.IsError); - Assert.NotNull(r.Result); - Assert.IsAssignableFrom(typeof(int), r.Result); Assert.Equal(4, r.Result); } @@ -74,7 +67,6 @@ public void TestFactorDivide() { ParseResult r = Parser.Parse("42/2"); Assert.False(r.IsError); - Assert.NotNull(r.Result); Assert.Equal(21, r.Result); } @@ -83,7 +75,6 @@ public void TestGroup() { ParseResult r = Parser.Parse("(2 + 2)"); Assert.False(r.IsError); - Assert.NotNull(r.Result); Assert.Equal(4, r.Result); } @@ -92,7 +83,6 @@ public void TestGroup2() { ParseResult r = Parser.Parse("6 * (2 + 2)"); Assert.False(r.IsError); - Assert.NotNull(r.Result); Assert.Equal(24, r.Result); } @@ -101,7 +91,6 @@ public void TestPrecedence() { ParseResult r = Parser.Parse("6 * 2 + 2"); Assert.False(r.IsError); - Assert.NotNull(r.Result); Assert.Equal(14, r.Result); } diff --git a/ParserTests/JsonGenericTests.cs b/ParserTests/JsonGenericTests.cs index 758a89f8..8aa34402 100644 --- a/ParserTests/JsonGenericTests.cs +++ b/ParserTests/JsonGenericTests.cs @@ -71,7 +71,7 @@ public void TestTrueBooleanValue() JValue val = ((JValue)r.Result); Assert.True(val.IsBool); Assert.True(val.IsBool); - Assert.Equal(true, val.GetValue()); + Assert.True(val.GetValue()); } [Fact] diff --git a/ParserTests/WhileTests.cs b/ParserTests/WhileTests.cs index dbde66a1..4507a015 100644 --- a/ParserTests/WhileTests.cs +++ b/ParserTests/WhileTests.cs @@ -108,8 +108,8 @@ public void TestPrintBoolExpression() Assert.IsType(expr); BinaryOperation bin = expr as BinaryOperation; Assert.Equal(BinaryOperator.AND, bin.Operator); - Assert.Equal(true, (bin.Left as BoolConstant)?.Value); - Assert.Equal(false, (bin.Right as BoolConstant)?.Value); + Assert.True((bin.Left as BoolConstant)?.Value); + Assert.False((bin.Right as BoolConstant)?.Value); } [Fact] @@ -129,7 +129,7 @@ public void TestInfiniteWhile() WhileStatement whil = seq.Get(0) as WhileStatement; Expression cond = whil.Condition; Assert.IsType(cond); - Assert.Equal(true, (cond as BoolConstant).Value); + Assert.True((cond as BoolConstant).Value); Statement s = whil.BlockStmt; Assert.IsType( whil.BlockStmt); SequenceStatement seqBlock = whil.BlockStmt as SequenceStatement; @@ -155,7 +155,7 @@ public void TestIfThenElse() IfStatement si = seq.Get(0) as IfStatement; Expression cond = si.Condition; Assert.IsType(cond); - Assert.Equal(true, (cond as BoolConstant).Value); + Assert.True((cond as BoolConstant).Value); Statement s = si.ThenStmt; Assert.IsType(si.ThenStmt); @@ -250,7 +250,7 @@ public void TestCounterProgramExec() Assert.NotNull(result.Result); Interpreter interpreter = new Interpreter(); var context = interpreter.Interprete(result.Result,true); - Assert.Equal(1,context.variables.Count); + Assert.Single(context.variables); Assert.True(CheckIntVariable(context, "a", 10)); diff --git a/samples/SimpleExpressionParser/SimpleExpressionToken.cs b/samples/SimpleExpressionParser/SimpleExpressionToken.cs new file mode 100644 index 00000000..7b8fcac5 --- /dev/null +++ b/samples/SimpleExpressionParser/SimpleExpressionToken.cs @@ -0,0 +1,46 @@ +using System; +using System.Collections.Generic; +using System.Text; +using sly.lexer; + +namespace simpleExpressionParser +{ + public enum SimpleExpressionToken + { + // float number + [Lexeme(GenericToken.Double)] + DOUBLE = 1, + + // integer + [Lexeme(GenericToken.Int)] + INT = 3, + + [Lexeme(GenericToken.Identifier)] + IDENTIFIER = 4, + + // the + operator + [Lexeme(GenericToken.SugarToken,"+")] + PLUS = 5, + + // the - operator + [Lexeme(GenericToken.SugarToken,"-")] + 
MINUS = 6, + + // the * operator + [Lexeme(GenericToken.SugarToken,"*")] + TIMES = 7, + + // the / operator + [Lexeme(GenericToken.SugarToken,"/")] + DIVIDE = 8, + + // a left paranthesis ( + [Lexeme(GenericToken.SugarToken,"(")] + LPAREN = 9, + + // a right paranthesis ) + [Lexeme(GenericToken.SugarToken,")")] + RPAREN = 10, + + } +} From b5c8421cde64a387d2f9e2afd604f8d9a433b24e Mon Sep 17 00:00:00 2001 From: Olivier Duhart Date: Fri, 4 May 2018 13:40:59 +0200 Subject: [PATCH 23/31] upgrade coverlet --- ParserTests/ParserTests.csproj | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/ParserTests/ParserTests.csproj b/ParserTests/ParserTests.csproj index 3978e349..442c95f3 100644 --- a/ParserTests/ParserTests.csproj +++ b/ParserTests/ParserTests.csproj @@ -11,7 +11,7 @@ - + From 67e6cc653203b98fe2ed13875e41957a4eb2a62a Mon Sep 17 00:00:00 2001 From: Olivier Duhart Date: Fri, 4 May 2018 15:48:50 +0200 Subject: [PATCH 24/31] bugfix keyword vs identifier --- sly/lexer/GenericLexer.cs | 64 ++++++++++++++++++++++++--------------- 1 file changed, 39 insertions(+), 25 deletions(-) diff --git a/sly/lexer/GenericLexer.cs b/sly/lexer/GenericLexer.cs index edfd4286..c1789e87 100644 --- a/sly/lexer/GenericLexer.cs +++ b/sly/lexer/GenericLexer.cs @@ -85,7 +85,7 @@ public GenericLexer(IdentifierType idType = IdentifierType.Alpha, BuildExtension { InitializeStaticLexer(idType, staticTokens); derivedTokens = new Dictionary>(); - ExtensionBuilder = extensionBuilder; + ExtensionBuilder = extensionBuilder; } @@ -190,7 +190,18 @@ public void AddLexeme(GenericToken generic, IN token) { case GenericToken.Identifier: { - match.Properties[DerivedToken] = identifierDerivedToken; + if (derivedTokens.ContainsKey(GenericToken.Identifier)) { + var possibleTokens = derivedTokens[GenericToken.Identifier]; + if (possibleTokens.ContainsKey(match.Result.Value)) + { + match.Properties[DerivedToken] = possibleTokens[match.Result.Value]; + } + } + else + { + match.Properties[DerivedToken] = identifierDerivedToken; + } + ; break; } case GenericToken.Int: @@ -291,8 +302,8 @@ public void AddKeyWord(IN token, string keyword) public void AddStringLexem(IN token, string stringDelimiter, string escapeDelimiterChar = "\\") - { - + { + if (string.IsNullOrEmpty(stringDelimiter) || stringDelimiter.Length > 1) { throw new InvalidLexerException($"bad lexem {stringDelimiter} : StringToken lexeme delimiter char <{token.ToString()}> must be 1 character length."); @@ -333,49 +344,52 @@ public void AddStringLexem(IN token, string stringDelimiter, string escapeDelimi FSMBuilder.GoTo(start); FSMBuilder.Transition(StringDelimiterChar) - .Mark(in_string+StringCounter) - .ExceptTransitionTo(new char[] { StringDelimiterChar, EscapeStringDelimiterChar }, in_string+StringCounter) + .Mark(in_string + StringCounter) + .ExceptTransitionTo(new char[] { StringDelimiterChar, EscapeStringDelimiterChar }, in_string + StringCounter) .Transition(EscapeStringDelimiterChar) - .Mark(escape_string+StringCounter) - .AnyTransitionTo(' ', in_string+StringCounter) + .Mark(escape_string + StringCounter) + .AnyTransitionTo(' ', in_string + StringCounter) .Transition(StringDelimiterChar) .End(GenericToken.String) - .Mark(string_end+StringCounter) + .Mark(string_end + StringCounter) .CallBack(callback); FSMBuilder.Fsm.StringDelimiter = StringDelimiterChar; } - else { - NodeAction collapseDelimiter = (string value) => { - if (value.EndsWith(""+StringDelimiterChar+StringDelimiterChar)) { - return value.Substring(0,value.Length-2)+StringDelimiterChar; + else + { 
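            // this branch wires the FSM for string lexemes whose escape character is the
            // delimiter itself (e.g. 'that''s it'): collapseDelimiter folds the doubled
            // delimiter in the match value back into a single character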
+ NodeAction collapseDelimiter = (string value) => + { + if (value.EndsWith("" + StringDelimiterChar + StringDelimiterChar)) + { + return value.Substring(0, value.Length - 2) + StringDelimiterChar; } return value; }; - - var exceptDelimiter = new char[]{StringDelimiterChar}; + + var exceptDelimiter = new char[] { StringDelimiterChar }; string in_string = "in_string_same"; string escaped = "escaped_same"; string delim = "delim_same"; - FSMBuilder.GoTo(start) + FSMBuilder.GoTo(start) .Transition(StringDelimiterChar) - .Mark(in_string+StringCounter) - .ExceptTransitionTo(exceptDelimiter,in_string+StringCounter) + .Mark(in_string + StringCounter) + .ExceptTransitionTo(exceptDelimiter, in_string + StringCounter) .Transition(StringDelimiterChar) - .Mark(escaped+StringCounter) + .Mark(escaped + StringCounter) .End(GenericToken.String) .CallBack(callback) .Transition(StringDelimiterChar) - .Mark(delim+StringCounter) - .Action(collapseDelimiter) - .ExceptTransitionTo(exceptDelimiter,in_string+StringCounter); + .Mark(delim + StringCounter) + .Action(collapseDelimiter) + .ExceptTransitionTo(exceptDelimiter, in_string + StringCounter); - FSMBuilder.GoTo(delim+StringCounter) - .TransitionTo(StringDelimiterChar,escaped+StringCounter) + FSMBuilder.GoTo(delim + StringCounter) + .TransitionTo(StringDelimiterChar, escaped + StringCounter) - .ExceptTransitionTo(exceptDelimiter,in_string+StringCounter); + .ExceptTransitionTo(exceptDelimiter, in_string + StringCounter); } } From ea63698d6c48292abcaaea6f44b36e2111c8958d Mon Sep 17 00:00:00 2001 From: Olivier Duhart Date: Fri, 4 May 2018 15:50:35 +0200 Subject: [PATCH 25/31] bugfix when using repeated items at token stream end --- sly/parser/parser/llparser/EBNFRecursiveDescentSyntaxParser.cs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/sly/parser/parser/llparser/EBNFRecursiveDescentSyntaxParser.cs b/sly/parser/parser/llparser/EBNFRecursiveDescentSyntaxParser.cs index 81482580..7f7ef8c2 100644 --- a/sly/parser/parser/llparser/EBNFRecursiveDescentSyntaxParser.cs +++ b/sly/parser/parser/llparser/EBNFRecursiveDescentSyntaxParser.cs @@ -240,7 +240,7 @@ public SyntaxParseResult ParseZeroOrMore(IList> tokens, ZeroOrMore currentPosition = innerResult.EndingPosition; lastInnerResult = innerResult; } - stillOk = stillOk && innerResult != null && !(innerResult.IsError); + stillOk = stillOk && innerResult != null && !(innerResult.IsError) && currentPosition < tokens.Count; } From f1bd51c264af119e392de41aadb484fe1f6518a7 Mon Sep 17 00:00:00 2001 From: olivier Date: Sun, 6 May 2018 19:29:14 +0200 Subject: [PATCH 26/31] fix interpreter quite mode --- samples/while/interpreter/Interpreter.cs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/samples/while/interpreter/Interpreter.cs b/samples/while/interpreter/Interpreter.cs index 917f7747..a9385d96 100644 --- a/samples/while/interpreter/Interpreter.cs +++ b/samples/while/interpreter/Interpreter.cs @@ -121,7 +121,7 @@ public class Interpreter public InterpreterContext Interprete(WhileAST ast, bool quiet = false) { - IsQuiet = false; + IsQuiet = quiet; evaluator = new ExpressionEvaluator(quiet); return Interprete(ast, new InterpreterContext()); } From be51d22e3aca95918d977316435fd822dabbbbf3 Mon Sep 17 00:00:00 2001 From: Olivier Duhart Date: Mon, 7 May 2018 09:00:42 +0200 Subject: [PATCH 27/31] string bugfix --- ParserTests/WhileTests.cs | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/ParserTests/WhileTests.cs b/ParserTests/WhileTests.cs index 4507a015..b6732aac 100644 
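(Inside the C# verbatim string that holds the while program, "" collapses to a single quote
character, which left the while string literal unterminated; the doubled """" in the hunk below
yields the intended empty literal "".)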
--- a/ParserTests/WhileTests.cs +++ b/ParserTests/WhileTests.cs @@ -298,8 +298,8 @@ public void TestFactorialProgramExecAsIL() while i < 11 do ( r := r * i; - print "".r; - print "".i; + print """".r; + print """".i; i := i + 1 ); return r )"; From 3b1aa2c2bb4d461463cb0d14314f7d97b0e1327e Mon Sep 17 00:00:00 2001 From: Olivier Duhart Date: Wed, 9 May 2018 18:11:04 +0200 Subject: [PATCH 28/31] identifier vs keyword bugfix --- sly/lexer/GenericLexer.cs | 7 ++++++- 1 file changed, 6 insertions(+), 1 deletion(-) diff --git a/sly/lexer/GenericLexer.cs b/sly/lexer/GenericLexer.cs index c1789e87..99c001bd 100644 --- a/sly/lexer/GenericLexer.cs +++ b/sly/lexer/GenericLexer.cs @@ -196,6 +196,10 @@ public void AddLexeme(GenericToken generic, IN token) { match.Properties[DerivedToken] = possibleTokens[match.Result.Value]; } + else + { + match.Properties[DerivedToken] = identifierDerivedToken; + } } else { @@ -287,6 +291,7 @@ public void AddKeyWord(IN token, string keyword) match.Properties[DerivedToken] = identifierDerivedToken; } } + match.Properties[DerivedToken] = identifierDerivedToken; return match; }; @@ -494,7 +499,7 @@ public Token Transcode(FSMMatch match) var tok = new Token(); tok.Value = match.Result.Value; tok.Position = match.Result.Position; - tok.StringDelimiter = StringDelimiterChar; + tok.StringDelimiter = StringDelimiterChar; tok.TokenID = (IN)match.Properties[DerivedToken]; return tok; } From 0eb694ab5316749153d0b4804943af04d0675ccb Mon Sep 17 00:00:00 2001 From: Olivier Duhart Date: Wed, 9 May 2018 18:11:04 +0200 Subject: [PATCH 29/31] identifier vs keyword bugfix --- sly/lexer/GenericLexer.cs | 10 +++++++++- 1 file changed, 9 insertions(+), 1 deletion(-) diff --git a/sly/lexer/GenericLexer.cs b/sly/lexer/GenericLexer.cs index c1789e87..8e28c795 100644 --- a/sly/lexer/GenericLexer.cs +++ b/sly/lexer/GenericLexer.cs @@ -196,6 +196,10 @@ public void AddLexeme(GenericToken generic, IN token) { match.Properties[DerivedToken] = possibleTokens[match.Result.Value]; } + else + { + match.Properties[DerivedToken] = identifierDerivedToken; + } } else { @@ -287,6 +291,10 @@ public void AddKeyWord(IN token, string keyword) match.Properties[DerivedToken] = identifierDerivedToken; } } + else + { + match.Properties[DerivedToken] = identifierDerivedToken; + } return match; }; @@ -494,7 +502,7 @@ public Token Transcode(FSMMatch match) var tok = new Token(); tok.Value = match.Result.Value; tok.Position = match.Result.Position; - tok.StringDelimiter = StringDelimiterChar; + tok.StringDelimiter = StringDelimiterChar; tok.TokenID = (IN)match.Properties[DerivedToken]; return tok; } From 8422cdb1e07b7f28a20a334bc70c438e28babf48 Mon Sep 17 00:00:00 2001 From: Olivier Duhart Date: Mon, 14 May 2018 11:01:50 +0200 Subject: [PATCH 30/31] feature : add modifier ? 
for EBNF grammars --- ParserTests/EBNFTests.cs | 189 +++++++++++------- sly/lexer/Token.cs | 14 ++ sly/parser/generator/EbnfToken.cs | 8 +- sly/parser/generator/RuleParser.cs | 7 + .../EBNFRecursiveDescentSyntaxParser.cs | 96 +++++++-- sly/parser/syntax/OptionClause.cs | 23 +++ 6 files changed, 239 insertions(+), 98 deletions(-) create mode 100644 sly/parser/syntax/OptionClause.cs diff --git a/ParserTests/EBNFTests.cs b/ParserTests/EBNFTests.cs index d876f08c..f48d7298 100644 --- a/ParserTests/EBNFTests.cs +++ b/ParserTests/EBNFTests.cs @@ -14,6 +14,47 @@ namespace ParserTests { + public enum OptionTestToken + { + + [Lexeme("a")] + a = 1, + [Lexeme("b")] + b = 2, + [Lexeme("c")] + c = 3, + [Lexeme("e")] + e = 4, + [Lexeme("f")] + f = 5, + + [Lexeme("[ \\t]+", true)] + WS = 100, + [Lexeme("\\n\\r]+", true, true)] + EOF = 101 + } + + public class OptionTestParser{ + + [Production("root : a B c? ")] + public string root(Token a, string b, Token c) { + string r = $"R({a.StringWithoutQuotes},{b}"; + if (c.IsEmpty) { + r = $"{r},)"; + } + else { + r = $"{r},{c.Value})"; + } + + return r; + } + + [Production("B : b ")] + public string bee(Token b) { + return $"B({b.Value})"; + } + } + public class EBNFTests { @@ -29,14 +70,14 @@ public enum TokenType e = 4, [Lexeme("f")] f = 5, - [Lexeme("[ \\t]+",true)] + [Lexeme("[ \\t]+", true)] WS = 100, [Lexeme("\\n\\r]+", true, true)] EOL = 101 } - + [Production("R : A B c ")] @@ -72,7 +113,7 @@ public string RManyNT(Token e, Token f) public string A(List> astr) { string result = "A("; - result += (string) astr + result += (string)astr .Select(a => a.Value) .Aggregate((a1, a2) => a1 + ", " + a2); result += ")"; @@ -94,6 +135,8 @@ public string B(List> bstr) return "B()"; } + + private Parser Parser; @@ -121,6 +164,16 @@ private BuildResult> BuildEbnfJsonParser() return result; } + private BuildResult> BuildOptionParser() + { + OptionTestParser parserInstance = new OptionTestParser(); + ParserBuilder builder = new ParserBuilder(); + + var result = + builder.BuildParser(parserInstance, ParserType.EBNF_LL_RECURSIVE_DESCENT, "root"); + return result; + } + [Fact] public void TestParseBuild() { @@ -128,9 +181,9 @@ public void TestParseBuild() Assert.False(buildResult.IsError); Parser = buildResult.Result; Assert.Equal(typeof(EBNFRecursiveDescentSyntaxParser), Parser.SyntaxParser.GetType()); - Assert.Equal(4,Parser.Configuration.NonTerminals.Count); + Assert.Equal(4, Parser.Configuration.NonTerminals.Count); NonTerminal nt = Parser.Configuration.NonTerminals["R"]; - Assert.Equal(2,nt.Rules.Count); + Assert.Equal(2, nt.Rules.Count); nt = Parser.Configuration.NonTerminals["A"]; Assert.Single(nt.Rules); Rule rule = nt.Rules[0]; @@ -155,17 +208,17 @@ public void TestOneOrMoreNonTerminal() Assert.False(result.IsError); Assert.Equal("R(G(e,f),G(e,f))", result.Result.ToString().Replace(" ", "")); } - - [Fact] + + [Fact] public void TestOneOrMoreWithMany() { var buildResult = BuildParser(); Assert.False(buildResult.IsError); Parser = buildResult.Result; - ParseResult result = Parser.Parse("aaa b c"); + ParseResult result = Parser.Parse("aaa b c"); Assert.False(result.IsError); - Assert.Equal("R(A(a,a,a),B(b),c)",result.Result.ToString().Replace(" ","")); + Assert.Equal("R(A(a,a,a),B(b),c)", result.Result.ToString().Replace(" ", "")); } [Fact] @@ -174,7 +227,7 @@ public void TestOneOrMoreWithOne() var buildResult = BuildParser(); Assert.False(buildResult.IsError); Parser = buildResult.Result; - ParseResult result = Parser.Parse(" b c"); + ParseResult result = 
Parser.Parse(" b c"); Assert.True(result.IsError); } @@ -184,7 +237,7 @@ public void TestZeroOrMoreWithOne() var buildResult = BuildParser(); Assert.False(buildResult.IsError); Parser = buildResult.Result; - ParseResult result = Parser.Parse("a b c"); + ParseResult result = Parser.Parse("a b c"); Assert.False(result.IsError); Assert.Equal("R(A(a),B(b),c)", result.Result.ToString().Replace(" ", "")); } @@ -195,7 +248,7 @@ public void TestZeroOrMoreWithMany() var buildResult = BuildParser(); Assert.False(buildResult.IsError); Parser = buildResult.Result; - ParseResult result = Parser.Parse("a bb c"); + ParseResult result = Parser.Parse("a bb c"); Assert.False(result.IsError); Assert.Equal("R(A(a),B(b,b),c)", result.Result.ToString().Replace(" ", "")); } @@ -206,7 +259,7 @@ public void TestZeroOrMoreWithNone() var buildResult = BuildParser(); Assert.False(buildResult.IsError); Parser = buildResult.Result; - ParseResult result = Parser.Parse("a c"); + ParseResult result = Parser.Parse("a c"); Assert.False(result.IsError); Assert.Equal("R(A(a),B(),c)", result.Result.ToString().Replace(" ", "")); } @@ -219,15 +272,15 @@ public void TestJsonList() Assert.False(buildResult.IsError); Parser jsonParser = buildResult.Result; - ParseResult result = jsonParser.Parse("[1,2,3,4]"); + ParseResult result = jsonParser.Parse("[1,2,3,4]"); Assert.False(result.IsError); Assert.True(result.Result.IsList); - JList list = (JList) result.Result; + JList list = (JList)result.Result; Assert.Equal(4, list.Count); - AssertInt(list,0,1); - AssertInt(list,1,2); - AssertInt(list,2,3); - AssertInt(list,3,4); + AssertInt(list, 0, 1); + AssertInt(list, 1, 2); + AssertInt(list, 2, 3); + AssertInt(list, 3, 4); } [Fact] @@ -236,89 +289,71 @@ public void TestJsonObject() var buildResult = BuildEbnfJsonParser(); Assert.False(buildResult.IsError); Parser jsonParser = buildResult.Result; - ParseResult result = jsonParser.Parse("{\"one\":1,\"two\":2,\"three\":\"trois\" }"); + ParseResult result = jsonParser.Parse("{\"one\":1,\"two\":2,\"three\":\"trois\" }"); Assert.False(result.IsError); Assert.True(result.Result.IsObject); - JObject o = (JObject) result.Result; + JObject o = (JObject)result.Result; Assert.Equal(3, o.Count); - AssertInt(o,"one",1); - AssertInt(o,"two",2); - AssertString(o,"three","trois"); + AssertInt(o, "one", 1); + AssertInt(o, "two", 2); + AssertString(o, "three", "trois"); + } + + + [Fact] + public void TestNonEmptyOption() { + var buildResult = BuildOptionParser(); + Assert.False(buildResult.IsError); + var optionParser = buildResult.Result; + + var result = optionParser.Parse("a b c"); + Assert.Equal("R(a,B(b),c)", result.Result); } + + [Fact] + public void TestEmptyOption() { + var buildResult = BuildOptionParser(); + Assert.False(buildResult.IsError); + var optionParser = buildResult.Result; + + var result = optionParser.Parse("a b"); + Assert.Equal("R(a,B(b),)", result.Result); + } + private void AssertString(JObject obj, string key, string value) { Assert.True(obj.ContainsKey(key)); Assert.True(obj[key].IsValue); - JValue val = (JValue) obj[key]; + JValue val = (JValue)obj[key]; Assert.True(val.IsString); - Assert.Equal(value, val.GetValue() ); + Assert.Equal(value, val.GetValue()); } - + private void AssertInt(JObject obj, string key, int value) { Assert.True(obj.ContainsKey(key)); Assert.True(obj[key].IsValue); - JValue val = (JValue) obj[key]; + JValue val = (JValue)obj[key]; Assert.True(val.IsInt); - Assert.Equal(value, val.GetValue() ); + Assert.Equal(value, val.GetValue()); } - - - private void 
AssertDouble(JObject obj, string key, double value) - { - Assert.True(obj.ContainsKey(key)); - Assert.True(obj[key].IsValue); - JValue val = (JValue) obj[key]; - Assert.True(val.IsDouble); - Assert.Equal(value, val.GetValue() ); - } - - - private void AssertString(JList list, int index, string value) - { - Assert.True(list[index].IsValue); - JValue val = (JValue) list[index]; - Assert.True(val.IsString); - Assert.Equal(value, val.GetValue() ); - } - + + + + + private void AssertInt(JList list, int index, int value) { Assert.True(list[index].IsValue); - JValue val = (JValue) list[index]; + JValue val = (JValue)list[index]; Assert.True(val.IsInt); - Assert.Equal(value, val.GetValue() ); - } - - - private void AssertDouble(JList list, int index, double value) - { - Assert.True(list[index].IsValue); - JValue val = (JValue) list[index]; - Assert.True(val.IsDouble); - Assert.Equal(value, val.GetValue() ); - } - - - private void AssertBool(JList list, int index, bool value) - { - Assert.True(list[index].IsValue); - JValue val = (JValue) list[index]; - Assert.True(val.IsBool); - Assert.Equal(value, val.GetValue() ); - } - - - private void AssertObject(JList list, int index, int count) - { - Assert.True(list[index].IsObject); - JObject val = (JObject) list[index]; - Assert.Equal(count, val.Count); + Assert.Equal(value, val.GetValue()); } + } } diff --git a/sly/lexer/Token.cs b/sly/lexer/Token.cs index 10bd3d7b..ac67dab8 100644 --- a/sly/lexer/Token.cs +++ b/sly/lexer/Token.cs @@ -17,6 +17,7 @@ public class Token public char StringDelimiter = '"'; + public TokenPosition Position { get; set; } public T TokenID { get; set; } public bool IsComment { get; set; } @@ -25,6 +26,8 @@ public class Token public CommentType CommentType {get; set;} = CommentType.No; + public bool IsEmpty {get; set;} = false; + public bool IsMultiLineComment => CommentType == CommentType.Multi; public bool IsSingleLineComment => CommentType == CommentType.Single; @@ -47,12 +50,23 @@ public Token(T token, string value, TokenPosition position, bool isCommentStart } + + public Token() { End = true; TokenID = DefaultToken; } + public static Token Empty() { + var empty = new Token(); + empty.IsEmpty = true; + return empty; + } + + + + public bool IsEndOfStream { get diff --git a/sly/parser/generator/EbnfToken.cs b/sly/parser/generator/EbnfToken.cs index 9f9b7699..0caa1680 100644 --- a/sly/parser/generator/EbnfToken.cs +++ b/sly/parser/generator/EbnfToken.cs @@ -17,11 +17,13 @@ public enum EbnfToken [Lexeme("\\+")] ONEORMORE = 4, [Lexeme("[ \\t]+",true)] - WS = 5, + WS = 5, + [LexemeAttribute("^\\?")] + OPTION = 6, [LexemeAttribute("^\\[d\\]")] - DISCARD = 6, + DISCARD = 7, [Lexeme("[\\n\\r]+",true,true)] - EOL = 7 + EOL = 8 } diff --git a/sly/parser/generator/RuleParser.cs b/sly/parser/generator/RuleParser.cs index 6cece16a..cb748998 100644 --- a/sly/parser/generator/RuleParser.cs +++ b/sly/parser/generator/RuleParser.cs @@ -58,6 +58,13 @@ public IClause OneMoreClause(Token id, Token discarded return new OneOrMoreClause(innerClause); } + [Production("clause : IDENTIFIER OPTION")] + public IClause OptionClause(Token id, Token discarded) + { + IClause innerClause = BuildTerminalOrNonTerimal(id.Value); + return new OptionClause(innerClause); + } + [Production("clause : IDENTIFIER DISCARD ")] public IClause SimpleDiscardedClause(Token id, Token discard) { diff --git a/sly/parser/parser/llparser/EBNFRecursiveDescentSyntaxParser.cs b/sly/parser/parser/llparser/EBNFRecursiveDescentSyntaxParser.cs index 7f7ef8c2..80dceba4 100644 --- 
a/sly/parser/parser/llparser/EBNFRecursiveDescentSyntaxParser.cs +++ b/sly/parser/parser/llparser/EBNFRecursiveDescentSyntaxParser.cs @@ -9,10 +9,10 @@ namespace sly.parser.llparser { - public class EBNFRecursiveDescentSyntaxParser : RecursiveDescentSyntaxParser where IN : struct + public class EBNFRecursiveDescentSyntaxParser : RecursiveDescentSyntaxParser where IN : struct { - - public EBNFRecursiveDescentSyntaxParser(ParserConfiguration configuration, string startingNonTerminal) : base(configuration,startingNonTerminal) + + public EBNFRecursiveDescentSyntaxParser(ParserConfiguration configuration, string startingNonTerminal) : base(configuration, startingNonTerminal) { Configuration = configuration; StartingNonTerminal = startingNonTerminal; @@ -21,9 +21,9 @@ public EBNFRecursiveDescentSyntaxParser(ParserConfiguration configuratio #region STARTING_TOKENS - - protected override void InitStartingTokensForRule(Dictionary> nonTerminals, Rule rule) + + protected override void InitStartingTokensForRule(Dictionary> nonTerminals, Rule rule) { if (rule.PossibleLeadingTokens == null || rule.PossibleLeadingTokens.Count == 0) { @@ -107,10 +107,25 @@ private void InitStartingTokensWithOneOrMore(Rule rule, OneOrMoreClause } } + private void InitStartingTokensWithOption(Rule rule, OptionClause optionClause, + Dictionary> nonTerminals) + { + if (optionClause.Clause is TerminalClause) + { + TerminalClause term = optionClause.Clause as TerminalClause; + InitStartingTokensWithTerminal(rule, term); + } + else if (optionClause.Clause is NonTerminalClause) + { + NonTerminalClause nonterm = optionClause.Clause as NonTerminalClause; + InitStartingTokensWithNonTerminal(rule, nonterm, nonTerminals); + } + } + #endregion #region parsing - + public override SyntaxParseResult Parse(IList> tokens, Rule rule, int position, string nonTerminalName) { @@ -122,7 +137,7 @@ public override SyntaxParseResult Parse(IList> tokens, Rule ru { if (rule.Clauses != null && rule.Clauses.Count > 0) { - children = new List>(); + children = new List>(); foreach (IClause clause in rule.Clauses) { if (clause is TerminalClause) @@ -138,7 +153,7 @@ public override SyntaxParseResult Parse(IList> tokens, Rule ru { Token tok = tokens[currentPosition]; errors.Add(new UnexpectedTokenSyntaxError(tok, - ((TerminalClause) clause).ExpectedToken)); + ((TerminalClause)clause).ExpectedToken)); } isError = isError || termRes.IsError; } @@ -157,14 +172,16 @@ public override SyntaxParseResult Parse(IList> tokens, Rule ru } isError = isError || nonTerminalResult.IsError; } - + else if (clause is OneOrMoreClause || clause is ZeroOrMoreClause) { SyntaxParseResult manyResult = null; - if (clause is OneOrMoreClause oneOrMore) { + if (clause is OneOrMoreClause oneOrMore) + { manyResult = ParseOneOrMore(tokens, oneOrMore, currentPosition); } - else if (clause is ZeroOrMoreClause zeroOrMore) { + else if (clause is ZeroOrMoreClause zeroOrMore) + { manyResult = ParseZeroOrMore(tokens, zeroOrMore, currentPosition); } if (!manyResult.IsError) @@ -177,13 +194,14 @@ public override SyntaxParseResult Parse(IList> tokens, Rule ru if (manyResult.Errors != null && manyResult.Errors.Count > 0) { errors.AddRange(manyResult.Errors); - } + } } isError = isError || manyResult.IsError; } - if (isError) + else if (clause is OptionClause option) { - break; + var optionResult = ParseOption(tokens, option, currentPosition); + children.Add(optionResult.Root); } } } @@ -206,7 +224,7 @@ public override SyntaxParseResult Parse(IList> tokens, Rule ru return result; } - + public 
SyntaxParseResult ParseZeroOrMore(IList> tokens, ZeroOrMoreClause clause, int position) { SyntaxParseResult result = new SyntaxParseResult(); @@ -286,12 +304,12 @@ public SyntaxParseResult ParseOneOrMore(IList> tokens, OneOrMoreCl if (nextResult != null && !nextResult.IsError) { currentPosition = nextResult.EndingPosition; - ManySyntaxNode moreChildren = (ManySyntaxNode) nextResult.Root; + ManySyntaxNode moreChildren = (ManySyntaxNode)nextResult.Root; manyNode.Children.AddRange(moreChildren.Children); } isError = false; } - + else { isError = true; @@ -304,7 +322,49 @@ public SyntaxParseResult ParseOneOrMore(IList> tokens, OneOrMoreCl return result; } - + public SyntaxParseResult ParseOption(IList> tokens, OptionClause clause, int position) + { + SyntaxParseResult result = new SyntaxParseResult(); + ManySyntaxNode manyNode = new ManySyntaxNode(""); + int currentPosition = position; + IClause innerClause = clause.Clause; + + SyntaxParseResult innerResult = null; + + if (innerClause is TerminalClause) + { + manyNode.IsManyTokens = true; + innerResult = ParseTerminal(tokens, innerClause as TerminalClause, currentPosition); + } + else if (innerClause is NonTerminalClause) + { + manyNode.IsManyValues = true; + innerResult = ParseNonTerminal(tokens, innerClause as NonTerminalClause, currentPosition); + } + else + { + throw new NotImplementedException("unable to apply repeater to " + innerClause.GetType().Name); + } + + + if (innerResult.IsError) + { + result = new SyntaxParseResult(); + result.IsError = false; + result.Root = new SyntaxLeaf(Token.Empty()); + result.EndingPosition = position; + } + else + { + result = innerResult; + } + + return result; + + + } + + #endregion } diff --git a/sly/parser/syntax/OptionClause.cs b/sly/parser/syntax/OptionClause.cs new file mode 100644 index 00000000..1d1cdc7c --- /dev/null +++ b/sly/parser/syntax/OptionClause.cs @@ -0,0 +1,23 @@ +namespace sly.parser.syntax +{ + + public class OptionClause : IClause + { + public IClause Clause { get; set; } + public OptionClause(IClause clause) + { + Clause = clause; + } + + public override string ToString() + { + return $"{Clause.ToString()}?"; + } + + public bool MayBeEmpty() + { + return true; + } + + } +} \ No newline at end of file From e5a710e7684b8d40105ebca0df1cac8c9f124594 Mon Sep 17 00:00:00 2001 From: Olivier Duhart Date: Mon, 14 May 2018 11:05:30 +0200 Subject: [PATCH 31/31] upgrade to version 2.1.0 --- sly/sly.csproj | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/sly/sly.csproj b/sly/sly.csproj index 763cbb34..6e26f3e6 100644 --- a/sly/sly.csproj +++ b/sly/sly.csproj @@ -3,7 +3,7 @@ netcoreapp2.0 #LY is a parser generator halfway between parser combinators and parser generator like ANTLR b3b00 - 2.0.7 + 2.1.0 https://github.com/b3b00/sly https://github.com/b3b00/sly https://github.com/b3b00/sly/blob/master/LICENSE