diff --git a/ParserTests/Issue239/Issue239Lexer.cs b/ParserTests/Issue239/Issue239Lexer.cs index 49efbf2a..146c4b78 100644 --- a/ParserTests/Issue239/Issue239Lexer.cs +++ b/ParserTests/Issue239/Issue239Lexer.cs @@ -1,7 +1,20 @@ +using sly.lexer; + namespace ParserTests.Issue239 { public enum Issue239Lexer { + [AlphaNumDashId] + ID, + [Keyword("int")] + INT, + [Int] + INT_LITERAL, + [Sugar("=")] + ASSIGN, + [Sugar(";")] + SEMI + } } \ No newline at end of file diff --git a/ParserTests/Issue239/Issue239Parser.cs b/ParserTests/Issue239/Issue239Parser.cs index becf2b97..f46ff1d8 100644 --- a/ParserTests/Issue239/Issue239Parser.cs +++ b/ParserTests/Issue239/Issue239Parser.cs @@ -1,7 +1,30 @@ +using System.Collections.Generic; +using sly.lexer; +using sly.parser.generator; + namespace ParserTests.Issue239 { public class Issue239Parser { + [Production("statements : statement*")] + public object Statements(List<object> statements) + { + return statements; + } + + [Production("statement: INT[d] ID SEMI[d]")] + public object IntDeclaration(Token<Issue239Lexer> id) + { + return $"{id.Value} is an int;\n"; + } + + [Production("statement : ID ASSIGN[d] INT_LITERAL SEMI[d]")] + public object Assignement(Token<Issue239Lexer> id, Token<Issue239Lexer> value) + { + return $"{id.Value} is equal to {value.IntValue}\n"; + } + + } } \ No newline at end of file diff --git a/ParserTests/Issue239/Issue239Tests.cs b/ParserTests/Issue239/Issue239Tests.cs index f0ce4a95..87f8c4dc 100644 --- a/ParserTests/Issue239/Issue239Tests.cs +++ b/ParserTests/Issue239/Issue239Tests.cs @@ -1,7 +1,36 @@ +using System.Collections.Generic; +using expressionparser; +using simpleExpressionParser; +using sly.parser; +using sly.parser.generator; +using Xunit; + namespace ParserTests.Issue239 { public class Issue239Tests { - + private static Parser<Issue239Lexer, object> BuildParser() + { + var StartingRule = $"statements"; + var parserInstance = new Issue239Parser(); + var builder = new ParserBuilder<Issue239Lexer, object>(); + var pBuild = builder.BuildParser(parserInstance, 
ParserType.EBNF_LL_RECURSIVE_DESCENT, StartingRule); + Assert.True(pBuild.IsOk); + Assert.NotNull(pBuild.Result); + return pBuild.Result; + } + + + + [Fact] + public static void TestOk() + { + var parser = BuildParser(); + var parseResult = parser.Parse("int x; int y; a = 12;"); + Assert.True(parseResult.IsOk); + Assert.IsAssignableFrom<List<object>>(parseResult.Result); + var lst = parseResult.Result as List<object>; + Assert.Equal(3, lst.Count); + } } } \ No newline at end of file diff --git a/ParserTests/lexer/GenericLexerTests.cs b/ParserTests/lexer/GenericLexerTests.cs index 3512330f..1c5af689 100644 --- a/ParserTests/lexer/GenericLexerTests.cs +++ b/ParserTests/lexer/GenericLexerTests.cs @@ -125,6 +125,13 @@ public enum Extensions [Lexeme(GenericToken.Double)] DOUBLE } + + public enum ShortExtensions + { + [Extension] DATE, + + [Double] DOUBLE + } public class ParserUsingLexerExtensions @@ -196,6 +203,27 @@ public static void AddExtension(Extensions token, LexemeAttribute lexem, Generic .CallBack(callback); } } + + public static void AddShortExtension(ShortExtensions token, LexemeAttribute lexem, GenericLexer<ShortExtensions> lexer) + { + if (token == ShortExtensions.DATE) + { + NodeCallback<GenericToken> callback = match => + { + match.Properties[GenericLexer<ShortExtensions>.DerivedToken] = Extensions.DATE; + return match; + }; + + var fsmBuilder = lexer.FSMBuilder; + + fsmBuilder.GoTo(GenericLexer<ShortExtensions>.in_double) + .Transition('.', CheckDate) + .Mark("start_date") + .RepetitionTransition(4, "[0-9]") + .End(GenericToken.Extension) + .CallBack(callback); + } + } } public enum CharTokens { @@ -290,7 +318,8 @@ public enum CustomId { EOS, - [Lexeme(GenericToken.Identifier, IdentifierType.Custom, "A-Za-z", "-_0-9A-Za-z")] + [CustomId("A-Za-z", "-_0-9A-Za-z")] + // [Lexeme(GenericToken.Identifier, IdentifierType.Custom, "A-Za-z", "-_0-9A-Za-z")] ID, [Lexeme(GenericToken.SugarToken, "-", "_")] @@ -775,6 +804,26 @@ public void TestExtensions() Assert.Equal("3.14", r.Tokens[1].Value); } + + [Fact] + public void TestShortExtensions() + { + 
var lexerRes = LexerBuilder.BuildLexer(new BuildResult<ILexer<ShortExtensions>>(), ExtendedGenericLexer.AddShortExtension); + Assert.False(lexerRes.IsError); + var lexer = lexerRes.Result as GenericLexer<ShortExtensions>; + Assert.NotNull(lexer); + + var r = lexer.Tokenize("20.02.2018 3.14"); + Assert.True(r.IsOk); + + Assert.Equal(3, r.Tokens.Count); + Assert.Equal(ShortExtensions.DATE, r.Tokens[0].TokenID); + Assert.Equal("20.02.2018", r.Tokens[0].Value); + Assert.Equal(ShortExtensions.DOUBLE, r.Tokens[1].TokenID); + Assert.Equal("3.14", r.Tokens[1].Value); + + } [Fact] public void TestExtensionsPreconditionFailure()