From 269d7276996c3c8410b96d712c3afb63215c2598 Mon Sep 17 00:00:00 2001
From: b3b00
Date: Thu, 2 May 2024 08:49:56 +0200
Subject: [PATCH] cleaning, sonar linting

---
 src/sly/lexer/GenericLexer.cs   |  8 ++--
 src/sly/lexer/Token.cs          |  2 +
 src/sly/parser/parser/Parser.cs | 72 +++++++++++++++++----------------
 3 files changed, 42 insertions(+), 40 deletions(-)

diff --git a/src/sly/lexer/GenericLexer.cs b/src/sly/lexer/GenericLexer.cs
index a89de92b..4cd3b86f 100644
--- a/src/sly/lexer/GenericLexer.cs
+++ b/src/sly/lexer/GenericLexer.cs
@@ -111,9 +111,7 @@ protected readonly
 
         protected char StringDelimiterChar;
 
-        private readonly IEqualityComparer KeyWordComparer;
-
-        private readonly StringComparison KeyWordComparison;
+        private readonly IEqualityComparer _keyWordComparer;
 
         public GenericLexer(IdentifierType idType = IdentifierType.Alpha,
             Action> extensionBuilder = null,
@@ -128,7 +126,7 @@ public GenericLexer(Config lexerConfig, GenericToken[] staticTokens)
             derivedTokens = new Dictionary>();
             ExtensionBuilder = lexerConfig.ExtensionBuilder;
-            KeyWordComparer = lexerConfig.KeyWordComparer;
+            _keyWordComparer = lexerConfig.KeyWordComparer;
             SubLexersFsm = new Dictionary>();
             InitializeStaticLexer(lexerConfig, staticTokens);
         }
 
@@ -505,7 +503,7 @@ public void AddLexeme(GenericToken genericToken, BuildResult> result,
             if (genericToken == GenericToken.Identifier)
             {
                 tokensForGeneric =
-                    new Dictionary(KeyWordComparer);
+                    new Dictionary(_keyWordComparer);
             }
             else
             {
diff --git a/src/sly/lexer/Token.cs b/src/sly/lexer/Token.cs
index 3ae94d70..3ccc85bb 100644
--- a/src/sly/lexer/Token.cs
+++ b/src/sly/lexer/Token.cs
@@ -184,6 +184,8 @@ public Token Previous(int channelId)
         public bool IsIndent { get; set; }
 
         public bool IsUnIndent { get; set; }
+
+        public bool IsIndentation => IsIndent || IsUnIndent;
 
         public int IndentationLevel { get; set; }
 
diff --git a/src/sly/parser/parser/Parser.cs b/src/sly/parser/parser/Parser.cs
index 78dfd06d..c69f342f 100644
--- a/src/sly/parser/parser/Parser.cs
+++ b/src/sly/parser/parser/Parser.cs
@@ -111,41 +111,7 @@ public ParseResult ParseWithContext(string source, object context, stri
 
             return result;
         }
 
-        private List> AutoCloseIndentation(List> tokens)
-        {
-            if (SyntaxParser is EBNFRecursiveDescentSyntaxParser ebnf && ebnf.Configuration.AutoCloseIndentations)
-            {
-                var indents = tokens
-                    .Where(x => x.IsIndent || x.IsUnIndent);
-                if (indents.Any())
-                {
-                    var finalIndentation = indents
-                        .Select(x => x.IsIndent ? 1 : -1)
-                        .Aggregate((int x, int y) => x + y);
-                    if (finalIndentation > 0)
-                    {
-                        tokens = tokens.Take(tokens.Count - 1).ToList();
-                        for (int i = 0; i < finalIndentation; i++)
-                        {
-                            tokens.Add(new Token()
-                            {
-                                IsUnIndent = true,
-                                IsEOS = false,
-                                IsEOL = false,
-                                IndentationLevel = finalIndentation - i - 1
-                            });
-                        }
-
-                        tokens.Add(new Token()
-                        {
-                            IsEOS = true
-                        });
-                    }
-                }
-            }
-
-            return tokens;
-        }
+
 
         public ParseResult ParseWithContext(IList> tokens, object parsingContext = null, string startingNonTerminal = null)
@@ -197,5 +163,41 @@ public ParseResult ParseWithContext(IList> tokens, object par
             return result;
         }
+
+        private List> AutoCloseIndentation(List> tokens)
+        {
+            if (SyntaxParser is EBNFRecursiveDescentSyntaxParser ebnf && ebnf.Configuration.AutoCloseIndentations)
+            {
+                var indents = tokens
+                    .Where(x => x.IsIndentation);
+                if (indents.Any())
+                {
+                    var finalIndentation = indents
+                        .Select(x => x.IsIndent ? 1 : -1)
+                        .Aggregate((int x, int y) => x + y);
+                    if (finalIndentation > 0)
+                    {
+                        tokens = tokens.Take(tokens.Count - 1).ToList();
+                        for (int i = 0; i < finalIndentation; i++)
+                        {
+                            tokens.Add(new Token()
+                            {
+                                IsUnIndent = true,
+                                IsEOS = false,
+                                IsEOL = false,
+                                IndentationLevel = finalIndentation - i - 1
+                            });
+                        }
+
+                        tokens.Add(new Token()
+                        {
+                            IsEOS = true
+                        });
+                    }
+                }
+            }
+
+            return tokens;
+        }
 
     }
 }
\ No newline at end of file
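
Note on the relocated AutoCloseIndentation method: guarded in csly by the EBNF parser's AutoCloseIndentations configuration, it computes the net indentation balance of the token stream (+1 for each indent token, -1 for each unindent) and, when that balance is positive, drops the trailing end-of-stream token, appends one synthetic UnIndent token per still-open level (with decreasing IndentationLevel), and re-appends the EOS token. The sketch below is a minimal standalone illustration of that balancing logic, not csly's actual API: the Tok type, the CloseIndentation name, and the use of Sum in place of the original Select/Aggregate pair are simplifications for this example.

using System;
using System.Collections.Generic;
using System.Linq;

// Simplified stand-in for csly's token type, reduced to the fields the logic needs.
class Tok
{
    public bool IsIndent { get; set; }
    public bool IsUnIndent { get; set; }
    public bool IsEOS { get; set; }
    public int IndentationLevel { get; set; }

    // Mirrors the IsIndentation convenience property introduced in Token.cs above.
    public bool IsIndentation => IsIndent || IsUnIndent;
}

static class AutoClose
{
    // If the stream ends with unbalanced indents, close them with synthetic
    // UnIndent tokens and restore the end-of-stream marker.
    public static List<Tok> CloseIndentation(List<Tok> tokens)
    {
        int balance = tokens
            .Where(t => t.IsIndentation)
            .Sum(t => t.IsIndent ? 1 : -1); // net number of indents left open

        if (balance <= 0) return tokens;

        var result = tokens.Take(tokens.Count - 1).ToList(); // drop trailing EOS
        for (int i = 0; i < balance; i++)
        {
            result.Add(new Tok { IsUnIndent = true, IndentationLevel = balance - i - 1 });
        }
        result.Add(new Tok { IsEOS = true });
        return result;
    }
}

class Demo
{
    static void Main()
    {
        // One indent that is never closed before the end of the stream.
        var tokens = new List<Tok>
        {
            new Tok { IsIndent = true, IndentationLevel = 0 },
            new Tok(), // an ordinary token
            new Tok { IsEOS = true }
        };
        var closed = AutoClose.CloseIndentation(tokens);
        Console.WriteLine(closed.Count(t => t.IsUnIndent)); // prints 1
    }
}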