
Commit

Merge pull request #69 from b3b00/dev
2.1.0 version
b3b00 authored May 14, 2018
2 parents 5a1013e + e5a710e commit ee92b06
Showing 57 changed files with 839 additions and 726 deletions.
5 changes: 5 additions & 0 deletions .gitignore
@@ -27,3 +27,8 @@ packages/
*.orig

dotnet.psess
my_app_coverage.xml
sly.xml
sly_coverage.xml
ParserTests/lcov.info
ParserTests/lcov.info.info
28 changes: 28 additions & 0 deletions .vscode/launch.json
@@ -0,0 +1,28 @@
{
// Use IntelliSense to find out which attributes exist for C# debugging
// Use hover for the description of the existing attributes
// For further information visit https://github.com/OmniSharp/omnisharp-vscode/blob/master/debugger-launchjson.md
"version": "0.2.0",
"configurations": [
{
"name": ".NET Core Launch (console)",
"type": "coreclr",
"request": "launch",
"preLaunchTask": "build",
// If you have changed target frameworks, make sure to update the program path.
"program": "${workspaceRoot}/samples/ParserExample/bin/Debug/netcoreapp2.0/ParserExample.dll",
"args": [],
"cwd": "${workspaceRoot}/samples/ParserExample",
// For more information about the 'console' field, see https://github.com/OmniSharp/omnisharp-vscode/blob/master/debugger-launchjson.md#console-terminal-window
"console": "internalConsole",
"stopAtEntry": false,
"internalConsoleOptions": "openOnSessionStart"
},
{
"name": ".NET Core Attach",
"type": "coreclr",
"request": "attach",
"processId": "${command:pickProcess}"
}
]
}
3 changes: 3 additions & 0 deletions .vscode/settings.json
@@ -0,0 +1,3 @@
{
"dotnet-test-explorer.testProjectPath": "./ParserTests"
}
41 changes: 41 additions & 0 deletions .vscode/tasks.json
@@ -0,0 +1,41 @@
{
"version": "0.1.0",
"command": "dotnet",
"isShellCommand": true,
"args": [],
"tasks": [
{
"taskName": "build",
"command": "dotnet",
"isBuildCommand": true,
"args": [
"build",
"${workspaceFolder}/ParserTests/ParserTests.csproj"
],
"problemMatcher": "$msCompile",
},
{
"taskName": "unit tests",
"command": "dotnet",
"isTestCommand": true,
"args": [
"test",
"${workspaceFolder}/ParserTests/ParserTests.csproj"
],
"problemMatcher": "$msCompile",
},
{
"taskName": "test with coverage",
"command": "dotnet",
"isTestCommand": true,
"args": [
"test",
"/p:CollectCoverage=true",
"/p:CoverletOutputFormat=lcov",
"/p:CoverletOutput=./lcov.info",
"${workspaceFolder}/ParserTests/ParserTests.csproj"
],
"problemMatcher": "$msCompile",
},
]
}
203 changes: 123 additions & 80 deletions ParserTests/CommentsTests.cs
@@ -47,173 +47,216 @@ public void TestGenericSingleLineComment()

string dump = lexer.ToString();


var tokens = lexer.Tokenize(@"1
2 // single line comment
3.0").ToList();

Assert.Equal(4, tokens.Count);

var token1 = tokens[0];
var token2 = tokens[1];
var token3 = tokens[2];
var token4 = tokens[3];

Assert.Equal(CommentsToken.INT,token1.TokenID);
Assert.Equal("1",token1.Value);
Assert.Equal(0,token1.Position.Line);
Assert.Equal(0,token1.Position.Column);
Assert.Equal(CommentsToken.INT,token2.TokenID);
Assert.Equal("2",token2.Value);
Assert.Equal(1,token2.Position.Line);
Assert.Equal(0,token2.Position.Column);
Assert.Equal(CommentsToken.COMMENT,token3.TokenID);
Assert.Equal(" single line comment",token3.Value);
Assert.Equal(1,token3.Position.Line);
Assert.Equal(2,token3.Position.Column);
Assert.Equal(CommentsToken.DOUBLE,token4.TokenID);
Assert.Equal("3.0",token4.Value);
Assert.Equal(2,token4.Position.Line);
Assert.Equal(0,token4.Position.Column);

Assert.Equal(CommentsToken.INT, token1.TokenID);
Assert.Equal("1", token1.Value);
Assert.Equal(0, token1.Position.Line);
Assert.Equal(0, token1.Position.Column);
Assert.Equal(CommentsToken.INT, token2.TokenID);
Assert.Equal("2", token2.Value);
Assert.Equal(1, token2.Position.Line);
Assert.Equal(0, token2.Position.Column);
Assert.Equal(CommentsToken.COMMENT, token3.TokenID);
Assert.Equal(" single line comment", token3.Value);
Assert.Equal(1, token3.Position.Line);
Assert.Equal(2, token3.Position.Column);
Assert.Equal(CommentsToken.DOUBLE, token4.TokenID);
Assert.Equal("3.0", token4.Value);
Assert.Equal(2, token4.Position.Line);
Assert.Equal(0, token4.Position.Column);

}


[Fact]
public void TestGenericMultiLineComment()
{
var lexerRes = LexerBuilder.BuildLexer<CommentsToken>(new BuildResult<ILexer<CommentsToken>>());
Assert.False(lexerRes.IsError);
var lexer = lexerRes.Result as GenericLexer<CommentsToken>;


string dump = lexer.ToString();

string code = @"1
2 /* multi line
comment on 2 lines */ 3.0";


var tokens = lexer.Tokenize(code).ToList();

Assert.Equal(4, tokens.Count);

var token1 = tokens[0];
var token2 = tokens[1];
var token3 = tokens[2];
var token4 = tokens[3];

Assert.Equal(CommentsToken.INT,token1.TokenID);
Assert.Equal("1",token1.Value);
Assert.Equal(0,token1.Position.Line);
Assert.Equal(0,token1.Position.Column);
Assert.Equal(CommentsToken.INT, token1.TokenID);
Assert.Equal("1", token1.Value);
Assert.Equal(0, token1.Position.Line);
Assert.Equal(0, token1.Position.Column);

Assert.Equal(CommentsToken.INT,token2.TokenID);
Assert.Equal("2",token2.Value);
Assert.Equal(1,token2.Position.Line);
Assert.Equal(0,token2.Position.Column);
Assert.Equal(CommentsToken.COMMENT,token3.TokenID);
Assert.Equal(CommentsToken.INT, token2.TokenID);
Assert.Equal("2", token2.Value);
Assert.Equal(1, token2.Position.Line);
Assert.Equal(0, token2.Position.Column);
Assert.Equal(CommentsToken.COMMENT, token3.TokenID);
Assert.Equal(@" multi line
comment on 2 lines ",token3.Value);
Assert.Equal(1,token3.Position.Line);
Assert.Equal(2,token3.Position.Column);
Assert.Equal(CommentsToken.DOUBLE,token4.TokenID);
Assert.Equal("3.0",token4.Value);
Assert.Equal(2,token4.Position.Line);
Assert.Equal(22,token4.Position.Column);
comment on 2 lines ", token3.Value);
Assert.Equal(1, token3.Position.Line);
Assert.Equal(2, token3.Position.Column);
Assert.Equal(CommentsToken.DOUBLE, token4.TokenID);
Assert.Equal("3.0", token4.Value);
Assert.Equal(2, token4.Position.Line);
Assert.Equal(22, token4.Position.Column);

}

[Fact]
public void TestInnerMultiComment() {
public void TestMixedEOLComment()
{
var lexerRes = LexerBuilder.BuildLexer<CommentsToken>(new BuildResult<ILexer<CommentsToken>>());
Assert.False(lexerRes.IsError);
var lexer = lexerRes.Result as GenericLexer<CommentsToken>;



string dump = lexer.ToString();
string code = "1\n2\r\n/* multi line \rcomment on 2 lines */ 3.0";
List<Token<CommentsToken>> tokens = null;
tokens = lexer.Tokenize(code).ToList();

Assert.Equal(4, tokens.Count);

var token1 = tokens[0];
var token2 = tokens[1];
var token3 = tokens[2];
var token4 = tokens[3];

Assert.Equal(CommentsToken.INT, token1.TokenID);
Assert.Equal("1", token1.Value);
Assert.Equal(0, token1.Position.Line);
Assert.Equal(0, token1.Position.Column);

Assert.Equal(CommentsToken.INT, token2.TokenID);
Assert.Equal("2", token2.Value);
Assert.Equal(1, token2.Position.Line);
Assert.Equal(0, token2.Position.Column);
Assert.Equal(CommentsToken.COMMENT, token3.TokenID);
Assert.Equal(" multi line \rcomment on 2 lines ", token3.Value);
Assert.Equal(2, token3.Position.Line);
Assert.Equal(0, token3.Position.Column);
Assert.Equal(CommentsToken.DOUBLE, token4.TokenID);
Assert.Equal("3.0", token4.Value);
Assert.Equal(3, token4.Position.Line);
Assert.Equal(22, token4.Position.Column);

}

[Fact]
public void TestInnerMultiComment()
{
var lexerRes = LexerBuilder.BuildLexer<CommentsToken>(new BuildResult<ILexer<CommentsToken>>());
Assert.False(lexerRes.IsError);
var lexer = lexerRes.Result as GenericLexer<CommentsToken>;


string dump = lexer.ToString();

string code = @"1
2 /* inner */ 3
4
";

var tokens = lexer.Tokenize(code).ToList();

Assert.Equal(5, tokens.Count);

var token1 = tokens[0];
var token2 = tokens[1];
var token3 = tokens[2];
var token4 = tokens[3];
var token5 = tokens[4];


Assert.Equal(CommentsToken.INT,token1.TokenID);
Assert.Equal("1",token1.Value);
Assert.Equal(0,token1.Position.Line);
Assert.Equal(0,token1.Position.Column);
Assert.Equal(CommentsToken.INT, token1.TokenID);
Assert.Equal("1", token1.Value);
Assert.Equal(0, token1.Position.Line);
Assert.Equal(0, token1.Position.Column);

Assert.Equal(CommentsToken.INT,token2.TokenID);
Assert.Equal("2",token2.Value);
Assert.Equal(1,token2.Position.Line);
Assert.Equal(0,token2.Position.Column);
Assert.Equal(CommentsToken.INT, token2.TokenID);
Assert.Equal("2", token2.Value);
Assert.Equal(1, token2.Position.Line);
Assert.Equal(0, token2.Position.Column);

Assert.Equal(CommentsToken.COMMENT,token3.TokenID);
Assert.Equal(@" inner ",token3.Value);
Assert.Equal(1,token3.Position.Line);
Assert.Equal(2,token3.Position.Column);
Assert.Equal(CommentsToken.COMMENT, token3.TokenID);
Assert.Equal(@" inner ", token3.Value);
Assert.Equal(1, token3.Position.Line);
Assert.Equal(2, token3.Position.Column);

Assert.Equal(CommentsToken.INT,token4.TokenID);
Assert.Equal("3",token4.Value);
Assert.Equal(1,token4.Position.Line);
Assert.Equal(14,token4.Position.Column);
Assert.Equal(CommentsToken.INT, token4.TokenID);
Assert.Equal("3", token4.Value);
Assert.Equal(1, token4.Position.Line);
Assert.Equal(14, token4.Position.Column);

Assert.Equal(CommentsToken.INT,token5.TokenID);
Assert.Equal("4",token5.Value);
Assert.Equal(2,token5.Position.Line);
Assert.Equal(0,token5.Position.Column);
Assert.Equal(CommentsToken.INT, token5.TokenID);
Assert.Equal("4", token5.Value);
Assert.Equal(2, token5.Position.Line);
Assert.Equal(0, token5.Position.Column);
}

[Fact]
public void NotEndingMultiComment() {
public void NotEndingMultiComment()
{
var lexerRes = LexerBuilder.BuildLexer<CommentsToken>(new BuildResult<ILexer<CommentsToken>>());
Assert.False(lexerRes.IsError);
var lexer = lexerRes.Result as GenericLexer<CommentsToken>;


string dump = lexer.ToString();

string code = @"1
2 /* not ending
comment";

var tokens = lexer.Tokenize(code).ToList();

Assert.Equal(3, tokens.Count);

var token1 = tokens[0];
var token2 = tokens[1];
var token3 = tokens[2];


Assert.Equal(CommentsToken.INT,token1.TokenID);
Assert.Equal("1",token1.Value);
Assert.Equal(0,token1.Position.Line);
Assert.Equal(0,token1.Position.Column);

Assert.Equal(CommentsToken.INT,token2.TokenID);
Assert.Equal("2",token2.Value);
Assert.Equal(1,token2.Position.Line);
Assert.Equal(0,token2.Position.Column);
Assert.Equal(CommentsToken.INT, token1.TokenID);
Assert.Equal("1", token1.Value);
Assert.Equal(0, token1.Position.Line);
Assert.Equal(0, token1.Position.Column);

Assert.Equal(CommentsToken.INT, token2.TokenID);
Assert.Equal("2", token2.Value);
Assert.Equal(1, token2.Position.Line);
Assert.Equal(0, token2.Position.Column);

Assert.Equal(CommentsToken.COMMENT,token3.TokenID);
Assert.Equal(CommentsToken.COMMENT, token3.TokenID);
Assert.Equal(@" not ending
comment",token3.Value);
Assert.Equal(1,token3.Position.Line);
Assert.Equal(2,token3.Position.Column);
comment", token3.Value);
Assert.Equal(1, token3.Position.Line);
Assert.Equal(2, token3.Position.Column);



}


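The tests above build the lexer with LexerBuilder.BuildLexer and rely on a CommentsToken enum that is not part of this excerpt. The following is only a rough sketch of how that token type and a minimal driver might look, assuming sly's generic-lexer [Lexeme(GenericToken.*)] attributes and a [Comment] attribute taking the single-line prefix plus the multi-line delimiters; the attribute names and signatures are assumptions, not taken from this diff.

using System;
using System.Linq;
using sly.buildresult;
using sly.lexer;

// Hypothetical token definition: the real CommentsToken enum is not shown in
// this diff, and the [Comment] attribute signature below is an assumption.
public enum CommentsToken
{
    [Lexeme(GenericToken.Int)] INT,
    [Lexeme(GenericToken.Double)] DOUBLE,
    // single-line prefix, then multi-line start and end delimiters (assumed)
    [Comment("//", "/*", "*/")] COMMENT
}

public static class CommentsDemo
{
    public static void Main()
    {
        // Same build pattern as the tests above.
        var built = LexerBuilder.BuildLexer<CommentsToken>(new BuildResult<ILexer<CommentsToken>>());
        if (built.IsError) return;

        var lexer = built.Result;
        // Tokenize yields the token stream asserted on in the tests,
        // including COMMENT tokens with their line/column positions.
        foreach (var token in lexer.Tokenize("1\n2 // single line comment\n3.0").ToList())
        {
            Console.WriteLine($"{token.TokenID} '{token.Value}' at {token.Position.Line}:{token.Position.Column}");
        }
    }
}

The IsError guard mirrors the Assert.False(lexerRes.IsError) check used throughout the tests.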
