diff --git a/.github/workflows/test.yml b/.github/workflows/test.yml
index 1c84905ad..70676f2b7 100644
--- a/.github/workflows/test.yml
+++ b/.github/workflows/test.yml
@@ -17,6 +17,8 @@ jobs:
       run: |
         python -m pip install --upgrade pip
         python -m pip install -r ./requirements.txt
+    - name: Generate parser
+      run: antlr4 -Dlanguage=Python3 project/langRules.yaml -visitor -o project/lang
     - name: Test with pytest
       run: |
         python ./scripts/run_tests.py
diff --git a/project/langRules.yaml b/project/langRules.yaml
new file mode 100644
index 000000000..586c92ab5
--- /dev/null
+++ b/project/langRules.yaml
@@ -0,0 +1,21 @@
+grammar langRules;
+
+prog: stmt*;
+
+stmt: bind | add | remove | declare;
+    declare: 'let' VAR 'is' 'graph';
+    bind: 'let' VAR '=' expr;
+    remove: 'remove' ('vertex' | 'edge' | 'vertices') expr 'from' VAR;
+    add: 'add' ('vertex' | 'edge') expr 'to' VAR;
+
+expr: NUM | CHAR | VAR | edge_expr | set_expr | regexp | select;
+    set_expr: '[' expr (',' expr)* ']';
+    edge_expr: '(' expr ',' expr ',' expr ')';
+    regexp: CHAR | VAR | '(' regexp ')' | regexp '|' regexp | regexp '^' range | regexp '.' regexp | regexp '&' regexp;
+    range: '[' NUM '..' NUM? ']';
+    select: v_filter? v_filter? 'return' VAR (',' VAR)? 'where' VAR 'reachable' 'from' VAR 'in' VAR 'by' expr;
+    v_filter: 'for' VAR 'in' expr;
+
+VAR: [a-z] [a-z0-9]*;
+NUM: ([1-9][0-9]*) | '0';
+CHAR: '\u0022' [a-z] '\u0022';
diff --git a/project/task11.py b/project/task11.py
new file mode 100644
index 000000000..06e88bd50
--- /dev/null
+++ b/project/task11.py
@@ -0,0 +1,41 @@
+from project.lang.project.languageVisitor import languageVisitor
+from project.lang.project.languageLexer import languageLexer
+from project.lang.project.languageParser import languageParser
+
+from antlr4 import *
+from antlr4.InputStream import InputStream
+
+
+class NodeCounter(languageVisitor):
+    def __init__(self):
+        super().__init__()
+        self.counter = 0
+
+    def enterEveryRule(self, _):
+        self.counter += 1
+
+
+class TreeToProgVisitor(languageVisitor):
+    def __init__(self):
+        super().__init__()
+        self.visits = []
+
+    def enterEveryRule(self, rule):
+        self.visits.append(rule.get_text())
+
+
+def nodes_count(tree: ParserRuleContext) -> int:
+    visitor = NodeCounter()
+    tree.accept(visitor)
+    return visitor.counter
+
+
+def tree_to_prog(tree: ParserRuleContext) -> str:
+    visitor = TreeToProgVisitor()
+    tree.accept(visitor)
+    return "".join(visitor.visits)
+
+
+def prog_to_tree(program: str) -> tuple[ParserRuleContext, bool]:
+    parser = languageParser(CommonTokenStream(languageLexer(InputStream(program))))
+    return parser.prog(), parser.getNumberOfSyntaxErrors() == 0
diff --git a/requirements.txt b/requirements.txt
index 04c4f5c00..bf66ffb80 100644
--- a/requirements.txt
+++ b/requirements.txt
@@ -1,4 +1,5 @@
 antlr4-python3-runtime
+antlr4-tools
 black
 cfpq-data
 grammarinator @ git+https://github.com/renatahodovan/grammarinator.git@f3ffa71
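
Note on project/task11.py: `NodeCounter` and `TreeToProgVisitor` subclass the generated visitor but override `enterEveryRule`, which is a `ParseTreeListener` callback that `tree.accept(...)` never invokes, and the antlr4 Python runtime exposes `getText()` rather than `get_text()`. The grammar added in this diff is also named `langRules`, while the imports refer to `language*` modules, so the generated module names may need to be reconciled. Below is a minimal visitor-based sketch of the two helpers, not the implementation in this change; it assumes the generated base class `languageVisitor`, whose default `visitXxx` methods fall through to `visitChildren`.

```python
from project.lang.project.languageVisitor import languageVisitor


class NodeCounter(languageVisitor):
    """Counts rule nodes; visitChildren runs once per parser rule context."""

    def __init__(self):
        super().__init__()
        self.counter = 0

    def visitChildren(self, node):
        self.counter += 1  # one increment per rule node, including the root
        return super().visitChildren(node)


class TreeToProgVisitor(languageVisitor):
    """Collects terminal token texts so the program can be rebuilt from the tree."""

    def __init__(self):
        super().__init__()
        self.tokens = []

    def visitTerminal(self, node):
        self.tokens.append(node.getText())  # token text, e.g. 'let', a VAR, a NUM
```

`nodes_count` and `tree_to_prog` can then pass these visitors to `tree.accept(...)` exactly as in task11.py, joining the collected tokens with a separator of choice; a `ParseTreeWalker` with a generated listener would be an equally valid alternative.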
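For quick manual checking, a round trip through the module could look like the sketch below. The sample program text is only an assumption drawn from the grammar above (lower-case identifiers, quoted single-letter labels), and since the grammar defines no whitespace-skipping lexer rule, something like `WS : [ \t\r\n]+ -> skip;` is assumed as well; the printed values are illustrative, not verified output.

```python
from project.task11 import nodes_count, prog_to_tree, tree_to_prog

# Hypothetical sample program: a graph declaration followed by an edge insertion.
program = 'let g is graph add edge (1, "a", 2) to g'

tree, ok = prog_to_tree(program)
if ok:  # True when the parser reported no syntax errors
    print(nodes_count(tree))   # number of rule nodes in the parse tree
    print(tree_to_prog(tree))  # program text rebuilt from the tree
```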