diff --git a/.DS_Store b/.DS_Store
index bdc8bd790..e4d2716c0 100644
Binary files a/.DS_Store and b/.DS_Store differ
diff --git a/.github/workflows/test.yml b/.github/workflows/test.yml
index 23658c414..0df38c074 100644
--- a/.github/workflows/test.yml
+++ b/.github/workflows/test.yml
@@ -20,6 +20,9 @@ jobs:
           python -m pip install --upgrade pip
           pip install -r requirements.txt
 
+      - name: Generate antlr4 dependencies
+        run: antlr4 -Dlanguage=Python3 -Xexact-output-dir project/lang.g4 -o project/lang
+
       - name: Run tests
         run: |
           python scripts/run_tests.py
diff --git a/.gitignore b/.gitignore
index 91c4207d9..27fa252ac 100644
--- a/.gitignore
+++ b/.gitignore
@@ -104,6 +104,7 @@ celerybeat.pid
 # Environments
 .env
 .venv
+project/lang
 env/
 venv/
 ENV/
diff --git a/project/lang.g4 b/project/lang.g4
new file mode 100644
index 000000000..d95c3ef98
--- /dev/null
+++ b/project/lang.g4
@@ -0,0 +1,36 @@
+grammar lang;
+
+WS: [ \r\n\t]+ -> skip;
+
+prog: stmt* EOF;
+
+stmt: bind | add | remove | declare;
+
+declare: 'let' VAR 'is' 'graph';
+bind: 'let' VAR '=' expr;
+remove: 'remove' ('vertex' | 'edge' | 'vertices') expr 'from' VAR;
+add: 'add' ('vertex' | 'edge') expr 'to' VAR;
+
+
+expr: NUM | CHAR | VAR | edge_expr | set_expr | regexp | select;
+
+set_expr: '[' expr (',' expr)* ']';
+edge_expr: '(' expr ',' expr ',' expr ')';
+regexp:
+    CHAR
+    | VAR
+    | '(' regexp ')'
+    | regexp '|' regexp
+    | regexp '^' range
+    | regexp '.' regexp
+    | regexp '&' regexp;
+
+range: '[' NUM '..' NUM? ']';
+select: v_filter? v_filter? 'return' VAR (',' VAR)? 'where' VAR 'reachable'
+    'from' VAR 'in' VAR 'by' expr;
+
+v_filter: 'for' VAR 'in' expr;
+
+VAR: [a-z] [a-z0-9]*;
+NUM: '0' | ([1-9] [0-9]*);
+CHAR: '"' [a-z] '"';
diff --git a/project/task11.py b/project/task11.py
new file mode 100644
index 000000000..600ac7b2b
--- /dev/null
+++ b/project/task11.py
@@ -0,0 +1,56 @@
+from project.lang.langLexer import langLexer
+from project.lang.langParser import langParser
+from project.lang.langListener import langListener
+
+from antlr4 import ParserRuleContext, CommonTokenStream, ParseTreeWalker, Token
+from antlr4.InputStream import InputStream
+
+
+class NodeCountListener(langListener):
+    """Counts every rule node visited during a tree walk."""
+
+    def __init__(self) -> None:
+        super().__init__()
+        self.count = 0
+
+    def enterEveryRule(self, ctx):
+        self.count += 1
+
+    def get_count(self):
+        return self.count
+
+
+class StringifyListener(langListener):
+    """Collects token text during a tree walk; tokens are joined with
+    spaces because the skipped WS rule leaves no whitespace to recover."""
+
+    def __init__(self):
+        super().__init__()
+        self.result = ""
+
+    def visitTerminal(self, node):
+        # Skip the implicit EOF token so it is not rendered as "<EOF>".
+        if node.getSymbol().type != Token.EOF:
+            self.result += node.getText() + " "
+
+    def get_result(self):
+        return self.result.strip()
+
+
+def prog_to_tree(program: str) -> tuple[ParserRuleContext, bool]:
+    parser = langParser(CommonTokenStream(langLexer(InputStream(program))))
+    return parser.prog(), (parser.getNumberOfSyntaxErrors() == 0)
+
+
+def nodes_count(tree: ParserRuleContext) -> int:
+    # ctx.enterRule(listener) fires the event for the root node only;
+    # a ParseTreeWalker is needed to visit the whole tree.
+    listener = NodeCountListener()
+    ParseTreeWalker().walk(listener, tree)
+    return listener.get_count()
+
+
+def tree_to_prog(tree: ParserRuleContext) -> str:
+    listener = StringifyListener()
+    ParseTreeWalker().walk(listener, tree)
+    return listener.get_result()
diff --git a/project/task7.py b/project/task7.py
index badd17b3a..fa2f70a95 100644
--- a/project/task7.py
+++ b/project/task7.py
@@ -53,7 +53,7 @@ def cfpq_with_matrix(
             (p.body[0].to_text(), p.body[1].to_text())
         )
 
-    for i in range(graph.number_of_nodes() ** 2):
+    for j in range(graph.number_of_nodes() ** 2):
         for N, NN in N_to_NN.items():
             for Nl, Nr in NN:
                 M_new[N] += M[Nl] @ M[Nr]
diff --git a/requirements.txt b/requirements.txt
index 04c4f5c00..bf66ffb80 100644
--- a/requirements.txt
+++ b/requirements.txt
@@ -1,4 +1,5 @@
 antlr4-python3-runtime
+antlr4-tools
 black
 cfpq-data
 grammarinator @ git+https://github.com/renatahodovan/grammarinator.git@f3ffa71
diff --git a/tests/.DS_Store b/tests/.DS_Store
index cc2d7b888..67aada108 100644
Binary files a/tests/.DS_Store and b/tests/.DS_Store differ
diff --git a/tests/autotests/.DS_Store b/tests/autotests/.DS_Store
new file mode 100644
index 000000000..5ec935225
Binary files /dev/null and b/tests/autotests/.DS_Store differ
diff --git a/tests/test_task_1.py b/tests/test_task_1.py
deleted file mode 100644
index 5e3adb92f..000000000
--- a/tests/test_task_1.py
+++ /dev/null
@@ -1,35 +0,0 @@
-import networkx as nx
-import os
-import tempfile
-import pytest
-from project import create_labeled_two_cycle_graph, graph_info
-
-
-@pytest.fixture()
-def start():
-    print("Start test")
-
-
-def test_create_labeled_two_cycle_graph():
-    with tempfile.NamedTemporaryFile(suffix=".dot") as tmp_file:
-        create_labeled_two_cycle_graph(3, 4, ("a", "b"), tmp_file.name)
-        assert os.path.exists(tmp_file.name)
-
-        G = nx.drawing.nx_pydot.read_dot(tmp_file.name)
-
-        assert G.number_of_nodes() == 8
-        assert G.number_of_edges() == 9
-        for _, _, data in G.edges(data=True):
-            assert data["label"] in ["a", "b"]
-
-
-def test_graph_info():
-    G = nx.Graph()
-    G.add_nodes_from([1, 2, 3])
-    G.add_edges_from([(1, 2, {"label": "a"}), (2, 3, {"label": "b"})])
-
-    node_count, edge_count, labels = graph_info(G)
-
-    assert node_count == 3
-    assert edge_count == 2
-    assert labels == {"a", "b"}
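
For reference, a minimal round-trip sketch of the task11 helpers (hypothetical usage, not part of the commit; it assumes the ANTLR-generated project/lang package has already been produced by the antlr4 command from the workflow above):

    from project.task11 import prog_to_tree, nodes_count, tree_to_prog

    # Parse a small program in the grammar above: declare a graph g
    # and add a vertex to it.
    tree, ok = prog_to_tree("let g is graph add vertex 1 to g")
    assert ok  # the parser reported no syntax errors

    print(nodes_count(tree))   # rule nodes visited by the tree walker
    print(tree_to_prog(tree))  # token text recovered from the parse tree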