Skip to content

Commit

Permalink
Merge branch 'topics/jupyter-integration-tests'
Browse files Browse the repository at this point in the history
  • Loading branch information
jacquev6 committed Nov 14, 2023
2 parents b7f0498 + 81a75cb commit 109936d
Show file tree
Hide file tree
Showing 91 changed files with 4,734 additions and 2,308 deletions.
1 change: 1 addition & 0 deletions .gitignore
Original file line number Diff line number Diff line change
Expand Up @@ -7,3 +7,4 @@ __pycache__/
/liblincs.cpython-*-x86_64-linux-gnu.so
/*.dll
/liblincs.cp*-win_amd64.pyd
.ipynb_checkpoints/
4 changes: 4 additions & 0 deletions development/Dockerfile
Original file line number Diff line number Diff line change
Expand Up @@ -134,14 +134,18 @@ RUN --mount=type=cache,target=/root/.cache/pip,sharing=locked \
set -x \
&& pip3 install \
auditwheel \
bash_kernel \
Chrones \
gcovr \
joblib \
jupyterlab \
semver \
sphinx \
sphinx-click \
sphinx-jsonschema \
sphinxcontrib-details-directive \
twine \
&& python3 -m bash_kernel.install \
&& for python_version in $LINCS_DEV_PYTHON_VERSIONS; do python$python_version -m pip install \
build \
|| exit 1; done
Expand Down
187 changes: 106 additions & 81 deletions development/cycle.py
Original file line number Diff line number Diff line change
@@ -1,15 +1,21 @@
# Copyright 2023 Vincent Jacques

from __future__ import annotations
import copy
import glob
import json
import multiprocessing
import os
import random
import re
import shutil
import subprocess
import textwrap
import time

import click
import jinja2
import joblib


@click.command()
Expand Down Expand Up @@ -76,7 +82,7 @@ def main(with_docs, single_python_version, unit_coverage, skip_long, skip_unit,

python_versions = os.environ["LINCS_DEV_PYTHON_VERSIONS"].split(" ")
if single_python_version:
python_versions = [python_versions[0]]
python_versions = [python_versions[-1]]
os.environ["LINCS_DEV_PYTHON_VERSIONS"] = python_versions[0]

shutil.rmtree("build", ignore_errors=True)
Expand Down Expand Up @@ -130,9 +136,6 @@ def main(with_docs, single_python_version, unit_coverage, skip_long, skip_unit,
if stop_after_unit:
pass
else:
print_title("Making integration tests from documentation")
make_integration_tests_from_doc()

# Install lincs
###############

Expand All @@ -145,8 +148,11 @@ def main(with_docs, single_python_version, unit_coverage, skip_long, skip_unit,
# With lincs installed
######################

print_title("Running integration tests")
run_integration_tests(skip_long=skip_long, forbid_gpu=forbid_gpu)
print_title("Running Jupyter notebooks (integration tests, documentation sources)")
run_notebooks(skip_long=skip_long, forbid_gpu=forbid_gpu)

print_title("Updating templates (documentation sources)")
update_templates()

if with_docs:
print_title("Building Sphinx documentation")
Expand Down Expand Up @@ -192,54 +198,6 @@ def run_python_tests(*, python_version):
)


def make_integration_tests_from_doc():
    """Extract executable integration-test files from the reST documentation.

    Scans every ``doc-sources/*.rst`` file for special comment directives:

    - ``.. START <file>``  — begin collecting indented lines into <file>
    - ``.. EXTEND <file>`` — resume collecting into an existing <file>
    - ``.. STOP``          — stop collecting
    - ``.. APPEND-TO-LAST-LINE <text>`` — append <text> to the last
      non-empty collected line

    Collected files are written under ``integration-tests/from-documentation/``
    (one sub-directory per source .rst file), with a ``.gitignore`` so the
    generated files are never committed.
    """
    output_files = {}  # maps relative output file name -> list of collected lines
    current_prefix = ""  # indentation of the active START/EXTEND directive
    current_output_file_name = None  # None when not inside a START/EXTEND...STOP span
    for input_file_name in glob.glob("doc-sources/*.rst"):
        # Strip the "doc-sources/" prefix (12 chars) and ".rst" suffix (4 chars)
        output_prefix = input_file_name[12:-4]
        with open(input_file_name) as f:
            lines = f.readlines()
        for line in lines:
            line = line.rstrip()

            if line == f"{current_prefix}.. STOP":
                # STOP is only legal while a file is being collected
                assert current_output_file_name
                current_output_file_name = None
            if current_output_file_name:
                m = re.fullmatch(r".. APPEND-TO-LAST-LINE( .+)", line)
                if m:
                    assert output_files[current_output_file_name]
                    # Walk back over trailing blank lines to find the last real line
                    last_line_index = -1
                    while output_files[current_output_file_name][last_line_index] == "":
                        last_line_index -= 1
                    output_files[current_output_file_name][last_line_index] += m.group(1)
                elif line.startswith(current_prefix + " "):
                    # Line indented deeper than the directive: part of the collected block
                    output_files[current_output_file_name].append(line)
                elif line == "" and output_files[current_output_file_name]:
                    # Preserve interior blank lines (leading blanks are dropped)
                    output_files[current_output_file_name].append("")

            m = re.fullmatch(r"( *).. (START|EXTEND) (.+)", line)
            if m:
                # Nested START/EXTEND spans are not supported
                assert current_output_file_name is None, (input_file_name, current_output_file_name)
                current_prefix = m.group(1)
                current_output_file_name = os.path.join(output_prefix, m.group(3))
                if m.group(2) == "START":
                    output_files[current_output_file_name] = []
        # Every span must be closed by a STOP before the file ends
        assert current_output_file_name is None, (input_file_name, current_output_file_name)

    # Regenerate the output tree from scratch on every run
    shutil.rmtree("integration-tests/from-documentation", ignore_errors=True)
    for file_name, file_contents in output_files.items():
        file_path = os.path.join("integration-tests", "from-documentation", file_name)
        os.makedirs(os.path.dirname(file_path), exist_ok=True)
        # Drop trailing blank lines before writing
        while file_contents and file_contents[-1] == "":
            file_contents.pop()
        with open(file_path, "w") as f:
            # dedent removes the common documentation indentation from the collected block
            f.write(textwrap.dedent("\n".join(file_contents) + "\n"))
    with open("integration-tests/from-documentation/.gitignore", "w") as f:
        f.write("*\n")


def build_sphinx_documentation():
env = dict(os.environ)
env["LINCS_DEV_FORBID_GPU"] = "false"
Expand Down Expand Up @@ -278,40 +236,107 @@ def build_sphinx_documentation():

shutil.copy("COPYING", "docs/")
shutil.copy("COPYING.LESSER", "docs/")


def run_integration_tests(*, skip_long, forbid_gpu):
ok = True
# Sorted: alphabetical order just happens to match a dependency order between a few tests.
for test_file_name in sorted(glob.glob("integration-tests/**/run.sh", recursive=True)):
test_name = test_file_name[18:-7]

if skip_long and os.path.isfile(os.path.join(os.path.dirname(test_file_name), "is-long")):
print_title(f"{test_name}: SKIPPED (is long)", '-')
continue

if forbid_gpu and os.path.isfile(os.path.join(os.path.dirname(test_file_name), "uses-gpu")):
print_title(f"{test_name}: SKIPPED (uses GPU)", '-')
continue

print_title(test_name, '-')

shutil.copy("doc-sources/get-started/get-started.ipynb", "docs/")


def run_notebooks(*, skip_long, forbid_gpu):
    """Execute every Jupyter notebook in the repository, in parallel.

    A notebook is skipped when ``skip_long`` (resp. ``forbid_gpu``) is set and
    an ``is-long`` (resp. ``uses-gpu``) marker file exists next to it.
    Exits the process with status 1 if any notebook fails.

    NOTE(review): this reconstructs the intended code from a diff in which old
    lines (the former ``bash run.sh`` integration-test runner and its failure
    handling) were interleaved with the new ones; the removed lines were diff
    residue, not live code.
    """

    def run_notebook(notebook_path):
        # Returns True on success, False on failure.
        # Work around race condition where two Jupyter instances try to open the same TCP port,
        # resulting in a zmq.error.ZMQError: Address already in use (addr='tcp://127.0.0.1:39787')
        time.sleep(random.random() * 5)

        original_cell_sources = {}

        # Ensure perfect reproducibility: before executing, apply each code
        # cell's "append_to_source" metadata (extra text appended line by line),
        # remembering the pristine sources so they can be restored afterwards.
        with open(notebook_path) as f:
            notebook = json.load(f)
        # Fix: the original used "i" for both this loop and the inner one,
        # shadowing the cell index with the append-line index.
        for (cell_index, cell) in enumerate(notebook["cells"]):
            if cell["cell_type"] == "code":
                original_cell_sources[cell_index] = copy.deepcopy(cell["source"])
                for (line_index, append) in enumerate(cell["metadata"].get("append_to_source", [])):
                    if line_index < len(cell["source"]):
                        if append != "":
                            cell["source"][line_index] = cell["source"][line_index].rstrip() + " " + append + "\n"
                    else:
                        # More appends than source lines: grow the cell
                        cell["source"][-1] += "\n"
                        cell["source"].append(append + "\n")
        with open(notebook_path, "w") as f:
            json.dump(notebook, f, indent=1, sort_keys=True)
            f.write("\n")

        # Remove git-ignored leftovers next to the notebook so every run starts clean
        subprocess.run(
            ["git", "clean", "-fXd", os.path.dirname(notebook_path)],
            check=True,
            capture_output=True,
        )
        try:
            subprocess.run(
                ["jupyter", "nbconvert", "--to", "notebook", "--execute", "--inplace", "--log-level=WARN", notebook_path],
                check=True,
                capture_output=True,
            )
        except subprocess.CalledProcessError as e:
            print_title(f"{notebook_path}: FAILED", '-')
            print(e.stdout.decode())
            print(e.stderr.decode())
            return False
        finally:
            # Reduce git diff: restore original sources and drop volatile
            # per-execution metadata, on success and failure alike.
            with open(notebook_path) as f:
                notebook = json.load(f)
            for (cell_index, cell) in enumerate(notebook["cells"]):
                cell["metadata"].pop("execution", None)
                if cell["cell_type"] == "code":
                    cell["source"] = original_cell_sources[cell_index]
            with open(notebook_path, "w") as f:
                json.dump(notebook, f, indent=1, sort_keys=True)
                f.write("\n")

        print_title(f"{notebook_path}: OK", '-')
        return True

    jobs = []

    for notebook_path in sorted(glob.glob("**/*.ipynb", recursive=True)):
        if skip_long and os.path.isfile(os.path.join(os.path.dirname(notebook_path), "is-long")):
            print_title(f"{notebook_path}: SKIPPED (is long)", '-')
            continue

        if forbid_gpu and os.path.isfile(os.path.join(os.path.dirname(notebook_path), "uses-gpu")):
            print_title(f"{notebook_path}: SKIPPED (uses GPU)", '-')
            continue

        jobs.append(joblib.delayed(run_notebook)(notebook_path))

    # Leave one CPU free for the notebooks' own subprocesses
    results = joblib.Parallel(n_jobs=multiprocessing.cpu_count() - 1)(jobs)

    if not all(results):
        print("Some notebooks FAILED")
        exit(1)


def update_templates():
    """Render every '*.tmpl' Jinja2 template found in the repository.

    Templates are loaded from 'doc-sources' and rendered next to themselves,
    minus the '.tmpl' suffix. All notebooks in the repository are exposed to
    the templates through the 'notebooks' global (path -> parsed JSON), and
    generated .rst files get a do-not-edit warning header.
    """
    environment = jinja2.Environment(
        loader=jinja2.FileSystemLoader("doc-sources"),
        keep_trailing_newline=True,
    )

    # Make every notebook's parsed content available to the templates
    notebooks = {}
    for notebook_path in glob.glob("**/*.ipynb", recursive=True):
        with open(notebook_path) as notebook_file:
            notebooks[notebook_path] = json.load(notebook_file)
    environment.globals["notebooks"] = notebooks

    for template_path in glob.glob("**/*.tmpl", recursive=True):
        output_path = template_path[:-len(".tmpl")]
        print(template_path, "->", output_path)
        template = environment.get_template(os.path.basename(template_path))
        with open(output_path, "w") as output_file:
            if output_path.endswith(".rst"):
                output_file.write(f".. WARNING: this file is generated from '{template_path}'. MANUAL EDITS WILL BE LOST.\n\n")
            else:
                # Only .rst outputs are supported for now; fail loudly on anything else
                assert False, "Unknown extension for warning comment"
            output_file.write(template.render())


# Script entry point: run the click command only when executed directly.
if __name__ == "__main__":
    main()
22 changes: 15 additions & 7 deletions development/publish.py
Original file line number Diff line number Diff line change
@@ -1,7 +1,6 @@
# Copyright 2023 Vincent Jacques

from __future__ import annotations
import glob
import subprocess

import click
Expand All @@ -12,13 +11,15 @@

@click.command()
@click.argument("level", type=click.Choice(["patch", "minor", "major"]))
@click.option("--dry-run", is_flag=True)
def main(level, dry_run):
    """Release a new version of the project.

    Bumps the version at the given LEVEL, updates the changelog, and builds
    the Sphinx documentation. Unless --dry-run is given, then publishes the
    release and prepares the next development version.

    NOTE(review): the scraped diff interleaved the old unconditional
    publish()/prepare_next_version() calls with the new dry-run-guarded ones;
    the unconditional duplicates were diff residue and are removed here —
    otherwise --dry-run would still publish.
    """
    check_cleanliness()
    new_version = bump_version(level)
    update_changelog(new_version)
    build_sphinx_documentation()
    if not dry_run:
        # Irreversible steps are skipped on a dry run
        publish(new_version)
        prepare_next_version(new_version)


def check_cleanliness():
Expand Down Expand Up @@ -137,12 +138,19 @@ def write_version(old_version, new_version):
else:
f.write(line)

for file_name in glob.glob("doc-sources/*.rst"):
if file_name == "doc-sources/changelog.rst":
for file_name in subprocess.run(["git", "ls-files"], stdout=subprocess.PIPE, universal_newlines=True, check=True).stdout.splitlines():
if file_name.endswith(".png"):
continue
if file_name.startswith("docs/"):
continue
if file_name.startswith("vendored/"):
continue
if file_name.startswith("lincs/liblincs/vendored/"):
continue

with open(file_name) as f:
lines = f.readlines()
lines = [line.replace(old_version, new_version) for line in lines]
lines = [line.replace(f"(with lincs version {old_version})", f"(with lincs version {new_version})") for line in lines]
with open(file_name, "w") as f:
for line in lines:
f.write(line)
Expand Down
Binary file modified doc-sources/alternatives.png
Loading
Sorry, something went wrong. Reload?
Sorry, we cannot display this file.
Sorry, this file is invalid so it cannot be displayed.
Binary file modified doc-sources/concept-example-model.png
Loading
Sorry, something went wrong. Reload?
Sorry, we cannot display this file.
Sorry, this file is invalid so it cannot be displayed.
Loading

0 comments on commit 109936d

Please sign in to comment.