Commit

[refactor] Extract duplicated arrange to make DRY
ftnext committed Jul 29, 2024
1 parent fe39ccc commit 106a911
Showing 1 changed file with 16 additions and 23 deletions.
tests/test_show_external_link_icon.py (16 additions & 23 deletions)
@@ -1,16 +1,12 @@
 import shutil
 from pathlib import Path
 
+import pytest
 from bs4 import BeautifulSoup
 
 
-def assert_is_external(reference, expected_url: str) -> None:
-    assert reference["href"] == expected_url
-    assert reference["target"] == "_blank"
-    assert reference["rel"] == ["noopener", "noreferrer"]
-
-
-def test_see_external_link_icon(
+@pytest.fixture
+def parsed_built_html(
     make_app,
     sphinx_test_tempdir: Path,
     rootdir: Path,
@@ -24,30 +20,27 @@ def test_see_external_link_icon(
     app.build()
 
     html = (app.outdir / "index.html").read_text()
-    soup = BeautifulSoup(html, "html.parser")
-    references = soup.find_all("a", {"class": "reference"})
+    return BeautifulSoup(html, "html.parser")
+
+
+def assert_is_external(reference, expected_url: str) -> None:
+    assert reference["href"] == expected_url
+    assert reference["target"] == "_blank"
+    assert reference["rel"] == ["noopener", "noreferrer"]
+
+
+def test_see_external_link_icon(parsed_built_html):
+    references = parsed_built_html.find_all("a", {"class": "reference"})
 
     ref = references[0]
     assert ref.text == "https://pypi.org/project/sphinx-new-tab-link/ "
     assert ref.svg
     assert_is_external(ref, "https://pypi.org/project/sphinx-new-tab-link/")
 
 
-def test_external_link_icon_as_image_target(
-    make_app, sphinx_test_tempdir: Path, rootdir: Path
-):
+def test_external_link_icon_as_image_target(parsed_built_html):
     # https://github.com/ftnext/sphinx-new-tab-link/issues/16
-    srcdir = sphinx_test_tempdir / "external-link-icon"
-    if not srcdir.exists():
-        testroot_path = rootdir / "test-external-link-icon"
-        shutil.copytree(testroot_path, srcdir)
-
-    app = make_app("html", srcdir=srcdir)
-    app.build()
-
-    html = (app.outdir / "index.html").read_text()
-    soup = BeautifulSoup(html, "html.parser")
-    references = soup.find_all("a", {"class": "reference"})
+    references = parsed_built_html.find_all("a", {"class": "reference"})
 
     ref = references[1]
     assert_is_external(
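
Note: the body of the extracted parsed_built_html fixture between the two hunks (original lines 17-23) is collapsed in this view. Below is a minimal sketch of what the full fixture presumably looks like, assuming the srcdir setup that was deleted from test_external_link_icon_as_image_target simply moved into the fixture, and that make_app, sphinx_test_tempdir, and rootdir come from Sphinx's pytest testing fixtures and the project's conftest; only the signature and the final return are confirmed by the diff.

import shutil
from pathlib import Path

import pytest
from bs4 import BeautifulSoup


@pytest.fixture
def parsed_built_html(
    make_app,
    sphinx_test_tempdir: Path,
    rootdir: Path,
):
    # Assumed body for the collapsed region: copy the test project into a
    # temporary srcdir once, as the deleted per-test setup used to do.
    srcdir = sphinx_test_tempdir / "external-link-icon"
    if not srcdir.exists():
        testroot_path = rootdir / "test-external-link-icon"
        shutil.copytree(testroot_path, srcdir)

    # Build the HTML output and return the parsed page, so every test starts
    # from the same BeautifulSoup object (this part is visible in the diff).
    app = make_app("html", srcdir=srcdir)
    app.build()

    html = (app.outdir / "index.html").read_text()
    return BeautifulSoup(html, "html.parser")

With the arrange step centralized in one fixture, each test body is reduced to act and assert; pytest injects the built-and-parsed page because the tests name parsed_built_html as a parameter.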
