Skip to content

Commit

Permalink
Modernize unit tests for get_vfpa_hadcp worker
Browse files Browse the repository at this point in the history
Modified the unit tests in test_get_vfpa_hadcp.py to use pytest fixtures and
monkeypatching for more accurate and isolated tests. This includes new mocks
for make_hour_dataset and write_netcdf, as well as changes in logging and
file path management. Particular attention was given to ensuring accurate
capture of log messages.
  • Loading branch information
douglatornell committed Jul 16, 2024
1 parent 858813a commit 6e7b345
Showing 1 changed file with 108 additions and 23 deletions.
131 changes: 108 additions & 23 deletions tests/workers/test_get_vfpa_hadcp.py
Original file line number Diff line number Diff line change
Expand Up @@ -19,14 +19,15 @@
"""Unit tests for SalishSeaCast get_vfpa_hadcp worker.
"""
import logging
import os
import textwrap
from pathlib import Path
from types import SimpleNamespace
from unittest.mock import patch

import arrow
import nemo_nowcast
import pytest
import xarray

from nowcast.workers import get_vfpa_hadcp

Expand Down Expand Up @@ -124,49 +125,133 @@ def test_failure(self, caplog):
assert msg_type == "failure"


@patch("nowcast.workers.get_vfpa_hadcp.logger", autospec=True)
@patch("nowcast.workers.get_vfpa_hadcp._make_hour_dataset", autospec=True)
class TestGetVFPA_HADCP:
"""Unit test for get_vfpa_hadcp() function."""

def test_checklist_create(self, m_mk_hr_ds, m_logger, config):
@staticmethod
@pytest.fixture
def mock_make_hour_dataset(monkeypatch):

def _mock_make_hour_dataset(csv_dir, utc_start_hr, place):
return xarray.Dataset()

monkeypatch.setattr(
get_vfpa_hadcp, "_make_hour_dataset", _mock_make_hour_dataset
)

@staticmethod
@pytest.fixture
def mock_write_netcdf(monkeypatch):
def _mock_write_netcdf(ds, nc_filepath):
return

monkeypatch.setattr(get_vfpa_hadcp, "_write_netcdf", _mock_write_netcdf)

@pytest.mark.parametrize("nc_file_exists", (True, False))
def test_log_messages(
self,
nc_file_exists,
mock_make_hour_dataset,
mock_write_netcdf,
config,
caplog,
tmp_path,
monkeypatch,
):
dest_dir = tmp_path
monkeypatch.setitem(
config["observations"]["hadcp data"], "dest dir", os.fspath(dest_dir)
)
nc_filepath = dest_dir / "VFPA_2ND_NARROWS_HADCP_2s_202407.nc"
if nc_file_exists:
nc_filepath.write_bytes(b"")
parsed_args = SimpleNamespace(data_date=arrow.get("2024-07-13"))
caplog.set_level(logging.DEBUG)
get_vfpa_hadcp.get_vfpa_hadcp(parsed_args, config)
assert caplog.records[0].levelname == "INFO"
expected = (
"processing VFPA HADCP data from 2nd Narrows Rail Bridge for 2024-07-13"
)
assert caplog.messages[0] == expected
if not nc_file_exists:
assert caplog.records[1].levelname == "INFO"
assert caplog.records[1].message.startswith("created")
assert caplog.messages[1].endswith("VFPA_2ND_NARROWS_HADCP_2s_202407.nc")
for rec_num, hr in zip(range(2, 24), range(1, 23)):
assert caplog.records[rec_num].levelname == "DEBUG"
expected = f"no data for 2024-07-13 {hr:02d}:00 hour"
assert caplog.messages[rec_num] == expected
assert caplog.records[25].levelname == "INFO"
expected = f"added VFPA HADCP data from 2nd Narrows Rail Bridge for 2024-07-13 to {nc_filepath}"
assert caplog.messages[25] == expected

def test_checklist_create(
self,
mock_make_hour_dataset,
mock_write_netcdf,
config,
caplog,
tmp_path,
monkeypatch,
):
dest_dir = tmp_path
monkeypatch.setitem(
config["observations"]["hadcp data"], "dest dir", os.fspath(dest_dir)
)
nc_filepath = dest_dir / "VFPA_2ND_NARROWS_HADCP_2s_201810.nc"
parsed_args = SimpleNamespace(data_date=arrow.get("2018-10-01"))
caplog.set_level(logging.DEBUG)
checklist = get_vfpa_hadcp.get_vfpa_hadcp(parsed_args, config)
expected = {
"created": "opp/obs/AISDATA/netcdf/VFPA_2ND_NARROWS_HADCP_2s_201810.nc",
"created": f"{nc_filepath}",
"UTC date": "2018-10-01",
}
assert checklist == expected

@patch(
"nowcast.workers.get_vfpa_hadcp.Path.exists", return_value=True, autospec=True
)
@patch("nowcast.workers.get_vfpa_hadcp.xarray", autospec=True)
def test_checklist_extend(self, m_xarray, m_exists, m_mk_hr_ds, m_logger, config):
def test_checklist_extend(
self,
mock_make_hour_dataset,
mock_write_netcdf,
config,
caplog,
tmp_path,
monkeypatch,
):
dest_dir = tmp_path
monkeypatch.setitem(
config["observations"]["hadcp data"], "dest dir", os.fspath(dest_dir)
)
nc_filepath = dest_dir / "VFPA_2ND_NARROWS_HADCP_2s_201810.nc"
xarray.DataArray().to_netcdf(nc_filepath)
parsed_args = SimpleNamespace(data_date=arrow.get("2018-10-21"))
caplog.set_level(logging.DEBUG)
checklist = get_vfpa_hadcp.get_vfpa_hadcp(parsed_args, config)
expected = {
"extended": "opp/obs/AISDATA/netcdf/VFPA_2ND_NARROWS_HADCP_2s_201810.nc",
"extended": f"{nc_filepath}",
"UTC date": "2018-10-21",
}
assert checklist == expected

@pytest.mark.parametrize("ds_exists", (True, False))
@patch("nowcast.workers.get_vfpa_hadcp.xarray", autospec=True)
def test_checklist_missing_data(
self, m_xarray, m_mk_hr_ds, m_logger, ds_exists, config
self,
mock_make_hour_dataset,
mock_write_netcdf,
config,
caplog,
tmp_path,
monkeypatch,
):
parsed_args = SimpleNamespace(data_date=arrow.get("2018-12-23"))
m_mk_hr_ds.side_effect = ValueError
p_exists = patch(
"nowcast.workers.get_vfpa_hadcp.Path.exists",
return_value=ds_exists,
autospec=True,
dest_dir = tmp_path
monkeypatch.setitem(
config["observations"]["hadcp data"], "dest dir", os.fspath(dest_dir)
)
with p_exists:
checklist = get_vfpa_hadcp.get_vfpa_hadcp(parsed_args, config)
nc_filepath = dest_dir / "VFPA_2ND_NARROWS_HADCP_2s_201812.nc"
nc_filepath.write_bytes(b"")
caplog.set_level(logging.DEBUG)
parsed_args = SimpleNamespace(data_date=arrow.get("2018-12-23"))
checklist = get_vfpa_hadcp.get_vfpa_hadcp(parsed_args, config)
expected = {
"missing data": "opp/obs/AISDATA/netcdf/VFPA_2ND_NARROWS_HADCP_2s_201812.nc",
"missing data": f"{nc_filepath}",
"UTC date": "2018-12-23",
}
assert checklist == expected

0 comments on commit 6e7b345

Please sign in to comment.