Skip to content

Commit

Permalink
Merge pull request #444 from wri/develop
Browse files Browse the repository at this point in the history
Develop -> master merge of package upgrades, improved API key error
  • Loading branch information
dmannarino authored Nov 27, 2023
2 parents be6070e + 5881275 commit 1640584
Show file tree
Hide file tree
Showing 10 changed files with 1,909 additions and 1,691 deletions.
17 changes: 14 additions & 3 deletions .dockerignore
Original file line number Diff line number Diff line change
Expand Up @@ -3,18 +3,29 @@
*__pycache__*
*.idea*

#MyPy
# MyPy
.mypy_cache/*

# Docker Files
docker-compose.dev.yml
docker-compose.prod.yml
docker-compose.test.yml

# GIT
.git

# Ignore Files
.gitignore

# Terraform
# Mac stuff
*.DS_Store

# Test stuff
tests/cobertura.xml
tests_v2/cobertura.xml

# Terraform stuff
*terraform*

# Virtual Environments
.venv*
.venv*
10 changes: 7 additions & 3 deletions .gitignore
Original file line number Diff line number Diff line change
Expand Up @@ -11,13 +11,17 @@

# Environment Files
/.env
.python-version

# Mac stuff
*.DS_Store

# Test stuff
tests/cobertura.xml
tests_v2/cobertura.xml

# TF stuff
# Terraform stuff
terraform/*

# Virtualenvs
.venv*
# Virtual Environments
.venv*
3,516 changes: 1,853 additions & 1,663 deletions Pipfile.lock

Large diffs are not rendered by default.

10 changes: 10 additions & 0 deletions app/routes/authentication/authentication.py
Original file line number Diff line number Diff line change
Expand Up @@ -83,6 +83,16 @@ async def create_api_key(
detail="Domain name did not match the request origin or referrer.",
)

    # Give a good error code/message if the user is specifying an alias that
    # already exists for another of their API keys.
prev_keys: List[ORMApiKey] = await api_keys.get_api_keys_from_user(user_id=user_id)
for key in prev_keys:
if key.alias == api_key_data.alias:
raise HTTPException(
status_code=409,
detail="Key with specified alias already exists; use a different alias"
)

row: ORMApiKey = await api_keys.create_api_key(user_id=user_id, **input_data)

is_internal = api_key_is_internal(
Expand Down
5 changes: 2 additions & 3 deletions batch/pixetl.dockerfile
Original file line number Diff line number Diff line change
@@ -1,5 +1,4 @@
FROM globalforestwatch/pixetl:v1.7.4

FROM globalforestwatch/pixetl:v1.7.5

# Copy scripts
COPY ./batch/scripts/ /opt/scripts/
Expand All @@ -19,4 +18,4 @@ WORKDIR /tmp
ENV LC_ALL=C.UTF-8
ENV LANG=C.UTF-8

ENTRYPOINT ["/opt/scripts/report_status.sh"]
ENTRYPOINT ["/opt/scripts/report_status.sh"]
1 change: 1 addition & 0 deletions tests/conftest.py
Original file line number Diff line number Diff line change
Expand Up @@ -229,6 +229,7 @@ def client():
except Exception as ex:
print(f"Exception deleting asset {asset['asset_id']}: {ex}")
try:
# FIXME: Mock-out cache invalidation function
_ = client.delete(f"/dataset/{ds_id}/{version}")
except Exception as ex:
print(f"Exception deleting version {version}: {ex}")
Expand Down
21 changes: 10 additions & 11 deletions tests/routes/datasets/test_assets.py
Original file line number Diff line number Diff line change
Expand Up @@ -5,6 +5,7 @@
import httpx
import pytest
from botocore.exceptions import ClientError
from httpx import AsyncClient

from app.application import ContextEngine
from app.crud import tasks
Expand Down Expand Up @@ -255,21 +256,22 @@ async def test_auxiliary_raster_asset(async_client, httpd, logs):


@pytest.mark.asyncio
async def test_auxiliary_vector_asset(async_client, batch_client, httpd):
async def test_rasterize_vector_asset(async_client: AsyncClient, batch_client, httpd):
""""""
_, logs = batch_client

# Add a dataset, version, and default asset
dataset = "test_vector"
version = "v1.1.1"
grid = "10/40000"

pixetl_output_files = [
f"{dataset}/{version}/raster/epsg-4326/90/27008/gfw_fid/gdal-geotiff/extent.geojson",
f"{dataset}/{version}/raster/epsg-4326/90/27008/gfw_fid/geotiff/extent.geojson",
f"{dataset}/{version}/raster/epsg-4326/90/27008/gfw_fid/gdal-geotiff/tiles.geojson",
f"{dataset}/{version}/raster/epsg-4326/90/27008/gfw_fid/geotiff/tiles.geojson",
f"{dataset}/{version}/raster/epsg-4326/90/27008/gfw_fid/gdal-geotiff/90N_000E.tif",
f"{dataset}/{version}/raster/epsg-4326/90/27008/gfw_fid/geotiff/90N_000E.tif",
f"{dataset}/{version}/raster/epsg-4326/{grid}/gfw_fid/gdal-geotiff/extent.geojson",
f"{dataset}/{version}/raster/epsg-4326/{grid}/gfw_fid/geotiff/extent.geojson",
f"{dataset}/{version}/raster/epsg-4326/{grid}/gfw_fid/gdal-geotiff/tiles.geojson",
f"{dataset}/{version}/raster/epsg-4326/{grid}/gfw_fid/geotiff/tiles.geojson",
f"{dataset}/{version}/raster/epsg-4326/{grid}/gfw_fid/gdal-geotiff/60N_010E.tif",
f"{dataset}/{version}/raster/epsg-4326/{grid}/gfw_fid/geotiff/60N_010E.tif",
]

for key in pixetl_output_files:
Expand All @@ -294,15 +296,12 @@ async def test_auxiliary_vector_asset(async_client, batch_client, httpd):
# vector asset
asset_payload = {
"asset_type": "Raster tile set",
"asset_uri": "http://www.osnews.com",
"is_managed": True,
"creation_options": {
"data_type": FAKE_INT_DATA_PARAMS["dtype"],
"pixel_meaning": "gfw_fid",
"grid": "90/27008",
"grid": grid,
"resampling": "nearest",
"overwrite": True,
"subset": "90N_000E",
},
}

Expand Down
3 changes: 2 additions & 1 deletion tests_v2/unit/app/crud/test_api_keys.py
Original file line number Diff line number Diff line change
Expand Up @@ -5,6 +5,7 @@
import asyncpg
import boto3
import pytest
import pytest_asyncio
from moto import mock_apigateway

from app.application import ContextEngine
Expand All @@ -31,7 +32,7 @@
)


@pytest.fixture(autouse=True)
@pytest_asyncio.fixture(autouse=True)
@pytest.mark.asyncio
async def delete_api_keys():
yield
Expand Down
6 changes: 3 additions & 3 deletions tests_v2/unit/app/routes/datasets/test_query.py
Original file line number Diff line number Diff line change
@@ -1,5 +1,6 @@
from typing import Tuple
from unittest.mock import Mock
from urllib.parse import parse_qsl, urlparse

import pytest
from _pytest.monkeypatch import MonkeyPatch
Expand Down Expand Up @@ -235,11 +236,10 @@ async def test_redirect_get_query(
params=params,
follow_redirects=False,
)

assert response.status_code == 308
assert (
response.headers["location"]
== f"/dataset/{dataset_name}/{version_name}/query/json?{response.request.url.query.decode('utf-8')}"
parse_qsl(urlparse(response.headers["location"]).query, strict_parsing=True)
== parse_qsl(urlparse(f"/dataset/{dataset_name}/{version_name}/query/json?{response.request.url.query.decode('utf-8')}").query, strict_parsing=True)
)


Expand Down
11 changes: 7 additions & 4 deletions tests_v2/unit/app/routes/datasets/test_version.py
Original file line number Diff line number Diff line change
Expand Up @@ -2,12 +2,12 @@
from _pytest.monkeypatch import MonkeyPatch
from httpx import AsyncClient

from tests_v2.fixtures.metadata.version import VERSION_METADATA
from app.routes.datasets import versions
from app.tasks import batch
from tests_v2.fixtures.creation_options.versions import VECTOR_SOURCE_CREATION_OPTIONS
from tests_v2.fixtures.metadata.version import VERSION_METADATA
from tests_v2.unit.app.routes.utils import assert_jsend
from tests_v2.utils import BatchJobMock, void_coroutine
from tests_v2.utils import BatchJobMock, void_function


@pytest.mark.asyncio
Expand Down Expand Up @@ -69,6 +69,9 @@ async def test_delete_version_metadata(
f"/dataset/{dataset_name}/{version_name}/metadata",
)
assert resp.status_code == 404


@pytest.mark.asyncio
async def test_create_version_bare_minimum(
async_client: AsyncClient, generic_dataset, monkeypatch: MonkeyPatch
):
Expand All @@ -78,7 +81,7 @@ async def test_create_version_bare_minimum(
# patch all functions which reach out to external services
batch_job_mock = BatchJobMock()
monkeypatch.setattr(batch, "submit_batch_job", batch_job_mock.submit_batch_job)
monkeypatch.setattr(versions, "_verify_source_file_access", void_coroutine)
monkeypatch.setattr(versions, "_verify_source_file_access", void_function)

payload = {"creation_options": VECTOR_SOURCE_CREATION_OPTIONS}

Expand All @@ -99,7 +102,7 @@ async def test_append_version_bare_minimum(
# patch all functions which reach out to external services
batch_job_mock = BatchJobMock()
monkeypatch.setattr(batch, "submit_batch_job", batch_job_mock.submit_batch_job)
monkeypatch.setattr(versions, "_verify_source_file_access", void_coroutine)
monkeypatch.setattr(versions, "_verify_source_file_access", void_function)

payload = {"source_uri": ["s3://some_bucket/test.shp.zip"]}

Expand Down

0 comments on commit 1640584

Please sign in to comment.