Merge pull request #790 from CSCfi/feature/update_typing
Feature/update typing
csc-jm authored Sep 6, 2023
2 parents bf1b3f5 + f23caf0 commit 58cbdff
Showing 37 changed files with 404 additions and 319 deletions.
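The change applies one pattern throughout: typing.Dict, List, Tuple and Union are replaced with the builtin generics of PEP 585 and the X | Y union syntax of PEP 604. Because the new spellings also appear in runtime expressions (type aliases, isinstance checks), the code targets Python 3.10 or later rather than relying on a __future__ import. A minimal before/after sketch of the pattern (illustrative, not taken from the diff):

# Before: from typing import Dict, List, Optional, Union
#         def first_name(users: List[Dict[str, Union[int, str]]]) -> Optional[str]: ...

# After: builtin generics and | unions, no typing imports needed
def first_name(users: list[dict[str, int | str]]) -> str | None:
    """Return the 'name' field of the first user, if it is a string."""
    if not users:
        return None
    name = users[0].get("name")
    return name if isinstance(name, str) else None

print(first_name([{"id": 1, "name": "demo"}]))  # demo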
5 changes: 3 additions & 2 deletions docs/conf.py
@@ -1,7 +1,8 @@
"""Configuration file for the Sphinx documentation builder."""

import datetime
from typing import Callable

from sphinx.application import Sphinx

# -- Project information -----------------------------------------------------

@@ -62,7 +63,7 @@
html_static_path = ["_static"]


def setup(app: Callable) -> None:
def setup(app: Sphinx) -> None:
"""Add custom stylesheet."""
app.add_css_file("style.css")

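Typing the hook parameter as Sphinx instead of a bare Callable is the precise choice: Sphinx passes its application object into setup(), so the add_css_file call can now be checked against the real API. The full hook as it reads after this change:

from sphinx.application import Sphinx

def setup(app: Sphinx) -> None:
    """Add custom stylesheet."""
    # add_css_file registers an extra stylesheet with Sphinx's HTML builder.
    app.add_css_file("style.css")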
18 changes: 9 additions & 9 deletions metadata_backend/api/auth.py
@@ -1,14 +1,14 @@
"""Handle Access for request and OIDC workflow."""

import time
from typing import Dict, List, Union
from typing import Any

import aiohttp_session
from aiohttp import web
from aiohttp.client_exceptions import ClientConnectorError, InvalidURL
from aiohttp.web import Request, Response
from oidcrp.exception import OidcServiceError
from oidcrp.rp_handler import RPHandler
from oidcrp.exception import OidcServiceError # type: ignore[import, unused-ignore]
from oidcrp.rp_handler import RPHandler # type: ignore[import, unused-ignore]
from yarl import URL

from ..conf.conf import aai_config
@@ -19,15 +19,15 @@

# Type aliases
# ProjectList is a list of projects and their origins
ProjectList = List[Dict[str, str]]
ProjectList = list[dict[str, str]]
# UserData contains user profile from AAI userinfo, such as name, username and projects
UserData = Dict[str, Union[ProjectList, str]]
UserData = dict[str, ProjectList | str]


class AccessHandler:
"""Handler for user access methods."""

def __init__(self, aai: Dict) -> None:
def __init__(self, aai: dict[str, Any]) -> None:
"""Define AAI variables and paths.
:param aai: dictionary with AAI specific config
@@ -249,7 +249,7 @@ async def _set_user(
browser_session["user_info"] = user_id
return user_id

async def _create_user_data(self, userinfo: Dict) -> UserData:
async def _create_user_data(self, userinfo: dict[str, Any]) -> UserData:
"""Parse user profile data from userinfo endpoint response.
:param userinfo: dict from userinfo containing user profile
@@ -277,7 +277,7 @@ async def _create_user_data(self, userinfo: Dict) -> UserData:

return user_data

async def _get_projects_from_userinfo(self, userinfo: Dict) -> ProjectList:
async def _get_projects_from_userinfo(self, userinfo: dict[str, Any]) -> ProjectList:
"""Parse projects and groups from userinfo endpoint response.
:param userinfo: dict from userinfo containing user profile
@@ -322,7 +322,7 @@ def __init__(self) -> None:
"""Get AAI credentials from config."""
super().__init__(base_url=URL(aai_config["oidc_url"].rstrip("/")))

async def _healtcheck(self) -> Dict:
async def _healtcheck(self) -> dict[str, str]:
"""Check AAI service hearthbeat.
This will return a JSON with well-known OIDC endpoints.
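Two details in this file go beyond the mechanical rename. The type: ignore[import, unused-ignore] comments silence mypy's missing-stubs error for the untyped oidcrp package, and the extra unused-ignore code stops warn-unused-ignores from flagging the comment in environments where stubs do exist. The aliases ProjectList and UserData, meanwhile, are now plain runtime expressions evaluated at import time, which is part of why the new syntax requires Python 3.10+. A sketch of how the aliases compose (the dictionary keys are illustrative assumptions, not taken from this module):

ProjectList = list[dict[str, str]]
UserData = dict[str, ProjectList | str]

def build_user_data(name: str, projects: ProjectList) -> UserData:
    # Key names here are hypothetical; auth.py derives the real ones
    # from the AAI userinfo response.
    return {"user_name": name, "projects": projects}

profile: UserData = build_user_data("alice", [{"project_number": "1000", "project_origin": "aai"}])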
16 changes: 8 additions & 8 deletions metadata_backend/api/handlers/common.py
@@ -1,11 +1,11 @@
"""Functions shared between handlers."""
import csv
import string
from typing import Any, Dict, List, Tuple
from typing import Any

from aiohttp import BodyPartReader, MultipartReader, hdrs, web
from aiohttp.web import Request
from defusedxml.ElementTree import ParseError
from defusedxml.ElementTree import ParseError # type: ignore[import, unused-ignore]
from xmlschema import XMLResource

from ...conf.conf import schema_types
@@ -15,7 +15,7 @@

async def multipart_content(
req: Request, extract_one: bool = False, expect_xml: bool = False
) -> Tuple[List[Tuple[Any, str, str]], str]:
) -> tuple[list[tuple[Any, str, str]], str]:
"""Get content(s) and schema type(s) of a multipart request (from either csv or xml format).
Note: for multiple files support check: https://docs.aiohttp.org/en/stable/multipart.html#hacking-multipart
@@ -26,8 +26,8 @@ async def multipart_content(
:raises: HTTPBadRequest for multiple different reasons
:returns: content and schema type for each uploaded file and file type of the upload
"""
xml_files: List[Tuple[str, str, str]] = []
csv_files: List[Tuple[Dict, str, str]] = []
xml_files: list[tuple[str, str, str]] = []
csv_files: list[tuple[dict[str, Any], str, str]] = []
try:
reader = await req.multipart()
except AssertionError as exc:
@@ -75,7 +75,7 @@ async def multipart_content(
return _get_content_with_type(xml_files, csv_files)


async def _extract_upload(part: BodyPartReader) -> Tuple[str, str]:
async def _extract_upload(part: BodyPartReader) -> tuple[str, str]:
"""Extract a submitted file from upload.
:param part: Multipart reader for single body part
@@ -137,8 +137,8 @@ def _check_xml(content: str) -> bool:


def _get_content_with_type(
xml_files: List[Tuple[str, str, str]], csv_files: List[Tuple[Dict, str, str]]
) -> Tuple[List[Tuple[Any, str, str]], str]:
xml_files: list[tuple[str, str, str]], csv_files: list[tuple[dict[str, Any], str, str]]
) -> tuple[list[tuple[Any, str, str]], str]:
"""Return either list of XML or CSV files with the file type info.
:param xml_files: List of xml contents with schema types
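The new annotation tuple[list[tuple[Any, str, str]], str] spells out the shape: a list of (content, schema type, filename) triples plus the overall file type of the upload. Content is a str for XML and a dict for parsed CSV, hence the Any. A simplified sketch of the dispatch _get_content_with_type performs (control flow and error text are assumptions, not the module's code):

from typing import Any

def pick_content(
    xml_files: list[tuple[str, str, str]],
    csv_files: list[tuple[dict[str, Any], str, str]],
) -> tuple[list[tuple[Any, str, str]], str]:
    """Return whichever file list was populated, tagged with its format."""
    if xml_files and csv_files:
        raise ValueError("an upload must be all XML or all CSV, not a mix")
    # Both tuple[str, str, str] and tuple[dict[str, Any], str, str] are
    # assignable to tuple[Any, str, str], so either branch type-checks.
    return (xml_files, "xml") if xml_files else (csv_files, "csv")

files, ftype = pick_content([("<STUDY/>", "study", "study.xml")], [])  # ftype == "xml"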
30 changes: 17 additions & 13 deletions metadata_backend/api/handlers/object.py
@@ -1,7 +1,7 @@
"""Handle HTTP methods for server."""
from datetime import datetime
from math import ceil
from typing import Any, Dict, List, Tuple, Union
from typing import Any

import ujson
from aiohttp import web
@@ -37,7 +37,7 @@ async def _handle_query(self, req: Request) -> Response:
per_page = self._get_page_param(req, "per_page", 10)
db_client = req.app["db_client"]

filter_list: List = [] # DEPRECATED, users don't own submissions anymore
filter_list: list[Any] = [] # DEPRECATED, users don't own submissions anymore
data, page_num, page_size, total_objects = await ObjectOperator(db_client).query_metadata_database(
collection, req.query, page, per_page, filter_list
)
@@ -135,8 +135,8 @@ async def post_object(self, req: Request) -> Response:
LOG.info(reason)
raise web.HTTPBadRequest(reason=reason)

content: Union[Dict[str, Any], str, List[Tuple[Any, str, str]]]
operator: Union[ObjectOperator, XMLObjectOperator]
content: dict[str, Any] | str | list[tuple[Any, str, str]]
operator: ObjectOperator | XMLObjectOperator
if req.content_type == "multipart/form-data":
_only_xml = schema_type not in _allowed_csv
files, cont_type = await multipart_content(req, extract_one=True, expect_xml=_only_xml)
@@ -174,9 +174,9 @@ async def post_object(self, req: Request) -> Response:

# Add a new metadata object or multiple objects if multiple were extracted
url = f"{req.scheme}://{req.host}{req.path}"
data: Union[List[Dict[str, str]], Dict[str, str]]
objects: List[Tuple[Dict[str, Any], str]] = []
if isinstance(content, List):
data: list[dict[str, str]] | dict[str, str]
objects: list[tuple[dict[str, Any], str]] = []
if isinstance(content, list):
LOG.debug("Inserting multiple objects for collection: %r.", schema_type)
if is_single_instance and len(content) > 1:
reason = f"Submission of type {workflow.name} can only have one '{schema_type}'. Cannot add multiple."
@@ -296,8 +296,8 @@ async def put_object(self, req: Request) -> Response:
collection = f"draft-{schema_type}" if req.path.startswith(f"{API_PREFIX}/drafts") else schema_type

db_client = req.app["db_client"]
content: Union[Dict, str]
operator: Union[ObjectOperator, XMLObjectOperator]
content: dict[str, Any] | str
operator: ObjectOperator | XMLObjectOperator
filename = ""
if req.content_type == "multipart/form-data":
files, _ = await multipart_content(req, extract_one=True, expect_xml=True)
@@ -347,7 +347,7 @@ async def patch_object(self, req: Request) -> Response:
collection = f"draft-{schema_type}" if req.path.startswith(f"{API_PREFIX}/drafts") else schema_type

db_client = req.app["db_client"]
operator: Union[ObjectOperator, XMLObjectOperator]
operator: ObjectOperator | XMLObjectOperator
if req.content_type == "multipart/form-data":
reason = "XML patching is not possible."
raise web.HTTPUnsupportedMediaType(reason=reason)
@@ -380,7 +380,7 @@ async def patch_object(self, req: Request) -> Response:
LOG.info("PATCH object with accession ID: %r in collection: %r was successful.", accession_id, collection)
return web.Response(body=body, status=200, content_type="application/json")

def _prepare_submission_patch_new_object(self, schema: str, objects: List, cont_type: str) -> List:
def _prepare_submission_patch_new_object(
self, schema: str, objects: list[Any], cont_type: str
) -> list[dict[str, Any]]:
"""Prepare patch operations list for adding an object or objects to a submission.
:param schema: schema of objects to be added to the submission
@@ -400,7 +402,7 @@ def _prepare_submission_patch_new_object(self, schema: str, objects: List, cont_
path = "/metadataObjects/-"

patch = []
patch_ops: Dict[str, Any] = {}
patch_ops: dict[str, Any] = {}
for obj, filename in objects:
try:
title = obj["descriptor"]["studyTitle"] if schema in ["study", "draft-study"] else obj["title"]
@@ -429,7 +431,9 @@ def _prepare_submission_patch_new_object(self, schema: str, objects: List, cont_

return patch

def _prepare_submission_patch_update_object(self, schema: str, data: Dict, filename: str = "") -> List:
def _prepare_submission_patch_update_object(
self, schema: str, data: dict[str, Any], filename: str = ""
) -> list[dict[str, Any]]:
"""Prepare patch operation for updating object's title in a submission.
:param schema: schema of object to be updated
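Two details here go beyond renaming: isinstance(content, List) becomes isinstance(content, list), the idiomatic runtime check now that the typing aliases are deprecated, and the _prepare_submission_patch_* helpers gain the precise return type list[dict[str, Any]], a natural fit for JSON Patch operations that mix string fields with arbitrary values. A hedged sketch of one such operation, reusing the /metadataObjects/- path visible above (the value keys are assumptions):

from typing import Any

def new_object_patch(schema: str, accession_id: str, title: str) -> list[dict[str, Any]]:
    """Build a JSON Patch that appends one metadata object to a submission."""
    patch_op: dict[str, Any] = {
        "op": "add",
        "path": "/metadataObjects/-",  # "-" appends to the array, per RFC 6902
        "value": {"accessionId": accession_id, "schema": schema, "tags": {"title": title}},
    }
    return [patch_op]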
38 changes: 23 additions & 15 deletions metadata_backend/api/handlers/publish.py
@@ -1,6 +1,6 @@
"""Handle HTTP methods for server."""
from datetime import date, datetime
from typing import Dict, List, Tuple
from typing import Any

import ujson
from aiohttp import web
@@ -20,7 +20,7 @@ class PublishSubmissionAPIHandler(RESTAPIIntegrationHandler):
"""API Handler for publishing submissions."""

@staticmethod
def _make_discovery_url(obj_data: Dict) -> str:
def _make_discovery_url(obj_data: dict[str, Any]) -> str:
"""Make an url that points to a discovery service."""
if "metaxIdentifier" in obj_data:
url = f"{doi_config['discovery_url']}{obj_data['metaxIdentifier']}"
@@ -29,7 +29,9 @@ def _make_discovery_url(obj_data: Dict) -> str:
return url

@staticmethod
def _prepare_datacite_study(study_data: Dict, general_info: Dict, discovery_url: str) -> Dict:
def _prepare_datacite_study(
study_data: dict[str, Any], general_info: dict[str, Any], discovery_url: str
) -> dict[str, Any]:
"""Prepare Study object for publishing.
:param study_data: Study Object read from the database
@@ -99,7 +101,9 @@ def _prepare_datacite_study(study_data: Dict, general_info: Dict, discovery_url:

return study

def _prepare_datacite_dataset(self, study_doi: str, dataset_data: Dict, general_info: Dict) -> Dict:
def _prepare_datacite_dataset(
self, study_doi: str, dataset_data: dict[str, Any], general_info: dict[str, Any]
) -> dict[str, Any]:
"""Prepare Dataset object for publishing.
:param study_doi: Study DOI to link dataset to study at Datacite
@@ -181,7 +185,9 @@ def _prepare_datacite_dataset(self, study_doi: str, dataset_data: Dict, general_

return dataset

async def _prepare_datacite_publication(self, obj_op: ObjectOperator, submission: Dict) -> Tuple[dict, list]:
async def _prepare_datacite_publication(
self, obj_op: ObjectOperator, submission: dict[str, Any]
) -> tuple[dict[str, Any], list[dict[str, Any]]]:
"""Prepare dictionary with values for the Datacite DOI update.
We need to prepare data for Study and Datasets, publish doi for each,
@@ -194,9 +200,9 @@ async def _prepare_datacite_publication(self, obj_op: ObjectOperator, submission
:param submission: Submission data
:returns: Tuple with the Study and list of Datasets and list of identifiers for publishing to Metax
"""
datacite_study = {}
datacite_datasets: List[dict] = []
datacite_bpdatasets: List[dict] = []
datacite_study: dict[str, Any] = {}
datacite_datasets: list[dict[str, Any]] = []
datacite_bpdatasets: list[dict[str, Any]] = []

# we need to re-format these for Datacite, as in the JSON schemas
# we split the words so that front-end will display them nicely
@@ -291,7 +297,9 @@ async def _prepare_datacite_publication(self, obj_op: ObjectOperator, submission

return datacite_study, datacite_datasets

async def _publish_datacite(self, submission: dict, obj_op: ObjectOperator, operator: SubmissionOperator) -> dict:
async def _publish_datacite(
self, submission: dict[str, Any], obj_op: ObjectOperator, operator: SubmissionOperator
) -> dict[str, Any]:
"""Prepare dictionary with values to be published to Metax.
:param submission: Submission data
@@ -337,8 +345,8 @@ async def _publish_datacite(self, submission: dict, obj_op: ObjectOperator, oper
return datacite_study

async def _pre_publish_metax(
self, submission: dict, obj_op: ObjectOperator, operator: SubmissionOperator, external_user_id: str
) -> List[dict]:
self, submission: dict[str, Any], obj_op: ObjectOperator, operator: SubmissionOperator, external_user_id: str
) -> list[dict[str, Any]]:
"""Prepare dictionary with values to be published to Metax.
:param submission: Submission data
@@ -347,7 +355,7 @@ async def _pre_publish_metax(
:param external_user_id: user_id
:returns: Whether publishing to Metax succeeded
"""
metax_datasets: List[dict] = []
metax_datasets: list[dict[str, Any]] = []
async for _, schema, object_data in self.iter_submission_objects_data(submission, obj_op):
if schema in DATACITE_SCHEMAS:
doi = object_data["doi"]
@@ -374,14 +382,14 @@ async def _publish_rems(self, submission: dict, obj_op: ObjectOperator) -> None:
)
return metax_datasets

async def _publish_rems(self, submission: dict, obj_op: ObjectOperator) -> None:
async def _publish_rems(self, submission: dict[str, Any], obj_op: ObjectOperator) -> None:
"""Prepare dictionary with values to be published to REMS.
:param submission: Submission data
:param obj_op: ObjectOperator for reading objects from database.
:returns: Whether publishing to REMS succeeded
"""
rems_datasets: List[dict] = []
rems_datasets: list[dict[str, Any]] = []

async for accession_id, schema, object_data in self.iter_submission_objects_data(submission, obj_op):
if schema in {"dataset", "bpdataset"}:
@@ -572,7 +580,7 @@ async def publish_submission(self, req: Request) -> Response:

# Patch the submission into a published state
_now = int(datetime.now().timestamp())
patch = [
patch: list[dict[str, Any]] = [
{"op": "replace", "path": "/published", "value": True},
{"op": "replace", "path": "/drafts", "value": []},
{"op": "add", "path": "/datePublished", "value": _now},
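The explicit patch: list[dict[str, Any]] annotation pins the element type up front: the operation values mix a bool, an empty list and an int, so without it a checker would infer a join such as dict[str, object] for the literals. The pattern, nearly verbatim from the hunk above:

from datetime import datetime
from typing import Any

_now = int(datetime.now().timestamp())
patch: list[dict[str, Any]] = [
    {"op": "replace", "path": "/published", "value": True},
    {"op": "replace", "path": "/drafts", "value": []},
    {"op": "add", "path": "/datePublished", "value": _now},
]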