From 38e332fa5ee42459be683349376c3600dce9818a Mon Sep 17 00:00:00 2001 From: Enkidu93 Date: Wed, 4 Oct 2023 17:15:08 -0400 Subject: [PATCH 01/13] Serval app initial commit --- samples/ServalApp/REAME.md | 6 + samples/ServalApp/builds.db | Bin 0 -> 12288 bytes samples/ServalApp/db.py | 33 + samples/ServalApp/send_updates.py | 62 + samples/ServalApp/serval_app.py | 87 + samples/ServalApp/serval_auth_module.py | 41 + samples/ServalApp/serval_client_module.py | 3646 +++++++++++++++++++++ samples/ServalApp/serval_email_module.py | 81 + samples/ServalApp/start_app.sh | 4 + 9 files changed, 3960 insertions(+) create mode 100644 samples/ServalApp/REAME.md create mode 100644 samples/ServalApp/builds.db create mode 100644 samples/ServalApp/db.py create mode 100644 samples/ServalApp/send_updates.py create mode 100644 samples/ServalApp/serval_app.py create mode 100644 samples/ServalApp/serval_auth_module.py create mode 100644 samples/ServalApp/serval_client_module.py create mode 100644 samples/ServalApp/serval_email_module.py create mode 100755 samples/ServalApp/start_app.sh diff --git a/samples/ServalApp/REAME.md b/samples/ServalApp/REAME.md new file mode 100644 index 00000000..37b195be --- /dev/null +++ b/samples/ServalApp/REAME.md @@ -0,0 +1,6 @@ +### Running the Serval APP +Before running the app, verify that both `SERVAL_APP_EMAIL_PASSWORD` and `SERVAL_APP_PASSCODE` are appropriately populated. +Then, run: +``` +./start_app.sh +``` \ No newline at end of file diff --git a/samples/ServalApp/builds.db b/samples/ServalApp/builds.db new file mode 100644 index 0000000000000000000000000000000000000000..101600b0ba31890622f4aa419da9ea1a501c8b76 GIT binary patch literal 12288 zcmeI%K}&={6bJCpqFP~bZ;OZE9hH_KqC=-m7hxmY$}HHExM_>zYOM-g>s$1h`UHKG zj?JtYd+`vIr~kk>Z^m)n@Vh;{zH;IsqWf&Q2nsr6Jtj%)gorU_@xH-(-FA7fSx@*V zt;J2D#rB6EUGq8OD zl4w@ONtjnpdF0xrXOm}Ij!o4X=}Ko6`Ef|MraKy&E=?{xn%p?ffRs*@&f_#{e7gwZ zr2bB9&x?Rp-`@x6$A(zD%a)I2zIKFIymU{`O?OIX_LTJc1_Sy@XDG&=ywx9S5{uI? 
zdd{B`Uf26US!AoZU)|q7s76uO?+Dkfc=UK}{w4x62tWV=5P$##AOHafKmY;|fB*#A s7MQn?P!Tyq{f|HMEodVJ1Rwwb2tWV=5P$##AOHafK;SO~j(&Z}7xOY!tN;K2 literal 0 HcmV?d00001 diff --git a/samples/ServalApp/db.py b/samples/ServalApp/db.py new file mode 100644 index 00000000..97f97ab7 --- /dev/null +++ b/samples/ServalApp/db.py @@ -0,0 +1,33 @@ +from sqlalchemy.orm import declarative_base +from sqlalchemy import Column, MetaData, String, Enum, create_engine +import enum + +class State(enum.Enum): + Pending = 0 + Active = 1 + Completed = 2 + Faulted = 3 + +metadata = MetaData() +Base = declarative_base(metadata=metadata) + +class Build(Base): + __tablename__ = "builds" + build_id = Column("build_id",String,primary_key=True) + engine_id = Column("engine_id",String,primary_key=True) + email = Column("email",String) + state = Column("state",Enum(State)) + corpus_id = Column("corpus_id",String) + + def __str__(self): + return str({'build_id':self.build_id, 'engine_id':self.engine_id,'email':self.email,'state':self.state,'corpus_id':self.corpus_id}) + + def __repr__(self): + return self.__str__() + +def create_tables(): + engine = create_engine("sqlite:///builds.db") + metadata.drop_all(bind=engine) + metadata.create_all(bind=engine) + + diff --git a/samples/ServalApp/send_updates.py b/samples/ServalApp/send_updates.py new file mode 100644 index 00000000..631b33fe --- /dev/null +++ b/samples/ServalApp/send_updates.py @@ -0,0 +1,62 @@ +from serval_client_module import * +from serval_auth_module import * +import os +from time import sleep +from db import Build, State +from sqlalchemy import create_engine +from sqlalchemy.orm import sessionmaker +from serval_email_module import ServalAppEmailServer + +def main(): + def started(build:Build, email_server:ServalAppEmailServer): + print(f"\tStarted {build}") + session.delete(build) + email_server.send_build_started_email(build.email) + session.add(Build(build_id=build.build_id, engine_id=build.engine_id, email=build.email, state=State.Active, 
corpus_id=build.corpus_id)) + + def faulted(build:Build, email_server:ServalAppEmailServer): + print(f"\tFaulted {build}") + session.delete(build) + email_server.send_build_faulted_email(build.email) + + def completed(build:Build, email_server:ServalAppEmailServer): + print(f"\tCompleted {build}") + session.delete(build) + pretranslations = client.translation_engines_get_all_pretranslations(build.engine_id, build.corpus_id) + email_server.send_build_completed_email(build.email, '\n'.join([f"{'|'.join(pretranslation.refs)}\t{pretranslation.translation}" for pretranslation in pretranslations])) + + def update(build:Build, email_server:ServalAppEmailServer): + print(f"\tUpdated {build}") + + serval_auth = ServalBearerAuth() + client = RemoteCaller(url_prefix="http://localhost",auth=serval_auth) + responses:"dict[str,function]" = {"Completed":completed, "Faulted":faulted, "Canceled":faulted} + + engine = create_engine("sqlite:///builds.db") + Session = sessionmaker(bind=engine) + session = Session() + + def get_update(build:Build, email_server:ServalAppEmailServer): + build_update = client.translation_engines_get_build(id=build.engine_id, build_id=build.build_id) + if build.state == State.Pending and build_update.state == "Active": + started(build, email_server) + else: + responses.get(build_update.state, update)(build, email_server) + session.commit() + + def send_updates(email_server:ServalAppEmailServer): + print(f"Checking for updates:") + builds = session.query(Build).all() + for build in builds: + try: + get_update(build, email_server) + except Exception as e: + print(f"\tFailed to update {build} because of exception {e}") + sleep(60) + + with ServalAppEmailServer(os.environ.get('SERVAL_APP_EMAIL_PASSWORD')) as email_server: + while(True): + send_updates(email_server) + +if __name__ == "__main__": + main() \ No newline at end of file diff --git a/samples/ServalApp/serval_app.py b/samples/ServalApp/serval_app.py new file mode 100644 index 00000000..be5647c7 --- 
/dev/null +++ b/samples/ServalApp/serval_app.py @@ -0,0 +1,87 @@ +import streamlit as st +from serval_client_module import * +from serval_auth_module import * +from sqlalchemy import create_engine +from sqlalchemy.orm import sessionmaker +from db import Build +from time import sleep + +serval_auth = ServalBearerAuth() +client = RemoteCaller(url_prefix="http://localhost",auth=serval_auth) +engine = create_engine("sqlite:///builds.db") +Session = sessionmaker(bind=engine) +session = Session() + +def submit(): + engine = json.loads(client.translation_engines_create(TranslationEngineConfig(source_language=st.session_state['source_language'],target_language=st.session_state['target_language'],type='Nmt',name=f'serval_app_engine:{st.session_state["email"]}'))) + source_file = json.loads(client.data_files_create(st.session_state['source_file'], format="Text")) + target_file = json.loads(client.data_files_create(st.session_state['target_file'], format="Text")) + corpus = json.loads(client.translation_engines_add_corpus( + engine['id'], + TranslationCorpusConfig( + source_files=[TranslationCorpusFileConfig(file_id=source_file['id'], text_id=st.session_state['source_file'].name)], + target_files=[TranslationCorpusFileConfig(file_id=target_file['id'], text_id=st.session_state['source_file'].name)], + source_language=st.session_state['source_language'], + target_language=st.session_state['target_language'] + ) + ) + ) + build = json.loads(client.translation_engines_start_build(engine['id'], TranslationBuildConfig(pretranslate=[PretranslateCorpusConfig(corpus_id=corpus["id"], text_ids=[st.session_state['source_file'].name])]))) + session.add(Build(build_id=build['id'],engine_id=engine['id'],email=st.session_state['email'],state=build['state'],corpus_id=corpus['id'])) + session.commit() + +def already_active_build_for(email:str): + return len(session.query(Build).where(Build.email == email).all()) > 0 + +def is_valid_passcode(passcode:str): + return passcode == 
os.environ.get('SERVAL_APP_PASSCODE') + +st.subheader("Neural Machine Translation") + +tried_to_submit = st.session_state.get('tried_to_submit', False) +with st.form(key="NmtTranslationForm"): + st.session_state['source_language'] = st.text_input(label="Source language tag before submitting", placeholder="en") + if st.session_state.get('source_language','') == '' and tried_to_submit: + st.warning("Please enter a source language tag", icon='⬆️') + + st.session_state['source_file'] = st.file_uploader(label="Source File") + if st.session_state.get('source_file',None) is None and tried_to_submit: + st.warning("Please upload a source file before submitting", icon='⬆️') + + st.session_state['target_language'] = st.text_input(label="Target language tag", placeholder="es") + if st.session_state.get('target_language','') == '' and tried_to_submit: + st.warning("Please enter a target language tag before submitting", icon='⬆️') + + st.session_state['target_file'] = st.file_uploader(label="Target File") + if st.session_state.get('target_file',None) is None and tried_to_submit: + st.warning("Please upload a target file before submitting", icon='⬆️') + + st.session_state['email'] = st.text_input(label="Email", placeholder="johndoe@example.com") + if st.session_state.get('email','') == '' and tried_to_submit: + st.warning("Please enter an email address", icon='⬆️') + + st.session_state['passcode'] = st.text_input(label="Passcode", placeholder="") + if st.session_state.get('passcode','') == '' and tried_to_submit: + st.warning("Please enter the passcode", icon='⬆️') + + if tried_to_submit: + st.error(st.session_state.get('error',"Something went wrong. Please try again in a moment.")) + if st.form_submit_button("Generate translations"): + if not is_valid_passcode(st.session_state.get('passcode','')): + st.session_state['tried_to_submit'] = True + st.session_state['error'] = "The passcode was invalid." 
+ st.rerun() + elif already_active_build_for(st.session_state['email']): + st.session_state['tried_to_submit'] = True + st.session_state['error'] = "There is already an a pending or active build associated with this email address. Please wait for the previous build to finish." + st.rerun() + elif st.session_state['source_language'] != '' and st.session_state['target_language'] != '' and st.session_state['source_file'] is not None and st.session_state['target_file'] is not None and st.session_state['email'] != '': + submit() + st.session_state['tried_to_submit'] = False + st.toast("Translations are on their way! You'll receive an email when your translation job has begun.") + sleep(4) + st.rerun() + else: + st.session_state['tried_to_submit'] = True + st.session_state['error'] = "Some required fields were left blank. Please fill in all fields above" + st.rerun() \ No newline at end of file diff --git a/samples/ServalApp/serval_auth_module.py b/samples/ServalApp/serval_auth_module.py new file mode 100644 index 00000000..c9a38076 --- /dev/null +++ b/samples/ServalApp/serval_auth_module.py @@ -0,0 +1,41 @@ +import requests +import json +import os +import time + +class ServalBearerAuth(requests.auth.AuthBase): + def __init__(self): + self.__client_id = os.environ.get("SERVAL_CLIENT_ID") + assert(self.__client_id is not None) + self.__client_secret = os.environ.get("SERVAL_CLIENT_SECRET") + assert(self.__client_secret is not None) + self.__auth_url = os.environ.get("SERVAL_AUTH_URL") + assert(self.__auth_url is not None) + self.update_token() + self.__last_time_fetched = time.time() + def __call__(self, r): + if(time.time() - self.__last_time_fetched > 20*60): + self.update_token() + r.headers["authorization"] = "Bearer " + self.token + return r + + def update_token(self): + data = { + "client_id": f"{self.__client_id}", + "client_secret":f"{self.__client_secret}", + "audience":"https://machine.sil.org", + "grant_type":"client_credentials" + } + + encoded_data = 
json.dumps(data).encode('utf-8') + r = None + try: + r:requests.Response = requests.post( + url=f'{self.__auth_url}/oauth/token', + data=encoded_data, + headers={"content-type": "application/json"} + ) + self.token = r.json()['access_token'] if r is not None else None + except Exception as e: + raise ValueError(f"Token cannot be None. Failed to retrieve token from auth server; responded with {r.status}. Original exception: {e}") + diff --git a/samples/ServalApp/serval_client_module.py b/samples/ServalApp/serval_client_module.py new file mode 100644 index 00000000..bf805d79 --- /dev/null +++ b/samples/ServalApp/serval_client_module.py @@ -0,0 +1,3646 @@ +#!/usr/bin/env python3 +# Automatically generated file by swagger_to. DO NOT EDIT OR APPEND ANYTHING! +"""Implements the client for Translation Engines.""" + +# pylint: skip-file +# pydocstyle: add-ignore=D105,D107,D401 + +import contextlib +import json +from typing import Any, BinaryIO, Dict, List, MutableMapping, Optional, cast + +import requests +import requests.auth + + +def from_obj(obj: Any, expected: List[type], path: str = '') -> Any: + """ + Checks and converts the given obj along the expected types. 
+ + :param obj: to be converted + :param expected: list of types representing the (nested) structure + :param path: to the object used for debugging + :return: the converted object + """ + if not expected: + raise ValueError("`expected` is empty, but at least one type needs to be specified.") + + exp = expected[0] + + if exp == float: + if isinstance(obj, int): + return float(obj) + + if isinstance(obj, float): + return obj + + raise ValueError( + 'Expected object of type int or float at {!r}, but got {}.'.format(path, type(obj))) + + if exp in [bool, int, str, list, dict]: + if not isinstance(obj, exp): + raise ValueError( + 'Expected object of type {} at {!r}, but got {}.'.format(exp, path, type(obj))) + + if exp in [bool, int, float, str]: + return obj + + if exp == list: + lst = [] # type: List[Any] + for i, value in enumerate(obj): + lst.append( + from_obj(value, expected=expected[1:], path='{}[{}]'.format(path, i))) + + return lst + + if exp == dict: + adict = dict() # type: Dict[str, Any] + for key, value in obj.items(): + if not isinstance(key, str): + raise ValueError( + 'Expected a key of type str at path {!r}, got: {}'.format(path, type(key))) + + adict[key] = from_obj(value, expected=expected[1:], path='{}[{!r}]'.format(path, key)) + + return adict + + if exp == DataFile: + return data_file_from_obj(obj, path=path) + + if exp == TranslationEngine: + return translation_engine_from_obj(obj, path=path) + + if exp == TranslationEngineConfig: + return translation_engine_config_from_obj(obj, path=path) + + if exp == TranslationResult: + return translation_result_from_obj(obj, path=path) + + if exp == AlignedWordPair: + return aligned_word_pair_from_obj(obj, path=path) + + if exp == Phrase: + return phrase_from_obj(obj, path=path) + + if exp == WordGraph: + return word_graph_from_obj(obj, path=path) + + if exp == WordGraphArc: + return word_graph_arc_from_obj(obj, path=path) + + if exp == SegmentPair: + return segment_pair_from_obj(obj, path=path) + + if exp 
== TranslationCorpus: + return translation_corpus_from_obj(obj, path=path) + + if exp == ResourceLink: + return resource_link_from_obj(obj, path=path) + + if exp == TranslationCorpusFile: + return translation_corpus_file_from_obj(obj, path=path) + + if exp == TranslationCorpusConfig: + return translation_corpus_config_from_obj(obj, path=path) + + if exp == TranslationCorpusFileConfig: + return translation_corpus_file_config_from_obj(obj, path=path) + + if exp == TranslationCorpusUpdateConfig: + return translation_corpus_update_config_from_obj(obj, path=path) + + if exp == Pretranslation: + return pretranslation_from_obj(obj, path=path) + + if exp == TranslationBuild: + return translation_build_from_obj(obj, path=path) + + if exp == PretranslateCorpus: + return pretranslate_corpus_from_obj(obj, path=path) + + if exp == TranslationBuildConfig: + return translation_build_config_from_obj(obj, path=path) + + if exp == PretranslateCorpusConfig: + return pretranslate_corpus_config_from_obj(obj, path=path) + + if exp == Webhook: + return webhook_from_obj(obj, path=path) + + if exp == WebhookConfig: + return webhook_config_from_obj(obj, path=path) + + raise ValueError("Unexpected `expected` type: {}".format(exp)) + + +def to_jsonable(obj: Any, expected: List[type], path: str = "") -> Any: + """ + Checks and converts the given object along the expected types to a JSON-able representation. + + :param obj: to be converted + :param expected: list of types representing the (nested) structure + :param path: path to the object used for debugging + :return: JSON-able representation of the object + """ + if not expected: + raise ValueError("`expected` is empty, but at least one type needs to be specified.") + + exp = expected[0] + if not isinstance(obj, exp): + raise ValueError('Expected object of type {} at path {!r}, but got {}.'.format( + exp, path, type(obj))) + + # Assert on primitive types to help type-hinting. 
+ if exp == bool: + assert isinstance(obj, bool) + return obj + + if exp == int: + assert isinstance(obj, int) + return obj + + if exp == float: + assert isinstance(obj, float) + return obj + + if exp == str: + assert isinstance(obj, str) + return obj + + if exp == list: + assert isinstance(obj, list) + + lst = [] # type: List[Any] + for i, value in enumerate(obj): + lst.append( + to_jsonable(value, expected=expected[1:], path='{}[{}]'.format(path, i))) + + return lst + + if exp == dict: + assert isinstance(obj, dict) + + adict = dict() # type: Dict[str, Any] + for key, value in obj.items(): + if not isinstance(key, str): + raise ValueError( + 'Expected a key of type str at path {!r}, got: {}'.format(path, type(key))) + + adict[key] = to_jsonable( + value, + expected=expected[1:], + path='{}[{!r}]'.format(path, key)) + + return adict + + if exp == DataFile: + assert isinstance(obj, DataFile) + return data_file_to_jsonable(obj, path=path) + + if exp == TranslationEngine: + assert isinstance(obj, TranslationEngine) + return translation_engine_to_jsonable(obj, path=path) + + if exp == TranslationEngineConfig: + assert isinstance(obj, TranslationEngineConfig) + return translation_engine_config_to_jsonable(obj, path=path) + + if exp == TranslationResult: + assert isinstance(obj, TranslationResult) + return translation_result_to_jsonable(obj, path=path) + + if exp == AlignedWordPair: + assert isinstance(obj, AlignedWordPair) + return aligned_word_pair_to_jsonable(obj, path=path) + + if exp == Phrase: + assert isinstance(obj, Phrase) + return phrase_to_jsonable(obj, path=path) + + if exp == WordGraph: + assert isinstance(obj, WordGraph) + return word_graph_to_jsonable(obj, path=path) + + if exp == WordGraphArc: + assert isinstance(obj, WordGraphArc) + return word_graph_arc_to_jsonable(obj, path=path) + + if exp == SegmentPair: + assert isinstance(obj, SegmentPair) + return segment_pair_to_jsonable(obj, path=path) + + if exp == TranslationCorpus: + assert isinstance(obj, 
TranslationCorpus) + return translation_corpus_to_jsonable(obj, path=path) + + if exp == ResourceLink: + assert isinstance(obj, ResourceLink) + return resource_link_to_jsonable(obj, path=path) + + if exp == TranslationCorpusFile: + assert isinstance(obj, TranslationCorpusFile) + return translation_corpus_file_to_jsonable(obj, path=path) + + if exp == TranslationCorpusConfig: + assert isinstance(obj, TranslationCorpusConfig) + return translation_corpus_config_to_jsonable(obj, path=path) + + if exp == TranslationCorpusFileConfig: + assert isinstance(obj, TranslationCorpusFileConfig) + return translation_corpus_file_config_to_jsonable(obj, path=path) + + if exp == TranslationCorpusUpdateConfig: + assert isinstance(obj, TranslationCorpusUpdateConfig) + return translation_corpus_update_config_to_jsonable(obj, path=path) + + if exp == Pretranslation: + assert isinstance(obj, Pretranslation) + return pretranslation_to_jsonable(obj, path=path) + + if exp == TranslationBuild: + assert isinstance(obj, TranslationBuild) + return translation_build_to_jsonable(obj, path=path) + + if exp == PretranslateCorpus: + assert isinstance(obj, PretranslateCorpus) + return pretranslate_corpus_to_jsonable(obj, path=path) + + if exp == TranslationBuildConfig: + assert isinstance(obj, TranslationBuildConfig) + return translation_build_config_to_jsonable(obj, path=path) + + if exp == PretranslateCorpusConfig: + assert isinstance(obj, PretranslateCorpusConfig) + return pretranslate_corpus_config_to_jsonable(obj, path=path) + + if exp == Webhook: + assert isinstance(obj, Webhook) + return webhook_to_jsonable(obj, path=path) + + if exp == WebhookConfig: + assert isinstance(obj, WebhookConfig) + return webhook_config_to_jsonable(obj, path=path) + + raise ValueError("Unexpected `expected` type: {}".format(exp)) + + +class DataFile: + def __init__( + self, + id: str, + url: str, + format: str, + revision: int, + name: Optional[str] = None) -> None: + """Initializes with the given values.""" + 
self.id = id + + self.url = url + + self.format = format + + self.revision = revision + + self.name = name + + def to_jsonable(self) -> MutableMapping[str, Any]: + """ + Dispatches the conversion to data_file_to_jsonable. + + :return: JSON-able representation + """ + return data_file_to_jsonable(self) + + +def new_data_file() -> DataFile: + """Generates an instance of DataFile with default values.""" + return DataFile( + id='', + url='', + format='', + revision=0) + + +def data_file_from_obj(obj: Any, path: str = "") -> DataFile: + """ + Generates an instance of DataFile from a dictionary object. + + :param obj: a JSON-ed dictionary object representing an instance of DataFile + :param path: path to the object used for debugging + :return: parsed instance of DataFile + """ + if not isinstance(obj, dict): + raise ValueError('Expected a dict at path {}, but got: {}'.format(path, type(obj))) + + for key in obj: + if not isinstance(key, str): + raise ValueError( + 'Expected a key of type str at path {}, but got: {}'.format(path, type(key))) + + id_from_obj = from_obj( + obj['id'], + expected=[str], + path=path + '.id') # type: str + + url_from_obj = from_obj( + obj['url'], + expected=[str], + path=path + '.url') # type: str + + format_from_obj = from_obj( + obj['format'], + expected=[str], + path=path + '.format') # type: str + + revision_from_obj = from_obj( + obj['revision'], + expected=[int], + path=path + '.revision') # type: int + + obj_name = obj.get('name', None) + if obj_name is not None: + name_from_obj = from_obj( + obj_name, + expected=[str], + path=path + '.name') # type: Optional[str] + else: + name_from_obj = None + + return DataFile( + id=id_from_obj, + url=url_from_obj, + format=format_from_obj, + revision=revision_from_obj, + name=name_from_obj) + + +def data_file_to_jsonable( + data_file: DataFile, + path: str = "") -> MutableMapping[str, Any]: + """ + Generates a JSON-able mapping from an instance of DataFile. 
+ + :param data_file: instance of DataFile to be JSON-ized + :param path: path to the data_file used for debugging + :return: a JSON-able representation + """ + res = dict() # type: Dict[str, Any] + + res['id'] = data_file.id + + res['url'] = data_file.url + + res['format'] = data_file.format + + res['revision'] = data_file.revision + + if data_file.name is not None: + res['name'] = data_file.name + + return res + + +class TranslationEngine: + def __init__( + self, + id: str, + url: str, + source_language: str, + target_language: str, + type: str, + is_building: bool, + model_revision: int, + confidence: float, + corpus_size: int, + name: Optional[str] = None) -> None: + """Initializes with the given values.""" + self.id = id + + self.url = url + + self.source_language = source_language + + self.target_language = target_language + + self.type = type + + self.is_building = is_building + + self.model_revision = model_revision + + self.confidence = confidence + + self.corpus_size = corpus_size + + self.name = name + + def to_jsonable(self) -> MutableMapping[str, Any]: + """ + Dispatches the conversion to translation_engine_to_jsonable. + + :return: JSON-able representation + """ + return translation_engine_to_jsonable(self) + + +def new_translation_engine() -> TranslationEngine: + """Generates an instance of TranslationEngine with default values.""" + return TranslationEngine( + id='', + url='', + source_language='', + target_language='', + type='', + is_building=False, + model_revision=0, + confidence=0.0, + corpus_size=0) + + +def translation_engine_from_obj(obj: Any, path: str = "") -> TranslationEngine: + """ + Generates an instance of TranslationEngine from a dictionary object. 
+ + :param obj: a JSON-ed dictionary object representing an instance of TranslationEngine + :param path: path to the object used for debugging + :return: parsed instance of TranslationEngine + """ + if not isinstance(obj, dict): + raise ValueError('Expected a dict at path {}, but got: {}'.format(path, type(obj))) + + for key in obj: + if not isinstance(key, str): + raise ValueError( + 'Expected a key of type str at path {}, but got: {}'.format(path, type(key))) + + id_from_obj = from_obj( + obj['id'], + expected=[str], + path=path + '.id') # type: str + + url_from_obj = from_obj( + obj['url'], + expected=[str], + path=path + '.url') # type: str + + source_language_from_obj = from_obj( + obj['sourceLanguage'], + expected=[str], + path=path + '.sourceLanguage') # type: str + + target_language_from_obj = from_obj( + obj['targetLanguage'], + expected=[str], + path=path + '.targetLanguage') # type: str + + type_from_obj = from_obj( + obj['type'], + expected=[str], + path=path + '.type') # type: str + + is_building_from_obj = from_obj( + obj['isBuilding'], + expected=[bool], + path=path + '.isBuilding') # type: bool + + model_revision_from_obj = from_obj( + obj['modelRevision'], + expected=[int], + path=path + '.modelRevision') # type: int + + confidence_from_obj = from_obj( + obj['confidence'], + expected=[float], + path=path + '.confidence') # type: float + + corpus_size_from_obj = from_obj( + obj['corpusSize'], + expected=[int], + path=path + '.corpusSize') # type: int + + obj_name = obj.get('name', None) + if obj_name is not None: + name_from_obj = from_obj( + obj_name, + expected=[str], + path=path + '.name') # type: Optional[str] + else: + name_from_obj = None + + return TranslationEngine( + id=id_from_obj, + url=url_from_obj, + source_language=source_language_from_obj, + target_language=target_language_from_obj, + type=type_from_obj, + is_building=is_building_from_obj, + model_revision=model_revision_from_obj, + confidence=confidence_from_obj, + 
corpus_size=corpus_size_from_obj, + name=name_from_obj) + + +def translation_engine_to_jsonable( + translation_engine: TranslationEngine, + path: str = "") -> MutableMapping[str, Any]: + """ + Generates a JSON-able mapping from an instance of TranslationEngine. + + :param translation_engine: instance of TranslationEngine to be JSON-ized + :param path: path to the translation_engine used for debugging + :return: a JSON-able representation + """ + res = dict() # type: Dict[str, Any] + + res['id'] = translation_engine.id + + res['url'] = translation_engine.url + + res['sourceLanguage'] = translation_engine.source_language + + res['targetLanguage'] = translation_engine.target_language + + res['type'] = translation_engine.type + + res['isBuilding'] = translation_engine.is_building + + res['modelRevision'] = translation_engine.model_revision + + res['confidence'] = translation_engine.confidence + + res['corpusSize'] = translation_engine.corpus_size + + if translation_engine.name is not None: + res['name'] = translation_engine.name + + return res + + +class TranslationEngineConfig: + def __init__( + self, + source_language: str, + target_language: str, + type: str, + name: Optional[str] = None) -> None: + """Initializes with the given values.""" + # The source language tag. + self.source_language = source_language + + # The target language tag. + self.target_language = target_language + + # The translation engine type. + self.type = type + + # The translation engine name. + self.name = name + + def to_jsonable(self) -> MutableMapping[str, Any]: + """ + Dispatches the conversion to translation_engine_config_to_jsonable. 
+ + :return: JSON-able representation + """ + return translation_engine_config_to_jsonable(self) + + +def new_translation_engine_config() -> TranslationEngineConfig: + """Generates an instance of TranslationEngineConfig with default values.""" + return TranslationEngineConfig( + source_language='', + target_language='', + type='') + + +def translation_engine_config_from_obj(obj: Any, path: str = "") -> TranslationEngineConfig: + """ + Generates an instance of TranslationEngineConfig from a dictionary object. + + :param obj: a JSON-ed dictionary object representing an instance of TranslationEngineConfig + :param path: path to the object used for debugging + :return: parsed instance of TranslationEngineConfig + """ + if not isinstance(obj, dict): + raise ValueError('Expected a dict at path {}, but got: {}'.format(path, type(obj))) + + for key in obj: + if not isinstance(key, str): + raise ValueError( + 'Expected a key of type str at path {}, but got: {}'.format(path, type(key))) + + source_language_from_obj = from_obj( + obj['sourceLanguage'], + expected=[str], + path=path + '.sourceLanguage') # type: str + + target_language_from_obj = from_obj( + obj['targetLanguage'], + expected=[str], + path=path + '.targetLanguage') # type: str + + type_from_obj = from_obj( + obj['type'], + expected=[str], + path=path + '.type') # type: str + + obj_name = obj.get('name', None) + if obj_name is not None: + name_from_obj = from_obj( + obj_name, + expected=[str], + path=path + '.name') # type: Optional[str] + else: + name_from_obj = None + + return TranslationEngineConfig( + source_language=source_language_from_obj, + target_language=target_language_from_obj, + type=type_from_obj, + name=name_from_obj) + + +def translation_engine_config_to_jsonable( + translation_engine_config: TranslationEngineConfig, + path: str = "") -> MutableMapping[str, Any]: + """ + Generates a JSON-able mapping from an instance of TranslationEngineConfig. 
+ + :param translation_engine_config: instance of TranslationEngineConfig to be JSON-ized + :param path: path to the translation_engine_config used for debugging + :return: a JSON-able representation + """ + res = dict() # type: Dict[str, Any] + + res['sourceLanguage'] = translation_engine_config.source_language + + res['targetLanguage'] = translation_engine_config.target_language + + res['type'] = translation_engine_config.type + + if translation_engine_config.name is not None: + res['name'] = translation_engine_config.name + + return res + + +class TranslationResult: + def __init__( + self, + translation: str, + source_tokens: List[str], + target_tokens: List[str], + confidences: List[float], + sources: List[List[str]], + alignment: List['AlignedWordPair'], + phrases: List['Phrase']) -> None: + """Initializes with the given values.""" + self.translation = translation + + self.source_tokens = source_tokens + + self.target_tokens = target_tokens + + self.confidences = confidences + + self.sources = sources + + self.alignment = alignment + + self.phrases = phrases + + def to_jsonable(self) -> MutableMapping[str, Any]: + """ + Dispatches the conversion to translation_result_to_jsonable. + + :return: JSON-able representation + """ + return translation_result_to_jsonable(self) + + +def new_translation_result() -> TranslationResult: + """Generates an instance of TranslationResult with default values.""" + return TranslationResult( + translation='', + source_tokens=[], + target_tokens=[], + confidences=[], + sources=[], + alignment=[], + phrases=[]) + + +def translation_result_from_obj(obj: Any, path: str = "") -> TranslationResult: + """ + Generates an instance of TranslationResult from a dictionary object. 
+ + :param obj: a JSON-ed dictionary object representing an instance of TranslationResult + :param path: path to the object used for debugging + :return: parsed instance of TranslationResult + """ + if not isinstance(obj, dict): + raise ValueError('Expected a dict at path {}, but got: {}'.format(path, type(obj))) + + for key in obj: + if not isinstance(key, str): + raise ValueError( + 'Expected a key of type str at path {}, but got: {}'.format(path, type(key))) + + translation_from_obj = from_obj( + obj['translation'], + expected=[str], + path=path + '.translation') # type: str + + source_tokens_from_obj = from_obj( + obj['sourceTokens'], + expected=[list, str], + path=path + '.sourceTokens') # type: List[str] + + target_tokens_from_obj = from_obj( + obj['targetTokens'], + expected=[list, str], + path=path + '.targetTokens') # type: List[str] + + confidences_from_obj = from_obj( + obj['confidences'], + expected=[list, float], + path=path + '.confidences') # type: List[float] + + sources_from_obj = from_obj( + obj['sources'], + expected=[list, list, str], + path=path + '.sources') # type: List[List[str]] + + alignment_from_obj = from_obj( + obj['alignment'], + expected=[list, AlignedWordPair], + path=path + '.alignment') # type: List['AlignedWordPair'] + + phrases_from_obj = from_obj( + obj['phrases'], + expected=[list, Phrase], + path=path + '.phrases') # type: List['Phrase'] + + return TranslationResult( + translation=translation_from_obj, + source_tokens=source_tokens_from_obj, + target_tokens=target_tokens_from_obj, + confidences=confidences_from_obj, + sources=sources_from_obj, + alignment=alignment_from_obj, + phrases=phrases_from_obj) + + +def translation_result_to_jsonable( + translation_result: TranslationResult, + path: str = "") -> MutableMapping[str, Any]: + """ + Generates a JSON-able mapping from an instance of TranslationResult. 
+ + :param translation_result: instance of TranslationResult to be JSON-ized + :param path: path to the translation_result used for debugging + :return: a JSON-able representation + """ + res = dict() # type: Dict[str, Any] + + res['translation'] = translation_result.translation + + res['sourceTokens'] = to_jsonable( + translation_result.source_tokens, + expected=[list, str], + path='{}.sourceTokens'.format(path)) + + res['targetTokens'] = to_jsonable( + translation_result.target_tokens, + expected=[list, str], + path='{}.targetTokens'.format(path)) + + res['confidences'] = to_jsonable( + translation_result.confidences, + expected=[list, float], + path='{}.confidences'.format(path)) + + res['sources'] = to_jsonable( + translation_result.sources, + expected=[list, list, str], + path='{}.sources'.format(path)) + + res['alignment'] = to_jsonable( + translation_result.alignment, + expected=[list, AlignedWordPair], + path='{}.alignment'.format(path)) + + res['phrases'] = to_jsonable( + translation_result.phrases, + expected=[list, Phrase], + path='{}.phrases'.format(path)) + + return res + + +class AlignedWordPair: + def __init__( + self, + source_index: int, + target_index: int) -> None: + """Initializes with the given values.""" + self.source_index = source_index + + self.target_index = target_index + + def to_jsonable(self) -> MutableMapping[str, Any]: + """ + Dispatches the conversion to aligned_word_pair_to_jsonable. + + :return: JSON-able representation + """ + return aligned_word_pair_to_jsonable(self) + + +def new_aligned_word_pair() -> AlignedWordPair: + """Generates an instance of AlignedWordPair with default values.""" + return AlignedWordPair( + source_index=0, + target_index=0) + + +def aligned_word_pair_from_obj(obj: Any, path: str = "") -> AlignedWordPair: + """ + Generates an instance of AlignedWordPair from a dictionary object. 
+ + :param obj: a JSON-ed dictionary object representing an instance of AlignedWordPair + :param path: path to the object used for debugging + :return: parsed instance of AlignedWordPair + """ + if not isinstance(obj, dict): + raise ValueError('Expected a dict at path {}, but got: {}'.format(path, type(obj))) + + for key in obj: + if not isinstance(key, str): + raise ValueError( + 'Expected a key of type str at path {}, but got: {}'.format(path, type(key))) + + source_index_from_obj = from_obj( + obj['sourceIndex'], + expected=[int], + path=path + '.sourceIndex') # type: int + + target_index_from_obj = from_obj( + obj['targetIndex'], + expected=[int], + path=path + '.targetIndex') # type: int + + return AlignedWordPair( + source_index=source_index_from_obj, + target_index=target_index_from_obj) + + +def aligned_word_pair_to_jsonable( + aligned_word_pair: AlignedWordPair, + path: str = "") -> MutableMapping[str, Any]: + """ + Generates a JSON-able mapping from an instance of AlignedWordPair. + + :param aligned_word_pair: instance of AlignedWordPair to be JSON-ized + :param path: path to the aligned_word_pair used for debugging + :return: a JSON-able representation + """ + res = dict() # type: Dict[str, Any] + + res['sourceIndex'] = aligned_word_pair.source_index + + res['targetIndex'] = aligned_word_pair.target_index + + return res + + +class Phrase: + def __init__( + self, + source_segment_start: int, + source_segment_end: int, + target_segment_cut: int) -> None: + """Initializes with the given values.""" + self.source_segment_start = source_segment_start + + self.source_segment_end = source_segment_end + + self.target_segment_cut = target_segment_cut + + def to_jsonable(self) -> MutableMapping[str, Any]: + """ + Dispatches the conversion to phrase_to_jsonable. 
+ + :return: JSON-able representation + """ + return phrase_to_jsonable(self) + + +def new_phrase() -> Phrase: + """Generates an instance of Phrase with default values.""" + return Phrase( + source_segment_start=0, + source_segment_end=0, + target_segment_cut=0) + + +def phrase_from_obj(obj: Any, path: str = "") -> Phrase: + """ + Generates an instance of Phrase from a dictionary object. + + :param obj: a JSON-ed dictionary object representing an instance of Phrase + :param path: path to the object used for debugging + :return: parsed instance of Phrase + """ + if not isinstance(obj, dict): + raise ValueError('Expected a dict at path {}, but got: {}'.format(path, type(obj))) + + for key in obj: + if not isinstance(key, str): + raise ValueError( + 'Expected a key of type str at path {}, but got: {}'.format(path, type(key))) + + source_segment_start_from_obj = from_obj( + obj['sourceSegmentStart'], + expected=[int], + path=path + '.sourceSegmentStart') # type: int + + source_segment_end_from_obj = from_obj( + obj['sourceSegmentEnd'], + expected=[int], + path=path + '.sourceSegmentEnd') # type: int + + target_segment_cut_from_obj = from_obj( + obj['targetSegmentCut'], + expected=[int], + path=path + '.targetSegmentCut') # type: int + + return Phrase( + source_segment_start=source_segment_start_from_obj, + source_segment_end=source_segment_end_from_obj, + target_segment_cut=target_segment_cut_from_obj) + + +def phrase_to_jsonable( + phrase: Phrase, + path: str = "") -> MutableMapping[str, Any]: + """ + Generates a JSON-able mapping from an instance of Phrase. 
+ + :param phrase: instance of Phrase to be JSON-ized + :param path: path to the phrase used for debugging + :return: a JSON-able representation + """ + res = dict() # type: Dict[str, Any] + + res['sourceSegmentStart'] = phrase.source_segment_start + + res['sourceSegmentEnd'] = phrase.source_segment_end + + res['targetSegmentCut'] = phrase.target_segment_cut + + return res + + +class WordGraph: + def __init__( + self, + source_tokens: List[str], + initial_state_score: float, + final_states: List[int], + arcs: List['WordGraphArc']) -> None: + """Initializes with the given values.""" + self.source_tokens = source_tokens + + self.initial_state_score = initial_state_score + + self.final_states = final_states + + self.arcs = arcs + + def to_jsonable(self) -> MutableMapping[str, Any]: + """ + Dispatches the conversion to word_graph_to_jsonable. + + :return: JSON-able representation + """ + return word_graph_to_jsonable(self) + + +def new_word_graph() -> WordGraph: + """Generates an instance of WordGraph with default values.""" + return WordGraph( + source_tokens=[], + initial_state_score=0.0, + final_states=[], + arcs=[]) + + +def word_graph_from_obj(obj: Any, path: str = "") -> WordGraph: + """ + Generates an instance of WordGraph from a dictionary object. 
+ + :param obj: a JSON-ed dictionary object representing an instance of WordGraph + :param path: path to the object used for debugging + :return: parsed instance of WordGraph + """ + if not isinstance(obj, dict): + raise ValueError('Expected a dict at path {}, but got: {}'.format(path, type(obj))) + + for key in obj: + if not isinstance(key, str): + raise ValueError( + 'Expected a key of type str at path {}, but got: {}'.format(path, type(key))) + + source_tokens_from_obj = from_obj( + obj['sourceTokens'], + expected=[list, str], + path=path + '.sourceTokens') # type: List[str] + + initial_state_score_from_obj = from_obj( + obj['initialStateScore'], + expected=[float], + path=path + '.initialStateScore') # type: float + + final_states_from_obj = from_obj( + obj['finalStates'], + expected=[list, int], + path=path + '.finalStates') # type: List[int] + + arcs_from_obj = from_obj( + obj['arcs'], + expected=[list, WordGraphArc], + path=path + '.arcs') # type: List['WordGraphArc'] + + return WordGraph( + source_tokens=source_tokens_from_obj, + initial_state_score=initial_state_score_from_obj, + final_states=final_states_from_obj, + arcs=arcs_from_obj) + + +def word_graph_to_jsonable( + word_graph: WordGraph, + path: str = "") -> MutableMapping[str, Any]: + """ + Generates a JSON-able mapping from an instance of WordGraph. 
+ + :param word_graph: instance of WordGraph to be JSON-ized + :param path: path to the word_graph used for debugging + :return: a JSON-able representation + """ + res = dict() # type: Dict[str, Any] + + res['sourceTokens'] = to_jsonable( + word_graph.source_tokens, + expected=[list, str], + path='{}.sourceTokens'.format(path)) + + res['initialStateScore'] = word_graph.initial_state_score + + res['finalStates'] = to_jsonable( + word_graph.final_states, + expected=[list, int], + path='{}.finalStates'.format(path)) + + res['arcs'] = to_jsonable( + word_graph.arcs, + expected=[list, WordGraphArc], + path='{}.arcs'.format(path)) + + return res + + +class WordGraphArc: + def __init__( + self, + prev_state: int, + next_state: int, + score: float, + target_tokens: List[str], + confidences: List[float], + source_segment_start: int, + source_segment_end: int, + alignment: List['AlignedWordPair'], + sources: List[List[str]]) -> None: + """Initializes with the given values.""" + self.prev_state = prev_state + + self.next_state = next_state + + self.score = score + + self.target_tokens = target_tokens + + self.confidences = confidences + + self.source_segment_start = source_segment_start + + self.source_segment_end = source_segment_end + + self.alignment = alignment + + self.sources = sources + + def to_jsonable(self) -> MutableMapping[str, Any]: + """ + Dispatches the conversion to word_graph_arc_to_jsonable. + + :return: JSON-able representation + """ + return word_graph_arc_to_jsonable(self) + + +def new_word_graph_arc() -> WordGraphArc: + """Generates an instance of WordGraphArc with default values.""" + return WordGraphArc( + prev_state=0, + next_state=0, + score=0.0, + target_tokens=[], + confidences=[], + source_segment_start=0, + source_segment_end=0, + alignment=[], + sources=[]) + + +def word_graph_arc_from_obj(obj: Any, path: str = "") -> WordGraphArc: + """ + Generates an instance of WordGraphArc from a dictionary object. 
+ + :param obj: a JSON-ed dictionary object representing an instance of WordGraphArc + :param path: path to the object used for debugging + :return: parsed instance of WordGraphArc + """ + if not isinstance(obj, dict): + raise ValueError('Expected a dict at path {}, but got: {}'.format(path, type(obj))) + + for key in obj: + if not isinstance(key, str): + raise ValueError( + 'Expected a key of type str at path {}, but got: {}'.format(path, type(key))) + + prev_state_from_obj = from_obj( + obj['prevState'], + expected=[int], + path=path + '.prevState') # type: int + + next_state_from_obj = from_obj( + obj['nextState'], + expected=[int], + path=path + '.nextState') # type: int + + score_from_obj = from_obj( + obj['score'], + expected=[float], + path=path + '.score') # type: float + + target_tokens_from_obj = from_obj( + obj['targetTokens'], + expected=[list, str], + path=path + '.targetTokens') # type: List[str] + + confidences_from_obj = from_obj( + obj['confidences'], + expected=[list, float], + path=path + '.confidences') # type: List[float] + + source_segment_start_from_obj = from_obj( + obj['sourceSegmentStart'], + expected=[int], + path=path + '.sourceSegmentStart') # type: int + + source_segment_end_from_obj = from_obj( + obj['sourceSegmentEnd'], + expected=[int], + path=path + '.sourceSegmentEnd') # type: int + + alignment_from_obj = from_obj( + obj['alignment'], + expected=[list, AlignedWordPair], + path=path + '.alignment') # type: List['AlignedWordPair'] + + sources_from_obj = from_obj( + obj['sources'], + expected=[list, list, str], + path=path + '.sources') # type: List[List[str]] + + return WordGraphArc( + prev_state=prev_state_from_obj, + next_state=next_state_from_obj, + score=score_from_obj, + target_tokens=target_tokens_from_obj, + confidences=confidences_from_obj, + source_segment_start=source_segment_start_from_obj, + source_segment_end=source_segment_end_from_obj, + alignment=alignment_from_obj, + sources=sources_from_obj) + + +def 
word_graph_arc_to_jsonable( + word_graph_arc: WordGraphArc, + path: str = "") -> MutableMapping[str, Any]: + """ + Generates a JSON-able mapping from an instance of WordGraphArc. + + :param word_graph_arc: instance of WordGraphArc to be JSON-ized + :param path: path to the word_graph_arc used for debugging + :return: a JSON-able representation + """ + res = dict() # type: Dict[str, Any] + + res['prevState'] = word_graph_arc.prev_state + + res['nextState'] = word_graph_arc.next_state + + res['score'] = word_graph_arc.score + + res['targetTokens'] = to_jsonable( + word_graph_arc.target_tokens, + expected=[list, str], + path='{}.targetTokens'.format(path)) + + res['confidences'] = to_jsonable( + word_graph_arc.confidences, + expected=[list, float], + path='{}.confidences'.format(path)) + + res['sourceSegmentStart'] = word_graph_arc.source_segment_start + + res['sourceSegmentEnd'] = word_graph_arc.source_segment_end + + res['alignment'] = to_jsonable( + word_graph_arc.alignment, + expected=[list, AlignedWordPair], + path='{}.alignment'.format(path)) + + res['sources'] = to_jsonable( + word_graph_arc.sources, + expected=[list, list, str], + path='{}.sources'.format(path)) + + return res + + +class SegmentPair: + def __init__( + self, + source_segment: str, + target_segment: str, + sentence_start: bool) -> None: + """Initializes with the given values.""" + self.source_segment = source_segment + + self.target_segment = target_segment + + self.sentence_start = sentence_start + + def to_jsonable(self) -> MutableMapping[str, Any]: + """ + Dispatches the conversion to segment_pair_to_jsonable. 
+ + :return: JSON-able representation + """ + return segment_pair_to_jsonable(self) + + +def new_segment_pair() -> SegmentPair: + """Generates an instance of SegmentPair with default values.""" + return SegmentPair( + source_segment='', + target_segment='', + sentence_start=False) + + +def segment_pair_from_obj(obj: Any, path: str = "") -> SegmentPair: + """ + Generates an instance of SegmentPair from a dictionary object. + + :param obj: a JSON-ed dictionary object representing an instance of SegmentPair + :param path: path to the object used for debugging + :return: parsed instance of SegmentPair + """ + if not isinstance(obj, dict): + raise ValueError('Expected a dict at path {}, but got: {}'.format(path, type(obj))) + + for key in obj: + if not isinstance(key, str): + raise ValueError( + 'Expected a key of type str at path {}, but got: {}'.format(path, type(key))) + + source_segment_from_obj = from_obj( + obj['sourceSegment'], + expected=[str], + path=path + '.sourceSegment') # type: str + + target_segment_from_obj = from_obj( + obj['targetSegment'], + expected=[str], + path=path + '.targetSegment') # type: str + + sentence_start_from_obj = from_obj( + obj['sentenceStart'], + expected=[bool], + path=path + '.sentenceStart') # type: bool + + return SegmentPair( + source_segment=source_segment_from_obj, + target_segment=target_segment_from_obj, + sentence_start=sentence_start_from_obj) + + +def segment_pair_to_jsonable( + segment_pair: SegmentPair, + path: str = "") -> MutableMapping[str, Any]: + """ + Generates a JSON-able mapping from an instance of SegmentPair. 
+ + :param segment_pair: instance of SegmentPair to be JSON-ized + :param path: path to the segment_pair used for debugging + :return: a JSON-able representation + """ + res = dict() # type: Dict[str, Any] + + res['sourceSegment'] = segment_pair.source_segment + + res['targetSegment'] = segment_pair.target_segment + + res['sentenceStart'] = segment_pair.sentence_start + + return res + + +class TranslationCorpus: + def __init__( + self, + id: str, + url: str, + engine: 'ResourceLink', + source_language: str, + target_language: str, + source_files: List['TranslationCorpusFile'], + target_files: List['TranslationCorpusFile'], + name: Optional[str] = None) -> None: + """Initializes with the given values.""" + self.id = id + + self.url = url + + self.engine = engine + + self.source_language = source_language + + self.target_language = target_language + + self.source_files = source_files + + self.target_files = target_files + + self.name = name + + def to_jsonable(self) -> MutableMapping[str, Any]: + """ + Dispatches the conversion to translation_corpus_to_jsonable. + + :return: JSON-able representation + """ + return translation_corpus_to_jsonable(self) + + +def new_translation_corpus() -> TranslationCorpus: + """Generates an instance of TranslationCorpus with default values.""" + return TranslationCorpus( + id='', + url='', + engine=new_resource_link__, + source_language='', + target_language='', + source_files=[], + target_files=[]) + + +def translation_corpus_from_obj(obj: Any, path: str = "") -> TranslationCorpus: + """ + Generates an instance of TranslationCorpus from a dictionary object. 
+ + :param obj: a JSON-ed dictionary object representing an instance of TranslationCorpus + :param path: path to the object used for debugging + :return: parsed instance of TranslationCorpus + """ + if not isinstance(obj, dict): + raise ValueError('Expected a dict at path {}, but got: {}'.format(path, type(obj))) + + for key in obj: + if not isinstance(key, str): + raise ValueError( + 'Expected a key of type str at path {}, but got: {}'.format(path, type(key))) + + id_from_obj = from_obj( + obj['id'], + expected=[str], + path=path + '.id') # type: str + + url_from_obj = from_obj( + obj['url'], + expected=[str], + path=path + '.url') # type: str + + engine_from_obj = from_obj( + obj['engine'], + expected=[ResourceLink], + path=path + '.engine') # type: 'ResourceLink' + + source_language_from_obj = from_obj( + obj['sourceLanguage'], + expected=[str], + path=path + '.sourceLanguage') # type: str + + target_language_from_obj = from_obj( + obj['targetLanguage'], + expected=[str], + path=path + '.targetLanguage') # type: str + + source_files_from_obj = from_obj( + obj['sourceFiles'], + expected=[list, TranslationCorpusFile], + path=path + '.sourceFiles') # type: List['TranslationCorpusFile'] + + target_files_from_obj = from_obj( + obj['targetFiles'], + expected=[list, TranslationCorpusFile], + path=path + '.targetFiles') # type: List['TranslationCorpusFile'] + + obj_name = obj.get('name', None) + if obj_name is not None: + name_from_obj = from_obj( + obj_name, + expected=[str], + path=path + '.name') # type: Optional[str] + else: + name_from_obj = None + + return TranslationCorpus( + id=id_from_obj, + url=url_from_obj, + engine=engine_from_obj, + source_language=source_language_from_obj, + target_language=target_language_from_obj, + source_files=source_files_from_obj, + target_files=target_files_from_obj, + name=name_from_obj) + + +def translation_corpus_to_jsonable( + translation_corpus: TranslationCorpus, + path: str = "") -> MutableMapping[str, Any]: + """ + Generates 
a JSON-able mapping from an instance of TranslationCorpus. + + :param translation_corpus: instance of TranslationCorpus to be JSON-ized + :param path: path to the translation_corpus used for debugging + :return: a JSON-able representation + """ + res = dict() # type: Dict[str, Any] + + res['id'] = translation_corpus.id + + res['url'] = translation_corpus.url + + res['engine'] = to_jsonable( + translation_corpus.engine, + expected=[ResourceLink], + path='{}.engine'.format(path)) + + res['sourceLanguage'] = translation_corpus.source_language + + res['targetLanguage'] = translation_corpus.target_language + + res['sourceFiles'] = to_jsonable( + translation_corpus.source_files, + expected=[list, TranslationCorpusFile], + path='{}.sourceFiles'.format(path)) + + res['targetFiles'] = to_jsonable( + translation_corpus.target_files, + expected=[list, TranslationCorpusFile], + path='{}.targetFiles'.format(path)) + + if translation_corpus.name is not None: + res['name'] = translation_corpus.name + + return res + + +class ResourceLink: + def __init__( + self, + id: str, + url: str) -> None: + """Initializes with the given values.""" + self.id = id + + self.url = url + + def to_jsonable(self) -> MutableMapping[str, Any]: + """ + Dispatches the conversion to resource_link_to_jsonable. + + :return: JSON-able representation + """ + return resource_link_to_jsonable(self) + + +def new_resource_link() -> ResourceLink: + """Generates an instance of ResourceLink with default values.""" + return ResourceLink( + id='', + url='') + + +def resource_link_from_obj(obj: Any, path: str = "") -> ResourceLink: + """ + Generates an instance of ResourceLink from a dictionary object. 
+ + :param obj: a JSON-ed dictionary object representing an instance of ResourceLink + :param path: path to the object used for debugging + :return: parsed instance of ResourceLink + """ + if not isinstance(obj, dict): + raise ValueError('Expected a dict at path {}, but got: {}'.format(path, type(obj))) + + for key in obj: + if not isinstance(key, str): + raise ValueError( + 'Expected a key of type str at path {}, but got: {}'.format(path, type(key))) + + id_from_obj = from_obj( + obj['id'], + expected=[str], + path=path + '.id') # type: str + + url_from_obj = from_obj( + obj['url'], + expected=[str], + path=path + '.url') # type: str + + return ResourceLink( + id=id_from_obj, + url=url_from_obj) + + +def resource_link_to_jsonable( + resource_link: ResourceLink, + path: str = "") -> MutableMapping[str, Any]: + """ + Generates a JSON-able mapping from an instance of ResourceLink. + + :param resource_link: instance of ResourceLink to be JSON-ized + :param path: path to the resource_link used for debugging + :return: a JSON-able representation + """ + res = dict() # type: Dict[str, Any] + + res['id'] = resource_link.id + + res['url'] = resource_link.url + + return res + + +class TranslationCorpusFile: + def __init__( + self, + file: 'ResourceLink', + text_id: Optional[str] = None) -> None: + """Initializes with the given values.""" + self.file = file + + self.text_id = text_id + + def to_jsonable(self) -> MutableMapping[str, Any]: + """ + Dispatches the conversion to translation_corpus_file_to_jsonable. + + :return: JSON-able representation + """ + return translation_corpus_file_to_jsonable(self) + + +def new_translation_corpus_file() -> TranslationCorpusFile: + """Generates an instance of TranslationCorpusFile with default values.""" + return TranslationCorpusFile( + file=new_resource_link__) + + +def translation_corpus_file_from_obj(obj: Any, path: str = "") -> TranslationCorpusFile: + """ + Generates an instance of TranslationCorpusFile from a dictionary object. 
+ + :param obj: a JSON-ed dictionary object representing an instance of TranslationCorpusFile + :param path: path to the object used for debugging + :return: parsed instance of TranslationCorpusFile + """ + if not isinstance(obj, dict): + raise ValueError('Expected a dict at path {}, but got: {}'.format(path, type(obj))) + + for key in obj: + if not isinstance(key, str): + raise ValueError( + 'Expected a key of type str at path {}, but got: {}'.format(path, type(key))) + + file_from_obj = from_obj( + obj['file'], + expected=[ResourceLink], + path=path + '.file') # type: 'ResourceLink' + + obj_text_id = obj.get('textId', None) + if obj_text_id is not None: + text_id_from_obj = from_obj( + obj_text_id, + expected=[str], + path=path + '.textId') # type: Optional[str] + else: + text_id_from_obj = None + + return TranslationCorpusFile( + file=file_from_obj, + text_id=text_id_from_obj) + + +def translation_corpus_file_to_jsonable( + translation_corpus_file: TranslationCorpusFile, + path: str = "") -> MutableMapping[str, Any]: + """ + Generates a JSON-able mapping from an instance of TranslationCorpusFile. 
+ + :param translation_corpus_file: instance of TranslationCorpusFile to be JSON-ized + :param path: path to the translation_corpus_file used for debugging + :return: a JSON-able representation + """ + res = dict() # type: Dict[str, Any] + + res['file'] = to_jsonable( + translation_corpus_file.file, + expected=[ResourceLink], + path='{}.file'.format(path)) + + if translation_corpus_file.text_id is not None: + res['textId'] = translation_corpus_file.text_id + + return res + + +class TranslationCorpusConfig: + def __init__( + self, + source_language: str, + target_language: str, + source_files: List['TranslationCorpusFileConfig'], + target_files: List['TranslationCorpusFileConfig'], + name: Optional[str] = None) -> None: + """Initializes with the given values.""" + self.source_language = source_language + + self.target_language = target_language + + self.source_files = source_files + + self.target_files = target_files + + # The corpus name. + self.name = name + + def to_jsonable(self) -> MutableMapping[str, Any]: + """ + Dispatches the conversion to translation_corpus_config_to_jsonable. + + :return: JSON-able representation + """ + return translation_corpus_config_to_jsonable(self) + + +def new_translation_corpus_config() -> TranslationCorpusConfig: + """Generates an instance of TranslationCorpusConfig with default values.""" + return TranslationCorpusConfig( + source_language='', + target_language='', + source_files=[], + target_files=[]) + + +def translation_corpus_config_from_obj(obj: Any, path: str = "") -> TranslationCorpusConfig: + """ + Generates an instance of TranslationCorpusConfig from a dictionary object. 
+ + :param obj: a JSON-ed dictionary object representing an instance of TranslationCorpusConfig + :param path: path to the object used for debugging + :return: parsed instance of TranslationCorpusConfig + """ + if not isinstance(obj, dict): + raise ValueError('Expected a dict at path {}, but got: {}'.format(path, type(obj))) + + for key in obj: + if not isinstance(key, str): + raise ValueError( + 'Expected a key of type str at path {}, but got: {}'.format(path, type(key))) + + source_language_from_obj = from_obj( + obj['sourceLanguage'], + expected=[str], + path=path + '.sourceLanguage') # type: str + + target_language_from_obj = from_obj( + obj['targetLanguage'], + expected=[str], + path=path + '.targetLanguage') # type: str + + source_files_from_obj = from_obj( + obj['sourceFiles'], + expected=[list, TranslationCorpusFileConfig], + path=path + '.sourceFiles') # type: List['TranslationCorpusFileConfig'] + + target_files_from_obj = from_obj( + obj['targetFiles'], + expected=[list, TranslationCorpusFileConfig], + path=path + '.targetFiles') # type: List['TranslationCorpusFileConfig'] + + obj_name = obj.get('name', None) + if obj_name is not None: + name_from_obj = from_obj( + obj_name, + expected=[str], + path=path + '.name') # type: Optional[str] + else: + name_from_obj = None + + return TranslationCorpusConfig( + source_language=source_language_from_obj, + target_language=target_language_from_obj, + source_files=source_files_from_obj, + target_files=target_files_from_obj, + name=name_from_obj) + + +def translation_corpus_config_to_jsonable( + translation_corpus_config: TranslationCorpusConfig, + path: str = "") -> MutableMapping[str, Any]: + """ + Generates a JSON-able mapping from an instance of TranslationCorpusConfig. 
+ + :param translation_corpus_config: instance of TranslationCorpusConfig to be JSON-ized + :param path: path to the translation_corpus_config used for debugging + :return: a JSON-able representation + """ + res = dict() # type: Dict[str, Any] + + res['sourceLanguage'] = translation_corpus_config.source_language + + res['targetLanguage'] = translation_corpus_config.target_language + + res['sourceFiles'] = to_jsonable( + translation_corpus_config.source_files, + expected=[list, TranslationCorpusFileConfig], + path='{}.sourceFiles'.format(path)) + + res['targetFiles'] = to_jsonable( + translation_corpus_config.target_files, + expected=[list, TranslationCorpusFileConfig], + path='{}.targetFiles'.format(path)) + + if translation_corpus_config.name is not None: + res['name'] = translation_corpus_config.name + + return res + + +class TranslationCorpusFileConfig: + def __init__( + self, + file_id: str, + text_id: Optional[str] = None) -> None: + """Initializes with the given values.""" + self.file_id = file_id + + self.text_id = text_id + + def to_jsonable(self) -> MutableMapping[str, Any]: + """ + Dispatches the conversion to translation_corpus_file_config_to_jsonable. + + :return: JSON-able representation + """ + return translation_corpus_file_config_to_jsonable(self) + + +def new_translation_corpus_file_config() -> TranslationCorpusFileConfig: + """Generates an instance of TranslationCorpusFileConfig with default values.""" + return TranslationCorpusFileConfig( + file_id='') + + +def translation_corpus_file_config_from_obj(obj: Any, path: str = "") -> TranslationCorpusFileConfig: + """ + Generates an instance of TranslationCorpusFileConfig from a dictionary object. 
+ + :param obj: a JSON-ed dictionary object representing an instance of TranslationCorpusFileConfig + :param path: path to the object used for debugging + :return: parsed instance of TranslationCorpusFileConfig + """ + if not isinstance(obj, dict): + raise ValueError('Expected a dict at path {}, but got: {}'.format(path, type(obj))) + + for key in obj: + if not isinstance(key, str): + raise ValueError( + 'Expected a key of type str at path {}, but got: {}'.format(path, type(key))) + + file_id_from_obj = from_obj( + obj['fileId'], + expected=[str], + path=path + '.fileId') # type: str + + obj_text_id = obj.get('textId', None) + if obj_text_id is not None: + text_id_from_obj = from_obj( + obj_text_id, + expected=[str], + path=path + '.textId') # type: Optional[str] + else: + text_id_from_obj = None + + return TranslationCorpusFileConfig( + file_id=file_id_from_obj, + text_id=text_id_from_obj) + + +def translation_corpus_file_config_to_jsonable( + translation_corpus_file_config: TranslationCorpusFileConfig, + path: str = "") -> MutableMapping[str, Any]: + """ + Generates a JSON-able mapping from an instance of TranslationCorpusFileConfig. 
+ + :param translation_corpus_file_config: instance of TranslationCorpusFileConfig to be JSON-ized + :param path: path to the translation_corpus_file_config used for debugging + :return: a JSON-able representation + """ + res = dict() # type: Dict[str, Any] + + res['fileId'] = translation_corpus_file_config.file_id + + if translation_corpus_file_config.text_id is not None: + res['textId'] = translation_corpus_file_config.text_id + + return res + + +class TranslationCorpusUpdateConfig: + def __init__( + self, + source_files: Optional[List['TranslationCorpusFileConfig']] = None, + target_files: Optional[List['TranslationCorpusFileConfig']] = None) -> None: + """Initializes with the given values.""" + self.source_files = source_files + + self.target_files = target_files + + def to_jsonable(self) -> MutableMapping[str, Any]: + """ + Dispatches the conversion to translation_corpus_update_config_to_jsonable. + + :return: JSON-able representation + """ + return translation_corpus_update_config_to_jsonable(self) + + +def new_translation_corpus_update_config() -> TranslationCorpusUpdateConfig: + """Generates an instance of TranslationCorpusUpdateConfig with default values.""" + return TranslationCorpusUpdateConfig() + + +def translation_corpus_update_config_from_obj(obj: Any, path: str = "") -> TranslationCorpusUpdateConfig: + """ + Generates an instance of TranslationCorpusUpdateConfig from a dictionary object. 
+ + :param obj: a JSON-ed dictionary object representing an instance of TranslationCorpusUpdateConfig + :param path: path to the object used for debugging + :return: parsed instance of TranslationCorpusUpdateConfig + """ + if not isinstance(obj, dict): + raise ValueError('Expected a dict at path {}, but got: {}'.format(path, type(obj))) + + for key in obj: + if not isinstance(key, str): + raise ValueError( + 'Expected a key of type str at path {}, but got: {}'.format(path, type(key))) + + obj_source_files = obj.get('sourceFiles', None) + if obj_source_files is not None: + source_files_from_obj = from_obj( + obj_source_files, + expected=[list, TranslationCorpusFileConfig], + path=path + '.sourceFiles') # type: Optional[List['TranslationCorpusFileConfig']] + else: + source_files_from_obj = None + + obj_target_files = obj.get('targetFiles', None) + if obj_target_files is not None: + target_files_from_obj = from_obj( + obj_target_files, + expected=[list, TranslationCorpusFileConfig], + path=path + '.targetFiles') # type: Optional[List['TranslationCorpusFileConfig']] + else: + target_files_from_obj = None + + return TranslationCorpusUpdateConfig( + source_files=source_files_from_obj, + target_files=target_files_from_obj) + + +def translation_corpus_update_config_to_jsonable( + translation_corpus_update_config: TranslationCorpusUpdateConfig, + path: str = "") -> MutableMapping[str, Any]: + """ + Generates a JSON-able mapping from an instance of TranslationCorpusUpdateConfig. 
+ + :param translation_corpus_update_config: instance of TranslationCorpusUpdateConfig to be JSON-ized + :param path: path to the translation_corpus_update_config used for debugging + :return: a JSON-able representation + """ + res = dict() # type: Dict[str, Any] + + if translation_corpus_update_config.source_files is not None: + res['sourceFiles'] = to_jsonable( + translation_corpus_update_config.source_files, + expected=[list, TranslationCorpusFileConfig], + path='{}.sourceFiles'.format(path)) + + if translation_corpus_update_config.target_files is not None: + res['targetFiles'] = to_jsonable( + translation_corpus_update_config.target_files, + expected=[list, TranslationCorpusFileConfig], + path='{}.targetFiles'.format(path)) + + return res + + +class Pretranslation: + def __init__( + self, + text_id: str, + refs: List[str], + translation: str) -> None: + """Initializes with the given values.""" + self.text_id = text_id + + self.refs = refs + + self.translation = translation + + def to_jsonable(self) -> MutableMapping[str, Any]: + """ + Dispatches the conversion to pretranslation_to_jsonable. + + :return: JSON-able representation + """ + return pretranslation_to_jsonable(self) + + +def new_pretranslation() -> Pretranslation: + """Generates an instance of Pretranslation with default values.""" + return Pretranslation( + text_id='', + refs=[], + translation='') + + +def pretranslation_from_obj(obj: Any, path: str = "") -> Pretranslation: + """ + Generates an instance of Pretranslation from a dictionary object. 
+ + :param obj: a JSON-ed dictionary object representing an instance of Pretranslation + :param path: path to the object used for debugging + :return: parsed instance of Pretranslation + """ + if not isinstance(obj, dict): + raise ValueError('Expected a dict at path {}, but got: {}'.format(path, type(obj))) + + for key in obj: + if not isinstance(key, str): + raise ValueError( + 'Expected a key of type str at path {}, but got: {}'.format(path, type(key))) + + text_id_from_obj = from_obj( + obj['textId'], + expected=[str], + path=path + '.textId') # type: str + + refs_from_obj = from_obj( + obj['refs'], + expected=[list, str], + path=path + '.refs') # type: List[str] + + translation_from_obj = from_obj( + obj['translation'], + expected=[str], + path=path + '.translation') # type: str + + return Pretranslation( + text_id=text_id_from_obj, + refs=refs_from_obj, + translation=translation_from_obj) + + +def pretranslation_to_jsonable( + pretranslation: Pretranslation, + path: str = "") -> MutableMapping[str, Any]: + """ + Generates a JSON-able mapping from an instance of Pretranslation. 
+
+    :param pretranslation: instance of Pretranslation to be JSON-ized
+    :param path: path to the pretranslation used for debugging
+    :return: a JSON-able representation
+    """
+    res = dict()  # type: Dict[str, Any]
+
+    res['textId'] = pretranslation.text_id
+
+    res['refs'] = to_jsonable(
+        pretranslation.refs,
+        expected=[list, str],
+        path='{}.refs'.format(path))
+
+    res['translation'] = pretranslation.translation
+
+    return res
+
+
+class TranslationBuild:
+    def __init__(
+            self,
+            id: str,
+            url: str,
+            revision: int,
+            engine: 'ResourceLink',
+            step: int,
+            state: str,
+            name: Optional[str] = None,
+            pretranslate: Optional[List['PretranslateCorpus']] = None,
+            percent_completed: Optional[float] = None,
+            message: Optional[str] = None,
+            date_finished: Optional[str] = None) -> None:
+        """Initializes with the given values."""
+        self.id = id
+
+        self.url = url
+
+        self.revision = revision
+
+        self.engine = engine
+
+        self.step = step
+
+        # The current build job state.
+        self.state = state
+
+        self.name = name
+
+        self.pretranslate = pretranslate
+
+        self.percent_completed = percent_completed
+
+        self.message = message
+
+        self.date_finished = date_finished
+
+    def to_jsonable(self) -> MutableMapping[str, Any]:
+        """
+        Dispatches the conversion to translation_build_to_jsonable.
+
+        :return: JSON-able representation
+        """
+        return translation_build_to_jsonable(self)
+
+
+def new_translation_build() -> TranslationBuild:
+    """Generates an instance of TranslationBuild with default values."""
+    return TranslationBuild(
+        id='',
+        url='',
+        revision=0,
+        # Call the factory so the default is a ResourceLink *instance*, not the
+        # factory function itself (must match the declared type of `engine`).
+        engine=new_resource_link__(),
+        step=0,
+        state='')
+
+
+def translation_build_from_obj(obj: Any, path: str = "") -> TranslationBuild:
+    """
+    Generates an instance of TranslationBuild from a dictionary object.
+ + :param obj: a JSON-ed dictionary object representing an instance of TranslationBuild + :param path: path to the object used for debugging + :return: parsed instance of TranslationBuild + """ + if not isinstance(obj, dict): + raise ValueError('Expected a dict at path {}, but got: {}'.format(path, type(obj))) + + for key in obj: + if not isinstance(key, str): + raise ValueError( + 'Expected a key of type str at path {}, but got: {}'.format(path, type(key))) + + id_from_obj = from_obj( + obj['id'], + expected=[str], + path=path + '.id') # type: str + + url_from_obj = from_obj( + obj['url'], + expected=[str], + path=path + '.url') # type: str + + revision_from_obj = from_obj( + obj['revision'], + expected=[int], + path=path + '.revision') # type: int + + engine_from_obj = from_obj( + obj['engine'], + expected=[ResourceLink], + path=path + '.engine') # type: 'ResourceLink' + + step_from_obj = from_obj( + obj['step'], + expected=[int], + path=path + '.step') # type: int + + state_from_obj = from_obj( + obj['state'], + expected=[str], + path=path + '.state') # type: str + + obj_name = obj.get('name', None) + if obj_name is not None: + name_from_obj = from_obj( + obj_name, + expected=[str], + path=path + '.name') # type: Optional[str] + else: + name_from_obj = None + + obj_pretranslate = obj.get('pretranslate', None) + if obj_pretranslate is not None: + pretranslate_from_obj = from_obj( + obj_pretranslate, + expected=[list, PretranslateCorpus], + path=path + '.pretranslate') # type: Optional[List['PretranslateCorpus']] + else: + pretranslate_from_obj = None + + obj_percent_completed = obj.get('percentCompleted', None) + if obj_percent_completed is not None: + percent_completed_from_obj = from_obj( + obj_percent_completed, + expected=[float], + path=path + '.percentCompleted') # type: Optional[float] + else: + percent_completed_from_obj = None + + obj_message = obj.get('message', None) + if obj_message is not None: + message_from_obj = from_obj( + obj_message, + 
expected=[str], + path=path + '.message') # type: Optional[str] + else: + message_from_obj = None + + obj_date_finished = obj.get('dateFinished', None) + if obj_date_finished is not None: + date_finished_from_obj = from_obj( + obj_date_finished, + expected=[str], + path=path + '.dateFinished') # type: Optional[str] + else: + date_finished_from_obj = None + + return TranslationBuild( + id=id_from_obj, + url=url_from_obj, + revision=revision_from_obj, + engine=engine_from_obj, + step=step_from_obj, + state=state_from_obj, + name=name_from_obj, + pretranslate=pretranslate_from_obj, + percent_completed=percent_completed_from_obj, + message=message_from_obj, + date_finished=date_finished_from_obj) + + +def translation_build_to_jsonable( + translation_build: TranslationBuild, + path: str = "") -> MutableMapping[str, Any]: + """ + Generates a JSON-able mapping from an instance of TranslationBuild. + + :param translation_build: instance of TranslationBuild to be JSON-ized + :param path: path to the translation_build used for debugging + :return: a JSON-able representation + """ + res = dict() # type: Dict[str, Any] + + res['id'] = translation_build.id + + res['url'] = translation_build.url + + res['revision'] = translation_build.revision + + res['engine'] = to_jsonable( + translation_build.engine, + expected=[ResourceLink], + path='{}.engine'.format(path)) + + res['step'] = translation_build.step + + res['state'] = translation_build.state + + if translation_build.name is not None: + res['name'] = translation_build.name + + if translation_build.pretranslate is not None: + res['pretranslate'] = to_jsonable( + translation_build.pretranslate, + expected=[list, PretranslateCorpus], + path='{}.pretranslate'.format(path)) + + if translation_build.percent_completed is not None: + res['percentCompleted'] = translation_build.percent_completed + + if translation_build.message is not None: + res['message'] = translation_build.message + + if translation_build.date_finished is not None: 
+        res['dateFinished'] = translation_build.date_finished
+
+    return res
+
+
+class PretranslateCorpus:
+    def __init__(
+            self,
+            corpus: 'ResourceLink',
+            text_ids: Optional[List[str]] = None) -> None:
+        """Initializes with the given values."""
+        self.corpus = corpus
+
+        self.text_ids = text_ids
+
+    def to_jsonable(self) -> MutableMapping[str, Any]:
+        """
+        Dispatches the conversion to pretranslate_corpus_to_jsonable.
+
+        :return: JSON-able representation
+        """
+        return pretranslate_corpus_to_jsonable(self)
+
+
+def new_pretranslate_corpus() -> PretranslateCorpus:
+    """Generates an instance of PretranslateCorpus with default values."""
+    return PretranslateCorpus(
+        # Call the factory so the default is a ResourceLink *instance*, not the
+        # factory function itself (must match the declared type of `corpus`).
+        corpus=new_resource_link__())
+
+
+def pretranslate_corpus_from_obj(obj: Any, path: str = "") -> PretranslateCorpus:
+    """
+    Generates an instance of PretranslateCorpus from a dictionary object.
+
+    :param obj: a JSON-ed dictionary object representing an instance of PretranslateCorpus
+    :param path: path to the object used for debugging
+    :return: parsed instance of PretranslateCorpus
+    """
+    if not isinstance(obj, dict):
+        raise ValueError('Expected a dict at path {}, but got: {}'.format(path, type(obj)))
+
+    for key in obj:
+        if not isinstance(key, str):
+            raise ValueError(
+                'Expected a key of type str at path {}, but got: {}'.format(path, type(key)))
+
+    corpus_from_obj = from_obj(
+        obj['corpus'],
+        expected=[ResourceLink],
+        path=path + '.corpus')  # type: 'ResourceLink'
+
+    obj_text_ids = obj.get('textIds', None)
+    if obj_text_ids is not None:
+        text_ids_from_obj = from_obj(
+            obj_text_ids,
+            expected=[list, str],
+            path=path + '.textIds')  # type: Optional[List[str]]
+    else:
+        text_ids_from_obj = None
+
+    return PretranslateCorpus(
+        corpus=corpus_from_obj,
+        text_ids=text_ids_from_obj)
+
+
+def pretranslate_corpus_to_jsonable(
+        pretranslate_corpus: PretranslateCorpus,
+        path: str = "") -> MutableMapping[str, Any]:
+    """
+    Generates a JSON-able mapping from an instance of PretranslateCorpus.
+ + :param pretranslate_corpus: instance of PretranslateCorpus to be JSON-ized + :param path: path to the pretranslate_corpus used for debugging + :return: a JSON-able representation + """ + res = dict() # type: Dict[str, Any] + + res['corpus'] = to_jsonable( + pretranslate_corpus.corpus, + expected=[ResourceLink], + path='{}.corpus'.format(path)) + + if pretranslate_corpus.text_ids is not None: + res['textIds'] = to_jsonable( + pretranslate_corpus.text_ids, + expected=[list, str], + path='{}.textIds'.format(path)) + + return res + + +class TranslationBuildConfig: + def __init__( + self, + name: Optional[str] = None, + pretranslate: Optional[List['PretranslateCorpusConfig']] = None) -> None: + """Initializes with the given values.""" + self.name = name + + self.pretranslate = pretranslate + + def to_jsonable(self) -> MutableMapping[str, Any]: + """ + Dispatches the conversion to translation_build_config_to_jsonable. + + :return: JSON-able representation + """ + return translation_build_config_to_jsonable(self) + + +def new_translation_build_config() -> TranslationBuildConfig: + """Generates an instance of TranslationBuildConfig with default values.""" + return TranslationBuildConfig() + + +def translation_build_config_from_obj(obj: Any, path: str = "") -> TranslationBuildConfig: + """ + Generates an instance of TranslationBuildConfig from a dictionary object. 
+ + :param obj: a JSON-ed dictionary object representing an instance of TranslationBuildConfig + :param path: path to the object used for debugging + :return: parsed instance of TranslationBuildConfig + """ + if not isinstance(obj, dict): + raise ValueError('Expected a dict at path {}, but got: {}'.format(path, type(obj))) + + for key in obj: + if not isinstance(key, str): + raise ValueError( + 'Expected a key of type str at path {}, but got: {}'.format(path, type(key))) + + obj_name = obj.get('name', None) + if obj_name is not None: + name_from_obj = from_obj( + obj_name, + expected=[str], + path=path + '.name') # type: Optional[str] + else: + name_from_obj = None + + obj_pretranslate = obj.get('pretranslate', None) + if obj_pretranslate is not None: + pretranslate_from_obj = from_obj( + obj_pretranslate, + expected=[list, PretranslateCorpusConfig], + path=path + '.pretranslate') # type: Optional[List['PretranslateCorpusConfig']] + else: + pretranslate_from_obj = None + + return TranslationBuildConfig( + name=name_from_obj, + pretranslate=pretranslate_from_obj) + + +def translation_build_config_to_jsonable( + translation_build_config: TranslationBuildConfig, + path: str = "") -> MutableMapping[str, Any]: + """ + Generates a JSON-able mapping from an instance of TranslationBuildConfig. 
+ + :param translation_build_config: instance of TranslationBuildConfig to be JSON-ized + :param path: path to the translation_build_config used for debugging + :return: a JSON-able representation + """ + res = dict() # type: Dict[str, Any] + + if translation_build_config.name is not None: + res['name'] = translation_build_config.name + + if translation_build_config.pretranslate is not None: + res['pretranslate'] = to_jsonable( + translation_build_config.pretranslate, + expected=[list, PretranslateCorpusConfig], + path='{}.pretranslate'.format(path)) + + return res + + +class PretranslateCorpusConfig: + def __init__( + self, + corpus_id: str, + text_ids: Optional[List[str]] = None) -> None: + """Initializes with the given values.""" + self.corpus_id = corpus_id + + self.text_ids = text_ids + + def to_jsonable(self) -> MutableMapping[str, Any]: + """ + Dispatches the conversion to pretranslate_corpus_config_to_jsonable. + + :return: JSON-able representation + """ + return pretranslate_corpus_config_to_jsonable(self) + + +def new_pretranslate_corpus_config() -> PretranslateCorpusConfig: + """Generates an instance of PretranslateCorpusConfig with default values.""" + return PretranslateCorpusConfig( + corpus_id='') + + +def pretranslate_corpus_config_from_obj(obj: Any, path: str = "") -> PretranslateCorpusConfig: + """ + Generates an instance of PretranslateCorpusConfig from a dictionary object. 
+ + :param obj: a JSON-ed dictionary object representing an instance of PretranslateCorpusConfig + :param path: path to the object used for debugging + :return: parsed instance of PretranslateCorpusConfig + """ + if not isinstance(obj, dict): + raise ValueError('Expected a dict at path {}, but got: {}'.format(path, type(obj))) + + for key in obj: + if not isinstance(key, str): + raise ValueError( + 'Expected a key of type str at path {}, but got: {}'.format(path, type(key))) + + corpus_id_from_obj = from_obj( + obj['corpusId'], + expected=[str], + path=path + '.corpusId') # type: str + + obj_text_ids = obj.get('textIds', None) + if obj_text_ids is not None: + text_ids_from_obj = from_obj( + obj_text_ids, + expected=[list, str], + path=path + '.textIds') # type: Optional[List[str]] + else: + text_ids_from_obj = None + + return PretranslateCorpusConfig( + corpus_id=corpus_id_from_obj, + text_ids=text_ids_from_obj) + + +def pretranslate_corpus_config_to_jsonable( + pretranslate_corpus_config: PretranslateCorpusConfig, + path: str = "") -> MutableMapping[str, Any]: + """ + Generates a JSON-able mapping from an instance of PretranslateCorpusConfig. 
+ + :param pretranslate_corpus_config: instance of PretranslateCorpusConfig to be JSON-ized + :param path: path to the pretranslate_corpus_config used for debugging + :return: a JSON-able representation + """ + res = dict() # type: Dict[str, Any] + + res['corpusId'] = pretranslate_corpus_config.corpus_id + + if pretranslate_corpus_config.text_ids is not None: + res['textIds'] = to_jsonable( + pretranslate_corpus_config.text_ids, + expected=[list, str], + path='{}.textIds'.format(path)) + + return res + + +class Webhook: + def __init__( + self, + id: str, + url: str, + payload_url: str, + events: List[str]) -> None: + """Initializes with the given values.""" + self.id = id + + self.url = url + + self.payload_url = payload_url + + self.events = events + + def to_jsonable(self) -> MutableMapping[str, Any]: + """ + Dispatches the conversion to webhook_to_jsonable. + + :return: JSON-able representation + """ + return webhook_to_jsonable(self) + + +def new_webhook() -> Webhook: + """Generates an instance of Webhook with default values.""" + return Webhook( + id='', + url='', + payload_url='', + events=[]) + + +def webhook_from_obj(obj: Any, path: str = "") -> Webhook: + """ + Generates an instance of Webhook from a dictionary object. 
+ + :param obj: a JSON-ed dictionary object representing an instance of Webhook + :param path: path to the object used for debugging + :return: parsed instance of Webhook + """ + if not isinstance(obj, dict): + raise ValueError('Expected a dict at path {}, but got: {}'.format(path, type(obj))) + + for key in obj: + if not isinstance(key, str): + raise ValueError( + 'Expected a key of type str at path {}, but got: {}'.format(path, type(key))) + + id_from_obj = from_obj( + obj['id'], + expected=[str], + path=path + '.id') # type: str + + url_from_obj = from_obj( + obj['url'], + expected=[str], + path=path + '.url') # type: str + + payload_url_from_obj = from_obj( + obj['payloadUrl'], + expected=[str], + path=path + '.payloadUrl') # type: str + + events_from_obj = from_obj( + obj['events'], + expected=[list, str], + path=path + '.events') # type: List[str] + + return Webhook( + id=id_from_obj, + url=url_from_obj, + payload_url=payload_url_from_obj, + events=events_from_obj) + + +def webhook_to_jsonable( + webhook: Webhook, + path: str = "") -> MutableMapping[str, Any]: + """ + Generates a JSON-able mapping from an instance of Webhook. + + :param webhook: instance of Webhook to be JSON-ized + :param path: path to the webhook used for debugging + :return: a JSON-able representation + """ + res = dict() # type: Dict[str, Any] + + res['id'] = webhook.id + + res['url'] = webhook.url + + res['payloadUrl'] = webhook.payload_url + + res['events'] = to_jsonable( + webhook.events, + expected=[list, str], + path='{}.events'.format(path)) + + return res + + +class WebhookConfig: + def __init__( + self, + payload_url: str, + secret: str, + events: List[str]) -> None: + """Initializes with the given values.""" + # The payload URL. + self.payload_url = payload_url + + # The shared secret. + self.secret = secret + + # The webhook events. + self.events = events + + def to_jsonable(self) -> MutableMapping[str, Any]: + """ + Dispatches the conversion to webhook_config_to_jsonable. 
+ + :return: JSON-able representation + """ + return webhook_config_to_jsonable(self) + + +def new_webhook_config() -> WebhookConfig: + """Generates an instance of WebhookConfig with default values.""" + return WebhookConfig( + payload_url='', + secret='', + events=[]) + + +def webhook_config_from_obj(obj: Any, path: str = "") -> WebhookConfig: + """ + Generates an instance of WebhookConfig from a dictionary object. + + :param obj: a JSON-ed dictionary object representing an instance of WebhookConfig + :param path: path to the object used for debugging + :return: parsed instance of WebhookConfig + """ + if not isinstance(obj, dict): + raise ValueError('Expected a dict at path {}, but got: {}'.format(path, type(obj))) + + for key in obj: + if not isinstance(key, str): + raise ValueError( + 'Expected a key of type str at path {}, but got: {}'.format(path, type(key))) + + payload_url_from_obj = from_obj( + obj['payloadUrl'], + expected=[str], + path=path + '.payloadUrl') # type: str + + secret_from_obj = from_obj( + obj['secret'], + expected=[str], + path=path + '.secret') # type: str + + events_from_obj = from_obj( + obj['events'], + expected=[list, str], + path=path + '.events') # type: List[str] + + return WebhookConfig( + payload_url=payload_url_from_obj, + secret=secret_from_obj, + events=events_from_obj) + + +def webhook_config_to_jsonable( + webhook_config: WebhookConfig, + path: str = "") -> MutableMapping[str, Any]: + """ + Generates a JSON-able mapping from an instance of WebhookConfig. 
+ + :param webhook_config: instance of WebhookConfig to be JSON-ized + :param path: path to the webhook_config used for debugging + :return: a JSON-able representation + """ + res = dict() # type: Dict[str, Any] + + res['payloadUrl'] = webhook_config.payload_url + + res['secret'] = webhook_config.secret + + res['events'] = to_jsonable( + webhook_config.events, + expected=[list, str], + path='{}.events'.format(path)) + + return res + + +class RemoteCaller: + """Executes the remote calls to the server.""" + + def __init__( + self, + url_prefix: str, + auth: Optional[requests.auth.AuthBase] = None, + session: Optional[requests.Session] = None) -> None: + self.url_prefix = url_prefix + self.auth = auth + self.session = session + + if not self.session: + self.session = requests.Session() + self.session.auth = self.auth + + def data_files_get_all(self) -> List['DataFile']: + """ + Send a get request to /api/v1/files. + + :return: A list of all files owned by the client + """ + url = self.url_prefix + '/api/v1/files' + + resp = self.session.request(method='get', url=url) + + with contextlib.closing(resp): + resp.raise_for_status() + return from_obj( + obj=resp.json(), + expected=[list, DataFile]) + + def data_files_create( + self, + file: BinaryIO, + format: str, + name: Optional[str] = None) -> bytes: + """ + Sample request: + + POST /files + { + "format": "text", + "name": "myTeam:myProject:myFile.txt" + } + + :param file: The file to upload. Max size: 100MB + :param format: + File format options: + * **Text**: One translation unit (a.k.a., verse) per line + * If there is a tab, the content before the tab is the unique identifier for the line + * Otherwise, no tabs should be used in the file. + * **Paratext**: A complete, zipped Paratext project backup: that is, a .zip archive of files including the USFM files and "Settings.xml" file. To generate a zipped backup for a project in Paratext, navigate to "Paratext/Advanced/Backup project to file..." and follow the dialogue. 
+ :param name: + A name to help identify and distinguish the file. + Recommendation: Create a multi-part name to distinguish between projects, uses, languages, etc. + The name does not have to be unique. + Example: myTranslationTeam:myProject:myLanguage:myFile.txt + + :return: + """ + url = self.url_prefix + '/api/v1/files' + + data = {} # type: Dict[str, str] + + data['format'] = format + + if name is not None: + data['name'] = name + + files = {} # type: Dict[str, BinaryIO] + + files['file'] = file + + resp = self.session.request( + method='post', + url=url, + data=data, + files=files, + ) + + with contextlib.closing(resp): + resp.raise_for_status() + return resp.content + + def data_files_get( + self, + id: str) -> 'DataFile': + """ + Send a get request to /api/v1/files/{id}. + + :param id: The unique identifier for the file + + :return: The file exists + """ + url = "".join([ + self.url_prefix, + '/api/v1/files/', + str(id)]) + + resp = self.session.request( + method='get', + url=url, + ) + + with contextlib.closing(resp): + resp.raise_for_status() + return from_obj( + obj=resp.json(), + expected=[DataFile]) + + def data_files_update( + self, + id: str, + file: BinaryIO) -> 'DataFile': + """ + Send a patch request to /api/v1/files/{id}. + + :param id: The existing file's unique id + :param file: The updated file + + :return: The file was updated successfully + """ + url = "".join([ + self.url_prefix, + '/api/v1/files/', + str(id)]) + + files = {} # type: Dict[str, BinaryIO] + + files['file'] = file + + resp = self.session.request( + method='patch', + url=url, + files=files, + ) + + with contextlib.closing(resp): + resp.raise_for_status() + return from_obj( + obj=resp.json(), + expected=[DataFile]) + + def data_files_delete( + self, + id: str) -> bytes: + """ + If a file is in a corpora and the file is deleted, it will be automatically removed from the corpora. 
+ If a build job has started before the file was deleted, the file will be used for the build job, even + though it will no longer be accessible through the API. + + :param id: The existing file's unique id + + :return: The file was deleted successfully + """ + url = "".join([ + self.url_prefix, + '/api/v1/files/', + str(id)]) + + resp = self.session.request( + method='delete', + url=url, + ) + + with contextlib.closing(resp): + resp.raise_for_status() + return resp.content + + def translation_engines_get_all(self) -> List['TranslationEngine']: + """ + Send a get request to /api/v1/translation/engines. + + :return: The engines + """ + url = self.url_prefix + '/api/v1/translation/engines' + + resp = self.session.request(method='get', url=url) + + with contextlib.closing(resp): + resp.raise_for_status() + return from_obj( + obj=resp.json(), + expected=[list, TranslationEngine]) + + def translation_engines_create( + self, + engine_config: 'TranslationEngineConfig') -> bytes: + """ + ## Parameters + * **name**: A name to help identify and distinguish the file. + * Recommendation: Create a multi-part name to distinguish between projects, uses, etc. + * The name does not have to be unique, as the engine is uniquely identified by the auto-generated id + * **sourceLanguage**: The source language code (a valid [IETF language tag](https://en.wikipedia.org/wiki/IETF_language_tag) is recommended) + * **targetLanguage**: The target language code (a valid IETF language tag is recommended) + * **type**: **SmtTransfer** or **Nmt** or **Echo** + ### SmtTransfer + The Statistical Machine Translation Transfer Learning engine is primarily used for translation suggestions. + Typical endpoints: translate, get-word-graph, train-segment + ### Nmt + The Neural Machine Translation engine is primarily used for pretranslations. It is + fine tuned from the NLLB-200 from Meta and inherits the 200 language codes. 
Valid IETF language tags will be converted to an [NLLB-200 code](https://github.com/facebookresearch/flores/tree/main/flores200#languages-in-flores-200), and NLLB will be used as-is. + Typical endpoints: pretranslate + ### Echo + The Echo engine has full coverage of all Nmt and SmtTransfer endpoints. Endpoints like create and build + return empty responses. Endpoints like translate and get-word-graph echo the sent content back to the user + in a format that mocks Nmt or Smt. For example, translating a segment "test" with the Echo engine would + yield a translation response with translation "test". This engine is useful for debugging and testing purposes. + ## Sample request: + + { + "name": "myTeam:myProject:myEngine", + "sourceLanguage": "el", + "targetLanguage": "en", + "type": "Nmt" + } + + :param engine_config: The translation engine configuration (see above) + + :return: + """ + url = self.url_prefix + '/api/v1/translation/engines' + + data = to_jsonable( + engine_config, + expected=[TranslationEngineConfig]) + + + resp = self.session.request( + method='post', + url=url, + json=data, + ) + + with contextlib.closing(resp): + resp.raise_for_status() + return resp.content + + def translation_engines_get( + self, + id: str) -> 'TranslationEngine': + """ + Send a get request to /api/v1/translation/engines/{id}. + + :param id: The translation engine id + + :return: The translation engine + """ + url = "".join([ + self.url_prefix, + '/api/v1/translation/engines/', + str(id)]) + + resp = self.session.request( + method='get', + url=url, + ) + + with contextlib.closing(resp): + resp.raise_for_status() + return from_obj( + obj=resp.json(), + expected=[TranslationEngine]) + + def translation_engines_delete( + self, + id: str) -> bytes: + """ + Send a delete request to /api/v1/translation/engines/{id}. 
+ + :param id: The translation engine id + + :return: The engine was successfully deleted + """ + url = "".join([ + self.url_prefix, + '/api/v1/translation/engines/', + str(id)]) + + resp = self.session.request( + method='delete', + url=url, + ) + + with contextlib.closing(resp): + resp.raise_for_status() + return resp.content + + def translation_engines_translate( + self, + id: str, + segment: str) -> 'TranslationResult': + """ + Send a post request to /api/v1/translation/engines/{id}/translate. + + :param id: The translation engine id + :param segment: The source segment + + :return: The translation result + """ + url = "".join([ + self.url_prefix, + '/api/v1/translation/engines/', + str(id), + '/translate']) + + data = segment + + + resp = self.session.request( + method='post', + url=url, + json=data, + ) + + with contextlib.closing(resp): + resp.raise_for_status() + return from_obj( + obj=resp.json(), + expected=[TranslationResult]) + + def translation_engines_translate_n( + self, + id: str, + n: int, + segment: str) -> List['TranslationResult']: + """ + Send a post request to /api/v1/translation/engines/{id}/translate/{n}. + + :param id: The translation engine id + :param n: The number of translations to generate + :param segment: The source segment + + :return: The translation results + """ + url = "".join([ + self.url_prefix, + '/api/v1/translation/engines/', + str(id), + '/translate/', + str(n)]) + + data = segment + + + resp = self.session.request( + method='post', + url=url, + json=data, + ) + + with contextlib.closing(resp): + resp.raise_for_status() + return from_obj( + obj=resp.json(), + expected=[list, TranslationResult]) + + def translation_engines_get_word_graph( + self, + id: str, + segment: str) -> 'WordGraph': + """ + Send a post request to /api/v1/translation/engines/{id}/get-word-graph. 
+ + :param id: The translation engine id + :param segment: The source segment + + :return: The word graph result + """ + url = "".join([ + self.url_prefix, + '/api/v1/translation/engines/', + str(id), + '/get-word-graph']) + + data = segment + + + resp = self.session.request( + method='post', + url=url, + json=data, + ) + + with contextlib.closing(resp): + resp.raise_for_status() + return from_obj( + obj=resp.json(), + expected=[WordGraph]) + + def translation_engines_train_segment( + self, + id: str, + segment_pair: 'SegmentPair') -> bytes: + """ + What does `SentenceStart` do? + + :param id: The translation engine id + :param segment_pair: The segment pair + + :return: The engine was trained successfully + """ + url = "".join([ + self.url_prefix, + '/api/v1/translation/engines/', + str(id), + '/train-segment']) + + data = to_jsonable( + segment_pair, + expected=[SegmentPair]) + + + resp = self.session.request( + method='post', + url=url, + json=data, + ) + + with contextlib.closing(resp): + resp.raise_for_status() + return resp.content + + def translation_engines_add_corpus( + self, + id: str, + corpus_config: 'TranslationCorpusConfig') -> bytes: + """ + ## Parameters + * **name**: A name to help identify and distinguish the corpus from other corpora + * The name does not have to be unique since the corpus is uniquely identified by an auto-generated id + * **sourceLanguage**: The source language code + * Normally, this is the same as the engine sourceLanguage. This may change for future engines as a means of transfer learning. + * **targetLanguage**: The target language code + * **SourceFiles**: The source files associated with the corpus + * **FileId**: The unique id referencing the uploaded file + * **TextId**: The client-defined name to associate source and target files. + * If the TextIds in the SourceFiles and TargetFiles match, they will be used to train the engine. 
+ * If selected for pretranslation when building, all SourceFiles that have no TargetFile, or lines of text in a SourceFile that have missing or blank lines in the TargetFile will be pretranslated. + * A TextId should only be used at most once in SourceFiles and in TargetFiles. + * If the file is a Paratext project, this field should be left blank. Any TextId provided will be ignored. + * **TargetFiles**: The source files associated with the corpus + * Same as SourceFiles. Parallel texts must have a matching TextId. + + :param id: The translation engine id + :param corpus_config: The corpus configuration (see remarks) + + :return: + """ + url = "".join([ + self.url_prefix, + '/api/v1/translation/engines/', + str(id), + '/corpora']) + + data = to_jsonable( + corpus_config, + expected=[TranslationCorpusConfig]) + + + resp = self.session.request( + method='post', + url=url, + json=data, + ) + + with contextlib.closing(resp): + resp.raise_for_status() + return resp.content + + def translation_engines_get_all_corpora( + self, + id: str) -> List['TranslationCorpus']: + """ + Send a get request to /api/v1/translation/engines/{id}/corpora. + + :param id: The translation engine id + + :return: The files + """ + url = "".join([ + self.url_prefix, + '/api/v1/translation/engines/', + str(id), + '/corpora']) + + resp = self.session.request( + method='get', + url=url, + ) + + with contextlib.closing(resp): + resp.raise_for_status() + return from_obj( + obj=resp.json(), + expected=[list, TranslationCorpus]) + + def translation_engines_update_corpus( + self, + id: str, + corpus_id: str, + corpus_config: 'TranslationCorpusUpdateConfig') -> 'TranslationCorpus': + """ + See posting a new corpus for details of use. Will completely replace corpus' file associations. + Will not affect jobs already queued or running. Will not affect existing pretranslations until new build is complete. 
+ + :param id: The translation engine id + :param corpus_id: The corpus id + :param corpus_config: The corpus configuration + + :return: The corpus was updated successfully + """ + url = "".join([ + self.url_prefix, + '/api/v1/translation/engines/', + str(id), + '/corpora/', + str(corpus_id)]) + + data = to_jsonable( + corpus_config, + expected=[TranslationCorpusUpdateConfig]) + + + resp = self.session.request( + method='patch', + url=url, + json=data, + ) + + with contextlib.closing(resp): + resp.raise_for_status() + return from_obj( + obj=resp.json(), + expected=[TranslationCorpus]) + + def translation_engines_get_corpus( + self, + id: str, + corpus_id: str) -> 'TranslationCorpus': + """ + Send a get request to /api/v1/translation/engines/{id}/corpora/{corpusId}. + + :param id: The translation engine id + :param corpus_id: The corpus id + + :return: The corpus configuration + """ + url = "".join([ + self.url_prefix, + '/api/v1/translation/engines/', + str(id), + '/corpora/', + str(corpus_id)]) + + resp = self.session.request( + method='get', + url=url, + ) + + with contextlib.closing(resp): + resp.raise_for_status() + return from_obj( + obj=resp.json(), + expected=[TranslationCorpus]) + + def translation_engines_delete_corpus( + self, + id: str, + corpus_id: str) -> bytes: + """ + Removing a corpus will remove all pretranslations associated with that corpus. 
+ + :param id: The translation engine id + :param corpus_id: The corpus id + + :return: The data file was deleted successfully + """ + url = "".join([ + self.url_prefix, + '/api/v1/translation/engines/', + str(id), + '/corpora/', + str(corpus_id)]) + + resp = self.session.request( + method='delete', + url=url, + ) + + with contextlib.closing(resp): + resp.raise_for_status() + return resp.content + + def translation_engines_get_all_pretranslations( + self, + id: str, + corpus_id: str, + text_id: Optional[str] = None) -> List['Pretranslation']: + """ + Pretranslations are arranged in a list of dictionaries with the following fields per pretranslation: + * **TextId**: The TextId of the SourceFile defined when the corpus was created. + * **Refs** (a list of strings): A list of references including: + * The references defined in the SourceFile per line, if any. + * An auto-generated reference of `[TextId]:[lineNumber]`, 1 indexed. + * **Translation**: the text of the pretranslation + + Pretranslations can be filtered by text id if provided. + + :param id: The translation engine id + :param corpus_id: The corpus id + :param text_id: The text id (optional) + + :return: The pretranslations + """ + url = "".join([ + self.url_prefix, + '/api/v1/translation/engines/', + str(id), + '/corpora/', + str(corpus_id), + '/pretranslations']) + + params = {} # type: Dict[str, str] + + if text_id is not None: + params['textId'] = text_id + + resp = self.session.request( + method='get', + url=url, + params=params, + ) + + with contextlib.closing(resp): + resp.raise_for_status() + return from_obj( + obj=resp.json(), + expected=[list, Pretranslation]) + + def translation_engines_get_all_builds( + self, + id: str) -> List['TranslationBuild']: + """ + Send a get request to /api/v1/translation/engines/{id}/builds. 
+ + :param id: The translation engine id + + :return: The build jobs + """ + url = "".join([ + self.url_prefix, + '/api/v1/translation/engines/', + str(id), + '/builds']) + + resp = self.session.request( + method='get', + url=url, + ) + + with contextlib.closing(resp): + resp.raise_for_status() + return from_obj( + obj=resp.json(), + expected=[list, TranslationBuild]) + + def translation_engines_start_build( + self, + id: str, + build_config: 'TranslationBuildConfig') -> bytes: + """ + Specify the corpora or textIds to pretranslate. Even when a corpus or textId + is selected for pretranslation, only "untranslated" text will be pretranslated: + that is, segments (lines of text) in the specified corpora or textId's that have + untranslated text but no translated text. If a corpus is a Paratext project, + you may flag a subset of books for pretranslation by including their [abbreviations](https://github.com/sillsdev/libpalaso/blob/master/SIL.Scripture/Canon.cs) + in the textIds parameter. If the engine does not support pretranslation, these fields have no effect. + + :param id: The translation engine id + :param build_config: The build config (see remarks) + + :return: + """ + url = "".join([ + self.url_prefix, + '/api/v1/translation/engines/', + str(id), + '/builds']) + + data = to_jsonable( + build_config, + expected=[TranslationBuildConfig]) + + + resp = self.session.request( + method='post', + url=url, + json=data, + ) + + with contextlib.closing(resp): + resp.raise_for_status() + return resp.content + + def translation_engines_get_build( + self, + id: str, + build_id: str, + min_revision: Optional[int] = None) -> 'TranslationBuild': + """ + If the `minRevision` is not defined, the current build at whatever state it is + will be immediately returned. If `minRevision` is defined, Serval will wait for + up to 40 seconds for the engine to build to the `minRevision` specified, else + will timeout. 
+ A use case is to actively query the state of the current build, where the subsequent + request sets the `minRevision` to the returned `revision` + 1. Note: this method + should use request throttling. + + :param id: The translation engine id + :param build_id: The build job id + :param min_revision: The minimum revision + + :return: The build job + """ + url = "".join([ + self.url_prefix, + '/api/v1/translation/engines/', + str(id), + '/builds/', + str(build_id)]) + + params = {} # type: Dict[str, str] + + if min_revision is not None: + params['minRevision'] = json.dumps(min_revision) + + resp = self.session.request( + method='get', + url=url, + params=params, + ) + + with contextlib.closing(resp): + resp.raise_for_status() + return from_obj( + obj=resp.json(), + expected=[TranslationBuild]) + + def translation_engines_get_current_build( + self, + id: str, + min_revision: Optional[int] = None) -> 'TranslationBuild': + """ + See "Get a Build Job" for details on minimum revision. + + :param id: The translation engine id + :param min_revision: The minimum revision + + :return: The build job + """ + url = "".join([ + self.url_prefix, + '/api/v1/translation/engines/', + str(id), + '/current-build']) + + params = {} # type: Dict[str, str] + + if min_revision is not None: + params['minRevision'] = json.dumps(min_revision) + + resp = self.session.request( + method='get', + url=url, + params=params, + ) + + with contextlib.closing(resp): + resp.raise_for_status() + return from_obj( + obj=resp.json(), + expected=[TranslationBuild]) + + def translation_engines_cancel_build( + self, + id: str) -> bytes: + """ + Send a post request to /api/v1/translation/engines/{id}/current-build/cancel. 
+ + :param id: The translation engine id + + :return: The build job was cancelled successfully + """ + url = "".join([ + self.url_prefix, + '/api/v1/translation/engines/', + str(id), + '/current-build/cancel']) + + resp = self.session.request( + method='post', + url=url, + ) + + with contextlib.closing(resp): + resp.raise_for_status() + return resp.content + + def webhooks_get_all(self) -> List['Webhook']: + """ + Send a get request to /api/v1/hooks. + + :return: The webhooks. + """ + url = self.url_prefix + '/api/v1/hooks' + + resp = self.session.request(method='get', url=url) + + with contextlib.closing(resp): + resp.raise_for_status() + return from_obj( + obj=resp.json(), + expected=[list, Webhook]) + + def webhooks_create( + self, + hook_config: 'WebhookConfig') -> bytes: + """ + Send a post request to /api/v1/hooks. + + :param hook_config: The webhook configuration. + + :return: + """ + url = self.url_prefix + '/api/v1/hooks' + + data = to_jsonable( + hook_config, + expected=[WebhookConfig]) + + + resp = self.session.request( + method='post', + url=url, + json=data, + ) + + with contextlib.closing(resp): + resp.raise_for_status() + return resp.content + + def webhooks_get( + self, + id: str) -> 'Webhook': + """ + Send a get request to /api/v1/hooks/{id}. + + :param id: The webhook id. + + :return: The webhook. + """ + url = "".join([ + self.url_prefix, + '/api/v1/hooks/', + str(id)]) + + resp = self.session.request( + method='get', + url=url, + ) + + with contextlib.closing(resp): + resp.raise_for_status() + return from_obj( + obj=resp.json(), + expected=[Webhook]) + + def webhooks_delete( + self, + id: str) -> bytes: + """ + Send a delete request to /api/v1/hooks/{id}. + + :param id: The webhook id. + + :return: The webhook was successfully deleted. 
+ """ + url = "".join([ + self.url_prefix, + '/api/v1/hooks/', + str(id)]) + + resp = self.session.request( + method='delete', + url=url, + ) + + with contextlib.closing(resp): + resp.raise_for_status() + return resp.content + + +# Automatically generated file by swagger_to. DO NOT EDIT OR APPEND ANYTHING! diff --git a/samples/ServalApp/serval_email_module.py b/samples/ServalApp/serval_email_module.py new file mode 100644 index 00000000..d61a20a9 --- /dev/null +++ b/samples/ServalApp/serval_email_module.py @@ -0,0 +1,81 @@ +from email.message import EmailMessage +import smtplib, ssl + +class ServalAppEmailServer: + def __init__(self, password, sender_address = 'serval-app@languagetechnology.org', host='mail.languagetechnology.org', port=465) -> None: + self.__password = password + self.sender_address = sender_address + self.host = host + self.port = port + self.server = None + + @property + def password(self): + return len(self.__password)*"*" + + def __enter__(self): + context = ssl.create_default_context() + self.server = smtplib.SMTP_SSL(host=self.host, port=self.port, context=context) + self.server.login(self.sender_address, self.__password) + return self + + def __exit__(self, *args): + self.server.close() + + def send_build_completed_email(self, recipient_address:str, pretranslations_file_data:str): + msg = EmailMessage() + msg.set_content( + ''' + Hi! + + Your NMT engine has completed building. Attached are the translations of untranslated source text in the files you included. + + If you are experiencing difficulties using this application, please contact eli_lowry@sil.org. + + Thank you! + ''' + ) + msg['From'] = self.sender_address + msg['To'] = recipient_address + msg['Subject'] = 'Your NMT build job is complete!' + msg.add_attachment(pretranslations_file_data, filename='translations.txt') + self.server.send_message(msg) + + def send_build_faulted_email(self, recipient_address:str): + msg = EmailMessage() + msg.add_attachment( + ''' + Hi! 
+ + Your NMT engine has failed to build. Please make sure the information you specified is correct and try again after a while. + + If you continue to experience difficulties using this application, please contact eli_lowry@sil.org. + + Thank you! + ''' + ) + msg['From'] = self.sender_address + msg['To'] = recipient_address + msg['Subject'] = 'Your NMT build job has failed' + self.server.send_message(msg) + + def send_build_started_email(self, recipient_address:str): + msg = EmailMessage() + msg.set_content( + ''' + Hi! + + Your NMT engine has started building. We will contact you when it is complete. + + If you are experiencing difficulties using this application, please contact eli_lowry@sil.org. + + Thank you! + ''' + ) + msg['From'] = self.sender_address + msg['To'] = recipient_address + msg['Subject'] = 'Your NMT build job has started building!' + + self.server.send_message(msg) + + diff --git a/samples/ServalApp/start_app.sh b/samples/ServalApp/start_app.sh new file mode 100755 index 00000000..f65c7d7f --- /dev/null +++ b/samples/ServalApp/start_app.sh @@ -0,0 +1,4 @@ +python3 send_updates.py & +SEND_UPDATES_PID=$! +streamlit run serval_app.py +kill $SEND_UPDATES_PID \ No newline at end of file From 8c6f692b1c38ba5eeac1b14ba88a321ee08bdd9c Mon Sep 17 00:00:00 2001 From: Enkidu93 Date: Fri, 6 Oct 2023 16:24:16 -0400 Subject: [PATCH 02/13] Update app to use Serval creds rather than passcode. Add footnote about IETF tags. 
--- samples/ServalApp/serval_app.py | 151 +++++++++++++----------- samples/ServalApp/serval_auth_module.py | 12 +- 2 files changed, 88 insertions(+), 75 deletions(-) diff --git a/samples/ServalApp/serval_app.py b/samples/ServalApp/serval_app.py index be5647c7..2e2f4c45 100644 --- a/samples/ServalApp/serval_app.py +++ b/samples/ServalApp/serval_app.py @@ -5,83 +5,96 @@ from sqlalchemy.orm import sessionmaker from db import Build from time import sleep - -serval_auth = ServalBearerAuth() -client = RemoteCaller(url_prefix="http://localhost",auth=serval_auth) -engine = create_engine("sqlite:///builds.db") -Session = sessionmaker(bind=engine) -session = Session() -def submit(): - engine = json.loads(client.translation_engines_create(TranslationEngineConfig(source_language=st.session_state['source_language'],target_language=st.session_state['target_language'],type='Nmt',name=f'serval_app_engine:{st.session_state["email"]}'))) - source_file = json.loads(client.data_files_create(st.session_state['source_file'], format="Text")) - target_file = json.loads(client.data_files_create(st.session_state['target_file'], format="Text")) - corpus = json.loads(client.translation_engines_add_corpus( - engine['id'], - TranslationCorpusConfig( - source_files=[TranslationCorpusFileConfig(file_id=source_file['id'], text_id=st.session_state['source_file'].name)], - target_files=[TranslationCorpusFileConfig(file_id=target_file['id'], text_id=st.session_state['source_file'].name)], - source_language=st.session_state['source_language'], - target_language=st.session_state['target_language'] +serval_auth = None +if not st.session_state.get('authorized',False): + with st.form(key="Authorization Form"): + st.session_state['client_id'] = st.text_input(label='Client ID') + st.session_state['client_secret'] = st.text_input(label='Client Secret', type='password') + if st.form_submit_button("Authorize"): + try: + serval_auth = ServalBearerAuth(client_id=st.session_state['client_id'] if 
st.session_state['client_id'] != "" else "",client_secret=st.session_state['client_secret'] if st.session_state['client_secret'] != "" else "") + st.session_state['authorized'] = True + st.rerun() + except ValueError: + st.error('Unable to authorize - please check your credentials') +else: + client = RemoteCaller(url_prefix="http://localhost",auth=serval_auth) + engine = create_engine("sqlite:///builds.db") + Session = sessionmaker(bind=engine) + session = Session() + + def submit(): + engine = json.loads(client.translation_engines_create(TranslationEngineConfig(source_language=st.session_state['source_language'],target_language=st.session_state['target_language'],type='Nmt',name=f'serval_app_engine:{st.session_state["email"]}'))) + source_file = json.loads(client.data_files_create(st.session_state['source_file'], format="Text")) + target_file = json.loads(client.data_files_create(st.session_state['target_file'], format="Text")) + corpus = json.loads(client.translation_engines_add_corpus( + engine['id'], + TranslationCorpusConfig( + source_files=[TranslationCorpusFileConfig(file_id=source_file['id'], text_id=st.session_state['source_file'].name)], + target_files=[TranslationCorpusFileConfig(file_id=target_file['id'], text_id=st.session_state['source_file'].name)], + source_language=st.session_state['source_language'], + target_language=st.session_state['target_language'] + ) ) ) - ) - build = json.loads(client.translation_engines_start_build(engine['id'], TranslationBuildConfig(pretranslate=[PretranslateCorpusConfig(corpus_id=corpus["id"], text_ids=[st.session_state['source_file'].name])]))) - session.add(Build(build_id=build['id'],engine_id=engine['id'],email=st.session_state['email'],state=build['state'],corpus_id=corpus['id'])) - session.commit() + build = json.loads(client.translation_engines_start_build(engine['id'], TranslationBuildConfig(pretranslate=[PretranslateCorpusConfig(corpus_id=corpus["id"], text_ids=[st.session_state['source_file'].name])]))) + 
session.add(Build(build_id=build['id'],engine_id=engine['id'],email=st.session_state['email'],state=build['state'],corpus_id=corpus['id'])) + session.commit() + + def already_active_build_for(email:str): + return len(session.query(Build).where(Build.email == email).all()) > 0 + + def is_valid_passcode(passcode:str): + return passcode == os.environ.get('SERVAL_APP_PASSCODE') -def already_active_build_for(email:str): - return len(session.query(Build).where(Build.email == email).all()) > 0 + st.subheader("Neural Machine Translation") -def is_valid_passcode(passcode:str): - return passcode == os.environ.get('SERVAL_APP_PASSCODE') + tried_to_submit = st.session_state.get('tried_to_submit', False) + with st.form(key="NmtTranslationForm"): + st.session_state['source_language'] = st.text_input(label="Source language tag*", placeholder="en") + if st.session_state.get('source_language','') == '' and tried_to_submit: + st.warning("Please enter a source language tag before submitting", icon='⬆️') -st.subheader("Neural Machine Translation") + st.session_state['source_file'] = st.file_uploader(label="Source File") + if st.session_state.get('source_file',None) is None and tried_to_submit: + st.warning("Please upload a source file before submitting", icon='⬆️') -tried_to_submit = st.session_state.get('tried_to_submit', False) -with st.form(key="NmtTranslationForm"): - st.session_state['source_language'] = st.text_input(label="Source language tag before submitting", placeholder="en") - if st.session_state.get('source_language','') == '' and tried_to_submit: - st.warning("Please enter a source language tag", icon='⬆️') + st.session_state['target_language'] = st.text_input(label="Target language tag*", placeholder="es") + if st.session_state.get('target_language','') == '' and tried_to_submit: + st.warning("Please enter a target language tag before submitting", icon='⬆️') - st.session_state['source_file'] = st.file_uploader(label="Source File") - if 
st.session_state.get('source_file',None) is None and tried_to_submit: - st.warning("Please upload a source file before submitting", icon='⬆️') + st.session_state['target_file'] = st.file_uploader(label="Target File") + if st.session_state.get('target_file',None) is None and tried_to_submit: + st.warning("Please upload a target file before submitting", icon='⬆️') - st.session_state['target_language'] = st.text_input(label="Target language tag", placeholder="es") - if st.session_state.get('target_language','') == '' and tried_to_submit: - st.warning("Please enter a target language tag before submitting", icon='⬆️') + st.session_state['email'] = st.text_input(label="Email", placeholder="johndoe@example.com") + if st.session_state.get('email','') == '' and tried_to_submit: + st.warning("Please enter an email address", icon='⬆️') - st.session_state['target_file'] = st.file_uploader(label="Target File") - if st.session_state.get('target_file',None) is None and tried_to_submit: - st.warning("Please upload a target file before submitting", icon='⬆️') + st.session_state['passcode'] = st.text_input(label="Passcode", placeholder="") + if st.session_state.get('passcode','') == '' and tried_to_submit: + st.warning("Please enter the passcode", icon='⬆️') - st.session_state['email'] = st.text_input(label="Email", placeholder="johndoe@example.com") - if st.session_state.get('email','') == '' and tried_to_submit: - st.warning("Please enter an email address", icon='⬆️') - - st.session_state['passcode'] = st.text_input(label="Passcode", placeholder="") - if st.session_state.get('passcode','') == '' and tried_to_submit: - st.warning("Please enter the passcode", icon='⬆️') - - if tried_to_submit: - st.error(st.session_state.get('error',"Something went wrong. 
Please try again in a moment.")) - if st.form_submit_button("Generate translations"): - if not is_valid_passcode(st.session_state.get('passcode','')): - st.session_state['tried_to_submit'] = True - st.session_state['error'] = "The passcode was invalid." - st.rerun() - elif already_active_build_for(st.session_state['email']): - st.session_state['tried_to_submit'] = True - st.session_state['error'] = "There is already an a pending or active build associated with this email address. Please wait for the previous build to finish." - st.rerun() - elif st.session_state['source_language'] != '' and st.session_state['target_language'] != '' and st.session_state['source_file'] is not None and st.session_state['target_file'] is not None and st.session_state['email'] != '': - submit() - st.session_state['tried_to_submit'] = False - st.toast("Translations are on their way! You'll receive an email when your translation job has begun.") - sleep(4) - st.rerun() - else: - st.session_state['tried_to_submit'] = True - st.session_state['error'] = "Some required fields were left blank. Please fill in all fields above" - st.rerun() \ No newline at end of file + if tried_to_submit: + st.error(st.session_state.get('error',"Something went wrong. Please try again in a moment.")) + if st.form_submit_button("Generate translations"): + if not is_valid_passcode(st.session_state.get('passcode','')): + st.session_state['tried_to_submit'] = True + st.session_state['error'] = "The passcode was invalid." + st.rerun() + elif already_active_build_for(st.session_state['email']): + st.session_state['tried_to_submit'] = True + st.session_state['error'] = "There is already an a pending or active build associated with this email address. Please wait for the previous build to finish." 
+ st.rerun() + elif st.session_state['source_language'] != '' and st.session_state['target_language'] != '' and st.session_state['source_file'] is not None and st.session_state['target_file'] is not None and st.session_state['email'] != '': + submit() + st.session_state['tried_to_submit'] = False + st.toast("Translations are on their way! You'll receive an email when your translation job has begun.") + sleep(4) + st.rerun() + else: + st.session_state['tried_to_submit'] = True + st.session_state['error'] = "Some required fields were left blank. Please fill in all fields above" + st.rerun() + st.markdown("\* Use IETF tags if possible. See [here](https://en.wikipedia.org/wiki/IETF_language_tag) for more information on IETF tags.", unsafe_allow_html=True) \ No newline at end of file diff --git a/samples/ServalApp/serval_auth_module.py b/samples/ServalApp/serval_auth_module.py index c9a38076..dbb53ea6 100644 --- a/samples/ServalApp/serval_auth_module.py +++ b/samples/ServalApp/serval_auth_module.py @@ -4,10 +4,10 @@ import time class ServalBearerAuth(requests.auth.AuthBase): - def __init__(self): - self.__client_id = os.environ.get("SERVAL_CLIENT_ID") + def __init__(self, client_id="", client_secret=""): + self.__client_id = client_id if client_id != "" else os.environ.get("SERVAL_CLIENT_ID") assert(self.__client_id is not None) - self.__client_secret = os.environ.get("SERVAL_CLIENT_SECRET") + self.__client_secret = client_secret if client_secret != "" else os.environ.get("SERVAL_CLIENT_SECRET") assert(self.__client_secret is not None) self.__auth_url = os.environ.get("SERVAL_AUTH_URL") assert(self.__auth_url is not None) @@ -18,7 +18,7 @@ def __call__(self, r): self.update_token() r.headers["authorization"] = "Bearer " + self.token return r - + def update_token(self): data = { "client_id": f"{self.__client_id}", @@ -36,6 +36,6 @@ def update_token(self): headers={"content-type": "application/json"} ) self.token = r.json()['access_token'] if r is not None else None - 
except Exception as e: - raise ValueError(f"Token cannot be None. Failed to retrieve token from auth server; responded with {r.status}. Original exception: {e}") + except Exception as e: + raise ValueError(f"Token cannot be None. Failed to retrieve token from auth server; responded with {r.status_code if r is not None else ''}. Original exception: {e}") From 63470aa8c60f989b06da71146bbf6b9af165fbe2 Mon Sep 17 00:00:00 2001 From: Enkidu93 Date: Fri, 6 Oct 2023 16:29:17 -0400 Subject: [PATCH 03/13] Remove passcode remnants --- samples/ServalApp/serval_app.py | 11 +---------- 1 file changed, 1 insertion(+), 10 deletions(-) diff --git a/samples/ServalApp/serval_app.py b/samples/ServalApp/serval_app.py index 2e2f4c45..7be3a172 100644 --- a/samples/ServalApp/serval_app.py +++ b/samples/ServalApp/serval_app.py @@ -71,19 +71,10 @@ def is_valid_passcode(passcode:str): st.session_state['email'] = st.text_input(label="Email", placeholder="johndoe@example.com") if st.session_state.get('email','') == '' and tried_to_submit: st.warning("Please enter an email address", icon='⬆️') - - st.session_state['passcode'] = st.text_input(label="Passcode", placeholder="") - if st.session_state.get('passcode','') == '' and tried_to_submit: - st.warning("Please enter the passcode", icon='⬆️') - if tried_to_submit: st.error(st.session_state.get('error',"Something went wrong. Please try again in a moment.")) if st.form_submit_button("Generate translations"): - if not is_valid_passcode(st.session_state.get('passcode','')): - st.session_state['tried_to_submit'] = True - st.session_state['error'] = "The passcode was invalid." - st.rerun() - elif already_active_build_for(st.session_state['email']): + if already_active_build_for(st.session_state['email']): st.session_state['tried_to_submit'] = True st.session_state['error'] = "There is already an a pending or active build associated with this email address. Please wait for the previous build to finish." 
st.rerun() From e44349b226768662c1b5888f6ba2241386014d5d Mon Sep 17 00:00:00 2001 From: Enkidu93 Date: Fri, 6 Oct 2023 16:50:37 -0400 Subject: [PATCH 04/13] Fixed authorization error message --- samples/ServalApp/serval_app.py | 16 ++++++++++------ 1 file changed, 10 insertions(+), 6 deletions(-) diff --git a/samples/ServalApp/serval_app.py b/samples/ServalApp/serval_app.py index 7be3a172..767a3005 100644 --- a/samples/ServalApp/serval_app.py +++ b/samples/ServalApp/serval_app.py @@ -12,13 +12,17 @@ st.session_state['client_id'] = st.text_input(label='Client ID') st.session_state['client_secret'] = st.text_input(label='Client Secret', type='password') if st.form_submit_button("Authorize"): - try: - serval_auth = ServalBearerAuth(client_id=st.session_state['client_id'] if st.session_state['client_id'] != "" else "",client_secret=st.session_state['client_secret'] if st.session_state['client_secret'] != "" else "") - st.session_state['authorized'] = True - st.rerun() - except ValueError: - st.error('Unable to authorize - please check your credentials') + st.session_state['authorized'] = True + st.rerun() + if st.session_state.get('authorization_failure', False): + st.error('Invalid credentials. 
Please check your credentials.') else: + try: + serval_auth = ServalBearerAuth(client_id=st.session_state['client_id'] if st.session_state['client_id'] != "" else "", client_secret=st.session_state['client_secret'] if st.session_state['client_secret'] != "" else "") + except ValueError: + st.session_state['authorized'] = False + st.session_state['authorization_failure'] = True + st.rerun() client = RemoteCaller(url_prefix="http://localhost",auth=serval_auth) engine = create_engine("sqlite:///builds.db") Session = sessionmaker(bind=engine) From cc4c9fe3910be84e11255c5194ea03140e50c059 Mon Sep 17 00:00:00 2001 From: Enkidu93 Date: Thu, 19 Oct 2023 14:35:58 -0400 Subject: [PATCH 05/13] Fix mpy version --- docker-compose.yml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/docker-compose.yml b/docker-compose.yml index 61d8a9a9..a3d290df 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -92,7 +92,7 @@ services: - ASPNETCORE_TranslationEngines__1=Nmt - ClearML__ApiServer=https://api.sil.hosted.allegro.ai - ClearML__Queue=production - - ClearML__DockerImage=ghcr.io/sillsdev/machine.py:0.9.5.1 + - ClearML__DockerImage=ghcr.io/sillsdev/machine.py:0.9.6.2 - "ClearML__AccessKey=${ClearML_AccessKey:?access key needed}" - "ClearML__SecretKey=${ClearML_SecretKey:?secret key needed}" - SharedFile__Uri=s3://aqua-ml-data/docker-compose/ @@ -137,7 +137,7 @@ services: - ASPNETCORE_TranslationEngines__1=Nmt - ClearML__ApiServer=https://api.sil.hosted.allegro.ai - ClearML__Queue=production - - ClearML__DockerImage=ghcr.io/sillsdev/machine.py:0.9.5.1 + - ClearML__DockerImage=ghcr.io/sillsdev/machine.py:0.9.6.2 - "ClearML__AccessKey=${ClearML_AccessKey:?access key needed}" - "ClearML__SecretKey=${ClearML_SecretKey:?secret key needed}" - SharedFile__Uri=s3://aqua-ml-data/docker-compose/ From da24e2f81c6f8fdee6fc21b1dcc504f1df584816 Mon Sep 17 00:00:00 2001 From: Enkidu93 Date: Fri, 20 Oct 2023 10:24:40 -0400 Subject: [PATCH 06/13] Update API --- 
samples/ServalApp/serval_app.py | 3 - samples/ServalApp/serval_client_module.py | 187 ++++++++++++++++++++-- 2 files changed, 176 insertions(+), 14 deletions(-) diff --git a/samples/ServalApp/serval_app.py b/samples/ServalApp/serval_app.py index 767a3005..0acb2c50 100644 --- a/samples/ServalApp/serval_app.py +++ b/samples/ServalApp/serval_app.py @@ -49,9 +49,6 @@ def submit(): def already_active_build_for(email:str): return len(session.query(Build).where(Build.email == email).all()) > 0 - def is_valid_passcode(passcode:str): - return passcode == os.environ.get('SERVAL_APP_PASSCODE') - st.subheader("Neural Machine Translation") tried_to_submit = st.session_state.get('tried_to_submit', False) diff --git a/samples/ServalApp/serval_client_module.py b/samples/ServalApp/serval_client_module.py index bf805d79..a44f0871 100644 --- a/samples/ServalApp/serval_client_module.py +++ b/samples/ServalApp/serval_client_module.py @@ -73,6 +73,9 @@ def from_obj(obj: Any, expected: List[type], path: str = '') -> Any: if exp == TranslationEngineConfig: return translation_engine_config_from_obj(obj, path=path) + if exp == Queue: + return queue_from_obj(obj, path=path) + if exp == TranslationResult: return translation_result_from_obj(obj, path=path) @@ -205,6 +208,10 @@ def to_jsonable(obj: Any, expected: List[type], path: str = "") -> Any: assert isinstance(obj, TranslationEngineConfig) return translation_engine_config_to_jsonable(obj, path=path) + if exp == Queue: + assert isinstance(obj, Queue) + return queue_to_jsonable(obj, path=path) + if exp == TranslationResult: assert isinstance(obj, TranslationResult) return translation_result_to_jsonable(obj, path=path) @@ -684,6 +691,82 @@ def translation_engine_config_to_jsonable( return res +class Queue: + def __init__( + self, + size: int, + engine_type: str) -> None: + """Initializes with the given values.""" + self.size = size + + self.engine_type = engine_type + + def to_jsonable(self) -> MutableMapping[str, Any]: + """ + Dispatches 
the conversion to queue_to_jsonable. + + :return: JSON-able representation + """ + return queue_to_jsonable(self) + + +def new_queue() -> Queue: + """Generates an instance of Queue with default values.""" + return Queue( + size=0, + engine_type='') + + +def queue_from_obj(obj: Any, path: str = "") -> Queue: + """ + Generates an instance of Queue from a dictionary object. + + :param obj: a JSON-ed dictionary object representing an instance of Queue + :param path: path to the object used for debugging + :return: parsed instance of Queue + """ + if not isinstance(obj, dict): + raise ValueError('Expected a dict at path {}, but got: {}'.format(path, type(obj))) + + for key in obj: + if not isinstance(key, str): + raise ValueError( + 'Expected a key of type str at path {}, but got: {}'.format(path, type(key))) + + size_from_obj = from_obj( + obj['size'], + expected=[int], + path=path + '.size') # type: int + + engine_type_from_obj = from_obj( + obj['engineType'], + expected=[str], + path=path + '.engineType') # type: str + + return Queue( + size=size_from_obj, + engine_type=engine_type_from_obj) + + +def queue_to_jsonable( + queue: Queue, + path: str = "") -> MutableMapping[str, Any]: + """ + Generates a JSON-able mapping from an instance of Queue. 
+ + :param queue: instance of Queue to be JSON-ized + :param path: path to the queue used for debugging + :return: a JSON-able representation + """ + res = dict() # type: Dict[str, Any] + + res['size'] = queue.size + + res['engineType'] = queue.engine_type + + return res + + class TranslationResult: def __init__( self, @@ -2088,7 +2171,9 @@ def __init__( pretranslate: Optional[List['PretranslateCorpus']] = None, percent_completed: Optional[float] = None, message: Optional[str] = None, - date_finished: Optional[str] = None) -> None: + queue_depth: Optional[int] = None, + date_finished: Optional[str] = None, + options: Optional[str] = None) -> None: """Initializes with the given values.""" self.id = id @@ -2111,8 +2196,12 @@ def __init__( self.message = message + self.queue_depth = queue_depth + self.date_finished = date_finished + self.options = options + def to_jsonable(self) -> MutableMapping[str, Any]: """ Dispatches the conversion to translation_build_to_jsonable. @@ -2215,6 +2304,15 @@ def translation_build_from_obj(obj: Any, path: str = "") -> TranslationBuild: else: message_from_obj = None + obj_queue_depth = obj.get('queueDepth', None) + if obj_queue_depth is not None: + queue_depth_from_obj = from_obj( + obj_queue_depth, + expected=[int], + path=path + '.queueDepth') # type: Optional[int] + else: + queue_depth_from_obj = None + obj_date_finished = obj.get('dateFinished', None) if obj_date_finished is not None: date_finished_from_obj = from_obj( @@ -2224,6 +2322,15 @@ def translation_build_from_obj(obj: Any, path: str = "") -> TranslationBuild: else: date_finished_from_obj = None + obj_options = obj.get('options', None) + if obj_options is not None: + options_from_obj = from_obj( + obj_options, + expected=[str], + path=path + '.options') # type: Optional[str] + else: + options_from_obj = None + return TranslationBuild( id=id_from_obj, url=url_from_obj, @@ -2235,7 +2342,9 @@ def translation_build_from_obj(obj: Any, path: str = "") -> TranslationBuild: 
pretranslate=pretranslate_from_obj, percent_completed=percent_completed_from_obj, message=message_from_obj, - date_finished=date_finished_from_obj) + queue_depth=queue_depth_from_obj, + date_finished=date_finished_from_obj, + options=options_from_obj) def translation_build_to_jsonable( @@ -2280,9 +2389,15 @@ def translation_build_to_jsonable( if translation_build.message is not None: res['message'] = translation_build.message + if translation_build.queue_depth is not None: + res['queueDepth'] = translation_build.queue_depth + if translation_build.date_finished is not None: res['dateFinished'] = translation_build.date_finished + if translation_build.options is not None: + res['options'] = translation_build.options + return res @@ -2376,12 +2491,15 @@ class TranslationBuildConfig: def __init__( self, name: Optional[str] = None, - pretranslate: Optional[List['PretranslateCorpusConfig']] = None) -> None: + pretranslate: Optional[List['PretranslateCorpusConfig']] = None, + options: Optional[str] = None) -> None: """Initializes with the given values.""" self.name = name self.pretranslate = pretranslate + self.options = options + def to_jsonable(self) -> MutableMapping[str, Any]: """ Dispatches the conversion to translation_build_config_to_jsonable. 
@@ -2430,9 +2548,19 @@ def translation_build_config_from_obj(obj: Any, path: str = "") -> TranslationBu else: pretranslate_from_obj = None + obj_options = obj.get('options', None) + if obj_options is not None: + options_from_obj = from_obj( + obj_options, + expected=[str], + path=path + '.options') # type: Optional[str] + else: + options_from_obj = None + return TranslationBuildConfig( name=name_from_obj, - pretranslate=pretranslate_from_obj) + pretranslate=pretranslate_from_obj, + options=options_from_obj) def translation_build_config_to_jsonable( @@ -2456,6 +2584,9 @@ def translation_build_config_to_jsonable( expected=[list, PretranslateCorpusConfig], path='{}.pretranslate'.format(path)) + if translation_build_config.options is not None: + res['options'] = translation_build_config.options + return res @@ -2789,8 +2920,11 @@ def data_files_create( :param format: File format options: * **Text**: One translation unit (a.k.a., verse) per line - * If there is a tab, the content before the tab is the unique identifier for the line - * Otherwise, no tabs should be used in the file. + * If a line contains a tab, characters before the tab are used as a unique identifier for the line, characters after the tab are understood as the content of the verse, and if there is another tab following the verse content, characters after this second tab are assumed to be column codes like "ss" etc. for sectioning and other formatting. See this example of a tab-delimited text file: + > verse_001_005 (tab) Ὑπομνῆσαι δὲ ὑμᾶς βούλομαι , εἰδότας ὑμᾶς ἅπαξ τοῦτο + > verse_001_006 (tab) Ἀγγέλους τε τοὺς μὴ τηρήσαντας τὴν ἑαυτῶν ἀρχήν , ἀλλὰ (tab) ss + > verse_001_007 (tab) Ὡς Σόδομα καὶ Γόμορρα , καὶ αἱ περὶ αὐτὰς πόλεις (tab) ss + * Otherwise, *no tabs* should be used in the file and a unique identifier will generated for each translation unit based on the line number. 
* **Paratext**: A complete, zipped Paratext project backup: that is, a .zip archive of files including the USFM files and "Settings.xml" file. To generate a zipped backup for a project in Paratext, navigate to "Paratext/Advanced/Backup project to file..." and follow the dialogue. :param name: A name to help identify and distinguish the file. @@ -3028,6 +3162,33 @@ def translation_engines_delete( resp.raise_for_status() return resp.content + def translation_engines_get_queue( + self, + engine_type: str) -> 'Queue': + """ + Send a get request to /api/v1/translation/engines/queues. + + :param engine_type: A valid engine type: SmtTransfer, Nmt, or Echo + + :return: Queue information for the specified engine type + """ + url = self.url_prefix + '/api/v1/translation/engines/queues' + + data = engine_type + + + resp = self.session.request( + method='get', + url=url, + json=data, + ) + + with contextlib.closing(resp): + resp.raise_for_status() + return from_obj( + obj=resp.json(), + expected=[Queue]) + def translation_engines_translate( self, id: str, @@ -3411,13 +3572,17 @@ def translation_engines_start_build( self, id: str, build_config: 'TranslationBuildConfig') -> bytes: - """ + r""" Specify the corpora or textIds to pretranslate. Even when a corpus or textId is selected for pretranslation, only "untranslated" text will be pretranslated: that is, segments (lines of text) in the specified corpora or textId's that have untranslated text but no translated text. If a corpus is a Paratext project, you may flag a subset of books for pretranslation by including their [abbreviations](https://github.com/sillsdev/libpalaso/blob/master/SIL.Scripture/Canon.cs) in the textIds parameter. If the engine does not support pretranslation, these fields have no effect. + + The `"options"` parameter of the build config provides the ability to pass build configuration parameters as a JSON string. 
+ A typical use case would be to set `"options"` to `"{\"max_steps\":10}"` in order to configure the maximum + number of training iterations in order to reduce turnaround time for testing purposes. :param id: The translation engine id :param build_config: The build config (see remarks) @@ -3451,13 +3616,13 @@ def translation_engines_get_build( build_id: str, min_revision: Optional[int] = None) -> 'TranslationBuild': """ - If the `minRevision` is not defined, the current build at whatever state it is + If the `minRevision` is not defined, the current build, at whatever state it is, will be immediately returned. If `minRevision` is defined, Serval will wait for up to 40 seconds for the engine to build to the `minRevision` specified, else will timeout. A use case is to actively query the state of the current build, where the subsequent - request sets the `minRevision` to the returned `revision` + 1. Note: this method - should use request throttling. + request sets the `minRevision` to the returned `revision` + 1 and timeouts are handled gracefully. + Note: this method should use request throttling. :param id: The translation engine id :param build_id: The build job id @@ -3494,7 +3659,7 @@ def translation_engines_get_current_build( id: str, min_revision: Optional[int] = None) -> 'TranslationBuild': """ - See "Get a Build Job" for details on minimum revision. + See documentation on endpoint /translation/engines/{id}/builds/{id} - "Get a Build Job" for details on using `minRevision`. :param id: The translation engine id :param min_revision: The minimum revision From ba6a3af894c966f253fdea22f2dca22cfe12f44a Mon Sep 17 00:00:00 2001 From: Enkidu93 Date: Mon, 23 Oct 2023 12:05:16 -0400 Subject: [PATCH 07/13] Serval App working MVP compatible with streamlit deployment method Also now capable of handling paratext projects and multiple files as well as no target file(s). 
--- samples/ServalApp/README.md | 13 +++ samples/ServalApp/REAME.md | 6 -- samples/ServalApp/builds.db | Bin 12288 -> 20480 bytes samples/ServalApp/db.py | 5 +- samples/ServalApp/send_updates.py | 62 ------------- samples/ServalApp/serval_app.py | 106 +++++++++++++++++++---- samples/ServalApp/serval_email_module.py | 59 ++++++------- samples/ServalApp/start_app.sh | 4 - 8 files changed, 132 insertions(+), 123 deletions(-) create mode 100644 samples/ServalApp/README.md delete mode 100644 samples/ServalApp/REAME.md delete mode 100644 samples/ServalApp/send_updates.py delete mode 100755 samples/ServalApp/start_app.sh diff --git a/samples/ServalApp/README.md b/samples/ServalApp/README.md new file mode 100644 index 00000000..ae37df99 --- /dev/null +++ b/samples/ServalApp/README.md @@ -0,0 +1,13 @@ +### Running the Serval APP +Before running the app, verify that both `SERVAL_APP_EMAIL_PASSWORD` and `SERVAL_APP_PASSCODE` are appropriately populated. +Then, run: +``` +streamlit run serval_app.py +``` + +### Regenerating the Python Client +When the Serval API is updated, use the tool [swagger-to](https://pypi.org/project/swagger-to/) to generate a new `serval_client_module.py` using the following command: +``` +swagger_to_py_client.py --swagger_path path/to/swagger.json --outpath serval_client_module.py +``` +Note: You may need to delete the authorization-related elements of the "swagger.json" before generating. \ No newline at end of file diff --git a/samples/ServalApp/REAME.md b/samples/ServalApp/REAME.md deleted file mode 100644 index 37b195be..00000000 --- a/samples/ServalApp/REAME.md +++ /dev/null @@ -1,6 +0,0 @@ -### Running the Serval APP -Before running the app, verify that both `SERVAL_APP_EMAIL_PASSWORD` and `SERVAL_APP_PASSCODE` are appropriately populated. 
-Then, run: -``` -./start_app.sh -``` \ No newline at end of file diff --git a/samples/ServalApp/builds.db b/samples/ServalApp/builds.db index 101600b0ba31890622f4aa419da9ea1a501c8b76..d0e91a7f0fb9dcc2820cd668fd61e8a3d81eb4ab 100644 GIT binary patch delta 107 zcmZojXjs5FL0Yhnfq{V)ikX14@ttsBlZmg}p$YIRIRw8`l5; delta 66 zcmZozz}S#5L0Zs>fq{V;h+#l>qK+|8P|yA*FaHk)CVmD6eg^*2n*{|X@G&=fFivLX NKgr0n`7FP&0sxMK4M_k1 diff --git a/samples/ServalApp/db.py b/samples/ServalApp/db.py index 97f97ab7..ab59bbce 100644 --- a/samples/ServalApp/db.py +++ b/samples/ServalApp/db.py @@ -24,10 +24,7 @@ def __str__(self): def __repr__(self): return self.__str__() - -def create_tables(): +def clear_and_regenerate_tables(): engine = create_engine("sqlite:///builds.db") metadata.drop_all(bind=engine) metadata.create_all(bind=engine) - - diff --git a/samples/ServalApp/send_updates.py b/samples/ServalApp/send_updates.py deleted file mode 100644 index 631b33fe..00000000 --- a/samples/ServalApp/send_updates.py +++ /dev/null @@ -1,62 +0,0 @@ -from serval_client_module import * -from serval_auth_module import * -import os -from time import sleep -from db import Build, State -from sqlalchemy import create_engine -from sqlalchemy.orm import sessionmaker -from serval_email_module import ServalAppEmailServer - -def main(): - def started(build:Build, email_server:ServalAppEmailServer): - print(f"\tStarted {build}") - session.delete(build) - email_server.send_build_started_email(build.email) - session.add(Build(build_id=build.build_id, engine_id=build.engine_id, email=build.email, state=State.Active, corpus_id=build.corpus_id)) - - def faulted(build:Build, email_server:ServalAppEmailServer): - print(f"\tFaulted {build}") - session.delete(build) - email_server.send_build_faulted_email(build.email) - - def completed(build:Build, email_server:ServalAppEmailServer): - print(f"\tCompleted {build}") - session.delete(build) - pretranslations = client.translation_engines_get_all_pretranslations(build.engine_id, 
build.corpus_id) - email_server.send_build_completed_email(build.email, '\n'.join([f"{'|'.join(pretranslation.refs)}\t{pretranslation.translation}" for pretranslation in pretranslations])) - - def update(build:Build, email_server:ServalAppEmailServer): - print(f"\tUpdated {build}") - - serval_auth = ServalBearerAuth() - client = RemoteCaller(url_prefix="http://localhost",auth=serval_auth) - responses:"dict[str,function]" = {"Completed":completed, "Faulted":faulted, "Canceled":faulted} - - engine = create_engine("sqlite:///builds.db") - Session = sessionmaker(bind=engine) - session = Session() - - def get_update(build:Build, email_server:ServalAppEmailServer): - build_update = client.translation_engines_get_build(id=build.engine_id, build_id=build.build_id) - if build.state == State.Pending and build_update.state == "Active": - started(build, email_server) - else: - responses.get(build_update.state, update)(build, email_server) - session.commit() - - def send_updates(email_server:ServalAppEmailServer): - print(f"Checking for updates:") - builds = session.query(Build).all() - for build in builds: - try: - get_update(build, email_server) - except Exception as e: - print(f"\tFailed to update {build} because of exception {e}") - sleep(60) - - with ServalAppEmailServer(os.environ.get('SERVAL_APP_EMAIL_PASSWORD')) as email_server: - while(True): - send_updates(email_server) - -if __name__ == "__main__": - main() \ No newline at end of file diff --git a/samples/ServalApp/serval_app.py b/samples/ServalApp/serval_app.py index 0acb2c50..7e6e1e8e 100644 --- a/samples/ServalApp/serval_app.py +++ b/samples/ServalApp/serval_app.py @@ -1,10 +1,78 @@ import streamlit as st +from streamlit.runtime.scriptrunner import add_script_run_ctx from serval_client_module import * from serval_auth_module import * from sqlalchemy import create_engine from sqlalchemy.orm import sessionmaker from db import Build from time import sleep +from threading import Thread +import os +from db import 
Build, State +from serval_email_module import ServalAppEmailServer +import re + +def send_emails(): + engine = create_engine("sqlite:///builds.db") + Session = sessionmaker(bind=engine) + session = Session() + try: + def started(build:Build, email_server:ServalAppEmailServer, data=None): + print(f"\tStarted {build}") + session.delete(build) + email_server.send_build_started_email(build.email) + session.add(Build(build_id=build.build_id, engine_id=build.engine_id, email=build.email, state=State.Active, corpus_id=build.corpus_id)) + + def faulted(build:Build, email_server:ServalAppEmailServer, data=None): + print(f"\tFaulted {build}") + session.delete(build) + email_server.send_build_faulted_email(build.email, error=data) + + def completed(build:Build, email_server:ServalAppEmailServer, data=None): + print(f"\tCompleted {build}") + session.delete(build) + pretranslations = client.translation_engines_get_all_pretranslations(build.engine_id, build.corpus_id) + email_server.send_build_completed_email(build.email, '\n'.join([f"{'|'.join(pretranslation.refs)}\t{pretranslation.translation}" for pretranslation in pretranslations])) + + def update(build:Build, email_server:ServalAppEmailServer, data=None): + print(f"\tUpdated {build}") + + serval_auth = ServalBearerAuth() + client = RemoteCaller(url_prefix="http://localhost",auth=serval_auth) + responses:"dict[str,function]" = {"Completed":completed, "Faulted":faulted, "Canceled":faulted} + + def get_update(build:Build, email_server:ServalAppEmailServer): + build_update = client.translation_engines_get_build(id=build.engine_id, build_id=build.build_id) + if build.state == State.Pending and build_update.state == "Active": + started(build, email_server) + else: + responses.get(build_update.state, update)(build, email_server, build_update.message) + session.commit() + + def send_updates(email_server:ServalAppEmailServer): + print(f"Checking for updates...") + with session.no_autoflush: + builds = session.query(Build).all() + 
for build in builds: + try: + get_update(build, email_server) + except Exception as e: + print(f"\tFailed to update {build} because of exception {e}") + raise e + + with ServalAppEmailServer(os.environ.get('SERVAL_APP_EMAIL_PASSWORD')) as email_server: + while(True): + send_updates(email_server) + sleep(300) #Once every five minutes... + except Exception as e: + print(e) + st.session_state['background_process_has_started'] = False + +if not st.session_state.get('background_process_has_started',False): + cron_thread = Thread(target=send_emails) + add_script_run_ctx(cron_thread) + cron_thread.start() + st.session_state['background_process_has_started'] = True serval_auth = None if not st.session_state.get('authorized',False): @@ -30,19 +98,19 @@ def submit(): engine = json.loads(client.translation_engines_create(TranslationEngineConfig(source_language=st.session_state['source_language'],target_language=st.session_state['target_language'],type='Nmt',name=f'serval_app_engine:{st.session_state["email"]}'))) - source_file = json.loads(client.data_files_create(st.session_state['source_file'], format="Text")) - target_file = json.loads(client.data_files_create(st.session_state['target_file'], format="Text")) + source_files = [json.loads(client.data_files_create(st.session_state['source_files'][i], format="Paratext" if st.session_state['source_files'][i].name[-4:] == '.zip' else "Text")) for i in range(len(st.session_state['source_files']))] + target_files = [json.loads(client.data_files_create(st.session_state['target_files'][i], format="Paratext" if st.session_state['target_files'][i].name[-4:] == '.zip' else "Text")) for i in range(len(st.session_state['target_files']))] corpus = json.loads(client.translation_engines_add_corpus( engine['id'], TranslationCorpusConfig( - source_files=[TranslationCorpusFileConfig(file_id=source_file['id'], text_id=st.session_state['source_file'].name)], - target_files=[TranslationCorpusFileConfig(file_id=target_file['id'], 
text_id=st.session_state['source_file'].name)], + source_files=[TranslationCorpusFileConfig(file_id=file['id'], text_id=name) for file, name in zip(source_files, list(map(lambda f: f.name, st.session_state['source_files'])))], + target_files=[TranslationCorpusFileConfig(file_id=file['id'], text_id=name) for file, name in zip(target_files, list(map(lambda f: f.name, st.session_state['target_files'])))], source_language=st.session_state['source_language'], target_language=st.session_state['target_language'] ) ) ) - build = json.loads(client.translation_engines_start_build(engine['id'], TranslationBuildConfig(pretranslate=[PretranslateCorpusConfig(corpus_id=corpus["id"], text_ids=[st.session_state['source_file'].name])]))) + build = json.loads(client.translation_engines_start_build(engine['id'], TranslationBuildConfig(pretranslate=[PretranslateCorpusConfig(corpus_id=corpus["id"], text_ids= [] if st.session_state['source_files'][0].name[-4:] == '.zip' else list(map(lambda f: f.name, st.session_state['source_files'])))], options="{\"max_steps\":10}"))) session.add(Build(build_id=build['id'],engine_id=engine['id'],email=st.session_state['email'],state=build['state'],corpus_id=corpus['id'])) session.commit() @@ -55,23 +123,28 @@ def already_active_build_for(email:str): with st.form(key="NmtTranslationForm"): st.session_state['source_language'] = st.text_input(label="Source language tag*", placeholder="en") if st.session_state.get('source_language','') == '' and tried_to_submit: - st.warning("Please enter a source language tag before submitting", icon='⬆️') + st.error("Please enter a source language tag before submitting", icon='⬆️') - st.session_state['source_file'] = st.file_uploader(label="Source File") - if st.session_state.get('source_file',None) is None and tried_to_submit: - st.warning("Please upload a source file before submitting", icon='⬆️') + st.session_state['source_files'] = st.file_uploader(label="Source File(s)", accept_multiple_files=True) + if 
len(st.session_state.get('source_files',[])) == 0 and tried_to_submit: + st.error("Please upload a source file before submitting", icon='⬆️') + if len(st.session_state.get('source_files',[])) > 1: + st.warning('Please note that source and target text files will be paired together by file name', icon='💡') st.session_state['target_language'] = st.text_input(label="Target language tag*", placeholder="es") if st.session_state.get('target_language','') == '' and tried_to_submit: - st.warning("Please enter a target language tag before submitting", icon='⬆️') + st.error("Please enter a target language tag before submitting", icon='⬆️') - st.session_state['target_file'] = st.file_uploader(label="Target File") - if st.session_state.get('target_file',None) is None and tried_to_submit: - st.warning("Please upload a target file before submitting", icon='⬆️') + st.session_state['target_files'] = st.file_uploader(label="Target File(s)", accept_multiple_files=True) + if len(st.session_state.get('target_files',[])) > 1: + st.warning('Please note that source and target text files will be paired together by file name', icon='💡') st.session_state['email'] = st.text_input(label="Email", placeholder="johndoe@example.com") if st.session_state.get('email','') == '' and tried_to_submit: - st.warning("Please enter an email address", icon='⬆️') + st.error("Please enter an email address", icon='⬆️') + elif not re.match(r"^\S+@\S+\.\S+$", st.session_state['email']) and tried_to_submit: + st.error("Please enter a valid email address", icon='⬆️') + st.session_state['email'] = '' if tried_to_submit: st.error(st.session_state.get('error',"Something went wrong. Please try again in a moment.")) if st.form_submit_button("Generate translations"): @@ -79,8 +152,9 @@ def already_active_build_for(email:str): st.session_state['tried_to_submit'] = True st.session_state['error'] = "There is already an a pending or active build associated with this email address. 
Please wait for the previous build to finish." st.rerun() - elif st.session_state['source_language'] != '' and st.session_state['target_language'] != '' and st.session_state['source_file'] is not None and st.session_state['target_file'] is not None and st.session_state['email'] != '': - submit() + elif st.session_state['source_language'] != '' and st.session_state['target_language'] != '' and len(st.session_state['source_files']) > 0 and st.session_state['email'] != '': + with st.spinner(): + submit() st.session_state['tried_to_submit'] = False st.toast("Translations are on their way! You'll receive an email when your translation job has begun.") sleep(4) diff --git a/samples/ServalApp/serval_email_module.py b/samples/ServalApp/serval_email_module.py index d61a20a9..5876e622 100644 --- a/samples/ServalApp/serval_email_module.py +++ b/samples/ServalApp/serval_email_module.py @@ -8,11 +8,11 @@ def __init__(self, password, sender_address = 'serval-app@languagetechnology.org self.host = host self.port = port self.server = None - + @property def password(self): return len(self.__password)*"*" - + def __enter__(self): context = ssl.create_default_context() self.server = smtplib.SMTP_SSL(host=self.host, port=self.port, context=context) @@ -21,38 +21,36 @@ def __enter__(self): def __exit__(self, *args): self.server.close() - + def send_build_completed_email(self, recipient_address:str, pretranslations_file_data:str): msg = EmailMessage() msg.set_content( - ''' - Hi! +'''Hi! - Your NMT engine has completed building. Attached are the translations of untranslated source text in the files you included. +Your NMT engine has completed building. Attached are the translations of untranslated source text in the files you included. - If you are experiencing difficulties using this application, please contact eli_lowry@sil.org. - - Thank you! - ''' +If you are experiencing difficulties using this application, please contact eli_lowry@sil.org. + +Thank you! 
+''' ) msg['From'] = self.sender_address msg['To'] = recipient_address msg['Subject'] = 'Your NMT build job is complete!' msg.add_attachment(pretranslations_file_data, filename='translations.txt') self.server.send_message(msg) - - def send_build_faulted_email(self, recipient_address:str): + + def send_build_faulted_email(self, recipient_address:str, error=""): msg = EmailMessage() - msg.add_attachment( - ''' - Hi! - - Your NMT engine has failed to build. Please make sure the information you specified is correct and try again after a while. - - If you continue to experience difficulties using this application, please contact eli_lowry@sil.org. - - Thank you! - ''' + msg.set_content( +f'''Hi! + +Your NMT engine has failed to build{" with the following error message: " + error if error != "" else ""}. Please make sure the information you specified is correct and try again after a while. + +If you continue to experience difficulties using this application, please contact eli_lowry@sil.org. + +Thank you! +''' ) msg['From'] = self.sender_address msg['To'] = recipient_address @@ -62,15 +60,14 @@ def send_build_faulted_email(self, recipient_address:str): def send_build_started_email(self, recipient_address:str): msg = EmailMessage() msg.set_content( - ''' - Hi! - - Your NMT engine has started building. We will contact you when it is complete. - - If you are experiencing difficulties using this application, please contact eli_lowry@sil.org. - - Thank you! - ''' +'''Hi! + +Your NMT engine has started building. We will contact you when it is complete. + +If you are experiencing difficulties using this application, please contact eli_lowry@sil.org. + +Thank you! +''' ) msg['From'] = self.sender_address msg['To'] = recipient_address diff --git a/samples/ServalApp/start_app.sh b/samples/ServalApp/start_app.sh deleted file mode 100755 index f65c7d7f..00000000 --- a/samples/ServalApp/start_app.sh +++ /dev/null @@ -1,4 +0,0 @@ -python3 send_updates.py & -SEND_UPDATES_PID=$! 
-streamlit run serval_app.py -kill $SEND_UPDATES_PID \ No newline at end of file From e96ef708a68c547a5fddcc83667177beb2ed6119 Mon Sep 17 00:00:00 2001 From: Enkidu93 Date: Mon, 23 Oct 2023 12:10:12 -0400 Subject: [PATCH 08/13] Switch to production Serval --- samples/ServalApp/serval_app.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/samples/ServalApp/serval_app.py b/samples/ServalApp/serval_app.py index 7e6e1e8e..8c89d16c 100644 --- a/samples/ServalApp/serval_app.py +++ b/samples/ServalApp/serval_app.py @@ -38,7 +38,7 @@ def update(build:Build, email_server:ServalAppEmailServer, data=None): print(f"\tUpdated {build}") serval_auth = ServalBearerAuth() - client = RemoteCaller(url_prefix="http://localhost",auth=serval_auth) + client = RemoteCaller(url_prefix="https://prod.serval-api.org",auth=serval_auth) responses:"dict[str,function]" = {"Completed":completed, "Faulted":faulted, "Canceled":faulted} def get_update(build:Build, email_server:ServalAppEmailServer): @@ -91,7 +91,7 @@ def send_updates(email_server:ServalAppEmailServer): st.session_state['authorized'] = False st.session_state['authorization_failure'] = True st.rerun() - client = RemoteCaller(url_prefix="http://localhost",auth=serval_auth) + client = RemoteCaller(url_prefix="https://prod.serval-api.org",auth=serval_auth) engine = create_engine("sqlite:///builds.db") Session = sessionmaker(bind=engine) session = Session() From e205576d3ea3771d9c55dad501c47651e6cefd00 Mon Sep 17 00:00:00 2001 From: Enkidu93 Date: Mon, 23 Oct 2023 14:40:50 -0400 Subject: [PATCH 09/13] Make fields configurable and move delete to end of updates --- samples/ServalApp/README.md | 2 +- samples/ServalApp/serval_app.py | 12 ++++++------ 2 files changed, 7 insertions(+), 7 deletions(-) diff --git a/samples/ServalApp/README.md b/samples/ServalApp/README.md index ae37df99..3cd517df 100644 --- a/samples/ServalApp/README.md +++ b/samples/ServalApp/README.md @@ -6,7 +6,7 @@ streamlit run serval_app.py ``` ### 
Regenerating the Python Client -When the Serval API is updated, use the tool [swagger-to](https://pypi.org/project/swagger-to/) to generate a new `serval_client_module.py` using the following command: +When the Serval API is updated, download the "swagger.json" from the swagger endpoint and use the tool [swagger-to](https://pypi.org/project/swagger-to/) to generate a new `serval_client_module.py` using the following command in this directory: ``` swagger_to_py_client.py --swagger_path path/to/swagger.json --outpath serval_client_module.py ``` diff --git a/samples/ServalApp/serval_app.py b/samples/ServalApp/serval_app.py index 8c89d16c..6e64a699 100644 --- a/samples/ServalApp/serval_app.py +++ b/samples/ServalApp/serval_app.py @@ -19,26 +19,26 @@ def send_emails(): try: def started(build:Build, email_server:ServalAppEmailServer, data=None): print(f"\tStarted {build}") - session.delete(build) email_server.send_build_started_email(build.email) + session.delete(build) session.add(Build(build_id=build.build_id, engine_id=build.engine_id, email=build.email, state=State.Active, corpus_id=build.corpus_id)) def faulted(build:Build, email_server:ServalAppEmailServer, data=None): print(f"\tFaulted {build}") - session.delete(build) email_server.send_build_faulted_email(build.email, error=data) + session.delete(build) def completed(build:Build, email_server:ServalAppEmailServer, data=None): print(f"\tCompleted {build}") - session.delete(build) pretranslations = client.translation_engines_get_all_pretranslations(build.engine_id, build.corpus_id) email_server.send_build_completed_email(build.email, '\n'.join([f"{'|'.join(pretranslation.refs)}\t{pretranslation.translation}" for pretranslation in pretranslations])) + session.delete(build) def update(build:Build, email_server:ServalAppEmailServer, data=None): print(f"\tUpdated {build}") serval_auth = ServalBearerAuth() - client = RemoteCaller(url_prefix="https://prod.serval-api.org",auth=serval_auth) + client = 
RemoteCaller(url_prefix=os.environ.get('SERVAL_HOST_URL'),auth=serval_auth) responses:"dict[str,function]" = {"Completed":completed, "Faulted":faulted, "Canceled":faulted} def get_update(build:Build, email_server:ServalAppEmailServer): @@ -63,7 +63,7 @@ def send_updates(email_server:ServalAppEmailServer): with ServalAppEmailServer(os.environ.get('SERVAL_APP_EMAIL_PASSWORD')) as email_server: while(True): send_updates(email_server) - sleep(300) #Once every five minutes... + sleep(os.environ.get('SERVAL_APP_UPDATE_FREQ_SEC',300)) except Exception as e: print(e) st.session_state['background_process_has_started'] = False @@ -110,7 +110,7 @@ def submit(): ) ) ) - build = json.loads(client.translation_engines_start_build(engine['id'], TranslationBuildConfig(pretranslate=[PretranslateCorpusConfig(corpus_id=corpus["id"], text_ids= [] if st.session_state['source_files'][0].name[-4:] == '.zip' else list(map(lambda f: f.name, st.session_state['source_files'])))], options="{\"max_steps\":10}"))) + build = json.loads(client.translation_engines_start_build(engine['id'], TranslationBuildConfig(pretranslate=[PretranslateCorpusConfig(corpus_id=corpus["id"], text_ids= [] if st.session_state['source_files'][0].name[-4:] == '.zip' else list(map(lambda f: f.name, st.session_state['source_files'])))], options="{\"max_steps\":" + os.environ.get('SERVAL_APP_MAX_STEPS',10) + "}"))) session.add(Build(build_id=build['id'],engine_id=engine['id'],email=st.session_state['email'],state=build['state'],corpus_id=corpus['id'])) session.commit() From 8c6501947a73642d270673710b6ba864f55ff24e Mon Sep 17 00:00:00 2001 From: Enkidu93 Date: Mon, 23 Oct 2023 15:36:32 -0400 Subject: [PATCH 10/13] Formatting, gitignore, and poetry --- .gitignore | 2 + samples/ServalApp/db.py | 25 +- samples/ServalApp/pyproject.toml | 18 ++ samples/ServalApp/serval_app.py | 306 +++++++++++++++++------ samples/ServalApp/serval_auth_module.py | 43 ++-- samples/ServalApp/serval_email_module.py | 53 ++-- 6 files changed, 324 
insertions(+), 123 deletions(-) create mode 100644 samples/ServalApp/pyproject.toml diff --git a/.gitignore b/.gitignore index 1aed303d..05f01900 100644 --- a/.gitignore +++ b/.gitignore @@ -42,3 +42,5 @@ lib/ .vs appsettings.user.json artifacts + +.db diff --git a/samples/ServalApp/db.py b/samples/ServalApp/db.py index ab59bbce..fc065d23 100644 --- a/samples/ServalApp/db.py +++ b/samples/ServalApp/db.py @@ -2,28 +2,41 @@ from sqlalchemy import Column, MetaData, String, Enum, create_engine import enum + class State(enum.Enum): Pending = 0 Active = 1 Completed = 2 Faulted = 3 + metadata = MetaData() Base = declarative_base(metadata=metadata) + class Build(Base): __tablename__ = "builds" - build_id = Column("build_id",String,primary_key=True) - engine_id = Column("engine_id",String,primary_key=True) - email = Column("email",String) - state = Column("state",Enum(State)) - corpus_id = Column("corpus_id",String) + build_id = Column("build_id", String, primary_key=True) + engine_id = Column("engine_id", String, primary_key=True) + email = Column("email", String) + state = Column("state", Enum(State)) + corpus_id = Column("corpus_id", String) def __str__(self): - return str({'build_id':self.build_id, 'engine_id':self.engine_id,'email':self.email,'state':self.state,'corpus_id':self.corpus_id}) + return str( + { + "build_id": self.build_id, + "engine_id": self.engine_id, + "email": self.email, + "state": self.state, + "corpus_id": self.corpus_id, + } + ) def __repr__(self): return self.__str__() + + def clear_and_regenerate_tables(): engine = create_engine("sqlite:///builds.db") metadata.drop_all(bind=engine) diff --git a/samples/ServalApp/pyproject.toml b/samples/ServalApp/pyproject.toml new file mode 100644 index 00000000..b84afe31 --- /dev/null +++ b/samples/ServalApp/pyproject.toml @@ -0,0 +1,18 @@ +[tool.poetry] +name = "servalapp" +version = "0.1.0" +description = "" +authors = ["Your Name "] +readme = "README.md" + +[tool.poetry.dependencies] +python = ">=3.8,<3.9.7 
|| >3.9.7,<4.0" +email = "^4.0.2" +streamlit = "^1.27.2" +requests = "^2.31.0" +SQLAlchemy = "^2.0.22" + + +[build-system] +requires = ["poetry-core"] +build-backend = "poetry.core.masonry.api" diff --git a/samples/ServalApp/serval_app.py b/samples/ServalApp/serval_app.py index 6e64a699..3bd052d4 100644 --- a/samples/ServalApp/serval_app.py +++ b/samples/ServalApp/serval_app.py @@ -12,44 +12,74 @@ from serval_email_module import ServalAppEmailServer import re + def send_emails(): engine = create_engine("sqlite:///builds.db") Session = sessionmaker(bind=engine) session = Session() try: - def started(build:Build, email_server:ServalAppEmailServer, data=None): + + def started(build: Build, email_server: ServalAppEmailServer, data=None): print(f"\tStarted {build}") email_server.send_build_started_email(build.email) session.delete(build) - session.add(Build(build_id=build.build_id, engine_id=build.engine_id, email=build.email, state=State.Active, corpus_id=build.corpus_id)) + session.add( + Build( + build_id=build.build_id, + engine_id=build.engine_id, + email=build.email, + state=State.Active, + corpus_id=build.corpus_id, + ) + ) - def faulted(build:Build, email_server:ServalAppEmailServer, data=None): + def faulted(build: Build, email_server: ServalAppEmailServer, data=None): print(f"\tFaulted {build}") email_server.send_build_faulted_email(build.email, error=data) session.delete(build) - def completed(build:Build, email_server:ServalAppEmailServer, data=None): + def completed(build: Build, email_server: ServalAppEmailServer, data=None): print(f"\tCompleted {build}") - pretranslations = client.translation_engines_get_all_pretranslations(build.engine_id, build.corpus_id) - email_server.send_build_completed_email(build.email, '\n'.join([f"{'|'.join(pretranslation.refs)}\t{pretranslation.translation}" for pretranslation in pretranslations])) + pretranslations = client.translation_engines_get_all_pretranslations( + build.engine_id, build.corpus_id + ) + 
email_server.send_build_completed_email( + build.email, + "\n".join( + [ + f"{'|'.join(pretranslation.refs)}\t{pretranslation.translation}" + for pretranslation in pretranslations + ] + ), + ) session.delete(build) - def update(build:Build, email_server:ServalAppEmailServer, data=None): + def update(build: Build, email_server: ServalAppEmailServer, data=None): print(f"\tUpdated {build}") serval_auth = ServalBearerAuth() - client = RemoteCaller(url_prefix=os.environ.get('SERVAL_HOST_URL'),auth=serval_auth) - responses:"dict[str,function]" = {"Completed":completed, "Faulted":faulted, "Canceled":faulted} + client = RemoteCaller( + url_prefix=os.environ.get("SERVAL_HOST_URL"), auth=serval_auth + ) + responses: "dict[str,function]" = { + "Completed": completed, + "Faulted": faulted, + "Canceled": faulted, + } - def get_update(build:Build, email_server:ServalAppEmailServer): - build_update = client.translation_engines_get_build(id=build.engine_id, build_id=build.build_id) + def get_update(build: Build, email_server: ServalAppEmailServer): + build_update = client.translation_engines_get_build( + id=build.engine_id, build_id=build.build_id + ) if build.state == State.Pending and build_update.state == "Active": started(build, email_server) else: - responses.get(build_update.state, update)(build, email_server, build_update.message) + responses.get(build_update.state, update)( + build, email_server, build_update.message + ) session.commit() - def send_updates(email_server:ServalAppEmailServer): + def send_updates(email_server: ServalAppEmailServer): print(f"Checking for updates...") with session.no_autoflush: builds = session.query(Build).all() @@ -60,107 +90,229 @@ def send_updates(email_server:ServalAppEmailServer): print(f"\tFailed to update {build} because of exception {e}") raise e - with ServalAppEmailServer(os.environ.get('SERVAL_APP_EMAIL_PASSWORD')) as email_server: - while(True): + with ServalAppEmailServer( + os.environ.get("SERVAL_APP_EMAIL_PASSWORD") + ) as 
email_server: + while True: send_updates(email_server) - sleep(os.environ.get('SERVAL_APP_UPDATE_FREQ_SEC',300)) + sleep(os.environ.get("SERVAL_APP_UPDATE_FREQ_SEC", 300)) except Exception as e: print(e) - st.session_state['background_process_has_started'] = False + st.session_state["background_process_has_started"] = False + -if not st.session_state.get('background_process_has_started',False): +if not st.session_state.get("background_process_has_started", False): cron_thread = Thread(target=send_emails) add_script_run_ctx(cron_thread) cron_thread.start() - st.session_state['background_process_has_started'] = True + st.session_state["background_process_has_started"] = True serval_auth = None -if not st.session_state.get('authorized',False): +if not st.session_state.get("authorized", False): with st.form(key="Authorization Form"): - st.session_state['client_id'] = st.text_input(label='Client ID') - st.session_state['client_secret'] = st.text_input(label='Client Secret', type='password') + st.session_state["client_id"] = st.text_input(label="Client ID") + st.session_state["client_secret"] = st.text_input( + label="Client Secret", type="password" + ) if st.form_submit_button("Authorize"): - st.session_state['authorized'] = True + st.session_state["authorized"] = True st.rerun() - if st.session_state.get('authorization_failure', False): - st.error('Invalid credentials. Please check your credentials.') + if st.session_state.get("authorization_failure", False): + st.error("Invalid credentials. 
Please check your credentials.") else: try: - serval_auth = ServalBearerAuth(client_id=st.session_state['client_id'] if st.session_state['client_id'] != "" else "", client_secret=st.session_state['client_secret'] if st.session_state['client_secret'] != "" else "") + serval_auth = ServalBearerAuth( + client_id=st.session_state["client_id"] + if st.session_state["client_id"] != "" + else "", + client_secret=st.session_state["client_secret"] + if st.session_state["client_secret"] != "" + else "", + ) except ValueError: - st.session_state['authorized'] = False - st.session_state['authorization_failure'] = True + st.session_state["authorized"] = False + st.session_state["authorization_failure"] = True st.rerun() - client = RemoteCaller(url_prefix="https://prod.serval-api.org",auth=serval_auth) + client = RemoteCaller(url_prefix="https://prod.serval-api.org", auth=serval_auth) engine = create_engine("sqlite:///builds.db") Session = sessionmaker(bind=engine) session = Session() def submit(): - engine = json.loads(client.translation_engines_create(TranslationEngineConfig(source_language=st.session_state['source_language'],target_language=st.session_state['target_language'],type='Nmt',name=f'serval_app_engine:{st.session_state["email"]}'))) - source_files = [json.loads(client.data_files_create(st.session_state['source_files'][i], format="Paratext" if st.session_state['source_files'][i].name[-4:] == '.zip' else "Text")) for i in range(len(st.session_state['source_files']))] - target_files = [json.loads(client.data_files_create(st.session_state['target_files'][i], format="Paratext" if st.session_state['target_files'][i].name[-4:] == '.zip' else "Text")) for i in range(len(st.session_state['target_files']))] - corpus = json.loads(client.translation_engines_add_corpus( - engine['id'], - TranslationCorpusConfig( - source_files=[TranslationCorpusFileConfig(file_id=file['id'], text_id=name) for file, name in zip(source_files, list(map(lambda f: f.name, 
st.session_state['source_files'])))], - target_files=[TranslationCorpusFileConfig(file_id=file['id'], text_id=name) for file, name in zip(target_files, list(map(lambda f: f.name, st.session_state['target_files'])))], - source_language=st.session_state['source_language'], - target_language=st.session_state['target_language'] + engine = json.loads( + client.translation_engines_create( + TranslationEngineConfig( + source_language=st.session_state["source_language"], + target_language=st.session_state["target_language"], + type="Nmt", + name=f'serval_app_engine:{st.session_state["email"]}', ) ) ) - build = json.loads(client.translation_engines_start_build(engine['id'], TranslationBuildConfig(pretranslate=[PretranslateCorpusConfig(corpus_id=corpus["id"], text_ids= [] if st.session_state['source_files'][0].name[-4:] == '.zip' else list(map(lambda f: f.name, st.session_state['source_files'])))], options="{\"max_steps\":" + os.environ.get('SERVAL_APP_MAX_STEPS',10) + "}"))) - session.add(Build(build_id=build['id'],engine_id=engine['id'],email=st.session_state['email'],state=build['state'],corpus_id=corpus['id'])) + source_files = [ + json.loads( + client.data_files_create( + st.session_state["source_files"][i], + format="Paratext" + if st.session_state["source_files"][i].name[-4:] == ".zip" + else "Text", + ) + ) + for i in range(len(st.session_state["source_files"])) + ] + target_files = [ + json.loads( + client.data_files_create( + st.session_state["target_files"][i], + format="Paratext" + if st.session_state["target_files"][i].name[-4:] == ".zip" + else "Text", + ) + ) + for i in range(len(st.session_state["target_files"])) + ] + corpus = json.loads( + client.translation_engines_add_corpus( + engine["id"], + TranslationCorpusConfig( + source_files=[ + TranslationCorpusFileConfig(file_id=file["id"], text_id=name) + for file, name in zip( + source_files, + list( + map(lambda f: f.name, st.session_state["source_files"]) + ), + ) + ], + target_files=[ + 
TranslationCorpusFileConfig(file_id=file["id"], text_id=name) + for file, name in zip( + target_files, + list( + map(lambda f: f.name, st.session_state["target_files"]) + ), + ) + ], + source_language=st.session_state["source_language"], + target_language=st.session_state["target_language"], + ), + ) + ) + build = json.loads( + client.translation_engines_start_build( + engine["id"], + TranslationBuildConfig( + pretranslate=[ + PretranslateCorpusConfig( + corpus_id=corpus["id"], + text_ids=[] + if st.session_state["source_files"][0].name[-4:] == ".zip" + else list( + map(lambda f: f.name, st.session_state["source_files"]) + ), + ) + ], + options='{"max_steps":' + + os.environ.get("SERVAL_APP_MAX_STEPS", 10) + + "}", + ), + ) + ) + session.add( + Build( + build_id=build["id"], + engine_id=engine["id"], + email=st.session_state["email"], + state=build["state"], + corpus_id=corpus["id"], + ) + ) session.commit() - def already_active_build_for(email:str): + def already_active_build_for(email: str): return len(session.query(Build).where(Build.email == email).all()) > 0 st.subheader("Neural Machine Translation") - tried_to_submit = st.session_state.get('tried_to_submit', False) + tried_to_submit = st.session_state.get("tried_to_submit", False) with st.form(key="NmtTranslationForm"): - st.session_state['source_language'] = st.text_input(label="Source language tag*", placeholder="en") - if st.session_state.get('source_language','') == '' and tried_to_submit: - st.error("Please enter a source language tag before submitting", icon='⬆️') - - st.session_state['source_files'] = st.file_uploader(label="Source File(s)", accept_multiple_files=True) - if len(st.session_state.get('source_files',[])) == 0 and tried_to_submit: - st.error("Please upload a source file before submitting", icon='⬆️') - if len(st.session_state.get('source_files',[])) > 1: - st.warning('Please note that source and target text files will be paired together by file name', icon='💡') - - 
st.session_state['target_language'] = st.text_input(label="Target language tag*", placeholder="es") - if st.session_state.get('target_language','') == '' and tried_to_submit: - st.error("Please enter a target language tag before submitting", icon='⬆️') - - st.session_state['target_files'] = st.file_uploader(label="Target File(s)", accept_multiple_files=True) - if len(st.session_state.get('target_files',[])) > 1: - st.warning('Please note that source and target text files will be paired together by file name', icon='💡') - - st.session_state['email'] = st.text_input(label="Email", placeholder="johndoe@example.com") - if st.session_state.get('email','') == '' and tried_to_submit: - st.error("Please enter an email address", icon='⬆️') - elif not re.match(r"^\S+@\S+\.\S+$", st.session_state['email']) and tried_to_submit: - st.error("Please enter a valid email address", icon='⬆️') - st.session_state['email'] = '' + st.session_state["source_language"] = st.text_input( + label="Source language tag*", placeholder="en" + ) + if st.session_state.get("source_language", "") == "" and tried_to_submit: + st.error("Please enter a source language tag before submitting", icon="⬆️") + + st.session_state["source_files"] = st.file_uploader( + label="Source File(s)", accept_multiple_files=True + ) + if len(st.session_state.get("source_files", [])) == 0 and tried_to_submit: + st.error("Please upload a source file before submitting", icon="⬆️") + if len(st.session_state.get("source_files", [])) > 1: + st.warning( + "Please note that source and target text files will be paired together by file name", + icon="💡", + ) + + st.session_state["target_language"] = st.text_input( + label="Target language tag*", placeholder="es" + ) + if st.session_state.get("target_language", "") == "" and tried_to_submit: + st.error("Please enter a target language tag before submitting", icon="⬆️") + + st.session_state["target_files"] = st.file_uploader( + label="Target File(s)", accept_multiple_files=True + ) + 
if len(st.session_state.get("target_files", [])) > 1: + st.warning( + "Please note that source and target text files will be paired together by file name", + icon="💡", + ) + + st.session_state["email"] = st.text_input( + label="Email", placeholder="johndoe@example.com" + ) + if st.session_state.get("email", "") == "" and tried_to_submit: + st.error("Please enter an email address", icon="⬆️") + elif ( + not re.match(r"^\S+@\S+\.\S+$", st.session_state["email"]) + and tried_to_submit + ): + st.error("Please enter a valid email address", icon="⬆️") + st.session_state["email"] = "" if tried_to_submit: - st.error(st.session_state.get('error',"Something went wrong. Please try again in a moment.")) + st.error( + st.session_state.get( + "error", "Something went wrong. Please try again in a moment." + ) + ) if st.form_submit_button("Generate translations"): - if already_active_build_for(st.session_state['email']): - st.session_state['tried_to_submit'] = True - st.session_state['error'] = "There is already an a pending or active build associated with this email address. Please wait for the previous build to finish." + if already_active_build_for(st.session_state["email"]): + st.session_state["tried_to_submit"] = True + st.session_state[ + "error" + ] = "There is already an a pending or active build associated with this email address. Please wait for the previous build to finish." st.rerun() - elif st.session_state['source_language'] != '' and st.session_state['target_language'] != '' and len(st.session_state['source_files']) > 0 and st.session_state['email'] != '': + elif ( + st.session_state["source_language"] != "" + and st.session_state["target_language"] != "" + and len(st.session_state["source_files"]) > 0 + and st.session_state["email"] != "" + ): with st.spinner(): submit() - st.session_state['tried_to_submit'] = False - st.toast("Translations are on their way! 
You'll receive an email when your translation job has begun.") + st.session_state["tried_to_submit"] = False + st.toast( + "Translations are on their way! You'll receive an email when your translation job has begun." + ) sleep(4) st.rerun() else: - st.session_state['tried_to_submit'] = True - st.session_state['error'] = "Some required fields were left blank. Please fill in all fields above" + st.session_state["tried_to_submit"] = True + st.session_state[ + "error" + ] = "Some required fields were left blank. Please fill in all fields above" st.rerun() - st.markdown("\* Use IETF tags if possible. See [here](https://en.wikipedia.org/wiki/IETF_language_tag) for more information on IETF tags.", unsafe_allow_html=True) \ No newline at end of file + st.markdown( + "\* Use IETF tags if possible. See [here](https://en.wikipedia.org/wiki/IETF_language_tag) for more information on IETF tags.", + unsafe_allow_html=True, + ) diff --git a/samples/ServalApp/serval_auth_module.py b/samples/ServalApp/serval_auth_module.py index dbb53ea6..48020bfa 100644 --- a/samples/ServalApp/serval_auth_module.py +++ b/samples/ServalApp/serval_auth_module.py @@ -3,18 +3,26 @@ import os import time + class ServalBearerAuth(requests.auth.AuthBase): def __init__(self, client_id="", client_secret=""): - self.__client_id = client_id if client_id != "" else os.environ.get("SERVAL_CLIENT_ID") - assert(self.__client_id is not None) - self.__client_secret = client_secret if client_secret != "" else os.environ.get("SERVAL_CLIENT_SECRET") - assert(self.__client_secret is not None) + self.__client_id = ( + client_id if client_id != "" else os.environ.get("SERVAL_CLIENT_ID") + ) + assert self.__client_id is not None + self.__client_secret = ( + client_secret + if client_secret != "" + else os.environ.get("SERVAL_CLIENT_SECRET") + ) + assert self.__client_secret is not None self.__auth_url = os.environ.get("SERVAL_AUTH_URL") - assert(self.__auth_url is not None) + assert self.__auth_url is not None 
self.update_token() self.__last_time_fetched = time.time() + def __call__(self, r): - if(time.time() - self.__last_time_fetched > 20*60): + if time.time() - self.__last_time_fetched > 20 * 60: self.update_token() r.headers["authorization"] = "Bearer " + self.token return r @@ -22,20 +30,21 @@ def __call__(self, r): def update_token(self): data = { "client_id": f"{self.__client_id}", - "client_secret":f"{self.__client_secret}", - "audience":"https://machine.sil.org", - "grant_type":"client_credentials" - } + "client_secret": f"{self.__client_secret}", + "audience": "https://machine.sil.org", + "grant_type": "client_credentials", + } - encoded_data = json.dumps(data).encode('utf-8') + encoded_data = json.dumps(data).encode("utf-8") r = None try: - r:requests.Response = requests.post( - url=f'{self.__auth_url}/oauth/token', + r: requests.Response = requests.post( + url=f"{self.__auth_url}/oauth/token", data=encoded_data, - headers={"content-type": "application/json"} + headers={"content-type": "application/json"}, ) - self.token = r.json()['access_token'] if r is not None else None + self.token = r.json()["access_token"] if r is not None else None except Exception as e: - raise ValueError(f"Token cannot be None. Failed to retrieve token from auth server; responded with {r.status_code if r is not None else ''}. Original exception: {e}") - + raise ValueError( + f"Token cannot be None. Failed to retrieve token from auth server; responded with {r.status_code if r is not None else ''}. 
Original exception: {e}" + ) diff --git a/samples/ServalApp/serval_email_module.py b/samples/ServalApp/serval_email_module.py index 5876e622..1a5cfc20 100644 --- a/samples/ServalApp/serval_email_module.py +++ b/samples/ServalApp/serval_email_module.py @@ -1,8 +1,15 @@ from email.message import EmailMessage import smtplib, ssl + class ServalAppEmailServer: - def __init__(self, password, sender_address = 'serval-app@languagetechnology.org', host='mail.languagetechnology.org', port=465) -> None: + def __init__( + self, + password, + sender_address="serval-app@languagetechnology.org", + host="mail.languagetechnology.org", + port=465, + ) -> None: self.__password = password self.sender_address = sender_address self.host = host @@ -11,7 +18,7 @@ def __init__(self, password, sender_address = 'serval-app@languagetechnology.org @property def password(self): - return len(self.__password)*"*" + return len(self.__password) * "*" def __enter__(self): context = ssl.create_default_context() @@ -22,57 +29,57 @@ def __enter__(self): def __exit__(self, *args): self.server.close() - def send_build_completed_email(self, recipient_address:str, pretranslations_file_data:str): + def send_build_completed_email( + self, recipient_address: str, pretranslations_file_data: str + ): msg = EmailMessage() msg.set_content( -'''Hi! + """Hi! Your NMT engine has completed building. Attached are the translations of untranslated source text in the files you included. If you are experiencing difficulties using this application, please contact eli_lowry@sil.org. Thank you! -''' +""" ) - msg['From'] = self.sender_address - msg['To'] = recipient_address - msg['Subject'] = 'Your NMT build job is complete!' - msg.add_attachment(pretranslations_file_data, filename='translations.txt') + msg["From"] = self.sender_address + msg["To"] = recipient_address + msg["Subject"] = "Your NMT build job is complete!" 
+ msg.add_attachment(pretranslations_file_data, filename="translations.txt") self.server.send_message(msg) - def send_build_faulted_email(self, recipient_address:str, error=""): + def send_build_faulted_email(self, recipient_address: str, error=""): msg = EmailMessage() msg.set_content( -f'''Hi! + f"""Hi! Your NMT engine has failed to build{" with the following error message: " + error if error != "" else ""}. Please make sure the information you specified is correct and try again after a while. If you continue to experience difficulties using this application, please contact eli_lowry@sil.org. Thank you! -''' +""" ) - msg['From'] = self.sender_address - msg['To'] = recipient_address - msg['Subject'] = 'Your NMT build job has failed' + msg["From"] = self.sender_address + msg["To"] = recipient_address + msg["Subject"] = "Your NMT build job has failed" self.server.send_message(msg) - def send_build_started_email(self, recipient_address:str): + def send_build_started_email(self, recipient_address: str): msg = EmailMessage() msg.set_content( -'''Hi! + """Hi! Your NMT engine has started building. We will contact you when it is complete. If you are experiencing difficulties using this application, please contact eli_lowry@sil.org. Thank you! -''' +""" ) - msg['From'] = self.sender_address - msg['To'] = recipient_address - msg['Subject'] = 'Your NMT build job has started building!' + msg["From"] = self.sender_address + msg["To"] = recipient_address + msg["Subject"] = "Your NMT build job has started building!" self.server.send_message(msg) - - From bfa3be98baac77dc699815282f0d84a03efbe634 Mon Sep 17 00:00:00 2001 From: Enkidu93 Date: Tue, 24 Oct 2023 10:32:43 -0400 Subject: [PATCH 11/13] Remove db, add flake8 etc. 
--- .vscode/settings.json | 3 ++ samples/ServalApp/.flake8 | 5 +++ samples/ServalApp/builds.db | Bin 20480 -> 0 bytes samples/ServalApp/db.py | 8 ++--- samples/ServalApp/pyproject.toml | 2 +- samples/ServalApp/serval_app.py | 40 ++++++++++++++-------- samples/ServalApp/serval_auth_module.py | 6 ++-- samples/ServalApp/serval_client_module.py | 35 ++++++------------- samples/ServalApp/serval_email_module.py | 9 +++-- 9 files changed, 59 insertions(+), 49 deletions(-) create mode 100644 samples/ServalApp/.flake8 delete mode 100644 samples/ServalApp/builds.db diff --git a/.vscode/settings.json b/.vscode/settings.json index d2907b44..5d6ec7eb 100644 --- a/.vscode/settings.json +++ b/.vscode/settings.json @@ -30,4 +30,7 @@ } ], "dotnet.defaultSolution": "Serval.sln", + "[python]": { + "editor.defaultFormatter": "ms-python.black-formatter" + } } \ No newline at end of file diff --git a/samples/ServalApp/.flake8 b/samples/ServalApp/.flake8 new file mode 100644 index 00000000..ce6af763 --- /dev/null +++ b/samples/ServalApp/.flake8 @@ -0,0 +1,5 @@ +[flake8] +max-line-length = 120 +per-file-ignores = serval_app.py:F821,W605 +exclude = + serval_client_module.py \ No newline at end of file diff --git a/samples/ServalApp/builds.db b/samples/ServalApp/builds.db deleted file mode 100644 index d0e91a7f0fb9dcc2820cd668fd61e8a3d81eb4ab..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 20480 zcmeI(Jx{_w7{Ku>ib@j#8)3V|1tOXl=#^`X#7_P_p7;=vgb)dlc+=;M@jOBR0R#|0009ILKmY**5cnekD%E@W2sDi`IbPiD;6uvPb(^4hWsCCiqT zv#P9ImCHG4#@tTJ>$u-OZ9ASnc~1`yLPura1Qj?cMi-sVQ{lxqW=8 diff --git a/samples/ServalApp/db.py b/samples/ServalApp/db.py index fc065d23..e205bd9a 100644 --- a/samples/ServalApp/db.py +++ b/samples/ServalApp/db.py @@ -1,7 +1,8 @@ -from sqlalchemy.orm import declarative_base -from sqlalchemy import Column, MetaData, String, Enum, create_engine import enum +from sqlalchemy import Column, Enum, MetaData, String, create_engine +from sqlalchemy.orm import 
declarative_base + class State(enum.Enum): Pending = 0 @@ -37,7 +38,6 @@ def __repr__(self): return self.__str__() -def clear_and_regenerate_tables(): +def create_db_if_not_exists(): engine = create_engine("sqlite:///builds.db") - metadata.drop_all(bind=engine) metadata.create_all(bind=engine) diff --git a/samples/ServalApp/pyproject.toml b/samples/ServalApp/pyproject.toml index b84afe31..536c96ce 100644 --- a/samples/ServalApp/pyproject.toml +++ b/samples/ServalApp/pyproject.toml @@ -2,7 +2,7 @@ name = "servalapp" version = "0.1.0" description = "" -authors = ["Your Name "] +authors = ["Eli Lowry "] readme = "README.md" [tool.poetry.dependencies] diff --git a/samples/ServalApp/serval_app.py b/samples/ServalApp/serval_app.py index 3bd052d4..6abe56b8 100644 --- a/samples/ServalApp/serval_app.py +++ b/samples/ServalApp/serval_app.py @@ -1,16 +1,21 @@ +import json +import os +import re +import traceback +from threading import Thread +from time import sleep + import streamlit as st -from streamlit.runtime.scriptrunner import add_script_run_ctx -from serval_client_module import * -from serval_auth_module import * +from db import Build, State, create_db_if_not_exists +from serval_auth_module import ServalBearerAuth +from serval_client_module import (PretranslateCorpusConfig, RemoteCaller, TranslationBuildConfig, + TranslationCorpusConfig, TranslationCorpusFileConfig, TranslationEngineConfig) +from serval_email_module import ServalAppEmailServer from sqlalchemy import create_engine from sqlalchemy.orm import sessionmaker -from db import Build -from time import sleep -from threading import Thread -import os -from db import Build, State -from serval_email_module import ServalAppEmailServer -import re +from streamlit.runtime.scriptrunner import add_script_run_ctx + +create_db_if_not_exists() def send_emails(): @@ -80,7 +85,7 @@ def get_update(build: Build, email_server: ServalAppEmailServer): session.commit() def send_updates(email_server: ServalAppEmailServer): - 
print(f"Checking for updates...") + print("Checking for updates...") with session.no_autoflush: builds = session.query(Build).all() for build in builds: @@ -88,6 +93,7 @@ def send_updates(email_server: ServalAppEmailServer): get_update(build, email_server) except Exception as e: print(f"\tFailed to update {build} because of exception {e}") + traceback.print_exc() raise e with ServalAppEmailServer( @@ -133,7 +139,9 @@ def send_updates(email_server: ServalAppEmailServer): st.session_state["authorized"] = False st.session_state["authorization_failure"] = True st.rerun() - client = RemoteCaller(url_prefix="https://prod.serval-api.org", auth=serval_auth) + client = RemoteCaller( + url_prefix=os.environ.get("SERVAL_HOST_URL"), auth=serval_auth + ) engine = create_engine("sqlite:///builds.db") Session = sessionmaker(bind=engine) session = Session() @@ -213,7 +221,7 @@ def submit(): ) ], options='{"max_steps":' - + os.environ.get("SERVAL_APP_MAX_STEPS", 10) + + str(os.environ.get("SERVAL_APP_MAX_STEPS", 10)) + "}", ), ) @@ -290,7 +298,8 @@ def already_active_build_for(email: str): st.session_state["tried_to_submit"] = True st.session_state[ "error" - ] = "There is already an a pending or active build associated with this email address. Please wait for the previous build to finish." + ] = "There is already an a pending or active build associated with this email address. \ + Please wait for the previous build to finish." st.rerun() elif ( st.session_state["source_language"] != "" @@ -313,6 +322,7 @@ def already_active_build_for(email: str): ] = "Some required fields were left blank. Please fill in all fields above" st.rerun() st.markdown( - "\* Use IETF tags if possible. See [here](https://en.wikipedia.org/wiki/IETF_language_tag) for more information on IETF tags.", + "\* Use IETF tags if possible. 
See [here](https://en.wikipedia.org/wiki/IETF_language_tag) \ + for more information on IETF tags.", unsafe_allow_html=True, ) diff --git a/samples/ServalApp/serval_auth_module.py b/samples/ServalApp/serval_auth_module.py index 48020bfa..0fd265a8 100644 --- a/samples/ServalApp/serval_auth_module.py +++ b/samples/ServalApp/serval_auth_module.py @@ -1,8 +1,9 @@ -import requests import json import os import time +import requests + class ServalBearerAuth(requests.auth.AuthBase): def __init__(self, client_id="", client_secret=""): @@ -46,5 +47,6 @@ def update_token(self): self.token = r.json()["access_token"] if r is not None else None except Exception as e: raise ValueError( - f"Token cannot be None. Failed to retrieve token from auth server; responded with {r.status_code if r is not None else ''}. Original exception: {e}" + f"Token cannot be None. Failed to retrieve token from auth server; responded \ + with {r.status_code if r is not None else ''}. Original exception: {e}" ) diff --git a/samples/ServalApp/serval_client_module.py b/samples/ServalApp/serval_client_module.py index a44f0871..699fa5e1 100644 --- a/samples/ServalApp/serval_client_module.py +++ b/samples/ServalApp/serval_client_module.py @@ -2173,7 +2173,7 @@ def __init__( message: Optional[str] = None, queue_depth: Optional[int] = None, date_finished: Optional[str] = None, - options: Optional[str] = None) -> None: + options: Optional[Any] = None) -> None: """Initializes with the given values.""" self.id = id @@ -2322,14 +2322,7 @@ def translation_build_from_obj(obj: Any, path: str = "") -> TranslationBuild: else: date_finished_from_obj = None - obj_options = obj.get('options', None) - if obj_options is not None: - options_from_obj = from_obj( - obj_options, - expected=[str], - path=path + '.options') # type: Optional[str] - else: - options_from_obj = None + options_from_obj = obj.get('options', None) return TranslationBuild( id=id_from_obj, @@ -2492,7 +2485,7 @@ def __init__( self, name: Optional[str] = 
None, pretranslate: Optional[List['PretranslateCorpusConfig']] = None, - options: Optional[str] = None) -> None: + options: Optional[Any] = None) -> None: """Initializes with the given values.""" self.name = name @@ -2548,14 +2541,7 @@ def translation_build_config_from_obj(obj: Any, path: str = "") -> TranslationBu else: pretranslate_from_obj = None - obj_options = obj.get('options', None) - if obj_options is not None: - options_from_obj = from_obj( - obj_options, - expected=[str], - path=path + '.options') # type: Optional[str] - else: - options_from_obj = None + options_from_obj = obj.get('options', None) return TranslationBuildConfig( name=name_from_obj, @@ -3166,7 +3152,7 @@ def translation_engines_get_queue( self, engine_type: str) -> 'Queue': """ - Send a get request to /api/v1/translation/engines/queues. + Send a post request to /api/v1/translation/engines/queues. :param engine_type: A valid engine type: SmtTransfer, Nmt, or Echo @@ -3178,7 +3164,7 @@ def translation_engines_get_queue( resp = self.session.request( - method='get', + method='post', url=url, json=data, ) @@ -3572,7 +3558,7 @@ def translation_engines_start_build( self, id: str, build_config: 'TranslationBuildConfig') -> bytes: - r""" + """ Specify the corpora or textIds to pretranslate. Even when a corpus or textId is selected for pretranslation, only "untranslated" text will be pretranslated: that is, segments (lines of text) in the specified corpora or textId's that have @@ -3580,8 +3566,8 @@ def translation_engines_start_build( you may flag a subset of books for pretranslation by including their [abbreviations](https://github.com/sillsdev/libpalaso/blob/master/SIL.Scripture/Canon.cs) in the textIds parameter. If the engine does not support pretranslation, these fields have no effect. - The `"options"` parameter of the build config provides the ability to pass build configuration parameters as a JSON string. 
- A typical use case would be to set `"options"` to `"{\"max_steps\":10}"` in order to configure the maximum + The `"options"` parameter of the build config provides the ability to pass build configuration parameters as a JSON object. + A typical use case would be to set `"options"` to `{"max_steps":10}` in order to configure the maximum number of training iterations in order to reduce turnaround time for testing purposes. :param id: The translation engine id @@ -3622,7 +3608,8 @@ def translation_engines_get_build( will timeout. A use case is to actively query the state of the current build, where the subsequent request sets the `minRevision` to the returned `revision` + 1 and timeouts are handled gracefully. - Note: this method should use request throttling. + This method should use request throttling. + Note: Within the returned build, percentCompleted is a value between 0 and 1. :param id: The translation engine id :param build_id: The build job id diff --git a/samples/ServalApp/serval_email_module.py b/samples/ServalApp/serval_email_module.py index 1a5cfc20..2ad8c38a 100644 --- a/samples/ServalApp/serval_email_module.py +++ b/samples/ServalApp/serval_email_module.py @@ -1,5 +1,6 @@ +import smtplib +import ssl from email.message import EmailMessage -import smtplib, ssl class ServalAppEmailServer: @@ -36,7 +37,8 @@ def send_build_completed_email( msg.set_content( """Hi! -Your NMT engine has completed building. Attached are the translations of untranslated source text in the files you included. +Your NMT engine has completed building. Attached are the \ + translations of untranslated source text in the files you included. If you are experiencing difficulties using this application, please contact eli_lowry@sil.org. @@ -54,7 +56,8 @@ def send_build_faulted_email(self, recipient_address: str, error=""): msg.set_content( f"""Hi! -Your NMT engine has failed to build{" with the following error message: " + error if error != "" else ""}. 
Please make sure the information you specified is correct and try again after a while. +Your NMT engine has failed to build{" with the following error message: " + error if error != "" else ""}. \ + Please make sure the information you specified is correct and try again after a while. If you continue to experience difficulties using this application, please contact eli_lowry@sil.org. From 053d862a0e969f9b4fa232fa044f715e63a43504 Mon Sep 17 00:00:00 2001 From: Enkidu93 Date: Tue, 24 Oct 2023 14:38:51 -0400 Subject: [PATCH 12/13] Add more info to emails, clean up poetry, proper logging --- samples/ServalApp/db.py | 24 +- samples/ServalApp/poetry.lock | 1498 ++++++++++++++++++++++ samples/ServalApp/pyproject.toml | 6 +- samples/ServalApp/serval_app.py | 81 +- samples/ServalApp/serval_email_module.py | 17 +- 5 files changed, 1585 insertions(+), 41 deletions(-) create mode 100644 samples/ServalApp/poetry.lock diff --git a/samples/ServalApp/db.py b/samples/ServalApp/db.py index e205bd9a..29c71d8f 100644 --- a/samples/ServalApp/db.py +++ b/samples/ServalApp/db.py @@ -17,22 +17,22 @@ class State(enum.Enum): class Build(Base): __tablename__ = "builds" + + __mapper_args__ = {"confirm_deleted_rows": False} + build_id = Column("build_id", String, primary_key=True) engine_id = Column("engine_id", String, primary_key=True) - email = Column("email", String) - state = Column("state", Enum(State)) - corpus_id = Column("corpus_id", String) + name = Column("name", String) + email = Column("email", String, nullable=False) + state = Column("state", Enum(State), nullable=False) + corpus_id = Column("corpus_id", String, nullable=False) + client_id = Column("client_id", String, nullable=False) + source_files = Column("source_files", String) + target_files = Column("target_files", String) def __str__(self): - return str( - { - "build_id": self.build_id, - "engine_id": self.engine_id, - "email": self.email, - "state": self.state, - "corpus_id": self.corpus_id, - } - ) + return f"Build name: 
{self.name}\nBuild id: {self.build_id}\nClient ID: {self.client_id}\nSource files: \ +{self.source_files}\nTarget files: {self.target_files}" def __repr__(self): return self.__str__() diff --git a/samples/ServalApp/poetry.lock b/samples/ServalApp/poetry.lock new file mode 100644 index 00000000..7b21fc25 --- /dev/null +++ b/samples/ServalApp/poetry.lock @@ -0,0 +1,1498 @@ +# This file is automatically @generated by Poetry 1.6.1 and should not be changed by hand. + +[[package]] +name = "altair" +version = "5.1.2" +description = "Vega-Altair: A declarative statistical visualization library for Python." +optional = false +python-versions = ">=3.8" +files = [ + {file = "altair-5.1.2-py3-none-any.whl", hash = "sha256:7219708ec33c152e53145485040f428954ed15fd09b2a2d89e543e6d111dae7f"}, + {file = "altair-5.1.2.tar.gz", hash = "sha256:e5f52a71853a607c61ce93ad4a414b3d486cd0d46ac597a24ae8bd1ac99dd460"}, +] + +[package.dependencies] +jinja2 = "*" +jsonschema = ">=3.0" +numpy = "*" +packaging = "*" +pandas = ">=0.25" +toolz = "*" +typing-extensions = {version = ">=4.0.1", markers = "python_version < \"3.11\""} + +[package.extras] +dev = ["anywidget", "black (<24)", "hatch", "ipython", "m2r", "mypy", "pandas-stubs", "pyarrow (>=11)", "pytest", "pytest-cov", "ruff", "types-jsonschema", "types-setuptools", "vega-datasets", "vegafusion[embed] (>=1.4.0)", "vl-convert-python (>=0.14.0)"] +doc = ["docutils", "geopandas", "jinja2", "myst-parser", "numpydoc", "pillow (>=9,<10)", "pydata-sphinx-theme", "scipy", "sphinx", "sphinx-copybutton", "sphinx-design", "sphinxext-altair"] + +[[package]] +name = "attrs" +version = "23.1.0" +description = "Classes Without Boilerplate" +optional = false +python-versions = ">=3.7" +files = [ + {file = "attrs-23.1.0-py3-none-any.whl", hash = "sha256:1f28b4522cdc2fb4256ac1a020c78acf9cba2c6b461ccd2c126f3aa8e8335d04"}, + {file = "attrs-23.1.0.tar.gz", hash = "sha256:6279836d581513a26f1bf235f9acd333bc9115683f14f7e8fae46c98fc50e015"}, +] + +[package.extras] 
+cov = ["attrs[tests]", "coverage[toml] (>=5.3)"] +dev = ["attrs[docs,tests]", "pre-commit"] +docs = ["furo", "myst-parser", "sphinx", "sphinx-notfound-page", "sphinxcontrib-towncrier", "towncrier", "zope-interface"] +tests = ["attrs[tests-no-zope]", "zope-interface"] +tests-no-zope = ["cloudpickle", "hypothesis", "mypy (>=1.1.1)", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins", "pytest-xdist[psutil]"] + +[[package]] +name = "backports-zoneinfo" +version = "0.2.1" +description = "Backport of the standard library zoneinfo module" +optional = false +python-versions = ">=3.6" +files = [ + {file = "backports.zoneinfo-0.2.1-cp36-cp36m-macosx_10_14_x86_64.whl", hash = "sha256:da6013fd84a690242c310d77ddb8441a559e9cb3d3d59ebac9aca1a57b2e18bc"}, + {file = "backports.zoneinfo-0.2.1-cp36-cp36m-manylinux1_i686.whl", hash = "sha256:89a48c0d158a3cc3f654da4c2de1ceba85263fafb861b98b59040a5086259722"}, + {file = "backports.zoneinfo-0.2.1-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:1c5742112073a563c81f786e77514969acb58649bcdf6cdf0b4ed31a348d4546"}, + {file = "backports.zoneinfo-0.2.1-cp36-cp36m-win32.whl", hash = "sha256:e8236383a20872c0cdf5a62b554b27538db7fa1bbec52429d8d106effbaeca08"}, + {file = "backports.zoneinfo-0.2.1-cp36-cp36m-win_amd64.whl", hash = "sha256:8439c030a11780786a2002261569bdf362264f605dfa4d65090b64b05c9f79a7"}, + {file = "backports.zoneinfo-0.2.1-cp37-cp37m-macosx_10_14_x86_64.whl", hash = "sha256:f04e857b59d9d1ccc39ce2da1021d196e47234873820cbeaad210724b1ee28ac"}, + {file = "backports.zoneinfo-0.2.1-cp37-cp37m-manylinux1_i686.whl", hash = "sha256:17746bd546106fa389c51dbea67c8b7c8f0d14b5526a579ca6ccf5ed72c526cf"}, + {file = "backports.zoneinfo-0.2.1-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:5c144945a7752ca544b4b78c8c41544cdfaf9786f25fe5ffb10e838e19a27570"}, + {file = "backports.zoneinfo-0.2.1-cp37-cp37m-win32.whl", hash = "sha256:e55b384612d93be96506932a786bbcde5a2db7a9e6a4bb4bffe8b733f5b9036b"}, + {file = 
"backports.zoneinfo-0.2.1-cp37-cp37m-win_amd64.whl", hash = "sha256:a76b38c52400b762e48131494ba26be363491ac4f9a04c1b7e92483d169f6582"}, + {file = "backports.zoneinfo-0.2.1-cp38-cp38-macosx_10_14_x86_64.whl", hash = "sha256:8961c0f32cd0336fb8e8ead11a1f8cd99ec07145ec2931122faaac1c8f7fd987"}, + {file = "backports.zoneinfo-0.2.1-cp38-cp38-manylinux1_i686.whl", hash = "sha256:e81b76cace8eda1fca50e345242ba977f9be6ae3945af8d46326d776b4cf78d1"}, + {file = "backports.zoneinfo-0.2.1-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:7b0a64cda4145548fed9efc10322770f929b944ce5cee6c0dfe0c87bf4c0c8c9"}, + {file = "backports.zoneinfo-0.2.1-cp38-cp38-win32.whl", hash = "sha256:1b13e654a55cd45672cb54ed12148cd33628f672548f373963b0bff67b217328"}, + {file = "backports.zoneinfo-0.2.1-cp38-cp38-win_amd64.whl", hash = "sha256:4a0f800587060bf8880f954dbef70de6c11bbe59c673c3d818921f042f9954a6"}, + {file = "backports.zoneinfo-0.2.1.tar.gz", hash = "sha256:fadbfe37f74051d024037f223b8e001611eac868b5c5b06144ef4d8b799862f2"}, +] + +[package.extras] +tzdata = ["tzdata"] + +[[package]] +name = "black" +version = "23.10.1" +description = "The uncompromising code formatter." 
+optional = false +python-versions = ">=3.8" +files = [ + {file = "black-23.10.1-cp310-cp310-macosx_10_16_arm64.whl", hash = "sha256:ec3f8e6234c4e46ff9e16d9ae96f4ef69fa328bb4ad08198c8cee45bb1f08c69"}, + {file = "black-23.10.1-cp310-cp310-macosx_10_16_x86_64.whl", hash = "sha256:1b917a2aa020ca600483a7b340c165970b26e9029067f019e3755b56e8dd5916"}, + {file = "black-23.10.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9c74de4c77b849e6359c6f01987e94873c707098322b91490d24296f66d067dc"}, + {file = "black-23.10.1-cp310-cp310-win_amd64.whl", hash = "sha256:7b4d10b0f016616a0d93d24a448100adf1699712fb7a4efd0e2c32bbb219b173"}, + {file = "black-23.10.1-cp311-cp311-macosx_10_16_arm64.whl", hash = "sha256:b15b75fc53a2fbcac8a87d3e20f69874d161beef13954747e053bca7a1ce53a0"}, + {file = "black-23.10.1-cp311-cp311-macosx_10_16_x86_64.whl", hash = "sha256:e293e4c2f4a992b980032bbd62df07c1bcff82d6964d6c9496f2cd726e246ace"}, + {file = "black-23.10.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7d56124b7a61d092cb52cce34182a5280e160e6aff3137172a68c2c2c4b76bcb"}, + {file = "black-23.10.1-cp311-cp311-win_amd64.whl", hash = "sha256:3f157a8945a7b2d424da3335f7ace89c14a3b0625e6593d21139c2d8214d55ce"}, + {file = "black-23.10.1-cp38-cp38-macosx_10_16_arm64.whl", hash = "sha256:cfcce6f0a384d0da692119f2d72d79ed07c7159879d0bb1bb32d2e443382bf3a"}, + {file = "black-23.10.1-cp38-cp38-macosx_10_16_x86_64.whl", hash = "sha256:33d40f5b06be80c1bbce17b173cda17994fbad096ce60eb22054da021bf933d1"}, + {file = "black-23.10.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:840015166dbdfbc47992871325799fd2dc0dcf9395e401ada6d88fe11498abad"}, + {file = "black-23.10.1-cp38-cp38-win_amd64.whl", hash = "sha256:037e9b4664cafda5f025a1728c50a9e9aedb99a759c89f760bd83730e76ba884"}, + {file = "black-23.10.1-cp39-cp39-macosx_10_16_arm64.whl", hash = "sha256:7cb5936e686e782fddb1c73f8aa6f459e1ad38a6a7b0e54b403f1f05a1507ee9"}, + {file = 
"black-23.10.1-cp39-cp39-macosx_10_16_x86_64.whl", hash = "sha256:7670242e90dc129c539e9ca17665e39a146a761e681805c54fbd86015c7c84f7"}, + {file = "black-23.10.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5ed45ac9a613fb52dad3b61c8dea2ec9510bf3108d4db88422bacc7d1ba1243d"}, + {file = "black-23.10.1-cp39-cp39-win_amd64.whl", hash = "sha256:6d23d7822140e3fef190734216cefb262521789367fbdc0b3f22af6744058982"}, + {file = "black-23.10.1-py3-none-any.whl", hash = "sha256:d431e6739f727bb2e0495df64a6c7a5310758e87505f5f8cde9ff6c0f2d7e4fe"}, + {file = "black-23.10.1.tar.gz", hash = "sha256:1f8ce316753428ff68749c65a5f7844631aa18c8679dfd3ca9dc1a289979c258"}, +] + +[package.dependencies] +click = ">=8.0.0" +mypy-extensions = ">=0.4.3" +packaging = ">=22.0" +pathspec = ">=0.9.0" +platformdirs = ">=2" +tomli = {version = ">=1.1.0", markers = "python_version < \"3.11\""} +typing-extensions = {version = ">=4.0.1", markers = "python_version < \"3.11\""} + +[package.extras] +colorama = ["colorama (>=0.4.3)"] +d = ["aiohttp (>=3.7.4)"] +jupyter = ["ipython (>=7.8.0)", "tokenize-rt (>=3.2.0)"] +uvloop = ["uvloop (>=0.15.2)"] + +[[package]] +name = "blinker" +version = "1.6.3" +description = "Fast, simple object-to-object and broadcast signaling" +optional = false +python-versions = ">=3.7" +files = [ + {file = "blinker-1.6.3-py3-none-any.whl", hash = "sha256:296320d6c28b006eb5e32d4712202dbcdcbf5dc482da298c2f44881c43884aaa"}, + {file = "blinker-1.6.3.tar.gz", hash = "sha256:152090d27c1c5c722ee7e48504b02d76502811ce02e1523553b4cf8c8b3d3a8d"}, +] + +[[package]] +name = "cachetools" +version = "5.3.1" +description = "Extensible memoizing collections and decorators" +optional = false +python-versions = ">=3.7" +files = [ + {file = "cachetools-5.3.1-py3-none-any.whl", hash = "sha256:95ef631eeaea14ba2e36f06437f36463aac3a096799e876ee55e5cdccb102590"}, + {file = "cachetools-5.3.1.tar.gz", hash = "sha256:dce83f2d9b4e1f732a8cd44af8e8fab2dbe46201467fc98b3ef8f269092bf62b"}, 
+] + +[[package]] +name = "certifi" +version = "2023.7.22" +description = "Python package for providing Mozilla's CA Bundle." +optional = false +python-versions = ">=3.6" +files = [ + {file = "certifi-2023.7.22-py3-none-any.whl", hash = "sha256:92d6037539857d8206b8f6ae472e8b77db8058fec5937a1ef3f54304089edbb9"}, + {file = "certifi-2023.7.22.tar.gz", hash = "sha256:539cc1d13202e33ca466e88b2807e29f4c13049d6d87031a3c110744495cb082"}, +] + +[[package]] +name = "charset-normalizer" +version = "3.3.1" +description = "The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet." +optional = false +python-versions = ">=3.7.0" +files = [ + {file = "charset-normalizer-3.3.1.tar.gz", hash = "sha256:d9137a876020661972ca6eec0766d81aef8a5627df628b664b234b73396e727e"}, + {file = "charset_normalizer-3.3.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:8aee051c89e13565c6bd366813c386939f8e928af93c29fda4af86d25b73d8f8"}, + {file = "charset_normalizer-3.3.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:352a88c3df0d1fa886562384b86f9a9e27563d4704ee0e9d56ec6fcd270ea690"}, + {file = "charset_normalizer-3.3.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:223b4d54561c01048f657fa6ce41461d5ad8ff128b9678cfe8b2ecd951e3f8a2"}, + {file = "charset_normalizer-3.3.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4f861d94c2a450b974b86093c6c027888627b8082f1299dfd5a4bae8e2292821"}, + {file = "charset_normalizer-3.3.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1171ef1fc5ab4693c5d151ae0fdad7f7349920eabbaca6271f95969fa0756c2d"}, + {file = "charset_normalizer-3.3.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:28f512b9a33235545fbbdac6a330a510b63be278a50071a336afc1b78781b147"}, + {file = "charset_normalizer-3.3.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c0e842112fe3f1a4ffcf64b06dc4c61a88441c2f02f373367f7b4c1aa9be2ad5"}, 
+ {file = "charset_normalizer-3.3.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3f9bc2ce123637a60ebe819f9fccc614da1bcc05798bbbaf2dd4ec91f3e08846"}, + {file = "charset_normalizer-3.3.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:f194cce575e59ffe442c10a360182a986535fd90b57f7debfaa5c845c409ecc3"}, + {file = "charset_normalizer-3.3.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:9a74041ba0bfa9bc9b9bb2cd3238a6ab3b7618e759b41bd15b5f6ad958d17605"}, + {file = "charset_normalizer-3.3.1-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:b578cbe580e3b41ad17b1c428f382c814b32a6ce90f2d8e39e2e635d49e498d1"}, + {file = "charset_normalizer-3.3.1-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:6db3cfb9b4fcecb4390db154e75b49578c87a3b9979b40cdf90d7e4b945656e1"}, + {file = "charset_normalizer-3.3.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:debb633f3f7856f95ad957d9b9c781f8e2c6303ef21724ec94bea2ce2fcbd056"}, + {file = "charset_normalizer-3.3.1-cp310-cp310-win32.whl", hash = "sha256:87071618d3d8ec8b186d53cb6e66955ef2a0e4fa63ccd3709c0c90ac5a43520f"}, + {file = "charset_normalizer-3.3.1-cp310-cp310-win_amd64.whl", hash = "sha256:e372d7dfd154009142631de2d316adad3cc1c36c32a38b16a4751ba78da2a397"}, + {file = "charset_normalizer-3.3.1-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:ae4070f741f8d809075ef697877fd350ecf0b7c5837ed68738607ee0a2c572cf"}, + {file = "charset_normalizer-3.3.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:58e875eb7016fd014c0eea46c6fa92b87b62c0cb31b9feae25cbbe62c919f54d"}, + {file = "charset_normalizer-3.3.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:dbd95e300367aa0827496fe75a1766d198d34385a58f97683fe6e07f89ca3e3c"}, + {file = "charset_normalizer-3.3.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:de0b4caa1c8a21394e8ce971997614a17648f94e1cd0640fbd6b4d14cab13a72"}, + {file = 
"charset_normalizer-3.3.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:985c7965f62f6f32bf432e2681173db41336a9c2611693247069288bcb0c7f8b"}, + {file = "charset_normalizer-3.3.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a15c1fe6d26e83fd2e5972425a772cca158eae58b05d4a25a4e474c221053e2d"}, + {file = "charset_normalizer-3.3.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ae55d592b02c4349525b6ed8f74c692509e5adffa842e582c0f861751701a673"}, + {file = "charset_normalizer-3.3.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:be4d9c2770044a59715eb57c1144dedea7c5d5ae80c68fb9959515037cde2008"}, + {file = "charset_normalizer-3.3.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:851cf693fb3aaef71031237cd68699dded198657ec1e76a76eb8be58c03a5d1f"}, + {file = "charset_normalizer-3.3.1-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:31bbaba7218904d2eabecf4feec0d07469284e952a27400f23b6628439439fa7"}, + {file = "charset_normalizer-3.3.1-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:871d045d6ccc181fd863a3cd66ee8e395523ebfbc57f85f91f035f50cee8e3d4"}, + {file = "charset_normalizer-3.3.1-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:501adc5eb6cd5f40a6f77fbd90e5ab915c8fd6e8c614af2db5561e16c600d6f3"}, + {file = "charset_normalizer-3.3.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:f5fb672c396d826ca16a022ac04c9dce74e00a1c344f6ad1a0fdc1ba1f332213"}, + {file = "charset_normalizer-3.3.1-cp311-cp311-win32.whl", hash = "sha256:bb06098d019766ca16fc915ecaa455c1f1cd594204e7f840cd6258237b5079a8"}, + {file = "charset_normalizer-3.3.1-cp311-cp311-win_amd64.whl", hash = "sha256:8af5a8917b8af42295e86b64903156b4f110a30dca5f3b5aedea123fbd638bff"}, + {file = "charset_normalizer-3.3.1-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:7ae8e5142dcc7a49168f4055255dbcced01dc1714a90a21f87448dc8d90617d1"}, + {file = 
"charset_normalizer-3.3.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:5b70bab78accbc672f50e878a5b73ca692f45f5b5e25c8066d748c09405e6a55"}, + {file = "charset_normalizer-3.3.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:5ceca5876032362ae73b83347be8b5dbd2d1faf3358deb38c9c88776779b2e2f"}, + {file = "charset_normalizer-3.3.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:34d95638ff3613849f473afc33f65c401a89f3b9528d0d213c7037c398a51296"}, + {file = "charset_normalizer-3.3.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9edbe6a5bf8b56a4a84533ba2b2f489d0046e755c29616ef8830f9e7d9cf5728"}, + {file = "charset_normalizer-3.3.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f6a02a3c7950cafaadcd46a226ad9e12fc9744652cc69f9e5534f98b47f3bbcf"}, + {file = "charset_normalizer-3.3.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:10b8dd31e10f32410751b3430996f9807fc4d1587ca69772e2aa940a82ab571a"}, + {file = "charset_normalizer-3.3.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:edc0202099ea1d82844316604e17d2b175044f9bcb6b398aab781eba957224bd"}, + {file = "charset_normalizer-3.3.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:b891a2f68e09c5ef989007fac11476ed33c5c9994449a4e2c3386529d703dc8b"}, + {file = "charset_normalizer-3.3.1-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:71ef3b9be10070360f289aea4838c784f8b851be3ba58cf796262b57775c2f14"}, + {file = "charset_normalizer-3.3.1-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:55602981b2dbf8184c098bc10287e8c245e351cd4fdcad050bd7199d5a8bf514"}, + {file = "charset_normalizer-3.3.1-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:46fb9970aa5eeca547d7aa0de5d4b124a288b42eaefac677bde805013c95725c"}, + {file = "charset_normalizer-3.3.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = 
"sha256:520b7a142d2524f999447b3a0cf95115df81c4f33003c51a6ab637cbda9d0bf4"}, + {file = "charset_normalizer-3.3.1-cp312-cp312-win32.whl", hash = "sha256:8ec8ef42c6cd5856a7613dcd1eaf21e5573b2185263d87d27c8edcae33b62a61"}, + {file = "charset_normalizer-3.3.1-cp312-cp312-win_amd64.whl", hash = "sha256:baec8148d6b8bd5cee1ae138ba658c71f5b03e0d69d5907703e3e1df96db5e41"}, + {file = "charset_normalizer-3.3.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:63a6f59e2d01310f754c270e4a257426fe5a591dc487f1983b3bbe793cf6bac6"}, + {file = "charset_normalizer-3.3.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1d6bfc32a68bc0933819cfdfe45f9abc3cae3877e1d90aac7259d57e6e0f85b1"}, + {file = "charset_normalizer-3.3.1-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4f3100d86dcd03c03f7e9c3fdb23d92e32abbca07e7c13ebd7ddfbcb06f5991f"}, + {file = "charset_normalizer-3.3.1-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:39b70a6f88eebe239fa775190796d55a33cfb6d36b9ffdd37843f7c4c1b5dc67"}, + {file = "charset_normalizer-3.3.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4e12f8ee80aa35e746230a2af83e81bd6b52daa92a8afaef4fea4a2ce9b9f4fa"}, + {file = "charset_normalizer-3.3.1-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7b6cefa579e1237ce198619b76eaa148b71894fb0d6bcf9024460f9bf30fd228"}, + {file = "charset_normalizer-3.3.1-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:61f1e3fb621f5420523abb71f5771a204b33c21d31e7d9d86881b2cffe92c47c"}, + {file = "charset_normalizer-3.3.1-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:4f6e2a839f83a6a76854d12dbebde50e4b1afa63e27761549d006fa53e9aa80e"}, + {file = "charset_normalizer-3.3.1-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:1ec937546cad86d0dce5396748bf392bb7b62a9eeb8c66efac60e947697f0e58"}, + {file = "charset_normalizer-3.3.1-cp37-cp37m-musllinux_1_1_s390x.whl", hash = 
"sha256:82ca51ff0fc5b641a2d4e1cc8c5ff108699b7a56d7f3ad6f6da9dbb6f0145b48"}, + {file = "charset_normalizer-3.3.1-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:633968254f8d421e70f91c6ebe71ed0ab140220469cf87a9857e21c16687c034"}, + {file = "charset_normalizer-3.3.1-cp37-cp37m-win32.whl", hash = "sha256:c0c72d34e7de5604df0fde3644cc079feee5e55464967d10b24b1de268deceb9"}, + {file = "charset_normalizer-3.3.1-cp37-cp37m-win_amd64.whl", hash = "sha256:63accd11149c0f9a99e3bc095bbdb5a464862d77a7e309ad5938fbc8721235ae"}, + {file = "charset_normalizer-3.3.1-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:5a3580a4fdc4ac05f9e53c57f965e3594b2f99796231380adb2baaab96e22761"}, + {file = "charset_normalizer-3.3.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:2465aa50c9299d615d757c1c888bc6fef384b7c4aec81c05a0172b4400f98557"}, + {file = "charset_normalizer-3.3.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:cb7cd68814308aade9d0c93c5bd2ade9f9441666f8ba5aa9c2d4b389cb5e2a45"}, + {file = "charset_normalizer-3.3.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:91e43805ccafa0a91831f9cd5443aa34528c0c3f2cc48c4cb3d9a7721053874b"}, + {file = "charset_normalizer-3.3.1-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:854cc74367180beb327ab9d00f964f6d91da06450b0855cbbb09187bcdb02de5"}, + {file = "charset_normalizer-3.3.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c15070ebf11b8b7fd1bfff7217e9324963c82dbdf6182ff7050519e350e7ad9f"}, + {file = "charset_normalizer-3.3.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2c4c99f98fc3a1835af8179dcc9013f93594d0670e2fa80c83aa36346ee763d2"}, + {file = "charset_normalizer-3.3.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3fb765362688821404ad6cf86772fc54993ec11577cd5a92ac44b4c2ba52155b"}, + {file = "charset_normalizer-3.3.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = 
"sha256:dced27917823df984fe0c80a5c4ad75cf58df0fbfae890bc08004cd3888922a2"}, + {file = "charset_normalizer-3.3.1-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:a66bcdf19c1a523e41b8e9d53d0cedbfbac2e93c649a2e9502cb26c014d0980c"}, + {file = "charset_normalizer-3.3.1-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:ecd26be9f112c4f96718290c10f4caea6cc798459a3a76636b817a0ed7874e42"}, + {file = "charset_normalizer-3.3.1-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:3f70fd716855cd3b855316b226a1ac8bdb3caf4f7ea96edcccc6f484217c9597"}, + {file = "charset_normalizer-3.3.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:17a866d61259c7de1bdadef418a37755050ddb4b922df8b356503234fff7932c"}, + {file = "charset_normalizer-3.3.1-cp38-cp38-win32.whl", hash = "sha256:548eefad783ed787b38cb6f9a574bd8664468cc76d1538215d510a3cd41406cb"}, + {file = "charset_normalizer-3.3.1-cp38-cp38-win_amd64.whl", hash = "sha256:45f053a0ece92c734d874861ffe6e3cc92150e32136dd59ab1fb070575189c97"}, + {file = "charset_normalizer-3.3.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:bc791ec3fd0c4309a753f95bb6c749ef0d8ea3aea91f07ee1cf06b7b02118f2f"}, + {file = "charset_normalizer-3.3.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:0c8c61fb505c7dad1d251c284e712d4e0372cef3b067f7ddf82a7fa82e1e9a93"}, + {file = "charset_normalizer-3.3.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:2c092be3885a1b7899cd85ce24acedc1034199d6fca1483fa2c3a35c86e43041"}, + {file = "charset_normalizer-3.3.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c2000c54c395d9e5e44c99dc7c20a64dc371f777faf8bae4919ad3e99ce5253e"}, + {file = "charset_normalizer-3.3.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4cb50a0335382aac15c31b61d8531bc9bb657cfd848b1d7158009472189f3d62"}, + {file = "charset_normalizer-3.3.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c30187840d36d0ba2893bc3271a36a517a717f9fd383a98e2697ee890a37c273"}, + {file = 
"charset_normalizer-3.3.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fe81b35c33772e56f4b6cf62cf4aedc1762ef7162a31e6ac7fe5e40d0149eb67"}, + {file = "charset_normalizer-3.3.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d0bf89afcbcf4d1bb2652f6580e5e55a840fdf87384f6063c4a4f0c95e378656"}, + {file = "charset_normalizer-3.3.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:06cf46bdff72f58645434d467bf5228080801298fbba19fe268a01b4534467f5"}, + {file = "charset_normalizer-3.3.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:3c66df3f41abee950d6638adc7eac4730a306b022570f71dd0bd6ba53503ab57"}, + {file = "charset_normalizer-3.3.1-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:cd805513198304026bd379d1d516afbf6c3c13f4382134a2c526b8b854da1c2e"}, + {file = "charset_normalizer-3.3.1-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:9505dc359edb6a330efcd2be825fdb73ee3e628d9010597aa1aee5aa63442e97"}, + {file = "charset_normalizer-3.3.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:31445f38053476a0c4e6d12b047b08ced81e2c7c712e5a1ad97bc913256f91b2"}, + {file = "charset_normalizer-3.3.1-cp39-cp39-win32.whl", hash = "sha256:bd28b31730f0e982ace8663d108e01199098432a30a4c410d06fe08fdb9e93f4"}, + {file = "charset_normalizer-3.3.1-cp39-cp39-win_amd64.whl", hash = "sha256:555fe186da0068d3354cdf4bbcbc609b0ecae4d04c921cc13e209eece7720727"}, + {file = "charset_normalizer-3.3.1-py3-none-any.whl", hash = "sha256:800561453acdecedaac137bf09cd719c7a440b6800ec182f077bb8e7025fb708"}, +] + +[[package]] +name = "click" +version = "8.1.7" +description = "Composable command line interface toolkit" +optional = false +python-versions = ">=3.7" +files = [ + {file = "click-8.1.7-py3-none-any.whl", hash = "sha256:ae74fb96c20a0277a1d615f1e4d73c8414f5a98db8b799a7931d1582f3390c28"}, + {file = "click-8.1.7.tar.gz", hash = "sha256:ca9853ad459e787e2192211578cc907e7594e294c7ccc834310722b41b9ca6de"}, +] + 
+[package.dependencies] +colorama = {version = "*", markers = "platform_system == \"Windows\""} + +[[package]] +name = "colorama" +version = "0.4.6" +description = "Cross-platform colored terminal text." +optional = false +python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7" +files = [ + {file = "colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6"}, + {file = "colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44"}, +] + +[[package]] +name = "flake8" +version = "6.1.0" +description = "the modular source code checker: pep8 pyflakes and co" +optional = false +python-versions = ">=3.8.1" +files = [ + {file = "flake8-6.1.0-py2.py3-none-any.whl", hash = "sha256:ffdfce58ea94c6580c77888a86506937f9a1a227dfcd15f245d694ae20a6b6e5"}, + {file = "flake8-6.1.0.tar.gz", hash = "sha256:d5b3857f07c030bdb5bf41c7f53799571d75c4491748a3adcd47de929e34cd23"}, +] + +[package.dependencies] +mccabe = ">=0.7.0,<0.8.0" +pycodestyle = ">=2.11.0,<2.12.0" +pyflakes = ">=3.1.0,<3.2.0" + +[[package]] +name = "gitdb" +version = "4.0.11" +description = "Git Object Database" +optional = false +python-versions = ">=3.7" +files = [ + {file = "gitdb-4.0.11-py3-none-any.whl", hash = "sha256:81a3407ddd2ee8df444cbacea00e2d038e40150acfa3001696fe0dcf1d3adfa4"}, + {file = "gitdb-4.0.11.tar.gz", hash = "sha256:bf5421126136d6d0af55bc1e7c1af1c397a34f5b7bd79e776cd3e89785c2b04b"}, +] + +[package.dependencies] +smmap = ">=3.0.1,<6" + +[[package]] +name = "gitpython" +version = "3.1.40" +description = "GitPython is a Python library used to interact with Git repositories" +optional = false +python-versions = ">=3.7" +files = [ + {file = "GitPython-3.1.40-py3-none-any.whl", hash = "sha256:cf14627d5a8049ffbf49915732e5eddbe8134c3bdb9d476e6182b676fc573f8a"}, + {file = "GitPython-3.1.40.tar.gz", hash = "sha256:22b126e9ffb671fdd0c129796343a02bf67bf2994b35449ffc9321aa755e18a4"}, 
+] + +[package.dependencies] +gitdb = ">=4.0.1,<5" + +[package.extras] +test = ["black", "coverage[toml]", "ddt (>=1.1.1,!=1.4.3)", "mock", "mypy", "pre-commit", "pytest", "pytest-cov", "pytest-instafail", "pytest-subtests", "pytest-sugar"] + +[[package]] +name = "greenlet" +version = "3.0.0" +description = "Lightweight in-process concurrent programming" +optional = false +python-versions = ">=3.7" +files = [ + {file = "greenlet-3.0.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:e09dea87cc91aea5500262993cbd484b41edf8af74f976719dd83fe724644cd6"}, + {file = "greenlet-3.0.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f47932c434a3c8d3c86d865443fadc1fbf574e9b11d6650b656e602b1797908a"}, + {file = "greenlet-3.0.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:bdfaeecf8cc705d35d8e6de324bf58427d7eafb55f67050d8f28053a3d57118c"}, + {file = "greenlet-3.0.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:6a68d670c8f89ff65c82b936275369e532772eebc027c3be68c6b87ad05ca695"}, + {file = "greenlet-3.0.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:38ad562a104cd41e9d4644f46ea37167b93190c6d5e4048fcc4b80d34ecb278f"}, + {file = "greenlet-3.0.0-cp310-cp310-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:02a807b2a58d5cdebb07050efe3d7deaf915468d112dfcf5e426d0564aa3aa4a"}, + {file = "greenlet-3.0.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:b1660a15a446206c8545edc292ab5c48b91ff732f91b3d3b30d9a915d5ec4779"}, + {file = "greenlet-3.0.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:813720bd57e193391dfe26f4871186cf460848b83df7e23e6bef698a7624b4c9"}, + {file = "greenlet-3.0.0-cp310-cp310-win_amd64.whl", hash = "sha256:aa15a2ec737cb609ed48902b45c5e4ff6044feb5dcdfcf6fa8482379190330d7"}, + {file = "greenlet-3.0.0-cp310-universal2-macosx_11_0_x86_64.whl", hash = 
"sha256:7709fd7bb02b31908dc8fd35bfd0a29fc24681d5cc9ac1d64ad07f8d2b7db62f"}, + {file = "greenlet-3.0.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:211ef8d174601b80e01436f4e6905aca341b15a566f35a10dd8d1e93f5dbb3b7"}, + {file = "greenlet-3.0.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6512592cc49b2c6d9b19fbaa0312124cd4c4c8a90d28473f86f92685cc5fef8e"}, + {file = "greenlet-3.0.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:871b0a8835f9e9d461b7fdaa1b57e3492dd45398e87324c047469ce2fc9f516c"}, + {file = "greenlet-3.0.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b505fcfc26f4148551826a96f7317e02c400665fa0883fe505d4fcaab1dabfdd"}, + {file = "greenlet-3.0.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:123910c58234a8d40eaab595bc56a5ae49bdd90122dde5bdc012c20595a94c14"}, + {file = "greenlet-3.0.0-cp311-cp311-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:96d9ea57292f636ec851a9bb961a5cc0f9976900e16e5d5647f19aa36ba6366b"}, + {file = "greenlet-3.0.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:0b72b802496cccbd9b31acea72b6f87e7771ccfd7f7927437d592e5c92ed703c"}, + {file = "greenlet-3.0.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:527cd90ba3d8d7ae7dceb06fda619895768a46a1b4e423bdb24c1969823b8362"}, + {file = "greenlet-3.0.0-cp311-cp311-win_amd64.whl", hash = "sha256:37f60b3a42d8b5499be910d1267b24355c495064f271cfe74bf28b17b099133c"}, + {file = "greenlet-3.0.0-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:1482fba7fbed96ea7842b5a7fc11d61727e8be75a077e603e8ab49d24e234383"}, + {file = "greenlet-3.0.0-cp312-cp312-macosx_13_0_arm64.whl", hash = "sha256:be557119bf467d37a8099d91fbf11b2de5eb1fd5fc5b91598407574848dc910f"}, + {file = "greenlet-3.0.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:73b2f1922a39d5d59cc0e597987300df3396b148a9bd10b76a058a2f2772fc04"}, + {file = 
"greenlet-3.0.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d1e22c22f7826096ad503e9bb681b05b8c1f5a8138469b255eb91f26a76634f2"}, + {file = "greenlet-3.0.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1d363666acc21d2c204dd8705c0e0457d7b2ee7a76cb16ffc099d6799744ac99"}, + {file = "greenlet-3.0.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:334ef6ed8337bd0b58bb0ae4f7f2dcc84c9f116e474bb4ec250a8bb9bd797a66"}, + {file = "greenlet-3.0.0-cp312-cp312-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:6672fdde0fd1a60b44fb1751a7779c6db487e42b0cc65e7caa6aa686874e79fb"}, + {file = "greenlet-3.0.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:952256c2bc5b4ee8df8dfc54fc4de330970bf5d79253c863fb5e6761f00dda35"}, + {file = "greenlet-3.0.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:269d06fa0f9624455ce08ae0179430eea61085e3cf6457f05982b37fd2cefe17"}, + {file = "greenlet-3.0.0-cp312-cp312-win_amd64.whl", hash = "sha256:9adbd8ecf097e34ada8efde9b6fec4dd2a903b1e98037adf72d12993a1c80b51"}, + {file = "greenlet-3.0.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c6b5ce7f40f0e2f8b88c28e6691ca6806814157ff05e794cdd161be928550f4c"}, + {file = "greenlet-3.0.0-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ecf94aa539e97a8411b5ea52fc6ccd8371be9550c4041011a091eb8b3ca1d810"}, + {file = "greenlet-3.0.0-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:80dcd3c938cbcac986c5c92779db8e8ce51a89a849c135172c88ecbdc8c056b7"}, + {file = "greenlet-3.0.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e52a712c38e5fb4fd68e00dc3caf00b60cb65634d50e32281a9d6431b33b4af1"}, + {file = "greenlet-3.0.0-cp37-cp37m-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:d5539f6da3418c3dc002739cb2bb8d169056aa66e0c83f6bacae0cd3ac26b423"}, + {file = 
"greenlet-3.0.0-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:343675e0da2f3c69d3fb1e894ba0a1acf58f481f3b9372ce1eb465ef93cf6fed"}, + {file = "greenlet-3.0.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:abe1ef3d780de56defd0c77c5ba95e152f4e4c4e12d7e11dd8447d338b85a625"}, + {file = "greenlet-3.0.0-cp37-cp37m-win32.whl", hash = "sha256:e693e759e172fa1c2c90d35dea4acbdd1d609b6936115d3739148d5e4cd11947"}, + {file = "greenlet-3.0.0-cp37-cp37m-win_amd64.whl", hash = "sha256:bdd696947cd695924aecb3870660b7545a19851f93b9d327ef8236bfc49be705"}, + {file = "greenlet-3.0.0-cp37-universal2-macosx_11_0_x86_64.whl", hash = "sha256:cc3e2679ea13b4de79bdc44b25a0c4fcd5e94e21b8f290791744ac42d34a0353"}, + {file = "greenlet-3.0.0-cp38-cp38-macosx_11_0_universal2.whl", hash = "sha256:63acdc34c9cde42a6534518e32ce55c30f932b473c62c235a466469a710bfbf9"}, + {file = "greenlet-3.0.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4a1a6244ff96343e9994e37e5b4839f09a0207d35ef6134dce5c20d260d0302c"}, + {file = "greenlet-3.0.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b822fab253ac0f330ee807e7485769e3ac85d5eef827ca224feaaefa462dc0d0"}, + {file = "greenlet-3.0.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8060b32d8586e912a7b7dac2d15b28dbbd63a174ab32f5bc6d107a1c4143f40b"}, + {file = "greenlet-3.0.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:621fcb346141ae08cb95424ebfc5b014361621b8132c48e538e34c3c93ac7365"}, + {file = "greenlet-3.0.0-cp38-cp38-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:6bb36985f606a7c49916eff74ab99399cdfd09241c375d5a820bb855dfb4af9f"}, + {file = "greenlet-3.0.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:10b5582744abd9858947d163843d323d0b67be9432db50f8bf83031032bc218d"}, + {file = "greenlet-3.0.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:f351479a6914fd81a55c8e68963609f792d9b067fb8a60a042c585a621e0de4f"}, + {file = 
"greenlet-3.0.0-cp38-cp38-win32.whl", hash = "sha256:9de687479faec7db5b198cc365bc34addd256b0028956501f4d4d5e9ca2e240a"}, + {file = "greenlet-3.0.0-cp38-cp38-win_amd64.whl", hash = "sha256:3fd2b18432e7298fcbec3d39e1a0aa91ae9ea1c93356ec089421fabc3651572b"}, + {file = "greenlet-3.0.0-cp38-universal2-macosx_11_0_x86_64.whl", hash = "sha256:3c0d36f5adc6e6100aedbc976d7428a9f7194ea79911aa4bf471f44ee13a9464"}, + {file = "greenlet-3.0.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:4cd83fb8d8e17633ad534d9ac93719ef8937568d730ef07ac3a98cb520fd93e4"}, + {file = "greenlet-3.0.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6a5b2d4cdaf1c71057ff823a19d850ed5c6c2d3686cb71f73ae4d6382aaa7a06"}, + {file = "greenlet-3.0.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2e7dcdfad252f2ca83c685b0fa9fba00e4d8f243b73839229d56ee3d9d219314"}, + {file = "greenlet-3.0.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c94e4e924d09b5a3e37b853fe5924a95eac058cb6f6fb437ebb588b7eda79870"}, + {file = "greenlet-3.0.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ad6fb737e46b8bd63156b8f59ba6cdef46fe2b7db0c5804388a2d0519b8ddb99"}, + {file = "greenlet-3.0.0-cp39-cp39-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:d55db1db455c59b46f794346efce896e754b8942817f46a1bada2d29446e305a"}, + {file = "greenlet-3.0.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:56867a3b3cf26dc8a0beecdb4459c59f4c47cdd5424618c08515f682e1d46692"}, + {file = "greenlet-3.0.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:9a812224a5fb17a538207e8cf8e86f517df2080c8ee0f8c1ed2bdaccd18f38f4"}, + {file = "greenlet-3.0.0-cp39-cp39-win32.whl", hash = "sha256:0d3f83ffb18dc57243e0151331e3c383b05e5b6c5029ac29f754745c800f8ed9"}, + {file = "greenlet-3.0.0-cp39-cp39-win_amd64.whl", hash = "sha256:831d6f35037cf18ca5e80a737a27d822d87cd922521d18ed3dbc8a6967be50ce"}, + {file = 
"greenlet-3.0.0-cp39-universal2-macosx_11_0_x86_64.whl", hash = "sha256:a048293392d4e058298710a54dfaefcefdf49d287cd33fb1f7d63d55426e4355"}, + {file = "greenlet-3.0.0.tar.gz", hash = "sha256:19834e3f91f485442adc1ee440171ec5d9a4840a1f7bd5ed97833544719ce10b"}, +] + +[package.extras] +docs = ["Sphinx"] +test = ["objgraph", "psutil"] + +[[package]] +name = "idna" +version = "3.4" +description = "Internationalized Domain Names in Applications (IDNA)" +optional = false +python-versions = ">=3.5" +files = [ + {file = "idna-3.4-py3-none-any.whl", hash = "sha256:90b77e79eaa3eba6de819a0c442c0b4ceefc341a7a2ab77d7562bf49f425c5c2"}, + {file = "idna-3.4.tar.gz", hash = "sha256:814f528e8dead7d329833b91c5faa87d60bf71824cd12a7530b5526063d02cb4"}, +] + +[[package]] +name = "importlib-metadata" +version = "6.8.0" +description = "Read metadata from Python packages" +optional = false +python-versions = ">=3.8" +files = [ + {file = "importlib_metadata-6.8.0-py3-none-any.whl", hash = "sha256:3ebb78df84a805d7698245025b975d9d67053cd94c79245ba4b3eb694abe68bb"}, + {file = "importlib_metadata-6.8.0.tar.gz", hash = "sha256:dbace7892d8c0c4ac1ad096662232f831d4e64f4c4545bd53016a3e9d4654743"}, +] + +[package.dependencies] +zipp = ">=0.5" + +[package.extras] +docs = ["furo", "jaraco.packaging (>=9)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"] +perf = ["ipython"] +testing = ["flufl.flake8", "importlib-resources (>=1.3)", "packaging", "pyfakefs", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-mypy (>=0.9.1)", "pytest-perf (>=0.9.2)", "pytest-ruff"] + +[[package]] +name = "importlib-resources" +version = "6.1.0" +description = "Read resources from Python packages" +optional = false +python-versions = ">=3.8" +files = [ + {file = "importlib_resources-6.1.0-py3-none-any.whl", hash = "sha256:aa50258bbfa56d4e33fbd8aa3ef48ded10d1735f11532b8df95388cc6bdb7e83"}, + {file = 
"importlib_resources-6.1.0.tar.gz", hash = "sha256:9d48dcccc213325e810fd723e7fbb45ccb39f6cf5c31f00cf2b965f5f10f3cb9"}, +] + +[package.dependencies] +zipp = {version = ">=3.1.0", markers = "python_version < \"3.10\""} + +[package.extras] +docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (<7.2.5)", "sphinx (>=3.5)", "sphinx-lint"] +testing = ["pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-mypy (>=0.9.1)", "pytest-ruff", "zipp (>=3.17)"] + +[[package]] +name = "jinja2" +version = "3.1.2" +description = "A very fast and expressive template engine." +optional = false +python-versions = ">=3.7" +files = [ + {file = "Jinja2-3.1.2-py3-none-any.whl", hash = "sha256:6088930bfe239f0e6710546ab9c19c9ef35e29792895fed6e6e31a023a182a61"}, + {file = "Jinja2-3.1.2.tar.gz", hash = "sha256:31351a702a408a9e7595a8fc6150fc3f43bb6bf7e319770cbc0db9df9437e852"}, +] + +[package.dependencies] +MarkupSafe = ">=2.0" + +[package.extras] +i18n = ["Babel (>=2.7)"] + +[[package]] +name = "jsonschema" +version = "4.19.1" +description = "An implementation of JSON Schema validation for Python" +optional = false +python-versions = ">=3.8" +files = [ + {file = "jsonschema-4.19.1-py3-none-any.whl", hash = "sha256:cd5f1f9ed9444e554b38ba003af06c0a8c2868131e56bfbef0550fb450c0330e"}, + {file = "jsonschema-4.19.1.tar.gz", hash = "sha256:ec84cc37cfa703ef7cd4928db24f9cb31428a5d0fa77747b8b51a847458e0bbf"}, +] + +[package.dependencies] +attrs = ">=22.2.0" +importlib-resources = {version = ">=1.4.0", markers = "python_version < \"3.9\""} +jsonschema-specifications = ">=2023.03.6" +pkgutil-resolve-name = {version = ">=1.3.10", markers = "python_version < \"3.9\""} +referencing = ">=0.28.4" +rpds-py = ">=0.7.1" + +[package.extras] +format = ["fqdn", "idna", "isoduration", "jsonpointer (>1.13)", "rfc3339-validator", "rfc3987", "uri-template", "webcolors (>=1.11)"] +format-nongpl = ["fqdn", 
"idna", "isoduration", "jsonpointer (>1.13)", "rfc3339-validator", "rfc3986-validator (>0.1.0)", "uri-template", "webcolors (>=1.11)"] + +[[package]] +name = "jsonschema-specifications" +version = "2023.7.1" +description = "The JSON Schema meta-schemas and vocabularies, exposed as a Registry" +optional = false +python-versions = ">=3.8" +files = [ + {file = "jsonschema_specifications-2023.7.1-py3-none-any.whl", hash = "sha256:05adf340b659828a004220a9613be00fa3f223f2b82002e273dee62fd50524b1"}, + {file = "jsonschema_specifications-2023.7.1.tar.gz", hash = "sha256:c91a50404e88a1f6ba40636778e2ee08f6e24c5613fe4c53ac24578a5a7f72bb"}, +] + +[package.dependencies] +importlib-resources = {version = ">=1.4.0", markers = "python_version < \"3.9\""} +referencing = ">=0.28.0" + +[[package]] +name = "markdown-it-py" +version = "3.0.0" +description = "Python port of markdown-it. Markdown parsing, done right!" +optional = false +python-versions = ">=3.8" +files = [ + {file = "markdown-it-py-3.0.0.tar.gz", hash = "sha256:e3f60a94fa066dc52ec76661e37c851cb232d92f9886b15cb560aaada2df8feb"}, + {file = "markdown_it_py-3.0.0-py3-none-any.whl", hash = "sha256:355216845c60bd96232cd8d8c40e8f9765cc86f46880e43a8fd22dc1a1a8cab1"}, +] + +[package.dependencies] +mdurl = ">=0.1,<1.0" + +[package.extras] +benchmarking = ["psutil", "pytest", "pytest-benchmark"] +code-style = ["pre-commit (>=3.0,<4.0)"] +compare = ["commonmark (>=0.9,<1.0)", "markdown (>=3.4,<4.0)", "mistletoe (>=1.0,<2.0)", "mistune (>=2.0,<3.0)", "panflute (>=2.3,<3.0)"] +linkify = ["linkify-it-py (>=1,<3)"] +plugins = ["mdit-py-plugins"] +profiling = ["gprof2dot"] +rtd = ["jupyter_sphinx", "mdit-py-plugins", "myst-parser", "pyyaml", "sphinx", "sphinx-copybutton", "sphinx-design", "sphinx_book_theme"] +testing = ["coverage", "pytest", "pytest-cov", "pytest-regressions"] + +[[package]] +name = "markupsafe" +version = "2.1.3" +description = "Safely add untrusted strings to HTML/XML markup." 
+optional = false +python-versions = ">=3.7" +files = [ + {file = "MarkupSafe-2.1.3-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:cd0f502fe016460680cd20aaa5a76d241d6f35a1c3350c474bac1273803893fa"}, + {file = "MarkupSafe-2.1.3-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:e09031c87a1e51556fdcb46e5bd4f59dfb743061cf93c4d6831bf894f125eb57"}, + {file = "MarkupSafe-2.1.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:68e78619a61ecf91e76aa3e6e8e33fc4894a2bebe93410754bd28fce0a8a4f9f"}, + {file = "MarkupSafe-2.1.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:65c1a9bcdadc6c28eecee2c119465aebff8f7a584dd719facdd9e825ec61ab52"}, + {file = "MarkupSafe-2.1.3-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:525808b8019e36eb524b8c68acdd63a37e75714eac50e988180b169d64480a00"}, + {file = "MarkupSafe-2.1.3-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:962f82a3086483f5e5f64dbad880d31038b698494799b097bc59c2edf392fce6"}, + {file = "MarkupSafe-2.1.3-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:aa7bd130efab1c280bed0f45501b7c8795f9fdbeb02e965371bbef3523627779"}, + {file = "MarkupSafe-2.1.3-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:c9c804664ebe8f83a211cace637506669e7890fec1b4195b505c214e50dd4eb7"}, + {file = "MarkupSafe-2.1.3-cp310-cp310-win32.whl", hash = "sha256:10bbfe99883db80bdbaff2dcf681dfc6533a614f700da1287707e8a5d78a8431"}, + {file = "MarkupSafe-2.1.3-cp310-cp310-win_amd64.whl", hash = "sha256:1577735524cdad32f9f694208aa75e422adba74f1baee7551620e43a3141f559"}, + {file = "MarkupSafe-2.1.3-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:ad9e82fb8f09ade1c3e1b996a6337afac2b8b9e365f926f5a61aacc71adc5b3c"}, + {file = "MarkupSafe-2.1.3-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:3c0fae6c3be832a0a0473ac912810b2877c8cb9d76ca48de1ed31e1c68386575"}, + {file = 
"MarkupSafe-2.1.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b076b6226fb84157e3f7c971a47ff3a679d837cf338547532ab866c57930dbee"}, + {file = "MarkupSafe-2.1.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bfce63a9e7834b12b87c64d6b155fdd9b3b96191b6bd334bf37db7ff1fe457f2"}, + {file = "MarkupSafe-2.1.3-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:338ae27d6b8745585f87218a3f23f1512dbf52c26c28e322dbe54bcede54ccb9"}, + {file = "MarkupSafe-2.1.3-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:e4dd52d80b8c83fdce44e12478ad2e85c64ea965e75d66dbeafb0a3e77308fcc"}, + {file = "MarkupSafe-2.1.3-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:df0be2b576a7abbf737b1575f048c23fb1d769f267ec4358296f31c2479db8f9"}, + {file = "MarkupSafe-2.1.3-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:5bbe06f8eeafd38e5d0a4894ffec89378b6c6a625ff57e3028921f8ff59318ac"}, + {file = "MarkupSafe-2.1.3-cp311-cp311-win32.whl", hash = "sha256:dd15ff04ffd7e05ffcb7fe79f1b98041b8ea30ae9234aed2a9168b5797c3effb"}, + {file = "MarkupSafe-2.1.3-cp311-cp311-win_amd64.whl", hash = "sha256:134da1eca9ec0ae528110ccc9e48041e0828d79f24121a1a146161103c76e686"}, + {file = "MarkupSafe-2.1.3-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:f698de3fd0c4e6972b92290a45bd9b1536bffe8c6759c62471efaa8acb4c37bc"}, + {file = "MarkupSafe-2.1.3-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:aa57bd9cf8ae831a362185ee444e15a93ecb2e344c8e52e4d721ea3ab6ef1823"}, + {file = "MarkupSafe-2.1.3-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ffcc3f7c66b5f5b7931a5aa68fc9cecc51e685ef90282f4a82f0f5e9b704ad11"}, + {file = "MarkupSafe-2.1.3-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:47d4f1c5f80fc62fdd7777d0d40a2e9dda0a05883ab11374334f6c4de38adffd"}, + {file = 
"MarkupSafe-2.1.3-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1f67c7038d560d92149c060157d623c542173016c4babc0c1913cca0564b9939"}, + {file = "MarkupSafe-2.1.3-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:9aad3c1755095ce347e26488214ef77e0485a3c34a50c5a5e2471dff60b9dd9c"}, + {file = "MarkupSafe-2.1.3-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:14ff806850827afd6b07a5f32bd917fb7f45b046ba40c57abdb636674a8b559c"}, + {file = "MarkupSafe-2.1.3-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8f9293864fe09b8149f0cc42ce56e3f0e54de883a9de90cd427f191c346eb2e1"}, + {file = "MarkupSafe-2.1.3-cp312-cp312-win32.whl", hash = "sha256:715d3562f79d540f251b99ebd6d8baa547118974341db04f5ad06d5ea3eb8007"}, + {file = "MarkupSafe-2.1.3-cp312-cp312-win_amd64.whl", hash = "sha256:1b8dd8c3fd14349433c79fa8abeb573a55fc0fdd769133baac1f5e07abf54aeb"}, + {file = "MarkupSafe-2.1.3-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:8e254ae696c88d98da6555f5ace2279cf7cd5b3f52be2b5cf97feafe883b58d2"}, + {file = "MarkupSafe-2.1.3-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cb0932dc158471523c9637e807d9bfb93e06a95cbf010f1a38b98623b929ef2b"}, + {file = "MarkupSafe-2.1.3-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9402b03f1a1b4dc4c19845e5c749e3ab82d5078d16a2a4c2cd2df62d57bb0707"}, + {file = "MarkupSafe-2.1.3-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ca379055a47383d02a5400cb0d110cef0a776fc644cda797db0c5696cfd7e18e"}, + {file = "MarkupSafe-2.1.3-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:b7ff0f54cb4ff66dd38bebd335a38e2c22c41a8ee45aa608efc890ac3e3931bc"}, + {file = "MarkupSafe-2.1.3-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:c011a4149cfbcf9f03994ec2edffcb8b1dc2d2aede7ca243746df97a5d41ce48"}, + {file = "MarkupSafe-2.1.3-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = 
"sha256:56d9f2ecac662ca1611d183feb03a3fa4406469dafe241673d521dd5ae92a155"}, + {file = "MarkupSafe-2.1.3-cp37-cp37m-win32.whl", hash = "sha256:8758846a7e80910096950b67071243da3e5a20ed2546e6392603c096778d48e0"}, + {file = "MarkupSafe-2.1.3-cp37-cp37m-win_amd64.whl", hash = "sha256:787003c0ddb00500e49a10f2844fac87aa6ce977b90b0feaaf9de23c22508b24"}, + {file = "MarkupSafe-2.1.3-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:2ef12179d3a291be237280175b542c07a36e7f60718296278d8593d21ca937d4"}, + {file = "MarkupSafe-2.1.3-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:2c1b19b3aaacc6e57b7e25710ff571c24d6c3613a45e905b1fde04d691b98ee0"}, + {file = "MarkupSafe-2.1.3-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8afafd99945ead6e075b973fefa56379c5b5c53fd8937dad92c662da5d8fd5ee"}, + {file = "MarkupSafe-2.1.3-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8c41976a29d078bb235fea9b2ecd3da465df42a562910f9022f1a03107bd02be"}, + {file = "MarkupSafe-2.1.3-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d080e0a5eb2529460b30190fcfcc4199bd7f827663f858a226a81bc27beaa97e"}, + {file = "MarkupSafe-2.1.3-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:69c0f17e9f5a7afdf2cc9fb2d1ce6aabdb3bafb7f38017c0b77862bcec2bbad8"}, + {file = "MarkupSafe-2.1.3-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:504b320cd4b7eff6f968eddf81127112db685e81f7e36e75f9f84f0df46041c3"}, + {file = "MarkupSafe-2.1.3-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:42de32b22b6b804f42c5d98be4f7e5e977ecdd9ee9b660fda1a3edf03b11792d"}, + {file = "MarkupSafe-2.1.3-cp38-cp38-win32.whl", hash = "sha256:ceb01949af7121f9fc39f7d27f91be8546f3fb112c608bc4029aef0bab86a2a5"}, + {file = "MarkupSafe-2.1.3-cp38-cp38-win_amd64.whl", hash = "sha256:1b40069d487e7edb2676d3fbdb2b0829ffa2cd63a2ec26c4938b2d34391b4ecc"}, + {file = "MarkupSafe-2.1.3-cp39-cp39-macosx_10_9_universal2.whl", hash = 
"sha256:8023faf4e01efadfa183e863fefde0046de576c6f14659e8782065bcece22198"}, + {file = "MarkupSafe-2.1.3-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:6b2b56950d93e41f33b4223ead100ea0fe11f8e6ee5f641eb753ce4b77a7042b"}, + {file = "MarkupSafe-2.1.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9dcdfd0eaf283af041973bff14a2e143b8bd64e069f4c383416ecd79a81aab58"}, + {file = "MarkupSafe-2.1.3-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:05fb21170423db021895e1ea1e1f3ab3adb85d1c2333cbc2310f2a26bc77272e"}, + {file = "MarkupSafe-2.1.3-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:282c2cb35b5b673bbcadb33a585408104df04f14b2d9b01d4c345a3b92861c2c"}, + {file = "MarkupSafe-2.1.3-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:ab4a0df41e7c16a1392727727e7998a467472d0ad65f3ad5e6e765015df08636"}, + {file = "MarkupSafe-2.1.3-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:7ef3cb2ebbf91e330e3bb937efada0edd9003683db6b57bb108c4001f37a02ea"}, + {file = "MarkupSafe-2.1.3-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:0a4e4a1aff6c7ac4cd55792abf96c915634c2b97e3cc1c7129578aa68ebd754e"}, + {file = "MarkupSafe-2.1.3-cp39-cp39-win32.whl", hash = "sha256:fec21693218efe39aa7f8599346e90c705afa52c5b31ae019b2e57e8f6542bb2"}, + {file = "MarkupSafe-2.1.3-cp39-cp39-win_amd64.whl", hash = "sha256:3fd4abcb888d15a94f32b75d8fd18ee162ca0c064f35b11134be77050296d6ba"}, + {file = "MarkupSafe-2.1.3.tar.gz", hash = "sha256:af598ed32d6ae86f1b747b82783958b1a4ab8f617b06fe68795c7f026abbdcad"}, +] + +[[package]] +name = "mccabe" +version = "0.7.0" +description = "McCabe checker, plugin for flake8" +optional = false +python-versions = ">=3.6" +files = [ + {file = "mccabe-0.7.0-py2.py3-none-any.whl", hash = "sha256:6c2d30ab6be0e4a46919781807b4f0d834ebdd6c6e3dca0bda5a15f863427b6e"}, + {file = "mccabe-0.7.0.tar.gz", hash = 
"sha256:348e0240c33b60bbdf4e523192ef919f28cb2c3d7d5c7794f74009290f236325"}, +] + +[[package]] +name = "mdurl" +version = "0.1.2" +description = "Markdown URL utilities" +optional = false +python-versions = ">=3.7" +files = [ + {file = "mdurl-0.1.2-py3-none-any.whl", hash = "sha256:84008a41e51615a49fc9966191ff91509e3c40b939176e643fd50a5c2196b8f8"}, + {file = "mdurl-0.1.2.tar.gz", hash = "sha256:bb413d29f5eea38f31dd4754dd7377d4465116fb207585f97bf925588687c1ba"}, +] + +[[package]] +name = "mypy-extensions" +version = "1.0.0" +description = "Type system extensions for programs checked with the mypy type checker." +optional = false +python-versions = ">=3.5" +files = [ + {file = "mypy_extensions-1.0.0-py3-none-any.whl", hash = "sha256:4392f6c0eb8a5668a69e23d168ffa70f0be9ccfd32b5cc2d26a34ae5b844552d"}, + {file = "mypy_extensions-1.0.0.tar.gz", hash = "sha256:75dbf8955dc00442a438fc4d0666508a9a97b6bd41aa2f0ffe9d2f2725af0782"}, +] + +[[package]] +name = "numpy" +version = "1.24.4" +description = "Fundamental package for array computing in Python" +optional = false +python-versions = ">=3.8" +files = [ + {file = "numpy-1.24.4-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:c0bfb52d2169d58c1cdb8cc1f16989101639b34c7d3ce60ed70b19c63eba0b64"}, + {file = "numpy-1.24.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:ed094d4f0c177b1b8e7aa9cba7d6ceed51c0e569a5318ac0ca9a090680a6a1b1"}, + {file = "numpy-1.24.4-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:79fc682a374c4a8ed08b331bef9c5f582585d1048fa6d80bc6c35bc384eee9b4"}, + {file = "numpy-1.24.4-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7ffe43c74893dbf38c2b0a1f5428760a1a9c98285553c89e12d70a96a7f3a4d6"}, + {file = "numpy-1.24.4-cp310-cp310-win32.whl", hash = "sha256:4c21decb6ea94057331e111a5bed9a79d335658c27ce2adb580fb4d54f2ad9bc"}, + {file = "numpy-1.24.4-cp310-cp310-win_amd64.whl", hash = 
"sha256:b4bea75e47d9586d31e892a7401f76e909712a0fd510f58f5337bea9572c571e"}, + {file = "numpy-1.24.4-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:f136bab9c2cfd8da131132c2cf6cc27331dd6fae65f95f69dcd4ae3c3639c810"}, + {file = "numpy-1.24.4-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:e2926dac25b313635e4d6cf4dc4e51c8c0ebfed60b801c799ffc4c32bf3d1254"}, + {file = "numpy-1.24.4-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:222e40d0e2548690405b0b3c7b21d1169117391c2e82c378467ef9ab4c8f0da7"}, + {file = "numpy-1.24.4-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7215847ce88a85ce39baf9e89070cb860c98fdddacbaa6c0da3ffb31b3350bd5"}, + {file = "numpy-1.24.4-cp311-cp311-win32.whl", hash = "sha256:4979217d7de511a8d57f4b4b5b2b965f707768440c17cb70fbf254c4b225238d"}, + {file = "numpy-1.24.4-cp311-cp311-win_amd64.whl", hash = "sha256:b7b1fc9864d7d39e28f41d089bfd6353cb5f27ecd9905348c24187a768c79694"}, + {file = "numpy-1.24.4-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:1452241c290f3e2a312c137a9999cdbf63f78864d63c79039bda65ee86943f61"}, + {file = "numpy-1.24.4-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:04640dab83f7c6c85abf9cd729c5b65f1ebd0ccf9de90b270cd61935eef0197f"}, + {file = "numpy-1.24.4-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a5425b114831d1e77e4b5d812b69d11d962e104095a5b9c3b641a218abcc050e"}, + {file = "numpy-1.24.4-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dd80e219fd4c71fc3699fc1dadac5dcf4fd882bfc6f7ec53d30fa197b8ee22dc"}, + {file = "numpy-1.24.4-cp38-cp38-win32.whl", hash = "sha256:4602244f345453db537be5314d3983dbf5834a9701b7723ec28923e2889e0bb2"}, + {file = "numpy-1.24.4-cp38-cp38-win_amd64.whl", hash = "sha256:692f2e0f55794943c5bfff12b3f56f99af76f902fc47487bdfe97856de51a706"}, + {file = "numpy-1.24.4-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:2541312fbf09977f3b3ad449c4e5f4bb55d0dbf79226d7724211acc905049400"}, + {file = 
"numpy-1.24.4-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:9667575fb6d13c95f1b36aca12c5ee3356bf001b714fc354eb5465ce1609e62f"}, + {file = "numpy-1.24.4-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f3a86ed21e4f87050382c7bc96571755193c4c1392490744ac73d660e8f564a9"}, + {file = "numpy-1.24.4-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d11efb4dbecbdf22508d55e48d9c8384db795e1b7b51ea735289ff96613ff74d"}, + {file = "numpy-1.24.4-cp39-cp39-win32.whl", hash = "sha256:6620c0acd41dbcb368610bb2f4d83145674040025e5536954782467100aa8835"}, + {file = "numpy-1.24.4-cp39-cp39-win_amd64.whl", hash = "sha256:befe2bf740fd8373cf56149a5c23a0f601e82869598d41f8e188a0e9869926f8"}, + {file = "numpy-1.24.4-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:31f13e25b4e304632a4619d0e0777662c2ffea99fcae2029556b17d8ff958aef"}, + {file = "numpy-1.24.4-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:95f7ac6540e95bc440ad77f56e520da5bf877f87dca58bd095288dce8940532a"}, + {file = "numpy-1.24.4-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:e98f220aa76ca2a977fe435f5b04d7b3470c0a2e6312907b37ba6068f26787f2"}, + {file = "numpy-1.24.4.tar.gz", hash = "sha256:80f5e3a4e498641401868df4208b74581206afbee7cf7b8329daae82676d9463"}, +] + +[[package]] +name = "packaging" +version = "23.2" +description = "Core utilities for Python packages" +optional = false +python-versions = ">=3.7" +files = [ + {file = "packaging-23.2-py3-none-any.whl", hash = "sha256:8c491190033a9af7e1d931d0b5dacc2ef47509b34dd0de67ed209b5203fc88c7"}, + {file = "packaging-23.2.tar.gz", hash = "sha256:048fb0e9405036518eaaf48a55953c750c11e1a1b68e0dd1a9d62ed0c092cfc5"}, +] + +[[package]] +name = "pandas" +version = "2.0.3" +description = "Powerful data structures for data analysis, time series, and statistics" +optional = false +python-versions = ">=3.8" +files = [ + {file = "pandas-2.0.3-cp310-cp310-macosx_10_9_x86_64.whl", hash = 
"sha256:e4c7c9f27a4185304c7caf96dc7d91bc60bc162221152de697c98eb0b2648dd8"}, + {file = "pandas-2.0.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:f167beed68918d62bffb6ec64f2e1d8a7d297a038f86d4aed056b9493fca407f"}, + {file = "pandas-2.0.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ce0c6f76a0f1ba361551f3e6dceaff06bde7514a374aa43e33b588ec10420183"}, + {file = "pandas-2.0.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ba619e410a21d8c387a1ea6e8a0e49bb42216474436245718d7f2e88a2f8d7c0"}, + {file = "pandas-2.0.3-cp310-cp310-win32.whl", hash = "sha256:3ef285093b4fe5058eefd756100a367f27029913760773c8bf1d2d8bebe5d210"}, + {file = "pandas-2.0.3-cp310-cp310-win_amd64.whl", hash = "sha256:9ee1a69328d5c36c98d8e74db06f4ad518a1840e8ccb94a4ba86920986bb617e"}, + {file = "pandas-2.0.3-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:b084b91d8d66ab19f5bb3256cbd5ea661848338301940e17f4492b2ce0801fe8"}, + {file = "pandas-2.0.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:37673e3bdf1551b95bf5d4ce372b37770f9529743d2498032439371fc7b7eb26"}, + {file = "pandas-2.0.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b9cb1e14fdb546396b7e1b923ffaeeac24e4cedd14266c3497216dd4448e4f2d"}, + {file = "pandas-2.0.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d9cd88488cceb7635aebb84809d087468eb33551097d600c6dad13602029c2df"}, + {file = "pandas-2.0.3-cp311-cp311-win32.whl", hash = "sha256:694888a81198786f0e164ee3a581df7d505024fbb1f15202fc7db88a71d84ebd"}, + {file = "pandas-2.0.3-cp311-cp311-win_amd64.whl", hash = "sha256:6a21ab5c89dcbd57f78d0ae16630b090eec626360085a4148693def5452d8a6b"}, + {file = "pandas-2.0.3-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:9e4da0d45e7f34c069fe4d522359df7d23badf83abc1d1cef398895822d11061"}, + {file = "pandas-2.0.3-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:32fca2ee1b0d93dd71d979726b12b61faa06aeb93cf77468776287f41ff8fdc5"}, + 
{file = "pandas-2.0.3-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:258d3624b3ae734490e4d63c430256e716f488c4fcb7c8e9bde2d3aa46c29089"}, + {file = "pandas-2.0.3-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9eae3dc34fa1aa7772dd3fc60270d13ced7346fcbcfee017d3132ec625e23bb0"}, + {file = "pandas-2.0.3-cp38-cp38-win32.whl", hash = "sha256:f3421a7afb1a43f7e38e82e844e2bca9a6d793d66c1a7f9f0ff39a795bbc5e02"}, + {file = "pandas-2.0.3-cp38-cp38-win_amd64.whl", hash = "sha256:69d7f3884c95da3a31ef82b7618af5710dba95bb885ffab339aad925c3e8ce78"}, + {file = "pandas-2.0.3-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:5247fb1ba347c1261cbbf0fcfba4a3121fbb4029d95d9ef4dc45406620b25c8b"}, + {file = "pandas-2.0.3-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:81af086f4543c9d8bb128328b5d32e9986e0c84d3ee673a2ac6fb57fd14f755e"}, + {file = "pandas-2.0.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1994c789bf12a7c5098277fb43836ce090f1073858c10f9220998ac74f37c69b"}, + {file = "pandas-2.0.3-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5ec591c48e29226bcbb316e0c1e9423622bc7a4eaf1ef7c3c9fa1a3981f89641"}, + {file = "pandas-2.0.3-cp39-cp39-win32.whl", hash = "sha256:04dbdbaf2e4d46ca8da896e1805bc04eb85caa9a82e259e8eed00254d5e0c682"}, + {file = "pandas-2.0.3-cp39-cp39-win_amd64.whl", hash = "sha256:1168574b036cd8b93abc746171c9b4f1b83467438a5e45909fed645cf8692dbc"}, + {file = "pandas-2.0.3.tar.gz", hash = "sha256:c02f372a88e0d17f36d3093a644c73cfc1788e876a7c4bcb4020a77512e2043c"}, +] + +[package.dependencies] +numpy = [ + {version = ">=1.20.3", markers = "python_version < \"3.10\""}, + {version = ">=1.21.0", markers = "python_version >= \"3.10\" and python_version < \"3.11\""}, + {version = ">=1.23.2", markers = "python_version >= \"3.11\""}, +] +python-dateutil = ">=2.8.2" +pytz = ">=2020.1" +tzdata = ">=2022.1" + +[package.extras] +all = ["PyQt5 (>=5.15.1)", "SQLAlchemy 
(>=1.4.16)", "beautifulsoup4 (>=4.9.3)", "bottleneck (>=1.3.2)", "brotlipy (>=0.7.0)", "fastparquet (>=0.6.3)", "fsspec (>=2021.07.0)", "gcsfs (>=2021.07.0)", "html5lib (>=1.1)", "hypothesis (>=6.34.2)", "jinja2 (>=3.0.0)", "lxml (>=4.6.3)", "matplotlib (>=3.6.1)", "numba (>=0.53.1)", "numexpr (>=2.7.3)", "odfpy (>=1.4.1)", "openpyxl (>=3.0.7)", "pandas-gbq (>=0.15.0)", "psycopg2 (>=2.8.6)", "pyarrow (>=7.0.0)", "pymysql (>=1.0.2)", "pyreadstat (>=1.1.2)", "pytest (>=7.3.2)", "pytest-asyncio (>=0.17.0)", "pytest-xdist (>=2.2.0)", "python-snappy (>=0.6.0)", "pyxlsb (>=1.0.8)", "qtpy (>=2.2.0)", "s3fs (>=2021.08.0)", "scipy (>=1.7.1)", "tables (>=3.6.1)", "tabulate (>=0.8.9)", "xarray (>=0.21.0)", "xlrd (>=2.0.1)", "xlsxwriter (>=1.4.3)", "zstandard (>=0.15.2)"] +aws = ["s3fs (>=2021.08.0)"] +clipboard = ["PyQt5 (>=5.15.1)", "qtpy (>=2.2.0)"] +compression = ["brotlipy (>=0.7.0)", "python-snappy (>=0.6.0)", "zstandard (>=0.15.2)"] +computation = ["scipy (>=1.7.1)", "xarray (>=0.21.0)"] +excel = ["odfpy (>=1.4.1)", "openpyxl (>=3.0.7)", "pyxlsb (>=1.0.8)", "xlrd (>=2.0.1)", "xlsxwriter (>=1.4.3)"] +feather = ["pyarrow (>=7.0.0)"] +fss = ["fsspec (>=2021.07.0)"] +gcp = ["gcsfs (>=2021.07.0)", "pandas-gbq (>=0.15.0)"] +hdf5 = ["tables (>=3.6.1)"] +html = ["beautifulsoup4 (>=4.9.3)", "html5lib (>=1.1)", "lxml (>=4.6.3)"] +mysql = ["SQLAlchemy (>=1.4.16)", "pymysql (>=1.0.2)"] +output-formatting = ["jinja2 (>=3.0.0)", "tabulate (>=0.8.9)"] +parquet = ["pyarrow (>=7.0.0)"] +performance = ["bottleneck (>=1.3.2)", "numba (>=0.53.1)", "numexpr (>=2.7.1)"] +plot = ["matplotlib (>=3.6.1)"] +postgresql = ["SQLAlchemy (>=1.4.16)", "psycopg2 (>=2.8.6)"] +spss = ["pyreadstat (>=1.1.2)"] +sql-other = ["SQLAlchemy (>=1.4.16)"] +test = ["hypothesis (>=6.34.2)", "pytest (>=7.3.2)", "pytest-asyncio (>=0.17.0)", "pytest-xdist (>=2.2.0)"] +xml = ["lxml (>=4.6.3)"] + +[[package]] +name = "pathspec" +version = "0.11.2" +description = "Utility library for gitignore style pattern matching of 
file paths." +optional = false +python-versions = ">=3.7" +files = [ + {file = "pathspec-0.11.2-py3-none-any.whl", hash = "sha256:1d6ed233af05e679efb96b1851550ea95bbb64b7c490b0f5aa52996c11e92a20"}, + {file = "pathspec-0.11.2.tar.gz", hash = "sha256:e0d8d0ac2f12da61956eb2306b69f9469b42f4deb0f3cb6ed47b9cce9996ced3"}, +] + +[[package]] +name = "pillow" +version = "10.1.0" +description = "Python Imaging Library (Fork)" +optional = false +python-versions = ">=3.8" +files = [ + {file = "Pillow-10.1.0-cp310-cp310-macosx_10_10_x86_64.whl", hash = "sha256:1ab05f3db77e98f93964697c8efc49c7954b08dd61cff526b7f2531a22410106"}, + {file = "Pillow-10.1.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:6932a7652464746fcb484f7fc3618e6503d2066d853f68a4bd97193a3996e273"}, + {file = "Pillow-10.1.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a5f63b5a68daedc54c7c3464508d8c12075e56dcfbd42f8c1bf40169061ae666"}, + {file = "Pillow-10.1.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c0949b55eb607898e28eaccb525ab104b2d86542a85c74baf3a6dc24002edec2"}, + {file = "Pillow-10.1.0-cp310-cp310-manylinux_2_28_aarch64.whl", hash = "sha256:ae88931f93214777c7a3aa0a8f92a683f83ecde27f65a45f95f22d289a69e593"}, + {file = "Pillow-10.1.0-cp310-cp310-manylinux_2_28_x86_64.whl", hash = "sha256:b0eb01ca85b2361b09480784a7931fc648ed8b7836f01fb9241141b968feb1db"}, + {file = "Pillow-10.1.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:d27b5997bdd2eb9fb199982bb7eb6164db0426904020dc38c10203187ae2ff2f"}, + {file = "Pillow-10.1.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:7df5608bc38bd37ef585ae9c38c9cd46d7c81498f086915b0f97255ea60c2818"}, + {file = "Pillow-10.1.0-cp310-cp310-win_amd64.whl", hash = "sha256:41f67248d92a5e0a2076d3517d8d4b1e41a97e2df10eb8f93106c89107f38b57"}, + {file = "Pillow-10.1.0-cp311-cp311-macosx_10_10_x86_64.whl", hash = "sha256:1fb29c07478e6c06a46b867e43b0bcdb241b44cc52be9bc25ce5944eed4648e7"}, + {file = 
"Pillow-10.1.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:2cdc65a46e74514ce742c2013cd4a2d12e8553e3a2563c64879f7c7e4d28bce7"}, + {file = "Pillow-10.1.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:50d08cd0a2ecd2a8657bd3d82c71efd5a58edb04d9308185d66c3a5a5bed9610"}, + {file = "Pillow-10.1.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:062a1610e3bc258bff2328ec43f34244fcec972ee0717200cb1425214fe5b839"}, + {file = "Pillow-10.1.0-cp311-cp311-manylinux_2_28_aarch64.whl", hash = "sha256:61f1a9d247317fa08a308daaa8ee7b3f760ab1809ca2da14ecc88ae4257d6172"}, + {file = "Pillow-10.1.0-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:a646e48de237d860c36e0db37ecaecaa3619e6f3e9d5319e527ccbc8151df061"}, + {file = "Pillow-10.1.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:47e5bf85b80abc03be7455c95b6d6e4896a62f6541c1f2ce77a7d2bb832af262"}, + {file = "Pillow-10.1.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:a92386125e9ee90381c3369f57a2a50fa9e6aa8b1cf1d9c4b200d41a7dd8e992"}, + {file = "Pillow-10.1.0-cp311-cp311-win_amd64.whl", hash = "sha256:0f7c276c05a9767e877a0b4c5050c8bee6a6d960d7f0c11ebda6b99746068c2a"}, + {file = "Pillow-10.1.0-cp312-cp312-macosx_10_10_x86_64.whl", hash = "sha256:a89b8312d51715b510a4fe9fc13686283f376cfd5abca8cd1c65e4c76e21081b"}, + {file = "Pillow-10.1.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:00f438bb841382b15d7deb9a05cc946ee0f2c352653c7aa659e75e592f6fa17d"}, + {file = "Pillow-10.1.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3d929a19f5469b3f4df33a3df2983db070ebb2088a1e145e18facbc28cae5b27"}, + {file = "Pillow-10.1.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9a92109192b360634a4489c0c756364c0c3a2992906752165ecb50544c251312"}, + {file = "Pillow-10.1.0-cp312-cp312-manylinux_2_28_aarch64.whl", hash = "sha256:0248f86b3ea061e67817c47ecbe82c23f9dd5d5226200eb9090b3873d3ca32de"}, + {file = 
"Pillow-10.1.0-cp312-cp312-manylinux_2_28_x86_64.whl", hash = "sha256:9882a7451c680c12f232a422730f986a1fcd808da0fd428f08b671237237d651"}, + {file = "Pillow-10.1.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:1c3ac5423c8c1da5928aa12c6e258921956757d976405e9467c5f39d1d577a4b"}, + {file = "Pillow-10.1.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:806abdd8249ba3953c33742506fe414880bad78ac25cc9a9b1c6ae97bedd573f"}, + {file = "Pillow-10.1.0-cp312-cp312-win_amd64.whl", hash = "sha256:eaed6977fa73408b7b8a24e8b14e59e1668cfc0f4c40193ea7ced8e210adf996"}, + {file = "Pillow-10.1.0-cp38-cp38-macosx_10_10_x86_64.whl", hash = "sha256:fe1e26e1ffc38be097f0ba1d0d07fcade2bcfd1d023cda5b29935ae8052bd793"}, + {file = "Pillow-10.1.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:7a7e3daa202beb61821c06d2517428e8e7c1aab08943e92ec9e5755c2fc9ba5e"}, + {file = "Pillow-10.1.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:24fadc71218ad2b8ffe437b54876c9382b4a29e030a05a9879f615091f42ffc2"}, + {file = "Pillow-10.1.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fa1d323703cfdac2036af05191b969b910d8f115cf53093125e4058f62012c9a"}, + {file = "Pillow-10.1.0-cp38-cp38-manylinux_2_28_aarch64.whl", hash = "sha256:912e3812a1dbbc834da2b32299b124b5ddcb664ed354916fd1ed6f193f0e2d01"}, + {file = "Pillow-10.1.0-cp38-cp38-manylinux_2_28_x86_64.whl", hash = "sha256:7dbaa3c7de82ef37e7708521be41db5565004258ca76945ad74a8e998c30af8d"}, + {file = "Pillow-10.1.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:9d7bc666bd8c5a4225e7ac71f2f9d12466ec555e89092728ea0f5c0c2422ea80"}, + {file = "Pillow-10.1.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:baada14941c83079bf84c037e2d8b7506ce201e92e3d2fa0d1303507a8538212"}, + {file = "Pillow-10.1.0-cp38-cp38-win_amd64.whl", hash = "sha256:2ef6721c97894a7aa77723740a09547197533146fba8355e86d6d9a4a1056b14"}, + {file = "Pillow-10.1.0-cp39-cp39-macosx_10_10_x86_64.whl", hash = 
"sha256:0a026c188be3b443916179f5d04548092e253beb0c3e2ee0a4e2cdad72f66099"}, + {file = "Pillow-10.1.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:04f6f6149f266a100374ca3cc368b67fb27c4af9f1cc8cb6306d849dcdf12616"}, + {file = "Pillow-10.1.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bb40c011447712d2e19cc261c82655f75f32cb724788df315ed992a4d65696bb"}, + {file = "Pillow-10.1.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1a8413794b4ad9719346cd9306118450b7b00d9a15846451549314a58ac42219"}, + {file = "Pillow-10.1.0-cp39-cp39-manylinux_2_28_aarch64.whl", hash = "sha256:c9aeea7b63edb7884b031a35305629a7593272b54f429a9869a4f63a1bf04c34"}, + {file = "Pillow-10.1.0-cp39-cp39-manylinux_2_28_x86_64.whl", hash = "sha256:b4005fee46ed9be0b8fb42be0c20e79411533d1fd58edabebc0dd24626882cfd"}, + {file = "Pillow-10.1.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:4d0152565c6aa6ebbfb1e5d8624140a440f2b99bf7afaafbdbf6430426497f28"}, + {file = "Pillow-10.1.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:d921bc90b1defa55c9917ca6b6b71430e4286fc9e44c55ead78ca1a9f9eba5f2"}, + {file = "Pillow-10.1.0-cp39-cp39-win_amd64.whl", hash = "sha256:cfe96560c6ce2f4c07d6647af2d0f3c54cc33289894ebd88cfbb3bcd5391e256"}, + {file = "Pillow-10.1.0-pp310-pypy310_pp73-macosx_10_10_x86_64.whl", hash = "sha256:937bdc5a7f5343d1c97dc98149a0be7eb9704e937fe3dc7140e229ae4fc572a7"}, + {file = "Pillow-10.1.0-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b1c25762197144e211efb5f4e8ad656f36c8d214d390585d1d21281f46d556ba"}, + {file = "Pillow-10.1.0-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:afc8eef765d948543a4775f00b7b8c079b3321d6b675dde0d02afa2ee23000b4"}, + {file = "Pillow-10.1.0-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:883f216eac8712b83a63f41b76ddfb7b2afab1b74abbb413c5df6680f071a6b9"}, + {file = "Pillow-10.1.0-pp39-pypy39_pp73-macosx_10_10_x86_64.whl", hash = 
"sha256:b920e4d028f6442bea9a75b7491c063f0b9a3972520731ed26c83e254302eb1e"}, + {file = "Pillow-10.1.0-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1c41d960babf951e01a49c9746f92c5a7e0d939d1652d7ba30f6b3090f27e412"}, + {file = "Pillow-10.1.0-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:1fafabe50a6977ac70dfe829b2d5735fd54e190ab55259ec8aea4aaea412fa0b"}, + {file = "Pillow-10.1.0-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:3b834f4b16173e5b92ab6566f0473bfb09f939ba14b23b8da1f54fa63e4b623f"}, + {file = "Pillow-10.1.0.tar.gz", hash = "sha256:e6bf8de6c36ed96c86ea3b6e1d5273c53f46ef518a062464cd7ef5dd2cf92e38"}, +] + +[package.extras] +docs = ["furo", "olefile", "sphinx (>=2.4)", "sphinx-copybutton", "sphinx-inline-tabs", "sphinx-removed-in", "sphinxext-opengraph"] +tests = ["check-manifest", "coverage", "defusedxml", "markdown2", "olefile", "packaging", "pyroma", "pytest", "pytest-cov", "pytest-timeout"] + +[[package]] +name = "pkgutil-resolve-name" +version = "1.3.10" +description = "Resolve a name to an object." +optional = false +python-versions = ">=3.6" +files = [ + {file = "pkgutil_resolve_name-1.3.10-py3-none-any.whl", hash = "sha256:ca27cc078d25c5ad71a9de0a7a330146c4e014c2462d9af19c6b828280649c5e"}, + {file = "pkgutil_resolve_name-1.3.10.tar.gz", hash = "sha256:357d6c9e6a755653cfd78893817c0853af365dd51ec97f3d358a819373bbd174"}, +] + +[[package]] +name = "platformdirs" +version = "3.11.0" +description = "A small Python package for determining appropriate platform-specific dirs, e.g. a \"user data dir\"." 
+optional = false +python-versions = ">=3.7" +files = [ + {file = "platformdirs-3.11.0-py3-none-any.whl", hash = "sha256:e9d171d00af68be50e9202731309c4e658fd8bc76f55c11c7dd760d023bda68e"}, + {file = "platformdirs-3.11.0.tar.gz", hash = "sha256:cf8ee52a3afdb965072dcc652433e0c7e3e40cf5ea1477cd4b3b1d2eb75495b3"}, +] + +[package.extras] +docs = ["furo (>=2023.7.26)", "proselint (>=0.13)", "sphinx (>=7.1.1)", "sphinx-autodoc-typehints (>=1.24)"] +test = ["appdirs (==1.4.4)", "covdefaults (>=2.3)", "pytest (>=7.4)", "pytest-cov (>=4.1)", "pytest-mock (>=3.11.1)"] + +[[package]] +name = "protobuf" +version = "4.24.4" +description = "" +optional = false +python-versions = ">=3.7" +files = [ + {file = "protobuf-4.24.4-cp310-abi3-win32.whl", hash = "sha256:ec9912d5cb6714a5710e28e592ee1093d68c5ebfeda61983b3f40331da0b1ebb"}, + {file = "protobuf-4.24.4-cp310-abi3-win_amd64.whl", hash = "sha256:1badab72aa8a3a2b812eacfede5020472e16c6b2212d737cefd685884c191085"}, + {file = "protobuf-4.24.4-cp37-abi3-macosx_10_9_universal2.whl", hash = "sha256:8e61a27f362369c2f33248a0ff6896c20dcd47b5d48239cb9720134bef6082e4"}, + {file = "protobuf-4.24.4-cp37-abi3-manylinux2014_aarch64.whl", hash = "sha256:bffa46ad9612e6779d0e51ae586fde768339b791a50610d85eb162daeb23661e"}, + {file = "protobuf-4.24.4-cp37-abi3-manylinux2014_x86_64.whl", hash = "sha256:b493cb590960ff863743b9ff1452c413c2ee12b782f48beca77c8da3e2ffe9d9"}, + {file = "protobuf-4.24.4-cp37-cp37m-win32.whl", hash = "sha256:dbbed8a56e56cee8d9d522ce844a1379a72a70f453bde6243e3c86c30c2a3d46"}, + {file = "protobuf-4.24.4-cp37-cp37m-win_amd64.whl", hash = "sha256:6b7d2e1c753715dcfe9d284a25a52d67818dd43c4932574307daf836f0071e37"}, + {file = "protobuf-4.24.4-cp38-cp38-win32.whl", hash = "sha256:02212557a76cd99574775a81fefeba8738d0f668d6abd0c6b1d3adcc75503dbe"}, + {file = "protobuf-4.24.4-cp38-cp38-win_amd64.whl", hash = "sha256:2fa3886dfaae6b4c5ed2730d3bf47c7a38a72b3a1f0acb4d4caf68e6874b947b"}, + {file = "protobuf-4.24.4-cp39-cp39-win32.whl", hash = 
"sha256:b77272f3e28bb416e2071186cb39efd4abbf696d682cbb5dc731308ad37fa6dd"}, + {file = "protobuf-4.24.4-cp39-cp39-win_amd64.whl", hash = "sha256:9fee5e8aa20ef1b84123bb9232b3f4a5114d9897ed89b4b8142d81924e05d79b"}, + {file = "protobuf-4.24.4-py3-none-any.whl", hash = "sha256:80797ce7424f8c8d2f2547e2d42bfbb6c08230ce5832d6c099a37335c9c90a92"}, + {file = "protobuf-4.24.4.tar.gz", hash = "sha256:5a70731910cd9104762161719c3d883c960151eea077134458503723b60e3667"}, +] + +[[package]] +name = "pyarrow" +version = "13.0.0" +description = "Python library for Apache Arrow" +optional = false +python-versions = ">=3.8" +files = [ + {file = "pyarrow-13.0.0-cp310-cp310-macosx_10_14_x86_64.whl", hash = "sha256:1afcc2c33f31f6fb25c92d50a86b7a9f076d38acbcb6f9e74349636109550148"}, + {file = "pyarrow-13.0.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:70fa38cdc66b2fc1349a082987f2b499d51d072faaa6b600f71931150de2e0e3"}, + {file = "pyarrow-13.0.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cd57b13a6466822498238877892a9b287b0a58c2e81e4bdb0b596dbb151cbb73"}, + {file = "pyarrow-13.0.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f8ce69f7bf01de2e2764e14df45b8404fc6f1a5ed9871e8e08a12169f87b7a26"}, + {file = "pyarrow-13.0.0-cp310-cp310-manylinux_2_28_aarch64.whl", hash = "sha256:588f0d2da6cf1b1680974d63be09a6530fd1bd825dc87f76e162404779a157dc"}, + {file = "pyarrow-13.0.0-cp310-cp310-manylinux_2_28_x86_64.whl", hash = "sha256:6241afd72b628787b4abea39e238e3ff9f34165273fad306c7acf780dd850956"}, + {file = "pyarrow-13.0.0-cp310-cp310-win_amd64.whl", hash = "sha256:fda7857e35993673fcda603c07d43889fca60a5b254052a462653f8656c64f44"}, + {file = "pyarrow-13.0.0-cp311-cp311-macosx_10_14_x86_64.whl", hash = "sha256:aac0ae0146a9bfa5e12d87dda89d9ef7c57a96210b899459fc2f785303dcbb67"}, + {file = "pyarrow-13.0.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:d7759994217c86c161c6a8060509cfdf782b952163569606bb373828afdd82e8"}, + {file = 
"pyarrow-13.0.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:868a073fd0ff6468ae7d869b5fc1f54de5c4255b37f44fb890385eb68b68f95d"}, + {file = "pyarrow-13.0.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:51be67e29f3cfcde263a113c28e96aa04362ed8229cb7c6e5f5c719003659d33"}, + {file = "pyarrow-13.0.0-cp311-cp311-manylinux_2_28_aarch64.whl", hash = "sha256:d1b4e7176443d12610874bb84d0060bf080f000ea9ed7c84b2801df851320295"}, + {file = "pyarrow-13.0.0-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:69b6f9a089d116a82c3ed819eea8fe67dae6105f0d81eaf0fdd5e60d0c6e0944"}, + {file = "pyarrow-13.0.0-cp311-cp311-win_amd64.whl", hash = "sha256:ab1268db81aeb241200e321e220e7cd769762f386f92f61b898352dd27e402ce"}, + {file = "pyarrow-13.0.0-cp38-cp38-macosx_10_14_x86_64.whl", hash = "sha256:ee7490f0f3f16a6c38f8c680949551053c8194e68de5046e6c288e396dccee80"}, + {file = "pyarrow-13.0.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:e3ad79455c197a36eefbd90ad4aa832bece7f830a64396c15c61a0985e337287"}, + {file = "pyarrow-13.0.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:68fcd2dc1b7d9310b29a15949cdd0cb9bc34b6de767aff979ebf546020bf0ba0"}, + {file = "pyarrow-13.0.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dc6fd330fd574c51d10638e63c0d00ab456498fc804c9d01f2a61b9264f2c5b2"}, + {file = "pyarrow-13.0.0-cp38-cp38-manylinux_2_28_aarch64.whl", hash = "sha256:e66442e084979a97bb66939e18f7b8709e4ac5f887e636aba29486ffbf373763"}, + {file = "pyarrow-13.0.0-cp38-cp38-manylinux_2_28_x86_64.whl", hash = "sha256:0f6eff839a9e40e9c5610d3ff8c5bdd2f10303408312caf4c8003285d0b49565"}, + {file = "pyarrow-13.0.0-cp38-cp38-win_amd64.whl", hash = "sha256:8b30a27f1cddf5c6efcb67e598d7823a1e253d743d92ac32ec1eb4b6a1417867"}, + {file = "pyarrow-13.0.0-cp39-cp39-macosx_10_14_x86_64.whl", hash = "sha256:09552dad5cf3de2dc0aba1c7c4b470754c69bd821f5faafc3d774bedc3b04bb7"}, + {file = 
"pyarrow-13.0.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:3896ae6c205d73ad192d2fc1489cd0edfab9f12867c85b4c277af4d37383c18c"}, + {file = "pyarrow-13.0.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6647444b21cb5e68b593b970b2a9a07748dd74ea457c7dadaa15fd469c48ada1"}, + {file = "pyarrow-13.0.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:47663efc9c395e31d09c6aacfa860f4473815ad6804311c5433f7085415d62a7"}, + {file = "pyarrow-13.0.0-cp39-cp39-manylinux_2_28_aarch64.whl", hash = "sha256:b9ba6b6d34bd2563345488cf444510588ea42ad5613df3b3509f48eb80250afd"}, + {file = "pyarrow-13.0.0-cp39-cp39-manylinux_2_28_x86_64.whl", hash = "sha256:d00d374a5625beeb448a7fa23060df79adb596074beb3ddc1838adb647b6ef09"}, + {file = "pyarrow-13.0.0-cp39-cp39-win_amd64.whl", hash = "sha256:c51afd87c35c8331b56f796eff954b9c7f8d4b7fef5903daf4e05fcf017d23a8"}, + {file = "pyarrow-13.0.0.tar.gz", hash = "sha256:83333726e83ed44b0ac94d8d7a21bbdee4a05029c3b1e8db58a863eec8fd8a33"}, +] + +[package.dependencies] +numpy = ">=1.16.6" + +[[package]] +name = "pycodestyle" +version = "2.11.1" +description = "Python style guide checker" +optional = false +python-versions = ">=3.8" +files = [ + {file = "pycodestyle-2.11.1-py2.py3-none-any.whl", hash = "sha256:44fe31000b2d866f2e41841b18528a505fbd7fef9017b04eff4e2648a0fadc67"}, + {file = "pycodestyle-2.11.1.tar.gz", hash = "sha256:41ba0e7afc9752dfb53ced5489e89f8186be00e599e712660695b7a75ff2663f"}, +] + +[[package]] +name = "pydeck" +version = "0.8.0" +description = "Widget for deck.gl maps" +optional = false +python-versions = ">=3.7" +files = [ + {file = "pydeck-0.8.0-py2.py3-none-any.whl", hash = "sha256:a8fa7757c6f24bba033af39db3147cb020eef44012ba7e60d954de187f9ed4d5"}, + {file = "pydeck-0.8.0.tar.gz", hash = "sha256:07edde833f7cfcef6749124351195aa7dcd24663d4909fd7898dbd0b6fbc01ec"}, +] + +[package.dependencies] +jinja2 = ">=2.10.1" +numpy = ">=1.16.4" + +[package.extras] +carto = ["pydeck-carto"] 
+jupyter = ["ipykernel (>=5.1.2)", "ipython (>=5.8.0)", "ipywidgets (>=7,<8)", "traitlets (>=4.3.2)"] + +[[package]] +name = "pyflakes" +version = "3.1.0" +description = "passive checker of Python programs" +optional = false +python-versions = ">=3.8" +files = [ + {file = "pyflakes-3.1.0-py2.py3-none-any.whl", hash = "sha256:4132f6d49cb4dae6819e5379898f2b8cce3c5f23994194c24b77d5da2e36f774"}, + {file = "pyflakes-3.1.0.tar.gz", hash = "sha256:a0aae034c444db0071aa077972ba4768d40c830d9539fd45bf4cd3f8f6992efc"}, +] + +[[package]] +name = "pygments" +version = "2.16.1" +description = "Pygments is a syntax highlighting package written in Python." +optional = false +python-versions = ">=3.7" +files = [ + {file = "Pygments-2.16.1-py3-none-any.whl", hash = "sha256:13fc09fa63bc8d8671a6d247e1eb303c4b343eaee81d861f3404db2935653692"}, + {file = "Pygments-2.16.1.tar.gz", hash = "sha256:1daff0494820c69bc8941e407aa20f577374ee88364ee10a98fdbe0aece96e29"}, +] + +[package.extras] +plugins = ["importlib-metadata"] + +[[package]] +name = "python-dateutil" +version = "2.8.2" +description = "Extensions to the standard Python datetime module" +optional = false +python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,>=2.7" +files = [ + {file = "python-dateutil-2.8.2.tar.gz", hash = "sha256:0123cacc1627ae19ddf3c27a5de5bd67ee4586fbdd6440d9748f8abb483d3e86"}, + {file = "python_dateutil-2.8.2-py2.py3-none-any.whl", hash = "sha256:961d03dc3453ebbc59dbdea9e4e11c5651520a876d0f4db161e8674aae935da9"}, +] + +[package.dependencies] +six = ">=1.5" + +[[package]] +name = "pytz" +version = "2023.3.post1" +description = "World timezone definitions, modern and historical" +optional = false +python-versions = "*" +files = [ + {file = "pytz-2023.3.post1-py2.py3-none-any.whl", hash = "sha256:ce42d816b81b68506614c11e8937d3aa9e41007ceb50bfdcb0749b921bf646c7"}, + {file = "pytz-2023.3.post1.tar.gz", hash = "sha256:7b4fddbeb94a1eba4b557da24f19fdf9db575192544270a9101d8509f9f43d7b"}, +] + +[[package]] +name = "referencing" 
+version = "0.30.2" +description = "JSON Referencing + Python" +optional = false +python-versions = ">=3.8" +files = [ + {file = "referencing-0.30.2-py3-none-any.whl", hash = "sha256:449b6669b6121a9e96a7f9e410b245d471e8d48964c67113ce9afe50c8dd7bdf"}, + {file = "referencing-0.30.2.tar.gz", hash = "sha256:794ad8003c65938edcdbc027f1933215e0d0ccc0291e3ce20a4d87432b59efc0"}, +] + +[package.dependencies] +attrs = ">=22.2.0" +rpds-py = ">=0.7.0" + +[[package]] +name = "requests" +version = "2.31.0" +description = "Python HTTP for Humans." +optional = false +python-versions = ">=3.7" +files = [ + {file = "requests-2.31.0-py3-none-any.whl", hash = "sha256:58cd2187c01e70e6e26505bca751777aa9f2ee0b7f4300988b709f44e013003f"}, + {file = "requests-2.31.0.tar.gz", hash = "sha256:942c5a758f98d790eaed1a29cb6eefc7ffb0d1cf7af05c3d2791656dbd6ad1e1"}, +] + +[package.dependencies] +certifi = ">=2017.4.17" +charset-normalizer = ">=2,<4" +idna = ">=2.5,<4" +urllib3 = ">=1.21.1,<3" + +[package.extras] +socks = ["PySocks (>=1.5.6,!=1.5.7)"] +use-chardet-on-py3 = ["chardet (>=3.0.2,<6)"] + +[[package]] +name = "rich" +version = "13.6.0" +description = "Render rich text, tables, progress bars, syntax highlighting, markdown and more to the terminal" +optional = false +python-versions = ">=3.7.0" +files = [ + {file = "rich-13.6.0-py3-none-any.whl", hash = "sha256:2b38e2fe9ca72c9a00170a1a2d20c63c790d0e10ef1fe35eba76e1e7b1d7d245"}, + {file = "rich-13.6.0.tar.gz", hash = "sha256:5c14d22737e6d5084ef4771b62d5d4363165b403455a30a1c8ca39dc7b644bef"}, +] + +[package.dependencies] +markdown-it-py = ">=2.2.0" +pygments = ">=2.13.0,<3.0.0" +typing-extensions = {version = ">=4.0.0,<5.0", markers = "python_version < \"3.9\""} + +[package.extras] +jupyter = ["ipywidgets (>=7.5.1,<9)"] + +[[package]] +name = "rpds-py" +version = "0.10.6" +description = "Python bindings to Rust's persistent data structures (rpds)" +optional = false +python-versions = ">=3.8" +files = [ + {file = 
"rpds_py-0.10.6-cp310-cp310-macosx_10_7_x86_64.whl", hash = "sha256:6bdc11f9623870d75692cc33c59804b5a18d7b8a4b79ef0b00b773a27397d1f6"}, + {file = "rpds_py-0.10.6-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:26857f0f44f0e791f4a266595a7a09d21f6b589580ee0585f330aaccccb836e3"}, + {file = "rpds_py-0.10.6-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d7f5e15c953ace2e8dde9824bdab4bec50adb91a5663df08d7d994240ae6fa31"}, + {file = "rpds_py-0.10.6-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:61fa268da6e2e1cd350739bb61011121fa550aa2545762e3dc02ea177ee4de35"}, + {file = "rpds_py-0.10.6-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c48f3fbc3e92c7dd6681a258d22f23adc2eb183c8cb1557d2fcc5a024e80b094"}, + {file = "rpds_py-0.10.6-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c0503c5b681566e8b722fe8c4c47cce5c7a51f6935d5c7012c4aefe952a35eed"}, + {file = "rpds_py-0.10.6-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:734c41f9f57cc28658d98270d3436dba65bed0cfc730d115b290e970150c540d"}, + {file = "rpds_py-0.10.6-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:a5d7ed104d158c0042a6a73799cf0eb576dfd5fc1ace9c47996e52320c37cb7c"}, + {file = "rpds_py-0.10.6-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:e3df0bc35e746cce42579826b89579d13fd27c3d5319a6afca9893a9b784ff1b"}, + {file = "rpds_py-0.10.6-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:73e0a78a9b843b8c2128028864901f55190401ba38aae685350cf69b98d9f7c9"}, + {file = "rpds_py-0.10.6-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:5ed505ec6305abd2c2c9586a7b04fbd4baf42d4d684a9c12ec6110deefe2a063"}, + {file = "rpds_py-0.10.6-cp310-none-win32.whl", hash = "sha256:d97dd44683802000277bbf142fd9f6b271746b4846d0acaf0cefa6b2eaf2a7ad"}, + {file = "rpds_py-0.10.6-cp310-none-win_amd64.whl", hash = 
"sha256:b455492cab07107bfe8711e20cd920cc96003e0da3c1f91297235b1603d2aca7"}, + {file = "rpds_py-0.10.6-cp311-cp311-macosx_10_7_x86_64.whl", hash = "sha256:e8cdd52744f680346ff8c1ecdad5f4d11117e1724d4f4e1874f3a67598821069"}, + {file = "rpds_py-0.10.6-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:66414dafe4326bca200e165c2e789976cab2587ec71beb80f59f4796b786a238"}, + {file = "rpds_py-0.10.6-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cc435d059f926fdc5b05822b1be4ff2a3a040f3ae0a7bbbe672babb468944722"}, + {file = "rpds_py-0.10.6-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:8e7f2219cb72474571974d29a191714d822e58be1eb171f229732bc6fdedf0ac"}, + {file = "rpds_py-0.10.6-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3953c6926a63f8ea5514644b7afb42659b505ece4183fdaaa8f61d978754349e"}, + {file = "rpds_py-0.10.6-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2bb2e4826be25e72013916eecd3d30f66fd076110de09f0e750163b416500721"}, + {file = "rpds_py-0.10.6-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7bf347b495b197992efc81a7408e9a83b931b2f056728529956a4d0858608b80"}, + {file = "rpds_py-0.10.6-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:102eac53bb0bf0f9a275b438e6cf6904904908562a1463a6fc3323cf47d7a532"}, + {file = "rpds_py-0.10.6-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:40f93086eef235623aa14dbddef1b9fb4b22b99454cb39a8d2e04c994fb9868c"}, + {file = "rpds_py-0.10.6-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:e22260a4741a0e7a206e175232867b48a16e0401ef5bce3c67ca5b9705879066"}, + {file = "rpds_py-0.10.6-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:f4e56860a5af16a0fcfa070a0a20c42fbb2012eed1eb5ceeddcc7f8079214281"}, + {file = "rpds_py-0.10.6-cp311-none-win32.whl", hash = "sha256:0774a46b38e70fdde0c6ded8d6d73115a7c39d7839a164cc833f170bbf539116"}, + {file = 
"rpds_py-0.10.6-cp311-none-win_amd64.whl", hash = "sha256:4a5ee600477b918ab345209eddafde9f91c0acd931f3776369585a1c55b04c57"}, + {file = "rpds_py-0.10.6-cp312-cp312-macosx_10_7_x86_64.whl", hash = "sha256:5ee97c683eaface61d38ec9a489e353d36444cdebb128a27fe486a291647aff6"}, + {file = "rpds_py-0.10.6-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:0713631d6e2d6c316c2f7b9320a34f44abb644fc487b77161d1724d883662e31"}, + {file = "rpds_py-0.10.6-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b5a53f5998b4bbff1cb2e967e66ab2addc67326a274567697379dd1e326bded7"}, + {file = "rpds_py-0.10.6-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:6a555ae3d2e61118a9d3e549737bb4a56ff0cec88a22bd1dfcad5b4e04759175"}, + {file = "rpds_py-0.10.6-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:945eb4b6bb8144909b203a88a35e0a03d22b57aefb06c9b26c6e16d72e5eb0f0"}, + {file = "rpds_py-0.10.6-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:52c215eb46307c25f9fd2771cac8135d14b11a92ae48d17968eda5aa9aaf5071"}, + {file = "rpds_py-0.10.6-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c1b3cd23d905589cb205710b3988fc8f46d4a198cf12862887b09d7aaa6bf9b9"}, + {file = "rpds_py-0.10.6-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:64ccc28683666672d7c166ed465c09cee36e306c156e787acef3c0c62f90da5a"}, + {file = "rpds_py-0.10.6-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:516a611a2de12fbea70c78271e558f725c660ce38e0006f75139ba337d56b1f6"}, + {file = "rpds_py-0.10.6-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:9ff93d3aedef11f9c4540cf347f8bb135dd9323a2fc705633d83210d464c579d"}, + {file = "rpds_py-0.10.6-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:d858532212f0650be12b6042ff4378dc2efbb7792a286bee4489eaa7ba010586"}, + {file = "rpds_py-0.10.6-cp312-none-win32.whl", hash = 
"sha256:3c4eff26eddac49d52697a98ea01b0246e44ca82ab09354e94aae8823e8bda02"}, + {file = "rpds_py-0.10.6-cp312-none-win_amd64.whl", hash = "sha256:150eec465dbc9cbca943c8e557a21afdcf9bab8aaabf386c44b794c2f94143d2"}, + {file = "rpds_py-0.10.6-cp38-cp38-macosx_10_7_x86_64.whl", hash = "sha256:cf693eb4a08eccc1a1b636e4392322582db2a47470d52e824b25eca7a3977b53"}, + {file = "rpds_py-0.10.6-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:4134aa2342f9b2ab6c33d5c172e40f9ef802c61bb9ca30d21782f6e035ed0043"}, + {file = "rpds_py-0.10.6-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e782379c2028a3611285a795b89b99a52722946d19fc06f002f8b53e3ea26ea9"}, + {file = "rpds_py-0.10.6-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:2f6da6d842195fddc1cd34c3da8a40f6e99e4a113918faa5e60bf132f917c247"}, + {file = "rpds_py-0.10.6-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b4a9fe992887ac68256c930a2011255bae0bf5ec837475bc6f7edd7c8dfa254e"}, + {file = "rpds_py-0.10.6-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b788276a3c114e9f51e257f2a6f544c32c02dab4aa7a5816b96444e3f9ffc336"}, + {file = "rpds_py-0.10.6-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:caa1afc70a02645809c744eefb7d6ee8fef7e2fad170ffdeacca267fd2674f13"}, + {file = "rpds_py-0.10.6-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:bddd4f91eede9ca5275e70479ed3656e76c8cdaaa1b354e544cbcf94c6fc8ac4"}, + {file = "rpds_py-0.10.6-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:775049dfa63fb58293990fc59473e659fcafd953bba1d00fc5f0631a8fd61977"}, + {file = "rpds_py-0.10.6-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:c6c45a2d2b68c51fe3d9352733fe048291e483376c94f7723458cfd7b473136b"}, + {file = "rpds_py-0.10.6-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:0699ab6b8c98df998c3eacf51a3b25864ca93dab157abe358af46dc95ecd9801"}, + {file = "rpds_py-0.10.6-cp38-none-win32.whl", hash = 
"sha256:ebdab79f42c5961682654b851f3f0fc68e6cc7cd8727c2ac4ffff955154123c1"}, + {file = "rpds_py-0.10.6-cp38-none-win_amd64.whl", hash = "sha256:24656dc36f866c33856baa3ab309da0b6a60f37d25d14be916bd3e79d9f3afcf"}, + {file = "rpds_py-0.10.6-cp39-cp39-macosx_10_7_x86_64.whl", hash = "sha256:0898173249141ee99ffcd45e3829abe7bcee47d941af7434ccbf97717df020e5"}, + {file = "rpds_py-0.10.6-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:9e9184fa6c52a74a5521e3e87badbf9692549c0fcced47443585876fcc47e469"}, + {file = "rpds_py-0.10.6-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5752b761902cd15073a527b51de76bbae63d938dc7c5c4ad1e7d8df10e765138"}, + {file = "rpds_py-0.10.6-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:99a57006b4ec39dbfb3ed67e5b27192792ffb0553206a107e4aadb39c5004cd5"}, + {file = "rpds_py-0.10.6-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:09586f51a215d17efdb3a5f090d7cbf1633b7f3708f60a044757a5d48a83b393"}, + {file = "rpds_py-0.10.6-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e225a6a14ecf44499aadea165299092ab0cba918bb9ccd9304eab1138844490b"}, + {file = "rpds_py-0.10.6-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b2039f8d545f20c4e52713eea51a275e62153ee96c8035a32b2abb772b6fc9e5"}, + {file = "rpds_py-0.10.6-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:34ad87a831940521d462ac11f1774edf867c34172010f5390b2f06b85dcc6014"}, + {file = "rpds_py-0.10.6-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:dcdc88b6b01015da066da3fb76545e8bb9a6880a5ebf89e0f0b2e3ca557b3ab7"}, + {file = "rpds_py-0.10.6-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:25860ed5c4e7f5e10c496ea78af46ae8d8468e0be745bd233bab9ca99bfd2647"}, + {file = "rpds_py-0.10.6-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:7854a207ef77319ec457c1eb79c361b48807d252d94348305db4f4b62f40f7f3"}, + {file = "rpds_py-0.10.6-cp39-none-win32.whl", hash = 
"sha256:e6fcc026a3f27c1282c7ed24b7fcac82cdd70a0e84cc848c0841a3ab1e3dea2d"}, + {file = "rpds_py-0.10.6-cp39-none-win_amd64.whl", hash = "sha256:e98c4c07ee4c4b3acf787e91b27688409d918212dfd34c872201273fdd5a0e18"}, + {file = "rpds_py-0.10.6-pp310-pypy310_pp73-macosx_10_7_x86_64.whl", hash = "sha256:68fe9199184c18d997d2e4293b34327c0009a78599ce703e15cd9a0f47349bba"}, + {file = "rpds_py-0.10.6-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:3339eca941568ed52d9ad0f1b8eb9fe0958fa245381747cecf2e9a78a5539c42"}, + {file = "rpds_py-0.10.6-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a360cfd0881d36c6dc271992ce1eda65dba5e9368575663de993eeb4523d895f"}, + {file = "rpds_py-0.10.6-pp310-pypy310_pp73-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:031f76fc87644a234883b51145e43985aa2d0c19b063e91d44379cd2786144f8"}, + {file = "rpds_py-0.10.6-pp310-pypy310_pp73-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1f36a9d751f86455dc5278517e8b65580eeee37d61606183897f122c9e51cef3"}, + {file = "rpds_py-0.10.6-pp310-pypy310_pp73-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:052a832078943d2b2627aea0d19381f607fe331cc0eb5df01991268253af8417"}, + {file = "rpds_py-0.10.6-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:023574366002bf1bd751ebaf3e580aef4a468b3d3c216d2f3f7e16fdabd885ed"}, + {file = "rpds_py-0.10.6-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:defa2c0c68734f4a82028c26bcc85e6b92cced99866af118cd6a89b734ad8e0d"}, + {file = "rpds_py-0.10.6-pp310-pypy310_pp73-musllinux_1_2_aarch64.whl", hash = "sha256:879fb24304ead6b62dbe5034e7b644b71def53c70e19363f3c3be2705c17a3b4"}, + {file = "rpds_py-0.10.6-pp310-pypy310_pp73-musllinux_1_2_i686.whl", hash = "sha256:53c43e10d398e365da2d4cc0bcaf0854b79b4c50ee9689652cdc72948e86f487"}, + {file = "rpds_py-0.10.6-pp310-pypy310_pp73-musllinux_1_2_x86_64.whl", hash = 
"sha256:3777cc9dea0e6c464e4b24760664bd8831738cc582c1d8aacf1c3f546bef3f65"}, + {file = "rpds_py-0.10.6-pp38-pypy38_pp73-macosx_10_7_x86_64.whl", hash = "sha256:40578a6469e5d1df71b006936ce95804edb5df47b520c69cf5af264d462f2cbb"}, + {file = "rpds_py-0.10.6-pp38-pypy38_pp73-macosx_11_0_arm64.whl", hash = "sha256:cf71343646756a072b85f228d35b1d7407da1669a3de3cf47f8bbafe0c8183a4"}, + {file = "rpds_py-0.10.6-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:10f32b53f424fc75ff7b713b2edb286fdbfc94bf16317890260a81c2c00385dc"}, + {file = "rpds_py-0.10.6-pp38-pypy38_pp73-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:81de24a1c51cfb32e1fbf018ab0bdbc79c04c035986526f76c33e3f9e0f3356c"}, + {file = "rpds_py-0.10.6-pp38-pypy38_pp73-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ac17044876e64a8ea20ab132080ddc73b895b4abe9976e263b0e30ee5be7b9c2"}, + {file = "rpds_py-0.10.6-pp38-pypy38_pp73-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5e8a78bd4879bff82daef48c14d5d4057f6856149094848c3ed0ecaf49f5aec2"}, + {file = "rpds_py-0.10.6-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:78ca33811e1d95cac8c2e49cb86c0fb71f4d8409d8cbea0cb495b6dbddb30a55"}, + {file = "rpds_py-0.10.6-pp38-pypy38_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:c63c3ef43f0b3fb00571cff6c3967cc261c0ebd14a0a134a12e83bdb8f49f21f"}, + {file = "rpds_py-0.10.6-pp38-pypy38_pp73-musllinux_1_2_aarch64.whl", hash = "sha256:7fde6d0e00b2fd0dbbb40c0eeec463ef147819f23725eda58105ba9ca48744f4"}, + {file = "rpds_py-0.10.6-pp38-pypy38_pp73-musllinux_1_2_i686.whl", hash = "sha256:79edd779cfc46b2e15b0830eecd8b4b93f1a96649bcb502453df471a54ce7977"}, + {file = "rpds_py-0.10.6-pp38-pypy38_pp73-musllinux_1_2_x86_64.whl", hash = "sha256:9164ec8010327ab9af931d7ccd12ab8d8b5dc2f4c6a16cbdd9d087861eaaefa1"}, + {file = "rpds_py-0.10.6-pp39-pypy39_pp73-macosx_10_7_x86_64.whl", hash = 
"sha256:d29ddefeab1791e3c751e0189d5f4b3dbc0bbe033b06e9c333dca1f99e1d523e"}, + {file = "rpds_py-0.10.6-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:30adb75ecd7c2a52f5e76af50644b3e0b5ba036321c390b8e7ec1bb2a16dd43c"}, + {file = "rpds_py-0.10.6-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dd609fafdcdde6e67a139898196698af37438b035b25ad63704fd9097d9a3482"}, + {file = "rpds_py-0.10.6-pp39-pypy39_pp73-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:6eef672de005736a6efd565577101277db6057f65640a813de6c2707dc69f396"}, + {file = "rpds_py-0.10.6-pp39-pypy39_pp73-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:6cf4393c7b41abbf07c88eb83e8af5013606b1cdb7f6bc96b1b3536b53a574b8"}, + {file = "rpds_py-0.10.6-pp39-pypy39_pp73-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:ad857f42831e5b8d41a32437f88d86ead6c191455a3499c4b6d15e007936d4cf"}, + {file = "rpds_py-0.10.6-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1d7360573f1e046cb3b0dceeb8864025aa78d98be4bb69f067ec1c40a9e2d9df"}, + {file = "rpds_py-0.10.6-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:d08f63561c8a695afec4975fae445245386d645e3e446e6f260e81663bfd2e38"}, + {file = "rpds_py-0.10.6-pp39-pypy39_pp73-musllinux_1_2_aarch64.whl", hash = "sha256:f0f17f2ce0f3529177a5fff5525204fad7b43dd437d017dd0317f2746773443d"}, + {file = "rpds_py-0.10.6-pp39-pypy39_pp73-musllinux_1_2_i686.whl", hash = "sha256:442626328600bde1d09dc3bb00434f5374948838ce75c41a52152615689f9403"}, + {file = "rpds_py-0.10.6-pp39-pypy39_pp73-musllinux_1_2_x86_64.whl", hash = "sha256:e9616f5bd2595f7f4a04b67039d890348ab826e943a9bfdbe4938d0eba606971"}, + {file = "rpds_py-0.10.6.tar.gz", hash = "sha256:4ce5a708d65a8dbf3748d2474b580d606b1b9f91b5c6ab2a316e0b0cf7a4ba50"}, +] + +[[package]] +name = "six" +version = "1.16.0" +description = "Python 2 and 3 compatibility utilities" +optional = false +python-versions = ">=2.7, 
!=3.0.*, !=3.1.*, !=3.2.*" +files = [ + {file = "six-1.16.0-py2.py3-none-any.whl", hash = "sha256:8abb2f1d86890a2dfb989f9a77cfcfd3e47c2a354b01111771326f8aa26e0254"}, + {file = "six-1.16.0.tar.gz", hash = "sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926"}, +] + +[[package]] +name = "smmap" +version = "5.0.1" +description = "A pure Python implementation of a sliding window memory map manager" +optional = false +python-versions = ">=3.7" +files = [ + {file = "smmap-5.0.1-py3-none-any.whl", hash = "sha256:e6d8668fa5f93e706934a62d7b4db19c8d9eb8cf2adbb75ef1b675aa332b69da"}, + {file = "smmap-5.0.1.tar.gz", hash = "sha256:dceeb6c0028fdb6734471eb07c0cd2aae706ccaecab45965ee83f11c8d3b1f62"}, +] + +[[package]] +name = "sqlalchemy" +version = "2.0.22" +description = "Database Abstraction Library" +optional = false +python-versions = ">=3.7" +files = [ + {file = "SQLAlchemy-2.0.22-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:f146c61ae128ab43ea3a0955de1af7e1633942c2b2b4985ac51cc292daf33222"}, + {file = "SQLAlchemy-2.0.22-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:875de9414393e778b655a3d97d60465eb3fae7c919e88b70cc10b40b9f56042d"}, + {file = "SQLAlchemy-2.0.22-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:13790cb42f917c45c9c850b39b9941539ca8ee7917dacf099cc0b569f3d40da7"}, + {file = "SQLAlchemy-2.0.22-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e04ab55cf49daf1aeb8c622c54d23fa4bec91cb051a43cc24351ba97e1dd09f5"}, + {file = "SQLAlchemy-2.0.22-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:a42c9fa3abcda0dcfad053e49c4f752eef71ecd8c155221e18b99d4224621176"}, + {file = "SQLAlchemy-2.0.22-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:14cd3bcbb853379fef2cd01e7c64a5d6f1d005406d877ed9509afb7a05ff40a5"}, + {file = "SQLAlchemy-2.0.22-cp310-cp310-win32.whl", hash = "sha256:d143c5a9dada696bcfdb96ba2de4a47d5a89168e71d05a076e88a01386872f97"}, + {file = 
"SQLAlchemy-2.0.22-cp310-cp310-win_amd64.whl", hash = "sha256:ccd87c25e4c8559e1b918d46b4fa90b37f459c9b4566f1dfbce0eb8122571547"}, + {file = "SQLAlchemy-2.0.22-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:4f6ff392b27a743c1ad346d215655503cec64405d3b694228b3454878bf21590"}, + {file = "SQLAlchemy-2.0.22-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:f776c2c30f0e5f4db45c3ee11a5f2a8d9de68e81eb73ec4237de1e32e04ae81c"}, + {file = "SQLAlchemy-2.0.22-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c8f1792d20d2f4e875ce7a113f43c3561ad12b34ff796b84002a256f37ce9437"}, + {file = "SQLAlchemy-2.0.22-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d80eeb5189d7d4b1af519fc3f148fe7521b9dfce8f4d6a0820e8f5769b005051"}, + {file = "SQLAlchemy-2.0.22-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:69fd9e41cf9368afa034e1c81f3570afb96f30fcd2eb1ef29cb4d9371c6eece2"}, + {file = "SQLAlchemy-2.0.22-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:54bcceaf4eebef07dadfde424f5c26b491e4a64e61761dea9459103ecd6ccc95"}, + {file = "SQLAlchemy-2.0.22-cp311-cp311-win32.whl", hash = "sha256:7ee7ccf47aa503033b6afd57efbac6b9e05180f492aeed9fcf70752556f95624"}, + {file = "SQLAlchemy-2.0.22-cp311-cp311-win_amd64.whl", hash = "sha256:b560f075c151900587ade06706b0c51d04b3277c111151997ea0813455378ae0"}, + {file = "SQLAlchemy-2.0.22-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:2c9bac865ee06d27a1533471405ad240a6f5d83195eca481f9fc4a71d8b87df8"}, + {file = "SQLAlchemy-2.0.22-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:625b72d77ac8ac23da3b1622e2da88c4aedaee14df47c8432bf8f6495e655de2"}, + {file = "SQLAlchemy-2.0.22-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b39a6e21110204a8c08d40ff56a73ba542ec60bab701c36ce721e7990df49fb9"}, + {file = "SQLAlchemy-2.0.22-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:53a766cb0b468223cafdf63e2d37f14a4757476157927b09300c8c5832d88560"}, 
+ {file = "SQLAlchemy-2.0.22-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:0e1ce8ebd2e040357dde01a3fb7d30d9b5736b3e54a94002641dfd0aa12ae6ce"}, + {file = "SQLAlchemy-2.0.22-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:505f503763a767556fa4deae5194b2be056b64ecca72ac65224381a0acab7ebe"}, + {file = "SQLAlchemy-2.0.22-cp312-cp312-win32.whl", hash = "sha256:154a32f3c7b00de3d090bc60ec8006a78149e221f1182e3edcf0376016be9396"}, + {file = "SQLAlchemy-2.0.22-cp312-cp312-win_amd64.whl", hash = "sha256:129415f89744b05741c6f0b04a84525f37fbabe5dc3774f7edf100e7458c48cd"}, + {file = "SQLAlchemy-2.0.22-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:3940677d341f2b685a999bffe7078697b5848a40b5f6952794ffcf3af150c301"}, + {file = "SQLAlchemy-2.0.22-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:55914d45a631b81a8a2cb1a54f03eea265cf1783241ac55396ec6d735be14883"}, + {file = "SQLAlchemy-2.0.22-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2096d6b018d242a2bcc9e451618166f860bb0304f590d205173d317b69986c95"}, + {file = "SQLAlchemy-2.0.22-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:19c6986cf2fb4bc8e0e846f97f4135a8e753b57d2aaaa87c50f9acbe606bd1db"}, + {file = "SQLAlchemy-2.0.22-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:6ac28bd6888fe3c81fbe97584eb0b96804bd7032d6100b9701255d9441373ec1"}, + {file = "SQLAlchemy-2.0.22-cp37-cp37m-win32.whl", hash = "sha256:cb9a758ad973e795267da334a92dd82bb7555cb36a0960dcabcf724d26299db8"}, + {file = "SQLAlchemy-2.0.22-cp37-cp37m-win_amd64.whl", hash = "sha256:40b1206a0d923e73aa54f0a6bd61419a96b914f1cd19900b6c8226899d9742ad"}, + {file = "SQLAlchemy-2.0.22-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:3aa1472bf44f61dd27987cd051f1c893b7d3b17238bff8c23fceaef4f1133868"}, + {file = "SQLAlchemy-2.0.22-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:56a7e2bb639df9263bf6418231bc2a92a773f57886d371ddb7a869a24919face"}, + {file = 
"SQLAlchemy-2.0.22-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ccca778c0737a773a1ad86b68bda52a71ad5950b25e120b6eb1330f0df54c3d0"}, + {file = "SQLAlchemy-2.0.22-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7c6c3e9350f9fb16de5b5e5fbf17b578811a52d71bb784cc5ff71acb7de2a7f9"}, + {file = "SQLAlchemy-2.0.22-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:564e9f9e4e6466273dbfab0e0a2e5fe819eec480c57b53a2cdee8e4fdae3ad5f"}, + {file = "SQLAlchemy-2.0.22-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:af66001d7b76a3fab0d5e4c1ec9339ac45748bc4a399cbc2baa48c1980d3c1f4"}, + {file = "SQLAlchemy-2.0.22-cp38-cp38-win32.whl", hash = "sha256:9e55dff5ec115316dd7a083cdc1a52de63693695aecf72bc53a8e1468ce429e5"}, + {file = "SQLAlchemy-2.0.22-cp38-cp38-win_amd64.whl", hash = "sha256:4e869a8ff7ee7a833b74868a0887e8462445ec462432d8cbeff5e85f475186da"}, + {file = "SQLAlchemy-2.0.22-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:9886a72c8e6371280cb247c5d32c9c8fa141dc560124348762db8a8b236f8692"}, + {file = "SQLAlchemy-2.0.22-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:a571bc8ac092a3175a1d994794a8e7a1f2f651e7c744de24a19b4f740fe95034"}, + {file = "SQLAlchemy-2.0.22-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8db5ba8b7da759b727faebc4289a9e6a51edadc7fc32207a30f7c6203a181592"}, + {file = "SQLAlchemy-2.0.22-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0b0b3f2686c3f162123adba3cb8b626ed7e9b8433ab528e36ed270b4f70d1cdb"}, + {file = "SQLAlchemy-2.0.22-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:0c1fea8c0abcb070ffe15311853abfda4e55bf7dc1d4889497b3403629f3bf00"}, + {file = "SQLAlchemy-2.0.22-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:4bb062784f37b2d75fd9b074c8ec360ad5df71f933f927e9e95c50eb8e05323c"}, + {file = "SQLAlchemy-2.0.22-cp39-cp39-win32.whl", hash = "sha256:58a3aba1bfb32ae7af68da3f277ed91d9f57620cf7ce651db96636790a78b736"}, + {file = 
"SQLAlchemy-2.0.22-cp39-cp39-win_amd64.whl", hash = "sha256:92e512a6af769e4725fa5b25981ba790335d42c5977e94ded07db7d641490a85"}, + {file = "SQLAlchemy-2.0.22-py3-none-any.whl", hash = "sha256:3076740335e4aaadd7deb3fe6dcb96b3015f1613bd190a4e1634e1b99b02ec86"}, + {file = "SQLAlchemy-2.0.22.tar.gz", hash = "sha256:5434cc601aa17570d79e5377f5fd45ff92f9379e2abed0be5e8c2fba8d353d2b"}, +] + +[package.dependencies] +greenlet = {version = "!=0.4.17", markers = "platform_machine == \"aarch64\" or platform_machine == \"ppc64le\" or platform_machine == \"x86_64\" or platform_machine == \"amd64\" or platform_machine == \"AMD64\" or platform_machine == \"win32\" or platform_machine == \"WIN32\""} +typing-extensions = ">=4.2.0" + +[package.extras] +aiomysql = ["aiomysql (>=0.2.0)", "greenlet (!=0.4.17)"] +aiosqlite = ["aiosqlite", "greenlet (!=0.4.17)", "typing-extensions (!=3.10.0.1)"] +asyncio = ["greenlet (!=0.4.17)"] +asyncmy = ["asyncmy (>=0.2.3,!=0.2.4,!=0.2.6)", "greenlet (!=0.4.17)"] +mariadb-connector = ["mariadb (>=1.0.1,!=1.1.2,!=1.1.5)"] +mssql = ["pyodbc"] +mssql-pymssql = ["pymssql"] +mssql-pyodbc = ["pyodbc"] +mypy = ["mypy (>=0.910)"] +mysql = ["mysqlclient (>=1.4.0)"] +mysql-connector = ["mysql-connector-python"] +oracle = ["cx-oracle (>=7)"] +oracle-oracledb = ["oracledb (>=1.0.1)"] +postgresql = ["psycopg2 (>=2.7)"] +postgresql-asyncpg = ["asyncpg", "greenlet (!=0.4.17)"] +postgresql-pg8000 = ["pg8000 (>=1.29.1)"] +postgresql-psycopg = ["psycopg (>=3.0.7)"] +postgresql-psycopg2binary = ["psycopg2-binary"] +postgresql-psycopg2cffi = ["psycopg2cffi"] +postgresql-psycopgbinary = ["psycopg[binary] (>=3.0.7)"] +pymysql = ["pymysql"] +sqlcipher = ["sqlcipher3-binary"] + +[[package]] +name = "streamlit" +version = "1.27.2" +description = "A faster way to build and share data apps" +optional = false +python-versions = ">=3.8, !=3.9.7" +files = [ + {file = "streamlit-1.27.2-py2.py3-none-any.whl", hash = 
"sha256:726dd2bee638e0976aa72552900648d4ead4be28e30235355f10c25062669369"}, + {file = "streamlit-1.27.2.tar.gz", hash = "sha256:33f9ae0de5b7d59cd7daba87754c54ec837a76c24acfc41d1f8e5148f20903ee"}, +] + +[package.dependencies] +altair = ">=4.0,<6" +blinker = ">=1.0.0,<2" +cachetools = ">=4.0,<6" +click = ">=7.0,<9" +gitpython = ">=3.0.7,<3.1.19 || >3.1.19,<4" +importlib-metadata = ">=1.4,<7" +numpy = ">=1.19.3,<2" +packaging = ">=16.8,<24" +pandas = ">=1.3.0,<3" +pillow = ">=7.1.0,<11" +protobuf = ">=3.20,<5" +pyarrow = ">=6.0" +pydeck = ">=0.8.0b4,<1" +python-dateutil = ">=2.7.3,<3" +requests = ">=2.27,<3" +rich = ">=10.14.0,<14" +tenacity = ">=8.1.0,<9" +toml = ">=0.10.1,<2" +tornado = ">=6.0.3,<7" +typing-extensions = ">=4.3.0,<5" +tzlocal = ">=1.1,<6" +validators = ">=0.2,<1" +watchdog = {version = ">=2.1.5", markers = "platform_system != \"Darwin\""} + +[package.extras] +snowflake = ["snowflake-connector-python (>=2.8.0)", "snowflake-snowpark-python (>=0.9.0)"] + +[[package]] +name = "tenacity" +version = "8.2.3" +description = "Retry code until it succeeds" +optional = false +python-versions = ">=3.7" +files = [ + {file = "tenacity-8.2.3-py3-none-any.whl", hash = "sha256:ce510e327a630c9e1beaf17d42e6ffacc88185044ad85cf74c0a8887c6a0f88c"}, + {file = "tenacity-8.2.3.tar.gz", hash = "sha256:5398ef0d78e63f40007c1fb4c0bff96e1911394d2fa8d194f77619c05ff6cc8a"}, +] + +[package.extras] +doc = ["reno", "sphinx", "tornado (>=4.5)"] + +[[package]] +name = "toml" +version = "0.10.2" +description = "Python Library for Tom's Obvious, Minimal Language" +optional = false +python-versions = ">=2.6, !=3.0.*, !=3.1.*, !=3.2.*" +files = [ + {file = "toml-0.10.2-py2.py3-none-any.whl", hash = "sha256:806143ae5bfb6a3c6e736a764057db0e6a0e05e338b5630894a5f779cabb4f9b"}, + {file = "toml-0.10.2.tar.gz", hash = "sha256:b3bda1d108d5dd99f4a20d24d9c348e91c4db7ab1b749200bded2f839ccbe68f"}, +] + +[[package]] +name = "tomli" +version = "2.0.1" +description = "A lil' TOML parser" +optional = false 
+python-versions = ">=3.7" +files = [ + {file = "tomli-2.0.1-py3-none-any.whl", hash = "sha256:939de3e7a6161af0c887ef91b7d41a53e7c5a1ca976325f429cb46ea9bc30ecc"}, + {file = "tomli-2.0.1.tar.gz", hash = "sha256:de526c12914f0c550d15924c62d72abc48d6fe7364aa87328337a31007fe8a4f"}, +] + +[[package]] +name = "toolz" +version = "0.12.0" +description = "List processing tools and functional utilities" +optional = false +python-versions = ">=3.5" +files = [ + {file = "toolz-0.12.0-py3-none-any.whl", hash = "sha256:2059bd4148deb1884bb0eb770a3cde70e7f954cfbbdc2285f1f2de01fd21eb6f"}, + {file = "toolz-0.12.0.tar.gz", hash = "sha256:88c570861c440ee3f2f6037c4654613228ff40c93a6c25e0eba70d17282c6194"}, +] + +[[package]] +name = "tornado" +version = "6.3.3" +description = "Tornado is a Python web framework and asynchronous networking library, originally developed at FriendFeed." +optional = false +python-versions = ">= 3.8" +files = [ + {file = "tornado-6.3.3-cp38-abi3-macosx_10_9_universal2.whl", hash = "sha256:502fba735c84450974fec147340016ad928d29f1e91f49be168c0a4c18181e1d"}, + {file = "tornado-6.3.3-cp38-abi3-macosx_10_9_x86_64.whl", hash = "sha256:805d507b1f588320c26f7f097108eb4023bbaa984d63176d1652e184ba24270a"}, + {file = "tornado-6.3.3-cp38-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1bd19ca6c16882e4d37368e0152f99c099bad93e0950ce55e71daed74045908f"}, + {file = "tornado-6.3.3-cp38-abi3-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7ac51f42808cca9b3613f51ffe2a965c8525cb1b00b7b2d56828b8045354f76a"}, + {file = "tornado-6.3.3-cp38-abi3-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:71a8db65160a3c55d61839b7302a9a400074c9c753040455494e2af74e2501f2"}, + {file = "tornado-6.3.3-cp38-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:ceb917a50cd35882b57600709dd5421a418c29ddc852da8bcdab1f0db33406b0"}, + {file = "tornado-6.3.3-cp38-abi3-musllinux_1_1_i686.whl", 
hash = "sha256:7d01abc57ea0dbb51ddfed477dfe22719d376119844e33c661d873bf9c0e4a16"}, + {file = "tornado-6.3.3-cp38-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:9dc4444c0defcd3929d5c1eb5706cbe1b116e762ff3e0deca8b715d14bf6ec17"}, + {file = "tornado-6.3.3-cp38-abi3-win32.whl", hash = "sha256:65ceca9500383fbdf33a98c0087cb975b2ef3bfb874cb35b8de8740cf7f41bd3"}, + {file = "tornado-6.3.3-cp38-abi3-win_amd64.whl", hash = "sha256:22d3c2fa10b5793da13c807e6fc38ff49a4f6e1e3868b0a6f4164768bb8e20f5"}, + {file = "tornado-6.3.3.tar.gz", hash = "sha256:e7d8db41c0181c80d76c982aacc442c0783a2c54d6400fe028954201a2e032fe"}, +] + +[[package]] +name = "typing-extensions" +version = "4.8.0" +description = "Backported and Experimental Type Hints for Python 3.8+" +optional = false +python-versions = ">=3.8" +files = [ + {file = "typing_extensions-4.8.0-py3-none-any.whl", hash = "sha256:8f92fc8806f9a6b641eaa5318da32b44d401efaac0f6678c9bc448ba3605faa0"}, + {file = "typing_extensions-4.8.0.tar.gz", hash = "sha256:df8e4339e9cb77357558cbdbceca33c303714cf861d1eef15e1070055ae8b7ef"}, +] + +[[package]] +name = "tzdata" +version = "2023.3" +description = "Provider of IANA time zone data" +optional = false +python-versions = ">=2" +files = [ + {file = "tzdata-2023.3-py2.py3-none-any.whl", hash = "sha256:7e65763eef3120314099b6939b5546db7adce1e7d6f2e179e3df563c70511eda"}, + {file = "tzdata-2023.3.tar.gz", hash = "sha256:11ef1e08e54acb0d4f95bdb1be05da659673de4acbd21bf9c69e94cc5e907a3a"}, +] + +[[package]] +name = "tzlocal" +version = "5.2" +description = "tzinfo object for the local timezone" +optional = false +python-versions = ">=3.8" +files = [ + {file = "tzlocal-5.2-py3-none-any.whl", hash = "sha256:49816ef2fe65ea8ac19d19aa7a1ae0551c834303d5014c6d5a62e4cbda8047b8"}, + {file = "tzlocal-5.2.tar.gz", hash = "sha256:8d399205578f1a9342816409cc1e46a93ebd5755e39ea2d85334bea911bf0e6e"}, +] + +[package.dependencies] +"backports.zoneinfo" = {version = "*", markers = "python_version < \"3.9\""} +tzdata = 
{version = "*", markers = "platform_system == \"Windows\""} + +[package.extras] +devenv = ["check-manifest", "pytest (>=4.3)", "pytest-cov", "pytest-mock (>=3.3)", "zest.releaser"] + +[[package]] +name = "urllib3" +version = "2.0.7" +description = "HTTP library with thread-safe connection pooling, file post, and more." +optional = false +python-versions = ">=3.7" +files = [ + {file = "urllib3-2.0.7-py3-none-any.whl", hash = "sha256:fdb6d215c776278489906c2f8916e6e7d4f5a9b602ccbcfdf7f016fc8da0596e"}, + {file = "urllib3-2.0.7.tar.gz", hash = "sha256:c97dfde1f7bd43a71c8d2a58e369e9b2bf692d1334ea9f9cae55add7d0dd0f84"}, +] + +[package.extras] +brotli = ["brotli (>=1.0.9)", "brotlicffi (>=0.8.0)"] +secure = ["certifi", "cryptography (>=1.9)", "idna (>=2.0.0)", "pyopenssl (>=17.1.0)", "urllib3-secure-extra"] +socks = ["pysocks (>=1.5.6,!=1.5.7,<2.0)"] +zstd = ["zstandard (>=0.18.0)"] + +[[package]] +name = "validators" +version = "0.22.0" +description = "Python Data Validation for Humans™" +optional = false +python-versions = ">=3.8" +files = [ + {file = "validators-0.22.0-py3-none-any.whl", hash = "sha256:61cf7d4a62bbae559f2e54aed3b000cea9ff3e2fdbe463f51179b92c58c9585a"}, + {file = "validators-0.22.0.tar.gz", hash = "sha256:77b2689b172eeeb600d9605ab86194641670cdb73b60afd577142a9397873370"}, +] + +[package.extras] +docs-offline = ["myst-parser (>=2.0.0)", "pypandoc-binary (>=1.11)", "sphinx (>=7.1.1)"] +docs-online = ["mkdocs (>=1.5.2)", "mkdocs-git-revision-date-localized-plugin (>=1.2.0)", "mkdocs-material (>=9.2.6)", "mkdocstrings[python] (>=0.22.0)", "pyaml (>=23.7.0)"] +hooks = ["pre-commit (>=3.3.3)"] +package = ["build (>=1.0.0)", "twine (>=4.0.2)"] +runner = ["tox (>=4.11.1)"] +sast = ["bandit[toml] (>=1.7.5)"] +testing = ["pytest (>=7.4.0)"] +tooling = ["black (>=23.7.0)", "pyright (>=1.1.325)", "ruff (>=0.0.287)"] +tooling-extras = ["pyaml (>=23.7.0)", "pypandoc-binary (>=1.11)", "pytest (>=7.4.0)"] + +[[package]] +name = "watchdog" +version = "3.0.0" +description 
= "Filesystem events monitoring" +optional = false +python-versions = ">=3.7" +files = [ + {file = "watchdog-3.0.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:336adfc6f5cc4e037d52db31194f7581ff744b67382eb6021c868322e32eef41"}, + {file = "watchdog-3.0.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:a70a8dcde91be523c35b2bf96196edc5730edb347e374c7de7cd20c43ed95397"}, + {file = "watchdog-3.0.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:adfdeab2da79ea2f76f87eb42a3ab1966a5313e5a69a0213a3cc06ef692b0e96"}, + {file = "watchdog-3.0.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:2b57a1e730af3156d13b7fdddfc23dea6487fceca29fc75c5a868beed29177ae"}, + {file = "watchdog-3.0.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:7ade88d0d778b1b222adebcc0927428f883db07017618a5e684fd03b83342bd9"}, + {file = "watchdog-3.0.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:7e447d172af52ad204d19982739aa2346245cc5ba6f579d16dac4bfec226d2e7"}, + {file = "watchdog-3.0.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:9fac43a7466eb73e64a9940ac9ed6369baa39b3bf221ae23493a9ec4d0022674"}, + {file = "watchdog-3.0.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:8ae9cda41fa114e28faf86cb137d751a17ffd0316d1c34ccf2235e8a84365c7f"}, + {file = "watchdog-3.0.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:25f70b4aa53bd743729c7475d7ec41093a580528b100e9a8c5b5efe8899592fc"}, + {file = "watchdog-3.0.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:4f94069eb16657d2c6faada4624c39464f65c05606af50bb7902e036e3219be3"}, + {file = "watchdog-3.0.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:7c5f84b5194c24dd573fa6472685b2a27cc5a17fe5f7b6fd40345378ca6812e3"}, + {file = "watchdog-3.0.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:3aa7f6a12e831ddfe78cdd4f8996af9cf334fd6346531b16cec61c3b3c0d8da0"}, + {file = "watchdog-3.0.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:233b5817932685d39a7896b1090353fc8efc1ef99c9c054e46c8002561252fb8"}, + {file = 
"watchdog-3.0.0-pp37-pypy37_pp73-macosx_10_9_x86_64.whl", hash = "sha256:13bbbb462ee42ec3c5723e1205be8ced776f05b100e4737518c67c8325cf6100"}, + {file = "watchdog-3.0.0-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:8f3ceecd20d71067c7fd4c9e832d4e22584318983cabc013dbf3f70ea95de346"}, + {file = "watchdog-3.0.0-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:c9d8c8ec7efb887333cf71e328e39cffbf771d8f8f95d308ea4125bf5f90ba64"}, + {file = "watchdog-3.0.0-py3-none-manylinux2014_aarch64.whl", hash = "sha256:0e06ab8858a76e1219e68c7573dfeba9dd1c0219476c5a44d5333b01d7e1743a"}, + {file = "watchdog-3.0.0-py3-none-manylinux2014_armv7l.whl", hash = "sha256:d00e6be486affb5781468457b21a6cbe848c33ef43f9ea4a73b4882e5f188a44"}, + {file = "watchdog-3.0.0-py3-none-manylinux2014_i686.whl", hash = "sha256:c07253088265c363d1ddf4b3cdb808d59a0468ecd017770ed716991620b8f77a"}, + {file = "watchdog-3.0.0-py3-none-manylinux2014_ppc64.whl", hash = "sha256:5113334cf8cf0ac8cd45e1f8309a603291b614191c9add34d33075727a967709"}, + {file = "watchdog-3.0.0-py3-none-manylinux2014_ppc64le.whl", hash = "sha256:51f90f73b4697bac9c9a78394c3acbbd331ccd3655c11be1a15ae6fe289a8c83"}, + {file = "watchdog-3.0.0-py3-none-manylinux2014_s390x.whl", hash = "sha256:ba07e92756c97e3aca0912b5cbc4e5ad802f4557212788e72a72a47ff376950d"}, + {file = "watchdog-3.0.0-py3-none-manylinux2014_x86_64.whl", hash = "sha256:d429c2430c93b7903914e4db9a966c7f2b068dd2ebdd2fa9b9ce094c7d459f33"}, + {file = "watchdog-3.0.0-py3-none-win32.whl", hash = "sha256:3ed7c71a9dccfe838c2f0b6314ed0d9b22e77d268c67e015450a29036a81f60f"}, + {file = "watchdog-3.0.0-py3-none-win_amd64.whl", hash = "sha256:4c9956d27be0bb08fc5f30d9d0179a855436e655f046d288e2bcc11adfae893c"}, + {file = "watchdog-3.0.0-py3-none-win_ia64.whl", hash = "sha256:5d9f3a10e02d7371cd929b5d8f11e87d4bad890212ed3901f9b4d68767bee759"}, + {file = "watchdog-3.0.0.tar.gz", hash = "sha256:4d98a320595da7a7c5a18fc48cb633c2e73cda78f93cac2ef42d42bf609a33f9"}, +] + +[package.extras] 
+watchmedo = ["PyYAML (>=3.10)"] + +[[package]] +name = "zipp" +version = "3.17.0" +description = "Backport of pathlib-compatible object wrapper for zip files" +optional = false +python-versions = ">=3.8" +files = [ + {file = "zipp-3.17.0-py3-none-any.whl", hash = "sha256:0e923e726174922dce09c53c59ad483ff7bbb8e572e00c7f7c46b88556409f31"}, + {file = "zipp-3.17.0.tar.gz", hash = "sha256:84e64a1c28cf7e91ed2078bb8cc8c259cb19b76942096c8d7b84947690cabaf0"}, +] + +[package.extras] +docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (<7.2.5)", "sphinx (>=3.5)", "sphinx-lint"] +testing = ["big-O", "jaraco.functools", "jaraco.itertools", "more-itertools", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-ignore-flaky", "pytest-mypy (>=0.9.1)", "pytest-ruff"] + +[metadata] +lock-version = "2.0" +python-versions = ">=3.8.1,<3.9.7 || >3.9.7,<4.0" +content-hash = "baa3a911fd0ea6350a85fa259540dbefa142d7fce694fde97ddb57fb2e75183a" diff --git a/samples/ServalApp/pyproject.toml b/samples/ServalApp/pyproject.toml index 536c96ce..c936d1c3 100644 --- a/samples/ServalApp/pyproject.toml +++ b/samples/ServalApp/pyproject.toml @@ -6,12 +6,14 @@ authors = ["Eli Lowry "] readme = "README.md" [tool.poetry.dependencies] -python = ">=3.8,<3.9.7 || >3.9.7,<4.0" -email = "^4.0.2" +python = ">=3.8.1,<3.9.7 || >3.9.7,<4.0" streamlit = "^1.27.2" requests = "^2.31.0" SQLAlchemy = "^2.0.22" +[tool.poetry.group.dev.dependencies] +black = "^23.10.1" +flake8 = "^6.1.0" [build-system] requires = ["poetry-core"] diff --git a/samples/ServalApp/serval_app.py b/samples/ServalApp/serval_app.py index 6abe56b8..9935aacf 100644 --- a/samples/ServalApp/serval_app.py +++ b/samples/ServalApp/serval_app.py @@ -1,22 +1,31 @@ import json import os import re -import traceback from threading import Thread from time import sleep import streamlit as st from db import Build, State, create_db_if_not_exists 
from serval_auth_module import ServalBearerAuth -from serval_client_module import (PretranslateCorpusConfig, RemoteCaller, TranslationBuildConfig, - TranslationCorpusConfig, TranslationCorpusFileConfig, TranslationEngineConfig) +from serval_client_module import ( + PretranslateCorpusConfig, + RemoteCaller, + TranslationBuildConfig, + TranslationCorpusConfig, + TranslationCorpusFileConfig, + TranslationEngineConfig, +) from serval_email_module import ServalAppEmailServer from sqlalchemy import create_engine from sqlalchemy.orm import sessionmaker +from streamlit.logger import get_logger, set_log_level from streamlit.runtime.scriptrunner import add_script_run_ctx create_db_if_not_exists() +set_log_level("INFO") +logger = get_logger(__name__) + def send_emails(): engine = create_engine("sqlite:///builds.db") @@ -25,8 +34,8 @@ def send_emails(): try: def started(build: Build, email_server: ServalAppEmailServer, data=None): - print(f"\tStarted {build}") - email_server.send_build_started_email(build.email) + logger.info(f"Started:\n{build}") + email_server.send_build_started_email(build.email, str(build)) session.delete(build) session.add( Build( @@ -39,12 +48,12 @@ def started(build: Build, email_server: ServalAppEmailServer, data=None): ) def faulted(build: Build, email_server: ServalAppEmailServer, data=None): - print(f"\tFaulted {build}") - email_server.send_build_faulted_email(build.email, error=data) + logger.warn(f"Faulted:\n{build}") + email_server.send_build_faulted_email(build.email, str(build), error=data) session.delete(build) def completed(build: Build, email_server: ServalAppEmailServer, data=None): - print(f"\tCompleted {build}") + logger.info(f"Completed:\{build}") pretranslations = client.translation_engines_get_all_pretranslations( build.engine_id, build.corpus_id ) @@ -56,11 +65,12 @@ def completed(build: Build, email_server: ServalAppEmailServer, data=None): for pretranslation in pretranslations ] ), + str(build), ) session.delete(build) - def 
update(build: Build, email_server: ServalAppEmailServer, data=None): - print(f"\tUpdated {build}") + def default_update(build: Build, email_server: ServalAppEmailServer, data=None): + logger.info(f"Updated:\n{build}") serval_auth = ServalBearerAuth() client = RemoteCaller( @@ -79,21 +89,22 @@ def get_update(build: Build, email_server: ServalAppEmailServer): if build.state == State.Pending and build_update.state == "Active": started(build, email_server) else: - responses.get(build_update.state, update)( + responses.get(build_update.state, default_update)( build, email_server, build_update.message ) session.commit() def send_updates(email_server: ServalAppEmailServer): - print("Checking for updates...") + logger.info("Checking for updates...") with session.no_autoflush: builds = session.query(Build).all() for build in builds: try: get_update(build, email_server) except Exception as e: - print(f"\tFailed to update {build} because of exception {e}") - traceback.print_exc() + logger.error( + f"Failed to update {build} because of exception {e}" + ) raise e with ServalAppEmailServer( @@ -101,9 +112,9 @@ def send_updates(email_server: ServalAppEmailServer): ) as email_server: while True: send_updates(email_server) - sleep(os.environ.get("SERVAL_APP_UPDATE_FREQ_SEC", 300)) + sleep(int(os.environ.get("SERVAL_APP_UPDATE_FREQ_SEC", 300))) except Exception as e: - print(e) + logger.exception(e) st.session_state["background_process_has_started"] = False @@ -153,7 +164,9 @@ def submit(): source_language=st.session_state["source_language"], target_language=st.session_state["target_language"], type="Nmt", - name=f'serval_app_engine:{st.session_state["email"]}', + name=st.session_state["build_name"] + if "build_name" in st.session_state + else f'serval_app_engine:{st.session_state["email"]}', ) ) ) @@ -162,7 +175,8 @@ def submit(): client.data_files_create( st.session_state["source_files"][i], format="Paratext" - if st.session_state["source_files"][i].name[-4:] == ".zip" + if 
st.session_state["source_files"][i].name[-4:] + in [".zip", ".tar", "r.gz"] else "Text", ) ) @@ -223,6 +237,9 @@ def submit(): options='{"max_steps":' + str(os.environ.get("SERVAL_APP_MAX_STEPS", 10)) + "}", + name=st.session_state["build_name"] + if "build_name" in st.session_state + else f'serval_app_engine:{st.session_state["email"]}', ), ) ) @@ -233,17 +250,35 @@ def submit(): email=st.session_state["email"], state=build["state"], corpus_id=corpus["id"], + client_id=st.session_state["client_id"], + source_files=", ".join( + list(map(lambda f: f.name, st.session_state["source_files"])) + ), + target_files=", ".join( + list(map(lambda f: f.name, st.session_state["target_files"])) + ), + name=st.session_state["build_name"], ) ) session.commit() - def already_active_build_for(email: str): - return len(session.query(Build).where(Build.email == email).all()) > 0 + def already_active_build_for(email: str, client: str): + return ( + len( + session.query(Build) + .where(Build.email == email and Build.client_id == client) + .all() + ) + > 0 + ) st.subheader("Neural Machine Translation") tried_to_submit = st.session_state.get("tried_to_submit", False) with st.form(key="NmtTranslationForm"): + st.session_state["build_name"] = st.text_input( + label="Build Name", placeholder="MyBuild (Optional)" + ) st.session_state["source_language"] = st.text_input( label="Source language tag*", placeholder="en" ) @@ -294,11 +329,13 @@ def already_active_build_for(email: str): ) ) if st.form_submit_button("Generate translations"): - if already_active_build_for(st.session_state["email"]): + if already_active_build_for( + st.session_state["email"], st.session_state["client_id"] + ): st.session_state["tried_to_submit"] = True st.session_state[ "error" - ] = "There is already an a pending or active build associated with this email address. \ + ] = "There is already an a pending or active build associated with this email address and client id. \ Please wait for the previous build to finish." 
st.rerun() elif ( diff --git a/samples/ServalApp/serval_email_module.py b/samples/ServalApp/serval_email_module.py index 2ad8c38a..daa58e17 100644 --- a/samples/ServalApp/serval_email_module.py +++ b/samples/ServalApp/serval_email_module.py @@ -31,11 +31,11 @@ def __exit__(self, *args): self.server.close() def send_build_completed_email( - self, recipient_address: str, pretranslations_file_data: str + self, recipient_address: str, pretranslations_file_data: str, build_info: str ): msg = EmailMessage() msg.set_content( - """Hi! + f"""Hi! Your NMT engine has completed building. Attached are the \ translations of untranslated source text in the files you included. @@ -43,6 +43,8 @@ def send_build_completed_email( If you are experiencing difficulties using this application, please contact eli_lowry@sil.org. Thank you! + +{build_info} """ ) msg["From"] = self.sender_address @@ -51,7 +53,9 @@ def send_build_completed_email( msg.add_attachment(pretranslations_file_data, filename="translations.txt") self.server.send_message(msg) - def send_build_faulted_email(self, recipient_address: str, error=""): + def send_build_faulted_email( + self, recipient_address: str, build_info: str, error="" + ): msg = EmailMessage() msg.set_content( f"""Hi! @@ -62,6 +66,8 @@ def send_build_faulted_email(self, recipient_address: str, error=""): If you continue to experience difficulties using this application, please contact eli_lowry@sil.org. Thank you! + +{build_info} """ ) msg["From"] = self.sender_address @@ -69,16 +75,17 @@ def send_build_faulted_email(self, recipient_address: str, error=""): msg["Subject"] = "Your NMT build job has failed" self.server.send_message(msg) - def send_build_started_email(self, recipient_address: str): + def send_build_started_email(self, recipient_address: str, build_info: str): msg = EmailMessage() msg.set_content( - """Hi! + f"""Hi! Your NMT engine has started building. We will contact you when it is complete. 
If you are experiencing difficulties using this application, please contact eli_lowry@sil.org. Thank you! +{build_info} """ ) msg["From"] = self.sender_address From eed6bc5af0d0ffb90b41798c814ffd6951b3ab79 Mon Sep 17 00:00:00 2001 From: Enkidu93 Date: Tue, 24 Oct 2023 15:29:41 -0400 Subject: [PATCH 13/13] Add link to swagger concerning language codes --- samples/ServalApp/serval_app.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/samples/ServalApp/serval_app.py b/samples/ServalApp/serval_app.py index 9935aacf..22ab9734 100644 --- a/samples/ServalApp/serval_app.py +++ b/samples/ServalApp/serval_app.py @@ -359,7 +359,7 @@ def already_active_build_for(email: str, client: str): ] = "Some required fields were left blank. Please fill in all fields above" st.rerun() st.markdown( - "\* Use IETF tags if possible. See [here](https://en.wikipedia.org/wiki/IETF_language_tag) \ - for more information on IETF tags.", + f"\* Use IETF tags if possible. See [here](https://en.wikipedia.org/wiki/IETF_language_tag) \ + for more information on IETF tags. For more details, see [the Serval API documentation]({os.environ.get('SERVAL_HOST_URL')}/swagger/index.html#/Translation%20Engines/TranslationEngines_Create).", unsafe_allow_html=True, )