diff --git a/CODE_OF_CONDUCT.md b/CODE_OF_CONDUCT.md new file mode 100644 index 00000000..b386cc11 --- /dev/null +++ b/CODE_OF_CONDUCT.md @@ -0,0 +1,128 @@ +# Contributor Covenant Code of Conduct + +## Our Pledge + +We as members, contributors, and leaders pledge to make participation in our +community a harassment-free experience for everyone, regardless of age, body +size, visible or invisible disability, ethnicity, sex characteristics, gender +identity and expression, level of experience, education, socio-economic status, +nationality, personal appearance, race, religion, or sexual identity +and orientation. + +We pledge to act and interact in ways that contribute to an open, welcoming, +diverse, inclusive, and healthy community. + +## Our Standards + +Examples of behavior that contributes to a positive environment for our +community include: + +* Demonstrating empathy and kindness toward other people +* Being respectful of differing opinions, viewpoints, and experiences +* Giving and gracefully accepting constructive feedback +* Accepting responsibility and apologizing to those affected by our mistakes, + and learning from the experience +* Focusing on what is best not just for us as individuals, but for the + overall community + +Examples of unacceptable behavior include: + +* The use of sexualized language or imagery, and sexual attention or + advances of any kind +* Trolling, insulting or derogatory comments, and personal or political attacks +* Public or private harassment +* Publishing others' private information, such as a physical or email + address, without their explicit permission +* Other conduct which could reasonably be considered inappropriate in a + professional setting + +## Enforcement Responsibilities + +Community leaders are responsible for clarifying and enforcing our standards of +acceptable behavior and will take appropriate and fair corrective action in +response to any behavior that they deem inappropriate, threatening, offensive, +or harmful. + +Community leaders have the right and responsibility to remove, edit, or reject +comments, commits, code, wiki edits, issues, and other contributions that are +not aligned to this Code of Conduct, and will communicate reasons for moderation +decisions when appropriate. + +## Scope + +This Code of Conduct applies within all community spaces, and also applies when +an individual is officially representing the community in public spaces. +Examples of representing our community include using an official e-mail address, +posting via an official social media account, or acting as an appointed +representative at an online or offline event. + +## Enforcement + +Instances of abusive, harassing, or otherwise unacceptable behavior may be +reported to the community leaders responsible for enforcement at +ops@nixtla.io. +All complaints will be reviewed and investigated promptly and fairly. + +All community leaders are obligated to respect the privacy and security of the +reporter of any incident. + +## Enforcement Guidelines + +Community leaders will follow these Community Impact Guidelines in determining +the consequences for any action they deem in violation of this Code of Conduct: + +### 1. Correction + +**Community Impact**: Use of inappropriate language or other behavior deemed +unprofessional or unwelcome in the community. + +**Consequence**: A private, written warning from community leaders, providing +clarity around the nature of the violation and an explanation of why the +behavior was inappropriate. 
A public apology may be requested. + +### 2. Warning + +**Community Impact**: A violation through a single incident or series +of actions. + +**Consequence**: A warning with consequences for continued behavior. No +interaction with the people involved, including unsolicited interaction with +those enforcing the Code of Conduct, for a specified period of time. This +includes avoiding interactions in community spaces as well as external channels +like social media. Violating these terms may lead to a temporary or +permanent ban. + +### 3. Temporary Ban + +**Community Impact**: A serious violation of community standards, including +sustained inappropriate behavior. + +**Consequence**: A temporary ban from any sort of interaction or public +communication with the community for a specified period of time. No public or +private interaction with the people involved, including unsolicited interaction +with those enforcing the Code of Conduct, is allowed during this period. +Violating these terms may lead to a permanent ban. + +### 4. Permanent Ban + +**Community Impact**: Demonstrating a pattern of violation of community +standards, including sustained inappropriate behavior, harassment of an +individual, or aggression toward or disparagement of classes of individuals. + +**Consequence**: A permanent ban from any sort of public interaction within +the community. + +## Attribution + +This Code of Conduct is adapted from the [Contributor Covenant][homepage], +version 2.0, available at +https://www.contributor-covenant.org/version/2/0/code_of_conduct.html. + +Community Impact Guidelines were inspired by [Mozilla's code of conduct +enforcement ladder](https://github.com/mozilla/diversity). + +[homepage]: https://www.contributor-covenant.org + +For answers to common questions about this code of conduct, see the FAQ at +https://www.contributor-covenant.org/faq. Translations are available at +https://www.contributor-covenant.org/translations. diff --git a/nixtla/client.py b/nixtla/client.py new file mode 100644 index 00000000..77a8d77b --- /dev/null +++ b/nixtla/client.py @@ -0,0 +1,889 @@ +# This file was auto-generated by Fern from our API Definition. + +import typing +import urllib.parse +from json.decoder import JSONDecodeError + +import httpx +import pydantic + +from .core.api_error import ApiError +from .core.client_wrapper import AsyncClientWrapper, SyncClientWrapper +from .core.jsonable_encoder import jsonable_encoder +from .errors.unprocessable_entity_error import UnprocessableEntityError +from .types.http_validation_error import HttpValidationError +from .types.multi_series_anomaly import MultiSeriesAnomaly +from .types.multi_series_cross_validation import MultiSeriesCrossValidation +from .types.multi_series_forecast import MultiSeriesForecast +from .types.multi_series_insample_forecast import MultiSeriesInsampleForecast +from .types.single_series_forecast import SingleSeriesForecast +from .types.single_series_insample_forecast import SingleSeriesInsampleForecast + +# this is used as the default value for optional parameters +OMIT = typing.cast(typing.Any, ...) 
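+
+# Minimal usage sketch (illustrative only; the base URL below is a placeholder,
+# not a confirmed endpoint):
+#
+#     client = Nixtla(base_url="https://your-nixtla-endpoint", token="YOUR_TOKEN")
+#     client.validate_token()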
+ + +class Nixtla: + def __init__( + self, *, base_url: str, token: typing.Union[str, typing.Callable[[], str]], timeout: typing.Optional[float] = 60 + ): + self._client_wrapper = SyncClientWrapper( + base_url=base_url, token=token, httpx_client=httpx.Client(timeout=timeout) + ) + + def validate_token(self) -> typing.Any: + _response = self._client_wrapper.httpx_client.request( + "POST", + urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", "validate_token"), + headers=self._client_wrapper.get_headers(), + timeout=60, + ) + if 200 <= _response.status_code < 300: + return pydantic.parse_obj_as(typing.Any, _response.json()) # type: ignore + try: + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, body=_response.text) + raise ApiError(status_code=_response.status_code, body=_response_json) + + def validate_token_front(self) -> typing.Any: + _response = self._client_wrapper.httpx_client.request( + "POST", + urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", "validate_token_front"), + headers=self._client_wrapper.get_headers(), + timeout=60, + ) + if 200 <= _response.status_code < 300: + return pydantic.parse_obj_as(typing.Any, _response.json()) # type: ignore + try: + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, body=_response.text) + raise ApiError(status_code=_response.status_code, body=_response_json) + + def health_health_get(self) -> typing.Any: + """ + Check if server is healthy. Used by the readiness probe to check server is healthy. + """ + _response = self._client_wrapper.httpx_client.request( + "GET", + urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", "health"), + headers=self._client_wrapper.get_headers(), + timeout=60, + ) + if 200 <= _response.status_code < 300: + return pydantic.parse_obj_as(typing.Any, _response.json()) # type: ignore + try: + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, body=_response.text) + raise ApiError(status_code=_response.status_code, body=_response_json) + + def forecast(self, *, request: SingleSeriesForecast) -> typing.Any: + """ + This endpoint predicts the future values of a single time series based on the provided data. It takes a JSON as an input containing information like the series frequency and historical data. (See below for a full description of the parameters.) The response contains the predicted values based on the input arguments. Get your token for private beta at https://dashboard.nixtla.io + + Parameters: + - request: SingleSeriesForecast. 
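+
+        Example (a sketch only; the payload fields below are assumptions rather than
+        a confirmed schema; consult SingleSeriesForecast for the actual fields):
+
+            client = Nixtla(base_url="https://your-nixtla-endpoint", token="YOUR_TOKEN")
+            response = client.forecast(
+                request=SingleSeriesForecast(freq="D", fh=7, y={"2021-01-01": 39.0, "2021-01-02": 42.0})
+            )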
+ """ + _response = self._client_wrapper.httpx_client.request( + "POST", + urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", "forecast"), + json=jsonable_encoder(request), + headers=self._client_wrapper.get_headers(), + timeout=60, + ) + if 200 <= _response.status_code < 300: + return pydantic.parse_obj_as(typing.Any, _response.json()) # type: ignore + if _response.status_code == 422: + raise UnprocessableEntityError(pydantic.parse_obj_as(HttpValidationError, _response.json())) # type: ignore + try: + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, body=_response.text) + raise ApiError(status_code=_response.status_code, body=_response_json) + + def historic_forecast(self, *, request: SingleSeriesInsampleForecast) -> typing.Any: + """ + Based on the provided data, this endpoint predicts time series data for the in-sample period (historical period). It takes a JSON as an input, including information like the series frequency and the historical data. (See below for a full description of the parameters.) The response contains the predicted values for the historical period. Usually useful for anomaly detection. Get your token for private beta at https://dashboard.nixtla.io. + + Parameters: + - request: SingleSeriesInsampleForecast. + """ + _response = self._client_wrapper.httpx_client.request( + "POST", + urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", "historic_forecast"), + json=jsonable_encoder(request), + headers=self._client_wrapper.get_headers(), + timeout=60, + ) + if 200 <= _response.status_code < 300: + return pydantic.parse_obj_as(typing.Any, _response.json()) # type: ignore + if _response.status_code == 422: + raise UnprocessableEntityError(pydantic.parse_obj_as(HttpValidationError, _response.json())) # type: ignore + try: + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, body=_response.text) + raise ApiError(status_code=_response.status_code, body=_response_json) + + def forecast_multi_series(self, *, request: MultiSeriesForecast) -> typing.Any: + """ + Based on the provided data, this endpoint predicts the future values of multiple time series at once. It takes a JSON as an input containing information like the series frequency and historical data. (See below for a full description of the parameters.) The response contains the predicted values for each series based on the input arguments. Get your token for private beta at https://dashboard.nixtla.io. + + Parameters: + - request: MultiSeriesForecast. 
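+
+        Example (a sketch only; the dictionary-of-columns layout for `y` mirrors the
+        documented exogenous format and is otherwise an assumption):
+
+            response = client.forecast_multi_series(
+                request=MultiSeriesForecast(
+                    freq="D",
+                    fh=2,
+                    y={"columns": ["unique_id", "ds", "y"],
+                       "data": [["ts_0", "2021-01-01", 0.2], ["ts_0", "2021-01-02", 0.4]]},
+                )
+            )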
+        """
+        _response = self._client_wrapper.httpx_client.request(
+            "POST",
+            urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", "forecast_multi_series"),
+            json=jsonable_encoder(request),
+            headers=self._client_wrapper.get_headers(),
+            timeout=60,
+        )
+        if 200 <= _response.status_code < 300:
+            return pydantic.parse_obj_as(typing.Any, _response.json())  # type: ignore
+        if _response.status_code == 422:
+            raise UnprocessableEntityError(pydantic.parse_obj_as(HttpValidationError, _response.json()))  # type: ignore
+        try:
+            _response_json = _response.json()
+        except JSONDecodeError:
+            raise ApiError(status_code=_response.status_code, body=_response.text)
+        raise ApiError(status_code=_response.status_code, body=_response_json)
+
+    def historic_forecast_multi_series(self, *, request: MultiSeriesInsampleForecast) -> typing.Any:
+        """
+        Based on the provided data, this endpoint predicts the in-sample period (historical period) values of multiple time series at once. It takes a JSON as an input containing information like the series frequency and historical data. (See below for a full description of the parameters.) The response contains the predicted values for the historical period. Usually useful for anomaly detection. Get your token for private beta at https://dashboard.nixtla.io.
+
+        Parameters:
+            - request: MultiSeriesInsampleForecast.
+        """
+        _response = self._client_wrapper.httpx_client.request(
+            "POST",
+            urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", "historic_forecast_multi_series"),
+            json=jsonable_encoder(request),
+            headers=self._client_wrapper.get_headers(),
+            timeout=60,
+        )
+        if 200 <= _response.status_code < 300:
+            return pydantic.parse_obj_as(typing.Any, _response.json())  # type: ignore
+        if _response.status_code == 422:
+            raise UnprocessableEntityError(pydantic.parse_obj_as(HttpValidationError, _response.json()))  # type: ignore
+        try:
+            _response_json = _response.json()
+        except JSONDecodeError:
+            raise ApiError(status_code=_response.status_code, body=_response.text)
+        raise ApiError(status_code=_response.status_code, body=_response_json)
+
+    def anomaly_detection_multi_series(self, *, request: MultiSeriesAnomaly) -> typing.Any:
+        """
+        Based on the provided data, this endpoint detects the anomalies in the historical period of multiple time series at once. It takes a JSON as an input containing information like the series frequency and historical data. (See below for a full description of the parameters.) The response contains a flag indicating whether each date has an anomaly and also provides the prediction interval used to define whether an observation is an anomaly. Get your token for private beta at https://dashboard.nixtla.io.
+
+        Parameters:
+            - request: MultiSeriesAnomaly.
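+
+        Example (a sketch only; `freq` and `level` are fields of MultiSeriesAnomaly in
+        this SDK, while the layout of `y` is an assumption):
+
+            response = client.anomaly_detection_multi_series(
+                request=MultiSeriesAnomaly(
+                    freq="D",
+                    level=[99],  # confidence level of the prediction interval used to flag anomalies
+                    y={"columns": ["unique_id", "ds", "y"],
+                       "data": [["ts_0", "2021-01-01", 0.2], ["ts_0", "2021-01-02", 0.4]]},
+                )
+            )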
+ """ + _response = self._client_wrapper.httpx_client.request( + "POST", + urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", "anomaly_detection_multi_series"), + json=jsonable_encoder(request), + headers=self._client_wrapper.get_headers(), + timeout=60, + ) + if 200 <= _response.status_code < 300: + return pydantic.parse_obj_as(typing.Any, _response.json()) # type: ignore + if _response.status_code == 422: + raise UnprocessableEntityError(pydantic.parse_obj_as(HttpValidationError, _response.json())) # type: ignore + try: + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, body=_response.text) + raise ApiError(status_code=_response.status_code, body=_response_json) + + def cross_validation_multi_series(self, *, request: MultiSeriesCrossValidation) -> typing.Any: + """ + Perform Cross Validation for multiple series + + Parameters: + - request: MultiSeriesCrossValidation. + """ + _response = self._client_wrapper.httpx_client.request( + "POST", + urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", "cross_validation_multi_series"), + json=jsonable_encoder(request), + headers=self._client_wrapper.get_headers(), + timeout=60, + ) + if 200 <= _response.status_code < 300: + return pydantic.parse_obj_as(typing.Any, _response.json()) # type: ignore + if _response.status_code == 422: + raise UnprocessableEntityError(pydantic.parse_obj_as(HttpValidationError, _response.json())) # type: ignore + try: + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, body=_response.text) + raise ApiError(status_code=_response.status_code, body=_response_json) + + def model_input_size(self, *, request: SingleSeriesForecast) -> typing.Any: + """ + Parameters: + - request: SingleSeriesForecast. + """ + _response = self._client_wrapper.httpx_client.request( + "POST", + urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", "model_input_size"), + json=jsonable_encoder(request), + headers=self._client_wrapper.get_headers(), + timeout=60, + ) + if 200 <= _response.status_code < 300: + return pydantic.parse_obj_as(typing.Any, _response.json()) # type: ignore + if _response.status_code == 422: + raise UnprocessableEntityError(pydantic.parse_obj_as(HttpValidationError, _response.json())) # type: ignore + try: + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, body=_response.text) + raise ApiError(status_code=_response.status_code, body=_response_json) + + def model_params(self, *, request: SingleSeriesForecast) -> typing.Any: + """ + Parameters: + - request: SingleSeriesForecast. 
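+
+        Example call pattern (a sketch only; the response schema is not specified in
+        this API definition, and the payload fields shown are assumptions):
+
+            params = client.model_params(request=SingleSeriesForecast(freq="D", fh=7))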
+ """ + _response = self._client_wrapper.httpx_client.request( + "POST", + urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", "model_params"), + json=jsonable_encoder(request), + headers=self._client_wrapper.get_headers(), + timeout=60, + ) + if 200 <= _response.status_code < 300: + return pydantic.parse_obj_as(typing.Any, _response.json()) # type: ignore + if _response.status_code == 422: + raise UnprocessableEntityError(pydantic.parse_obj_as(HttpValidationError, _response.json())) # type: ignore + try: + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, body=_response.text) + raise ApiError(status_code=_response.status_code, body=_response_json) + + def timegpt(self, *, request: SingleSeriesForecast) -> typing.Any: + """ + This endpoint predicts the future values of a single time series based on the provided data. It takes a JSON as an input containing information like the series frequency and historical data. (See below for a full description of the parameters.) The response contains the predicted values based on the input arguments. Get your token for private beta at https://dashboard.nixtla.io + + Parameters: + - request: SingleSeriesForecast. + """ + _response = self._client_wrapper.httpx_client.request( + "POST", + urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", "timegpt"), + json=jsonable_encoder(request), + headers=self._client_wrapper.get_headers(), + timeout=60, + ) + if 200 <= _response.status_code < 300: + return pydantic.parse_obj_as(typing.Any, _response.json()) # type: ignore + if _response.status_code == 422: + raise UnprocessableEntityError(pydantic.parse_obj_as(HttpValidationError, _response.json())) # type: ignore + try: + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, body=_response.text) + raise ApiError(status_code=_response.status_code, body=_response_json) + + def timegpt_historic(self, *, request: SingleSeriesInsampleForecast) -> typing.Any: + """ + Based on the provided data, this endpoint predicts time series data for the in-sample period (historical period). It takes a JSON as an input, including information like the series frequency and the historical data. (See below for a full description of the parameters.) The response contains the predicted values for the historical period. Usually useful for anomaly detection. Get your token for private beta at https://dashboard.nixtla.io. + + Parameters: + - request: SingleSeriesInsampleForecast. + """ + _response = self._client_wrapper.httpx_client.request( + "POST", + urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", "timegpt_historic"), + json=jsonable_encoder(request), + headers=self._client_wrapper.get_headers(), + timeout=60, + ) + if 200 <= _response.status_code < 300: + return pydantic.parse_obj_as(typing.Any, _response.json()) # type: ignore + if _response.status_code == 422: + raise UnprocessableEntityError(pydantic.parse_obj_as(HttpValidationError, _response.json())) # type: ignore + try: + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, body=_response.text) + raise ApiError(status_code=_response.status_code, body=_response_json) + + def timegpt_multi_series(self, *, request: MultiSeriesForecast) -> typing.Any: + """ + Based on the provided data, this endpoint predicts the future values of multiple time series at once. 
It takes a JSON as an input containing information like the series frequency and historical data. (See below for a full description of the parameters.) The response contains the predicted values for each series based on the input arguments. Get your token for private beta at https://dashboard.nixtla.io.
+
+        Parameters:
+            - request: MultiSeriesForecast.
+        """
+        _response = self._client_wrapper.httpx_client.request(
+            "POST",
+            urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", "timegpt_multi_series"),
+            json=jsonable_encoder(request),
+            headers=self._client_wrapper.get_headers(),
+            timeout=60,
+        )
+        if 200 <= _response.status_code < 300:
+            return pydantic.parse_obj_as(typing.Any, _response.json())  # type: ignore
+        if _response.status_code == 422:
+            raise UnprocessableEntityError(pydantic.parse_obj_as(HttpValidationError, _response.json()))  # type: ignore
+        try:
+            _response_json = _response.json()
+        except JSONDecodeError:
+            raise ApiError(status_code=_response.status_code, body=_response.text)
+        raise ApiError(status_code=_response.status_code, body=_response_json)
+
+    def timegpt_multi_series_historic(self, *, request: MultiSeriesInsampleForecast) -> typing.Any:
+        """
+        Based on the provided data, this endpoint predicts the in-sample period (historical period) values of multiple time series at once. It takes a JSON as an input containing information like the series frequency and historical data. (See below for a full description of the parameters.) The response contains the predicted values for the historical period. Usually useful for anomaly detection. Get your token for private beta at https://dashboard.nixtla.io.
+
+        Parameters:
+            - request: MultiSeriesInsampleForecast.
+        """
+        _response = self._client_wrapper.httpx_client.request(
+            "POST",
+            urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", "timegpt_multi_series_historic"),
+            json=jsonable_encoder(request),
+            headers=self._client_wrapper.get_headers(),
+            timeout=60,
+        )
+        if 200 <= _response.status_code < 300:
+            return pydantic.parse_obj_as(typing.Any, _response.json())  # type: ignore
+        if _response.status_code == 422:
+            raise UnprocessableEntityError(pydantic.parse_obj_as(HttpValidationError, _response.json()))  # type: ignore
+        try:
+            _response_json = _response.json()
+        except JSONDecodeError:
+            raise ApiError(status_code=_response.status_code, body=_response.text)
+        raise ApiError(status_code=_response.status_code, body=_response_json)
+
+    def timegpt_multi_series_anomalies(self, *, request: MultiSeriesAnomaly) -> typing.Any:
+        """
+        Based on the provided data, this endpoint detects the anomalies in the historical period of multiple time series at once. It takes a JSON as an input containing information like the series frequency and historical data. (See below for a full description of the parameters.) The response contains a flag indicating whether each date has an anomaly and also provides the prediction interval used to define whether an observation is an anomaly. Get your token for private beta at https://dashboard.nixtla.io.
+
+        Parameters:
+            - request: MultiSeriesAnomaly.
+ """ + _response = self._client_wrapper.httpx_client.request( + "POST", + urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", "timegpt_multi_series_anomalies"), + json=jsonable_encoder(request), + headers=self._client_wrapper.get_headers(), + timeout=60, + ) + if 200 <= _response.status_code < 300: + return pydantic.parse_obj_as(typing.Any, _response.json()) # type: ignore + if _response.status_code == 422: + raise UnprocessableEntityError(pydantic.parse_obj_as(HttpValidationError, _response.json())) # type: ignore + try: + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, body=_response.text) + raise ApiError(status_code=_response.status_code, body=_response_json) + + def timegpt_multi_series_cross_validation(self, *, request: MultiSeriesCrossValidation) -> typing.Any: + """ + Perform Cross Validation for multiple series + + Parameters: + - request: MultiSeriesCrossValidation. + """ + _response = self._client_wrapper.httpx_client.request( + "POST", + urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", "timegpt_multi_series_cross_validation"), + json=jsonable_encoder(request), + headers=self._client_wrapper.get_headers(), + timeout=60, + ) + if 200 <= _response.status_code < 300: + return pydantic.parse_obj_as(typing.Any, _response.json()) # type: ignore + if _response.status_code == 422: + raise UnprocessableEntityError(pydantic.parse_obj_as(HttpValidationError, _response.json())) # type: ignore + try: + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, body=_response.text) + raise ApiError(status_code=_response.status_code, body=_response_json) + + def timegpt_input_size(self, *, request: SingleSeriesForecast) -> typing.Any: + """ + Parameters: + - request: SingleSeriesForecast. + """ + _response = self._client_wrapper.httpx_client.request( + "POST", + urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", "timegpt_input_size"), + json=jsonable_encoder(request), + headers=self._client_wrapper.get_headers(), + timeout=60, + ) + if 200 <= _response.status_code < 300: + return pydantic.parse_obj_as(typing.Any, _response.json()) # type: ignore + if _response.status_code == 422: + raise UnprocessableEntityError(pydantic.parse_obj_as(HttpValidationError, _response.json())) # type: ignore + try: + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, body=_response.text) + raise ApiError(status_code=_response.status_code, body=_response_json) + + def timegpt_model_params(self, *, request: SingleSeriesForecast) -> typing.Any: + """ + Parameters: + - request: SingleSeriesForecast. 
+ """ + _response = self._client_wrapper.httpx_client.request( + "POST", + urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", "timegpt_model_params"), + json=jsonable_encoder(request), + headers=self._client_wrapper.get_headers(), + timeout=60, + ) + if 200 <= _response.status_code < 300: + return pydantic.parse_obj_as(typing.Any, _response.json()) # type: ignore + if _response.status_code == 422: + raise UnprocessableEntityError(pydantic.parse_obj_as(HttpValidationError, _response.json())) # type: ignore + try: + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, body=_response.text) + raise ApiError(status_code=_response.status_code, body=_response_json) + + +class AsyncNixtla: + def __init__( + self, *, base_url: str, token: typing.Union[str, typing.Callable[[], str]], timeout: typing.Optional[float] = 60 + ): + self._client_wrapper = AsyncClientWrapper( + base_url=base_url, token=token, httpx_client=httpx.AsyncClient(timeout=timeout) + ) + + async def validate_token(self) -> typing.Any: + _response = await self._client_wrapper.httpx_client.request( + "POST", + urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", "validate_token"), + headers=self._client_wrapper.get_headers(), + timeout=60, + ) + if 200 <= _response.status_code < 300: + return pydantic.parse_obj_as(typing.Any, _response.json()) # type: ignore + try: + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, body=_response.text) + raise ApiError(status_code=_response.status_code, body=_response_json) + + async def validate_token_front(self) -> typing.Any: + _response = await self._client_wrapper.httpx_client.request( + "POST", + urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", "validate_token_front"), + headers=self._client_wrapper.get_headers(), + timeout=60, + ) + if 200 <= _response.status_code < 300: + return pydantic.parse_obj_as(typing.Any, _response.json()) # type: ignore + try: + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, body=_response.text) + raise ApiError(status_code=_response.status_code, body=_response_json) + + async def health_health_get(self) -> typing.Any: + """ + Check if server is healthy. Used by the readiness probe to check server is healthy. + """ + _response = await self._client_wrapper.httpx_client.request( + "GET", + urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", "health"), + headers=self._client_wrapper.get_headers(), + timeout=60, + ) + if 200 <= _response.status_code < 300: + return pydantic.parse_obj_as(typing.Any, _response.json()) # type: ignore + try: + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, body=_response.text) + raise ApiError(status_code=_response.status_code, body=_response_json) + + async def forecast(self, *, request: SingleSeriesForecast) -> typing.Any: + """ + This endpoint predicts the future values of a single time series based on the provided data. It takes a JSON as an input containing information like the series frequency and historical data. (See below for a full description of the parameters.) The response contains the predicted values based on the input arguments. Get your token for private beta at https://dashboard.nixtla.io + + Parameters: + - request: SingleSeriesForecast. 
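+
+        Example (a sketch of the async call pattern; the request payload is elided
+        here, see the synchronous `forecast` for a field-level sketch):
+
+            import asyncio
+
+            async def main() -> None:
+                client = AsyncNixtla(base_url="https://your-nixtla-endpoint", token="YOUR_TOKEN")
+                response = await client.forecast(request=SingleSeriesForecast(...))
+
+            asyncio.run(main())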
+ """ + _response = await self._client_wrapper.httpx_client.request( + "POST", + urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", "forecast"), + json=jsonable_encoder(request), + headers=self._client_wrapper.get_headers(), + timeout=60, + ) + if 200 <= _response.status_code < 300: + return pydantic.parse_obj_as(typing.Any, _response.json()) # type: ignore + if _response.status_code == 422: + raise UnprocessableEntityError(pydantic.parse_obj_as(HttpValidationError, _response.json())) # type: ignore + try: + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, body=_response.text) + raise ApiError(status_code=_response.status_code, body=_response_json) + + async def historic_forecast(self, *, request: SingleSeriesInsampleForecast) -> typing.Any: + """ + Based on the provided data, this endpoint predicts time series data for the in-sample period (historical period). It takes a JSON as an input, including information like the series frequency and the historical data. (See below for a full description of the parameters.) The response contains the predicted values for the historical period. Usually useful for anomaly detection. Get your token for private beta at https://dashboard.nixtla.io. + + Parameters: + - request: SingleSeriesInsampleForecast. + """ + _response = await self._client_wrapper.httpx_client.request( + "POST", + urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", "historic_forecast"), + json=jsonable_encoder(request), + headers=self._client_wrapper.get_headers(), + timeout=60, + ) + if 200 <= _response.status_code < 300: + return pydantic.parse_obj_as(typing.Any, _response.json()) # type: ignore + if _response.status_code == 422: + raise UnprocessableEntityError(pydantic.parse_obj_as(HttpValidationError, _response.json())) # type: ignore + try: + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, body=_response.text) + raise ApiError(status_code=_response.status_code, body=_response_json) + + async def forecast_multi_series(self, *, request: MultiSeriesForecast) -> typing.Any: + """ + Based on the provided data, this endpoint predicts the future values of multiple time series at once. It takes a JSON as an input containing information like the series frequency and historical data. (See below for a full description of the parameters.) The response contains the predicted values for each series based on the input arguments. Get your token for private beta at https://dashboard.nixtla.io. + + Parameters: + - request: MultiSeriesForecast. 
+        """
+        _response = await self._client_wrapper.httpx_client.request(
+            "POST",
+            urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", "forecast_multi_series"),
+            json=jsonable_encoder(request),
+            headers=self._client_wrapper.get_headers(),
+            timeout=60,
+        )
+        if 200 <= _response.status_code < 300:
+            return pydantic.parse_obj_as(typing.Any, _response.json())  # type: ignore
+        if _response.status_code == 422:
+            raise UnprocessableEntityError(pydantic.parse_obj_as(HttpValidationError, _response.json()))  # type: ignore
+        try:
+            _response_json = _response.json()
+        except JSONDecodeError:
+            raise ApiError(status_code=_response.status_code, body=_response.text)
+        raise ApiError(status_code=_response.status_code, body=_response_json)
+
+    async def historic_forecast_multi_series(self, *, request: MultiSeriesInsampleForecast) -> typing.Any:
+        """
+        Based on the provided data, this endpoint predicts the in-sample period (historical period) values of multiple time series at once. It takes a JSON as an input containing information like the series frequency and historical data. (See below for a full description of the parameters.) The response contains the predicted values for the historical period. Usually useful for anomaly detection. Get your token for private beta at https://dashboard.nixtla.io.
+
+        Parameters:
+            - request: MultiSeriesInsampleForecast.
+        """
+        _response = await self._client_wrapper.httpx_client.request(
+            "POST",
+            urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", "historic_forecast_multi_series"),
+            json=jsonable_encoder(request),
+            headers=self._client_wrapper.get_headers(),
+            timeout=60,
+        )
+        if 200 <= _response.status_code < 300:
+            return pydantic.parse_obj_as(typing.Any, _response.json())  # type: ignore
+        if _response.status_code == 422:
+            raise UnprocessableEntityError(pydantic.parse_obj_as(HttpValidationError, _response.json()))  # type: ignore
+        try:
+            _response_json = _response.json()
+        except JSONDecodeError:
+            raise ApiError(status_code=_response.status_code, body=_response.text)
+        raise ApiError(status_code=_response.status_code, body=_response_json)
+
+    async def anomaly_detection_multi_series(self, *, request: MultiSeriesAnomaly) -> typing.Any:
+        """
+        Based on the provided data, this endpoint detects the anomalies in the historical period of multiple time series at once. It takes a JSON as an input containing information like the series frequency and historical data. (See below for a full description of the parameters.) The response contains a flag indicating whether each date has an anomaly and also provides the prediction interval used to define whether an observation is an anomaly. Get your token for private beta at https://dashboard.nixtla.io.
+
+        Parameters:
+            - request: MultiSeriesAnomaly.
+ """ + _response = await self._client_wrapper.httpx_client.request( + "POST", + urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", "anomaly_detection_multi_series"), + json=jsonable_encoder(request), + headers=self._client_wrapper.get_headers(), + timeout=60, + ) + if 200 <= _response.status_code < 300: + return pydantic.parse_obj_as(typing.Any, _response.json()) # type: ignore + if _response.status_code == 422: + raise UnprocessableEntityError(pydantic.parse_obj_as(HttpValidationError, _response.json())) # type: ignore + try: + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, body=_response.text) + raise ApiError(status_code=_response.status_code, body=_response_json) + + async def cross_validation_multi_series(self, *, request: MultiSeriesCrossValidation) -> typing.Any: + """ + Perform Cross Validation for multiple series + + Parameters: + - request: MultiSeriesCrossValidation. + """ + _response = await self._client_wrapper.httpx_client.request( + "POST", + urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", "cross_validation_multi_series"), + json=jsonable_encoder(request), + headers=self._client_wrapper.get_headers(), + timeout=60, + ) + if 200 <= _response.status_code < 300: + return pydantic.parse_obj_as(typing.Any, _response.json()) # type: ignore + if _response.status_code == 422: + raise UnprocessableEntityError(pydantic.parse_obj_as(HttpValidationError, _response.json())) # type: ignore + try: + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, body=_response.text) + raise ApiError(status_code=_response.status_code, body=_response_json) + + async def model_input_size(self, *, request: SingleSeriesForecast) -> typing.Any: + """ + Parameters: + - request: SingleSeriesForecast. + """ + _response = await self._client_wrapper.httpx_client.request( + "POST", + urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", "model_input_size"), + json=jsonable_encoder(request), + headers=self._client_wrapper.get_headers(), + timeout=60, + ) + if 200 <= _response.status_code < 300: + return pydantic.parse_obj_as(typing.Any, _response.json()) # type: ignore + if _response.status_code == 422: + raise UnprocessableEntityError(pydantic.parse_obj_as(HttpValidationError, _response.json())) # type: ignore + try: + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, body=_response.text) + raise ApiError(status_code=_response.status_code, body=_response_json) + + async def model_params(self, *, request: SingleSeriesForecast) -> typing.Any: + """ + Parameters: + - request: SingleSeriesForecast. 
+ """ + _response = await self._client_wrapper.httpx_client.request( + "POST", + urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", "model_params"), + json=jsonable_encoder(request), + headers=self._client_wrapper.get_headers(), + timeout=60, + ) + if 200 <= _response.status_code < 300: + return pydantic.parse_obj_as(typing.Any, _response.json()) # type: ignore + if _response.status_code == 422: + raise UnprocessableEntityError(pydantic.parse_obj_as(HttpValidationError, _response.json())) # type: ignore + try: + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, body=_response.text) + raise ApiError(status_code=_response.status_code, body=_response_json) + + async def timegpt(self, *, request: SingleSeriesForecast) -> typing.Any: + """ + This endpoint predicts the future values of a single time series based on the provided data. It takes a JSON as an input containing information like the series frequency and historical data. (See below for a full description of the parameters.) The response contains the predicted values based on the input arguments. Get your token for private beta at https://dashboard.nixtla.io + + Parameters: + - request: SingleSeriesForecast. + """ + _response = await self._client_wrapper.httpx_client.request( + "POST", + urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", "timegpt"), + json=jsonable_encoder(request), + headers=self._client_wrapper.get_headers(), + timeout=60, + ) + if 200 <= _response.status_code < 300: + return pydantic.parse_obj_as(typing.Any, _response.json()) # type: ignore + if _response.status_code == 422: + raise UnprocessableEntityError(pydantic.parse_obj_as(HttpValidationError, _response.json())) # type: ignore + try: + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, body=_response.text) + raise ApiError(status_code=_response.status_code, body=_response_json) + + async def timegpt_historic(self, *, request: SingleSeriesInsampleForecast) -> typing.Any: + """ + Based on the provided data, this endpoint predicts time series data for the in-sample period (historical period). It takes a JSON as an input, including information like the series frequency and the historical data. (See below for a full description of the parameters.) The response contains the predicted values for the historical period. Usually useful for anomaly detection. Get your token for private beta at https://dashboard.nixtla.io. + + Parameters: + - request: SingleSeriesInsampleForecast. + """ + _response = await self._client_wrapper.httpx_client.request( + "POST", + urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", "timegpt_historic"), + json=jsonable_encoder(request), + headers=self._client_wrapper.get_headers(), + timeout=60, + ) + if 200 <= _response.status_code < 300: + return pydantic.parse_obj_as(typing.Any, _response.json()) # type: ignore + if _response.status_code == 422: + raise UnprocessableEntityError(pydantic.parse_obj_as(HttpValidationError, _response.json())) # type: ignore + try: + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, body=_response.text) + raise ApiError(status_code=_response.status_code, body=_response_json) + + async def timegpt_multi_series(self, *, request: MultiSeriesForecast) -> typing.Any: + """ + Based on the provided data, this endpoint predicts the future values of multiple time series at once. 
It takes a JSON as an input containing information like the series frequency and historical data. (See below for a full description of the parameters.) The response contains the predicted values for each series based on the input arguments. Get your token for private beta at https://dashboard.nixtla.io.
+
+        Parameters:
+            - request: MultiSeriesForecast.
+        """
+        _response = await self._client_wrapper.httpx_client.request(
+            "POST",
+            urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", "timegpt_multi_series"),
+            json=jsonable_encoder(request),
+            headers=self._client_wrapper.get_headers(),
+            timeout=60,
+        )
+        if 200 <= _response.status_code < 300:
+            return pydantic.parse_obj_as(typing.Any, _response.json())  # type: ignore
+        if _response.status_code == 422:
+            raise UnprocessableEntityError(pydantic.parse_obj_as(HttpValidationError, _response.json()))  # type: ignore
+        try:
+            _response_json = _response.json()
+        except JSONDecodeError:
+            raise ApiError(status_code=_response.status_code, body=_response.text)
+        raise ApiError(status_code=_response.status_code, body=_response_json)
+
+    async def timegpt_multi_series_historic(self, *, request: MultiSeriesInsampleForecast) -> typing.Any:
+        """
+        Based on the provided data, this endpoint predicts the in-sample period (historical period) values of multiple time series at once. It takes a JSON as an input containing information like the series frequency and historical data. (See below for a full description of the parameters.) The response contains the predicted values for the historical period. Usually useful for anomaly detection. Get your token for private beta at https://dashboard.nixtla.io.
+
+        Parameters:
+            - request: MultiSeriesInsampleForecast.
+        """
+        _response = await self._client_wrapper.httpx_client.request(
+            "POST",
+            urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", "timegpt_multi_series_historic"),
+            json=jsonable_encoder(request),
+            headers=self._client_wrapper.get_headers(),
+            timeout=60,
+        )
+        if 200 <= _response.status_code < 300:
+            return pydantic.parse_obj_as(typing.Any, _response.json())  # type: ignore
+        if _response.status_code == 422:
+            raise UnprocessableEntityError(pydantic.parse_obj_as(HttpValidationError, _response.json()))  # type: ignore
+        try:
+            _response_json = _response.json()
+        except JSONDecodeError:
+            raise ApiError(status_code=_response.status_code, body=_response.text)
+        raise ApiError(status_code=_response.status_code, body=_response_json)
+
+    async def timegpt_multi_series_anomalies(self, *, request: MultiSeriesAnomaly) -> typing.Any:
+        """
+        Based on the provided data, this endpoint detects the anomalies in the historical period of multiple time series at once. It takes a JSON as an input containing information like the series frequency and historical data. (See below for a full description of the parameters.) The response contains a flag indicating whether each date has an anomaly and also provides the prediction interval used to define whether an observation is an anomaly. Get your token for private beta at https://dashboard.nixtla.io.
+
+        Parameters:
+            - request: MultiSeriesAnomaly.
+ """ + _response = await self._client_wrapper.httpx_client.request( + "POST", + urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", "timegpt_multi_series_anomalies"), + json=jsonable_encoder(request), + headers=self._client_wrapper.get_headers(), + timeout=60, + ) + if 200 <= _response.status_code < 300: + return pydantic.parse_obj_as(typing.Any, _response.json()) # type: ignore + if _response.status_code == 422: + raise UnprocessableEntityError(pydantic.parse_obj_as(HttpValidationError, _response.json())) # type: ignore + try: + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, body=_response.text) + raise ApiError(status_code=_response.status_code, body=_response_json) + + async def timegpt_multi_series_cross_validation(self, *, request: MultiSeriesCrossValidation) -> typing.Any: + """ + Perform Cross Validation for multiple series + + Parameters: + - request: MultiSeriesCrossValidation. + """ + _response = await self._client_wrapper.httpx_client.request( + "POST", + urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", "timegpt_multi_series_cross_validation"), + json=jsonable_encoder(request), + headers=self._client_wrapper.get_headers(), + timeout=60, + ) + if 200 <= _response.status_code < 300: + return pydantic.parse_obj_as(typing.Any, _response.json()) # type: ignore + if _response.status_code == 422: + raise UnprocessableEntityError(pydantic.parse_obj_as(HttpValidationError, _response.json())) # type: ignore + try: + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, body=_response.text) + raise ApiError(status_code=_response.status_code, body=_response_json) + + async def timegpt_input_size(self, *, request: SingleSeriesForecast) -> typing.Any: + """ + Parameters: + - request: SingleSeriesForecast. + """ + _response = await self._client_wrapper.httpx_client.request( + "POST", + urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", "timegpt_input_size"), + json=jsonable_encoder(request), + headers=self._client_wrapper.get_headers(), + timeout=60, + ) + if 200 <= _response.status_code < 300: + return pydantic.parse_obj_as(typing.Any, _response.json()) # type: ignore + if _response.status_code == 422: + raise UnprocessableEntityError(pydantic.parse_obj_as(HttpValidationError, _response.json())) # type: ignore + try: + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, body=_response.text) + raise ApiError(status_code=_response.status_code, body=_response_json) + + async def timegpt_model_params(self, *, request: SingleSeriesForecast) -> typing.Any: + """ + Parameters: + - request: SingleSeriesForecast. 
+ """ + _response = await self._client_wrapper.httpx_client.request( + "POST", + urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", "timegpt_model_params"), + json=jsonable_encoder(request), + headers=self._client_wrapper.get_headers(), + timeout=60, + ) + if 200 <= _response.status_code < 300: + return pydantic.parse_obj_as(typing.Any, _response.json()) # type: ignore + if _response.status_code == 422: + raise UnprocessableEntityError(pydantic.parse_obj_as(HttpValidationError, _response.json())) # type: ignore + try: + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, body=_response.text) + raise ApiError(status_code=_response.status_code, body=_response_json) diff --git a/src/nixtlats/core/__init__.py b/nixtla/core/__init__.py similarity index 60% rename from src/nixtlats/core/__init__.py rename to nixtla/core/__init__.py index 78a7f80f..24149550 100644 --- a/src/nixtlats/core/__init__.py +++ b/nixtla/core/__init__.py @@ -3,25 +3,15 @@ from .api_error import ApiError from .client_wrapper import AsyncClientWrapper, BaseClientWrapper, SyncClientWrapper from .datetime_utils import serialize_datetime -from .file import File, convert_file_dict_to_httpx_tuples -from .http_client import AsyncHttpClient, HttpClient from .jsonable_encoder import jsonable_encoder -from .pydantic_utilities import pydantic_v1 from .remove_none_from_dict import remove_none_from_dict -from .request_options import RequestOptions __all__ = [ "ApiError", "AsyncClientWrapper", - "AsyncHttpClient", "BaseClientWrapper", - "File", - "HttpClient", - "RequestOptions", "SyncClientWrapper", - "convert_file_dict_to_httpx_tuples", "jsonable_encoder", - "pydantic_v1", "remove_none_from_dict", "serialize_datetime", ] diff --git a/src/nixtlats/core/api_error.py b/nixtla/core/api_error.py similarity index 100% rename from src/nixtlats/core/api_error.py rename to nixtla/core/api_error.py diff --git a/nixtla/core/client_wrapper.py b/nixtla/core/client_wrapper.py new file mode 100644 index 00000000..eb2a42b8 --- /dev/null +++ b/nixtla/core/client_wrapper.py @@ -0,0 +1,45 @@ +# This file was auto-generated by Fern from our API Definition. 
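+# Note: the wrapper accepts either a static token string or a zero-argument
+# callable (see BaseClientWrapper._get_token below), so callers can rotate
+# credentials without rebuilding the client.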
+ +import typing + +import httpx + + +class BaseClientWrapper: + def __init__(self, *, token: typing.Union[str, typing.Callable[[], str]], base_url: str): + self._token = token + self._base_url = base_url + + def get_headers(self) -> typing.Dict[str, str]: + headers: typing.Dict[str, str] = { + "X-Fern-Language": "Python", + "X-Fern-SDK-Name": "nixtla", + "X-Fern-SDK-Version": "0.0.0", + } + headers["Authorization"] = f"Bearer {self._get_token()}" + return headers + + def _get_token(self) -> str: + if isinstance(self._token, str): + return self._token + else: + return self._token() + + def get_base_url(self) -> str: + return self._base_url + + +class SyncClientWrapper(BaseClientWrapper): + def __init__( + self, *, token: typing.Union[str, typing.Callable[[], str]], base_url: str, httpx_client: httpx.Client + ): + super().__init__(token=token, base_url=base_url) + self.httpx_client = httpx_client + + +class AsyncClientWrapper(BaseClientWrapper): + def __init__( + self, *, token: typing.Union[str, typing.Callable[[], str]], base_url: str, httpx_client: httpx.AsyncClient + ): + super().__init__(token=token, base_url=base_url) + self.httpx_client = httpx_client diff --git a/src/nixtlats/core/datetime_utils.py b/nixtla/core/datetime_utils.py similarity index 100% rename from src/nixtlats/core/datetime_utils.py rename to nixtla/core/datetime_utils.py diff --git a/src/nixtlats/core/jsonable_encoder.py b/nixtla/core/jsonable_encoder.py similarity index 91% rename from src/nixtlats/core/jsonable_encoder.py rename to nixtla/core/jsonable_encoder.py index 7f482732..5c3cfac1 100644 --- a/src/nixtlats/core/jsonable_encoder.py +++ b/nixtla/core/jsonable_encoder.py @@ -16,8 +16,10 @@ from types import GeneratorType from typing import Any, Callable, Dict, List, Optional, Set, Tuple, Union +from pydantic import BaseModel +from pydantic.json import ENCODERS_BY_TYPE + from .datetime_utils import serialize_datetime -from .pydantic_utilities import pydantic_v1 SetIntStr = Set[Union[int, str]] DictIntStrAny = Dict[Union[int, str], Any] @@ -32,7 +34,7 @@ def generate_encoders_by_class_tuples( return encoders_by_class_tuples -encoders_by_class_tuples = generate_encoders_by_class_tuples(pydantic_v1.json.ENCODERS_BY_TYPE) +encoders_by_class_tuples = generate_encoders_by_class_tuples(ENCODERS_BY_TYPE) def jsonable_encoder(obj: Any, custom_encoder: Optional[Dict[Any, Callable[[Any], Any]]] = None) -> Any: @@ -44,7 +46,7 @@ def jsonable_encoder(obj: Any, custom_encoder: Optional[Dict[Any, Callable[[Any] for encoder_type, encoder_instance in custom_encoder.items(): if isinstance(obj, encoder_type): return encoder_instance(obj) - if isinstance(obj, pydantic_v1.BaseModel): + if isinstance(obj, BaseModel): encoder = getattr(obj.__config__, "json_encoders", {}) if custom_encoder: encoder.update(custom_encoder) @@ -61,10 +63,10 @@ def jsonable_encoder(obj: Any, custom_encoder: Optional[Dict[Any, Callable[[Any] return str(obj) if isinstance(obj, (str, int, float, type(None))): return obj - if isinstance(obj, dt.datetime): - return serialize_datetime(obj) if isinstance(obj, dt.date): return str(obj) + if isinstance(obj, dt.datetime): + return serialize_datetime(obj) if isinstance(obj, dict): encoded_dict = {} allowed_keys = set(obj.keys()) @@ -80,8 +82,8 @@ def jsonable_encoder(obj: Any, custom_encoder: Optional[Dict[Any, Callable[[Any] encoded_list.append(jsonable_encoder(item, custom_encoder=custom_encoder)) return encoded_list - if type(obj) in pydantic_v1.json.ENCODERS_BY_TYPE: - return 
pydantic_v1.json.ENCODERS_BY_TYPE[type(obj)](obj) + if type(obj) in ENCODERS_BY_TYPE: + return ENCODERS_BY_TYPE[type(obj)](obj) for encoder, classes_tuple in encoders_by_class_tuples.items(): if isinstance(obj, classes_tuple): return encoder(obj) diff --git a/src/nixtlats/core/remove_none_from_dict.py b/nixtla/core/remove_none_from_dict.py similarity index 100% rename from src/nixtlats/core/remove_none_from_dict.py rename to nixtla/core/remove_none_from_dict.py diff --git a/src/nixtlats/errors/__init__.py b/nixtla/errors/__init__.py similarity index 100% rename from src/nixtlats/errors/__init__.py rename to nixtla/errors/__init__.py diff --git a/src/nixtlats/errors/unprocessable_entity_error.py b/nixtla/errors/unprocessable_entity_error.py similarity index 100% rename from src/nixtlats/errors/unprocessable_entity_error.py rename to nixtla/errors/unprocessable_entity_error.py diff --git a/src/nixtlats/py.typed b/nixtla/py.typed similarity index 100% rename from src/nixtlats/py.typed rename to nixtla/py.typed diff --git a/src/nixtlats/types/__init__.py b/nixtla/types/__init__.py similarity index 72% rename from src/nixtlats/types/__init__.py rename to nixtla/types/__init__.py index 65d7387a..9a688610 100644 --- a/src/nixtlats/types/__init__.py +++ b/nixtla/types/__init__.py @@ -1,39 +1,49 @@ # This file was auto-generated by Fern from our API Definition. from .http_validation_error import HttpValidationError -from .model import Model from .multi_series_anomaly import MultiSeriesAnomaly +from .multi_series_anomaly_model import MultiSeriesAnomalyModel from .multi_series_cross_validation import MultiSeriesCrossValidation from .multi_series_cross_validation_fewshot_loss import MultiSeriesCrossValidationFewshotLoss from .multi_series_cross_validation_finetune_loss import MultiSeriesCrossValidationFinetuneLoss +from .multi_series_cross_validation_model import MultiSeriesCrossValidationModel from .multi_series_forecast import MultiSeriesForecast from .multi_series_forecast_fewshot_loss import MultiSeriesForecastFewshotLoss from .multi_series_forecast_finetune_loss import MultiSeriesForecastFinetuneLoss +from .multi_series_forecast_model import MultiSeriesForecastModel from .multi_series_input import MultiSeriesInput from .multi_series_insample_forecast import MultiSeriesInsampleForecast +from .multi_series_insample_forecast_model import MultiSeriesInsampleForecastModel from .single_series_forecast import SingleSeriesForecast from .single_series_forecast_fewshot_loss import SingleSeriesForecastFewshotLoss from .single_series_forecast_finetune_loss import SingleSeriesForecastFinetuneLoss +from .single_series_forecast_model import SingleSeriesForecastModel from .single_series_insample_forecast import SingleSeriesInsampleForecast +from .single_series_insample_forecast_model import SingleSeriesInsampleForecastModel from .validation_error import ValidationError from .validation_error_loc_item import ValidationErrorLocItem __all__ = [ "HttpValidationError", - "Model", "MultiSeriesAnomaly", + "MultiSeriesAnomalyModel", "MultiSeriesCrossValidation", "MultiSeriesCrossValidationFewshotLoss", "MultiSeriesCrossValidationFinetuneLoss", + "MultiSeriesCrossValidationModel", "MultiSeriesForecast", "MultiSeriesForecastFewshotLoss", "MultiSeriesForecastFinetuneLoss", + "MultiSeriesForecastModel", "MultiSeriesInput", "MultiSeriesInsampleForecast", + "MultiSeriesInsampleForecastModel", "SingleSeriesForecast", "SingleSeriesForecastFewshotLoss", "SingleSeriesForecastFinetuneLoss", + "SingleSeriesForecastModel", 
"SingleSeriesInsampleForecast", + "SingleSeriesInsampleForecastModel", "ValidationError", "ValidationErrorLocItem", ] diff --git a/src/nixtlats/types/http_validation_error.py b/nixtla/types/http_validation_error.py similarity index 82% rename from src/nixtlats/types/http_validation_error.py rename to nixtla/types/http_validation_error.py index 077c32af..426b4a9e 100644 --- a/src/nixtlats/types/http_validation_error.py +++ b/nixtla/types/http_validation_error.py @@ -3,12 +3,13 @@ import datetime as dt import typing +import pydantic + from ..core.datetime_utils import serialize_datetime -from ..core.pydantic_utilities import pydantic_v1 from .validation_error import ValidationError -class HttpValidationError(pydantic_v1.BaseModel): +class HttpValidationError(pydantic.BaseModel): detail: typing.Optional[typing.List[ValidationError]] def json(self, **kwargs: typing.Any) -> str: @@ -21,6 +22,4 @@ def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: class Config: frozen = True - smart_union = True - extra = pydantic_v1.Extra.forbid json_encoders = {dt.datetime: serialize_datetime} diff --git a/nixtla/types/multi_series_anomaly.py b/nixtla/types/multi_series_anomaly.py new file mode 100644 index 00000000..ce548bc9 --- /dev/null +++ b/nixtla/types/multi_series_anomaly.py @@ -0,0 +1,41 @@ +# This file was auto-generated by Fern from our API Definition. + +import datetime as dt +import typing + +import pydantic + +from ..core.datetime_utils import serialize_datetime +from .multi_series_anomaly_model import MultiSeriesAnomalyModel +from .multi_series_input import MultiSeriesInput + + +class MultiSeriesAnomaly(pydantic.BaseModel): + model: typing.Optional[MultiSeriesAnomalyModel] = pydantic.Field( + description="Model to use as a string. Options are: `timegpt-1`, and `timegpt-1-long-horizon.` We recommend using `timegpt-1-long-horizon` for forecasting if you want to predict more than one seasonal period given the frequency of your data." + ) + freq: typing.Optional[str] = pydantic.Field( + description="The frequency of the data represented as a string. 'D' for daily, 'M' for monthly, 'H' for hourly, and 'W' for weekly frequencies are available." + ) + level: typing.Optional[typing.List[typing.Any]] = pydantic.Field( + description="Specifies the confidence level for the prediction interval used in anomaly detection. It is represented as a percentage between 0 and 100. For instance, a level of 95 indicates that the generated prediction interval captures the true future observation 95% of the time. Any observed values outside of this interval would be considered anomalies. A higher level leads to wider prediction intervals and potentially fewer detected anomalies, whereas a lower level results in narrower intervals and potentially more detected anomalies. Default: 99." + ) + y: typing.Optional[typing.Any] + x: typing.Optional[MultiSeriesInput] = pydantic.Field( + description='The exogenous variables provided as a dictionary of two colums: columns and data. The columns contains the columns of the dataframe and data contains eaach data point. For example: {"columns": ["unique_id", "ds", "ex_1", "ex_2"], "data": [["ts_0", "2021-01-01", 0.2, 0.67], ["ts_0", "2021-01-02", 0.4, 0.7]}. This should also include forecasting horizon (fh) additional timestamps for each unique_id to calculate the future values.' 
+ ) + clean_ex_first: typing.Optional[bool] = pydantic.Field( + description="A boolean flag that indicates whether the API should preprocess (clean) the exogenous signal before applying the large time model. If True, the exogenous signal is cleaned; if False, the exogenous variables are applied after the large time model." + ) + + def json(self, **kwargs: typing.Any) -> str: + kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} + return super().json(**kwargs_with_defaults) + + def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: + kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} + return super().dict(**kwargs_with_defaults) + + class Config: + frozen = True + json_encoders = {dt.datetime: serialize_datetime} diff --git a/src/nixtlats/types/model.py b/nixtla/types/multi_series_anomaly_model.py similarity index 75% rename from src/nixtlats/types/model.py rename to nixtla/types/multi_series_anomaly_model.py index 12e061d7..96db3cb9 100644 --- a/src/nixtlats/types/model.py +++ b/nixtla/types/multi_series_anomaly_model.py @@ -6,7 +6,7 @@ T_Result = typing.TypeVar("T_Result") -class Model(str, enum.Enum): +class MultiSeriesAnomalyModel(str, enum.Enum): """ Model to use as a string. Options are: `timegpt-1`, and `timegpt-1-long-horizon.` We recommend using `timegpt-1-long-horizon` for forecasting if you want to predict more than one seasonal period given the frequency of your data. """ @@ -25,13 +25,13 @@ def visit( long_horizon: typing.Callable[[], T_Result], azureai: typing.Callable[[], T_Result], ) -> T_Result: - if self is Model.TIMEGPT_1: + if self is MultiSeriesAnomalyModel.TIMEGPT_1: return timegpt_1() - if self is Model.TIMEGPT_1_LONG_HORIZON: + if self is MultiSeriesAnomalyModel.TIMEGPT_1_LONG_HORIZON: return timegpt_1_long_horizon() - if self is Model.SHORT_HORIZON: + if self is MultiSeriesAnomalyModel.SHORT_HORIZON: return short_horizon() - if self is Model.LONG_HORIZON: + if self is MultiSeriesAnomalyModel.LONG_HORIZON: return long_horizon() - if self is Model.AZUREAI: + if self is MultiSeriesAnomalyModel.AZUREAI: return azureai() diff --git a/nixtla/types/multi_series_cross_validation.py b/nixtla/types/multi_series_cross_validation.py new file mode 100644 index 00000000..4b1c1ff1 --- /dev/null +++ b/nixtla/types/multi_series_cross_validation.py @@ -0,0 +1,60 @@ +# This file was auto-generated by Fern from our API Definition. + +import datetime as dt +import typing + +import pydantic + +from ..core.datetime_utils import serialize_datetime +from .multi_series_cross_validation_fewshot_loss import MultiSeriesCrossValidationFewshotLoss +from .multi_series_cross_validation_finetune_loss import MultiSeriesCrossValidationFinetuneLoss +from .multi_series_cross_validation_model import MultiSeriesCrossValidationModel +from .multi_series_input import MultiSeriesInput + + +class MultiSeriesCrossValidation(pydantic.BaseModel): + fewshot_steps: typing.Optional[int] = pydantic.Field(description="Deprecated. Please use finetune_steps instead.") + fewshot_loss: typing.Optional[MultiSeriesCrossValidationFewshotLoss] = pydantic.Field( + description="Deprecated. Please use finetune_loss instead." + ) + model: typing.Optional[MultiSeriesCrossValidationModel] = pydantic.Field( + description="Model to use as a string. Options are: `timegpt-1`, and `timegpt-1-long-horizon.` We recommend using `timegpt-1-long-horizon` for forecasting if you want to predict more than one seasonal period given the frequency of your data." 
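The anomaly-detection payload above pairs the renamed `MultiSeriesAnomalyModel` enum with the `MultiSeriesAnomaly` request body. A minimal sketch of how a caller might assemble one under the new `nixtla` package layout follows; the series values are invented, unset Optional fields are assumed to default to None, and `y` is assumed to use the same columns/data layout that the `x` field documents.

from nixtla.types.multi_series_anomaly import MultiSeriesAnomaly
from nixtla.types.multi_series_anomaly_model import MultiSeriesAnomalyModel

# Build an anomaly-detection request; the observations here are invented
# sample data, and y is assumed to follow the columns/data layout that
# the x field's documentation describes.
payload = MultiSeriesAnomaly(
    model=MultiSeriesAnomalyModel.TIMEGPT_1,
    freq="D",     # daily observations
    level=[99],   # 99% prediction interval; points outside it are flagged as anomalies
    y={
        "columns": ["unique_id", "ds", "y"],
        "data": [["ts_0", "2021-01-01", 0.5], ["ts_0", "2021-01-02", 0.6]],
    },
    clean_ex_first=True,
)
print(payload.json())  # serialized with by_alias/exclude_unset, as defined above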
diff --git a/nixtla/types/multi_series_cross_validation.py b/nixtla/types/multi_series_cross_validation.py
new file mode 100644
index 00000000..4b1c1ff1
--- /dev/null
+++ b/nixtla/types/multi_series_cross_validation.py
@@ -0,0 +1,60 @@
+# This file was auto-generated by Fern from our API Definition.
+
+import datetime as dt
+import typing
+
+import pydantic
+
+from ..core.datetime_utils import serialize_datetime
+from .multi_series_cross_validation_fewshot_loss import MultiSeriesCrossValidationFewshotLoss
+from .multi_series_cross_validation_finetune_loss import MultiSeriesCrossValidationFinetuneLoss
+from .multi_series_cross_validation_model import MultiSeriesCrossValidationModel
+from .multi_series_input import MultiSeriesInput
+
+
+class MultiSeriesCrossValidation(pydantic.BaseModel):
+    fewshot_steps: typing.Optional[int] = pydantic.Field(description="Deprecated. Please use finetune_steps instead.")
+    fewshot_loss: typing.Optional[MultiSeriesCrossValidationFewshotLoss] = pydantic.Field(
+        description="Deprecated. Please use finetune_loss instead."
+    )
+    model: typing.Optional[MultiSeriesCrossValidationModel] = pydantic.Field(
+        description="Model to use as a string. Options are: `timegpt-1` and `timegpt-1-long-horizon`. We recommend using `timegpt-1-long-horizon` for forecasting if you want to predict more than one seasonal period given the frequency of your data."
+    )
+    freq: typing.Optional[str] = pydantic.Field(
+        description="The frequency of the data represented as a string. 'D' for daily, 'M' for monthly, 'H' for hourly, and 'W' for weekly frequencies are available."
+    )
+    level: typing.Optional[typing.List[typing.Any]] = pydantic.Field(
+        description="A list of values representing the prediction intervals. Each value is a percentage that indicates the level of certainty for the corresponding prediction interval. For example, [80, 90] defines 80% and 90% prediction intervals."
+    )
+    fh: typing.Optional[int] = pydantic.Field(
+        description="The forecasting horizon. This represents the number of time steps into the future that the forecast should predict."
+    )
+    y: typing.Optional[typing.Any]
+    x: typing.Optional[MultiSeriesInput] = pydantic.Field(
+        description='The exogenous variables provided as a dictionary of two columns: columns and data. The columns field contains the columns of the dataframe and data contains each data point. For example: {"columns": ["unique_id", "ds", "ex_1", "ex_2"], "data": [["ts_0", "2021-01-01", 0.2, 0.67], ["ts_0", "2021-01-02", 0.4, 0.7]]}. This should also include forecasting horizon (fh) additional timestamps for each unique_id to calculate the future values.'
+    )
+    n_windows: typing.Optional[int] = pydantic.Field(description="Number of windows to evaluate.")
+    step_size: typing.Optional[int] = pydantic.Field(
+        description="Step size between each cross validation window. If None it will be equal to the forecasting horizon."
+    )
+    clean_ex_first: typing.Optional[bool] = pydantic.Field(
+        description="A boolean flag that indicates whether the API should preprocess (clean) the exogenous signal before applying the large time model. If True, the exogenous signal is cleaned; if False, the exogenous variables are applied after the large time model."
+    )
+    finetune_steps: typing.Optional[int] = pydantic.Field(
+        description="The number of tuning steps used to train the large time model on the data. Set this value to 0 for zero-shot inference, i.e., to make predictions without any further model tuning."
+    )
+    finetune_loss: typing.Optional[MultiSeriesCrossValidationFinetuneLoss] = pydantic.Field(
+        description="The loss used to train the large time model on the data. Select from ['default', 'mae', 'mse', 'rmse', 'mape', 'smape']. It will only be used if finetune_steps is larger than 0. Default is a robust loss function that is less sensitive to outliers."
+    )
+
+    def json(self, **kwargs: typing.Any) -> str:
+        kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs}
+        return super().json(**kwargs_with_defaults)
+
+    def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]:
+        kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs}
+        return super().dict(**kwargs_with_defaults)
+
+    class Config:
+        frozen = True
+        json_encoders = {dt.datetime: serialize_datetime}
diff --git a/src/nixtlats/types/multi_series_cross_validation_fewshot_loss.py b/nixtla/types/multi_series_cross_validation_fewshot_loss.py
similarity index 100%
rename from src/nixtlats/types/multi_series_cross_validation_fewshot_loss.py
rename to nixtla/types/multi_series_cross_validation_fewshot_loss.py
diff --git a/src/nixtlats/types/multi_series_cross_validation_finetune_loss.py b/nixtla/types/multi_series_cross_validation_finetune_loss.py
similarity index 100%
rename from src/nixtlats/types/multi_series_cross_validation_finetune_loss.py
rename to nixtla/types/multi_series_cross_validation_finetune_loss.py
diff --git a/nixtla/types/multi_series_cross_validation_model.py b/nixtla/types/multi_series_cross_validation_model.py
new file mode 100644
index 00000000..046c6007
--- /dev/null
+++ b/nixtla/types/multi_series_cross_validation_model.py
@@ -0,0 +1,37 @@
+# This file was auto-generated by Fern from our API Definition.
+
+import enum
+import typing
+
+T_Result = typing.TypeVar("T_Result")
+
+
+class MultiSeriesCrossValidationModel(str, enum.Enum):
+    """
+    Model to use as a string. Options are: `timegpt-1` and `timegpt-1-long-horizon`. We recommend using `timegpt-1-long-horizon` for forecasting if you want to predict more than one seasonal period given the frequency of your data.
+    """
+
+    TIMEGPT_1 = "timegpt-1"
+    TIMEGPT_1_LONG_HORIZON = "timegpt-1-long-horizon"
+    SHORT_HORIZON = "short-horizon"
+    LONG_HORIZON = "long-horizon"
+    AZUREAI = "azureai"
+
+    def visit(
+        self,
+        timegpt_1: typing.Callable[[], T_Result],
+        timegpt_1_long_horizon: typing.Callable[[], T_Result],
+        short_horizon: typing.Callable[[], T_Result],
+        long_horizon: typing.Callable[[], T_Result],
+        azureai: typing.Callable[[], T_Result],
+    ) -> T_Result:
+        if self is MultiSeriesCrossValidationModel.TIMEGPT_1:
+            return timegpt_1()
+        if self is MultiSeriesCrossValidationModel.TIMEGPT_1_LONG_HORIZON:
+            return timegpt_1_long_horizon()
+        if self is MultiSeriesCrossValidationModel.SHORT_HORIZON:
+            return short_horizon()
+        if self is MultiSeriesCrossValidationModel.LONG_HORIZON:
+            return long_horizon()
+        if self is MultiSeriesCrossValidationModel.AZUREAI:
+            return azureai()
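For cross validation, `n_windows` and `step_size` control the rolling evaluation windows. A sketch of a zero-shot request with non-overlapping windows; the data row is invented and unset Optional fields are assumed to default to None.

from nixtla.types.multi_series_cross_validation import MultiSeriesCrossValidation
from nixtla.types.multi_series_cross_validation_model import MultiSeriesCrossValidationModel

# Three evaluation windows, each forecasting 4 steps; step_size equal to
# fh gives non-overlapping windows. finetune_steps=0 means zero-shot
# inference, per the field documentation above.
cv_request = MultiSeriesCrossValidation(
    model=MultiSeriesCrossValidationModel.TIMEGPT_1_LONG_HORIZON,
    freq="W",
    fh=4,
    n_windows=3,
    step_size=4,
    finetune_steps=0,
    y={"columns": ["unique_id", "ds", "y"],
       "data": [["ts_0", "2021-01-03", 1.0]]},  # invented sample row
)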
diff --git a/nixtla/types/multi_series_forecast.py b/nixtla/types/multi_series_forecast.py
new file mode 100644
index 00000000..e45588f0
--- /dev/null
+++ b/nixtla/types/multi_series_forecast.py
@@ -0,0 +1,56 @@
+# This file was auto-generated by Fern from our API Definition.
+
+import datetime as dt
+import typing
+
+import pydantic
+
+from ..core.datetime_utils import serialize_datetime
+from .multi_series_forecast_fewshot_loss import MultiSeriesForecastFewshotLoss
+from .multi_series_forecast_finetune_loss import MultiSeriesForecastFinetuneLoss
+from .multi_series_forecast_model import MultiSeriesForecastModel
+from .multi_series_input import MultiSeriesInput
+
+
+class MultiSeriesForecast(pydantic.BaseModel):
+    fewshot_steps: typing.Optional[int] = pydantic.Field(description="Deprecated. Please use finetune_steps instead.")
+    fewshot_loss: typing.Optional[MultiSeriesForecastFewshotLoss] = pydantic.Field(
+        description="Deprecated. Please use finetune_loss instead."
+    )
+    model: typing.Optional[MultiSeriesForecastModel] = pydantic.Field(
+        description="Model to use as a string. Options are: `timegpt-1` and `timegpt-1-long-horizon`. We recommend using `timegpt-1-long-horizon` for forecasting if you want to predict more than one seasonal period given the frequency of your data."
+    )
+    freq: typing.Optional[str] = pydantic.Field(
+        description="The frequency of the data represented as a string. 'D' for daily, 'M' for monthly, 'H' for hourly, and 'W' for weekly frequencies are available."
+    )
+    level: typing.Optional[typing.List[typing.Any]] = pydantic.Field(
+        description="A list of values representing the prediction intervals. Each value is a percentage that indicates the level of certainty for the corresponding prediction interval. For example, [80, 90] defines 80% and 90% prediction intervals."
+    )
+    fh: typing.Optional[int] = pydantic.Field(
+        description="The forecasting horizon. This represents the number of time steps into the future that the forecast should predict."
+    )
+    y: typing.Optional[typing.Any]
+    x: typing.Optional[MultiSeriesInput] = pydantic.Field(
+        description='The exogenous variables provided as a dictionary of two columns: columns and data. The columns field contains the columns of the dataframe and data contains each data point. For example: {"columns": ["unique_id", "ds", "ex_1", "ex_2"], "data": [["ts_0", "2021-01-01", 0.2, 0.67], ["ts_0", "2021-01-02", 0.4, 0.7]]}. This should also include forecasting horizon (fh) additional timestamps for each unique_id to calculate the future values.'
+    )
+    clean_ex_first: typing.Optional[bool] = pydantic.Field(
+        description="A boolean flag that indicates whether the API should preprocess (clean) the exogenous signal before applying the large time model. If True, the exogenous signal is cleaned; if False, the exogenous variables are applied after the large time model."
+    )
+    finetune_steps: typing.Optional[int] = pydantic.Field(
+        description="The number of tuning steps used to train the large time model on the data. Set this value to 0 for zero-shot inference, i.e., to make predictions without any further model tuning."
+    )
+    finetune_loss: typing.Optional[MultiSeriesForecastFinetuneLoss] = pydantic.Field(
+        description="The loss used to train the large time model on the data. Select from ['default', 'mae', 'mse', 'rmse', 'mape', 'smape']. It will only be used if finetune_steps is larger than 0. Default is a robust loss function that is less sensitive to outliers."
+    )
+
+    def json(self, **kwargs: typing.Any) -> str:
+        kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs}
+        return super().json(**kwargs_with_defaults)
+
+    def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]:
+        kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs}
+        return super().dict(**kwargs_with_defaults)
+
+    class Config:
+        frozen = True
+        json_encoders = {dt.datetime: serialize_datetime}
diff --git a/src/nixtlats/types/multi_series_forecast_fewshot_loss.py b/nixtla/types/multi_series_forecast_fewshot_loss.py
similarity index 100%
rename from src/nixtlats/types/multi_series_forecast_fewshot_loss.py
rename to nixtla/types/multi_series_forecast_fewshot_loss.py
diff --git a/src/nixtlats/types/multi_series_forecast_finetune_loss.py b/nixtla/types/multi_series_forecast_finetune_loss.py
similarity index 100%
rename from src/nixtlats/types/multi_series_forecast_finetune_loss.py
rename to nixtla/types/multi_series_forecast_finetune_loss.py
diff --git a/nixtla/types/multi_series_forecast_model.py b/nixtla/types/multi_series_forecast_model.py
new file mode 100644
index 00000000..d4a4be94
--- /dev/null
+++ b/nixtla/types/multi_series_forecast_model.py
@@ -0,0 +1,37 @@
+# This file was auto-generated by Fern from our API Definition.
+
+import enum
+import typing
+
+T_Result = typing.TypeVar("T_Result")
+
+
+class MultiSeriesForecastModel(str, enum.Enum):
+    """
+    Model to use as a string. Options are: `timegpt-1` and `timegpt-1-long-horizon`. We recommend using `timegpt-1-long-horizon` for forecasting if you want to predict more than one seasonal period given the frequency of your data.
+    """
+
+    TIMEGPT_1 = "timegpt-1"
+    TIMEGPT_1_LONG_HORIZON = "timegpt-1-long-horizon"
+    SHORT_HORIZON = "short-horizon"
+    LONG_HORIZON = "long-horizon"
+    AZUREAI = "azureai"
+
+    def visit(
+        self,
+        timegpt_1: typing.Callable[[], T_Result],
+        timegpt_1_long_horizon: typing.Callable[[], T_Result],
+        short_horizon: typing.Callable[[], T_Result],
+        long_horizon: typing.Callable[[], T_Result],
+        azureai: typing.Callable[[], T_Result],
+    ) -> T_Result:
+        if self is MultiSeriesForecastModel.TIMEGPT_1:
+            return timegpt_1()
+        if self is MultiSeriesForecastModel.TIMEGPT_1_LONG_HORIZON:
+            return timegpt_1_long_horizon()
+        if self is MultiSeriesForecastModel.SHORT_HORIZON:
+            return short_horizon()
+        if self is MultiSeriesForecastModel.LONG_HORIZON:
+            return long_horizon()
+        if self is MultiSeriesForecastModel.AZUREAI:
+            return azureai()
diff --git a/src/nixtlats/types/multi_series_input.py b/nixtla/types/multi_series_input.py
similarity index 81%
rename from src/nixtlats/types/multi_series_input.py
rename to nixtla/types/multi_series_input.py
index ec19b69f..fc150fac 100644
--- a/src/nixtlats/types/multi_series_input.py
+++ b/nixtla/types/multi_series_input.py
@@ -3,11 +3,12 @@
 import datetime as dt
 import typing
 
+import pydantic
+
 from ..core.datetime_utils import serialize_datetime
-from ..core.pydantic_utilities import pydantic_v1
 
 
-class MultiSeriesInput(pydantic_v1.BaseModel):
+class MultiSeriesInput(pydantic.BaseModel):
     columns: typing.List[str]
     data: typing.List[typing.Any]
 
@@ -21,6 +22,4 @@ def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]:
 
     class Config:
         frozen = True
-        smart_union = True
-        extra = pydantic_v1.Extra.forbid
         json_encoders = {dt.datetime: serialize_datetime}
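`MultiSeriesInput` is the columns/data container that the `x` field expects, and per the field documentation it must carry `fh` extra timestamps per series beyond the history. A sketch with invented values, assuming unset Optional fields default to None:

from nixtla.types.multi_series_forecast import MultiSeriesForecast
from nixtla.types.multi_series_input import MultiSeriesInput

# Exogenous variables cover the two historical timestamps plus one
# future timestamp, matching fh=1 below. All values are invented.
exogenous = MultiSeriesInput(
    columns=["unique_id", "ds", "ex_1"],
    data=[
        ["ts_0", "2021-01-01", 0.2],
        ["ts_0", "2021-01-02", 0.4],
        ["ts_0", "2021-01-03", 0.3],  # the fh-th additional future timestamp
    ],
)
forecast_request = MultiSeriesForecast(
    freq="D",
    fh=1,
    level=[80, 90],  # 80% and 90% prediction intervals
    y={"columns": ["unique_id", "ds", "y"],
       "data": [["ts_0", "2021-01-01", 1.1], ["ts_0", "2021-01-02", 1.3]]},
    x=exogenous,
)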
diff --git a/nixtla/types/multi_series_insample_forecast.py b/nixtla/types/multi_series_insample_forecast.py
new file mode 100644
index 00000000..d5961b47
--- /dev/null
+++ b/nixtla/types/multi_series_insample_forecast.py
@@ -0,0 +1,41 @@
+# This file was auto-generated by Fern from our API Definition.
+
+import datetime as dt
+import typing
+
+import pydantic
+
+from ..core.datetime_utils import serialize_datetime
+from .multi_series_input import MultiSeriesInput
+from .multi_series_insample_forecast_model import MultiSeriesInsampleForecastModel
+
+
+class MultiSeriesInsampleForecast(pydantic.BaseModel):
+    model: typing.Optional[MultiSeriesInsampleForecastModel] = pydantic.Field(
+        description="Model to use as a string. Options are: `timegpt-1` and `timegpt-1-long-horizon`. We recommend using `timegpt-1-long-horizon` for forecasting if you want to predict more than one seasonal period given the frequency of your data."
+    )
+    freq: typing.Optional[str] = pydantic.Field(
+        description="The frequency of the data represented as a string. 'D' for daily, 'M' for monthly, 'H' for hourly, and 'W' for weekly frequencies are available."
+    )
+    level: typing.Optional[typing.List[typing.Any]] = pydantic.Field(
+        description="A list of values representing the prediction intervals. Each value is a percentage that indicates the level of certainty for the corresponding prediction interval. For example, [80, 90] defines 80% and 90% prediction intervals."
+    )
+    y: typing.Optional[typing.Any]
+    x: typing.Optional[MultiSeriesInput] = pydantic.Field(
+        description='The exogenous variables provided as a dictionary of two columns: columns and data. The columns field contains the columns of the dataframe and data contains each data point. For example: {"columns": ["unique_id", "ds", "ex_1", "ex_2"], "data": [["ts_0", "2021-01-01", 0.2, 0.67], ["ts_0", "2021-01-02", 0.4, 0.7]]}. This should also include forecasting horizon (fh) additional timestamps for each unique_id to calculate the future values.'
+    )
+    clean_ex_first: typing.Optional[bool] = pydantic.Field(
+        description="A boolean flag that indicates whether the API should preprocess (clean) the exogenous signal before applying the large time model. If True, the exogenous signal is cleaned; if False, the exogenous variables are applied after the large time model."
+    )
+
+    def json(self, **kwargs: typing.Any) -> str:
+        kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs}
+        return super().json(**kwargs_with_defaults)
+
+    def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]:
+        kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs}
+        return super().dict(**kwargs_with_defaults)
+
+    class Config:
+        frozen = True
+        json_encoders = {dt.datetime: serialize_datetime}
diff --git a/nixtla/types/multi_series_insample_forecast_model.py b/nixtla/types/multi_series_insample_forecast_model.py
new file mode 100644
index 00000000..b229600e
--- /dev/null
+++ b/nixtla/types/multi_series_insample_forecast_model.py
@@ -0,0 +1,37 @@
+# This file was auto-generated by Fern from our API Definition.
+
+import enum
+import typing
+
+T_Result = typing.TypeVar("T_Result")
+
+
+class MultiSeriesInsampleForecastModel(str, enum.Enum):
+    """
+    Model to use as a string. Options are: `timegpt-1` and `timegpt-1-long-horizon`. We recommend using `timegpt-1-long-horizon` for forecasting if you want to predict more than one seasonal period given the frequency of your data.
+    """
+
+    TIMEGPT_1 = "timegpt-1"
+    TIMEGPT_1_LONG_HORIZON = "timegpt-1-long-horizon"
+    SHORT_HORIZON = "short-horizon"
+    LONG_HORIZON = "long-horizon"
+    AZUREAI = "azureai"
+
+    def visit(
+        self,
+        timegpt_1: typing.Callable[[], T_Result],
+        timegpt_1_long_horizon: typing.Callable[[], T_Result],
+        short_horizon: typing.Callable[[], T_Result],
+        long_horizon: typing.Callable[[], T_Result],
+        azureai: typing.Callable[[], T_Result],
+    ) -> T_Result:
+        if self is MultiSeriesInsampleForecastModel.TIMEGPT_1:
+            return timegpt_1()
+        if self is MultiSeriesInsampleForecastModel.TIMEGPT_1_LONG_HORIZON:
+            return timegpt_1_long_horizon()
+        if self is MultiSeriesInsampleForecastModel.SHORT_HORIZON:
+            return short_horizon()
+        if self is MultiSeriesInsampleForecastModel.LONG_HORIZON:
+            return long_horizon()
+        if self is MultiSeriesInsampleForecastModel.AZUREAI:
+            return azureai()
diff --git a/nixtla/types/single_series_forecast.py b/nixtla/types/single_series_forecast.py
new file mode 100644
index 00000000..fc5f80c3
--- /dev/null
+++ b/nixtla/types/single_series_forecast.py
@@ -0,0 +1,55 @@
+# This file was auto-generated by Fern from our API Definition.
+
+import datetime as dt
+import typing
+
+import pydantic
+
+from ..core.datetime_utils import serialize_datetime
+from .single_series_forecast_fewshot_loss import SingleSeriesForecastFewshotLoss
+from .single_series_forecast_finetune_loss import SingleSeriesForecastFinetuneLoss
+from .single_series_forecast_model import SingleSeriesForecastModel
+
+
+class SingleSeriesForecast(pydantic.BaseModel):
+    fewshot_steps: typing.Optional[int] = pydantic.Field(description="Deprecated. Please use finetune_steps instead.")
+    fewshot_loss: typing.Optional[SingleSeriesForecastFewshotLoss] = pydantic.Field(
+        description="Deprecated. Please use finetune_loss instead."
+    )
+    model: typing.Optional[SingleSeriesForecastModel] = pydantic.Field(
+        description="Model to use as a string. Options are: `timegpt-1` and `timegpt-1-long-horizon`. We recommend using `timegpt-1-long-horizon` for forecasting if you want to predict more than one seasonal period given the frequency of your data."
+    )
+    freq: typing.Optional[str] = pydantic.Field(
+        description="The frequency of the data represented as a string. 'D' for daily, 'M' for monthly, 'H' for hourly, and 'W' for weekly frequencies are available."
+    )
+    level: typing.Optional[typing.List[typing.Any]] = pydantic.Field(
+        description="A list of values representing the prediction intervals. Each value is a percentage that indicates the level of certainty for the corresponding prediction interval. For example, [80, 90] defines 80% and 90% prediction intervals."
+    )
+    fh: typing.Optional[int] = pydantic.Field(
+        description="The forecasting horizon. This represents the number of time steps into the future that the forecast should predict."
+    )
+    y: typing.Optional[typing.Any]
+    x: typing.Optional[typing.Dict[str, typing.List[float]]] = pydantic.Field(
+        description='The exogenous variables provided as a dictionary. Each key is a timestamp (string format: YYYY-MM-DD) and the corresponding value is a list of exogenous variable values at that time point. For example: {"2021-01-01": [0.1], "2021-01-02": [0.4]}. This should also include forecasting horizon (fh) additional timestamps to calculate the future values.'
+    )
+    clean_ex_first: typing.Optional[bool] = pydantic.Field(
+        description="A boolean flag that indicates whether the API should preprocess (clean) the exogenous signal before applying the large time model. If True, the exogenous signal is cleaned; if False, the exogenous variables are applied after the large time model."
+    )
+    finetune_steps: typing.Optional[int] = pydantic.Field(
+        description="The number of tuning steps used to train the large time model on the data. Set this value to 0 for zero-shot inference, i.e., to make predictions without any further model tuning."
+    )
+    finetune_loss: typing.Optional[SingleSeriesForecastFinetuneLoss] = pydantic.Field(
+        description="The loss used to train the large time model on the data. Select from ['default', 'mae', 'mse', 'rmse', 'mape', 'smape']. It will only be used if finetune_steps is larger than 0. Default is a robust loss function that is less sensitive to outliers."
+    )
+
+    def json(self, **kwargs: typing.Any) -> str:
+        kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs}
+        return super().json(**kwargs_with_defaults)
+
+    def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]:
+        kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs}
+        return super().dict(**kwargs_with_defaults)
+
+    class Config:
+        frozen = True
+        json_encoders = {dt.datetime: serialize_datetime}
diff --git a/src/nixtlats/types/single_series_forecast_fewshot_loss.py b/nixtla/types/single_series_forecast_fewshot_loss.py
similarity index 100%
rename from src/nixtlats/types/single_series_forecast_fewshot_loss.py
rename to nixtla/types/single_series_forecast_fewshot_loss.py
diff --git a/src/nixtlats/types/single_series_forecast_finetune_loss.py b/nixtla/types/single_series_forecast_finetune_loss.py
similarity index 100%
rename from src/nixtlats/types/single_series_forecast_finetune_loss.py
rename to nixtla/types/single_series_forecast_finetune_loss.py
diff --git a/nixtla/types/single_series_forecast_model.py b/nixtla/types/single_series_forecast_model.py
new file mode 100644
index 00000000..7ab1a64a
--- /dev/null
+++ b/nixtla/types/single_series_forecast_model.py
@@ -0,0 +1,37 @@
+# This file was auto-generated by Fern from our API Definition.
+
+import enum
+import typing
+
+T_Result = typing.TypeVar("T_Result")
+
+
+class SingleSeriesForecastModel(str, enum.Enum):
+    """
+    Model to use as a string. Options are: `timegpt-1` and `timegpt-1-long-horizon`. We recommend using `timegpt-1-long-horizon` for forecasting if you want to predict more than one seasonal period given the frequency of your data.
+    """
+
+    TIMEGPT_1 = "timegpt-1"
+    TIMEGPT_1_LONG_HORIZON = "timegpt-1-long-horizon"
+    SHORT_HORIZON = "short-horizon"
+    LONG_HORIZON = "long-horizon"
+    AZUREAI = "azureai"
+
+    def visit(
+        self,
+        timegpt_1: typing.Callable[[], T_Result],
+        timegpt_1_long_horizon: typing.Callable[[], T_Result],
+        short_horizon: typing.Callable[[], T_Result],
+        long_horizon: typing.Callable[[], T_Result],
+        azureai: typing.Callable[[], T_Result],
+    ) -> T_Result:
+        if self is SingleSeriesForecastModel.TIMEGPT_1:
+            return timegpt_1()
+        if self is SingleSeriesForecastModel.TIMEGPT_1_LONG_HORIZON:
+            return timegpt_1_long_horizon()
+        if self is SingleSeriesForecastModel.SHORT_HORIZON:
+            return short_horizon()
+        if self is SingleSeriesForecastModel.LONG_HORIZON:
+            return long_horizon()
+        if self is SingleSeriesForecastModel.AZUREAI:
+            return azureai()
diff --git a/nixtla/types/single_series_insample_forecast.py b/nixtla/types/single_series_insample_forecast.py
new file mode 100644
index 00000000..29eb4eb9
--- /dev/null
+++ b/nixtla/types/single_series_insample_forecast.py
@@ -0,0 +1,38 @@
+# This file was auto-generated by Fern from our API Definition.
+
+import datetime as dt
+import typing
+
+import pydantic
+
+from ..core.datetime_utils import serialize_datetime
+from .single_series_insample_forecast_model import SingleSeriesInsampleForecastModel
+
+
+class SingleSeriesInsampleForecast(pydantic.BaseModel):
+    model: typing.Optional[SingleSeriesInsampleForecastModel] = pydantic.Field(
+        description="Model to use as a string. Options are: `timegpt-1` and `timegpt-1-long-horizon`. We recommend using `timegpt-1-long-horizon` for forecasting if you want to predict more than one seasonal period given the frequency of your data."
+    )
+    freq: typing.Optional[str] = pydantic.Field(
+        description="The frequency of the data represented as a string. 'D' for daily, 'M' for monthly, 'H' for hourly, and 'W' for weekly frequencies are available."
+    )
+    level: typing.Optional[typing.List[typing.Any]] = pydantic.Field(
+        description="A list of values representing the prediction intervals. Each value is a percentage that indicates the level of certainty for the corresponding prediction interval. For example, [80, 90] defines 80% and 90% prediction intervals."
+    )
+    y: typing.Optional[typing.Any]
+    x: typing.Optional[typing.Any]
+    clean_ex_first: typing.Optional[bool] = pydantic.Field(
+        description="A boolean flag that indicates whether the API should preprocess (clean) the exogenous signal before applying the large time model. If True, the exogenous signal is cleaned; if False, the exogenous variables are applied after the large time model."
+    )
+
+    def json(self, **kwargs: typing.Any) -> str:
+        kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs}
+        return super().json(**kwargs_with_defaults)
+
+    def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]:
+        kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs}
+        return super().dict(**kwargs_with_defaults)
+
+    class Config:
+        frozen = True
+        json_encoders = {dt.datetime: serialize_datetime}
diff --git a/nixtla/types/single_series_insample_forecast_model.py b/nixtla/types/single_series_insample_forecast_model.py
new file mode 100644
index 00000000..faef673a
--- /dev/null
+++ b/nixtla/types/single_series_insample_forecast_model.py
@@ -0,0 +1,37 @@
+# This file was auto-generated by Fern from our API Definition.
+
+import enum
+import typing
+
+T_Result = typing.TypeVar("T_Result")
+
+
+class SingleSeriesInsampleForecastModel(str, enum.Enum):
+    """
+    Model to use as a string. Options are: `timegpt-1` and `timegpt-1-long-horizon`. We recommend using `timegpt-1-long-horizon` for forecasting if you want to predict more than one seasonal period given the frequency of your data.
+    """
+
+    TIMEGPT_1 = "timegpt-1"
+    TIMEGPT_1_LONG_HORIZON = "timegpt-1-long-horizon"
+    SHORT_HORIZON = "short-horizon"
+    LONG_HORIZON = "long-horizon"
+    AZUREAI = "azureai"
+
+    def visit(
+        self,
+        timegpt_1: typing.Callable[[], T_Result],
+        timegpt_1_long_horizon: typing.Callable[[], T_Result],
+        short_horizon: typing.Callable[[], T_Result],
+        long_horizon: typing.Callable[[], T_Result],
+        azureai: typing.Callable[[], T_Result],
+    ) -> T_Result:
+        if self is SingleSeriesInsampleForecastModel.TIMEGPT_1:
+            return timegpt_1()
+        if self is SingleSeriesInsampleForecastModel.TIMEGPT_1_LONG_HORIZON:
+            return timegpt_1_long_horizon()
+        if self is SingleSeriesInsampleForecastModel.SHORT_HORIZON:
+            return short_horizon()
+        if self is SingleSeriesInsampleForecastModel.LONG_HORIZON:
+            return long_horizon()
+        if self is SingleSeriesInsampleForecastModel.AZUREAI:
+            return azureai()
diff --git a/src/nixtlats/types/validation_error.py b/nixtla/types/validation_error.py
similarity index 83%
rename from src/nixtlats/types/validation_error.py
rename to nixtla/types/validation_error.py
index 5feb70e7..3f782718 100644
--- a/src/nixtlats/types/validation_error.py
+++ b/nixtla/types/validation_error.py
@@ -3,12 +3,13 @@
 import datetime as dt
 import typing
 
+import pydantic
+
 from ..core.datetime_utils import serialize_datetime
-from ..core.pydantic_utilities import pydantic_v1
 from .validation_error_loc_item import ValidationErrorLocItem
 
 
-class ValidationError(pydantic_v1.BaseModel):
+class ValidationError(pydantic.BaseModel):
     loc: typing.List[ValidationErrorLocItem]
     msg: str
     type: str
@@ -23,6 +24,4 @@ def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]:
 
     class Config:
         frozen = True
-        smart_union = True
-        extra = pydantic_v1.Extra.forbid
         json_encoders = {dt.datetime: serialize_datetime}
diff --git a/src/nixtlats/types/validation_error_loc_item.py b/nixtla/types/validation_error_loc_item.py
similarity index 100%
rename from src/nixtlats/types/validation_error_loc_item.py
rename to nixtla/types/validation_error_loc_item.py
diff --git a/poetry.lock b/poetry.lock
deleted file mode 100644
index c9c7fc1d..00000000
--- a/poetry.lock
+++ /dev/null
@@ -1,448 +0,0 @@
-# This file is automatically @generated by Poetry 1.8.2 and should not be changed by hand.
- -[[package]] -name = "annotated-types" -version = "0.6.0" -description = "Reusable constraint types to use with typing.Annotated" -optional = false -python-versions = ">=3.8" -files = [ - {file = "annotated_types-0.6.0-py3-none-any.whl", hash = "sha256:0641064de18ba7a25dee8f96403ebc39113d0cb953a01429249d5c7564666a43"}, - {file = "annotated_types-0.6.0.tar.gz", hash = "sha256:563339e807e53ffd9c267e99fc6d9ea23eb8443c08f112651963e24e22f84a5d"}, -] - -[package.dependencies] -typing-extensions = {version = ">=4.0.0", markers = "python_version < \"3.9\""} - -[[package]] -name = "anyio" -version = "4.3.0" -description = "High level compatibility layer for multiple asynchronous event loop implementations" -optional = false -python-versions = ">=3.8" -files = [ - {file = "anyio-4.3.0-py3-none-any.whl", hash = "sha256:048e05d0f6caeed70d731f3db756d35dcc1f35747c8c403364a8332c630441b8"}, - {file = "anyio-4.3.0.tar.gz", hash = "sha256:f75253795a87df48568485fd18cdd2a3fa5c4f7c5be8e5e36637733fce06fed6"}, -] - -[package.dependencies] -exceptiongroup = {version = ">=1.0.2", markers = "python_version < \"3.11\""} -idna = ">=2.8" -sniffio = ">=1.1" -typing-extensions = {version = ">=4.1", markers = "python_version < \"3.11\""} - -[package.extras] -doc = ["Sphinx (>=7)", "packaging", "sphinx-autodoc-typehints (>=1.2.0)", "sphinx-rtd-theme"] -test = ["anyio[trio]", "coverage[toml] (>=7)", "exceptiongroup (>=1.2.0)", "hypothesis (>=4.0)", "psutil (>=5.9)", "pytest (>=7.0)", "pytest-mock (>=3.6.1)", "trustme", "uvloop (>=0.17)"] -trio = ["trio (>=0.23)"] - -[[package]] -name = "certifi" -version = "2024.2.2" -description = "Python package for providing Mozilla's CA Bundle." -optional = false -python-versions = ">=3.6" -files = [ - {file = "certifi-2024.2.2-py3-none-any.whl", hash = "sha256:dc383c07b76109f368f6106eee2b593b04a011ea4d55f652c6ca24a754d1cdd1"}, - {file = "certifi-2024.2.2.tar.gz", hash = "sha256:0569859f95fc761b18b45ef421b1290a0f65f147e92a1e5eb3e635f9a5e4e66f"}, -] - -[[package]] -name = "colorama" -version = "0.4.6" -description = "Cross-platform colored terminal text." -optional = false -python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7" -files = [ - {file = "colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6"}, - {file = "colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44"}, -] - -[[package]] -name = "exceptiongroup" -version = "1.2.1" -description = "Backport of PEP 654 (exception groups)" -optional = false -python-versions = ">=3.7" -files = [ - {file = "exceptiongroup-1.2.1-py3-none-any.whl", hash = "sha256:5258b9ed329c5bbdd31a309f53cbfb0b155341807f6ff7606a1e801a891b29ad"}, - {file = "exceptiongroup-1.2.1.tar.gz", hash = "sha256:a4785e48b045528f5bfe627b6ad554ff32def154f42372786903b7abcfe1aa16"}, -] - -[package.extras] -test = ["pytest (>=6)"] - -[[package]] -name = "h11" -version = "0.14.0" -description = "A pure-Python, bring-your-own-I/O implementation of HTTP/1.1" -optional = false -python-versions = ">=3.7" -files = [ - {file = "h11-0.14.0-py3-none-any.whl", hash = "sha256:e3fe4ac4b851c468cc8363d500db52c2ead036020723024a109d37346efaa761"}, - {file = "h11-0.14.0.tar.gz", hash = "sha256:8f19fbbe99e72420ff35c00b27a34cb9937e902a8b810e2c88300c6f0a3b699d"}, -] - -[[package]] -name = "httpcore" -version = "1.0.5" -description = "A minimal low-level HTTP client." 
-optional = false -python-versions = ">=3.8" -files = [ - {file = "httpcore-1.0.5-py3-none-any.whl", hash = "sha256:421f18bac248b25d310f3cacd198d55b8e6125c107797b609ff9b7a6ba7991b5"}, - {file = "httpcore-1.0.5.tar.gz", hash = "sha256:34a38e2f9291467ee3b44e89dd52615370e152954ba21721378a87b2960f7a61"}, -] - -[package.dependencies] -certifi = "*" -h11 = ">=0.13,<0.15" - -[package.extras] -asyncio = ["anyio (>=4.0,<5.0)"] -http2 = ["h2 (>=3,<5)"] -socks = ["socksio (==1.*)"] -trio = ["trio (>=0.22.0,<0.26.0)"] - -[[package]] -name = "httpx" -version = "0.27.0" -description = "The next generation HTTP client." -optional = false -python-versions = ">=3.8" -files = [ - {file = "httpx-0.27.0-py3-none-any.whl", hash = "sha256:71d5465162c13681bff01ad59b2cc68dd838ea1f10e51574bac27103f00c91a5"}, - {file = "httpx-0.27.0.tar.gz", hash = "sha256:a0cb88a46f32dc874e04ee956e4c2764aba2aa228f650b06788ba6bda2962ab5"}, -] - -[package.dependencies] -anyio = "*" -certifi = "*" -httpcore = "==1.*" -idna = "*" -sniffio = "*" - -[package.extras] -brotli = ["brotli", "brotlicffi"] -cli = ["click (==8.*)", "pygments (==2.*)", "rich (>=10,<14)"] -http2 = ["h2 (>=3,<5)"] -socks = ["socksio (==1.*)"] - -[[package]] -name = "idna" -version = "3.7" -description = "Internationalized Domain Names in Applications (IDNA)" -optional = false -python-versions = ">=3.5" -files = [ - {file = "idna-3.7-py3-none-any.whl", hash = "sha256:82fee1fc78add43492d3a1898bfa6d8a904cc97d8427f683ed8e798d07761aa0"}, - {file = "idna-3.7.tar.gz", hash = "sha256:028ff3aadf0609c1fd278d8ea3089299412a7a8b9bd005dd08b9f8285bcb5cfc"}, -] - -[[package]] -name = "iniconfig" -version = "2.0.0" -description = "brain-dead simple config-ini parsing" -optional = false -python-versions = ">=3.7" -files = [ - {file = "iniconfig-2.0.0-py3-none-any.whl", hash = "sha256:b6a85871a79d2e3b22d2d1b94ac2824226a63c6b741c88f7ae975f18b6778374"}, - {file = "iniconfig-2.0.0.tar.gz", hash = "sha256:2d91e135bf72d31a410b17c16da610a82cb55f6b0477d1a902134b24a455b8b3"}, -] - -[[package]] -name = "mypy" -version = "1.10.0" -description = "Optional static typing for Python" -optional = false -python-versions = ">=3.8" -files = [ - {file = "mypy-1.10.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:da1cbf08fb3b851ab3b9523a884c232774008267b1f83371ace57f412fe308c2"}, - {file = "mypy-1.10.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:12b6bfc1b1a66095ab413160a6e520e1dc076a28f3e22f7fb25ba3b000b4ef99"}, - {file = "mypy-1.10.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9e36fb078cce9904c7989b9693e41cb9711e0600139ce3970c6ef814b6ebc2b2"}, - {file = "mypy-1.10.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:2b0695d605ddcd3eb2f736cd8b4e388288c21e7de85001e9f85df9187f2b50f9"}, - {file = "mypy-1.10.0-cp310-cp310-win_amd64.whl", hash = "sha256:cd777b780312ddb135bceb9bc8722a73ec95e042f911cc279e2ec3c667076051"}, - {file = "mypy-1.10.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:3be66771aa5c97602f382230165b856c231d1277c511c9a8dd058be4784472e1"}, - {file = "mypy-1.10.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:8b2cbaca148d0754a54d44121b5825ae71868c7592a53b7292eeb0f3fdae95ee"}, - {file = "mypy-1.10.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1ec404a7cbe9fc0e92cb0e67f55ce0c025014e26d33e54d9e506a0f2d07fe5de"}, - {file = "mypy-1.10.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:e22e1527dc3d4aa94311d246b59e47f6455b8729f4968765ac1eacf9a4760bc7"}, - {file = "mypy-1.10.0-cp311-cp311-win_amd64.whl", hash 
= "sha256:a87dbfa85971e8d59c9cc1fcf534efe664d8949e4c0b6b44e8ca548e746a8d53"}, - {file = "mypy-1.10.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:a781f6ad4bab20eef8b65174a57e5203f4be627b46291f4589879bf4e257b97b"}, - {file = "mypy-1.10.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:b808e12113505b97d9023b0b5e0c0705a90571c6feefc6f215c1df9381256e30"}, - {file = "mypy-1.10.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8f55583b12156c399dce2df7d16f8a5095291354f1e839c252ec6c0611e86e2e"}, - {file = "mypy-1.10.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:4cf18f9d0efa1b16478c4c129eabec36148032575391095f73cae2e722fcf9d5"}, - {file = "mypy-1.10.0-cp312-cp312-win_amd64.whl", hash = "sha256:bc6ac273b23c6b82da3bb25f4136c4fd42665f17f2cd850771cb600bdd2ebeda"}, - {file = "mypy-1.10.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:9fd50226364cd2737351c79807775136b0abe084433b55b2e29181a4c3c878c0"}, - {file = "mypy-1.10.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:f90cff89eea89273727d8783fef5d4a934be2fdca11b47def50cf5d311aff727"}, - {file = "mypy-1.10.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fcfc70599efde5c67862a07a1aaf50e55bce629ace26bb19dc17cece5dd31ca4"}, - {file = "mypy-1.10.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:075cbf81f3e134eadaf247de187bd604748171d6b79736fa9b6c9685b4083061"}, - {file = "mypy-1.10.0-cp38-cp38-win_amd64.whl", hash = "sha256:3f298531bca95ff615b6e9f2fc0333aae27fa48052903a0ac90215021cdcfa4f"}, - {file = "mypy-1.10.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:fa7ef5244615a2523b56c034becde4e9e3f9b034854c93639adb667ec9ec2976"}, - {file = "mypy-1.10.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:3236a4c8f535a0631f85f5fcdffba71c7feeef76a6002fcba7c1a8e57c8be1ec"}, - {file = "mypy-1.10.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4a2b5cdbb5dd35aa08ea9114436e0d79aceb2f38e32c21684dcf8e24e1e92821"}, - {file = "mypy-1.10.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:92f93b21c0fe73dc00abf91022234c79d793318b8a96faac147cd579c1671746"}, - {file = "mypy-1.10.0-cp39-cp39-win_amd64.whl", hash = "sha256:28d0e038361b45f099cc086d9dd99c15ff14d0188f44ac883010e172ce86c38a"}, - {file = "mypy-1.10.0-py3-none-any.whl", hash = "sha256:f8c083976eb530019175aabadb60921e73b4f45736760826aa1689dda8208aee"}, - {file = "mypy-1.10.0.tar.gz", hash = "sha256:3d087fcbec056c4ee34974da493a826ce316947485cef3901f511848e687c131"}, -] - -[package.dependencies] -mypy-extensions = ">=1.0.0" -tomli = {version = ">=1.1.0", markers = "python_version < \"3.11\""} -typing-extensions = ">=4.1.0" - -[package.extras] -dmypy = ["psutil (>=4.0)"] -install-types = ["pip"] -mypyc = ["setuptools (>=50)"] -reports = ["lxml"] - -[[package]] -name = "mypy-extensions" -version = "1.0.0" -description = "Type system extensions for programs checked with the mypy type checker." 
-optional = false -python-versions = ">=3.5" -files = [ - {file = "mypy_extensions-1.0.0-py3-none-any.whl", hash = "sha256:4392f6c0eb8a5668a69e23d168ffa70f0be9ccfd32b5cc2d26a34ae5b844552d"}, - {file = "mypy_extensions-1.0.0.tar.gz", hash = "sha256:75dbf8955dc00442a438fc4d0666508a9a97b6bd41aa2f0ffe9d2f2725af0782"}, -] - -[[package]] -name = "packaging" -version = "24.0" -description = "Core utilities for Python packages" -optional = false -python-versions = ">=3.7" -files = [ - {file = "packaging-24.0-py3-none-any.whl", hash = "sha256:2ddfb553fdf02fb784c234c7ba6ccc288296ceabec964ad2eae3777778130bc5"}, - {file = "packaging-24.0.tar.gz", hash = "sha256:eb82c5e3e56209074766e6885bb04b8c38a0c015d0a30036ebe7ece34c9989e9"}, -] - -[[package]] -name = "pluggy" -version = "1.5.0" -description = "plugin and hook calling mechanisms for python" -optional = false -python-versions = ">=3.8" -files = [ - {file = "pluggy-1.5.0-py3-none-any.whl", hash = "sha256:44e1ad92c8ca002de6377e165f3e0f1be63266ab4d554740532335b9d75ea669"}, - {file = "pluggy-1.5.0.tar.gz", hash = "sha256:2cffa88e94fdc978c4c574f15f9e59b7f4201d439195c3715ca9e2486f1d0cf1"}, -] - -[package.extras] -dev = ["pre-commit", "tox"] -testing = ["pytest", "pytest-benchmark"] - -[[package]] -name = "pydantic" -version = "2.7.1" -description = "Data validation using Python type hints" -optional = false -python-versions = ">=3.8" -files = [ - {file = "pydantic-2.7.1-py3-none-any.whl", hash = "sha256:e029badca45266732a9a79898a15ae2e8b14840b1eabbb25844be28f0b33f3d5"}, - {file = "pydantic-2.7.1.tar.gz", hash = "sha256:e9dbb5eada8abe4d9ae5f46b9939aead650cd2b68f249bb3a8139dbe125803cc"}, -] - -[package.dependencies] -annotated-types = ">=0.4.0" -pydantic-core = "2.18.2" -typing-extensions = ">=4.6.1" - -[package.extras] -email = ["email-validator (>=2.0.0)"] - -[[package]] -name = "pydantic-core" -version = "2.18.2" -description = "Core functionality for Pydantic validation and serialization" -optional = false -python-versions = ">=3.8" -files = [ - {file = "pydantic_core-2.18.2-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:9e08e867b306f525802df7cd16c44ff5ebbe747ff0ca6cf3fde7f36c05a59a81"}, - {file = "pydantic_core-2.18.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:f0a21cbaa69900cbe1a2e7cad2aa74ac3cf21b10c3efb0fa0b80305274c0e8a2"}, - {file = "pydantic_core-2.18.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0680b1f1f11fda801397de52c36ce38ef1c1dc841a0927a94f226dea29c3ae3d"}, - {file = "pydantic_core-2.18.2-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:95b9d5e72481d3780ba3442eac863eae92ae43a5f3adb5b4d0a1de89d42bb250"}, - {file = "pydantic_core-2.18.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c4fcf5cd9c4b655ad666ca332b9a081112cd7a58a8b5a6ca7a3104bc950f2038"}, - {file = "pydantic_core-2.18.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9b5155ff768083cb1d62f3e143b49a8a3432e6789a3abee8acd005c3c7af1c74"}, - {file = "pydantic_core-2.18.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:553ef617b6836fc7e4df130bb851e32fe357ce36336d897fd6646d6058d980af"}, - {file = "pydantic_core-2.18.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:b89ed9eb7d616ef5714e5590e6cf7f23b02d0d539767d33561e3675d6f9e3857"}, - {file = "pydantic_core-2.18.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:75f7e9488238e920ab6204399ded280dc4c307d034f3924cd7f90a38b1829563"}, - {file = 
"pydantic_core-2.18.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:ef26c9e94a8c04a1b2924149a9cb081836913818e55681722d7f29af88fe7b38"}, - {file = "pydantic_core-2.18.2-cp310-none-win32.whl", hash = "sha256:182245ff6b0039e82b6bb585ed55a64d7c81c560715d1bad0cbad6dfa07b4027"}, - {file = "pydantic_core-2.18.2-cp310-none-win_amd64.whl", hash = "sha256:e23ec367a948b6d812301afc1b13f8094ab7b2c280af66ef450efc357d2ae543"}, - {file = "pydantic_core-2.18.2-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:219da3f096d50a157f33645a1cf31c0ad1fe829a92181dd1311022f986e5fbe3"}, - {file = "pydantic_core-2.18.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:cc1cfd88a64e012b74e94cd00bbe0f9c6df57049c97f02bb07d39e9c852e19a4"}, - {file = "pydantic_core-2.18.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:05b7133a6e6aeb8df37d6f413f7705a37ab4031597f64ab56384c94d98fa0e90"}, - {file = "pydantic_core-2.18.2-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:224c421235f6102e8737032483f43c1a8cfb1d2f45740c44166219599358c2cd"}, - {file = "pydantic_core-2.18.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b14d82cdb934e99dda6d9d60dc84a24379820176cc4a0d123f88df319ae9c150"}, - {file = "pydantic_core-2.18.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2728b01246a3bba6de144f9e3115b532ee44bd6cf39795194fb75491824a1413"}, - {file = "pydantic_core-2.18.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:470b94480bb5ee929f5acba6995251ada5e059a5ef3e0dfc63cca287283ebfa6"}, - {file = "pydantic_core-2.18.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:997abc4df705d1295a42f95b4eec4950a37ad8ae46d913caeee117b6b198811c"}, - {file = "pydantic_core-2.18.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:75250dbc5290e3f1a0f4618db35e51a165186f9034eff158f3d490b3fed9f8a0"}, - {file = "pydantic_core-2.18.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:4456f2dca97c425231d7315737d45239b2b51a50dc2b6f0c2bb181fce6207664"}, - {file = "pydantic_core-2.18.2-cp311-none-win32.whl", hash = "sha256:269322dcc3d8bdb69f054681edff86276b2ff972447863cf34c8b860f5188e2e"}, - {file = "pydantic_core-2.18.2-cp311-none-win_amd64.whl", hash = "sha256:800d60565aec896f25bc3cfa56d2277d52d5182af08162f7954f938c06dc4ee3"}, - {file = "pydantic_core-2.18.2-cp311-none-win_arm64.whl", hash = "sha256:1404c69d6a676245199767ba4f633cce5f4ad4181f9d0ccb0577e1f66cf4c46d"}, - {file = "pydantic_core-2.18.2-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:fb2bd7be70c0fe4dfd32c951bc813d9fe6ebcbfdd15a07527796c8204bd36242"}, - {file = "pydantic_core-2.18.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:6132dd3bd52838acddca05a72aafb6eab6536aa145e923bb50f45e78b7251043"}, - {file = "pydantic_core-2.18.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d7d904828195733c183d20a54230c0df0eb46ec746ea1a666730787353e87182"}, - {file = "pydantic_core-2.18.2-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:c9bd70772c720142be1020eac55f8143a34ec9f82d75a8e7a07852023e46617f"}, - {file = "pydantic_core-2.18.2-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2b8ed04b3582771764538f7ee7001b02e1170223cf9b75dff0bc698fadb00cf3"}, - {file = "pydantic_core-2.18.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e6dac87ddb34aaec85f873d737e9d06a3555a1cc1a8e0c44b7f8d5daeb89d86f"}, - {file = 
"pydantic_core-2.18.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7ca4ae5a27ad7a4ee5170aebce1574b375de390bc01284f87b18d43a3984df72"}, - {file = "pydantic_core-2.18.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:886eec03591b7cf058467a70a87733b35f44707bd86cf64a615584fd72488b7c"}, - {file = "pydantic_core-2.18.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:ca7b0c1f1c983e064caa85f3792dd2fe3526b3505378874afa84baf662e12241"}, - {file = "pydantic_core-2.18.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:4b4356d3538c3649337df4074e81b85f0616b79731fe22dd11b99499b2ebbdf3"}, - {file = "pydantic_core-2.18.2-cp312-none-win32.whl", hash = "sha256:8b172601454f2d7701121bbec3425dd71efcb787a027edf49724c9cefc14c038"}, - {file = "pydantic_core-2.18.2-cp312-none-win_amd64.whl", hash = "sha256:b1bd7e47b1558ea872bd16c8502c414f9e90dcf12f1395129d7bb42a09a95438"}, - {file = "pydantic_core-2.18.2-cp312-none-win_arm64.whl", hash = "sha256:98758d627ff397e752bc339272c14c98199c613f922d4a384ddc07526c86a2ec"}, - {file = "pydantic_core-2.18.2-cp38-cp38-macosx_10_12_x86_64.whl", hash = "sha256:9fdad8e35f278b2c3eb77cbdc5c0a49dada440657bf738d6905ce106dc1de439"}, - {file = "pydantic_core-2.18.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:1d90c3265ae107f91a4f279f4d6f6f1d4907ac76c6868b27dc7fb33688cfb347"}, - {file = "pydantic_core-2.18.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:390193c770399861d8df9670fb0d1874f330c79caaca4642332df7c682bf6b91"}, - {file = "pydantic_core-2.18.2-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:82d5d4d78e4448683cb467897fe24e2b74bb7b973a541ea1dcfec1d3cbce39fb"}, - {file = "pydantic_core-2.18.2-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4774f3184d2ef3e14e8693194f661dea5a4d6ca4e3dc8e39786d33a94865cefd"}, - {file = "pydantic_core-2.18.2-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d4d938ec0adf5167cb335acb25a4ee69a8107e4984f8fbd2e897021d9e4ca21b"}, - {file = "pydantic_core-2.18.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e0e8b1be28239fc64a88a8189d1df7fad8be8c1ae47fcc33e43d4be15f99cc70"}, - {file = "pydantic_core-2.18.2-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:868649da93e5a3d5eacc2b5b3b9235c98ccdbfd443832f31e075f54419e1b96b"}, - {file = "pydantic_core-2.18.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:78363590ef93d5d226ba21a90a03ea89a20738ee5b7da83d771d283fd8a56761"}, - {file = "pydantic_core-2.18.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:852e966fbd035a6468fc0a3496589b45e2208ec7ca95c26470a54daed82a0788"}, - {file = "pydantic_core-2.18.2-cp38-none-win32.whl", hash = "sha256:6a46e22a707e7ad4484ac9ee9f290f9d501df45954184e23fc29408dfad61350"}, - {file = "pydantic_core-2.18.2-cp38-none-win_amd64.whl", hash = "sha256:d91cb5ea8b11607cc757675051f61b3d93f15eca3cefb3e6c704a5d6e8440f4e"}, - {file = "pydantic_core-2.18.2-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:ae0a8a797a5e56c053610fa7be147993fe50960fa43609ff2a9552b0e07013e8"}, - {file = "pydantic_core-2.18.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:042473b6280246b1dbf530559246f6842b56119c2926d1e52b631bdc46075f2a"}, - {file = "pydantic_core-2.18.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1a388a77e629b9ec814c1b1e6b3b595fe521d2cdc625fcca26fbc2d44c816804"}, - {file = "pydantic_core-2.18.2-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", 
hash = "sha256:e25add29b8f3b233ae90ccef2d902d0ae0432eb0d45370fe315d1a5cf231004b"}, - {file = "pydantic_core-2.18.2-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f459a5ce8434614dfd39bbebf1041952ae01da6bed9855008cb33b875cb024c0"}, - {file = "pydantic_core-2.18.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:eff2de745698eb46eeb51193a9f41d67d834d50e424aef27df2fcdee1b153845"}, - {file = "pydantic_core-2.18.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a8309f67285bdfe65c372ea3722b7a5642680f3dba538566340a9d36e920b5f0"}, - {file = "pydantic_core-2.18.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:f93a8a2e3938ff656a7c1bc57193b1319960ac015b6e87d76c76bf14fe0244b4"}, - {file = "pydantic_core-2.18.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:22057013c8c1e272eb8d0eebc796701167d8377441ec894a8fed1af64a0bf399"}, - {file = "pydantic_core-2.18.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:cfeecd1ac6cc1fb2692c3d5110781c965aabd4ec5d32799773ca7b1456ac636b"}, - {file = "pydantic_core-2.18.2-cp39-none-win32.whl", hash = "sha256:0d69b4c2f6bb3e130dba60d34c0845ba31b69babdd3f78f7c0c8fae5021a253e"}, - {file = "pydantic_core-2.18.2-cp39-none-win_amd64.whl", hash = "sha256:d9319e499827271b09b4e411905b24a426b8fb69464dfa1696258f53a3334641"}, - {file = "pydantic_core-2.18.2-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:a1874c6dd4113308bd0eb568418e6114b252afe44319ead2b4081e9b9521fe75"}, - {file = "pydantic_core-2.18.2-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:ccdd111c03bfd3666bd2472b674c6899550e09e9f298954cfc896ab92b5b0e6d"}, - {file = "pydantic_core-2.18.2-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e18609ceaa6eed63753037fc06ebb16041d17d28199ae5aba0052c51449650a9"}, - {file = "pydantic_core-2.18.2-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6e5c584d357c4e2baf0ff7baf44f4994be121e16a2c88918a5817331fc7599d7"}, - {file = "pydantic_core-2.18.2-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:43f0f463cf89ace478de71a318b1b4f05ebc456a9b9300d027b4b57c1a2064fb"}, - {file = "pydantic_core-2.18.2-pp310-pypy310_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:e1b395e58b10b73b07b7cf740d728dd4ff9365ac46c18751bf8b3d8cca8f625a"}, - {file = "pydantic_core-2.18.2-pp310-pypy310_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:0098300eebb1c837271d3d1a2cd2911e7c11b396eac9661655ee524a7f10587b"}, - {file = "pydantic_core-2.18.2-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:36789b70d613fbac0a25bb07ab3d9dba4d2e38af609c020cf4d888d165ee0bf3"}, - {file = "pydantic_core-2.18.2-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:3f9a801e7c8f1ef8718da265bba008fa121243dfe37c1cea17840b0944dfd72c"}, - {file = "pydantic_core-2.18.2-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:3a6515ebc6e69d85502b4951d89131ca4e036078ea35533bb76327f8424531ce"}, - {file = "pydantic_core-2.18.2-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:20aca1e2298c56ececfd8ed159ae4dde2df0781988c97ef77d5c16ff4bd5b400"}, - {file = "pydantic_core-2.18.2-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:223ee893d77a310a0391dca6df00f70bbc2f36a71a895cecd9a0e762dc37b349"}, - {file = "pydantic_core-2.18.2-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:2334ce8c673ee93a1d6a65bd90327588387ba073c17e61bf19b4fd97d688d63c"}, - {file = 
"pydantic_core-2.18.2-pp39-pypy39_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:cbca948f2d14b09d20268cda7b0367723d79063f26c4ffc523af9042cad95592"}, - {file = "pydantic_core-2.18.2-pp39-pypy39_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:b3ef08e20ec49e02d5c6717a91bb5af9b20f1805583cb0adfe9ba2c6b505b5ae"}, - {file = "pydantic_core-2.18.2-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:c6fdc8627910eed0c01aed6a390a252fe3ea6d472ee70fdde56273f198938374"}, - {file = "pydantic_core-2.18.2.tar.gz", hash = "sha256:2e29d20810dfc3043ee13ac7d9e25105799817683348823f305ab3f349b9386e"}, -] - -[package.dependencies] -typing-extensions = ">=4.6.0,<4.7.0 || >4.7.0" - -[[package]] -name = "pytest" -version = "7.4.4" -description = "pytest: simple powerful testing with Python" -optional = false -python-versions = ">=3.7" -files = [ - {file = "pytest-7.4.4-py3-none-any.whl", hash = "sha256:b090cdf5ed60bf4c45261be03239c2c1c22df034fbffe691abe93cd80cea01d8"}, - {file = "pytest-7.4.4.tar.gz", hash = "sha256:2cf0005922c6ace4a3e2ec8b4080eb0d9753fdc93107415332f50ce9e7994280"}, -] - -[package.dependencies] -colorama = {version = "*", markers = "sys_platform == \"win32\""} -exceptiongroup = {version = ">=1.0.0rc8", markers = "python_version < \"3.11\""} -iniconfig = "*" -packaging = "*" -pluggy = ">=0.12,<2.0" -tomli = {version = ">=1.0.0", markers = "python_version < \"3.11\""} - -[package.extras] -testing = ["argcomplete", "attrs (>=19.2.0)", "hypothesis (>=3.56)", "mock", "nose", "pygments (>=2.7.2)", "requests", "setuptools", "xmlschema"] - -[[package]] -name = "pytest-asyncio" -version = "0.23.6" -description = "Pytest support for asyncio" -optional = false -python-versions = ">=3.8" -files = [ - {file = "pytest-asyncio-0.23.6.tar.gz", hash = "sha256:ffe523a89c1c222598c76856e76852b787504ddb72dd5d9b6617ffa8aa2cde5f"}, - {file = "pytest_asyncio-0.23.6-py3-none-any.whl", hash = "sha256:68516fdd1018ac57b846c9846b954f0393b26f094764a28c955eabb0536a4e8a"}, -] - -[package.dependencies] -pytest = ">=7.0.0,<9" - -[package.extras] -docs = ["sphinx (>=5.3)", "sphinx-rtd-theme (>=1.0)"] -testing = ["coverage (>=6.2)", "hypothesis (>=5.7.1)"] - -[[package]] -name = "python-dateutil" -version = "2.9.0.post0" -description = "Extensions to the standard Python datetime module" -optional = false -python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,>=2.7" -files = [ - {file = "python-dateutil-2.9.0.post0.tar.gz", hash = "sha256:37dd54208da7e1cd875388217d5e00ebd4179249f90fb72437e91a35459a0ad3"}, - {file = "python_dateutil-2.9.0.post0-py2.py3-none-any.whl", hash = "sha256:a8b2bc7bffae282281c8140a97d3aa9c14da0b136dfe83f850eea9a5f7470427"}, -] - -[package.dependencies] -six = ">=1.5" - -[[package]] -name = "six" -version = "1.16.0" -description = "Python 2 and 3 compatibility utilities" -optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*" -files = [ - {file = "six-1.16.0-py2.py3-none-any.whl", hash = "sha256:8abb2f1d86890a2dfb989f9a77cfcfd3e47c2a354b01111771326f8aa26e0254"}, - {file = "six-1.16.0.tar.gz", hash = "sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926"}, -] - -[[package]] -name = "sniffio" -version = "1.3.1" -description = "Sniff out which async library your code is running under" -optional = false -python-versions = ">=3.7" -files = [ - {file = "sniffio-1.3.1-py3-none-any.whl", hash = "sha256:2f6da418d1f1e0fddd844478f41680e794e6051915791a034ff65e5f100525a2"}, - {file = "sniffio-1.3.1.tar.gz", hash = "sha256:f4324edc670a0f49750a81b895f35c3adb843cca46f0530f79fc1babb23789dc"}, -] - -[[package]] 
-name = "tomli" -version = "2.0.1" -description = "A lil' TOML parser" -optional = false -python-versions = ">=3.7" -files = [ - {file = "tomli-2.0.1-py3-none-any.whl", hash = "sha256:939de3e7a6161af0c887ef91b7d41a53e7c5a1ca976325f429cb46ea9bc30ecc"}, - {file = "tomli-2.0.1.tar.gz", hash = "sha256:de526c12914f0c550d15924c62d72abc48d6fe7364aa87328337a31007fe8a4f"}, -] - -[[package]] -name = "typing-extensions" -version = "4.11.0" -description = "Backported and Experimental Type Hints for Python 3.8+" -optional = false -python-versions = ">=3.8" -files = [ - {file = "typing_extensions-4.11.0-py3-none-any.whl", hash = "sha256:c1f94d72897edaf4ce775bb7558d5b79d8126906a14ea5ed1635921406c0387a"}, - {file = "typing_extensions-4.11.0.tar.gz", hash = "sha256:83f085bd5ca59c80295fc2a82ab5dac679cbe02b9f33f7d83af68e241bea51b0"}, -] - -[metadata] -lock-version = "2.0" -python-versions = "^3.8" -content-hash = "9265776813ab39c8f5195d29e52270d0eaaeaef435e30903307bd1b52dd0eb95" diff --git a/src/nixtlats/__init__.py b/src/nixtlats/__init__.py deleted file mode 100644 index 9b8d1576..00000000 --- a/src/nixtlats/__init__.py +++ /dev/null @@ -1,45 +0,0 @@ -# This file was auto-generated by Fern from our API Definition. - -from .types import ( - HttpValidationError, - Model, - MultiSeriesAnomaly, - MultiSeriesCrossValidation, - MultiSeriesCrossValidationFewshotLoss, - MultiSeriesCrossValidationFinetuneLoss, - MultiSeriesForecast, - MultiSeriesForecastFewshotLoss, - MultiSeriesForecastFinetuneLoss, - MultiSeriesInput, - MultiSeriesInsampleForecast, - SingleSeriesForecast, - SingleSeriesForecastFewshotLoss, - SingleSeriesForecastFinetuneLoss, - SingleSeriesInsampleForecast, - ValidationError, - ValidationErrorLocItem, -) -from .errors import UnprocessableEntityError -from .version import __version__ - -__all__ = [ - "HttpValidationError", - "Model", - "MultiSeriesAnomaly", - "MultiSeriesCrossValidation", - "MultiSeriesCrossValidationFewshotLoss", - "MultiSeriesCrossValidationFinetuneLoss", - "MultiSeriesForecast", - "MultiSeriesForecastFewshotLoss", - "MultiSeriesForecastFinetuneLoss", - "MultiSeriesInput", - "MultiSeriesInsampleForecast", - "SingleSeriesForecast", - "SingleSeriesForecastFewshotLoss", - "SingleSeriesForecastFinetuneLoss", - "SingleSeriesInsampleForecast", - "UnprocessableEntityError", - "ValidationError", - "ValidationErrorLocItem", - "__version__", -] diff --git a/src/nixtlats/client.py b/src/nixtlats/client.py deleted file mode 100644 index fbcdb293..00000000 --- a/src/nixtlats/client.py +++ /dev/null @@ -1,6751 +0,0 @@ -# This file was auto-generated by Fern from our API Definition. 
- -import typing -import urllib.parse -from json.decoder import JSONDecodeError - -import httpx - -from .core.api_error import ApiError -from .core.client_wrapper import AsyncClientWrapper, SyncClientWrapper -from .core.jsonable_encoder import jsonable_encoder -from .core.pydantic_utilities import pydantic_v1 -from .core.remove_none_from_dict import remove_none_from_dict -from .core.request_options import RequestOptions -from .errors.unprocessable_entity_error import UnprocessableEntityError -from .types.http_validation_error import HttpValidationError -from .types.multi_series_anomaly import MultiSeriesAnomaly -from .types.multi_series_cross_validation import MultiSeriesCrossValidation -from .types.multi_series_forecast import MultiSeriesForecast -from .types.multi_series_insample_forecast import MultiSeriesInsampleForecast -from .types.single_series_forecast import SingleSeriesForecast -from .types.single_series_insample_forecast import SingleSeriesInsampleForecast - -# this is used as the default value for optional parameters -OMIT = typing.cast(typing.Any, ...) - - -class Nixtla: - """ - Use this class to access the different functions within the SDK. You can instantiate any number of clients with different configurations that will propagate to these functions. - - Parameters: - - base_url: str. The base url to use for requests from the client. - - - token: typing.Union[str, typing.Callable[[], str]]. - - - timeout: typing.Optional[float]. The timeout to be used, in seconds, for requests; by default the timeout is 60 seconds, unless a custom httpx client is used, in which case a default is not set. - - - follow_redirects: typing.Optional[bool]. Whether the default httpx client follows redirects or not; this is irrelevant if a custom httpx client is passed in. - - - httpx_client: typing.Optional[httpx.Client]. The httpx client to use for making requests; a preconfigured client is used by default, but this is useful should you want to pass in any custom httpx configuration. - --- - from nixtlats.client import Nixtla - - client = Nixtla( - token="YOUR_TOKEN", - base_url="https://yourhost.com/path/to/api", - ) - """ - - def __init__( - self, - *, - base_url: str, - token: typing.Union[str, typing.Callable[[], str]], - timeout: typing.Optional[float] = None, - follow_redirects: typing.Optional[bool] = False, - httpx_client: typing.Optional[httpx.Client] = None, - ): - _defaulted_timeout = timeout if timeout is not None else 60 if httpx_client is None else None - self._client_wrapper = SyncClientWrapper( - base_url=base_url, - token=token, - httpx_client=httpx_client - if httpx_client is not None - else httpx.Client(timeout=_defaulted_timeout, follow_redirects=follow_redirects) - if follow_redirects is not None - else httpx.Client(timeout=_defaulted_timeout), - timeout=_defaulted_timeout, - ) - - def validate_token(self, *, request_options: typing.Optional[RequestOptions] = None) -> typing.Any: - """ - Parameters: - - request_options: typing.Optional[RequestOptions]. Request-specific configuration.
- --- - from nixtlats.client import Nixtla - - client = Nixtla( - token="YOUR_TOKEN", - base_url="https://yourhost.com/path/to/api", - ) - client.validate_token() - """ - _response = self._client_wrapper.httpx_client.request( - "POST", - urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", "validate_token"), - params=jsonable_encoder( - request_options.get("additional_query_parameters") if request_options is not None else None - ), - json=jsonable_encoder(remove_none_from_dict(request_options.get("additional_body_parameters", {}))) - if request_options is not None - else None, - headers=jsonable_encoder( - remove_none_from_dict( - { - **self._client_wrapper.get_headers(), - **(request_options.get("additional_headers", {}) if request_options is not None else {}), - } - ) - ), - timeout=request_options.get("timeout_in_seconds") - if request_options is not None and request_options.get("timeout_in_seconds") is not None - else self._client_wrapper.get_timeout(), - retries=0, - max_retries=request_options.get("max_retries") if request_options is not None else 0, # type: ignore - ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(typing.Any, _response.json()) # type: ignore - try: - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) - - def validate_token_front(self, *, request_options: typing.Optional[RequestOptions] = None) -> typing.Any: - """ - Parameters: - - request_options: typing.Optional[RequestOptions]. Request-specific configuration. - --- - from nixtlats.client import Nixtla - - client = Nixtla( - token="YOUR_TOKEN", - base_url="https://yourhost.com/path/to/api", - ) - client.validate_token_front() - """ - _response = self._client_wrapper.httpx_client.request( - "POST", - urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", "validate_token_front"), - params=jsonable_encoder( - request_options.get("additional_query_parameters") if request_options is not None else None - ), - json=jsonable_encoder(remove_none_from_dict(request_options.get("additional_body_parameters", {}))) - if request_options is not None - else None, - headers=jsonable_encoder( - remove_none_from_dict( - { - **self._client_wrapper.get_headers(), - **(request_options.get("additional_headers", {}) if request_options is not None else {}), - } - ) - ), - timeout=request_options.get("timeout_in_seconds") - if request_options is not None and request_options.get("timeout_in_seconds") is not None - else self._client_wrapper.get_timeout(), - retries=0, - max_retries=request_options.get("max_retries") if request_options is not None else 0, # type: ignore - ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(typing.Any, _response.json()) # type: ignore - try: - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) - - def health_health_get(self, *, request_options: typing.Optional[RequestOptions] = None) -> typing.Any: - """ - Check if the server is healthy. Used by the readiness probe to check that the server is healthy. - - Parameters: - - request_options: typing.Optional[RequestOptions]. Request-specific configuration.
- --- - from nixtlats.client import Nixtla - - client = Nixtla( - token="YOUR_TOKEN", - base_url="https://yourhost.com/path/to/api", - ) - client.health_health_get() - """ - _response = self._client_wrapper.httpx_client.request( - "GET", - urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", "health"), - params=jsonable_encoder( - request_options.get("additional_query_parameters") if request_options is not None else None - ), - headers=jsonable_encoder( - remove_none_from_dict( - { - **self._client_wrapper.get_headers(), - **(request_options.get("additional_headers", {}) if request_options is not None else {}), - } - ) - ), - timeout=request_options.get("timeout_in_seconds") - if request_options is not None and request_options.get("timeout_in_seconds") is not None - else self._client_wrapper.get_timeout(), - retries=0, - max_retries=request_options.get("max_retries") if request_options is not None else 0, # type: ignore - ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(typing.Any, _response.json()) # type: ignore - try: - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) - - def forecast( - self, *, request: SingleSeriesForecast, request_options: typing.Optional[RequestOptions] = None - ) -> typing.Any: - """ - This endpoint predicts the future values of a single time series based on the provided data. It takes a JSON as an input containing information like the series frequency and historical data. (See below for a full description of the parameters.) The response contains the predicted values based on the input arguments. Get your token for private beta at https://dashboard.nixtla.io - - Parameters: - - request: SingleSeriesForecast. - - - request_options: typing.Optional[RequestOptions]. Request-specific configuration. 
- --- - from nixtlats import Model, SingleSeriesForecast - from nixtlats.client import Nixtla - - client = Nixtla( - token="YOUR_TOKEN", - base_url="https://yourhost.com/path/to/api", - ) - client.forecast( - request=SingleSeriesForecast( - fewshot_steps=0, - model=Model.TIMEGPT_1, - freq="D", - level=[90], - fh=7, - y={ - "2015-12-02": 8.71177264560569, - "2015-12-03": 8.05610965954506, - "2015-12-04": 8.08147504013705, - "2015-12-05": 7.45876269238096, - "2015-12-06": 8.01400499477946, - "2015-12-07": 8.49678638163858, - "2015-12-08": 7.98104975966596, - "2015-12-09": 7.77779262633883, - "2015-12-10": 8.2602342916073, - "2015-12-11": 7.86633892304654, - "2015-12-12": 7.31055015853442, - "2015-12-13": 7.71824095195932, - "2015-12-14": 8.31947369244219, - "2015-12-15": 8.23668532271246, - "2015-12-16": 7.80751004221619, - "2015-12-17": 7.59186171488993, - "2015-12-18": 7.52886925664225, - "2015-12-19": 7.17165682276851, - "2015-12-20": 7.89133075766189, - "2015-12-21": 8.36007143564403, - "2015-12-22": 8.11042723757502, - "2015-12-23": 7.77527584648686, - "2015-12-24": 7.34729970074316, - "2015-12-25": 7.30182234213793, - "2015-12-26": 7.12044437239249, - "2015-12-27": 8.87877607170755, - "2015-12-28": 9.25061821847475, - "2015-12-29": 9.24792513230345, - "2015-12-30": 8.39140318535794, - "2015-12-31": 8.00469951054955, - "2016-01-01": 7.58933582317062, - "2016-01-02": 7.82524529143177, - "2016-01-03": 8.24931374626064, - "2016-01-04": 9.29514097366865, - "2016-01-05": 8.56826646160024, - "2016-01-06": 8.35255436947459, - "2016-01-07": 8.29579811063615, - "2016-01-08": 8.29029259122431, - "2016-01-09": 7.78572089653462, - "2016-01-10": 8.28172399041139, - "2016-01-11": 8.4707303170059, - "2016-01-12": 8.13505390861157, - "2016-01-13": 8.06714903991011, - }, - x={ - "2015-12-02": [0.47914512134886633, 0.15759798422978488], - "2015-12-03": [0.3871397019785533, 0.35449477858215317], - "2015-12-04": [0.9056171938719492, 0.3180404540017956], - "2015-12-05": [0.8341924745940501, 0.1345432562146388], - "2015-12-06": [0.9345407030264767, 0.688343987845517], - "2015-12-07": [0.48729867539182836, 0.4078489909891331], - "2015-12-08": [0.372592361808496, 0.9657378388402957], - "2015-12-09": [0.8246299079349072, 0.39136849723836475], - "2015-12-10": [0.10775659682068173, 0.7158552999785821], - "2015-12-11": [0.8714315742323742, 0.6959379478245031], - "2015-12-12": [0.983802256917285, 0.5028086048974361], - "2015-12-13": [0.1637276788158426, 0.010905206265749978], - "2015-12-14": [0.02385161083272247, 0.6959627057263087], - "2015-12-15": [0.6601897150570875, 0.33970973534268967], - "2015-12-16": [0.050941494525972186, 0.11738703133470063], - "2015-12-17": [0.3959302454356002, 0.8728419968758135], - "2015-12-18": [0.5860088140443799, 0.5731410710012429], - "2015-12-19": [0.3857084524732245, 0.7271245785381963], - "2015-12-20": [0.37342450516631565, 0.17888199947035843], - "2015-12-21": [0.6981516773360145, 0.6983330714339858], - "2015-12-22": [0.08978675718655482, 0.45744085066175544], - "2015-12-23": [0.6879810190692012, 0.7650472745914928], - "2015-12-24": [0.3780817922426982, 0.26541942400861085], - "2015-12-25": [0.36377456228425953, 0.631351987243267], - "2015-12-26": [0.8858056528262828, 0.9929861335485208], - "2015-12-27": [0.7655012724712326, 0.8089415357086998], - "2015-12-28": [0.7802318350754168, 0.7507408790195511], - "2015-12-29": [0.21655713897782725, 0.39237400153061963], - "2015-12-30": [0.1424758142857997, 0.8902624055897677], - "2015-12-31": [0.643477187971131, 0.8789182584456643], - 
"2016-01-01": [0.9369855677078311, 0.609162462729402], - "2016-01-02": [0.0002858612957059581, 0.23697970143653724], - "2016-01-03": [0.16280370690002544, 0.5975722202856767], - "2016-01-04": [0.49310729499396044, 0.30125004072447636], - "2016-01-05": [0.6780776618690909, 0.06790772949375545], - "2016-01-06": [0.14663698589706664, 0.8886978154852969], - "2016-01-07": [0.4422143343461119, 0.9217301518957838], - "2016-01-08": [0.5100515329575456, 0.8433884706466771], - "2016-01-09": [0.9304379891825107, 0.7132184980129253], - "2016-01-10": [0.7611198221207496, 0.08381565314231354], - "2016-01-11": [0.4876994455855225, 0.6305043963566345], - "2016-01-12": [0.3807596362774567, 0.44155005068612574], - "2016-01-13": [0.022917032041831265, 0.8746566219855559], - "2016-01-14": [0.06744988617005221, 0.4867830182954117], - "2016-01-15": [0.2876166788506098, 0.34029767862172733], - "2016-01-16": [0.03136920601698312, 0.7473159671597568], - "2016-01-17": [0.8903106284115768, 0.6144929198235487], - "2016-01-18": [0.7692999747639573, 0.7318695022998711], - "2016-01-19": [0.8265506190532937, 0.8094527295780038], - "2016-01-20": [0.9902157088998235, 0.22917399968565122], - }, - clean_ex_first=True, - ), - ) - """ - _response = self._client_wrapper.httpx_client.request( - "POST", - urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", "forecast"), - params=jsonable_encoder( - request_options.get("additional_query_parameters") if request_options is not None else None - ), - json=jsonable_encoder(request) - if request_options is None or request_options.get("additional_body_parameters") is None - else { - **jsonable_encoder(request), - **(jsonable_encoder(remove_none_from_dict(request_options.get("additional_body_parameters", {})))), - }, - headers=jsonable_encoder( - remove_none_from_dict( - { - **self._client_wrapper.get_headers(), - **(request_options.get("additional_headers", {}) if request_options is not None else {}), - } - ) - ), - timeout=request_options.get("timeout_in_seconds") - if request_options is not None and request_options.get("timeout_in_seconds") is not None - else self._client_wrapper.get_timeout(), - retries=0, - max_retries=request_options.get("max_retries") if request_options is not None else 0, # type: ignore - ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(typing.Any, _response.json()) # type: ignore - if _response.status_code == 422: - raise UnprocessableEntityError( - pydantic_v1.parse_obj_as(HttpValidationError, _response.json()) # type: ignore - ) - try: - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) - - def historic_forecast( - self, *, request: SingleSeriesInsampleForecast, request_options: typing.Optional[RequestOptions] = None - ) -> typing.Any: - """ - Based on the provided data, this endpoint predicts time series data for the in-sample period (historical period). It takes a JSON as an input, including information like the series frequency and the historical data. (See below for a full description of the parameters.) The response contains the predicted values for the historical period. Usually useful for anomaly detection. Get your token for private beta at https://dashboard.nixtla.io. - - Parameters: - - request: SingleSeriesInsampleForecast. - - - request_options: typing.Optional[RequestOptions]. Request-specific configuration. 
- --- - from nixtlats import Model, SingleSeriesInsampleForecast - from nixtlats.client import Nixtla - - client = Nixtla( - token="YOUR_TOKEN", - base_url="https://yourhost.com/path/to/api", - ) - client.historic_forecast( - request=SingleSeriesInsampleForecast( - model=Model.TIMEGPT_1, - freq="D", - level=[90], - y={ - "2015-12-02": 8.71177264560569, - "2015-12-03": 8.05610965954506, - "2015-12-04": 8.08147504013705, - "2015-12-05": 7.45876269238096, - "2015-12-06": 8.01400499477946, - "2015-12-07": 8.49678638163858, - "2015-12-08": 7.98104975966596, - "2015-12-09": 7.77779262633883, - "2015-12-10": 8.2602342916073, - "2015-12-11": 7.86633892304654, - "2015-12-12": 7.31055015853442, - "2015-12-13": 7.71824095195932, - "2015-12-14": 8.31947369244219, - "2015-12-15": 8.23668532271246, - "2015-12-16": 7.80751004221619, - "2015-12-17": 7.59186171488993, - "2015-12-18": 7.52886925664225, - "2015-12-19": 7.17165682276851, - "2015-12-20": 7.89133075766189, - "2015-12-21": 8.36007143564403, - "2015-12-22": 8.11042723757502, - "2015-12-23": 7.77527584648686, - "2015-12-24": 7.34729970074316, - "2015-12-25": 7.30182234213793, - "2015-12-26": 7.12044437239249, - "2015-12-27": 8.87877607170755, - "2015-12-28": 9.25061821847475, - "2015-12-29": 9.24792513230345, - "2015-12-30": 8.39140318535794, - "2015-12-31": 8.00469951054955, - "2016-01-01": 7.58933582317062, - "2016-01-02": 7.82524529143177, - "2016-01-03": 8.24931374626064, - "2016-01-04": 9.29514097366865, - "2016-01-05": 8.56826646160024, - "2016-01-06": 8.35255436947459, - "2016-01-07": 8.29579811063615, - "2016-01-08": 8.29029259122431, - "2016-01-09": 7.78572089653462, - "2016-01-10": 8.28172399041139, - "2016-01-11": 8.4707303170059, - "2016-01-12": 8.13505390861157, - "2016-01-13": 8.06714903991011, - }, - clean_ex_first=True, - ), - ) - """ - _response = self._client_wrapper.httpx_client.request( - "POST", - urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", "historic_forecast"), - params=jsonable_encoder( - request_options.get("additional_query_parameters") if request_options is not None else None - ), - json=jsonable_encoder(request) - if request_options is None or request_options.get("additional_body_parameters") is None - else { - **jsonable_encoder(request), - **(jsonable_encoder(remove_none_from_dict(request_options.get("additional_body_parameters", {})))), - }, - headers=jsonable_encoder( - remove_none_from_dict( - { - **self._client_wrapper.get_headers(), - **(request_options.get("additional_headers", {}) if request_options is not None else {}), - } - ) - ), - timeout=request_options.get("timeout_in_seconds") - if request_options is not None and request_options.get("timeout_in_seconds") is not None - else self._client_wrapper.get_timeout(), - retries=0, - max_retries=request_options.get("max_retries") if request_options is not None else 0, # type: ignore - ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(typing.Any, _response.json()) # type: ignore - if _response.status_code == 422: - raise UnprocessableEntityError( - pydantic_v1.parse_obj_as(HttpValidationError, _response.json()) # type: ignore - ) - try: - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) - - def forecast_multi_series( - self, *, request: MultiSeriesForecast, request_options: typing.Optional[RequestOptions] = None - ) -> typing.Any: - """ - Based on the provided data, 
this endpoint predicts the future values of multiple time series at once. It takes a JSON as an input containing information like the series frequency and historical data. (See below for a full description of the parameters.) The response contains the predicted values for each series based on the input arguments. Get your token for private beta at https://dashboard.nixtla.io. - - Parameters: - - request: MultiSeriesForecast. - - - request_options: typing.Optional[RequestOptions]. Request-specific configuration. - --- - from nixtlats import MultiSeriesForecast - from nixtlats.client import Nixtla - - client = Nixtla( - token="YOUR_TOKEN", - base_url="https://yourhost.com/path/to/api", - ) - client.forecast_multi_series( - request=MultiSeriesForecast(), - ) - """ - _response = self._client_wrapper.httpx_client.request( - "POST", - urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", "forecast_multi_series"), - params=jsonable_encoder( - request_options.get("additional_query_parameters") if request_options is not None else None - ), - json=jsonable_encoder(request) - if request_options is None or request_options.get("additional_body_parameters") is None - else { - **jsonable_encoder(request), - **(jsonable_encoder(remove_none_from_dict(request_options.get("additional_body_parameters", {})))), - }, - headers=jsonable_encoder( - remove_none_from_dict( - { - **self._client_wrapper.get_headers(), - **(request_options.get("additional_headers", {}) if request_options is not None else {}), - } - ) - ), - timeout=request_options.get("timeout_in_seconds") - if request_options is not None and request_options.get("timeout_in_seconds") is not None - else self._client_wrapper.get_timeout(), - retries=0, - max_retries=request_options.get("max_retries") if request_options is not None else 0, # type: ignore - ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(typing.Any, _response.json()) # type: ignore - if _response.status_code == 422: - raise UnprocessableEntityError( - pydantic_v1.parse_obj_as(HttpValidationError, _response.json()) # type: ignore - ) - try: - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) - - def historic_forecast_multi_series( - self, *, request: MultiSeriesInsampleForecast, request_options: typing.Optional[RequestOptions] = None - ) -> typing.Any: - """ - Based on the provided data, this endpoint predicts the in-sample period (historical period) values of multiple time series at once. It takes a JSON as an input containing information like the series frequency and historical data. (See below for a full description of the parameters.) The response contains the predicted values for the historical period. Usually useful for anomaly detection. Get your token for private beta at https://dashboard.nixtla.io. - - Parameters: - - request: MultiSeriesInsampleForecast. - - - request_options: typing.Optional[RequestOptions]. Request-specific configuration. 
- --- - from nixtlats import Model, MultiSeriesInsampleForecast - from nixtlats.client import Nixtla - - client = Nixtla( - token="YOUR_TOKEN", - base_url="https://yourhost.com/path/to/api", - ) - client.historic_forecast_multi_series( - request=MultiSeriesInsampleForecast( - model=Model.TIMEGPT_1, - freq="D", - level=[90], - y={ - "columns": {"0": "unique_id", "1": "ds", "2": "y"}, - "data": { - "0": { - "0": "PeytonManning", - "1": "2015-12-02", - "2": 8.71177264560569, - }, - "1": { - "0": "PeytonManning", - "1": "2015-12-03", - "2": 8.05610965954506, - }, - "2": { - "0": "PeytonManning", - "1": "2015-12-04", - "2": 8.08147504013705, - }, - "3": { - "0": "PeytonManning", - "1": "2015-12-05", - "2": 7.45876269238096, - }, - "4": { - "0": "PeytonManning", - "1": "2015-12-06", - "2": 8.01400499477946, - }, - "5": { - "0": "PeytonManning", - "1": "2015-12-07", - "2": 8.49678638163858, - }, - "6": { - "0": "PeytonManning", - "1": "2015-12-08", - "2": 7.98104975966596, - }, - "7": { - "0": "PeytonManning", - "1": "2015-12-09", - "2": 7.77779262633883, - }, - "8": { - "0": "PeytonManning", - "1": "2015-12-10", - "2": 8.2602342916073, - }, - "9": { - "0": "PeytonManning", - "1": "2015-12-11", - "2": 7.86633892304654, - }, - "10": { - "0": "PeytonManning", - "1": "2015-12-12", - "2": 7.31055015853442, - }, - "11": { - "0": "PeytonManning", - "1": "2015-12-13", - "2": 7.71824095195932, - }, - "12": { - "0": "PeytonManning", - "1": "2015-12-14", - "2": 8.31947369244219, - }, - "13": { - "0": "PeytonManning", - "1": "2015-12-15", - "2": 8.23668532271246, - }, - "14": { - "0": "PeytonManning", - "1": "2015-12-16", - "2": 7.80751004221619, - }, - "15": { - "0": "PeytonManning", - "1": "2015-12-17", - "2": 7.59186171488993, - }, - "16": { - "0": "PeytonManning", - "1": "2015-12-18", - "2": 7.52886925664225, - }, - "17": { - "0": "PeytonManning", - "1": "2015-12-19", - "2": 7.17165682276851, - }, - "18": { - "0": "PeytonManning", - "1": "2015-12-20", - "2": 7.89133075766189, - }, - "19": { - "0": "PeytonManning", - "1": "2015-12-21", - "2": 8.36007143564403, - }, - "20": { - "0": "PeytonManning", - "1": "2015-12-22", - "2": 8.11042723757502, - }, - "21": { - "0": "PeytonManning", - "1": "2015-12-23", - "2": 7.77527584648686, - }, - "22": { - "0": "PeytonManning", - "1": "2015-12-24", - "2": 7.34729970074316, - }, - "23": { - "0": "PeytonManning", - "1": "2015-12-25", - "2": 7.30182234213793, - }, - "24": { - "0": "PeytonManning", - "1": "2015-12-26", - "2": 7.12044437239249, - }, - "25": { - "0": "PeytonManning", - "1": "2015-12-27", - "2": 8.87877607170755, - }, - "26": { - "0": "PeytonManning", - "1": "2015-12-28", - "2": 9.25061821847475, - }, - "27": { - "0": "PeytonManning", - "1": "2015-12-29", - "2": 9.24792513230345, - }, - "28": { - "0": "PeytonManning", - "1": "2015-12-30", - "2": 8.39140318535794, - }, - "29": { - "0": "PeytonManning", - "1": "2015-12-31", - "2": 8.00469951054955, - }, - "30": { - "0": "PeytonManning", - "1": "2016-01-01", - "2": 7.58933582317062, - }, - "31": { - "0": "PeytonManning", - "1": "2016-01-02", - "2": 7.82524529143177, - }, - "32": { - "0": "PeytonManning", - "1": "2016-01-03", - "2": 8.24931374626064, - }, - "33": { - "0": "PeytonManning", - "1": "2016-01-04", - "2": 9.29514097366865, - }, - "34": { - "0": "PeytonManning", - "1": "2016-01-05", - "2": 8.56826646160024, - }, - "35": { - "0": "PeytonManning", - "1": "2016-01-06", - "2": 8.35255436947459, - }, - "36": { - "0": "PeytonManning", - "1": "2016-01-07", - "2": 8.29579811063615, - }, - "37": { - "0": 
"PeytonManning", - "1": "2016-01-08", - "2": 8.29029259122431, - }, - "38": { - "0": "PeytonManning", - "1": "2016-01-09", - "2": 7.78572089653462, - }, - "39": { - "0": "PeytonManning", - "1": "2016-01-10", - "2": 8.28172399041139, - }, - "40": { - "0": "PeytonManning", - "1": "2016-01-11", - "2": 8.4707303170059, - }, - "41": { - "0": "PeytonManning", - "1": "2016-01-12", - "2": 8.13505390861157, - }, - "42": { - "0": "PeytonManning", - "1": "2016-01-13", - "2": 8.06714903991011, - }, - "43": { - "0": "TomBrady", - "1": "2015-12-02", - "2": 8.71177264560569, - }, - "44": { - "0": "TomBrady", - "1": "2015-12-03", - "2": 8.05610965954506, - }, - "45": { - "0": "TomBrady", - "1": "2015-12-04", - "2": 8.08147504013705, - }, - "46": { - "0": "TomBrady", - "1": "2015-12-05", - "2": 7.45876269238096, - }, - "47": { - "0": "TomBrady", - "1": "2015-12-06", - "2": 8.01400499477946, - }, - "48": { - "0": "TomBrady", - "1": "2015-12-07", - "2": 8.49678638163858, - }, - "49": { - "0": "TomBrady", - "1": "2015-12-08", - "2": 7.98104975966596, - }, - "50": { - "0": "TomBrady", - "1": "2015-12-09", - "2": 7.77779262633883, - }, - "51": { - "0": "TomBrady", - "1": "2015-12-10", - "2": 8.2602342916073, - }, - "52": { - "0": "TomBrady", - "1": "2015-12-11", - "2": 7.86633892304654, - }, - "53": { - "0": "TomBrady", - "1": "2015-12-12", - "2": 7.31055015853442, - }, - "54": { - "0": "TomBrady", - "1": "2015-12-13", - "2": 7.71824095195932, - }, - "55": { - "0": "TomBrady", - "1": "2015-12-14", - "2": 8.31947369244219, - }, - "56": { - "0": "TomBrady", - "1": "2015-12-15", - "2": 8.23668532271246, - }, - "57": { - "0": "TomBrady", - "1": "2015-12-16", - "2": 7.80751004221619, - }, - "58": { - "0": "TomBrady", - "1": "2015-12-17", - "2": 7.59186171488993, - }, - "59": { - "0": "TomBrady", - "1": "2015-12-18", - "2": 7.52886925664225, - }, - "60": { - "0": "TomBrady", - "1": "2015-12-19", - "2": 7.17165682276851, - }, - "61": { - "0": "TomBrady", - "1": "2015-12-20", - "2": 7.89133075766189, - }, - "62": { - "0": "TomBrady", - "1": "2015-12-21", - "2": 8.36007143564403, - }, - "63": { - "0": "TomBrady", - "1": "2015-12-22", - "2": 8.11042723757502, - }, - "64": { - "0": "TomBrady", - "1": "2015-12-23", - "2": 7.77527584648686, - }, - "65": { - "0": "TomBrady", - "1": "2015-12-24", - "2": 7.34729970074316, - }, - "66": { - "0": "TomBrady", - "1": "2015-12-25", - "2": 7.30182234213793, - }, - "67": { - "0": "TomBrady", - "1": "2015-12-26", - "2": 7.12044437239249, - }, - "68": { - "0": "TomBrady", - "1": "2015-12-27", - "2": 8.87877607170755, - }, - "69": { - "0": "TomBrady", - "1": "2015-12-28", - "2": 9.25061821847475, - }, - "70": { - "0": "TomBrady", - "1": "2015-12-29", - "2": 9.24792513230345, - }, - "71": { - "0": "TomBrady", - "1": "2015-12-30", - "2": 8.39140318535794, - }, - "72": { - "0": "TomBrady", - "1": "2015-12-31", - "2": 8.00469951054955, - }, - "73": { - "0": "TomBrady", - "1": "2016-01-01", - "2": 7.58933582317062, - }, - "74": { - "0": "TomBrady", - "1": "2016-01-02", - "2": 7.82524529143177, - }, - "75": { - "0": "TomBrady", - "1": "2016-01-03", - "2": 8.24931374626064, - }, - "76": { - "0": "TomBrady", - "1": "2016-01-04", - "2": 9.29514097366865, - }, - "77": { - "0": "TomBrady", - "1": "2016-01-05", - "2": 8.56826646160024, - }, - "78": { - "0": "TomBrady", - "1": "2016-01-06", - "2": 8.35255436947459, - }, - "79": { - "0": "TomBrady", - "1": "2016-01-07", - "2": 8.29579811063615, - }, - "80": { - "0": "TomBrady", - "1": "2016-01-08", - "2": 8.29029259122431, - }, - "81": { - "0": 
"TomBrady", - "1": "2016-01-09", - "2": 7.78572089653462, - }, - "82": { - "0": "TomBrady", - "1": "2016-01-10", - "2": 8.28172399041139, - }, - "83": { - "0": "TomBrady", - "1": "2016-01-11", - "2": 8.4707303170059, - }, - "84": { - "0": "TomBrady", - "1": "2016-01-12", - "2": 8.13505390861157, - }, - "85": { - "0": "TomBrady", - "1": "2016-01-13", - "2": 8.06714903991011, - }, - }, - }, - ), - ) - """ - _response = self._client_wrapper.httpx_client.request( - "POST", - urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", "historic_forecast_multi_series"), - params=jsonable_encoder( - request_options.get("additional_query_parameters") if request_options is not None else None - ), - json=jsonable_encoder(request) - if request_options is None or request_options.get("additional_body_parameters") is None - else { - **jsonable_encoder(request), - **(jsonable_encoder(remove_none_from_dict(request_options.get("additional_body_parameters", {})))), - }, - headers=jsonable_encoder( - remove_none_from_dict( - { - **self._client_wrapper.get_headers(), - **(request_options.get("additional_headers", {}) if request_options is not None else {}), - } - ) - ), - timeout=request_options.get("timeout_in_seconds") - if request_options is not None and request_options.get("timeout_in_seconds") is not None - else self._client_wrapper.get_timeout(), - retries=0, - max_retries=request_options.get("max_retries") if request_options is not None else 0, # type: ignore - ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(typing.Any, _response.json()) # type: ignore - if _response.status_code == 422: - raise UnprocessableEntityError( - pydantic_v1.parse_obj_as(HttpValidationError, _response.json()) # type: ignore - ) - try: - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) - - def anomaly_detection_multi_series( - self, *, request: MultiSeriesAnomaly, request_options: typing.Optional[RequestOptions] = None - ) -> typing.Any: - """ - Based on the provided data, this endpoint detects the anomalies in the historical perdiod of multiple time series at once. It takes a JSON as an input containing information like the series frequency and historical data. (See below for a full description of the parameters.) The response contains a flag indicating if the date has a anomaly and also provides the prediction interval used to define if an observation is an anomaly.Get your token for private beta at https://dashboard.nixtla.io. - - Parameters: - - request: MultiSeriesAnomaly. - - - request_options: typing.Optional[RequestOptions]. Request-specific configuration. 
- --- - from nixtlats import MultiSeriesAnomaly - from nixtlats.client import Nixtla - - client = Nixtla( - token="YOUR_TOKEN", - base_url="https://yourhost.com/path/to/api", - ) - client.anomaly_detection_multi_series( - request=MultiSeriesAnomaly(), - ) - """ - _response = self._client_wrapper.httpx_client.request( - "POST", - urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", "anomaly_detection_multi_series"), - params=jsonable_encoder( - request_options.get("additional_query_parameters") if request_options is not None else None - ), - json=jsonable_encoder(request) - if request_options is None or request_options.get("additional_body_parameters") is None - else { - **jsonable_encoder(request), - **(jsonable_encoder(remove_none_from_dict(request_options.get("additional_body_parameters", {})))), - }, - headers=jsonable_encoder( - remove_none_from_dict( - { - **self._client_wrapper.get_headers(), - **(request_options.get("additional_headers", {}) if request_options is not None else {}), - } - ) - ), - timeout=request_options.get("timeout_in_seconds") - if request_options is not None and request_options.get("timeout_in_seconds") is not None - else self._client_wrapper.get_timeout(), - retries=0, - max_retries=request_options.get("max_retries") if request_options is not None else 0, # type: ignore - ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(typing.Any, _response.json()) # type: ignore - if _response.status_code == 422: - raise UnprocessableEntityError( - pydantic_v1.parse_obj_as(HttpValidationError, _response.json()) # type: ignore - ) - try: - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) - - def cross_validation_multi_series( - self, *, request: MultiSeriesCrossValidation, request_options: typing.Optional[RequestOptions] = None - ) -> typing.Any: - """ - Perform cross-validation for multiple series. - - Parameters: - - request: MultiSeriesCrossValidation. - - - request_options: typing.Optional[RequestOptions]. Request-specific configuration.
- --- - from nixtlats import Model, MultiSeriesCrossValidation, MultiSeriesInput - from nixtlats.client import Nixtla - - client = Nixtla( - token="YOUR_TOKEN", - base_url="https://yourhost.com/path/to/api", - ) - client.cross_validation_multi_series( - request=MultiSeriesCrossValidation( - model=Model.TIMEGPT_1, - freq="D", - level=[99], - y={ - "columns": {"0": "unique_id", "1": "ds", "2": "y"}, - "data": { - "0": { - "0": "PeytonManning", - "1": "2015-12-02", - "2": 8.71177264560569, - }, - "1": { - "0": "PeytonManning", - "1": "2015-12-03", - "2": 8.05610965954506, - }, - "2": { - "0": "PeytonManning", - "1": "2015-12-04", - "2": 8.08147504013705, - }, - "3": { - "0": "PeytonManning", - "1": "2015-12-05", - "2": 7.45876269238096, - }, - "4": { - "0": "PeytonManning", - "1": "2015-12-06", - "2": 8.01400499477946, - }, - "5": { - "0": "PeytonManning", - "1": "2015-12-07", - "2": 8.49678638163858, - }, - "6": { - "0": "PeytonManning", - "1": "2015-12-08", - "2": 7.98104975966596, - }, - "7": { - "0": "PeytonManning", - "1": "2015-12-09", - "2": 7.77779262633883, - }, - "8": { - "0": "PeytonManning", - "1": "2015-12-10", - "2": 8.2602342916073, - }, - "9": { - "0": "PeytonManning", - "1": "2015-12-11", - "2": 7.86633892304654, - }, - "10": { - "0": "PeytonManning", - "1": "2015-12-12", - "2": 7.31055015853442, - }, - "11": { - "0": "PeytonManning", - "1": "2015-12-13", - "2": 7.71824095195932, - }, - "12": { - "0": "PeytonManning", - "1": "2015-12-14", - "2": 8.31947369244219, - }, - "13": { - "0": "PeytonManning", - "1": "2015-12-15", - "2": 8.23668532271246, - }, - "14": { - "0": "PeytonManning", - "1": "2015-12-16", - "2": 7.80751004221619, - }, - "15": { - "0": "PeytonManning", - "1": "2015-12-17", - "2": 7.59186171488993, - }, - "16": { - "0": "PeytonManning", - "1": "2015-12-18", - "2": 7.52886925664225, - }, - "17": { - "0": "PeytonManning", - "1": "2015-12-19", - "2": 7.17165682276851, - }, - "18": { - "0": "PeytonManning", - "1": "2015-12-20", - "2": 7.89133075766189, - }, - "19": { - "0": "PeytonManning", - "1": "2015-12-21", - "2": 8.36007143564403, - }, - "20": { - "0": "PeytonManning", - "1": "2015-12-22", - "2": 8.11042723757502, - }, - "21": { - "0": "PeytonManning", - "1": "2015-12-23", - "2": 7.77527584648686, - }, - "22": { - "0": "PeytonManning", - "1": "2015-12-24", - "2": 7.34729970074316, - }, - "23": { - "0": "PeytonManning", - "1": "2015-12-25", - "2": 7.30182234213793, - }, - "24": { - "0": "PeytonManning", - "1": "2015-12-26", - "2": 7.12044437239249, - }, - "25": { - "0": "PeytonManning", - "1": "2015-12-27", - "2": 8.87877607170755, - }, - "26": { - "0": "PeytonManning", - "1": "2015-12-28", - "2": 9.25061821847475, - }, - "27": { - "0": "PeytonManning", - "1": "2015-12-29", - "2": 9.24792513230345, - }, - "28": { - "0": "PeytonManning", - "1": "2015-12-30", - "2": 8.39140318535794, - }, - "29": { - "0": "PeytonManning", - "1": "2015-12-31", - "2": 8.00469951054955, - }, - "30": { - "0": "PeytonManning", - "1": "2016-01-01", - "2": 7.58933582317062, - }, - "31": { - "0": "PeytonManning", - "1": "2016-01-02", - "2": 7.82524529143177, - }, - "32": { - "0": "PeytonManning", - "1": "2016-01-03", - "2": 8.24931374626064, - }, - "33": { - "0": "PeytonManning", - "1": "2016-01-04", - "2": 9.29514097366865, - }, - "34": { - "0": "PeytonManning", - "1": "2016-01-05", - "2": 8.56826646160024, - }, - "35": { - "0": "PeytonManning", - "1": "2016-01-06", - "2": 8.35255436947459, - }, - "36": { - "0": "PeytonManning", - "1": "2016-01-07", - "2": 8.29579811063615, - }, - "37": 
{ - "0": "PeytonManning", - "1": "2016-01-08", - "2": 8.29029259122431, - }, - "38": { - "0": "PeytonManning", - "1": "2016-01-09", - "2": 7.78572089653462, - }, - "39": { - "0": "PeytonManning", - "1": "2016-01-10", - "2": 8.28172399041139, - }, - "40": { - "0": "PeytonManning", - "1": "2016-01-11", - "2": 8.4707303170059, - }, - "41": { - "0": "PeytonManning", - "1": "2016-01-12", - "2": 8.13505390861157, - }, - "42": { - "0": "PeytonManning", - "1": "2016-01-13", - "2": 8.06714903991011, - }, - "43": { - "0": "TomBrady", - "1": "2015-12-02", - "2": 8.71177264560569, - }, - "44": { - "0": "TomBrady", - "1": "2015-12-03", - "2": 8.05610965954506, - }, - "45": { - "0": "TomBrady", - "1": "2015-12-04", - "2": 8.08147504013705, - }, - "46": { - "0": "TomBrady", - "1": "2015-12-05", - "2": 7.45876269238096, - }, - "47": { - "0": "TomBrady", - "1": "2015-12-06", - "2": 8.01400499477946, - }, - "48": { - "0": "TomBrady", - "1": "2015-12-07", - "2": 8.49678638163858, - }, - "49": { - "0": "TomBrady", - "1": "2015-12-08", - "2": 7.98104975966596, - }, - "50": { - "0": "TomBrady", - "1": "2015-12-09", - "2": 7.77779262633883, - }, - "51": { - "0": "TomBrady", - "1": "2015-12-10", - "2": 8.2602342916073, - }, - "52": { - "0": "TomBrady", - "1": "2015-12-11", - "2": 7.86633892304654, - }, - "53": { - "0": "TomBrady", - "1": "2015-12-12", - "2": 7.31055015853442, - }, - "54": { - "0": "TomBrady", - "1": "2015-12-13", - "2": 7.71824095195932, - }, - "55": { - "0": "TomBrady", - "1": "2015-12-14", - "2": 8.31947369244219, - }, - "56": { - "0": "TomBrady", - "1": "2015-12-15", - "2": 8.23668532271246, - }, - "57": { - "0": "TomBrady", - "1": "2015-12-16", - "2": 7.80751004221619, - }, - "58": { - "0": "TomBrady", - "1": "2015-12-17", - "2": 7.59186171488993, - }, - "59": { - "0": "TomBrady", - "1": "2015-12-18", - "2": 7.52886925664225, - }, - "60": { - "0": "TomBrady", - "1": "2015-12-19", - "2": 7.17165682276851, - }, - "61": { - "0": "TomBrady", - "1": "2015-12-20", - "2": 7.89133075766189, - }, - "62": { - "0": "TomBrady", - "1": "2015-12-21", - "2": 8.36007143564403, - }, - "63": { - "0": "TomBrady", - "1": "2015-12-22", - "2": 8.11042723757502, - }, - "64": { - "0": "TomBrady", - "1": "2015-12-23", - "2": 7.77527584648686, - }, - "65": { - "0": "TomBrady", - "1": "2015-12-24", - "2": 7.34729970074316, - }, - "66": { - "0": "TomBrady", - "1": "2015-12-25", - "2": 7.30182234213793, - }, - "67": { - "0": "TomBrady", - "1": "2015-12-26", - "2": 7.12044437239249, - }, - "68": { - "0": "TomBrady", - "1": "2015-12-27", - "2": 8.87877607170755, - }, - "69": { - "0": "TomBrady", - "1": "2015-12-28", - "2": 9.25061821847475, - }, - "70": { - "0": "TomBrady", - "1": "2015-12-29", - "2": 9.24792513230345, - }, - "71": { - "0": "TomBrady", - "1": "2015-12-30", - "2": 8.39140318535794, - }, - "72": { - "0": "TomBrady", - "1": "2015-12-31", - "2": 8.00469951054955, - }, - "73": { - "0": "TomBrady", - "1": "2016-01-01", - "2": 7.58933582317062, - }, - "74": { - "0": "TomBrady", - "1": "2016-01-02", - "2": 7.82524529143177, - }, - "75": { - "0": "TomBrady", - "1": "2016-01-03", - "2": 8.24931374626064, - }, - "76": { - "0": "TomBrady", - "1": "2016-01-04", - "2": 9.29514097366865, - }, - "77": { - "0": "TomBrady", - "1": "2016-01-05", - "2": 8.56826646160024, - }, - "78": { - "0": "TomBrady", - "1": "2016-01-06", - "2": 8.35255436947459, - }, - "79": { - "0": "TomBrady", - "1": "2016-01-07", - "2": 8.29579811063615, - }, - "80": { - "0": "TomBrady", - "1": "2016-01-08", - "2": 8.29029259122431, - }, - "81": { - 
"0": "TomBrady", - "1": "2016-01-09", - "2": 7.78572089653462, - }, - "82": { - "0": "TomBrady", - "1": "2016-01-10", - "2": 8.28172399041139, - }, - "83": { - "0": "TomBrady", - "1": "2016-01-11", - "2": 8.4707303170059, - }, - "84": { - "0": "TomBrady", - "1": "2016-01-12", - "2": 8.13505390861157, - }, - "85": { - "0": "TomBrady", - "1": "2016-01-13", - "2": 8.06714903991011, - }, - }, - }, - x=MultiSeriesInput( - columns=[ - "unique_id", - "ds", - "normalized_fb_marketing_spend", - "normalized_google_marketing_spend", - ], - data=[ - { - "0": "PeytonManning", - "1": "2015-12-02", - "2": 0.11477860928915096, - "3": 0.5844987825930846, - }, - { - "0": "PeytonManning", - "1": "2015-12-03", - "2": 0.33533486335091345, - "3": 0.35513740030391416, - }, - { - "0": "PeytonManning", - "1": "2015-12-04", - "2": 0.17530305631523613, - "3": 0.7727084723706166, - }, - { - "0": "PeytonManning", - "1": "2015-12-05", - "2": 0.8299454704953892, - "3": 0.367618075472813, - }, - { - "0": "PeytonManning", - "1": "2015-12-06", - "2": 0.15521852990872909, - "3": 0.3003195772529532, - }, - { - "0": "PeytonManning", - "1": "2015-12-07", - "2": 0.14306595403784284, - "3": 0.7070431427763944, - }, - { - "0": "PeytonManning", - "1": "2015-12-08", - "2": 0.19385462445583268, - "3": 0.3548795168204949, - }, - { - "0": "PeytonManning", - "1": "2015-12-09", - "2": 0.13255788839340688, - "3": 0.17898066409986724, - }, - { - "0": "PeytonManning", - "1": "2015-12-10", - "2": 0.1861402159494574, - "3": 0.7109080209067455, - }, - { - "0": "PeytonManning", - "1": "2015-12-11", - "2": 0.6416719826327762, - "3": 0.029753630490418792, - }, - { - "0": "PeytonManning", - "1": "2015-12-12", - "2": 0.07255444629238816, - "3": 0.29036094427412484, - }, - { - "0": "PeytonManning", - "1": "2015-12-13", - "2": 0.5908563921496247, - "3": 0.4391612122175276, - }, - { - "0": "PeytonManning", - "1": "2015-12-14", - "2": 0.4564108681832745, - "3": 0.36666762551686216, - }, - { - "0": "PeytonManning", - "1": "2015-12-15", - "2": 0.5842063222179616, - "3": 0.7725253746268812, - }, - { - "0": "PeytonManning", - "1": "2015-12-16", - "2": 0.7032064831874294, - "3": 0.011741920837716524, - }, - { - "0": "PeytonManning", - "1": "2015-12-17", - "2": 0.1819442499802637, - "3": 0.024847038984041947, - }, - { - "0": "PeytonManning", - "1": "2015-12-18", - "2": 0.0236088273599101, - "3": 0.6750163977742368, - }, - { - "0": "PeytonManning", - "1": "2015-12-19", - "2": 0.05606508858414727, - "3": 0.7476944076745794, - }, - { - "0": "PeytonManning", - "1": "2015-12-20", - "2": 0.23105144001754685, - "3": 0.0975599293231374, - }, - { - "0": "PeytonManning", - "1": "2015-12-21", - "2": 0.046529460623970675, - "3": 0.27870858983273483, - }, - { - "0": "PeytonManning", - "1": "2015-12-22", - "2": 0.8924837450040128, - "3": 0.17445021431427998, - }, - { - "0": "PeytonManning", - "1": "2015-12-23", - "2": 0.7920741814997206, - "3": 0.7697260533294017, - }, - { - "0": "PeytonManning", - "1": "2015-12-24", - "2": 0.8211104441803947, - "3": 0.04784956766271531, - }, - { - "0": "PeytonManning", - "1": "2015-12-25", - "2": 0.007443038001649138, - "3": 0.14607178743713456, - }, - { - "0": "PeytonManning", - "1": "2015-12-26", - "2": 0.007103558258280018, - "3": 0.9906539418801805, - }, - { - "0": "PeytonManning", - "1": "2015-12-27", - "2": 0.6056133597337183, - "3": 0.3016989995056749, - }, - { - "0": "PeytonManning", - "1": "2015-12-28", - "2": 0.888562531622971, - "3": 0.2644957566378502, - }, - { - "0": "PeytonManning", - "1": "2015-12-29", - "2": 
0.6596339635369193, - "3": 0.7132049526406711, - }, - { - "0": "PeytonManning", - "1": "2015-12-30", - "2": 0.28384590920981123, - "3": 0.33014709090685734, - }, - { - "0": "PeytonManning", - "1": "2015-12-31", - "2": 0.8909381460163546, - "3": 0.30893091621071356, - }, - { - "0": "PeytonManning", - "1": "2016-01-01", - "2": 0.40276798240151035, - "3": 0.2879315229860593, - }, - { - "0": "PeytonManning", - "1": "2016-01-02", - "2": 0.4429162268378485, - "3": 0.11018732403878562, - }, - { - "0": "PeytonManning", - "1": "2016-01-03", - "2": 0.3828367947132504, - "3": 0.8222282482537393, - }, - { - "0": "PeytonManning", - "1": "2016-01-04", - "2": 0.058573708628411714, - "3": 0.0882973927279318, - }, - { - "0": "PeytonManning", - "1": "2016-01-05", - "2": 0.47488775903843816, - "3": 0.6966251353738419, - }, - { - "0": "PeytonManning", - "1": "2016-01-06", - "2": 0.5977462692254242, - "3": 0.047135156470394235, - }, - { - "0": "PeytonManning", - "1": "2016-01-07", - "2": 0.3717147185460784, - "3": 0.8692903226725259, - }, - { - "0": "PeytonManning", - "1": "2016-01-08", - "2": 0.9309630632764513, - "3": 0.9735989812217806, - }, - { - "0": "PeytonManning", - "1": "2016-01-09", - "2": 0.16911221748337524, - "3": 0.8789858343082052, - }, - { - "0": "PeytonManning", - "1": "2016-01-10", - "2": 0.27006313791342074, - "3": 0.11164112580033247, - }, - { - "0": "PeytonManning", - "1": "2016-01-11", - "2": 0.7963113076957269, - "3": 0.20769916997452176, - }, - { - "0": "PeytonManning", - "1": "2016-01-12", - "2": 0.6303426283630738, - "3": 0.18090725407449992, - }, - { - "0": "PeytonManning", - "1": "2016-01-13", - "2": 0.8493379197928459, - "3": 0.47689004776049004, - }, - { - "0": "PeytonManning", - "1": "2016-01-14", - "2": 0.23526591138844477, - "3": 0.8789956293235727, - }, - { - "0": "PeytonManning", - "1": "2016-01-15", - "2": 0.007675903871696255, - "3": 0.43933947505823356, - }, - { - "0": "PeytonManning", - "1": "2016-01-16", - "2": 0.47352555302489163, - "3": 0.2152806277171031, - }, - { - "0": "PeytonManning", - "1": "2016-01-17", - "2": 0.15708565708379674, - "3": 0.3691423619421067, - }, - { - "0": "PeytonManning", - "1": "2016-01-18", - "2": 0.1455797785998202, - "3": 0.12414992893989352, - }, - { - "0": "PeytonManning", - "1": "2016-01-19", - "2": 0.18260071049899873, - "3": 0.4700149101718504, - }, - { - "0": "PeytonManning", - "1": "2016-01-20", - "2": 0.815333051511914, - "3": 0.6872087681704713, - }, - { - "0": "TomBrady", - "1": "2015-12-02", - "2": 0.11477860928915096, - "3": 0.5844987825930846, - }, - { - "0": "TomBrady", - "1": "2015-12-03", - "2": 0.33533486335091345, - "3": 0.35513740030391416, - }, - { - "0": "TomBrady", - "1": "2015-12-04", - "2": 0.17530305631523613, - "3": 0.7727084723706166, - }, - { - "0": "TomBrady", - "1": "2015-12-05", - "2": 0.8299454704953892, - "3": 0.367618075472813, - }, - { - "0": "TomBrady", - "1": "2015-12-06", - "2": 0.15521852990872909, - "3": 0.3003195772529532, - }, - { - "0": "TomBrady", - "1": "2015-12-07", - "2": 0.14306595403784284, - "3": 0.7070431427763944, - }, - { - "0": "TomBrady", - "1": "2015-12-08", - "2": 0.19385462445583268, - "3": 0.3548795168204949, - }, - { - "0": "TomBrady", - "1": "2015-12-09", - "2": 0.13255788839340688, - "3": 0.17898066409986724, - }, - { - "0": "TomBrady", - "1": "2015-12-10", - "2": 0.1861402159494574, - "3": 0.7109080209067455, - }, - { - "0": "TomBrady", - "1": "2015-12-11", - "2": 0.6416719826327762, - "3": 0.029753630490418792, - }, - { - "0": "TomBrady", - "1": "2015-12-12", - "2": 
0.07255444629238816, - "3": 0.29036094427412484, - }, - { - "0": "TomBrady", - "1": "2015-12-13", - "2": 0.5908563921496247, - "3": 0.4391612122175276, - }, - { - "0": "TomBrady", - "1": "2015-12-14", - "2": 0.4564108681832745, - "3": 0.36666762551686216, - }, - { - "0": "TomBrady", - "1": "2015-12-15", - "2": 0.5842063222179616, - "3": 0.7725253746268812, - }, - { - "0": "TomBrady", - "1": "2015-12-16", - "2": 0.7032064831874294, - "3": 0.011741920837716524, - }, - { - "0": "TomBrady", - "1": "2015-12-17", - "2": 0.1819442499802637, - "3": 0.024847038984041947, - }, - { - "0": "TomBrady", - "1": "2015-12-18", - "2": 0.0236088273599101, - "3": 0.6750163977742368, - }, - { - "0": "TomBrady", - "1": "2015-12-19", - "2": 0.05606508858414727, - "3": 0.7476944076745794, - }, - { - "0": "TomBrady", - "1": "2015-12-20", - "2": 0.23105144001754685, - "3": 0.0975599293231374, - }, - { - "0": "TomBrady", - "1": "2015-12-21", - "2": 0.046529460623970675, - "3": 0.27870858983273483, - }, - { - "0": "TomBrady", - "1": "2015-12-22", - "2": 0.8924837450040128, - "3": 0.17445021431427998, - }, - { - "0": "TomBrady", - "1": "2015-12-23", - "2": 0.7920741814997206, - "3": 0.7697260533294017, - }, - { - "0": "TomBrady", - "1": "2015-12-24", - "2": 0.8211104441803947, - "3": 0.04784956766271531, - }, - { - "0": "TomBrady", - "1": "2015-12-25", - "2": 0.007443038001649138, - "3": 0.14607178743713456, - }, - { - "0": "TomBrady", - "1": "2015-12-26", - "2": 0.007103558258280018, - "3": 0.9906539418801805, - }, - { - "0": "TomBrady", - "1": "2015-12-27", - "2": 0.6056133597337183, - "3": 0.3016989995056749, - }, - { - "0": "TomBrady", - "1": "2015-12-28", - "2": 0.888562531622971, - "3": 0.2644957566378502, - }, - { - "0": "TomBrady", - "1": "2015-12-29", - "2": 0.6596339635369193, - "3": 0.7132049526406711, - }, - { - "0": "TomBrady", - "1": "2015-12-30", - "2": 0.28384590920981123, - "3": 0.33014709090685734, - }, - { - "0": "TomBrady", - "1": "2015-12-31", - "2": 0.8909381460163546, - "3": 0.30893091621071356, - }, - { - "0": "TomBrady", - "1": "2016-01-01", - "2": 0.40276798240151035, - "3": 0.2879315229860593, - }, - { - "0": "TomBrady", - "1": "2016-01-02", - "2": 0.4429162268378485, - "3": 0.11018732403878562, - }, - { - "0": "TomBrady", - "1": "2016-01-03", - "2": 0.3828367947132504, - "3": 0.8222282482537393, - }, - { - "0": "TomBrady", - "1": "2016-01-04", - "2": 0.058573708628411714, - "3": 0.0882973927279318, - }, - { - "0": "TomBrady", - "1": "2016-01-05", - "2": 0.47488775903843816, - "3": 0.6966251353738419, - }, - { - "0": "TomBrady", - "1": "2016-01-06", - "2": 0.5977462692254242, - "3": 0.047135156470394235, - }, - { - "0": "TomBrady", - "1": "2016-01-07", - "2": 0.3717147185460784, - "3": 0.8692903226725259, - }, - { - "0": "TomBrady", - "1": "2016-01-08", - "2": 0.9309630632764513, - "3": 0.9735989812217806, - }, - { - "0": "TomBrady", - "1": "2016-01-09", - "2": 0.16911221748337524, - "3": 0.8789858343082052, - }, - { - "0": "TomBrady", - "1": "2016-01-10", - "2": 0.27006313791342074, - "3": 0.11164112580033247, - }, - { - "0": "TomBrady", - "1": "2016-01-11", - "2": 0.7963113076957269, - "3": 0.20769916997452176, - }, - { - "0": "TomBrady", - "1": "2016-01-12", - "2": 0.6303426283630738, - "3": 0.18090725407449992, - }, - { - "0": "TomBrady", - "1": "2016-01-13", - "2": 0.8493379197928459, - "3": 0.47689004776049004, - }, - { - "0": "TomBrady", - "1": "2016-01-14", - "2": 0.23526591138844477, - "3": 0.8789956293235727, - }, - { - "0": "TomBrady", - "1": "2016-01-15", - "2": 
0.007675903871696255, - "3": 0.43933947505823356, - }, - { - "0": "TomBrady", - "1": "2016-01-16", - "2": 0.47352555302489163, - "3": 0.2152806277171031, - }, - { - "0": "TomBrady", - "1": "2016-01-17", - "2": 0.15708565708379674, - "3": 0.3691423619421067, - }, - { - "0": "TomBrady", - "1": "2016-01-18", - "2": 0.1455797785998202, - "3": 0.12414992893989352, - }, - { - "0": "TomBrady", - "1": "2016-01-19", - "2": 0.18260071049899873, - "3": 0.4700149101718504, - }, - { - "0": "TomBrady", - "1": "2016-01-20", - "2": 0.815333051511914, - "3": 0.6872087681704713, - }, - ], - ), - ), - ) - """ - _response = self._client_wrapper.httpx_client.request( - "POST", - urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", "cross_validation_multi_series"), - params=jsonable_encoder( - request_options.get("additional_query_parameters") if request_options is not None else None - ), - json=jsonable_encoder(request) - if request_options is None or request_options.get("additional_body_parameters") is None - else { - **jsonable_encoder(request), - **(jsonable_encoder(remove_none_from_dict(request_options.get("additional_body_parameters", {})))), - }, - headers=jsonable_encoder( - remove_none_from_dict( - { - **self._client_wrapper.get_headers(), - **(request_options.get("additional_headers", {}) if request_options is not None else {}), - } - ) - ), - timeout=request_options.get("timeout_in_seconds") - if request_options is not None and request_options.get("timeout_in_seconds") is not None - else self._client_wrapper.get_timeout(), - retries=0, - max_retries=request_options.get("max_retries") if request_options is not None else 0, # type: ignore - ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(typing.Any, _response.json()) # type: ignore - if _response.status_code == 422: - raise UnprocessableEntityError( - pydantic_v1.parse_obj_as(HttpValidationError, _response.json()) # type: ignore - ) - try: - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) - - def model_input_size( - self, *, request: SingleSeriesForecast, request_options: typing.Optional[RequestOptions] = None - ) -> typing.Any: - """ - Parameters: - - request: SingleSeriesForecast. - - - request_options: typing.Optional[RequestOptions]. Request-specific configuration. 
- --- - from nixtlats import SingleSeriesForecast - from nixtlats.client import Nixtla - - client = Nixtla( - token="YOUR_TOKEN", - base_url="https://yourhost.com/path/to/api", - ) - client.model_input_size( - request=SingleSeriesForecast(), - ) - """ - _response = self._client_wrapper.httpx_client.request( - "POST", - urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", "model_input_size"), - params=jsonable_encoder( - request_options.get("additional_query_parameters") if request_options is not None else None - ), - json=jsonable_encoder(request) - if request_options is None or request_options.get("additional_body_parameters") is None - else { - **jsonable_encoder(request), - **(jsonable_encoder(remove_none_from_dict(request_options.get("additional_body_parameters", {})))), - }, - headers=jsonable_encoder( - remove_none_from_dict( - { - **self._client_wrapper.get_headers(), - **(request_options.get("additional_headers", {}) if request_options is not None else {}), - } - ) - ), - timeout=request_options.get("timeout_in_seconds") - if request_options is not None and request_options.get("timeout_in_seconds") is not None - else self._client_wrapper.get_timeout(), - retries=0, - max_retries=request_options.get("max_retries") if request_options is not None else 0, # type: ignore - ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(typing.Any, _response.json()) # type: ignore - if _response.status_code == 422: - raise UnprocessableEntityError( - pydantic_v1.parse_obj_as(HttpValidationError, _response.json()) # type: ignore - ) - try: - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) - - def model_params( - self, *, request: SingleSeriesForecast, request_options: typing.Optional[RequestOptions] = None - ) -> typing.Any: - """ - Parameters: - - request: SingleSeriesForecast. - - - request_options: typing.Optional[RequestOptions]. Request-specific configuration. 
- --- - from nixtlats import SingleSeriesForecast - from nixtlats.client import Nixtla - - client = Nixtla( - token="YOUR_TOKEN", - base_url="https://yourhost.com/path/to/api", - ) - client.model_params( - request=SingleSeriesForecast(), - ) - """ - _response = self._client_wrapper.httpx_client.request( - "POST", - urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", "model_params"), - params=jsonable_encoder( - request_options.get("additional_query_parameters") if request_options is not None else None - ), - json=jsonable_encoder(request) - if request_options is None or request_options.get("additional_body_parameters") is None - else { - **jsonable_encoder(request), - **(jsonable_encoder(remove_none_from_dict(request_options.get("additional_body_parameters", {})))), - }, - headers=jsonable_encoder( - remove_none_from_dict( - { - **self._client_wrapper.get_headers(), - **(request_options.get("additional_headers", {}) if request_options is not None else {}), - } - ) - ), - timeout=request_options.get("timeout_in_seconds") - if request_options is not None and request_options.get("timeout_in_seconds") is not None - else self._client_wrapper.get_timeout(), - retries=0, - max_retries=request_options.get("max_retries") if request_options is not None else 0, # type: ignore - ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(typing.Any, _response.json()) # type: ignore - if _response.status_code == 422: - raise UnprocessableEntityError( - pydantic_v1.parse_obj_as(HttpValidationError, _response.json()) # type: ignore - ) - try: - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) - - def timegpt( - self, *, request: SingleSeriesForecast, request_options: typing.Optional[RequestOptions] = None - ) -> typing.Any: - """ - This endpoint predicts the future values of a single time series based on the provided data. It takes a JSON as an input containing information like the series frequency and historical data. (See below for a full description of the parameters.) The response contains the predicted values based on the input arguments. Get your token for private beta at https://dashboard.nixtla.io - - Parameters: - - request: SingleSeriesForecast. - - - request_options: typing.Optional[RequestOptions]. Request-specific configuration. 
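For readers who want to see what this generated wrapper actually sends over the wire, here is a minimal sketch of the equivalent raw request. The endpoint path and default 60-second timeout come from the client code in this diff; the Bearer authorization scheme and the assumption that the JSON field names match the SDK parameter names shown in the example below are inferences, not confirmed by the diff itself.

```python
import httpx

# Hypothetical deployment values; substitute your own.
BASE_URL = "https://yourhost.com/path/to/api"
TOKEN = "YOUR_TOKEN"

# A minimal single-series body mirroring the SingleSeriesForecast example below:
# daily frequency, a 7-step horizon, one 90% prediction interval, and a
# {date: value} history (truncated here for brevity).
payload = {
    "freq": "D",
    "fh": 7,
    "level": [90],
    "y": {
        "2015-12-02": 8.71177264560569,
        "2015-12-03": 8.05610965954506,
        "2015-12-04": 8.08147504013705,
    },
    "clean_ex_first": True,
}

response = httpx.post(
    f"{BASE_URL}/timegpt",
    json=payload,
    headers={"Authorization": f"Bearer {TOKEN}"},  # assumed auth scheme
    timeout=60,  # matches the client's default timeout
)
response.raise_for_status()
print(response.json())
```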
- --- - from nixtlats import Model, SingleSeriesForecast - from nixtlats.client import Nixtla - - client = Nixtla( - token="YOUR_TOKEN", - base_url="https://yourhost.com/path/to/api", - ) - client.timegpt( - request=SingleSeriesForecast( - fewshot_steps=0, - model=Model.TIMEGPT_1, - freq="D", - level=[90], - fh=7, - y={ - "2015-12-02": 8.71177264560569, - "2015-12-03": 8.05610965954506, - "2015-12-04": 8.08147504013705, - "2015-12-05": 7.45876269238096, - "2015-12-06": 8.01400499477946, - "2015-12-07": 8.49678638163858, - "2015-12-08": 7.98104975966596, - "2015-12-09": 7.77779262633883, - "2015-12-10": 8.2602342916073, - "2015-12-11": 7.86633892304654, - "2015-12-12": 7.31055015853442, - "2015-12-13": 7.71824095195932, - "2015-12-14": 8.31947369244219, - "2015-12-15": 8.23668532271246, - "2015-12-16": 7.80751004221619, - "2015-12-17": 7.59186171488993, - "2015-12-18": 7.52886925664225, - "2015-12-19": 7.17165682276851, - "2015-12-20": 7.89133075766189, - "2015-12-21": 8.36007143564403, - "2015-12-22": 8.11042723757502, - "2015-12-23": 7.77527584648686, - "2015-12-24": 7.34729970074316, - "2015-12-25": 7.30182234213793, - "2015-12-26": 7.12044437239249, - "2015-12-27": 8.87877607170755, - "2015-12-28": 9.25061821847475, - "2015-12-29": 9.24792513230345, - "2015-12-30": 8.39140318535794, - "2015-12-31": 8.00469951054955, - "2016-01-01": 7.58933582317062, - "2016-01-02": 7.82524529143177, - "2016-01-03": 8.24931374626064, - "2016-01-04": 9.29514097366865, - "2016-01-05": 8.56826646160024, - "2016-01-06": 8.35255436947459, - "2016-01-07": 8.29579811063615, - "2016-01-08": 8.29029259122431, - "2016-01-09": 7.78572089653462, - "2016-01-10": 8.28172399041139, - "2016-01-11": 8.4707303170059, - "2016-01-12": 8.13505390861157, - "2016-01-13": 8.06714903991011, - }, - x={ - "2015-12-02": [0.09323074669638598, 0.6368254238538134], - "2015-12-03": [0.3112324026372236, 0.6889811850504678], - "2015-12-04": [0.481377116596614, 0.9499072761820243], - "2015-12-05": [0.828714746663481, 0.8041572577576312], - "2015-12-06": [0.9057716575675051, 0.9704840572309358], - "2015-12-07": [0.11973559067683959, 0.5511723211494531], - "2015-12-08": [0.7606320950967038, 0.6502204496427739], - "2015-12-09": [0.6674402019606482, 0.5371174184068592], - "2015-12-10": [0.6309756549373285, 0.5962737337684483], - "2015-12-11": [0.43906877315373927, 0.1654105249520328], - "2015-12-12": [0.04035053656768495, 0.015327409476712961], - "2015-12-13": [0.24377685055222176, 0.8627561902830486], - "2015-12-14": [0.4681463148103986, 0.7889604220806076], - "2015-12-15": [0.08345764193688965, 0.1003789877662995], - "2015-12-16": [0.4771178995736667, 0.4958892103475401], - "2015-12-17": [0.3486535805107692, 0.1148378356945654], - "2015-12-18": [0.6630786017711142, 0.3849924472769859], - "2015-12-19": [0.011778418994099704, 0.5115520644698133], - "2015-12-20": [0.9038837166773512, 0.8848353753020387], - "2015-12-21": [0.7843282827835409, 0.3103093475671188], - "2015-12-22": [0.25362104487399484, 0.9254139777164822], - "2015-12-23": [0.30107456907444907, 0.33020953364803796], - "2015-12-24": [0.027255284501599086, 0.9447565679813503], - "2015-12-25": [0.40549024123597277, 0.9460884659190596], - "2015-12-26": [0.5016171568983224, 0.7067171277571931], - "2015-12-27": [0.827190957653689, 0.4005053404479477], - "2015-12-28": [0.2596591659030716, 0.47506589627248297], - "2015-12-29": [0.49720915846888825, 0.6751702910147392], - "2015-12-30": [0.6674937132903789, 0.9931963725210304], - "2015-12-31": [0.9291459144099821, 0.43747139612392505], - 
"2016-01-01": [0.9582822834991531, 0.04487218641374102], - "2016-01-02": [0.029270976123749515, 0.23398035651852978], - "2016-01-03": [0.515109021575783, 0.808477728186397], - "2016-01-04": [0.5435684835160831, 0.07870797706843025], - "2016-01-05": [0.7621319274444724, 0.8516085958801328], - "2016-01-06": [0.26156943890310125, 0.5111017026367417], - "2016-01-07": [0.0564861467325366, 0.12697067125791017], - "2016-01-08": [0.37440546078742454, 0.2012358008441526], - "2016-01-09": [0.29688542060907375, 0.43556021285702873], - "2016-01-10": [0.9898730907589448, 0.7713248129524874], - "2016-01-11": [0.49238285777903246, 0.7197147811618813], - "2016-01-12": [0.8318652201289694, 0.6091414491883265], - "2016-01-13": [0.02343720965025453, 0.7961211448973647], - "2016-01-14": [0.7633021263247786, 0.018469346610634263], - "2016-01-15": [0.493855721362772, 0.7451084649371831], - "2016-01-16": [0.21962262556226464, 0.9274272764531087], - "2016-01-17": [0.885379459035442, 0.3507870357790486], - "2016-01-18": [0.6460546060324733, 0.8225983771798888], - "2016-01-19": [0.3604601800609517, 0.4588944740666544], - "2016-01-20": [0.4191777260908942, 0.2500836388909915], - }, - clean_ex_first=True, - ), - ) - """ - _response = self._client_wrapper.httpx_client.request( - "POST", - urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", "timegpt"), - params=jsonable_encoder( - request_options.get("additional_query_parameters") if request_options is not None else None - ), - json=jsonable_encoder(request) - if request_options is None or request_options.get("additional_body_parameters") is None - else { - **jsonable_encoder(request), - **(jsonable_encoder(remove_none_from_dict(request_options.get("additional_body_parameters", {})))), - }, - headers=jsonable_encoder( - remove_none_from_dict( - { - **self._client_wrapper.get_headers(), - **(request_options.get("additional_headers", {}) if request_options is not None else {}), - } - ) - ), - timeout=request_options.get("timeout_in_seconds") - if request_options is not None and request_options.get("timeout_in_seconds") is not None - else self._client_wrapper.get_timeout(), - retries=0, - max_retries=request_options.get("max_retries") if request_options is not None else 0, # type: ignore - ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(typing.Any, _response.json()) # type: ignore - if _response.status_code == 422: - raise UnprocessableEntityError( - pydantic_v1.parse_obj_as(HttpValidationError, _response.json()) # type: ignore - ) - try: - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) - - def timegpt_historic( - self, *, request: SingleSeriesInsampleForecast, request_options: typing.Optional[RequestOptions] = None - ) -> typing.Any: - """ - Based on the provided data, this endpoint predicts time series data for the in-sample period (historical period). It takes a JSON as an input, including information like the series frequency and the historical data. (See below for a full description of the parameters.) The response contains the predicted values for the historical period. Usually useful for anomaly detection. Get your token for private beta at https://dashboard.nixtla.io. - - Parameters: - - request: SingleSeriesInsampleForecast. - - - request_options: typing.Optional[RequestOptions]. Request-specific configuration. 
- --- - from nixtlats import Model, SingleSeriesInsampleForecast - from nixtlats.client import Nixtla - - client = Nixtla( - token="YOUR_TOKEN", - base_url="https://yourhost.com/path/to/api", - ) - client.timegpt_historic( - request=SingleSeriesInsampleForecast( - model=Model.TIMEGPT_1, - freq="D", - level=[90], - y={ - "2015-12-02": 8.71177264560569, - "2015-12-03": 8.05610965954506, - "2015-12-04": 8.08147504013705, - "2015-12-05": 7.45876269238096, - "2015-12-06": 8.01400499477946, - "2015-12-07": 8.49678638163858, - "2015-12-08": 7.98104975966596, - "2015-12-09": 7.77779262633883, - "2015-12-10": 8.2602342916073, - "2015-12-11": 7.86633892304654, - "2015-12-12": 7.31055015853442, - "2015-12-13": 7.71824095195932, - "2015-12-14": 8.31947369244219, - "2015-12-15": 8.23668532271246, - "2015-12-16": 7.80751004221619, - "2015-12-17": 7.59186171488993, - "2015-12-18": 7.52886925664225, - "2015-12-19": 7.17165682276851, - "2015-12-20": 7.89133075766189, - "2015-12-21": 8.36007143564403, - "2015-12-22": 8.11042723757502, - "2015-12-23": 7.77527584648686, - "2015-12-24": 7.34729970074316, - "2015-12-25": 7.30182234213793, - "2015-12-26": 7.12044437239249, - "2015-12-27": 8.87877607170755, - "2015-12-28": 9.25061821847475, - "2015-12-29": 9.24792513230345, - "2015-12-30": 8.39140318535794, - "2015-12-31": 8.00469951054955, - "2016-01-01": 7.58933582317062, - "2016-01-02": 7.82524529143177, - "2016-01-03": 8.24931374626064, - "2016-01-04": 9.29514097366865, - "2016-01-05": 8.56826646160024, - "2016-01-06": 8.35255436947459, - "2016-01-07": 8.29579811063615, - "2016-01-08": 8.29029259122431, - "2016-01-09": 7.78572089653462, - "2016-01-10": 8.28172399041139, - "2016-01-11": 8.4707303170059, - "2016-01-12": 8.13505390861157, - "2016-01-13": 8.06714903991011, - }, - clean_ex_first=True, - ), - ) - """ - _response = self._client_wrapper.httpx_client.request( - "POST", - urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", "timegpt_historic"), - params=jsonable_encoder( - request_options.get("additional_query_parameters") if request_options is not None else None - ), - json=jsonable_encoder(request) - if request_options is None or request_options.get("additional_body_parameters") is None - else { - **jsonable_encoder(request), - **(jsonable_encoder(remove_none_from_dict(request_options.get("additional_body_parameters", {})))), - }, - headers=jsonable_encoder( - remove_none_from_dict( - { - **self._client_wrapper.get_headers(), - **(request_options.get("additional_headers", {}) if request_options is not None else {}), - } - ) - ), - timeout=request_options.get("timeout_in_seconds") - if request_options is not None and request_options.get("timeout_in_seconds") is not None - else self._client_wrapper.get_timeout(), - retries=0, - max_retries=request_options.get("max_retries") if request_options is not None else 0, # type: ignore - ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(typing.Any, _response.json()) # type: ignore - if _response.status_code == 422: - raise UnprocessableEntityError( - pydantic_v1.parse_obj_as(HttpValidationError, _response.json()) # type: ignore - ) - try: - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) - - def timegpt_multi_series( - self, *, request: MultiSeriesForecast, request_options: typing.Optional[RequestOptions] = None - ) -> typing.Any: - """ - Based on the provided data, 
this endpoint predicts the future values of multiple time series at once. It takes a JSON as an input containing information like the series frequency and historical data. (See below for a full description of the parameters.) The response contains the predicted values for each series based on the input arguments. Get your token for private beta at https://dashboard.nixtla.io. - - Parameters: - - request: MultiSeriesForecast. - - - request_options: typing.Optional[RequestOptions]. Request-specific configuration. - --- - from nixtlats import MultiSeriesForecast - from nixtlats.client import Nixtla - - client = Nixtla( - token="YOUR_TOKEN", - base_url="https://yourhost.com/path/to/api", - ) - client.timegpt_multi_series( - request=MultiSeriesForecast(), - ) - """ - _response = self._client_wrapper.httpx_client.request( - "POST", - urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", "timegpt_multi_series"), - params=jsonable_encoder( - request_options.get("additional_query_parameters") if request_options is not None else None - ), - json=jsonable_encoder(request) - if request_options is None or request_options.get("additional_body_parameters") is None - else { - **jsonable_encoder(request), - **(jsonable_encoder(remove_none_from_dict(request_options.get("additional_body_parameters", {})))), - }, - headers=jsonable_encoder( - remove_none_from_dict( - { - **self._client_wrapper.get_headers(), - **(request_options.get("additional_headers", {}) if request_options is not None else {}), - } - ) - ), - timeout=request_options.get("timeout_in_seconds") - if request_options is not None and request_options.get("timeout_in_seconds") is not None - else self._client_wrapper.get_timeout(), - retries=0, - max_retries=request_options.get("max_retries") if request_options is not None else 0, # type: ignore - ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(typing.Any, _response.json()) # type: ignore - if _response.status_code == 422: - raise UnprocessableEntityError( - pydantic_v1.parse_obj_as(HttpValidationError, _response.json()) # type: ignore - ) - try: - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) - - def timegpt_multi_series_historic( - self, *, request: MultiSeriesInsampleForecast, request_options: typing.Optional[RequestOptions] = None - ) -> typing.Any: - """ - Based on the provided data, this endpoint predicts the in-sample period (historical period) values of multiple time series at once. It takes a JSON as an input containing information like the series frequency and historical data. (See below for a full description of the parameters.) The response contains the predicted values for the historical period. Usually useful for anomaly detection. Get your token for private beta at https://dashboard.nixtla.io. - - Parameters: - - request: MultiSeriesInsampleForecast. - - - request_options: typing.Optional[RequestOptions]. Request-specific configuration. 
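The multi-series example that follows serializes a DataFrame-like table into dicts keyed by string column and row positions. A sketch of producing that shape with pandas — the wire format is inferred from the example itself, so treat the helper below as an assumption rather than an official serializer:

```python
import pandas as pd

def to_positional_payload(df: pd.DataFrame) -> dict:
    # Serialize a (unique_id, ds, y) frame into the {"columns": ..., "data": ...}
    # shape used in the example below: string positional keys on both axes.
    return {
        "columns": {str(i): name for i, name in enumerate(df.columns)},
        "data": {
            str(r): {str(c): df.iat[r, c] for c in range(df.shape[1])}
            for r in range(len(df))
        },
    }

df = pd.DataFrame(
    {
        "unique_id": ["PeytonManning", "PeytonManning"],
        "ds": ["2015-12-02", "2015-12-03"],
        "y": [8.71177264560569, 8.05610965954506],
    }
)
payload_y = to_positional_payload(df)  # candidate value for the request's y field
```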
- --- - from nixtlats import Model, MultiSeriesInsampleForecast - from nixtlats.client import Nixtla - - client = Nixtla( - token="YOUR_TOKEN", - base_url="https://yourhost.com/path/to/api", - ) - client.timegpt_multi_series_historic( - request=MultiSeriesInsampleForecast( - model=Model.TIMEGPT_1, - freq="D", - level=[90], - y={ - "columns": {"0": "unique_id", "1": "ds", "2": "y"}, - "data": { - "0": { - "0": "PeytonManning", - "1": "2015-12-02", - "2": 8.71177264560569, - }, - "1": { - "0": "PeytonManning", - "1": "2015-12-03", - "2": 8.05610965954506, - }, - "2": { - "0": "PeytonManning", - "1": "2015-12-04", - "2": 8.08147504013705, - }, - "3": { - "0": "PeytonManning", - "1": "2015-12-05", - "2": 7.45876269238096, - }, - "4": { - "0": "PeytonManning", - "1": "2015-12-06", - "2": 8.01400499477946, - }, - "5": { - "0": "PeytonManning", - "1": "2015-12-07", - "2": 8.49678638163858, - }, - "6": { - "0": "PeytonManning", - "1": "2015-12-08", - "2": 7.98104975966596, - }, - "7": { - "0": "PeytonManning", - "1": "2015-12-09", - "2": 7.77779262633883, - }, - "8": { - "0": "PeytonManning", - "1": "2015-12-10", - "2": 8.2602342916073, - }, - "9": { - "0": "PeytonManning", - "1": "2015-12-11", - "2": 7.86633892304654, - }, - "10": { - "0": "PeytonManning", - "1": "2015-12-12", - "2": 7.31055015853442, - }, - "11": { - "0": "PeytonManning", - "1": "2015-12-13", - "2": 7.71824095195932, - }, - "12": { - "0": "PeytonManning", - "1": "2015-12-14", - "2": 8.31947369244219, - }, - "13": { - "0": "PeytonManning", - "1": "2015-12-15", - "2": 8.23668532271246, - }, - "14": { - "0": "PeytonManning", - "1": "2015-12-16", - "2": 7.80751004221619, - }, - "15": { - "0": "PeytonManning", - "1": "2015-12-17", - "2": 7.59186171488993, - }, - "16": { - "0": "PeytonManning", - "1": "2015-12-18", - "2": 7.52886925664225, - }, - "17": { - "0": "PeytonManning", - "1": "2015-12-19", - "2": 7.17165682276851, - }, - "18": { - "0": "PeytonManning", - "1": "2015-12-20", - "2": 7.89133075766189, - }, - "19": { - "0": "PeytonManning", - "1": "2015-12-21", - "2": 8.36007143564403, - }, - "20": { - "0": "PeytonManning", - "1": "2015-12-22", - "2": 8.11042723757502, - }, - "21": { - "0": "PeytonManning", - "1": "2015-12-23", - "2": 7.77527584648686, - }, - "22": { - "0": "PeytonManning", - "1": "2015-12-24", - "2": 7.34729970074316, - }, - "23": { - "0": "PeytonManning", - "1": "2015-12-25", - "2": 7.30182234213793, - }, - "24": { - "0": "PeytonManning", - "1": "2015-12-26", - "2": 7.12044437239249, - }, - "25": { - "0": "PeytonManning", - "1": "2015-12-27", - "2": 8.87877607170755, - }, - "26": { - "0": "PeytonManning", - "1": "2015-12-28", - "2": 9.25061821847475, - }, - "27": { - "0": "PeytonManning", - "1": "2015-12-29", - "2": 9.24792513230345, - }, - "28": { - "0": "PeytonManning", - "1": "2015-12-30", - "2": 8.39140318535794, - }, - "29": { - "0": "PeytonManning", - "1": "2015-12-31", - "2": 8.00469951054955, - }, - "30": { - "0": "PeytonManning", - "1": "2016-01-01", - "2": 7.58933582317062, - }, - "31": { - "0": "PeytonManning", - "1": "2016-01-02", - "2": 7.82524529143177, - }, - "32": { - "0": "PeytonManning", - "1": "2016-01-03", - "2": 8.24931374626064, - }, - "33": { - "0": "PeytonManning", - "1": "2016-01-04", - "2": 9.29514097366865, - }, - "34": { - "0": "PeytonManning", - "1": "2016-01-05", - "2": 8.56826646160024, - }, - "35": { - "0": "PeytonManning", - "1": "2016-01-06", - "2": 8.35255436947459, - }, - "36": { - "0": "PeytonManning", - "1": "2016-01-07", - "2": 8.29579811063615, - }, - "37": { - "0": 
"PeytonManning", - "1": "2016-01-08", - "2": 8.29029259122431, - }, - "38": { - "0": "PeytonManning", - "1": "2016-01-09", - "2": 7.78572089653462, - }, - "39": { - "0": "PeytonManning", - "1": "2016-01-10", - "2": 8.28172399041139, - }, - "40": { - "0": "PeytonManning", - "1": "2016-01-11", - "2": 8.4707303170059, - }, - "41": { - "0": "PeytonManning", - "1": "2016-01-12", - "2": 8.13505390861157, - }, - "42": { - "0": "PeytonManning", - "1": "2016-01-13", - "2": 8.06714903991011, - }, - "43": { - "0": "TomBrady", - "1": "2015-12-02", - "2": 8.71177264560569, - }, - "44": { - "0": "TomBrady", - "1": "2015-12-03", - "2": 8.05610965954506, - }, - "45": { - "0": "TomBrady", - "1": "2015-12-04", - "2": 8.08147504013705, - }, - "46": { - "0": "TomBrady", - "1": "2015-12-05", - "2": 7.45876269238096, - }, - "47": { - "0": "TomBrady", - "1": "2015-12-06", - "2": 8.01400499477946, - }, - "48": { - "0": "TomBrady", - "1": "2015-12-07", - "2": 8.49678638163858, - }, - "49": { - "0": "TomBrady", - "1": "2015-12-08", - "2": 7.98104975966596, - }, - "50": { - "0": "TomBrady", - "1": "2015-12-09", - "2": 7.77779262633883, - }, - "51": { - "0": "TomBrady", - "1": "2015-12-10", - "2": 8.2602342916073, - }, - "52": { - "0": "TomBrady", - "1": "2015-12-11", - "2": 7.86633892304654, - }, - "53": { - "0": "TomBrady", - "1": "2015-12-12", - "2": 7.31055015853442, - }, - "54": { - "0": "TomBrady", - "1": "2015-12-13", - "2": 7.71824095195932, - }, - "55": { - "0": "TomBrady", - "1": "2015-12-14", - "2": 8.31947369244219, - }, - "56": { - "0": "TomBrady", - "1": "2015-12-15", - "2": 8.23668532271246, - }, - "57": { - "0": "TomBrady", - "1": "2015-12-16", - "2": 7.80751004221619, - }, - "58": { - "0": "TomBrady", - "1": "2015-12-17", - "2": 7.59186171488993, - }, - "59": { - "0": "TomBrady", - "1": "2015-12-18", - "2": 7.52886925664225, - }, - "60": { - "0": "TomBrady", - "1": "2015-12-19", - "2": 7.17165682276851, - }, - "61": { - "0": "TomBrady", - "1": "2015-12-20", - "2": 7.89133075766189, - }, - "62": { - "0": "TomBrady", - "1": "2015-12-21", - "2": 8.36007143564403, - }, - "63": { - "0": "TomBrady", - "1": "2015-12-22", - "2": 8.11042723757502, - }, - "64": { - "0": "TomBrady", - "1": "2015-12-23", - "2": 7.77527584648686, - }, - "65": { - "0": "TomBrady", - "1": "2015-12-24", - "2": 7.34729970074316, - }, - "66": { - "0": "TomBrady", - "1": "2015-12-25", - "2": 7.30182234213793, - }, - "67": { - "0": "TomBrady", - "1": "2015-12-26", - "2": 7.12044437239249, - }, - "68": { - "0": "TomBrady", - "1": "2015-12-27", - "2": 8.87877607170755, - }, - "69": { - "0": "TomBrady", - "1": "2015-12-28", - "2": 9.25061821847475, - }, - "70": { - "0": "TomBrady", - "1": "2015-12-29", - "2": 9.24792513230345, - }, - "71": { - "0": "TomBrady", - "1": "2015-12-30", - "2": 8.39140318535794, - }, - "72": { - "0": "TomBrady", - "1": "2015-12-31", - "2": 8.00469951054955, - }, - "73": { - "0": "TomBrady", - "1": "2016-01-01", - "2": 7.58933582317062, - }, - "74": { - "0": "TomBrady", - "1": "2016-01-02", - "2": 7.82524529143177, - }, - "75": { - "0": "TomBrady", - "1": "2016-01-03", - "2": 8.24931374626064, - }, - "76": { - "0": "TomBrady", - "1": "2016-01-04", - "2": 9.29514097366865, - }, - "77": { - "0": "TomBrady", - "1": "2016-01-05", - "2": 8.56826646160024, - }, - "78": { - "0": "TomBrady", - "1": "2016-01-06", - "2": 8.35255436947459, - }, - "79": { - "0": "TomBrady", - "1": "2016-01-07", - "2": 8.29579811063615, - }, - "80": { - "0": "TomBrady", - "1": "2016-01-08", - "2": 8.29029259122431, - }, - "81": { - "0": 
"TomBrady", - "1": "2016-01-09", - "2": 7.78572089653462, - }, - "82": { - "0": "TomBrady", - "1": "2016-01-10", - "2": 8.28172399041139, - }, - "83": { - "0": "TomBrady", - "1": "2016-01-11", - "2": 8.4707303170059, - }, - "84": { - "0": "TomBrady", - "1": "2016-01-12", - "2": 8.13505390861157, - }, - "85": { - "0": "TomBrady", - "1": "2016-01-13", - "2": 8.06714903991011, - }, - }, - }, - ), - ) - """ - _response = self._client_wrapper.httpx_client.request( - "POST", - urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", "timegpt_multi_series_historic"), - params=jsonable_encoder( - request_options.get("additional_query_parameters") if request_options is not None else None - ), - json=jsonable_encoder(request) - if request_options is None or request_options.get("additional_body_parameters") is None - else { - **jsonable_encoder(request), - **(jsonable_encoder(remove_none_from_dict(request_options.get("additional_body_parameters", {})))), - }, - headers=jsonable_encoder( - remove_none_from_dict( - { - **self._client_wrapper.get_headers(), - **(request_options.get("additional_headers", {}) if request_options is not None else {}), - } - ) - ), - timeout=request_options.get("timeout_in_seconds") - if request_options is not None and request_options.get("timeout_in_seconds") is not None - else self._client_wrapper.get_timeout(), - retries=0, - max_retries=request_options.get("max_retries") if request_options is not None else 0, # type: ignore - ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(typing.Any, _response.json()) # type: ignore - if _response.status_code == 422: - raise UnprocessableEntityError( - pydantic_v1.parse_obj_as(HttpValidationError, _response.json()) # type: ignore - ) - try: - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) - - def timegpt_multi_series_anomalies( - self, *, request: MultiSeriesAnomaly, request_options: typing.Optional[RequestOptions] = None - ) -> typing.Any: - """ - Based on the provided data, this endpoint detects the anomalies in the historical perdiod of multiple time series at once. It takes a JSON as an input containing information like the series frequency and historical data. (See below for a full description of the parameters.) The response contains a flag indicating if the date has a anomaly and also provides the prediction interval used to define if an observation is an anomaly.Get your token for private beta at https://dashboard.nixtla.io. - - Parameters: - - request: MultiSeriesAnomaly. - - - request_options: typing.Optional[RequestOptions]. Request-specific configuration. 
- --- - from nixtlats import MultiSeriesAnomaly - from nixtlats.client import Nixtla - - client = Nixtla( - token="YOUR_TOKEN", - base_url="https://yourhost.com/path/to/api", - ) - client.timegpt_multi_series_anomalies( - request=MultiSeriesAnomaly(), - ) - """ - _response = self._client_wrapper.httpx_client.request( - "POST", - urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", "timegpt_multi_series_anomalies"), - params=jsonable_encoder( - request_options.get("additional_query_parameters") if request_options is not None else None - ), - json=jsonable_encoder(request) - if request_options is None or request_options.get("additional_body_parameters") is None - else { - **jsonable_encoder(request), - **(jsonable_encoder(remove_none_from_dict(request_options.get("additional_body_parameters", {})))), - }, - headers=jsonable_encoder( - remove_none_from_dict( - { - **self._client_wrapper.get_headers(), - **(request_options.get("additional_headers", {}) if request_options is not None else {}), - } - ) - ), - timeout=request_options.get("timeout_in_seconds") - if request_options is not None and request_options.get("timeout_in_seconds") is not None - else self._client_wrapper.get_timeout(), - retries=0, - max_retries=request_options.get("max_retries") if request_options is not None else 0, # type: ignore - ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(typing.Any, _response.json()) # type: ignore - if _response.status_code == 422: - raise UnprocessableEntityError( - pydantic_v1.parse_obj_as(HttpValidationError, _response.json()) # type: ignore - ) - try: - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) - - def timegpt_multi_series_cross_validation( - self, *, request: MultiSeriesCrossValidation, request_options: typing.Optional[RequestOptions] = None - ) -> typing.Any: - """ - Perform Cross Validation for multiple series - - Parameters: - - request: MultiSeriesCrossValidation. - - - request_options: typing.Optional[RequestOptions]. Request-specific configuration. 
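Every method threads an optional request_options mapping into the outgoing request; the bodies above show it being consulted for extra query parameters, body fields, headers, a per-call timeout, and a retry count. A sketch of overriding a few of these on one call, assuming (as the .get(...) calls suggest) that RequestOptions behaves like a plain mapping:

```python
from nixtlats import MultiSeriesCrossValidation
from nixtlats.client import Nixtla

client = Nixtla(token="YOUR_TOKEN", base_url="https://yourhost.com/path/to/api")
client.timegpt_multi_series_cross_validation(
    request=MultiSeriesCrossValidation(),
    request_options={
        "timeout_in_seconds": 120,  # per-call override of the client-level timeout
        "max_retries": 2,           # forwarded to the wrapper's httpx client
        "additional_headers": {"X-Request-Id": "demo-123"},  # hypothetical tracing header
    },
)
```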
- --- - from nixtlats import MultiSeriesCrossValidation - from nixtlats.client import Nixtla - - client = Nixtla( - token="YOUR_TOKEN", - base_url="https://yourhost.com/path/to/api", - ) - client.timegpt_multi_series_cross_validation( - request=MultiSeriesCrossValidation(), - ) - """ - _response = self._client_wrapper.httpx_client.request( - "POST", - urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", "timegpt_multi_series_cross_validation"), - params=jsonable_encoder( - request_options.get("additional_query_parameters") if request_options is not None else None - ), - json=jsonable_encoder(request) - if request_options is None or request_options.get("additional_body_parameters") is None - else { - **jsonable_encoder(request), - **(jsonable_encoder(remove_none_from_dict(request_options.get("additional_body_parameters", {})))), - }, - headers=jsonable_encoder( - remove_none_from_dict( - { - **self._client_wrapper.get_headers(), - **(request_options.get("additional_headers", {}) if request_options is not None else {}), - } - ) - ), - timeout=request_options.get("timeout_in_seconds") - if request_options is not None and request_options.get("timeout_in_seconds") is not None - else self._client_wrapper.get_timeout(), - retries=0, - max_retries=request_options.get("max_retries") if request_options is not None else 0, # type: ignore - ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(typing.Any, _response.json()) # type: ignore - if _response.status_code == 422: - raise UnprocessableEntityError( - pydantic_v1.parse_obj_as(HttpValidationError, _response.json()) # type: ignore - ) - try: - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) - - def timegpt_input_size( - self, *, request: SingleSeriesForecast, request_options: typing.Optional[RequestOptions] = None - ) -> typing.Any: - """ - Parameters: - - request: SingleSeriesForecast. - - - request_options: typing.Optional[RequestOptions]. Request-specific configuration. 
- --- - from nixtlats import SingleSeriesForecast - from nixtlats.client import Nixtla - - client = Nixtla( - token="YOUR_TOKEN", - base_url="https://yourhost.com/path/to/api", - ) - client.timegpt_input_size( - request=SingleSeriesForecast(), - ) - """ - _response = self._client_wrapper.httpx_client.request( - "POST", - urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", "timegpt_input_size"), - params=jsonable_encoder( - request_options.get("additional_query_parameters") if request_options is not None else None - ), - json=jsonable_encoder(request) - if request_options is None or request_options.get("additional_body_parameters") is None - else { - **jsonable_encoder(request), - **(jsonable_encoder(remove_none_from_dict(request_options.get("additional_body_parameters", {})))), - }, - headers=jsonable_encoder( - remove_none_from_dict( - { - **self._client_wrapper.get_headers(), - **(request_options.get("additional_headers", {}) if request_options is not None else {}), - } - ) - ), - timeout=request_options.get("timeout_in_seconds") - if request_options is not None and request_options.get("timeout_in_seconds") is not None - else self._client_wrapper.get_timeout(), - retries=0, - max_retries=request_options.get("max_retries") if request_options is not None else 0, # type: ignore - ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(typing.Any, _response.json()) # type: ignore - if _response.status_code == 422: - raise UnprocessableEntityError( - pydantic_v1.parse_obj_as(HttpValidationError, _response.json()) # type: ignore - ) - try: - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) - - def timegpt_model_params( - self, *, request: SingleSeriesForecast, request_options: typing.Optional[RequestOptions] = None - ) -> typing.Any: - """ - Parameters: - - request: SingleSeriesForecast. - - - request_options: typing.Optional[RequestOptions]. Request-specific configuration. 
- --- - from nixtlats import SingleSeriesForecast - from nixtlats.client import Nixtla - - client = Nixtla( - token="YOUR_TOKEN", - base_url="https://yourhost.com/path/to/api", - ) - client.timegpt_model_params( - request=SingleSeriesForecast(), - ) - """ - _response = self._client_wrapper.httpx_client.request( - "POST", - urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", "timegpt_model_params"), - params=jsonable_encoder( - request_options.get("additional_query_parameters") if request_options is not None else None - ), - json=jsonable_encoder(request) - if request_options is None or request_options.get("additional_body_parameters") is None - else { - **jsonable_encoder(request), - **(jsonable_encoder(remove_none_from_dict(request_options.get("additional_body_parameters", {})))), - }, - headers=jsonable_encoder( - remove_none_from_dict( - { - **self._client_wrapper.get_headers(), - **(request_options.get("additional_headers", {}) if request_options is not None else {}), - } - ) - ), - timeout=request_options.get("timeout_in_seconds") - if request_options is not None and request_options.get("timeout_in_seconds") is not None - else self._client_wrapper.get_timeout(), - retries=0, - max_retries=request_options.get("max_retries") if request_options is not None else 0, # type: ignore - ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(typing.Any, _response.json()) # type: ignore - if _response.status_code == 422: - raise UnprocessableEntityError( - pydantic_v1.parse_obj_as(HttpValidationError, _response.json()) # type: ignore - ) - try: - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) - - -class AsyncNixtla: - """ - Use this class to access the different functions within the SDK. You can instantiate any number of clients with different configurations that will propagate to these functions. - - Parameters: - - base_url: str. The base url to use for requests from the client. - - - token: typing.Union[str, typing.Callable[[], str]]. - - - timeout: typing.Optional[float]. The timeout to be used, in seconds, for requests; by default the timeout is 60 seconds, unless a custom httpx client is used, in which case a default is not set. - - - follow_redirects: typing.Optional[bool]. Whether the default httpx client follows redirects or not; this is irrelevant if a custom httpx client is passed in. - - - httpx_client: typing.Optional[httpx.AsyncClient]. The httpx client to use for making requests, a preconfigured client is used by default, however this is useful should you want to pass in any custom httpx configuration. 
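The async client mirrors the sync surface one-to-one; the only difference for callers is awaiting each method inside an event loop. A minimal sketch, using validate_token (shown in this diff) as the cheapest round trip:

```python
import asyncio

from nixtlats.client import AsyncNixtla

async def main() -> None:
    client = AsyncNixtla(
        token="YOUR_TOKEN",
        base_url="https://yourhost.com/path/to/api",
    )
    # Any endpoint method can be awaited; validate_token is a lightweight probe.
    print(await client.validate_token())

asyncio.run(main())
```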
- --- - from nixtlats.client import AsyncNixtla - - client = AsyncNixtla( - token="YOUR_TOKEN", - base_url="https://yourhost.com/path/to/api", - ) - """ - - def __init__( - self, - *, - base_url: str, - token: typing.Union[str, typing.Callable[[], str]], - timeout: typing.Optional[float] = None, - follow_redirects: typing.Optional[bool] = False, - httpx_client: typing.Optional[httpx.AsyncClient] = None, - ): - _defaulted_timeout = timeout if timeout is not None else 60 if httpx_client is None else None - self._client_wrapper = AsyncClientWrapper( - base_url=base_url, - token=token, - httpx_client=httpx_client - if httpx_client is not None - else httpx.AsyncClient(timeout=_defaulted_timeout, follow_redirects=follow_redirects) - if follow_redirects is not None - else httpx.AsyncClient(timeout=_defaulted_timeout), - timeout=_defaulted_timeout, - ) - - async def validate_token(self, *, request_options: typing.Optional[RequestOptions] = None) -> typing.Any: - """ - Parameters: - - request_options: typing.Optional[RequestOptions]. Request-specific configuration. - --- - from nixtlats.client import AsyncNixtla - - client = AsyncNixtla( - token="YOUR_TOKEN", - base_url="https://yourhost.com/path/to/api", - ) - await client.validate_token() - """ - _response = await self._client_wrapper.httpx_client.request( - "POST", - urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", "validate_token"), - params=jsonable_encoder( - request_options.get("additional_query_parameters") if request_options is not None else None - ), - json=jsonable_encoder(remove_none_from_dict(request_options.get("additional_body_parameters", {}))) - if request_options is not None - else None, - headers=jsonable_encoder( - remove_none_from_dict( - { - **self._client_wrapper.get_headers(), - **(request_options.get("additional_headers", {}) if request_options is not None else {}), - } - ) - ), - timeout=request_options.get("timeout_in_seconds") - if request_options is not None and request_options.get("timeout_in_seconds") is not None - else self._client_wrapper.get_timeout(), - retries=0, - max_retries=request_options.get("max_retries") if request_options is not None else 0, # type: ignore - ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(typing.Any, _response.json()) # type: ignore - try: - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) - - async def validate_token_front(self, *, request_options: typing.Optional[RequestOptions] = None) -> typing.Any: - """ - Parameters: - - request_options: typing.Optional[RequestOptions]. Request-specific configuration. 
- --- - from nixtlats.client import AsyncNixtla - - client = AsyncNixtla( - token="YOUR_TOKEN", - base_url="https://yourhost.com/path/to/api", - ) - await client.validate_token_front() - """ - _response = await self._client_wrapper.httpx_client.request( - "POST", - urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", "validate_token_front"), - params=jsonable_encoder( - request_options.get("additional_query_parameters") if request_options is not None else None - ), - json=jsonable_encoder(remove_none_from_dict(request_options.get("additional_body_parameters", {}))) - if request_options is not None - else None, - headers=jsonable_encoder( - remove_none_from_dict( - { - **self._client_wrapper.get_headers(), - **(request_options.get("additional_headers", {}) if request_options is not None else {}), - } - ) - ), - timeout=request_options.get("timeout_in_seconds") - if request_options is not None and request_options.get("timeout_in_seconds") is not None - else self._client_wrapper.get_timeout(), - retries=0, - max_retries=request_options.get("max_retries") if request_options is not None else 0, # type: ignore - ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(typing.Any, _response.json()) # type: ignore - try: - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) - - async def health_health_get(self, *, request_options: typing.Optional[RequestOptions] = None) -> typing.Any: - """ - Check if the server is healthy. Used by the readiness probe. - - Parameters: - - request_options: typing.Optional[RequestOptions]. Request-specific configuration. - --- - from nixtlats.client import AsyncNixtla - - client = AsyncNixtla( - token="YOUR_TOKEN", - base_url="https://yourhost.com/path/to/api", - ) - await client.health_health_get() - """ - _response = await self._client_wrapper.httpx_client.request( - "GET", - urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", "health"), - params=jsonable_encoder( - request_options.get("additional_query_parameters") if request_options is not None else None - ), - headers=jsonable_encoder( - remove_none_from_dict( - { - **self._client_wrapper.get_headers(), - **(request_options.get("additional_headers", {}) if request_options is not None else {}), - } - ) - ), - timeout=request_options.get("timeout_in_seconds") - if request_options is not None and request_options.get("timeout_in_seconds") is not None - else self._client_wrapper.get_timeout(), - retries=0, - max_retries=request_options.get("max_retries") if request_options is not None else 0, # type: ignore - ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(typing.Any, _response.json()) # type: ignore - try: - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) - - async def forecast( - self, *, request: SingleSeriesForecast, request_options: typing.Optional[RequestOptions] = None - ) -> typing.Any: - """ - This endpoint predicts the future values of a single time series based on the provided data. It takes a JSON as an input containing information like the series frequency and historical data. (See below for a full description of the parameters.) The response contains the predicted values based on the input arguments. 
Get your token for private beta at https://dashboard.nixtla.io - - Parameters: - - request: SingleSeriesForecast. - - - request_options: typing.Optional[RequestOptions]. Request-specific configuration. - --- - from nixtlats import Model, SingleSeriesForecast - from nixtlats.client import AsyncNixtla - - client = AsyncNixtla( - token="YOUR_TOKEN", - base_url="https://yourhost.com/path/to/api", - ) - await client.forecast( - request=SingleSeriesForecast( - fewshot_steps=0, - model=Model.TIMEGPT_1, - freq="D", - level=[90], - fh=7, - y={ - "2015-12-02": 8.71177264560569, - "2015-12-03": 8.05610965954506, - "2015-12-04": 8.08147504013705, - "2015-12-05": 7.45876269238096, - "2015-12-06": 8.01400499477946, - "2015-12-07": 8.49678638163858, - "2015-12-08": 7.98104975966596, - "2015-12-09": 7.77779262633883, - "2015-12-10": 8.2602342916073, - "2015-12-11": 7.86633892304654, - "2015-12-12": 7.31055015853442, - "2015-12-13": 7.71824095195932, - "2015-12-14": 8.31947369244219, - "2015-12-15": 8.23668532271246, - "2015-12-16": 7.80751004221619, - "2015-12-17": 7.59186171488993, - "2015-12-18": 7.52886925664225, - "2015-12-19": 7.17165682276851, - "2015-12-20": 7.89133075766189, - "2015-12-21": 8.36007143564403, - "2015-12-22": 8.11042723757502, - "2015-12-23": 7.77527584648686, - "2015-12-24": 7.34729970074316, - "2015-12-25": 7.30182234213793, - "2015-12-26": 7.12044437239249, - "2015-12-27": 8.87877607170755, - "2015-12-28": 9.25061821847475, - "2015-12-29": 9.24792513230345, - "2015-12-30": 8.39140318535794, - "2015-12-31": 8.00469951054955, - "2016-01-01": 7.58933582317062, - "2016-01-02": 7.82524529143177, - "2016-01-03": 8.24931374626064, - "2016-01-04": 9.29514097366865, - "2016-01-05": 8.56826646160024, - "2016-01-06": 8.35255436947459, - "2016-01-07": 8.29579811063615, - "2016-01-08": 8.29029259122431, - "2016-01-09": 7.78572089653462, - "2016-01-10": 8.28172399041139, - "2016-01-11": 8.4707303170059, - "2016-01-12": 8.13505390861157, - "2016-01-13": 8.06714903991011, - }, - x={ - "2015-12-02": [0.47914512134886633, 0.15759798422978488], - "2015-12-03": [0.3871397019785533, 0.35449477858215317], - "2015-12-04": [0.9056171938719492, 0.3180404540017956], - "2015-12-05": [0.8341924745940501, 0.1345432562146388], - "2015-12-06": [0.9345407030264767, 0.688343987845517], - "2015-12-07": [0.48729867539182836, 0.4078489909891331], - "2015-12-08": [0.372592361808496, 0.9657378388402957], - "2015-12-09": [0.8246299079349072, 0.39136849723836475], - "2015-12-10": [0.10775659682068173, 0.7158552999785821], - "2015-12-11": [0.8714315742323742, 0.6959379478245031], - "2015-12-12": [0.983802256917285, 0.5028086048974361], - "2015-12-13": [0.1637276788158426, 0.010905206265749978], - "2015-12-14": [0.02385161083272247, 0.6959627057263087], - "2015-12-15": [0.6601897150570875, 0.33970973534268967], - "2015-12-16": [0.050941494525972186, 0.11738703133470063], - "2015-12-17": [0.3959302454356002, 0.8728419968758135], - "2015-12-18": [0.5860088140443799, 0.5731410710012429], - "2015-12-19": [0.3857084524732245, 0.7271245785381963], - "2015-12-20": [0.37342450516631565, 0.17888199947035843], - "2015-12-21": [0.6981516773360145, 0.6983330714339858], - "2015-12-22": [0.08978675718655482, 0.45744085066175544], - "2015-12-23": [0.6879810190692012, 0.7650472745914928], - "2015-12-24": [0.3780817922426982, 0.26541942400861085], - "2015-12-25": [0.36377456228425953, 0.631351987243267], - "2015-12-26": [0.8858056528262828, 0.9929861335485208], - "2015-12-27": [0.7655012724712326, 0.8089415357086998], - "2015-12-28": 
[0.7802318350754168, 0.7507408790195511], - "2015-12-29": [0.21655713897782725, 0.39237400153061963], - "2015-12-30": [0.1424758142857997, 0.8902624055897677], - "2015-12-31": [0.643477187971131, 0.8789182584456643], - "2016-01-01": [0.9369855677078311, 0.609162462729402], - "2016-01-02": [0.0002858612957059581, 0.23697970143653724], - "2016-01-03": [0.16280370690002544, 0.5975722202856767], - "2016-01-04": [0.49310729499396044, 0.30125004072447636], - "2016-01-05": [0.6780776618690909, 0.06790772949375545], - "2016-01-06": [0.14663698589706664, 0.8886978154852969], - "2016-01-07": [0.4422143343461119, 0.9217301518957838], - "2016-01-08": [0.5100515329575456, 0.8433884706466771], - "2016-01-09": [0.9304379891825107, 0.7132184980129253], - "2016-01-10": [0.7611198221207496, 0.08381565314231354], - "2016-01-11": [0.4876994455855225, 0.6305043963566345], - "2016-01-12": [0.3807596362774567, 0.44155005068612574], - "2016-01-13": [0.022917032041831265, 0.8746566219855559], - "2016-01-14": [0.06744988617005221, 0.4867830182954117], - "2016-01-15": [0.2876166788506098, 0.34029767862172733], - "2016-01-16": [0.03136920601698312, 0.7473159671597568], - "2016-01-17": [0.8903106284115768, 0.6144929198235487], - "2016-01-18": [0.7692999747639573, 0.7318695022998711], - "2016-01-19": [0.8265506190532937, 0.8094527295780038], - "2016-01-20": [0.9902157088998235, 0.22917399968565122], - }, - clean_ex_first=True, - ), - ) - """ - _response = await self._client_wrapper.httpx_client.request( - "POST", - urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", "forecast"), - params=jsonable_encoder( - request_options.get("additional_query_parameters") if request_options is not None else None - ), - json=jsonable_encoder(request) - if request_options is None or request_options.get("additional_body_parameters") is None - else { - **jsonable_encoder(request), - **(jsonable_encoder(remove_none_from_dict(request_options.get("additional_body_parameters", {})))), - }, - headers=jsonable_encoder( - remove_none_from_dict( - { - **self._client_wrapper.get_headers(), - **(request_options.get("additional_headers", {}) if request_options is not None else {}), - } - ) - ), - timeout=request_options.get("timeout_in_seconds") - if request_options is not None and request_options.get("timeout_in_seconds") is not None - else self._client_wrapper.get_timeout(), - retries=0, - max_retries=request_options.get("max_retries") if request_options is not None else 0, # type: ignore - ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(typing.Any, _response.json()) # type: ignore - if _response.status_code == 422: - raise UnprocessableEntityError( - pydantic_v1.parse_obj_as(HttpValidationError, _response.json()) # type: ignore - ) - try: - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) - - async def historic_forecast( - self, *, request: SingleSeriesInsampleForecast, request_options: typing.Optional[RequestOptions] = None - ) -> typing.Any: - """ - Based on the provided data, this endpoint predicts time series data for the in-sample period (historical period). It takes a JSON as an input, including information like the series frequency and the historical data. (See below for a full description of the parameters.) The response contains the predicted values for the historical period. Usually useful for anomaly detection. 
Get your token for private beta at https://dashboard.nixtla.io. - - Parameters: - - request: SingleSeriesInsampleForecast. - - - request_options: typing.Optional[RequestOptions]. Request-specific configuration. - --- - from nixtlats import Model, SingleSeriesInsampleForecast - from nixtlats.client import AsyncNixtla - - client = AsyncNixtla( - token="YOUR_TOKEN", - base_url="https://yourhost.com/path/to/api", - ) - await client.historic_forecast( - request=SingleSeriesInsampleForecast( - model=Model.TIMEGPT_1, - freq="D", - level=[90], - y={ - "2015-12-02": 8.71177264560569, - "2015-12-03": 8.05610965954506, - "2015-12-04": 8.08147504013705, - "2015-12-05": 7.45876269238096, - "2015-12-06": 8.01400499477946, - "2015-12-07": 8.49678638163858, - "2015-12-08": 7.98104975966596, - "2015-12-09": 7.77779262633883, - "2015-12-10": 8.2602342916073, - "2015-12-11": 7.86633892304654, - "2015-12-12": 7.31055015853442, - "2015-12-13": 7.71824095195932, - "2015-12-14": 8.31947369244219, - "2015-12-15": 8.23668532271246, - "2015-12-16": 7.80751004221619, - "2015-12-17": 7.59186171488993, - "2015-12-18": 7.52886925664225, - "2015-12-19": 7.17165682276851, - "2015-12-20": 7.89133075766189, - "2015-12-21": 8.36007143564403, - "2015-12-22": 8.11042723757502, - "2015-12-23": 7.77527584648686, - "2015-12-24": 7.34729970074316, - "2015-12-25": 7.30182234213793, - "2015-12-26": 7.12044437239249, - "2015-12-27": 8.87877607170755, - "2015-12-28": 9.25061821847475, - "2015-12-29": 9.24792513230345, - "2015-12-30": 8.39140318535794, - "2015-12-31": 8.00469951054955, - "2016-01-01": 7.58933582317062, - "2016-01-02": 7.82524529143177, - "2016-01-03": 8.24931374626064, - "2016-01-04": 9.29514097366865, - "2016-01-05": 8.56826646160024, - "2016-01-06": 8.35255436947459, - "2016-01-07": 8.29579811063615, - "2016-01-08": 8.29029259122431, - "2016-01-09": 7.78572089653462, - "2016-01-10": 8.28172399041139, - "2016-01-11": 8.4707303170059, - "2016-01-12": 8.13505390861157, - "2016-01-13": 8.06714903991011, - }, - clean_ex_first=True, - ), - ) - """ - _response = await self._client_wrapper.httpx_client.request( - "POST", - urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", "historic_forecast"), - params=jsonable_encoder( - request_options.get("additional_query_parameters") if request_options is not None else None - ), - json=jsonable_encoder(request) - if request_options is None or request_options.get("additional_body_parameters") is None - else { - **jsonable_encoder(request), - **(jsonable_encoder(remove_none_from_dict(request_options.get("additional_body_parameters", {})))), - }, - headers=jsonable_encoder( - remove_none_from_dict( - { - **self._client_wrapper.get_headers(), - **(request_options.get("additional_headers", {}) if request_options is not None else {}), - } - ) - ), - timeout=request_options.get("timeout_in_seconds") - if request_options is not None and request_options.get("timeout_in_seconds") is not None - else self._client_wrapper.get_timeout(), - retries=0, - max_retries=request_options.get("max_retries") if request_options is not None else 0, # type: ignore - ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(typing.Any, _response.json()) # type: ignore - if _response.status_code == 422: - raise UnprocessableEntityError( - pydantic_v1.parse_obj_as(HttpValidationError, _response.json()) # type: ignore - ) - try: - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise 
ApiError(status_code=_response.status_code, body=_response_json) - - async def forecast_multi_series( - self, *, request: MultiSeriesForecast, request_options: typing.Optional[RequestOptions] = None - ) -> typing.Any: - """ - Based on the provided data, this endpoint predicts the future values of multiple time series at once. It takes a JSON as an input containing information like the series frequency and historical data. (See below for a full description of the parameters.) The response contains the predicted values for each series based on the input arguments. Get your token for private beta at https://dashboard.nixtla.io. - - Parameters: - - request: MultiSeriesForecast. - - - request_options: typing.Optional[RequestOptions]. Request-specific configuration. - --- - from nixtlats import MultiSeriesForecast - from nixtlats.client import AsyncNixtla - - client = AsyncNixtla( - token="YOUR_TOKEN", - base_url="https://yourhost.com/path/to/api", - ) - await client.forecast_multi_series( - request=MultiSeriesForecast(), - ) - """ - _response = await self._client_wrapper.httpx_client.request( - "POST", - urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", "forecast_multi_series"), - params=jsonable_encoder( - request_options.get("additional_query_parameters") if request_options is not None else None - ), - json=jsonable_encoder(request) - if request_options is None or request_options.get("additional_body_parameters") is None - else { - **jsonable_encoder(request), - **(jsonable_encoder(remove_none_from_dict(request_options.get("additional_body_parameters", {})))), - }, - headers=jsonable_encoder( - remove_none_from_dict( - { - **self._client_wrapper.get_headers(), - **(request_options.get("additional_headers", {}) if request_options is not None else {}), - } - ) - ), - timeout=request_options.get("timeout_in_seconds") - if request_options is not None and request_options.get("timeout_in_seconds") is not None - else self._client_wrapper.get_timeout(), - retries=0, - max_retries=request_options.get("max_retries") if request_options is not None else 0, # type: ignore - ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(typing.Any, _response.json()) # type: ignore - if _response.status_code == 422: - raise UnprocessableEntityError( - pydantic_v1.parse_obj_as(HttpValidationError, _response.json()) # type: ignore - ) - try: - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) - - async def historic_forecast_multi_series( - self, *, request: MultiSeriesInsampleForecast, request_options: typing.Optional[RequestOptions] = None - ) -> typing.Any: - """ - Based on the provided data, this endpoint predicts the in-sample period (historical period) values of multiple time series at once. It takes a JSON as an input containing information like the series frequency and historical data. (See below for a full description of the parameters.) The response contains the predicted values for the historical period. Usually useful for anomaly detection. Get your token for private beta at https://dashboard.nixtla.io. - - Parameters: - - request: MultiSeriesInsampleForecast. - - - request_options: typing.Optional[RequestOptions]. Request-specific configuration. 
- --- - from nixtlats import Model, MultiSeriesInsampleForecast - from nixtlats.client import AsyncNixtla - - client = AsyncNixtla( - token="YOUR_TOKEN", - base_url="https://yourhost.com/path/to/api", - ) - await client.historic_forecast_multi_series( - request=MultiSeriesInsampleForecast( - model=Model.TIMEGPT_1, - freq="D", - level=[90], - y={ - "columns": {"0": "unique_id", "1": "ds", "2": "y"}, - "data": { - "0": { - "0": "PeytonManning", - "1": "2015-12-02", - "2": 8.71177264560569, - }, - "1": { - "0": "PeytonManning", - "1": "2015-12-03", - "2": 8.05610965954506, - }, - "2": { - "0": "PeytonManning", - "1": "2015-12-04", - "2": 8.08147504013705, - }, - "3": { - "0": "PeytonManning", - "1": "2015-12-05", - "2": 7.45876269238096, - }, - "4": { - "0": "PeytonManning", - "1": "2015-12-06", - "2": 8.01400499477946, - }, - "5": { - "0": "PeytonManning", - "1": "2015-12-07", - "2": 8.49678638163858, - }, - "6": { - "0": "PeytonManning", - "1": "2015-12-08", - "2": 7.98104975966596, - }, - "7": { - "0": "PeytonManning", - "1": "2015-12-09", - "2": 7.77779262633883, - }, - "8": { - "0": "PeytonManning", - "1": "2015-12-10", - "2": 8.2602342916073, - }, - "9": { - "0": "PeytonManning", - "1": "2015-12-11", - "2": 7.86633892304654, - }, - "10": { - "0": "PeytonManning", - "1": "2015-12-12", - "2": 7.31055015853442, - }, - "11": { - "0": "PeytonManning", - "1": "2015-12-13", - "2": 7.71824095195932, - }, - "12": { - "0": "PeytonManning", - "1": "2015-12-14", - "2": 8.31947369244219, - }, - "13": { - "0": "PeytonManning", - "1": "2015-12-15", - "2": 8.23668532271246, - }, - "14": { - "0": "PeytonManning", - "1": "2015-12-16", - "2": 7.80751004221619, - }, - "15": { - "0": "PeytonManning", - "1": "2015-12-17", - "2": 7.59186171488993, - }, - "16": { - "0": "PeytonManning", - "1": "2015-12-18", - "2": 7.52886925664225, - }, - "17": { - "0": "PeytonManning", - "1": "2015-12-19", - "2": 7.17165682276851, - }, - "18": { - "0": "PeytonManning", - "1": "2015-12-20", - "2": 7.89133075766189, - }, - "19": { - "0": "PeytonManning", - "1": "2015-12-21", - "2": 8.36007143564403, - }, - "20": { - "0": "PeytonManning", - "1": "2015-12-22", - "2": 8.11042723757502, - }, - "21": { - "0": "PeytonManning", - "1": "2015-12-23", - "2": 7.77527584648686, - }, - "22": { - "0": "PeytonManning", - "1": "2015-12-24", - "2": 7.34729970074316, - }, - "23": { - "0": "PeytonManning", - "1": "2015-12-25", - "2": 7.30182234213793, - }, - "24": { - "0": "PeytonManning", - "1": "2015-12-26", - "2": 7.12044437239249, - }, - "25": { - "0": "PeytonManning", - "1": "2015-12-27", - "2": 8.87877607170755, - }, - "26": { - "0": "PeytonManning", - "1": "2015-12-28", - "2": 9.25061821847475, - }, - "27": { - "0": "PeytonManning", - "1": "2015-12-29", - "2": 9.24792513230345, - }, - "28": { - "0": "PeytonManning", - "1": "2015-12-30", - "2": 8.39140318535794, - }, - "29": { - "0": "PeytonManning", - "1": "2015-12-31", - "2": 8.00469951054955, - }, - "30": { - "0": "PeytonManning", - "1": "2016-01-01", - "2": 7.58933582317062, - }, - "31": { - "0": "PeytonManning", - "1": "2016-01-02", - "2": 7.82524529143177, - }, - "32": { - "0": "PeytonManning", - "1": "2016-01-03", - "2": 8.24931374626064, - }, - "33": { - "0": "PeytonManning", - "1": "2016-01-04", - "2": 9.29514097366865, - }, - "34": { - "0": "PeytonManning", - "1": "2016-01-05", - "2": 8.56826646160024, - }, - "35": { - "0": "PeytonManning", - "1": "2016-01-06", - "2": 8.35255436947459, - }, - "36": { - "0": "PeytonManning", - "1": "2016-01-07", - "2": 8.29579811063615, - }, - 
"37": { - "0": "PeytonManning", - "1": "2016-01-08", - "2": 8.29029259122431, - }, - "38": { - "0": "PeytonManning", - "1": "2016-01-09", - "2": 7.78572089653462, - }, - "39": { - "0": "PeytonManning", - "1": "2016-01-10", - "2": 8.28172399041139, - }, - "40": { - "0": "PeytonManning", - "1": "2016-01-11", - "2": 8.4707303170059, - }, - "41": { - "0": "PeytonManning", - "1": "2016-01-12", - "2": 8.13505390861157, - }, - "42": { - "0": "PeytonManning", - "1": "2016-01-13", - "2": 8.06714903991011, - }, - "43": { - "0": "TomBrady", - "1": "2015-12-02", - "2": 8.71177264560569, - }, - "44": { - "0": "TomBrady", - "1": "2015-12-03", - "2": 8.05610965954506, - }, - "45": { - "0": "TomBrady", - "1": "2015-12-04", - "2": 8.08147504013705, - }, - "46": { - "0": "TomBrady", - "1": "2015-12-05", - "2": 7.45876269238096, - }, - "47": { - "0": "TomBrady", - "1": "2015-12-06", - "2": 8.01400499477946, - }, - "48": { - "0": "TomBrady", - "1": "2015-12-07", - "2": 8.49678638163858, - }, - "49": { - "0": "TomBrady", - "1": "2015-12-08", - "2": 7.98104975966596, - }, - "50": { - "0": "TomBrady", - "1": "2015-12-09", - "2": 7.77779262633883, - }, - "51": { - "0": "TomBrady", - "1": "2015-12-10", - "2": 8.2602342916073, - }, - "52": { - "0": "TomBrady", - "1": "2015-12-11", - "2": 7.86633892304654, - }, - "53": { - "0": "TomBrady", - "1": "2015-12-12", - "2": 7.31055015853442, - }, - "54": { - "0": "TomBrady", - "1": "2015-12-13", - "2": 7.71824095195932, - }, - "55": { - "0": "TomBrady", - "1": "2015-12-14", - "2": 8.31947369244219, - }, - "56": { - "0": "TomBrady", - "1": "2015-12-15", - "2": 8.23668532271246, - }, - "57": { - "0": "TomBrady", - "1": "2015-12-16", - "2": 7.80751004221619, - }, - "58": { - "0": "TomBrady", - "1": "2015-12-17", - "2": 7.59186171488993, - }, - "59": { - "0": "TomBrady", - "1": "2015-12-18", - "2": 7.52886925664225, - }, - "60": { - "0": "TomBrady", - "1": "2015-12-19", - "2": 7.17165682276851, - }, - "61": { - "0": "TomBrady", - "1": "2015-12-20", - "2": 7.89133075766189, - }, - "62": { - "0": "TomBrady", - "1": "2015-12-21", - "2": 8.36007143564403, - }, - "63": { - "0": "TomBrady", - "1": "2015-12-22", - "2": 8.11042723757502, - }, - "64": { - "0": "TomBrady", - "1": "2015-12-23", - "2": 7.77527584648686, - }, - "65": { - "0": "TomBrady", - "1": "2015-12-24", - "2": 7.34729970074316, - }, - "66": { - "0": "TomBrady", - "1": "2015-12-25", - "2": 7.30182234213793, - }, - "67": { - "0": "TomBrady", - "1": "2015-12-26", - "2": 7.12044437239249, - }, - "68": { - "0": "TomBrady", - "1": "2015-12-27", - "2": 8.87877607170755, - }, - "69": { - "0": "TomBrady", - "1": "2015-12-28", - "2": 9.25061821847475, - }, - "70": { - "0": "TomBrady", - "1": "2015-12-29", - "2": 9.24792513230345, - }, - "71": { - "0": "TomBrady", - "1": "2015-12-30", - "2": 8.39140318535794, - }, - "72": { - "0": "TomBrady", - "1": "2015-12-31", - "2": 8.00469951054955, - }, - "73": { - "0": "TomBrady", - "1": "2016-01-01", - "2": 7.58933582317062, - }, - "74": { - "0": "TomBrady", - "1": "2016-01-02", - "2": 7.82524529143177, - }, - "75": { - "0": "TomBrady", - "1": "2016-01-03", - "2": 8.24931374626064, - }, - "76": { - "0": "TomBrady", - "1": "2016-01-04", - "2": 9.29514097366865, - }, - "77": { - "0": "TomBrady", - "1": "2016-01-05", - "2": 8.56826646160024, - }, - "78": { - "0": "TomBrady", - "1": "2016-01-06", - "2": 8.35255436947459, - }, - "79": { - "0": "TomBrady", - "1": "2016-01-07", - "2": 8.29579811063615, - }, - "80": { - "0": "TomBrady", - "1": "2016-01-08", - "2": 8.29029259122431, - }, - "81": 
{ - "0": "TomBrady", - "1": "2016-01-09", - "2": 7.78572089653462, - }, - "82": { - "0": "TomBrady", - "1": "2016-01-10", - "2": 8.28172399041139, - }, - "83": { - "0": "TomBrady", - "1": "2016-01-11", - "2": 8.4707303170059, - }, - "84": { - "0": "TomBrady", - "1": "2016-01-12", - "2": 8.13505390861157, - }, - "85": { - "0": "TomBrady", - "1": "2016-01-13", - "2": 8.06714903991011, - }, - }, - }, - ), - ) - """ - _response = await self._client_wrapper.httpx_client.request( - "POST", - urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", "historic_forecast_multi_series"), - params=jsonable_encoder( - request_options.get("additional_query_parameters") if request_options is not None else None - ), - json=jsonable_encoder(request) - if request_options is None or request_options.get("additional_body_parameters") is None - else { - **jsonable_encoder(request), - **(jsonable_encoder(remove_none_from_dict(request_options.get("additional_body_parameters", {})))), - }, - headers=jsonable_encoder( - remove_none_from_dict( - { - **self._client_wrapper.get_headers(), - **(request_options.get("additional_headers", {}) if request_options is not None else {}), - } - ) - ), - timeout=request_options.get("timeout_in_seconds") - if request_options is not None and request_options.get("timeout_in_seconds") is not None - else self._client_wrapper.get_timeout(), - retries=0, - max_retries=request_options.get("max_retries") if request_options is not None else 0, # type: ignore - ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(typing.Any, _response.json()) # type: ignore - if _response.status_code == 422: - raise UnprocessableEntityError( - pydantic_v1.parse_obj_as(HttpValidationError, _response.json()) # type: ignore - ) - try: - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) - - async def anomaly_detection_multi_series( - self, *, request: MultiSeriesAnomaly, request_options: typing.Optional[RequestOptions] = None - ) -> typing.Any: - """ - Based on the provided data, this endpoint detects the anomalies in the historical perdiod of multiple time series at once. It takes a JSON as an input containing information like the series frequency and historical data. (See below for a full description of the parameters.) The response contains a flag indicating if the date has a anomaly and also provides the prediction interval used to define if an observation is an anomaly.Get your token for private beta at https://dashboard.nixtla.io. - - Parameters: - - request: MultiSeriesAnomaly. - - - request_options: typing.Optional[RequestOptions]. Request-specific configuration. 
- --- - from nixtlats import MultiSeriesAnomaly - from nixtlats.client import AsyncNixtla - - client = AsyncNixtla( - token="YOUR_TOKEN", - base_url="https://yourhost.com/path/to/api", - ) - await client.anomaly_detection_multi_series( - request=MultiSeriesAnomaly(), - ) - """ - _response = await self._client_wrapper.httpx_client.request( - "POST", - urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", "anomaly_detection_multi_series"), - params=jsonable_encoder( - request_options.get("additional_query_parameters") if request_options is not None else None - ), - json=jsonable_encoder(request) - if request_options is None or request_options.get("additional_body_parameters") is None - else { - **jsonable_encoder(request), - **(jsonable_encoder(remove_none_from_dict(request_options.get("additional_body_parameters", {})))), - }, - headers=jsonable_encoder( - remove_none_from_dict( - { - **self._client_wrapper.get_headers(), - **(request_options.get("additional_headers", {}) if request_options is not None else {}), - } - ) - ), - timeout=request_options.get("timeout_in_seconds") - if request_options is not None and request_options.get("timeout_in_seconds") is not None - else self._client_wrapper.get_timeout(), - retries=0, - max_retries=request_options.get("max_retries") if request_options is not None else 0, # type: ignore - ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(typing.Any, _response.json()) # type: ignore - if _response.status_code == 422: - raise UnprocessableEntityError( - pydantic_v1.parse_obj_as(HttpValidationError, _response.json()) # type: ignore - ) - try: - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) - - async def cross_validation_multi_series( - self, *, request: MultiSeriesCrossValidation, request_options: typing.Optional[RequestOptions] = None - ) -> typing.Any: - """ - Perform Cross Validation for multiple series - - Parameters: - - request: MultiSeriesCrossValidation. - - - request_options: typing.Optional[RequestOptions]. Request-specific configuration. 
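- Note (editorial): the example below also passes exogenous regressors via x=MultiSeriesInput(...); the x rows run through 2016-01-20 while y stops at 2016-01-13, i.e. the exogenous data extends past the last observed date so it covers the windows being validated.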
- --- - from nixtlats import Model, MultiSeriesCrossValidation, MultiSeriesInput - from nixtlats.client import AsyncNixtla - - client = AsyncNixtla( - token="YOUR_TOKEN", - base_url="https://yourhost.com/path/to/api", - ) - await client.cross_validation_multi_series( - request=MultiSeriesCrossValidation( - model=Model.TIMEGPT_1, - freq="D", - level=[99], - y={ - "columns": {"0": "unique_id", "1": "ds", "2": "y"}, - "data": { - "0": { - "0": "PeytonManning", - "1": "2015-12-02", - "2": 8.71177264560569, - }, - "1": { - "0": "PeytonManning", - "1": "2015-12-03", - "2": 8.05610965954506, - }, - "2": { - "0": "PeytonManning", - "1": "2015-12-04", - "2": 8.08147504013705, - }, - "3": { - "0": "PeytonManning", - "1": "2015-12-05", - "2": 7.45876269238096, - }, - "4": { - "0": "PeytonManning", - "1": "2015-12-06", - "2": 8.01400499477946, - }, - "5": { - "0": "PeytonManning", - "1": "2015-12-07", - "2": 8.49678638163858, - }, - "6": { - "0": "PeytonManning", - "1": "2015-12-08", - "2": 7.98104975966596, - }, - "7": { - "0": "PeytonManning", - "1": "2015-12-09", - "2": 7.77779262633883, - }, - "8": { - "0": "PeytonManning", - "1": "2015-12-10", - "2": 8.2602342916073, - }, - "9": { - "0": "PeytonManning", - "1": "2015-12-11", - "2": 7.86633892304654, - }, - "10": { - "0": "PeytonManning", - "1": "2015-12-12", - "2": 7.31055015853442, - }, - "11": { - "0": "PeytonManning", - "1": "2015-12-13", - "2": 7.71824095195932, - }, - "12": { - "0": "PeytonManning", - "1": "2015-12-14", - "2": 8.31947369244219, - }, - "13": { - "0": "PeytonManning", - "1": "2015-12-15", - "2": 8.23668532271246, - }, - "14": { - "0": "PeytonManning", - "1": "2015-12-16", - "2": 7.80751004221619, - }, - "15": { - "0": "PeytonManning", - "1": "2015-12-17", - "2": 7.59186171488993, - }, - "16": { - "0": "PeytonManning", - "1": "2015-12-18", - "2": 7.52886925664225, - }, - "17": { - "0": "PeytonManning", - "1": "2015-12-19", - "2": 7.17165682276851, - }, - "18": { - "0": "PeytonManning", - "1": "2015-12-20", - "2": 7.89133075766189, - }, - "19": { - "0": "PeytonManning", - "1": "2015-12-21", - "2": 8.36007143564403, - }, - "20": { - "0": "PeytonManning", - "1": "2015-12-22", - "2": 8.11042723757502, - }, - "21": { - "0": "PeytonManning", - "1": "2015-12-23", - "2": 7.77527584648686, - }, - "22": { - "0": "PeytonManning", - "1": "2015-12-24", - "2": 7.34729970074316, - }, - "23": { - "0": "PeytonManning", - "1": "2015-12-25", - "2": 7.30182234213793, - }, - "24": { - "0": "PeytonManning", - "1": "2015-12-26", - "2": 7.12044437239249, - }, - "25": { - "0": "PeytonManning", - "1": "2015-12-27", - "2": 8.87877607170755, - }, - "26": { - "0": "PeytonManning", - "1": "2015-12-28", - "2": 9.25061821847475, - }, - "27": { - "0": "PeytonManning", - "1": "2015-12-29", - "2": 9.24792513230345, - }, - "28": { - "0": "PeytonManning", - "1": "2015-12-30", - "2": 8.39140318535794, - }, - "29": { - "0": "PeytonManning", - "1": "2015-12-31", - "2": 8.00469951054955, - }, - "30": { - "0": "PeytonManning", - "1": "2016-01-01", - "2": 7.58933582317062, - }, - "31": { - "0": "PeytonManning", - "1": "2016-01-02", - "2": 7.82524529143177, - }, - "32": { - "0": "PeytonManning", - "1": "2016-01-03", - "2": 8.24931374626064, - }, - "33": { - "0": "PeytonManning", - "1": "2016-01-04", - "2": 9.29514097366865, - }, - "34": { - "0": "PeytonManning", - "1": "2016-01-05", - "2": 8.56826646160024, - }, - "35": { - "0": "PeytonManning", - "1": "2016-01-06", - "2": 8.35255436947459, - }, - "36": { - "0": "PeytonManning", - "1": "2016-01-07", - "2": 
8.29579811063615, - }, - "37": { - "0": "PeytonManning", - "1": "2016-01-08", - "2": 8.29029259122431, - }, - "38": { - "0": "PeytonManning", - "1": "2016-01-09", - "2": 7.78572089653462, - }, - "39": { - "0": "PeytonManning", - "1": "2016-01-10", - "2": 8.28172399041139, - }, - "40": { - "0": "PeytonManning", - "1": "2016-01-11", - "2": 8.4707303170059, - }, - "41": { - "0": "PeytonManning", - "1": "2016-01-12", - "2": 8.13505390861157, - }, - "42": { - "0": "PeytonManning", - "1": "2016-01-13", - "2": 8.06714903991011, - }, - "43": { - "0": "TomBrady", - "1": "2015-12-02", - "2": 8.71177264560569, - }, - "44": { - "0": "TomBrady", - "1": "2015-12-03", - "2": 8.05610965954506, - }, - "45": { - "0": "TomBrady", - "1": "2015-12-04", - "2": 8.08147504013705, - }, - "46": { - "0": "TomBrady", - "1": "2015-12-05", - "2": 7.45876269238096, - }, - "47": { - "0": "TomBrady", - "1": "2015-12-06", - "2": 8.01400499477946, - }, - "48": { - "0": "TomBrady", - "1": "2015-12-07", - "2": 8.49678638163858, - }, - "49": { - "0": "TomBrady", - "1": "2015-12-08", - "2": 7.98104975966596, - }, - "50": { - "0": "TomBrady", - "1": "2015-12-09", - "2": 7.77779262633883, - }, - "51": { - "0": "TomBrady", - "1": "2015-12-10", - "2": 8.2602342916073, - }, - "52": { - "0": "TomBrady", - "1": "2015-12-11", - "2": 7.86633892304654, - }, - "53": { - "0": "TomBrady", - "1": "2015-12-12", - "2": 7.31055015853442, - }, - "54": { - "0": "TomBrady", - "1": "2015-12-13", - "2": 7.71824095195932, - }, - "55": { - "0": "TomBrady", - "1": "2015-12-14", - "2": 8.31947369244219, - }, - "56": { - "0": "TomBrady", - "1": "2015-12-15", - "2": 8.23668532271246, - }, - "57": { - "0": "TomBrady", - "1": "2015-12-16", - "2": 7.80751004221619, - }, - "58": { - "0": "TomBrady", - "1": "2015-12-17", - "2": 7.59186171488993, - }, - "59": { - "0": "TomBrady", - "1": "2015-12-18", - "2": 7.52886925664225, - }, - "60": { - "0": "TomBrady", - "1": "2015-12-19", - "2": 7.17165682276851, - }, - "61": { - "0": "TomBrady", - "1": "2015-12-20", - "2": 7.89133075766189, - }, - "62": { - "0": "TomBrady", - "1": "2015-12-21", - "2": 8.36007143564403, - }, - "63": { - "0": "TomBrady", - "1": "2015-12-22", - "2": 8.11042723757502, - }, - "64": { - "0": "TomBrady", - "1": "2015-12-23", - "2": 7.77527584648686, - }, - "65": { - "0": "TomBrady", - "1": "2015-12-24", - "2": 7.34729970074316, - }, - "66": { - "0": "TomBrady", - "1": "2015-12-25", - "2": 7.30182234213793, - }, - "67": { - "0": "TomBrady", - "1": "2015-12-26", - "2": 7.12044437239249, - }, - "68": { - "0": "TomBrady", - "1": "2015-12-27", - "2": 8.87877607170755, - }, - "69": { - "0": "TomBrady", - "1": "2015-12-28", - "2": 9.25061821847475, - }, - "70": { - "0": "TomBrady", - "1": "2015-12-29", - "2": 9.24792513230345, - }, - "71": { - "0": "TomBrady", - "1": "2015-12-30", - "2": 8.39140318535794, - }, - "72": { - "0": "TomBrady", - "1": "2015-12-31", - "2": 8.00469951054955, - }, - "73": { - "0": "TomBrady", - "1": "2016-01-01", - "2": 7.58933582317062, - }, - "74": { - "0": "TomBrady", - "1": "2016-01-02", - "2": 7.82524529143177, - }, - "75": { - "0": "TomBrady", - "1": "2016-01-03", - "2": 8.24931374626064, - }, - "76": { - "0": "TomBrady", - "1": "2016-01-04", - "2": 9.29514097366865, - }, - "77": { - "0": "TomBrady", - "1": "2016-01-05", - "2": 8.56826646160024, - }, - "78": { - "0": "TomBrady", - "1": "2016-01-06", - "2": 8.35255436947459, - }, - "79": { - "0": "TomBrady", - "1": "2016-01-07", - "2": 8.29579811063615, - }, - "80": { - "0": "TomBrady", - "1": "2016-01-08", - "2": 
8.29029259122431, - }, - "81": { - "0": "TomBrady", - "1": "2016-01-09", - "2": 7.78572089653462, - }, - "82": { - "0": "TomBrady", - "1": "2016-01-10", - "2": 8.28172399041139, - }, - "83": { - "0": "TomBrady", - "1": "2016-01-11", - "2": 8.4707303170059, - }, - "84": { - "0": "TomBrady", - "1": "2016-01-12", - "2": 8.13505390861157, - }, - "85": { - "0": "TomBrady", - "1": "2016-01-13", - "2": 8.06714903991011, - }, - }, - }, - x=MultiSeriesInput( - columns=[ - "unique_id", - "ds", - "normalized_fb_marketing_spend", - "normalized_google_marketing_spend", - ], - data=[ - { - "0": "PeytonManning", - "1": "2015-12-02", - "2": 0.11477860928915096, - "3": 0.5844987825930846, - }, - { - "0": "PeytonManning", - "1": "2015-12-03", - "2": 0.33533486335091345, - "3": 0.35513740030391416, - }, - { - "0": "PeytonManning", - "1": "2015-12-04", - "2": 0.17530305631523613, - "3": 0.7727084723706166, - }, - { - "0": "PeytonManning", - "1": "2015-12-05", - "2": 0.8299454704953892, - "3": 0.367618075472813, - }, - { - "0": "PeytonManning", - "1": "2015-12-06", - "2": 0.15521852990872909, - "3": 0.3003195772529532, - }, - { - "0": "PeytonManning", - "1": "2015-12-07", - "2": 0.14306595403784284, - "3": 0.7070431427763944, - }, - { - "0": "PeytonManning", - "1": "2015-12-08", - "2": 0.19385462445583268, - "3": 0.3548795168204949, - }, - { - "0": "PeytonManning", - "1": "2015-12-09", - "2": 0.13255788839340688, - "3": 0.17898066409986724, - }, - { - "0": "PeytonManning", - "1": "2015-12-10", - "2": 0.1861402159494574, - "3": 0.7109080209067455, - }, - { - "0": "PeytonManning", - "1": "2015-12-11", - "2": 0.6416719826327762, - "3": 0.029753630490418792, - }, - { - "0": "PeytonManning", - "1": "2015-12-12", - "2": 0.07255444629238816, - "3": 0.29036094427412484, - }, - { - "0": "PeytonManning", - "1": "2015-12-13", - "2": 0.5908563921496247, - "3": 0.4391612122175276, - }, - { - "0": "PeytonManning", - "1": "2015-12-14", - "2": 0.4564108681832745, - "3": 0.36666762551686216, - }, - { - "0": "PeytonManning", - "1": "2015-12-15", - "2": 0.5842063222179616, - "3": 0.7725253746268812, - }, - { - "0": "PeytonManning", - "1": "2015-12-16", - "2": 0.7032064831874294, - "3": 0.011741920837716524, - }, - { - "0": "PeytonManning", - "1": "2015-12-17", - "2": 0.1819442499802637, - "3": 0.024847038984041947, - }, - { - "0": "PeytonManning", - "1": "2015-12-18", - "2": 0.0236088273599101, - "3": 0.6750163977742368, - }, - { - "0": "PeytonManning", - "1": "2015-12-19", - "2": 0.05606508858414727, - "3": 0.7476944076745794, - }, - { - "0": "PeytonManning", - "1": "2015-12-20", - "2": 0.23105144001754685, - "3": 0.0975599293231374, - }, - { - "0": "PeytonManning", - "1": "2015-12-21", - "2": 0.046529460623970675, - "3": 0.27870858983273483, - }, - { - "0": "PeytonManning", - "1": "2015-12-22", - "2": 0.8924837450040128, - "3": 0.17445021431427998, - }, - { - "0": "PeytonManning", - "1": "2015-12-23", - "2": 0.7920741814997206, - "3": 0.7697260533294017, - }, - { - "0": "PeytonManning", - "1": "2015-12-24", - "2": 0.8211104441803947, - "3": 0.04784956766271531, - }, - { - "0": "PeytonManning", - "1": "2015-12-25", - "2": 0.007443038001649138, - "3": 0.14607178743713456, - }, - { - "0": "PeytonManning", - "1": "2015-12-26", - "2": 0.007103558258280018, - "3": 0.9906539418801805, - }, - { - "0": "PeytonManning", - "1": "2015-12-27", - "2": 0.6056133597337183, - "3": 0.3016989995056749, - }, - { - "0": "PeytonManning", - "1": "2015-12-28", - "2": 0.888562531622971, - "3": 0.2644957566378502, - }, - { - "0": "PeytonManning", - 
"1": "2015-12-29", - "2": 0.6596339635369193, - "3": 0.7132049526406711, - }, - { - "0": "PeytonManning", - "1": "2015-12-30", - "2": 0.28384590920981123, - "3": 0.33014709090685734, - }, - { - "0": "PeytonManning", - "1": "2015-12-31", - "2": 0.8909381460163546, - "3": 0.30893091621071356, - }, - { - "0": "PeytonManning", - "1": "2016-01-01", - "2": 0.40276798240151035, - "3": 0.2879315229860593, - }, - { - "0": "PeytonManning", - "1": "2016-01-02", - "2": 0.4429162268378485, - "3": 0.11018732403878562, - }, - { - "0": "PeytonManning", - "1": "2016-01-03", - "2": 0.3828367947132504, - "3": 0.8222282482537393, - }, - { - "0": "PeytonManning", - "1": "2016-01-04", - "2": 0.058573708628411714, - "3": 0.0882973927279318, - }, - { - "0": "PeytonManning", - "1": "2016-01-05", - "2": 0.47488775903843816, - "3": 0.6966251353738419, - }, - { - "0": "PeytonManning", - "1": "2016-01-06", - "2": 0.5977462692254242, - "3": 0.047135156470394235, - }, - { - "0": "PeytonManning", - "1": "2016-01-07", - "2": 0.3717147185460784, - "3": 0.8692903226725259, - }, - { - "0": "PeytonManning", - "1": "2016-01-08", - "2": 0.9309630632764513, - "3": 0.9735989812217806, - }, - { - "0": "PeytonManning", - "1": "2016-01-09", - "2": 0.16911221748337524, - "3": 0.8789858343082052, - }, - { - "0": "PeytonManning", - "1": "2016-01-10", - "2": 0.27006313791342074, - "3": 0.11164112580033247, - }, - { - "0": "PeytonManning", - "1": "2016-01-11", - "2": 0.7963113076957269, - "3": 0.20769916997452176, - }, - { - "0": "PeytonManning", - "1": "2016-01-12", - "2": 0.6303426283630738, - "3": 0.18090725407449992, - }, - { - "0": "PeytonManning", - "1": "2016-01-13", - "2": 0.8493379197928459, - "3": 0.47689004776049004, - }, - { - "0": "PeytonManning", - "1": "2016-01-14", - "2": 0.23526591138844477, - "3": 0.8789956293235727, - }, - { - "0": "PeytonManning", - "1": "2016-01-15", - "2": 0.007675903871696255, - "3": 0.43933947505823356, - }, - { - "0": "PeytonManning", - "1": "2016-01-16", - "2": 0.47352555302489163, - "3": 0.2152806277171031, - }, - { - "0": "PeytonManning", - "1": "2016-01-17", - "2": 0.15708565708379674, - "3": 0.3691423619421067, - }, - { - "0": "PeytonManning", - "1": "2016-01-18", - "2": 0.1455797785998202, - "3": 0.12414992893989352, - }, - { - "0": "PeytonManning", - "1": "2016-01-19", - "2": 0.18260071049899873, - "3": 0.4700149101718504, - }, - { - "0": "PeytonManning", - "1": "2016-01-20", - "2": 0.815333051511914, - "3": 0.6872087681704713, - }, - { - "0": "TomBrady", - "1": "2015-12-02", - "2": 0.11477860928915096, - "3": 0.5844987825930846, - }, - { - "0": "TomBrady", - "1": "2015-12-03", - "2": 0.33533486335091345, - "3": 0.35513740030391416, - }, - { - "0": "TomBrady", - "1": "2015-12-04", - "2": 0.17530305631523613, - "3": 0.7727084723706166, - }, - { - "0": "TomBrady", - "1": "2015-12-05", - "2": 0.8299454704953892, - "3": 0.367618075472813, - }, - { - "0": "TomBrady", - "1": "2015-12-06", - "2": 0.15521852990872909, - "3": 0.3003195772529532, - }, - { - "0": "TomBrady", - "1": "2015-12-07", - "2": 0.14306595403784284, - "3": 0.7070431427763944, - }, - { - "0": "TomBrady", - "1": "2015-12-08", - "2": 0.19385462445583268, - "3": 0.3548795168204949, - }, - { - "0": "TomBrady", - "1": "2015-12-09", - "2": 0.13255788839340688, - "3": 0.17898066409986724, - }, - { - "0": "TomBrady", - "1": "2015-12-10", - "2": 0.1861402159494574, - "3": 0.7109080209067455, - }, - { - "0": "TomBrady", - "1": "2015-12-11", - "2": 0.6416719826327762, - "3": 0.029753630490418792, - }, - { - "0": "TomBrady", - "1": 
"2015-12-12", - "2": 0.07255444629238816, - "3": 0.29036094427412484, - }, - { - "0": "TomBrady", - "1": "2015-12-13", - "2": 0.5908563921496247, - "3": 0.4391612122175276, - }, - { - "0": "TomBrady", - "1": "2015-12-14", - "2": 0.4564108681832745, - "3": 0.36666762551686216, - }, - { - "0": "TomBrady", - "1": "2015-12-15", - "2": 0.5842063222179616, - "3": 0.7725253746268812, - }, - { - "0": "TomBrady", - "1": "2015-12-16", - "2": 0.7032064831874294, - "3": 0.011741920837716524, - }, - { - "0": "TomBrady", - "1": "2015-12-17", - "2": 0.1819442499802637, - "3": 0.024847038984041947, - }, - { - "0": "TomBrady", - "1": "2015-12-18", - "2": 0.0236088273599101, - "3": 0.6750163977742368, - }, - { - "0": "TomBrady", - "1": "2015-12-19", - "2": 0.05606508858414727, - "3": 0.7476944076745794, - }, - { - "0": "TomBrady", - "1": "2015-12-20", - "2": 0.23105144001754685, - "3": 0.0975599293231374, - }, - { - "0": "TomBrady", - "1": "2015-12-21", - "2": 0.046529460623970675, - "3": 0.27870858983273483, - }, - { - "0": "TomBrady", - "1": "2015-12-22", - "2": 0.8924837450040128, - "3": 0.17445021431427998, - }, - { - "0": "TomBrady", - "1": "2015-12-23", - "2": 0.7920741814997206, - "3": 0.7697260533294017, - }, - { - "0": "TomBrady", - "1": "2015-12-24", - "2": 0.8211104441803947, - "3": 0.04784956766271531, - }, - { - "0": "TomBrady", - "1": "2015-12-25", - "2": 0.007443038001649138, - "3": 0.14607178743713456, - }, - { - "0": "TomBrady", - "1": "2015-12-26", - "2": 0.007103558258280018, - "3": 0.9906539418801805, - }, - { - "0": "TomBrady", - "1": "2015-12-27", - "2": 0.6056133597337183, - "3": 0.3016989995056749, - }, - { - "0": "TomBrady", - "1": "2015-12-28", - "2": 0.888562531622971, - "3": 0.2644957566378502, - }, - { - "0": "TomBrady", - "1": "2015-12-29", - "2": 0.6596339635369193, - "3": 0.7132049526406711, - }, - { - "0": "TomBrady", - "1": "2015-12-30", - "2": 0.28384590920981123, - "3": 0.33014709090685734, - }, - { - "0": "TomBrady", - "1": "2015-12-31", - "2": 0.8909381460163546, - "3": 0.30893091621071356, - }, - { - "0": "TomBrady", - "1": "2016-01-01", - "2": 0.40276798240151035, - "3": 0.2879315229860593, - }, - { - "0": "TomBrady", - "1": "2016-01-02", - "2": 0.4429162268378485, - "3": 0.11018732403878562, - }, - { - "0": "TomBrady", - "1": "2016-01-03", - "2": 0.3828367947132504, - "3": 0.8222282482537393, - }, - { - "0": "TomBrady", - "1": "2016-01-04", - "2": 0.058573708628411714, - "3": 0.0882973927279318, - }, - { - "0": "TomBrady", - "1": "2016-01-05", - "2": 0.47488775903843816, - "3": 0.6966251353738419, - }, - { - "0": "TomBrady", - "1": "2016-01-06", - "2": 0.5977462692254242, - "3": 0.047135156470394235, - }, - { - "0": "TomBrady", - "1": "2016-01-07", - "2": 0.3717147185460784, - "3": 0.8692903226725259, - }, - { - "0": "TomBrady", - "1": "2016-01-08", - "2": 0.9309630632764513, - "3": 0.9735989812217806, - }, - { - "0": "TomBrady", - "1": "2016-01-09", - "2": 0.16911221748337524, - "3": 0.8789858343082052, - }, - { - "0": "TomBrady", - "1": "2016-01-10", - "2": 0.27006313791342074, - "3": 0.11164112580033247, - }, - { - "0": "TomBrady", - "1": "2016-01-11", - "2": 0.7963113076957269, - "3": 0.20769916997452176, - }, - { - "0": "TomBrady", - "1": "2016-01-12", - "2": 0.6303426283630738, - "3": 0.18090725407449992, - }, - { - "0": "TomBrady", - "1": "2016-01-13", - "2": 0.8493379197928459, - "3": 0.47689004776049004, - }, - { - "0": "TomBrady", - "1": "2016-01-14", - "2": 0.23526591138844477, - "3": 0.8789956293235727, - }, - { - "0": "TomBrady", - "1": "2016-01-15", - 
"2": 0.007675903871696255, - "3": 0.43933947505823356, - }, - { - "0": "TomBrady", - "1": "2016-01-16", - "2": 0.47352555302489163, - "3": 0.2152806277171031, - }, - { - "0": "TomBrady", - "1": "2016-01-17", - "2": 0.15708565708379674, - "3": 0.3691423619421067, - }, - { - "0": "TomBrady", - "1": "2016-01-18", - "2": 0.1455797785998202, - "3": 0.12414992893989352, - }, - { - "0": "TomBrady", - "1": "2016-01-19", - "2": 0.18260071049899873, - "3": 0.4700149101718504, - }, - { - "0": "TomBrady", - "1": "2016-01-20", - "2": 0.815333051511914, - "3": 0.6872087681704713, - }, - ], - ), - ), - ) - """ - _response = await self._client_wrapper.httpx_client.request( - "POST", - urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", "cross_validation_multi_series"), - params=jsonable_encoder( - request_options.get("additional_query_parameters") if request_options is not None else None - ), - json=jsonable_encoder(request) - if request_options is None or request_options.get("additional_body_parameters") is None - else { - **jsonable_encoder(request), - **(jsonable_encoder(remove_none_from_dict(request_options.get("additional_body_parameters", {})))), - }, - headers=jsonable_encoder( - remove_none_from_dict( - { - **self._client_wrapper.get_headers(), - **(request_options.get("additional_headers", {}) if request_options is not None else {}), - } - ) - ), - timeout=request_options.get("timeout_in_seconds") - if request_options is not None and request_options.get("timeout_in_seconds") is not None - else self._client_wrapper.get_timeout(), - retries=0, - max_retries=request_options.get("max_retries") if request_options is not None else 0, # type: ignore - ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(typing.Any, _response.json()) # type: ignore - if _response.status_code == 422: - raise UnprocessableEntityError( - pydantic_v1.parse_obj_as(HttpValidationError, _response.json()) # type: ignore - ) - try: - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) - - async def model_input_size( - self, *, request: SingleSeriesForecast, request_options: typing.Optional[RequestOptions] = None - ) -> typing.Any: - """ - Parameters: - - request: SingleSeriesForecast. - - - request_options: typing.Optional[RequestOptions]. Request-specific configuration. 
- --- - from nixtlats import SingleSeriesForecast - from nixtlats.client import AsyncNixtla - - client = AsyncNixtla( - token="YOUR_TOKEN", - base_url="https://yourhost.com/path/to/api", - ) - await client.model_input_size( - request=SingleSeriesForecast(), - ) - """ - _response = await self._client_wrapper.httpx_client.request( - "POST", - urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", "model_input_size"), - params=jsonable_encoder( - request_options.get("additional_query_parameters") if request_options is not None else None - ), - json=jsonable_encoder(request) - if request_options is None or request_options.get("additional_body_parameters") is None - else { - **jsonable_encoder(request), - **(jsonable_encoder(remove_none_from_dict(request_options.get("additional_body_parameters", {})))), - }, - headers=jsonable_encoder( - remove_none_from_dict( - { - **self._client_wrapper.get_headers(), - **(request_options.get("additional_headers", {}) if request_options is not None else {}), - } - ) - ), - timeout=request_options.get("timeout_in_seconds") - if request_options is not None and request_options.get("timeout_in_seconds") is not None - else self._client_wrapper.get_timeout(), - retries=0, - max_retries=request_options.get("max_retries") if request_options is not None else 0, # type: ignore - ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(typing.Any, _response.json()) # type: ignore - if _response.status_code == 422: - raise UnprocessableEntityError( - pydantic_v1.parse_obj_as(HttpValidationError, _response.json()) # type: ignore - ) - try: - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) - - async def model_params( - self, *, request: SingleSeriesForecast, request_options: typing.Optional[RequestOptions] = None - ) -> typing.Any: - """ - Parameters: - - request: SingleSeriesForecast. - - - request_options: typing.Optional[RequestOptions]. Request-specific configuration. 
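- Note (editorial): as with model_input_size, no description was generated; the endpoint accepts the same SingleSeriesForecast payload and appears to return the model parameters inferred for it.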
- --- - from nixtlats import SingleSeriesForecast - from nixtlats.client import AsyncNixtla - - client = AsyncNixtla( - token="YOUR_TOKEN", - base_url="https://yourhost.com/path/to/api", - ) - await client.model_params( - request=SingleSeriesForecast(), - ) - """ - _response = await self._client_wrapper.httpx_client.request( - "POST", - urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", "model_params"), - params=jsonable_encoder( - request_options.get("additional_query_parameters") if request_options is not None else None - ), - json=jsonable_encoder(request) - if request_options is None or request_options.get("additional_body_parameters") is None - else { - **jsonable_encoder(request), - **(jsonable_encoder(remove_none_from_dict(request_options.get("additional_body_parameters", {})))), - }, - headers=jsonable_encoder( - remove_none_from_dict( - { - **self._client_wrapper.get_headers(), - **(request_options.get("additional_headers", {}) if request_options is not None else {}), - } - ) - ), - timeout=request_options.get("timeout_in_seconds") - if request_options is not None and request_options.get("timeout_in_seconds") is not None - else self._client_wrapper.get_timeout(), - retries=0, - max_retries=request_options.get("max_retries") if request_options is not None else 0, # type: ignore - ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(typing.Any, _response.json()) # type: ignore - if _response.status_code == 422: - raise UnprocessableEntityError( - pydantic_v1.parse_obj_as(HttpValidationError, _response.json()) # type: ignore - ) - try: - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) - - async def timegpt( - self, *, request: SingleSeriesForecast, request_options: typing.Optional[RequestOptions] = None - ) -> typing.Any: - """ - This endpoint predicts the future values of a single time series based on the provided data. It takes a JSON as an input containing information like the series frequency and historical data. (See below for a full description of the parameters.) The response contains the predicted values based on the input arguments. Get your token for private beta at https://dashboard.nixtla.io - - Parameters: - - request: SingleSeriesForecast. - - - request_options: typing.Optional[RequestOptions]. Request-specific configuration. 
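- Note (editorial): in the example below, fh=7 asks for a seven-step-ahead forecast and x maps each date to two exogenous values; the x dates extend seven days beyond the last y date (2016-01-20 vs 2016-01-13) so the regressors cover the forecast horizon.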
- --- - from nixtlats import Model, SingleSeriesForecast - from nixtlats.client import AsyncNixtla - - client = AsyncNixtla( - token="YOUR_TOKEN", - base_url="https://yourhost.com/path/to/api", - ) - await client.timegpt( - request=SingleSeriesForecast( - fewshot_steps=0, - model=Model.TIMEGPT_1, - freq="D", - level=[90], - fh=7, - y={ - "2015-12-02": 8.71177264560569, - "2015-12-03": 8.05610965954506, - "2015-12-04": 8.08147504013705, - "2015-12-05": 7.45876269238096, - "2015-12-06": 8.01400499477946, - "2015-12-07": 8.49678638163858, - "2015-12-08": 7.98104975966596, - "2015-12-09": 7.77779262633883, - "2015-12-10": 8.2602342916073, - "2015-12-11": 7.86633892304654, - "2015-12-12": 7.31055015853442, - "2015-12-13": 7.71824095195932, - "2015-12-14": 8.31947369244219, - "2015-12-15": 8.23668532271246, - "2015-12-16": 7.80751004221619, - "2015-12-17": 7.59186171488993, - "2015-12-18": 7.52886925664225, - "2015-12-19": 7.17165682276851, - "2015-12-20": 7.89133075766189, - "2015-12-21": 8.36007143564403, - "2015-12-22": 8.11042723757502, - "2015-12-23": 7.77527584648686, - "2015-12-24": 7.34729970074316, - "2015-12-25": 7.30182234213793, - "2015-12-26": 7.12044437239249, - "2015-12-27": 8.87877607170755, - "2015-12-28": 9.25061821847475, - "2015-12-29": 9.24792513230345, - "2015-12-30": 8.39140318535794, - "2015-12-31": 8.00469951054955, - "2016-01-01": 7.58933582317062, - "2016-01-02": 7.82524529143177, - "2016-01-03": 8.24931374626064, - "2016-01-04": 9.29514097366865, - "2016-01-05": 8.56826646160024, - "2016-01-06": 8.35255436947459, - "2016-01-07": 8.29579811063615, - "2016-01-08": 8.29029259122431, - "2016-01-09": 7.78572089653462, - "2016-01-10": 8.28172399041139, - "2016-01-11": 8.4707303170059, - "2016-01-12": 8.13505390861157, - "2016-01-13": 8.06714903991011, - }, - x={ - "2015-12-02": [0.09323074669638598, 0.6368254238538134], - "2015-12-03": [0.3112324026372236, 0.6889811850504678], - "2015-12-04": [0.481377116596614, 0.9499072761820243], - "2015-12-05": [0.828714746663481, 0.8041572577576312], - "2015-12-06": [0.9057716575675051, 0.9704840572309358], - "2015-12-07": [0.11973559067683959, 0.5511723211494531], - "2015-12-08": [0.7606320950967038, 0.6502204496427739], - "2015-12-09": [0.6674402019606482, 0.5371174184068592], - "2015-12-10": [0.6309756549373285, 0.5962737337684483], - "2015-12-11": [0.43906877315373927, 0.1654105249520328], - "2015-12-12": [0.04035053656768495, 0.015327409476712961], - "2015-12-13": [0.24377685055222176, 0.8627561902830486], - "2015-12-14": [0.4681463148103986, 0.7889604220806076], - "2015-12-15": [0.08345764193688965, 0.1003789877662995], - "2015-12-16": [0.4771178995736667, 0.4958892103475401], - "2015-12-17": [0.3486535805107692, 0.1148378356945654], - "2015-12-18": [0.6630786017711142, 0.3849924472769859], - "2015-12-19": [0.011778418994099704, 0.5115520644698133], - "2015-12-20": [0.9038837166773512, 0.8848353753020387], - "2015-12-21": [0.7843282827835409, 0.3103093475671188], - "2015-12-22": [0.25362104487399484, 0.9254139777164822], - "2015-12-23": [0.30107456907444907, 0.33020953364803796], - "2015-12-24": [0.027255284501599086, 0.9447565679813503], - "2015-12-25": [0.40549024123597277, 0.9460884659190596], - "2015-12-26": [0.5016171568983224, 0.7067171277571931], - "2015-12-27": [0.827190957653689, 0.4005053404479477], - "2015-12-28": [0.2596591659030716, 0.47506589627248297], - "2015-12-29": [0.49720915846888825, 0.6751702910147392], - "2015-12-30": [0.6674937132903789, 0.9931963725210304], - "2015-12-31": [0.9291459144099821, 
0.43747139612392505], - "2016-01-01": [0.9582822834991531, 0.04487218641374102], - "2016-01-02": [0.029270976123749515, 0.23398035651852978], - "2016-01-03": [0.515109021575783, 0.808477728186397], - "2016-01-04": [0.5435684835160831, 0.07870797706843025], - "2016-01-05": [0.7621319274444724, 0.8516085958801328], - "2016-01-06": [0.26156943890310125, 0.5111017026367417], - "2016-01-07": [0.0564861467325366, 0.12697067125791017], - "2016-01-08": [0.37440546078742454, 0.2012358008441526], - "2016-01-09": [0.29688542060907375, 0.43556021285702873], - "2016-01-10": [0.9898730907589448, 0.7713248129524874], - "2016-01-11": [0.49238285777903246, 0.7197147811618813], - "2016-01-12": [0.8318652201289694, 0.6091414491883265], - "2016-01-13": [0.02343720965025453, 0.7961211448973647], - "2016-01-14": [0.7633021263247786, 0.018469346610634263], - "2016-01-15": [0.493855721362772, 0.7451084649371831], - "2016-01-16": [0.21962262556226464, 0.9274272764531087], - "2016-01-17": [0.885379459035442, 0.3507870357790486], - "2016-01-18": [0.6460546060324733, 0.8225983771798888], - "2016-01-19": [0.3604601800609517, 0.4588944740666544], - "2016-01-20": [0.4191777260908942, 0.2500836388909915], - }, - clean_ex_first=True, - ), - ) - """ - _response = await self._client_wrapper.httpx_client.request( - "POST", - urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", "timegpt"), - params=jsonable_encoder( - request_options.get("additional_query_parameters") if request_options is not None else None - ), - json=jsonable_encoder(request) - if request_options is None or request_options.get("additional_body_parameters") is None - else { - **jsonable_encoder(request), - **(jsonable_encoder(remove_none_from_dict(request_options.get("additional_body_parameters", {})))), - }, - headers=jsonable_encoder( - remove_none_from_dict( - { - **self._client_wrapper.get_headers(), - **(request_options.get("additional_headers", {}) if request_options is not None else {}), - } - ) - ), - timeout=request_options.get("timeout_in_seconds") - if request_options is not None and request_options.get("timeout_in_seconds") is not None - else self._client_wrapper.get_timeout(), - retries=0, - max_retries=request_options.get("max_retries") if request_options is not None else 0, # type: ignore - ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(typing.Any, _response.json()) # type: ignore - if _response.status_code == 422: - raise UnprocessableEntityError( - pydantic_v1.parse_obj_as(HttpValidationError, _response.json()) # type: ignore - ) - try: - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) - - async def timegpt_historic( - self, *, request: SingleSeriesInsampleForecast, request_options: typing.Optional[RequestOptions] = None - ) -> typing.Any: - """ - Based on the provided data, this endpoint predicts time series data for the in-sample period (historical period). It takes a JSON as an input, including information like the series frequency and the historical data. (See below for a full description of the parameters.) The response contains the predicted values for the historical period. Usually useful for anomaly detection. Get your token for private beta at https://dashboard.nixtla.io. - - Parameters: - - request: SingleSeriesInsampleForecast. - - - request_options: typing.Optional[RequestOptions]. Request-specific configuration. 
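- Note (editorial): this is the single-series in-sample endpoint under the legacy 'timegpt' route; it takes the same SingleSeriesInsampleForecast payload as historic_forecast above, as the identical example below shows.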
- --- - from nixtlats import Model, SingleSeriesInsampleForecast - from nixtlats.client import AsyncNixtla - - client = AsyncNixtla( - token="YOUR_TOKEN", - base_url="https://yourhost.com/path/to/api", - ) - await client.timegpt_historic( - request=SingleSeriesInsampleForecast( - model=Model.TIMEGPT_1, - freq="D", - level=[90], - y={ - "2015-12-02": 8.71177264560569, - "2015-12-03": 8.05610965954506, - "2015-12-04": 8.08147504013705, - "2015-12-05": 7.45876269238096, - "2015-12-06": 8.01400499477946, - "2015-12-07": 8.49678638163858, - "2015-12-08": 7.98104975966596, - "2015-12-09": 7.77779262633883, - "2015-12-10": 8.2602342916073, - "2015-12-11": 7.86633892304654, - "2015-12-12": 7.31055015853442, - "2015-12-13": 7.71824095195932, - "2015-12-14": 8.31947369244219, - "2015-12-15": 8.23668532271246, - "2015-12-16": 7.80751004221619, - "2015-12-17": 7.59186171488993, - "2015-12-18": 7.52886925664225, - "2015-12-19": 7.17165682276851, - "2015-12-20": 7.89133075766189, - "2015-12-21": 8.36007143564403, - "2015-12-22": 8.11042723757502, - "2015-12-23": 7.77527584648686, - "2015-12-24": 7.34729970074316, - "2015-12-25": 7.30182234213793, - "2015-12-26": 7.12044437239249, - "2015-12-27": 8.87877607170755, - "2015-12-28": 9.25061821847475, - "2015-12-29": 9.24792513230345, - "2015-12-30": 8.39140318535794, - "2015-12-31": 8.00469951054955, - "2016-01-01": 7.58933582317062, - "2016-01-02": 7.82524529143177, - "2016-01-03": 8.24931374626064, - "2016-01-04": 9.29514097366865, - "2016-01-05": 8.56826646160024, - "2016-01-06": 8.35255436947459, - "2016-01-07": 8.29579811063615, - "2016-01-08": 8.29029259122431, - "2016-01-09": 7.78572089653462, - "2016-01-10": 8.28172399041139, - "2016-01-11": 8.4707303170059, - "2016-01-12": 8.13505390861157, - "2016-01-13": 8.06714903991011, - }, - clean_ex_first=True, - ), - ) - """ - _response = await self._client_wrapper.httpx_client.request( - "POST", - urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", "timegpt_historic"), - params=jsonable_encoder( - request_options.get("additional_query_parameters") if request_options is not None else None - ), - json=jsonable_encoder(request) - if request_options is None or request_options.get("additional_body_parameters") is None - else { - **jsonable_encoder(request), - **(jsonable_encoder(remove_none_from_dict(request_options.get("additional_body_parameters", {})))), - }, - headers=jsonable_encoder( - remove_none_from_dict( - { - **self._client_wrapper.get_headers(), - **(request_options.get("additional_headers", {}) if request_options is not None else {}), - } - ) - ), - timeout=request_options.get("timeout_in_seconds") - if request_options is not None and request_options.get("timeout_in_seconds") is not None - else self._client_wrapper.get_timeout(), - retries=0, - max_retries=request_options.get("max_retries") if request_options is not None else 0, # type: ignore - ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(typing.Any, _response.json()) # type: ignore - if _response.status_code == 422: - raise UnprocessableEntityError( - pydantic_v1.parse_obj_as(HttpValidationError, _response.json()) # type: ignore - ) - try: - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) - - async def timegpt_multi_series( - self, *, request: MultiSeriesForecast, request_options: typing.Optional[RequestOptions] = None - ) -> typing.Any: - """ - 
Based on the provided data, this endpoint predicts the future values of multiple time series at once. It takes a JSON as an input containing information like the series frequency and historical data. (See below for a full description of the parameters.) The response contains the predicted values for each series based on the input arguments. Get your token for private beta at https://dashboard.nixtla.io. - - Parameters: - - request: MultiSeriesForecast. - - - request_options: typing.Optional[RequestOptions]. Request-specific configuration. - --- - from nixtlats import MultiSeriesForecast - from nixtlats.client import AsyncNixtla - - client = AsyncNixtla( - token="YOUR_TOKEN", - base_url="https://yourhost.com/path/to/api", - ) - await client.timegpt_multi_series( - request=MultiSeriesForecast(), - ) - """ - _response = await self._client_wrapper.httpx_client.request( - "POST", - urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", "timegpt_multi_series"), - params=jsonable_encoder( - request_options.get("additional_query_parameters") if request_options is not None else None - ), - json=jsonable_encoder(request) - if request_options is None or request_options.get("additional_body_parameters") is None - else { - **jsonable_encoder(request), - **(jsonable_encoder(remove_none_from_dict(request_options.get("additional_body_parameters", {})))), - }, - headers=jsonable_encoder( - remove_none_from_dict( - { - **self._client_wrapper.get_headers(), - **(request_options.get("additional_headers", {}) if request_options is not None else {}), - } - ) - ), - timeout=request_options.get("timeout_in_seconds") - if request_options is not None and request_options.get("timeout_in_seconds") is not None - else self._client_wrapper.get_timeout(), - retries=0, - max_retries=request_options.get("max_retries") if request_options is not None else 0, # type: ignore - ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(typing.Any, _response.json()) # type: ignore - if _response.status_code == 422: - raise UnprocessableEntityError( - pydantic_v1.parse_obj_as(HttpValidationError, _response.json()) # type: ignore - ) - try: - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) - - async def timegpt_multi_series_historic( - self, *, request: MultiSeriesInsampleForecast, request_options: typing.Optional[RequestOptions] = None - ) -> typing.Any: - """ - Based on the provided data, this endpoint predicts the in-sample period (historical period) values of multiple time series at once. It takes a JSON as an input containing information like the series frequency and historical data. (See below for a full description of the parameters.) The response contains the predicted values for the historical period. Usually useful for anomaly detection. Get your token for private beta at https://dashboard.nixtla.io. - - Parameters: - - request: MultiSeriesInsampleForecast. - - - request_options: typing.Optional[RequestOptions]. Request-specific configuration. 
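- Note (editorial): this route duplicates historic_forecast_multi_series under the legacy 'timegpt' naming; the generated description, payload type and example are identical, so the two appear to be aliases of the same operation.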
- --- - from nixtlats import Model, MultiSeriesInsampleForecast - from nixtlats.client import AsyncNixtla - - client = AsyncNixtla( - token="YOUR_TOKEN", - base_url="https://yourhost.com/path/to/api", - ) - await client.timegpt_multi_series_historic( - request=MultiSeriesInsampleForecast( - model=Model.TIMEGPT_1, - freq="D", - level=[90], - y={ - "columns": {"0": "unique_id", "1": "ds", "2": "y"}, - "data": { - "0": { - "0": "PeytonManning", - "1": "2015-12-02", - "2": 8.71177264560569, - }, - "1": { - "0": "PeytonManning", - "1": "2015-12-03", - "2": 8.05610965954506, - }, - "2": { - "0": "PeytonManning", - "1": "2015-12-04", - "2": 8.08147504013705, - }, - "3": { - "0": "PeytonManning", - "1": "2015-12-05", - "2": 7.45876269238096, - }, - "4": { - "0": "PeytonManning", - "1": "2015-12-06", - "2": 8.01400499477946, - }, - "5": { - "0": "PeytonManning", - "1": "2015-12-07", - "2": 8.49678638163858, - }, - "6": { - "0": "PeytonManning", - "1": "2015-12-08", - "2": 7.98104975966596, - }, - "7": { - "0": "PeytonManning", - "1": "2015-12-09", - "2": 7.77779262633883, - }, - "8": { - "0": "PeytonManning", - "1": "2015-12-10", - "2": 8.2602342916073, - }, - "9": { - "0": "PeytonManning", - "1": "2015-12-11", - "2": 7.86633892304654, - }, - "10": { - "0": "PeytonManning", - "1": "2015-12-12", - "2": 7.31055015853442, - }, - "11": { - "0": "PeytonManning", - "1": "2015-12-13", - "2": 7.71824095195932, - }, - "12": { - "0": "PeytonManning", - "1": "2015-12-14", - "2": 8.31947369244219, - }, - "13": { - "0": "PeytonManning", - "1": "2015-12-15", - "2": 8.23668532271246, - }, - "14": { - "0": "PeytonManning", - "1": "2015-12-16", - "2": 7.80751004221619, - }, - "15": { - "0": "PeytonManning", - "1": "2015-12-17", - "2": 7.59186171488993, - }, - "16": { - "0": "PeytonManning", - "1": "2015-12-18", - "2": 7.52886925664225, - }, - "17": { - "0": "PeytonManning", - "1": "2015-12-19", - "2": 7.17165682276851, - }, - "18": { - "0": "PeytonManning", - "1": "2015-12-20", - "2": 7.89133075766189, - }, - "19": { - "0": "PeytonManning", - "1": "2015-12-21", - "2": 8.36007143564403, - }, - "20": { - "0": "PeytonManning", - "1": "2015-12-22", - "2": 8.11042723757502, - }, - "21": { - "0": "PeytonManning", - "1": "2015-12-23", - "2": 7.77527584648686, - }, - "22": { - "0": "PeytonManning", - "1": "2015-12-24", - "2": 7.34729970074316, - }, - "23": { - "0": "PeytonManning", - "1": "2015-12-25", - "2": 7.30182234213793, - }, - "24": { - "0": "PeytonManning", - "1": "2015-12-26", - "2": 7.12044437239249, - }, - "25": { - "0": "PeytonManning", - "1": "2015-12-27", - "2": 8.87877607170755, - }, - "26": { - "0": "PeytonManning", - "1": "2015-12-28", - "2": 9.25061821847475, - }, - "27": { - "0": "PeytonManning", - "1": "2015-12-29", - "2": 9.24792513230345, - }, - "28": { - "0": "PeytonManning", - "1": "2015-12-30", - "2": 8.39140318535794, - }, - "29": { - "0": "PeytonManning", - "1": "2015-12-31", - "2": 8.00469951054955, - }, - "30": { - "0": "PeytonManning", - "1": "2016-01-01", - "2": 7.58933582317062, - }, - "31": { - "0": "PeytonManning", - "1": "2016-01-02", - "2": 7.82524529143177, - }, - "32": { - "0": "PeytonManning", - "1": "2016-01-03", - "2": 8.24931374626064, - }, - "33": { - "0": "PeytonManning", - "1": "2016-01-04", - "2": 9.29514097366865, - }, - "34": { - "0": "PeytonManning", - "1": "2016-01-05", - "2": 8.56826646160024, - }, - "35": { - "0": "PeytonManning", - "1": "2016-01-06", - "2": 8.35255436947459, - }, - "36": { - "0": "PeytonManning", - "1": "2016-01-07", - "2": 8.29579811063615, - }, - "37": 
{ - "0": "PeytonManning", - "1": "2016-01-08", - "2": 8.29029259122431, - }, - "38": { - "0": "PeytonManning", - "1": "2016-01-09", - "2": 7.78572089653462, - }, - "39": { - "0": "PeytonManning", - "1": "2016-01-10", - "2": 8.28172399041139, - }, - "40": { - "0": "PeytonManning", - "1": "2016-01-11", - "2": 8.4707303170059, - }, - "41": { - "0": "PeytonManning", - "1": "2016-01-12", - "2": 8.13505390861157, - }, - "42": { - "0": "PeytonManning", - "1": "2016-01-13", - "2": 8.06714903991011, - }, - "43": { - "0": "TomBrady", - "1": "2015-12-02", - "2": 8.71177264560569, - }, - "44": { - "0": "TomBrady", - "1": "2015-12-03", - "2": 8.05610965954506, - }, - "45": { - "0": "TomBrady", - "1": "2015-12-04", - "2": 8.08147504013705, - }, - "46": { - "0": "TomBrady", - "1": "2015-12-05", - "2": 7.45876269238096, - }, - "47": { - "0": "TomBrady", - "1": "2015-12-06", - "2": 8.01400499477946, - }, - "48": { - "0": "TomBrady", - "1": "2015-12-07", - "2": 8.49678638163858, - }, - "49": { - "0": "TomBrady", - "1": "2015-12-08", - "2": 7.98104975966596, - }, - "50": { - "0": "TomBrady", - "1": "2015-12-09", - "2": 7.77779262633883, - }, - "51": { - "0": "TomBrady", - "1": "2015-12-10", - "2": 8.2602342916073, - }, - "52": { - "0": "TomBrady", - "1": "2015-12-11", - "2": 7.86633892304654, - }, - "53": { - "0": "TomBrady", - "1": "2015-12-12", - "2": 7.31055015853442, - }, - "54": { - "0": "TomBrady", - "1": "2015-12-13", - "2": 7.71824095195932, - }, - "55": { - "0": "TomBrady", - "1": "2015-12-14", - "2": 8.31947369244219, - }, - "56": { - "0": "TomBrady", - "1": "2015-12-15", - "2": 8.23668532271246, - }, - "57": { - "0": "TomBrady", - "1": "2015-12-16", - "2": 7.80751004221619, - }, - "58": { - "0": "TomBrady", - "1": "2015-12-17", - "2": 7.59186171488993, - }, - "59": { - "0": "TomBrady", - "1": "2015-12-18", - "2": 7.52886925664225, - }, - "60": { - "0": "TomBrady", - "1": "2015-12-19", - "2": 7.17165682276851, - }, - "61": { - "0": "TomBrady", - "1": "2015-12-20", - "2": 7.89133075766189, - }, - "62": { - "0": "TomBrady", - "1": "2015-12-21", - "2": 8.36007143564403, - }, - "63": { - "0": "TomBrady", - "1": "2015-12-22", - "2": 8.11042723757502, - }, - "64": { - "0": "TomBrady", - "1": "2015-12-23", - "2": 7.77527584648686, - }, - "65": { - "0": "TomBrady", - "1": "2015-12-24", - "2": 7.34729970074316, - }, - "66": { - "0": "TomBrady", - "1": "2015-12-25", - "2": 7.30182234213793, - }, - "67": { - "0": "TomBrady", - "1": "2015-12-26", - "2": 7.12044437239249, - }, - "68": { - "0": "TomBrady", - "1": "2015-12-27", - "2": 8.87877607170755, - }, - "69": { - "0": "TomBrady", - "1": "2015-12-28", - "2": 9.25061821847475, - }, - "70": { - "0": "TomBrady", - "1": "2015-12-29", - "2": 9.24792513230345, - }, - "71": { - "0": "TomBrady", - "1": "2015-12-30", - "2": 8.39140318535794, - }, - "72": { - "0": "TomBrady", - "1": "2015-12-31", - "2": 8.00469951054955, - }, - "73": { - "0": "TomBrady", - "1": "2016-01-01", - "2": 7.58933582317062, - }, - "74": { - "0": "TomBrady", - "1": "2016-01-02", - "2": 7.82524529143177, - }, - "75": { - "0": "TomBrady", - "1": "2016-01-03", - "2": 8.24931374626064, - }, - "76": { - "0": "TomBrady", - "1": "2016-01-04", - "2": 9.29514097366865, - }, - "77": { - "0": "TomBrady", - "1": "2016-01-05", - "2": 8.56826646160024, - }, - "78": { - "0": "TomBrady", - "1": "2016-01-06", - "2": 8.35255436947459, - }, - "79": { - "0": "TomBrady", - "1": "2016-01-07", - "2": 8.29579811063615, - }, - "80": { - "0": "TomBrady", - "1": "2016-01-08", - "2": 8.29029259122431, - }, - "81": { - 
"0": "TomBrady", - "1": "2016-01-09", - "2": 7.78572089653462, - }, - "82": { - "0": "TomBrady", - "1": "2016-01-10", - "2": 8.28172399041139, - }, - "83": { - "0": "TomBrady", - "1": "2016-01-11", - "2": 8.4707303170059, - }, - "84": { - "0": "TomBrady", - "1": "2016-01-12", - "2": 8.13505390861157, - }, - "85": { - "0": "TomBrady", - "1": "2016-01-13", - "2": 8.06714903991011, - }, - }, - }, - ), - ) - """ - _response = await self._client_wrapper.httpx_client.request( - "POST", - urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", "timegpt_multi_series_historic"), - params=jsonable_encoder( - request_options.get("additional_query_parameters") if request_options is not None else None - ), - json=jsonable_encoder(request) - if request_options is None or request_options.get("additional_body_parameters") is None - else { - **jsonable_encoder(request), - **(jsonable_encoder(remove_none_from_dict(request_options.get("additional_body_parameters", {})))), - }, - headers=jsonable_encoder( - remove_none_from_dict( - { - **self._client_wrapper.get_headers(), - **(request_options.get("additional_headers", {}) if request_options is not None else {}), - } - ) - ), - timeout=request_options.get("timeout_in_seconds") - if request_options is not None and request_options.get("timeout_in_seconds") is not None - else self._client_wrapper.get_timeout(), - retries=0, - max_retries=request_options.get("max_retries") if request_options is not None else 0, # type: ignore - ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(typing.Any, _response.json()) # type: ignore - if _response.status_code == 422: - raise UnprocessableEntityError( - pydantic_v1.parse_obj_as(HttpValidationError, _response.json()) # type: ignore - ) - try: - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) - - async def timegpt_multi_series_anomalies( - self, *, request: MultiSeriesAnomaly, request_options: typing.Optional[RequestOptions] = None - ) -> typing.Any: - """ - Based on the provided data, this endpoint detects the anomalies in the historical perdiod of multiple time series at once. It takes a JSON as an input containing information like the series frequency and historical data. (See below for a full description of the parameters.) The response contains a flag indicating if the date has a anomaly and also provides the prediction interval used to define if an observation is an anomaly.Get your token for private beta at https://dashboard.nixtla.io. - - Parameters: - - request: MultiSeriesAnomaly. - - - request_options: typing.Optional[RequestOptions]. Request-specific configuration. 
- --- - from nixtlats import MultiSeriesAnomaly - from nixtlats.client import AsyncNixtla - - client = AsyncNixtla( - token="YOUR_TOKEN", - base_url="https://yourhost.com/path/to/api", - ) - await client.timegpt_multi_series_anomalies( - request=MultiSeriesAnomaly(), - ) - """ - _response = await self._client_wrapper.httpx_client.request( - "POST", - urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", "timegpt_multi_series_anomalies"), - params=jsonable_encoder( - request_options.get("additional_query_parameters") if request_options is not None else None - ), - json=jsonable_encoder(request) - if request_options is None or request_options.get("additional_body_parameters") is None - else { - **jsonable_encoder(request), - **(jsonable_encoder(remove_none_from_dict(request_options.get("additional_body_parameters", {})))), - }, - headers=jsonable_encoder( - remove_none_from_dict( - { - **self._client_wrapper.get_headers(), - **(request_options.get("additional_headers", {}) if request_options is not None else {}), - } - ) - ), - timeout=request_options.get("timeout_in_seconds") - if request_options is not None and request_options.get("timeout_in_seconds") is not None - else self._client_wrapper.get_timeout(), - retries=0, - max_retries=request_options.get("max_retries") if request_options is not None else 0, # type: ignore - ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(typing.Any, _response.json()) # type: ignore - if _response.status_code == 422: - raise UnprocessableEntityError( - pydantic_v1.parse_obj_as(HttpValidationError, _response.json()) # type: ignore - ) - try: - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) - - async def timegpt_multi_series_cross_validation( - self, *, request: MultiSeriesCrossValidation, request_options: typing.Optional[RequestOptions] = None - ) -> typing.Any: - """ - Perform Cross Validation for multiple series - - Parameters: - - request: MultiSeriesCrossValidation. - - - request_options: typing.Optional[RequestOptions]. Request-specific configuration. 
- --- - from nixtlats import MultiSeriesCrossValidation - from nixtlats.client import AsyncNixtla - - client = AsyncNixtla( - token="YOUR_TOKEN", - base_url="https://yourhost.com/path/to/api", - ) - await client.timegpt_multi_series_cross_validation( - request=MultiSeriesCrossValidation(), - ) - """ - _response = await self._client_wrapper.httpx_client.request( - "POST", - urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", "timegpt_multi_series_cross_validation"), - params=jsonable_encoder( - request_options.get("additional_query_parameters") if request_options is not None else None - ), - json=jsonable_encoder(request) - if request_options is None or request_options.get("additional_body_parameters") is None - else { - **jsonable_encoder(request), - **(jsonable_encoder(remove_none_from_dict(request_options.get("additional_body_parameters", {})))), - }, - headers=jsonable_encoder( - remove_none_from_dict( - { - **self._client_wrapper.get_headers(), - **(request_options.get("additional_headers", {}) if request_options is not None else {}), - } - ) - ), - timeout=request_options.get("timeout_in_seconds") - if request_options is not None and request_options.get("timeout_in_seconds") is not None - else self._client_wrapper.get_timeout(), - retries=0, - max_retries=request_options.get("max_retries") if request_options is not None else 0, # type: ignore - ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(typing.Any, _response.json()) # type: ignore - if _response.status_code == 422: - raise UnprocessableEntityError( - pydantic_v1.parse_obj_as(HttpValidationError, _response.json()) # type: ignore - ) - try: - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) - - async def timegpt_input_size( - self, *, request: SingleSeriesForecast, request_options: typing.Optional[RequestOptions] = None - ) -> typing.Any: - """ - Parameters: - - request: SingleSeriesForecast. - - - request_options: typing.Optional[RequestOptions]. Request-specific configuration. 
- --- - from nixtlats import SingleSeriesForecast - from nixtlats.client import AsyncNixtla - - client = AsyncNixtla( - token="YOUR_TOKEN", - base_url="https://yourhost.com/path/to/api", - ) - await client.timegpt_input_size( - request=SingleSeriesForecast(), - ) - """ - _response = await self._client_wrapper.httpx_client.request( - "POST", - urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", "timegpt_input_size"), - params=jsonable_encoder( - request_options.get("additional_query_parameters") if request_options is not None else None - ), - json=jsonable_encoder(request) - if request_options is None or request_options.get("additional_body_parameters") is None - else { - **jsonable_encoder(request), - **(jsonable_encoder(remove_none_from_dict(request_options.get("additional_body_parameters", {})))), - }, - headers=jsonable_encoder( - remove_none_from_dict( - { - **self._client_wrapper.get_headers(), - **(request_options.get("additional_headers", {}) if request_options is not None else {}), - } - ) - ), - timeout=request_options.get("timeout_in_seconds") - if request_options is not None and request_options.get("timeout_in_seconds") is not None - else self._client_wrapper.get_timeout(), - retries=0, - max_retries=request_options.get("max_retries") if request_options is not None else 0, # type: ignore - ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(typing.Any, _response.json()) # type: ignore - if _response.status_code == 422: - raise UnprocessableEntityError( - pydantic_v1.parse_obj_as(HttpValidationError, _response.json()) # type: ignore - ) - try: - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) - - async def timegpt_model_params( - self, *, request: SingleSeriesForecast, request_options: typing.Optional[RequestOptions] = None - ) -> typing.Any: - """ - Parameters: - - request: SingleSeriesForecast. - - - request_options: typing.Optional[RequestOptions]. Request-specific configuration. 
- --- - from nixtlats import SingleSeriesForecast - from nixtlats.client import AsyncNixtla - - client = AsyncNixtla( - token="YOUR_TOKEN", - base_url="https://yourhost.com/path/to/api", - ) - await client.timegpt_model_params( - request=SingleSeriesForecast(), - ) - """ - _response = await self._client_wrapper.httpx_client.request( - "POST", - urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", "timegpt_model_params"), - params=jsonable_encoder( - request_options.get("additional_query_parameters") if request_options is not None else None - ), - json=jsonable_encoder(request) - if request_options is None or request_options.get("additional_body_parameters") is None - else { - **jsonable_encoder(request), - **(jsonable_encoder(remove_none_from_dict(request_options.get("additional_body_parameters", {})))), - }, - headers=jsonable_encoder( - remove_none_from_dict( - { - **self._client_wrapper.get_headers(), - **(request_options.get("additional_headers", {}) if request_options is not None else {}), - } - ) - ), - timeout=request_options.get("timeout_in_seconds") - if request_options is not None and request_options.get("timeout_in_seconds") is not None - else self._client_wrapper.get_timeout(), - retries=0, - max_retries=request_options.get("max_retries") if request_options is not None else 0, # type: ignore - ) - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(typing.Any, _response.json()) # type: ignore - if _response.status_code == 422: - raise UnprocessableEntityError( - pydantic_v1.parse_obj_as(HttpValidationError, _response.json()) # type: ignore - ) - try: - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) diff --git a/src/nixtlats/core/client_wrapper.py b/src/nixtlats/core/client_wrapper.py deleted file mode 100644 index 8377a745..00000000 --- a/src/nixtlats/core/client_wrapper.py +++ /dev/null @@ -1,67 +0,0 @@ -# This file was auto-generated by Fern from our API Definition. 
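Every endpoint above assembles its JSON body the same way: the encoded request is the base and any `additional_body_parameters` from `request_options` are spread on top, with `None` values stripped first. A minimal sketch of that merge, using a hypothetical `merge_body` helper and plain dicts in place of the Pydantic request (the SDK itself inlines this logic at each call site):

import typing


def merge_body(
    encoded_request: typing.Dict[str, typing.Any],
    request_options: typing.Optional[typing.Dict[str, typing.Any]],
) -> typing.Dict[str, typing.Any]:
    # No options, or no extra body parameters: send the encoded request as-is.
    if request_options is None or request_options.get("additional_body_parameters") is None:
        return encoded_request
    # Equivalent of remove_none_from_dict: drop None values before merging.
    extra = {k: v for k, v in request_options["additional_body_parameters"].items() if v is not None}
    # Later keys win, so per-request parameters override the encoded request.
    return {**encoded_request, **extra}


# Example: override the horizon for a single call without rebuilding the request.
assert merge_body({"freq": "D", "fh": 7}, {"additional_body_parameters": {"fh": 14}}) == {"freq": "D", "fh": 14}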
- -import typing - -import httpx - -from .http_client import AsyncHttpClient, HttpClient - - -class BaseClientWrapper: - def __init__( - self, - *, - token: typing.Union[str, typing.Callable[[], str]], - base_url: str, - timeout: typing.Optional[float] = None, - ): - self._token = token - self._base_url = base_url - self._timeout = timeout - - def get_headers(self) -> typing.Dict[str, str]: - headers: typing.Dict[str, str] = { - "X-Fern-Language": "Python", - "X-Fern-SDK-Name": "nixtla", - "X-Fern-SDK-Version": "0.0.0", - } - headers["Authorization"] = f"Bearer {self._get_token()}" - return headers - - def _get_token(self) -> str: - if isinstance(self._token, str): - return self._token - else: - return self._token() - - def get_base_url(self) -> str: - return self._base_url - - def get_timeout(self) -> typing.Optional[float]: - return self._timeout - - -class SyncClientWrapper(BaseClientWrapper): - def __init__( - self, - *, - token: typing.Union[str, typing.Callable[[], str]], - base_url: str, - timeout: typing.Optional[float] = None, - httpx_client: httpx.Client, - ): - super().__init__(token=token, base_url=base_url, timeout=timeout) - self.httpx_client = HttpClient(httpx_client=httpx_client) - - -class AsyncClientWrapper(BaseClientWrapper): - def __init__( - self, - *, - token: typing.Union[str, typing.Callable[[], str]], - base_url: str, - timeout: typing.Optional[float] = None, - httpx_client: httpx.AsyncClient, - ): - super().__init__(token=token, base_url=base_url, timeout=timeout) - self.httpx_client = AsyncHttpClient(httpx_client=httpx_client) diff --git a/src/nixtlats/core/file.py b/src/nixtlats/core/file.py deleted file mode 100644 index cb0d40bb..00000000 --- a/src/nixtlats/core/file.py +++ /dev/null @@ -1,38 +0,0 @@ -# This file was auto-generated by Fern from our API Definition. - -import typing - -# File typing inspired by the flexibility of types within the httpx library -# https://github.com/encode/httpx/blob/master/httpx/_types.py -FileContent = typing.Union[typing.IO[bytes], bytes, str] -File = typing.Union[ - # file (or bytes) - FileContent, - # (filename, file (or bytes)) - typing.Tuple[typing.Optional[str], FileContent], - # (filename, file (or bytes), content_type) - typing.Tuple[typing.Optional[str], FileContent, typing.Optional[str]], - # (filename, file (or bytes), content_type, headers) - typing.Tuple[typing.Optional[str], FileContent, typing.Optional[str], typing.Mapping[str, str]], -] - - -def convert_file_dict_to_httpx_tuples( - d: typing.Dict[str, typing.Union[File, typing.List[File]]] -) -> typing.List[typing.Tuple[str, File]]: - """ - The format we use is a list of tuples, where the first element is the - name of the file and the second is the file object. Typically HTTPX wants - a dict, but to be able to send lists of files, you have to use the list - approach (which also works for non-lists) - https://github.com/encode/httpx/pull/1032 - """ - - httpx_tuples = [] - for key, file_like in d.items(): - if isinstance(file_like, list): - for file_like_item in file_like: - httpx_tuples.append((key, file_like_item)) - else: - httpx_tuples.append((key, file_like)) - return httpx_tuples diff --git a/src/nixtlats/core/http_client.py b/src/nixtlats/core/http_client.py deleted file mode 100644 index 4e6877df..00000000 --- a/src/nixtlats/core/http_client.py +++ /dev/null @@ -1,130 +0,0 @@ -# This file was auto-generated by Fern from our API Definition. 
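As `BaseClientWrapper._get_token` above shows, `token` can be a plain string or a zero-argument callable that is re-evaluated on every request, so rotated credentials are picked up without rebuilding the client. A small sketch of that branch (the environment-variable name is only an illustration):

import os
import typing


def resolve_token(token: typing.Union[str, typing.Callable[[], str]]) -> str:
    # Same branch as BaseClientWrapper._get_token: strings pass through,
    # callables are invoked to produce a fresh token on each call.
    if isinstance(token, str):
        return token
    return token()


assert resolve_token("static-token") == "static-token"
os.environ["NIXTLA_TOKEN"] = "rotated-token"  # hypothetical variable name, for illustration
assert resolve_token(lambda: os.environ["NIXTLA_TOKEN"]) == "rotated-token"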
- -import asyncio -import email.utils -import re -import time -import typing -from contextlib import asynccontextmanager, contextmanager -from functools import wraps -from random import random - -import httpx - -INITIAL_RETRY_DELAY_SECONDS = 0.5 -MAX_RETRY_DELAY_SECONDS = 10 -MAX_RETRY_DELAY_SECONDS_FROM_HEADER = 30 - - -def _parse_retry_after(response_headers: httpx.Headers) -> typing.Optional[float]: - """ - This function parses the `Retry-After` header in an HTTP response and returns the number of seconds to wait. - - Inspired by the urllib3 retry implementation. - """ - retry_after_ms = response_headers.get("retry-after-ms") - if retry_after_ms is not None: - try: - return int(retry_after_ms) / 1000 if int(retry_after_ms) > 0 else 0 - except Exception: - pass - - retry_after = response_headers.get("retry-after") - if retry_after is None: - return None - - # Attempt to parse the header as an int. - if re.match(r"^\s*[0-9]+\s*$", retry_after): - seconds = float(retry_after) - # Fallback to parsing it as a date. - else: - retry_date_tuple = email.utils.parsedate_tz(retry_after) - if retry_date_tuple is None: - return None - if retry_date_tuple[9] is None: # Python 2 - # Assume UTC if no timezone was specified - # On Python2.7, parsedate_tz returns None for a timezone offset - # instead of 0 if no timezone is given, where mktime_tz treats - # a None timezone offset as local time. - retry_date_tuple = retry_date_tuple[:9] + (0,) + retry_date_tuple[10:] - - retry_date = email.utils.mktime_tz(retry_date_tuple) - seconds = retry_date - time.time() - - if seconds < 0: - seconds = 0 - - return seconds - - -def _retry_timeout(response: httpx.Response, retries: int) -> float: - """ - Determine the amount of time to wait before retrying a request. - This function begins by trying to parse a retry-after header from the response, and then proceeds to use exponential backoff - with a jitter to determine the number of seconds to wait. - """ - - # If the API asks us to wait a certain amount of time (and it's a reasonable amount), just do what it says. - retry_after = _parse_retry_after(response.headers) - if retry_after is not None and retry_after <= MAX_RETRY_DELAY_SECONDS_FROM_HEADER: - return retry_after - - # Apply exponential backoff, capped at MAX_RETRY_DELAY_SECONDS. - retry_delay = min(INITIAL_RETRY_DELAY_SECONDS * pow(2.0, retries), MAX_RETRY_DELAY_SECONDS) - - # Add a randomness / jitter to the retry delay to avoid overwhelming the server with retries.
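# For reference, since the jitter factor (1 - 0.25 * random()) lies in (0.75, 1.0],
# the line below yields delays in these ranges:
#   retries=0 -> base 0.5s,                jittered to 0.375-0.5s
#   retries=2 -> base 2.0s,                jittered to 1.5-2.0s
#   retries=5 -> base min(16s, 10s) = 10s, jittered to 7.5-10s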
timeout = retry_delay * (1 - 0.25 * random()) - return timeout if timeout >= 0 else 0 - - -def _should_retry(response: httpx.Response) -> bool: - retriable_400s = [429, 408, 409] - return response.status_code >= 500 or response.status_code in retriable_400s - - -class HttpClient: - def __init__(self, *, httpx_client: httpx.Client): - self.httpx_client = httpx_client - - # Ensure that the signature of the `request` method is the same as the `httpx.Client.request` method - @wraps(httpx.Client.request) - def request( - self, *args: typing.Any, max_retries: int = 0, retries: int = 0, **kwargs: typing.Any - ) -> httpx.Response: - response = self.httpx_client.request(*args, **kwargs) - if _should_retry(response=response): - if max_retries > retries: - time.sleep(_retry_timeout(response=response, retries=retries)) - return self.request(max_retries=max_retries, retries=retries + 1, *args, **kwargs) - return response - - @wraps(httpx.Client.stream) - @contextmanager - def stream(self, *args: typing.Any, max_retries: int = 0, retries: int = 0, **kwargs: typing.Any) -> typing.Any: - with self.httpx_client.stream(*args, **kwargs) as stream: - yield stream - - -class AsyncHttpClient: - def __init__(self, *, httpx_client: httpx.AsyncClient): - self.httpx_client = httpx_client - - # Ensure that the signature of the `request` method is the same as the `httpx.AsyncClient.request` method - @wraps(httpx.AsyncClient.request) - async def request( - self, *args: typing.Any, max_retries: int = 0, retries: int = 0, **kwargs: typing.Any - ) -> httpx.Response: - response = await self.httpx_client.request(*args, **kwargs) - if _should_retry(response=response): - if max_retries > retries: - await asyncio.sleep(_retry_timeout(response=response, retries=retries)) - return await self.request(max_retries=max_retries, retries=retries + 1, *args, **kwargs) - return response - - @wraps(httpx.AsyncClient.stream) - @asynccontextmanager - async def stream( - self, *args: typing.Any, max_retries: int = 0, retries: int = 0, **kwargs: typing.Any - ) -> typing.Any: - async with self.httpx_client.stream(*args, **kwargs) as stream: - yield stream diff --git a/src/nixtlats/core/pydantic_utilities.py b/src/nixtlats/core/pydantic_utilities.py deleted file mode 100644 index 952b5f2a..00000000 --- a/src/nixtlats/core/pydantic_utilities.py +++ /dev/null @@ -1,12 +0,0 @@ -# This file was auto-generated by Fern from our API Definition. - -import pydantic - -IS_PYDANTIC_V2 = pydantic.VERSION.startswith("2.") - -if IS_PYDANTIC_V2: - import pydantic.v1 as pydantic_v1 # type: ignore # nopycln: import -else: - import pydantic as pydantic_v1 # type: ignore # nopycln: import - -__all__ = ["pydantic_v1"] diff --git a/src/nixtlats/core/request_options.py b/src/nixtlats/core/request_options.py deleted file mode 100644 index cd6f27a7..00000000 --- a/src/nixtlats/core/request_options.py +++ /dev/null @@ -1,32 +0,0 @@ -# This file was auto-generated by Fern from our API Definition. - -import typing - -try: - from typing import NotRequired # type: ignore -except ImportError: - from typing_extensions import NotRequired # type: ignore - - -class RequestOptions(typing.TypedDict): - """ - Additional options for request-specific configuration when calling APIs via the SDK. - This is used primarily as an optional final parameter for service functions. - - Attributes: - - timeout_in_seconds: int. The number of seconds to await an API call before timing out. - - - max_retries: int. The max number of retries to attempt if the API call fails.
- - - additional_headers: typing.Dict[str, typing.Any]. A dictionary containing additional parameters to spread into the request's header dict - - - additional_query_parameters: typing.Dict[str, typing.Any]. A dictionary containing additional parameters to spread into the request's query parameters dict - - - additional_body_parameters: typing.Dict[str, typing.Any]. A dictionary containing additional parameters to spread into the request's body parameters dict - """ - - timeout_in_seconds: NotRequired[int] - max_retries: NotRequired[int] - additional_headers: NotRequired[typing.Dict[str, typing.Any]] - additional_query_parameters: NotRequired[typing.Dict[str, typing.Any]] - additional_body_parameters: NotRequired[typing.Dict[str, typing.Any]] diff --git a/src/nixtlats/types/multi_series_anomaly.py b/src/nixtlats/types/multi_series_anomaly.py deleted file mode 100644 index 48549842..00000000 --- a/src/nixtlats/types/multi_series_anomaly.py +++ /dev/null @@ -1,51 +0,0 @@ -# This file was auto-generated by Fern from our API Definition. - -import datetime as dt -import typing - -from ..core.datetime_utils import serialize_datetime -from ..core.pydantic_utilities import pydantic_v1 -from .model import Model -from .multi_series_input import MultiSeriesInput - - -class MultiSeriesAnomaly(pydantic_v1.BaseModel): - model: typing.Optional[Model] = pydantic_v1.Field() - """ - Model to use as a string. Options are: `timegpt-1` and `timegpt-1-long-horizon`. We recommend using `timegpt-1-long-horizon` for forecasting if you want to predict more than one seasonal period given the frequency of your data. - """ - - freq: typing.Optional[str] = pydantic_v1.Field() - """ - The frequency of the data represented as a string. 'D' for daily, 'M' for monthly, 'H' for hourly, and 'W' for weekly frequencies are available. - """ - - level: typing.Optional[typing.List[typing.Any]] = pydantic_v1.Field() - """ - Specifies the confidence level for the prediction interval used in anomaly detection. It is represented as a percentage between 0 and 100. For instance, a level of 95 indicates that the generated prediction interval captures the true future observation 95% of the time. Any observed values outside of this interval would be considered anomalies. A higher level leads to wider prediction intervals and potentially fewer detected anomalies, whereas a lower level results in narrower intervals and potentially more detected anomalies. Default: 99. - """ - - y: typing.Optional[typing.Any] - x: typing.Optional[MultiSeriesInput] = pydantic_v1.Field() - """ - The exogenous variables provided as a dictionary with two keys: columns and data. The columns entry contains the column names of the dataframe and the data entry contains each data point. For example: {"columns": ["unique_id", "ds", "ex_1", "ex_2"], "data": [["ts_0", "2021-01-01", 0.2, 0.67], ["ts_0", "2021-01-02", 0.4, 0.7]]}. This should also include forecasting horizon (fh) additional timestamps for each unique_id to calculate the future values. - """ - - clean_ex_first: typing.Optional[bool] = pydantic_v1.Field() - """ - A boolean flag that indicates whether the API should preprocess (clean) the exogenous signal before applying the large time model. If True, the exogenous signal is cleaned; if False, the exogenous variables are applied after the large time model.
- """ - - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().dict(**kwargs_with_defaults) - - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.forbid - json_encoders = {dt.datetime: serialize_datetime} diff --git a/src/nixtlats/types/multi_series_cross_validation.py b/src/nixtlats/types/multi_series_cross_validation.py deleted file mode 100644 index d5202ac4..00000000 --- a/src/nixtlats/types/multi_series_cross_validation.py +++ /dev/null @@ -1,88 +0,0 @@ -# This file was auto-generated by Fern from our API Definition. - -import datetime as dt -import typing - -from ..core.datetime_utils import serialize_datetime -from ..core.pydantic_utilities import pydantic_v1 -from .model import Model -from .multi_series_cross_validation_fewshot_loss import MultiSeriesCrossValidationFewshotLoss -from .multi_series_cross_validation_finetune_loss import MultiSeriesCrossValidationFinetuneLoss -from .multi_series_input import MultiSeriesInput - - -class MultiSeriesCrossValidation(pydantic_v1.BaseModel): - fewshot_steps: typing.Optional[int] = pydantic_v1.Field() - """ - Deprecated. Please use finetune_steps instead. - """ - - fewshot_loss: typing.Optional[MultiSeriesCrossValidationFewshotLoss] = pydantic_v1.Field() - """ - Deprecated. Please use finetune_loss instead. - """ - - model: typing.Optional[Model] = pydantic_v1.Field() - """ - Model to use as a string. Options are: `timegpt-1`, and `timegpt-1-long-horizon.` We recommend using `timegpt-1-long-horizon` for forecasting if you want to predict more than one seasonal period given the frequency of your data. - """ - - freq: typing.Optional[str] = pydantic_v1.Field() - """ - The frequency of the data represented as a string. 'D' for daily, 'M' for monthly, 'H' for hourly, and 'W' for weekly frequencies are available. - """ - - level: typing.Optional[typing.List[typing.Any]] = pydantic_v1.Field() - """ - A list of values representing the prediction intervals. Each value is a percentage that indicates the level of certainty for the corresponding prediction interval. For example, [80, 90] defines 80% and 90% prediction intervals. - """ - - fh: typing.Optional[int] = pydantic_v1.Field() - """ - The forecasting horizon. This represents the number of time steps into the future that the forecast should predict. - """ - - y: typing.Optional[typing.Any] - x: typing.Optional[MultiSeriesInput] = pydantic_v1.Field() - """ - The exogenous variables provided as a dictionary of two colums: columns and data. The columns contains the columns of the dataframe and data contains eaach data point. For example: {"columns": ["unique_id", "ds", "ex_1", "ex_2"], "data": [["ts_0", "2021-01-01", 0.2, 0.67], ["ts_0", "2021-01-02", 0.4, 0.7]}. This should also include forecasting horizon (fh) additional timestamps for each unique_id to calculate the future values. - """ - - n_windows: typing.Optional[int] = pydantic_v1.Field() - """ - Number of windows to evaluate. - """ - - step_size: typing.Optional[int] = pydantic_v1.Field() - """ - Step size between each cross validation window. If None it will be equal to the forecasting horizon. 
- """ - - clean_ex_first: typing.Optional[bool] = pydantic_v1.Field() - """ - A boolean flag that indicates whether the API should preprocess (clean) the exogenous signal before applying the large time model. If True, the exogenous signal is cleaned; if False, the exogenous variables are applied after the large time model. - """ - - finetune_steps: typing.Optional[int] = pydantic_v1.Field() - """ - The number of tuning steps used to train the large time model on the data. Set this value to 0 for zero-shot inference, i.e., to make predictions without any further model tuning. - """ - - finetune_loss: typing.Optional[MultiSeriesCrossValidationFinetuneLoss] = pydantic_v1.Field() - """ - The loss used to train the large time model on the data. Select from ['default', 'mae', 'mse', 'rmse', 'mape', 'smape']. It will only be used if finetune_steps larger than 0. Default is a robust loss function that is less sensitive to outliers. - """ - - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().dict(**kwargs_with_defaults) - - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.forbid - json_encoders = {dt.datetime: serialize_datetime} diff --git a/src/nixtlats/types/multi_series_forecast.py b/src/nixtlats/types/multi_series_forecast.py deleted file mode 100644 index 5ec35f6d..00000000 --- a/src/nixtlats/types/multi_series_forecast.py +++ /dev/null @@ -1,78 +0,0 @@ -# This file was auto-generated by Fern from our API Definition. - -import datetime as dt -import typing - -from ..core.datetime_utils import serialize_datetime -from ..core.pydantic_utilities import pydantic_v1 -from .model import Model -from .multi_series_forecast_fewshot_loss import MultiSeriesForecastFewshotLoss -from .multi_series_forecast_finetune_loss import MultiSeriesForecastFinetuneLoss -from .multi_series_input import MultiSeriesInput - - -class MultiSeriesForecast(pydantic_v1.BaseModel): - fewshot_steps: typing.Optional[int] = pydantic_v1.Field() - """ - Deprecated. Please use finetune_steps instead. - """ - - fewshot_loss: typing.Optional[MultiSeriesForecastFewshotLoss] = pydantic_v1.Field() - """ - Deprecated. Please use finetune_loss instead. - """ - - model: typing.Optional[Model] = pydantic_v1.Field() - """ - Model to use as a string. Options are: `timegpt-1`, and `timegpt-1-long-horizon.` We recommend using `timegpt-1-long-horizon` for forecasting if you want to predict more than one seasonal period given the frequency of your data. - """ - - freq: typing.Optional[str] = pydantic_v1.Field() - """ - The frequency of the data represented as a string. 'D' for daily, 'M' for monthly, 'H' for hourly, and 'W' for weekly frequencies are available. - """ - - level: typing.Optional[typing.List[typing.Any]] = pydantic_v1.Field() - """ - A list of values representing the prediction intervals. Each value is a percentage that indicates the level of certainty for the corresponding prediction interval. For example, [80, 90] defines 80% and 90% prediction intervals. - """ - - fh: typing.Optional[int] = pydantic_v1.Field() - """ - The forecasting horizon. This represents the number of time steps into the future that the forecast should predict. 
- """ - - y: typing.Optional[typing.Any] - x: typing.Optional[MultiSeriesInput] = pydantic_v1.Field() - """ - The exogenous variables provided as a dictionary of two colums: columns and data. The columns contains the columns of the dataframe and data contains eaach data point. For example: {"columns": ["unique_id", "ds", "ex_1", "ex_2"], "data": [["ts_0", "2021-01-01", 0.2, 0.67], ["ts_0", "2021-01-02", 0.4, 0.7]}. This should also include forecasting horizon (fh) additional timestamps for each unique_id to calculate the future values. - """ - - clean_ex_first: typing.Optional[bool] = pydantic_v1.Field() - """ - A boolean flag that indicates whether the API should preprocess (clean) the exogenous signal before applying the large time model. If True, the exogenous signal is cleaned; if False, the exogenous variables are applied after the large time model. - """ - - finetune_steps: typing.Optional[int] = pydantic_v1.Field() - """ - The number of tuning steps used to train the large time model on the data. Set this value to 0 for zero-shot inference, i.e., to make predictions without any further model tuning. - """ - - finetune_loss: typing.Optional[MultiSeriesForecastFinetuneLoss] = pydantic_v1.Field() - """ - The loss used to train the large time model on the data. Select from ['default', 'mae', 'mse', 'rmse', 'mape', 'smape']. It will only be used if finetune_steps larger than 0. Default is a robust loss function that is less sensitive to outliers. - """ - - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().dict(**kwargs_with_defaults) - - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.forbid - json_encoders = {dt.datetime: serialize_datetime} diff --git a/src/nixtlats/types/multi_series_insample_forecast.py b/src/nixtlats/types/multi_series_insample_forecast.py deleted file mode 100644 index 818163eb..00000000 --- a/src/nixtlats/types/multi_series_insample_forecast.py +++ /dev/null @@ -1,51 +0,0 @@ -# This file was auto-generated by Fern from our API Definition. - -import datetime as dt -import typing - -from ..core.datetime_utils import serialize_datetime -from ..core.pydantic_utilities import pydantic_v1 -from .model import Model -from .multi_series_input import MultiSeriesInput - - -class MultiSeriesInsampleForecast(pydantic_v1.BaseModel): - model: typing.Optional[Model] = pydantic_v1.Field() - """ - Model to use as a string. Options are: `timegpt-1`, and `timegpt-1-long-horizon.` We recommend using `timegpt-1-long-horizon` for forecasting if you want to predict more than one seasonal period given the frequency of your data. - """ - - freq: typing.Optional[str] = pydantic_v1.Field() - """ - The frequency of the data represented as a string. 'D' for daily, 'M' for monthly, 'H' for hourly, and 'W' for weekly frequencies are available. - """ - - level: typing.Optional[typing.List[typing.Any]] = pydantic_v1.Field() - """ - A list of values representing the prediction intervals. Each value is a percentage that indicates the level of certainty for the corresponding prediction interval. For example, [80, 90] defines 80% and 90% prediction intervals. 
- """ - - y: typing.Optional[typing.Any] - x: typing.Optional[MultiSeriesInput] = pydantic_v1.Field() - """ - The exogenous variables provided as a dictionary of two colums: columns and data. The columns contains the columns of the dataframe and data contains eaach data point. For example: {"columns": ["unique_id", "ds", "ex_1", "ex_2"], "data": [["ts_0", "2021-01-01", 0.2, 0.67], ["ts_0", "2021-01-02", 0.4, 0.7]}. This should also include forecasting horizon (fh) additional timestamps for each unique_id to calculate the future values. - """ - - clean_ex_first: typing.Optional[bool] = pydantic_v1.Field() - """ - A boolean flag that indicates whether the API should preprocess (clean) the exogenous signal before applying the large time model. If True, the exogenous signal is cleaned; if False, the exogenous variables are applied after the large time model. - """ - - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().dict(**kwargs_with_defaults) - - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.forbid - json_encoders = {dt.datetime: serialize_datetime} diff --git a/src/nixtlats/types/single_series_forecast.py b/src/nixtlats/types/single_series_forecast.py deleted file mode 100644 index f68bff54..00000000 --- a/src/nixtlats/types/single_series_forecast.py +++ /dev/null @@ -1,77 +0,0 @@ -# This file was auto-generated by Fern from our API Definition. - -import datetime as dt -import typing - -from ..core.datetime_utils import serialize_datetime -from ..core.pydantic_utilities import pydantic_v1 -from .model import Model -from .single_series_forecast_fewshot_loss import SingleSeriesForecastFewshotLoss -from .single_series_forecast_finetune_loss import SingleSeriesForecastFinetuneLoss - - -class SingleSeriesForecast(pydantic_v1.BaseModel): - fewshot_steps: typing.Optional[int] = pydantic_v1.Field() - """ - Deprecated. Please use finetune_steps instead. - """ - - fewshot_loss: typing.Optional[SingleSeriesForecastFewshotLoss] = pydantic_v1.Field() - """ - Deprecated. Please use finetune_loss instead. - """ - - model: typing.Optional[Model] = pydantic_v1.Field() - """ - Model to use as a string. Options are: `timegpt-1`, and `timegpt-1-long-horizon.` We recommend using `timegpt-1-long-horizon` for forecasting if you want to predict more than one seasonal period given the frequency of your data. - """ - - freq: typing.Optional[str] = pydantic_v1.Field() - """ - The frequency of the data represented as a string. 'D' for daily, 'M' for monthly, 'H' for hourly, and 'W' for weekly frequencies are available. - """ - - level: typing.Optional[typing.List[typing.Any]] = pydantic_v1.Field() - """ - A list of values representing the prediction intervals. Each value is a percentage that indicates the level of certainty for the corresponding prediction interval. For example, [80, 90] defines 80% and 90% prediction intervals. - """ - - fh: typing.Optional[int] = pydantic_v1.Field() - """ - The forecasting horizon. This represents the number of time steps into the future that the forecast should predict. 
- """ - - y: typing.Optional[typing.Any] - x: typing.Optional[typing.Dict[str, typing.List[float]]] = pydantic_v1.Field() - """ - The exogenous variables provided as a dictionary. Each key is a timestamp (string format: YYYY-MM-DD) and the corresponding value is a list of exogenous variable values at that time point. For example: {"2021-01-01": [0.1], "2021-01-02": [0.4]}. This should also include forecasting horizon (fh) additional timestamps to calculate the future values. - """ - - clean_ex_first: typing.Optional[bool] = pydantic_v1.Field() - """ - A boolean flag that indicates whether the API should preprocess (clean) the exogenous signal before applying the large time model. If True, the exogenous signal is cleaned; if False, the exogenous variables are applied after the large time model. - """ - - finetune_steps: typing.Optional[int] = pydantic_v1.Field() - """ - The number of tuning steps used to train the large time model on the data. Set this value to 0 for zero-shot inference, i.e., to make predictions without any further model tuning. - """ - - finetune_loss: typing.Optional[SingleSeriesForecastFinetuneLoss] = pydantic_v1.Field() - """ - The loss used to train the large time model on the data. Select from ['default', 'mae', 'mse', 'rmse', 'mape', 'smape']. It will only be used if finetune_steps larger than 0. Default is a robust loss function that is less sensitive to outliers. - """ - - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().dict(**kwargs_with_defaults) - - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.forbid - json_encoders = {dt.datetime: serialize_datetime} diff --git a/src/nixtlats/types/single_series_insample_forecast.py b/src/nixtlats/types/single_series_insample_forecast.py deleted file mode 100644 index 361ab86a..00000000 --- a/src/nixtlats/types/single_series_insample_forecast.py +++ /dev/null @@ -1,46 +0,0 @@ -# This file was auto-generated by Fern from our API Definition. - -import datetime as dt -import typing - -from ..core.datetime_utils import serialize_datetime -from ..core.pydantic_utilities import pydantic_v1 -from .model import Model - - -class SingleSeriesInsampleForecast(pydantic_v1.BaseModel): - model: typing.Optional[Model] = pydantic_v1.Field() - """ - Model to use as a string. Options are: `timegpt-1`, and `timegpt-1-long-horizon.` We recommend using `timegpt-1-long-horizon` for forecasting if you want to predict more than one seasonal period given the frequency of your data. - """ - - freq: typing.Optional[str] = pydantic_v1.Field() - """ - The frequency of the data represented as a string. 'D' for daily, 'M' for monthly, 'H' for hourly, and 'W' for weekly frequencies are available. - """ - - level: typing.Optional[typing.List[typing.Any]] = pydantic_v1.Field() - """ - A list of values representing the prediction intervals. Each value is a percentage that indicates the level of certainty for the corresponding prediction interval. For example, [80, 90] defines 80% and 90% prediction intervals. 
- """ - - y: typing.Optional[typing.Any] - x: typing.Optional[typing.Any] - clean_ex_first: typing.Optional[bool] = pydantic_v1.Field() - """ - A boolean flag that indicates whether the API should preprocess (clean) the exogenous signal before applying the large time model. If True, the exogenous signal is cleaned; if False, the exogenous variables are applied after the large time model. - """ - - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().dict(**kwargs_with_defaults) - - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.forbid - json_encoders = {dt.datetime: serialize_datetime} diff --git a/src/nixtlats/version.py b/src/nixtlats/version.py deleted file mode 100644 index 3bf1dd43..00000000 --- a/src/nixtlats/version.py +++ /dev/null @@ -1,4 +0,0 @@ - -from importlib import metadata - -__version__ = metadata.version("nixtla")