Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

[FEAT] finetune loss #218

Merged
merged 2 commits into from
Jan 30, 2024
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
4 changes: 4 additions & 0 deletions nbs/distributed.timegpt.ipynb
Original file line number Diff line number Diff line change
Expand Up @@ -161,6 +161,7 @@
" X_df: Optional[fugue.AnyDataFrame] = None,\n",
" level: Optional[List[Union[int, float]]] = None,\n",
" finetune_steps: int = 0,\n",
" finetune_loss: str = 'default',\n",
" clean_ex_first: bool = True,\n",
" validate_token: bool = False,\n",
" add_history: bool = False,\n",
Expand All @@ -177,6 +178,7 @@
" target_col=target_col,\n",
" level=level,\n",
" finetune_steps=finetune_steps,\n",
" finetune_loss=finetune_loss,\n",
" clean_ex_first=clean_ex_first,\n",
" validate_token=validate_token,\n",
" add_history=add_history,\n",
Expand Down Expand Up @@ -245,6 +247,7 @@
" target_col: str = 'y',\n",
" level: Optional[List[Union[int, float]]] = None,\n",
" finetune_steps: int = 0,\n",
" finetune_loss: str = 'default',\n",
" clean_ex_first: bool = True,\n",
" validate_token: bool = False,\n",
" date_features: Union[bool, List[str]] = False,\n",
Expand All @@ -262,6 +265,7 @@
" target_col=target_col,\n",
" level=level,\n",
" finetune_steps=finetune_steps,\n",
" finetune_loss=finetune_loss,\n",
" clean_ex_first=clean_ex_first,\n",
" validate_token=validate_token,\n",
" date_features=date_features,\n",
Expand Down
21 changes: 19 additions & 2 deletions nbs/timegpt.ipynb
Original file line number Diff line number Diff line change
Expand Up @@ -164,6 +164,7 @@
" freq: str = None,\n",
" level: Optional[List[Union[int, float]]] = None,\n",
" finetune_steps: int = 0,\n",
" finetune_loss: str = 'default',\n",
" clean_ex_first: bool = True,\n",
" date_features: Union[bool, List[str]] = False,\n",
" date_features_to_one_hot: Union[bool, List[str]] = True,\n",
Expand All @@ -180,6 +181,7 @@
" self.base_freq = freq\n",
" self.level = level\n",
" self.finetune_steps = finetune_steps\n",
" self.finetune_loss = finetune_loss\n",
" self.clean_ex_first = clean_ex_first\n",
" self.date_features = date_features\n",
" self.date_features_to_one_hot = date_features_to_one_hot\n",
Expand Down Expand Up @@ -510,6 +512,7 @@
" freq=self.freq,\n",
" level=self.level,\n",
" finetune_steps=self.finetune_steps,\n",
" finetune_loss=self.finetune_loss,\n",
" clean_ex_first=self.clean_ex_first,\n",
" model=self.model,\n",
" )\n",
Expand Down Expand Up @@ -798,6 +801,7 @@
" X_df: Optional[pd.DataFrame] = None,\n",
" level: Optional[List[Union[int, float]]] = None,\n",
" finetune_steps: int = 0,\n",
" finetune_loss: str = 'default',\n",
" clean_ex_first: bool = True,\n",
" validate_token: bool = False,\n",
" add_history: bool = False,\n",
Expand All @@ -819,6 +823,7 @@
" freq=freq,\n",
" level=level,\n",
" finetune_steps=finetune_steps,\n",
" finetune_loss=finetune_loss,\n",
" clean_ex_first=clean_ex_first,\n",
" date_features=date_features,\n",
" date_features_to_one_hot=date_features_to_one_hot,\n",
Expand Down Expand Up @@ -887,6 +892,7 @@
" n_windows: int = 1,\n",
" step_size: Optional[int] = None,\n",
" finetune_steps: int = 0,\n",
" finetune_loss: str = 'default',\n",
" clean_ex_first: bool = True,\n",
" date_features: Union[bool, List[str]] = False,\n",
" date_features_to_one_hot: Union[bool, List[str]] = True,\n",
Expand All @@ -906,6 +912,7 @@
" freq=freq,\n",
" level=level,\n",
" finetune_steps=finetune_steps,\n",
" finetune_loss=finetune_loss,\n",
" clean_ex_first=clean_ex_first,\n",
" date_features=date_features,\n",
" date_features_to_one_hot=date_features_to_one_hot,\n",
Expand Down Expand Up @@ -1059,6 +1066,7 @@
" X_df: Optional[pd.DataFrame] = None,\n",
" level: Optional[List[Union[int, float]]] = None,\n",
" finetune_steps: int = 0,\n",
" finetune_loss: str = 'default',\n",
" clean_ex_first: bool = True,\n",
" validate_token: bool = False,\n",
" add_history: bool = False,\n",
Expand Down Expand Up @@ -1101,6 +1109,8 @@
" finetune_steps : int (default=0)\n",
" Number of steps used to finetune TimeGPT in the\n",
" new data.\n",
" finetune_loss : str (default='default')\n",
" Loss function to use for finetuning. Options are: `default`, `mae`, `mse`, `rmse`, `mape`, and `smape`.\n",
" clean_ex_first : bool (default=True)\n",
" Clean exogenous signal before making forecasts\n",
" using TimeGPT.\n",
Expand Down Expand Up @@ -1145,6 +1155,7 @@
" X_df=X_df,\n",
" level=level,\n",
" finetune_steps=finetune_steps,\n",
" finetune_loss=finetune_loss,\n",
" clean_ex_first=clean_ex_first,\n",
" validate_token=validate_token,\n",
" add_history=add_history,\n",
Expand All @@ -1165,6 +1176,7 @@
" X_df=X_df,\n",
" level=level,\n",
" finetune_steps=finetune_steps,\n",
" finetune_loss=finetune_loss,\n",
" clean_ex_first=clean_ex_first,\n",
" validate_token=validate_token,\n",
" add_history=add_history,\n",
Expand Down Expand Up @@ -1291,6 +1303,7 @@
" n_windows: int = 1,\n",
" step_size: Optional[int] = None,\n",
" finetune_steps: int = 0,\n",
" finetune_loss: str = 'default',\n",
" clean_ex_first: bool = True,\n",
" date_features: Union[bool, List[str]] = False,\n",
" date_features_to_one_hot: Union[bool, List[str]] = True,\n",
Expand Down Expand Up @@ -1336,6 +1349,8 @@
" finetune_steps : int (default=0)\n",
" Number of steps used to finetune TimeGPT in the\n",
" new data.\n",
" finetune_loss : str (default='default')\n",
" Loss function to use for finetuning. Options are: `default`, `mae`, `mse`, `rmse`, `mape`, and `smape`.\n",
" clean_ex_first : bool (default=True)\n",
" Clean exogenous signal before making forecasts\n",
" using TimeGPT.\n",
Expand Down Expand Up @@ -1373,6 +1388,7 @@
" target_col=target_col,\n",
" level=level,\n",
" finetune_steps=finetune_steps,\n",
" finetune_loss=finetune_loss,\n",
" clean_ex_first=clean_ex_first,\n",
" validate_token=validate_token,\n",
" date_features=date_features,\n",
Expand All @@ -1393,6 +1409,7 @@
" target_col=target_col,\n",
" level=level,\n",
" finetune_steps=finetune_steps,\n",
" finetune_loss=finetune_loss,\n",
" clean_ex_first=clean_ex_first,\n",
" validate_token=validate_token,\n",
" date_features=date_features,\n",
Expand Down Expand Up @@ -1862,7 +1879,7 @@
"#| hide\n",
"#test same results custom url\n",
"timegpt_custom = TimeGPT(\n",
" token=os.environ['TIMEGPT_CUSTOM_URL_TOKEN'],\n",
" token=os.environ['TIMEGPT_CUSTOM_URL_TOKEN'], \n",
" environment=os.environ['TIMEGPT_CUSTOM_URL'],\n",
")\n",
"# forecast method\n",
Expand Down Expand Up @@ -2294,7 +2311,7 @@
"#| hide \n",
# test short horizon with finetuning
"test_fail(\n",
" lambda: timegpt.forecast(df=df.tail(3), h=12, time_col='timestamp', target_col='value', finetune_steps=10),\n",
" lambda: timegpt.forecast(df=df.tail(3), h=12, time_col='timestamp', target_col='value', finetune_steps=10, finetune_loss='mae'),\n",
" contains='be sure'\n",
")"
]
Expand Down
9 changes: 9 additions & 0 deletions nixtlats/_modidx.py
Original file line number Diff line number Diff line change
Expand Up @@ -119,8 +119,17 @@
'nixtlats.timegpt.validate_model_parameter': ( 'timegpt.html#validate_model_parameter',
'nixtlats/timegpt.py')},
'nixtlats.types.http_validation_error': {},
'nixtlats.types.multi_series_anomaly_model': {},
'nixtlats.types.multi_series_cross_validation_finetune_loss': {},
'nixtlats.types.multi_series_cross_validation_model': {},
'nixtlats.types.multi_series_forecast_finetune_loss': {},
'nixtlats.types.multi_series_forecast_model': {},
'nixtlats.types.multi_series_input': {},
'nixtlats.types.multi_series_insample_forecast_model': {},
'nixtlats.types.single_series_forecast': {},
'nixtlats.types.single_series_forecast_finetune_loss': {},
'nixtlats.types.single_series_forecast_model': {},
'nixtlats.types.single_series_insample_forecast_model': {},
'nixtlats.types.validation_error': {},
'nixtlats.types.validation_error_loc_item': {},
'nixtlats.utils': {'nixtlats.utils.colab_badge': ('utils.html#colab_badge', 'nixtlats/utils.py')}}}
4 changes: 4 additions & 0 deletions nixtlats/distributed/timegpt.py
Original file line number Diff line number Diff line change
Expand Up @@ -125,6 +125,7 @@ def forecast(
X_df: Optional[fugue.AnyDataFrame] = None,
level: Optional[List[Union[int, float]]] = None,
finetune_steps: int = 0,
finetune_loss: str = "default",
clean_ex_first: bool = True,
validate_token: bool = False,
add_history: bool = False,
Expand All @@ -141,6 +142,7 @@ def forecast(
target_col=target_col,
level=level,
finetune_steps=finetune_steps,
finetune_loss=finetune_loss,
clean_ex_first=clean_ex_first,
validate_token=validate_token,
add_history=add_history,
Expand Down Expand Up @@ -211,6 +213,7 @@ def cross_validation(
target_col: str = "y",
level: Optional[List[Union[int, float]]] = None,
finetune_steps: int = 0,
finetune_loss: str = "default",
clean_ex_first: bool = True,
validate_token: bool = False,
date_features: Union[bool, List[str]] = False,
Expand All @@ -228,6 +231,7 @@ def cross_validation(
target_col=target_col,
level=level,
finetune_steps=finetune_steps,
finetune_loss=finetune_loss,
clean_ex_first=clean_ex_first,
validate_token=validate_token,
date_features=date_features,
Expand Down
17 changes: 17 additions & 0 deletions nixtlats/timegpt.py
Original file line number Diff line number Diff line change
Expand Up @@ -100,6 +100,7 @@ def __init__(
freq: str = None,
level: Optional[List[Union[int, float]]] = None,
finetune_steps: int = 0,
finetune_loss: str = "default",
clean_ex_first: bool = True,
date_features: Union[bool, List[str]] = False,
date_features_to_one_hot: Union[bool, List[str]] = True,
Expand All @@ -116,6 +117,7 @@ def __init__(
self.base_freq = freq
self.level = level
self.finetune_steps = finetune_steps
self.finetune_loss = finetune_loss
self.clean_ex_first = clean_ex_first
self.date_features = date_features
self.date_features_to_one_hot = date_features_to_one_hot
Expand Down Expand Up @@ -481,6 +483,7 @@ def forecast(
freq=self.freq,
level=self.level,
finetune_steps=self.finetune_steps,
finetune_loss=self.finetune_loss,
clean_ex_first=self.clean_ex_first,
model=self.model,
)
Expand Down Expand Up @@ -749,6 +752,7 @@ def _forecast(
X_df: Optional[pd.DataFrame] = None,
level: Optional[List[Union[int, float]]] = None,
finetune_steps: int = 0,
finetune_loss: str = "default",
clean_ex_first: bool = True,
validate_token: bool = False,
add_history: bool = False,
Expand All @@ -768,6 +772,7 @@ def _forecast(
freq=freq,
level=level,
finetune_steps=finetune_steps,
finetune_loss=finetune_loss,
clean_ex_first=clean_ex_first,
date_features=date_features,
date_features_to_one_hot=date_features_to_one_hot,
Expand Down Expand Up @@ -834,6 +839,7 @@ def _cross_validation(
n_windows: int = 1,
step_size: Optional[int] = None,
finetune_steps: int = 0,
finetune_loss: str = "default",
clean_ex_first: bool = True,
date_features: Union[bool, List[str]] = False,
date_features_to_one_hot: Union[bool, List[str]] = True,
Expand All @@ -851,6 +857,7 @@ def _cross_validation(
freq=freq,
level=level,
finetune_steps=finetune_steps,
finetune_loss=finetune_loss,
clean_ex_first=clean_ex_first,
date_features=date_features,
date_features_to_one_hot=date_features_to_one_hot,
Expand Down Expand Up @@ -999,6 +1006,7 @@ def forecast(
X_df: Optional[pd.DataFrame] = None,
level: Optional[List[Union[int, float]]] = None,
finetune_steps: int = 0,
finetune_loss: str = "default",
clean_ex_first: bool = True,
validate_token: bool = False,
add_history: bool = False,
Expand Down Expand Up @@ -1041,6 +1049,8 @@ def forecast(
finetune_steps : int (default=0)
Number of steps used to finetune TimeGPT in the
new data.
finetune_loss : str (default='default')
Loss function to use for finetuning. Options are: `default`, `mae`, `mse`, `rmse`, `mape`, and `smape`.
clean_ex_first : bool (default=True)
Clean exogenous signal before making forecasts
using TimeGPT.
Expand Down Expand Up @@ -1085,6 +1095,7 @@ def forecast(
X_df=X_df,
level=level,
finetune_steps=finetune_steps,
finetune_loss=finetune_loss,
clean_ex_first=clean_ex_first,
validate_token=validate_token,
add_history=add_history,
Expand All @@ -1105,6 +1116,7 @@ def forecast(
X_df=X_df,
level=level,
finetune_steps=finetune_steps,
finetune_loss=finetune_loss,
clean_ex_first=clean_ex_first,
validate_token=validate_token,
add_history=add_history,
Expand Down Expand Up @@ -1231,6 +1243,7 @@ def cross_validation(
n_windows: int = 1,
step_size: Optional[int] = None,
finetune_steps: int = 0,
finetune_loss: str = "default",
clean_ex_first: bool = True,
date_features: Union[bool, List[str]] = False,
date_features_to_one_hot: Union[bool, List[str]] = True,
Expand Down Expand Up @@ -1276,6 +1289,8 @@ def cross_validation(
finetune_steps : int (default=0)
Number of steps used to finetune TimeGPT in the
new data.
finetune_loss : str (default='default')
Loss function to use for finetuning. Options are: `default`, `mae`, `mse`, `rmse`, `mape`, and `smape`.
clean_ex_first : bool (default=True)
Clean exogenous signal before making forecasts
using TimeGPT.
Expand Down Expand Up @@ -1313,6 +1328,7 @@ def cross_validation(
target_col=target_col,
level=level,
finetune_steps=finetune_steps,
finetune_loss=finetune_loss,
clean_ex_first=clean_ex_first,
validate_token=validate_token,
date_features=date_features,
Expand All @@ -1333,6 +1349,7 @@ def cross_validation(
target_col=target_col,
level=level,
finetune_steps=finetune_steps,
finetune_loss=finetune_loss,
clean_ex_first=clean_ex_first,
validate_token=validate_token,
date_features=date_features,
Expand Down
Loading