From b46daaad68e52f2cd99466e0d922d9ef8e3fa92d Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Benjamin=20=C3=85strand?=
Date: Fri, 6 Sep 2024 12:14:42 +0200
Subject: [PATCH] First draft of hyperparam tuning notebook

---
 .../notebooks/Hyperparameter_Tuning.ipynb | 1640 +++++++++++++++++
 1 file changed, 1640 insertions(+)
 create mode 100644 examples/notebooks/Hyperparameter_Tuning.ipynb

diff --git a/examples/notebooks/Hyperparameter_Tuning.ipynb b/examples/notebooks/Hyperparameter_Tuning.ipynb
new file mode 100644
index 000000000..63729910a
--- /dev/null
+++ b/examples/notebooks/Hyperparameter_Tuning.ipynb
@@ -0,0 +1,1640 @@
+{
+ "cells": [
+  {
+   "cell_type": "code",
+   "execution_count": 1,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "from fedn import APIClient\n",
+    "import time\n",
+    "import json\n",
+    "import matplotlib.pyplot as plt\n",
+    "import numpy as np"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 2,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "CONTROLLER_HOST = 'fedn.scaleoutsystems.com/getting-started-with-fedn-akt-fedn-reducer'\n",
+    "ACCESS_TOKEN = '<your access token>'  # Paste an access token for your FEDn project here; do not commit real tokens\n",
+    "client = APIClient(CONTROLLER_HOST, token=ACCESS_TOKEN, secure=True, verify=True)"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "client.set_active_package('../mnist-pytorch/package.tgz', 'numpyhelper')\n",
+    "client.set_active_model('../mnist-pytorch/seed.npz')\n",
+    "seed_model = client.get_active_model()"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "### Defining the objective function\n",
+    "\n",
+    "Optuna expects an objective function: a function that takes a trial, evaluates one set of hyperparameter values and returns a score. In this example we use the test accuracy as the validation score, and we want to maximize it.\n",
+    "\n",
+    "For each set of hyperparameter values, i.e. each 'trial', we start a new session through the FEDn Python API. Within each session/trial we then take the highest test accuracy reached by any model in that session and use it as the objective value for the trial."
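+    ,"\n",
+    "If you have not used Optuna before, the snippet below is a minimal standalone sketch of the general pattern with a toy objective (no FEDn involved). The cells that follow apply the same pattern, but with a FEDn session inside the objective function:\n",
+    "\n",
+    "```python\n",
+    "import optuna\n",
+    "\n",
+    "def toy_objective(trial):\n",
+    "    x = trial.suggest_float('x', -10, 10)  # Optuna proposes a value for each trial\n",
+    "    return -(x - 2) ** 2                   # score to maximize\n",
+    "\n",
+    "toy_study = optuna.create_study(direction='maximize')\n",
+    "toy_study.optimize(toy_objective, n_trials=20)\n",
+    "print(toy_study.best_params)\n",
+    "```"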
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 4,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "# Helper function to get the highest test accuracy recorded within a session.\n",
+    "# NOTE: this assumes the latest n_rounds validation records all belong to the\n",
+    "# current session (i.e. a single validating client); adjust if you run more clients.\n",
+    "def get_highest_test_accuracy_in_session(client, n_rounds):\n",
+    "    best_accuracy = 0\n",
+    "    validations_in_session = client.get_validations()['result'][:n_rounds]\n",
+    "    for validation in validations_in_session:\n",
+    "        val_accuracy = json.loads(validation['data'])['test_accuracy']\n",
+    "        if val_accuracy > best_accuracy:\n",
+    "            best_accuracy = val_accuracy\n",
+    "\n",
+    "    return best_accuracy"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 44,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "import optuna\n",
+    "\n",
+    "# Objective function that Optuna calls to evaluate one selection of hyperparameter values\n",
+    "def objective(trial):\n",
+    "    # Number of rounds per session\n",
+    "    n_rounds = 1\n",
+    "\n",
+    "    # Let Optuna suggest a value for each hyperparameter (log-uniform search space)\n",
+    "    learning_rate = trial.suggest_float(\"learning_rate\", 1e-4, 1e-1, log=True)\n",
+    "\n",
+    "    # Session configuration: FedOpt aggregation with a server-side Adam optimizer,\n",
+    "    # restarting from the seed model in each new session\n",
+    "    session_config = {\n",
+    "        \"helper\": \"numpyhelper\",\n",
+    "        \"aggregator\": \"fedopt\",\n",
+    "        \"aggregator_kwargs\": {\n",
+    "            \"serveropt\": \"adam\",\n",
+    "            \"learning_rate\": learning_rate,\n",
+    "            \"beta1\": 0.9,\n",
+    "            \"beta2\": 0.99,\n",
+    "            \"tau\": 1e-4\n",
+    "        },\n",
+    "        \"model_id\": seed_model['model'],\n",
+    "        \"rounds\": n_rounds\n",
+    "    }\n",
+    "\n",
+    "    # Run the session and get its session id\n",
+    "    result_fedadam = client.start_session(**session_config)\n",
+    "    session_id = result_fedadam['config']['session_id']\n",
+    "\n",
+    "    # Wait while the current session is active\n",
+    "    while client.get_session_status(session_id) != 'Finished':\n",
+    "        time.sleep(1)\n",
+    "\n",
+    "    # Return the highest 'test' (validation) accuracy in the session\n",
+    "    return get_highest_test_accuracy_in_session(client=client, n_rounds=n_rounds)"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "### Creating and running an Optuna study\n",
+    "\n",
+    "Here we create an Optuna study. Since we use the test accuracy as the objective value, we maximize the objective function in this example. We pass the objective function defined above to `study.optimize()` and choose how many trials to run.\n",
+    "\n",
+    "**Note:** Each trial starts a new session, so the number of sessions started is `n_trials`."
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 45,
+   "metadata": {},
+   "outputs": [
+    {
+     "name": "stderr",
+     "output_type": "stream",
+     "text": [
+      "[I 2024-09-06 11:22:59,025] A new study created in memory with name: no-name-b1a1a339-93d1-4420-9006-00a43d546dd9\n"
+     ]
+    },
+    {
+     "name": "stderr",
+     "output_type": "stream",
+     "text": [
+      "[I 2024-09-06 11:23:10,169] Trial 0 finished with value: 0.18700000643730164 and parameters: {'learning_rate': 0.01060256463073004}. Best is trial 0 with value: 0.18700000643730164.\n",
+      "[I 2024-09-06 11:23:23,317] Trial 1 finished with value: 0.4269999861717224 and parameters: {'learning_rate': 0.06363319752806389}. 
Best is trial 1 with value: 0.4269999861717224.\n", + "[I 2024-09-06 11:23:34,356] Trial 2 finished with value: 0.2070000022649765 and parameters: {'learning_rate': 0.044054634905094}. Best is trial 1 with value: 0.4269999861717224.\n", + "[I 2024-09-06 11:23:44,244] Trial 3 finished with value: 0.3019999861717224 and parameters: {'learning_rate': 0.005457455596524018}. Best is trial 1 with value: 0.4269999861717224.\n", + "[I 2024-09-06 11:23:55,219] Trial 4 finished with value: 0.23999999463558197 and parameters: {'learning_rate': 0.0006534358443990889}. Best is trial 1 with value: 0.4269999861717224.\n", + "[I 2024-09-06 11:24:07,568] Trial 5 finished with value: 0.1420000046491623 and parameters: {'learning_rate': 0.005409564385563341}. Best is trial 1 with value: 0.4269999861717224.\n", + "[I 2024-09-06 11:24:18,643] Trial 6 finished with value: 0.2409999966621399 and parameters: {'learning_rate': 0.000296117238409861}. Best is trial 1 with value: 0.4269999861717224.\n", + "[I 2024-09-06 11:24:30,917] Trial 7 finished with value: 0.13600000739097595 and parameters: {'learning_rate': 0.0001548659658268065}. Best is trial 1 with value: 0.4269999861717224.\n", + "[I 2024-09-06 11:24:41,906] Trial 8 finished with value: 0.13199999928474426 and parameters: {'learning_rate': 0.0059773222630647126}. Best is trial 1 with value: 0.4269999861717224.\n", + "[I 2024-09-06 11:24:53,059] Trial 9 finished with value: 0.2669999897480011 and parameters: {'learning_rate': 0.009610312272318417}. Best is trial 1 with value: 0.4269999861717224.\n", + "[I 2024-09-06 11:25:05,070] Trial 10 finished with value: 0.39899998903274536 and parameters: {'learning_rate': 0.07936629680356555}. Best is trial 1 with value: 0.4269999861717224.\n", + "[I 2024-09-06 11:25:18,276] Trial 11 finished with value: 0.2280000001192093 and parameters: {'learning_rate': 0.0999017085272383}. Best is trial 1 with value: 0.4269999861717224.\n", + "[I 2024-09-06 11:25:32,440] Trial 12 finished with value: 0.09700000286102295 and parameters: {'learning_rate': 0.03030693483792421}. Best is trial 1 with value: 0.4269999861717224.\n", + "[I 2024-09-06 11:25:43,418] Trial 13 finished with value: 0.3709999918937683 and parameters: {'learning_rate': 0.09839071999176227}. Best is trial 1 with value: 0.4269999861717224.\n", + "[I 2024-09-06 11:25:54,343] Trial 14 finished with value: 0.11599999666213989 and parameters: {'learning_rate': 0.0018120798841072265}. Best is trial 1 with value: 0.4269999861717224.\n", + "[I 2024-09-06 11:26:05,276] Trial 15 finished with value: 0.164000004529953 and parameters: {'learning_rate': 0.02630581835365611}. Best is trial 1 with value: 0.4269999861717224.\n", + "[I 2024-09-06 11:26:16,256] Trial 16 finished with value: 0.3959999978542328 and parameters: {'learning_rate': 0.0015862265658428628}. Best is trial 1 with value: 0.4269999861717224.\n", + "[I 2024-09-06 11:26:28,358] Trial 17 finished with value: 0.16599999368190765 and parameters: {'learning_rate': 0.018963170798261392}. Best is trial 1 with value: 0.4269999861717224.\n", + "[I 2024-09-06 11:26:38,257] Trial 18 finished with value: 0.4099999964237213 and parameters: {'learning_rate': 0.05591498203997656}. Best is trial 1 with value: 0.4269999861717224.\n", + "[I 2024-09-06 11:26:49,235] Trial 19 finished with value: 0.23499999940395355 and parameters: {'learning_rate': 0.0507252826898167}. 
Best is trial 1 with value: 0.4269999861717224.\n", + "[I 2024-09-06 11:26:59,122] Trial 20 finished with value: 0.27799999713897705 and parameters: {'learning_rate': 0.015598714196031616}. Best is trial 1 with value: 0.4269999861717224.\n", + "[I 2024-09-06 11:27:11,184] Trial 21 finished with value: 0.453000009059906 and parameters: {'learning_rate': 0.04981982237560933}. Best is trial 21 with value: 0.453000009059906.\n", + "[I 2024-09-06 11:27:22,170] Trial 22 finished with value: 0.2160000056028366 and parameters: {'learning_rate': 0.04472041885602553}. Best is trial 21 with value: 0.453000009059906.\n", + "[I 2024-09-06 11:27:33,156] Trial 23 finished with value: 0.27900001406669617 and parameters: {'learning_rate': 0.04569663540155219}. Best is trial 21 with value: 0.453000009059906.\n", + "[I 2024-09-06 11:27:44,127] Trial 24 finished with value: 0.30799999833106995 and parameters: {'learning_rate': 0.02409255925048435}. Best is trial 21 with value: 0.453000009059906.\n", + "[I 2024-09-06 11:27:57,214] Trial 25 finished with value: 0.3959999978542328 and parameters: {'learning_rate': 0.012153379377141215}. Best is trial 21 with value: 0.453000009059906.\n", + "[I 2024-09-06 11:28:08,184] Trial 26 finished with value: 0.4350000023841858 and parameters: {'learning_rate': 0.06204357797505839}. Best is trial 21 with value: 0.453000009059906.\n", + "[I 2024-09-06 11:28:19,284] Trial 27 finished with value: 0.26899999380111694 and parameters: {'learning_rate': 0.0318047942959313}. Best is trial 21 with value: 0.453000009059906.\n", + "[I 2024-09-06 11:28:31,285] Trial 28 finished with value: 0.36500000953674316 and parameters: {'learning_rate': 0.0026889985534921607}. Best is trial 21 with value: 0.453000009059906.\n", + "[I 2024-09-06 11:28:43,293] Trial 29 finished with value: 0.17599999904632568 and parameters: {'learning_rate': 0.008406962615909752}. Best is trial 21 with value: 0.453000009059906.\n", + "[I 2024-09-06 11:28:54,275] Trial 30 finished with value: 0.36399999260902405 and parameters: {'learning_rate': 0.07570103253376405}. Best is trial 21 with value: 0.453000009059906.\n", + "[I 2024-09-06 11:29:05,232] Trial 31 finished with value: 0.1469999998807907 and parameters: {'learning_rate': 0.059086197752773524}. Best is trial 21 with value: 0.453000009059906.\n", + "[I 2024-09-06 11:29:17,298] Trial 32 finished with value: 0.20600000023841858 and parameters: {'learning_rate': 0.03714901133834086}. Best is trial 21 with value: 0.453000009059906.\n", + "[I 2024-09-06 11:29:29,314] Trial 33 finished with value: 0.3330000042915344 and parameters: {'learning_rate': 0.0646915046763561}. Best is trial 21 with value: 0.453000009059906.\n", + "[I 2024-09-06 11:29:41,414] Trial 34 finished with value: 0.23499999940395355 and parameters: {'learning_rate': 0.018225411751837776}. Best is trial 21 with value: 0.453000009059906.\n", + "[I 2024-09-06 11:29:52,437] Trial 35 finished with value: 0.4059999883174896 and parameters: {'learning_rate': 0.060873750215417036}. Best is trial 21 with value: 0.453000009059906.\n", + "[I 2024-09-06 11:30:03,385] Trial 36 finished with value: 0.23199999332427979 and parameters: {'learning_rate': 0.0005937941753721039}. Best is trial 21 with value: 0.453000009059906.\n", + "[I 2024-09-06 11:30:15,629] Trial 37 finished with value: 0.14399999380111694 and parameters: {'learning_rate': 0.03794230073789979}. 
Best is trial 21 with value: 0.453000009059906.\n", + "[I 2024-09-06 11:30:27,769] Trial 38 finished with value: 0.3569999933242798 and parameters: {'learning_rate': 0.021776847720301837}. Best is trial 21 with value: 0.453000009059906.\n", + "[I 2024-09-06 11:30:39,810] Trial 39 finished with value: 0.4009999930858612 and parameters: {'learning_rate': 0.012825763536089739}. Best is trial 21 with value: 0.453000009059906.\n", + "[I 2024-09-06 11:30:51,808] Trial 40 finished with value: 0.4180000126361847 and parameters: {'learning_rate': 0.006945279111941595}. Best is trial 21 with value: 0.453000009059906.\n", + "[I 2024-09-06 11:31:01,716] Trial 41 finished with value: 0.3070000112056732 and parameters: {'learning_rate': 0.003924318415667345}. Best is trial 21 with value: 0.453000009059906.\n", + "[I 2024-09-06 11:31:12,678] Trial 42 finished with value: 0.19499999284744263 and parameters: {'learning_rate': 0.0009962905683287265}. Best is trial 21 with value: 0.453000009059906.\n", + "[I 2024-09-06 11:31:25,797] Trial 43 finished with value: 0.14900000393390656 and parameters: {'learning_rate': 0.007485623216043211}. Best is trial 21 with value: 0.453000009059906.\n", + "[I 2024-09-06 11:31:37,890] Trial 44 finished with value: 0.34299999475479126 and parameters: {'learning_rate': 0.08121672594329421}. Best is trial 21 with value: 0.453000009059906.\n", + "[I 2024-09-06 11:31:47,800] Trial 45 finished with value: 0.18799999356269836 and parameters: {'learning_rate': 0.03218747043584}. Best is trial 21 with value: 0.453000009059906.\n", + "[I 2024-09-06 11:31:58,778] Trial 46 finished with value: 0.3499999940395355 and parameters: {'learning_rate': 0.0043209290517303035}. Best is trial 21 with value: 0.453000009059906.\n", + "[I 2024-09-06 11:32:08,667] Trial 47 finished with value: 0.2070000022649765 and parameters: {'learning_rate': 0.09007673305976491}. Best is trial 21 with value: 0.453000009059906.\n", + "[I 2024-09-06 11:32:19,734] Trial 48 finished with value: 0.15299999713897705 and parameters: {'learning_rate': 0.00011242006456928975}. Best is trial 21 with value: 0.453000009059906.\n", + "[I 2024-09-06 11:32:32,488] Trial 49 finished with value: 0.13199999928474426 and parameters: {'learning_rate': 0.052926437331853586}. Best is trial 21 with value: 0.453000009059906.\n", + "[I 2024-09-06 11:32:43,824] Trial 50 finished with value: 0.19099999964237213 and parameters: {'learning_rate': 0.015080515584708165}. Best is trial 21 with value: 0.453000009059906.\n", + "[I 2024-09-06 11:32:54,136] Trial 51 finished with value: 0.41499999165534973 and parameters: {'learning_rate': 0.06571961786071892}. Best is trial 21 with value: 0.453000009059906.\n", + "[I 2024-09-06 11:33:05,493] Trial 52 finished with value: 0.22300000488758087 and parameters: {'learning_rate': 0.06853902191868609}. Best is trial 21 with value: 0.453000009059906.\n", + "[I 2024-09-06 11:33:18,187] Trial 53 finished with value: 0.17900000512599945 and parameters: {'learning_rate': 0.043188154256233874}. Best is trial 21 with value: 0.453000009059906.\n", + "[I 2024-09-06 11:33:30,701] Trial 54 finished with value: 0.2759999930858612 and parameters: {'learning_rate': 0.03137506646247659}. Best is trial 21 with value: 0.453000009059906.\n", + "[I 2024-09-06 11:33:41,351] Trial 55 finished with value: 0.36899998784065247 and parameters: {'learning_rate': 0.09815877477219788}. 
Best is trial 21 with value: 0.453000009059906.\n", + "[I 2024-09-06 11:33:54,493] Trial 56 finished with value: 0.14499999582767487 and parameters: {'learning_rate': 0.024965815660978004}. Best is trial 21 with value: 0.453000009059906.\n", + "[I 2024-09-06 11:34:04,881] Trial 57 finished with value: 0.335999995470047 and parameters: {'learning_rate': 0.0002369246671375477}. Best is trial 21 with value: 0.453000009059906.\n", + "[I 2024-09-06 11:34:17,437] Trial 58 finished with value: 0.13300000131130219 and parameters: {'learning_rate': 0.05402719364497381}. Best is trial 21 with value: 0.453000009059906.\n", + "[I 2024-09-06 11:34:28,108] Trial 59 finished with value: 0.30399999022483826 and parameters: {'learning_rate': 0.04024469007474665}. Best is trial 21 with value: 0.453000009059906.\n", + "[I 2024-09-06 11:34:40,344] Trial 60 finished with value: 0.3540000021457672 and parameters: {'learning_rate': 0.0021520115377091278}. Best is trial 21 with value: 0.453000009059906.\n", + "[I 2024-09-06 11:34:51,334] Trial 61 finished with value: 0.17000000178813934 and parameters: {'learning_rate': 0.06777601481845777}. Best is trial 21 with value: 0.453000009059906.\n", + "[I 2024-09-06 11:35:02,328] Trial 62 finished with value: 0.2549999952316284 and parameters: {'learning_rate': 0.054552697374482345}. Best is trial 21 with value: 0.453000009059906.\n", + "[I 2024-09-06 11:35:14,370] Trial 63 finished with value: 0.24799999594688416 and parameters: {'learning_rate': 0.07824127973056091}. Best is trial 21 with value: 0.453000009059906.\n", + "[I 2024-09-06 11:35:25,393] Trial 64 finished with value: 0.15299999713897705 and parameters: {'learning_rate': 0.062190956331727144}. Best is trial 21 with value: 0.453000009059906.\n", + "[I 2024-09-06 11:35:36,519] Trial 65 finished with value: 0.21699999272823334 and parameters: {'learning_rate': 0.04574533489252647}. Best is trial 21 with value: 0.453000009059906.\n", + "[I 2024-09-06 11:35:46,388] Trial 66 finished with value: 0.31700000166893005 and parameters: {'learning_rate': 0.026700039728034024}. Best is trial 21 with value: 0.453000009059906.\n", + "[I 2024-09-06 11:35:56,731] Trial 67 finished with value: 0.35499998927116394 and parameters: {'learning_rate': 0.035572253575668344}. Best is trial 21 with value: 0.453000009059906.\n", + "[I 2024-09-06 11:36:07,788] Trial 68 finished with value: 0.38100001215934753 and parameters: {'learning_rate': 0.020947893979189017}. Best is trial 21 with value: 0.453000009059906.\n", + "[I 2024-09-06 11:36:18,778] Trial 69 finished with value: 0.39399999380111694 and parameters: {'learning_rate': 0.09807853814362605}. Best is trial 21 with value: 0.453000009059906.\n", + "[I 2024-09-06 11:36:30,423] Trial 70 finished with value: 0.13099999725818634 and parameters: {'learning_rate': 0.074008662788353}. Best is trial 21 with value: 0.453000009059906.\n", + "[I 2024-09-06 11:36:43,473] Trial 71 finished with value: 0.17800000309944153 and parameters: {'learning_rate': 0.006494329870318023}. Best is trial 21 with value: 0.453000009059906.\n", + "[I 2024-09-06 11:36:54,514] Trial 72 finished with value: 0.27900001406669617 and parameters: {'learning_rate': 0.013324381014246824}. Best is trial 21 with value: 0.453000009059906.\n", + "[I 2024-09-06 11:37:05,715] Trial 73 finished with value: 0.4050000011920929 and parameters: {'learning_rate': 0.047348523959182987}. 
Best is trial 21 with value: 0.453000009059906.\n", + "[I 2024-09-06 11:37:20,038] Trial 74 finished with value: 0.28600001335144043 and parameters: {'learning_rate': 0.05213136025170205}. Best is trial 21 with value: 0.453000009059906.\n", + "[I 2024-09-06 11:37:33,497] Trial 75 finished with value: 0.22100000083446503 and parameters: {'learning_rate': 0.0468237789021135}. Best is trial 21 with value: 0.453000009059906.\n", + "[I 2024-09-06 11:37:46,689] Trial 76 finished with value: 0.23499999940395355 and parameters: {'learning_rate': 0.009671712975184507}. Best is trial 21 with value: 0.453000009059906.\n", + "[I 2024-09-06 11:37:58,076] Trial 77 finished with value: 0.4050000011920929 and parameters: {'learning_rate': 0.028449834843744366}. Best is trial 21 with value: 0.453000009059906.\n", + "[I 2024-09-06 11:38:10,357] Trial 78 finished with value: 0.35499998927116394 and parameters: {'learning_rate': 0.03736743378716483}. Best is trial 21 with value: 0.453000009059906.\n", + "[I 2024-09-06 11:38:21,594] Trial 79 finished with value: 0.33799999952316284 and parameters: {'learning_rate': 0.0011102163493235233}. Best is trial 21 with value: 0.453000009059906.\n", + "[I 2024-09-06 11:38:33,990] Trial 80 finished with value: 0.15299999713897705 and parameters: {'learning_rate': 0.059419725114923834}. Best is trial 21 with value: 0.453000009059906.\n", + "[I 2024-09-06 11:38:44,958] Trial 81 finished with value: 0.20100000500679016 and parameters: {'learning_rate': 0.029590575438061956}. Best is trial 21 with value: 0.453000009059906.\n", + "[I 2024-09-06 11:38:57,152] Trial 82 finished with value: 0.35499998927116394 and parameters: {'learning_rate': 0.017734648001224127}. Best is trial 21 with value: 0.453000009059906.\n", + "[I 2024-09-06 11:39:07,228] Trial 83 finished with value: 0.40700000524520874 and parameters: {'learning_rate': 0.08418584424126585}. Best is trial 21 with value: 0.453000009059906.\n", + "[I 2024-09-06 11:39:19,411] Trial 84 finished with value: 0.14499999582767487 and parameters: {'learning_rate': 0.08334759428158998}. Best is trial 21 with value: 0.453000009059906.\n", + "[I 2024-09-06 11:39:31,771] Trial 85 finished with value: 0.10499999672174454 and parameters: {'learning_rate': 0.06386707358622472}. Best is trial 21 with value: 0.453000009059906.\n", + "[I 2024-09-06 11:39:43,951] Trial 86 finished with value: 0.16899999976158142 and parameters: {'learning_rate': 0.08176816323358746}. Best is trial 21 with value: 0.453000009059906.\n", + "[I 2024-09-06 11:39:58,405] Trial 87 finished with value: 0.1550000011920929 and parameters: {'learning_rate': 0.04691662345629954}. Best is trial 21 with value: 0.453000009059906.\n", + "[I 2024-09-06 11:40:09,377] Trial 88 finished with value: 0.3019999861717224 and parameters: {'learning_rate': 0.0005092474657505243}. Best is trial 21 with value: 0.453000009059906.\n", + "[I 2024-09-06 11:40:21,584] Trial 89 finished with value: 0.14100000262260437 and parameters: {'learning_rate': 0.06835465614566856}. Best is trial 21 with value: 0.453000009059906.\n", + "[I 2024-09-06 11:40:31,586] Trial 90 finished with value: 0.1940000057220459 and parameters: {'learning_rate': 0.040926964321475}. Best is trial 21 with value: 0.453000009059906.\n", + "[I 2024-09-06 11:40:43,624] Trial 91 finished with value: 0.3370000123977661 and parameters: {'learning_rate': 0.05663192019646022}. 
Best is trial 21 with value: 0.453000009059906.\n",
+      "[I 2024-09-06 11:40:55,919] Trial 92 finished with value: 0.22499999403953552 and parameters: {'learning_rate': 0.03407804204205137}. Best is trial 21 with value: 0.453000009059906.\n",
+      "[I 2024-09-06 11:41:08,004] Trial 93 finished with value: 0.3490000069141388 and parameters: {'learning_rate': 0.083789776074211}. Best is trial 21 with value: 0.453000009059906.\n",
+      "[I 2024-09-06 11:41:19,210] Trial 94 finished with value: 0.13199999928474426 and parameters: {'learning_rate': 0.02789909609279049}. Best is trial 21 with value: 0.453000009059906.\n",
+      "[I 2024-09-06 11:41:31,487] Trial 95 finished with value: 0.3779999911785126 and parameters: {'learning_rate': 0.04909602447498623}. Best is trial 21 with value: 0.453000009059906.\n",
+      "[I 2024-09-06 11:41:43,551] Trial 96 finished with value: 0.3160000145435333 and parameters: {'learning_rate': 0.07135979861985685}. Best is trial 21 with value: 0.453000009059906.\n",
+      "[I 2024-09-06 11:41:55,018] Trial 97 finished with value: 0.13699999451637268 and parameters: {'learning_rate': 0.02340684816934831}. Best is trial 21 with value: 0.453000009059906.\n",
+      "[I 2024-09-06 11:42:07,090] Trial 98 finished with value: 0.38999998569488525 and parameters: {'learning_rate': 0.04183807409164346}. Best is trial 21 with value: 0.453000009059906.\n",
+      "[I 2024-09-06 11:42:18,542] Trial 99 finished with value: 0.335999995470047 and parameters: {'learning_rate': 0.09709298119891982}. Best is trial 21 with value: 0.453000009059906.\n"
+     ]
+    }
+   ],
+   "source": [
+    "# Create an Optuna study\n",
+    "study = optuna.create_study(direction=\"maximize\")\n",
+    "\n",
+    "# Optimize hyperparameters (~19 min to run 100 trials/sessions with one round each)\n",
+    "study.optimize(objective, n_trials=100)\n",
+    "print(\"Best hyperparameters:\", study.best_params)\n",
+    "print(\"Best value:\", study.best_value)"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "### Visualize Optuna's optimization\n",
+    "\n",
+    "Optuna ships with built-in visualizations rendered with Plotly. Note that displaying them inline requires `nbformat>=4.2.0` to be installed in the notebook environment."
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "import optuna.visualization as vis\n",
+    "\n",
+    "# vis.plot_optimization_history(study)\n",
+    "# vis.plot_param_importances(study)"
+   ]
+  },
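+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "As an optional alternative, for example if Plotly inline rendering is not available in your environment, you can export all trials to a pandas DataFrame with `study.trials_dataframe()` and plot the results with matplotlib. The cell below is a minimal sketch of that approach and assumes the study above has finished running."
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "# Optional: inspect the trials with pandas/matplotlib instead of Plotly.\n",
+    "# Assumes `study` from the cells above has finished running.\n",
+    "import matplotlib.pyplot as plt\n",
+    "\n",
+    "df = study.trials_dataframe()  # one row per trial: value, params_learning_rate, state, ...\n",
+    "\n",
+    "plt.scatter(df['params_learning_rate'], df['value'])\n",
+    "plt.xscale('log')\n",
+    "plt.xlabel('learning_rate')\n",
+    "plt.ylabel('Highest test accuracy in session')\n",
+    "plt.show()"
+   ]
+  },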
+  {
+   "cell_type": "code",
+   "execution_count": 53,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "# Slice plot: objective value (highest test accuracy) vs. learning_rate (log scale)\n",
+    "vis.plot_slice(study)"
+   ]
+  }
+ ],
+ "metadata": {
+  "kernelspec": {
+   "display_name": "Python 3",
+   "language": "python",
+   "name": "python3"
+  },
+  "language_info": {
+   "codemirror_mode": {
+    "name": "ipython",
+    "version": 3
+   },
+   "file_extension": ".py",
+   "mimetype": "text/x-python",
+   "name": "python",
+   "nbconvert_exporter": "python",
+   "pygments_lexer": "ipython3",
+   "version": "3.9.0"
+  }
+ },
+ "nbformat": 4,
+ "nbformat_minor": 2
+}