Skip to content

Commit

Permalink
fix the unit test
Browse files — browse the repository at this point in the history
  • Loading branch information
allenanie committed Jul 29, 2024
1 parent b935cb5 commit f5fa7c1
Showing 1 changed file with 59 additions and 56 deletions.
115 changes: 59 additions & 56 deletions tests/unit_tests/test_optimizer.py
Original file line number Diff line number Diff line change
@@ -1,3 +1,4 @@
import os
import autogen
from opto.trace import bundle, node, GRAPH
from opto.optimizers import OptoPrime
Expand Down Expand Up @@ -123,60 +124,62 @@ def foobar_text(x):

# Reset the global trace graph so nodes from earlier tests don't leak into this one.
GRAPH.clear()
x = node("negative point one", trainable=True)
# LLM configuration is loaded from the local OAI_CONFIG_LIST file via autogen.
optimizer = OptoPrime([x], config_list=autogen.config_list_from_json("OAI_CONFIG_LIST"))
output = foobar_text(x)
feedback = user(output.data)
# Clear any previously accumulated feedback before backpropagating the new one.
optimizer.zero_feedback()
optimizer.backward(output, feedback)
print(f"variable={x.data}, output={output.data}, feedback={feedback}") # logging
optimizer.step(verbose=True)

## Test the optimizer with an example of code
GRAPH.clear()


def user(output):
    """Feedback oracle: "Success." for a negative output, otherwise a retry hint."""
    is_negative = output < 0
    if not is_negative:
        return "Try again. The output should be negative"
    return "Success."


# We make this function as a parameter that can be optimized.
@bundle(trainable=True)
def my_fun(x):
    """Quadratic test function (x**2 + 1); its code is the trainable parameter via @bundle."""
    return x**2 + 1


x = node(-1, trainable=False)
# The trainable object here is the *function's code* (my_fun.parameter), not the input node x.
optimizer = OptoPrime([my_fun.parameter], config_list=autogen.config_list_from_json("OAI_CONFIG_LIST"))
output = my_fun(x)
feedback = user(output.data)
# Clear stale feedback before backpropagating the new one.
optimizer.zero_feedback()
optimizer.backward(output, feedback)

print(f"output={output.data}, feedback={feedback}, variables=\n") # logging
for p in optimizer.parameters:
    print(p.name, p.data)
optimizer.step(verbose=True)


# Test directly providing feedback to parameters
GRAPH.clear()
x = node(-1, trainable=True)
# NOTE(review): no config_list is passed here — presumably OptoPrime falls back
# to a default LLM configuration; confirm before relying on this path in CI.
optimizer = OptoPrime([x])
feedback = "test"
optimizer.zero_feedback()
# Feedback is attached directly to the parameter node itself (no forward pass).
optimizer.backward(x, feedback)
optimizer.step(verbose=True)


# Test if we can save the optimizer log in both pickle and json.
import json, pickle

# Use context managers so the handles are closed (and data flushed) before the
# files are removed; the original passed bare open() handles to json.dump /
# pickle.dump, leaking them and risking an unflushed or (on Windows) locked
# file at os.remove time.
with open("log.json", "w") as f:
    json.dump(optimizer.log, f)
with open("log.pik", "wb") as f:
    pickle.dump(optimizer.log, f)

# remove these files
import os
os.remove("log.json")
os.remove("log.pik")
# Only exercise the LLM-backed optimizer when an autogen config file is
# present; otherwise this section is skipped (no API access in e.g. CI).
# As rendered, the if-body was not indented (a syntax error) — restored here.
if os.path.exists("OAI_CONFIG_LIST"):
    optimizer = OptoPrime([x], config_list=autogen.config_list_from_json("OAI_CONFIG_LIST"))
    output = foobar_text(x)
    feedback = user(output.data)
    # Clear any previously accumulated feedback before backpropagating.
    optimizer.zero_feedback()
    optimizer.backward(output, feedback)
    print(f"variable={x.data}, output={output.data}, feedback={feedback}") # logging
    optimizer.step(verbose=True)

## Test the optimizer with an example of code
GRAPH.clear()


def user(output):
    """Return feedback text: success iff the produced output is negative."""
    return "Success." if output < 0 else "Try again. The output should be negative"


# We make this function as a parameter that can be optimized.
@bundle(trainable=True)
def my_fun(x):
    """Quadratic test function (x**2 + 1); its code is the trainable parameter via @bundle."""
    return x**2 + 1


x = node(-1, trainable=False)
# The trainable object here is the *function's code* (my_fun.parameter), not the input node x.
optimizer = OptoPrime([my_fun.parameter], config_list=autogen.config_list_from_json("OAI_CONFIG_LIST"))
output = my_fun(x)
feedback = user(output.data)
# Clear stale feedback before backpropagating the new one.
optimizer.zero_feedback()
optimizer.backward(output, feedback)

print(f"output={output.data}, feedback={feedback}, variables=\n") # logging
for p in optimizer.parameters:
    print(p.name, p.data)
optimizer.step(verbose=True)


# Test directly providing feedback to parameters
GRAPH.clear()
x = node(-1, trainable=True)
# NOTE(review): no config_list is passed here — presumably OptoPrime falls back
# to a default LLM configuration; confirm before relying on this path in CI.
optimizer = OptoPrime([x])
feedback = "test"
optimizer.zero_feedback()
# Feedback is attached directly to the parameter node itself (no forward pass).
optimizer.backward(x, feedback)
optimizer.step(verbose=True)


# Test if we can save the optimizer log in both pickle and json.
import json, pickle

# Use context managers so the handles are closed (and data flushed) before the
# files are removed; the original passed bare open() handles to json.dump /
# pickle.dump, leaking them and risking an unflushed or (on Windows) locked
# file at os.remove time.
with open("log.json", "w") as f:
    json.dump(optimizer.log, f)
with open("log.pik", "wb") as f:
    pickle.dump(optimizer.log, f)

# remove these files
import os
os.remove("log.json")
os.remove("log.pik")

0 comments on commit f5fa7c1

Please sign in to comment.