update quasi-convex
luk036 committed Aug 3, 2024
1 parent 2d1ef76 commit 0cbe68a
Showing 2 changed files with 17 additions and 20 deletions.
22 changes: 11 additions & 11 deletions tests/test_quasicvx.py
@@ -15,7 +15,7 @@

class MyQuasicvxOracle(OracleOptim):
    idx: int = -1  # for round robin
-    tmp2: float
+    y: float
    tmp3: float

    def __init__(self):

@@ -25,30 +25,30 @@ def __init__(self):
        self.fns = (self.fn1, self.fn2)
        self.grads = (self.grad1, self.grad2)

-    def fn1(self, sqrtx, ly, _):
+    def fn1(self, sqrtx, logy, _):
        """
        The function calculates the difference between the square of a given value and another value.
        :param sqrtx: The parameter `sqrtx` represents the square root of a value
-        :param ly: The parameter `ly` represents the upper limit for the square of the square root of `x`
+        :param logy: The parameter `logy` represents the upper limit for `sqrtx * sqrtx`
        :param _: The underscore symbol (_) is commonly used as a placeholder variable in Python to indicate
        that the value is not going to be used in the function. In this context, it seems that the third
        parameter is not used in the function `fn1`
-        :return: The function `fn1` is returning the value of `sqrtx * sqrtx - ly`.
+        :return: The function `fn1` is returning the value of `sqrtx * sqrtx - logy`.
        """
-        return sqrtx * sqrtx - ly
+        return sqrtx * sqrtx - logy

-    def fn2(self, sqrtx, ly, gamma):
+    def fn2(self, sqrtx, logy, gamma):
        """
        The function calculates the value of `-sqrtx` plus `gamma` times the exponential of `logy`.
        :param sqrtx: The `sqrtx` parameter represents the square root of a value
-        :param ly: The parameter `ly` appears to represent the natural logarithm of `y`
+        :param logy: The parameter `logy` represents the natural logarithm of `y`
        :param gamma: Gamma is a constant value used in the calculation within the function
        :return: The function `fn2` is returning the value of `-sqrtx + self.tmp3`.
        """
-        self.tmp2 = math.exp(ly)
-        self.tmp3 = gamma * self.tmp2
+        self.y = math.exp(logy)
+        self.tmp3 = gamma * self.y
        return -sqrtx + self.tmp3

    def grad1(self, sqrtx):

@@ -88,13 +88,13 @@ def assess_optim(self, xc, gamma: float):
        tuple containing an array and a float value, and the second element is either `None` or a float
        value.
        """
-        sqrtx, ly = xc
+        sqrtx, logy = xc

        for _ in [0, 1]:
            self.idx += 1
            if self.idx == 2:
                self.idx = 0  # round robin
-            if (fj := self.fns[self.idx](sqrtx, ly, gamma)) > 0:
+            if (fj := self.fns[self.idx](sqrtx, logy, gamma)) > 0:
                return (self.grads[self.idx](sqrtx), fj), None

        gamma = sqrtx / self.y
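For context on the rename, the oracle above encodes the quasi-convex test problem "maximize sqrt(x)/y subject to y >= exp(x)", parameterized in (sqrtx, logy) so that the constraint becomes sqrtx * sqrtx <= logy. Below is a minimal sketch of how such an oracle is driven by the cutting-plane solver, mirroring the Ell and cutting_plane_optim calls visible in test_quasicvx2.py further down; the import paths and the starting point are assumptions, since they sit outside the shown hunks.

import math

import numpy as np

# Assumed import paths: the diff only shows the names Ell, OracleOptim,
# and cutting_plane_optim, not the modules they come from.
from ellalgo.cutting_plane import cutting_plane_optim
from ellalgo.ell import Ell


def solve_in_sqrt_log_coordinates():
    """Sketch: drive MyQuasicvxOracle (defined above) with the ellipsoid solver."""
    xinit = np.array([0.0, 1.0])      # assumed starting center in (sqrtx, logy)
    ellip = Ell(10.0, xinit)          # same constructor form as in the tests
    omega = MyQuasicvxOracle()
    xbest, fbest, _ = cutting_plane_optim(omega, ellip, 0.0)
    if xbest is not None:
        sqrtx, logy = xbest
        # Map the search variables back to the original (x, y) pair.
        print("x =", sqrtx * sqrtx, "y =", math.exp(logy), "best gamma =", fbest)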
15 changes: 6 additions & 9 deletions tests/test_quasicvx2.py
@@ -56,13 +56,13 @@ def assess_optim(self, xc, gamma: float):
        if x <= 0.0:
            return (np.array([-1.0, 0.0]), -x), None

-        # objective: minimize -sqrt(x) / y
+        # objective: maximize sqrt(x) / y
        tmp2 = math.sqrt(x)
-        if (fj := -tmp2 - gamma * y) > 0.0:  # infeasible
-            return (np.array([-0.5 / tmp2, -gamma]), fj), None
+        if (fj := -tmp2 + gamma * y) > 0.0:  # infeasible
+            return (np.array([-0.5 / tmp2, gamma]), fj), None

-        gamma = -tmp2 / y
-        return (np.array([-0.5 / tmp2, -gamma]), 0.0), -tmp2 / y
+        gamma = tmp2 / y
+        return (np.array([-0.5 / tmp2, gamma]), 0.0), gamma


def test_case_feasible():

@@ -75,9 +75,6 @@ def test_case_feasible():
    omega = MyQuasicvxOracle()
    xbest, fbest, _ = cutting_plane_optim(omega, ellip, 0.0)
    assert xbest is not None
-    # assert fbest == approx(-0.42888194247600586)
-    # assert xbest[0] == approx(0.5000004646814299)
-    # assert xbest[1] == approx(1.6487220368468205)


def test_case_infeasible1():

@@ -98,5 +95,5 @@ def test_case_infeasible2():
    xinit = np.array([1.0, 1.0])  # initial xinit
    ellip = Ell(10.0, xinit)
    omega = MyQuasicvxOracle()
-    xbest, _, _ = cutting_plane_optim(omega, ellip, -100)  # wrong init best-so-far
+    xbest, _, _ = cutting_plane_optim(omega, ellip, 100)  # wrong init best-so-far
    assert xbest is None
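The commented-out expectations removed in test_case_feasible above (x ≈ 0.5, y ≈ 1.6487 ≈ e^0.5, and an objective of ≈ 0.4289, negated under the old minimize form) are easy to sanity-check by hand, assuming the constraint y >= exp(x) from test_quasicvx.py is the one active at the optimum: on that boundary the objective reduces to sqrt(x) * exp(-x), whose derivative vanishes at x = 1/2. A short standalone check, not part of the test suite:

import math

# On the boundary y = exp(x), the objective sqrt(x) / y becomes
# g(x) = sqrt(x) * exp(-x); g'(x) = exp(-x) * (0.5 / sqrt(x) - sqrt(x)),
# which vanishes at x = 0.5.
x_opt = 0.5
y_opt = math.exp(x_opt)            # ~1.6487, matching the removed xbest[1] value
val = math.sqrt(x_opt) / y_opt     # ~0.4289; the removed assertion had -0.4289
                                   # because the old code minimized -sqrt(x)/y
print(f"x* = {x_opt}, y* = {y_opt:.6f}, sqrt(x*)/y* = {val:.6f}")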
