
Commit

closes #5, remove runtime assertions
cahity committed Nov 14, 2024
1 parent 28ece41 commit 6f12406
Showing 3 changed files with 37 additions and 26 deletions.
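All three files apply the same pattern: input validation that previously used `assert` now raises an explicit exception. Because Python strips `assert` statements under the `-O` flag, the new checks keep running in optimized mode and give callers a specific exception type to catch. A minimal sketch of the before/after shape (the helper name `check_bounds` is invented for illustration and is not part of the library):

import numpy as np


def check_bounds(lower: np.ndarray, upper: np.ndarray, dim: int) -> None:
    # Before: assert len(lower) == dim and len(upper) == dim, "..."
    # After: explicit checks that survive `python -O` and raise a catchable,
    # specific exception instead of AssertionError.
    if len(lower) != dim or len(upper) != dim:
        raise ValueError("Bounds must have the same dimensions as the space.")
    if not np.all(np.asarray(lower) <= np.asarray(upper)):
        raise ValueError("Lower bound must be less than or equal to upper bound.")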
3 changes: 2 additions & 1 deletion vectoptal/algorithms/naive_elimination.py
@@ -72,7 +72,8 @@ def __init__(
 
         self.K = len(self.dataset.in_data)
         if L is None:  # Use theoretical sampling count if not given.
-            assert hasattr(order.ordering_cone, "beta"), "Ordering complexity needs to be defined."
+            if not hasattr(order.ordering_cone, "beta"):
+                raise AttributeError("Ordering complexity needs to be defined.")
             ordering_complexity = order.ordering_cone.beta
 
             c = 1 + np.sqrt(2)  # Any c>0 should suffice according to Lemma B.12.
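With this change, constructing the algorithm without an explicit sampling count `L` surfaces a specific `AttributeError` when the ordering cone defines no `beta`, rather than an `AssertionError`. A small, self-contained sketch of how a caller might react; the `SimpleNamespace` stand-in and the fallback count of 100 are purely illustrative:

from types import SimpleNamespace

# Hypothetical stand-in for an ordering cone that defines no theoretical
# complexity; real cones in the library may or may not expose `beta`.
cone = SimpleNamespace(W=None)

try:
    if not hasattr(cone, "beta"):
        raise AttributeError("Ordering complexity needs to be defined.")
    L = None  # the algorithm would derive the sampling count from cone.beta
except AttributeError:
    L = 100  # fall back to an explicit sampling count (hypothetical value)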
49 changes: 29 additions & 20 deletions vectoptal/confidence_region.py
@@ -52,10 +52,11 @@ def __init__(
         self.intersect_iteratively = intersect_iteratively
 
         if lower is not None and upper is not None:
-            assert (
-                len(lower) == dim and len(upper) == dim
-            ), "Bounds must have the same dimensions as the space."
-            assert np.all(lower <= upper), "Lower bound must be less than or equal to upper bound."
+            if len(lower) != dim or len(upper) != dim:
+                raise ValueError("Bounds must have the same dimensions as the space.")
+
+            if not np.all(lower <= upper):
+                raise ValueError("Lower bound must be less than or equal to upper bound.")
 
             self.lower = lower
             self.upper = upper
@@ -146,9 +147,11 @@ def is_dominated(
         :rtype: bool
         """
 
-        assert np.array(slackness).size == 1 or slackness.size == len(
-            obj1.lower
-        ), "Slackness must be a scalar or a vector of the same size as the number of dimensions."
+        if np.array(slackness).size != 1 and slackness.size != len(obj1.lower):
+            raise ValueError(
+                "Slackness must be a scalar or a vector of the same size as the number of"
+                " dimensions."
+            )
 
         verts1 = hyperrectangle_get_vertices(obj1.lower, obj1.upper)
         verts2 = hyperrectangle_get_vertices(obj2.lower, obj2.upper)
@@ -212,9 +215,11 @@ def is_covered(
         cone_matrix = order.ordering_cone.W
         m = cone_matrix.shape[1]
 
-        assert (
-            np.array(slackness).size == 1 or slackness.size == m
-        ), "Slackness must be a scalar or a vector of the same size as the number of dimensions."
+        if np.array(slackness).size != 1 and slackness.size != m:
+            raise ValueError(
+                "Slackness must be a scalar or a vector of the same size as the number of"
+                " dimensions."
+            )
 
         z_point = cp.Variable(m)
         z_point2 = cp.Variable(m)
@@ -288,10 +293,10 @@ def update(
         :type scale: np.ndarray
         """
 
-        assert covariance.shape[-1] == covariance.shape[-2], "Covariance matrix must be square."
-        assert (
-            np.array(scale).size == 1
-        ), "Scale must be a scalar for this type of confidence region."
+        if covariance.shape[-1] != covariance.shape[-2]:
+            raise ValueError("Covariance matrix must be square.")
+        if np.array(scale).size != 1:
+            raise ValueError("Scale must be a scalar for this type of confidence region.")
 
         self.center = mean
         self.sigma = covariance
@@ -319,9 +324,11 @@ def is_dominated(
 
         if np.array(slackness).size == 1:
             slackness = np.array([slackness] * cone_matrix.shape[0])
-        assert (
-            slackness.size == cone_matrix.shape[0]
-        ), "Slackness must be a scalar or a vector of the same size as the number of constraints."
+        if slackness.size != cone_matrix.shape[0]:
+            raise ValueError(
+                "Slackness must be a scalar or a vector of the same size as the number of"
+                " constraints."
+            )
 
         mux = cp.Variable(output_dim)
         muy = cp.Variable(output_dim)
@@ -401,9 +408,11 @@ def is_covered(
         cone_matrix = order.ordering_cone.W
         output_dim = cone_matrix.shape[1]
 
-        assert (
-            np.array(slackness).size == 1 or slackness.size == cone_matrix.shape[0]
-        ), "Slackness must be a scalar or a vector of the same size as the number of constraints."
+        if np.array(slackness).size != 1 and slackness.size != cone_matrix.shape[0]:
+            raise ValueError(
+                "Slackness must be a scalar or a vector of the same size as the number of"
+                " constraints."
+            )
 
         mux = cp.Variable(output_dim)
         muy = cp.Variable(output_dim)
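The same slackness check appears in four methods of this file: the value must be a scalar or a vector whose length matches the number of dimensions (or constraints). A hedged sketch that restates the shared check as a standalone helper; `validate_slackness` is not part of the library:

import numpy as np


def validate_slackness(slackness, size: int) -> np.ndarray:
    # Accept a scalar or a length-`size` vector, mirroring the checks above.
    slackness = np.asarray(slackness)
    if slackness.size != 1 and slackness.size != size:
        raise ValueError(
            "Slackness must be a scalar or a vector of the same size as the"
            " number of dimensions."
        )
    # Broadcast a scalar to a full vector, matching how is_dominated above
    # expands a scalar slackness before checking its size.
    return np.broadcast_to(slackness, (size,)) if slackness.size == 1 else slackness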
11 changes: 6 additions & 5 deletions vectoptal/models/empirical_mean_var.py
@@ -57,8 +57,10 @@ def add_sample(self, indices: Iterable[int], Y_t: np.ndarray):
         :param Y_t: A N-by-output_dim array containing the new samples to be added.
         :type Y_t: np.ndarray
         """
-        assert len(indices) == len(Y_t), "Number of samples is ambiguous."
-        assert max(indices) < self.design_count, "Design index out of bounds."
+        if len(indices) != len(Y_t):
+            raise ValueError("Number of samples is ambiguous.")
+        if max(indices) >= self.design_count:
+            raise ValueError("Design index out of bounds.")
 
         for idx, y in zip(indices, Y_t):
             self.design_samples[idx] = np.concatenate(
@@ -121,9 +123,8 @@ def predict(self, test_X: np.ndarray) -> tuple[np.ndarray, np.ndarray]:
         :return: A tuple containing two numpy arrays: the predicted means and variances.
         :rtype: tuple[np.ndarray, np.ndarray]
         """
-        assert (
-            test_X.shape[1] == self.input_dim + 1
-        ), "Test data needs to have an additional column for indices."
+        if test_X.shape[1] != self.input_dim + 1:
+            raise ValueError("Test data needs to have an additional column for indices.")
 
         indices = test_X[..., -1].astype(int)
         if self.track_means:
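For callers of this model, the practical difference is the exception type: malformed inputs to `add_sample` or `predict` now raise `ValueError` even under `python -O`, instead of `AssertionError`. A hedged usage sketch built around a minimal stub; `_ModelStub` only repeats the validation shown in the diff and is not the library's model class:

import numpy as np


class _ModelStub:
    """Hypothetical stand-in repeating the validation from add_sample above."""

    design_count = 4

    def add_sample(self, indices, Y_t):
        if len(indices) != len(Y_t):
            raise ValueError("Number of samples is ambiguous.")
        if max(indices) >= self.design_count:
            raise ValueError("Design index out of bounds.")


try:
    # Two indices but three rows of observations.
    _ModelStub().add_sample(indices=[0, 1], Y_t=np.zeros((3, 2)))
except ValueError as err:
    print(f"Rejected sample batch: {err}")  # -> Number of samples is ambiguous.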
