Turn warning into an error.
Also reorders the branches of the if statement to avoid excessive
indentation.
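
The change follows a guard-clause pattern: instead of logging a warning in an else branch and silently continuing, the unsupported case is rejected up front with a raise, which also lets the main scaling logic sit one indentation level shallower. A minimal sketch of the before/after shape; the function names, the logger setup, and the '* 1.2' scaling are placeholders for illustration, not the actual pelicun code:

import logging

logger = logging.getLogger(__name__)


def adjust_capacity_before(family: str, theta: list) -> list:
    # Old shape: the supported path is nested under the if, and the
    # unsupported case is only logged, so the run continues without scaling.
    if family in {'normal', 'lognormal', 'deterministic'}:
        theta[0] = theta[0] * 1.2  # placeholder for the real scaling logic
    else:
        logger.warning('Capacity adjustment is not supported for `%s`.', family)
    return theta


def adjust_capacity_after(family: str, theta: list) -> list:
    # New shape: unsupported families fail fast with a ValueError, and the
    # scaling logic sits one indentation level shallower.
    if family not in {'normal', 'lognormal', 'deterministic'}:
        msg = f'Capacity adjustment is not supported for `{family}`.'
        raise ValueError(msg)
    theta[0] = theta[0] * 1.2  # placeholder for the real scaling logic
    return theta

Failing fast surfaces an unsupported scaling specification immediately instead of letting it be dropped with only a log message, which is the behavioral change the commit title describes.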
ioannis-vm committed Dec 3, 2024
1 parent 56da5f6 commit c7dd540
Showing 1 changed file with 34 additions and 32 deletions.
pelicun/model/damage_model.py: 66 changes (34 additions & 32 deletions)
@@ -1330,7 +1330,7 @@ def map_ds(values: np.ndarray, offset: int) -> np.ndarray:

def parse_scaling_specification(scaling_specification: dict) -> dict: # noqa: C901
"""
Parse and validate the scaling specification, used in the '_create_dmg_RVs' method.
Parse and validate the scaling specification, used in the '_create_dmg_RVs' method.
Parameters
----------
@@ -1355,11 +1355,11 @@ def parse_scaling_specification(scaling_specification: dict) -> dict: # noqa: C901
Raises
------
ValueError
If the scaling specification is invalid.
TypeError
If the type of an entry is invalid.
"""
ValueError: If the scaling specification is invalid.
ValueError: If an unsupported distribution is specified.
TypeError: If the type of an entry is invalid.
""" # noqa: DOC502
# if there are contents, ensure they are valid.
# See docstring for an example of what is expected.
parsed_scaling_specification = defaultdict(dict)
@@ -1372,7 +1372,7 @@ def parse_scaling_specification(scaling_specification: dict) -> dict: # noqa: C901
f'Invalid entry in scaling_specification: '
f"{value}. No other entries are allowed for a component when 'ALL' is used."
)
raise ValueError(msg)
raise ValueError(msg) # noqa: DOC501
for limit_state_id, specifics in value.items():
if not (
limit_state_id.startswith('LS') or limit_state_id == 'ALL'
@@ -1395,7 +1395,7 @@ def parse_scaling_specification(scaling_specification: dict) -> dict: # noqa: C901
f'The specified scaling operation has to be a string.'
f'See docstring of DamageModel._create_dmg_RVs.'
)
raise TypeError(msg)
raise TypeError(msg) # noqa: DOC501
capacity_adjustment_operation = spec[0]
number = spec[1::]
if capacity_adjustment_operation not in {'+', '-', '*', '/'}:
@@ -1485,34 +1485,36 @@ def parse_scaling_specification(scaling_specification: dict) -> dict: # noqa: C901
)

if capacity_adjustment_operation:
if family in {'normal', 'lognormal', 'deterministic'}:
# Only scale the median value if ls_id is defined in capacity_adjustment_operation
# Otherwise, use the original value
new_theta_0 = None
if 'ALL' in capacity_adjustment_operation:
new_theta_0 = self._handle_operation_list(
theta[0],
capacity_adjustment_operation['ALL'],
)
elif f'LS{ls_id}' in capacity_adjustment_operation:
new_theta_0 = self._handle_operation_list(
theta[0],
capacity_adjustment_operation[f'LS{ls_id}'],
)
if new_theta_0 is not None:
if new_theta_0.size == 1:
theta[0] = new_theta_0[0]
else:
# Repeat the theta values new_theta_0.size times along axis 0
# and 1 time along axis 1
theta = np.tile(theta, (new_theta_0.size, 1))
theta[:, 0] = new_theta_0
else:
self.log.warning(
if family not in {'normal', 'lognormal', 'deterministic'}:
msg = (
f'Capacity adjustment is only supported '
f'for `normal` or `lognormal` distributions. '
f'Ignoring: `{cmp_loc_dir}`, which is `{family}`'
)
raise ValueError(msg) # noqa: DOC501
# Only scale the median value if ls_id is
# defined in capacity_adjustment_operation
# Otherwise, use the original value
new_theta_0 = None
if 'ALL' in capacity_adjustment_operation:
new_theta_0 = self._handle_operation_list(
theta[0],
capacity_adjustment_operation['ALL'],
)
elif f'LS{ls_id}' in capacity_adjustment_operation:
new_theta_0 = self._handle_operation_list(
theta[0],
capacity_adjustment_operation[f'LS{ls_id}'],
)
if new_theta_0 is not None:
if new_theta_0.size == 1:
theta[0] = new_theta_0[0]
else:
# Repeat the theta values
# new_theta_0.size times along
# axis 0 and 1 time along axis 1
theta = np.tile(theta, (new_theta_0.size, 1))
theta[:, 0] = new_theta_0

tr_lims = np.array(
[
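
For reference, the validation rules exercised in this file can be restated as a small self-contained sketch. The specification shape (a component key mapping 'LS<i>' or 'ALL' to an operation string such as '*1.20') mirrors the checks shown in the diff, but the function below is a simplified stand-in written for this illustration, not the pelicun implementation, and it omits details such as lists of operations per limit state:

def validate_scaling_spec(spec: dict) -> dict:
    # Simplified restatement of the checks in parse_scaling_specification.
    parsed: dict = {}
    for component, entries in spec.items():
        if 'ALL' in entries and len(entries) > 1:
            msg = (
                f"Invalid entry for '{component}': no other entries "
                f"are allowed when 'ALL' is used."
            )
            raise ValueError(msg)
        for limit_state, operation in entries.items():
            if not (limit_state.startswith('LS') or limit_state == 'ALL'):
                raise ValueError(f'Invalid limit state key: {limit_state}')
            if not isinstance(operation, str):
                raise TypeError('The scaling operation has to be a string.')
            if operation[0] not in {'+', '-', '*', '/'}:
                raise ValueError(f'Unsupported operation: {operation[0]}')
            parsed.setdefault(component, {})[limit_state] = (
                operation[0],
                float(operation[1:]),
            )
    return parsed


print(validate_scaling_spec({'CMP.A-1-1': {'LS1': '*1.20'}}))
# prints {'CMP.A-1-1': {'LS1': ('*', 1.2)}}

try:
    validate_scaling_spec({'CMP.B-1-1': {'ALL': '+0.10', 'LS2': '*0.90'}})
except ValueError as exc:
    print(exc)  # the 'ALL' entry must stand alone for a component

With this commit, an unsupported capacity distribution family is handled the same way: it raises a ValueError during damage-model setup instead of being skipped with a warning.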
