Resolve Coverity Issues (securefederatedai#874)
* Fix coverity issues

* Resolve remaining coverity issues

Signed-off-by: Parth Mandaliya <[email protected]>
psfoley authored and ParthMandaliya committed Oct 4, 2023
1 parent 1d0c9e1 commit af49096
Showing 8 changed files with 37 additions and 47 deletions.
@@ -451,22 +451,21 @@ def train(self):
                 last_iter=(batch_idx == (len(self.train_loader) - 1)),
             )
 
-            if self.dp_params is not None:
-                if batch_idx % self.dp_params["clip_frequency"] == 0 or (
-                    batch_idx == (len(self.train_loader) - 1)
-                ):
-                    if self.clip_test:
-                        optimizer_after_step_params = [
-                            param.data
-                            for param in self.optimizer.param_groups()[0]["params"]
-                        ]
-                        clip_testing_on_optimizer_parameters(
-                            optimizer_before_step_params,
-                            optimizer_after_step_params,
-                            self.collaborator_name,
-                            self.round,
-                            self.device,
-                        )
+            if batch_idx % self.dp_params["clip_frequency"] == 0 or (
+                batch_idx == (len(self.train_loader) - 1)
+            ):
+                if self.clip_test:
+                    optimizer_after_step_params = [
+                        param.data
+                        for param in self.optimizer.param_groups()[0]["params"]
+                    ]
+                    clip_testing_on_optimizer_parameters(
+                        optimizer_before_step_params,
+                        optimizer_after_step_params,
+                        self.collaborator_name,
+                        self.round,
+                        self.device,
+                    )
 
             train_losses.append(loss.item())

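Note: this hunk and the identical one in the next file drop a nested `if self.dp_params is not None:` check. The dedented replacement only stays safe if an enclosing guard on self.dp_params already wraps this block higher up in train(), which is presumably why Coverity reported the inner check as redundant. A minimal before/after sketch of the pattern, with hypothetical names:

# Minimal sketch of the redundant-guard pattern (hypothetical names).
def train_batch(dp_params, batch_idx):
    if dp_params is not None:
        # ... optimizer step ...
        if dp_params is not None:  # redundant: dp_params cannot be None here
            if batch_idx % dp_params["clip_frequency"] == 0:
                pass  # clip-testing logic runs here

# After the fix, the inner guard is removed and its body dedented one level:
def train_batch_fixed(dp_params, batch_idx):
    if dp_params is not None:
        # ... optimizer step ...
        if batch_idx % dp_params["clip_frequency"] == 0:
            pass  # clip-testing logic runs here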
@@ -454,22 +454,21 @@ def train(self):
                 last_iter=(batch_idx == (len(self.train_loader) - 1)),
             )
 
-            if self.dp_params is not None:
-                if batch_idx % self.dp_params["clip_frequency"] == 0 or (
-                    batch_idx == (len(self.train_loader) - 1)
-                ):
-                    if self.clip_test:
-                        optimizer_after_step_params = [
-                            param.data
-                            for param in self.optimizer.param_groups()[0]["params"]
-                        ]
-                        clip_testing_on_optimizer_parameters(
-                            optimizer_before_step_params,
-                            optimizer_after_step_params,
-                            self.collaborator_name,
-                            self.round,
-                            self.device,
-                        )
+            if batch_idx % self.dp_params["clip_frequency"] == 0 or (
+                batch_idx == (len(self.train_loader) - 1)
+            ):
+                if self.clip_test:
+                    optimizer_after_step_params = [
+                        param.data
+                        for param in self.optimizer.param_groups()[0]["params"]
+                    ]
+                    clip_testing_on_optimizer_parameters(
+                        optimizer_before_step_params,
+                        optimizer_after_step_params,
+                        self.collaborator_name,
+                        self.round,
+                        self.device,
+                    )
 
             train_losses.append(loss.item())

openfl-workspace/keras_cnn_mnist/src/keras_cnn.py (2 additions, 3 deletions)
@@ -30,9 +30,8 @@ def __init__(self, **kwargs):
 
         self.model.summary(print_fn=self.logger.info)
 
-        if self.data_loader is not None:
-            self.logger.info(f'Train Set Size : {self.get_train_data_size()}')
-            self.logger.info(f'Valid Set Size : {self.get_valid_data_size()}')
+        self.logger.info(f'Train Set Size : {self.get_train_data_size()}')
+        self.logger.info(f'Valid Set Size : {self.get_valid_data_size()}')
 
     def build_model(self,
                     input_shape,
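This file and the three that follow make the same change: the `if self.data_loader is not None:` guard around the dataset-size logging is dropped. The likely rationale (an assumption on my part; the commit message only says "Fix coverity issues") is that the task runner cannot reach this point without a data loader, so the guard is dead code that Coverity reports. A sketch of the pattern with a hypothetical class:

# Hypothetical sketch: when __init__ unconditionally assigns the loader,
# a later None check on it is dead code.
class TaskRunner:
    def __init__(self, data_loader):
        self.data_loader = data_loader  # assumed always provided by the framework
        # Before the fix, the log line was wrapped in
        # `if self.data_loader is not None:`; after it, it runs unconditionally.
        print(f'Train Set Size : {self.get_train_data_size()}')

    def get_train_data_size(self):
        return len(self.data_loader)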
openfl-workspace/keras_cnn_with_compression/src/keras_cnn.py (2 additions, 3 deletions)
@@ -30,9 +30,8 @@ def __init__(self, **kwargs):
 
         self.model.summary(print_fn=self.logger.info)
 
-        if self.data_loader is not None:
-            self.logger.info(f'Train Set Size : {self.get_train_data_size()}')
-            self.logger.info(f'Valid Set Size : {self.get_valid_data_size()}')
+        self.logger.info(f'Train Set Size : {self.get_train_data_size()}')
+        self.logger.info(f'Valid Set Size : {self.get_valid_data_size()}')
 
     def build_model(self,
                     input_shape,
openfl-workspace/keras_nlp/src/nlp_taskrunner.py (1 addition, 2 deletions)
@@ -70,5 +70,4 @@ def __init__(self, latent_dim, **kwargs):
 
         self.model.summary(print_fn=self.logger.info)
 
-        if self.data_loader is not None:
-            self.logger.info(f'Train Set Size : {self.get_train_data_size()}')
+        self.logger.info(f'Train Set Size : {self.get_train_data_size()}')
@@ -70,5 +70,4 @@ def __init__(self, latent_dim, **kwargs):
 
         self.model.summary(print_fn=self.logger.info)
 
-        if self.data_loader is not None:
-            self.logger.info(f'Train Set Size : {self.get_train_data_size()}')
+        self.logger.info(f'Train Set Size : {self.get_train_data_size()}')
openfl/component/aggregator/aggregator.py (0 additions, 4 deletions)
@@ -834,10 +834,6 @@ def _compute_validation_related_task_metrics(self, task_name):
                     'metric_value': agg_results.item(),
                     'round': round_number}
 
-            if agg_results is None:
-                self.logger.warning(
-                    f'Aggregated metric {agg_tensor_name} could not be collected '
-                    f'for round {self.round_number}. Skipping reporting for this round')
             if agg_function:
                 self.logger.metric(f'Round {round_number}, aggregator: {task_name} '
                                    f'{agg_function} {agg_tensor_name}:\t{agg_results:f}')
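The deleted warning is unreachable: a few lines earlier agg_results.item() has already been called, so if agg_results were None the code would have raised an AttributeError before ever reaching the check. Coverity reports this dereference-then-check ordering (e.g. as REVERSE_INULL). A minimal sketch with hypothetical names:

# Sketch of a null check placed after the dereference (hypothetical names).
def report_metric(agg_results, logger):
    metric = {'metric_value': agg_results.item()}  # dereferences agg_results
    if agg_results is None:  # dead branch: .item() above would already raise
        logger.warning('aggregated metric could not be collected')
    return metric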
openfl/databases/utilities/dataframe.py (1 addition, 1 deletion)
@@ -34,7 +34,7 @@ def _search(self, tensor_name: str = None, origin: str = None,
     pd.DataFrame : New dataframe that matches the search query from
         the tensor_db dataframe
     """
-    df = None
+    df = pd.DataFrame()
     query_string = []
     if tensor_name is not None:
         query_string.append(f"(tensor_name == '{tensor_name}')")
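Initializing df as an empty DataFrame instead of None means _search always returns a DataFrame, even when no filters are given or none match, so callers can use the result without a None check. A short sketch of the difference, assuming a query-building flow like the one above:

import pandas as pd

def search(db: pd.DataFrame, tensor_name=None) -> pd.DataFrame:
    df = pd.DataFrame()  # safe default: always a DataFrame, never None
    query_string = []
    if tensor_name is not None:
        query_string.append(f"(tensor_name == '{tensor_name}')")
    if query_string:
        df = db.query(' and '.join(query_string))
    return df

# With `df = None`, a caller doing `search(db).empty` on the no-filter path
# would hit AttributeError on None, the dereference Coverity warns about.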

