Resolve Coverity Issues #874

Merged 2 commits on Sep 14, 2023.
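Summary of the changes, as read from the diffs below: the PR resolves Coverity static-analysis findings by deleting dead `None` checks (a redundant `dp_params` guard in two differential-privacy training loops, `data_loader` guards in four Keras task runners, and an unreachable `agg_results is None` branch in the aggregator) and by initializing the `_search` result in `openfl/databases/utilities/dataframe.py` to an empty `pd.DataFrame` instead of `None`.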
```diff
@@ -463,22 +463,21 @@ def train(self):
                 last_iter=(batch_idx == (len(self.train_loader) - 1)),
             )

-            if self.dp_params is not None:
-                if batch_idx % self.dp_params["clip_frequency"] == 0 or (
-                    batch_idx == (len(self.train_loader) - 1)
-                ):
-                    if self.clip_test:
-                        optimizer_after_step_params = [
-                            param.data
-                            for param in self.optimizer.param_groups()[0]["params"]
-                        ]
-                        clip_testing_on_optimizer_parameters(
-                            optimizer_before_step_params,
-                            optimizer_after_step_params,
-                            self.collaborator_name,
-                            self.round,
-                            self.device,
-                        )
+            if batch_idx % self.dp_params["clip_frequency"] == 0 or (
+                batch_idx == (len(self.train_loader) - 1)
+            ):
+                if self.clip_test:
+                    optimizer_after_step_params = [
+                        param.data
+                        for param in self.optimizer.param_groups()[0]["params"]
+                    ]
+                    clip_testing_on_optimizer_parameters(
+                        optimizer_before_step_params,
+                        optimizer_after_step_params,
+                        self.collaborator_name,
+                        self.round,
+                        self.device,
+                    )

             train_losses.append(loss.item())
```
```diff
@@ -460,22 +460,21 @@ def train(self):
                 last_iter=(batch_idx == (len(self.train_loader) - 1)),
             )

-            if self.dp_params is not None:
-                if batch_idx % self.dp_params["clip_frequency"] == 0 or (
-                    batch_idx == (len(self.train_loader) - 1)
-                ):
-                    if self.clip_test:
-                        optimizer_after_step_params = [
-                            param.data
-                            for param in self.optimizer.param_groups()[0]["params"]
-                        ]
-                        clip_testing_on_optimizer_parameters(
-                            optimizer_before_step_params,
-                            optimizer_after_step_params,
-                            self.collaborator_name,
-                            self.round,
-                            self.device,
-                        )
+            if batch_idx % self.dp_params["clip_frequency"] == 0 or (
+                batch_idx == (len(self.train_loader) - 1)
+            ):
+                if self.clip_test:
+                    optimizer_after_step_params = [
+                        param.data
+                        for param in self.optimizer.param_groups()[0]["params"]
+                    ]
+                    clip_testing_on_optimizer_parameters(
+                        optimizer_before_step_params,
+                        optimizer_after_step_params,
+                        self.collaborator_name,
+                        self.round,
+                        self.device,
+                    )

             train_losses.append(loss.item())
```
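The two hunks above apply the same change to two workspace copies of the training loop: the outer `if self.dp_params is not None:` guard is dropped and its body unindented, leaving the `clip_frequency` check as the only condition. A minimal sketch of the dead-code pattern this kind of Coverity finding points at, assuming `dp_params` is already known to be non-None on this path (illustrative names, not the OpenFL code):

```python
def run_clip_test():
    print("running clip test")

def clip_step(dp_params, batch_idx, n_batches):
    # dp_params is dereferenced here, so execution only continues
    # past this line when it is non-None...
    freq = dp_params["clip_frequency"]
    if dp_params is not None:  # ...making this guard always True: dead code
        if batch_idx % freq == 0 or batch_idx == n_batches - 1:
            run_clip_test()

clip_step({"clip_frequency": 2}, batch_idx=4, n_batches=5)
```

If `dp_params` could in fact be `None` here, the remaining `self.dp_params["clip_frequency"]` access would now raise, so the removal presumes the guard was genuinely redundant.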
5 changes: 2 additions & 3 deletions openfl-workspace/keras_cnn_mnist/src/keras_cnn.py

```diff
@@ -30,9 +30,8 @@ def __init__(self, **kwargs):

         self.model.summary(print_fn=self.logger.info)

-        if self.data_loader is not None:
-            self.logger.info(f'Train Set Size : {self.get_train_data_size()}')
-            self.logger.info(f'Valid Set Size : {self.get_valid_data_size()}')
+        self.logger.info(f'Train Set Size : {self.get_train_data_size()}')
+        self.logger.info(f'Valid Set Size : {self.get_valid_data_size()}')

     def build_model(self,
                     input_shape,
```
5 changes: 2 additions & 3 deletions openfl-workspace/keras_cnn_with_compression/src/keras_cnn.py

```diff
@@ -30,9 +30,8 @@ def __init__(self, **kwargs):

         self.model.summary(print_fn=self.logger.info)

-        if self.data_loader is not None:
-            self.logger.info(f'Train Set Size : {self.get_train_data_size()}')
-            self.logger.info(f'Valid Set Size : {self.get_valid_data_size()}')
+        self.logger.info(f'Train Set Size : {self.get_train_data_size()}')
+        self.logger.info(f'Valid Set Size : {self.get_valid_data_size()}')

     def build_model(self,
                     input_shape,
```
3 changes: 1 addition & 2 deletions openfl-workspace/keras_nlp/src/nlp_taskrunner.py

```diff
@@ -70,5 +70,4 @@ def __init__(self, latent_dim, **kwargs):

         self.model.summary(print_fn=self.logger.info)

-        if self.data_loader is not None:
-            self.logger.info(f'Train Set Size : {self.get_train_data_size()}')
+        self.logger.info(f'Train Set Size : {self.get_train_data_size()}')
```
```diff
@@ -70,5 +70,4 @@ def __init__(self, latent_dim, **kwargs):

         self.model.summary(print_fn=self.logger.info)

-        if self.data_loader is not None:
-            self.logger.info(f'Train Set Size : {self.get_train_data_size()}')
+        self.logger.info(f'Train Set Size : {self.get_train_data_size()}')
```
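All four Keras task-runner hunks above make the same change: the `if self.data_loader is not None:` guard around the dataset-size logging is removed and the `self.logger.info` calls are unindented, presumably because these `__init__` paths always run with a data loader already attached, so Coverity reports the check as dead code.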
4 changes: 0 additions & 4 deletions openfl/component/aggregator/aggregator.py

```diff
@@ -834,10 +834,6 @@ def _compute_validation_related_task_metrics(self, task_name):
                 'metric_value': agg_results.item(),
                 'round': round_number}

-            if agg_results is None:
-                self.logger.warning(
-                    f'Aggregated metric {agg_tensor_name} could not be collected '
-                    f'for round {self.round_number}. Skipping reporting for this round')
             if agg_function:
                 self.logger.metric(f'Round {round_number}, aggregator: {task_name} '
                                    f'{agg_function} {agg_tensor_name}:\t{agg_results:f}')
```
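This branch is unreachable as written: `agg_results.item()` is dereferenced a few lines earlier, so if `agg_results` were `None` an `AttributeError` would already have been raised before the warning could fire. A minimal sketch of the unreachable-branch pattern (illustrative names, not the aggregator's code):

```python
class Result:
    """Stand-in for an aggregated tensor exposing an .item() accessor."""
    def item(self):
        return 0.5

agg_results = Result()
value = agg_results.item()   # would raise AttributeError if agg_results were None
if agg_results is None:      # therefore this branch can never execute: dead code
    print("never reached")
print(value)
```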
2 changes: 1 addition & 1 deletion openfl/databases/utilities/dataframe.py

```diff
@@ -34,7 +34,7 @@ def _search(self, tensor_name: str = None, origin: str = None,
         pd.DataFrame : New dataframe that matches the search query from
             the tensor_db dataframe
         """
-        df = None
+        df = pd.DataFrame()
         query_string = []
         if tensor_name is not None:
             query_string.append(f"(tensor_name == '{tensor_name}')")
```