From 3bd32d183beee7afd677504e6feb00bf7cb3ccf3 Mon Sep 17 00:00:00 2001
From: Patrick Foley
Date: Thu, 14 Sep 2023 16:54:11 -0700
Subject: [PATCH 1/2] Resolve Coverity Issues (#874)

* Fix coverity issues

* Resolve remaining coverity issues
---
 ...rkflow_Interface_Mnist_Implementation_1.py | 31 +++++++++----------
 ...rkflow_Interface_Mnist_Implementation_2.py | 31 +++++++++----------
 .../keras_cnn_mnist/src/keras_cnn.py          |  5 ++-
 .../src/keras_cnn.py                          |  5 ++-
 .../keras_nlp/src/nlp_taskrunner.py           |  3 +-
 .../src/nlp_taskrunner.py                     |  3 +-
 openfl/component/aggregator/aggregator.py     |  4 ---
 openfl/databases/utilities/dataframe.py       |  2 +-
 8 files changed, 37 insertions(+), 47 deletions(-)

diff --git a/openfl-tutorials/experimental/Global_DP/Workflow_Interface_Mnist_Implementation_1.py b/openfl-tutorials/experimental/Global_DP/Workflow_Interface_Mnist_Implementation_1.py
index b0aa0cee82..eae51d76c3 100644
--- a/openfl-tutorials/experimental/Global_DP/Workflow_Interface_Mnist_Implementation_1.py
+++ b/openfl-tutorials/experimental/Global_DP/Workflow_Interface_Mnist_Implementation_1.py
@@ -463,22 +463,21 @@ def train(self):
                 last_iter=(batch_idx == (len(self.train_loader) - 1)),
             )

-            if self.dp_params is not None:
-                if batch_idx % self.dp_params["clip_frequency"] == 0 or (
-                    batch_idx == (len(self.train_loader) - 1)
-                ):
-                    if self.clip_test:
-                        optimizer_after_step_params = [
-                            param.data
-                            for param in self.optimizer.param_groups()[0]["params"]
-                        ]
-                        clip_testing_on_optimizer_parameters(
-                            optimizer_before_step_params,
-                            optimizer_after_step_params,
-                            self.collaborator_name,
-                            self.round,
-                            self.device,
-                        )
+            if batch_idx % self.dp_params["clip_frequency"] == 0 or (
+                batch_idx == (len(self.train_loader) - 1)
+            ):
+                if self.clip_test:
+                    optimizer_after_step_params = [
+                        param.data
+                        for param in self.optimizer.param_groups()[0]["params"]
+                    ]
+                    clip_testing_on_optimizer_parameters(
+                        optimizer_before_step_params,
+                        optimizer_after_step_params,
+                        self.collaborator_name,
+                        self.round,
+                        self.device,
+                    )

             train_losses.append(loss.item())

diff --git a/openfl-tutorials/experimental/Global_DP/Workflow_Interface_Mnist_Implementation_2.py b/openfl-tutorials/experimental/Global_DP/Workflow_Interface_Mnist_Implementation_2.py
index 54a1e131de..88dab0269d 100644
--- a/openfl-tutorials/experimental/Global_DP/Workflow_Interface_Mnist_Implementation_2.py
+++ b/openfl-tutorials/experimental/Global_DP/Workflow_Interface_Mnist_Implementation_2.py
@@ -460,22 +460,21 @@ def train(self):
                 last_iter=(batch_idx == (len(self.train_loader) - 1)),
             )

-            if self.dp_params is not None:
-                if batch_idx % self.dp_params["clip_frequency"] == 0 or (
-                    batch_idx == (len(self.train_loader) - 1)
-                ):
-                    if self.clip_test:
-                        optimizer_after_step_params = [
-                            param.data
-                            for param in self.optimizer.param_groups()[0]["params"]
-                        ]
-                        clip_testing_on_optimizer_parameters(
-                            optimizer_before_step_params,
-                            optimizer_after_step_params,
-                            self.collaborator_name,
-                            self.round,
-                            self.device,
-                        )
+            if batch_idx % self.dp_params["clip_frequency"] == 0 or (
+                batch_idx == (len(self.train_loader) - 1)
+            ):
+                if self.clip_test:
+                    optimizer_after_step_params = [
+                        param.data
+                        for param in self.optimizer.param_groups()[0]["params"]
+                    ]
+                    clip_testing_on_optimizer_parameters(
+                        optimizer_before_step_params,
+                        optimizer_after_step_params,
+                        self.collaborator_name,
+                        self.round,
+                        self.device,
+                    )

             train_losses.append(loss.item())

diff --git a/openfl-workspace/keras_cnn_mnist/src/keras_cnn.py b/openfl-workspace/keras_cnn_mnist/src/keras_cnn.py
index d559ebe44c..35a71f7734 100644
--- a/openfl-workspace/keras_cnn_mnist/src/keras_cnn.py
+++ b/openfl-workspace/keras_cnn_mnist/src/keras_cnn.py
@@ -30,9 +30,8 @@ def __init__(self, **kwargs):

         self.model.summary(print_fn=self.logger.info)

-        if self.data_loader is not None:
-            self.logger.info(f'Train Set Size : {self.get_train_data_size()}')
-            self.logger.info(f'Valid Set Size : {self.get_valid_data_size()}')
+        self.logger.info(f'Train Set Size : {self.get_train_data_size()}')
+        self.logger.info(f'Valid Set Size : {self.get_valid_data_size()}')

     def build_model(self,
                     input_shape,
diff --git a/openfl-workspace/keras_cnn_with_compression/src/keras_cnn.py b/openfl-workspace/keras_cnn_with_compression/src/keras_cnn.py
index d559ebe44c..35a71f7734 100644
--- a/openfl-workspace/keras_cnn_with_compression/src/keras_cnn.py
+++ b/openfl-workspace/keras_cnn_with_compression/src/keras_cnn.py
@@ -30,9 +30,8 @@ def __init__(self, **kwargs):

         self.model.summary(print_fn=self.logger.info)

-        if self.data_loader is not None:
-            self.logger.info(f'Train Set Size : {self.get_train_data_size()}')
-            self.logger.info(f'Valid Set Size : {self.get_valid_data_size()}')
+        self.logger.info(f'Train Set Size : {self.get_train_data_size()}')
+        self.logger.info(f'Valid Set Size : {self.get_valid_data_size()}')

     def build_model(self,
                     input_shape,
diff --git a/openfl-workspace/keras_nlp/src/nlp_taskrunner.py b/openfl-workspace/keras_nlp/src/nlp_taskrunner.py
index 2db741cee0..7dc53716f4 100644
--- a/openfl-workspace/keras_nlp/src/nlp_taskrunner.py
+++ b/openfl-workspace/keras_nlp/src/nlp_taskrunner.py
@@ -70,5 +70,4 @@ def __init__(self, latent_dim, **kwargs):

         self.model.summary(print_fn=self.logger.info)

-        if self.data_loader is not None:
-            self.logger.info(f'Train Set Size : {self.get_train_data_size()}')
+        self.logger.info(f'Train Set Size : {self.get_train_data_size()}')
diff --git a/openfl-workspace/keras_nlp_gramine_ready/src/nlp_taskrunner.py b/openfl-workspace/keras_nlp_gramine_ready/src/nlp_taskrunner.py
index 2db741cee0..7dc53716f4 100644
--- a/openfl-workspace/keras_nlp_gramine_ready/src/nlp_taskrunner.py
+++ b/openfl-workspace/keras_nlp_gramine_ready/src/nlp_taskrunner.py
@@ -70,5 +70,4 @@ def __init__(self, latent_dim, **kwargs):

         self.model.summary(print_fn=self.logger.info)

-        if self.data_loader is not None:
-            self.logger.info(f'Train Set Size : {self.get_train_data_size()}')
+        self.logger.info(f'Train Set Size : {self.get_train_data_size()}')
diff --git a/openfl/component/aggregator/aggregator.py b/openfl/component/aggregator/aggregator.py
index 3f19444d98..f2d3c17b5f 100644
--- a/openfl/component/aggregator/aggregator.py
+++ b/openfl/component/aggregator/aggregator.py
@@ -834,10 +834,6 @@ def _compute_validation_related_task_metrics(self, task_name):
                 'metric_value': agg_results.item(),
                 'round': round_number}

-            if agg_results is None:
-                self.logger.warning(
-                    f'Aggregated metric {agg_tensor_name} could not be collected '
-                    f'for round {self.round_number}. Skipping reporting for this round')
             if agg_function:
                 self.logger.metric(f'Round {round_number}, aggregator: {task_name} '
                                    f'{agg_function} {agg_tensor_name}:\t{agg_results:f}')
diff --git a/openfl/databases/utilities/dataframe.py b/openfl/databases/utilities/dataframe.py
index ebc6801b03..9038fa07d3 100644
--- a/openfl/databases/utilities/dataframe.py
+++ b/openfl/databases/utilities/dataframe.py
@@ -34,7 +34,7 @@ def _search(self, tensor_name: str = None, origin: str = None,
        pd.DataFrame : New dataframe that matches the search query from the tensor_db dataframe

    """
-    df = None
+    df = pd.DataFrame()
    query_string = []
    if tensor_name is not None:
        query_string.append(f"(tensor_name == '{tensor_name}')")

From 62b5b0c96183726da7abe57c91a84dd843e7f335 Mon Sep 17 00:00:00 2001
From: Patrick Foley
Date: Thu, 21 Sep 2023 09:37:50 -0700
Subject: [PATCH 2/2] Migrate to Ubuntu 22.04 LTS release (supported through 2027) (#875)

Signed-off-by: Patrick Foley
---
 openfl-docker/Dockerfile.base | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/openfl-docker/Dockerfile.base b/openfl-docker/Dockerfile.base
index 9a21818033..1aa76dfacd 100644
--- a/openfl-docker/Dockerfile.base
+++ b/openfl-docker/Dockerfile.base
@@ -3,7 +3,7 @@

 # If your machine is behind a proxy, make sure you set it up in ~/.docker/config.json

-FROM ubuntu:22.10
+FROM ubuntu:22.04
 SHELL ["/bin/bash", "-o", "pipefail", "-c"]

 ARG INSTALL_SOURCES="yes"