Code cleanup
romainsacchi committed Jul 24, 2024
1 parent 682923f commit 94d19f2
Showing 6 changed files with 39 additions and 20 deletions.
14 changes: 9 additions & 5 deletions dev/timing.py
@@ -4,21 +4,25 @@

vars = [v for v in p.scenarios.coords["variables"].values if v.startswith("FE")]


p.calculate(
methods=[
"EF v3.1 EN15804 - climate change - global warming potential (GWP100)",
"EF v3.1 EN15804 - ecotoxicity: freshwater - comparative toxic unit for ecosystems (CTUe)",
],
] + [m for m in p.lcia_methods if "relics" in m.lower()][-3:],
regions=[
"CH",
],
scenarios=p.scenarios.pathway.values.tolist(),
years=[
2035,
2020,
2030,
2040,
2050
],
variables=vars,
use_distributions=3,
use_distributions=500,
subshares=True,
multiprocessing=False,
multiprocessing=True,
)

p.export_results()
9 changes: 8 additions & 1 deletion pathways/lca.py
@@ -356,6 +356,7 @@ def process_region(data: Tuple) -> dict[str, ndarray[Any, dtype[Any]] | list[int
id_uncertainty_indices = None
id_uncertainty_values = None
id_technosphere_indices = None
id_iter_results_array = None

if use_distributions == 0:
# Regular LCA calculations
@@ -374,7 +375,6 @@ def process_region(data: Tuple) -> dict[str, ndarray[Any, dtype[Any]] | list[int
iter_results, iter_param_vals = [], []
with CustomFilter("(almost) singular matrix"):
for iteration in range(use_distributions):
print(f"------ Iteration {iteration + 1}/{use_distributions}...")
next(lca)
lca.lci()

@@ -395,6 +395,12 @@ def process_region(data: Tuple) -> dict[str, ndarray[Any, dtype[Any]] | list[int
lci_results = np.array(iter_results)
lci_results = np.quantile(lci_results, [0.05, 0.5, 0.95], axis=0)

total_results = np.array(iter_results).sum(-1).sum(1)

# Save the iterations results to disk
id_iter_results_array = uuid.uuid4()
np.save(file=DIR_CACHED_DB / f"{id_iter_results_array}.npy", arr=total_results)

# Save the uncertainty indices and values to disk
id_uncertainty_indices = uuid.uuid4()
np.save(
@@ -430,6 +436,7 @@ def process_region(data: Tuple) -> dict[str, ndarray[Any, dtype[Any]] | list[int
d["uncertainty_params"] = id_uncertainty_indices
d["uncertainty_vals"] = id_uncertainty_values
d["technosphere_indices"] = id_technosphere_indices
d["iterations_results"] = id_iter_results_array

return d

16 changes: 11 additions & 5 deletions pathways/pathways.py
@@ -391,8 +391,6 @@ def _fill_in_result_array(
self, results: dict, use_distributions: int, shares: [None, dict], methods: list
) -> None:

# Assuming DIR_CACHED_DB, results, and self.lca_results are already defined

# Pre-loading data from disk if possible
cached_data = {
data["id_array"]: load_numpy_array_from_disk(
@@ -484,6 +482,15 @@ def _fill_in_result_array(
if region != "other"
}

iteration_results = {
data["iterations_results"]: load_numpy_array_from_disk(
DIR_CACHED_DB / f"{data['iterations_results']}.npy",
)
for coord, result in results.items()
for region, data in result.items()
if region != "other"
}

for coord, result in results.items():
model, scenario, year = coord

@@ -505,8 +512,7 @@ def _fill_in_result_array(
if region == "other":
continue

id_array = data["id_array"]
total_impacts = np.squeeze(cached_data[id_array]).sum(-1).sum(1)
total_impacts = iteration_results[data["iterations_results"]]

df_sum_impacts = pd.concat(
[
@@ -568,7 +574,7 @@ def _fill_in_result_array(
)
df_GSA.to_excel(writer, sheet_name="Global Sensitivity Analysis", index=False)

print(f"Statistical analysis: {export_path.resolve()}")
print(f"Statistical analysis: {export_path.resolve()}")

def display_results(self, cutoff: float = 0.001) -> xr.DataArray:
return display_results(self.lca_results, cutoff=cutoff)
12 changes: 7 additions & 5 deletions pathways/stats.py
@@ -274,7 +274,11 @@ def run_GSA_delta(
# merge uncertainty_values and technology_shares
# based on "iteration" and "region" columns

df_parameters = uncertainty_values.merge(technology_shares, on=["iteration", "region"])
if len(technology_shares) > 0:
df_parameters = uncertainty_values.merge(technology_shares, on=["iteration", "region"])
else:
df_parameters = uncertainty_values

parameters = [param for param in df_parameters.columns if param not in ["iteration", "region"]]

problem = {
@@ -286,14 +290,12 @@ def run_GSA_delta(
],
}

print(problem)

methods = [m for m in total_impacts.columns if m not in ["iteration", "region"]]

results = []

for method in methods:
param_values = df_parameters[params].values
param_values = df_parameters[parameters].values

# total impacts for the method
Y = total_impacts[method].values
@@ -302,7 +304,7 @@ def run_GSA_delta(

results.append([f"Delta Moment-Independent Measure for {method}"])
results.append(["Parameter", "Delta", "Delta Conf", "S1", "S1 Conf"])
for i, param in enumerate(params):
for i, param in enumerate(parameters):
results.append(
[
param,
4 changes: 2 additions & 2 deletions requirements.txt
@@ -1,12 +1,12 @@
numpy==1.24.0
numpy==1.24.4
pathlib
pandas
scipy
xarray
premise
pyyaml
bw_processing
bw2calc >= 2.0.dev17
bw2calc >= 2.0.dev18
scikit-umfpack
datapackage
pyprind
4 changes: 2 additions & 2 deletions setup.py
@@ -41,15 +41,15 @@ def package_files(directory):
# Might need to change the directory name as well.
include_package_data=True,
install_requires=[
"numpy==1.24.0",
"numpy==1.24.4",
"pathlib",
"pandas",
"xarray",
"scipy",
"premise",
"pyyaml",
"bw_processing",
"bw2calc>=2.0.dev17",
"bw2calc>=2.0.dev18",
"datapackage",
"pyprind",
"platformdirs",
