Chore: pre-commit autoupdate
pre-commit-ci[bot] committed Jan 13, 2025
1 parent c0aaa83 commit ddce8bd
Showing 7 changed files with 19 additions and 19 deletions.
2 changes: 1 addition & 1 deletion src/ITR/data/base_providers.py
@@ -2252,7 +2252,7 @@ def project_ei_targets(
     last_prod_value = production_proj.loc[last_ei_year]
     ei_projection_scopes[scope_name] = (
         ICompanyEIProjections(
-            ei_metric=f"{(last_em_value/last_prod_value).u:~P}",
+            ei_metric=f"{(last_em_value / last_prod_value).u:~P}",
             projections=self._get_bounded_projections(
                 model_ei_projections
             ),
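The only change in this hunk is formatter whitespace inside the f-string expression; behavior is identical. For context, the `:~P` spec it formats with is pint's abbreviated ("~") pretty-printed ("P") unit format. A minimal sketch on a plain registry (the real code assumes ITR's extended registry, which adds emissions units such as CO2e):

```python
# Sketch of the unit formatting used in ei_metric above, on a plain
# pint registry rather than ITR's extended one.
import pint

ureg = pint.UnitRegistry()
emissions = ureg.Quantity(3.0, "kg")
production = ureg.Quantity(2.0, "m**3")
intensity = emissions / production

# "~" abbreviates unit names, "P" pretty-prints; together: "kg/m³"
print(f"{intensity.u:~P}")
```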
6 changes: 3 additions & 3 deletions src/ITR/data/data_warehouse.py
@@ -187,7 +187,7 @@ def update_benchmarks(
     logger.info(
         f"new_production_bm calculating trajectories for {len(self.company_data.get_company_data())}"
         f"companies (times {len(EScope.get_scopes())} scopes times "
-        f"{cd_pc.TARGET_YEAR-cd_pc.BASE_YEAR} years)"
+        f"{cd_pc.TARGET_YEAR - cd_pc.BASE_YEAR} years)"
     )
     self.company_data._validate_projected_trajectories(
         self.company_data.get_company_data(), self.benchmarks_projected_ei
@@ -215,7 +215,7 @@ def update_benchmarks(
     cd_pc = self.company_data.get_projection_controls()
     logger.info(
         f"projecting targets for {len(self.company_data.get_company_data())} companies "
-        f"(times {len(EScope.get_scopes())} scopes times {cd_pc.TARGET_YEAR-cd_pc.BASE_YEAR} years)"
+        f"(times {len(EScope.get_scopes())} scopes times {cd_pc.TARGET_YEAR - cd_pc.BASE_YEAR} years)"
     )
     self.company_data._calculate_target_projections(
         benchmark_projected_production, benchmarks_projected_ei
@@ -418,7 +418,7 @@ def update_trajectories(self):
     logger.info(
         f"re-calculating trajectories for {len(self.company_data._companies)} companies"
         f"\n (times {len(EScope.get_scopes())} scopes times "
-        f"{self.company_data.projection_controls.TARGET_YEAR-self.company_data.projection_controls.BASE_YEAR} years)"
+        f"{self.company_data.projection_controls.TARGET_YEAR - self.company_data.projection_controls.BASE_YEAR} years)"
     )
     for company in self.company_data._companies:
         company.projected_intensities = None
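All three hunks in this file make the same change: binary operators inside f-string replacement fields gain surrounding spaces. That matches a formatter release that began formatting expressions inside f-strings (ruff 0.9, released shortly before this commit, does so by default); the hook bump itself is not shown in this diff, so the exact version is an assumption. A runnable before/after sketch with hypothetical projection-control values:

```python
# Hypothetical stand-in for the cd_pc projection controls used above.
class ProjectionControls:
    BASE_YEAR = 2019
    TARGET_YEAR = 2050

cd_pc = ProjectionControls()

# Before the update, the formatter left f-string expressions untouched:
before = f"{cd_pc.TARGET_YEAR-cd_pc.BASE_YEAR} years"
# After, operators inside replacement fields get standard spacing:
after = f"{cd_pc.TARGET_YEAR - cd_pc.BASE_YEAR} years"

assert before == after == "31 years"  # formatting only; values are unchanged
```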
6 changes: 3 additions & 3 deletions src/ITR/data/osc_units.py
@@ -593,9 +593,9 @@ def Quantity_type(units: str) -> type:

     def validate(value, units, info):
         quantity = to_Quantity(value)
-        assert quantity.is_compatible_with(
-            units
-        ), f"Units of {value} incompatible with {units}"
+        assert quantity.is_compatible_with(units), (
+            f"Units of {value} incompatible with {units}"
+        )
         return quantity

     def __get_pydantic_core_schema__(source_type: Any) -> CoreSchema:
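Here only the assert's layout changes: the formatter now keeps the call on one line and parenthesizes the message. The check itself, pint's `is_compatible_with`, compares dimensionality rather than exact units, so any unit convertible to the declared one validates. A standalone sketch:

```python
import pint

ureg = pint.UnitRegistry()
quantity = ureg.Quantity(100.0, "g")

# Same dimension (mass), different unit: compatible, validation passes.
assert quantity.is_compatible_with("kg")

# Different dimension (time): incompatible, validation would raise.
assert not quantity.is_compatible_with("s")
```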
8 changes: 4 additions & 4 deletions src/ITR/data/template.py
@@ -692,7 +692,7 @@ def convert_prefix_to_scalar(x):
         )
         if x.currency_tuple[0] != "USD"
         else fx_ctx.redefine(
-            f"{x.fx_quote_tuple[0]} = {x.currency_tuple[1]/(x.fx_rate * x.fx_quote_tuple[1])} {x.currency_tuple[0]}"
+            f"{x.fx_quote_tuple[0]} = {x.currency_tuple[1] / (x.fx_rate * x.fx_quote_tuple[1])} {x.currency_tuple[0]}"
         )
     ),
     axis=1,
@@ -1830,9 +1830,9 @@ def _company_df_to_model(
     # FIXME: Is this the best place to finalize base_year_production, ghg_s1s2, and ghg_s3 data?
     # Something tells me these parameters should be removed in favor of querying historical data directly
     company_data[ColumnsConfig.BASE_YEAR_PRODUCTION] = (
-        df_historic_data.loc[
-            company_id, "Productions", "production"
-        ][base_year]
+        df_historic_data.loc[company_id, "Productions", "production"][
+            base_year
+        ]
     )
     try:
         company_data[ColumnsConfig.GHG_SCOPE12] = df_historic_data.loc[
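The second hunk only re-wraps a chained lookup. The pattern it wraps — selecting one row of `df_historic_data` by its (company_id, category, metric) MultiIndex tuple, then taking a single year's value — is sketched below with a hypothetical two-year frame:

```python
import pandas as pd

# Hypothetical frame shaped like df_historic_data: rows keyed by a
# (company_id, category, metric) MultiIndex, columns keyed by year.
index = pd.MultiIndex.from_tuples(
    [("US0001", "Productions", "production")],
    names=["company_id", "category", "metric"],
)
df_historic_data = pd.DataFrame({2019: [1000.0], 2020: [1100.0]}, index=index)

# The full index tuple selects the row; the trailing [year] picks one value.
base_year_production = df_historic_data.loc[
    "US0001", "Productions", "production"
][2019]
print(base_year_production)  # 1000.0
```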
4 changes: 2 additions & 2 deletions src/ITR/data/vault_providers.py
@@ -254,7 +254,7 @@ def read_quantified_sql(
     ]
     for col_tuple in extra_unit_columns_positions:
         logger.error(
-            f"Missing units column '{col_tuple[2]}' after original column '{sql_df.columns[col_tuple[1]]}' (should be column #{col_tuple[0]+col_tuple[1]+1} in new query)"  # noqa: E501
+            f"Missing units column '{col_tuple[2]}' after original column '{sql_df.columns[col_tuple[1]]}' (should be column #{col_tuple[0] + col_tuple[1] + 1} in new query)"  # noqa: E501
         )
         raise ValueError
     return requantify_df(sql_df).convert_dtypes()
@@ -1058,7 +1058,7 @@ def __init__(
     regexp_replace(regexp_replace(concat(ET.ei_{scope}_by_year_units, ' * ', P.production_by_year_units),
     '{re_simplify_units_both}', ''), '{re_simplify_units_one}', '')) as cumulative_target_units,
     '{scope.upper()}' as scope
-    from {emissions_from_tables.replace('SCOPE', scope)}
+    from {emissions_from_tables.replace("SCOPE", scope)}
     """
     for scope in map(str.lower, EScope.get_scopes())
 ]
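The second hunk only normalizes the quotes nested inside the triple-quoted f-string to the project's double-quote style, another effect of the formatter now reaching inside f-strings. The templating pattern itself, one SQL fragment with a `SCOPE` placeholder expanded once per scope, can be sketched with hypothetical table names (the real names and join come from the vault schema):

```python
# Hypothetical stand-ins: real scope names come from EScope.get_scopes(),
# and the real join is over the vault's emissions/production tables.
scopes = ["S1", "S2", "S3"]
emissions_from_tables = (
    "emissions_SCOPE_data ET join production_data P"
    " on ET.company_id = P.company_id"
)

queries = [
    f"select '{scope.upper()}' as scope"
    f" from {emissions_from_tables.replace('SCOPE', scope)}"
    for scope in map(str.lower, scopes)
]
print(queries[0])
# select 'S1' as scope from emissions_s1_data ET join production_data P ...
```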
6 changes: 3 additions & 3 deletions src/ITR/utils.py
@@ -132,9 +132,9 @@ def get_data(
     s3_data_invalid = df_company_data[ColumnsConfig.GHG_SCOPE3].isna()
     if len(s3_data_invalid[s3_data_invalid].index) > 0:
         df_company_data.loc[s3_data_invalid, ColumnsConfig.GHG_SCOPE3] = (
-            df_company_data.loc[
-                s3_data_invalid, ColumnsConfig.GHG_SCOPE3
-            ].map(lambda x: Q_(np.nan, "Mt CO2e"))
+            df_company_data.loc[s3_data_invalid, ColumnsConfig.GHG_SCOPE3].map(
+                lambda x: Q_(np.nan, "Mt CO2e")
+            )
         )
     for col in [
         ColumnsConfig.GHG_SCOPE3,
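This hunk re-wraps the fill of missing Scope 3 values. The point of the pattern is that the replacement NaN still carries units (`Q_(np.nan, "Mt CO2e")`), so unit-aware arithmetic downstream keeps working. A sketch on a plain pint registry (which lacks ITR's CO2e unit, so bare `Mt` stands in):

```python
import numpy as np
import pandas as pd
import pint

ureg = pint.UnitRegistry()
Q_ = ureg.Quantity  # stand-in for ITR's Q_ constructor

s3 = pd.Series([Q_(1.2, "Mt"), None, Q_(0.4, "Mt")], dtype=object)
invalid = s3.isna()

# Replace bare missing values with a NaN that still carries mass units.
s3.loc[invalid] = s3.loc[invalid].map(lambda _: Q_(np.nan, "Mt"))
print(f"{s3[1]:~P}")  # nan Mt (missing, but still unit-tagged)
```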
6 changes: 3 additions & 3 deletions test/test_vault_providers.py
@@ -111,9 +111,9 @@ def _get_base_ei(filename: str) -> BaseProviderIntensityBenchmark:


 @pytest.fixture(scope="session")
-def base_benchmarks() -> (
-    Tuple[BaseProviderProductionBenchmark, BaseProviderIntensityBenchmark]
-):
+def base_benchmarks() -> Tuple[
+    BaseProviderProductionBenchmark, BaseProviderIntensityBenchmark
+]:
     benchmark_dict: Dict[str, Future] = {}
     with concurrent.futures.ThreadPoolExecutor() as executor:
         future_to_benchmark = {
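The formatter change here just moves the line break inside the `Tuple[...]` subscript instead of parenthesizing the whole return annotation. The surrounding pattern, a session-scoped fixture that builds both benchmarks once, in parallel, and hands them to every test, sketched with hypothetical loaders in place of the real benchmark constructors:

```python
import concurrent.futures
from typing import Tuple

import pytest

def _load_production_benchmark() -> dict:
    return {"kind": "production"}  # hypothetical stand-in loader

def _load_intensity_benchmark() -> dict:
    return {"kind": "intensity"}  # hypothetical stand-in loader

@pytest.fixture(scope="session")
def base_benchmarks() -> Tuple[dict, dict]:
    # scope="session": built once, then shared by every test that requests it.
    with concurrent.futures.ThreadPoolExecutor() as executor:
        prod = executor.submit(_load_production_benchmark)
        ei = executor.submit(_load_intensity_benchmark)
        return prod.result(), ei.result()
```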
