Commit: Apply ruff format 0.1.11
macflo8 committed Aug 22, 2024
1 parent bbb4523 commit 61179fa
Showing 4 changed files with 197 additions and 132 deletions.
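
The commit applies Ruff's formatter, pinned to a single release so that local runs and CI agree. A sketch of the usual invocation (an assumption; the repository's actual tooling setup is not shown on this page):

    pip install ruff==0.1.11
    ruff format message_ix_models/          # rewrite files in place
    ruff format --check message_ix_models/  # verify only, e.g. in CI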
177 changes: 97 additions & 80 deletions message_ix_models/model/material/data_aluminum.py
@@ -527,69 +527,74 @@ def gen_mock_demand_aluminum(scenario: message_ix.Scenario) -> pd.DataFrame:
 
     return demand2020_al
 
-def gen_data_alu_trade(scenario: message_ix.Scenario):
+
+def gen_data_alu_trade(scenario: message_ix.Scenario):
     results = defaultdict(list)
 
     data_trade = pd.read_csv(
-        package_data_path("material", "aluminum", "aluminum_trade_data.csv"))
+        package_data_path("material", "aluminum", "aluminum_trade_data.csv")
+    )
 
     data_trade.drop_duplicates()
 
     s_info = ScenarioInfo(scenario)
 
     modelyears = s_info.Y
     yv_ya = s_info.yv_ya
-    year_all =yv_ya["year_vtg"].unique()
+    year_all = yv_ya["year_vtg"].unique()
 
-    data_trade = data_trade[data_trade['Year'].isin(year_all)]
+    data_trade = data_trade[data_trade["Year"].isin(year_all)]
 
     # Divide R12_WEU as 0.7 WEU, 0.3 EEU.
-    data_trade.loc[(data_trade['Region']=='Europe'), 'Value'] *= 0.7
-    data_trade.loc[(data_trade['Region']=='Europe'), 'Region'] = 'West Europe'
+    data_trade.loc[(data_trade["Region"] == "Europe"), "Value"] *= 0.7
+    data_trade.loc[(data_trade["Region"] == "Europe"), "Region"] = "West Europe"
 
-    data_trade_eeu = data_trade[data_trade['Region'] == 'West Europe']
-    data_trade_eeu['Value'] *= (0.3 / 0.7)
-    data_trade_eeu['Region'] = 'East Europe'
+    data_trade_eeu = data_trade[data_trade["Region"] == "West Europe"]
+    data_trade_eeu["Value"] *= 0.3 / 0.7
+    data_trade_eeu["Region"] = "East Europe"
 
     data_trade = pd.concat([data_trade, data_trade_eeu])
 
     # Sum Japan and Oceania as PAO
 
-    condition = ((data_trade['Region'] == 'Japan') | (data_trade['Region'] == 'Oceania'))
+    condition = (data_trade["Region"] == "Japan") | (data_trade["Region"] == "Oceania")
     data_trade_pao = data_trade.loc[condition]
-    data_trade_pao = data_trade_pao.groupby(['Variable', 'Year'])['Value'].sum().reset_index()
+    data_trade_pao = (
+        data_trade_pao.groupby(["Variable", "Year"])["Value"].sum().reset_index()
+    )
 
-    data_trade_pao['Region'] = 'Pacific OECD'
+    data_trade_pao["Region"] = "Pacific OECD"
     data_trade = pd.concat([data_trade, data_trade_pao])
-    condition_updated = ((data_trade['Region'] == 'Japan') | (data_trade['Region'] == 'Oceania'))
+    condition_updated = (data_trade["Region"] == "Japan") | (
+        data_trade["Region"] == "Oceania"
+    )
     data_trade = data_trade.drop(data_trade[condition_updated].index)
 
     data_trade.reset_index(drop=True, inplace=True)
 
     # Divide Other Asia 50-50 to SAS and PAS
 
-    data_trade.loc[(data_trade['Region']=='Other Asia'), 'Value'] *= 0.5
-    data_trade.loc[(data_trade['Region']=='Other Asia'), 'Region'] = 'South Asia'
+    data_trade.loc[(data_trade["Region"] == "Other Asia"), "Value"] *= 0.5
+    data_trade.loc[(data_trade["Region"] == "Other Asia"), "Region"] = "South Asia"
 
-    data_trade_pas = data_trade[data_trade['Region'] == 'South Asia']
-    data_trade_pas['Region'] = 'Other Pacific Asia'
+    data_trade_pas = data_trade[data_trade["Region"] == "South Asia"]
+    data_trade_pas["Region"] = "Other Pacific Asia"
 
     data_trade = pd.concat([data_trade, data_trade_pas])
 
     # Divide Other Producing Regions 50-50s as Africa and FSU
 
-    data_trade.loc[(data_trade['Region']=='Other Producers'), 'Value'] *= 0.5
-    data_trade.loc[(data_trade['Region']=='Other Producers'), 'Region'] = 'Africa'
+    data_trade.loc[(data_trade["Region"] == "Other Producers"), "Value"] *= 0.5
+    data_trade.loc[(data_trade["Region"] == "Other Producers"), "Region"] = "Africa"
 
-    data_trade_fsu = data_trade[data_trade['Region'] == 'Africa']
-    data_trade_fsu['Region'] = 'Former Soviet Union'
+    data_trade_fsu = data_trade[data_trade["Region"] == "Africa"]
+    data_trade_fsu["Region"] = "Former Soviet Union"
 
     data_trade = pd.concat([data_trade, data_trade_fsu])
 
     # Drop non-producers
 
-    condition = data_trade['Region'] == 'Non Producers'
+    condition = data_trade["Region"] == "Non Producers"
     data_trade = data_trade.drop(data_trade[condition].index)
 
     s_info = ScenarioInfo(scenario)
@@ -601,99 +601,111 @@ def gen_data_alu_trade(scenario: message_ix.Scenario):
     region_tag = "R11_"
     china_mapping = "R11_CPA"
 
-    region_mapping = {
-        "China": china_mapping,
-        "West Europe": region_tag + "WEU",
-        "East Europe": region_tag + "EEU",
-        "Pacific OECD": region_tag + "PAO",
-        "South Asia": region_tag + "SAS",
-        "Other Pacific Asia": region_tag + "PAS",
-        "Africa": region_tag + "AFR",
-        "Former Soviet Union": region_tag + "FSU",
-        "Middle East": region_tag + "MEA",
-        "North America": region_tag + "NAM",
-        "South America": region_tag + "LAM"
-    }
+    region_mapping = {
+        "China": china_mapping,
+        "West Europe": region_tag + "WEU",
+        "East Europe": region_tag + "EEU",
+        "Pacific OECD": region_tag + "PAO",
+        "South Asia": region_tag + "SAS",
+        "Other Pacific Asia": region_tag + "PAS",
+        "Africa": region_tag + "AFR",
+        "Former Soviet Union": region_tag + "FSU",
+        "Middle East": region_tag + "MEA",
+        "North America": region_tag + "NAM",
+        "South America": region_tag + "LAM",
+    }
 
     # Add the data as historical_activity
 
     data_trade = data_trade.replace(region_mapping)
-    data_trade.rename(columns= {"Region":"node_loc",
-                                "Year": "year_act",
-                                "Value": "value"}, inplace = True)
+    data_trade.rename(
+        columns={"Region": "node_loc", "Year": "year_act", "Value": "value"},
+        inplace=True,
+    )
 
     # Trade is at the product level.
     # For imports this corresponds to: USE|Inputs|Imports
 
-    data_import = data_trade[data_trade["Variable"]=="USE|Inputs|Imports"]
-    data_import_hist = data_import[data_import['year_act']<=2015]
+    data_import = data_trade[data_trade["Variable"] == "USE|Inputs|Imports"]
+    data_import_hist = data_import[data_import["year_act"] <= 2015]
     data_import_hist["technology"] = "import_aluminum"
-    data_import_hist["mode"]= "M1"
-    data_import_hist["time"]= "year"
-    data_import_hist["unit"]= "-"
-    data_import_hist.drop(['Variable'], axis=1, inplace = True)
+    data_import_hist["mode"] = "M1"
+    data_import_hist["time"] = "year"
+    data_import_hist["unit"] = "-"
+    data_import_hist.drop(["Variable"], axis=1, inplace=True)
     data_import_hist.reset_index(drop=True)
 
     # For exports this corresponds to: MANUFACTURING|Outputs|Exports
 
-    data_export = data_trade[data_trade["Variable"]=="MANUFACTURING|Outputs|Exports"]
-    data_export_hist = data_export[data_export['year_act']<=2015]
+    data_export = data_trade[data_trade["Variable"] == "MANUFACTURING|Outputs|Exports"]
+    data_export_hist = data_export[data_export["year_act"] <= 2015]
     data_export_hist["technology"] = "export_aluminum"
-    data_export_hist["mode"]= "M1"
-    data_export_hist["time"]= "year"
-    data_export_hist["unit"]= "-"
-    data_export_hist.drop(['Variable'], axis=1, inplace = True)
+    data_export_hist["mode"] = "M1"
+    data_export_hist["time"] = "year"
+    data_export_hist["unit"] = "-"
+    data_export_hist.drop(["Variable"], axis=1, inplace=True)
    data_export_hist.reset_index(drop=True)
 
-    results['historical_activity'].append(data_export_hist)
-    results['historical_activity'].append(data_import_hist)
+    results["historical_activity"].append(data_export_hist)
+    results["historical_activity"].append(data_import_hist)
 
     # Add data as historical_new_capacity for export
 
-    for r in data_export_hist['node_loc'].unique():
-        df_hist_cap = data_export_hist[data_export_hist['node_loc'] == r]
-        df_hist_cap = df_hist_cap.sort_values(by='year_act')
-        df_hist_cap['value_difference'] = df_hist_cap['value'].diff()
-        df_hist_cap['value_difference'] = df_hist_cap['value_difference'].fillna(df_hist_cap['value'])
-        df_hist_cap['historical_new_capacity'] = df_hist_cap['value_difference'] / 5
+    for r in data_export_hist["node_loc"].unique():
+        df_hist_cap = data_export_hist[data_export_hist["node_loc"] == r]
+        df_hist_cap = df_hist_cap.sort_values(by="year_act")
+        df_hist_cap["value_difference"] = df_hist_cap["value"].diff()
+        df_hist_cap["value_difference"] = df_hist_cap["value_difference"].fillna(
+            df_hist_cap["value"]
+        )
+        df_hist_cap["historical_new_capacity"] = df_hist_cap["value_difference"] / 5
 
-        df_hist_cap = df_hist_cap.drop(columns=['mode', 'time', 'value',
-                                                'value_difference'], axis = 1)
-        df_hist_cap.rename(columns= {"historical_new_capacity":"value",
-                                     "year_act": "year_vtg"}, inplace = True)
+        df_hist_cap = df_hist_cap.drop(
+            columns=["mode", "time", "value", "value_difference"], axis=1
+        )
+        df_hist_cap.rename(
+            columns={"historical_new_capacity": "value", "year_act": "year_vtg"},
+            inplace=True,
+        )
 
-        df_hist_cap['value'] = df_hist_cap['value'].apply(lambda x: 0 if x < 0 else x)
-        df_hist_cap["unit"]= "-"
-        results['historical_new_capacity'].append(df_hist_cap)
+        df_hist_cap["value"] = df_hist_cap["value"].apply(lambda x: 0 if x < 0 else x)
+        df_hist_cap["unit"] = "-"
+        results["historical_new_capacity"].append(df_hist_cap)
 
     # For China fixing 2020 and 2025 values
 
-    import_chn = data_import[(data_import['year_act']==2020)
-                             & (data_import['node_loc']=="R12_CHN")]
+    import_chn = data_import[
+        (data_import["year_act"] == 2020) & (data_import["node_loc"] == "R12_CHN")
+    ]
 
-    export_chn = data_export[(data_export['year_act']==2020)
-                             & (data_export['node_loc']=="R12_CHN")]
+    export_chn = data_export[
+        (data_export["year_act"] == 2020) & (data_export["node_loc"] == "R12_CHN")
+    ]
 
     # Merge the DataFrames on 'node_loc' and 'year'
-    merged_df = pd.merge(import_chn, export_chn, on=['node_loc', 'year_act'],
-                         suffixes=('_import', '_export'))
+    merged_df = pd.merge(
+        import_chn,
+        export_chn,
+        on=["node_loc", "year_act"],
+        suffixes=("_import", "_export"),
+    )
 
     # Subtract the 'value_import' from 'value_export' to get net export value
-    merged_df['value'] = merged_df['value_export'] - merged_df['value_import']
+    merged_df["value"] = merged_df["value_export"] - merged_df["value_import"]
 
     # Select relevant columns for the final DataFrame
-    bound_act_net_export_chn = merged_df[['node_loc', 'year_act', 'value']]
+    bound_act_net_export_chn = merged_df[["node_loc", "year_act", "value"]]
 
     bound_act_net_export_chn["technology"] = "export_aluminum"
-    bound_act_net_export_chn["mode"]= "M1"
-    bound_act_net_export_chn["time"]= "year"
-    bound_act_net_export_chn["unit"]= "-"
+    bound_act_net_export_chn["mode"] = "M1"
+    bound_act_net_export_chn["time"] = "year"
+    bound_act_net_export_chn["unit"] = "-"
 
     bound_act_net_export_chn_2025 = bound_act_net_export_chn.replace({2020: 2025})
 
-    results['bound_activity_up'].append(bound_act_net_export_chn)
-    results['bound_activity_lo'].append(bound_act_net_export_chn)
-    results['bound_activity_up'].append(bound_act_net_export_chn_2025)
-    results['bound_activity_lo'].append(bound_act_net_export_chn_2025)
+    results["bound_activity_up"].append(bound_act_net_export_chn)
+    results["bound_activity_lo"].append(bound_act_net_export_chn)
+    results["bound_activity_up"].append(bound_act_net_export_chn_2025)
+    results["bound_activity_lo"].append(bound_act_net_export_chn_2025)
 
     return {par_name: pd.concat(dfs) for par_name, dfs in results.items()}
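
The node_loc loop in the hunk above derives historical_new_capacity from historical export activity: it takes year-on-year differences of activity, falls back to the activity level itself for the first period, spreads each difference over the 5-year period length, and clips negative additions to zero. A minimal standalone sketch of that calculation (illustrative numbers; the real values come from aluminum_trade_data.csv):

    import pandas as pd

    # Hypothetical export activity for a single node_loc.
    df = pd.DataFrame({"year_act": [2005, 2010, 2015], "value": [2.0, 3.5, 3.0]})

    df = df.sort_values(by="year_act")
    # Capacity added per period = growth in activity; the first entry
    # has no predecessor, so use the level itself.
    df["value_difference"] = df["value"].diff().fillna(df["value"])
    # Spread each addition over the 5-year period; declining exports
    # (negative differences) mean no new capacity.
    df["historical_new_capacity"] = (df["value_difference"] / 5).clip(lower=0)
    print(df)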
27 changes: 18 additions & 9 deletions message_ix_models/model/material/data_cement.py
@@ -4,7 +4,11 @@
 from message_ix import make_df
 
 from message_ix_models import ScenarioInfo
-from message_ix_models.model.material.data_util import read_sector_data, read_timeseries, calculate_ini_new_cap
+from message_ix_models.model.material.data_util import (
+    read_sector_data,
+    read_timeseries,
+    calculate_ini_new_cap,
+)
 from message_ix_models.model.material.material_demand import material_demand_calc
 from message_ix_models.model.material.util import get_ssp_from_context, read_config
 from message_ix_models.util import (
@@ -14,6 +18,7 @@
     same_node,
 )
 
+
 def gen_mock_demand_cement(scenario):
     s_info = ScenarioInfo(scenario)
     nodes = s_info.N
@@ -167,7 +172,7 @@ def gen_data_cement(scenario, dry_run=False):
 
     # Techno-economic assumptions
     # TEMP: now add cement sector as well
-    data_cement = read_sector_data(scenario, "cement", "Global_cement_MESSAGE.xlsx" )
+    data_cement = read_sector_data(scenario, "cement", "Global_cement_MESSAGE.xlsx")
     # Special treatment for time-dependent Parameters
     data_cement_ts = read_timeseries(scenario, "cement", "Global_cement_MESSAGE.xlsx")
     tec_ts = set(data_cement_ts.technology)  # set of tecs with var_cost
@@ -345,7 +350,9 @@ def gen_data_cement(scenario, dry_run=False):
 
     # Create external demand param
     parname = "demand"
-    df_demand = material_demand_calc.derive_demand("cement", scenario, old_gdp=False, ssp=ssp)
+    df_demand = material_demand_calc.derive_demand(
+        "cement", scenario, old_gdp=False, ssp=ssp
+    )
     results[parname].append(df_demand)
 
     # Add CCS as addon
@@ -354,12 +361,12 @@
     technology_1 = ["clinker_dry_cement"]
     df_1 = make_df(
         parname, mode="M1", type_addon="dry_ccs_cement", value=1, unit="-", **common
-    ).pipe(broadcast, node=nodes, technology= technology_1)
+    ).pipe(broadcast, node=nodes, technology=technology_1)
 
     technology_2 = ["clinker_wet_cement"]
     df_2 = make_df(
         parname, mode="M1", type_addon="wet_ccs_cement", value=1, unit="-", **common
-    ).pipe(broadcast, node=nodes, technology= technology_2)
+    ).pipe(broadcast, node=nodes, technology=technology_2)
 
     results[parname].append(df_1)
     results[parname].append(df_2)
@@ -370,12 +377,14 @@
     results["initial_new_capacity_up"] = pd.concat(
         [
             calculate_ini_new_cap(
-                df_demand=df_demand.copy(deep=True), technology="clinker_dry_ccs_cement",
-                material = "cement"
+                df_demand=df_demand.copy(deep=True),
+                technology="clinker_dry_ccs_cement",
+                material="cement",
             ),
             calculate_ini_new_cap(
-                df_demand=df_demand.copy(deep=True), technology="clinker_wet_ccs_cement",
-                material = "cement"
+                df_demand=df_demand.copy(deep=True),
+                technology="clinker_wet_ccs_cement",
+                material="cement",
             ),
         ]
     )
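
For reference, the make_df(...).pipe(broadcast, ...) pattern in the hunks above first builds a one-row parameter DataFrame from the dimension values common to all rows, then replicates it across the remaining dimensions. A sketch mirroring the calls as they appear in the diff (the parameter name and the contents of common are assumptions, since they are set outside the hunks shown):

    from message_ix import make_df
    from message_ix_models.util import broadcast

    parname = "addon_conversion"  # assumed; the actual name is set earlier in the file
    common = dict(year_vtg=2020, year_act=2020, time="year")  # hypothetical values
    nodes = ["R12_CHN", "R12_WEU"]  # hypothetical node list

    # One row with the shared values, broadcast over node and technology.
    df_1 = make_df(
        parname, mode="M1", type_addon="dry_ccs_cement", value=1, unit="-", **common
    ).pipe(broadcast, node=nodes, technology=["clinker_dry_cement"])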