Merge branch 'main' into sidk/milestonyr
siddharth-krishna committed Sep 15, 2023
2 parents 5e70e46 + ab4c510, commit fc7e147
Showing 8 changed files with 2,480 additions and 14 deletions.
.github/workflows/ci.yml (2 changes: 1 addition & 1 deletion)
@@ -25,7 +25,7 @@ jobs:
python -m venv .venv
source .venv/bin/activate
python -m pip install --upgrade pip
- python -m pip install .[dev]
+ python -m pip install -e .[dev]
- name: Check code formatting
working-directory: times-excel-reader
.gitignore (2 changes: 2 additions & 0 deletions)
@@ -4,6 +4,8 @@ build/
input/*
output/*
ground_truth/*
/.obsidian
/.spyproject
/.vs
/.vscode
/env*
README.md (1 change: 1 addition & 0 deletions)
@@ -27,6 +27,7 @@ We recommend installing the tool in editable mode (`-e`) in a Python virtual env
```bash
python3 -m venv .venv
source .venv/bin/activate
pip install -U pip
pip install -r requirements.txt
pip install -e .[dev]
```
times_reader/__main__.py (5 changes: 3 additions & 2 deletions)
@@ -77,13 +77,14 @@ def convert_xl_to_times(
transforms.generate_all_regions,
transforms.capitalise_attributes,
transforms.apply_fixups,
- transforms.extract_commodity_groups,
+ transforms.generate_commodity_groups,
transforms.fill_in_missing_pcgs,
transforms.generate_top_ire,
transforms.include_tables_source,
transforms.merge_tables,
transforms.apply_more_fixups,
transforms.process_years,
+ transforms.complete_commodity_groups,
transforms.process_uc_wildcards,
transforms.process_wildcards,
transforms.convert_aliases,
@@ -380,7 +381,7 @@ def main():
args_parser.add_argument("--use_pkl", action="store_true")
args = args_parser.parse_args()

- config = datatypes.Config("times_mapping.txt", "times-info.json")
+ config = datatypes.Config("times_mapping.txt", "times-info.json", "veda-tags.json")

if not isinstance(args.input, list) or len(args.input) < 1:
print(f"ERROR: expected at least 1 input. Got {args.input}")
times_reader/config/times_mapping.txt (8 changes: 4 additions & 4 deletions)
@@ -22,7 +22,7 @@ COM_PKFLX[REG,DATAYEAR,COM,TSLVL,VALUE] = ~FI_T(Region,Year,CommName,TimeSlice,V
COM_PKRSV[REG,DATAYEAR,COM,VALUE] = ~FI_T(Region,Year,CommName,VALUE,Attribute:COM_PKRSV)
COM_PKTS[REG,COM,TSLVL] = ~FI_Comm(Region,CommName,PeakTS)
COM_PROJ[REG,DATAYEAR,COM,VALUE] = ~FI_T(Region,Year,CommName,VALUE,Attribute:COM_PROJ)
- COM_STEP[REG,DATAYEAR,BD,VALUE] = ~FI_T(Region,Year,LimType,VALUE,Attribute:COM_STEP)
+ COM_STEP[REG,COM,BD,VALUE] = ~FI_T(Region,CommName,LimType,VALUE,Attribute:COM_STEP)
COM_TAXNET[REG,DATAYEAR,COM,TSLVL,CUR,VALUE] = ~FI_T(Region,Year,CommName,TimeSlice,Curr,VALUE,Attribute:COM_TAXNET)
COM_TMAP[REG,COM_TYPE,COM] = ~FI_Comm(Region,Csets,CommName)
COM_TSL[REG,COM,TSLVL] = ~FI_Comm(Region,CommName,CTSLvl)
@@ -70,7 +70,7 @@ NCAP_TLIFE[REG,DATAYEAR,PRC,VALUE] = ~FI_T(Region,Year,TechName,VALUE,Attribute:
NRG_TMAP[REG,NRG_TYPE,COM] = ~FI_Comm(Region,Ctype,CommName)
PASTYEAR[DATAYEAR,TEXT] = PastYear(Year,Year)
PRC[PRC] = ~FI_Process(TechName)
- PRC_ACTFLO[REG,DATAYEAR,PRC_GRP,PRC,VALUE] = ~FI_T(Region,Year,TechGroup,TechName,VALUE,Attribute:PRC_ACTFLO)
+ PRC_ACTFLO[REG,DATAYEAR,PRC,COM_GRP,VALUE] = ~FI_T(Region,Year,TechName,Other_Indexes,VALUE,Attribute:PRC_ACTFLO)
PRC_ACTUNT[REG,PRC,COM_GRP,UNITS] = ~FI_Process(Region,TechName,PrimaryCG,Tact)
PRC_CAPACT[REG,PRC,VALUE] = ~FI_T(Region,TechName,VALUE,Attribute:PRC_CAPACT)
PRC_DESC[REG,PRC,TEXT] = ~FI_Process(Region,TechName,TechDesc)
@@ -85,7 +85,7 @@ SHAPE[J,AGE,VALUE] = ~FI_T(Other_Indexes,Year,VALUE,Attribute:SHAPE)
STGIN_BND[REG,DATAYEAR,PRC,COM,TSLVL,BD,VALUE] = ~FI_T(Region,Year,TechName,CommName,TimeSlice,LimType,VALUE,Attribute:STGIN_BND)
STG_EFF[REG,DATAYEAR,PRC,VALUE] = ~FI_T(Region,Year,TechName,VALUE,Attribute:STG_EFF)
STG_LOSS[REG,DATAYEAR,PRC,TSLVL,VALUE] = ~FI_T(Region,Year,TechName,TimeSlice,VALUE,Attribute:STG_LOSS)
- TOP[REG,PRC,COM,IO] = COMM_GROUPS(Region,TechName,CommName,IO)
+ TOP[REG,PRC,COM,IO] = TOPOLOGY(Region,TechName,CommName,IO)
TOP_IRE[ALL_REG,COM,ALL_R,C,PRC] = TOP_IRE(Origin,IN,Destination,OUT,TechName)
TS_GROUP[REG,TSLVL,TS_GROUP] = TimeSlicesGroup(Region,TSLVL,TS_GROUP)
TS_MAP[REG,PARENT,TS_MAP] = TimeSliceMap(Region,Parent,TimesliceMap)
@@ -98,7 +98,7 @@ UC_FLO[UC_N,SIDE,REG,DATAYEAR,PRC,COM,TSLVL,VALUE] = ~TODO(UC_N,Side,Region,Year
UC_IRE[UC_N,SIDE,REG,DATAYEAR,PRC,COM,TSLVL,IMP,VALUE] = ~TODO(TODO)
UC_NCAP[UC_N,SIDE,REG,DATAYEAR,PRC,VALUE] = ~UC_T(UC_N,Side,Region,Year,TechName,VALUE,Attribute:UC_NCAP)
UC_N[UC_N,TEXT] = ~UC_T(UC_N,UC_Desc)
- UC_RHSRT[REG,UC_N,DATAYEAR,TSLVL,VALUE] = ~UC_T(Region,UC_N,Year,TimeSlice,VALUE,Attribute:UC_RHSRT)
+ UC_RHSRT[REG,UC_N,DATAYEAR,BD,VALUE] = ~UC_T(Region,UC_N,Year,LimType,VALUE,Attribute:UC_RHSRT)
UC_RHSRTS[REG,UC_N,DATAYEAR,TSLVL,BD,VALUE] = ~UC_T(Region,UC_N,Year,TimeSlice,LimType,VALUE,Attribute:UC_RHSRTS)
UC_RHSTS[UC_N,DATAYEAR,TSLVL,BD,VALUE] = ~UC_T(UC_N,Year,TimeSlice,LimType,VALUE,Attribute:UC_RHSTS)
UC_R_EACH[REG,UC_N] = ~TODO(Region,UC_N)