From 66d0ff5bf2b85f5cdd498d1e5b313389d2790e25 Mon Sep 17 00:00:00 2001 From: Samuel Letellier-Duchesne Date: Wed, 6 Apr 2022 14:36:58 -0400 Subject: [PATCH] Summarize EndUses for any IDF model (#225) * adds IDF.copy() and IDF.saveas(inplace=True) (#254) * Adjusts svg repr to the min/max values of the schedule (#255) * Graceful warning when Slab or Basement program is not found * Adds KeyBoardInterupt to IDF Thread * catches more variations of unit name * Adds ability to scale a schedule * Fixes fallback limits for Schedule.plot2d() when Type is not defined * Type can be specified in Schedule.from_values constructor * plot2d is prettier by default * more Typing * Return existing object when new_object is there (#257) * Adds ability to replace schedule values without affecting the full load hours * more robust IDF.name property * Keep sim files when error occurs (#276) * Better Output class for IDF model * Added tests for Outputs * more tests * Parse existing outputs in IDF model when initializing class * Backwards compatibility with E+ <= 7.2 and meters objects * Latest transitioned object more robust * eplus_interface looks in /bin when version is lower or equal to E+ 7.2 * New method: add_idf_object_from_idf_string * code format * Adds enduse balance class * Adds default output variables and meters to Output class * Towards working characterization * removed zone predicted load and temp * adds cooling routine to sankey * Name convention * Fixes the window gains and losses * Fixes an issue reading multipliers * Fixes names in sankey * better * Collect by outputs * Revert "Collect by outputs" This reverts commit 121549fc252835c7e72747248828702e113ccaea. 
* multipliers * Current state * include html and sql by default * add missing outputs to method * Added construction table to Sql * adds storage outputs * Uses outside face storage * Fix issue with getattr * Fixes a test * isort+black * Removes IDF and SQL by default * Fix test * Fix test with failing transition * Fixes tests * Fixes test * fix test --- archetypal/eplus_interface/basement.py | 36 +- archetypal/eplus_interface/energy_plus.py | 2 +- archetypal/eplus_interface/expand_objects.py | 23 +- archetypal/eplus_interface/slab.py | 36 +- archetypal/eplus_interface/transition.py | 13 +- archetypal/eplus_interface/version.py | 12 +- archetypal/idfclass/end_use_balance.py | 1122 ++++++++++++++++++ archetypal/idfclass/extensions.py | 7 + archetypal/idfclass/idf.py | 194 ++- archetypal/idfclass/load_balance.py | 257 ++++ archetypal/idfclass/meters.py | 5 +- archetypal/idfclass/outputs.py | 779 ++++++++---- archetypal/idfclass/sql.py | 496 ++++++++ archetypal/idfclass/util.py | 6 +- archetypal/idfclass/variables.py | 69 +- archetypal/schedule.py | 126 +- archetypal/settings.py | 7 +- archetypal/umi_template.py | 1 + tests/test_end_use_balance.py | 73 ++ tests/test_idfclass.py | 36 +- tests/test_outputs.py | 62 + tests/test_schedules.py | 22 + tests/test_template.py | 8 +- 23 files changed, 2991 insertions(+), 401 deletions(-) create mode 100644 archetypal/idfclass/end_use_balance.py create mode 100644 archetypal/idfclass/load_balance.py create mode 100644 archetypal/idfclass/sql.py create mode 100644 tests/test_end_use_balance.py create mode 100644 tests/test_outputs.py diff --git a/archetypal/eplus_interface/basement.py b/archetypal/eplus_interface/basement.py index a69cb531..30b2912a 100644 --- a/archetypal/eplus_interface/basement.py +++ b/archetypal/eplus_interface/basement.py @@ -57,11 +57,17 @@ def run(self): # Get executable using shutil.which (determines the extension based on # the platform, eg: .exe. 
And copy the executable to tmp - self.basement_exe = Path( - shutil.which( - "Basement", path=self.eplus_home / "PreProcess" / "GrndTempCalc" + basemenet_exe = shutil.which( + "Basement", path=self.eplus_home / "PreProcess" / "GrndTempCalc" + ) + if basemenet_exe is None: + log( + f"The Basement program could not be found at " + f"'{self.eplus_home / 'PreProcess' / 'GrndTempCalc'}'", + lg.WARNING, ) - ).copy(self.run_dir) + return + self.basement_exe = Path(basemenet_exe).copy(self.run_dir) self.basement_idd = ( self.eplus_home / "PreProcess" / "GrndTempCalc" / "BasementGHT.idd" ).copy(self.run_dir) @@ -207,11 +213,19 @@ def cancelled_callback(self, stdin, stdout): @property def eplus_home(self): - eplus_exe, eplus_home = paths_from_version(self.idf.as_version.dash) - if not Path(eplus_home).exists(): - raise EnergyPlusVersionError( - msg=f"No EnergyPlus Executable found for version " - f"{EnergyPlusVersion(self.idf.as_version)}" - ) + """Get the version-dependant directory where executables are installed.""" + if self.idf.file_version <= EnergyPlusVersion("7.2"): + install_dir = self.idf.file_version.current_install_dir / "bin" else: - return Path(eplus_home) + install_dir = ( + self.idf.file_version.current_install_dir + / "PreProcess" + / "GrndTempCalc" + ) + return install_dir + + def stop(self): + if self.p.poll() is None: + self.msg_callback("Attempting to cancel simulation ...") + self.cancelled = True + self.p.kill() diff --git a/archetypal/eplus_interface/energy_plus.py b/archetypal/eplus_interface/energy_plus.py index 1c9abd80..100115ce 100644 --- a/archetypal/eplus_interface/energy_plus.py +++ b/archetypal/eplus_interface/energy_plus.py @@ -270,7 +270,7 @@ def failure_callback(self): with open(error_filename, "r") as stderr: stderr_r = stderr.read() if self.idf.keep_data_err: - failed_dir = self.idf.simulation_dir.mkdir_p() / "failed" + failed_dir = self.idf.simulation_dir.mkdir_p() try: failed_dir.rmtree_p() except PermissionError as e: diff --git 
a/archetypal/eplus_interface/expand_objects.py b/archetypal/eplus_interface/expand_objects.py index 710920f0..bf2d8075 100644 --- a/archetypal/eplus_interface/expand_objects.py +++ b/archetypal/eplus_interface/expand_objects.py @@ -59,9 +59,8 @@ def run(self): self.epw = self.idf.epw.copy(tmp / "in.epw").expand() self.idfname = Path(self.idf.savecopy(tmp / "in.idf")).expand() self.idd = self.idf.iddname.copy(tmp / "Energy+.idd").expand() - self.expandobjectsexe = Path( - shutil.which("ExpandObjects", path=self.eplus_home.expand()) - ).copy2(tmp) + expand_object_exe = shutil.which("ExpandObjects", path=self.eplus_home) + self.expandobjectsexe = Path(expand_object_exe).copy2(tmp) self.run_dir = Path(tmp).expand() # Run ExpandObjects Program @@ -151,11 +150,15 @@ def cancelled_callback(self, stdin, stdout): @property def eplus_home(self): - eplus_exe, eplus_home = paths_from_version(self.idf.as_version.dash) - if not Path(eplus_home).exists(): - raise EnergyPlusVersionError( - msg=f"No EnergyPlus Executable found for version " - f"{EnergyPlusVersion(self.idf.as_version)}" - ) + """Get the version-dependant directory where executables are installed.""" + if self.idf.file_version <= EnergyPlusVersion("7.2"): + install_dir = self.idf.file_version.current_install_dir / "bin" else: - return Path(eplus_home) + install_dir = self.idf.file_version.current_install_dir + return install_dir + + def stop(self): + if self.p.poll() is None: + self.msg_callback("Attempting to cancel simulation ...") + self.cancelled = True + self.p.kill() diff --git a/archetypal/eplus_interface/slab.py b/archetypal/eplus_interface/slab.py index 56a2e53d..395c2a63 100644 --- a/archetypal/eplus_interface/slab.py +++ b/archetypal/eplus_interface/slab.py @@ -58,9 +58,17 @@ def run(self): # Get executable using shutil.which (determines the extension based on # the platform, eg: .exe. 
And copy the executable to tmp - self.slabexe = Path( - shutil.which("Slab", path=self.eplus_home / "PreProcess" / "GrndTempCalc") - ).copy(self.run_dir) + slab_exe = shutil.which( + "Slab", path=self.eplus_home / "PreProcess" / "GrndTempCalc" + ) + if slab_exe is None: + log( + f"The Slab program could not be found at " + f"'{self.eplus_home / 'PreProcess' / 'GrndTempCalc'}'", + lg.WARNING, + ) + return + self.slabexe = Path(slab_exe).copy(self.run_dir) self.slabidd = ( self.eplus_home / "PreProcess" / "GrndTempCalc" / "SlabGHT.idd" ).copy(self.run_dir) @@ -164,11 +172,19 @@ def cancelled_callback(self, stdin, stdout): @property def eplus_home(self): - eplus_exe, eplus_home = paths_from_version(self.idf.as_version.dash) - if not Path(eplus_home).exists(): - raise EnergyPlusVersionError( - msg=f"No EnergyPlus Executable found for version " - f"{EnergyPlusVersion(self.idf.as_version)}" - ) + """Get the version-dependant directory where executables are installed.""" + if self.idf.file_version <= EnergyPlusVersion("7.2"): + install_dir = self.idf.file_version.current_install_dir / "bin" else: - return Path(eplus_home) + install_dir = ( + self.idf.file_version.current_install_dir + / "PreProcess" + / "GrndTempCalc" + ) + return install_dir + + def stop(self): + if self.p.poll() is None: + self.msg_callback("Attempting to cancel simulation ...") + self.cancelled = True + self.p.kill() diff --git a/archetypal/eplus_interface/transition.py b/archetypal/eplus_interface/transition.py index 42e75331..dc334250 100644 --- a/archetypal/eplus_interface/transition.py +++ b/archetypal/eplus_interface/transition.py @@ -171,6 +171,9 @@ def run(self): generator = TransitionExe(self.idf, tmp_dir=tmp) + # set the initial version from which we are transitioning + last_successful_transition = self.idf.file_version + for trans in tqdm( generator, total=len(generator.transitions), @@ -214,10 +217,14 @@ def run(self): time.time() - start_time ) ) + last_successful_transition = trans.trans 
self.success_callback() for line in self.p.stderr: self.msg_callback(line.decode("utf-8")) else: + # set the version of the IDF the latest it was able to transition + # to. + self.idf.as_version = last_successful_transition self.msg_callback("Transition failed") self.failure_callback() @@ -275,7 +282,9 @@ def failure_callback(self): """Read stderr and pass to logger.""" for line in self.p.stderr: self.msg_callback(line.decode("utf-8"), level=lg.ERROR) - raise CalledProcessError(self.p.returncode, cmd=self.cmd, stderr=self.p.stderr) + self.exception = CalledProcessError( + self.p.returncode, cmd=self.cmd, stderr=self.p.stderr + ) def cancelled_callback(self, stdin, stdout): """Call on cancelled.""" @@ -286,7 +295,7 @@ def eplus_home(self): """Return the location of the EnergyPlus directory.""" eplus_exe, eplus_home = paths_from_version(self.idf.as_version.dash) if not Path(eplus_home).exists(): - raise EnergyPlusVersionError( + self.exception = EnergyPlusVersionError( msg=f"No EnergyPlus Executable found for version " f"{EnergyPlusVersion(self.idf.as_version)}" ) diff --git a/archetypal/eplus_interface/version.py b/archetypal/eplus_interface/version.py index 085ebaf3..d87e9aa1 100644 --- a/archetypal/eplus_interface/version.py +++ b/archetypal/eplus_interface/version.py @@ -10,6 +10,7 @@ from archetypal import settings from archetypal.eplus_interface.exceptions import ( + EnergyPlusVersionError, InvalidEnergyPlusVersion, ) @@ -109,7 +110,12 @@ def current_idd_path(self): @property def current_install_dir(self): """Get the current installation directory for this EnergyPlus version.""" - return self.install_locations[self.dash] + try: + return self.install_locations[self.dash] + except KeyError: + raise EnergyPlusVersionError( + f"EnergyPlusVersion {self.dash} is not installed." 
+ ) @property def tuple(self) -> tuple: @@ -121,7 +127,9 @@ def valid_versions(self) -> set: """List the idd file version found on this machine.""" if not self.valid_idd_paths: # Little hack in case E+ is not installed - _choices = {settings.ep_version,} + _choices = { + settings.ep_version, + } else: _choices = set(self.valid_idd_paths.keys()) diff --git a/archetypal/idfclass/end_use_balance.py b/archetypal/idfclass/end_use_balance.py new file mode 100644 index 00000000..807144e1 --- /dev/null +++ b/archetypal/idfclass/end_use_balance.py @@ -0,0 +1,1122 @@ +from sqlite3 import connect + +import numpy as np +import pandas as pd +from energy_pandas import EnergyDataFrame +from energy_pandas.units import unit_registry + +from archetypal.idfclass.sql import Sql + + +class EndUseBalance: + HVAC_INPUT_SENSIBLE = ( # not multiplied by zone or group multipliers + "Zone Air Heat Balance System Air Transfer Rate", + "Zone Air Heat Balance System Convective Heat Gain Rate", + ) + HVAC_INPUT_HEATED_SURFACE = ( + "Zone Radiant HVAC Heating Energy", + "Zone Ventilated Slab Radiant Heating Energy", + ) + HVAC_INPUT_COOLED_SURFACE = ( + "Zone Radiant HVAC Cooling Energy", + "Zone Ventilated Slab Radiant Cooling Energy", + ) + LIGHTING = ("Zone Lights Total Heating Energy",) # checked + EQUIP_GAINS = ( # checked + "Zone Electric Equipment Radiant Heating Energy", + "Zone Gas Equipment Radiant Heating Energy", + "Zone Steam Equipment Radiant Heating Energy", + "Zone Hot Water Equipment Radiant Heating Energy", + "Zone Other Equipment Radiant Heating Energy", + "Zone Electric Equipment Convective Heating Energy", + "Zone Gas Equipment Convective Heating Energy", + "Zone Steam Equipment Convective Heating Energy", + "Zone Hot Water Equipment Convective Heating Energy", + "Zone Other Equipment Convective Heating Energy", + ) + PEOPLE_GAIN = ("Zone People Sensible Heating Energy",) # checked, Todo: +latent + SOLAR_GAIN = ("Zone Windows Total Transmitted Solar Radiation Energy",) # 
checked + INFIL_GAIN = ( + "Zone Infiltration Sensible Heat Gain Energy", # checked + # "Zone Infiltration Latent Heat Gain Energy", + "AFN Zone Infiltration Sensible Heat Gain Energy", + # "AFN Zone Infiltration Latent Heat Gain Energy", + ) + INFIL_LOSS = ( + "Zone Infiltration Sensible Heat Loss Energy", # checked + # "Zone Infiltration Latent Heat Loss Energy", + "AFN Zone Infiltration Sensible Heat Loss Energy", + # "AFN Zone Infiltration Latent Heat Loss Energy", + ) + VENTILATION_LOSS = ("Zone Air System Sensible Heating Energy",) + VENTILATION_GAIN = ("Zone Air System Sensible Cooling Energy",) + NAT_VENT_GAIN = ( + # "Zone Ventilation Total Heat Gain Energy", + "Zone Ventilation Sensible Heat Gain Energy", + # "Zone Ventilation Latent Heat Gain Energy", + "AFN Zone Ventilation Sensible Heat Gain Energy", + # "AFN Zone Ventilation Latent Heat Gain Energy", + ) + NAT_VENT_LOSS = ( + # "Zone Ventilation Total Heat Loss Energy", + "Zone Ventilation Sensible Heat Loss Energy", + # "Zone Ventilation Latent Heat Loss Energy", + "AFN Zone Ventilation Sensible Heat Loss Energy", + # "AFN Zone Ventilation Latent Heat Loss Energy", + ) + MECHANICAL_VENT_LOSS = ( + "Zone Mechanical Ventilation No Load Heat Removal Energy", + "Zone Mechanical Ventilation Heating Load Increase Energy", + "Zone Mechanical Ventilation Cooling Load Decrease Energy", + ) + MECHANICAL_VENT_GAIN = ( + "Zone Mechanical Ventilation No Load Heat Addition Energy", + "Zone Mechanical Ventilation Heating Load Decrease Energy", + "Zone Mechanical Ventilation Cooling Load Increase Energy", + ) + OPAQUE_ENERGY_FLOW = ("Surface Outside Face Conduction Heat Transfer Energy",) + OPAQUE_ENERGY_STORAGE = ("Surface Heat Storage Energy",) + WINDOW_LOSS = ("Zone Windows Total Heat Loss Energy",) # checked + WINDOW_GAIN = ("Zone Windows Total Heat Gain Energy",) # checked + HEAT_RECOVERY_LOSS = ("Heat Exchanger Total Cooling Energy",) + HEAT_RECOVERY_GAIN = ("Heat Exchanger Total Heating Energy",) + + def 
__init__( + self, + sql_file, + cooling, + heating, + lighting, + electric_equip, + gas_equip, + how_water, + people_gain, + solar_gain, + infiltration, + mech_vent, + nat_vent, + window_energy_flow, + opaque_flow, + opaque_storage, + window_flow, + heat_recovery, + is_cooling, + is_heating, + units="J", + use_all_solar=True, + ): + self.sql_file = sql_file + self.cooling = cooling + self.heating = heating + self.lighting = lighting + self.electric_equip = electric_equip + self.gas_equip = gas_equip + self.hot_water = how_water + self.people_gain = people_gain + self.solar_gain = solar_gain + self.infiltration = infiltration + self.mech_vent = mech_vent + self.nat_vent = nat_vent + self.window_energy_flow = window_energy_flow + self.opaque_flow = opaque_flow + self.opaque_storage = opaque_storage + self.window_flow = window_flow + self.heat_recovery = heat_recovery + self.units = units + self.use_all_solar = use_all_solar + self.is_cooling = is_cooling + self.is_heating = is_heating + + @classmethod + def from_idf(cls, idf, units="kWh", power_units="kW", outdoor_surfaces_only=True): + assert ( + idf.sql_file.exists() + ), "Expected an IDF model with simulation results. Run `IDF.simulate()` first." 
+ # get all of the results relevant for gains and losses + _hvac_input = idf.variables.OutputVariable.collect_by_output_name( + cls.HVAC_INPUT_SENSIBLE, + reporting_frequency=idf.outputs.reporting_frequency, + units=power_units, + ) + # _hvac_input = cls.apply_multipliers(_hvac_input, idf) # zone-level output + + _hvac_input_heated_surface = ( + idf.variables.OutputVariable.collect_by_output_name( + cls.HVAC_INPUT_HEATED_SURFACE, + reporting_frequency=idf.outputs.reporting_frequency, + units=units, + ) + ) + # zone-level output + # _hvac_input_heated_surface = cls.apply_multipliers(_hvac_input_heated_surface, idf) + + _hvac_input_cooled_surface = ( + idf.variables.OutputVariable.collect_by_output_name( + cls.HVAC_INPUT_COOLED_SURFACE, + reporting_frequency=idf.outputs.reporting_frequency, + units=units, + ) + ) + # _hvac_input_cooled_surface = cls.apply_multipliers(_hvac_input_cooled_surface, idf) # zone-level output + + # convert power to energy assuming the reporting frequency + freq = pd.infer_freq( + _hvac_input.iloc[ + :3, + ].index + ) + assert freq == "H", "A reporting frequency other than H is not yet supported." + freq_to_unit = {"H": "hr"} + _hvac_input = _hvac_input.apply( + lambda row: unit_registry.Quantity( + row.values, + unit_registry(power_units) * unit_registry(freq_to_unit[freq]), + ) + .to(units) + .m + ) + + # concat sensible hvac with heated surfaces + _hvac_input = pd.concat( + filter( + lambda x: not x.empty, + [ + _hvac_input, + EndUseBalance.subtract_cooled_from_heated_surface( + _hvac_input_cooled_surface, _hvac_input_heated_surface + ), + ], + ), + axis=1, + verify_integrity=True, + ) + + # compute rolling sign for each zone (determines if zone is in heating or + # cooling model.) 
+ rolling_sign = cls.get_rolling_sign_change(_hvac_input) + + # Create both heating and cooling masks + is_heating = rolling_sign > 0 + is_cooling = rolling_sign < 0 + + heating = _hvac_input[is_heating].fillna(0) + cooling = _hvac_input[is_cooling].fillna(0) + + # Get internal gain components: lighting, people, equipment, solar + lighting = idf.variables.OutputVariable.collect_by_output_name( + cls.LIGHTING, + reporting_frequency=idf.outputs.reporting_frequency, + units=units, + ) + lighting = cls.apply_multipliers(lighting, idf) # zone-level output + people_gain = idf.variables.OutputVariable.collect_by_output_name( + cls.PEOPLE_GAIN, + reporting_frequency=idf.outputs.reporting_frequency, + units=units, + ) + people_gain = cls.apply_multipliers(people_gain, idf) # zone-level output + equipment = idf.variables.OutputVariable.collect_by_output_name( + cls.EQUIP_GAINS, + reporting_frequency=idf.outputs.reporting_frequency, + units=units, + ) + equipment = cls.apply_multipliers(equipment, idf) # zone-level output + solar_gain = idf.variables.OutputVariable.collect_by_output_name( + cls.SOLAR_GAIN, + reporting_frequency=idf.outputs.reporting_frequency, + units=units, + ) + solar_gain = cls.apply_multipliers(solar_gain, idf) # zone-level output + infil_gain = idf.variables.OutputVariable.collect_by_output_name( + cls.INFIL_GAIN, + reporting_frequency=idf.outputs.reporting_frequency, + units=units, + ) + infil_gain = cls.apply_multipliers(infil_gain, idf) # zone-level output + infil_loss = idf.variables.OutputVariable.collect_by_output_name( + cls.INFIL_LOSS, + reporting_frequency=idf.outputs.reporting_frequency, + units=units, + ) + infil_loss = cls.apply_multipliers(infil_loss, idf) # zone-level output + vent_loss = idf.variables.OutputVariable.collect_by_output_name( + cls.VENTILATION_LOSS, + reporting_frequency=idf.outputs.reporting_frequency, + units=units, + ) + vent_loss = cls.apply_multipliers(vent_loss, idf) # zone-level output + vent_gain = 
idf.variables.OutputVariable.collect_by_output_name( + cls.VENTILATION_GAIN, + reporting_frequency=idf.outputs.reporting_frequency, + units=units, + ) + vent_gain = cls.apply_multipliers(vent_gain, idf) # zone-level output + nat_vent_gain = idf.variables.OutputVariable.collect_by_output_name( + cls.NAT_VENT_GAIN, + reporting_frequency=idf.outputs.reporting_frequency, + units=units, + ) + nat_vent_gain = cls.apply_multipliers(nat_vent_gain, idf) # zone-level output + nat_vent_loss = idf.variables.OutputVariable.collect_by_output_name( + cls.NAT_VENT_LOSS, + reporting_frequency=idf.outputs.reporting_frequency, + units=units, + ) + nat_vent_loss = cls.apply_multipliers(nat_vent_loss, idf) # zone-level output + mech_vent_gain = idf.variables.OutputVariable.collect_by_output_name( + cls.MECHANICAL_VENT_GAIN, + reporting_frequency=idf.outputs.reporting_frequency, + units=units, + ) + mech_vent_gain = cls.apply_multipliers(mech_vent_gain, idf) # zone-level output + mech_vent_loss = idf.variables.OutputVariable.collect_by_output_name( + cls.MECHANICAL_VENT_LOSS, + reporting_frequency=idf.outputs.reporting_frequency, + units=units, + ) + mech_vent_loss = cls.apply_multipliers(mech_vent_loss, idf) # zone-level output + + # subtract losses from gains + infiltration = None + mech_vent = None + nat_vent = None + if len(infil_gain) == len(infil_loss): + infiltration = cls.subtract_loss_from_gain(infil_gain, infil_loss) + if not any((vent_gain.empty, vent_loss.empty, cooling.empty, heating.empty)): + mech_vent = cls.subtract_loss_from_gain(mech_vent_gain, mech_vent_loss) + if nat_vent_gain.shape == nat_vent_loss.shape: + nat_vent = cls.subtract_loss_from_gain(nat_vent_gain, nat_vent_loss) + + # get the surface energy flow + opaque_flow = idf.variables.OutputVariable.collect_by_output_name( + cls.OPAQUE_ENERGY_FLOW, + reporting_frequency=idf.outputs.reporting_frequency, + units=units, + ) + opaque_storage = idf.variables.OutputVariable.collect_by_output_name( + 
cls.OPAQUE_ENERGY_STORAGE, + reporting_frequency=idf.outputs.reporting_frequency, + units=units, + ) + opaque_flow = opaque_flow - opaque_storage + window_loss = idf.variables.OutputVariable.collect_by_output_name( + cls.WINDOW_LOSS, + reporting_frequency=idf.outputs.reporting_frequency, + units=units, + ) + window_loss = cls.apply_multipliers(window_loss, idf) + window_gain = idf.variables.OutputVariable.collect_by_output_name( + cls.WINDOW_GAIN, + reporting_frequency=idf.outputs.reporting_frequency, + units=units, + ) + window_gain = cls.apply_multipliers(window_gain, idf) + window_flow = cls.subtract_loss_from_gain(window_gain, window_loss) + window_flow = cls.subtract_solar_from_window_net(window_flow, solar_gain) + + opaque_flow = cls.match_opaque_surface_to_zone(idf, opaque_flow) + opaque_storage = cls.match_opaque_surface_to_zone(idf, opaque_storage) + if outdoor_surfaces_only: + opaque_flow = opaque_flow.drop( + ["Surface", float("nan")], level="Outside_Boundary_Condition", axis=1 + ) + # opaque_storage = opaque_storage.drop( + # ["Surface", float("nan")], level="Outside_Boundary_Condition", axis=1 + # ) + window_energy_flow = window_flow + + bal_obj = cls( + idf, + cooling, + heating, + lighting, + equipment, + None, + None, + people_gain, + solar_gain, + infiltration, + mech_vent, + nat_vent, + window_energy_flow, + opaque_flow, + opaque_storage, + window_flow, + is_cooling, + is_heating, + units, + use_all_solar=True, + ) + return bal_obj + + @classmethod + def from_sql_file( + cls, sql_file, units="kWh", power_units="kW", outdoor_surfaces_only=True + ): + sql = Sql(sql_file) + + _hvac_input = sql.timeseries_by_name(cls.HVAC_INPUT_SENSIBLE).to_units( + power_units + ) + _hvac_input_heated_surface = sql.timeseries_by_name( + cls.HVAC_INPUT_HEATED_SURFACE + ).to_units(units) + _hvac_input_cooled_surface = sql.timeseries_by_name( + cls.HVAC_INPUT_COOLED_SURFACE + ).to_units(units) + # convert power to energy assuming the reporting frequency + freq = 
pd.infer_freq(_hvac_input.index) + assert freq == "H", "A reporting frequency other than H is not yet supported." + freq_to_unit = {"H": "hr"} + _hvac_input = _hvac_input.apply( + lambda row: unit_registry.Quantity( + row.values, + unit_registry(power_units) * unit_registry(freq_to_unit[freq]), + ) + .to(units) + .m + ) + + _hvac_input = pd.concat( + filter( + lambda x: not x.empty, + [ + _hvac_input, + EndUseBalance.subtract_cooled_from_heated_surface( + _hvac_input_cooled_surface, _hvac_input_heated_surface + ), + ], + ), + axis=1, + verify_integrity=True, + ) + + rolling_sign = cls.get_rolling_sign_change(_hvac_input) + + # Create both heating and cooling masks + is_heating = rolling_sign > 0 + is_cooling = rolling_sign < 0 + + heating = _hvac_input[is_heating].fillna(0) + cooling = _hvac_input[is_cooling].fillna(0) + + lighting = sql.timeseries_by_name(cls.LIGHTING).to_units(units) + zone_multipliers = sql.zone_info.set_index("ZoneName")["Multiplier"].rename( + "KeyValue" + ) + lighting = cls.apply_multipliers( + lighting, + zone_multipliers, + ) + people_gain = sql.timeseries_by_name(cls.PEOPLE_GAIN).to_units(units) + people_gain = cls.apply_multipliers(people_gain, zone_multipliers) + equipment = sql.timeseries_by_name(cls.EQUIP_GAINS).to_units(units) + equipment = cls.apply_multipliers(equipment, zone_multipliers) + solar_gain = sql.timeseries_by_name(cls.SOLAR_GAIN).to_units(units) + solar_gain = cls.apply_multipliers(solar_gain, zone_multipliers) + infil_gain = sql.timeseries_by_name(cls.INFIL_GAIN).to_units(units) + infil_gain = cls.apply_multipliers(infil_gain, zone_multipliers) + infil_loss = sql.timeseries_by_name(cls.INFIL_LOSS).to_units(units) + infil_loss = cls.apply_multipliers(infil_loss, zone_multipliers) + vent_loss = sql.timeseries_by_name(cls.VENTILATION_LOSS).to_units(units) + vent_loss = cls.apply_multipliers(vent_loss, zone_multipliers) + vent_gain = sql.timeseries_by_name(cls.VENTILATION_GAIN).to_units(units) + vent_gain = 
cls.apply_multipliers(vent_gain, zone_multipliers) + nat_vent_gain = sql.timeseries_by_name(cls.NAT_VENT_GAIN).to_units(units) + nat_vent_gain = cls.apply_multipliers(nat_vent_gain, zone_multipliers) + nat_vent_loss = sql.timeseries_by_name(cls.NAT_VENT_LOSS).to_units(units) + nat_vent_loss = cls.apply_multipliers(nat_vent_loss, zone_multipliers) + mech_vent_gain = sql.timeseries_by_name(cls.MECHANICAL_VENT_GAIN).to_units( + units + ) + mech_vent_gain = cls.apply_multipliers(mech_vent_gain, zone_multipliers) + mech_vent_loss = sql.timeseries_by_name(cls.MECHANICAL_VENT_LOSS).to_units( + units + ) + mech_vent_loss = cls.apply_multipliers(mech_vent_loss, zone_multipliers) + heat_recovery_loss = sql.timeseries_by_name(cls.HEAT_RECOVERY_LOSS).to_units( + units + ) + heat_recovery_gain = sql.timeseries_by_name(cls.HEAT_RECOVERY_GAIN).to_units( + units + ) + heat_recovery = cls.subtract_loss_from_gain( + heat_recovery_gain, heat_recovery_loss, level="KeyValue" + ) + + # subtract losses from gains + infiltration = None + mech_vent = None + nat_vent = None + if len(infil_gain) == len(infil_loss): + infiltration = cls.subtract_loss_from_gain( + infil_gain, infil_loss, level="Name" + ) + if not any((vent_gain.empty, vent_loss.empty, cooling.empty, heating.empty)): + mech_vent = cls.subtract_loss_from_gain( + mech_vent_gain, mech_vent_loss, level="Name" + ) + if nat_vent_gain.shape == nat_vent_loss.shape: + nat_vent = cls.subtract_loss_from_gain( + nat_vent_gain, nat_vent_loss, level="Name" + ) + + # get the surface energy flow + opaque_flow = sql.timeseries_by_name(cls.OPAQUE_ENERGY_FLOW).to_units(units) + opaque_storage = sql.timeseries_by_name(cls.OPAQUE_ENERGY_STORAGE).to_units( + units + ) + opaque_storage_ = opaque_storage.copy() + opaque_storage_.columns = opaque_flow.columns + opaque_flow = - (opaque_flow + opaque_storage_) + window_loss = sql.timeseries_by_name(cls.WINDOW_LOSS).to_units(units) + window_loss = cls.apply_multipliers(window_loss, zone_multipliers) + 
window_gain = sql.timeseries_by_name(cls.WINDOW_GAIN).to_units(units) + window_gain = cls.apply_multipliers(window_gain, zone_multipliers) + window_flow = cls.subtract_loss_from_gain( + window_gain, window_loss, level="Name" + ) + window_flow = cls.subtract_solar_from_window_net( + window_flow, solar_gain, level="KeyValue" + ) + + opaque_flow = cls.match_opaque_surface_to_zone( + sql.surfaces_table, opaque_flow, sql.zone_info + ) + opaque_storage = cls.match_opaque_surface_to_zone( + sql.surfaces_table, opaque_storage, sql.zone_info + ) + if outdoor_surfaces_only: + # inside surfaces are identified by ExtBoundCond > 0 + inside_surfaces = sql.surfaces_table[lambda x: x["ExtBoundCond"] > 0][ + "SurfaceName" + ].values.tolist() + + # drop inside surfaces + opaque_flow = opaque_flow.drop( + inside_surfaces, level="KeyValue", axis=1, errors="ignore" + ) + opaque_storage = opaque_storage.drop( + inside_surfaces, level="KeyValue", axis=1, errors="ignore" + ) + window_energy_flow = window_flow + + bal_obj = cls( + sql_file, + cooling, + heating, + lighting, + equipment, + None, + None, + people_gain, + solar_gain, + infiltration, + mech_vent, + nat_vent, + window_energy_flow, + opaque_flow, + opaque_storage, + window_flow, + heat_recovery, + is_cooling, + is_heating, + units, + use_all_solar=True, + ) + return bal_obj + + @classmethod + def apply_multipliers(cls, data, idf): + from archetypal import IDF + + if isinstance(idf, IDF): + multipliers = ( + pd.Series( + { + zone.Name.upper(): zone.Multiplier + for zone in idf.idfobjects["ZONE"] + }, + name="Key_Name", + ) + .replace({"": 1}) + .fillna(1) + ) + key = "OutputVariable" + elif isinstance(idf, pd.Series): + multipliers = idf + key = "KeyValue" + else: + raise ValueError + return data.mul(multipliers, level=key, axis=1) + + @classmethod + def subtract_cooled_from_heated_surface( + cls, _hvac_input_cooled_surface, _hvac_input_heated_surface + ): + if _hvac_input_cooled_surface.empty: + return _hvac_input_cooled_surface 
+ try: + columns = _hvac_input_heated_surface.rename( + columns=lambda x: str.replace(x, " Heating", ""), level="OutputVariable" + ).columns + except KeyError: + columns = None + return EnergyDataFrame( + ( + _hvac_input_heated_surface.sum(level="Key_Name", axis=1) + - _hvac_input_cooled_surface.sum(level="Key_Name", axis=1) + ).values, + columns=columns, + index=_hvac_input_heated_surface.index, + ) + + @classmethod + def get_rolling_sign_change(cls, data: pd.DataFrame): + # create a sign series where -1 is negative and 0 or 1 is positive + sign = ( + np.sign(data) + .replace({0: np.NaN}) + .fillna(method="bfill") + .fillna(method="ffill") + ) + # when does a change of sign occurs? + sign_switch = sign != sign.shift(-1) + # From sign, keep when the sign switches and fill with the previous values + # (back fill). The final forward fill is to fill the last few timesteps of the + # series which might be NaNs. + rolling_sign = sign[sign_switch].fillna(method="bfill").fillna(method="ffill") + return rolling_sign + + @classmethod + def match_window_to_zone(cls, idf, window_flow): + """Match window surfaces with their wall and zone. + + Adds the following properties to the `window_flow` DataFrame as a MultiIndex level with names: + * Building_Surface_Name + * Surface_Type + * Zone_Name + * Multiplier + """ + # Todo: Check if Zone Multiplier needs to be added. + assert window_flow.columns.names == ["OutputVariable", "Key_Name"] + window_to_surface_match = pd.DataFrame( + [ + ( + window.Name.upper(), # name of the window + window.Building_Surface_Name.upper(), # name of the wall this window is on + window.get_referenced_object( + "Building_Surface_Name" + ).Surface_Type.title(), # surface type (wall, ceiling, floor) this windows is on. + window.get_referenced_object( # get the zone name though the surface name + "Building_Surface_Name" + ).Zone_Name.upper(), + float(window.Multiplier) + if window.Multiplier != "" + else 1, # multiplier of this window. 
+ ) + for window in idf.getsubsurfaces() + ], + columns=[ + "Name", + "Building_Surface_Name", + "Surface_Type", + "Zone_Name", + "Multiplier", + ], + ).set_index("Name") + # Match the subsurface to the surface name and the zone name it belongs to. + stacked = ( + window_flow.stack() + .join( + window_to_surface_match.rename(index=str.upper), + on="Key_Name", + ) + .set_index( + ["Building_Surface_Name", "Surface_Type", "Zone_Name"], append=True + ) + ) + window_flow = ( + stacked.drop(columns=["Multiplier"]).iloc[:, 0] * stacked["Multiplier"] + ) + window_flow = window_flow.unstack( + level=["Key_Name", "Building_Surface_Name", "Surface_Type", "Zone_Name"] + ) + + return window_flow # .groupby("Building_Surface_Name", axis=1).sum() + + @classmethod + def match_opaque_surface_to_zone(cls, surface_table, opaque_flow, zone_info): + """Match opaque surfaces with their zone. + + Multiplies the surface heat flow by the zone multiplier. + + Adds the following properties to the `opaque_flow` DataFrame as a MultiIndex level with names: + * Surface_Type + * Outside_Boundary_Condition + * Zone_Name + """ + surface_with_idx = surface_table.join( + zone_info["ZoneName"], on="ZoneIndex" + ).reset_index() + opaque_flow.columns = pd.MultiIndex.from_frame( + opaque_flow.columns.to_frame(index=False).join( + surface_with_idx.reset_index() + .set_index("SurfaceName")[ + ["ClassName", "ExtBoundCond", "ZoneName", "ZoneIndex"] + ] + .rename( + { + "ClassName": "Surface_Type", + "ExtBoundCond": "Outside_Boundary_Condition", + "ZoneName": "Zone_Name", + }, + axis=1, + ), + on="KeyValue", + ) + ) + opaque_flow.mul(zone_info["Multiplier"], level="ZoneIndex") + opaque_flow = opaque_flow.droplevel("ZoneIndex", axis=1) + return opaque_flow + + @classmethod + def subtract_loss_from_gain(cls, load_gain, load_loss, level="OutputVariable"): + try: + columns = load_gain.rename( + columns=lambda x: str.replace(x, " Gain", ""), level=level + ).columns + except KeyError: + columns = None + return 
EnergyDataFrame( + load_gain.values - load_loss.values, + columns=columns, + index=load_gain.index, + ) + + @classmethod + def subtract_solar_from_window_net(cls, window_flow, solar_gain, level="Key_Name"): + columns = window_flow.columns + return EnergyDataFrame( + window_flow.sum(level=level, axis=1).values + - solar_gain.sum(level=level, axis=1).values, + columns=columns, + index=window_flow.index, + ) + + @classmethod + def subtract_vent_from_system(cls, system, vent, level="Key_Name"): + columns = vent.columns + return EnergyDataFrame( + system.sum(level=level, axis=1).values + - vent.sum(level=level, axis=1).values, + columns=columns, + index=system.index, + ) + + def separate_gains_and_losses( + self, component, level="Key_Name", stack_on_level=None + ) -> EnergyDataFrame: + """Separate gains from losses when cooling and heating occurs for the component. + + Args: + component (str): + level (str or list): + + Returns: + + """ + assert ( + component in self.__dict__.keys() + ), f"{component} is not a valid attribute of EndUseBalance." + component_df = getattr(self, component) + assert not component_df.empty, "Expected a component that is not empty." + if isinstance(level, str): + level = [level] + print(component) + + # mask when cooling occurs in zone (negative values) + mask = (self.is_cooling.stack("KeyValue") == True).any(axis=1) + + # get the dataframe using the attribute name, summarize by `level` and stack so that a Series is returned. + stacked = getattr(self, component).sum(level=level, axis=1).stack(level[0]) + + # concatenate the masked values with keys to easily create a MultiIndex when unstacking + inter = pd.concat( + [ + stacked[mask].reindex(stacked.index), + stacked[~mask].reindex(stacked.index), + ], + keys=["Cooling Periods", "Heating Periods"], + names=["Period"] + + stacked.index.names, # prepend the new key name to the existing index names. 
+ ) + + # mask when values are positive (gain) + positive_mask = inter >= 0 + + # concatenate the masked values with keys to easily create a MultiIndex when unstacking + final = pd.concat( + [ + inter[positive_mask].reindex(inter.index), + inter[~positive_mask].reindex(inter.index), + ], + keys=["Heat Gain", "Heat Loss"], + names=["Gain/Loss"] + inter.index.names, + ).unstack(["Period", "Gain/Loss"]) + final.sort_index(axis=1, inplace=True) + return final + + def to_df(self, separate_gains_and_losses=False, level="KeyValue"): + """Summarize components into a DataFrame.""" + if separate_gains_and_losses: + summary_by_component = {} + levels = ["Component", "Zone_Name", "Period", "Gain/Loss"] + for component in [ + "cooling", + "heating", + "lighting", + "electric_equip", + "people_gain", + "solar_gain", + "infiltration", + "window_energy_flow", + "nat_vent", + "mech_vent", + ]: + if not getattr(self, component).empty: + summary_by_component[component] = ( + self.separate_gains_and_losses( + component, + level=level, + ) + .unstack(level) + .reorder_levels([level, "Period", "Gain/Loss"], axis=1) + .sort_index(axis=1) + ) + for (surface_type), data in ( + self.separate_gains_and_losses( + "opaque_flow", ["Zone_Name", "Surface_Type"] + ) + .unstack("Zone_Name") + .groupby(level=["Surface_Type"], axis=1) + ): + summary_by_component[surface_type] = data.sum( + level=["Zone_Name", "Period", "Gain/Loss"], axis=1 + ).sort_index(axis=1) + + # for (surface_type), data in ( + # self.separate_gains_and_losses( + # "opaque_storage", ["Zone_Name", "Surface_Type"] + # ) + # .unstack("Zone_Name") + # .groupby(level=["Surface_Type"], axis=1) + # ): + # summary_by_component[surface_type + " Storage"] = data.sum( + # level=["Zone_Name", "Period", "Gain/Loss"], axis=1 + # ).sort_index(axis=1) + + else: + summary_by_component = {} + for component in [ + "cooling", + "heating", + "lighting", + "electric_equip", + "people_gain", + "solar_gain", + "infiltration", + "window_energy_flow", + 
"nat_vent", + "mech_vent", + ]: + component_df = getattr(self, component) + if not component_df.empty: + summary_by_component[component] = component_df.sum( + level=level, axis=1 + ).sort_index(axis=1) + for (zone_name, surface_type), data in self.face_energy_flow.groupby( + level=["Zone_Name", "Surface_Type"], axis=1 + ): + summary_by_component[surface_type] = data.sum( + level="Zone_Name", axis=1 + ).sort_index(axis=1) + levels = ["Component", "Zone_Name"] + return pd.concat( + summary_by_component, axis=1, verify_integrity=True, names=levels + ) + + def component_summary(self) -> EnergyDataFrame: + """Return a DataFrame of components summarized annually.""" + sum_opaque_flow = ( + self.separate_gains_and_losses("opaque_flow", "Zone_Name") + .sum() + .sum(level=["Period", "Gain/Loss"]) + ) + sum_window_flow = ( + self.separate_gains_and_losses("window_flow", "Zone_Name") + .sum() + .sum(level=["Period", "Gain/Loss"]) + ) + sum_solar_gain = ( + self.separate_gains_and_losses("solar_gain") + .sum() + .sum(level=["Period", "Gain/Loss"]) + ) + sum_lighting = ( + self.separate_gains_and_losses("lighting") + .sum() + .sum(level=["Period", "Gain/Loss"]) + ) + sum_infiltration = ( + self.separate_gains_and_losses("infiltration") + .sum() + .sum(level=["Period", "Gain/Loss"]) + ) + sum_people_gain = ( + self.separate_gains_and_losses("people_gain") + .sum() + .sum(level=["Period", "Gain/Loss"]) + ) + + df = pd.concat( + [ + sum_opaque_flow, + sum_window_flow, + sum_solar_gain, + sum_lighting, + sum_infiltration, + sum_people_gain, + ], + keys=[ + "Opaque Conduction", + "Window Conduction", + "Window Solar Gains", + "Lighting", + "Infiltration", + "Occupants (Sensible + Latent)", + ], + ) + + return df.unstack(level=["Period", "Gain/Loss"]) + + def to_sankey(self, path_or_buf): + system_data = self.to_df(separate_gains_and_losses=True) + annual_system_data = system_data.sum().sum( + level=["Component", "Period", "Gain/Loss"] + ) + annual_system_data.rename( + { + 
"people_gain": "Occupants", + "solar_gain": "Passive Solar", + "electric_equip": "Equipment", + "lighting": "Lighting", + "infiltration": "Infiltration", + "interior_equipment": "Equipment", + "window_energy_flow": "Windows", + "Wall": "Walls", + }, + inplace=True, + ) + + heating_load = annual_system_data.xs("Heating Periods", level="Period") + cooling_load = annual_system_data.xs("Cooling Periods", level="Period") + + end_uses = ( + "Heating", + "Cooling", + "Interior Lighting", + "Exterior Lighting", + "Interior Equipment", + "Exterior Equipment", + "Fans", + "Pumps", + "Heat Rejection", + "Humidification", + "Heat Recovery", + "Water Systems", + "Refrigeration", + "Generators", + ) + energy_sources = ( + "Electricity", + "Natural Gas", + "District Cooling", + "District Heating", + ) + with connect(self.sql_file) as conn: + df = pd.read_sql( + 'select * from "TabularDataWithStrings" as f where f."TableName" == "End Uses" and f."ReportName" == "AnnualBuildingUtilityPerformanceSummary"', + conn, + ) + system_input = df.pivot( + index="RowName", columns="ColumnName", values="Value" + ).loc[end_uses, energy_sources] + system_input = system_input.astype("float") + system_input = EnergyDataFrame( + system_input.values, + index=system_input.index, + columns=system_input.columns, + ) + system_input.units = df.set_index("ColumnName").Units.to_dict() + system_input = system_input.to_units("kWh") + + floor_area = pd.to_numeric( + Sql(self.sql_file) + .tabular_data_by_name( + *( + "AnnualBuildingUtilityPerformanceSummary", + "Building Area", + "Entire Facility", + ) + ) + .loc["Net Conditioned Building Area", ("Area", "m2")] + ) + + system_input = ( + system_input + # .replace({0: np.NaN}) + .dropna(how="all").dropna(how="all", axis=1) + ) + system_input.rename_axis("source", axis=1, inplace=True) + system_input.rename_axis("target", axis=0, inplace=True) + system_input = system_input.unstack().rename("value").reset_index().dropna() + system_input_data = 
system_input.to_dict(orient="records") + + heating_energy_to_heating_system = [ + { + "source": "Heating", + "target": "Heating System", + "value": system_input.set_index("target").at["Heating", "value"].sum(), + } + ] + + cooling_energy_to_heating_system = [ + { + "source": "Cooling", + "target": "Cooling System", + "value": system_input.set_index("target").at["Cooling", "value"].sum(), + } + ] + + ( + heating_load_source_data, + heating_load_target_data, + link_heating_system_to_gains, + ) = self._sankey_heating(heating_load, load_type="heating") + + ( + cooling_load_source_data, + cooling_load_target_data, + link_cooling_system_to_gains, + ) = self._sankey_cooling(cooling_load, load_type="cooling") + + return ( + pd.DataFrame( + system_input_data + + link_heating_system_to_gains + + heating_energy_to_heating_system + + heating_load_source_data + + heating_load_target_data + + cooling_energy_to_heating_system + + cooling_load_source_data + + cooling_load_target_data + + link_cooling_system_to_gains + ) + # .div(floor_area) + .to_csv(path_or_buf, index=False) + ) + + def _sankey_heating(self, load, load_type="heating"): + assert load_type in ["heating", "cooling"] + load_source = ( + load.unstack("Gain/Loss") + .replace({0: np.NaN}) + .loc[:, "Heat Gain"] + .dropna(how="all") + .apply(abs) + .rename("value") + .reset_index() + ) + load_target = ( + load.unstack("Gain/Loss") + .replace({0: np.NaN}) + .loc[:, "Heat Loss"] + .dropna(how="all") + .apply(abs) + .rename("value") + .reset_index() + ) + load_source["target"] = load_type.title() + " Load" + load_source = load_source.rename({"Component": "source"}, axis=1) + load_source["source"] = load_source["source"] + " Gain" + load_source = load_source.replace( + {f"{load_type} Gain": load_type.title() + " System"} + ) + + load_source_data = load_source.to_dict(orient="records") + load_target["source"] = load_type.title() + " Load" + load_target = load_target.rename({"Component": "target"}, axis=1) + 
load_target["target"] = load_target["target"] + " Heat Losses" + load_target_data = load_target.to_dict(orient="records") + link_system_to_gains = ( + load_source.set_index("source") + .drop(load_type.title() + " System", errors="ignore") + .rename_axis("target") + .apply(lambda x: 0.01, axis=1) + .rename("value") + .reset_index() + ) + link_system_to_gains["source"] = load_type.title() + link_system_to_gains = link_system_to_gains.to_dict(orient="records") + return ( + load_source_data, + load_target_data, + link_system_to_gains, + ) + + def _sankey_cooling(self, load, load_type="cooling"): + load_source = ( + load.unstack("Gain/Loss") + .replace({0: np.NaN}) + .loc[:, "Heat Loss"] + .dropna(how="all") + .apply(abs) + .rename("value") + .reset_index() + ) + load_source["target"] = load_type.title() + " Load" + load_source = load_source.rename({"Component": "source"}, axis=1) + load_source["source"] = load_source["source"] + " Losses" + load_source = load_source.replace( + {f"{load_type} Losses": load_type.title() + " System"} + ) + load_source_data = load_source.to_dict(orient="records") + + load_target = ( + load.unstack("Gain/Loss") + .replace({0: np.NaN}) + .loc[:, "Heat Gain"] + .dropna(how="all") + .apply(abs) + .rename("value") + .reset_index() + ) + load_target["source"] = load_type.title() + " Load" + load_target = load_target.rename({"Component": "target"}, axis=1) + load_target_data = load_target.to_dict(orient="records") + link_system_to_gains = ( + load_source.set_index("source") + .drop(load_type.title() + " System", errors="ignore") + .rename_axis("target") + .apply(lambda x: 0.01, axis=1) + .rename("value") + .reset_index() + ) + link_system_to_gains["source"] = load_type.title() + link_system_to_gains = link_system_to_gains.to_dict(orient="records") + return ( + load_source_data, + load_target_data, + link_system_to_gains, + ) diff --git a/archetypal/idfclass/extensions.py b/archetypal/idfclass/extensions.py index 30d02c85..838c6a73 100644 --- 
a/archetypal/idfclass/extensions.py +++ b/archetypal/idfclass/extensions.py @@ -130,3 +130,10 @@ def _parse_idd_type(epbunch, name): "Environment": "Run Period", "Annual": "Annual", } + + +def get_name_attribute(__o: EpBunch): + try: + return getattr(__o, "Key_Name") + except BadEPFieldError: # Backwards compatibility + return getattr(__o, "Name") diff --git a/archetypal/idfclass/idf.py b/archetypal/idfclass/idf.py index 57b0aa61..e0ddd18b 100644 --- a/archetypal/idfclass/idf.py +++ b/archetypal/idfclass/idf.py @@ -3,7 +3,7 @@ Various functions for processing EnergyPlus models and retrieving results in different forms. """ - +import io import itertools import logging as lg import math @@ -194,7 +194,7 @@ def __init__( output_suffix="L", epmacro=False, keep_data=True, - keep_data_err=False, + keep_data_err=True, position=0, name=None, output_directory=None, @@ -252,7 +252,13 @@ def __init__( self.prep_outputs = prep_outputs self._position = position self.output_prefix = None - self.name = self.idfname.basename() if isinstance(self.idfname, Path) else name + self.name = ( + name + if name is not None + else self.idfname.basename() + if isinstance(self.idfname, Path) + else None + ) self.output_directory = output_directory # Set dependants to None @@ -294,23 +300,24 @@ def __init__( self.upgrade(to_version=self.as_version, overwrite=False) finally: # Set model outputs - self._outputs = Outputs(idf=self) + self._outputs = Outputs(idf=self, include_html=False, include_sqlite=False) if self.prep_outputs: - ( - self._outputs.add_basics() - .add_umi_template_outputs() - .add_custom(outputs=self.prep_outputs) - .add_profile_gas_elect_ouputs() - .apply() - ) + self._outputs.include_html = True + self._outputs.include_sqlite = True + self._outputs.add_basics() + if isinstance(self.prep_outputs, list): + self._outputs.add_custom(outputs=self.prep_outputs) + self._outputs.add_profile_gas_elect_outputs() + self._outputs.add_umi_template_outputs() + self._outputs.apply() 
@property def outputtype(self): + """Get or set the outputtype for the idf string representation of self.""" return self._outputtype @outputtype.setter def outputtype(self, value): - """Get or set the outputtype for the idf string representation of self.""" assert value in self.OUTPUTTYPES, ( f'Invalid input "{value}" for output_type.' f"\nOutput type must be one of the following: {self.OUTPUTTYPES}" @@ -333,6 +340,10 @@ def __repr__(self): body += sim_info return f"<{body}>" + def __copy__(self): + """Get a copy of self.""" + return self.copy() + @classmethod def from_example_files(cls, example_name, epw=None, **kwargs): """Load an IDF model from the ExampleFiles folder by name. @@ -700,6 +711,10 @@ def prep_outputs(self): @prep_outputs.setter def prep_outputs(self, value): + assert isinstance(value, (bool, list)), ( + f"Expected bool or list of dict for " + f"SimulationOutput outputs. Got {type(value)}." + ) self._prep_outputs = value @property @@ -899,6 +914,14 @@ def open_last_simulation(self): app_path_guess = self.file_version.current_install_dir find_and_launch("EP-Launch", app_path_guess, filepath.abspath()) + def open_err(self): + """Open last simulation err file in texteditor.""" + import webbrowser + + filepath, *_ = self.simulation_dir.files("*.err") + + webbrowser.open(filepath.abspath()) + def open_mdd(self): """Open .mdd file in browser. 
@@ -1357,14 +1380,19 @@ def simulate(self, force=False, **kwargs): ).mkdir() # Run the ExpandObjects preprocessor program expandobjects_thread = ExpandObjectsThread(self, tmp) - expandobjects_thread.start() - expandobjects_thread.join() - while expandobjects_thread.is_alive(): - time.sleep(1) - tmp.rmtree(ignore_errors=True) - e = expandobjects_thread.exception - if e is not None: - raise e + try: + expandobjects_thread.start() + expandobjects_thread.join() + # Give time to the subprocess to finish completely + while expandobjects_thread.is_alive(): + time.sleep(1) + except (KeyboardInterrupt, SystemExit): + expandobjects_thread.stop() + finally: + tmp.rmtree(ignore_errors=True) + e = expandobjects_thread.exception + if e is not None: + raise e # Run the Basement preprocessor program if necessary tmp = ( @@ -1372,43 +1400,58 @@ def simulate(self, force=False, **kwargs): + str(uuid.uuid1())[0:8] ).mkdir() basement_thread = BasementThread(self, tmp) - basement_thread.start() - basement_thread.join() - while basement_thread.is_alive(): - time.sleep(1) - tmp.rmtree(ignore_errors=True) - e = basement_thread.exception - if e is not None: - raise e + try: + basement_thread.start() + basement_thread.join() + # Give time to the subprocess to finish completely + while basement_thread.is_alive(): + time.sleep(1) + except KeyboardInterrupt: + basement_thread.stop() + finally: + tmp.rmtree(ignore_errors=True) + e = basement_thread.exception + if e is not None: + raise e # Run the Slab preprocessor program if necessary tmp = ( self.output_directory.makedirs_p() / "runSlab_run_" + str(uuid.uuid1())[0:8] ).mkdir() slab_thread = SlabThread(self, tmp) - slab_thread.start() - slab_thread.join() - while slab_thread.is_alive(): - time.sleep(1) - tmp.rmtree(ignore_errors=True) - e = slab_thread.exception - if e is not None: - raise e + try: + slab_thread.start() + slab_thread.join() + # Give time to the subprocess to finish completely + while slab_thread.is_alive(): + time.sleep(1) + 
except KeyboardInterrupt: + slab_thread.stop() + finally: + tmp.rmtree(ignore_errors=True) + e = slab_thread.exception + if e is not None: + raise e # Run the energyplus program tmp = ( self.output_directory.makedirs_p() / "eplus_run_" + str(uuid.uuid1())[0:8] ).mkdir() running_simulation_thread = EnergyPlusThread(self, tmp) - running_simulation_thread.start() - running_simulation_thread.join() - while running_simulation_thread.is_alive(): - time.sleep(1) - tmp.rmtree(ignore_errors=True) - e = running_simulation_thread.exception - if e is not None: - raise e - return self + try: + running_simulation_thread.start() + running_simulation_thread.join() + # Give time to the subprocess to finish completely + while running_simulation_thread.is_alive(): + time.sleep(1) + except KeyboardInterrupt: + running_simulation_thread.stop() + finally: + tmp.rmtree(ignore_errors=True) + e = running_simulation_thread.exception + if e is not None: + raise e + return self def savecopy(self, filename, lineendings="default", encoding="latin-1"): """Save a copy of the file with the filename passed. @@ -1427,6 +1470,15 @@ def savecopy(self, filename, lineendings="default", encoding="latin-1"): super(IDF, self).save(filename, lineendings, encoding) return Path(filename) + def copy(self): + """Return a copy of self as an in memory IDF. + + The copy is a new IDF object with the same parameters and arguments as self + but is not attached to an file. Use IDF.saveas("idfname.idf", inplace=True) + to save the copy to a file inplace. self.idfname will now be idfname.idf + """ + return self.saveas(io.StringIO("")) + def save(self, lineendings="default", encoding="latin-1", **kwargs): """Write the IDF model to the text file. 
@@ -1448,7 +1500,9 @@ def save(self, lineendings="default", encoding="latin-1", **kwargs): log(f"saved '{self.name}' at '{self.idfname}'") return self - def saveas(self, filename, lineendings="default", encoding="latin-1"): + def saveas( + self, filename, lineendings="default", encoding="latin-1", inplace=False + ): """Save the IDF model as. Writes a new text file and load a new instance of the IDF class (new object). @@ -1461,6 +1515,8 @@ def saveas(self, filename, lineendings="default", encoding="latin-1"): the line endings for the current system. encoding (str): Encoding to use for the saved file. The default is 'latin-1' which is compatible with the EnergyPlus IDFEditor. + inplace (bool): If True, applies the new filename to self directly, + else a new object is returned with the new filename. Returns: IDF: A new IDF object based on the new location file. @@ -1490,8 +1546,18 @@ def saveas(self, filename, lineendings="default", encoding="latin-1"): name = Path(name).basename() else: name = file.basename() - file.copy(as_idf.simulation_dir / name) - return as_idf + try: + file.copy(as_idf.simulation_dir / name) + except shutil.SameFileError: + # A copy of self would have the same files in the simdir and + # throw an error. + pass + if inplace: + # If inplace, replace content of self with content of as_idf. + self.__dict__.update(as_idf.__dict__) + else: + # return the new object. + return as_idf def process_results(self): """Return the list of processed results. 
@@ -1602,13 +1668,13 @@ def upgrade(self, to_version=None, overwrite=True): tmp = ( self.output_directory / "Transition_run_" + str(uuid.uuid1())[0:8] ).makedirs_p() - slab_thread = TransitionThread(self, tmp, overwrite=overwrite) - slab_thread.start() - slab_thread.join() - while slab_thread.is_alive(): + transition_thread = TransitionThread(self, tmp, overwrite=overwrite) + transition_thread.start() + transition_thread.join() + while transition_thread.is_alive(): time.sleep(1) tmp.rmtree(ignore_errors=True) - e = slab_thread.exception + e = transition_thread.exception if e is not None: raise e @@ -1875,7 +1941,7 @@ def newidfobject(self, key, **kwargs) -> Optional[EpBunch]: except BadEPFieldError as e: raise e else: - # If object is supposed to be 'unique-object', deletes all objects to be + # If object is supposed to be 'unique-object', delete all objects to be # sure there is only one of them when creating new object # (see following line) if "unique-object" in set().union( @@ -1883,22 +1949,23 @@ def newidfobject(self, key, **kwargs) -> Optional[EpBunch]: ): for obj in existing_objs: self.removeidfobject(obj) - self.addidfobject(new_object) log( f"{obj} is a 'unique-object'; Removed and replaced with" f" {new_object}", lg.DEBUG, ) + self.addidfobject(new_object) return new_object if new_object in existing_objs: - # If obj already exists, simply return + # If obj already exists, simply return the existing one. log( f"object '{new_object}' already exists in {self.name}. " f"Skipping.", lg.DEBUG, ) - return new_object + return next(x for x in existing_objs if x == new_object) elif new_object not in existing_objs and new_object.nameexists(): + # Object does not exist (because not equal) but Name exists. 
obj = self.getobject( key=new_object.key.upper(), name=new_object.Name.upper() ) @@ -1930,6 +1997,17 @@ def addidfobject(self, new_object) -> EpBunch: self._reset_dependant_vars("idfobjects") return new_object + def addidfobjects(self, new_objects): + """Add multiple IDF objects to the model. + + Resetting dependent variables will wait after all objects have been added. + """ + for new_object in new_objects: + key = new_object.key.upper() + self.idfobjects[key].append(new_object) + self._reset_dependant_vars("idfobjects") + return new_objects + def removeidfobject(self, idfobject): """Remove an IDF object from the model. @@ -1943,6 +2021,8 @@ def removeidfobject(self, idfobject): def removeidfobjects(self, idfobjects: Iterable[EpBunch]): """Remove an IDF object from the model. + Resetting dependent variables will wait after all objects have been removed. + Args: idfobjects: The object to remove from the model. """ diff --git a/archetypal/idfclass/load_balance.py b/archetypal/idfclass/load_balance.py new file mode 100644 index 00000000..a48d5e5e --- /dev/null +++ b/archetypal/idfclass/load_balance.py @@ -0,0 +1,257 @@ +import pandas as pd +from energy_pandas import EnergyDataFrame + + +class LoadBalance: + def __init__( + self, + cooling, + heating, + lighting, + electric_equip, + gas_equip, + how_water, + people_gain, + solar_gain, + infiltration, + mech_vent, + nat_vent, + face_energy_flow, + units="J", + use_all_solar=True, + ): + self.cooling = cooling + self.heating = heating + self.lighting = lighting + self.electric_equip = electric_equip + self.gas_equip = gas_equip + self.hot_water = how_water + self.people_gain = people_gain + self.solar_gain = solar_gain + self.infiltration = infiltration + self.mech_vent = mech_vent + self.nat_vent = nat_vent + self.face_energy_flow = face_energy_flow + self.units = units + self.use_all_solar = use_all_solar + + @classmethod + def from_idf(cls, idf): + assert idf.sql_file.exists() + + # get all of the results relevant 
for gains and losses + cooling = idf.variables.OutputVariable.collect_by_output_name( + idf.outputs.COOLING, reporting_frequency=idf.outputs.reporting_frequency + ) + heating = idf.variables.OutputVariable.collect_by_output_name( + idf.outputs.HEATING, reporting_frequency=idf.outputs.reporting_frequency + ) + lighting = idf.variables.OutputVariable.collect_by_output_name( + idf.outputs.LIGHTING, reporting_frequency=idf.outputs.reporting_frequency + ) + people_gain = idf.variables.OutputVariable.collect_by_output_name( + idf.outputs.PEOPLE_GAIN, reporting_frequency=idf.outputs.reporting_frequency + ) + solar_gain = idf.variables.OutputVariable.collect_by_output_name( + idf.outputs.SOLAR_GAIN, reporting_frequency=idf.outputs.reporting_frequency + ) + infil_gain = idf.variables.OutputVariable.collect_by_output_name( + idf.outputs.INFIL_GAIN, reporting_frequency=idf.outputs.reporting_frequency + ) + infil_loss = idf.variables.OutputVariable.collect_by_output_name( + idf.outputs.INFIL_LOSS, reporting_frequency=idf.outputs.reporting_frequency + ) + vent_loss = idf.variables.OutputVariable.collect_by_output_name( + idf.outputs.VENT_LOSS, reporting_frequency=idf.outputs.reporting_frequency + ) + vent_gain = idf.variables.OutputVariable.collect_by_output_name( + idf.outputs.VENT_GAIN, reporting_frequency=idf.outputs.reporting_frequency + ) + nat_vent_gain = idf.variables.OutputVariable.collect_by_output_name( + idf.outputs.NAT_VENT_GAIN, + reporting_frequency=idf.outputs.reporting_frequency, + ) + nat_vent_loss = idf.variables.OutputVariable.collect_by_output_name( + idf.outputs.NAT_VENT_LOSS, + reporting_frequency=idf.outputs.reporting_frequency, + ) + + # handle the case that both total elect/gas energy and zone gain are requested + electric_equip = idf.variables.OutputVariable.collect_by_output_name( + idf.outputs.ELECTRIC_EQUIP[1], + reporting_frequency=idf.outputs.reporting_frequency, + ) + if len(electric_equip) == 0: + electric_equip = 
idf.variables.OutputVariable.collect_by_output_name( + idf.outputs.ELECTRIC_EQUIP, + reporting_frequency=idf.outputs.reporting_frequency, + ) + gas_equip = idf.variables.OutputVariable.collect_by_output_name( + idf.outputs.GAS_EQUIP[1], + reporting_frequency=idf.outputs.reporting_frequency, + ) + if len(gas_equip) == 0: + gas_equip = idf.variables.OutputVariable.collect_by_output_name( + idf.outputs.GAS_EQUIP, + reporting_frequency=idf.outputs.reporting_frequency, + ) + hot_water = idf.variables.OutputVariable.collect_by_output_name( + idf.outputs.HOT_WATER[1], + reporting_frequency=idf.outputs.reporting_frequency, + ) + if len(hot_water) == 0: + hot_water = idf.variables.OutputVariable.collect_by_output_name( + idf.outputs.HOT_WATER, + reporting_frequency=idf.outputs.reporting_frequency, + ) + + # subtract losses from gains + infiltration = None + mech_vent = None + nat_vent = None + if len(infil_gain) == len(infil_loss): + infiltration = cls.subtract_loss_from_gain(infil_gain, infil_loss) + if ( + vent_gain.shape == vent_loss.shape == cooling.shape == heating.shape + and not vent_gain.empty == vent_loss.empty == cooling.empty == heating.empty + ): + mech_vent_loss = cls.subtract_loss_from_gain(heating, vent_loss) + mech_vent_gain = cls.subtract_loss_from_gain(cooling, vent_gain) + total_load = cls.subtract_loss_from_gain(mech_vent_gain, mech_vent_loss) + mech_vent = total_load.copy() + mech_vent.rename( + columns=lambda x: str.replace( + x, "Zone Ideal Loads Supply Air", "Zone Ideal Loads Ventilation" + ), + level="OutputVariable", + inplace=True, + ) + if nat_vent_gain.shape == nat_vent_loss.shape: + nat_vent = cls.subtract_loss_from_gain(nat_vent_gain, nat_vent_loss) + + # get the surface energy flow + opaque_flow = idf.variables.OutputVariable.collect_by_output_name( + idf.outputs.OPAQUE_ENERGY_FLOW, + reporting_frequency=idf.outputs.reporting_frequency, + ) + window_loss = idf.variables.OutputVariable.collect_by_output_name( + idf.outputs.WINDOW_LOSS, 
reporting_frequency=idf.outputs.reporting_frequency + ) + window_gain = idf.variables.OutputVariable.collect_by_output_name( + idf.outputs.WINDOW_GAIN, reporting_frequency=idf.outputs.reporting_frequency + ) + window_flow = [] + if window_gain.shape == window_loss.shape: + window_flow = cls.subtract_loss_from_gain(window_gain, window_loss) + window_flow = cls.match_window_to_zone(idf, window_flow) + face_energy_flow = opaque_flow.add( + window_flow.sum(level=["Building_Surface_Name"], axis=1).rename( + columns=str.upper + ), + level="Key_Name", + axis=1, + fill_value=0, + ) + + bal_obj = cls( + cooling, + heating, + lighting, + electric_equip, + gas_equip, + hot_water, + people_gain, + solar_gain, + infiltration, + mech_vent, + nat_vent, + face_energy_flow, + "J", + use_all_solar=True, + ) + return bal_obj + + @classmethod + def match_window_to_zone(cls, idf, window_flow): + """Match the DataFrame of""" + assert window_flow.columns.names == ["OutputVariable", "Key_Name"] + window_to_surface_match = pd.DataFrame( + [ + ( + window.Name, # name of the window + window.Building_Surface_Name, # name of the wall this window is on + window.get_referenced_object( + "Building_Surface_Name" + ).Surface_Type, # surface type (wall, ceiling, floor) this windows is on. + window.get_referenced_object( # get the zone name though the surface name + "Building_Surface_Name" + ).Zone_Name, + window.Multiplier, # multiplier of this window. + ) + for window in idf.getsubsurfaces() + ], + columns=[ + "Name", + "Building_Surface_Name", + "Surface_Type", + "Zone_Name", + "Multiplier", + ], + ).set_index("Name") + # Match the subsurface to the surface name and the zone name it belongs to. 
+ stacked = ( + window_flow.stack() + .join( + window_to_surface_match.rename(index=str.upper), + on="Key_Name", + ) + .set_index( + ["Building_Surface_Name", "Surface_Type", "Zone_Name"], append=True + ) + ) + window_flow = stacked.drop(columns=["Multiplier"]).iloc[:, 0] * pd.to_numeric( + stacked["Multiplier"] + ) + window_flow = window_flow.unstack( + level=["Key_Name", "Building_Surface_Name", "Surface_Type", "Zone_Name"] + ) + + return window_flow # .groupby("Building_Surface_Name", axis=1).sum() + + @classmethod + def subtract_loss_from_gain(cls, load_gain, load_loss): + try: + columns = load_gain.rename( + columns=lambda x: str.replace(x, " Gain", ""), level="OutputVariable" + ).columns + except KeyError: + columns = None + return EnergyDataFrame( + load_gain.values - load_loss.values, + columns=columns, + index=load_gain.index, + ) + + def to_df(self): + return pd.concat( + [ + df + for df in [ + self.cooling, + self.heating, + self.lighting, + self.electric_equip, + self.gas_equip, + self.hot_water, + self.people_gain, + self.solar_gain, + self.infiltration, + self.mech_vent, + self.nat_vent, + self.face_energy_flow, + ] + if not df.empty + ], + axis=1, + verify_integrity=True, + ) diff --git a/archetypal/idfclass/meters.py b/archetypal/idfclass/meters.py index f28cb7e7..748a1585 100644 --- a/archetypal/idfclass/meters.py +++ b/archetypal/idfclass/meters.py @@ -83,7 +83,10 @@ def values( # the environment_type is specified by the simulationcontrol. 
try: for ctrl in self._idf.idfobjects["SIMULATIONCONTROL"]: - if ctrl.Run_Simulation_for_Weather_File_Run_Periods.lower() == "yes": + if ( + ctrl.Run_Simulation_for_Weather_File_Run_Periods.lower() + == "yes" + ): environment_type = 3 else: environment_type = 1 diff --git a/archetypal/idfclass/outputs.py b/archetypal/idfclass/outputs.py index ab2fd530..42f5fbed 100644 --- a/archetypal/idfclass/outputs.py +++ b/archetypal/idfclass/outputs.py @@ -1,23 +1,258 @@ +from typing import Iterable + +from archetypal.idfclass.end_use_balance import EndUseBalance +from archetypal.idfclass.extensions import get_name_attribute + + class Outputs: """Handles preparation of EnergyPlus outputs. Different instance methods allow to chain methods together and to add predefined bundles of outputs in - one go. + one go. `.apply()` is required at the end to apply the outputs to the IDF model. Examples: >>> from archetypal import IDF >>> idf = IDF(prep_outputs=False) # True be default - >>> idf.outputs.add_output_control().add_umi_ouputs( - >>> ).add_profile_gas_elect_ouputs().apply() + >>> idf.outputs.add_output_control().add_umi_outputs( + >>> ).add_profile_gas_elect_outputs().apply() """ - def __init__(self, idf): + REPORTING_FREQUENCIES = ("Annual", "Monthly", "Daily", "Hourly", "Timestep") + COOLING = ( + "Zone Ideal Loads Supply Air Total Cooling Energy", + "Zone Ideal Loads Zone Sensible Cooling Energy", + "Zone Ideal Loads Zone Latent Cooling Energy", + ) + HEATING = ( + "Zone Ideal Loads Supply Air Total Heating Energy", + "Zone Ideal Loads Zone Sensible Heating Energy", + "Zone Ideal Loads Zone Latent Heating Energy", + ) + LIGHTING = ( + "Zone Lights Electric Energy", + "Zone Lights Total Heating Energy", + ) + ELECTRIC_EQUIP = ( + "Zone Electric Equipment Electricity Energy", + "Zone Electric Equipment Total Heating Energy", + "Zone Electric Equipment Radiant Heating Energy", + "Zone Electric Equipment Convective Heating Energy", + "Zone Electric Equipment Latent Gain Energy", 
+ ) + GAS_EQUIP = ( + "Zone Gas Equipment NaturalGas Energy", + "Zone Gas Equipment Total Heating Energy", + "Zone Gas Equipment Radiant Heating Energy", + "Zone Gas Equipment Convective Heating Energy", + "Zone Gas Equipment Latent Gain Energy", + ) + HOT_WATER = ( + "Water Use Equipment Zone Sensible Heat Gain Energy", + "Water Use Equipment Zone Latent Gain Energy", + ) + PEOPLE_GAIN = ( + "Zone People Total Heating Energy", + "Zone People Sensible Heating Energy", + "Zone People Latent Gain Energy", + ) + SOLAR_GAIN = ("Zone Windows Total Transmitted Solar Radiation Energy",) + INFIL_GAIN = ( + "Zone Infiltration Total Heat Gain Energy", + "Zone Infiltration Sensible Heat Gain Energy", + "Zone Infiltration Latent Heat Gain Energy", + "AFN Zone Infiltration Sensible Heat Gain Energy", + "AFN Zone Infiltration Latent Heat Gain Energy", + ) + INFIL_LOSS = ( + "Zone Infiltration Total Heat Loss Energy", + "Zone Infiltration Sensible Heat Loss Energy", + "Zone Infiltration Latent Heat Loss Energy", + "AFN Zone Infiltration Sensible Heat Loss Energy", + "AFN Zone Infiltration Latent Heat Loss Energy", + ) + VENT_LOSS = ( + "Zone Ideal Loads Zone Total Heating Energy", + "Zone Ideal Loads Zone Sensible Heating Energy", + "Zone Ideal Loads Zone Latent Heating Energy", + ) + VENT_GAIN = ( + "Zone Ideal Loads Zone Total Cooling Energy", + "Zone Ideal Loads Zone Sensible Cooling Energy", + "Zone Ideal Loads Zone Latent Cooling Energy", + ) + NAT_VENT_GAIN = ( + "Zone Ventilation Total Heat Gain Energy", + "Zone Ventilation Sensible Heat Gain Energy", + "Zone Ventilation Latent Heat Gain Energy", + "AFN Zone Ventilation Sensible Heat Gain Energy", + "AFN Zone Ventilation Latent Heat Gain Energy", + ) + NAT_VENT_LOSS = ( + "Zone Ventilation Total Heat Loss Energy", + "Zone Ventilation Sensible Heat Loss Energy", + "Zone Ventilation Latent Heat Loss Energy", + "AFN Zone Ventilation Sensible Heat Loss Energy", + "AFN Zone Ventilation Latent Heat Loss Energy", + ) + 
OPAQUE_ENERGY_FLOW = ("Surface Average Face Conduction Heat Transfer Energy",) + WINDOW_LOSS = ("Surface Window Heat Loss Energy",) + WINDOW_GAIN = ("Surface Window Heat Gain Energy",) + + def __init__( + self, + idf, + variables=None, + meters=None, + outputs=None, + reporting_frequency="Hourly", + include_sqlite=True, + include_html=True, + unit_conversion=None, + ): """Initialize an outputs object. Args: idf (IDF): the IDF object for wich this outputs object is created. """ self.idf = idf - self._outputs = [] + self.output_variables = set( + a.Variable_Name for a in idf.idfobjects["Output:Variable".upper()] + ) + self.output_meters = set( + get_name_attribute(a) for a in idf.idfobjects["Output:Meter".upper()] + ) + self.other_outputs = outputs + self.output_variables += tuple(variables or ()) + self.output_meters += tuple(meters or ()) + self.other_outputs += tuple(outputs or ()) + self.reporting_frequency = reporting_frequency + self.include_sqlite = include_sqlite + self.include_html = include_html + self.unit_conversion = unit_conversion + + @property + def unit_conversion(self): + return self._unit_conversion + + @unit_conversion.setter + def unit_conversion(self, value): + if not value: + value = "None" + assert value in ["None", "JtoKWH", "JtoMJ", "JtoGJ", "InchPound"] + for obj in self.idf.idfobjects["OutputControl:Table:Style".upper()]: + obj.Unit_Conversion = value + self._unit_conversion = value + + @property + def include_sqlite(self): + """Get or set a boolean for whether a SQLite report should be generated.""" + return self._include_sqlite + + @include_sqlite.setter + def include_sqlite(self, value): + value = bool(value) + if value: + self.add_sql().apply() + else: + # if False, try to remove sql, if exists. 
+ for obj in self.idf.idfobjects["Output:SQLite".upper()]: + self.idf.removeidfobject(obj) + self._include_sqlite = value + + @property + def include_html(self): + """Get or set a boolean for whether an HTML report should be generated.""" + return self._include_html + + @include_html.setter + def include_html(self, value): + value = bool(value) + if value: + self.add_output_control().apply() + else: + # if False, try to remove html, if exists. + for obj in self.idf.idfobjects["OutputControl:Table:Style".upper()]: + obj.Column_Separator = "Comma" + self._include_html = value + + @property + def output_variables(self) -> tuple: + """Get or set a tuple of EnergyPlus simulation output variables.""" + return tuple(sorted(self._output_variables)) + + @output_variables.setter + def output_variables(self, value): + if value is not None: + assert not isinstance( + value, (str, bytes) + ), f"Expected list or tuple. Got {type(value)}." + values = [] + for output in value: + values.append(str(output)) + value = set(values) + else: + value = set() + self._output_variables = value + + @property + def output_meters(self): + """Get or set a tuple of EnergyPlus simulation output meters.""" + return tuple(sorted(self._output_meters)) + + @output_meters.setter + def output_meters(self, value): + if value is not None: + assert not isinstance( + value, (str, bytes) + ), f"Expected list or tuple. Got {type(value)}." + values = [] + for output in value: + values.append(str(output)) + value = set(values) + else: + value = set() + self._output_meters = value + + @property + def other_outputs(self): + """Get or set a list of outputs.""" + return self._other_outputs + + @other_outputs.setter + def other_outputs(self, value): + if value is not None: + assert all( + isinstance(item, dict) for item in value + ), f"Expected list of dict. Got {type(value)}."
+ values = [] + for output in value: + values.append(output) + value = values + else: + value = [] + self._other_outputs = value + + @property + def reporting_frequency(self): + """Get or set the reporting frequency of outputs. + + Choose from the following: + + * Annual + * Monthly + * Daily + * Hourly + * Timestep + """ + return self._reporting_frequency + + @reporting_frequency.setter + def reporting_frequency(self, value): + value = value.title() + assert value in self.REPORTING_FREQUENCIES, ( + f"reporting_frequency {value} is not recognized.\nChoose from the " + f"following:\n{self.REPORTING_FREQUENCIES}" + ) + self._reporting_frequency = value def add_custom(self, outputs): """Add custom-defined outputs as a list of objects. @@ -38,8 +273,14 @@ def add_custom(self, outputs): Returns: Outputs: self """ - if isinstance(outputs, list): - self._outputs.extend(outputs) + assert isinstance(outputs, Iterable), "outputs must be some sort of iterable" + for output in outputs: + if "meter" in output["key"].lower(): + self._output_meters.add(output) + elif "variable" in output["key"].lower(): + self._output_variables.add(output) + else: + self._other_outputs.append(output) return self def add_basics(self): @@ -47,7 +288,6 @@ def add_basics(self): return ( self.add_summary_report() .add_output_control() - .add_sql() .add_schedules() .add_meter_variables() ) @@ -55,8 +295,8 @@ def add_basics(self): def add_schedules(self): """Adds Schedules object""" outputs = [{"key": "Output:Schedules".upper(), **dict(Key_Field="Hourly")}] - - self._outputs.extend(outputs) + for output in outputs: + self._other_outputs.append(output) return self def add_meter_variables(self, format="IDF"): @@ -75,7 +315,8 @@ def add_meter_variables(self, format="IDF"): Outputs: self """ outputs = [dict(key="Output:VariableDictionary".upper(), Key_Field=format)] - self._outputs.extend(outputs) + for output in outputs: + self._other_outputs.append(output) return self def add_summary_report(self, 
summary="AllSummary"): @@ -105,8 +346,8 @@ def add_summary_report(self, summary="AllSummary"): **dict(Report_1_Name=summary), } ] - - self._outputs.extend(outputs) + for output in outputs: + self._other_outputs.append(output) return self def add_sql(self, sql_output_style="SimpleAndTabular"): @@ -125,9 +366,12 @@ def add_sql(self, sql_output_style="SimpleAndTabular"): Returns: Outputs: self """ - output = {"key": "Output:SQLite".upper(), **dict(Option_Type=sql_output_style)} + outputs = [ + {"key": "Output:SQLite".upper(), **dict(Option_Type=sql_output_style)} + ] - self._outputs.extend([output]) + for output in outputs: + self._other_outputs.append(output) return self def add_output_control(self, output_control_table_style="CommaAndHTML"): @@ -139,6 +383,17 @@ def add_output_control(self, output_control_table_style="CommaAndHTML"): Returns: Outputs: self """ + assert output_control_table_style in [ + "Comma", + "Tab", + "Fixed", + "HTML", + "XML", + "CommaAndHTML", + "TabAndHTML", + "XMLAndHTML", + "All", + ] outputs = [ { "key": "OutputControl:Table:Style".upper(), @@ -146,195 +401,52 @@ def add_output_control(self, output_control_table_style="CommaAndHTML"): } ] - self._outputs.extend(outputs) + for output in outputs: + self._other_outputs.append(output) return self def add_umi_template_outputs(self): """Adds the necessary outputs in order to create an UMI template.""" # list the outputs here - outputs = [ - { - "key": "Output:Variable".upper(), - **dict( - Variable_Name="Air System Total Heating Energy", - Reporting_Frequency="hourly", - ), - }, - { - "key": "Output:Variable".upper(), - **dict( - Variable_Name="Air System Total Cooling Energy", - Reporting_Frequency="hourly", - ), - }, - { - "key": "Output:Variable".upper(), - **dict( - Variable_Name="Zone Ideal Loads Zone Total Cooling Energy", - Reporting_Frequency="hourly", - ), - }, - { - "key": "Output:Variable".upper(), - **dict( - Variable_Name="Zone Ideal Loads Zone Total Heating Energy", - 
Reporting_Frequency="hourly", - ), - }, - { - "key": "Output:Variable".upper(), - **dict( - Variable_Name="Zone Thermostat Heating Setpoint Temperature", - Reporting_Frequency="hourly", - ), - }, - { - "key": "Output:Variable".upper(), - **dict( - Variable_Name="Zone Thermostat Cooling Setpoint Temperature", - Reporting_Frequency="hourly", - ), - }, - { - "key": "Output:Variable".upper(), - **dict( - Variable_Name="Heat Exchanger Total Heating Rate", - Reporting_Frequency="hourly", - ), - }, - { - "key": "Output:Variable".upper(), - **dict( - Variable_Name="Heat Exchanger Sensible Effectiveness", - Reporting_Frequency="hourly", - ), - }, - { - "key": "Output:Variable".upper(), - **dict( - Variable_Name="Heat Exchanger Latent Effectiveness", - Reporting_Frequency="hourly", - ), - }, - { - "key": "Output:Variable".upper(), - **dict( - Variable_Name="Water Heater Heating Energy", - Reporting_Frequency="hourly", - ), - }, - { - "key": "Output:Variable".upper(), - **dict( - Variable_Name="Air System Outdoor Air Minimum Flow Fraction", - Reporting_Frequency="hourly", - ), - }, - { - "key": "OUTPUT:METER", - **dict( - Key_Name="HeatRejection:EnergyTransfer", - Reporting_Frequency="hourly", - ), - }, - { - "key": "OUTPUT:METER", - **dict(Key_Name="Heating:EnergyTransfer", Reporting_Frequency="hourly"), - }, - { - "key": "OUTPUT:METER", - **dict(Key_Name="Cooling:EnergyTransfer", Reporting_Frequency="hourly"), - }, - { - "key": "OUTPUT:METER", - **dict( - Key_Name="Heating:DistrictHeating", Reporting_Frequency="hourly" - ), - }, - { - "key": "OUTPUT:METER", - **dict(Key_Name="Heating:Electricity", Reporting_Frequency="hourly"), - }, - { - "key": "OUTPUT:METER", - **dict(Key_Name="Heating:Gas", Reporting_Frequency="hourly"), - }, - { - "key": "OUTPUT:METER", - **dict( - Key_Name="Cooling:DistrictCooling", Reporting_Frequency="hourly" - ), - }, - { - "key": "OUTPUT:METER", - **dict(Key_Name="Cooling:Electricity", Reporting_Frequency="hourly"), - }, - { - "key": 
"OUTPUT:METER", - **dict(Key_Name="Cooling:Electricity", Reporting_Frequency="hourly"), - }, - { - "key": "OUTPUT:METER", - **dict(Key_Name="Cooling:Gas", Reporting_Frequency="hourly"), - }, - { - "key": "OUTPUT:METER", - **dict( - Key_Name="WaterSystems:EnergyTransfer", Reporting_Frequency="hourly" - ), - }, - { - "key": "OUTPUT:METER", - **dict(Key_Name="Fans:Electricity", Reporting_Frequency="hourly"), - }, - { - "key": "OUTPUT:METER", - **dict(Key_Name="Pumps:Electricity", Reporting_Frequency="hourly"), - }, - { - "key": "OUTPUT:METER", - **dict( - Key_Name="Refrigeration:Electricity", Reporting_Frequency="hourly" - ), - }, - { - "key": "OUTPUT:METER", - **dict( - Key_Name="Refrigeration:EnergyTransfer", - Reporting_Frequency="hourly", - ), - }, - { - "key": "Output:Meter".upper(), - **dict( - Key_Name="HeatingCoils:EnergyTransfer", - Reporting_Frequency="hourly", - ), - }, - { - "key": "Output:Meter".upper(), - **dict( - Key_Name="Baseboard:EnergyTransfer", - Reporting_Frequency="hourly", - ), - }, - { - "key": "Output:Meter".upper(), - **dict( - Key_Name="HeatRejection:Electricity", - Reporting_Frequency="hourly", - ), - }, - { - "key": "Output:Meter".upper(), - **dict( - Key_Name="CoolingCoils:EnergyTransfer", - Reporting_Frequency="hourly", - ), - }, + variables = [ + "Air System Outdoor Air Minimum Flow Fraction", + "Air System Total Cooling Energy", + "Air System Total Heating Energy", + "Heat Exchanger Latent Effectiveness", + "Heat Exchanger Sensible Effectiveness", + "Heat Exchanger Total Heating Rate", + "Water Heater Heating Energy", + "Zone Ideal Loads Zone Total Cooling Energy", + "Zone Ideal Loads Zone Total Heating Energy", + "Zone Thermostat Cooling Setpoint Temperature", + "Zone Thermostat Heating Setpoint Temperature", ] + for output in variables: + self._output_variables.add(output) - self._outputs.extend(outputs) + meters = [ + "Baseboard:EnergyTransfer", + "Cooling:DistrictCooling", + "Cooling:Electricity", + "Cooling:Electricity", + 
"Cooling:EnergyTransfer", + "Cooling:Gas", + "CoolingCoils:EnergyTransfer", + "Fans:Electricity", + "HeatRejection:Electricity", + "HeatRejection:EnergyTransfer", + "Heating:DistrictHeating", + "Heating:Electricity", + "Heating:EnergyTransfer", + "Heating:Gas", + "HeatingCoils:EnergyTransfer", + "Pumps:Electricity", + "Refrigeration:Electricity", + "Refrigeration:EnergyTransfer", + "WaterSystems:EnergyTransfer", + ] + for meter in meters: + self._output_meters.add(meter) return self def add_dxf(self): @@ -344,90 +456,237 @@ def add_dxf(self): **dict(Report_Type="DXF", Report_Specifications_1="ThickPolyline"), } ] - self._outputs.extend(outputs) + for output in outputs: + self._other_outputs.append(output) return self - def add_umi_ouputs(self): + def add_umi_outputs(self): """Adds the necessary outputs in order to return the same energy profile as in UMI. """ # list the outputs here outputs = [ - { - "key": "Output:Variable".upper(), - **dict( - Variable_Name="Air System Total Heating Energy", - Reporting_Frequency="hourly", - ), - }, - { - "key": "Output:Variable".upper(), - **dict( - Variable_Name="Air System Total Cooling Energy", - Reporting_Frequency="hourly", - ), - }, - { - "key": "Output:Variable".upper(), - **dict( - Variable_Name="Zone Ideal Loads Zone Total Cooling Energy", - Reporting_Frequency="hourly", - ), - }, - { - "key": "Output:Variable".upper(), - **dict( - Variable_Name="Zone Ideal Loads Zone Total Heating Energy", - Reporting_Frequency="hourly", - ), - }, - { - "key": "Output:Variable".upper(), - **dict( - Variable_Name="Water Heater Heating Energy", - Reporting_Frequency="hourly", - ), - }, + "Air System Total Heating Energy", + "Air System Total Cooling Energy", + "Zone Ideal Loads Zone Total Cooling Energy", + "Zone Ideal Loads Zone Total Heating Energy", + "Water Heater Heating Energy", ] + for output in outputs: + self._output_variables.add(output) + return self - self._outputs.extend(outputs) + def 
add_sensible_heat_gain_summary_components(self): + hvac_input_sensible_air_heating = [ + "Zone Air Heat Balance System Air Transfer Rate", + "Zone Air Heat Balance System Convective Heat Gain Rate", + ] + hvac_input_sensible_air_cooling = [ + "Zone Air Heat Balance System Air Transfer Rate", + "Zone Air Heat Balance System Convective Heat Gain Rate", + ] + + hvac_input_heated_surface_heating = [ + "Zone Radiant HVAC Heating Energy", + "Zone Ventilated Slab Radiant Heating Energy", + ] + + hvac_input_cooled_surface_cooling = [ + "Zone Radiant HVAC Cooling Energy", + "Zone Ventilated Slab Radiant Cooling Energy", + ] + people_sensible_heat_addition = ["Zone People Sensible Heating Energy"] + + lights_sensible_heat_addition = ["Zone Lights Total Heating Energy"] + + equipment_sensible_heat_addition_and_equipment_sensible_heat_removal = [ + "Zone Electric Equipment Radiant Heating Energy", + "Zone Gas Equipment Radiant Heating Energy", + "Zone Steam Equipment Radiant Heating Energy", + "Zone Hot Water Equipment Radiant Heating Energy", + "Zone Other Equipment Radiant Heating Energy", + "Zone Electric Equipment Convective Heating Energy", + "Zone Gas Equipment Convective Heating Energy", + "Zone Steam Equipment Convective Heating Energy", + "Zone Hot Water Equipment Convective Heating Energy", + "Zone Other Equipment Convective Heating Energy", + ] + + window_heat_addition_and_window_heat_removal = [ + "Zone Windows Total Heat Gain Energy" + ] + + interzone_air_transfer_heat_addition_and_interzone_air_transfer_heat_removal = [ + "Zone Air Heat Balance Interzone Air Transfer Rate" + ] + + infiltration_heat_addition_and_infiltration_heat_removal = [ + "Zone Air Heat Balance Outdoor Air Transfer Rate" + ] + + tuple(map(self._output_variables.add, hvac_input_sensible_air_heating)) + tuple(map(self._output_variables.add, hvac_input_sensible_air_cooling)) + tuple(map(self._output_variables.add, hvac_input_heated_surface_heating)) + tuple(map(self._output_variables.add, 
hvac_input_cooled_surface_cooling)) + tuple(map(self._output_variables.add, people_sensible_heat_addition)) + tuple(map(self._output_variables.add, lights_sensible_heat_addition)) + tuple( + map( + self._output_variables.add, + equipment_sensible_heat_addition_and_equipment_sensible_heat_removal, + ) + ) + tuple( + map( + self._output_variables.add, window_heat_addition_and_window_heat_removal + ) + ) + tuple( + map( + self._output_variables.add, + interzone_air_transfer_heat_addition_and_interzone_air_transfer_heat_removal, + ) + ) + tuple( + map( + self._output_variables.add, + infiltration_heat_addition_and_infiltration_heat_removal, + ) + ) + + # The Opaque Surface Conduction and Other Heat Addition and Opaque Surface Conduction and Other Heat Removal + # columns are also calculated on an timestep basis as the negative value of the other removal and gain columns + # so that the total for the timestep sums to zero. These columns are derived strictly from the other columns. + + def add_end_use_balance_components(self): + for group in [ + EndUseBalance.HVAC_INPUT_SENSIBLE, + EndUseBalance.HVAC_INPUT_HEATED_SURFACE, + EndUseBalance.HVAC_INPUT_COOLED_SURFACE, + EndUseBalance.LIGHTING, + EndUseBalance.EQUIP_GAINS, + EndUseBalance.PEOPLE_GAIN, + EndUseBalance.SOLAR_GAIN, + EndUseBalance.INFIL_GAIN, + EndUseBalance.INFIL_LOSS, + EndUseBalance.VENTILATION_LOSS, + EndUseBalance.VENTILATION_GAIN, + EndUseBalance.NAT_VENT_GAIN, + EndUseBalance.NAT_VENT_LOSS, + EndUseBalance.MECHANICAL_VENT_GAIN, + EndUseBalance.MECHANICAL_VENT_LOSS, + EndUseBalance.OPAQUE_ENERGY_FLOW, + EndUseBalance.OPAQUE_ENERGY_STORAGE, + EndUseBalance.WINDOW_LOSS, + EndUseBalance.WINDOW_GAIN, + EndUseBalance.HEAT_RECOVERY_LOSS, + EndUseBalance.HEAT_RECOVERY_GAIN + ]: + for item in group: + self._output_variables.add(item) return self - def add_profile_gas_elect_ouputs(self): + def add_load_balance_components(self): + + for group in [ + self.COOLING, + self.HEATING, + self.LIGHTING, + 
self.ELECTRIC_EQUIP, + self.GAS_EQUIP, + self.HOT_WATER, + self.PEOPLE_GAIN, + self.SOLAR_GAIN, + self.INFIL_GAIN, + self.INFIL_LOSS, + self.VENT_LOSS, + self.VENT_GAIN, + self.NAT_VENT_GAIN, + self.NAT_VENT_LOSS, + self.OPAQUE_ENERGY_FLOW, + self.WINDOW_LOSS, + self.WINDOW_GAIN, + ]: + for item in group: + self._output_variables.add(item) + + def add_profile_gas_elect_outputs(self): """Adds the following meters: Electricity:Facility, Gas:Facility, WaterSystems:Electricity, Heating:Electricity, Cooling:Electricity """ # list the outputs here outputs = [ - { - "key": "OUTPUT:METER", - **dict(Key_Name="Electricity:Facility", Reporting_Frequency="hourly"), - }, - { - "key": "OUTPUT:METER", - **dict(Key_Name="Gas:Facility", Reporting_Frequency="hourly"), - }, - { - "key": "OUTPUT:METER", - **dict( - Key_Name="WaterSystems:Electricity", Reporting_Frequency="hourly" - ), - }, - { - "key": "OUTPUT:METER", - **dict(Key_Name="Heating:Electricity", Reporting_Frequency="hourly"), - }, - { - "key": "OUTPUT:METER", - **dict(Key_Name="Cooling:Electricity", Reporting_Frequency="hourly"), - }, + "Electricity:Facility", + "Gas:Facility", + "WaterSystems:Electricity", + "Heating:Electricity", + "Cooling:Electricity", ] - self._outputs.extend(outputs) + for output in outputs: + self._output_meters.add(output) return self + def add_hvac_energy_use(self): + """Add outputs for HVAC energy use when detailed systems are assigned. + + This includes a range of outputs for different pieces of equipment, + which is meant to catch all energy-consuming parts of a system. + (eg. chillers, boilers, coils, humidifiers, fans, pumps). 
+ """ + outputs = [ + "Baseboard Electricity Energy", + "Boiler NaturalGas Energy", + "Chiller Electricity Energy", + "Chiller Heater System Cooling Electricity Energy", + "Chiller Heater System Heating Electricity Energy", + "Cooling Coil Electricity Energy", + "Cooling Tower Fan Electricity Energy", + "District Cooling Chilled Water Energy", + "District Heating Hot Water Energy", + "Evaporative Cooler Electricity Energy", + "Fan Electricity Energy", + "Heating Coil Electricity Energy", + "Heating Coil NaturalGas Energy", + "Heating Coil Total Heating Energy", + "Hot_Water_Loop_Central_Air_Source_Heat_Pump Electricity Consumption", + "Humidifier Electricity Energy", + "Pump Electricity Energy", + "VRF Heat Pump Cooling Electricity Energy", + "VRF Heat Pump Crankcase Heater Electricity Energy", + "VRF Heat Pump Defrost Electricity Energy", + "VRF Heat Pump Heating Electricity Energy", + "Zone VRF Air Terminal Cooling Electricity Energy", + "Zone VRF Air Terminal Heating Electricity Energy", + ] + for output in outputs: + self._output_variables.add(output) + def apply(self): """Applies the outputs to the idf model. 
Modifies the model by calling :meth:`~archetypal.idfclass.idf.IDF.newidfobject`""" - for output in self._outputs: + for output in self.output_variables: + self.idf.newidfobject( + key="Output:Variable".upper(), + **dict( + Variable_Name=output, Reporting_Frequency=self.reporting_frequency + ), + ) + for meter in self.output_meters: + self.idf.newidfobject( + key="Output:Meter".upper(), + **dict(Key_Name=meter, Reporting_Frequency=self.reporting_frequency), + ) + for output in self.other_outputs: + key = output.pop("key", None) + if key: + output["key"] = key.upper() self.idf.newidfobject(**output) return self + + def __repr__(self): + variables = "OutputVariables:\n {}".format("\n ".join(self.output_variables)) + meters = "OutputMeters:\n {}".format("\n ".join(self.output_meters)) + outputs = "Outputs:\n {}".format( + "\n ".join((a["key"] for a in self.other_outputs)) + ) + return "\n".join([variables, meters, outputs]) diff --git a/archetypal/idfclass/sql.py b/archetypal/idfclass/sql.py new file mode 100644 index 00000000..0878386f --- /dev/null +++ b/archetypal/idfclass/sql.py @@ -0,0 +1,496 @@ +"""Module for parsing EnergyPlus SQLite result files into DataFrames.""" +import logging +from datetime import timedelta +from sqlite3 import connect +from typing import List, Optional, Sequence, Union + +import numpy as np +import pandas as pd +from energy_pandas import EnergyDataFrame +from pandas import to_datetime +from path import Path +from typing_extensions import Literal + +_REPORTING_FREQUENCIES = Literal[ + "HVAC System Timestep", + "Zone Timestep", + "Hourly", + "Daily", + "Monthly", + "Run Period", +] + + +class SqlOutput: + """Represents a single output from the Sql file.""" + + __slots__ = ( + "_file_path", + "output_name", + "reporting_frequency", + ) + + def __init__(self, file_path, output_name, reporting_frequency): + self._file_path = file_path + self.output_name = output_name + self.reporting_frequency = reporting_frequency + + def values(self, 
environment_type: int = 3, units: str = None) -> EnergyDataFrame: + """Get the time series values as an EnergyDataFrame. + + Args: + environment_type (int): The environment type (1 = Design Day, 2 = Design + Run Period, 3 = Weather Run Period) for the series. + units (str): Convert original values to another unit. The original unit + is detected automatically and a dimensionality check is performed. + + Returns: + (EnergyDataFrame): The time series as an EnergyDataFrame. + """ + cols = ( + "ReportDataDictionaryIndex, IndexGroup, KeyValue, Name, " + "Units, ReportingFrequency" + ) + query = f""" + SELECT {cols} + FROM ReportDataDictionary + WHERE Name=@output_name + AND ReportingFrequency=@reporting_frequency; + """ + with connect(self._file_path) as conn: + header_rows = pd.read_sql( + query, + conn, + params={ + "output_name": self.output_name, + "reporting_frequency": self.reporting_frequency, + }, + ) + header_rows.set_index("ReportDataDictionaryIndex", inplace=True) + # extract all data of the relevant type from ReportData + rel_indices = tuple(header_rows.index.to_list()) + data = _extract_timeseries(conn, environment_type, header_rows, rel_indices) + + if units is not None: + data = data.to_units(units) + + return data + + +class _SqlOutputs: + """Represents all the available outputs from the Sql file.""" + + def __init__(self, file_path: str, available_outputs: List[tuple]): + self._available_outputs = available_outputs + self._properties = {} + + for output, reporting_frequency in self._available_outputs: + name = ( + output.replace(":", "__").replace(" ", "_") + f"_{reporting_frequency}" + ) + self._properties[name] = SqlOutput(file_path, output, reporting_frequency) + setattr(self, name, self._properties[name]) + + def __getitem__(self, meter_name): + """Get item by key.""" + return self._properties[meter_name] + + +class Sql: + """Object for parsing EnergyPlus SQLite result files into DataFrames. 
+ + Args: + file_path: Full path to an SQLite file that was generated by EnergyPlus. + + Properties: + * file_path + * available_outputs + * zone_info + * environment_periods + """ + + _reporting_frequencies = ( + "HVAC System Timestep", + "Zone Timestep", + "Hourly", + "Daily", + "Monthly", + "Run Period", + ) + + def __init__(self, file_path): + """Initialize SQLiteResult""" + assert Path(file_path).exists(), "No file was found at {}".format(file_path) + self._file_path = file_path + + # values to be computed as soon as they are requested + self._available_outputs = None + self._zone_info = None + self._environment_periods = None + self._tabular_data_keys = None + self._outputs = None + self._surfaces_table = None + self._constructions_table = None + + @property + def file_path(self): + """Get the path to the .sql file.""" + return self._file_path + + @property + def tabular_data_keys(self): + """Get tuples of (ReportName, TableName, ReportForString) from tabular data.""" + if self._tabular_data_keys is None: + with connect(self.file_path) as conn: + query = "SELECT DISTINCT ReportName, TableName, ReportForString FROM TabularDataWithStrings" + data = conn.execute(query) + self._tabular_data_keys = data.fetchall() + return self._tabular_data_keys + + @property + def available_outputs(self) -> List[tuple]: + """Get tuples (OutputName, ReportingFrequency) that can be requested. + + Any of these outputs when input to data_collections_by_output_name will + yield a result with data collections. + """ + if self._available_outputs is None: + self._available_outputs = self._extract_available_outputs() + return self._available_outputs + + @property + def outputs(self): + if self._outputs is None: + self._outputs = _SqlOutputs(self.file_path, self.available_outputs) + return self._outputs + + @property + def zone_info(self): + """Get a list of strings for available timeseries outputs that can be requested. 
+ + Any of these outputs when input to data_collections_by_output_name will + yield a result with data collections. + """ + if self._zone_info is None: + self._zone_info = self._extract_zone_info() + return self._zone_info + + @property + def surfaces_table(self): + if self._surfaces_table is None: + self._surfaces_table = self._extract_surfaces_table() + return self._surfaces_table + + @property + def constructions_table(self): + if self._constructions_table is None: + self._constructions_table = self._extract_constructions_table() + return self._constructions_table + + @property + def environment_periods(self): + """Get a list of environment periods for the simulation run periods. + + EnvironmentType: An enumeration of the environment type. (1 = Design Day, + 2 = Design Run Period, 3 = Weather Run Period). + """ + if self._environment_periods is None: + self._environment_periods = self._extract_environment_periods() + return self._environment_periods + + def full_html_report(self): + """Get the html report as a dictionary of DataFrames. + + The dictionary keys are tuples of ("ReportName", "TableName", + "ReportForString"). + """ + with connect(self.file_path) as conn: + cols = ( + "ReportName, TableName, ReportForString, ColumnName, RowName, " + "Units, Value" + ) + query = f"SELECT {cols} FROM TabularDataWithStrings" + data = pd.read_sql(query, conn) + + data.RowName = data.RowName.replace({"": np.NaN, "-": np.NaN}) + data.dropna(subset=["RowName"], inplace=True) + + all_df = {} + for name, df in data.groupby(["ReportName", "TableName", "ReportForString"]): + try: + pivoted = df.pivot( + columns=["ColumnName", "Units"], index="RowName", values="Value" + ) + except ValueError: + # Cannot pivot; return long form + pivoted = df + # apply to_numeric column-wise + pivoted = pivoted.apply(pd.to_numeric, errors="ignore") + + # insert in final dict. 
+                all_df[name] = pivoted
+
+        return all_df
+
+    def timeseries_by_name(
+        self,
+        variable_or_meter: Union[str, Sequence],
+        reporting_frequency: Union[_REPORTING_FREQUENCIES] = "Hourly",
+        environment_type: Union[Literal[1, 2, 3]] = 3,
+    ) -> EnergyDataFrame:
+        """Get an EnergyDataFrame for specified meters and/or variables.
+
+        The returned DataFrame has a column MultiIndex with levels ["IndexGroup",
+        "KeyValue", "Name"]. KeyValue corresponds to the zone name for variables
+        while KeyValue is None for meters.
+
+        Note that if an output name is not available for the reporting
+        frequency or the environment type, the DataFrame can be empty.
+
+        Args:
+            variable_or_meter (str or list): The name of an EnergyPlus output meter or
+                variable to be retrieved from the SQLite result file. This can also be an
+                array of output names for which all data collections should be retrieved.
+            reporting_frequency (str): The reporting interval. One of ("HVAC System
+                Timestep", "Zone Timestep", "Hourly", "Daily", "Monthly" or "Run
+                Period"
+            environment_type (int): The environment type. (1 = Design Day, 2 = Design
+                Run Period, 3 = Weather Run Period). Default = 3.
+
+        Returns:
+            EnergyDataFrame: An EnergyDataFrame with the variable_or_meter as columns.
+        """
+        reporting_frequency = reporting_frequency.title()
+        assert (
+            reporting_frequency in Sql._reporting_frequencies
+        ), f"reporting_frequency is not one of {Sql._reporting_frequencies}"
+        with connect(self.file_path) as conn:
+            cols = "ReportDataDictionaryIndex, IndexGroup, KeyValue, Name, Units, ReportingFrequency"
+            if isinstance(variable_or_meter, str):  # assume it's a single output
+                if (
+                    variable_or_meter,
+                    reporting_frequency,
+                ) not in self.available_outputs:
+                    logging.warning(
+                        f"{(variable_or_meter, reporting_frequency)} not "
+                        f"an available output in the Sql file."
+ ) + query = f""" + SELECT {cols} + FROM ReportDataDictionary + WHERE Name=@output_name + AND ReportingFrequency=@reporting_frequency; + """ + header_rows = pd.read_sql( + query, + conn, + params={ + "output_name": variable_or_meter, + "reporting_frequency": reporting_frequency, + }, + ) + elif len(variable_or_meter) == 1: # assume it's a list + query = f""" + SELECT {cols} + FROM ReportDataDictionary + WHERE Name=@output_name + AND ReportingFrequency=@reporting_frequency; + """ + header_rows = pd.read_sql( + query, + conn, + params={ + "output_name": variable_or_meter[0], + "reporting_frequency": reporting_frequency, + }, + ) + else: # assume it is a list of outputs + query = f""" + SELECT {cols} + FROM ReportDataDictionary + WHERE Name IN {tuple(variable_or_meter)} + AND ReportingFrequency=@reporting_frequency;""" + header_rows = pd.read_sql( + query, + conn, + params={ + "reporting_frequency": reporting_frequency, + }, + ) + # if nothing was found, return an empty DataFrame + if len(header_rows) == 0: + return EnergyDataFrame([]) + else: + header_rows.set_index("ReportDataDictionaryIndex", inplace=True) + + # extract all data of the relevant type from ReportData + rel_indices = tuple(header_rows.index.to_list()) + data = _extract_timeseries(conn, environment_type, header_rows, rel_indices) + print(f"collected data for {variable_or_meter}") + return data + + def tabular_data_by_name( + self, report_name: str, table_name: str, report_for_string: Optional[str] = None + ) -> pd.DataFrame: + """Get (ReportName, TableName) data as DataFrame. + + Args: + report_name (str): The name of the report. + table_name (str): The name of the table in the report. + report_for_string (str): The “For” string. + + Returns: + (pd.DataFrame): A DataFrame. 
+ """ + with connect(self.file_path) as conn: + cols = "RowName, ColumnName, Value, Units" + query = f""" + SELECT {cols} FROM TabularDataWithStrings + WHERE + (@report_name IS NULL OR ReportName=@report_name) + AND + (@table_name IS NULL OR TableName=@table_name) + AND + (@report_for_string IS NULL OR ReportForString=@report_for_string); + """ + data = pd.read_sql( + query, + conn, + params={ + "report_name": report_name, + "table_name": table_name, + "report_for_string": report_for_string, + }, + ) + try: + pivoted = data.pivot( + index="RowName", columns=["ColumnName", "Units"], values="Value" + ) + except ValueError: + # Cannot pivot; return long-form DataFrame + pivoted = data + logging.warning( + f"{(report_name, table_name, report_name)} cannot be " + f"pivoted as RowName and ColumnName. The long-form " + f"DataFrame has been returned." + ) + pivoted = pivoted.apply(pd.to_numeric, errors="ignore") + return pivoted + + def _extract_available_outputs(self) -> List: + """Extract the list of all available outputs from the SQLite file.""" + with connect(self.file_path) as conn: + cols = "Name, ReportingFrequency" + query = f"SELECT DISTINCT {cols} FROM ReportDataDictionary" + data = conn.execute(query) + return data.fetchall() + + def _extract_zone_info(self): + """Extract the Zones table from the SQLite file.""" + with connect(self.file_path) as conn: + query = "SELECT * from Zones" + df = pd.read_sql(query, conn).set_index("ZoneIndex") + return df + + def _extract_surfaces_table(self): + """Extract the Zones table from the SQLite file.""" + with connect(self.file_path) as conn: + query = "SELECT * from Surfaces" + df = pd.read_sql(query, conn).set_index(["ZoneIndex", "SurfaceIndex"]) + return df + + def _extract_constructions_table(self): + with connect(self.file_path) as conn: + query = "SELECT * from Constructions" + df = pd.read_sql(query, conn).set_index("ConstructionIndex") + return df + + def _extract_environment_periods(self): + """Extract the 
EnvironmentPeriods table from the SQLite file.""" + with connect(self.file_path) as conn: + query = "SELECT * from EnvironmentPeriods" + df = pd.read_sql(query, conn).set_index("EnvironmentPeriodIndex") + return df + + +def _extract_timeseries( + conn, environment_type, header_rows, rel_indices +) -> EnergyDataFrame: + """Extract time series given indices.""" + if len(rel_indices) == 1: + data = pd.read_sql( + """SELECT rd.Value, + rd.ReportDataDictionaryIndex, + t.Month, + t.Day, + t.Hour, + t.Minute, + t.Interval + FROM ReportData as rd + LEFT JOIN Time As t ON rd.TimeIndex = t.TimeIndex + LEFT JOIN EnvironmentPeriods as p ON t.EnvironmentPeriodIndex = p.EnvironmentPeriodIndex + WHERE ReportDataDictionaryIndex=@rel_indices + AND (IFNULL(t.WarmupFlag, 0) = @warmup_flag) + AND p.EnvironmentType = @environment_type + ORDER BY t.TimeIndex;""", + conn, + params={ + "rel_indices": rel_indices[0], + "warmup_flag": 0, + "environment_type": environment_type, + }, + ) + else: + data = pd.read_sql( + f"""SELECT rd.Value, + rd.ReportDataDictionaryIndex, + t.Month, + t.Day, + t.Hour, + t.Minute, + t.Interval + FROM ReportData as rd + LEFT JOIN Time As t ON rd.TimeIndex = t.TimeIndex + LEFT JOIN EnvironmentPeriods as p ON t.EnvironmentPeriodIndex = p.EnvironmentPeriodIndex + WHERE ReportDataDictionaryIndex IN {tuple(rel_indices)} + AND (IFNULL(t.WarmupFlag, 0) = @warmup_flag) + AND p.EnvironmentType = @environment_type + ORDER BY rd.ReportDataDictionaryIndex, t.TimeIndex;""", + conn, + params={"warmup_flag": 0, "environment_type": environment_type}, + ) + # Join the header_rows on ReportDataDictionaryIndex + data = data.join( + header_rows[["IndexGroup", "KeyValue", "Name"]], + on="ReportDataDictionaryIndex", + ) + # Pivot the data so that ["Name", "KeyValue"] becomes the column MultiIndex. 
+ data = data.pivot( + index=["Month", "Day", "Hour", "Minute", "Interval"], + columns=["IndexGroup", "KeyValue", "Name"], + values="Value", + ) + # reset the index to prepare the DatetimeIndex + date_time_names = data.index.names + data.reset_index(inplace=True) + index = to_datetime( + { + "year": 2018, + "month": data.Month, + "day": data.Day, + "hour": data.Hour, + "minute": data.Minute, + } + ) + # Adjust timeindex by timedelta + index -= data["Interval"].apply(lambda x: timedelta(minutes=x)) + index = pd.DatetimeIndex(index, freq="infer") + # get data + data = data.drop(columns=date_time_names, level="IndexGroup") + data.index = index + # Create the EnergyDataFrame and set the units using dict + data = EnergyDataFrame(data) + data.units = header_rows.set_index(["IndexGroup", "KeyValue", "Name"])[ + "Units" + ].to_dict() + return data diff --git a/archetypal/idfclass/util.py b/archetypal/idfclass/util.py index ea5e6484..9920a0d8 100644 --- a/archetypal/idfclass/util.py +++ b/archetypal/idfclass/util.py @@ -102,8 +102,10 @@ def get_idf_version(file, doted=True): else: versionid = ver_block[1].replace(".", "-") + "-0" except IndexError: - raise Exception("The IDF model does not contain a 'Version' object. " - "Specify file_version= in the IDF() constructor.") + raise Exception( + "The IDF model does not contain a 'Version' object. " + "Specify file_version= in the IDF() constructor." 
+ ) except Exception as e: log('Version id for file "{}" cannot be found'.format(file)) log("{}".format(e)) diff --git a/archetypal/idfclass/variables.py b/archetypal/idfclass/variables.py index 2a1a22be..3360fa13 100644 --- a/archetypal/idfclass/variables.py +++ b/archetypal/idfclass/variables.py @@ -1,5 +1,6 @@ """EnergyPlus variables module.""" import logging +from typing import Iterable import pandas as pd from energy_pandas import EnergyDataFrame, EnergySeries @@ -7,6 +8,7 @@ from archetypal.idfclass.extensions import bunch2db from archetypal.reportdata import ReportData +from archetypal.utils import log class Variable: @@ -74,8 +76,10 @@ def values( # the environment_type is specified by the simulationcontrol. try: for ctrl in self._idf.idfobjects["SIMULATIONCONTROL"]: - if ctrl.Run_Simulation_for_Weather_File_Run_Periods.lower() \ - == "yes": + if ( + ctrl.Run_Simulation_for_Weather_File_Run_Periods.lower() + == "yes" + ): environment_type = 3 else: environment_type = 1 @@ -111,16 +115,71 @@ def __init__(self, idf, variables_dict: dict): self._properties = {} for i, variable in variables_dict.items(): - variable_name = ( - variable["Variable_Name"].replace(":", "__").replace(" ", "_") - ) + variable_name = self.normalize_output_name(variable["Variable_Name"]) self._properties[variable_name] = Variable(idf, variable) setattr(self, variable_name, self._properties[variable_name]) + def normalize_output_name(self, variable): + return variable.replace(":", "__").replace(" ", "_") + def __getitem__(self, variable_name): """Get item by key.""" return self._properties[variable_name] + def collect_by_output_name( + self, + output_name, + reporting_frequency="Hourly", + units=None, + environment_type=None, + normalize=False, + sort_values=False, + ): + """ + + Args: + output_name: The name of an EnergyPlus output to be retrieved from the SQLite result file. This can also + be an array of output names for which all data collections should be retrieved. 
+ + Returns: + + """ + if isinstance(output_name, str): # assume one output + output_name = self.normalize_output_name(output_name) + try: + out = self[output_name].values( + units=units, + reporting_frequency=reporting_frequency, + environment_type=environment_type, + normalize=normalize, + sort_values=sort_values, + ) + except KeyError: + log(f"{output_name} not available as an output for this model.") + out = EnergyDataFrame([]) + return out + elif isinstance(output_name, Iterable): + output_values = {} + for an_output_name in output_name: + try: + out = self[self.normalize_output_name(an_output_name)].values( + units=units, + reporting_frequency=reporting_frequency, + environment_type=environment_type, + normalize=normalize, + sort_values=sort_values, + ) + except KeyError: + log(f"{output_name} not available as an output for this model.") + else: + output_values[an_output_name] = out + if not output_values: + return EnergyDataFrame([]) + else: + return pd.concat( + output_values, axis=1, names=["OutputVariable", "Key_Name"] + ) + class Variables: """Lists available variables in the IDF model. 
diff --git a/archetypal/schedule.py b/archetypal/schedule.py index 89cd6a1c..fc76894d 100644 --- a/archetypal/schedule.py +++ b/archetypal/schedule.py @@ -5,11 +5,13 @@ import logging as lg from datetime import datetime, timedelta from itertools import groupby +from typing import FrozenSet, Union, List import numpy as np import pandas as pd from energy_pandas import EnergySeries from eppy.bunch_subclass import BadEPFieldError +from typing_extensions import Literal from validator_collection import checkers, validators from archetypal.utils import log @@ -1112,11 +1114,11 @@ class Schedule: def __init__( self, - Name, - start_day_of_the_week=0, - strict=False, - Type=None, - Values=None, + Name: str, + start_day_of_the_week: FrozenSet[Literal[0, 1, 2, 3, 4, 5, 6]] = 0, + strict: bool = False, + Type: Union[str, ScheduleTypeLimits] = None, + Values: Union[List[Union[int, float]], np.ndarray] = None, **kwargs, ): """Initialize object. @@ -1195,16 +1197,15 @@ def Name(self, value): self._name = value @classmethod - def from_values(cls, Name, Values, Type="Fraction", **kwargs): - """Create a Schedule from a list of Values. 
- - Args: - Name: - Values: - Type: - **kwargs: - """ - return cls(Name=Name, Values=Values, Type="Fraction", **kwargs) + def from_values( + cls, + Name: str, + Values: List[Union[float, int]], + Type: str = "Fraction", + **kwargs, + ): + """Create a Schedule from a list of Values.""" + return cls(Name=Name, Values=Values, Type=Type, **kwargs) @classmethod def from_epbunch(cls, epbunch, strict=False, Type=None, **kwargs): @@ -1294,7 +1295,11 @@ def series(self): index = pd.date_range( start=self.startDate, periods=self.all_values.size, freq="1H" ) - return EnergySeries(self.all_values, index=index, name=self.Name) + if self.Type is not None: + units = self.Type.UnitType + else: + units = None + return EnergySeries(self.all_values, index=index, name=self.Name, units=units) @staticmethod def get_schedule_type_limits_name(epbunch): @@ -1312,6 +1317,57 @@ def startDate(self): year = get_year_for_first_weekday(self.startDayOfTheWeek) return datetime(year, 1, 1) + def scale(self, diversity=0.1): + """Scale the schedule values by a diversity factor around the average.""" + average = np.average(self.Values) + new_values = ((average - self.Values) * diversity) + self.Values + + self.Values = new_values + return self + + def replace(self, new_values: Union[pd.Series]): + """Replace values with new values while keeping the full load hours constant. + + Time steps that are not specified in `new_values` will be adjusted to keep + the full load hours of the schedule constant. No check whether the new schedule + stays between the bounds set by self.Type is done. Be aware. + + """ + assert isinstance(new_values.index, pd.DatetimeIndex), ( + "The index of `new_values` must be a `pandas.DatetimeIndex`. Instead, " + f"`{type(new_values.index)}` was provided." + ) + assert not self.series.index.difference(new_values.index).empty, ( + "There is no overlap between self.index and new_values.index. Please " + "check your dates." + ) + + # create a copy of self.series as orig. 
+ orig = self.series.copy() + + new_data = new_values.values + + # get the new_values index + idx = new_values.index + + # compute the difference in values with the original data and the new data. + diff = orig.loc[idx] - new_data.reshape(-1) + + # replace the original data with new values at their location. + orig.loc[idx] = new_values + + # adjust remaining time steps with the average difference. Inplace. + orig.loc[orig.index.difference(idx)] += diff.sum() / len( + orig.index.difference(idx) + ) + new = orig + + # assert the sum has not changed as a sanity check. + np.testing.assert_array_almost_equal(self.series.sum(), new.sum()) + + # replace values of self with new values. + self.Values = new.tolist() + def plot(self, **kwargs): """Plot the schedule. Implements the .loc accessor on the series object. @@ -1334,7 +1390,22 @@ def plot(self, **kwargs): def plot2d(self, **kwargs): """Plot the carpet plot of the schedule.""" - return self.series.plot2d(**kwargs) + if self.Type is not None: + vmin = self.Type.LowerLimit + vmax = self.Type.UpperLimit + else: + vmin, vmax = (None, None) + + pretty_plot_kwargs = { + "cmap": "Greys", + "show": True, + "figsize": (7, 2), + "dpi": 72, + "vmin": vmin, + "vmax": vmax, + } + pretty_plot_kwargs.update(kwargs) + return self.series.plot2d(**pretty_plot_kwargs) plot2d.__doc__ += EnergySeries.plot2d.__doc__ @@ -1489,7 +1560,14 @@ def __mul__(self, other): def _repr_svg_(self): """SVG representation for iPython notebook.""" - fig, ax = self.series.plot2d(cmap="Greys", show=False, figsize=(7, 2), dpi=72) + if self.Type is not None: + vmin = self.Type.LowerLimit + vmax = self.Type.UpperLimit + else: + vmin, vmax = (None, None) + fig, ax = self.series.plot2d( + cmap="Greys", show=False, figsize=(7, 2), dpi=72, vmin=vmin, vmax=vmax + ) f = io.BytesIO() fig.savefig(f, format="svg") return f.getvalue() @@ -1569,8 +1647,16 @@ def _how(how): return "max" -def get_year_for_first_weekday(weekday=0): - """Get the year that starts on 
'weekday', eg. Monday=0.""" +def get_year_for_first_weekday(weekday: FrozenSet[Literal[0, 1, 2, 3, 4, 5, 6]] = 0): + """Get the year that starts on 'weekday', eg. Monday=0. + + Args: + weekday (int): 0-based day of week (Monday=0). Default is + None which looks for the start day in the IDF model. + + Returns: + (int): The year number for which the first starts on :attr:`weekday`. + """ import calendar if weekday > 6: diff --git a/archetypal/settings.py b/archetypal/settings.py index fd6a2956..31e25457 100644 --- a/archetypal/settings.py +++ b/archetypal/settings.py @@ -128,7 +128,12 @@ from energy_pandas.units import unit_registry -unit_registry = unit_registry +additional_units = ( + "Dimensionless = dimensionless = Fraction = fraction", + "@alias degC = Temperature = temperature", +) +for unit in additional_units: + unit_registry.define(unit) class ZoneWeight(object): diff --git a/archetypal/umi_template.py b/archetypal/umi_template.py index e1c5d633..210e14f3 100644 --- a/archetypal/umi_template.py +++ b/archetypal/umi_template.py @@ -257,6 +257,7 @@ def from_idf_files( def template_complexity_reduction(idfname, epw, **kwargs): """Wrap IDF, simulate and BuildingTemplate for parallel processing.""" idf = IDF(idfname, epw=epw, **kwargs) + idf._outputs.add_umi_template_outputs() # remove daylight saving time modifiers for daylight in idf.idfobjects["RunPeriodControl:DaylightSavingTime".upper()]: diff --git a/tests/test_end_use_balance.py b/tests/test_end_use_balance.py new file mode 100644 index 00000000..4cdeb862 --- /dev/null +++ b/tests/test_end_use_balance.py @@ -0,0 +1,73 @@ +from tempfile import TemporaryFile + +import pytest +from archetypal import IDF +from archetypal.idfclass.end_use_balance import EndUseBalance + + +class TestEndUseBalance: + @pytest.fixture() + def idf(self): + idf = IDF.from_example_files( + "AdultEducationCenter.idf", + epw="USA_IL_Chicago-OHare.Intl.AP.725300_TMY3.epw", + annual=True, + design_day=False, + readvars=False, + ) + idf = 
idf.saveas("AdultEducationCenter.idf") + idf.outputs.add_load_balance_components() + idf.outputs.add_end_use_balance_components() + idf.outputs.add_sensible_heat_gain_summary_components() + idf.outputs.apply() + idf.simulate() + yield idf + + @pytest.fixture() + def idf_noOA(self): + idf = IDF.from_example_files( + "HVACStandAloneERV_Economizer.idf", + epw="USA_IL_Chicago-OHare.Intl.AP.725300_TMY3.epw", + annual=True, + design_day=False, + readvars=False, + ) + idf.outputs.add_load_balance_components() + idf.outputs.add_end_use_balance_components() + idf.outputs.add_sensible_heat_gain_summary_components() + idf.outputs.apply() + + idf.removeidfobjects(list(idf.idfobjects["DESIGNSPECIFICATION:OUTDOORAIR"])) + + idf.simulate() + yield idf + + def test_from_idf(self, idf): + """Test initializing with idf model.""" + eu = EndUseBalance.from_sql_file( + idf.sql_file, outdoor_surfaces_only=True, units="GJ", power_units="W" + ) + # assert eu + # assert not eu.component_summary().empty + # assert not eu.separate_gains_and_losses("opaque_flow", ["Zone_Name"]).empty + # to_df = eu.to_df(separate_gains_and_losses=False) + # assert not to_df.empty + # assert to_df.columns.shape == (10,) # should have 10 columns + to_df_sep = eu.to_df(separate_gains_and_losses=True) + assert not to_df_sep.empty + # assert to_df_sep.columns.shape == (32,) # should have 32 columns + + def test_from_idf_noOA(self, idf_noOA): + """Test initializing with idf model.""" + eu = EndUseBalance.from_sql_file( + idf_noOA.sql_file, outdoor_surfaces_only=True, units="GJ", power_units="W" + ) + to_df_sep = eu.to_df(separate_gains_and_losses=True) + assert not to_df_sep.empty + + def test_to_sankey(self, idf): + eu = EndUseBalance.from_sql_file( + idf.sql_file, outdoor_surfaces_only=True, units="GJ", power_units="W" + ) + with TemporaryFile("w") as f: + eu.to_sankey(f) diff --git a/tests/test_idfclass.py b/tests/test_idfclass.py index 95212c41..7b7747fe 100644 --- a/tests/test_idfclass.py +++ 
b/tests/test_idfclass.py @@ -63,14 +63,20 @@ def natvent_v9_1_0(self, config): as_version="9-1-0", ) - @pytest.fixture() - def wont_transition_correctly(self, config): - file = ( - "tests/input_data/problematic/RefBldgLargeOfficeNew2004_v1.4_7" - ".2_5A_USA_IL_CHICAGO-OHARE.idf" - ) - wf = "tests/input_data/CAN_PQ_Montreal.Intl.AP.716270_CWEC.epw" - yield IDF(file, epw=wf, as_version="8.9.0") + def test_copy_saveas(self, idf_model, tmp_path): + """Test making a copy of self and two ways of saving as (inplace or not).""" + idf_copy = idf_model.copy() # make a copy of self + + assert idf_copy is not idf_model + + # assert saveas modifies self inplace. + id_before = id(idf_copy) + idf_copy.saveas(tmp_path / "in.idf", inplace=True) + id_after = id(idf_copy) + assert id_after == id_before + + # assert saveas returns another object + assert idf_copy.saveas(tmp_path / "in.idf", inplace=False) is not idf_copy def test_default_version_none(self): file = ( @@ -127,11 +133,15 @@ def test_version(self, natvent_v9_1_0): natvent_v9_1_0.epw = "newepw.epw" assert natvent_v9_1_0.epw == Path("newepw.epw") - def test_transition_error(self, config, wont_transition_correctly): - with pytest.raises( - (EnergyPlusProcessError, EnergyPlusVersionError, CalledProcessError) - ): - assert wont_transition_correctly.simulate(ep_version="8.9.0") + @pytest.mark.xfail(reason="Fails on Linux") + def test_transition_error(self, config): + with pytest.raises(CalledProcessError): + file = ( + "tests/input_data/problematic/RefBldgLargeOfficeNew2004_v1.4_7" + ".2_5A_USA_IL_CHICAGO-OHARE.idf" + ) + wf = "tests/input_data/CAN_PQ_Montreal.Intl.AP.716270_CWEC.epw" + IDF(file, epw=wf, as_version="8.9.0") def test_set_iddname(self): """Set new iddname path.""" diff --git a/tests/test_outputs.py b/tests/test_outputs.py new file mode 100644 index 00000000..321fcd88 --- /dev/null +++ b/tests/test_outputs.py @@ -0,0 +1,62 @@ +import pytest + +from archetypal import IDF +from archetypal.idfclass import Outputs + + 
+class TestOutput: + @pytest.fixture() + def idf(self): + yield IDF(prep_outputs=False) + + def test_output_init(self, idf): + """Test initialization of the Output class.""" + + outputs = Outputs(idf) + str(outputs) # test the string representation of the object + + assert len(outputs.other_outputs) == 2 + assert len(outputs.output_variables) == 0 + assert len(outputs.output_meters) == 0 + + outputs.add_umi_template_outputs() + assert len(outputs.output_variables) > 1 + assert len(outputs.output_meters) > 1 + assert outputs.reporting_frequency == "Hourly" + assert outputs.include_sqlite + assert outputs.include_html + + def test_output_properties(self, idf): + """Test changing properties of Outputs.""" + outputs = Outputs(idf) + + outputs.output_variables = ["Air System Outdoor Air Minimum Flow Fraction"] + assert outputs.output_variables == ( + "Air System Outdoor Air Minimum Flow Fraction", + ) + outputs.reporting_frequency = "daily" # lower case + assert outputs.reporting_frequency == "Daily" # should be upper case + outputs.unit_conversion = "InchPound" + assert outputs.unit_conversion == "InchPound" + outputs.include_sqlite = False + assert not outputs.include_sqlite + outputs.include_html = True + assert outputs.include_html + + with pytest.raises(AssertionError): + outputs.output_variables = ( + "Zone Ideal Loads Supply Air Total Cooling Energy" + ) + with pytest.raises(AssertionError): + outputs.reporting_frequency = "annually" + with pytest.raises(AssertionError): + outputs.other_outputs = "ComponentSizingSummary" + with pytest.raises(AssertionError): + outputs.unit_conversion = "IP" + + def test_add_basics(self, idf): + """Test the Output add_basics method""" + outputs = Outputs(idf).add_basics() + assert len(outputs.output_variables) == 0 + assert len(outputs.output_meters) == 0 + assert len(outputs.other_outputs) == 6 diff --git a/tests/test_schedules.py b/tests/test_schedules.py index a87dd7b1..04e1a639 100644 --- a/tests/test_schedules.py +++ 
b/tests/test_schedules.py @@ -47,6 +47,14 @@ def schedules_in_necb_specific(self, config): s = Schedule.from_epbunch(epbunch, start_day_of_the_week=0) yield s + def test_scale(self, schedules_in_necb_specific): + before_sum = sum(schedules_in_necb_specific.Values) + ax = schedules_in_necb_specific.series.iloc[0:24].plot() + assert pytest.approx( + before_sum, sum(schedules_in_necb_specific.scale(0.1).Values) + ) + schedules_in_necb_specific.series.iloc[0:24].plot(ax=ax) + def test_plot(self, schedules_in_necb_specific): schedules_in_necb_specific.plot(drawstyle="steps-post") @@ -87,6 +95,20 @@ def test_from_values(self, new_idf): ) assert len(heating_sched.all_values) == 8760 + def test_replace(self): + """Test replacing values while keeping full load hours constant.""" + sch = Schedule.from_values("Test", [1] * 6 + [0.5] * 12 + [1] * 6) + new = pd.Series([1, 1], index=sch.series.index[11:13]) + + orig = sch.series.sum() + + sch.replace(new) + + new = sch.series.sum() + + # assert the full load hours (sum) has not changed. + assert new == pytest.approx(orig) + idf_file = "tests/input_data/schedules/test_multizone_EP.idf" diff --git a/tests/test_template.py b/tests/test_template.py index e7defcfa..fffb15f4 100644 --- a/tests/test_template.py +++ b/tests/test_template.py @@ -2917,12 +2917,8 @@ def test_from_to_dict(self): assert zone == zone_dup def test_zone_volume(self, small_idf_copy): - """Test the zone volume for a sloped roof - - Args: - small_idf_copy: - """ - idf = small_idf_copy + """Test the zone volume for a sloped roof.""" + idf = small_idf_copy.simulate() zone = idf.getobject("ZONE", "Perim") z = ZoneDefinition.from_epbunch(ep_bunch=zone, construct_parents=False) assert z.volume == pytest.approx(25.54, 1e-2)