From 9968cfedeb2e116cdbee6222464cff169f30a718 Mon Sep 17 00:00:00 2001
From: jinyan1214
Date: Tue, 12 Nov 2024 11:31:23 -0800
Subject: [PATCH 1/8] ruff spell check

---
 modules/createEVENT/IsolatedBuildingCFD/post_process_output.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/modules/createEVENT/IsolatedBuildingCFD/post_process_output.py b/modules/createEVENT/IsolatedBuildingCFD/post_process_output.py
index ee02c9daa..9cb60212a 100644
--- a/modules/createEVENT/IsolatedBuildingCFD/post_process_output.py
+++ b/modules/createEVENT/IsolatedBuildingCFD/post_process_output.py
@@ -259,7 +259,7 @@ def read_pressure_data(file_names):
                 # index += 1
             except:  # noqa: E722
-                # sys.exit('Fatal Error!: the pressure filese have time gap')
+                # sys.exit('Fatal Error!: the pressure files have time gap')
                 index = 0  # Joint them even if they have a time gap
 
             connected_time = np.concatenate((connected_time, time2[index:]))

From 54a1ea348ca96420d8c06cee918d35ace71f7cb5 Mon Sep 17 00:00:00 2001
From: jinyan1214
Date: Tue, 12 Nov 2024 11:31:54 -0800
Subject: [PATCH 2/8] fix shakeMapEvent IM column indices and units

---
 .../shakeMapEvent/shakeMapEvent.py | 41 ++++++++++++++-----
 1 file changed, 30 insertions(+), 11 deletions(-)

diff --git a/modules/createEVENT/shakeMapEvent/shakeMapEvent.py b/modules/createEVENT/shakeMapEvent/shakeMapEvent.py
index 244195009..297b0c8e7 100644
--- a/modules/createEVENT/shakeMapEvent/shakeMapEvent.py
+++ b/modules/createEVENT/shakeMapEvent/shakeMapEvent.py
@@ -44,12 +44,17 @@
 from pathlib import Path
 
 
+class IntensityMeasureTypeError(Exception):
+    def __init__(self, im_type):
+        super().__init__(f'Intensity measure type {im_type} not found in grid data')
+
+
 def create_shakemap_event(eventDirectory, eventPath, IMTypes):  # noqa: D103, N803
     IMTypesList = eval(IMTypes)  # noqa: S307, N806
 
     print('Creating shakemap event')  # noqa: T201
 
-    xml_file_path = Path(eventDirectory) / eventPath / 'grid.xml'
+    xml_file_path = Path(eventPath) / 'grid.xml'
 
     # Parse the XML file
     tree = ET.parse(xml_file_path)  # noqa: S314
@@ -62,6 +67,19 @@ def create_shakemap_event(eventDirectory, eventPath, IMTypes):  # noqa: D103, N8
     points = []
     attributes = []
 
+    # Get the attribute_mapping
+    namespace = {'ns': 'http://earthquake.usgs.gov/eqcenter/shakemap'}
+    grid_fields = {}
+    for grid_field in root.findall('ns:grid_field', namespace):
+        index = grid_field.get('index')
+        name = grid_field.get('name')
+        units = grid_field.get('units')
+        grid_fields[name] = {'index': index, 'units': units}
+    attribute_mapping = {}
+    for im_type in ['PGA', 'PGV', 'MMI', 'PSA03', 'PSA10', 'PSA30']:
+        if im_type not in grid_fields:
+            raise IntensityMeasureTypeError(im_type)
+        attribute_mapping[im_type] = int(grid_fields[im_type]['index']) - 1
     # Parse the grid data
     for line in grid_data.text.strip().split('\n'):
         values = line.split()
@@ -71,15 +89,6 @@ def create_shakemap_event(eventDirectory, eventPath, IMTypes):  # noqa: D103, N8
 
         # Store only the specified attributes
         attr = {}
-        attribute_mapping = {
-            'PGA': 2,
-            'PGV': 3,
-            'MMI': 4,
-            'PSA03': 5,
-            'PSA10': 6,
-            'PSA30': 7,
-        }
-
         for im_type in IMTypesList:
             if im_type in attribute_mapping:
                 attr[im_type] = float(values[attribute_mapping[im_type]])
@@ -89,11 +98,21 @@ def create_shakemap_event(eventDirectory, eventPath, IMTypes):  # noqa: D103, N8
     # Create GeoDataFrame
     gdf = gpd.GeoDataFrame(attributes, geometry=points, crs='EPSG:4326')
 
+    if 'PGA' in gdf.columns:
+        gdf['PGA'] = gdf['PGA'] / 100  # convert from pct g to g
+
+    if 'PSA03' in gdf.columns:
+        gdf['PSA03'] = gdf['PSA03'] / 100  # convert from pct g to g
+        gdf = gdf.rename(columns={'PSA03': 'SA_0.3'})
+    if 'PSA10' in gdf.columns:
+        gdf['PSA10'] = gdf['PSA10'] / 100  # convert from pct g to g
+        gdf = gdf.rename(columns={'PSA10': 'SA_1.0'})
+
     # Display the first few rows
     print('Saving shakemap to gpkg')  # noqa: T201
 
     # Save as a GeoPackage file
-    gdf_path = Path(eventDirectory) / 'EventGrid.gpkg'
+    gdf_path = Path(eventPath) / 'EventGrid.gpkg'
     gdf.to_file(gdf_path, driver='GPKG')
 
     return  # noqa: PLR1711
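Note on PATCH 2/8: the old hard-coded column positions (PGA=2, PGV=3, ...) assumed a fixed grid_field order, while the patched code derives the mapping from the grid.xml metadata itself. The sketch below shows that lookup in isolation; the sample XML fragment, its field order, and its units are invented for illustration, and only the namespace URI and the 1-based `index` attribute (hence the `- 1`) come from the patch.

    import xml.etree.ElementTree as ET

    # Illustrative grid.xml fragment; a real USGS ShakeMap grid lists many more fields.
    sample = (
        '<shakemap_grid xmlns="http://earthquake.usgs.gov/eqcenter/shakemap">'
        '<grid_field index="1" name="LON" units="dd"/>'
        '<grid_field index="2" name="LAT" units="dd"/>'
        '<grid_field index="3" name="PGA" units="pctg"/>'
        '<grid_field index="4" name="PGV" units="cms"/>'
        '</shakemap_grid>'
    )

    root = ET.fromstring(sample)
    namespace = {'ns': 'http://earthquake.usgs.gov/eqcenter/shakemap'}
    grid_fields = {
        gf.get('name'): {'index': gf.get('index'), 'units': gf.get('units')}
        for gf in root.findall('ns:grid_field', namespace)
    }

    # grid.xml indices are 1-based positions within each data row, so subtract 1
    # to index the list produced by line.split().
    attribute_mapping = {name: int(info['index']) - 1 for name, info in grid_fields.items()}
    print(attribute_mapping)  # {'LON': 0, 'LAT': 1, 'PGA': 2, 'PGV': 3}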
From df819be1007b05dcf27849a446e0f92153340fb2 Mon Sep 17 00:00:00 2001
From: jinyan1214
Date: Tue, 12 Nov 2024 11:34:07 -0800
Subject: [PATCH 3/8] correct the nearest neighbor algorithm when sampling grid from GIS files

---
 .../NearestNeighborEvents/NNE.py | 37 ++++++++++---------
 1 file changed, 20 insertions(+), 17 deletions(-)

diff --git a/modules/performRegionalMapping/NearestNeighborEvents/NNE.py b/modules/performRegionalMapping/NearestNeighborEvents/NNE.py
index f423bcb97..f10996d1d 100644
--- a/modules/performRegionalMapping/NearestNeighborEvents/NNE.py
+++ b/modules/performRegionalMapping/NearestNeighborEvents/NNE.py
@@ -321,7 +321,8 @@ def find_neighbors(  # noqa: C901, D103
                 for col in grid_df.columns
                 if col not in ['geometry', 'Longitude', 'Latitude']
             ]
-            event_count = len(im_columns)
+            # event_count = len(im_columns)
+            event_count = 1
 
         # for each neighbor
         for sample_j, nbr in enumerate(nbr_samples):
@@ -332,26 +333,28 @@ def find_neighbors(  # noqa: C901, D103
                 nbr_index = ind_list[nbr]
 
                 # For GIS files, create a new CSV file
-                csv_filename = f'Site_{sample_j}.csv'
+                csv_filename = f'Site_{nbr_index}.csv'
                 csv_path = event_dir / csv_filename
 
-                # Create a CSV file with data from the GIS file
-                # Use actual data from the GIS file if available, otherwise use dummy data
-                im_columns = [
-                    col
-                    for col in grid_df.columns
-                    if col not in ['geometry', 'Longitude', 'Latitude']
-                ]
-
-                im_data = pd.DataFrame(
-                    {
-                        col: [grid_df.iloc[nbr_index][col]] * event_count
-                        for col in im_columns
-                    }
-                )
+                if not csv_path.exists():
+
+                    # Create a CSV file with data from the GIS file
+                    # Use actual data from the GIS file if available, otherwise use dummy data
+                    im_columns = [
+                        col
+                        for col in grid_df.columns
+                        if col not in ['geometry', 'Longitude', 'Latitude']
+                    ]
+
+                    im_data = pd.DataFrame(
+                        {
+                            col: [grid_df.iloc[nbr_index][col]] * event_count
+                            for col in im_columns
+                        }
+                    )
 
-                im_data.to_csv(csv_path, index=False)
+                    im_data.to_csv(csv_path, index=False)
 
                 # save the collection file name and the IM row id
                 event_list.append(csv_filename + f'x{event_j}')

From 34705c3aba051828419c2de0bd05409cfdc0393f Mon Sep 17 00:00:00 2001
From: jinyan1214
Date: Tue, 12 Nov 2024 11:34:57 -0800
Subject: [PATCH 4/8] add a default rise=None for the capacity spectrum method

---
 modules/performSIMULATION/capacitySpectrum/CapacityModels.py | 3 ++-
 1 file changed, 2 insertions(+), 1 deletion(-)

diff --git a/modules/performSIMULATION/capacitySpectrum/CapacityModels.py b/modules/performSIMULATION/capacitySpectrum/CapacityModels.py
index c55830d46..6a31cd9fe 100644
--- a/modules/performSIMULATION/capacitySpectrum/CapacityModels.py
+++ b/modules/performSIMULATION/capacitySpectrum/CapacityModels.py
@@ -88,6 +88,7 @@ def convert_story_rise(structureType, stories):  # noqa: N803
         rise = None
 
     else:
+        rise = None
         # First, check if we have valid story information
         try:
             stories = int(stories)
@@ -340,7 +341,7 @@ def __init__(self, general_info, dD=0.001):  # noqa: N803
             self.Dy = self.capacity_data[self.design_level][self.HAZUS_type]['Dy']
             self.Ay = self.capacity_data[self.design_level][self.HAZUS_type]['Ay']
         except KeyError:
-            msg = f'No capacity data for {self.HAZUS_type} and {self.design_level}'
+            msg = f'No capacity data for build class {self.HAZUS_type} and design level {self.design_level}'
             raise KeyError(msg)  # noqa: B904
         self.cao_peterson_2006 = cao_peterson_2006(
             self.Dy, self.Ay, self.Du, self.Au, dD
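Note on PATCH 4/8: the added `rise = None` gives `convert_story_rise` a defined fallback before the story count is parsed, so a missing or non-numeric `stories` value degrades to "no rise class" instead of leaving `rise` unbound. The stand-in below is a simplified illustration, not the full HAZUS mapping; the structure-type check and the low/mid split are assumptions made only to show the failure mode the default avoids.

    def convert_story_rise_sketch(structure_type, stories):
        if structure_type in ('W1', 'W2'):  # assumed example of types without a rise class
            rise = None
        else:
            rise = None  # the default added by this patch
            try:
                stories = int(stories)
            except (ValueError, TypeError):
                # Without the default above, falling through to `return rise`
                # would raise UnboundLocalError on this path.
                pass
            else:
                rise = 'L' if stories <= 3 else 'M'
        return rise

    print(convert_story_rise_sketch('C1', 'unknown'))  # None instead of a crash
    print(convert_story_rise_sketch('C1', 5))          # 'M'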
From 49708080e5aa50d7aacfebf2e63dedd780d6fa98 Mon Sep 17 00:00:00 2001
From: jinyan1214
Date: Tue, 12 Nov 2024 11:35:45 -0800
Subject: [PATCH 5/8] modify API of userinputshakemap

---
 modules/Workflow/WorkflowApplications.json | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/modules/Workflow/WorkflowApplications.json b/modules/Workflow/WorkflowApplications.json
index 117f2d51c..3a5f70bdd 100644
--- a/modules/Workflow/WorkflowApplications.json
+++ b/modules/Workflow/WorkflowApplications.json
@@ -285,12 +285,12 @@
             "ApplicationSpecificInputs": [
                 {
                     "id": "Directory",
-                    "type": "path",
+                    "type": "string",
                     "description": "Path to file containing folder of shake maps"
                 },
                 {
                     "id": "EventPath",
-                    "type": "string",
+                    "type": "path",
                     "description": "Path to the shake map event"
                 },
                 {

From 34705c3aba051828419c2de0bd05409cfdc0393f Mon Sep 17 00:00:00 2001
From: jinyan1214
Date: Tue, 12 Nov 2024 11:36:03 -0800
Subject: [PATCH 6/8] modify BRAILS transportation wrapper

---
 modules/tools/BRAILS/runBrailsTransp.py | 6 +++++-
 1 file changed, 5 insertions(+), 1 deletion(-)

diff --git a/modules/tools/BRAILS/runBrailsTransp.py b/modules/tools/BRAILS/runBrailsTransp.py
index b7c3cf418..2c3a60601 100644
--- a/modules/tools/BRAILS/runBrailsTransp.py
+++ b/modules/tools/BRAILS/runBrailsTransp.py
@@ -72,6 +72,7 @@ def runBrails(  # noqa: N802, D103
     minimumHAZUS,  # noqa: N803
     maxRoadLength,  # noqa: N803
     lengthUnit,  # noqa: N803
+    saveTrafficSimulationAttr,  # noqa: N803
 ):
     # Initialize TranspInventoryGenerator:
     invGenerator = TranspInventoryGenerator(  # noqa: N806
@@ -83,7 +84,8 @@ def runBrails(  # noqa: N802, D103
 
     # Combine and format the generated inventory to SimCenter transportation network inventory json format
     invGenerator.combineAndFormat_HWY(
-        minimumHAZUS=minimumHAZUS, maxRoadLength=maxRoadLength, lengthUnit=lengthUnit
+        minimumHAZUS=minimumHAZUS, maxRoadLength=maxRoadLength, lengthUnit=lengthUnit,
+        connectivity=saveTrafficSimulationAttr
     )
 
 
@@ -99,6 +101,7 @@ def main(args):  # noqa: D103
     )
     parser.add_argument('--maxRoadLength', default=100, type=float)
    parser.add_argument('--lengthUnit', default='m', type=str)
+    parser.add_argument('--saveTrafficSimulationAttr', default=False, type=str2bool, nargs='?', const=True)
 
     args = parser.parse_args(args)
 
@@ -115,6 +118,7 @@ def main(args):  # noqa: D103
         args.minimumHAZUS,
         args.maxRoadLength,
         args.lengthUnit,
+        args.saveTrafficSimulationAttr,
     )
 
     log_msg('BRAILS successfully generated the requested transportation inventory')

From fe542be1902cdaa5c6842d6cea8b43cb5690a711 Mon Sep 17 00:00:00 2001
From: jinyan1214
Date: Tue, 12 Nov 2024 11:36:21 -0800
Subject: [PATCH 7/8] residual demand bug fix

---
 modules/systemPerformance/ResidualDemand/run_residual_demand.py | 2 ++
 1 file changed, 2 insertions(+)

diff --git a/modules/systemPerformance/ResidualDemand/run_residual_demand.py b/modules/systemPerformance/ResidualDemand/run_residual_demand.py
index ab099957f..c2dc17df4 100644
--- a/modules/systemPerformance/ResidualDemand/run_residual_demand.py
+++ b/modules/systemPerformance/ResidualDemand/run_residual_demand.py
@@ -868,6 +868,8 @@ def run_residual_demand(  # noqa: C901
     edges_gdf['capacity'] = edges_gdf['lanes'] * 1800
     edges_gdf['normal_capacity'] = edges_gdf['capacity']
     edges_gdf['normal_maxspeed'] = edges_gdf['maxspeed']
+    edges_gdf['start_nid'] = edges_gdf['start_nid'].astype(int)
+    edges_gdf['end_nid'] = edges_gdf['end_nid'].astype(int)
     # edges_gdf['fft'] = edges_gdf['length']/edges_gdf['maxspeed'] * 2.23694
     edges_gdf.to_csv('edges.csv', index=False)
     nodes_gdf = gpd.read_file(node_geojson)
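Note on PATCH 7/8: geopandas can load the `start_nid`/`end_nid` columns of an edge GeoJSON as floats (or strings), and the downstream residual-demand routing presumably matches them against integer node ids, so the explicit casts keep `edges.csv` consistent. A tiny pandas-only illustration with made-up data (the real frame comes from `gpd.read_file(edge_geojson)`):

    import pandas as pd

    # Made-up edge table for illustration only.
    edges_gdf = pd.DataFrame(
        {'start_nid': [1.0, 2.0], 'end_nid': ['3', '4'], 'lanes': [2, 1], 'maxspeed': [30, 50]}
    )
    edges_gdf['capacity'] = edges_gdf['lanes'] * 1800
    edges_gdf['normal_capacity'] = edges_gdf['capacity']
    edges_gdf['normal_maxspeed'] = edges_gdf['maxspeed']
    edges_gdf['start_nid'] = edges_gdf['start_nid'].astype(int)
    edges_gdf['end_nid'] = edges_gdf['end_nid'].astype(int)
    print(edges_gdf.dtypes)  # start_nid and end_nid are now integer columns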
From d21a339bf17fede166ba4fe17e3e053e131780ef Mon Sep 17 00:00:00 2001
From: jinyan1214
Date: Tue, 12 Nov 2024 16:48:25 -0800
Subject: [PATCH 8/8] jz: print error if capacity demand method does not converge

---
 modules/performSIMULATION/capacitySpectrum/DampingModels.py | 2 ++
 modules/performSIMULATION/capacitySpectrum/runCMS.py        | 6 ++----
 2 files changed, 4 insertions(+), 4 deletions(-)

diff --git a/modules/performSIMULATION/capacitySpectrum/DampingModels.py b/modules/performSIMULATION/capacitySpectrum/DampingModels.py
index feb455461..916400441 100644
--- a/modules/performSIMULATION/capacitySpectrum/DampingModels.py
+++ b/modules/performSIMULATION/capacitySpectrum/DampingModels.py
@@ -256,6 +256,8 @@ def get_beta(self, Dp, Ap):  # noqa: N803
                 f'The base model {self.base_model} does not have a useful'
                 'get_kappa method.'
             )
+        if Dp <= 0 or Ap <= 0:
+            return beta_elastic
         Du = self.capacity.Du  # noqa: N806
         Ax = self.capacity.Ax  # noqa: N806
         B = self.capacity.B  # noqa: N806
diff --git a/modules/performSIMULATION/capacitySpectrum/runCMS.py b/modules/performSIMULATION/capacitySpectrum/runCMS.py
index c17fb7fba..085658b46 100644
--- a/modules/performSIMULATION/capacitySpectrum/runCMS.py
+++ b/modules/performSIMULATION/capacitySpectrum/runCMS.py
@@ -97,10 +97,8 @@ def find_performance_point(cap_x, cap_y, dem_x, dem_y, dd=0.001):
     elif dem_y_interp[0] < cap_y_interp[0]:
         perf_x = 0.001  # x_interp[0]
         perf_y = 0.001  # cap_y_interp[0]
-    # except IndexError as err:
-    #     print('No performance point found; curves do not intersect.')
-    #     print('IndexError: ')
-    #     print(err)
+    else:
+        print('No performance point found; curves do not intersect.')
 
     return perf_x, perf_y
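Note on PATCH 8/8: `find_performance_point` locates the performance point where the interpolated demand spectrum crosses the capacity curve, and the new `else` branch reports the no-intersection case explicitly instead of relying on the commented-out exception handler. Below is a rough sketch of that crossing search; the curves, the displacement grid, and the sign-change test are illustrative assumptions, and the real function also handles the case where the demand starts below the capacity curve.

    import numpy as np

    dd = 0.001
    x_interp = np.arange(0.0, 0.5, dd)                     # common displacement grid
    cap_y_interp = np.minimum(0.8 * x_interp / 0.1, 0.8)   # toy capacity curve
    dem_y_interp = 1.0 - 1.5 * x_interp                    # toy demand curve

    diff = dem_y_interp - cap_y_interp
    crossings = np.where(np.sign(diff[:-1]) != np.sign(diff[1:]))[0]
    if crossings.size > 0:
        i = crossings[0]
        perf_x, perf_y = x_interp[i], cap_y_interp[i]
        print(f'performance point near ({perf_x:.3f}, {perf_y:.3f})')
    else:
        # The situation the new else-branch now reports explicitly.
        print('No performance point found; curves do not intersect.')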