Merge branch 'NHERI-SimCenter:master' into master
JustinBonus authored Dec 5, 2024
2 parents 7b48d9e + c83d05a · commit 62f0baa
Showing 4 changed files with 738 additions and 599 deletions.
73 changes: 37 additions & 36 deletions modules/performRegionalMapping/NearestNeighborEvents/NNE.py
@@ -56,28 +56,28 @@ def find_neighbors( # noqa: C901, D103
     neighbors,
     filter_label,
     seed,
-    doParallel, # noqa: N803
+    do_parallel,
 ):
     # check if running parallel
-    numP = 1 # noqa: N806
-    procID = 0 # noqa: N806
-    runParallel = False # noqa: N806
+    num_processes = 1
+    process_id = 0
+    run_parallel = False
 
-    if doParallel == 'True':
+    if do_parallel == 'True':
         mpi_spec = importlib.util.find_spec('mpi4py')
         found = mpi_spec is not None
         if found:
             from mpi4py import MPI
 
-            runParallel = True # noqa: N806
+            run_parallel = True
             comm = MPI.COMM_WORLD
-            numP = comm.Get_size() # noqa: N806
-            procID = comm.Get_rank() # noqa: N806
-            if numP < 2: # noqa: PLR2004
-                doParallel = 'False' # noqa: N806
-                runParallel = False # noqa: N806
-                numP = 1 # noqa: N806
-                procID = 0 # noqa: N806
+            num_processes = comm.Get_size()
+            process_id = comm.Get_rank()
+            if num_processes < 2: # noqa: PLR2004
+                do_parallel = 'False'
+                run_parallel = False
+                num_processes = 1
+                process_id = 0
 
     # read the event grid data file
     event_grid_path = Path(event_grid_file).resolve()
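
Note: the block above follows a common mpi4py pattern: probe for the module, fall back to serial execution when it is missing or only one rank is running, and later split the work round-robin by rank. A minimal self-contained sketch of that pattern (the work_items range is a placeholder; the string-typed do_parallel flag is dropped for brevity):

    import importlib.util

    num_processes = 1
    process_id = 0
    run_parallel = False

    if importlib.util.find_spec('mpi4py') is not None:
        from mpi4py import MPI

        comm = MPI.COMM_WORLD
        num_processes = comm.Get_size()
        process_id = comm.Get_rank()
        run_parallel = num_processes > 1

    work_items = range(100)  # placeholder for the assets to process
    for i in work_items:
        # round-robin split: rank k handles items k, k + num_processes, ...
        if not run_parallel or (i % num_processes) == process_id:
            pass  # process item i

Launched with, say, mpiexec -n 4 python script.py, each rank handles every fourth item; without mpi4py installed, the same script degrades gracefully to serial.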
@@ -91,10 +91,10 @@ def find_neighbors( # noqa: C901, D103
         # Existing code for CSV files
         grid_df = pd.read_csv(event_dir / event_grid_file, header=0)
 
-        # store the locations of the grid points in X
-        lat_E = grid_df['Latitude'] # noqa: N806
-        lon_E = grid_df['Longitude'] # noqa: N806
-        X = np.array([[lo, la] for lo, la in zip(lon_E, lat_E)]) # noqa: N806
+        # store the locations of the grid points in grid_locations
+        lat_e = grid_df['Latitude']
+        lon_e = grid_df['Longitude']
+        grid_locations = np.array([[lo, la] for lo, la in zip(lon_e, lat_e)])
 
         if filter_label == '':
             grid_extra_keys = list(
@@ -113,10 +113,10 @@ def find_neighbors( # noqa: C901, D103
         gdf['Longitude'] = gdf.geometry.x
         gdf['Latitude'] = gdf.geometry.y
 
-        # store the locations of the grid points in X
-        lat_E = gdf['Latitude'] # noqa: N806
-        lon_E = gdf['Longitude'] # noqa: N806
-        X = np.array([[lo, la] for lo, la in zip(lon_E, lat_E)]) # noqa: N806
+        # store the locations of the grid points in grid_locations
+        lat_e = gdf['Latitude']
+        lon_e = gdf['Longitude']
+        grid_locations = np.array([[lo, la] for lo, la in zip(lon_e, lat_e)])
 
         if filter_label == '':
             grid_extra_keys = list(
@@ -128,46 +128,47 @@ def find_neighbors( # noqa: C901, D103
 
     # prepare the tree for the nearest neighbor search
     if filter_label != '' or len(grid_extra_keys) > 0:
-        neighbors_to_get = min(neighbors * 10, len(lon_E))
+        neighbors_to_get = min(neighbors * 10, len(lon_e))
     else:
         neighbors_to_get = neighbors
 
     nbrs = NearestNeighbors(n_neighbors=neighbors_to_get, algorithm='ball_tree').fit(
-        X
+        grid_locations
     )
 
     # load the building data file
     with open(asset_file, encoding='utf-8') as f: # noqa: PTH123
         asset_dict = json.load(f)
 
     # prepare a dataframe that holds asset filenames and locations
-    AIM_df = pd.DataFrame( # noqa: N806
+    aim_df = pd.DataFrame(
         columns=['Latitude', 'Longitude', 'file'], index=np.arange(len(asset_dict))
     )
 
     count = 0
     for i, asset in enumerate(asset_dict):
-        if runParallel == False or (i % numP) == procID: # noqa: E712
+        if run_parallel == False or (i % num_processes) == process_id: # noqa: E712
             with open(asset['file'], encoding='utf-8') as f: # noqa: PTH123
                 asset_data = json.load(f)
 
             asset_loc = asset_data['GeneralInformation']['location']
-            AIM_df.iloc[count]['Longitude'] = asset_loc['longitude']
-            AIM_df.iloc[count]['Latitude'] = asset_loc['latitude']
-            AIM_df.iloc[count]['file'] = asset['file']
+            aim_id = aim_df.index[count]
+            aim_df.loc[aim_id, 'Longitude'] = asset_loc['longitude']
+            aim_df.loc[aim_id, 'Latitude'] = asset_loc['latitude']
+            aim_df.loc[aim_id, 'file'] = asset['file']
             count = count + 1
 
-    # store building locations in Y
-    Y = np.array( # noqa: N806
+    # store building locations in bldg_locations
+    bldg_locations = np.array(
         [
             [lo, la]
-            for lo, la in zip(AIM_df['Longitude'], AIM_df['Latitude'])
+            for lo, la in zip(aim_df['Longitude'], aim_df['Latitude'])
             if not np.isnan(lo) and not np.isnan(la)
         ]
     )
 
     # collect the neighbor indices and distances for every building
-    distances, indices = nbrs.kneighbors(Y)
+    distances, indices = nbrs.kneighbors(bldg_locations)
     distances = distances + 1e-20
 
     # initialize the random generator
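
Note: the nearest-neighbor lookup above is plain scikit-learn: fit a ball tree on the grid's [lon, lat] pairs, then query k neighbors for every building in one call. A toy sketch of the same calls (coordinates invented for illustration):

    import numpy as np
    from sklearn.neighbors import NearestNeighbors

    # stand-ins for the grid and building coordinates in the diff
    grid_locations = np.array([[-122.40, 37.75], [-122.30, 37.80], [-122.25, 37.70]])
    bldg_locations = np.array([[-122.39, 37.76], [-122.26, 37.71]])

    nbrs = NearestNeighbors(n_neighbors=2, algorithm='ball_tree').fit(grid_locations)
    distances, indices = nbrs.kneighbors(bldg_locations)

    print(indices)  # [[0 1] [2 1]]: rows of grid_locations, nearest first
    distances = distances + 1e-20  # guard against zero distance, e.g. before
                                   # inverse-distance weighting (an assumption)

Because the tree is fit on raw longitude/latitude, the distances are Euclidean in degrees; ball_tree also supports metric='haversine' on radian coordinates if true great-circle distances were ever needed.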
@@ -179,11 +180,12 @@ def find_neighbors( # noqa: C901, D103
     count = 0
 
     # iterate through the buildings and store the selected events in the AIM
-    for asset_i, (AIM_id, dist_list, ind_list) in enumerate( # noqa: B007, N806
-        zip(AIM_df.index, distances, indices)
+    for asset_i, (aim_id, dist_list, ind_list) in enumerate( # noqa: B007
+        zip(aim_df.index, distances, indices)
     ):
         # open the AIM file
-        asst_file = AIM_df.iloc[AIM_id]['file']
+        aim_index_id = aim_df.index[aim_id]
+        asst_file = aim_df.loc[aim_index_id, 'file']
 
         with open(asst_file, encoding='utf-8') as f: # noqa: PTH123
             asset_data = json.load(f)
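
Note: two pandas pitfalls motivate the indexing changes in this and the previous hunk. Chained assignment such as AIM_df.iloc[count]['Longitude'] = ... goes through an intermediate object that may be a copy, so the write can silently miss the frame; and iloc[AIM_id] treats a row label as a position, which only matches while the index happens to be the default RangeIndex. A toy sketch (invented values) of the failure mode and the fix:

    import numpy as np
    import pandas as pd

    df = pd.DataFrame(columns=['Latitude', 'Longitude', 'file'], index=np.arange(3))

    # Chained assignment: the row lookup may return a copy, so this write can
    # vanish (under pandas copy-on-write it never updates the frame).
    df.iloc[0]['Longitude'] = -122.39

    # One .loc call with an explicit row label and column name writes in place.
    df.loc[df.index[0], 'Longitude'] = -122.39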
@@ -338,7 +340,6 @@ def find_neighbors( # noqa: C901, D103
         csv_path = event_dir / csv_filename
 
         if not csv_path.exists():
-
             # Create a CSV file with data from the GIS file
             # Use actual data from the GIS file if available, otherwise use dummy data
             im_columns = [
28 changes: 26 additions & 2 deletions modules/performSIMULATION/capacitySpectrum/runCMS.py
@@ -396,11 +396,35 @@ def write_RV(AIM_input_path, EVENT_input_path): # noqa: C901, N802, N803, D103
 
     EDP_output = np.concatenate([index, EDP_output], axis=1) # noqa: N806
 
+    # Concatenate the original IM to CMS in case some (e.g., PGD) are needed
+    EDP_output = np.concatenate([EDP_output, IM_samples], axis = 1)
+    # prepare the header
+    header_out = ['1-PFA-0-0', '1-PRD-1-1']
+    for h_label in header:
+        # remove leading and trailing whitespace
+        h_label = h_label.strip() # noqa: PLW2901
+
+        # convert suffixes to the loc-dir format used by the SimCenter
+        if h_label.endswith('_h'): # horizontal
+            header_out.append(f'1-{h_label[:-2]}-1-1')
+
+        elif h_label.endswith('_v'): # vertical
+            header_out.append(f'1-{h_label[:-2]}-1-3')
+
+        elif h_label.endswith('_x'): # x direction
+            header_out.append(f'1-{h_label[:-2]}-1-1')
+
+        elif h_label.endswith('_y'): # y direction
+            header_out.append(f'1-{h_label[:-2]}-1-2')
+
+        else: # if none of the above is given, default to 1-1
+            header_out.append(f'1-{h_label.strip()}-1-1')
+
+
     working_dir = Path(PurePath(EVENT_input_path).parent)
     # working_dir = posixpath.dirname(EVENT_input_path)
 
-    # prepare the header
-    header_out = ['1-PFA-0-0', '1-PRD-1-1']
 
 
     np.savetxt(
         working_dir / 'response.csv',
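
Note: the inserted loop maps intensity-measure column suffixes onto the SimCenter '1-<name>-<loc>-<dir>' header convention. A behavior-equivalent sketch as a standalone function (the sample labels are illustrative, not taken from the commit):

    def to_simcenter_label(h_label: str) -> str:
        """Map an IM column suffix to the '1-<name>-<loc>-<dir>' format."""
        h_label = h_label.strip()
        suffix_to_dir = {'_h': 1, '_v': 3, '_x': 1, '_y': 2}
        for suffix, direction in suffix_to_dir.items():
            if h_label.endswith(suffix):
                return f'1-{h_label[:-2]}-1-{direction}'
        return f'1-{h_label}-1-1'  # no recognized suffix: default to loc 1, dir 1

    assert to_simcenter_label('PGA_h') == '1-PGA-1-1'
    assert to_simcenter_label('PGA_v') == '1-PGA-1-3'
    assert to_simcenter_label('SA1_y') == '1-SA1-1-2'
    assert to_simcenter_label(' PGD ') == '1-PGD-1-1'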
39 changes: 31 additions & 8 deletions modules/systemPerformance/ResidualDemand/run_residual_demand.py
@@ -357,15 +357,26 @@ def aggregate_delay_results(undamaged_time, damaged_time, od_file_pre, od_file_p
     compare_df.loc[undamaged_time['agent_id'], 'mean_time_used_undamaged'] = (
         undamaged_time['data'].mean(axis=1)
     )
-    compare_df.loc[undamaged_time['agent_id'], 'std_time_used_undamaged'] = (
-        undamaged_time['data'].std(axis=1)
-    )
 
     compare_df.loc[damaged_time['agent_id'], 'mean_time_used_damaged'] = (
         damaged_time['data'].mean(axis=1)
     )
-    compare_df.loc[damaged_time['agent_id'], 'std_time_used_damaged'] = damaged_time[
-        'data'
-    ].std(axis=1)
+
+    std_time_used_undamaged = np.zeros(len(undamaged_time['agent_id']))
+    rows_with_inf = np.any(np.isinf(undamaged_time['data']), axis=1)
+    std_time_used_undamaged[rows_with_inf] = np.nan
+    std_time_used_undamaged[~rows_with_inf] = undamaged_time['data'][~rows_with_inf].std(
+        axis=1
+    )
+    compare_df.loc[undamaged_time['agent_id'], 'std_time_used_undamaged'] = std_time_used_undamaged
+
+    std_time_used_damaged = np.zeros(len(damaged_time['agent_id']))
+    rows_with_inf = np.any(np.isinf(damaged_time['data']), axis=1)
+    std_time_used_damaged[rows_with_inf] = np.nan
+    std_time_used_damaged[~rows_with_inf] = damaged_time['data'][~rows_with_inf].std(
+        axis=1
+    )
+    compare_df.loc[damaged_time['agent_id'], 'std_time_used_damaged'] = std_time_used_damaged
 
     inner_agents = od_df_pre.merge(od_df_post, on='agent_id', how='inner')[
         'agent_id'
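
Note: the added std blocks here and in the next hunk share one guard: a row whose samples contain inf (a trip that never completes, presumably) would otherwise poison the standard deviation, so such rows are set to NaN and std is taken only over finite rows. The pattern in isolation:

    import numpy as np

    # one row per agent, one column per realization; inf marks a trip that
    # never completed (assumed convention)
    data = np.array([
        [10.0, 12.0, 11.0],
        [9.0, np.inf, 10.0],
    ])

    std_time = np.zeros(len(data))
    rows_with_inf = np.any(np.isinf(data), axis=1)  # [False, True]
    std_time[rows_with_inf] = np.nan                # undefined where inf present
    std_time[~rows_with_inf] = data[~rows_with_inf].std(axis=1)
    print(std_time)  # [0.8165..., nan]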
@@ -385,13 +396,24 @@ def aggregate_delay_results(undamaged_time, damaged_time, od_file_pre, od_file_p
         - undamaged_time['data'][indices_in_undamaged, :]
     )
     delay_ratio = delay_duration / undamaged_time['data'][indices_in_undamaged, :]
 
+    std_delay_duration = np.zeros(len(inner_agents))
+    rows_with_inf = np.any(np.isinf(delay_duration), axis=1)
+    std_delay_duration[rows_with_inf] = np.nan
+    std_delay_duration[~rows_with_inf] = delay_duration[~rows_with_inf].std(axis=1)
+
+    std_delay_ratio = np.zeros(len(inner_agents))
+    rows_with_inf = np.any(np.isinf(delay_ratio), axis=1)
+    std_delay_ratio[rows_with_inf] = np.nan
+    std_delay_ratio[~rows_with_inf] = delay_ratio[~rows_with_inf].std(axis=1)
+
     delay_df = pd.DataFrame(
         data={
             'agent_id': inner_agents,
             'mean_delay_duration': delay_duration.mean(axis=1),
             'mean_delay_ratio': delay_ratio.mean(axis=1),
-            'std_delay_duration': delay_duration.std(axis=1),
-            'std_delay_ratio': delay_ratio.std(axis=1),
+            'std_delay_duration': std_delay_duration,
+            'std_delay_ratio': std_delay_ratio,
         }
     )
 
@@ -770,6 +792,7 @@ def run_one_realization(
         trip_info_compare['delay_duration']
         / trip_info_compare['travel_time_used_undamaged']
     )
+    trip_info_compare = trip_info_compare.replace([np.inf, -np.inf], 'inf')
    trip_info_compare.to_csv('trip_info_compare.csv', index=False)
     return True
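
Note: the added line converts +/-inf entries (which arise when a delay ratio has no finite undamaged baseline to divide by, presumably) to the literal string 'inf' before export, pinning down the token written to the CSV rather than leaving it to the float formatter. A minimal sketch with invented values; one caveat is that the replacement flips affected columns to object dtype in memory:

    import numpy as np
    import pandas as pd

    trip_info_compare = pd.DataFrame({
        'agent_id': [1, 2],
        'delay_ratio': [0.25, np.inf],  # inf: no finite baseline (assumed)
    })

    # Swap +/-inf for a plain 'inf' string before writing the report.
    trip_info_compare = trip_info_compare.replace([np.inf, -np.inf], 'inf')
    trip_info_compare.to_csv('trip_info_compare.csv', index=False)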
