#!/usr/bin/env python3
# -*- coding: utf-8 -*-
#--------------------------------------------------------------------------------#
# This is the main run script
# How to run:
# python run_config_static.py case_name
#
# User input files should be in JOBS/case_name/INPUT/
#
# @author: Dongqi Lin, Jiawei Zhang
#--------------------------------------------------------------------------------#
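#--------------------------------------------------------------------------------#
# Workflow: read the case configuration, download any required input data
# (NASA AppEEARS DEM/land use, ESA WorldCover, OSM buildings/streets, sea surface
# temperature), preprocess the GeoTIFF files, and write one PALM static driver per
# domain to JOBS/case_name/OUTPUT/
#--------------------------------------------------------------------------------#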
import warnings
## suppress warnings
## switch to other filter actions if needed
warnings.filterwarnings("ignore")
warnings.simplefilter("ignore")
warnings.filterwarnings("ignore", category=DeprecationWarning)
warnings.simplefilter("ignore", category=FutureWarning)
import requests
import math
import getpass, pprint, time, os, cgi, json
with warnings.catch_warnings():
    warnings.filterwarnings("ignore",category=UserWarning)
    import geopandas as gpd
    import osmnx as ox
    from pyproj import Proj, Transformer, CRS
    import rasterio
    from rasterio import logging
    log = logging.getLogger()
    log.setLevel(logging.ERROR)
    from geocube.api.core import make_geocube
from datetime import datetime, timezone
import pandas as pd
from shapely.geometry import box, Point
from pathlib import Path
import pickle
import numpy as np
from util.get_osm import *
from util.get_sst import download_sst
from util.get_geo_nasa import *
from util.loc_dom import convert_wgs_to_utm,domain_location, domain_nest
from util.create_static import *
from util.pre_process_tif import *
from util.get_esa_world import *
import configparser
import ast
import sys
from glob import glob
from urllib.request import urlretrieve
pd.options.mode.chained_assignment = None
#--------------------------------------------------------------------------------#
#--------------------------------------------------------------------------------#
# read namelist
settings_cfg = configparser.ConfigParser(inline_comment_prefixes='#')
config = configparser.RawConfigParser()
prefix = sys.argv[1]
namelist = f"./JOBS/{prefix}/INPUT/config.static-{prefix}"
config.read(namelist)
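# each namelist entry is a Python literal; ast.literal_eval turns it into a list with
# one value per domain (scalar options such as case_name take index [0])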
## [case]
case_name = ast.literal_eval(config.get("case", "case_name"))[0]
origin_time = ast.literal_eval(config.get("case", "origin_time"))[0]
# local projection (unit: m)
config_proj = ast.literal_eval(config.get("case", "config_proj"))[0]
# use WGS84 (EPSG:4326) for centlat/centlon
default_proj = ast.literal_eval(config.get("case", "default_proj"))[0]
# land use look up table
lu_table = ast.literal_eval(config.get("case", "lu_table"))[0]
lu_csv_file = f"./JOBS/{prefix}/INPUT/{lu_table}"
## [domain configuration]
ndomain = ast.literal_eval(config.get("domain", "ndomain"))[0]
centlat = ast.literal_eval(config.get("domain", "centlat"))[0]
centlon = ast.literal_eval(config.get("domain", "centlon"))[0]
dx = ast.literal_eval(config.get("domain", "dx"))
dy = ast.literal_eval(config.get("domain", "dy"))
dz = ast.literal_eval(config.get("domain", "dz"))
nx = ast.literal_eval(config.get("domain", "nx"))
ny = ast.literal_eval(config.get("domain", "ny"))
nz = ast.literal_eval(config.get("domain", "nz"))
z_origin = ast.literal_eval(config.get("domain", "z_origin"))
ll_x = ast.literal_eval(config.get("domain", "ll_x"))
ll_y = ast.literal_eval(config.get("domain", "ll_y"))
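# ll_x, ll_y: lower-left corner offsets of each child domain relative to the root
# domain's lower-left corner (in the local projection's units, i.e. metres); used by domain_nest below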
## [default settings or filter values]
water_temperature_default = ast.literal_eval(config.get("settings", "water_temperature"))
building_height_dummy = ast.literal_eval(config.get("settings", "building_height_dummy"))
tree_height_filter = ast.literal_eval(config.get("settings", "tree_height_filter"))
## [required tif files]
water = ast.literal_eval(config.get("geotif", "water"))
dem = ast.literal_eval(config.get("geotif", "dem"))
lu = ast.literal_eval(config.get("geotif", "lu"))
dem_start_date = ast.literal_eval(config.get("geotif", "dem_start_date"))[0]
dem_end_date = ast.literal_eval(config.get("geotif", "dem_end_date"))[0]
lu_start_date = ast.literal_eval(config.get("geotif", "lu_start_date"))[0]
lu_end_date = ast.literal_eval(config.get("geotif", "lu_end_date"))[0]
## [tif files for urban canopy]
bldh = ast.literal_eval(config.get("urban", "bldh"))
bldid = ast.literal_eval(config.get("urban", "bldid"))
pavement = ast.literal_eval(config.get("urban", "pavement"))
street = ast.literal_eval(config.get("urban", "street"))
## [tif files for plant canopy]
tree_lai_max = ast.literal_eval(config.get("plant", "tree_lai_max"))
lad_max_height = ast.literal_eval(config.get("plant", "lad_max_height"))
sfch = ast.literal_eval(config.get("plant", "sfch"))
# directory of the input tif files
# users can provide their own tif files here;
# otherwise data will be downloaded from NASA, ESA WorldCover, or OSM
static_tif_path = f'./JOBS/{case_name}/INPUT/'
output_path = static_tif_path.replace("INPUT","OUTPUT")
tmp_path = static_tif_path.replace("INPUT","TMP")
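## per-case directory layout: INPUT/ holds user-supplied files, TMP/ intermediate files,
## OUTPUT/ the final static driver files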
## create folders for temporary tif files and final netcdf outputs
if not os.path.exists(tmp_path):
    print("Create tmp folder")
    os.makedirs(tmp_path)
if not os.path.exists(output_path):
    print("Create output folder")
    os.makedirs(output_path)
## check if UTM projection is given
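## if config_proj is empty, convert_wgs_to_utm (util.loc_dom) is assumed to return the
## EPSG code of the UTM zone containing (centlon, centlat)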
if len(config_proj)==0:
    print("UTM projection not given, identifying...")
    config_proj_code = convert_wgs_to_utm(centlon, centlat)
    config_proj = f"EPSG:{config_proj_code}"
    print(config_proj)
## these dictionaries are only used for their keys (values are parsed per domain below)
tif_geotif_dict = dict(config.items('geotif'))
tif_urban_dict = dict(config.items('urban'))
tif_plant_dict = dict(config.items('plant'))
for i in range(0,ndomain):
    if i == 0:
        case_name_d01 = case_name+"_N01"
        dom_cfg_d01 = {'origin_time': origin_time,
                       'centlat': centlat,
                       'centlon': centlon,
                       'lu_csv_file': lu_csv_file,
                       'dx': dx[i],
                       'dy': dy[i],
                       'dz': dz[i],
                       'nx': nx[i],
                       'ny': ny[i],
                       'nz': nz[i],
                       'z_origin': z_origin[i],
                       'tree_lai_max': tree_lai_max[i],
                       'lad_max_height': lad_max_height[i],
                       'water_temperature_file': water[i],
                       'water_temperature_default': water_temperature_default[i],
                       'bldh_dummy': building_height_dummy[i],
                       'tree_height_filter': tree_height_filter[i],
                       }
        tif_dict_d01 = {}
        for keys in tif_geotif_dict.keys():
            tif_dict_d01[keys] = ast.literal_eval(config.get("geotif", keys))[i]
        for keys in tif_urban_dict.keys():
            tif_dict_d01[keys] = ast.literal_eval(config.get("urban", keys))[i]
        for keys in tif_plant_dict.keys():
            tif_dict_d01[keys] = ast.literal_eval(config.get("plant", keys))[i]
        # configure domain location information
        dom_cfg_d01 = domain_location(default_proj, config_proj, dom_cfg_d01)
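        # radius of the area requested from the online data sources; the factor of 1.2
        # adds a margin so the downloaded data comfortably cover the root domain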
        area_radius = np.max([dx[i]*nx[i], dy[i]*ny[i]])*1.2 # units=metre
        #--------------------------------------------------------------------------------#
        ## first check if we need to download data from online sources
        ## water temperature data
        ## NOTE: this check is only done for the root domain; we assume that user-supplied
        ##       data covering the root domain also cover the child domains
        #--------------------------------------------------------------------------------#
        if "online" in tif_geotif_dict['water']:
            download_sst(case_name, origin_time, static_tif_path)
        ## DEM and land use (currently only for NASA online data sets)
        ## prepare dictionaries
        geodata_name_dict = {}
        output_format_dict = {}
        start_date_dict = {}
        end_date_dict = {}
        if "nasa" in tif_geotif_dict['dem']:
            geodata_name_dict["DEM"] = ["SRTMGL1_NC.003",]
            output_format_dict["DEM"] = "geotiff"
            # NASA DEM data are only available for these dates
            start_date_dict["DEM"] = dem_start_date
            end_date_dict["DEM"] = dem_end_date
        if "nasa" in tif_dict_d01["lu"]:
            # https://lpdaac.usgs.gov/documents/1409/MCD12_User_Guide_V61.pdf
            geodata_name_dict["Land_Use"] = ["MCD12Q1.061",]
            output_format_dict["Land_Use"] = "geotiff"
            # users choose the start/end dates
            start_date_dict["Land_Use"] = lu_start_date
            end_date_dict["Land_Use"] = lu_end_date
        ## download data via the NASA AppEEARS API
        if ("nasa" in tif_geotif_dict['dem']) or ("nasa" in tif_geotif_dict['lu']):
            default_buffer_ratio = 1.2 # multiplies area_radius to avoid the area becoming smaller than required after reprojection
            api = 'https://appeears.earthdatacloud.nasa.gov/api/' # AppEEARS API endpoint
            task_type = 'area' # the only task type used in this script
            # check if the files are already there
            if len(glob(static_tif_path+case_name+"_DEM_*"))>0 or len(glob(static_tif_path+case_name+"_Land_Use_*"))>0:
                # ask whether the data need to be downloaded again
                while True:
                    user_input = input("NASA data directories exist, do you wish to continue download? [y/N]")
                    if user_input.lower() == "y":
                        download_nasa_main(api, geodata_name_dict, centlon, centlat, area_radius, default_proj, task_type,\
                                           default_buffer_ratio, start_date_dict, end_date_dict, output_format_dict, case_name, static_tif_path)
                        break
                    elif user_input.lower() == "n":
                        break
                    else:
                        print('Please answer y or n')
                        continue
            else:
                download_nasa_main(api, geodata_name_dict, centlon, centlat, area_radius, default_proj, task_type,\
                                   default_buffer_ratio, start_date_dict, end_date_dict, output_format_dict, case_name, static_tif_path)
        ## download data for ESA WorldCover land use
        if "esa" in tif_dict_d01["lu"]:
            # https://esa-worldcover.org/en/data-access
            check_esa_download(static_tif_path, dom_cfg_d01['lon_w'], dom_cfg_d01['lon_e'], dom_cfg_d01['lat_s'], dom_cfg_d01['lat_n'])
        ## if data need to be downloaded from OSM
        if tif_dict_d01["bldh"]=="osm" or tif_dict_d01["bldid"]=="osm":
            get_osm_building(centlat, centlon, area_radius, static_tif_path, case_name, i)
        if tif_dict_d01["pavement"]=="osm":
            get_osm_street(centlat, centlon, area_radius, static_tif_path, case_name, i)
        # save domain configuration for later - creating static drivers
        with open(f'{tmp_path}{case_name}_cfg_N0{i+1}.pickle', 'wb') as dicts:
            pickle.dump(dom_cfg_d01, dicts, protocol=pickle.HIGHEST_PROTOCOL)
    ## for child domains
    else:
        #--------------------------------------------------------------------------------#
        # downloading data for nested domains
        #--------------------------------------------------------------------------------#
        dom_cfg_nest = {'origin_time': origin_time,
                        'lu_csv_file': lu_csv_file,
                        'dx': dx[i],
                        'dy': dy[i],
                        'dz': dz[i],
                        'nx': nx[i],
                        'ny': ny[i],
                        'nz': nz[i],
                        'z_origin': z_origin[i],
                        'tree_lai_max': tree_lai_max[i],
                        'lad_max_height': lad_max_height[i],
                        'water_temperature_file': water[i],
                        'water_temperature_default': water_temperature_default[i],
                        'bldh_dummy': building_height_dummy[i],
                        'tree_height_filter': tree_height_filter[i],
                        }
        ll_x_nest, ll_y_nest = ll_x[i], ll_y[i]
        dom_cfg_nest = domain_nest(config_proj, dom_cfg_d01['west'], dom_cfg_d01['south'], ll_x_nest, ll_y_nest, dom_cfg_nest)
        tif_dict_nest = {}
        for keys in tif_urban_dict.keys():
            tif_dict_nest[keys] = ast.literal_eval(config.get("urban", keys))[i]
        for keys in tif_plant_dict.keys():
            tif_dict_nest[keys] = ast.literal_eval(config.get("plant", keys))[i]
        area_radius_nest = np.max([dx[i]*nx[i], dy[i]*ny[i]])*1.2 # units=metre
        if tif_dict_nest["bldh"]=="osm" or tif_dict_nest["bldid"]=="osm":
            get_osm_building(dom_cfg_nest["centlat"], dom_cfg_nest["centlon"], area_radius_nest, static_tif_path, case_name, i)
        if tif_dict_nest["pavement"]=="osm":
            get_osm_street(dom_cfg_nest["centlat"], dom_cfg_nest["centlon"], area_radius_nest, static_tif_path, case_name, i)
        # save domain configuration for later - creating static drivers
        with open(f'{tmp_path}{case_name}_cfg_N0{i+1}.pickle', 'wb') as dicts:
            pickle.dump(dom_cfg_nest, dicts, protocol=pickle.HIGHEST_PROTOCOL)
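# each domain's configuration is now pickled in the TMP folder; the static-driver loop
# further below reloads these pickles for the nested domains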
#--------------------------------------------------------------------------------#
#--------------------------------------------------------------------------------#
# preprocess tif files
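# process_all (util.pre_process_tif) is assumed to crop and reproject the collected
# GeoTIFFs onto each domain's grid before the static drivers are generated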
process_all(prefix)
#--------------------------------------------------------------------------------#
#--------------------------------------------------------------------------------#
# create static driver
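# one static driver file per domain (<case_name>_static_N01, _N02, ...) is written to the OUTPUT folder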
for i in range(0,ndomain):
    if i==0:
        static_driver_file = output_path + case_name + f'_static_N{i+1:02d}'
        if os.path.exists(static_driver_file):
            while True:
                user_input = input(f"static driver file exists for N{i+1:02d}, continue? [y/N]")
                if user_input.lower() == "y":
                    ex = "y"
                    break
                elif user_input.lower()=="n":
                    ex = "n"
                    break
                else:
                    print('Please answer y or n')
                    continue
        if not os.path.exists(static_driver_file) or ex=="y":
            dom_cfg_d01 = generate_palm_static(case_name, tmp_path, i, config_proj, dom_cfg_d01)
    else:
        static_driver_file = output_path + case_name + f'_static_N{i+1:02d}'
        if os.path.exists(static_driver_file):
            while True:
                user_input = input(f"static driver file exists for N{i+1:02d}, continue? [y/N]")
                if user_input.lower() == "y":
                    ex = "y"
                    break
                elif user_input.lower()=="n":
                    ex = "n"
                    break
                else:
                    print('Please answer y or n')
                    continue
        if not os.path.exists(static_driver_file) or ex=="y":
            with open(f'{tmp_path}{case_name}_cfg_N0{i+1}.pickle', 'rb') as dicts:
                dom_cfg_nest = pickle.load(dicts)
            dom_cfg_nest = generate_palm_static(case_name, tmp_path, i, config_proj, dom_cfg_nest)