main.py (forked from zhaoyutim/CalFireMonitoring)

import ee
import yaml
import tqdm
from DataPreparation.DatasetPrepareService import DatasetPrepareService
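
# ee is provided by the earthengine-api package, yaml by PyYAML, and tqdm by tqdm;
# DatasetPrepareService is this repository's own module under DataPreparation/.
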
if __name__ == '__main__':
    # TODO: Enter your desired config file path here. If you just want to recreate the results from the paper,
    # use the config files in the config folder to download the data belonging to the specified year.
    with open("config/us_fire_2021_1e7.yml", "r", encoding="utf8") as f:
        config = yaml.load(f, Loader=yaml.FullLoader)
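
    # Rough sketch of the expected config layout. Only the three global keys below are read directly
    # by this script (they are filtered out when collecting fire names); the per-fire block is an
    # assumption for illustration and is consumed by DatasetPrepareService:
    #
    #   output_bucket: <your-gcs-bucket>
    #   rectangular_size: <size of the extracted region>
    #   year: 2021
    #   some_fire_name:
    #       ...  # per-fire parameters (e.g. location and date range)
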
    # TODO: Enter your gcloud key file path here.
    key_file = 'your_gcloud_key_file.json'
    # TODO: Enter your gcloud service account here.
    service_account = '[email protected]'
    credentials = ee.ServiceAccountCredentials(service_account, key_file)
    ee.Initialize(credentials)
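
    # Note: ee.ServiceAccountCredentials authenticates non-interactively with the key file above,
    # which suits batch exports. For interactive local use, running ee.Authenticate() once and then
    # calling ee.Initialize() without credentials is an alternative (not used here).
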
    # Number of days to extract additionally, before and after the fire dates given in GlobFire.
    # If we want to perform multi-temporal modeling, e.g. with five days of input data, based on which we want to
    # predict the fire occurrence on the sixth day, we need to add four days before the first fire occurrence. This way,
    # we can predict the fire spread right on the day after the fire first appears. Alternatively, the preceding input
    # data could of course be set to zero. However, in a real-world scenario, we would *always* have preceding data,
    # so we choose to model it this way here. Similarly, we want the last fire date to be able to take every position
    # in the input data, so we add four days after the last fire date, to 'push out' the last fire date.
    N_BUFFER_DAYS = 4
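
    # Worked example of the buffering (a sketch only; the actual date handling happens inside
    # DatasetPrepareService): with GlobFire dates 2021-07-10 .. 2021-07-20 and N_BUFFER_DAYS = 4,
    # the extracted range becomes 2021-07-06 .. 2021-07-24, e.g.
    #   from datetime import date, timedelta
    #   start, end = date(2021, 7, 10), date(2021, 7, 20)
    #   buffered = (start - timedelta(days=N_BUFFER_DAYS), end + timedelta(days=N_BUFFER_DAYS))
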
    # Extract fire names from the config file.
    fire_names = list(config.keys())
    for non_fire_key in ["output_bucket", "rectangular_size", "year"]:
        fire_names.remove(non_fire_key)
    locations = fire_names

    # Keep track of any failures, to be able to manually re-run these locations later.
    # This shouldn't happen, but if it does, we get to know about it.
    failed_locations = []

    # Tell Google Earth Engine to compute the images and add them to the specified Google Cloud bucket.
    for location in tqdm.tqdm(locations):
        print(f"Failed locations so far: {failed_locations}")
        dataset_pre = DatasetPrepareService(location=location, config=config)
        print(f"Current location: {location}")
        try:
            # EPSG:32610 is WGS 84 / UTM zone 10N.
            dataset_pre.extract_dataset_from_gee_to_gcloud('32610', n_buffer_days=N_BUFFER_DAYS)
            # Downloads the data from gcloud to the local machine right away. Alternatively, comment this
            # line out and use the gcloud command line tool to download the whole dataset at once after
            # this script is done.
            dataset_pre.download_data_from_gcloud_to_local()
        except Exception as e:
            print(f"Failed to process {location}: {e}")
            failed_locations.append(location)
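
    # After the loop, print a short summary so any failed locations can be re-run manually,
    # as intended by the failure tracking above.
    if failed_locations:
        print(f"Finished with failed locations: {failed_locations}")
    else:
        print("Finished without failures.")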