-
Notifications
You must be signed in to change notification settings - Fork 3
/
make_plots.py
141 lines (118 loc) · 4.78 KB
/
make_plots.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
import rioxarray
import xarray as xr
import numpy as np
import os
import s3fs
import glob
import pathlib
import dask.array
import pandas as pd
import matplotlib
import matplotlib.pyplot as plt
import geopandas as gpd
from shapely.geometry import box, mapping, Polygon
import warnings
warnings.filterwarnings("ignore")
import json
import shutil
import tempfile
from datetime import date, datetime
# Connect to AWS S3 storage
s3 = s3fs.S3FileSystem(anon=False, client_kwargs=dict(region_name="us-west-2"))
# Get all key urls for datasets in S3 bucket.
# From https://alexwlchan.net/2017/07/listing-s3-keys/
def get_s3_keys(bucket, prefix):
    """Yield every object key in *bucket* whose key starts with *prefix*.

    Keys are yielded relative to the bucket (no "bucket/" prefix), matching
    the boto3-style keys the original implementation produced.

    Parameters
    ----------
    bucket : str
        S3 bucket name.
    prefix : str
        Key prefix to list under, e.g. "aviris/20220228/".
    """
    # Bug fix: the original called boto3.client("s3"), but boto3 is never
    # imported anywhere in this file, so every call raised NameError.
    # Reuse the module-level s3fs filesystem instead; s3fs.find() walks the
    # prefix recursively and handles pagination internally.
    for path in s3.find(f"{bucket}/{prefix}"):
        # s3fs returns "bucket/key" paths; strip the leading bucket name
        # so callers still see plain object keys.
        yield path[len(bucket) + 1:]
def get_zarrs(Bucket, dataset_date):
    """Return the unique mosaicked zarr store names for one flight date.

    Parameters
    ----------
    Bucket : str
        S3 bucket name (e.g. "dh-shift-curated").
    dataset_date : str
        Flight date in YYYYMMDD form, e.g. "20220228".

    Returns
    -------
    list of str
        De-duplicated zarr store names found under aviris/<dataset_date>/
        (e.g. "..._100-100-100.zarr").
    """
    prefix = f"aviris/{dataset_date}/"
    # Marker that identifies the mosaicked stores we want to plot.
    substring = "100-100-100.zarr"
    # Build the set directly instead of list -> set -> list, and drop the
    # unused s3_key/s3_url locals the original computed.
    zarrs = set()
    for key in get_s3_keys(bucket=Bucket, prefix=prefix):
        if substring in key:
            # Truncate everything after the ".zarr" directory name, then
            # drop the date prefix to leave just the store name.
            store = key[:key.index(substring) + len(substring)]
            zarrs.add(store.replace(prefix, ""))
    return list(zarrs)
def make_plots(dataset_date, zarr):
    """Render RGB composites and per-band reflectance maps for one zarr
    store, save them as JPEGs, and upload the JPEGs to S3.

    Parameters
    ----------
    dataset_date : str
        Flight date in YYYYMMDD form.
    zarr : str
        Zarr store name under aviris/<dataset_date>/.

    Notes
    -----
    Relies on the module-level ``Bucket`` and ``s3`` globals.
    """
    print("Starting plots")
    prefix = f"aviris/{dataset_date}/{zarr}"
    s3_url = os.path.join("s3://", os.path.join(Bucket, prefix))
    # Open the flight-path zarr store directly from S3.
    store = s3fs.S3Map(root=s3_url, s3=s3, check=False)
    ds = xr.open_zarr(store=store, decode_coords="all", consolidated=True)
    # Easting/Northing must be coordinates for pcolormesh plotting below.
    ds = ds.set_coords(("Easting", "Northing"))

    # Band indices selected as the best channels for an RGB composite.
    R = ds.isel(wavelength=59).Reflectance
    G = ds.isel(wavelength=35).Reflectance
    B = ds.isel(wavelength=14).Reflectance
    # NOTE(review): the composite uses band 50 for red while the standalone
    # red plot uses band 59 — confirm which index is intended.
    rgb = ds.isel(wavelength=[50, 35, 14]).Reflectance

    filenames = []

    # True-color RGB image.
    fname = f"{zarr}_RGB_Reflectance_True_Color.jpg"
    _save_image(np.dstack([R, G, B]),
                f"{zarr} RGB Reflectance True Color", fname)
    filenames.append(fname)

    # Higher-exposure version: brighten reflectance by dividing by 0.7.
    fname = f"{zarr}_RGB_Increased_Exposure_Reflectance.jpg"
    _save_image(rgb / 0.7,
                f"{zarr} RGB Increased Exposure Reflectance", fname)
    filenames.append(fname)

    # Individual band maps in projected (Easting/Northing) coordinates.
    for band, label, nm in ((R, "R", "672.6"),
                            (G, "G", "552.4"),
                            (B, "B", "447.2")):
        fname = f"{zarr}_{label}_Reflectance.jpg"
        _plot_band(band, f"{zarr} {label} Reflectance, {nm} nm", fname)
        filenames.append(fname)
    print("Plots done!")

    # Upload every JPEG to the curated bucket (the original repeated a
    # redundant "{}".format(f"...") wrapper for each upload).
    for fname in filenames:
        s3.put(fname, f"s3://dh-shift-curated/aviris/{dataset_date}/{fname}")
    print("S3 upload done!")


def _save_image(array, title, filename):
    """Render *array* with imshow (no axes) under *title* and save to *filename*."""
    fig = plt.figure(figsize=(16, 16))
    ax = fig.add_subplot()
    ax.imshow(array)
    ax.set_title(title, fontsize=12)
    ax.set_axis_off()
    plt.savefig(filename)
    # Close the figure: the original leaked five figures per call, which
    # accumulates memory when looping over many zarr stores.
    plt.close(fig)


def _plot_band(band, title, filename):
    """Plot one reflectance band on Easting/Northing axes and save to *filename*."""
    fig, ax = plt.subplots(figsize=(10, 6))
    band.plot.pcolormesh("Easting", "Northing", robust=True, add_colorbar=False)
    plt.xlabel("Easting (m)")
    plt.ylabel("Northing (m)")
    ax.set_title(title, fontsize=12)
    plt.savefig(filename)
    plt.close(fig)
# Script driver: pick the flight date, then plot and upload every zarr
# store found for it.  Edit dataset_date to process a different flight.
Bucket = "dh-shift-curated"
dataset_date = "20220228"
data = get_zarrs(Bucket, dataset_date)
for zarr in data:
    # make_plots works by side effect (files + S3 uploads) and returns
    # None; the original's unused "item =" binding has been dropped.
    make_plots(dataset_date, zarr)