Merge pull request #133 from kvos/development
Development
kvos authored Aug 29, 2020
2 parents d0fcf8d + e3e2528 commit 5324643
Showing 5 changed files with 72 additions and 41 deletions.
33 changes: 17 additions & 16 deletions coastsat/SDS_preprocess.py
@@ -300,21 +300,22 @@ def preprocess_single(fn, satname, cloud_mask_issue):
georef[0] = georef[0] + 7.5
georef[3] = georef[3] - 7.5

# check if -inf or nan values on any band and add to cloud mask
# check if -inf or nan values on any band and eventually add those pixels to cloud mask
im_nodata = np.zeros(cloud_mask.shape).astype(bool)
for k in range(im_ms.shape[2]):
im_inf = np.isin(im_ms[:,:,k], -np.inf)
im_nan = np.isnan(im_ms[:,:,k])
cloud_mask = np.logical_or(np.logical_or(cloud_mask, im_inf), im_nan)
im_nodata = np.logical_or(np.logical_or(im_nodata, im_inf), im_nan)
# check if there are pixels with 0 intensity in the Green, NIR and SWIR bands and add those
# to the cloud mask as otherwise they will cause errors when calculating the NDWI and MNDWI
im_zeros = np.ones(cloud_mask.shape).astype(bool)
for k in [1,3,4]: # loop through the Green, NIR and SWIR bands
im_zeros = np.logical_and(np.isin(im_ms[:,:,k],0), im_zeros)
# update cloud mask and nodata
cloud_mask = np.logical_or(im_zeros, cloud_mask)
im_nodata = np.logical_or(im_zeros, im_nodata)
# add zeros to im nodata
im_nodata = np.logical_or(im_zeros, im_nodata)
# update cloud mask with all the nodata pixels
cloud_mask = np.logical_or(cloud_mask, im_nodata)

# no extra image for Landsat 5 (they are all 30 m bands)
im_extra = []

@@ -351,21 +352,21 @@ def preprocess_single(fn, satname, cloud_mask_issue):
# resize the image using nearest neighbour interpolation (order 0)
cloud_mask = transform.resize(cloud_mask, (nrows, ncols), order=0, preserve_range=True,
mode='constant').astype('bool_')
# check if -inf or nan values on any band and eventually add those pixels to cloud mask
im_nodata = np.zeros(cloud_mask.shape).astype(bool)
for k in range(im_ms.shape[2]):
im_inf = np.isin(im_ms[:,:,k], -np.inf)
im_nan = np.isnan(im_ms[:,:,k])
cloud_mask = np.logical_or(np.logical_or(cloud_mask, im_inf), im_nan)
im_nodata = np.logical_or(np.logical_or(im_nodata, im_inf), im_nan)
# check if there are pixels with 0 intensity in the Green, NIR and SWIR bands and add those
# to the cloud mask as otherwise they will cause errors when calculating the NDWI and MNDWI
im_zeros = np.ones(cloud_mask.shape).astype(bool)
for k in [1,3,4]: # loop through the Green, NIR and SWIR bands
im_zeros = np.logical_and(np.isin(im_ms[:,:,k],0), im_zeros)
# update cloud mask and nodata
cloud_mask = np.logical_or(im_zeros, cloud_mask)
im_nodata = np.logical_or(im_zeros, im_nodata)
# add zeros to im nodata
im_nodata = np.logical_or(im_zeros, im_nodata)
# update cloud mask with all the nodata pixels
cloud_mask = np.logical_or(cloud_mask, im_nodata)

# pansharpen Green, Red, NIR (where there is overlapping with pan band in L7)
try:
@@ -413,22 +414,22 @@ def preprocess_single(fn, satname, cloud_mask_issue):
# resize the image using nearest neighbour interpolation (order 0)
cloud_mask = transform.resize(cloud_mask, (nrows, ncols), order=0, preserve_range=True,
mode='constant').astype('bool_')
# check if -inf or nan values on any band and eventually add those pixels to cloud mask
im_nodata = np.zeros(cloud_mask.shape).astype(bool)
for k in range(im_ms.shape[2]):
im_inf = np.isin(im_ms[:,:,k], -np.inf)
im_nan = np.isnan(im_ms[:,:,k])
cloud_mask = np.logical_or(np.logical_or(cloud_mask, im_inf), im_nan)
im_nodata = np.logical_or(np.logical_or(im_nodata, im_inf), im_nan)
# check if there are pixels with 0 intensity in the Green, NIR and SWIR bands and add those
# to the cloud mask as otherwise they will cause errors when calculating the NDWI and MNDWI
im_zeros = np.ones(cloud_mask.shape).astype(bool)
for k in [1,3,4]: # loop through the Green, NIR and SWIR bands
im_zeros = np.logical_and(np.isin(im_ms[:,:,k],0), im_zeros)
# update cloud mask and nodata
cloud_mask = np.logical_or(im_zeros, cloud_mask)
im_nodata = np.logical_or(im_zeros, im_nodata)

# add zeros to im nodata
im_nodata = np.logical_or(im_zeros, im_nodata)
# update cloud mask with all the nodata pixels
cloud_mask = np.logical_or(cloud_mask, im_nodata)

# pansharpen Blue, Green, Red (where there is overlapping with pan band in L8)
try:
im_ms_ps = pansharpen(im_ms[:,:,[0,1,2]], im_pan, cloud_mask)
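The no-data handling pattern above is repeated in each sensor block (L5, L7 and L8): -inf and NaN pixels on any band are collected into im_nodata, pixels with zero intensity in the Green, NIR and SWIR bands are added to it (they would otherwise break the NDWI/MNDWI calculations), and the combined mask is then folded into cloud_mask. A minimal standalone sketch of that logic, assuming im_ms is a (rows, cols, bands) float array with bands 1, 3 and 4 being Green, NIR and SWIR, and cloud_mask an existing boolean array (illustrative only, not the library function itself):

import numpy as np

def update_nodata_and_cloud_mask(im_ms, cloud_mask):
    # collect -inf and NaN pixels from every band
    im_nodata = np.zeros(cloud_mask.shape).astype(bool)
    for k in range(im_ms.shape[2]):
        im_inf = np.isin(im_ms[:, :, k], -np.inf)
        im_nan = np.isnan(im_ms[:, :, k])
        im_nodata = np.logical_or(np.logical_or(im_nodata, im_inf), im_nan)
    # pixels that are 0 in Green, NIR and SWIR would cause errors in NDWI/MNDWI, so flag them too
    im_zeros = np.ones(cloud_mask.shape).astype(bool)
    for k in [1, 3, 4]:  # Green, NIR, SWIR
        im_zeros = np.logical_and(np.isin(im_ms[:, :, k], 0), im_zeros)
    im_nodata = np.logical_or(im_zeros, im_nodata)
    # update the cloud mask so it covers clouds and all no-data pixels
    cloud_mask = np.logical_or(cloud_mask, im_nodata)
    return cloud_mask, im_nodata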
30 changes: 12 additions & 18 deletions coastsat/SDS_shoreline.py
@@ -756,18 +756,19 @@ def extract_shorelines(metadata, settings):
im_ms, georef, cloud_mask, im_extra, im_QA, im_nodata = SDS_preprocess.preprocess_single(fn, satname, settings['cloud_mask_issue'])
# get image spatial reference system (epsg code) from metadata dict
image_epsg = metadata[satname]['epsg'][i]
# define an advanced cloud mask (for L7 it takes into account the fact that diagonal
# bands of no data are not clouds)
# if not satname == 'L7' or sum(sum(im_nodata)) == 0 or sum(sum(im_nodata)) > 0.5*im_nodata.size:
# cloud_mask_adv = cloud_mask
# else:
# cloud_mask_adv = np.logical_xor(cloud_mask, im_nodata)
cloud_mask_adv = np.logical_xor(cloud_mask, im_nodata)

# calculate cloud cover

# compute cloud_cover percentage (with no data pixels)
cloud_cover_combined = np.divide(sum(sum(cloud_mask.astype(int))),
(cloud_mask.shape[0]*cloud_mask.shape[1]))
if cloud_cover_combined > 0.99: # skip image if more than 99% of pixels are cloudy
continue
# remove no data pixels from the cloud mask
# (for example L7 bands of no data should not be accounted for)
cloud_mask_adv = np.logical_xor(cloud_mask, im_nodata)
# compute updated cloud cover percentage (without no data pixels)
cloud_cover = np.divide(sum(sum(cloud_mask_adv.astype(int))),
(cloud_mask.shape[0]*cloud_mask.shape[1]))
# skip image if cloud cover is above threshold
# skip image if cloud cover is above user-defined threshold
if cloud_cover > settings['cloud_thresh']:
continue

@@ -831,7 +832,7 @@ def extract_shorelines(metadata, settings):
}
print('')

# Close figure window if still open
# close figure window if still open
if plt.get_fignums():
plt.close()

@@ -843,11 +844,4 @@ def extract_shorelines(metadata, settings):
with open(os.path.join(filepath, sitename + '_output.pkl'), 'wb') as f:
pickle.dump(output, f)

# save output into a gdb.GeoDataFrame
gdf = SDS_tools.output_to_gdf(output)
# set projection
gdf.crs = {'init':'epsg:'+str(settings['output_epsg'])}
# save as geojson
gdf.to_file(os.path.join(filepath, sitename + '_output.geojson'), driver='GeoJSON', encoding='utf-8')

return output
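The reworked section above computes cloud cover in two stages: first including the no-data pixels, so that nearly empty scenes (more than 99% masked) are discarded outright, and then excluding them via a logical XOR with im_nodata before applying the user-defined settings['cloud_thresh']. A condensed sketch of that two-stage filter, assuming cloud_mask and im_nodata are boolean arrays of the same shape (a sketch of the idea, not the extract_shorelines code itself):

import numpy as np

def keep_image(cloud_mask, im_nodata, cloud_thresh):
    n_pixels = cloud_mask.shape[0] * cloud_mask.shape[1]
    # stage 1: cloud cover including no-data pixels, to drop almost fully masked scenes
    cloud_cover_combined = np.sum(cloud_mask.astype(int)) / n_pixels
    if cloud_cover_combined > 0.99:
        return False
    # stage 2: remove no-data pixels (e.g. the L7 diagonal gaps) and re-check against the threshold
    cloud_mask_adv = np.logical_xor(cloud_mask, im_nodata)
    cloud_cover = np.sum(cloud_mask_adv.astype(int)) / n_pixels
    return cloud_cover <= cloud_thresh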
19 changes: 13 additions & 6 deletions coastsat/SDS_tools.py
@@ -596,7 +596,7 @@ def transects_from_geojson(filename):

return transects

def output_to_gdf(output):
def output_to_gdf(output, geomtype):
"""
Saves the mapped shorelines as a gpd.GeoDataFrame
@@ -605,7 +605,9 @@ def output_to_gdf(output):
Arguments:
-----------
output: dict
contains the coordinates of the mapped shorelines + attributes
contains the coordinates of the mapped shorelines + attributes
geomtype: str
'lines' for LineString and 'points' for Multipoint geometry
Returns:
-----------
@@ -621,10 +623,15 @@ def output_to_gdf(output):
if len(output['shorelines'][i]) == 0:
continue
else:
# save the geometry + attributes
coords = output['shorelines'][i]
geom = geometry.MultiPoint([(coords[_,0], coords[_,1]) for _ in range(coords.shape[0])])
# geom = geometry.LineString(output['shorelines'][i])
# save the geometry depending on the linestyle
if geomtype == 'lines':
geom = geometry.LineString(output['shorelines'][i])
elif geomtype == 'points':
coords = output['shorelines'][i]
geom = geometry.MultiPoint([(coords[_,0], coords[_,1]) for _ in range(coords.shape[0])])
else:
raise Exception('geomtype %s is not an option, choose between lines or points'%geomtype)
# save into geodataframe with attributes
gdf = gpd.GeoDataFrame(geometry=gpd.GeoSeries(geom))
gdf.index = [i]
gdf.loc[i,'date'] = output['dates'][i].strftime('%Y-%m-%d %H:%M:%S')
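The new geomtype argument only changes how each shoreline is packaged into a shapely geometry before being written to the GeoDataFrame. A toy illustration of the two branches (the coordinates below are invented for the example):

import numpy as np
from shapely import geometry

coords = np.array([[151.30, -33.70], [151.31, -33.71], [151.32, -33.72]])  # made-up shoreline vertices
geom_lines = geometry.LineString(coords)                          # geomtype == 'lines'
geom_points = geometry.MultiPoint([(x, y) for x, y in coords])    # geomtype == 'points'
# any other geomtype value raises an Exception in output_to_gdf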
8 changes: 8 additions & 0 deletions example.py
@@ -100,6 +100,14 @@
# remove inaccurate georeferencing (set threshold to 10 m)
output = SDS_tools.remove_inaccurate_georef(output, 10)

# for GIS applications, save output into a GEOJSON layer
geomtype = 'lines' # choose 'points' or 'lines' for the layer geometry
gdf = SDS_tools.output_to_gdf(output, geomtype)
gdf.crs = {'init':'epsg:'+str(settings['output_epsg'])} # set layer projection
# save GEOJSON layer to file
gdf.to_file(os.path.join(inputs['filepath'], inputs['sitename'], '%s_output_%s.geojson'%(sitename,geomtype)),
driver='GeoJSON', encoding='utf-8')

# plot the mapped shorelines
fig = plt.figure(figsize=[15,8], tight_layout=True)
plt.axis('equal')
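One caveat on the snippet above: the {'init': 'epsg:...'} dictionary is the legacy proj4-style CRS syntax and is deprecated in recent pyproj/geopandas releases. On newer environments the same projection can be set with a plain EPSG string (an assumed drop-in replacement for the gdf.crs line above):

gdf.crs = 'EPSG:' + str(settings['output_epsg'])  # modern geopandas accepts an EPSG string directly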
23 changes: 22 additions & 1 deletion example_jupyter.ipynb
@@ -295,6 +295,27 @@
"output = SDS_tools.remove_inaccurate_georef(output, 10) # remove inaccurate georeferencing (set threshold to 10 m)"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"For use in GIS applications, you can save the mapped shorelines as a GEOJSON layer which can be easily imported into QGIS for example. You can choose to save the shorelines as a collection of lines or points (sometimes the lines are crossing over so better to use points)."
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"geomtype = 'lines' # choose 'points' or 'lines' for the layer geometry\n",
"gdf = SDS_tools.output_to_gdf(output, geomtype)\n",
"gdf.crs = {'init':'epsg:'+str(settings['output_epsg'])} # set layer projection\n",
"# save GEOJSON layer to file\n",
"gdf.to_file(os.path.join(inputs['filepath'], inputs['sitename'], '%s_output_%s.geojson'%(sitename,geomtype)),\n",
" driver='GeoJSON', encoding='utf-8')"
]
},
{
"cell_type": "markdown",
"metadata": {},
@@ -675,7 +696,7 @@
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3",
"version": "3.7.6"
"version": "3.7.4"
},
"toc": {
"base_numbering": 1,
