Commit

Merge pull request #41 from Queeno11/Nico
Adds saturation
Queeno11 authored Mar 19, 2024
2 parents c9812a4 + c27a4ab commit 9b02a6c
Showing 6 changed files with 380 additions and 73 deletions.
145 changes: 145 additions & 0 deletions scripts/debug.ipynb

Large diffs are not rendered by default.

7 changes: 6 additions & 1 deletion scripts/hyperparameter_selection.py
@@ -85,6 +85,11 @@ def get_true_val_loss(
return metrics_epochs







if __name__ == "__main__":
import warnings

@@ -97,7 +102,7 @@ def get_true_val_loss(

variable = "ln_pred_inc_mean"
kind = "reg"
model = "effnet_v2M"
model = "effnet_v2L"
path_repo = r"/mnt/d/Maestría/Tesis/Repo/"
extra = ""
sat_data = "pleiades"
154 changes: 148 additions & 6 deletions scripts/notebooks/visualizar_modelos.ipynb

Large diffs are not rendered by default.

42 changes: 21 additions & 21 deletions scripts/run_model.py
@@ -310,7 +310,7 @@ def get_test_data(i):
),
)

-train_dataset = train_dataset.batch(16)
+train_dataset = train_dataset.batch(64)
if sample_size > 1:
train_dataset = train_dataset.repeat(sample_size).prefetch(tf.data.AUTOTUNE)
else:
@@ -720,7 +720,7 @@ def run(
# Run model
model, history = run_model(
model_function=model,
-lr=0.0001,
+lr=0.0001 * 5,
train_dataset=train_dataset,
val_dataset=val_dataset,
loss=loss,
@@ -749,22 +749,22 @@ def run(
generate=True,
)

-# Generate gridded predictions & plot examples
-for year in all_years_datasets.keys():
-grid_preds = grid_predictions.generate_grid(
-savename,
-all_years_datasets,
-all_years_extents,
-image_size,
-resizing_size,
-nbands,
-stacked_images,
-year=year,
-generate=True,
-)
-grid_predictions.plot_all_examples(
-all_years_datasets, all_years_extents, grid_preds, savename, year
-)
+# # Generate gridded predictions & plot examples
+# for year in all_years_datasets.keys():
+# grid_preds = grid_predictions.generate_grid(
+# savename,
+# all_years_datasets,
+# all_years_extents,
+# image_size,
+# resizing_size,
+# nbands,
+# stacked_images,
+# year=year,
+# generate=True,
+# )
+# grid_predictions.plot_all_examples(
+# all_years_datasets, all_years_extents, grid_preds, savename, year
+# )


if __name__ == "__main__":
@@ -776,13 +776,13 @@ def run(

variable = "ln_pred_inc_mean"
kind = "reg"
model = "effnet_v2L"
model = "effnet_v2S"
path_repo = r"/mnt/d/Maestría/Tesis/Repo/"
extra = ""
extra = "_years_only2013"
sat_data = "pleiades"

if sat_data == "pleiades":
-years = [2013, 2018, 2022]
+years = [2013]  # , 2018, 2022]
nbands = 4
elif sat_data == "landsat":
years = [2013]
2 changes: 1 addition & 1 deletion scripts/true_metrics.py
@@ -636,7 +636,7 @@ def plot_results(
generate=generate,
)
metrics_epochs = pd.read_csv(
f"{path_dataout}/models_by_epoch/{savename}/{savename}_metrics_over_epochs.csv"
f"{path_dataout}/models_by_epoch/{savename}/{savename}_test_metrics_over_epochs.csv"
)

plot_mse_over_epochs(metrics_epochs, savename, metric="mse", save=True)
103 changes: 59 additions & 44 deletions scripts/utils.py
@@ -1,10 +1,12 @@
import cv2
import shapely
import numpy as np
import geopandas as gpd
import cv2
import skimage
from skimage import color
from shapely import Point


def get_dataset_extent(ds):
"""Return a polygon with the extent of the dataset
@@ -27,21 +29,23 @@ def get_dataset_extent(ds):

return polygon


def get_datasets_for_polygon(poly, extents):
-'''Returns the name of the dataset that contains the selected polygon.'''
+"""Returns the name of the dataset that contains the selected polygon."""

correct_datasets = []
for name, extent in extents.items():
if extent.intersects(poly):
correct_datasets += [name]
-if len(correct_datasets)==0:
+
+if len(correct_datasets) == 0:
print("Ningun dataset contiene al polígono seleccionado.")

return correct_datasets


def random_point_from_geometry(polygon, size=100):
-'''Generates a random point within the bounds of a Polygon.'''
+"""Generates a random point within the bounds of a Polygon."""

# Get bounds of the shapefile's polygon
(minx, miny, maxx, maxy) = polygon.bounds
@@ -55,6 +59,7 @@ def random_point_from_geometry(polygon, size=100):
if polygon.contains(point):
return x[0], y[0]


def find_nearest_idx(array, value):
array = np.asarray(array)
idx = (np.abs(array - value)).argmin()
@@ -72,8 +77,8 @@ def find_nearest_raster(x_array, y_array, x_value, y_value, max_bias=0):
y_bias = round(np.sin(angle_bias) * actual_bias)

# Get the nearest index for each axis and add the bias
-x_idx = np.searchsorted(x_array, x_value, side='left', sorter=None) + x_bias
-y_idx = np.searchsorted(y_array, y_value, side='left', sorter=None) + y_bias
+x_idx = np.searchsorted(x_array, x_value, side="left", sorter=None) + x_bias
+y_idx = np.searchsorted(y_array, y_value, side="left", sorter=None) + y_bias
return x_idx, y_idx


@@ -85,12 +90,14 @@ def point_column_to_x_y(df):


def image_from_point(dataset, point, img_size=128):

# Find the nearest raster to this random point
x, y = point
-idx_x = np.searchsorted(dataset.x, x, side='left', sorter=None)
-idx_y = np.searchsorted(-dataset.y, -y, side='left', sorter=None)  # The y array is inverted! https://stackoverflow.com/questions/43095739/numpy-searchsorted-descending-order
+idx_x = np.searchsorted(dataset.x, x, side="left", sorter=None)
+idx_y = np.searchsorted(
+    -dataset.y, -y, side="left", sorter=None
+)  # The y array is inverted! https://stackoverflow.com/questions/43095739/numpy-searchsorted-descending-order

# Create the indexes of the box of the image
idx_x_min = round(idx_x - img_size / 2)
idx_x_max = round(idx_x + img_size / 2)
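
Aside: the inverted-y lookup above exists because np.searchsorted assumes ascending input, while raster y coordinates are usually stored descending; negating both the array and the query value is the standard workaround (see the linked Stack Overflow answer). A minimal sketch with made-up coordinates, not repo code:

import numpy as np

y = np.array([40.0, 39.5, 39.0, 38.5])  # raster y-coords, descending
# searchsorted needs ascending order, so negate the array and the query
idx = np.searchsorted(-y, -39.2, side="left")
print(idx, y[idx])  # 2 39.0 -> first coordinate at or below 39.2
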
@@ -104,70 +111,72 @@ def image_from_point(dataset, point, img_size=128):
| (idx_y_min < 0)
| (idx_y_max > dataset.y.size)
):
-image = np.zeros(shape=(1,1,1))
+image = np.zeros(shape=(1, 1, 1))
return image


-image = dataset.isel(
-    x=slice(idx_x_min, idx_x_max),
-    y=slice(idx_y_min, idx_y_max)
-)
+image = dataset.isel(x=slice(idx_x_min, idx_x_max), y=slice(idx_y_min, idx_y_max))

image = image.band_data
return image


def get_image_bounds(image_ds):
# The array y is inverted!
min_x = image_ds.x[0].item()
min_y = image_ds.y[-1].item()
max_x = image_ds.x[-1].item()
max_y = image_ds.y[0].item()

return min_x, min_y, max_x, max_y


def assert_image_is_valid(image_dataset, n_bands, size):
-'''Check whether image is valid. For use after image_from_point.
-If the image is valid, returns the image in numpy format.
-'''
+"""Check whether image is valid. For use after image_from_point.
+If the image is valid, returns the image in numpy format.
+"""
try:
image = image_dataset.band_data
is_valid = True
except:
-image = np.zeros(shape=[1,100,100])
+image = np.zeros(shape=[1, 100, 100])
is_valid = False

return image, is_valid

-def stacked_image_from_census_tract(dataset, polygon, point=None, img_size=100, n_bands=4, stacked_images=[1,3]):

+def stacked_image_from_census_tract(
+    dataset, polygon, point=None, img_size=100, n_bands=4, stacked_images=[1, 3]
+):

images_to_stack = []
-total_bands = n_bands*len(stacked_images)
+total_bands = n_bands * len(stacked_images)

if point is None:
# Sample point from the polygon's box
point = random_point_from_geometry(polygon)
-# point = polygon.centroid.x, polygon.centroid.y
+# point = polygon.centroid.x, polygon.centroid.y

for size_multiplier in stacked_images:
-image_size = img_size*size_multiplier
+image_size = img_size * size_multiplier
image_da = image_from_point(dataset, point, image_size)

-try:
-image = image_da.to_numpy()[:n_bands,::size_multiplier,::size_multiplier]
+try:
+image = image_da.to_numpy()[:n_bands, ::size_multiplier, ::size_multiplier]
image = image.astype(np.uint8)
images_to_stack += [image]
except:
-image = np.zeros(shape=(n_bands,1,1))
+image = np.zeros(shape=(n_bands, 1, 1))
bounds = None
return image, bounds

# Get total bounds
-bounds = get_image_bounds(image_da) # The last image has to be the biggest
-image = np.concatenate(images_to_stack, axis=0) # Concat over bands
+bounds = get_image_bounds(image_da)  # The last image has to be the biggest
+image = np.concatenate(images_to_stack, axis=0)  # Concat over bands
assert image.shape == (total_bands, img_size, img_size)

return image, bounds
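
Aside: stacked_image_from_census_tract builds a multi-resolution stack around one point: each pass crops a window size_multiplier times larger, strides it back to the base resolution with [::size_multiplier], and the crops are concatenated along the band axis. A minimal NumPy sketch of that idea, using a hypothetical random scene rather than repo data:

import numpy as np

n_bands, img_size = 4, 100
scene = np.random.randint(0, 256, size=(n_bands, 1000, 1000), dtype=np.uint8)

stack = []
for m in (1, 3):  # same spirit as stacked_images=[1, 3]
    crop = scene[:, : img_size * m, : img_size * m]  # window m times larger
    stack.append(crop[:, ::m, ::m])  # subsample back to 100x100
image = np.concatenate(stack, axis=0)  # concat over bands
assert image.shape == (8, 100, 100)
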


def random_image_from_census_tract(dataset, polygon, image_size):
"""Genera una imagen aleatoria de tamaño size centrada en un punto aleatorio del radio censal {link}.
@@ -186,9 +195,10 @@ def random_image_from_census_tract(dataset, polygon, image_size):

# Sample point from the polygon's box
point = random_point_from_geometry(polygon)
-image_da = image_from_point(dataset, point, img_size=image_size)
+image_da = image_from_point(dataset, point, img_size=image_size)
return image_da


def random_tiled_image_from_census_tract(
ds,
icpag,
@@ -252,7 +262,7 @@ def random_tiled_image_from_census_tract(
image_dataset = image_from_point(
ds, point, max_bias=max_bias, tile_size=tile_size
)

try:
image = image_dataset.band_data
except:
@@ -297,7 +307,6 @@ def random_tiled_image_from_census_tract(
return composition, point, boundaries, total_boundaries



def process_image(img, resizing_size, moveaxis=True):
if moveaxis:
img = np.moveaxis(
@@ -307,7 +316,7 @@ def process_image(img, resizing_size, moveaxis=True):
else:
image_size = img.shape[2]

-if image_size != resizing_size: #FIXME: It converts them to black and white. Why?
+if image_size != resizing_size:  # FIXME: It converts them to black and white. Why?
img = cv2.resize(
img, dsize=(resizing_size, resizing_size), interpolation=cv2.INTER_CUBIC
)
@@ -329,11 +338,17 @@ def augment_image(img):
) # Random number between 0.4 and 2.5 (same level of contrast)
img = skimage.exposure.adjust_gamma(img, gamma=rand_gamma)

-# FIXME: add saturation
# Rescale intensity
rand_min = np.random.randint(0, 5) / 10 # Random number between 0 and 0.5
rand_max = np.random.randint(0, 5) / 10 # Random number between 0 and 0.5
v_min, v_max = np.percentile(img, (rand_min, 100 - rand_max))
img = skimage.exposure.rescale_intensity(img, in_range=(v_min, v_max))

-return img

+# Rescale saturation
+rand_sat = 0.5 + np.random.rand()
+img_hsv = color.rgb2hsv(img)  # hue-saturation-value
+img_hsv[:, :, 1] *= rand_sat
+img_hsv[:, :, 1] = np.clip(img_hsv[:, :, 1], 0, 255)
+img = color.hsv2rgb(img_hsv)

+return img
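
Aside: the block above is the saturation augmentation this commit adds: it scales the S channel in HSV space by a random factor in [0.5, 1.5). Note that skimage.color.rgb2hsv returns all channels as floats in [0, 1], so clipping the S channel at 1 rather than 255 is the tight bound. A standalone sketch on a hypothetical random image, not repo code:

import numpy as np
from skimage import color

rng = np.random.default_rng(0)
img = rng.random((64, 64, 3))  # float RGB in [0, 1]

rand_sat = 0.5 + rng.random()  # random scale factor in [0.5, 1.5)
img_hsv = color.rgb2hsv(img)  # HSV channels are floats in [0, 1]
img_hsv[:, :, 1] = np.clip(img_hsv[:, :, 1] * rand_sat, 0, 1)  # keep S in [0, 1]
img = color.hsv2rgb(img_hsv)
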
