Skip to content

Commit

Permalink
keras.backend.random to keras.random and keras.backend.shape to keras.ops.shape
Browse files Browse the repository at this point in the history
  • Loading branch information
henrysky committed Jul 19, 2024
1 parent 240caa3 commit e972d0d
Show file tree
Hide file tree
Showing 2 changed files with 14 additions and 14 deletions.
24 changes: 12 additions & 12 deletions src/astroNN/nn/layers.py
Original file line number Diff line number Diff line change
Expand Up @@ -60,9 +60,9 @@ def __init__(self, name=None, **kwargs):

def call(self, inputs):
    """Sample a latent vector via the reparameterization trick.

    Draws ``epsilon ~ N(0, I)`` and returns
    ``z_mean + exp(0.5 * z_log_var) * epsilon``, which keeps the sampling
    step differentiable with respect to ``z_mean`` and ``z_log_var``.

    :param inputs: pair ``(z_mean, z_log_var)`` of tensors; assumed to share
        their first two dimensions (batch, dim) — TODO confirm against callers
    :return: sampled tensor with the same shape as ``z_mean``
    """
    z_mean, z_log_var = inputs
    # Keras 3 API: keras.ops.shape replaces keras.backend.shape and
    # keras.random.normal replaces keras.backend.random.normal.
    batch = keras.ops.shape(z_mean)[0]
    dim = keras.ops.shape(z_mean)[1]
    epsilon = keras.random.normal(shape=(batch, dim))
    return z_mean + keras.ops.exp(0.5 * z_log_var) * epsilon


Expand Down Expand Up @@ -90,7 +90,7 @@ def _get_noise_shape(self, inputs):
if self.noise_shape is None:
return self.noise_shape

symbolic_shape = keras.backend.shape(inputs)
symbolic_shape = keras.ops.shape(inputs)
noise_shape = [
symbolic_shape[axis] if shape is None else shape
for axis, shape in enumerate(self.noise_shape)
Expand All @@ -109,7 +109,7 @@ def call(self, inputs, training=None):
if self.disable_layer is True:
return inputs
else:
return keras.backend.random.dropout(inputs, rate=self.rate, noise_shape=noise_shape)
return keras.random.dropout(inputs, rate=self.rate, noise_shape=noise_shape)

def get_config(self):
"""
Expand Down Expand Up @@ -144,7 +144,7 @@ def __init__(self, rate, disable=False, **kwargs):
self.input_spec = keras.layers.input_spec.InputSpec(ndim=3)

def _get_noise_shape(self, inputs):
    """Noise shape for dropout on 3D input.

    Broadcasts a single dropout mask along axis 1 — presumably so the same
    units are dropped across all timesteps of ``(batch, steps, channels)``
    input (the layer declares ``InputSpec(ndim=3)``) — TODO confirm.

    :param inputs: 3D input tensor
    :return: tuple ``(batch, 1, channels)`` used as the dropout noise shape
    """
    # Keras 3 API: keras.ops.shape replaces keras.backend.shape.
    input_shape = keras.ops.shape(inputs)
    return input_shape[0], 1, input_shape[2]


Expand All @@ -168,7 +168,7 @@ def __init__(self, rate, disable=False, **kwargs):
self.input_spec = keras.layers.input_spec.InputSpec(ndim=4)

def _get_noise_shape(self, inputs):
    """Noise shape for dropout on 4D input.

    Broadcasts a single dropout mask along axes 1 and 2 — presumably so
    whole feature maps of ``(batch, h, w, channels)`` input are dropped
    together (the layer declares ``InputSpec(ndim=4)``) — TODO confirm
    channels-last assumption.

    :param inputs: 4D input tensor
    :return: tuple ``(batch, 1, 1, channels)`` used as the dropout noise shape
    """
    # Keras 3 API: keras.ops.shape replaces keras.backend.shape.
    input_shape = keras.ops.shape(inputs)
    return input_shape[0], 1, 1, input_shape[3]


Expand Down Expand Up @@ -205,8 +205,8 @@ def call(self, inputs, training=None):
if self.disable_layer is True:
return inputs
else:
return inputs * keras.backend.random.normal(
shape=keras.backend.shape(inputs), mean=1.0, stddev=stddev
return inputs * keras.random.normal(
shape=keras.ops.shape(inputs), mean=1.0, stddev=stddev
)

def get_config(self):
Expand Down Expand Up @@ -246,7 +246,7 @@ def call(self, inputs, training=None):
if training is None:
training = keras.backend.learning_phase()

noised = keras.backend.random.normal([1], mean=inputs[0], stddev=inputs[1])
noised = keras.random.normal([1], mean=inputs[0], stddev=inputs[1])
output_tensor = keras.ops.where(keras.ops.equal(training, True), inputs[0], noised)
output_tensor._uses_learning_phase = True
return output_tensor
Expand Down Expand Up @@ -495,7 +495,7 @@ def __init__(self, mask, name=None, **kwargs):
super().__init__(name=name, **kwargs)

def compute_output_shape(self, input_shape):
    """Compute the output shape after applying the boolean mask.

    :param input_shape: shape of the layer's input
    :return: input shape with the last dimension replaced by ``self.mask_shape``
    """
    # NOTE(review): keras.ops.shape returns a plain tuple in Keras 3, which
    # has neither .with_rank_at_least nor .concatenate (those are
    # tf.TensorShape methods) — this path likely still depends on the TF
    # backend; verify before relying on it. The TODO below is the author's own.
    input_shape = keras.ops.shape(input_shape)
    # TODO: convert to keras
    input_shape = input_shape.with_rank_at_least(2)
    return input_shape[:-1].concatenate(self.mask_shape)
Expand All @@ -508,7 +508,7 @@ def call(self, inputs, training=None):
:return: Tensor after applying the layer which is just the masked tensor
:rtype: tf.Tensor
"""
batchsize = keras.backend.shape(inputs)[0]
batchsize = keras.ops.shape(inputs)[0]
# need to reshape because tf.keras cannot get the Tensor shape correctly from tf.boolean_mask op

boolean_mask = keras.ops.any(keras.ops.not_equal(inputs, self.boolmask), axis=1, keepdims=True)
Expand Down
4 changes: 2 additions & 2 deletions src/astroNN/nn/losses.py
Original file line number Diff line number Diff line change
Expand Up @@ -517,12 +517,12 @@ def categorical_crossentropy(y_true, y_pred, sample_weight=None, from_logits=Fal
"float32",
)
# scale preds so that the class probas of each sample sum to 1
y_pred /= keras.ops.sum(y_pred, len(keras.backend.shape(y_pred)) - 1, True)
y_pred /= keras.ops.sum(y_pred, len(keras.ops.shape(y_pred)) - 1, True)
# manual computation of crossentropy
y_pred = keras.ops.clip(y_pred, epsilon_tensor, 1.0 - epsilon_tensor)
losses = (
-keras.ops.sum(
y_true * keras.ops.log(y_pred), len(keras.backend.shape(y_pred)) - 1
y_true * keras.ops.log(y_pred), len(keras.ops.shape(y_pred)) - 1
)
* correction
)
Expand Down

0 comments on commit e972d0d

Please sign in to comment.