Skip to content

Commit

Permalink
Experiment with the latent_dense parameter: default it to False, plumb it through train_siamese.py as a CLI option, and add it to the wandb sweep space.
Browse files Browse the repository at this point in the history
  • Loading branch information
Benjamin-Etheredge committed Jan 30, 2022
1 parent d542180 commit 66b0972
Show file tree
Hide file tree
Showing 3 changed files with 24 additions and 9 deletions.
4 changes: 3 additions & 1 deletion gloves/models/custom_model.py
Original file line number Diff line number Diff line change
Expand Up @@ -245,7 +245,7 @@ def build_custom_encoder(
conv_reg_rate=0.01,
dense_reg_rate=0.1,
use_batch_norm=True,
latent_dense=True,
latent_dense=False,
pooling='avg'):
# TODO pass activation as none and assign. str value so constructs in block.
# TODO maybe linear final layer
Expand Down Expand Up @@ -392,5 +392,7 @@ def __init__(self, **kwargs):
def call(self, inputs):
    """Compute the pairwise Euclidean (L2) distance between two embeddings.

    Args:
        inputs: a pair ``(x, y)`` of tensors with matching shapes; the
            distance is reduced over the last axis.

    Returns:
        A tensor of L2 distances with the last axis kept (size 1), i.e.
        shape ``x.shape[:-1] + (1,)``.
    """
    x, y = inputs
    # TODO verify norm logic
    # NOTE(review): the original body had two additional, unreachable
    # `return` statements after this one (tf.linalg.norm / tf.norm
    # variants). They computed the same quantity but could never execute;
    # they have been removed as dead code. Behavior is unchanged.
    return tf.sqrt(tf.reduce_sum(tf.math.pow(tf.math.subtract(x, y), 2), axis=-1, keepdims=True))

12 changes: 9 additions & 3 deletions gloves/train_siamese.py
Original file line number Diff line number Diff line change
Expand Up @@ -36,6 +36,8 @@ def log_summary(model, dir=None, name=None):
with open(filename, "w") as f:
model.summary(print_fn=lambda x: f.write(x + '\n'))
mlflow.log_artifact(filename)
#wandb.F({filename: filename})



def train(
Expand Down Expand Up @@ -75,9 +77,10 @@ def train(
use_batch_norm,
distance,
use_sigmoid,
monitor_metric,
pooling,
conv_layers,
monitor_metric,
latent_dense,
glob_pattern='*.jpg',
nway_disabled=False,
label_func='name',
Expand Down Expand Up @@ -109,7 +112,8 @@ def train(
conv_reg_rate=conv_reg_rate,
use_batch_norm=use_batch_norm,
pooling=pooling,
conv_layers=conv_layers
conv_layers=conv_layers,
latent_dense=latent_dense,
)

input1 = tf.keras.Input(encoder.output_shape[-1])
Expand Down Expand Up @@ -294,7 +298,7 @@ def train(
@click.option("--depth", default=3, type=int)
@click.option("--verbose", default=0, type=int)
@click.option("--nways", default=24, type=int)
@click.option("--nway_freq", default=1, type=int)
@click.option("--nway_freq", default=5, type=int)
@click.option("--eval_freq", default=1, type=int)
@click.option("--mixed_precision", default=False, type=bool)

Expand All @@ -317,6 +321,7 @@ def train(
@click.option("--monitor_metric", type=str)
@click.option("--pooling", type=str)
@click.option("--conv_layers", type=int, default = 3)
@click.option("--latent_dense", type=bool)

def main(
**kwargs
Expand Down Expand Up @@ -359,6 +364,7 @@ def main(
wandb.init(project="gloves", config=kwargs)
mlflow.set_experiment("gloves")
mlflow.log_params(kwargs)
mlflow.tensorflow.autolog(every_n_iter=1)
train(
**kwargs
# train_dir=train_dir,
Expand Down
17 changes: 12 additions & 5 deletions siamese_wandb_sweep.yml
Original file line number Diff line number Diff line change
@@ -1,12 +1,12 @@
program: gloves/train_siamese.py
method: random
method: bayes
metric:
goal: maximize
name: test_nway_acc
early_terminate:
type: hyperband
min_iter: 350
s: 3
min_iter: 200
s: 4

parameters:
train_dir:
Expand All @@ -17,6 +17,15 @@ parameters:
conv_layers:
min: 1
max: 6
pooling:
values:
- avg
- max
- None
latent_dense:
values:
- True
- False
mutate_anchor:
values:
- True
Expand All @@ -30,12 +39,10 @@ parameters:
- 0.01
- 0.001
- 0.0001
- 0.00001
- 0.0
#distribution: log_uniform
conv_reg_rate:
values:
- 0.01
- 0.001
- 0.0001
- 0.00001
Expand Down

0 comments on commit 66b0972

Please sign in to comment.