-
Notifications
You must be signed in to change notification settings - Fork 16
/
CapsuleParameters.py
123 lines (102 loc) · 3.75 KB
/
CapsuleParameters.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
#!/usr/bin/env python3
# coding: utf-8
import os
class CapsuleParameters(object):
    """Registry mapping layer names to their hyper-parameter dictionaries.

    Acts as a tiny container so that each network layer's configuration
    can be registered once and looked up by name when the model is built.
    """

    def __init__(self):
        # layer_name -> dict of that layer's hyper-parameters
        self.layer_parameters = {}

    def add_params(self, parameters_dict, layer_name):
        """Register (or overwrite) the parameter dict for *layer_name*."""
        self.layer_parameters[layer_name] = parameters_dict

    def get_layer_params(self, layer_name):
        """Return the parameter dict registered under *layer_name*.

        Raises:
            KeyError: if no parameters were registered for *layer_name*.
        """
        return self.layer_parameters[layer_name]
class CapsuleTrainingParameters(object):
    """Bundle of training hyper-parameters for a capsule-network run.

    Creates ``save_dir`` on construction if it does not already exist
    (side effect inherited from the original implementation).
    """

    def __init__(self,
                 epochs=100,
                 batch_size=50,
                 lr=0.001,
                 lr_decay=0.9,
                 lam_recon=0.392,
                 routing=5,
                 shift_fraction=0.1,
                 debug=False,
                 save_dir='./result',
                 data_augmentation=False,
                 testing=False,
                 digit=5,
                 weights=None,
                 plot_log=False,
                 log_filename='log_test.csv',
                 verbose=1):
        self.epochs = epochs
        self.batch_size = batch_size
        self.lr = lr
        self.lr_decay = lr_decay
        self.lam_recon = lam_recon
        self.debug = debug
        self.routing = routing
        self.shift_fraction = shift_fraction
        self.save_dir = save_dir
        # Ensure the output directory exists before any artifacts are
        # written.  exist_ok avoids the check-then-create race present in
        # the original exists()/makedirs() pair.
        os.makedirs(self.save_dir, exist_ok=True)
        self.data_augmentation = data_augmentation
        self.testing = testing
        self.digit = digit
        self.weights = weights
        self.plot_log = plot_log
        self.log_filename = log_filename
        self.verbose = verbose
        # Cross-validation fold index; updated via add_fold().
        self.fold = 1

    def generate_params_df(self):
        """Return the tunable hyper-parameters as a plain dict.

        Bug fix: the original built the dict and then fell off a bare
        ``return``, so it always yielded ``None`` and the dict was lost.
        """
        return {
            'lr_decay': self.lr_decay,
            'routing': self.routing,
            'lr': self.lr,
            'lam_recon': self.lam_recon,
        }

    def add_fold(self, j):
        """Record the current cross-validation fold index."""
        self.fold = j
if __name__ == '__main__':
    # Build the default capsule-network architecture description.
    capsule_params = CapsuleParameters()

    # First convolutional layer.
    conv_layer_name = 'conv_layer'
    conv_layer_params = {
        'filters': 256,
        'kernel_size': 9,
        'strides': [1, 1],
        'padding': 'VALID',
        'activation': 'relu',
        'name': 'conv1',
    }
    capsule_params.add_params(conv_layer_params, conv_layer_name)

    # Primary capsule layer:
    # [n_channels, dim_capsule, filters, kernel_size, strides, padding]
    caps_layer_name = 'caps_layer'
    caps_layer_params = {
        'filters': 256,
        'kernel_size': 9,
        'strides': [2, 2],
        'padding': 'VALID',  # original assigned this key twice; deduplicated
        'n_channels': 32,
        'dim_capsule': 8,
        'name': 'caps_layer',
    }
    capsule_params.add_params(caps_layer_params, caps_layer_name)

    # Digit capsule layer.
    digit_layer_name = 'digitcaps_layer'
    digit_layer_params = {
        'n_channels': 10,
        'dim_capsule': 16,
        'name': 'digitcaps',
    }
    capsule_params.add_params(digit_layer_params, digit_layer_name)

    # Decoder (fully connected reconstruction head).
    decoder_layer = 'decoder_layer'
    decoder_params = {
        'first_dense': 512,
        'second_dense': 1024,
        'name': 'decoder',
    }
    capsule_params.add_params(decoder_params, decoder_layer)