diff --git a/layers/attention_net.py b/layers/bidaf.py
similarity index 98%
rename from layers/attention_net.py
rename to layers/bidaf.py
index 7088d7a..a0dfa80 100644
--- a/layers/attention_net.py
+++ b/layers/bidaf.py
@@ -6,9 +6,9 @@ from layers.highway import Highway
 
 
 
-class AttentionNet(nn.Module):
+class BiDAF(nn.Module):
     def __init__(self, args):
-        super(AttentionNet, self).__init__()
+        super(BiDAF, self).__init__()
         self.embd_size = args.w_embd_size
         self.d = self.embd_size * 2 # word_embedding + char_embedding
         # self.d = self.embd_size # only word_embedding
diff --git a/main.py b/main.py
index 347db75..2292bb3 100644
--- a/main.py
+++ b/main.py
@@ -11,7 +11,7 @@ from process_data import save_pickle, load_pickle, load_task, load_processed_json, load_glove_weights
 
 from process_data import to_var, to_np, make_vector
 from process_data import DataSet
-from layers.attention_net import AttentionNet
+from layers.bidaf import BiDAF
 from ema import EMA
 from logger import Logger
@@ -195,7 +195,7 @@ def test(model, data, batch_size=args.batch_size):
 # }}}
 
 
-model = AttentionNet(args)
+model = BiDAF(args)
 if torch.cuda.is_available():
     print('use cuda')