train_muInfo.py
"""
it's used to train a mutual information system
"""
import model
import torch
from matplotlib import pyplot as plt
num_epoch = 400
save_path = './trainedModel/MutualInfoSystem.pth'
device = torch.device("cuda:1" if torch.cuda.is_available() else "cpu")
print("Using: " + str(device).upper())
net = model.MutualInfoSystem()
net.to(device)
optim = torch.optim.Adam(net.parameters(), lr=0.0001)
muInfo = []
for i in range(num_epoch):
batch_joint = torch.tensor(model.sample_batch(40, 'joint')).to(device)
batch_marginal = torch.tensor(model.sample_batch(40, 'marginal')).to(device)
t = net(batch_joint)
et = torch.exp(net(batch_marginal))
loss = -(torch.mean(t) - torch.log(torch.mean(et)))
print('epoch: {} '.format(i + 1))
print(-loss.cpu().detach().numpy())
muInfo.append(-loss.cpu().detach().numpy())
loss.backward()
optim.step()
optim.zero_grad()
torch.save(net.state_dict(), save_path)
plt.title('train mutual info system')
plt.xlabel('Epoch')
plt.ylabel('Mutual Info')
plt.plot(muInfo)
plt.show()
print('All done!')
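For reference, `train_muInfo.py` assumes a sibling `model.py` that exposes `MutualInfoSystem` and `sample_batch`. That module is not shown on this page, so the following is only a minimal sketch of an interface that would satisfy the calls above: the network width, the correlated-Gaussian sampler, and the noise scale are assumptions for illustration, not the repository's actual code.

# Hypothetical sketch of model.py; shapes chosen to match how
# train_muInfo.py calls these names.
import numpy as np
import torch.nn as nn


class MutualInfoSystem(nn.Module):
    """Statistics network T(x, y): maps a (batch, 2) pair to a scalar score."""

    def __init__(self, hidden=100):  # hidden width is an assumption
        super().__init__()
        self.net = nn.Sequential(
            nn.Linear(2, hidden),
            nn.ReLU(),
            nn.Linear(hidden, hidden),
            nn.ReLU(),
            nn.Linear(hidden, 1),
        )

    def forward(self, x):
        return self.net(x)


def sample_batch(batch_size, mode='joint'):
    """Draw correlated Gaussian pairs; 'marginal' breaks the pairing."""
    x = np.random.normal(0.0, 1.0, size=(batch_size, 1))
    y = x + np.random.normal(0.0, 0.5, size=(batch_size, 1))  # y depends on x
    if mode == 'marginal':
        np.random.shuffle(y)  # permuting y approximates sampling from p(x)p(y)
    return np.concatenate([x, y], axis=1).astype(np.float32)

Shuffling the `y` column within a batch is the standard trick in MINE-style estimators: it keeps the marginal distributions of `x` and `y` intact while destroying their dependence, which is exactly the product-of-marginals sample the second expectation in the Donsker-Varadhan bound requires.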