diff --git a/.gitignore b/.gitignore new file mode 100644 index 0000000..6eced82 --- /dev/null +++ b/.gitignore @@ -0,0 +1,8 @@ +__pycache__ +/MA_SNN/DVSGait/data +/MA_SNN/DVSGestures/data/DvsGesture +/MA_SNN/DVSGestures/data/train +/MA_SNN/DVSGestures/data/test +MA_SNN/DVSGestures/data/DvsGesture.tar.gz +info.txt +Attention-SNN.code-workspace diff --git a/MA_SNN/DVSGestures/Att_SNN_CNN.py b/MA_SNN/DVSGestures/Att_SNN_CNN.py index b78d86a..5e613d6 100644 --- a/MA_SNN/DVSGestures/Att_SNN_CNN.py +++ b/MA_SNN/DVSGestures/Att_SNN_CNN.py @@ -1,41 +1,42 @@ -import os - -import sys - -sys.path.append(os.path.dirname("__file__")) -from DVSGestures.CNN import Att_SNN - -rootPath = os.path.abspath(os.path.dirname(__file__)) -rootPath = os.path.split(rootPath)[0] -sys.path.append(rootPath) - -from DVSGestures.CNN import Config - -os.environ["CUDA_VISIBLE_DEVICES"] = "4," - - -class Logger(object): - def __init__(self, fileN="Default.log"): - self.terminal = sys.stdout - self.log = open(fileN, "w") - - def write(self, message): - self.terminal.write(message) - self.log.write(message) - - def flush(self): - pass - - -logPath = Config.configs().recordPath -if not os.path.exists(logPath): - os.makedirs(logPath) -sys.stdout = Logger(logPath + os.sep + "log_DVS_Gesture_SNN.txt") - - -def main(): - Att_SNN.main() - - -if __name__ == "__main__": - main() +import os + +import sys + +# sys.path.append(os.path.dirname("__file__")) +sys.path.append(os.path.dirname(os.path.dirname(os.path.abspath(__file__)))) +from DVSGestures.CNN import Att_SNN + +rootPath = os.path.abspath(os.path.dirname(__file__)) +rootPath = os.path.split(rootPath)[0] +sys.path.append(rootPath) + +from DVSGestures.CNN import Config + +# os.environ["CUDA_VISIBLE_DEVICES"] = "4," + + +class Logger(object): + def __init__(self, fileN="Default.log"): + self.terminal = sys.stdout + self.log = open(fileN, "w") + + def write(self, message): + self.terminal.write(message) + self.log.write(message) + + def flush(self): + pass + + +logPath = Config.configs().recordPath +if not os.path.exists(logPath): + os.makedirs(logPath) +sys.stdout = Logger(logPath + os.sep + "log_DVS_Gesture_SNN.txt") + + +def main(): + Att_SNN.main() + + +if __name__ == "__main__": + main() diff --git a/MA_SNN/DVSGestures/Att_SNN_CNN_SpikingJelly.py b/MA_SNN/DVSGestures/Att_SNN_CNN_SpikingJelly.py new file mode 100644 index 0000000..ba1d999 --- /dev/null +++ b/MA_SNN/DVSGestures/Att_SNN_CNN_SpikingJelly.py @@ -0,0 +1,42 @@ +import os + +import sys + +# sys.path.append(os.path.dirname("__file__")) +sys.path.append(os.path.dirname(os.path.dirname(os.path.abspath(__file__)))) +from DVSGestures.CNN import Att_SNN_SpikingJelly as Att_SNN + +rootPath = os.path.abspath(os.path.dirname(__file__)) +rootPath = os.path.split(rootPath)[0] +sys.path.append(rootPath) + +from DVSGestures.CNN import Config + +# os.environ["CUDA_VISIBLE_DEVICES"] = "0" + + +class Logger(object): + def __init__(self, fileN="Default.log"): + self.terminal = sys.stdout + self.log = open(fileN, "w") + + def write(self, message): + self.terminal.write(message) + self.log.write(message) + + def flush(self): + pass + + +logPath = Config.configs().recordPath +if not os.path.exists(logPath): + os.makedirs(logPath) +sys.stdout = Logger(logPath + os.sep + "log_DVS_Gesture_SNN.txt") + + +def main(): + Att_SNN.main() + + +if __name__ == "__main__": + main() diff --git a/MA_SNN/DVSGestures/CNN/Att_SNN_SpikingJelly.py b/MA_SNN/DVSGestures/CNN/Att_SNN_SpikingJelly.py new file mode 100644 index 0000000..7791b04 --- /dev/null +++ 
b/MA_SNN/DVSGestures/CNN/Att_SNN_SpikingJelly.py @@ -0,0 +1,44 @@ +import torch +from utils import util +from DVSGestures.DVS_Gesture_utils.dataset import create_data +from DVSGestures.CNN.Networks.Att_SNN_SpikingJelly import create_net +from DVSGestures.CNN.Config import configs +from DVSGestures.DVS_Gesture_utils.process import process +from DVSGestures.DVS_Gesture_utils.save import save_csv + + +def main(): + + config = configs() + config.device = torch.device("cuda" if torch.cuda.is_available() else "cpu") + print(config.device) + + config.device_ids = range(torch.cuda.device_count()) + print(config.device_ids) + + config.name = ( + config.attention + + "_SNN(CNN)-DVS-Gesture_dt=" + + str(config.dt) + + "ms" + + "_T=" + + str(config.T) + ) + config.modelNames = config.name + ".t7" + config.recordNames = config.name + ".csv" + + print(config) + + create_data(config=config) + + create_net(config=config) + + print(config.model) + + print(util.get_parameter_number(config.model)) + + process(config=config) + + print("best acc:", config.best_acc, "best_epoch:", config.best_epoch) + + save_csv(config=config) diff --git a/MA_SNN/DVSGestures/CNN/Config.py b/MA_SNN/DVSGestures/CNN/Config.py index 75a8bf1..2490e9c 100644 --- a/MA_SNN/DVSGestures/CNN/Config.py +++ b/MA_SNN/DVSGestures/CNN/Config.py @@ -1,13 +1,14 @@ import os, torch import torch.nn as nn +from spikingjelly.activation_based.neuron import surrogate class configs(object): def __init__(self): - self.dt = 25 + self.dt = 15 self.T = 60 - self.attention = "no" + self.attention = "TCSA" self.c_ratio=8 self.t_ratio=5 self.epoch = 0 @@ -16,7 +17,7 @@ def __init__(self): self.pretrained_path = None self.batch_size = 128 - self.batch_size_test = 128 + self.batch_size_test = 28 # None 'kaiming' 'xavier' self.init_method = None @@ -37,18 +38,24 @@ def __init__(self): self.interval_scaling = False # network - self.beta = 0 + self.beta = 0. self.alpha = 0.3 - self.Vreset = 0 + self.Vreset = 0. 
self.Vthres = 0.3 self.reduction = 16 self.T_extend_Conv = False self.T_extend_BN = False self.h_conv = False + self.step_mode = "m" + # self.surrogate_function = surrogate.Sigmoid() + self.surrogate_function = surrogate.LeakyKReLU() + self.backend = "cupy" + # Old parameters self.mem_act = torch.relu self.mode_select = "spike" self.TR_model = "NTR" + # BatchNorm self.track_running_stats = True diff --git a/MA_SNN/DVSGestures/CNN/Networks/Att_SNN.py b/MA_SNN/DVSGestures/CNN/Networks/Att_SNN.py index 1b6205a..a926913 100644 --- a/MA_SNN/DVSGestures/CNN/Networks/Att_SNN.py +++ b/MA_SNN/DVSGestures/CNN/Networks/Att_SNN.py @@ -195,18 +195,24 @@ def __init__( ) def forward(self, input): + # print("input: ",input.shape) b, t, _, _, _ = input.size() outputs = input outputs = self.convAttLIF0(outputs) + # print("convAttLIF0: ",outputs.shape) outputs = self.convAttLIF1(outputs) + # print("convAttLIF1: ",outputs.shape) outputs = self.convAttLIF2(outputs) + # print("convAttLIF1: ",outputs.shape) outputs = outputs.reshape(b, t, -1) - + # print("fc_input: ",outputs.shape) outputs = self.FC0(outputs) + # print("FC0: ",outputs.shape) outputs = self.FC1(outputs) + # print("FC1: ",outputs.shape) outputs = torch.sum(outputs, dim=1) outputs = outputs / t diff --git a/MA_SNN/DVSGestures/CNN/Networks/Att_SNN_SpikingJelly.py b/MA_SNN/DVSGestures/CNN/Networks/Att_SNN_SpikingJelly.py new file mode 100644 index 0000000..987c6b7 --- /dev/null +++ b/MA_SNN/DVSGestures/CNN/Networks/Att_SNN_SpikingJelly.py @@ -0,0 +1,210 @@ +import torch +import torch.nn as nn +from module.LIF_Module_SpikingJelly import AttLIF, ConvAttLIF +from torch import optim + + +def create_net(config): + # Net + # define approximate firing function + + class ActFun(torch.autograd.Function): + def __init__(self): + super(ActFun, self).__init__() + + @staticmethod + def forward(ctx, input): + ctx.save_for_backward(input) + return input.ge(0.0).float() + + @staticmethod + def backward(ctx, grad_output): + (input,) = ctx.saved_tensors + temp = abs(input) < config.lens + return grad_output * temp.float() / (2 * config.lens) + + # cnn_layer(in_planes, out_planes, stride, padding, kernel_size) + cfg_cnn = [ + ( + 2, + 64, + 1, + 1, + 3, + ), + ( + 64, + 128, + 1, + 1, + 3, + ), + ( + 128, + 128, + 1, + 1, + 3, + ), + ] + # pooling kernel_size + cfg_pool = [1, 2, 2] + # fc layer + cfg_fc = [cfg_cnn[2][1] * 8 * 8, 256, config.target_size] + + class Net(nn.Module): + def __init__( + self, + ): + super(Net, self).__init__() + h, w = config.im_height, config.im_width + in_planes, out_planes, stride, padding, kernel_size = cfg_cnn[0] + pooling_kernel_size = cfg_pool[0] + h, w = h // cfg_pool[0], w // cfg_pool[0] + self.convAttLIF0 = ConvAttLIF( + attention=config.attention, + inputSize=in_planes, + hiddenSize=out_planes, + kernel_size=(kernel_size, kernel_size), + init_method=config.init_method, + useBatchNorm=True, + pooling_kernel_size=pooling_kernel_size, + T=config.T, + pa_dict={ + "Vreset": config.Vreset, + "Vthres": config.Vthres, + }, + step_mode=config.step_mode, + surrogate_function=config.surrogate_function, + backend=config.backend, + track_running_stats=config.track_running_stats, + c_ratio=config.c_ratio, + t_ratio=config.t_ratio + ) + + in_planes, out_planes, stride, padding, kernel_size = cfg_cnn[1] + pooling_kernel_size = cfg_pool[1] + h, w = h // cfg_pool[1], w // cfg_pool[1] + self.convAttLIF1 = ConvAttLIF( + attention=config.attention, + inputSize=in_planes, + hiddenSize=out_planes, + kernel_size=(kernel_size, kernel_size), + 
init_method=config.init_method, + useBatchNorm=True, + pooling_kernel_size=pooling_kernel_size, + T=config.T, + pa_dict={ + "Vreset": config.Vreset, + "Vthres": config.Vthres, + }, + step_mode=config.step_mode, + surrogate_function=config.surrogate_function, + backend=config.backend, + track_running_stats=config.track_running_stats, + c_ratio=config.c_ratio, + t_ratio=config.t_ratio + ) + + in_planes, out_planes, stride, padding, kernel_size = cfg_cnn[2] + pooling_kernel_size = cfg_pool[2] + h, w = h // cfg_pool[2], w // cfg_pool[2] + self.convAttLIF2 = ConvAttLIF( + attention=config.attention, + inputSize=in_planes, + hiddenSize=out_planes, + kernel_size=(kernel_size, kernel_size), + init_method=config.init_method, + useBatchNorm=True, + pooling_kernel_size=pooling_kernel_size, + T=config.T, + pa_dict={ + "Vreset": config.Vreset, + "Vthres": config.Vthres, + }, + step_mode=config.step_mode, + surrogate_function=config.surrogate_function, + backend=config.backend, + track_running_stats=config.track_running_stats, + c_ratio=config.c_ratio, + t_ratio=config.t_ratio + ) + + self.FC0 = AttLIF( + attention="no" + if config.attention in ["no", "CA", "SA", "CSA"] + else "TA", + inputSize=cfg_fc[0], + hiddenSize=cfg_fc[1], + useBatchNorm=True, + T=config.T, + pa_dict={ + "Vreset": config.Vreset, + "Vthres": config.Vthres, + }, + step_mode=config.step_mode, + surrogate_function=config.surrogate_function, + backend=config.backend, + track_running_stats=config.track_running_stats, + t_ratio=config.t_ratio + ) + + self.FC1 = AttLIF( + attention="no" + if config.attention in ["no", "CA", "SA", "CSA"] + else "TA", + inputSize=cfg_fc[1], + hiddenSize=cfg_fc[2], + useBatchNorm=True, + T=config.T, + pa_dict={ + "Vreset": config.Vreset, + "Vthres": config.Vthres, + }, + step_mode=config.step_mode, + surrogate_function=config.surrogate_function, + backend=config.backend, + track_running_stats=config.track_running_stats, + t_ratio=config.t_ratio + ) + + def forward(self, input): + # print("input: ",input.shape) + b, t, _, _, _ = input.size() + outputs = input + + outputs = self.convAttLIF0(outputs) + # print("convAttLIF0: ",outputs.shape) + outputs = self.convAttLIF1(outputs) + # print("convAttLIF1: ",outputs.shape) + outputs = self.convAttLIF2(outputs) + # print("convAttLIF2: ",outputs.shape) + + outputs = outputs.reshape(b, t, -1) + # print("fc_input: ",outputs.shape) + outputs = self.FC0(outputs) + # print("FC0: ",outputs.shape) + + outputs = self.FC1(outputs) + # print("FC1: ",outputs.shape) + outputs = torch.sum(outputs, dim=1) + outputs = outputs / t + + return outputs + + config.model = Net().to(config.device) + + # optimizer + config.optimizer = optim.Adam( + config.model.parameters(), + lr=config.lr, + betas=config.betas, + eps=config.eps, + weight_decay=config.weight_decay, + ) + + config.scheduler = optim.lr_scheduler.ReduceLROnPlateau( + optimizer=config.optimizer, mode="min", factor=0.1, patience=5, verbose=True + ) + + config.model = nn.DataParallel(config.model, device_ids=config.device_ids) diff --git a/MA_SNN/DVSGestures/CNN/Result/log_DVS_Gesture_SNN.txt b/MA_SNN/DVSGestures/CNN/Result/log_DVS_Gesture_SNN.txt new file mode 100644 index 0000000..ae0cdc8 --- /dev/null +++ b/MA_SNN/DVSGestures/CNN/Result/log_DVS_Gesture_SNN.txt @@ -0,0 +1,252 @@ +cuda +range(0, 4) +dt==15 +T==60 +attention==TCSA +c_ratio==8 +t_ratio==5 +epoch==0 +num_epochs==300 +onlyTest==False +pretrained_path==None +batch_size==128 +batch_size_test==28 +init_method==None +ds==4 +in_channels==2 +im_width==32 +im_height==32 
+target_size==11 +clip==10 +is_train_Enhanced==True +is_spike==False +interval_scaling==False +beta==0.0 +alpha==0.3 +Vreset==0.0 +Vthres==0.3 +reduction==16 +T_extend_Conv==False +T_extend_BN==False +h_conv==False +step_mode==m +surrogate_function==LeakyKReLU() +backend==cupy +mem_act== +mode_select==spike +TR_model==NTR +track_running_stats==True +a==0.5 +lens==0.25 +lr==0.0001 +betas==[0.9, 0.999] +eps==1e-08 +weight_decay==0 +lr_scheduler==True +lr_scheduler_epoch==25 +name==TCSA_SNN(CNN)-DVS-Gesture_dt=15ms_T=60 +modelPath==/home/zbx/Attention-SNN/MA_SNN/DVSGestures/CNN/Result +modelNames==TCSA_SNN(CNN)-DVS-Gesture_dt=15ms_T=60.t7 +recordPath==/home/zbx/Attention-SNN/MA_SNN/DVSGestures/CNN/Result +recordNames==TCSA_SNN(CNN)-DVS-Gesture_dt=15ms_T=60.csv +savePath==/home/zbx/Attention-SNN/MA_SNN/DVSGestures/data +train_dataset==None +test_dataset==None +train_loader==None +test_loader==None +drop_last==False +pip_memory==False +num_work==8 +model==None +criterion==MSELoss() +optimizer==None +device==cuda +device_ids==range(0, 4) +best_acc==0 +best_epoch==0 +epoch_list==[] +loss_train_list==[] +loss_test_list==[] +acc_train_list==[] +acc_test_list==[] +train_loss==0 +train_correct==0 +train_acc==0 +test_loss==0 +test_correct==0 +test_acc==0 +state==None + +DataParallel( + (module): Net( + (convAttLIF0): ConvAttLIF( + (surrogate_function): LeakyKReLU() + (conv2d): Conv2d(2, 64, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1)) + (BNLayer): BatchNorm2d(64, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True) + (attention): TCSA( + (relu): ReLU(inplace=True) + (ca): ChannelAttention( + (avg_pool): AdaptiveAvgPool3d(output_size=1) + (max_pool): AdaptiveMaxPool3d(output_size=1) + (sharedMLP): Sequential( + (0): Conv3d(64, 8, kernel_size=(1, 1, 1), stride=(1, 1, 1), bias=False) + (1): ReLU() + (2): Conv3d(8, 64, kernel_size=(1, 1, 1), stride=(1, 1, 1), bias=False) + ) + (sigmoid): Sigmoid() + ) + (ta): TimeAttention( + (avg_pool): AdaptiveAvgPool3d(output_size=1) + (max_pool): AdaptiveMaxPool3d(output_size=1) + (sharedMLP): Sequential( + (0): Conv3d(60, 12, kernel_size=(1, 1, 1), stride=(1, 1, 1), bias=False) + (1): ReLU() + (2): Conv3d(12, 60, kernel_size=(1, 1, 1), stride=(1, 1, 1), bias=False) + ) + (sigmoid): Sigmoid() + ) + (sa): SpatialAttention( + (conv): Conv2d(2, 1, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False) + (sigmoid): Sigmoid() + ) + ) + (network): Sequential( + (ConvIF): IFNode( + v_threshold=0.3, v_reset=0.0, detach_reset=False, step_mode=m, backend=cupy + (surrogate_function): LeakyKReLU() + ) + ) + ) + (convAttLIF1): ConvAttLIF( + (surrogate_function): LeakyKReLU() + (conv2d): Conv2d(64, 128, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1)) + (BNLayer): BatchNorm2d(128, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True) + (pooling): AvgPool2d(kernel_size=2, stride=2, padding=0) + (attention): TCSA( + (relu): ReLU(inplace=True) + (ca): ChannelAttention( + (avg_pool): AdaptiveAvgPool3d(output_size=1) + (max_pool): AdaptiveMaxPool3d(output_size=1) + (sharedMLP): Sequential( + (0): Conv3d(128, 16, kernel_size=(1, 1, 1), stride=(1, 1, 1), bias=False) + (1): ReLU() + (2): Conv3d(16, 128, kernel_size=(1, 1, 1), stride=(1, 1, 1), bias=False) + ) + (sigmoid): Sigmoid() + ) + (ta): TimeAttention( + (avg_pool): AdaptiveAvgPool3d(output_size=1) + (max_pool): AdaptiveMaxPool3d(output_size=1) + (sharedMLP): Sequential( + (0): Conv3d(60, 12, kernel_size=(1, 1, 1), stride=(1, 1, 1), bias=False) + (1): ReLU() + (2): Conv3d(12, 60, kernel_size=(1, 
1, 1), stride=(1, 1, 1), bias=False) + ) + (sigmoid): Sigmoid() + ) + (sa): SpatialAttention( + (conv): Conv2d(2, 1, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False) + (sigmoid): Sigmoid() + ) + ) + (network): Sequential( + (ConvIF): IFNode( + v_threshold=0.3, v_reset=0.0, detach_reset=False, step_mode=m, backend=cupy + (surrogate_function): LeakyKReLU() + ) + ) + ) + (convAttLIF2): ConvAttLIF( + (surrogate_function): LeakyKReLU() + (conv2d): Conv2d(128, 128, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1)) + (BNLayer): BatchNorm2d(128, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True) + (pooling): AvgPool2d(kernel_size=2, stride=2, padding=0) + (attention): TCSA( + (relu): ReLU(inplace=True) + (ca): ChannelAttention( + (avg_pool): AdaptiveAvgPool3d(output_size=1) + (max_pool): AdaptiveMaxPool3d(output_size=1) + (sharedMLP): Sequential( + (0): Conv3d(128, 16, kernel_size=(1, 1, 1), stride=(1, 1, 1), bias=False) + (1): ReLU() + (2): Conv3d(16, 128, kernel_size=(1, 1, 1), stride=(1, 1, 1), bias=False) + ) + (sigmoid): Sigmoid() + ) + (ta): TimeAttention( + (avg_pool): AdaptiveAvgPool3d(output_size=1) + (max_pool): AdaptiveMaxPool3d(output_size=1) + (sharedMLP): Sequential( + (0): Conv3d(60, 12, kernel_size=(1, 1, 1), stride=(1, 1, 1), bias=False) + (1): ReLU() + (2): Conv3d(12, 60, kernel_size=(1, 1, 1), stride=(1, 1, 1), bias=False) + ) + (sigmoid): Sigmoid() + ) + (sa): SpatialAttention( + (conv): Conv2d(2, 1, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False) + (sigmoid): Sigmoid() + ) + ) + (network): Sequential( + (ConvIF): IFNode( + v_threshold=0.3, v_reset=0.0, detach_reset=False, step_mode=m, backend=cupy + (surrogate_function): LeakyKReLU() + ) + ) + ) + (FC0): AttLIF( + (surrogate_function): LeakyKReLU() + (network): Sequential( + (IFNode): IFNode( + v_threshold=0.3, v_reset=0.0, detach_reset=False, step_mode=m, backend=cupy + (surrogate_function): LeakyKReLU() + ) + ) + (linear): Linear(in_features=8192, out_features=256, bias=True) + (BNLayer): BatchNorm1d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True) + (attention): TA( + (relu): ReLU(inplace=True) + (ta): TimeAttention( + (avg_pool): AdaptiveAvgPool1d(output_size=1) + (max_pool): AdaptiveMaxPool1d(output_size=1) + (sharedMLP): Sequential( + (0): Conv1d(60, 12, kernel_size=(1,), stride=(1,), bias=False) + (1): ReLU() + (2): Conv1d(12, 60, kernel_size=(1,), stride=(1,), bias=False) + ) + (sigmoid): Sigmoid() + ) + ) + ) + (FC1): AttLIF( + (surrogate_function): LeakyKReLU() + (network): Sequential( + (IFNode): IFNode( + v_threshold=0.3, v_reset=0.0, detach_reset=False, step_mode=m, backend=cupy + (surrogate_function): LeakyKReLU() + ) + ) + (linear): Linear(in_features=256, out_features=11, bias=True) + (BNLayer): BatchNorm1d(11, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True) + (attention): TA( + (relu): ReLU(inplace=True) + (ta): TimeAttention( + (avg_pool): AdaptiveAvgPool1d(output_size=1) + (max_pool): AdaptiveMaxPool1d(output_size=1) + (sharedMLP): Sequential( + (0): Conv1d(60, 12, kernel_size=(1,), stride=(1,), bias=False) + (1): ReLU() + (2): Conv1d(12, 60, kernel_size=(1,), stride=(1,), bias=False) + ) + (sigmoid): Sigmoid() + ) + ) + ) + ) +) +{'Total': 2340535, 'Trainable': 2340535} +epoch: 1 +dt: 15 +T: 60 +Tarin loss:0.12404 diff --git a/MA_SNN/DVSGestures/DVS_Gesture_utils/process.py b/MA_SNN/DVSGestures/DVS_Gesture_utils/process.py index 719fb3d..9f20c32 100644 --- a/MA_SNN/DVSGestures/DVS_Gesture_utils/process.py +++ 
b/MA_SNN/DVSGestures/DVS_Gesture_utils/process.py @@ -1,6 +1,8 @@ from DVSGestures.DVS_Gesture_utils.train import train from DVSGestures.DVS_Gesture_utils.test import test -import torch +import torch,gc +import time +from torch import nn import os @@ -49,10 +51,19 @@ def process(config): config.loss_train_list.append(config.train_loss) config.acc_train_list.append(config.train_acc) + # config.model=config.model.module.cpu() + # device_saved=config.device + # config.device='cpu' + gc.collect() + torch.cuda.empty_cache() + #time.sleep(3) + #config.model=config.model.to(torch.device('cuda:0')) + #config.model = nn.DataParallel(config.model, device_ids=config.device_ids) + + # test with torch.no_grad(): - config.model.eval() test(config=config) @@ -87,3 +98,9 @@ def process(config): ) print('beat acc:',config.best_acc) + gc.collect() + torch.cuda.empty_cache() + # if config.onlyTest == False: + # config.device=device_saved + # config.model=config.model.to(torch.device('cuda:0')) + # config.model = nn.DataParallel(config.model, device_ids=config.device_ids) diff --git a/MA_SNN/DVSGestures/DVS_Gesture_utils/train.py b/MA_SNN/DVSGestures/DVS_Gesture_utils/train.py index ae55308..c244f3f 100644 --- a/MA_SNN/DVSGestures/DVS_Gesture_utils/train.py +++ b/MA_SNN/DVSGestures/DVS_Gesture_utils/train.py @@ -15,7 +15,9 @@ def train(config): input = input.float().to(config.device) labels = labels[:, 1, :].float().to(config.device) - + # print(input) + # print(input.shape) + # exit() outputs = config.model(input) @@ -36,4 +38,7 @@ def train(config): bar_train.set_description("Train:Epoch[%d/%d]" % (config.epoch + 1, config.num_epochs)) bar_train.set_postfix(Loss=loss.item()) + del loss,outputs,input,labels + torch.cuda.empty_cache() + bar_train.close() diff --git a/MA_SNN/DVSGestures/DVS_gesture_data_process/events_timeslices.py b/MA_SNN/DVSGestures/DVS_gesture_data_process/events_timeslices.py index 49ac832..21b9fc1 100644 --- a/MA_SNN/DVSGestures/DVS_gesture_data_process/events_timeslices.py +++ b/MA_SNN/DVSGestures/DVS_gesture_data_process/events_timeslices.py @@ -34,9 +34,11 @@ def cast_evs(evs): def get_subsampled_coordinates(evs, ds_h, ds_w): x_coords = evs[:, 1] // ds_w y_coords = evs[:, 2] // ds_h - if x_coords.dtype != np.int: + # if x_coords.dtype != np.int: + if x_coords.dtype != np.int64: x_coords = x_coords.astype(int) - if y_coords.dtype != np.int: + # if y_coords.dtype != np.int: + if y_coords.dtype != np.int64: y_coords = y_coords.astype(int) return x_coords, y_coords @@ -114,7 +116,8 @@ def frame_evs(times, addrs, deltat=1000, duration=500, size=[240], downsample=[1 idx_end += find_first(times[idx_end:], t) if idx_end > idx_start: ee = addrs[idx_start:idx_end] - ev = [(ee[:, i] // d).astype(np.int) for i, d in enumerate(downsample)] + # ev = [(ee[:, i] // d).astype(np.int) for i, d in enumerate(downsample)] + ev = [(ee[:, i] // d).astype(np.int64) for i, d in enumerate(downsample)] np.add.at(chunks, tuple([i] + ev), 1) idx_start = idx_end return chunks @@ -130,7 +133,8 @@ def chunk_evs_pol_dvs(times, addrs, deltat=1000, chunk_size=500, size=[2, 304, 2 idx_end += find_first(times[idx_end:], t) if idx_end > idx_start: ee = addrs[idx_start:idx_end] - pol, x, y = ee[:, 2], (ee[:, 0] // ds_w).astype(np.int), (ee[:, 1] // ds_h).astype(np.int) + # pol, x, y = ee[:, 2], (ee[:, 0] // ds_w).astype(np.int), (ee[:, 1] // ds_h).astype(np.int) + pol, x, y = ee[:, 2], (ee[:, 0] // ds_w).astype(np.int64), (ee[:, 1] // ds_h).astype(np.int64) np.add.at(chunks, (i, pol, x, y), 1) idx_start = idx_end return
chunks @@ -149,7 +153,8 @@ def my_chunk_evs_pol_dvs(data, dt=1000, T=500, size=[2, 304, 240], ds=[4, 4]): idx_end += find_first(data[idx_end:, 0], t + dt) if idx_end > idx_start: ee = data[idx_start:idx_end, 1:] - pol, x, y = ee[:, 0], (ee[:, 1] // ds[0]).astype(np.int), (ee[:, 2] // ds[1]).astype(np.int) + # pol, x, y = ee[:, 0], (ee[:, 1] // ds[0]).astype(np.int), (ee[:, 2] // ds[1]).astype(np.int) + pol, x, y = ee[:, 0], (ee[:, 1] // ds[0]).astype(np.int64), (ee[:, 2] // ds[1]).astype(np.int64) np.add.at(chunks, (i, pol, x, y), 1) idx_start = idx_end return chunks \ No newline at end of file diff --git a/MA_SNN/module/LIF.py b/MA_SNN/module/LIF.py index cf8ed9e..97f4d70 100644 --- a/MA_SNN/module/LIF.py +++ b/MA_SNN/module/LIF.py @@ -6,15 +6,15 @@ class IFCell(nn.Module): def __init__(self, inputSize, hiddenSize, - spikeActFun, - scale=0.3, + spikeActFun,# firing (spike activation) function + scale=0.3,# pa_dict=None, pa_train_self=False, bias=True, p=0, - mode_select='spike', + mode_select='spike',# mode setting; 'spike': emit spikes, 'mem': emit membrane potential mem_act=torch.relu, - TR_model='NTR', + TR_model='NTR',# whether the membrane potential uses the time-constant rule; 'NTR': no, 'TR': yes ): super().__init__() self.inputSize = inputSize @@ -84,6 +84,7 @@ def forward(self, input, init_v=None): if input.device != self.h.device: input = input.to(self.h.device) + # integrate-and-fire process # Step 1: accumulate and reset,spike used as forgetting gate u = self.h + input @@ -95,7 +96,7 @@ # step 4: if self.mode_select == 'spike': x = x - elif self.mode_select == 'mem': + elif self.mode_select == 'mem':# membrane-potential mode # TR if self.TR_model == 'TR': diff --git a/MA_SNN/module/LIF_Module_SpikingJelly.py b/MA_SNN/module/LIF_Module_SpikingJelly.py new file mode 100644 index 0000000..77bc2fc --- /dev/null +++ b/MA_SNN/module/LIF_Module_SpikingJelly.py @@ -0,0 +1,219 @@ +import torch +from torch import nn +from spikingjelly.activation_based.neuron import IFNode,surrogate +from module.Attention import * +from typing import Callable + +# model weight initialization +def paramInit(model, method='xavier'): + scale = 0.05 + for name, w in model.named_parameters(): + if 'weight' in name: + if method == 'xavier': + nn.init.xavier_normal_(w) + elif method == 'kaiming': + nn.init.kaiming_normal_(w) + else: + nn.init.normal_(w) + w *= scale + elif 'bias' in name: + nn.init.constant_(w, 0) + else: + pass + +class AttLIF(nn.Module): + def __init__( + self, + inputSize:int, + hiddenSize:int, + attention:str="TA",# which attention to apply + useBatchNorm:bool=False, + init_method:str=None,# weight initialization method + pa_dict:dict=None,# voltage parameter dictionary + bias:bool=True, + track_running_stats:bool=False,# BatchNorm parameter + step_mode:str='m',# multi_step or single_step + surrogate_function:Callable=surrogate.Sigmoid(), + onlyLast:bool=False,# whether to keep only the last time step's voltage + backend:str='cupy',# whether IFNode uses the cupy backend + T:int=60, + t_ratio:int=16 + ) -> None: + super().__init__() + self.store_v_req = not onlyLast + self.step_mode = step_mode + self.useBatchNorm = useBatchNorm + self.surrogate_function = surrogate_function + self.backend = backend + + self.network = nn.Sequential() + self.attention_flag = attention + self.linear = nn.Linear( + in_features=inputSize, + out_features=hiddenSize, + bias=bias, + ) + + if self.useBatchNorm: + self.BNLayer = nn.BatchNorm1d( + num_features=hiddenSize, track_running_stats=track_running_stats + ) + + if init_method is not None: + paramInit(model=self.linear, method=init_method) + if self.attention_flag == "TA": + self.attention = TA(T, hiddenSize, t_ratio=t_ratio, fc=True) + elif self.attention_flag == "no": + pass + + if pa_dict is None: +
pa_dict={'Vreset': 0., 'Vthres': 0.6} + self.v_threshold = float(pa_dict["Vthres"]) if pa_dict["Vthres"] is not None else 0.6 + self.v_reset = float(pa_dict["Vreset"]) if pa_dict["Vreset"] is not None else None + + self.network.add_module( + "IFNode", + IFNode(v_threshold=self.v_threshold,v_reset=self.v_reset,surrogate_function=self.surrogate_function, + step_mode=self.step_mode,backend=self.backend,store_v_seq=self.store_v_req) + ) + + def forward(self,data:torch.Tensor) -> torch.Tensor: + #*reshaping of data should be adapted to the specific problem* + for layer in self.network: + layer.reset() + + b, t, _ = data.size() + output = self.linear(data.reshape(b * t, -1)) + + if self.useBatchNorm: + output = self.BNLayer(output) + + outputsum = output.reshape(b, t, -1) + + if self.attention_flag == "no": + data=outputsum + else: + data=self.attention(outputsum) + + #*when using this neuron standalone, it is recommended to uncomment the next line* + # data.reshape(b, t, c, h, w) + + output=self.network(data.transpose(0,1)) + + return output.transpose(0,1) + +class ConvAttLIF(nn.Module): + def __init__( + self, + inputSize:int, + hiddenSize:int, + kernel_size:tuple, + attention:str="TA", + onlyLast:bool=False, + padding:int=1, + useBatchNorm:bool=False, + init_method:str=None, + pa_dict:dict=None, + step_mode:str="m", + surrogate_function:Callable=surrogate.Sigmoid(), + backend:str="cupy", + T:int=60, + stride:int=1, + pooling_kernel_size:int=1, + p:float=0,# dropout probability + track_running_stats:int=False, + c_ratio=16, + t_ratio=16 + ) -> None: + super().__init__() + self.store_v_req = not onlyLast + self.step_mode = step_mode + self.useBatchNorm = useBatchNorm + self.surrogate_function = surrogate_function + self.backend = backend + self.attention_flag = attention + self.p=p + + self.conv2d = nn.Conv2d( + in_channels=inputSize, + out_channels=hiddenSize, + kernel_size=kernel_size, + bias=True, + padding=padding, + stride=stride, + ) + + if init_method is not None: + paramInit(model=self.conv2d, method=init_method) + + self.useBatchNorm = useBatchNorm + + if self.useBatchNorm: + self.BNLayer = nn.BatchNorm2d( + hiddenSize, track_running_stats=track_running_stats + ) + + self.pooling_kernel_size = pooling_kernel_size + if self.pooling_kernel_size > 1: + self.pooling = nn.AvgPool2d(kernel_size=pooling_kernel_size) + + if self.attention_flag == "TCSA": + self.attention = TCSA(T, hiddenSize, c_ratio=c_ratio, t_ratio=t_ratio) + elif self.attention_flag == "TSA": + self.attention = TSA(T, hiddenSize, t_ratio=t_ratio) + elif self.attention_flag == "TCA": + self.attention = TCA(T, hiddenSize, c_ratio=c_ratio, t_ratio=t_ratio) + elif self.attention_flag == "CSA": + self.attention = CSA(T, hiddenSize, c_ratio=c_ratio) + elif self.attention_flag == "TA": + self.attention = TA(T, hiddenSize, t_ratio=t_ratio) + elif self.attention_flag == "CA": + self.attention = CA(T, hiddenSize, c_ratio=c_ratio) + elif self.attention_flag == "SA": + self.attention = SA(T, hiddenSize) + elif self.attention_flag == "no": + pass + + if pa_dict is None: + pa_dict={'alpha': 0.3, 'beta': 0., 'Vreset': 0., 'Vthres': 0.6} + self.v_threshold = pa_dict["Vthres"] + self.v_reset = pa_dict["Vreset"] + + self.network = nn.Sequential() + self.network.add_module( + "ConvIF", + IFNode(v_threshold=self.v_threshold,v_reset=self.v_reset,surrogate_function=self.surrogate_function, + step_mode=self.step_mode,backend=self.backend,store_v_seq=self.store_v_req) + ) + if 0 < self.p < 1: + self.network.add_module( + "ConvIF_Dropout", + nn.Dropout2d(p=self.p) + ) + + def forward(self,data:torch.Tensor) -> torch.Tensor: + + for layer in
self.network: + layer.reset() + + b, t, c, h, w = data.size() + out = data.reshape(b * t, c, h, w) + output = self.conv2d(out) + + if self.useBatchNorm: + output = self.BNLayer(output) + + if self.pooling_kernel_size > 1: + output = self.pooling(output) + + _, c, h, w = output.size() + outputsum = output.reshape(b, t, c, h, w) + + if self.attention_flag == "no": + data = outputsum + else: + data = self.attention(outputsum) + + output=self.network(data.transpose(0,1)) + + return output.transpose(0,1) \ No newline at end of file diff --git a/Results/Result/TCSA_SNN(CNN)-DVS-Gesture_dt=15ms_T=60.csv b/Results/Result/TCSA_SNN(CNN)-DVS-Gesture_dt=15ms_T=60.csv new file mode 100644 index 0000000..42b5963 --- /dev/null +++ b/Results/Result/TCSA_SNN(CNN)-DVS-Gesture_dt=15ms_T=60.csv @@ -0,0 +1,5 @@ +Epochs,1,2,3,4,5,6,7,8,9,10,11,12,13,14,15,16,17,18,19,20,21,22,23,24,25,26,27,28,29,30,31,32,33,34,35,36,37,38,39,40,41,42,43,44,45,46,47,48,49,50,51,52,53,54,55,56,57,58,59,60,61,62,63,64,65,66,67,68,69,70,71,72,73,74,75,76,77,78,79,80,81,82,83,84,85,86,87,88,89,90,91,92,93,94,95,96,97,98,99,100,101,102,103,104,105,106,107,108,109,110,111,112,113,114,115,116,117,118,119,120,121,122,123,124,125,126,127,128,129,130,131,132,133,134,135,136,137,138,139,140,141,142,143,144,145,146,147,148,149,150,151,152,153,154,155,156,157,158,159,160,161,162,163,164,165,166,167,168,169,170,171,172,173,174,175,176,177,178,179,180,181,182,183,184,185,186,187,188,189,190,191,192,193,194,195,196,197,198,199,200,201,202,203,204,205,206,207,208,209,210,211,212,213,214,215,216,217,218,219,220,221,222,223,224,225,226,227,228,229,230,231,232,233,234,235,236,237,238,239,240,241,242,243,244,245,246,247,248,249,250,251,252,253,254,255,256,257,258,259,260,261,262,263,264,265,266,267,268,269,270,271,272,273,274,275,276,277,278,279,280,281,282,283,284,285,286,287,288,289,290,291,292,293,294,295,296,297,298,299,300,240 
+Train_Loss,0.09031411781907081,0.06810766570270062,0.06179821453988552,0.057789426669478415,0.0564749326556921,0.05180017314851284,0.05096320360898972,0.04834402464330197,0.04585769288241863,0.042783647403120996,0.04172452054917812,0.039781785756349566,0.035022117756307126,0.034807223454117776,0.03253343813121319,0.029752515628933905,0.029549396596848965,0.027863951213657857,0.026209463737905024,0.024575243517756463,0.024679859913885594,0.022799992375075816,0.02282809242606163,0.019751012325286865,0.021109626349061727,0.02052415069192648,0.020139065105468035,0.018211920280009508,0.01853621732443571,0.017399059608578682,0.01806702623143792,0.018153740372508765,0.01649759989231825,0.016529795806854965,0.01666716979816556,0.015534069575369357,0.01432667402550578,0.015083404909819364,0.01356588639318943,0.012842379324138165,0.01383795365691185,0.013749755453318357,0.012003883020952344,0.012154356017708779,0.011725314427167177,0.011510772723704576,0.011598600260913371,0.01184363877400756,0.01087255091406405,0.011507796309888362,0.011218643747270107,0.011050776252523065,0.010220130579546094,0.010109112272039056,0.010041890386492013,0.010497899213805794,0.009216710925102234,0.010199342062696815,0.00921801566146314,0.00965913743712008,0.01073057479225099,0.0089528804179281,0.008945690654218196,0.010440457705408334,0.00881304768845439,0.009358054073527456,0.00860198438167572,0.008003691816702485,0.0077903378056362275,0.007526286691427231,0.007725806534290313,0.007522972952574492,0.0076623340137302876,0.006561605026945472,0.007082080584950745,0.005926115461625159,0.0070330527611076835,0.006392353912815452,0.006422390067018569,0.0067875954322516915,0.006804796168580651,0.006310538086108863,0.0062260939041152595,0.005153560568578542,0.006054024142213166,0.00602238040883094,0.005448684794828296,0.0058386921882629395,0.005774457938969135,0.006525431456975639,0.005770289897918701,0.005999835091643035,0.005855431547388434,0.005832337262108922,0.005257425317540765,0.006353677343577146,0.005054251570254564,0.00581409081351012,0.005209989240393042,0.005567230423912406,0.004790040012449026,0.00625017664860934,0.005833874992094934,0.007411966100335121,0.005101023381575942,0.00510174916125834,0.0047873568953946235,0.007209934759885073,0.006024513626471162,0.00640478974673897,0.006504826340824365,0.0057320800377056004,0.005643255566246808,0.005120188347063959,0.005668208701536059,0.004983033961616456,0.00521184632088989,0.006395891471765935,0.006608007266186178,0.005298852641135454,0.006547315861098468,0.004778543254360557,0.006648100889287889,0.005099499179050326,0.005332687054760754,0.0050659032305702565,0.005252109677530825,0.00661910951603204,0.005568490317091346,0.0056290141306817535,0.005447927489876747,0.00494470470584929,0.0046927672112360595,0.005430393968708813,0.006143167265690863,0.005881598312407732,0.00636917722877115,0.006774520664475858,0.006023981189355254,0.005347376316785812,0.005780694866552949,0.005649171490222216,0.005795042566023767,0.00471052024513483,0.006219310499727726,0.005106132244691253,0.006454275920987129,0.0049290955299511555,0.0063075685873627664,0.006241770228371024,0.006226699519902468,0.0061056305887177585,0.005281838122755289,0.005948448274284601,0.006754239648580551,0.005709422449581325,0.006238073506392538,0.005115439114160836,0.005696640978567302,0.0049732238287106155,0.0059076848207041625,0.005118841677904129,0.0055562826106324795,0.005993486708030104,0.004366437438875437,0.005527587793767452,0.004858000366948545,0.005584491044282913,0.005918541457504034,0.00538383994717
1509,0.006869885930791497,0.00556892182212323,0.0057996206916868685,0.00543743020389229,0.006100595695897937,0.005294836615212262,0.0059942209627479315,0.006671296758577227,0.004747544624842703,0.005046023009344936,0.005221206229180098,0.005521503719501197,0.006952461833134293,0.005579481506720186,0.007116630813106895,0.006258276198059321,0.005037914542481303,0.006497156899422407,0.005398326413705945,0.005941310827620327,0.004877268429845571,0.005326517880894244,0.006174199329689145,0.005331912497058511,0.005693758814595639,0.005901188030838967,0.0056455121375620365,0.006077703600749373,0.00543204415589571,0.005702490592375398,0.0056938034482300285,0.0049057048978284005,0.006701647653244436,0.007081173709593713,0.004989130073226988,0.005320594343356788,0.006210119836032391,0.005410733260214328,0.00534204610157758,0.005498767783865333,0.0056956422748044135,0.005645560380071401,0.005453409533947707,0.005950590805150568,0.005366222024895251,0.00616337510291487,0.005733541888184845,0.005087584163993597,0.005407247296534479,0.004405556572601199,0.0066483238944783805,0.0060376065550372,0.0062179236439988015,0.006002749619074166,0.00546180612873286,0.005330597492866218,0.0052405507536605,0.005346972425468266,0.006139295292086899,0.005662960722111166,0.006802306068129838,0.005041178362444043,0.006229368108324707,0.005597995896823704,0.006081615830771625,0.004966403753496706,0.005323946592397988,0.006042478699237108,0.006049386109225452,0.0062355335336178545,0.005423105554655194,0.004794501257129014,0.005634753382764757,0.005894665746018291,0.006022670422680676,0.005107529740780592,0.005462767579592765,0.005850979755632579,0.005488419090397656,0.006530570494942367,0.00715697705745697,0.004993989272043109,0.0055512947961688045,0.005173778813332319,0.005086471536196768,0.005390749988146126,0.004955719388090074,0.00545675300527364,0.005861586891114712,0.004744160128757358,0.005538190645165741,0.004947064560838044,0.0054164612898603085,0.005529118841513992,0.005847425176762044,0.006484675803221762,0.0057362917810678486,0.0060131552163511515,0.005270612658932805,0.005886770784854889,0.00510157085955143,0.00599415844772011,0.005292468308471143,0.005538428551517427,0.005769240343943239,0.00549903316423297,0.005771749862469732,0.005697072204202414,0.006457725749351084,0.004762444016523659,0.005339598935097456,0.005357496929354966,0.00706325622741133,0.0055562329012900594,0.006048425496555865,0.004915318754501641,0.005472927005030215,0.005868845270015299,0.006122718844562769,0.005662393989041448,0.005959948152303695,0.00635661359410733,0.005813535745255649,0.006252845749258995,0.005308738490566611,0.0048698086058720945,0.004965846124105155,0.004990690434351563,0.005166421085596085,0.004951701476238668, 
+Test_Loss,0.009090908616781234,0.009391414042976168,0.00909025909172164,0.008112670563989215,0.006405417538351483,0.005544441772831811,0.005119174180759324,0.004794340001212226,0.004637229897909694,0.004614219193657239,0.004269264555639691,0.004022717558675342,0.00384789113369253,0.0035708893090486524,0.0034346229293280178,0.0032168156156937282,0.0030339426671465238,0.0030353797185752127,0.002844077017572191,0.002832377432949013,0.002819397838579284,0.00251978716502587,0.0025829298421740535,0.002462725237839752,0.002439490312503444,0.00236710672163301,0.0022671575347582495,0.002476679864856932,0.002411495914889707,0.002199252777629429,0.0020527838004959957,0.002181633034100135,0.0019909730284578273,0.0020356735007630456,0.0019893385883834624,0.0021074580959975723,0.0020220300493141016,0.0019502980220648977,0.0019245220658679802,0.0018554888044794403,0.0017918322442306413,0.0018218705534107154,0.0018105410763786895,0.001899724608908097,0.0017350974182287853,0.0015767141038344964,0.0016990116900867885,0.0015296049726506071,0.0019294778505961102,0.0014967951509687635,0.0017093441863026883,0.0016041621358858218,0.0015526970020598837,0.001495132688432932,0.0014964382267660566,0.0014864398218277428,0.0013804940920737056,0.0013685052593549092,0.0013801741051591102,0.0013692479787601366,0.0014025496784597635,0.0013977456268750958,0.001694730224294795,0.0014312771045499378,0.00154799813301199,0.0014956499553389019,0.0013627006879283322,0.001305626467284229,0.0013418866373184652,0.001395054611687859,0.0012729700706485246,0.0012806597838385238,0.0013314743991941211,0.0015258442403541675,0.00125831574615505,0.0013028101271225345,0.001310497248131368,0.0013228306443327006,0.0012438033966140614,0.001278708864831262,0.0012243703266398775,0.0011989494889146752,0.0011758995345897145,0.0012114775677522023,0.0012203457403100202,0.0012102832862486443,0.0011817935440275402,0.0011993756000366477,0.0011777741989741726,0.0011828633232249157,0.0012018999860932428,0.0011906109077648986,0.001214348276456197,0.00120625347416434,0.001196105053855313,0.0012071039631134934,0.0011970432889130383,0.0011629295638865894,0.0012360811750921938,0.0012054914453377326,0.001197420298639271,0.0011629602561394376,0.0011599509873323971,0.0011828492757760814,0.0012111443508830333,0.0011698354294316638,0.0011426205332908364,0.0011874043486184544,0.0011975991591397258,0.001194412794171108,0.0011869975055257482,0.0011610356625169514,0.0011909528376741541,0.001214028201583359,0.0012260196368313499,0.001231911059262024,0.001204797030530042,0.0012202387468682396,0.0012195118909908665,0.0011956946820848519,0.0012013379173974196,0.001214053626689646,0.0012135880481865672,0.0012137064710259437,0.0012016755218307177,0.0012449424144708448,0.0012115204862008491,0.0012257566365102928,0.0012315795756876469,0.0011687315224359434,0.0012394526093784306,0.0011963470890704129,0.001212947991573148,0.0011866792156878447,0.0011899997273253071,0.001206518259520332,0.0012151540774438115,0.0012508565818683968,0.0012572556216683655,0.0012060149365829096,0.0012202939846449427,0.0012732646686749326,0.0011772393145494994,0.0012208139782564507,0.001203422196623352,0.0011910115627364983,0.0012190637903081048,0.0012005496666663224,0.0011660221757160292,0.0011874472670671014,0.001191752533324891,0.0012144604594343238,0.0012244475436293417,0.0012032152153551576,0.0012216338008228275,0.0012063972341517608,0.0011849772857709063,0.0012083227125306924,0.001167000618038906,0.0011736881867465047,0.001204480468812916,0.0012294954309860866,0.0012098326037327447,0.00125844462
56773338,0.0012224264080739685,0.0012452414052353965,0.0011843582666996454,0.0011944496486749915,0.0012041586451232433,0.001178313424396846,0.001169238311962949,0.0011830815838442907,0.0012178011807716556,0.0012136880204909374,0.001168553557039963,0.0012003366318013935,0.0011986101563606,0.0011915157652563518,0.0011756049572593634,0.0011854613199830054,0.001214240398257971,0.0011841741028345294,0.001173986339320739,0.0012037658960454994,0.001200855699264341,0.0011945609417226579,0.0012022112703157794,0.0011879699097739327,0.0011741651894731653,0.0011721739131543372,0.0012030433759921127,0.0011957499612536694,0.0011766746484984953,0.0012252147412962382,0.00120967922007872,0.0011742107828872069,0.001170001132413745,0.0011960077099502087,0.0011940760848422845,0.001203560679116183,0.0012218810773144168,0.0012360382618175614,0.0012014860908190407,0.0011950993289550145,0.0011877322466009195,0.0011779872648831871,0.0012026128824800253,0.0012151908905555806,0.0012277865109758244,0.001159637974989083,0.0012231445560852686,0.0012280477780020902,0.0012018930011739334,0.0011679713148623704,0.0011705658036387627,0.001164099258474178,0.0011309886299487619,0.0011986505033241377,0.001175993334295021,0.0011618983983579607,0.0012281196347127357,0.0012044226285070183,0.0011988959999548066,0.0011626463642136918,0.0011532003422164254,0.0011940541728917096,0.0012120807957318094,0.0011757180456899935,0.0012023726012557745,0.0011618536845263506,0.001201836826900641,0.0011907906116296848,0.0011796163653747904,0.0011648585450732044,0.0011774514905280537,0.001163932660387622,0.0012055738932556577,0.0011814332153234216,0.0012012230646279126,0.0012257276827262508,0.0011750762350857256,0.0011537439810733001,0.0011656249252458414,0.001191058066777057,0.0011922110699945027,0.001184265325880713,0.001211861583093802,0.0011959367172999513,0.0011957622184935541,0.0011837743274453613,0.0011853148902042044,0.0012199029326438902,0.00119050742747883,0.0011944899438983863,0.0011948196217417717,0.0011822740495618846,0.001198213868257072,0.0012125175032350753,0.0011507128158377276,0.0011731997960143618,0.0011708446209215456,0.0012119045636306207,0.001219832788531979,0.001176904369559553,0.001199878891929984,0.0012432106770575045,0.0012184658243010443,0.0012175110065274768,0.0011858488174362315,0.0011714259938647351,0.001200390100065205,0.0012206386050416364,0.0011931309062573644,0.0011697328080319698,0.0011926249135285616,0.0011851052857107585,0.0012237127352919845,0.001219859988325172,0.001178212608728144,0.0011831815768447189,0.0011598852411326436,0.0011801179343213638,0.0012264010962098836,0.0012274734158482818,0.0011978271624280346,0.0011827106742809217,0.0011953948220858972,0.001179909287020564,0.0012203317135572435,0.0011903460603207348,0.0012253506523039607,0.0011877664623575078,0.0012041148367441363,0.001238220697268844,0.0012043927951405446,0.001172385240594546,0.0012083051881442467,0.0011786387301981449,0.0012062139239990047,0.001164861214864585, 
+Train_Accuracy,45.361702127659576,56.340425531914896,58.46808510638298,61.53191489361702,62.723404255319146,65.44680851063829,68.42553191489361,70.2127659574468,74.38297872340425,77.7872340425532,81.27659574468085,82.72340425531915,84.85106382978724,85.7872340425532,87.91489361702128,88.76595744680851,90.63829787234043,90.29787234042553,91.57446808510639,91.06382978723404,91.57446808510639,92.59574468085107,92.51063829787235,92.93617021276596,92.85106382978724,92.08510638297872,93.70212765957447,94.55319148936171,94.2127659574468,95.23404255319149,94.72340425531915,94.80851063829788,95.31914893617021,95.65957446808511,95.74468085106383,95.65957446808511,96.08510638297872,95.91489361702128,96.42553191489361,97.19148936170212,96.34042553191489,96.0,97.44680851063829,96.93617021276596,96.93617021276596,97.1063829787234,97.02127659574468,96.76595744680851,96.68085106382979,97.02127659574468,97.61702127659575,98.04255319148936,97.61702127659575,97.36170212765957,97.53191489361703,97.95744680851064,98.2127659574468,97.61702127659575,98.04255319148936,97.61702127659575,97.61702127659575,98.12765957446808,98.04255319148936,97.7872340425532,98.38297872340425,97.7872340425532,97.44680851063829,98.38297872340425,98.12765957446808,98.38297872340425,98.72340425531915,98.29787234042553,98.8936170212766,98.72340425531915,98.12765957446808,98.97872340425532,98.97872340425532,98.72340425531915,98.97872340425532,98.97872340425532,98.80851063829788,98.55319148936171,99.06382978723404,98.8936170212766,98.63829787234043,98.8936170212766,98.8936170212766,99.06382978723404,99.06382978723404,98.46808510638297,98.80851063829788,98.8936170212766,98.97872340425532,99.06382978723404,99.23404255319149,98.80851063829788,99.14893617021276,99.23404255319149,99.23404255319149,99.31914893617021,99.06382978723404,98.8936170212766,98.97872340425532,98.8936170212766,99.31914893617021,99.14893617021276,99.23404255319149,98.8936170212766,98.38297872340425,98.80851063829788,98.29787234042553,99.23404255319149,99.23404255319149,99.23404255319149,99.14893617021276,98.80851063829788,99.40425531914893,98.97872340425532,98.72340425531915,99.14893617021276,98.63829787234043,99.23404255319149,98.63829787234043,99.31914893617021,98.80851063829788,99.31914893617021,99.14893617021276,99.06382978723404,99.06382978723404,99.23404255319149,98.97872340425532,99.23404255319149,98.80851063829788,98.8936170212766,98.80851063829788,98.55319148936171,99.40425531914893,99.23404255319149,99.23404255319149,99.23404255319149,98.97872340425532,98.97872340425532,98.46808510638297,99.14893617021276,99.06382978723404,99.06382978723404,98.63829787234043,98.80851063829788,99.23404255319149,98.55319148936171,98.72340425531915,98.46808510638297,99.06382978723404,98.80851063829788,98.72340425531915,98.72340425531915,98.80851063829788,98.80851063829788,99.06382978723404,99.14893617021276,99.40425531914893,98.46808510638297,99.23404255319149,99.06382978723404,98.97872340425532,99.23404255319149,99.06382978723404,98.8936170212766,98.8936170212766,98.97872340425532,98.80851063829788,99.23404255319149,99.31914893617021,99.14893617021276,98.97872340425532,99.23404255319149,98.97872340425532,98.8936170212766,99.48936170212765,98.97872340425532,98.97872340425532,99.06382978723404,98.55319148936171,99.48936170212765,98.63829787234043,99.14893617021276,99.23404255319149,98.72340425531915,98.80851063829788,98.72340425531915,99.57446808510639,99.40425531914893,98.97872340425532,99.23404255319149,98.8936170212766,99.40425531914893,98.8936170212766,98.72340425531915,99.0638
2978723404,99.14893617021276,98.97872340425532,99.23404255319149,99.06382978723404,98.8936170212766,99.14893617021276,99.14893617021276,98.97872340425532,99.23404255319149,99.31914893617021,98.8936170212766,98.8936170212766,99.48936170212765,99.14893617021276,98.63829787234043,98.63829787234043,98.97872340425532,99.06382978723404,98.97872340425532,99.14893617021276,99.31914893617021,98.29787234042553,98.80851063829788,98.72340425531915,98.97872340425532,98.80851063829788,99.06382978723404,99.14893617021276,99.06382978723404,98.63829787234043,99.23404255319149,98.72340425531915,99.40425531914893,98.80851063829788,98.46808510638297,98.46808510638297,99.14893617021276,98.97872340425532,98.97872340425532,98.72340425531915,99.14893617021276,98.97872340425532,98.8936170212766,99.48936170212765,98.97872340425532,99.14893617021276,99.14893617021276,99.06382978723404,98.72340425531915,99.23404255319149,99.06382978723404,97.87234042553192,98.8936170212766,99.40425531914893,98.8936170212766,99.14893617021276,99.06382978723404,99.14893617021276,99.06382978723404,98.97872340425532,98.55319148936171,98.8936170212766,98.97872340425532,98.46808510638297,99.23404255319149,98.72340425531915,98.97872340425532,98.8936170212766,98.55319148936171,98.63829787234043,98.46808510638297,99.14893617021276,98.72340425531915,99.06382978723404,99.06382978723404,98.72340425531915,98.55319148936171,98.63829787234043,98.72340425531915,98.72340425531915,98.97872340425532,98.8936170212766,98.29787234042553,98.29787234042553,99.31914893617021,98.63829787234043,99.40425531914893,98.97872340425532,98.8936170212766,98.63829787234043,99.06382978723404,98.55319148936171,98.80851063829788,98.72340425531915,98.80851063829788,99.23404255319149,98.72340425531915,98.97872340425532,98.72340425531915,98.8936170212766,99.57446808510639, 
+Test_Accuracy,8.333333333333334,8.333333333333334,8.680555555555555,29.86111111111111,42.361111111111114,55.55555555555556,60.763888888888886,69.44444444444444,69.44444444444444,70.48611111111111,75.69444444444444,78.125,83.68055555555556,85.41666666666667,85.76388888888889,87.84722222222223,87.15277777777777,87.84722222222223,89.58333333333333,88.54166666666667,88.54166666666667,90.625,91.66666666666667,89.58333333333333,90.625,90.27777777777777,90.97222222222223,90.625,89.93055555555556,92.01388888888889,92.01388888888889,92.70833333333333,91.31944444444444,91.31944444444444,92.01388888888889,91.66666666666667,92.01388888888889,92.36111111111111,93.75,92.70833333333333,92.01388888888889,92.01388888888889,92.01388888888889,92.01388888888889,91.66666666666667,94.09722222222223,93.40277777777777,93.40277777777777,91.31944444444444,94.09722222222223,93.05555555555556,92.36111111111111,95.13888888888889,94.79166666666667,93.05555555555556,93.40277777777777,94.09722222222223,94.09722222222223,94.09722222222223,94.44444444444444,93.75,95.13888888888889,92.70833333333333,92.36111111111111,93.05555555555556,93.75,95.13888888888889,94.79166666666667,93.75,93.75,94.09722222222223,94.09722222222223,93.75,92.36111111111111,94.79166666666667,94.79166666666667,93.75,94.44444444444444,94.09722222222223,93.40277777777777,94.79166666666667,94.79166666666667,94.44444444444444,95.13888888888889,94.44444444444444,94.09722222222223,94.09722222222223,93.75,94.44444444444444,94.44444444444444,94.44444444444444,94.09722222222223,94.09722222222223,94.44444444444444,93.40277777777777,94.79166666666667,94.44444444444444,94.44444444444444,94.09722222222223,94.79166666666667,94.79166666666667,94.44444444444444,93.75,94.09722222222223,93.75,94.44444444444444,94.44444444444444,94.09722222222223,94.79166666666667,94.44444444444444,95.13888888888889,94.09722222222223,94.79166666666667,93.40277777777777,94.44444444444444,94.44444444444444,94.79166666666667,94.44444444444444,94.79166666666667,94.09722222222223,94.09722222222223,95.13888888888889,94.44444444444444,94.09722222222223,94.44444444444444,94.44444444444444,93.75,94.44444444444444,94.09722222222223,93.75,94.44444444444444,94.44444444444444,93.75,94.79166666666667,93.40277777777777,93.75,94.44444444444444,94.09722222222223,94.79166666666667,94.79166666666667,93.40277777777777,94.79166666666667,94.09722222222223,93.75,94.44444444444444,94.44444444444444,94.09722222222223,94.79166666666667,94.44444444444444,94.09722222222223,94.09722222222223,94.44444444444444,94.09722222222223,93.40277777777777,94.09722222222223,93.40277777777777,94.09722222222223,94.09722222222223,95.13888888888889,94.09722222222223,95.13888888888889,94.44444444444444,94.44444444444444,93.75,95.13888888888889,94.79166666666667,94.09722222222223,93.75,95.13888888888889,94.44444444444444,95.13888888888889,94.44444444444444,93.75,94.09722222222223,94.79166666666667,94.44444444444444,94.44444444444444,93.75,95.13888888888889,94.79166666666667,93.75,94.09722222222223,94.79166666666667,94.44444444444444,94.79166666666667,94.09722222222223,94.44444444444444,93.40277777777777,94.44444444444444,93.75,95.13888888888889,94.44444444444444,94.44444444444444,95.13888888888889,94.79166666666667,94.79166666666667,94.09722222222223,93.75,94.09722222222223,95.13888888888889,94.09722222222223,93.75,94.44444444444444,94.44444444444444,94.44444444444444,94.09722222222223,94.09722222222223,94.44444444444444,94.09722222222223,94.44444444444444,94.44444444444444,94.44444444444444,94.44444444444444,94.44444444444444,94.444
44444444444,94.09722222222223,94.79166666666667,94.79166666666667,94.44444444444444,94.44444444444444,94.44444444444444,94.09722222222223,94.09722222222223,94.09722222222223,94.44444444444444,94.09722222222223,93.75,94.09722222222223,94.09722222222223,94.44444444444444,94.44444444444444,94.44444444444444,94.79166666666667,94.44444444444444,94.09722222222223,94.44444444444444,94.44444444444444,94.09722222222223,93.75,95.48611111111111,94.09722222222223,94.79166666666667,93.75,94.09722222222223,94.79166666666667,94.09722222222223,94.44444444444444,94.79166666666667,94.09722222222223,94.79166666666667,94.79166666666667,94.09722222222223,94.79166666666667,93.75,93.40277777777777,94.09722222222223,94.09722222222223,94.79166666666667,94.44444444444444,94.09722222222223,94.09722222222223,94.79166666666667,94.09722222222223,95.13888888888889,93.40277777777777,93.40277777777777,93.75,94.44444444444444,94.44444444444444,94.44444444444444,94.79166666666667,93.75,94.09722222222223,94.09722222222223,94.09722222222223,94.09722222222223,94.09722222222223,93.75,94.79166666666667,94.79166666666667,94.44444444444444,94.44444444444444,94.44444444444444,94.44444444444444,94.79166666666667,94.44444444444444,93.75,94.09722222222223,93.75,94.44444444444444,94.44444444444444,94.44444444444444,93.75,94.44444444444444,94.44444444444444,94.79166666666667,94.09722222222223,94.79166666666667,94.44444444444444,94.79166666666667,95.48611111111111 diff --git a/Results/Result/TCSA_SNN(CNN)-DVS-Gesture_dt=15ms_T=60.t7 b/Results/Result/TCSA_SNN(CNN)-DVS-Gesture_dt=15ms_T=60.t7 new file mode 100644 index 0000000..86998ee Binary files /dev/null and b/Results/Result/TCSA_SNN(CNN)-DVS-Gesture_dt=15ms_T=60.t7 differ diff --git a/Results/Result/log_DVS_Gesture_SNN.txt b/Results/Result/log_DVS_Gesture_SNN.txt new file mode 100644 index 0000000..42dc672 --- /dev/null +++ b/Results/Result/log_DVS_Gesture_SNN.txt @@ -0,0 +1,2653 @@ +cuda +range(0, 4) +dt==15 +T==60 +attention==TCSA +c_ratio==8 +t_ratio==5 +epoch==0 +num_epochs==300 +onlyTest==False +pretrained_path==None +batch_size==128 +batch_size_test==32 +init_method==None +ds==4 +in_channels==2 +im_width==32 +im_height==32 +target_size==11 +clip==10 +is_train_Enhanced==True +is_spike==False +interval_scaling==False +beta==0 +alpha==0.3 +Vreset==0 +Vthres==0.3 +reduction==16 +T_extend_Conv==False +T_extend_BN==False +h_conv==False +mem_act== +mode_select==spike +TR_model==NTR +track_running_stats==True +a==0.5 +lens==0.25 +lr==0.0001 +betas==[0.9, 0.999] +eps==1e-08 +weight_decay==0 +lr_scheduler==True +lr_scheduler_epoch==25 +name==TCSA_SNN(CNN)-DVS-Gesture_dt=15ms_T=60 +modelPath==/home/noitom-server/PycharmProjects/pythonProject/Attention-SNN/MA_SNN/DVSGestures/CNN/Result +modelNames==TCSA_SNN(CNN)-DVS-Gesture_dt=15ms_T=60.t7 +recordPath==/home/noitom-server/PycharmProjects/pythonProject/Attention-SNN/MA_SNN/DVSGestures/CNN/Result +recordNames==TCSA_SNN(CNN)-DVS-Gesture_dt=15ms_T=60.csv +savePath==/home/noitom-server/PycharmProjects/pythonProject/Attention-SNN/MA_SNN/DVSGestures/data +train_dataset==None +test_dataset==None +train_loader==None +test_loader==None +drop_last==False +pip_memory==False +num_work==8 +model==None +criterion==MSELoss() +optimizer==None +device==cuda +device_ids==range(0, 4) +best_acc==0 +best_epoch==0 +epoch_list==[] +loss_train_list==[] +loss_test_list==[] +acc_train_list==[] +acc_test_list==[] +train_loss==0 +train_correct==0 +train_acc==0 +test_loss==0 +test_correct==0 +test_acc==0 +state==None + +DataParallel( + (module): Net( + (convAttLIF0): 
ConvAttLIF( + (conv2d): Conv2d(2, 64, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1)) + (BNLayer): BatchNorm2d(64, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True) + (attention): TCSA( + (relu): ReLU(inplace=True) + (ca): ChannelAttention( + (avg_pool): AdaptiveAvgPool3d(output_size=1) + (max_pool): AdaptiveMaxPool3d(output_size=1) + (sharedMLP): Sequential( + (0): Conv3d(64, 8, kernel_size=(1, 1, 1), stride=(1, 1, 1), bias=False) + (1): ReLU() + (2): Conv3d(8, 64, kernel_size=(1, 1, 1), stride=(1, 1, 1), bias=False) + ) + (sigmoid): Sigmoid() + ) + (ta): TimeAttention( + (avg_pool): AdaptiveAvgPool3d(output_size=1) + (max_pool): AdaptiveMaxPool3d(output_size=1) + (sharedMLP): Sequential( + (0): Conv3d(60, 12, kernel_size=(1, 1, 1), stride=(1, 1, 1), bias=False) + (1): ReLU() + (2): Conv3d(12, 60, kernel_size=(1, 1, 1), stride=(1, 1, 1), bias=False) + ) + (sigmoid): Sigmoid() + ) + (sa): SpatialAttention( + (conv): Conv2d(2, 1, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False) + (sigmoid): Sigmoid() + ) + ) + (network): Sequential( + (ConvIF): ConvIFCell() + ) + ) + (convAttLIF1): ConvAttLIF( + (conv2d): Conv2d(64, 128, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1)) + (BNLayer): BatchNorm2d(128, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True) + (pooling): AvgPool2d(kernel_size=2, stride=2, padding=0) + (attention): TCSA( + (relu): ReLU(inplace=True) + (ca): ChannelAttention( + (avg_pool): AdaptiveAvgPool3d(output_size=1) + (max_pool): AdaptiveMaxPool3d(output_size=1) + (sharedMLP): Sequential( + (0): Conv3d(128, 16, kernel_size=(1, 1, 1), stride=(1, 1, 1), bias=False) + (1): ReLU() + (2): Conv3d(16, 128, kernel_size=(1, 1, 1), stride=(1, 1, 1), bias=False) + ) + (sigmoid): Sigmoid() + ) + (ta): TimeAttention( + (avg_pool): AdaptiveAvgPool3d(output_size=1) + (max_pool): AdaptiveMaxPool3d(output_size=1) + (sharedMLP): Sequential( + (0): Conv3d(60, 12, kernel_size=(1, 1, 1), stride=(1, 1, 1), bias=False) + (1): ReLU() + (2): Conv3d(12, 60, kernel_size=(1, 1, 1), stride=(1, 1, 1), bias=False) + ) + (sigmoid): Sigmoid() + ) + (sa): SpatialAttention( + (conv): Conv2d(2, 1, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False) + (sigmoid): Sigmoid() + ) + ) + (network): Sequential( + (ConvIF): ConvIFCell() + ) + ) + (convAttLIF2): ConvAttLIF( + (conv2d): Conv2d(128, 128, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1)) + (BNLayer): BatchNorm2d(128, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True) + (pooling): AvgPool2d(kernel_size=2, stride=2, padding=0) + (attention): TCSA( + (relu): ReLU(inplace=True) + (ca): ChannelAttention( + (avg_pool): AdaptiveAvgPool3d(output_size=1) + (max_pool): AdaptiveMaxPool3d(output_size=1) + (sharedMLP): Sequential( + (0): Conv3d(128, 16, kernel_size=(1, 1, 1), stride=(1, 1, 1), bias=False) + (1): ReLU() + (2): Conv3d(16, 128, kernel_size=(1, 1, 1), stride=(1, 1, 1), bias=False) + ) + (sigmoid): Sigmoid() + ) + (ta): TimeAttention( + (avg_pool): AdaptiveAvgPool3d(output_size=1) + (max_pool): AdaptiveMaxPool3d(output_size=1) + (sharedMLP): Sequential( + (0): Conv3d(60, 12, kernel_size=(1, 1, 1), stride=(1, 1, 1), bias=False) + (1): ReLU() + (2): Conv3d(12, 60, kernel_size=(1, 1, 1), stride=(1, 1, 1), bias=False) + ) + (sigmoid): Sigmoid() + ) + (sa): SpatialAttention( + (conv): Conv2d(2, 1, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False) + (sigmoid): Sigmoid() + ) + ) + (network): Sequential( + (ConvIF): ConvIFCell() + ) + ) + (FC0): AttLIF( + (network): Sequential( + (IF): 
IFCell() + ) + (linear): Linear(in_features=8192, out_features=256, bias=True) + (BNLayer): BatchNorm1d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True) + (attention): TA( + (relu): ReLU(inplace=True) + (ta): TimeAttention( + (avg_pool): AdaptiveAvgPool1d(output_size=1) + (max_pool): AdaptiveMaxPool1d(output_size=1) + (sharedMLP): Sequential( + (0): Conv1d(60, 12, kernel_size=(1,), stride=(1,), bias=False) + (1): ReLU() + (2): Conv1d(12, 60, kernel_size=(1,), stride=(1,), bias=False) + ) + (sigmoid): Sigmoid() + ) + ) + ) + (FC1): AttLIF( + (network): Sequential( + (IF): IFCell() + ) + (linear): Linear(in_features=256, out_features=11, bias=True) + (BNLayer): BatchNorm1d(11, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True) + (attention): TA( + (relu): ReLU(inplace=True) + (ta): TimeAttention( + (avg_pool): AdaptiveAvgPool1d(output_size=1) + (max_pool): AdaptiveMaxPool1d(output_size=1) + (sharedMLP): Sequential( + (0): Conv1d(60, 12, kernel_size=(1,), stride=(1,), bias=False) + (1): ReLU() + (2): Conv1d(12, 60, kernel_size=(1,), stride=(1,), bias=False) + ) + (sigmoid): Sigmoid() + ) + ) + ) + ) +) +{'Total': 2340535, 'Trainable': 2340535} +epoch: 1 +dt: 15 +T: 60 +Tarin loss:0.09031 +Train acc: 45.362 +Test loss:0.00909 +Test acc: 8.333 +Saving.. +beat acc: 8.333333333333334 +epoch: 2 +dt: 15 +T: 60 +Tarin loss:0.06811 +Train acc: 56.340 +Test loss:0.00939 +Test acc: 8.333 +beat acc: 8.333333333333334 +epoch: 3 +dt: 15 +T: 60 +Tarin loss:0.06180 +Train acc: 58.468 +Test loss:0.00909 +Test acc: 8.681 +Saving.. +beat acc: 8.680555555555555 +epoch: 4 +dt: 15 +T: 60 +Tarin loss:0.05779 +Train acc: 61.532 +Test loss:0.00811 +Test acc: 29.861 +Saving.. +beat acc: 29.86111111111111 +epoch: 5 +dt: 15 +T: 60 +Tarin loss:0.05647 +Train acc: 62.723 +Test loss:0.00641 +Test acc: 42.361 +Saving.. +beat acc: 42.361111111111114 +epoch: 6 +dt: 15 +T: 60 +Tarin loss:0.05180 +Train acc: 65.447 +Test loss:0.00554 +Test acc: 55.556 +Saving.. +beat acc: 55.55555555555556 +epoch: 7 +dt: 15 +T: 60 +Tarin loss:0.05096 +Train acc: 68.426 +Test loss:0.00512 +Test acc: 60.764 +Saving.. +beat acc: 60.763888888888886 +epoch: 8 +dt: 15 +T: 60 +Tarin loss:0.04834 +Train acc: 70.213 +Test loss:0.00479 +Test acc: 69.444 +Saving.. +beat acc: 69.44444444444444 +epoch: 9 +dt: 15 +T: 60 +Tarin loss:0.04586 +Train acc: 74.383 +Test loss:0.00464 +Test acc: 69.444 +beat acc: 69.44444444444444 +epoch: 10 +dt: 15 +T: 60 +Tarin loss:0.04278 +Train acc: 77.787 +Test loss:0.00461 +Test acc: 70.486 +Saving.. +beat acc: 70.48611111111111 +epoch: 11 +dt: 15 +T: 60 +Tarin loss:0.04172 +Train acc: 81.277 +Test loss:0.00427 +Test acc: 75.694 +Saving.. +beat acc: 75.69444444444444 +epoch: 12 +dt: 15 +T: 60 +Tarin loss:0.03978 +Train acc: 82.723 +Test loss:0.00402 +Test acc: 78.125 +Saving.. +beat acc: 78.125 +epoch: 13 +dt: 15 +T: 60 +Tarin loss:0.03502 +Train acc: 84.851 +Test loss:0.00385 +Test acc: 83.681 +Saving.. +beat acc: 83.68055555555556 +epoch: 14 +dt: 15 +T: 60 +Tarin loss:0.03481 +Train acc: 85.787 +Test loss:0.00357 +Test acc: 85.417 +Saving.. +beat acc: 85.41666666666667 +epoch: 15 +dt: 15 +T: 60 +Tarin loss:0.03253 +Train acc: 87.915 +Test loss:0.00343 +Test acc: 85.764 +Saving.. +beat acc: 85.76388888888889 +epoch: 16 +dt: 15 +T: 60 +Tarin loss:0.02975 +Train acc: 88.766 +Test loss:0.00322 +Test acc: 87.847 +Saving.. 
+beat acc: 87.84722222222223 +epoch: 17 +dt: 15 +T: 60 +Tarin loss:0.02955 +Train acc: 90.638 +Test loss:0.00303 +Test acc: 87.153 +beat acc: 87.84722222222223 +epoch: 18 +dt: 15 +T: 60 +Tarin loss:0.02786 +Train acc: 90.298 +Test loss:0.00304 +Test acc: 87.847 +beat acc: 87.84722222222223 +epoch: 19 +dt: 15 +T: 60 +Tarin loss:0.02621 +Train acc: 91.574 +Test loss:0.00284 +Test acc: 89.583 +Saving.. +beat acc: 89.58333333333333 +epoch: 20 +dt: 15 +T: 60 +Tarin loss:0.02458 +Train acc: 91.064 +Test loss:0.00283 +Test acc: 88.542 +beat acc: 89.58333333333333 +epoch: 21 +dt: 15 +T: 60 +Tarin loss:0.02468 +Train acc: 91.574 +Test loss:0.00282 +Test acc: 88.542 +beat acc: 89.58333333333333 +epoch: 22 +dt: 15 +T: 60 +Tarin loss:0.02280 +Train acc: 92.596 +Test loss:0.00252 +Test acc: 90.625 +Saving.. +beat acc: 90.625 +epoch: 23 +dt: 15 +T: 60 +Tarin loss:0.02283 +Train acc: 92.511 +Test loss:0.00258 +Test acc: 91.667 +Saving.. +beat acc: 91.66666666666667 +epoch: 24 +dt: 15 +T: 60 +Tarin loss:0.01975 +Train acc: 92.936 +Test loss:0.00246 +Test acc: 89.583 +beat acc: 91.66666666666667 +epoch: 25 +dt: 15 +T: 60 +Tarin loss:0.02111 +Train acc: 92.851 +Test loss:0.00244 +Test acc: 90.625 +beat acc: 91.66666666666667 +epoch: 26 +dt: 15 +T: 60 +Tarin loss:0.02052 +Train acc: 92.085 +Test loss:0.00237 +Test acc: 90.278 +beat acc: 91.66666666666667 +epoch: 27 +dt: 15 +T: 60 +Tarin loss:0.02014 +Train acc: 93.702 +Test loss:0.00227 +Test acc: 90.972 +beat acc: 91.66666666666667 +epoch: 28 +dt: 15 +T: 60 +Tarin loss:0.01821 +Train acc: 94.553 +Test loss:0.00248 +Test acc: 90.625 +beat acc: 91.66666666666667 +epoch: 29 +dt: 15 +T: 60 +Tarin loss:0.01854 +Train acc: 94.213 +Test loss:0.00241 +Test acc: 89.931 +beat acc: 91.66666666666667 +epoch: 30 +dt: 15 +T: 60 +Tarin loss:0.01740 +Train acc: 95.234 +Test loss:0.00220 +Test acc: 92.014 +Saving.. +beat acc: 92.01388888888889 +epoch: 31 +dt: 15 +T: 60 +Tarin loss:0.01807 +Train acc: 94.723 +Test loss:0.00205 +Test acc: 92.014 +beat acc: 92.01388888888889 +epoch: 32 +dt: 15 +T: 60 +Tarin loss:0.01815 +Train acc: 94.809 +Test loss:0.00218 +Test acc: 92.708 +Saving.. +beat acc: 92.70833333333333 +epoch: 33 +dt: 15 +T: 60 +Tarin loss:0.01650 +Train acc: 95.319 +Test loss:0.00199 +Test acc: 91.319 +beat acc: 92.70833333333333 +epoch: 34 +dt: 15 +T: 60 +Tarin loss:0.01653 +Train acc: 95.660 +Test loss:0.00204 +Test acc: 91.319 +beat acc: 92.70833333333333 +epoch: 35 +dt: 15 +T: 60 +Tarin loss:0.01667 +Train acc: 95.745 +Test loss:0.00199 +Test acc: 92.014 +beat acc: 92.70833333333333 +epoch: 36 +dt: 15 +T: 60 +Tarin loss:0.01553 +Train acc: 95.660 +Test loss:0.00211 +Test acc: 91.667 +beat acc: 92.70833333333333 +epoch: 37 +dt: 15 +T: 60 +Tarin loss:0.01433 +Train acc: 96.085 +Test loss:0.00202 +Test acc: 92.014 +beat acc: 92.70833333333333 +epoch: 38 +dt: 15 +T: 60 +Tarin loss:0.01508 +Train acc: 95.915 +Test loss:0.00195 +Test acc: 92.361 +beat acc: 92.70833333333333 +epoch: 39 +dt: 15 +T: 60 +Tarin loss:0.01357 +Train acc: 96.426 +Test loss:0.00192 +Test acc: 93.750 +Saving.. 
+beat acc: 93.75 +epoch: 40 +dt: 15 +T: 60 +Tarin loss:0.01284 +Train acc: 97.191 +Test loss:0.00186 +Test acc: 92.708 +beat acc: 93.75 +epoch: 41 +dt: 15 +T: 60 +Tarin loss:0.01384 +Train acc: 96.340 +Test loss:0.00179 +Test acc: 92.014 +beat acc: 93.75 +epoch: 42 +dt: 15 +T: 60 +Tarin loss:0.01375 +Train acc: 96.000 +Test loss:0.00182 +Test acc: 92.014 +beat acc: 93.75 +epoch: 43 +dt: 15 +T: 60 +Tarin loss:0.01200 +Train acc: 97.447 +Test loss:0.00181 +Test acc: 92.014 +beat acc: 93.75 +epoch: 44 +dt: 15 +T: 60 +Tarin loss:0.01215 +Train acc: 96.936 +Test loss:0.00190 +Test acc: 92.014 +beat acc: 93.75 +epoch: 45 +dt: 15 +T: 60 +Tarin loss:0.01173 +Train acc: 96.936 +Test loss:0.00174 +Test acc: 91.667 +beat acc: 93.75 +epoch: 46 +dt: 15 +T: 60 +Tarin loss:0.01151 +Train acc: 97.106 +Test loss:0.00158 +Test acc: 94.097 +Saving.. +beat acc: 94.09722222222223 +epoch: 47 +dt: 15 +T: 60 +Tarin loss:0.01160 +Train acc: 97.021 +Test loss:0.00170 +Test acc: 93.403 +beat acc: 94.09722222222223 +epoch: 48 +dt: 15 +T: 60 +Tarin loss:0.01184 +Train acc: 96.766 +Test loss:0.00153 +Test acc: 93.403 +beat acc: 94.09722222222223 +epoch: 49 +dt: 15 +T: 60 +Tarin loss:0.01087 +Train acc: 96.681 +Test loss:0.00193 +Test acc: 91.319 +beat acc: 94.09722222222223 +epoch: 50 +dt: 15 +T: 60 +Tarin loss:0.01151 +Train acc: 97.021 +Test loss:0.00150 +Test acc: 94.097 +beat acc: 94.09722222222223 +epoch: 51 +dt: 15 +T: 60 +Tarin loss:0.01122 +Train acc: 97.617 +Test loss:0.00171 +Test acc: 93.056 +beat acc: 94.09722222222223 +epoch: 52 +dt: 15 +T: 60 +Tarin loss:0.01105 +Train acc: 98.043 +Test loss:0.00160 +Test acc: 92.361 +beat acc: 94.09722222222223 +epoch: 53 +dt: 15 +T: 60 +Tarin loss:0.01022 +Train acc: 97.617 +Test loss:0.00155 +Test acc: 95.139 +Saving.. +beat acc: 95.13888888888889 +epoch: 54 +dt: 15 +T: 60 +Tarin loss:0.01011 +Train acc: 97.362 +Test loss:0.00150 +Test acc: 94.792 +beat acc: 95.13888888888889 +epoch: 55 +dt: 15 +T: 60 +Tarin loss:0.01004 +Train acc: 97.532 +Test loss:0.00150 +Test acc: 93.056 +beat acc: 95.13888888888889 +epoch: 56 +dt: 15 +T: 60 +Tarin loss:0.01050 +Train acc: 97.957 +Test loss:0.00149 +Test acc: 93.403 +beat acc: 95.13888888888889 +epoch: 57 +dt: 15 +T: 60 +Tarin loss:0.00922 +Train acc: 98.213 +Test loss:0.00138 +Test acc: 94.097 +beat acc: 95.13888888888889 +epoch: 58 +dt: 15 +T: 60 +Tarin loss:0.01020 +Train acc: 97.617 +Test loss:0.00137 +Test acc: 94.097 +beat acc: 95.13888888888889 +epoch: 59 +dt: 15 +T: 60 +Tarin loss:0.00922 +Train acc: 98.043 +Test loss:0.00138 +Test acc: 94.097 +beat acc: 95.13888888888889 +epoch: 60 +dt: 15 +T: 60 +Tarin loss:0.00966 +Train acc: 97.617 +Test loss:0.00137 +Test acc: 94.444 +beat acc: 95.13888888888889 +epoch: 61 +dt: 15 +T: 60 +Tarin loss:0.01073 +Train acc: 97.617 +Test loss:0.00140 +Test acc: 93.750 +beat acc: 95.13888888888889 +epoch: 62 +dt: 15 +T: 60 +Tarin loss:0.00895 +Train acc: 98.128 +Test loss:0.00140 +Test acc: 95.139 +beat acc: 95.13888888888889 +epoch: 63 +dt: 15 +T: 60 +Tarin loss:0.00895 +Train acc: 98.043 +Test loss:0.00169 +Test acc: 92.708 +beat acc: 95.13888888888889 +epoch: 64 +dt: 15 +T: 60 +Tarin loss:0.01044 +Train acc: 97.787 +Test loss:0.00143 +Test acc: 92.361 +beat acc: 95.13888888888889 +epoch: 65 +dt: 15 +T: 60 +Tarin loss:0.00881 +Train acc: 98.383 +Test loss:0.00155 +Test acc: 93.056 +beat acc: 95.13888888888889 +epoch: 66 +dt: 15 +T: 60 +Tarin loss:0.00936 +Train acc: 97.787 +Test loss:0.00150 +Test acc: 93.750 +beat acc: 95.13888888888889 +epoch: 67 +dt: 15 +T: 60 +Tarin loss:0.00860 +Train 
acc: 97.447 +Test loss:0.00136 +Test acc: 95.139 +beat acc: 95.13888888888889 +epoch: 68 +dt: 15 +T: 60 +Tarin loss:0.00800 +Train acc: 98.383 +Test loss:0.00131 +Test acc: 94.792 +beat acc: 95.13888888888889 +epoch: 69 +dt: 15 +T: 60 +Tarin loss:0.00779 +Train acc: 98.128 +Test loss:0.00134 +Test acc: 93.750 +beat acc: 95.13888888888889 +epoch: 70 +dt: 15 +T: 60 +Tarin loss:0.00753 +Train acc: 98.383 +Test loss:0.00140 +Test acc: 93.750 +beat acc: 95.13888888888889 +epoch: 71 +dt: 15 +T: 60 +Tarin loss:0.00773 +Train acc: 98.723 +Test loss:0.00127 +Test acc: 94.097 +beat acc: 95.13888888888889 +epoch: 72 +dt: 15 +T: 60 +Tarin loss:0.00752 +Train acc: 98.298 +Test loss:0.00128 +Test acc: 94.097 +beat acc: 95.13888888888889 +epoch: 73 +dt: 15 +T: 60 +Tarin loss:0.00766 +Train acc: 98.894 +Test loss:0.00133 +Test acc: 93.750 +beat acc: 95.13888888888889 +epoch: 74 +dt: 15 +T: 60 +Tarin loss:0.00656 +Train acc: 98.723 +Test loss:0.00153 +Test acc: 92.361 +beat acc: 95.13888888888889 +epoch: 75 +dt: 15 +T: 60 +Tarin loss:0.00708 +Train acc: 98.128 +Test loss:0.00126 +Test acc: 94.792 +beat acc: 95.13888888888889 +epoch: 76 +dt: 15 +T: 60 +Tarin loss:0.00593 +Train acc: 98.979 +Test loss:0.00130 +Test acc: 94.792 +beat acc: 95.13888888888889 +epoch: 77 +dt: 15 +T: 60 +Tarin loss:0.00703 +Train acc: 98.979 +Test loss:0.00131 +Test acc: 93.750 +beat acc: 95.13888888888889 +epoch: 78 +dt: 15 +T: 60 +Tarin loss:0.00639 +Train acc: 98.723 +Test loss:0.00132 +Test acc: 94.444 +beat acc: 95.13888888888889 +epoch: 79 +dt: 15 +T: 60 +Tarin loss:0.00642 +Train acc: 98.979 +Test loss:0.00124 +Test acc: 94.097 +beat acc: 95.13888888888889 +epoch: 80 +dt: 15 +T: 60 +Tarin loss:0.00679 +Train acc: 98.979 +Test loss:0.00128 +Test acc: 93.403 +beat acc: 95.13888888888889 +epoch: 81 +dt: 15 +T: 60 +Tarin loss:0.00680 +Train acc: 98.809 +Test loss:0.00122 +Test acc: 94.792 +beat acc: 95.13888888888889 +epoch: 82 +dt: 15 +T: 60 +Tarin loss:0.00631 +Train acc: 98.553 +Epoch 82: reducing learning rate of group 0 to 1.0000e-05. +Test loss:0.00120 +Test acc: 94.792 +beat acc: 95.13888888888889 +epoch: 83 +dt: 15 +T: 60 +Tarin loss:0.00623 +Train acc: 99.064 +Test loss:0.00118 +Test acc: 94.444 +beat acc: 95.13888888888889 +epoch: 84 +dt: 15 +T: 60 +Tarin loss:0.00515 +Train acc: 98.894 +Test loss:0.00121 +Test acc: 95.139 +beat acc: 95.13888888888889 +epoch: 85 +dt: 15 +T: 60 +Tarin loss:0.00605 +Train acc: 98.638 +Test loss:0.00122 +Test acc: 94.444 +beat acc: 95.13888888888889 +epoch: 86 +dt: 15 +T: 60 +Tarin loss:0.00602 +Train acc: 98.894 +Test loss:0.00121 +Test acc: 94.097 +beat acc: 95.13888888888889 +epoch: 87 +dt: 15 +T: 60 +Tarin loss:0.00545 +Train acc: 98.894 +Test loss:0.00118 +Test acc: 94.097 +beat acc: 95.13888888888889 +epoch: 88 +dt: 15 +T: 60 +Tarin loss:0.00584 +Train acc: 99.064 +Test loss:0.00120 +Test acc: 93.750 +beat acc: 95.13888888888889 +epoch: 89 +dt: 15 +T: 60 +Tarin loss:0.00577 +Train acc: 99.064 +Test loss:0.00118 +Test acc: 94.444 +beat acc: 95.13888888888889 +epoch: 90 +dt: 15 +T: 60 +Tarin loss:0.00653 +Train acc: 98.468 +Epoch 90: reducing learning rate of group 0 to 1.0000e-06. 
+Test loss:0.00118 +Test acc: 94.444 +beat acc: 95.13888888888889 +epoch: 91 +dt: 15 +T: 60 +Tarin loss:0.00577 +Train acc: 98.809 +Test loss:0.00120 +Test acc: 94.444 +beat acc: 95.13888888888889 +epoch: 92 +dt: 15 +T: 60 +Tarin loss:0.00600 +Train acc: 98.894 +Test loss:0.00119 +Test acc: 94.097 +beat acc: 95.13888888888889 +epoch: 93 +dt: 15 +T: 60 +Tarin loss:0.00586 +Train acc: 98.979 +Test loss:0.00121 +Test acc: 94.097 +beat acc: 95.13888888888889 +epoch: 94 +dt: 15 +T: 60 +Tarin loss:0.00583 +Train acc: 99.064 +Test loss:0.00121 +Test acc: 94.444 +beat acc: 95.13888888888889 +epoch: 95 +dt: 15 +T: 60 +Tarin loss:0.00526 +Train acc: 99.234 +Test loss:0.00120 +Test acc: 93.403 +beat acc: 95.13888888888889 +epoch: 96 +dt: 15 +T: 60 +Tarin loss:0.00635 +Train acc: 98.809 +Epoch 96: reducing learning rate of group 0 to 1.0000e-07. +Test loss:0.00121 +Test acc: 94.792 +beat acc: 95.13888888888889 +epoch: 97 +dt: 15 +T: 60 +Tarin loss:0.00505 +Train acc: 99.149 +Test loss:0.00120 +Test acc: 94.444 +beat acc: 95.13888888888889 +epoch: 98 +dt: 15 +T: 60 +Tarin loss:0.00581 +Train acc: 99.234 +Test loss:0.00116 +Test acc: 94.444 +beat acc: 95.13888888888889 +epoch: 99 +dt: 15 +T: 60 +Tarin loss:0.00521 +Train acc: 99.234 +Test loss:0.00124 +Test acc: 94.097 +beat acc: 95.13888888888889 +epoch: 100 +dt: 15 +T: 60 +Tarin loss:0.00557 +Train acc: 99.319 +Test loss:0.00121 +Test acc: 94.792 +beat acc: 95.13888888888889 +epoch: 101 +dt: 15 +T: 60 +Tarin loss:0.00479 +Train acc: 99.064 +Test loss:0.00120 +Test acc: 94.792 +beat acc: 95.13888888888889 +epoch: 102 +dt: 15 +T: 60 +Tarin loss:0.00625 +Train acc: 98.894 +Test loss:0.00116 +Test acc: 94.444 +beat acc: 95.13888888888889 +epoch: 103 +dt: 15 +T: 60 +Tarin loss:0.00583 +Train acc: 98.979 +Test loss:0.00116 +Test acc: 93.750 +beat acc: 95.13888888888889 +epoch: 104 +dt: 15 +T: 60 +Tarin loss:0.00741 +Train acc: 98.894 +Test loss:0.00118 +Test acc: 94.097 +beat acc: 95.13888888888889 +epoch: 105 +dt: 15 +T: 60 +Tarin loss:0.00510 +Train acc: 99.319 +Test loss:0.00121 +Test acc: 93.750 +beat acc: 95.13888888888889 +epoch: 106 +dt: 15 +T: 60 +Tarin loss:0.00510 +Train acc: 99.149 +Test loss:0.00117 +Test acc: 94.444 +beat acc: 95.13888888888889 +epoch: 107 +dt: 15 +T: 60 +Tarin loss:0.00479 +Train acc: 99.234 +Test loss:0.00114 +Test acc: 94.444 +beat acc: 95.13888888888889 +epoch: 108 +dt: 15 +T: 60 +Tarin loss:0.00721 +Train acc: 98.894 +Test loss:0.00119 +Test acc: 94.097 +beat acc: 95.13888888888889 +epoch: 109 +dt: 15 +T: 60 +Tarin loss:0.00602 +Train acc: 98.383 +Test loss:0.00120 +Test acc: 94.792 +beat acc: 95.13888888888889 +epoch: 110 +dt: 15 +T: 60 +Tarin loss:0.00640 +Train acc: 98.809 +Test loss:0.00119 +Test acc: 94.444 +beat acc: 95.13888888888889 +epoch: 111 +dt: 15 +T: 60 +Tarin loss:0.00650 +Train acc: 98.298 +Test loss:0.00119 +Test acc: 95.139 +beat acc: 95.13888888888889 +epoch: 112 +dt: 15 +T: 60 +Tarin loss:0.00573 +Train acc: 99.234 +Test loss:0.00116 +Test acc: 94.097 +beat acc: 95.13888888888889 +epoch: 113 +dt: 15 +T: 60 +Tarin loss:0.00564 +Train acc: 99.234 +Epoch 113: reducing learning rate of group 0 to 1.0000e-08. 
+Test loss:0.00119 +Test acc: 94.792 +beat acc: 95.13888888888889 +epoch: 114 +dt: 15 +T: 60 +Tarin loss:0.00512 +Train acc: 99.234 +Test loss:0.00121 +Test acc: 93.403 +beat acc: 95.13888888888889 +epoch: 115 +dt: 15 +T: 60 +Tarin loss:0.00567 +Train acc: 99.149 +Test loss:0.00123 +Test acc: 94.444 +beat acc: 95.13888888888889 +epoch: 116 +dt: 15 +T: 60 +Tarin loss:0.00498 +Train acc: 98.809 +Test loss:0.00123 +Test acc: 94.444 +beat acc: 95.13888888888889 +epoch: 117 +dt: 15 +T: 60 +Tarin loss:0.00521 +Train acc: 99.404 +Test loss:0.00120 +Test acc: 94.792 +beat acc: 95.13888888888889 +epoch: 118 +dt: 15 +T: 60 +Tarin loss:0.00640 +Train acc: 98.979 +Test loss:0.00122 +Test acc: 94.444 +beat acc: 95.13888888888889 +epoch: 119 +dt: 15 +T: 60 +Tarin loss:0.00661 +Train acc: 98.723 +Test loss:0.00122 +Test acc: 94.792 +beat acc: 95.13888888888889 +epoch: 120 +dt: 15 +T: 60 +Tarin loss:0.00530 +Train acc: 99.149 +Test loss:0.00120 +Test acc: 94.097 +beat acc: 95.13888888888889 +epoch: 121 +dt: 15 +T: 60 +Tarin loss:0.00655 +Train acc: 98.638 +Test loss:0.00120 +Test acc: 94.097 +beat acc: 95.13888888888889 +epoch: 122 +dt: 15 +T: 60 +Tarin loss:0.00478 +Train acc: 99.234 +Test loss:0.00121 +Test acc: 95.139 +beat acc: 95.13888888888889 +epoch: 123 +dt: 15 +T: 60 +Tarin loss:0.00665 +Train acc: 98.638 +Test loss:0.00121 +Test acc: 94.444 +beat acc: 95.13888888888889 +epoch: 124 +dt: 15 +T: 60 +Tarin loss:0.00510 +Train acc: 99.319 +Test loss:0.00121 +Test acc: 94.097 +beat acc: 95.13888888888889 +epoch: 125 +dt: 15 +T: 60 +Tarin loss:0.00533 +Train acc: 98.809 +Test loss:0.00120 +Test acc: 94.444 +beat acc: 95.13888888888889 +epoch: 126 +dt: 15 +T: 60 +Tarin loss:0.00507 +Train acc: 99.319 +Test loss:0.00124 +Test acc: 94.444 +beat acc: 95.13888888888889 +epoch: 127 +dt: 15 +T: 60 +Tarin loss:0.00525 +Train acc: 99.149 +Test loss:0.00121 +Test acc: 93.750 +beat acc: 95.13888888888889 +epoch: 128 +dt: 15 +T: 60 +Tarin loss:0.00662 +Train acc: 99.064 +Test loss:0.00123 +Test acc: 94.444 +beat acc: 95.13888888888889 +epoch: 129 +dt: 15 +T: 60 +Tarin loss:0.00557 +Train acc: 99.064 +Test loss:0.00123 +Test acc: 94.097 +beat acc: 95.13888888888889 +epoch: 130 +dt: 15 +T: 60 +Tarin loss:0.00563 +Train acc: 99.234 +Test loss:0.00117 +Test acc: 93.750 +beat acc: 95.13888888888889 +epoch: 131 +dt: 15 +T: 60 +Tarin loss:0.00545 +Train acc: 98.979 +Test loss:0.00124 +Test acc: 94.444 +beat acc: 95.13888888888889 +epoch: 132 +dt: 15 +T: 60 +Tarin loss:0.00494 +Train acc: 99.234 +Test loss:0.00120 +Test acc: 94.444 +beat acc: 95.13888888888889 +epoch: 133 +dt: 15 +T: 60 +Tarin loss:0.00469 +Train acc: 98.809 +Test loss:0.00121 +Test acc: 93.750 +beat acc: 95.13888888888889 +epoch: 134 +dt: 15 +T: 60 +Tarin loss:0.00543 +Train acc: 98.894 +Test loss:0.00119 +Test acc: 94.792 +beat acc: 95.13888888888889 +epoch: 135 +dt: 15 +T: 60 +Tarin loss:0.00614 +Train acc: 98.809 +Test loss:0.00119 +Test acc: 93.403 +beat acc: 95.13888888888889 +epoch: 136 +dt: 15 +T: 60 +Tarin loss:0.00588 +Train acc: 98.553 +Test loss:0.00121 +Test acc: 93.750 +beat acc: 95.13888888888889 +epoch: 137 +dt: 15 +T: 60 +Tarin loss:0.00637 +Train acc: 99.404 +Test loss:0.00122 +Test acc: 94.444 +beat acc: 95.13888888888889 +epoch: 138 +dt: 15 +T: 60 +Tarin loss:0.00677 +Train acc: 99.234 +Test loss:0.00125 +Test acc: 94.097 +beat acc: 95.13888888888889 +epoch: 139 +dt: 15 +T: 60 +Tarin loss:0.00602 +Train acc: 99.234 +Test loss:0.00126 +Test acc: 94.792 +beat acc: 95.13888888888889 +epoch: 140 +dt: 15 +T: 60 +Tarin loss:0.00535 +Train 
acc: 99.234 +Test loss:0.00121 +Test acc: 94.792 +beat acc: 95.13888888888889 +epoch: 141 +dt: 15 +T: 60 +Tarin loss:0.00578 +Train acc: 98.979 +Test loss:0.00122 +Test acc: 93.403 +beat acc: 95.13888888888889 +epoch: 142 +dt: 15 +T: 60 +Tarin loss:0.00565 +Train acc: 98.979 +Test loss:0.00127 +Test acc: 94.792 +beat acc: 95.13888888888889 +epoch: 143 +dt: 15 +T: 60 +Tarin loss:0.00580 +Train acc: 98.468 +Test loss:0.00118 +Test acc: 94.097 +beat acc: 95.13888888888889 +epoch: 144 +dt: 15 +T: 60 +Tarin loss:0.00471 +Train acc: 99.149 +Test loss:0.00122 +Test acc: 93.750 +beat acc: 95.13888888888889 +epoch: 145 +dt: 15 +T: 60 +Tarin loss:0.00622 +Train acc: 99.064 +Test loss:0.00120 +Test acc: 94.444 +beat acc: 95.13888888888889 +epoch: 146 +dt: 15 +T: 60 +Tarin loss:0.00511 +Train acc: 99.064 +Test loss:0.00119 +Test acc: 94.444 +beat acc: 95.13888888888889 +epoch: 147 +dt: 15 +T: 60 +Tarin loss:0.00645 +Train acc: 98.638 +Test loss:0.00122 +Test acc: 94.097 +beat acc: 95.13888888888889 +epoch: 148 +dt: 15 +T: 60 +Tarin loss:0.00493 +Train acc: 98.809 +Test loss:0.00120 +Test acc: 94.792 +beat acc: 95.13888888888889 +epoch: 149 +dt: 15 +T: 60 +Tarin loss:0.00631 +Train acc: 99.234 +Test loss:0.00117 +Test acc: 94.444 +beat acc: 95.13888888888889 +epoch: 150 +dt: 15 +T: 60 +Tarin loss:0.00624 +Train acc: 98.553 +Test loss:0.00119 +Test acc: 94.097 +beat acc: 95.13888888888889 +epoch: 151 +dt: 15 +T: 60 +Tarin loss:0.00623 +Train acc: 98.723 +Test loss:0.00119 +Test acc: 94.097 +beat acc: 95.13888888888889 +epoch: 152 +dt: 15 +T: 60 +Tarin loss:0.00611 +Train acc: 98.468 +Test loss:0.00121 +Test acc: 94.444 +beat acc: 95.13888888888889 +epoch: 153 +dt: 15 +T: 60 +Tarin loss:0.00528 +Train acc: 99.064 +Test loss:0.00122 +Test acc: 94.097 +beat acc: 95.13888888888889 +epoch: 154 +dt: 15 +T: 60 +Tarin loss:0.00595 +Train acc: 98.809 +Test loss:0.00120 +Test acc: 93.403 +beat acc: 95.13888888888889 +epoch: 155 +dt: 15 +T: 60 +Tarin loss:0.00675 +Train acc: 98.723 +Test loss:0.00122 +Test acc: 94.097 +beat acc: 95.13888888888889 +epoch: 156 +dt: 15 +T: 60 +Tarin loss:0.00571 +Train acc: 98.723 +Test loss:0.00121 +Test acc: 93.403 +beat acc: 95.13888888888889 +epoch: 157 +dt: 15 +T: 60 +Tarin loss:0.00624 +Train acc: 98.809 +Test loss:0.00118 +Test acc: 94.097 +beat acc: 95.13888888888889 +epoch: 158 +dt: 15 +T: 60 +Tarin loss:0.00512 +Train acc: 98.809 +Test loss:0.00121 +Test acc: 94.097 +beat acc: 95.13888888888889 +epoch: 159 +dt: 15 +T: 60 +Tarin loss:0.00570 +Train acc: 99.064 +Test loss:0.00117 +Test acc: 95.139 +beat acc: 95.13888888888889 +epoch: 160 +dt: 15 +T: 60 +Tarin loss:0.00497 +Train acc: 99.149 +Test loss:0.00117 +Test acc: 94.097 +beat acc: 95.13888888888889 +epoch: 161 +dt: 15 +T: 60 +Tarin loss:0.00591 +Train acc: 99.404 +Test loss:0.00120 +Test acc: 95.139 +beat acc: 95.13888888888889 +epoch: 162 +dt: 15 +T: 60 +Tarin loss:0.00512 +Train acc: 98.468 +Test loss:0.00123 +Test acc: 94.444 +beat acc: 95.13888888888889 +epoch: 163 +dt: 15 +T: 60 +Tarin loss:0.00556 +Train acc: 99.234 +Test loss:0.00121 +Test acc: 94.444 +beat acc: 95.13888888888889 +epoch: 164 +dt: 15 +T: 60 +Tarin loss:0.00599 +Train acc: 99.064 +Test loss:0.00126 +Test acc: 93.750 +beat acc: 95.13888888888889 +epoch: 165 +dt: 15 +T: 60 +Tarin loss:0.00437 +Train acc: 98.979 +Test loss:0.00122 +Test acc: 95.139 +beat acc: 95.13888888888889 +epoch: 166 +dt: 15 +T: 60 +Tarin loss:0.00553 +Train acc: 99.234 +Test loss:0.00125 +Test acc: 94.792 +beat acc: 95.13888888888889 +epoch: 167 +dt: 15 +T: 60 +Tarin 
loss:0.00486 +Train acc: 99.064 +Test loss:0.00118 +Test acc: 94.097 +beat acc: 95.13888888888889 +epoch: 168 +dt: 15 +T: 60 +Tarin loss:0.00558 +Train acc: 98.894 +Test loss:0.00119 +Test acc: 93.750 +beat acc: 95.13888888888889 +epoch: 169 +dt: 15 +T: 60 +Tarin loss:0.00592 +Train acc: 98.894 +Test loss:0.00120 +Test acc: 95.139 +beat acc: 95.13888888888889 +epoch: 170 +dt: 15 +T: 60 +Tarin loss:0.00538 +Train acc: 98.979 +Test loss:0.00118 +Test acc: 94.444 +beat acc: 95.13888888888889 +epoch: 171 +dt: 15 +T: 60 +Tarin loss:0.00687 +Train acc: 98.809 +Test loss:0.00117 +Test acc: 95.139 +beat acc: 95.13888888888889 +epoch: 172 +dt: 15 +T: 60 +Tarin loss:0.00557 +Train acc: 99.234 +Test loss:0.00118 +Test acc: 94.444 +beat acc: 95.13888888888889 +epoch: 173 +dt: 15 +T: 60 +Tarin loss:0.00580 +Train acc: 99.319 +Test loss:0.00122 +Test acc: 93.750 +beat acc: 95.13888888888889 +epoch: 174 +dt: 15 +T: 60 +Tarin loss:0.00544 +Train acc: 99.149 +Test loss:0.00121 +Test acc: 94.097 +beat acc: 95.13888888888889 +epoch: 175 +dt: 15 +T: 60 +Tarin loss:0.00610 +Train acc: 98.979 +Test loss:0.00117 +Test acc: 94.792 +beat acc: 95.13888888888889 +epoch: 176 +dt: 15 +T: 60 +Tarin loss:0.00529 +Train acc: 99.234 +Test loss:0.00120 +Test acc: 94.444 +beat acc: 95.13888888888889 +epoch: 177 +dt: 15 +T: 60 +Tarin loss:0.00599 +Train acc: 98.979 +Test loss:0.00120 +Test acc: 94.444 +beat acc: 95.13888888888889 +epoch: 178 +dt: 15 +T: 60 +Tarin loss:0.00667 +Train acc: 98.894 +Test loss:0.00119 +Test acc: 93.750 +beat acc: 95.13888888888889 +epoch: 179 +dt: 15 +T: 60 +Tarin loss:0.00475 +Train acc: 99.489 +Test loss:0.00118 +Test acc: 95.139 +beat acc: 95.13888888888889 +epoch: 180 +dt: 15 +T: 60 +Tarin loss:0.00505 +Train acc: 98.979 +Test loss:0.00119 +Test acc: 94.792 +beat acc: 95.13888888888889 +epoch: 181 +dt: 15 +T: 60 +Tarin loss:0.00522 +Train acc: 98.979 +Test loss:0.00121 +Test acc: 93.750 +beat acc: 95.13888888888889 +epoch: 182 +dt: 15 +T: 60 +Tarin loss:0.00552 +Train acc: 99.064 +Test loss:0.00118 +Test acc: 94.097 +beat acc: 95.13888888888889 +epoch: 183 +dt: 15 +T: 60 +Tarin loss:0.00695 +Train acc: 98.553 +Test loss:0.00117 +Test acc: 94.792 +beat acc: 95.13888888888889 +epoch: 184 +dt: 15 +T: 60 +Tarin loss:0.00558 +Train acc: 99.489 +Test loss:0.00120 +Test acc: 94.444 +beat acc: 95.13888888888889 +epoch: 185 +dt: 15 +T: 60 +Tarin loss:0.00712 +Train acc: 98.638 +Test loss:0.00120 +Test acc: 94.792 +beat acc: 95.13888888888889 +epoch: 186 +dt: 15 +T: 60 +Tarin loss:0.00626 +Train acc: 99.149 +Test loss:0.00119 +Test acc: 94.097 +beat acc: 95.13888888888889 +epoch: 187 +dt: 15 +T: 60 +Tarin loss:0.00504 +Train acc: 99.234 +Test loss:0.00120 +Test acc: 94.444 +beat acc: 95.13888888888889 +epoch: 188 +dt: 15 +T: 60 +Tarin loss:0.00650 +Train acc: 98.723 +Test loss:0.00119 +Test acc: 93.403 +beat acc: 95.13888888888889 +epoch: 189 +dt: 15 +T: 60 +Tarin loss:0.00540 +Train acc: 98.809 +Test loss:0.00117 +Test acc: 94.444 +beat acc: 95.13888888888889 +epoch: 190 +dt: 15 +T: 60 +Tarin loss:0.00594 +Train acc: 98.723 +Test loss:0.00117 +Test acc: 93.750 +beat acc: 95.13888888888889 +epoch: 191 +dt: 15 +T: 60 +Tarin loss:0.00488 +Train acc: 99.574 +Test loss:0.00120 +Test acc: 95.139 +beat acc: 95.13888888888889 +epoch: 192 +dt: 15 +T: 60 +Tarin loss:0.00533 +Train acc: 99.404 +Test loss:0.00120 +Test acc: 94.444 +beat acc: 95.13888888888889 +epoch: 193 +dt: 15 +T: 60 +Tarin loss:0.00617 +Train acc: 98.979 +Test loss:0.00118 +Test acc: 94.444 +beat acc: 95.13888888888889 +epoch: 194 +dt: 15 +T: 
60 +Tarin loss:0.00533 +Train acc: 99.234 +Test loss:0.00123 +Test acc: 95.139 +beat acc: 95.13888888888889 +epoch: 195 +dt: 15 +T: 60 +Tarin loss:0.00569 +Train acc: 98.894 +Test loss:0.00121 +Test acc: 94.792 +beat acc: 95.13888888888889 +epoch: 196 +dt: 15 +T: 60 +Tarin loss:0.00590 +Train acc: 99.404 +Test loss:0.00117 +Test acc: 94.792 +beat acc: 95.13888888888889 +epoch: 197 +dt: 15 +T: 60 +Tarin loss:0.00565 +Train acc: 98.894 +Test loss:0.00117 +Test acc: 94.097 +beat acc: 95.13888888888889 +epoch: 198 +dt: 15 +T: 60 +Tarin loss:0.00608 +Train acc: 98.723 +Test loss:0.00120 +Test acc: 93.750 +beat acc: 95.13888888888889 +epoch: 199 +dt: 15 +T: 60 +Tarin loss:0.00543 +Train acc: 99.064 +Test loss:0.00119 +Test acc: 94.097 +beat acc: 95.13888888888889 +epoch: 200 +dt: 15 +T: 60 +Tarin loss:0.00570 +Train acc: 99.149 +Test loss:0.00120 +Test acc: 95.139 +beat acc: 95.13888888888889 +epoch: 201 +dt: 15 +T: 60 +Tarin loss:0.00569 +Train acc: 98.979 +Test loss:0.00122 +Test acc: 94.097 +beat acc: 95.13888888888889 +epoch: 202 +dt: 15 +T: 60 +Tarin loss:0.00491 +Train acc: 99.234 +Test loss:0.00124 +Test acc: 93.750 +beat acc: 95.13888888888889 +epoch: 203 +dt: 15 +T: 60 +Tarin loss:0.00670 +Train acc: 99.064 +Test loss:0.00120 +Test acc: 94.444 +beat acc: 95.13888888888889 +epoch: 204 +dt: 15 +T: 60 +Tarin loss:0.00708 +Train acc: 98.894 +Test loss:0.00120 +Test acc: 94.444 +beat acc: 95.13888888888889 +epoch: 205 +dt: 15 +T: 60 +Tarin loss:0.00499 +Train acc: 99.149 +Test loss:0.00119 +Test acc: 94.444 +beat acc: 95.13888888888889 +epoch: 206 +dt: 15 +T: 60 +Tarin loss:0.00532 +Train acc: 99.149 +Test loss:0.00118 +Test acc: 94.097 +beat acc: 95.13888888888889 +epoch: 207 +dt: 15 +T: 60 +Tarin loss:0.00621 +Train acc: 98.979 +Test loss:0.00120 +Test acc: 94.097 +beat acc: 95.13888888888889 +epoch: 208 +dt: 15 +T: 60 +Tarin loss:0.00541 +Train acc: 99.234 +Test loss:0.00122 +Test acc: 94.444 +beat acc: 95.13888888888889 +epoch: 209 +dt: 15 +T: 60 +Tarin loss:0.00534 +Train acc: 99.319 +Test loss:0.00123 +Test acc: 94.097 +beat acc: 95.13888888888889 +epoch: 210 +dt: 15 +T: 60 +Tarin loss:0.00550 +Train acc: 98.894 +Test loss:0.00116 +Test acc: 94.444 +beat acc: 95.13888888888889 +epoch: 211 +dt: 15 +T: 60 +Tarin loss:0.00570 +Train acc: 98.894 +Test loss:0.00122 +Test acc: 94.444 +beat acc: 95.13888888888889 +epoch: 212 +dt: 15 +T: 60 +Tarin loss:0.00565 +Train acc: 99.489 +Test loss:0.00123 +Test acc: 94.444 +beat acc: 95.13888888888889 +epoch: 213 +dt: 15 +T: 60 +Tarin loss:0.00545 +Train acc: 99.149 +Test loss:0.00120 +Test acc: 94.444 +beat acc: 95.13888888888889 +epoch: 214 +dt: 15 +T: 60 +Tarin loss:0.00595 +Train acc: 98.638 +Test loss:0.00117 +Test acc: 94.444 +beat acc: 95.13888888888889 +epoch: 215 +dt: 15 +T: 60 +Tarin loss:0.00537 +Train acc: 98.638 +Test loss:0.00117 +Test acc: 94.444 +beat acc: 95.13888888888889 +epoch: 216 +dt: 15 +T: 60 +Tarin loss:0.00616 +Train acc: 98.979 +Test loss:0.00116 +Test acc: 94.097 +beat acc: 95.13888888888889 +epoch: 217 +dt: 15 +T: 60 +Tarin loss:0.00573 +Train acc: 99.064 +Test loss:0.00113 +Test acc: 94.792 +beat acc: 95.13888888888889 +epoch: 218 +dt: 15 +T: 60 +Tarin loss:0.00509 +Train acc: 98.979 +Test loss:0.00120 +Test acc: 94.792 +beat acc: 95.13888888888889 +epoch: 219 +dt: 15 +T: 60 +Tarin loss:0.00541 +Train acc: 99.149 +Test loss:0.00118 +Test acc: 94.444 +beat acc: 95.13888888888889 +epoch: 220 +dt: 15 +T: 60 +Tarin loss:0.00441 +Train acc: 99.319 +Test loss:0.00116 +Test acc: 94.444 +beat acc: 95.13888888888889 +epoch: 221 
+dt: 15 +T: 60 +Tarin loss:0.00665 +Train acc: 98.298 +Test loss:0.00123 +Test acc: 94.444 +beat acc: 95.13888888888889 +epoch: 222 +dt: 15 +T: 60 +Tarin loss:0.00604 +Train acc: 98.809 +Test loss:0.00120 +Test acc: 94.097 +beat acc: 95.13888888888889 +epoch: 223 +dt: 15 +T: 60 +Tarin loss:0.00622 +Train acc: 98.723 +Test loss:0.00120 +Test acc: 94.097 +beat acc: 95.13888888888889 +epoch: 224 +dt: 15 +T: 60 +Tarin loss:0.00600 +Train acc: 98.979 +Test loss:0.00116 +Test acc: 94.097 +beat acc: 95.13888888888889 +epoch: 225 +dt: 15 +T: 60 +Tarin loss:0.00546 +Train acc: 98.809 +Test loss:0.00115 +Test acc: 94.444 +beat acc: 95.13888888888889 +epoch: 226 +dt: 15 +T: 60 +Tarin loss:0.00533 +Train acc: 99.064 +Test loss:0.00119 +Test acc: 94.097 +beat acc: 95.13888888888889 +epoch: 227 +dt: 15 +T: 60 +Tarin loss:0.00524 +Train acc: 99.149 +Test loss:0.00121 +Test acc: 93.750 +beat acc: 95.13888888888889 +epoch: 228 +dt: 15 +T: 60 +Tarin loss:0.00535 +Train acc: 99.064 +Test loss:0.00118 +Test acc: 94.097 +beat acc: 95.13888888888889 +epoch: 229 +dt: 15 +T: 60 +Tarin loss:0.00614 +Train acc: 98.638 +Test loss:0.00120 +Test acc: 94.097 +beat acc: 95.13888888888889 +epoch: 230 +dt: 15 +T: 60 +Tarin loss:0.00566 +Train acc: 99.234 +Test loss:0.00116 +Test acc: 94.444 +beat acc: 95.13888888888889 +epoch: 231 +dt: 15 +T: 60 +Tarin loss:0.00680 +Train acc: 98.723 +Test loss:0.00120 +Test acc: 94.444 +beat acc: 95.13888888888889 +epoch: 232 +dt: 15 +T: 60 +Tarin loss:0.00504 +Train acc: 99.404 +Test loss:0.00119 +Test acc: 94.444 +beat acc: 95.13888888888889 +epoch: 233 +dt: 15 +T: 60 +Tarin loss:0.00623 +Train acc: 98.809 +Test loss:0.00118 +Test acc: 94.792 +beat acc: 95.13888888888889 +epoch: 234 +dt: 15 +T: 60 +Tarin loss:0.00560 +Train acc: 98.468 +Test loss:0.00116 +Test acc: 94.444 +beat acc: 95.13888888888889 +epoch: 235 +dt: 15 +T: 60 +Tarin loss:0.00608 +Train acc: 98.468 +Test loss:0.00118 +Test acc: 94.097 +beat acc: 95.13888888888889 +epoch: 236 +dt: 15 +T: 60 +Tarin loss:0.00497 +Train acc: 99.149 +Test loss:0.00116 +Test acc: 94.444 +beat acc: 95.13888888888889 +epoch: 237 +dt: 15 +T: 60 +Tarin loss:0.00532 +Train acc: 98.979 +Test loss:0.00121 +Test acc: 94.444 +beat acc: 95.13888888888889 +epoch: 238 +dt: 15 +T: 60 +Tarin loss:0.00604 +Train acc: 98.979 +Test loss:0.00118 +Test acc: 94.097 +beat acc: 95.13888888888889 +epoch: 239 +dt: 15 +T: 60 +Tarin loss:0.00605 +Train acc: 98.723 +Test loss:0.00120 +Test acc: 93.750 +beat acc: 95.13888888888889 +epoch: 240 +dt: 15 +T: 60 +Tarin loss:0.00624 +Train acc: 99.149 +Test loss:0.00123 +Test acc: 95.486 +Saving.. 
+beat acc: 95.48611111111111 +epoch: 241 +dt: 15 +T: 60 +Tarin loss:0.00542 +Train acc: 98.979 +Test loss:0.00118 +Test acc: 94.097 +beat acc: 95.48611111111111 +epoch: 242 +dt: 15 +T: 60 +Tarin loss:0.00479 +Train acc: 98.894 +Test loss:0.00115 +Test acc: 94.792 +beat acc: 95.48611111111111 +epoch: 243 +dt: 15 +T: 60 +Tarin loss:0.00563 +Train acc: 99.489 +Test loss:0.00117 +Test acc: 93.750 +beat acc: 95.48611111111111 +epoch: 244 +dt: 15 +T: 60 +Tarin loss:0.00589 +Train acc: 98.979 +Test loss:0.00119 +Test acc: 94.097 +beat acc: 95.48611111111111 +epoch: 245 +dt: 15 +T: 60 +Tarin loss:0.00602 +Train acc: 99.149 +Test loss:0.00119 +Test acc: 94.792 +beat acc: 95.48611111111111 +epoch: 246 +dt: 15 +T: 60 +Tarin loss:0.00511 +Train acc: 99.149 +Test loss:0.00118 +Test acc: 94.097 +beat acc: 95.48611111111111 +epoch: 247 +dt: 15 +T: 60 +Tarin loss:0.00546 +Train acc: 99.064 +Test loss:0.00121 +Test acc: 94.444 +beat acc: 95.48611111111111 +epoch: 248 +dt: 15 +T: 60 +Tarin loss:0.00585 +Train acc: 98.723 +Test loss:0.00120 +Test acc: 94.792 +beat acc: 95.48611111111111 +epoch: 249 +dt: 15 +T: 60 +Tarin loss:0.00549 +Train acc: 99.234 +Test loss:0.00120 +Test acc: 94.097 +beat acc: 95.48611111111111 +epoch: 250 +dt: 15 +T: 60 +Tarin loss:0.00653 +Train acc: 99.064 +Test loss:0.00118 +Test acc: 94.792 +beat acc: 95.48611111111111 +epoch: 251 +dt: 15 +T: 60 +Tarin loss:0.00716 +Train acc: 97.872 +Test loss:0.00119 +Test acc: 94.792 +beat acc: 95.48611111111111 +epoch: 252 +dt: 15 +T: 60 +Tarin loss:0.00499 +Train acc: 98.894 +Test loss:0.00122 +Test acc: 94.097 +beat acc: 95.48611111111111 +epoch: 253 +dt: 15 +T: 60 +Tarin loss:0.00555 +Train acc: 99.404 +Test loss:0.00119 +Test acc: 94.792 +beat acc: 95.48611111111111 +epoch: 254 +dt: 15 +T: 60 +Tarin loss:0.00517 +Train acc: 98.894 +Test loss:0.00119 +Test acc: 93.750 +beat acc: 95.48611111111111 +epoch: 255 +dt: 15 +T: 60 +Tarin loss:0.00509 +Train acc: 99.149 +Test loss:0.00119 +Test acc: 93.403 +beat acc: 95.48611111111111 +epoch: 256 +dt: 15 +T: 60 +Tarin loss:0.00539 +Train acc: 99.064 +Test loss:0.00118 +Test acc: 94.097 +beat acc: 95.48611111111111 +epoch: 257 +dt: 15 +T: 60 +Tarin loss:0.00496 +Train acc: 99.149 +Test loss:0.00120 +Test acc: 94.097 +beat acc: 95.48611111111111 +epoch: 258 +dt: 15 +T: 60 +Tarin loss:0.00546 +Train acc: 99.064 +Test loss:0.00121 +Test acc: 94.792 +beat acc: 95.48611111111111 +epoch: 259 +dt: 15 +T: 60 +Tarin loss:0.00586 +Train acc: 98.979 +Test loss:0.00115 +Test acc: 94.444 +beat acc: 95.48611111111111 +epoch: 260 +dt: 15 +T: 60 +Tarin loss:0.00474 +Train acc: 98.553 +Test loss:0.00117 +Test acc: 94.097 +beat acc: 95.48611111111111 +epoch: 261 +dt: 15 +T: 60 +Tarin loss:0.00554 +Train acc: 98.894 +Test loss:0.00117 +Test acc: 94.097 +beat acc: 95.48611111111111 +epoch: 262 +dt: 15 +T: 60 +Tarin loss:0.00495 +Train acc: 98.979 +Test loss:0.00121 +Test acc: 94.792 +beat acc: 95.48611111111111 +epoch: 263 +dt: 15 +T: 60 +Tarin loss:0.00542 +Train acc: 98.468 +Test loss:0.00122 +Test acc: 94.097 +beat acc: 95.48611111111111 +epoch: 264 +dt: 15 +T: 60 +Tarin loss:0.00553 +Train acc: 99.234 +Test loss:0.00118 +Test acc: 95.139 +beat acc: 95.48611111111111 +epoch: 265 +dt: 15 +T: 60 +Tarin loss:0.00585 +Train acc: 98.723 +Test loss:0.00120 +Test acc: 93.403 +beat acc: 95.48611111111111 +epoch: 266 +dt: 15 +T: 60 +Tarin loss:0.00648 +Train acc: 98.979 +Test loss:0.00124 +Test acc: 93.403 +beat acc: 95.48611111111111 +epoch: 267 +dt: 15 +T: 60 +Tarin loss:0.00574 +Train acc: 98.894 +Test loss:0.00122 +Test 
acc: 93.750 +beat acc: 95.48611111111111 +epoch: 268 +dt: 15 +T: 60 +Tarin loss:0.00601 +Train acc: 98.553 +Test loss:0.00122 +Test acc: 94.444 +beat acc: 95.48611111111111 +epoch: 269 +dt: 15 +T: 60 +Tarin loss:0.00527 +Train acc: 98.638 +Test loss:0.00119 +Test acc: 94.444 +beat acc: 95.48611111111111 +epoch: 270 +dt: 15 +T: 60 +Tarin loss:0.00589 +Train acc: 98.468 +Test loss:0.00117 +Test acc: 94.444 +beat acc: 95.48611111111111 +epoch: 271 +dt: 15 +T: 60 +Tarin loss:0.00510 +Train acc: 99.149 +Test loss:0.00120 +Test acc: 94.792 +beat acc: 95.48611111111111 +epoch: 272 +dt: 15 +T: 60 +Tarin loss:0.00599 +Train acc: 98.723 +Test loss:0.00122 +Test acc: 93.750 +beat acc: 95.48611111111111 +epoch: 273 +dt: 15 +T: 60 +Tarin loss:0.00529 +Train acc: 99.064 +Test loss:0.00119 +Test acc: 94.097 +beat acc: 95.48611111111111 +epoch: 274 +dt: 15 +T: 60 +Tarin loss:0.00554 +Train acc: 99.064 +Test loss:0.00117 +Test acc: 94.097 +beat acc: 95.48611111111111 +epoch: 275 +dt: 15 +T: 60 +Tarin loss:0.00577 +Train acc: 98.723 +Test loss:0.00119 +Test acc: 94.097 +beat acc: 95.48611111111111 +epoch: 276 +dt: 15 +T: 60 +Tarin loss:0.00550 +Train acc: 98.553 +Test loss:0.00119 +Test acc: 94.097 +beat acc: 95.48611111111111 +epoch: 277 +dt: 15 +T: 60 +Tarin loss:0.00577 +Train acc: 98.638 +Test loss:0.00122 +Test acc: 94.097 +beat acc: 95.48611111111111 +epoch: 278 +dt: 15 +T: 60 +Tarin loss:0.00570 +Train acc: 98.723 +Test loss:0.00122 +Test acc: 93.750 +beat acc: 95.48611111111111 +epoch: 279 +dt: 15 +T: 60 +Tarin loss:0.00646 +Train acc: 98.723 +Test loss:0.00118 +Test acc: 94.792 +beat acc: 95.48611111111111 +epoch: 280 +dt: 15 +T: 60 +Tarin loss:0.00476 +Train acc: 98.979 +Test loss:0.00118 +Test acc: 94.792 +beat acc: 95.48611111111111 +epoch: 281 +dt: 15 +T: 60 +Tarin loss:0.00534 +Train acc: 98.894 +Test loss:0.00116 +Test acc: 94.444 +beat acc: 95.48611111111111 +epoch: 282 +dt: 15 +T: 60 +Tarin loss:0.00536 +Train acc: 98.298 +Test loss:0.00118 +Test acc: 94.444 +beat acc: 95.48611111111111 +epoch: 283 +dt: 15 +T: 60 +Tarin loss:0.00706 +Train acc: 98.298 +Test loss:0.00123 +Test acc: 94.444 +beat acc: 95.48611111111111 +epoch: 284 +dt: 15 +T: 60 +Tarin loss:0.00556 +Train acc: 99.319 +Test loss:0.00123 +Test acc: 94.444 +beat acc: 95.48611111111111 +epoch: 285 +dt: 15 +T: 60 +Tarin loss:0.00605 +Train acc: 98.638 +Test loss:0.00120 +Test acc: 94.792 +beat acc: 95.48611111111111 +epoch: 286 +dt: 15 +T: 60 +Tarin loss:0.00492 +Train acc: 99.404 +Test loss:0.00118 +Test acc: 94.444 +beat acc: 95.48611111111111 +epoch: 287 +dt: 15 +T: 60 +Tarin loss:0.00547 +Train acc: 98.979 +Test loss:0.00120 +Test acc: 93.750 +beat acc: 95.48611111111111 +epoch: 288 +dt: 15 +T: 60 +Tarin loss:0.00587 +Train acc: 98.894 +Test loss:0.00118 +Test acc: 94.097 +beat acc: 95.48611111111111 +epoch: 289 +dt: 15 +T: 60 +Tarin loss:0.00612 +Train acc: 98.638 +Test loss:0.00122 +Test acc: 93.750 +beat acc: 95.48611111111111 +epoch: 290 +dt: 15 +T: 60 +Tarin loss:0.00566 +Train acc: 99.064 +Test loss:0.00119 +Test acc: 94.444 +beat acc: 95.48611111111111 +epoch: 291 +dt: 15 +T: 60 +Tarin loss:0.00596 +Train acc: 98.553 +Test loss:0.00123 +Test acc: 94.444 +beat acc: 95.48611111111111 +epoch: 292 +dt: 15 +T: 60 +Tarin loss:0.00636 +Train acc: 98.809 +Test loss:0.00119 +Test acc: 94.444 +beat acc: 95.48611111111111 +epoch: 293 +dt: 15 +T: 60 +Tarin loss:0.00581 +Train acc: 98.723 +Test loss:0.00120 +Test acc: 93.750 +beat acc: 95.48611111111111 +epoch: 294 +dt: 15 +T: 60 +Tarin loss:0.00625 +Train acc: 98.809 +Test 
loss:0.00124 +Test acc: 94.444 +beat acc: 95.48611111111111 +epoch: 295 +dt: 15 +T: 60 +Tarin loss:0.00531 +Train acc: 99.234 +Test loss:0.00120 +Test acc: 94.444 +beat acc: 95.48611111111111 +epoch: 296 +dt: 15 +T: 60 +Tarin loss:0.00487 +Train acc: 98.723 +Test loss:0.00117 +Test acc: 94.792 +beat acc: 95.48611111111111 +epoch: 297 +dt: 15 +T: 60 +Tarin loss:0.00497 +Train acc: 98.979 +Test loss:0.00121 +Test acc: 94.097 +beat acc: 95.48611111111111 +epoch: 298 +dt: 15 +T: 60 +Tarin loss:0.00499 +Train acc: 98.723 +Test loss:0.00118 +Test acc: 94.792 +beat acc: 95.48611111111111 +epoch: 299 +dt: 15 +T: 60 +Tarin loss:0.00517 +Train acc: 98.894 +Test loss:0.00121 +Test acc: 94.444 +beat acc: 95.48611111111111 +epoch: 300 +dt: 15 +T: 60 +Tarin loss:0.00495 +Train acc: 99.574 +Test loss:0.00116 +Test acc: 94.792 +beat acc: 95.48611111111111 +best acc: 95.48611111111111 best_epoch: 240 diff --git a/Results/Result_43/TCSA_SNN(CNN)-DVS-Gesture_dt=15ms_T=60.csv b/Results/Result_43/TCSA_SNN(CNN)-DVS-Gesture_dt=15ms_T=60.csv new file mode 100644 index 0000000..68291d8 --- /dev/null +++ b/Results/Result_43/TCSA_SNN(CNN)-DVS-Gesture_dt=15ms_T=60.csv @@ -0,0 +1,5 @@ +Epochs,1,2,3,4,5,6,7,8,9,10,11,12,13,14,15,16,17,18,19,20,21,22,23,24,25,26,27,28,29,30,31,32,33,34,35,36,37,38,39,40,41,42,43,44,45,46,47,48,49,50,51,52,53,54,55,56,57,58,59,60,61,62,63,64,65,66,67,68,69,70,71,72,73,74,75,76,77,78,79,80,81,82,83,84,85,86,87,88,89,90,91,92,93,94,95,96,97,98,99,100,101,102,103,104,105,106,107,108,109,110,111,112,113,114,115,116,117,118,119,120,121,122,123,124,125,126,127,128,129,130,131,132,133,134,135,136,137,138,139,140,141,142,143,144,145,146,147,148,149,150,151,152,153,154,155,156,157,158,159,160,161,162,163,164,165,166,167,168,169,170,171,172,173,174,175,176,177,178,179,180,181,182,183,184,185,186,187,188,189,190,191,192,193,194,195,196,197,198,199,200,201,202,203,204,205,206,207,208,209,210,211,212,213,214,215,216,217,218,219,220,221,222,223,224,225,226,227,228,229,230,231,232,233,234,235,236,237,238,239,240,241,242,243,244,245,246,247,248,249,250,251,252,253,254,255,256,257,258,259,260,261,262,263,264,265,266,267,268,269,270,271,272,273,274,275,276,277,278,279,280,281,282,283,284,285,286,287,288,289,290,291,292,293,294,295,296,297,298,299,300,202 
+Train_Loss,0.0817619264125824,0.07938119769096375,0.07831129655241967,0.07017166838049889,0.06249977350234985,0.060484439879655835,0.05889853686094284,0.055950015783309937,0.053983813151717186,0.05237787961959839,0.049748575687408446,0.047577469423413274,0.04662046395242214,0.04343795664608478,0.041768360510468484,0.04096023924648762,0.03829753212630749,0.03620485179126263,0.034456591680645944,0.033241191133856776,0.031038175150752068,0.03071222174912691,0.028931938298046588,0.026993177458643915,0.02679173294454813,0.025645730644464494,0.025239812582731246,0.02489432655274868,0.023078807815909385,0.022126049362123014,0.020677856262773277,0.020333175919950007,0.019315582886338233,0.018679632525891065,0.017507948447018862,0.018308005575090647,0.018501173984259368,0.016968384757637976,0.01614844463765621,0.017265631444752218,0.01642801398411393,0.015805695578455924,0.01536155641078949,0.01440116399899125,0.013001086749136448,0.014950401708483696,0.013904918730258942,0.011753417924046517,0.013864753860980272,0.012490108143538237,0.013081566989421844,0.012586647644639015,0.01222921097651124,0.012242811266332865,0.01123660709708929,0.011028117360547185,0.01084657502360642,0.010516327433288097,0.010043293889611959,0.011645493609830738,0.010018661292269826,0.010328067000955343,0.009240597067400814,0.01038159434683621,0.010923519218340515,0.0107917754445225,0.010864055994898082,0.01124898954294622,0.011179570481181145,0.011649037757888437,0.010618013236671687,0.010153891891241074,0.010147759644314646,0.00960706458427012,0.010675376281142235,0.010581417987123132,0.010460918582975865,0.01036696434020996,0.010912931803613902,0.011186765134334564,0.012216046079993248,0.010516824712976814,0.010838363086804748,0.010670251119881869,0.010103181889280676,0.010219548549503087,0.01096191257238388,0.011654696334153414,0.010071672219783067,0.01032913695089519,0.010703323921188713,0.010665056481957435,0.011407371703535318,0.010298742260783911,0.010503018135204911,0.010915305418893694,0.01045796349644661,0.01009654370136559,0.010541452979668974,0.01066613565199077,0.011059600906446577,0.010609146859496831,0.010977576859295369,0.009685425646603107,0.010520964488387108,0.009993886621668936,0.009484911570325494,0.009782159142196179,0.010695233521983028,0.010946442605927587,0.011066832300275563,0.009960408927872777,0.011505325231701136,0.01059479545801878,0.011524707172065973,0.010983599349856377,0.010983864730224013,0.00987563282251358,0.010343271400779485,0.010526555264368654,0.01028744806535542,0.011152717843651771,0.009640682954341173,0.009671192057430745,0.011071579996496439,0.011294413730502129,0.011649378342553973,0.01117040435783565,0.010296715423464775,0.010713181830942631,0.010430572368204594,0.00995368747971952,0.010402054665610195,0.011045056348666548,0.010155918030068278,0.010536802001297474,0.010714042512699962,0.011012600734829903,0.011027538357302547,0.009127772971987724,0.01078446232713759,0.011069206055253744,0.010251909447833895,0.009858740912750363,0.010910129314288497,0.010505888937041163,0.010024758335202933,0.010975938010960817,0.010708058532327414,0.010054966807365418,0.008868662780150771,0.00959297250956297,0.01093264613300562,0.010890778852626681,0.00983351948671043,0.01080886209383607,0.011117894435301423,0.01088467836380005,0.010520372353494168,0.010031775617972017,0.010389338200911879,0.011332032922655343,0.010124812182039023,0.011209301697090269,0.010125213209539652,0.009539801720529794,0.00938011882826686,0.009613866871222853,0.010463785892352463,0.010622650384902954,0.01103722350671887
4,0.010886391904205085,0.011127966269850732,0.010928525775671005,0.01036774911917746,0.009600057872012258,0.0110278713516891,0.010874422686174511,0.011067397845908999,0.010390035575255752,0.009057997353374958,0.009673946211114526,0.0117403166834265,0.009692162601277232,0.009103306848555804,0.010799702862277627,0.011835355078801513,0.009770618611946702,0.011421925015747547,0.00971734719350934,0.010670995805412531,0.01101247975602746,0.010465228138491512,0.009854565979912877,0.01042121797800064,0.01040297425352037,0.01003807047381997,0.01105805542320013,0.009845261741429568,0.010469883913174272,0.010648100497201085,0.011163349077105523,0.010434907721355557,0.010704637924209236,0.009956790087744593,0.00992733808234334,0.010336466692388057,0.010637106234207749,0.009940990805625915,0.010763395065441727,0.009548657527193426,0.010781139275059104,0.011316077830269933,0.009611328039318323,0.010827110288664699,0.010530436364933848,0.009764801245182753,0.011113997455686331,0.010532335052266717,0.010932664899155498,0.009234645729884505,0.010528042865917086,0.009725281596183777,0.009784528939053416,0.011456403462216259,0.010947908135131001,0.009565776120871306,0.010839853901416063,0.01169079397805035,0.010039347782731056,0.009903800953179599,0.010228500654920935,0.010502798575907945,0.010181338619440794,0.01067491713911295,0.009104666579514743,0.010471442388370633,0.010271529713645578,0.010363764734938741,0.010790697438642383,0.010986818140372634,0.010934829618781805,0.011147677572444082,0.010842151055112482,0.010246547544375062,0.010527616459876299,0.011124465707689523,0.01076837587170303,0.011540586035698653,0.010298843355849385,0.009890676941722631,0.009885478252544998,0.012104177149012685,0.01050978284329176,0.010864971159026026,0.010445602098479867,0.011459358129650354,0.009771482273936271,0.010026798862963916,0.010515522072091698,0.011892051016911865,0.010210667503997684,0.011240761121734976,0.010479641938582062,0.009845658484846354,0.009105926658958197,0.011104554077610374,0.010731598967686296,0.010272807069122791,0.010598418582230807,0.01015697973780334,0.01036317222751677,0.010398245230317116,0.011385544762015342,0.009386884234845638,0.01084754285402596,0.009731813613325358,0.009519388666376472,0.010567268263548613,0.009744997136294842,0.010379956196993589,0.011155618820339441,0.010711038857698441,0.010265558073297144,0.009898832766339183,0.009929832397028803,0.011330119706690312,0.012091563548892736,0.011639951169490815,0.009930332424119115,0.011635370505973696,0.010108341975137592,0.010914292326197029,0.009798888769000768,0.009848354337736964,0.01015521101653576,0.010379026038572192,0.01142339832149446,0.009474906651303173,0.009559086803346873, 
+Test_Loss,0.009090908616781234,0.009090908616781234,0.009090908616781234,0.009034227165910931,0.0073013342916965485,0.006174597226911121,0.005828766773144404,0.005636400274104542,0.00536306363840898,0.005270221953590711,0.0049896445125341415,0.004923051388727295,0.004745404173930486,0.004658791836765077,0.004382358077499602,0.004233348245422045,0.004149730586343341,0.004035472662912476,0.003889282378885481,0.003978797379467223,0.00360002089291811,0.003526925419767698,0.0034087505398525137,0.003254143728150262,0.003141672722995281,0.0030409835278987886,0.003017749471796884,0.002995151943630642,0.002896988060739305,0.002752673232720958,0.002646527604924308,0.002527768806450897,0.0025777548965480596,0.002484423936241203,0.0023881399797068704,0.0024332568877273135,0.0023674547879232303,0.0022991782882147366,0.002307366952300072,0.0024683763273060325,0.0022440550020999383,0.002139253510783116,0.0020939042170842487,0.0021601789423988924,0.002135225416471561,0.0019531090329918596,0.0019338689951433074,0.0019088575616478922,0.0019417770620849396,0.0019131531421509053,0.002008420932624075,0.0018127339788609079,0.0018605480591456095,0.001832801890042093,0.0017909861376716031,0.001730117005192571,0.0017366800871160297,0.0017415368205143346,0.0017291919845673772,0.0016901154898934893,0.0016736189110411537,0.0016732532220582168,0.0017343661333951686,0.001749269426282909,0.0017204158422019745,0.0017370317027800614,0.0017807930707931519,0.0016954193719559245,0.0017038167247341736,0.0016861521949370702,0.001669583728330003,0.001743863864491383,0.0017651855324705443,0.001718501757002539,0.001697590346965525,0.0016775847826566964,0.0017026005623241266,0.0017336550168693063,0.0017106954008340837,0.0016938480962481765,0.0017134573931495347,0.0016648138459357954,0.0016868159381879702,0.001681923307478428,0.0017408037765158546,0.0017135170288383958,0.0016986381572981675,0.0017135161596039932,0.001713793228069941,0.001735700625512335,0.001715399573246638,0.0017010643871294127,0.0017108759946293302,0.0016597159724268647,0.0016807624449332555,0.0017107348268230757,0.0017134302088783844,0.0017070960253477096,0.0016708595160808827,0.0016646095344589817,0.0016795435920357704,0.0017890667439334925,0.001805167045030329,0.00174585429744588,0.0017016535935302573,0.0017139001749455927,0.001665026011566321,0.0016834620593322647,0.0016870737696687381,0.0017236775615149076,0.0017075835106273491,0.001714457788815101,0.0017187218078308635,0.0016955868341028688,0.001743056325034963,0.0017790201327039135,0.0017190576117071842,0.0017439918903013071,0.0017131618638005523,0.0016801801924076346,0.0016993949086301858,0.0016885363289879428,0.0016775365815394454,0.0016590513806376192,0.0016655223340623908,0.0017162780794832443,0.0017109812651243475,0.0017561402482291063,0.0017236557478706041,0.0016478411853313446,0.001696523299647702,0.0017078676364488071,0.0016884793216983478,0.0016810385613805717,0.00167237115609977,0.0017222755795551674,0.001698482140070862,0.0016626717419260077,0.0016373280642761126,0.0016610101796686652,0.0016651786035961574,0.001676116128348642,0.001705987710091803,0.0016814498127334647,0.0016906713963382772,0.0017302011967533163,0.0017136423641608823,0.0017137081672747932,0.0016775716199643083,0.0017175003782742553,0.0017442409156097306,0.001727180555462837,0.0016685280741916763,0.0016900313190288012,0.0017514246826370555,0.001706603252225452,0.0017372710319856805,0.0017332955573995906,0.0017350167139536805,0.0016714663244783879,0.0017820925969216559,0.0017492554667923187,0.0017253120429813861,0.0016878927747408
549,0.0017116388823423122,0.0017580807312495175,0.001735572630746497,0.0016913246466881702,0.0016796777438786293,0.001678234493980805,0.0017163947017656432,0.0017269666203194192,0.0017054151640170149,0.0017049556701547567,0.0016830579274230535,0.0016991345004902945,0.0016765772985915346,0.0017748376975456877,0.0017681422436402903,0.0017189831162492434,0.0017008188801507153,0.0016747491227255926,0.0016718424753182463,0.0016618273738357755,0.0016803178004920485,0.0017276320399509536,0.0016600447499917617,0.0016844529141154551,0.0017078123676280178,0.001681041241519981,0.0017057255427870488,0.001679410371515486,0.0017162307579484252,0.0017173478793766764,0.0016800117885900868,0.0016814234252605172,0.0016889563037289512,0.0016942970144251984,0.0016830543987452983,0.001754877633518643,0.0017649278562102053,0.0016866388627224498,0.0016392789781093594,0.0016759863847659693,0.0017366730918486914,0.0017774287197324964,0.001714754187398487,0.0017338891617125934,0.001710767319632901,0.001696107629686594,0.0017074739560484886,0.001724518525103728,0.0016892526816162795,0.001744039222184155,0.0017143877119653753,0.0017134670271641678,0.0017143271656499966,0.0016719686695271067,0.0016812630515131684,0.0017353718686434958,0.0017291867070727874,0.0017791156553559834,0.0017382513110836347,0.001704692685355743,0.0017232725396752358,0.0017402057846387228,0.001731425244361162,0.0016955430205497475,0.0017041147996981936,0.0016955097102456622,0.0017346703861322668,0.0017192584669424426,0.001778484373870823,0.0017123429311646356,0.001718696424116691,0.0017580745741724965,0.001716826990660694,0.0017007978322605291,0.001714719117929538,0.0017382355613840951,0.0017346949213080934,0.0016920198686420918,0.001700401513112916,0.0016973238748808702,0.0017014054581522943,0.0016559105469948717,0.0017001059734159045,0.001657192481474744,0.0016611242045958838,0.001691134367138147,0.001709203039192491,0.0017829062417149544,0.0017371438961062166,0.0017306755917767683,0.0017117309073607127,0.0017578158734573256,0.0016981059478388893,0.001685102687527736,0.0017216942687001491,0.0017567373915678924,0.0017273804276353782,0.0016927862643367718,0.0017267658271723325,0.0016972264274954798,0.0016427248923314942,0.0017063928768038749,0.0017071626356078519,0.0017514615319669247,0.0016892895412941773,0.001709040854540136,0.001668962970789936,0.001662915562176042,0.0016748000143302815,0.00166466124355793,0.0016671812575724389,0.001693906014164289,0.001689182542678383,0.001714855898171663,0.0017302275117900634,0.0017080359988742406,0.001708313977966706,0.0017263151394824186,0.0017655398696660995,0.0017026058191226589,0.0017092679109838274,0.001734745761172639,0.0016981585779123837,0.001703960531287723,0.001728835143148899,0.0016998578690820268,0.001716803376459413,0.0016540341596636505,0.0016763563060926065,0.001660953193075127,0.0016843713406059478,0.0017131890687677594,0.0016825905276669397,0.001700874956117736,0.0016706849655343428,0.0017081429664459495, 
+Train_Accuracy,26.72340425531915,44.0,47.744680851063826,48.340425531914896,58.12765957446808,60.51063829787234,62.212765957446805,67.14893617021276,67.82978723404256,71.06382978723404,72.76595744680851,75.48936170212765,78.38297872340425,81.70212765957447,81.7872340425532,83.57446808510639,84.93617021276596,86.04255319148936,87.14893617021276,87.06382978723404,88.93617021276596,89.95744680851064,88.85106382978724,91.23404255319149,90.38297872340425,91.57446808510639,92.93617021276596,91.31914893617021,92.08510638297872,92.93617021276596,92.76595744680851,93.27659574468085,94.55319148936171,93.53191489361703,95.40425531914893,94.04255319148936,94.38297872340425,94.8936170212766,95.23404255319149,95.23404255319149,95.82978723404256,95.91489361702128,95.82978723404256,95.57446808510639,97.1063829787234,96.42553191489361,95.91489361702128,97.7872340425532,96.0,97.19148936170212,96.51063829787235,97.27659574468085,96.93617021276596,96.85106382978724,97.95744680851064,96.93617021276596,96.68085106382979,97.70212765957447,98.29787234042553,97.02127659574468,97.70212765957447,98.46808510638297,98.38297872340425,98.29787234042553,97.95744680851064,97.53191489361703,98.12765957446808,97.36170212765957,97.1063829787234,97.70212765957447,97.44680851063829,97.95744680851064,97.44680851063829,98.29787234042553,98.04255319148936,97.95744680851064,97.95744680851064,97.53191489361703,97.61702127659575,97.61702127659575,98.29787234042553,97.53191489361703,97.44680851063829,98.12765957446808,98.04255319148936,97.61702127659575,97.53191489361703,97.36170212765957,98.12765957446808,97.36170212765957,98.04255319148936,97.27659574468085,97.44680851063829,97.27659574468085,97.27659574468085,96.85106382978724,97.44680851063829,97.27659574468085,97.61702127659575,97.44680851063829,97.95744680851064,97.36170212765957,97.95744680851064,97.53191489361703,98.29787234042553,97.61702127659575,98.29787234042553,97.61702127659575,97.1063829787234,97.61702127659575,97.87234042553192,97.44680851063829,97.61702127659575,97.53191489361703,97.87234042553192,97.19148936170212,97.27659574468085,98.2127659574468,97.53191489361703,98.12765957446808,97.19148936170212,97.02127659574468,97.70212765957447,97.95744680851064,97.87234042553192,98.04255319148936,97.44680851063829,97.7872340425532,97.87234042553192,97.53191489361703,97.95744680851064,97.7872340425532,97.19148936170212,97.19148936170212,97.1063829787234,97.70212765957447,97.44680851063829,97.7872340425532,98.29787234042553,98.29787234042553,96.85106382978724,97.27659574468085,97.61702127659575,97.70212765957447,97.61702127659575,97.87234042553192,97.95744680851064,97.27659574468085,98.2127659574468,97.27659574468085,97.7872340425532,97.87234042553192,98.38297872340425,97.61702127659575,97.95744680851064,97.36170212765957,97.1063829787234,98.12765957446808,97.53191489361703,98.29787234042553,97.44680851063829,97.27659574468085,98.12765957446808,97.44680851063829,97.53191489361703,97.44680851063829,98.38297872340425,97.27659574468085,98.04255319148936,97.95744680851064,96.68085106382979,97.95744680851064,97.53191489361703,97.44680851063829,97.7872340425532,98.38297872340425,97.70212765957447,97.53191489361703,96.76595744680851,97.7872340425532,98.04255319148936,97.7872340425532,97.53191489361703,98.04255319148936,97.70212765957447,97.7872340425532,97.36170212765957,98.12765957446808,97.61702127659575,97.95744680851064,97.7872340425532,97.44680851063829,97.87234042553192,97.7872340425532,97.36170212765957,97.02127659574468,98.12765957446808,97.53191489361703,97.53191489361703,
97.61702127659575,97.61702127659575,97.36170212765957,97.70212765957447,97.36170212765957,98.29787234042553,98.12765957446808,97.36170212765957,97.36170212765957,97.70212765957447,97.70212765957447,98.55319148936171,97.61702127659575,97.95744680851064,97.7872340425532,97.53191489361703,98.04255319148936,97.53191489361703,97.36170212765957,97.36170212765957,98.2127659574468,98.29787234042553,97.7872340425532,98.29787234042553,98.12765957446808,97.36170212765957,97.61702127659575,98.2127659574468,97.87234042553192,97.61702127659575,97.61702127659575,97.7872340425532,98.38297872340425,98.2127659574468,97.7872340425532,97.53191489361703,98.04255319148936,97.36170212765957,98.04255319148936,97.7872340425532,97.61702127659575,97.36170212765957,97.44680851063829,97.7872340425532,98.29787234042553,97.02127659574468,97.7872340425532,97.1063829787234,97.44680851063829,97.19148936170212,97.70212765957447,97.70212765957447,98.12765957446808,97.44680851063829,98.2127659574468,97.70212765957447,96.76595744680851,97.27659574468085,98.12765957446808,97.02127659574468,97.95744680851064,97.61702127659575,97.53191489361703,98.04255319148936,98.12765957446808,98.29787234042553,98.38297872340425,97.53191489361703,97.7872340425532,97.53191489361703,97.70212765957447,97.44680851063829,97.7872340425532,97.44680851063829,98.04255319148936,98.04255319148936,97.44680851063829,98.12765957446808,98.04255319148936,97.61702127659575,97.27659574468085,97.44680851063829,97.02127659574468,96.85106382978724,97.7872340425532,98.29787234042553,97.1063829787234,97.95744680851064,97.1063829787234,97.19148936170212,98.04255319148936,97.87234042553192,97.95744680851064,97.95744680851064,98.38297872340425,98.55319148936171,97.19148936170212,97.27659574468085,97.61702127659575,98.38297872340425,98.38297872340425, 
+Test_Accuracy,8.333333333333334,8.333333333333334,8.333333333333334,9.722222222222221,38.541666666666664,51.736111111111114,55.208333333333336,59.02777777777778,63.19444444444444,65.625,71.52777777777777,71.875,71.875,77.08333333333333,80.90277777777777,82.98611111111111,81.25,82.29166666666667,84.02777777777777,81.59722222222223,85.06944444444444,85.76388888888889,85.76388888888889,87.5,86.11111111111111,87.84722222222223,86.11111111111111,89.58333333333333,88.19444444444444,88.19444444444444,87.5,89.58333333333333,88.54166666666667,87.84722222222223,89.23611111111111,89.23611111111111,89.23611111111111,90.625,88.88888888888889,86.11111111111111,88.54166666666667,89.93055555555556,90.97222222222223,89.93055555555556,88.88888888888889,90.625,90.97222222222223,91.66666666666667,90.27777777777777,91.66666666666667,90.625,92.01388888888889,91.31944444444444,91.31944444444444,92.01388888888889,93.40277777777777,92.70833333333333,92.36111111111111,93.40277777777777,93.75,92.70833333333333,92.70833333333333,93.40277777777777,93.05555555555556,91.66666666666667,92.01388888888889,91.31944444444444,93.40277777777777,92.70833333333333,93.05555555555556,93.05555555555556,92.70833333333333,93.05555555555556,92.70833333333333,92.36111111111111,92.36111111111111,92.01388888888889,92.36111111111111,92.70833333333333,92.01388888888889,92.70833333333333,93.05555555555556,93.75,92.70833333333333,92.70833333333333,92.36111111111111,92.01388888888889,93.05555555555556,93.05555555555556,91.66666666666667,92.36111111111111,91.66666666666667,92.01388888888889,92.01388888888889,93.05555555555556,92.36111111111111,92.36111111111111,92.36111111111111,93.40277777777777,93.40277777777777,92.70833333333333,91.31944444444444,91.66666666666667,92.36111111111111,93.40277777777777,92.70833333333333,92.01388888888889,93.05555555555556,92.36111111111111,92.36111111111111,93.40277777777777,92.01388888888889,92.01388888888889,93.40277777777777,93.40277777777777,90.97222222222223,94.09722222222223,92.70833333333333,92.36111111111111,93.05555555555556,92.36111111111111,92.70833333333333,93.05555555555556,92.70833333333333,92.70833333333333,93.05555555555556,92.36111111111111,92.70833333333333,92.01388888888889,92.36111111111111,92.70833333333333,92.01388888888889,93.40277777777777,92.01388888888889,92.70833333333333,92.01388888888889,91.66666666666667,92.36111111111111,92.01388888888889,91.66666666666667,93.40277777777777,92.01388888888889,93.05555555555556,93.05555555555556,93.75,92.70833333333333,92.36111111111111,93.05555555555556,92.70833333333333,93.05555555555556,93.05555555555556,92.01388888888889,92.70833333333333,92.36111111111111,92.36111111111111,92.70833333333333,92.36111111111111,91.66666666666667,92.01388888888889,92.01388888888889,90.97222222222223,92.01388888888889,91.31944444444444,92.70833333333333,92.36111111111111,91.66666666666667,92.36111111111111,93.05555555555556,92.01388888888889,93.05555555555556,93.05555555555556,92.70833333333333,90.97222222222223,92.70833333333333,94.09722222222223,93.40277777777777,91.66666666666667,90.27777777777777,90.97222222222223,92.01388888888889,92.36111111111111,92.36111111111111,93.40277777777777,93.05555555555556,93.75,91.31944444444444,92.01388888888889,93.40277777777777,93.05555555555556,92.01388888888889,92.36111111111111,92.01388888888889,92.01388888888889,93.40277777777777,93.75,93.40277777777777,92.01388888888889,92.70833333333333,92.36111111111111,91.66666666666667,93.05555555555556,94.44444444444444,93.40277777777777,93.05555555555556,92.36111111111111,92.01388888
888889,92.01388888888889,92.36111111111111,93.05555555555556,93.05555555555556,93.05555555555556,91.66666666666667,92.36111111111111,92.01388888888889,92.70833333333333,93.40277777777777,92.70833333333333,93.40277777777777,92.36111111111111,91.66666666666667,92.70833333333333,92.36111111111111,91.31944444444444,92.36111111111111,92.01388888888889,92.70833333333333,92.01388888888889,93.05555555555556,93.05555555555556,92.36111111111111,92.70833333333333,92.01388888888889,90.97222222222223,93.40277777777777,91.66666666666667,91.31944444444444,92.01388888888889,93.05555555555556,92.70833333333333,91.31944444444444,90.97222222222223,92.01388888888889,92.01388888888889,92.36111111111111,93.40277777777777,93.05555555555556,92.70833333333333,92.01388888888889,92.70833333333333,92.70833333333333,92.70833333333333,90.97222222222223,92.01388888888889,93.05555555555556,93.40277777777777,92.70833333333333,93.05555555555556,93.05555555555556,92.01388888888889,91.66666666666667,92.01388888888889,93.05555555555556,92.70833333333333,93.40277777777777,93.40277777777777,92.36111111111111,92.70833333333333,92.36111111111111,93.05555555555556,91.66666666666667,92.01388888888889,92.01388888888889,93.05555555555556,92.36111111111111,92.70833333333333,92.70833333333333,92.70833333333333,93.05555555555556,91.66666666666667,92.36111111111111,92.70833333333333,92.70833333333333,91.31944444444444,93.05555555555556,92.36111111111111,91.66666666666667,92.70833333333333,92.70833333333333,92.36111111111111,93.40277777777777,92.01388888888889,92.70833333333333,93.05555555555556,92.36111111111111,92.70833333333333,90.97222222222223,92.70833333333333,93.05555555555556,92.70833333333333,92.70833333333333,94.44444444444444 diff --git a/Results/Result_43/TCSA_SNN(CNN)-DVS-Gesture_dt=15ms_T=60.t7 b/Results/Result_43/TCSA_SNN(CNN)-DVS-Gesture_dt=15ms_T=60.t7 new file mode 100644 index 0000000..32d0010 Binary files /dev/null and b/Results/Result_43/TCSA_SNN(CNN)-DVS-Gesture_dt=15ms_T=60.t7 differ diff --git a/Results/Result_43/log_DVS_Gesture_SNN.txt b/Results/Result_43/log_DVS_Gesture_SNN.txt new file mode 100644 index 0000000..07f9081 --- /dev/null +++ b/Results/Result_43/log_DVS_Gesture_SNN.txt @@ -0,0 +1,2657 @@ +cuda +range(0, 4) +dt==15 +T==60 +attention==TCSA +c_ratio==8 +t_ratio==5 +epoch==0 +num_epochs==300 +onlyTest==False +pretrained_path==None +batch_size==128 +batch_size_test==32 +init_method==None +ds==4 +in_channels==2 +im_width==32 +im_height==32 +target_size==11 +clip==10 +is_train_Enhanced==True +is_spike==False +interval_scaling==False +beta==0 +alpha==0.3 +Vreset==0 +Vthres==0.3 +reduction==16 +T_extend_Conv==False +T_extend_BN==False +h_conv==False +mem_act== +mode_select==spike +TR_model==NTR +track_running_stats==True +a==0.5 +lens==0.25 +lr==0.0001 +betas==[0.9, 0.999] +eps==1e-08 +weight_decay==0 +lr_scheduler==True +lr_scheduler_epoch==25 +name==TCSA_SNN(CNN)-DVS-Gesture_dt=15ms_T=60 +modelPath==/home/noitom-server/PycharmProjects/pythonProject/Attention-SNN/MA_SNN/DVSGestures/CNN/Result +modelNames==TCSA_SNN(CNN)-DVS-Gesture_dt=15ms_T=60.t7 +recordPath==/home/noitom-server/PycharmProjects/pythonProject/Attention-SNN/MA_SNN/DVSGestures/CNN/Result +recordNames==TCSA_SNN(CNN)-DVS-Gesture_dt=15ms_T=60.csv +savePath==/home/noitom-server/PycharmProjects/pythonProject/Attention-SNN/MA_SNN/DVSGestures/data +train_dataset==None +test_dataset==None +train_loader==None +test_loader==None +drop_last==False +pip_memory==False +num_work==8 +model==None +criterion==MSELoss() +optimizer==None +device==cuda 
+device_ids==range(0, 4) +best_acc==0 +best_epoch==0 +epoch_list==[] +loss_train_list==[] +loss_test_list==[] +acc_train_list==[] +acc_test_list==[] +train_loss==0 +train_correct==0 +train_acc==0 +test_loss==0 +test_correct==0 +test_acc==0 +state==None + +DataParallel( + (module): Net( + (convAttLIF0): ConvAttLIF( + (conv2d): Conv2d(2, 64, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1)) + (BNLayer): BatchNorm2d(64, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True) + (attention): TCSA( + (relu): ReLU(inplace=True) + (ca): ChannelAttention( + (avg_pool): AdaptiveAvgPool3d(output_size=1) + (max_pool): AdaptiveMaxPool3d(output_size=1) + (sharedMLP): Sequential( + (0): Conv3d(64, 8, kernel_size=(1, 1, 1), stride=(1, 1, 1), bias=False) + (1): ReLU() + (2): Conv3d(8, 64, kernel_size=(1, 1, 1), stride=(1, 1, 1), bias=False) + ) + (sigmoid): Sigmoid() + ) + (ta): TimeAttention( + (avg_pool): AdaptiveAvgPool3d(output_size=1) + (max_pool): AdaptiveMaxPool3d(output_size=1) + (sharedMLP): Sequential( + (0): Conv3d(60, 12, kernel_size=(1, 1, 1), stride=(1, 1, 1), bias=False) + (1): ReLU() + (2): Conv3d(12, 60, kernel_size=(1, 1, 1), stride=(1, 1, 1), bias=False) + ) + (sigmoid): Sigmoid() + ) + (sa): SpatialAttention( + (conv): Conv2d(2, 1, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False) + (sigmoid): Sigmoid() + ) + ) + (network): Sequential( + (ConvIF): ConvIFCell() + ) + ) + (convAttLIF1): ConvAttLIF( + (conv2d): Conv2d(64, 128, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1)) + (BNLayer): BatchNorm2d(128, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True) + (pooling): AvgPool2d(kernel_size=2, stride=2, padding=0) + (attention): TCSA( + (relu): ReLU(inplace=True) + (ca): ChannelAttention( + (avg_pool): AdaptiveAvgPool3d(output_size=1) + (max_pool): AdaptiveMaxPool3d(output_size=1) + (sharedMLP): Sequential( + (0): Conv3d(128, 16, kernel_size=(1, 1, 1), stride=(1, 1, 1), bias=False) + (1): ReLU() + (2): Conv3d(16, 128, kernel_size=(1, 1, 1), stride=(1, 1, 1), bias=False) + ) + (sigmoid): Sigmoid() + ) + (ta): TimeAttention( + (avg_pool): AdaptiveAvgPool3d(output_size=1) + (max_pool): AdaptiveMaxPool3d(output_size=1) + (sharedMLP): Sequential( + (0): Conv3d(60, 12, kernel_size=(1, 1, 1), stride=(1, 1, 1), bias=False) + (1): ReLU() + (2): Conv3d(12, 60, kernel_size=(1, 1, 1), stride=(1, 1, 1), bias=False) + ) + (sigmoid): Sigmoid() + ) + (sa): SpatialAttention( + (conv): Conv2d(2, 1, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False) + (sigmoid): Sigmoid() + ) + ) + (network): Sequential( + (ConvIF): ConvIFCell() + ) + ) + (convAttLIF2): ConvAttLIF( + (conv2d): Conv2d(128, 128, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1)) + (BNLayer): BatchNorm2d(128, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True) + (pooling): AvgPool2d(kernel_size=2, stride=2, padding=0) + (attention): TCSA( + (relu): ReLU(inplace=True) + (ca): ChannelAttention( + (avg_pool): AdaptiveAvgPool3d(output_size=1) + (max_pool): AdaptiveMaxPool3d(output_size=1) + (sharedMLP): Sequential( + (0): Conv3d(128, 16, kernel_size=(1, 1, 1), stride=(1, 1, 1), bias=False) + (1): ReLU() + (2): Conv3d(16, 128, kernel_size=(1, 1, 1), stride=(1, 1, 1), bias=False) + ) + (sigmoid): Sigmoid() + ) + (ta): TimeAttention( + (avg_pool): AdaptiveAvgPool3d(output_size=1) + (max_pool): AdaptiveMaxPool3d(output_size=1) + (sharedMLP): Sequential( + (0): Conv3d(60, 12, kernel_size=(1, 1, 1), stride=(1, 1, 1), bias=False) + (1): ReLU() + (2): Conv3d(12, 60, kernel_size=(1, 1, 1), 
stride=(1, 1, 1), bias=False) + ) + (sigmoid): Sigmoid() + ) + (sa): SpatialAttention( + (conv): Conv2d(2, 1, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False) + (sigmoid): Sigmoid() + ) + ) + (network): Sequential( + (ConvIF): ConvIFCell() + ) + ) + (FC0): AttLIF( + (network): Sequential( + (IF): IFCell() + ) + (linear): Linear(in_features=8192, out_features=256, bias=True) + (BNLayer): BatchNorm1d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True) + (attention): TA( + (relu): ReLU(inplace=True) + (ta): TimeAttention( + (avg_pool): AdaptiveAvgPool1d(output_size=1) + (max_pool): AdaptiveMaxPool1d(output_size=1) + (sharedMLP): Sequential( + (0): Conv1d(60, 12, kernel_size=(1,), stride=(1,), bias=False) + (1): ReLU() + (2): Conv1d(12, 60, kernel_size=(1,), stride=(1,), bias=False) + ) + (sigmoid): Sigmoid() + ) + ) + ) + (FC1): AttLIF( + (network): Sequential( + (IF): IFCell() + ) + (linear): Linear(in_features=256, out_features=11, bias=True) + (BNLayer): BatchNorm1d(11, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True) + (attention): TA( + (relu): ReLU(inplace=True) + (ta): TimeAttention( + (avg_pool): AdaptiveAvgPool1d(output_size=1) + (max_pool): AdaptiveMaxPool1d(output_size=1) + (sharedMLP): Sequential( + (0): Conv1d(60, 12, kernel_size=(1,), stride=(1,), bias=False) + (1): ReLU() + (2): Conv1d(12, 60, kernel_size=(1,), stride=(1,), bias=False) + ) + (sigmoid): Sigmoid() + ) + ) + ) + ) +) +{'Total': 2340535, 'Trainable': 2340535} +epoch: 1 +dt: 15 +T: 60 +Tarin loss:0.08176 +Train acc: 26.723 +Test loss:0.00909 +Test acc: 8.333 +Saving.. +beat acc: 8.333333333333334 +epoch: 2 +dt: 15 +T: 60 +Tarin loss:0.07938 +Train acc: 44.000 +Test loss:0.00909 +Test acc: 8.333 +beat acc: 8.333333333333334 +epoch: 3 +dt: 15 +T: 60 +Tarin loss:0.07831 +Train acc: 47.745 +Test loss:0.00909 +Test acc: 8.333 +beat acc: 8.333333333333334 +epoch: 4 +dt: 15 +T: 60 +Tarin loss:0.07017 +Train acc: 48.340 +Test loss:0.00903 +Test acc: 9.722 +Saving.. +beat acc: 9.722222222222221 +epoch: 5 +dt: 15 +T: 60 +Tarin loss:0.06250 +Train acc: 58.128 +Test loss:0.00730 +Test acc: 38.542 +Saving.. +beat acc: 38.541666666666664 +epoch: 6 +dt: 15 +T: 60 +Tarin loss:0.06048 +Train acc: 60.511 +Test loss:0.00617 +Test acc: 51.736 +Saving.. +beat acc: 51.736111111111114 +epoch: 7 +dt: 15 +T: 60 +Tarin loss:0.05890 +Train acc: 62.213 +Test loss:0.00583 +Test acc: 55.208 +Saving.. +beat acc: 55.208333333333336 +epoch: 8 +dt: 15 +T: 60 +Tarin loss:0.05595 +Train acc: 67.149 +Test loss:0.00564 +Test acc: 59.028 +Saving.. +beat acc: 59.02777777777778 +epoch: 9 +dt: 15 +T: 60 +Tarin loss:0.05398 +Train acc: 67.830 +Test loss:0.00536 +Test acc: 63.194 +Saving.. +beat acc: 63.19444444444444 +epoch: 10 +dt: 15 +T: 60 +Tarin loss:0.05238 +Train acc: 71.064 +Test loss:0.00527 +Test acc: 65.625 +Saving.. +beat acc: 65.625 +epoch: 11 +dt: 15 +T: 60 +Tarin loss:0.04975 +Train acc: 72.766 +Test loss:0.00499 +Test acc: 71.528 +Saving.. +beat acc: 71.52777777777777 +epoch: 12 +dt: 15 +T: 60 +Tarin loss:0.04758 +Train acc: 75.489 +Test loss:0.00492 +Test acc: 71.875 +Saving.. +beat acc: 71.875 +epoch: 13 +dt: 15 +T: 60 +Tarin loss:0.04662 +Train acc: 78.383 +Test loss:0.00475 +Test acc: 71.875 +beat acc: 71.875 +epoch: 14 +dt: 15 +T: 60 +Tarin loss:0.04344 +Train acc: 81.702 +Test loss:0.00466 +Test acc: 77.083 +Saving.. +beat acc: 77.08333333333333 +epoch: 15 +dt: 15 +T: 60 +Tarin loss:0.04177 +Train acc: 81.787 +Test loss:0.00438 +Test acc: 80.903 +Saving.. 
+beat acc: 80.90277777777777 +epoch: 16 +dt: 15 +T: 60 +Tarin loss:0.04096 +Train acc: 83.574 +Test loss:0.00423 +Test acc: 82.986 +Saving.. +beat acc: 82.98611111111111 +epoch: 17 +dt: 15 +T: 60 +Tarin loss:0.03830 +Train acc: 84.936 +Test loss:0.00415 +Test acc: 81.250 +beat acc: 82.98611111111111 +epoch: 18 +dt: 15 +T: 60 +Tarin loss:0.03620 +Train acc: 86.043 +Test loss:0.00404 +Test acc: 82.292 +beat acc: 82.98611111111111 +epoch: 19 +dt: 15 +T: 60 +Tarin loss:0.03446 +Train acc: 87.149 +Test loss:0.00389 +Test acc: 84.028 +Saving.. +beat acc: 84.02777777777777 +epoch: 20 +dt: 15 +T: 60 +Tarin loss:0.03324 +Train acc: 87.064 +Test loss:0.00398 +Test acc: 81.597 +beat acc: 84.02777777777777 +epoch: 21 +dt: 15 +T: 60 +Tarin loss:0.03104 +Train acc: 88.936 +Test loss:0.00360 +Test acc: 85.069 +Saving.. +beat acc: 85.06944444444444 +epoch: 22 +dt: 15 +T: 60 +Tarin loss:0.03071 +Train acc: 89.957 +Test loss:0.00353 +Test acc: 85.764 +Saving.. +beat acc: 85.76388888888889 +epoch: 23 +dt: 15 +T: 60 +Tarin loss:0.02893 +Train acc: 88.851 +Test loss:0.00341 +Test acc: 85.764 +beat acc: 85.76388888888889 +epoch: 24 +dt: 15 +T: 60 +Tarin loss:0.02699 +Train acc: 91.234 +Test loss:0.00325 +Test acc: 87.500 +Saving.. +beat acc: 87.5 +epoch: 25 +dt: 15 +T: 60 +Tarin loss:0.02679 +Train acc: 90.383 +Test loss:0.00314 +Test acc: 86.111 +beat acc: 87.5 +epoch: 26 +dt: 15 +T: 60 +Tarin loss:0.02565 +Train acc: 91.574 +Test loss:0.00304 +Test acc: 87.847 +Saving.. +beat acc: 87.84722222222223 +epoch: 27 +dt: 15 +T: 60 +Tarin loss:0.02524 +Train acc: 92.936 +Test loss:0.00302 +Test acc: 86.111 +beat acc: 87.84722222222223 +epoch: 28 +dt: 15 +T: 60 +Tarin loss:0.02489 +Train acc: 91.319 +Test loss:0.00300 +Test acc: 89.583 +Saving.. +beat acc: 89.58333333333333 +epoch: 29 +dt: 15 +T: 60 +Tarin loss:0.02308 +Train acc: 92.085 +Test loss:0.00290 +Test acc: 88.194 +beat acc: 89.58333333333333 +epoch: 30 +dt: 15 +T: 60 +Tarin loss:0.02213 +Train acc: 92.936 +Test loss:0.00275 +Test acc: 88.194 +beat acc: 89.58333333333333 +epoch: 31 +dt: 15 +T: 60 +Tarin loss:0.02068 +Train acc: 92.766 +Test loss:0.00265 +Test acc: 87.500 +beat acc: 89.58333333333333 +epoch: 32 +dt: 15 +T: 60 +Tarin loss:0.02033 +Train acc: 93.277 +Test loss:0.00253 +Test acc: 89.583 +beat acc: 89.58333333333333 +epoch: 33 +dt: 15 +T: 60 +Tarin loss:0.01932 +Train acc: 94.553 +Test loss:0.00258 +Test acc: 88.542 +beat acc: 89.58333333333333 +epoch: 34 +dt: 15 +T: 60 +Tarin loss:0.01868 +Train acc: 93.532 +Test loss:0.00248 +Test acc: 87.847 +beat acc: 89.58333333333333 +epoch: 35 +dt: 15 +T: 60 +Tarin loss:0.01751 +Train acc: 95.404 +Test loss:0.00239 +Test acc: 89.236 +beat acc: 89.58333333333333 +epoch: 36 +dt: 15 +T: 60 +Tarin loss:0.01831 +Train acc: 94.043 +Test loss:0.00243 +Test acc: 89.236 +beat acc: 89.58333333333333 +epoch: 37 +dt: 15 +T: 60 +Tarin loss:0.01850 +Train acc: 94.383 +Test loss:0.00237 +Test acc: 89.236 +beat acc: 89.58333333333333 +epoch: 38 +dt: 15 +T: 60 +Tarin loss:0.01697 +Train acc: 94.894 +Test loss:0.00230 +Test acc: 90.625 +Saving.. 
+beat acc: 90.625 +epoch: 39 +dt: 15 +T: 60 +Tarin loss:0.01615 +Train acc: 95.234 +Test loss:0.00231 +Test acc: 88.889 +beat acc: 90.625 +epoch: 40 +dt: 15 +T: 60 +Tarin loss:0.01727 +Train acc: 95.234 +Test loss:0.00247 +Test acc: 86.111 +beat acc: 90.625 +epoch: 41 +dt: 15 +T: 60 +Tarin loss:0.01643 +Train acc: 95.830 +Test loss:0.00224 +Test acc: 88.542 +beat acc: 90.625 +epoch: 42 +dt: 15 +T: 60 +Tarin loss:0.01581 +Train acc: 95.915 +Test loss:0.00214 +Test acc: 89.931 +beat acc: 90.625 +epoch: 43 +dt: 15 +T: 60 +Tarin loss:0.01536 +Train acc: 95.830 +Test loss:0.00209 +Test acc: 90.972 +Saving.. +beat acc: 90.97222222222223 +epoch: 44 +dt: 15 +T: 60 +Tarin loss:0.01440 +Train acc: 95.574 +Test loss:0.00216 +Test acc: 89.931 +beat acc: 90.97222222222223 +epoch: 45 +dt: 15 +T: 60 +Tarin loss:0.01300 +Train acc: 97.106 +Test loss:0.00214 +Test acc: 88.889 +beat acc: 90.97222222222223 +epoch: 46 +dt: 15 +T: 60 +Tarin loss:0.01495 +Train acc: 96.426 +Test loss:0.00195 +Test acc: 90.625 +beat acc: 90.97222222222223 +epoch: 47 +dt: 15 +T: 60 +Tarin loss:0.01390 +Train acc: 95.915 +Test loss:0.00193 +Test acc: 90.972 +beat acc: 90.97222222222223 +epoch: 48 +dt: 15 +T: 60 +Tarin loss:0.01175 +Train acc: 97.787 +Test loss:0.00191 +Test acc: 91.667 +Saving.. +beat acc: 91.66666666666667 +epoch: 49 +dt: 15 +T: 60 +Tarin loss:0.01386 +Train acc: 96.000 +Test loss:0.00194 +Test acc: 90.278 +beat acc: 91.66666666666667 +epoch: 50 +dt: 15 +T: 60 +Tarin loss:0.01249 +Train acc: 97.191 +Test loss:0.00191 +Test acc: 91.667 +beat acc: 91.66666666666667 +epoch: 51 +dt: 15 +T: 60 +Tarin loss:0.01308 +Train acc: 96.511 +Test loss:0.00201 +Test acc: 90.625 +beat acc: 91.66666666666667 +epoch: 52 +dt: 15 +T: 60 +Tarin loss:0.01259 +Train acc: 97.277 +Test loss:0.00181 +Test acc: 92.014 +Saving.. +beat acc: 92.01388888888889 +epoch: 53 +dt: 15 +T: 60 +Tarin loss:0.01223 +Train acc: 96.936 +Test loss:0.00186 +Test acc: 91.319 +beat acc: 92.01388888888889 +epoch: 54 +dt: 15 +T: 60 +Tarin loss:0.01224 +Train acc: 96.851 +Epoch 54: reducing learning rate of group 0 to 1.0000e-05. +Test loss:0.00183 +Test acc: 91.319 +beat acc: 92.01388888888889 +epoch: 55 +dt: 15 +T: 60 +Tarin loss:0.01124 +Train acc: 97.957 +Test loss:0.00179 +Test acc: 92.014 +beat acc: 92.01388888888889 +epoch: 56 +dt: 15 +T: 60 +Tarin loss:0.01103 +Train acc: 96.936 +Test loss:0.00173 +Test acc: 93.403 +Saving.. +beat acc: 93.40277777777777 +epoch: 57 +dt: 15 +T: 60 +Tarin loss:0.01085 +Train acc: 96.681 +Test loss:0.00174 +Test acc: 92.708 +beat acc: 93.40277777777777 +epoch: 58 +dt: 15 +T: 60 +Tarin loss:0.01052 +Train acc: 97.702 +Test loss:0.00174 +Test acc: 92.361 +beat acc: 93.40277777777777 +epoch: 59 +dt: 15 +T: 60 +Tarin loss:0.01004 +Train acc: 98.298 +Test loss:0.00173 +Test acc: 93.403 +beat acc: 93.40277777777777 +epoch: 60 +dt: 15 +T: 60 +Tarin loss:0.01165 +Train acc: 97.021 +Test loss:0.00169 +Test acc: 93.750 +Saving.. 
+beat acc: 93.75 +epoch: 61 +dt: 15 +T: 60 +Tarin loss:0.01002 +Train acc: 97.702 +Test loss:0.00167 +Test acc: 92.708 +beat acc: 93.75 +epoch: 62 +dt: 15 +T: 60 +Tarin loss:0.01033 +Train acc: 98.468 +Test loss:0.00167 +Test acc: 92.708 +beat acc: 93.75 +epoch: 63 +dt: 15 +T: 60 +Tarin loss:0.00924 +Train acc: 98.383 +Test loss:0.00173 +Test acc: 93.403 +beat acc: 93.75 +epoch: 64 +dt: 15 +T: 60 +Tarin loss:0.01038 +Train acc: 98.298 +Test loss:0.00175 +Test acc: 93.056 +beat acc: 93.75 +epoch: 65 +dt: 15 +T: 60 +Tarin loss:0.01092 +Train acc: 97.957 +Test loss:0.00172 +Test acc: 91.667 +beat acc: 93.75 +epoch: 66 +dt: 15 +T: 60 +Tarin loss:0.01079 +Train acc: 97.532 +Test loss:0.00174 +Test acc: 92.014 +beat acc: 93.75 +epoch: 67 +dt: 15 +T: 60 +Tarin loss:0.01086 +Train acc: 98.128 +Test loss:0.00178 +Test acc: 91.319 +beat acc: 93.75 +epoch: 68 +dt: 15 +T: 60 +Tarin loss:0.01125 +Train acc: 97.362 +Test loss:0.00170 +Test acc: 93.403 +beat acc: 93.75 +epoch: 69 +dt: 15 +T: 60 +Tarin loss:0.01118 +Train acc: 97.106 +Epoch 69: reducing learning rate of group 0 to 1.0000e-06. +Test loss:0.00170 +Test acc: 92.708 +beat acc: 93.75 +epoch: 70 +dt: 15 +T: 60 +Tarin loss:0.01165 +Train acc: 97.702 +Test loss:0.00169 +Test acc: 93.056 +beat acc: 93.75 +epoch: 71 +dt: 15 +T: 60 +Tarin loss:0.01062 +Train acc: 97.447 +Test loss:0.00167 +Test acc: 93.056 +beat acc: 93.75 +epoch: 72 +dt: 15 +T: 60 +Tarin loss:0.01015 +Train acc: 97.957 +Test loss:0.00174 +Test acc: 92.708 +beat acc: 93.75 +epoch: 73 +dt: 15 +T: 60 +Tarin loss:0.01015 +Train acc: 97.447 +Test loss:0.00177 +Test acc: 93.056 +beat acc: 93.75 +epoch: 74 +dt: 15 +T: 60 +Tarin loss:0.00961 +Train acc: 98.298 +Test loss:0.00172 +Test acc: 92.708 +beat acc: 93.75 +epoch: 75 +dt: 15 +T: 60 +Tarin loss:0.01068 +Train acc: 98.043 +Epoch 75: reducing learning rate of group 0 to 1.0000e-07. +Test loss:0.00170 +Test acc: 92.361 +beat acc: 93.75 +epoch: 76 +dt: 15 +T: 60 +Tarin loss:0.01058 +Train acc: 97.957 +Test loss:0.00168 +Test acc: 92.361 +beat acc: 93.75 +epoch: 77 +dt: 15 +T: 60 +Tarin loss:0.01046 +Train acc: 97.957 +Test loss:0.00170 +Test acc: 92.014 +beat acc: 93.75 +epoch: 78 +dt: 15 +T: 60 +Tarin loss:0.01037 +Train acc: 97.532 +Test loss:0.00173 +Test acc: 92.361 +beat acc: 93.75 +epoch: 79 +dt: 15 +T: 60 +Tarin loss:0.01091 +Train acc: 97.617 +Test loss:0.00171 +Test acc: 92.708 +beat acc: 93.75 +epoch: 80 +dt: 15 +T: 60 +Tarin loss:0.01119 +Train acc: 97.617 +Test loss:0.00169 +Test acc: 92.014 +beat acc: 93.75 +epoch: 81 +dt: 15 +T: 60 +Tarin loss:0.01222 +Train acc: 98.298 +Epoch 81: reducing learning rate of group 0 to 1.0000e-08. 
+Test loss:0.00171 +Test acc: 92.708 +beat acc: 93.75 +epoch: 82 +dt: 15 +T: 60 +Tarin loss:0.01052 +Train acc: 97.532 +Test loss:0.00166 +Test acc: 93.056 +beat acc: 93.75 +epoch: 83 +dt: 15 +T: 60 +Tarin loss:0.01084 +Train acc: 97.447 +Test loss:0.00169 +Test acc: 93.750 +beat acc: 93.75 +epoch: 84 +dt: 15 +T: 60 +Tarin loss:0.01067 +Train acc: 98.128 +Test loss:0.00168 +Test acc: 92.708 +beat acc: 93.75 +epoch: 85 +dt: 15 +T: 60 +Tarin loss:0.01010 +Train acc: 98.043 +Test loss:0.00174 +Test acc: 92.708 +beat acc: 93.75 +epoch: 86 +dt: 15 +T: 60 +Tarin loss:0.01022 +Train acc: 97.617 +Test loss:0.00171 +Test acc: 92.361 +beat acc: 93.75 +epoch: 87 +dt: 15 +T: 60 +Tarin loss:0.01096 +Train acc: 97.532 +Test loss:0.00170 +Test acc: 92.014 +beat acc: 93.75 +epoch: 88 +dt: 15 +T: 60 +Tarin loss:0.01165 +Train acc: 97.362 +Test loss:0.00171 +Test acc: 93.056 +beat acc: 93.75 +epoch: 89 +dt: 15 +T: 60 +Tarin loss:0.01007 +Train acc: 98.128 +Test loss:0.00171 +Test acc: 93.056 +beat acc: 93.75 +epoch: 90 +dt: 15 +T: 60 +Tarin loss:0.01033 +Train acc: 97.362 +Test loss:0.00174 +Test acc: 91.667 +beat acc: 93.75 +epoch: 91 +dt: 15 +T: 60 +Tarin loss:0.01070 +Train acc: 98.043 +Test loss:0.00172 +Test acc: 92.361 +beat acc: 93.75 +epoch: 92 +dt: 15 +T: 60 +Tarin loss:0.01067 +Train acc: 97.277 +Test loss:0.00170 +Test acc: 91.667 +beat acc: 93.75 +epoch: 93 +dt: 15 +T: 60 +Tarin loss:0.01141 +Train acc: 97.447 +Test loss:0.00171 +Test acc: 92.014 +beat acc: 93.75 +epoch: 94 +dt: 15 +T: 60 +Tarin loss:0.01030 +Train acc: 97.277 +Test loss:0.00166 +Test acc: 92.014 +beat acc: 93.75 +epoch: 95 +dt: 15 +T: 60 +Tarin loss:0.01050 +Train acc: 97.277 +Test loss:0.00168 +Test acc: 93.056 +beat acc: 93.75 +epoch: 96 +dt: 15 +T: 60 +Tarin loss:0.01092 +Train acc: 96.851 +Test loss:0.00171 +Test acc: 92.361 +beat acc: 93.75 +epoch: 97 +dt: 15 +T: 60 +Tarin loss:0.01046 +Train acc: 97.447 +Test loss:0.00171 +Test acc: 92.361 +beat acc: 93.75 +epoch: 98 +dt: 15 +T: 60 +Tarin loss:0.01010 +Train acc: 97.277 +Test loss:0.00171 +Test acc: 92.361 +beat acc: 93.75 +epoch: 99 +dt: 15 +T: 60 +Tarin loss:0.01054 +Train acc: 97.617 +Test loss:0.00167 +Test acc: 93.403 +beat acc: 93.75 +epoch: 100 +dt: 15 +T: 60 +Tarin loss:0.01067 +Train acc: 97.447 +Test loss:0.00166 +Test acc: 93.403 +beat acc: 93.75 +epoch: 101 +dt: 15 +T: 60 +Tarin loss:0.01106 +Train acc: 97.957 +Test loss:0.00168 +Test acc: 92.708 +beat acc: 93.75 +epoch: 102 +dt: 15 +T: 60 +Tarin loss:0.01061 +Train acc: 97.362 +Test loss:0.00179 +Test acc: 91.319 +beat acc: 93.75 +epoch: 103 +dt: 15 +T: 60 +Tarin loss:0.01098 +Train acc: 97.957 +Test loss:0.00181 +Test acc: 91.667 +beat acc: 93.75 +epoch: 104 +dt: 15 +T: 60 +Tarin loss:0.00969 +Train acc: 97.532 +Test loss:0.00175 +Test acc: 92.361 +beat acc: 93.75 +epoch: 105 +dt: 15 +T: 60 +Tarin loss:0.01052 +Train acc: 98.298 +Test loss:0.00170 +Test acc: 93.403 +beat acc: 93.75 +epoch: 106 +dt: 15 +T: 60 +Tarin loss:0.00999 +Train acc: 97.617 +Test loss:0.00171 +Test acc: 92.708 +beat acc: 93.75 +epoch: 107 +dt: 15 +T: 60 +Tarin loss:0.00948 +Train acc: 98.298 +Test loss:0.00167 +Test acc: 92.014 +beat acc: 93.75 +epoch: 108 +dt: 15 +T: 60 +Tarin loss:0.00978 +Train acc: 97.617 +Test loss:0.00168 +Test acc: 93.056 +beat acc: 93.75 +epoch: 109 +dt: 15 +T: 60 +Tarin loss:0.01070 +Train acc: 97.106 +Test loss:0.00169 +Test acc: 92.361 +beat acc: 93.75 +epoch: 110 +dt: 15 +T: 60 +Tarin loss:0.01095 +Train acc: 97.617 +Test loss:0.00172 +Test acc: 92.361 +beat acc: 93.75 +epoch: 111 +dt: 15 +T: 60 +Tarin 
loss:0.01107 +Train acc: 97.872 +Test loss:0.00171 +Test acc: 93.403 +beat acc: 93.75 +epoch: 112 +dt: 15 +T: 60 +Tarin loss:0.00996 +Train acc: 97.447 +Test loss:0.00171 +Test acc: 92.014 +beat acc: 93.75 +epoch: 113 +dt: 15 +T: 60 +Tarin loss:0.01151 +Train acc: 97.617 +Test loss:0.00172 +Test acc: 92.014 +beat acc: 93.75 +epoch: 114 +dt: 15 +T: 60 +Tarin loss:0.01059 +Train acc: 97.532 +Test loss:0.00170 +Test acc: 93.403 +beat acc: 93.75 +epoch: 115 +dt: 15 +T: 60 +Tarin loss:0.01152 +Train acc: 97.872 +Test loss:0.00174 +Test acc: 93.403 +beat acc: 93.75 +epoch: 116 +dt: 15 +T: 60 +Tarin loss:0.01098 +Train acc: 97.191 +Test loss:0.00178 +Test acc: 90.972 +beat acc: 93.75 +epoch: 117 +dt: 15 +T: 60 +Tarin loss:0.01098 +Train acc: 97.277 +Test loss:0.00172 +Test acc: 94.097 +Saving.. +beat acc: 94.09722222222223 +epoch: 118 +dt: 15 +T: 60 +Tarin loss:0.00988 +Train acc: 98.213 +Test loss:0.00174 +Test acc: 92.708 +beat acc: 94.09722222222223 +epoch: 119 +dt: 15 +T: 60 +Tarin loss:0.01034 +Train acc: 97.532 +Test loss:0.00171 +Test acc: 92.361 +beat acc: 94.09722222222223 +epoch: 120 +dt: 15 +T: 60 +Tarin loss:0.01053 +Train acc: 98.128 +Test loss:0.00168 +Test acc: 93.056 +beat acc: 94.09722222222223 +epoch: 121 +dt: 15 +T: 60 +Tarin loss:0.01029 +Train acc: 97.191 +Test loss:0.00170 +Test acc: 92.361 +beat acc: 94.09722222222223 +epoch: 122 +dt: 15 +T: 60 +Tarin loss:0.01115 +Train acc: 97.021 +Test loss:0.00169 +Test acc: 92.708 +beat acc: 94.09722222222223 +epoch: 123 +dt: 15 +T: 60 +Tarin loss:0.00964 +Train acc: 97.702 +Test loss:0.00168 +Test acc: 93.056 +beat acc: 94.09722222222223 +epoch: 124 +dt: 15 +T: 60 +Tarin loss:0.00967 +Train acc: 97.957 +Test loss:0.00166 +Test acc: 92.708 +beat acc: 94.09722222222223 +epoch: 125 +dt: 15 +T: 60 +Tarin loss:0.01107 +Train acc: 97.872 +Test loss:0.00167 +Test acc: 92.708 +beat acc: 94.09722222222223 +epoch: 126 +dt: 15 +T: 60 +Tarin loss:0.01129 +Train acc: 98.043 +Test loss:0.00172 +Test acc: 93.056 +beat acc: 94.09722222222223 +epoch: 127 +dt: 15 +T: 60 +Tarin loss:0.01165 +Train acc: 97.447 +Test loss:0.00171 +Test acc: 92.361 +beat acc: 94.09722222222223 +epoch: 128 +dt: 15 +T: 60 +Tarin loss:0.01117 +Train acc: 97.787 +Test loss:0.00176 +Test acc: 92.708 +beat acc: 94.09722222222223 +epoch: 129 +dt: 15 +T: 60 +Tarin loss:0.01030 +Train acc: 97.872 +Test loss:0.00172 +Test acc: 92.014 +beat acc: 94.09722222222223 +epoch: 130 +dt: 15 +T: 60 +Tarin loss:0.01071 +Train acc: 97.532 +Test loss:0.00165 +Test acc: 92.361 +beat acc: 94.09722222222223 +epoch: 131 +dt: 15 +T: 60 +Tarin loss:0.01043 +Train acc: 97.957 +Test loss:0.00170 +Test acc: 92.708 +beat acc: 94.09722222222223 +epoch: 132 +dt: 15 +T: 60 +Tarin loss:0.00995 +Train acc: 97.787 +Test loss:0.00171 +Test acc: 92.014 +beat acc: 94.09722222222223 +epoch: 133 +dt: 15 +T: 60 +Tarin loss:0.01040 +Train acc: 97.191 +Test loss:0.00169 +Test acc: 93.403 +beat acc: 94.09722222222223 +epoch: 134 +dt: 15 +T: 60 +Tarin loss:0.01105 +Train acc: 97.191 +Test loss:0.00168 +Test acc: 92.014 +beat acc: 94.09722222222223 +epoch: 135 +dt: 15 +T: 60 +Tarin loss:0.01016 +Train acc: 97.106 +Test loss:0.00167 +Test acc: 92.708 +beat acc: 94.09722222222223 +epoch: 136 +dt: 15 +T: 60 +Tarin loss:0.01054 +Train acc: 97.702 +Test loss:0.00172 +Test acc: 92.014 +beat acc: 94.09722222222223 +epoch: 137 +dt: 15 +T: 60 +Tarin loss:0.01071 +Train acc: 97.447 +Test loss:0.00170 +Test acc: 91.667 +beat acc: 94.09722222222223 +epoch: 138 +dt: 15 +T: 60 +Tarin loss:0.01101 +Train acc: 97.787 +Test loss:0.00166 
+Test acc: 92.361 +beat acc: 94.09722222222223 +epoch: 139 +dt: 15 +T: 60 +Tarin loss:0.01103 +Train acc: 98.298 +Test loss:0.00164 +Test acc: 92.014 +beat acc: 94.09722222222223 +epoch: 140 +dt: 15 +T: 60 +Tarin loss:0.00913 +Train acc: 98.298 +Test loss:0.00166 +Test acc: 91.667 +beat acc: 94.09722222222223 +epoch: 141 +dt: 15 +T: 60 +Tarin loss:0.01078 +Train acc: 96.851 +Test loss:0.00167 +Test acc: 93.403 +beat acc: 94.09722222222223 +epoch: 142 +dt: 15 +T: 60 +Tarin loss:0.01107 +Train acc: 97.277 +Test loss:0.00168 +Test acc: 92.014 +beat acc: 94.09722222222223 +epoch: 143 +dt: 15 +T: 60 +Tarin loss:0.01025 +Train acc: 97.617 +Test loss:0.00171 +Test acc: 93.056 +beat acc: 94.09722222222223 +epoch: 144 +dt: 15 +T: 60 +Tarin loss:0.00986 +Train acc: 97.702 +Test loss:0.00168 +Test acc: 93.056 +beat acc: 94.09722222222223 +epoch: 145 +dt: 15 +T: 60 +Tarin loss:0.01091 +Train acc: 97.617 +Test loss:0.00169 +Test acc: 93.750 +beat acc: 94.09722222222223 +epoch: 146 +dt: 15 +T: 60 +Tarin loss:0.01051 +Train acc: 97.872 +Test loss:0.00173 +Test acc: 92.708 +beat acc: 94.09722222222223 +epoch: 147 +dt: 15 +T: 60 +Tarin loss:0.01002 +Train acc: 97.957 +Test loss:0.00171 +Test acc: 92.361 +beat acc: 94.09722222222223 +epoch: 148 +dt: 15 +T: 60 +Tarin loss:0.01098 +Train acc: 97.277 +Test loss:0.00171 +Test acc: 93.056 +beat acc: 94.09722222222223 +epoch: 149 +dt: 15 +T: 60 +Tarin loss:0.01071 +Train acc: 98.213 +Test loss:0.00168 +Test acc: 92.708 +beat acc: 94.09722222222223 +epoch: 150 +dt: 15 +T: 60 +Tarin loss:0.01005 +Train acc: 97.277 +Test loss:0.00172 +Test acc: 93.056 +beat acc: 94.09722222222223 +epoch: 151 +dt: 15 +T: 60 +Tarin loss:0.00887 +Train acc: 97.787 +Test loss:0.00174 +Test acc: 93.056 +beat acc: 94.09722222222223 +epoch: 152 +dt: 15 +T: 60 +Tarin loss:0.00959 +Train acc: 97.872 +Test loss:0.00173 +Test acc: 92.014 +beat acc: 94.09722222222223 +epoch: 153 +dt: 15 +T: 60 +Tarin loss:0.01093 +Train acc: 98.383 +Test loss:0.00167 +Test acc: 92.708 +beat acc: 94.09722222222223 +epoch: 154 +dt: 15 +T: 60 +Tarin loss:0.01089 +Train acc: 97.617 +Test loss:0.00169 +Test acc: 92.361 +beat acc: 94.09722222222223 +epoch: 155 +dt: 15 +T: 60 +Tarin loss:0.00983 +Train acc: 97.957 +Test loss:0.00175 +Test acc: 92.361 +beat acc: 94.09722222222223 +epoch: 156 +dt: 15 +T: 60 +Tarin loss:0.01081 +Train acc: 97.362 +Test loss:0.00171 +Test acc: 92.708 +beat acc: 94.09722222222223 +epoch: 157 +dt: 15 +T: 60 +Tarin loss:0.01112 +Train acc: 97.106 +Test loss:0.00174 +Test acc: 92.361 +beat acc: 94.09722222222223 +epoch: 158 +dt: 15 +T: 60 +Tarin loss:0.01088 +Train acc: 98.128 +Test loss:0.00173 +Test acc: 91.667 +beat acc: 94.09722222222223 +epoch: 159 +dt: 15 +T: 60 +Tarin loss:0.01052 +Train acc: 97.532 +Test loss:0.00174 +Test acc: 92.014 +beat acc: 94.09722222222223 +epoch: 160 +dt: 15 +T: 60 +Tarin loss:0.01003 +Train acc: 98.298 +Test loss:0.00167 +Test acc: 92.014 +beat acc: 94.09722222222223 +epoch: 161 +dt: 15 +T: 60 +Tarin loss:0.01039 +Train acc: 97.447 +Test loss:0.00178 +Test acc: 90.972 +beat acc: 94.09722222222223 +epoch: 162 +dt: 15 +T: 60 +Tarin loss:0.01133 +Train acc: 97.277 +Test loss:0.00175 +Test acc: 92.014 +beat acc: 94.09722222222223 +epoch: 163 +dt: 15 +T: 60 +Tarin loss:0.01012 +Train acc: 98.128 +Test loss:0.00173 +Test acc: 91.319 +beat acc: 94.09722222222223 +epoch: 164 +dt: 15 +T: 60 +Tarin loss:0.01121 +Train acc: 97.447 +Test loss:0.00169 +Test acc: 92.708 +beat acc: 94.09722222222223 +epoch: 165 +dt: 15 +T: 60 +Tarin loss:0.01013 +Train acc: 97.532 +Test 
loss:0.00171 +Test acc: 92.361 +beat acc: 94.09722222222223 +epoch: 166 +dt: 15 +T: 60 +Tarin loss:0.00954 +Train acc: 97.447 +Test loss:0.00176 +Test acc: 91.667 +beat acc: 94.09722222222223 +epoch: 167 +dt: 15 +T: 60 +Tarin loss:0.00938 +Train acc: 98.383 +Test loss:0.00174 +Test acc: 92.361 +beat acc: 94.09722222222223 +epoch: 168 +dt: 15 +T: 60 +Tarin loss:0.00961 +Train acc: 97.277 +Test loss:0.00169 +Test acc: 93.056 +beat acc: 94.09722222222223 +epoch: 169 +dt: 15 +T: 60 +Tarin loss:0.01046 +Train acc: 98.043 +Test loss:0.00168 +Test acc: 92.014 +beat acc: 94.09722222222223 +epoch: 170 +dt: 15 +T: 60 +Tarin loss:0.01062 +Train acc: 97.957 +Test loss:0.00168 +Test acc: 93.056 +beat acc: 94.09722222222223 +epoch: 171 +dt: 15 +T: 60 +Tarin loss:0.01104 +Train acc: 96.681 +Test loss:0.00172 +Test acc: 93.056 +beat acc: 94.09722222222223 +epoch: 172 +dt: 15 +T: 60 +Tarin loss:0.01089 +Train acc: 97.957 +Test loss:0.00173 +Test acc: 92.708 +beat acc: 94.09722222222223 +epoch: 173 +dt: 15 +T: 60 +Tarin loss:0.01113 +Train acc: 97.532 +Test loss:0.00171 +Test acc: 90.972 +beat acc: 94.09722222222223 +epoch: 174 +dt: 15 +T: 60 +Tarin loss:0.01093 +Train acc: 97.447 +Test loss:0.00170 +Test acc: 92.708 +beat acc: 94.09722222222223 +epoch: 175 +dt: 15 +T: 60 +Tarin loss:0.01037 +Train acc: 97.787 +Test loss:0.00168 +Test acc: 94.097 +beat acc: 94.09722222222223 +epoch: 176 +dt: 15 +T: 60 +Tarin loss:0.00960 +Train acc: 98.383 +Test loss:0.00170 +Test acc: 93.403 +beat acc: 94.09722222222223 +epoch: 177 +dt: 15 +T: 60 +Tarin loss:0.01103 +Train acc: 97.702 +Test loss:0.00168 +Test acc: 91.667 +beat acc: 94.09722222222223 +epoch: 178 +dt: 15 +T: 60 +Tarin loss:0.01087 +Train acc: 97.532 +Test loss:0.00177 +Test acc: 90.278 +beat acc: 94.09722222222223 +epoch: 179 +dt: 15 +T: 60 +Tarin loss:0.01107 +Train acc: 96.766 +Test loss:0.00177 +Test acc: 90.972 +beat acc: 94.09722222222223 +epoch: 180 +dt: 15 +T: 60 +Tarin loss:0.01039 +Train acc: 97.787 +Test loss:0.00172 +Test acc: 92.014 +beat acc: 94.09722222222223 +epoch: 181 +dt: 15 +T: 60 +Tarin loss:0.00906 +Train acc: 98.043 +Test loss:0.00170 +Test acc: 92.361 +beat acc: 94.09722222222223 +epoch: 182 +dt: 15 +T: 60 +Tarin loss:0.00967 +Train acc: 97.787 +Test loss:0.00167 +Test acc: 92.361 +beat acc: 94.09722222222223 +epoch: 183 +dt: 15 +T: 60 +Tarin loss:0.01174 +Train acc: 97.532 +Test loss:0.00167 +Test acc: 93.403 +beat acc: 94.09722222222223 +epoch: 184 +dt: 15 +T: 60 +Tarin loss:0.00969 +Train acc: 98.043 +Test loss:0.00166 +Test acc: 93.056 +beat acc: 94.09722222222223 +epoch: 185 +dt: 15 +T: 60 +Tarin loss:0.00910 +Train acc: 97.702 +Test loss:0.00168 +Test acc: 93.750 +beat acc: 94.09722222222223 +epoch: 186 +dt: 15 +T: 60 +Tarin loss:0.01080 +Train acc: 97.787 +Test loss:0.00173 +Test acc: 91.319 +beat acc: 94.09722222222223 +epoch: 187 +dt: 15 +T: 60 +Tarin loss:0.01184 +Train acc: 97.362 +Test loss:0.00166 +Test acc: 92.014 +beat acc: 94.09722222222223 +epoch: 188 +dt: 15 +T: 60 +Tarin loss:0.00977 +Train acc: 98.128 +Test loss:0.00168 +Test acc: 93.403 +beat acc: 94.09722222222223 +epoch: 189 +dt: 15 +T: 60 +Tarin loss:0.01142 +Train acc: 97.617 +Test loss:0.00171 +Test acc: 93.056 +beat acc: 94.09722222222223 +epoch: 190 +dt: 15 +T: 60 +Tarin loss:0.00972 +Train acc: 97.957 +Test loss:0.00168 +Test acc: 92.014 +beat acc: 94.09722222222223 +epoch: 191 +dt: 15 +T: 60 +Tarin loss:0.01067 +Train acc: 97.787 +Test loss:0.00171 +Test acc: 92.361 +beat acc: 94.09722222222223 +epoch: 192 +dt: 15 +T: 60 +Tarin loss:0.01101 +Train acc: 
97.447 +Test loss:0.00168 +Test acc: 92.014 +beat acc: 94.09722222222223 +epoch: 193 +dt: 15 +T: 60 +Tarin loss:0.01047 +Train acc: 97.872 +Test loss:0.00172 +Test acc: 92.014 +beat acc: 94.09722222222223 +epoch: 194 +dt: 15 +T: 60 +Tarin loss:0.00985 +Train acc: 97.787 +Test loss:0.00172 +Test acc: 93.403 +beat acc: 94.09722222222223 +epoch: 195 +dt: 15 +T: 60 +Tarin loss:0.01042 +Train acc: 97.362 +Test loss:0.00168 +Test acc: 93.750 +beat acc: 94.09722222222223 +epoch: 196 +dt: 15 +T: 60 +Tarin loss:0.01040 +Train acc: 97.021 +Test loss:0.00168 +Test acc: 93.403 +beat acc: 94.09722222222223 +epoch: 197 +dt: 15 +T: 60 +Tarin loss:0.01004 +Train acc: 98.128 +Test loss:0.00169 +Test acc: 92.014 +beat acc: 94.09722222222223 +epoch: 198 +dt: 15 +T: 60 +Tarin loss:0.01106 +Train acc: 97.532 +Test loss:0.00169 +Test acc: 92.708 +beat acc: 94.09722222222223 +epoch: 199 +dt: 15 +T: 60 +Tarin loss:0.00985 +Train acc: 97.532 +Test loss:0.00168 +Test acc: 92.361 +beat acc: 94.09722222222223 +epoch: 200 +dt: 15 +T: 60 +Tarin loss:0.01047 +Train acc: 97.617 +Test loss:0.00175 +Test acc: 91.667 +beat acc: 94.09722222222223 +epoch: 201 +dt: 15 +T: 60 +Tarin loss:0.01065 +Train acc: 97.617 +Test loss:0.00176 +Test acc: 93.056 +beat acc: 94.09722222222223 +epoch: 202 +dt: 15 +T: 60 +Tarin loss:0.01116 +Train acc: 97.362 +Test loss:0.00169 +Test acc: 94.444 +Saving.. +beat acc: 94.44444444444444 +epoch: 203 +dt: 15 +T: 60 +Tarin loss:0.01043 +Train acc: 97.702 +Test loss:0.00164 +Test acc: 93.403 +beat acc: 94.44444444444444 +epoch: 204 +dt: 15 +T: 60 +Tarin loss:0.01070 +Train acc: 97.362 +Test loss:0.00168 +Test acc: 93.056 +beat acc: 94.44444444444444 +epoch: 205 +dt: 15 +T: 60 +Tarin loss:0.00996 +Train acc: 98.298 +Test loss:0.00174 +Test acc: 92.361 +beat acc: 94.44444444444444 +epoch: 206 +dt: 15 +T: 60 +Tarin loss:0.00993 +Train acc: 98.128 +Test loss:0.00178 +Test acc: 92.014 +beat acc: 94.44444444444444 +epoch: 207 +dt: 15 +T: 60 +Tarin loss:0.01034 +Train acc: 97.362 +Test loss:0.00171 +Test acc: 92.014 +beat acc: 94.44444444444444 +epoch: 208 +dt: 15 +T: 60 +Tarin loss:0.01064 +Train acc: 97.362 +Test loss:0.00173 +Test acc: 92.361 +beat acc: 94.44444444444444 +epoch: 209 +dt: 15 +T: 60 +Tarin loss:0.00994 +Train acc: 97.702 +Test loss:0.00171 +Test acc: 93.056 +beat acc: 94.44444444444444 +epoch: 210 +dt: 15 +T: 60 +Tarin loss:0.01076 +Train acc: 97.702 +Test loss:0.00170 +Test acc: 93.056 +beat acc: 94.44444444444444 +epoch: 211 +dt: 15 +T: 60 +Tarin loss:0.00955 +Train acc: 98.553 +Test loss:0.00171 +Test acc: 93.056 +beat acc: 94.44444444444444 +epoch: 212 +dt: 15 +T: 60 +Tarin loss:0.01078 +Train acc: 97.617 +Test loss:0.00172 +Test acc: 91.667 +beat acc: 94.44444444444444 +epoch: 213 +dt: 15 +T: 60 +Tarin loss:0.01132 +Train acc: 97.957 +Test loss:0.00169 +Test acc: 92.361 +beat acc: 94.44444444444444 +epoch: 214 +dt: 15 +T: 60 +Tarin loss:0.00961 +Train acc: 97.787 +Test loss:0.00174 +Test acc: 92.014 +beat acc: 94.44444444444444 +epoch: 215 +dt: 15 +T: 60 +Tarin loss:0.01083 +Train acc: 97.532 +Test loss:0.00171 +Test acc: 92.708 +beat acc: 94.44444444444444 +epoch: 216 +dt: 15 +T: 60 +Tarin loss:0.01053 +Train acc: 98.043 +Test loss:0.00171 +Test acc: 93.403 +beat acc: 94.44444444444444 +epoch: 217 +dt: 15 +T: 60 +Tarin loss:0.00976 +Train acc: 97.532 +Test loss:0.00171 +Test acc: 92.708 +beat acc: 94.44444444444444 +epoch: 218 +dt: 15 +T: 60 +Tarin loss:0.01111 +Train acc: 97.362 +Test loss:0.00167 +Test acc: 93.403 +beat acc: 94.44444444444444 +epoch: 219 +dt: 15 +T: 60 +Tarin 
loss:0.01053 +Train acc: 97.362 +Test loss:0.00168 +Test acc: 92.361 +beat acc: 94.44444444444444 +epoch: 220 +dt: 15 +T: 60 +Tarin loss:0.01093 +Train acc: 98.213 +Test loss:0.00174 +Test acc: 91.667 +beat acc: 94.44444444444444 +epoch: 221 +dt: 15 +T: 60 +Tarin loss:0.00923 +Train acc: 98.298 +Test loss:0.00173 +Test acc: 92.708 +beat acc: 94.44444444444444 +epoch: 222 +dt: 15 +T: 60 +Tarin loss:0.01053 +Train acc: 97.787 +Test loss:0.00178 +Test acc: 92.361 +beat acc: 94.44444444444444 +epoch: 223 +dt: 15 +T: 60 +Tarin loss:0.00973 +Train acc: 98.298 +Test loss:0.00174 +Test acc: 91.319 +beat acc: 94.44444444444444 +epoch: 224 +dt: 15 +T: 60 +Tarin loss:0.00978 +Train acc: 98.128 +Test loss:0.00170 +Test acc: 92.361 +beat acc: 94.44444444444444 +epoch: 225 +dt: 15 +T: 60 +Tarin loss:0.01146 +Train acc: 97.362 +Test loss:0.00172 +Test acc: 92.014 +beat acc: 94.44444444444444 +epoch: 226 +dt: 15 +T: 60 +Tarin loss:0.01095 +Train acc: 97.617 +Test loss:0.00174 +Test acc: 92.708 +beat acc: 94.44444444444444 +epoch: 227 +dt: 15 +T: 60 +Tarin loss:0.00957 +Train acc: 98.213 +Test loss:0.00173 +Test acc: 92.014 +beat acc: 94.44444444444444 +epoch: 228 +dt: 15 +T: 60 +Tarin loss:0.01084 +Train acc: 97.872 +Test loss:0.00170 +Test acc: 93.056 +beat acc: 94.44444444444444 +epoch: 229 +dt: 15 +T: 60 +Tarin loss:0.01169 +Train acc: 97.617 +Test loss:0.00170 +Test acc: 93.056 +beat acc: 94.44444444444444 +epoch: 230 +dt: 15 +T: 60 +Tarin loss:0.01004 +Train acc: 97.617 +Test loss:0.00170 +Test acc: 92.361 +beat acc: 94.44444444444444 +epoch: 231 +dt: 15 +T: 60 +Tarin loss:0.00990 +Train acc: 97.787 +Test loss:0.00173 +Test acc: 92.708 +beat acc: 94.44444444444444 +epoch: 232 +dt: 15 +T: 60 +Tarin loss:0.01023 +Train acc: 98.383 +Test loss:0.00172 +Test acc: 92.014 +beat acc: 94.44444444444444 +epoch: 233 +dt: 15 +T: 60 +Tarin loss:0.01050 +Train acc: 98.213 +Test loss:0.00178 +Test acc: 90.972 +beat acc: 94.44444444444444 +epoch: 234 +dt: 15 +T: 60 +Tarin loss:0.01018 +Train acc: 97.787 +Test loss:0.00171 +Test acc: 93.403 +beat acc: 94.44444444444444 +epoch: 235 +dt: 15 +T: 60 +Tarin loss:0.01067 +Train acc: 97.532 +Test loss:0.00172 +Test acc: 91.667 +beat acc: 94.44444444444444 +epoch: 236 +dt: 15 +T: 60 +Tarin loss:0.00910 +Train acc: 98.043 +Test loss:0.00176 +Test acc: 91.319 +beat acc: 94.44444444444444 +epoch: 237 +dt: 15 +T: 60 +Tarin loss:0.01047 +Train acc: 97.362 +Test loss:0.00172 +Test acc: 92.014 +beat acc: 94.44444444444444 +epoch: 238 +dt: 15 +T: 60 +Tarin loss:0.01027 +Train acc: 98.043 +Test loss:0.00170 +Test acc: 93.056 +beat acc: 94.44444444444444 +epoch: 239 +dt: 15 +T: 60 +Tarin loss:0.01036 +Train acc: 97.787 +Test loss:0.00171 +Test acc: 92.708 +beat acc: 94.44444444444444 +epoch: 240 +dt: 15 +T: 60 +Tarin loss:0.01079 +Train acc: 97.617 +Test loss:0.00174 +Test acc: 91.319 +beat acc: 94.44444444444444 +epoch: 241 +dt: 15 +T: 60 +Tarin loss:0.01099 +Train acc: 97.362 +Test loss:0.00173 +Test acc: 90.972 +beat acc: 94.44444444444444 +epoch: 242 +dt: 15 +T: 60 +Tarin loss:0.01093 +Train acc: 97.447 +Test loss:0.00169 +Test acc: 92.014 +beat acc: 94.44444444444444 +epoch: 243 +dt: 15 +T: 60 +Tarin loss:0.01115 +Train acc: 97.787 +Test loss:0.00170 +Test acc: 92.014 +beat acc: 94.44444444444444 +epoch: 244 +dt: 15 +T: 60 +Tarin loss:0.01084 +Train acc: 98.298 +Test loss:0.00170 +Test acc: 92.361 +beat acc: 94.44444444444444 +epoch: 245 +dt: 15 +T: 60 +Tarin loss:0.01025 +Train acc: 97.021 +Test loss:0.00170 +Test acc: 93.403 +beat acc: 94.44444444444444 +epoch: 246 +dt: 15 +T: 
60 +Tarin loss:0.01053 +Train acc: 97.787 +Test loss:0.00166 +Test acc: 93.056 +beat acc: 94.44444444444444 +epoch: 247 +dt: 15 +T: 60 +Tarin loss:0.01112 +Train acc: 97.106 +Test loss:0.00170 +Test acc: 92.708 +beat acc: 94.44444444444444 +epoch: 248 +dt: 15 +T: 60 +Tarin loss:0.01077 +Train acc: 97.447 +Test loss:0.00166 +Test acc: 92.014 +beat acc: 94.44444444444444 +epoch: 249 +dt: 15 +T: 60 +Tarin loss:0.01154 +Train acc: 97.191 +Test loss:0.00166 +Test acc: 92.708 +beat acc: 94.44444444444444 +epoch: 250 +dt: 15 +T: 60 +Tarin loss:0.01030 +Train acc: 97.702 +Test loss:0.00169 +Test acc: 92.708 +beat acc: 94.44444444444444 +epoch: 251 +dt: 15 +T: 60 +Tarin loss:0.00989 +Train acc: 97.702 +Test loss:0.00171 +Test acc: 92.708 +beat acc: 94.44444444444444 +epoch: 252 +dt: 15 +T: 60 +Tarin loss:0.00989 +Train acc: 98.128 +Test loss:0.00178 +Test acc: 90.972 +beat acc: 94.44444444444444 +epoch: 253 +dt: 15 +T: 60 +Tarin loss:0.01210 +Train acc: 97.447 +Test loss:0.00174 +Test acc: 92.014 +beat acc: 94.44444444444444 +epoch: 254 +dt: 15 +T: 60 +Tarin loss:0.01051 +Train acc: 98.213 +Test loss:0.00173 +Test acc: 93.056 +beat acc: 94.44444444444444 +epoch: 255 +dt: 15 +T: 60 +Tarin loss:0.01086 +Train acc: 97.702 +Test loss:0.00171 +Test acc: 93.403 +beat acc: 94.44444444444444 +epoch: 256 +dt: 15 +T: 60 +Tarin loss:0.01045 +Train acc: 96.766 +Test loss:0.00176 +Test acc: 92.708 +beat acc: 94.44444444444444 +epoch: 257 +dt: 15 +T: 60 +Tarin loss:0.01146 +Train acc: 97.277 +Test loss:0.00170 +Test acc: 93.056 +beat acc: 94.44444444444444 +epoch: 258 +dt: 15 +T: 60 +Tarin loss:0.00977 +Train acc: 98.128 +Test loss:0.00169 +Test acc: 93.056 +beat acc: 94.44444444444444 +epoch: 259 +dt: 15 +T: 60 +Tarin loss:0.01003 +Train acc: 97.021 +Test loss:0.00172 +Test acc: 92.014 +beat acc: 94.44444444444444 +epoch: 260 +dt: 15 +T: 60 +Tarin loss:0.01052 +Train acc: 97.957 +Test loss:0.00176 +Test acc: 91.667 +beat acc: 94.44444444444444 +epoch: 261 +dt: 15 +T: 60 +Tarin loss:0.01189 +Train acc: 97.617 +Test loss:0.00173 +Test acc: 92.014 +beat acc: 94.44444444444444 +epoch: 262 +dt: 15 +T: 60 +Tarin loss:0.01021 +Train acc: 97.532 +Test loss:0.00169 +Test acc: 93.056 +beat acc: 94.44444444444444 +epoch: 263 +dt: 15 +T: 60 +Tarin loss:0.01124 +Train acc: 98.043 +Test loss:0.00173 +Test acc: 92.708 +beat acc: 94.44444444444444 +epoch: 264 +dt: 15 +T: 60 +Tarin loss:0.01048 +Train acc: 98.128 +Test loss:0.00170 +Test acc: 93.403 +beat acc: 94.44444444444444 +epoch: 265 +dt: 15 +T: 60 +Tarin loss:0.00985 +Train acc: 98.298 +Test loss:0.00164 +Test acc: 93.403 +beat acc: 94.44444444444444 +epoch: 266 +dt: 15 +T: 60 +Tarin loss:0.00911 +Train acc: 98.383 +Test loss:0.00171 +Test acc: 92.361 +beat acc: 94.44444444444444 +epoch: 267 +dt: 15 +T: 60 +Tarin loss:0.01110 +Train acc: 97.532 +Test loss:0.00171 +Test acc: 92.708 +beat acc: 94.44444444444444 +epoch: 268 +dt: 15 +T: 60 +Tarin loss:0.01073 +Train acc: 97.787 +Test loss:0.00175 +Test acc: 92.361 +beat acc: 94.44444444444444 +epoch: 269 +dt: 15 +T: 60 +Tarin loss:0.01027 +Train acc: 97.532 +Test loss:0.00169 +Test acc: 93.056 +beat acc: 94.44444444444444 +epoch: 270 +dt: 15 +T: 60 +Tarin loss:0.01060 +Train acc: 97.702 +Test loss:0.00171 +Test acc: 91.667 +beat acc: 94.44444444444444 +epoch: 271 +dt: 15 +T: 60 +Tarin loss:0.01016 +Train acc: 97.447 +Test loss:0.00167 +Test acc: 92.014 +beat acc: 94.44444444444444 +epoch: 272 +dt: 15 +T: 60 +Tarin loss:0.01036 +Train acc: 97.787 +Test loss:0.00166 +Test acc: 92.014 +beat acc: 94.44444444444444 +epoch: 273 
+dt: 15 +T: 60 +Tarin loss:0.01040 +Train acc: 97.447 +Test loss:0.00167 +Test acc: 93.056 +beat acc: 94.44444444444444 +epoch: 274 +dt: 15 +T: 60 +Tarin loss:0.01139 +Train acc: 98.043 +Test loss:0.00166 +Test acc: 92.361 +beat acc: 94.44444444444444 +epoch: 275 +dt: 15 +T: 60 +Tarin loss:0.00939 +Train acc: 98.043 +Test loss:0.00167 +Test acc: 92.708 +beat acc: 94.44444444444444 +epoch: 276 +dt: 15 +T: 60 +Tarin loss:0.01085 +Train acc: 97.447 +Test loss:0.00169 +Test acc: 92.708 +beat acc: 94.44444444444444 +epoch: 277 +dt: 15 +T: 60 +Tarin loss:0.00973 +Train acc: 98.128 +Test loss:0.00169 +Test acc: 92.708 +beat acc: 94.44444444444444 +epoch: 278 +dt: 15 +T: 60 +Tarin loss:0.00952 +Train acc: 98.043 +Test loss:0.00171 +Test acc: 93.056 +beat acc: 94.44444444444444 +epoch: 279 +dt: 15 +T: 60 +Tarin loss:0.01057 +Train acc: 97.617 +Test loss:0.00173 +Test acc: 91.667 +beat acc: 94.44444444444444 +epoch: 280 +dt: 15 +T: 60 +Tarin loss:0.00974 +Train acc: 97.277 +Test loss:0.00171 +Test acc: 92.361 +beat acc: 94.44444444444444 +epoch: 281 +dt: 15 +T: 60 +Tarin loss:0.01038 +Train acc: 97.447 +Test loss:0.00171 +Test acc: 92.708 +beat acc: 94.44444444444444 +epoch: 282 +dt: 15 +T: 60 +Tarin loss:0.01116 +Train acc: 97.021 +Test loss:0.00173 +Test acc: 92.708 +beat acc: 94.44444444444444 +epoch: 283 +dt: 15 +T: 60 +Tarin loss:0.01071 +Train acc: 96.851 +Test loss:0.00177 +Test acc: 91.319 +beat acc: 94.44444444444444 +epoch: 284 +dt: 15 +T: 60 +Tarin loss:0.01027 +Train acc: 97.787 +Test loss:0.00170 +Test acc: 93.056 +beat acc: 94.44444444444444 +epoch: 285 +dt: 15 +T: 60 +Tarin loss:0.00990 +Train acc: 98.298 +Test loss:0.00171 +Test acc: 92.361 +beat acc: 94.44444444444444 +epoch: 286 +dt: 15 +T: 60 +Tarin loss:0.00993 +Train acc: 97.106 +Test loss:0.00173 +Test acc: 91.667 +beat acc: 94.44444444444444 +epoch: 287 +dt: 15 +T: 60 +Tarin loss:0.01133 +Train acc: 97.957 +Test loss:0.00170 +Test acc: 92.708 +beat acc: 94.44444444444444 +epoch: 288 +dt: 15 +T: 60 +Tarin loss:0.01209 +Train acc: 97.106 +Test loss:0.00170 +Test acc: 92.708 +beat acc: 94.44444444444444 +epoch: 289 +dt: 15 +T: 60 +Tarin loss:0.01164 +Train acc: 97.191 +Test loss:0.00173 +Test acc: 92.361 +beat acc: 94.44444444444444 +epoch: 290 +dt: 15 +T: 60 +Tarin loss:0.00993 +Train acc: 98.043 +Test loss:0.00170 +Test acc: 93.403 +beat acc: 94.44444444444444 +epoch: 291 +dt: 15 +T: 60 +Tarin loss:0.01164 +Train acc: 97.872 +Test loss:0.00172 +Test acc: 92.014 +beat acc: 94.44444444444444 +epoch: 292 +dt: 15 +T: 60 +Tarin loss:0.01011 +Train acc: 97.957 +Test loss:0.00165 +Test acc: 92.708 +beat acc: 94.44444444444444 +epoch: 293 +dt: 15 +T: 60 +Tarin loss:0.01091 +Train acc: 97.957 +Test loss:0.00168 +Test acc: 93.056 +beat acc: 94.44444444444444 +epoch: 294 +dt: 15 +T: 60 +Tarin loss:0.00980 +Train acc: 98.383 +Test loss:0.00166 +Test acc: 92.361 +beat acc: 94.44444444444444 +epoch: 295 +dt: 15 +T: 60 +Tarin loss:0.00985 +Train acc: 98.553 +Test loss:0.00168 +Test acc: 92.708 +beat acc: 94.44444444444444 +epoch: 296 +dt: 15 +T: 60 +Tarin loss:0.01016 +Train acc: 97.191 +Test loss:0.00171 +Test acc: 90.972 +beat acc: 94.44444444444444 +epoch: 297 +dt: 15 +T: 60 +Tarin loss:0.01038 +Train acc: 97.277 +Test loss:0.00168 +Test acc: 92.708 +beat acc: 94.44444444444444 +epoch: 298 +dt: 15 +T: 60 +Tarin loss:0.01142 +Train acc: 97.617 +Test loss:0.00170 +Test acc: 93.056 +beat acc: 94.44444444444444 +epoch: 299 +dt: 15 +T: 60 +Tarin loss:0.00947 +Train acc: 98.383 +Test loss:0.00167 +Test acc: 92.708 +beat acc: 94.44444444444444 
+epoch: 300 +dt: 15 +T: 60 +Tarin loss:0.00956 +Train acc: 98.383 +Test loss:0.00171 +Test acc: 92.708 +beat acc: 94.44444444444444 +best acc: 94.44444444444444 best_epoch: 202
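For reference: the architecture printed in log_DVS_Gesture_SNN.txt above contains a TCSA block whose temporal branch (TimeAttention) pools each of the T=60 frames down to a single value with both average and max pooling, passes the pooled vector through a shared 60 -> 12 -> 60 bottleneck (matching t_ratio==5), and gates the input with a sigmoid. Below is a minimal, self-contained PyTorch sketch reconstructed from that printed module structure; the tensor layout ([N, T, C, H, W]) and the avg+max fusion are assumptions made for illustration and may differ from the repository's actual implementation.

    # Sketch of the TimeAttention block as it appears in the architecture printout
    # (T=60 time steps, t_ratio=5 -> 60/5 = 12 hidden channels). Illustrative only;
    # the tensor layout [N, T, C, H, W] and avg+max fusion are assumptions.
    import torch
    import torch.nn as nn

    class TimeAttention(nn.Module):
        def __init__(self, timesteps: int = 60, t_ratio: int = 5):
            super().__init__()
            self.avg_pool = nn.AdaptiveAvgPool3d(1)   # squeeze C, H, W to 1x1x1 per time step
            self.max_pool = nn.AdaptiveMaxPool3d(1)
            self.sharedMLP = nn.Sequential(
                nn.Conv3d(timesteps, timesteps // t_ratio, kernel_size=1, bias=False),
                nn.ReLU(),
                nn.Conv3d(timesteps // t_ratio, timesteps, kernel_size=1, bias=False),
            )
            self.sigmoid = nn.Sigmoid()

        def forward(self, x: torch.Tensor) -> torch.Tensor:
            # x: [N, T, C, H, W]; the T axis plays the role of "channels" for the 3D pools.
            w = self.sigmoid(self.sharedMLP(self.avg_pool(x)) + self.sharedMLP(self.max_pool(x)))
            return x * w                               # broadcast the per-time-step weights over C, H, W

    if __name__ == "__main__":
        x = torch.randn(2, 60, 64, 32, 32)             # batch of 2, T=60, 64 channels, 32x32 frames
        print(TimeAttention()(x).shape)                # torch.Size([2, 60, 64, 32, 32])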