Commit c3040f32 authored by Eric.Lee2021 🚴🏻

change loss pattern

Parent 5a5cad7e
@@ -132,8 +132,8 @@ def trainer(ops,f_log):
                 # zero the optimizer gradients
                 optimizer.zero_grad()
                 step += 1
-            torch.save(model_.state_dict(), ops.model_exp + '{}-epoch-{}.pth'.format(ops.model,epoch))
+            if epoch % 5 == 0 and epoch >0:
+                torch.save(model_.state_dict(), ops.model_exp + '{}-epoch-{}.pth'.format(ops.model,epoch))
     except Exception as e:
         print('Exception : ',e) # print the exception
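
For context, a minimal runnable sketch (not the repository's code) of the checkpointing pattern this hunk switches to: saving the model state only every 5 epochs instead of unconditionally as in the removed line. SimpleNet, model_exp_demo and simple_net are placeholder names; the checkpoint filenames in this diff suggest the actual model is a resnet_50.

import os
import torch
import torch.nn as nn

# Placeholder model standing in for the repository's network.
class SimpleNet(nn.Module):
    def __init__(self):
        super().__init__()
        self.fc = nn.Linear(8, 2)

    def forward(self, x):
        return self.fc(x)

model_ = SimpleNet()
model_exp = './model_exp_demo/'   # hypothetical output directory
os.makedirs(model_exp, exist_ok=True)

for epoch in range(1, 21):
    # ... one epoch of training would run here ...
    # Save a checkpoint only every 5 epochs (skipping epoch 0),
    # rather than unconditionally as in the line removed by this hunk.
    if epoch % 5 == 0 and epoch > 0:
        torch.save(model_.state_dict(),
                   model_exp + '{}-epoch-{}.pth'.format('simple_net', epoch))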
@@ -163,11 +163,11 @@ if __name__ == "__main__":
     parser.add_argument('--pretrained', type=bool, default = True,
         help = 'imageNet_Pretrain') # use ImageNet-pretrained weights
-    parser.add_argument('--fintune_model', type=str, default = './model_exp/2021-02-21_17-51-10/resnet_50-epoch-103.pth',
+    parser.add_argument('--fintune_model', type=str, default = './model_exp/2021-02-21_17-51-30/resnet_50-epoch-724.pth',
         help = 'fintune_model') # finetune model
     parser.add_argument('--loss_define', type=str, default = 'wing_loss',
         help = 'define_loss') # loss function definition
-    parser.add_argument('--init_lr', type=float, default = 1e-5,
+    parser.add_argument('--init_lr', type=float, default = 1e-3,
         help = 'init_learningRate') # initial learning rate
     parser.add_argument('--lr_decay', type=float, default = 0.1,
         help = 'learningRate_decay') # learning-rate decay factor
......
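
The commit message ("change loss pattern") together with the --loss_define default of 'wing_loss' points at the wing loss of Feng et al. (CVPR 2018) for landmark regression. The sketch below shows only the standard formulation; the function name, the parameter defaults w=10.0 and epsilon=2.0, and the tensor shapes are illustrative assumptions, not values taken from this repository.

import math
import torch

def wing_loss(pred, target, w=10.0, epsilon=2.0):
    # Standard wing-loss formulation (Feng et al., "Wing Loss for Robust
    # Facial Landmark Localisation with CNNs", CVPR 2018):
    #   wing(x) = w * ln(1 + |x| / epsilon)   if |x| < w
    #           = |x| - C                     otherwise,
    # where C = w - w * ln(1 + w / epsilon) makes the two pieces join continuously.
    x = (pred - target).abs()
    C = w - w * math.log(1.0 + w / epsilon)
    loss = torch.where(x < w, w * torch.log(1.0 + x / epsilon), x - C)
    return loss.mean()

# Usage with dummy landmark tensors (shapes are illustrative).
pred = torch.randn(4, 196)      # e.g. 98 (x, y) landmark pairs per sample
target = torch.randn(4, 196)
print(wing_loss(pred, target).item())

Compared with a plain L2 loss, the logarithmic region gives relatively larger gradients for small localisation errors, which is the usual motivation for using this loss in landmark regression.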