import torch
import torch.nn as nn


def train_edge_detector(epochs: int = 4000, lr: float = 0.0005,
                        log_every: int = 100) -> torch.Tensor:
    """Learn a 3x3 vertical-edge-detection kernel with gradient descent.

    A bias-free 1-in/1-out Conv2d is fitted so that convolving the known
    6x6 step image X reproduces the known 4x4 edge response Y.  The true
    kernel that maps X to Y has columns summing to (+3, 0, -3), so the
    learned weights should approximate a vertical edge detector.

    Args:
        epochs: number of optimization steps.
        lr: learning rate for RMSprop.
        log_every: print the loss every this many epochs.

    Returns:
        The learned kernel as a (3, 3) float64 tensor.
    """
    # Input: 6x6 image whose left half is 10 and right half is 0
    # (a single vertical edge down the middle).
    X = torch.tensor([[10, 10, 10, 0, 0, 0]] * 6, dtype=torch.float64)
    # Target: the 4x4 "valid" response of X under the ideal edge kernel.
    Y = torch.tensor([[0, 30, 30, 0]] * 4, dtype=torch.float64)

    conv2d = nn.Conv2d(1, 1, kernel_size=(3, 3), bias=False,
                       dtype=torch.float64)

    # Conv2d expects (batch, channels, height, width).
    X = X.reshape((1, 1, 6, 6))
    Y = Y.reshape((1, 1, 4, 4))

    optim = torch.optim.RMSprop(conv2d.parameters(), lr=lr)
    loss_fn = torch.nn.MSELoss()

    for i in range(epochs):
        Y_pred = conv2d(X)
        loss = loss_fn(Y_pred, Y)
        optim.zero_grad()
        loss.backward()
        optim.step()
        if (i + 1) % log_every == 0:
            print(f'epoch {i+1}, loss {loss.sum():.4f}')

    # Print and return the trained kernel for inspection.
    weight = conv2d.weight.data.reshape(3, 3)
    print(weight)
    return weight


if __name__ == "__main__":
    train_edge_detector()
输出:
epoch 100, loss 331.4604
epoch 200, loss 284.8803
epoch 300, loss 248.8032
epoch 400, loss 218.8007
epoch 500, loss 193.1186
epoch 600, loss 170.4061
epoch 700, loss 149.4530
epoch 800, loss 129.7580
epoch 900, loss 111.4134
epoch 1000, loss 94.5393
epoch 1100, loss 79.1782
epoch 1200, loss 65.3312
epoch 1300, loss 52.9822
epoch 1400, loss 42.1062
epoch 1500, loss 32.6718
epoch 1600, loss 24.6388
epoch 1700, loss 17.9555
epoch 1800, loss 12.5522
epoch 1900, loss 8.3332
epoch 2000, loss 5.1700
epoch 2100, loss 2.9096
epoch 2200, loss 1.4077
epoch 2300, loss 0.5341
epoch 2400, loss 0.1348
epoch 2500, loss 0.0166
epoch 2600, loss 0.0006
epoch 2700, loss 0.0000
epoch 2800, loss 0.0001
epoch 2900, loss 0.0001
epoch 3000, loss 0.0001
epoch 3100, loss 0.0001
epoch 3200, loss 0.0002
epoch 3300, loss 0.0002
epoch 3400, loss 0.0002
epoch 3500, loss 0.0002
epoch 3600, loss 0.0002
epoch 3700, loss 0.0002
epoch 3800, loss 0.0002
epoch 3900, loss 0.0002
epoch 4000, loss 0.0002
tensor([[ 1.3123, -0.0050, -1.0276],
        [ 0.8334,  0.0677, -0.8868],
        [ 0.8551, -0.0619, -1.0849]], dtype=torch.float64)
由训练出的结果可以看出卷积核参数与实际的卷积核挺接近了。
到此这篇关于如何使用PyTorch优化一个边缘检测器的文章就介绍到这了,更多相关PyTorch优化边缘检测器内容请搜索编程客栈(www.devze.com)以前的文章或继续浏览下面的相关文章希望大家以后多多支持编程客栈(www.devze.com)!
精彩评论