# 线性回归的从零开始实现

# Hyperparameters: learning rate and number of full passes over the dataset.
lr = 0.03
num_epochs = 3
net = linreg          # model forward pass: defined elsewhere (linear regression)
loss = squared_loss   # loss function: defined elsewhere

for epoch in range(num_epochs):
    for X, y in data_iter(batch_size, features, labels):
        l = loss(net(X, w, b), y)  # minibatch loss on X and y
        # l has shape (batch_size, 1) rather than a scalar, so sum its
        # elements into one scalar before computing gradients w.r.t. [w, b].
        l.sum().backward()
        sgd([w, b], lr, batch_size)  # update parameters using their gradients
    train_l = loss(net(features, w, b), labels)
    print(f'epoch {epoch + 1}, loss {float(train_l.mean()):f}')

epoch 1, loss 0.000050
epoch 2, loss 0.000050
epoch 3, loss 0.000050

## d2l的包出问题了吗？实验环境就是Google Colab。 前天跑的还好好的，昨天开始图片生成就有问题了。运行到： d2l.plt.scatter(features[:, (1)].detach().numpy(), labels.detach().numpy(), 1); 这一步会出现以下问题：

ImportError Traceback (most recent call last)
/usr/local/lib/python3.7/dist-packages/IPython/core/formatters.py in call(self, obj)
332 pass
333 else:
--> 334 return printer(obj)
335 # Finally look for special method names
336 method = get_real_method(obj, self.print_method)

12 frames
/usr/local/lib/python3.7/dist-packages/matplotlib/backends/backend_svg.py in ()
16 import matplotlib as mpl
17 from matplotlib import _api, cbook, font_manager as fm
---> 18 from matplotlib.backend_bases import (
19 _Backend, _check_savefig_extra_args, FigureCanvasBase, FigureManagerBase,
20 RendererBase)

ImportError: cannot import name '_check_savefig_extra_args' from 'matplotlib.backend_bases' (/usr/local/lib/python3.7/dist-packages/matplotlib/backend_bases.py)

1 Like

#初始化参数权重w，偏置b=0

``````class Tes(object):
def __init__(self):
self.num = 10
pass

pass

def func(inc: Tes) -> Tes:
inc.num += 10
print(id(inc))
return inc

def func2(inc):
inc += 10
print(id(inc))
return inc

a1 = Tes()
print(id(a1))
a2 = func(a1)

print(id(a2) == id(a1)) #此输出为True 对于非python基础类型的成立

b1 = 9999
print(id(b1))
b2 = func2(b1)
print(id(b2) == id(b1))  #此输出为False
``````

batch_size为标量 所以除法是没问题的

``````a = torch.ones((2, 1))
b = torch.ones((1, 2)) * 0.6
c=a-b
print(f'被减数shape为{a.shape}\n'
f'减数的shape为{b.shape}\n'
f'结果的shape为{c.shape}\n'
``````

## 最终解

``````test1=torch.randn((1,2),requires_grad=True)
test2=torch.mm(test1,test1.T)
test2.shape
test2.backward()

`with torch.no_grad():`封装可以暂时关闭计算图，也就是在语句内进行的张量运算不改变已有的计算图，这样就可以进行in-place操作了