guog · 算法笔记

V1

2023/04/06 · 阅读：21 · 主题：默认主题

# 三元一次线性回归神经网络的示例

"""Minimal PyTorch tutorial: fit y = x1 + 3 with a 3-feature linear model."""
import torch
import torch.nn as nn

# Fixed seed so the tutorial's printed losses are reproducible run to run.
torch.manual_seed(0)

# Training data: each target equals the first feature plus 3
# (e.g. [1, 2, 3] -> 4, [10, 11, 12] -> 13).
x_train = torch.tensor([[1, 2, 3], [4, 5, 6], [7, 8, 9], [10, 11, 12]],
                       dtype=torch.float32)
y_train = torch.tensor([[4], [7], [10], [13]], dtype=torch.float32)


class LinearRegression(nn.Module):
    """A single linear layer mapping 3 input features to 1 output."""

    def __init__(self):
        super(LinearRegression, self).__init__()
        self.linear = nn.Linear(3, 1)  # 3 input features, 1 output feature

    def forward(self, x):
        """Return the linear prediction for a (batch, 3) float tensor."""
        return self.linear(x)


model = LinearRegression()

# Loss function and optimizer.
criterion = nn.MSELoss()  # mean squared error
# BUGFIX: the original lr=0.01 diverges on this un-normalized data — the
# quadratic loss's largest curvature here (~2/N * lambda_max(X^T X) ~ 325)
# exceeds 2/lr, so full-batch SGD blows up to inf/NaN. lr=0.001 converges.
optimizer = torch.optim.SGD(model.parameters(), lr=0.001)

# Training loop: full-batch gradient descent for a fixed number of epochs.
num_epochs = 1000
for epoch in range(num_epochs):
    y_pred = model(x_train)            # forward pass: predictions
    loss = criterion(y_pred, y_train)  # compute training loss
    optimizer.zero_grad()              # clear gradients from previous step
    loss.backward()                    # backpropagate
    optimizer.step()                   # update parameters
    if (epoch + 1) % 100 == 0:
        print('Epoch [{}/{}], Loss: {:.4f}'.format(epoch + 1, num_epochs, loss.item()))

# Evaluate on held-out points that follow the same y = x1 + 3 rule.
x_test = torch.tensor([[2, 3, 4], [5, 6, 7]], dtype=torch.float32)
y_test = torch.tensor([[5], [8]], dtype=torch.float32)
with torch.no_grad():  # no gradients needed for evaluation
    y_pred = model(x_test)
    print('Test Loss: {:.4f}'.format(criterion(y_pred, y_test).item()))

V1