import torch
import torch.nn as nn
import numpy as np
import matplotlib.pyplot as plt

print(f"torch.__version__: {torch.__version__}")   # PyTorch version
print(f"torch.version.cuda: {torch.version.cuda}")  # CUDA version bundled with PyTorch
print(torch.cuda.is_available())                    # check whether CUDA is available

# Use the GPU if available, otherwise the CPU
device = torch.device("cuda" if torch.cuda.is_available() else "cpu")
print(f"using device: {device}")
# Generate some random training data: y = 10x + 1 plus Gaussian noise
np.random.seed(42)
x = np.random.rand(1000, 1)
y = 10 * x + 1 + 0.5 * np.random.randn(1000, 1)
# y[900] = 1000  # (commented out) inject an outlier

# Convert the data to tensors
x_tensor = torch.from_numpy(x).float()
y_tensor = torch.from_numpy(y).float()
# Define the linear regression model
class LinearRegressionModel(nn.Module):
    def __init__(self):
        super(LinearRegressionModel, self).__init__()
        self.linear = nn.Linear(1, 1)  # one input feature, one output

    def forward(self, x):
        return self.linear(x)

# Create the model instance
model = LinearRegressionModel()

# Define the loss function and optimizer
criterion = nn.MSELoss()
optimizer = torch.optim.SGD(model.parameters(), lr=0.01)
# Train the model
num_epochs = 6000
debug_epochs = np.zeros(num_epochs)
debug_loss = np.zeros(num_epochs)
for epoch in range(num_epochs):
    # Forward pass
    outputs = model(x_tensor)
    loss = criterion(outputs, y_tensor)
    # Backward pass and optimization
    optimizer.zero_grad()
    loss.backward()
    optimizer.step()
    # Record the loss curve for later plotting
    debug_epochs[epoch] = epoch
    debug_loss[epoch] = loss.item()
    if (epoch + 1) % 10 == 0:
        print(f'Epoch [{epoch + 1}/{num_epochs}], Loss: {loss.item()}')
# Make predictions
with torch.no_grad():
    predicted = model(x_tensor)
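
# Added sanity check (not in the original listing): the data above were generated
# as y = 10x + 1 + noise, so the learned weight should end up near 10 and the
# bias near 1 once training has converged.
w = model.linear.weight.item()
b = model.linear.bias.item()
print(f"learned weight: {w:.3f}, learned bias: {b:.3f}")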
# Plot the original data and the fitted line
# plt.scatter(x, y, label='Original Data')
# plt.plot(x, predicted.numpy(), color='red', label='Predicted Line')
# plt.xlabel('x')
# plt.ylabel('y')
# plt.legend()
# plt.show()
fig1, ax1 = plt.subplots()
ax1.scatter(x, y, label='Original Data')
ax1.plot(x, predicted.numpy(), color='red', label='Predicted Line')
ax1.set_xlabel('x')
ax1.set_ylabel('y')
ax1.legend()

# Plot the training loss curve
fig2, ax2 = plt.subplots()
ax2.scatter(debug_epochs, debug_loss)
ax2.plot(debug_epochs, debug_loss, color='red', label='loss')
ax2.set_xlabel('epoch')
ax2.set_ylabel('loss')
ax2.legend()
plt.show()