# NOTE(review): removed web-page artifact line ("code pull complete, page will auto-refresh") — not part of the source.
import torch
import torch.nn as nn
import torch.nn.functional as F
class DQN(nn.Module):
    """Convolutional Q-network (classic Atari DQN architecture).

    Expects input of shape ``(batch, 4, 84, 84)``: four stacked frames with
    raw pixel values in ``[0, 255]``.  The three-conv stack reduces the
    spatial size to 7x7 (hence the ``64 * 7 * 7`` flattened features), and
    the fully-connected head maps 512 hidden units to one Q-value per action.
    """

    def __init__(self, action_dim, device):
        """Build the network.

        Args:
            action_dim: number of discrete actions (size of the output layer).
            device: stored on the instance; not read by the methods defined
                here — presumably used by callers (TODO confirm).
        """
        super().__init__()
        # Convolutions are bias-free, matching the original DQN setup.
        self.__conv1 = nn.Conv2d(4, 32, kernel_size=8, stride=4, bias=False)
        self.__conv2 = nn.Conv2d(32, 64, kernel_size=4, stride=2, bias=False)
        self.__conv3 = nn.Conv2d(64, 64, kernel_size=3, stride=1, bias=False)
        self.__fc1 = nn.Linear(64 * 7 * 7, 512)
        self.__fc2 = nn.Linear(512, action_dim)
        self.__device = device

    def forward(self, x):
        """Return Q-values of shape ``(batch, action_dim)``."""
        scaled = x / 255.  # map raw pixel intensities into [0, 1]
        feat = F.relu(self.__conv1(scaled))
        feat = F.relu(self.__conv2(feat))
        feat = F.relu(self.__conv3(feat))
        flat = feat.view(feat.size(0), -1)  # flatten per-sample feature maps
        hidden = F.relu(self.__fc1(flat))
        return self.__fc2(hidden)

    @staticmethod
    def init_weights(module):
        """He-initialize a ``Linear`` or ``Conv2d`` module (for ``net.apply``).

        Linear layers also get their bias zeroed; Conv2d biases are left
        untouched (the convs in this network are created without bias).
        """
        if isinstance(module, nn.Linear):
            nn.init.kaiming_normal_(module.weight, nonlinearity="relu")
            module.bias.data.fill_(0.0)
        elif isinstance(module, nn.Conv2d):
            nn.init.kaiming_normal_(module.weight, nonlinearity="relu")
# NOTE(review): removed Gitee content-moderation banner text (web-scrape artifact, not code).