`torch.nn.functional.cross_entropy` (conventionally imported as `F`) combines both negative log likelihood loss and log softmax activation.
We build the model as a subclass of `nn.Module`:
from torch import nn
class Minst_Logistic(nn.Module):
    """Logistic-regression model for MNIST digits.

    A single linear layer mapping a flattened 28x28 grayscale image
    (784 features) to 10 class scores (logits). Pair with
    ``F.cross_entropy``, which applies log-softmax internally.
    """

    def __init__(self):
        super().__init__()
        # 784 = 28^2 input pixels, 10 output classes.
        # Scale the random init by 1/sqrt(fan_in) (Xavier-style) to keep
        # the initial output variance roughly independent of input size.
        self.weights = nn.Parameter(torch.randn(784, 10) / math.sqrt(784))
        self.bias = nn.Parameter(torch.zeros(10))

    def forward(self, xb):
        """Return logits of shape (batch, 10) for input batch ``xb``.

        Args:
            xb: tensor of shape (batch, 784) — a flattened image batch
                ("xb" is short for x_batch).
        """
        return xb @ self.weights + self.bias