Last active
March 29, 2018 22:50
-
-
Save vritant24/14bb36ed3ce999234f424c278bee3718 to your computer and use it in GitHub Desktop.
PyTorch implementation of 2 linear layers
This file contains hidden or bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
# PyTorch implementation of 2 linear layers
def run():
    """Train a small two-layer MLP on flattened 28x28 inputs (skeleton).

    Ellipses (...) mark code elided in the original snippet. The names
    `args`, `loss`, and `print_time_and_loss` are assumed to be defined
    elsewhere in the full file -- TODO confirm against the complete source.
    """
    ...
    # Training data pipeline; dataset/transform arguments were elided.
    train_loader = torch.utils.data.DataLoader(...)

    class Net(nn.Module):
        """Two fully connected layers: 784 -> 50 -> 10 class scores."""

        def __init__(self):
            super(Net, self).__init__()
            self.fc1 = nn.Linear(784, 50)  # 28*28 flattened pixels -> hidden
            self.fc2 = nn.Linear(50, 10)   # hidden -> 10 class logits

        def forward(self, x):
            # Collapse any incoming batch shape to (N, 784).
            x = x.view(-1, 784)
            x = F.relu(self.fc1(x))
            x = self.fc2(x)
            # Log-probabilities over classes (pairs with F.nll_loss).
            return F.log_softmax(x, dim=1)

    model = Net()
    optimizer = optim.SGD(...)  # learning rate / momentum elided

    def train(epoch):
        """Run one full pass over `train_loader` for the given epoch."""
        for batch_idx, (data, target) in enumerate(train_loader):
            # Forward pass and loss computation elided; must define `loss`.
            # NOTE(review): no optimizer.zero_grad() is visible here --
            # confirm it happens inside the elided section, otherwise
            # gradients accumulate across batches.
            ...
            loss.backward()
            optimizer.step()
            # Periodic progress report every `args.log_interval` batches.
            if (batch_idx + 1) % args.log_interval == 0:
                print_time_and_loss()

    for epoch in range(1, args.epochs + 1):
        train(epoch)
Sign up for free
to join this conversation on GitHub.
Already have an account?
Sign in to comment