Last active: March 29, 2018, 22:48
-
-
Save vritant24/3076deeb6946ac6958a717a92324d671 to your computer and use it in GitHub Desktop.
# Adapt PyTorch implementation for our python front end
This file contains hidden or bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
| # Adapt PyTorch implementation for our python front end | |
| from pylms import lms, stage, stageTensor # add our snek-lms module | |
| from pylms.rep import Rep # add our snek-lms module | |
# NOTE(review): reconstructed from a table-formatted gist scrape; the
# indentation below is inferred from the code's structure — confirm it
# against the original gist before relying on it.
@lms  # snek-lms annotation: stages this function through the pylms front end
def run(dummy):
    """Staged PyTorch training loop adapted for the snek-lms Python front end.

    `dummy` appears unused in the visible body — presumably a placeholder
    required by the @lms staging interface; TODO confirm against pylms.
    The bare `...` lines are elisions made by the gist author, not
    runnable code; `torch`, `nn`, `F`, `optim`, `args`, `loss`, and
    `print_time_and_loss` are free names not defined in this snippet.
    """
    ...
    # Data pipeline (constructor arguments elided in the gist).
    train_loader = torch.utils.data.DataLoader(...)
    # Two-layer MLP: 784 inputs (28x28 image flattened) -> 50 hidden -> 10 outputs.
    fc1 = nn.Linear(784, 50)
    fc2 = nn.Linear(50, 10)

    def forward(x):
        # Flatten each sample to a 784-vector; -1 preserves the batch dimension.
        x1 = x.view(-1, 784)
        x2 = F.relu(fc1(x1))
        x3 = fc2(x2)
        # Log-probabilities over the 10 classes (dim=1 = class dimension).
        return F.log_softmax(x3, dim=1)

    optimizer = optim.SGD(...)

    def train(epoch):
        """One pass over the training set (loss computation elided below)."""
        for batch_idx, (data, target) in enumerate(train_loader):
            ...
            # `loss` is produced by the elided lines above.
            loss.backward()
            optimizer.step()
            # Progress report every `args.log_interval` batches (1-based count).
            if (((batch_idx + 1) % args.log_interval) == 0):
                print_time_and_loss()

    # Epoch loop written as a manual `while` counter rather than
    # `for`/`range` — presumably because the snek-lms stager handles
    # `while`; do not restructure without checking the pylms tracer.
    idx = 0
    while idx < args.epochs:
        idx = idx + 1
        train(idx)  # epochs are numbered 1..args.epochs
@stage  # snek-lms annotation: bootstraps the staged pipeline around `run`
def runX(x):
    """Staged entry point: forwards `x` to the @lms-decorated `run`.

    Kept as a bare one-line wrapper — the @stage decorator presumably
    traces this call to drive code generation; TODO confirm with pylms.
    """
    return run(x)
Sign up for free to join this conversation on GitHub. Already have an account? Sign in to comment.