import torch
import torch.nn as nn
import torch.optim as optim
from torchvision import datasets, transforms
import neptune.new as neptune
# Step 1: Initialize Neptune and create new Neptune Run
project="<YOUR_WORKSPACE/YOUR_PROJECT>", #common/showroom
api_token="<YOUR_API_TOKEN>" #ANONYMOUS
data_dir = "data/CIFAR10"
compressed_ds = "./data/CIFAR10/cifar-10-python.tar.gz"
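# Archive created locally by datasets.CIFAR10(download=True); versioned as an artifact below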
"train": transforms.Compose(
transforms.RandomHorizontalFlip(),
transforms.Normalize([0.485, 0.456, 0.406], [0.229, 0.224, 0.225]),
"input_size": 32 * 32 * 3,
"model_filename": "basemodel",
trainset = datasets.CIFAR10(data_dir, transform=data_tfms["train"], download=True)
trainloader = torch.utils.data.DataLoader(trainset, batch_size=params["batch_size"], shuffle=True)
dataset_size = {"train": len(trainset)}
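# BaseModel is not defined in this snippet. A minimal sketch, assuming a plain
# MLP whose constructor takes (input_size, hidden_dim, n_classes):
class BaseModel(nn.Module):
    def __init__(self, input_sz, hidden_dim, n_classes):
        super().__init__()
        self.main = nn.Sequential(
            nn.Linear(input_sz, hidden_dim),
            nn.ReLU(),
            nn.Linear(hidden_dim, n_classes),
        )

    def forward(self, x):
        # Flatten (N, 3, 32, 32) images to (N, 3072) for the linear layers
        return self.main(x.view(x.size(0), -1))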
model = BaseModel(params["input_size"], params["input_size"], params["n_classes"])
criterion = nn.CrossEntropyLoss()
optimizer = optim.SGD(model.parameters(), lr=params["lr"])
# Step 2: Log config & parameters
run["config/dataset/path"] = data_dir
run["config/dataset/transforms"] = data_tfms
run["config/dataset/size"] = dataset_size
run["config/params"] = params
# Step 3: Log losses & metrics
for i, (x, y) in enumerate(trainloader, 0):
    optimizer.zero_grad()  # reset gradients from the previous batch
    outputs = model(x)
    _, preds = torch.max(outputs, 1)
    loss = criterion(outputs, y)
    acc = torch.sum(preds == y.data) / len(x)

    # Log batch-level loss & accuracy under the run's "metrics" namespace
    run["metrics/training/batch/loss"].log(loss)
    run["metrics/training/batch/acc"].log(acc)

    loss.backward()  # the original snippet omitted the backward/step pair
    optimizer.step()