optimization
```python
for k in range(100):

    # forward
    total_loss, acc = loss()

    # backward
    model.zero_grad()
    total_loss.backward()

    # update (sgd)
    learning_rate = 1.0 - 0.9 * k / 100
    for p in model.parameters():
        p.data -= learning_rate * p.grad

    if k % 1 == 0:
        # should a new line be added here, re-evaluating the loss?
        # total_loss, acc = loss()
        print(f"step {k} loss {total_loss.data}, accuracy {acc*100}%")
```