# See https://github.com/tensorflow/docs/blob/master/site/en/r2/guide/autograph.ipynb
def train_one_step(model, optimizer, x, y):
  with tf.GradientTape() as tape:
    logits = model(x)
    loss = compute_loss(y, logits)

  grads = tape.gradient(loss, model.trainable_variables)
  optimizer.apply_gradients(zip(grads, model.trainable_variables))

  # Update the running accuracy metric with this batch.
  compute_accuracy(y, logits)
  return loss


def train(model, optimizer):
  train_ds = mnist_dataset()
  step = 0
  loss = 0.0
  for x, y in train_ds:
    step += 1
    loss = train_one_step(model, optimizer, x, y)
    if tf.equal(step % 10, 0):
      tf.print('Step', step, ': loss', loss, '; accuracy', compute_accuracy.result())
  # Read the final value of the running metric so it can be returned.
  accuracy = compute_accuracy.result()
  return step, loss, accuracy
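# A minimal sketch of the helpers the snippets here assume (mnist_dataset,
# compute_loss, compute_accuracy), loosely following the referenced notebook;
# the pixel scaling, shuffle buffer, and batch size are assumptions.
import tensorflow as tf

def mnist_dataset():
  # Load MNIST, scale pixels to [0, 1], and return shuffled batches of 100.
  (x, y), _ = tf.keras.datasets.mnist.load_data()
  x = tf.cast(x / 255.0, tf.float32)   # shape (60000, 28, 28)
  y = tf.cast(y, tf.int64)
  return tf.data.Dataset.from_tensor_slices((x, y)).shuffle(60000).batch(100)

# Shared loss function and running accuracy metric used by both functions above.
compute_loss = tf.keras.losses.SparseCategoricalCrossentropy(from_logits=True)
compute_accuracy = tf.keras.metrics.SparseCategoricalAccuracy()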
# See https://github.com/tensorflow/docs/blob/master/site/en/r2/guide/autograph.ipynb
@tf.function
def train(model, optimizer):
  train_ds = mnist_dataset()
  step = 0
  loss = 0.0
  accuracy = 0.0
  for x, y in train_ds:
    # as above, including the "if" and "print()"
    step += 1
    loss = train_one_step(model, optimizer, x, y)
    if tf.equal(step % 10, 0):
      tf.print('Step', step, ': loss', loss, '; accuracy', compute_accuracy.result())
  return step
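# A hypothetical way to drive the @tf.function version above; the model
# architecture and optimizer choice are assumptions for illustration, not
# part of the referenced notebook text.
model = tf.keras.Sequential([
    tf.keras.layers.Flatten(input_shape=(28, 28)),
    tf.keras.layers.Dense(100, activation='relu'),
    tf.keras.layers.Dense(10)])  # raw logits for the 10 digit classes
optimizer = tf.keras.optimizers.Adam()

# The first call traces the Python function into a graph; later calls reuse it.
step = train(model, optimizer)
tf.print('Trained for', step, 'steps; final accuracy', compute_accuracy.result())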