
Commit 88c032b

Add files via upload
1 parent 3bb060f commit 88c032b

File tree: 1 file changed, +45 −0 lines changed

@@ -0,0 +1,45 @@
import tensorflow as tf

# Load and prepare the MNIST dataset; scale the pixel values from 0-255 to 0-1
mnist = tf.keras.datasets.mnist

(x_train, y_train), (x_test, y_test) = mnist.load_data()
x_train, x_test = x_train / 255.0, x_test / 255.0
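
# Optional sanity check (not required for training): MNIST provides
# 60,000 training and 10,000 test images of 28x28 grayscale pixels.
print(x_train.shape, y_train.shape)  # expected: (60000, 28, 28) (60000,)
print(x_test.shape, y_test.shape)    # expected: (10000, 28, 28) (10000,)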
# Build the tf.keras.Sequential model by stacking layers.
# The final Dense layer returns a vector of 10 logits, one per class.
model = tf.keras.models.Sequential([
    tf.keras.layers.Flatten(input_shape=(28, 28)),
    tf.keras.layers.Dense(128, activation='relu'),
    tf.keras.layers.Dropout(0.2),
    tf.keras.layers.Dense(10)
])
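
# Optional: inspect the stacked layers and parameter counts; summary() works
# at this point because Flatten's input_shape lets Keras build the model immediately.
model.summary()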
# The untrained model returns a vector of raw logits for the first example
predictions = model(x_train[:1]).numpy()
print(predictions)

# Convert the logits to probabilities for each class
print(tf.nn.softmax(predictions).numpy())

# Define the loss function; from_logits=True because the model outputs logits
loss_fn = tf.keras.losses.SparseCategoricalCrossentropy(from_logits=True)
print(loss_fn(y_train[:1], predictions).numpy())
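
# Because the model is untrained, each of the 10 classes is roughly equally
# likely, so this initial loss should be close to -log(1/10), i.e. about 2.3.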
# Choose an optimizer and loss function, then compile the model
model.compile(optimizer='adam',
              loss=loss_fn,
              metrics=['accuracy'])

# Fit the model: adjust the parameters to minimize the loss on the training set
model.fit(x_train, y_train, epochs=5)

# Evaluate the model on the held-out test set
model.evaluate(x_test, y_test, verbose=2)
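
# evaluate() also returns the loss and metric values in the order they were
# given to compile(), so the test accuracy can be captured if needed, e.g.:
#   test_loss, test_acc = model.evaluate(x_test, y_test, verbose=2)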
# Attach a softmax layer so the trained model returns probabilities instead of logits
probability_model = tf.keras.Sequential([
    model,
    tf.keras.layers.Softmax()
])

print(probability_model(x_test[:5]))
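
# Illustrative follow-up: the predicted class for each image is the index of
# the largest probability; compare the first five predictions to the labels.
print(tf.argmax(probability_model(x_test[:5]), axis=1).numpy())
print(y_test[:5])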
