Keras Essentials

High-level Keras API for building neural networks quickly.


Installation

# Keras is included in TensorFlow 2.x
pip install tensorflow

# Standalone Keras (multi-backend)
pip install keras
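
A quick way to confirm the install is to import Keras and print its version (output will vary by release):

python -c "import keras; print(keras.__version__)"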

Sequential Model

from tensorflow import keras
from tensorflow.keras import layers

model = keras.Sequential([
    layers.Input(shape=(28, 28, 1)),
    layers.Conv2D(32, kernel_size=(3, 3), activation='relu'),
    layers.MaxPooling2D(pool_size=(2, 2)),
    layers.Conv2D(64, kernel_size=(3, 3), activation='relu'),
    layers.MaxPooling2D(pool_size=(2, 2)),
    layers.Flatten(),
    layers.Dropout(0.5),
    layers.Dense(10, activation='softmax')
])
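
Before training, the model must be compiled with an optimizer, loss, and metrics. The choices below are common defaults for a 10-class softmax output, not the only valid ones:

model.compile(
    optimizer='adam',
    loss='sparse_categorical_crossentropy',  # integer labels; use categorical_crossentropy for one-hot
    metrics=['accuracy']
)
model.summary()  # prints layer output shapes and parameter counts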

Functional API

# Multi-input model
input1 = keras.Input(shape=(32,), name='input1')
input2 = keras.Input(shape=(64,), name='input2')

x1 = layers.Dense(64, activation='relu')(input1)
x2 = layers.Dense(64, activation='relu')(input2)

# Concatenate
combined = layers.concatenate([x1, x2])
output = layers.Dense(1, activation='sigmoid')(combined)

model = keras.Model(inputs=[input1, input2], outputs=output)
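
Because the inputs are named, data can be passed to fit either as a list or as a dict keyed by input name. A sketch with hypothetical NumPy arrays data1, data2, and labels matching the shapes above:

import numpy as np

data1 = np.random.rand(1000, 32)
data2 = np.random.rand(1000, 64)
labels = np.random.randint(0, 2, size=(1000, 1))

model.compile(optimizer='adam', loss='binary_crossentropy', metrics=['accuracy'])
model.fit({'input1': data1, 'input2': data2}, labels, epochs=5, batch_size=32)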

Custom Layers

import tensorflow as tf

class MyLayer(keras.layers.Layer):
    def __init__(self, units=32):
        super().__init__()
        self.units = units

    def build(self, input_shape):
        # Weights are created lazily, once the input shape is known
        self.w = self.add_weight(
            shape=(input_shape[-1], self.units),
            initializer='random_normal',
            trainable=True
        )
        self.b = self.add_weight(
            shape=(self.units,),
            initializer='zeros',
            trainable=True
        )

    def call(self, inputs):
        return tf.matmul(inputs, self.w) + self.b
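
Once defined, the custom layer is used like any built-in layer. The feature size 16 below is arbitrary:

x = tf.ones((4, 16))       # batch of 4 vectors with 16 features
layer = MyLayer(units=32)
y = layer(x)               # build() runs on the first call
print(y.shape)             # (4, 32)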

Custom Model

class MyModel(keras.Model):
    def __init__(self):
        super().__init__()
        self.dense1 = layers.Dense(64, activation='relu')
        self.dense2 = layers.Dense(10, activation='softmax')

    def call(self, inputs):
        x = self.dense1(inputs)
        return self.dense2(x)

model = MyModel()
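
Unlike the functional API, a subclassed model has no known input shape until it is called or built, so summary() only works after that. A minimal sketch, assuming 784-dimensional inputs:

model.build(input_shape=(None, 784))  # or call the model once on real data
model.summary()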

Callbacks

# Early stopping
early_stop = keras.callbacks.EarlyStopping(
    monitor='val_loss',
    patience=5,
    restore_best_weights=True
)

# Model checkpoint
checkpoint = keras.callbacks.ModelCheckpoint(
    'best_model.h5',
    monitor='val_accuracy',
    save_best_only=True
)

# Learning rate scheduler
def scheduler(epoch, lr):
    if epoch < 10:
        return lr
    else:
        return lr * tf.math.exp(-0.1)

lr_schedule = keras.callbacks.LearningRateScheduler(scheduler)

# Custom callback
class CustomCallback(keras.callbacks.Callback):
    def on_epoch_end(self, epoch, logs=None):
        print(f"Epoch {epoch}: loss = {logs['loss']:.4f}")

# Use callbacks (monitoring val_loss/val_accuracy requires validation data)
model.fit(
    x_train, y_train,
    epochs=50,
    validation_split=0.2,
    callbacks=[early_stop, checkpoint, lr_schedule]
)
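
After training, the best checkpoint can be loaded back. Recent Keras releases favor the native .keras format over legacy HDF5, but load_model handles both:

best_model = keras.models.load_model('best_model.h5')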

Data Augmentation

data_augmentation = keras.Sequential([
    layers.RandomFlip("horizontal"),
    layers.RandomRotation(0.1),
    layers.RandomZoom(0.1),
])

# Include in model
model = keras.Sequential([
    data_augmentation,
    layers.Conv2D(32, 3, activation='relu'),
    # ... rest of model
])
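
These preprocessing layers are active only during training and act as identity at inference. Alternatively, augmentation can be applied in a tf.data pipeline; train_ds here is a hypothetical tf.data.Dataset of (image, label) pairs:

augmented_ds = train_ds.map(
    lambda x, y: (data_augmentation(x, training=True), y),
    num_parallel_calls=tf.data.AUTOTUNE
)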
