Réseaux de neurones (Keras)

# Third-party libraries used throughout this notebook.
import sklearn

print(f"scikit-learn version: {sklearn.__version__}")

# Toy 2-D classification datasets for experimentation.
from sklearn.datasets import make_moons, make_circles

import tensorflow as tf

print(f"TensorFlow version: {tf.__version__}")
print(f"Keras version: {tf.keras.__version__}")

# Keras building blocks: model container, layer types, optimizers,
# bundled datasets, one-hot encoding helper, and weight regularizers.
from tensorflow.keras.models import Sequential
from tensorflow.keras.layers import Dense, Dropout, Flatten
from tensorflow.keras.optimizers import SGD, Adam, Adagrad
from tensorflow.keras.datasets import mnist, imdb
from tensorflow.keras.utils import to_categorical
from tensorflow.keras import regularizers
Copy to clipboard
scikit-learn version: 0.23.2
Copy to clipboard
TensorFlow version: 2.3.1
Keras version: 2.4.0
Copy to clipboard

Créer un modèle

# NOTE(review): re-import of an earlier cell's names, kept as in the notebook.
from sklearn.datasets import make_moons, make_circles
# Generate a 2-D "two interleaving half-moons" binary dataset:
# 1000 samples, slight Gaussian noise, fixed seed for reproducibility.
x_train, y_train = make_moons(n_samples=1000, noise=0.10, random_state=0)
Copy to clipboard
# Create a new neural network as a linear stack of layers (other architectures exist)
model = Sequential()

# Common activation functions, for reference: relu, tanh, sigmoid, softmax.
# (The original cell kept them in an unused list; turned into a comment.)

# Add a 3-neuron hidden layer using tanh as activation function.
# input_shape=(2,) matches the two features produced by make_moons above.
model.add(Dense(3, activation="tanh", input_shape=(2,)))
# Randomly zero 25% of the hidden activations during training (regularization).
model.add(Dropout(0.25))
# Single sigmoid output unit: probability of the positive class (binary task).
model.add(Dense(1, activation="sigmoid"))

# Describe the model
model.summary()
Copy to clipboard
Model: "sequential"
_________________________________________________________________
Layer (type)                 Output Shape              Param #   
=================================================================
dense (Dense)                (None, 3)                 9         
_________________________________________________________________
dropout (Dropout)            (None, 3)                 0         
_________________________________________________________________
dense_1 (Dense)              (None, 1)                 4         
=================================================================
Total params: 13
Trainable params: 13
Non-trainable params: 0
_________________________________________________________________
Copy to clipboard

Optimisation

# Candidate optimizers — each assignment overrides the previous one, so the
# last (Adam) is the one actually used by model.compile below.
optimizer = SGD(learning_rate=0.01)
optimizer = Adagrad(learning_rate=0.001)  # fixed typo: was "otpimizer"
optimizer = Adam(learning_rate=0.001)
Copy to clipboard
# Loss function — the last assignment wins. 'categorical_crossentropy'
# expects one-hot multiclass targets; the model above ends in a single
# sigmoid unit with 0/1 labels, so 'binary_crossentropy' must be the value
# left in effect (the original order made categorical effective, which
# matches the degenerate recorded output: loss 0.0, accuracy 0.285).
loss = 'categorical_crossentropy'
loss = 'binary_crossentropy'
Copy to clipboard
# Metric(s) reported during training/evaluation in addition to the loss value.
metrics = ['accuracy']
Copy to clipboard
# Configure the learning process with the optimizer, loss and metrics
# chosen in the cells above.
model.compile(
    optimizer=optimizer, loss=loss, metrics=metrics
)

# Launch the training of the network on the data
# 40 passes over the dataset in mini-batches of 32 samples; verbose=0
# silences the per-epoch progress bar. `history` records per-epoch
# loss/metric values.
history = model.fit(x_train, y_train, verbose=0, epochs=40, batch_size=32)
Copy to clipboard
# Compute the loss & metrics values for the trained network
# NOTE: this rebinds the name `loss` (previously the loss-function string)
# to the numeric training-loss value.
loss, acc = model.evaluate(x_train, y_train, verbose=0)

print(f"Training loss: {loss:.05f}")
print(f"Training accuracy: {acc:.05f}")
Copy to clipboard
Training loss: 0.00000
Training accuracy: 0.28500
Copy to clipboard
# Saving model for future use
# Serialize the architecture (layer types, shapes, activations) to JSON...
model_json = model.to_json()

with open("model.json", "w") as json_file:
    json_file.write(model_json)

# ...and save the learned weights separately, in HDF5 format.
model.save_weights("model.h5")
Copy to clipboard