
https://youtu.be/DGYsJVXTik0?si=S7gVSC1SHrDdEGZ8

Regression models:

Import Libraries

import tensorflow as tf
from tensorflow.keras import models, layers

Load and Preprocess Data

# The Boston housing dataset was removed from recent scikit-learn releases,
# so load it from Keras instead; it comes pre-split into train and test sets
(X, y), (X_test, y_test) = tf.keras.datasets.boston_housing.load_data()
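
The Boston features span very different ranges, so a standardization step (an assumed addition, not in the original walkthrough) typically helps the dense network converge:

# Standardize features using training-set statistics only
mean = X.mean(axis=0)
std = X.std(axis=0)
X = (X - mean) / std
X_test = (X_test - mean) / std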

Define the Model Architecture

model = models.Sequential([
    layers.Dense(64, activation='relu', input_shape=(X.shape[1],)),
    layers.Dense(64, activation='relu'),
    layers.Dense(1)  # Output layer for regression (1 neuron)
])
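
To double-check the architecture before training, Keras can print the layer shapes and parameter counts:

model.summary()  # prints each layer's output shape and parameter count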

Compile the Model

model.compile(optimizer='adam',
              loss='mean_squared_error')  # Use mean squared error for regression

Train the Model

history = model.fit(X, y, epochs=100, validation_split=0.2)
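
Optionally, the same training call can take an EarlyStopping callback so training halts once the validation loss stops improving; this is an assumed addition, not part of the original steps:

early_stop = tf.keras.callbacks.EarlyStopping(monitor='val_loss', patience=10,
                                              restore_best_weights=True)
history = model.fit(X, y, epochs=100, validation_split=0.2,
                    callbacks=[early_stop])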

Evaluate the Model

# Evaluate on the held-out test set (model.evaluate is a single pass, not cross-validation)
test_mse = model.evaluate(X_test, y_test, verbose=0)
print("Test Mean Squared Error:", test_mse)

Visualize Training History (Optional)

import matplotlib.pyplot as plt

plt.plot(history.history['loss'], label='training loss')
plt.plot(history.history['val_loss'], label='validation loss')
plt.xlabel('Epoch')
plt.ylabel('Loss')
plt.legend()
plt.show()

Classification models:

Import Libraries

import tensorflow as tf
from tensorflow.keras import datasets, layers, models

Load and Preprocess Data

# Example: Load the MNIST dataset and scale pixel values to [0, 1]
(train_images, train_labels), (test_images, test_labels) = datasets.mnist.load_data()
train_images, test_images = train_images / 255.0, test_images / 255.0

Define the Model Architecture

model = models.Sequential([
    layers.Flatten(input_shape=(28, 28)),
    layers.Dense(128, activation='relu'),
    layers.Dropout(0.2),
    layers.Dense(10, activation='softmax')
])

Compile the Model

model.compile(optimizer='adam',
              loss='sparse_categorical_crossentropy',
              metrics=['accuracy'])
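
A common alternative (a hedged variant, not the walkthrough's own choice) is to leave the activation off the final Dense layer and let the loss work on raw logits, which is slightly more numerically stable:

# Only valid if the last layer is layers.Dense(10) with no softmax activation
model.compile(optimizer='adam',
              loss=tf.keras.losses.SparseCategoricalCrossentropy(from_logits=True),
              metrics=['accuracy'])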

Train the Model

history = model.fit(train_images, train_labels, epochs=10, validation_data=(test_images, test_labels))

Evaluate the Model

test_loss, test_acc = model.evaluate(test_images, test_labels)
print('Test accuracy:', test_acc)
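
To turn the softmax outputs into digit predictions (an illustrative addition, not an original step), take the argmax over the ten class probabilities:

import numpy as np

predictions = model.predict(test_images[:5])       # shape (5, 10), one row of probabilities per image
predicted_digits = np.argmax(predictions, axis=1)
print("Predicted digits:", predicted_digits)
print("True digits:     ", test_labels[:5])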


Visualize Training History

import matplotlib.pyplot as plt

plt.plot(history.history['accuracy'], label='accuracy')
plt.plot(history.history['val_accuracy'], label='val_accuracy')
plt.xlabel('Epoch')
plt.ylabel('Accuracy')
plt.legend()
plt.show()

Image classification models:

Import Libraries

import tensorflow as tf
from tensorflow.keras import datasets, layers, models

Load and Preprocess Data

# Example: Load the CIFAR-10 dataset and scale pixel values to [0, 1]
(train_images, train_labels), (test_images, test_labels) = datasets.cifar10.load_data()
train_images, test_images = train_images / 255.0, test_images / 255.0

Define the Model Architecture

model = models.Sequential([
    layers.Conv2D(32, (3, 3), activation='relu', input_shape=(32, 32, 3)),
    layers.MaxPooling2D((2, 2)),
    layers.Conv2D(64, (3, 3), activation='relu'),
    layers.MaxPooling2D((2, 2)),
    layers.Conv2D(64, (3, 3), activation='relu'),
    layers.Flatten(),
    layers.Dense(64, activation='relu'),
    layers.Dense(10, activation='softmax')
])

Compile the Model

model.compile(optimizer='adam',
              loss='sparse_categorical_crossentropy',
              metrics=['accuracy'])

Train the Model

history = model.fit(train_images, train_labels, epochs=10, validation_data=(test_images, test_labels))
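
CIFAR-10 accuracy often improves with simple data augmentation; a hedged sketch (not part of the original walkthrough) using Keras preprocessing layers that could be placed in front of the first Conv2D layer:

# Random horizontal flips and small rotations, active only during training
data_augmentation = models.Sequential([
    layers.RandomFlip('horizontal', input_shape=(32, 32, 3)),
    layers.RandomRotation(0.1),
])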

Evaluate the Model

test_loss, test_acc = model.evaluate(test_images, test_labels)
print('Test accuracy:', test_acc)
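
The same argmax step maps CIFAR-10 outputs back to human-readable labels; the class_names list below assumes the standard CIFAR-10 class ordering:

import numpy as np

class_names = ['airplane', 'automobile', 'bird', 'cat', 'deer',
               'dog', 'frog', 'horse', 'ship', 'truck']
predictions = model.predict(test_images[:5])
predicted_classes = np.argmax(predictions, axis=1)
print("Predicted:", [class_names[i] for i in predicted_classes])
print("Actual:   ", [class_names[int(label)] for label in test_labels[:5].flatten()])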

Visualize Training History

import matplotlib.pyplot as plt

plt.plot(history.history['accuracy'], label='accuracy')
plt.plot(history.history['val_accuracy'], label='val_accuracy')
plt.xlabel('Epoch')
plt.ylabel('Accuracy')
plt.legend()
plt.show()
