import numpy as np
import pandas as pd
import tensorflow as tf
from sklearn.metrics.pairwise import cosine_similarity
from sklearn.model_selection import train_test_split
from sklearn.preprocessing import LabelEncoder
from tensorflow.keras.layers import Dense, Embedding, Flatten, Input, concatenate
from tensorflow.keras.models import Model
from tensorflow.keras.optimizers import Adam
# Check if GPU is available
gpu_available = tf.config.list_physical_devices('GPU')
print("GPUs available:", gpu_available)
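# Optionally let TensorFlow allocate GPU memory on demand instead of
# grabbing it all up front (a common setup step; a no-op on CPU-only machines)
for gpu in gpu_available:
    tf.config.experimental.set_memory_growth(gpu, True)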
# Load datasets
books = pd.read_csv("../data/datasets/books.csv")
ratings = pd.read_csv("../data/datasets/ratings.csv")
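# (Assumed schema, goodbooks-10k style: ratings.csv has user_id, book_id,
# and rating; books.csv has book_id, title, and authors. Adjust if yours differs.)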
# Preprocess data
user_encoder = LabelEncoder()
book_encoder = LabelEncoder()
ratings["user_id"] = user_encoder.fit_transform(ratings["user_id"])
ratings["book_id"] = book_encoder.fit_transform(ratings["book_id"])
# Ensure all book IDs are included
all_books = np.arange(len(books))
# Define the neural network model
def build_model(num_users, num_books, embedding_size=50):
    """
    Build a collaborative filtering recommendation model.

    Args:
        num_users (int): The number of users in the dataset.
        num_books (int): The number of books in the dataset.
        embedding_size (int, optional): The size of the embedding vectors. Defaults to 50.

    Returns:
        keras.Model: The compiled recommendation model.
    """
    user_input = Input(shape=(1,))
    book_input = Input(shape=(1,))
    user_embedding = Embedding(input_dim=num_users, output_dim=embedding_size)(user_input)
    book_embedding = Embedding(input_dim=num_books, output_dim=embedding_size)(book_input)
    user_flat = Flatten()(user_embedding)
    book_flat = Flatten()(book_embedding)
    merged = concatenate([user_flat, book_flat])
    dense1 = Dense(128, activation="relu")(merged)
    output = Dense(1)(dense1)
    model = Model(inputs=[user_input, book_input], outputs=output)
    model.compile(loss="mean_squared_error", optimizer=Adam(learning_rate=0.001))
    return model
# Hold out a test set so evaluation below is not on the training data
train_df, test_df = train_test_split(ratings, test_size=0.2, random_state=42)

# Train the collaborative filtering model, on GPU when one is available
device = "/GPU:0" if gpu_available else "/CPU:0"
with tf.device(device):
    model_cf = build_model(num_users=ratings["user_id"].nunique(),
                           num_books=len(books))
    model_cf.summary()  # Display model summary
    history = model_cf.fit([train_df["user_id"], train_df["book_id"]],
                           train_df["rating"],
                           epochs=5,
                           batch_size=128,
                           validation_split=0.1)
# Save the collaborative filtering model
model_cf.save("recommendation_model.keras")
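# Optional round-trip check: confirm the saved .keras file loads cleanly
# (a sketch; assumes no custom layers/objects that would need registering)
reloaded_model = tf.keras.models.load_model("recommendation_model.keras")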
# Evaluate the collaborative filtering model on the held-out test set
test_loss = model_cf.evaluate([test_df["user_id"], test_df["book_id"]], test_df["rating"])
print(f"Collaborative Filtering Test Loss: {test_loss:.4f}")
# Test the recommendation functions
user_id = 0 # Example user ID
book_name = "The Great Gatsby" # Example book name
print("Content-Based Recommendation:")
print(content_based_recommendation(book_name, books))
print("\nModel-Recommended History-Based Recommendation:")
print(history_based_recommendation(user_id, model_cf, ratings))
print("\nHybrid Recommendation:")
print(hybrid_recommendation(user_id, book_name, model_cf, books, ratings))