# ==============================
# Explainable Recommendation System
# Group 15 Project
# ==============================
import os
import json
import numpy as np
from fastapi import FastAPI
from pydantic import BaseModel
from sklearn.metrics.pairwise import cosine_similarity
from openai import OpenAI

# ------------------------------
# Initialize OpenAI client
# ------------------------------
# NOTE(review): if OPENAI_API_KEY is unset this yields an unusable client and
# every embedding call below will fail — confirm deployment sets the variable.
client = OpenAI(api_key=os.getenv("OPENAI_API_KEY"))

# ------------------------------
# Small Local Dataset
# ------------------------------
# Each item later gains an "embedding" key (np.ndarray) in the precompute step.
dataset = [
    {
        "id": 1,
        "title": "Introduction to Machine Learning",
        "description": "Learn supervised and unsupervised learning, regression, and classification.",
        "tags": ["machine learning", "ai", "beginner"],
    },
    {
        "id": 2,
        "title": "Deep Learning with Neural Networks",
        "description": "Advanced deep learning concepts including CNNs and RNNs.",
        "tags": ["deep learning", "neural networks", "ai"],
    },
    {
        "id": 3,
        "title": "Data Science with Python",
        "description": "Data analysis, visualization, and machine learning using Python.",
        "tags": ["python", "data science"],
    },
    {
        "id": 4,
        "title": "Natural Language Processing",
        "description": "Text processing, embeddings, and transformer models.",
        "tags": ["nlp", "transformers", "ai"],
    },
]


# ------------------------------
# Generate Embedding
# ------------------------------
def get_embedding(text: str) -> np.ndarray:
    """Return the OpenAI embedding vector for *text* as a 1-D numpy array.

    Uses the "text-embedding-3-small" model; performs one network call per
    invocation.
    """
    response = client.embeddings.create(
        model="text-embedding-3-small",
        input=text,
    )
    return np.array(response.data[0].embedding)


# ------------------------------
# Precompute Dataset Embeddings
# ------------------------------
# NOTE(review): this runs at import time and makes one API call per item —
# module import will block on the network and fail if the API is unreachable.
for item in dataset:
    combined_text = (
        item["title"] + " " + item["description"] + " " + " ".join(item["tags"])
    )
    item["embedding"] = get_embedding(combined_text)


# ------------------------------
# Recommendation Function
# ------------------------------
def recommend(user_query: str, top_k: int = 2):
    """Return the *top_k* dataset items most similar to *user_query*.

    Returns a list of (item, score) pairs sorted by descending cosine
    similarity between the query embedding and each item's precomputed
    embedding.
    """
    query_embedding = get_embedding(user_query)
    # One vectorized cosine_similarity call against all item embeddings
    # instead of one call per item — identical scores, fewer passes.
    item_matrix = np.stack([item["embedding"] for item in dataset])
    scores = cosine_similarity([query_embedding], item_matrix)[0]
    ranked = sorted(zip(dataset, scores), key=lambda pair: pair[1], reverse=True)
    return ranked[:top_k]


# ------------------------------
# LLM Explanation Function
# ------------------------------
def generate_explanation(user_query, recommended_items):
    """Ask the chat model to explain why *recommended_items* fit *user_query*.

    *recommended_items* is a list of (item, score) pairs as returned by
    ``recommend``; returns the model's explanation text.
    """
    items_text = "\n".join(
        [f"- {item['title']}: {item['description']}" for item, score in recommended_items]
    )
    prompt = f"""
User interest: {user_query}

Recommended items:
{items_text}

Explain clearly why these recommendations match the user's interest.
Make it personalized and easy to understand.
"""
    response = client.chat.completions.create(
        model="gpt-4o-mini",
        messages=[{"role": "user", "content": prompt}],
    )
    return response.choices[0].message.content


# ------------------------------
# FastAPI App
# ------------------------------
app = FastAPI(title="Explainable Recommendation System")


class QueryRequest(BaseModel):
    # Free-text description of the user's interest.
    user_query: str
    # How many recommendations to return; optional, defaults to the previous
    # hard-coded value so existing clients are unaffected.
    top_k: int = 2


@app.post("/recommend")
def get_recommendation(request: QueryRequest):
    """Recommend items for the query and attach an LLM-generated explanation."""
    recommended = recommend(request.user_query, request.top_k)
    explanation = generate_explanation(request.user_query, recommended)
    return {
        "query": request.user_query,
        "recommendations": [
            {
                "title": item["title"],
                "description": item["description"],
                # np.float64 -> plain float so the JSON response serializes cleanly.
                "score": float(score),
            }
            for item, score in recommended
        ],
        "explanation": explanation,
    }


# ------------------------------
# Root Endpoint
# ------------------------------
@app.get("/")
def home():
    """Health-check endpoint confirming the service is up."""
    return {"message": "Explainable Recommendation System is running 🚀"}