# Hugging Face Space export — page banner captured during scraping:
# "Spaces: Runtime error" (the Space was in a runtime-error state when saved).
import pandas as pd
import numpy as np
import gradio as gr
from sklearn.metrics.pairwise import euclidean_distances
import openai

# --- OpenAI API key ---
# NOTE(review): hard-coding secrets is unsafe; prefer reading it from an
# environment variable (e.g. os.environ["OPENAI_API_KEY"]).
openai.api_key = "YOUR_API_KEY"  # replace with your key

# --- Load the song dataset ---
# Expected columns: 'song', 'artist', 'bpm', 'nrgy', 'dnce', 'dB', 'live',
# 'val', 'dur', 'acous', 'spch', 'pop'
df = pd.read_csv("datalab_export_2025-08-11 14_16_35.csv")

# Numeric audio-feature columns used for the similarity search.
feature_cols = ['bpm', 'nrgy', 'dnce', 'dB', 'live', 'val', 'dur', 'acous', 'spch', 'pop']
df_features = df[feature_cols].astype(float)
# --- Song recommendation function ---
def recommend_song(user_input):
    """Recommend up to five songs similar to a song/artist found in *user_input*.

    Scans the dataset for the first row whose song title or artist name
    appears (case-insensitively) as a substring of the user's message.
    If a row matches, its audio features form the query point; otherwise
    the dataset's mean feature vector is used.  Returns a newline-joined
    list of "artist - song" lines for the five nearest rows by Euclidean
    distance in feature space (the matched song itself may appear first,
    since its distance to itself is zero).
    """
    user_input_lower = user_input.lower()

    # Find the first song or artist mentioned in the message, if any.
    # str() guards against NaN / non-string cells in the CSV, which would
    # otherwise raise AttributeError on .lower().
    matched_row = None
    for _, row in df.iterrows():
        if (str(row['song']).lower() in user_input_lower
                or str(row['artist']).lower() in user_input_lower):
            matched_row = row
            break

    # Query vector: the matched song's features, else the dataset mean.
    if matched_row is not None:
        user_features = df_features.loc[matched_row.name].values.reshape(1, -1)
    else:
        user_features = df_features.mean().values.reshape(1, -1)

    # Rank all songs by distance to the query point and keep the closest five.
    distances = euclidean_distances(df_features.values, user_features)
    top5_idx = np.argsort(distances.flatten())[:5]
    top5 = df.iloc[top5_idx][['artist', 'song']]
    return "\n".join(f"{row['artist']} - {row['song']}" for _, row in top5.iterrows())
# --- Chat model using OpenAI GPT with memory ---
def chat_model(user_input, chat_history):
    """Generate a conversational reply with the OpenAI chat completion API.

    *chat_history* is a list of (user_msg, bot_msg) pairs that is replayed
    before the current message so the model has conversational context.
    Returns the model's reply text, or an "Error contacting LLM: ..." string
    if the API call fails (keeps the chat UI alive instead of crashing).
    """
    try:
        messages = [{"role": "system", "content": "You are a friendly chatbot."}]
        # Replay previous turns for context.
        for user_msg, bot_msg in chat_history:
            messages.append({"role": "user", "content": user_msg})
            messages.append({"role": "assistant", "content": bot_msg})
        # Add the current user input.
        messages.append({"role": "user", "content": user_input})

        # NOTE(review): openai.ChatCompletion is the pre-1.0 SDK interface;
        # with openai>=1.0 this must become OpenAI().chat.completions.create.
        response = openai.ChatCompletion.create(
            model="gpt-4o-mini",
            messages=messages,
            temperature=0.7,
        )
        return response['choices'][0]['message']['content']
    except Exception as e:
        return f"Error contacting LLM: {e}"
# --- Main chat handler ---
def respond(message, chat_history):
    """Route one chat turn for gr.ChatInterface.

    Messages containing music-related keywords go to the dataset
    recommender; everything else goes to the LLM.  Returns the reply
    string only: gr.ChatInterface expects ``fn(message, history) -> str``
    and manages the history itself, so the previous ``(reply, history)``
    tuple return made the UI display the raw tuple, and appending to
    *chat_history* duplicated turns in gradio's own record.
    """
    input_lower = message.lower()
    if any(keyword in input_lower for keyword in ("recommend", "song", "music", "suggest")):
        return recommend_song(message)
    return chat_model(message, chat_history)
# --- Gradio ChatInterface ---
# Wires the respond() handler into a ready-made chat UI; gr.ChatInterface
# supplies the textbox, chatbot display, and history management.
demo = gr.ChatInterface(
    fn=respond,
    title="Smart Music Chatbot",
    description="Chat normally with the AI, or ask for song recommendations.",
)

if __name__ == "__main__":
    demo.launch()