import os

import pandas as pd
import google.generativeai as genai
import gradio as gr
from google.api_core import retry

# Configure the Gemini client from the environment.
# NOTE(review): the original read "GOOGLE-API-KEY"; hyphenated names cannot be
# exported from a POSIX shell and Google's convention is GOOGLE_API_KEY, so we
# prefer the conventional name and fall back to the old one for compatibility.
my_api_key = os.environ.get("GOOGLE_API_KEY") or os.environ.get("GOOGLE-API-KEY")
genai.configure(api_key=my_api_key)

MODEL_NAME = 'gemini-1.5-flash-latest'

# Retry transient API errors: first retry after 10 s, backing off x1.5 per
# attempt, giving up after 300 s total.
retry_policy = {
    "retry": retry.Retry(
        predicate=retry.if_transient_error,
        initial=10,
        multiplier=1.5,
        timeout=300,
    )
}

model = genai.GenerativeModel(
    MODEL_NAME,
    generation_config=genai.GenerationConfig(
        temperature=1.0,
        top_p=1,
        max_output_tokens=1000,
    ),
)

# Convert the CSV dataset to JSON once at startup; the JSON text is embedded
# verbatim into the system prompt below (and persisted for inspection).
data_path = os.path.join("docs", "Nigerian_Foods.csv")
json_path = os.path.join("docs", "food_data.json")
food_data = pd.read_csv(data_path)
json_data = food_data.to_json(orient="records", lines=False, indent=4)
# Explicit encoding: don't depend on the platform's default text encoding.
with open(json_path, "w", encoding="utf-8") as json_file:
    json_file.write(json_data)

few_shot_prompt = f"""
You are an interactive recipe assistant. Use the following dataset to recommend recipes:
{json_data}
Instructions:
1. Provide recipes based on the user's query.
2. If the requested recipe is unavailable, suggest the most similar one.
3. Maintain context across multiple messages.
"""
# NOTE(review): this module-level list was always shadowed by the `history`
# parameter below and never read; kept only so any external reference to
# `history` still resolves.
history = []


def recipe_chatbot(messages: str, history: list) -> str:
    """Generate a recipe recommendation for the latest user message.

    Args:
        messages: The user's current chat message.
        history: Prior turns as supplied by Gradio (with ``type="messages"``
            this is a list of role/content dicts — TODO confirm against the
            Gradio version in use).

    Returns:
        The model's text reply.
    """
    # Serialize the turn context to plain text: generate_content accepts
    # strings (or structured Part dicts), not arbitrary dicts, so passing the
    # original `ask` dict would be rejected by the SDK. Most recent turn
    # first, matching the original's `history[::-1]`.
    ask = (
        f"current message: {messages}\n"
        f"previous messages (most recent first): {history[::-1]}"
    )
    # Do not append to `history` here — Gradio owns and maintains the chat
    # history between calls; mutating it with a bare string corrupted the
    # messages-format history.
    response = model.generate_content(
        [few_shot_prompt, ask], request_options=retry_policy
    )
    return response.text
# Wire the chatbot callback into a Gradio chat UI. type="messages" makes
# Gradio deliver history in role/content (OpenAI-style) message format.
bot = gr.ChatInterface(fn=recipe_chatbot, type="messages")
bot.launch()