# Spaces: Runtime error — status banner captured from the Hugging Face Space page.
import os

import gradio as gr
from groq import Groq

# Set up the Groq client.
# SECURITY: an API key was previously hard-coded here. A secret committed to
# source control is compromised and must be revoked; read it from the
# environment instead (set GROQ_API_KEY before launching the app).
client = Groq(api_key=os.environ.get("GROQ_API_KEY", ""))

# System prompt steering the assistant toward short, budget-focused marketing
# strategies for Google and Meta ad campaigns, with budgets in Indian rupees.
# (Text preserved verbatim — it is sent to the model at runtime.)
system_prompt = """You are a helpful, respectful and professional assistant.
the conversation should be shorter.
Your task is to assist a marketing team in getting the budget and providing market strategies according to the budget and the platforms they're running ads on.
The platforms include Google and Meta.
You should consider the budget, the target audience, the goals of the campaign, and the strengths and weaknesses of each platform when providing market strategies.
the content should be optimized and summerized.
make the budget in Indian ruppes."""

# Running conversation history.
# NOTE(review): this is module-level state, so every user/session of the app
# shares one history — acceptable for a single-user demo only.
conversation_history = []
def handle_message(user_message, history=None):
    """Chat callback for ``gr.ChatInterface``.

    Parameters
    ----------
    user_message : str
        The latest message typed by the user.
    history : list, optional
        Chat history supplied by Gradio. It is ignored here: the module-level
        ``conversation_history`` is used instead, so state is shared across
        all sessions. (Previously this parameter was misleadingly named
        ``iface``, shadowing the module-level interface object.)

    Returns
    -------
    str
        The assistant's reply.
    """
    # Record the user's turn.
    conversation_history.append({"role": "user", "content": user_message})

    # Query the model, prepending the system prompt to the running history.
    chat_completion = client.chat.completions.create(
        messages=[
            {
                "role": "system",
                "content": system_prompt,
            },
            *conversation_history,
        ],
        model="llama3-8b-8192",
    )

    # Extract the reply once (the original indexed choices[0] twice),
    # record the assistant's turn, and return it.
    reply = chat_completion.choices[0].message.content
    conversation_history.append({"role": "assistant", "content": reply})
    return reply
# Wire the handler into a Gradio chat UI and serve it with a public
# share link.
iface = gr.ChatInterface(fn=handle_message)
iface.launch(share=True)