Spaces:
Sleeping
Sleeping
# NOTE(review): three module-level triple-quoted strings containing earlier
# drafts of this script (an initial version, a broken revision, and a
# commented-out variant with its own __main__ block) were removed here.
# Commented-out code belongs in version-control history, not in the module;
# keeping it as top-level string literals also wastes memory at import time.
import os

from dotenv import load_dotenv
from openai import OpenAI

from prompts import ai_motivational_speaker

# Pull GEMINI_API_KEY (and any other settings) from a local .env file.
load_dotenv()

api_key = os.getenv("GEMINI_API_KEY")
model = "gemini-2.5-flash-lite"
# Gemini exposes an OpenAI-compatible endpoint, which lets us reuse the
# standard OpenAI client instead of a Gemini-specific SDK.
base_url = "https://generativelanguage.googleapis.com/v1beta/openai/"
client = OpenAI(api_key=api_key, base_url=base_url)
def get_response(message, history):
    """Generate a motivational-speaker reply to *message*.

    Args:
        message: The current user message (plain text).
        history: Prior conversation turns as a list of
            ``{"role": ..., "content": ...}`` dicts in OpenAI chat format.

    Returns:
        The assistant's reply text extracted from the first completion choice.
    """
    # Order matters: system persona first, then prior turns, then the new
    # user message — this is the shape the chat completions API expects.
    messages = [{"role": "system", "content": ai_motivational_speaker}]
    messages.extend(history)
    messages.append({"role": "user", "content": message})
    response = client.chat.completions.create(
        model=model,
        messages=messages,
    )
    # PEP 8: locals are snake_case (was `Ai_response`).
    ai_response = response.choices[0].message.content
    return ai_response
if __name__ == "__main__":
    # Manual smoke test. The __main__ guard keeps `import`-ing this module
    # side-effect free, so restoring it (it was commented out) does not
    # affect any caller that imports get_response.
    print(get_response("Hello, Caramel AI! Can you tell me what AI is?", []))