# Gradio Space: chat with Google Gemini (text + optional image upload)
import gradio as gr
import google.generativeai as genai
from PIL import Image
import os
# Configure the Gemini API.
# SECURITY FIX: the original code passed a literal API key as the *name* of an
# environment variable -- os.environ.get("AIzaSy...") -- which both leaked the
# secret in source control and always returned None (no env var has that name).
# Read the key from a properly named environment variable instead; the leaked
# key itself must be rotated/revoked.
genai.configure(api_key=os.environ.get("GEMINI_API_KEY"))

# Set up the model: gemini-pro-vision accepts both text and image parts.
model = genai.GenerativeModel('gemini-pro-vision')
def chat_with_gemini(history, user_message, image):
    """Run one chat turn against Gemini and record it in the history.

    history: list of (user, bot) message tuples, or None/empty on the first turn.
    user_message: the text the user typed.
    image: optional PIL image to send alongside the text.
    Returns the updated history twice (one copy for the chat display,
    one for the state input of the next turn).
    """
    history = history or []
    # Multimodal request when an image accompanies the message; plain text otherwise.
    payload = [user_message, image] if image is not None else user_message
    response = model.generate_content(payload)
    history.append((user_message, response.text))
    return history, history
def clear_conversation():
    """Reset the chat display.

    Gradio interprets a None output as "reset this component to its
    default (empty) state", which empties the Chatbot.
    """
    return None
# Define the Gradio interface.
with gr.Blocks() as demo:
    chatbot = gr.Chatbot(label="Chat with Gemini")
    msg = gr.Textbox(label="Type your message here")
    clear = gr.Button("Clear")
    image_upload = gr.Image(type="pil", label="Upload an image (optional)")
    # FIX: keep the conversation history in a dedicated State component.
    # The original wiring used the Chatbot both as the history input and as a
    # *duplicated* output (outputs=[chatbot, chatbot]); chat_with_gemini's two
    # return values map cleanly onto (display, state) instead.
    state = gr.State([])
    msg.submit(chat_with_gemini, [state, msg, image_upload], [chatbot, state])
    # Clear the visible chat...
    clear.click(clear_conversation, outputs=[chatbot])
    # ...and also reset the stored history, so the next turn starts fresh
    # (the original left stale history in the input component).
    clear.click(lambda: [], outputs=[state])

# Launch the app (Hugging Face Spaces runs this module as __main__).
if __name__ == "__main__":
    demo.launch()
# Requirements for Hugging Face Spaces
# requirements.txt
'''
gradio==3.50.2
google-generativeai==0.3.1
Pillow==10.0.0
'''