# NOTE(review): the three lines below were Hugging Face Spaces page residue
# captured during copy-paste ("Spaces: / Sleeping / Sleeping") — not program
# code. Preserved here as a comment so the file parses.
import os

import streamlit as st
import google.generativeai as genai

# Load the API key: prefer the environment variable, then fall back to
# Streamlit secrets. The original passed st.secrets.get(...) as the
# os.getenv() default, which evaluated it eagerly even when the env var
# was set — and st.secrets.get() raises FileNotFoundError when no
# secrets.toml exists. Guard the fallback instead.
api_key = os.getenv("GEMINI_API_KEY")
if not api_key:
    try:
        api_key = st.secrets.get("GEMINI_API_KEY")
    except Exception:
        api_key = None

# Stop the app early with a clear message when no key is available.
if not api_key:
    st.error("⚠️ API key is missing! Set GEMINI_API_KEY as an environment variable or in Streamlit Secrets.")
    st.stop()

# Configure the Gemini client once at startup.
genai.configure(api_key=api_key)
def generate_text(prompt: str, model: str, max_tokens: int, temperature: float) -> str:
    """Generate a text completion from the Gemini API.

    Args:
        prompt: The user's prompt text.
        model: Gemini model name (e.g. "gemini-pro").
        max_tokens: Upper bound on generated output tokens.
        temperature: Sampling temperature (this app passes 0.1–1.0).

    Returns:
        The generated text, or a human-readable warning/error string.
        Errors are returned rather than raised so the Streamlit UI can
        simply display whatever comes back.
    """
    try:
        # Use a distinct name for the client so the `model` (name)
        # parameter is not shadowed.
        client = genai.GenerativeModel(model)
        response = client.generate_content(
            prompt,
            generation_config={
                "max_output_tokens": max_tokens,
                "temperature": temperature,
            },
        )
        # Pull the text of the first candidate when one exists.
        if response and response.candidates:
            return response.candidates[0].content.parts[0].text
        return "⚠️ No valid response from Gemini AI."
    except Exception as e:
        # Best-effort boundary: surface any API/SDK failure in the UI
        # instead of crashing the app.
        return f"❌ Error: {str(e)}"
# Streamlit page configuration.
st.set_page_config(page_title="Gemini AI Text Generator", layout="centered")

# Title & description.
st.title("Creative AI Assistant")
st.write("Generate AI-powered text by entering a prompt and customizing the settings.")

# Prompt input.
user_input = st.text_area(
    "📝 Enter Your Prompt:",
    height=150,
    placeholder="Example: Write a formal article about climate change.",
)

# Generation settings: the 1-10 slider maps to a 0.1-1.0 temperature.
temperature = st.slider("🎨 Creativity Level (1-10)", min_value=1, max_value=10, value=5) / 10
max_tokens = st.slider("📏 Max Tokens (Word Length)", min_value=50, max_value=1000, value=300, step=50)

# Model selection.
# NOTE(review): "gemini-pro-code" does not appear to be a published Gemini
# model name — confirm against the current Google AI model list.
model = st.selectbox("🤖 Choose AI Model", ["gemini-pro", "gemini-pro-code"])
# Generate on click; refuse empty/whitespace-only prompts with a warning.
if st.button("🚀 Generate Text"):
    if user_input.strip():
        with st.spinner("✨ Generating response..."):
            result = generate_text(user_input, model, max_tokens, temperature)
        st.subheader("🎯 Generated Response")
        st.write(result)
    else:
        st.warning("⚠️ Please enter a prompt before generating text.")

# Footer.
st.markdown("---")
st.caption("🔹 *Powered by Gemini AI* | 🔹 *Developed with Streamlit*")