# AI Council — a Streamlit app (deployed as a Hugging Face Space).
import concurrent.futures
import os

import google.generativeai as genai
import streamlit as st
from groq import Groq

# --- CONFIGURATION ---
st.set_page_config(page_title="AI Council", page_icon="π€", layout="wide")

# A little CSS so the text area, buttons and expanders look clean.
st.markdown(
    """
<style>
.stTextArea textarea { font-size: 16px; }
.stButton button { height: 50px; font-size: 18px; }
div[data-testid="stExpander"] details { border: 1px solid #ddd; border-radius: 8px; }
</style>
""",
    unsafe_allow_html=True,
)
# --- LOGIN SYSTEM ---
# Gates the app behind a shared password so random internet strangers
# cannot burn the owner's API quota. The password comes from the
# FRIEND_PASS secret (Hugging Face Secrets or a local .env).
if "authenticated" not in st.session_state:
    # Dev mode: if no password is configured, let everyone in immediately.
    # (Previously the gate still showed on first load even with no password
    # set, because check_password only runs on_change of the text input.)
    st.session_state.authenticated = not os.environ.get("FRIEND_PASS")

def check_password():
    """on_change callback: compare the typed password against FRIEND_PASS."""
    stored_password = os.environ.get("FRIEND_PASS")
    # If no password is set in settings, we let everyone in (dev mode).
    if not stored_password:
        st.session_state.authenticated = True
        return
    if st.session_state.password_input == stored_password:
        st.session_state.authenticated = True
    else:
        st.error("Wrong password!")

if not st.session_state.authenticated:
    st.title("π Council Access")
    st.markdown("This tool is exclusive to friends.")
    st.text_input("Enter Password", type="password", key="password_input", on_change=check_password)
    st.stop()  # Halt the script here until the user logs in.
# --- APP STARTS HERE (Only for friends) ---
# Pull the provider API keys out of the environment (never shown to users).
GOOGLE_KEY = os.environ.get("GOOGLE_API_KEY")
GROQ_KEY = os.environ.get("GROQ_API_KEY")

if not (GOOGLE_KEY and GROQ_KEY):
    st.error("π¨ System Error: The owner needs to add API Keys in Settings > Secrets.")
    st.stop()

# Wire up both provider clients; abort with a visible error if either fails.
try:
    genai.configure(api_key=GOOGLE_KEY)
    groq_client = Groq(api_key=GROQ_KEY)
except Exception as e:
    st.error(f"Connection Error: {e}")
    st.stop()
# --- COUNCIL MEMBERS (Smartest Free Config) ---
# Each council member: display name, provider-specific model id, and which
# backend ("google" or "groq") serves the request.
MEMBERS = [
    # Google Flash: guaranteed working & fast.
    {"name": "Gemini 1.5 Flash", "id": "gemini-1.5-flash-001", "provider": "google"},
    # Meta via Groq: the big reasoning model.
    {"name": "Llama 3.3 70B", "id": "llama-3.3-70b-versatile", "provider": "groq"},
    # Meta via Groq: fast small model (replacement for the retired Mixtral).
    {"name": "Llama 3.1 8B", "id": "llama-3.1-8b-instant", "provider": "groq"},
]

# The chairman writes the final synthesis; must be a working Google model.
CHAIRMAN_ID = "gemini-1.5-flash"
# --- HELPER FUNCTIONS ---
def query_google(model_id, prompt):
    """Send *prompt* to the Google model *model_id*.

    Returns the reply text, or a human-readable error string — never raises.
    (.text stays inside the try: genai can raise on blocked/empty responses.)
    """
    try:
        model = genai.GenerativeModel(model_id)
        return model.generate_content(prompt).text
    except Exception as exc:
        return f"Error (Google): {str(exc)}"
def query_groq(model_id, prompt):
    """Send *prompt* to a Groq-hosted model *model_id*.

    Returns the reply text, or a human-readable error string — never raises.
    """
    request = {
        "model": model_id,
        "messages": [{"role": "user", "content": prompt}],
        "temperature": 0.7,
        "max_tokens": 2000,
    }
    try:
        chat = groq_client.chat.completions.create(**request)
        return chat.choices[0].message.content
    except Exception as exc:
        return f"Error (Groq): {str(exc)}"
def ask_member(member, prompt):
    """Route *prompt* to the backend named in member["provider"].

    Returns the model's answer (or an error string from the query helper).
    An unrecognized provider now yields an explicit error string instead of
    silently falling through and returning None (which rendered as a blank
    panel in the UI).
    """
    provider = member["provider"]
    if provider == "google":
        return query_google(member["id"], prompt)
    if provider == "groq":
        return query_groq(member["id"], prompt)
    return f"Error: unknown provider '{provider}' ({member.get('name', member['id'])})"
# --- MAIN INTERFACE ---
st.title("π€ The AI Council")
st.markdown(
    "Ask a question. **Google** and **Meta** models will debate it to find the truth."
)

# NOTE: user_query / start_btn are read by the pipeline below — keep names.
user_query = st.text_area(
    "What do you want to ask?",
    placeholder="e.g. How do I start a business with $0?",
)
start_btn = st.button("Start Debate", type="primary")
# --- DEBATE PIPELINE ---
if start_btn and user_query:
    # STAGE 1: fan the question out to every council member in parallel.
    st.info("π§ Stage 1: Gathering Expert Opinions...")
    opinions = {}
    # One placeholder column per member so answers render as they arrive.
    panels = [column.empty() for column in st.columns(len(MEMBERS))]
    with concurrent.futures.ThreadPoolExecutor() as pool:
        pending = {
            pool.submit(ask_member, m, user_query): (slot, m)
            for slot, m in enumerate(MEMBERS)
        }
        for done in concurrent.futures.as_completed(pending):
            slot, m = pending[done]
            answer = done.result()
            opinions[m["name"]] = answer
            with panels[slot].container():
                st.success(f"β {m['name']}")
                with st.expander("Read Opinion", expanded=True):
                    st.write(answer)

    # STAGE 2: one very logical model critiques all the opinions at once.
    st.warning("βοΈ Stage 2: Peer Review (Critique)...")
    combined_text = "\n\n".join(
        f"OPINION FROM {name}:\n{text}" for name, text in opinions.items()
    )
    critique_prompt = f"Question: {user_query}\nAnswers:\n{combined_text}\nCritique these answers. Who is factually right? Who is hallucinating? Be harsh."
    critique = query_groq("llama-3.3-70b-versatile", critique_prompt)
    with st.expander("π Read the Debate Minutes"):
        st.write(critique)

    # STAGE 3: the chairman synthesizes the final answer from the critique.
    st.success("π Stage 3: Final Verdict")
    final_prompt = f"Question: {user_query}\nCritique: {critique}\nSynthesize the best possible answer based on the critique. Ignore the errors, keep the insights."
    final_answer = query_google(CHAIRMAN_ID, final_prompt)
    st.markdown("### Final Answer")
    st.markdown(final_answer)