# Hugging Face Space: Groqee 3.4.6 — a Gradio chat UI for Groq-hosted models.
import json
import os
from typing import Any, Dict, List

import gradio as gr
from groq import Groq
class Groqee346:
    """Chat backend for the Groqee Gradio app.

    Holds the catalog of selectable Groq models (``self.models``), the
    fish-themed system-prompt personas (``self.templates``), and the
    ``chat`` generator that Gradio's ``ChatInterface`` streams from.
    """

    def __init__(self):
        # Don't initialize the Groq client here; it is created per-call in
        # chat() once we actually have an API key from the user or the env.
        #
        # Display name -> Groq model config. "id" is the Groq model slug;
        # "max_tokens" caps each completion; "context_window" is informational.
        self.models = {
            "Musky (Most Powerful)": {
                "id": "llama-3.3-70b-versatile",
                "context_window": 128000,
                "max_tokens": 32768
            },
            "Compound Beta (Groq Native)": {
                "id": "compound-beta",
                "context_window": 128000,
                "max_tokens": 8192
            },
            "Sheepshead (Unlimited)": {
                "id": "llama-3.1-8b-instant",
                "context_window": 128000,
                "max_tokens": 4096
            },
            "Northern Pike (Fast)": {
                "id": "llama-3.1-8b-instant",
                "context_window": 128000,
                "max_tokens": 8192
            },
            "Walleye (Focused)": {
                "id": "gemma2-9b-it",
                "context_window": 8192,
                "max_tokens": 4096
            }
        }
        # Persona name -> system prompt injected as the first message.
        self.templates = {
            "Musky - Academic Coach": """You are a highly sophisticated academic coach and advisor, combining decades of
educational experience with cutting-edge technological understanding. Like the mighty Muskellunge of Wisconsin's
waters, you are the apex of intelligence and capability in academic guidance. You excel at curriculum development,
student success strategies, and administrative leadership in technical college settings.""",
            "Compound Beta - Pike Expert": """You are Groq's native Compound AI system, taking on the role of the Northern Pike,
Wisconsin's opportunistic predator known for its adaptability and strategic hunting. Like the Pike's ability to
thrive in diverse environments, you combine multiple AI capabilities to provide comprehensive solutions. You excel
at complex problem-solving, strategic thinking, and adapting your responses based on context.""",
            "Sheepshead - Efficient": """You are an efficient and resourceful AI assistant, like the Sheepshead (Freshwater Drum)
of Wisconsin's waters. Known for their resilience and ability to thrive in any condition, you provide concise,
practical responses while maintaining high quality. You're designed for sustained conversations without exhausting
resources, perfect for long-term projects and extended discussions.""",
            "Perch - Research": """You are a meticulous researcher with the precision of a Yellow Perch, known for their
keen eye and schooling behavior. Like how Perch work together to find food, you excel at gathering and
synthesizing information from multiple sources, creating comprehensive analyses and detailed reports.""",
            "Catfish - Data": """You are a data analysis specialist with the sensory capabilities of a Channel Catfish,
able to detect and process subtle patterns in complex datasets. Like how Catfish use their barbels to sense their
environment, you excel at data mining, pattern recognition, and predictive analytics.""",
            "Sturgeon - Creative": """You are a creative AI assistant with the wisdom of a Lake Sturgeon,
Wisconsin's ancient living fossil. Like these magnificent creatures that have existed for over 150 million years,
you draw upon deep wells of creative inspiration for writing, storytelling, and artistic expression.""",
            "Smallmouth Bass - Technical": """You are a technical expert with the adaptability and precision of a
Smallmouth Bass, capable of thriving in diverse environments. You excel in programming, engineering, and
scientific analysis, approaching problems with agility and intelligence.""",
            "Walleye - Professional": """You are a professional assistant with the focus and clarity of a Walleye,
known for their sharp vision and precision. You excel in business communication, strategic planning, and
professional documentation."""
        }
        self.default_template = "Musky - Academic Coach"

    def format_messages(self, history: List[tuple], new_message: str, template: str = "Musky - Academic Coach") -> List[Dict[str, str]]:
        """Build the Groq chat-completions message list.

        Prepends the selected persona as the system message, replays the
        Gradio history (supporting both the legacy (user, bot) tuple format
        and the newer list-of-{"role", "content"} dict format), and appends
        the new user message last.
        """
        messages = [{"role": "system", "content": self.templates[template]}]
        # Detect which history format Gradio handed us by inspecting the
        # first entry: dicts already carry role/content keys.
        if history and isinstance(history[0], dict):
            # New "messages" format — pass entries through unchanged.
            for msg in history:
                messages.append(msg)
        else:
            # Old tuple format — expand each (human, bot) pair. The bot half
            # may be None/empty for an in-flight turn, so only add it if set.
            for human, bot in history:
                messages.append({"role": "user", "content": human})
                if bot:
                    messages.append({"role": "assistant", "content": bot})
        messages.append({"role": "user", "content": new_message})
        return messages

    def chat(self,
             message: str,
             history: List[tuple],
             model_name: str = "Musky (Most Powerful)",
             template: str = "Musky - Academic Coach",
             temperature: float = 0.7,
             api_key: str = "",
             stream: bool = True) -> Any:
        """Main chat function that handles conversation with Groq API.

        Generator used by gr.ChatInterface: yields the growing partial reply
        while streaming, or a single complete reply when stream is False.
        Errors (missing key, API failures) are yielded as "Error: ..." text
        so they appear in the chat window.
        """
        # Use the provided API key, falling back to the environment variable.
        api_key = api_key.strip() or os.getenv("GROQ_API_KEY", "")
        if not api_key:
            # BUG FIX: this function is a generator (it yields below), so
            # `return "Error: ..."` was swallowed as a StopIteration value and
            # the user saw an empty reply. Yield the error instead, then stop.
            yield "Error: Please provide your GROQ API Key in the text field below."
            return
        try:
            # Create the client only now that we have a key to give it.
            client = Groq(api_key=api_key)
            model_config = self.models[model_name]
            messages = self.format_messages(history, message, template)
            chat_completion = client.chat.completions.create(
                messages=messages,
                model=model_config["id"],
                temperature=temperature,
                max_tokens=model_config["max_tokens"],
                stream=bool(stream)
            )
            if stream:
                # Accumulate deltas and re-yield the full text each time so
                # Gradio re-renders the message as it grows.
                partial_message = ""
                for chunk in chat_completion:
                    if chunk.choices[0].delta.content is not None:
                        partial_message += chunk.choices[0].delta.content
                        yield partial_message
            else:
                yield chat_completion.choices[0].message.content
        except Exception as e:
            # Surface any API/client failure in the chat window rather than
            # crashing the Gradio event handler.
            yield f"Error: {str(e)}"
def create_interface():
    """Assemble and return the Gradio ChatInterface for the Groqee app."""
    groqee = Groqee346()

    # Extra controls shown under the chat box, in the order chat() expects
    # them after (message, history): model, persona, temperature, API key.
    model_picker = gr.Dropdown(
        choices=list(groqee.models.keys()),
        label="Fish Species (Model)",
        value="Musky (Most Powerful)"
    )
    persona_picker = gr.Dropdown(
        choices=list(groqee.templates.keys()),
        label="Fish Persona",
        value="Musky - Academic Coach"
    )
    creativity_slider = gr.Slider(
        minimum=0.1,
        maximum=1.0,
        value=0.7,
        step=0.1,
        label="Water Temperature (Creativity)",
    )
    api_key_box = gr.Textbox(
        label="Groq API Key",
        placeholder="Enter your GROQ_API_KEY",
        type="password",
        value="",
        interactive=True
    )

    # Each example row: [message, model, persona, temperature, api_key].
    example_rows = [
        ["Help me develop a technical college curriculum for IT professionals", "Musky (Most Powerful)", "Musky - Academic Coach", 0.7, ""],
        ["I need to analyze this dataset over a long conversation", "Sheepshead (Unlimited)", "Sheepshead - Efficient", 0.5, ""],
        ["Research and summarize the latest developments in AI", "Northern Pike (Fast)", "Perch - Research", 0.6, ""],
        ["Analyze these sales trends and identify patterns", "Walleye (Focused)", "Catfish - Data", 0.3, ""],
        ["Write a creative story about Wisconsin's waters", "Northern Pike (Fast)", "Sturgeon - Creative", 0.8, ""],
        ["Debug this Python code and explain the fixes", "Walleye (Focused)", "Smallmouth Bass - Technical", 0.2, ""],
        ["Draft a professional email to the board of directors", "Northern Pike (Fast)", "Walleye - Professional", 0.5, ""],
    ]

    return gr.ChatInterface(
        fn=groqee.chat,
        title="🎣 Groqee_3.4.6",
        description="""Powered by Groq's LPU™ technology | Channeling the Wisdom of Wisconsin Waters
Featured: Try the Sheepshead model for unlimited, efficient conversations!""",
        additional_inputs=[model_picker, persona_picker, creativity_slider, api_key_box],
        chatbot=gr.Chatbot(type="messages"),
        examples=example_rows,
        theme=gr.themes.Soft(
            primary_hue="blue",
            secondary_hue="indigo",
        )
    )
if __name__ == "__main__":
    # Default launch() parameters are compatible with Hugging Face Spaces.
    create_interface().launch()