# Hugging Face Spaces chat app (the deployed Space was failing with "Runtime error").
import gradio as gr
from llama_cpp import Llama

try:
    # Load the local GGUF model; n_gpu_layers=0 keeps all layers on CPU
    # (free Spaces hardware has no GPU).
    llm = Llama(model_path="model.Q4_K_M.gguf", n_gpu_layers=0)
except Exception as e:
    # Keep the app alive so the UI can report the problem instead of crashing at import.
    print(f"Error loading model: {e}")
    llm = None
def chat(message, history):
    """Generate a reply from the local Llama model.

    Args:
        message: The user's latest message (str).
        history: Prior (user_msg, bot_msg) pairs supplied by Gradio.

    Returns:
        The model's reply as a stripped string, or an "Error: ..." message.
    """
    if llm is None:
        return "Error: Model not loaded. Make sure model.Q4_K_M.gguf is uploaded."
    try:
        # Replay the conversation in the simple Q:/A: prompt format.
        chat_history = ""
        for user_msg, bot_msg in history:
            chat_history += f"Q: {user_msg}\nA: {bot_msg}\n"
        prompt = chat_history + f"Q: {message}\nA:"
        response = llm(prompt, max_tokens=512)
        # BUG FIX: llama-cpp-python returns completions as a LIST under
        # "choices" ({"choices": [{"text": ...}]}). The original code did
        # response["choices"]["text"], which raises
        # "TypeError: list indices must be integers or slices, not str"
        # on every single request — the likely cause of the Space's runtime error.
        return response["choices"][0]["text"].strip()
    except Exception as e:
        return f"Error: {str(e)}"
# Build the Gradio chat UI around the chat() handler.
demo = gr.ChatInterface(
    chat,
    examples=[
        "What is SQL injection?",
        "How to prevent XSS attacks?",
        "What is CSRF?",
        "Best security practices",
    ],
    title="🔒 Security Expert - Llama-3",
    description="Ask me anything about cybersecurity!",
    theme=gr.themes.Soft(),
    chatbot=gr.Chatbot(label="Chat History"),
)

if __name__ == "__main__":
    demo.launch()