Hugging Face Space (status: Sleeping)
import os

import gradio as gr
from huggingface_hub import InferenceClient


def analyze(project_data: str, question: str) -> str:
    """Answer *question* about *project_data* with a hosted Qwen model.

    Sends a single text-generation request to the Hugging Face Inference
    API (model Qwen/Qwen2.5-72B-Instruct) and returns the generated text.
    On any failure, returns a human-readable ``"Error occurred: …"``
    string instead of raising, so the Gradio UI always gets displayable
    output.
    """
    api_key = os.getenv("HF_API_KEY")
    if not api_key:
        # Fail fast with an actionable message: without this check,
        # InferenceClient(token=None) fails later with an opaque
        # auth/HTTP error that the broad except below would swallow.
        return "Error occurred: HF_API_KEY environment variable is not set"
    try:
        client = InferenceClient(
            model="Qwen/Qwen2.5-72B-Instruct",
            token=api_key,
        )
        prompt = f"Analyze this project: {project_data}\n\nQuestion: {question}"
        response = client.text_generation(
            prompt,
            max_new_tokens=1000,
            temperature=0.7,
            top_p=0.95,
            repetition_penalty=1.1,
            do_sample=True,
        )
        return response
    except Exception as e:
        # Boundary handler: log for the Space console, surface the
        # message in the UI rather than crashing the request.
        print(f"Error details: {str(e)}")
        return f"Error occurred: {str(e)}"
# Gradio UI: two text inputs -> one text output, exposed as the
# "analyze" API endpoint.
iface = gr.Interface(
    fn=analyze,
    inputs=[
        gr.Textbox(label="Project Data"),
        gr.Textbox(label="Question"),
    ],
    api_name="analyze",
    outputs="text",
)

if __name__ == "__main__":
    # server_name="0.0.0.0" binds all interfaces so the Space/container
    # is reachable externally; share=True additionally opens a public
    # tunnel URL.
    # NOTE(review): the original passed allowed_paths=["*"] with an
    # "Allow CORS" comment. `allowed_paths` whitelists local filesystem
    # paths Gradio may serve — it does not configure CORS — so it was
    # removed as a needless file-exposure risk.
    iface.launch(
        server_name="0.0.0.0",
        share=True,
    )