Ventahana committed on
Commit
09d74de
·
verified ·
1 Parent(s): 939bc45

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +30 -58
app.py CHANGED
@@ -1,62 +1,34 @@
1
- import os
2
  import gradio as gr
3
- from smolagents import CodeAgent, InferenceClientModel
4
- from retriever import guest_info_tool
5
- from tools import all_tools
6
-
7
- print("🎩 Starting Alfred...")
8
-
9
- # ============================================================================
10
- # AUTO-GET TOKEN FROM SPACES SECRETS
11
- # ============================================================================
12
- # HuggingFace Spaces automatically sets HF_TOKEN from your secrets
13
- # You don't need to type it in code!
14
- HF_TOKEN = os.environ.get("HF_TOKEN")
15
-
16
- if HF_TOKEN:
17
- print(f"βœ… Found HF_TOKEN (first 10 chars): {HF_TOKEN[:10]}...")
18
-
19
- # Set for huggingface_hub library
20
- os.environ["HUGGINGFACE_HUB_TOKEN"] = HF_TOKEN
21
-
22
- # Try InferenceClientModel
23
- try:
24
- model = InferenceClientModel()
25
- print("βœ… InferenceClientModel created with token")
26
- except Exception as e:
27
- print(f"❌ InferenceClientModel failed: {e}")
28
- # Fallback
29
- from smolagents.models import TransformersModel
30
- model = TransformersModel("microsoft/phi-2")
31
- else:
32
- print("❌ No HF_TOKEN found in environment!")
33
- print("πŸ’‘ Add it in Space Settings β†’ Repository secrets")
34
-
35
- # Use local model instead
36
- from smolagents.models import TransformersModel
37
- model = TransformersModel("microsoft/phi-2")
38
- print("πŸ”„ Using local model as fallback")
39
-
40
- # ============================================================================
41
- # CREATE AGENT
42
- # ============================================================================
43
- all_agent_tools = [guest_info_tool] + all_tools
44
- alfred = CodeAgent(tools=all_agent_tools, model=model)
45
-
46
- print(f"πŸ€– Alfred ready with {len(all_agent_tools)} tools")
47
-
48
- def ask_alfred(query):
49
- try:
50
- return alfred.run(query)
51
- except Exception as e:
52
- return f"Error: {str(e)}"
53
-
54
- demo = gr.Interface(
55
- fn=ask_alfred,
56
- inputs=gr.Textbox(label="Ask Alfred"),
57
- outputs=gr.Textbox(label="Response"),
58
- title="🎩 Alfred's Gala Assistant"
59
  )
60
 
61
  if __name__ == "__main__":
62
- demo.launch(server_name="0.0.0.0", server_port=7860)
 
 
 
 
 
 
 
1
+ # app.py - Clean version
2
  import gradio as gr
3
+ from smolagents import GradioUI, CodeAgent, InferenceClientModel
4
+
5
+ # Import our custom tools
6
+ from tools import DuckDuckGoSearchTool, WeatherInfoTool, HubStatsTool
7
+ from retriever import load_guest_dataset
8
+
9
+ # Initialize
10
+ model = InferenceClientModel() # Updated
11
+ search_tool = DuckDuckGoSearchTool()
12
+ weather_info_tool = WeatherInfoTool()
13
+ hub_stats_tool = HubStatsTool()
14
+ guest_tool = load_guest_dataset()
15
+
16
+ # Enhance tool descriptions
17
+ guest_tool.description = "USE THIS FOR GALA GUESTS! Database with names, relations, descriptions, emails of attendees. Examples: 'Lady Ada Lovelace', 'guest list', 'who is coming'. NEVER use web search for guests."
18
+
19
+ # Create agent
20
+ alfred = CodeAgent(
21
+ tools=[guest_tool, weather_info_tool, hub_stats_tool, search_tool],
22
+ model=model,
23
+ add_base_tools=True,
24
+ planning_interval=3
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
25
  )
26
 
27
  if __name__ == "__main__":
28
+ # Quick test
29
+ print("Testing...")
30
+ response = alfred.run("Tell me about Lady Ada Lovelace at the gala")
31
+ print(f"Response: {response[:300]}...")
32
+
33
+ # Launch UI
34
+ GradioUI(alfred).launch()