Lui3ui3ui commited on
Commit
3232f5e
·
verified ·
1 Parent(s): 2de003b

Upload app.py

Browse files
Files changed (1) hide show
  1. app.py +38 -14
app.py CHANGED
@@ -1,14 +1,38 @@
1
- from huggingface_hub import InferenceClient
2
- import os
3
-
4
- client = InferenceClient(token=os.environ.get("HF_API_TOKEN"))
5
-
6
- try:
7
- response = client.text_generation(
8
- model="tiiuae/falcon-7b-instruct",
9
- prompt="Hello, who are you?",
10
- max_new_tokens=50
11
- )
12
- print(response)
13
- except Exception as e:
14
- print(f"Test failed: {e}")
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import gradio as gr
2
+ from agents import build_graph
3
+ import asyncio
4
+
5
# Build the LangGraph once at import time so every Gradio request reuses the
# same compiled graph instead of rebuilding it per call.
graph = build_graph()
8
async def run_book_recommender(user_input):
    """Run the book-recommendation graph for one user query.

    Parameters
    ----------
    user_input : str
        Free-text description of books the user likes (from the Gradio textbox).

    Returns
    -------
    tuple[str, str]
        (formatted recommendations text, reasoning/debug text) — one value per
        Gradio output component.
    """
    initial_state = {"user_input": user_input}

    # Keep the last state the graph yields. Seed with the initial state so an
    # empty stream cannot leave `final_state` unbound (NameError in original).
    final_state = initial_state
    async for state in graph.astream(initial_state):
        final_state = state

    print("[app.py] Final state:", final_state)

    # Access the nested "reasoning" key — presumably the output of a
    # "reasoning" node in the graph; verify against agents.build_graph.
    reasoning_data = final_state.get("reasoning", {})
    recommendations = reasoning_data.get("final_recommendations", [])
    reasoning = reasoning_data.get("final_reasoning", "")

    # Use .get for *every* key (the original indexed rec['title'] directly) so
    # one malformed recommendation dict cannot crash the UI with a KeyError.
    recommendations_text = "\n\n".join(
        f"📘 {rec.get('title', 'Untitled')}\n🔗 {rec.get('link', '')}\n💡 {rec.get('reason', '')}"
        for rec in recommendations
    ) or "No recommendations found."

    return recommendations_text, reasoning
26
+
27
# Gradio UI: one input box, a trigger button, and two read-only output panes.
with gr.Blocks() as demo:
    gr.Markdown("# 📚 AI Book Recommender")

    query_box = gr.Textbox(label="Tell me some books you like")
    submit_btn = gr.Button("Get Recommendations")
    recs_box = gr.Textbox(label="Recommended Books", lines=10)
    log_box = gr.Textbox(label="Reasoning / Debug Log", lines=15)

    # Wire the button to the async recommender; Gradio maps the returned
    # 2-tuple onto the two output textboxes in order.
    submit_btn.click(
        fn=run_book_recommender,
        inputs=query_box,
        outputs=[recs_box, log_box],
    )

if __name__ == "__main__":
    demo.launch()