duduvicky committed on
Commit
5d55be2
·
verified ·
1 Parent(s): 37b753f

create app.py

Browse files
Files changed (1) hide show
  1. app.py +62 -0
app.py ADDED
@@ -0,0 +1,62 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import gradio as gr
2
+ from langchain_huggingface import HuggingFacePipeline
3
+ from langchain.agents import initialize_agent, Tool
4
+ from langchain_community.tools import DuckDuckGoSearchRun
5
+ from langchain.memory import ConversationBufferMemory
6
+ from langchain.agents import AgentType
7
+
8
# Load the free open-source LLM. microsoft/phi-2 is small enough to run on the
# free CPU tier (slow, but works without paid GPU hosting).
_GENERATION_SETTINGS = {
    "max_new_tokens": 256,   # cap response length
    "do_sample": True,       # sample instead of greedy decoding
    "temperature": 0.7,      # creativity level
    "top_k": 50,
    "top_p": 0.95,
}

llm = HuggingFacePipeline.from_model_id(
    model_id="microsoft/phi-2",
    task="text-generation",
    pipeline_kwargs=_GENERATION_SETTINGS,
    model_kwargs={"trust_remote_code": True},  # Phi-2 ships custom model code
)
21
+
22
# Tools the agent may call while reasoning (currently just DuckDuckGo web search
# for up-to-date information).
search = DuckDuckGoSearchRun()

_web_search_tool = Tool(
    func=search.run,
    name="Web Search",
    description=(
        "Useful for answering questions about current events, facts, or "
        "anything requiring up-to-date web information. Input should be a "
        "search query."
    ),
)
tools = [_web_search_tool]
31
+
32
# Conversation memory: prior turns are replayed to the agent under the
# "chat_history" key, so follow-up questions keep their context.
memory = ConversationBufferMemory(memory_key="chat_history", return_messages=True)

# Conversational ReAct agent wiring the LLM, tools, and memory together.
# handle_parsing_errors=True: small models like Phi-2 frequently emit text the
# ReAct output parser cannot read; instead of raising OutputParserException and
# crashing the chat turn, the executor feeds the parse error back to the model
# and retries.
agent = initialize_agent(
    tools,
    llm,
    agent=AgentType.CONVERSATIONAL_REACT_DESCRIPTION,  # chat + tool use
    verbose=True,   # log reasoning steps to the console (not user-facing)
    memory=memory,  # enables context across messages
    handle_parsing_errors=True,
)
43
+
44
# Gradio chat callback: forward the user's message to the agent and return its reply.
def chat_with_agent(message, history):
    """Return the agent's reply to *message*.

    ``history`` is supplied by gr.ChatInterface but unused here — the agent
    tracks conversation state in its own LangChain memory.
    """
    try:
        result = agent.invoke({"input": message})
        return result["output"]
    except Exception as e:
        # Boundary handler: surface any agent failure as a chat reply
        # instead of crashing the UI.
        return f"Error: {str(e)}. Try rephrasing your question."
51
+
52
# Chat UI. Gradio invokes chat_with_agent with (message, history) on each turn.
_DEMO_PROMPTS = [
    "What's the latest news on AI?",
    "Tell me a joke.",
    "Remember my name is Alex. What's my name?",
]

iface = gr.ChatInterface(
    fn=chat_with_agent,
    title="Free Cloud AI Agent",
    description=(
        "A conversational AI agent that remembers our talks and can search "
        "the web for info. Powered by Phi-2, LangChain, and hosted free on "
        "Hugging Face Spaces. Responses may take 10-30 seconds on free CPU."
    ),
    examples=_DEMO_PROMPTS,
)
59
+
60
# Launch the app. Hugging Face Spaces runs this module as a script, so the
# guard fires there; it also keeps launch() from starting a server if the
# module is ever imported elsewhere.
if __name__ == "__main__":
    iface.launch()