peterpeter8585 committed on
Commit
d40b153
·
verified ·
1 Parent(s): 0b6f430

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +4 -22
app.py CHANGED
@@ -1,6 +1,6 @@
1
  from tqdm import tqdm
2
  from subprocess import Popen, PIPE as P
3
- b=Popen("wget 1https://huggingface.co/peterpeter8585/deepseek-llm-7b-chat-Q8_0-GGUF/resolve/main/deepseek-llm-7b-chat-q8_0.gguf -O ./model.gguf",shell=True,stderr=P,stdout=P)
4
  print(b.stdout.read()+b.stderr.read())
5
  from langchain_experimental.tools.python.tool import PythonREPLTool as PYT
6
  from langchain.agents import load_tools, create_structured_chat_agent as Agent,AgentExecutor as Ex, AgentType as Type
@@ -84,7 +84,7 @@ llm = HuggingFacePipeline.from_model_id(
84
  from langchain.retrievers import WikipediaRetriever as Wiki
85
  import gradio as gr
86
  chatbot = gr.Chatbot(
87
- label="SYAI4.1",
88
  show_copy_button=True,
89
  layout="panel"
90
  )
@@ -101,28 +101,10 @@ memory=MEM()
101
  tools.append(crt(name="wiki",description="위키 백과를 검색하여 정보를 가져온다",retriever=Wiki(lang="ko",top_k_results=1)))
102
  agent=Ex(agent=Agent(llm,tools,prompt),tools=tools,verbose=True,handle_parsing_errors=True,memory=memory)
103
  def chat(message,
104
- history: list[tuple[str, str]],
105
- system_message,
106
- max_tokens,
107
- temperature,
108
- top_p, chat_session):
109
  return agent.invoke({"input":message})
110
  ai1=gr.ChatInterface(
111
  chat,
112
- chatbot=chatbot,
113
- additional_inputs=[
114
- gr.Textbox(value="You are a helpful assistant.", label="System message", interactive=True),
115
- gr.Slider(minimum=1, maximum=2048, value=512, step=1, label="Max new tokens"),
116
- gr.Slider(minimum=0.1, maximum=4.0, value=0.1, step=0.1, label="Temperature"),
117
- gr.Slider(
118
- minimum=0.1,
119
- maximum=1.0,
120
- value=0.1,
121
- step=0.05,
122
- label="Top-p (nucleus sampling)",
123
- ),
124
- gr.Textbox(label="chat_id(please enter the chat id!)")
125
- ],
126
-
127
  )
128
  ai1.launch()
 
1
from tqdm import tqdm
from subprocess import Popen, PIPE as P
# Model download is disabled.  The Popen call and the dump of its output must
# be commented out *together*: leaving the print() active while `b` is never
# assigned raises NameError at import time.  The stray "1" that had been
# pasted onto the URL scheme ("wget 1https://...") is also removed so the
# command works if the download is ever re-enabled.
#b=Popen("wget https://huggingface.co/peterpeter8585/deepseek-llm-7b-chat-Q8_0-GGUF/resolve/main/deepseek-llm-7b-chat-q8_0.gguf -O ./model.gguf",shell=True,stderr=P,stdout=P)
#print(b.stdout.read()+b.stderr.read())
from langchain_experimental.tools.python.tool import PythonREPLTool as PYT
from langchain.agents import load_tools, create_structured_chat_agent as Agent,AgentExecutor as Ex, AgentType as Type
 
84
from langchain.retrievers import WikipediaRetriever as Wiki
import gradio as gr

# Chat display widget; shared with the gr.ChatInterface defined further below.
chatbot = gr.Chatbot(
    layout="panel",
    show_copy_button=True,
    label="PIXAL(Pimary Interactive X-ternal Assistant with multi Language) 1.0",
)
 
101
# Wikipedia lookup tool (Korean wiki, single best hit); the description string
# is user-facing tool metadata and is left untranslated.
tools.append(crt(name="wiki",description="위키 백과를 검색하여 정보를 가져온다",retriever=Wiki(lang="ko",top_k_results=1)))
agent=Ex(agent=Agent(llm,tools,prompt),tools=tools,verbose=True,handle_parsing_errors=True,memory=memory)

def chat(message, history: list[tuple[str, str]]):
    """Gradio ChatInterface handler: route the user message through the agent.

    Parameters
    ----------
    message : str
        The latest user utterance.
    history : list[tuple[str, str]]
        Prior (user, assistant) turns. Unused here — the AgentExecutor
        carries its own conversation memory (``memory=MEM()`` above).

    Returns
    -------
    str
        The agent's reply text.
    """
    result = agent.invoke({"input": message})
    # AgentExecutor.invoke returns a dict ({"input": ..., "output": ...});
    # gr.ChatInterface expects the reply *string*, not the whole dict, so
    # extract the "output" field instead of returning the raw mapping.
    return result["output"]
106
# Wire the chat handler and the shared chatbot widget into a minimal chat UI,
# then serve it.
ai1 = gr.ChatInterface(fn=chat, chatbot=chatbot)
ai1.launch()