Spaces:
Sleeping
Sleeping
import os
import random
import time
from pathlib import Path

import chatglm_cpp
import gradio as gr

# NOTE: `random` and `time` are only used by disabled demo code below; kept
# so that code can be re-enabled without edits.

# Path to the quantized ChatGLM3 GGML weights. Overridable via environment
# variable so deployments don't need to edit the source; the default matches
# the original hard-coded filename.
# (previous local override: "../../Downloads1/chatglm3-ggml_q4_0.bin")
model_file_path = os.environ.get("CHATGLM_MODEL_PATH", "chatglm3-ggml_q4_0.bin")

# Load the model once at import time; chatglm_cpp serves it for all requests.
chatglm_llm = chatglm_cpp.Pipeline(Path(model_file_path))
def predict(message, history):
    """Stream a ChatGLM3 reply for *message*.

    *history* is a list of [user, assistant] pairs; it is flattened into the
    alternating-turn list chatglm_cpp expects, with *message* appended as the
    final user turn. Yields the accumulated response text after each new
    generated fragment, so callers can live-update the UI.
    """
    turns = [text for pair in history for text in pair]
    token_stream = chatglm_llm.chat(
        history=turns + [message], do_sample=False, stream=True
    )
    accumulated = ""
    for fragment in token_stream:
        accumulated += fragment
        yield accumulated
# Build the Gradio UI; custom.css supplies page styling tweaks.
with gr.Blocks(css="custom.css") as demo:
    # Page title banner.
    title = gr.HTML(
        """<h1 align="center"> <font size="+3"> ChatGLM3 Chatbot ☔️🐼 </font> </h1>""",
        elem_id="title",
    )
    # Subtitle (Chinese): "chat with the AI assistant ChatGLM3".
    gr.HTML(
        """<h1 align="left"> <font size="+0"> 与人工智能助手 ChatGLM3 进行对话 </font> </h1>""",
    )
    # Conversation display; holds a list of [user, assistant] message pairs.
    chatbot = gr.Chatbot()
def user(user_message, history):
    """Handle a submit: clear the input box and queue the new turn.

    Returns ("", updated_history) where the appended turn is
    [user_message, None]; the None placeholder is filled in later by the
    bot callback. The incoming history list is not mutated.
    """
    updated = list(history)
    updated.append([user_message, None])
    return "", updated
# Disabled: "[SEP]"-joined text-protocol variant of `bot`, kept for reference.
'''
def bot_api(user_message):
    l = user_message.split("[SEP]")
    history = []
    for ele in l:
        if not history:
            history.append([ele])
        else:
            if len(history[-1]) == 2:
                history.append([ele])
            else:
                history[-1].append(ele)
    if len(history[-1]) <= 1:
        return ""
    history[-1][1] = ""
    user_message_ = history[-1][0]
    pred_iter = predict(user_message_, history)
    for ele in pred_iter:
        history[-1][1] = ele
    user_message_out = []
    for ele in history:
        for e in ele:
            user_message_out.append(e)
    user_message_out = "[SEP]".join(user_message_out)
    return user_message_out
'''
def bot(history):
    """Stream the assistant reply for the newest user turn into *history*.

    history[-1] is [user_message, None], appended by `user`; its second slot
    is progressively overwritten with the partial model output, and the whole
    history is yielded after each fragment so the Chatbot widget live-updates.
    """
    history[-1][1] = ""
    user_message = history[-1][0]
    # Pass only the *completed* turns as context. history[-1] is the turn
    # being answered: including it (as the original code did) fed the model
    # the current question twice plus a bogus empty assistant reply, because
    # `predict` flattens every pair AND re-appends the message.
    pred_iter = predict(user_message, history[:-1])
    for partial in pred_iter:
        history[-1][1] = partial
        yield history
# Disabled: Enter-to-submit wiring (the Submit button below is used instead).
'''
msg.submit(user, [msg, chatbot], [msg, chatbot], queue=False).then(
    bot, chatbot, chatbot
)
'''
with gr.Row():
    # Prompt entry. Placeholder (Chinese): "Hello AI assistant ChatGLM3,
    # may I ask you some questions?"
    msg = gr.Textbox(
        show_label=True,
        lines=1,
        max_lines=20,
        min_width=1024,
        placeholder="你好 人工智能助手 ChatGLM3,我可以问你一些问题吗?",
        elem_id="prompt",
        interactive=True,
    )
    # NOTE(review): the scraped source lost indentation — the two buttons may
    # originally have sat outside this Row; confirm against the rendered page.
    sub_button = gr.Button("Submit")
    clear = gr.Button("Clear")

# Submit: `user` appends the turn and clears the box, then `bot` streams the
# model reply into the Chatbot widget.
sub_button.click(user, [msg, chatbot], [msg, chatbot], queue=False).then(
    bot, chatbot, chatbot
)
# Disabled: hidden-API hook for the text-protocol `bot_api` variant above.
'''
api_button.click(
    bot_api, msg, msg_out
)
'''
# Clear button empties the conversation display.
clear.click(lambda: None, None, chatbot, queue=False)

# Canned example prompts the user can click to fill the textbox.
gr.Examples(
    [
        "哈利波特和赫敏是什么关系?",
        "请解释下面的emoji符号描述的情景👨👩🔥❄️",
        "明朝内阁制度的特点是什么?",
        "如何进行经济建设?",
        "你听说过马克思吗?",
    ],
    inputs=msg,
)
# Queuing is required for the streaming (generator) callbacks above.
demo.queue()
# The original passed `enable_queue=True` to launch(); that kwarg is
# redundant after demo.queue() and was removed from launch() in Gradio 4.x,
# where it raises TypeError — so launch without it.
demo.launch()
# Disabled: local gradio_client smoke test against the running app.
'''
from gradio_client import Client
client = Client("http://localhost:7860/")
result = client.predict(
    [["诸葛亮是哪个朝代的人?", "诸葛亮是三国时期的人。"],
     ["为什么说明朝是一个好的时代?", "因为出了王阳明。"],
     ["我之前问了哪些问题?", None]],
    api_name="/bot"
)
print(result)
'''
# Disabled: remote gradio_client example (history passed as a JSON file).
'''
#### gradio_client.__version__ : 0.7.0
from gradio_client import Client
import json
client = Client("https://svjack-chatglm3-open-chat.hf.space/--replicas/ksskt/")
with open("hist.json", "w") as f:
    json.dump(
        [["诸葛亮是哪个朝代的人?", "诸葛亮是三国时期的人。"],
         ["为什么说明朝是一个好的时代?", "因为出了王阳明。"],
         ["我之前问了哪些问题?", None]],
        f
    )
result = client.predict(
    "hist.json",
    fn_index=1
)
result
'''