# NOTE(review): the lines below were Hugging Face Space page metadata captured
# by the scrape (Space status, file size, commit hashes, a line-number gutter),
# not application code. Preserved here as a comment so the module parses.
# Space status: Sleeping · File size: 8,494 Bytes
import os
import time
import gradio as gr
import modelscope_studio.components.antd as antd
import modelscope_studio.components.antdx as antdx
import modelscope_studio.components.base as ms
import modelscope_studio.components.pro as pro
from modelscope_studio.components.pro.chatbot import (ChatbotBotConfig,
ChatbotPromptsConfig,
ChatbotUserConfig,
ChatbotWelcomeConfig)
from openai import OpenAI
# --- Model backend configuration -------------------------------------------
# The API key is injected through the environment; never hard-code it.
gw_api_key = os.getenv("GW_API_KEY")

# OpenAI-compatible client pointed at the Genius Works endpoint.
client = OpenAI(
    api_key=gw_api_key,
    base_url='https://api.geniuworks.com/v2',
)

# Model name sent with every chat-completion request.
model = "xinyuan-32b-v0609"
def prompt_select(e: gr.EventData):
    """Fill the sender input with the description of the clicked welcome prompt."""
    payload = e._data["payload"]
    selected = payload[0]["value"]
    return gr.update(value=selected["description"])
def clear():
    """Reset the chatbot history to empty (bound to the clear button)."""
    return gr.update(value=None)
def retry(chatbot_value, e: gr.EventData):
    """Regenerate the assistant reply the user asked to retry.

    Truncates the history at the retried message, shows the loading state,
    then re-streams a completion by delegating to ``submit``.
    """
    retry_index = e._data["payload"][0]["index"]
    truncated_history = chatbot_value[:retry_index]
    # First update: lock the UI and show the truncated history.
    yield (gr.update(loading=True), gr.update(value=truncated_history),
           gr.update(disabled=True))
    # Then stream the regenerated answer; ``None`` means no new user message.
    yield from submit(None, truncated_history)
def cancel(chatbot_value):
    """Stop the in-flight generation and mark the last message as paused."""
    last_message = chatbot_value[-1]
    last_message["loading"] = False
    last_message["status"] = "done"
    last_message["footer"] = "Chat completion paused"
    # Re-enable the sender and the clear button alongside the history update.
    return (gr.update(value=chatbot_value), gr.update(loading=False),
            gr.update(disabled=False))
def format_history(history):
    """Convert the chatbot UI history into OpenAI-style chat messages.

    Args:
        history: List of message dicts with a "role" and "content" key.
            User content is a plain string. Assistant content is normally a
            list of segments whose last element is the visible answer
            (earlier segments hold the model's thought stream and are
            dropped); after a failed request ``submit`` stores a plain
            error string instead.

    Returns:
        A list of ``{"role": ..., "content": str}`` dicts suitable for the
        chat-completions API. Roles other than "user"/"assistant" are skipped.
    """
    messages = []
    for item in history:
        if item["role"] == "user":
            messages.append({"role": "user", "content": item["content"]})
        elif item["role"] == "assistant":
            content = item["content"]
            if isinstance(content, str):
                # Error placeholder written by submit()'s except-branch;
                # the original code crashed here (str[-1]["content"]).
                messages.append({"role": "assistant", "content": content})
            elif content:
                # Keep only the final text segment; ignore thought segments.
                messages.append({
                    "role": "assistant",
                    "content": content[-1]["content"]
                })
            # An empty segment list (stream never started) is skipped.
    return messages
def submit(sender_value, chatbot_value):
    """Stream a chat completion for the current history.

    Generator bound to the sender's submit event. Yields dicts mapping the
    module-level ``sender``/``chatbot``/``clear_btn`` components to
    ``gr.update`` values as the response streams in.

    Args:
        sender_value: New user message text, or ``None`` when re-running an
            existing history (used by ``retry``).
        chatbot_value: Mutable chatbot history; updated in place.

    Raises:
        Re-raises any error from the completions API after showing an
        error message in the chat.
    """
    if sender_value is not None:
        chatbot_value.append({
            "role": "user",
            "content": sender_value,
        })
    history_messages = format_history(chatbot_value)
    # Placeholder assistant turn, filled in as chunks arrive.
    chatbot_value.append({
        "role": "assistant",
        "content": [],
        "loading": True,
        "status": "pending"
    })
    yield {
        sender: gr.update(value=None, loading=True),
        clear_btn: gr.update(disabled=True),
        chatbot: gr.update(value=chatbot_value)
    }
    try:
        response = client.chat.completions.create(model=model,
                                                  messages=history_messages,
                                                  stream=True,
                                                  max_tokens=32768,
                                                  temperature=0.6,
                                                  top_p=0.95,
                                                  )
        thought_done = False
        start_time = time.time()
        message_content = chatbot_value[-1]["content"]
        # Segment 0: collapsible panel for the model's thought stream.
        message_content.append({
            "copyable": False,
            "editable": False,
            "type": "tool",
            "content": "",
            "options": {
                "title": "Thinking..."
            }
        })
        # Segment 1: the visible answer text.
        message_content.append({
            "type": "text",
            "content": "",
        })
        for chunk in response:
            # Some chunks omit these delta fields or carry no choices;
            # catch only those cases (the original used bare ``except:``,
            # which also swallowed KeyboardInterrupt/SystemExit).
            try:
                reasoning_content = chunk.choices[0].delta.reasoning_content
            except (AttributeError, IndexError):
                reasoning_content = ""
            try:
                content = chunk.choices[0].delta.content
            except (AttributeError, IndexError):
                content = ""
            chatbot_value[-1]["loading"] = False
            message_content[-2]["content"] += reasoning_content or ""
            message_content[-1]["content"] += content or ""
            if content and not thought_done:
                # First answer token: close the thought panel with timing.
                thought_done = True
                thought_cost_time = "{:.2f}".format(time.time() - start_time)
                message_content[-2]["options"][
                    "title"] = f"End of Thought ({thought_cost_time}s)"
                message_content[-2]["options"]["status"] = "done"
            yield {chatbot: gr.update(value=chatbot_value)}
        # Stream finished: show total elapsed time and unlock the UI.
        chatbot_value[-1]["footer"] = "{:.2f}".format(time.time() -
                                                      start_time) + 's'
        chatbot_value[-1]["status"] = "done"
        yield {
            clear_btn: gr.update(disabled=False),
            sender: gr.update(loading=False),
            chatbot: gr.update(value=chatbot_value),
        }
    except Exception:
        # Replace the pending turn with an error notice, restore the UI,
        # then re-raise so Gradio surfaces the failure.
        chatbot_value[-1]["loading"] = False
        chatbot_value[-1]["status"] = "done"
        chatbot_value[-1]["content"] = "Failed to respond, please try again."
        yield {
            clear_btn: gr.update(disabled=False),
            sender: gr.update(loading=False),
            chatbot: gr.update(value=chatbot_value),
        }
        # Bare ``raise`` keeps the original traceback (was ``raise e``).
        raise
# --- UI layout and event wiring --------------------------------------------
with gr.Blocks() as demo, ms.Application(), antdx.XProvider():
    with antd.Flex(vertical=True, gap="middle"):
        # Chat area: welcome screen with two groups of suggested prompts,
        # plus user/bot avatar and styling configuration.
        chatbot = pro.Chatbot(
            height=1000,
            welcome_config=ChatbotWelcomeConfig(
                variant="borderless",
                icon=
                "./xinyuan.png",
                title=f"Hello, I'm Xinyuan👋",
                description="You can input text to get started.",
                prompts=ChatbotPromptsConfig(
                    title="How can I help you today?",
                    styles={
                        "list": {
                            "width": '100%',
                        },
                        "item": {
                            "flex": 1,
                        },
                    },
                    # Prompt group 1: psychology and practical applications.
                    items=[{
                        "label":
                        "💝 心理学与实际应用",
                        "children": [{
                            "description":
                            "课题分离是什么意思?"
                        }, {
                            "description":
                            "回避型依恋和焦虑型依恋有什么区别?还有其他依恋类型吗?"
                        }, {
                            "description":
                            "为什么我背单词的时候总是只记得开头和结尾,中间全忘了?"
                        }]
                    # Prompt group 2: child education and development.
                    }, {
                        "label":
                        "👪 儿童教育与发展",
                        "children": [{
                            "description":
                            "什么是正念养育?"
                        }, {
                            "description":
                            "2岁孩子分离焦虑严重,送托育中心天天哭闹怎么办?"
                        }, {
                            "description":
                            "4岁娃说话不清还爱打人,是心理问题还是欠管教?"
                        }]
                    }])),
            user_config=ChatbotUserConfig(
                avatar="https://api.dicebear.com/7.x/miniavs/svg?seed=3",
                variant="shadow"),
            bot_config=ChatbotBotConfig(
                header='Xinyuan',
                avatar=
                "./xinyuan.png",
                actions=["copy", "retry"],
                variant="shadow"),
        )
        # Input box; the prefix slot holds a clear-history icon button.
        with antdx.Sender() as sender:
            with ms.Slot("prefix"):
                with antd.Button(value=None, color="default",
                                 variant="text") as clear_btn:
                    with ms.Slot("icon"):
                        antd.Icon("ClearOutlined")
        # Event wiring: clear, submit (streaming), cancel (stops the
        # submit event), per-message retry, and welcome-prompt selection.
        clear_btn.click(fn=clear, outputs=[chatbot])
        submit_event = sender.submit(fn=submit,
                                     inputs=[sender, chatbot],
                                     outputs=[sender, chatbot, clear_btn])
        sender.cancel(fn=cancel,
                      inputs=[chatbot],
                      outputs=[chatbot, sender, clear_btn],
                      cancels=[submit_event],
                      queue=False)
        chatbot.retry(fn=retry,
                      inputs=[chatbot],
                      outputs=[sender, chatbot, clear_btn])
        chatbot.welcome_prompt_select(fn=prompt_select, outputs=[sender])
# Queueing is required because submit/retry are streaming generators.
if __name__ == "__main__":
    demo.queue().launch()