Spaces:
Build error
feat: add coze.py
Browse files
- chat.py +26 -0
- app.py → coze.py +24 -25
- main.py +17 -0
chat.py
ADDED
|
@@ -0,0 +1,26 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
import os

from openai import OpenAI
import gradio as gr

# SECURITY: the original hard-coded an OpenAI API key in source. A key committed
# to a repository must be treated as leaked and revoked; load it from the
# environment instead (set OPENAI_API_KEY before launching).
api_key = os.environ.get("OPENAI_API_KEY", "")
client = OpenAI(api_key=api_key)


def predict(message, history):
    """Stream a gpt-3.5-turbo reply for *message* given gradio chat *history*.

    *history* is a list of (user, assistant) string pairs as supplied by
    gr.ChatInterface. Yields the progressively accumulated assistant reply so
    the UI can render partial output.
    """
    # Re-encode gradio's pair-based history into the OpenAI messages format.
    history_openai_format = []
    for human, assistant in history:
        history_openai_format.append({"role": "user", "content": human})
        history_openai_format.append({"role": "assistant", "content": assistant})
    history_openai_format.append({"role": "user", "content": message})

    response = client.chat.completions.create(
        model="gpt-3.5-turbo",
        messages=history_openai_format,
        temperature=1.0,
        stream=True,  # stream chunks so partial text can be yielded below
    )

    partial_message = ""
    for chunk in response:
        # The terminating chunk carries a None delta; skip it.
        if chunk.choices[0].delta.content is not None:
            partial_message = partial_message + chunk.choices[0].delta.content
            yield partial_message


gr.ChatInterface(predict).launch()
|
app.py → coze.py
RENAMED
|
@@ -3,6 +3,7 @@ import json
|
|
| 3 |
import aiohttp
|
| 4 |
import asyncio
|
| 5 |
|
|
|
|
| 6 |
API_URL = "https://api.coze.com/v3/chat"
|
| 7 |
|
| 8 |
# https://www.coze.com/space/7309440314236747794/bot/7392076134450151432
|
|
@@ -18,7 +19,7 @@ TOOLS_CONFIG = {
|
|
| 18 |
}
|
| 19 |
|
| 20 |
|
| 21 |
-
async def run_workflow(parameters, api_name
|
| 22 |
config = TOOLS_CONFIG[api_name]
|
| 23 |
payload = {
|
| 24 |
"bot_id": BOT_ID,
|
|
@@ -49,45 +50,43 @@ async def run_workflow(parameters, api_name, onmessage):
|
|
| 49 |
) as response:
|
| 50 |
async for line in response.content:
|
| 51 |
if line:
|
| 52 |
-
|
| 53 |
-
|
| 54 |
-
|
| 55 |
-
if asyncio.iscoroutinefunction(onmessage):
|
| 56 |
-
await onmessage(line)
|
| 57 |
-
else:
|
| 58 |
-
onmessage(line)
|
| 59 |
-
except Exception as err:
|
| 60 |
-
print(f"Error in onmessage callback: {err}")
|
| 61 |
-
|
| 62 |
-
|
| 63 |
-
# 使用示例
|
| 64 |
-
# event:conversation.message.delta
|
| 65 |
is_message_delta = False
|
| 66 |
|
| 67 |
|
| 68 |
-
async def
|
| 69 |
global is_message_delta
|
| 70 |
decoded_line = line.decode("utf-8")
|
| 71 |
|
| 72 |
-
# print(decoded_line, is_message_delta)
|
| 73 |
-
|
| 74 |
if decoded_line.startswith("event:"):
|
| 75 |
is_message_delta = decoded_line == "event:conversation.message.delta\n"
|
| 76 |
-
return
|
| 77 |
|
| 78 |
if decoded_line.startswith("data:") and is_message_delta:
|
| 79 |
-
# remove the `data:` prefix and json decode the rest
|
| 80 |
message = json.loads(decoded_line[5:])
|
| 81 |
-
# get role, type, content from the message
|
| 82 |
-
# if role is assistant and type is anwser, then print the content
|
| 83 |
if message.get("role") == "assistant" and message.get("type") == "answer":
|
| 84 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 85 |
|
| 86 |
|
| 87 |
-
# 使用示例
|
| 88 |
async def main():
|
| 89 |
-
|
| 90 |
-
|
|
|
|
|
|
|
| 91 |
sys.stdout.write("\n")
|
| 92 |
|
| 93 |
|
|
|
|
| 3 |
import aiohttp
|
| 4 |
import asyncio
|
| 5 |
|
| 6 |
+
|
| 7 |
API_URL = "https://api.coze.com/v3/chat"
|
| 8 |
|
| 9 |
# https://www.coze.com/space/7309440314236747794/bot/7392076134450151432
|
|
|
|
| 19 |
}
|
| 20 |
|
| 21 |
|
| 22 |
+
async def run_workflow(parameters, api_name):
|
| 23 |
config = TOOLS_CONFIG[api_name]
|
| 24 |
payload = {
|
| 25 |
"bot_id": BOT_ID,
|
|
|
|
| 50 |
) as response:
|
| 51 |
async for line in response.content:
|
| 52 |
if line:
|
| 53 |
+
yield line
|
| 54 |
+
|
| 55 |
+
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
# Module-level SSE parser state: True while the stream is inside a
# `conversation.message.delta` event, so that following `data:` lines are
# treated as incremental answer content.
is_message_delta = False


async def process_message(line: bytes):
    """Parse one server-sent-events line from the Coze chat stream.

    *line* is a raw bytes line (the original annotated it as ``str``, but the
    immediate ``.decode("utf-8")`` shows it is bytes, as yielded by an aiohttp
    response body).

    Returns the assistant answer fragment for ``data:`` payloads that belong to
    a message-delta event, otherwise None. Side effect: updates the
    module-level ``is_message_delta`` flag whenever an ``event:`` line is seen.
    """
    global is_message_delta
    decoded_line = line.decode("utf-8")

    if decoded_line.startswith("event:"):
        # Remember whether subsequent data lines belong to a delta event.
        is_message_delta = decoded_line == "event:conversation.message.delta\n"
        return None

    if decoded_line.startswith("data:") and is_message_delta:
        # Strip the "data:" prefix and JSON-decode the remainder.
        message = json.loads(decoded_line[5:])
        # Only assistant "answer" messages carry user-visible content.
        if message.get("role") == "assistant" and message.get("type") == "answer":
            return message.get("content")

    return None
|
| 73 |
+
|
| 74 |
+
|
async def generate_story(query, history=None):
    """Stream the story generated for *query*, yielding the growing text.

    *history* is accepted for gr.ChatInterface handler compatibility and is
    not used. Each yielded value is the cumulative story text so far
    (generator-based incremental output).
    """
    accumulated = ""
    workflow = run_workflow({"query": query}, "self:create_full_story")
    async for raw_line in workflow:
        fragment = await process_message(raw_line)
        if fragment:
            accumulated += fragment
            yield accumulated
|
| 83 |
|
| 84 |
|
|
|
|
async def main():
    """Demo entry point: stream a story for a fixed query and echo it to stdout."""
    async for snapshot in generate_story("冰雪奇缘"):
        # NOTE(review): each snapshot is the cumulative text, so this re-prints
        # earlier content every iteration — confirm whether only the delta was
        # intended to be written.
        sys.stdout.write(snapshot)
    sys.stdout.write("\n")
|
| 91 |
|
| 92 |
|
main.py
ADDED
|
@@ -0,0 +1,17 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
import gradio as gr

# run_workflow / process_message are re-exported for backward compatibility
# with any existing `from main import ...` callers.
from coze import generate_story, process_message, run_workflow


async def create_full_story(query, history=None):
    """Gradio chat handler: stream the Coze-generated story for *query*.

    Delegates to coze.generate_story, which this function previously
    duplicated line-for-line; keeping a single implementation prevents the two
    copies from drifting apart. *history* is accepted (and passed through,
    where it is ignored) to match gr.ChatInterface's handler signature.
    Yields the cumulative story text so far.
    """
    async for partial_story in generate_story(query, history):
        yield partial_story


if __name__ == "__main__":
    gr.ChatInterface(create_full_story).launch()
|