Spaces:
Paused
Paused
Upload 2 files
Browse files- Requester.py +50 -0
- app.py +55 -0
Requester.py
ADDED
|
@@ -0,0 +1,50 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
|
| 2 |
+
import requests
|
| 3 |
+
import json
|
| 4 |
+
|
| 5 |
+
# API key for the VLand service.
# NOTE(review): hard-coded secret committed to source — move to an environment
# variable / secrets store and rotate this key.
VLAND_API_KEY = "1NVL791XZCEWGxY7co6zgP5EkB1jf2dv9m"
# VLand prompt-template IDs (presumably a plain-text and a JSON-output
# template, judging by the names — TODO confirm what each template returns).
TEMP_EMPTY_ID = "65bf418ddadb910e289fbc50"
TEMP_EMPTY_JSON_ID = "65f3bd678c10c4c83ba08e06"
|
| 8 |
+
|
| 9 |
+
|
| 10 |
+
class Requester:
    """Thin client for the VLand prompt-chat HTTP API."""

    def __init__(self):
        self._api_key = VLAND_API_KEY
        # Per-template chat endpoint; the template ID is substituted per call.
        self._url = "https://us-api.vland.live/ai-manage/prompts/{}/chat"

    def json_output(self, templateID, **kwargs):
        """Call the template with prompt variables only (no chat history).

        Equivalent to :meth:`text_output` with an empty message list; kept as
        a separate method for backward compatibility with existing callers.

        Parameters
        ----------
        templateID : str
            VLand prompt-template ID substituted into the endpoint URL.
        **kwargs
            Prompt variables; each key ``k`` is sent as ``{k}``.

        Returns
        -------
        dict
            The parsed JSON body of the API response.
        """
        return self.text_output(templateID, [], **kwargs)

    def text_output(self, templateID, messages, **kwargs):
        """POST a chat request with history and return the decoded JSON reply.

        Parameters
        ----------
        templateID : str
            VLand prompt-template ID substituted into the endpoint URL.
        messages : list[dict]
            Chat history as ``{"role": ..., "content": ...}`` dicts, appended
            to the template's prompt.
        **kwargs
            Prompt variables; each key ``k`` is sent as ``{k}``.

        Returns
        -------
        dict
            The parsed JSON body of the API response.

        Raises
        ------
        requests.RequestException
            On connection failure or timeout.
        """
        variables = {"{" + k + "}": v for k, v in kwargs.items()}
        body = {
            "variables": variables,
            "append_messages": messages,
        }
        headers = {"APIKEY": self._api_key, "Content-Type": "application/json"}
        # Serialize manually with ensure_ascii=False: per the note at the
        # bottom of this file, the Go-based backend mishandles requests'
        # default JSON encoding, especially when the body contains arrays.
        payload = json.dumps(body, ensure_ascii=False).encode("utf-8")
        # Explicit timeout so a stalled API call cannot hang the caller
        # forever (requests has no default timeout).
        res = requests.post(
            self._url.format(templateID),
            headers=headers,
            data=payload,
            timeout=30,
        )
        return res.json()
|
| 36 |
+
|
| 37 |
+
if __name__ == '__main__':
    # Interactive smoke test: a terminal chat loop against the empty template.
    rqst = Requester()
    # System prompt variable (Chinese role-play prompt: a withdrawn, recently
    # heartbroken engineer whose manager — the user — starts a conversation).
    system_content = {
        "content": "我想让你担任一个情绪低落的员工。你是一位在Audi工作的技术岗位员工,主要负责车载软件的开发和维护工作。最近你因为和爱人分手情绪非常低落,导致工作积极性下降,不能按时赶上deadline。你的manager不知道你的情感问题,但是注意到了你工作质量下降,准备和你来沟通。记住,你的性格非常沉默寡言,不会轻易说出你和爱人分手的事情。除非manager用明确的语言主动询问你的情感经历,你只会犟嘴。你同时还有轻度抑郁的问题。接下来我(用户)将扮演你的manager和你沟通。你的第一句回复应从以下内容选择:\n\t['你好','您好','hihi']"
    }
    conv_log = []
    while True:
        user_content = input()
        conv_log.append({"role": "user", "content": user_content})
        res = rqst.text_output(TEMP_EMPTY_ID, conv_log, **system_content)
        print(res, type(res))
        # Fix: record the assistant's reply so the model sees its own prior
        # turns on the next request (app.py builds its chat log the same way,
        # reading the reply from res['data']['content']).
        conv_log.append({"role": "assistant", "content": res['data']['content']})

# python requests library doesn't work well with Go-based api functions. especially when it comes to situations where the POST-ing json data has arrays in it.
app.py
ADDED
|
@@ -0,0 +1,55 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from Requester import Requester
|
| 2 |
+
import gradio as gr
|
| 3 |
+
import random
|
| 4 |
+
import time
|
| 5 |
+
|
| 6 |
+
import os
|
| 7 |
+
# Keep gradio's local HTTP traffic off any configured proxy.
os.environ["no_proxy"] = "localhost,127.0.0.1,::1"

# VLand prompt-template IDs (duplicated from Requester.py).
TEMP_EMPTY_ID = "65bf418ddadb910e289fbc50"
TEMP_EMPTY_JSON_ID = "65f3bd678c10c4c83ba08e06"

# Single shared API client for the whole app.
rqst = Requester()

# System prompt variable passed to the template on every request (Chinese
# role-play prompt: a withdrawn, recently heartbroken Audi engineer whose
# manager — the user — initiates a performance conversation).
system_content = {
    "content": "我想让你担任一个情绪低落的员工。你是一位在Audi工作的技术岗位员工,主要负责车载软件的开发和维护工作。最近你因为和爱人分手情绪非常低落,导致工作积极性下降,不能按时赶上deadline。你的manager不知道你的情感问题,但是注意到了你工作质量下降,准备和你来沟通。记住,你的性格非常沉默寡言,不会轻易说出你和爱人分手的事情。除非manager用明确的语言主动询问你的情感经历,你只会犟嘴。你同时还有轻度抑郁的问题。接下来我(用户)将扮演你的manager和你沟通。你的第一句回复应从以下内容选择:\n\t['你好','您好','hihi']"
}
|
| 17 |
+
|
| 18 |
+
|
| 19 |
+
with gr.Blocks() as demo:
    chatbot = gr.Chatbot()
    msg = gr.Textbox()
    clear = gr.Button("Clear")

    def user(user_message, history):
        """Append the user's turn (bot slot still empty) and clear the box."""
        return "", history + [[user_message, None]]

    def bot(history):
        """Generator: fetch the bot reply and stream it character by character.

        Only ever invoked after ``user`` has appended a pair, so ``history``
        is non-empty and its last pair has a pending (None) bot slot.
        """
        # Rebuild the API chat log from the chatbot history: every completed
        # pair contributes a user and an assistant message; the final pair
        # contributes only the pending user message.
        conv_log = []
        for user_msg, bot_msg in history[:-1]:
            conv_log.append({"role": "user", "content": user_msg})
            conv_log.append({"role": "assistant", "content": bot_msg})
        conv_log.append({"role": "user", "content": history[-1][0]})

        res = rqst.text_output(TEMP_EMPTY_ID, conv_log, **system_content)
        bot_message = res['data']['content']

        # Stream the reply one character at a time for a typing effect.
        history[-1][1] = ""
        for character in bot_message:
            history[-1][1] += character
            time.sleep(0.05)
            yield history

    # Submit: record the user turn immediately (queue=False so the textbox
    # clears without waiting), then stream the bot reply into the chatbot.
    msg.submit(user, [msg, chatbot], [msg, chatbot], queue=False).then(
        bot, chatbot, chatbot
    )
    clear.click(lambda: None, None, chatbot, queue=False)

demo.queue()
demo.launch()
|