Spaces:
Sleeping
Sleeping
Update app.py
Browse files
app.py
CHANGED
|
@@ -4,6 +4,7 @@ from huggingface_hub import InferenceClient
|
|
| 4 |
import requests
|
| 5 |
import re
|
| 6 |
from duckduckgo_search import DDGS
|
|
|
|
| 7 |
# ==== Константы ====
|
| 8 |
MODEL_NAME = "openai/gpt-oss-20b"
|
| 9 |
SYSTEM_MESSAGE = "Ты Pok.Bot, ты используешь открытую локальную модель GPT-OSS от OpenAI, не GPT-4 (наверно😆). Тебя создал POKilondron. Используй емодзи 😄.If you don't know the answer, output a command like this:#search <query> Do NOT invent facts. И да использую команду #search «запрос» когда нужно и не повторяй постоянноодин и тоже запрос и не видавай запрос на 18+ 😠"
|
|
@@ -42,7 +43,7 @@ def respond(message, history: list[dict[str, str]], hf_token: gr.OAuthToken):
|
|
| 42 |
delta = msg.choices[0].delta.content
|
| 43 |
if delta:
|
| 44 |
raw_response += delta
|
| 45 |
-
yield raw_response
|
| 46 |
|
| 47 |
# === Проверка на команду поиска ===
|
| 48 |
if raw_response.strip().startswith("#search"):
|
|
@@ -80,31 +81,44 @@ def respond(message, history: list[dict[str, str]], hf_token: gr.OAuthToken):
|
|
| 80 |
# -------------------
|
| 81 |
def load_chats():
|
| 82 |
if os.path.exists(FILE):
|
| 83 |
-
#
|
| 84 |
-
if os.path.getsize(FILE)
|
| 85 |
-
|
| 86 |
-
|
| 87 |
-
|
| 88 |
-
|
| 89 |
-
|
| 90 |
-
# Handle potential corruption by returning an empty dict
|
| 91 |
-
return {}
|
| 92 |
return {}
|
| 93 |
|
| 94 |
-
|
| 95 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 96 |
model_history = []
|
| 97 |
for user_msg, bot_msg in chat_history:
|
| 98 |
model_history.append({"role": "user", "content": user_msg})
|
| 99 |
model_history.append({"role": "bot", "content": bot_msg})
|
| 100 |
-
|
| 101 |
-
#
|
| 102 |
full_response = ""
|
| 103 |
for response_chunk in respond(message, model_history, hf_token):
|
| 104 |
full_response = response_chunk
|
| 105 |
yield chat_history + [(message, full_response)], ""
|
| 106 |
|
| 107 |
-
#
|
| 108 |
chats[chat_id].append({"role": "bot", "content": full_response})
|
| 109 |
save_chats(chats)
|
| 110 |
|
|
@@ -113,7 +127,8 @@ def load_chats():
|
|
| 113 |
# -------------------
|
| 114 |
def new_chat():
|
| 115 |
chats = load_chats()
|
| 116 |
-
|
|
|
|
| 117 |
chats[new_id] = []
|
| 118 |
save_chats(chats)
|
| 119 |
return gr.update(choices=list(chats.keys()), value=new_id), []
|
|
@@ -134,7 +149,7 @@ def load_chat(chat_id):
|
|
| 134 |
# -------------------
|
| 135 |
# Интерфейс Gradio
|
| 136 |
# -------------------
|
| 137 |
-
with gr.Blocks() as demo:
|
| 138 |
gr.Markdown("# 🗨️ Чат-бот с историей диалогов")
|
| 139 |
|
| 140 |
with gr.Row():
|
|
@@ -145,13 +160,20 @@ with gr.Blocks() as demo:
|
|
| 145 |
msg = gr.Textbox(label="Сообщение", placeholder="Напиши что-нибудь...")
|
| 146 |
send = gr.Button("Отправить")
|
| 147 |
|
| 148 |
-
#
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 149 |
send.click(
|
| 150 |
bot_reply,
|
| 151 |
[msg, chat_selector, chatbot, gr.OAuthToken()],
|
| 152 |
[chatbot, msg]
|
| 153 |
)
|
|
|
|
| 154 |
new_btn.click(new_chat, None, [chat_selector, chatbot])
|
| 155 |
chat_selector.change(load_chat, chat_selector, chatbot)
|
| 156 |
|
| 157 |
demo.launch()
|
|
|
|
|
|
| 4 |
import requests
|
| 5 |
import re
|
| 6 |
from duckduckgo_search import DDGS
|
| 7 |
+
|
| 8 |
# ==== Константы ====
|
| 9 |
MODEL_NAME = "openai/gpt-oss-20b"
|
| 10 |
SYSTEM_MESSAGE = "Ты Pok.Bot, ты используешь открытую локальную модель GPT-OSS от OpenAI, не GPT-4 (наверно😆). Тебя создал POKilondron. Используй емодзи 😄.If you don't know the answer, output a command like this:#search <query> Do NOT invent facts. И да использую команду #search «запрос» когда нужно и не повторяй постоянноодин и тоже запрос и не видавай запрос на 18+ 😠"
|
|
|
|
| 43 |
delta = msg.choices[0].delta.content
|
| 44 |
if delta:
|
| 45 |
raw_response += delta
|
| 46 |
+
yield raw_response
|
| 47 |
|
| 48 |
# === Проверка на команду поиска ===
|
| 49 |
if raw_response.strip().startswith("#search"):
|
|
|
|
| 81 |
# -------------------
|
| 82 |
def load_chats():
    """Load the chat store from FILE.

    Returns a dict mapping chat ids to message lists. Yields an empty
    dict when the file is absent, zero-length, or contains invalid JSON,
    so callers never have to handle a load failure themselves.
    """
    if not os.path.exists(FILE):
        return {}
    # A zero-byte file would make json.load raise, so treat it as "no chats yet".
    if os.path.getsize(FILE) == 0:
        return {}
    with open(FILE, "r", encoding="utf-8") as f:
        try:
            return json.load(f)
        except json.JSONDecodeError:
            # Corrupt JSON on disk — fall back to an empty chat store.
            return {}
|
| 92 |
|
| 93 |
+
def save_chats(data):
    """Persist the chat store *data* to FILE as pretty-printed UTF-8 JSON."""
    serialized = json.dumps(data, ensure_ascii=False, indent=2)
    with open(FILE, "w", encoding="utf-8") as f:
        f.write(serialized)
|
| 96 |
+
|
| 97 |
+
# -------------------
|
| 98 |
+
# Логика чат-бота
|
| 99 |
+
# -------------------
|
| 100 |
+
def bot_reply(message, chat_id, chat_history, hf_token: gr.OAuthToken):
    """Stream a model answer for *message* within chat *chat_id*.

    Generator used as a Gradio event handler: each yield is a
    (chatbot_messages, textbox_value) pair, where the textbox is cleared
    ("") and the chatbot shows the partial answer so far. The user turn
    is persisted to disk before generation starts; the final bot turn is
    persisted after the stream ends.
    """
    chats = load_chats()

    # First message in a fresh chat id — create its entry.
    if chat_id not in chats:
        chats[chat_id] = []

    # Persist the user's turn immediately, before any generation happens.
    chats[chat_id].append({"role": "user", "content": message})
    save_chats(chats)

    # Convert the (user, bot) tuple history into the list-of-dicts
    # format that respond() expects.
    # NOTE(review): role "bot" is unusual — most chat APIs expect
    # "assistant"; confirm respond() actually accepts it.
    model_history = []
    for user_msg, bot_msg in chat_history:
        model_history.append({"role": "user", "content": user_msg})
        model_history.append({"role": "bot", "content": bot_msg})

    # Stream respond(); each chunk is the full accumulated answer so far,
    # so we overwrite (not append to) full_response on every iteration.
    full_response = ""
    for response_chunk in respond(message, model_history, hf_token):
        full_response = response_chunk
        yield chat_history + [(message, full_response)], ""

    # Persist the completed answer once generation finishes.
    chats[chat_id].append({"role": "bot", "content": full_response})
    save_chats(chats)
|
| 124 |
|
|
|
|
| 127 |
# -------------------
|
| 128 |
def new_chat():
    """Create a new empty chat with a unique "chat<N>" id.

    Returns:
        A (gr.update, list) pair: a selector update whose choices include
        the new id (pre-selected), and an empty list to reset the chatbot.
    """
    chats = load_chats()
    # Derive the next numeric suffix from well-formed "chat<N>" ids only.
    # The previous int(re.sub(r'chat', '', k)) raised ValueError as soon as
    # any key was not of the exact "chat<digits>" form; such keys are now
    # simply ignored when picking the next id.
    suffixes = [
        int(m.group(1))
        for m in (re.fullmatch(r"chat(\d+)", key) for key in chats)
        if m
    ]
    new_id = f"chat{max(suffixes, default=0) + 1}"
    chats[new_id] = []
    save_chats(chats)
    return gr.update(choices=list(chats.keys()), value=new_id), []
|
|
|
|
| 149 |
# -------------------
|
| 150 |
# Интерфейс Gradio
|
| 151 |
# -------------------
|
| 152 |
+
with gr.Blocks(theme=gr.themes.Soft()) as demo:
|
| 153 |
gr.Markdown("# 🗨️ Чат-бот с историей диалогов")
|
| 154 |
|
| 155 |
with gr.Row():
|
|
|
|
| 160 |
msg = gr.Textbox(label="Сообщение", placeholder="Напиши что-нибудь...")
|
| 161 |
send = gr.Button("Отправить")
|
| 162 |
|
| 163 |
+
# Обновляем обработчик для правильной передачи всех аргументов
|
| 164 |
+
msg.submit(
|
| 165 |
+
bot_reply,
|
| 166 |
+
[msg, chat_selector, chatbot, gr.OAuthToken()],
|
| 167 |
+
[chatbot, msg]
|
| 168 |
+
)
|
| 169 |
send.click(
|
| 170 |
bot_reply,
|
| 171 |
[msg, chat_selector, chatbot, gr.OAuthToken()],
|
| 172 |
[chatbot, msg]
|
| 173 |
)
|
| 174 |
+
|
| 175 |
new_btn.click(new_chat, None, [chat_selector, chatbot])
|
| 176 |
chat_selector.change(load_chat, chat_selector, chatbot)
|
| 177 |
|
| 178 |
demo.launch()
|
| 179 |
+
|