lunarflu committed on
Commit ·
a2c0286
1
Parent(s): f7d01ae
Synced repo using 'sync_with_huggingface' Github Action
Browse files
app.py
CHANGED
|
@@ -680,36 +680,37 @@ async def on_message(message):
|
|
| 680 |
# bot
|
| 681 |
# channel?
|
| 682 |
try:
|
| 683 |
-
if on_message_safetychecks(message):
|
| 684 |
global falcon_userid_threadid_dictionary
|
| 685 |
global falcon_dictionary
|
| 686 |
# is this our unique channel (falcon thread?)
|
| 687 |
-
if message.
|
| 688 |
-
|
| 689 |
-
|
| 690 |
-
|
| 691 |
-
|
| 692 |
-
|
| 693 |
-
|
| 694 |
-
|
| 695 |
-
|
| 696 |
-
|
| 697 |
-
|
| 698 |
-
|
| 699 |
-
|
| 700 |
-
|
| 701 |
-
|
| 702 |
-
|
| 703 |
-
|
| 704 |
-
|
| 705 |
-
|
| 706 |
-
|
| 707 |
-
|
| 708 |
-
|
| 709 |
-
|
| 710 |
-
|
| 711 |
-
|
| 712 |
-
|
|
|
|
| 713 |
except Exception as e:
|
| 714 |
print(f"Error: {e}")
|
| 715 |
if message.channel.id == 1116089829147557999:
|
|
|
|
| 680 |
# bot
|
| 681 |
# channel?
|
| 682 |
try:
|
| 683 |
+
if await on_message_safetychecks(message):
|
| 684 |
global falcon_userid_threadid_dictionary
|
| 685 |
global falcon_dictionary
|
| 686 |
# is this our unique channel (falcon thread?)
|
| 687 |
+
if message.author.id in falcon_userid_threadid_dictionary:
|
| 688 |
+
if message.channel.id == falcon_userid_threadid_dictionary[message.author.id]:
|
| 689 |
+
await message.add_reaction('<a:loading:1114111677990981692>') # helps the bot appear responsive
|
| 690 |
+
chathistory = falcon_dictionary[message.author.id]
|
| 691 |
+
instructions = "The following is a conversation between a highly knowledgeable and intelligent AI assistant, " \
|
| 692 |
+
"called Falcon, and a human user, called User. In the following interactions, " \
|
| 693 |
+
"User and Falcon will converse in natural language, and Falcon will answer User's questions. " \
|
| 694 |
+
"Falcon was built to be respectful, polite and inclusive. " \
|
| 695 |
+
"Falcon was built by the Technology Innovation Institute in Abu Dhabi. " \
|
| 696 |
+
"Falcon will never decline to answer a question, and always attempts to give an answer that " \
|
| 697 |
+
"User would be satisfied with. It knows a lot, and always tells the truth. The conversation begins."
|
| 698 |
+
# handles the blocking task in the background so the discord bot stays responsive
|
| 699 |
+
# for added security, we could verify if message.content contains symbols used in commands; But those will fail anyways (see safetychecks)
|
| 700 |
+
job = falconclient.submit(message.content, chathistory, instructions, 0.8, 0.9, fn_index=1) # job is similar to run_in_executor (but better)
|
| 701 |
+
while job.done() == False:
|
| 702 |
+
status = job.status() # this could be used for debugging etc
|
| 703 |
+
#print(status)
|
| 704 |
+
else:
|
| 705 |
+
file_paths = job.outputs() # file_paths = ["tmp123.json", "tmp456.json," etc...]
|
| 706 |
+
full_generation = file_paths[-1] # the last filepath contains the full generated text
|
| 707 |
+
with open(full_generation, 'r') as file:
|
| 708 |
+
data = json.load(file)
|
| 709 |
+
output_text = data[-1][-1] # we only need the very last/latest string for the discord bot to output
|
| 710 |
+
falcon_dictionary[message.author.id] = full_generation # update our unique conversation
|
| 711 |
+
print(output_text)
|
| 712 |
+
await message.reply(f"{output_text}") # reply to user's prompt (whatever they typed)
|
| 713 |
+
await message.remove_reaction('<a:loading:1114111677990981692>', bot.user)
|
| 714 |
except Exception as e:
|
| 715 |
print(f"Error: {e}")
|
| 716 |
if message.channel.id == 1116089829147557999:
|