Spaces:
Sleeping
Sleeping
Update app.py
Browse files
app.py
CHANGED
|
@@ -7,6 +7,12 @@ import time
|
|
| 7 |
import tempfile
|
| 8 |
from huggingface_hub import InferenceClient
|
| 9 |
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 10 |
client = InferenceClient("mistralai/Mixtral-8x7B-Instruct-v0.1")
|
| 11 |
|
| 12 |
system_instructions1 = "<s>[SYSTEM] You are AI assistant named DorjGPT, Keep conversation very short, clear, friendly and concise. The text provided is a request for a specific type of response from you, the virtual assistant. The request asks you to provide friendly responses as if super interlligent AI assistant. The expectation is that I will avoid introductions and start answering the query directly, elaborating on all aspects of the request. As an AI-powered assistant, your task is to generate responses that appear as if they are created by the user and not an AI-powered assistant. [USER]"
|
|
@@ -31,7 +37,8 @@ async def generate1(prompt, history=[], b=None):
|
|
| 31 |
seed=42,
|
| 32 |
)
|
| 33 |
#formatted_prompt = system_instructions1 + prompt + "[JARVIS]"
|
| 34 |
-
|
|
|
|
| 35 |
stream = client.text_generation(
|
| 36 |
formatted_prompt, **generate_kwargs, stream=True, details=True, return_full_text=True)
|
| 37 |
|
|
@@ -39,9 +46,10 @@ async def generate1(prompt, history=[], b=None):
|
|
| 39 |
for response in stream:
|
| 40 |
output += response.token.text
|
| 41 |
output = output.replace("</s>","")
|
| 42 |
-
|
|
|
|
| 43 |
|
| 44 |
-
communicate = edge_tts.Communicate(
|
| 45 |
with tempfile.NamedTemporaryFile(delete=False, suffix=".wav") as tmp_file:
|
| 46 |
tmp_path = tmp_file.name
|
| 47 |
await communicate.save(tmp_path)
|
|
|
|
| 7 |
import tempfile
|
| 8 |
from huggingface_hub import InferenceClient
|
| 9 |
|
| 10 |
+
from deep_translator import GoogleTranslator
|
| 11 |
+
|
| 12 |
+
def get_translation(context_custom, language='en'):
    """Translate *context_custom* into *language* via Google Translate.

    The source language is auto-detected by the service; output defaults
    to English.  Returns the translated string.

    NOTE(review): performs a network call through deep_translator's
    GoogleTranslator — latency and availability depend on the service.
    """
    translator = GoogleTranslator(source='auto', target=language)
    return translator.translate(text=context_custom)
|
| 15 |
+
|
| 16 |
client = InferenceClient("mistralai/Mixtral-8x7B-Instruct-v0.1")
|
| 17 |
|
| 18 |
system_instructions1 = "<s>[SYSTEM] You are AI assistant named DorjGPT, Keep conversation very short, clear, friendly and concise. The text provided is a request for a specific type of response from you, the virtual assistant. The request asks you to provide friendly responses as if super interlligent AI assistant. The expectation is that I will avoid introductions and start answering the query directly, elaborating on all aspects of the request. As an AI-powered assistant, your task is to generate responses that appear as if they are created by the user and not an AI-powered assistant. [USER]"
|
|
|
|
| 37 |
seed=42,
|
| 38 |
)
|
| 39 |
#formatted_prompt = system_instructions1 + prompt + "[JARVIS]"
|
| 40 |
+
prompt_en = get_translation(prompt)
|
| 41 |
+
formatted_prompt = format_prompt(f"{system_instructions1}, {prompt_en}", history) + "[DORJGPT]"
|
| 42 |
stream = client.text_generation(
|
| 43 |
formatted_prompt, **generate_kwargs, stream=True, details=True, return_full_text=True)
|
| 44 |
|
|
|
|
| 46 |
for response in stream:
|
| 47 |
output += response.token.text
|
| 48 |
output = output.replace("</s>","")
|
| 49 |
+
output_mn = get_translation(output, language="mn")
|
| 50 |
+
history.append([prompt_en, output])
|
| 51 |
|
| 52 |
+
communicate = edge_tts.Communicate(output_mn)
|
| 53 |
with tempfile.NamedTemporaryFile(delete=False, suffix=".wav") as tmp_file:
|
| 54 |
tmp_path = tmp_file.name
|
| 55 |
await communicate.save(tmp_path)
|