KevanSoon commited on
Commit ·
8373dc8
1
Parent(s): fd09a71
change back sealion model
Browse files
app.py
CHANGED
|
@@ -335,7 +335,7 @@ async def translate_text(text: str, target_language: str):
|
|
| 335 |
data = {
|
| 336 |
"max_completion_tokens": 4096, # Increased token limit for longer translations
|
| 337 |
"messages": [{"role": "user", "content": prompt}],
|
| 338 |
-
"model": "aisingapore/Llama-SEA-LION-v3",
|
| 339 |
}
|
| 340 |
|
| 341 |
try:
|
|
@@ -854,7 +854,7 @@ async def call_sealion_for_translation(text_to_translate: str, lang: str) -> str
|
|
| 854 |
payload = {
|
| 855 |
"max_completion_tokens": 2048,
|
| 856 |
"messages": [{"role": "user", "content": prompt}],
|
| 857 |
-
"model": "aisingapore/Llama-SEA-LION-v3",
|
| 858 |
}
|
| 859 |
|
| 860 |
async with httpx.AsyncClient() as client:
|
|
@@ -1066,7 +1066,7 @@ async def call_sealion_for_translation(text_to_translate: str, lang: str) -> str
|
|
| 1066 |
payload = {
|
| 1067 |
"max_completion_tokens": 2048,
|
| 1068 |
"messages": [{"role": "user", "content": prompt}],
|
| 1069 |
-
"model": "aisingapore/Llama-SEA-LION-v3",
|
| 1070 |
}
|
| 1071 |
|
| 1072 |
async with httpx.AsyncClient() as client:
|
|
|
|
| 335 |
data = {
|
| 336 |
"max_completion_tokens": 4096, # Increased token limit for longer translations
|
| 337 |
"messages": [{"role": "user", "content": prompt}],
|
| 338 |
+
"model": "aisingapore/Llama-SEA-LION-v3-70B-IT",
|
| 339 |
}
|
| 340 |
|
| 341 |
try:
|
|
|
|
| 854 |
payload = {
|
| 855 |
"max_completion_tokens": 2048,
|
| 856 |
"messages": [{"role": "user", "content": prompt}],
|
| 857 |
+
"model": "aisingapore/Llama-SEA-LION-v3-70B-IT",
|
| 858 |
}
|
| 859 |
|
| 860 |
async with httpx.AsyncClient() as client:
|
|
|
|
| 1066 |
payload = {
|
| 1067 |
"max_completion_tokens": 2048,
|
| 1068 |
"messages": [{"role": "user", "content": prompt}],
|
| 1069 |
+
"model": "aisingapore/Llama-SEA-LION-v3-70B-IT",
|
| 1070 |
}
|
| 1071 |
|
| 1072 |
async with httpx.AsyncClient() as client:
|