Fix truncated model name to "gemini-2.5-pro"
Browse files- process/interpretation.py +1 -1
- process/translation.py +1 -1
process/interpretation.py
CHANGED
|
@@ -38,7 +38,7 @@ def get_interpretation(genre: str,
|
|
| 38 |
sys_prompt = genres[genre.lower()].replace("[LEARN_LANGUAGE]", learn_lang).replace("[PROF_LANGUAGE]", prof_lang)
|
| 39 |
|
| 40 |
response = client.models.generate_content(
|
| 41 |
-
model="gemini-2.5-
|
| 42 |
config=types.GenerateContentConfig(
|
| 43 |
system_instruction=sys_prompt,
|
| 44 |
temperature=0.3,
|
|
|
|
| 38 |
sys_prompt = genres[genre.lower()].replace("[LEARN_LANGUAGE]", learn_lang).replace("[PROF_LANGUAGE]", prof_lang)
|
| 39 |
|
| 40 |
response = client.models.generate_content(
|
| 41 |
+
model="gemini-2.5-pro",
|
| 42 |
config=types.GenerateContentConfig(
|
| 43 |
system_instruction=sys_prompt,
|
| 44 |
temperature=0.3,
|
process/translation.py
CHANGED
|
@@ -36,7 +36,7 @@ def get_translaton(text: str, api_key: str, target_language: str) -> str:
|
|
| 36 |
tar_lang = lang_map.get(target_language, "English")
|
| 37 |
sys_prompt = SYS_PROMPT_TRANSLATION.replace("[TARGET_LANGUAGE]", tar_lang)
|
| 38 |
response = client.models.generate_content(
|
| 39 |
-
model="gemini-2.5-
|
| 40 |
config=types.GenerateContentConfig(
|
| 41 |
system_instruction=sys_prompt,
|
| 42 |
temperature=0.1,
|
|
|
|
| 36 |
tar_lang = lang_map.get(target_language, "English")
|
| 37 |
sys_prompt = SYS_PROMPT_TRANSLATION.replace("[TARGET_LANGUAGE]", tar_lang)
|
| 38 |
response = client.models.generate_content(
|
| 39 |
+
model="gemini-2.5-pro",
|
| 40 |
config=types.GenerateContentConfig(
|
| 41 |
system_instruction=sys_prompt,
|
| 42 |
temperature=0.1,
|