liumaolin
committed on
Commit
·
b2c10c8
1
Parent(s):
bdc3b7b
Refactor `TestLLMDialogue` to use dynamic LLM model parameters
Browse files
- Replace hardcoded `model_params` with `get_llm_model_params` from `llm_config.py`.
- Update imports to reflect changes in the configuration structure.
- tests/test_llm_dialogue.py +3 -12
tests/test_llm_dialogue.py
CHANGED
|
@@ -7,7 +7,8 @@ from langchain_core.messages import SystemMessage
|
|
| 7 |
from langchain_core.prompts import ChatPromptTemplate, HumanMessagePromptTemplate, MessagesPlaceholder
|
| 8 |
from langchain_core.runnables.history import RunnableWithMessageHistory
|
| 9 |
|
| 10 |
-
from config import paths
|
|
|
|
| 11 |
|
| 12 |
CHINESE_SYSTEM_PROMPT = (
|
| 13 |
"你是善于模拟真实的思考过程的AI助手。"
|
|
@@ -29,17 +30,7 @@ ENGLISH_SYSTEM_PROMPT = (
|
|
| 29 |
class TestLLMDialogue(unittest.TestCase):
|
| 30 |
|
| 31 |
def setUp(self):
|
| 32 |
-
model_params =
|
| 33 |
-
'n_ctx': 32768,
|
| 34 |
-
'temperature': 0.7,
|
| 35 |
-
'top_p': 0.9,
|
| 36 |
-
'top_k': 20,
|
| 37 |
-
'model_kwargs': {
|
| 38 |
-
'mini_p': 0,
|
| 39 |
-
'presence_penalty': 1.5
|
| 40 |
-
},
|
| 41 |
-
'verbose': False
|
| 42 |
-
}
|
| 43 |
self.history_store = {}
|
| 44 |
|
| 45 |
model_path = paths.LLM_MODELS_PATH / 'qwen' / 'Qwen3-8B-Q6_K.gguf'
|
|
|
|
| 7 |
from langchain_core.prompts import ChatPromptTemplate, HumanMessagePromptTemplate, MessagesPlaceholder
|
| 8 |
from langchain_core.runnables.history import RunnableWithMessageHistory
|
| 9 |
|
| 10 |
+
from voice_dialogue.config import paths
|
| 11 |
+
from voice_dialogue.config.llm_config import get_llm_model_params
|
| 12 |
|
| 13 |
CHINESE_SYSTEM_PROMPT = (
|
| 14 |
"你是善于模拟真实的思考过程的AI助手。"
|
|
|
|
| 30 |
class TestLLMDialogue(unittest.TestCase):
|
| 31 |
|
| 32 |
def setUp(self):
|
| 33 |
+
model_params = get_llm_model_params()
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 34 |
self.history_store = {}
|
| 35 |
|
| 36 |
model_path = paths.LLM_MODELS_PATH / 'qwen' / 'Qwen3-8B-Q6_K.gguf'
|