from typing import Optional

from langchain.base_language import BaseLanguageModel
from langchain.callbacks.streaming_stdout import StreamingStdOutCallbackHandler
from langchain.chat_models import ChatOpenAI
def llm(temperature: float = 0, model_name: Optional[str] = None) -> BaseLanguageModel:
    """Build a streaming ChatOpenAI language model.

    The model streams tokens to stdout as they are generated via
    ``StreamingStdOutCallbackHandler``.

    Args:
        temperature: Sampling temperature passed through to ChatOpenAI.
            Defaults to 0 (deterministic output).
        model_name: Optional OpenAI model identifier (e.g. ``"gpt-4"``).
            When ``None``, ChatOpenAI's own default model is used, which
            preserves the previous hard-coded behavior (gpt-3.5).

    Returns:
        A configured ``ChatOpenAI`` instance (a ``BaseLanguageModel``).
    """
    kwargs = {
        "streaming": True,
        "callbacks": [StreamingStdOutCallbackHandler()],
        "temperature": temperature,
    }
    # Only forward model_name when the caller supplied one, so the default
    # call signature behaves exactly as before.
    if model_name is not None:
        kwargs["model_name"] = model_name
    return ChatOpenAI(**kwargs)