# NOTE(review): the three lines below were Hugging Face Spaces page-status
# residue from scraping ("Spaces: Sleeping") — kept here as a comment so the
# file parses as Python.
import os

from langchain import hub
from langchain.agents import AgentType, initialize_agent, load_tools
from langchain_core.messages import AIMessage, HumanMessage, SystemMessage
from langchain_huggingface import ChatHuggingFace, HuggingFaceEndpoint
class LLMResource:
    """Shared LLM resources: a HuggingFace endpoint, a chat wrapper, and a
    Wikipedia-equipped ReAct agent.

    All attributes are built once at class-definition time, so importing this
    module requires the ``API_KEY`` environment variable to be set and makes
    a call to the HuggingFace Hub (KeyError / network errors surface at
    import).
    """

    # Streaming text-generation endpoint for Mistral-7B-Instruct.
    # temperature=0.5 keeps answers moderately deterministic.
    llm = HuggingFaceEndpoint(
        repo_id="mistralai/Mistral-7B-Instruct-v0.2",
        task="text-generation",
        huggingfacehub_api_token=os.environ["API_KEY"],
        streaming=True,
        temperature=0.5,
    )

    # Chat-message wrapper around the raw endpoint.
    chat_model = ChatHuggingFace(
        llm=llm,
    )

    # Zero-shot ReAct agent whose only tool is Wikipedia lookup.
    # handle_parsing_errors=True retries instead of raising when the model
    # emits malformed ReAct output.
    tools = load_tools(["wikipedia"], llm=llm)
    agent = initialize_agent(
        tools,
        llm,
        agent=AgentType.CHAT_ZERO_SHOT_REACT_DESCRIPTION,
        handle_parsing_errors=True,
    )

    @staticmethod
    def invoke(message: str) -> str:
        """Run the agent on *message* and return its text answer.

        The model's output may carry a trailing end-of-sequence marker
        ``</s>``; strip it only when actually present.  (The previous
        unconditional ``[:-4]`` slice chopped four characters off every
        answer, corrupting outputs that did not end with the marker.)
        """
        response = LLMResource.agent.invoke(message)
        print(response)
        return response["output"].removesuffix("</s>")

    @staticmethod
    def test() -> None:
        """Smoke-test the agent with a sample factual question."""
        print(LLMResource.agent.invoke("When was the movie \'Oppenheimer\' released?"))
# Script entry point: run the smoke test only when executed directly,
# not when imported as a module.
if __name__ == "__main__":
    LLMResource.test()