# Third-party: LangChain components for a local RAG (retrieval-augmented
# generation) pipeline backed by a locally served Ollama model.
from langchain import hub
from langchain.agents import create_react_agent, AgentExecutor
from langchain.tools.retriever import create_retriever_tool
from langchain_community.document_loaders import DirectoryLoader
from langchain_community.llms import Ollama
from langchain_community.vectorstores import Chroma
from langchain_ollama import OllamaEmbeddings
from langchain_text_splitters import RecursiveCharacterTextSplitter

import nltk

# NLTK data required by DirectoryLoader's default (unstructured) text
# partitioning; downloads are cached locally after the first run.
nltk.download('punkt_tab')
nltk.download('averaged_perceptron_tagger_eng')
# Local LLM served by Ollama. temperature=0 for deterministic output;
# num_ctx=8192 widens the context window beyond the Ollama default.
llm = Ollama(model="llama3.1:8b-instruct", temperature=0, num_ctx=8192)

# Recursively load every .txt file under the test corpus directory.
# NOTE(review): hard-coded absolute path — adjust for your environment.
loader = DirectoryLoader('/home/weishaohang/workspace/Omni-Temp/test_articles', glob="**/*.txt")

# One Document per file, each carrying a 'source' path in its metadata.
documents = loader.load()
# Split documents into overlapping chunks for embedding; the 128-char
# overlap preserves context across chunk boundaries.
text_splitter = RecursiveCharacterTextSplitter(chunk_size=512, chunk_overlap=128)
texts = text_splitter.split_documents(documents)

# Embed the chunks with a local Ollama embedding model and persist the
# vector index to disk so later runs can reload it from ./chroma_db.
embeddings = OllamaEmbeddings(model="nomic-embed-text")
vectorstore = Chroma.from_documents(documents=texts, embedding=embeddings, persist_directory="./chroma_db")
# Expose the vector store as a top-3 similarity retriever and wrap it as
# an agent tool so a ReAct agent can decide when to query the corpus.
retriever = vectorstore.as_retriever(search_kwargs={"k": 3})
retriever_tool = create_retriever_tool(
    retriever,
    name="local_knowledge_base",
    description="Search for information in local articles collection."
)

# Standard ReAct prompt template pulled from the LangChain Hub
# (requires network access on first pull; cached afterwards).
prompt = hub.pull("hwchase17/react")
print(prompt)