Fix Langchain Prompts Import
Browse files
- app.py +3 -1
- requirements.txt +1 -0
app.py
CHANGED
|
@@ -4,7 +4,9 @@ import streamlit as st
 # LangChain (local HF pipeline)
 from transformers import AutoTokenizer, AutoModelForSeq2SeqLM, pipeline
 from langchain_huggingface import HuggingFacePipeline
-from langchain.prompts import PromptTemplate
+#from langchain.prompts import PromptTemplate
+from langchain_core.prompts import PromptTemplate
+
 from langchain.schema import StrOutputParser
 
 # LlamaIndex (modular imports)
requirements.txt
CHANGED
|
@@ -7,6 +7,7 @@ huggingface_hub>=0.23
 langchain>=0.2.8
 langchain-community>=0.2.8
 langchain-huggingface>=0.0.3
+langchain-core
 
 # LlamaIndex (modular packages)
 llama-index>=0.10.35