|
|
from langchain.prompts import PromptTemplate
|
|
|
from langchain.chains import LLMChain
|
|
|
from langchain_community.chat_models import ChatOpenAI
|
|
|
from langchain_deepseek import ChatDeepSeek
|
|
|
from langchain_core.runnables import RunnableLambda, RunnableMap
|
|
|
|
|
|
from dotenv import load_dotenv
|
|
|
import os
|
|
|
|
|
|
load_dotenv()  # Pull DEEPSEEK_API_KEY (and any other settings) from a local .env into os.environ
|
|
|
|
|
|
def get_summary_prompt():
    """Build the prompt that asks the model to summarize `{text}`."""
    template = "Summarize the following:\n\n{text}"
    return PromptTemplate.from_template(template)
|
|
|
|
|
|
|
|
|
def get_title_prompt():
    """Build the prompt that asks the model for a 5-word title of `{summary}`."""
    template = "Create a 5-word title for this:\n\n{summary}"
    return PromptTemplate.from_template(template)
|
|
|
|
|
|
def translate_prompt():
    """Build the prompt that asks the model to translate `{text}` into Chinese."""
    template = "Translate the following text to Chinese:\n\n{text}"
    return PromptTemplate.from_template(template)
|
|
|
|
|
|
def build_hyper_chain():
    """Compose a summarize -> title -> translate pipeline over a DeepSeek chat model.

    Returns:
        RunnableLambda: invoke with ``{"text": <str>}``; returns a dict with
        keys ``"summary"``, ``"title"``, and ``"chinese_summary"``, each the
        raw chat-model message (callers read ``.content``).

    Requires DEEPSEEK_API_KEY in the environment (loaded from .env at import).
    """
    llm = ChatDeepSeek(api_key=os.getenv("DEEPSEEK_API_KEY"), model="deepseek-chat", temperature=0.7)

    summarize_chain = get_summary_prompt() | llm
    title_chain = get_title_prompt() | llm
    translate_chain = translate_prompt() | llm

    def chain_fn(inputs):
        # Sequential: the title and translation both depend on the summary text.
        summary = summarize_chain.invoke({"text": inputs["text"]})
        # BUG FIX: pass the message text, not the message object. Formatting
        # the raw AIMessage into the prompt template would embed its repr
        # (content='...' additional_kwargs=...) instead of the summary itself —
        # this now matches the translate_chain call below.
        title = title_chain.invoke({"summary": summary.content})
        chinese_summary = translate_chain.invoke({"text": summary.content})
        return {"summary": summary, "title": title, "chinese_summary": chinese_summary}

    return RunnableLambda(chain_fn)
|
|
|
|
|
|
if __name__ == "__main__":
    # Read a paragraph from stdin, run the pipeline once, and show each result.
    paragraph = input("Paste your paragraph:\n\n")

    pipeline = build_hyper_chain()
    result = pipeline.invoke({"text": paragraph})

    for label, key in (
        ("Summary", "summary"),
        ("Title", "title"),
        ("Chinese Summary", "chinese_summary"),
    ):
        print(f"\n[{label}]: {result[key].content}")