# app.py — Streamlit app: generate article ideas and trending hashtags
# for a user-supplied topic using LangChain + OpenAI.
import os

import streamlit as st
from langchain.llms import OpenAI
from langchain import PromptTemplate
from langchain.chains import LLMChain, SequentialChain
from langchain.memory import ConversationBufferMemory

from constants import openai_key

# The OpenAI client reads the key from the environment, so export it here.
os.environ["OPENAI_API_KEY"] = openai_key

# --- Streamlit UI ---
st.title('Generate Effective Article on any topic with Trending Hashtags')
input_text = st.text_input("Search the topic you want")
search_button = st.button("Search")
# --- Step 1 prompt: describe the topic, then generate article ideas ---
first_input_prompt = PromptTemplate(
    input_variables=['name'],
    template='''Generate article ideas on the topic: {name}
but first create a simple description for this topic {name}, then generate your ideas,
'''
)

# One ConversationBufferMemory per chain so each keeps its own history.
# NOTE(review): both use memory_key='chat_history' but the prompts never
# reference {chat_history}, so the buffers are write-only logs here.
person_memory = ConversationBufferMemory(input_key='name', memory_key='chat_history')
dob_memory = ConversationBufferMemory(input_key='person', memory_key='chat_history')

# Shared OpenAI LLM; temperature 0.8 favours creative output.
llm = OpenAI(temperature=0.8)

# Chain 1: topic ('name') -> article ideas, emitted under output key 'person'.
chain = LLMChain(
    llm=llm,
    prompt=first_input_prompt,
    verbose=True,
    output_key='person',
    memory=person_memory,
)
# --- Step 2 prompt: trending hashtags for the same topic ---
second_input_prompt = PromptTemplate(
    input_variables=['name'],
    template="Generate some trending hashtags for {name}"
)

# Chain 2: topic ('name') -> hashtags, emitted under output key 'Hashtags'.
chain2 = LLMChain(
    llm=llm,
    prompt=second_input_prompt,
    verbose=True,
    output_key='Hashtags',
    memory=dob_memory,
)

# Run both chains in order; expose both intermediate and final outputs.
parent_chain = SequentialChain(
    chains=[chain, chain2],
    input_variables=['name'],
    output_variables=['person', 'Hashtags'],
    verbose=True,
)
# Run the pipeline only on an explicit click with a non-empty topic.
if search_button:
    if input_text:
        # SequentialChain returns a dict with 'name', 'person' and 'Hashtags'.
        st.write(parent_chain({'name': input_text}))

        # Show the raw conversation buffer from the hashtag chain's memory.
        with st.expander('Article History'):
            st.info(dob_memory.buffer)