| | import streamlit as st
|
| | from langchain.prompts import PromptTemplate
|
| | from langchain_community.llms import CTransformers
|
| | import time
|
| |
|
| |
|
# Target audiences the user can pick from when generating a blog post.
# The selected value is interpolated into the LLM prompt as {blog_style}.
BLOG_STYLES = [
    'Researchers',
    'Data Scientist',
    'Common People',
    'Software Engineers',
    'Product Managers',
    'Healthcare Professionals',
    'Teachers',
    'Entrepreneurs',
    'Marketers',
    'Students'
]
|
| |
|
| |
|
@st.cache_resource
def _load_llm():
    """Load the quantized LLaMA-2 chat model once and reuse it across reruns.

    Streamlit re-executes the whole script on every widget interaction;
    without caching, the multi-GB GGML model would be reloaded from disk
    on each button click.
    """
    return CTransformers(model='models/llama-2-7b-chat.ggmlv3.q8_0.bin',
                         model_type='llama',
                         config={'max_new_tokens': 256, 'temperature': 0.01})


def getLLamaResponse(input_text, no_words, blog_style):
    """Generate blog content for a topic, audience, and target word count.

    Args:
        input_text: Blog topic (or topic keywords) to write about.
        no_words: Approximate word limit, passed verbatim into the prompt.
        blog_style: Target audience label (one of BLOG_STYLES).

    Returns:
        The raw text completion produced by the model.
    """
    llm = _load_llm()

    template = """
Write a blog for {blog_style} job profile for a topic {input_text}
within {no_words} words.
"""
    prompt = PromptTemplate(
        input_variables=["blog_style", "input_text", "no_words"],
        template=template,
    )

    # CTransformers LLMs are directly callable with a fully-formatted prompt.
    response = llm(prompt.format(blog_style=blog_style,
                                 input_text=input_text,
                                 no_words=no_words))
    return response
|
| |
|
| |
|
@st.cache_resource
def _load_llm():
    """Load the quantized LLaMA-2 chat model once and reuse it across reruns.

    Streamlit re-executes the whole script on every widget interaction;
    without caching, the multi-GB GGML model would be reloaded from disk
    on each button click.
    """
    return CTransformers(model='models/llama-2-7b-chat.ggmlv3.q8_0.bin',
                         model_type='llama',
                         config={'max_new_tokens': 256, 'temperature': 0.01})


def generate_topics_from_llama(input_text):
    """Ask the model for blog-topic ideas based on user keywords.

    Args:
        input_text: Free-form keywords entered by the user.

    Returns:
        A list of non-empty, whitespace-stripped topic strings — one per
        line of the model's output.
    """
    llm = _load_llm()

    topic_template = """
Generate a list of blog topics based on the keywords: {input_text}
"""
    prompt = PromptTemplate(input_variables=["input_text"],
                            template=topic_template)

    topics_response = llm(prompt.format(input_text=input_text))

    # The model emits one topic per line; drop blank lines and edge spaces.
    return [topic.strip() for topic in topics_response.split('\n')
            if topic.strip()]
|
| |
|
| |
|
# --- Page configuration (must run before any other st.* rendering call) ---
st.set_page_config(page_title="LLAMA 2 Generate Blogs",
                   page_icon='images/favicon.ico',
                   layout='centered',
                   initial_sidebar_state='collapsed')

# Company logo shown above the page title.
st.image('images/ConcertIDC_Logo_Stack.png', width=50, caption='')

# Heading rendered as raw HTML so it can be styled with flexbox.
st.markdown(
    """
    <h1 style="display:flex; align-items:center;">
    LLAMA 2 Generate Blogs
    </h1>
    """,
    unsafe_allow_html=True
)
|
| |
|
| |
|
# Persist generated topics and the user's selection across Streamlit reruns;
# initialize only on the first run so interactions don't reset them.
if 'topics' not in st.session_state:
    st.session_state.topics = []

if 'selected_topic' not in st.session_state:
    st.session_state.selected_topic = None
|
| |
|
| |
|
# Step 1: collect keywords and generate candidate blog topics from them.
input_text = st.text_input("Enter the Blog Topic Keywords")

generate_topics = st.button("Generate Topics")

if generate_topics:
    # Guard: don't invoke the model with an empty/whitespace-only query.
    if not input_text.strip():
        st.warning("Please enter some keywords before generating topics.")
    else:
        with st.spinner('Generating topics...'):
            st.session_state.topics = generate_topics_from_llama(input_text)
            time.sleep(2)
|
| |
|
| |
|
# Step 2: once topics exist, let the user pick one and generate the blog.
if st.session_state.topics:
    selected_topic = st.selectbox('Select a Topic', st.session_state.topics)
    st.session_state.selected_topic = selected_topic

    no_words = st.text_input('Number of Words (optional)', value='')

    blog_style = st.selectbox('Writing the blog for', BLOG_STYLES, index=0)

    generate_blog = st.button("Generate Blog Content")

    if generate_blog:
        with st.spinner('Generating blog content...'):
            # Default to 500 words when the field is blank; stripping also
            # catches whitespace-only input, which `== ''` would miss.
            word_count = no_words.strip() or '500'
            response = getLLamaResponse(st.session_state.selected_topic,
                                        word_count,
                                        blog_style)
            time.sleep(2)
        st.write(response)
|
| |
|