File size: 1,369 Bytes
c509f65
0e5c079
6a2acfa
 
 
 
c509f65
6a2acfa
ced4dc7
6a2acfa
 
 
d4e5edd
6a2acfa
 
 
 
d4e5edd
6a2acfa
 
 
 
b88ac17
6a2acfa
d4e5edd
 
6a2acfa
b88ac17
6a2acfa
 
 
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
import streamlit as st
import tensorflow as tf
from transformers import TFGPT2LMHeadModel ,GPT2Tokenizer, BitsAndBytesConfig

# Load the GPT-2 tokenizer and TF model once at import time (weights are
# downloaded on the first run). GPT-2 has no dedicated pad token, so padding
# is mapped to the end-of-sequence token to silence generation warnings.
tokenizer = GPT2Tokenizer.from_pretrained('gpt2')
model = TFGPT2LMHeadModel.from_pretrained('gpt2',pad_token_id = tokenizer.eos_token_id)
def generate(inp):
    """Generate a GPT-2 beam-search completion for the prompt *inp*.

    Args:
        inp: User prompt text to continue.

    Returns:
        The generated text truncated at the last complete sentence
        (the trailing partial sentence is dropped). If the generation
        contains no '.' at all, the raw text is returned unchanged.
    """
    input_ids = tokenizer.encode(inp, return_tensors='tf')
    beam_output = model.generate(
        input_ids,
        max_length=90,
        num_beams=5,
        no_repeat_ngram_size=2,  # forbid repeating any bigram
        early_stopping=True,
    )
    output = tokenizer.decode(
        beam_output[0],
        skip_special_tokens=True,
        clean_up_tokenization_spaces=True,
    )
    # Trim the dangling partial sentence after the last period. The naive
    # slice-and-join would collapse a period-free generation to just ".",
    # throwing away the whole answer — return it as-is in that case.
    sentences = output.split(".")
    if len(sentences) < 2:
        return output
    return ".".join(sentences[:-1]) + "."
# ----- Streamlit chat UI -----
st.title("Animal Bot")

# Seed the conversation with a greeting on the first page load only;
# st.session_state persists across Streamlit reruns.
if "messages" not in st.session_state:
    st.session_state.messages = [{
        'role': 'assistant',
        'content': "Hi! I'm your Animal assistant, any queries about animals ?"
    }]

# Replay the stored history so the transcript survives each rerun.
for entry in st.session_state.messages:
    with st.chat_message(entry["role"]):
        st.markdown(entry["content"])

# On a new prompt: echo it, produce a model reply, and persist both turns.
if prompt := st.chat_input("Any Queries?"):
    with st.chat_message("user"):
        st.markdown(prompt)
    st.session_state.messages.append({"role": "user", "content": prompt})

    answer = generate(prompt)
    with st.chat_message("assistant"):
        st.markdown(answer)
    st.session_state.messages.append({"role": "assistant", "content": answer})