# NOTE(review): removed non-Python scrape artifacts that preceded the code
# (a "File size" banner, git-blame commit hashes, and a copied line-number
# gutter 1-37). They were copy/paste residue from a repository web view and
# made this file unparseable as Python.
import gradio as gr
from transformers import AutoModelForCausalLM, AutoTokenizer
import torch
# Load the DialoGPT-large tokenizer and model once at import time so every
# Gradio request reuses the same weights.
# NOTE(review): from_pretrained downloads the checkpoint on first run —
# assumes network access (or a warm local HF cache); verify for deployment.
tokenizer = AutoTokenizer.from_pretrained("microsoft/DialoGPT-large")
model = AutoModelForCausalLM.from_pretrained("microsoft/DialoGPT-large")


def chatbot(input):
    """Generate one DialoGPT reply to the user's message.

    Args:
        input: The user's message as plain text. (Name kept for
            backward compatibility even though it shadows the builtin.)

    Returns:
        The model's decoded reply with special tokens stripped.

    Note:
        Each call is independent — no chat history is carried between
        Gradio invocations. The original implementation wrapped this in a
        ``for step in range(50)`` loop but returned unconditionally on the
        first iteration, so the loop and its ``step > 0`` history-
        concatenation branch were unreachable dead code (and that branch
        would have raised NameError on ``chat_history_ids`` if reached).
    """
    # Avoid relying on the shadowed builtin name inside the body.
    user_text = input
    # Encode the prompt and append the EOS token so the model sees a
    # complete conversational turn.
    input_ids = tokenizer.encode(user_text + tokenizer.eos_token, return_tensors="pt")
    # Sample a continuation; pad_token_id silences the "no pad token"
    # warning by reusing EOS, as DialoGPT has no dedicated pad token.
    chat_history_ids = model.generate(
        input_ids,
        max_length=1000,
        do_sample=True,
        top_p=0.95,
        top_k=0,
        temperature=0.75,
        pad_token_id=tokenizer.eos_token_id,
    )
    # Decode only the newly generated tokens (slice off the prompt).
    return tokenizer.decode(
        chat_history_ids[:, input_ids.shape[-1]:][0],
        skip_special_tokens=True,
    )

# Gradio UI wiring.
# NOTE(review): `gr.inputs.Textbox` / `gr.outputs.Textbox` were deprecated
# and removed in Gradio 3.x+; the component classes now live on the
# top-level `gr` namespace and distinguish input/output by position in
# `Interface(inputs=..., outputs=...)`.
inputs = gr.Textbox(lines=7, label="Chat with AI")
outputs = gr.Textbox(label="Reply")

gr.Interface(
    fn=chatbot,
    inputs=inputs,
    outputs=outputs,
    title="Self_Trained_V1",
    description="Ask anything you want",
).launch()