File size: 679 Bytes
03ccbe4
f17a6be
03ccbe4
9ade4d1
4a17acf
03ccbe4
a3774ec
03ccbe4
 
9ade4d1
4a17acf
03ccbe4
4a17acf
03ccbe4
4a17acf
03ccbe4
 
 
 
 
 
 
 
8229e6d
03ccbe4
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
import gradio as gr
import tensorflow as tf
from tensorflow.keras.models import load_model
from tensorflow.keras.preprocessing.sequence import pad_sequences

# Load the pre-trained Keras model (architecture + weights) from disk once,
# at import time, so every request reuses the same in-memory model.
model = load_model("romangpt.h5")

def generate_response(question):
    """Generate the model's response for a single user *question*.

    Parameters
    ----------
    question : str
        Raw text from the Gradio textbox (may be empty, since the
        interface runs with ``live=True`` on every keystroke).

    Returns
    -------
    str
        String form of the model's prediction for the padded input
        sequence, or ``""`` for empty input.

    NOTE(review): ``tokenizer`` and ``max_sequence_length`` are not
    defined anywhere in this file — they must be created/loaded at
    module level (e.g. a tokenizer pickled alongside ``romangpt.h5``),
    otherwise this raises ``NameError`` on the first non-empty call.
    TODO: confirm where they are supposed to come from.
    """
    # Guard: with live=True this fires on every keystroke, starting
    # from the empty string; predicting on an all-padding sequence is
    # meaningless, so short-circuit instead.
    if not question:
        return ""

    # Vectorize the question and pad it to the length the model was
    # trained on (maxlen = max_sequence_length - 1 — presumably the
    # usual "all tokens but the last" training setup; TODO confirm).
    input_sequence = pad_sequences(
        tokenizer.texts_to_sequences([question]),
        maxlen=max_sequence_length - 1,
    )

    # model.predict returns a batch (NumPy array); take the first row.
    prediction = model.predict(input_sequence)[0]

    # The Gradio interface is wired to a "text" output, which expects a
    # string — returning the raw ndarray was a bug.
    return str(prediction)

# Wire the generator into a minimal web UI: one text input, one text
# output.  live=True re-runs generate_response on every keystroke
# instead of waiting for an explicit submit.
iface = gr.Interface(
    fn=generate_response,
    inputs="text",
    outputs="text",
    live=True,
    title="Π§Π°Ρ‚ с RomanGPT",
    description="Π—Π°Π΄Π°ΠΉΡ‚Π΅ вопрос, ΠΈ RomanGPT сгСнСрируСт ΠΎΡ‚Π²Π΅Ρ‚.",
)

# Start the local Gradio server (blocks until the app is stopped).
iface.launch()