| import gradio as gr | |
| import tensorflow as tf | |
| from tensorflow.keras.models import load_model | |
| from tensorflow.keras.preprocessing.sequence import pad_sequences | |
# Load the pre-trained Keras model from disk (HDF5 format).
# NOTE(review): "romangpt.h5" must exist in the working directory, or this
# raises at import time — confirm the deployment layout ships the file.
model = load_model("romangpt.h5")
def generate_response(question):
    """Generate a model response for a user question.

    Parameters
    ----------
    question : str
        Raw text entered by the user in the Gradio text box.

    Returns
    -------
    The first element of the model's prediction batch.
        NOTE(review): ``model.predict`` returns a numpy array, not decoded
        text — the Gradio ``"text"`` output will render the array's repr.
        It looks like a token-to-text decoding step is missing; confirm.
    """
    # Wrap the single question in a list so the tokenizer receives a batch of one.
    input_data = [question]
    # NOTE(review): `tokenizer` and `max_sequence_length` are not defined
    # anywhere in this file — unless another module injects them at runtime,
    # this line raises NameError. Verify where they are supposed to come from.
    input_sequence = pad_sequences(tokenizer.texts_to_sequences(input_data), maxlen=max_sequence_length-1)
    # Predict on the batch of one and take the prediction for that single element.
    response = model.predict(input_sequence)[0]
    return response
# Build the Gradio UI around the generator function.
# NOTE(review): `live=True` re-runs the model on every keystroke, which can
# be expensive for a large model — consider a submit button instead.
iface = gr.Interface(
    fn=generate_response,
    inputs="text",
    outputs="text",
    live=True,
    # Fix: the original title/description were mojibake (UTF-8 Russian text
    # mis-decoded through a Greek single-byte codepage); restored the
    # readable Russian strings.
    title="Чат с RomanGPT",
    description="Задайте вопрос, и RomanGPT сгенерирует ответ.",
)

# Start the local Gradio web server for the chat interface.
iface.launch()