"""Minimal Gradio text-generation demo around a local causal-LM checkpoint."""

import gradio as gr
from transformers import AutoModelForCausalLM, AutoTokenizer

# Directory containing the model weights and tokenizer files.
# NOTE(review): "./" expects the checkpoint (e.g. a gpt4all export) to live
# in the current working directory — confirm against your deployment layout.
MODEL_PATH = "./"

# Loaded once at module import so every request reuses the same model.
model = AutoModelForCausalLM.from_pretrained(MODEL_PATH)
tokenizer = AutoTokenizer.from_pretrained(MODEL_PATH)


def generate_text(input_text):
    """Generate a continuation of *input_text* with the loaded model.

    Args:
        input_text: The prompt string supplied by the user.

    Returns:
        The decoded model output (special tokens stripped).
    """
    input_ids = tokenizer(input_text, return_tensors="pt").input_ids
    # Be explicit about the generation budget; the library default is small
    # and version-dependent.
    generated_ids = model.generate(input_ids, max_new_tokens=128)
    return tokenizer.decode(generated_ids[0], skip_special_tokens=True)


# Modern Gradio API: component classes live directly on `gr`
# (`gr.inputs.*` / `gr.outputs.*` were removed in Gradio 3/4).
text_generation_interface = gr.Interface(
    fn=generate_text,
    inputs=gr.Textbox(label="Input Text"),
    outputs=gr.Textbox(label="Generated Text"),
    title="Text Generation",
)

if __name__ == "__main__":
    # Launch only when run as a script, not when imported.
    text_generation_interface.launch()