Mau-Gal-8 committed on
Commit
88f6837
·
verified ·
1 Parent(s): 66de87a

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +24 -1
app.py CHANGED
@@ -1,3 +1,26 @@
 
 
 
 
1
  import gradio as gr
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
2
 
3
- gr.load("models/Efficient-Large-Model/VILA1.5-3b").launch()
 
1
+ # BELOW 2 ROWS NOT CORRECTLY WORKING !!!!!
2
+ # import gradio as gr
3
+ # gr.load("models/Efficient-Large-Model/VILA1.5-3b").launch()
4
+
5
  import gradio as gr
6
+ from transformers import AutoModel, AutoProcessor
7
+
8
+ # Load model and processor
9
+ model = AutoModel.from_pretrained("Efficient-Large-Model/VILA1.5-3b")
10
+ processor = AutoProcessor.from_pretrained("Efficient-Large-Model/VILA1.5-3b")
11
+
12
+ # Define function for model inference
13
+ def predict(input_text):
14
+ # Process and perform inference on input_text
15
+ # Note: Adapt this based on your model’s expected inputs/outputs
16
+ inputs = processor(text=input_text, return_tensors="pt")
17
+ outputs = model(**inputs)
18
+ return outputs.logits # or other processing based on model outputs
19
+
20
+ # Launch Gradio interface
21
+ gr.Interface(
22
+ fn=predict,
23
+ inputs="text",
24
+ outputs="text"
25
+ ).launch()
26