learnmatze committed on
Commit ·
63c5eb8
1
Parent(s): b7b0196
added Phi-3.5-mini-instruct
Browse files
app.py
CHANGED
|
@@ -2,11 +2,13 @@ import gradio as gr
|
|
| 2 |
from transformers import AutoTokenizer, AutoModelForCausalLM
|
| 3 |
import torch
|
| 4 |
|
| 5 |
-
|
| 6 |
-
|
| 7 |
-
|
| 8 |
-
|
| 9 |
-
|
|
|
|
|
|
|
| 10 |
|
| 11 |
# Function to generate text based on the prompt
|
| 12 |
def generate_text(prompt, max_length=100):
|
|
@@ -20,6 +22,6 @@ iface = gr.Interface(
|
|
| 20 |
fn=generate_text,
|
| 21 |
inputs="text",
|
| 22 |
outputs="text",
|
| 23 |
-
title="Microsoft Phi
|
| 24 |
)
|
| 25 |
iface.launch()
|
|
|
|
| 2 |
from transformers import AutoTokenizer, AutoModelForCausalLM
|
| 3 |
import torch
|
| 4 |
|
| 5 |
+
# Load model directly
|
| 6 |
+
from transformers import AutoTokenizer, AutoModelForCausalLM
|
| 7 |
+
|
| 8 |
+
tokenizer = AutoTokenizer.from_pretrained("microsoft/Phi-3.5-mini-instruct", trust_remote_code=True)
|
| 9 |
+
model = AutoModelForCausalLM.from_pretrained("microsoft/Phi-3.5-mini-instruct",
|
| 10 |
+
device_map="auto",
|
| 11 |
+
trust_remote_code=True)
|
| 12 |
|
| 13 |
# Function to generate text based on the prompt
|
| 14 |
def generate_text(prompt, max_length=100):
|
|
|
|
| 22 |
fn=generate_text,
|
| 23 |
inputs="text",
|
| 24 |
outputs="text",
|
| 25 |
+
title="Microsoft Phi 3.5B Instruct - Text Generation"
|
| 26 |
)
|
| 27 |
iface.launch()
|