Sbzc commited on
Commit
be85197
·
verified ·
1 Parent(s): 1ae172d

Create app.py

Browse files
Files changed (1) hide show
  1. app.py +98 -0
app.py ADDED
@@ -0,0 +1,98 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import gradio as gr
2
+ import torch
3
+ import coremltools as ct
4
+ from huggingface_hub import snapshot_download
5
+ from transformers import AutoModel, AutoTokenizer
6
+ import os
7
+ import tempfile
8
+ import shutil
9
+
10
def convert_to_coreml(model_name, input_length=128, ios_version="iOS15"):
    """Download a Hugging Face model and convert it to Core ML format.

    Args:
        model_name: Hugging Face repo id (e.g. "bert-base-uncased").
        input_length: Fixed token sequence length used to build the example
            input for tracing; the converted model has this static shape.
        ios_version: Target deployment version: "iOS15", "iOS16" or "iOS17"
            (any other value falls through to iOS17, matching the UI choices).

    Returns:
        Tuple of (path to saved .mlmodel, status message) on success,
        or (None, error message) on failure.
    """
    try:
        print(f"Downloading model {model_name}...")
        # Download model from Huggingface
        model_path = snapshot_download(repo_id=model_name)

        print("Loading model...")
        # Load model and tokenizer
        model = AutoModel.from_pretrained(model_path)
        tokenizer = AutoTokenizer.from_pretrained(model_path)

        # torch.jit.trace requires tensor/tuple outputs; transformers models
        # return ModelOutput dicts by default, which makes tracing fail.
        model.config.return_dict = False
        # Set model to evaluation mode
        model.eval()

        print("Creating example input...")
        # Some tokenizers (e.g. GPT-style) have no pad token; reuse EOS so
        # padding="max_length" below works.
        if hasattr(tokenizer, "pad_token") and tokenizer.pad_token is None:
            tokenizer.pad_token = tokenizer.eos_token

        sample_text = "This is a sample input for conversion"
        inputs = tokenizer(sample_text, return_tensors="pt", padding="max_length", max_length=input_length)

        print("Tracing model...")
        # Trace without autograd bookkeeping — inference-only conversion.
        with torch.no_grad():
            traced_model = torch.jit.trace(model, [inputs["input_ids"], inputs["attention_mask"]])

        # Map the UI choice to a coremltools deployment target (default iOS17).
        targets = {"iOS15": ct.target.iOS15, "iOS16": ct.target.iOS16}
        target = targets.get(ios_version, ct.target.iOS17)

        print(f"Converting to Core ML (targeting {ios_version})...")
        # Convert to Core ML with fixed input shapes taken from the example.
        mlmodel = ct.convert(
            traced_model,
            inputs=[
                ct.TensorType(name="input_ids", shape=inputs["input_ids"].shape),
                ct.TensorType(name="attention_mask", shape=inputs["attention_mask"].shape),
            ],
            minimum_deployment_target=target,
        )

        # Save into a fresh temporary directory so the Gradio File widget
        # can serve the artifact for download.
        temp_dir = tempfile.mkdtemp()
        model_file = os.path.join(temp_dir, f"{model_name.split('/')[-1]}.mlmodel")

        print(f"Saving model to {model_file}...")
        mlmodel.save(model_file)

        return model_file, "Conversion successful!"

    except Exception as e:
        # Surface the failure to the UI status box instead of crashing the app.
        return None, f"Error: {str(e)}"
68
+
69
def process(model_name, input_length, ios_version):
    """Gradio click handler: run the conversion and fan the result out
    to the (status textbox, file download) output pair."""
    # Sliders hand back floats, so coerce the length before converting.
    converted_path, status = convert_to_coreml(model_name, int(input_length), ios_version)
    # On failure convert_to_coreml already yields None for the path.
    return (status, converted_path) if converted_path else (status, None)
75
+
76
# Assemble the Gradio UI: inputs and the trigger button in the left column,
# status text plus the downloadable artifact in the right one.
with gr.Blocks(title="Huggingface to Core ML Converter") as demo:
    gr.Markdown("# Huggingface to Core ML Model Converter")
    gr.Markdown("Enter a Huggingface model ID and convert it to Core ML format.")

    with gr.Row():
        with gr.Column():
            model_id_box = gr.Textbox(label="Huggingface Model ID (e.g., 'your-username/your-model')")
            length_slider = gr.Slider(minimum=16, maximum=512, value=128, step=16, label="Input Length")
            ios_dropdown = gr.Dropdown(choices=["iOS15", "iOS16", "iOS17"], value="iOS15", label="Target iOS Version")
            run_button = gr.Button("Convert Model")

        with gr.Column():
            status_box = gr.Textbox(label="Status")
            download_file = gr.File(label="Converted Model")

    # Wire the button to the conversion callback.
    run_button.click(
        process,
        inputs=[model_id_box, length_slider, ios_dropdown],
        outputs=[status_box, download_file],
    )

demo.launch()