Create app.py
Browse files
app.py
ADDED
|
@@ -0,0 +1,42 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# app.py
|
| 2 |
+
import os
|
| 3 |
+
import torch
|
| 4 |
+
from transformers import pipeline
|
| 5 |
+
import gradio as gr
|
| 6 |
+
|
| 7 |
+
# Hugging Face model repo to load. If the repo is private, set HF_TOKEN as a
# Space secret — transformers picks it up automatically from the environment.
MODEL_ID = "EYEDOL/Yoruba-ASRNEW"

# Prefer the first CUDA GPU when the Space runtime has one; -1 means CPU.
if torch.cuda.is_available():
    device = 0
else:
    device = -1

# Build the ASR pipeline once at import time so every request reuses it.
asr = pipeline(
    "automatic-speech-recognition",
    model=MODEL_ID,
    device=device,
)
|
| 13 |
+
|
| 14 |
+
def transcribe_from_file(audio_path):
    """Transcribe one audio file with the module-level ASR pipeline.

    audio_path: local filepath to recorded/uploaded audio as handed over by
        Gradio (wav/m4a etc.), or None/"" when nothing was provided.

    Returns the transcription text, or a placeholder message when no audio
    was supplied.
    """
    if audio_path is None or audio_path == "":
        return "No audio provided."
    # The HF pipeline also accepts numpy arrays or lists, but Gradio gives
    # us a filepath here, which the pipeline consumes directly.
    result = asr(audio_path)
    return result.get("text", "")
|
| 23 |
+
|
| 24 |
+
# Build the two-tab demo UI: record from microphone, or upload a file.
with gr.Blocks(title="Yoruba ASR Demo") as demo:
    gr.Markdown("## Yoruba ASR — try microphone or upload an audio file 🎙️")
    with gr.Tabs():
        with gr.TabItem("Microphone"):
            # Gradio 4+ removed the `source=` kwarg in favour of a `sources`
            # list; `type="filepath"` makes Gradio pass a local file path to
            # transcribe_from_file.
            mic = gr.Audio(sources=["microphone"], type="filepath", label="Record from mic")
            mic_btn = gr.Button("Transcribe")
            mic_out = gr.Textbox(label="Transcription")
            mic_btn.click(fn=transcribe_from_file, inputs=mic, outputs=mic_out)
        with gr.TabItem("Upload audio file"):
            upload = gr.Audio(sources=["upload"], type="filepath", label="Upload audio file")
            up_btn = gr.Button("Transcribe file")
            up_out = gr.Textbox(label="Transcription")
            up_btn.click(fn=transcribe_from_file, inputs=upload, outputs=up_out)

    gr.Markdown("**Notes:** If the model is private, set a `HF_TOKEN` secret in the Space settings. "
                "For better speed, pick a GPU runtime (if available).")

if __name__ == "__main__":
    demo.launch()
|