Spaces:
Runtime error
Runtime error
File size: 990 Bytes
9422011 2e35d02 a54b6d7 9422011 2e35d02 3b3366c 9422011 2e35d02 3b3366c b44551d 8a9e88a b44551d 8a9e88a |
1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31 32 33 34 35 36 37 |
from transformers import pipeline
import gradio as gr
model_id = "Teapack1/model_KWS" # update with your model id
pipe = pipeline("audio-classification", model=model_id)
title = "Keyword Spotting Wav2Vec2"
description = "Gradio demo for finetuned Wav2Vec2 model on a custom dataset to perform keyword spotting task. Classes are scene 1, scene 2, scene 3, yes, no and stop."
def classify_audio(filepath):
    """Classify an audio file with the keyword-spotting pipeline.

    Args:
        filepath: Path to the audio file to classify.

    Returns:
        dict mapping each predicted class label to its confidence score,
        the format expected by a gr.Label output component.
    """
    # The pipeline returns a list of {"label": ..., "score": ...} dicts;
    # flatten it into the label -> score mapping Gradio wants.
    return {pred["label"]: pred["score"] for pred in pipe(filepath)}
# Assemble the demo UI: one Interface for microphone input and one for
# file upload, presented as two tabs inside a Blocks container.
demo = gr.Blocks()

mic_transcribe = gr.Interface(
    fn=classify_audio,
    inputs=gr.Audio(sources="microphone", type="filepath"),
    # gr.outputs.Label() was removed in Gradio 4.x; gr.Label() is the
    # current component.
    outputs=gr.Label(),
    title=title,
    description=description,
)

# BUG FIX: this interface was previously assigned to `mic_transcribe`
# again, clobbering the microphone interface and leaving
# `file_transcribe` undefined — a NameError at the TabbedInterface call.
file_transcribe = gr.Interface(
    fn=classify_audio,
    inputs=gr.Audio(sources="upload", type="filepath"),
    outputs=gr.Label(),
    title=title,
    description=description,
)

with demo:
    gr.TabbedInterface(
        [mic_transcribe, file_transcribe],
        ["Transcribe Microphone", "Transcribe Audio File"],
    )

# Without launch() the app never starts serving.
demo.launch()