# Hugging Face Space: Gradio demo for keyword spotting with a finetuned Wav2Vec2 model.
# (Original Space page showed "Runtime error" — see the fix for the undefined
# `file_classify` interface below.)
from transformers import pipeline
import gradio as gr

# Hugging Face Hub model id of the finetuned keyword-spotting checkpoint.
model_id = "Teapack1/model_KWS"  # update with your model id
# Audio-classification pipeline; downloads the model on first run.
pipe = pipeline("audio-classification", model=model_id)

# UI strings shown on the Gradio interface.
title = "Keyword Spotting Wav2Vec2"
description = "Gradio demo for finetuned Wav2Vec2 model on a custom dataset to perform keyword spotting task. Classes are scene 1, scene 2, scene 3, yes, no and stop."
def classify_audio(filepath):
    """Classify an audio file and return a label -> confidence-score mapping.

    Args:
        filepath: Path to the audio file to classify (as produced by gr.Audio
            with type="filepath").

    Returns:
        Dict mapping each predicted class label to its score, suitable for
        display in a gr.Label output.
    """
    predictions = pipe(filepath, chunk_length_s=1)
    return {pred["label"]: pred["score"] for pred in predictions}
# Build the UI: two tabs (microphone input and file upload) sharing one classifier.
demo = gr.Blocks()

mic_classify = gr.Interface(
    title=title,
    description=description,
    fn=classify_audio,
    # Gradio 4.x expects `sources` as a list of source names.
    inputs=gr.Audio(sources=["microphone"], type="filepath"),
    # `gr.outputs.Label()` was removed in Gradio 3+/4; use gr.Label().
    outputs=gr.Label(),
)

# Fix: `file_classify` was referenced in the TabbedInterface below but never
# defined, causing a NameError at startup (the Space's "Runtime error").
file_classify = gr.Interface(
    title=title,
    description=description,
    fn=classify_audio,
    inputs=gr.Audio(sources=["upload"], type="filepath"),
    outputs=gr.Label(),
)

with demo:
    gr.TabbedInterface(
        [mic_classify, file_classify],
        ["Transcribe Microphone", "Transcribe Audio File"],
    )

# debug=True surfaces errors in the console; share=True creates a public link.
demo.launch(debug=True, share=True)