jhj0517 committed
Commit · 56b39a5 · 1 Parent(s): 3408366

fix spaces bug
- app.py +0 -1
- modules/whisper/faster_whisper_inference.py +7 -0
app.py
CHANGED
@@ -33,7 +33,6 @@ class App:
         whisper_type = self.args.whisper_type.lower().strip()
 
         if whisper_type in ["faster_whisper", "faster-whisper", "fasterwhisper"]:
-            print(f"\n\ndir: {self.args.faster_whisper_model_dir}\n\n")
             whisper_inf = FasterWhisperInference(
                 model_dir=self.args.faster_whisper_model_dir,
                 output_dir=self.args.output_dir,
modules/whisper/faster_whisper_inference.py
CHANGED
@@ -9,6 +9,7 @@ import ctranslate2
 import whisper
 import gradio as gr
 from argparse import Namespace
+import spaces
 
 from modules.whisper.whisper_parameter import *
 from modules.whisper.whisper_base import WhisperBase
@@ -141,6 +142,12 @@ class FasterWhisperInference(WhisperBase):
             model_paths[model_name] = os.path.join(webui_dir, self.model_dir, model_name)
         return model_paths
 
+    @spaces.gpu
+    def get_available_compute_type(self):
+        if self.device == "cuda":
+            return ctranslate2.get_supported_compute_types("cuda")
+        return ctranslate2.get_supported_compute_types("cpu")
+
     @staticmethod
     def get_device():
         if torch.cuda.is_available():
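
For context on the pattern this commit relies on, here is a minimal sketch of how a Hugging Face ZeroGPU Space typically wraps GPU-dependent work with the `spaces` decorator so that CUDA is only visible inside the decorated call. This is an illustration under those assumptions, not part of the commit: the commonly documented decorator name is `spaces.GPU`, and the helper name and compute-type fallbacks below are hypothetical.

# Minimal sketch (assumptions: a ZeroGPU Space with the `spaces` package installed;
# helper name and fallback values are hypothetical, not taken from the commit).
import spaces
import ctranslate2
import torch

@spaces.GPU  # commonly documented form; requests a GPU for the duration of this call
def pick_compute_type() -> str:
    # Inside the decorated call CUDA should be available, so query the GPU-supported
    # compute types from CTranslate2; otherwise fall back to the CPU list.
    device = "cuda" if torch.cuda.is_available() else "cpu"
    supported = ctranslate2.get_supported_compute_types(device)
    return "float16" if "float16" in supported else "int8"

if __name__ == "__main__":
    print(pick_compute_type())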
|