[Faster whisper] Add support for large-v1
src/whisper/fasterWhisperContainer.py
CHANGED
@@ -39,16 +39,21 @@ class FasterWhisperContainer(AbstractWhisperContainer):
     def _create_model(self):
         print("Loading faster whisper model " + self.model_name + " for device " + str(self.device))
         model_config = self._get_model_config()
-
-
-
+        model_url = model_config.url
+
+        if model_config.type == "whisper":
+            if model_url not in ["tiny", "base", "small", "medium", "large", "large-v1", "large-v2"]:
+                raise Exception("FasterWhisperContainer does not yet support Whisper models. Use ct2-transformers-converter to convert the model to a faster-whisper model.")
+            if model_url == "large":
+                # large is an alias for large-v1
+                model_url = "large-v1"
 
         device = self.device
 
         if (device is None):
             device = "auto"
 
-        model = WhisperModel(
+        model = WhisperModel(model_url, device=device, compute_type=self.compute_type)
         return model
 
     def create_callback(self, language: str = None, task: str = None, initial_prompt: str = None,
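For reference, a minimal sketch (not part of this commit) of how the newly supported checkpoint can be exercised through faster-whisper directly, mirroring the WhisperModel(...) call added above. The audio path, compute_type, and the converter invocation in the comments are illustrative assumptions, not taken from the repository.

from faster_whisper import WhisperModel

# "large" is treated as an alias for "large-v1", matching the container logic above.
model = WhisperModel("large-v1", device="auto", compute_type="float16")

# Custom Whisper checkpoints would first need conversion, e.g. (illustrative):
#   ct2-transformers-converter --model openai/whisper-large-v2 \
#       --output_dir whisper-large-v2-ct2 --copy_files tokenizer.json --quantization float16

segments, info = model.transcribe("audio.mp3")
for segment in segments:
    print("[%.2fs -> %.2fs] %s" % (segment.start, segment.end, segment.text))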