Fedir Zadniprovskyi committed
Commit: 063ef89 · Parent: ff17e75
chore: expose additional config options (#51) pt2
faster_whisper_server/config.py
CHANGED
@@ -156,9 +156,9 @@ class WhisperConfig(BaseModel):
     You can find other supported models at https://huggingface.co/models?p=2&sort=trending&search=ctranslate2 and https://huggingface.co/models?sort=trending&search=ct2
     """
     inference_device: Device = Field(default=Device.AUTO)
-    compute_type: Quantization = Field(default=Quantization.DEFAULT)
     device_index: int | list[int] = 0
-
+    compute_type: Quantization = Field(default=Quantization.DEFAULT)
+    cpu_threads: int = 0
     num_workers: int = 1
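The reordered and newly exposed fields line up one-to-one with keyword arguments of faster_whisper.WhisperModel. A minimal standalone sketch of the reshaped config block, assuming plain strings in place of the project's Device and Quantization enums; the override values at the bottom are illustrative only, not defaults from this commit:

from pydantic import BaseModel, Field


# Sketch of the WhisperConfig field layout after this commit; the real code
# uses Device and Quantization enums, swapped for plain strings here.
class WhisperConfig(BaseModel):
    inference_device: str = Field(default="auto")
    device_index: int | list[int] = 0
    compute_type: str = Field(default="default")
    cpu_threads: int = 0   # newly exposed; 0 keeps the library's own default
    num_workers: int = 1


# Example override: pin inference to the second GPU and allow 4 CPU threads.
cfg = WhisperConfig(device_index=1, cpu_threads=4)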
faster_whisper_server/main.py
CHANGED
@@ -67,7 +67,10 @@ def load_model(model_name: str) -> WhisperModel:
     whisper = WhisperModel(
         model_name,
         device=config.whisper.inference_device,
+        device_index=config.whisper.device_index,
         compute_type=config.whisper.compute_type,
+        cpu_threads=config.whisper.cpu_threads,
+        num_workers=config.whisper.num_workers,
     )
     logger.info(
         f"Loaded {model_name} loaded in {time.perf_counter() - start:.2f} seconds. {config.whisper.inference_device}({config.whisper.compute_type}) will be used for inference."  # noqa: E501
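With this change, load_model forwards every WhisperConfig field to faster_whisper.WhisperModel. A standalone sketch of the equivalent direct instantiation with the config defaults written out; the model name and audio path are examples, not taken from this commit:

from faster_whisper import WhisperModel

# Direct equivalent of the updated load_model() call; each keyword mirrors
# the corresponding config.whisper.* field.
whisper = WhisperModel(
    "Systran/faster-whisper-small",  # example model name
    device="auto",            # inference_device
    device_index=0,           # device_index (newly passed through)
    compute_type="default",   # compute_type
    cpu_threads=0,            # cpu_threads (newly passed through)
    num_workers=1,            # num_workers (newly passed through)
)
segments, info = whisper.transcribe("audio.wav")  # example input path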