Fedir Zadniprovskyi committed
Commit 069092a · 1 Parent(s): d200ef1

fix: lint errors

faster_whisper_server/gradio_app.py CHANGED
@@ -10,13 +10,14 @@ from faster_whisper_server.config import Config, Task
 
 TRANSCRIPTION_ENDPOINT = "/v1/audio/transcriptions"
 TRANSLATION_ENDPOINT = "/v1/audio/translations"
+TIMEOUT_SECONDS = 180
 
 
 def create_gradio_demo(config: Config) -> gr.Blocks:
     host = os.getenv("UVICORN_HOST", "0.0.0.0")
     port = int(os.getenv("UVICORN_PORT", "8000"))
     # NOTE: worth looking into generated clients
-    http_client = httpx.Client(base_url=f"http://{host}:{port}", timeout=None)
+    http_client = httpx.Client(base_url=f"http://{host}:{port}", timeout=httpx.Timeout(timeout=TIMEOUT_SECONDS))
     openai_client = OpenAI(base_url=f"http://{host}:{port}/v1", api_key="cant-be-empty")
 
     def handler(file_path: str, model: str, task: Task, temperature: float, stream: bool) -> Generator[str, None, None]:
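
A minimal standalone sketch (not part of the commit) of the behavior this diff changes; the base URL below is hypothetical and used only for illustration. The old timeout=None disabled request timeouts entirely, while httpx.Timeout(timeout=180) applies the same 180-second limit to the connect, read, write, and pool phases of each request.

# Sketch only: illustrates the timeout configuration introduced in this commit.
import httpx

TIMEOUT_SECONDS = 180

# "http://localhost:8000" is a placeholder, not taken from the commit.
http_client = httpx.Client(
    base_url="http://localhost:8000",
    timeout=httpx.Timeout(timeout=TIMEOUT_SECONDS),
)
print(http_client.timeout)  # roughly: Timeout(timeout=180)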