Fedir Zadniprovskyi committed on
Commit
23a3cae
·
1 Parent(s): 1de8d65

chore: auto-fix ruff errors

Browse files
tests/api_model_test.py CHANGED
@@ -7,19 +7,19 @@ MODEL_THAT_DOES_NOT_EXIST = "i-do-not-exist"
7
  MIN_EXPECTED_NUMBER_OF_MODELS = 70 # At the time of the test creation there are 89 models
8
 
9
 
10
- @pytest.mark.asyncio()
11
  async def test_list_models(openai_client: AsyncOpenAI) -> None:
12
  models = (await openai_client.models.list()).data
13
  assert len(models) > MIN_EXPECTED_NUMBER_OF_MODELS
14
 
15
 
16
- @pytest.mark.asyncio()
17
  async def test_model_exists(openai_client: AsyncOpenAI) -> None:
18
  model = await openai_client.models.retrieve(MODEL_THAT_EXISTS)
19
  assert model.id == MODEL_THAT_EXISTS
20
 
21
 
22
- @pytest.mark.asyncio()
23
  async def test_model_does_not_exist(openai_client: AsyncOpenAI) -> None:
24
  with pytest.raises(openai.NotFoundError):
25
  await openai_client.models.retrieve(MODEL_THAT_DOES_NOT_EXIST)
 
7
  MIN_EXPECTED_NUMBER_OF_MODELS = 70 # At the time of the test creation there are 89 models
8
 
9
 
10
+ @pytest.mark.asyncio
11
  async def test_list_models(openai_client: AsyncOpenAI) -> None:
12
  models = (await openai_client.models.list()).data
13
  assert len(models) > MIN_EXPECTED_NUMBER_OF_MODELS
14
 
15
 
16
+ @pytest.mark.asyncio
17
  async def test_model_exists(openai_client: AsyncOpenAI) -> None:
18
  model = await openai_client.models.retrieve(MODEL_THAT_EXISTS)
19
  assert model.id == MODEL_THAT_EXISTS
20
 
21
 
22
+ @pytest.mark.asyncio
23
  async def test_model_does_not_exist(openai_client: AsyncOpenAI) -> None:
24
  with pytest.raises(openai.NotFoundError):
25
  await openai_client.models.retrieve(MODEL_THAT_DOES_NOT_EXIST)
tests/api_timestamp_granularities_test.py CHANGED
@@ -2,12 +2,13 @@
2
 
3
  from pathlib import Path
4
 
5
- from faster_whisper_server.api_models import TIMESTAMP_GRANULARITIES_COMBINATIONS, TimestampGranularities
6
  from openai import AsyncOpenAI
7
  import pytest
8
 
 
 
9
 
10
- @pytest.mark.asyncio()
11
  @pytest.mark.parametrize("timestamp_granularities", TIMESTAMP_GRANULARITIES_COMBINATIONS)
12
  async def test_api_json_response_format_and_timestamp_granularities_combinations(
13
  openai_client: AsyncOpenAI,
@@ -20,7 +21,7 @@ async def test_api_json_response_format_and_timestamp_granularities_combinations
20
  )
21
 
22
 
23
- @pytest.mark.asyncio()
24
  @pytest.mark.parametrize("timestamp_granularities", TIMESTAMP_GRANULARITIES_COMBINATIONS)
25
  async def test_api_verbose_json_response_format_and_timestamp_granularities_combinations(
26
  openai_client: AsyncOpenAI,
 
2
 
3
  from pathlib import Path
4
 
 
5
  from openai import AsyncOpenAI
6
  import pytest
7
 
8
+ from faster_whisper_server.api_models import TIMESTAMP_GRANULARITIES_COMBINATIONS, TimestampGranularities
9
+
10
 
11
+ @pytest.mark.asyncio
12
  @pytest.mark.parametrize("timestamp_granularities", TIMESTAMP_GRANULARITIES_COMBINATIONS)
13
  async def test_api_json_response_format_and_timestamp_granularities_combinations(
14
  openai_client: AsyncOpenAI,
 
21
  )
22
 
23
 
24
+ @pytest.mark.asyncio
25
  @pytest.mark.parametrize("timestamp_granularities", TIMESTAMP_GRANULARITIES_COMBINATIONS)
26
  async def test_api_verbose_json_response_format_and_timestamp_granularities_combinations(
27
  openai_client: AsyncOpenAI,
tests/conftest.py CHANGED
@@ -3,12 +3,13 @@ import logging
3
  import os
4
 
5
  from fastapi.testclient import TestClient
6
- from faster_whisper_server.main import create_app
7
  from httpx import ASGITransport, AsyncClient
8
  from openai import AsyncOpenAI
9
  import pytest
10
  import pytest_asyncio
11
 
 
 
12
  disable_loggers = ["multipart.multipart", "faster_whisper"]
13
 
14
 
@@ -19,7 +20,7 @@ def pytest_configure() -> None:
19
 
20
 
21
  # NOTE: not being used. Keeping just in case
22
- @pytest.fixture()
23
  def client() -> Generator[TestClient, None, None]:
24
  os.environ["WHISPER__MODEL"] = "Systran/faster-whisper-tiny.en"
25
  with TestClient(create_app()) as client:
@@ -38,7 +39,7 @@ def openai_client(aclient: AsyncClient) -> AsyncOpenAI:
38
  return AsyncOpenAI(api_key="cant-be-empty", http_client=aclient)
39
 
40
 
41
- @pytest.fixture()
42
  def actual_openai_client() -> AsyncOpenAI:
43
  return AsyncOpenAI(
44
  base_url="https://api.openai.com/v1"
 
3
  import os
4
 
5
  from fastapi.testclient import TestClient
 
6
  from httpx import ASGITransport, AsyncClient
7
  from openai import AsyncOpenAI
8
  import pytest
9
  import pytest_asyncio
10
 
11
+ from faster_whisper_server.main import create_app
12
+
13
  disable_loggers = ["multipart.multipart", "faster_whisper"]
14
 
15
 
 
20
 
21
 
22
  # NOTE: not being used. Keeping just in case
23
+ @pytest.fixture
24
  def client() -> Generator[TestClient, None, None]:
25
  os.environ["WHISPER__MODEL"] = "Systran/faster-whisper-tiny.en"
26
  with TestClient(create_app()) as client:
 
39
  return AsyncOpenAI(api_key="cant-be-empty", http_client=aclient)
40
 
41
 
42
+ @pytest.fixture
43
  def actual_openai_client() -> AsyncOpenAI:
44
  return AsyncOpenAI(
45
  base_url="https://api.openai.com/v1"
tests/openai_timestamp_granularities_test.py CHANGED
@@ -2,13 +2,14 @@
2
 
3
  from pathlib import Path
4
 
5
- from faster_whisper_server.api_models import TIMESTAMP_GRANULARITIES_COMBINATIONS, TimestampGranularities
6
  from openai import AsyncOpenAI, BadRequestError
7
  import pytest
8
 
 
 
9
 
10
- @pytest.mark.asyncio()
11
- @pytest.mark.requires_openai()
12
  @pytest.mark.parametrize("timestamp_granularities", TIMESTAMP_GRANULARITIES_COMBINATIONS)
13
  async def test_openai_json_response_format_and_timestamp_granularities_combinations(
14
  actual_openai_client: AsyncOpenAI,
@@ -29,8 +30,8 @@ async def test_openai_json_response_format_and_timestamp_granularities_combinati
29
  )
30
 
31
 
32
- @pytest.mark.asyncio()
33
- @pytest.mark.requires_openai()
34
  @pytest.mark.parametrize("timestamp_granularities", TIMESTAMP_GRANULARITIES_COMBINATIONS)
35
  async def test_openai_verbose_json_response_format_and_timestamp_granularities_combinations(
36
  actual_openai_client: AsyncOpenAI,
 
2
 
3
  from pathlib import Path
4
 
 
5
  from openai import AsyncOpenAI, BadRequestError
6
  import pytest
7
 
8
+ from faster_whisper_server.api_models import TIMESTAMP_GRANULARITIES_COMBINATIONS, TimestampGranularities
9
+
10
 
11
+ @pytest.mark.asyncio
12
+ @pytest.mark.requires_openai
13
  @pytest.mark.parametrize("timestamp_granularities", TIMESTAMP_GRANULARITIES_COMBINATIONS)
14
  async def test_openai_json_response_format_and_timestamp_granularities_combinations(
15
  actual_openai_client: AsyncOpenAI,
 
30
  )
31
 
32
 
33
+ @pytest.mark.asyncio
34
+ @pytest.mark.requires_openai
35
  @pytest.mark.parametrize("timestamp_granularities", TIMESTAMP_GRANULARITIES_COMBINATIONS)
36
  async def test_openai_verbose_json_response_format_and_timestamp_granularities_combinations(
37
  actual_openai_client: AsyncOpenAI,
tests/sse_test.py CHANGED
@@ -2,10 +2,6 @@ import json
2
  from pathlib import Path
3
 
4
  import anyio
5
- from faster_whisper_server.api_models import (
6
- CreateTranscriptionResponseJson,
7
- CreateTranscriptionResponseVerboseJson,
8
- )
9
  from httpx import AsyncClient
10
  from httpx_sse import aconnect_sse
11
  import pytest
@@ -13,6 +9,11 @@ import srt
13
  import webvtt
14
  import webvtt.vtt
15
 
 
 
 
 
 
16
  FILE_PATHS = ["audio.wav"] # HACK
17
  ENDPOINTS = [
18
  "/v1/audio/transcriptions",
@@ -23,7 +24,7 @@ ENDPOINTS = [
23
  parameters = [(file_path, endpoint) for endpoint in ENDPOINTS for file_path in FILE_PATHS]
24
 
25
 
26
- @pytest.mark.asyncio()
27
  @pytest.mark.parametrize(("file_path", "endpoint"), parameters)
28
  async def test_streaming_transcription_text(aclient: AsyncClient, file_path: str, endpoint: str) -> None:
29
  extension = Path(file_path).suffix[1:]
@@ -39,7 +40,7 @@ async def test_streaming_transcription_text(aclient: AsyncClient, file_path: str
39
  assert len(event.data) > 1 # HACK: 1 because of the space character that's always prepended
40
 
41
 
42
- @pytest.mark.asyncio()
43
  @pytest.mark.parametrize(("file_path", "endpoint"), parameters)
44
  async def test_streaming_transcription_json(aclient: AsyncClient, file_path: str, endpoint: str) -> None:
45
  extension = Path(file_path).suffix[1:]
@@ -54,7 +55,7 @@ async def test_streaming_transcription_json(aclient: AsyncClient, file_path: str
54
  CreateTranscriptionResponseJson(**json.loads(event.data))
55
 
56
 
57
- @pytest.mark.asyncio()
58
  @pytest.mark.parametrize(("file_path", "endpoint"), parameters)
59
  async def test_streaming_transcription_verbose_json(aclient: AsyncClient, file_path: str, endpoint: str) -> None:
60
  extension = Path(file_path).suffix[1:]
@@ -69,7 +70,7 @@ async def test_streaming_transcription_verbose_json(aclient: AsyncClient, file_p
69
  CreateTranscriptionResponseVerboseJson(**json.loads(event.data))
70
 
71
 
72
- @pytest.mark.asyncio()
73
  async def test_transcription_vtt(aclient: AsyncClient) -> None:
74
  async with await anyio.open_file("audio.wav", "rb") as f:
75
  data = await f.read()
@@ -87,7 +88,7 @@ async def test_transcription_vtt(aclient: AsyncClient) -> None:
87
  webvtt.from_string(text)
88
 
89
 
90
- @pytest.mark.asyncio()
91
  async def test_transcription_srt(aclient: AsyncClient) -> None:
92
  async with await anyio.open_file("audio.wav", "rb") as f:
93
  data = await f.read()
 
2
  from pathlib import Path
3
 
4
  import anyio
 
 
 
 
5
  from httpx import AsyncClient
6
  from httpx_sse import aconnect_sse
7
  import pytest
 
9
  import webvtt
10
  import webvtt.vtt
11
 
12
+ from faster_whisper_server.api_models import (
13
+ CreateTranscriptionResponseJson,
14
+ CreateTranscriptionResponseVerboseJson,
15
+ )
16
+
17
  FILE_PATHS = ["audio.wav"] # HACK
18
  ENDPOINTS = [
19
  "/v1/audio/transcriptions",
 
24
  parameters = [(file_path, endpoint) for endpoint in ENDPOINTS for file_path in FILE_PATHS]
25
 
26
 
27
+ @pytest.mark.asyncio
28
  @pytest.mark.parametrize(("file_path", "endpoint"), parameters)
29
  async def test_streaming_transcription_text(aclient: AsyncClient, file_path: str, endpoint: str) -> None:
30
  extension = Path(file_path).suffix[1:]
 
40
  assert len(event.data) > 1 # HACK: 1 because of the space character that's always prepended
41
 
42
 
43
+ @pytest.mark.asyncio
44
  @pytest.mark.parametrize(("file_path", "endpoint"), parameters)
45
  async def test_streaming_transcription_json(aclient: AsyncClient, file_path: str, endpoint: str) -> None:
46
  extension = Path(file_path).suffix[1:]
 
55
  CreateTranscriptionResponseJson(**json.loads(event.data))
56
 
57
 
58
+ @pytest.mark.asyncio
59
  @pytest.mark.parametrize(("file_path", "endpoint"), parameters)
60
  async def test_streaming_transcription_verbose_json(aclient: AsyncClient, file_path: str, endpoint: str) -> None:
61
  extension = Path(file_path).suffix[1:]
 
70
  CreateTranscriptionResponseVerboseJson(**json.loads(event.data))
71
 
72
 
73
+ @pytest.mark.asyncio
74
  async def test_transcription_vtt(aclient: AsyncClient) -> None:
75
  async with await anyio.open_file("audio.wav", "rb") as f:
76
  data = await f.read()
 
88
  webvtt.from_string(text)
89
 
90
 
91
+ @pytest.mark.asyncio
92
  async def test_transcription_srt(aclient: AsyncClient) -> None:
93
  async with await anyio.open_file("audio.wav", "rb") as f:
94
  data = await f.read()