Coverage for tests\conftest.py: 100%

50 statements  

« prev     ^ index     » next       coverage.py v7.10.1, created at 2025-10-19 21:13 +0800

1from bs4 import BeautifulSoup 

2from fastapi.testclient import TestClient 

3from jsonpath_ng import parse 

4from redis.exceptions import ConnectionError 

5from requests import get 

6from src.app import init_api 

7from src.utility.utils import Utils 

8from typing import Any, Optional 

9import pytest 

10 

# Bracketed English media-type labels — presumably matched against titles or
# API response text by the tests; confirm against the test modules that use them.
ANIME_ENG = "[anime]"
DRAMA_ENG = "[drama]"
MOVIE_ENG = "[movie]"

# Japanese-language counterparts of the media-type labels above.
ANIME_JPN = "アニメ"
DRAMA_JPN = "ドラマ"
MOVIE_JPN = "映画"

18 

19 

@pytest.fixture(scope="function")
def client_nc():
    """Yield a ``TestClient`` for an API instance with caching disabled.

    A fresh app is built per test (function scope); the ``with`` block runs
    the app's startup/shutdown events around the yielded client.
    """
    app = init_api(enable_cache=False, flush_cache=False)
    with TestClient(app) as test_client:
        yield test_client

26 

27 

@pytest.fixture(scope="function")
def client_c():
    """Yield a ``TestClient`` for an API instance with caching enabled.

    The cache is flushed on startup so each test begins from a clean cache.
    """
    app = init_api(enable_cache=True, flush_cache=True)
    with TestClient(app) as test_client:
        yield test_client

34 

35 

@pytest.fixture(scope="function")
def client_c_conn_err(mocker):
    """Yield a ``TestClient`` whose Redis client fails ``ping`` with ``ConnectionError``.

    ``aioredis.from_url`` is patched to return a mock whose ``ping`` raises,
    simulating an unreachable Redis server during cache initialization.
    """
    fake_redis = mocker.MagicMock()
    fake_redis.ping.side_effect = ConnectionError

    mocker.patch(
        target="src.utility.rediss.aioredis.from_url",
        return_value=fake_redis,
    )

    app = init_api(enable_cache=True, flush_cache=False)
    with TestClient(app) as test_client:
        yield test_client

50 

51 

@pytest.fixture(scope="function")
def client_c_serv_err(mocker):
    """Yield a ``TestClient`` whose Redis client fails ``ping`` with a generic error.

    Unlike ``client_c_conn_err``, the mocked ``ping`` raises a bare
    ``Exception``, simulating an unexpected (non-connection) server fault.
    """
    fake_redis = mocker.MagicMock()
    fake_redis.ping.side_effect = Exception

    mocker.patch(
        target="src.utility.rediss.aioredis.from_url",
        return_value=fake_redis,
    )

    app = init_api(enable_cache=True, flush_cache=False)
    with TestClient(app) as test_client:
        yield test_client

66 

67 

def get_json_val(json: Any, path: str) -> Optional[Any]:
    """Return the first value in *json* matching the JSONPath *path*.

    Returns ``None`` when the expression matches nothing.
    """
    matches = parse(path).find(json)
    if not matches:
        return None
    return matches[0].value

73 

74 

def get_reviews_last_page(slug: str) -> int:
    """Return the last review-pagination page number for a Filmarks title.

    Fetches the title page at ``Utils.FILMARKS_BASE + slug`` and reads the
    "last page" pagination anchor; defaults to 1 when no such anchor exists.
    """
    response = get(
        url=f"{Utils.FILMARKS_BASE}{slug}",
        headers=Utils.FILMARKS_REQUEST_HEADERS,
    )
    page = BeautifulSoup(response.text, "lxml")

    anchor = page.select_one("div.p-timeline a.c2-pagination__last")
    if not anchor:
        return 1
    # The page number is the value of the trailing "?page=" query parameter.
    return int(anchor.attrs["href"].split("?page=")[-1])