Enable ruff S113 rule (#11375)

* Enable ruff S113 rule

* [pre-commit.ci] auto fixes from pre-commit.com hooks

for more information, see https://pre-commit.ci

---------

Co-authored-by: pre-commit-ci[bot] <66853113+pre-commit-ci[bot]@users.noreply.github.com>
Maxim Smolskiy
2024-04-21 20:34:18 +03:00
committed by GitHub
parent 7b88e15b1c
commit 2702bf9400
36 changed files with 68 additions and 46 deletions
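
Context on the rule: ruff's S113 check ("request-without-timeout", from the flake8-bandit rule set) flags any requests call made without a timeout, because such a call can block forever on an unresponsive server. Below is a minimal sketch of the pattern the rule flags and the fix this commit applies throughout; the URL is illustrative, not from the repository.

import requests

# Flagged by S113: no timeout, so a stalled server hangs this call forever.
response = requests.get("https://example.com")

# Fixed: an explicit timeout (in seconds) makes a stalled connection raise
# requests.exceptions.Timeout instead of blocking indefinitely.
response = requests.get("https://example.com", timeout=10)
response.raise_for_status()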


@@ -28,7 +28,7 @@ def search_scraper(anime_name: str) -> list:
     search_url = f"{BASE_URL}/search/{anime_name}"
     response = requests.get(
-        search_url, headers={"UserAgent": UserAgent().chrome}
+        search_url, headers={"UserAgent": UserAgent().chrome}, timeout=10
     )  # request the url.
     # Is the response ok?
@@ -82,7 +82,9 @@ def search_anime_episode_list(episode_endpoint: str) -> list:
     request_url = f"{BASE_URL}{episode_endpoint}"
-    response = requests.get(url=request_url, headers={"UserAgent": UserAgent().chrome})
+    response = requests.get(
+        url=request_url, headers={"UserAgent": UserAgent().chrome}, timeout=10
+    )
     response.raise_for_status()
     soup = BeautifulSoup(response.text, "html.parser")
@@ -132,7 +134,7 @@ def get_anime_episode(episode_endpoint: str) -> list:
     episode_page_url = f"{BASE_URL}{episode_endpoint}"
     response = requests.get(
-        url=episode_page_url, headers={"User-Agent": UserAgent().chrome}
+        url=episode_page_url, headers={"User-Agent": UserAgent().chrome}, timeout=10
     )
     response.raise_for_status()
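
With these timeouts in place, a stalled server now raises requests.exceptions.Timeout rather than hanging the scraper. A sketch of how a caller might handle that (get_anime_episode is the function from the hunk above; the endpoint string is hypothetical):

import requests

try:
    # Hypothetical endpoint, shown only to illustrate the new failure mode.
    episode_urls = get_anime_episode("/some-episode-endpoint")
except requests.exceptions.Timeout:
    # Raised once the 10-second timeout added in this commit elapses.
    print("Request timed out; please retry.")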