Mirror of https://github.com/Garmelon/PFERD.git (synced 2023-12-21 10:23:01 +01:00)
Retry on more I/O Errors

commit 1f2af3a290
parent 14cdfb6a69
@@ -37,3 +37,21 @@ def swallow_and_print_errors(function: TFun) -> TFun:
             Console().print_exception()
             return None
     return cast(TFun, inner)
+
+
+def retry_on_io_exception(max_retries: int, message: str) -> Callable[[TFun], TFun]:
+    """
+    Decorates a function and retries it on any I/O error until the maximum retry count is hit.
+    """
+    def retry(function: TFun) -> TFun:
+        def inner(*args: Any, **kwargs: Any) -> Any:
+            for i in range(0, max_retries):
+                # pylint: disable=broad-except
+                try:
+                    return function(*args, **kwargs)
+                except IOError as error:
+                    PRETTY.warning(f"Error during operation '{message}': {error}")
+                    PRETTY.warning(
+                        f"Retrying operation '{message}'. Remaining retries: {max_retries - 1 - i}")
+        return cast(TFun, inner)
+    return retry
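For context, anything wrapped by the new decorator is re-invoked on IOError and quietly returns None once the retries are exhausted. A minimal usage sketch, assuming the decorator is importable as PFERD.errors.retry_on_io_exception (the fetch_index helper and its file name are made up for illustration, not part of this commit):

    # Sketch only: assumes the package layout implied by the relative imports below.
    from PFERD.errors import retry_on_io_exception

    @retry_on_io_exception(3, "fetching index")
    def fetch_index() -> str:
        # Hypothetical helper: any IOError raised here (e.g. a missing file) is
        # caught by the decorator, a warning is printed, and the call is retried.
        with open("index.html", encoding="utf-8") as handle:
            return handle.read()

    # After max_retries failed attempts the wrapper falls out of its loop and
    # implicitly returns None, so callers can test the return value.
    if fetch_index() is None:
        print("fetching index failed too often")

The call sites changed in the rest of this commit rely on exactly that falsy None return to detect that all retries were used up.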
@@ -15,7 +15,7 @@ from urllib.parse import (parse_qs, urlencode, urljoin, urlparse, urlsplit,
 import bs4
 import requests
 
-from ..errors import FatalException
+from ..errors import FatalException, retry_on_io_exception
 from ..logging import PrettyLogger
 from ..utils import soupify
 from .authenticators import IliasAuthenticator
@@ -625,6 +625,7 @@ class IliasCrawler:
 
         return results
 
+    @retry_on_io_exception(3, "fetching webpage")
     def _get_page(self, url: str, params: Dict[str, Any],
                   retry_count: int = 0) -> bs4.BeautifulSoup:
         """
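Decorating _get_page covers more than explicit IOError raises inside the crawler: requests' RequestException (and with it ConnectionError and Timeout) subclasses IOError, so a dropped connection or timeout during the wrapped page fetch is caught by the decorator and retried up to three times before the failure propagates.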
@@ -10,6 +10,7 @@ from typing import Callable, List, Optional, Union
 import bs4
 import requests
 
+from ..errors import retry_on_io_exception
 from ..logging import PrettyLogger
 from ..organizer import Organizer
 from ..tmp_dir import TmpDir
@@ -116,26 +117,23 @@ class IliasDownloader:
         """
 
         LOGGER.debug("Downloading %r", info)
 
         if not self._strategy(self._organizer, info):
             self._organizer.mark(info.path)
             return
 
         tmp_file = self._tmp_dir.new_path()
 
-        download_successful = False
-        for _ in range(0, 3):
-            try:
-                if not self._try_download(info, tmp_file):
-                    LOGGER.info("Re-Authenticating due to download failure: %r", info)
-                    self._authenticator.authenticate(self._session)
-                else:
-                    download_successful = True
-                    break
-            except IOError as e:
-                PRETTY.warning(f"I/O Error when downloading ({e}). Retrying...",)
-                LOGGER.info("Retrying download for %s", info.path)
+        @retry_on_io_exception(3, "downloading file")
+        def download_impl() -> bool:
+            if not self._try_download(info, tmp_file):
+                LOGGER.info("Re-Authenticating due to download failure: %r", info)
+                self._authenticator.authenticate(self._session)
+                raise IOError("Scheduled retry")
+            else:
+                return True
 
-        if not download_successful:
+        if not download_impl():
             PRETTY.error(f"Download of file {info.path} failed too often! Skipping it...")
             return
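The rewritten download path relies on one convention of the decorator: raising IOError inside the wrapped function is how another attempt is scheduled. download_impl re-authenticates and then raises IOError("Scheduled retry") so the decorator loops again, returns True on success, and when all three attempts fail the wrapper returns None, so if not download_impl(): takes the error path and the file is skipped. The same pattern in isolation, with hypothetical do_work and recover stand-ins that are not part of this commit:

    @retry_on_io_exception(3, "some flaky operation")
    def attempt() -> bool:
        if not do_work():                     # hypothetical flaky call
            recover()                         # e.g. re-authenticate the session
            raise IOError("Scheduled retry")  # ask the decorator to try again
        return True

    if not attempt():
        print("Operation failed too often, giving up")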