From 316b9d7bf4bfd864fa9ca8cb8fb3c2ca995d137f Mon Sep 17 00:00:00 2001
From: I-Al-Istannen
Date: Wed, 4 Nov 2020 22:20:40 +0100
Subject: [PATCH] Prevent too many retries when fetching an ILIAS page

---
 PFERD/ilias/crawler.py | 11 +++++++++--
 1 file changed, 9 insertions(+), 2 deletions(-)

diff --git a/PFERD/ilias/crawler.py b/PFERD/ilias/crawler.py
index 7ce460e..036a479 100644
--- a/PFERD/ilias/crawler.py
+++ b/PFERD/ilias/crawler.py
@@ -593,10 +593,17 @@ class IliasCrawler:
 
         return results
 
-    def _get_page(self, url: str, params: Dict[str, Any]) -> bs4.BeautifulSoup:
+    def _get_page(self, url: str, params: Dict[str, Any],
+                  retry_count: int = 0) -> bs4.BeautifulSoup:
         """
         Fetches a page from ILIAS, authenticating when needed.
         """
+
+        if retry_count >= 4:
+            raise FatalException("Could not get a proper page after 4 tries. "
+                                 "Maybe your URL is wrong, authentication fails continuously, "
+                                 "your ILIAS connection is spotty or ILIAS is not well.")
+
         LOGGER.debug("Fetching %r", url)
 
         response = self._session.get(url, params=params)
@@ -617,7 +624,7 @@ class IliasCrawler:
 
         self._authenticator.authenticate(self._session)
 
-        return self._get_page(url, params)
+        return self._get_page(url, params, retry_count + 1)
 
     @staticmethod
     def _is_logged_in(soup: bs4.BeautifulSoup) -> bool: