Move ffm stuff from aiohttp to requests
commit 82adeb324f (parent 9bae030186)
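In short: this commit switches the FfM synchronizer from asynchronous aiohttp to synchronous requests. `aiohttp.ClientSession` becomes `requests.Session`, every `async def`/`await` is flattened into a plain call (the separate `close()` coroutine disappears entirely), and the hand-rolled retry loop around downloads (`RETRY_ATTEMPTS`/`RETRY_DELAY`) is removed. The first hunk, evidently the package `__init__.py` given its star-imports, simply re-enables the previously commented-out ffm exports. A minimal sketch of the core API change (simplified, not taken verbatim from the diff):

import aiohttp
import requests

# Before: fetching is a coroutine and must run inside an asyncio event loop.
async def fetch_text(session: aiohttp.ClientSession, url: str) -> str:
    async with session.get(url) as resp:
        return await resp.text()

# After: a plain blocking call; no event loop, no await, and no explicit
# close() coroutine (requests.Session can be closed synchronously).
def fetch_text_sync(session: requests.Session, url: str) -> str:
    return session.get(url).text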
PFERD/__init__.py
@@ -1,10 +1,10 @@
-#from .ffm import *
+from .ffm import *
 from .ilias import *
 #from .norbert import *
 from .utils import *
 
 __all__ = []
-#__all__ += ffm.__all__
+__all__ += ffm.__all__
 __all__ += ilias.__all__
 #__all__ += norbert.__all__
 __all__ += utils.__all__
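Why uncommenting both lines matters: the package uses the star-import plus `__all__`-aggregation pattern, so `from .ffm import *` pulls the names into the package namespace while `__all__ += ffm.__all__` re-exports them through `from PFERD import *`. A small sketch of the pattern (illustrative, mirroring the structure above rather than quoting it):

# PFERD/ffm.py -- each submodule declares its public names.
__all__ = ["FfM"]

# PFERD/__init__.py -- the package aggregates them. Note that importing
# .ffm also binds the submodule object as the attribute `ffm` on the
# package, which is why `ffm.__all__` resolves here without an explicit
# `from . import ffm`.
from .ffm import *

__all__ = []
__all__ += ffm.__all__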
PFERD/ffm.py
@@ -1,53 +1,44 @@
 # Fakultät für Mathematik (FfM)
 
-import aiohttp
-import asyncio
-import bs4
 import logging
 import pathlib
 import re
 
+import bs4
+import requests
+
 from .organizer import Organizer
-from . import utils
+from .utils import stream_to_path
 
-__all__ = [
-    "FfM",
-]
+__all__ = ["FfM"]
 logger = logging.getLogger(__name__)
 
 class FfM:
     BASE_URL = "http://www.math.kit.edu/"
     LINK_RE = re.compile(r"^https?://www.math.kit.edu/.*/(.*\.pdf)$")
 
-    RETRY_ATTEMPTS = 5
-    RETRY_DELAY = 1 # seconds
-
     def __init__(self, base_path):
        self.base_path = base_path
 
-        self._session = aiohttp.ClientSession()
+        self._session = requests.Session()
 
-    async def synchronize(self, urlpart, to_dir, transform=lambda x: x):
+    def synchronize(self, urlpart, to_dir, transform=lambda x: x):
         logging.info(f" Synchronizing {urlpart} to {to_dir} using the FfM synchronizer.")
 
         sync_path = pathlib.Path(self.base_path, to_dir)
-        orga = Organizer(self.base_path, sync_path)
 
+        orga = Organizer(self.base_path, sync_path)
         orga.clean_temp_dir()
 
-        await self._crawl(orga, urlpart, transform)
+        self._crawl(orga, urlpart, transform)
 
         orga.clean_sync_dir()
         orga.clean_temp_dir()
 
-    async def close(self):
-        await self._session.close()
-
-    async def _crawl(self, orga, urlpart, transform):
+    def _crawl(self, orga, urlpart, transform):
         url = self.BASE_URL + urlpart
-        async with self._session.get(url) as resp:
-            text = await resp.text()
-        soup = bs4.BeautifulSoup(text, "html.parser")
+        r = self._session.get(url)
+        soup = bs4.BeautifulSoup(r.text, "html.parser")
 
         for found in soup.find_all("a", href=self.LINK_RE):
             url = found["href"]
@@ -61,19 +52,9 @@ class FfM:
             logger.debug(f"Transformed from {old_path} to {new_path}")
 
             temp_path = orga.temp_file()
-            await self._download(url, temp_path)
+            self._download(url, temp_path)
             orga.add_file(temp_path, new_path)
 
-    async def _download(self, url, to_path):
-        for t in range(self.RETRY_ATTEMPTS):
-            try:
-                async with self._session.get(url) as resp:
-                    await utils.stream_to_path(resp, to_path)
-            except aiohttp.client_exceptions.ServerDisconnectedError:
-                logger.debug(f"Try {t+1} out of {self.RETRY_ATTEMPTS} failed, retrying in {self.RETRY_DELAY} s")
-                await asyncio.sleep(self.RETRY_DELAY)
-            else:
-                return
-        else:
-            logger.error(f"Could not download {url}")
-            raise utils.OutOfTriesException(f"Try {self.RETRY_ATTEMPTS} out of {self.RETRY_ATTEMPTS} failed.")
+    def _download(self, url, to_path):
+        with self._session.get(url) as r:
+            stream_to_path(r, to_path)
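Two consequences of the move are worth noting. First, the new `_download` has no retry behaviour at all; if the old resilience is wanted back, one option (an assumption on my part, not something this commit does) is to attach retries to the session at the transport layer via urllib3:

import requests
from requests.adapters import HTTPAdapter
from urllib3.util.retry import Retry

def make_retrying_session(attempts: int = 5, backoff: float = 1.0) -> requests.Session:
    # Retries connection errors and transient 5xx responses, roughly
    # standing in for the removed RETRY_ATTEMPTS/RETRY_DELAY loop.
    session = requests.Session()
    retry = Retry(total=attempts, backoff_factor=backoff,
                  status_forcelist=[500, 502, 503, 504])
    adapter = HTTPAdapter(max_retries=retry)
    session.mount("http://", adapter)
    session.mount("https://", adapter)
    return session

Second, `stream_to_path` now receives a requests response, but its implementation is outside this diff; a plausible (hypothetical) requests-based version would write the body in chunks. Note that `_download` calls `session.get(url)` without `stream=True`, so the body is fully buffered before writing; passing `stream=True` would make it a true stream:

import requests

def stream_to_path(response: requests.Response, to_path, chunk_size: int = 8192) -> None:
    # Hypothetical sketch; the real PFERD/utils.py version is not shown here.
    with open(to_path, "wb") as f:
        for chunk in response.iter_content(chunk_size=chunk_size):
            f.write(chunk)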