import asyncio
from abc import ABC, abstractmethod
from datetime import datetime
from pathlib import Path, PurePath
from typing import Any, Awaitable, Callable, Dict, Optional, Tuple, TypeVar

import aiohttp
from rich.markup import escape

from .authenticator import Authenticator
from .config import Config, Section
from .limiter import Limiter
from .logging import ProgressBar, log
from .output_dir import FileSink, FileSinkToken, OnConflict, OutputDirectory, OutputDirError, Redownload
from .report import MarkConflictError, MarkDuplicateError
from .transformer import Transformer
from .utils import ReusableAsyncContextManager
from .version import NAME, VERSION


class CrawlWarning(Exception):
    pass


class CrawlError(Exception):
    pass


Wrapped = TypeVar("Wrapped", bound=Callable[..., None])


def noncritical(f: Wrapped) -> Wrapped:
    """
    Catches all exceptions occurring during the function call. If an exception
    occurs, the crawler's error_free variable is set to False.

    Warning: Must only be applied to member functions of the Crawler class!
    """

    def wrapper(*args: Any, **kwargs: Any) -> None:
        if not (args and isinstance(args[0], Crawler)):
            raise RuntimeError("@noncritical must only be applied to Crawler methods")

        crawler = args[0]

        try:
            f(*args, **kwargs)
        except (CrawlWarning, OutputDirError, MarkDuplicateError, MarkConflictError) as e:
            log.warn(str(e))
            crawler.error_free = False
        except CrawlError:
            crawler.error_free = False
            raise

    return wrapper  # type: ignore
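
# A minimal usage sketch (illustrative only; MyCrawler and check_quota are
# invented names): a raised CrawlWarning is logged and marks the run as not
# error-free, but does not abort it.
#
#     class MyCrawler(Crawler):
#         @noncritical
#         def check_quota(self) -> None:
#             raise CrawlWarning("Quota exceeded")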


AWrapped = TypeVar("AWrapped", bound=Callable[..., Awaitable[None]])


def anoncritical(f: AWrapped) -> AWrapped:
    """
    An async version of @noncritical.

    Catches all exceptions occurring during the function call. If an exception
    occurs, the crawler's error_free variable is set to False.

    Warning: Must only be applied to member functions of the Crawler class!
    """

    async def wrapper(*args: Any, **kwargs: Any) -> None:
        if not (args and isinstance(args[0], Crawler)):
            raise RuntimeError("@anoncritical must only be applied to Crawler methods")

        crawler = args[0]

        try:
            await f(*args, **kwargs)
        except CrawlWarning as e:
            log.print(f"[bold bright_red]Warning[/] {escape(str(e))}")
            crawler.error_free = False
        except CrawlError as e:
            log.print(f"[bold bright_red]Error[/] [red]{escape(str(e))}")
            crawler.error_free = False
            raise

    return wrapper  # type: ignore
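
# The async counterpart in use (again a hypothetical sketch; crawl_unit is an
# invented name). Note that a CrawlError is logged and then re-raised:
#
#     class MyCrawler(Crawler):
#         @anoncritical
#         async def crawl_unit(self, path: PurePath) -> None:
#             raise CrawlError(f"Could not crawl {path}")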


class CrawlToken(ReusableAsyncContextManager[ProgressBar]):
    def __init__(self, limiter: Limiter, desc: str):
        super().__init__()

        self._limiter = limiter
        self._desc = desc

    async def _on_aenter(self) -> ProgressBar:
        await self._stack.enter_async_context(self._limiter.limit_crawl())
        bar = self._stack.enter_context(log.crawl_bar(self._desc))

        return bar


class DownloadToken(ReusableAsyncContextManager[Tuple[ProgressBar, FileSink]]):
    def __init__(self, limiter: Limiter, fs_token: FileSinkToken, desc: str):
        super().__init__()

        self._limiter = limiter
        self._fs_token = fs_token
        self._desc = desc

    async def _on_aenter(self) -> Tuple[ProgressBar, FileSink]:
        # Downloads go through the download limiter and download bar, not the
        # crawl ones, so max_concurrent_downloads is actually respected
        await self._stack.enter_async_context(self._limiter.limit_download())
        sink = await self._stack.enter_async_context(self._fs_token)
        bar = self._stack.enter_context(log.download_bar(self._desc))

        return bar, sink


class CrawlerSection(Section):
    def output_dir(self, name: str) -> Path:
        # TODO Use removeprefix() after switching to 3.9
        if name.startswith("crawl:"):
            name = name[len("crawl:"):]
        return Path(self.s.get("output_dir", name)).expanduser()

    def redownload(self) -> Redownload:
        value = self.s.get("redownload", "never-smart")
        try:
            return Redownload.from_string(value)
        except ValueError as e:
            self.invalid_value(
                "redownload",
                value,
                str(e).capitalize(),
            )

    def on_conflict(self) -> OnConflict:
        value = self.s.get("on_conflict", "prompt")
        try:
            return OnConflict.from_string(value)
        except ValueError as e:
            self.invalid_value(
                "on_conflict",
                value,
                str(e).capitalize(),
            )

    def transform(self) -> str:
        return self.s.get("transform", "")

    def max_concurrent_tasks(self) -> int:
        value = self.s.getint("max_concurrent_tasks", fallback=1)
        if value <= 0:
            self.invalid_value("max_concurrent_tasks", value,
                               "Must be greater than 0")
        return value

    def max_concurrent_downloads(self) -> int:
        tasks = self.max_concurrent_tasks()
        value = self.s.getint("max_concurrent_downloads", fallback=None)
        if value is None:
            return tasks
        if value <= 0:
            self.invalid_value("max_concurrent_downloads", value,
                               "Must be greater than 0")
        if value > tasks:
            self.invalid_value("max_concurrent_downloads", value,
                               "Must not be greater than max_concurrent_tasks")
        return value

    def delay_between_tasks(self) -> float:
        value = self.s.getfloat("delay_between_tasks", fallback=0.0)
        if value < 0:
            self.invalid_value("delay_between_tasks", value,
                               "Must not be negative")
        return value

    def auth(self, authenticators: Dict[str, Authenticator]) -> Authenticator:
        value = self.s.get("auth")
        if value is None:
            self.missing_value("auth")
        auth = authenticators.get(value)
        if auth is None:
            self.invalid_value("auth", value, "No such auth section exists")
        return auth
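
# For reference, a hypothetical "[crawl:example]" config section that these
# accessors would parse (all values invented, defaults shown where they exist):
#
#     [crawl:example]
#     output_dir = ./example
#     redownload = never-smart
#     on_conflict = prompt
#     max_concurrent_tasks = 4
#     max_concurrent_downloads = 2
#     delay_between_tasks = 0.5
#     auth = auth:example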


class Crawler(ABC):
    def __init__(
        self,
        name: str,
        section: CrawlerSection,
        config: Config,
    ) -> None:
        """
        Initialize a crawler from its name and its section in the config file.

        If you are writing your own constructor for your own crawler, make sure
        to call this constructor first (via super().__init__).

        May throw a CrawlerLoadException.
        """

        self.name = name
        self.error_free = True

        self._limiter = Limiter(
            task_limit=section.max_concurrent_tasks(),
            download_limit=section.max_concurrent_downloads(),
            task_delay=section.delay_between_tasks(),
        )

        self._transformer = Transformer(section.transform())

        self._output_dir = OutputDirectory(
            config.default_section.working_dir() / section.output_dir(name),
            section.redownload(),
            section.on_conflict(),
        )

    async def crawl(self, path: PurePath) -> Optional[CrawlToken]:
        log.explain_topic(f"Decision: Crawl {path}")

        if self._transformer.transform(path) is None:
            log.explain("Answer: No")
            return None

        log.explain("Answer: Yes")

        desc = f"[bold bright_cyan]Crawling[/] {escape(str(path))}"
        return CrawlToken(self._limiter, desc)

    async def download(
        self,
        path: PurePath,
        mtime: Optional[datetime] = None,
        redownload: Optional[Redownload] = None,
        on_conflict: Optional[OnConflict] = None,
    ) -> Optional[DownloadToken]:
        log.explain_topic(f"Decision: Download {path}")

        transformed_path = self._transformer.transform(path)
        if transformed_path is None:
            log.explain("Answer: No")
            return None

        fs_token = await self._output_dir.download(transformed_path, mtime, redownload, on_conflict)
        if fs_token is None:
            log.explain("Answer: No")
            return None

        log.explain("Answer: Yes")

        desc = f"[bold bright_cyan]Downloading[/] {escape(str(path))}"
        return DownloadToken(self._limiter, fs_token, desc)
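
    # A sketch of how these tokens are consumed from a concrete crawler
    # (hypothetical call site; `data` is an invented variable). Entering the
    # token yields the progress bar and, for downloads, the file sink:
    #
    #     if token := await self.download(path, mtime):
    #         async with token as (bar, sink):
    #             sink.file.write(data)
    #             sink.done()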

    async def _cleanup(self) -> None:
        log.explain_topic("Decision: Clean up files")
        if self.error_free:
            log.explain("No warnings or errors occurred during this run")
            log.explain("Answer: Yes")
            await self._output_dir.cleanup()
        else:
            log.explain("Warnings or errors occurred during this run")
            log.explain("Answer: No")

    async def run(self) -> None:
        """
        Start the crawling process. Call this function if you want to use a
        crawler.
        """

        with log.show_progress():
            await self._run()
            await self._cleanup()

    @abstractmethod
    async def _run(self) -> None:
        """
        Override this function if you are writing a crawler.

        This function must not return before all crawling is complete. To crawl
        multiple things concurrently, asyncio.gather can be used.
        """

        pass
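
# What a concrete _run might look like (an illustrative sketch only; MyCrawler,
# crawl_file and the paths are invented names):
#
#     class MyCrawler(Crawler):
#         async def _run(self) -> None:
#             paths = [PurePath("a.txt"), PurePath("b.txt")]
#             # Crawl both paths concurrently, as suggested above
#             await asyncio.gather(*(self.crawl_file(path) for path in paths))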


class HttpCrawler(Crawler):
    COOKIE_FILE = PurePath(".cookies")

    def __init__(
        self,
        name: str,
        section: CrawlerSection,
        config: Config,
    ) -> None:
        super().__init__(name, section, config)

        self._cookie_jar_path = self._output_dir.resolve(self.COOKIE_FILE)
        self._output_dir.register_reserved(self.COOKIE_FILE)
        self._authentication_id = 0
        self._authentication_lock = asyncio.Lock()

    async def prepare_request(self) -> int:
        # We acquire the lock here to ensure we wait for any concurrent authenticate to finish.
        # This should reduce the number of requests we make: If an authentication is in progress,
        # all future requests wait for it to complete.
        async with self._authentication_lock:
            return self._authentication_id

    async def authenticate(self, current_id: int) -> None:
        async with self._authentication_lock:
            # Another task successfully called authenticate in the meantime.
            # We do not want to perform auth again, so we return here. We can
            # assume auth succeeded since authenticate throws an error if it
            # failed.
            if current_id != self._authentication_id:
                return
            await self._authenticate()
            self._authentication_id += 1
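
    # The intended request pattern, sketched with an invented URL and no retry
    # loop: snapshot the authentication id before each request and hand it back
    # to authenticate() on failure, so only one task actually re-authenticates.
    #
    #     auth_id = await self.prepare_request()
    #     async with self.session.get("https://example.com/file") as resp:
    #         if resp.status == 401:
    #             await self.authenticate(auth_id)
    #             # ...then retry the request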

    async def _authenticate(self) -> None:
        """
        Performs authentication. This method must only return normally if
        authentication succeeded. In all other cases it must either retry
        internally or throw a terminal exception.
        """

        raise RuntimeError("_authenticate() was called but crawler doesn't provide an implementation")

    async def run(self) -> None:
        cookie_jar = aiohttp.CookieJar()

        try:
            cookie_jar.load(self._cookie_jar_path)
        except Exception:
            pass

        async with aiohttp.ClientSession(
            headers={"User-Agent": f"{NAME}/{VERSION}"},
            cookie_jar=cookie_jar,
        ) as session:
            self.session = session
            try:
                await super().run()
            finally:
                del self.session

        try:
            cookie_jar.save(self._cookie_jar_path)
        except Exception:
            log.print(f"[bold red]Warning:[/] Failed to save cookies to {escape(str(self.COOKIE_FILE))}")