pferd/PFERD/crawler.py

import configparser
from abc import ABC, abstractmethod
from contextlib import asynccontextmanager
from pathlib import PurePath
# TODO In Python 3.9 and above, AsyncContextManager is deprecated
from typing import AsyncContextManager, AsyncIterator, Optional

from rich.markup import escape

from .conductor import ProgressBar, TerminalConductor
from .limiter import Limiter
from .transformer import RuleParseException, Transformer


class CrawlerLoadException(Exception):
    pass


class Crawler(ABC):
    def __init__(self, name: str, section: configparser.SectionProxy) -> None:
"""
2021-04-29 15:43:20 +02:00
Initialize a crawler from its name and its section in the config file.
If you are writing your own constructor for your own crawler, make sure
to call this constructor first (via super().__init__).
2021-04-29 13:44:29 +02:00
May throw a CrawlerLoadException.
"""
self.name = name
self._conductor = TerminalConductor()
self._limiter = Limiter()
try:
self._transformer = Transformer(section.get("transform", ""))
except RuleParseException as e:
e.pretty_print()
raise CrawlerLoadException()
# working_dir = Path(section.get("working_dir", ""))
# output_dir = working_dir / section.get("output_dir", name)
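
    # A minimal sketch (not from the original file) of the subclass
    # constructor described in the docstring above. ExampleCrawler and the
    # "url" config key are hypothetical; the point is that super().__init__
    # must run first so name, conductor, limiter and transformer are set up:
    #
    #     class ExampleCrawler(Crawler):
    #         def __init__(self, name: str, section: configparser.SectionProxy) -> None:
    #             super().__init__(name, section)
    #             self._url = section.get("url", "")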

    def print(self, text: str) -> None:
        """
        Print rich markup to the terminal. Crawlers *must* use this function to
        print things unless they are holding an exclusive output context
        manager! Be careful to escape all user-supplied strings.
        """

        self._conductor.print(text)
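
    # Usage sketch (assumption, not from the original file): user-supplied
    # values such as paths should be passed through rich.markup.escape so
    # they cannot inject markup, e.g.
    #
    #     self.print(f"[bold]Found[/bold] {escape(str(path))}")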

    def exclusive_output(self) -> AsyncContextManager[None]:
        """
        Acquire exclusive rights to the terminal output. While this context
        manager is held, output such as printing and progress bars from other
        threads is suspended and the current thread may do whatever it wants
        with the terminal. However, it must return the terminal to its original
        state before exiting the context manager.

        No two threads can hold this context manager at the same time.

        Useful for password or confirmation prompts as well as running other
        programs while crawling (e.g. to get certain credentials).
        """

        return self._conductor.exclusive_output()
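
    # Usage sketch (assumption, not from the original file): a crawler that
    # needs credentials could suspend all other output while prompting. The
    # blocking getpass call is only illustrative; a real crawler might run it
    # in an executor to avoid stalling the event loop:
    #
    #     async with self.exclusive_output():
    #         password = getpass.getpass("Password: ")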

    @asynccontextmanager
    async def progress_bar(
            self,
            desc: str,
            total: Optional[int] = None,
    ) -> AsyncIterator[ProgressBar]:
        async with self._limiter.limit():
            with self._conductor.progress_bar(desc, total=total) as bar:
                yield bar

    def crawl_bar(self, path: PurePath) -> AsyncContextManager[ProgressBar]:
        pathstr = escape(str(path))
        desc = f"[bold magenta]Crawling[/bold magenta] {pathstr}"
        return self.progress_bar(desc)

    def download_bar(
            self,
            path: PurePath,
            size: int,
    ) -> AsyncContextManager[ProgressBar]:
        pathstr = escape(str(path))
        desc = f"[bold green]Downloading[/bold green] {pathstr}"
        return self.progress_bar(desc, total=size)

    async def run(self) -> None:
        """
        Start the crawling process. Call this function if you want to use a
        crawler.
        """

        async with self._conductor:
            await self.crawl()
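
    # Usage sketch (assumption, not from the original file): a caller with a
    # concrete crawler subclass and a parsed config section would start it
    # via run(), which wraps crawl() in the conductor:
    #
    #     crawler = ExampleCrawler("example", section)  # hypothetical subclass
    #     asyncio.run(crawler.run())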

    @abstractmethod
    async def crawl(self) -> None:
        """
        Override this function if you are writing a crawler.

        This function must not return before all crawling is complete. To crawl
        multiple things concurrently, asyncio.gather can be used.
        """

        pass
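
# A minimal sketch (not part of the original module) of a concrete crawler
# implementing crawl(). ExampleCrawler and its _crawl_path helper are
# hypothetical; asyncio.gather is used as the docstring above suggests, and
# crawl() only returns once every gathered task has finished:
#
#     import asyncio
#
#     class ExampleCrawler(Crawler):
#         async def crawl(self) -> None:
#             paths = [PurePath("a.txt"), PurePath("b.txt")]
#             await asyncio.gather(*(self._crawl_path(p) for p in paths))
#
#         async def _crawl_path(self, path: PurePath) -> None:
#             async with self.crawl_bar(path):
#                 ...  # fetch and process the file at this path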