Reformat and switch to ruff
@@ -9,21 +9,19 @@ from .pass_ import PassAuthenticator, PassAuthSection
from .simple import SimpleAuthenticator, SimpleAuthSection
from .tfa import TfaAuthenticator

AuthConstructor = Callable[[
    str,  # Name (without the "auth:" prefix)
    SectionProxy,  # Authenticator's section of global config
    Config,  # Global config
], Authenticator]
AuthConstructor = Callable[
    [
        str,  # Name (without the "auth:" prefix)
        SectionProxy,  # Authenticator's section of global config
        Config,  # Global config
    ],
    Authenticator,
]

AUTHENTICATORS: Dict[str, AuthConstructor] = {
    "credential-file": lambda n, s, c:
        CredentialFileAuthenticator(n, CredentialFileAuthSection(s), c),
    "keyring": lambda n, s, c:
        KeyringAuthenticator(n, KeyringAuthSection(s)),
    "pass": lambda n, s, c:
        PassAuthenticator(n, PassAuthSection(s)),
    "simple": lambda n, s, c:
        SimpleAuthenticator(n, SimpleAuthSection(s)),
    "tfa": lambda n, s, c:
        TfaAuthenticator(n),
    "credential-file": lambda n, s, c: CredentialFileAuthenticator(n, CredentialFileAuthSection(s), c),
    "keyring": lambda n, s, c: KeyringAuthenticator(n, KeyringAuthSection(s)),
    "pass": lambda n, s, c: PassAuthenticator(n, PassAuthSection(s)),
    "simple": lambda n, s, c: SimpleAuthenticator(n, SimpleAuthSection(s)),
    "tfa": lambda n, s, c: TfaAuthenticator(n),
}
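A note on the constructor registry above (illustrative, not part of this commit): each entry maps an authenticator type to a callable taking the section name, the authenticator's config section, and the global config. A minimal sketch of how such a registry is typically consumed; the helper name and the "type" lookup are assumptions for this example:

    # Sketch only, not PFERD code.
    def build_authenticator(name: str, section: SectionProxy, config: Config) -> Authenticator:
        auth_type = section["type"]  # e.g. "simple", "keyring", "pass", "tfa"
        if auth_type not in AUTHENTICATORS:
            raise ValueError(f"Unknown authenticator type {auth_type!r}")
        return AUTHENTICATORS[auth_type](name, section, config)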
@@ -17,7 +17,6 @@ class KeyringAuthSection(AuthSection):


class KeyringAuthenticator(Authenticator):

    def __init__(self, name: str, section: KeyringAuthSection) -> None:
        super().__init__(name)


@@ -21,23 +21,20 @@ GROUP.add_argument(
    "--base-url",
    type=str,
    metavar="BASE_URL",
    help="The base url of the ilias instance"
    help="The base url of the ilias instance",
)

GROUP.add_argument(
    "--client-id",
    type=str,
    metavar="CLIENT_ID",
    help="The client id of the ilias instance"
    help="The client id of the ilias instance",
)

configure_common_group_args(GROUP)


def load(
    args: argparse.Namespace,
    parser: configparser.ConfigParser,
) -> None:
def load(args: argparse.Namespace, parser: configparser.ConfigParser) -> None:
    log.explain(f"Creating config for command '{COMMAND_NAME}'")

    parser["crawl:ilias"] = {}

@@ -21,8 +21,8 @@ configure_common_group_args(GROUP)


def load(
    args: argparse.Namespace,
    parser: configparser.ConfigParser,
    args: argparse.Namespace,
    parser: configparser.ConfigParser,
) -> None:
    log.explain(f"Creating config for command '{COMMAND_NAME}'")


@@ -18,25 +18,25 @@ GROUP.add_argument(
    "--link-regex",
    type=str,
    metavar="REGEX",
    help="href-matching regex to identify downloadable files"
    help="href-matching regex to identify downloadable files",
)
GROUP.add_argument(
    "target",
    type=str,
    metavar="TARGET",
    help="url to crawl"
    help="url to crawl",
)
GROUP.add_argument(
    "output",
    type=Path,
    metavar="OUTPUT",
    help="output directory"
    help="output directory",
)


def load(
    args: argparse.Namespace,
    parser: configparser.ConfigParser,
    args: argparse.Namespace,
    parser: configparser.ConfigParser,
) -> None:
    log.explain("Creating config for command 'kit-ipd'")


@@ -18,37 +18,37 @@ GROUP.add_argument(
    "target",
    type=Path,
    metavar="TARGET",
    help="directory to crawl"
    help="directory to crawl",
)
GROUP.add_argument(
    "output",
    type=Path,
    metavar="OUTPUT",
    help="output directory"
    help="output directory",
)
GROUP.add_argument(
    "--crawl-delay",
    type=float,
    metavar="SECONDS",
    help="artificial delay to simulate for crawl requests"
    help="artificial delay to simulate for crawl requests",
)
GROUP.add_argument(
    "--download-delay",
    type=float,
    metavar="SECONDS",
    help="artificial delay to simulate for download requests"
    help="artificial delay to simulate for download requests",
)
GROUP.add_argument(
    "--download-speed",
    type=int,
    metavar="BYTES_PER_SECOND",
    help="download speed to simulate"
    help="download speed to simulate",
)


def load(
    args: argparse.Namespace,
    parser: configparser.ConfigParser,
    args: argparse.Namespace,
    parser: configparser.ConfigParser,
) -> None:
    log.explain("Creating config for command 'local'")


@@ -12,58 +12,60 @@ def configure_common_group_args(group: argparse._ArgumentGroup) -> None:
        "target",
        type=str,
        metavar="TARGET",
        help="course id, 'desktop', or ILIAS URL to crawl"
        help="course id, 'desktop', or ILIAS URL to crawl",
    )
    group.add_argument(
        "output",
        type=Path,
        metavar="OUTPUT",
        help="output directory"
        help="output directory",
    )
    group.add_argument(
        "--username", "-u",
        "--username",
        "-u",
        type=str,
        metavar="USERNAME",
        help="user name for authentication"
        help="user name for authentication",
    )
    group.add_argument(
        "--keyring",
        action=BooleanOptionalAction,
        help="use the system keyring to store and retrieve passwords"
        help="use the system keyring to store and retrieve passwords",
    )
    group.add_argument(
        "--credential-file",
        type=Path,
        metavar="PATH",
        help="read username and password from a credential file"
        help="read username and password from a credential file",
    )
    group.add_argument(
        "--links",
        type=show_value_error(Links.from_string),
        metavar="OPTION",
        help="how to represent external links"
        help="how to represent external links",
    )
    group.add_argument(
        "--link-redirect-delay",
        type=int,
        metavar="SECONDS",
        help="time before 'fancy' links redirect to to their target (-1 to disable)"
        help="time before 'fancy' links redirect to to their target (-1 to disable)",
    )
    group.add_argument(
        "--videos",
        action=BooleanOptionalAction,
        help="crawl and download videos"
        help="crawl and download videos",
    )
    group.add_argument(
        "--forums",
        action=BooleanOptionalAction,
        help="crawl and download forum posts"
        help="crawl and download forum posts",
    )
    group.add_argument(
        "--http-timeout", "-t",
        "--http-timeout",
        "-t",
        type=float,
        metavar="SECONDS",
        help="timeout for all HTTP requests"
        help="timeout for all HTTP requests",
    )


@@ -15,15 +15,15 @@ class ParserLoadError(Exception):
# TODO Replace with argparse version when updating to 3.9?
class BooleanOptionalAction(argparse.Action):
    def __init__(
        self,
        option_strings: List[str],
        dest: Any,
        default: Any = None,
        type: Any = None,
        choices: Any = None,
        required: Any = False,
        help: Any = None,
        metavar: Any = None,
        self,
        option_strings: List[str],
        dest: Any,
        default: Any = None,
        type: Any = None,
        choices: Any = None,
        required: Any = False,
        help: Any = None,
        metavar: Any = None,
    ):
        if len(option_strings) != 1:
            raise ValueError("There must be exactly one option string")
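Regarding the TODO above (illustrative, not part of this commit): since Python 3.9 the standard library provides argparse.BooleanOptionalAction, which gives the same --flag/--no-flag behaviour this custom action implements. A minimal usage sketch:

    import argparse

    parser = argparse.ArgumentParser()
    # Registers both --explain and --no-explain; the value stays None if neither is passed.
    parser.add_argument("--explain", action=argparse.BooleanOptionalAction, default=None)
    print(parser.parse_args(["--no-explain"]).explain)  # False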
@@ -48,11 +48,11 @@ class BooleanOptionalAction(argparse.Action):
        )

    def __call__(
        self,
        parser: argparse.ArgumentParser,
        namespace: argparse.Namespace,
        values: Union[str, Sequence[Any], None],
        option_string: Optional[str] = None,
        self,
        parser: argparse.ArgumentParser,
        namespace: argparse.Namespace,
        values: Union[str, Sequence[Any], None],
        option_string: Optional[str] = None,
    ) -> None:
        if option_string and option_string in self.option_strings:
            value = not option_string.startswith("--no-")

@@ -67,11 +67,13 @@ def show_value_error(inner: Callable[[str], Any]) -> Callable[[str], Any]:
    Some validation functions (like the from_string in our enums) raise a ValueError.
    Argparse only pretty-prints ArgumentTypeErrors though, so we need to wrap our ValueErrors.
    """

    def wrapper(input: str) -> Any:
        try:
            return inner(input)
        except ValueError as e:
            raise ArgumentTypeError(e)

    return wrapper

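For context on the wrapper above (illustrative, not part of this commit): when a type= callable raises a bare ValueError, argparse falls back to a generic "invalid ... value" message and discards the reason text, while an ArgumentTypeError's message is shown verbatim. A small sketch with a hypothetical validator:

    import argparse

    def positive_int(value: str) -> int:
        number = int(value)
        if number <= 0:
            raise ValueError("must be a positive integer")
        return number

    parser = argparse.ArgumentParser()
    # Wrapped with show_value_error, the "must be a positive integer" text reaches the user
    # instead of the generic "invalid positive_int value: '-3'" message.
    parser.add_argument("--tasks", type=show_value_error(positive_int))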
@@ -81,52 +83,57 @@ CRAWLER_PARSER_GROUP = CRAWLER_PARSER.add_argument_group(
    description="arguments common to all crawlers",
)
CRAWLER_PARSER_GROUP.add_argument(
    "--redownload", "-r",
    "--redownload",
    "-r",
    type=show_value_error(Redownload.from_string),
    metavar="OPTION",
    help="when to download a file that's already present locally"
    help="when to download a file that's already present locally",
)
CRAWLER_PARSER_GROUP.add_argument(
    "--on-conflict",
    type=show_value_error(OnConflict.from_string),
    metavar="OPTION",
    help="what to do when local and remote files or directories differ"
    help="what to do when local and remote files or directories differ",
)
CRAWLER_PARSER_GROUP.add_argument(
    "--transform", "-T",
    "--transform",
    "-T",
    action="append",
    type=str,
    metavar="RULE",
    help="add a single transformation rule. Can be specified multiple times"
    help="add a single transformation rule. Can be specified multiple times",
)
CRAWLER_PARSER_GROUP.add_argument(
    "--tasks", "-n",
    "--tasks",
    "-n",
    type=int,
    metavar="N",
    help="maximum number of concurrent tasks (crawling, downloading)"
    help="maximum number of concurrent tasks (crawling, downloading)",
)
CRAWLER_PARSER_GROUP.add_argument(
    "--downloads", "-N",
    "--downloads",
    "-N",
    type=int,
    metavar="N",
    help="maximum number of tasks that may download data at the same time"
    help="maximum number of tasks that may download data at the same time",
)
CRAWLER_PARSER_GROUP.add_argument(
    "--task-delay", "-d",
    "--task-delay",
    "-d",
    type=float,
    metavar="SECONDS",
    help="time the crawler should wait between subsequent tasks"
    help="time the crawler should wait between subsequent tasks",
)
CRAWLER_PARSER_GROUP.add_argument(
    "--windows-paths",
    action=BooleanOptionalAction,
    help="whether to repair invalid paths on windows"
    help="whether to repair invalid paths on windows",
)


def load_crawler(
    args: argparse.Namespace,
    section: configparser.SectionProxy,
    args: argparse.Namespace,
    section: configparser.SectionProxy,
) -> None:
    if args.redownload is not None:
        section["redownload"] = args.redownload.value
@@ -152,79 +159,79 @@ PARSER.add_argument(
    version=f"{NAME} {VERSION} (https://github.com/Garmelon/PFERD)",
)
PARSER.add_argument(
    "--config", "-c",
    "--config",
    "-c",
    type=Path,
    metavar="PATH",
    help="custom config file"
    help="custom config file",
)
PARSER.add_argument(
    "--dump-config",
    action="store_true",
    help="dump current configuration to the default config path and exit"
    help="dump current configuration to the default config path and exit",
)
PARSER.add_argument(
    "--dump-config-to",
    metavar="PATH",
    help="dump current configuration to a file and exit."
    " Use '-' as path to print to stdout instead"
    help="dump current configuration to a file and exit. Use '-' as path to print to stdout instead",
)
PARSER.add_argument(
    "--debug-transforms",
    action="store_true",
    help="apply transform rules to files of previous run"
    help="apply transform rules to files of previous run",
)
PARSER.add_argument(
    "--crawler", "-C",
    "--crawler",
    "-C",
    action="append",
    type=str,
    metavar="NAME",
    help="only execute a single crawler."
    " Can be specified multiple times to execute multiple crawlers"
    help="only execute a single crawler. Can be specified multiple times to execute multiple crawlers",
)
PARSER.add_argument(
    "--skip", "-S",
    "--skip",
    "-S",
    action="append",
    type=str,
    metavar="NAME",
    help="don't execute this particular crawler."
    " Can be specified multiple times to skip multiple crawlers"
    help="don't execute this particular crawler. Can be specified multiple times to skip multiple crawlers",
)
PARSER.add_argument(
    "--working-dir",
    type=Path,
    metavar="PATH",
    help="custom working directory"
    help="custom working directory",
)
PARSER.add_argument(
    "--explain",
    action=BooleanOptionalAction,
    help="log and explain in detail what PFERD is doing"
    help="log and explain in detail what PFERD is doing",
)
PARSER.add_argument(
    "--status",
    action=BooleanOptionalAction,
    help="print status updates while PFERD is crawling"
    help="print status updates while PFERD is crawling",
)
PARSER.add_argument(
    "--report",
    action=BooleanOptionalAction,
    help="print a report of all local changes before exiting"
    help="print a report of all local changes before exiting",
)
PARSER.add_argument(
    "--share-cookies",
    action=BooleanOptionalAction,
    help="whether crawlers should share cookies where applicable"
    help="whether crawlers should share cookies where applicable",
)
PARSER.add_argument(
    "--show-not-deleted",
    action=BooleanOptionalAction,
    help="print messages in status and report when PFERD did not delete a local only file"
    help="print messages in status and report when PFERD did not delete a local only file",
)


def load_default_section(
    args: argparse.Namespace,
    parser: configparser.ConfigParser,
    args: argparse.Namespace,
    parser: configparser.ConfigParser,
) -> None:
    section = parser[parser.default_section]
@@ -53,10 +53,10 @@ class Section:
        raise ConfigOptionError(self.s.name, key, desc)

    def invalid_value(
        self,
        key: str,
        value: Any,
        reason: Optional[str],
        self,
        key: str,
        value: Any,
        reason: Optional[str],
    ) -> NoReturn:
        if reason is None:
            self.error(key, f"Invalid value {value!r}")

@@ -8,20 +8,19 @@ from .ilias import IliasWebCrawler, IliasWebCrawlerSection, KitIliasWebCrawler,
from .kit_ipd_crawler import KitIpdCrawler, KitIpdCrawlerSection
from .local_crawler import LocalCrawler, LocalCrawlerSection

CrawlerConstructor = Callable[[
    str,  # Name (without the "crawl:" prefix)
    SectionProxy,  # Crawler's section of global config
    Config,  # Global config
    Dict[str, Authenticator],  # Loaded authenticators by name
], Crawler]
CrawlerConstructor = Callable[
    [
        str,  # Name (without the "crawl:" prefix)
        SectionProxy,  # Crawler's section of global config
        Config,  # Global config
        Dict[str, Authenticator],  # Loaded authenticators by name
    ],
    Crawler,
]

CRAWLERS: Dict[str, CrawlerConstructor] = {
    "local": lambda n, s, c, a:
        LocalCrawler(n, LocalCrawlerSection(s), c),
    "ilias-web": lambda n, s, c, a:
        IliasWebCrawler(n, IliasWebCrawlerSection(s), c, a),
    "kit-ilias-web": lambda n, s, c, a:
        KitIliasWebCrawler(n, KitIliasWebCrawlerSection(s), c, a),
    "kit-ipd": lambda n, s, c, a:
        KitIpdCrawler(n, KitIpdCrawlerSection(s), c),
    "local": lambda n, s, c, a: LocalCrawler(n, LocalCrawlerSection(s), c),
    "ilias-web": lambda n, s, c, a: IliasWebCrawler(n, IliasWebCrawlerSection(s), c, a),
    "kit-ilias-web": lambda n, s, c, a: KitIliasWebCrawler(n, KitIliasWebCrawlerSection(s), c, a),
    "kit-ipd": lambda n, s, c, a: KitIpdCrawler(n, KitIpdCrawlerSection(s), c),
}
@@ -132,8 +132,9 @@ class DownloadToken(ReusableAsyncContextManager[Tuple[ProgressBar, FileSink]]):
        await self._stack.enter_async_context(self._limiter.limit_download())
        sink = await self._stack.enter_async_context(self._fs_token)
        # The "Downloaded ..." message is printed in the output dir, not here
        bar = self._stack.enter_context(log.download_bar("[bold bright_cyan]", "Downloading",
                                                         fmt_path(self._path)))
        bar = self._stack.enter_context(
            log.download_bar("[bold bright_cyan]", "Downloading", fmt_path(self._path))
        )

        return bar, sink

@@ -216,10 +217,10 @@ class CrawlerSection(Section):

class Crawler(ABC):
    def __init__(
        self,
        name: str,
        section: CrawlerSection,
        config: Config,
        self,
        name: str,
        section: CrawlerSection,
        config: Config,
    ) -> None:
        """
        Initialize a crawler from its name and its section in the config file.

@@ -293,13 +294,13 @@ class Crawler(ABC):
        return CrawlToken(self._limiter, path)

    def should_try_download(
        self,
        path: PurePath,
        *,
        etag_differs: Optional[bool] = None,
        mtime: Optional[datetime] = None,
        redownload: Optional[Redownload] = None,
        on_conflict: Optional[OnConflict] = None,
        self,
        path: PurePath,
        *,
        etag_differs: Optional[bool] = None,
        mtime: Optional[datetime] = None,
        redownload: Optional[Redownload] = None,
        on_conflict: Optional[OnConflict] = None,
    ) -> bool:
        log.explain_topic(f"Decision: Should Download {fmt_path(path)}")

@@ -308,11 +309,7 @@ class Crawler(ABC):
            return False

        should_download = self._output_dir.should_try_download(
            path,
            etag_differs=etag_differs,
            mtime=mtime,
            redownload=redownload,
            on_conflict=on_conflict
            path, etag_differs=etag_differs, mtime=mtime, redownload=redownload, on_conflict=on_conflict
        )
        if should_download:
            log.explain("Answer: Yes")

@@ -322,13 +319,13 @@ class Crawler(ABC):
        return False

    async def download(
        self,
        path: PurePath,
        *,
        etag_differs: Optional[bool] = None,
        mtime: Optional[datetime] = None,
        redownload: Optional[Redownload] = None,
        on_conflict: Optional[OnConflict] = None,
        self,
        path: PurePath,
        *,
        etag_differs: Optional[bool] = None,
        mtime: Optional[datetime] = None,
        redownload: Optional[Redownload] = None,
        on_conflict: Optional[OnConflict] = None,
    ) -> Optional[DownloadToken]:
        log.explain_topic(f"Decision: Download {fmt_path(path)}")
        path = self._deduplicator.mark(path)

@@ -346,7 +343,7 @@ class Crawler(ABC):
            etag_differs=etag_differs,
            mtime=mtime,
            redownload=redownload,
            on_conflict=on_conflict
            on_conflict=on_conflict,
        )
        if fs_token is None:
            log.explain("Answer: No")
@@ -29,11 +29,11 @@ class HttpCrawler(Crawler):
    COOKIE_FILE = PurePath(".cookies")

    def __init__(
        self,
        name: str,
        section: HttpCrawlerSection,
        config: Config,
        shared_auth: Optional[Authenticator] = None,
        self,
        name: str,
        section: HttpCrawlerSection,
        config: Config,
        shared_auth: Optional[Authenticator] = None,
    ) -> None:
        super().__init__(name, section, config)

@@ -252,23 +252,23 @@ class HttpCrawler(Crawler):
        self._load_cookies()

        async with aiohttp.ClientSession(
            headers={"User-Agent": f"{NAME}/{VERSION}"},
            cookie_jar=self._cookie_jar,
            connector=aiohttp.TCPConnector(ssl=ssl.create_default_context(cafile=certifi.where())),
            timeout=ClientTimeout(
                # 30 minutes. No download in the history of downloads was longer than 30 minutes.
                # This is enough to transfer a 600 MB file over a 3 Mib/s connection.
                # Allowing an arbitrary value could be annoying for overnight batch jobs
                total=15 * 60,
                connect=self._http_timeout,
                sock_connect=self._http_timeout,
                sock_read=self._http_timeout,
            ),
            # See https://github.com/aio-libs/aiohttp/issues/6626
            # Without this aiohttp will mangle the redirect header from Shibboleth, invalidating the
            # passed signature. Shibboleth will not accept the broken signature and authentication will
            # fail.
            requote_redirect_url=False
            headers={"User-Agent": f"{NAME}/{VERSION}"},
            cookie_jar=self._cookie_jar,
            connector=aiohttp.TCPConnector(ssl=ssl.create_default_context(cafile=certifi.where())),
            timeout=ClientTimeout(
                # 30 minutes. No download in the history of downloads was longer than 30 minutes.
                # This is enough to transfer a 600 MB file over a 3 Mib/s connection.
                # Allowing an arbitrary value could be annoying for overnight batch jobs
                total=15 * 60,
                connect=self._http_timeout,
                sock_connect=self._http_timeout,
                sock_read=self._http_timeout,
            ),
            # See https://github.com/aio-libs/aiohttp/issues/6626
            # Without this aiohttp will mangle the redirect header from Shibboleth, invalidating the
            # passed signature. Shibboleth will not accept the broken signature and authentication will
            # fail.
            requote_redirect_url=False,
        ) as session:
            self.session = session
            try:
@@ -1,5 +1,9 @@
from .kit_ilias_web_crawler import (IliasWebCrawler, IliasWebCrawlerSection, KitIliasWebCrawler,
                                    KitIliasWebCrawlerSection)
from .kit_ilias_web_crawler import (
    IliasWebCrawler,
    IliasWebCrawlerSection,
    KitIliasWebCrawler,
    KitIliasWebCrawlerSection,
)

__all__ = [
    "IliasWebCrawler",

@@ -254,8 +254,8 @@ def learning_module_template(body: bs4.Tag, name: str, prev: Optional[str], next
    )

    if bot_nav := body.select_one(".ilc_page_bnav_BottomNavigation"):
        bot_nav.replace_with(soupify(nav_template.replace(
            "{{left}}", left).replace("{{right}}", right).encode())
        bot_nav.replace_with(
            soupify(nav_template.replace("{{left}}", left).replace("{{right}}", right).encode())
        )

    body_str = cast(str, body.prettify())

@@ -265,10 +265,11 @@ def learning_module_template(body: bs4.Tag, name: str, prev: Optional[str], next
def forum_thread_template(name: str, url: str, heading: bs4.Tag, content: bs4.Tag) -> str:
    if title := cast(Optional[bs4.Tag], heading.find(name="b")):
        title.wrap(bs4.Tag(name="a", attrs={"href": url}))
    return _forum_thread_template \
        .replace("{{name}}", name) \
        .replace("{{heading}}", cast(str, heading.prettify())) \
    return (
        _forum_thread_template.replace("{{name}}", name)
        .replace("{{heading}}", cast(str, heading.prettify()))
        .replace("{{content}}", cast(str, content.prettify()))
    )


@dataclasses.dataclass

@@ -330,8 +331,7 @@ class Links(Enum):
        # All others get coerced to fancy
        content = cast(str, Links.FANCY.template())
        repeated_content = cast(
            re.Match[str],
            re.search(r"<!-- REPEAT START -->([\s\S]+)<!-- REPEAT END -->", content)
            re.Match[str], re.search(r"<!-- REPEAT START -->([\s\S]+)<!-- REPEAT END -->", content)
        ).group(1)

        parts = []

@@ -86,7 +86,7 @@ def clean(soup: BeautifulSoup) -> BeautifulSoup:

    for block in cast(list[Tag], soup.find_all(class_="ilc_va_ihcap_VAccordIHeadCap")):
        block.name = "h3"
        block["class"] += ["accordion-head"]  # type: ignore
        block["class"] += ["accordion-head"]

    for dummy in soup.select(".ilc_text_block_Standard.ilc_Paragraph"):
        children = list(dummy.children)
@@ -21,8 +21,16 @@ from ..http_crawler import HttpCrawler, HttpCrawlerSection
from .async_helper import _iorepeat
from .file_templates import LinkData, Links, forum_thread_template, learning_module_template
from .ilias_html_cleaner import clean, insert_base_markup
from .kit_ilias_html import (IliasElementType, IliasForumThread, IliasLearningModulePage, IliasPage,
                             IliasPageElement, IliasSoup, _sanitize_path_name, parse_ilias_forum_export)
from .kit_ilias_html import (
    IliasElementType,
    IliasForumThread,
    IliasLearningModulePage,
    IliasPage,
    IliasPageElement,
    IliasSoup,
    _sanitize_path_name,
    parse_ilias_forum_export,
)
from .shibboleth_login import ShibbolethLogin

TargetType = Union[str, int]

@@ -55,9 +63,7 @@ class IliasWebCrawlerSection(HttpCrawlerSection):

        self.invalid_value("login_type", login_type, "Should be <shibboleth | local>")

    def tfa_auth(
        self, authenticators: Dict[str, Authenticator]
    ) -> Optional[Authenticator]:
    def tfa_auth(self, authenticators: Dict[str, Authenticator]) -> Optional[Authenticator]:
        value: Optional[str] = self.s.get("tfa_auth")
        if value is None:
            return None

@@ -166,17 +172,19 @@ class IliasWebCrawler(HttpCrawler):
        name: str,
        section: IliasWebCrawlerSection,
        config: Config,
        authenticators: Dict[str, Authenticator]
        authenticators: Dict[str, Authenticator],
    ):
        # Setting a main authenticator for cookie sharing
        auth = section.auth(authenticators)
        super().__init__(name, section, config, shared_auth=auth)

        if section.tasks() > 1:
            log.warn("""
            log.warn(
                """
Please avoid using too many parallel requests as these are the KIT ILIAS
instance's greatest bottleneck.
""".strip())
""".strip()
            )

        self._auth = auth
        self._base_url = section.base_url()

@@ -210,22 +218,19 @@ instance's greatest bottleneck.
        # Start crawling at the given course
        root_url = url_set_query_param(
            urljoin(self._base_url + "/", "goto.php"),
            "target", f"crs_{course_id}",
            "target",
            f"crs_{course_id}",
        )

        await self._crawl_url(root_url, expected_id=course_id)

    async def _crawl_desktop(self) -> None:
        await self._crawl_url(
            urljoin(self._base_url, "/ilias.php?baseClass=ilDashboardGUI&cmd=show"),
            crawl_nested_courses=True
            urljoin(self._base_url, "/ilias.php?baseClass=ilDashboardGUI&cmd=show"), crawl_nested_courses=True
        )

    async def _crawl_url(
        self,
        url: str,
        expected_id: Optional[int] = None,
        crawl_nested_courses: bool = False
        self, url: str, expected_id: Optional[int] = None, crawl_nested_courses: bool = False
    ) -> None:
        if awaitable := await self._handle_ilias_page(
            url, None, PurePath("."), expected_id, crawl_nested_courses

@@ -238,7 +243,7 @@ instance's greatest bottleneck.
        current_element: Optional[IliasPageElement],
        path: PurePath,
        expected_course_id: Optional[int] = None,
        crawl_nested_courses: bool = False
        crawl_nested_courses: bool = False,
    ) -> Optional[Coroutine[Any, Any, None]]:
        maybe_cl = await self.crawl(path)
        if not maybe_cl:

@@ -319,10 +324,7 @@ instance's greatest bottleneck.
    # works correctly.
    @anoncritical
    async def _handle_ilias_element(
        self,
        parent_path: PurePath,
        element: IliasPageElement,
        crawl_nested_courses: bool = False
        self, parent_path: PurePath, element: IliasPageElement, crawl_nested_courses: bool = False
    ) -> Optional[Coroutine[Any, Any, None]]:
        # element.name might contain `/` if the crawler created nested elements,
        # so we can not sanitize it here. We trust in the output dir to thwart worst-case

@@ -344,7 +346,7 @@ instance's greatest bottleneck.
                "[bold bright_black]",
                "Ignored",
                fmt_path(element_path),
                "[bright_black](enable with option 'videos')"
                "[bright_black](enable with option 'videos')",
            )
            return None
@@ -356,7 +358,7 @@ instance's greatest bottleneck.
                "[bold bright_black]",
                "Ignored",
                fmt_path(element_path),
                "[bright_black](enable with option 'forums')"
                "[bright_black](enable with option 'forums')",
            )
                return None
            return await self._handle_forum(element, element_path)

@@ -365,7 +367,7 @@ instance's greatest bottleneck.
                "[bold bright_black]",
                "Ignored",
                fmt_path(element_path),
                "[bright_black](tests contain no relevant data)"
                "[bright_black](tests contain no relevant data)",
            )
            return None
        elif element.type == IliasElementType.SURVEY:

@@ -373,7 +375,7 @@ instance's greatest bottleneck.
                "[bold bright_black]",
                "Ignored",
                fmt_path(element_path),
                "[bright_black](surveys contain no relevant data)"
                "[bright_black](surveys contain no relevant data)",
            )
            return None
        elif element.type == IliasElementType.SCORM_LEARNING_MODULE:

@@ -381,7 +383,7 @@ instance's greatest bottleneck.
                "[bold bright_black]",
                "Ignored",
                fmt_path(element_path),
                "[bright_black](scorm learning modules are not supported)"
                "[bright_black](scorm learning modules are not supported)",
            )
            return None
        elif element.type == IliasElementType.LITERATURE_LIST:

@@ -389,7 +391,7 @@ instance's greatest bottleneck.
                "[bold bright_black]",
                "Ignored",
                fmt_path(element_path),
                "[bright_black](literature lists are not currently supported)"
                "[bright_black](literature lists are not currently supported)",
            )
            return None
        elif element.type == IliasElementType.LEARNING_MODULE_HTML:

@@ -397,7 +399,7 @@ instance's greatest bottleneck.
                "[bold bright_black]",
                "Ignored",
                fmt_path(element_path),
                "[bright_black](HTML learning modules are not supported)"
                "[bright_black](HTML learning modules are not supported)",
            )
            return None
        elif element.type == IliasElementType.BLOG:

@@ -405,7 +407,7 @@ instance's greatest bottleneck.
                "[bold bright_black]",
                "Ignored",
                fmt_path(element_path),
                "[bright_black](blogs are not currently supported)"
                "[bright_black](blogs are not currently supported)",
            )
            return None
        elif element.type == IliasElementType.DCL_RECORD_LIST:

@@ -413,7 +415,7 @@ instance's greatest bottleneck.
                "[bold bright_black]",
                "Ignored",
                fmt_path(element_path),
                "[bright_black](dcl record lists are not currently supported)"
                "[bright_black](dcl record lists are not currently supported)",
            )
            return None
        elif element.type == IliasElementType.MEDIA_POOL:

@@ -421,7 +423,7 @@ instance's greatest bottleneck.
                "[bold bright_black]",
                "Ignored",
                fmt_path(element_path),
                "[bright_black](media pools are not currently supported)"
                "[bright_black](media pools are not currently supported)",
            )
            return None
        elif element.type == IliasElementType.COURSE:

@@ -431,7 +433,7 @@ instance's greatest bottleneck.
                "[bold bright_black]",
                "Ignored",
                fmt_path(element_path),
                "[bright_black](not descending into linked course)"
                "[bright_black](not descending into linked course)",
            )
            return None
        elif element.type == IliasElementType.WIKI:

@@ -439,7 +441,7 @@ instance's greatest bottleneck.
                "[bold bright_black]",
                "Ignored",
                fmt_path(element_path),
                "[bright_black](wikis are not currently supported)"
                "[bright_black](wikis are not currently supported)",
            )
            return None
        elif element.type == IliasElementType.LEARNING_MODULE:
@@ -513,19 +515,15 @@ instance's greatest bottleneck.
    @anoncritical
    @_iorepeat(3, "resolving link")
    async def _download_link(
        self,
        link_renderer: Links,
        collection_name: str,
        links: list[LinkData],
        dl: DownloadToken
        self, link_renderer: Links, collection_name: str, links: list[LinkData], dl: DownloadToken
    ) -> None:
        async with dl as (bar, sink):
            rendered = link_renderer.interpolate(self._link_file_redirect_delay, collection_name, links)
            sink.file.write(rendered.encode("utf-8"))
            sink.done()

    async def _resolve_link_target(self, export_url: str) -> Union[BeautifulSoup, Literal['none']]:
        async def impl() -> Optional[Union[BeautifulSoup, Literal['none']]]:
    async def _resolve_link_target(self, export_url: str) -> Union[BeautifulSoup, Literal["none"]]:
        async def impl() -> Optional[Union[BeautifulSoup, Literal["none"]]]:
            async with self.session.get(export_url, allow_redirects=False) as resp:
                # No redirect means we were authenticated
                if hdrs.LOCATION not in resp.headers:

@@ -626,7 +624,7 @@ instance's greatest bottleneck.
        if self.prev_report:
            self.report.add_custom_value(
                _get_video_cache_key(element),
                self.prev_report.get_custom_value(_get_video_cache_key(element))
                self.prev_report.get_custom_value(_get_video_cache_key(element)),
            )

        # A video might contain other videos, so let's "crawl" the video first

@@ -698,7 +696,7 @@ instance's greatest bottleneck.
        def add_to_report(paths: list[str]) -> None:
            self.report.add_custom_value(
                _get_video_cache_key(element),
                {"known_paths": paths, "own_path": str(self._transformer.transform(dl.path))}
                {"known_paths": paths, "own_path": str(self._transformer.transform(dl.path))},
            )

        async with dl as (bar, sink):

@@ -752,11 +750,7 @@ instance's greatest bottleneck.
            await self._stream_from_url(element, sink, bar, is_video)

    async def _stream_from_url(
        self,
        element: IliasPageElement,
        sink: FileSink,
        bar: ProgressBar,
        is_video: bool
        self, element: IliasPageElement, sink: FileSink, bar: ProgressBar, is_video: bool
    ) -> None:
        url = element.url

@@ -831,10 +825,10 @@ instance's greatest bottleneck.
            log.warn("Could not extract forum export url")
            return

        export = await self._post(export_url, {
            "format": "html",
            "cmd[createExportFile]": ""
        })
        export = await self._post(
            export_url,
            {"format": "html", "cmd[createExportFile]": ""},
        )

        elements = parse_ilias_forum_export(soupify(export))

@@ -848,10 +842,7 @@ instance's greatest bottleneck.
    @anoncritical
    @_iorepeat(3, "saving forum thread")
    async def _download_forum_thread(
        self,
        parent_path: PurePath,
        thread: Union[IliasForumThread, IliasPageElement],
        forum_url: str
        self, parent_path: PurePath, thread: Union[IliasForumThread, IliasPageElement], forum_url: str
    ) -> None:
        path = parent_path / (_sanitize_path_name(thread.name) + ".html")
        maybe_dl = await self.download(path, mtime=thread.mtime)

@@ -860,10 +851,7 @@ instance's greatest bottleneck.

        async with maybe_dl as (bar, sink):
            rendered = forum_thread_template(
                thread.name,
                forum_url,
                thread.name_tag,
                await self.internalize_images(thread.content_tag)
                thread.name, forum_url, thread.name_tag, await self.internalize_images(thread.content_tag)
            )
            sink.file.write(rendered.encode("utf-8"))
            sink.done()

@@ -891,13 +879,13 @@ instance's greatest bottleneck.
        soup = await self._get_page(element.url)
        page = IliasPage(soup, element)
        if next := page.get_learning_module_data():
            elements.extend(await self._crawl_learning_module_direction(
                cl.path, next.previous_url, "left", element
            ))
            elements.extend(
                await self._crawl_learning_module_direction(cl.path, next.previous_url, "left", element)
            )
            elements.append(next)
            elements.extend(await self._crawl_learning_module_direction(
                cl.path, next.next_url, "right", element
            ))
            elements.extend(
                await self._crawl_learning_module_direction(cl.path, next.next_url, "right", element)
            )

        # Reflect their natural ordering in the file names
        for index, lm_element in enumerate(elements):

@@ -907,9 +895,9 @@ instance's greatest bottleneck.
        for index, elem in enumerate(elements):
            prev_url = elements[index - 1].title if index > 0 else None
            next_url = elements[index + 1].title if index < len(elements) - 1 else None
            tasks.append(asyncio.create_task(
                self._download_learning_module_page(cl.path, elem, prev_url, next_url)
            ))
            tasks.append(
                asyncio.create_task(self._download_learning_module_page(cl.path, elem, prev_url, next_url))
            )

        # And execute them
        await self.gather(tasks)
@@ -919,7 +907,7 @@ instance's greatest bottleneck.
        path: PurePath,
        start_url: Optional[str],
        dir: Union[Literal["left"], Literal["right"]],
        parent_element: IliasPageElement
        parent_element: IliasPageElement,
    ) -> List[IliasLearningModulePage]:
        elements: List[IliasLearningModulePage] = []

@@ -950,7 +938,7 @@ instance's greatest bottleneck.
        parent_path: PurePath,
        element: IliasLearningModulePage,
        prev: Optional[str],
        next: Optional[str]
        next: Optional[str],
    ) -> None:
        path = parent_path / (_sanitize_path_name(element.title) + ".html")
        maybe_dl = await self.download(path)

@@ -1037,11 +1025,7 @@ instance's greatest bottleneck.
        )
        return soup

    async def _post(
        self,
        url: str,
        data: dict[str, Union[str, List[str]]]
    ) -> bytes:
    async def _post(self, url: str, data: dict[str, Union[str, List[str]]]) -> bytes:
        form_data = aiohttp.FormData()
        for key, val in data.items():
            form_data.add_field(key, val)

@@ -1090,8 +1074,8 @@ instance's greatest bottleneck.
        username, password = await self._auth.credentials()

        login_form_data = aiohttp.FormData()
        login_form_data.add_field('login_form/input_3/input_4', username)
        login_form_data.add_field('login_form/input_3/input_5', password)
        login_form_data.add_field("login_form/input_3/input_4", username)
        login_form_data.add_field("login_form/input_3/input_5", password)

        # do the actual login
        async with self.session.post(urljoin(self._base_url, login_url), data=login_form_data) as request:
@@ -42,15 +42,15 @@ class TypeMatcher:
            self.alt = alt

    class All:
        matchers: list['IliasElementMatcher']
        matchers: list["IliasElementMatcher"]

        def __init__(self, matchers: list['IliasElementMatcher']):
        def __init__(self, matchers: list["IliasElementMatcher"]):
            self.matchers = matchers

    class Any:
        matchers: list['IliasElementMatcher']
        matchers: list["IliasElementMatcher"]

        def __init__(self, matchers: list['IliasElementMatcher']):
        def __init__(self, matchers: list["IliasElementMatcher"]):
            self.matchers = matchers

    @staticmethod

@@ -70,11 +70,11 @@ class TypeMatcher:
        return TypeMatcher.ImgAlt(alt)

    @staticmethod
    def all(*matchers: 'IliasElementMatcher') -> All:
    def all(*matchers: "IliasElementMatcher") -> All:
        return TypeMatcher.All(list(matchers))

    @staticmethod
    def any(*matchers: 'IliasElementMatcher') -> Any:
    def any(*matchers: "IliasElementMatcher") -> Any:
        return TypeMatcher.Any(list(matchers))

    @staticmethod

@@ -127,20 +127,14 @@ class IliasElementType(Enum):
    def matcher(self) -> IliasElementMatcher:
        match self:
            case IliasElementType.BLOG:
                return TypeMatcher.any(
                    TypeMatcher.img_src("_blog.svg")
                )
                return TypeMatcher.any(TypeMatcher.img_src("_blog.svg"))
            case IliasElementType.BOOKING:
                return TypeMatcher.any(
                    TypeMatcher.path("/book/"),
                    TypeMatcher.img_src("_book.svg")
                )
                return TypeMatcher.any(TypeMatcher.path("/book/"), TypeMatcher.img_src("_book.svg"))
            case IliasElementType.COURSE:
                return TypeMatcher.any(TypeMatcher.path("/crs/"), TypeMatcher.img_src("_crsr.svg"))
            case IliasElementType.DCL_RECORD_LIST:
                return TypeMatcher.any(
                    TypeMatcher.img_src("_dcl.svg"),
                    TypeMatcher.query("cmdclass=ildclrecordlistgui")
                    TypeMatcher.img_src("_dcl.svg"), TypeMatcher.query("cmdclass=ildclrecordlistgui")
                )
            case IliasElementType.EXERCISE:
                return TypeMatcher.never()

@@ -162,14 +156,11 @@ class IliasElementType(Enum):
                return TypeMatcher.any(
                    TypeMatcher.path("/fold/"),
                    TypeMatcher.img_src("_fold.svg"),

                    TypeMatcher.path("/grp/"),
                    TypeMatcher.img_src("_grp.svg"),

                    TypeMatcher.path("/copa/"),
                    TypeMatcher.path("_copa_"),
                    TypeMatcher.img_src("_copa.svg"),

                    # Not supported right now but warn users
                    # TypeMatcher.query("baseclass=ilmediapoolpresentationgui"),
                    # TypeMatcher.img_alt("medienpool"),

@@ -188,14 +179,10 @@ class IliasElementType(Enum):
            case IliasElementType.LITERATURE_LIST:
                return TypeMatcher.img_src("_bibl.svg")
            case IliasElementType.LEARNING_MODULE:
                return TypeMatcher.any(
                    TypeMatcher.path("/lm/"),
                    TypeMatcher.img_src("_lm.svg")
                )
                return TypeMatcher.any(TypeMatcher.path("/lm/"), TypeMatcher.img_src("_lm.svg"))
            case IliasElementType.LEARNING_MODULE_HTML:
                return TypeMatcher.any(
                    TypeMatcher.query("baseclass=ilhtlmpresentationgui"),
                    TypeMatcher.img_src("_htlm.svg")
                    TypeMatcher.query("baseclass=ilhtlmpresentationgui"), TypeMatcher.img_src("_htlm.svg")
                )
            case IliasElementType.LINK:
                return TypeMatcher.any(

@@ -203,17 +190,16 @@ class IliasElementType(Enum):
                        TypeMatcher.query("baseclass=illinkresourcehandlergui"),
                        TypeMatcher.query("calldirectlink"),
                    ),
                    TypeMatcher.img_src("_webr.svg")  # duplicated :(
                    TypeMatcher.img_src("_webr.svg"),  # duplicated :(
                )
            case IliasElementType.LINK_COLLECTION:
                return TypeMatcher.any(
                    TypeMatcher.query("baseclass=illinkresourcehandlergui"),
                    TypeMatcher.img_src("_webr.svg")  # duplicated :(
                    TypeMatcher.img_src("_webr.svg"),  # duplicated :(
                )
            case IliasElementType.MEDIA_POOL:
                return TypeMatcher.any(
                    TypeMatcher.query("baseclass=ilmediapoolpresentationgui"),
                    TypeMatcher.img_src("_mep.svg")
                    TypeMatcher.query("baseclass=ilmediapoolpresentationgui"), TypeMatcher.img_src("_mep.svg")
                )
            case IliasElementType.MEDIACAST_VIDEO:
                return TypeMatcher.never()

@@ -221,12 +207,10 @@ class IliasElementType(Enum):
                return TypeMatcher.any(
                    TypeMatcher.path("/mcst/"),
                    TypeMatcher.query("baseclass=ilmediacasthandlergui"),
                    TypeMatcher.img_src("_mcst.svg")
                    TypeMatcher.img_src("_mcst.svg"),
                )
            case IliasElementType.MEETING:
                return TypeMatcher.any(
                    TypeMatcher.img_src("_sess.svg")
                )
                return TypeMatcher.any(TypeMatcher.img_src("_sess.svg"))
            case IliasElementType.MOB_VIDEO:
                return TypeMatcher.never()
            case IliasElementType.OPENCAST_VIDEO:

@@ -239,24 +223,19 @@ class IliasElementType(Enum):
                return TypeMatcher.never()
            case IliasElementType.SCORM_LEARNING_MODULE:
                return TypeMatcher.any(
                    TypeMatcher.query("baseclass=ilsahspresentationgui"),
                    TypeMatcher.img_src("_sahs.svg")
                    TypeMatcher.query("baseclass=ilsahspresentationgui"), TypeMatcher.img_src("_sahs.svg")
                )
            case IliasElementType.SURVEY:
                return TypeMatcher.any(
                    TypeMatcher.path("/svy/"),
                    TypeMatcher.img_src("svy.svg")
                )
                return TypeMatcher.any(TypeMatcher.path("/svy/"), TypeMatcher.img_src("svy.svg"))
            case IliasElementType.TEST:
                return TypeMatcher.any(
                    TypeMatcher.query("cmdclass=ilobjtestgui"),
                    TypeMatcher.query("cmdclass=iltestscreengui"),
                    TypeMatcher.img_src("_tst.svg")
                    TypeMatcher.img_src("_tst.svg"),
                )
            case IliasElementType.WIKI:
                return TypeMatcher.any(
                    TypeMatcher.query("baseClass=ilwikihandlergui"),
                    TypeMatcher.img_src("wiki.svg")
                    TypeMatcher.query("baseClass=ilwikihandlergui"), TypeMatcher.img_src("wiki.svg")
                )

        raise CrawlWarning(f"Unknown matcher {self}")
@@ -291,7 +270,7 @@ class IliasPageElement:
            r"thr_pk=(?P<id>\d+)",  # forums
            r"ref_id=(?P<id>\d+)",
            r"target=[a-z]+_(?P<id>\d+)",
            r"mm_(?P<id>\d+)"
            r"mm_(?P<id>\d+)",
        ]

        for regex in regexes:

@@ -309,8 +288,8 @@ class IliasPageElement:
        name: str,
        mtime: Optional[datetime] = None,
        description: Optional[str] = None,
        skip_sanitize: bool = False
    ) -> 'IliasPageElement':
        skip_sanitize: bool = False,
    ) -> "IliasPageElement":
        if typ == IliasElementType.MEETING:
            normalized = IliasPageElement._normalize_meeting_name(name)
            log.explain(f"Normalized meeting name from {name!r} to {normalized!r}")

@@ -382,7 +361,6 @@ class IliasSoup:


class IliasPage:

    def __init__(self, ilias_soup: IliasSoup, source_element: Optional[IliasPageElement]):
        self._ilias_soup = ilias_soup
        self._soup = ilias_soup.soup

@@ -422,23 +400,26 @@ class IliasPage:
        return self._find_normal_entries()

    def get_info_tab(self) -> Optional[IliasPageElement]:
        tab: Optional[Tag] = cast(Optional[Tag], self._soup.find(
            name="a",
            attrs={"href": lambda x: x is not None and "cmdClass=ilinfoscreengui" in x}
        ))
        tab: Optional[Tag] = cast(
            Optional[Tag],
            self._soup.find(
                name="a", attrs={"href": lambda x: x is not None and "cmdClass=ilinfoscreengui" in x}
            ),
        )
        if tab is not None:
            return IliasPageElement.create_new(
                IliasElementType.INFO_TAB,
                self._abs_url_from_link(tab),
                "infos"
                IliasElementType.INFO_TAB, self._abs_url_from_link(tab), "infos"
            )
        return None

    def get_description(self) -> Optional[BeautifulSoup]:
        def is_interesting_class(name: str | None) -> bool:
            return name in [
                "ilCOPageSection", "ilc_Paragraph", "ilc_va_ihcap_VAccordIHeadCap",
                "ilc_va_ihcap_AccordIHeadCap", "ilc_media_cont_MediaContainer"
                "ilCOPageSection",
                "ilc_Paragraph",
                "ilc_va_ihcap_VAccordIHeadCap",
                "ilc_va_ihcap_AccordIHeadCap",
                "ilc_media_cont_MediaContainer",
            ]

        paragraphs: list[Tag] = cast(list[Tag], self._soup.find_all(class_=is_interesting_class))

@@ -457,7 +438,7 @@ class IliasPage:
            if video := p.select_one("video"):
                url, title = self._find_mob_video_url_title(video, p)
                raw_html += '<div style="min-width: 100px; min-height: 100px; border: 1px solid black;'
                raw_html += 'display: flex; justify-content: center; align-items: center;'
                raw_html += "display: flex; justify-content: center; align-items: center;"
                raw_html += ' margin: 0.5rem;">'
                if url is not None and urlparse(url).hostname != urlparse(self._page_url).hostname:
                    if url.startswith("//"):

@@ -486,7 +467,7 @@ class IliasPage:
            title=title,
            content=content,
            next_url=self._find_learning_module_next(),
            previous_url=self._find_learning_module_prev()
            previous_url=self._find_learning_module_prev(),
        )

    def _find_learning_module_next(self) -> Optional[str]:

@@ -517,7 +498,7 @@ class IliasPage:

        rtoken_form = cast(
            Optional[Tag],
            self._soup.find("form", attrs={"action": lambda x: x is not None and "rtoken=" in x})
            self._soup.find("form", attrs={"action": lambda x: x is not None and "rtoken=" in x}),
        )
        if not rtoken_form:
            log.explain("Found no rtoken anywhere")

@@ -557,9 +538,7 @@ class IliasPage:
            return True

        # Raw listing without ILIAS fluff
        video_element_table = self._soup.find(
            name="table", id=re.compile(r"tbl_xoct_.+")
        )
        video_element_table = self._soup.find(name="table", id=re.compile(r"tbl_xoct_.+"))
        return video_element_table is not None

    def _is_ilias_opencast_embedding(self) -> bool:
@@ -600,24 +579,28 @@ class IliasPage:
        return self._uncollapse_future_meetings_url() is not None

    def _uncollapse_future_meetings_url(self) -> Optional[IliasPageElement]:
        element = cast(Optional[Tag], self._soup.find(
            "a",
            attrs={"href": lambda x: x is not None and ("crs_next_sess=1" in x or "crs_prev_sess=1" in x)}
        ))
        element = cast(
            Optional[Tag],
            self._soup.find(
                "a",
                attrs={
                    "href": lambda x: x is not None and ("crs_next_sess=1" in x or "crs_prev_sess=1" in x)
                },
            ),
        )
        if not element:
            return None
        link = self._abs_url_from_link(element)
        return IliasPageElement.create_new(IliasElementType.FOLDER, link, "show all meetings")

    def _is_exercise_not_all_shown(self) -> bool:
        return (self._page_type == IliasElementType.EXERCISE_OVERVIEW
                and "mode=all" not in self._page_url.lower())
        return (
            self._page_type == IliasElementType.EXERCISE_OVERVIEW and "mode=all" not in self._page_url.lower()
        )

    def _show_all_exercises(self) -> Optional[IliasPageElement]:
        return IliasPageElement.create_new(
            IliasElementType.EXERCISE_OVERVIEW,
            self._page_url + "&mode=all",
            "show all exercises"
            IliasElementType.EXERCISE_OVERVIEW, self._page_url + "&mode=all", "show all exercises"
        )

    def _is_content_tab_selected(self) -> bool:

@@ -631,10 +614,12 @@ class IliasPage:
        return "baseClass=ilmembershipoverviewgui" in self._page_url

    def _select_content_page_url(self) -> Optional[IliasPageElement]:
        tab = cast(Optional[Tag], self._soup.find(
            id="tab_view_content",
            attrs={"class": lambda x: x is not None and "active" not in x}
        ))
        tab = cast(
            Optional[Tag],
            self._soup.find(
                id="tab_view_content", attrs={"class": lambda x: x is not None and "active" not in x}
            ),
        )
        # Already selected (or not found)
        if not tab:
            return None

@@ -654,9 +639,7 @@ class IliasPage:
        # on the page, but defined in a JS object inside a script tag, passed to the player
        # library.
        # We do the impossible and RegEx the stream JSON object out of the page's HTML source
        regex = re.compile(
            r"({\"streams\"[\s\S]+?),\s*{\"paella_config_file", re.IGNORECASE
        )
        regex = re.compile(r"({\"streams\"[\s\S]+?),\s*{\"paella_config_file", re.IGNORECASE)
        json_match = regex.search(str(self._soup))

        if json_match is None:

@@ -687,10 +670,12 @@ class IliasPage:
    def _get_show_max_forum_entries_per_page_url(
        self, wanted_max: Optional[int] = None
    ) -> Optional[IliasPageElement]:
        correct_link = cast(Optional[Tag], self._soup.find(
            "a",
            attrs={"href": lambda x: x is not None and "trows=800" in x and "cmd=showThreads" in x}
        ))
        correct_link = cast(
            Optional[Tag],
            self._soup.find(
                "a", attrs={"href": lambda x: x is not None and "trows=800" in x and "cmd=showThreads" in x}
            ),
        )

        if not correct_link:
            return None
@@ -775,11 +760,11 @@ class IliasPage:
|
||||
continue
|
||||
if "cmd=sendfile" not in link["href"]:
|
||||
continue
|
||||
items.append(IliasPageElement.create_new(
|
||||
IliasElementType.FILE,
|
||||
self._abs_url_from_link(link),
|
||||
_sanitize_path_name(link.get_text())
|
||||
))
|
||||
items.append(
|
||||
IliasPageElement.create_new(
|
||||
IliasElementType.FILE, self._abs_url_from_link(link), _sanitize_path_name(link.get_text())
|
||||
)
|
||||
)
|
||||
|
||||
return items
|
||||
|
||||
@@ -791,9 +776,9 @@ class IliasPage:
|
||||
#
|
||||
# We need to figure out where we are.
|
||||
|
||||
video_element_table = cast(Optional[Tag], self._soup.find(
|
||||
name="table", id=re.compile(r"tbl_xoct_.+")
|
||||
))
|
||||
video_element_table = cast(
|
||||
Optional[Tag], self._soup.find(name="table", id=re.compile(r"tbl_xoct_.+"))
|
||||
)
|
||||
|
||||
if video_element_table is None:
|
||||
# We are in stage 1
|
||||
@@ -829,8 +814,7 @@ class IliasPage:
|
||||
|
||||
table_id = id_match.group(1)
|
||||
|
||||
query_params = {f"tbl_xoct_{table_id}_trows": "800",
|
||||
"cmd": "asyncGetTableGUI", "cmdMode": "asynch"}
|
||||
query_params = {f"tbl_xoct_{table_id}_trows": "800", "cmd": "asyncGetTableGUI", "cmdMode": "asynch"}
|
||||
url = url_set_query_params(self._page_url, query_params)
|
||||
|
||||
log.explain("Disabled pagination, retrying folder as a new entry")
|
||||
@@ -841,9 +825,9 @@ class IliasPage:
|
||||
Crawls the "second stage" video page. This page contains the actual video urls.
|
||||
"""
|
||||
# Video start links are marked with an "Abspielen" link
|
||||
video_links = cast(list[Tag], self._soup.find_all(
|
||||
name="a", text=re.compile(r"\s*(Abspielen|Play)\s*")
|
||||
))
|
||||
video_links = cast(
|
||||
list[Tag], self._soup.find_all(name="a", text=re.compile(r"\s*(Abspielen|Play)\s*"))
|
||||
)
|
||||
|
||||
results: list[IliasPageElement] = []
|
||||
|
||||
@@ -857,12 +841,12 @@ class IliasPage:
|
||||
# 6th or 7th child (1 indexed) is the modification time string. Try to find it
|
||||
# by parsing backwards from the end and finding something that looks like a date
|
||||
modification_time = None
|
||||
row: Tag = link.parent.parent.parent # type: ignore
|
||||
row: Tag = link.parent.parent.parent
|
||||
column_count = len(row.select("td.std"))
|
||||
for index in range(column_count, 0, -1):
|
||||
modification_string = link.parent.parent.parent.select_one( # type: ignore
|
||||
f"td.std:nth-child({index})"
|
||||
).get_text().strip()
|
||||
modification_string = (
|
||||
link.parent.parent.parent.select_one(f"td.std:nth-child({index})").get_text().strip()
|
||||
)
|
||||
if match := re.search(r"\d+\.\d+.\d+ \d+:\d+", modification_string):
|
||||
modification_time = datetime.strptime(match.group(0), "%d.%m.%Y %H:%M")
|
||||
break
|
||||
@@ -871,7 +855,7 @@ class IliasPage:
|
||||
log.warn(f"Could not determine upload time for {link}")
|
||||
modification_time = datetime.now()
|
||||
|
||||
title = link.parent.parent.parent.select_one("td.std:nth-child(3)").get_text().strip() # type: ignore
|
||||
title = link.parent.parent.parent.select_one("td.std:nth-child(3)").get_text().strip()
|
||||
title += ".mp4"
|
||||
|
||||
video_name: str = _sanitize_path_name(title)
|
||||
@@ -900,25 +884,29 @@ class IliasPage:
results: list[IliasPageElement] = []

if link := cast(Optional[Tag], self._soup.select_one("#tab_submission > a")):
results.append(IliasPageElement.create_new(
IliasElementType.EXERCISE_FILES,
self._abs_url_from_link(link),
"Submission"
))
results.append(
IliasPageElement.create_new(
IliasElementType.EXERCISE_FILES, self._abs_url_from_link(link), "Submission"
)
)
else:
log.explain("Found no submission link for exercise, maybe it has not started yet?")

# Find all download links in the container (this will contain all the *feedback* files)
download_links = cast(list[Tag], self._soup.find_all(
name="a",
# download links contain the given command class
attrs={"href": lambda x: x is not None and "cmd=download" in x},
text="Download"
))
download_links = cast(
list[Tag],
self._soup.find_all(
name="a",
# download links contain the given command class
attrs={"href": lambda x: x is not None and "cmd=download" in x},
text="Download",
),
)

for link in download_links:
parent_row: Tag = cast(Tag, link.find_parent(
attrs={"class": lambda x: x is not None and "row" in x}))
parent_row: Tag = cast(
Tag, link.find_parent(attrs={"class": lambda x: x is not None and "row" in x})
)
name_tag = cast(Optional[Tag], parent_row.find(name="div"))

if not name_tag:
@@ -929,11 +917,9 @@ class IliasPage:
name = _sanitize_path_name(name_tag.get_text().strip())
log.explain(f"Found exercise detail entry {name!r}")

results.append(IliasPageElement.create_new(
IliasElementType.FILE,
self._abs_url_from_link(link),
name
))
results.append(
IliasPageElement.create_new(IliasElementType.FILE, self._abs_url_from_link(link), name)
)

return results

@@ -941,12 +927,15 @@ class IliasPage:
results: list[IliasPageElement] = []

# Find all download links in the container
download_links = cast(list[Tag], self._soup.find_all(
name="a",
# download links contain the given command class
attrs={"href": lambda x: x is not None and "cmd=download" in x},
text="Download"
))
download_links = cast(
list[Tag],
self._soup.find_all(
name="a",
# download links contain the given command class
attrs={"href": lambda x: x is not None and "cmd=download" in x},
text="Download",
),
)

for link in download_links:
parent_row: Tag = cast(Tag, link.find_parent("tr"))
@@ -963,12 +952,9 @@ class IliasPage:
if date is None:
log.warn(f"Date parsing failed for exercise file entry {name!r}")

results.append(IliasPageElement.create_new(
IliasElementType.FILE,
self._abs_url_from_link(link),
name,
date
))
results.append(
IliasPageElement.create_new(IliasElementType.FILE, self._abs_url_from_link(link), name, date)
)

return results

@@ -993,11 +979,11 @@ class IliasPage:
continue

name = _sanitize_path_name(exercise.get_text().strip())
results.append(IliasPageElement.create_new(
IliasElementType.EXERCISE,
self._abs_url_from_link(exercise),
name
))
results.append(
IliasPageElement.create_new(
IliasElementType.EXERCISE, self._abs_url_from_link(exercise), name
)
)

for result in results:
log.explain(f"Found exercise {result.name!r}")
@@ -1043,13 +1029,11 @@ class IliasPage:
continue

log.explain(f"Found {element_name!r} of type {element_type}")
result.append(IliasPageElement.create_new(
element_type,
abs_url,
element_name,
description=description,
skip_sanitize=True
))
result.append(
IliasPageElement.create_new(
element_type, abs_url, element_name, description=description, skip_sanitize=True
)
)

result += self._find_cards()
result += self._find_mediacast_videos()
@@ -1086,11 +1070,13 @@ class IliasPage:
if not title.endswith(".mp4") and not title.endswith(".webm"):
# just to make sure it has some kinda-alrightish ending
title = title + ".mp4"
videos.append(IliasPageElement.create_new(
typ=IliasElementType.MEDIACAST_VIDEO,
url=self._abs_url_from_relative(cast(str, url)),
name=_sanitize_path_name(title)
))
videos.append(
IliasPageElement.create_new(
typ=IliasElementType.MEDIACAST_VIDEO,
url=self._abs_url_from_relative(cast(str, url)),
name=_sanitize_path_name(title),
)
)

return videos

@@ -1114,12 +1100,11 @@ class IliasPage:
log.explain(f"Found external video at {url}, ignoring")
continue

videos.append(IliasPageElement.create_new(
typ=IliasElementType.MOB_VIDEO,
url=url,
name=_sanitize_path_name(title),
mtime=None
))
videos.append(
IliasPageElement.create_new(
typ=IliasElementType.MOB_VIDEO, url=url, name=_sanitize_path_name(title), mtime=None
)
)

return videos

@@ -1161,11 +1146,11 @@ class IliasPage:

# We should not crawl files under meetings
if "ilContainerListItemContentCB" in cast(str, parent.get("class")):
link: Tag = parent.parent.find("a") # type: ignore
link: Tag = parent.parent.find("a")
typ = IliasPage._find_type_for_element(
"meeting",
self._abs_url_from_link(link),
lambda: IliasPage._find_icon_for_folder_entry(link)
lambda: IliasPage._find_icon_for_folder_entry(link),
)
return typ == IliasElementType.MEETING

@@ -1191,9 +1176,11 @@ class IliasPage:

# This is for these weird JS-y blocks and custom item groups
if "ilContainerItemsContainer" in cast(str, parent.get("class")):
data_store_url = parent.parent.get("data-store-url", "").lower() # type: ignore
is_custom_item_group = "baseclass=ilcontainerblockpropertiesstoragegui" in data_store_url \
and "cont_block_id=" in data_store_url
data_store_url = parent.parent.get("data-store-url", "").lower()
is_custom_item_group = (
"baseclass=ilcontainerblockpropertiesstoragegui" in data_store_url
and "cont_block_id=" in data_store_url
)
# I am currently under the impression that *only* those JS blocks have an
# ilNoDisplay class.
if not is_custom_item_group and "ilNoDisplay" not in cast(str, parent.get("class")):
@@ -1212,11 +1199,15 @@ class IliasPage:

if outer_accordion_content:
accordion_tag = cast(Tag, outer_accordion_content.parent)
head_tag = cast(Tag, accordion_tag.find(attrs={
"class": lambda x: x is not None and (
"ilc_va_ihead_VAccordIHead" in x or "ilc_va_ihead_AccordIHead" in x
)
}))
head_tag = cast(
Tag,
accordion_tag.find(
attrs={
"class": lambda x: x is not None
and ("ilc_va_ihead_VAccordIHead" in x or "ilc_va_ihead_AccordIHead" in x)
}
),
)
found_titles.append(head_tag.get_text().strip())

return [_sanitize_path_name(x) for x in reversed(found_titles)]
@@ -1224,14 +1215,12 @@ class IliasPage:
@staticmethod
def _find_link_description(link: Tag) -> Optional[str]:
tile = cast(
Tag,
link.find_parent("div", {"class": lambda x: x is not None and "il_ContainerListItem" in x})
Tag, link.find_parent("div", {"class": lambda x: x is not None and "il_ContainerListItem" in x})
)
if not tile:
return None
description_element = cast(
Tag,
tile.find("div", {"class": lambda x: x is not None and "il_Description" in x})
Tag, tile.find("div", {"class": lambda x: x is not None and "il_Description" in x})
)
if not description_element:
return None
@@ -1242,9 +1231,15 @@ class IliasPage:
# Files have a list of properties (type, modification date, size, etc.)
# In a series of divs.
# Find the parent containing all those divs, so we can filter our what we need
properties_parent = cast(Tag, cast(Tag, link_element.find_parent(
"div", {"class": lambda x: x is not None and "il_ContainerListItem" in x}
)).select_one(".il_ItemProperties"))
properties_parent = cast(
Tag,
cast(
Tag,
link_element.find_parent(
"div", {"class": lambda x: x is not None and "il_ContainerListItem" in x}
),
).select_one(".il_ItemProperties"),
)
# The first one is always the filetype
file_type = cast(Tag, properties_parent.select_one("span.il_ItemProperty")).get_text().strip()

@@ -1271,9 +1266,7 @@ class IliasPage:
for title in card_titles:
url = self._abs_url_from_link(title)
name = _sanitize_path_name(title.get_text().strip())
typ = IliasPage._find_type_for_element(
name, url, lambda: IliasPage._find_icon_from_card(title)
)
typ = IliasPage._find_type_for_element(name, url, lambda: IliasPage._find_icon_from_card(title))

if not typ:
_unexpected_html_warning()
@@ -1300,13 +1293,14 @@ class IliasPage:
continue
url = self._abs_url_from_relative(open_match.group(1))
name = _sanitize_path_name(button.get_text().strip())
typ = IliasPage._find_type_for_element(
name, url, lambda: IliasPage._find_icon_from_card(button)
typ = IliasPage._find_type_for_element(name, url, lambda: IliasPage._find_icon_from_card(button))
caption_parent = cast(
Tag,
button.find_parent(
"div",
attrs={"class": lambda x: x is not None and "caption" in x},
),
)
caption_parent = cast(Tag, button.find_parent(
"div",
attrs={"class": lambda x: x is not None and "caption" in x},
))
caption_container = caption_parent.find_next_sibling("div")
if caption_container:
description = caption_container.get_text().strip()
@@ -1377,9 +1371,7 @@ class IliasPage:

if found_parent is None:
_unexpected_html_warning()
log.warn_contd(
f"Tried to figure out element type, but did not find an icon for {link_element!r}"
)
log.warn_contd(f"Tried to figure out element type, but did not find an icon for {link_element!r}")
return None

# Find the small descriptive icon to figure out the type
@@ -1389,8 +1381,7 @@ class IliasPage:
img_tag = found_parent.select_one("img.icon")

is_session_expansion_button = found_parent.find(
"a",
attrs={"href": lambda x: x is not None and ("crs_next_sess=" in x or "crs_prev_sess=" in x)}
"a", attrs={"href": lambda x: x is not None and ("crs_next_sess=" in x or "crs_prev_sess=" in x)}
)
if img_tag is None and is_session_expansion_button:
log.explain("Found session expansion button, skipping it as it has no content")
@@ -1447,9 +1438,7 @@ class IliasPage:
# Video listing embeds do not have complete ILIAS html. Try to match them by
# their video listing table
video_table = soup.find(
recursive=True,
name="table",
attrs={"id": lambda x: x is not None and x.startswith("tbl_xoct")}
recursive=True, name="table", attrs={"id": lambda x: x is not None and x.startswith("tbl_xoct")}
)
if video_table is not None:
return True
@@ -1462,8 +1451,7 @@ class IliasPage:
@staticmethod
def _find_date_in_text(text: str) -> Optional[datetime]:
modification_date_match = re.search(
r"(((\d+\. \w+ \d+)|(Gestern|Yesterday)|(Heute|Today)|(Morgen|Tomorrow)), \d+:\d+)",
text
r"(((\d+\. \w+ \d+)|(Gestern|Yesterday)|(Heute|Today)|(Morgen|Tomorrow)), \d+:\d+)", text
)
if modification_date_match is not None:
modification_date_str = modification_date_match.group(1)
@@ -1501,8 +1489,8 @@ def _unexpected_html_warning() -> None:
log.warn("Encountered unexpected HTML structure, ignoring element.")


german_months = ['Jan', 'Feb', 'Mär', 'Apr', 'Mai', 'Jun', 'Jul', 'Aug', 'Sep', 'Okt', 'Nov', 'Dez']
english_months = ['Jan', 'Feb', 'Mar', 'Apr', 'May', 'Jun', 'Jul', 'Aug', 'Sep', 'Oct', 'Nov', 'Dec']
german_months = ["Jan", "Feb", "Mär", "Apr", "Mai", "Jun", "Jul", "Aug", "Sep", "Okt", "Nov", "Dez"]
english_months = ["Jan", "Feb", "Mar", "Apr", "May", "Jun", "Jul", "Aug", "Sep", "Oct", "Nov", "Dec"]


def demangle_date(date_str: str, fail_silently: bool = False) -> Optional[datetime]:
@@ -1579,7 +1567,7 @@ def parse_ilias_forum_export(forum_export: BeautifulSoup) -> list[IliasForumThre

title = cast(Tag, p.find("b")).text
if ":" in title:
title = title[title.find(":") + 1:]
title = title[title.find(":") + 1 :]
title = title.strip()

if not content_tag or content_tag.find_previous_sibling("p") != title_tag:
@@ -1604,7 +1592,7 @@ def _guess_timestamp_from_forum_post_content(content: Tag) -> Optional[datetime]

for post in posts:
text = post.text.strip()
text = text[text.rfind("|") + 1:]
text = text[text.rfind("|") + 1 :]
date = demangle_date(text, fail_silently=True)
if not date:
continue

@@ -38,9 +38,7 @@ class ShibbolethLogin:
async with sess.get(url) as response:
shib_url = response.url
if str(shib_url).startswith(self._ilias_url):
log.explain(
"ILIAS recognized our shib token and logged us in in the background, returning"
)
log.explain("ILIAS recognized our shib token and logged us in in the background, returning")
return
soup: BeautifulSoup = soupify(await response.read())

@@ -62,7 +60,7 @@ class ShibbolethLogin:
"fudis_web_authn_assertion_input": "",
}
if csrf_token_input := form.find("input", {"name": "csrf_token"}):
data["csrf_token"] = csrf_token_input["value"] # type: ignore
data["csrf_token"] = csrf_token_input["value"]
soup = await _post(sess, url, data)

if soup.find(id="attributeRelease"):
@@ -81,7 +79,7 @@ class ShibbolethLogin:
# (or clicking "Continue" if you have JS disabled)
relay_state = cast(Tag, soup.find("input", {"name": "RelayState"}))
saml_response = cast(Tag, soup.find("input", {"name": "SAMLResponse"}))
url = form = soup.find("form", {"method": "post"})["action"] # type: ignore
url = form = soup.find("form", {"method": "post"})["action"]
data = { # using the info obtained in the while loop above
"RelayState": cast(str, relay_state["value"]),
"SAMLResponse": cast(str, saml_response["value"]),
@@ -110,7 +108,7 @@ class ShibbolethLogin:
"fudis_otp_input": tfa_token,
}
if csrf_token_input := form.find("input", {"name": "csrf_token"}):
data["csrf_token"] = csrf_token_input["value"] # type: ignore
data["csrf_token"] = csrf_token_input["value"]
return await _post(session, url, data)

@staticmethod

@@ -53,12 +53,11 @@ class KitIpdFolder:


class KitIpdCrawler(HttpCrawler):

def __init__(
self,
name: str,
section: KitIpdCrawlerSection,
config: Config,
self,
name: str,
section: KitIpdCrawlerSection,
config: Config,
):
super().__init__(name, section, config)
self._url = section.target()
@@ -104,11 +103,7 @@ class KitIpdCrawler(HttpCrawler):
await self.gather(tasks)

async def _download_file(
self,
parent: PurePath,
file: KitIpdFile,
etag: Optional[str],
mtime: Optional[datetime]
self, parent: PurePath, file: KitIpdFile, etag: Optional[str], mtime: Optional[datetime]
) -> None:
element_path = parent / file.name


@@ -18,31 +18,28 @@ class LocalCrawlerSection(CrawlerSection):
def crawl_delay(self) -> float:
value = self.s.getfloat("crawl_delay", fallback=0.0)
if value < 0:
self.invalid_value("crawl_delay", value,
"Must not be negative")
self.invalid_value("crawl_delay", value, "Must not be negative")
return value

def download_delay(self) -> float:
value = self.s.getfloat("download_delay", fallback=0.0)
if value < 0:
self.invalid_value("download_delay", value,
"Must not be negative")
self.invalid_value("download_delay", value, "Must not be negative")
return value

def download_speed(self) -> Optional[int]:
value = self.s.getint("download_speed")
if value is not None and value <= 0:
self.invalid_value("download_speed", value,
"Must be greater than 0")
self.invalid_value("download_speed", value, "Must be greater than 0")
return value


class LocalCrawler(Crawler):
def __init__(
self,
name: str,
section: LocalCrawlerSection,
config: Config,
self,
name: str,
section: LocalCrawlerSection,
config: Config,
):
super().__init__(name, section, config)

@@ -74,10 +71,12 @@ class LocalCrawler(Crawler):
tasks = []

async with cl:
await asyncio.sleep(random.uniform(
0.5 * self._crawl_delay,
self._crawl_delay,
))
await asyncio.sleep(
random.uniform(
0.5 * self._crawl_delay,
self._crawl_delay,
)
)

for child in path.iterdir():
pure_child = cl.path / child.name
@@ -93,10 +92,12 @@ class LocalCrawler(Crawler):
return

async with dl as (bar, sink):
await asyncio.sleep(random.uniform(
0.5 * self._download_delay,
self._download_delay,
))
await asyncio.sleep(
random.uniform(
0.5 * self._download_delay,
self._download_delay,
)
)

bar.set_total(stat.st_size)


@@ -16,9 +16,28 @@ def name_variants(path: PurePath) -> Iterator[PurePath]:
class Deduplicator:
FORBIDDEN_CHARS = '<>:"/\\|?*' + "".join([chr(i) for i in range(0, 32)])
FORBIDDEN_NAMES = {
"CON", "PRN", "AUX", "NUL",
"COM1", "COM2", "COM3", "COM4", "COM5", "COM6", "COM7", "COM8", "COM9",
"LPT1", "LPT2", "LPT3", "LPT4", "LPT5", "LPT6", "LPT7", "LPT8", "LPT9",
"CON",
"PRN",
"AUX",
"NUL",
"COM1",
"COM2",
"COM3",
"COM4",
"COM5",
"COM6",
"COM7",
"COM8",
"COM9",
"LPT1",
"LPT2",
"LPT3",
"LPT4",
"LPT5",
"LPT6",
"LPT7",
"LPT8",
"LPT9",
}

def __init__(self, windows_paths: bool) -> None:

@@ -12,12 +12,7 @@ class Slot:


class Limiter:
def __init__(
self,
task_limit: int,
download_limit: int,
task_delay: float
):
def __init__(self, task_limit: int, download_limit: int, task_delay: float):
if task_limit <= 0:
raise ValueError("task limit must be at least 1")
if download_limit <= 0:

@@ -8,8 +8,15 @@ from rich.console import Console, Group
from rich.live import Live
from rich.markup import escape
from rich.panel import Panel
from rich.progress import (BarColumn, DownloadColumn, Progress, TaskID, TextColumn, TimeRemainingColumn,
TransferSpeedColumn)
from rich.progress import (
BarColumn,
DownloadColumn,
Progress,
TaskID,
TextColumn,
TimeRemainingColumn,
TransferSpeedColumn,
)
from rich.table import Column


@@ -176,10 +183,14 @@ class Log:
# Our print function doesn't take types other than strings, but the
# underlying rich.print function does. This call is a special case
# anyways, and we're calling it internally, so this should be fine.
self.print(Panel.fit("""
self.print(
Panel.fit(
"""
Please copy your program output and send it to the PFERD maintainers, either
directly or as a GitHub issue: https://github.com/Garmelon/PFERD/issues/new
""".strip())) # type: ignore
""".strip()
)
)

def explain_topic(self, text: str) -> None:
"""
@@ -236,10 +247,10 @@ directly or as a GitHub issue: https://github.com/Garmelon/PFERD/issues/new

@contextmanager
def _bar(
self,
progress: Progress,
description: str,
total: Optional[float],
self,
progress: Progress,
description: str,
total: Optional[float],
) -> Iterator[ProgressBar]:
if total is None:
# Indeterminate progress bar
@@ -255,11 +266,11 @@ directly or as a GitHub issue: https://github.com/Garmelon/PFERD/issues/new
self._update_live()

def crawl_bar(
self,
style: str,
action: str,
text: str,
total: Optional[float] = None,
self,
style: str,
action: str,
text: str,
total: Optional[float] = None,
) -> AbstractContextManager[ProgressBar]:
"""
Allows markup in the "style" argument which will be applied to the
@@ -271,11 +282,11 @@ directly or as a GitHub issue: https://github.com/Garmelon/PFERD/issues/new
return self._bar(self._crawl_progress, description, total)

def download_bar(
self,
style: str,
action: str,
text: str,
total: Optional[float] = None,
self,
style: str,
action: str,
text: str,
total: Optional[float] = None,
) -> AbstractContextManager[ProgressBar]:
"""
Allows markup in the "style" argument which will be applied to the

@@ -35,8 +35,7 @@ class Redownload(Enum):
try:
return Redownload(string)
except ValueError:
raise ValueError("must be one of 'never', 'never-smart',"
" 'always', 'always-smart'")
raise ValueError("must be one of 'never', 'never-smart', 'always', 'always-smart'")


class OnConflict(Enum):
@@ -51,8 +50,10 @@ class OnConflict(Enum):
try:
return OnConflict(string)
except ValueError:
raise ValueError("must be one of 'prompt', 'local-first',"
" 'remote-first', 'no-delete', 'no-delete-prompt-overwrite'")
raise ValueError(
"must be one of 'prompt', 'local-first',"
" 'remote-first', 'no-delete', 'no-delete-prompt-overwrite'"
)


@dataclass
@@ -96,13 +97,13 @@ class FileSinkToken(ReusableAsyncContextManager[FileSink]):
# download handed back to the OutputDirectory.

def __init__(
self,
output_dir: "OutputDirectory",
remote_path: PurePath,
path: PurePath,
local_path: Path,
heuristics: Heuristics,
on_conflict: OnConflict,
self,
output_dir: "OutputDirectory",
remote_path: PurePath,
path: PurePath,
local_path: Path,
heuristics: Heuristics,
on_conflict: OnConflict,
):
super().__init__()

@@ -118,15 +119,17 @@ class FileSinkToken(ReusableAsyncContextManager[FileSink]):
sink = FileSink(file)

async def after_download() -> None:
await self._output_dir._after_download(DownloadInfo(
self._remote_path,
self._path,
self._local_path,
tmp_path,
self._heuristics,
self._on_conflict,
sink.is_done(),
))
await self._output_dir._after_download(
DownloadInfo(
self._remote_path,
self._path,
self._local_path,
tmp_path,
self._heuristics,
self._on_conflict,
sink.is_done(),
)
)

self._stack.push_async_callback(after_download)
self._stack.enter_context(file)
@@ -138,10 +141,10 @@ class OutputDirectory:
REPORT_FILE = PurePath(".report")

def __init__(
self,
root: Path,
redownload: Redownload,
on_conflict: OnConflict,
self,
root: Path,
redownload: Redownload,
on_conflict: OnConflict,
):
if os.name == "nt":
# Windows limits the path length to 260 for some historical reason.
@@ -193,11 +196,11 @@ class OutputDirectory:
return self._root / path

def _should_download(
self,
local_path: Path,
heuristics: Heuristics,
redownload: Redownload,
on_conflict: OnConflict,
self,
local_path: Path,
heuristics: Heuristics,
redownload: Redownload,
on_conflict: OnConflict,
) -> bool:
if not local_path.exists():
log.explain("No corresponding file present locally")
@@ -270,9 +273,9 @@ class OutputDirectory:
# files.

async def _conflict_lfrf(
self,
on_conflict: OnConflict,
path: PurePath,
self,
on_conflict: OnConflict,
path: PurePath,
) -> bool:
if on_conflict in {OnConflict.PROMPT, OnConflict.NO_DELETE_PROMPT_OVERWRITE}:
async with log.exclusive_output():
@@ -289,9 +292,9 @@ class OutputDirectory:
raise ValueError(f"{on_conflict!r} is not a valid conflict policy")

async def _conflict_ldrf(
self,
on_conflict: OnConflict,
path: PurePath,
self,
on_conflict: OnConflict,
path: PurePath,
) -> bool:
if on_conflict in {OnConflict.PROMPT, OnConflict.NO_DELETE_PROMPT_OVERWRITE}:
async with log.exclusive_output():
@@ -308,10 +311,10 @@ class OutputDirectory:
raise ValueError(f"{on_conflict!r} is not a valid conflict policy")

async def _conflict_lfrd(
self,
on_conflict: OnConflict,
path: PurePath,
parent: PurePath,
self,
on_conflict: OnConflict,
path: PurePath,
parent: PurePath,
) -> bool:
if on_conflict in {OnConflict.PROMPT, OnConflict.NO_DELETE_PROMPT_OVERWRITE}:
async with log.exclusive_output():
@@ -328,9 +331,9 @@ class OutputDirectory:
raise ValueError(f"{on_conflict!r} is not a valid conflict policy")

async def _conflict_delete_lf(
self,
on_conflict: OnConflict,
path: PurePath,
self,
on_conflict: OnConflict,
path: PurePath,
) -> bool:
if on_conflict == OnConflict.PROMPT:
async with log.exclusive_output():
@@ -353,8 +356,8 @@ class OutputDirectory:
return base.parent / name

async def _create_tmp_file(
self,
local_path: Path,
self,
local_path: Path,
) -> Tuple[Path, BinaryIO]:
"""
May raise an OutputDirError.
@@ -388,14 +391,14 @@ class OutputDirectory:
return self._should_download(local_path, heuristics, redownload, on_conflict)

async def download(
self,
remote_path: PurePath,
path: PurePath,
*,
etag_differs: Optional[bool] = None,
mtime: Optional[datetime] = None,
redownload: Optional[Redownload] = None,
on_conflict: Optional[OnConflict] = None,
self,
remote_path: PurePath,
path: PurePath,
*,
etag_differs: Optional[bool] = None,
mtime: Optional[datetime] = None,
redownload: Optional[Redownload] = None,
on_conflict: Optional[OnConflict] = None,
) -> Optional[FileSinkToken]:
"""
May throw an OutputDirError, a MarkDuplicateError or a

@@ -66,10 +66,10 @@ class Pferd:
return crawlers_to_run

def _find_crawlers_to_run(
self,
config: Config,
cli_crawlers: Optional[List[str]],
cli_skips: Optional[List[str]],
self,
config: Config,
cli_crawlers: Optional[List[str]],
cli_skips: Optional[List[str]],
) -> List[str]:
log.explain_topic("Deciding which crawlers to run")


@@ -208,7 +208,7 @@ class Line:

@property
def rest(self) -> str:
return self.line[self.index:]
return self.line[self.index :]

def peek(self, amount: int = 1) -> str:
return self.rest[:amount]
@@ -327,21 +327,27 @@ def parse_right(line: Line) -> Union[str, Ignore]:


def parse_arrow_name(line: Line) -> str:
return line.one_of([
lambda: line.expect("exact-re"),
lambda: line.expect("exact"),
lambda: line.expect("name-re"),
lambda: line.expect("name"),
lambda: line.expect("re"),
lambda: line.expect(""),
], "Expected arrow name")
return line.one_of(
[
lambda: line.expect("exact-re"),
lambda: line.expect("exact"),
lambda: line.expect("name-re"),
lambda: line.expect("name"),
lambda: line.expect("re"),
lambda: line.expect(""),
],
"Expected arrow name",
)


def parse_arrow_head(line: Line) -> ArrowHead:
return line.one_of([
lambda: line.expect_with(">>", ArrowHead.SEQUENCE),
lambda: line.expect_with(">", ArrowHead.NORMAL),
], "Expected arrow head")
return line.one_of(
[
lambda: line.expect_with(">>", ArrowHead.SEQUENCE),
lambda: line.expect_with(">", ArrowHead.NORMAL),
],
"Expected arrow head",
)


def parse_eol(line: Line) -> None:
@@ -413,12 +419,12 @@ class Transformer:

def transform(self, path: PurePath) -> Optional[PurePath]:
for i, (line, tf) in enumerate(self._tfs):
log.explain(f"Testing rule {i+1}: {line}")
log.explain(f"Testing rule {i + 1}: {line}")

try:
result = tf.transform(path)
except Exception as e:
log.warn(f"Error while testing rule {i+1}: {line}")
log.warn(f"Error while testing rule {i + 1}: {line}")
log.warn_contd(str(e))
continue


@@ -131,10 +131,10 @@ class ReusableAsyncContextManager(ABC, Generic[T]):
return result

async def __aexit__(
self,
exc_type: Optional[Type[BaseException]],
exc_value: Optional[BaseException],
traceback: Optional[TracebackType],
self,
exc_type: Optional[Type[BaseException]],
exc_value: Optional[BaseException],
traceback: Optional[TracebackType],
) -> Optional[bool]:
if not self._active:
raise RuntimeError("__aexit__ called too many times")