Mirror of https://github.com/Garmelon/PFERD.git (synced 2026-01-08 13:42:30 +01:00)

Compare commits: v3.6.0 ... debug/asyn (60 commits)
| SHA1 |
|---|
| 4eab927899 |
| e246053de2 |
| 3f5637366e |
| 3453bbc991 |
| bd7b384e8f |
| 6353571eb4 |
| 1e56976b9f |
| bb0d68da65 |
| c1c78673aa |
| ebcfb2a2f3 |
| 5646e933fd |
| 6e563134b2 |
| 2cf0e060ed |
| ee4625be78 |
| f6c713d621 |
| 207af51aa4 |
| 3755f593ff |
| 465f8b28c0 |
| 27e69af2f3 |
| 56e3065950 |
| 549ce6cce9 |
| 34564cedb4 |
| 2b0d20a1f6 |
| 8caad0008d |
| 77a23265a9 |
| 4c230ef6dd |
| b305e1ce23 |
| bdf17f5c87 |
| 77fce7daf8 |
| 653bf139f0 |
| 3f60638d33 |
| b97b6fae6b |
| 477234ad0d |
| 63f25277b0 |
| c8eff04ae0 |
| edc482cdf4 |
| 72cd0f77e2 |
| be175f9347 |
| ba2833dba5 |
| 2f0e792670 |
| 5f88539f7e |
| bd9d7efe64 |
| 16a2dd5b15 |
| 678283d341 |
| 287173b0b1 |
| 712217e959 |
| 6dda4c55a8 |
| 596b6a7688 |
| 5983200247 |
| 26e802d88b |
| f5c4e82816 |
| f5273f7ca0 |
| fa71a9f44f |
| 81d6ff53c4 |
| d7a2b6e019 |
| 71c65e89d1 |
| c1046498e7 |
| 8fbd1978af |
| 739dd95850 |
| c54c3bcfa1 |
.git-blame-ignore-revs (1 changed line, new file)

@@ -0,0 +1 @@
+ 2cf0e060ed126537dd993896b6aa793e2a6b9e80
.github/workflows/build-and-release.yml (14 changed lines, vendored)

@@ -14,23 +14,17 @@ jobs:
        fail-fast: false
        matrix:
          os: [ubuntu-latest, windows-latest, macos-13, macos-latest]
-         python: ["3.9"]
+         python: ["3.11"]
      steps:
        - uses: actions/checkout@v4

-       - uses: actions/setup-python@v5
+       - name: Install uv
+         uses: astral-sh/setup-uv@v7
          with:
            python-version: ${{ matrix.python }}

        - name: Set up project
-         if: matrix.os != 'windows-latest'
-         run: ./scripts/setup
-
-       - name: Set up project on windows
-         if: matrix.os == 'windows-latest'
-         # For some reason, `pip install --upgrade pip` doesn't work on
-         # 'windows-latest'. The installed pip version works fine however.
-         run: ./scripts/setup --no-pip
+         run: uv sync

        - name: Run checks
          run: |
CHANGELOG.md (76 changed lines)

@@ -22,6 +22,82 @@ ambiguous situations.

## Unreleased

### Added
- Store the description when using the `internet-shortcut` link format
- Support for basic auth with the kit-ipd crawler

### Fixed
- Event loop errors on Windows with Python 3.14
- Sanitize `/` in headings in kit-ipd crawler
- Crawl info tab again

## 3.8.3 - 2025-07-01

### Added
- Support for link collections.
  In "fancy" mode, a single HTML file with multiple links is generated.
  In all other modes, PFERD creates a folder for the collection and a new file
  for every link inside.

### Fixed
- Crawling of exercises with instructions
- Don't download unavailable elements.
  Elements that are unavailable (for example, because their availability is
  time restricted) will not download the HTML for the info page anymore.
- `base_url` argument for `ilias-web` crawler causing crashes

## 3.8.2 - 2025-04-29

### Changed
- Explicitly mention that wikis are not supported at the moment and ignore them

### Fixed
- Ilias-native login
- Exercise crawling

## 3.8.1 - 2025-04-17

### Fixed
- Description html files now specify a UTF-8 encoding
- Images in descriptions now always have a white background

## 3.8.0 - 2025-04-16

### Added
- Support for ILIAS 9

### Changed
- Added prettier CSS to forum threads
- Downloaded forum threads now link to the forum instead of the ILIAS thread
- Increase minimum supported Python version to 3.11
- Do not crawl nested courses (courses linked in other courses)

### Fixed
- File links in report on Windows
- TOTP authentication in KIT Shibboleth
- Forum crawling only considering the first 20 entries

## 3.7.0 - 2024-11-13

### Added
- Support for MOB videos in page descriptions
- Clickable links in the report to directly open new/modified/not-deleted files
- Support for non KIT shibboleth login

### Changed
- Remove videos from description pages
- Perform ILIAS cycle detection after processing the transform to allow
  ignoring duplicated elements
- Parse headings (h1-h3) as folders in kit-ipd crawler

### Fixed
- Personal desktop/dashboard/favorites crawling
- Crawling of nested courses
- Downloading of links with no target URL
- Handle row flex on description pages
- Add `<!DOCTYPE html>` heading to forum threads to fix mime type detection
- Handle groups in cards

## 3.6.0 - 2024-10-23

### Added
CONFIG.md (24 changed lines)

@@ -153,6 +153,7 @@ requests is likely a good idea.

  - `link_regex`: A regex that is matched against the `href` part of links. If it
    matches, the given link is downloaded as a file. This is used to extract
    files from KIT-IPD pages. (Default: `^.*?[^/]+\.(pdf|zip|c|cpp|java)$`)
+ - `auth`: Name of auth section to use for basic authentication. (Optional)
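For reference, the new `auth` option plugs into an ordinary kit-ipd crawler section. A minimal sketch (the section names, the target URL and the output directory are placeholders, not values taken from this diff):

```
[crawl:ipd-lecture]
type = kit-ipd
target = https://example.kit.edu/lecture/page.html
output_dir = ipd-lecture
# Use the credentials from the auth section below for HTTP basic auth
auth = auth:ipd-lecture

[auth:ipd-lecture]
type = simple
```

The `--basic-auth` CLI flag added further down in this diff wires up the same thing automatically by creating an `auth:kit-ipd` section of type `simple`.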
### The `ilias-web` crawler

@@ -163,12 +164,15 @@ out of the box for the corresponding universities:

[ilias-dl]: https://github.com/V3lop5/ilias-downloader/blob/main/configs "ilias-downloader configs"

- | University    | `base_url`                           | `client_id`   |
- |---------------|--------------------------------------|---------------|
- | FH Aachen     | https://www.ili.fh-aachen.de         | elearning     |
- | Uni Köln      | https://www.ilias.uni-koeln.de/ilias | uk            |
- | Uni Konstanz  | https://ilias.uni-konstanz.de        | ILIASKONSTANZ |
- | Uni Stuttgart | https://ilias3.uni-stuttgart.de      | Uni_Stuttgart |
+ | University      | `base_url`                              | `login_type` | `client_id`   |
+ |-----------------|-----------------------------------------|--------------|---------------|
+ | FH Aachen       | https://www.ili.fh-aachen.de            | local        | elearning     |
+ | HHU Düsseldorf  | https://ilias.hhu.de                    | local        | UniRZ         |
+ | Uni Köln        | https://www.ilias.uni-koeln.de/ilias    | local        | uk            |
+ | Uni Konstanz    | https://ilias.uni-konstanz.de           | local        | ILIASKONSTANZ |
+ | Uni Stuttgart   | https://ilias3.uni-stuttgart.de         | local        | Uni_Stuttgart |
+ | Uni Tübingen    | https://ovidius.uni-tuebingen.de/ilias3 | shibboleth   |               |
+ | KIT ILIAS Pilot | https://pilot.ilias.studium.kit.edu     | shibboleth   | pilot         |

If your university isn't listed, try navigating to your instance's login page.
Assuming no custom login service is used, the URL will look something like this:

@@ -180,7 +184,11 @@ Assuming no custom login service is used, the URL will look something like this:

If the values work, feel free to submit a PR and add them to the table above.

  - `base_url`: The URL where the ILIAS instance is located. (Required)
- - `client_id`: An ID used for authentication. (Required)
+ - `login_type`: How you authenticate. (Required)
+   - `local`: Use `client_id` for authentication.
+   - `shibboleth`: Use shibboleth for authentication.
+ - `client_id`: An ID used for authentication if `login_type` is `local`. Is
+   ignored if `login_type` is `shibboleth`.
  - `target`: The ILIAS element to crawl. (Required)
    - `desktop`: Crawl your personal desktop / dashboard
    - `<course id>`: Crawl the course with the given id

@@ -191,6 +199,8 @@ If the values work, feel free to submit a PR and add them to the table above.

    and duplication warnings if you are a member of an ILIAS group. The
    `desktop` target is generally preferable.
  - `auth`: Name of auth section to use for login. (Required)
+ - `tfa_auth`: Name of auth section to use for two-factor authentication. Only
+   uses the auth section's password. (Default: Anonymous `tfa` authenticator)
  - `links`: How to represent external links. (Default: `fancy`)
    - `ignore`: Don't download links.
    - `plaintext`: A text file containing only the URL.
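Putting the new options together, a small example section (the section names and the FH Aachen values are illustrative; any instance from the table works the same way, and this sketch is not part of the diff itself):

```
[crawl:ilias]
type = ilias-web
base_url = https://www.ili.fh-aachen.de
login_type = local
client_id = elearning
target = desktop
auth = auth:ilias
links = fancy

[auth:ilias]
type = simple
```

With `login_type = shibboleth`, the `client_id` value is simply ignored, as described above.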
DEV.md (21 changed lines)

@@ -9,30 +9,25 @@ particular [this][ppug-1] and [this][ppug-2] guide).

## Setting up a dev environment

- The use of [venv][venv] is recommended. To initially set up a development
- environment, run these commands in the same directory as this file:
+ The use of [venv][venv] and [uv][uv] is recommended. To initially set up a
+ development environment, run these commands in the same directory as this file:

  ```
- $ python -m venv .venv
+ $ uv sync
  $ . .venv/bin/activate
- $ ./scripts/setup
  ```

- The setup script installs a few required dependencies and tools. It also
- installs PFERD via `pip install --editable .`, which means that you can just run
- `pferd` as if it was installed normally. Since PFERD was installed with
- `--editable`, there is no need to re-run `pip install` when the source code is
- changed.
-
- If you get any errors because pip can't update itself, try running
- `./scripts/setup --no-pip` instead of `./scripts/setup`.
+ This installs all required dependencies and tools. It also installs PFERD as
+ *editable*, which means that you can just run `pferd` as if it was installed
+ normally. Since PFERD was installed with `--editable`, there is no need to
+ re-run `uv sync` when the source code is changed.

For more details, see [this part of the Python Tutorial][venv-tut] and
[this section on "development mode"][ppug-dev].

[venv]: <https://docs.python.org/3/library/venv.html> "venv - Creation of virtual environments"
[venv-tut]: <https://docs.python.org/3/tutorial/venv.html> "12. Virtual Environments and Packages"
[ppug-dev]: <https://packaging.python.org/guides/distributing-packages-using-setuptools/#working-in-development-mode> "Working in "development mode""
+ [uv]: <https://docs.astral.sh/uv/> "uv - An extremely fast Python package and project manager"

## Checking and formatting the code
LICENSE (2 changed lines)

@@ -1,6 +1,6 @@
  Copyright 2019-2024 Garmelon, I-Al-Istannen, danstooamerican, pavelzw,
                      TheChristophe, Scriptim, thelukasprobst, Toorero,
-                     Mr-Pine, p-fruck
+                     Mr-Pine, p-fruck, PinieP

  Permission is hereby granted, free of charge, to any person obtaining a copy of
  this software and associated documentation files (the "Software"), to deal in
@@ -133,7 +133,8 @@ def main() -> None:
          # https://bugs.python.org/issue39232
          # https://github.com/encode/httpx/issues/914#issuecomment-780023632
          # TODO Fix this properly
-         loop = asyncio.get_event_loop()
+         loop = asyncio.new_event_loop()
+         asyncio.set_event_loop(loop)
          loop.run_until_complete(pferd.run(args.debug_transforms))
          loop.run_until_complete(asyncio.sleep(1))
          loop.close()
@@ -1,5 +1,5 @@
+ from collections.abc import Callable
  from configparser import SectionProxy
- from typing import Callable, Dict

  from ..config import Config
  from .authenticator import Authenticator, AuthError, AuthLoadError, AuthSection  # noqa: F401

@@ -9,21 +9,19 @@ from .pass_ import PassAuthenticator, PassAuthSection
  from .simple import SimpleAuthenticator, SimpleAuthSection
  from .tfa import TfaAuthenticator

- AuthConstructor = Callable[[
-     str,           # Name (without the "auth:" prefix)
-     SectionProxy,  # Authenticator's section of global config
-     Config,        # Global config
- ], Authenticator]
+ AuthConstructor = Callable[
+     [
+         str,           # Name (without the "auth:" prefix)
+         SectionProxy,  # Authenticator's section of global config
+         Config,        # Global config
+     ],
+     Authenticator,
+ ]

- AUTHENTICATORS: Dict[str, AuthConstructor] = {
-     "credential-file": lambda n, s, c:
-         CredentialFileAuthenticator(n, CredentialFileAuthSection(s), c),
-     "keyring": lambda n, s, c:
-         KeyringAuthenticator(n, KeyringAuthSection(s)),
-     "pass": lambda n, s, c:
-         PassAuthenticator(n, PassAuthSection(s)),
-     "simple": lambda n, s, c:
-         SimpleAuthenticator(n, SimpleAuthSection(s)),
-     "tfa": lambda n, s, c:
-         TfaAuthenticator(n),
+ AUTHENTICATORS: dict[str, AuthConstructor] = {
+     "credential-file": lambda n, s, c: CredentialFileAuthenticator(n, CredentialFileAuthSection(s), c),
+     "keyring": lambda n, s, c: KeyringAuthenticator(n, KeyringAuthSection(s)),
+     "pass": lambda n, s, c: PassAuthenticator(n, PassAuthSection(s)),
+     "simple": lambda n, s, c: SimpleAuthenticator(n, SimpleAuthSection(s)),
+     "tfa": lambda n, s, c: TfaAuthenticator(n),
  }
@@ -1,5 +1,4 @@
  from abc import ABC, abstractmethod
- from typing import Tuple

  from ..config import Section

@@ -35,7 +34,7 @@ class Authenticator(ABC):
          self.name = name

      @abstractmethod
-     async def credentials(self) -> Tuple[str, str]:
+     async def credentials(self) -> tuple[str, str]:
          pass

      async def username(self) -> str:
@@ -1,5 +1,4 @@
  from pathlib import Path
- from typing import Tuple

  from ..config import Config
  from ..utils import fmt_real_path

@@ -23,7 +22,9 @@ class CredentialFileAuthenticator(Authenticator):
              with open(path, encoding="utf-8") as f:
                  lines = list(f)
          except UnicodeDecodeError:
-             raise AuthLoadError(f"Credential file at {fmt_real_path(path)} is not encoded using UTF-8")
+             raise AuthLoadError(
+                 f"Credential file at {fmt_real_path(path)} is not encoded using UTF-8"
+             ) from None
          except OSError as e:
              raise AuthLoadError(f"No credential file at {fmt_real_path(path)}") from e

@@ -42,5 +43,5 @@ class CredentialFileAuthenticator(Authenticator):
          self._username = uline[9:]
          self._password = pline[9:]

-     async def credentials(self) -> Tuple[str, str]:
+     async def credentials(self) -> tuple[str, str]:
          return self._username, self._password
@@ -1,4 +1,4 @@
- from typing import Optional, Tuple
+ from typing import Optional

  import keyring

@@ -17,7 +17,6 @@ class KeyringAuthSection(AuthSection):


  class KeyringAuthenticator(Authenticator):
-
      def __init__(self, name: str, section: KeyringAuthSection) -> None:
          super().__init__(name)

@@ -28,7 +27,7 @@ class KeyringAuthenticator(Authenticator):
          self._password_invalidated = False
          self._username_fixed = section.username() is not None

-     async def credentials(self) -> Tuple[str, str]:
+     async def credentials(self) -> tuple[str, str]:
          # Request the username
          if self._username is None:
              async with log.exclusive_output():
@@ -1,6 +1,5 @@
  import re
  import subprocess
- from typing import List, Tuple

  from ..logging import log
  from .authenticator import Authenticator, AuthError, AuthSection

@@ -12,11 +11,11 @@ class PassAuthSection(AuthSection):
              self.missing_value("passname")
          return value

-     def username_prefixes(self) -> List[str]:
+     def username_prefixes(self) -> list[str]:
          value = self.s.get("username_prefixes", "login,username,user")
          return [prefix.lower() for prefix in value.split(",")]

-     def password_prefixes(self) -> List[str]:
+     def password_prefixes(self) -> list[str]:
          value = self.s.get("password_prefixes", "password,pass,secret")
          return [prefix.lower() for prefix in value.split(",")]

@@ -31,14 +30,14 @@ class PassAuthenticator(Authenticator):
          self._username_prefixes = section.username_prefixes()
          self._password_prefixes = section.password_prefixes()

-     async def credentials(self) -> Tuple[str, str]:
+     async def credentials(self) -> tuple[str, str]:
          log.explain_topic("Obtaining credentials from pass")

          try:
              log.explain(f"Calling 'pass show {self._passname}'")
              result = subprocess.check_output(["pass", "show", self._passname], text=True)
          except subprocess.CalledProcessError as e:
-             raise AuthError(f"Failed to get password info from {self._passname}: {e}")
+             raise AuthError(f"Failed to get password info from {self._passname}: {e}") from e

          prefixed = {}
          unprefixed = []
@@ -1,4 +1,4 @@
- from typing import Optional, Tuple
+ from typing import Optional

  from ..logging import log
  from ..utils import agetpass, ainput

@@ -23,7 +23,7 @@ class SimpleAuthenticator(Authenticator):
          self._username_fixed = self.username is not None
          self._password_fixed = self.password is not None

-     async def credentials(self) -> Tuple[str, str]:
+     async def credentials(self) -> tuple[str, str]:
          if self._username is not None and self._password is not None:
              return self._username, self._password
@@ -1,5 +1,3 @@
- from typing import Tuple
-
  from ..logging import log
  from ..utils import ainput
  from .authenticator import Authenticator, AuthError

@@ -17,7 +15,7 @@ class TfaAuthenticator(Authenticator):
          code = await ainput("TFA code: ")
          return code

-     async def credentials(self) -> Tuple[str, str]:
+     async def credentials(self) -> tuple[str, str]:
          raise AuthError("TFA authenticator does not support usernames")

      def invalidate_username(self) -> None:
@@ -21,23 +21,20 @@ GROUP.add_argument(
      "--base-url",
      type=str,
      metavar="BASE_URL",
-     help="The base url of the ilias instance"
+     help="The base url of the ilias instance",
  )

  GROUP.add_argument(
      "--client-id",
      type=str,
      metavar="CLIENT_ID",
-     help="The client id of the ilias instance"
+     help="The client id of the ilias instance",
  )

  configure_common_group_args(GROUP)


- def load(
-     args: argparse.Namespace,
-     parser: configparser.ConfigParser,
- ) -> None:
+ def load(args: argparse.Namespace, parser: configparser.ConfigParser) -> None:
      log.explain(f"Creating config for command '{COMMAND_NAME}'")

      parser["crawl:ilias"] = {}

@@ -45,8 +42,8 @@ def load(
      load_crawler(args, section)

      section["type"] = COMMAND_NAME
-     if args.ilias_url is not None:
-         section["base_url"] = args.ilias_url
+     if args.base_url is not None:
+         section["base_url"] = args.base_url
      if args.client_id is not None:
          section["client_id"] = args.client_id
@@ -21,8 +21,8 @@ configure_common_group_args(GROUP)


  def load(
-         args: argparse.Namespace,
-         parser: configparser.ConfigParser,
+     args: argparse.Namespace,
+     parser: configparser.ConfigParser,
  ) -> None:
      log.explain(f"Creating config for command '{COMMAND_NAME}'")
@@ -18,25 +18,30 @@ GROUP.add_argument(
      "--link-regex",
      type=str,
      metavar="REGEX",
-     help="href-matching regex to identify downloadable files"
+     help="href-matching regex to identify downloadable files",
  )
+ GROUP.add_argument(
+     "--basic-auth",
+     action="store_true",
+     help="enable basic authentication",
+ )
  GROUP.add_argument(
      "target",
      type=str,
      metavar="TARGET",
-     help="url to crawl"
+     help="url to crawl",
  )
  GROUP.add_argument(
      "output",
      type=Path,
      metavar="OUTPUT",
-     help="output directory"
+     help="output directory",
  )


  def load(
-         args: argparse.Namespace,
-         parser: configparser.ConfigParser,
+     args: argparse.Namespace,
+     parser: configparser.ConfigParser,
  ) -> None:
      log.explain("Creating config for command 'kit-ipd'")

@@ -50,5 +55,11 @@ def load(
      if args.link_regex:
          section["link_regex"] = str(args.link_regex)

+     if args.basic_auth:
+         section["auth"] = "auth:kit-ipd"
+         parser["auth:kit-ipd"] = {}
+         auth_section = parser["auth:kit-ipd"]
+         auth_section["type"] = "simple"


  SUBPARSER.set_defaults(command=load)
@@ -18,37 +18,37 @@ GROUP.add_argument(
      "target",
      type=Path,
      metavar="TARGET",
-     help="directory to crawl"
+     help="directory to crawl",
  )
  GROUP.add_argument(
      "output",
      type=Path,
      metavar="OUTPUT",
-     help="output directory"
+     help="output directory",
  )
  GROUP.add_argument(
      "--crawl-delay",
      type=float,
      metavar="SECONDS",
-     help="artificial delay to simulate for crawl requests"
+     help="artificial delay to simulate for crawl requests",
  )
  GROUP.add_argument(
      "--download-delay",
      type=float,
      metavar="SECONDS",
-     help="artificial delay to simulate for download requests"
+     help="artificial delay to simulate for download requests",
  )
  GROUP.add_argument(
      "--download-speed",
      type=int,
      metavar="BYTES_PER_SECOND",
-     help="download speed to simulate"
+     help="download speed to simulate",
  )


  def load(
-         args: argparse.Namespace,
-         parser: configparser.ConfigParser,
+     args: argparse.Namespace,
+     parser: configparser.ConfigParser,
  ) -> None:
      log.explain("Creating config for command 'local'")
@@ -12,58 +12,60 @@ def configure_common_group_args(group: argparse._ArgumentGroup) -> None:
      "target",
      type=str,
      metavar="TARGET",
-     help="course id, 'desktop', or ILIAS URL to crawl"
+     help="course id, 'desktop', or ILIAS URL to crawl",
  )
  group.add_argument(
      "output",
      type=Path,
      metavar="OUTPUT",
-     help="output directory"
+     help="output directory",
  )
  group.add_argument(
-     "--username", "-u",
+     "--username",
+     "-u",
      type=str,
      metavar="USERNAME",
-     help="user name for authentication"
+     help="user name for authentication",
  )
  group.add_argument(
      "--keyring",
      action=BooleanOptionalAction,
-     help="use the system keyring to store and retrieve passwords"
+     help="use the system keyring to store and retrieve passwords",
  )
  group.add_argument(
      "--credential-file",
      type=Path,
      metavar="PATH",
-     help="read username and password from a credential file"
+     help="read username and password from a credential file",
  )
  group.add_argument(
      "--links",
      type=show_value_error(Links.from_string),
      metavar="OPTION",
-     help="how to represent external links"
+     help="how to represent external links",
  )
  group.add_argument(
      "--link-redirect-delay",
      type=int,
      metavar="SECONDS",
-     help="time before 'fancy' links redirect to to their target (-1 to disable)"
+     help="time before 'fancy' links redirect to to their target (-1 to disable)",
  )
  group.add_argument(
      "--videos",
      action=BooleanOptionalAction,
-     help="crawl and download videos"
+     help="crawl and download videos",
  )
  group.add_argument(
      "--forums",
      action=BooleanOptionalAction,
-     help="crawl and download forum posts"
+     help="crawl and download forum posts",
  )
  group.add_argument(
-     "--http-timeout", "-t",
+     "--http-timeout",
+     "-t",
      type=float,
      metavar="SECONDS",
-     help="timeout for all HTTP requests"
+     help="timeout for all HTTP requests",
  )
@@ -1,8 +1,9 @@
  import argparse
  import configparser
  from argparse import ArgumentTypeError
+ from collections.abc import Callable, Sequence
  from pathlib import Path
- from typing import Any, Callable, List, Optional, Sequence, Union
+ from typing import Any, Optional

  from ..output_dir import OnConflict, Redownload
  from ..version import NAME, VERSION

@@ -15,15 +16,15 @@ class ParserLoadError(Exception):
  # TODO Replace with argparse version when updating to 3.9?
  class BooleanOptionalAction(argparse.Action):
      def __init__(
-         self,
-         option_strings: List[str],
-         dest: Any,
-         default: Any = None,
-         type: Any = None,
-         choices: Any = None,
-         required: Any = False,
-         help: Any = None,
-         metavar: Any = None,
+         self,
+         option_strings: list[str],
+         dest: Any,
+         default: Any = None,
+         type: Any = None,
+         choices: Any = None,
+         required: Any = False,
+         help: Any = None,
+         metavar: Any = None,
      ):
          if len(option_strings) != 1:
              raise ValueError("There must be exactly one option string")

@@ -48,11 +49,11 @@ class BooleanOptionalAction(argparse.Action):
      )

      def __call__(
-         self,
-         parser: argparse.ArgumentParser,
-         namespace: argparse.Namespace,
-         values: Union[str, Sequence[Any], None],
-         option_string: Optional[str] = None,
+         self,
+         parser: argparse.ArgumentParser,
+         namespace: argparse.Namespace,
+         values: str | Sequence[Any] | None,
+         option_string: Optional[str] = None,
      ) -> None:
          if option_string and option_string in self.option_strings:
              value = not option_string.startswith("--no-")

@@ -67,11 +68,13 @@ def show_value_error(inner: Callable[[str], Any]) -> Callable[[str], Any]:
      Some validation functions (like the from_string in our enums) raise a ValueError.
      Argparse only pretty-prints ArgumentTypeErrors though, so we need to wrap our ValueErrors.
      """

      def wrapper(input: str) -> Any:
          try:
              return inner(input)
          except ValueError as e:
-             raise ArgumentTypeError(e)
+             raise ArgumentTypeError(e) from e

      return wrapper
||||
@@ -81,52 +84,57 @@ CRAWLER_PARSER_GROUP = CRAWLER_PARSER.add_argument_group(
|
||||
description="arguments common to all crawlers",
|
||||
)
|
||||
CRAWLER_PARSER_GROUP.add_argument(
|
||||
"--redownload", "-r",
|
||||
"--redownload",
|
||||
"-r",
|
||||
type=show_value_error(Redownload.from_string),
|
||||
metavar="OPTION",
|
||||
help="when to download a file that's already present locally"
|
||||
help="when to download a file that's already present locally",
|
||||
)
|
||||
CRAWLER_PARSER_GROUP.add_argument(
|
||||
"--on-conflict",
|
||||
type=show_value_error(OnConflict.from_string),
|
||||
metavar="OPTION",
|
||||
help="what to do when local and remote files or directories differ"
|
||||
help="what to do when local and remote files or directories differ",
|
||||
)
|
||||
CRAWLER_PARSER_GROUP.add_argument(
|
||||
"--transform", "-T",
|
||||
"--transform",
|
||||
"-T",
|
||||
action="append",
|
||||
type=str,
|
||||
metavar="RULE",
|
||||
help="add a single transformation rule. Can be specified multiple times"
|
||||
help="add a single transformation rule. Can be specified multiple times",
|
||||
)
|
||||
CRAWLER_PARSER_GROUP.add_argument(
|
||||
"--tasks", "-n",
|
||||
"--tasks",
|
||||
"-n",
|
||||
type=int,
|
||||
metavar="N",
|
||||
help="maximum number of concurrent tasks (crawling, downloading)"
|
||||
help="maximum number of concurrent tasks (crawling, downloading)",
|
||||
)
|
||||
CRAWLER_PARSER_GROUP.add_argument(
|
||||
"--downloads", "-N",
|
||||
"--downloads",
|
||||
"-N",
|
||||
type=int,
|
||||
metavar="N",
|
||||
help="maximum number of tasks that may download data at the same time"
|
||||
help="maximum number of tasks that may download data at the same time",
|
||||
)
|
||||
CRAWLER_PARSER_GROUP.add_argument(
|
||||
"--task-delay", "-d",
|
||||
"--task-delay",
|
||||
"-d",
|
||||
type=float,
|
||||
metavar="SECONDS",
|
||||
help="time the crawler should wait between subsequent tasks"
|
||||
help="time the crawler should wait between subsequent tasks",
|
||||
)
|
||||
CRAWLER_PARSER_GROUP.add_argument(
|
||||
"--windows-paths",
|
||||
action=BooleanOptionalAction,
|
||||
help="whether to repair invalid paths on windows"
|
||||
help="whether to repair invalid paths on windows",
|
||||
)
|
||||
|
||||
|
||||
def load_crawler(
|
||||
args: argparse.Namespace,
|
||||
section: configparser.SectionProxy,
|
||||
args: argparse.Namespace,
|
||||
section: configparser.SectionProxy,
|
||||
) -> None:
|
||||
if args.redownload is not None:
|
||||
section["redownload"] = args.redownload.value
|
||||
@@ -152,79 +160,79 @@ PARSER.add_argument(
|
||||
version=f"{NAME} {VERSION} (https://github.com/Garmelon/PFERD)",
|
||||
)
|
||||
PARSER.add_argument(
|
||||
"--config", "-c",
|
||||
"--config",
|
||||
"-c",
|
||||
type=Path,
|
||||
metavar="PATH",
|
||||
help="custom config file"
|
||||
help="custom config file",
|
||||
)
|
||||
PARSER.add_argument(
|
||||
"--dump-config",
|
||||
action="store_true",
|
||||
help="dump current configuration to the default config path and exit"
|
||||
help="dump current configuration to the default config path and exit",
|
||||
)
|
||||
PARSER.add_argument(
|
||||
"--dump-config-to",
|
||||
metavar="PATH",
|
||||
help="dump current configuration to a file and exit."
|
||||
" Use '-' as path to print to stdout instead"
|
||||
help="dump current configuration to a file and exit. Use '-' as path to print to stdout instead",
|
||||
)
|
||||
PARSER.add_argument(
|
||||
"--debug-transforms",
|
||||
action="store_true",
|
||||
help="apply transform rules to files of previous run"
|
||||
help="apply transform rules to files of previous run",
|
||||
)
|
||||
PARSER.add_argument(
|
||||
"--crawler", "-C",
|
||||
"--crawler",
|
||||
"-C",
|
||||
action="append",
|
||||
type=str,
|
||||
metavar="NAME",
|
||||
help="only execute a single crawler."
|
||||
" Can be specified multiple times to execute multiple crawlers"
|
||||
help="only execute a single crawler. Can be specified multiple times to execute multiple crawlers",
|
||||
)
|
||||
PARSER.add_argument(
|
||||
"--skip", "-S",
|
||||
"--skip",
|
||||
"-S",
|
||||
action="append",
|
||||
type=str,
|
||||
metavar="NAME",
|
||||
help="don't execute this particular crawler."
|
||||
" Can be specified multiple times to skip multiple crawlers"
|
||||
help="don't execute this particular crawler. Can be specified multiple times to skip multiple crawlers",
|
||||
)
|
||||
PARSER.add_argument(
|
||||
"--working-dir",
|
||||
type=Path,
|
||||
metavar="PATH",
|
||||
help="custom working directory"
|
||||
help="custom working directory",
|
||||
)
|
||||
PARSER.add_argument(
|
||||
"--explain",
|
||||
action=BooleanOptionalAction,
|
||||
help="log and explain in detail what PFERD is doing"
|
||||
help="log and explain in detail what PFERD is doing",
|
||||
)
|
||||
PARSER.add_argument(
|
||||
"--status",
|
||||
action=BooleanOptionalAction,
|
||||
help="print status updates while PFERD is crawling"
|
||||
help="print status updates while PFERD is crawling",
|
||||
)
|
||||
PARSER.add_argument(
|
||||
"--report",
|
||||
action=BooleanOptionalAction,
|
||||
help="print a report of all local changes before exiting"
|
||||
help="print a report of all local changes before exiting",
|
||||
)
|
||||
PARSER.add_argument(
|
||||
"--share-cookies",
|
||||
action=BooleanOptionalAction,
|
||||
help="whether crawlers should share cookies where applicable"
|
||||
help="whether crawlers should share cookies where applicable",
|
||||
)
|
||||
PARSER.add_argument(
|
||||
"--show-not-deleted",
|
||||
action=BooleanOptionalAction,
|
||||
help="print messages in status and report when PFERD did not delete a local only file"
|
||||
help="print messages in status and report when PFERD did not delete a local only file",
|
||||
)
|
||||
|
||||
|
||||
def load_default_section(
|
||||
args: argparse.Namespace,
|
||||
parser: configparser.ConfigParser,
|
||||
args: argparse.Namespace,
|
||||
parser: configparser.ConfigParser,
|
||||
) -> None:
|
||||
section = parser[parser.default_section]
|
||||
|
||||
|
||||
@@ -3,7 +3,7 @@ import os
  import sys
  from configparser import ConfigParser, SectionProxy
  from pathlib import Path
- from typing import Any, List, NoReturn, Optional, Tuple
+ from typing import Any, NoReturn, Optional

  from rich.markup import escape

@@ -53,10 +53,10 @@ class Section:
          raise ConfigOptionError(self.s.name, key, desc)

      def invalid_value(
-         self,
-         key: str,
-         value: Any,
-         reason: Optional[str],
+         self,
+         key: str,
+         value: Any,
+         reason: Optional[str],
      ) -> NoReturn:
          if reason is None:
              self.error(key, f"Invalid value {value!r}")

@@ -126,13 +126,13 @@ class Config:
              with open(path, encoding="utf-8") as f:
                  parser.read_file(f, source=str(path))
          except FileNotFoundError:
-             raise ConfigLoadError(path, "File does not exist")
+             raise ConfigLoadError(path, "File does not exist") from None
          except IsADirectoryError:
-             raise ConfigLoadError(path, "That's a directory, not a file")
+             raise ConfigLoadError(path, "That's a directory, not a file") from None
          except PermissionError:
-             raise ConfigLoadError(path, "Insufficient permissions")
+             raise ConfigLoadError(path, "Insufficient permissions") from None
          except UnicodeDecodeError:
-             raise ConfigLoadError(path, "File is not encoded using UTF-8")
+             raise ConfigLoadError(path, "File is not encoded using UTF-8") from None

      def dump(self, path: Optional[Path] = None) -> None:
          """

@@ -150,8 +150,8 @@ class Config:

          try:
              path.parent.mkdir(parents=True, exist_ok=True)
-         except PermissionError:
-             raise ConfigDumpError(path, "Could not create parent directory")
+         except PermissionError as e:
+             raise ConfigDumpError(path, "Could not create parent directory") from e

          try:
              # Ensuring we don't accidentally overwrite any existing files by

@@ -167,16 +167,16 @@ class Config:
                  with open(path, "w", encoding="utf-8") as f:
                      self._parser.write(f)
              else:
-                 raise ConfigDumpError(path, "File already exists")
+                 raise ConfigDumpError(path, "File already exists") from None
          except IsADirectoryError:
-             raise ConfigDumpError(path, "That's a directory, not a file")
-         except PermissionError:
-             raise ConfigDumpError(path, "Insufficient permissions")
+             raise ConfigDumpError(path, "That's a directory, not a file") from None
+         except PermissionError as e:
+             raise ConfigDumpError(path, "Insufficient permissions") from e

      def dump_to_stdout(self) -> None:
          self._parser.write(sys.stdout)

-     def crawl_sections(self) -> List[Tuple[str, SectionProxy]]:
+     def crawl_sections(self) -> list[tuple[str, SectionProxy]]:
          result = []
          for name, proxy in self._parser.items():
              if name.startswith("crawl:"):

@@ -184,7 +184,7 @@ class Config:

          return result

-     def auth_sections(self) -> List[Tuple[str, SectionProxy]]:
+     def auth_sections(self) -> list[tuple[str, SectionProxy]]:
          result = []
          for name, proxy in self._parser.items():
              if name.startswith("auth:"):
@@ -1,5 +1,5 @@
+ from collections.abc import Callable
  from configparser import SectionProxy
- from typing import Callable, Dict

  from ..auth import Authenticator
  from ..config import Config

@@ -8,20 +8,19 @@ from .ilias import IliasWebCrawler, IliasWebCrawlerSection, KitIliasWebCrawler,
  from .kit_ipd_crawler import KitIpdCrawler, KitIpdCrawlerSection
  from .local_crawler import LocalCrawler, LocalCrawlerSection

- CrawlerConstructor = Callable[[
-     str,                       # Name (without the "crawl:" prefix)
-     SectionProxy,              # Crawler's section of global config
-     Config,                    # Global config
-     Dict[str, Authenticator],  # Loaded authenticators by name
- ], Crawler]
+ CrawlerConstructor = Callable[
+     [
+         str,                       # Name (without the "crawl:" prefix)
+         SectionProxy,              # Crawler's section of global config
+         Config,                    # Global config
+         dict[str, Authenticator],  # Loaded authenticators by name
+     ],
+     Crawler,
+ ]

- CRAWLERS: Dict[str, CrawlerConstructor] = {
-     "local": lambda n, s, c, a:
-         LocalCrawler(n, LocalCrawlerSection(s), c),
-     "ilias-web": lambda n, s, c, a:
-         IliasWebCrawler(n, IliasWebCrawlerSection(s), c, a),
-     "kit-ilias-web": lambda n, s, c, a:
-         KitIliasWebCrawler(n, KitIliasWebCrawlerSection(s), c, a),
-     "kit-ipd": lambda n, s, c, a:
-         KitIpdCrawler(n, KitIpdCrawlerSection(s), c),
+ CRAWLERS: dict[str, CrawlerConstructor] = {
+     "local": lambda n, s, c, a: LocalCrawler(n, LocalCrawlerSection(s), c),
+     "ilias-web": lambda n, s, c, a: IliasWebCrawler(n, IliasWebCrawlerSection(s), c, a),
+     "kit-ilias-web": lambda n, s, c, a: KitIliasWebCrawler(n, KitIliasWebCrawlerSection(s), c, a),
+     "kit-ipd": lambda n, s, c, a: KitIpdCrawler(n, KitIpdCrawlerSection(s), c, a),
  }
@@ -1,10 +1,10 @@
  import asyncio
  import os
  from abc import ABC, abstractmethod
- from collections.abc import Awaitable, Coroutine
+ from collections.abc import Awaitable, Callable, Coroutine, Sequence
  from datetime import datetime
  from pathlib import Path, PurePath
- from typing import Any, Callable, Dict, List, Optional, Sequence, Set, Tuple, TypeVar
+ from typing import Any, Optional, TypeVar

  from ..auth import Authenticator
  from ..config import Config, Section

@@ -116,7 +116,7 @@ class CrawlToken(ReusableAsyncContextManager[ProgressBar]):
          return bar


- class DownloadToken(ReusableAsyncContextManager[Tuple[ProgressBar, FileSink]]):
+ class DownloadToken(ReusableAsyncContextManager[tuple[ProgressBar, FileSink]]):
      def __init__(self, limiter: Limiter, fs_token: FileSinkToken, path: PurePath):
          super().__init__()

@@ -128,12 +128,13 @@ class DownloadToken(ReusableAsyncContextManager[Tuple[ProgressBar, FileSink]]):
      def path(self) -> PurePath:
          return self._path

-     async def _on_aenter(self) -> Tuple[ProgressBar, FileSink]:
+     async def _on_aenter(self) -> tuple[ProgressBar, FileSink]:
          await self._stack.enter_async_context(self._limiter.limit_download())
          sink = await self._stack.enter_async_context(self._fs_token)
          # The "Downloaded ..." message is printed in the output dir, not here
-         bar = self._stack.enter_context(log.download_bar("[bold bright_cyan]", "Downloading",
-                                                          fmt_path(self._path)))
+         bar = self._stack.enter_context(
+             log.download_bar("[bold bright_cyan]", "Downloading", fmt_path(self._path))
+         )

          return bar, sink

@@ -149,9 +150,7 @@ class CrawlerSection(Section):
          return self.s.getboolean("skip", fallback=False)

      def output_dir(self, name: str) -> Path:
-         # TODO Use removeprefix() after switching to 3.9
-         if name.startswith("crawl:"):
-             name = name[len("crawl:"):]
+         name = name.removeprefix("crawl:")
          return Path(self.s.get("output_dir", name)).expanduser()

      def redownload(self) -> Redownload:

@@ -206,7 +205,7 @@ class CrawlerSection(Section):
          on_windows = os.name == "nt"
          return self.s.getboolean("windows_paths", fallback=on_windows)

-     def auth(self, authenticators: Dict[str, Authenticator]) -> Authenticator:
+     def auth(self, authenticators: dict[str, Authenticator]) -> Authenticator:
          value = self.s.get("auth")
          if value is None:
              self.missing_value("auth")

@@ -218,10 +217,10 @@ class CrawlerSection(Section):

  class Crawler(ABC):
      def __init__(
-             self,
-             name: str,
-             section: CrawlerSection,
-             config: Config,
+         self,
+         name: str,
+         section: CrawlerSection,
+         config: Config,
      ) -> None:
          """
          Initialize a crawler from its name and its section in the config file.

@@ -258,8 +257,12 @@ class Crawler(ABC):
      def prev_report(self) -> Optional[Report]:
          return self._output_dir.prev_report

+     @property
+     def output_dir(self) -> OutputDirectory:
+         return self._output_dir
+
      @staticmethod
-     async def gather(awaitables: Sequence[Awaitable[Any]]) -> List[Any]:
+     async def gather(awaitables: Sequence[Awaitable[Any]]) -> list[Any]:
          """
          Similar to asyncio.gather. However, in the case of an exception, all
          still running tasks are cancelled and the exception is rethrown.

@@ -290,12 +293,39 @@ class Crawler(ABC):
          log.explain("Answer: Yes")
          return CrawlToken(self._limiter, path)

+     def should_try_download(
+         self,
+         path: PurePath,
+         *,
+         etag_differs: Optional[bool] = None,
+         mtime: Optional[datetime] = None,
+         redownload: Optional[Redownload] = None,
+         on_conflict: Optional[OnConflict] = None,
+     ) -> bool:
+         log.explain_topic(f"Decision: Should Download {fmt_path(path)}")
+
+         if self._transformer.transform(path) is None:
+             log.explain("Answer: No (ignored)")
+             return False
+
+         should_download = self._output_dir.should_try_download(
+             path, etag_differs=etag_differs, mtime=mtime, redownload=redownload, on_conflict=on_conflict
+         )
+         if should_download:
+             log.explain("Answer: Yes")
+             return True
+         else:
+             log.explain("Answer: No")
+             return False
+
      async def download(
-             self,
-             path: PurePath,
-             mtime: Optional[datetime] = None,
-             redownload: Optional[Redownload] = None,
-             on_conflict: Optional[OnConflict] = None,
+         self,
+         path: PurePath,
+         *,
+         etag_differs: Optional[bool] = None,
+         mtime: Optional[datetime] = None,
+         redownload: Optional[Redownload] = None,
+         on_conflict: Optional[OnConflict] = None,
      ) -> Optional[DownloadToken]:
          log.explain_topic(f"Decision: Download {fmt_path(path)}")
          path = self._deduplicator.mark(path)

@@ -307,7 +337,14 @@ class Crawler(ABC):
              log.status("[bold bright_black]", "Ignored", fmt_path(path))
              return None

-         fs_token = await self._output_dir.download(path, transformed_path, mtime, redownload, on_conflict)
+         fs_token = await self._output_dir.download(
+             path,
+             transformed_path,
+             etag_differs=etag_differs,
+             mtime=mtime,
+             redownload=redownload,
+             on_conflict=on_conflict,
+         )
          if fs_token is None:
              log.explain("Answer: No")
              return None

@@ -357,7 +394,7 @@ class Crawler(ABC):
              log.warn("Couldn't find or load old report")
              return

-         seen: Set[PurePath] = set()
+         seen: set[PurePath] = set()
          for known in sorted(self.prev_report.found_paths):
              looking_at = list(reversed(known.parents)) + [known]
              for path in looking_at:
@@ -1,35 +1,39 @@
  import asyncio
  import http.cookies
  import ssl
  from datetime import datetime
  from pathlib import Path, PurePath
- from typing import Any, Dict, List, Optional
+ from typing import Any, Optional

  import aiohttp
  import certifi
  from aiohttp.client import ClientTimeout
+ from bs4 import Tag

  from ..auth import Authenticator
  from ..config import Config
  from ..logging import log
- from ..utils import fmt_real_path
+ from ..utils import fmt_real_path, sanitize_path_name
  from ..version import NAME, VERSION
  from .crawler import Crawler, CrawlerSection

+ ETAGS_CUSTOM_REPORT_VALUE_KEY = "etags"


  class HttpCrawlerSection(CrawlerSection):
      def http_timeout(self) -> float:
-         return self.s.getfloat("http_timeout", fallback=20)
+         return self.s.getfloat("http_timeout", fallback=30)


  class HttpCrawler(Crawler):
      COOKIE_FILE = PurePath(".cookies")

      def __init__(
-             self,
-             name: str,
-             section: HttpCrawlerSection,
-             config: Config,
-             shared_auth: Optional[Authenticator] = None,
+         self,
+         name: str,
+         section: HttpCrawlerSection,
+         config: Config,
+         shared_auth: Optional[Authenticator] = None,
      ) -> None:
          super().__init__(name, section, config)

@@ -39,7 +43,7 @@ class HttpCrawler(Crawler):
          self._http_timeout = section.http_timeout()

          self._cookie_jar_path = self._output_dir.resolve(self.COOKIE_FILE)
-         self._shared_cookie_jar_paths: Optional[List[Path]] = None
+         self._shared_cookie_jar_paths: Optional[list[Path]] = None
          self._shared_auth = shared_auth

          self._output_dir.register_reserved(self.COOKIE_FILE)

@@ -94,7 +98,7 @@ class HttpCrawler(Crawler):
          """
          raise RuntimeError("_authenticate() was called but crawler doesn't provide an implementation")

-     def share_cookies(self, shared: Dict[Authenticator, List[Path]]) -> None:
+     def share_cookies(self, shared: dict[Authenticator, list[Path]]) -> None:
          if not self._shared_auth:
              return

@@ -169,24 +173,102 @@ class HttpCrawler(Crawler):
              log.warn(f"Failed to save cookies to {fmt_real_path(self._cookie_jar_path)}")
              log.warn(str(e))

+     @staticmethod
+     def get_folder_structure_from_heading_hierarchy(file_link: Tag, drop_h1: bool = False) -> PurePath:
+         """
+         Retrieves the hierarchy of headings associated with the give file link and constructs a folder
+         structure from them.
+
+         <h1> level headings usually only appear once and serve as the page title, so they would introduce
+         redundant nesting. To avoid this, <h1> headings are ignored via the drop_h1 parameter.
+         """
+
+         def find_associated_headings(tag: Tag, level: int) -> PurePath:
+             if level == 0 or (level == 1 and drop_h1):
+                 return PurePath()
+
+             level_heading = tag.find_previous(name=f"h{level}")
+
+             if level_heading is None:
+                 return find_associated_headings(tag, level - 1)
+
+             folder_name = sanitize_path_name(level_heading.get_text().strip())
+             return find_associated_headings(level_heading, level - 1) / folder_name
+
+         # start at level <h3> because paragraph-level headings are usually too granular for folder names
+         return find_associated_headings(file_link, 3)
+
+     def _get_previous_etag_from_report(self, path: PurePath) -> Optional[str]:
+         """
+         If available, retrieves the entity tag for a given path which was stored in the previous report.
+         """
+         if not self._output_dir.prev_report:
+             return None
+
+         etags = self._output_dir.prev_report.get_custom_value(ETAGS_CUSTOM_REPORT_VALUE_KEY) or {}
+         return etags.get(str(path))
+
+     def _add_etag_to_report(self, path: PurePath, etag: Optional[str]) -> None:
+         """
+         Adds an entity tag for a given path to the report's custom values.
+         """
+         if not etag:
+             return
+
+         etags = self._output_dir.report.get_custom_value(ETAGS_CUSTOM_REPORT_VALUE_KEY) or {}
+         etags[str(path)] = etag
+         self._output_dir.report.add_custom_value(ETAGS_CUSTOM_REPORT_VALUE_KEY, etags)
+
+     async def _request_resource_version(self, resource_url: str) -> tuple[Optional[str], Optional[datetime]]:
+         """
+         Requests the ETag and Last-Modified headers of a resource via a HEAD request.
+         If no entity tag / modification date can be obtained, the according value will be None.
+         """
+         try:
+             async with self.session.head(resource_url) as resp:
+                 if resp.status != 200:
+                     return None, None
+
+                 etag_header = resp.headers.get("ETag")
+                 last_modified_header = resp.headers.get("Last-Modified")
+                 last_modified = None
+
+                 if last_modified_header:
+                     try:
+                         # https://developer.mozilla.org/en-US/docs/Web/HTTP/Headers/Last-Modified#directives
+                         datetime_format = "%a, %d %b %Y %H:%M:%S GMT"
+                         last_modified = datetime.strptime(last_modified_header, datetime_format)
+                     except ValueError:
+                         # last_modified remains None
+                         pass
+
+                 return etag_header, last_modified
+         except aiohttp.ClientError:
+             return None, None
+
      async def run(self) -> None:
          self._request_count = 0
          self._cookie_jar = aiohttp.CookieJar()
          self._load_cookies()

          async with aiohttp.ClientSession(
-                 headers={"User-Agent": f"{NAME}/{VERSION}"},
-                 cookie_jar=self._cookie_jar,
-                 connector=aiohttp.TCPConnector(ssl=ssl.create_default_context(cafile=certifi.where())),
-                 timeout=ClientTimeout(
-                     # 30 minutes. No download in the history of downloads was longer than 30 minutes.
-                     # This is enough to transfer a 600 MB file over a 3 Mib/s connection.
-                     # Allowing an arbitrary value could be annoying for overnight batch jobs
-                     total=15 * 60,
-                     connect=self._http_timeout,
-                     sock_connect=self._http_timeout,
-                     sock_read=self._http_timeout,
-                 )
+             headers={"User-Agent": f"{NAME}/{VERSION}"},
+             cookie_jar=self._cookie_jar,
+             connector=aiohttp.TCPConnector(ssl=ssl.create_default_context(cafile=certifi.where())),
+             timeout=ClientTimeout(
+                 # 30 minutes. No download in the history of downloads was longer than 30 minutes.
+                 # This is enough to transfer a 600 MB file over a 3 Mib/s connection.
+                 # Allowing an arbitrary value could be annoying for overnight batch jobs
+                 total=15 * 60,
+                 connect=self._http_timeout,
+                 sock_connect=self._http_timeout,
+                 sock_read=self._http_timeout,
+             ),
+             # See https://github.com/aio-libs/aiohttp/issues/6626
+             # Without this aiohttp will mangle the redirect header from Shibboleth, invalidating the
+             # passed signature. Shibboleth will not accept the broken signature and authentication will
+             # fail.
+             requote_redirect_url=False,
          ) as session:
              self.session = session
              try:
@@ -1,5 +1,9 @@
- from .kit_ilias_web_crawler import (IliasWebCrawler, IliasWebCrawlerSection, KitIliasWebCrawler,
-                                     KitIliasWebCrawlerSection)
+ from .kit_ilias_web_crawler import (
+     IliasWebCrawler,
+     IliasWebCrawlerSection,
+     KitIliasWebCrawler,
+     KitIliasWebCrawlerSection,
+ )

  __all__ = [
      "IliasWebCrawler",
||||
@@ -1,5 +1,6 @@
|
||||
import asyncio
|
||||
from typing import Any, Callable, Optional
|
||||
from collections.abc import Callable
|
||||
from typing import Any, Optional
|
||||
|
||||
import aiohttp
|
||||
|
||||
@@ -15,9 +16,9 @@ def _iorepeat(attempts: int, name: str, failure_is_error: bool = False) -> Calla
|
||||
try:
|
||||
return await f(*args, **kwargs)
|
||||
except aiohttp.ContentTypeError: # invalid content type
|
||||
raise CrawlWarning("ILIAS returned an invalid content type")
|
||||
raise CrawlWarning("ILIAS returned an invalid content type") from None
|
||||
except aiohttp.TooManyRedirects:
|
||||
raise CrawlWarning("Got stuck in a redirect loop")
|
||||
raise CrawlWarning("Got stuck in a redirect loop") from None
|
||||
except aiohttp.ClientPayloadError as e: # encoding or not enough bytes
|
||||
last_exception = e
|
||||
except aiohttp.ClientConnectionError as e: # e.g. timeout, disconnect, resolve failed, etc.
|
||||
@@ -25,9 +26,10 @@ def _iorepeat(attempts: int, name: str, failure_is_error: bool = False) -> Calla
|
||||
except asyncio.exceptions.TimeoutError as e: # explicit http timeouts in HttpCrawler
|
||||
last_exception = e
|
||||
log.explain_topic(f"Retrying operation {name}. Retries left: {attempts - 1 - round}")
|
||||
log.explain(f"Last exception: {last_exception!r}")
|
||||
|
||||
if last_exception:
|
||||
message = f"Error in I/O Operation: {last_exception}"
|
||||
message = f"Error in I/O Operation: {last_exception!r}"
|
||||
if failure_is_error:
|
||||
raise CrawlError(message) from last_exception
|
||||
else:
|
||||
|
||||
@@ -1,5 +1,7 @@
+ import dataclasses
+ import re
  from enum import Enum
- from typing import Optional
+ from typing import Optional, cast

  import bs4

@@ -12,7 +14,9 @@ _link_template_fancy = """
  <head>
      <meta charset="UTF-8">
      <title>ILIAS - Link: {{name}}</title>
+     <!-- REPEAT REMOVE START -->
      <meta http-equiv = "refresh" content = "{{redirect_delay}}; url = {{link}}" />
+     <!-- REPEAT REMOVE END -->
  </head>

  <style>

@@ -23,6 +27,8 @@ _link_template_fancy = """
          display: flex;
          align-items: center;
          justify-content: center;
+         flex-direction: column;
+         gap: 4px;
      }
      body {
          padding: 0;

@@ -31,11 +37,16 @@ _link_template_fancy = """
          font-family: "Open Sans", Verdana, Arial, Helvetica, sans-serif;
          height: 100vh;
      }
-     .row {
-         background-color: white;
+     .column {
+         min-width: 500px;
+         max-width: 90vw;
          display: flex;
+         flex-direction: column;
+         row-gap: 5px;
+     }
+     .row {
+         background-color: white;
+         display: flex;
          padding: 1em;
      }
      .logo {

@@ -75,19 +86,23 @@ _link_template_fancy = """
      }
  </style>
  <body class="center-flex">
      <div class="row">
          <div class="logo center-flex">
              <svg xmlns="http://www.w3.org/2000/svg" width="24" height="24" viewBox="0 0 24 24">
<path d="M12 0c-6.627 0-12 5.373-12 12s5.373 12 12 12 12-5.373 12-12-5.373-12-12-12zm9.567 9.098c-.059-.058-.127-.108-.206-.138-.258-.101-1.35.603-1.515.256-.108-.231-.327.148-.578.008-.121-.067-.459-.52-.611-.465-.312.112.479.974.694 1.087.203-.154.86-.469 1.002-.039.271.812-.745 1.702-1.264 2.171-.775.702-.63-.454-1.159-.86-.277-.213-.274-.667-.555-.824-.125-.071-.7-.732-.694-.821l-.017.167c-.095.072-.297-.27-.319-.325 0 .298.485.772.646 1.011.273.409.42 1.005.756 1.339.179.18.866.923 1.045.908l.921-.437c.649.154-1.531 3.237-1.738 3.619-.171.321.139 1.112.114 1.49-.029.437-.374.579-.7.817-.35.255-.268.752-.562.934-.521.321-.897 1.366-1.639 1.361-.219-.001-1.151.364-1.273.007-.095-.258-.223-.455-.356-.71-.131-.25-.015-.51-.175-.731-.11-.154-.479-.502-.513-.684-.002-.157.118-.632.283-.715.231-.118.044-.462.016-.663-.048-.357-.27-.652-.535-.859-.393-.302-.189-.542-.098-.974 0-.206-.126-.476-.402-.396-.57.166-.396-.445-.812-.417-.299.021-.543.211-.821.295-.349.104-.707-.083-1.053-.126-1.421-.179-1.885-1.804-1.514-2.976.037-.192-.115-.547-.048-.696.159-.352.485-.752.768-1.021.16-.152.365-.113.553-.231.29-.182.294-.558.578-.789.404-.328.956-.321 1.482-.392.281-.037 1.35-.268 1.518-.06 0 .039.193.611-.019.578.438.023 1.061.756 1.476.585.213-.089.135-.744.573-.427.265.19 1.45.275 1.696.07.152-.125.236-.939.053-1.031.117.116-.618.125-.686.099-.122-.044-.235.115-.43.025.117.055-.651-.358-.22-.674-.181.132-.349-.037-.544.109-.135.109.062.181-.13.277-.305.155-.535-.53-.649-.607-.118-.077-1.024-.713-.777-.298l.797.793c-.04.026-.209-.289-.209-.059.053-.136.02.585-.105.35-.056-.09.091-.14.006-.271 0-.085-.23-.169-.275-.228-.126-.157-.462-.502-.644-.585-.05-.024-.771.088-.832.111-.071.099-.131.203-.181.314-.149.055-.29.127-.423.216l-.159.356c-.068.061-.772.294-.776.303.03-.076-.492-.172-.457-.324.038-.167.215-.687.169-.877-.048-.199 1.085.287 1.158-.238.029-.227.047-.492-.316-.531.069.008.702-.249.807-.364.148-.169.486-.447.731-.447.286 0 .225-.417.356-.622.133.053-.071.38.088.512-.01-.104.45.057.494.033.105-.056.691-.023.601-.299-.101-.28.052-.197.183-.255-.02.008.248-.458.363-.456-.104-.089-.398.112-.516.103-.308-.024-.177-.525-.061-.672.09-.116-.246-.258-.25-.036-.006.332-.314.633-.243 1.075.109.666-.743-.161-.816-.115-.283.172-.515-.216-.368-.449.149-.238.51-.226.659-.48.104-.179.227-.389.388-.524.541-.454.689-.091 1.229-.042.526.048.178.125.105.327-.07.192.289.261.413.1.071-.092.232-.326.301-.499.07-.175.578-.2.527-.365 2.72 1.148 4.827 3.465 5.694 6.318zm-11.113-3.779l.068-.087.073-.019c.042-.034.086-.118.151-.104.043.009.146.095.111.148-.037.054-.066-.049-.081.101-.018.169-.188.167-.313.222-.087.037-.175-.018-.09-.104l.088-.108-.007-.049zm.442.245c.046-.045.138-.008.151-.094.014-.084.078-.178-.008-.335-.022-.042.116-.082.051-.137l-.109.032s.155-.668.364-.366l-.089.103c.135.134.172.47.215.687.127.066.324.078.098.192.117-.02-.618.314-.715.178-.072-.083.317-.139.307-.173-.004-.011-.317-.02-.265-.087zm1.43-3.547l-.356.326c-.36.298-1.28.883-1.793.705-.524-.18-1.647.667-1.826.673-.067.003.002-.641.36-.689-.141.021.993-.575 1.185-.805.678-.146 1.381-.227 2.104-.227l.326.017zm-5.086 1.19c.07.082.278.092-.026.288-.183.11-.377.809-.548.809-.51.223-.542-.439-1.109.413-.078.115-.395.158-.644.236.685-.688 1.468-1.279 2.327-1.746zm-5.24 8.793c0-.541.055-1.068.139-1.586l.292.185c.113.135.113.719.169.911.139.482.484.751.748 1.19.155.261.414.923.332 1.197.109-.179 1.081.824 1.259 1.033.418.492.74 1.088.061 1.574-.219.158.334 1.14.049 
1.382l-.365.094c-.225.138-.235.397-.166.631-1.562-1.765-2.518-4.076-2.518-6.611zm14.347-5.823c.083-.01-.107.167-.107.167.033.256.222.396.581.527.437.157.038.455-.213.385-.139-.039-.854-.255-.879.025 0 .167-.679.001-.573-.175.073-.119.05-.387.186-.562.193-.255.38-.116.386.032-.001.394.398-.373.619-.399z"/>
|
||||
</svg>
|
||||
</div>
|
||||
<div class="tile">
|
||||
<div class="top-row">
|
||||
<a href="{{link}}">{{name}}</a>
|
||||
<div class="column">
|
||||
<!-- REPEAT START -->
|
||||
<div class="row">
|
||||
<div class="logo center-flex">
|
||||
<svg xmlns="http://www.w3.org/2000/svg" width="24" height="24" viewBox="0 0 24 24">
|
||||
<path d="M12 0c-6.627 0-12 5.373-12 12s5.373 12 12 12 12-5.373 12-12-5.373-12-12-12zm9.567 9.098c-.059-.058-.127-.108-.206-.138-.258-.101-1.35.603-1.515.256-.108-.231-.327.148-.578.008-.121-.067-.459-.52-.611-.465-.312.112.479.974.694 1.087.203-.154.86-.469 1.002-.039.271.812-.745 1.702-1.264 2.171-.775.702-.63-.454-1.159-.86-.277-.213-.274-.667-.555-.824-.125-.071-.7-.732-.694-.821l-.017.167c-.095.072-.297-.27-.319-.325 0 .298.485.772.646 1.011.273.409.42 1.005.756 1.339.179.18.866.923 1.045.908l.921-.437c.649.154-1.531 3.237-1.738 3.619-.171.321.139 1.112.114 1.49-.029.437-.374.579-.7.817-.35.255-.268.752-.562.934-.521.321-.897 1.366-1.639 1.361-.219-.001-1.151.364-1.273.007-.095-.258-.223-.455-.356-.71-.131-.25-.015-.51-.175-.731-.11-.154-.479-.502-.513-.684-.002-.157.118-.632.283-.715.231-.118.044-.462.016-.663-.048-.357-.27-.652-.535-.859-.393-.302-.189-.542-.098-.974 0-.206-.126-.476-.402-.396-.57.166-.396-.445-.812-.417-.299.021-.543.211-.821.295-.349.104-.707-.083-1.053-.126-1.421-.179-1.885-1.804-1.514-2.976.037-.192-.115-.547-.048-.696.159-.352.485-.752.768-1.021.16-.152.365-.113.553-.231.29-.182.294-.558.578-.789.404-.328.956-.321 1.482-.392.281-.037 1.35-.268 1.518-.06 0 .039.193.611-.019.578.438.023 1.061.756 1.476.585.213-.089.135-.744.573-.427.265.19 1.45.275 1.696.07.152-.125.236-.939.053-1.031.117.116-.618.125-.686.099-.122-.044-.235.115-.43.025.117.055-.651-.358-.22-.674-.181.132-.349-.037-.544.109-.135.109.062.181-.13.277-.305.155-.535-.53-.649-.607-.118-.077-1.024-.713-.777-.298l.797.793c-.04.026-.209-.289-.209-.059.053-.136.02.585-.105.35-.056-.09.091-.14.006-.271 0-.085-.23-.169-.275-.228-.126-.157-.462-.502-.644-.585-.05-.024-.771.088-.832.111-.071.099-.131.203-.181.314-.149.055-.29.127-.423.216l-.159.356c-.068.061-.772.294-.776.303.03-.076-.492-.172-.457-.324.038-.167.215-.687.169-.877-.048-.199 1.085.287 1.158-.238.029-.227.047-.492-.316-.531.069.008.702-.249.807-.364.148-.169.486-.447.731-.447.286 0 .225-.417.356-.622.133.053-.071.38.088.512-.01-.104.45.057.494.033.105-.056.691-.023.601-.299-.101-.28.052-.197.183-.255-.02.008.248-.458.363-.456-.104-.089-.398.112-.516.103-.308-.024-.177-.525-.061-.672.09-.116-.246-.258-.25-.036-.006.332-.314.633-.243 1.075.109.666-.743-.161-.816-.115-.283.172-.515-.216-.368-.449.149-.238.51-.226.659-.48.104-.179.227-.389.388-.524.541-.454.689-.091 1.229-.042.526.048.178.125.105.327-.07.192.289.261.413.1.071-.092.232-.326.301-.499.07-.175.578-.2.527-.365 2.72 1.148 4.827 3.465 5.694 6.318zm-11.113-3.779l.068-.087.073-.019c.042-.034.086-.118.151-.104.043.009.146.095.111.148-.037.054-.066-.049-.081.101-.018.169-.188.167-.313.222-.087.037-.175-.018-.09-.104l.088-.108-.007-.049zm.442.245c.046-.045.138-.008.151-.094.014-.084.078-.178-.008-.335-.022-.042.116-.082.051-.137l-.109.032s.155-.668.364-.366l-.089.103c.135.134.172.47.215.687.127.066.324.078.098.192.117-.02-.618.314-.715.178-.072-.083.317-.139.307-.173-.004-.011-.317-.02-.265-.087zm1.43-3.547l-.356.326c-.36.298-1.28.883-1.793.705-.524-.18-1.647.667-1.826.673-.067.003.002-.641.36-.689-.141.021.993-.575 1.185-.805.678-.146 1.381-.227 2.104-.227l.326.017zm-5.086 1.19c.07.082.278.092-.026.288-.183.11-.377.809-.548.809-.51.223-.542-.439-1.109.413-.078.115-.395.158-.644.236.685-.688 1.468-1.279 2.327-1.746zm-5.24 8.793c0-.541.055-1.068.139-1.586l.292.185c.113.135.113.719.169.911.139.482.484.751.748 1.19.155.261.414.923.332 1.197.109-.179 1.081.824 1.259 1.033.418.492.74 1.088.061 1.574-.219.158.334 1.14.049 
1.382l-.365.094c-.225.138-.235.397-.166.631-1.562-1.765-2.518-4.076-2.518-6.611zm14.347-5.823c.083-.01-.107.167-.107.167.033.256.222.396.581.527.437.157.038.455-.213.385-.139-.039-.854-.255-.879.025 0 .167-.679.001-.573-.175.073-.119.05-.387.186-.562.193-.255.38-.116.386.032-.001.394.398-.373.619-.399z"/>
|
||||
</svg>
|
||||
</div>
|
||||
<div class="bottom-row">{{description}}</div>
|
||||
<div class="tile">
|
||||
<div class="top-row">
|
||||
<a href="{{link}}">{{name}}</a>
|
||||
</div>
|
||||
<div class="bottom-row">{{description}}</div>
|
||||
</div>
|
||||
<div class="menu-button center-flex"> ⯆ </div>
|
||||
</div>
|
||||
<div class="menu-button center-flex"> ⯆ </div>
|
||||
<!-- REPEAT END -->
|
||||
</div>
|
||||
</body>
|
||||
</html>
|
||||
@@ -96,6 +111,7 @@ _link_template_fancy = """
|
||||
_link_template_internet_shortcut = """
|
||||
[InternetShortcut]
|
||||
URL={{link}}
|
||||
Desc={{description}}
|
||||
""".strip()
|
||||
|
||||
_learning_module_template = """
|
||||
@@ -126,6 +142,88 @@ _learning_module_template = """
|
||||
</html>
|
||||
"""
|
||||
|
||||
_forum_thread_template = """
|
||||
<!DOCTYPE html>
|
||||
<html lang="en">
|
||||
<head>
|
||||
<meta charset="UTF-8">
|
||||
<title>ILIAS - Forum: {{name}}</title>
|
||||
<style>
|
||||
* {
|
||||
box-sizing: border-box;
|
||||
}
|
||||
body {
|
||||
font-family: 'Open Sans', Verdana, Arial, Helvetica, sans-serif;
|
||||
padding: 8px;
|
||||
}
|
||||
ul, ol, p {
|
||||
margin: 1.2em 0;
|
||||
}
|
||||
p {
|
||||
margin-top: 8px;
|
||||
margin-bottom: 8px;
|
||||
}
|
||||
a {
|
||||
color: #00876c;
|
||||
text-decoration: none;
|
||||
cursor: pointer;
|
||||
}
|
||||
a:hover {
|
||||
text-decoration: underline;
|
||||
}
|
||||
body > p:first-child > span:first-child {
|
||||
font-size: 1.6em;
|
||||
}
|
||||
body > p:first-child > span:first-child ~ span.default {
|
||||
display: inline-block;
|
||||
font-size: 1.2em;
|
||||
padding-bottom: 8px;
|
||||
}
|
||||
.ilFrmPostContent {
|
||||
margin-top: 8px;
|
||||
max-width: 64em;
|
||||
}
|
||||
.ilFrmPostContent > *:first-child {
|
||||
margin-top: 0px;
|
||||
}
|
||||
.ilFrmPostTitle {
|
||||
margin-top: 24px;
|
||||
color: #00876c;
|
||||
font-weight: bold;
|
||||
}
|
||||
#ilFrmPostList {
|
||||
list-style: none;
|
||||
padding-left: 0;
|
||||
}
|
||||
li.ilFrmPostRow {
|
||||
padding: 3px 0 3px 3px;
|
||||
margin-bottom: 24px;
|
||||
border-left: 6px solid #dddddd;
|
||||
}
|
||||
.ilFrmPostRow > div {
|
||||
display: flex;
|
||||
}
|
||||
.ilFrmPostImage img {
|
||||
margin: 0 !important;
|
||||
padding: 6px 9px 9px 6px;
|
||||
}
|
||||
.ilUserIcon {
|
||||
width: 115px;
|
||||
}
|
||||
.small {
|
||||
text-decoration: none;
|
||||
font-size: 0.75rem;
|
||||
color: #6f6f6f;
|
||||
}
|
||||
</style>
|
||||
</head>
|
||||
<body>
|
||||
{{heading}}
|
||||
{{content}}
|
||||
</body>
|
||||
</html>
|
||||
""".strip() # noqa: E501 line too long
|
||||
|
||||
|
||||
def learning_module_template(body: bs4.Tag, name: str, prev: Optional[str], next: Optional[str]) -> str:
|
||||
# Seems to be comments, ignore those.
|
||||
@@ -139,13 +237,13 @@ def learning_module_template(body: bs4.Tag, name: str, prev: Optional[str], next
|
||||
</div>
|
||||
"""
|
||||
if prev and body.select_one(".ilc_page_lnav_LeftNavigation"):
|
||||
text = body.select_one(".ilc_page_lnav_LeftNavigation").getText().strip()
|
||||
text = cast(bs4.Tag, body.select_one(".ilc_page_lnav_LeftNavigation")).get_text().strip()
|
||||
left = f'<a href="{prev}">{text}</a>'
|
||||
else:
|
||||
left = "<span></span>"
|
||||
|
||||
if next and body.select_one(".ilc_page_rnav_RightNavigation"):
|
||||
text = body.select_one(".ilc_page_rnav_RightNavigation").getText().strip()
|
||||
text = cast(bs4.Tag, body.select_one(".ilc_page_rnav_RightNavigation")).get_text().strip()
|
||||
right = f'<a href="{next}">{text}</a>'
|
||||
else:
|
||||
right = "<span></span>"
|
||||
@@ -156,12 +254,29 @@ def learning_module_template(body: bs4.Tag, name: str, prev: Optional[str], next
|
||||
)
|
||||
|
||||
if bot_nav := body.select_one(".ilc_page_bnav_BottomNavigation"):
|
||||
bot_nav.replace_with(soupify(nav_template.replace(
|
||||
"{{left}}", left).replace("{{right}}", right).encode())
|
||||
bot_nav.replace_with(
|
||||
soupify(nav_template.replace("{{left}}", left).replace("{{right}}", right).encode())
|
||||
)
|
||||
|
||||
body = body.prettify()
|
||||
return _learning_module_template.replace("{{body}}", body).replace("{{name}}", name)
|
||||
body_str = body.prettify()
|
||||
return _learning_module_template.replace("{{body}}", body_str).replace("{{name}}", name)
|
||||
|
||||
|
||||
def forum_thread_template(name: str, url: str, heading: bs4.Tag, content: bs4.Tag) -> str:
|
||||
if title := heading.find(name="b"):
|
||||
title.wrap(bs4.Tag(name="a", attrs={"href": url}))
|
||||
return (
|
||||
_forum_thread_template.replace("{{name}}", name)
|
||||
.replace("{{heading}}", heading.prettify())
|
||||
.replace("{{content}}", content.prettify())
|
||||
)
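
A rough usage sketch, using a made-up forum post; `forum_thread_template` only wraps the thread title in a link and stitches the prettified tags into the static HTML shell above:

```python
import bs4

soup = bs4.BeautifulSoup(
    "<p><b>Question about sheet 3</b></p>"
    '<div class="ilFrmPostContent">Does task 2 allow recursion?</div>',
    "html.parser",
)
heading = soup.find("p")
content = soup.find("div")

html = forum_thread_template(
    "Sheet 3", "https://ilias.example.org/goto.php?target=frm_123", heading, content
)
# `html` is a standalone page: the thread title links back to the forum,
# followed by the prettified post content.
```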
|
||||
|
||||
|
||||
@dataclasses.dataclass
|
||||
class LinkData:
|
||||
name: str
|
||||
url: str
|
||||
description: str
|
||||
|
||||
|
||||
class Links(Enum):
|
||||
@@ -181,6 +296,9 @@ class Links(Enum):
|
||||
return None
|
||||
raise ValueError("Missing switch case")
|
||||
|
||||
def collection_as_one(self) -> bool:
|
||||
return self == Links.FANCY
|
||||
|
||||
def extension(self) -> Optional[str]:
|
||||
if self == Links.FANCY:
|
||||
return ".html"
|
||||
@@ -192,10 +310,47 @@ class Links(Enum):
|
||||
return None
|
||||
raise ValueError("Missing switch case")
|
||||
|
||||
def interpolate(self, redirect_delay: int, collection_name: str, links: list[LinkData]) -> str:
|
||||
template = self.template()
|
||||
if template is None:
|
||||
raise ValueError("Cannot interpolate ignored links")
|
||||
|
||||
if len(links) == 1:
|
||||
link = links[0]
|
||||
content = template
|
||||
content = content.replace("{{link}}", link.url)
|
||||
content = content.replace("{{name}}", link.name)
|
||||
content = content.replace("{{description}}", link.description)
|
||||
content = content.replace("{{redirect_delay}}", str(redirect_delay))
|
||||
return content
|
||||
if self == Links.PLAINTEXT or self == Links.INTERNET_SHORTCUT:
|
||||
return "\n".join(f"{link.url}" for link in links)
|
||||
|
||||
# All others get coerced to fancy
|
||||
content = cast(str, Links.FANCY.template())
|
||||
repeated_content = cast(
|
||||
re.Match[str], re.search(r"<!-- REPEAT START -->([\s\S]+)<!-- REPEAT END -->", content)
|
||||
).group(1)
|
||||
|
||||
parts = []
|
||||
for link in links:
|
||||
instance = repeated_content
|
||||
instance = instance.replace("{{link}}", link.url)
|
||||
instance = instance.replace("{{name}}", link.name)
|
||||
instance = instance.replace("{{description}}", link.description)
|
||||
instance = instance.replace("{{redirect_delay}}", str(redirect_delay))
|
||||
parts.append(instance)
|
||||
|
||||
content = content.replace(repeated_content, "\n".join(parts))
|
||||
content = content.replace("{{name}}", collection_name)
|
||||
content = re.sub(r"<!-- REPEAT REMOVE START -->[\s\S]+<!-- REPEAT REMOVE END -->", "", content)
|
||||
|
||||
return content
|
||||
|
||||
@staticmethod
|
||||
def from_string(string: str) -> "Links":
|
||||
try:
|
||||
return Links(string)
|
||||
except ValueError:
|
||||
raise ValueError("must be one of 'ignore', 'plaintext',"
|
||||
" 'html', 'internet-shortcut'")
|
||||
options = [f"'{option.value}'" for option in Links]
|
||||
raise ValueError(f"must be one of {', '.join(options)}") from None
|
||||
|
||||
@@ -1,3 +1,5 @@
|
||||
from typing import cast
|
||||
|
||||
from bs4 import BeautifulSoup, Comment, Tag
|
||||
|
||||
_STYLE_TAG_CONTENT = """
|
||||
@@ -12,6 +14,13 @@ _STYLE_TAG_CONTENT = """
|
||||
font-weight: bold;
|
||||
}
|
||||
|
||||
.row-flex {
|
||||
display: flex;
|
||||
}
|
||||
.row-flex-wrap {
|
||||
flex-wrap: wrap;
|
||||
}
|
||||
|
||||
.accordion-head {
|
||||
background-color: #f5f7fa;
|
||||
padding: 0.5rem 0;
|
||||
@@ -30,6 +39,10 @@ _STYLE_TAG_CONTENT = """
|
||||
margin: 0.5rem 0;
|
||||
}
|
||||
|
||||
img {
|
||||
background-color: white;
|
||||
}
|
||||
|
||||
body {
|
||||
padding: 1em;
|
||||
grid-template-columns: 1fr min(60rem, 90%) 1fr;
|
||||
@@ -47,12 +60,11 @@ _ARTICLE_WORTHY_CLASSES = [
|
||||
def insert_base_markup(soup: BeautifulSoup) -> BeautifulSoup:
|
||||
head = soup.new_tag("head")
|
||||
soup.insert(0, head)
|
||||
# Force UTF-8 encoding
|
||||
head.append(soup.new_tag("meta", charset="utf-8"))
|
||||
|
||||
simplecss_link: Tag = soup.new_tag("link")
|
||||
# <link rel="stylesheet" href="https://cdn.simplecss.org/simple.css">
|
||||
simplecss_link["rel"] = "stylesheet"
|
||||
simplecss_link["href"] = "https://cdn.simplecss.org/simple.css"
|
||||
head.append(simplecss_link)
|
||||
head.append(soup.new_tag("link", rel="stylesheet", href="https://cdn.simplecss.org/simple.css"))
|
||||
|
||||
# Basic style tags for compat
|
||||
style: Tag = soup.new_tag("style")
|
||||
@@ -63,18 +75,18 @@ def insert_base_markup(soup: BeautifulSoup) -> BeautifulSoup:
|
||||
|
||||
|
||||
def clean(soup: BeautifulSoup) -> BeautifulSoup:
|
||||
for block in soup.find_all(class_=lambda x: x in _ARTICLE_WORTHY_CLASSES):
|
||||
for block in cast(list[Tag], soup.find_all(class_=lambda x: x in _ARTICLE_WORTHY_CLASSES)):
|
||||
block.name = "article"
|
||||
|
||||
for block in soup.find_all("h3"):
|
||||
for block in cast(list[Tag], soup.find_all("h3")):
|
||||
block.name = "div"
|
||||
|
||||
for block in soup.find_all("h1"):
|
||||
for block in cast(list[Tag], soup.find_all("h1")):
|
||||
block.name = "h3"
|
||||
|
||||
for block in soup.find_all(class_="ilc_va_ihcap_VAccordIHeadCap"):
|
||||
for block in cast(list[Tag], soup.find_all(class_="ilc_va_ihcap_VAccordIHeadCap")):
|
||||
block.name = "h3"
|
||||
block["class"] += ["accordion-head"]
|
||||
block["class"] += ["accordion-head"] # type: ignore
|
||||
|
||||
for dummy in soup.select(".ilc_text_block_Standard.ilc_Paragraph"):
|
||||
children = list(dummy.children)
|
||||
@@ -85,7 +97,12 @@ def clean(soup: BeautifulSoup) -> BeautifulSoup:
|
||||
if isinstance(type(children[0]), Comment):
|
||||
dummy.decompose()
|
||||
|
||||
for hrule_imposter in soup.find_all(class_="ilc_section_Separator"):
|
||||
# Delete video figures, as they can not be internalized anyway
|
||||
for video in soup.select(".ilc_media_cont_MediaContainerHighlighted .ilPageVideo"):
|
||||
if figure := video.find_parent("figure"):
|
||||
figure.decompose()
|
||||
|
||||
for hrule_imposter in cast(list[Tag], soup.find_all(class_="ilc_section_Separator")):
|
||||
hrule_imposter.insert(0, soup.new_tag("hr"))
|
||||
|
||||
return soup
|
||||
|
||||
File diff suppressed because it is too large
File diff suppressed because it is too large
@@ -1,23 +1,14 @@
|
||||
from typing import Any, Dict, Optional, Union
|
||||
from typing import Literal
|
||||
|
||||
import aiohttp
|
||||
import yarl
|
||||
from bs4 import BeautifulSoup
|
||||
|
||||
from ...auth import Authenticator, TfaAuthenticator
|
||||
from ...auth import Authenticator
|
||||
from ...config import Config
|
||||
from ...logging import log
|
||||
from ...utils import soupify
|
||||
from ..crawler import CrawlError, CrawlWarning
|
||||
from .async_helper import _iorepeat
|
||||
from .ilias_web_crawler import IliasWebCrawler, IliasWebCrawlerSection
|
||||
|
||||
TargetType = Union[str, int]
|
||||
from .shibboleth_login import ShibbolethLogin
|
||||
|
||||
_ILIAS_URL = "https://ilias.studium.kit.edu"
|
||||
|
||||
|
||||
class KitShibbolethBackgroundLoginSuccessful():
|
||||
class KitShibbolethBackgroundLoginSuccessful:
|
||||
pass
|
||||
|
||||
|
||||
@@ -25,19 +16,8 @@ class KitIliasWebCrawlerSection(IliasWebCrawlerSection):
|
||||
def base_url(self) -> str:
|
||||
return _ILIAS_URL
|
||||
|
||||
def client_id(self) -> str:
|
||||
# KIT ILIAS uses the Shibboleth service for authentication. There's no
|
||||
# use for a client id.
|
||||
return "unused"
|
||||
|
||||
def tfa_auth(self, authenticators: Dict[str, Authenticator]) -> Optional[Authenticator]:
|
||||
value: Optional[str] = self.s.get("tfa_auth")
|
||||
if value is None:
|
||||
return None
|
||||
auth = authenticators.get(value)
|
||||
if auth is None:
|
||||
self.invalid_value("tfa_auth", value, "No such auth section exists")
|
||||
return auth
|
||||
def login(self) -> Literal["shibboleth"]:
|
||||
return "shibboleth"
|
||||
|
||||
|
||||
class KitIliasWebCrawler(IliasWebCrawler):
|
||||
@@ -46,184 +26,12 @@ class KitIliasWebCrawler(IliasWebCrawler):
|
||||
name: str,
|
||||
section: KitIliasWebCrawlerSection,
|
||||
config: Config,
|
||||
authenticators: Dict[str, Authenticator]
|
||||
authenticators: dict[str, Authenticator],
|
||||
):
|
||||
super().__init__(name, section, config, authenticators)
|
||||
|
||||
self._shibboleth_login = KitShibbolethLogin(
|
||||
self._shibboleth_login = ShibbolethLogin(
|
||||
_ILIAS_URL,
|
||||
self._auth,
|
||||
section.tfa_auth(authenticators),
|
||||
)
|
||||
|
||||
# We repeat this as the login method in shibboleth doesn't handle I/O errors.
|
||||
# Shibboleth is quite reliable as well, the repeat is likely not critical here.
|
||||
@_iorepeat(3, "Login", failure_is_error=True)
|
||||
async def _authenticate(self) -> None:
|
||||
await self._shibboleth_login.login(self.session)
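
`_iorepeat` is defined elsewhere in PFERD and not shown in this diff; purely to illustrate the shape of such a decorator, a hypothetical retry wrapper could look roughly like the following (names, exception types, and backoff are assumptions, not the actual implementation):

```python
import asyncio
import functools
from collections.abc import Awaitable, Callable
from typing import Any, TypeVar

T = TypeVar("T")

def retry_io(attempts: int, name: str) -> Callable[[Callable[..., Awaitable[T]]], Callable[..., Awaitable[T]]]:
    """Hypothetical stand-in for PFERD's _iorepeat: rerun a coroutine on I/O errors."""
    def decorator(fn: Callable[..., Awaitable[T]]) -> Callable[..., Awaitable[T]]:
        @functools.wraps(fn)
        async def wrapper(*args: Any, **kwargs: Any) -> T:
            last_exc: OSError | None = None
            for attempt in range(attempts):
                try:
                    return await fn(*args, **kwargs)
                except OSError as e:  # network hiccups; the real decorator knows aiohttp's exceptions
                    last_exc = e
                    await asyncio.sleep(attempt + 1)  # simple linear backoff
            raise RuntimeError(f"{name} failed after {attempts} attempts") from last_exc
        return wrapper
    return decorator
```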
|
||||
|
||||
|
||||
class KitShibbolethLogin:
|
||||
"""
|
||||
Login via KIT's shibboleth system.
|
||||
"""
|
||||
|
||||
def __init__(self, authenticator: Authenticator, tfa_authenticator: Optional[Authenticator]) -> None:
|
||||
self._auth = authenticator
|
||||
self._tfa_auth = tfa_authenticator
|
||||
|
||||
async def login(self, sess: aiohttp.ClientSession) -> None:
|
||||
"""
|
||||
Performs the ILIAS Shibboleth authentication dance and saves the login
|
||||
cookies it receives.
|
||||
|
||||
This function should only be called whenever it is detected that you're
|
||||
not logged in. The cookies obtained should be good for a few minutes,
|
||||
maybe even an hour or two.
|
||||
"""
|
||||
|
||||
# Equivalent: Click on "Mit KIT-Account anmelden" button in
|
||||
# https://ilias.studium.kit.edu/login.php
|
||||
url = f"{_ILIAS_URL}/shib_login.php"
|
||||
data = {
|
||||
"sendLogin": "1",
|
||||
"idp_selection": "https://idp.scc.kit.edu/idp/shibboleth",
|
||||
"il_target": "",
|
||||
"home_organization_selection": "Weiter",
|
||||
}
|
||||
soup: Union[BeautifulSoup, KitShibbolethBackgroundLoginSuccessful] = await _shib_post(sess, url, data)
|
||||
|
||||
if isinstance(soup, KitShibbolethBackgroundLoginSuccessful):
|
||||
return
|
||||
|
||||
# Attempt to login using credentials, if necessary
|
||||
while not self._login_successful(soup):
|
||||
# Searching the form here so that this fails before asking for
|
||||
# credentials rather than after asking.
|
||||
form = soup.find("form", {"class": "full content", "method": "post"})
|
||||
action = form["action"]
|
||||
|
||||
csrf_token = form.find("input", {"name": "csrf_token"})["value"]
|
||||
|
||||
# Equivalent: Enter credentials in
|
||||
# https://idp.scc.kit.edu/idp/profile/SAML2/Redirect/SSO
|
||||
url = "https://idp.scc.kit.edu" + action
|
||||
username, password = await self._auth.credentials()
|
||||
data = {
|
||||
"_eventId_proceed": "",
|
||||
"j_username": username,
|
||||
"j_password": password,
|
||||
"csrf_token": csrf_token
|
||||
}
|
||||
soup = await _post(sess, url, data)
|
||||
|
||||
if soup.find(id="attributeRelease"):
|
||||
raise CrawlError(
|
||||
"ILIAS Shibboleth entitlements changed! "
|
||||
"Please log in once in your browser and review them"
|
||||
)
|
||||
|
||||
if self._tfa_required(soup):
|
||||
soup = await self._authenticate_tfa(sess, soup)
|
||||
|
||||
if not self._login_successful(soup):
|
||||
self._auth.invalidate_credentials()
|
||||
|
||||
# Equivalent: Being redirected via JS automatically
|
||||
# (or clicking "Continue" if you have JS disabled)
|
||||
relay_state = soup.find("input", {"name": "RelayState"})
|
||||
saml_response = soup.find("input", {"name": "SAMLResponse"})
|
||||
url = f"{_ILIAS_URL}/Shibboleth.sso/SAML2/POST"
|
||||
data = { # using the info obtained in the while loop above
|
||||
"RelayState": relay_state["value"],
|
||||
"SAMLResponse": saml_response["value"],
|
||||
}
|
||||
await sess.post(url, data=data)
|
||||
|
||||
async def _authenticate_tfa(
|
||||
self,
|
||||
session: aiohttp.ClientSession,
|
||||
soup: BeautifulSoup
|
||||
) -> BeautifulSoup:
|
||||
if not self._tfa_auth:
|
||||
self._tfa_auth = TfaAuthenticator("ilias-anon-tfa")
|
||||
|
||||
tfa_token = await self._tfa_auth.password()
|
||||
|
||||
# Searching the form here so that this fails before asking for
|
||||
# credentials rather than after asking.
|
||||
form = soup.find("form", {"method": "post"})
|
||||
action = form["action"]
|
||||
csrf_token = form.find("input", {"name": "csrf_token"})["value"]
|
||||
|
||||
# Equivalent: Enter token in
|
||||
# https://idp.scc.kit.edu/idp/profile/SAML2/Redirect/SSO
|
||||
url = "https://idp.scc.kit.edu" + action
|
||||
data = {
|
||||
"_eventId_proceed": "",
|
||||
"j_tokenNumber": tfa_token,
|
||||
"csrf_token": csrf_token
|
||||
}
|
||||
return await _post(session, url, data)
|
||||
|
||||
@staticmethod
|
||||
def _login_successful(soup: BeautifulSoup) -> bool:
|
||||
relay_state = soup.find("input", {"name": "RelayState"})
|
||||
saml_response = soup.find("input", {"name": "SAMLResponse"})
|
||||
return relay_state is not None and saml_response is not None
|
||||
|
||||
@staticmethod
|
||||
def _tfa_required(soup: BeautifulSoup) -> bool:
|
||||
return soup.find(id="j_tokenNumber") is not None
|
||||
|
||||
|
||||
async def _post(session: aiohttp.ClientSession, url: str, data: Any) -> BeautifulSoup:
|
||||
async with session.post(url, data=data) as response:
|
||||
return soupify(await response.read())
|
||||
|
||||
|
||||
async def _shib_post(
|
||||
session: aiohttp.ClientSession,
|
||||
url: str,
|
||||
data: Any
|
||||
) -> Union[BeautifulSoup, KitShibbolethBackgroundLoginSuccessful]:
|
||||
"""
|
||||
aiohttp unescapes '/' and ':' in URL query parameters, which is not RFC compliant and is rejected
|
||||
by Shibboleth. Thanks a lot. So now we unroll the requests manually, parse location headers and
|
||||
build encoded URL objects ourselves... Who thought mangling the location header was a good idea?
|
||||
"""
|
||||
log.explain_topic("Shib login POST")
|
||||
async with session.post(url, data=data, allow_redirects=False) as response:
|
||||
location = response.headers.get("location")
|
||||
log.explain(f"Got location {location!r}")
|
||||
if not location:
|
||||
raise CrawlWarning(f"Login failed (1), no location header present at {url}")
|
||||
correct_url = yarl.URL(location, encoded=True)
|
||||
log.explain(f"Corrected location to {correct_url!r}")
|
||||
|
||||
if str(correct_url).startswith(_ILIAS_URL):
|
||||
log.explain("ILIAS recognized our shib token and logged us in in the background, returning")
|
||||
return KitShibbolethBackgroundLoginSuccessful()
|
||||
|
||||
async with session.get(correct_url, allow_redirects=False) as response:
|
||||
location = response.headers.get("location")
|
||||
log.explain(f"Redirected to {location!r} with status {response.status}")
|
||||
# If shib still has a valid session, it will directly respond to the request
|
||||
if location is None:
|
||||
log.explain("Shib recognized us, returning its response directly")
|
||||
return soupify(await response.read())
|
||||
|
||||
as_yarl = yarl.URL(response.url)
|
||||
# Probably not needed anymore, but might catch a few weird situations with a nicer message
|
||||
if not location or not as_yarl.host:
|
||||
raise CrawlWarning(f"Login failed (2), no location header present at {correct_url}")
|
||||
|
||||
correct_url = yarl.URL.build(
|
||||
scheme=as_yarl.scheme,
|
||||
host=as_yarl.host,
|
||||
path=location,
|
||||
encoded=True
|
||||
)
|
||||
log.explain(f"Corrected location to {correct_url!r}")
|
||||
|
||||
async with session.get(correct_url, allow_redirects=False) as response:
|
||||
return soupify(await response.read())
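
The yarl trick the docstring above describes can be seen in isolation; in this sketch (IdP URL made up), the default parser may re-interpret percent-encoded ':' and '/' inside the query, while `encoded=True` keeps the header value byte-for-byte:

```python
import yarl

# A location header as an IdP might send it, with an encoded return URL in the query.
location = "https://idp.example.org/idp/profile/SAML2/Redirect/SSO?target=https%3A%2F%2Filias.example.org%2Fshib"

requoted = yarl.URL(location)                # yarl may unescape ':' and '/' here...
verbatim = yarl.URL(location, encoded=True)  # ...but leaves the string untouched here.

print(str(requoted))
print(str(verbatim))  # identical to `location`
```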
|
||||
|
||||
PFERD/crawl/ilias/shibboleth_login.py (new file, 127 lines)
@@ -0,0 +1,127 @@
|
||||
from typing import Any, Optional, cast
|
||||
|
||||
import aiohttp
|
||||
import yarl
|
||||
from bs4 import BeautifulSoup, Tag
|
||||
|
||||
from ...auth import Authenticator, TfaAuthenticator
|
||||
from ...logging import log
|
||||
from ...utils import soupify
|
||||
from ..crawler import CrawlError
|
||||
|
||||
|
||||
class ShibbolethLogin:
|
||||
"""
|
||||
Login via shibboleth system.
|
||||
"""
|
||||
|
||||
def __init__(
|
||||
self, ilias_url: str, authenticator: Authenticator, tfa_authenticator: Optional[Authenticator]
|
||||
) -> None:
|
||||
self._ilias_url = ilias_url
|
||||
self._auth = authenticator
|
||||
self._tfa_auth = tfa_authenticator
|
||||
|
||||
async def login(self, sess: aiohttp.ClientSession) -> None:
|
||||
"""
|
||||
Performs the ILIAS Shibboleth authentication dance and saves the login
|
||||
cookies it receives.
|
||||
|
||||
This function should only be called whenever it is detected that you're
|
||||
not logged in. The cookies obtained should be good for a few minutes,
|
||||
maybe even an hour or two.
|
||||
"""
|
||||
|
||||
# Equivalent: Click on "Mit KIT-Account anmelden" button in
|
||||
# https://ilias.studium.kit.edu/login.php
|
||||
url = f"{self._ilias_url}/shib_login.php"
|
||||
async with sess.get(url) as response:
|
||||
shib_url = response.url
|
||||
if str(shib_url).startswith(self._ilias_url):
|
||||
log.explain("ILIAS recognized our shib token and logged us in in the background, returning")
|
||||
return
|
||||
soup: BeautifulSoup = soupify(await response.read())
|
||||
|
||||
# Attempt to login using credentials, if necessary
|
||||
while not self._login_successful(soup):
|
||||
# Searching the form here so that this fails before asking for
|
||||
# credentials rather than after asking.
|
||||
form = cast(Tag, soup.find("form", {"method": "post"}))
|
||||
action = cast(str, form["action"])
|
||||
|
||||
# Equivalent: Enter credentials in
|
||||
# https://idp.scc.kit.edu/idp/profile/SAML2/Redirect/SSO
|
||||
url = str(shib_url.origin()) + action
|
||||
username, password = await self._auth.credentials()
|
||||
data = {
|
||||
"_eventId_proceed": "",
|
||||
"j_username": username,
|
||||
"j_password": password,
|
||||
"fudis_web_authn_assertion_input": "",
|
||||
}
|
||||
if csrf_token_input := form.find("input", {"name": "csrf_token"}):
|
||||
data["csrf_token"] = csrf_token_input["value"] # type: ignore
|
||||
soup = await _post(sess, url, data)
|
||||
|
||||
if soup.find(id="attributeRelease"):
|
||||
raise CrawlError(
|
||||
"ILIAS Shibboleth entitlements changed! "
|
||||
"Please log in once in your browser and review them"
|
||||
)
|
||||
|
||||
if self._tfa_required(soup):
|
||||
soup = await self._authenticate_tfa(sess, soup, shib_url)
|
||||
|
||||
if not self._login_successful(soup):
|
||||
self._auth.invalidate_credentials()
|
||||
|
||||
# Equivalent: Being redirected via JS automatically
|
||||
# (or clicking "Continue" if you have JS disabled)
|
||||
relay_state = cast(Tag, soup.find("input", {"name": "RelayState"}))
|
||||
saml_response = cast(Tag, soup.find("input", {"name": "SAMLResponse"}))
|
||||
url = cast(str, cast(Tag, soup.find("form", {"method": "post"}))["action"])
|
||||
data = { # using the info obtained in the while loop above
|
||||
"RelayState": cast(str, relay_state["value"]),
|
||||
"SAMLResponse": cast(str, saml_response["value"]),
|
||||
}
|
||||
await sess.post(cast(str, url), data=data)
|
||||
|
||||
async def _authenticate_tfa(
|
||||
self, session: aiohttp.ClientSession, soup: BeautifulSoup, shib_url: yarl.URL
|
||||
) -> BeautifulSoup:
|
||||
if not self._tfa_auth:
|
||||
self._tfa_auth = TfaAuthenticator("ilias-anon-tfa")
|
||||
|
||||
tfa_token = await self._tfa_auth.password()
|
||||
|
||||
# Searching the form here so that this fails before asking for
|
||||
# credentials rather than after asking.
|
||||
form = cast(Tag, soup.find("form", {"method": "post"}))
|
||||
action = cast(str, form["action"])
|
||||
|
||||
# Equivalent: Enter token in
|
||||
# https://idp.scc.kit.edu/idp/profile/SAML2/Redirect/SSO
|
||||
url = str(shib_url.origin()) + action
|
||||
username, password = await self._auth.credentials()
|
||||
data = {
|
||||
"_eventId_proceed": "",
|
||||
"fudis_otp_input": tfa_token,
|
||||
}
|
||||
if csrf_token_input := form.find("input", {"name": "csrf_token"}):
|
||||
data["csrf_token"] = csrf_token_input["value"] # type: ignore
|
||||
return await _post(session, url, data)
|
||||
|
||||
@staticmethod
|
||||
def _login_successful(soup: BeautifulSoup) -> bool:
|
||||
relay_state = soup.find("input", {"name": "RelayState"})
|
||||
saml_response = soup.find("input", {"name": "SAMLResponse"})
|
||||
return relay_state is not None and saml_response is not None
|
||||
|
||||
@staticmethod
|
||||
def _tfa_required(soup: BeautifulSoup) -> bool:
|
||||
return soup.find(id="fudiscr-form") is not None
|
||||
|
||||
|
||||
async def _post(session: aiohttp.ClientSession, url: str, data: Any) -> BeautifulSoup:
|
||||
async with session.post(url, data=data) as response:
|
||||
return soupify(await response.read())
|
||||
@@ -1,16 +1,21 @@
|
||||
import os
|
||||
import re
|
||||
from collections.abc import Awaitable, Generator, Iterable
|
||||
from dataclasses import dataclass
|
||||
from datetime import datetime
|
||||
from pathlib import PurePath
|
||||
from typing import Awaitable, List, Optional, Pattern, Set, Tuple, Union
|
||||
from re import Pattern
|
||||
from typing import Any, Optional, Union, cast
|
||||
from urllib.parse import urljoin
|
||||
|
||||
import aiohttp
|
||||
from bs4 import BeautifulSoup, Tag
|
||||
|
||||
from ..auth import Authenticator
|
||||
from ..config import Config
|
||||
from ..logging import ProgressBar, log
|
||||
from ..output_dir import FileSink
|
||||
from ..utils import soupify
|
||||
from ..utils import sanitize_path_name, soupify
|
||||
from .crawler import CrawlError
|
||||
from .http_crawler import HttpCrawler, HttpCrawlerSection
|
||||
|
||||
@@ -30,126 +35,157 @@ class KitIpdCrawlerSection(HttpCrawlerSection):
|
||||
regex = self.s.get("link_regex", r"^.*?[^/]+\.(pdf|zip|c|cpp|java)$")
|
||||
return re.compile(regex)
|
||||
|
||||
def basic_auth(self, authenticators: dict[str, Authenticator]) -> Optional[Authenticator]:
|
||||
value: Optional[str] = self.s.get("auth")
|
||||
if value is None:
|
||||
return None
|
||||
auth = authenticators.get(value)
|
||||
if auth is None:
|
||||
self.invalid_value("auth", value, "No such auth section exists")
|
||||
return auth
|
||||
|
||||
@dataclass(unsafe_hash=True)
|
||||
|
||||
@dataclass
|
||||
class KitIpdFile:
|
||||
name: str
|
||||
url: str
|
||||
|
||||
def explain(self) -> None:
|
||||
log.explain(f"File {self.name!r} (href={self.url!r})")
|
||||
|
||||
|
||||
@dataclass
|
||||
class KitIpdFolder:
|
||||
name: str
|
||||
files: List[KitIpdFile]
|
||||
entries: list[Union[KitIpdFile, "KitIpdFolder"]]
|
||||
|
||||
def explain(self) -> None:
|
||||
log.explain_topic(f"Folder {self.name!r}")
|
||||
for file in self.files:
|
||||
log.explain(f"File {file.name!r} (href={file.url!r})")
|
||||
|
||||
def __hash__(self) -> int:
|
||||
return self.name.__hash__()
|
||||
for entry in self.entries:
|
||||
entry.explain()
|
||||
|
||||
|
||||
class KitIpdCrawler(HttpCrawler):
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
name: str,
|
||||
section: KitIpdCrawlerSection,
|
||||
config: Config,
|
||||
self,
|
||||
name: str,
|
||||
section: KitIpdCrawlerSection,
|
||||
config: Config,
|
||||
authenticators: dict[str, Authenticator],
|
||||
):
|
||||
super().__init__(name, section, config)
|
||||
self._url = section.target()
|
||||
self._file_regex = section.link_regex()
|
||||
self._authenticator = section.basic_auth(authenticators)
|
||||
self._basic_auth: Optional[aiohttp.BasicAuth] = None
|
||||
|
||||
async def _run(self) -> None:
|
||||
if self._authenticator:
|
||||
username, password = await self._authenticator.credentials()
|
||||
self._basic_auth = aiohttp.BasicAuth(username, password)
|
||||
|
||||
maybe_cl = await self.crawl(PurePath("."))
|
||||
if not maybe_cl:
|
||||
return
|
||||
|
||||
tasks: List[Awaitable[None]] = []
|
||||
tasks: list[Awaitable[None]] = []
|
||||
|
||||
async with maybe_cl:
|
||||
for item in await self._fetch_items():
|
||||
item.explain()
|
||||
if isinstance(item, KitIpdFolder):
|
||||
tasks.append(self._crawl_folder(item))
|
||||
tasks.append(self._crawl_folder(PurePath("."), item))
|
||||
else:
|
||||
# Orphan files are placed in the root folder
|
||||
tasks.append(self._download_file(PurePath("."), item))
|
||||
log.explain_topic(f"Orphan file {item.name!r} (href={item.url!r})")
|
||||
log.explain("Attributing it to root folder")
|
||||
# do this here to at least be sequential and not parallel (rate limiting is hard, as the
|
||||
# crawl abstraction does not hold for these requests)
|
||||
etag, mtime = await self._request_resource_version(item.url)
|
||||
tasks.append(self._download_file(PurePath("."), item, etag, mtime))
|
||||
|
||||
await self.gather(tasks)
|
||||
|
||||
async def _crawl_folder(self, folder: KitIpdFolder) -> None:
|
||||
path = PurePath(folder.name)
|
||||
async def _crawl_folder(self, parent: PurePath, folder: KitIpdFolder) -> None:
|
||||
path = parent / sanitize_path_name(folder.name)
|
||||
if not await self.crawl(path):
|
||||
return
|
||||
|
||||
tasks = [self._download_file(path, file) for file in folder.files]
|
||||
tasks = []
|
||||
for entry in folder.entries:
|
||||
if isinstance(entry, KitIpdFolder):
|
||||
tasks.append(self._crawl_folder(path, entry))
|
||||
else:
|
||||
# do this here to at least be sequential and not parallel (rate limiting is hard, as the crawl
|
||||
# abstraction does not hold for these requests)
|
||||
etag, mtime = await self._request_resource_version(entry.url)
|
||||
tasks.append(self._download_file(path, entry, etag, mtime))
|
||||
|
||||
await self.gather(tasks)
|
||||
|
||||
async def _download_file(self, parent: PurePath, file: KitIpdFile) -> None:
|
||||
element_path = parent / file.name
|
||||
maybe_dl = await self.download(element_path)
|
||||
async def _download_file(
|
||||
self, parent: PurePath, file: KitIpdFile, etag: Optional[str], mtime: Optional[datetime]
|
||||
) -> None:
|
||||
element_path = parent / sanitize_path_name(file.name)
|
||||
|
||||
prev_etag = self._get_previous_etag_from_report(element_path)
|
||||
etag_differs = None if prev_etag is None else prev_etag != etag
|
||||
|
||||
maybe_dl = await self.download(element_path, etag_differs=etag_differs, mtime=mtime)
|
||||
if not maybe_dl:
|
||||
# keep storing the known file's etag
|
||||
if prev_etag:
|
||||
self._add_etag_to_report(element_path, prev_etag)
|
||||
return
|
||||
|
||||
async with maybe_dl as (bar, sink):
|
||||
await self._stream_from_url(file.url, sink, bar)
|
||||
await self._stream_from_url(file.url, element_path, sink, bar)
|
||||
|
||||
async def _fetch_items(self) -> Set[Union[KitIpdFile, KitIpdFolder]]:
|
||||
async def _fetch_items(self) -> Iterable[KitIpdFile | KitIpdFolder]:
|
||||
page, url = await self.get_page()
|
||||
elements: List[Tag] = self._find_file_links(page)
|
||||
items: Set[Union[KitIpdFile, KitIpdFolder]] = set()
|
||||
elements: list[Tag] = self._find_file_links(page)
|
||||
|
||||
# do not add unnecessary nesting for a single <h1> heading
|
||||
drop_h1: bool = len(page.find_all(name="h1")) <= 1
|
||||
|
||||
folder_tree: KitIpdFolder = KitIpdFolder(".", [])
|
||||
for element in elements:
|
||||
folder_label = self._find_folder_label(element)
|
||||
if folder_label:
|
||||
folder = self._extract_folder(folder_label, url)
|
||||
if folder not in items:
|
||||
items.add(folder)
|
||||
folder.explain()
|
||||
else:
|
||||
file = self._extract_file(element, url)
|
||||
items.add(file)
|
||||
log.explain_topic(f"Orphan file {file.name!r} (href={file.url!r})")
|
||||
log.explain("Attributing it to root folder")
|
||||
parent = HttpCrawler.get_folder_structure_from_heading_hierarchy(element, drop_h1)
|
||||
file = self._extract_file(element, url)
|
||||
|
||||
return items
|
||||
current_folder: KitIpdFolder = folder_tree
|
||||
for folder_name in parent.parts:
|
||||
# helps the type checker to verify that current_folder is indeed a folder
|
||||
def subfolders() -> Generator[KitIpdFolder, Any, None]:
|
||||
return (entry for entry in current_folder.entries if isinstance(entry, KitIpdFolder))
|
||||
|
||||
def _extract_folder(self, folder_tag: Tag, url: str) -> KitIpdFolder:
|
||||
files: List[KitIpdFile] = []
|
||||
name = folder_tag.getText().strip()
|
||||
if not any(entry.name == folder_name for entry in subfolders()):
|
||||
current_folder.entries.append(KitIpdFolder(folder_name, []))
|
||||
current_folder = next(entry for entry in subfolders() if entry.name == folder_name)
|
||||
|
||||
container: Tag = folder_tag.findNextSibling(name="table")
|
||||
for link in self._find_file_links(container):
|
||||
files.append(self._extract_file(link, url))
|
||||
current_folder.entries.append(file)
|
||||
|
||||
return KitIpdFolder(name, files)
|
||||
|
||||
@staticmethod
|
||||
def _find_folder_label(file_link: Tag) -> Optional[Tag]:
|
||||
enclosing_table: Tag = file_link.findParent(name="table")
|
||||
if enclosing_table is None:
|
||||
return None
|
||||
return enclosing_table.findPreviousSibling(name=re.compile("^h[1-6]$"))
|
||||
return folder_tree.entries
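
Condensed into a standalone sketch (simplified types, invented file names), the tree-building walk above boils down to creating one nested folder per path component before appending the file:

```python
from dataclasses import dataclass, field
from pathlib import PurePath

@dataclass
class Folder:
    name: str
    entries: list["Folder | str"] = field(default_factory=list)

def place(root: Folder, parent: PurePath, file_name: str) -> None:
    # Walk (and lazily create) one subfolder per path component, then append the file.
    current = root
    for part in parent.parts:
        sub = next((e for e in current.entries if isinstance(e, Folder) and e.name == part), None)
        if sub is None:
            sub = Folder(part)
            current.entries.append(sub)
        current = sub
    current.entries.append(file_name)

root = Folder(".")
place(root, PurePath("Lecture/Slides"), "chapter01.pdf")
place(root, PurePath("Lecture/Slides"), "chapter02.pdf")
place(root, PurePath("."), "orphan.pdf")  # "." has no parts, so this lands in the root folder
```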
|
||||
|
||||
def _extract_file(self, link: Tag, url: str) -> KitIpdFile:
|
||||
url = self._abs_url_from_link(url, link)
|
||||
name = os.path.basename(url)
|
||||
return KitIpdFile(name, url)
|
||||
|
||||
def _find_file_links(self, tag: Union[Tag, BeautifulSoup]) -> List[Tag]:
|
||||
return tag.findAll(name="a", attrs={"href": self._file_regex})
|
||||
def _find_file_links(self, tag: Tag | BeautifulSoup) -> list[Tag]:
|
||||
return cast(list[Tag], tag.find_all(name="a", attrs={"href": self._file_regex}))
|
||||
|
||||
def _abs_url_from_link(self, url: str, link_tag: Tag) -> str:
|
||||
return urljoin(url, link_tag.get("href"))
|
||||
return urljoin(url, cast(str, link_tag.get("href")))
|
||||
|
||||
async def _stream_from_url(self, url: str, sink: FileSink, bar: ProgressBar) -> None:
|
||||
async with self.session.get(url, allow_redirects=False) as resp:
|
||||
async def _stream_from_url(self, url: str, path: PurePath, sink: FileSink, bar: ProgressBar) -> None:
|
||||
async with self.session.get(url, allow_redirects=False, auth=self._basic_auth) as resp:
|
||||
if resp.status == 403:
|
||||
raise CrawlError("Received a 403. Are you within the KIT network/VPN?")
|
||||
if resp.status == 401:
|
||||
raise CrawlError("Received a 401. Do you maybe need credentials?")
|
||||
if resp.status >= 400:
|
||||
raise CrawlError(f"Received HTTP {resp.status} when trying to download {url!r}")
|
||||
|
||||
if resp.content_length:
|
||||
bar.set_total(resp.content_length)
|
||||
|
||||
@@ -159,8 +195,10 @@ class KitIpdCrawler(HttpCrawler):
|
||||
|
||||
sink.done()
|
||||
|
||||
async def get_page(self) -> Tuple[BeautifulSoup, str]:
|
||||
async with self.session.get(self._url) as request:
|
||||
self._add_etag_to_report(path, resp.headers.get("ETag"))
|
||||
|
||||
async def get_page(self) -> tuple[BeautifulSoup, str]:
|
||||
async with self.session.get(self._url, auth=self._basic_auth) as request:
|
||||
# The web page for Algorithmen für Routenplanung contains some
|
||||
# weird comments that beautifulsoup doesn't parse correctly. This
|
||||
# hack enables those pages to be crawled, and should hopefully not
|
||||
|
||||
@@ -18,31 +18,28 @@ class LocalCrawlerSection(CrawlerSection):
|
||||
def crawl_delay(self) -> float:
|
||||
value = self.s.getfloat("crawl_delay", fallback=0.0)
|
||||
if value < 0:
|
||||
self.invalid_value("crawl_delay", value,
|
||||
"Must not be negative")
|
||||
self.invalid_value("crawl_delay", value, "Must not be negative")
|
||||
return value
|
||||
|
||||
def download_delay(self) -> float:
|
||||
value = self.s.getfloat("download_delay", fallback=0.0)
|
||||
if value < 0:
|
||||
self.invalid_value("download_delay", value,
|
||||
"Must not be negative")
|
||||
self.invalid_value("download_delay", value, "Must not be negative")
|
||||
return value
|
||||
|
||||
def download_speed(self) -> Optional[int]:
|
||||
value = self.s.getint("download_speed")
|
||||
if value is not None and value <= 0:
|
||||
self.invalid_value("download_speed", value,
|
||||
"Must be greater than 0")
|
||||
self.invalid_value("download_speed", value, "Must be greater than 0")
|
||||
return value
|
||||
|
||||
|
||||
class LocalCrawler(Crawler):
|
||||
def __init__(
|
||||
self,
|
||||
name: str,
|
||||
section: LocalCrawlerSection,
|
||||
config: Config,
|
||||
self,
|
||||
name: str,
|
||||
section: LocalCrawlerSection,
|
||||
config: Config,
|
||||
):
|
||||
super().__init__(name, section, config)
|
||||
|
||||
@@ -74,10 +71,12 @@ class LocalCrawler(Crawler):
|
||||
tasks = []
|
||||
|
||||
async with cl:
|
||||
await asyncio.sleep(random.uniform(
|
||||
0.5 * self._crawl_delay,
|
||||
self._crawl_delay,
|
||||
))
|
||||
await asyncio.sleep(
|
||||
random.uniform(
|
||||
0.5 * self._crawl_delay,
|
||||
self._crawl_delay,
|
||||
)
|
||||
)
|
||||
|
||||
for child in path.iterdir():
|
||||
pure_child = cl.path / child.name
|
||||
@@ -93,10 +92,12 @@ class LocalCrawler(Crawler):
|
||||
return
|
||||
|
||||
async with dl as (bar, sink):
|
||||
await asyncio.sleep(random.uniform(
|
||||
0.5 * self._download_delay,
|
||||
self._download_delay,
|
||||
))
|
||||
await asyncio.sleep(
|
||||
random.uniform(
|
||||
0.5 * self._download_delay,
|
||||
self._download_delay,
|
||||
)
|
||||
)
|
||||
|
||||
bar.set_total(stat.st_size)
|
||||
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
from collections.abc import Iterator
|
||||
from pathlib import PurePath
|
||||
from typing import Iterator, Set
|
||||
|
||||
from .logging import log
|
||||
from .utils import fmt_path
|
||||
@@ -16,15 +16,34 @@ def name_variants(path: PurePath) -> Iterator[PurePath]:
|
||||
class Deduplicator:
|
||||
FORBIDDEN_CHARS = '<>:"/\\|?*' + "".join([chr(i) for i in range(0, 32)])
|
||||
FORBIDDEN_NAMES = {
|
||||
"CON", "PRN", "AUX", "NUL",
|
||||
"COM1", "COM2", "COM3", "COM4", "COM5", "COM6", "COM7", "COM8", "COM9",
|
||||
"LPT1", "LPT2", "LPT3", "LPT4", "LPT5", "LPT6", "LPT7", "LPT8", "LPT9",
|
||||
"CON",
|
||||
"PRN",
|
||||
"AUX",
|
||||
"NUL",
|
||||
"COM1",
|
||||
"COM2",
|
||||
"COM3",
|
||||
"COM4",
|
||||
"COM5",
|
||||
"COM6",
|
||||
"COM7",
|
||||
"COM8",
|
||||
"COM9",
|
||||
"LPT1",
|
||||
"LPT2",
|
||||
"LPT3",
|
||||
"LPT4",
|
||||
"LPT5",
|
||||
"LPT6",
|
||||
"LPT7",
|
||||
"LPT8",
|
||||
"LPT9",
|
||||
}
|
||||
|
||||
def __init__(self, windows_paths: bool) -> None:
|
||||
self._windows_paths = windows_paths
|
||||
|
||||
self._known: Set[PurePath] = set()
|
||||
self._known: set[PurePath] = set()
|
||||
|
||||
def _add(self, path: PurePath) -> None:
|
||||
self._known.add(path)
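
For context on why these names are special, a small sketch of the check such a deduplicator needs on Windows (the helper below is illustrative, not PFERD's actual logic):

```python
from pathlib import PurePath

FORBIDDEN_NAMES = {"CON", "PRN", "AUX", "NUL",
                   *(f"COM{i}" for i in range(1, 10)),
                   *(f"LPT{i}" for i in range(1, 10))}

def forbidden_on_windows(path: PurePath) -> bool:
    # Windows reserves these device names in every directory, with or without an
    # extension: "CON", "con.txt" and "COM1.pdf" are all invalid file names.
    stem = path.name.split(".", maxsplit=1)[0]
    return stem.upper() in FORBIDDEN_NAMES

print(forbidden_on_windows(PurePath("exercises/CON.txt")))     # True
print(forbidden_on_windows(PurePath("exercises/sheet1.txt")))  # False
```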
|
||||
|
||||
@@ -1,8 +1,9 @@
|
||||
import asyncio
|
||||
import time
|
||||
from collections.abc import AsyncIterator
|
||||
from contextlib import asynccontextmanager
|
||||
from dataclasses import dataclass
|
||||
from typing import AsyncIterator, Optional
|
||||
from typing import Optional
|
||||
|
||||
|
||||
@dataclass
|
||||
@@ -12,12 +13,7 @@ class Slot:
|
||||
|
||||
|
||||
class Limiter:
|
||||
def __init__(
|
||||
self,
|
||||
task_limit: int,
|
||||
download_limit: int,
|
||||
task_delay: float
|
||||
):
|
||||
def __init__(self, task_limit: int, download_limit: int, task_delay: float):
|
||||
if task_limit <= 0:
|
||||
raise ValueError("task limit must be at least 1")
|
||||
if download_limit <= 0:
|
||||
|
||||
@@ -1,16 +1,23 @@
|
||||
import asyncio
|
||||
import sys
|
||||
import traceback
|
||||
from contextlib import asynccontextmanager, contextmanager
|
||||
# TODO In Python 3.9 and above, ContextManager is deprecated
|
||||
from typing import AsyncIterator, ContextManager, Iterator, List, Optional
|
||||
from collections.abc import AsyncIterator, Iterator
|
||||
from contextlib import AbstractContextManager, asynccontextmanager, contextmanager
|
||||
from typing import Any, Optional
|
||||
|
||||
from rich.console import Console, Group
|
||||
from rich.live import Live
|
||||
from rich.markup import escape
|
||||
from rich.panel import Panel
|
||||
from rich.progress import (BarColumn, DownloadColumn, Progress, TaskID, TextColumn, TimeRemainingColumn,
|
||||
TransferSpeedColumn)
|
||||
from rich.progress import (
|
||||
BarColumn,
|
||||
DownloadColumn,
|
||||
Progress,
|
||||
TaskID,
|
||||
TextColumn,
|
||||
TimeRemainingColumn,
|
||||
TransferSpeedColumn,
|
||||
)
|
||||
from rich.table import Column
|
||||
|
||||
|
||||
@@ -54,7 +61,7 @@ class Log:
|
||||
self._showing_progress = False
|
||||
self._progress_suspended = False
|
||||
self._lock = asyncio.Lock()
|
||||
self._lines: List[str] = []
|
||||
self._lines: list[str] = []
|
||||
|
||||
# Whether different parts of the output are enabled or disabled
|
||||
self.output_explain = False
|
||||
@@ -115,7 +122,7 @@ class Log:
|
||||
for line in self._lines:
|
||||
self.print(line)
|
||||
|
||||
def print(self, text: str) -> None:
|
||||
def print(self, text: Any) -> None:
|
||||
"""
|
||||
Print a normal message. Allows markup.
|
||||
"""
|
||||
@@ -177,10 +184,14 @@ class Log:
|
||||
# Our print function doesn't take types other than strings, but the
|
||||
# underlying rich.print function does. This call is a special case
|
||||
# anyways, and we're calling it internally, so this should be fine.
|
||||
self.print(Panel.fit("""
|
||||
self.print(
|
||||
Panel.fit(
|
||||
"""
|
||||
Please copy your program output and send it to the PFERD maintainers, either
|
||||
directly or as a GitHub issue: https://github.com/Garmelon/PFERD/issues/new
|
||||
""".strip())) # type: ignore
|
||||
""".strip()
|
||||
)
|
||||
)
|
||||
|
||||
def explain_topic(self, text: str) -> None:
|
||||
"""
|
||||
@@ -237,10 +248,10 @@ directly or as a GitHub issue: https://github.com/Garmelon/PFERD/issues/new
|
||||
|
||||
@contextmanager
|
||||
def _bar(
|
||||
self,
|
||||
progress: Progress,
|
||||
description: str,
|
||||
total: Optional[float],
|
||||
self,
|
||||
progress: Progress,
|
||||
description: str,
|
||||
total: Optional[float],
|
||||
) -> Iterator[ProgressBar]:
|
||||
if total is None:
|
||||
# Indeterminate progress bar
|
||||
@@ -256,12 +267,12 @@ directly or as a GitHub issue: https://github.com/Garmelon/PFERD/issues/new
|
||||
self._update_live()
|
||||
|
||||
def crawl_bar(
|
||||
self,
|
||||
style: str,
|
||||
action: str,
|
||||
text: str,
|
||||
total: Optional[float] = None,
|
||||
) -> ContextManager[ProgressBar]:
|
||||
self,
|
||||
style: str,
|
||||
action: str,
|
||||
text: str,
|
||||
total: Optional[float] = None,
|
||||
) -> AbstractContextManager[ProgressBar]:
|
||||
"""
|
||||
Allows markup in the "style" argument which will be applied to the
|
||||
"action" string.
|
||||
@@ -272,12 +283,12 @@ directly or as a GitHub issue: https://github.com/Garmelon/PFERD/issues/new
|
||||
return self._bar(self._crawl_progress, description, total)
|
||||
|
||||
def download_bar(
|
||||
self,
|
||||
style: str,
|
||||
action: str,
|
||||
text: str,
|
||||
total: Optional[float] = None,
|
||||
) -> ContextManager[ProgressBar]:
|
||||
self,
|
||||
style: str,
|
||||
action: str,
|
||||
text: str,
|
||||
total: Optional[float] = None,
|
||||
) -> AbstractContextManager[ProgressBar]:
|
||||
"""
|
||||
Allows markup in the "style" argument which will be applied to the
|
||||
"action" string.
|
||||
|
||||
@@ -4,12 +4,13 @@ import os
|
||||
import random
|
||||
import shutil
|
||||
import string
|
||||
from contextlib import contextmanager
|
||||
from collections.abc import Iterator
|
||||
from contextlib import contextmanager, suppress
|
||||
from dataclasses import dataclass
|
||||
from datetime import datetime
|
||||
from enum import Enum
|
||||
from pathlib import Path, PurePath
|
||||
from typing import BinaryIO, Iterator, Optional, Tuple
|
||||
from typing import BinaryIO, Optional
|
||||
|
||||
from .logging import log
|
||||
from .report import Report, ReportLoadError
|
||||
@@ -35,8 +36,7 @@ class Redownload(Enum):
|
||||
try:
|
||||
return Redownload(string)
|
||||
except ValueError:
|
||||
raise ValueError("must be one of 'never', 'never-smart',"
|
||||
" 'always', 'always-smart'")
|
||||
raise ValueError("must be one of 'never', 'never-smart', 'always', 'always-smart'") from None
|
||||
|
||||
|
||||
class OnConflict(Enum):
|
||||
@@ -51,12 +51,15 @@ class OnConflict(Enum):
|
||||
try:
|
||||
return OnConflict(string)
|
||||
except ValueError:
|
||||
raise ValueError("must be one of 'prompt', 'local-first',"
|
||||
" 'remote-first', 'no-delete', 'no-delete-prompt-overwrite'")
|
||||
raise ValueError(
|
||||
"must be one of 'prompt', 'local-first',"
|
||||
" 'remote-first', 'no-delete', 'no-delete-prompt-overwrite'"
|
||||
) from None
|
||||
|
||||
|
||||
@dataclass
|
||||
class Heuristics:
|
||||
etag_differs: Optional[bool]
|
||||
mtime: Optional[datetime]
|
||||
|
||||
|
||||
@@ -95,13 +98,13 @@ class FileSinkToken(ReusableAsyncContextManager[FileSink]):
|
||||
# download handed back to the OutputDirectory.
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
output_dir: "OutputDirectory",
|
||||
remote_path: PurePath,
|
||||
path: PurePath,
|
||||
local_path: Path,
|
||||
heuristics: Heuristics,
|
||||
on_conflict: OnConflict,
|
||||
self,
|
||||
output_dir: "OutputDirectory",
|
||||
remote_path: PurePath,
|
||||
path: PurePath,
|
||||
local_path: Path,
|
||||
heuristics: Heuristics,
|
||||
on_conflict: OnConflict,
|
||||
):
|
||||
super().__init__()
|
||||
|
||||
@@ -117,15 +120,17 @@ class FileSinkToken(ReusableAsyncContextManager[FileSink]):
|
||||
sink = FileSink(file)
|
||||
|
||||
async def after_download() -> None:
|
||||
await self._output_dir._after_download(DownloadInfo(
|
||||
self._remote_path,
|
||||
self._path,
|
||||
self._local_path,
|
||||
tmp_path,
|
||||
self._heuristics,
|
||||
self._on_conflict,
|
||||
sink.is_done(),
|
||||
))
|
||||
await self._output_dir._after_download(
|
||||
DownloadInfo(
|
||||
self._remote_path,
|
||||
self._path,
|
||||
self._local_path,
|
||||
tmp_path,
|
||||
self._heuristics,
|
||||
self._on_conflict,
|
||||
sink.is_done(),
|
||||
)
|
||||
)
|
||||
|
||||
self._stack.push_async_callback(after_download)
|
||||
self._stack.enter_context(file)
|
||||
@@ -137,10 +142,10 @@ class OutputDirectory:
|
||||
REPORT_FILE = PurePath(".report")
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
root: Path,
|
||||
redownload: Redownload,
|
||||
on_conflict: OnConflict,
|
||||
self,
|
||||
root: Path,
|
||||
redownload: Redownload,
|
||||
on_conflict: OnConflict,
|
||||
):
|
||||
if os.name == "nt":
|
||||
# Windows limits the path length to 260 for some historical reason.
|
||||
@@ -173,8 +178,8 @@ class OutputDirectory:
|
||||
|
||||
try:
|
||||
self._root.mkdir(parents=True, exist_ok=True)
|
||||
except OSError:
|
||||
raise OutputDirError("Failed to create base directory")
|
||||
except OSError as e:
|
||||
raise OutputDirError("Failed to create base directory") from e
|
||||
|
||||
def register_reserved(self, path: PurePath) -> None:
|
||||
self._report.mark_reserved(path)
|
||||
@@ -192,11 +197,11 @@ class OutputDirectory:
|
||||
return self._root / path
|
||||
|
||||
def _should_download(
|
||||
self,
|
||||
local_path: Path,
|
||||
heuristics: Heuristics,
|
||||
redownload: Redownload,
|
||||
on_conflict: OnConflict,
|
||||
self,
|
||||
local_path: Path,
|
||||
heuristics: Heuristics,
|
||||
redownload: Redownload,
|
||||
on_conflict: OnConflict,
|
||||
) -> bool:
|
||||
if not local_path.exists():
|
||||
log.explain("No corresponding file present locally")
|
||||
@@ -233,8 +238,16 @@ class OutputDirectory:
|
||||
|
||||
remote_newer = None
|
||||
|
||||
# ETag should be a more reliable indicator than mtime, so we check it first
|
||||
if heuristics.etag_differs is not None:
|
||||
remote_newer = heuristics.etag_differs
|
||||
if remote_newer:
|
||||
log.explain("Remote file's entity tag differs")
|
||||
else:
|
||||
log.explain("Remote file's entity tag is the same")
|
||||
|
||||
# Python on Windows crashes when faced with timestamps around the unix epoch
|
||||
if heuristics.mtime and (os.name != "nt" or heuristics.mtime.year > 1970):
|
||||
if remote_newer is None and heuristics.mtime and (os.name != "nt" or heuristics.mtime.year > 1970):
|
||||
mtime = heuristics.mtime
|
||||
remote_newer = mtime.timestamp() > stat.st_mtime
|
||||
if remote_newer:
|
||||
@@ -261,9 +274,9 @@ class OutputDirectory:
|
||||
# files.
|
||||
|
||||
async def _conflict_lfrf(
|
||||
self,
|
||||
on_conflict: OnConflict,
|
||||
path: PurePath,
|
||||
self,
|
||||
on_conflict: OnConflict,
|
||||
path: PurePath,
|
||||
) -> bool:
|
||||
if on_conflict in {OnConflict.PROMPT, OnConflict.NO_DELETE_PROMPT_OVERWRITE}:
|
||||
async with log.exclusive_output():
|
||||
@@ -280,9 +293,9 @@ class OutputDirectory:
|
||||
raise ValueError(f"{on_conflict!r} is not a valid conflict policy")
|
||||
|
||||
async def _conflict_ldrf(
|
||||
self,
|
||||
on_conflict: OnConflict,
|
||||
path: PurePath,
|
||||
self,
|
||||
on_conflict: OnConflict,
|
||||
path: PurePath,
|
||||
) -> bool:
|
||||
if on_conflict in {OnConflict.PROMPT, OnConflict.NO_DELETE_PROMPT_OVERWRITE}:
|
||||
async with log.exclusive_output():
|
||||
@@ -299,10 +312,10 @@ class OutputDirectory:
|
||||
raise ValueError(f"{on_conflict!r} is not a valid conflict policy")
|
||||
|
||||
async def _conflict_lfrd(
|
||||
self,
|
||||
on_conflict: OnConflict,
|
||||
path: PurePath,
|
||||
parent: PurePath,
|
||||
self,
|
||||
on_conflict: OnConflict,
|
||||
path: PurePath,
|
||||
parent: PurePath,
|
||||
) -> bool:
|
||||
if on_conflict in {OnConflict.PROMPT, OnConflict.NO_DELETE_PROMPT_OVERWRITE}:
|
||||
async with log.exclusive_output():
|
||||
@@ -319,9 +332,9 @@ class OutputDirectory:
|
||||
raise ValueError(f"{on_conflict!r} is not a valid conflict policy")
|
||||
|
||||
async def _conflict_delete_lf(
|
||||
self,
|
||||
on_conflict: OnConflict,
|
||||
path: PurePath,
|
||||
self,
|
||||
on_conflict: OnConflict,
|
||||
path: PurePath,
|
||||
) -> bool:
|
||||
if on_conflict == OnConflict.PROMPT:
|
||||
async with log.exclusive_output():
|
||||
@@ -344,9 +357,9 @@ class OutputDirectory:
|
||||
return base.parent / name
|
||||
|
||||
async def _create_tmp_file(
|
||||
self,
|
||||
local_path: Path,
|
||||
) -> Tuple[Path, BinaryIO]:
|
||||
self,
|
||||
local_path: Path,
|
||||
) -> tuple[Path, BinaryIO]:
|
||||
"""
|
||||
May raise an OutputDirError.
|
||||
"""
|
||||
@@ -362,20 +375,38 @@ class OutputDirectory:
|
||||
|
||||
raise OutputDirError("Failed to create temporary file")
|
||||
|
||||
def should_try_download(
|
||||
self,
|
||||
path: PurePath,
|
||||
*,
|
||||
etag_differs: Optional[bool] = None,
|
||||
mtime: Optional[datetime] = None,
|
||||
redownload: Optional[Redownload] = None,
|
||||
on_conflict: Optional[OnConflict] = None,
|
||||
) -> bool:
|
||||
heuristics = Heuristics(etag_differs, mtime)
|
||||
redownload = self._redownload if redownload is None else redownload
|
||||
on_conflict = self._on_conflict if on_conflict is None else on_conflict
|
||||
local_path = self.resolve(path)
|
||||
|
||||
return self._should_download(local_path, heuristics, redownload, on_conflict)
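
A hypothetical call site for the new method (the crawler-side helper names below are assumptions): a crawler can first issue a cheap version request and only open the actual download when the heuristics say the local copy is stale.

```python
# Sketch of a caller, not actual PFERD code.
async def maybe_download(output_dir, path, url, fetch_version, do_download) -> None:
    # fetch_version would compare a stored ETag against the current one and return the mtime.
    etag_differs, mtime = await fetch_version(url)
    if not output_dir.should_try_download(path, etag_differs=etag_differs, mtime=mtime):
        return  # nothing to do, the local file is considered up to date
    await do_download(url, path)
```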
|
||||
|
||||
async def download(
|
||||
self,
|
||||
remote_path: PurePath,
|
||||
path: PurePath,
|
||||
mtime: Optional[datetime] = None,
|
||||
redownload: Optional[Redownload] = None,
|
||||
on_conflict: Optional[OnConflict] = None,
|
||||
self,
|
||||
remote_path: PurePath,
|
||||
path: PurePath,
|
||||
*,
|
||||
etag_differs: Optional[bool] = None,
|
||||
mtime: Optional[datetime] = None,
|
||||
redownload: Optional[Redownload] = None,
|
||||
on_conflict: Optional[OnConflict] = None,
|
||||
) -> Optional[FileSinkToken]:
|
||||
"""
|
||||
May throw an OutputDirError, a MarkDuplicateError or a
|
||||
MarkConflictError.
|
||||
"""
|
||||
|
||||
heuristics = Heuristics(mtime)
|
||||
heuristics = Heuristics(etag_differs, mtime)
|
||||
redownload = self._redownload if redownload is None else redownload
|
||||
on_conflict = self._on_conflict if on_conflict is None else on_conflict
|
||||
local_path = self.resolve(path)
|
||||
@@ -479,10 +510,8 @@ class OutputDirectory:
await self._cleanup(child, pure_child)

if delete_self:
try:
with suppress(OSError):
path.rmdir()
except OSError:
pass

async def _cleanup_file(self, path: Path, pure: PurePath) -> None:
if self._report.is_marked(pure):
@@ -1,5 +1,5 @@
from pathlib import Path
from typing import Dict, List, Optional
from pathlib import Path, PurePath
from typing import Optional

from rich.markup import escape

@@ -15,7 +15,7 @@ class PferdLoadError(Exception):

class Pferd:
def __init__(self, config: Config, cli_crawlers: Optional[List[str]], cli_skips: Optional[List[str]]):
def __init__(self, config: Config, cli_crawlers: Optional[list[str]], cli_skips: Optional[list[str]]):
"""
May throw PferdLoadError.
"""
@@ -23,10 +23,10 @@ class Pferd:
self._config = config
self._crawlers_to_run = self._find_crawlers_to_run(config, cli_crawlers, cli_skips)

self._authenticators: Dict[str, Authenticator] = {}
self._crawlers: Dict[str, Crawler] = {}
self._authenticators: dict[str, Authenticator] = {}
self._crawlers: dict[str, Crawler] = {}

def _find_config_crawlers(self, config: Config) -> List[str]:
def _find_config_crawlers(self, config: Config) -> list[str]:
crawl_sections = []

for name, section in config.crawl_sections():
@@ -37,7 +37,7 @@ class Pferd:

return crawl_sections

def _find_cli_crawlers(self, config: Config, cli_crawlers: List[str]) -> List[str]:
def _find_cli_crawlers(self, config: Config, cli_crawlers: list[str]) -> list[str]:
if len(cli_crawlers) != len(set(cli_crawlers)):
raise PferdLoadError("Some crawlers were selected multiple times")

@@ -66,14 +66,14 @@ class Pferd:
return crawlers_to_run

def _find_crawlers_to_run(
self,
config: Config,
cli_crawlers: Optional[List[str]],
cli_skips: Optional[List[str]],
) -> List[str]:
self,
config: Config,
cli_crawlers: Optional[list[str]],
cli_skips: Optional[list[str]],
) -> list[str]:
log.explain_topic("Deciding which crawlers to run")

crawlers: List[str]
crawlers: list[str]
if cli_crawlers is None:
log.explain("No crawlers specified on CLI")
log.explain("Running crawlers specified in config")
@@ -104,7 +104,7 @@ class Pferd:

def _load_crawlers(self) -> None:
# Cookie sharing
kit_ilias_web_paths: Dict[Authenticator, List[Path]] = {}
kit_ilias_web_paths: dict[Authenticator, list[Path]] = {}

for name, section in self._config.crawl_sections():
log.print(f"[bold bright_cyan]Loading[/] {escape(name)}")
@@ -117,9 +117,8 @@ class Pferd:
crawler = crawler_constructor(name, section, self._config, self._authenticators)
self._crawlers[name] = crawler

if self._config.default_section.share_cookies():
if isinstance(crawler, KitIliasWebCrawler):
crawler.share_cookies(kit_ilias_web_paths)
if self._config.default_section.share_cookies() and isinstance(crawler, KitIliasWebCrawler):
crawler.share_cookies(kit_ilias_web_paths)

def debug_transforms(self) -> None:
for name in self._crawlers_to_run:
@@ -161,26 +160,32 @@ class Pferd:

def print_report(self) -> None:
for name in self._crawlers_to_run:
crawler = self._crawlers.get(name)
if crawler is None:
crawlerOpt = self._crawlers.get(name)
if crawlerOpt is None:
continue # Crawler failed to load
crawler = crawlerOpt

log.report("")
log.report(f"[bold bright_cyan]Report[/] for {escape(name)}")

def fmt_path_link(relative_path: PurePath) -> str:
# We need to URL-encode the path because it might contain spaces or special characters
link = crawler.output_dir.resolve(relative_path).absolute().as_uri()
return f"[link={link}]{fmt_path(relative_path)}[/link]"

something_changed = False
for path in sorted(crawler.report.added_files):
something_changed = True
log.report(f" [bold bright_green]Added[/] {fmt_path(path)}")
log.report(f" [bold bright_green]Added[/] {fmt_path_link(path)}")
for path in sorted(crawler.report.changed_files):
something_changed = True
log.report(f" [bold bright_yellow]Changed[/] {fmt_path(path)}")
log.report(f" [bold bright_yellow]Changed[/] {fmt_path_link(path)}")
for path in sorted(crawler.report.deleted_files):
something_changed = True
log.report(f" [bold bright_magenta]Deleted[/] {fmt_path(path)}")
for path in sorted(crawler.report.not_deleted_files):
something_changed = True
log.report_not_deleted(f" [bold bright_magenta]Not deleted[/] {fmt_path(path)}")
log.report_not_deleted(f" [bold bright_magenta]Not deleted[/] {fmt_path_link(path)}")

for warning in crawler.report.encountered_warnings:
something_changed = True
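The `fmt_path_link` helper above relies on `Path.absolute().as_uri()` to build a clickable `file://` link; `as_uri()` percent-encodes spaces and other special characters, which is what the comment refers to. A small sketch of that behaviour (POSIX path assumed; Windows drive letters render differently):

```
from pathlib import Path

p = Path("/tmp/Vorlesung 01/Übung 1.pdf")
print(p.as_uri())
# file:///tmp/Vorlesung%2001/%C3%9Cbung%201.pdf
```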
@@ -1,6 +1,6 @@
import json
from pathlib import Path, PurePath
from typing import Any, Dict, List, Optional, Set
from typing import Any, Optional


class ReportLoadError(Exception):
@@ -34,15 +34,6 @@ class MarkConflictError(Exception):
self.collides_with = collides_with


# TODO Use PurePath.is_relative_to when updating to 3.9
def is_relative_to(a: PurePath, b: PurePath) -> bool:
try:
a.relative_to(b)
return True
except ValueError:
return False


class Report:
"""
A report of a synchronization. Includes all files found by the crawler, as
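The removed module-level helper is redundant because `PurePath.is_relative_to` has been part of the standard library since Python 3.9, exactly as the TODO above anticipated. A quick illustration:

```
from pathlib import PurePath

print(PurePath("a/b/c").is_relative_to(PurePath("a")))  # True
print(PurePath("a/b/c").is_relative_to(PurePath("d")))  # False
```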
@@ -51,32 +42,32 @@ class Report:

def __init__(self) -> None:
# Paths found by the crawler, untransformed
self.found_paths: Set[PurePath] = set()
self.found_paths: set[PurePath] = set()

# Files reserved for metadata files (e. g. the report file or cookies)
# that can't be overwritten by user transforms and won't be cleaned up
# at the end.
self.reserved_files: Set[PurePath] = set()
self.reserved_files: set[PurePath] = set()

# Files found by the crawler, transformed. Only includes files that
# were downloaded (or a download was attempted)
self.known_files: Set[PurePath] = set()
self.known_files: set[PurePath] = set()

self.added_files: Set[PurePath] = set()
self.changed_files: Set[PurePath] = set()
self.deleted_files: Set[PurePath] = set()
self.added_files: set[PurePath] = set()
self.changed_files: set[PurePath] = set()
self.deleted_files: set[PurePath] = set()
# Files that should have been deleted by the cleanup but weren't
self.not_deleted_files: Set[PurePath] = set()
self.not_deleted_files: set[PurePath] = set()

# Custom crawler-specific data
self.custom: Dict[str, Any] = dict()
self.custom: dict[str, Any] = dict()

# Encountered errors and warnings
self.encountered_warnings: List[str] = []
self.encountered_errors: List[str] = []
self.encountered_warnings: list[str] = []
self.encountered_errors: list[str] = []

@staticmethod
def _get_list_of_strs(data: Dict[str, Any], key: str) -> List[str]:
def _get_list_of_strs(data: dict[str, Any], key: str) -> list[str]:
result: Any = data.get(key, [])

if not isinstance(result, list):
@@ -89,8 +80,8 @@ class Report:
return result

@staticmethod
def _get_str_dictionary(data: Dict[str, Any], key: str) -> Dict[str, Any]:
result: Dict[str, Any] = data.get(key, {})
def _get_str_dictionary(data: dict[str, Any], key: str) -> dict[str, Any]:
result: dict[str, Any] = data.get(key, {})

if not isinstance(result, dict):
raise ReportLoadError(f"Incorrect format: {key!r} is not a dictionary")
@@ -173,13 +164,13 @@ class Report:
if path == other:
raise MarkDuplicateError(path)

if is_relative_to(path, other) or is_relative_to(other, path):
if path.is_relative_to(other) or other.is_relative_to(path):
raise MarkConflictError(path, other)

self.known_files.add(path)

@property
def marked(self) -> Set[PurePath]:
def marked(self) -> set[PurePath]:
return self.known_files | self.reserved_files

def is_marked(self, path: PurePath) -> bool:
@@ -1,10 +1,12 @@
import ast
import contextlib
import re
from abc import ABC, abstractmethod
from collections.abc import Callable, Sequence
from dataclasses import dataclass
from enum import Enum
from pathlib import PurePath
from typing import Callable, Dict, List, Optional, Sequence, TypeVar, Union
from typing import Optional, TypeVar

from .logging import log
from .utils import fmt_path, str_path
@@ -23,7 +25,7 @@ class Empty:
pass


RightSide = Union[str, Ignore, Empty]
RightSide = str | Ignore | Empty


@dataclass
@@ -35,7 +37,7 @@ class Ignored:
pass


TransformResult = Optional[Union[Transformed, Ignored]]
TransformResult = Transformed | Ignored | None


@dataclass
@@ -47,7 +49,7 @@ class Rule:
right: RightSide
right_index: int

def right_result(self, path: PurePath) -> Union[str, Transformed, Ignored]:
def right_result(self, path: PurePath) -> str | Transformed | Ignored:
if isinstance(self.right, str):
return self.right
elif isinstance(self.right, Ignore):
@@ -93,24 +95,20 @@ class ExactReTf(Transformation):
# since elements of "match.groups()" can be None, mypy is wrong.
groups: Sequence[Optional[str]] = [match[0]] + list(match.groups())

locals_dir: Dict[str, Union[str, int, float]] = {}
locals_dir: dict[str, str | int | float] = {}
for i, group in enumerate(groups):
if group is None:
continue

locals_dir[f"g{i}"] = group

try:
with contextlib.suppress(ValueError):
locals_dir[f"i{i}"] = int(group)
except ValueError:
pass

try:
with contextlib.suppress(ValueError):
locals_dir[f"f{i}"] = float(group)
except ValueError:
pass

named_groups: Dict[str, str] = match.groupdict()
named_groups: dict[str, str] = match.groupdict()
for name, capture in named_groups.items():
locals_dir[name] = capture
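The `try`/`except ValueError`/`pass` pairs above become `contextlib.suppress(ValueError)`, which behaves the same but reads shorter. A self-contained sketch with an illustrative group value:

```
import contextlib

group = "42x"  # matches neither int() nor float()
locals_dir: dict[str, str | int | float] = {"g1": group}

with contextlib.suppress(ValueError):
    locals_dir["i1"] = int(group)
with contextlib.suppress(ValueError):
    locals_dir["f1"] = float(group)

print(locals_dir)  # {'g1': '42x'}; the failed conversions are silently skipped
```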
@@ -208,7 +206,7 @@ class Line:

@property
def rest(self) -> str:
return self.line[self.index:]
return self.line[self.index :]

def peek(self, amount: int = 1) -> str:
return self.rest[:amount]
@@ -228,7 +226,7 @@ class Line:
self.expect(string)
return value

def one_of(self, parsers: List[Callable[[], T]], description: str) -> T:
def one_of(self, parsers: list[Callable[[], T]], description: str) -> T:
for parser in parsers:
index = self.index
try:
@@ -315,7 +313,7 @@ def parse_left(line: Line) -> str:
return parse_str(line)


def parse_right(line: Line) -> Union[str, Ignore]:
def parse_right(line: Line) -> str | Ignore:
c = line.peek()
if c in QUOTATION_MARKS:
return parse_quoted_str(line)
@@ -327,21 +325,27 @@ def parse_right(line: Line) -> Union[str, Ignore]:


def parse_arrow_name(line: Line) -> str:
return line.one_of([
lambda: line.expect("exact-re"),
lambda: line.expect("exact"),
lambda: line.expect("name-re"),
lambda: line.expect("name"),
lambda: line.expect("re"),
lambda: line.expect(""),
], "Expected arrow name")
return line.one_of(
[
lambda: line.expect("exact-re"),
lambda: line.expect("exact"),
lambda: line.expect("name-re"),
lambda: line.expect("name"),
lambda: line.expect("re"),
lambda: line.expect(""),
],
"Expected arrow name",
)


def parse_arrow_head(line: Line) -> ArrowHead:
return line.one_of([
lambda: line.expect_with(">>", ArrowHead.SEQUENCE),
lambda: line.expect_with(">", ArrowHead.NORMAL),
], "Expected arrow head")
return line.one_of(
[
lambda: line.expect_with(">>", ArrowHead.SEQUENCE),
lambda: line.expect_with(">", ArrowHead.NORMAL),
],
"Expected arrow head",
)


def parse_eol(line: Line) -> None:
@@ -413,12 +417,12 @@ class Transformer:

def transform(self, path: PurePath) -> Optional[PurePath]:
for i, (line, tf) in enumerate(self._tfs):
log.explain(f"Testing rule {i+1}: {line}")
log.explain(f"Testing rule {i + 1}: {line}")

try:
result = tf.transform(path)
except Exception as e:
log.warn(f"Error while testing rule {i+1}: {line}")
log.warn(f"Error while testing rule {i + 1}: {line}")
log.warn_contd(str(e))
continue
@@ -2,11 +2,13 @@ import asyncio
import getpass
import sys
import threading
import traceback
from abc import ABC, abstractmethod
from collections.abc import Callable
from contextlib import AsyncExitStack
from pathlib import Path, PurePath
from types import TracebackType
from typing import Any, Callable, Dict, Generic, Optional, Type, TypeVar
from typing import Any, Generic, Optional, TypeVar
from urllib.parse import parse_qs, urlencode, urlsplit, urlunsplit

import bs4
@@ -79,7 +81,7 @@ def url_set_query_param(url: str, param: str, value: str) -> str:
return urlunsplit((scheme, netloc, path, new_query_string, fragment))


def url_set_query_params(url: str, params: Dict[str, str]) -> str:
def url_set_query_params(url: str, params: dict[str, str]) -> str:
"""
Sets multiple query parameters in an url, overwriting existing ones.
"""
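A rough illustration of the documented "overwriting existing ones" behaviour, built from the same `urllib.parse` helpers the module imports; this is a simplified stand-in for demonstration, not necessarily PFERD's exact implementation:

```
from urllib.parse import parse_qs, urlencode, urlsplit, urlunsplit

def set_query_params(url: str, params: dict[str, str]) -> str:
    scheme, netloc, path, query, fragment = urlsplit(url)
    merged = {k: v[-1] for k, v in parse_qs(query).items()}
    merged.update(params)  # new values win over existing ones
    return urlunsplit((scheme, netloc, path, urlencode(merged), fragment))

print(set_query_params("https://example.com/page?a=1&b=2", {"a": "3", "c": "4"}))
# https://example.com/page?a=3&b=2&c=4
```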
@@ -105,17 +107,31 @@ def fmt_real_path(path: Path) -> str:
return repr(str(path.absolute()))


def sanitize_path_name(name: str) -> str:
return name.replace("/", "-").replace("\\", "-").strip()


class ReusableAsyncContextManager(ABC, Generic[T]):
def __init__(self) -> None:
self._active = False
self._stack = AsyncExitStack()
self._create_stacktrace = traceback.format_stack()
self._enter_stacktraces = []

@abstractmethod
async def _on_aenter(self) -> T:
pass

async def __aenter__(self) -> T:
self._enter_stacktraces.append(traceback.format_stack())

if self._active:
print("Context manager was already active. Created at:")
print("".join(self._create_stacktrace))
print("\n== Previous __aenter__ calls")
for i, stacktrace in enumerate(self._enter_stacktraces, start=1):
print(f"\n-- __aenter__ call #{i} at:")
print("".join(stacktrace))
raise RuntimeError("Nested or otherwise concurrent usage is not allowed")

self._active = True
@@ -124,21 +140,23 @@ class ReusableAsyncContextManager(ABC, Generic[T]):
# See https://stackoverflow.com/a/13075071
try:
result: T = await self._on_aenter()
except: # noqa: E722 do not use bare 'except'
return result
except:
if not await self.__aexit__(*sys.exc_info()):
raise

return result
raise

async def __aexit__(
self,
exc_type: Optional[Type[BaseException]],
exc_value: Optional[BaseException],
traceback: Optional[TracebackType],
self,
exc_type: Optional[type[BaseException]],
exc_value: Optional[BaseException],
traceback: Optional[TracebackType],
) -> Optional[bool]:
if not self._active:
raise RuntimeError("__aexit__ called too many times")

self._enter_stacktraces.pop()

result = await self._stack.__aexit__(exc_type, exc_value, traceback)
self._active = False
return result
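The extra bookkeeping in this debug branch (`_create_stacktrace`, `_enter_stacktraces`) exists so that nested or concurrent use of the same context manager prints where it was created and where every earlier `__aenter__` happened before raising. A hypothetical toy subclass to trigger it, assuming PFERD is installed and the class is importable from `PFERD.utils`:

```
import asyncio

from PFERD.utils import ReusableAsyncContextManager

class Demo(ReusableAsyncContextManager[str]):
    async def _on_aenter(self) -> str:
        return "token"

async def main() -> None:
    cm = Demo()
    async with cm:
        async with cm:  # second enter while still active: prints both stack
            pass        # traces, then raises RuntimeError

asyncio.run(main())
```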
@@ -1,2 +1,2 @@
NAME = "PFERD"
VERSION = "3.6.0"
VERSION = "3.8.3"

@@ -17,7 +17,7 @@ Binaries for Linux, Windows and Mac can be downloaded directly from the

### With pip

Ensure you have at least Python 3.9 installed. Run the following command to
Ensure you have at least Python 3.11 installed. Run the following command to
install PFERD or upgrade it to the latest version:

```
8 flake.lock generated
@@ -2,16 +2,16 @@
"nodes": {
"nixpkgs": {
"locked": {
"lastModified": 1708979614,
"narHash": "sha256-FWLWmYojIg6TeqxSnHkKpHu5SGnFP5um1uUjH+wRV6g=",
"lastModified": 1760725957,
"narHash": "sha256-tdoIhL/NlER290HfSjOkgi4jfmjeqmqrzgnmiMtGepE=",
"owner": "NixOS",
"repo": "nixpkgs",
"rev": "b7ee09cf5614b02d289cd86fcfa6f24d4e078c2a",
"rev": "81b927b14b7b3988334d5282ef9cba802e193fe1",
"type": "github"
},
"original": {
"owner": "NixOS",
"ref": "nixos-23.11",
"ref": "nixos-25.05",
"repo": "nixpkgs",
"type": "github"
}

@@ -2,7 +2,7 @@
description = "Tool for downloading course-related files from ILIAS";

inputs = {
nixpkgs.url = "github:NixOS/nixpkgs/nixos-23.11";
nixpkgs.url = "github:NixOS/nixpkgs/nixos-25.05";
};

outputs = { self, nixpkgs }:
@@ -12,7 +12,7 @@ dependencies = [
"certifi>=2021.10.8"
]
dynamic = ["version"]
requires-python = ">=3.9"
requires-python = ">=3.11"

[project.scripts]
pferd = "PFERD.__main__:main"
@@ -20,23 +20,33 @@ pferd = "PFERD.__main__:main"
[tool.setuptools.dynamic]
version = {attr = "PFERD.version.VERSION"}

[tool.flake8]
max-line-length = 110
[tool.ruff]
line-length = 110

[tool.isort]
line_length = 110
[tool.ruff.lint]
select = [
# pycodestyle
"E",
# Pyflakes
"F",
# pyupgrade
"UP",
# flake8-bugbear
"B",
# flake8-simplify
"SIM",
# isort
"I",
]
ignore = [
"UP045",
"SIM114",
"B023"
]

[tool.autopep8]
max_line_length = 110
in-place = true
recursive = true

[tool.mypy]
disallow_any_generics = true
disallow_untyped_defs = true
disallow_incomplete_defs = true
no_implicit_optional = true
warn_unused_ignores = true
warn_unreachable = true
show_error_context = true
ignore_missing_imports = true
[dependency-groups]
dev = [
"pyinstaller>=6.16.0",
"pyright>=1.1.406",
"ruff>=0.14.1",
]
@@ -2,4 +2,4 @@

set -e

pyinstaller --onefile pferd.py
uv run pyinstaller --onefile pferd.py

@@ -2,5 +2,5 @@

set -e

mypy .
flake8 PFERD
uv run pyright .
uv run ruff check

@@ -2,5 +2,4 @@

set -e

autopep8 .
isort .
uv run ruff format