Mirror of https://github.com/Garmelon/PFERD.git, synced 2025-12-16 20:12:26 +01:00

Compare commits: debug/wind ... debug/asyn (44 commits)
| SHA1 |
|---|
| 4eab927899 |
| e246053de2 |
| 3f5637366e |
| 3453bbc991 |
| bd7b384e8f |
| 6353571eb4 |
| 1e56976b9f |
| bb0d68da65 |
| c1c78673aa |
| ebcfb2a2f3 |
| 5646e933fd |
| 6e563134b2 |
| 2cf0e060ed |
| ee4625be78 |
| f6c713d621 |
| 207af51aa4 |
| 3755f593ff |
| 465f8b28c0 |
| 27e69af2f3 |
| 56e3065950 |
| 549ce6cce9 |
| 34564cedb4 |
| 2b0d20a1f6 |
| 8caad0008d |
| 77a23265a9 |
| 4c230ef6dd |
| b305e1ce23 |
| bdf17f5c87 |
| 77fce7daf8 |
| 653bf139f0 |
| 3f60638d33 |
| b97b6fae6b |
| 477234ad0d |
| 63f25277b0 |
| c8eff04ae0 |
| edc482cdf4 |
| 72cd0f77e2 |
| be175f9347 |
| ba2833dba5 |
| 2f0e792670 |
| 5f88539f7e |
| bd9d7efe64 |
| 16a2dd5b15 |
| 678283d341 |
1 .git-blame-ignore-revs (new file)

@@ -0,0 +1 @@
+2cf0e060ed126537dd993896b6aa793e2a6b9e80
14 .github/workflows/build-and-release.yml (vendored)

@@ -14,23 +14,17 @@ jobs:
       fail-fast: false
       matrix:
         os: [ubuntu-latest, windows-latest, macos-13, macos-latest]
-        python: ["3.9"]
+        python: ["3.11"]
     steps:
       - uses: actions/checkout@v4

-      - uses: actions/setup-python@v5
+      - name: Install uv
+        uses: astral-sh/setup-uv@v7
         with:
           python-version: ${{ matrix.python }}

       - name: Set up project
-        if: matrix.os != 'windows-latest'
-        run: ./scripts/setup
-
-      - name: Set up project on windows
-        if: matrix.os == 'windows-latest'
-        # For some reason, `pip install --upgrade pip` doesn't work on
-        # 'windows-latest'. The installed pip version works fine however.
-        run: ./scripts/setup --no-pip
+        run: uv sync

       - name: Run checks
         run: |
55 CHANGELOG.md

@@ -22,6 +22,61 @@ ambiguous situations.

 ## Unreleased

+## Added
+
+- Store the description when using the `internet-shortcut` link format
+- Support for basic auth with the kit-ipd crawler
+
+## Fixed
+
+- Event loop errors on Windows with Python 3.14
+- Sanitize `/` in headings in kit-ipd crawler
+- Crawl info tab again
+
+## 3.8.3 - 2025-07-01
+
+## Added
+
+- Support for link collections.
+  In "fancy" mode, a single HTML file with multiple links is generated.
+  In all other modes, PFERD creates a folder for the collection and a new file
+  for every link inside.
+
+## Fixed
+
+- Crawling of exercises with instructions
+- Don't download unavailable elements.
+  Elements that are unavailable (for example, because their availability is
+  time restricted) will not download the HTML for the info page anymore.
+- `base_url` argument for `ilias-web` crawler causing crashes
+
+## 3.8.2 - 2025-04-29
+
+## Changed
+
+- Explicitly mention that wikis are not supported at the moment and ignore them
+
+## Fixed
+
+- Ilias-native login
+- Exercise crawling
+
+## 3.8.1 - 2025-04-17
+
+## Fixed
+
+- Description html files now specify at UTF-8 encoding
+- Images in descriptions now always have a white background
+
+## 3.8.0 - 2025-04-16
+
+### Added
+
+- Support for ILIAS 9
+
+### Changed
+
+- Added prettier CSS to forum threads
+- Downloaded forum threads now link to the forum instead of the ILIAS thread
+- Increase minimum supported Python version to 3.11
+- Do not crawl nested courses (courses linked in other courses)
+
+## Fixed
+
+- File links in report on Windows
+- TOTP authentication in KIT Shibboleth
+- Forum crawling only considering the first 20 entries
+
 ## 3.7.0 - 2024-11-13

 ### Added
17 CONFIG.md

@@ -153,6 +153,7 @@ requests is likely a good idea.
 - `link_regex`: A regex that is matched against the `href` part of links. If it
   matches, the given link is downloaded as a file. This is used to extract
   files from KIT-IPD pages. (Default: `^.*?[^/]+\.(pdf|zip|c|cpp|java)$`)
+- `auth`: Name of auth section to use for basic authentication. (Optional)

 ### The `ilias-web` crawler

@@ -163,13 +164,15 @@ out of the box for the corresponding universities:

 [ilias-dl]: https://github.com/V3lop5/ilias-downloader/blob/main/configs "ilias-downloader configs"

 | University      | `base_url`                              | `login_type` | `client_id`   |
-|---------------|-----------------------------------------|--------------|---------------|
+|-----------------|-----------------------------------------|--------------|---------------|
 | FH Aachen       | https://www.ili.fh-aachen.de            | local        | elearning     |
+| HHU Düsseldorf  | https://ilias.hhu.de                    | local        | UniRZ         |
 | Uni Köln        | https://www.ilias.uni-koeln.de/ilias    | local        | uk            |
 | Uni Konstanz    | https://ilias.uni-konstanz.de           | local        | ILIASKONSTANZ |
 | Uni Stuttgart   | https://ilias3.uni-stuttgart.de         | local        | Uni_Stuttgart |
 | Uni Tübingen    | https://ovidius.uni-tuebingen.de/ilias3 | shibboleth   |               |
+| KIT ILIAS Pilot | https://pilot.ilias.studium.kit.edu     | shibboleth   | pilot         |

 If your university isn't listed, try navigating to your instance's login page.
 Assuming no custom login service is used, the URL will look something like this:
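As a quick illustration of what the default `link_regex` above accepts, here is a small check. The pattern is copied verbatim from the option's default; the hrefs are hypothetical:

```python
import re

# Default link_regex of the kit-ipd crawler, copied from the section above
LINK_RE = re.compile(r"^.*?[^/]+\.(pdf|zip|c|cpp|java)$")

# Hypothetical hrefs, purely to illustrate the matching behaviour
for href in ["slides/lecture01.pdf", "src.zip", "folder/", "notes.txt"]:
    print(f"{href}: {bool(LINK_RE.match(href))}")
# slides/lecture01.pdf: True
# src.zip: True
# folder/: False
# notes.txt: False
```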
21 DEV.md

@@ -9,30 +9,25 @@ particular [this][ppug-1] and [this][ppug-2] guide).

 ## Setting up a dev environment

-The use of [venv][venv] is recommended. To initially set up a development
-environment, run these commands in the same directory as this file:
+The use of [venv][venv] and [uv][uv] is recommended. To initially set up a
+development environment, run these commands in the same directory as this file:

 ```
-$ python -m venv .venv
+$ uv sync
 $ . .venv/bin/activate
-$ ./scripts/setup
 ```

-The setup script installs a few required dependencies and tools. It also
-installs PFERD via `pip install --editable .`, which means that you can just run
-`pferd` as if it was installed normally. Since PFERD was installed with
-`--editable`, there is no need to re-run `pip install` when the source code is
-changed.
-
-If you get any errors because pip can't update itself, try running
-`./scripts/setup --no-pip` instead of `./scripts/setup`.
+This install all required dependencies and tools. It also installs PFERD as
+*editable*, which means that you can just run `pferd` as if it was installed
+normally. Since PFERD was installed with `--editable`, there is no need to
+re-run `uv sync` when the source code is changed.

 For more details, see [this part of the Python Tutorial][venv-tut] and
 [this section on "development mode"][ppug-dev].

 [venv]: <https://docs.python.org/3/library/venv.html> "venv - Creation of virtual environments"
 [venv-tut]: <https://docs.python.org/3/tutorial/venv.html> "12. Virtual Environments and Packages"
-[ppug-dev]: <https://packaging.python.org/guides/distributing-packages-using-setuptools/#working-in-development-mode> "Working in “development mode”"
+[uv]: <https://docs.astral.sh/uv/> "uv - An extremely fast Python package and project manager"

 ## Checking and formatting the code
@@ -133,7 +133,8 @@ def main() -> None:
     # https://bugs.python.org/issue39232
     # https://github.com/encode/httpx/issues/914#issuecomment-780023632
     # TODO Fix this properly
-    loop = asyncio.get_event_loop()
+    loop = asyncio.new_event_loop()
+    asyncio.set_event_loop(loop)
     loop.run_until_complete(pferd.run(args.debug_transforms))
     loop.run_until_complete(asyncio.sleep(1))
    loop.close()
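This is the fix behind the "Event loop errors on Windows with Python 3.14" changelog entry: on recent Python versions, `asyncio.get_event_loop()` no longer creates a loop implicitly when none is running, and eventually raises instead, so the loop has to be created and registered explicitly. A minimal sketch of the pattern, independent of PFERD:

```python
import asyncio

async def job() -> None:
    await asyncio.sleep(0)

# get_event_loop() without a running loop is deprecated and raises on newer
# Pythons, so create a fresh loop and make it the current one explicitly:
loop = asyncio.new_event_loop()
asyncio.set_event_loop(loop)
try:
    loop.run_until_complete(job())
finally:
    loop.close()
```

`asyncio.run()` would be the idiomatic replacement, but the proactor issues linked in the comments suggest the explicit loop is kept so the trailing `asyncio.sleep(1)` runs on the same loop before it is closed.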
@@ -1,5 +1,5 @@
+from collections.abc import Callable
 from configparser import SectionProxy
-from typing import Callable, Dict

 from ..config import Config
 from .authenticator import Authenticator, AuthError, AuthLoadError, AuthSection  # noqa: F401

@@ -9,21 +9,19 @@ from .pass_ import PassAuthenticator, PassAuthSection
 from .simple import SimpleAuthenticator, SimpleAuthSection
 from .tfa import TfaAuthenticator

-AuthConstructor = Callable[[
-    str,           # Name (without the "auth:" prefix)
-    SectionProxy,  # Authenticator's section of global config
-    Config,        # Global config
-], Authenticator]
+AuthConstructor = Callable[
+    [
+        str,  # Name (without the "auth:" prefix)
+        SectionProxy,  # Authenticator's section of global config
+        Config,  # Global config
+    ],
+    Authenticator,
+]

-AUTHENTICATORS: Dict[str, AuthConstructor] = {
-    "credential-file": lambda n, s, c:
-        CredentialFileAuthenticator(n, CredentialFileAuthSection(s), c),
-    "keyring": lambda n, s, c:
-        KeyringAuthenticator(n, KeyringAuthSection(s)),
-    "pass": lambda n, s, c:
-        PassAuthenticator(n, PassAuthSection(s)),
-    "simple": lambda n, s, c:
-        SimpleAuthenticator(n, SimpleAuthSection(s)),
-    "tfa": lambda n, s, c:
-        TfaAuthenticator(n),
+AUTHENTICATORS: dict[str, AuthConstructor] = {
+    "credential-file": lambda n, s, c: CredentialFileAuthenticator(n, CredentialFileAuthSection(s), c),
+    "keyring": lambda n, s, c: KeyringAuthenticator(n, KeyringAuthSection(s)),
+    "pass": lambda n, s, c: PassAuthenticator(n, PassAuthSection(s)),
+    "simple": lambda n, s, c: SimpleAuthenticator(n, SimpleAuthSection(s)),
+    "tfa": lambda n, s, c: TfaAuthenticator(n),
 }
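The import churn in this and the following hunks is the typing modernization that the new Python 3.11 floor allows: builtin generics (PEP 585) replace `typing.Dict`/`List`/`Tuple`/`Set`, and `Callable`/`Sequence` now come from `collections.abc`. A small sketch of the same registry pattern outside PFERD:

```python
from collections.abc import Callable

# PEP 585: builtin types are usable as generics, so typing.Dict and
# typing.Callable imports are no longer needed.
Handler = Callable[[str], str]

HANDLERS: dict[str, Handler] = {
    "upper": lambda s: s.upper(),
    "lower": lambda s: s.lower(),
}

def dispatch(name: str, value: str) -> str:
    return HANDLERS[name](value)

assert dispatch("upper", "pferd") == "PFERD"
```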
@@ -1,5 +1,4 @@
 from abc import ABC, abstractmethod
-from typing import Tuple

 from ..config import Section

@@ -35,7 +34,7 @@ class Authenticator(ABC):
         self.name = name

     @abstractmethod
-    async def credentials(self) -> Tuple[str, str]:
+    async def credentials(self) -> tuple[str, str]:
         pass

     async def username(self) -> str:
@@ -1,5 +1,4 @@
 from pathlib import Path
-from typing import Tuple

 from ..config import Config
 from ..utils import fmt_real_path

@@ -23,7 +22,9 @@ class CredentialFileAuthenticator(Authenticator):
             with open(path, encoding="utf-8") as f:
                 lines = list(f)
         except UnicodeDecodeError:
-            raise AuthLoadError(f"Credential file at {fmt_real_path(path)} is not encoded using UTF-8")
+            raise AuthLoadError(
+                f"Credential file at {fmt_real_path(path)} is not encoded using UTF-8"
+            ) from None
         except OSError as e:
             raise AuthLoadError(f"No credential file at {fmt_real_path(path)}") from e

@@ -42,5 +43,5 @@ class CredentialFileAuthenticator(Authenticator):
         self._username = uline[9:]
         self._password = pline[9:]

-    async def credentials(self) -> Tuple[str, str]:
+    async def credentials(self) -> tuple[str, str]:
         return self._username, self._password
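The reformatted `raise` above also gains `from None`, while the `OSError` branch keeps `from e`: chaining with `from e` preserves the original cause in the traceback, and `from None` suppresses it when the low-level error adds nothing for the user. A sketch of the two idioms, using `ValueError` as a stand-in for `AuthLoadError`:

```python
def load_lines(path: str) -> list[str]:
    try:
        with open(path, encoding="utf-8") as f:
            return list(f)
    except UnicodeDecodeError:
        # The decode traceback adds no useful context; hide it.
        raise ValueError(f"{path} is not encoded using UTF-8") from None
    except OSError as e:
        # Keep the original cause visible for debugging.
        raise ValueError(f"No file at {path}") from e
```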
@@ -1,4 +1,4 @@
-from typing import Optional, Tuple
+from typing import Optional

 import keyring

@@ -17,7 +17,6 @@ class KeyringAuthSection(AuthSection):


 class KeyringAuthenticator(Authenticator):
-
     def __init__(self, name: str, section: KeyringAuthSection) -> None:
         super().__init__(name)

@@ -28,7 +27,7 @@ class KeyringAuthenticator(Authenticator):
         self._password_invalidated = False
         self._username_fixed = section.username() is not None

-    async def credentials(self) -> Tuple[str, str]:
+    async def credentials(self) -> tuple[str, str]:
         # Request the username
         if self._username is None:
             async with log.exclusive_output():
@@ -1,6 +1,5 @@
 import re
 import subprocess
-from typing import List, Tuple

 from ..logging import log
 from .authenticator import Authenticator, AuthError, AuthSection

@@ -12,11 +11,11 @@ class PassAuthSection(AuthSection):
             self.missing_value("passname")
         return value

-    def username_prefixes(self) -> List[str]:
+    def username_prefixes(self) -> list[str]:
         value = self.s.get("username_prefixes", "login,username,user")
         return [prefix.lower() for prefix in value.split(",")]

-    def password_prefixes(self) -> List[str]:
+    def password_prefixes(self) -> list[str]:
         value = self.s.get("password_prefixes", "password,pass,secret")
         return [prefix.lower() for prefix in value.split(",")]

@@ -31,14 +30,14 @@ class PassAuthenticator(Authenticator):
         self._username_prefixes = section.username_prefixes()
         self._password_prefixes = section.password_prefixes()

-    async def credentials(self) -> Tuple[str, str]:
+    async def credentials(self) -> tuple[str, str]:
         log.explain_topic("Obtaining credentials from pass")

         try:
             log.explain(f"Calling 'pass show {self._passname}'")
             result = subprocess.check_output(["pass", "show", self._passname], text=True)
         except subprocess.CalledProcessError as e:
-            raise AuthError(f"Failed to get password info from {self._passname}: {e}")
+            raise AuthError(f"Failed to get password info from {self._passname}: {e}") from e

         prefixed = {}
         unprefixed = []
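The prefix lists above determine how lines of `pass show` output are classified. A rough sketch of the matching idea (not PFERD's exact code; the body after `prefixed`/`unprefixed` is not part of this excerpt):

```python
def split_by_prefix(lines: list[str], prefixes: list[str]) -> dict[str, str]:
    # Collect "login: garmelon" style lines whose prefix is recognized.
    result: dict[str, str] = {}
    for line in lines:
        key, sep, value = line.partition(":")
        if sep and key.strip().lower() in prefixes:
            result[key.strip().lower()] = value.strip()
    return result

lines = ["hunter2", "login: garmelon"]  # hypothetical `pass show` output
print(split_by_prefix(lines, ["login", "username", "user"]))
# {'login': 'garmelon'}
```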
@@ -1,4 +1,4 @@
-from typing import Optional, Tuple
+from typing import Optional

 from ..logging import log
 from ..utils import agetpass, ainput

@@ -23,7 +23,7 @@ class SimpleAuthenticator(Authenticator):
         self._username_fixed = self.username is not None
         self._password_fixed = self.password is not None

-    async def credentials(self) -> Tuple[str, str]:
+    async def credentials(self) -> tuple[str, str]:
         if self._username is not None and self._password is not None:
             return self._username, self._password
@@ -1,5 +1,3 @@
-from typing import Tuple
-
 from ..logging import log
 from ..utils import ainput
 from .authenticator import Authenticator, AuthError

@@ -17,7 +15,7 @@ class TfaAuthenticator(Authenticator):
         code = await ainput("TFA code: ")
         return code

-    async def credentials(self) -> Tuple[str, str]:
+    async def credentials(self) -> tuple[str, str]:
         raise AuthError("TFA authenticator does not support usernames")

     def invalidate_username(self) -> None:
@@ -21,23 +21,20 @@ GROUP.add_argument(
     "--base-url",
     type=str,
     metavar="BASE_URL",
-    help="The base url of the ilias instance"
+    help="The base url of the ilias instance",
 )

 GROUP.add_argument(
     "--client-id",
     type=str,
     metavar="CLIENT_ID",
-    help="The client id of the ilias instance"
+    help="The client id of the ilias instance",
 )

 configure_common_group_args(GROUP)


-def load(
-    args: argparse.Namespace,
-    parser: configparser.ConfigParser,
-) -> None:
+def load(args: argparse.Namespace, parser: configparser.ConfigParser) -> None:
     log.explain(f"Creating config for command '{COMMAND_NAME}'")

     parser["crawl:ilias"] = {}

@@ -45,8 +42,8 @@ def load(
     load_crawler(args, section)

     section["type"] = COMMAND_NAME
-    if args.ilias_url is not None:
-        section["base_url"] = args.ilias_url
+    if args.base_url is not None:
+        section["base_url"] = args.base_url
     if args.client_id is not None:
         section["client_id"] = args.client_id
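The crash fixed here comes from argparse's dest derivation: `--base-url` is stored on the namespace as `args.base_url` (dashes become underscores), so reading `args.ilias_url`, presumably left over from an earlier option name, raised an `AttributeError` whenever the flag was used. For illustration:

```python
import argparse

parser = argparse.ArgumentParser()
parser.add_argument("--base-url", type=str, metavar="BASE_URL")
args = parser.parse_args(["--base-url", "https://example.invalid/ilias"])

print(args.base_url)  # dest derived from the option name: "base_url"
# args.ilias_url would raise AttributeError, there is no such attribute
```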
@@ -21,8 +21,8 @@ configure_common_group_args(GROUP)


 def load(
     args: argparse.Namespace,
     parser: configparser.ConfigParser,
 ) -> None:
     log.explain(f"Creating config for command '{COMMAND_NAME}'")

@@ -18,25 +18,30 @@ GROUP.add_argument(
     "--link-regex",
     type=str,
     metavar="REGEX",
-    help="href-matching regex to identify downloadable files"
+    help="href-matching regex to identify downloadable files",
+)
+GROUP.add_argument(
+    "--basic-auth",
+    action="store_true",
+    help="enable basic authentication",
 )
 GROUP.add_argument(
     "target",
     type=str,
     metavar="TARGET",
-    help="url to crawl"
+    help="url to crawl",
 )
 GROUP.add_argument(
     "output",
     type=Path,
     metavar="OUTPUT",
-    help="output directory"
+    help="output directory",
 )


 def load(
     args: argparse.Namespace,
     parser: configparser.ConfigParser,
 ) -> None:
     log.explain("Creating config for command 'kit-ipd'")

@@ -50,5 +55,11 @@ def load(
     if args.link_regex:
         section["link_regex"] = str(args.link_regex)

+    if args.basic_auth:
+        section["auth"] = "auth:kit-ipd"
+        parser["auth:kit-ipd"] = {}
+        auth_section = parser["auth:kit-ipd"]
+        auth_section["type"] = "simple"


 SUBPARSER.set_defaults(command=load)
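A sketch of roughly what `--basic-auth` assembles, per the `load()` above. The crawl section name is an assumption, it is not visible in this excerpt; a `simple` authenticator prompts for missing credentials:

```python
import configparser

parser = configparser.ConfigParser()
parser["crawl:kit-ipd"] = {"type": "kit-ipd"}     # assumed section name
parser["crawl:kit-ipd"]["auth"] = "auth:kit-ipd"  # added by --basic-auth
parser["auth:kit-ipd"] = {"type": "simple"}       # prompts for credentials
```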
@@ -18,37 +18,37 @@ GROUP.add_argument(
     "target",
     type=Path,
     metavar="TARGET",
-    help="directory to crawl"
+    help="directory to crawl",
 )
 GROUP.add_argument(
     "output",
     type=Path,
     metavar="OUTPUT",
-    help="output directory"
+    help="output directory",
 )
 GROUP.add_argument(
     "--crawl-delay",
     type=float,
     metavar="SECONDS",
-    help="artificial delay to simulate for crawl requests"
+    help="artificial delay to simulate for crawl requests",
 )
 GROUP.add_argument(
     "--download-delay",
     type=float,
     metavar="SECONDS",
-    help="artificial delay to simulate for download requests"
+    help="artificial delay to simulate for download requests",
 )
 GROUP.add_argument(
     "--download-speed",
     type=int,
     metavar="BYTES_PER_SECOND",
-    help="download speed to simulate"
+    help="download speed to simulate",
 )


 def load(
     args: argparse.Namespace,
     parser: configparser.ConfigParser,
 ) -> None:
     log.explain("Creating config for command 'local'")

@@ -12,58 +12,60 @@ def configure_common_group_args(group: argparse._ArgumentGroup) -> None:
         "target",
         type=str,
         metavar="TARGET",
-        help="course id, 'desktop', or ILIAS URL to crawl"
+        help="course id, 'desktop', or ILIAS URL to crawl",
     )
     group.add_argument(
         "output",
         type=Path,
         metavar="OUTPUT",
-        help="output directory"
+        help="output directory",
     )
     group.add_argument(
-        "--username", "-u",
+        "--username",
+        "-u",
         type=str,
         metavar="USERNAME",
-        help="user name for authentication"
+        help="user name for authentication",
     )
     group.add_argument(
         "--keyring",
         action=BooleanOptionalAction,
-        help="use the system keyring to store and retrieve passwords"
+        help="use the system keyring to store and retrieve passwords",
     )
     group.add_argument(
         "--credential-file",
         type=Path,
         metavar="PATH",
-        help="read username and password from a credential file"
+        help="read username and password from a credential file",
     )
     group.add_argument(
         "--links",
         type=show_value_error(Links.from_string),
         metavar="OPTION",
-        help="how to represent external links"
+        help="how to represent external links",
     )
     group.add_argument(
         "--link-redirect-delay",
         type=int,
         metavar="SECONDS",
-        help="time before 'fancy' links redirect to to their target (-1 to disable)"
+        help="time before 'fancy' links redirect to to their target (-1 to disable)",
     )
     group.add_argument(
         "--videos",
         action=BooleanOptionalAction,
-        help="crawl and download videos"
+        help="crawl and download videos",
     )
     group.add_argument(
         "--forums",
         action=BooleanOptionalAction,
-        help="crawl and download forum posts"
+        help="crawl and download forum posts",
     )
     group.add_argument(
-        "--http-timeout", "-t",
+        "--http-timeout",
+        "-t",
         type=float,
         metavar="SECONDS",
-        help="timeout for all HTTP requests"
+        help="timeout for all HTTP requests",
     )

@@ -1,8 +1,9 @@
 import argparse
 import configparser
 from argparse import ArgumentTypeError
+from collections.abc import Callable, Sequence
 from pathlib import Path
-from typing import Any, Callable, List, Optional, Sequence, Union
+from typing import Any, Optional

 from ..output_dir import OnConflict, Redownload
 from ..version import NAME, VERSION

@@ -15,15 +16,15 @@ class ParserLoadError(Exception):
 # TODO Replace with argparse version when updating to 3.9?
 class BooleanOptionalAction(argparse.Action):
     def __init__(
         self,
-        option_strings: List[str],
+        option_strings: list[str],
         dest: Any,
         default: Any = None,
         type: Any = None,
         choices: Any = None,
         required: Any = False,
         help: Any = None,
         metavar: Any = None,
     ):
         if len(option_strings) != 1:
             raise ValueError("There must be exactly one option string")

@@ -48,11 +49,11 @@ class BooleanOptionalAction(argparse.Action):
         )

     def __call__(
         self,
         parser: argparse.ArgumentParser,
         namespace: argparse.Namespace,
-        values: Union[str, Sequence[Any], None],
+        values: str | Sequence[Any] | None,
         option_string: Optional[str] = None,
     ) -> None:
         if option_string and option_string in self.option_strings:
             value = not option_string.startswith("--no-")

@@ -67,11 +68,13 @@ def show_value_error(inner: Callable[[str], Any]) -> Callable[[str], Any]:
     Some validation functions (like the from_string in our enums) raise a ValueError.
     Argparse only pretty-prints ArgumentTypeErrors though, so we need to wrap our ValueErrors.
     """

     def wrapper(input: str) -> Any:
         try:
             return inner(input)
         except ValueError as e:
-            raise ArgumentTypeError(e)
+            raise ArgumentTypeError(e) from e

     return wrapper

@@ -81,52 +84,57 @@ CRAWLER_PARSER_GROUP = CRAWLER_PARSER.add_argument_group(
     description="arguments common to all crawlers",
 )
 CRAWLER_PARSER_GROUP.add_argument(
-    "--redownload", "-r",
+    "--redownload",
+    "-r",
     type=show_value_error(Redownload.from_string),
     metavar="OPTION",
-    help="when to download a file that's already present locally"
+    help="when to download a file that's already present locally",
 )
 CRAWLER_PARSER_GROUP.add_argument(
     "--on-conflict",
     type=show_value_error(OnConflict.from_string),
     metavar="OPTION",
-    help="what to do when local and remote files or directories differ"
+    help="what to do when local and remote files or directories differ",
 )
 CRAWLER_PARSER_GROUP.add_argument(
-    "--transform", "-T",
+    "--transform",
+    "-T",
     action="append",
     type=str,
     metavar="RULE",
-    help="add a single transformation rule. Can be specified multiple times"
+    help="add a single transformation rule. Can be specified multiple times",
 )
 CRAWLER_PARSER_GROUP.add_argument(
-    "--tasks", "-n",
+    "--tasks",
+    "-n",
     type=int,
     metavar="N",
-    help="maximum number of concurrent tasks (crawling, downloading)"
+    help="maximum number of concurrent tasks (crawling, downloading)",
 )
 CRAWLER_PARSER_GROUP.add_argument(
-    "--downloads", "-N",
+    "--downloads",
+    "-N",
     type=int,
     metavar="N",
-    help="maximum number of tasks that may download data at the same time"
+    help="maximum number of tasks that may download data at the same time",
 )
 CRAWLER_PARSER_GROUP.add_argument(
-    "--task-delay", "-d",
+    "--task-delay",
+    "-d",
     type=float,
     metavar="SECONDS",
-    help="time the crawler should wait between subsequent tasks"
+    help="time the crawler should wait between subsequent tasks",
 )
 CRAWLER_PARSER_GROUP.add_argument(
     "--windows-paths",
     action=BooleanOptionalAction,
-    help="whether to repair invalid paths on windows"
+    help="whether to repair invalid paths on windows",
 )


 def load_crawler(
     args: argparse.Namespace,
     section: configparser.SectionProxy,
 ) -> None:
     if args.redownload is not None:
         section["redownload"] = args.redownload.value

@@ -152,79 +160,79 @@ PARSER.add_argument(
     version=f"{NAME} {VERSION} (https://github.com/Garmelon/PFERD)",
 )
 PARSER.add_argument(
-    "--config", "-c",
+    "--config",
+    "-c",
     type=Path,
     metavar="PATH",
-    help="custom config file"
+    help="custom config file",
 )
 PARSER.add_argument(
     "--dump-config",
     action="store_true",
-    help="dump current configuration to the default config path and exit"
+    help="dump current configuration to the default config path and exit",
 )
 PARSER.add_argument(
     "--dump-config-to",
     metavar="PATH",
-    help="dump current configuration to a file and exit."
-         " Use '-' as path to print to stdout instead"
+    help="dump current configuration to a file and exit. Use '-' as path to print to stdout instead",
 )
 PARSER.add_argument(
     "--debug-transforms",
     action="store_true",
-    help="apply transform rules to files of previous run"
+    help="apply transform rules to files of previous run",
 )
 PARSER.add_argument(
-    "--crawler", "-C",
+    "--crawler",
+    "-C",
     action="append",
     type=str,
     metavar="NAME",
-    help="only execute a single crawler."
-         " Can be specified multiple times to execute multiple crawlers"
+    help="only execute a single crawler. Can be specified multiple times to execute multiple crawlers",
 )
 PARSER.add_argument(
-    "--skip", "-S",
+    "--skip",
+    "-S",
     action="append",
     type=str,
     metavar="NAME",
-    help="don't execute this particular crawler."
-         " Can be specified multiple times to skip multiple crawlers"
+    help="don't execute this particular crawler. Can be specified multiple times to skip multiple crawlers",
 )
 PARSER.add_argument(
     "--working-dir",
     type=Path,
     metavar="PATH",
-    help="custom working directory"
+    help="custom working directory",
 )
 PARSER.add_argument(
     "--explain",
     action=BooleanOptionalAction,
-    help="log and explain in detail what PFERD is doing"
+    help="log and explain in detail what PFERD is doing",
 )
 PARSER.add_argument(
     "--status",
     action=BooleanOptionalAction,
-    help="print status updates while PFERD is crawling"
+    help="print status updates while PFERD is crawling",
 )
 PARSER.add_argument(
     "--report",
     action=BooleanOptionalAction,
-    help="print a report of all local changes before exiting"
+    help="print a report of all local changes before exiting",
 )
 PARSER.add_argument(
     "--share-cookies",
     action=BooleanOptionalAction,
-    help="whether crawlers should share cookies where applicable"
+    help="whether crawlers should share cookies where applicable",
 )
 PARSER.add_argument(
     "--show-not-deleted",
     action=BooleanOptionalAction,
-    help="print messages in status and report when PFERD did not delete a local only file"
+    help="print messages in status and report when PFERD did not delete a local only file",
 )


 def load_default_section(
     args: argparse.Namespace,
     parser: configparser.ConfigParser,
 ) -> None:
     section = parser[parser.default_section]

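The `TODO Replace with argparse version when updating to 3.9?` above could likely be resolved now that the minimum is Python 3.11: the standard library has shipped an equivalent `BooleanOptionalAction` since 3.9. A sketch of the stdlib version:

```python
import argparse

parser = argparse.ArgumentParser()
# Generates both --explain and --no-explain from one declaration.
parser.add_argument("--explain", action=argparse.BooleanOptionalAction)

print(parser.parse_args(["--explain"]).explain)     # True
print(parser.parse_args(["--no-explain"]).explain)  # False
print(parser.parse_args([]).explain)                # None
```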
@@ -3,7 +3,7 @@ import os
 import sys
 from configparser import ConfigParser, SectionProxy
 from pathlib import Path
-from typing import Any, List, NoReturn, Optional, Tuple
+from typing import Any, NoReturn, Optional

 from rich.markup import escape

@@ -53,10 +53,10 @@ class Section:
         raise ConfigOptionError(self.s.name, key, desc)

     def invalid_value(
         self,
         key: str,
         value: Any,
         reason: Optional[str],
     ) -> NoReturn:
         if reason is None:
             self.error(key, f"Invalid value {value!r}")

@@ -126,13 +126,13 @@ class Config:
             with open(path, encoding="utf-8") as f:
                 parser.read_file(f, source=str(path))
         except FileNotFoundError:
-            raise ConfigLoadError(path, "File does not exist")
+            raise ConfigLoadError(path, "File does not exist") from None
         except IsADirectoryError:
-            raise ConfigLoadError(path, "That's a directory, not a file")
+            raise ConfigLoadError(path, "That's a directory, not a file") from None
         except PermissionError:
-            raise ConfigLoadError(path, "Insufficient permissions")
+            raise ConfigLoadError(path, "Insufficient permissions") from None
         except UnicodeDecodeError:
-            raise ConfigLoadError(path, "File is not encoded using UTF-8")
+            raise ConfigLoadError(path, "File is not encoded using UTF-8") from None

     def dump(self, path: Optional[Path] = None) -> None:
         """

@@ -150,8 +150,8 @@ class Config:

         try:
             path.parent.mkdir(parents=True, exist_ok=True)
-        except PermissionError:
-            raise ConfigDumpError(path, "Could not create parent directory")
+        except PermissionError as e:
+            raise ConfigDumpError(path, "Could not create parent directory") from e

         try:
             # Ensuring we don't accidentally overwrite any existing files by

@@ -167,16 +167,16 @@ class Config:
                 with open(path, "w", encoding="utf-8") as f:
                     self._parser.write(f)
             else:
-                raise ConfigDumpError(path, "File already exists")
+                raise ConfigDumpError(path, "File already exists") from None
         except IsADirectoryError:
-            raise ConfigDumpError(path, "That's a directory, not a file")
-        except PermissionError:
-            raise ConfigDumpError(path, "Insufficient permissions")
+            raise ConfigDumpError(path, "That's a directory, not a file") from None
+        except PermissionError as e:
+            raise ConfigDumpError(path, "Insufficient permissions") from e

     def dump_to_stdout(self) -> None:
         self._parser.write(sys.stdout)

-    def crawl_sections(self) -> List[Tuple[str, SectionProxy]]:
+    def crawl_sections(self) -> list[tuple[str, SectionProxy]]:
         result = []
         for name, proxy in self._parser.items():
             if name.startswith("crawl:"):

@@ -184,7 +184,7 @@ class Config:

         return result

-    def auth_sections(self) -> List[Tuple[str, SectionProxy]]:
+    def auth_sections(self) -> list[tuple[str, SectionProxy]]:
         result = []
         for name, proxy in self._parser.items():
             if name.startswith("auth:"):
@@ -1,5 +1,5 @@
+from collections.abc import Callable
 from configparser import SectionProxy
-from typing import Callable, Dict

 from ..auth import Authenticator
 from ..config import Config

@@ -8,20 +8,19 @@ from .ilias import IliasWebCrawler, IliasWebCrawlerSection, KitIliasWebCrawler,
 from .kit_ipd_crawler import KitIpdCrawler, KitIpdCrawlerSection
 from .local_crawler import LocalCrawler, LocalCrawlerSection

-CrawlerConstructor = Callable[[
-    str,                       # Name (without the "crawl:" prefix)
-    SectionProxy,              # Crawler's section of global config
-    Config,                    # Global config
-    Dict[str, Authenticator],  # Loaded authenticators by name
-], Crawler]
+CrawlerConstructor = Callable[
+    [
+        str,  # Name (without the "crawl:" prefix)
+        SectionProxy,  # Crawler's section of global config
+        Config,  # Global config
+        dict[str, Authenticator],  # Loaded authenticators by name
+    ],
+    Crawler,
+]

-CRAWLERS: Dict[str, CrawlerConstructor] = {
-    "local": lambda n, s, c, a:
-        LocalCrawler(n, LocalCrawlerSection(s), c),
-    "ilias-web": lambda n, s, c, a:
-        IliasWebCrawler(n, IliasWebCrawlerSection(s), c, a),
-    "kit-ilias-web": lambda n, s, c, a:
-        KitIliasWebCrawler(n, KitIliasWebCrawlerSection(s), c, a),
-    "kit-ipd": lambda n, s, c, a:
-        KitIpdCrawler(n, KitIpdCrawlerSection(s), c),
+CRAWLERS: dict[str, CrawlerConstructor] = {
+    "local": lambda n, s, c, a: LocalCrawler(n, LocalCrawlerSection(s), c),
+    "ilias-web": lambda n, s, c, a: IliasWebCrawler(n, IliasWebCrawlerSection(s), c, a),
+    "kit-ilias-web": lambda n, s, c, a: KitIliasWebCrawler(n, KitIliasWebCrawlerSection(s), c, a),
+    "kit-ipd": lambda n, s, c, a: KitIpdCrawler(n, KitIpdCrawlerSection(s), c, a),
 }
@@ -1,10 +1,10 @@
 import asyncio
 import os
 from abc import ABC, abstractmethod
-from collections.abc import Awaitable, Coroutine
+from collections.abc import Awaitable, Callable, Coroutine, Sequence
 from datetime import datetime
 from pathlib import Path, PurePath
-from typing import Any, Callable, Dict, List, Optional, Sequence, Set, Tuple, TypeVar
+from typing import Any, Optional, TypeVar

 from ..auth import Authenticator
 from ..config import Config, Section

@@ -116,7 +116,7 @@ class CrawlToken(ReusableAsyncContextManager[ProgressBar]):
         return bar


-class DownloadToken(ReusableAsyncContextManager[Tuple[ProgressBar, FileSink]]):
+class DownloadToken(ReusableAsyncContextManager[tuple[ProgressBar, FileSink]]):
     def __init__(self, limiter: Limiter, fs_token: FileSinkToken, path: PurePath):
         super().__init__()

@@ -128,12 +128,13 @@ class DownloadToken(ReusableAsyncContextManager[Tuple[ProgressBar, FileSink]]):
     def path(self) -> PurePath:
         return self._path

-    async def _on_aenter(self) -> Tuple[ProgressBar, FileSink]:
+    async def _on_aenter(self) -> tuple[ProgressBar, FileSink]:
         await self._stack.enter_async_context(self._limiter.limit_download())
         sink = await self._stack.enter_async_context(self._fs_token)
         # The "Downloaded ..." message is printed in the output dir, not here
-        bar = self._stack.enter_context(log.download_bar("[bold bright_cyan]", "Downloading",
-                                                         fmt_path(self._path)))
+        bar = self._stack.enter_context(
+            log.download_bar("[bold bright_cyan]", "Downloading", fmt_path(self._path))
+        )

         return bar, sink

@@ -149,9 +150,7 @@ class CrawlerSection(Section):
         return self.s.getboolean("skip", fallback=False)

     def output_dir(self, name: str) -> Path:
-        # TODO Use removeprefix() after switching to 3.9
-        if name.startswith("crawl:"):
-            name = name[len("crawl:"):]
+        name = name.removeprefix("crawl:")
         return Path(self.s.get("output_dir", name)).expanduser()

     def redownload(self) -> Redownload:

@@ -206,7 +205,7 @@ class CrawlerSection(Section):
         on_windows = os.name == "nt"
         return self.s.getboolean("windows_paths", fallback=on_windows)

-    def auth(self, authenticators: Dict[str, Authenticator]) -> Authenticator:
+    def auth(self, authenticators: dict[str, Authenticator]) -> Authenticator:
         value = self.s.get("auth")
         if value is None:
             self.missing_value("auth")

@@ -218,10 +217,10 @@ class CrawlerSection(Section):

 class Crawler(ABC):
     def __init__(
         self,
         name: str,
         section: CrawlerSection,
         config: Config,
     ) -> None:
         """
         Initialize a crawler from its name and its section in the config file.

@@ -263,7 +262,7 @@ class Crawler(ABC):
         return self._output_dir

     @staticmethod
-    async def gather(awaitables: Sequence[Awaitable[Any]]) -> List[Any]:
+    async def gather(awaitables: Sequence[Awaitable[Any]]) -> list[Any]:
         """
         Similar to asyncio.gather. However, in the case of an exception, all
         still running tasks are cancelled and the exception is rethrown.

@@ -294,14 +293,39 @@ class Crawler(ABC):
             log.explain("Answer: Yes")
             return CrawlToken(self._limiter, path)

+    def should_try_download(
+        self,
+        path: PurePath,
+        *,
+        etag_differs: Optional[bool] = None,
+        mtime: Optional[datetime] = None,
+        redownload: Optional[Redownload] = None,
+        on_conflict: Optional[OnConflict] = None,
+    ) -> bool:
+        log.explain_topic(f"Decision: Should Download {fmt_path(path)}")
+
+        if self._transformer.transform(path) is None:
+            log.explain("Answer: No (ignored)")
+            return False
+
+        should_download = self._output_dir.should_try_download(
+            path, etag_differs=etag_differs, mtime=mtime, redownload=redownload, on_conflict=on_conflict
+        )
+        if should_download:
+            log.explain("Answer: Yes")
+            return True
+        else:
+            log.explain("Answer: No")
+            return False
+
     async def download(
         self,
         path: PurePath,
         *,
         etag_differs: Optional[bool] = None,
         mtime: Optional[datetime] = None,
         redownload: Optional[Redownload] = None,
         on_conflict: Optional[OnConflict] = None,
     ) -> Optional[DownloadToken]:
         log.explain_topic(f"Decision: Download {fmt_path(path)}")
         path = self._deduplicator.mark(path)

@@ -319,7 +343,7 @@ class Crawler(ABC):
             etag_differs=etag_differs,
             mtime=mtime,
             redownload=redownload,
-            on_conflict=on_conflict
+            on_conflict=on_conflict,
         )
         if fs_token is None:
             log.explain("Answer: No")

@@ -370,7 +394,7 @@ class Crawler(ABC):
             log.warn("Couldn't find or load old report")
             return

-        seen: Set[PurePath] = set()
+        seen: set[PurePath] = set()
         for known in sorted(self.prev_report.found_paths):
             looking_at = list(reversed(known.parents)) + [known]
             for path in looking_at:
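The new `should_try_download` gives crawlers a cheap pre-check against the transformer and output-dir policy before any network traffic; `download()` still makes the authoritative decision. A sketch of how a caller might use it (hypothetical helper, not from the diff):

```python
from pathlib import PurePath

async def maybe_fetch(crawler, path: PurePath) -> None:
    # Skip the HEAD/GET entirely if the path is ignored or the output
    # directory would reject the download anyway.
    if not crawler.should_try_download(path):
        return
    dl = await crawler.download(path)  # authoritative check + file sink
    if dl is None:
        return
    async with dl as (bar, sink):
        ...  # fetch the resource and stream it into sink
```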
@@ -3,7 +3,7 @@ import http.cookies
 import ssl
 from datetime import datetime
 from pathlib import Path, PurePath
-from typing import Any, Dict, List, Optional, Tuple
+from typing import Any, Optional

 import aiohttp
 import certifi

@@ -13,7 +13,7 @@ from bs4 import Tag
 from ..auth import Authenticator
 from ..config import Config
 from ..logging import log
-from ..utils import fmt_real_path
+from ..utils import fmt_real_path, sanitize_path_name
 from ..version import NAME, VERSION
 from .crawler import Crawler, CrawlerSection

@@ -22,18 +22,18 @@ ETAGS_CUSTOM_REPORT_VALUE_KEY = "etags"

 class HttpCrawlerSection(CrawlerSection):
     def http_timeout(self) -> float:
-        return self.s.getfloat("http_timeout", fallback=20)
+        return self.s.getfloat("http_timeout", fallback=30)


 class HttpCrawler(Crawler):
     COOKIE_FILE = PurePath(".cookies")

     def __init__(
         self,
         name: str,
         section: HttpCrawlerSection,
         config: Config,
         shared_auth: Optional[Authenticator] = None,
     ) -> None:
         super().__init__(name, section, config)

@@ -43,7 +43,7 @@ class HttpCrawler(Crawler):
         self._http_timeout = section.http_timeout()

         self._cookie_jar_path = self._output_dir.resolve(self.COOKIE_FILE)
-        self._shared_cookie_jar_paths: Optional[List[Path]] = None
+        self._shared_cookie_jar_paths: Optional[list[Path]] = None
         self._shared_auth = shared_auth

         self._output_dir.register_reserved(self.COOKIE_FILE)

@@ -98,7 +98,7 @@ class HttpCrawler(Crawler):
         """
         raise RuntimeError("_authenticate() was called but crawler doesn't provide an implementation")

-    def share_cookies(self, shared: Dict[Authenticator, List[Path]]) -> None:
+    def share_cookies(self, shared: dict[Authenticator, list[Path]]) -> None:
         if not self._shared_auth:
             return

@@ -192,7 +192,7 @@ class HttpCrawler(Crawler):
             if level_heading is None:
                 return find_associated_headings(tag, level - 1)

-            folder_name = level_heading.getText().strip()
+            folder_name = sanitize_path_name(level_heading.get_text().strip())
             return find_associated_headings(level_heading, level - 1) / folder_name

         # start at level <h3> because paragraph-level headings are usually too granular for folder names
@@ -219,7 +219,7 @@ class HttpCrawler(Crawler):
|
|||||||
etags[str(path)] = etag
|
etags[str(path)] = etag
|
||||||
self._output_dir.report.add_custom_value(ETAGS_CUSTOM_REPORT_VALUE_KEY, etags)
|
self._output_dir.report.add_custom_value(ETAGS_CUSTOM_REPORT_VALUE_KEY, etags)
|
||||||
|
|
||||||
async def _request_resource_version(self, resource_url: str) -> Tuple[Optional[str], Optional[datetime]]:
|
async def _request_resource_version(self, resource_url: str) -> tuple[Optional[str], Optional[datetime]]:
|
||||||
"""
|
"""
|
||||||
Requests the ETag and Last-Modified headers of a resource via a HEAD request.
|
Requests the ETag and Last-Modified headers of a resource via a HEAD request.
|
||||||
If no entity tag / modification date can be obtained, the according value will be None.
|
If no entity tag / modification date can be obtained, the according value will be None.
|
||||||
@@ -231,6 +231,7 @@ class HttpCrawler(Crawler):
|
|||||||
|
|
||||||
etag_header = resp.headers.get("ETag")
|
etag_header = resp.headers.get("ETag")
|
||||||
last_modified_header = resp.headers.get("Last-Modified")
|
last_modified_header = resp.headers.get("Last-Modified")
|
||||||
|
last_modified = None
|
||||||
|
|
||||||
if last_modified_header:
|
if last_modified_header:
|
||||||
try:
|
try:
|
||||||
@@ -251,23 +252,23 @@ class HttpCrawler(Crawler):
|
|||||||
self._load_cookies()
|
self._load_cookies()
|
||||||
|
|
||||||
async with aiohttp.ClientSession(
|
async with aiohttp.ClientSession(
|
||||||
headers={"User-Agent": f"{NAME}/{VERSION}"},
|
headers={"User-Agent": f"{NAME}/{VERSION}"},
|
||||||
cookie_jar=self._cookie_jar,
|
cookie_jar=self._cookie_jar,
|
||||||
connector=aiohttp.TCPConnector(ssl=ssl.create_default_context(cafile=certifi.where())),
|
connector=aiohttp.TCPConnector(ssl=ssl.create_default_context(cafile=certifi.where())),
|
||||||
timeout=ClientTimeout(
|
timeout=ClientTimeout(
|
||||||
# 30 minutes. No download in the history of downloads was longer than 30 minutes.
|
# 30 minutes. No download in the history of downloads was longer than 30 minutes.
|
||||||
# This is enough to transfer a 600 MB file over a 3 Mib/s connection.
|
# This is enough to transfer a 600 MB file over a 3 Mib/s connection.
|
||||||
# Allowing an arbitrary value could be annoying for overnight batch jobs
|
# Allowing an arbitrary value could be annoying for overnight batch jobs
|
||||||
total=15 * 60,
|
total=15 * 60,
|
||||||
connect=self._http_timeout,
|
connect=self._http_timeout,
|
||||||
sock_connect=self._http_timeout,
|
sock_connect=self._http_timeout,
|
||||||
sock_read=self._http_timeout,
|
sock_read=self._http_timeout,
|
||||||
),
|
),
|
||||||
# See https://github.com/aio-libs/aiohttp/issues/6626
|
# See https://github.com/aio-libs/aiohttp/issues/6626
|
||||||
# Without this aiohttp will mangle the redirect header from Shibboleth, invalidating the
|
# Without this aiohttp will mangle the redirect header from Shibboleth, invalidating the
|
||||||
# passed signature. Shibboleth will not accept the broken signature and authentication will
|
# passed signature. Shibboleth will not accept the broken signature and authentication will
|
||||||
# fail.
|
# fail.
|
||||||
requote_redirect_url=False
|
requote_redirect_url=False,
|
||||||
) as session:
|
) as session:
|
||||||
self.session = session
|
self.session = session
|
||||||
try:
|
try:
|
||||||
|
|||||||
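The comments in the last hunk explain the two deliberate session choices: a generous total timeout that still fits large downloads, and `requote_redirect_url=False` so aiohttp passes Shibboleth's signed redirect URL through unmodified. A standalone sketch of the same configuration (the `fetch` helper and URL are illustrative, not PFERD code):

```python
import asyncio

import aiohttp
from aiohttp import ClientTimeout


async def fetch(url: str) -> bytes:
    async with aiohttp.ClientSession(
        timeout=ClientTimeout(
            total=15 * 60,   # hard cap for a whole download
            connect=30,      # fail fast if the server never answers
            sock_connect=30,
            sock_read=30,    # ... or stalls mid-transfer
        ),
        # Pass redirect URLs through byte-for-byte; re-quoting can break
        # signed parameters (see the aiohttp issue 6626 reference above).
        requote_redirect_url=False,
    ) as session:
        async with session.get(url) as resp:
            return await resp.read()


if __name__ == "__main__":
    print(len(asyncio.run(fetch("https://example.com"))))
```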
@@ -1,5 +1,9 @@
-from .kit_ilias_web_crawler import (IliasWebCrawler, IliasWebCrawlerSection, KitIliasWebCrawler,
-                                    KitIliasWebCrawlerSection)
+from .kit_ilias_web_crawler import (
+    IliasWebCrawler,
+    IliasWebCrawlerSection,
+    KitIliasWebCrawler,
+    KitIliasWebCrawlerSection,
+)

 __all__ = [
     "IliasWebCrawler",
@@ -1,5 +1,6 @@
 import asyncio
-from typing import Any, Callable, Optional
+from collections.abc import Callable
+from typing import Any, Optional

 import aiohttp

@@ -15,9 +16,9 @@ def _iorepeat(attempts: int, name: str, failure_is_error: bool = False) -> Calla
             try:
                 return await f(*args, **kwargs)
             except aiohttp.ContentTypeError:  # invalid content type
-                raise CrawlWarning("ILIAS returned an invalid content type")
+                raise CrawlWarning("ILIAS returned an invalid content type") from None
             except aiohttp.TooManyRedirects:
-                raise CrawlWarning("Got stuck in a redirect loop")
+                raise CrawlWarning("Got stuck in a redirect loop") from None
             except aiohttp.ClientPayloadError as e:  # encoding or not enough bytes
                 last_exception = e
             except aiohttp.ClientConnectionError as e:  # e.g. timeout, disconnect, resolve failed, etc.
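The `from None` added in this hunk matters for error reporting: without it, Python prints the original aiohttp traceback chained below the `CrawlWarning`. A condensed sketch of the decorator's retry-and-translate pattern (`retry` and the exception choices here are illustrative, not PFERD's `_iorepeat`):

```python
from collections.abc import Awaitable, Callable
from typing import Any, Optional


def retry(attempts: int) -> Callable[[Callable[..., Awaitable[Any]]], Callable[..., Awaitable[Any]]]:
    def decorator(f: Callable[..., Awaitable[Any]]) -> Callable[..., Awaitable[Any]]:
        async def wrapper(*args: Any, **kwargs: Any) -> Any:
            last_exception: Optional[Exception] = None
            for _ in range(attempts):
                try:
                    return await f(*args, **kwargs)
                except ValueError:
                    # Not worth retrying; "from None" hides the noisy chained traceback
                    raise RuntimeError("unrecoverable error") from None
                except OSError as e:
                    last_exception = e  # transient, try again
            raise RuntimeError(f"failed after {attempts} attempts") from last_exception

        return wrapper

    return decorator
```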
@@ -1,5 +1,7 @@
+import dataclasses
+import re
 from enum import Enum
-from typing import Optional
+from typing import Optional, cast

 import bs4

@@ -12,7 +14,9 @@ _link_template_fancy = """
 <head>
     <meta charset="UTF-8">
     <title>ILIAS - Link: {{name}}</title>
+    <!-- REPEAT REMOVE START -->
     <meta http-equiv = "refresh" content = "{{redirect_delay}}; url = {{link}}" />
+    <!-- REPEAT REMOVE END -->
 </head>

 <style>
@@ -23,6 +27,8 @@ _link_template_fancy = """
         display: flex;
         align-items: center;
         justify-content: center;
+        flex-direction: column;
+        gap: 4px;
     }
     body {
         padding: 0;
@@ -31,11 +37,16 @@ _link_template_fancy = """
         font-family: "Open Sans", Verdana, Arial, Helvetica, sans-serif;
         height: 100vh;
     }
-    .row {
-        background-color: white;
+    .column {
         min-width: 500px;
         max-width: 90vw;
         display: flex;
+        flex-direction: column;
+        row-gap: 5px;
+    }
+    .row {
+        background-color: white;
+        display: flex;
         padding: 1em;
     }
     .logo {
@@ -75,19 +86,23 @@ _link_template_fancy = """
     }
 </style>
 <body class="center-flex">
-    <div class="row">
-        <div class="logo center-flex">
-            <svg xmlns="http://www.w3.org/2000/svg" width="24" height="24" viewBox="0 0 24 24">
-                <path d="M12 0c-6.627 0-12 5.373-12 12s5.373 12 12 12 12-5.373 12-12-5.373-12-12-12zm9.567 9.098c-.059-.058-.127-.108-.206-.138-.258-.101-1.35.603-1.515.256-.108-.231-.327.148-.578.008-.121-.067-.459-.52-.611-.465-.312.112.479.974.694 1.087.203-.154.86-.469 1.002-.039.271.812-.745 1.702-1.264 2.171-.775.702-.63-.454-1.159-.86-.277-.213-.274-.667-.555-.824-.125-.071-.7-.732-.694-.821l-.017.167c-.095.072-.297-.27-.319-.325 0 .298.485.772.646 1.011.273.409.42 1.005.756 1.339.179.18.866.923 1.045.908l.921-.437c.649.154-1.531 3.237-1.738 3.619-.171.321.139 1.112.114 1.49-.029.437-.374.579-.7.817-.35.255-.268.752-.562.934-.521.321-.897 1.366-1.639 1.361-.219-.001-1.151.364-1.273.007-.095-.258-.223-.455-.356-.71-.131-.25-.015-.51-.175-.731-.11-.154-.479-.502-.513-.684-.002-.157.118-.632.283-.715.231-.118.044-.462.016-.663-.048-.357-.27-.652-.535-.859-.393-.302-.189-.542-.098-.974 0-.206-.126-.476-.402-.396-.57.166-.396-.445-.812-.417-.299.021-.543.211-.821.295-.349.104-.707-.083-1.053-.126-1.421-.179-1.885-1.804-1.514-2.976.037-.192-.115-.547-.048-.696.159-.352.485-.752.768-1.021.16-.152.365-.113.553-.231.29-.182.294-.558.578-.789.404-.328.956-.321 1.482-.392.281-.037 1.35-.268 1.518-.06 0 .039.193.611-.019.578.438.023 1.061.756 1.476.585.213-.089.135-.744.573-.427.265.19 1.45.275 1.696.07.152-.125.236-.939.053-1.031.117.116-.618.125-.686.099-.122-.044-.235.115-.43.025.117.055-.651-.358-.22-.674-.181.132-.349-.037-.544.109-.135.109.062.181-.13.277-.305.155-.535-.53-.649-.607-.118-.077-1.024-.713-.777-.298l.797.793c-.04.026-.209-.289-.209-.059.053-.136.02.585-.105.35-.056-.09.091-.14.006-.271 0-.085-.23-.169-.275-.228-.126-.157-.462-.502-.644-.585-.05-.024-.771.088-.832.111-.071.099-.131.203-.181.314-.149.055-.29.127-.423.216l-.159.356c-.068.061-.772.294-.776.303.03-.076-.492-.172-.457-.324.038-.167.215-.687.169-.877-.048-.199 1.085.287 1.158-.238.029-.227.047-.492-.316-.531.069.008.702-.249.807-.364.148-.169.486-.447.731-.447.286 0 .225-.417.356-.622.133.053-.071.38.088.512-.01-.104.45.057.494.033.105-.056.691-.023.601-.299-.101-.28.052-.197.183-.255-.02.008.248-.458.363-.456-.104-.089-.398.112-.516.103-.308-.024-.177-.525-.061-.672.09-.116-.246-.258-.25-.036-.006.332-.314.633-.243 1.075.109.666-.743-.161-.816-.115-.283.172-.515-.216-.368-.449.149-.238.51-.226.659-.48.104-.179.227-.389.388-.524.541-.454.689-.091 1.229-.042.526.048.178.125.105.327-.07.192.289.261.413.1.071-.092.232-.326.301-.499.07-.175.578-.2.527-.365 2.72 1.148 4.827 3.465 5.694 6.318zm-11.113-3.779l.068-.087.073-.019c.042-.034.086-.118.151-.104.043.009.146.095.111.148-.037.054-.066-.049-.081.101-.018.169-.188.167-.313.222-.087.037-.175-.018-.09-.104l.088-.108-.007-.049zm.442.245c.046-.045.138-.008.151-.094.014-.084.078-.178-.008-.335-.022-.042.116-.082.051-.137l-.109.032s.155-.668.364-.366l-.089.103c.135.134.172.47.215.687.127.066.324.078.098.192.117-.02-.618.314-.715.178-.072-.083.317-.139.307-.173-.004-.011-.317-.02-.265-.087zm1.43-3.547l-.356.326c-.36.298-1.28.883-1.793.705-.524-.18-1.647.667-1.826.673-.067.003.002-.641.36-.689-.141.021.993-.575 1.185-.805.678-.146 1.381-.227 2.104-.227l.326.017zm-5.086 1.19c.07.082.278.092-.026.288-.183.11-.377.809-.548.809-.51.223-.542-.439-1.109.413-.078.115-.395.158-.644.236.685-.688 1.468-1.279 2.327-1.746zm-5.24 8.793c0-.541.055-1.068.139-1.586l.292.185c.113.135.113.719.169.911.139.482.484.751.748 1.19.155.261.414.923.332 1.197.109-.179 1.081.824 1.259 1.033.418.492.74 1.088.061 1.574-.219.158.334 1.14.049 1.382l-.365.094c-.225.138-.235.397-.166.631-1.562-1.765-2.518-4.076-2.518-6.611zm14.347-5.823c.083-.01-.107.167-.107.167.033.256.222.396.581.527.437.157.038.455-.213.385-.139-.039-.854-.255-.879.025 0 .167-.679.001-.573-.175.073-.119.05-.387.186-.562.193-.255.38-.116.386.032-.001.394.398-.373.619-.399z"/>
-            </svg>
-        </div>
-        <div class="tile">
-            <div class="top-row">
-                <a href="{{link}}">{{name}}</a>
-            </div>
-            <div class="bottom-row">{{description}}</div>
-        </div>
-        <div class="menu-button center-flex"> ⯆ </div>
-    </div>
+    <div class="column">
+        <!-- REPEAT START -->
+        <div class="row">
+            <div class="logo center-flex">
+                <svg xmlns="http://www.w3.org/2000/svg" width="24" height="24" viewBox="0 0 24 24">
+                    <path d="M12 0c-6.627 0-12 5.373-12 12s5.373 12 12 12 12-5.373 12-12-5.373-12-12-12zm9.567 9.098c-.059-.058-.127-.108-.206-.138-.258-.101-1.35.603-1.515.256-.108-.231-.327.148-.578.008-.121-.067-.459-.52-.611-.465-.312.112.479.974.694 1.087.203-.154.86-.469 1.002-.039.271.812-.745 1.702-1.264 2.171-.775.702-.63-.454-1.159-.86-.277-.213-.274-.667-.555-.824-.125-.071-.7-.732-.694-.821l-.017.167c-.095.072-.297-.27-.319-.325 0 .298.485.772.646 1.011.273.409.42 1.005.756 1.339.179.18.866.923 1.045.908l.921-.437c.649.154-1.531 3.237-1.738 3.619-.171.321.139 1.112.114 1.49-.029.437-.374.579-.7.817-.35.255-.268.752-.562.934-.521.321-.897 1.366-1.639 1.361-.219-.001-1.151.364-1.273.007-.095-.258-.223-.455-.356-.71-.131-.25-.015-.51-.175-.731-.11-.154-.479-.502-.513-.684-.002-.157.118-.632.283-.715.231-.118.044-.462.016-.663-.048-.357-.27-.652-.535-.859-.393-.302-.189-.542-.098-.974 0-.206-.126-.476-.402-.396-.57.166-.396-.445-.812-.417-.299.021-.543.211-.821.295-.349.104-.707-.083-1.053-.126-1.421-.179-1.885-1.804-1.514-2.976.037-.192-.115-.547-.048-.696.159-.352.485-.752.768-1.021.16-.152.365-.113.553-.231.29-.182.294-.558.578-.789.404-.328.956-.321 1.482-.392.281-.037 1.35-.268 1.518-.06 0 .039.193.611-.019.578.438.023 1.061.756 1.476.585.213-.089.135-.744.573-.427.265.19 1.45.275 1.696.07.152-.125.236-.939.053-1.031.117.116-.618.125-.686.099-.122-.044-.235.115-.43.025.117.055-.651-.358-.22-.674-.181.132-.349-.037-.544.109-.135.109.062.181-.13.277-.305.155-.535-.53-.649-.607-.118-.077-1.024-.713-.777-.298l.797.793c-.04.026-.209-.289-.209-.059.053-.136.02.585-.105.35-.056-.09.091-.14.006-.271 0-.085-.23-.169-.275-.228-.126-.157-.462-.502-.644-.585-.05-.024-.771.088-.832.111-.071.099-.131.203-.181.314-.149.055-.29.127-.423.216l-.159.356c-.068.061-.772.294-.776.303.03-.076-.492-.172-.457-.324.038-.167.215-.687.169-.877-.048-.199 1.085.287 1.158-.238.029-.227.047-.492-.316-.531.069.008.702-.249.807-.364.148-.169.486-.447.731-.447.286 0 .225-.417.356-.622.133.053-.071.38.088.512-.01-.104.45.057.494.033.105-.056.691-.023.601-.299-.101-.28.052-.197.183-.255-.02.008.248-.458.363-.456-.104-.089-.398.112-.516.103-.308-.024-.177-.525-.061-.672.09-.116-.246-.258-.25-.036-.006.332-.314.633-.243 1.075.109.666-.743-.161-.816-.115-.283.172-.515-.216-.368-.449.149-.238.51-.226.659-.48.104-.179.227-.389.388-.524.541-.454.689-.091 1.229-.042.526.048.178.125.105.327-.07.192.289.261.413.1.071-.092.232-.326.301-.499.07-.175.578-.2.527-.365 2.72 1.148 4.827 3.465 5.694 6.318zm-11.113-3.779l.068-.087.073-.019c.042-.034.086-.118.151-.104.043.009.146.095.111.148-.037.054-.066-.049-.081.101-.018.169-.188.167-.313.222-.087.037-.175-.018-.09-.104l.088-.108-.007-.049zm.442.245c.046-.045.138-.008.151-.094.014-.084.078-.178-.008-.335-.022-.042.116-.082.051-.137l-.109.032s.155-.668.364-.366l-.089.103c.135.134.172.47.215.687.127.066.324.078.098.192.117-.02-.618.314-.715.178-.072-.083.317-.139.307-.173-.004-.011-.317-.02-.265-.087zm1.43-3.547l-.356.326c-.36.298-1.28.883-1.793.705-.524-.18-1.647.667-1.826.673-.067.003.002-.641.36-.689-.141.021.993-.575 1.185-.805.678-.146 1.381-.227 2.104-.227l.326.017zm-5.086 1.19c.07.082.278.092-.026.288-.183.11-.377.809-.548.809-.51.223-.542-.439-1.109.413-.078.115-.395.158-.644.236.685-.688 1.468-1.279 2.327-1.746zm-5.24 8.793c0-.541.055-1.068.139-1.586l.292.185c.113.135.113.719.169.911.139.482.484.751.748 1.19.155.261.414.923.332 1.197.109-.179 1.081.824 1.259 1.033.418.492.74 1.088.061 1.574-.219.158.334 1.14.049 1.382l-.365.094c-.225.138-.235.397-.166.631-1.562-1.765-2.518-4.076-2.518-6.611zm14.347-5.823c.083-.01-.107.167-.107.167.033.256.222.396.581.527.437.157.038.455-.213.385-.139-.039-.854-.255-.879.025 0 .167-.679.001-.573-.175.073-.119.05-.387.186-.562.193-.255.38-.116.386.032-.001.394.398-.373.619-.399z"/>
+                </svg>
+            </div>
+            <div class="tile">
+                <div class="top-row">
+                    <a href="{{link}}">{{name}}</a>
+                </div>
+                <div class="bottom-row">{{description}}</div>
+            </div>
+            <div class="menu-button center-flex"> ⯆ </div>
+        </div>
+        <!-- REPEAT END -->
+    </div>
 </body>
 </html>
@@ -96,6 +111,7 @@ _link_template_fancy = """
 _link_template_internet_shortcut = """
 [InternetShortcut]
 URL={{link}}
+Desc={{description}}
 """.strip()

 _learning_module_template = """
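With the added `Desc=` line, rendering this template for a single link produces a small `.url`-style file. A quick sketch of the substitution (the URL and description are placeholders):

```python
template = "[InternetShortcut]\nURL={{link}}\nDesc={{description}}"
rendered = (
    template.replace("{{link}}", "https://example.com/course")
    .replace("{{description}}", "Slides for lecture 3")
)
print(rendered)
# [InternetShortcut]
# URL=https://example.com/course
# Desc=Slides for lecture 3
```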
@@ -126,6 +142,88 @@ _learning_module_template = """
 </html>
 """

+_forum_thread_template = """
+<!DOCTYPE html>
+<html lang="en">
+<head>
+    <meta charset="UTF-8">
+    <title>ILIAS - Forum: {{name}}</title>
+    <style>
+        * {
+            box-sizing: border-box;
+        }
+        body {
+            font-family: 'Open Sans', Verdana, Arial, Helvetica, sans-serif;
+            padding: 8px;
+        }
+        ul, ol, p {
+            margin: 1.2em 0;
+        }
+        p {
+            margin-top: 8px;
+            margin-bottom: 8px;
+        }
+        a {
+            color: #00876c;
+            text-decoration: none;
+            cursor: pointer;
+        }
+        a:hover {
+            text-decoration: underline;
+        }
+        body > p:first-child > span:first-child {
+            font-size: 1.6em;
+        }
+        body > p:first-child > span:first-child ~ span.default {
+            display: inline-block;
+            font-size: 1.2em;
+            padding-bottom: 8px;
+        }
+        .ilFrmPostContent {
+            margin-top: 8px;
+            max-width: 64em;
+        }
+        .ilFrmPostContent > *:first-child {
+            margin-top: 0px;
+        }
+        .ilFrmPostTitle {
+            margin-top: 24px;
+            color: #00876c;
+            font-weight: bold;
+        }
+        #ilFrmPostList {
+            list-style: none;
+            padding-left: 0;
+        }
+        li.ilFrmPostRow {
+            padding: 3px 0 3px 3px;
+            margin-bottom: 24px;
+            border-left: 6px solid #dddddd;
+        }
+        .ilFrmPostRow > div {
+            display: flex;
+        }
+        .ilFrmPostImage img {
+            margin: 0 !important;
+            padding: 6px 9px 9px 6px;
+        }
+        .ilUserIcon {
+            width: 115px;
+        }
+        .small {
+            text-decoration: none;
+            font-size: 0.75rem;
+            color: #6f6f6f;
+        }
+    </style>
+</head>
+<body>
+{{heading}}
+{{content}}
+</body>
+</html>
+""".strip()  # noqa: E501 line too long


 def learning_module_template(body: bs4.Tag, name: str, prev: Optional[str], next: Optional[str]) -> str:
     # Seems to be comments, ignore those.
@@ -139,13 +237,13 @@ def learning_module_template(body: bs4.Tag, name: str, prev: Optional[str], next
         </div>
     """
     if prev and body.select_one(".ilc_page_lnav_LeftNavigation"):
-        text = body.select_one(".ilc_page_lnav_LeftNavigation").getText().strip()
+        text = cast(bs4.Tag, body.select_one(".ilc_page_lnav_LeftNavigation")).get_text().strip()
         left = f'<a href="{prev}">{text}</a>'
     else:
         left = "<span></span>"

     if next and body.select_one(".ilc_page_rnav_RightNavigation"):
-        text = body.select_one(".ilc_page_rnav_RightNavigation").getText().strip()
+        text = cast(bs4.Tag, body.select_one(".ilc_page_rnav_RightNavigation")).get_text().strip()
         right = f'<a href="{next}">{text}</a>'
     else:
         right = "<span></span>"
@@ -156,12 +254,29 @@ def learning_module_template(body: bs4.Tag, name: str, prev: Optional[str], next
     )

     if bot_nav := body.select_one(".ilc_page_bnav_BottomNavigation"):
-        bot_nav.replace_with(soupify(nav_template.replace(
-            "{{left}}", left).replace("{{right}}", right).encode())
+        bot_nav.replace_with(
+            soupify(nav_template.replace("{{left}}", left).replace("{{right}}", right).encode())
         )

-    body = body.prettify()
-    return _learning_module_template.replace("{{body}}", body).replace("{{name}}", name)
+    body_str = body.prettify()
+    return _learning_module_template.replace("{{body}}", body_str).replace("{{name}}", name)
+
+
+def forum_thread_template(name: str, url: str, heading: bs4.Tag, content: bs4.Tag) -> str:
+    if title := heading.find(name="b"):
+        title.wrap(bs4.Tag(name="a", attrs={"href": url}))
+    return (
+        _forum_thread_template.replace("{{name}}", name)
+        .replace("{{heading}}", heading.prettify())
+        .replace("{{content}}", content.prettify())
+    )
+
+
+@dataclasses.dataclass
+class LinkData:
+    name: str
+    url: str
+    description: str


 class Links(Enum):
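`forum_thread_template` above turns the bold thread title into a link by wrapping it. The same `wrap` mechanic in isolation, using `new_tag` instead of constructing a `bs4.Tag` directly (a sketch, not PFERD code):

```python
import bs4

soup = bs4.BeautifulSoup("<p><b>Thread title</b> by someone</p>", "html.parser")
if title := soup.find("b"):
    # wrap() moves the element inside the freshly created anchor
    title.wrap(soup.new_tag("a", href="https://example.com/thread"))
print(soup)
# <p><a href="https://example.com/thread"><b>Thread title</b></a> by someone</p>
```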
@@ -181,6 +296,9 @@ class Links(Enum):
             return None
         raise ValueError("Missing switch case")

+    def collection_as_one(self) -> bool:
+        return self == Links.FANCY
+
     def extension(self) -> Optional[str]:
         if self == Links.FANCY:
             return ".html"
@@ -192,10 +310,47 @@ class Links(Enum):
             return None
         raise ValueError("Missing switch case")

+    def interpolate(self, redirect_delay: int, collection_name: str, links: list[LinkData]) -> str:
+        template = self.template()
+        if template is None:
+            raise ValueError("Cannot interpolate ignored links")
+
+        if len(links) == 1:
+            link = links[0]
+            content = template
+            content = content.replace("{{link}}", link.url)
+            content = content.replace("{{name}}", link.name)
+            content = content.replace("{{description}}", link.description)
+            content = content.replace("{{redirect_delay}}", str(redirect_delay))
+            return content
+        if self == Links.PLAINTEXT or self == Links.INTERNET_SHORTCUT:
+            return "\n".join(f"{link.url}" for link in links)
+
+        # All others get coerced to fancy
+        content = cast(str, Links.FANCY.template())
+        repeated_content = cast(
+            re.Match[str], re.search(r"<!-- REPEAT START -->([\s\S]+)<!-- REPEAT END -->", content)
+        ).group(1)
+
+        parts = []
+        for link in links:
+            instance = repeated_content
+            instance = instance.replace("{{link}}", link.url)
+            instance = instance.replace("{{name}}", link.name)
+            instance = instance.replace("{{description}}", link.description)
+            instance = instance.replace("{{redirect_delay}}", str(redirect_delay))
+            parts.append(instance)
+
+        content = content.replace(repeated_content, "\n".join(parts))
+        content = content.replace("{{name}}", collection_name)
+        content = re.sub(r"<!-- REPEAT REMOVE START -->[\s\S]+<!-- REPEAT REMOVE END -->", "", content)
+
+        return content
+
     @staticmethod
     def from_string(string: str) -> "Links":
         try:
             return Links(string)
         except ValueError:
-            raise ValueError("must be one of 'ignore', 'plaintext',"
-                             " 'html', 'internet-shortcut'")
+            options = [f"'{option.value}'" for option in Links]
+            raise ValueError(f"must be one of {', '.join(options)}") from None
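The heart of `interpolate` is marker-based repetition: the chunk between `<!-- REPEAT START -->` and `<!-- REPEAT END -->` is stamped out once per link, and the single-link-only parts between the `REPEAT REMOVE` markers are stripped. A condensed sketch of the same mechanism with a toy template:

```python
import re

template = """<ul>
<!-- REPEAT START -->
<li><a href="{{link}}">{{name}}</a></li>
<!-- REPEAT END -->
</ul>"""

links = [("PFERD", "https://github.com/Garmelon/PFERD"), ("ILIAS", "https://www.ilias.de")]

# Grab the repeatable chunk between the markers
match = re.search(r"<!-- REPEAT START -->([\s\S]+)<!-- REPEAT END -->", template)
assert match is not None
repeated = match.group(1)

parts = [repeated.replace("{{name}}", name).replace("{{link}}", url) for name, url in links]
print(template.replace(repeated, "\n".join(parts)))
```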
@@ -1,3 +1,5 @@
+from typing import cast
+
 from bs4 import BeautifulSoup, Comment, Tag

 _STYLE_TAG_CONTENT = """
@@ -37,6 +39,10 @@ _STYLE_TAG_CONTENT = """
         margin: 0.5rem 0;
     }

+    img {
+        background-color: white;
+    }
+
     body {
         padding: 1em;
         grid-template-columns: 1fr min(60rem, 90%) 1fr;
@@ -54,12 +60,11 @@ _ARTICLE_WORTHY_CLASSES = [
 def insert_base_markup(soup: BeautifulSoup) -> BeautifulSoup:
     head = soup.new_tag("head")
     soup.insert(0, head)
+    # Force UTF-8 encoding
+    head.append(soup.new_tag("meta", charset="utf-8"))

-    simplecss_link: Tag = soup.new_tag("link")
     # <link rel="stylesheet" href="https://cdn.simplecss.org/simple.css">
-    simplecss_link["rel"] = "stylesheet"
-    simplecss_link["href"] = "https://cdn.simplecss.org/simple.css"
-    head.append(simplecss_link)
+    head.append(soup.new_tag("link", rel="stylesheet", href="https://cdn.simplecss.org/simple.css"))

     # Basic style tags for compat
     style: Tag = soup.new_tag("style")
@@ -70,18 +75,18 @@ def insert_base_markup(soup: BeautifulSoup) -> BeautifulSoup:


 def clean(soup: BeautifulSoup) -> BeautifulSoup:
-    for block in soup.find_all(class_=lambda x: x in _ARTICLE_WORTHY_CLASSES):
+    for block in cast(list[Tag], soup.find_all(class_=lambda x: x in _ARTICLE_WORTHY_CLASSES)):
         block.name = "article"

-    for block in soup.find_all("h3"):
+    for block in cast(list[Tag], soup.find_all("h3")):
         block.name = "div"

-    for block in soup.find_all("h1"):
+    for block in cast(list[Tag], soup.find_all("h1")):
         block.name = "h3"

-    for block in soup.find_all(class_="ilc_va_ihcap_VAccordIHeadCap"):
+    for block in cast(list[Tag], soup.find_all(class_="ilc_va_ihcap_VAccordIHeadCap")):
         block.name = "h3"
-        block["class"] += ["accordion-head"]
+        block["class"] += ["accordion-head"]  # type: ignore

     for dummy in soup.select(".ilc_text_block_Standard.ilc_Paragraph"):
         children = list(dummy.children)
@@ -97,7 +102,7 @@ def clean(soup: BeautifulSoup) -> BeautifulSoup:
         if figure := video.find_parent("figure"):
             figure.decompose()

-    for hrule_imposter in soup.find_all(class_="ilc_section_Separator"):
+    for hrule_imposter in cast(list[Tag], soup.find_all(class_="ilc_section_Separator")):
         hrule_imposter.insert(0, soup.new_tag("hr"))

     return soup
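The `cast(list[Tag], …)` wrappers in this file exist only for the type checker: `find_all` is annotated as returning generic page elements, while the cleaner needs `Tag`-only attributes such as `.name`. At runtime the casts are no-ops. A minimal sketch:

```python
from typing import cast

from bs4 import BeautifulSoup, Tag

soup = BeautifulSoup("<div><h1>a</h1><h1>b</h1></div>", "html.parser")

# Narrow the element type for mypy; these are already Tags at runtime.
for heading in cast(list[Tag], soup.find_all("h1")):
    heading.name = "h3"  # renaming the tag rewrites it on serialization

print(soup)  # <div><h3>a</h3><h3>b</h3></div>
```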
@@ -4,7 +4,7 @@ import os
 import re
 from collections.abc import Awaitable, Coroutine
 from pathlib import PurePath
-from typing import Any, Dict, List, Literal, Optional, Set, Union, cast
+from typing import Any, Literal, Optional, cast
 from urllib.parse import urljoin

 import aiohttp
@@ -15,17 +15,24 @@ from ...auth import Authenticator
 from ...config import Config
 from ...logging import ProgressBar, log
 from ...output_dir import FileSink, Redownload
-from ...utils import fmt_path, soupify, url_set_query_param
+from ...utils import fmt_path, sanitize_path_name, soupify, url_set_query_param
 from ..crawler import CrawlError, CrawlToken, CrawlWarning, DownloadToken, anoncritical
 from ..http_crawler import HttpCrawler, HttpCrawlerSection
 from .async_helper import _iorepeat
-from .file_templates import Links, learning_module_template
+from .file_templates import LinkData, Links, forum_thread_template, learning_module_template
 from .ilias_html_cleaner import clean, insert_base_markup
-from .kit_ilias_html import (IliasElementType, IliasForumThread, IliasLearningModulePage, IliasPage,
-                             IliasPageElement, _sanitize_path_name, parse_ilias_forum_export)
+from .kit_ilias_html import (
+    IliasElementType,
+    IliasForumThread,
+    IliasLearningModulePage,
+    IliasPage,
+    IliasPageElement,
+    IliasSoup,
+    parse_ilias_forum_export,
+)
 from .shibboleth_login import ShibbolethLogin

-TargetType = Union[str, int]
+TargetType = str | int


 class LoginTypeLocal:
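`TargetType = str | int` uses the PEP 604 union operator. Evaluated eagerly at module level like this, it needs Python 3.10 or newer; the old `Union[str, int]` spelling works everywhere. A quick illustration:

```python
TargetType = str | int  # requires Python >= 3.10 when evaluated at runtime


def describe(target: TargetType) -> str:
    if isinstance(target, int):
        return f"course id {target}"
    return f"url {target}"


print(describe(1337))
print(describe("https://example.com/goto.php?target=crs_1337"))
```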
@@ -41,7 +48,7 @@ class IliasWebCrawlerSection(HttpCrawlerSection):

         return base_url

-    def login(self) -> Union[Literal["shibboleth"], LoginTypeLocal]:
+    def login(self) -> Literal["shibboleth"] | LoginTypeLocal:
         login_type = self.s.get("login_type")
         if not login_type:
             self.missing_value("login_type")
@@ -55,9 +62,7 @@ class IliasWebCrawlerSection(HttpCrawlerSection):

         self.invalid_value("login_type", login_type, "Should be <shibboleth | local>")

-    def tfa_auth(
-        self, authenticators: Dict[str, Authenticator]
-    ) -> Optional[Authenticator]:
+    def tfa_auth(self, authenticators: dict[str, Authenticator]) -> Optional[Authenticator]:
         value: Optional[str] = self.s.get("tfa_auth")
         if value is None:
             return None
@@ -104,10 +109,10 @@ class IliasWebCrawlerSection(HttpCrawlerSection):
         return self.s.getboolean("forums", fallback=False)


-_DIRECTORY_PAGES: Set[IliasElementType] = {
-    IliasElementType.COURSE,
+_DIRECTORY_PAGES: set[IliasElementType] = {
     IliasElementType.EXERCISE,
     IliasElementType.EXERCISE_FILES,
+    IliasElementType.EXERCISE_OVERVIEW,
     IliasElementType.FOLDER,
     IliasElementType.INFO_TAB,
     IliasElementType.MEDIACAST_VIDEO_FOLDER,
@@ -116,7 +121,7 @@ _DIRECTORY_PAGES: Set[IliasElementType] = {
     IliasElementType.OPENCAST_VIDEO_FOLDER_MAYBE_PAGINATED,
 }

-_VIDEO_ELEMENTS: Set[IliasElementType] = {
+_VIDEO_ELEMENTS: set[IliasElementType] = {
     IliasElementType.MEDIACAST_VIDEO,
     IliasElementType.MEDIACAST_VIDEO_FOLDER,
     IliasElementType.OPENCAST_VIDEO,
@@ -166,17 +171,19 @@ class IliasWebCrawler(HttpCrawler):
         name: str,
         section: IliasWebCrawlerSection,
         config: Config,
-        authenticators: Dict[str, Authenticator]
+        authenticators: dict[str, Authenticator],
     ):
         # Setting a main authenticator for cookie sharing
         auth = section.auth(authenticators)
         super().__init__(name, section, config, shared_auth=auth)

         if section.tasks() > 1:
-            log.warn("""
+            log.warn(
+                """
 Please avoid using too many parallel requests as these are the KIT ILIAS
 instance's greatest bottleneck.
-""".strip())
+""".strip()
+            )

         self._auth = auth
         self._base_url = section.base_url()
@@ -193,7 +200,7 @@ instance's greatest bottleneck.
         self._links = section.links()
         self._videos = section.videos()
         self._forums = section.forums()
-        self._visited_urls: Dict[str, PurePath] = dict()
+        self._visited_urls: dict[str, PurePath] = dict()

     async def _run(self) -> None:
         if isinstance(self._target, int):
@@ -210,18 +217,23 @@ instance's greatest bottleneck.
         # Start crawling at the given course
         root_url = url_set_query_param(
             urljoin(self._base_url + "/", "goto.php"),
-            "target", f"crs_{course_id}",
+            "target",
+            f"crs_{course_id}",
         )

         await self._crawl_url(root_url, expected_id=course_id)

     async def _crawl_desktop(self) -> None:
         await self._crawl_url(
-            urljoin(self._base_url, "/ilias.php?baseClass=ilDashboardGUI&cmd=show")
+            urljoin(self._base_url, "/ilias.php?baseClass=ilDashboardGUI&cmd=show"), crawl_nested_courses=True
         )

-    async def _crawl_url(self, url: str, expected_id: Optional[int] = None) -> None:
-        if awaitable := await self._handle_ilias_page(url, None, PurePath("."), expected_id):
+    async def _crawl_url(
+        self, url: str, expected_id: Optional[int] = None, crawl_nested_courses: bool = False
+    ) -> None:
+        if awaitable := await self._handle_ilias_page(
+            url, None, PurePath("."), expected_id, crawl_nested_courses
+        ):
             await awaitable

     async def _handle_ilias_page(
@@ -230,6 +242,7 @@ instance's greatest bottleneck.
         current_element: Optional[IliasPageElement],
         path: PurePath,
         expected_course_id: Optional[int] = None,
+        crawl_nested_courses: bool = False,
     ) -> Optional[Coroutine[Any, Any, None]]:
         maybe_cl = await self.crawl(path)
         if not maybe_cl:
@@ -237,7 +250,9 @@ instance's greatest bottleneck.
         if current_element:
             self._ensure_not_seen(current_element, path)

-        return self._crawl_ilias_page(url, current_element, maybe_cl, expected_course_id)
+        return self._crawl_ilias_page(
+            url, current_element, maybe_cl, expected_course_id, crawl_nested_courses
+        )

     @anoncritical
     async def _crawl_ilias_page(
@@ -246,10 +261,11 @@ instance's greatest bottleneck.
         current_element: Optional[IliasPageElement],
         cl: CrawlToken,
         expected_course_id: Optional[int] = None,
+        crawl_nested_courses: bool = False,
     ) -> None:
-        elements: List[IliasPageElement] = []
+        elements: list[IliasPageElement] = []
         # A list as variable redefinitions are not propagated to outer scopes
-        description: List[BeautifulSoup] = []
+        description: list[BeautifulSoup] = []

         @_iorepeat(3, "crawling folder")
         async def gather_elements() -> None:
@@ -257,6 +273,7 @@ instance's greatest bottleneck.
             async with cl:
                 next_stage_url: Optional[str] = url
                 current_parent = current_element
+                page = None

                 while next_stage_url:
                     soup = await self._get_page(next_stage_url)
@@ -266,19 +283,22 @@ instance's greatest bottleneck.
                     # If we expect to find a root course, enforce it
                     if current_parent is None and expected_course_id is not None:
                         perma_link = IliasPage.get_soup_permalink(soup)
-                        if not perma_link or "crs_" not in perma_link:
+                        if not perma_link or "crs/" not in perma_link:
                             raise CrawlError("Invalid course id? Didn't find anything looking like a course")
                         if str(expected_course_id) not in perma_link:
                             raise CrawlError(f"Expected course id {expected_course_id} but got {perma_link}")

-                    page = IliasPage(soup, next_stage_url, current_parent)
+                    page = IliasPage(soup, current_parent)
                     if next_element := page.get_next_stage_element():
                         current_parent = next_element
                         next_stage_url = next_element.url
                     else:
                         next_stage_url = None

+                page = cast(IliasPage, page)
                 elements.extend(page.get_child_elements())
+                if current_element is None and (info_tab := page.get_info_tab()):
+                    elements.append(info_tab)
                 if description_string := page.get_description():
                     description.append(description_string)
@@ -290,9 +310,9 @@ instance's greatest bottleneck.

         elements.sort(key=lambda e: e.id())

-        tasks: List[Awaitable[None]] = []
+        tasks: list[Awaitable[None]] = []
         for element in elements:
-            if handle := await self._handle_ilias_element(cl.path, element):
+            if handle := await self._handle_ilias_element(cl.path, element, crawl_nested_courses):
                 tasks.append(asyncio.create_task(handle))

         # And execute them
@@ -305,24 +325,30 @@ instance's greatest bottleneck.
     # works correctly.
     @anoncritical
     async def _handle_ilias_element(
-        self,
-        parent_path: PurePath,
-        element: IliasPageElement,
+        self, parent_path: PurePath, element: IliasPageElement, crawl_nested_courses: bool = False
     ) -> Optional[Coroutine[Any, Any, None]]:
         # element.name might contain `/` if the crawler created nested elements,
         # so we can not sanitize it here. We trust in the output dir to thwart worst-case
         # directory escape attacks.
         element_path = PurePath(parent_path, element.name)

-        if element.type in _VIDEO_ELEMENTS:
-            if not self._videos:
-                log.status(
-                    "[bold bright_black]",
-                    "Ignored",
-                    fmt_path(element_path),
-                    "[bright_black](enable with option 'videos')"
-                )
-                return None
+        # This is symptomatic of no access to the element, for example, because
+        # of time availability restrictions.
+        if "cmdClass=ilInfoScreenGUI" in element.url and "cmd=showSummary" in element.url:
+            log.explain(
+                "Skipping element as url points to info screen, "
+                "this should only happen with not-yet-released elements"
+            )
+            return None
+
+        if element.type in _VIDEO_ELEMENTS and not self._videos:
+            log.status(
+                "[bold bright_black]",
+                "Ignored",
+                fmt_path(element_path),
+                "[bright_black](enable with option 'videos')",
+            )
+            return None

         if element.type == IliasElementType.FILE:
             return await self._handle_file(element, element_path)
@@ -332,7 +358,7 @@ instance's greatest bottleneck.
                     "[bold bright_black]",
                     "Ignored",
                     fmt_path(element_path),
-                    "[bright_black](enable with option 'forums')"
+                    "[bright_black](enable with option 'forums')",
                 )
                 return None
             return await self._handle_forum(element, element_path)
@@ -341,7 +367,7 @@ instance's greatest bottleneck.
                 "[bold bright_black]",
                 "Ignored",
                 fmt_path(element_path),
-                "[bright_black](tests contain no relevant data)"
+                "[bright_black](tests contain no relevant data)",
             )
             return None
         elif element.type == IliasElementType.SURVEY:
@@ -349,7 +375,7 @@ instance's greatest bottleneck.
                 "[bold bright_black]",
                 "Ignored",
                 fmt_path(element_path),
-                "[bright_black](surveys contain no relevant data)"
+                "[bright_black](surveys contain no relevant data)",
             )
             return None
         elif element.type == IliasElementType.SCORM_LEARNING_MODULE:
@@ -357,13 +383,73 @@ instance's greatest bottleneck.
                 "[bold bright_black]",
                 "Ignored",
                 fmt_path(element_path),
-                "[bright_black](scorm learning modules are not supported)"
+                "[bright_black](scorm learning modules are not supported)",
+            )
+            return None
+        elif element.type == IliasElementType.LITERATURE_LIST:
+            log.status(
+                "[bold bright_black]",
+                "Ignored",
+                fmt_path(element_path),
+                "[bright_black](literature lists are not currently supported)",
+            )
+            return None
+        elif element.type == IliasElementType.LEARNING_MODULE_HTML:
+            log.status(
+                "[bold bright_black]",
+                "Ignored",
+                fmt_path(element_path),
+                "[bright_black](HTML learning modules are not supported)",
+            )
+            return None
+        elif element.type == IliasElementType.BLOG:
+            log.status(
+                "[bold bright_black]",
+                "Ignored",
+                fmt_path(element_path),
+                "[bright_black](blogs are not currently supported)",
+            )
+            return None
+        elif element.type == IliasElementType.DCL_RECORD_LIST:
+            log.status(
+                "[bold bright_black]",
+                "Ignored",
+                fmt_path(element_path),
+                "[bright_black](dcl record lists are not currently supported)",
+            )
+            return None
+        elif element.type == IliasElementType.MEDIA_POOL:
+            log.status(
+                "[bold bright_black]",
+                "Ignored",
+                fmt_path(element_path),
+                "[bright_black](media pools are not currently supported)",
+            )
+            return None
+        elif element.type == IliasElementType.COURSE:
+            if crawl_nested_courses:
+                return await self._handle_ilias_page(element.url, element, element_path)
+            log.status(
+                "[bold bright_black]",
+                "Ignored",
+                fmt_path(element_path),
+                "[bright_black](not descending into linked course)",
+            )
+            return None
+        elif element.type == IliasElementType.WIKI:
+            log.status(
+                "[bold bright_black]",
+                "Ignored",
+                fmt_path(element_path),
+                "[bright_black](wikis are not currently supported)",
             )
             return None
         elif element.type == IliasElementType.LEARNING_MODULE:
             return await self._handle_learning_module(element, element_path)
         elif element.type == IliasElementType.LINK:
             return await self._handle_link(element, element_path)
+        elif element.type == IliasElementType.LINK_COLLECTION:
+            return await self._handle_link(element, element_path)
         elif element.type == IliasElementType.BOOKING:
             return await self._handle_booking(element, element_path)
         elif element.type == IliasElementType.OPENCAST_VIDEO:
@@ -389,44 +475,93 @@ instance's greatest bottleneck.
         log.explain_topic(f"Decision: Crawl Link {fmt_path(element_path)}")
         log.explain(f"Links type is {self._links}")

-        link_template_maybe = self._links.template()
-        link_extension = self._links.extension()
-        if not link_template_maybe or not link_extension:
+        export_url = url_set_query_param(element.url, "cmd", "exportHTML")
+        resolved = await self._resolve_link_target(export_url)
+        if resolved == "none":
+            links = [LinkData(element.name, "", element.description or "")]
+        else:
+            links = self._parse_link_content(element, cast(BeautifulSoup, resolved))
+
+        maybe_extension = self._links.extension()
+
+        if not maybe_extension:
             log.explain("Answer: No")
             return None
         else:
             log.explain("Answer: Yes")
-            element_path = element_path.with_name(element_path.name + link_extension)

-        maybe_dl = await self.download(element_path, mtime=element.mtime)
-        if not maybe_dl:
+        if len(links) <= 1 or self._links.collection_as_one():
+            element_path = element_path.with_name(element_path.name + maybe_extension)
+            maybe_dl = await self.download(element_path, mtime=element.mtime)
+            if not maybe_dl:
+                return None
+            return self._download_link(self._links, element.name, links, maybe_dl)
+
+        maybe_cl = await self.crawl(element_path)
+        if not maybe_cl:
             return None
+        # Required for download_all closure
+        cl = maybe_cl
+        extension = maybe_extension

-        return self._download_link(element, link_template_maybe, maybe_dl)
+        async def download_all() -> None:
+            for link in links:
+                path = cl.path / (sanitize_path_name(link.name) + extension)
+                if dl := await self.download(path, mtime=element.mtime):
+                    await self._download_link(self._links, element.name, [link], dl)
+
+        return download_all()

     @anoncritical
     @_iorepeat(3, "resolving link")
-    async def _download_link(self, element: IliasPageElement, link_template: str, dl: DownloadToken) -> None:
-        async with dl as (bar, sink):
-            export_url = element.url.replace("cmd=calldirectlink", "cmd=exportHTML")
-            real_url = await self._resolve_link_target(export_url)
-            self._write_link_content(link_template, real_url, element.name, element.description, sink)
-
-    def _write_link_content(
-        self,
-        link_template: str,
-        url: str,
-        name: str,
-        description: Optional[str],
-        sink: FileSink,
+    async def _download_link(
+        self, link_renderer: Links, collection_name: str, links: list[LinkData], dl: DownloadToken
     ) -> None:
-        content = link_template
-        content = content.replace("{{link}}", url)
-        content = content.replace("{{name}}", name)
-        content = content.replace("{{description}}", str(description))
-        content = content.replace("{{redirect_delay}}", str(self._link_file_redirect_delay))
-        sink.file.write(content.encode("utf-8"))
-        sink.done()
+        async with dl as (bar, sink):
+            rendered = link_renderer.interpolate(self._link_file_redirect_delay, collection_name, links)
+            sink.file.write(rendered.encode("utf-8"))
+            sink.done()
+
+    async def _resolve_link_target(self, export_url: str) -> BeautifulSoup | Literal["none"]:
+        async def impl() -> Optional[BeautifulSoup | Literal["none"]]:
+            async with self.session.get(export_url, allow_redirects=False) as resp:
+                # No redirect means we were authenticated
+                if hdrs.LOCATION not in resp.headers:
+                    return soupify(await resp.read())  # .select_one("a").get("href").strip()  # type: ignore
+                # We are either unauthenticated or the link is not active
+                new_url = resp.headers[hdrs.LOCATION].lower()
+                if "baseclass=illinkresourcehandlergui" in new_url and "cmd=infoscreen" in new_url:
+                    return "none"
+                return None
+
+        auth_id = await self._current_auth_id()
+        target = await impl()
+        if target is not None:
+            return target
+
+        await self.authenticate(auth_id)
+
+        target = await impl()
+        if target is not None:
+            return target
+
+        raise CrawlError("resolve_link_target failed even after authenticating")
+
+    @staticmethod
+    def _parse_link_content(element: IliasPageElement, content: BeautifulSoup) -> list[LinkData]:
+        links = list(content.select("a"))
+        if len(links) == 1:
+            url = str(links[0].get("href")).strip()
+            return [LinkData(name=element.name, description=element.description or "", url=url)]
+
+        results = []
+        for link in links:
+            url = str(link.get("href")).strip()
+            name = link.get_text(strip=True)
+            description = cast(Tag, link.find_next_sibling("dd")).get_text(strip=True)
+            results.append(LinkData(name=name, description=description, url=url.strip()))
+
+        return results

     async def _handle_booking(
         self,
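The new `_resolve_link_target` uses a probe, re-authenticate, probe-again shape: try the request with the current session, and only if that fails log in once and retry. Sketched in isolation (the `probe`/`authenticate` callables are placeholders, not PFERD APIs):

```python
from collections.abc import Awaitable, Callable
from typing import Optional


async def resolve_with_reauth(
    probe: Callable[[], Awaitable[Optional[str]]],
    authenticate: Callable[[], Awaitable[None]],
) -> str:
    # First attempt: an existing session cookie may still be valid.
    if (result := await probe()) is not None:
        return result

    # The probe was redirected away, so the session is stale: log in, retry once.
    await authenticate()
    if (result := await probe()) is not None:
        return result

    raise RuntimeError("resolving failed even after authenticating")
```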
@@ -451,7 +586,7 @@ instance's greatest bottleneck.

         self._ensure_not_seen(element, element_path)

-        return self._download_booking(element, link_template_maybe, maybe_dl)
+        return self._download_booking(element, maybe_dl)

     @anoncritical
     @_iorepeat(1, "downloading description")
@@ -461,10 +596,10 @@ instance's greatest bottleneck.
         if not dl:
             return

-        async with dl as (bar, sink):
+        async with dl as (_bar, sink):
             description = clean(insert_base_markup(description))
-            description = await self.internalize_images(description)
-            sink.file.write(description.prettify().encode("utf-8"))
+            description_tag = await self.internalize_images(description)
+            sink.file.write(description_tag.prettify().encode("utf-8"))
             sink.done()

     @anoncritical
@@ -472,36 +607,13 @@ instance's greatest bottleneck.
     async def _download_booking(
         self,
         element: IliasPageElement,
-        link_template: str,
         dl: DownloadToken,
     ) -> None:
         async with dl as (bar, sink):
-            self._write_link_content(link_template, element.url, element.name, element.description, sink)
-
-    async def _resolve_link_target(self, export_url: str) -> str:
-        async def impl() -> Optional[str]:
-            async with self.session.get(export_url, allow_redirects=False) as resp:
-                # No redirect means we were authenticated
-                if hdrs.LOCATION not in resp.headers:
-                    return soupify(await resp.read()).select_one("a").get("href").strip()
-                # We are either unauthenticated or the link is not active
-                new_url = resp.headers[hdrs.LOCATION].lower()
-                if "baseclass=illinkresourcehandlergui" in new_url and "cmd=infoscreen" in new_url:
-                    return ""
-                return None
-
-        auth_id = await self._current_auth_id()
-        target = await impl()
-        if target is not None:
-            return target
-
-        await self.authenticate(auth_id)
-
-        target = await impl()
-        if target is not None:
-            return target
-
-        raise CrawlError("resolve_link_target failed even after authenticating")
+            links = [LinkData(name=element.name, description=element.description or "", url=element.url)]
+            rendered = self._links.interpolate(self._link_file_redirect_delay, element.name, links)
+            sink.file.write(rendered.encode("utf-8"))
+            sink.done()

     async def _handle_opencast_video(
         self,
@@ -512,7 +624,7 @@ instance's greatest bottleneck.
         if self.prev_report:
             self.report.add_custom_value(
                 _get_video_cache_key(element),
-                self.prev_report.get_custom_value(_get_video_cache_key(element))
+                self.prev_report.get_custom_value(_get_video_cache_key(element)),
             )

         # A video might contain other videos, so let's "crawl" the video first
@@ -546,7 +658,7 @@ instance's greatest bottleneck.

     def _previous_contained_opencast_videos(
         self, element: IliasPageElement, element_path: PurePath
-    ) -> List[PurePath]:
+    ) -> list[PurePath]:
         if not self.prev_report:
             return []
         custom_value = self.prev_report.get_custom_value(_get_video_cache_key(element))
@@ -584,11 +696,11 @@ instance's greatest bottleneck.
         def add_to_report(paths: list[str]) -> None:
             self.report.add_custom_value(
                 _get_video_cache_key(element),
-                {"known_paths": paths, "own_path": str(self._transformer.transform(dl.path))}
+                {"known_paths": paths, "own_path": str(self._transformer.transform(dl.path))},
             )

         async with dl as (bar, sink):
-            page = IliasPage(await self._get_page(element.url), element.url, element)
+            page = IliasPage(await self._get_page(element.url), element)
             stream_elements = page.get_child_elements()
|
||||||
|
|
||||||
if len(stream_elements) > 1:
|
if len(stream_elements) > 1:
|
||||||
@@ -598,11 +710,11 @@ instance's greatest bottleneck.
|
|||||||
stream_element = stream_elements[0]
|
stream_element = stream_elements[0]
|
||||||
|
|
||||||
# We do not have a local cache yet
|
# We do not have a local cache yet
|
||||||
await self._stream_from_url(stream_element.url, sink, bar, is_video=True)
|
await self._stream_from_url(stream_element, sink, bar, is_video=True)
|
||||||
add_to_report([str(self._transformer.transform(dl.path))])
|
add_to_report([str(self._transformer.transform(dl.path))])
|
||||||
return
|
return
|
||||||
|
|
||||||
contained_video_paths: List[str] = []
|
contained_video_paths: list[str] = []
|
||||||
|
|
||||||
for stream_element in stream_elements:
|
for stream_element in stream_elements:
|
||||||
video_path = dl.path.parent / stream_element.name
|
video_path = dl.path.parent / stream_element.name
|
||||||
@@ -613,7 +725,7 @@ instance's greatest bottleneck.
|
|||||||
async with maybe_dl as (bar, sink):
|
async with maybe_dl as (bar, sink):
|
||||||
log.explain(f"Streaming video from real url {stream_element.url}")
|
log.explain(f"Streaming video from real url {stream_element.url}")
|
||||||
contained_video_paths.append(str(self._transformer.transform(maybe_dl.path)))
|
contained_video_paths.append(str(self._transformer.transform(maybe_dl.path)))
|
||||||
await self._stream_from_url(stream_element.url, sink, bar, is_video=True)
|
await self._stream_from_url(stream_element, sink, bar, is_video=True)
|
||||||
|
|
||||||
add_to_report(contained_video_paths)
|
add_to_report(contained_video_paths)
|
||||||
|
|
||||||
@@ -635,12 +747,15 @@ instance's greatest bottleneck.
|
|||||||
async def _download_file(self, element: IliasPageElement, dl: DownloadToken, is_video: bool) -> None:
|
async def _download_file(self, element: IliasPageElement, dl: DownloadToken, is_video: bool) -> None:
|
||||||
assert dl # The function is only reached when dl is not None
|
assert dl # The function is only reached when dl is not None
|
||||||
async with dl as (bar, sink):
|
async with dl as (bar, sink):
|
||||||
await self._stream_from_url(element.url, sink, bar, is_video)
|
await self._stream_from_url(element, sink, bar, is_video)
|
||||||
|
|
||||||
|
async def _stream_from_url(
|
||||||
|
self, element: IliasPageElement, sink: FileSink, bar: ProgressBar, is_video: bool
|
||||||
|
) -> None:
|
||||||
|
url = element.url
|
||||||
|
|
||||||
async def _stream_from_url(self, url: str, sink: FileSink, bar: ProgressBar, is_video: bool) -> None:
|
|
||||||
async def try_stream() -> bool:
|
async def try_stream() -> bool:
|
||||||
next_url = url
|
next_url = url
|
||||||
|
|
||||||
# Normal files redirect to the magazine if we are not authenticated. As files could be HTML,
|
# Normal files redirect to the magazine if we are not authenticated. As files could be HTML,
|
||||||
# we can not match on the content type here. Instead, we disallow redirects and inspect the
|
# we can not match on the content type here. Instead, we disallow redirects and inspect the
|
||||||
# new location. If we are redirected anywhere but the ILIAS 8 "sendfile" command, we assume
|
# new location. If we are redirected anywhere but the ILIAS 8 "sendfile" command, we assume
|
||||||
@@ -688,7 +803,7 @@ instance's greatest bottleneck.
|
|||||||
await self.authenticate(auth_id)
|
await self.authenticate(auth_id)
|
||||||
|
|
||||||
if not await try_stream():
|
if not await try_stream():
|
||||||
raise CrawlError("File streaming failed after authenticate()")
|
raise CrawlError(f"File streaming failed after authenticate() {element!r}")
|
||||||
|
|
||||||
async def _handle_forum(
|
async def _handle_forum(
|
||||||
self,
|
self,
|
||||||
@@ -703,36 +818,23 @@ instance's greatest bottleneck.
|
|||||||
@_iorepeat(3, "crawling forum")
|
@_iorepeat(3, "crawling forum")
|
||||||
@anoncritical
|
@anoncritical
|
||||||
async def _crawl_forum(self, element: IliasPageElement, cl: CrawlToken) -> None:
|
async def _crawl_forum(self, element: IliasPageElement, cl: CrawlToken) -> None:
|
||||||
elements: List[IliasForumThread] = []
|
|
||||||
|
|
||||||
async with cl:
|
async with cl:
|
||||||
next_stage_url = element.url
|
inner = IliasPage(await self._get_page(element.url), element)
|
||||||
while next_stage_url:
|
export_url = inner.get_forum_export_url()
|
||||||
log.explain_topic(f"Parsing HTML page for {fmt_path(cl.path)}")
|
if not export_url:
|
||||||
log.explain(f"URL: {next_stage_url}")
|
log.warn("Could not extract forum export url")
|
||||||
|
|
||||||
soup = await self._get_page(next_stage_url)
|
|
||||||
page = IliasPage(soup, next_stage_url, element)
|
|
||||||
|
|
||||||
if next := page.get_next_stage_element():
|
|
||||||
next_stage_url = next.url
|
|
||||||
else:
|
|
||||||
break
|
|
||||||
|
|
||||||
download_data = page.get_download_forum_data()
|
|
||||||
if not download_data:
|
|
||||||
raise CrawlWarning("Failed to extract forum data")
|
|
||||||
if download_data.empty:
|
|
||||||
log.explain("Forum had no threads")
|
|
||||||
return
|
return
|
||||||
html = await self._post_authenticated(download_data.url, download_data.form_data)
|
|
||||||
elements = parse_ilias_forum_export(soupify(html))
|
|
||||||
|
|
||||||
elements.sort(key=lambda elem: elem.title)
|
export = await self._post(
|
||||||
|
export_url,
|
||||||
|
{"format": "html", "cmd[createExportFile]": ""},
|
||||||
|
)
|
||||||
|
|
||||||
tasks: List[Awaitable[None]] = []
|
elements = parse_ilias_forum_export(soupify(export))
|
||||||
for elem in elements:
|
|
||||||
tasks.append(asyncio.create_task(self._download_forum_thread(cl.path, elem)))
|
tasks: list[Awaitable[None]] = []
|
||||||
|
for thread in elements:
|
||||||
|
tasks.append(asyncio.create_task(self._download_forum_thread(cl.path, thread, element.url)))
|
||||||
|
|
||||||
# And execute them
|
# And execute them
|
||||||
await self.gather(tasks)
|
await self.gather(tasks)
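Aside: the rewritten `_crawl_forum` drops the old multi-stage pagination entirely and asks ILIAS for a single HTML export of the whole forum. A sketch of that request with plain aiohttp, assuming a placeholder export URL (a real one comes from `get_forum_export_url()`):

```python
import aiohttp


async def fetch_forum_export(session: aiohttp.ClientSession, export_url: str) -> bytes:
    # Same form fields as the _post() call above: request one HTML export file.
    form = aiohttp.FormData()
    form.add_field("format", "html")
    form.add_field("cmd[createExportFile]", "")

    async with session.post(export_url, data=form) as resp:
        if resp.status != 200:
            raise RuntimeError(f"export failed with status {resp.status}")
        return await resp.read()
```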
@@ -740,20 +842,18 @@ instance's greatest bottleneck.
     @anoncritical
     @_iorepeat(3, "saving forum thread")
     async def _download_forum_thread(
-        self,
-        parent_path: PurePath,
-        element: IliasForumThread,
+        self, parent_path: PurePath, thread: IliasForumThread | IliasPageElement, forum_url: str
     ) -> None:
-        path = parent_path / (_sanitize_path_name(element.title) + ".html")
-        maybe_dl = await self.download(path, mtime=element.mtime)
-        if not maybe_dl:
+        path = parent_path / (sanitize_path_name(thread.name) + ".html")
+        maybe_dl = await self.download(path, mtime=thread.mtime)
+        if not maybe_dl or not isinstance(thread, IliasForumThread):
             return
 
         async with maybe_dl as (bar, sink):
-            content = "<!DOCTYPE html>\n"
-            content += element.title_tag.prettify()
-            content += element.content_tag.prettify()
-            sink.file.write(content.encode("utf-8"))
+            rendered = forum_thread_template(
+                thread.name, forum_url, thread.name_tag, await self.internalize_images(thread.content_tag)
+            )
+            sink.file.write(rendered.encode("utf-8"))
             sink.done()
 
     async def _handle_learning_module(
@@ -771,33 +871,33 @@ instance's greatest bottleneck.
     @_iorepeat(3, "crawling learning module")
     @anoncritical
     async def _crawl_learning_module(self, element: IliasPageElement, cl: CrawlToken) -> None:
-        elements: List[IliasLearningModulePage] = []
+        elements: list[IliasLearningModulePage] = []
 
         async with cl:
             log.explain_topic(f"Parsing initial HTML page for {fmt_path(cl.path)}")
             log.explain(f"URL: {element.url}")
             soup = await self._get_page(element.url)
-            page = IliasPage(soup, element.url, element)
+            page = IliasPage(soup, element)
             if next := page.get_learning_module_data():
-                elements.extend(await self._crawl_learning_module_direction(
-                    cl.path, next.previous_url, "left", element
-                ))
+                elements.extend(
+                    await self._crawl_learning_module_direction(cl.path, next.previous_url, "left", element)
+                )
                 elements.append(next)
-                elements.extend(await self._crawl_learning_module_direction(
-                    cl.path, next.next_url, "right", element
-                ))
+                elements.extend(
+                    await self._crawl_learning_module_direction(cl.path, next.next_url, "right", element)
+                )
 
         # Reflect their natural ordering in the file names
         for index, lm_element in enumerate(elements):
             lm_element.title = f"{index:02}_{lm_element.title}"
 
-        tasks: List[Awaitable[None]] = []
+        tasks: list[Awaitable[None]] = []
         for index, elem in enumerate(elements):
             prev_url = elements[index - 1].title if index > 0 else None
             next_url = elements[index + 1].title if index < len(elements) - 1 else None
-            tasks.append(asyncio.create_task(
-                self._download_learning_module_page(cl.path, elem, prev_url, next_url)
-            ))
+            tasks.append(
+                asyncio.create_task(self._download_learning_module_page(cl.path, elem, prev_url, next_url))
+            )
 
         # And execute them
         await self.gather(tasks)
@@ -806,10 +906,10 @@ instance's greatest bottleneck.
         self,
         path: PurePath,
         start_url: Optional[str],
-        dir: Union[Literal["left"], Literal["right"]],
-        parent_element: IliasPageElement
-    ) -> List[IliasLearningModulePage]:
-        elements: List[IliasLearningModulePage] = []
+        dir: Literal["left"] | Literal["right"],
+        parent_element: IliasPageElement,
+    ) -> list[IliasLearningModulePage]:
+        elements: list[IliasLearningModulePage] = []
 
         if not start_url:
             return elements
@@ -820,13 +920,10 @@ instance's greatest bottleneck.
             log.explain_topic(f"Parsing HTML page for {fmt_path(path)} ({dir}-{counter})")
             log.explain(f"URL: {next_element_url}")
             soup = await self._get_page(next_element_url)
-            page = IliasPage(soup, next_element_url, parent_element)
+            page = IliasPage(soup, parent_element)
             if next := page.get_learning_module_data():
                 elements.append(next)
-                if dir == "left":
-                    next_element_url = next.previous_url
-                else:
-                    next_element_url = next.next_url
+                next_element_url = next.previous_url if dir == "left" else next.next_url
             counter += 1
 
         return elements
@@ -838,9 +935,9 @@ instance's greatest bottleneck.
         parent_path: PurePath,
         element: IliasLearningModulePage,
         prev: Optional[str],
-        next: Optional[str]
+        next: Optional[str],
     ) -> None:
-        path = parent_path / (_sanitize_path_name(element.title) + ".html")
+        path = parent_path / (sanitize_path_name(element.title) + ".html")
         maybe_dl = await self.download(path)
         if not maybe_dl:
             return
@@ -849,17 +946,11 @@ instance's greatest bottleneck.
             return
 
         if prev:
-            prev_p = self._transformer.transform(parent_path / (_sanitize_path_name(prev) + ".html"))
-            if prev_p:
-                prev = os.path.relpath(prev_p, my_path.parent)
-            else:
-                prev = None
+            prev_p = self._transformer.transform(parent_path / (sanitize_path_name(prev) + ".html"))
+            prev = os.path.relpath(prev_p, my_path.parent) if prev_p else None
         if next:
-            next_p = self._transformer.transform(parent_path / (_sanitize_path_name(next) + ".html"))
-            if next_p:
-                next = os.path.relpath(next_p, my_path.parent)
-            else:
-                next = None
+            next_p = self._transformer.transform(parent_path / (sanitize_path_name(next) + ".html"))
+            next = os.path.relpath(next_p, my_path.parent) if next_p else None
 
         async with maybe_dl as (bar, sink):
             content = element.content
@@ -873,19 +964,16 @@ instance's greatest bottleneck.
         """
         log.explain_topic("Internalizing images")
         for elem in tag.find_all(recursive=True):
-            if not isinstance(elem, Tag):
-                continue
-            if elem.name == "img":
-                if src := elem.attrs.get("src", None):
-                    url = urljoin(self._base_url, src)
-                    if not url.startswith(self._base_url):
-                        continue
-                    log.explain(f"Internalizing {url!r}")
-                    img = await self._get_authenticated(url)
-                    elem.attrs["src"] = "data:;base64," + base64.b64encode(img).decode()
-            if elem.name == "iframe" and elem.attrs.get("src", "").startswith("//"):
+            if elem.name == "img" and (src := elem.attrs.get("src", None)):
+                url = urljoin(self._base_url, cast(str, src))
+                if not url.startswith(self._base_url):
+                    continue
+                log.explain(f"Internalizing {url!r}")
+                img = await self._get_authenticated(url)
+                elem.attrs["src"] = "data:;base64," + base64.b64encode(img).decode()
+            if elem.name == "iframe" and cast(str, elem.attrs.get("src", "")).startswith("//"):
                 # For unknown reasons the protocol seems to be stripped.
-                elem.attrs["src"] = "https:" + elem.attrs["src"]
+                elem.attrs["src"] = "https:" + cast(str, elem.attrs["src"])
         return tag
 
     def _ensure_not_seen(self, element: IliasPageElement, parent_path: PurePath) -> None:
@@ -897,10 +985,10 @@ instance's greatest bottleneck.
         )
         self._visited_urls[element.url] = parent_path
 
-    async def _get_page(self, url: str, root_page_allowed: bool = False) -> BeautifulSoup:
+    async def _get_page(self, url: str, root_page_allowed: bool = False) -> IliasSoup:
         auth_id = await self._current_auth_id()
         async with self.session.get(url) as request:
-            soup = soupify(await request.read())
+            soup = IliasSoup(soupify(await request.read()), str(request.url))
            if IliasPage.is_logged_in(soup):
                return self._verify_page(soup, url, root_page_allowed)
@@ -909,13 +997,13 @@ instance's greatest bottleneck.
 
         # Retry once after authenticating. If this fails, we will die.
         async with self.session.get(url) as request:
-            soup = soupify(await request.read())
+            soup = IliasSoup(soupify(await request.read()), str(request.url))
             if IliasPage.is_logged_in(soup):
                 return self._verify_page(soup, url, root_page_allowed)
         raise CrawlError(f"get_page failed even after authenticating on {url!r}")
 
     @staticmethod
-    def _verify_page(soup: BeautifulSoup, url: str, root_page_allowed: bool) -> BeautifulSoup:
+    def _verify_page(soup: IliasSoup, url: str, root_page_allowed: bool) -> IliasSoup:
         if IliasPage.is_root_page(soup) and not root_page_allowed:
             raise CrawlError(
                 "Unexpectedly encountered ILIAS root page. "
@@ -927,29 +1015,15 @@ instance's greatest bottleneck.
             )
         return soup
 
-    async def _post_authenticated(
-        self,
-        url: str,
-        data: dict[str, Union[str, List[str]]]
-    ) -> bytes:
-        auth_id = await self._current_auth_id()
-
+    async def _post(self, url: str, data: dict[str, str | list[str]]) -> bytes:
         form_data = aiohttp.FormData()
         for key, val in data.items():
             form_data.add_field(key, val)
 
-        async with self.session.post(url, data=form_data(), allow_redirects=False) as request:
+        async with self.session.post(url, data=form_data()) as request:
             if request.status == 200:
                 return await request.read()
-
-        # We weren't authenticated, so try to do that
-        await self.authenticate(auth_id)
-
-        # Retry once after authenticating. If this fails, we will die.
-        async with self.session.post(url, data=data, allow_redirects=False) as request:
-            if request.status == 200:
-                return await request.read()
-        raise CrawlError("post_authenticated failed even after authenticating")
+            raise CrawlError(f"post failed with status {request.status}")
 
     async def _get_authenticated(self, url: str) -> bytes:
         auth_id = await self._current_auth_id()
@@ -979,52 +1053,22 @@ instance's greatest bottleneck.
         async with self.session.get(urljoin(self._base_url, "/login.php"), params=params) as request:
             login_page = soupify(await request.read())
 
-        login_form = login_page.find("form", attrs={"name": "formlogin"})
+        login_form = login_page.find("form", attrs={"name": "login_form"})
         if login_form is None:
             raise CrawlError("Could not find the login form! Specified client id might be invalid.")
 
-        login_url = login_form.attrs.get("action")
+        login_url = cast(Optional[str], login_form.attrs.get("action"))
         if login_url is None:
             raise CrawlError("Could not find the action URL in the login form!")
 
         username, password = await self._auth.credentials()
 
-        login_data = {
-            "username": username,
-            "password": password,
-            "cmd[doStandardAuthentication]": "Login",
-        }
+        login_form_data = aiohttp.FormData()
+        login_form_data.add_field("login_form/input_3/input_4", username)
+        login_form_data.add_field("login_form/input_3/input_5", password)
 
         # do the actual login
-        async with self.session.post(urljoin(self._base_url, login_url), data=login_data) as request:
-            soup = soupify(await request.read())
-            if not self._is_logged_in(soup):
+        async with self.session.post(urljoin(self._base_url, login_url), data=login_form_data) as request:
+            soup = IliasSoup(soupify(await request.read()), str(request.url))
+            if not IliasPage.is_logged_in(soup):
                 self._auth.invalidate_credentials()
-
-    @staticmethod
-    def _is_logged_in(soup: BeautifulSoup) -> bool:
-        # Normal ILIAS pages
-        mainbar: Optional[Tag] = soup.find(class_="il-maincontrols-metabar")
-        if mainbar is not None:
-            login_button = mainbar.find(attrs={"href": lambda x: x and "login.php" in x})
-            shib_login = soup.find(id="button_shib_login")
-            return not login_button and not shib_login
-
-        # Personal Desktop
-        if soup.find("a", attrs={"href": lambda x: x and "block_type=pditems" in x}):
-            return True
-
-        # Video listing embeds do not have complete ILIAS html. Try to match them by
-        # their video listing table
-        video_table = soup.find(
-            recursive=True,
-            name="table",
-            attrs={"id": lambda x: x is not None and x.startswith("tbl_xoct")}
-        )
-        if video_table is not None:
-            return True
-        # The individual video player wrapper page has nothing of the above.
-        # Match it by its playerContainer.
-        if soup.select_one("#playerContainer") is not None:
-            return True
-        return False
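Aside: `_resolve_link_target`, `_stream_from_url`, `_get_page` and the removed `_post_authenticated` all share one shape: attempt, authenticate, attempt exactly once more, then give up. A generic sketch of that pattern (all names here are illustrative, not PFERD API):

```python
from collections.abc import Awaitable, Callable
from typing import Optional, TypeVar

T = TypeVar("T")


async def retry_once_after_auth(
    attempt: Callable[[], Awaitable[Optional[T]]],
    authenticate: Callable[[], Awaitable[None]],
) -> T:
    # First attempt succeeds if the session cookie is still valid.
    if (result := await attempt()) is not None:
        return result

    # Authenticate once and retry exactly once; a second failure means the
    # credentials themselves are broken, so further retries are pointless.
    await authenticate()
    if (result := await attempt()) is not None:
        return result

    raise RuntimeError("request failed even after authenticating")
```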
File diff suppressed because it is too large
@@ -1,4 +1,4 @@
-from typing import Dict, Literal
+from typing import Literal
 
 from ...auth import Authenticator
 from ...config import Config
@@ -26,7 +26,7 @@ class KitIliasWebCrawler(IliasWebCrawler):
         name: str,
         section: KitIliasWebCrawlerSection,
         config: Config,
-        authenticators: Dict[str, Authenticator],
+        authenticators: dict[str, Authenticator],
     ):
         super().__init__(name, section, config, authenticators)
@@ -1,8 +1,8 @@
-from typing import Any, Optional
+from typing import Any, Optional, cast
 
 import aiohttp
 import yarl
-from bs4 import BeautifulSoup
+from bs4 import BeautifulSoup, Tag
 
 from ...auth import Authenticator, TfaAuthenticator
 from ...logging import log
@@ -38,9 +38,7 @@ class ShibbolethLogin:
         async with sess.get(url) as response:
             shib_url = response.url
             if str(shib_url).startswith(self._ilias_url):
-                log.explain(
-                    "ILIAS recognized our shib token and logged us in in the background, returning"
-                )
+                log.explain("ILIAS recognized our shib token and logged us in in the background, returning")
                 return
             soup: BeautifulSoup = soupify(await response.read())
 
@@ -48,8 +46,8 @@ class ShibbolethLogin:
         while not self._login_successful(soup):
             # Searching the form here so that this fails before asking for
             # credentials rather than after asking.
-            form = soup.find("form", {"method": "post"})
-            action = form["action"]
+            form = cast(Tag, soup.find("form", {"method": "post"}))
+            action = cast(str, form["action"])
 
             # Equivalent: Enter credentials in
             # https://idp.scc.kit.edu/idp/profile/SAML2/Redirect/SSO
@@ -59,9 +57,10 @@ class ShibbolethLogin:
                 "_eventId_proceed": "",
                 "j_username": username,
                 "j_password": password,
+                "fudis_web_authn_assertion_input": "",
             }
             if csrf_token_input := form.find("input", {"name": "csrf_token"}):
-                data["csrf_token"] = csrf_token_input["value"]
+                data["csrf_token"] = csrf_token_input["value"]  # type: ignore
             soup = await _post(sess, url, data)
 
             if soup.find(id="attributeRelease"):
@@ -78,14 +77,14 @@ class ShibbolethLogin:
 
         # Equivalent: Being redirected via JS automatically
         # (or clicking "Continue" if you have JS disabled)
-        relay_state = soup.find("input", {"name": "RelayState"})
-        saml_response = soup.find("input", {"name": "SAMLResponse"})
-        url = form = soup.find("form", {"method": "post"})["action"]
+        relay_state = cast(Tag, soup.find("input", {"name": "RelayState"}))
+        saml_response = cast(Tag, soup.find("input", {"name": "SAMLResponse"}))
+        url = cast(str, cast(Tag, soup.find("form", {"method": "post"}))["action"])
         data = {  # using the info obtained in the while loop above
-            "RelayState": relay_state["value"],
-            "SAMLResponse": saml_response["value"],
+            "RelayState": cast(str, relay_state["value"]),
+            "SAMLResponse": cast(str, saml_response["value"]),
         }
-        await sess.post(url, data=data)
+        await sess.post(cast(str, url), data=data)
 
     async def _authenticate_tfa(
         self, session: aiohttp.ClientSession, soup: BeautifulSoup, shib_url: yarl.URL
@@ -97,8 +96,8 @@ class ShibbolethLogin:
 
         # Searching the form here so that this fails before asking for
         # credentials rather than after asking.
-        form = soup.find("form", {"method": "post"})
-        action = form["action"]
+        form = cast(Tag, soup.find("form", {"method": "post"}))
+        action = cast(str, form["action"])
 
         # Equivalent: Enter token in
         # https://idp.scc.kit.edu/idp/profile/SAML2/Redirect/SSO
@@ -106,10 +105,10 @@ class ShibbolethLogin:
         username, password = await self._auth.credentials()
         data = {
             "_eventId_proceed": "",
-            "j_tokenNumber": tfa_token,
+            "fudis_otp_input": tfa_token,
         }
         if csrf_token_input := form.find("input", {"name": "csrf_token"}):
-            data["csrf_token"] = csrf_token_input["value"]
+            data["csrf_token"] = csrf_token_input["value"]  # type: ignore
         return await _post(session, url, data)
 
     @staticmethod
@@ -120,7 +119,7 @@ class ShibbolethLogin:
 
     @staticmethod
     def _tfa_required(soup: BeautifulSoup) -> bool:
-        return soup.find(id="j_tokenNumber") is not None
+        return soup.find(id="fudiscr-form") is not None
 
 
 async def _post(session: aiohttp.ClientSession, url: str, data: Any) -> BeautifulSoup:
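Aside: both the credential and the TFA form now forward an optional hidden `csrf_token` input verbatim. The pattern in isolation, against made-up form markup:

```python
from typing import cast

from bs4 import BeautifulSoup, Tag

html = """
<form method="post" action="/idp/profile/SAML2/Redirect/SSO">
  <input name="csrf_token" value="abc123"/>
</form>
"""

soup = BeautifulSoup(html, "html.parser")
form = cast(Tag, soup.find("form", {"method": "post"}))

data: dict[str, str] = {"_eventId_proceed": ""}
# Only send the token if the IdP rendered one; older IdPs omit it.
if csrf_token_input := form.find("input", {"name": "csrf_token"}):
    data["csrf_token"] = cast(str, cast(Tag, csrf_token_input)["value"])
print(data)  # {'_eventId_proceed': '', 'csrf_token': 'abc123'}
```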
@@ -1,17 +1,21 @@
 import os
 import re
+from collections.abc import Awaitable, Generator, Iterable
 from dataclasses import dataclass
 from datetime import datetime
 from pathlib import PurePath
-from typing import Any, Awaitable, Generator, Iterable, List, Optional, Pattern, Tuple, Union
+from re import Pattern
+from typing import Any, Optional, Union, cast
 from urllib.parse import urljoin
 
+import aiohttp
 from bs4 import BeautifulSoup, Tag
 
+from ..auth import Authenticator
 from ..config import Config
 from ..logging import ProgressBar, log
 from ..output_dir import FileSink
-from ..utils import soupify
+from ..utils import sanitize_path_name, soupify
 from .crawler import CrawlError
 from .http_crawler import HttpCrawler, HttpCrawlerSection
 
@@ -31,6 +35,15 @@ class KitIpdCrawlerSection(HttpCrawlerSection):
         regex = self.s.get("link_regex", r"^.*?[^/]+\.(pdf|zip|c|cpp|java)$")
         return re.compile(regex)
 
+    def basic_auth(self, authenticators: dict[str, Authenticator]) -> Optional[Authenticator]:
+        value: Optional[str] = self.s.get("auth")
+        if value is None:
+            return None
+        auth = authenticators.get(value)
+        if auth is None:
+            self.invalid_value("auth", value, "No such auth section exists")
+        return auth
+
 
 @dataclass
 class KitIpdFile:
@@ -44,7 +57,7 @@ class KitIpdFile:
 @dataclass
 class KitIpdFolder:
     name: str
-    entries: List[Union[KitIpdFile, "KitIpdFolder"]]
+    entries: list[Union[KitIpdFile, "KitIpdFolder"]]
 
     def explain(self) -> None:
         log.explain_topic(f"Folder {self.name!r}")
@@ -53,23 +66,29 @@ class KitIpdFolder:
 
 
 class KitIpdCrawler(HttpCrawler):
 
     def __init__(
         self,
         name: str,
         section: KitIpdCrawlerSection,
         config: Config,
+        authenticators: dict[str, Authenticator],
     ):
         super().__init__(name, section, config)
         self._url = section.target()
         self._file_regex = section.link_regex()
+        self._authenticator = section.basic_auth(authenticators)
+        self._basic_auth: Optional[aiohttp.BasicAuth] = None
 
     async def _run(self) -> None:
+        if self._authenticator:
+            username, password = await self._authenticator.credentials()
+            self._basic_auth = aiohttp.BasicAuth(username, password)
+
         maybe_cl = await self.crawl(PurePath("."))
         if not maybe_cl:
             return
 
-        tasks: List[Awaitable[None]] = []
+        tasks: list[Awaitable[None]] = []
 
         async with maybe_cl:
             for item in await self._fetch_items():
@@ -87,7 +106,7 @@ class KitIpdCrawler(HttpCrawler):
         await self.gather(tasks)
 
     async def _crawl_folder(self, parent: PurePath, folder: KitIpdFolder) -> None:
-        path = parent / folder.name
+        path = parent / sanitize_path_name(folder.name)
         if not await self.crawl(path):
             return
 
@@ -104,13 +123,9 @@ class KitIpdCrawler(HttpCrawler):
         await self.gather(tasks)
 
     async def _download_file(
-        self,
-        parent: PurePath,
-        file: KitIpdFile,
-        etag: Optional[str],
-        mtime: Optional[datetime]
+        self, parent: PurePath, file: KitIpdFile, etag: Optional[str], mtime: Optional[datetime]
     ) -> None:
-        element_path = parent / file.name
+        element_path = parent / sanitize_path_name(file.name)
 
         prev_etag = self._get_previous_etag_from_report(element_path)
         etag_differs = None if prev_etag is None else prev_etag != etag
@@ -125,9 +140,9 @@ class KitIpdCrawler(HttpCrawler):
         async with maybe_dl as (bar, sink):
             await self._stream_from_url(file.url, element_path, sink, bar)
 
-    async def _fetch_items(self) -> Iterable[Union[KitIpdFile, KitIpdFolder]]:
+    async def _fetch_items(self) -> Iterable[KitIpdFile | KitIpdFolder]:
         page, url = await self.get_page()
-        elements: List[Tag] = self._find_file_links(page)
+        elements: list[Tag] = self._find_file_links(page)
 
         # do not add unnecessary nesting for a single <h1> heading
         drop_h1: bool = len(page.find_all(name="h1")) <= 1
@@ -156,16 +171,21 @@ class KitIpdCrawler(HttpCrawler):
             name = os.path.basename(url)
             return KitIpdFile(name, url)
 
-    def _find_file_links(self, tag: Union[Tag, BeautifulSoup]) -> List[Tag]:
-        return tag.findAll(name="a", attrs={"href": self._file_regex})
+    def _find_file_links(self, tag: Tag | BeautifulSoup) -> list[Tag]:
+        return cast(list[Tag], tag.find_all(name="a", attrs={"href": self._file_regex}))
 
     def _abs_url_from_link(self, url: str, link_tag: Tag) -> str:
-        return urljoin(url, link_tag.get("href"))
+        return urljoin(url, cast(str, link_tag.get("href")))
 
     async def _stream_from_url(self, url: str, path: PurePath, sink: FileSink, bar: ProgressBar) -> None:
-        async with self.session.get(url, allow_redirects=False) as resp:
+        async with self.session.get(url, allow_redirects=False, auth=self._basic_auth) as resp:
             if resp.status == 403:
                 raise CrawlError("Received a 403. Are you within the KIT network/VPN?")
+            if resp.status == 401:
+                raise CrawlError("Received a 401. Do you maybe need credentials?")
+            if resp.status >= 400:
+                raise CrawlError(f"Received HTTP {resp.status} when trying to download {url!r}")
 
             if resp.content_length:
                 bar.set_total(resp.content_length)
@@ -177,8 +197,8 @@ class KitIpdCrawler(HttpCrawler):
 
         self._add_etag_to_report(path, resp.headers.get("ETag"))
 
-    async def get_page(self) -> Tuple[BeautifulSoup, str]:
-        async with self.session.get(self._url) as request:
+    async def get_page(self) -> tuple[BeautifulSoup, str]:
+        async with self.session.get(self._url, auth=self._basic_auth) as request:
             # The web page for Algorithmen für Routenplanung contains some
             # weird comments that beautifulsoup doesn't parse correctly. This
             # hack enables those pages to be crawled, and should hopefully not
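Aside: the new `auth` option wires an authenticator section into plain HTTP basic auth. What `_run()` sets up boils down to this sketch (URL and credentials are placeholders):

```python
import aiohttp


async def fetch_with_basic_auth(url: str, username: str, password: str) -> bytes:
    # Credentials become a single BasicAuth object that is handed to every
    # session.get() call, mirroring self._basic_auth above.
    basic_auth = aiohttp.BasicAuth(username, password)
    async with aiohttp.ClientSession() as session:
        async with session.get(url, auth=basic_auth) as resp:
            if resp.status == 401:
                raise RuntimeError("Received a 401. Do you maybe need credentials?")
            return await resp.read()
```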
@@ -18,31 +18,28 @@ class LocalCrawlerSection(CrawlerSection):
     def crawl_delay(self) -> float:
         value = self.s.getfloat("crawl_delay", fallback=0.0)
         if value < 0:
-            self.invalid_value("crawl_delay", value,
-                               "Must not be negative")
+            self.invalid_value("crawl_delay", value, "Must not be negative")
         return value
 
     def download_delay(self) -> float:
         value = self.s.getfloat("download_delay", fallback=0.0)
         if value < 0:
-            self.invalid_value("download_delay", value,
-                               "Must not be negative")
+            self.invalid_value("download_delay", value, "Must not be negative")
         return value
 
     def download_speed(self) -> Optional[int]:
         value = self.s.getint("download_speed")
         if value is not None and value <= 0:
-            self.invalid_value("download_speed", value,
-                               "Must be greater than 0")
+            self.invalid_value("download_speed", value, "Must be greater than 0")
         return value
 
 
 class LocalCrawler(Crawler):
     def __init__(
         self,
         name: str,
         section: LocalCrawlerSection,
         config: Config,
     ):
         super().__init__(name, section, config)
 
@@ -74,10 +71,12 @@ class LocalCrawler(Crawler):
         tasks = []
 
         async with cl:
-            await asyncio.sleep(random.uniform(
-                0.5 * self._crawl_delay,
-                self._crawl_delay,
-            ))
+            await asyncio.sleep(
+                random.uniform(
+                    0.5 * self._crawl_delay,
+                    self._crawl_delay,
+                )
+            )
 
             for child in path.iterdir():
                 pure_child = cl.path / child.name
@@ -93,10 +92,12 @@ class LocalCrawler(Crawler):
             return
 
         async with dl as (bar, sink):
-            await asyncio.sleep(random.uniform(
-                0.5 * self._download_delay,
-                self._download_delay,
-            ))
+            await asyncio.sleep(
+                random.uniform(
+                    0.5 * self._download_delay,
+                    self._download_delay,
+                )
+            )
 
             bar.set_total(stat.st_size)
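Aside: both delays draw from `random.uniform(0.5 * delay, delay)`, so concurrent tasks never sleep less than half the configured delay nor longer than the full delay. The reformatted call is equivalent to:

```python
import asyncio
import random


async def polite_sleep(delay: float) -> None:
    # Jitter within [0.5 * delay, delay] keeps parallel crawls out of lockstep
    # without ever exceeding the configured delay.
    await asyncio.sleep(random.uniform(0.5 * delay, delay))


asyncio.run(polite_sleep(0.2))
```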
@@ -1,5 +1,5 @@
+from collections.abc import Iterator
 from pathlib import PurePath
-from typing import Iterator, Set
 
 from .logging import log
 from .utils import fmt_path
@@ -16,15 +16,34 @@ def name_variants(path: PurePath) -> Iterator[PurePath]:
 class Deduplicator:
     FORBIDDEN_CHARS = '<>:"/\\|?*' + "".join([chr(i) for i in range(0, 32)])
     FORBIDDEN_NAMES = {
-        "CON", "PRN", "AUX", "NUL",
-        "COM1", "COM2", "COM3", "COM4", "COM5", "COM6", "COM7", "COM8", "COM9",
-        "LPT1", "LPT2", "LPT3", "LPT4", "LPT5", "LPT6", "LPT7", "LPT8", "LPT9",
+        "CON",
+        "PRN",
+        "AUX",
+        "NUL",
+        "COM1",
+        "COM2",
+        "COM3",
+        "COM4",
+        "COM5",
+        "COM6",
+        "COM7",
+        "COM8",
+        "COM9",
+        "LPT1",
+        "LPT2",
+        "LPT3",
+        "LPT4",
+        "LPT5",
+        "LPT6",
+        "LPT7",
+        "LPT8",
+        "LPT9",
     }
 
     def __init__(self, windows_paths: bool) -> None:
         self._windows_paths = windows_paths
 
-        self._known: Set[PurePath] = set()
+        self._known: set[PurePath] = set()
 
     def _add(self, path: PurePath) -> None:
         self._known.add(path)
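Aside: `FORBIDDEN_NAMES` lists the Windows reserved device names, which are reserved with any extension (`CON.pdf` is as invalid as `CON`). A quick sketch of the kind of check this set enables (the helper name is illustrative, not PFERD API):

```python
from pathlib import PurePath

FORBIDDEN_NAMES = (
    {"CON", "PRN", "AUX", "NUL"}
    | {f"COM{i}" for i in range(1, 10)}
    | {f"LPT{i}" for i in range(1, 10)}
)


def is_forbidden_on_windows(name: str) -> bool:
    # Windows compares the stem case-insensitively: con.pdf -> CON
    return PurePath(name).stem.upper() in FORBIDDEN_NAMES


print(is_forbidden_on_windows("con.pdf"))     # True
print(is_forbidden_on_windows("report.pdf"))  # False
```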
@@ -1,8 +1,9 @@
 import asyncio
 import time
+from collections.abc import AsyncIterator
 from contextlib import asynccontextmanager
 from dataclasses import dataclass
-from typing import AsyncIterator, Optional
+from typing import Optional
 
 
 @dataclass
@@ -12,12 +13,7 @@ class Slot:
 
 
 class Limiter:
-    def __init__(
-        self,
-        task_limit: int,
-        download_limit: int,
-        task_delay: float
-    ):
+    def __init__(self, task_limit: int, download_limit: int, task_delay: float):
         if task_limit <= 0:
             raise ValueError("task limit must be at least 1")
         if download_limit <= 0:
@@ -1,16 +1,23 @@
 import asyncio
 import sys
 import traceback
-from contextlib import asynccontextmanager, contextmanager
-# TODO In Python 3.9 and above, ContextManager is deprecated
-from typing import AsyncIterator, ContextManager, Iterator, List, Optional
+from collections.abc import AsyncIterator, Iterator
+from contextlib import AbstractContextManager, asynccontextmanager, contextmanager
+from typing import Any, Optional
 
 from rich.console import Console, Group
 from rich.live import Live
 from rich.markup import escape
 from rich.panel import Panel
-from rich.progress import (BarColumn, DownloadColumn, Progress, TaskID, TextColumn, TimeRemainingColumn,
-                           TransferSpeedColumn)
+from rich.progress import (
+    BarColumn,
+    DownloadColumn,
+    Progress,
+    TaskID,
+    TextColumn,
+    TimeRemainingColumn,
+    TransferSpeedColumn,
+)
 from rich.table import Column
 
 
@@ -54,7 +61,7 @@ class Log:
         self._showing_progress = False
         self._progress_suspended = False
         self._lock = asyncio.Lock()
-        self._lines: List[str] = []
+        self._lines: list[str] = []
 
         # Whether different parts of the output are enabled or disabled
         self.output_explain = False
@@ -115,7 +122,7 @@ class Log:
         for line in self._lines:
             self.print(line)
 
-    def print(self, text: str) -> None:
+    def print(self, text: Any) -> None:
         """
         Print a normal message. Allows markup.
         """
@@ -177,10 +184,14 @@ class Log:
         # Our print function doesn't take types other than strings, but the
         # underlying rich.print function does. This call is a special case
         # anyways, and we're calling it internally, so this should be fine.
-        self.print(Panel.fit("""
+        self.print(
+            Panel.fit(
+                """
Please copy your program output and send it to the PFERD maintainers, either
directly or as a GitHub issue: https://github.com/Garmelon/PFERD/issues/new
-""".strip()))  # type: ignore
+""".strip()
+            )
+        )
 
     def explain_topic(self, text: str) -> None:
         """
@@ -237,10 +248,10 @@ directly or as a GitHub issue: https://github.com/Garmelon/PFERD/issues/new
 
     @contextmanager
     def _bar(
         self,
         progress: Progress,
         description: str,
         total: Optional[float],
     ) -> Iterator[ProgressBar]:
         if total is None:
             # Indeterminate progress bar
@@ -256,12 +267,12 @@ directly or as a GitHub issue: https://github.com/Garmelon/PFERD/issues/new
         self._update_live()
 
     def crawl_bar(
         self,
         style: str,
         action: str,
         text: str,
         total: Optional[float] = None,
-    ) -> ContextManager[ProgressBar]:
+    ) -> AbstractContextManager[ProgressBar]:
         """
         Allows markup in the "style" argument which will be applied to the
         "action" string.
@@ -272,12 +283,12 @@ directly or as a GitHub issue: https://github.com/Garmelon/PFERD/issues/new
         return self._bar(self._crawl_progress, description, total)
 
     def download_bar(
         self,
         style: str,
         action: str,
         text: str,
         total: Optional[float] = None,
-    ) -> ContextManager[ProgressBar]:
+    ) -> AbstractContextManager[ProgressBar]:
         """
         Allows markup in the "style" argument which will be applied to the
         "action" string.
@@ -4,12 +4,13 @@ import os
 import random
 import shutil
 import string
-from contextlib import contextmanager
+from collections.abc import Iterator
+from contextlib import contextmanager, suppress
 from dataclasses import dataclass
 from datetime import datetime
 from enum import Enum
 from pathlib import Path, PurePath
-from typing import BinaryIO, Iterator, Optional, Tuple
+from typing import BinaryIO, Optional
 
 from .logging import log
 from .report import Report, ReportLoadError
@@ -35,8 +36,7 @@ class Redownload(Enum):
         try:
             return Redownload(string)
         except ValueError:
-            raise ValueError("must be one of 'never', 'never-smart',"
-                             " 'always', 'always-smart'")
+            raise ValueError("must be one of 'never', 'never-smart', 'always', 'always-smart'") from None
 
 
 class OnConflict(Enum):
@@ -51,8 +51,10 @@ class OnConflict(Enum):
         try:
             return OnConflict(string)
         except ValueError:
-            raise ValueError("must be one of 'prompt', 'local-first',"
-                             " 'remote-first', 'no-delete', 'no-delete-prompt-overwrite'")
+            raise ValueError(
+                "must be one of 'prompt', 'local-first',"
+                " 'remote-first', 'no-delete', 'no-delete-prompt-overwrite'"
+            ) from None
 
 
 @dataclass
@@ -96,13 +98,13 @@ class FileSinkToken(ReusableAsyncContextManager[FileSink]):
     # download handed back to the OutputDirectory.
 
     def __init__(
         self,
         output_dir: "OutputDirectory",
         remote_path: PurePath,
         path: PurePath,
         local_path: Path,
         heuristics: Heuristics,
         on_conflict: OnConflict,
     ):
         super().__init__()
@@ -118,15 +120,17 @@ class FileSinkToken(ReusableAsyncContextManager[FileSink]):
         sink = FileSink(file)
 
         async def after_download() -> None:
-            await self._output_dir._after_download(DownloadInfo(
-                self._remote_path,
-                self._path,
-                self._local_path,
-                tmp_path,
-                self._heuristics,
-                self._on_conflict,
-                sink.is_done(),
-            ))
+            await self._output_dir._after_download(
+                DownloadInfo(
+                    self._remote_path,
+                    self._path,
+                    self._local_path,
+                    tmp_path,
+                    self._heuristics,
+                    self._on_conflict,
+                    sink.is_done(),
+                )
+            )
 
         self._stack.push_async_callback(after_download)
         self._stack.enter_context(file)
@@ -138,10 +142,10 @@ class OutputDirectory:
     REPORT_FILE = PurePath(".report")
 
     def __init__(
         self,
         root: Path,
         redownload: Redownload,
         on_conflict: OnConflict,
     ):
         if os.name == "nt":
             # Windows limits the path length to 260 for some historical reason.
@@ -174,8 +178,8 @@ class OutputDirectory:
 
         try:
             self._root.mkdir(parents=True, exist_ok=True)
-        except OSError:
-            raise OutputDirError("Failed to create base directory")
+        except OSError as e:
+            raise OutputDirError("Failed to create base directory") from e
 
     def register_reserved(self, path: PurePath) -> None:
         self._report.mark_reserved(path)
|
||||||
@@ -193,11 +197,11 @@ class OutputDirectory:
|
|||||||
return self._root / path
|
return self._root / path
|
||||||
|
|
||||||
def _should_download(
|
def _should_download(
|
||||||
self,
|
self,
|
||||||
local_path: Path,
|
local_path: Path,
|
||||||
heuristics: Heuristics,
|
heuristics: Heuristics,
|
||||||
redownload: Redownload,
|
redownload: Redownload,
|
||||||
on_conflict: OnConflict,
|
on_conflict: OnConflict,
|
||||||
) -> bool:
|
) -> bool:
|
||||||
if not local_path.exists():
|
if not local_path.exists():
|
||||||
log.explain("No corresponding file present locally")
|
log.explain("No corresponding file present locally")
|
||||||
@@ -270,9 +274,9 @@ class OutputDirectory:
|
|||||||
# files.
|
# files.
|
||||||
|
|
||||||
async def _conflict_lfrf(
|
async def _conflict_lfrf(
|
||||||
self,
|
self,
|
||||||
on_conflict: OnConflict,
|
on_conflict: OnConflict,
|
||||||
path: PurePath,
|
path: PurePath,
|
||||||
) -> bool:
|
) -> bool:
|
||||||
if on_conflict in {OnConflict.PROMPT, OnConflict.NO_DELETE_PROMPT_OVERWRITE}:
|
if on_conflict in {OnConflict.PROMPT, OnConflict.NO_DELETE_PROMPT_OVERWRITE}:
|
||||||
async with log.exclusive_output():
|
async with log.exclusive_output():
|
||||||
@@ -289,9 +293,9 @@ class OutputDirectory:
|
|||||||
raise ValueError(f"{on_conflict!r} is not a valid conflict policy")
|
raise ValueError(f"{on_conflict!r} is not a valid conflict policy")
|
||||||
|
|
||||||
async def _conflict_ldrf(
|
async def _conflict_ldrf(
|
||||||
self,
|
self,
|
||||||
on_conflict: OnConflict,
|
on_conflict: OnConflict,
|
||||||
path: PurePath,
|
path: PurePath,
|
||||||
) -> bool:
|
) -> bool:
|
||||||
if on_conflict in {OnConflict.PROMPT, OnConflict.NO_DELETE_PROMPT_OVERWRITE}:
|
if on_conflict in {OnConflict.PROMPT, OnConflict.NO_DELETE_PROMPT_OVERWRITE}:
|
||||||
async with log.exclusive_output():
|
async with log.exclusive_output():
|
||||||
@@ -308,10 +312,10 @@ class OutputDirectory:
|
|||||||
raise ValueError(f"{on_conflict!r} is not a valid conflict policy")
|
raise ValueError(f"{on_conflict!r} is not a valid conflict policy")
|
||||||
|
|
||||||
async def _conflict_lfrd(
|
async def _conflict_lfrd(
|
||||||
self,
|
self,
|
||||||
on_conflict: OnConflict,
|
on_conflict: OnConflict,
|
||||||
path: PurePath,
|
path: PurePath,
|
||||||
parent: PurePath,
|
parent: PurePath,
|
||||||
) -> bool:
|
) -> bool:
|
||||||
if on_conflict in {OnConflict.PROMPT, OnConflict.NO_DELETE_PROMPT_OVERWRITE}:
|
if on_conflict in {OnConflict.PROMPT, OnConflict.NO_DELETE_PROMPT_OVERWRITE}:
|
||||||
async with log.exclusive_output():
|
async with log.exclusive_output():
|
||||||
@@ -328,9 +332,9 @@ class OutputDirectory:
|
|||||||
raise ValueError(f"{on_conflict!r} is not a valid conflict policy")
|
raise ValueError(f"{on_conflict!r} is not a valid conflict policy")
|
||||||
|
|
||||||
async def _conflict_delete_lf(
|
async def _conflict_delete_lf(
|
||||||
self,
|
self,
|
||||||
on_conflict: OnConflict,
|
on_conflict: OnConflict,
|
||||||
path: PurePath,
|
path: PurePath,
|
||||||
) -> bool:
|
) -> bool:
|
||||||
if on_conflict == OnConflict.PROMPT:
|
if on_conflict == OnConflict.PROMPT:
|
||||||
async with log.exclusive_output():
|
async with log.exclusive_output():
|
||||||
@@ -353,9 +357,9 @@ class OutputDirectory:
|
|||||||
return base.parent / name
|
return base.parent / name
|
||||||
|
|
||||||
async def _create_tmp_file(
|
async def _create_tmp_file(
|
||||||
self,
|
self,
|
||||||
local_path: Path,
|
local_path: Path,
|
||||||
) -> Tuple[Path, BinaryIO]:
|
) -> tuple[Path, BinaryIO]:
|
||||||
"""
|
"""
|
||||||
May raise an OutputDirError.
|
May raise an OutputDirError.
|
||||||
"""
|
"""
|
||||||
@@ -371,15 +375,31 @@ class OutputDirectory:
|
|||||||
|
|
||||||
raise OutputDirError("Failed to create temporary file")
|
raise OutputDirError("Failed to create temporary file")
|
||||||
|
|
||||||
|
def should_try_download(
|
||||||
|
self,
|
||||||
|
path: PurePath,
|
||||||
|
*,
|
||||||
|
etag_differs: Optional[bool] = None,
|
||||||
|
mtime: Optional[datetime] = None,
|
||||||
|
redownload: Optional[Redownload] = None,
|
||||||
|
on_conflict: Optional[OnConflict] = None,
|
||||||
|
) -> bool:
|
||||||
|
heuristics = Heuristics(etag_differs, mtime)
|
||||||
|
redownload = self._redownload if redownload is None else redownload
|
||||||
|
on_conflict = self._on_conflict if on_conflict is None else on_conflict
|
||||||
|
local_path = self.resolve(path)
|
||||||
|
|
||||||
|
return self._should_download(local_path, heuristics, redownload, on_conflict)
|
||||||
|
|
||||||
async def download(
|
async def download(
|
||||||
self,
|
self,
|
||||||
remote_path: PurePath,
|
remote_path: PurePath,
|
||||||
path: PurePath,
|
path: PurePath,
|
||||||
*,
|
*,
|
||||||
etag_differs: Optional[bool] = None,
|
etag_differs: Optional[bool] = None,
|
||||||
mtime: Optional[datetime] = None,
|
mtime: Optional[datetime] = None,
|
||||||
redownload: Optional[Redownload] = None,
|
redownload: Optional[Redownload] = None,
|
||||||
on_conflict: Optional[OnConflict] = None,
|
on_conflict: Optional[OnConflict] = None,
|
||||||
) -> Optional[FileSinkToken]:
|
) -> Optional[FileSinkToken]:
|
||||||
"""
|
"""
|
||||||
May throw an OutputDirError, a MarkDuplicateError or a
|
May throw an OutputDirError, a MarkDuplicateError or a
|
||||||
@@ -490,10 +510,8 @@ class OutputDirectory:
|
|||||||
await self._cleanup(child, pure_child)
|
await self._cleanup(child, pure_child)
|
||||||
|
|
||||||
if delete_self:
|
if delete_self:
|
||||||
try:
|
with suppress(OSError):
|
||||||
path.rmdir()
|
path.rmdir()
|
||||||
except OSError:
|
|
||||||
pass
|
|
||||||
|
|
||||||
async def _cleanup_file(self, path: Path, pure: PurePath) -> None:
|
async def _cleanup_file(self, path: Path, pure: PurePath) -> None:
|
||||||
if self._report.is_marked(pure):
|
if self._report.is_marked(pure):
|
||||||
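
The new `should_try_download` method exposes the redownload heuristics as a cheap pre-check, so a crawler can skip the network request entirely when the local copy would be kept anyway. A minimal usage sketch under assumed names (`fetch_etag` and the surrounding coroutine are hypothetical; only `should_try_download` and `download` come from the hunk above):

    # Sketch only, not PFERD's actual crawler code.
    from pathlib import PurePath

    async def maybe_download(output_dir, remote_path: PurePath, path: PurePath, fetch_etag) -> None:
        # Cheap check first: does the server's ETag differ from the stored one?
        etag_differs = await fetch_etag(remote_path)
        if not output_dir.should_try_download(path, etag_differs=etag_differs):
            return  # heuristics say the local file stays; no request needed

        # download() re-evaluates the same heuristics and may still return None
        # (e.g. on a conflict), so the pre-check is an optimization, not a gate.
        token = await output_dir.download(remote_path, path, etag_differs=etag_differs)
        if token is None:
            return
        async with token as sink:
            ...  # stream the response body into the sink here

Note also the `raise ... from None` / `from e` additions in this file: they make exception chaining explicit, so tracebacks either drop the uninformative inner ValueError or point back at the original OSError.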
PFERD/pferd.py

@@ -1,6 +1,5 @@
 from pathlib import Path, PurePath
-from typing import Dict, List, Optional
-from urllib.parse import quote
+from typing import Optional

 from rich.markup import escape


@@ -16,7 +15,7 @@ class PferdLoadError(Exception):


 class Pferd:
-    def __init__(self, config: Config, cli_crawlers: Optional[List[str]], cli_skips: Optional[List[str]]):
+    def __init__(self, config: Config, cli_crawlers: Optional[list[str]], cli_skips: Optional[list[str]]):
         """
         May throw PferdLoadError.
         """

@@ -24,10 +23,10 @@ class Pferd:
         self._config = config
         self._crawlers_to_run = self._find_crawlers_to_run(config, cli_crawlers, cli_skips)

-        self._authenticators: Dict[str, Authenticator] = {}
-        self._crawlers: Dict[str, Crawler] = {}
+        self._authenticators: dict[str, Authenticator] = {}
+        self._crawlers: dict[str, Crawler] = {}

-    def _find_config_crawlers(self, config: Config) -> List[str]:
+    def _find_config_crawlers(self, config: Config) -> list[str]:
         crawl_sections = []

         for name, section in config.crawl_sections():

@@ -38,7 +37,7 @@ class Pferd:

         return crawl_sections

-    def _find_cli_crawlers(self, config: Config, cli_crawlers: List[str]) -> List[str]:
+    def _find_cli_crawlers(self, config: Config, cli_crawlers: list[str]) -> list[str]:
         if len(cli_crawlers) != len(set(cli_crawlers)):
             raise PferdLoadError("Some crawlers were selected multiple times")


@@ -67,14 +66,14 @@ class Pferd:
         return crawlers_to_run

     def _find_crawlers_to_run(
         self,
         config: Config,
-        cli_crawlers: Optional[List[str]],
-        cli_skips: Optional[List[str]],
-    ) -> List[str]:
+        cli_crawlers: Optional[list[str]],
+        cli_skips: Optional[list[str]],
+    ) -> list[str]:
         log.explain_topic("Deciding which crawlers to run")

-        crawlers: List[str]
+        crawlers: list[str]
         if cli_crawlers is None:
             log.explain("No crawlers specified on CLI")
             log.explain("Running crawlers specified in config")

@@ -105,7 +104,7 @@ class Pferd:

     def _load_crawlers(self) -> None:
         # Cookie sharing
-        kit_ilias_web_paths: Dict[Authenticator, List[Path]] = {}
+        kit_ilias_web_paths: dict[Authenticator, list[Path]] = {}

         for name, section in self._config.crawl_sections():
             log.print(f"[bold bright_cyan]Loading[/] {escape(name)}")

@@ -118,9 +117,8 @@ class Pferd:
             crawler = crawler_constructor(name, section, self._config, self._authenticators)
             self._crawlers[name] = crawler

-            if self._config.default_section.share_cookies():
-                if isinstance(crawler, KitIliasWebCrawler):
-                    crawler.share_cookies(kit_ilias_web_paths)
+            if self._config.default_section.share_cookies() and isinstance(crawler, KitIliasWebCrawler):
+                crawler.share_cookies(kit_ilias_web_paths)

     def debug_transforms(self) -> None:
         for name in self._crawlers_to_run:

@@ -162,16 +160,17 @@ class Pferd:

     def print_report(self) -> None:
         for name in self._crawlers_to_run:
-            crawler = self._crawlers.get(name)
-            if crawler is None:
+            crawlerOpt = self._crawlers.get(name)
+            if crawlerOpt is None:
                 continue  # Crawler failed to load
+            crawler = crawlerOpt

             log.report("")
             log.report(f"[bold bright_cyan]Report[/] for {escape(name)}")

             def fmt_path_link(relative_path: PurePath) -> str:
                 # We need to URL-encode the path because it might contain spaces or special characters
-                link = f"file://{quote(str(crawler.output_dir.resolve(relative_path).absolute()))}"
+                link = crawler.output_dir.resolve(relative_path).absolute().as_uri()
                 return f"[link={link}]{fmt_path(relative_path)}[/link]"

             something_changed = False
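
`Path.as_uri()` replaces the hand-rolled `f"file://{quote(...)}"` because it percent-encodes and adds the scheme in one step, and it also yields the correct `file:///C:/...` shape for Windows drive paths. A small standalone comparison (the file name is invented):

    from pathlib import Path
    from urllib.parse import quote

    p = Path("/tmp/Vorlesung 01.pdf")
    manual = f"file://{quote(str(p.absolute()))}"  # old approach
    builtin = p.absolute().as_uri()                # new approach
    print(manual)   # file:///tmp/Vorlesung%2001.pdf
    print(builtin)  # file:///tmp/Vorlesung%2001.pdf

On POSIX both agree for this input; the manual version breaks on Windows, where `str(path)` contains a drive letter and backslashes that `quote` mangles.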
PFERD/report.py

@@ -1,6 +1,6 @@
 import json
 from pathlib import Path, PurePath
-from typing import Any, Dict, List, Optional, Set
+from typing import Any, Optional


 class ReportLoadError(Exception):

@@ -34,15 +34,6 @@ class MarkConflictError(Exception):
         self.collides_with = collides_with


-# TODO Use PurePath.is_relative_to when updating to 3.9
-def is_relative_to(a: PurePath, b: PurePath) -> bool:
-    try:
-        a.relative_to(b)
-        return True
-    except ValueError:
-        return False
-
-
 class Report:
     """
     A report of a synchronization. Includes all files found by the crawler, as

@@ -51,32 +42,32 @@ class Report:

     def __init__(self) -> None:
         # Paths found by the crawler, untransformed
-        self.found_paths: Set[PurePath] = set()
+        self.found_paths: set[PurePath] = set()

         # Files reserved for metadata files (e. g. the report file or cookies)
         # that can't be overwritten by user transforms and won't be cleaned up
         # at the end.
-        self.reserved_files: Set[PurePath] = set()
+        self.reserved_files: set[PurePath] = set()

         # Files found by the crawler, transformed. Only includes files that
         # were downloaded (or a download was attempted)
-        self.known_files: Set[PurePath] = set()
+        self.known_files: set[PurePath] = set()

-        self.added_files: Set[PurePath] = set()
-        self.changed_files: Set[PurePath] = set()
-        self.deleted_files: Set[PurePath] = set()
+        self.added_files: set[PurePath] = set()
+        self.changed_files: set[PurePath] = set()
+        self.deleted_files: set[PurePath] = set()
         # Files that should have been deleted by the cleanup but weren't
-        self.not_deleted_files: Set[PurePath] = set()
+        self.not_deleted_files: set[PurePath] = set()

         # Custom crawler-specific data
-        self.custom: Dict[str, Any] = dict()
+        self.custom: dict[str, Any] = dict()

         # Encountered errors and warnings
-        self.encountered_warnings: List[str] = []
-        self.encountered_errors: List[str] = []
+        self.encountered_warnings: list[str] = []
+        self.encountered_errors: list[str] = []

     @staticmethod
-    def _get_list_of_strs(data: Dict[str, Any], key: str) -> List[str]:
+    def _get_list_of_strs(data: dict[str, Any], key: str) -> list[str]:
         result: Any = data.get(key, [])

         if not isinstance(result, list):

@@ -89,8 +80,8 @@ class Report:
         return result

     @staticmethod
-    def _get_str_dictionary(data: Dict[str, Any], key: str) -> Dict[str, Any]:
-        result: Dict[str, Any] = data.get(key, {})
+    def _get_str_dictionary(data: dict[str, Any], key: str) -> dict[str, Any]:
+        result: dict[str, Any] = data.get(key, {})

         if not isinstance(result, dict):
             raise ReportLoadError(f"Incorrect format: {key!r} is not a dictionary")

@@ -173,13 +164,13 @@ class Report:
             if path == other:
                 raise MarkDuplicateError(path)

-            if is_relative_to(path, other) or is_relative_to(other, path):
+            if path.is_relative_to(other) or other.is_relative_to(path):
                 raise MarkConflictError(path, other)

         self.known_files.add(path)

     @property
-    def marked(self) -> Set[PurePath]:
+    def marked(self) -> set[PurePath]:
         return self.known_files | self.reserved_files

     def is_marked(self, path: PurePath) -> bool:
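
The deleted module-level helper was a backport; `PurePath.is_relative_to` has been in the standard library since Python 3.9, and the project now requires 3.11. Its semantics match the old try/except around `relative_to`:

    from pathlib import PurePath

    print(PurePath("course/week1/slides.pdf").is_relative_to(PurePath("course")))  # True
    print(PurePath("course").is_relative_to(PurePath("other")))                    # False

This is what the conflict check above uses in both directions to catch one marked path being a prefix of another.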
PFERD/transformer.py

@@ -1,10 +1,12 @@
 import ast
+import contextlib
 import re
 from abc import ABC, abstractmethod
+from collections.abc import Callable, Sequence
 from dataclasses import dataclass
 from enum import Enum
 from pathlib import PurePath
-from typing import Callable, Dict, List, Optional, Sequence, TypeVar, Union
+from typing import Optional, TypeVar

 from .logging import log
 from .utils import fmt_path, str_path

@@ -23,7 +25,7 @@ class Empty:
     pass


-RightSide = Union[str, Ignore, Empty]
+RightSide = str | Ignore | Empty


 @dataclass

@@ -35,7 +37,7 @@ class Ignored:
     pass


-TransformResult = Optional[Union[Transformed, Ignored]]
+TransformResult = Transformed | Ignored | None


 @dataclass

@@ -47,7 +49,7 @@ class Rule:
     right: RightSide
     right_index: int

-    def right_result(self, path: PurePath) -> Union[str, Transformed, Ignored]:
+    def right_result(self, path: PurePath) -> str | Transformed | Ignored:
         if isinstance(self.right, str):
             return self.right
         elif isinstance(self.right, Ignore):

@@ -93,24 +95,20 @@ class ExactReTf(Transformation):
         # since elements of "match.groups()" can be None, mypy is wrong.
         groups: Sequence[Optional[str]] = [match[0]] + list(match.groups())

-        locals_dir: Dict[str, Union[str, int, float]] = {}
+        locals_dir: dict[str, str | int | float] = {}
         for i, group in enumerate(groups):
             if group is None:
                 continue

             locals_dir[f"g{i}"] = group

-            try:
+            with contextlib.suppress(ValueError):
                 locals_dir[f"i{i}"] = int(group)
-            except ValueError:
-                pass

-            try:
+            with contextlib.suppress(ValueError):
                 locals_dir[f"f{i}"] = float(group)
-            except ValueError:
-                pass

-        named_groups: Dict[str, str] = match.groupdict()
+        named_groups: dict[str, str] = match.groupdict()
         for name, capture in named_groups.items():
             locals_dir[name] = capture


@@ -208,7 +206,7 @@ class Line:

     @property
     def rest(self) -> str:
-        return self.line[self.index:]
+        return self.line[self.index :]

     def peek(self, amount: int = 1) -> str:
         return self.rest[:amount]

@@ -228,7 +226,7 @@ class Line:
         self.expect(string)
         return value

-    def one_of(self, parsers: List[Callable[[], T]], description: str) -> T:
+    def one_of(self, parsers: list[Callable[[], T]], description: str) -> T:
         for parser in parsers:
             index = self.index
             try:

@@ -315,7 +313,7 @@ def parse_left(line: Line) -> str:
     return parse_str(line)


-def parse_right(line: Line) -> Union[str, Ignore]:
+def parse_right(line: Line) -> str | Ignore:
     c = line.peek()
     if c in QUOTATION_MARKS:
         return parse_quoted_str(line)

@@ -327,21 +325,27 @@ def parse_right(line: Line) -> Union[str, Ignore]:


 def parse_arrow_name(line: Line) -> str:
-    return line.one_of([
-        lambda: line.expect("exact-re"),
-        lambda: line.expect("exact"),
-        lambda: line.expect("name-re"),
-        lambda: line.expect("name"),
-        lambda: line.expect("re"),
-        lambda: line.expect(""),
-    ], "Expected arrow name")
+    return line.one_of(
+        [
+            lambda: line.expect("exact-re"),
+            lambda: line.expect("exact"),
+            lambda: line.expect("name-re"),
+            lambda: line.expect("name"),
+            lambda: line.expect("re"),
+            lambda: line.expect(""),
+        ],
+        "Expected arrow name",
+    )


 def parse_arrow_head(line: Line) -> ArrowHead:
-    return line.one_of([
-        lambda: line.expect_with(">>", ArrowHead.SEQUENCE),
-        lambda: line.expect_with(">", ArrowHead.NORMAL),
-    ], "Expected arrow head")
+    return line.one_of(
+        [
+            lambda: line.expect_with(">>", ArrowHead.SEQUENCE),
+            lambda: line.expect_with(">", ArrowHead.NORMAL),
+        ],
+        "Expected arrow head",
+    )


 def parse_eol(line: Line) -> None:

@@ -413,12 +417,12 @@ class Transformer:

     def transform(self, path: PurePath) -> Optional[PurePath]:
         for i, (line, tf) in enumerate(self._tfs):
-            log.explain(f"Testing rule {i+1}: {line}")
+            log.explain(f"Testing rule {i + 1}: {line}")

             try:
                 result = tf.transform(path)
             except Exception as e:
-                log.warn(f"Error while testing rule {i+1}: {line}")
+                log.warn(f"Error while testing rule {i + 1}: {line}")
                 log.warn_contd(str(e))
                 continue

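
`contextlib.suppress(ValueError)` is behaviorally equivalent to the removed try/except/pass blocks: the body is abandoned at the point of the ValueError and execution resumes after the `with`. A self-contained sketch of the group-parsing pattern from `ExactReTf` (the input values are made up):

    import contextlib

    locals_dir: dict[str, str | int | float] = {}
    for i, group in enumerate(["42", "3.5", "abc"]):
        locals_dir[f"g{i}"] = group
        with contextlib.suppress(ValueError):
            locals_dir[f"i{i}"] = int(group)    # set only if the group parses as int
        with contextlib.suppress(ValueError):
            locals_dir[f"f{i}"] = float(group)  # set only if the group parses as float

    print(locals_dir)
    # {'g0': '42', 'i0': 42, 'f0': 42.0, 'g1': '3.5', 'f1': 3.5, 'g2': 'abc'}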
PFERD/utils.py

@@ -2,11 +2,13 @@ import asyncio
 import getpass
 import sys
 import threading
+import traceback
 from abc import ABC, abstractmethod
+from collections.abc import Callable
 from contextlib import AsyncExitStack
 from pathlib import Path, PurePath
 from types import TracebackType
-from typing import Any, Callable, Dict, Generic, Optional, Type, TypeVar
+from typing import Any, Generic, Optional, TypeVar
 from urllib.parse import parse_qs, urlencode, urlsplit, urlunsplit

 import bs4

@@ -79,7 +81,7 @@ def url_set_query_param(url: str, param: str, value: str) -> str:
     return urlunsplit((scheme, netloc, path, new_query_string, fragment))


-def url_set_query_params(url: str, params: Dict[str, str]) -> str:
+def url_set_query_params(url: str, params: dict[str, str]) -> str:
     """
     Sets multiple query parameters in an url, overwriting existing ones.
     """

@@ -105,17 +107,31 @@ def fmt_real_path(path: Path) -> str:
     return repr(str(path.absolute()))


+def sanitize_path_name(name: str) -> str:
+    return name.replace("/", "-").replace("\\", "-").strip()
+
+
 class ReusableAsyncContextManager(ABC, Generic[T]):
     def __init__(self) -> None:
         self._active = False
         self._stack = AsyncExitStack()
+        self._create_stacktrace = traceback.format_stack()
+        self._enter_stacktraces = []

     @abstractmethod
     async def _on_aenter(self) -> T:
         pass

     async def __aenter__(self) -> T:
+        self._enter_stacktraces.append(traceback.format_stack())
+
         if self._active:
+            print("Context manager was already active. Created at:")
+            print("".join(self._create_stacktrace))
+            print("\n== Previous __aenter__ calls")
+            for i, stacktrace in enumerate(self._enter_stacktraces, start=1):
+                print(f"\n-- __aenter__ call #{i} at:")
+                print("".join(stacktrace))
             raise RuntimeError("Nested or otherwise concurrent usage is not allowed")

         self._active = True

@@ -124,21 +140,23 @@ class ReusableAsyncContextManager(ABC, Generic[T]):
         # See https://stackoverflow.com/a/13075071
         try:
             result: T = await self._on_aenter()
-        except:  # noqa: E722 do not use bare 'except'
+            return result
+        except:
             if not await self.__aexit__(*sys.exc_info()):
                 raise
-
-        return result
+            raise

     async def __aexit__(
         self,
-        exc_type: Optional[Type[BaseException]],
+        exc_type: Optional[type[BaseException]],
         exc_value: Optional[BaseException],
         traceback: Optional[TracebackType],
     ) -> Optional[bool]:
         if not self._active:
             raise RuntimeError("__aexit__ called too many times")

+        self._enter_stacktraces.pop()
+
         result = await self._stack.__aexit__(exc_type, exc_value, traceback)
         self._active = False
         return result
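
The stack-trace bookkeeping added here exists to diagnose the "Nested or otherwise concurrent usage" error: `traceback.format_stack()` is recorded once when the manager is created and once per `__aenter__`, so when the guard trips, the output shows where the instance came from and every place it was entered. A stripped-down sketch of the mechanism (the `Demo` class is invented; the guard mirrors the diff):

    import asyncio
    import traceback

    class Demo:
        def __init__(self) -> None:
            self._active = False
            self._create_stacktrace = traceback.format_stack()  # where was this built?

        async def __aenter__(self) -> "Demo":
            if self._active:
                print("Context manager was already active. Created at:")
                print("".join(self._create_stacktrace))
                raise RuntimeError("Nested or otherwise concurrent usage is not allowed")
            self._active = True
            return self

        async def __aexit__(self, *exc) -> None:
            self._active = False

    async def main() -> None:
        demo = Demo()
        async with demo:
            async with demo:  # second enter on the same instance trips the guard
                pass

    asyncio.run(main())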
PFERD/version.py

@@ -1,2 +1,2 @@
 NAME = "PFERD"
-VERSION = "3.7.0"
+VERSION = "3.8.3"

README.md

@@ -17,7 +17,7 @@ Binaries for Linux, Windows and Mac can be downloaded directly from the

 ### With pip

-Ensure you have at least Python 3.9 installed. Run the following command to
+Ensure you have at least Python 3.11 installed. Run the following command to
 install PFERD or upgrade it to the latest version:

 ```
flake.lock (generated, 8 lines changed)

@@ -2,16 +2,16 @@
   "nodes": {
     "nixpkgs": {
       "locked": {
-        "lastModified": 1708979614,
-        "narHash": "sha256-FWLWmYojIg6TeqxSnHkKpHu5SGnFP5um1uUjH+wRV6g=",
+        "lastModified": 1760725957,
+        "narHash": "sha256-tdoIhL/NlER290HfSjOkgi4jfmjeqmqrzgnmiMtGepE=",
         "owner": "NixOS",
         "repo": "nixpkgs",
-        "rev": "b7ee09cf5614b02d289cd86fcfa6f24d4e078c2a",
+        "rev": "81b927b14b7b3988334d5282ef9cba802e193fe1",
         "type": "github"
       },
       "original": {
         "owner": "NixOS",
-        "ref": "nixos-23.11",
+        "ref": "nixos-25.05",
         "repo": "nixpkgs",
         "type": "github"
       }
flake.nix

@@ -2,7 +2,7 @@
   description = "Tool for downloading course-related files from ILIAS";

   inputs = {
-    nixpkgs.url = "github:NixOS/nixpkgs/nixos-23.11";
+    nixpkgs.url = "github:NixOS/nixpkgs/nixos-25.05";
   };

   outputs = { self, nixpkgs }:
pyproject.toml

@@ -12,7 +12,7 @@ dependencies = [
     "certifi>=2021.10.8"
 ]
 dynamic = ["version"]
-requires-python = ">=3.9"
+requires-python = ">=3.11"

 [project.scripts]
 pferd = "PFERD.__main__:main"

@@ -20,23 +20,33 @@ pferd = "PFERD.__main__:main"
 [tool.setuptools.dynamic]
 version = {attr = "PFERD.version.VERSION"}

-[tool.flake8]
-max-line-length = 110
+[tool.ruff]
+line-length = 110

-[tool.isort]
-line_length = 110
+[tool.ruff.lint]
+select = [
+    # pycodestyle
+    "E",
+    # Pyflakes
+    "F",
+    # pyupgrade
+    "UP",
+    # flake8-bugbear
+    "B",
+    # flake8-simplify
+    "SIM",
+    # isort
+    "I",
+]
+ignore = [
+    "UP045",
+    "SIM114",
+    "B023"
+]

-[tool.autopep8]
-max_line_length = 110
-in-place = true
-recursive = true
-
-[tool.mypy]
-disallow_any_generics = true
-disallow_untyped_defs = true
-disallow_incomplete_defs = true
-no_implicit_optional = true
-warn_unused_ignores = true
-warn_unreachable = true
-show_error_context = true
-ignore_missing_imports = true
+[dependency-groups]
+dev = [
+    "pyinstaller>=6.16.0",
+    "pyright>=1.1.406",
+    "ruff>=0.14.1",
+]
scripts/build

@@ -2,4 +2,4 @@

 set -e

-pyinstaller --onefile pferd.py
+uv run pyinstaller --onefile pferd.py
scripts/check

@@ -2,5 +2,5 @@

 set -e

-mypy .
-flake8 PFERD
+uv run pyright .
+uv run ruff check
scripts/format

@@ -2,5 +2,4 @@

 set -e

-autopep8 .
-isort .
+uv run ruff format