Mirror of https://github.com/Garmelon/PFERD.git
Synced 2025-10-19 16:22:33 +02:00

Compare commits (7 commits):

- 45e25db5ad
- ef7d66c5af
- 5646e933fd
- 6e563134b2
- 2cf0e060ed
- ee4625be78
- f6c713d621
.git-blame-ignore-revs (new file, 1 line)

@@ -0,0 +1 @@
+2cf0e060ed126537dd993896b6aa793e2a6b9e80
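The new file lists the bulk-reformatting commit so that `git blame` can skip it when attributing lines. A minimal sketch of how a contributor would opt in locally (standard git behaviour, not something added by this change):

```sh
# Tell git blame to skip the revisions listed in .git-blame-ignore-revs
git config blame.ignoreRevsFile .git-blame-ignore-revs

# blame now reports the commits that last touched the logic,
# not the formatting commit listed above
git blame path/to/file.py
```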
.github/workflows/build-and-release.yml (vendored, 12 lines changed)

@@ -18,19 +18,13 @@ jobs:
     steps:
       - uses: actions/checkout@v4

-      - uses: actions/setup-python@v5
+      - name: Install uv
+        uses: astral-sh/setup-uv@v7
         with:
           python-version: ${{ matrix.python }}

       - name: Set up project
-        if: matrix.os != 'windows-latest'
-        run: ./scripts/setup
-
-      - name: Set up project on windows
-        if: matrix.os == 'windows-latest'
-        # For some reason, `pip install --upgrade pip` doesn't work on
-        # 'windows-latest'. The installed pip version works fine however.
-        run: ./scripts/setup --no-pip
+        run: uv sync

       - name: Run checks
         run: |
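To reproduce locally what the reworked job now does before the checks, the equivalent shell steps would be roughly as follows (a sketch; the concrete check commands live in the `run: |` block that is cut off above):

```sh
# One of uv's documented installers; any installation method works
curl -LsSf https://astral.sh/uv/install.sh | sh

# Create the project environment and install dependencies plus PFERD itself
uv sync
```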
DEV.md (21 lines changed)

@@ -9,30 +9,25 @@ particular [this][ppug-1] and [this][ppug-2] guide).

 ## Setting up a dev environment

-The use of [venv][venv] is recommended. To initially set up a development
-environment, run these commands in the same directory as this file:
+The use of [venv][venv] and [uv][uv] is recommended. To initially set up a
+development environment, run these commands in the same directory as this file:

 ```
-$ python -m venv .venv
+$ uv sync
 $ . .venv/bin/activate
-$ ./scripts/setup
 ```

-The setup script installs a few required dependencies and tools. It also
-installs PFERD via `pip install --editable .`, which means that you can just run
-`pferd` as if it was installed normally. Since PFERD was installed with
-`--editable`, there is no need to re-run `pip install` when the source code is
-changed.
-
-If you get any errors because pip can't update itself, try running
-`./scripts/setup --no-pip` instead of `./scripts/setup`.
+This install all required dependencies and tools. It also installs PFERD as
+*editable*, which means that you can just run `pferd` as if it was installed
+normally. Since PFERD was installed with `--editable`, there is no need to
+re-run `uv sync` when the source code is changed.

 For more details, see [this part of the Python Tutorial][venv-tut] and
 [this section on "development mode"][ppug-dev].

 [venv]: <https://docs.python.org/3/library/venv.html> "venv - Creation of virtual environments"
 [venv-tut]: <https://docs.python.org/3/tutorial/venv.html> "12. Virtual Environments and Packages"
-[ppug-dev]: <https://packaging.python.org/guides/distributing-packages-using-setuptools/#working-in-development-mode> "Working in “development mode”"
+[uv]: <https://docs.astral.sh/uv/> "uv - An extremely fast Python package and project manager"

 ## Checking and formatting the code

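With the switch to uv, the whole setup collapses into one command. A short sketch of the updated workflow (this relies on uv's default behaviour of creating `.venv` and installing the project in editable mode; the `--version` call is just an arbitrary smoke test):

```sh
$ uv sync            # creates .venv and installs dependencies plus PFERD itself (editable)
$ . .venv/bin/activate
$ pferd --version    # the editable install is picked up directly
```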
@@ -1,5 +1,5 @@
+from collections.abc import Callable
 from configparser import SectionProxy
-from typing import Callable, Dict

 from ..config import Config
 from .authenticator import Authenticator, AuthError, AuthLoadError, AuthSection  # noqa: F401

@@ -9,21 +9,19 @@ from .pass_ import PassAuthenticator, PassAuthSection
 from .simple import SimpleAuthenticator, SimpleAuthSection
 from .tfa import TfaAuthenticator

-AuthConstructor = Callable[[
-    str,  # Name (without the "auth:" prefix)
-    SectionProxy,  # Authenticator's section of global config
-    Config,  # Global config
-], Authenticator]
+AuthConstructor = Callable[
+    [
+        str,  # Name (without the "auth:" prefix)
+        SectionProxy,  # Authenticator's section of global config
+        Config,  # Global config
+    ],
+    Authenticator,
+]

-AUTHENTICATORS: Dict[str, AuthConstructor] = {
-    "credential-file": lambda n, s, c:
-        CredentialFileAuthenticator(n, CredentialFileAuthSection(s), c),
-    "keyring": lambda n, s, c:
-        KeyringAuthenticator(n, KeyringAuthSection(s)),
-    "pass": lambda n, s, c:
-        PassAuthenticator(n, PassAuthSection(s)),
-    "simple": lambda n, s, c:
-        SimpleAuthenticator(n, SimpleAuthSection(s)),
-    "tfa": lambda n, s, c:
-        TfaAuthenticator(n),
+AUTHENTICATORS: dict[str, AuthConstructor] = {
+    "credential-file": lambda n, s, c: CredentialFileAuthenticator(n, CredentialFileAuthSection(s), c),
+    "keyring": lambda n, s, c: KeyringAuthenticator(n, KeyringAuthSection(s)),
+    "pass": lambda n, s, c: PassAuthenticator(n, PassAuthSection(s)),
+    "simple": lambda n, s, c: SimpleAuthenticator(n, SimpleAuthSection(s)),
+    "tfa": lambda n, s, c: TfaAuthenticator(n),
 }
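The same modernization recurs through the rest of this compare view: `typing.Tuple`, `List`, `Dict` and `Set` give way to the builtin `tuple`, `list`, `dict` and `set` generics, and `Callable`/`Sequence` move to `collections.abc`. A minimal before/after illustration (not code from the repository):

```python
from collections.abc import Callable

# Before: from typing import Callable, Dict
#         handlers: Dict[str, Callable[[str], int]]
# After:  builtin generics, Callable from collections.abc
handlers: dict[str, Callable[[str], int]] = {"length": len}

print(handlers["length"]("pferd"))  # -> 5
```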
@@ -1,5 +1,4 @@
 from abc import ABC, abstractmethod
-from typing import Tuple

 from ..config import Section

@@ -35,7 +34,7 @@ class Authenticator(ABC):
         self.name = name

     @abstractmethod
-    async def credentials(self) -> Tuple[str, str]:
+    async def credentials(self) -> tuple[str, str]:
         pass

     async def username(self) -> str:
@@ -1,5 +1,4 @@
 from pathlib import Path
-from typing import Tuple

 from ..config import Config
 from ..utils import fmt_real_path

@@ -23,7 +22,9 @@ class CredentialFileAuthenticator(Authenticator):
             with open(path, encoding="utf-8") as f:
                 lines = list(f)
         except UnicodeDecodeError:
-            raise AuthLoadError(f"Credential file at {fmt_real_path(path)} is not encoded using UTF-8")
+            raise AuthLoadError(
+                f"Credential file at {fmt_real_path(path)} is not encoded using UTF-8"
+            ) from None
         except OSError as e:
             raise AuthLoadError(f"No credential file at {fmt_real_path(path)}") from e

@@ -42,5 +43,5 @@ class CredentialFileAuthenticator(Authenticator):
         self._username = uline[9:]
         self._password = pline[9:]

-    async def credentials(self) -> Tuple[str, str]:
+    async def credentials(self) -> tuple[str, str]:
         return self._username, self._password
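The `from e` / `from None` suffixes added here (and in several later hunks) control exception chaining. A standalone illustration of the pattern, not code from the repository:

```python
class AuthLoadError(Exception):
    pass


def read_credentials(path: str) -> str:
    try:
        with open(path, encoding="utf-8") as f:
            return f.read()
    except OSError as e:
        # "from e" keeps the original error as __cause__ in the traceback
        raise AuthLoadError(f"No credential file at {path}") from e
    except UnicodeDecodeError:
        # "from None" suppresses the implicit "During handling of the above
        # exception, another exception occurred" context
        raise AuthLoadError(f"Credential file at {path} is not UTF-8") from None
```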
@@ -1,4 +1,4 @@
-from typing import Optional, Tuple, cast
+from typing import Optional

 import keyring

@@ -13,11 +13,10 @@ class KeyringAuthSection(AuthSection):
         return self.s.get("username")

     def keyring_name(self) -> str:
-        return cast(str, self.s.get("keyring_name", fallback=NAME))
+        return self.s.get("keyring_name", fallback=NAME)


 class KeyringAuthenticator(Authenticator):
-
     def __init__(self, name: str, section: KeyringAuthSection) -> None:
         super().__init__(name)

@@ -28,7 +27,7 @@ class KeyringAuthenticator(Authenticator):
         self._password_invalidated = False
         self._username_fixed = section.username() is not None

-    async def credentials(self) -> Tuple[str, str]:
+    async def credentials(self) -> tuple[str, str]:
         # Request the username
         if self._username is None:
             async with log.exclusive_output():
@@ -1,6 +1,5 @@
 import re
 import subprocess
-from typing import List, Tuple

 from ..logging import log
 from .authenticator import Authenticator, AuthError, AuthSection

@@ -12,11 +11,11 @@ class PassAuthSection(AuthSection):
             self.missing_value("passname")
         return value

-    def username_prefixes(self) -> List[str]:
+    def username_prefixes(self) -> list[str]:
         value = self.s.get("username_prefixes", "login,username,user")
         return [prefix.lower() for prefix in value.split(",")]

-    def password_prefixes(self) -> List[str]:
+    def password_prefixes(self) -> list[str]:
         value = self.s.get("password_prefixes", "password,pass,secret")
         return [prefix.lower() for prefix in value.split(",")]

@@ -31,14 +30,14 @@ class PassAuthenticator(Authenticator):
         self._username_prefixes = section.username_prefixes()
         self._password_prefixes = section.password_prefixes()

-    async def credentials(self) -> Tuple[str, str]:
+    async def credentials(self) -> tuple[str, str]:
         log.explain_topic("Obtaining credentials from pass")

         try:
             log.explain(f"Calling 'pass show {self._passname}'")
             result = subprocess.check_output(["pass", "show", self._passname], text=True)
         except subprocess.CalledProcessError as e:
-            raise AuthError(f"Failed to get password info from {self._passname}: {e}")
+            raise AuthError(f"Failed to get password info from {self._passname}: {e}") from e

         prefixed = {}
         unprefixed = []
@@ -1,4 +1,4 @@
-from typing import Optional, Tuple
+from typing import Optional

 from ..logging import log
 from ..utils import agetpass, ainput

@@ -23,7 +23,7 @@ class SimpleAuthenticator(Authenticator):
         self._username_fixed = self.username is not None
         self._password_fixed = self.password is not None

-    async def credentials(self) -> Tuple[str, str]:
+    async def credentials(self) -> tuple[str, str]:
         if self._username is not None and self._password is not None:
             return self._username, self._password

@@ -1,5 +1,3 @@
-from typing import Tuple
-
 from ..logging import log
 from ..utils import ainput
 from .authenticator import Authenticator, AuthError

@@ -17,7 +15,7 @@ class TfaAuthenticator(Authenticator):
         code = await ainput("TFA code: ")
         return code

-    async def credentials(self) -> Tuple[str, str]:
+    async def credentials(self) -> tuple[str, str]:
         raise AuthError("TFA authenticator does not support usernames")

     def invalidate_username(self) -> None:
@@ -21,23 +21,20 @@ GROUP.add_argument(
     "--base-url",
     type=str,
     metavar="BASE_URL",
-    help="The base url of the ilias instance"
+    help="The base url of the ilias instance",
 )

 GROUP.add_argument(
     "--client-id",
     type=str,
     metavar="CLIENT_ID",
-    help="The client id of the ilias instance"
+    help="The client id of the ilias instance",
 )

 configure_common_group_args(GROUP)


-def load(
-    args: argparse.Namespace,
-    parser: configparser.ConfigParser,
-) -> None:
+def load(args: argparse.Namespace, parser: configparser.ConfigParser) -> None:
     log.explain(f"Creating config for command '{COMMAND_NAME}'")

     parser["crawl:ilias"] = {}
@@ -21,8 +21,8 @@ configure_common_group_args(GROUP)
(whitespace-only hunk: the continuation lines of the signature below were re-indented; leading whitespace is not preserved in this extract)


 def load(
     args: argparse.Namespace,
     parser: configparser.ConfigParser,
 ) -> None:
     log.explain(f"Creating config for command '{COMMAND_NAME}'")

@@ -18,25 +18,25 @@ GROUP.add_argument(
     "--link-regex",
     type=str,
     metavar="REGEX",
-    help="href-matching regex to identify downloadable files"
+    help="href-matching regex to identify downloadable files",
 )
 GROUP.add_argument(
     "target",
     type=str,
     metavar="TARGET",
-    help="url to crawl"
+    help="url to crawl",
 )
 GROUP.add_argument(
     "output",
     type=Path,
     metavar="OUTPUT",
-    help="output directory"
+    help="output directory",
 )


 def load(
     args: argparse.Namespace,
     parser: configparser.ConfigParser,
 ) -> None:
     log.explain("Creating config for command 'kit-ipd'")

@@ -18,37 +18,37 @@ GROUP.add_argument(
     "target",
     type=Path,
     metavar="TARGET",
-    help="directory to crawl"
+    help="directory to crawl",
 )
 GROUP.add_argument(
     "output",
     type=Path,
     metavar="OUTPUT",
-    help="output directory"
+    help="output directory",
 )
 GROUP.add_argument(
     "--crawl-delay",
     type=float,
     metavar="SECONDS",
-    help="artificial delay to simulate for crawl requests"
+    help="artificial delay to simulate for crawl requests",
 )
 GROUP.add_argument(
     "--download-delay",
     type=float,
     metavar="SECONDS",
-    help="artificial delay to simulate for download requests"
+    help="artificial delay to simulate for download requests",
 )
 GROUP.add_argument(
     "--download-speed",
     type=int,
     metavar="BYTES_PER_SECOND",
-    help="download speed to simulate"
+    help="download speed to simulate",
 )


 def load(
     args: argparse.Namespace,
     parser: configparser.ConfigParser,
 ) -> None:
     log.explain("Creating config for command 'local'")

@@ -12,58 +12,60 @@ def configure_common_group_args(group: argparse._ArgumentGroup) -> None:
         "target",
         type=str,
         metavar="TARGET",
-        help="course id, 'desktop', or ILIAS URL to crawl"
+        help="course id, 'desktop', or ILIAS URL to crawl",
     )
     group.add_argument(
         "output",
         type=Path,
         metavar="OUTPUT",
-        help="output directory"
+        help="output directory",
     )
     group.add_argument(
-        "--username", "-u",
+        "--username",
+        "-u",
         type=str,
         metavar="USERNAME",
-        help="user name for authentication"
+        help="user name for authentication",
     )
     group.add_argument(
         "--keyring",
         action=BooleanOptionalAction,
-        help="use the system keyring to store and retrieve passwords"
+        help="use the system keyring to store and retrieve passwords",
     )
     group.add_argument(
         "--credential-file",
         type=Path,
         metavar="PATH",
-        help="read username and password from a credential file"
+        help="read username and password from a credential file",
     )
     group.add_argument(
         "--links",
         type=show_value_error(Links.from_string),
         metavar="OPTION",
-        help="how to represent external links"
+        help="how to represent external links",
     )
     group.add_argument(
         "--link-redirect-delay",
         type=int,
         metavar="SECONDS",
-        help="time before 'fancy' links redirect to to their target (-1 to disable)"
+        help="time before 'fancy' links redirect to to their target (-1 to disable)",
     )
     group.add_argument(
         "--videos",
         action=BooleanOptionalAction,
-        help="crawl and download videos"
+        help="crawl and download videos",
     )
     group.add_argument(
         "--forums",
         action=BooleanOptionalAction,
-        help="crawl and download forum posts"
+        help="crawl and download forum posts",
     )
     group.add_argument(
-        "--http-timeout", "-t",
+        "--http-timeout",
+        "-t",
         type=float,
         metavar="SECONDS",
-        help="timeout for all HTTP requests"
+        help="timeout for all HTTP requests",
     )


@@ -1,8 +1,9 @@
 import argparse
 import configparser
 from argparse import ArgumentTypeError
+from collections.abc import Callable, Sequence
 from pathlib import Path
-from typing import Any, Callable, List, Optional, Sequence, Union
+from typing import Any, Optional

 from ..output_dir import OnConflict, Redownload
 from ..version import NAME, VERSION
@@ -15,15 +16,15 @@ class ParserLoadError(Exception):
 # TODO Replace with argparse version when updating to 3.9?
 class BooleanOptionalAction(argparse.Action):
     def __init__(
         self,
-        option_strings: List[str],
+        option_strings: list[str],
         dest: Any,
         default: Any = None,
         type: Any = None,
         choices: Any = None,
         required: Any = False,
         help: Any = None,
         metavar: Any = None,
     ):
         if len(option_strings) != 1:
             raise ValueError("There must be exactly one option string")
@@ -48,11 +49,11 @@ class BooleanOptionalAction(argparse.Action):
         )

     def __call__(
         self,
         parser: argparse.ArgumentParser,
         namespace: argparse.Namespace,
-        values: Union[str, Sequence[Any], None],
+        values: str | Sequence[Any] | None,
         option_string: Optional[str] = None,
     ) -> None:
         if option_string and option_string in self.option_strings:
             value = not option_string.startswith("--no-")
@@ -67,11 +68,13 @@ def show_value_error(inner: Callable[[str], Any]) -> Callable[[str], Any]:
     Some validation functions (like the from_string in our enums) raise a ValueError.
     Argparse only pretty-prints ArgumentTypeErrors though, so we need to wrap our ValueErrors.
     """
+
     def wrapper(input: str) -> Any:
         try:
             return inner(input)
         except ValueError as e:
-            raise ArgumentTypeError(e)
+            raise ArgumentTypeError(e) from e
+
     return wrapper


@@ -81,52 +84,57 @@ CRAWLER_PARSER_GROUP = CRAWLER_PARSER.add_argument_group(
     description="arguments common to all crawlers",
 )
 CRAWLER_PARSER_GROUP.add_argument(
-    "--redownload", "-r",
+    "--redownload",
+    "-r",
     type=show_value_error(Redownload.from_string),
     metavar="OPTION",
-    help="when to download a file that's already present locally"
+    help="when to download a file that's already present locally",
 )
 CRAWLER_PARSER_GROUP.add_argument(
     "--on-conflict",
     type=show_value_error(OnConflict.from_string),
     metavar="OPTION",
-    help="what to do when local and remote files or directories differ"
+    help="what to do when local and remote files or directories differ",
 )
 CRAWLER_PARSER_GROUP.add_argument(
-    "--transform", "-T",
+    "--transform",
+    "-T",
     action="append",
     type=str,
     metavar="RULE",
-    help="add a single transformation rule. Can be specified multiple times"
+    help="add a single transformation rule. Can be specified multiple times",
 )
 CRAWLER_PARSER_GROUP.add_argument(
-    "--tasks", "-n",
+    "--tasks",
+    "-n",
     type=int,
     metavar="N",
-    help="maximum number of concurrent tasks (crawling, downloading)"
+    help="maximum number of concurrent tasks (crawling, downloading)",
 )
 CRAWLER_PARSER_GROUP.add_argument(
-    "--downloads", "-N",
+    "--downloads",
+    "-N",
     type=int,
     metavar="N",
-    help="maximum number of tasks that may download data at the same time"
+    help="maximum number of tasks that may download data at the same time",
 )
 CRAWLER_PARSER_GROUP.add_argument(
-    "--task-delay", "-d",
+    "--task-delay",
+    "-d",
     type=float,
     metavar="SECONDS",
-    help="time the crawler should wait between subsequent tasks"
+    help="time the crawler should wait between subsequent tasks",
 )
 CRAWLER_PARSER_GROUP.add_argument(
     "--windows-paths",
     action=BooleanOptionalAction,
-    help="whether to repair invalid paths on windows"
+    help="whether to repair invalid paths on windows",
 )


 def load_crawler(
     args: argparse.Namespace,
     section: configparser.SectionProxy,
 ) -> None:
     if args.redownload is not None:
         section["redownload"] = args.redownload.value
@@ -152,79 +160,79 @@ PARSER.add_argument(
     version=f"{NAME} {VERSION} (https://github.com/Garmelon/PFERD)",
 )
 PARSER.add_argument(
-    "--config", "-c",
+    "--config",
+    "-c",
     type=Path,
     metavar="PATH",
-    help="custom config file"
+    help="custom config file",
 )
 PARSER.add_argument(
     "--dump-config",
     action="store_true",
-    help="dump current configuration to the default config path and exit"
+    help="dump current configuration to the default config path and exit",
 )
 PARSER.add_argument(
     "--dump-config-to",
     metavar="PATH",
-    help="dump current configuration to a file and exit."
-    " Use '-' as path to print to stdout instead"
+    help="dump current configuration to a file and exit. Use '-' as path to print to stdout instead",
 )
 PARSER.add_argument(
     "--debug-transforms",
     action="store_true",
-    help="apply transform rules to files of previous run"
+    help="apply transform rules to files of previous run",
 )
 PARSER.add_argument(
-    "--crawler", "-C",
+    "--crawler",
+    "-C",
     action="append",
     type=str,
     metavar="NAME",
-    help="only execute a single crawler."
-    " Can be specified multiple times to execute multiple crawlers"
+    help="only execute a single crawler. Can be specified multiple times to execute multiple crawlers",
 )
 PARSER.add_argument(
-    "--skip", "-S",
+    "--skip",
+    "-S",
     action="append",
     type=str,
     metavar="NAME",
-    help="don't execute this particular crawler."
-    " Can be specified multiple times to skip multiple crawlers"
+    help="don't execute this particular crawler. Can be specified multiple times to skip multiple crawlers",
 )
 PARSER.add_argument(
     "--working-dir",
     type=Path,
     metavar="PATH",
-    help="custom working directory"
+    help="custom working directory",
 )
 PARSER.add_argument(
     "--explain",
     action=BooleanOptionalAction,
-    help="log and explain in detail what PFERD is doing"
+    help="log and explain in detail what PFERD is doing",
 )
 PARSER.add_argument(
     "--status",
     action=BooleanOptionalAction,
-    help="print status updates while PFERD is crawling"
+    help="print status updates while PFERD is crawling",
 )
 PARSER.add_argument(
     "--report",
     action=BooleanOptionalAction,
-    help="print a report of all local changes before exiting"
+    help="print a report of all local changes before exiting",
 )
 PARSER.add_argument(
     "--share-cookies",
     action=BooleanOptionalAction,
-    help="whether crawlers should share cookies where applicable"
+    help="whether crawlers should share cookies where applicable",
 )
 PARSER.add_argument(
     "--show-not-deleted",
     action=BooleanOptionalAction,
-    help="print messages in status and report when PFERD did not delete a local only file"
+    help="print messages in status and report when PFERD did not delete a local only file",
 )


 def load_default_section(
     args: argparse.Namespace,
     parser: configparser.ConfigParser,
 ) -> None:
     section = parser[parser.default_section]

@@ -3,7 +3,7 @@ import os
 import sys
 from configparser import ConfigParser, SectionProxy
 from pathlib import Path
-from typing import Any, List, NoReturn, Optional, Tuple
+from typing import Any, NoReturn, Optional

 from rich.markup import escape

@@ -53,10 +53,10 @@ class Section:
(whitespace-only hunk: the continuation lines of the signature below were re-indented; leading whitespace is not preserved in this extract)
         raise ConfigOptionError(self.s.name, key, desc)

     def invalid_value(
         self,
         key: str,
         value: Any,
         reason: Optional[str],
     ) -> NoReturn:
         if reason is None:
             self.error(key, f"Invalid value {value!r}")
@@ -126,13 +126,13 @@ class Config:
             with open(path, encoding="utf-8") as f:
                 parser.read_file(f, source=str(path))
         except FileNotFoundError:
-            raise ConfigLoadError(path, "File does not exist")
+            raise ConfigLoadError(path, "File does not exist") from None
         except IsADirectoryError:
-            raise ConfigLoadError(path, "That's a directory, not a file")
+            raise ConfigLoadError(path, "That's a directory, not a file") from None
         except PermissionError:
-            raise ConfigLoadError(path, "Insufficient permissions")
+            raise ConfigLoadError(path, "Insufficient permissions") from None
         except UnicodeDecodeError:
-            raise ConfigLoadError(path, "File is not encoded using UTF-8")
+            raise ConfigLoadError(path, "File is not encoded using UTF-8") from None

     def dump(self, path: Optional[Path] = None) -> None:
         """
@@ -150,8 +150,8 @@ class Config:

         try:
             path.parent.mkdir(parents=True, exist_ok=True)
-        except PermissionError:
-            raise ConfigDumpError(path, "Could not create parent directory")
+        except PermissionError as e:
+            raise ConfigDumpError(path, "Could not create parent directory") from e

         try:
             # Ensuring we don't accidentally overwrite any existing files by
@@ -167,16 +167,16 @@ class Config:
                     with open(path, "w", encoding="utf-8") as f:
                         self._parser.write(f)
                 else:
-                    raise ConfigDumpError(path, "File already exists")
+                    raise ConfigDumpError(path, "File already exists") from None
         except IsADirectoryError:
-            raise ConfigDumpError(path, "That's a directory, not a file")
+            raise ConfigDumpError(path, "That's a directory, not a file") from None
-        except PermissionError:
-            raise ConfigDumpError(path, "Insufficient permissions")
+        except PermissionError as e:
+            raise ConfigDumpError(path, "Insufficient permissions") from e

     def dump_to_stdout(self) -> None:
         self._parser.write(sys.stdout)

-    def crawl_sections(self) -> List[Tuple[str, SectionProxy]]:
+    def crawl_sections(self) -> list[tuple[str, SectionProxy]]:
         result = []
         for name, proxy in self._parser.items():
             if name.startswith("crawl:"):
@@ -184,7 +184,7 @@ class Config:

         return result

-    def auth_sections(self) -> List[Tuple[str, SectionProxy]]:
+    def auth_sections(self) -> list[tuple[str, SectionProxy]]:
         result = []
         for name, proxy in self._parser.items():
             if name.startswith("auth:"):
@@ -1,5 +1,5 @@
+from collections.abc import Callable
 from configparser import SectionProxy
-from typing import Callable, Dict

 from ..auth import Authenticator
 from ..config import Config

@@ -8,20 +8,19 @@ from .ilias import IliasWebCrawler, IliasWebCrawlerSection, KitIliasWebCrawler,
 from .kit_ipd_crawler import KitIpdCrawler, KitIpdCrawlerSection
 from .local_crawler import LocalCrawler, LocalCrawlerSection

-CrawlerConstructor = Callable[[
-    str,  # Name (without the "crawl:" prefix)
-    SectionProxy,  # Crawler's section of global config
-    Config,  # Global config
-    Dict[str, Authenticator],  # Loaded authenticators by name
-], Crawler]
+CrawlerConstructor = Callable[
+    [
+        str,  # Name (without the "crawl:" prefix)
+        SectionProxy,  # Crawler's section of global config
+        Config,  # Global config
+        dict[str, Authenticator],  # Loaded authenticators by name
+    ],
+    Crawler,
+]

-CRAWLERS: Dict[str, CrawlerConstructor] = {
-    "local": lambda n, s, c, a:
-        LocalCrawler(n, LocalCrawlerSection(s), c),
-    "ilias-web": lambda n, s, c, a:
-        IliasWebCrawler(n, IliasWebCrawlerSection(s), c, a),
-    "kit-ilias-web": lambda n, s, c, a:
-        KitIliasWebCrawler(n, KitIliasWebCrawlerSection(s), c, a),
-    "kit-ipd": lambda n, s, c, a:
-        KitIpdCrawler(n, KitIpdCrawlerSection(s), c),
+CRAWLERS: dict[str, CrawlerConstructor] = {
+    "local": lambda n, s, c, a: LocalCrawler(n, LocalCrawlerSection(s), c),
+    "ilias-web": lambda n, s, c, a: IliasWebCrawler(n, IliasWebCrawlerSection(s), c, a),
+    "kit-ilias-web": lambda n, s, c, a: KitIliasWebCrawler(n, KitIliasWebCrawlerSection(s), c, a),
+    "kit-ipd": lambda n, s, c, a: KitIpdCrawler(n, KitIpdCrawlerSection(s), c),
 }
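A hedged sketch of how a constructor registry like `CRAWLERS` is typically consumed together with `Config.crawl_sections()` from the config hunks above; the glue code and the `type` option name are assumptions, not taken from the repository:

```python
# Hypothetical wiring, for illustration only
def build_crawlers(config, authenticators):
    crawlers = {}
    for name, section in config.crawl_sections():
        kind = section.get("type")  # assumed option name
        constructor = CRAWLERS[kind]
        crawlers[name] = constructor(name, section, config, authenticators)
    return crawlers
```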
@@ -1,10 +1,10 @@
 import asyncio
 import os
 from abc import ABC, abstractmethod
-from collections.abc import Awaitable, Coroutine
+from collections.abc import Awaitable, Callable, Coroutine, Sequence
 from datetime import datetime
 from pathlib import Path, PurePath
-from typing import Any, Callable, Dict, List, Optional, Sequence, Set, Tuple, TypeVar
+from typing import Any, Optional, TypeVar

 from ..auth import Authenticator
 from ..config import Config, Section
@@ -116,7 +116,7 @@ class CrawlToken(ReusableAsyncContextManager[ProgressBar]):
         return bar


-class DownloadToken(ReusableAsyncContextManager[Tuple[ProgressBar, FileSink]]):
+class DownloadToken(ReusableAsyncContextManager[tuple[ProgressBar, FileSink]]):
     def __init__(self, limiter: Limiter, fs_token: FileSinkToken, path: PurePath):
         super().__init__()

@@ -128,12 +128,13 @@ class DownloadToken(ReusableAsyncContextManager[Tuple[ProgressBar, FileSink]]):
     def path(self) -> PurePath:
         return self._path

-    async def _on_aenter(self) -> Tuple[ProgressBar, FileSink]:
+    async def _on_aenter(self) -> tuple[ProgressBar, FileSink]:
         await self._stack.enter_async_context(self._limiter.limit_download())
         sink = await self._stack.enter_async_context(self._fs_token)
         # The "Downloaded ..." message is printed in the output dir, not here
-        bar = self._stack.enter_context(log.download_bar("[bold bright_cyan]", "Downloading",
-                                                         fmt_path(self._path)))
+        bar = self._stack.enter_context(
+            log.download_bar("[bold bright_cyan]", "Downloading", fmt_path(self._path))
+        )

         return bar, sink

@@ -204,7 +205,7 @@ class CrawlerSection(Section):
         on_windows = os.name == "nt"
         return self.s.getboolean("windows_paths", fallback=on_windows)

-    def auth(self, authenticators: Dict[str, Authenticator]) -> Authenticator:
+    def auth(self, authenticators: dict[str, Authenticator]) -> Authenticator:
         value = self.s.get("auth")
         if value is None:
             self.missing_value("auth")
@@ -216,10 +217,10 @@ class CrawlerSection(Section):
(whitespace-only hunk: the continuation lines of the signature below were re-indented; leading whitespace is not preserved in this extract)

 class Crawler(ABC):
     def __init__(
         self,
         name: str,
         section: CrawlerSection,
         config: Config,
     ) -> None:
         """
         Initialize a crawler from its name and its section in the config file.
@@ -261,7 +262,7 @@ class Crawler(ABC):
         return self._output_dir

     @staticmethod
-    async def gather(awaitables: Sequence[Awaitable[Any]]) -> List[Any]:
+    async def gather(awaitables: Sequence[Awaitable[Any]]) -> list[Any]:
         """
         Similar to asyncio.gather. However, in the case of an exception, all
         still running tasks are cancelled and the exception is rethrown.
@@ -293,13 +294,13 @@ class Crawler(ABC):
(whitespace-only hunk: the continuation lines of the signature below were re-indented; leading whitespace is not preserved in this extract)
         return CrawlToken(self._limiter, path)

     def should_try_download(
         self,
         path: PurePath,
         *,
         etag_differs: Optional[bool] = None,
         mtime: Optional[datetime] = None,
         redownload: Optional[Redownload] = None,
         on_conflict: Optional[OnConflict] = None,
     ) -> bool:
         log.explain_topic(f"Decision: Should Download {fmt_path(path)}")

@@ -308,11 +309,7 @@ class Crawler(ABC):
            return False

         should_download = self._output_dir.should_try_download(
-            path,
-            etag_differs=etag_differs,
-            mtime=mtime,
-            redownload=redownload,
-            on_conflict=on_conflict
+            path, etag_differs=etag_differs, mtime=mtime, redownload=redownload, on_conflict=on_conflict
         )
         if should_download:
             log.explain("Answer: Yes")
@@ -322,13 +319,13 @@ class Crawler(ABC):
(whitespace-only hunk: the continuation lines of the signature below were re-indented; leading whitespace is not preserved in this extract)
            return False

     async def download(
         self,
         path: PurePath,
         *,
         etag_differs: Optional[bool] = None,
         mtime: Optional[datetime] = None,
         redownload: Optional[Redownload] = None,
         on_conflict: Optional[OnConflict] = None,
     ) -> Optional[DownloadToken]:
         log.explain_topic(f"Decision: Download {fmt_path(path)}")
         path = self._deduplicator.mark(path)
@@ -346,7 +343,7 @@ class Crawler(ABC):
             etag_differs=etag_differs,
             mtime=mtime,
             redownload=redownload,
-            on_conflict=on_conflict
+            on_conflict=on_conflict,
         )
         if fs_token is None:
             log.explain("Answer: No")
@@ -397,7 +394,7 @@ class Crawler(ABC):
             log.warn("Couldn't find or load old report")
             return

-        seen: Set[PurePath] = set()
+        seen: set[PurePath] = set()
         for known in sorted(self.prev_report.found_paths):
             looking_at = list(reversed(known.parents)) + [known]
             for path in looking_at:
@@ -3,7 +3,7 @@ import http.cookies
 import ssl
 from datetime import datetime
 from pathlib import Path, PurePath
-from typing import Any, Dict, List, Optional, Tuple, cast
+from typing import Any, Optional

 import aiohttp
 import certifi
@@ -29,11 +29,11 @@ class HttpCrawler(Crawler):
(whitespace-only hunk: the continuation lines of the signature below were re-indented; leading whitespace is not preserved in this extract)
     COOKIE_FILE = PurePath(".cookies")

     def __init__(
         self,
         name: str,
         section: HttpCrawlerSection,
         config: Config,
         shared_auth: Optional[Authenticator] = None,
     ) -> None:
         super().__init__(name, section, config)

@@ -43,7 +43,7 @@ class HttpCrawler(Crawler):
         self._http_timeout = section.http_timeout()

         self._cookie_jar_path = self._output_dir.resolve(self.COOKIE_FILE)
-        self._shared_cookie_jar_paths: Optional[List[Path]] = None
+        self._shared_cookie_jar_paths: Optional[list[Path]] = None
         self._shared_auth = shared_auth

         self._output_dir.register_reserved(self.COOKIE_FILE)
@@ -98,7 +98,7 @@ class HttpCrawler(Crawler):
         """
         raise RuntimeError("_authenticate() was called but crawler doesn't provide an implementation")

-    def share_cookies(self, shared: Dict[Authenticator, List[Path]]) -> None:
+    def share_cookies(self, shared: dict[Authenticator, list[Path]]) -> None:
         if not self._shared_auth:
             return

@@ -187,7 +187,7 @@ class HttpCrawler(Crawler):
         if level == 0 or (level == 1 and drop_h1):
             return PurePath()

-        level_heading = cast(Optional[Tag], tag.find_previous(name=f"h{level}"))
+        level_heading = tag.find_previous(name=f"h{level}")

         if level_heading is None:
             return find_associated_headings(tag, level - 1)
@@ -219,7 +219,7 @@ class HttpCrawler(Crawler):
             etags[str(path)] = etag
         self._output_dir.report.add_custom_value(ETAGS_CUSTOM_REPORT_VALUE_KEY, etags)

-    async def _request_resource_version(self, resource_url: str) -> Tuple[Optional[str], Optional[datetime]]:
+    async def _request_resource_version(self, resource_url: str) -> tuple[Optional[str], Optional[datetime]]:
         """
         Requests the ETag and Last-Modified headers of a resource via a HEAD request.
         If no entity tag / modification date can be obtained, the according value will be None.
@@ -252,23 +252,23 @@ class HttpCrawler(Crawler):
         self._load_cookies()

         async with aiohttp.ClientSession(
             headers={"User-Agent": f"{NAME}/{VERSION}"},
             cookie_jar=self._cookie_jar,
             connector=aiohttp.TCPConnector(ssl=ssl.create_default_context(cafile=certifi.where())),
             timeout=ClientTimeout(
                 # 30 minutes. No download in the history of downloads was longer than 30 minutes.
                 # This is enough to transfer a 600 MB file over a 3 Mib/s connection.
                 # Allowing an arbitrary value could be annoying for overnight batch jobs
                 total=15 * 60,
                 connect=self._http_timeout,
                 sock_connect=self._http_timeout,
                 sock_read=self._http_timeout,
             ),
             # See https://github.com/aio-libs/aiohttp/issues/6626
             # Without this aiohttp will mangle the redirect header from Shibboleth, invalidating the
             # passed signature. Shibboleth will not accept the broken signature and authentication will
             # fail.
-            requote_redirect_url=False
+            requote_redirect_url=False,
         ) as session:
             self.session = session
             try:
@@ -1,5 +1,9 @@
-from .kit_ilias_web_crawler import (IliasWebCrawler, IliasWebCrawlerSection, KitIliasWebCrawler,
-                                    KitIliasWebCrawlerSection)
+from .kit_ilias_web_crawler import (
+    IliasWebCrawler,
+    IliasWebCrawlerSection,
+    KitIliasWebCrawler,
+    KitIliasWebCrawlerSection,
+)

 __all__ = [
     "IliasWebCrawler",
@@ -1,5 +1,6 @@
 import asyncio
-from typing import Any, Callable, Optional
+from collections.abc import Callable
+from typing import Any, Optional

 import aiohttp

@@ -15,9 +16,9 @@ def _iorepeat(attempts: int, name: str, failure_is_error: bool = False) -> Calla
                 try:
                     return await f(*args, **kwargs)
                 except aiohttp.ContentTypeError:  # invalid content type
-                    raise CrawlWarning("ILIAS returned an invalid content type")
+                    raise CrawlWarning("ILIAS returned an invalid content type") from None
                 except aiohttp.TooManyRedirects:
-                    raise CrawlWarning("Got stuck in a redirect loop")
+                    raise CrawlWarning("Got stuck in a redirect loop") from None
                 except aiohttp.ClientPayloadError as e:  # encoding or not enough bytes
                     last_exception = e
                 except aiohttp.ClientConnectionError as e:  # e.g. timeout, disconnect, resolve failed, etc.
@@ -254,21 +254,22 @@ def learning_module_template(body: bs4.Tag, name: str, prev: Optional[str], next
|
|||||||
)
|
)
|
||||||
|
|
||||||
if bot_nav := body.select_one(".ilc_page_bnav_BottomNavigation"):
|
if bot_nav := body.select_one(".ilc_page_bnav_BottomNavigation"):
|
||||||
bot_nav.replace_with(soupify(nav_template.replace(
|
bot_nav.replace_with(
|
||||||
"{{left}}", left).replace("{{right}}", right).encode())
|
soupify(nav_template.replace("{{left}}", left).replace("{{right}}", right).encode())
|
||||||
)
|
)
|
||||||
|
|
||||||
body_str = cast(str, body.prettify())
|
body_str = body.prettify()
|
||||||
return _learning_module_template.replace("{{body}}", body_str).replace("{{name}}", name)
|
return _learning_module_template.replace("{{body}}", body_str).replace("{{name}}", name)
|
||||||
|
|
||||||
|
|
||||||
def forum_thread_template(name: str, url: str, heading: bs4.Tag, content: bs4.Tag) -> str:
|
def forum_thread_template(name: str, url: str, heading: bs4.Tag, content: bs4.Tag) -> str:
|
||||||
if title := cast(Optional[bs4.Tag], heading.find(name="b")):
|
if title := heading.find(name="b"):
|
||||||
title.wrap(bs4.Tag(name="a", attrs={"href": url}))
|
title.wrap(bs4.Tag(name="a", attrs={"href": url}))
|
||||||
return _forum_thread_template \
|
return (
|
||||||
.replace("{{name}}", name) \
|
_forum_thread_template.replace("{{name}}", name)
|
||||||
.replace("{{heading}}", cast(str, heading.prettify())) \
|
.replace("{{heading}}", heading.prettify())
|
||||||
.replace("{{content}}", cast(str, content.prettify()))
|
.replace("{{content}}", content.prettify())
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
@dataclasses.dataclass
|
@dataclasses.dataclass
|
||||||
@@ -296,9 +297,7 @@ class Links(Enum):
|
|||||||
raise ValueError("Missing switch case")
|
raise ValueError("Missing switch case")
|
||||||
|
|
||||||
def collection_as_one(self) -> bool:
|
def collection_as_one(self) -> bool:
|
||||||
if self == Links.FANCY:
|
return self == Links.FANCY
|
||||||
return True
|
|
||||||
return False
|
|
||||||
|
|
||||||
def extension(self) -> Optional[str]:
|
def extension(self) -> Optional[str]:
|
||||||
if self == Links.FANCY:
|
if self == Links.FANCY:
|
||||||
@@ -330,8 +329,7 @@ class Links(Enum):
# All others get coerced to fancy
content = cast(str, Links.FANCY.template())
repeated_content = cast(
re.Match[str],
re.search(r"<!-- REPEAT START -->([\s\S]+)<!-- REPEAT END -->", content)
re.Match[str], re.search(r"<!-- REPEAT START -->([\s\S]+)<!-- REPEAT END -->", content)
).group(1)

parts = []
@@ -355,4 +353,4 @@ class Links(Enum):
return Links(string)
except ValueError:
options = [f"'{option.value}'" for option in Links]
raise ValueError(f"must be one of {', '.join(options)}")
raise ValueError(f"must be one of {', '.join(options)}") from None
@@ -4,7 +4,7 @@ import os
import re
from collections.abc import Awaitable, Coroutine
from pathlib import PurePath
from typing import Any, Dict, List, Literal, Optional, Set, Union, cast
from typing import Any, Literal, Optional, cast
from urllib.parse import urljoin

import aiohttp
@@ -21,11 +21,19 @@ from ..http_crawler import HttpCrawler, HttpCrawlerSection
from .async_helper import _iorepeat
from .file_templates import LinkData, Links, forum_thread_template, learning_module_template
from .ilias_html_cleaner import clean, insert_base_markup
from .kit_ilias_html import (IliasElementType, IliasForumThread, IliasLearningModulePage, IliasPage,
IliasPageElement, IliasSoup, _sanitize_path_name, parse_ilias_forum_export)
from .kit_ilias_html import (
IliasElementType,
IliasForumThread,
IliasLearningModulePage,
IliasPage,
IliasPageElement,
IliasSoup,
_sanitize_path_name,
parse_ilias_forum_export,
)
from .shibboleth_login import ShibbolethLogin

TargetType = Union[str, int]
TargetType = str | int


class LoginTypeLocal:
@@ -41,7 +49,7 @@ class IliasWebCrawlerSection(HttpCrawlerSection):

return base_url

def login(self) -> Union[Literal["shibboleth"], LoginTypeLocal]:
def login(self) -> Literal["shibboleth"] | LoginTypeLocal:
login_type = self.s.get("login_type")
if not login_type:
self.missing_value("login_type")
@@ -55,9 +63,7 @@ class IliasWebCrawlerSection(HttpCrawlerSection):

self.invalid_value("login_type", login_type, "Should be <shibboleth | local>")

def tfa_auth(
self, authenticators: Dict[str, Authenticator]
) -> Optional[Authenticator]:
def tfa_auth(self, authenticators: dict[str, Authenticator]) -> Optional[Authenticator]:
value: Optional[str] = self.s.get("tfa_auth")
if value is None:
return None
@@ -104,7 +110,7 @@ class IliasWebCrawlerSection(HttpCrawlerSection):
return self.s.getboolean("forums", fallback=False)


_DIRECTORY_PAGES: Set[IliasElementType] = {
_DIRECTORY_PAGES: set[IliasElementType] = {
IliasElementType.EXERCISE,
IliasElementType.EXERCISE_FILES,
IliasElementType.EXERCISE_OVERVIEW,
@@ -116,7 +122,7 @@ _DIRECTORY_PAGES: Set[IliasElementType] = {
IliasElementType.OPENCAST_VIDEO_FOLDER_MAYBE_PAGINATED,
}

_VIDEO_ELEMENTS: Set[IliasElementType] = {
_VIDEO_ELEMENTS: set[IliasElementType] = {
IliasElementType.MEDIACAST_VIDEO,
IliasElementType.MEDIACAST_VIDEO_FOLDER,
IliasElementType.OPENCAST_VIDEO,
@@ -166,17 +172,19 @@ class IliasWebCrawler(HttpCrawler):
name: str,
section: IliasWebCrawlerSection,
config: Config,
authenticators: Dict[str, Authenticator]
authenticators: dict[str, Authenticator],
):
# Setting a main authenticator for cookie sharing
auth = section.auth(authenticators)
super().__init__(name, section, config, shared_auth=auth)

if section.tasks() > 1:
log.warn("""
log.warn(
"""
Please avoid using too many parallel requests as these are the KIT ILIAS
instance's greatest bottleneck.
""".strip())
""".strip()
)

self._auth = auth
self._base_url = section.base_url()
@@ -193,7 +201,7 @@ instance's greatest bottleneck.
self._links = section.links()
self._videos = section.videos()
self._forums = section.forums()
self._visited_urls: Dict[str, PurePath] = dict()
self._visited_urls: dict[str, PurePath] = dict()

async def _run(self) -> None:
if isinstance(self._target, int):
@@ -210,22 +218,19 @@ instance's greatest bottleneck.
# Start crawling at the given course
root_url = url_set_query_param(
urljoin(self._base_url + "/", "goto.php"),
"target", f"crs_{course_id}",
"target",
f"crs_{course_id}",
)

await self._crawl_url(root_url, expected_id=course_id)

async def _crawl_desktop(self) -> None:
await self._crawl_url(
urljoin(self._base_url, "/ilias.php?baseClass=ilDashboardGUI&cmd=show"),
crawl_nested_courses=True
urljoin(self._base_url, "/ilias.php?baseClass=ilDashboardGUI&cmd=show"), crawl_nested_courses=True
)

async def _crawl_url(
self,
url: str,
expected_id: Optional[int] = None,
crawl_nested_courses: bool = False
self, url: str, expected_id: Optional[int] = None, crawl_nested_courses: bool = False
) -> None:
if awaitable := await self._handle_ilias_page(
url, None, PurePath("."), expected_id, crawl_nested_courses
@@ -238,7 +243,7 @@ instance's greatest bottleneck.
current_element: Optional[IliasPageElement],
path: PurePath,
expected_course_id: Optional[int] = None,
crawl_nested_courses: bool = False
crawl_nested_courses: bool = False,
) -> Optional[Coroutine[Any, Any, None]]:
maybe_cl = await self.crawl(path)
if not maybe_cl:
@@ -259,9 +264,9 @@ instance's greatest bottleneck.
expected_course_id: Optional[int] = None,
crawl_nested_courses: bool = False,
) -> None:
elements: List[IliasPageElement] = []
elements: list[IliasPageElement] = []
# A list as variable redefinitions are not propagated to outer scopes
description: List[BeautifulSoup] = []
description: list[BeautifulSoup] = []

@_iorepeat(3, "crawling folder")
async def gather_elements() -> None:
@@ -304,7 +309,7 @@ instance's greatest bottleneck.

elements.sort(key=lambda e: e.id())

tasks: List[Awaitable[None]] = []
tasks: list[Awaitable[None]] = []
for element in elements:
if handle := await self._handle_ilias_element(cl.path, element, crawl_nested_courses):
tasks.append(asyncio.create_task(handle))
@@ -319,10 +324,7 @@ instance's greatest bottleneck.
# works correctly.
@anoncritical
async def _handle_ilias_element(
self,
parent_path: PurePath,
element: IliasPageElement,
crawl_nested_courses: bool = False
self, parent_path: PurePath, element: IliasPageElement, crawl_nested_courses: bool = False
) -> Optional[Coroutine[Any, Any, None]]:
# element.name might contain `/` if the crawler created nested elements,
# so we can not sanitize it here. We trust in the output dir to thwart worst-case
@@ -338,15 +340,14 @@ instance's greatest bottleneck.
)
return None

if element.type in _VIDEO_ELEMENTS:
if not self._videos:
log.status(
"[bold bright_black]",
"Ignored",
fmt_path(element_path),
"[bright_black](enable with option 'videos')"
)
return None
if element.type in _VIDEO_ELEMENTS and not self._videos:
log.status(
"[bold bright_black]",
"Ignored",
fmt_path(element_path),
"[bright_black](enable with option 'videos')",
)
return None

if element.type == IliasElementType.FILE:
return await self._handle_file(element, element_path)
@@ -356,7 +357,7 @@ instance's greatest bottleneck.
|
|||||||
"[bold bright_black]",
|
"[bold bright_black]",
|
||||||
"Ignored",
|
"Ignored",
|
||||||
fmt_path(element_path),
|
fmt_path(element_path),
|
||||||
"[bright_black](enable with option 'forums')"
|
"[bright_black](enable with option 'forums')",
|
||||||
)
|
)
|
||||||
return None
|
return None
|
||||||
return await self._handle_forum(element, element_path)
|
return await self._handle_forum(element, element_path)
|
||||||
@@ -365,7 +366,7 @@ instance's greatest bottleneck.
|
|||||||
"[bold bright_black]",
|
"[bold bright_black]",
|
||||||
"Ignored",
|
"Ignored",
|
||||||
fmt_path(element_path),
|
fmt_path(element_path),
|
||||||
"[bright_black](tests contain no relevant data)"
|
"[bright_black](tests contain no relevant data)",
|
||||||
)
|
)
|
||||||
return None
|
return None
|
||||||
elif element.type == IliasElementType.SURVEY:
|
elif element.type == IliasElementType.SURVEY:
|
||||||
@@ -373,7 +374,7 @@ instance's greatest bottleneck.
|
|||||||
"[bold bright_black]",
|
"[bold bright_black]",
|
||||||
"Ignored",
|
"Ignored",
|
||||||
fmt_path(element_path),
|
fmt_path(element_path),
|
||||||
"[bright_black](surveys contain no relevant data)"
|
"[bright_black](surveys contain no relevant data)",
|
||||||
)
|
)
|
||||||
return None
|
return None
|
||||||
elif element.type == IliasElementType.SCORM_LEARNING_MODULE:
|
elif element.type == IliasElementType.SCORM_LEARNING_MODULE:
|
||||||
@@ -381,7 +382,7 @@ instance's greatest bottleneck.
|
|||||||
"[bold bright_black]",
|
"[bold bright_black]",
|
||||||
"Ignored",
|
"Ignored",
|
||||||
fmt_path(element_path),
|
fmt_path(element_path),
|
||||||
"[bright_black](scorm learning modules are not supported)"
|
"[bright_black](scorm learning modules are not supported)",
|
||||||
)
|
)
|
||||||
return None
|
return None
|
||||||
elif element.type == IliasElementType.LITERATURE_LIST:
|
elif element.type == IliasElementType.LITERATURE_LIST:
|
||||||
@@ -389,7 +390,7 @@ instance's greatest bottleneck.
|
|||||||
"[bold bright_black]",
|
"[bold bright_black]",
|
||||||
"Ignored",
|
"Ignored",
|
||||||
fmt_path(element_path),
|
fmt_path(element_path),
|
||||||
"[bright_black](literature lists are not currently supported)"
|
"[bright_black](literature lists are not currently supported)",
|
||||||
)
|
)
|
||||||
return None
|
return None
|
||||||
elif element.type == IliasElementType.LEARNING_MODULE_HTML:
|
elif element.type == IliasElementType.LEARNING_MODULE_HTML:
|
||||||
@@ -397,7 +398,7 @@ instance's greatest bottleneck.
|
|||||||
"[bold bright_black]",
|
"[bold bright_black]",
|
||||||
"Ignored",
|
"Ignored",
|
||||||
fmt_path(element_path),
|
fmt_path(element_path),
|
||||||
"[bright_black](HTML learning modules are not supported)"
|
"[bright_black](HTML learning modules are not supported)",
|
||||||
)
|
)
|
||||||
return None
|
return None
|
||||||
elif element.type == IliasElementType.BLOG:
|
elif element.type == IliasElementType.BLOG:
|
||||||
@@ -405,7 +406,7 @@ instance's greatest bottleneck.
|
|||||||
"[bold bright_black]",
|
"[bold bright_black]",
|
||||||
"Ignored",
|
"Ignored",
|
||||||
fmt_path(element_path),
|
fmt_path(element_path),
|
||||||
"[bright_black](blogs are not currently supported)"
|
"[bright_black](blogs are not currently supported)",
|
||||||
)
|
)
|
||||||
return None
|
return None
|
||||||
elif element.type == IliasElementType.DCL_RECORD_LIST:
|
elif element.type == IliasElementType.DCL_RECORD_LIST:
|
||||||
@@ -413,7 +414,7 @@ instance's greatest bottleneck.
|
|||||||
"[bold bright_black]",
|
"[bold bright_black]",
|
||||||
"Ignored",
|
"Ignored",
|
||||||
fmt_path(element_path),
|
fmt_path(element_path),
|
||||||
"[bright_black](dcl record lists are not currently supported)"
|
"[bright_black](dcl record lists are not currently supported)",
|
||||||
)
|
)
|
||||||
return None
|
return None
|
||||||
elif element.type == IliasElementType.MEDIA_POOL:
|
elif element.type == IliasElementType.MEDIA_POOL:
|
||||||
@@ -421,7 +422,7 @@ instance's greatest bottleneck.
|
|||||||
"[bold bright_black]",
|
"[bold bright_black]",
|
||||||
"Ignored",
|
"Ignored",
|
||||||
fmt_path(element_path),
|
fmt_path(element_path),
|
||||||
"[bright_black](media pools are not currently supported)"
|
"[bright_black](media pools are not currently supported)",
|
||||||
)
|
)
|
||||||
return None
|
return None
|
||||||
elif element.type == IliasElementType.COURSE:
|
elif element.type == IliasElementType.COURSE:
|
||||||
@@ -431,7 +432,7 @@ instance's greatest bottleneck.
|
|||||||
"[bold bright_black]",
|
"[bold bright_black]",
|
||||||
"Ignored",
|
"Ignored",
|
||||||
fmt_path(element_path),
|
fmt_path(element_path),
|
||||||
"[bright_black](not descending into linked course)"
|
"[bright_black](not descending into linked course)",
|
||||||
)
|
)
|
||||||
return None
|
return None
|
||||||
elif element.type == IliasElementType.WIKI:
|
elif element.type == IliasElementType.WIKI:
|
||||||
@@ -439,7 +440,7 @@ instance's greatest bottleneck.
|
|||||||
"[bold bright_black]",
|
"[bold bright_black]",
|
||||||
"Ignored",
|
"Ignored",
|
||||||
fmt_path(element_path),
|
fmt_path(element_path),
|
||||||
"[bright_black](wikis are not currently supported)"
|
"[bright_black](wikis are not currently supported)",
|
||||||
)
|
)
|
||||||
return None
|
return None
|
||||||
elif element.type == IliasElementType.LEARNING_MODULE:
|
elif element.type == IliasElementType.LEARNING_MODULE:
|
||||||
@@ -513,19 +514,15 @@ instance's greatest bottleneck.
|
|||||||
@anoncritical
|
@anoncritical
|
||||||
@_iorepeat(3, "resolving link")
|
@_iorepeat(3, "resolving link")
|
||||||
async def _download_link(
|
async def _download_link(
|
||||||
self,
|
self, link_renderer: Links, collection_name: str, links: list[LinkData], dl: DownloadToken
|
||||||
link_renderer: Links,
|
|
||||||
collection_name: str,
|
|
||||||
links: list[LinkData],
|
|
||||||
dl: DownloadToken
|
|
||||||
) -> None:
|
) -> None:
|
||||||
async with dl as (bar, sink):
|
async with dl as (bar, sink):
|
||||||
rendered = link_renderer.interpolate(self._link_file_redirect_delay, collection_name, links)
|
rendered = link_renderer.interpolate(self._link_file_redirect_delay, collection_name, links)
|
||||||
sink.file.write(rendered.encode("utf-8"))
|
sink.file.write(rendered.encode("utf-8"))
|
||||||
sink.done()
|
sink.done()
|
||||||
|
|
||||||
async def _resolve_link_target(self, export_url: str) -> Union[BeautifulSoup, Literal['none']]:
|
async def _resolve_link_target(self, export_url: str) -> BeautifulSoup | Literal["none"]:
|
||||||
async def impl() -> Optional[Union[BeautifulSoup, Literal['none']]]:
|
async def impl() -> Optional[BeautifulSoup | Literal["none"]]:
|
||||||
async with self.session.get(export_url, allow_redirects=False) as resp:
|
async with self.session.get(export_url, allow_redirects=False) as resp:
|
||||||
# No redirect means we were authenticated
|
# No redirect means we were authenticated
|
||||||
if hdrs.LOCATION not in resp.headers:
|
if hdrs.LOCATION not in resp.headers:
|
||||||
@@ -551,7 +548,7 @@ instance's greatest bottleneck.
|
|||||||
|
|
||||||
@staticmethod
|
@staticmethod
|
||||||
def _parse_link_content(element: IliasPageElement, content: BeautifulSoup) -> list[LinkData]:
|
def _parse_link_content(element: IliasPageElement, content: BeautifulSoup) -> list[LinkData]:
|
||||||
links = cast(list[Tag], list(content.select("a")))
|
links = list(content.select("a"))
|
||||||
if len(links) == 1:
|
if len(links) == 1:
|
||||||
url = str(links[0].get("href")).strip()
|
url = str(links[0].get("href")).strip()
|
||||||
return [LinkData(name=element.name, description=element.description or "", url=url)]
|
return [LinkData(name=element.name, description=element.description or "", url=url)]
|
||||||
@@ -601,7 +598,7 @@ instance's greatest bottleneck.
|
|||||||
async with dl as (_bar, sink):
|
async with dl as (_bar, sink):
|
||||||
description = clean(insert_base_markup(description))
|
description = clean(insert_base_markup(description))
|
||||||
description_tag = await self.internalize_images(description)
|
description_tag = await self.internalize_images(description)
|
||||||
sink.file.write(cast(str, description_tag.prettify()).encode("utf-8"))
|
sink.file.write(description_tag.prettify().encode("utf-8"))
|
||||||
sink.done()
|
sink.done()
|
||||||
|
|
||||||
@anoncritical
|
@anoncritical
|
||||||
@@ -626,7 +623,7 @@ instance's greatest bottleneck.
|
|||||||
if self.prev_report:
|
if self.prev_report:
|
||||||
self.report.add_custom_value(
|
self.report.add_custom_value(
|
||||||
_get_video_cache_key(element),
|
_get_video_cache_key(element),
|
||||||
self.prev_report.get_custom_value(_get_video_cache_key(element))
|
self.prev_report.get_custom_value(_get_video_cache_key(element)),
|
||||||
)
|
)
|
||||||
|
|
||||||
# A video might contain other videos, so let's "crawl" the video first
|
# A video might contain other videos, so let's "crawl" the video first
|
||||||
@@ -660,7 +657,7 @@ instance's greatest bottleneck.
|
|||||||
|
|
||||||
def _previous_contained_opencast_videos(
|
def _previous_contained_opencast_videos(
|
||||||
self, element: IliasPageElement, element_path: PurePath
|
self, element: IliasPageElement, element_path: PurePath
|
||||||
) -> List[PurePath]:
|
) -> list[PurePath]:
|
||||||
if not self.prev_report:
|
if not self.prev_report:
|
||||||
return []
|
return []
|
||||||
custom_value = self.prev_report.get_custom_value(_get_video_cache_key(element))
|
custom_value = self.prev_report.get_custom_value(_get_video_cache_key(element))
|
||||||
@@ -698,7 +695,7 @@ instance's greatest bottleneck.
|
|||||||
def add_to_report(paths: list[str]) -> None:
|
def add_to_report(paths: list[str]) -> None:
|
||||||
self.report.add_custom_value(
|
self.report.add_custom_value(
|
||||||
_get_video_cache_key(element),
|
_get_video_cache_key(element),
|
||||||
{"known_paths": paths, "own_path": str(self._transformer.transform(dl.path))}
|
{"known_paths": paths, "own_path": str(self._transformer.transform(dl.path))},
|
||||||
)
|
)
|
||||||
|
|
||||||
async with dl as (bar, sink):
|
async with dl as (bar, sink):
|
||||||
@@ -716,7 +713,7 @@ instance's greatest bottleneck.
|
|||||||
add_to_report([str(self._transformer.transform(dl.path))])
|
add_to_report([str(self._transformer.transform(dl.path))])
|
||||||
return
|
return
|
||||||
|
|
||||||
contained_video_paths: List[str] = []
|
contained_video_paths: list[str] = []
|
||||||
|
|
||||||
for stream_element in stream_elements:
|
for stream_element in stream_elements:
|
||||||
video_path = dl.path.parent / stream_element.name
|
video_path = dl.path.parent / stream_element.name
|
||||||
@@ -752,11 +749,7 @@ instance's greatest bottleneck.
|
|||||||
await self._stream_from_url(element, sink, bar, is_video)
|
await self._stream_from_url(element, sink, bar, is_video)
|
||||||
|
|
||||||
async def _stream_from_url(
|
async def _stream_from_url(
|
||||||
self,
|
self, element: IliasPageElement, sink: FileSink, bar: ProgressBar, is_video: bool
|
||||||
element: IliasPageElement,
|
|
||||||
sink: FileSink,
|
|
||||||
bar: ProgressBar,
|
|
||||||
is_video: bool
|
|
||||||
) -> None:
|
) -> None:
|
||||||
url = element.url
|
url = element.url
|
||||||
|
|
||||||
@@ -831,14 +824,14 @@ instance's greatest bottleneck.
|
|||||||
log.warn("Could not extract forum export url")
|
log.warn("Could not extract forum export url")
|
||||||
return
|
return
|
||||||
|
|
||||||
export = await self._post(export_url, {
|
export = await self._post(
|
||||||
"format": "html",
|
export_url,
|
||||||
"cmd[createExportFile]": ""
|
{"format": "html", "cmd[createExportFile]": ""},
|
||||||
})
|
)
|
||||||
|
|
||||||
elements = parse_ilias_forum_export(soupify(export))
|
elements = parse_ilias_forum_export(soupify(export))
|
||||||
|
|
||||||
tasks: List[Awaitable[None]] = []
|
tasks: list[Awaitable[None]] = []
|
||||||
for thread in elements:
|
for thread in elements:
|
||||||
tasks.append(asyncio.create_task(self._download_forum_thread(cl.path, thread, element.url)))
|
tasks.append(asyncio.create_task(self._download_forum_thread(cl.path, thread, element.url)))
|
||||||
|
|
||||||
@@ -848,10 +841,7 @@ instance's greatest bottleneck.
|
|||||||
@anoncritical
|
@anoncritical
|
||||||
@_iorepeat(3, "saving forum thread")
|
@_iorepeat(3, "saving forum thread")
|
||||||
async def _download_forum_thread(
|
async def _download_forum_thread(
|
||||||
self,
|
self, parent_path: PurePath, thread: IliasForumThread | IliasPageElement, forum_url: str
|
||||||
parent_path: PurePath,
|
|
||||||
thread: Union[IliasForumThread, IliasPageElement],
|
|
||||||
forum_url: str
|
|
||||||
) -> None:
|
) -> None:
|
||||||
path = parent_path / (_sanitize_path_name(thread.name) + ".html")
|
path = parent_path / (_sanitize_path_name(thread.name) + ".html")
|
||||||
maybe_dl = await self.download(path, mtime=thread.mtime)
|
maybe_dl = await self.download(path, mtime=thread.mtime)
|
||||||
@@ -860,10 +850,7 @@ instance's greatest bottleneck.
|
|||||||
|
|
||||||
async with maybe_dl as (bar, sink):
|
async with maybe_dl as (bar, sink):
|
||||||
rendered = forum_thread_template(
|
rendered = forum_thread_template(
|
||||||
thread.name,
|
thread.name, forum_url, thread.name_tag, await self.internalize_images(thread.content_tag)
|
||||||
forum_url,
|
|
||||||
thread.name_tag,
|
|
||||||
await self.internalize_images(thread.content_tag)
|
|
||||||
)
|
)
|
||||||
sink.file.write(rendered.encode("utf-8"))
|
sink.file.write(rendered.encode("utf-8"))
|
||||||
sink.done()
|
sink.done()
|
||||||
@@ -883,7 +870,7 @@ instance's greatest bottleneck.
|
|||||||
@_iorepeat(3, "crawling learning module")
|
@_iorepeat(3, "crawling learning module")
|
||||||
@anoncritical
|
@anoncritical
|
||||||
async def _crawl_learning_module(self, element: IliasPageElement, cl: CrawlToken) -> None:
|
async def _crawl_learning_module(self, element: IliasPageElement, cl: CrawlToken) -> None:
|
||||||
elements: List[IliasLearningModulePage] = []
|
elements: list[IliasLearningModulePage] = []
|
||||||
|
|
||||||
async with cl:
|
async with cl:
|
||||||
log.explain_topic(f"Parsing initial HTML page for {fmt_path(cl.path)}")
|
log.explain_topic(f"Parsing initial HTML page for {fmt_path(cl.path)}")
|
||||||
@@ -891,25 +878,25 @@ instance's greatest bottleneck.
|
|||||||
soup = await self._get_page(element.url)
|
soup = await self._get_page(element.url)
|
||||||
page = IliasPage(soup, element)
|
page = IliasPage(soup, element)
|
||||||
if next := page.get_learning_module_data():
|
if next := page.get_learning_module_data():
|
||||||
elements.extend(await self._crawl_learning_module_direction(
|
elements.extend(
|
||||||
cl.path, next.previous_url, "left", element
|
await self._crawl_learning_module_direction(cl.path, next.previous_url, "left", element)
|
||||||
))
|
)
|
||||||
elements.append(next)
|
elements.append(next)
|
||||||
elements.extend(await self._crawl_learning_module_direction(
|
elements.extend(
|
||||||
cl.path, next.next_url, "right", element
|
await self._crawl_learning_module_direction(cl.path, next.next_url, "right", element)
|
||||||
))
|
)
|
||||||
|
|
||||||
# Reflect their natural ordering in the file names
|
# Reflect their natural ordering in the file names
|
||||||
for index, lm_element in enumerate(elements):
|
for index, lm_element in enumerate(elements):
|
||||||
lm_element.title = f"{index:02}_{lm_element.title}"
|
lm_element.title = f"{index:02}_{lm_element.title}"
|
||||||
|
|
||||||
tasks: List[Awaitable[None]] = []
|
tasks: list[Awaitable[None]] = []
|
||||||
for index, elem in enumerate(elements):
|
for index, elem in enumerate(elements):
|
||||||
prev_url = elements[index - 1].title if index > 0 else None
|
prev_url = elements[index - 1].title if index > 0 else None
|
||||||
next_url = elements[index + 1].title if index < len(elements) - 1 else None
|
next_url = elements[index + 1].title if index < len(elements) - 1 else None
|
||||||
tasks.append(asyncio.create_task(
|
tasks.append(
|
||||||
self._download_learning_module_page(cl.path, elem, prev_url, next_url)
|
asyncio.create_task(self._download_learning_module_page(cl.path, elem, prev_url, next_url))
|
||||||
))
|
)
|
||||||
|
|
||||||
# And execute them
|
# And execute them
|
||||||
await self.gather(tasks)
|
await self.gather(tasks)
|
||||||
@@ -918,10 +905,10 @@ instance's greatest bottleneck.
|
|||||||
self,
|
self,
|
||||||
path: PurePath,
|
path: PurePath,
|
||||||
start_url: Optional[str],
|
start_url: Optional[str],
|
||||||
dir: Union[Literal["left"], Literal["right"]],
|
dir: Literal["left"] | Literal["right"],
|
||||||
parent_element: IliasPageElement
|
parent_element: IliasPageElement,
|
||||||
) -> List[IliasLearningModulePage]:
|
) -> list[IliasLearningModulePage]:
|
||||||
elements: List[IliasLearningModulePage] = []
|
elements: list[IliasLearningModulePage] = []
|
||||||
|
|
||||||
if not start_url:
|
if not start_url:
|
||||||
return elements
|
return elements
|
||||||
@@ -935,10 +922,7 @@ instance's greatest bottleneck.
|
|||||||
page = IliasPage(soup, parent_element)
|
page = IliasPage(soup, parent_element)
|
||||||
if next := page.get_learning_module_data():
|
if next := page.get_learning_module_data():
|
||||||
elements.append(next)
|
elements.append(next)
|
||||||
if dir == "left":
|
next_element_url = next.previous_url if dir == "left" else next.next_url
|
||||||
next_element_url = next.previous_url
|
|
||||||
else:
|
|
||||||
next_element_url = next.next_url
|
|
||||||
counter += 1
|
counter += 1
|
||||||
|
|
||||||
return elements
|
return elements
|
||||||
@@ -950,7 +934,7 @@ instance's greatest bottleneck.
|
|||||||
parent_path: PurePath,
|
parent_path: PurePath,
|
||||||
element: IliasLearningModulePage,
|
element: IliasLearningModulePage,
|
||||||
prev: Optional[str],
|
prev: Optional[str],
|
||||||
next: Optional[str]
|
next: Optional[str],
|
||||||
) -> None:
|
) -> None:
|
||||||
path = parent_path / (_sanitize_path_name(element.title) + ".html")
|
path = parent_path / (_sanitize_path_name(element.title) + ".html")
|
||||||
maybe_dl = await self.download(path)
|
maybe_dl = await self.download(path)
|
||||||
@@ -962,16 +946,10 @@ instance's greatest bottleneck.
|
|||||||
|
|
||||||
if prev:
|
if prev:
|
||||||
prev_p = self._transformer.transform(parent_path / (_sanitize_path_name(prev) + ".html"))
|
prev_p = self._transformer.transform(parent_path / (_sanitize_path_name(prev) + ".html"))
|
||||||
if prev_p:
|
prev = os.path.relpath(prev_p, my_path.parent) if prev_p else None
|
||||||
prev = cast(str, os.path.relpath(prev_p, my_path.parent))
|
|
||||||
else:
|
|
||||||
prev = None
|
|
||||||
if next:
|
if next:
|
||||||
next_p = self._transformer.transform(parent_path / (_sanitize_path_name(next) + ".html"))
|
next_p = self._transformer.transform(parent_path / (_sanitize_path_name(next) + ".html"))
|
||||||
if next_p:
|
next = os.path.relpath(next_p, my_path.parent) if next_p else None
|
||||||
next = cast(str, os.path.relpath(next_p, my_path.parent))
|
|
||||||
else:
|
|
||||||
next = None
|
|
||||||
|
|
||||||
async with maybe_dl as (bar, sink):
|
async with maybe_dl as (bar, sink):
|
||||||
content = element.content
|
content = element.content
|
||||||
@@ -985,16 +963,13 @@ instance's greatest bottleneck.
|
|||||||
"""
|
"""
|
||||||
log.explain_topic("Internalizing images")
|
log.explain_topic("Internalizing images")
|
||||||
for elem in tag.find_all(recursive=True):
|
for elem in tag.find_all(recursive=True):
|
||||||
if not isinstance(elem, Tag):
|
if elem.name == "img" and (src := elem.attrs.get("src", None)):
|
||||||
continue
|
url = urljoin(self._base_url, cast(str, src))
|
||||||
if elem.name == "img":
|
if not url.startswith(self._base_url):
|
||||||
if src := elem.attrs.get("src", None):
|
continue
|
||||||
url = urljoin(self._base_url, cast(str, src))
|
log.explain(f"Internalizing {url!r}")
|
||||||
if not url.startswith(self._base_url):
|
img = await self._get_authenticated(url)
|
||||||
continue
|
elem.attrs["src"] = "data:;base64," + base64.b64encode(img).decode()
|
||||||
log.explain(f"Internalizing {url!r}")
|
|
||||||
img = await self._get_authenticated(url)
|
|
||||||
elem.attrs["src"] = "data:;base64," + base64.b64encode(img).decode()
|
|
||||||
if elem.name == "iframe" and cast(str, elem.attrs.get("src", "")).startswith("//"):
|
if elem.name == "iframe" and cast(str, elem.attrs.get("src", "")).startswith("//"):
|
||||||
# For unknown reasons the protocol seems to be stripped.
|
# For unknown reasons the protocol seems to be stripped.
|
||||||
elem.attrs["src"] = "https:" + cast(str, elem.attrs["src"])
|
elem.attrs["src"] = "https:" + cast(str, elem.attrs["src"])
|
||||||
@@ -1039,11 +1014,7 @@ instance's greatest bottleneck.
|
|||||||
)
|
)
|
||||||
return soup
|
return soup
|
||||||
|
|
||||||
async def _post(
|
async def _post(self, url: str, data: dict[str, str | list[str]]) -> bytes:
|
||||||
self,
|
|
||||||
url: str,
|
|
||||||
data: dict[str, Union[str, List[str]]]
|
|
||||||
) -> bytes:
|
|
||||||
form_data = aiohttp.FormData()
|
form_data = aiohttp.FormData()
|
||||||
for key, val in data.items():
|
for key, val in data.items():
|
||||||
form_data.add_field(key, val)
|
form_data.add_field(key, val)
|
||||||
@@ -1081,7 +1052,7 @@ instance's greatest bottleneck.
|
|||||||
async with self.session.get(urljoin(self._base_url, "/login.php"), params=params) as request:
|
async with self.session.get(urljoin(self._base_url, "/login.php"), params=params) as request:
|
||||||
login_page = soupify(await request.read())
|
login_page = soupify(await request.read())
|
||||||
|
|
||||||
login_form = cast(Optional[Tag], login_page.find("form", attrs={"name": "login_form"}))
|
login_form = login_page.find("form", attrs={"name": "login_form"})
|
||||||
if login_form is None:
|
if login_form is None:
|
||||||
raise CrawlError("Could not find the login form! Specified client id might be invalid.")
|
raise CrawlError("Could not find the login form! Specified client id might be invalid.")
|
||||||
|
|
||||||
@@ -1092,8 +1063,8 @@ instance's greatest bottleneck.
|
|||||||
username, password = await self._auth.credentials()
|
username, password = await self._auth.credentials()
|
||||||
|
|
||||||
login_form_data = aiohttp.FormData()
|
login_form_data = aiohttp.FormData()
|
||||||
login_form_data.add_field('login_form/input_3/input_4', username)
|
login_form_data.add_field("login_form/input_3/input_4", username)
|
||||||
login_form_data.add_field('login_form/input_3/input_5', password)
|
login_form_data.add_field("login_form/input_3/input_5", password)
|
||||||
|
|
||||||
# do the actual login
|
# do the actual login
|
||||||
async with self.session.post(urljoin(self._base_url, login_url), data=login_form_data) as request:
|
async with self.session.post(urljoin(self._base_url, login_url), data=login_form_data) as request:
|
||||||
|
@@ -1,9 +1,10 @@
|
|||||||
import json
|
import json
|
||||||
import re
|
import re
|
||||||
|
from collections.abc import Callable
|
||||||
from dataclasses import dataclass
|
from dataclasses import dataclass
|
||||||
from datetime import date, datetime, timedelta
|
from datetime import date, datetime, timedelta
|
||||||
from enum import Enum
|
from enum import Enum
|
||||||
from typing import Callable, Dict, Optional, Union, cast
|
from typing import Optional, cast
|
||||||
from urllib.parse import urljoin, urlparse
|
from urllib.parse import urljoin, urlparse
|
||||||
|
|
||||||
from bs4 import BeautifulSoup, Tag
|
from bs4 import BeautifulSoup, Tag
|
||||||
@@ -13,7 +14,7 @@ from PFERD.crawl.crawler import CrawlWarning
|
|||||||
from PFERD.logging import log
|
from PFERD.logging import log
|
||||||
from PFERD.utils import url_set_query_params
|
from PFERD.utils import url_set_query_params
|
||||||
|
|
||||||
TargetType = Union[str, int]
|
TargetType = str | int
|
||||||
|
|
||||||
|
|
||||||
class TypeMatcher:
|
class TypeMatcher:
|
||||||
@@ -42,15 +43,15 @@ class TypeMatcher:
|
|||||||
self.alt = alt
|
self.alt = alt
|
||||||
|
|
||||||
class All:
|
class All:
|
||||||
matchers: list['IliasElementMatcher']
|
matchers: list["IliasElementMatcher"]
|
||||||
|
|
||||||
def __init__(self, matchers: list['IliasElementMatcher']):
|
def __init__(self, matchers: list["IliasElementMatcher"]):
|
||||||
self.matchers = matchers
|
self.matchers = matchers
|
||||||
|
|
||||||
class Any:
|
class Any:
|
||||||
matchers: list['IliasElementMatcher']
|
matchers: list["IliasElementMatcher"]
|
||||||
|
|
||||||
def __init__(self, matchers: list['IliasElementMatcher']):
|
def __init__(self, matchers: list["IliasElementMatcher"]):
|
||||||
self.matchers = matchers
|
self.matchers = matchers
|
||||||
|
|
||||||
@staticmethod
|
@staticmethod
|
||||||
@@ -70,11 +71,11 @@ class TypeMatcher:
|
|||||||
return TypeMatcher.ImgAlt(alt)
|
return TypeMatcher.ImgAlt(alt)
|
||||||
|
|
||||||
@staticmethod
|
@staticmethod
|
||||||
def all(*matchers: 'IliasElementMatcher') -> All:
|
def all(*matchers: "IliasElementMatcher") -> All:
|
||||||
return TypeMatcher.All(list(matchers))
|
return TypeMatcher.All(list(matchers))
|
||||||
|
|
||||||
@staticmethod
|
@staticmethod
|
||||||
def any(*matchers: 'IliasElementMatcher') -> Any:
|
def any(*matchers: "IliasElementMatcher") -> Any:
|
||||||
return TypeMatcher.Any(list(matchers))
|
return TypeMatcher.Any(list(matchers))
|
||||||
|
|
||||||
@staticmethod
|
@staticmethod
|
||||||
@@ -127,20 +128,14 @@ class IliasElementType(Enum):
|
|||||||
def matcher(self) -> IliasElementMatcher:
|
def matcher(self) -> IliasElementMatcher:
|
||||||
match self:
|
match self:
|
||||||
case IliasElementType.BLOG:
|
case IliasElementType.BLOG:
|
||||||
return TypeMatcher.any(
|
return TypeMatcher.any(TypeMatcher.img_src("_blog.svg"))
|
||||||
TypeMatcher.img_src("_blog.svg")
|
|
||||||
)
|
|
||||||
case IliasElementType.BOOKING:
|
case IliasElementType.BOOKING:
|
||||||
return TypeMatcher.any(
|
return TypeMatcher.any(TypeMatcher.path("/book/"), TypeMatcher.img_src("_book.svg"))
|
||||||
TypeMatcher.path("/book/"),
|
|
||||||
TypeMatcher.img_src("_book.svg")
|
|
||||||
)
|
|
||||||
case IliasElementType.COURSE:
|
case IliasElementType.COURSE:
|
||||||
return TypeMatcher.any(TypeMatcher.path("/crs/"), TypeMatcher.img_src("_crsr.svg"))
|
return TypeMatcher.any(TypeMatcher.path("/crs/"), TypeMatcher.img_src("_crsr.svg"))
|
||||||
case IliasElementType.DCL_RECORD_LIST:
|
case IliasElementType.DCL_RECORD_LIST:
|
||||||
return TypeMatcher.any(
|
return TypeMatcher.any(
|
||||||
TypeMatcher.img_src("_dcl.svg"),
|
TypeMatcher.img_src("_dcl.svg"), TypeMatcher.query("cmdclass=ildclrecordlistgui")
|
||||||
TypeMatcher.query("cmdclass=ildclrecordlistgui")
|
|
||||||
)
|
)
|
||||||
case IliasElementType.EXERCISE:
|
case IliasElementType.EXERCISE:
|
||||||
return TypeMatcher.never()
|
return TypeMatcher.never()
|
||||||
@@ -162,14 +157,11 @@ class IliasElementType(Enum):
|
|||||||
return TypeMatcher.any(
|
return TypeMatcher.any(
|
||||||
TypeMatcher.path("/fold/"),
|
TypeMatcher.path("/fold/"),
|
||||||
TypeMatcher.img_src("_fold.svg"),
|
TypeMatcher.img_src("_fold.svg"),
|
||||||
|
|
||||||
TypeMatcher.path("/grp/"),
|
TypeMatcher.path("/grp/"),
|
||||||
TypeMatcher.img_src("_grp.svg"),
|
TypeMatcher.img_src("_grp.svg"),
|
||||||
|
|
||||||
TypeMatcher.path("/copa/"),
|
TypeMatcher.path("/copa/"),
|
||||||
TypeMatcher.path("_copa_"),
|
TypeMatcher.path("_copa_"),
|
||||||
TypeMatcher.img_src("_copa.svg"),
|
TypeMatcher.img_src("_copa.svg"),
|
||||||
|
|
||||||
# Not supported right now but warn users
|
# Not supported right now but warn users
|
||||||
# TypeMatcher.query("baseclass=ilmediapoolpresentationgui"),
|
# TypeMatcher.query("baseclass=ilmediapoolpresentationgui"),
|
||||||
# TypeMatcher.img_alt("medienpool"),
|
# TypeMatcher.img_alt("medienpool"),
|
||||||
@@ -188,14 +180,10 @@ class IliasElementType(Enum):
|
|||||||
case IliasElementType.LITERATURE_LIST:
|
case IliasElementType.LITERATURE_LIST:
|
||||||
return TypeMatcher.img_src("_bibl.svg")
|
return TypeMatcher.img_src("_bibl.svg")
|
||||||
case IliasElementType.LEARNING_MODULE:
|
case IliasElementType.LEARNING_MODULE:
|
||||||
return TypeMatcher.any(
|
return TypeMatcher.any(TypeMatcher.path("/lm/"), TypeMatcher.img_src("_lm.svg"))
|
||||||
TypeMatcher.path("/lm/"),
|
|
||||||
TypeMatcher.img_src("_lm.svg")
|
|
||||||
)
|
|
||||||
case IliasElementType.LEARNING_MODULE_HTML:
|
case IliasElementType.LEARNING_MODULE_HTML:
|
||||||
return TypeMatcher.any(
|
return TypeMatcher.any(
|
||||||
TypeMatcher.query("baseclass=ilhtlmpresentationgui"),
|
TypeMatcher.query("baseclass=ilhtlmpresentationgui"), TypeMatcher.img_src("_htlm.svg")
|
||||||
TypeMatcher.img_src("_htlm.svg")
|
|
||||||
)
|
)
|
||||||
case IliasElementType.LINK:
|
case IliasElementType.LINK:
|
||||||
return TypeMatcher.any(
|
return TypeMatcher.any(
|
||||||
@@ -203,17 +191,16 @@ class IliasElementType(Enum):
|
|||||||
TypeMatcher.query("baseclass=illinkresourcehandlergui"),
|
TypeMatcher.query("baseclass=illinkresourcehandlergui"),
|
||||||
TypeMatcher.query("calldirectlink"),
|
TypeMatcher.query("calldirectlink"),
|
||||||
),
|
),
|
||||||
TypeMatcher.img_src("_webr.svg") # duplicated :(
|
TypeMatcher.img_src("_webr.svg"), # duplicated :(
|
||||||
)
|
)
|
||||||
case IliasElementType.LINK_COLLECTION:
|
case IliasElementType.LINK_COLLECTION:
|
||||||
return TypeMatcher.any(
|
return TypeMatcher.any(
|
||||||
TypeMatcher.query("baseclass=illinkresourcehandlergui"),
|
TypeMatcher.query("baseclass=illinkresourcehandlergui"),
|
||||||
TypeMatcher.img_src("_webr.svg") # duplicated :(
|
TypeMatcher.img_src("_webr.svg"), # duplicated :(
|
||||||
)
|
)
|
||||||
case IliasElementType.MEDIA_POOL:
|
case IliasElementType.MEDIA_POOL:
|
||||||
return TypeMatcher.any(
|
return TypeMatcher.any(
|
||||||
TypeMatcher.query("baseclass=ilmediapoolpresentationgui"),
|
TypeMatcher.query("baseclass=ilmediapoolpresentationgui"), TypeMatcher.img_src("_mep.svg")
|
||||||
TypeMatcher.img_src("_mep.svg")
|
|
||||||
)
|
)
|
||||||
case IliasElementType.MEDIACAST_VIDEO:
|
case IliasElementType.MEDIACAST_VIDEO:
|
||||||
return TypeMatcher.never()
|
return TypeMatcher.never()
|
||||||
@@ -221,12 +208,10 @@ class IliasElementType(Enum):
|
|||||||
return TypeMatcher.any(
|
return TypeMatcher.any(
|
||||||
TypeMatcher.path("/mcst/"),
|
TypeMatcher.path("/mcst/"),
|
||||||
TypeMatcher.query("baseclass=ilmediacasthandlergui"),
|
TypeMatcher.query("baseclass=ilmediacasthandlergui"),
|
||||||
TypeMatcher.img_src("_mcst.svg")
|
TypeMatcher.img_src("_mcst.svg"),
|
||||||
)
|
)
|
||||||
case IliasElementType.MEETING:
|
case IliasElementType.MEETING:
|
||||||
return TypeMatcher.any(
|
return TypeMatcher.any(TypeMatcher.img_src("_sess.svg"))
|
||||||
TypeMatcher.img_src("_sess.svg")
|
|
||||||
)
|
|
||||||
case IliasElementType.MOB_VIDEO:
|
case IliasElementType.MOB_VIDEO:
|
||||||
return TypeMatcher.never()
|
return TypeMatcher.never()
|
||||||
case IliasElementType.OPENCAST_VIDEO:
|
case IliasElementType.OPENCAST_VIDEO:
|
||||||
@@ -239,24 +224,19 @@ class IliasElementType(Enum):
|
|||||||
return TypeMatcher.never()
|
return TypeMatcher.never()
|
||||||
case IliasElementType.SCORM_LEARNING_MODULE:
|
case IliasElementType.SCORM_LEARNING_MODULE:
|
||||||
return TypeMatcher.any(
|
return TypeMatcher.any(
|
||||||
TypeMatcher.query("baseclass=ilsahspresentationgui"),
|
TypeMatcher.query("baseclass=ilsahspresentationgui"), TypeMatcher.img_src("_sahs.svg")
|
||||||
TypeMatcher.img_src("_sahs.svg")
|
|
||||||
)
|
)
|
||||||
case IliasElementType.SURVEY:
|
case IliasElementType.SURVEY:
|
||||||
return TypeMatcher.any(
|
return TypeMatcher.any(TypeMatcher.path("/svy/"), TypeMatcher.img_src("svy.svg"))
|
||||||
TypeMatcher.path("/svy/"),
|
|
||||||
TypeMatcher.img_src("svy.svg")
|
|
||||||
)
|
|
||||||
case IliasElementType.TEST:
|
case IliasElementType.TEST:
|
||||||
return TypeMatcher.any(
|
return TypeMatcher.any(
|
||||||
TypeMatcher.query("cmdclass=ilobjtestgui"),
|
TypeMatcher.query("cmdclass=ilobjtestgui"),
|
||||||
TypeMatcher.query("cmdclass=iltestscreengui"),
|
TypeMatcher.query("cmdclass=iltestscreengui"),
|
||||||
TypeMatcher.img_src("_tst.svg")
|
TypeMatcher.img_src("_tst.svg"),
|
||||||
)
|
)
|
||||||
case IliasElementType.WIKI:
|
case IliasElementType.WIKI:
|
||||||
return TypeMatcher.any(
|
return TypeMatcher.any(
|
||||||
TypeMatcher.query("baseClass=ilwikihandlergui"),
|
TypeMatcher.query("baseClass=ilwikihandlergui"), TypeMatcher.img_src("wiki.svg")
|
||||||
TypeMatcher.img_src("wiki.svg")
|
|
||||||
)
|
)
|
||||||
|
|
||||||
raise CrawlWarning(f"Unknown matcher {self}")
|
raise CrawlWarning(f"Unknown matcher {self}")
|
||||||
@@ -291,7 +271,7 @@ class IliasPageElement:
|
|||||||
r"thr_pk=(?P<id>\d+)", # forums
|
r"thr_pk=(?P<id>\d+)", # forums
|
||||||
r"ref_id=(?P<id>\d+)",
|
r"ref_id=(?P<id>\d+)",
|
||||||
r"target=[a-z]+_(?P<id>\d+)",
|
r"target=[a-z]+_(?P<id>\d+)",
|
||||||
r"mm_(?P<id>\d+)"
|
r"mm_(?P<id>\d+)",
|
||||||
]
|
]
|
||||||
|
|
||||||
for regex in regexes:
|
for regex in regexes:
|
||||||
@@ -309,8 +289,8 @@ class IliasPageElement:
|
|||||||
name: str,
|
name: str,
|
||||||
mtime: Optional[datetime] = None,
|
mtime: Optional[datetime] = None,
|
||||||
description: Optional[str] = None,
|
description: Optional[str] = None,
|
||||||
skip_sanitize: bool = False
|
skip_sanitize: bool = False,
|
||||||
) -> 'IliasPageElement':
|
) -> "IliasPageElement":
|
||||||
if typ == IliasElementType.MEETING:
|
if typ == IliasElementType.MEETING:
|
||||||
normalized = IliasPageElement._normalize_meeting_name(name)
|
normalized = IliasPageElement._normalize_meeting_name(name)
|
||||||
log.explain(f"Normalized meeting name from {name!r} to {normalized!r}")
|
log.explain(f"Normalized meeting name from {name!r} to {normalized!r}")
|
||||||
@@ -329,7 +309,7 @@ class IliasPageElement:
|
|||||||
"""
|
"""
|
||||||
|
|
||||||
# This checks whether we can reach a `:` without passing a `-`
|
# This checks whether we can reach a `:` without passing a `-`
|
||||||
if re.search(r"^[^-]+: ", meeting_name):
|
if re.search(r"^[^-]+: ", meeting_name): # noqa: SIM108
|
||||||
# Meeting name only contains date: "05. Jan 2000:"
|
# Meeting name only contains date: "05. Jan 2000:"
|
||||||
split_delimiter = ":"
|
split_delimiter = ":"
|
||||||
else:
|
else:
|
||||||
@@ -352,7 +332,7 @@ class IliasPageElement:
|
|||||||
@dataclass
|
@dataclass
|
||||||
class IliasDownloadForumData:
|
class IliasDownloadForumData:
|
||||||
url: str
|
url: str
|
||||||
form_data: Dict[str, Union[str, list[str]]]
|
form_data: dict[str, str | list[str]]
|
||||||
empty: bool
|
empty: bool
|
||||||
|
|
||||||
|
|
||||||
@@ -382,7 +362,6 @@ class IliasSoup:
|
|||||||
|
|
||||||
|
|
||||||
class IliasPage:
|
class IliasPage:
|
||||||
|
|
||||||
def __init__(self, ilias_soup: IliasSoup, source_element: Optional[IliasPageElement]):
|
def __init__(self, ilias_soup: IliasSoup, source_element: Optional[IliasPageElement]):
|
||||||
self._ilias_soup = ilias_soup
|
self._ilias_soup = ilias_soup
|
||||||
self._soup = ilias_soup.soup
|
self._soup = ilias_soup.soup
|
||||||
@@ -422,23 +401,23 @@ class IliasPage:
|
|||||||
return self._find_normal_entries()
|
return self._find_normal_entries()
|
||||||
|
|
||||||
def get_info_tab(self) -> Optional[IliasPageElement]:
|
def get_info_tab(self) -> Optional[IliasPageElement]:
|
||||||
tab: Optional[Tag] = cast(Optional[Tag], self._soup.find(
|
tab: Optional[Tag] = self._soup.find(
|
||||||
name="a",
|
name="a", attrs={"href": lambda x: x is not None and "cmdClass=ilinfoscreengui" in x}
|
||||||
attrs={"href": lambda x: x is not None and "cmdClass=ilinfoscreengui" in x}
|
)
|
||||||
))
|
|
||||||
if tab is not None:
|
if tab is not None:
|
||||||
return IliasPageElement.create_new(
|
return IliasPageElement.create_new(
|
||||||
IliasElementType.INFO_TAB,
|
IliasElementType.INFO_TAB, self._abs_url_from_link(tab), "infos"
|
||||||
self._abs_url_from_link(tab),
|
|
||||||
"infos"
|
|
||||||
)
|
)
|
||||||
return None
|
return None
|
||||||
|
|
||||||
def get_description(self) -> Optional[BeautifulSoup]:
|
def get_description(self) -> Optional[BeautifulSoup]:
|
||||||
def is_interesting_class(name: str) -> bool:
|
def is_interesting_class(name: str | None) -> bool:
|
||||||
return name in [
|
return name in [
|
||||||
"ilCOPageSection", "ilc_Paragraph", "ilc_va_ihcap_VAccordIHeadCap",
|
"ilCOPageSection",
|
||||||
"ilc_va_ihcap_AccordIHeadCap", "ilc_media_cont_MediaContainer"
|
"ilc_Paragraph",
|
||||||
|
"ilc_va_ihcap_VAccordIHeadCap",
|
||||||
|
"ilc_va_ihcap_AccordIHeadCap",
|
||||||
|
"ilc_media_cont_MediaContainer",
|
||||||
]
|
]
|
||||||
|
|
||||||
paragraphs: list[Tag] = cast(list[Tag], self._soup.find_all(class_=is_interesting_class))
|
paragraphs: list[Tag] = cast(list[Tag], self._soup.find_all(class_=is_interesting_class))
|
||||||
@@ -452,21 +431,20 @@ class IliasPage:
|
|||||||
for p in paragraphs:
|
for p in paragraphs:
|
||||||
if p.find_parent(class_=is_interesting_class):
|
if p.find_parent(class_=is_interesting_class):
|
||||||
continue
|
continue
|
||||||
if "ilc_media_cont_MediaContainer" in p["class"]:
|
if "ilc_media_cont_MediaContainer" in p["class"] and (video := p.select_one("video")):
|
||||||
# We have an embedded video which should be downloaded by _find_mob_videos
|
# We have an embedded video which should be downloaded by _find_mob_videos
|
||||||
if video := p.select_one("video"):
|
url, title = self._find_mob_video_url_title(video, p)
|
||||||
url, title = self._find_mob_video_url_title(video, p)
|
raw_html += '<div style="min-width: 100px; min-height: 100px; border: 1px solid black;'
|
||||||
raw_html += '<div style="min-width: 100px; min-height: 100px; border: 1px solid black;'
|
raw_html += "display: flex; justify-content: center; align-items: center;"
|
||||||
raw_html += 'display: flex; justify-content: center; align-items: center;'
|
raw_html += ' margin: 0.5rem;">'
|
||||||
raw_html += ' margin: 0.5rem;">'
|
if url is not None and urlparse(url).hostname != urlparse(self._page_url).hostname:
|
||||||
if url is not None and urlparse(url).hostname != urlparse(self._page_url).hostname:
|
if url.startswith("//"):
|
||||||
if url.startswith("//"):
|
url = "https:" + url
|
||||||
url = "https:" + url
|
raw_html += f'<a href="{url}" target="_blank">External Video: {title}</a>'
|
||||||
raw_html += f'<a href="{url}" target="_blank">External Video: {title}</a>'
|
else:
|
||||||
else:
|
raw_html += f"Video elided. Filename: '{title}'."
|
||||||
raw_html += f"Video elided. Filename: '{title}'."
|
raw_html += "</div>\n"
|
||||||
raw_html += "</div>\n"
|
continue
|
||||||
continue
|
|
||||||
|
|
||||||
# Ignore special listings (like folder groupings)
|
# Ignore special listings (like folder groupings)
|
||||||
if "ilc_section_Special" in p["class"]:
|
if "ilc_section_Special" in p["class"]:
|
||||||
@@ -486,7 +464,7 @@ class IliasPage:
|
|||||||
title=title,
|
title=title,
|
||||||
content=content,
|
content=content,
|
||||||
next_url=self._find_learning_module_next(),
|
next_url=self._find_learning_module_next(),
|
||||||
previous_url=self._find_learning_module_prev()
|
previous_url=self._find_learning_module_prev(),
|
||||||
)
|
)
|
||||||
|
|
||||||
def _find_learning_module_next(self) -> Optional[str]:
|
def _find_learning_module_next(self) -> Optional[str]:
|
||||||
@@ -515,10 +493,7 @@ class IliasPage:
|
|||||||
base_url = re.sub(r"cmd=\w+", "cmd=post", base_url)
|
base_url = re.sub(r"cmd=\w+", "cmd=post", base_url)
|
||||||
base_url = re.sub(r"cmdClass=\w+", "cmdClass=ilExportGUI", base_url)
|
base_url = re.sub(r"cmdClass=\w+", "cmdClass=ilExportGUI", base_url)
|
||||||
|
|
||||||
rtoken_form = cast(
|
rtoken_form = self._soup.find("form", attrs={"action": lambda x: x is not None and "rtoken=" in x})
|
||||||
Optional[Tag],
|
|
||||||
self._soup.find("form", attrs={"action": lambda x: x is not None and "rtoken=" in x})
|
|
||||||
)
|
|
||||||
if not rtoken_form:
|
if not rtoken_form:
|
||||||
log.explain("Found no rtoken anywhere")
|
log.explain("Found no rtoken anywhere")
|
||||||
return None
|
return None
|
||||||
@@ -557,9 +532,7 @@ class IliasPage:
|
|||||||
return True
|
return True
|
||||||
|
|
||||||
# Raw listing without ILIAS fluff
|
# Raw listing without ILIAS fluff
|
||||||
video_element_table = self._soup.find(
|
video_element_table = self._soup.find(name="table", id=re.compile(r"tbl_xoct_.+"))
|
||||||
name="table", id=re.compile(r"tbl_xoct_.+")
|
|
||||||
)
|
|
||||||
return video_element_table is not None
|
return video_element_table is not None
|
||||||
|
|
||||||
def _is_ilias_opencast_embedding(self) -> bool:
|
def _is_ilias_opencast_embedding(self) -> bool:
|
||||||
@@ -600,24 +573,23 @@ class IliasPage:
|
|||||||
return self._uncollapse_future_meetings_url() is not None
|
return self._uncollapse_future_meetings_url() is not None
|
||||||
|
|
||||||
def _uncollapse_future_meetings_url(self) -> Optional[IliasPageElement]:
|
def _uncollapse_future_meetings_url(self) -> Optional[IliasPageElement]:
|
||||||
element = cast(Optional[Tag], self._soup.find(
|
element = self._soup.find(
|
||||||
"a",
|
"a",
|
||||||
attrs={"href": lambda x: x is not None and ("crs_next_sess=1" in x or "crs_prev_sess=1" in x)}
|
attrs={"href": lambda x: x is not None and ("crs_next_sess=1" in x or "crs_prev_sess=1" in x)},
|
||||||
))
|
)
|
||||||
if not element:
|
if not element:
|
||||||
return None
|
return None
|
||||||
link = self._abs_url_from_link(element)
|
link = self._abs_url_from_link(element)
|
||||||
return IliasPageElement.create_new(IliasElementType.FOLDER, link, "show all meetings")
|
return IliasPageElement.create_new(IliasElementType.FOLDER, link, "show all meetings")
|
||||||
|
|
||||||
def _is_exercise_not_all_shown(self) -> bool:
|
def _is_exercise_not_all_shown(self) -> bool:
|
||||||
return (self._page_type == IliasElementType.EXERCISE_OVERVIEW
|
return (
|
||||||
and "mode=all" not in self._page_url.lower())
|
self._page_type == IliasElementType.EXERCISE_OVERVIEW and "mode=all" not in self._page_url.lower()
|
||||||
|
)
|
||||||
|
|
||||||
def _show_all_exercises(self) -> Optional[IliasPageElement]:
|
def _show_all_exercises(self) -> Optional[IliasPageElement]:
|
||||||
return IliasPageElement.create_new(
|
return IliasPageElement.create_new(
|
||||||
IliasElementType.EXERCISE_OVERVIEW,
|
IliasElementType.EXERCISE_OVERVIEW, self._page_url + "&mode=all", "show all exercises"
|
||||||
self._page_url + "&mode=all",
|
|
||||||
"show all exercises"
|
|
||||||
)
|
)
|
||||||
|
|
||||||
def _is_content_tab_selected(self) -> bool:
|
def _is_content_tab_selected(self) -> bool:
|
||||||
@@ -631,14 +603,13 @@ class IliasPage:
|
|||||||
return "baseClass=ilmembershipoverviewgui" in self._page_url
|
return "baseClass=ilmembershipoverviewgui" in self._page_url
|
||||||
|
|
||||||
def _select_content_page_url(self) -> Optional[IliasPageElement]:
|
def _select_content_page_url(self) -> Optional[IliasPageElement]:
|
||||||
tab = cast(Optional[Tag], self._soup.find(
|
tab = self._soup.find(
|
||||||
id="tab_view_content",
|
id="tab_view_content", attrs={"class": lambda x: x is not None and "active" not in x}
|
||||||
attrs={"class": lambda x: x is not None and "active" not in x}
|
)
|
||||||
))
|
|
||||||
# Already selected (or not found)
|
# Already selected (or not found)
|
||||||
if not tab:
|
if not tab:
|
||||||
return None
|
return None
|
||||||
link = cast(Optional[Tag], tab.find("a"))
|
link = tab.find("a")
|
||||||
if link:
|
if link:
|
||||||
link_str = self._abs_url_from_link(link)
|
link_str = self._abs_url_from_link(link)
|
||||||
return IliasPageElement.create_new(IliasElementType.FOLDER, link_str, "select content page")
|
return IliasPageElement.create_new(IliasElementType.FOLDER, link_str, "select content page")
|
||||||
@@ -654,9 +625,7 @@ class IliasPage:
|
|||||||
# on the page, but defined in a JS object inside a script tag, passed to the player
|
# on the page, but defined in a JS object inside a script tag, passed to the player
|
||||||
# library.
|
# library.
|
||||||
# We do the impossible and RegEx the stream JSON object out of the page's HTML source
|
# We do the impossible and RegEx the stream JSON object out of the page's HTML source
|
||||||
regex = re.compile(
|
regex = re.compile(r"({\"streams\"[\s\S]+?),\s*{\"paella_config_file", re.IGNORECASE)
|
||||||
r"({\"streams\"[\s\S]+?),\s*{\"paella_config_file", re.IGNORECASE
|
|
||||||
)
|
|
||||||
json_match = regex.search(str(self._soup))
|
json_match = regex.search(str(self._soup))
|
||||||
|
|
||||||
if json_match is None:
|
if json_match is None:
|
||||||
@@ -687,10 +656,9 @@ class IliasPage:
|
|||||||
def _get_show_max_forum_entries_per_page_url(
|
def _get_show_max_forum_entries_per_page_url(
|
||||||
self, wanted_max: Optional[int] = None
|
self, wanted_max: Optional[int] = None
|
||||||
) -> Optional[IliasPageElement]:
|
) -> Optional[IliasPageElement]:
|
||||||
correct_link = cast(Optional[Tag], self._soup.find(
|
correct_link = self._soup.find(
|
||||||
"a",
|
"a", attrs={"href": lambda x: x is not None and "trows=800" in x and "cmd=showThreads" in x}
|
||||||
attrs={"href": lambda x: x is not None and "trows=800" in x and "cmd=showThreads" in x}
|
)
|
||||||
))
|
|
||||||
|
|
||||||
if not correct_link:
|
if not correct_link:
|
||||||
return None
|
return None
|
||||||
@@ -721,7 +689,7 @@ class IliasPage:
|
|||||||
|
|
||||||
titles: list[Tag] = self._soup.select("#block_pditems_0 .il-item-title")
|
titles: list[Tag] = self._soup.select("#block_pditems_0 .il-item-title")
|
||||||
for title in titles:
|
for title in titles:
|
||||||
link = cast(Optional[Tag], title.find("a"))
|
link = title.find("a")
|
||||||
|
|
||||||
if not link:
|
if not link:
|
||||||
log.explain(f"Skipping offline item: {title.get_text().strip()!r}")
|
log.explain(f"Skipping offline item: {title.get_text().strip()!r}")
|
||||||
@@ -775,11 +743,11 @@ class IliasPage:
|
|||||||
continue
|
continue
|
||||||
if "cmd=sendfile" not in link["href"]:
|
if "cmd=sendfile" not in link["href"]:
|
||||||
continue
|
continue
|
||||||
items.append(IliasPageElement.create_new(
|
items.append(
|
||||||
IliasElementType.FILE,
|
IliasPageElement.create_new(
|
||||||
self._abs_url_from_link(link),
|
IliasElementType.FILE, self._abs_url_from_link(link), _sanitize_path_name(link.get_text())
|
||||||
_sanitize_path_name(link.get_text())
|
)
|
||||||
))
|
)
|
||||||
|
|
||||||
return items
|
return items
|
||||||
|
|
||||||
@@ -791,9 +759,7 @@ class IliasPage:
|
|||||||
#
|
#
|
||||||
# We need to figure out where we are.
|
# We need to figure out where we are.
|
||||||
|
|
||||||
video_element_table = cast(Optional[Tag], self._soup.find(
|
video_element_table = self._soup.find(name="table", id=re.compile(r"tbl_xoct_.+"))
|
||||||
name="table", id=re.compile(r"tbl_xoct_.+")
|
|
||||||
))
|
|
||||||
|
|
||||||
if video_element_table is None:
|
if video_element_table is None:
|
||||||
# We are in stage 1
|
# We are in stage 1
|
||||||
@@ -809,14 +775,14 @@ class IliasPage:
|
|||||||
|
|
||||||
is_paginated = self._soup.find(id=re.compile(r"tab_page_sel.+")) is not None
|
is_paginated = self._soup.find(id=re.compile(r"tab_page_sel.+")) is not None
|
||||||
|
|
||||||
if is_paginated and not self._page_type == IliasElementType.OPENCAST_VIDEO_FOLDER:
|
if is_paginated and self._page_type != IliasElementType.OPENCAST_VIDEO_FOLDER:
|
||||||
# We are in stage 2 - try to break pagination
|
# We are in stage 2 - try to break pagination
|
||||||
return self._find_opencast_video_entries_paginated()
|
return self._find_opencast_video_entries_paginated()
|
||||||
|
|
||||||
return self._find_opencast_video_entries_no_paging()
|
return self._find_opencast_video_entries_no_paging()
|
||||||
|
|
||||||
def _find_opencast_video_entries_paginated(self) -> list[IliasPageElement]:
|
def _find_opencast_video_entries_paginated(self) -> list[IliasPageElement]:
|
||||||
table_element = cast(Optional[Tag], self._soup.find(name="table", id=re.compile(r"tbl_xoct_.+")))
|
table_element = self._soup.find(name="table", id=re.compile(r"tbl_xoct_.+"))
|
||||||
|
|
||||||
if table_element is None:
|
if table_element is None:
|
||||||
log.warn("Couldn't increase elements per page (table not found). I might miss elements.")
|
log.warn("Couldn't increase elements per page (table not found). I might miss elements.")
|
||||||
@@ -829,8 +795,7 @@ class IliasPage:

         table_id = id_match.group(1)

-        query_params = {f"tbl_xoct_{table_id}_trows": "800",
-                        "cmd": "asyncGetTableGUI", "cmdMode": "asynch"}
+        query_params = {f"tbl_xoct_{table_id}_trows": "800", "cmd": "asyncGetTableGUI", "cmdMode": "asynch"}
         url = url_set_query_params(self._page_url, query_params)

         log.explain("Disabled pagination, retrying folder as a new entry")
|
||||||
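For context, the pagination workaround above only rewrites the current URL with a large row count plus the async table command. A rough standard-library sketch of that query rewriting (URL and table id are made up; PFERD's own `url_set_query_params` is the real helper):

```
from urllib.parse import parse_qs, urlencode, urlsplit, urlunsplit

def set_query_params(url: str, params: dict[str, str]) -> str:
    """Return `url` with the given query parameters added or overwritten."""
    scheme, netloc, path, query, fragment = urlsplit(url)
    merged = {k: v[-1] for k, v in parse_qs(query).items()}
    merged.update(params)
    return urlunsplit((scheme, netloc, path, urlencode(merged), fragment))

page_url = "https://ilias.example.invalid/ilias.php?baseClass=xoct&cmd=showContent"
table_id = "abc123"  # would normally come from the table element's id attribute
print(set_query_params(page_url, {f"tbl_xoct_{table_id}_trows": "800", "cmd": "asyncGetTableGUI", "cmdMode": "asynch"}))
```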
@@ -841,9 +806,9 @@ class IliasPage:
         Crawls the "second stage" video page. This page contains the actual video urls.
         """
         # Video start links are marked with an "Abspielen" link
-        video_links = cast(list[Tag], self._soup.find_all(
-            name="a", text=re.compile(r"\s*(Abspielen|Play)\s*")
-        ))
+        video_links = cast(
+            list[Tag], self._soup.find_all(name="a", text=re.compile(r"\s*(Abspielen|Play)\s*"))
+        )

         results: list[IliasPageElement] = []
|
||||||
|
|
||||||
@@ -860,9 +825,7 @@ class IliasPage:
             row: Tag = link.parent.parent.parent  # type: ignore
             column_count = len(row.select("td.std"))
             for index in range(column_count, 0, -1):
-                modification_string = link.parent.parent.parent.select_one(  # type: ignore
-                    f"td.std:nth-child({index})"
-                ).get_text().strip()
+                modification_string = cast(Tag, row.select_one(f"td.std:nth-child({index})")).get_text().strip()
                 if match := re.search(r"\d+\.\d+.\d+ \d+:\d+", modification_string):
                     modification_time = datetime.strptime(match.group(0), "%d.%m.%Y %H:%M")
                     break
|
||||||
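The upload time scraping in this hunk is just a date regex plus `strptime`; as a stand-alone example with an invented cell text:

```
import re
from datetime import datetime

cell_text = "Vorlesung 04 - 12.05.2023 14:30"  # invented table cell content

if match := re.search(r"\d+\.\d+.\d+ \d+:\d+", cell_text):
    print(datetime.strptime(match.group(0), "%d.%m.%Y %H:%M"))  # 2023-05-12 14:30:00
```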
@@ -871,7 +834,7 @@ class IliasPage:
|
|||||||
log.warn(f"Could not determine upload time for {link}")
|
log.warn(f"Could not determine upload time for {link}")
|
||||||
modification_time = datetime.now()
|
modification_time = datetime.now()
|
||||||
|
|
||||||
title = link.parent.parent.parent.select_one("td.std:nth-child(3)").get_text().strip() # type: ignore
|
title = cast(Tag, row.select_one("td.std:nth-child(3)")).get_text().strip()
|
||||||
title += ".mp4"
|
title += ".mp4"
|
||||||
|
|
||||||
video_name: str = _sanitize_path_name(title)
|
video_name: str = _sanitize_path_name(title)
|
||||||
@@ -899,27 +862,31 @@ class IliasPage:
|
|||||||
def _find_exercise_entries_detail_page(self) -> list[IliasPageElement]:
|
def _find_exercise_entries_detail_page(self) -> list[IliasPageElement]:
|
||||||
results: list[IliasPageElement] = []
|
results: list[IliasPageElement] = []
|
||||||
|
|
||||||
if link := cast(Optional[Tag], self._soup.select_one("#tab_submission > a")):
|
if link := self._soup.select_one("#tab_submission > a"):
|
||||||
results.append(IliasPageElement.create_new(
|
results.append(
|
||||||
IliasElementType.EXERCISE_FILES,
|
IliasPageElement.create_new(
|
||||||
self._abs_url_from_link(link),
|
IliasElementType.EXERCISE_FILES, self._abs_url_from_link(link), "Submission"
|
||||||
"Submission"
|
)
|
||||||
))
|
)
|
||||||
else:
|
else:
|
||||||
log.explain("Found no submission link for exercise, maybe it has not started yet?")
|
log.explain("Found no submission link for exercise, maybe it has not started yet?")
|
||||||
|
|
||||||
# Find all download links in the container (this will contain all the *feedback* files)
|
# Find all download links in the container (this will contain all the *feedback* files)
|
||||||
download_links = cast(list[Tag], self._soup.find_all(
|
download_links = cast(
|
||||||
name="a",
|
list[Tag],
|
||||||
# download links contain the given command class
|
self._soup.find_all(
|
||||||
attrs={"href": lambda x: x is not None and "cmd=download" in x},
|
name="a",
|
||||||
text="Download"
|
# download links contain the given command class
|
||||||
))
|
attrs={"href": lambda x: x is not None and "cmd=download" in x},
|
||||||
|
text="Download",
|
||||||
|
),
|
||||||
|
)
|
||||||
|
|
||||||
for link in download_links:
|
for link in download_links:
|
||||||
parent_row: Tag = cast(Tag, link.find_parent(
|
parent_row: Tag = cast(
|
||||||
attrs={"class": lambda x: x is not None and "row" in x}))
|
Tag, link.find_parent(attrs={"class": lambda x: x is not None and "row" in x})
|
||||||
name_tag = cast(Optional[Tag], parent_row.find(name="div"))
|
)
|
||||||
|
name_tag = parent_row.find(name="div")
|
||||||
|
|
||||||
if not name_tag:
|
if not name_tag:
|
||||||
log.warn("Could not find name tag for exercise entry")
|
log.warn("Could not find name tag for exercise entry")
|
||||||
@@ -929,11 +896,9 @@ class IliasPage:
|
|||||||
name = _sanitize_path_name(name_tag.get_text().strip())
|
name = _sanitize_path_name(name_tag.get_text().strip())
|
||||||
log.explain(f"Found exercise detail entry {name!r}")
|
log.explain(f"Found exercise detail entry {name!r}")
|
||||||
|
|
||||||
results.append(IliasPageElement.create_new(
|
results.append(
|
||||||
IliasElementType.FILE,
|
IliasPageElement.create_new(IliasElementType.FILE, self._abs_url_from_link(link), name)
|
||||||
self._abs_url_from_link(link),
|
)
|
||||||
name
|
|
||||||
))
|
|
||||||
|
|
||||||
return results
|
return results
|
||||||
|
|
||||||
@@ -941,12 +906,15 @@ class IliasPage:
|
|||||||
results: list[IliasPageElement] = []
|
results: list[IliasPageElement] = []
|
||||||
|
|
||||||
# Find all download links in the container
|
# Find all download links in the container
|
||||||
download_links = cast(list[Tag], self._soup.find_all(
|
download_links = cast(
|
||||||
name="a",
|
list[Tag],
|
||||||
# download links contain the given command class
|
self._soup.find_all(
|
||||||
attrs={"href": lambda x: x is not None and "cmd=download" in x},
|
name="a",
|
||||||
text="Download"
|
# download links contain the given command class
|
||||||
))
|
attrs={"href": lambda x: x is not None and "cmd=download" in x},
|
||||||
|
text="Download",
|
||||||
|
),
|
||||||
|
)
|
||||||
|
|
||||||
for link in download_links:
|
for link in download_links:
|
||||||
parent_row: Tag = cast(Tag, link.find_parent("tr"))
|
parent_row: Tag = cast(Tag, link.find_parent("tr"))
|
||||||
@@ -963,19 +931,16 @@ class IliasPage:
|
|||||||
if date is None:
|
if date is None:
|
||||||
log.warn(f"Date parsing failed for exercise file entry {name!r}")
|
log.warn(f"Date parsing failed for exercise file entry {name!r}")
|
||||||
|
|
||||||
results.append(IliasPageElement.create_new(
|
results.append(
|
||||||
IliasElementType.FILE,
|
IliasPageElement.create_new(IliasElementType.FILE, self._abs_url_from_link(link), name, date)
|
||||||
self._abs_url_from_link(link),
|
)
|
||||||
name,
|
|
||||||
date
|
|
||||||
))
|
|
||||||
|
|
||||||
return results
|
return results
|
||||||
|
|
||||||
def _find_exercise_entries_root_page(self) -> list[IliasPageElement]:
|
def _find_exercise_entries_root_page(self) -> list[IliasPageElement]:
|
||||||
results: list[IliasPageElement] = []
|
results: list[IliasPageElement] = []
|
||||||
|
|
||||||
content_tab = cast(Optional[Tag], self._soup.find(id="ilContentContainer"))
|
content_tab = self._soup.find(id="ilContentContainer")
|
||||||
if not content_tab:
|
if not content_tab:
|
||||||
log.warn("Could not find content tab in exercise overview page")
|
log.warn("Could not find content tab in exercise overview page")
|
||||||
_unexpected_html_warning()
|
_unexpected_html_warning()
|
||||||
@@ -993,11 +958,11 @@ class IliasPage:
|
|||||||
continue
|
continue
|
||||||
|
|
||||||
name = _sanitize_path_name(exercise.get_text().strip())
|
name = _sanitize_path_name(exercise.get_text().strip())
|
||||||
results.append(IliasPageElement.create_new(
|
results.append(
|
||||||
IliasElementType.EXERCISE,
|
IliasPageElement.create_new(
|
||||||
self._abs_url_from_link(exercise),
|
IliasElementType.EXERCISE, self._abs_url_from_link(exercise), name
|
||||||
name
|
)
|
||||||
))
|
)
|
||||||
|
|
||||||
for result in results:
|
for result in results:
|
||||||
log.explain(f"Found exercise {result.name!r}")
|
log.explain(f"Found exercise {result.name!r}")
|
||||||
@@ -1043,13 +1008,11 @@ class IliasPage:
|
|||||||
continue
|
continue
|
||||||
|
|
||||||
log.explain(f"Found {element_name!r} of type {element_type}")
|
log.explain(f"Found {element_name!r} of type {element_type}")
|
||||||
result.append(IliasPageElement.create_new(
|
result.append(
|
||||||
element_type,
|
IliasPageElement.create_new(
|
||||||
abs_url,
|
element_type, abs_url, element_name, description=description, skip_sanitize=True
|
||||||
element_name,
|
)
|
||||||
description=description,
|
)
|
||||||
skip_sanitize=True
|
|
||||||
))
|
|
||||||
|
|
||||||
result += self._find_cards()
|
result += self._find_cards()
|
||||||
result += self._find_mediacast_videos()
|
result += self._find_mediacast_videos()
|
||||||
@@ -1086,11 +1049,13 @@ class IliasPage:
|
|||||||
if not title.endswith(".mp4") and not title.endswith(".webm"):
|
if not title.endswith(".mp4") and not title.endswith(".webm"):
|
||||||
# just to make sure it has some kinda-alrightish ending
|
# just to make sure it has some kinda-alrightish ending
|
||||||
title = title + ".mp4"
|
title = title + ".mp4"
|
||||||
videos.append(IliasPageElement.create_new(
|
videos.append(
|
||||||
typ=IliasElementType.MEDIACAST_VIDEO,
|
IliasPageElement.create_new(
|
||||||
url=self._abs_url_from_relative(cast(str, url)),
|
typ=IliasElementType.MEDIACAST_VIDEO,
|
||||||
name=_sanitize_path_name(title)
|
url=self._abs_url_from_relative(cast(str, url)),
|
||||||
))
|
name=_sanitize_path_name(title),
|
||||||
|
)
|
||||||
|
)
|
||||||
|
|
||||||
return videos
|
return videos
|
||||||
|
|
||||||
@@ -1114,12 +1079,11 @@ class IliasPage:
|
|||||||
log.explain(f"Found external video at {url}, ignoring")
|
log.explain(f"Found external video at {url}, ignoring")
|
||||||
continue
|
continue
|
||||||
|
|
||||||
videos.append(IliasPageElement.create_new(
|
videos.append(
|
||||||
typ=IliasElementType.MOB_VIDEO,
|
IliasPageElement.create_new(
|
||||||
url=url,
|
typ=IliasElementType.MOB_VIDEO, url=url, name=_sanitize_path_name(title), mtime=None
|
||||||
name=_sanitize_path_name(title),
|
)
|
||||||
mtime=None
|
)
|
||||||
))
|
|
||||||
|
|
||||||
return videos
|
return videos
|
||||||
|
|
||||||
@@ -1133,7 +1097,7 @@ class IliasPage:
|
|||||||
if url is None and video_element.get("src"):
|
if url is None and video_element.get("src"):
|
||||||
url = cast(Optional[str], video_element.get("src"))
|
url = cast(Optional[str], video_element.get("src"))
|
||||||
|
|
||||||
fig_caption = cast(Optional[Tag], figure.select_one("figcaption"))
|
fig_caption = figure.select_one("figcaption")
|
||||||
if fig_caption:
|
if fig_caption:
|
||||||
title = cast(Tag, figure.select_one("figcaption")).get_text().strip() + ".mp4"
|
title = cast(Tag, figure.select_one("figcaption")).get_text().strip() + ".mp4"
|
||||||
elif url is not None:
|
elif url is not None:
|
||||||
@@ -1161,11 +1125,11 @@ class IliasPage:
|
|||||||
|
|
||||||
# We should not crawl files under meetings
|
# We should not crawl files under meetings
|
||||||
if "ilContainerListItemContentCB" in cast(str, parent.get("class")):
|
if "ilContainerListItemContentCB" in cast(str, parent.get("class")):
|
||||||
link: Tag = parent.parent.find("a") # type: ignore
|
link: Tag = cast(Tag, cast(Tag, parent.parent).find("a"))
|
||||||
typ = IliasPage._find_type_for_element(
|
typ = IliasPage._find_type_for_element(
|
||||||
"meeting",
|
"meeting",
|
||||||
self._abs_url_from_link(link),
|
self._abs_url_from_link(link),
|
||||||
lambda: IliasPage._find_icon_for_folder_entry(link)
|
lambda: IliasPage._find_icon_for_folder_entry(link),
|
||||||
)
|
)
|
||||||
return typ == IliasElementType.MEETING
|
return typ == IliasElementType.MEETING
|
||||||
|
|
||||||
@@ -1179,6 +1143,6 @@ class IliasPage:
         """
         found_titles = []

         outer_accordion_content: Optional[Tag] = None

         parents: list[Tag] = list(tag.parents)
|
||||||
@@ -1191,9 +1158,11 @@ class IliasPage:
|
|||||||
|
|
||||||
# This is for these weird JS-y blocks and custom item groups
|
# This is for these weird JS-y blocks and custom item groups
|
||||||
if "ilContainerItemsContainer" in cast(str, parent.get("class")):
|
if "ilContainerItemsContainer" in cast(str, parent.get("class")):
|
||||||
data_store_url = parent.parent.get("data-store-url", "").lower() # type: ignore
|
data_store_url = cast(str, cast(Tag, parent.parent).get("data-store-url", "")).lower()
|
||||||
is_custom_item_group = "baseclass=ilcontainerblockpropertiesstoragegui" in data_store_url \
|
is_custom_item_group = (
|
||||||
and "cont_block_id=" in data_store_url
|
"baseclass=ilcontainerblockpropertiesstoragegui" in data_store_url
|
||||||
|
and "cont_block_id=" in data_store_url
|
||||||
|
)
|
||||||
# I am currently under the impression that *only* those JS blocks have an
|
# I am currently under the impression that *only* those JS blocks have an
|
||||||
# ilNoDisplay class.
|
# ilNoDisplay class.
|
||||||
if not is_custom_item_group and "ilNoDisplay" not in cast(str, parent.get("class")):
|
if not is_custom_item_group and "ilNoDisplay" not in cast(str, parent.get("class")):
|
||||||
@@ -1212,11 +1181,15 @@ class IliasPage:
|
|||||||
|
|
||||||
if outer_accordion_content:
|
if outer_accordion_content:
|
||||||
accordion_tag = cast(Tag, outer_accordion_content.parent)
|
accordion_tag = cast(Tag, outer_accordion_content.parent)
|
||||||
head_tag = cast(Tag, accordion_tag.find(attrs={
|
head_tag = cast(
|
||||||
"class": lambda x: x is not None and (
|
Tag,
|
||||||
"ilc_va_ihead_VAccordIHead" in x or "ilc_va_ihead_AccordIHead" in x
|
accordion_tag.find(
|
||||||
)
|
attrs={
|
||||||
}))
|
"class": lambda x: x is not None
|
||||||
|
and ("ilc_va_ihead_VAccordIHead" in x or "ilc_va_ihead_AccordIHead" in x)
|
||||||
|
}
|
||||||
|
),
|
||||||
|
)
|
||||||
found_titles.append(head_tag.get_text().strip())
|
found_titles.append(head_tag.get_text().strip())
|
||||||
|
|
||||||
return [_sanitize_path_name(x) for x in reversed(found_titles)]
|
return [_sanitize_path_name(x) for x in reversed(found_titles)]
|
||||||
@@ -1224,14 +1197,12 @@ class IliasPage:
|
|||||||
@staticmethod
|
@staticmethod
|
||||||
def _find_link_description(link: Tag) -> Optional[str]:
|
def _find_link_description(link: Tag) -> Optional[str]:
|
||||||
tile = cast(
|
tile = cast(
|
||||||
Tag,
|
Tag, link.find_parent("div", {"class": lambda x: x is not None and "il_ContainerListItem" in x})
|
||||||
link.find_parent("div", {"class": lambda x: x is not None and "il_ContainerListItem" in x})
|
|
||||||
)
|
)
|
||||||
if not tile:
|
if not tile:
|
||||||
return None
|
return None
|
||||||
description_element = cast(
|
description_element = cast(
|
||||||
Tag,
|
Tag, tile.find("div", {"class": lambda x: x is not None and "il_Description" in x})
|
||||||
tile.find("div", {"class": lambda x: x is not None and "il_Description" in x})
|
|
||||||
)
|
)
|
||||||
if not description_element:
|
if not description_element:
|
||||||
return None
|
return None
|
||||||
@@ -1242,9 +1213,15 @@ class IliasPage:
|
|||||||
# Files have a list of properties (type, modification date, size, etc.)
|
# Files have a list of properties (type, modification date, size, etc.)
|
||||||
# In a series of divs.
|
# In a series of divs.
|
||||||
# Find the parent containing all those divs, so we can filter out what we need
|
# Find the parent containing all those divs, so we can filter out what we need
|
||||||
properties_parent = cast(Tag, cast(Tag, link_element.find_parent(
|
properties_parent = cast(
|
||||||
"div", {"class": lambda x: "il_ContainerListItem" in x}
|
Tag,
|
||||||
)).select_one(".il_ItemProperties"))
|
cast(
|
||||||
|
Tag,
|
||||||
|
link_element.find_parent(
|
||||||
|
"div", {"class": lambda x: x is not None and "il_ContainerListItem" in x}
|
||||||
|
),
|
||||||
|
).select_one(".il_ItemProperties"),
|
||||||
|
)
|
||||||
# The first one is always the filetype
|
# The first one is always the filetype
|
||||||
file_type = cast(Tag, properties_parent.select_one("span.il_ItemProperty")).get_text().strip()
|
file_type = cast(Tag, properties_parent.select_one("span.il_ItemProperty")).get_text().strip()
|
||||||
|
|
||||||
@@ -1271,9 +1248,7 @@ class IliasPage:
|
|||||||
for title in card_titles:
|
for title in card_titles:
|
||||||
url = self._abs_url_from_link(title)
|
url = self._abs_url_from_link(title)
|
||||||
name = _sanitize_path_name(title.get_text().strip())
|
name = _sanitize_path_name(title.get_text().strip())
|
||||||
typ = IliasPage._find_type_for_element(
|
typ = IliasPage._find_type_for_element(name, url, lambda: IliasPage._find_icon_from_card(title))
|
||||||
name, url, lambda: IliasPage._find_icon_from_card(title)
|
|
||||||
)
|
|
||||||
|
|
||||||
if not typ:
|
if not typ:
|
||||||
_unexpected_html_warning()
|
_unexpected_html_warning()
|
||||||
@@ -1300,18 +1275,16 @@ class IliasPage:
|
|||||||
continue
|
continue
|
||||||
url = self._abs_url_from_relative(open_match.group(1))
|
url = self._abs_url_from_relative(open_match.group(1))
|
||||||
name = _sanitize_path_name(button.get_text().strip())
|
name = _sanitize_path_name(button.get_text().strip())
|
||||||
typ = IliasPage._find_type_for_element(
|
typ = IliasPage._find_type_for_element(name, url, lambda: IliasPage._find_icon_from_card(button))
|
||||||
name, url, lambda: IliasPage._find_icon_from_card(button)
|
caption_parent = cast(
|
||||||
|
Tag,
|
||||||
|
button.find_parent(
|
||||||
|
"div",
|
||||||
|
attrs={"class": lambda x: x is not None and "caption" in x},
|
||||||
|
),
|
||||||
)
|
)
|
||||||
caption_parent = cast(Tag, button.find_parent(
|
|
||||||
"div",
|
|
||||||
attrs={"class": lambda x: x is not None and "caption" in x},
|
|
||||||
))
|
|
||||||
caption_container = caption_parent.find_next_sibling("div")
|
caption_container = caption_parent.find_next_sibling("div")
|
||||||
if caption_container:
|
description = caption_container.get_text().strip() if caption_container else None
|
||||||
description = caption_container.get_text().strip()
|
|
||||||
else:
|
|
||||||
description = None
|
|
||||||
|
|
||||||
if not typ:
|
if not typ:
|
||||||
_unexpected_html_warning()
|
_unexpected_html_warning()
|
||||||
@@ -1377,9 +1350,7 @@ class IliasPage:
|
|||||||
|
|
||||||
if found_parent is None:
|
if found_parent is None:
|
||||||
_unexpected_html_warning()
|
_unexpected_html_warning()
|
||||||
log.warn_contd(
|
log.warn_contd(f"Tried to figure out element type, but did not find an icon for {link_element!r}")
|
||||||
f"Tried to figure out element type, but did not find an icon for {link_element!r}"
|
|
||||||
)
|
|
||||||
return None
|
return None
|
||||||
|
|
||||||
# Find the small descriptive icon to figure out the type
|
# Find the small descriptive icon to figure out the type
|
||||||
@@ -1389,8 +1360,7 @@ class IliasPage:
|
|||||||
img_tag = found_parent.select_one("img.icon")
|
img_tag = found_parent.select_one("img.icon")
|
||||||
|
|
||||||
is_session_expansion_button = found_parent.find(
|
is_session_expansion_button = found_parent.find(
|
||||||
"a",
|
"a", attrs={"href": lambda x: x is not None and ("crs_next_sess=" in x or "crs_prev_sess=" in x)}
|
||||||
attrs={"href": lambda x: x is not None and ("crs_next_sess=" in x or "crs_prev_sess=" in x)}
|
|
||||||
)
|
)
|
||||||
if img_tag is None and is_session_expansion_button:
|
if img_tag is None and is_session_expansion_button:
|
||||||
log.explain("Found session expansion button, skipping it as it has no content")
|
log.explain("Found session expansion button, skipping it as it has no content")
|
||||||
@@ -1426,7 +1396,7 @@ class IliasPage:
|
|||||||
def is_logged_in(ilias_soup: IliasSoup) -> bool:
|
def is_logged_in(ilias_soup: IliasSoup) -> bool:
|
||||||
soup = ilias_soup.soup
|
soup = ilias_soup.soup
|
||||||
# Normal ILIAS pages
|
# Normal ILIAS pages
|
||||||
mainbar = cast(Optional[Tag], soup.find(class_="il-maincontrols-metabar"))
|
mainbar = soup.find(class_="il-maincontrols-metabar")
|
||||||
if mainbar is not None:
|
if mainbar is not None:
|
||||||
login_button = mainbar.find(attrs={"href": lambda x: x is not None and "login.php" in x})
|
login_button = mainbar.find(attrs={"href": lambda x: x is not None and "login.php" in x})
|
||||||
shib_login = soup.find(id="button_shib_login")
|
shib_login = soup.find(id="button_shib_login")
|
||||||
@@ -1447,23 +1417,18 @@ class IliasPage:
         # Video listing embeds do not have complete ILIAS html. Try to match them by
         # their video listing table
         video_table = soup.find(
-            recursive=True,
-            name="table",
-            attrs={"id": lambda x: x is not None and x.startswith("tbl_xoct")}
+            recursive=True, name="table", attrs={"id": lambda x: x is not None and x.startswith("tbl_xoct")}
         )
         if video_table is not None:
             return True
         # The individual video player wrapper page has nothing of the above.
         # Match it by its playerContainer.
-        if soup.select_one("#playerContainer") is not None:
-            return True
-        return False
+        return soup.select_one("#playerContainer") is not None

     @staticmethod
     def _find_date_in_text(text: str) -> Optional[datetime]:
         modification_date_match = re.search(
-            r"(((\d+\. \w+ \d+)|(Gestern|Yesterday)|(Heute|Today)|(Morgen|Tomorrow)), \d+:\d+)",
-            text
+            r"(((\d+\. \w+ \d+)|(Gestern|Yesterday)|(Heute|Today)|(Morgen|Tomorrow)), \d+:\d+)", text
         )
         if modification_date_match is not None:
             modification_date_str = modification_date_match.group(1)
|
||||||
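The login check above falls back to structural markers (an `xoct` video table or the player container) when the usual metabar is missing. A toy illustration of the same soup probing, on invented HTML:

```
from bs4 import BeautifulSoup

html = '<html><body><table id="tbl_xoct_12ab"></table></body></html>'  # invented embed page
soup = BeautifulSoup(html, "html.parser")

video_table = soup.find(
    recursive=True, name="table", attrs={"id": lambda x: x is not None and x.startswith("tbl_xoct")}
)
print(video_table is not None or soup.select_one("#playerContainer") is not None)  # True
```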
@@ -1501,8 +1466,8 @@ def _unexpected_html_warning() -> None:
     log.warn("Encountered unexpected HTML structure, ignoring element.")


-german_months = ['Jan', 'Feb', 'Mär', 'Apr', 'Mai', 'Jun', 'Jul', 'Aug', 'Sep', 'Okt', 'Nov', 'Dez']
-english_months = ['Jan', 'Feb', 'Mar', 'Apr', 'May', 'Jun', 'Jul', 'Aug', 'Sep', 'Oct', 'Nov', 'Dec']
+german_months = ["Jan", "Feb", "Mär", "Apr", "Mai", "Jun", "Jul", "Aug", "Sep", "Okt", "Nov", "Dez"]
+english_months = ["Jan", "Feb", "Mar", "Apr", "May", "Jun", "Jul", "Aug", "Sep", "Oct", "Nov", "Dec"]


 def demangle_date(date_str: str, fail_silently: bool = False) -> Optional[datetime]:
|
||||||
@@ -1517,11 +1482,11 @@ def demangle_date(date_str: str, fail_silently: bool = False) -> Optional[dateti
     # Normalize whitespace because users
     date_str = re.sub(r"\s+", " ", date_str)

-    date_str = re.sub("Gestern|Yesterday", _format_date_english(_yesterday()), date_str, re.I)
-    date_str = re.sub("Heute|Today", _format_date_english(date.today()), date_str, re.I)
-    date_str = re.sub("Morgen|Tomorrow", _format_date_english(_tomorrow()), date_str, re.I)
+    date_str = re.sub("Gestern|Yesterday", _format_date_english(_yesterday()), date_str, flags=re.I)
+    date_str = re.sub("Heute|Today", _format_date_english(date.today()), date_str, flags=re.I)
+    date_str = re.sub("Morgen|Tomorrow", _format_date_english(_tomorrow()), date_str, flags=re.I)
     date_str = date_str.strip()
-    for german, english in zip(german_months, english_months):
+    for german, english in zip(german_months, english_months, strict=True):
         date_str = date_str.replace(german, english)
         # Remove trailing dots for abbreviations, e.g. "20. Apr. 2020" -> "20. Apr 2020"
         date_str = date_str.replace(english + ".", english)
|
||||||
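`demangle_date` keeps working by normalizing whitespace, replacing relative words and German month names, and only then calling `strptime`. A compressed sketch of that pipeline (simplified; the real function handles more shapes and can fail silently):

```
import re
from datetime import date, datetime

german_months = ["Jan", "Feb", "Mär", "Apr", "Mai", "Jun", "Jul", "Aug", "Sep", "Okt", "Nov", "Dez"]
english_months = ["Jan", "Feb", "Mar", "Apr", "May", "Jun", "Jul", "Aug", "Sep", "Oct", "Nov", "Dec"]

def demangle(date_str: str) -> datetime:
    date_str = re.sub(r"\s+", " ", date_str).strip()
    # "Heute, 13:37" becomes "<today's date>, 13:37", mirroring the re.sub calls in the diff
    date_str = re.sub("Heute|Today", date.today().strftime("%d. %b %Y"), date_str, flags=re.I)
    for german, english in zip(german_months, english_months, strict=True):
        date_str = date_str.replace(german, english).replace(english + ".", english)
    return datetime.strptime(date_str, "%d. %b %Y, %H:%M")

print(demangle("3. Mär 2024, 13:37"))
```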
@@ -1575,11 +1540,11 @@ def parse_ilias_forum_export(forum_export: BeautifulSoup) -> list[IliasForumThre
|
|||||||
elements = []
|
elements = []
|
||||||
for p in forum_export.select("body > p"):
|
for p in forum_export.select("body > p"):
|
||||||
title_tag = p
|
title_tag = p
|
||||||
content_tag = cast(Optional[Tag], p.find_next_sibling("ul"))
|
content_tag = p.find_next_sibling("ul")
|
||||||
|
|
||||||
title = cast(Tag, p.find("b")).text
|
title = cast(Tag, p.find("b")).text
|
||||||
if ":" in title:
|
if ":" in title:
|
||||||
title = title[title.find(":") + 1:]
|
title = title[title.find(":") + 1 :]
|
||||||
title = title.strip()
|
title = title.strip()
|
||||||
|
|
||||||
if not content_tag or content_tag.find_previous_sibling("p") != title_tag:
|
if not content_tag or content_tag.find_previous_sibling("p") != title_tag:
|
||||||
@@ -1604,7 +1569,7 @@ def _guess_timestamp_from_forum_post_content(content: Tag) -> Optional[datetime]
|
|||||||
|
|
||||||
for post in posts:
|
for post in posts:
|
||||||
text = post.text.strip()
|
text = post.text.strip()
|
||||||
text = text[text.rfind("|") + 1:]
|
text = text[text.rfind("|") + 1 :]
|
||||||
date = demangle_date(text, fail_silently=True)
|
date = demangle_date(text, fail_silently=True)
|
||||||
if not date:
|
if not date:
|
||||||
continue
|
continue
|
||||||
|
@@ -1,4 +1,4 @@
|
|||||||
from typing import Dict, Literal
|
from typing import Literal
|
||||||
|
|
||||||
from ...auth import Authenticator
|
from ...auth import Authenticator
|
||||||
from ...config import Config
|
from ...config import Config
|
||||||
@@ -26,7 +26,7 @@ class KitIliasWebCrawler(IliasWebCrawler):
|
|||||||
name: str,
|
name: str,
|
||||||
section: KitIliasWebCrawlerSection,
|
section: KitIliasWebCrawlerSection,
|
||||||
config: Config,
|
config: Config,
|
||||||
authenticators: Dict[str, Authenticator],
|
authenticators: dict[str, Authenticator],
|
||||||
):
|
):
|
||||||
super().__init__(name, section, config, authenticators)
|
super().__init__(name, section, config, authenticators)
|
||||||
|
|
||||||
|
@@ -38,9 +38,7 @@ class ShibbolethLogin:
|
|||||||
async with sess.get(url) as response:
|
async with sess.get(url) as response:
|
||||||
shib_url = response.url
|
shib_url = response.url
|
||||||
if str(shib_url).startswith(self._ilias_url):
|
if str(shib_url).startswith(self._ilias_url):
|
||||||
log.explain(
|
log.explain("ILIAS recognized our shib token and logged us in in the background, returning")
|
||||||
"ILIAS recognized our shib token and logged us in in the background, returning"
|
|
||||||
)
|
|
||||||
return
|
return
|
||||||
soup: BeautifulSoup = soupify(await response.read())
|
soup: BeautifulSoup = soupify(await response.read())
|
||||||
|
|
||||||
@@ -81,7 +79,7 @@ class ShibbolethLogin:
         # (or clicking "Continue" if you have JS disabled)
         relay_state = cast(Tag, soup.find("input", {"name": "RelayState"}))
         saml_response = cast(Tag, soup.find("input", {"name": "SAMLResponse"}))
-        url = form = soup.find("form", {"method": "post"})["action"]  # type: ignore
+        url = cast(str, cast(Tag, soup.find("form", {"method": "post"}))["action"])
         data = { # using the info obtained in the while loop above
             "RelayState": cast(str, relay_state["value"]),
             "SAMLResponse": cast(str, saml_response["value"]),
|
||||||
|
@@ -1,9 +1,11 @@
|
|||||||
import os
|
import os
|
||||||
import re
|
import re
|
||||||
|
from collections.abc import Awaitable, Generator, Iterable
|
||||||
from dataclasses import dataclass
|
from dataclasses import dataclass
|
||||||
from datetime import datetime
|
from datetime import datetime
|
||||||
from pathlib import PurePath
|
from pathlib import PurePath
|
||||||
from typing import Any, Awaitable, Generator, Iterable, List, Optional, Pattern, Tuple, Union, cast
|
from re import Pattern
|
||||||
|
from typing import Any, Optional, Union, cast
|
||||||
from urllib.parse import urljoin
|
from urllib.parse import urljoin
|
||||||
|
|
||||||
from bs4 import BeautifulSoup, Tag
|
from bs4 import BeautifulSoup, Tag
|
||||||
@@ -44,7 +46,7 @@ class KitIpdFile:
|
|||||||
@dataclass
|
@dataclass
|
||||||
class KitIpdFolder:
|
class KitIpdFolder:
|
||||||
name: str
|
name: str
|
||||||
entries: List[Union[KitIpdFile, "KitIpdFolder"]]
|
entries: list[Union[KitIpdFile, "KitIpdFolder"]]
|
||||||
|
|
||||||
def explain(self) -> None:
|
def explain(self) -> None:
|
||||||
log.explain_topic(f"Folder {self.name!r}")
|
log.explain_topic(f"Folder {self.name!r}")
|
||||||
@@ -53,12 +55,11 @@ class KitIpdFolder:
|
|||||||
|
|
||||||
|
|
||||||
class KitIpdCrawler(HttpCrawler):
|
class KitIpdCrawler(HttpCrawler):
|
||||||
|
|
||||||
def __init__(
|
def __init__(
|
||||||
self,
|
self,
|
||||||
name: str,
|
name: str,
|
||||||
section: KitIpdCrawlerSection,
|
section: KitIpdCrawlerSection,
|
||||||
config: Config,
|
config: Config,
|
||||||
):
|
):
|
||||||
super().__init__(name, section, config)
|
super().__init__(name, section, config)
|
||||||
self._url = section.target()
|
self._url = section.target()
|
||||||
@@ -69,7 +70,7 @@ class KitIpdCrawler(HttpCrawler):
|
|||||||
if not maybe_cl:
|
if not maybe_cl:
|
||||||
return
|
return
|
||||||
|
|
||||||
tasks: List[Awaitable[None]] = []
|
tasks: list[Awaitable[None]] = []
|
||||||
|
|
||||||
async with maybe_cl:
|
async with maybe_cl:
|
||||||
for item in await self._fetch_items():
|
for item in await self._fetch_items():
|
||||||
@@ -104,11 +105,7 @@ class KitIpdCrawler(HttpCrawler):
|
|||||||
await self.gather(tasks)
|
await self.gather(tasks)
|
||||||
|
|
||||||
async def _download_file(
|
async def _download_file(
|
||||||
self,
|
self, parent: PurePath, file: KitIpdFile, etag: Optional[str], mtime: Optional[datetime]
|
||||||
parent: PurePath,
|
|
||||||
file: KitIpdFile,
|
|
||||||
etag: Optional[str],
|
|
||||||
mtime: Optional[datetime]
|
|
||||||
) -> None:
|
) -> None:
|
||||||
element_path = parent / file.name
|
element_path = parent / file.name
|
||||||
|
|
||||||
@@ -125,9 +122,9 @@ class KitIpdCrawler(HttpCrawler):
|
|||||||
async with maybe_dl as (bar, sink):
|
async with maybe_dl as (bar, sink):
|
||||||
await self._stream_from_url(file.url, element_path, sink, bar)
|
await self._stream_from_url(file.url, element_path, sink, bar)
|
||||||
|
|
||||||
async def _fetch_items(self) -> Iterable[Union[KitIpdFile, KitIpdFolder]]:
|
async def _fetch_items(self) -> Iterable[KitIpdFile | KitIpdFolder]:
|
||||||
page, url = await self.get_page()
|
page, url = await self.get_page()
|
||||||
elements: List[Tag] = self._find_file_links(page)
|
elements: list[Tag] = self._find_file_links(page)
|
||||||
|
|
||||||
# do not add unnecessary nesting for a single <h1> heading
|
# do not add unnecessary nesting for a single <h1> heading
|
||||||
drop_h1: bool = len(page.find_all(name="h1")) <= 1
|
drop_h1: bool = len(page.find_all(name="h1")) <= 1
|
||||||
@@ -156,7 +153,7 @@ class KitIpdCrawler(HttpCrawler):
|
|||||||
name = os.path.basename(url)
|
name = os.path.basename(url)
|
||||||
return KitIpdFile(name, url)
|
return KitIpdFile(name, url)
|
||||||
|
|
||||||
def _find_file_links(self, tag: Union[Tag, BeautifulSoup]) -> list[Tag]:
|
def _find_file_links(self, tag: Tag | BeautifulSoup) -> list[Tag]:
|
||||||
return cast(list[Tag], tag.find_all(name="a", attrs={"href": self._file_regex}))
|
return cast(list[Tag], tag.find_all(name="a", attrs={"href": self._file_regex}))
|
||||||
|
|
||||||
def _abs_url_from_link(self, url: str, link_tag: Tag) -> str:
|
def _abs_url_from_link(self, url: str, link_tag: Tag) -> str:
|
||||||
@@ -177,7 +174,7 @@ class KitIpdCrawler(HttpCrawler):
|
|||||||
|
|
||||||
self._add_etag_to_report(path, resp.headers.get("ETag"))
|
self._add_etag_to_report(path, resp.headers.get("ETag"))
|
||||||
|
|
||||||
async def get_page(self) -> Tuple[BeautifulSoup, str]:
|
async def get_page(self) -> tuple[BeautifulSoup, str]:
|
||||||
async with self.session.get(self._url) as request:
|
async with self.session.get(self._url) as request:
|
||||||
# The web page for Algorithmen für Routenplanung contains some
|
# The web page for Algorithmen für Routenplanung contains some
|
||||||
# weird comments that beautifulsoup doesn't parse correctly. This
|
# weird comments that beautifulsoup doesn't parse correctly. This
|
||||||
|
@@ -18,31 +18,28 @@ class LocalCrawlerSection(CrawlerSection):
|
|||||||
def crawl_delay(self) -> float:
|
def crawl_delay(self) -> float:
|
||||||
value = self.s.getfloat("crawl_delay", fallback=0.0)
|
value = self.s.getfloat("crawl_delay", fallback=0.0)
|
||||||
if value < 0:
|
if value < 0:
|
||||||
self.invalid_value("crawl_delay", value,
|
self.invalid_value("crawl_delay", value, "Must not be negative")
|
||||||
"Must not be negative")
|
|
||||||
return value
|
return value
|
||||||
|
|
||||||
def download_delay(self) -> float:
|
def download_delay(self) -> float:
|
||||||
value = self.s.getfloat("download_delay", fallback=0.0)
|
value = self.s.getfloat("download_delay", fallback=0.0)
|
||||||
if value < 0:
|
if value < 0:
|
||||||
self.invalid_value("download_delay", value,
|
self.invalid_value("download_delay", value, "Must not be negative")
|
||||||
"Must not be negative")
|
|
||||||
return value
|
return value
|
||||||
|
|
||||||
def download_speed(self) -> Optional[int]:
|
def download_speed(self) -> Optional[int]:
|
||||||
value = self.s.getint("download_speed")
|
value = self.s.getint("download_speed")
|
||||||
if value is not None and value <= 0:
|
if value is not None and value <= 0:
|
||||||
self.invalid_value("download_speed", value,
|
self.invalid_value("download_speed", value, "Must be greater than 0")
|
||||||
"Must be greater than 0")
|
|
||||||
return value
|
return value
|
||||||
|
|
||||||
|
|
||||||
class LocalCrawler(Crawler):
|
class LocalCrawler(Crawler):
|
||||||
def __init__(
|
def __init__(
|
||||||
self,
|
self,
|
||||||
name: str,
|
name: str,
|
||||||
section: LocalCrawlerSection,
|
section: LocalCrawlerSection,
|
||||||
config: Config,
|
config: Config,
|
||||||
):
|
):
|
||||||
super().__init__(name, section, config)
|
super().__init__(name, section, config)
|
||||||
|
|
||||||
@@ -74,10 +71,12 @@ class LocalCrawler:
         tasks = []

         async with cl:
-            await asyncio.sleep(random.uniform(
-                0.5 * self._crawl_delay,
-                self._crawl_delay,
-            ))
+            await asyncio.sleep(
+                random.uniform(
+                    0.5 * self._crawl_delay,
+                    self._crawl_delay,
+                )
+            )

             for child in path.iterdir():
                 pure_child = cl.path / child.name
|
||||||
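The crawl and download delays are nothing more than a uniformly jittered sleep between half and the full configured delay; roughly:

```
import asyncio
import random

async def polite_pause(delay: float) -> None:
    # Sleep somewhere between 50% and 100% of the configured delay,
    # like the crawl/download delays in the hunks above and below.
    await asyncio.sleep(random.uniform(0.5 * delay, delay))

asyncio.run(polite_pause(0.2))
```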
@@ -93,10 +92,12 @@ class LocalCrawler(Crawler):
|
|||||||
return
|
return
|
||||||
|
|
||||||
async with dl as (bar, sink):
|
async with dl as (bar, sink):
|
||||||
await asyncio.sleep(random.uniform(
|
await asyncio.sleep(
|
||||||
0.5 * self._download_delay,
|
random.uniform(
|
||||||
self._download_delay,
|
0.5 * self._download_delay,
|
||||||
))
|
self._download_delay,
|
||||||
|
)
|
||||||
|
)
|
||||||
|
|
||||||
bar.set_total(stat.st_size)
|
bar.set_total(stat.st_size)
|
||||||
|
|
||||||
|
@@ -1,5 +1,5 @@
+from collections.abc import Iterator
 from pathlib import PurePath
-from typing import Iterator, Set

 from .logging import log
 from .utils import fmt_path
|
||||||
@@ -16,15 +16,34 @@ def name_variants(path: PurePath) -> Iterator[PurePath]:
 class Deduplicator:
     FORBIDDEN_CHARS = '<>:"/\\|?*' + "".join([chr(i) for i in range(0, 32)])
     FORBIDDEN_NAMES = {
-        "CON", "PRN", "AUX", "NUL",
-        "COM1", "COM2", "COM3", "COM4", "COM5", "COM6", "COM7", "COM8", "COM9",
-        "LPT1", "LPT2", "LPT3", "LPT4", "LPT5", "LPT6", "LPT7", "LPT8", "LPT9",
+        "CON",
+        "PRN",
+        "AUX",
+        "NUL",
+        "COM1",
+        "COM2",
+        "COM3",
+        "COM4",
+        "COM5",
+        "COM6",
+        "COM7",
+        "COM8",
+        "COM9",
+        "LPT1",
+        "LPT2",
+        "LPT3",
+        "LPT4",
+        "LPT5",
+        "LPT6",
+        "LPT7",
+        "LPT8",
+        "LPT9",
     }

     def __init__(self, windows_paths: bool) -> None:
         self._windows_paths = windows_paths

-        self._known: Set[PurePath] = set()
+        self._known: set[PurePath] = set()

     def _add(self, path: PurePath) -> None:
         self._known.add(path)
|
||||||
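Spelling out each reserved name on its own line is purely a formatting change; the set still only feeds a membership test. A hypothetical check of that kind (not the Deduplicator's actual method):

```
from pathlib import PurePath

FORBIDDEN_NAMES = {"CON", "PRN", "AUX", "NUL", *(f"COM{i}" for i in range(1, 10)), *(f"LPT{i}" for i in range(1, 10))}

def is_forbidden_on_windows(path: PurePath) -> bool:
    # Windows reserves these names regardless of case or extension.
    return path.stem.upper() in FORBIDDEN_NAMES or path.name.upper() in FORBIDDEN_NAMES

print(is_forbidden_on_windows(PurePath("con.txt")))  # True
```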
|
@@ -1,8 +1,9 @@
|
|||||||
import asyncio
|
import asyncio
|
||||||
import time
|
import time
|
||||||
|
from collections.abc import AsyncIterator
|
||||||
from contextlib import asynccontextmanager
|
from contextlib import asynccontextmanager
|
||||||
from dataclasses import dataclass
|
from dataclasses import dataclass
|
||||||
from typing import AsyncIterator, Optional
|
from typing import Optional
|
||||||
|
|
||||||
|
|
||||||
@dataclass
|
@dataclass
|
||||||
@@ -12,12 +13,7 @@ class Slot:
|
|||||||
|
|
||||||
|
|
||||||
class Limiter:
|
class Limiter:
|
||||||
def __init__(
|
def __init__(self, task_limit: int, download_limit: int, task_delay: float):
|
||||||
self,
|
|
||||||
task_limit: int,
|
|
||||||
download_limit: int,
|
|
||||||
task_delay: float
|
|
||||||
):
|
|
||||||
if task_limit <= 0:
|
if task_limit <= 0:
|
||||||
raise ValueError("task limit must be at least 1")
|
raise ValueError("task limit must be at least 1")
|
||||||
if download_limit <= 0:
|
if download_limit <= 0:
|
||||||
|
@@ -1,15 +1,23 @@
|
|||||||
import asyncio
|
import asyncio
|
||||||
import sys
|
import sys
|
||||||
import traceback
|
import traceback
|
||||||
|
from collections.abc import AsyncIterator, Iterator
|
||||||
from contextlib import AbstractContextManager, asynccontextmanager, contextmanager
|
from contextlib import AbstractContextManager, asynccontextmanager, contextmanager
|
||||||
from typing import AsyncIterator, Iterator, List, Optional
|
from typing import Any, Optional
|
||||||
|
|
||||||
from rich.console import Console, Group
|
from rich.console import Console, Group
|
||||||
from rich.live import Live
|
from rich.live import Live
|
||||||
from rich.markup import escape
|
from rich.markup import escape
|
||||||
from rich.panel import Panel
|
from rich.panel import Panel
|
||||||
from rich.progress import (BarColumn, DownloadColumn, Progress, TaskID, TextColumn, TimeRemainingColumn,
|
from rich.progress import (
|
||||||
TransferSpeedColumn)
|
BarColumn,
|
||||||
|
DownloadColumn,
|
||||||
|
Progress,
|
||||||
|
TaskID,
|
||||||
|
TextColumn,
|
||||||
|
TimeRemainingColumn,
|
||||||
|
TransferSpeedColumn,
|
||||||
|
)
|
||||||
from rich.table import Column
|
from rich.table import Column
|
||||||
|
|
||||||
|
|
||||||
@@ -53,7 +61,7 @@ class Log:
|
|||||||
self._showing_progress = False
|
self._showing_progress = False
|
||||||
self._progress_suspended = False
|
self._progress_suspended = False
|
||||||
self._lock = asyncio.Lock()
|
self._lock = asyncio.Lock()
|
||||||
self._lines: List[str] = []
|
self._lines: list[str] = []
|
||||||
|
|
||||||
# Whether different parts of the output are enabled or disabled
|
# Whether different parts of the output are enabled or disabled
|
||||||
self.output_explain = False
|
self.output_explain = False
|
||||||
@@ -114,7 +122,7 @@ class Log:
|
|||||||
for line in self._lines:
|
for line in self._lines:
|
||||||
self.print(line)
|
self.print(line)
|
||||||
|
|
||||||
def print(self, text: str) -> None:
|
def print(self, text: Any) -> None:
|
||||||
"""
|
"""
|
||||||
Print a normal message. Allows markup.
|
Print a normal message. Allows markup.
|
||||||
"""
|
"""
|
||||||
@@ -176,10 +184,14 @@ class Log:
|
|||||||
# Our print function doesn't take types other than strings, but the
|
# Our print function doesn't take types other than strings, but the
|
||||||
# underlying rich.print function does. This call is a special case
|
# underlying rich.print function does. This call is a special case
|
||||||
# anyways, and we're calling it internally, so this should be fine.
|
# anyways, and we're calling it internally, so this should be fine.
|
||||||
self.print(Panel.fit("""
|
self.print(
|
||||||
|
Panel.fit(
|
||||||
|
"""
|
||||||
Please copy your program output and send it to the PFERD maintainers, either
|
Please copy your program output and send it to the PFERD maintainers, either
|
||||||
directly or as a GitHub issue: https://github.com/Garmelon/PFERD/issues/new
|
directly or as a GitHub issue: https://github.com/Garmelon/PFERD/issues/new
|
||||||
""".strip())) # type: ignore
|
""".strip()
|
||||||
|
)
|
||||||
|
)
|
||||||
|
|
||||||
def explain_topic(self, text: str) -> None:
|
def explain_topic(self, text: str) -> None:
|
||||||
"""
|
"""
|
||||||
@@ -236,10 +248,10 @@ directly or as a GitHub issue: https://github.com/Garmelon/PFERD/issues/new
|
|||||||
|
|
||||||
@contextmanager
|
@contextmanager
|
||||||
def _bar(
|
def _bar(
|
||||||
self,
|
self,
|
||||||
progress: Progress,
|
progress: Progress,
|
||||||
description: str,
|
description: str,
|
||||||
total: Optional[float],
|
total: Optional[float],
|
||||||
) -> Iterator[ProgressBar]:
|
) -> Iterator[ProgressBar]:
|
||||||
if total is None:
|
if total is None:
|
||||||
# Indeterminate progress bar
|
# Indeterminate progress bar
|
||||||
@@ -255,11 +267,11 @@ directly or as a GitHub issue: https://github.com/Garmelon/PFERD/issues/new
|
|||||||
self._update_live()
|
self._update_live()
|
||||||
|
|
||||||
def crawl_bar(
|
def crawl_bar(
|
||||||
self,
|
self,
|
||||||
style: str,
|
style: str,
|
||||||
action: str,
|
action: str,
|
||||||
text: str,
|
text: str,
|
||||||
total: Optional[float] = None,
|
total: Optional[float] = None,
|
||||||
) -> AbstractContextManager[ProgressBar]:
|
) -> AbstractContextManager[ProgressBar]:
|
||||||
"""
|
"""
|
||||||
Allows markup in the "style" argument which will be applied to the
|
Allows markup in the "style" argument which will be applied to the
|
||||||
@@ -271,11 +283,11 @@ directly or as a GitHub issue: https://github.com/Garmelon/PFERD/issues/new
|
|||||||
return self._bar(self._crawl_progress, description, total)
|
return self._bar(self._crawl_progress, description, total)
|
||||||
|
|
||||||
def download_bar(
|
def download_bar(
|
||||||
self,
|
self,
|
||||||
style: str,
|
style: str,
|
||||||
action: str,
|
action: str,
|
||||||
text: str,
|
text: str,
|
||||||
total: Optional[float] = None,
|
total: Optional[float] = None,
|
||||||
) -> AbstractContextManager[ProgressBar]:
|
) -> AbstractContextManager[ProgressBar]:
|
||||||
"""
|
"""
|
||||||
Allows markup in the "style" argument which will be applied to the
|
Allows markup in the "style" argument which will be applied to the
|
||||||
|
@@ -4,12 +4,13 @@ import os
|
|||||||
import random
|
import random
|
||||||
import shutil
|
import shutil
|
||||||
import string
|
import string
|
||||||
from contextlib import contextmanager
|
from collections.abc import Iterator
|
||||||
|
from contextlib import contextmanager, suppress
|
||||||
from dataclasses import dataclass
|
from dataclasses import dataclass
|
||||||
from datetime import datetime
|
from datetime import datetime
|
||||||
from enum import Enum
|
from enum import Enum
|
||||||
from pathlib import Path, PurePath
|
from pathlib import Path, PurePath
|
||||||
from typing import BinaryIO, Iterator, Optional, Tuple
|
from typing import BinaryIO, Optional
|
||||||
|
|
||||||
from .logging import log
|
from .logging import log
|
||||||
from .report import Report, ReportLoadError
|
from .report import Report, ReportLoadError
|
||||||
@@ -35,8 +36,7 @@ class Redownload(Enum):
|
|||||||
try:
|
try:
|
||||||
return Redownload(string)
|
return Redownload(string)
|
||||||
except ValueError:
|
except ValueError:
|
||||||
raise ValueError("must be one of 'never', 'never-smart',"
|
raise ValueError("must be one of 'never', 'never-smart', 'always', 'always-smart'") from None
|
||||||
" 'always', 'always-smart'")
|
|
||||||
|
|
||||||
|
|
||||||
class OnConflict(Enum):
|
class OnConflict(Enum):
|
||||||
@@ -51,8 +51,10 @@ class OnConflict(Enum):
|
|||||||
try:
|
try:
|
||||||
return OnConflict(string)
|
return OnConflict(string)
|
||||||
except ValueError:
|
except ValueError:
|
||||||
raise ValueError("must be one of 'prompt', 'local-first',"
|
raise ValueError(
|
||||||
" 'remote-first', 'no-delete', 'no-delete-prompt-overwrite'")
|
"must be one of 'prompt', 'local-first',"
|
||||||
|
" 'remote-first', 'no-delete', 'no-delete-prompt-overwrite'"
|
||||||
|
) from None
|
||||||
|
|
||||||
|
|
||||||
@dataclass
|
@dataclass
|
||||||
@@ -96,13 +98,13 @@ class FileSinkToken(ReusableAsyncContextManager[FileSink]):
|
|||||||
# download handed back to the OutputDirectory.
|
# download handed back to the OutputDirectory.
|
||||||
|
|
||||||
def __init__(
|
def __init__(
|
||||||
self,
|
self,
|
||||||
output_dir: "OutputDirectory",
|
output_dir: "OutputDirectory",
|
||||||
remote_path: PurePath,
|
remote_path: PurePath,
|
||||||
path: PurePath,
|
path: PurePath,
|
||||||
local_path: Path,
|
local_path: Path,
|
||||||
heuristics: Heuristics,
|
heuristics: Heuristics,
|
||||||
on_conflict: OnConflict,
|
on_conflict: OnConflict,
|
||||||
):
|
):
|
||||||
super().__init__()
|
super().__init__()
|
||||||
|
|
||||||
@@ -118,15 +120,17 @@ class FileSinkToken(ReusableAsyncContextManager[FileSink]):
|
|||||||
sink = FileSink(file)
|
sink = FileSink(file)
|
||||||
|
|
||||||
async def after_download() -> None:
|
async def after_download() -> None:
|
||||||
await self._output_dir._after_download(DownloadInfo(
|
await self._output_dir._after_download(
|
||||||
self._remote_path,
|
DownloadInfo(
|
||||||
self._path,
|
self._remote_path,
|
||||||
self._local_path,
|
self._path,
|
||||||
tmp_path,
|
self._local_path,
|
||||||
self._heuristics,
|
tmp_path,
|
||||||
self._on_conflict,
|
self._heuristics,
|
||||||
sink.is_done(),
|
self._on_conflict,
|
||||||
))
|
sink.is_done(),
|
||||||
|
)
|
||||||
|
)
|
||||||
|
|
||||||
self._stack.push_async_callback(after_download)
|
self._stack.push_async_callback(after_download)
|
||||||
self._stack.enter_context(file)
|
self._stack.enter_context(file)
|
||||||
@@ -138,10 +142,10 @@ class OutputDirectory:
|
|||||||
REPORT_FILE = PurePath(".report")
|
REPORT_FILE = PurePath(".report")
|
||||||
|
|
||||||
def __init__(
|
def __init__(
|
||||||
self,
|
self,
|
||||||
root: Path,
|
root: Path,
|
||||||
redownload: Redownload,
|
redownload: Redownload,
|
||||||
on_conflict: OnConflict,
|
on_conflict: OnConflict,
|
||||||
):
|
):
|
||||||
if os.name == "nt":
|
if os.name == "nt":
|
||||||
# Windows limits the path length to 260 for some historical reason.
|
# Windows limits the path length to 260 for some historical reason.
|
||||||
@@ -174,8 +178,8 @@ class OutputDirectory:
|
|||||||
|
|
||||||
try:
|
try:
|
||||||
self._root.mkdir(parents=True, exist_ok=True)
|
self._root.mkdir(parents=True, exist_ok=True)
|
||||||
except OSError:
|
except OSError as e:
|
||||||
raise OutputDirError("Failed to create base directory")
|
raise OutputDirError("Failed to create base directory") from e
|
||||||
|
|
||||||
def register_reserved(self, path: PurePath) -> None:
|
def register_reserved(self, path: PurePath) -> None:
|
||||||
self._report.mark_reserved(path)
|
self._report.mark_reserved(path)
|
||||||
@@ -193,11 +197,11 @@ class OutputDirectory:
|
|||||||
return self._root / path
|
return self._root / path
|
||||||
|
|
||||||
def _should_download(
|
def _should_download(
|
||||||
self,
|
self,
|
||||||
local_path: Path,
|
local_path: Path,
|
||||||
heuristics: Heuristics,
|
heuristics: Heuristics,
|
||||||
redownload: Redownload,
|
redownload: Redownload,
|
||||||
on_conflict: OnConflict,
|
on_conflict: OnConflict,
|
||||||
) -> bool:
|
) -> bool:
|
||||||
if not local_path.exists():
|
if not local_path.exists():
|
||||||
log.explain("No corresponding file present locally")
|
log.explain("No corresponding file present locally")
|
||||||
@@ -270,9 +274,9 @@ class OutputDirectory:
|
|||||||
# files.
|
# files.
|
||||||
|
|
||||||
async def _conflict_lfrf(
|
async def _conflict_lfrf(
|
||||||
self,
|
self,
|
||||||
on_conflict: OnConflict,
|
on_conflict: OnConflict,
|
||||||
path: PurePath,
|
path: PurePath,
|
||||||
) -> bool:
|
) -> bool:
|
||||||
         if on_conflict in {OnConflict.PROMPT, OnConflict.NO_DELETE_PROMPT_OVERWRITE}:
             async with log.exclusive_output():
@@ -289,9 +293,9 @@ class OutputDirectory:
         raise ValueError(f"{on_conflict!r} is not a valid conflict policy")

     async def _conflict_ldrf(
         self,
         on_conflict: OnConflict,
         path: PurePath,
     ) -> bool:
         if on_conflict in {OnConflict.PROMPT, OnConflict.NO_DELETE_PROMPT_OVERWRITE}:
             async with log.exclusive_output():
@@ -308,10 +312,10 @@ class OutputDirectory:
         raise ValueError(f"{on_conflict!r} is not a valid conflict policy")

     async def _conflict_lfrd(
         self,
         on_conflict: OnConflict,
         path: PurePath,
         parent: PurePath,
     ) -> bool:
         if on_conflict in {OnConflict.PROMPT, OnConflict.NO_DELETE_PROMPT_OVERWRITE}:
             async with log.exclusive_output():
@@ -328,9 +332,9 @@ class OutputDirectory:
         raise ValueError(f"{on_conflict!r} is not a valid conflict policy")

     async def _conflict_delete_lf(
         self,
         on_conflict: OnConflict,
         path: PurePath,
     ) -> bool:
         if on_conflict == OnConflict.PROMPT:
             async with log.exclusive_output():
@@ -353,9 +357,9 @@ class OutputDirectory:
         return base.parent / name

     async def _create_tmp_file(
         self,
         local_path: Path,
-    ) -> Tuple[Path, BinaryIO]:
+    ) -> tuple[Path, BinaryIO]:
         """
         May raise an OutputDirError.
         """
@@ -388,14 +392,14 @@ class OutputDirectory:
         return self._should_download(local_path, heuristics, redownload, on_conflict)

     async def download(
         self,
         remote_path: PurePath,
         path: PurePath,
         *,
         etag_differs: Optional[bool] = None,
         mtime: Optional[datetime] = None,
         redownload: Optional[Redownload] = None,
         on_conflict: Optional[OnConflict] = None,
     ) -> Optional[FileSinkToken]:
         """
         May throw an OutputDirError, a MarkDuplicateError or a
@@ -506,10 +510,8 @@ class OutputDirectory:
                 await self._cleanup(child, pure_child)

         if delete_self:
-            try:
+            with suppress(OSError):
                 path.rmdir()
-            except OSError:
-                pass

     async def _cleanup_file(self, path: Path, pure: PurePath) -> None:
         if self._report.is_marked(pure):
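The last hunk above swaps a try/except OSError/pass around path.rmdir() for contextlib.suppress. A minimal, self-contained sketch of the pattern (not the actual OutputDirectory code; the directory name is made up):

from contextlib import suppress
from pathlib import Path

target = Path("example-dir")  # hypothetical directory, for illustration only

# Old style: swallow the error if the directory is missing or not empty.
try:
    target.rmdir()
except OSError:
    pass

# New style: suppress() silences exactly the listed exception types, so the
# behaviour is unchanged but the intent fits in one line.
with suppress(OSError):
    target.rmdir()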
|
@@ -1,5 +1,5 @@
 from pathlib import Path, PurePath
-from typing import Dict, List, Optional
+from typing import Optional

 from rich.markup import escape

@@ -15,7 +15,7 @@ class PferdLoadError(Exception):


 class Pferd:
-    def __init__(self, config: Config, cli_crawlers: Optional[List[str]], cli_skips: Optional[List[str]]):
+    def __init__(self, config: Config, cli_crawlers: Optional[list[str]], cli_skips: Optional[list[str]]):
         """
         May throw PferdLoadError.
         """
@@ -23,10 +23,10 @@ class Pferd:
         self._config = config
         self._crawlers_to_run = self._find_crawlers_to_run(config, cli_crawlers, cli_skips)

-        self._authenticators: Dict[str, Authenticator] = {}
-        self._crawlers: Dict[str, Crawler] = {}
+        self._authenticators: dict[str, Authenticator] = {}
+        self._crawlers: dict[str, Crawler] = {}

-    def _find_config_crawlers(self, config: Config) -> List[str]:
+    def _find_config_crawlers(self, config: Config) -> list[str]:
         crawl_sections = []

         for name, section in config.crawl_sections():
@@ -37,7 +37,7 @@ class Pferd:

         return crawl_sections

-    def _find_cli_crawlers(self, config: Config, cli_crawlers: List[str]) -> List[str]:
+    def _find_cli_crawlers(self, config: Config, cli_crawlers: list[str]) -> list[str]:
         if len(cli_crawlers) != len(set(cli_crawlers)):
             raise PferdLoadError("Some crawlers were selected multiple times")

@@ -66,14 +66,14 @@ class Pferd:
         return crawlers_to_run

     def _find_crawlers_to_run(
         self,
         config: Config,
-        cli_crawlers: Optional[List[str]],
-        cli_skips: Optional[List[str]],
-    ) -> List[str]:
+        cli_crawlers: Optional[list[str]],
+        cli_skips: Optional[list[str]],
+    ) -> list[str]:
         log.explain_topic("Deciding which crawlers to run")

-        crawlers: List[str]
+        crawlers: list[str]
         if cli_crawlers is None:
             log.explain("No crawlers specified on CLI")
             log.explain("Running crawlers specified in config")
@@ -104,7 +104,7 @@ class Pferd:

     def _load_crawlers(self) -> None:
         # Cookie sharing
-        kit_ilias_web_paths: Dict[Authenticator, List[Path]] = {}
+        kit_ilias_web_paths: dict[Authenticator, list[Path]] = {}

         for name, section in self._config.crawl_sections():
             log.print(f"[bold bright_cyan]Loading[/] {escape(name)}")
@@ -117,9 +117,8 @@ class Pferd:
             crawler = crawler_constructor(name, section, self._config, self._authenticators)
             self._crawlers[name] = crawler

-            if self._config.default_section.share_cookies():
-                if isinstance(crawler, KitIliasWebCrawler):
-                    crawler.share_cookies(kit_ilias_web_paths)
+            if self._config.default_section.share_cookies() and isinstance(crawler, KitIliasWebCrawler):
+                crawler.share_cookies(kit_ilias_web_paths)

     def debug_transforms(self) -> None:
         for name in self._crawlers_to_run:
|
@@ -1,6 +1,6 @@
 import json
 from pathlib import Path, PurePath
-from typing import Any, Dict, List, Optional, Set
+from typing import Any, Optional


 class ReportLoadError(Exception):
@@ -42,32 +42,32 @@ class Report:

     def __init__(self) -> None:
         # Paths found by the crawler, untransformed
-        self.found_paths: Set[PurePath] = set()
+        self.found_paths: set[PurePath] = set()

         # Files reserved for metadata files (e. g. the report file or cookies)
         # that can't be overwritten by user transforms and won't be cleaned up
         # at the end.
-        self.reserved_files: Set[PurePath] = set()
+        self.reserved_files: set[PurePath] = set()

         # Files found by the crawler, transformed. Only includes files that
         # were downloaded (or a download was attempted)
-        self.known_files: Set[PurePath] = set()
+        self.known_files: set[PurePath] = set()

-        self.added_files: Set[PurePath] = set()
-        self.changed_files: Set[PurePath] = set()
-        self.deleted_files: Set[PurePath] = set()
+        self.added_files: set[PurePath] = set()
+        self.changed_files: set[PurePath] = set()
+        self.deleted_files: set[PurePath] = set()
         # Files that should have been deleted by the cleanup but weren't
-        self.not_deleted_files: Set[PurePath] = set()
+        self.not_deleted_files: set[PurePath] = set()

         # Custom crawler-specific data
-        self.custom: Dict[str, Any] = dict()
+        self.custom: dict[str, Any] = dict()

         # Encountered errors and warnings
-        self.encountered_warnings: List[str] = []
-        self.encountered_errors: List[str] = []
+        self.encountered_warnings: list[str] = []
+        self.encountered_errors: list[str] = []

     @staticmethod
-    def _get_list_of_strs(data: Dict[str, Any], key: str) -> List[str]:
+    def _get_list_of_strs(data: dict[str, Any], key: str) -> list[str]:
         result: Any = data.get(key, [])

         if not isinstance(result, list):
@@ -80,8 +80,8 @@ class Report:
         return result

     @staticmethod
-    def _get_str_dictionary(data: Dict[str, Any], key: str) -> Dict[str, Any]:
-        result: Dict[str, Any] = data.get(key, {})
+    def _get_str_dictionary(data: dict[str, Any], key: str) -> dict[str, Any]:
+        result: dict[str, Any] = data.get(key, {})

         if not isinstance(result, dict):
             raise ReportLoadError(f"Incorrect format: {key!r} is not a dictionary")
@@ -170,7 +170,7 @@ class Report:
         self.known_files.add(path)

     @property
-    def marked(self) -> Set[PurePath]:
+    def marked(self) -> set[PurePath]:
         return self.known_files | self.reserved_files

     def is_marked(self, path: PurePath) -> bool:
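The hunks above are mostly mechanical typing clean-ups: typing.Dict, List and Set become the built-in generics dict, list and set (PEP 585), while Optional is kept; the transformer hunks below additionally rewrite Union[...] aliases into PEP 604 "|" unions. Keeping Optional matches the UP045 entry in the ignore list of the new ruff configuration further down, which, assuming current ruff rule numbering, is the rule that would rewrite Optional[X] into X | None. A small sketch of both notations outside PFERD (assuming Python 3.10 or newer, which the "|" syntax requires at runtime):

from typing import Optional

# PEP 585: built-in containers work as generics, no typing.Dict/List/Set needed.
counts: dict[str, int] = {}
names: list[str] = []
seen: set[str] = set()

# PEP 604: unions spelled with "|", usable as a runtime alias
# (same idea as the RightSide alias in the transformer diff below).
Number = int | float

def first_name(values: list[str]) -> Optional[str]:
    # Optional[...] is left untouched in the diff; "str | None" is the PEP 604
    # spelling that the ignored UP045 rule would otherwise enforce.
    return values[0] if values else None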
|
@@ -1,10 +1,12 @@
 import ast
+import contextlib
 import re
 from abc import ABC, abstractmethod
+from collections.abc import Callable, Sequence
 from dataclasses import dataclass
 from enum import Enum
 from pathlib import PurePath
-from typing import Callable, Dict, List, Optional, Sequence, TypeVar, Union
+from typing import Optional, TypeVar

 from .logging import log
 from .utils import fmt_path, str_path
@@ -23,7 +25,7 @@ class Empty:
     pass


-RightSide = Union[str, Ignore, Empty]
+RightSide = str | Ignore | Empty


 @dataclass
@@ -35,7 +37,7 @@ class Ignored:
     pass


-TransformResult = Optional[Union[Transformed, Ignored]]
+TransformResult = Transformed | Ignored | None


 @dataclass
@@ -47,7 +49,7 @@ class Rule:
     right: RightSide
     right_index: int

-    def right_result(self, path: PurePath) -> Union[str, Transformed, Ignored]:
+    def right_result(self, path: PurePath) -> str | Transformed | Ignored:
         if isinstance(self.right, str):
             return self.right
         elif isinstance(self.right, Ignore):
@@ -93,24 +95,20 @@ class ExactReTf(Transformation):
         # since elements of "match.groups()" can be None, mypy is wrong.
         groups: Sequence[Optional[str]] = [match[0]] + list(match.groups())

-        locals_dir: Dict[str, Union[str, int, float]] = {}
+        locals_dir: dict[str, str | int | float] = {}
         for i, group in enumerate(groups):
             if group is None:
                 continue

             locals_dir[f"g{i}"] = group

-            try:
+            with contextlib.suppress(ValueError):
                 locals_dir[f"i{i}"] = int(group)
-            except ValueError:
-                pass

-            try:
+            with contextlib.suppress(ValueError):
                 locals_dir[f"f{i}"] = float(group)
-            except ValueError:
-                pass

-        named_groups: Dict[str, str] = match.groupdict()
+        named_groups: dict[str, str] = match.groupdict()
         for name, capture in named_groups.items():
             locals_dir[name] = capture

@@ -208,7 +206,7 @@ class Line:

     @property
     def rest(self) -> str:
-        return self.line[self.index:]
+        return self.line[self.index :]

     def peek(self, amount: int = 1) -> str:
         return self.rest[:amount]
@@ -228,7 +226,7 @@ class Line:
         self.expect(string)
         return value

-    def one_of(self, parsers: List[Callable[[], T]], description: str) -> T:
+    def one_of(self, parsers: list[Callable[[], T]], description: str) -> T:
         for parser in parsers:
             index = self.index
             try:
@@ -315,7 +313,7 @@ def parse_left(line: Line) -> str:
     return parse_str(line)


-def parse_right(line: Line) -> Union[str, Ignore]:
+def parse_right(line: Line) -> str | Ignore:
     c = line.peek()
     if c in QUOTATION_MARKS:
         return parse_quoted_str(line)
@@ -327,21 +325,27 @@ def parse_right(line: Line) -> Union[str, Ignore]:


 def parse_arrow_name(line: Line) -> str:
-    return line.one_of([
-        lambda: line.expect("exact-re"),
-        lambda: line.expect("exact"),
-        lambda: line.expect("name-re"),
-        lambda: line.expect("name"),
-        lambda: line.expect("re"),
-        lambda: line.expect(""),
-    ], "Expected arrow name")
+    return line.one_of(
+        [
+            lambda: line.expect("exact-re"),
+            lambda: line.expect("exact"),
+            lambda: line.expect("name-re"),
+            lambda: line.expect("name"),
+            lambda: line.expect("re"),
+            lambda: line.expect(""),
+        ],
+        "Expected arrow name",
+    )


 def parse_arrow_head(line: Line) -> ArrowHead:
-    return line.one_of([
-        lambda: line.expect_with(">>", ArrowHead.SEQUENCE),
-        lambda: line.expect_with(">", ArrowHead.NORMAL),
-    ], "Expected arrow head")
+    return line.one_of(
+        [
+            lambda: line.expect_with(">>", ArrowHead.SEQUENCE),
+            lambda: line.expect_with(">", ArrowHead.NORMAL),
+        ],
+        "Expected arrow head",
+    )


 def parse_eol(line: Line) -> None:
@@ -413,12 +417,12 @@ class Transformer:

     def transform(self, path: PurePath) -> Optional[PurePath]:
         for i, (line, tf) in enumerate(self._tfs):
-            log.explain(f"Testing rule {i+1}: {line}")
+            log.explain(f"Testing rule {i + 1}: {line}")

             try:
                 result = tf.transform(path)
             except Exception as e:
-                log.warn(f"Error while testing rule {i+1}: {line}")
+                log.warn(f"Error while testing rule {i + 1}: {line}")
                 log.warn_contd(str(e))
                 continue
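The ExactReTf hunk earlier in this diff builds a locals dictionary from a regex match and now uses contextlib.suppress(ValueError) instead of try/except/pass for the optional int/float conversions. A simplified, standalone sketch of that idea (Python 3.10+; the real transformer feeds the dictionary into an evaluated replacement expression, and the g/i/f names here simply mirror the prefixes in the hunk):

import contextlib
import re

match = re.match(r"(\d+)-(\w+)", "42-foo")
assert match is not None

locals_dir: dict[str, str | int | float] = {}
for i, group in enumerate([match[0], *match.groups()]):
    if group is None:
        continue
    locals_dir[f"g{i}"] = group
    # suppress() skips the assignment when the conversion fails, exactly like
    # the removed "except ValueError: pass" blocks.
    with contextlib.suppress(ValueError):
        locals_dir[f"i{i}"] = int(group)
    with contextlib.suppress(ValueError):
        locals_dir[f"f{i}"] = float(group)

print(locals_dir)  # {'g0': '42-foo', 'g1': '42', 'i1': 42, 'f1': 42.0, 'g2': 'foo'}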
|
@@ -3,10 +3,11 @@ import getpass
 import sys
 import threading
 from abc import ABC, abstractmethod
+from collections.abc import Callable
 from contextlib import AsyncExitStack
 from pathlib import Path, PurePath
 from types import TracebackType
-from typing import Any, Callable, Dict, Generic, Optional, Type, TypeVar
+from typing import Any, Generic, Optional, TypeVar
 from urllib.parse import parse_qs, urlencode, urlsplit, urlunsplit

 import bs4
@@ -79,7 +80,7 @@ def url_set_query_param(url: str, param: str, value: str) -> str:
     return urlunsplit((scheme, netloc, path, new_query_string, fragment))


-def url_set_query_params(url: str, params: Dict[str, str]) -> str:
+def url_set_query_params(url: str, params: dict[str, str]) -> str:
     """
     Sets multiple query parameters in an url, overwriting existing ones.
     """
@@ -131,10 +132,10 @@ class ReusableAsyncContextManager(ABC, Generic[T]):
         return result

     async def __aexit__(
         self,
-        exc_type: Optional[Type[BaseException]],
+        exc_type: Optional[type[BaseException]],
         exc_value: Optional[BaseException],
         traceback: Optional[TracebackType],
     ) -> Optional[bool]:
         if not self._active:
             raise RuntimeError("__aexit__ called too many times")
|
@@ -20,16 +20,29 @@ pferd = "PFERD.__main__:main"
 [tool.setuptools.dynamic]
 version = {attr = "PFERD.version.VERSION"}

-[tool.flake8]
-max-line-length = 110
+[tool.ruff]
+line-length = 110

-[tool.isort]
-line_length = 110
-
-[tool.autopep8]
-max_line_length = 110
-in-place = true
-recursive = true
+[tool.ruff.lint]
+select = [
+    # pycodestyle
+    "E",
+    # Pyflakes
+    "F",
+    # pyupgrade
+    "UP",
+    # flake8-bugbear
+    "B",
+    # flake8-simplify
+    "SIM",
+    # isort
+    "I",
+]
+ignore = [
+    "UP045",
+    "SIM114",
+    "B023"
+]

 [tool.mypy]
 disallow_any_generics = true
@@ -40,3 +53,10 @@ warn_unused_ignores = true
 warn_unreachable = true
 show_error_context = true
 ignore_missing_imports = true
+
+[dependency-groups]
+dev = [
+    "mypy>=1.18.2",
+    "pyinstaller>=6.16.0",
+    "ruff>=0.14.1",
+]

@@ -2,4 +2,4 @@

 set -e

-pyinstaller --onefile pferd.py
+uv run pyinstaller --onefile pferd.py

@@ -2,5 +2,5 @@

 set -e

-mypy .
-flake8 PFERD
+uv run mypy .
+uv run ruff check

@@ -2,5 +2,4 @@

 set -e

-autopep8 .
-isort .
+uv run ruff format
|