mirror of
https://github.com/Garmelon/PFERD.git
synced 2025-10-19 16:22:33 +02:00
Compare commits
108 Commits
6d44aac278
...
test-uv
Author | SHA1 | Date | |
---|---|---|---|
![]() |
45e25db5ad | ||
![]() |
ef7d66c5af | ||
![]() |
5646e933fd | ||
![]() |
6e563134b2 | ||
![]() |
2cf0e060ed | ||
![]() |
ee4625be78 | ||
![]() |
f6c713d621 | ||
![]() |
207af51aa4 | ||
![]() |
3755f593ff | ||
![]() |
465f8b28c0 | ||
![]() |
27e69af2f3 | ||
![]() |
56e3065950 | ||
![]() |
549ce6cce9 | ||
![]() |
34564cedb4 | ||
![]() |
2b0d20a1f6 | ||
![]() |
8caad0008d | ||
![]() |
77a23265a9 | ||
![]() |
4c230ef6dd | ||
![]() |
b305e1ce23 | ||
![]() |
bdf17f5c87 | ||
![]() |
77fce7daf8 | ||
![]() |
653bf139f0 | ||
![]() |
3f60638d33 | ||
![]() |
b97b6fae6b | ||
![]() |
477234ad0d | ||
![]() |
63f25277b0 | ||
![]() |
c8eff04ae0 | ||
![]() |
edc482cdf4 | ||
![]() |
72cd0f77e2 | ||
![]() |
be175f9347 | ||
![]() |
ba2833dba5 | ||
![]() |
2f0e792670 | ||
![]() |
5f88539f7e | ||
![]() |
bd9d7efe64 | ||
![]() |
16a2dd5b15 | ||
![]() |
678283d341 | ||
![]() |
287173b0b1 | ||
![]() |
712217e959 | ||
![]() |
6dda4c55a8 | ||
![]() |
596b6a7688 | ||
![]() |
5983200247 | ||
![]() |
26e802d88b | ||
![]() |
f5c4e82816 | ||
![]() |
f5273f7ca0 | ||
![]() |
fa71a9f44f | ||
![]() |
81d6ff53c4 | ||
![]() |
d7a2b6e019 | ||
![]() |
71c65e89d1 | ||
![]() |
c1046498e7 | ||
![]() |
8fbd1978af | ||
![]() |
739dd95850 | ||
![]() |
c54c3bcfa1 | ||
![]() |
d7f2229978 | ||
![]() |
52fdeae752 | ||
![]() |
f9bb2e41cf | ||
![]() |
4f9e2ab48d | ||
![]() |
19beb8f07b | ||
![]() |
c897d9e2f5 | ||
![]() |
21a266e302 | ||
![]() |
b29b6f93f8 | ||
![]() |
318226d7cb | ||
![]() |
422cf05f15 | ||
![]() |
819c6673c7 | ||
![]() |
89b44c69a7 | ||
![]() |
4b4f72b2ca | ||
![]() |
778517d8c6 | ||
![]() |
428b0179fc | ||
![]() |
ade6309dd9 | ||
![]() |
fd6cb7b966 | ||
![]() |
5c87517ceb | ||
![]() |
b01f093474 | ||
![]() |
3a05b90525 | ||
![]() |
7a00f73e0e | ||
![]() |
5d0621420e | ||
![]() |
df98153169 | ||
![]() |
fc1f68ccd9 | ||
![]() |
3e831c7e23 | ||
![]() |
bbcfe9c8dd | ||
![]() |
eb01aa86cb | ||
![]() |
3db186a978 | ||
![]() |
4a5959fd58 | ||
![]() |
1cbc2b717a | ||
![]() |
da627ff929 | ||
![]() |
c1b592ac29 | ||
![]() |
eb0c956d32 | ||
![]() |
ab0cb2d956 | ||
![]() |
a117126389 | ||
![]() |
e9f8901520 | ||
![]() |
266812f90e | ||
![]() |
533bc27439 | ||
![]() |
0113a0ca10 | ||
![]() |
40f8a05ad6 | ||
![]() |
50b50513c6 | ||
![]() |
df3514cd03 | ||
![]() |
ad53185247 | ||
![]() |
87b67e9271 | ||
![]() |
b54b3b979c | ||
![]() |
2184ac8040 | ||
![]() |
b3d412360b | ||
![]() |
dbc2553b11 | ||
![]() |
68c398f1fe | ||
![]() |
123a57beec | ||
![]() |
d204dac8ce | ||
![]() |
443f7fe839 | ||
![]() |
0294ceb7d5 | ||
![]() |
6f30c6583d | ||
![]() |
467fc526e8 | ||
![]() |
722d2eb393 |
1
.git-blame-ignore-revs
Normal file
1
.git-blame-ignore-revs
Normal file
@@ -0,0 +1 @@
|
||||
2cf0e060ed126537dd993896b6aa793e2a6b9e80
|
10
.github/dependabot.yml
vendored
Normal file
10
.github/dependabot.yml
vendored
Normal file
@@ -0,0 +1,10 @@
|
||||
version: 2
|
||||
updates:
|
||||
- package-ecosystem: github-actions
|
||||
directory: /
|
||||
schedule:
|
||||
interval: monthly
|
||||
groups:
|
||||
gh-actions:
|
||||
patterns:
|
||||
- "*"
|
41
.github/workflows/build-and-release.yml
vendored
41
.github/workflows/build-and-release.yml
vendored
@@ -1,6 +1,6 @@
|
||||
name: build-and-release
|
||||
|
||||
on: push
|
||||
on: [push, pull_request]
|
||||
|
||||
defaults:
|
||||
run:
|
||||
@@ -13,28 +13,26 @@ jobs:
|
||||
strategy:
|
||||
fail-fast: false
|
||||
matrix:
|
||||
os: [ubuntu-latest, windows-latest, macos-latest]
|
||||
python: ["3.9"]
|
||||
os: [ubuntu-latest, windows-latest, macos-13, macos-latest]
|
||||
python: ["3.11"]
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
|
||||
- uses: actions/checkout@v2
|
||||
|
||||
- uses: actions/setup-python@v2
|
||||
- name: Install uv
|
||||
uses: astral-sh/setup-uv@v7
|
||||
with:
|
||||
python-version: ${{ matrix.python }}
|
||||
|
||||
- name: Set up project
|
||||
if: matrix.os != 'windows-latest'
|
||||
run: ./scripts/setup
|
||||
|
||||
- name: Set up project on windows
|
||||
if: matrix.os == 'windows-latest'
|
||||
# For some reason, `pip install --upgrade pip` doesn't work on
|
||||
# 'windows-latest'. The installed pip version works fine however.
|
||||
run: ./scripts/setup --no-pip
|
||||
run: uv sync
|
||||
|
||||
- name: Run checks
|
||||
run: ./scripts/check
|
||||
run: |
|
||||
./scripts/check
|
||||
./scripts/format
|
||||
|
||||
- name: Assert no changes
|
||||
run: git diff --exit-code
|
||||
|
||||
- name: Build
|
||||
run: ./scripts/build
|
||||
@@ -45,9 +43,9 @@ jobs:
|
||||
run: mv dist/pferd* dist/pferd-${{ matrix.os }}
|
||||
|
||||
- name: Upload binary
|
||||
uses: actions/upload-artifact@v2
|
||||
uses: actions/upload-artifact@v4
|
||||
with:
|
||||
name: Binaries
|
||||
name: pferd-${{ matrix.os }}
|
||||
path: dist/pferd-${{ matrix.os }}
|
||||
|
||||
release:
|
||||
@@ -57,18 +55,20 @@ jobs:
|
||||
steps:
|
||||
|
||||
- name: Download binaries
|
||||
uses: actions/download-artifact@v2
|
||||
uses: actions/download-artifact@v4
|
||||
with:
|
||||
name: Binaries
|
||||
pattern: pferd-*
|
||||
merge-multiple: true
|
||||
|
||||
- name: Rename binaries
|
||||
run: |
|
||||
mv pferd-ubuntu-latest pferd-linux
|
||||
mv pferd-windows-latest pferd-windows.exe
|
||||
mv pferd-macos-13 pferd-mac-x86_64
|
||||
mv pferd-macos-latest pferd-mac
|
||||
|
||||
- name: Create release
|
||||
uses: softprops/action-gh-release@v1
|
||||
uses: softprops/action-gh-release@v2
|
||||
env:
|
||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
with:
|
||||
@@ -76,3 +76,4 @@ jobs:
|
||||
pferd-linux
|
||||
pferd-windows.exe
|
||||
pferd-mac
|
||||
pferd-mac-x86_64
|
||||
|
1
.gitignore
vendored
1
.gitignore
vendored
@@ -3,6 +3,7 @@
|
||||
/PFERD.egg-info/
|
||||
__pycache__/
|
||||
/.vscode/
|
||||
/.idea/
|
||||
|
||||
# pyinstaller
|
||||
/pferd.spec
|
||||
|
120
CHANGELOG.md
120
CHANGELOG.md
@@ -22,6 +22,126 @@ ambiguous situations.
|
||||
|
||||
## Unreleased
|
||||
|
||||
## Added
|
||||
- Store the description when using the `internet-shortcut` link format
|
||||
|
||||
## 3.8.3 - 2025-07-01
|
||||
|
||||
## Added
|
||||
- Support for link collections.
|
||||
In "fancy" mode, a single HTML file with multiple links is generated.
|
||||
In all other modes, PFERD creates a folder for the collection and a new file
|
||||
for every link inside.
|
||||
|
||||
## Fixed
|
||||
- Crawling of exercises with instructions
|
||||
- Don't download unavailable elements.
|
||||
Elements that are unavailable (for example, because their availability is
|
||||
time restricted) will not download the HTML for the info page anymore.
|
||||
- `base_url` argument for `ilias-web` crawler causing crashes
|
||||
|
||||
## 3.8.2 - 2025-04-29
|
||||
|
||||
## Changed
|
||||
- Explicitly mention that wikis are not supported at the moment and ignore them
|
||||
|
||||
## Fixed
|
||||
- Ilias-native login
|
||||
- Exercise crawling
|
||||
|
||||
## 3.8.1 - 2025-04-17
|
||||
|
||||
## Fixed
|
||||
- Description html files now specify at UTF-8 encoding
|
||||
- Images in descriptions now always have a white background
|
||||
|
||||
## 3.8.0 - 2025-04-16
|
||||
|
||||
### Added
|
||||
- Support for ILIAS 9
|
||||
|
||||
### Changed
|
||||
- Added prettier CSS to forum threads
|
||||
- Downloaded forum threads now link to the forum instead of the ILIAS thread
|
||||
- Increase minimum supported Python version to 3.11
|
||||
- Do not crawl nested courses (courses linked in other courses)
|
||||
|
||||
## Fixed
|
||||
- File links in report on Windows
|
||||
- TOTP authentication in KIT Shibboleth
|
||||
- Forum crawling only considering the first 20 entries
|
||||
|
||||
## 3.7.0 - 2024-11-13
|
||||
|
||||
### Added
|
||||
- Support for MOB videos in page descriptions
|
||||
- Clickable links in the report to directly open new/modified/not-deleted files
|
||||
- Support for non KIT shibboleth login
|
||||
|
||||
### Changed
|
||||
- Remove videos from description pages
|
||||
- Perform ILIAS cycle detection after processing the transform to allow
|
||||
ignoring duplicated elements
|
||||
- Parse headings (h1-h3) as folders in kit-ipd crawler
|
||||
|
||||
### Fixed
|
||||
- Personal desktop/dashboard/favorites crawling
|
||||
- Crawling of nested courses
|
||||
- Downloading of links with no target URL
|
||||
- Handle row flex on description pages
|
||||
- Add `<!DOCTYPE html>` heading to forum threads to fix mime type detection
|
||||
- Handle groups in cards
|
||||
|
||||
## 3.6.0 - 2024-10-23
|
||||
|
||||
### Added
|
||||
- Generic `ilias-web` crawler and `ilias-web` CLI command
|
||||
- Support for the course overview page. Using this URL as a target might cause
|
||||
duplication warnings, as subgroups are listed separately.
|
||||
- Support for named capture groups in regex transforms
|
||||
- Crawl custom item groups as folders
|
||||
|
||||
### Fixed
|
||||
- Normalization of meeting names in cards
|
||||
- Sanitization of slashes in exercise container names
|
||||
|
||||
## 3.5.2 - 2024-04-14
|
||||
|
||||
### Fixed
|
||||
- Crawling of personal desktop with ILIAS 8
|
||||
- Crawling of empty personal desktops
|
||||
|
||||
## 3.5.1 - 2024-04-09
|
||||
|
||||
### Added
|
||||
- Support for ILIAS 8
|
||||
|
||||
### Fixed
|
||||
- Video name deduplication
|
||||
|
||||
## 3.5.0 - 2023-09-13
|
||||
|
||||
### Added
|
||||
- `no-delete-prompt-override` conflict resolution strategy
|
||||
- Support for ILIAS learning modules
|
||||
- `show_not_deleted` option to stop printing the "Not Deleted" status or report
|
||||
message. This combines nicely with the `no-delete-prompt-override` strategy,
|
||||
causing PFERD to mostly ignore local-only files.
|
||||
- Support for mediacast video listings
|
||||
- Crawling of files in info tab
|
||||
|
||||
### Changed
|
||||
- Remove size suffix for files in content pages
|
||||
|
||||
### Fixed
|
||||
- Crawling of courses with the timeline view as the default tab
|
||||
- Crawling of file and custom opencast cards
|
||||
- Crawling of button cards without descriptions
|
||||
- Abort crawling when encountering an unexpected ilias root page redirect
|
||||
- Sanitize ascii control characters on Windows
|
||||
- Crawling of paginated past meetings
|
||||
- Ignore SCORM learning modules
|
||||
|
||||
## 3.4.3 - 2022-11-29
|
||||
|
||||
### Added
|
||||
|
91
CONFIG.md
91
CONFIG.md
@@ -4,11 +4,11 @@ A config file consists of sections. A section begins with a `[section]` header,
|
||||
which is followed by a list of `key = value` pairs. Comments must be on their
|
||||
own line and start with `#`. Multiline values must be indented beyond their key.
|
||||
Boolean values can be `yes` or `no`. For more details and some examples on the
|
||||
format, see the [configparser documentation][1] ([interpolation][2] is
|
||||
disabled).
|
||||
format, see the [configparser documentation][cp-file]
|
||||
([interpolation][cp-interp] is disabled).
|
||||
|
||||
[1]: <https://docs.python.org/3/library/configparser.html#supported-ini-file-structure> "Supported INI File Structure"
|
||||
[2]: <https://docs.python.org/3/library/configparser.html#interpolation-of-values> "Interpolation of values"
|
||||
[cp-file]: <https://docs.python.org/3/library/configparser.html#supported-ini-file-structure> "Supported INI File Structure"
|
||||
[cp-interp]: <https://docs.python.org/3/library/configparser.html#interpolation-of-values> "Interpolation of values"
|
||||
|
||||
## The `DEFAULT` section
|
||||
|
||||
@@ -26,6 +26,9 @@ default values for the other sections.
|
||||
`Added ...`) while running a crawler. (Default: `yes`)
|
||||
- `report`: Whether PFERD should print a report of added, changed and deleted
|
||||
local files for all crawlers before exiting. (Default: `yes`)
|
||||
- `show_not_deleted`: Whether PFERD should print messages in status and report
|
||||
when a local-only file wasn't deleted. Combines nicely with the
|
||||
`no-delete-prompt-override` conflict resolution strategy.
|
||||
- `share_cookies`: Whether crawlers should share cookies where applicable. For
|
||||
example, some crawlers share cookies if they crawl the same website using the
|
||||
same account. (Default: `yes`)
|
||||
@@ -75,6 +78,9 @@ common to all crawlers:
|
||||
using `prompt` and always choosing "yes".
|
||||
- `no-delete`: Never delete local files, but overwrite local files if the
|
||||
remote file is different.
|
||||
- `no-delete-prompt-overwrite`: Never delete local files, but prompt to
|
||||
overwrite local files if the remote file is different. Combines nicely
|
||||
with the `show_not_deleted` option.
|
||||
- `transform`: Rules for renaming and excluding certain files and directories.
|
||||
For more details, see [this section](#transformation-rules). (Default: empty)
|
||||
- `tasks`: The maximum number of concurrent tasks (such as crawling or
|
||||
@@ -140,7 +146,7 @@ crawler simulate a slower, network-based crawler.
|
||||
|
||||
This crawler crawls a KIT-IPD page by url. The root page can be crawled from
|
||||
outside the KIT network so you will be informed about any new/deleted files,
|
||||
but downloading files requires you to be within. Adding a show delay between
|
||||
but downloading files requires you to be within. Adding a short delay between
|
||||
requests is likely a good idea.
|
||||
|
||||
- `target`: URL to a KIT-IPD page
|
||||
@@ -148,6 +154,64 @@ requests is likely a good idea.
|
||||
matches, the given link is downloaded as a file. This is used to extract
|
||||
files from KIT-IPD pages. (Default: `^.*?[^/]+\.(pdf|zip|c|cpp|java)$`)
|
||||
|
||||
### The `ilias-web` crawler
|
||||
|
||||
This crawler crawls a generic ILIAS instance.
|
||||
|
||||
Inspired by [this ILIAS downloader][ilias-dl], the following configurations should work
|
||||
out of the box for the corresponding universities:
|
||||
|
||||
[ilias-dl]: https://github.com/V3lop5/ilias-downloader/blob/main/configs "ilias-downloader configs"
|
||||
|
||||
| University | `base_url` | `login_type` | `client_id` |
|
||||
|-----------------|-----------------------------------------|--------------|---------------|
|
||||
| FH Aachen | https://www.ili.fh-aachen.de | local | elearning |
|
||||
| Uni Köln | https://www.ilias.uni-koeln.de/ilias | local | uk |
|
||||
| Uni Konstanz | https://ilias.uni-konstanz.de | local | ILIASKONSTANZ |
|
||||
| Uni Stuttgart | https://ilias3.uni-stuttgart.de | local | Uni_Stuttgart |
|
||||
| Uni Tübingen | https://ovidius.uni-tuebingen.de/ilias3 | shibboleth | |
|
||||
| KIT ILIAS Pilot | https://pilot.ilias.studium.kit.edu | shibboleth | pilot |
|
||||
|
||||
If your university isn't listed, try navigating to your instance's login page.
|
||||
Assuming no custom login service is used, the URL will look something like this:
|
||||
|
||||
```jinja
|
||||
{{ base_url }}/login.php?client_id={{ client_id }}&cmd=force_login&lang=
|
||||
```
|
||||
|
||||
If the values work, feel free to submit a PR and add them to the table above.
|
||||
|
||||
- `base_url`: The URL where the ILIAS instance is located. (Required)
|
||||
- `login_type`: How you authenticate. (Required)
|
||||
- `local`: Use `client_id` for authentication.
|
||||
- `shibboleth`: Use shibboleth for authentication.
|
||||
- `client_id`: An ID used for authentication if `login_type` is `local`. Is
|
||||
ignored if `login_type` is `shibboleth`.
|
||||
- `target`: The ILIAS element to crawl. (Required)
|
||||
- `desktop`: Crawl your personal desktop / dashboard
|
||||
- `<course id>`: Crawl the course with the given id
|
||||
- `<url>`: Crawl a given element by URL (preferably the permanent URL linked
|
||||
at the bottom of its ILIAS page).
|
||||
This also supports the "My Courses" overview page to download *all*
|
||||
courses. Note that this might produce confusing local directory layouts
|
||||
and duplication warnings if you are a member of an ILIAS group. The
|
||||
`desktop` target is generally preferable.
|
||||
- `auth`: Name of auth section to use for login. (Required)
|
||||
- `tfa_auth`: Name of auth section to use for two-factor authentication. Only
|
||||
uses the auth section's password. (Default: Anonymous `tfa` authenticator)
|
||||
- `links`: How to represent external links. (Default: `fancy`)
|
||||
- `ignore`: Don't download links.
|
||||
- `plaintext`: A text file containing only the URL.
|
||||
- `fancy`: A HTML file looking like the ILIAS link element.
|
||||
- `internet-shortcut`: An internet shortcut file (`.url` file).
|
||||
- `link_redirect_delay`: Time (in seconds) until `fancy` link files will
|
||||
redirect to the actual URL. Set to a negative value to disable the automatic
|
||||
redirect. (Default: `-1`)
|
||||
- `videos`: Whether to download videos. (Default: `no`)
|
||||
- `forums`: Whether to download forum threads. (Default: `no`)
|
||||
- `http_timeout`: The timeout (in seconds) for all HTTP requests. (Default:
|
||||
`20.0`)
|
||||
|
||||
### The `kit-ilias-web` crawler
|
||||
|
||||
This crawler crawls the KIT ILIAS instance.
|
||||
@@ -226,10 +290,10 @@ is stored in the keyring.
|
||||
|
||||
### The `pass` authenticator
|
||||
|
||||
This authenticator queries the [`pass` password manager][3] for a username and
|
||||
password. It tries to be mostly compatible with [browserpass][4] and
|
||||
[passff][5], so see those links for an overview of the format. If PFERD fails
|
||||
to load your password, you can use the `--explain` flag to see why.
|
||||
This authenticator queries the [`pass` password manager][pass] for a username
|
||||
and password. It tries to be mostly compatible with [browserpass][browserpass]
|
||||
and [passff][passff], so see those links for an overview of the format. If PFERD
|
||||
fails to load your password, you can use the `--explain` flag to see why.
|
||||
|
||||
- `passname`: The name of the password to use (Required)
|
||||
- `username_prefixes`: A comma-separated list of username line prefixes
|
||||
@@ -237,9 +301,9 @@ to load your password, you can use the `--explain` flag to see why.
|
||||
- `password_prefixes`: A comma-separated list of password line prefixes
|
||||
(Default: `password,pass,secret`)
|
||||
|
||||
[3]: <https://www.passwordstore.org/> "Pass: The Standard Unix Password Manager"
|
||||
[4]: <https://github.com/browserpass/browserpass-extension#organizing-password-store> "Organizing password store"
|
||||
[5]: <https://github.com/passff/passff#multi-line-format> "Multi-line format"
|
||||
[pass]: <https://www.passwordstore.org/> "Pass: The Standard Unix Password Manager"
|
||||
[browserpass]: <https://github.com/browserpass/browserpass-extension#organizing-password-store> "Organizing password store"
|
||||
[passff]: <https://github.com/passff/passff#multi-line-format> "Multi-line format"
|
||||
|
||||
### The `tfa` authenticator
|
||||
|
||||
@@ -338,7 +402,8 @@ matches `SOURCE`, the output path is created using `TARGET` as template.
|
||||
be referred to as `{g<n>}` (e.g. `{g3}`). `{g0}` refers to the original path.
|
||||
If capturing group *n*'s contents are a valid integer, the integer value is
|
||||
available as `{i<n>}` (e.g. `{i3}`). If capturing group *n*'s contents are a
|
||||
valid float, the float value is available as `{f<n>}` (e.g. `{f3}`). If a
|
||||
valid float, the float value is available as `{f<n>}` (e.g. `{f3}`). Named capture
|
||||
groups (e.g. `(?P<name>)`) are available by their name (e.g. `{name}`). If a
|
||||
capturing group is not present (e.g. when matching the string `cd` with the
|
||||
regex `(ab)?cd`), the corresponding variables are not defined.
|
||||
|
||||
|
21
DEV.md
21
DEV.md
@@ -9,30 +9,25 @@ particular [this][ppug-1] and [this][ppug-2] guide).
|
||||
|
||||
## Setting up a dev environment
|
||||
|
||||
The use of [venv][venv] is recommended. To initially set up a development
|
||||
environment, run these commands in the same directory as this file:
|
||||
The use of [venv][venv] and [uv][uv] is recommended. To initially set up a
|
||||
development environment, run these commands in the same directory as this file:
|
||||
|
||||
```
|
||||
$ python -m venv .venv
|
||||
$ uv sync
|
||||
$ . .venv/bin/activate
|
||||
$ ./scripts/setup
|
||||
```
|
||||
|
||||
The setup script installs a few required dependencies and tools. It also
|
||||
installs PFERD via `pip install --editable .`, which means that you can just run
|
||||
`pferd` as if it was installed normally. Since PFERD was installed with
|
||||
`--editable`, there is no need to re-run `pip install` when the source code is
|
||||
changed.
|
||||
|
||||
If you get any errors because pip can't update itself, try running
|
||||
`./scripts/setup --no-pip` instead of `./scripts/setup`.
|
||||
This install all required dependencies and tools. It also installs PFERD as
|
||||
*editable*, which means that you can just run `pferd` as if it was installed
|
||||
normally. Since PFERD was installed with `--editable`, there is no need to
|
||||
re-run `uv sync` when the source code is changed.
|
||||
|
||||
For more details, see [this part of the Python Tutorial][venv-tut] and
|
||||
[this section on "development mode"][ppug-dev].
|
||||
|
||||
[venv]: <https://docs.python.org/3/library/venv.html> "venv - Creation of virtual environments"
|
||||
[venv-tut]: <https://docs.python.org/3/tutorial/venv.html> "12. Virtual Environments and Packages"
|
||||
[ppug-dev]: <https://packaging.python.org/guides/distributing-packages-using-setuptools/#working-in-development-mode> "Working in “development mode”"
|
||||
[uv]: <https://docs.astral.sh/uv/> "uv - An extremely fast Python package and project manager"
|
||||
|
||||
## Checking and formatting the code
|
||||
|
||||
|
5
LICENSE
5
LICENSE
@@ -1,5 +1,6 @@
|
||||
Copyright 2019-2021 Garmelon, I-Al-Istannen, danstooamerican, pavelzw,
|
||||
TheChristophe, Scriptim, thelukasprobst, Toorero
|
||||
Copyright 2019-2024 Garmelon, I-Al-Istannen, danstooamerican, pavelzw,
|
||||
TheChristophe, Scriptim, thelukasprobst, Toorero,
|
||||
Mr-Pine, p-fruck, PinieP
|
||||
|
||||
Permission is hereby granted, free of charge, to any person obtaining a copy of
|
||||
this software and associated documentation files (the "Software"), to deal in
|
||||
|
@@ -47,6 +47,8 @@ def configure_logging_from_args(args: argparse.Namespace) -> None:
|
||||
log.output_explain = args.explain
|
||||
if args.status is not None:
|
||||
log.output_status = args.status
|
||||
if args.show_not_deleted is not None:
|
||||
log.output_not_deleted = args.show_not_deleted
|
||||
if args.report is not None:
|
||||
log.output_report = args.report
|
||||
|
||||
@@ -72,6 +74,8 @@ def configure_logging_from_config(args: argparse.Namespace, config: Config) -> N
|
||||
log.output_status = config.default_section.status()
|
||||
if args.report is None:
|
||||
log.output_report = config.default_section.report()
|
||||
if args.show_not_deleted is None:
|
||||
log.output_not_deleted = config.default_section.show_not_deleted()
|
||||
except ConfigOptionError as e:
|
||||
log.error(str(e))
|
||||
sys.exit(1)
|
||||
|
@@ -1,5 +1,5 @@
|
||||
from collections.abc import Callable
|
||||
from configparser import SectionProxy
|
||||
from typing import Callable, Dict
|
||||
|
||||
from ..config import Config
|
||||
from .authenticator import Authenticator, AuthError, AuthLoadError, AuthSection # noqa: F401
|
||||
@@ -9,21 +9,19 @@ from .pass_ import PassAuthenticator, PassAuthSection
|
||||
from .simple import SimpleAuthenticator, SimpleAuthSection
|
||||
from .tfa import TfaAuthenticator
|
||||
|
||||
AuthConstructor = Callable[[
|
||||
str, # Name (without the "auth:" prefix)
|
||||
SectionProxy, # Authenticator's section of global config
|
||||
Config, # Global config
|
||||
], Authenticator]
|
||||
AuthConstructor = Callable[
|
||||
[
|
||||
str, # Name (without the "auth:" prefix)
|
||||
SectionProxy, # Authenticator's section of global config
|
||||
Config, # Global config
|
||||
],
|
||||
Authenticator,
|
||||
]
|
||||
|
||||
AUTHENTICATORS: Dict[str, AuthConstructor] = {
|
||||
"credential-file": lambda n, s, c:
|
||||
CredentialFileAuthenticator(n, CredentialFileAuthSection(s), c),
|
||||
"keyring": lambda n, s, c:
|
||||
KeyringAuthenticator(n, KeyringAuthSection(s)),
|
||||
"pass": lambda n, s, c:
|
||||
PassAuthenticator(n, PassAuthSection(s)),
|
||||
"simple": lambda n, s, c:
|
||||
SimpleAuthenticator(n, SimpleAuthSection(s)),
|
||||
"tfa": lambda n, s, c:
|
||||
TfaAuthenticator(n),
|
||||
AUTHENTICATORS: dict[str, AuthConstructor] = {
|
||||
"credential-file": lambda n, s, c: CredentialFileAuthenticator(n, CredentialFileAuthSection(s), c),
|
||||
"keyring": lambda n, s, c: KeyringAuthenticator(n, KeyringAuthSection(s)),
|
||||
"pass": lambda n, s, c: PassAuthenticator(n, PassAuthSection(s)),
|
||||
"simple": lambda n, s, c: SimpleAuthenticator(n, SimpleAuthSection(s)),
|
||||
"tfa": lambda n, s, c: TfaAuthenticator(n),
|
||||
}
|
||||
|
@@ -1,5 +1,4 @@
|
||||
from abc import ABC, abstractmethod
|
||||
from typing import Tuple
|
||||
|
||||
from ..config import Section
|
||||
|
||||
@@ -35,7 +34,7 @@ class Authenticator(ABC):
|
||||
self.name = name
|
||||
|
||||
@abstractmethod
|
||||
async def credentials(self) -> Tuple[str, str]:
|
||||
async def credentials(self) -> tuple[str, str]:
|
||||
pass
|
||||
|
||||
async def username(self) -> str:
|
||||
|
@@ -1,5 +1,4 @@
|
||||
from pathlib import Path
|
||||
from typing import Tuple
|
||||
|
||||
from ..config import Config
|
||||
from ..utils import fmt_real_path
|
||||
@@ -23,7 +22,9 @@ class CredentialFileAuthenticator(Authenticator):
|
||||
with open(path, encoding="utf-8") as f:
|
||||
lines = list(f)
|
||||
except UnicodeDecodeError:
|
||||
raise AuthLoadError(f"Credential file at {fmt_real_path(path)} is not encoded using UTF-8")
|
||||
raise AuthLoadError(
|
||||
f"Credential file at {fmt_real_path(path)} is not encoded using UTF-8"
|
||||
) from None
|
||||
except OSError as e:
|
||||
raise AuthLoadError(f"No credential file at {fmt_real_path(path)}") from e
|
||||
|
||||
@@ -42,5 +43,5 @@ class CredentialFileAuthenticator(Authenticator):
|
||||
self._username = uline[9:]
|
||||
self._password = pline[9:]
|
||||
|
||||
async def credentials(self) -> Tuple[str, str]:
|
||||
async def credentials(self) -> tuple[str, str]:
|
||||
return self._username, self._password
|
||||
|
@@ -1,4 +1,4 @@
|
||||
from typing import Optional, Tuple
|
||||
from typing import Optional
|
||||
|
||||
import keyring
|
||||
|
||||
@@ -17,7 +17,6 @@ class KeyringAuthSection(AuthSection):
|
||||
|
||||
|
||||
class KeyringAuthenticator(Authenticator):
|
||||
|
||||
def __init__(self, name: str, section: KeyringAuthSection) -> None:
|
||||
super().__init__(name)
|
||||
|
||||
@@ -28,7 +27,7 @@ class KeyringAuthenticator(Authenticator):
|
||||
self._password_invalidated = False
|
||||
self._username_fixed = section.username() is not None
|
||||
|
||||
async def credentials(self) -> Tuple[str, str]:
|
||||
async def credentials(self) -> tuple[str, str]:
|
||||
# Request the username
|
||||
if self._username is None:
|
||||
async with log.exclusive_output():
|
||||
|
@@ -1,6 +1,5 @@
|
||||
import re
|
||||
import subprocess
|
||||
from typing import List, Tuple
|
||||
|
||||
from ..logging import log
|
||||
from .authenticator import Authenticator, AuthError, AuthSection
|
||||
@@ -12,11 +11,11 @@ class PassAuthSection(AuthSection):
|
||||
self.missing_value("passname")
|
||||
return value
|
||||
|
||||
def username_prefixes(self) -> List[str]:
|
||||
def username_prefixes(self) -> list[str]:
|
||||
value = self.s.get("username_prefixes", "login,username,user")
|
||||
return [prefix.lower() for prefix in value.split(",")]
|
||||
|
||||
def password_prefixes(self) -> List[str]:
|
||||
def password_prefixes(self) -> list[str]:
|
||||
value = self.s.get("password_prefixes", "password,pass,secret")
|
||||
return [prefix.lower() for prefix in value.split(",")]
|
||||
|
||||
@@ -31,14 +30,14 @@ class PassAuthenticator(Authenticator):
|
||||
self._username_prefixes = section.username_prefixes()
|
||||
self._password_prefixes = section.password_prefixes()
|
||||
|
||||
async def credentials(self) -> Tuple[str, str]:
|
||||
async def credentials(self) -> tuple[str, str]:
|
||||
log.explain_topic("Obtaining credentials from pass")
|
||||
|
||||
try:
|
||||
log.explain(f"Calling 'pass show {self._passname}'")
|
||||
result = subprocess.check_output(["pass", "show", self._passname], text=True)
|
||||
except subprocess.CalledProcessError as e:
|
||||
raise AuthError(f"Failed to get password info from {self._passname}: {e}")
|
||||
raise AuthError(f"Failed to get password info from {self._passname}: {e}") from e
|
||||
|
||||
prefixed = {}
|
||||
unprefixed = []
|
||||
|
@@ -1,4 +1,4 @@
|
||||
from typing import Optional, Tuple
|
||||
from typing import Optional
|
||||
|
||||
from ..logging import log
|
||||
from ..utils import agetpass, ainput
|
||||
@@ -23,7 +23,7 @@ class SimpleAuthenticator(Authenticator):
|
||||
self._username_fixed = self.username is not None
|
||||
self._password_fixed = self.password is not None
|
||||
|
||||
async def credentials(self) -> Tuple[str, str]:
|
||||
async def credentials(self) -> tuple[str, str]:
|
||||
if self._username is not None and self._password is not None:
|
||||
return self._username, self._password
|
||||
|
||||
|
@@ -1,5 +1,3 @@
|
||||
from typing import Tuple
|
||||
|
||||
from ..logging import log
|
||||
from ..utils import ainput
|
||||
from .authenticator import Authenticator, AuthError
|
||||
@@ -17,7 +15,7 @@ class TfaAuthenticator(Authenticator):
|
||||
code = await ainput("TFA code: ")
|
||||
return code
|
||||
|
||||
async def credentials(self) -> Tuple[str, str]:
|
||||
async def credentials(self) -> tuple[str, str]:
|
||||
raise AuthError("TFA authenticator does not support usernames")
|
||||
|
||||
def invalidate_username(self) -> None:
|
||||
|
@@ -8,6 +8,7 @@
|
||||
# well.
|
||||
|
||||
from . import command_local # noqa: F401 imported but unused
|
||||
from . import command_ilias_web # noqa: F401 imported but unused
|
||||
from . import command_kit_ilias_web # noqa: F401 imported but unused
|
||||
from . import command_kit_ipd # noqa: F401 imported but unused
|
||||
from .parser import PARSER, ParserLoadError, load_default_section # noqa: F401 imported but unused
|
||||
|
53
PFERD/cli/command_ilias_web.py
Normal file
53
PFERD/cli/command_ilias_web.py
Normal file
@@ -0,0 +1,53 @@
|
||||
import argparse
|
||||
import configparser
|
||||
|
||||
from ..logging import log
|
||||
from .common_ilias_args import configure_common_group_args, load_common
|
||||
from .parser import CRAWLER_PARSER, SUBPARSERS, load_crawler
|
||||
|
||||
COMMAND_NAME = "ilias-web"
|
||||
|
||||
SUBPARSER = SUBPARSERS.add_parser(
|
||||
COMMAND_NAME,
|
||||
parents=[CRAWLER_PARSER],
|
||||
)
|
||||
|
||||
GROUP = SUBPARSER.add_argument_group(
|
||||
title=f"{COMMAND_NAME} crawler arguments",
|
||||
description=f"arguments for the '{COMMAND_NAME}' crawler",
|
||||
)
|
||||
|
||||
GROUP.add_argument(
|
||||
"--base-url",
|
||||
type=str,
|
||||
metavar="BASE_URL",
|
||||
help="The base url of the ilias instance",
|
||||
)
|
||||
|
||||
GROUP.add_argument(
|
||||
"--client-id",
|
||||
type=str,
|
||||
metavar="CLIENT_ID",
|
||||
help="The client id of the ilias instance",
|
||||
)
|
||||
|
||||
configure_common_group_args(GROUP)
|
||||
|
||||
|
||||
def load(args: argparse.Namespace, parser: configparser.ConfigParser) -> None:
|
||||
log.explain(f"Creating config for command '{COMMAND_NAME}'")
|
||||
|
||||
parser["crawl:ilias"] = {}
|
||||
section = parser["crawl:ilias"]
|
||||
load_crawler(args, section)
|
||||
|
||||
section["type"] = COMMAND_NAME
|
||||
if args.base_url is not None:
|
||||
section["base_url"] = args.base_url
|
||||
if args.client_id is not None:
|
||||
section["client_id"] = args.client_id
|
||||
|
||||
load_common(section, args, parser)
|
||||
|
||||
|
||||
SUBPARSER.set_defaults(command=load)
|
@@ -1,120 +1,37 @@
|
||||
import argparse
|
||||
import configparser
|
||||
from pathlib import Path
|
||||
|
||||
from ..crawl.ilias.file_templates import Links
|
||||
from ..logging import log
|
||||
from .parser import (CRAWLER_PARSER, SUBPARSERS, BooleanOptionalAction, ParserLoadError, load_crawler,
|
||||
show_value_error)
|
||||
from .common_ilias_args import configure_common_group_args, load_common
|
||||
from .parser import CRAWLER_PARSER, SUBPARSERS, load_crawler
|
||||
|
||||
COMMAND_NAME = "kit-ilias-web"
|
||||
|
||||
SUBPARSER = SUBPARSERS.add_parser(
|
||||
"kit-ilias-web",
|
||||
COMMAND_NAME,
|
||||
parents=[CRAWLER_PARSER],
|
||||
)
|
||||
|
||||
GROUP = SUBPARSER.add_argument_group(
|
||||
title="kit-ilias-web crawler arguments",
|
||||
description="arguments for the 'kit-ilias-web' crawler",
|
||||
)
|
||||
GROUP.add_argument(
|
||||
"target",
|
||||
type=str,
|
||||
metavar="TARGET",
|
||||
help="course id, 'desktop', or ILIAS URL to crawl"
|
||||
)
|
||||
GROUP.add_argument(
|
||||
"output",
|
||||
type=Path,
|
||||
metavar="OUTPUT",
|
||||
help="output directory"
|
||||
)
|
||||
GROUP.add_argument(
|
||||
"--username", "-u",
|
||||
type=str,
|
||||
metavar="USERNAME",
|
||||
help="user name for authentication"
|
||||
)
|
||||
GROUP.add_argument(
|
||||
"--keyring",
|
||||
action=BooleanOptionalAction,
|
||||
help="use the system keyring to store and retrieve passwords"
|
||||
)
|
||||
GROUP.add_argument(
|
||||
"--credential-file",
|
||||
type=Path,
|
||||
metavar="PATH",
|
||||
help="read username and password from a credential file"
|
||||
)
|
||||
GROUP.add_argument(
|
||||
"--links",
|
||||
type=show_value_error(Links.from_string),
|
||||
metavar="OPTION",
|
||||
help="how to represent external links"
|
||||
)
|
||||
GROUP.add_argument(
|
||||
"--link-redirect-delay",
|
||||
type=int,
|
||||
metavar="SECONDS",
|
||||
help="time before 'fancy' links redirect to to their target (-1 to disable)"
|
||||
)
|
||||
GROUP.add_argument(
|
||||
"--videos",
|
||||
action=BooleanOptionalAction,
|
||||
help="crawl and download videos"
|
||||
)
|
||||
GROUP.add_argument(
|
||||
"--forums",
|
||||
action=BooleanOptionalAction,
|
||||
help="crawl and download forum posts"
|
||||
)
|
||||
GROUP.add_argument(
|
||||
"--http-timeout", "-t",
|
||||
type=float,
|
||||
metavar="SECONDS",
|
||||
help="timeout for all HTTP requests"
|
||||
title=f"{COMMAND_NAME} crawler arguments",
|
||||
description=f"arguments for the '{COMMAND_NAME}' crawler",
|
||||
)
|
||||
|
||||
configure_common_group_args(GROUP)
|
||||
|
||||
|
||||
def load(
|
||||
args: argparse.Namespace,
|
||||
parser: configparser.ConfigParser,
|
||||
args: argparse.Namespace,
|
||||
parser: configparser.ConfigParser,
|
||||
) -> None:
|
||||
log.explain("Creating config for command 'kit-ilias-web'")
|
||||
log.explain(f"Creating config for command '{COMMAND_NAME}'")
|
||||
|
||||
parser["crawl:ilias"] = {}
|
||||
section = parser["crawl:ilias"]
|
||||
load_crawler(args, section)
|
||||
|
||||
section["type"] = "kit-ilias-web"
|
||||
section["target"] = str(args.target)
|
||||
section["output_dir"] = str(args.output)
|
||||
section["auth"] = "auth:ilias"
|
||||
if args.links is not None:
|
||||
section["links"] = str(args.links.value)
|
||||
if args.link_redirect_delay is not None:
|
||||
section["link_redirect_delay"] = str(args.link_redirect_delay)
|
||||
if args.videos is not None:
|
||||
section["videos"] = "yes" if args.videos else "no"
|
||||
if args.forums is not None:
|
||||
section["forums"] = "yes" if args.forums else "no"
|
||||
if args.http_timeout is not None:
|
||||
section["http_timeout"] = str(args.http_timeout)
|
||||
|
||||
parser["auth:ilias"] = {}
|
||||
auth_section = parser["auth:ilias"]
|
||||
if args.credential_file is not None:
|
||||
if args.username is not None:
|
||||
raise ParserLoadError("--credential-file and --username can't be used together")
|
||||
if args.keyring:
|
||||
raise ParserLoadError("--credential-file and --keyring can't be used together")
|
||||
auth_section["type"] = "credential-file"
|
||||
auth_section["path"] = str(args.credential_file)
|
||||
elif args.keyring:
|
||||
auth_section["type"] = "keyring"
|
||||
else:
|
||||
auth_section["type"] = "simple"
|
||||
if args.username is not None:
|
||||
auth_section["username"] = args.username
|
||||
section["type"] = COMMAND_NAME
|
||||
load_common(section, args, parser)
|
||||
|
||||
|
||||
SUBPARSER.set_defaults(command=load)
|
||||
|
@@ -18,25 +18,25 @@ GROUP.add_argument(
|
||||
"--link-regex",
|
||||
type=str,
|
||||
metavar="REGEX",
|
||||
help="href-matching regex to identify downloadable files"
|
||||
help="href-matching regex to identify downloadable files",
|
||||
)
|
||||
GROUP.add_argument(
|
||||
"target",
|
||||
type=str,
|
||||
metavar="TARGET",
|
||||
help="url to crawl"
|
||||
help="url to crawl",
|
||||
)
|
||||
GROUP.add_argument(
|
||||
"output",
|
||||
type=Path,
|
||||
metavar="OUTPUT",
|
||||
help="output directory"
|
||||
help="output directory",
|
||||
)
|
||||
|
||||
|
||||
def load(
|
||||
args: argparse.Namespace,
|
||||
parser: configparser.ConfigParser,
|
||||
args: argparse.Namespace,
|
||||
parser: configparser.ConfigParser,
|
||||
) -> None:
|
||||
log.explain("Creating config for command 'kit-ipd'")
|
||||
|
||||
|
@@ -18,37 +18,37 @@ GROUP.add_argument(
|
||||
"target",
|
||||
type=Path,
|
||||
metavar="TARGET",
|
||||
help="directory to crawl"
|
||||
help="directory to crawl",
|
||||
)
|
||||
GROUP.add_argument(
|
||||
"output",
|
||||
type=Path,
|
||||
metavar="OUTPUT",
|
||||
help="output directory"
|
||||
help="output directory",
|
||||
)
|
||||
GROUP.add_argument(
|
||||
"--crawl-delay",
|
||||
type=float,
|
||||
metavar="SECONDS",
|
||||
help="artificial delay to simulate for crawl requests"
|
||||
help="artificial delay to simulate for crawl requests",
|
||||
)
|
||||
GROUP.add_argument(
|
||||
"--download-delay",
|
||||
type=float,
|
||||
metavar="SECONDS",
|
||||
help="artificial delay to simulate for download requests"
|
||||
help="artificial delay to simulate for download requests",
|
||||
)
|
||||
GROUP.add_argument(
|
||||
"--download-speed",
|
||||
type=int,
|
||||
metavar="BYTES_PER_SECOND",
|
||||
help="download speed to simulate"
|
||||
help="download speed to simulate",
|
||||
)
|
||||
|
||||
|
||||
def load(
|
||||
args: argparse.Namespace,
|
||||
parser: configparser.ConfigParser,
|
||||
args: argparse.Namespace,
|
||||
parser: configparser.ConfigParser,
|
||||
) -> None:
|
||||
log.explain("Creating config for command 'local'")
|
||||
|
||||
|
106
PFERD/cli/common_ilias_args.py
Normal file
106
PFERD/cli/common_ilias_args.py
Normal file
@@ -0,0 +1,106 @@
|
||||
import argparse
|
||||
import configparser
|
||||
from pathlib import Path
|
||||
|
||||
from ..crawl.ilias.file_templates import Links
|
||||
from .parser import BooleanOptionalAction, ParserLoadError, show_value_error
|
||||
|
||||
|
||||
def configure_common_group_args(group: argparse._ArgumentGroup) -> None:
|
||||
"""These arguments are shared between the KIT and generic Ilias web command."""
|
||||
group.add_argument(
|
||||
"target",
|
||||
type=str,
|
||||
metavar="TARGET",
|
||||
help="course id, 'desktop', or ILIAS URL to crawl",
|
||||
)
|
||||
group.add_argument(
|
||||
"output",
|
||||
type=Path,
|
||||
metavar="OUTPUT",
|
||||
help="output directory",
|
||||
)
|
||||
group.add_argument(
|
||||
"--username",
|
||||
"-u",
|
||||
type=str,
|
||||
metavar="USERNAME",
|
||||
help="user name for authentication",
|
||||
)
|
||||
group.add_argument(
|
||||
"--keyring",
|
||||
action=BooleanOptionalAction,
|
||||
help="use the system keyring to store and retrieve passwords",
|
||||
)
|
||||
group.add_argument(
|
||||
"--credential-file",
|
||||
type=Path,
|
||||
metavar="PATH",
|
||||
help="read username and password from a credential file",
|
||||
)
|
||||
group.add_argument(
|
||||
"--links",
|
||||
type=show_value_error(Links.from_string),
|
||||
metavar="OPTION",
|
||||
help="how to represent external links",
|
||||
)
|
||||
group.add_argument(
|
||||
"--link-redirect-delay",
|
||||
type=int,
|
||||
metavar="SECONDS",
|
||||
help="time before 'fancy' links redirect to to their target (-1 to disable)",
|
||||
)
|
||||
group.add_argument(
|
||||
"--videos",
|
||||
action=BooleanOptionalAction,
|
||||
help="crawl and download videos",
|
||||
)
|
||||
group.add_argument(
|
||||
"--forums",
|
||||
action=BooleanOptionalAction,
|
||||
help="crawl and download forum posts",
|
||||
)
|
||||
group.add_argument(
|
||||
"--http-timeout",
|
||||
"-t",
|
||||
type=float,
|
||||
metavar="SECONDS",
|
||||
help="timeout for all HTTP requests",
|
||||
)
|
||||
|
||||
|
||||
def load_common(
|
||||
section: configparser.SectionProxy,
|
||||
args: argparse.Namespace,
|
||||
parser: configparser.ConfigParser,
|
||||
) -> None:
|
||||
"""Load common config between generic and KIT ilias web command"""
|
||||
section["target"] = str(args.target)
|
||||
section["output_dir"] = str(args.output)
|
||||
section["auth"] = "auth:ilias"
|
||||
if args.links is not None:
|
||||
section["links"] = str(args.links.value)
|
||||
if args.link_redirect_delay is not None:
|
||||
section["link_redirect_delay"] = str(args.link_redirect_delay)
|
||||
if args.videos is not None:
|
||||
section["videos"] = "yes" if args.videos else "no"
|
||||
if args.forums is not None:
|
||||
section["forums"] = "yes" if args.forums else "no"
|
||||
if args.http_timeout is not None:
|
||||
section["http_timeout"] = str(args.http_timeout)
|
||||
|
||||
parser["auth:ilias"] = {}
|
||||
auth_section = parser["auth:ilias"]
|
||||
if args.credential_file is not None:
|
||||
if args.username is not None:
|
||||
raise ParserLoadError("--credential-file and --username can't be used together")
|
||||
if args.keyring:
|
||||
raise ParserLoadError("--credential-file and --keyring can't be used together")
|
||||
auth_section["type"] = "credential-file"
|
||||
auth_section["path"] = str(args.credential_file)
|
||||
elif args.keyring:
|
||||
auth_section["type"] = "keyring"
|
||||
else:
|
||||
auth_section["type"] = "simple"
|
||||
if args.username is not None:
|
||||
auth_section["username"] = args.username
|
@@ -1,8 +1,9 @@
|
||||
import argparse
|
||||
import configparser
|
||||
from argparse import ArgumentTypeError
|
||||
from collections.abc import Callable, Sequence
|
||||
from pathlib import Path
|
||||
from typing import Any, Callable, List, Optional, Sequence, Union
|
||||
from typing import Any, Optional
|
||||
|
||||
from ..output_dir import OnConflict, Redownload
|
||||
from ..version import NAME, VERSION
|
||||
@@ -15,15 +16,15 @@ class ParserLoadError(Exception):
|
||||
# TODO Replace with argparse version when updating to 3.9?
|
||||
class BooleanOptionalAction(argparse.Action):
|
||||
def __init__(
|
||||
self,
|
||||
option_strings: List[str],
|
||||
dest: Any,
|
||||
default: Any = None,
|
||||
type: Any = None,
|
||||
choices: Any = None,
|
||||
required: Any = False,
|
||||
help: Any = None,
|
||||
metavar: Any = None,
|
||||
self,
|
||||
option_strings: list[str],
|
||||
dest: Any,
|
||||
default: Any = None,
|
||||
type: Any = None,
|
||||
choices: Any = None,
|
||||
required: Any = False,
|
||||
help: Any = None,
|
||||
metavar: Any = None,
|
||||
):
|
||||
if len(option_strings) != 1:
|
||||
raise ValueError("There must be exactly one option string")
|
||||
@@ -48,11 +49,11 @@ class BooleanOptionalAction(argparse.Action):
|
||||
)
|
||||
|
||||
def __call__(
|
||||
self,
|
||||
parser: argparse.ArgumentParser,
|
||||
namespace: argparse.Namespace,
|
||||
values: Union[str, Sequence[Any], None],
|
||||
option_string: Optional[str] = None,
|
||||
self,
|
||||
parser: argparse.ArgumentParser,
|
||||
namespace: argparse.Namespace,
|
||||
values: str | Sequence[Any] | None,
|
||||
option_string: Optional[str] = None,
|
||||
) -> None:
|
||||
if option_string and option_string in self.option_strings:
|
||||
value = not option_string.startswith("--no-")
|
||||
@@ -67,11 +68,13 @@ def show_value_error(inner: Callable[[str], Any]) -> Callable[[str], Any]:
|
||||
Some validation functions (like the from_string in our enums) raise a ValueError.
|
||||
Argparse only pretty-prints ArgumentTypeErrors though, so we need to wrap our ValueErrors.
|
||||
"""
|
||||
|
||||
def wrapper(input: str) -> Any:
|
||||
try:
|
||||
return inner(input)
|
||||
except ValueError as e:
|
||||
raise ArgumentTypeError(e)
|
||||
raise ArgumentTypeError(e) from e
|
||||
|
||||
return wrapper
|
||||
|
||||
|
||||
@@ -81,52 +84,57 @@ CRAWLER_PARSER_GROUP = CRAWLER_PARSER.add_argument_group(
|
||||
description="arguments common to all crawlers",
|
||||
)
|
||||
CRAWLER_PARSER_GROUP.add_argument(
|
||||
"--redownload", "-r",
|
||||
"--redownload",
|
||||
"-r",
|
||||
type=show_value_error(Redownload.from_string),
|
||||
metavar="OPTION",
|
||||
help="when to download a file that's already present locally"
|
||||
help="when to download a file that's already present locally",
|
||||
)
|
||||
CRAWLER_PARSER_GROUP.add_argument(
|
||||
"--on-conflict",
|
||||
type=show_value_error(OnConflict.from_string),
|
||||
metavar="OPTION",
|
||||
help="what to do when local and remote files or directories differ"
|
||||
help="what to do when local and remote files or directories differ",
|
||||
)
|
||||
CRAWLER_PARSER_GROUP.add_argument(
|
||||
"--transform", "-T",
|
||||
"--transform",
|
||||
"-T",
|
||||
action="append",
|
||||
type=str,
|
||||
metavar="RULE",
|
||||
help="add a single transformation rule. Can be specified multiple times"
|
||||
help="add a single transformation rule. Can be specified multiple times",
|
||||
)
|
||||
CRAWLER_PARSER_GROUP.add_argument(
|
||||
"--tasks", "-n",
|
||||
"--tasks",
|
||||
"-n",
|
||||
type=int,
|
||||
metavar="N",
|
||||
help="maximum number of concurrent tasks (crawling, downloading)"
|
||||
help="maximum number of concurrent tasks (crawling, downloading)",
|
||||
)
|
||||
CRAWLER_PARSER_GROUP.add_argument(
|
||||
"--downloads", "-N",
|
||||
"--downloads",
|
||||
"-N",
|
||||
type=int,
|
||||
metavar="N",
|
||||
help="maximum number of tasks that may download data at the same time"
|
||||
help="maximum number of tasks that may download data at the same time",
|
||||
)
|
||||
CRAWLER_PARSER_GROUP.add_argument(
|
||||
"--task-delay", "-d",
|
||||
"--task-delay",
|
||||
"-d",
|
||||
type=float,
|
||||
metavar="SECONDS",
|
||||
help="time the crawler should wait between subsequent tasks"
|
||||
help="time the crawler should wait between subsequent tasks",
|
||||
)
|
||||
CRAWLER_PARSER_GROUP.add_argument(
|
||||
"--windows-paths",
|
||||
action=BooleanOptionalAction,
|
||||
help="whether to repair invalid paths on windows"
|
||||
help="whether to repair invalid paths on windows",
|
||||
)
|
||||
|
||||
|
||||
def load_crawler(
|
||||
args: argparse.Namespace,
|
||||
section: configparser.SectionProxy,
|
||||
args: argparse.Namespace,
|
||||
section: configparser.SectionProxy,
|
||||
) -> None:
|
||||
if args.redownload is not None:
|
||||
section["redownload"] = args.redownload.value
|
||||
@@ -152,74 +160,79 @@ PARSER.add_argument(
|
||||
version=f"{NAME} {VERSION} (https://github.com/Garmelon/PFERD)",
|
||||
)
|
||||
PARSER.add_argument(
|
||||
"--config", "-c",
|
||||
"--config",
|
||||
"-c",
|
||||
type=Path,
|
||||
metavar="PATH",
|
||||
help="custom config file"
|
||||
help="custom config file",
|
||||
)
|
||||
PARSER.add_argument(
|
||||
"--dump-config",
|
||||
action="store_true",
|
||||
help="dump current configuration to the default config path and exit"
|
||||
help="dump current configuration to the default config path and exit",
|
||||
)
|
||||
PARSER.add_argument(
|
||||
"--dump-config-to",
|
||||
metavar="PATH",
|
||||
help="dump current configuration to a file and exit."
|
||||
" Use '-' as path to print to stdout instead"
|
||||
help="dump current configuration to a file and exit. Use '-' as path to print to stdout instead",
|
||||
)
|
||||
PARSER.add_argument(
|
||||
"--debug-transforms",
|
||||
action="store_true",
|
||||
help="apply transform rules to files of previous run"
|
||||
help="apply transform rules to files of previous run",
|
||||
)
|
||||
PARSER.add_argument(
|
||||
"--crawler", "-C",
|
||||
"--crawler",
|
||||
"-C",
|
||||
action="append",
|
||||
type=str,
|
||||
metavar="NAME",
|
||||
help="only execute a single crawler."
|
||||
" Can be specified multiple times to execute multiple crawlers"
|
||||
help="only execute a single crawler. Can be specified multiple times to execute multiple crawlers",
|
||||
)
|
||||
PARSER.add_argument(
|
||||
"--skip", "-S",
|
||||
"--skip",
|
||||
"-S",
|
||||
action="append",
|
||||
type=str,
|
||||
metavar="NAME",
|
||||
help="don't execute this particular crawler."
|
||||
" Can be specified multiple times to skip multiple crawlers"
|
||||
help="don't execute this particular crawler. Can be specified multiple times to skip multiple crawlers",
|
||||
)
|
||||
PARSER.add_argument(
|
||||
"--working-dir",
|
||||
type=Path,
|
||||
metavar="PATH",
|
||||
help="custom working directory"
|
||||
help="custom working directory",
|
||||
)
|
||||
PARSER.add_argument(
|
||||
"--explain",
|
||||
action=BooleanOptionalAction,
|
||||
help="log and explain in detail what PFERD is doing"
|
||||
help="log and explain in detail what PFERD is doing",
|
||||
)
|
||||
PARSER.add_argument(
|
||||
"--status",
|
||||
action=BooleanOptionalAction,
|
||||
help="print status updates while PFERD is crawling"
|
||||
help="print status updates while PFERD is crawling",
|
||||
)
|
||||
PARSER.add_argument(
|
||||
"--report",
|
||||
action=BooleanOptionalAction,
|
||||
help="print a report of all local changes before exiting"
|
||||
help="print a report of all local changes before exiting",
|
||||
)
|
||||
PARSER.add_argument(
|
||||
"--share-cookies",
|
||||
action=BooleanOptionalAction,
|
||||
help="whether crawlers should share cookies where applicable"
|
||||
help="whether crawlers should share cookies where applicable",
|
||||
)
|
||||
PARSER.add_argument(
|
||||
"--show-not-deleted",
|
||||
action=BooleanOptionalAction,
|
||||
help="print messages in status and report when PFERD did not delete a local only file",
|
||||
)
|
||||
|
||||
|
||||
def load_default_section(
|
||||
args: argparse.Namespace,
|
||||
parser: configparser.ConfigParser,
|
||||
args: argparse.Namespace,
|
||||
parser: configparser.ConfigParser,
|
||||
) -> None:
|
||||
section = parser[parser.default_section]
|
||||
|
||||
@@ -233,6 +246,8 @@ def load_default_section(
|
||||
section["report"] = "yes" if args.report else "no"
|
||||
if args.share_cookies is not None:
|
||||
section["share_cookies"] = "yes" if args.share_cookies else "no"
|
||||
if args.show_not_deleted is not None:
|
||||
section["show_not_deleted"] = "yes" if args.show_not_deleted else "no"
|
||||
|
||||
|
||||
SUBPARSERS = PARSER.add_subparsers(title="crawlers")
|
||||
|
@@ -3,7 +3,7 @@ import os
|
||||
import sys
|
||||
from configparser import ConfigParser, SectionProxy
|
||||
from pathlib import Path
|
||||
from typing import Any, List, NoReturn, Optional, Tuple
|
||||
from typing import Any, NoReturn, Optional
|
||||
|
||||
from rich.markup import escape
|
||||
|
||||
@@ -53,10 +53,10 @@ class Section:
|
||||
raise ConfigOptionError(self.s.name, key, desc)
|
||||
|
||||
def invalid_value(
|
||||
self,
|
||||
key: str,
|
||||
value: Any,
|
||||
reason: Optional[str],
|
||||
self,
|
||||
key: str,
|
||||
value: Any,
|
||||
reason: Optional[str],
|
||||
) -> NoReturn:
|
||||
if reason is None:
|
||||
self.error(key, f"Invalid value {value!r}")
|
||||
@@ -82,6 +82,9 @@ class DefaultSection(Section):
|
||||
def report(self) -> bool:
|
||||
return self.s.getboolean("report", fallback=True)
|
||||
|
||||
def show_not_deleted(self) -> bool:
|
||||
return self.s.getboolean("show_not_deleted", fallback=True)
|
||||
|
||||
def share_cookies(self) -> bool:
|
||||
return self.s.getboolean("share_cookies", fallback=True)
|
||||
|
||||
@@ -123,13 +126,13 @@ class Config:
|
||||
with open(path, encoding="utf-8") as f:
|
||||
parser.read_file(f, source=str(path))
|
||||
except FileNotFoundError:
|
||||
raise ConfigLoadError(path, "File does not exist")
|
||||
raise ConfigLoadError(path, "File does not exist") from None
|
||||
except IsADirectoryError:
|
||||
raise ConfigLoadError(path, "That's a directory, not a file")
|
||||
raise ConfigLoadError(path, "That's a directory, not a file") from None
|
||||
except PermissionError:
|
||||
raise ConfigLoadError(path, "Insufficient permissions")
|
||||
raise ConfigLoadError(path, "Insufficient permissions") from None
|
||||
except UnicodeDecodeError:
|
||||
raise ConfigLoadError(path, "File is not encoded using UTF-8")
|
||||
raise ConfigLoadError(path, "File is not encoded using UTF-8") from None
|
||||
|
||||
def dump(self, path: Optional[Path] = None) -> None:
|
||||
"""
|
||||
@@ -147,8 +150,8 @@ class Config:
|
||||
|
||||
try:
|
||||
path.parent.mkdir(parents=True, exist_ok=True)
|
||||
except PermissionError:
|
||||
raise ConfigDumpError(path, "Could not create parent directory")
|
||||
except PermissionError as e:
|
||||
raise ConfigDumpError(path, "Could not create parent directory") from e
|
||||
|
||||
try:
|
||||
# Ensuring we don't accidentally overwrite any existing files by
|
||||
@@ -164,16 +167,16 @@ class Config:
|
||||
with open(path, "w", encoding="utf-8") as f:
|
||||
self._parser.write(f)
|
||||
else:
|
||||
raise ConfigDumpError(path, "File already exists")
|
||||
raise ConfigDumpError(path, "File already exists") from None
|
||||
except IsADirectoryError:
|
||||
raise ConfigDumpError(path, "That's a directory, not a file")
|
||||
except PermissionError:
|
||||
raise ConfigDumpError(path, "Insufficient permissions")
|
||||
raise ConfigDumpError(path, "That's a directory, not a file") from None
|
||||
except PermissionError as e:
|
||||
raise ConfigDumpError(path, "Insufficient permissions") from e
|
||||
|
||||
def dump_to_stdout(self) -> None:
|
||||
self._parser.write(sys.stdout)
|
||||
|
||||
def crawl_sections(self) -> List[Tuple[str, SectionProxy]]:
|
||||
def crawl_sections(self) -> list[tuple[str, SectionProxy]]:
|
||||
result = []
|
||||
for name, proxy in self._parser.items():
|
||||
if name.startswith("crawl:"):
|
||||
@@ -181,7 +184,7 @@ class Config:
|
||||
|
||||
return result
|
||||
|
||||
def auth_sections(self) -> List[Tuple[str, SectionProxy]]:
|
||||
def auth_sections(self) -> list[tuple[str, SectionProxy]]:
|
||||
result = []
|
||||
for name, proxy in self._parser.items():
|
||||
if name.startswith("auth:"):
|
||||
|
@@ -1,25 +1,26 @@
|
||||
from collections.abc import Callable
|
||||
from configparser import SectionProxy
|
||||
from typing import Callable, Dict
|
||||
|
||||
from ..auth import Authenticator
|
||||
from ..config import Config
|
||||
from .crawler import Crawler, CrawlError, CrawlerSection # noqa: F401
|
||||
from .ilias import KitIliasWebCrawler, KitIliasWebCrawlerSection
|
||||
from .ilias import IliasWebCrawler, IliasWebCrawlerSection, KitIliasWebCrawler, KitIliasWebCrawlerSection
|
||||
from .kit_ipd_crawler import KitIpdCrawler, KitIpdCrawlerSection
|
||||
from .local_crawler import LocalCrawler, LocalCrawlerSection
|
||||
|
||||
CrawlerConstructor = Callable[[
|
||||
str, # Name (without the "crawl:" prefix)
|
||||
SectionProxy, # Crawler's section of global config
|
||||
Config, # Global config
|
||||
Dict[str, Authenticator], # Loaded authenticators by name
|
||||
], Crawler]
|
||||
CrawlerConstructor = Callable[
|
||||
[
|
||||
str, # Name (without the "crawl:" prefix)
|
||||
SectionProxy, # Crawler's section of global config
|
||||
Config, # Global config
|
||||
dict[str, Authenticator], # Loaded authenticators by name
|
||||
],
|
||||
Crawler,
|
||||
]
|
||||
|
||||
CRAWLERS: Dict[str, CrawlerConstructor] = {
|
||||
"local": lambda n, s, c, a:
|
||||
LocalCrawler(n, LocalCrawlerSection(s), c),
|
||||
"kit-ilias-web": lambda n, s, c, a:
|
||||
KitIliasWebCrawler(n, KitIliasWebCrawlerSection(s), c, a),
|
||||
"kit-ipd": lambda n, s, c, a:
|
||||
KitIpdCrawler(n, KitIpdCrawlerSection(s), c),
|
||||
CRAWLERS: dict[str, CrawlerConstructor] = {
|
||||
"local": lambda n, s, c, a: LocalCrawler(n, LocalCrawlerSection(s), c),
|
||||
"ilias-web": lambda n, s, c, a: IliasWebCrawler(n, IliasWebCrawlerSection(s), c, a),
|
||||
"kit-ilias-web": lambda n, s, c, a: KitIliasWebCrawler(n, KitIliasWebCrawlerSection(s), c, a),
|
||||
"kit-ipd": lambda n, s, c, a: KitIpdCrawler(n, KitIpdCrawlerSection(s), c),
|
||||
}
|
||||
|
@@ -1,10 +1,10 @@
|
||||
import asyncio
|
||||
import os
|
||||
from abc import ABC, abstractmethod
|
||||
from collections.abc import Awaitable, Coroutine
|
||||
from collections.abc import Awaitable, Callable, Coroutine, Sequence
|
||||
from datetime import datetime
|
||||
from pathlib import Path, PurePath
|
||||
from typing import Any, Callable, Dict, List, Optional, Sequence, Set, Tuple, TypeVar
|
||||
from typing import Any, Optional, TypeVar
|
||||
|
||||
from ..auth import Authenticator
|
||||
from ..config import Config, Section
|
||||
@@ -116,7 +116,7 @@ class CrawlToken(ReusableAsyncContextManager[ProgressBar]):
|
||||
return bar
|
||||
|
||||
|
||||
class DownloadToken(ReusableAsyncContextManager[Tuple[ProgressBar, FileSink]]):
|
||||
class DownloadToken(ReusableAsyncContextManager[tuple[ProgressBar, FileSink]]):
|
||||
def __init__(self, limiter: Limiter, fs_token: FileSinkToken, path: PurePath):
|
||||
super().__init__()
|
||||
|
||||
@@ -128,12 +128,13 @@ class DownloadToken(ReusableAsyncContextManager[Tuple[ProgressBar, FileSink]]):
|
||||
def path(self) -> PurePath:
|
||||
return self._path
|
||||
|
||||
async def _on_aenter(self) -> Tuple[ProgressBar, FileSink]:
|
||||
async def _on_aenter(self) -> tuple[ProgressBar, FileSink]:
|
||||
await self._stack.enter_async_context(self._limiter.limit_download())
|
||||
sink = await self._stack.enter_async_context(self._fs_token)
|
||||
# The "Downloaded ..." message is printed in the output dir, not here
|
||||
bar = self._stack.enter_context(log.download_bar("[bold bright_cyan]", "Downloading",
|
||||
fmt_path(self._path)))
|
||||
bar = self._stack.enter_context(
|
||||
log.download_bar("[bold bright_cyan]", "Downloading", fmt_path(self._path))
|
||||
)
|
||||
|
||||
return bar, sink
|
||||
|
||||
@@ -149,9 +150,7 @@ class CrawlerSection(Section):
|
||||
return self.s.getboolean("skip", fallback=False)
|
||||
|
||||
def output_dir(self, name: str) -> Path:
|
||||
# TODO Use removeprefix() after switching to 3.9
|
||||
if name.startswith("crawl:"):
|
||||
name = name[len("crawl:"):]
|
||||
name = name.removeprefix("crawl:")
|
||||
return Path(self.s.get("output_dir", name)).expanduser()
|
||||
|
||||
def redownload(self) -> Redownload:
|
||||
@@ -206,7 +205,7 @@ class CrawlerSection(Section):
|
||||
on_windows = os.name == "nt"
|
||||
return self.s.getboolean("windows_paths", fallback=on_windows)
|
||||
|
||||
def auth(self, authenticators: Dict[str, Authenticator]) -> Authenticator:
|
||||
def auth(self, authenticators: dict[str, Authenticator]) -> Authenticator:
|
||||
value = self.s.get("auth")
|
||||
if value is None:
|
||||
self.missing_value("auth")
|
||||
@@ -218,10 +217,10 @@ class CrawlerSection(Section):
|
||||
|
||||
class Crawler(ABC):
|
||||
def __init__(
|
||||
self,
|
||||
name: str,
|
||||
section: CrawlerSection,
|
||||
config: Config,
|
||||
self,
|
||||
name: str,
|
||||
section: CrawlerSection,
|
||||
config: Config,
|
||||
) -> None:
|
||||
"""
|
||||
Initialize a crawler from its name and its section in the config file.
|
||||
@@ -258,8 +257,12 @@ class Crawler(ABC):
|
||||
def prev_report(self) -> Optional[Report]:
|
||||
return self._output_dir.prev_report
|
||||
|
||||
@property
|
||||
def output_dir(self) -> OutputDirectory:
|
||||
return self._output_dir
|
||||
|
||||
@staticmethod
|
||||
async def gather(awaitables: Sequence[Awaitable[Any]]) -> List[Any]:
|
||||
async def gather(awaitables: Sequence[Awaitable[Any]]) -> list[Any]:
|
||||
"""
|
||||
Similar to asyncio.gather. However, in the case of an exception, all
|
||||
still running tasks are cancelled and the exception is rethrown.
|
||||
@@ -290,12 +293,39 @@ class Crawler(ABC):
|
||||
log.explain("Answer: Yes")
|
||||
return CrawlToken(self._limiter, path)
|
||||
|
||||
def should_try_download(
|
||||
self,
|
||||
path: PurePath,
|
||||
*,
|
||||
etag_differs: Optional[bool] = None,
|
||||
mtime: Optional[datetime] = None,
|
||||
redownload: Optional[Redownload] = None,
|
||||
on_conflict: Optional[OnConflict] = None,
|
||||
) -> bool:
|
||||
log.explain_topic(f"Decision: Should Download {fmt_path(path)}")
|
||||
|
||||
if self._transformer.transform(path) is None:
|
||||
log.explain("Answer: No (ignored)")
|
||||
return False
|
||||
|
||||
should_download = self._output_dir.should_try_download(
|
||||
path, etag_differs=etag_differs, mtime=mtime, redownload=redownload, on_conflict=on_conflict
|
||||
)
|
||||
if should_download:
|
||||
log.explain("Answer: Yes")
|
||||
return True
|
||||
else:
|
||||
log.explain("Answer: No")
|
||||
return False
|
||||
|
||||
async def download(
|
||||
self,
|
||||
path: PurePath,
|
||||
mtime: Optional[datetime] = None,
|
||||
redownload: Optional[Redownload] = None,
|
||||
on_conflict: Optional[OnConflict] = None,
|
||||
self,
|
||||
path: PurePath,
|
||||
*,
|
||||
etag_differs: Optional[bool] = None,
|
||||
mtime: Optional[datetime] = None,
|
||||
redownload: Optional[Redownload] = None,
|
||||
on_conflict: Optional[OnConflict] = None,
|
||||
) -> Optional[DownloadToken]:
|
||||
log.explain_topic(f"Decision: Download {fmt_path(path)}")
|
||||
path = self._deduplicator.mark(path)
|
||||
@@ -307,7 +337,14 @@ class Crawler(ABC):
|
||||
log.status("[bold bright_black]", "Ignored", fmt_path(path))
|
||||
return None
|
||||
|
||||
fs_token = await self._output_dir.download(path, transformed_path, mtime, redownload, on_conflict)
|
||||
fs_token = await self._output_dir.download(
|
||||
path,
|
||||
transformed_path,
|
||||
etag_differs=etag_differs,
|
||||
mtime=mtime,
|
||||
redownload=redownload,
|
||||
on_conflict=on_conflict,
|
||||
)
|
||||
if fs_token is None:
|
||||
log.explain("Answer: No")
|
||||
return None
|
||||
@@ -357,7 +394,7 @@ class Crawler(ABC):
|
||||
log.warn("Couldn't find or load old report")
|
||||
return
|
||||
|
||||
seen: Set[PurePath] = set()
|
||||
seen: set[PurePath] = set()
|
||||
for known in sorted(self.prev_report.found_paths):
|
||||
looking_at = list(reversed(known.parents)) + [known]
|
||||
for path in looking_at:
|
||||
|
@@ -1,12 +1,14 @@
|
||||
import asyncio
|
||||
import http.cookies
|
||||
import ssl
|
||||
from datetime import datetime
|
||||
from pathlib import Path, PurePath
|
||||
from typing import Any, Dict, List, Optional
|
||||
from typing import Any, Optional
|
||||
|
||||
import aiohttp
|
||||
import certifi
|
||||
from aiohttp.client import ClientTimeout
|
||||
from bs4 import Tag
|
||||
|
||||
from ..auth import Authenticator
|
||||
from ..config import Config
|
||||
@@ -15,21 +17,23 @@ from ..utils import fmt_real_path
|
||||
from ..version import NAME, VERSION
|
||||
from .crawler import Crawler, CrawlerSection
|
||||
|
||||
ETAGS_CUSTOM_REPORT_VALUE_KEY = "etags"
|
||||
|
||||
|
||||
class HttpCrawlerSection(CrawlerSection):
|
||||
def http_timeout(self) -> float:
|
||||
return self.s.getfloat("http_timeout", fallback=20)
|
||||
return self.s.getfloat("http_timeout", fallback=30)
|
||||
|
||||
|
||||
class HttpCrawler(Crawler):
|
||||
COOKIE_FILE = PurePath(".cookies")
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
name: str,
|
||||
section: HttpCrawlerSection,
|
||||
config: Config,
|
||||
shared_auth: Optional[Authenticator] = None,
|
||||
self,
|
||||
name: str,
|
||||
section: HttpCrawlerSection,
|
||||
config: Config,
|
||||
shared_auth: Optional[Authenticator] = None,
|
||||
) -> None:
|
||||
super().__init__(name, section, config)
|
||||
|
||||
@@ -39,7 +43,7 @@ class HttpCrawler(Crawler):
|
||||
self._http_timeout = section.http_timeout()
|
||||
|
||||
self._cookie_jar_path = self._output_dir.resolve(self.COOKIE_FILE)
|
||||
self._shared_cookie_jar_paths: Optional[List[Path]] = None
|
||||
self._shared_cookie_jar_paths: Optional[list[Path]] = None
|
||||
self._shared_auth = shared_auth
|
||||
|
||||
self._output_dir.register_reserved(self.COOKIE_FILE)
|
||||
@@ -94,7 +98,7 @@ class HttpCrawler(Crawler):
|
||||
"""
|
||||
raise RuntimeError("_authenticate() was called but crawler doesn't provide an implementation")
|
||||
|
||||
def share_cookies(self, shared: Dict[Authenticator, List[Path]]) -> None:
|
||||
def share_cookies(self, shared: dict[Authenticator, list[Path]]) -> None:
|
||||
if not self._shared_auth:
|
||||
return
|
||||
|
||||
@@ -169,24 +173,102 @@ class HttpCrawler(Crawler):
|
||||
log.warn(f"Failed to save cookies to {fmt_real_path(self._cookie_jar_path)}")
|
||||
log.warn(str(e))
|
||||
|
||||
@staticmethod
|
||||
def get_folder_structure_from_heading_hierarchy(file_link: Tag, drop_h1: bool = False) -> PurePath:
|
||||
"""
|
||||
Retrieves the hierarchy of headings associated with the given file link and constructs a folder
|
||||
structure from them.
|
||||
|
||||
<h1> level headings usually only appear once and serve as the page title, so they would introduce
|
||||
redundant nesting. To avoid this, <h1> headings are ignored via the drop_h1 parameter.
|
||||
"""
|
||||
|
||||
def find_associated_headings(tag: Tag, level: int) -> PurePath:
|
||||
if level == 0 or (level == 1 and drop_h1):
|
||||
return PurePath()
|
||||
|
||||
level_heading = tag.find_previous(name=f"h{level}")
|
||||
|
||||
if level_heading is None:
|
||||
return find_associated_headings(tag, level - 1)
|
||||
|
||||
folder_name = level_heading.get_text().strip()
|
||||
return find_associated_headings(level_heading, level - 1) / folder_name
|
||||
|
||||
# start at level <h3> because paragraph-level headings are usually too granular for folder names
|
||||
return find_associated_headings(file_link, 3)
|
||||
|
||||
def _get_previous_etag_from_report(self, path: PurePath) -> Optional[str]:
|
||||
"""
|
||||
If available, retrieves the entity tag for a given path which was stored in the previous report.
|
||||
"""
|
||||
if not self._output_dir.prev_report:
|
||||
return None
|
||||
|
||||
etags = self._output_dir.prev_report.get_custom_value(ETAGS_CUSTOM_REPORT_VALUE_KEY) or {}
|
||||
return etags.get(str(path))
|
||||
|
||||
def _add_etag_to_report(self, path: PurePath, etag: Optional[str]) -> None:
|
||||
"""
|
||||
Adds an entity tag for a given path to the report's custom values.
|
||||
"""
|
||||
if not etag:
|
||||
return
|
||||
|
||||
etags = self._output_dir.report.get_custom_value(ETAGS_CUSTOM_REPORT_VALUE_KEY) or {}
|
||||
etags[str(path)] = etag
|
||||
self._output_dir.report.add_custom_value(ETAGS_CUSTOM_REPORT_VALUE_KEY, etags)
|
||||
|
||||
async def _request_resource_version(self, resource_url: str) -> tuple[Optional[str], Optional[datetime]]:
|
||||
"""
|
||||
Requests the ETag and Last-Modified headers of a resource via a HEAD request.
|
||||
If no entity tag / modification date can be obtained, the corresponding value will be None.
|
||||
"""
|
||||
try:
|
||||
async with self.session.head(resource_url) as resp:
|
||||
if resp.status != 200:
|
||||
return None, None
|
||||
|
||||
etag_header = resp.headers.get("ETag")
|
||||
last_modified_header = resp.headers.get("Last-Modified")
|
||||
last_modified = None
|
||||
|
||||
if last_modified_header:
|
||||
try:
|
||||
# https://developer.mozilla.org/en-US/docs/Web/HTTP/Headers/Last-Modified#directives
|
||||
datetime_format = "%a, %d %b %Y %H:%M:%S GMT"
|
||||
last_modified = datetime.strptime(last_modified_header, datetime_format)
|
||||
except ValueError:
|
||||
# last_modified remains None
|
||||
pass
|
||||
|
||||
return etag_header, last_modified
|
||||
except aiohttp.ClientError:
|
||||
return None, None
|
||||
|
||||
async def run(self) -> None:
|
||||
self._request_count = 0
|
||||
self._cookie_jar = aiohttp.CookieJar()
|
||||
self._load_cookies()
|
||||
|
||||
async with aiohttp.ClientSession(
|
||||
headers={"User-Agent": f"{NAME}/{VERSION}"},
|
||||
cookie_jar=self._cookie_jar,
|
||||
connector=aiohttp.TCPConnector(ssl=ssl.create_default_context(cafile=certifi.where())),
|
||||
timeout=ClientTimeout(
|
||||
# 30 minutes. No download in the history of downloads was longer than 30 minutes.
|
||||
# This is enough to transfer a 600 MB file over a 3 Mib/s connection.
|
||||
# Allowing an arbitrary value could be annoying for overnight batch jobs
|
||||
total=15 * 60,
|
||||
connect=self._http_timeout,
|
||||
sock_connect=self._http_timeout,
|
||||
sock_read=self._http_timeout,
|
||||
)
|
||||
headers={"User-Agent": f"{NAME}/{VERSION}"},
|
||||
cookie_jar=self._cookie_jar,
|
||||
connector=aiohttp.TCPConnector(ssl=ssl.create_default_context(cafile=certifi.where())),
|
||||
timeout=ClientTimeout(
|
||||
# 30 minutes. No download in the history of downloads was longer than 30 minutes.
|
||||
# This is enough to transfer a 600 MB file over a 3 Mib/s connection.
|
||||
# Allowing an arbitrary value could be annoying for overnight batch jobs
|
||||
total=15 * 60,
|
||||
connect=self._http_timeout,
|
||||
sock_connect=self._http_timeout,
|
||||
sock_read=self._http_timeout,
|
||||
),
|
||||
# See https://github.com/aio-libs/aiohttp/issues/6626
|
||||
# Without this aiohttp will mangle the redirect header from Shibboleth, invalidating the
|
||||
# passed signature. Shibboleth will not accept the broken signature and authentication will
|
||||
# fail.
|
||||
requote_redirect_url=False,
|
||||
) as session:
|
||||
self.session = session
|
||||
try:
|
||||
|
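The new ETag plumbing above stores entity tags under the "etags" custom report value and can query a resource's current ETag/Last-Modified via a HEAD request. A simplified sketch of how a subclass might combine the three helpers to skip unchanged downloads (the streaming logic is elided; this mirrors, but is not, the kit-ipd code further below):

# Simplified sketch of the intended ETag flow.
async def download_if_changed(crawler: HttpCrawler, path: PurePath, url: str) -> None:
    prev_etag = crawler._get_previous_etag_from_report(path)
    etag, mtime = await crawler._request_resource_version(url)
    etag_differs = None if prev_etag is None else prev_etag != etag

    maybe_dl = await crawler.download(path, etag_differs=etag_differs, mtime=mtime)
    if not maybe_dl:
        if prev_etag:
            # keep remembering the old tag so the next run can still compare
            crawler._add_etag_to_report(path, prev_etag)
        return
    async with maybe_dl as (bar, sink):
        ...  # stream the response body into sink, then record the fresh tag
        crawler._add_etag_to_report(path, etag)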
@@ -1,3 +1,13 @@
|
||||
from .kit_ilias_web_crawler import KitIliasWebCrawler, KitIliasWebCrawlerSection
|
||||
from .kit_ilias_web_crawler import (
|
||||
IliasWebCrawler,
|
||||
IliasWebCrawlerSection,
|
||||
KitIliasWebCrawler,
|
||||
KitIliasWebCrawlerSection,
|
||||
)
|
||||
|
||||
__all__ = ["KitIliasWebCrawler", "KitIliasWebCrawlerSection"]
|
||||
__all__ = [
|
||||
"IliasWebCrawler",
|
||||
"IliasWebCrawlerSection",
|
||||
"KitIliasWebCrawler",
|
||||
"KitIliasWebCrawlerSection",
|
||||
]
|
||||
|
41
PFERD/crawl/ilias/async_helper.py
Normal file
@@ -0,0 +1,41 @@
|
||||
import asyncio
|
||||
from collections.abc import Callable
|
||||
from typing import Any, Optional
|
||||
|
||||
import aiohttp
|
||||
|
||||
from ...logging import log
|
||||
from ..crawler import AWrapped, CrawlError, CrawlWarning
|
||||
|
||||
|
||||
def _iorepeat(attempts: int, name: str, failure_is_error: bool = False) -> Callable[[AWrapped], AWrapped]:
|
||||
def decorator(f: AWrapped) -> AWrapped:
|
||||
async def wrapper(*args: Any, **kwargs: Any) -> Optional[Any]:
|
||||
last_exception: Optional[BaseException] = None
|
||||
for round in range(attempts):
|
||||
try:
|
||||
return await f(*args, **kwargs)
|
||||
except aiohttp.ContentTypeError: # invalid content type
|
||||
raise CrawlWarning("ILIAS returned an invalid content type") from None
|
||||
except aiohttp.TooManyRedirects:
|
||||
raise CrawlWarning("Got stuck in a redirect loop") from None
|
||||
except aiohttp.ClientPayloadError as e: # encoding or not enough bytes
|
||||
last_exception = e
|
||||
except aiohttp.ClientConnectionError as e: # e.g. timeout, disconnect, resolve failed, etc.
|
||||
last_exception = e
|
||||
except asyncio.exceptions.TimeoutError as e: # explicit http timeouts in HttpCrawler
|
||||
last_exception = e
|
||||
log.explain_topic(f"Retrying operation {name}. Retries left: {attempts - 1 - round}")
|
||||
log.explain(f"Last exception: {last_exception!r}")
|
||||
|
||||
if last_exception:
|
||||
message = f"Error in I/O Operation: {last_exception!r}"
|
||||
if failure_is_error:
|
||||
raise CrawlError(message) from last_exception
|
||||
else:
|
||||
raise CrawlWarning(message) from last_exception
|
||||
raise CrawlError("Impossible return in ilias _iorepeat")
|
||||
|
||||
return wrapper # type: ignore
|
||||
|
||||
return decorator
|
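_iorepeat retries an async operation on transient aiohttp and timeout errors and converts the final failure into a CrawlWarning (or a CrawlError when failure_is_error is set). A hedged usage sketch; the decorated method below is invented for illustration and assumes aiohttp and _iorepeat are imported:

# Illustration only: an ILIAS request guarded by the retry decorator above.
class ExampleIliasPage:
    def __init__(self, session: aiohttp.ClientSession) -> None:
        self.session = session

    @_iorepeat(3, "downloading file", failure_is_error=False)
    async def fetch_bytes(self, url: str) -> bytes:
        # three attempts; if all fail, the error surfaces as a CrawlWarning
        async with self.session.get(url) as resp:
            return await resp.read()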
@@ -1,5 +1,11 @@
|
||||
import dataclasses
|
||||
import re
|
||||
from enum import Enum
|
||||
from typing import Optional
|
||||
from typing import Optional, cast
|
||||
|
||||
import bs4
|
||||
|
||||
from PFERD.utils import soupify
|
||||
|
||||
_link_template_plain = "{{link}}"
|
||||
_link_template_fancy = """
|
||||
@@ -8,7 +14,9 @@ _link_template_fancy = """
|
||||
<head>
|
||||
<meta charset="UTF-8">
|
||||
<title>ILIAS - Link: {{name}}</title>
|
||||
<!-- REPEAT REMOVE START -->
|
||||
<meta http-equiv = "refresh" content = "{{redirect_delay}}; url = {{link}}" />
|
||||
<!-- REPEAT REMOVE END -->
|
||||
</head>
|
||||
|
||||
<style>
|
||||
@@ -19,6 +27,8 @@ _link_template_fancy = """
|
||||
display: flex;
|
||||
align-items: center;
|
||||
justify-content: center;
|
||||
flex-direction: column;
|
||||
gap: 4px;
|
||||
}
|
||||
body {
|
||||
padding: 0;
|
||||
@@ -27,11 +37,16 @@ _link_template_fancy = """
|
||||
font-family: "Open Sans", Verdana, Arial, Helvetica, sans-serif;
|
||||
height: 100vh;
|
||||
}
|
||||
.row {
|
||||
background-color: white;
|
||||
.column {
|
||||
min-width: 500px;
|
||||
max-width: 90vw;
|
||||
display: flex;
|
||||
flex-direction: column;
|
||||
row-gap: 5px;
|
||||
}
|
||||
.row {
|
||||
background-color: white;
|
||||
display: flex;
|
||||
padding: 1em;
|
||||
}
|
||||
.logo {
|
||||
@@ -71,19 +86,23 @@ _link_template_fancy = """
|
||||
}
|
||||
</style>
|
||||
<body class="center-flex">
|
||||
<div class="row">
|
||||
<div class="logo center-flex">
|
||||
<svg xmlns="http://www.w3.org/2000/svg" width="24" height="24" viewBox="0 0 24 24">
|
||||
<path d="M12 0c-6.627 0-12 5.373-12 12s5.373 12 12 12 12-5.373 12-12-5.373-12-12-12zm9.567 9.098c-.059-.058-.127-.108-.206-.138-.258-.101-1.35.603-1.515.256-.108-.231-.327.148-.578.008-.121-.067-.459-.52-.611-.465-.312.112.479.974.694 1.087.203-.154.86-.469 1.002-.039.271.812-.745 1.702-1.264 2.171-.775.702-.63-.454-1.159-.86-.277-.213-.274-.667-.555-.824-.125-.071-.7-.732-.694-.821l-.017.167c-.095.072-.297-.27-.319-.325 0 .298.485.772.646 1.011.273.409.42 1.005.756 1.339.179.18.866.923 1.045.908l.921-.437c.649.154-1.531 3.237-1.738 3.619-.171.321.139 1.112.114 1.49-.029.437-.374.579-.7.817-.35.255-.268.752-.562.934-.521.321-.897 1.366-1.639 1.361-.219-.001-1.151.364-1.273.007-.095-.258-.223-.455-.356-.71-.131-.25-.015-.51-.175-.731-.11-.154-.479-.502-.513-.684-.002-.157.118-.632.283-.715.231-.118.044-.462.016-.663-.048-.357-.27-.652-.535-.859-.393-.302-.189-.542-.098-.974 0-.206-.126-.476-.402-.396-.57.166-.396-.445-.812-.417-.299.021-.543.211-.821.295-.349.104-.707-.083-1.053-.126-1.421-.179-1.885-1.804-1.514-2.976.037-.192-.115-.547-.048-.696.159-.352.485-.752.768-1.021.16-.152.365-.113.553-.231.29-.182.294-.558.578-.789.404-.328.956-.321 1.482-.392.281-.037 1.35-.268 1.518-.06 0 .039.193.611-.019.578.438.023 1.061.756 1.476.585.213-.089.135-.744.573-.427.265.19 1.45.275 1.696.07.152-.125.236-.939.053-1.031.117.116-.618.125-.686.099-.122-.044-.235.115-.43.025.117.055-.651-.358-.22-.674-.181.132-.349-.037-.544.109-.135.109.062.181-.13.277-.305.155-.535-.53-.649-.607-.118-.077-1.024-.713-.777-.298l.797.793c-.04.026-.209-.289-.209-.059.053-.136.02.585-.105.35-.056-.09.091-.14.006-.271 0-.085-.23-.169-.275-.228-.126-.157-.462-.502-.644-.585-.05-.024-.771.088-.832.111-.071.099-.131.203-.181.314-.149.055-.29.127-.423.216l-.159.356c-.068.061-.772.294-.776.303.03-.076-.492-.172-.457-.324.038-.167.215-.687.169-.877-.048-.199 1.085.287 1.158-.238.029-.227.047-.492-.316-.531.069.008.702-.249.807-.364.148-.169.486-.447.731-.447.286 0 .225-.417.356-.622.133.053-.071.38.088.512-.01-.104.45.057.494.033.105-.056.691-.023.601-.299-.101-.28.052-.197.183-.255-.02.008.248-.458.363-.456-.104-.089-.398.112-.516.103-.308-.024-.177-.525-.061-.672.09-.116-.246-.258-.25-.036-.006.332-.314.633-.243 1.075.109.666-.743-.161-.816-.115-.283.172-.515-.216-.368-.449.149-.238.51-.226.659-.48.104-.179.227-.389.388-.524.541-.454.689-.091 1.229-.042.526.048.178.125.105.327-.07.192.289.261.413.1.071-.092.232-.326.301-.499.07-.175.578-.2.527-.365 2.72 1.148 4.827 3.465 5.694 6.318zm-11.113-3.779l.068-.087.073-.019c.042-.034.086-.118.151-.104.043.009.146.095.111.148-.037.054-.066-.049-.081.101-.018.169-.188.167-.313.222-.087.037-.175-.018-.09-.104l.088-.108-.007-.049zm.442.245c.046-.045.138-.008.151-.094.014-.084.078-.178-.008-.335-.022-.042.116-.082.051-.137l-.109.032s.155-.668.364-.366l-.089.103c.135.134.172.47.215.687.127.066.324.078.098.192.117-.02-.618.314-.715.178-.072-.083.317-.139.307-.173-.004-.011-.317-.02-.265-.087zm1.43-3.547l-.356.326c-.36.298-1.28.883-1.793.705-.524-.18-1.647.667-1.826.673-.067.003.002-.641.36-.689-.141.021.993-.575 1.185-.805.678-.146 1.381-.227 2.104-.227l.326.017zm-5.086 1.19c.07.082.278.092-.026.288-.183.11-.377.809-.548.809-.51.223-.542-.439-1.109.413-.078.115-.395.158-.644.236.685-.688 1.468-1.279 2.327-1.746zm-5.24 8.793c0-.541.055-1.068.139-1.586l.292.185c.113.135.113.719.169.911.139.482.484.751.748 1.19.155.261.414.923.332 1.197.109-.179 1.081.824 1.259 1.033.418.492.74 1.088.061 1.574-.219.158.334 1.14.049 
1.382l-.365.094c-.225.138-.235.397-.166.631-1.562-1.765-2.518-4.076-2.518-6.611zm14.347-5.823c.083-.01-.107.167-.107.167.033.256.222.396.581.527.437.157.038.455-.213.385-.139-.039-.854-.255-.879.025 0 .167-.679.001-.573-.175.073-.119.05-.387.186-.562.193-.255.38-.116.386.032-.001.394.398-.373.619-.399z"/>
|
||||
</svg>
|
||||
</div>
|
||||
<div class="tile">
|
||||
<div class="top-row">
|
||||
<a href="{{link}}">{{name}}</a>
|
||||
<div class="column">
|
||||
<!-- REPEAT START -->
|
||||
<div class="row">
|
||||
<div class="logo center-flex">
|
||||
<svg xmlns="http://www.w3.org/2000/svg" width="24" height="24" viewBox="0 0 24 24">
|
||||
<path d="M12 0c-6.627 0-12 5.373-12 12s5.373 12 12 12 12-5.373 12-12-5.373-12-12-12zm9.567 9.098c-.059-.058-.127-.108-.206-.138-.258-.101-1.35.603-1.515.256-.108-.231-.327.148-.578.008-.121-.067-.459-.52-.611-.465-.312.112.479.974.694 1.087.203-.154.86-.469 1.002-.039.271.812-.745 1.702-1.264 2.171-.775.702-.63-.454-1.159-.86-.277-.213-.274-.667-.555-.824-.125-.071-.7-.732-.694-.821l-.017.167c-.095.072-.297-.27-.319-.325 0 .298.485.772.646 1.011.273.409.42 1.005.756 1.339.179.18.866.923 1.045.908l.921-.437c.649.154-1.531 3.237-1.738 3.619-.171.321.139 1.112.114 1.49-.029.437-.374.579-.7.817-.35.255-.268.752-.562.934-.521.321-.897 1.366-1.639 1.361-.219-.001-1.151.364-1.273.007-.095-.258-.223-.455-.356-.71-.131-.25-.015-.51-.175-.731-.11-.154-.479-.502-.513-.684-.002-.157.118-.632.283-.715.231-.118.044-.462.016-.663-.048-.357-.27-.652-.535-.859-.393-.302-.189-.542-.098-.974 0-.206-.126-.476-.402-.396-.57.166-.396-.445-.812-.417-.299.021-.543.211-.821.295-.349.104-.707-.083-1.053-.126-1.421-.179-1.885-1.804-1.514-2.976.037-.192-.115-.547-.048-.696.159-.352.485-.752.768-1.021.16-.152.365-.113.553-.231.29-.182.294-.558.578-.789.404-.328.956-.321 1.482-.392.281-.037 1.35-.268 1.518-.06 0 .039.193.611-.019.578.438.023 1.061.756 1.476.585.213-.089.135-.744.573-.427.265.19 1.45.275 1.696.07.152-.125.236-.939.053-1.031.117.116-.618.125-.686.099-.122-.044-.235.115-.43.025.117.055-.651-.358-.22-.674-.181.132-.349-.037-.544.109-.135.109.062.181-.13.277-.305.155-.535-.53-.649-.607-.118-.077-1.024-.713-.777-.298l.797.793c-.04.026-.209-.289-.209-.059.053-.136.02.585-.105.35-.056-.09.091-.14.006-.271 0-.085-.23-.169-.275-.228-.126-.157-.462-.502-.644-.585-.05-.024-.771.088-.832.111-.071.099-.131.203-.181.314-.149.055-.29.127-.423.216l-.159.356c-.068.061-.772.294-.776.303.03-.076-.492-.172-.457-.324.038-.167.215-.687.169-.877-.048-.199 1.085.287 1.158-.238.029-.227.047-.492-.316-.531.069.008.702-.249.807-.364.148-.169.486-.447.731-.447.286 0 .225-.417.356-.622.133.053-.071.38.088.512-.01-.104.45.057.494.033.105-.056.691-.023.601-.299-.101-.28.052-.197.183-.255-.02.008.248-.458.363-.456-.104-.089-.398.112-.516.103-.308-.024-.177-.525-.061-.672.09-.116-.246-.258-.25-.036-.006.332-.314.633-.243 1.075.109.666-.743-.161-.816-.115-.283.172-.515-.216-.368-.449.149-.238.51-.226.659-.48.104-.179.227-.389.388-.524.541-.454.689-.091 1.229-.042.526.048.178.125.105.327-.07.192.289.261.413.1.071-.092.232-.326.301-.499.07-.175.578-.2.527-.365 2.72 1.148 4.827 3.465 5.694 6.318zm-11.113-3.779l.068-.087.073-.019c.042-.034.086-.118.151-.104.043.009.146.095.111.148-.037.054-.066-.049-.081.101-.018.169-.188.167-.313.222-.087.037-.175-.018-.09-.104l.088-.108-.007-.049zm.442.245c.046-.045.138-.008.151-.094.014-.084.078-.178-.008-.335-.022-.042.116-.082.051-.137l-.109.032s.155-.668.364-.366l-.089.103c.135.134.172.47.215.687.127.066.324.078.098.192.117-.02-.618.314-.715.178-.072-.083.317-.139.307-.173-.004-.011-.317-.02-.265-.087zm1.43-3.547l-.356.326c-.36.298-1.28.883-1.793.705-.524-.18-1.647.667-1.826.673-.067.003.002-.641.36-.689-.141.021.993-.575 1.185-.805.678-.146 1.381-.227 2.104-.227l.326.017zm-5.086 1.19c.07.082.278.092-.026.288-.183.11-.377.809-.548.809-.51.223-.542-.439-1.109.413-.078.115-.395.158-.644.236.685-.688 1.468-1.279 2.327-1.746zm-5.24 8.793c0-.541.055-1.068.139-1.586l.292.185c.113.135.113.719.169.911.139.482.484.751.748 1.19.155.261.414.923.332 1.197.109-.179 1.081.824 1.259 1.033.418.492.74 1.088.061 1.574-.219.158.334 1.14.049 
1.382l-.365.094c-.225.138-.235.397-.166.631-1.562-1.765-2.518-4.076-2.518-6.611zm14.347-5.823c.083-.01-.107.167-.107.167.033.256.222.396.581.527.437.157.038.455-.213.385-.139-.039-.854-.255-.879.025 0 .167-.679.001-.573-.175.073-.119.05-.387.186-.562.193-.255.38-.116.386.032-.001.394.398-.373.619-.399z"/>
|
||||
</svg>
|
||||
</div>
|
||||
<div class="bottom-row">{{description}}</div>
|
||||
<div class="tile">
|
||||
<div class="top-row">
|
||||
<a href="{{link}}">{{name}}</a>
|
||||
</div>
|
||||
<div class="bottom-row">{{description}}</div>
|
||||
</div>
|
||||
<div class="menu-button center-flex"> ⯆ </div>
|
||||
</div>
|
||||
<div class="menu-button center-flex"> ⯆ </div>
|
||||
<!-- REPEAT END -->
|
||||
</div>
|
||||
</body>
|
||||
</html>
|
||||
@@ -92,8 +111,173 @@ _link_template_fancy = """
|
||||
_link_template_internet_shortcut = """
|
||||
[InternetShortcut]
|
||||
URL={{link}}
|
||||
Desc={{description}}
|
||||
""".strip()
|
||||
|
||||
_learning_module_template = """
|
||||
<!DOCTYPE html>
|
||||
<html lang="en">
|
||||
<head>
|
||||
<meta charset="UTF-8">
|
||||
<title>{{name}}</title>
|
||||
</head>
|
||||
|
||||
<style>
|
||||
* {
|
||||
box-sizing: border-box;
|
||||
}
|
||||
.center-flex {
|
||||
display: flex;
|
||||
align-items: center;
|
||||
justify-content: center;
|
||||
}
|
||||
.nav {
|
||||
display: flex;
|
||||
justify-content: space-between;
|
||||
}
|
||||
</style>
|
||||
<body class="center-flex">
|
||||
{{body}}
|
||||
</body>
|
||||
</html>
|
||||
"""
|
||||
|
||||
_forum_thread_template = """
|
||||
<!DOCTYPE html>
|
||||
<html lang="en">
|
||||
<head>
|
||||
<meta charset="UTF-8">
|
||||
<title>ILIAS - Forum: {{name}}</title>
|
||||
<style>
|
||||
* {
|
||||
box-sizing: border-box;
|
||||
}
|
||||
body {
|
||||
font-family: 'Open Sans', Verdana, Arial, Helvetica, sans-serif;
|
||||
padding: 8px;
|
||||
}
|
||||
ul, ol, p {
|
||||
margin: 1.2em 0;
|
||||
}
|
||||
p {
|
||||
margin-top: 8px;
|
||||
margin-bottom: 8px;
|
||||
}
|
||||
a {
|
||||
color: #00876c;
|
||||
text-decoration: none;
|
||||
cursor: pointer;
|
||||
}
|
||||
a:hover {
|
||||
text-decoration: underline;
|
||||
}
|
||||
body > p:first-child > span:first-child {
|
||||
font-size: 1.6em;
|
||||
}
|
||||
body > p:first-child > span:first-child ~ span.default {
|
||||
display: inline-block;
|
||||
font-size: 1.2em;
|
||||
padding-bottom: 8px;
|
||||
}
|
||||
.ilFrmPostContent {
|
||||
margin-top: 8px;
|
||||
max-width: 64em;
|
||||
}
|
||||
.ilFrmPostContent > *:first-child {
|
||||
margin-top: 0px;
|
||||
}
|
||||
.ilFrmPostTitle {
|
||||
margin-top: 24px;
|
||||
color: #00876c;
|
||||
font-weight: bold;
|
||||
}
|
||||
#ilFrmPostList {
|
||||
list-style: none;
|
||||
padding-left: 0;
|
||||
}
|
||||
li.ilFrmPostRow {
|
||||
padding: 3px 0 3px 3px;
|
||||
margin-bottom: 24px;
|
||||
border-left: 6px solid #dddddd;
|
||||
}
|
||||
.ilFrmPostRow > div {
|
||||
display: flex;
|
||||
}
|
||||
.ilFrmPostImage img {
|
||||
margin: 0 !important;
|
||||
padding: 6px 9px 9px 6px;
|
||||
}
|
||||
.ilUserIcon {
|
||||
width: 115px;
|
||||
}
|
||||
.small {
|
||||
text-decoration: none;
|
||||
font-size: 0.75rem;
|
||||
color: #6f6f6f;
|
||||
}
|
||||
</style>
|
||||
</head>
|
||||
<body>
|
||||
{{heading}}
|
||||
{{content}}
|
||||
</body>
|
||||
</html>
|
||||
""".strip() # noqa: E501 line too long
|
||||
|
||||
|
||||
def learning_module_template(body: bs4.Tag, name: str, prev: Optional[str], next: Optional[str]) -> str:
|
||||
# Seems to be comments, ignore those.
|
||||
for elem in body.select(".il-copg-mob-fullscreen-modal"):
|
||||
elem.decompose()
|
||||
|
||||
nav_template = """
|
||||
<div class="nav">
|
||||
{{left}}
|
||||
{{right}}
|
||||
</div>
|
||||
"""
|
||||
if prev and body.select_one(".ilc_page_lnav_LeftNavigation"):
|
||||
text = cast(bs4.Tag, body.select_one(".ilc_page_lnav_LeftNavigation")).get_text().strip()
|
||||
left = f'<a href="{prev}">{text}</a>'
|
||||
else:
|
||||
left = "<span></span>"
|
||||
|
||||
if next and body.select_one(".ilc_page_rnav_RightNavigation"):
|
||||
text = cast(bs4.Tag, body.select_one(".ilc_page_rnav_RightNavigation")).get_text().strip()
|
||||
right = f'<a href="{next}">{text}</a>'
|
||||
else:
|
||||
right = "<span></span>"
|
||||
|
||||
if top_nav := body.select_one(".ilc_page_tnav_TopNavigation"):
|
||||
top_nav.replace_with(
|
||||
soupify(nav_template.replace("{{left}}", left).replace("{{right}}", right).encode())
|
||||
)
|
||||
|
||||
if bot_nav := body.select_one(".ilc_page_bnav_BottomNavigation"):
|
||||
bot_nav.replace_with(
|
||||
soupify(nav_template.replace("{{left}}", left).replace("{{right}}", right).encode())
|
||||
)
|
||||
|
||||
body_str = body.prettify()
|
||||
return _learning_module_template.replace("{{body}}", body_str).replace("{{name}}", name)
|
||||
|
||||
|
||||
def forum_thread_template(name: str, url: str, heading: bs4.Tag, content: bs4.Tag) -> str:
|
||||
if title := heading.find(name="b"):
|
||||
title.wrap(bs4.Tag(name="a", attrs={"href": url}))
|
||||
return (
|
||||
_forum_thread_template.replace("{{name}}", name)
|
||||
.replace("{{heading}}", heading.prettify())
|
||||
.replace("{{content}}", content.prettify())
|
||||
)
|
||||
|
||||
|
||||
@dataclasses.dataclass
|
||||
class LinkData:
|
||||
name: str
|
||||
url: str
|
||||
description: str
|
||||
|
||||
|
||||
class Links(Enum):
|
||||
IGNORE = "ignore"
|
||||
@@ -102,31 +286,71 @@ class Links(Enum):
|
||||
INTERNET_SHORTCUT = "internet-shortcut"
|
||||
|
||||
def template(self) -> Optional[str]:
|
||||
if self == self.FANCY:
|
||||
if self == Links.FANCY:
|
||||
return _link_template_fancy
|
||||
elif self == self.PLAINTEXT:
|
||||
elif self == Links.PLAINTEXT:
|
||||
return _link_template_plain
|
||||
elif self == self.INTERNET_SHORTCUT:
|
||||
elif self == Links.INTERNET_SHORTCUT:
|
||||
return _link_template_internet_shortcut
|
||||
elif self == self.IGNORE:
|
||||
elif self == Links.IGNORE:
|
||||
return None
|
||||
raise ValueError("Missing switch case")
|
||||
|
||||
def collection_as_one(self) -> bool:
|
||||
return self == Links.FANCY
|
||||
|
||||
def extension(self) -> Optional[str]:
|
||||
if self == self.FANCY:
|
||||
if self == Links.FANCY:
|
||||
return ".html"
|
||||
elif self == self.PLAINTEXT:
|
||||
elif self == Links.PLAINTEXT:
|
||||
return ".txt"
|
||||
elif self == self.INTERNET_SHORTCUT:
|
||||
elif self == Links.INTERNET_SHORTCUT:
|
||||
return ".url"
|
||||
elif self == self.IGNORE:
|
||||
elif self == Links.IGNORE:
|
||||
return None
|
||||
raise ValueError("Missing switch case")
|
||||
|
||||
def interpolate(self, redirect_delay: int, collection_name: str, links: list[LinkData]) -> str:
|
||||
template = self.template()
|
||||
if template is None:
|
||||
raise ValueError("Cannot interpolate ignored links")
|
||||
|
||||
if len(links) == 1:
|
||||
link = links[0]
|
||||
content = template
|
||||
content = content.replace("{{link}}", link.url)
|
||||
content = content.replace("{{name}}", link.name)
|
||||
content = content.replace("{{description}}", link.description)
|
||||
content = content.replace("{{redirect_delay}}", str(redirect_delay))
|
||||
return content
|
||||
if self == Links.PLAINTEXT or self == Links.INTERNET_SHORTCUT:
|
||||
return "\n".join(f"{link.url}" for link in links)
|
||||
|
||||
# All others get coerced to fancy
|
||||
content = cast(str, Links.FANCY.template())
|
||||
repeated_content = cast(
|
||||
re.Match[str], re.search(r"<!-- REPEAT START -->([\s\S]+)<!-- REPEAT END -->", content)
|
||||
).group(1)
|
||||
|
||||
parts = []
|
||||
for link in links:
|
||||
instance = repeated_content
|
||||
instance = instance.replace("{{link}}", link.url)
|
||||
instance = instance.replace("{{name}}", link.name)
|
||||
instance = instance.replace("{{description}}", link.description)
|
||||
instance = instance.replace("{{redirect_delay}}", str(redirect_delay))
|
||||
parts.append(instance)
|
||||
|
||||
content = content.replace(repeated_content, "\n".join(parts))
|
||||
content = content.replace("{{name}}", collection_name)
|
||||
content = re.sub(r"<!-- REPEAT REMOVE START -->[\s\S]+<!-- REPEAT REMOVE END -->", "", content)
|
||||
|
||||
return content
|
||||
|
||||
@staticmethod
|
||||
def from_string(string: str) -> "Links":
|
||||
try:
|
||||
return Links(string)
|
||||
except ValueError:
|
||||
raise ValueError("must be one of 'ignore', 'plaintext',"
|
||||
" 'html', 'internet-shortcut'")
|
||||
options = [f"'{option.value}'" for option in Links]
|
||||
raise ValueError(f"must be one of {', '.join(options)}") from None
|
||||
|
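interpolate() now renders either a single link file or, for the fancy format, one combined page built from the REPEAT block. A hedged usage sketch with made-up link data (the enum value "fancy" is assumed, not shown in this hunk):

# Example values only; real names and URLs come from the ILIAS crawler.
links = [
    LinkData(name="Lecture website", url="https://example.com/lecture", description="Course homepage"),
    LinkData(name="Exercise sheet 1", url="https://example.com/sheet1", description=""),
]

fmt = Links.from_string("fancy")
if fmt.collection_as_one():
    # one HTML file containing a row per link
    content = fmt.interpolate(redirect_delay=5, collection_name="Links", links=links)
else:
    # plaintext / internet-shortcut formats fall back to one file per link
    content = fmt.interpolate(5, links[0].name, [links[0]])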
@@ -1,3 +1,5 @@
|
||||
from typing import cast
|
||||
|
||||
from bs4 import BeautifulSoup, Comment, Tag
|
||||
|
||||
_STYLE_TAG_CONTENT = """
|
||||
@@ -12,6 +14,13 @@ _STYLE_TAG_CONTENT = """
|
||||
font-weight: bold;
|
||||
}
|
||||
|
||||
.row-flex {
|
||||
display: flex;
|
||||
}
|
||||
.row-flex-wrap {
|
||||
flex-wrap: wrap;
|
||||
}
|
||||
|
||||
.accordion-head {
|
||||
background-color: #f5f7fa;
|
||||
padding: 0.5rem 0;
|
||||
@@ -30,6 +39,10 @@ _STYLE_TAG_CONTENT = """
|
||||
margin: 0.5rem 0;
|
||||
}
|
||||
|
||||
img {
|
||||
background-color: white;
|
||||
}
|
||||
|
||||
body {
|
||||
padding: 1em;
|
||||
grid-template-columns: 1fr min(60rem, 90%) 1fr;
|
||||
@@ -47,12 +60,11 @@ _ARTICLE_WORTHY_CLASSES = [
|
||||
def insert_base_markup(soup: BeautifulSoup) -> BeautifulSoup:
|
||||
head = soup.new_tag("head")
|
||||
soup.insert(0, head)
|
||||
# Force UTF-8 encoding
|
||||
head.append(soup.new_tag("meta", charset="utf-8"))
|
||||
|
||||
simplecss_link: Tag = soup.new_tag("link")
|
||||
# <link rel="stylesheet" href="https://cdn.simplecss.org/simple.css">
|
||||
simplecss_link["rel"] = "stylesheet"
|
||||
simplecss_link["href"] = "https://cdn.simplecss.org/simple.css"
|
||||
head.append(simplecss_link)
|
||||
head.append(soup.new_tag("link", rel="stylesheet", href="https://cdn.simplecss.org/simple.css"))
|
||||
|
||||
# Basic style tags for compat
|
||||
style: Tag = soup.new_tag("style")
|
||||
@@ -63,18 +75,18 @@ def insert_base_markup(soup: BeautifulSoup) -> BeautifulSoup:
|
||||
|
||||
|
||||
def clean(soup: BeautifulSoup) -> BeautifulSoup:
|
||||
for block in soup.find_all(class_=lambda x: x in _ARTICLE_WORTHY_CLASSES):
|
||||
for block in cast(list[Tag], soup.find_all(class_=lambda x: x in _ARTICLE_WORTHY_CLASSES)):
|
||||
block.name = "article"
|
||||
|
||||
for block in soup.find_all("h3"):
|
||||
for block in cast(list[Tag], soup.find_all("h3")):
|
||||
block.name = "div"
|
||||
|
||||
for block in soup.find_all("h1"):
|
||||
for block in cast(list[Tag], soup.find_all("h1")):
|
||||
block.name = "h3"
|
||||
|
||||
for block in soup.find_all(class_="ilc_va_ihcap_VAccordIHeadCap"):
|
||||
for block in cast(list[Tag], soup.find_all(class_="ilc_va_ihcap_VAccordIHeadCap")):
|
||||
block.name = "h3"
|
||||
block["class"] += ["accordion-head"]
|
||||
block["class"] += ["accordion-head"] # type: ignore
|
||||
|
||||
for dummy in soup.select(".ilc_text_block_Standard.ilc_Paragraph"):
|
||||
children = list(dummy.children)
|
||||
@@ -82,10 +94,15 @@ def clean(soup: BeautifulSoup) -> BeautifulSoup:
|
||||
dummy.decompose()
|
||||
if len(children) > 1:
|
||||
continue
|
||||
if type(children[0]) == Comment:
|
||||
if isinstance(type(children[0]), Comment):
|
||||
dummy.decompose()
|
||||
|
||||
for hrule_imposter in soup.find_all(class_="ilc_section_Separator"):
|
||||
# Delete video figures, as they cannot be internalized anyway
|
||||
for video in soup.select(".ilc_media_cont_MediaContainerHighlighted .ilPageVideo"):
|
||||
if figure := video.find_parent("figure"):
|
||||
figure.decompose()
|
||||
|
||||
for hrule_imposter in cast(list[Tag], soup.find_all(class_="ilc_section_Separator")):
|
||||
hrule_imposter.insert(0, soup.new_tag("hr"))
|
||||
|
||||
return soup
|
||||
|
1073
PFERD/crawl/ilias/ilias_web_crawler.py
Normal file
File diff suppressed because it is too large
127
PFERD/crawl/ilias/shibboleth_login.py
Normal file
@@ -0,0 +1,127 @@
|
||||
from typing import Any, Optional, cast
|
||||
|
||||
import aiohttp
|
||||
import yarl
|
||||
from bs4 import BeautifulSoup, Tag
|
||||
|
||||
from ...auth import Authenticator, TfaAuthenticator
|
||||
from ...logging import log
|
||||
from ...utils import soupify
|
||||
from ..crawler import CrawlError
|
||||
|
||||
|
||||
class ShibbolethLogin:
|
||||
"""
|
||||
Login via shibboleth system.
|
||||
"""
|
||||
|
||||
def __init__(
|
||||
self, ilias_url: str, authenticator: Authenticator, tfa_authenticator: Optional[Authenticator]
|
||||
) -> None:
|
||||
self._ilias_url = ilias_url
|
||||
self._auth = authenticator
|
||||
self._tfa_auth = tfa_authenticator
|
||||
|
||||
async def login(self, sess: aiohttp.ClientSession) -> None:
|
||||
"""
|
||||
Performs the ILIAS Shibboleth authentication dance and saves the login
|
||||
cookies it receives.
|
||||
|
||||
This function should only be called whenever it is detected that you're
|
||||
not logged in. The cookies obtained should be good for a few minutes,
|
||||
maybe even an hour or two.
|
||||
"""
|
||||
|
||||
# Equivalent: Click on "Mit KIT-Account anmelden" button in
|
||||
# https://ilias.studium.kit.edu/login.php
|
||||
url = f"{self._ilias_url}/shib_login.php"
|
||||
async with sess.get(url) as response:
|
||||
shib_url = response.url
|
||||
if str(shib_url).startswith(self._ilias_url):
|
||||
log.explain("ILIAS recognized our shib token and logged us in in the background, returning")
|
||||
return
|
||||
soup: BeautifulSoup = soupify(await response.read())
|
||||
|
||||
# Attempt to login using credentials, if necessary
|
||||
while not self._login_successful(soup):
|
||||
# Searching the form here so that this fails before asking for
|
||||
# credentials rather than after asking.
|
||||
form = cast(Tag, soup.find("form", {"method": "post"}))
|
||||
action = cast(str, form["action"])
|
||||
|
||||
# Equivalent: Enter credentials in
|
||||
# https://idp.scc.kit.edu/idp/profile/SAML2/Redirect/SSO
|
||||
url = str(shib_url.origin()) + action
|
||||
username, password = await self._auth.credentials()
|
||||
data = {
|
||||
"_eventId_proceed": "",
|
||||
"j_username": username,
|
||||
"j_password": password,
|
||||
"fudis_web_authn_assertion_input": "",
|
||||
}
|
||||
if csrf_token_input := form.find("input", {"name": "csrf_token"}):
|
||||
data["csrf_token"] = csrf_token_input["value"] # type: ignore
|
||||
soup = await _post(sess, url, data)
|
||||
|
||||
if soup.find(id="attributeRelease"):
|
||||
raise CrawlError(
|
||||
"ILIAS Shibboleth entitlements changed! "
|
||||
"Please log in once in your browser and review them"
|
||||
)
|
||||
|
||||
if self._tfa_required(soup):
|
||||
soup = await self._authenticate_tfa(sess, soup, shib_url)
|
||||
|
||||
if not self._login_successful(soup):
|
||||
self._auth.invalidate_credentials()
|
||||
|
||||
# Equivalent: Being redirected via JS automatically
|
||||
# (or clicking "Continue" if you have JS disabled)
|
||||
relay_state = cast(Tag, soup.find("input", {"name": "RelayState"}))
|
||||
saml_response = cast(Tag, soup.find("input", {"name": "SAMLResponse"}))
|
||||
url = cast(str, cast(Tag, soup.find("form", {"method": "post"}))["action"])
|
||||
data = { # using the info obtained in the while loop above
|
||||
"RelayState": cast(str, relay_state["value"]),
|
||||
"SAMLResponse": cast(str, saml_response["value"]),
|
||||
}
|
||||
await sess.post(cast(str, url), data=data)
|
||||
|
||||
async def _authenticate_tfa(
|
||||
self, session: aiohttp.ClientSession, soup: BeautifulSoup, shib_url: yarl.URL
|
||||
) -> BeautifulSoup:
|
||||
if not self._tfa_auth:
|
||||
self._tfa_auth = TfaAuthenticator("ilias-anon-tfa")
|
||||
|
||||
tfa_token = await self._tfa_auth.password()
|
||||
|
||||
# Searching the form here so that this fails before asking for
|
||||
# credentials rather than after asking.
|
||||
form = cast(Tag, soup.find("form", {"method": "post"}))
|
||||
action = cast(str, form["action"])
|
||||
|
||||
# Equivalent: Enter token in
|
||||
# https://idp.scc.kit.edu/idp/profile/SAML2/Redirect/SSO
|
||||
url = str(shib_url.origin()) + action
|
||||
username, password = await self._auth.credentials()
|
||||
data = {
|
||||
"_eventId_proceed": "",
|
||||
"fudis_otp_input": tfa_token,
|
||||
}
|
||||
if csrf_token_input := form.find("input", {"name": "csrf_token"}):
|
||||
data["csrf_token"] = csrf_token_input["value"] # type: ignore
|
||||
return await _post(session, url, data)
|
||||
|
||||
@staticmethod
|
||||
def _login_successful(soup: BeautifulSoup) -> bool:
|
||||
relay_state = soup.find("input", {"name": "RelayState"})
|
||||
saml_response = soup.find("input", {"name": "SAMLResponse"})
|
||||
return relay_state is not None and saml_response is not None
|
||||
|
||||
@staticmethod
|
||||
def _tfa_required(soup: BeautifulSoup) -> bool:
|
||||
return soup.find(id="fudiscr-form") is not None
|
||||
|
||||
|
||||
async def _post(session: aiohttp.ClientSession, url: str, data: Any) -> BeautifulSoup:
|
||||
async with session.post(url, data=data) as response:
|
||||
return soupify(await response.read())
|
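A hedged sketch of how a crawler might drive this class once it notices it is logged out; the session and authenticator objects are placeholders and the import wiring is omitted:

# Illustration only.
async def relogin(session: aiohttp.ClientSession, auth: Authenticator) -> None:
    login = ShibbolethLogin("https://ilias.studium.kit.edu", auth, tfa_authenticator=None)
    await login.login(session)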
@@ -1,8 +1,11 @@
|
||||
import os
|
||||
import re
|
||||
from collections.abc import Awaitable, Generator, Iterable
|
||||
from dataclasses import dataclass
|
||||
from datetime import datetime
|
||||
from pathlib import PurePath
|
||||
from typing import Awaitable, List, Optional, Pattern, Set, Tuple, Union
|
||||
from re import Pattern
|
||||
from typing import Any, Optional, Union, cast
|
||||
from urllib.parse import urljoin
|
||||
|
||||
from bs4 import BeautifulSoup, Tag
|
||||
@@ -31,33 +34,32 @@ class KitIpdCrawlerSection(HttpCrawlerSection):
|
||||
return re.compile(regex)
|
||||
|
||||
|
||||
@dataclass(unsafe_hash=True)
|
||||
@dataclass
|
||||
class KitIpdFile:
|
||||
name: str
|
||||
url: str
|
||||
|
||||
def explain(self) -> None:
|
||||
log.explain(f"File {self.name!r} (href={self.url!r})")
|
||||
|
||||
|
||||
@dataclass
|
||||
class KitIpdFolder:
|
||||
name: str
|
||||
files: List[KitIpdFile]
|
||||
entries: list[Union[KitIpdFile, "KitIpdFolder"]]
|
||||
|
||||
def explain(self) -> None:
|
||||
log.explain_topic(f"Folder {self.name!r}")
|
||||
for file in self.files:
|
||||
log.explain(f"File {file.name!r} (href={file.url!r})")
|
||||
|
||||
def __hash__(self) -> int:
|
||||
return self.name.__hash__()
|
||||
for entry in self.entries:
|
||||
entry.explain()
|
||||
|
||||
|
||||
class KitIpdCrawler(HttpCrawler):
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
name: str,
|
||||
section: KitIpdCrawlerSection,
|
||||
config: Config,
|
||||
self,
|
||||
name: str,
|
||||
section: KitIpdCrawlerSection,
|
||||
config: Config,
|
||||
):
|
||||
super().__init__(name, section, config)
|
||||
self._url = section.target()
|
||||
@@ -68,85 +70,96 @@ class KitIpdCrawler(HttpCrawler):
|
||||
if not maybe_cl:
|
||||
return
|
||||
|
||||
tasks: List[Awaitable[None]] = []
|
||||
tasks: list[Awaitable[None]] = []
|
||||
|
||||
async with maybe_cl:
|
||||
for item in await self._fetch_items():
|
||||
item.explain()
|
||||
if isinstance(item, KitIpdFolder):
|
||||
tasks.append(self._crawl_folder(item))
|
||||
tasks.append(self._crawl_folder(PurePath("."), item))
|
||||
else:
|
||||
# Orphan files are placed in the root folder
|
||||
tasks.append(self._download_file(PurePath("."), item))
|
||||
log.explain_topic(f"Orphan file {item.name!r} (href={item.url!r})")
|
||||
log.explain("Attributing it to root folder")
|
||||
# do this here to at least be sequential and not parallel (rate limiting is hard, as the
|
||||
# crawl abstraction does not hold for these requests)
|
||||
etag, mtime = await self._request_resource_version(item.url)
|
||||
tasks.append(self._download_file(PurePath("."), item, etag, mtime))
|
||||
|
||||
await self.gather(tasks)
|
||||
|
||||
async def _crawl_folder(self, folder: KitIpdFolder) -> None:
|
||||
path = PurePath(folder.name)
|
||||
async def _crawl_folder(self, parent: PurePath, folder: KitIpdFolder) -> None:
|
||||
path = parent / folder.name
|
||||
if not await self.crawl(path):
|
||||
return
|
||||
|
||||
tasks = [self._download_file(path, file) for file in folder.files]
|
||||
tasks = []
|
||||
for entry in folder.entries:
|
||||
if isinstance(entry, KitIpdFolder):
|
||||
tasks.append(self._crawl_folder(path, entry))
|
||||
else:
|
||||
# do this here to at least be sequential and not parallel (rate limiting is hard, as the crawl
|
||||
# abstraction does not hold for these requests)
|
||||
etag, mtime = await self._request_resource_version(entry.url)
|
||||
tasks.append(self._download_file(path, entry, etag, mtime))
|
||||
|
||||
await self.gather(tasks)
|
||||
|
||||
async def _download_file(self, parent: PurePath, file: KitIpdFile) -> None:
|
||||
async def _download_file(
|
||||
self, parent: PurePath, file: KitIpdFile, etag: Optional[str], mtime: Optional[datetime]
|
||||
) -> None:
|
||||
element_path = parent / file.name
|
||||
maybe_dl = await self.download(element_path)
|
||||
|
||||
prev_etag = self._get_previous_etag_from_report(element_path)
|
||||
etag_differs = None if prev_etag is None else prev_etag != etag
|
||||
|
||||
maybe_dl = await self.download(element_path, etag_differs=etag_differs, mtime=mtime)
|
||||
if not maybe_dl:
|
||||
# keep storing the known file's etag
|
||||
if prev_etag:
|
||||
self._add_etag_to_report(element_path, prev_etag)
|
||||
return
|
||||
|
||||
async with maybe_dl as (bar, sink):
|
||||
await self._stream_from_url(file.url, sink, bar)
|
||||
await self._stream_from_url(file.url, element_path, sink, bar)
|
||||
|
||||
async def _fetch_items(self) -> Set[Union[KitIpdFile, KitIpdFolder]]:
|
||||
async def _fetch_items(self) -> Iterable[KitIpdFile | KitIpdFolder]:
|
||||
page, url = await self.get_page()
|
||||
elements: List[Tag] = self._find_file_links(page)
|
||||
items: Set[Union[KitIpdFile, KitIpdFolder]] = set()
|
||||
elements: list[Tag] = self._find_file_links(page)
|
||||
|
||||
# do not add unnecessary nesting for a single <h1> heading
|
||||
drop_h1: bool = len(page.find_all(name="h1")) <= 1
|
||||
|
||||
folder_tree: KitIpdFolder = KitIpdFolder(".", [])
|
||||
for element in elements:
|
||||
folder_label = self._find_folder_label(element)
|
||||
if folder_label:
|
||||
folder = self._extract_folder(folder_label, url)
|
||||
if folder not in items:
|
||||
items.add(folder)
|
||||
folder.explain()
|
||||
else:
|
||||
file = self._extract_file(element, url)
|
||||
items.add(file)
|
||||
log.explain_topic(f"Orphan file {file.name!r} (href={file.url!r})")
|
||||
log.explain("Attributing it to root folder")
|
||||
parent = HttpCrawler.get_folder_structure_from_heading_hierarchy(element, drop_h1)
|
||||
file = self._extract_file(element, url)
|
||||
|
||||
return items
|
||||
current_folder: KitIpdFolder = folder_tree
|
||||
for folder_name in parent.parts:
|
||||
# helps the type checker to verify that current_folder is indeed a folder
|
||||
def subfolders() -> Generator[KitIpdFolder, Any, None]:
|
||||
return (entry for entry in current_folder.entries if isinstance(entry, KitIpdFolder))
|
||||
|
||||
def _extract_folder(self, folder_tag: Tag, url: str) -> KitIpdFolder:
|
||||
files: List[KitIpdFile] = []
|
||||
name = folder_tag.getText().strip()
|
||||
if not any(entry.name == folder_name for entry in subfolders()):
|
||||
current_folder.entries.append(KitIpdFolder(folder_name, []))
|
||||
current_folder = next(entry for entry in subfolders() if entry.name == folder_name)
|
||||
|
||||
container: Tag = folder_tag.findNextSibling(name="table")
|
||||
for link in self._find_file_links(container):
|
||||
files.append(self._extract_file(link, url))
|
||||
current_folder.entries.append(file)
|
||||
|
||||
return KitIpdFolder(name, files)
|
||||
|
||||
@staticmethod
|
||||
def _find_folder_label(file_link: Tag) -> Optional[Tag]:
|
||||
enclosing_table: Tag = file_link.findParent(name="table")
|
||||
if enclosing_table is None:
|
||||
return None
|
||||
return enclosing_table.findPreviousSibling(name=re.compile("^h[1-6]$"))
|
||||
return folder_tree.entries
|
||||
|
||||
def _extract_file(self, link: Tag, url: str) -> KitIpdFile:
|
||||
url = self._abs_url_from_link(url, link)
|
||||
name = os.path.basename(url)
|
||||
return KitIpdFile(name, url)
|
||||
|
||||
def _find_file_links(self, tag: Union[Tag, BeautifulSoup]) -> List[Tag]:
|
||||
return tag.findAll(name="a", attrs={"href": self._file_regex})
|
||||
def _find_file_links(self, tag: Tag | BeautifulSoup) -> list[Tag]:
|
||||
return cast(list[Tag], tag.find_all(name="a", attrs={"href": self._file_regex}))
|
||||
|
||||
def _abs_url_from_link(self, url: str, link_tag: Tag) -> str:
|
||||
return urljoin(url, link_tag.get("href"))
|
||||
return urljoin(url, cast(str, link_tag.get("href")))
|
||||
|
||||
async def _stream_from_url(self, url: str, sink: FileSink, bar: ProgressBar) -> None:
|
||||
async def _stream_from_url(self, url: str, path: PurePath, sink: FileSink, bar: ProgressBar) -> None:
|
||||
async with self.session.get(url, allow_redirects=False) as resp:
|
||||
if resp.status == 403:
|
||||
raise CrawlError("Received a 403. Are you within the KIT network/VPN?")
|
||||
@@ -159,7 +172,9 @@ class KitIpdCrawler(HttpCrawler):
|
||||
|
||||
sink.done()
|
||||
|
||||
async def get_page(self) -> Tuple[BeautifulSoup, str]:
|
||||
self._add_etag_to_report(path, resp.headers.get("ETag"))
|
||||
|
||||
async def get_page(self) -> tuple[BeautifulSoup, str]:
|
||||
async with self.session.get(self._url) as request:
|
||||
# The web page for Algorithmen für Routenplanung contains some
|
||||
# weird comments that beautifulsoup doesn't parse correctly. This
|
||||
|
@@ -18,31 +18,28 @@ class LocalCrawlerSection(CrawlerSection):
|
||||
def crawl_delay(self) -> float:
|
||||
value = self.s.getfloat("crawl_delay", fallback=0.0)
|
||||
if value < 0:
|
||||
self.invalid_value("crawl_delay", value,
|
||||
"Must not be negative")
|
||||
self.invalid_value("crawl_delay", value, "Must not be negative")
|
||||
return value
|
||||
|
||||
def download_delay(self) -> float:
|
||||
value = self.s.getfloat("download_delay", fallback=0.0)
|
||||
if value < 0:
|
||||
self.invalid_value("download_delay", value,
|
||||
"Must not be negative")
|
||||
self.invalid_value("download_delay", value, "Must not be negative")
|
||||
return value
|
||||
|
||||
def download_speed(self) -> Optional[int]:
|
||||
value = self.s.getint("download_speed")
|
||||
if value is not None and value <= 0:
|
||||
self.invalid_value("download_speed", value,
|
||||
"Must be greater than 0")
|
||||
self.invalid_value("download_speed", value, "Must be greater than 0")
|
||||
return value
|
||||
|
||||
|
||||
class LocalCrawler(Crawler):
|
||||
def __init__(
|
||||
self,
|
||||
name: str,
|
||||
section: LocalCrawlerSection,
|
||||
config: Config,
|
||||
self,
|
||||
name: str,
|
||||
section: LocalCrawlerSection,
|
||||
config: Config,
|
||||
):
|
||||
super().__init__(name, section, config)
|
||||
|
||||
@@ -74,10 +71,12 @@ class LocalCrawler(Crawler):
|
||||
tasks = []
|
||||
|
||||
async with cl:
|
||||
await asyncio.sleep(random.uniform(
|
||||
0.5 * self._crawl_delay,
|
||||
self._crawl_delay,
|
||||
))
|
||||
await asyncio.sleep(
|
||||
random.uniform(
|
||||
0.5 * self._crawl_delay,
|
||||
self._crawl_delay,
|
||||
)
|
||||
)
|
||||
|
||||
for child in path.iterdir():
|
||||
pure_child = cl.path / child.name
|
||||
@@ -93,10 +92,12 @@ class LocalCrawler(Crawler):
|
||||
return
|
||||
|
||||
async with dl as (bar, sink):
|
||||
await asyncio.sleep(random.uniform(
|
||||
0.5 * self._download_delay,
|
||||
self._download_delay,
|
||||
))
|
||||
await asyncio.sleep(
|
||||
random.uniform(
|
||||
0.5 * self._download_delay,
|
||||
self._download_delay,
|
||||
)
|
||||
)
|
||||
|
||||
bar.set_total(stat.st_size)
|
||||
|
||||
|
@@ -1,5 +1,5 @@
|
||||
from collections.abc import Iterator
|
||||
from pathlib import PurePath
|
||||
from typing import Iterator, Set
|
||||
|
||||
from .logging import log
|
||||
from .utils import fmt_path
|
||||
@@ -14,17 +14,36 @@ def name_variants(path: PurePath) -> Iterator[PurePath]:
|
||||
|
||||
|
||||
class Deduplicator:
|
||||
FORBIDDEN_CHARS = '<>:"/\\|?*'
|
||||
FORBIDDEN_CHARS = '<>:"/\\|?*' + "".join([chr(i) for i in range(0, 32)])
|
||||
FORBIDDEN_NAMES = {
|
||||
"CON", "PRN", "AUX", "NUL",
|
||||
"COM1", "COM2", "COM3", "COM4", "COM5", "COM6", "COM7", "COM8", "COM9",
|
||||
"LPT1", "LPT2", "LPT3", "LPT4", "LPT5", "LPT6", "LPT7", "LPT8", "LPT9",
|
||||
"CON",
|
||||
"PRN",
|
||||
"AUX",
|
||||
"NUL",
|
||||
"COM1",
|
||||
"COM2",
|
||||
"COM3",
|
||||
"COM4",
|
||||
"COM5",
|
||||
"COM6",
|
||||
"COM7",
|
||||
"COM8",
|
||||
"COM9",
|
||||
"LPT1",
|
||||
"LPT2",
|
||||
"LPT3",
|
||||
"LPT4",
|
||||
"LPT5",
|
||||
"LPT6",
|
||||
"LPT7",
|
||||
"LPT8",
|
||||
"LPT9",
|
||||
}
|
||||
|
||||
def __init__(self, windows_paths: bool) -> None:
|
||||
self._windows_paths = windows_paths
|
||||
|
||||
self._known: Set[PurePath] = set()
|
||||
self._known: set[PurePath] = set()
|
||||
|
||||
def _add(self, path: PurePath) -> None:
|
||||
self._known.add(path)
|
||||
|
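The extended FORBIDDEN_CHARS (now including ASCII control characters) and the FORBIDDEN_NAMES set describe what Windows refuses in path components. An illustrative helper, not part of the diff, showing how those constants could be used to flag unsafe names:

# Hypothetical check built on the constants above.
def is_windows_unsafe(name: str) -> bool:
    stem = name.split(".")[0].upper()  # "CON.txt" is just as reserved as "CON"
    if stem in Deduplicator.FORBIDDEN_NAMES:
        return True
    return any(c in Deduplicator.FORBIDDEN_CHARS for c in name)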
@@ -1,8 +1,9 @@
|
||||
import asyncio
|
||||
import time
|
||||
from collections.abc import AsyncIterator
|
||||
from contextlib import asynccontextmanager
|
||||
from dataclasses import dataclass
|
||||
from typing import AsyncIterator, Optional
|
||||
from typing import Optional
|
||||
|
||||
|
||||
@dataclass
|
||||
@@ -12,12 +13,7 @@ class Slot:
|
||||
|
||||
|
||||
class Limiter:
|
||||
def __init__(
|
||||
self,
|
||||
task_limit: int,
|
||||
download_limit: int,
|
||||
task_delay: float
|
||||
):
|
||||
def __init__(self, task_limit: int, download_limit: int, task_delay: float):
|
||||
if task_limit <= 0:
|
||||
raise ValueError("task limit must be at least 1")
|
||||
if download_limit <= 0:
|
||||
|
@@ -1,16 +1,23 @@
import asyncio
import sys
import traceback
from contextlib import asynccontextmanager, contextmanager
# TODO In Python 3.9 and above, ContextManager is deprecated
from typing import AsyncIterator, ContextManager, Iterator, List, Optional
from collections.abc import AsyncIterator, Iterator
from contextlib import AbstractContextManager, asynccontextmanager, contextmanager
from typing import Any, Optional

from rich.console import Console, Group
from rich.live import Live
from rich.markup import escape
from rich.panel import Panel
from rich.progress import (BarColumn, DownloadColumn, Progress, TaskID, TextColumn, TimeRemainingColumn,
                           TransferSpeedColumn)
from rich.progress import (
    BarColumn,
    DownloadColumn,
    Progress,
    TaskID,
    TextColumn,
    TimeRemainingColumn,
    TransferSpeedColumn,
)
from rich.table import Column


@@ -54,11 +61,12 @@ class Log:
        self._showing_progress = False
        self._progress_suspended = False
        self._lock = asyncio.Lock()
        self._lines: List[str] = []
        self._lines: list[str] = []

        # Whether different parts of the output are enabled or disabled
        self.output_explain = False
        self.output_status = True
        self.output_not_deleted = True
        self.output_report = True

    def _update_live(self) -> None:
@@ -114,7 +122,7 @@ class Log:
        for line in self._lines:
            self.print(line)

    def print(self, text: str) -> None:
    def print(self, text: Any) -> None:
        """
        Print a normal message. Allows markup.
        """
@@ -176,10 +184,14 @@ class Log:
        # Our print function doesn't take types other than strings, but the
        # underlying rich.print function does. This call is a special case
        # anyways, and we're calling it internally, so this should be fine.
        self.print(Panel.fit("""
        self.print(
            Panel.fit(
                """
Please copy your program output and send it to the PFERD maintainers, either
directly or as a GitHub issue: https://github.com/Garmelon/PFERD/issues/new
""".strip()))  # type: ignore
""".strip()
            )
        )

    def explain_topic(self, text: str) -> None:
        """
@@ -207,6 +219,17 @@ directly or as a GitHub issue: https://github.com/Garmelon/PFERD/issues/new
            action = escape(f"{action:<{self.STATUS_WIDTH}}")
            self.print(f"{style}{action}[/] {escape(text)} {suffix}")

    def not_deleted(self, style: str, action: str, text: str, suffix: str = "") -> None:
        """
        Print a message for a local only file that wasn't
        deleted while crawling. Allows markup in the "style"
        argument which will be applied to the "action" string.
        """

        if self.output_status and self.output_not_deleted:
            action = escape(f"{action:<{self.STATUS_WIDTH}}")
            self.print(f"{style}{action}[/] {escape(text)} {suffix}")

    def report(self, text: str) -> None:
        """
        Print a report after crawling. Allows markup.
@@ -215,12 +238,20 @@ directly or as a GitHub issue: https://github.com/Garmelon/PFERD/issues/new
        if self.output_report:
            self.print(text)

    def report_not_deleted(self, text: str) -> None:
        """
        Print a report for a local only file that wasn't deleted after crawling. Allows markup.
        """

        if self.output_report and self.output_not_deleted:
            self.print(text)

    @contextmanager
    def _bar(
            self,
            progress: Progress,
            description: str,
            total: Optional[float],
        self,
        progress: Progress,
        description: str,
        total: Optional[float],
    ) -> Iterator[ProgressBar]:
        if total is None:
            # Indeterminate progress bar
@@ -236,12 +267,12 @@ directly or as a GitHub issue: https://github.com/Garmelon/PFERD/issues/new
        self._update_live()

    def crawl_bar(
            self,
            style: str,
            action: str,
            text: str,
            total: Optional[float] = None,
    ) -> ContextManager[ProgressBar]:
        self,
        style: str,
        action: str,
        text: str,
        total: Optional[float] = None,
    ) -> AbstractContextManager[ProgressBar]:
        """
        Allows markup in the "style" argument which will be applied to the
        "action" string.
@@ -252,12 +283,12 @@ directly or as a GitHub issue: https://github.com/Garmelon/PFERD/issues/new
        return self._bar(self._crawl_progress, description, total)

    def download_bar(
            self,
            style: str,
            action: str,
            text: str,
            total: Optional[float] = None,
    ) -> ContextManager[ProgressBar]:
        self,
        style: str,
        action: str,
        text: str,
        total: Optional[float] = None,
    ) -> AbstractContextManager[ProgressBar]:
        """
        Allows markup in the "style" argument which will be applied to the
        "action" string.

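As a rough usage sketch (not part of the diff above): the new `output_not_deleted` switch gates both the live "Not deleted" status line and the corresponding report entry, so cleanup warnings can be silenced in one place. The calls below only use names introduced in the hunks above.

```
from PFERD.logging import log

# log is the module-level Log singleton; the toggle and methods are the ones added above.
log.output_not_deleted = False

# Both of these now print nothing, because output_not_deleted is False.
log.not_deleted("[bold bright_magenta]", "Not deleted", "foo/bar.pdf")
log.report_not_deleted("[bold bright_magenta]Not deleted[/] foo/bar.pdf")
```
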
@@ -4,12 +4,13 @@ import os
import random
import shutil
import string
from contextlib import contextmanager
from collections.abc import Iterator
from contextlib import contextmanager, suppress
from dataclasses import dataclass
from datetime import datetime
from enum import Enum
from pathlib import Path, PurePath
from typing import BinaryIO, Iterator, Optional, Tuple
from typing import BinaryIO, Optional

from .logging import log
from .report import Report, ReportLoadError
@@ -35,8 +36,7 @@ class Redownload(Enum):
        try:
            return Redownload(string)
        except ValueError:
            raise ValueError("must be one of 'never', 'never-smart',"
                             " 'always', 'always-smart'")
            raise ValueError("must be one of 'never', 'never-smart', 'always', 'always-smart'") from None


class OnConflict(Enum):
@@ -44,18 +44,22 @@ class OnConflict(Enum):
    LOCAL_FIRST = "local-first"
    REMOTE_FIRST = "remote-first"
    NO_DELETE = "no-delete"
    NO_DELETE_PROMPT_OVERWRITE = "no-delete-prompt-overwrite"

    @staticmethod
    def from_string(string: str) -> "OnConflict":
        try:
            return OnConflict(string)
        except ValueError:
            raise ValueError("must be one of 'prompt', 'local-first',"
                             " 'remote-first', 'no-delete'")
            raise ValueError(
                "must be one of 'prompt', 'local-first',"
                " 'remote-first', 'no-delete', 'no-delete-prompt-overwrite'"
            ) from None


@dataclass
class Heuristics:
    etag_differs: Optional[bool]
    mtime: Optional[datetime]


@@ -94,13 +98,13 @@ class FileSinkToken(ReusableAsyncContextManager[FileSink]):
    # download handed back to the OutputDirectory.

    def __init__(
            self,
            output_dir: "OutputDirectory",
            remote_path: PurePath,
            path: PurePath,
            local_path: Path,
            heuristics: Heuristics,
            on_conflict: OnConflict,
        self,
        output_dir: "OutputDirectory",
        remote_path: PurePath,
        path: PurePath,
        local_path: Path,
        heuristics: Heuristics,
        on_conflict: OnConflict,
    ):
        super().__init__()

@@ -116,15 +120,17 @@ class FileSinkToken(ReusableAsyncContextManager[FileSink]):
        sink = FileSink(file)

        async def after_download() -> None:
            await self._output_dir._after_download(DownloadInfo(
                self._remote_path,
                self._path,
                self._local_path,
                tmp_path,
                self._heuristics,
                self._on_conflict,
                sink.is_done(),
            ))
            await self._output_dir._after_download(
                DownloadInfo(
                    self._remote_path,
                    self._path,
                    self._local_path,
                    tmp_path,
                    self._heuristics,
                    self._on_conflict,
                    sink.is_done(),
                )
            )

        self._stack.push_async_callback(after_download)
        self._stack.enter_context(file)
@@ -136,10 +142,10 @@ class OutputDirectory:
    REPORT_FILE = PurePath(".report")

    def __init__(
            self,
            root: Path,
            redownload: Redownload,
            on_conflict: OnConflict,
        self,
        root: Path,
        redownload: Redownload,
        on_conflict: OnConflict,
    ):
        if os.name == "nt":
            # Windows limits the path length to 260 for some historical reason.
@@ -172,8 +178,8 @@ class OutputDirectory:

        try:
            self._root.mkdir(parents=True, exist_ok=True)
        except OSError:
            raise OutputDirError("Failed to create base directory")
        except OSError as e:
            raise OutputDirError("Failed to create base directory") from e

    def register_reserved(self, path: PurePath) -> None:
        self._report.mark_reserved(path)
@@ -191,11 +197,11 @@ class OutputDirectory:
        return self._root / path

    def _should_download(
            self,
            local_path: Path,
            heuristics: Heuristics,
            redownload: Redownload,
            on_conflict: OnConflict,
        self,
        local_path: Path,
        heuristics: Heuristics,
        redownload: Redownload,
        on_conflict: OnConflict,
    ) -> bool:
        if not local_path.exists():
            log.explain("No corresponding file present locally")
@@ -232,8 +238,16 @@ class OutputDirectory:

        remote_newer = None

        # ETag should be a more reliable indicator than mtime, so we check it first
        if heuristics.etag_differs is not None:
            remote_newer = heuristics.etag_differs
            if remote_newer:
                log.explain("Remote file's entity tag differs")
            else:
                log.explain("Remote file's entity tag is the same")

        # Python on Windows crashes when faced with timestamps around the unix epoch
        if heuristics.mtime and (os.name != "nt" or heuristics.mtime.year > 1970):
        if remote_newer is None and heuristics.mtime and (os.name != "nt" or heuristics.mtime.year > 1970):
            mtime = heuristics.mtime
            remote_newer = mtime.timestamp() > stat.st_mtime
            if remote_newer:
@@ -260,11 +274,11 @@ class OutputDirectory:
        # files.

    async def _conflict_lfrf(
            self,
            on_conflict: OnConflict,
            path: PurePath,
        self,
        on_conflict: OnConflict,
        path: PurePath,
    ) -> bool:
        if on_conflict == OnConflict.PROMPT:
        if on_conflict in {OnConflict.PROMPT, OnConflict.NO_DELETE_PROMPT_OVERWRITE}:
            async with log.exclusive_output():
                prompt = f"Replace {fmt_path(path)} with remote file?"
                return await prompt_yes_no(prompt, default=False)
@@ -279,11 +293,11 @@ class OutputDirectory:
        raise ValueError(f"{on_conflict!r} is not a valid conflict policy")

    async def _conflict_ldrf(
            self,
            on_conflict: OnConflict,
            path: PurePath,
        self,
        on_conflict: OnConflict,
        path: PurePath,
    ) -> bool:
        if on_conflict == OnConflict.PROMPT:
        if on_conflict in {OnConflict.PROMPT, OnConflict.NO_DELETE_PROMPT_OVERWRITE}:
            async with log.exclusive_output():
                prompt = f"Recursively delete {fmt_path(path)} and replace with remote file?"
                return await prompt_yes_no(prompt, default=False)
@@ -298,12 +312,12 @@ class OutputDirectory:
        raise ValueError(f"{on_conflict!r} is not a valid conflict policy")

    async def _conflict_lfrd(
            self,
            on_conflict: OnConflict,
            path: PurePath,
            parent: PurePath,
        self,
        on_conflict: OnConflict,
        path: PurePath,
        parent: PurePath,
    ) -> bool:
        if on_conflict == OnConflict.PROMPT:
        if on_conflict in {OnConflict.PROMPT, OnConflict.NO_DELETE_PROMPT_OVERWRITE}:
            async with log.exclusive_output():
                prompt = f"Delete {fmt_path(parent)} so remote file {fmt_path(path)} can be downloaded?"
                return await prompt_yes_no(prompt, default=False)
@@ -318,9 +332,9 @@ class OutputDirectory:
        raise ValueError(f"{on_conflict!r} is not a valid conflict policy")

    async def _conflict_delete_lf(
            self,
            on_conflict: OnConflict,
            path: PurePath,
        self,
        on_conflict: OnConflict,
        path: PurePath,
    ) -> bool:
        if on_conflict == OnConflict.PROMPT:
            async with log.exclusive_output():
@@ -330,7 +344,7 @@ class OutputDirectory:
            return False
        elif on_conflict == OnConflict.REMOTE_FIRST:
            return True
        elif on_conflict == OnConflict.NO_DELETE:
        elif on_conflict in {OnConflict.NO_DELETE, OnConflict.NO_DELETE_PROMPT_OVERWRITE}:
            return False

        # This should never be reached
@@ -343,9 +357,9 @@ class OutputDirectory:
        return base.parent / name

    async def _create_tmp_file(
            self,
            local_path: Path,
    ) -> Tuple[Path, BinaryIO]:
        self,
        local_path: Path,
    ) -> tuple[Path, BinaryIO]:
        """
        May raise an OutputDirError.
        """
@@ -361,20 +375,38 @@ class OutputDirectory:

        raise OutputDirError("Failed to create temporary file")

    def should_try_download(
        self,
        path: PurePath,
        *,
        etag_differs: Optional[bool] = None,
        mtime: Optional[datetime] = None,
        redownload: Optional[Redownload] = None,
        on_conflict: Optional[OnConflict] = None,
    ) -> bool:
        heuristics = Heuristics(etag_differs, mtime)
        redownload = self._redownload if redownload is None else redownload
        on_conflict = self._on_conflict if on_conflict is None else on_conflict
        local_path = self.resolve(path)

        return self._should_download(local_path, heuristics, redownload, on_conflict)

    async def download(
            self,
            remote_path: PurePath,
            path: PurePath,
            mtime: Optional[datetime] = None,
            redownload: Optional[Redownload] = None,
            on_conflict: Optional[OnConflict] = None,
        self,
        remote_path: PurePath,
        path: PurePath,
        *,
        etag_differs: Optional[bool] = None,
        mtime: Optional[datetime] = None,
        redownload: Optional[Redownload] = None,
        on_conflict: Optional[OnConflict] = None,
    ) -> Optional[FileSinkToken]:
        """
        May throw an OutputDirError, a MarkDuplicateError or a
        MarkConflictError.
        """

        heuristics = Heuristics(mtime)
        heuristics = Heuristics(etag_differs, mtime)
        redownload = self._redownload if redownload is None else redownload
        on_conflict = self._on_conflict if on_conflict is None else on_conflict
        local_path = self.resolve(path)
@@ -478,10 +510,8 @@ class OutputDirectory:
            await self._cleanup(child, pure_child)

        if delete_self:
            try:
            with suppress(OSError):
                path.rmdir()
            except OSError:
                pass

    async def _cleanup_file(self, path: Path, pure: PurePath) -> None:
        if self._report.is_marked(pure):
@@ -495,7 +525,7 @@ class OutputDirectory:
            except OSError:
                pass
        else:
            log.status("[bold bright_magenta]", "Not deleted", fmt_path(pure))
            log.not_deleted("[bold bright_magenta]", "Not deleted", fmt_path(pure))
            self._report.not_delete_file(pure)

    def load_prev_report(self) -> None:

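A short, hypothetical sketch of how a crawler might consult the new `should_try_download()` together with the ETag heuristic before issuing any HTTP request. Only `should_try_download` itself comes from the diff above; the helper name and argument values are made up for illustration.

```
from datetime import datetime
from pathlib import PurePath

from PFERD.output_dir import OutputDirectory


def worth_requesting(output_dir: OutputDirectory, path: PurePath,
                     etag_differs: bool, remote_mtime: datetime) -> bool:
    # Runs the same redownload/conflict heuristics as download(), including the
    # new ETag check, but without opening a temporary file or sink.
    return output_dir.should_try_download(path, etag_differs=etag_differs, mtime=remote_mtime)
```

If this returns False, the crawler can skip the request entirely and only call `download()` for files that actually need fetching.
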
@@ -1,5 +1,5 @@
from pathlib import Path
from typing import Dict, List, Optional
from pathlib import Path, PurePath
from typing import Optional

from rich.markup import escape

@@ -15,7 +15,7 @@ class PferdLoadError(Exception):


class Pferd:
    def __init__(self, config: Config, cli_crawlers: Optional[List[str]], cli_skips: Optional[List[str]]):
    def __init__(self, config: Config, cli_crawlers: Optional[list[str]], cli_skips: Optional[list[str]]):
        """
        May throw PferdLoadError.
        """
@@ -23,10 +23,10 @@ class Pferd:
        self._config = config
        self._crawlers_to_run = self._find_crawlers_to_run(config, cli_crawlers, cli_skips)

        self._authenticators: Dict[str, Authenticator] = {}
        self._crawlers: Dict[str, Crawler] = {}
        self._authenticators: dict[str, Authenticator] = {}
        self._crawlers: dict[str, Crawler] = {}

    def _find_config_crawlers(self, config: Config) -> List[str]:
    def _find_config_crawlers(self, config: Config) -> list[str]:
        crawl_sections = []

        for name, section in config.crawl_sections():
@@ -37,7 +37,7 @@ class Pferd:

        return crawl_sections

    def _find_cli_crawlers(self, config: Config, cli_crawlers: List[str]) -> List[str]:
    def _find_cli_crawlers(self, config: Config, cli_crawlers: list[str]) -> list[str]:
        if len(cli_crawlers) != len(set(cli_crawlers)):
            raise PferdLoadError("Some crawlers were selected multiple times")

@@ -66,14 +66,14 @@ class Pferd:
        return crawlers_to_run

    def _find_crawlers_to_run(
            self,
            config: Config,
            cli_crawlers: Optional[List[str]],
            cli_skips: Optional[List[str]],
    ) -> List[str]:
        self,
        config: Config,
        cli_crawlers: Optional[list[str]],
        cli_skips: Optional[list[str]],
    ) -> list[str]:
        log.explain_topic("Deciding which crawlers to run")

        crawlers: List[str]
        crawlers: list[str]
        if cli_crawlers is None:
            log.explain("No crawlers specified on CLI")
            log.explain("Running crawlers specified in config")
@@ -104,7 +104,7 @@ class Pferd:

    def _load_crawlers(self) -> None:
        # Cookie sharing
        kit_ilias_web_paths: Dict[Authenticator, List[Path]] = {}
        kit_ilias_web_paths: dict[Authenticator, list[Path]] = {}

        for name, section in self._config.crawl_sections():
            log.print(f"[bold bright_cyan]Loading[/] {escape(name)}")
@@ -117,9 +117,8 @@ class Pferd:
            crawler = crawler_constructor(name, section, self._config, self._authenticators)
            self._crawlers[name] = crawler

            if self._config.default_section.share_cookies():
                if isinstance(crawler, KitIliasWebCrawler):
                    crawler.share_cookies(kit_ilias_web_paths)
            if self._config.default_section.share_cookies() and isinstance(crawler, KitIliasWebCrawler):
                crawler.share_cookies(kit_ilias_web_paths)

    def debug_transforms(self) -> None:
        for name in self._crawlers_to_run:
@@ -168,19 +167,24 @@ class Pferd:
            log.report("")
            log.report(f"[bold bright_cyan]Report[/] for {escape(name)}")

            def fmt_path_link(relative_path: PurePath) -> str:
                # We need to URL-encode the path because it might contain spaces or special characters
                link = crawler.output_dir.resolve(relative_path).absolute().as_uri()
                return f"[link={link}]{fmt_path(relative_path)}[/link]"

            something_changed = False
            for path in sorted(crawler.report.added_files):
                something_changed = True
                log.report(f" [bold bright_green]Added[/] {fmt_path(path)}")
                log.report(f" [bold bright_green]Added[/] {fmt_path_link(path)}")
            for path in sorted(crawler.report.changed_files):
                something_changed = True
                log.report(f" [bold bright_yellow]Changed[/] {fmt_path(path)}")
                log.report(f" [bold bright_yellow]Changed[/] {fmt_path_link(path)}")
            for path in sorted(crawler.report.deleted_files):
                something_changed = True
                log.report(f" [bold bright_magenta]Deleted[/] {fmt_path(path)}")
            for path in sorted(crawler.report.not_deleted_files):
                something_changed = True
                log.report(f" [bold bright_magenta]Not deleted[/] {fmt_path(path)}")
                log.report_not_deleted(f" [bold bright_magenta]Not deleted[/] {fmt_path_link(path)}")

            for warning in crawler.report.encountered_warnings:
                something_changed = True

@@ -1,6 +1,6 @@
import json
from pathlib import Path, PurePath
from typing import Any, Dict, List, Optional, Set
from typing import Any, Optional


class ReportLoadError(Exception):
@@ -34,15 +34,6 @@ class MarkConflictError(Exception):
        self.collides_with = collides_with


# TODO Use PurePath.is_relative_to when updating to 3.9
def is_relative_to(a: PurePath, b: PurePath) -> bool:
    try:
        a.relative_to(b)
        return True
    except ValueError:
        return False


class Report:
    """
    A report of a synchronization. Includes all files found by the crawler, as
@@ -51,32 +42,32 @@ class Report:

    def __init__(self) -> None:
        # Paths found by the crawler, untransformed
        self.found_paths: Set[PurePath] = set()
        self.found_paths: set[PurePath] = set()

        # Files reserved for metadata files (e. g. the report file or cookies)
        # that can't be overwritten by user transforms and won't be cleaned up
        # at the end.
        self.reserved_files: Set[PurePath] = set()
        self.reserved_files: set[PurePath] = set()

        # Files found by the crawler, transformed. Only includes files that
        # were downloaded (or a download was attempted)
        self.known_files: Set[PurePath] = set()
        self.known_files: set[PurePath] = set()

        self.added_files: Set[PurePath] = set()
        self.changed_files: Set[PurePath] = set()
        self.deleted_files: Set[PurePath] = set()
        self.added_files: set[PurePath] = set()
        self.changed_files: set[PurePath] = set()
        self.deleted_files: set[PurePath] = set()
        # Files that should have been deleted by the cleanup but weren't
        self.not_deleted_files: Set[PurePath] = set()
        self.not_deleted_files: set[PurePath] = set()

        # Custom crawler-specific data
        self.custom: Dict[str, Any] = dict()
        self.custom: dict[str, Any] = dict()

        # Encountered errors and warnings
        self.encountered_warnings: List[str] = []
        self.encountered_errors: List[str] = []
        self.encountered_warnings: list[str] = []
        self.encountered_errors: list[str] = []

    @staticmethod
    def _get_list_of_strs(data: Dict[str, Any], key: str) -> List[str]:
    def _get_list_of_strs(data: dict[str, Any], key: str) -> list[str]:
        result: Any = data.get(key, [])

        if not isinstance(result, list):
@@ -89,8 +80,8 @@ class Report:
        return result

    @staticmethod
    def _get_str_dictionary(data: Dict[str, Any], key: str) -> Dict[str, Any]:
        result: Dict[str, Any] = data.get(key, {})
    def _get_str_dictionary(data: dict[str, Any], key: str) -> dict[str, Any]:
        result: dict[str, Any] = data.get(key, {})

        if not isinstance(result, dict):
            raise ReportLoadError(f"Incorrect format: {key!r} is not a dictionary")
@@ -173,13 +164,13 @@ class Report:
            if path == other:
                raise MarkDuplicateError(path)

            if is_relative_to(path, other) or is_relative_to(other, path):
            if path.is_relative_to(other) or other.is_relative_to(path):
                raise MarkConflictError(path, other)

        self.known_files.add(path)

    @property
    def marked(self) -> Set[PurePath]:
    def marked(self) -> set[PurePath]:
        return self.known_files | self.reserved_files

    def is_marked(self, path: PurePath) -> bool:

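The removed `is_relative_to()` helper is covered by `PurePath.is_relative_to()`, part of the standard library since Python 3.9. A minimal demonstration of the equivalent conflict check:

```
from pathlib import PurePath

a = PurePath("course/week1/slides.pdf")
b = PurePath("course/week1")

# Same behaviour as the old module-level helper is_relative_to(a, b)
assert a.is_relative_to(b)
assert not b.is_relative_to(PurePath("elsewhere"))

# The check in Report.mark() now reads:
def conflicts(path: PurePath, other: PurePath) -> bool:
    return path.is_relative_to(other) or other.is_relative_to(path)

assert conflicts(a, b)
```
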
@@ -1,10 +1,12 @@
import ast
import contextlib
import re
from abc import ABC, abstractmethod
from collections.abc import Callable, Sequence
from dataclasses import dataclass
from enum import Enum
from pathlib import PurePath
from typing import Callable, Dict, List, Optional, Sequence, TypeVar, Union
from typing import Optional, TypeVar

from .logging import log
from .utils import fmt_path, str_path
@@ -23,7 +25,7 @@ class Empty:
    pass


RightSide = Union[str, Ignore, Empty]
RightSide = str | Ignore | Empty


@dataclass
@@ -35,7 +37,7 @@ class Ignored:
    pass


TransformResult = Optional[Union[Transformed, Ignored]]
TransformResult = Transformed | Ignored | None


@dataclass
@@ -47,7 +49,7 @@ class Rule:
    right: RightSide
    right_index: int

    def right_result(self, path: PurePath) -> Union[str, Transformed, Ignored]:
    def right_result(self, path: PurePath) -> str | Transformed | Ignored:
        if isinstance(self.right, str):
            return self.right
        elif isinstance(self.right, Ignore):
@@ -93,22 +95,22 @@ class ExactReTf(Transformation):
        # since elements of "match.groups()" can be None, mypy is wrong.
        groups: Sequence[Optional[str]] = [match[0]] + list(match.groups())

        locals_dir: Dict[str, Union[str, int, float]] = {}
        locals_dir: dict[str, str | int | float] = {}
        for i, group in enumerate(groups):
            if group is None:
                continue

            locals_dir[f"g{i}"] = group

            try:
            with contextlib.suppress(ValueError):
                locals_dir[f"i{i}"] = int(group)
            except ValueError:
                pass

            try:
            with contextlib.suppress(ValueError):
                locals_dir[f"f{i}"] = float(group)
            except ValueError:
                pass

        named_groups: dict[str, str] = match.groupdict()
        for name, capture in named_groups.items():
            locals_dir[name] = capture

        result = eval(f"f{right!r}", {}, locals_dir)
        return Transformed(PurePath(result))
@@ -204,7 +206,7 @@ class Line:

    @property
    def rest(self) -> str:
        return self.line[self.index:]
        return self.line[self.index :]

    def peek(self, amount: int = 1) -> str:
        return self.rest[:amount]
@@ -224,7 +226,7 @@ class Line:
        self.expect(string)
        return value

    def one_of(self, parsers: List[Callable[[], T]], description: str) -> T:
    def one_of(self, parsers: list[Callable[[], T]], description: str) -> T:
        for parser in parsers:
            index = self.index
            try:
@@ -311,7 +313,7 @@ def parse_left(line: Line) -> str:
        return parse_str(line)


def parse_right(line: Line) -> Union[str, Ignore]:
def parse_right(line: Line) -> str | Ignore:
    c = line.peek()
    if c in QUOTATION_MARKS:
        return parse_quoted_str(line)
@@ -323,21 +325,27 @@ def parse_right(line: Line) -> Union[str, Ignore]:


def parse_arrow_name(line: Line) -> str:
    return line.one_of([
        lambda: line.expect("exact-re"),
        lambda: line.expect("exact"),
        lambda: line.expect("name-re"),
        lambda: line.expect("name"),
        lambda: line.expect("re"),
        lambda: line.expect(""),
    ], "Expected arrow name")
    return line.one_of(
        [
            lambda: line.expect("exact-re"),
            lambda: line.expect("exact"),
            lambda: line.expect("name-re"),
            lambda: line.expect("name"),
            lambda: line.expect("re"),
            lambda: line.expect(""),
        ],
        "Expected arrow name",
    )


def parse_arrow_head(line: Line) -> ArrowHead:
    return line.one_of([
        lambda: line.expect_with(">>", ArrowHead.SEQUENCE),
        lambda: line.expect_with(">", ArrowHead.NORMAL),
    ], "Expected arrow head")
    return line.one_of(
        [
            lambda: line.expect_with(">>", ArrowHead.SEQUENCE),
            lambda: line.expect_with(">", ArrowHead.NORMAL),
        ],
        "Expected arrow head",
    )


def parse_eol(line: Line) -> None:
@@ -409,12 +417,12 @@ class Transformer:

    def transform(self, path: PurePath) -> Optional[PurePath]:
        for i, (line, tf) in enumerate(self._tfs):
            log.explain(f"Testing rule {i+1}: {line}")
            log.explain(f"Testing rule {i + 1}: {line}")

            try:
                result = tf.transform(path)
            except Exception as e:
                log.warn(f"Error while testing rule {i+1}: {line}")
                log.warn(f"Error while testing rule {i + 1}: {line}")
                log.warn_contd(str(e))
                continue

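A standalone sketch (not part of the diff) of what the reworked `ExactReTf` locals construction does: positional groups become `g0`, `g1`, ... with `iN`/`fN` variants when they parse as numbers, and named groups are now exposed under their own names before the right-hand side is evaluated as an f-string. The regex and file names below are made up.

```
import contextlib
import re

match = re.fullmatch(r"(?P<name>\w+)-(\d+)\.pdf", "slides-03.pdf")
assert match is not None

locals_dir: dict[str, str | int | float] = {}
for i, group in enumerate([match[0]] + list(match.groups())):
    if group is None:
        continue
    locals_dir[f"g{i}"] = group
    with contextlib.suppress(ValueError):
        locals_dir[f"i{i}"] = int(group)
    with contextlib.suppress(ValueError):
        locals_dir[f"f{i}"] = float(group)

for name, capture in match.groupdict().items():
    locals_dir[name] = capture

# A right side such as "{name}/{i2:02}.pdf" is then evaluated as an f-string:
assert eval('f"{name}/{i2:02}.pdf"', {}, locals_dir) == "slides/03.pdf"
```
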
@@ -3,10 +3,11 @@ import getpass
import sys
import threading
from abc import ABC, abstractmethod
from collections.abc import Callable
from contextlib import AsyncExitStack
from pathlib import Path, PurePath
from types import TracebackType
from typing import Any, Callable, Dict, Generic, Optional, Type, TypeVar
from typing import Any, Generic, Optional, TypeVar
from urllib.parse import parse_qs, urlencode, urlsplit, urlunsplit

import bs4
@@ -79,7 +80,7 @@ def url_set_query_param(url: str, param: str, value: str) -> str:
    return urlunsplit((scheme, netloc, path, new_query_string, fragment))


def url_set_query_params(url: str, params: Dict[str, str]) -> str:
def url_set_query_params(url: str, params: dict[str, str]) -> str:
    """
    Sets multiple query parameters in an url, overwriting existing ones.
    """
@@ -131,10 +132,10 @@ class ReusableAsyncContextManager(ABC, Generic[T]):
        return result

    async def __aexit__(
            self,
            exc_type: Optional[Type[BaseException]],
            exc_value: Optional[BaseException],
            traceback: Optional[TracebackType],
        self,
        exc_type: Optional[type[BaseException]],
        exc_value: Optional[BaseException],
        traceback: Optional[TracebackType],
    ) -> Optional[bool]:
        if not self._active:
            raise RuntimeError("__aexit__ called too many times")

@@ -1,2 +1,2 @@
NAME = "PFERD"
VERSION = "3.4.3"
VERSION = "3.8.3"

README.md (13 lines changed)
@@ -17,7 +17,7 @@ Binaries for Linux, Windows and Mac can be downloaded directly from the

### With pip

Ensure you have at least Python 3.9 installed. Run the following command to
Ensure you have at least Python 3.11 installed. Run the following command to
install PFERD or upgrade it to the latest version:

```
@@ -56,6 +56,17 @@ Also, you can download most ILIAS pages directly like this:
$ pferd kit-ilias-web <url> <output_directory>
```

PFERD supports other ILIAS instances as well, using the `ilias-web` crawler (see
the [config section on `ilias-web`](CONFIG.md#the-ilias-web-crawler) for more
detail on the `base-url` and `client-id` parameters):

```
$ pferd ilias-web \
    --base-url https://ilias.my-university.example \
    --client-id My_University desktop \
    <output_directory>
```

However, the CLI only lets you download a single thing at a time, and the
resulting command can grow long quite quickly. Because of this, PFERD can also
be used with a config file.

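The config file itself is an INI-style file documented in CONFIG.md. As a purely hypothetical sketch (section names, keys and placeholder values are assumptions based on that documentation, not taken from this page), such a file might look like:

```
[crawl:my-course]
type = kit-ilias-web
auth = auth:ilias
target = <course id or ILIAS URL>
output_dir = my-course

[auth:ilias]
type = simple
username = <your username>
```

See CONFIG.md for the full list of crawler and authenticator options.
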
flake.lock (27 lines changed, generated, new file)
@@ -0,0 +1,27 @@
{
  "nodes": {
    "nixpkgs": {
      "locked": {
        "lastModified": 1751211869,
        "narHash": "sha256-1Cu92i1KSPbhPCKxoiVG5qnoRiKTgR5CcGSRyLpOd7Y=",
        "owner": "NixOS",
        "repo": "nixpkgs",
        "rev": "b43c397f6c213918d6cfe6e3550abfe79b5d1c51",
        "type": "github"
      },
      "original": {
        "owner": "NixOS",
        "ref": "nixos-25.05",
        "repo": "nixpkgs",
        "type": "github"
      }
    },
    "root": {
      "inputs": {
        "nixpkgs": "nixpkgs"
      }
    }
  },
  "root": "root",
  "version": 7
}

flake.nix (41 lines changed, new file)
@@ -0,0 +1,41 @@
{
  description = "Tool for downloading course-related files from ILIAS";

  inputs = {
    nixpkgs.url = "github:NixOS/nixpkgs/nixos-25.05";
  };

  outputs = { self, nixpkgs }:
    let
      # Helper function to generate an attrset '{ x86_64-linux = f "x86_64-linux"; ... }'.
      forAllSystems = nixpkgs.lib.genAttrs nixpkgs.lib.systems.flakeExposed;
    in
    {
      packages = forAllSystems (system:
        let pkgs = import nixpkgs { inherit system; };
        in
        rec {
          default = pkgs.python3Packages.buildPythonApplication rec {
            pname = "pferd";
            # Performing black magic
            # Don't worry, I sacrificed enough goats for the next few years
            version = (pkgs.lib.importTOML ./PFERD/version.py).VERSION;
            format = "pyproject";

            src = ./.;

            nativeBuildInputs = with pkgs.python3Packages; [
              setuptools
            ];

            propagatedBuildInputs = with pkgs.python3Packages; [
              aiohttp
              beautifulsoup4
              rich
              keyring
              certifi
            ];
          };
        });
    };
}

mypy.ini (11 lines changed)
@@ -1,11 +0,0 @@
[mypy]
disallow_any_generics = True
disallow_untyped_defs = True
disallow_incomplete_defs = True
no_implicit_optional = True
warn_unused_ignores = True
warn_unreachable = True
show_error_context = True

[mypy-rich.*,bs4,keyring]
ignore_missing_imports = True

@@ -1,3 +1,62 @@
[build-system]
requires = ["setuptools", "wheel"]
build-backend = "setuptools.build_meta"

[project]
name = "PFERD"
dependencies = [
    "aiohttp>=3.8.1",
    "beautifulsoup4>=4.10.0",
    "rich>=11.0.0",
    "keyring>=23.5.0",
    "certifi>=2021.10.8"
]
dynamic = ["version"]
requires-python = ">=3.11"

[project.scripts]
pferd = "PFERD.__main__:main"

[tool.setuptools.dynamic]
version = {attr = "PFERD.version.VERSION"}

[tool.ruff]
line-length = 110

[tool.ruff.lint]
select = [
    # pycodestyle
    "E",
    # Pyflakes
    "F",
    # pyupgrade
    "UP",
    # flake8-bugbear
    "B",
    # flake8-simplify
    "SIM",
    # isort
    "I",
]
ignore = [
    "UP045",
    "SIM114",
    "B023"
]

[tool.mypy]
disallow_any_generics = true
disallow_untyped_defs = true
disallow_incomplete_defs = true
no_implicit_optional = true
warn_unused_ignores = true
warn_unreachable = true
show_error_context = true
ignore_missing_imports = true

[dependency-groups]
dev = [
    "mypy>=1.18.2",
    "pyinstaller>=6.16.0",
    "ruff>=0.14.1",
]

@@ -2,4 +2,4 @@

set -e

pyinstaller --onefile pferd.py
uv run pyinstaller --onefile pferd.py

@@ -1,8 +1,8 @@
#!/usr/bin/env python3

import argparse
import time
import re
import time
from subprocess import run

@@ -2,5 +2,5 @@

set -e

mypy PFERD
flake8 PFERD
uv run mypy .
uv run ruff check

@@ -2,5 +2,4 @@

set -e

autopep8 --recursive --in-place PFERD
isort PFERD
uv run ruff format

@@ -13,5 +13,5 @@ pip install --upgrade setuptools
pip install --editable .

# Installing tools and type hints
pip install --upgrade mypy flake8 autopep8 isort pyinstaller
pip install --upgrade mypy flake8 flake8-pyproject autopep8 isort pyinstaller
pip install --upgrade types-chardet types-certifi

setup.cfg (23 lines changed)
@@ -1,23 +0,0 @@
[metadata]
name = PFERD
version = attr: PFERD.version.VERSION

[options]
packages = find:
python_requires = >=3.9
install_requires =
    aiohttp>=3.8.1
    beautifulsoup4>=4.10.0
    rich>=11.0.0
    keyring>=23.5.0
    certifi>=2021.10.8

[options.entry_points]
console_scripts =
    pferd = PFERD.__main__:main

[flake8]
max_line_length = 110

[isort]
line_length = 110