Mirror of https://github.com/Garmelon/PFERD.git (synced 2025-07-12 14:12:30 +02:00)
Compare commits
93 Commits
Commit SHA1s in this comparison:

7291382430, 1a430ad5d1, f6bdeb6b9d, 63f25277b0, c8eff04ae0, edc482cdf4, 72cd0f77e2, be175f9347, ba2833dba5, 2f0e792670,
5f88539f7e, bd9d7efe64, 16a2dd5b15, 678283d341, 287173b0b1, 712217e959, 6dda4c55a8, 596b6a7688, 5983200247, 26e802d88b,
f5c4e82816, f5273f7ca0, fa71a9f44f, 81d6ff53c4, d7a2b6e019, 71c65e89d1, c1046498e7, 8fbd1978af, 739dd95850, c54c3bcfa1,
d7f2229978, 52fdeae752, f9bb2e41cf, 4f9e2ab48d, 19beb8f07b, c897d9e2f5, 21a266e302, b29b6f93f8, 318226d7cb, 422cf05f15,
819c6673c7, 89b44c69a7, 4b4f72b2ca, 778517d8c6, 428b0179fc, ade6309dd9, fd6cb7b966, 5c87517ceb, b01f093474, 3a05b90525,
7a00f73e0e, 5d0621420e, df98153169, fc1f68ccd9, 3e831c7e23, bbcfe9c8dd, eb01aa86cb, 3db186a978, 4a5959fd58, 1cbc2b717a,
da627ff929, c1b592ac29, eb0c956d32, ab0cb2d956, a117126389, e9f8901520, 266812f90e, 533bc27439, 0113a0ca10, 40f8a05ad6,
50b50513c6, df3514cd03, ad53185247, 87b67e9271, b54b3b979c, 2184ac8040, b3d412360b, dbc2553b11, 68c398f1fe, 123a57beec,
d204dac8ce, 443f7fe839, 0294ceb7d5, 6f30c6583d, 467fc526e8, 722d2eb393, 6d44aac278, 55a2de6b88, c0d6d8b229, 635caa765d,
e69b55b349, 07200bbde5, c020cccc64
.github/dependabot.yml (vendored, new file, 10 lines)

@@ -0,0 +1,10 @@
+version: 2
+updates:
+  - package-ecosystem: github-actions
+    directory: /
+    schedule:
+      interval: monthly
+    groups:
+      gh-actions:
+        patterns:
+          - "*"
.github/workflows/build-and-release.yml (vendored, 31 changed lines)

@@ -1,6 +1,6 @@
 name: build-and-release
 
-on: push
+on: [push, pull_request]
 
 defaults:
   run:
@@ -13,13 +13,12 @@ jobs:
     strategy:
       fail-fast: false
       matrix:
-        os: [ubuntu-latest, windows-latest, macos-latest]
-        python: ["3.9"]
+        os: [ubuntu-latest, windows-latest, macos-13, macos-latest]
+        python: ["3.11"]
     steps:
+      - uses: actions/checkout@v4
 
-      - uses: actions/checkout@v2
-
-      - uses: actions/setup-python@v2
+      - uses: actions/setup-python@v5
         with:
           python-version: ${{ matrix.python }}
 
@@ -34,7 +33,12 @@ jobs:
         run: ./scripts/setup --no-pip
 
       - name: Run checks
-        run: ./scripts/check
+        run: |
+          ./scripts/check
+          ./scripts/format
+
+      - name: Assert no changes
+        run: git diff --exit-code
 
       - name: Build
         run: ./scripts/build
@@ -45,9 +49,9 @@ jobs:
         run: mv dist/pferd* dist/pferd-${{ matrix.os }}
 
       - name: Upload binary
-        uses: actions/upload-artifact@v2
+        uses: actions/upload-artifact@v4
         with:
-          name: Binaries
+          name: pferd-${{ matrix.os }}
           path: dist/pferd-${{ matrix.os }}
 
   release:
@@ -57,18 +61,20 @@ jobs:
     steps:
 
       - name: Download binaries
-        uses: actions/download-artifact@v2
+        uses: actions/download-artifact@v4
         with:
-          name: Binaries
+          pattern: pferd-*
+          merge-multiple: true
 
       - name: Rename binaries
         run: |
          mv pferd-ubuntu-latest pferd-linux
          mv pferd-windows-latest pferd-windows.exe
+         mv pferd-macos-13 pferd-mac-x86_64
          mv pferd-macos-latest pferd-mac
 
       - name: Create release
-        uses: softprops/action-gh-release@v1
+        uses: softprops/action-gh-release@v2
        env:
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
        with:
@@ -76,3 +82,4 @@ jobs:
           pferd-linux
           pferd-windows.exe
           pferd-mac
+          pferd-mac-x86_64
.gitignore (vendored, 1 changed line)

@@ -3,6 +3,7 @@
 /PFERD.egg-info/
 __pycache__/
 /.vscode/
+/.idea/
 
 # pyinstaller
 /pferd.spec
CHANGELOG.md (97 changed lines)

@@ -22,6 +22,103 @@ ambiguous situations.
 
 ## Unreleased
 
+## 3.8.0 - 2025-04-15
+
+### Added
+- Support for ILIAS 9
+
+### Changed
+- Added prettier CSS to forum threads
+- Increase minimum supported Python version to 3.11
+
+### Fixed
+- File links in report on Windows
+- TOTP authentication in KIT Shibboleth
+- Forum crawling only considering the first 20 entries
+
+## 3.7.0 - 2024-11-13
+
+### Added
+- Support for MOB videos in page descriptions
+- Clickable links in the report to directly open new/modified/not-deleted files
+- Support for non-KIT Shibboleth login
+
+### Changed
+- Remove videos from description pages
+- Perform ILIAS cycle detection after processing the transform to allow
+  ignoring duplicated elements
+- Parse headings (h1-h3) as folders in kit-ipd crawler
+
+### Fixed
+- Personal desktop/dashboard/favorites crawling
+- Crawling of nested courses
+- Downloading of links with no target URL
+- Handle row flex on description pages
+- Add `<!DOCTYPE html>` heading to forum threads to fix MIME type detection
+- Handle groups in cards
+
+## 3.6.0 - 2024-10-23
+
+### Added
+- Generic `ilias-web` crawler and `ilias-web` CLI command
+- Support for the course overview page. Using this URL as a target might cause
+  duplication warnings, as subgroups are listed separately.
+- Support for named capture groups in regex transforms
+- Crawl custom item groups as folders
+
+### Fixed
+- Normalization of meeting names in cards
+- Sanitization of slashes in exercise container names
+
+## 3.5.2 - 2024-04-14
+
+### Fixed
+- Crawling of personal desktop with ILIAS 8
+- Crawling of empty personal desktops
+
+## 3.5.1 - 2024-04-09
+
+### Added
+- Support for ILIAS 8
+
+### Fixed
+- Video name deduplication
+
+## 3.5.0 - 2023-09-13
+
+### Added
+- `no-delete-prompt-override` conflict resolution strategy
+- Support for ILIAS learning modules
+- `show_not_deleted` option to stop printing the "Not Deleted" status or report
+  message. This combines nicely with the `no-delete-prompt-override` strategy,
+  causing PFERD to mostly ignore local-only files.
+- Support for mediacast video listings
+- Crawling of files in info tab
+
+### Changed
+- Remove size suffix for files in content pages
+
+### Fixed
+- Crawling of courses with the timeline view as the default tab
+- Crawling of file and custom opencast cards
+- Crawling of button cards without descriptions
+- Abort crawling when encountering an unexpected ILIAS root page redirect
+- Sanitize ASCII control characters on Windows
+- Crawling of paginated past meetings
+- Ignore SCORM learning modules
+
+## 3.4.3 - 2022-11-29
+
+### Added
+- Missing documentation for `forums` option
+
+### Changed
+- Clear up error message shown when multiple paths are found to an element
+
+### Fixed
+- IPD crawler unnecessarily appending trailing slashes
+- Crawling opencast when ILIAS is set to English
+
 ## 3.4.2 - 2022-10-26
 
 ### Added
CONFIG.md (93 changed lines)

@@ -4,11 +4,11 @@ A config file consists of sections. A section begins with a `[section]` header,
 which is followed by a list of `key = value` pairs. Comments must be on their
 own line and start with `#`. Multiline values must be indented beyond their key.
 Boolean values can be `yes` or `no`. For more details and some examples on the
-format, see the [configparser documentation][1] ([interpolation][2] is
-disabled).
+format, see the [configparser documentation][cp-file]
+([interpolation][cp-interp] is disabled).
 
-[1]: <https://docs.python.org/3/library/configparser.html#supported-ini-file-structure> "Supported INI File Structure"
-[2]: <https://docs.python.org/3/library/configparser.html#interpolation-of-values> "Interpolation of values"
+[cp-file]: <https://docs.python.org/3/library/configparser.html#supported-ini-file-structure> "Supported INI File Structure"
+[cp-interp]: <https://docs.python.org/3/library/configparser.html#interpolation-of-values> "Interpolation of values"
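For illustration, here is a minimal sketch of how a config in this format parses with Python's configparser when interpolation is disabled. It is not PFERD's own loading code, and the file contents and section names are made up:

```python
import configparser

# A made-up config in the format described above: [section] headers,
# `key = value` pairs, `#` comments, yes/no booleans, indented multiline values.
EXAMPLE = """
[DEFAULT]
# Applies to every crawler unless overridden
report = yes

[crawl:example]
type = local
target = ./some-folder
transform =
    foo/bar --> baz
"""

# PFERD disables interpolation, so literal `%` characters in values are fine.
parser = configparser.ConfigParser(interpolation=None)
parser.read_string(EXAMPLE)

print(parser["crawl:example"]["type"])               # local
print(parser["DEFAULT"].getboolean("report"))         # True ("yes" parses as a boolean)
print(parser["crawl:example"]["transform"].strip())   # foo/bar --> baz
```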
 
 ## The `DEFAULT` section
 
@@ -26,6 +26,9 @@ default values for the other sections.
   `Added ...`) while running a crawler. (Default: `yes`)
 - `report`: Whether PFERD should print a report of added, changed and deleted
   local files for all crawlers before exiting. (Default: `yes`)
+- `show_not_deleted`: Whether PFERD should print messages in status and report
+  when a local-only file wasn't deleted. Combines nicely with the
+  `no-delete-prompt-override` conflict resolution strategy.
 - `share_cookies`: Whether crawlers should share cookies where applicable. For
   example, some crawlers share cookies if they crawl the same website using the
   same account. (Default: `yes`)
@@ -75,6 +78,9 @@ common to all crawlers:
   using `prompt` and always choosing "yes".
 - `no-delete`: Never delete local files, but overwrite local files if the
   remote file is different.
+- `no-delete-prompt-overwrite`: Never delete local files, but prompt to
+  overwrite local files if the remote file is different. Combines nicely
+  with the `show_not_deleted` option.
 - `transform`: Rules for renaming and excluding certain files and directories.
   For more details, see [this section](#transformation-rules). (Default: empty)
 - `tasks`: The maximum number of concurrent tasks (such as crawling or
@@ -140,7 +146,7 @@ crawler simulate a slower, network-based crawler.
 
 This crawler crawls a KIT-IPD page by url. The root page can be crawled from
 outside the KIT network so you will be informed about any new/deleted files,
-but downloading files requires you to be within. Adding a show delay between
+but downloading files requires you to be within. Adding a short delay between
 requests is likely a good idea.
 
 - `target`: URL to a KIT-IPD page
@@ -148,6 +154,63 @@ requests is likely a good idea.
   matches, the given link is downloaded as a file. This is used to extract
   files from KIT-IPD pages. (Default: `^.*?[^/]+\.(pdf|zip|c|cpp|java)$`)
 
+### The `ilias-web` crawler
+
+This crawler crawls a generic ILIAS instance.
+
+Inspired by [this ILIAS downloader][ilias-dl], the following configurations should work
+out of the box for the corresponding universities:
+
+[ilias-dl]: https://github.com/V3lop5/ilias-downloader/blob/main/configs "ilias-downloader configs"
+
+| University    | `base_url`                              | `login_type` | `client_id`   |
+|---------------|-----------------------------------------|--------------|---------------|
+| FH Aachen     | https://www.ili.fh-aachen.de            | local        | elearning     |
+| Uni Köln      | https://www.ilias.uni-koeln.de/ilias    | local        | uk            |
+| Uni Konstanz  | https://ilias.uni-konstanz.de           | local        | ILIASKONSTANZ |
+| Uni Stuttgart | https://ilias3.uni-stuttgart.de         | local        | Uni_Stuttgart |
+| Uni Tübingen  | https://ovidius.uni-tuebingen.de/ilias3 | shibboleth   |               |
+
+If your university isn't listed, try navigating to your instance's login page.
+Assuming no custom login service is used, the URL will look something like this:
+
+```jinja
+{{ base_url }}/login.php?client_id={{ client_id }}&cmd=force_login&lang=
+```
+
+If the values work, feel free to submit a PR and add them to the table above.
+
+- `base_url`: The URL where the ILIAS instance is located. (Required)
+- `login_type`: How you authenticate. (Required)
+  - `local`: Use `client_id` for authentication.
+  - `shibboleth`: Use shibboleth for authentication.
+- `client_id`: An ID used for authentication if `login_type` is `local`. Is
+  ignored if `login_type` is `shibboleth`.
+- `target`: The ILIAS element to crawl. (Required)
+  - `desktop`: Crawl your personal desktop / dashboard
+  - `<course id>`: Crawl the course with the given id
+  - `<url>`: Crawl a given element by URL (preferably the permanent URL linked
+    at the bottom of its ILIAS page).
+    This also supports the "My Courses" overview page to download *all*
+    courses. Note that this might produce confusing local directory layouts
+    and duplication warnings if you are a member of an ILIAS group. The
+    `desktop` target is generally preferable.
+- `auth`: Name of auth section to use for login. (Required)
+- `tfa_auth`: Name of auth section to use for two-factor authentication. Only
+  uses the auth section's password. (Default: Anonymous `tfa` authenticator)
+- `links`: How to represent external links. (Default: `fancy`)
+  - `ignore`: Don't download links.
+  - `plaintext`: A text file containing only the URL.
+  - `fancy`: A HTML file looking like the ILIAS link element.
+  - `internet-shortcut`: An internet shortcut file (`.url` file).
+- `link_redirect_delay`: Time (in seconds) until `fancy` link files will
+  redirect to the actual URL. Set to a negative value to disable the automatic
+  redirect. (Default: `-1`)
+- `videos`: Whether to download videos. (Default: `no`)
+- `forums`: Whether to download forum threads. (Default: `no`)
+- `http_timeout`: The timeout (in seconds) for all HTTP requests. (Default:
+  `20.0`)
+
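As an orientation sketch (not an official sample config), the following Python snippet builds such a crawl section programmatically. The FH Aachen values come from the table above; the config file name and the `crawl:fh-aachen` section name are assumptions:

```python
import configparser

# Illustrative only: base_url/login_type/client_id for FH Aachen are taken
# from the table above; section and option names follow this documentation.
config = configparser.ConfigParser(interpolation=None)
config["crawl:fh-aachen"] = {
    "type": "ilias-web",
    "base_url": "https://www.ili.fh-aachen.de",
    "login_type": "local",
    "client_id": "elearning",
    "target": "desktop",          # or a course id / a permanent URL
    "output_dir": "ILIAS",
    "auth": "auth:ilias",
    "links": "fancy",
    "videos": "no",
}
config["auth:ilias"] = {"type": "simple"}

with open("pferd.cfg", "w") as f:  # assumed config file name
    config.write(f)
```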
 
 ### The `kit-ilias-web` crawler
 
 This crawler crawls the KIT ILIAS instance.
@@ -181,6 +244,7 @@ script once per day should be fine.
   redirect to the actual URL. Set to a negative value to disable the automatic
   redirect. (Default: `-1`)
 - `videos`: Whether to download videos. (Default: `no`)
+- `forums`: Whether to download forum threads. (Default: `no`)
 - `http_timeout`: The timeout (in seconds) for all HTTP requests. (Default:
   `20.0`)
 
@@ -225,10 +289,10 @@ is stored in the keyring.
 
 ### The `pass` authenticator
 
-This authenticator queries the [`pass` password manager][3] for a username and
-password. It tries to be mostly compatible with [browserpass][4] and
-[passff][5], so see those links for an overview of the format. If PFERD fails
-to load your password, you can use the `--explain` flag to see why.
+This authenticator queries the [`pass` password manager][pass] for a username
+and password. It tries to be mostly compatible with [browserpass][browserpass]
+and [passff][passff], so see those links for an overview of the format. If PFERD
+fails to load your password, you can use the `--explain` flag to see why.
 
 - `passname`: The name of the password to use (Required)
 - `username_prefixes`: A comma-separated list of username line prefixes
@@ -236,9 +300,9 @@ to load your password, you can use the `--explain` flag to see why.
 - `password_prefixes`: A comma-separated list of password line prefixes
   (Default: `password,pass,secret`)
 
-[3]: <https://www.passwordstore.org/> "Pass: The Standard Unix Password Manager"
-[4]: <https://github.com/browserpass/browserpass-extension#organizing-password-store> "Organizing password store"
-[5]: <https://github.com/passff/passff#multi-line-format> "Multi-line format"
+[pass]: <https://www.passwordstore.org/> "Pass: The Standard Unix Password Manager"
+[browserpass]: <https://github.com/browserpass/browserpass-extension#organizing-password-store> "Organizing password store"
+[passff]: <https://github.com/passff/passff#multi-line-format> "Multi-line format"
 
 ### The `tfa` authenticator
 
@@ -289,7 +353,7 @@ path matches `SOURCE`, it is renamed to `TARGET`.
 Example: `foo/bar --> baz`
 - Doesn't match `foo`, `a/foo/bar` or `foo/baz`
 - Converts `foo/bar` into `baz`
-- Converts `foo/bar/wargl` into `bar/wargl`
+- Converts `foo/bar/wargl` into `baz/wargl`
 
 Example: `foo/bar --> !`
 - Doesn't match `foo`, `a/foo/bar` or `foo/baz`
@@ -337,7 +401,8 @@ matches `SOURCE`, the output path is created using `TARGET` as template.
   be referred to as `{g<n>}` (e.g. `{g3}`). `{g0}` refers to the original path.
   If capturing group *n*'s contents are a valid integer, the integer value is
   available as `{i<n>}` (e.g. `{i3}`). If capturing group *n*'s contents are a
-  valid float, the float value is available as `{f<n>}` (e.g. `{f3}`). If a
+  valid float, the float value is available as `{f<n>}` (e.g. `{f3}`). Named capture
+  groups (e.g. `(?P<name>)`) are available by their name (e.g. `{name}`). If a
   capturing group is not present (e.g. when matching the string `cd` with the
   regex `(ab)?cd`), the corresponding variables are not defined.
 
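A simplified sketch of the substitution semantics just described — this is an illustrative re-implementation, not PFERD's actual transform engine, and the function name is made up:

```python
import re
from typing import Optional


def apply_regex_rule(source: str, target: str, path: str) -> Optional[str]:
    """One regex rename rule, simplified; returns None if SOURCE doesn't match the path."""
    match = re.fullmatch(source, path)
    if match is None:
        return None

    variables: dict[str, str] = {"g0": match.group(0)}
    for n, value in enumerate(match.groups(), start=1):
        if value is None:
            continue  # group not present -> its variables stay undefined
        variables[f"g{n}"] = value
        try:
            variables[f"i{n}"] = str(int(value))    # integer value, if valid
        except ValueError:
            pass
        try:
            variables[f"f{n}"] = str(float(value))  # float value, if valid
        except ValueError:
            pass
    # Named capture groups (e.g. (?P<name>...)) are available by their name.
    variables.update({k: v for k, v in match.groupdict().items() if v is not None})

    return target.format(**variables)


# "Exercise 07.pdf" -> "Blatt_7.pdf", using the integer value of the first group
print(apply_regex_rule(r"Exercise (?P<num>\d+)\.pdf", "Blatt_{i1}.pdf", "Exercise 07.pdf"))
```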
LICENSE (5 changed lines)

@@ -1,5 +1,6 @@
-Copyright 2019-2021 Garmelon, I-Al-Istannen, danstooamerican, pavelzw,
-TheChristophe, Scriptim, thelukasprobst, Toorero
+Copyright 2019-2024 Garmelon, I-Al-Istannen, danstooamerican, pavelzw,
+TheChristophe, Scriptim, thelukasprobst, Toorero,
+Mr-Pine, p-fruck, PinieP
 
 Permission is hereby granted, free of charge, to any person obtaining a copy of
 this software and associated documentation files (the "Software"), to deal in
@@ -47,6 +47,8 @@ def configure_logging_from_args(args: argparse.Namespace) -> None:
         log.output_explain = args.explain
     if args.status is not None:
         log.output_status = args.status
+    if args.show_not_deleted is not None:
+        log.output_not_deleted = args.show_not_deleted
     if args.report is not None:
         log.output_report = args.report
 
@@ -72,6 +74,8 @@ def configure_logging_from_config(args: argparse.Namespace, config: Config) -> None:
             log.output_status = config.default_section.status()
         if args.report is None:
             log.output_report = config.default_section.report()
+        if args.show_not_deleted is None:
+            log.output_not_deleted = config.default_section.show_not_deleted()
     except ConfigOptionError as e:
         log.error(str(e))
         sys.exit(1)
@@ -1,4 +1,4 @@
-from typing import Optional, Tuple
+from typing import Optional, Tuple, cast
 
 import keyring
 
@@ -13,7 +13,7 @@ class KeyringAuthSection(AuthSection):
         return self.s.get("username")
 
     def keyring_name(self) -> str:
-        return self.s.get("keyring_name", fallback=NAME)
+        return cast(str, self.s.get("keyring_name", fallback=NAME))
 
 
 class KeyringAuthenticator(Authenticator):
@@ -8,6 +8,7 @@
 # well.
 
 from . import command_local  # noqa: F401 imported but unused
+from . import command_ilias_web  # noqa: F401 imported but unused
 from . import command_kit_ilias_web  # noqa: F401 imported but unused
 from . import command_kit_ipd  # noqa: F401 imported but unused
 from .parser import PARSER, ParserLoadError, load_default_section  # noqa: F401 imported but unused
PFERD/cli/command_ilias_web.py (new file, 56 lines)

@@ -0,0 +1,56 @@
+import argparse
+import configparser
+
+from ..logging import log
+from .common_ilias_args import configure_common_group_args, load_common
+from .parser import CRAWLER_PARSER, SUBPARSERS, load_crawler
+
+COMMAND_NAME = "ilias-web"
+
+SUBPARSER = SUBPARSERS.add_parser(
+    COMMAND_NAME,
+    parents=[CRAWLER_PARSER],
+)
+
+GROUP = SUBPARSER.add_argument_group(
+    title=f"{COMMAND_NAME} crawler arguments",
+    description=f"arguments for the '{COMMAND_NAME}' crawler",
+)
+
+GROUP.add_argument(
+    "--base-url",
+    type=str,
+    metavar="BASE_URL",
+    help="The base url of the ilias instance"
+)
+
+GROUP.add_argument(
+    "--client-id",
+    type=str,
+    metavar="CLIENT_ID",
+    help="The client id of the ilias instance"
+)
+
+configure_common_group_args(GROUP)
+
+
+def load(
+    args: argparse.Namespace,
+    parser: configparser.ConfigParser,
+) -> None:
+    log.explain(f"Creating config for command '{COMMAND_NAME}'")
+
+    parser["crawl:ilias"] = {}
+    section = parser["crawl:ilias"]
+    load_crawler(args, section)
+
+    section["type"] = COMMAND_NAME
+    if args.ilias_url is not None:
+        section["base_url"] = args.ilias_url
+    if args.client_id is not None:
+        section["client_id"] = args.client_id
+
+    load_common(section, args, parser)
+
+
+SUBPARSER.set_defaults(command=load)
@@ -1,120 +1,37 @@
 import argparse
 import configparser
-from pathlib import Path
 
-from ..crawl.ilias.file_templates import Links
 from ..logging import log
-from .parser import (CRAWLER_PARSER, SUBPARSERS, BooleanOptionalAction, ParserLoadError, load_crawler,
-                     show_value_error)
+from .common_ilias_args import configure_common_group_args, load_common
+from .parser import CRAWLER_PARSER, SUBPARSERS, load_crawler
+
+COMMAND_NAME = "kit-ilias-web"
 
 SUBPARSER = SUBPARSERS.add_parser(
-    "kit-ilias-web",
+    COMMAND_NAME,
     parents=[CRAWLER_PARSER],
 )
 
 GROUP = SUBPARSER.add_argument_group(
-    title="kit-ilias-web crawler arguments",
-    description="arguments for the 'kit-ilias-web' crawler",
+    title=f"{COMMAND_NAME} crawler arguments",
+    description=f"arguments for the '{COMMAND_NAME}' crawler",
-)
-GROUP.add_argument(
-    "target",
-    type=str,
-    metavar="TARGET",
-    help="course id, 'desktop', or ILIAS URL to crawl"
-)
-GROUP.add_argument(
-    "output",
-    type=Path,
-    metavar="OUTPUT",
-    help="output directory"
-)
-GROUP.add_argument(
-    "--username", "-u",
-    type=str,
-    metavar="USERNAME",
-    help="user name for authentication"
-)
-GROUP.add_argument(
-    "--keyring",
-    action=BooleanOptionalAction,
-    help="use the system keyring to store and retrieve passwords"
-)
-GROUP.add_argument(
-    "--credential-file",
-    type=Path,
-    metavar="PATH",
-    help="read username and password from a credential file"
-)
-GROUP.add_argument(
-    "--links",
-    type=show_value_error(Links.from_string),
-    metavar="OPTION",
-    help="how to represent external links"
-)
-GROUP.add_argument(
-    "--link-redirect-delay",
-    type=int,
-    metavar="SECONDS",
-    help="time before 'fancy' links redirect to to their target (-1 to disable)"
-)
-GROUP.add_argument(
-    "--videos",
-    action=BooleanOptionalAction,
-    help="crawl and download videos"
-)
-GROUP.add_argument(
-    "--forums",
-    action=BooleanOptionalAction,
-    help="crawl and download forum posts"
-)
-GROUP.add_argument(
-    "--http-timeout", "-t",
-    type=float,
-    metavar="SECONDS",
-    help="timeout for all HTTP requests"
 )
 
+configure_common_group_args(GROUP)
+
 
 def load(
     args: argparse.Namespace,
     parser: configparser.ConfigParser,
 ) -> None:
-    log.explain("Creating config for command 'kit-ilias-web'")
+    log.explain(f"Creating config for command '{COMMAND_NAME}'")
 
     parser["crawl:ilias"] = {}
     section = parser["crawl:ilias"]
     load_crawler(args, section)
 
-    section["type"] = "kit-ilias-web"
-    section["target"] = str(args.target)
-    section["output_dir"] = str(args.output)
-    section["auth"] = "auth:ilias"
-    if args.links is not None:
-        section["links"] = str(args.links.value)
-    if args.link_redirect_delay is not None:
-        section["link_redirect_delay"] = str(args.link_redirect_delay)
-    if args.videos is not None:
-        section["videos"] = "yes" if args.videos else "no"
-    if args.forums is not None:
-        section["forums"] = "yes" if args.forums else "no"
-    if args.http_timeout is not None:
-        section["http_timeout"] = str(args.http_timeout)
-
-    parser["auth:ilias"] = {}
-    auth_section = parser["auth:ilias"]
-    if args.credential_file is not None:
-        if args.username is not None:
-            raise ParserLoadError("--credential-file and --username can't be used together")
-        if args.keyring:
-            raise ParserLoadError("--credential-file and --keyring can't be used together")
-        auth_section["type"] = "credential-file"
-        auth_section["path"] = str(args.credential_file)
-    elif args.keyring:
-        auth_section["type"] = "keyring"
-    else:
-        auth_section["type"] = "simple"
-    if args.username is not None:
-        auth_section["username"] = args.username
+    section["type"] = COMMAND_NAME
+    load_common(section, args, parser)
 
 
 SUBPARSER.set_defaults(command=load)
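The argument definitions and config assembly now live in the shared helpers of the new `common_ilias_args.py` shown next. As a quick, hypothetical sketch of the mapping `load_common` performs (the Namespace values are made up; the field names mirror the options defined in that file):

```python
import argparse
import configparser

from PFERD.cli.common_ilias_args import load_common

# Made-up parsed arguments, mirroring the options the shared group defines.
args = argparse.Namespace(
    target="desktop", output="ILIAS", username=None, keyring=True,
    credential_file=None, links=None, link_redirect_delay=None,
    videos=None, forums=None, http_timeout=None,
)

parser = configparser.ConfigParser(interpolation=None)
parser["crawl:ilias"] = {}
load_common(parser["crawl:ilias"], args, parser)

# The crawl section points at the shared auth section, and because
# --keyring was set, the auth section uses the system keyring.
assert parser["crawl:ilias"]["auth"] == "auth:ilias"
assert parser["auth:ilias"]["type"] == "keyring"
```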
PFERD/cli/common_ilias_args.py (new file, 104 lines)

@@ -0,0 +1,104 @@
+import argparse
+import configparser
+from pathlib import Path
+
+from ..crawl.ilias.file_templates import Links
+from .parser import BooleanOptionalAction, ParserLoadError, show_value_error
+
+
+def configure_common_group_args(group: argparse._ArgumentGroup) -> None:
+    """These arguments are shared between the KIT and generic Ilias web command."""
+    group.add_argument(
+        "target",
+        type=str,
+        metavar="TARGET",
+        help="course id, 'desktop', or ILIAS URL to crawl"
+    )
+    group.add_argument(
+        "output",
+        type=Path,
+        metavar="OUTPUT",
+        help="output directory"
+    )
+    group.add_argument(
+        "--username", "-u",
+        type=str,
+        metavar="USERNAME",
+        help="user name for authentication"
+    )
+    group.add_argument(
+        "--keyring",
+        action=BooleanOptionalAction,
+        help="use the system keyring to store and retrieve passwords"
+    )
+    group.add_argument(
+        "--credential-file",
+        type=Path,
+        metavar="PATH",
+        help="read username and password from a credential file"
+    )
+    group.add_argument(
+        "--links",
+        type=show_value_error(Links.from_string),
+        metavar="OPTION",
+        help="how to represent external links"
+    )
+    group.add_argument(
+        "--link-redirect-delay",
+        type=int,
+        metavar="SECONDS",
+        help="time before 'fancy' links redirect to to their target (-1 to disable)"
+    )
+    group.add_argument(
+        "--videos",
+        action=BooleanOptionalAction,
+        help="crawl and download videos"
+    )
+    group.add_argument(
+        "--forums",
+        action=BooleanOptionalAction,
+        help="crawl and download forum posts"
+    )
+    group.add_argument(
+        "--http-timeout", "-t",
+        type=float,
+        metavar="SECONDS",
+        help="timeout for all HTTP requests"
+    )
+
+
+def load_common(
+    section: configparser.SectionProxy,
+    args: argparse.Namespace,
+    parser: configparser.ConfigParser,
+) -> None:
+    """Load common config between generic and KIT ilias web command"""
+    section["target"] = str(args.target)
+    section["output_dir"] = str(args.output)
+    section["auth"] = "auth:ilias"
+    if args.links is not None:
+        section["links"] = str(args.links.value)
+    if args.link_redirect_delay is not None:
+        section["link_redirect_delay"] = str(args.link_redirect_delay)
+    if args.videos is not None:
+        section["videos"] = "yes" if args.videos else "no"
+    if args.forums is not None:
+        section["forums"] = "yes" if args.forums else "no"
+    if args.http_timeout is not None:
+        section["http_timeout"] = str(args.http_timeout)
+
+    parser["auth:ilias"] = {}
+    auth_section = parser["auth:ilias"]
+    if args.credential_file is not None:
+        if args.username is not None:
+            raise ParserLoadError("--credential-file and --username can't be used together")
+        if args.keyring:
+            raise ParserLoadError("--credential-file and --keyring can't be used together")
+        auth_section["type"] = "credential-file"
+        auth_section["path"] = str(args.credential_file)
+    elif args.keyring:
+        auth_section["type"] = "keyring"
+    else:
+        auth_section["type"] = "simple"
+    if args.username is not None:
+        auth_section["username"] = args.username
@@ -215,6 +215,11 @@ PARSER.add_argument(
     action=BooleanOptionalAction,
     help="whether crawlers should share cookies where applicable"
 )
+PARSER.add_argument(
+    "--show-not-deleted",
+    action=BooleanOptionalAction,
+    help="print messages in status and report when PFERD did not delete a local only file"
+)
 
 
 def load_default_section(
@@ -233,6 +238,8 @@ def load_default_section(
         section["report"] = "yes" if args.report else "no"
     if args.share_cookies is not None:
         section["share_cookies"] = "yes" if args.share_cookies else "no"
+    if args.show_not_deleted is not None:
+        section["show_not_deleted"] = "yes" if args.show_not_deleted else "no"
 
 
 SUBPARSERS = PARSER.add_subparsers(title="crawlers")
@@ -82,6 +82,9 @@ class DefaultSection(Section):
     def report(self) -> bool:
         return self.s.getboolean("report", fallback=True)
 
+    def show_not_deleted(self) -> bool:
+        return self.s.getboolean("show_not_deleted", fallback=True)
+
     def share_cookies(self) -> bool:
         return self.s.getboolean("share_cookies", fallback=True)
 
@@ -4,7 +4,7 @@ from typing import Callable, Dict
 from ..auth import Authenticator
 from ..config import Config
 from .crawler import Crawler, CrawlError, CrawlerSection  # noqa: F401
-from .ilias import KitIliasWebCrawler, KitIliasWebCrawlerSection
+from .ilias import IliasWebCrawler, IliasWebCrawlerSection, KitIliasWebCrawler, KitIliasWebCrawlerSection
 from .kit_ipd_crawler import KitIpdCrawler, KitIpdCrawlerSection
 from .local_crawler import LocalCrawler, LocalCrawlerSection
 
@@ -18,6 +18,8 @@ CrawlerConstructor = Callable[[
 CRAWLERS: Dict[str, CrawlerConstructor] = {
     "local": lambda n, s, c, a:
         LocalCrawler(n, LocalCrawlerSection(s), c),
+    "ilias-web": lambda n, s, c, a:
+        IliasWebCrawler(n, IliasWebCrawlerSection(s), c, a),
     "kit-ilias-web": lambda n, s, c, a:
         KitIliasWebCrawler(n, KitIliasWebCrawlerSection(s), c, a),
     "kit-ipd": lambda n, s, c, a:
@@ -149,9 +149,7 @@ class CrawlerSection(Section):
         return self.s.getboolean("skip", fallback=False)
 
     def output_dir(self, name: str) -> Path:
-        # TODO Use removeprefix() after switching to 3.9
-        if name.startswith("crawl:"):
-            name = name[len("crawl:"):]
+        name = name.removeprefix("crawl:")
         return Path(self.s.get("output_dir", name)).expanduser()
 
     def redownload(self) -> Redownload:
@@ -258,6 +256,10 @@ class Crawler(ABC):
     def prev_report(self) -> Optional[Report]:
         return self._output_dir.prev_report
 
+    @property
+    def output_dir(self) -> OutputDirectory:
+        return self._output_dir
+
     @staticmethod
     async def gather(awaitables: Sequence[Awaitable[Any]]) -> List[Any]:
         """
@@ -290,9 +292,40 @@ class Crawler(ABC):
         log.explain("Answer: Yes")
         return CrawlToken(self._limiter, path)
 
+    def should_try_download(
+        self,
+        path: PurePath,
+        *,
+        etag_differs: Optional[bool] = None,
+        mtime: Optional[datetime] = None,
+        redownload: Optional[Redownload] = None,
+        on_conflict: Optional[OnConflict] = None,
+    ) -> bool:
+        log.explain_topic(f"Decision: Should Download {fmt_path(path)}")
+
+        if self._transformer.transform(path) is None:
+            log.explain("Answer: No (ignored)")
+            return False
+
+        should_download = self._output_dir.should_try_download(
+            path,
+            etag_differs=etag_differs,
+            mtime=mtime,
+            redownload=redownload,
+            on_conflict=on_conflict
+        )
+        if should_download:
+            log.explain("Answer: Yes")
+            return True
+        else:
+            log.explain("Answer: No")
+            return False
+
     async def download(
         self,
         path: PurePath,
+        *,
+        etag_differs: Optional[bool] = None,
         mtime: Optional[datetime] = None,
         redownload: Optional[Redownload] = None,
         on_conflict: Optional[OnConflict] = None,
@@ -307,7 +340,14 @@ class Crawler(ABC):
             log.status("[bold bright_black]", "Ignored", fmt_path(path))
             return None
 
-        fs_token = await self._output_dir.download(path, transformed_path, mtime, redownload, on_conflict)
+        fs_token = await self._output_dir.download(
+            path,
+            transformed_path,
+            etag_differs=etag_differs,
+            mtime=mtime,
+            redownload=redownload,
+            on_conflict=on_conflict
+        )
         if fs_token is None:
             log.explain("Answer: No")
             return None
@@ -1,12 +1,14 @@
 import asyncio
 import http.cookies
 import ssl
+from datetime import datetime
 from pathlib import Path, PurePath
-from typing import Any, Dict, List, Optional
+from typing import Any, Dict, List, Optional, Tuple, cast
 
 import aiohttp
 import certifi
 from aiohttp.client import ClientTimeout
+from bs4 import Tag
 
 from ..auth import Authenticator
 from ..config import Config
@@ -15,10 +17,12 @@ from ..utils import fmt_real_path
 from ..version import NAME, VERSION
 from .crawler import Crawler, CrawlerSection
 
+ETAGS_CUSTOM_REPORT_VALUE_KEY = "etags"
+
 
 class HttpCrawlerSection(CrawlerSection):
     def http_timeout(self) -> float:
-        return self.s.getfloat("http_timeout", fallback=20)
+        return self.s.getfloat("http_timeout", fallback=30)
 
 
 class HttpCrawler(Crawler):
@@ -169,6 +173,79 @@ class HttpCrawler(Crawler):
             log.warn(f"Failed to save cookies to {fmt_real_path(self._cookie_jar_path)}")
             log.warn(str(e))
 
+    @staticmethod
+    def get_folder_structure_from_heading_hierarchy(file_link: Tag, drop_h1: bool = False) -> PurePath:
+        """
+        Retrieves the hierarchy of headings associated with the given file link and constructs a folder
+        structure from them.
+
+        <h1> level headings usually only appear once and serve as the page title, so they would introduce
+        redundant nesting. To avoid this, <h1> headings are ignored via the drop_h1 parameter.
+        """
+
+        def find_associated_headings(tag: Tag, level: int) -> PurePath:
+            if level == 0 or (level == 1 and drop_h1):
+                return PurePath()
+
+            level_heading = cast(Optional[Tag], tag.find_previous(name=f"h{level}"))
+
+            if level_heading is None:
+                return find_associated_headings(tag, level - 1)
+
+            folder_name = level_heading.get_text().strip()
+            return find_associated_headings(level_heading, level - 1) / folder_name
+
+        # start at level <h3> because paragraph-level headings are usually too granular for folder names
+        return find_associated_headings(file_link, 3)
+
+    def _get_previous_etag_from_report(self, path: PurePath) -> Optional[str]:
+        """
+        If available, retrieves the entity tag for a given path which was stored in the previous report.
+        """
+        if not self._output_dir.prev_report:
+            return None
+
+        etags = self._output_dir.prev_report.get_custom_value(ETAGS_CUSTOM_REPORT_VALUE_KEY) or {}
+        return etags.get(str(path))
+
+    def _add_etag_to_report(self, path: PurePath, etag: Optional[str]) -> None:
+        """
+        Adds an entity tag for a given path to the report's custom values.
+        """
+        if not etag:
+            return
+
+        etags = self._output_dir.report.get_custom_value(ETAGS_CUSTOM_REPORT_VALUE_KEY) or {}
+        etags[str(path)] = etag
+        self._output_dir.report.add_custom_value(ETAGS_CUSTOM_REPORT_VALUE_KEY, etags)
+
+    async def _request_resource_version(self, resource_url: str) -> Tuple[Optional[str], Optional[datetime]]:
+        """
+        Requests the ETag and Last-Modified headers of a resource via a HEAD request.
+        If no entity tag / modification date can be obtained, the according value will be None.
+        """
+        try:
+            async with self.session.head(resource_url) as resp:
+                if resp.status != 200:
+                    return None, None
+
+                etag_header = resp.headers.get("ETag")
+                last_modified_header = resp.headers.get("Last-Modified")
+                last_modified = None
+
+                if last_modified_header:
+                    try:
+                        # https://developer.mozilla.org/en-US/docs/Web/HTTP/Headers/Last-Modified#directives
+                        datetime_format = "%a, %d %b %Y %H:%M:%S GMT"
+                        last_modified = datetime.strptime(last_modified_header, datetime_format)
+                    except ValueError:
+                        # last_modified remains None
+                        pass
+
+                return etag_header, last_modified
+        except aiohttp.ClientError:
+            return None, None
+
     async def run(self) -> None:
         self._request_count = 0
         self._cookie_jar = aiohttp.CookieJar()
@@ -186,7 +263,12 @@ class HttpCrawler(Crawler):
                 connect=self._http_timeout,
                 sock_connect=self._http_timeout,
                 sock_read=self._http_timeout,
-            )
+            ),
+            # See https://github.com/aio-libs/aiohttp/issues/6626
+            # Without this aiohttp will mangle the redirect header from Shibboleth, invalidating the
+            # passed signature. Shibboleth will not accept the broken signature and authentication will
+            # fail.
+            requote_redirect_url=False
         ) as session:
            self.session = session
            try:
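A hypothetical sketch of how a crawler could combine the ETag helpers added above with the new `should_try_download` from the crawler base class. The helper below is illustrative and not part of PFERD; only the called methods (`_request_resource_version`, `_get_previous_etag_from_report`, `should_try_download`, `download`, `_add_etag_to_report`) come from the diffs shown here:

```python
from pathlib import PurePath


# Hypothetical method body for an HttpCrawler subclass (illustrative only).
async def _maybe_download(self, path: PurePath, resource_url: str) -> None:
    # HEAD request: current ETag and Last-Modified, without fetching the body.
    etag, mtime = await self._request_resource_version(resource_url)

    # Compare against the ETag remembered in the previous run's report.
    previous_etag = self._get_previous_etag_from_report(path)
    etag_differs = None if etag is None or previous_etag is None else etag != previous_etag

    # should_try_download also returns False for paths ignored by a transform.
    if not self.should_try_download(path, etag_differs=etag_differs, mtime=mtime):
        return

    token = await self.download(path, etag_differs=etag_differs, mtime=mtime)
    if token is None:
        return
    ...  # fetch the body using the token and write the file
    # Remember the new ETag for the next run's comparison.
    self._add_etag_to_report(path, etag)
```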
@@ -1,3 +1,9 @@
-from .kit_ilias_web_crawler import KitIliasWebCrawler, KitIliasWebCrawlerSection
+from .kit_ilias_web_crawler import (IliasWebCrawler, IliasWebCrawlerSection, KitIliasWebCrawler,
+                                    KitIliasWebCrawlerSection)
 
-__all__ = ["KitIliasWebCrawler", "KitIliasWebCrawlerSection"]
+__all__ = [
+    "IliasWebCrawler",
+    "IliasWebCrawlerSection",
+    "KitIliasWebCrawler",
+    "KitIliasWebCrawlerSection",
+]
PFERD/crawl/ilias/async_helper.py (new file, 40 lines)

@@ -0,0 +1,40 @@
+import asyncio
+from typing import Any, Callable, Optional
+
+import aiohttp
+
+from ...logging import log
+from ..crawler import AWrapped, CrawlError, CrawlWarning
+
+
+def _iorepeat(attempts: int, name: str, failure_is_error: bool = False) -> Callable[[AWrapped], AWrapped]:
+    def decorator(f: AWrapped) -> AWrapped:
+        async def wrapper(*args: Any, **kwargs: Any) -> Optional[Any]:
+            last_exception: Optional[BaseException] = None
+            for round in range(attempts):
+                try:
+                    return await f(*args, **kwargs)
+                except aiohttp.ContentTypeError:  # invalid content type
+                    raise CrawlWarning("ILIAS returned an invalid content type")
+                except aiohttp.TooManyRedirects:
+                    raise CrawlWarning("Got stuck in a redirect loop")
+                except aiohttp.ClientPayloadError as e:  # encoding or not enough bytes
+                    last_exception = e
+                except aiohttp.ClientConnectionError as e:  # e.g. timeout, disconnect, resolve failed, etc.
+                    last_exception = e
+                except asyncio.exceptions.TimeoutError as e:  # explicit http timeouts in HttpCrawler
+                    last_exception = e
+                log.explain_topic(f"Retrying operation {name}. Retries left: {attempts - 1 - round}")
+                log.explain(f"Last exception: {last_exception!r}")
+
+            if last_exception:
+                message = f"Error in I/O Operation: {last_exception!r}"
+                if failure_is_error:
+                    raise CrawlError(message) from last_exception
+                else:
+                    raise CrawlWarning(message) from last_exception
+            raise CrawlError("Impossible return in ilias _iorepeat")
+
+        return wrapper  # type: ignore
+
+    return decorator
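A hypothetical usage sketch for the retry decorator above — the decorated function is made up, while `_iorepeat` and its arguments come from the new module:

```python
import aiohttp

from PFERD.crawl.ilias.async_helper import _iorepeat


# Hypothetical example: retry a flaky fetch up to 3 attempts. Transient aiohttp
# and timeout errors are retried; persistent failure raises a CrawlWarning
# (or a CrawlError if failure_is_error=True were passed).
@_iorepeat(3, "downloading forum thread")
async def fetch_thread_html(session: aiohttp.ClientSession, url: str) -> str:
    async with session.get(url) as resp:
        return await resp.text()
```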
@ -1,5 +1,9 @@
|
|||||||
from enum import Enum
|
from enum import Enum
|
||||||
from typing import Optional
|
from typing import Optional, cast
|
||||||
|
|
||||||
|
import bs4
|
||||||
|
|
||||||
|
from PFERD.utils import soupify
|
||||||
|
|
||||||
_link_template_plain = "{{link}}"
|
_link_template_plain = "{{link}}"
|
||||||
_link_template_fancy = """
|
_link_template_fancy = """
|
||||||
@ -94,6 +98,162 @@ _link_template_internet_shortcut = """
|
|||||||
URL={{link}}
|
URL={{link}}
|
||||||
""".strip()
|
""".strip()
|
||||||
|
|
||||||
|
_learning_module_template = """
|
||||||
|
<!DOCTYPE html>
|
||||||
|
<html lang="en">
|
||||||
|
<head>
|
||||||
|
<meta charset="UTF-8">
|
||||||
|
<title>{{name}}</title>
|
||||||
|
</head>
|
||||||
|
|
||||||
|
<style>
|
||||||
|
* {
|
||||||
|
box-sizing: border-box;
|
||||||
|
}
|
||||||
|
.center-flex {
|
||||||
|
display: flex;
|
||||||
|
align-items: center;
|
||||||
|
justify-content: center;
|
||||||
|
}
|
||||||
|
.nav {
|
||||||
|
display: flex;
|
||||||
|
justify-content: space-between;
|
||||||
|
}
|
||||||
|
</style>
|
||||||
|
<body class="center-flex">
|
||||||
|
{{body}}
|
||||||
|
</body>
|
||||||
|
</html>
|
||||||
|
"""
|
||||||
|
|
||||||
|
_forum_thread_template = """
|
||||||
|
<!DOCTYPE html>
|
||||||
|
<html lang="en">
|
||||||
|
<head>
|
||||||
|
<meta charset="UTF-8">
|
||||||
|
<title>ILIAS - Forum: {{name}}</title>
|
||||||
|
<style>
|
||||||
|
* {
|
||||||
|
box-sizing: border-box;
|
||||||
|
}
|
||||||
|
body {
|
||||||
|
font-family: 'Open Sans', Verdana, Arial, Helvetica, sans-serif;
|
||||||
|
padding: 8px;
|
||||||
|
}
|
||||||
|
ul, ol, p {
|
||||||
|
margin: 1.2em 0;
|
||||||
|
}
|
||||||
|
p {
|
||||||
|
margin-top: 8px;
|
||||||
|
margin-bottom: 8px;
|
||||||
|
}
|
||||||
|
a {
|
||||||
|
color: #00876c;
|
||||||
|
text-decoration: none;
|
||||||
|
cursor: pointer;
|
||||||
|
}
|
||||||
|
a:hover {
|
||||||
|
text-decoration: underline;
|
||||||
|
}
|
||||||
|
body > p:first-child > span:first-child {
|
||||||
|
font-size: 1.6em;
|
||||||
|
}
|
||||||
|
body > p:first-child > span:first-child ~ span.default {
|
||||||
|
display: inline-block;
|
||||||
|
font-size: 1.2em;
|
||||||
|
padding-bottom: 8px;
|
||||||
|
}
|
||||||
|
.ilFrmPostContent {
|
||||||
|
margin-top: 8px;
|
||||||
|
max-width: 64em;
|
||||||
|
}
|
||||||
|
.ilFrmPostContent > *:first-child {
|
||||||
|
margin-top: 0px;
|
||||||
|
}
|
||||||
|
.ilFrmPostTitle {
|
||||||
|
margin-top: 24px;
|
||||||
|
color: #00876c;
|
||||||
|
font-weight: bold;
|
||||||
|
}
|
||||||
|
#ilFrmPostList {
|
||||||
|
list-style: none;
|
||||||
|
padding-left: 0;
|
||||||
|
}
|
||||||
|
li.ilFrmPostRow {
|
||||||
|
padding: 3px 0 3px 3px;
|
||||||
|
margin-bottom: 24px;
|
||||||
|
border-left: 6px solid #dddddd;
|
||||||
|
}
|
||||||
|
.ilFrmPostRow > div {
|
||||||
|
display: flex;
|
||||||
|
}
|
||||||
|
.ilFrmPostImage img {
|
||||||
|
margin: 0 !important;
|
||||||
|
padding: 6px 9px 9px 6px;
|
||||||
|
}
|
||||||
|
.ilUserIcon {
|
||||||
|
width: 115px;
|
||||||
|
}
|
||||||
|
.small {
|
||||||
|
text-decoration: none;
|
||||||
|
font-size: 0.75rem;
|
||||||
|
color: #6f6f6f;
|
||||||
|
}
|
||||||
|
</style>
|
||||||
|
</head>
|
||||||
|
<body>
|
||||||
|
{{heading}}
|
||||||
|
{{content}}
|
||||||
|
</body>
|
||||||
|
</html>
|
||||||
|
""".strip() # noqa: E501 line too long
|
||||||
|
|
||||||
|
|
||||||
|
def learning_module_template(body: bs4.Tag, name: str, prev: Optional[str], next: Optional[str]) -> str:
    # Seems to be comments, ignore those.
    for elem in body.select(".il-copg-mob-fullscreen-modal"):
        elem.decompose()

    nav_template = """
        <div class="nav">
            {{left}}
            {{right}}
        </div>
    """
    if prev and body.select_one(".ilc_page_lnav_LeftNavigation"):
        text = cast(bs4.Tag, body.select_one(".ilc_page_lnav_LeftNavigation")).get_text().strip()
        left = f'<a href="{prev}">{text}</a>'
    else:
        left = "<span></span>"

    if next and body.select_one(".ilc_page_rnav_RightNavigation"):
        text = cast(bs4.Tag, body.select_one(".ilc_page_rnav_RightNavigation")).get_text().strip()
        right = f'<a href="{next}">{text}</a>'
    else:
        right = "<span></span>"

    if top_nav := body.select_one(".ilc_page_tnav_TopNavigation"):
        top_nav.replace_with(
            soupify(nav_template.replace("{{left}}", left).replace("{{right}}", right).encode())
        )

    if bot_nav := body.select_one(".ilc_page_bnav_BottomNavigation"):
        bot_nav.replace_with(soupify(nav_template.replace(
            "{{left}}", left).replace("{{right}}", right).encode())
        )

    body_str = cast(str, body.prettify())
    return _learning_module_template.replace("{{body}}", body_str).replace("{{name}}", name)


def forum_thread_template(name: str, url: str, heading: bs4.Tag, content: bs4.Tag) -> str:
    if title := cast(Optional[bs4.Tag], heading.find(name="b")):
        title.wrap(bs4.Tag(name="a", attrs={"href": url}))
    return _forum_thread_template \
        .replace("{{name}}", name) \
        .replace("{{heading}}", cast(str, heading.prettify())) \
        .replace("{{content}}", cast(str, content.prettify()))


 class Links(Enum):
     IGNORE = "ignore"

@@ -102,24 +262,24 @@ class Links(Enum):
     INTERNET_SHORTCUT = "internet-shortcut"

     def template(self) -> Optional[str]:
-        if self == self.FANCY:
+        if self == Links.FANCY:
             return _link_template_fancy
-        elif self == self.PLAINTEXT:
+        elif self == Links.PLAINTEXT:
             return _link_template_plain
-        elif self == self.INTERNET_SHORTCUT:
+        elif self == Links.INTERNET_SHORTCUT:
             return _link_template_internet_shortcut
-        elif self == self.IGNORE:
+        elif self == Links.IGNORE:
             return None
         raise ValueError("Missing switch case")

     def extension(self) -> Optional[str]:
-        if self == self.FANCY:
+        if self == Links.FANCY:
             return ".html"
-        elif self == self.PLAINTEXT:
+        elif self == Links.PLAINTEXT:
             return ".txt"
-        elif self == self.INTERNET_SHORTCUT:
+        elif self == Links.INTERNET_SHORTCUT:
             return ".url"
-        elif self == self.IGNORE:
+        elif self == Links.IGNORE:
             return None
         raise ValueError("Missing switch case")
@@ -1,3 +1,5 @@
+from typing import cast
+
 from bs4 import BeautifulSoup, Comment, Tag

 _STYLE_TAG_CONTENT = """

@@ -12,6 +14,13 @@ _STYLE_TAG_CONTENT = """
     font-weight: bold;
 }

+.row-flex {
+    display: flex;
+}
+.row-flex-wrap {
+    flex-wrap: wrap;
+}
+
 .accordion-head {
     background-color: #f5f7fa;
     padding: 0.5rem 0;

@@ -63,18 +72,18 @@ def insert_base_markup(soup: BeautifulSoup) -> BeautifulSoup:


 def clean(soup: BeautifulSoup) -> BeautifulSoup:
-    for block in soup.find_all(class_=lambda x: x in _ARTICLE_WORTHY_CLASSES):
+    for block in cast(list[Tag], soup.find_all(class_=lambda x: x in _ARTICLE_WORTHY_CLASSES)):
         block.name = "article"

-    for block in soup.find_all("h3"):
+    for block in cast(list[Tag], soup.find_all("h3")):
         block.name = "div"

-    for block in soup.find_all("h1"):
+    for block in cast(list[Tag], soup.find_all("h1")):
         block.name = "h3"

-    for block in soup.find_all(class_="ilc_va_ihcap_VAccordIHeadCap"):
+    for block in cast(list[Tag], soup.find_all(class_="ilc_va_ihcap_VAccordIHeadCap")):
         block.name = "h3"
-        block["class"] += ["accordion-head"]
+        block["class"] += ["accordion-head"]  # type: ignore

     for dummy in soup.select(".ilc_text_block_Standard.ilc_Paragraph"):
         children = list(dummy.children)

@@ -82,10 +91,15 @@ def clean(soup: BeautifulSoup) -> BeautifulSoup:
             dummy.decompose()
         if len(children) > 1:
             continue
-        if type(children[0]) == Comment:
+        if isinstance(type(children[0]), Comment):
             dummy.decompose()

-    for hrule_imposter in soup.find_all(class_="ilc_section_Separator"):
+    # Delete video figures, as they can not be internalized anyway
+    for video in soup.select(".ilc_media_cont_MediaContainerHighlighted .ilPageVideo"):
+        if figure := video.find_parent("figure"):
+            figure.decompose()
+
+    for hrule_imposter in cast(list[Tag], soup.find_all(class_="ilc_section_Separator")):
         hrule_imposter.insert(0, soup.new_tag("hr"))

     return soup
PFERD/crawl/ilias/ilias_web_crawler.py (new file, 1096 lines)
File diff suppressed because it is too large

PFERD/crawl/ilias/shibboleth_login.py (new file, 129 lines)
@@ -0,0 +1,129 @@
from typing import Any, Optional, cast

import aiohttp
import yarl
from bs4 import BeautifulSoup, Tag

from ...auth import Authenticator, TfaAuthenticator
from ...logging import log
from ...utils import soupify
from ..crawler import CrawlError


class ShibbolethLogin:
    """
    Login via shibboleth system.
    """

    def __init__(
        self, ilias_url: str, authenticator: Authenticator, tfa_authenticator: Optional[Authenticator]
    ) -> None:
        self._ilias_url = ilias_url
        self._auth = authenticator
        self._tfa_auth = tfa_authenticator

    async def login(self, sess: aiohttp.ClientSession) -> None:
        """
        Performs the ILIAS Shibboleth authentication dance and saves the login
        cookies it receives.

        This function should only be called whenever it is detected that you're
        not logged in. The cookies obtained should be good for a few minutes,
        maybe even an hour or two.
        """

        # Equivalent: Click on "Mit KIT-Account anmelden" button in
        # https://ilias.studium.kit.edu/login.php
        url = f"{self._ilias_url}/shib_login.php"
        async with sess.get(url) as response:
            shib_url = response.url
            if str(shib_url).startswith(self._ilias_url):
                log.explain(
                    "ILIAS recognized our shib token and logged us in in the background, returning"
                )
                return
            soup: BeautifulSoup = soupify(await response.read())

        # Attempt to login using credentials, if necessary
        while not self._login_successful(soup):
            # Searching the form here so that this fails before asking for
            # credentials rather than after asking.
            form = cast(Tag, soup.find("form", {"method": "post"}))
            action = cast(str, form["action"])

            # Equivalent: Enter credentials in
            # https://idp.scc.kit.edu/idp/profile/SAML2/Redirect/SSO
            url = str(shib_url.origin()) + action
            username, password = await self._auth.credentials()
            data = {
                "_eventId_proceed": "",
                "j_username": username,
                "j_password": password,
                "fudis_web_authn_assertion_input": "",
            }
            if csrf_token_input := form.find("input", {"name": "csrf_token"}):
                data["csrf_token"] = csrf_token_input["value"]  # type: ignore
            soup = await _post(sess, url, data)

            if soup.find(id="attributeRelease"):
                raise CrawlError(
                    "ILIAS Shibboleth entitlements changed! "
                    "Please log in once in your browser and review them"
                )

            if self._tfa_required(soup):
                soup = await self._authenticate_tfa(sess, soup, shib_url)

            if not self._login_successful(soup):
                self._auth.invalidate_credentials()

        # Equivalent: Being redirected via JS automatically
        # (or clicking "Continue" if you have JS disabled)
        relay_state = cast(Tag, soup.find("input", {"name": "RelayState"}))
        saml_response = cast(Tag, soup.find("input", {"name": "SAMLResponse"}))
        url = form = soup.find("form", {"method": "post"})["action"]  # type: ignore
        data = {  # using the info obtained in the while loop above
            "RelayState": cast(str, relay_state["value"]),
            "SAMLResponse": cast(str, saml_response["value"]),
        }
        await sess.post(cast(str, url), data=data)

    async def _authenticate_tfa(
        self, session: aiohttp.ClientSession, soup: BeautifulSoup, shib_url: yarl.URL
    ) -> BeautifulSoup:
        if not self._tfa_auth:
            self._tfa_auth = TfaAuthenticator("ilias-anon-tfa")

        tfa_token = await self._tfa_auth.password()

        # Searching the form here so that this fails before asking for
        # credentials rather than after asking.
        form = cast(Tag, soup.find("form", {"method": "post"}))
        action = cast(str, form["action"])

        # Equivalent: Enter token in
        # https://idp.scc.kit.edu/idp/profile/SAML2/Redirect/SSO
        url = str(shib_url.origin()) + action
        username, password = await self._auth.credentials()
        data = {
            "_eventId_proceed": "",
            "fudis_otp_input": tfa_token,
        }
        if csrf_token_input := form.find("input", {"name": "csrf_token"}):
            data["csrf_token"] = csrf_token_input["value"]  # type: ignore
        return await _post(session, url, data)

    @staticmethod
    def _login_successful(soup: BeautifulSoup) -> bool:
        relay_state = soup.find("input", {"name": "RelayState"})
        saml_response = soup.find("input", {"name": "SAMLResponse"})
        return relay_state is not None and saml_response is not None

    @staticmethod
    def _tfa_required(soup: BeautifulSoup) -> bool:
        return soup.find(id="fudiscr-form") is not None


async def _post(session: aiohttp.ClientSession, url: str, data: Any) -> BeautifulSoup:
    async with session.post(url, data=data) as response:
        return soupify(await response.read())
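
A rough usage sketch, not part of the diff: a crawler would construct this class once and call login() whenever a request indicates it is logged out. The authenticator objects below are placeholders standing in for Authenticator instances from PFERD's auth module.

# Hypothetical usage sketch (assumed names: credential_auth, tfa_auth)
shib = ShibbolethLogin("https://ilias.studium.kit.edu", credential_auth, tfa_auth)
async with aiohttp.ClientSession() as session:
    await shib.login(session)  # re-run whenever ILIAS reports us as logged out
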
@@ -1,8 +1,9 @@
 import os
 import re
 from dataclasses import dataclass
+from datetime import datetime
 from pathlib import PurePath
-from typing import Awaitable, List, Optional, Pattern, Set, Union
+from typing import Any, Awaitable, Generator, Iterable, List, Optional, Pattern, Tuple, Union, cast
 from urllib.parse import urljoin

 from bs4 import BeautifulSoup, Tag

@@ -24,9 +25,6 @@ class KitIpdCrawlerSection(HttpCrawlerSection):
         if not target.startswith("https://"):
             self.invalid_value("target", target, "Should be a URL")

-        if not target.endswith("/"):
-            target = target + "/"
-
         return target

     def link_regex(self) -> Pattern[str]:

@@ -34,24 +32,24 @@ class KitIpdCrawlerSection(HttpCrawlerSection):
         return re.compile(regex)


-@dataclass(unsafe_hash=True)
+@dataclass
 class KitIpdFile:
     name: str
     url: str

+    def explain(self) -> None:
+        log.explain(f"File {self.name!r} (href={self.url!r})")
+

 @dataclass
 class KitIpdFolder:
     name: str
-    files: List[KitIpdFile]
+    entries: List[Union[KitIpdFile, "KitIpdFolder"]]

     def explain(self) -> None:
         log.explain_topic(f"Folder {self.name!r}")
-        for file in self.files:
-            log.explain(f"File {file.name!r} (href={file.url!r})")
-
-    def __hash__(self) -> int:
-        return self.name.__hash__()
+        for entry in self.entries:
+            entry.explain()


 class KitIpdCrawler(HttpCrawler):

@@ -75,81 +73,96 @@ class KitIpdCrawler(HttpCrawler):

         async with maybe_cl:
             for item in await self._fetch_items():
+                item.explain()
                 if isinstance(item, KitIpdFolder):
-                    tasks.append(self._crawl_folder(item))
+                    tasks.append(self._crawl_folder(PurePath("."), item))
                 else:
-                    # Orphan files are placed in the root folder
-                    tasks.append(self._download_file(PurePath("."), item))
+                    log.explain_topic(f"Orphan file {item.name!r} (href={item.url!r})")
+                    log.explain("Attributing it to root folder")
+                    # do this here to at least be sequential and not parallel (rate limiting is hard, as the
+                    # crawl abstraction does not hold for these requests)
+                    etag, mtime = await self._request_resource_version(item.url)
+                    tasks.append(self._download_file(PurePath("."), item, etag, mtime))

         await self.gather(tasks)

-    async def _crawl_folder(self, folder: KitIpdFolder) -> None:
-        path = PurePath(folder.name)
+    async def _crawl_folder(self, parent: PurePath, folder: KitIpdFolder) -> None:
+        path = parent / folder.name
         if not await self.crawl(path):
             return

-        tasks = [self._download_file(path, file) for file in folder.files]
+        tasks = []
+        for entry in folder.entries:
+            if isinstance(entry, KitIpdFolder):
+                tasks.append(self._crawl_folder(path, entry))
+            else:
+                # do this here to at least be sequential and not parallel (rate limiting is hard, as the crawl
+                # abstraction does not hold for these requests)
+                etag, mtime = await self._request_resource_version(entry.url)
+                tasks.append(self._download_file(path, entry, etag, mtime))

         await self.gather(tasks)

-    async def _download_file(self, parent: PurePath, file: KitIpdFile) -> None:
+    async def _download_file(
+        self,
+        parent: PurePath,
+        file: KitIpdFile,
+        etag: Optional[str],
+        mtime: Optional[datetime]
+    ) -> None:
         element_path = parent / file.name
-        maybe_dl = await self.download(element_path)
+
+        prev_etag = self._get_previous_etag_from_report(element_path)
+        etag_differs = None if prev_etag is None else prev_etag != etag
+
+        maybe_dl = await self.download(element_path, etag_differs=etag_differs, mtime=mtime)
         if not maybe_dl:
+            # keep storing the known file's etag
+            if prev_etag:
+                self._add_etag_to_report(element_path, prev_etag)
             return

         async with maybe_dl as (bar, sink):
-            await self._stream_from_url(file.url, sink, bar)
+            await self._stream_from_url(file.url, element_path, sink, bar)

-    async def _fetch_items(self) -> Set[Union[KitIpdFile, KitIpdFolder]]:
-        page = await self.get_page()
+    async def _fetch_items(self) -> Iterable[Union[KitIpdFile, KitIpdFolder]]:
+        page, url = await self.get_page()
         elements: List[Tag] = self._find_file_links(page)
-        items: Set[Union[KitIpdFile, KitIpdFolder]] = set()
+
+        # do not add unnecessary nesting for a single <h1> heading
+        drop_h1: bool = len(page.find_all(name="h1")) <= 1
+
+        folder_tree: KitIpdFolder = KitIpdFolder(".", [])
         for element in elements:
-            folder_label = self._find_folder_label(element)
-            if folder_label:
-                folder = self._extract_folder(folder_label)
-                if folder not in items:
-                    items.add(folder)
-                    folder.explain()
-            else:
-                file = self._extract_file(element)
-                items.add(file)
-                log.explain_topic(f"Orphan file {file.name!r} (href={file.url!r})")
-                log.explain("Attributing it to root folder")
-
-        return items
-
-    def _extract_folder(self, folder_tag: Tag) -> KitIpdFolder:
-        files: List[KitIpdFile] = []
-        name = folder_tag.getText().strip()
-
-        container: Tag = folder_tag.findNextSibling(name="table")
-        for link in self._find_file_links(container):
-            files.append(self._extract_file(link))
-
-        return KitIpdFolder(name, files)
-
-    @staticmethod
-    def _find_folder_label(file_link: Tag) -> Optional[Tag]:
-        enclosing_table: Tag = file_link.findParent(name="table")
-        if enclosing_table is None:
-            return None
-        return enclosing_table.findPreviousSibling(name=re.compile("^h[1-6]$"))
-
-    def _extract_file(self, link: Tag) -> KitIpdFile:
-        url = self._abs_url_from_link(link)
+            parent = HttpCrawler.get_folder_structure_from_heading_hierarchy(element, drop_h1)
+            file = self._extract_file(element, url)
+
+            current_folder: KitIpdFolder = folder_tree
+            for folder_name in parent.parts:
+                # helps the type checker to verify that current_folder is indeed a folder
+                def subfolders() -> Generator[KitIpdFolder, Any, None]:
+                    return (entry for entry in current_folder.entries if isinstance(entry, KitIpdFolder))
+
+                if not any(entry.name == folder_name for entry in subfolders()):
+                    current_folder.entries.append(KitIpdFolder(folder_name, []))
+                current_folder = next(entry for entry in subfolders() if entry.name == folder_name)
+
+            current_folder.entries.append(file)
+
+        return folder_tree.entries
+
+    def _extract_file(self, link: Tag, url: str) -> KitIpdFile:
+        url = self._abs_url_from_link(url, link)
         name = os.path.basename(url)
         return KitIpdFile(name, url)

-    def _find_file_links(self, tag: Union[Tag, BeautifulSoup]) -> List[Tag]:
-        return tag.findAll(name="a", attrs={"href": self._file_regex})
+    def _find_file_links(self, tag: Union[Tag, BeautifulSoup]) -> list[Tag]:
+        return cast(list[Tag], tag.find_all(name="a", attrs={"href": self._file_regex}))

-    def _abs_url_from_link(self, link_tag: Tag) -> str:
-        return urljoin(self._url, link_tag.get("href"))
+    def _abs_url_from_link(self, url: str, link_tag: Tag) -> str:
+        return urljoin(url, cast(str, link_tag.get("href")))

-    async def _stream_from_url(self, url: str, sink: FileSink, bar: ProgressBar) -> None:
+    async def _stream_from_url(self, url: str, path: PurePath, sink: FileSink, bar: ProgressBar) -> None:
         async with self.session.get(url, allow_redirects=False) as resp:
             if resp.status == 403:
                 raise CrawlError("Received a 403. Are you within the KIT network/VPN?")

@@ -162,7 +175,9 @@ class KitIpdCrawler(HttpCrawler):

         sink.done()

+        self._add_etag_to_report(path, resp.headers.get("ETag"))
+
-    async def get_page(self) -> BeautifulSoup:
+    async def get_page(self) -> Tuple[BeautifulSoup, str]:
         async with self.session.get(self._url) as request:
             # The web page for Algorithmen für Routenplanung contains some
             # weird comments that beautifulsoup doesn't parse correctly. This

@@ -170,4 +185,4 @@ class KitIpdCrawler(HttpCrawler):
             # cause issues on other pages.
             content = (await request.read()).decode("utf-8")
             content = re.sub(r"<!--.*?-->", "", content)
-            return soupify(content.encode("utf-8"))
+            return soupify(content.encode("utf-8")), str(request.url)
@@ -14,7 +14,7 @@ def name_variants(path: PurePath) -> Iterator[PurePath]:


 class Deduplicator:
-    FORBIDDEN_CHARS = '<>:"/\\|?*'
+    FORBIDDEN_CHARS = '<>:"/\\|?*' + "".join([chr(i) for i in range(0, 32)])
     FORBIDDEN_NAMES = {
         "CON", "PRN", "AUX", "NUL",
         "COM1", "COM2", "COM3", "COM4", "COM5", "COM6", "COM7", "COM8", "COM9",
@@ -1,9 +1,8 @@
 import asyncio
 import sys
 import traceback
-from contextlib import asynccontextmanager, contextmanager
-# TODO In Python 3.9 and above, ContextManager is deprecated
-from typing import AsyncIterator, ContextManager, Iterator, List, Optional
+from contextlib import AbstractContextManager, asynccontextmanager, contextmanager
+from typing import AsyncIterator, Iterator, List, Optional

 from rich.console import Console, Group
 from rich.live import Live

@@ -59,6 +58,7 @@ class Log:
         # Whether different parts of the output are enabled or disabled
         self.output_explain = False
         self.output_status = True
+        self.output_not_deleted = True
         self.output_report = True

     def _update_live(self) -> None:

@@ -207,6 +207,17 @@ directly or as a GitHub issue: https://github.com/Garmelon/PFERD/issues/new
             action = escape(f"{action:<{self.STATUS_WIDTH}}")
             self.print(f"{style}{action}[/] {escape(text)} {suffix}")

+    def not_deleted(self, style: str, action: str, text: str, suffix: str = "") -> None:
+        """
+        Print a message for a local only file that wasn't
+        deleted while crawling. Allows markup in the "style"
+        argument which will be applied to the "action" string.
+        """
+
+        if self.output_status and self.output_not_deleted:
+            action = escape(f"{action:<{self.STATUS_WIDTH}}")
+            self.print(f"{style}{action}[/] {escape(text)} {suffix}")
+
     def report(self, text: str) -> None:
         """
         Print a report after crawling. Allows markup.

@@ -215,6 +226,14 @@ directly or as a GitHub issue: https://github.com/Garmelon/PFERD/issues/new
         if self.output_report:
             self.print(text)

+    def report_not_deleted(self, text: str) -> None:
+        """
+        Print a report for a local only file that wasn't deleted after crawling. Allows markup.
+        """
+
+        if self.output_report and self.output_not_deleted:
+            self.print(text)
+
     @contextmanager
     def _bar(
         self,

@@ -241,7 +260,7 @@ directly or as a GitHub issue: https://github.com/Garmelon/PFERD/issues/new
         action: str,
         text: str,
         total: Optional[float] = None,
-    ) -> ContextManager[ProgressBar]:
+    ) -> AbstractContextManager[ProgressBar]:
         """
         Allows markup in the "style" argument which will be applied to the
         "action" string.

@@ -257,7 +276,7 @@ directly or as a GitHub issue: https://github.com/Garmelon/PFERD/issues/new
         action: str,
         text: str,
         total: Optional[float] = None,
-    ) -> ContextManager[ProgressBar]:
+    ) -> AbstractContextManager[ProgressBar]:
         """
         Allows markup in the "style" argument which will be applied to the
         "action" string.
@@ -44,6 +44,7 @@ class OnConflict(Enum):
     LOCAL_FIRST = "local-first"
     REMOTE_FIRST = "remote-first"
     NO_DELETE = "no-delete"
+    NO_DELETE_PROMPT_OVERWRITE = "no-delete-prompt-overwrite"

     @staticmethod
     def from_string(string: str) -> "OnConflict":

@@ -51,11 +52,12 @@ class OnConflict(Enum):
             return OnConflict(string)
         except ValueError:
             raise ValueError("must be one of 'prompt', 'local-first',"
-                             " 'remote-first', 'no-delete'")
+                             " 'remote-first', 'no-delete', 'no-delete-prompt-overwrite'")


 @dataclass
 class Heuristics:
+    etag_differs: Optional[bool]
     mtime: Optional[datetime]

@@ -232,8 +234,16 @@ class OutputDirectory:

         remote_newer = None

+        # ETag should be a more reliable indicator than mtime, so we check it first
+        if heuristics.etag_differs is not None:
+            remote_newer = heuristics.etag_differs
+            if remote_newer:
+                log.explain("Remote file's entity tag differs")
+            else:
+                log.explain("Remote file's entity tag is the same")
+
         # Python on Windows crashes when faced with timestamps around the unix epoch
-        if heuristics.mtime and (os.name != "nt" or heuristics.mtime.year > 1970):
+        if remote_newer is None and heuristics.mtime and (os.name != "nt" or heuristics.mtime.year > 1970):
             mtime = heuristics.mtime
             remote_newer = mtime.timestamp() > stat.st_mtime
             if remote_newer:

@@ -264,7 +274,7 @@ class OutputDirectory:
         on_conflict: OnConflict,
         path: PurePath,
     ) -> bool:
-        if on_conflict == OnConflict.PROMPT:
+        if on_conflict in {OnConflict.PROMPT, OnConflict.NO_DELETE_PROMPT_OVERWRITE}:
             async with log.exclusive_output():
                 prompt = f"Replace {fmt_path(path)} with remote file?"
                 return await prompt_yes_no(prompt, default=False)

@@ -283,7 +293,7 @@ class OutputDirectory:
         on_conflict: OnConflict,
         path: PurePath,
     ) -> bool:
-        if on_conflict == OnConflict.PROMPT:
+        if on_conflict in {OnConflict.PROMPT, OnConflict.NO_DELETE_PROMPT_OVERWRITE}:
             async with log.exclusive_output():
                 prompt = f"Recursively delete {fmt_path(path)} and replace with remote file?"
                 return await prompt_yes_no(prompt, default=False)

@@ -303,7 +313,7 @@ class OutputDirectory:
         path: PurePath,
         parent: PurePath,
     ) -> bool:
-        if on_conflict == OnConflict.PROMPT:
+        if on_conflict in {OnConflict.PROMPT, OnConflict.NO_DELETE_PROMPT_OVERWRITE}:
             async with log.exclusive_output():
                 prompt = f"Delete {fmt_path(parent)} so remote file {fmt_path(path)} can be downloaded?"
                 return await prompt_yes_no(prompt, default=False)

@@ -330,7 +340,7 @@ class OutputDirectory:
             return False
         elif on_conflict == OnConflict.REMOTE_FIRST:
             return True
-        elif on_conflict == OnConflict.NO_DELETE:
+        elif on_conflict in {OnConflict.NO_DELETE, OnConflict.NO_DELETE_PROMPT_OVERWRITE}:
             return False

         # This should never be reached

@@ -361,10 +371,28 @@ class OutputDirectory:

         raise OutputDirError("Failed to create temporary file")

+    def should_try_download(
+        self,
+        path: PurePath,
+        *,
+        etag_differs: Optional[bool] = None,
+        mtime: Optional[datetime] = None,
+        redownload: Optional[Redownload] = None,
+        on_conflict: Optional[OnConflict] = None,
+    ) -> bool:
+        heuristics = Heuristics(etag_differs, mtime)
+        redownload = self._redownload if redownload is None else redownload
+        on_conflict = self._on_conflict if on_conflict is None else on_conflict
+        local_path = self.resolve(path)
+
+        return self._should_download(local_path, heuristics, redownload, on_conflict)
+
     async def download(
         self,
         remote_path: PurePath,
         path: PurePath,
+        *,
+        etag_differs: Optional[bool] = None,
         mtime: Optional[datetime] = None,
         redownload: Optional[Redownload] = None,
         on_conflict: Optional[OnConflict] = None,

@@ -374,7 +402,7 @@ class OutputDirectory:
        MarkConflictError.
        """

-        heuristics = Heuristics(mtime)
+        heuristics = Heuristics(etag_differs, mtime)
         redownload = self._redownload if redownload is None else redownload
         on_conflict = self._on_conflict if on_conflict is None else on_conflict
         local_path = self.resolve(path)

@@ -495,7 +523,7 @@ class OutputDirectory:
         except OSError:
             pass
         else:
-            log.status("[bold bright_magenta]", "Not deleted", fmt_path(pure))
+            log.not_deleted("[bold bright_magenta]", "Not deleted", fmt_path(pure))
             self._report.not_delete_file(pure)

     def load_prev_report(self) -> None:
@@ -1,4 +1,4 @@
-from pathlib import Path
+from pathlib import Path, PurePath
 from typing import Dict, List, Optional

 from rich.markup import escape

@@ -168,19 +168,24 @@ class Pferd:
         log.report("")
         log.report(f"[bold bright_cyan]Report[/] for {escape(name)}")

+        def fmt_path_link(relative_path: PurePath) -> str:
+            # We need to URL-encode the path because it might contain spaces or special characters
+            link = crawler.output_dir.resolve(relative_path).absolute().as_uri()
+            return f"[link={link}]{fmt_path(relative_path)}[/link]"
+
         something_changed = False
         for path in sorted(crawler.report.added_files):
             something_changed = True
-            log.report(f" [bold bright_green]Added[/] {fmt_path(path)}")
+            log.report(f" [bold bright_green]Added[/] {fmt_path_link(path)}")
         for path in sorted(crawler.report.changed_files):
             something_changed = True
-            log.report(f" [bold bright_yellow]Changed[/] {fmt_path(path)}")
+            log.report(f" [bold bright_yellow]Changed[/] {fmt_path_link(path)}")
         for path in sorted(crawler.report.deleted_files):
             something_changed = True
             log.report(f" [bold bright_magenta]Deleted[/] {fmt_path(path)}")
         for path in sorted(crawler.report.not_deleted_files):
             something_changed = True
-            log.report(f" [bold bright_magenta]Not deleted[/] {fmt_path(path)}")
+            log.report_not_deleted(f" [bold bright_magenta]Not deleted[/] {fmt_path_link(path)}")

         for warning in crawler.report.encountered_warnings:
             something_changed = True
@@ -34,15 +34,6 @@ class MarkConflictError(Exception):
         self.collides_with = collides_with


-# TODO Use PurePath.is_relative_to when updating to 3.9
-def is_relative_to(a: PurePath, b: PurePath) -> bool:
-    try:
-        a.relative_to(b)
-        return True
-    except ValueError:
-        return False
-
-
 class Report:
     """
     A report of a synchronization. Includes all files found by the crawler, as

@@ -173,7 +164,7 @@ class Report:
         if path == other:
             raise MarkDuplicateError(path)

-        if is_relative_to(path, other) or is_relative_to(other, path):
+        if path.is_relative_to(other) or other.is_relative_to(path):
             raise MarkConflictError(path, other)

         self.known_files.add(path)
@@ -110,6 +110,10 @@ class ExactReTf(Transformation):
             except ValueError:
                 pass

+        named_groups: Dict[str, str] = match.groupdict()
+        for name, capture in named_groups.items():
+            locals_dir[name] = capture
+
         result = eval(f"f{right!r}", {}, locals_dir)
         return Transformed(PurePath(result))
@@ -1,2 +1,2 @@
 NAME = "PFERD"
-VERSION = "3.4.2"
+VERSION = "3.8.0"
README.md

@@ -17,7 +17,7 @@ Binaries for Linux, Windows and Mac can be downloaded directly from the

 ### With pip

-Ensure you have at least Python 3.9 installed. Run the following command to
+Ensure you have at least Python 3.11 installed. Run the following command to
 install PFERD or upgrade it to the latest version:

 ```

@@ -30,7 +30,10 @@ The use of [venv](https://docs.python.org/3/library/venv.html) is recommended.

 Unofficial packages are available for:
 - [AUR](https://aur.archlinux.org/packages/pferd)
+- [brew](https://formulae.brew.sh/formula/pferd)
+- [conda-forge](https://github.com/conda-forge/pferd-feedstock)
 - [nixpkgs](https://github.com/NixOS/nixpkgs/blob/master/pkgs/tools/misc/pferd/default.nix)
+- [PyPi](https://pypi.org/project/pferd)

 See also PFERD's [repology page](https://repology.org/project/pferd/versions).

@@ -53,6 +56,17 @@ Also, you can download most ILIAS pages directly like this:
 $ pferd kit-ilias-web <url> <output_directory>
 ```

+PFERD supports other ILIAS instances as well, using the `ilias-web` crawler (see
+the [config section on `ilias-web`](CONFIG.md#the-ilias-web-crawler) for more
+detail on the `base-url` and `client-id` parameters):
+
+```
+$ pferd ilias-web \
+    --base-url https://ilias.my-university.example \
+    --client-id My_University desktop \
+    <output_directory>
+```
+
 However, the CLI only lets you download a single thing at a time, and the
 resulting command can grow long quite quickly. Because of this, PFERD can also
 be used with a config file.
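
As a rough sketch of what such a config file could look like (section and key names follow the documented config format in CONFIG.md; the course ID, names and paths below are placeholders, not part of this diff):

```
[DEFAULT]
working_dir = ~/uni

[auth:ilias]
type = simple
username = myuser

[crawl:My-Course]
type = kit-ilias-web
auth = auth:ilias
target = 1234567
output_dir = My-Course
```

With a file like this, a single `pferd` invocation can synchronize several crawlers at once instead of one long command per course.
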
flake.lock (generated, new file, 27 lines)

@@ -0,0 +1,27 @@
{
  "nodes": {
    "nixpkgs": {
      "locked": {
        "lastModified": 1744440957,
        "narHash": "sha256-FHlSkNqFmPxPJvy+6fNLaNeWnF1lZSgqVCl/eWaJRc4=",
        "owner": "NixOS",
        "repo": "nixpkgs",
        "rev": "26d499fc9f1d567283d5d56fcf367edd815dba1d",
        "type": "github"
      },
      "original": {
        "owner": "NixOS",
        "ref": "nixos-24.11",
        "repo": "nixpkgs",
        "type": "github"
      }
    },
    "root": {
      "inputs": {
        "nixpkgs": "nixpkgs"
      }
    }
  },
  "root": "root",
  "version": 7
}
flake.nix (new file, 41 lines)

@@ -0,0 +1,41 @@
{
  description = "Tool for downloading course-related files from ILIAS";

  inputs = {
    nixpkgs.url = "github:NixOS/nixpkgs/nixos-24.11";
  };

  outputs = { self, nixpkgs }:
    let
      # Helper function to generate an attrset '{ x86_64-linux = f "x86_64-linux"; ... }'.
      forAllSystems = nixpkgs.lib.genAttrs nixpkgs.lib.systems.flakeExposed;
    in
    {
      packages = forAllSystems (system:
        let pkgs = import nixpkgs { inherit system; };
        in
        rec {
          default = pkgs.python3Packages.buildPythonApplication rec {
            pname = "pferd";
            # Performing black magic
            # Don't worry, I sacrificed enough goats for the next few years
            version = (pkgs.lib.importTOML ./PFERD/version.py).VERSION;
            format = "pyproject";

            src = ./.;

            nativeBuildInputs = with pkgs.python3Packages; [
              setuptools
            ];

            propagatedBuildInputs = with pkgs.python3Packages; [
              aiohttp
              beautifulsoup4
              rich
              keyring
              certifi
            ];
          };
        });
    };
}

mypy.ini (deleted, 11 lines)

@@ -1,11 +0,0 @@
-[mypy]
-disallow_any_generics = True
-disallow_untyped_defs = True
-disallow_incomplete_defs = True
-no_implicit_optional = True
-warn_unused_ignores = True
-warn_unreachable = True
-show_error_context = True
-
-[mypy-rich.*,bs4,keyring]
-ignore_missing_imports = True
@@ -1,3 +1,42 @@
 [build-system]
 requires = ["setuptools", "wheel"]
 build-backend = "setuptools.build_meta"
+
+[project]
+name = "PFERD"
+dependencies = [
+    "aiohttp>=3.8.1",
+    "beautifulsoup4>=4.10.0",
+    "rich>=11.0.0",
+    "keyring>=23.5.0",
+    "certifi>=2021.10.8"
+]
+dynamic = ["version"]
+requires-python = ">=3.11"
+
+[project.scripts]
+pferd = "PFERD.__main__:main"
+
+[tool.setuptools.dynamic]
+version = {attr = "PFERD.version.VERSION"}
+
+[tool.flake8]
+max-line-length = 110
+
+[tool.isort]
+line_length = 110
+
+[tool.autopep8]
+max_line_length = 110
+in-place = true
+recursive = true
+
+[tool.mypy]
+disallow_any_generics = true
+disallow_untyped_defs = true
+disallow_incomplete_defs = true
+no_implicit_optional = true
+warn_unused_ignores = true
+warn_unreachable = true
+show_error_context = true
+ignore_missing_imports = true
@@ -1,8 +1,8 @@
 #!/usr/bin/env python3

 import argparse
-import time
 import re
+import time
 from subprocess import run


@@ -2,5 +2,5 @@

 set -e

-mypy PFERD
+mypy .
 flake8 PFERD

@@ -2,5 +2,5 @@

 set -e

-autopep8 --recursive --in-place PFERD
-isort PFERD
+autopep8 .
+isort .

@@ -13,5 +13,5 @@ pip install --upgrade setuptools
 pip install --editable .

 # Installing tools and type hints
-pip install --upgrade mypy flake8 autopep8 isort pyinstaller
+pip install --upgrade mypy flake8 flake8-pyproject autopep8 isort pyinstaller
 pip install --upgrade types-chardet types-certifi
|
setup.cfg (deleted, 23 lines)

@@ -1,23 +0,0 @@
-[metadata]
-name = PFERD
-version = attr: PFERD.version.VERSION
-
-[options]
-packages = find:
-python_requires = >=3.9
-install_requires =
-    aiohttp>=3.8.1
-    beautifulsoup4>=4.10.0
-    rich>=11.0.0
-    keyring>=23.5.0
-    certifi>=2021.10.8
-
-[options.entry_points]
-console_scripts =
-    pferd = PFERD.__main__:main
-
-[flake8]
-max_line_length = 110
-
-[isort]
-line_length = 110