Mirror of https://github.com/Garmelon/PFERD.git, synced 2025-07-12 22:22:30 +02:00

Compare commits: b54b3b979c...v3.5.1 (14 commits)

da627ff929
c1b592ac29
eb0c956d32
ab0cb2d956
a117126389
e9f8901520
266812f90e
533bc27439
0113a0ca10
40f8a05ad6
50b50513c6
df3514cd03
ad53185247
87b67e9271
.gitignore (vendored): 1 change
@@ -3,6 +3,7 @@
 /PFERD.egg-info/
 __pycache__/
 /.vscode/
+/.idea/
 
 # pyinstaller
 /pferd.spec
CHANGELOG.md: 34 changes
@@ -22,20 +22,36 @@ ambiguous situations.
 
 ## Unreleased
 
+## 3.5.1 - 2024-04-09
+
+### Added
+- Support for ILIAS 8
+
+### Fixed
+- Video name deduplication
+
+## 3.5.0 - 2023-09-13
+
+### Added
+- `no-delete-prompt-override` conflict resolution strategy
+- Support for ILIAS learning modules
+- `show_not_deleted` option to stop printing the "Not Deleted" status or report
+  message. This combines nicely with the `no-delete-prompt-override` strategy,
+  causing PFERD to mostly ignore local-only files.
+- Support for mediacast video listings
+- Crawling of files in info tab
+
+### Changed
+- Remove size suffix for files in content pages
+
 ### Fixed
 - Crawling of courses with the timeline view as the default tab
 - Crawling of file and custom opencast cards
 - Crawling of button cards without descriptions
 - Abort crawling when encountering an unexpected ilias root page redirect
-- Remove size suffix for files in content pages
-
-### Added
-- `no-delete-prompt-override` conflict resolution strategy
-- support for ILIAS learning modules
-- `show_not_deleted` option to stop printing the "Not Deleted" status or report
-  message. This combines nicely with the `no-delete-prompt-override` strategy,
-  causing PFERD to mostly ignore local-only files.
-- support for mediacast video listings
+- Sanitize ascii control characters on Windows
+- Crawling of paginated past meetings
+- Ignore SCORM learning modules
 
 ## 3.4.3 - 2022-11-29
 
@@ -22,10 +22,12 @@ class IliasElementType(Enum):
     FOLDER = "folder"
     FORUM = "forum"
     LINK = "link"
+    INFO_TAB = "info_tab"
     LEARNING_MODULE = "learning_module"
     BOOKING = "booking"
     MEETING = "meeting"
     SURVEY = "survey"
+    SCORM_LEARNING_MODULE = "scorm_learning_module"
     MEDIACAST_VIDEO_FOLDER = "mediacast_video_folder"
     MEDIACAST_VIDEO = "mediacast_video"
     OPENCAST_VIDEO = "opencast_video"
@@ -93,13 +95,9 @@ class IliasPage:
 
     @staticmethod
     def is_root_page(soup: BeautifulSoup) -> bool:
-        permalink = soup.find(id="current_perma_link")
-        if permalink is None:
-            return False
-        value = permalink.attrs.get("value")
-        if value is None:
-            return False
-        return "goto.php?target=root_" in value
+        if permalink := IliasPage.get_soup_permalink(soup):
+            return "goto.php?target=root_" in permalink
+        return False
 
     def get_child_elements(self) -> List[IliasPageElement]:
         """
@@ -120,9 +118,25 @@ class IliasPage:
         if self._is_content_page():
             log.explain("Page is a content page, searching for elements")
             return self._find_copa_entries()
+        if self._is_info_tab():
+            log.explain("Page is info tab, searching for elements")
+            return self._find_info_tab_entries()
         log.explain("Page is a normal folder, searching for elements")
         return self._find_normal_entries()
 
+    def get_info_tab(self) -> Optional[IliasPageElement]:
+        tab: Optional[Tag] = self._soup.find(
+            name="a",
+            attrs={"href": lambda x: x and "cmdClass=ilinfoscreengui" in x}
+        )
+        if tab is not None:
+            return IliasPageElement(
+                IliasElementType.INFO_TAB,
+                self._abs_url_from_link(tab),
+                "infos"
+            )
+        return None
+
     def get_description(self) -> Optional[BeautifulSoup]:
         def is_interesting_class(name: str) -> bool:
             return name in ["ilCOPageSection", "ilc_Paragraph", "ilc_va_ihcap_VAccordIHeadCap"]
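The new `get_info_tab` helper locates the info tab by an anchor whose href carries the `cmdClass=ilinfoscreengui` command class. A minimal standalone sketch of that BeautifulSoup lookup (the HTML snippet and ref_id are invented for illustration):

```python
from bs4 import BeautifulSoup

html = """
<div id="tabs">
  <a href="ilias.php?ref_id=42&cmdClass=ilinfoscreengui&cmd=showSummary">Info</a>
  <a href="ilias.php?ref_id=42&cmd=view">Content</a>
</div>
"""

soup = BeautifulSoup(html, "html.parser")
# Same predicate style as the diff: match any anchor whose href mentions
# the info screen GUI command class.
tab = soup.find(name="a", attrs={"href": lambda x: x and "cmdClass=ilinfoscreengui" in x})
print(tab["href"] if tab is not None else "no info tab")
```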
@@ -209,7 +223,11 @@ class IliasPage:
             log.explain("Requesting *all* future meetings")
             return self._uncollapse_future_meetings_url()
         if not self._is_content_tab_selected():
-            return self._select_content_page_url()
+            if self._page_type != IliasElementType.INFO_TAB:
+                log.explain("Selecting content tab")
+                return self._select_content_page_url()
+            else:
+                log.explain("Crawling info tab, skipping content select")
         return None
 
     def _is_forum_page(self) -> bool:
@@ -257,22 +275,23 @@ class IliasPage:
         return self._soup.find("a", attrs={"href": lambda x: x and "block_type=pditems" in x})
 
     def _is_content_page(self) -> bool:
-        link = self._soup.find(id="current_perma_link")
-        if not link:
-            return False
-        return "target=copa_" in link.get("value")
+        if link := self.get_permalink():
+            return "target=copa_" in link
+        return False
 
     def _is_learning_module_page(self) -> bool:
-        link = self._soup.find(id="current_perma_link")
-        if not link:
-            return False
-        return "target=pg_" in link.get("value")
+        if link := self.get_permalink():
+            return "target=pg_" in link
+        return False
 
     def _contains_collapsed_future_meetings(self) -> bool:
         return self._uncollapse_future_meetings_url() is not None
 
     def _uncollapse_future_meetings_url(self) -> Optional[IliasPageElement]:
-        element = self._soup.find("a", attrs={"href": lambda x: x and "crs_next_sess=1" in x})
+        element = self._soup.find(
+            "a",
+            attrs={"href": lambda x: x and ("crs_next_sess=1" in x or "crs_prev_sess=1" in x)}
+        )
         if not element:
             return None
         link = self._abs_url_from_link(element)
@@ -281,6 +300,10 @@ class IliasPage:
     def _is_content_tab_selected(self) -> bool:
         return self._select_content_page_url() is None
 
+    def _is_info_tab(self) -> bool:
+        might_be_info = self._soup.find("form", attrs={"name": lambda x: x == "formInfoScreen"}) is not None
+        return self._page_type == IliasElementType.INFO_TAB and might_be_info
+
     def _select_content_page_url(self) -> Optional[IliasPageElement]:
         tab = self._soup.find(
             id="tab_view_content",
@@ -389,6 +412,23 @@ class IliasPage:
 
         return items
 
+    def _find_info_tab_entries(self) -> List[IliasPageElement]:
+        items = []
+        links: List[Tag] = self._soup.select("a.il_ContainerItemCommand")
+
+        for link in links:
+            if "cmdClass=ilobjcoursegui" not in link["href"]:
+                continue
+            if "cmd=sendfile" not in link["href"]:
+                continue
+            items.append(IliasPageElement(
+                IliasElementType.FILE,
+                self._abs_url_from_link(link),
+                _sanitize_path_name(link.getText())
+            ))
+
+        return items
+
     def _find_opencast_video_entries(self) -> List[IliasPageElement]:
         # ILIAS has three stages for video pages
         # 1. The initial dummy page without any videos. This page contains the link to the listing
@@ -467,8 +507,8 @@ class IliasPage:
             modification_string = link.parent.parent.parent.select_one(
                 f"td.std:nth-child({index})"
             ).getText().strip()
-            if re.search(r"\d+\.\d+.\d+ - \d+:\d+", modification_string):
-                modification_time = datetime.strptime(modification_string, "%d.%m.%Y - %H:%M")
+            if match := re.search(r"\d+\.\d+.\d+ \d+:\d+", modification_string):
+                modification_time = datetime.strptime(match.group(0), "%d.%m.%Y %H:%M")
                 break
 
         if modification_time is None:
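The date parsing change is twofold: the pattern no longer requires the literal " - " between date and time, and `strptime` now receives only the matched substring, so extra text around the timestamp in the table cell no longer breaks parsing. A small self-contained check (the sample cell text is made up):

```python
import re
from datetime import datetime

# Invented sample cell text; newer ILIAS renders the timestamp without " - ".
cell_text = "Last edit: 09.04.2024 18:30 by some user"

# Parsing only the matched substring tolerates surrounding text in the cell.
if match := re.search(r"\d+\.\d+.\d+ \d+:\d+", cell_text):
    modification_time = datetime.strptime(match.group(0), "%d.%m.%Y %H:%M")
    print(modification_time)  # 2024-04-09 18:30:00
```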
@@ -567,7 +607,7 @@ class IliasPage:
         file_listings: List[Tag] = container.findAll(
             name="a",
             # download links contain the given command class
-            attrs={"href": lambda x: x and "cmdClass=ilexsubmissionfilegui" in x}
+            attrs={"href": lambda x: x and "cmdclass=ilexsubmissionfilegui" in x.lower()}
         )
 
         # Add each listing as a new
@@ -908,6 +948,9 @@ class IliasPage:
         if "baseClass=ilMediaCastHandlerGUI" in parsed_url.query:
             return IliasElementType.MEDIACAST_VIDEO_FOLDER
 
+        if "baseClass=ilSAHSPresentationGUI" in parsed_url.query:
+            return IliasElementType.SCORM_LEARNING_MODULE
+
         # Booking and Meeting can not be detected based on the link. They do have a ref_id though, so
         # try to guess it from the image.
 
@@ -949,7 +992,11 @@ class IliasPage:
         if img_tag is None:
             img_tag = found_parent.select_one("img.icon")
 
-        if img_tag is None and found_parent.find("a", attrs={"href": lambda x: x and "crs_next_sess=" in x}):
+        is_session_expansion_button = found_parent.find(
+            "a",
+            attrs={"href": lambda x: x and ("crs_next_sess=" in x or "crs_prev_sess=" in x)}
+        )
+        if img_tag is None and is_session_expansion_button:
             log.explain("Found session expansion button, skipping it as it has no content")
             return None
 
@@ -982,6 +1029,9 @@ class IliasPage:
         if str(img_tag["src"]).endswith("icon_mcst.svg"):
             return IliasElementType.MEDIACAST_VIDEO_FOLDER
 
+        if str(img_tag["src"]).endswith("icon_sahs.svg"):
+            return IliasElementType.SCORM_LEARNING_MODULE
+
         return IliasElementType.FOLDER
 
     @staticmethod
@@ -1011,6 +1061,37 @@ class IliasPage:
         rest_of_name = split_delimiter.join(meeting_name.split(split_delimiter)[1:])
         return datetime.strftime(date_portion, "%Y-%m-%d") + split_delimiter + rest_of_name
 
+    @staticmethod
+    def is_logged_in(soup: BeautifulSoup) -> bool:
+        # Normal ILIAS pages
+        mainbar: Optional[Tag] = soup.find(class_="il-maincontrols-metabar")
+        if mainbar is not None:
+            login_button = mainbar.find(attrs={"href": lambda x: x and "login.php" in x})
+            shib_login = soup.find(id="button_shib_login")
+            return not login_button and not shib_login
+
+        # Personal Desktop
+        if soup.find("a", attrs={"href": lambda x: x and "block_type=pditems" in x}):
+            return True
+
+        # Video listing embeds do not have complete ILIAS html. Try to match them by
+        # their video listing table
+        video_table = soup.find(
+            recursive=True,
+            name="table",
+            attrs={"id": lambda x: x is not None and x.startswith("tbl_xoct")}
+        )
+        if video_table is not None:
+            return True
+        # The individual video player wrapper page has nothing of the above.
+        # Match it by its playerContainer.
+        if soup.select_one("#playerContainer") is not None:
+            return True
+        return False
+
+    def get_permalink(self) -> Optional[str]:
+        return IliasPage.get_soup_permalink(self._soup)
+
     def _abs_url_from_link(self, link_tag: Tag) -> str:
         """
         Create an absolute url from an <a> tag.
@@ -1023,6 +1104,13 @@ class IliasPage:
         """
         return urljoin(self._page_url, relative_url)
 
+    @staticmethod
+    def get_soup_permalink(soup: BeautifulSoup) -> Optional[str]:
+        perma_link_element: Tag = soup.select_one(".il-footer-permanent-url > a")
+        if not perma_link_element or not perma_link_element.get("href"):
+            return None
+        return perma_link_element.get("href")
+
 
 def _unexpected_html_warning() -> None:
     log.warn("Encountered unexpected HTML structure, ignoring element.")
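All permalink checks now funnel through `get_soup_permalink`, which reads the ILIAS 8 footer link instead of the old `<input id="current_perma_link">` element. A standalone sketch of the same lookup (the footer snippet and URL are invented for illustration):

```python
from typing import Optional

from bs4 import BeautifulSoup


def get_soup_permalink(soup: BeautifulSoup) -> Optional[str]:
    # ILIAS 8 renders the permanent link in the page footer; the old
    # <input id="current_perma_link"> element no longer exists.
    element = soup.select_one(".il-footer-permanent-url > a")
    if not element or not element.get("href"):
        return None
    return element.get("href")


html = '<div class="il-footer-permanent-url"><a href="https://ilias.example.edu/goto.php?target=root_1">Permalink</a></div>'
print(get_soup_permalink(BeautifulSoup(html, "html.parser")))
```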
@@ -81,24 +81,25 @@ class KitIliasWebCrawlerSection(HttpCrawlerSection):
         return self.s.getboolean("forums", fallback=False)
 
 
-_DIRECTORY_PAGES: Set[IliasElementType] = set([
+_DIRECTORY_PAGES: Set[IliasElementType] = {
     IliasElementType.EXERCISE,
     IliasElementType.EXERCISE_FILES,
     IliasElementType.FOLDER,
+    IliasElementType.INFO_TAB,
     IliasElementType.MEETING,
     IliasElementType.MEDIACAST_VIDEO_FOLDER,
     IliasElementType.OPENCAST_VIDEO_FOLDER,
     IliasElementType.OPENCAST_VIDEO_FOLDER_MAYBE_PAGINATED,
-])
+}
 
-_VIDEO_ELEMENTS: Set[IliasElementType] = set([
+_VIDEO_ELEMENTS: Set[IliasElementType] = {
     IliasElementType.MEDIACAST_VIDEO_FOLDER,
     IliasElementType.MEDIACAST_VIDEO,
     IliasElementType.OPENCAST_VIDEO,
     IliasElementType.OPENCAST_VIDEO_PLAYER,
     IliasElementType.OPENCAST_VIDEO_FOLDER,
     IliasElementType.OPENCAST_VIDEO_FOLDER_MAYBE_PAGINATED,
-])
+}
 
 
 def _iorepeat(attempts: int, name: str, failure_is_error: bool = False) -> Callable[[AWrapped], AWrapped]:
@@ -129,6 +130,7 @@ def _iorepeat(attempts: int, name: str, failure_is_error: bool = False) -> Callable[[AWrapped], AWrapped]:
             raise CrawlError("Impossible return in ilias _iorepeat")
+
         return wrapper  # type: ignore
 
     return decorator
 
 
@@ -139,6 +141,10 @@ def _wrap_io_in_warning(name: str) -> Callable[[AWrapped], AWrapped]:
     return _iorepeat(1, name)
 
 
+def _get_video_cache_key(element: IliasPageElement) -> str:
+    return f"ilias-video-cache-{element.id()}"
+
+
 # Crawler control flow:
 #
 # crawl_desktop -+
@@ -248,8 +254,8 @@ instance's greatest bottleneck.
         soup = await self._get_page(next_stage_url, root_page_allowed=True)
 
         if current_parent is None and expected_id is not None:
-            perma_link_element: Tag = soup.find(id="current_perma_link")
-            if not perma_link_element or "crs_" not in perma_link_element.get("value"):
+            perma_link = IliasPage.get_soup_permalink(soup)
+            if not perma_link or "crs_" not in perma_link:
                 raise CrawlError("Invalid course id? Didn't find anything looking like a course")
 
         log.explain_topic(f"Parsing HTML page for {fmt_path(cl.path)}")
@@ -262,6 +268,8 @@ instance's greatest bottleneck.
             next_stage_url = None
 
         elements.extend(page.get_child_elements())
+        if info_tab := page.get_info_tab():
+            elements.append(info_tab)
         if description_string := page.get_description():
             description.append(description_string)
 
@@ -400,6 +408,14 @@ instance's greatest bottleneck.
                 "[bright_black](surveys contain no relevant data)"
             )
             return None
+        elif element.type == IliasElementType.SCORM_LEARNING_MODULE:
+            log.status(
+                "[bold bright_black]",
+                "Ignored",
+                fmt_path(element_path),
+                "[bright_black](scorm learning modules are not supported)"
+            )
+            return None
         elif element.type == IliasElementType.LEARNING_MODULE:
             return await self._handle_learning_module(element, element_path)
         elif element.type == IliasElementType.LINK:
@@ -536,8 +552,8 @@ instance's greatest bottleneck.
         # Copy old mapping as it is likely still relevant
         if self.prev_report:
             self.report.add_custom_value(
-                str(element_path),
-                self.prev_report.get_custom_value(str(element_path))
+                _get_video_cache_key(element),
+                self.prev_report.get_custom_value(_get_video_cache_key(element))
             )
 
         # A video might contain other videos, so let's "crawl" the video first
@@ -547,58 +563,69 @@ instance's greatest bottleneck.
         # to ensure backwards compatibility.
         maybe_dl = await self.download(element_path, mtime=element.mtime, redownload=Redownload.ALWAYS)
 
-        # If we do not want to crawl it (user filter) or we have every file
-        # from the cached mapping already, we can ignore this and bail
-        if not maybe_dl or self._all_opencast_videos_locally_present(element_path):
-            # Mark all existing cideos as known so they do not get deleted
-            # during dleanup. We "downloaded" them, just without actually making
-            # a network request as we assumed they did not change.
-            for video in self._previous_contained_opencast_videos(element_path):
-                await self.download(video)
+        # If we do not want to crawl it (user filter), we can move on
+        if not maybe_dl:
+            return None
+
+        # If we have every file from the cached mapping already, we can ignore this and bail
+        if self._all_opencast_videos_locally_present(element, maybe_dl.path):
+            # Mark all existing videos as known to ensure they do not get deleted during cleanup.
+            # We "downloaded" them, just without actually making a network request as we assumed
+            # they did not change.
+            contained = self._previous_contained_opencast_videos(element, maybe_dl.path)
+            if len(contained) > 1:
+                # Only do this if we threw away the original dl token,
+                # to not download single-stream videos twice
+                for video in contained:
+                    await self.download(video)
+
             return None
 
-        return self._download_opencast_video(element_path, element, maybe_dl)
+        return self._download_opencast_video(element, maybe_dl)
 
-    def _previous_contained_opencast_videos(self, video_path: PurePath) -> List[PurePath]:
+    def _previous_contained_opencast_videos(
+        self, element: IliasPageElement, element_path: PurePath
+    ) -> List[PurePath]:
         if not self.prev_report:
             return []
-        custom_value = self.prev_report.get_custom_value(str(video_path))
+        custom_value = self.prev_report.get_custom_value(_get_video_cache_key(element))
         if not custom_value:
             return []
-        names = cast(List[str], custom_value)
-        folder = video_path.parent
-        return [PurePath(folder, name) for name in names]
+        cached_value = cast(dict[str, Any], custom_value)
+        if "known_paths" not in cached_value or "own_path" not in cached_value:
+            log.explain(f"'known_paths' or 'own_path' missing from cached value: {cached_value}")
+            return []
+        transformed_own_path = self._transformer.transform(element_path)
+        if cached_value["own_path"] != str(transformed_own_path):
+            log.explain(
+                f"own_path '{transformed_own_path}' does not match cached value: '{cached_value['own_path']}"
+            )
+            return []
+        return [PurePath(name) for name in cached_value["known_paths"]]
 
-    def _all_opencast_videos_locally_present(self, video_path: PurePath) -> bool:
-        if contained_videos := self._previous_contained_opencast_videos(video_path):
-            log.explain_topic(f"Checking local cache for video {video_path.name}")
-            all_found_locally = True
-            for video in contained_videos:
-                transformed_path = self._to_local_opencast_video_path(video)
-                if transformed_path:
-                    exists_locally = self._output_dir.resolve(transformed_path).exists()
-                    all_found_locally = all_found_locally and exists_locally
-            if all_found_locally:
-                log.explain("Found all videos locally, skipping enumeration request")
+    def _all_opencast_videos_locally_present(self, element: IliasPageElement, element_path: PurePath) -> bool:
+        log.explain_topic(f"Checking local cache for video {fmt_path(element_path)}")
+        if contained_videos := self._previous_contained_opencast_videos(element, element_path):
+            log.explain(
+                f"The following contained videos are known: {','.join(map(fmt_path, contained_videos))}"
+            )
+            if all(self._output_dir.resolve(path).exists() for path in contained_videos):
+                log.explain("Found all known videos locally, skipping enumeration request")
                 return True
             log.explain("Missing at least one video, continuing with requests!")
+        else:
+            log.explain("No local cache present")
         return False
 
-    def _to_local_opencast_video_path(self, path: PurePath) -> Optional[PurePath]:
-        if transformed := self._transformer.transform(path):
-            return self._deduplicator.fixup_path(transformed)
-        return None
-
     @anoncritical
     @_iorepeat(3, "downloading video")
-    async def _download_opencast_video(
-        self,
-        original_path: PurePath,
-        element: IliasPageElement,
-        dl: DownloadToken
-    ) -> None:
-        stream_elements: List[IliasPageElement] = []
+    async def _download_opencast_video(self, element: IliasPageElement, dl: DownloadToken) -> None:
+        def add_to_report(paths: list[str]) -> None:
+            self.report.add_custom_value(
+                _get_video_cache_key(element),
+                {"known_paths": paths, "own_path": str(self._transformer.transform(dl.path))}
+            )
+
         async with dl as (bar, sink):
            page = IliasPage(await self._get_page(element.url), element.url, element)
            stream_elements = page.get_child_elements()
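The cached custom value changes shape here: instead of a flat list of file names keyed by the local path, the report now stores a dict keyed by a stable element id, and it remembers the path the video folder itself was downloaded to. A sketch of the new layout (key and paths invented for illustration):

```python
from pathlib import PurePath

# Invented key and paths. The key is derived from the element id instead of
# the local path, so moving the output directory no longer invalidates it.
cache_key = "ilias-video-cache-xoct_1234"
cache_value = {
    "known_paths": [
        "Lectures/lecture-01 (camera).mp4",
        "Lectures/lecture-01 (slides).mp4",
    ],
    "own_path": "Lectures/lecture-01",
}

# On a later run the crawler only re-enumerates the videos if "own_path"
# changed or one of the known paths is missing locally.
known = [PurePath(name) for name in cache_value["known_paths"]]
print(cache_key, known)
```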
@@ -609,32 +636,25 @@ instance's greatest bottleneck.
             log.explain(f"Using single video mode for {element.name}")
             stream_element = stream_elements[0]
 
-            transformed_path = self._to_local_opencast_video_path(original_path)
-            if not transformed_path:
-                raise CrawlError(f"Download returned a path but transform did not for {original_path}")
-
             # We do not have a local cache yet
-            if self._output_dir.resolve(transformed_path).exists():
-                log.explain(f"Video for {element.name} existed locally")
-            else:
-                await self._stream_from_url(stream_element.url, sink, bar, is_video=True)
-            self.report.add_custom_value(str(original_path), [original_path.name])
+            await self._stream_from_url(stream_element.url, sink, bar, is_video=True)
+            add_to_report([str(self._transformer.transform(dl.path))])
             return
 
         contained_video_paths: List[str] = []
 
         for stream_element in stream_elements:
-            video_path = original_path.parent / stream_element.name
-            contained_video_paths.append(str(video_path))
+            video_path = dl.path.parent / stream_element.name
 
             maybe_dl = await self.download(video_path, mtime=element.mtime, redownload=Redownload.NEVER)
             if not maybe_dl:
                 continue
             async with maybe_dl as (bar, sink):
                 log.explain(f"Streaming video from real url {stream_element.url}")
+                contained_video_paths.append(str(self._transformer.transform(maybe_dl.path)))
                 await self._stream_from_url(stream_element.url, sink, bar, is_video=True)
 
-        self.report.add_custom_value(str(original_path), contained_video_paths)
+        add_to_report(contained_video_paths)
 
     async def _handle_file(
         self,
@@ -646,8 +666,8 @@ instance's greatest bottleneck.
             return None
         return self._download_file(element, maybe_dl)
 
-    @anoncritical
     @_iorepeat(3, "downloading file")
+    @anoncritical
     async def _download_file(self, element: IliasPageElement, dl: DownloadToken) -> None:
         assert dl  # The function is only reached when dl is not None
         async with dl as (bar, sink):
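Only the decorator order changes in this hunk. Python applies decorators bottom-up, so the decorator written closest to the `def` becomes the innermost wrapper; swapping the two lines therefore swaps which wrapper sees the other's result. A toy demonstration with invented stand-in decorators (not PFERD's actual implementations):

```python
from functools import wraps


def label(name: str):
    def decorator(fn):
        @wraps(fn)
        def wrapper() -> str:
            return f"{name}({fn()})"
        return wrapper
    return decorator


@label("iorepeat")      # listed first, applied second: outermost
@label("anoncritical")  # listed last, applied first: innermost
def download() -> str:
    return "download"


print(download())  # iorepeat(anoncritical(download))
```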
@@ -655,12 +675,28 @@ instance's greatest bottleneck.
 
     async def _stream_from_url(self, url: str, sink: FileSink, bar: ProgressBar, is_video: bool) -> None:
         async def try_stream() -> bool:
-            async with self.session.get(url, allow_redirects=is_video) as resp:
-                if not is_video:
-                    # Redirect means we weren't authenticated
-                    if hdrs.LOCATION in resp.headers:
-                        return False
-                # we wanted a video but got HTML
+            next_url = url
+
+            # Normal files redirect to the magazine if we are not authenticated. As files could be HTML,
+            # we can not match on the content type here. Instead, we disallow redirects and inspect the
+            # new location. If we are redirected anywhere but the ILIAS 8 "sendfile" command, we assume
+            # our authentication expired.
+            if not is_video:
+                async with self.session.get(url, allow_redirects=False) as resp:
+                    # Redirect to anything except a "sendfile" means we weren't authenticated
+                    if hdrs.LOCATION in resp.headers:
+                        if "&cmd=sendfile" not in resp.headers[hdrs.LOCATION]:
+                            return False
+                        # Directly follow the redirect to not make a second, unnecessary request
+                        next_url = resp.headers[hdrs.LOCATION]
+
+            # Let's try this again and follow redirects
+            return await fetch_follow_redirects(next_url)
+
+        async def fetch_follow_redirects(file_url: str) -> bool:
+            async with self.session.get(file_url) as resp:
+                # We wanted a video but got HTML => Forbidden, auth expired. Logging in won't really
+                # solve that depending on the setup, but it is better than nothing.
                 if is_video and "html" in resp.content_type:
                     return False
 
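The auth check for plain files now works by disallowing redirects and inspecting the `Location` header: only a redirect to the ILIAS 8 "sendfile" command is treated as legitimate. A condensed sketch of that probe, assuming an authenticated `aiohttp` session (the function name and URL are placeholders):

```python
import asyncio

import aiohttp
from aiohttp import hdrs


async def points_to_sendfile(session: aiohttp.ClientSession, file_url: str) -> bool:
    # Disallow redirects and look at the Location header. Anything other
    # than an ILIAS 8 "sendfile" redirect means the session has expired
    # and we were bounced towards the magazine/login page.
    async with session.get(file_url, allow_redirects=False) as resp:
        if hdrs.LOCATION in resp.headers:
            return "&cmd=sendfile" in resp.headers[hdrs.LOCATION]
        return True  # no redirect at all: the file is served directly


async def main() -> None:
    # Placeholder URL; a real probe needs an authenticated ILIAS session.
    async with aiohttp.ClientSession() as session:
        print(await points_to_sendfile(session, "https://ilias.example.edu/goto.php?target=file_1"))


asyncio.run(main())
```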
@@ -705,7 +741,7 @@ instance's greatest bottleneck.
         log.explain(f"URL: {next_stage_url}")
 
         soup = await self._get_page(next_stage_url)
-        page = IliasPage(soup, next_stage_url, None)
+        page = IliasPage(soup, next_stage_url, element)
 
         if next := page.get_next_stage_element():
             next_stage_url = next.url
@@ -717,7 +753,6 @@ instance's greatest bottleneck.
             raise CrawlWarning("Failed to extract forum data")
         if download_data.empty:
             log.explain("Forum had no threads")
-            elements = []
             return
         html = await self._post_authenticated(download_data.url, download_data.form_data)
         elements = parse_ilias_forum_export(soupify(html))
@@ -768,14 +803,14 @@ instance's greatest bottleneck.
         log.explain_topic(f"Parsing initial HTML page for {fmt_path(cl.path)}")
         log.explain(f"URL: {element.url}")
         soup = await self._get_page(element.url)
-        page = IliasPage(soup, element.url, None)
+        page = IliasPage(soup, element.url, element)
         if next := page.get_learning_module_data():
             elements.extend(await self._crawl_learning_module_direction(
-                cl.path, next.previous_url, "left"
+                cl.path, next.previous_url, "left", element
             ))
             elements.append(next)
             elements.extend(await self._crawl_learning_module_direction(
-                cl.path, next.next_url, "right"
+                cl.path, next.next_url, "right", element
             ))
 
         # Reflect their natural ordering in the file names
@@ -797,7 +832,8 @@ instance's greatest bottleneck.
         self,
         path: PurePath,
         start_url: Optional[str],
-        dir: Union[Literal["left"], Literal["right"]]
+        dir: Union[Literal["left"], Literal["right"]],
+        parent_element: IliasPageElement
     ) -> List[IliasLearningModulePage]:
         elements: List[IliasLearningModulePage] = []
 
@@ -810,7 +846,7 @@ instance's greatest bottleneck.
             log.explain_topic(f"Parsing HTML page for {fmt_path(path)} ({dir}-{counter})")
             log.explain(f"URL: {next_element_url}")
             soup = await self._get_page(next_element_url)
-            page = IliasPage(soup, next_element_url, None)
+            page = IliasPage(soup, next_element_url, parent_element)
             if next := page.get_learning_module_data():
                 elements.append(next)
                 if dir == "left":
@@ -882,7 +918,7 @@ instance's greatest bottleneck.
         auth_id = await self._current_auth_id()
         async with self.session.get(url) as request:
             soup = soupify(await request.read())
-            if self._is_logged_in(soup):
+            if IliasPage.is_logged_in(soup):
                 return self._verify_page(soup, url, root_page_allowed)
 
         # We weren't authenticated, so try to do that
@@ -891,11 +927,12 @@ instance's greatest bottleneck.
         # Retry once after authenticating. If this fails, we will die.
         async with self.session.get(url) as request:
             soup = soupify(await request.read())
-            if self._is_logged_in(soup):
+            if IliasPage.is_logged_in(soup):
                 return self._verify_page(soup, url, root_page_allowed)
-        raise CrawlError("get_page failed even after authenticating")
+        raise CrawlError(f"get_page failed even after authenticating on {url!r}")
 
-    def _verify_page(self, soup: BeautifulSoup, url: str, root_page_allowed: bool) -> BeautifulSoup:
+    @staticmethod
+    def _verify_page(soup: BeautifulSoup, url: str, root_page_allowed: bool) -> BeautifulSoup:
         if IliasPage.is_root_page(soup) and not root_page_allowed:
             raise CrawlError(
                 "Unexpectedly encountered ILIAS root page. "
@@ -953,34 +990,6 @@ instance's greatest bottleneck.
     async def _authenticate(self) -> None:
         await self._shibboleth_login.login(self.session)
 
-    @ staticmethod
-    def _is_logged_in(soup: BeautifulSoup) -> bool:
-        # Normal ILIAS pages
-        mainbar: Optional[Tag] = soup.find(class_="il-maincontrols-metabar")
-        if mainbar is not None:
-            login_button = mainbar.find(attrs={"href": lambda x: x and "login.php" in x})
-            shib_login = soup.find(id="button_shib_login")
-            return not login_button and not shib_login
-
-        # Personal Desktop
-        if soup.find("a", attrs={"href": lambda x: x and "block_type=pditems" in x}):
-            return True
-
-        # Video listing embeds do not have complete ILIAS html. Try to match them by
-        # their video listing table
-        video_table = soup.find(
-            recursive=True,
-            name="table",
-            attrs={"id": lambda x: x is not None and x.startswith("tbl_xoct")}
-        )
-        if video_table is not None:
-            return True
-        # The individual video player wrapper page has nothing of the above.
-        # Match it by its playerContainer.
-        if soup.select_one("#playerContainer") is not None:
-            return True
-        return False
-
 
 class KitShibbolethLogin:
     """
@@ -1127,7 +1136,7 @@ async def _shib_post(
     async with session.get(correct_url, allow_redirects=False) as response:
         location = response.headers.get("location")
         log.explain(f"Redirected to {location!r} with status {response.status}")
-        # If shib still still has a valid session, it will directly respond to the request
+        # If shib still has a valid session, it will directly respond to the request
         if location is None:
             log.explain("Shib recognized us, returning its response directly")
             return soupify(await response.read())
@@ -14,7 +14,7 @@ def name_variants(path: PurePath) -> Iterator[PurePath]:
 
 
 class Deduplicator:
-    FORBIDDEN_CHARS = '<>:"/\\|?*'
+    FORBIDDEN_CHARS = '<>:"/\\|?*' + "".join([chr(i) for i in range(0, 32)])
     FORBIDDEN_NAMES = {
         "CON", "PRN", "AUX", "NUL",
         "COM1", "COM2", "COM3", "COM4", "COM5", "COM6", "COM7", "COM8", "COM9",
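`FORBIDDEN_CHARS` now also covers the ASCII control characters (0 through 31), which Windows rejects in file names just like `<>:"/\|?*`. A quick illustration with a hypothetical `sanitize` helper (not PFERD's actual replacement logic):

```python
FORBIDDEN_CHARS = '<>:"/\\|?*' + "".join([chr(i) for i in range(0, 32)])


def sanitize(name: str, replacement: str = "_") -> str:
    # Hypothetical helper for illustration: swap every forbidden character
    # (including control characters like "\x07") for a placeholder.
    return "".join(replacement if c in FORBIDDEN_CHARS else c for c in name)


print(sanitize("lecture\x07notes?.pdf"))  # lecture_notes_.pdf
```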
@@ -1,2 +1,2 @@
 NAME = "PFERD"
-VERSION = "3.4.3"
+VERSION = "3.5.1"
flake.lock (generated): 8 changes
@@ -2,16 +2,16 @@
   "nodes": {
     "nixpkgs": {
       "locked": {
-        "lastModified": 1692986144,
-        "narHash": "sha256-M4VFpy7Av9j+33HF5nIGm0k2+DXXW4qSSKdidIKg5jY=",
+        "lastModified": 1708979614,
+        "narHash": "sha256-FWLWmYojIg6TeqxSnHkKpHu5SGnFP5um1uUjH+wRV6g=",
         "owner": "NixOS",
         "repo": "nixpkgs",
-        "rev": "74e5bdc5478ebbe7ba5849f0d765f92757bb9dbf",
+        "rev": "b7ee09cf5614b02d289cd86fcfa6f24d4e078c2a",
         "type": "github"
       },
       "original": {
         "owner": "NixOS",
-        "ref": "nixos-23.05",
+        "ref": "nixos-23.11",
         "repo": "nixpkgs",
         "type": "github"
       }
@@ -2,7 +2,7 @@
   description = "Tool for downloading course-related files from ILIAS";
 
   inputs = {
-    nixpkgs.url = "github:NixOS/nixpkgs/nixos-23.05";
+    nixpkgs.url = "github:NixOS/nixpkgs/nixos-23.11";
   };
 
   outputs = { self, nixpkgs }: