Mirror of https://github.com/Garmelon/PFERD.git, synced 2025-07-12 14:12:30 +02:00

Compare commits: fix/exerci... vs. v3.8.0 (3 commits)

Author | SHA1 | Date
---|---|---
 | 7291382430 |
 | 1a430ad5d1 |
 | f6bdeb6b9d |
CHANGELOG.md (22 changes)
@@ -22,34 +22,14 @@ ambiguous situations.
-## Unreleased
-
-### Fixed
-- Crawling of exercises with instructions
-
-## 3.8.2 - 2025-04-29
-
-### Changed
-- Explicitly mention that wikis are not supported at the moment and ignore them
-
-### Fixed
-- Ilias-native login
-- Exercise crawling
-
-## 3.8.1 - 2025-04-17
-
-### Fixed
-- Description html files now specify a UTF-8 encoding
-- Images in descriptions now always have a white background
-
-## 3.8.0 - 2025-04-16
+## 3.8.0 - 2025-04-15
 
 ### Added
 - Support for ILIAS 9
 
 ### Changed
 - Added prettier CSS to forum threads
 - Downloaded forum threads now link to the forum instead of the ILIAS thread
 - Increase minimum supported Python version to 3.11
 - Do not crawl nested courses (courses linked in other courses)
 
 ### Fixed
 - File links in report on Windows
@@ -39,10 +39,6 @@ _STYLE_TAG_CONTENT = """
         margin: 0.5rem 0;
     }
 
-    img {
-        background-color: white;
-    }
-
     body {
         padding: 1em;
         grid-template-columns: 1fr min(60rem, 90%) 1fr;

@@ -60,11 +56,12 @@ _ARTICLE_WORTHY_CLASSES = [
 def insert_base_markup(soup: BeautifulSoup) -> BeautifulSoup:
     head = soup.new_tag("head")
     soup.insert(0, head)
-    # Force UTF-8 encoding
-    head.append(soup.new_tag("meta", charset="utf-8"))
-    head.append(soup.new_tag("link", rel="stylesheet", href="https://cdn.simplecss.org/simple.css"))
+
+    simplecss_link: Tag = soup.new_tag("link")
+    # <link rel="stylesheet" href="https://cdn.simplecss.org/simple.css">
+    simplecss_link["rel"] = "stylesheet"
+    simplecss_link["href"] = "https://cdn.simplecss.org/simple.css"
+    head.append(simplecss_link)
 
     # Basic style tags for compat
     style: Tag = soup.new_tag("style")
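For context: the UTF-8 fix shown on the removed side boils down to making every saved description HTML declare its encoding before a browser can guess wrong. A minimal standalone sketch of that idea (the function name and sample HTML are illustrative, not PFERD's API):

from bs4 import BeautifulSoup

def force_utf8(html: str) -> str:
    # Ensure the document has a <head> and a <meta charset="utf-8"> inside it,
    # so browsers do not fall back to a locale-dependent default encoding.
    soup = BeautifulSoup(html, "html.parser")
    head = soup.head
    if head is None:
        head = soup.new_tag("head")
        soup.insert(0, head)
    if head.find("meta", charset=True) is None:
        head.insert(0, soup.new_tag("meta", charset="utf-8"))
    return str(soup)

print(force_utf8("<body><p>Übungsblatt 3</p></body>"))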
@@ -107,7 +107,6 @@ class IliasWebCrawlerSection(HttpCrawlerSection):
 _DIRECTORY_PAGES: Set[IliasElementType] = {
     IliasElementType.EXERCISE,
     IliasElementType.EXERCISE_FILES,
-    IliasElementType.EXERCISE_OVERVIEW,
     IliasElementType.FOLDER,
     IliasElementType.INFO_TAB,
     IliasElementType.MEDIACAST_VIDEO_FOLDER,
@@ -217,19 +216,11 @@ instance's greatest bottleneck.
 
     async def _crawl_desktop(self) -> None:
         await self._crawl_url(
-            urljoin(self._base_url, "/ilias.php?baseClass=ilDashboardGUI&cmd=show"),
-            crawl_nested_courses=True
+            urljoin(self._base_url, "/ilias.php?baseClass=ilDashboardGUI&cmd=show")
         )
 
-    async def _crawl_url(
-        self,
-        url: str,
-        expected_id: Optional[int] = None,
-        crawl_nested_courses: bool = False
-    ) -> None:
-        if awaitable := await self._handle_ilias_page(
-            url, None, PurePath("."), expected_id, crawl_nested_courses
-        ):
+    async def _crawl_url(self, url: str, expected_id: Optional[int] = None) -> None:
+        if awaitable := await self._handle_ilias_page(url, None, PurePath("."), expected_id):
             await awaitable
 
     async def _handle_ilias_page(
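The removed parameter is a plain example of threading an option through a recursive crawl: the flag is accepted at every level and handed to each child task, so a decision made at the entry point still applies several pages deep. A self-contained toy sketch of that pattern (the page tree and names are invented for illustration):

import asyncio
from pathlib import PurePath

# Invented page tree: name -> (is_course, children)
PAGES: dict[str, tuple[bool, list[str]]] = {
    "my-course": (True, ["folder", "linked-course"]),
    "folder": (False, []),
    "linked-course": (True, []),
}

async def handle_element(name: str, path: PurePath, crawl_nested_courses: bool) -> None:
    is_course, children = PAGES[name]
    # A course reached through another element is a nested (linked) course.
    if is_course and path.parent != PurePath(".") and not crawl_nested_courses:
        print(f"Ignored {path} (not descending into linked course)")
        return
    tasks = [
        asyncio.create_task(handle_element(child, path / child, crawl_nested_courses))
        for child in children
    ]
    await asyncio.gather(*tasks)

# The dashboard crawl passes True at the entry point; a single configured course passes False.
asyncio.run(handle_element("my-course", PurePath("my-course"), crawl_nested_courses=False))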
@@ -238,7 +229,6 @@ instance's greatest bottleneck.
         current_element: Optional[IliasPageElement],
         path: PurePath,
         expected_course_id: Optional[int] = None,
-        crawl_nested_courses: bool = False
     ) -> Optional[Coroutine[Any, Any, None]]:
         maybe_cl = await self.crawl(path)
         if not maybe_cl:

@@ -246,9 +236,7 @@ instance's greatest bottleneck.
         if current_element:
             self._ensure_not_seen(current_element, path)
 
-        return self._crawl_ilias_page(
-            url, current_element, maybe_cl, expected_course_id, crawl_nested_courses
-        )
+        return self._crawl_ilias_page(url, current_element, maybe_cl, expected_course_id)
 
     @anoncritical
     async def _crawl_ilias_page(

@@ -257,7 +245,6 @@ instance's greatest bottleneck.
         current_element: Optional[IliasPageElement],
         cl: CrawlToken,
         expected_course_id: Optional[int] = None,
-        crawl_nested_courses: bool = False,
     ) -> None:
         elements: List[IliasPageElement] = []
         # A list as variable redefinitions are not propagated to outer scopes

@@ -306,7 +293,7 @@ instance's greatest bottleneck.
 
         tasks: List[Awaitable[None]] = []
         for element in elements:
-            if handle := await self._handle_ilias_element(cl.path, element, crawl_nested_courses):
+            if handle := await self._handle_ilias_element(cl.path, element):
                 tasks.append(asyncio.create_task(handle))
 
         # And execute them

@@ -322,7 +309,6 @@ instance's greatest bottleneck.
         self,
         parent_path: PurePath,
         element: IliasPageElement,
-        crawl_nested_courses: bool = False
     ) -> Optional[Coroutine[Any, Any, None]]:
         # element.name might contain `/` if the crawler created nested elements,
         # so we can not sanitize it here. We trust in the output dir to thwart worst-case
@@ -416,21 +402,11 @@ instance's greatest bottleneck.
             )
             return None
         elif element.type == IliasElementType.COURSE:
-            if crawl_nested_courses:
-                return await self._handle_ilias_page(element.url, element, element_path)
-            log.status(
-                "[bold bright_black]",
-                "Ignored",
-                fmt_path(element_path),
-                "[bright_black](not descending into linked course)"
-            )
-            return None
-        elif element.type == IliasElementType.WIKI:
             log.status(
                 "[bold bright_black]",
                 "Ignored",
                 fmt_path(element_path),
-                "[bright_black](wikis are not currently supported)"
+                "[bright_black](not descending into linked course, download it separately)"
             )
             return None
         elif element.type == IliasElementType.LEARNING_MODULE:
@@ -783,23 +759,70 @@ instance's greatest bottleneck.
     @_iorepeat(3, "crawling forum")
     @anoncritical
     async def _crawl_forum(self, element: IliasPageElement, cl: CrawlToken) -> None:
-        elements: List[IliasForumThread] = []
-
         async with cl:
-            inner = IliasPage(await self._get_page(element.url), element)
-            export_url = inner.get_forum_export_url()
-            if not export_url:
-                log.warn("Could not extract forum export url")
-                return
+            next_stage_url = element.url
+            page = None
+
+            while next_stage_url:
+                log.explain_topic(f"Parsing HTML page for {fmt_path(cl.path)}")
+                log.explain(f"URL: {next_stage_url}")
+
+                soup = await self._get_page(next_stage_url)
+                page = IliasPage(soup, element)
+
+                if next := page.get_next_stage_element():
+                    next_stage_url = next.url
+                else:
+                    break
+
+            forum_threads: list[tuple[IliasPageElement, bool]] = []
+            for entry in cast(IliasPage, page).get_forum_entries():
+                path = cl.path / (_sanitize_path_name(entry.name) + ".html")
+                forum_threads.append((entry, self.should_try_download(path, mtime=entry.mtime)))
+
+            # Sort the ids. The forum download will *preserve* this ordering
+            forum_threads.sort(key=lambda elem: elem[0].id())
+
+            if not forum_threads:
+                log.explain("Forum had no threads")
+                return
 
-            export = await self._post(export_url, {
-                "format": "html",
-                "cmd[createExportFile]": ""
-            })
+            download_data = cast(IliasPage, page).get_download_forum_data(
+                [thread.id() for thread, download in forum_threads if download]
+            )
+            if not download_data:
+                raise CrawlWarning("Failed to extract forum data")
 
-            elements = parse_ilias_forum_export(soupify(export))
+            if not download_data.empty:
+                html = await self._post_authenticated(download_data.url, download_data.form_data)
+                elements = parse_ilias_forum_export(soupify(html))
+            else:
+                elements = []
+
+            # Verify that ILIAS does not change the order, as we depend on it later. Otherwise, we could not call
+            # download in the correct order, potentially messing up duplication handling.
+            expected_element_titles = [thread.name for thread, download in forum_threads if download]
+            actual_element_titles = [_sanitize_path_name(thread.name) for thread in elements]
+            if expected_element_titles != actual_element_titles:
+                raise CrawlWarning(
+                    f"Forum thread order mismatch: {expected_element_titles} != {actual_element_titles}"
+                )
 
         tasks: List[Awaitable[None]] = []
-        for thread in elements:
-            tasks.append(asyncio.create_task(self._download_forum_thread(cl.path, thread, element.url)))
+        for thread, download in forum_threads:
+            if download:
+                # This only works because ILIAS keeps the order in the export
+                elem = elements.pop(0)
+                tasks.append(asyncio.create_task(self._download_forum_thread(cl.path, elem, thread)))
+            else:
+                # We only downloaded the threads we "should_try_download"ed. This can be an
+                # over-approximation and all will be fine.
+                # If we selected too few, e.g. because there was a duplicate title and the mtime of the
+                # original is newer than the update of the duplicate.
+                # This causes stale data locally, but I consider this problem acceptable right now.
+                tasks.append(asyncio.create_task(self._download_forum_thread(cl.path, thread, thread)))
 
         # And execute them
         await self.gather(tasks)
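The right-hand side's pairing logic is worth spelling out: threads selected for download are matched to the exported documents purely by position, which is why the order check precedes the pop(0) loop. A tiny sketch of that invariant with made-up thread names:

selected = [("thread-a", True), ("thread-b", False), ("thread-c", True)]
exported = ["thread-a", "thread-c"]  # export contains only the requested threads

# Verify the export preserved our request order before pairing positionally.
assert [name for name, download in selected if download] == exported

queue = list(exported)
for name, download in selected:
    # pop(0) is only correct because of the order check above
    paired = queue.pop(0) if download else None
    print(name, "->", paired)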
@@ -810,7 +833,7 @@ instance's greatest bottleneck.
         self,
         parent_path: PurePath,
         thread: Union[IliasForumThread, IliasPageElement],
-        forum_url: str
+        element: IliasPageElement
     ) -> None:
         path = parent_path / (_sanitize_path_name(thread.name) + ".html")
         maybe_dl = await self.download(path, mtime=thread.mtime)

@@ -820,7 +843,7 @@ instance's greatest bottleneck.
         async with maybe_dl as (bar, sink):
             rendered = forum_thread_template(
                 thread.name,
-                forum_url,
+                element.url,
                 thread.name_tag,
                 await self.internalize_images(thread.content_tag)
             )
@@ -998,19 +1021,29 @@ instance's greatest bottleneck.
         )
         return soup
 
-    async def _post(
+    async def _post_authenticated(
         self,
         url: str,
         data: dict[str, Union[str, List[str]]]
     ) -> bytes:
+        auth_id = await self._current_auth_id()
+
         form_data = aiohttp.FormData()
         for key, val in data.items():
             form_data.add_field(key, val)
 
-        async with self.session.post(url, data=form_data()) as request:
+        async with self.session.post(url, data=form_data(), allow_redirects=False) as request:
             if request.status == 200:
                 return await request.read()
-            raise CrawlError(f"post failed with status {request.status}")
+
+        # We weren't authenticated, so try to do that
+        await self.authenticate(auth_id)
+
+        # Retry once after authenticating. If this fails, we will die.
+        async with self.session.post(url, data=data, allow_redirects=False) as request:
+            if request.status == 200:
+                return await request.read()
+        raise CrawlError("post_authenticated failed even after authenticating")
 
     async def _get_authenticated(self, url: str) -> bytes:
         auth_id = await self._current_auth_id()
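The added _post_authenticated follows a retry-once pattern that is easy to isolate: disable redirects so an expired session shows up as a non-200, re-authenticate, then retry exactly once. A hedged standalone sketch (the authenticate callback is a stand-in, not PFERD's helper):

from typing import Awaitable, Callable

import aiohttp

async def post_authenticated(
    session: aiohttp.ClientSession,
    url: str,
    data: dict,
    authenticate: Callable[[], Awaitable[None]],
) -> bytes:
    # allow_redirects=False: an expired session usually answers with a
    # redirect to the login page rather than a plain 200.
    async with session.post(url, data=data, allow_redirects=False) as resp:
        if resp.status == 200:
            return await resp.read()

    await authenticate()  # re-login, then retry exactly once

    async with session.post(url, data=data, allow_redirects=False) as resp:
        if resp.status == 200:
            return await resp.read()
        raise RuntimeError(f"POST failed even after authenticating: {resp.status}")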
@@ -1040,7 +1073,7 @@ instance's greatest bottleneck.
         async with self.session.get(urljoin(self._base_url, "/login.php"), params=params) as request:
             login_page = soupify(await request.read())
 
-        login_form = cast(Optional[Tag], login_page.find("form", attrs={"name": "login_form"}))
+        login_form = cast(Optional[Tag], login_page.find("form", attrs={"name": "formlogin"}))
         if login_form is None:
             raise CrawlError("Could not find the login form! Specified client id might be invalid.")

@@ -1050,12 +1083,14 @@ instance's greatest bottleneck.
 
         username, password = await self._auth.credentials()
 
-        login_form_data = aiohttp.FormData()
-        login_form_data.add_field('login_form/input_3/input_4', username)
-        login_form_data.add_field('login_form/input_3/input_5', password)
+        login_data = {
+            "username": username,
+            "password": password,
+            "cmd[doStandardAuthentication]": "Login",
+        }
 
         # do the actual login
-        async with self.session.post(urljoin(self._base_url, login_url), data=login_form_data) as request:
+        async with self.session.post(urljoin(self._base_url, login_url), data=login_data) as request:
             soup = IliasSoup(soupify(await request.read()), str(request.url))
             if not IliasPage.is_logged_in(soup):
                 self._auth.invalidate_credentials()
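Both sides of this hunk start from the same move: locate the login form by its name attribute and submit credentials to its action URL; only the form name and field layout differ between ILIAS versions. A miniature sketch of the lookup (the HTML snippet is invented):

from bs4 import BeautifulSoup

html = '<form name="formlogin" action="ilias.php?cmd=post&baseClass=ilStartUpGUI"></form>'
page = BeautifulSoup(html, "html.parser")

# Accept either form name, depending on the ILIAS version.
form = page.find("form", attrs={"name": "formlogin"}) or \
    page.find("form", attrs={"name": "login_form"})
if form is None:
    raise RuntimeError("Could not find the login form! Specified client id might be invalid.")
print(form["action"])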
@@ -97,8 +97,7 @@ class IliasElementType(Enum):
     BOOKING = "booking"
     COURSE = "course"
     DCL_RECORD_LIST = "dcl_record_list"
-    EXERCISE_OVERVIEW = "exercise_overview"
-    EXERCISE = "exercise"  # own submitted files
+    EXERCISE = "exercise"
     EXERCISE_FILES = "exercise_files"  # own submitted files
     FILE = "file"
     FOLDER = "folder"
@@ -121,7 +120,6 @@ class IliasElementType(Enum):
     SCORM_LEARNING_MODULE = "scorm_learning_module"
     SURVEY = "survey"
     TEST = "test"  # an online test. Will be ignored currently.
-    WIKI = "wiki"
 
     def matcher(self) -> IliasElementMatcher:
         match self:

@@ -142,15 +140,13 @@ class IliasElementType(Enum):
                     TypeMatcher.query("cmdclass=ildclrecordlistgui")
                 )
             case IliasElementType.EXERCISE:
-                return TypeMatcher.never()
-            case IliasElementType.EXERCISE_FILES:
-                return TypeMatcher.never()
-            case IliasElementType.EXERCISE_OVERVIEW:
                 return TypeMatcher.any(
                     TypeMatcher.path("/exc/"),
                     TypeMatcher.path("_exc_"),
                     TypeMatcher.img_src("_exc.svg"),
                 )
+            case IliasElementType.EXERCISE_FILES:
+                return TypeMatcher.never()
             case IliasElementType.FILE:
                 return TypeMatcher.any(
                     TypeMatcher.query("cmd=sendfile"),

@@ -247,11 +243,6 @@ class IliasElementType(Enum):
                     TypeMatcher.query("cmdclass=iltestscreengui"),
                     TypeMatcher.img_src("_tst.svg")
                 )
-            case IliasElementType.WIKI:
-                return TypeMatcher.any(
-                    TypeMatcher.query("baseClass=ilwikihandlergui"),
-                    TypeMatcher.img_src("wiki.svg")
-                )
 
         raise CrawlWarning(f"Unknown matcher {self}")
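The matcher cases above decide an element's type from its link alone. Stripped of PFERD's TypeMatcher helpers, the idea reduces to ordered checks on the URL and the icon path, roughly like this (types and URLs invented):

from enum import Enum

class ElementType(Enum):
    EXERCISE = "exercise"
    FILE = "file"

def detect_type(href: str, img_src: str) -> ElementType | None:
    # Ordered checks: the first match wins, mirroring the match/case chain above.
    if "/exc/" in href or "_exc_" in href or img_src.endswith("_exc.svg"):
        return ElementType.EXERCISE
    if "cmd=sendfile" in href:
        return ElementType.FILE
    return None

print(detect_type("https://ilias.example/ilias.php?cmd=sendfile&ref_id=5", "icon_file.svg"))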
@@ -280,7 +271,6 @@ class IliasPageElement:
             r"mcst/(?P<id>\d+)",  # mediacast
             r"pg/(?P<id>(\d|_)+)",  # page?
             r"svy/(?P<id>\d+)",  # survey
-            r"sess/(?P<id>\d+)",  # session
             r"webr/(?P<id>\d+)",  # web referene (link)
             r"thr_pk=(?P<id>\d+)",  # forums
             r"ref_id=(?P<id>\d+)",
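These alternatives feed an id-extraction routine that tries URL patterns in order and takes the first named "id" group that matches; a compact sketch:

import re

patterns = [
    r"thr_pk=(?P<id>\d+)",  # forums
    r"ref_id=(?P<id>\d+)",
]
url = "https://ilias.example/ilias.php?ref_id=1234&cmd=view"
for pattern in patterns:
    if match := re.search(pattern, url):
        print(match.group("id"))  # -> 1234
        break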
@@ -499,31 +489,79 @@ class IliasPage:
                 return url
         return None
 
-    def get_forum_export_url(self) -> Optional[str]:
-        forum_link = self._soup.select_one("#tab_forums_threads > a")
-        if not forum_link:
-            log.explain("Found no forum link")
-            return None
-
-        base_url = self._abs_url_from_link(forum_link)
-        base_url = re.sub(r"cmd=\w+", "cmd=post", base_url)
-        base_url = re.sub(r"cmdClass=\w+", "cmdClass=ilExportGUI", base_url)
-
-        rtoken_form = cast(
-            Optional[Tag],
-            self._soup.find("form", attrs={"action": lambda x: x is not None and "rtoken=" in x})
-        )
-        if not rtoken_form:
-            log.explain("Found no rtoken anywhere")
-            return None
-        match = cast(re.Match[str], re.search(r"rtoken=(\w+)", str(rtoken_form.attrs["action"])))
-        rtoken = match.group(1)
-
-        base_url = base_url + "&rtoken=" + rtoken
-
-        return base_url
+    def get_forum_entries(self) -> list[IliasPageElement]:
+        form = self._get_forum_form()
+        if not form:
+            return []
+        threads = []
+
+        for row in cast(list[Tag], form.select("table > tbody > tr")):
+            url_tag = cast(
+                Optional[Tag],
+                row.find(name="a", attrs={"href": lambda x: x is not None and "cmd=viewthread" in x.lower()})
+            )
+            if url_tag is None:
+                log.explain(f"Skipping row without URL: {row}")
+                continue
+            name = url_tag.get_text().strip()
+            columns = [td.get_text().strip() for td in cast(list[Tag], row.find_all(name="td"))]
+            potential_dates_opt = [IliasPage._find_date_in_text(column) for column in columns]
+            potential_dates = [x for x in potential_dates_opt if x is not None]
+            mtime = max(potential_dates) if potential_dates else None
+
+            threads.append(IliasPageElement.create_new(
+                IliasElementType.FORUM_THREAD,
+                self._abs_url_from_link(url_tag),
+                name,
+                mtime=mtime
+            ))
+
+        return threads
+
+    def get_download_forum_data(self, thread_ids: list[str]) -> Optional[IliasDownloadForumData]:
+        form = cast(Optional[Tag], self._soup.find(
+            "form",
+            attrs={"action": lambda x: x is not None and "fallbackCmd=showThreads" in x}
+        ))
+        if not form:
+            return None
+        post_url = self._abs_url_from_relative(cast(str, form["action"]))
+
+        log.explain(f"Fetching forum threads {thread_ids}")
+
+        form_data: Dict[str, Union[str, list[str]]] = {
+            "thread_ids[]": cast(list[str], thread_ids),
+            "selected_cmd2": "html",
+            "select_cmd2": "Ausführen",
+            "selected_cmd": "",
+        }
+
+        return IliasDownloadForumData(url=post_url, form_data=form_data, empty=len(thread_ids) == 0)
+
+    def _get_forum_form(self) -> Optional[Tag]:
+        return cast(Optional[Tag], self._soup.find(
+            "form",
+            attrs={"action": lambda x: x is not None and "fallbackCmd=showThreads" in x}
+        ))
 
     def get_next_stage_element(self) -> Optional[IliasPageElement]:
+        if self._is_forum_page():
+            if "trows=" in self._page_url:
+                log.explain("Manual row override detected, accepting it as good")
+                return None
+            log.explain("Requesting *all* forum threads")
+            thread_count = self._get_forum_thread_count()
+            if thread_count is not None and thread_count > 400:
+                log.warn(
+                    "Forum has more than 400 threads, fetching all threads will take a while. "
+                    "You might need to adjust your http_timeout config option."
+                )
+
+            # Fetch at least 400 in case we detect it wrong
+            if thread_count is not None and thread_count < 400:
+                thread_count = 400
+
+            return self._get_show_max_forum_entries_per_page_url(thread_count)
         if self._is_ilias_opencast_embedding():
             log.explain("Unwrapping opencast embedding")
             return self.get_child_elements()[0]
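The removed get_forum_export_url leans on one trick that generalizes: ILIAS embeds a request token (rtoken) in form actions, and any form on the page can donate it. A standalone sketch with a made-up snippet of HTML:

import re

from bs4 import BeautifulSoup

html = '<form action="ilias.php?baseClass=ilRepositoryGUI&rtoken=abc123&cmd=post"></form>'
soup = BeautifulSoup(html, "html.parser")

form = soup.find("form", attrs={"action": lambda x: x is not None and "rtoken=" in x})
assert form is not None, "Found no rtoken anywhere"

match = re.search(r"rtoken=(\w+)", str(form.attrs["action"]))
assert match is not None
print(match.group(1))  # -> abc123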
@@ -533,8 +571,6 @@ class IliasPage:
         if self._contains_collapsed_future_meetings():
             log.explain("Requesting *all* future meetings")
             return self._uncollapse_future_meetings_url()
-        if self._is_exercise_not_all_shown():
-            return self._show_all_exercises()
         if not self._is_content_tab_selected():
             if self._page_type != IliasElementType.INFO_TAB:
                 log.explain("Selecting content tab")

@@ -543,6 +579,11 @@ class IliasPage:
                 log.explain("Crawling info tab, skipping content select")
             return None
 
+    def _is_forum_page(self) -> bool:
+        if perma_link := self.get_permalink():
+            return "/frm/" in perma_link
+        return False
+
     def _is_video_player(self) -> bool:
         return "paella_config_file" in str(self._soup)
@@ -566,7 +607,7 @@ class IliasPage:
 
     def _is_exercise_file(self) -> bool:
         # we know it from before
-        if self._page_type == IliasElementType.EXERCISE_OVERVIEW:
+        if self._page_type == IliasElementType.EXERCISE:
             return True
 
         # We have no suitable parent - let's guesss

@@ -603,17 +644,6 @@ class IliasPage:
         link = self._abs_url_from_link(element)
         return IliasPageElement.create_new(IliasElementType.FOLDER, link, "show all meetings")
 
-    def _is_exercise_not_all_shown(self) -> bool:
-        return (self._page_type == IliasElementType.EXERCISE_OVERVIEW
-                and "mode=all" not in self._page_url.lower())
-
-    def _show_all_exercises(self) -> Optional[IliasPageElement]:
-        return IliasPageElement.create_new(
-            IliasElementType.EXERCISE_OVERVIEW,
-            self._page_url + "&mode=all",
-            "show all exercises"
-        )
-
     def _is_content_tab_selected(self) -> bool:
         return self._select_content_page_url() is None
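The deleted helpers encode a small but reusable trick: ILIAS paginates some listings, and appending mode=all to the current page URL requests the unpaginated variant. In isolation:

page_url = "https://ilias.example/ilias.php?baseClass=ilExerciseHandlerGUI&ref_id=42"

# Only rewrite the URL if the override is not already present.
if "mode=all" not in page_url.lower():
    page_url += "&mode=all"
print(page_url)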
@@ -879,62 +909,15 @@ class IliasPage:
 
     def _find_exercise_entries(self) -> list[IliasPageElement]:
         if self._soup.find(id="tab_submission"):
-            log.explain("Found submission tab. This is an exercise detail or files page")
-            if self._soup.select_one("#tab_submission.active") is None:
-                log.explain(" This is a details page")
-                return self._find_exercise_entries_detail_page()
-            else:
-                log.explain(" This is a files page")
-                return self._find_exercise_entries_files_page()
+            log.explain("Found submission tab. This is an exercise detail page")
+            return self._find_exercise_entries_detail_page()
 
         log.explain("Found no submission tab. This is an exercise root page")
         return self._find_exercise_entries_root_page()
 
-    def _find_exercise_entries_detail_page(self) -> list[IliasPageElement]:
-        results: list[IliasPageElement] = []
-
-        if link := cast(Optional[Tag], self._soup.select_one("#tab_submission > a")):
-            results.append(IliasPageElement.create_new(
-                IliasElementType.EXERCISE_FILES,
-                self._abs_url_from_link(link),
-                "Submission"
-            ))
-        else:
-            log.explain("Found no submission link for exercise, maybe it has not started yet?")
-
-        # Find all download links in the container (this will contain all the *feedback* files)
-        download_links = cast(list[Tag], self._soup.find_all(
-            name="a",
-            # download links contain the given command class
-            attrs={"href": lambda x: x is not None and "cmd=download" in x},
-            text="Download"
-        ))
-
-        for link in download_links:
-            parent_row: Tag = cast(Tag, link.find_parent(
-                attrs={"class": lambda x: x is not None and "row" in x}))
-            name_tag = cast(Optional[Tag], parent_row.find(name="div"))
-
-            if not name_tag:
-                log.warn("Could not find name tag for exercise entry")
-                _unexpected_html_warning()
-                continue
-
-            name = _sanitize_path_name(name_tag.get_text().strip())
-            log.explain(f"Found exercise detail entry {name!r}")
-
-            results.append(IliasPageElement.create_new(
-                IliasElementType.FILE,
-                self._abs_url_from_link(link),
-                name
-            ))
-
-        return results
-
-    def _find_exercise_entries_files_page(self) -> list[IliasPageElement]:
+    def _find_exercise_entries_detail_page(self) -> list[IliasPageElement]:
         results: list[IliasPageElement] = []
 
-        # Find all download links in the container
+        # Find all download links in the container (this will contain all the files)
         download_links = cast(list[Tag], self._soup.find_all(
             name="a",
             # download links contain the given command class

@@ -947,7 +930,7 @@ class IliasPage:
             children = cast(list[Tag], parent_row.find_all("td"))
 
             name = _sanitize_path_name(children[1].get_text().strip())
-            log.explain(f"Found exercise file entry {name!r}")
+            log.explain(f"Found exercise detail entry {name!r}")
 
             date = None
             for child in reversed(children):

@@ -955,7 +938,7 @@ class IliasPage:
                 if date is not None:
                     break
             if date is None:
-                log.warn(f"Date parsing failed for exercise file entry {name!r}")
+                log.warn(f"Date parsing failed for exercise entry {name!r}")
 
             results.append(IliasPageElement.create_new(
                 IliasElementType.FILE,
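Both versions of this scraper rely on the same bs4 idiom: find_all with a callable attribute filter selects anchors by a substring of their href. Reduced to a runnable miniature (HTML invented):

from bs4 import BeautifulSoup

html = """
<a href="ilias.php?cmd=download&file=1">Download</a>
<a href="ilias.php?cmd=view&file=2">View</a>
"""
soup = BeautifulSoup(html, "html.parser")

download_links = soup.find_all(
    name="a",
    attrs={"href": lambda x: x is not None and "cmd=download" in x},
    string="Download",
)
print([link["href"] for link in download_links])  # only the download anchor matches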
@@ -969,32 +952,66 @@ class IliasPage:
     def _find_exercise_entries_root_page(self) -> list[IliasPageElement]:
         results: list[IliasPageElement] = []
 
-        content_tab = cast(Optional[Tag], self._soup.find(id="ilContentContainer"))
-        if not content_tab:
-            log.warn("Could not find content tab in exercise overview page")
-            _unexpected_html_warning()
-            return []
-
-        exercise_links = content_tab.select(".il-item-title a")
-        for exercise in cast(list[Tag], exercise_links):
-            if "href" not in exercise.attrs:
-                continue
-            href = exercise.attrs["href"]
-            if type(href) is not str:
-                continue
-            if "ass_id=" not in href or "cmdclass=ilassignmentpresentationgui" not in href.lower():
-                continue
-
-            name = _sanitize_path_name(exercise.get_text().strip())
-            results.append(IliasPageElement.create_new(
-                IliasElementType.EXERCISE,
-                self._abs_url_from_link(exercise),
-                name
-            ))
-
-        for result in results:
-            log.explain(f"Found exercise {result.name!r}")
+        # Each assignment is in an accordion container
+        assignment_containers: list[Tag] = self._soup.select(".il_VAccordionInnerContainer")
+
+        for container in assignment_containers:
+            # Fetch the container name out of the header to use it in the path
+            container_name = cast(Tag, container.select_one(".ilAssignmentHeader")).get_text().strip()
+            log.explain(f"Found exercise container {container_name!r}")
+
+            # Find all download links in the container (this will contain all the files)
+            files = cast(list[Tag], container.find_all(
+                name="a",
+                # download links contain the given command class
+                attrs={"href": lambda x: x is not None and "cmdClass=ilexsubmissiongui" in x},
+                text="Download"
+            ))
+
+            # Grab each file as you now have the link
+            for file_link in files:
+                # Two divs, side by side. Left is the name, right is the link ==> get left
+                # sibling
+                file_name = cast(
+                    Tag,
+                    cast(Tag, file_link.parent).find_previous(name="div")
+                ).get_text().strip()
+                url = self._abs_url_from_link(file_link)
+
+                log.explain(f"Found exercise entry {file_name!r}")
+                results.append(IliasPageElement.create_new(
+                    IliasElementType.FILE,
+                    url,
+                    _sanitize_path_name(container_name) + "/" + _sanitize_path_name(file_name),
+                    mtime=None,  # We do not have any timestamp
+                    skip_sanitize=True
+                ))
+
+            # Find all links to file listings (e.g. "Submitted Files" for groups)
+            file_listings = cast(list[Tag], container.find_all(
+                name="a",
+                # download links contain the given command class
+                attrs={"href": lambda x: x is not None and "cmdclass=ilexsubmissionfilegui" in x.lower()}
+            ))
+
+            # Add each listing as a new element
+            for listing in file_listings:
+                parent_container = cast(Tag, listing.find_parent(
+                    "div", attrs={"class": lambda x: x is not None and "form-group" in x}
+                ))
+                label_container = cast(Tag, parent_container.find(
+                    attrs={"class": lambda x: x is not None and "control-label" in x}
+                ))
+                file_name = label_container.get_text().strip()
+                url = self._abs_url_from_link(listing)
+                log.explain(f"Found exercise detail {file_name!r} at {url}")
+                results.append(IliasPageElement.create_new(
+                    IliasElementType.EXERCISE_FILES,
+                    url,
+                    _sanitize_path_name(container_name) + "/" + _sanitize_path_name(file_name),
+                    None,  # we do not have any timestamp
+                    skip_sanitize=True
+                ))
 
         return results
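The accordion scraper above navigates from a matched link back out to its label ("two divs, side by side"). The corresponding bs4 moves, find_parent and find, in a minimal invented structure:

from bs4 import BeautifulSoup

html = """
<div class="form-group">
  <div class="control-label">sheet03.pdf</div>
  <div><a href="ilias.php?cmdClass=ilexsubmissiongui">Download</a></div>
</div>
"""
soup = BeautifulSoup(html, "html.parser")
link = soup.find("a")

# Walk up to the enclosing row, then back down to the label next to the link.
row = link.find_parent("div", attrs={"class": lambda x: x is not None and "form-group" in x})
label = row.find(attrs={"class": lambda x: x is not None and "control-label" in x})
print(label.get_text().strip())  # -> sheet03.pdf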
@@ -1110,7 +1127,7 @@ class IliasPage:
 
             videos.append(IliasPageElement.create_new(
                 typ=IliasElementType.MOB_VIDEO,
-                url=url,
+                url=self._abs_url_from_relative(url),
                 name=_sanitize_path_name(title),
                 mtime=None
             ))

@@ -1136,9 +1153,6 @@ class IliasPage:
         else:
             title = f"unknown video {figure}"
 
-        if url:
-            url = self._abs_url_from_relative(url)
-
         return url, title
 
     def _is_in_expanded_meeting(self, tag: Tag) -> bool:
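For the change above: _abs_url_from_relative resolves a page-relative link against the current page URL, which is presumably a urljoin-style resolution at its core (sketch with an invented URL):

from urllib.parse import urljoin

page_url = "https://ilias.example/ilias.php?baseClass=ilMediaCastHandlerGUI&ref_id=123"
print(urljoin(page_url, "data/mobs/mm_1234/video.mp4"))
# -> https://ilias.example/data/mobs/mm_1234/video.mp4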
@@ -1,2 +1,2 @@
 NAME = "PFERD"
-VERSION = "3.8.2"
+VERSION = "3.8.0"