Mirror of https://github.com/Garmelon/PFERD.git, synced 2025-08-13 11:02:41 +02:00
Compare commits: fix/exerci...debug/dump
2 Commits
| Author | SHA1 | Date |
|---|---|---|
| | f0e56a8e12 | |
| | 8caad0008d | |
@@ -45,8 +45,8 @@ def load(
     load_crawler(args, section)
 
     section["type"] = COMMAND_NAME
-    if args.ilias_url is not None:
-        section["base_url"] = args.ilias_url
+    if args.base_url is not None:
+        section["base_url"] = args.base_url
     if args.client_id is not None:
         section["client_id"] = args.client_id
 
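The first hunk swaps `args.ilias_url` for `args.base_url` when the CLI arguments are copied into the crawler's config section. Below is a minimal sketch of that pattern, assuming a plain argparse/configparser setup; `COMMAND_NAME`, the section name, and the flag spellings are placeholders for illustration, not PFERD's actual definitions.

```python
# Minimal sketch (assumed argparse/configparser setup, not PFERD's actual code):
# copy CLI options into a crawler config section, as in the hunk above.
import argparse
import configparser

COMMAND_NAME = "kit-ilias-web"  # placeholder value

parser = argparse.ArgumentParser()
parser.add_argument("--base-url", dest="base_url")    # the code read args.ilias_url before this change
parser.add_argument("--client-id", dest="client_id")
args = parser.parse_args(["--base-url", "https://ilias.example.edu", "--client-id", "pferd"])

config = configparser.ConfigParser()
config.add_section("crawl:ilias")  # placeholder section name
section = config["crawl:ilias"]

# Same pattern as the diff: only copy values the user actually supplied.
section["type"] = COMMAND_NAME
if args.base_url is not None:
    section["base_url"] = args.base_url
if args.client_id is not None:
    section["client_id"] = args.client_id

print(dict(section))
```

Because of the `if ... is not None` checks, only options the user actually passed are written into the section, so values coming from an existing config file are not clobbered.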
@@ -983,6 +983,8 @@ instance's greatest bottleneck.
         soup = IliasSoup(soupify(await request.read()), str(request.url))
         if IliasPage.is_logged_in(soup):
             return self._verify_page(soup, url, root_page_allowed)
+        with open("/tmp/ilias_debug.html", "w") as f:
+            f.write(str(soup.soup.prettify()))
         raise CrawlError(f"get_page failed even after authenticating on {url!r}")
 
     @staticmethod
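The second hunk is a debugging aid: if the fetched page still does not look logged in after authenticating, the prettified HTML is dumped to /tmp/ilias_debug.html right before the CrawlError is raised, so the failing page can be inspected offline. A rough stand-alone sketch of the same idea, using plain BeautifulSoup instead of PFERD's IliasSoup/IliasPage helpers; the `check_logged_in` heuristic and the `get_page_or_dump` name are made up for this example.

```python
# Stand-alone sketch (assumed, generic BeautifulSoup code, not PFERD's helpers):
# dump the fetched HTML to a file before failing, as the debug commit does.
from pathlib import Path
from bs4 import BeautifulSoup


def check_logged_in(html: str) -> bool:
    # Placeholder heuristic; PFERD's real check lives in IliasPage.is_logged_in.
    return "logout" in html.lower()


def get_page_or_dump(html: str, url: str, dump_path: str = "/tmp/ilias_debug.html") -> BeautifulSoup:
    soup = BeautifulSoup(html, "html.parser")
    if check_logged_in(html):
        return soup
    # Same idea as the added diff lines: persist the prettified markup for
    # offline inspection, then fail loudly.
    Path(dump_path).write_text(soup.prettify(), encoding="utf-8")
    raise RuntimeError(f"get_page failed even after authenticating on {url!r}")


# Example: a page without a logout link triggers the dump and the error.
try:
    get_page_or_dump("<html><body>Please log in</body></html>", "https://ilias.example.edu")
except RuntimeError as err:
    print(err)
```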