Fix a few lint errors and pferd quirks in sync_url

I-Al-Istannen 2020-09-28 19:42:59 +02:00
parent aaa6a2b6a4
commit 74ea039458

sync_url.py

@@ -1,14 +1,19 @@
 #!/usr/bin/env python
+"""
+A simple script to download a course by name from ILIAS.
+"""
 import argparse
 from pathlib import Path
-from urllib.parse import urlparse, parse_qs
+from urllib.parse import parse_qs, urlparse
 from PFERD import Pferd
 from PFERD.cookie_jar import CookieJar
-from PFERD.utils import to_path
 from PFERD.ilias.authenticators import KitShibbolethAuthenticator
 from PFERD.ilias.crawler import IliasCrawler
+from PFERD.utils import to_path
 def main() -> None:
     parser = argparse.ArgumentParser()
@@ -18,29 +23,36 @@ def main() -> None:
     parser.add_argument('folder', nargs='?', default=None, help="Folder to put stuff into")
     args = parser.parse_args()
-    pferd = Pferd(Path(__file__).parent, test_run=args.test_run)
-    pferd.enable_logging()
     # parse provided course URL
     url = urlparse(args.url)
     query = parse_qs(url.query)
-    id = int(query['ref_id'][0])
+    course_id = query['ref_id'][0]
-    if args.folder is None:
+    if args.folder is not None:
+        folder = args.folder
+        # Initialize pferd at the *parent of the passed folder*
+        # This is needed so Pferd's internal protections against escaping the working directory
+        # do not trigger (e.g. if somebody names a file in ILIAS '../../bad thing.txt')
+        pferd = Pferd(Path(Path(__file__).parent, folder).parent, test_run=args.test_run)
+    else:
         # fetch course name from ilias
         cookie_jar = CookieJar(to_path(args.cookies) if args.cookies else None)
         session = cookie_jar.create_session()
         authenticator = KitShibbolethAuthenticator()
-        crawler = IliasCrawler(url.scheme + '://' + url.netloc, session, authenticator, lambda x, y: True)
+        crawler = IliasCrawler(url.scheme + '://' + url.netloc, session,
+                               authenticator, lambda x, y: True)
         cookie_jar.load_cookies()
         folder = crawler.find_element_name(args.url)
         cookie_jar.save_cookies()
-    else:
-        folder = args.folder
+        # Initialize pferd at the location of the script
+        pferd = Pferd(Path(__file__).parent, test_run=args.test_run)
+    pferd.enable_logging()
     # fetch
-    pferd.ilias_kit(target=folder, course_id=str(id), cookies=args.cookies)
+    pferd.ilias_kit(target=folder, course_id=course_id, cookies=args.cookies)
 if __name__ == "__main__":
     main()
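
For reference, the course-id change drops id = int(query['ref_id'][0]), which shadowed the builtin id only to be converted back with str(id) later, in favour of a plain string course_id. A minimal, self-contained sketch of that extraction, run against a made-up ILIAS URL (host and extra query parameters are assumptions, not taken from the commit):

    from urllib.parse import parse_qs, urlparse

    # Hypothetical course URL; only the ref_id query parameter matters here.
    example_url = "https://ilias.example.edu/ilias.php?ref_id=1234567&cmdClass=ilrepositorygui"

    url = urlparse(example_url)
    query = parse_qs(url.query)      # parse_qs maps each parameter name to a list of values
    course_id = query['ref_id'][0]   # kept as a string, as the patched script now does

    print(course_id)                 # -> 1234567

The "parent of the passed folder" initialisation is plain pathlib arithmetic; a small sketch with invented paths (script location and folder name are placeholders for illustration):

    from pathlib import Path

    script_dir = Path("/opt/pferd")   # stand-in for Path(__file__).parent
    folder = "My Course"              # stand-in for the positional folder argument

    # Pferd is rooted at the parent of the passed folder instead of the script
    # directory, matching the comment in the diff about not tripping Pferd's
    # working-directory escape protection.
    base_dir = Path(script_dir, folder).parent
    print(base_dir)                   # -> /opt/pferd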