Mirror of https://github.com/Garmelon/PFERD.git (synced 2023-12-21 10:23:01 +01:00)
Fix authenticator and crawler names
Now, the "auth:" and "crawl:" parts are considered part of the name. This fixes crawlers not being able to find their authenticators.
This commit is contained in:
parent a6fdf05ee9
commit 595de88d96
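
For context, here is a minimal sketch of why keeping the "auth:" prefix in the section names makes the crawler's lookup succeed. It is not taken from the repository: the section names, the "local"/"simple" types and the "auth:example" value are made up, and it assumes the crawler's "auth" entry references the authenticator section by its full name.

from configparser import ConfigParser

# Hypothetical config for illustration only; section names and types are made up.
CONFIG = """
[crawler:example]
type = local
auth = auth:example

[auth:example]
type = simple
"""

parser = ConfigParser()
parser.read_string(CONFIG)

# After this commit the authenticator sections keep their full name
# ("auth:example") instead of having the "auth:" prefix stripped ...
authenticators = {
    name: proxy for name, proxy in parser.items() if name.startswith("auth:")
}

# ... so the crawler section's "auth" value can be looked up verbatim.
value = parser["crawler:example"]["auth"]     # "auth:example"
assert authenticators.get(value) is not None  # lookup succeeds

# Before this commit the keys were stripped to "example" while the lookup
# used f"auth:{value}", so the two sides never matched.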
@@ -139,19 +139,17 @@ class Config:

     def crawler_sections(self) -> List[Tuple[str, SectionProxy]]:
         result = []
-        for section_name, section_proxy in self._parser.items():
-            if section_name.startswith("crawler:"):
-                crawler_name = section_name[8:]
-                result.append((crawler_name, section_proxy))
+        for name, proxy in self._parser.items():
+            if name.startswith("crawler:"):
+                result.append((name, proxy))

         return result

     def authenticator_sections(self) -> List[Tuple[str, SectionProxy]]:
         result = []
-        for section_name, section_proxy in self._parser.items():
-            if section_name.startswith("auth:"):
-                crawler_name = section_name[5:]
-                result.append((crawler_name, section_proxy))
+        for name, proxy in self._parser.items():
+            if name.startswith("auth:"):
+                result.append((name, proxy))

         return result

@@ -180,7 +180,7 @@ class CrawlerSection(Section):
         value = self.s.get("auth")
         if value is None:
             self.missing_value("auth")
-        auth = authenticators.get(f"auth:{value}")
+        auth = authenticators.get(value)
         if auth is None:
             self.invalid_value("auth", value, "No such auth section exists")
         return auth
@@ -25,7 +25,7 @@ class Pferd:
     def _load_authenticators(self) -> None:
         abort = False
         for name, section in self._config.authenticator_sections():
-            print(f"[bold bright_cyan]Loading[/] auth:{escape(name)}")
+            print(f"[bold bright_cyan]Loading[/] {escape(name)}")
             authenticator_type = section.get("type")
             authenticator_constructor = AUTHENTICATORS.get(authenticator_type)
             if authenticator_constructor is None:
@@ -48,7 +48,7 @@ class Pferd:
     def _load_crawlers(self) -> None:
         abort = False
         for name, section in self._config.crawler_sections():
-            print(f"[bold bright_cyan]Loading[/] crawler:{escape(name)}")
+            print(f"[bold bright_cyan]Loading[/] {escape(name)}")
             crawler_type = section.get("type")
             crawler_constructor = CRAWLERS.get(crawler_type)
             if crawler_constructor is None:
@@ -79,6 +79,6 @@ class Pferd:

         for name, crawler in self._crawlers.items():
             print()
-            print(f"[bold bright_cyan]Running[/] crawler:{escape(name)}")
+            print(f"[bold bright_cyan]Running[/] {escape(name)}")

             await crawler.run()