Mirror of https://github.com/Garmelon/PFERD.git (synced 2023-12-21 10:23:01 +01:00)

Compare commits: db86d23989 ... debug/mtim (1 commit)

Commit: 03efa17cf1
@@ -92,9 +92,6 @@ common to all crawlers:
   load for the crawl target. (Default: `0.0`)
 - `windows_paths`: Whether PFERD should find alternative names for paths that
   are invalid on Windows. (Default: `yes` on Windows, `no` otherwise)
-- `aliases`: List of strings that are considered as an alias when invoking with
-  the `--crawler` or `-C` flag. If there is more than one crawl section with
-  the same aliases all are selected. Thereby, you can group different crawlers.
 
 Some crawlers may also require credentials for authentication. To configure how
 the crawler obtains its credentials, the `auth` option is used. It is set to the
@@ -109,7 +106,6 @@ username = foo
 password = bar
 
 [crawl:something]
-aliases = [sth, some]
 type = some-complex-crawler
 auth = auth:example
 on_conflict = no-delete
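For context, the alias mechanism touched by this compare works as follows: a `crawl:` section may declare an `aliases` list, and a name passed via `--crawler`/`-C` selects every section whose list contains that name. The sketch below mirrors the removed `_find_crawlers_by_alias` helper shown further down in this compare; the section data and the standalone function are hypothetical stand-ins for PFERD's `Config` handling, not its actual API.

```python
from typing import Dict, List, Set

# Hypothetical, simplified view of parsed config data:
# section name (with "crawl:" prefix) -> aliases declared in that section.
crawl_sections: Dict[str, List[str]] = {
    "crawl:something": ["sth", "some"],
    "crawl:other": ["some"],
}

def find_crawlers_by_alias(alias: str) -> Set[str]:
    """Collect every crawl section that lists `alias` in its aliases option."""
    return {name for name, aliases in crawl_sections.items() if alias in aliases}

print(find_crawlers_by_alias("some"))  # both sections match the shared alias
print(find_crawlers_by_alias("sth"))   # only crawl:something matches
```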
@@ -514,8 +514,11 @@ class IliasPage:
                 f"td.std:nth-child({index})"
             ).getText().strip()
             if re.search(r"\d+\.\d+.\d+ - \d+:\d+", modification_string):
+                log.explain(f"Converting {modification_string!r}")
                 modification_time = datetime.strptime(modification_string, "%d.%m.%Y - %H:%M")
                 break
+            else:
+                log.explain(f"Date has wrong format: {modification_string!r}")
 
         if modification_time is None:
             log.warn(f"Could not determine upload time for {link}")
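The added `log.explain` calls make the date handling observable: the modification column is only converted when it matches the expected `DD.MM.YYYY - HH:MM` pattern, otherwise the wrong format is now logged. A self-contained sketch of that check and conversion, using the same regex and format string as the hunk above (the sample strings are made up):

```python
import re
from datetime import datetime
from typing import Optional

def parse_modification_time(modification_string: str) -> Optional[datetime]:
    """Parse an ILIAS-style 'DD.MM.YYYY - HH:MM' date column, as in IliasPage."""
    if re.search(r"\d+\.\d+.\d+ - \d+:\d+", modification_string):
        return datetime.strptime(modification_string, "%d.%m.%Y - %H:%M")
    return None  # wrong format, caller falls back to a warning

print(parse_modification_time("08.04.2022 - 09:45"))  # 2022-04-08 09:45:00
print(parse_modification_time("Today, 09:45"))        # None (wrong format)
```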
@@ -415,6 +415,7 @@ class OutputDirectory:
 
     def _update_metadata(self, info: DownloadInfo) -> None:
         if mtime := info.heuristics.mtime:
+            log.explain(f"Setting mtime to {mtime}")
             mtimestamp = mtime.timestamp()
             os.utime(info.local_path, times=(mtimestamp, mtimestamp))
 
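`_update_metadata` applies the detected modification time to the downloaded file by converting the `datetime` to a POSIX timestamp and passing it as both access and modification time to `os.utime`. A standalone sketch of that step, outside PFERD's classes (the file path and timestamp are made up):

```python
import os
from datetime import datetime
from pathlib import Path

# Hypothetical downloaded file and its remote modification time.
local_path = Path("example.txt")
local_path.write_text("downloaded content")
mtime = datetime(2022, 4, 8, 9, 45)

# Same approach as OutputDirectory._update_metadata: use the timestamp
# for both atime and mtime.
mtimestamp = mtime.timestamp()
os.utime(local_path, times=(mtimestamp, mtimestamp))

print(datetime.fromtimestamp(local_path.stat().st_mtime))  # 2022-04-08 09:45:00
```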
@@ -1,5 +1,5 @@
 from pathlib import Path
-from typing import Dict, List, Optional, Set
+from typing import Dict, List, Optional
 
 from rich.markup import escape
 
@@ -43,24 +43,16 @@ class Pferd:
 
         crawl_sections = [name for name, _ in config.crawl_sections()]
 
-        crawlers_to_run = set()  # With crawl: prefix
+        crawlers_to_run = []  # With crawl: prefix
         unknown_names = []  # Without crawl: prefix
 
         for name in cli_crawlers:
             section_name = f"crawl:{name}"
             if section_name in crawl_sections:
                 log.explain(f"Crawler section named {section_name!r} exists")
-                crawlers_to_run.add(section_name)
-            # interprete name as alias of a crawler
-            alias_names = self._find_crawlers_by_alias(name, config)
-            if alias_names:
-                crawlers_to_run.update(alias_names)
-                log.explain_topic(f"Crawler alias {name!r} found corresponding crawler sections:")
-                for alias_name in alias_names:
-                    log.explain(f"Crawler section named {alias_name!r} with alias {name!r} exists")
-
-            if not section_name in crawl_sections and not alias_names:
-                log.explain(f"There's neither a crawler section named {section_name!r} nor does a crawler with alias {name!r} exist.")
+                crawlers_to_run.append(section_name)
+            else:
+                log.explain(f"There's no crawler section named {section_name!r}")
                 unknown_names.append(name)
 
         if unknown_names:
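The change from a set to a list here is behavioral, not cosmetic: a list keeps the selected sections in the order they were named on the command line and keeps duplicates, while a set deduplicates and has no guaranteed iteration order. A quick illustration with hypothetical section names:

```python
# Hypothetical crawler sections selected from the command line, in CLI order.
cli_selection = ["crawl:ilias", "crawl:sync", "crawl:ilias"]

as_list = []
as_set = set()
for name in cli_selection:
    as_list.append(name)  # preserves order and duplicates
    as_set.add(name)      # deduplicates; iteration order is unspecified

print(as_list)         # ['crawl:ilias', 'crawl:sync', 'crawl:ilias']
print(sorted(as_set))  # ['crawl:ilias', 'crawl:sync']
```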
@@ -73,14 +65,6 @@ class Pferd:
 
         return crawlers_to_run
 
-    def _find_crawlers_by_alias(self, alias: str, config: Config) -> Set[str]:
-        alias_names = set()
-        for (section_name, section) in config.crawl_sections():
-            section_aliases = section.get("aliases", [])
-            if alias in section_aliases:
-                alias_names.add(section_name)
-        return alias_names
-
     def _find_crawlers_to_run(
         self,
         config: Config,