Rename local crawler path to target

This commit is contained in:
Joscha 2021-05-15 17:12:25 +02:00
parent b2a2b5999b
commit 868f486922
2 changed files with 6 additions and 6 deletions

View File

@@ -113,7 +113,7 @@ This crawler crawls a local directory. It is really simple and mostly useful for
 testing different setups. The various delay options are meant to make the
 crawler simulate a slower, network-based crawler.
-- `path`: Path to the local directory to crawl. (Required)
+- `target`: Path to the local directory to crawl. (Required)
 - `crawl_delay`: Maximum artificial delay (in seconds) to simulate for crawl
   requests. (Default: 0.0)
 - `download_delay`: Maximum artificial delay (in seconds) to simulate for

View File

@@ -10,10 +10,10 @@ from ..crawler import Crawler, CrawlerSection, anoncritical
 class LocalCrawlerSection(CrawlerSection):
-    def path(self) -> Path:
-        value = self.s.get("path")
+    def target(self) -> Path:
+        value = self.s.get("target")
         if value is None:
-            self.missing_value("path")
+            self.missing_value("target")
         return Path(value).expanduser()

     def crawl_delay(self) -> float:
@@ -48,7 +48,7 @@ class LocalCrawler(Crawler):
     ):
         super().__init__(name, section, config, conductor)
-        self._path = config.working_dir / section.path()
+        self._target = config.working_dir / section.target()
         self._crawl_delay = section.crawl_delay()
         self._download_delay = section.download_delay()
         self._download_speed = section.download_speed()
@@ -59,7 +59,7 @@ class LocalCrawler(Crawler):
         self._block_size = 1024**2  # 1 MiB

     async def crawl(self) -> None:
-        await self._crawl_path(self._path, PurePath())
+        await self._crawl_path(self._target, PurePath())
         if self.error_free:
             await self.cleanup()