Make progress bars easier to use

The crawler now supports two kinds of progress bars: crawl bars and download bars.
This commit is contained in:
Joscha 2021-04-29 13:53:16 +02:00
parent 3ea86d18a0
commit ac3bfd7388
2 changed files with 26 additions and 10 deletions

View File

@ -2,7 +2,8 @@ import configparser
from abc import ABC, abstractmethod from abc import ABC, abstractmethod
from contextlib import asynccontextmanager from contextlib import asynccontextmanager
from pathlib import Path from pathlib import Path
from typing import AsyncIterator, Optional # TODO In Python 3.9 and above, AsyncContextManager is deprecated
from typing import AsyncContextManager, AsyncIterator, Optional
from rich.markup import escape from rich.markup import escape
@ -40,14 +41,27 @@ class Crawler(ABC):
@asynccontextmanager
async def progress_bar(
    self,
    desc: str,
    total: Optional[int] = None,
) -> AsyncIterator[ProgressBar]:
    """Yield a progress bar titled ``desc`` once a limiter slot is free.

    ``total`` is the expected number of steps, or ``None`` when the
    amount of work is not known in advance (e.g. while crawling).
    """
    # Hold a concurrency slot for the whole lifetime of the bar.
    async with self._limiter.limit():
        with self._conductor.progress_bar(desc, total=total) as bar:
            yield bar
def crawl_bar(self, path: Path) -> AsyncContextManager[ProgressBar]:
    """Progress bar for crawling ``path``; the total is unknown.

    Returns the async context manager produced by :meth:`progress_bar`.
    """
    # Escape rich markup in the path so path characters can't inject
    # formatting. Keep the escaped string in its own local so the
    # ``path: Path`` annotation stays truthful.
    escaped = escape(str(path))
    desc = f"[bold magenta]Crawling[/bold magenta] {escaped}"
    return self.progress_bar(desc)
def download_bar(
    self,
    path: Path,
    size: int,
) -> AsyncContextManager[ProgressBar]:
    """Progress bar for downloading ``path`` with a known ``size`` total.

    Returns the async context manager produced by :meth:`progress_bar`.
    """
    # Escape rich markup in the path; bind the result to a new name
    # instead of rebinding the ``path: Path`` parameter to a str.
    escaped = escape(str(path))
    desc = f"[bold green]Downloading[/bold green] {escaped}"
    return self.progress_bar(desc, total=size)
async def run(self) -> None: async def run(self) -> None:
await self._conductor.start() await self._conductor.start()
try: try:

View File

@ -14,11 +14,13 @@ DUMMY_TREE = {
"Blatt_03.pdf": (), "Blatt_03.pdf": (),
"Blatt_04.pdf": (), "Blatt_04.pdf": (),
"Blatt_05.pdf": (), "Blatt_05.pdf": (),
"Blatt_01_Lösung.pdf": (), "Lösungen": {
"Blatt_02_Lösung.pdf": (), "Blatt_01_Lösung.pdf": (),
"Blatt_03_Lösung.pdf": (), "Blatt_02_Lösung.pdf": (),
"Blatt_04_Lösung.pdf": (), "Blatt_03_Lösung.pdf": (),
"Blatt_05_Lösung.pdf": (), "Blatt_04_Lösung.pdf": (),
"Blatt_05_Lösung.pdf": (),
},
}, },
"Vorlesungsfolien": { "Vorlesungsfolien": {
"VL_01.pdf": (), "VL_01.pdf": (),
@ -39,7 +41,7 @@ class DummyCrawler(Crawler):
async def _crawl_entry(self, path: Path, value: Any) -> None: async def _crawl_entry(self, path: Path, value: Any) -> None:
if value == (): if value == ():
n = random.randint(5, 20) n = random.randint(5, 20)
async with self.progress_bar(path, n) as bar: async with self.download_bar(path, n) as bar:
await asyncio.sleep(random.random() / 2) await asyncio.sleep(random.random() / 2)
for i in range(n): for i in range(n):
await asyncio.sleep(0.5) await asyncio.sleep(0.5)
@ -47,7 +49,7 @@ class DummyCrawler(Crawler):
self.print(f"[green]Downloaded {escape(str(path))}") self.print(f"[green]Downloaded {escape(str(path))}")
else: else:
t = random.random() * 2 + 1 t = random.random() * 2 + 1
async with self.progress_bar(path) as bar: async with self.crawl_bar(path) as bar:
await asyncio.sleep(t) await asyncio.sleep(t)
tasks = [self._crawl_entry(path / k, v) for k, v in value.items()] tasks = [self._crawl_entry(path / k, v) for k, v in value.items()]
await asyncio.gather(*tasks) await asyncio.gather(*tasks)