Mirror of https://github.com/Garmelon/PFERD.git
Make progress bars easier to use
The crawler now supports two types of progress bars: a crawl bar and a download bar.
parent 3ea86d18a0
commit ac3bfd7388
@@ -2,7 +2,8 @@ import configparser
 from abc import ABC, abstractmethod
 from contextlib import asynccontextmanager
 from pathlib import Path
-from typing import AsyncIterator, Optional
+# TODO In Python 3.9 and above, AsyncContextManager is deprecated
+from typing import AsyncContextManager, AsyncIterator, Optional
 
 from rich.markup import escape
 
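Regarding the TODO above, a minimal sketch (not part of this commit) of the replacement import, assuming Python 3.9+, where typing.AsyncContextManager is a deprecated alias of the contextlib class:

# Sketch only, not part of the commit; assumes Python 3.9+.
from contextlib import AbstractAsyncContextManager
# The return annotations below could then read
# AbstractAsyncContextManager[ProgressBar] instead of
# AsyncContextManager[ProgressBar].
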
@@ -40,14 +41,27 @@ class Crawler(ABC):
     @asynccontextmanager
     async def progress_bar(
             self,
-            path: Path,
+            desc: str,
             total: Optional[int] = None,
     ) -> AsyncIterator[ProgressBar]:
-        desc = escape(str(path))
         async with self._limiter.limit():
             with self._conductor.progress_bar(desc, total=total) as bar:
                 yield bar
 
+    def crawl_bar(self, path: Path) -> AsyncContextManager[ProgressBar]:
+        path = escape(str(path))
+        desc = f"[bold magenta]Crawling[/bold magenta] {path}"
+        return self.progress_bar(desc)
+
+    def download_bar(
+            self,
+            path: Path,
+            size: int,
+    ) -> AsyncContextManager[ProgressBar]:
+        path = escape(str(path))
+        desc = f"[bold green]Downloading[/bold green] {path}"
+        return self.progress_bar(desc, total=size)
+
     async def run(self) -> None:
         await self._conductor.start()
         try:
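For illustration, a minimal usage sketch of the two new helpers; the subclass and method names below are hypothetical and not part of this commit:

from pathlib import Path

# Hypothetical Crawler subclass: crawl_bar yields an indeterminate bar,
# while download_bar yields a determinate one because it forwards
# total=size to progress_bar.
class MyCrawler(Crawler):
    async def _download_file(self, path: Path, size: int) -> None:
        async with self.download_bar(path, size) as bar:
            ...  # fetch the file, advancing `bar` as data arrives

    async def _crawl_folder(self, path: Path) -> None:
        async with self.crawl_bar(path) as bar:
            ...  # enumerate the folder while the bar is displayed

The DummyCrawler changes below use exactly this pattern.
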
@@ -14,11 +14,13 @@ DUMMY_TREE = {
         "Blatt_03.pdf": (),
         "Blatt_04.pdf": (),
         "Blatt_05.pdf": (),
-        "Blatt_01_Lösung.pdf": (),
-        "Blatt_02_Lösung.pdf": (),
-        "Blatt_03_Lösung.pdf": (),
-        "Blatt_04_Lösung.pdf": (),
-        "Blatt_05_Lösung.pdf": (),
+        "Lösungen": {
+            "Blatt_01_Lösung.pdf": (),
+            "Blatt_02_Lösung.pdf": (),
+            "Blatt_03_Lösung.pdf": (),
+            "Blatt_04_Lösung.pdf": (),
+            "Blatt_05_Lösung.pdf": (),
+        },
     },
     "Vorlesungsfolien": {
         "VL_01.pdf": (),
 
@@ -39,7 +41,7 @@ class DummyCrawler(Crawler):
     async def _crawl_entry(self, path: Path, value: Any) -> None:
         if value == ():
             n = random.randint(5, 20)
-            async with self.progress_bar(path, n) as bar:
+            async with self.download_bar(path, n) as bar:
                 await asyncio.sleep(random.random() / 2)
                 for i in range(n):
                     await asyncio.sleep(0.5)
 
@@ -47,7 +49,7 @@ class DummyCrawler(Crawler):
             self.print(f"[green]Downloaded {escape(str(path))}")
         else:
             t = random.random() * 2 + 1
-            async with self.progress_bar(path) as bar:
+            async with self.crawl_bar(path) as bar:
                 await asyncio.sleep(t)
             tasks = [self._crawl_entry(path / k, v) for k, v in value.items()]
             await asyncio.gather(*tasks)