transition from requests to httpx

be7a
2021-04-23 18:02:57 +02:00
parent c1ab7485e2
commit 44aeb6c2eb
14 changed files with 80 additions and 89 deletions

@@ -7,7 +7,7 @@ from pathlib import Path, PurePath
 from typing import Optional, Tuple, Union
 
 import bs4
-import requests
+import httpx
 
 from .progress import ProgressSettings, progress_for, size_from_headers
@@ -35,41 +35,38 @@ def to_pattern(regex: Regex) -> re.Pattern:
     return re.compile(regex)
 
 
-def soupify(response: requests.Response) -> bs4.BeautifulSoup:
+def soupify(response: httpx.Response) -> bs4.BeautifulSoup:
     """
-    Wrap a requests response in a bs4 object.
+    Wrap a httpx response in a bs4 object.
     """
     return bs4.BeautifulSoup(response.text, "html.parser")
 
 
 def stream_to_path(
-        response: requests.Response,
+        response: httpx.Response,
         target: Path,
         progress_name: Optional[str] = None,
-        chunk_size: int = 1024 ** 2
 ) -> None:
     """
-    Download a requests response content to a file by streaming it. This
-    function avoids excessive memory usage when downloading large files. The
-    chunk_size is in bytes.
+    Download a httpx response content to a file by streaming it. This
+    function avoids excessive memory usage when downloading large files.
 
     If progress_name is None, no progress bar will be shown. Otherwise a progress
     bar will appear, if the download is bigger than an internal threshold.
     """
-    with response:
-        length = size_from_headers(response)
-        if progress_name and length and int(length) > 1024 * 1024 * 10:  # 10 MiB
-            settings: Optional[ProgressSettings] = ProgressSettings(progress_name, length)
-        else:
-            settings = None
+    length = size_from_headers(response)
+    if progress_name and length and int(length) > 1024 * 1024 * 10:  # 10 MiB
+        settings: Optional[ProgressSettings] = ProgressSettings(progress_name, length)
+    else:
+        settings = None
 
-        with open(target, 'wb') as file_descriptor:
-            with progress_for(settings) as progress:
-                for chunk in response.iter_content(chunk_size=chunk_size):
-                    file_descriptor.write(chunk)
-                    progress.advance(len(chunk))
+    with open(target, 'wb') as file_descriptor:
+        with progress_for(settings) as progress:
+            for chunk in response.iter_bytes():
+                file_descriptor.write(chunk)
+                progress.advance(len(chunk))
 
 
 def prompt_yes_no(question: str, default: Optional[bool] = None) -> bool:
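With the "with response:" block removed from stream_to_path, closing the response is now the caller's responsibility, and iter_bytes() only streams lazily if the response was opened in streaming mode. A minimal usage sketch under those assumptions; the URL, file name, and the use of the top-level httpx.stream() helper are illustrative and not part of this commit (the real call sites are in the other changed files, which this page does not show):

from pathlib import Path

import httpx

# Hypothetical call site for the stream_to_path changed above. The import of
# stream_to_path is omitted because the module's path is not shown on this page.
# httpx.stream() leaves the body unread until iterated and closes the response
# when the block exits, which is why stream_to_path itself no longer wraps the
# response in a "with" statement.
with httpx.stream("GET", "https://example.com/large-file.bin") as response:
    stream_to_path(response, Path("large-file.bin"), progress_name="large-file.bin")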