Skip to content

Grabber

Low-level HTTP client with nhentai-compatible headers. Used internally by NHentai, but available if you need raw HTML or image bytes.

nhentai.grabber.Grabber(url: str, timeout: int = 30, headers: dict | None = None)

Fetches raw HTML or image bytes from a URL with nhentai-compatible headers.

Parameters:

Name Type Description Default
url str

Target URL.

required
timeout int

Request timeout in seconds.

30
headers dict | None

Additional headers merged with defaults.

None
Source code in nhentai/grabber.py
def __init__(self, url: str, timeout: int = 30, headers: dict | None = None):
    """Store the target URL, timeout, and the merged request headers.

    :param url: Target URL.
    :param timeout: Request timeout in seconds.
    :param headers: Additional headers merged with defaults.
    """
    # Caller-supplied headers override the module defaults key-by-key.
    merged = dict(_DEFAULT_HEADERS)
    if headers:
        merged.update(headers)
    self.url = url
    self.timeout = timeout
    self.headers = merged

get_html() -> str

Fetch and return the HTML of `url`.

Raises:

Type Description
ConnectionError

On HTTP error, timeout, or network failure.

Source code in nhentai/grabber.py
def get_html(self) -> str:
    """Fetch and return the HTML of :attr:`url`.

    :raises ConnectionError: On HTTP error, timeout, or network failure.
    """
    # NOTE(review): verify=False disables TLS certificate validation —
    # presumably deliberate for this site; confirm before changing.
    try:
        response = requests.get(
            self.url,
            headers=self.headers,
            verify=False,
            timeout=self.timeout,
            allow_redirects=True,
        )
    except requests.exceptions.Timeout as exc:
        raise ConnectionError(f"Request timed out after {self.timeout}s: {self.url}") from exc
    except requests.exceptions.ConnectionError as exc:
        raise ConnectionError(f"Network error fetching {self.url}: {exc}") from exc
    # Only raise_for_status() can produce HTTPError, so checking it
    # separately from the fetch is equivalent to the one-try form.
    try:
        response.raise_for_status()
    except requests.exceptions.HTTPError as exc:
        raise ConnectionError(f"HTTP {exc.response.status_code} fetching {self.url}") from exc
    return response.text

download_bytes(url: str) -> bytes

Download and return raw bytes from url.

Raises:

Type Description
ConnectionError

On HTTP error, timeout, or network failure.

Source code in nhentai/grabber.py
def download_bytes(self, url: str) -> bytes:
    """Download and return raw bytes from *url*.

    :param url: Direct URL of the resource to download.
    :raises ConnectionError: On HTTP error, timeout, or network failure.
    """
    try:
        # Use the response as a context manager: with stream=True, a
        # response that errors in raise_for_status() would otherwise never
        # be closed, leaking the pooled connection.
        # NOTE(review): verify=False disables TLS certificate validation —
        # presumably deliberate for this site; confirm before changing.
        with requests.get(
            url, headers=self.headers, verify=False,
            timeout=self.timeout, stream=True,
        ) as resp:
            resp.raise_for_status()
            # .content drains the stream fully and returns the body bytes.
            return resp.content
    except requests.exceptions.HTTPError as exc:
        raise ConnectionError(f"HTTP {exc.response.status_code} downloading {url}") from exc
    except requests.exceptions.Timeout as exc:
        raise ConnectionError(f"Download timed out: {url}") from exc
    except requests.exceptions.ConnectionError as exc:
        raise ConnectionError(f"Network error downloading {url}: {exc}") from exc