"""HTTP feed fetching with XDG cache and conditional requests."""

from __future__ import annotations

import hashlib
import json
import os
import ssl
import urllib.error
import urllib.request
from pathlib import Path
from typing import Any

from sb_scout.models import DEFAULT_FEED_CURRENCY, FEED_URLS


def default_cache_dir() -> Path:
    """Return the default XDG cache directory for sb-scout feeds."""
    base = os.environ.get("XDG_CACHE_HOME")
    if not base:
        # XDG spec fallback when the variable is unset or empty.
        base = str(Path.home() / ".cache")
    return Path(base) / "sb-scout" / "feeds"


def resolve_feed_url(currency: str | None = None, url: str | None = None) -> tuple[str, str]:
    """Resolve the feed URL and canonical currency label.

    An explicit URL takes precedence; when no currency is given, one is
    guessed from the URL text, falling back to the default. Otherwise a
    supported currency name (USD/EUR) maps to Hetzner's matching feed.
    """
    if url:
        label = currency or _currency_from_url(url) or DEFAULT_FEED_CURRENCY
        return url, label.upper()

    label = (currency or DEFAULT_FEED_CURRENCY).upper()
    try:
        return FEED_URLS[label], label
    except KeyError:
        choices = ", ".join(sorted(FEED_URLS))
        raise SystemExit(f"Unsupported currency: {label} (choose from: {choices})") from None


def _currency_from_url(url: str) -> str | None:
    """Guess the feed currency from a ``_XXX.JSON`` marker in *url*, if any."""
    haystack = url.upper()
    return next(
        (code for code in FEED_URLS if f"_{code}.JSON" in haystack),
        None,
    )


def _cache_paths(url: str, cache_dir: Path) -> tuple[Path, Path]:
    """Map *url* to its (data, metadata) cache file paths inside *cache_dir*."""
    key = hashlib.sha256(url.encode("utf-8")).hexdigest()
    data_path = cache_dir / f"{key}.json"
    meta_path = cache_dir / f"{key}.meta.json"
    return data_path, meta_path


def _load_cached(data_path: Path, meta_path: Path) -> tuple[dict[str, Any], dict[str, Any]] | None:
    if not data_path.is_file() or not meta_path.is_file():
        return None
    try:
        payload = json.loads(data_path.read_text())
        meta = json.loads(meta_path.read_text())
    except (OSError, json.JSONDecodeError):
        return None
    if not isinstance(payload, dict):
        return None
    return payload, meta if isinstance(meta, dict) else {}


def _write_cache(
    data_path: Path,
    meta_path: Path,
    *,
    url: str,
    payload: dict[str, Any],
    headers: dict[str, str],
) -> None:
    """Persist *payload* and conditional-request metadata for *url*.

    The metadata records the response's ETag / Last-Modified values so a
    later fetch can send If-None-Match / If-Modified-Since headers.
    """
    data_path.parent.mkdir(parents=True, exist_ok=True)
    # Write explicitly as UTF-8 so the cache does not depend on the locale's
    # default encoding (and round-trips with the UTF-8 reader).
    data_path.write_text(json.dumps(payload), encoding="utf-8")
    meta = {
        "url": url,
        "etag": headers.get("ETag"),
        "last_modified": headers.get("Last-Modified"),
    }
    meta_path.write_text(json.dumps(meta), encoding="utf-8")


def _validate_payload(url: str, payload: Any) -> dict[str, Any]:
    """Ensure the decoded feed is a JSON object with a ``server`` list."""
    if isinstance(payload, dict) and isinstance(payload.get("server"), list):
        return payload
    raise SystemExit(f"Unexpected feed shape from {url}")


def fetch_feed(url: str, *, cache_dir: str | Path | None = None, timeout: int = 30) -> dict[str, Any]:
    """Fetch the Hetzner auction JSON feed.

    Conditional requests are used when cache metadata is available. If the
    network request fails but a cached copy exists, the cached copy is used as
    a stale fallback.

    Args:
        url: Feed URL to fetch.
        cache_dir: Cache directory override; defaults to the XDG cache dir.
        timeout: Socket timeout in seconds for the request.

    Returns:
        The decoded feed payload (a dict validated to contain a ``server`` list).

    Raises:
        SystemExit: If the response body is not the expected feed shape.
        urllib.error.HTTPError: On HTTP failure with no cached fallback.
        urllib.error.URLError: On network failure with no cached fallback.
    """
    cache_root = Path(cache_dir) if cache_dir is not None else default_cache_dir()
    data_path, meta_path = _cache_paths(url, cache_root)
    cached = _load_cached(data_path, meta_path)

    # Build conditional-request headers only from usable (non-empty string)
    # metadata values; missing values are simply omitted.
    headers: dict[str, str] = {}
    if cached:
        _payload, meta = cached
        etag = meta.get("etag")
        last_modified = meta.get("last_modified")
        if isinstance(etag, str) and etag:
            headers["If-None-Match"] = etag
        if isinstance(last_modified, str) and last_modified:
            headers["If-Modified-Since"] = last_modified

    request = urllib.request.Request(url, headers=headers)
    context = ssl.create_default_context()

    try:
        with urllib.request.urlopen(request, context=context, timeout=timeout) as response:
            payload = _validate_payload(url, json.loads(response.read().decode()))
            # Absent validators are stored as "" and filtered out by the
            # non-empty-string checks above on the next fetch.
            response_headers = {
                "ETag": response.headers.get("ETag", ""),
                "Last-Modified": response.headers.get("Last-Modified", ""),
            }
            _write_cache(data_path, meta_path, url=url, payload=payload, headers=response_headers)
            return payload
    except urllib.error.HTTPError as exc:
        # 304 Not Modified: the cached copy is still current. Any other HTTP
        # error status also falls back to the cached copy when one exists
        # (the "stale fallback" described in the docstring).
        if exc.code == 304 and cached:
            return cached[0]
        if cached:
            return cached[0]
        raise
    except urllib.error.URLError:
        # Network-level failure (DNS, connection refused, TLS, timeout):
        # serve the stale cached copy if one exists. HTTPError is a URLError
        # subclass, so HTTP statuses are handled by the branch above.
        if cached:
            return cached[0]
        raise
