"""Tests for feed resolution and XDG cache handling."""

from __future__ import annotations

import json
import urllib.error
from pathlib import Path

import pytest

from sb_scout.fetch import default_cache_dir, fetch_feed, resolve_feed_url


class FakeHeaders(dict):
    """Stand-in for an HTTP response header mapping.

    A plain ``dict`` subclass is sufficient: the previous ``get`` override
    merely delegated to ``dict.get`` with identical arguments, so it was a
    no-op and has been removed.  Lookups remain case-sensitive, matching
    how the tests populate and read the headers.
    """


class FakeResponse:
    """Minimal stand-in for the object returned by ``urllib.request.urlopen``.

    Serves a JSON-serialisable *payload* as the response body and exposes
    the same ``headers`` / ``status`` attributes and context-manager
    protocol the production code relies on.
    """

    def __init__(self, payload, headers=None):
        # Body to serve from read(); always reported as a 200 response.
        self._payload = payload
        self.status = 200
        self.headers = FakeHeaders(headers or {})

    def read(self):
        # Encode the payload the way a real JSON feed body would arrive.
        return json.dumps(self._payload).encode()

    def __enter__(self):
        # Allow `with urlopen(...) as resp:` usage.
        return self

    def __exit__(self, exc_type, exc, tb):
        # Never suppress exceptions raised inside the `with` body.
        return False


class TestResolveFeedUrl:
    """Behaviour of resolve_feed_url for currency and explicit-URL inputs."""

    def test_default_currency_is_usd(self):
        # No arguments: USD feed is the default.
        feed_url, ccy = resolve_feed_url()
        assert feed_url.endswith("live_data_sb_USD.json")
        assert ccy == "USD"

    def test_eur_currency(self):
        # The currency code is threaded into the generated feed URL.
        feed_url, ccy = resolve_feed_url(currency="EUR")
        assert feed_url.endswith("live_data_sb_EUR.json")
        assert ccy == "EUR"

    def test_explicit_url_wins(self):
        # An explicit URL overrides URL generation but not the currency.
        feed_url, ccy = resolve_feed_url(currency="EUR", url="https://example.com/feed.json")
        assert feed_url == "https://example.com/feed.json"
        assert ccy == "EUR"


class TestDefaultCacheDir:
    """default_cache_dir must honour the XDG base-directory spec."""

    def test_xdg_cache_location(self, monkeypatch, tmp_path):
        # Point XDG_CACHE_HOME at a temp dir and expect the app subtree.
        xdg_root = tmp_path / "cache"
        monkeypatch.setenv("XDG_CACHE_HOME", str(xdg_root))
        expected = xdg_root / "sb-scout" / "feeds"
        assert default_cache_dir() == expected


class TestFetchFeed:
    """fetch_feed caching: 304 revalidation and offline fallback."""

    def test_uses_cached_payload_on_304(self, monkeypatch, tmp_path):
        # A 304 reply must be served from the on-disk cache, and the
        # revalidation request must carry the validators from the first
        # response (ETag / Last-Modified).
        feed_url = "https://example.com/feed.json"
        feed_payload = {"server": [{"id": 1}]}
        seen_requests = []

        def fake_urlopen(request, context=None, timeout=None):
            seen_requests.append({
                "headers": dict(request.headers),
                "timeout": timeout,
                "url": request.full_url,
            })
            if len(seen_requests) > 1:
                # Every request after the first reports "nothing changed".
                raise urllib.error.HTTPError(feed_url, 304, "Not Modified", hdrs=None, fp=None)
            return FakeResponse(
                feed_payload,
                headers={
                    "ETag": '"abc"',
                    "Last-Modified": "Mon, 01 Jan 2024 00:00:00 GMT",
                },
            )

        monkeypatch.setattr("urllib.request.urlopen", fake_urlopen)

        assert fetch_feed(feed_url, cache_dir=tmp_path) == feed_payload
        assert fetch_feed(feed_url, cache_dir=tmp_path) == feed_payload

        # urllib capitalises header names unpredictably; compare lowercased.
        revalidation = {k.lower(): v for k, v in seen_requests[1]["headers"].items()}
        assert revalidation["if-none-match"] == '"abc"'
        assert revalidation["if-modified-since"] == "Mon, 01 Jan 2024 00:00:00 GMT"

    def test_falls_back_to_cache_on_network_error(self, monkeypatch, tmp_path):
        # Once a payload is cached, a network failure must not surface to
        # the caller — the cached copy is returned instead.
        feed_url = "https://example.com/feed.json"
        feed_payload = {"server": [{"id": 1}]}

        def online_urlopen(request, context=None, timeout=None):
            return FakeResponse(feed_payload, headers={"ETag": '"abc"'})

        def offline_urlopen(request, context=None, timeout=None):
            raise urllib.error.URLError("offline")

        # Prime the cache while "online".
        monkeypatch.setattr("urllib.request.urlopen", online_urlopen)
        assert fetch_feed(feed_url, cache_dir=tmp_path) == feed_payload

        # Subsequent fetches survive the outage via the cached copy.
        monkeypatch.setattr("urllib.request.urlopen", offline_urlopen)
        assert fetch_feed(feed_url, cache_dir=tmp_path) == feed_payload
