Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
18 changes: 18 additions & 0 deletions CHANGELOG.md
Original file line number Diff line number Diff line change
@@ -1,5 +1,16 @@
## [1.9.3] - 2026-02-14
### Fixed
- Aligned config-flow API validation with runtime parsing by requiring `dailyInfo`
to be a non-empty list of objects during setup validation.
- Prevented test cross-contamination in setup tests by using scoped monkeypatching
for coordinator/client stubs instead of persistent module reassignment.
- Prevented disabled per-day sensors from being re-created during sensor setup by
skipping `*_d1`/`*_d2` keys when effective forecast options disable them.
- Hardened coordinator parsing for malformed `dailyInfo` payloads by treating
non-list/non-dict structures as invalid and preserving the last successful
dataset when available.
- Normalized stored forecast sensor mode values during integration setup so
legacy or whitespace-padded values no longer degrade silently to `none`.
- Ensured deterministic current-day plant sensor creation by sorting plant codes.
- Rejected whitespace-only API keys at setup (defensive validation) and raised `ConfigEntryAuthFailed` with a clearer "Invalid API key" message.
- Masked API key input fields in the config flow (password selector).
Expand All @@ -8,6 +19,13 @@
- Improved test isolation by avoiding unconditional replacement of the global `aiohttp` module stub.

### Changed
- Switched sensor setup iteration to use a validated local data snapshot for
clearer and more consistent entity creation flow.
- Preserved legacy 4-decimal coordinate unique-id formatting to keep existing
duplicate-location detection behavior stable across upgrades.
- Expanded regression coverage for disabled per-day sensor creation, malformed
`dailyInfo` handling, setup mode normalization, and legacy duplicate
detection behavior for coordinate-based unique IDs.
- Simplified plant parsing by removing redundant code checks (non-empty by construction).
- Deduplicated defensive integer parsing into a shared utility and aligned diagnostics
with runtime/config-flow rules to reject non-finite or decimal values consistently.
Expand Down
9 changes: 6 additions & 3 deletions custom_components/pollenlevels/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -207,12 +207,15 @@ async def async_setup_entry(
CONF_CREATE_FORECAST_SENSORS,
entry.data.get(CONF_CREATE_FORECAST_SENSORS, ForecastSensorMode.NONE),
)
normalized_mode = normalize_sensor_mode(raw_mode, _LOGGER)
try:
mode = ForecastSensorMode(raw_mode)
mode = ForecastSensorMode(normalized_mode)
except (ValueError, TypeError):
mode = ForecastSensorMode.NONE
create_d1 = mode in (ForecastSensorMode.D1, ForecastSensorMode.D1_D2)
create_d2 = mode == ForecastSensorMode.D1_D2
create_d1 = (
mode in (ForecastSensorMode.D1, ForecastSensorMode.D1_D2) and forecast_days >= 2
)
create_d2 = mode == ForecastSensorMode.D1_D2 and forecast_days >= 3

api_key = entry.data.get(CONF_API_KEY)
if not isinstance(api_key, str) or not api_key.strip():
Expand Down
16 changes: 14 additions & 2 deletions custom_components/pollenlevels/config_flow.py
Original file line number Diff line number Diff line change
Expand Up @@ -376,6 +376,8 @@ async def _async_validate_input(
normalized[CONF_LONGITUDE] = lon

if check_unique_id:
# Keep unique_id formatting aligned with legacy entries for
# duplicate detection compatibility across upgrades.
uid = f"{lat:.4f}_{lon:.4f}"
try:
await self.async_set_unique_id(uid, raise_on_progress=False)
Expand Down Expand Up @@ -445,8 +447,18 @@ async def _async_validate_input(
data = json.loads(body_str) if body_str else {}
except Exception:
data = {}
if not data.get("dailyInfo"):
_LOGGER.warning("Validation: 'dailyInfo' missing")

daily_info = (
data.get("dailyInfo") if isinstance(data, dict) else None
)
daily_is_valid = isinstance(daily_info, list) and bool(daily_info)
if daily_is_valid:
daily_is_valid = all(
isinstance(item, dict) for item in daily_info
)

if not daily_is_valid:
_LOGGER.warning("Validation: 'dailyInfo' missing or invalid")
errors["base"] = "cannot_connect"
placeholders["error_message"] = (
"API response missing expected pollen forecast information."
Expand Down
13 changes: 10 additions & 3 deletions custom_components/pollenlevels/coordinator.py
Original file line number Diff line number Diff line change
Expand Up @@ -238,16 +238,23 @@ async def _async_update_data(self):
if region := payload.get("regionCode"):
new_data["region"] = {"source": "meta", "value": region}

daily: list[dict] = payload.get("dailyInfo") or []
daily_raw = payload.get("dailyInfo")
daily = daily_raw if isinstance(daily_raw, list) else None
# Keep day offsets stable: if any element is invalid, treat the payload as
# malformed instead of compacting/reindexing list positions.
if daily is not None and any(not isinstance(item, dict) for item in daily):
daily = None

if not daily:
if self.data:
if not self._missing_dailyinfo_warned:
_LOGGER.warning(
"API response missing dailyInfo; keeping last successful data"
"API response missing or invalid dailyInfo; "
"keeping last successful data"
)
self._missing_dailyinfo_warned = True
return self.data
raise UpdateFailed("API response missing dailyInfo")
raise UpdateFailed("API response missing or invalid dailyInfo")
self._missing_dailyinfo_warned = False

# date (today)
Expand Down
6 changes: 5 additions & 1 deletion custom_components/pollenlevels/sensor.py
Original file line number Diff line number Diff line change
Expand Up @@ -189,9 +189,13 @@ async def async_setup_entry(
)

sensors: list[CoordinatorEntity] = []
for code in coordinator.data:
for code in data:
if code in ("region", "date"):
continue
if code.endswith("_d1") and not allow_d1:
continue
if code.endswith("_d2") and not allow_d2:
continue
sensors.append(PollenSensor(coordinator, code))

sensors.extend(
Expand Down
92 changes: 91 additions & 1 deletion tests/test_config_flow.py
Original file line number Diff line number Diff line change
Expand Up @@ -1037,6 +1037,46 @@ def test_validate_input_redacts_api_key_in_error_message(
assert "***" in error_message


def test_validate_input_http_200_non_list_dailyinfo_sets_cannot_connect(
    monkeypatch: pytest.MonkeyPatch,
) -> None:
    """Reject an HTTP 200 payload whose ``dailyInfo`` value is not a list."""

    payload = b'{"dailyInfo": "invalid"}'
    stub_session = _patch_client_session(
        monkeypatch, _StubResponse(status=200, body=payload)
    )

    config_flow = PollenLevelsConfigFlow()
    config_flow.hass = SimpleNamespace()

    result_errors, result_normalized = asyncio.run(
        config_flow._async_validate_input(_base_user_input(), check_unique_id=False)
    )

    # The stub must have been hit, and validation must fail without data.
    assert stub_session.calls
    assert result_errors == {"base": "cannot_connect"}
    assert result_normalized is None


def test_validate_input_http_200_dailyinfo_with_non_dict_sets_cannot_connect(
    monkeypatch: pytest.MonkeyPatch,
) -> None:
    """Reject an HTTP 200 payload whose ``dailyInfo`` list holds non-dict items."""

    payload = b'{"dailyInfo": ["invalid-item"]}'
    stub_session = _patch_client_session(
        monkeypatch, _StubResponse(status=200, body=payload)
    )

    config_flow = PollenLevelsConfigFlow()
    config_flow.hass = SimpleNamespace()

    result_errors, result_normalized = asyncio.run(
        config_flow._async_validate_input(_base_user_input(), check_unique_id=False)
    )

    # The stub must have been exercised and validation must yield no data.
    assert stub_session.calls
    assert result_errors == {"base": "cannot_connect"}
    assert result_normalized is None


def test_validate_input_unexpected_exception_sets_unknown(
monkeypatch: pytest.MonkeyPatch,
) -> None:
Expand Down Expand Up @@ -1064,7 +1104,8 @@ def test_validate_input_happy_path_sets_unique_id_and_normalizes(
"""Successful validation should normalize data and set unique ID."""

body = b'{"dailyInfo": [{"day": "D0"}]}'
session = _patch_client_session(monkeypatch, _StubResponse(200, body))
session = _SequenceSession([_StubResponse(200, body), _StubResponse(200, body)])
monkeypatch.setattr(cf, "async_get_clientsession", lambda hass: session)

class _TrackingFlow(PollenLevelsConfigFlow):
def __init__(self) -> None:
Expand Down Expand Up @@ -1104,6 +1145,55 @@ def _abort_if_unique_id_configured(self):
assert flow.abort_calls == 1


def test_validate_input_unique_id_collapses_nearby_locations_legacy_compat(
    monkeypatch: pytest.MonkeyPatch,
) -> None:
    """Nearby coordinates must collapse to the same legacy 4-decimal unique id."""

    ok_body = b'{"dailyInfo": [{"day": "D0"}]}'
    stub_session = _SequenceSession(
        [_StubResponse(200, ok_body), _StubResponse(200, ok_body)]
    )
    monkeypatch.setattr(cf, "async_get_clientsession", lambda hass: stub_session)

    class _RecordingFlow(PollenLevelsConfigFlow):
        """Flow subclass that records every unique id it is asked to set."""

        def __init__(self) -> None:
            super().__init__()
            self.unique_ids: list[str] = []

        async def async_set_unique_id(self, uid: str, raise_on_progress: bool = False):
            self.unique_ids.append(uid)
            return None

        def _abort_if_unique_id_configured(self):
            return None

    flow = _RecordingFlow()
    flow.hass = SimpleNamespace(config=SimpleNamespace())

    # Two inputs that differ only beyond the fourth decimal place.
    inputs = [
        {
            **_base_user_input(),
            CONF_LOCATION: {CONF_LATITUDE: lat, CONF_LONGITUDE: lon},
        }
        for lat, lon in (("1.0000044", "2.0000044"), ("1.0000046", "2.0000046"))
    ]

    outcomes = [
        asyncio.run(flow._async_validate_input(user_input, check_unique_id=True))
        for user_input in inputs
    ]

    assert stub_session.calls
    for errors, normalized in outcomes:
        assert errors == {}
        assert normalized is not None
    assert len(flow.unique_ids) == 2
    assert flow.unique_ids[0] == flow.unique_ids[1] == "1.0000_2.0000"


def test_reauth_confirm_updates_and_reloads_entry() -> None:
"""Re-auth confirmation should update stored credentials and reload the entry."""

Expand Down
107 changes: 95 additions & 12 deletions tests/test_init.py
Original file line number Diff line number Diff line change
Expand Up @@ -480,7 +480,9 @@ def test_setup_entry_boundary_coordinates_are_allowed() -> None:
assert asyncio.run(integration.async_setup_entry(hass, entry)) is True


def test_setup_entry_decimal_numeric_options_fallback_to_defaults() -> None:
def test_setup_entry_decimal_numeric_options_fallback_to_defaults(
monkeypatch: pytest.MonkeyPatch,
) -> None:
"""Decimal options should not be truncated silently during setup."""

hass = _FakeHass()
Expand All @@ -507,15 +509,11 @@ def __init__(self, *args, **kwargs):
async def async_config_entry_first_refresh(self):
return None

orig_coordinator = integration.PollenDataUpdateCoordinator
integration.PollenDataUpdateCoordinator = _StubCoordinator
monkeypatch.setattr(integration, "PollenDataUpdateCoordinator", _StubCoordinator)

try:
assert asyncio.run(integration.async_setup_entry(hass, entry)) is True
assert seen["hours"] == integration.DEFAULT_UPDATE_INTERVAL
assert seen["forecast_days"] == integration.DEFAULT_FORECAST_DAYS
finally:
integration.PollenDataUpdateCoordinator = orig_coordinator
assert asyncio.run(integration.async_setup_entry(hass, entry)) is True
assert seen["hours"] == integration.DEFAULT_UPDATE_INTERVAL
assert seen["forecast_days"] == integration.DEFAULT_FORECAST_DAYS


def test_setup_entry_wraps_generic_error() -> None:
Expand All @@ -531,7 +529,9 @@ class _Boom(Exception):
asyncio.run(integration.async_setup_entry(hass, entry))


def test_setup_entry_success_and_unload() -> None:
def test_setup_entry_success_and_unload(
monkeypatch: pytest.MonkeyPatch,
) -> None:
"""Happy path should forward setup, register listener, and unload cleanly."""

hass = _FakeHass()
Expand Down Expand Up @@ -565,8 +565,8 @@ async def async_config_entry_first_refresh(self):
async def async_refresh(self):
return None

integration.GooglePollenApiClient = _StubClient
integration.PollenDataUpdateCoordinator = _StubCoordinator
monkeypatch.setattr(integration, "GooglePollenApiClient", _StubClient)
monkeypatch.setattr(integration, "PollenDataUpdateCoordinator", _StubCoordinator)

assert asyncio.run(integration.async_setup_entry(hass, entry)) is True

Expand All @@ -585,6 +585,89 @@ async def async_refresh(self):
assert entry.runtime_data is None


def test_setup_entry_normalizes_forecast_sensor_mode(
    monkeypatch: pytest.MonkeyPatch,
) -> None:
    """Whitespace-padded legacy mode strings must still enable D+1 sensors."""

    hass = _FakeHass()
    entry = _FakeEntry(options={integration.CONF_CREATE_FORECAST_SENSORS: " D+1 "})

    class _StubClient:
        """Minimal API client double; performs no network I/O."""

        def __init__(self, _session, _api_key):
            self.session = _session
            self.api_key = _api_key

        async def async_fetch_pollen_data(self, **_kwargs):
            return {"region": {"source": "meta"}, "dailyInfo": []}

    class _StubCoordinator(update_coordinator_mod.DataUpdateCoordinator):
        """Coordinator double capturing the flags setup passes to it."""

        def __init__(self, *args, **kwargs):
            # Copy the keyword arguments setup is expected to provide.
            for name in ("create_d1", "create_d2", "entry_id", "lat", "lon"):
                setattr(self, name, kwargs[name])
            self.entry_title = kwargs.get("entry_title")
            self.last_updated = None
            self.data = {"region": {"source": "meta"}, "date": {"source": "meta"}}

        async def async_config_entry_first_refresh(self):
            return None

    monkeypatch.setattr(integration, "GooglePollenApiClient", _StubClient)
    monkeypatch.setattr(integration, "PollenDataUpdateCoordinator", _StubCoordinator)

    assert asyncio.run(integration.async_setup_entry(hass, entry)) is True
    runtime = entry.runtime_data
    assert runtime is not None
    assert runtime.coordinator.create_d1 is True
    assert runtime.coordinator.create_d2 is False


def test_setup_entry_disables_d1_when_forecast_days_is_one(
    monkeypatch: pytest.MonkeyPatch,
) -> None:
    """With a single forecast day, neither D+1 nor D+2 sensors may be created."""

    hass = _FakeHass()
    entry = _FakeEntry(
        options={
            integration.CONF_CREATE_FORECAST_SENSORS: "D+1+2",
            integration.CONF_FORECAST_DAYS: 1,
        }
    )

    class _StubClient:
        """Minimal API client double; performs no network I/O."""

        def __init__(self, _session, _api_key):
            self.session = _session
            self.api_key = _api_key

        async def async_fetch_pollen_data(self, **_kwargs):
            return {"region": {"source": "meta"}, "dailyInfo": []}

    class _StubCoordinator(update_coordinator_mod.DataUpdateCoordinator):
        """Coordinator double capturing the flags setup passes to it."""

        def __init__(self, *args, **kwargs):
            # Copy the keyword arguments setup is expected to provide.
            for name in ("create_d1", "create_d2", "entry_id", "lat", "lon"):
                setattr(self, name, kwargs[name])
            self.entry_title = kwargs.get("entry_title")
            self.last_updated = None
            self.data = {"region": {"source": "meta"}, "date": {"source": "meta"}}

        async def async_config_entry_first_refresh(self):
            return None

    monkeypatch.setattr(integration, "GooglePollenApiClient", _StubClient)
    monkeypatch.setattr(integration, "PollenDataUpdateCoordinator", _StubCoordinator)

    assert asyncio.run(integration.async_setup_entry(hass, entry)) is True
    runtime = entry.runtime_data
    assert runtime is not None
    assert runtime.coordinator.create_d1 is False
    assert runtime.coordinator.create_d2 is False


def test_force_update_requests_refresh_per_entry() -> None:
"""force_update should queue refresh via runtime_data coordinators and skip missing runtime data."""

Expand Down
Loading