diff --git a/dags/sdk_doctor/github_sdk_versions.py b/dags/sdk_doctor/github_sdk_versions.py index 5b02fe8b1433e..296b255909853 100644 --- a/dags/sdk_doctor/github_sdk_versions.py +++ b/dags/sdk_doctor/github_sdk_versions.py @@ -2,6 +2,7 @@ import json import time from collections.abc import Callable +from datetime import UTC, datetime from typing import Any, Literal, Optional, cast import dagster @@ -76,7 +77,8 @@ def fetch_github_data_for_sdk(lib_name: str) -> Optional[dict[str, Any]]: def fetch_sdk_data_from_releases(repo: str, tag_prefix: str = "") -> Optional[dict[str, Any]]: """Helper function to fetch SDK data from GitHub releases API.""" try: - response = requests.get(f"https://api.github.com/repos/{repo}/releases", timeout=10) + # Fetch 100 releases (GitHub API max) to handle monorepos with many packages + response = requests.get(f"https://api.github.com/repos/{repo}/releases?per_page=100", timeout=10) if not response.ok: logger.error(f"[SDK Doctor] Failed to fetch releases for {repo}", status_code=response.status_code) return None @@ -376,6 +378,9 @@ def cache_github_sdk_versions_op( cached_count = 0 skipped_count = 0 + # Cache creation timestamp for staleness checks + cached_at = datetime.now(UTC).isoformat() + for lib_name, github_data in sdk_data.items(): if github_data is None: skipped_count += 1 @@ -383,7 +388,9 @@ def cache_github_sdk_versions_op( cache_key = f"github:sdk_versions:{lib_name}" try: - redis_client.setex(cache_key, CACHE_EXPIRY, json.dumps(github_data)) + # Add cachedAt timestamp to the data + github_data_with_timestamp = {**github_data, "cachedAt": cached_at} + redis_client.setex(cache_key, CACHE_EXPIRY, json.dumps(github_data_with_timestamp)) cached_count += 1 context.log.info(f"Successfully cached {lib_name} SDK data") except Exception as e: diff --git a/dags/sdk_doctor/team_sdk_versions.py b/dags/sdk_doctor/team_sdk_versions.py index 8392d45e1c864..77026e45b2662 100644 --- a/dags/sdk_doctor/team_sdk_versions.py +++ b/dags/sdk_doctor/team_sdk_versions.py @@ -1,6 +1,7 @@ import json from collections import defaultdict from dataclasses import dataclass +from datetime import UTC, datetime from typing import Any, Literal, Optional import dagster @@ -100,9 +101,14 @@ def get_and_cache_team_sdk_versions( try: sdk_versions = get_sdk_versions_for_team(team_id, logger=logger) if sdk_versions is not None: - payload = json.dumps(sdk_versions) + # Store actual timestamp when caching (instead of calculating from TTL later) + cached_at = datetime.now(UTC).isoformat() + cache_payload = { + "sdk_versions": sdk_versions, + "cachedAt": cached_at, + } cache_key = f"sdk_versions:team:{team_id}" - redis_client.setex(cache_key, CACHE_EXPIRY, payload) + redis_client.setex(cache_key, CACHE_EXPIRY, json.dumps(cache_payload)) logger.info(f"[SDK Doctor] Team {team_id} SDK versions cached successfully") return sdk_versions diff --git a/ee/clickhouse/queries/test/__snapshots__/test_cohort_query.ambr b/ee/clickhouse/queries/test/__snapshots__/test_cohort_query.ambr index 685b1ebbe96c1..0a7cbbc3adf33 100644 --- a/ee/clickhouse/queries/test/__snapshots__/test_cohort_query.ambr +++ b/ee/clickhouse/queries/test/__snapshots__/test_cohort_query.ambr @@ -46,7 +46,7 @@ WHERE equals(person_distinct_id_overrides.team_id, 99999) GROUP BY person_distinct_id_overrides.distinct_id HAVING ifNull(equals(tupleElement(argMax(tuple(person_distinct_id_overrides.is_deleted), person_distinct_id_overrides.version), 1), 0), 0) SETTINGS optimize_aggregation_in_order=1) AS e__override ON equals(e.distinct_id, 
e__override.distinct_id) - WHERE and(equals(e.team_id, 99999), greaterOrEquals(timestamp, toDateTime64('2025-11-11 00:00:00.000000', 6, 'UTC')), lessOrEquals(timestamp, toDateTime64('today', 6, 'UTC')), equals(e.event, '$pageview'))) + WHERE and(equals(e.team_id, 99999), greaterOrEquals(timestamp, toDateTime64('2025-11-12 00:00:00.000000', 6, 'UTC')), lessOrEquals(timestamp, toDateTime64('today', 6, 'UTC')), equals(e.event, '$pageview'))) GROUP BY actor_id) AS source ORDER BY source.id ASC LIMIT 1000000000 SETTINGS optimize_aggregation_in_order=1, @@ -59,7 +59,7 @@ actor_id AS id FROM (SELECT min(toTimeZone(e.timestamp, 'UTC')) AS min_timestamp, - minIf(toTimeZone(e.timestamp, 'UTC'), greaterOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2025-11-11 00:00:00.000000', 6, 'UTC'))) AS min_timestamp_with_condition, + minIf(toTimeZone(e.timestamp, 'UTC'), greaterOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2025-11-12 00:00:00.000000', 6, 'UTC'))) AS min_timestamp_with_condition, if(not(empty(e__override.distinct_id)), e__override.person_id, e.person_id) AS actor_id, argMin(e.uuid, toTimeZone(e.timestamp, 'UTC')) AS uuid, argMin(e.distinct_id, toTimeZone(e.timestamp, 'UTC')) AS distinct_id @@ -213,7 +213,7 @@ WHERE equals(person_distinct_id_overrides.team_id, 99999) GROUP BY person_distinct_id_overrides.distinct_id HAVING ifNull(equals(tupleElement(argMax(tuple(person_distinct_id_overrides.is_deleted), person_distinct_id_overrides.version), 1), 0), 0) SETTINGS optimize_aggregation_in_order=1) AS e__override ON equals(e.distinct_id, e__override.distinct_id) - WHERE and(equals(e.team_id, 99999), and(and(greaterOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2025-11-17 00:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('today', 6, 'UTC'))), in(e.event, tuple('$new_view'))), or(ifNull(equals(step_0, 1), 0), ifNull(equals(step_1, 1), 0)))) + WHERE and(equals(e.team_id, 99999), and(and(greaterOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2025-11-18 00:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('today', 6, 'UTC'))), in(e.event, tuple('$new_view'))), or(ifNull(equals(step_0, 1), 0), ifNull(equals(step_1, 1), 0)))) GROUP BY aggregation_target HAVING ifNull(greaterOrEquals(step_reached, 0), 0)) WHERE bitTest(steps_bitfield, 1) @@ -454,7 +454,7 @@ WHERE equals(person_distinct_id_overrides.team_id, 99999) GROUP BY person_distinct_id_overrides.distinct_id HAVING ifNull(equals(tupleElement(argMax(tuple(person_distinct_id_overrides.is_deleted), person_distinct_id_overrides.version), 1), 0), 0) SETTINGS optimize_aggregation_in_order=1) AS e__override ON equals(e.distinct_id, e__override.distinct_id) - WHERE and(equals(e.team_id, 99999), greaterOrEquals(timestamp, toDateTime64('2025-11-18 00:00:00.000000', 6, 'UTC')), lessOrEquals(timestamp, toDateTime64('today', 6, 'UTC')), equals(e.event, '$pageview'))) + WHERE and(equals(e.team_id, 99999), greaterOrEquals(timestamp, toDateTime64('2025-11-19 00:00:00.000000', 6, 'UTC')), lessOrEquals(timestamp, toDateTime64('today', 6, 'UTC')), equals(e.event, '$pageview'))) GROUP BY actor_id) AS source ORDER BY source.id ASC LIMIT 1000000000 SETTINGS optimize_aggregation_in_order=1, @@ -932,7 +932,7 @@ WHERE equals(person_distinct_id_overrides.team_id, 99999) GROUP BY person_distinct_id_overrides.distinct_id HAVING ifNull(equals(tupleElement(argMax(tuple(person_distinct_id_overrides.is_deleted), person_distinct_id_overrides.version), 1), 0), 0) SETTINGS 
optimize_aggregation_in_order=1) AS e__override ON equals(e.distinct_id, e__override.distinct_id) - WHERE and(equals(e.team_id, 99999), greaterOrEquals(timestamp, toDateTime64('2025-11-18 00:00:00.000000', 6, 'UTC')), lessOrEquals(timestamp, toDateTime64('today', 6, 'UTC')), equals(e.event, '$pageview'))) + WHERE and(equals(e.team_id, 99999), greaterOrEquals(timestamp, toDateTime64('2025-11-19 00:00:00.000000', 6, 'UTC')), lessOrEquals(timestamp, toDateTime64('today', 6, 'UTC')), equals(e.event, '$pageview'))) GROUP BY actor_id) AS source ORDER BY source.id ASC LIMIT 1000000000 SETTINGS optimize_aggregation_in_order=1, @@ -1195,7 +1195,7 @@ WHERE equals(person_distinct_id_overrides.team_id, 99999) GROUP BY person_distinct_id_overrides.distinct_id HAVING ifNull(equals(tupleElement(argMax(tuple(person_distinct_id_overrides.is_deleted), person_distinct_id_overrides.version), 1), 0), 0) SETTINGS optimize_aggregation_in_order=1) AS e__override ON equals(e.distinct_id, e__override.distinct_id) - WHERE and(equals(e.team_id, 99999), greaterOrEquals(timestamp, toDateTime64('2025-11-18 00:00:00.000000', 6, 'UTC')), lessOrEquals(timestamp, toDateTime64('today', 6, 'UTC')), equals(e.event, '$pageview'))) + WHERE and(equals(e.team_id, 99999), greaterOrEquals(timestamp, toDateTime64('2025-11-19 00:00:00.000000', 6, 'UTC')), lessOrEquals(timestamp, toDateTime64('today', 6, 'UTC')), equals(e.event, '$pageview'))) GROUP BY actor_id) AS source ORDER BY source.id ASC LIMIT 1000000000 SETTINGS optimize_aggregation_in_order=1, @@ -1276,7 +1276,7 @@ WHERE equals(person_distinct_id_overrides.team_id, 99999) GROUP BY person_distinct_id_overrides.distinct_id HAVING ifNull(equals(tupleElement(argMax(tuple(person_distinct_id_overrides.is_deleted), person_distinct_id_overrides.version), 1), 0), 0) SETTINGS optimize_aggregation_in_order=1) AS e__override ON equals(e.distinct_id, e__override.distinct_id) - WHERE and(equals(e.team_id, 99999), and(and(greaterOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2025-11-18 00:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('today', 6, 'UTC'))), in(e.event, tuple('$pageview'))), or(ifNull(equals(step_0, 1), 0), ifNull(equals(step_1, 1), 0)))) + WHERE and(equals(e.team_id, 99999), and(and(greaterOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2025-11-19 00:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('today', 6, 'UTC'))), in(e.event, tuple('$pageview'))), or(ifNull(equals(step_0, 1), 0), ifNull(equals(step_1, 1), 0)))) GROUP BY aggregation_target HAVING ifNull(greaterOrEquals(step_reached, 0), 0)) WHERE bitTest(steps_bitfield, 1) @@ -1376,7 +1376,7 @@ WHERE equals(person_distinct_id_overrides.team_id, 99999) GROUP BY person_distinct_id_overrides.distinct_id HAVING ifNull(equals(tupleElement(argMax(tuple(person_distinct_id_overrides.is_deleted), person_distinct_id_overrides.version), 1), 0), 0) SETTINGS optimize_aggregation_in_order=1) AS e__override ON equals(e.distinct_id, e__override.distinct_id) - WHERE and(equals(e.team_id, 99999), and(and(greaterOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2025-11-18 00:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('today', 6, 'UTC'))), in(e.event, tuple('$pageview'))), or(ifNull(equals(step_0, 1), 0), ifNull(equals(step_1, 1), 0)))) + WHERE and(equals(e.team_id, 99999), and(and(greaterOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2025-11-19 00:00:00.000000', 6, 'UTC')), 
lessOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('today', 6, 'UTC'))), in(e.event, tuple('$pageview'))), or(ifNull(equals(step_0, 1), 0), ifNull(equals(step_1, 1), 0)))) GROUP BY aggregation_target HAVING ifNull(greaterOrEquals(step_reached, 0), 0)) WHERE bitTest(steps_bitfield, 1) @@ -1397,7 +1397,7 @@ WHERE equals(person_distinct_id_overrides.team_id, 99999) GROUP BY person_distinct_id_overrides.distinct_id HAVING ifNull(equals(tupleElement(argMax(tuple(person_distinct_id_overrides.is_deleted), person_distinct_id_overrides.version), 1), 0), 0) SETTINGS optimize_aggregation_in_order=1) AS events__override ON equals(events.distinct_id, events__override.distinct_id) - WHERE and(equals(events.team_id, 99999), equals(events.event, '$new_view'), less(toTimeZone(events.timestamp, 'UTC'), toDateTime64('today', 6, 'UTC')), greater(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2025-11-18 00:00:00.000000', 6, 'UTC'))) + WHERE and(equals(events.team_id, 99999), equals(events.event, '$new_view'), less(toTimeZone(events.timestamp, 'UTC'), toDateTime64('today', 6, 'UTC')), greater(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2025-11-19 00:00:00.000000', 6, 'UTC'))) GROUP BY if(not(empty(events__override.distinct_id)), events__override.person_id, events.person_id) HAVING ifNull(greaterOrEquals(count(), 1), 0) ORDER BY count() DESC) @@ -1499,7 +1499,7 @@ WHERE equals(person_distinct_id_overrides.team_id, 99999) GROUP BY person_distinct_id_overrides.distinct_id HAVING ifNull(equals(tupleElement(argMax(tuple(person_distinct_id_overrides.is_deleted), person_distinct_id_overrides.version), 1), 0), 0) SETTINGS optimize_aggregation_in_order=1) AS e__override ON equals(e.distinct_id, e__override.distinct_id) - WHERE and(equals(e.team_id, 99999), and(and(greaterOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2025-11-18 00:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('today', 6, 'UTC'))), in(e.event, tuple('$pageview'))), or(ifNull(equals(step_0, 1), 0), ifNull(equals(step_1, 1), 0)))) + WHERE and(equals(e.team_id, 99999), and(and(greaterOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2025-11-19 00:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('today', 6, 'UTC'))), in(e.event, tuple('$pageview'))), or(ifNull(equals(step_0, 1), 0), ifNull(equals(step_1, 1), 0)))) GROUP BY aggregation_target HAVING ifNull(greaterOrEquals(step_reached, 0), 0)) WHERE bitTest(steps_bitfield, 1) @@ -1519,7 +1519,7 @@ WHERE equals(person_distinct_id_overrides.team_id, 99999) GROUP BY person_distinct_id_overrides.distinct_id HAVING ifNull(equals(tupleElement(argMax(tuple(person_distinct_id_overrides.is_deleted), person_distinct_id_overrides.version), 1), 0), 0) SETTINGS optimize_aggregation_in_order=1) AS events__override ON equals(events.distinct_id, events__override.distinct_id) - WHERE and(equals(events.team_id, 99999), equals(events.event, '$pageview'), less(toTimeZone(events.timestamp, 'UTC'), toDateTime64('today', 6, 'UTC')), greater(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2025-11-18 00:00:00.000000', 6, 'UTC'))) + WHERE and(equals(events.team_id, 99999), equals(events.event, '$pageview'), less(toTimeZone(events.timestamp, 'UTC'), toDateTime64('today', 6, 'UTC')), greater(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2025-11-19 00:00:00.000000', 6, 'UTC'))) GROUP BY if(not(empty(events__override.distinct_id)), events__override.person_id, events.person_id) HAVING ifNull(greaterOrEquals(count(), 1), 0) 
ORDER BY count() DESC) @@ -1636,7 +1636,7 @@ WHERE equals(person_distinct_id_overrides.team_id, 99999) GROUP BY person_distinct_id_overrides.distinct_id HAVING ifNull(equals(tupleElement(argMax(tuple(person_distinct_id_overrides.is_deleted), person_distinct_id_overrides.version), 1), 0), 0) SETTINGS optimize_aggregation_in_order=1) AS e__override ON equals(e.distinct_id, e__override.distinct_id) - WHERE and(equals(e.team_id, 99999), ifNull(equals(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(e.properties, '$filter_prop'), ''), 'null'), '^"|"$', ''), 'something'), 0), greaterOrEquals(timestamp, toDateTime64('2025-11-22 00:00:00.000000', 6, 'UTC')), lessOrEquals(timestamp, toDateTime64('today', 6, 'UTC')), equals(e.event, '$pageview'))) + WHERE and(equals(e.team_id, 99999), ifNull(equals(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(e.properties, '$filter_prop'), ''), 'null'), '^"|"$', ''), 'something'), 0), greaterOrEquals(timestamp, toDateTime64('2025-11-23 00:00:00.000000', 6, 'UTC')), lessOrEquals(timestamp, toDateTime64('today', 6, 'UTC')), equals(e.event, '$pageview'))) GROUP BY actor_id) AS source ORDER BY source.id ASC LIMIT 1000000000 SETTINGS optimize_aggregation_in_order=1, @@ -1702,7 +1702,7 @@ WHERE equals(person_distinct_id_overrides.team_id, 99999) GROUP BY person_distinct_id_overrides.distinct_id HAVING ifNull(equals(tupleElement(argMax(tuple(person_distinct_id_overrides.is_deleted), person_distinct_id_overrides.version), 1), 0), 0) SETTINGS optimize_aggregation_in_order=1) AS e__override ON equals(e.distinct_id, e__override.distinct_id) - WHERE and(equals(e.team_id, 99999), greaterOrEquals(timestamp, toDateTime64('2025-11-18 00:00:00.000000', 6, 'UTC')), lessOrEquals(timestamp, toDateTime64('today', 6, 'UTC')), equals(e.event, '$pageview'))) + WHERE and(equals(e.team_id, 99999), greaterOrEquals(timestamp, toDateTime64('2025-11-19 00:00:00.000000', 6, 'UTC')), lessOrEquals(timestamp, toDateTime64('today', 6, 'UTC')), equals(e.event, '$pageview'))) GROUP BY actor_id) AS source ORDER BY source.id ASC LIMIT 1000000000 SETTINGS optimize_aggregation_in_order=1, @@ -1795,7 +1795,7 @@ WHERE equals(person_distinct_id_overrides.team_id, 99999) GROUP BY person_distinct_id_overrides.distinct_id HAVING ifNull(equals(tupleElement(argMax(tuple(person_distinct_id_overrides.is_deleted), person_distinct_id_overrides.version), 1), 0), 0) SETTINGS optimize_aggregation_in_order=1) AS e__override ON equals(e.distinct_id, e__override.distinct_id) - WHERE and(equals(e.team_id, 99999), greaterOrEquals(timestamp, toDateTime64('2025-11-18 00:00:00.000000', 6, 'UTC')), lessOrEquals(timestamp, toDateTime64('today', 6, 'UTC')), equals(e.event, '$pageview'))) + WHERE and(equals(e.team_id, 99999), greaterOrEquals(timestamp, toDateTime64('2025-11-19 00:00:00.000000', 6, 'UTC')), lessOrEquals(timestamp, toDateTime64('today', 6, 'UTC')), equals(e.event, '$pageview'))) GROUP BY actor_id) AS source ORDER BY source.id ASC LIMIT 1000000000 SETTINGS optimize_aggregation_in_order=1, @@ -1913,7 +1913,7 @@ WHERE equals(person_distinct_id_overrides.team_id, 99999) GROUP BY person_distinct_id_overrides.distinct_id HAVING ifNull(equals(tupleElement(argMax(tuple(person_distinct_id_overrides.is_deleted), person_distinct_id_overrides.version), 1), 0), 0) SETTINGS optimize_aggregation_in_order=1) AS e__override ON equals(e.distinct_id, e__override.distinct_id) - WHERE and(equals(e.team_id, 99999), greaterOrEquals(timestamp, toDateTime64('2025-11-11 00:00:00.000000', 6, 'UTC')), lessOrEquals(timestamp, toDateTime64('today', 
6, 'UTC')), equals(e.event, '$pageview'))) + WHERE and(equals(e.team_id, 99999), greaterOrEquals(timestamp, toDateTime64('2025-11-12 00:00:00.000000', 6, 'UTC')), lessOrEquals(timestamp, toDateTime64('today', 6, 'UTC')), equals(e.event, '$pageview'))) GROUP BY actor_id) AS source ORDER BY source.id ASC LIMIT 1000000000 SETTINGS optimize_aggregation_in_order=1, @@ -1942,7 +1942,7 @@ actor_id AS id FROM (SELECT min(toTimeZone(e.timestamp, 'UTC')) AS min_timestamp, - minIf(toTimeZone(e.timestamp, 'UTC'), greaterOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2025-11-11 00:00:00.000000', 6, 'UTC'))) AS min_timestamp_with_condition, + minIf(toTimeZone(e.timestamp, 'UTC'), greaterOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2025-11-12 00:00:00.000000', 6, 'UTC'))) AS min_timestamp_with_condition, if(not(empty(e__override.distinct_id)), e__override.person_id, e.person_id) AS actor_id, argMin(e.uuid, toTimeZone(e.timestamp, 'UTC')) AS uuid, argMin(e.distinct_id, toTimeZone(e.timestamp, 'UTC')) AS distinct_id @@ -2381,7 +2381,7 @@ WHERE equals(person_distinct_id_overrides.team_id, 99999) GROUP BY person_distinct_id_overrides.distinct_id HAVING ifNull(equals(tupleElement(argMax(tuple(person_distinct_id_overrides.is_deleted), person_distinct_id_overrides.version), 1), 0), 0) SETTINGS optimize_aggregation_in_order=1) AS e__override ON equals(e.distinct_id, e__override.distinct_id) - WHERE and(equals(e.team_id, 99999), greaterOrEquals(timestamp, toDateTime64('2025-11-18 00:00:00.000000', 6, 'UTC')), lessOrEquals(timestamp, toDateTime64('today', 6, 'UTC')), equals(e.event, '$pageview'))) + WHERE and(equals(e.team_id, 99999), greaterOrEquals(timestamp, toDateTime64('2025-11-19 00:00:00.000000', 6, 'UTC')), lessOrEquals(timestamp, toDateTime64('today', 6, 'UTC')), equals(e.event, '$pageview'))) GROUP BY actor_id) AS source ORDER BY source.id ASC LIMIT 1000000000 SETTINGS optimize_aggregation_in_order=1, @@ -2466,7 +2466,7 @@ WHERE equals(person_distinct_id_overrides.team_id, 99999) GROUP BY person_distinct_id_overrides.distinct_id HAVING ifNull(equals(tupleElement(argMax(tuple(person_distinct_id_overrides.is_deleted), person_distinct_id_overrides.version), 1), 0), 0) SETTINGS optimize_aggregation_in_order=1) AS e__override ON equals(e.distinct_id, e__override.distinct_id) - WHERE and(equals(e.team_id, 99999), greaterOrEquals(timestamp, toDateTime64('2025-11-18 00:00:00.000000', 6, 'UTC')), lessOrEquals(timestamp, toDateTime64('today', 6, 'UTC')), equals(e.event, '$pageview'))) + WHERE and(equals(e.team_id, 99999), greaterOrEquals(timestamp, toDateTime64('2025-11-19 00:00:00.000000', 6, 'UTC')), lessOrEquals(timestamp, toDateTime64('today', 6, 'UTC')), equals(e.event, '$pageview'))) GROUP BY actor_id) AS source ORDER BY source.id ASC LIMIT 1000000000 SETTINGS optimize_aggregation_in_order=1, @@ -2566,7 +2566,7 @@ WHERE equals(person_distinct_id_overrides.team_id, 99999) GROUP BY person_distinct_id_overrides.distinct_id HAVING ifNull(equals(tupleElement(argMax(tuple(person_distinct_id_overrides.is_deleted), person_distinct_id_overrides.version), 1), 0), 0) SETTINGS optimize_aggregation_in_order=1) AS e__override ON equals(e.distinct_id, e__override.distinct_id) - WHERE and(equals(e.team_id, 99999), greaterOrEquals(timestamp, toDateTime64('2025-11-18 00:00:00.000000', 6, 'UTC')), lessOrEquals(timestamp, toDateTime64('today', 6, 'UTC')), equals(e.event, '$pageview'))) + WHERE and(equals(e.team_id, 99999), greaterOrEquals(timestamp, toDateTime64('2025-11-19 00:00:00.000000', 6, 'UTC')), 
lessOrEquals(timestamp, toDateTime64('today', 6, 'UTC')), equals(e.event, '$pageview'))) GROUP BY actor_id) AS source ORDER BY source.id ASC LIMIT 1000000000 SETTINGS optimize_aggregation_in_order=1, diff --git a/frontend/src/layout/navigation-3000/sidepanel/panels/SidePanelSdkDoctor.tsx b/frontend/src/layout/navigation-3000/sidepanel/panels/SidePanelSdkDoctor.tsx index 347de6af37775..0b01c4855229d 100644 --- a/frontend/src/layout/navigation-3000/sidepanel/panels/SidePanelSdkDoctor.tsx +++ b/frontend/src/layout/navigation-3000/sidepanel/panels/SidePanelSdkDoctor.tsx @@ -6,6 +6,7 @@ import { LemonBanner, LemonButton, LemonTable, LemonTableColumns, LemonTag, Link import { TZLabel } from 'lib/components/TZLabel' import { FEATURE_FLAGS } from 'lib/constants' +import { dayjs } from 'lib/dayjs' import { useOnMountEffect } from 'lib/hooks/useOnMountEffect' import { IconWithBadge } from 'lib/lemon-ui/icons' import { featureFlagLogic } from 'lib/logic/featureFlagLogic' @@ -110,16 +111,55 @@ const COLUMNS: LemonTableColumns = [ {record.isOutdated ? ( Outdated + ) : record.latestVersion && record.version === record.latestVersion ? ( + + You have the latest available + {record.cachedAt ? ` as of ${dayjs(record.cachedAt).fromNow()}` : ''}. +
+ Click 'Releases ↗' above to check for any since. + + } + > + + Current + +
) : ( - - {record.latestVersion && record.version === record.latestVersion ? 'Current' : 'Recent'} - + + Released {dayjs(record.releaseDate).fromNow()}. +
+ Upgrading is a good idea, but it's not urgent yet. + + ) : ( + "Upgrading is a good idea, but it's not urgent yet" + ) + } + > + + Recent + +
)} ) @@ -305,9 +345,10 @@ export const SidePanelSdkDoctorIcon = (props: { className?: string }): JSX.Eleme } function SdkSection({ sdkType }: { sdkType: SdkType }): JSX.Element { - const { sdkVersionsMap, teamSdkVersionsLoading } = useValues(sidePanelSdkDoctorLogic) + const { sdkVersionsMap, sdkVersions, teamSdkVersionsLoading } = useValues(sidePanelSdkDoctorLogic) const sdk = sdkVersionsMap[sdkType]! + const cachedAt = sdkVersions?.[sdkType]?.cachedAt const links = SDK_DOCS_LINKS[sdkType] const sdkName = SDK_TYPE_READABLE_NAME[sdkType] @@ -315,29 +356,23 @@ function SdkSection({ sdkType }: { sdkType: SdkType }): JSX.Element {
-
-

{sdkName}

- - - {sdk.isOutdated ? 'Outdated' : 'Up to date'} - - - {sdk.isOld && ( - - Old - - )} - -
- Latest version available: {sdk.currentVersion} +

{sdkName}

+ + Latest version available:{' '} + + This was the latest available version + {cachedAt ? ` as of ${dayjs(cachedAt).fromNow()}` : ''} + . +
+ Click 'Releases ↗', to the right, to check for any since. + + } + > + {sdk.currentVersion} +
+
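Reviewer note on the logic diff that follows: `loadTeamSdkVersionsSuccess` now treats the team cache as stale once `cachedAt` is more than 26 hours old (the daily Dagster job plus a 2-hour buffer) and fires a background `force_refresh`. A minimal standalone sketch of that rule, assuming `cachedAt` is the ISO-8601 string the API returns; `isCacheStale` is a name invented here for illustration, not a helper in the PR:

```typescript
// Daily refresh job + 2h buffer, mirroring STALE_CACHE_HOURS in the listener below
const STALE_CACHE_HOURS = 26

function isCacheStale(cachedAt: string | undefined, now: Date = new Date()): boolean {
    if (!cachedAt) {
        // Old cache format without a timestamp: skip the refresh heuristic
        return false
    }
    const hoursOld = (now.getTime() - new Date(cachedAt).getTime()) / (1000 * 60 * 60)
    return hoursOld > STALE_CACHE_HOURS
}

// isCacheStale('2025-11-18T00:00:00Z', new Date('2025-11-19T12:00:00Z')) → true  (36h old)
// isCacheStale('2025-11-19T00:00:00Z', new Date('2025-11-19T12:00:00Z')) → false (12h old)
```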
diff --git a/frontend/src/layout/navigation-3000/sidepanel/panels/sidePanelSdkDoctorLogic.tsx b/frontend/src/layout/navigation-3000/sidepanel/panels/sidePanelSdkDoctorLogic.tsx index b9f64e65e0960..870817739beaa 100644 --- a/frontend/src/layout/navigation-3000/sidepanel/panels/sidePanelSdkDoctorLogic.tsx +++ b/frontend/src/layout/navigation-3000/sidepanel/panels/sidePanelSdkDoctorLogic.tsx @@ -30,6 +30,7 @@ export type SdkVersion = `${string}.${string}.${string}` export type SdkVersionInfo = { latestVersion: SdkVersion releaseDates: Record + cachedAt?: string // ISO timestamp calculated from Redis TTL } // For a team we have a map of SDK types to all of the versions we say in recent times @@ -67,6 +68,7 @@ export type AugmentedTeamSdkVersionsInfoRelease = { isOutdated: boolean isOld: boolean needsUpdating: boolean + cachedAt: string | undefined } /** @@ -81,8 +83,8 @@ export type SdkHealthStatus = 'danger' | 'warning' | 'success' * Provides smart version outdatedness detection. * * Architecture: - * - Backend detection: Team SDK detections cached server-side (72h Redis, refreshed every 12 hours) - * - Version checking: Per-SDK GitHub API queries cached server-side (72h Redis, refreshed every 6 hours) + * - Backend detection: Team SDK detections cached server-side (7-day Redis, refreshed daily at midnight UTC) + * - Version checking: GitHub SDK versions cached server-side (7-day Redis, refreshed daily at midnight UTC) * - Smart semver: Contextual thresholds */ @@ -121,6 +123,21 @@ export const sidePanelSdkDoctorLogic = kea([ unsnooze: () => null, }, ], + lazyLoadedDates: [ + {} as Record, + { + loadVersionDateSuccess: (state, { lazyLoadedVersionDate }) => { + if (!lazyLoadedVersionDate) { + return state + } + const key = `${lazyLoadedVersionDate.sdkType}:${lazyLoadedVersionDate.version}` + return { + ...state, + [key]: lazyLoadedVersionDate.releaseDate, + } + }, + }, + ], })), loaders(() => ({ @@ -140,17 +157,25 @@ export const sidePanelSdkDoctorLogic = kea([ }, ], teamSdkVersions: [ - null as TeamSdkVersionsInfo | null, + null as (TeamSdkVersionsInfo & { cached?: boolean; cachedAt?: string }) | null, { - loadTeamSdkVersions: async ({ - forceRefresh, - }: { forceRefresh?: boolean } = {}): Promise => { + loadTeamSdkVersions: async ({ forceRefresh }: { forceRefresh?: boolean } = {}): Promise< + (TeamSdkVersionsInfo & { cached?: boolean; cachedAt?: string }) | null + > => { const endpoint = forceRefresh === true ? 
'api/team_sdk_versions/?force_refresh=true' : 'api/team_sdk_versions/' try { - const response = await api.get<{ sdk_versions: TeamSdkVersionsInfo; cached: boolean }>(endpoint) - return response.sdk_versions + const response = await api.get<{ + sdk_versions: TeamSdkVersionsInfo + cached: boolean + cachedAt?: string + }>(endpoint) + return { + ...response.sdk_versions, + cached: response.cached, + cachedAt: response.cachedAt, + } } catch (error) { console.error('Error loading team SDK versions:', error) return null @@ -158,37 +183,83 @@ export const sidePanelSdkDoctorLogic = kea([ }, }, ], + lazyLoadedVersionDate: [ + null as { sdkType: SdkType; version: SdkVersion; releaseDate: string } | null, + { + loadVersionDate: async ({ + sdkType, + version, + }: { + sdkType: SdkType + version: SdkVersion + }): Promise<{ sdkType: SdkType; version: SdkVersion; releaseDate: string } | null> => { + try { + const response = await api.get<{ releaseDate: string }>( + `api/sdk_version_date/${sdkType}/${version}/` + ) + return { sdkType, version, releaseDate: response.releaseDate } + } catch (error) { + console.warn(`[SDK Doctor] Failed to load version date for ${sdkType}@${version}:`, error) + return null + } + }, + }, + ], })), selectors({ sdkVersionsMap: [ - (s) => [s.sdkVersions, s.teamSdkVersions], + (s) => [s.sdkVersions, s.teamSdkVersions, s.lazyLoadedDates], ( sdkVersions: Record, - teamSdkVersions: TeamSdkVersionsInfo + teamSdkVersions: (TeamSdkVersionsInfo & { cached?: boolean; cachedAt?: string }) | null, + lazyLoadedDates: Record ): AugmentedTeamSdkVersionsInfo => { if (!sdkVersions || !teamSdkVersions) { return {} } - return Object.fromEntries( - Object.entries(teamSdkVersions).map(([sdkType, teamSdkVersion]) => { - const sdkVersion = sdkVersions[sdkType as SdkType] - const releasesInfo = teamSdkVersion.map((version) => - computeAugmentedInfoRelease(sdkType as SdkType, version, sdkVersion) - ) + // Filter out metadata fields + const sdkData = Object.entries(teamSdkVersions).filter(([, value]) => Array.isArray(value)) as [ + SdkType, + TeamSdkVersionInfo[], + ][] - return [ - sdkType, - { - isOutdated: releasesInfo[0]!.isOutdated, - isOld: releasesInfo[0]!.isOld, - needsUpdating: releasesInfo[0]!.needsUpdating, - currentVersion: sdkVersion.latestVersion, - allReleases: releasesInfo, - }, - ] - }) + return Object.fromEntries( + sdkData + .filter(([sdkType]) => sdkVersions[sdkType as SdkType] !== undefined) + .map(([sdkType, teamSdkVersion]) => { + const sdkVersion = sdkVersions[sdkType as SdkType] + + // Merge lazy-loaded dates + const mergedReleaseDates = { ...sdkVersion.releaseDates } + Object.keys(lazyLoadedDates).forEach((key) => { + const [loadedSdkType, version] = key.split(':') + if (loadedSdkType === sdkType) { + mergedReleaseDates[version as SdkVersion] = lazyLoadedDates[key] + } + }) + + const mergedSdkVersion = { + ...sdkVersion, + releaseDates: mergedReleaseDates, + } + + const releasesInfo = teamSdkVersion.map((version) => + computeAugmentedInfoRelease(sdkType as SdkType, version, mergedSdkVersion) + ) + + return [ + sdkType, + { + isOutdated: releasesInfo[0]!.isOutdated, + isOld: releasesInfo[0]!.isOld, + needsUpdating: releasesInfo[0]!.needsUpdating, + currentVersion: sdkVersion.latestVersion, + allReleases: releasesInfo, + }, + ] + }) ) }, ], @@ -268,11 +339,71 @@ export const sidePanelSdkDoctorLogic = kea([ ], }), - listeners({ + listeners(({ actions, values }) => ({ snoozeSdkDoctor: () => { lemonToast.success('SDK Doctor snoozed for 30 days') }, - }), + loadTeamSdkVersionsSuccess: 
async ({ teamSdkVersions }, breakpoint) => { + // Trigger background refresh if cache is stale (>26 hours) + await breakpoint(100) + + try { + const teamData = teamSdkVersions as TeamSdkVersionsInfo & { cachedAt?: string; cached?: boolean } + if (teamSdkVersions && teamData.cachedAt) { + // Check if cache is stale (>26h = daily job + 2h buffer) + const cachedAtTime = new Date(teamData.cachedAt).getTime() + const now = new Date().getTime() + const hoursOld = (now - cachedAtTime) / (1000 * 60 * 60) + const STALE_CACHE_HOURS = 26 + + if (hoursOld > STALE_CACHE_HOURS) { + // Cache is stale, trigger background refresh + // Fire-and-forget: don't block the UI, don't show loading states + actions.loadTeamSdkVersions({ forceRefresh: true }) + } + } + } catch (error) { + // Background optimization - silent failure acceptable + console.warn('[SDK Doctor] Cache staleness check failed:', error) + } + + // Fire-and-forget: lazy-load missing release dates + const { sdkVersions } = values + + if (!sdkVersions || !teamSdkVersions) { + return + } + + // Find versions with missing release dates + Object.entries(teamSdkVersions) + .filter(([, value]) => Array.isArray(value)) + .forEach(([sdkType, teamVersions]) => { + const sdkVersion = sdkVersions[sdkType as SdkType] + if (!sdkVersion) { + return + } + + ;(teamVersions as TeamSdkVersionInfo[]).forEach((versionInfo) => { + if (typeof versionInfo !== 'object' || !('lib_version' in versionInfo)) { + return + } + const version = versionInfo.lib_version + // Avoid duplicate API calls for already-loaded dates + const hasReleaseDate = + sdkVersion.releaseDates[version] !== undefined || + values.lazyLoadedDates[`${sdkType}:${version}`] !== undefined + + if (!hasReleaseDate) { + // Background load updates state when ready + actions.loadVersionDate({ sdkType: sdkType as SdkType, version }) + } + }) + }) + }, + loadSdkVersionsSuccess: () => { + // Team SDK versions lazy-loaded in loadTeamSdkVersionsSuccess listener + }, + })), afterMount(({ actions, values }) => { actions.loadTeamSdkVersions() @@ -327,6 +458,28 @@ function computeAugmentedInfoRelease( // Check if versions differ const diff = diffVersions(latestVersionParsed, currentVersionParsed) + // Team version newer than cached latest (multiple daily releases) + // Treat as current until cache refreshes at midnight UTC + // diffVersions(a,b) = a.version - b.version, so diff.diff < 0 means b > a + const teamVersionIsNewer = diff && diff.diff < 0 + + if (teamVersionIsNewer) { + // Show as "Current" if newer than cached version from GitHub + return { + type, + version: version.lib_version, + maxTimestamp: version.max_timestamp, + count: version.count, + isOutdated: false, + isOld: false, + needsUpdating: false, + releaseDate: undefined, + daysSinceRelease: undefined, + latestVersion: version.lib_version, + cachedAt: sdkVersion.cachedAt, + } + } + // Count number of versions behind by estimating based on semantic version difference let releasesBehind = 0 if (diff) { @@ -360,7 +513,7 @@ function computeAugmentedInfoRelease( isOld = releasesBehind > 0 && weeksOld > ageThreshold } - // Grace period: Don't flag versions released <7 days ago (even if major version behind) + // Grace period: Don't flag versions released <=7 days ago (even if major version behind) // This gives our team time to fix any issues with recent releases before we nag them about new releases // // NOTE: If daysSinceRelease is undefined (e.g., failed releases not in GitHub), @@ -369,7 +522,7 @@ function computeAugmentedInfoRelease( const 
GRACE_PERIOD_DAYS = 7 if (daysSinceRelease !== undefined) { - isRecentRelease = daysSinceRelease < GRACE_PERIOD_DAYS + isRecentRelease = daysSinceRelease <= GRACE_PERIOD_DAYS } // Smart version detection based on semver difference @@ -385,9 +538,9 @@ function computeAugmentedInfoRelease( isOutdated = true break case 'minor': - // Minor version behind (e.g. 1.2.x -> 1.5.x): Flag if 3+ minors behind OR >6 months old + // Minor: Flag if threshold+ behind (9+ for Web SDK, 3+ for others) OR >6mo old const sixMonthsInDays = 180 - const isMinorOutdatedByCount = diff.diff >= 3 + const isMinorOutdatedByCount = diff.diff >= getMinorVersionThreshold(type) const isMinorOutdatedByAge = daysSinceRelease !== undefined && daysSinceRelease > sixMonthsInDays isOutdated = isMinorOutdatedByCount || isMinorOutdatedByAge break @@ -409,6 +562,7 @@ function computeAugmentedInfoRelease( releaseDate, daysSinceRelease, latestVersion: sdkVersion.latestVersion, + cachedAt: sdkVersion.cachedAt, } } catch { // If we can't parse the versions, return error state @@ -423,6 +577,7 @@ function computeAugmentedInfoRelease( releaseDate: undefined, daysSinceRelease: undefined, latestVersion: sdkVersion.latestVersion, + cachedAt: sdkVersion.cachedAt, } } } @@ -455,3 +610,15 @@ function determineDeviceContext(sdkType: SdkType): 'mobile' | 'desktop' | 'mixed return 'mixed' } + +/** + * Minor version threshold for outdatedness detection. + * Web SDK: 9+ (ships frequently), Others: 3+ + * @returns Minor versions behind before flagging as outdated + */ +function getMinorVersionThreshold(sdkType: SdkType): number { + if (sdkType === 'web') { + return 9 + } + return 3 +} diff --git a/posthog/api/github_sdk_versions.py b/posthog/api/github_sdk_versions.py index 106d3f70c729f..8768487534c41 100644 --- a/posthog/api/github_sdk_versions.py +++ b/posthog/api/github_sdk_versions.py @@ -1,8 +1,11 @@ +import re import json +import time from typing import cast from django.http import JsonResponse +import requests import structlog import posthoganalytics from rest_framework import exceptions @@ -18,13 +21,41 @@ logger = structlog.get_logger(__name__) +# SDK to GitHub repo mapping for lazy-loading individual version dates +SDK_REPO_MAP = { + "web": ("PostHog/posthog-js", "posthog-js@{version}"), + "posthog-node": ("PostHog/posthog-js", "posthog-node@{version}"), + "posthog-react-native": ("PostHog/posthog-js", "posthog-react-native@{version}"), + "posthog-python": ("PostHog/posthog-python", "v{version}"), + "posthog-flutter": ("PostHog/posthog-flutter", "{version}"), + "posthog-ios": ("PostHog/posthog-ios", "{version}"), + "posthog-android": ("PostHog/posthog-android", "android-v{version}"), + "posthog-go": ("PostHog/posthog-go", "v{version}"), + "posthog-php": ("PostHog/posthog-php", "v{version}"), + "posthog-ruby": ("PostHog/posthog-ruby", "v{version}"), + "posthog-elixir": ("PostHog/posthog-elixir", "v{version}"), + "posthog-dotnet": ("PostHog/posthog-dotnet", "v{version}"), +} + +# Fallback tag formats for SDKs with legacy versions +# Old posthog-js versions used v-prefixed tags (e.g., "v1.187.2") +FALLBACK_TAG_TEMPLATES = { + "web": "v{version}", # Legacy format: v-prefixed (e.g., v1.187.2) +} + +# CHANGELOG paths for monorepo SDKs with pre-monorepo history +CHANGELOG_PATHS = { + "posthog-node": "packages/node/CHANGELOG.md", + "posthog-react-native": "packages/react-native/CHANGELOG.md", +} + @api_view(["GET"]) @permission_classes([IsAuthenticated]) def github_sdk_versions(request: Request) -> JsonResponse: """ Serve cached GitHub SDK 
version data for SDK Doctor. - Data is cached by Dagster job that runs every 6 hours. + Data is cached by Dagster job that runs daily at midnight UTC. Protected by sdk-doctor-beta feature flag. """ user = cast(User, request.user) @@ -39,6 +70,7 @@ def github_sdk_versions(request: Request) -> JsonResponse: if cached_data: try: data = json.loads(cached_data.decode("utf-8") if isinstance(cached_data, bytes) else cached_data) + # cachedAt is now stored in the cache data itself (no need to calculate from TTL) response[sdk_type] = data except (json.JSONDecodeError, AttributeError) as e: logger.warning(f"[SDK Doctor] Cache corrupted for {sdk_type}", error=str(e)) @@ -48,3 +80,132 @@ def github_sdk_versions(request: Request) -> JsonResponse: response[sdk_type] = {"error": "SDK data not available. Please try again later."} return JsonResponse(response) + + +@api_view(["GET"]) +@permission_classes([IsAuthenticated]) +def sdk_version_date(request: Request, sdk_type: str, version: str) -> JsonResponse: + """ + Lazy-load release dates for versions not in main cache. + Fetches from GitHub on-demand and caches permanently (release dates are immutable). + """ + user = cast(User, request.user) + if not posthoganalytics.feature_enabled("sdk-doctor-beta", str(user.distinct_id)): + raise exceptions.ValidationError("SDK Doctor is not enabled for this user") + + # Validate SDK type + if sdk_type not in SDK_REPO_MAP: + raise exceptions.ValidationError(f"Invalid SDK type: {sdk_type}") + + redis_client = get_client() + cache_key = f"github:sdk_version_date:{sdk_type}:{version}" + + # Check cache first + cached_date = redis_client.get(cache_key) + if cached_date: + try: + return JsonResponse( + { + "releaseDate": cached_date.decode("utf-8") if isinstance(cached_date, bytes) else cached_date, + "cached": True, + } + ) + except (AttributeError, UnicodeDecodeError) as e: + logger.warning(f"[SDK Doctor] Cache corrupted for {sdk_type}@{version}", error=str(e)) + capture_exception(e, {"sdk_type": sdk_type, "version": version, "cache_key": cache_key}) + + # Fetch from GitHub API with retry for rate limits + repo, tag_template = SDK_REPO_MAP[sdk_type] + tag_name = tag_template.format(version=version) + + try: + github_url = f"https://api.github.com/repos/{repo}/releases/tags/{tag_name}" + response = requests.get(github_url, timeout=10) + + if response.status_code in [403, 429]: + logger.warning( + f"[SDK Doctor] GitHub API rate limit hit for {sdk_type}@{version} (status {response.status_code}), retrying after 2s" + ) + time.sleep(2) + response = requests.get(github_url, timeout=10) + + if response.status_code in [403, 429]: + logger.error( + f"[SDK Doctor] GitHub API rate limit exceeded for {sdk_type}@{version} after retry (status {response.status_code})" + ) + return JsonResponse({"error": "GitHub API rate limit exceeded. 
Please try again later."}, status=503) + + if response.status_code == 200: + release_data = response.json() + release_date = release_data.get("published_at") + + if release_date: + # Permanent cache (immutable data) + redis_client.set(cache_key, release_date) + logger.info(f"[SDK Doctor] Lazy-loaded date for {sdk_type}@{version}: {release_date}") + return JsonResponse({"releaseDate": release_date, "cached": False}) + + # Try fallback tag format for legacy versions + elif response.status_code == 404 and sdk_type in FALLBACK_TAG_TEMPLATES: + fallback_tag = FALLBACK_TAG_TEMPLATES[sdk_type].format(version=version) + fallback_url = f"https://api.github.com/repos/{repo}/releases/tags/{fallback_tag}" + + logger.info(f"[SDK Doctor] Trying fallback tag format for {sdk_type}@{version}: {fallback_tag}") + fallback_response = requests.get(fallback_url, timeout=10) + + if fallback_response.status_code in [403, 429]: + logger.warning( + f"[SDK Doctor] GitHub API rate limit hit on fallback for {sdk_type}@{version} (status {fallback_response.status_code}), retrying after 2s" + ) + time.sleep(2) + fallback_response = requests.get(fallback_url, timeout=10) + + if fallback_response.status_code in [403, 429]: + logger.error( + f"[SDK Doctor] GitHub API rate limit exceeded on fallback for {sdk_type}@{version} after retry (status {fallback_response.status_code})" + ) + return JsonResponse({"error": "GitHub API rate limit exceeded. Please try again later."}, status=503) + + if fallback_response.status_code == 200: + release_data = fallback_response.json() + release_date = release_data.get("published_at") + + if release_date: + # Permanent cache (immutable data) + redis_client.set(cache_key, release_date) + logger.info( + f"[SDK Doctor] Lazy-loaded date for {sdk_type}@{version} using fallback tag: {release_date}" + ) + return JsonResponse({"releaseDate": release_date, "cached": False}) + + # Try CHANGELOG fallback for monorepo SDKs with pre-monorepo history + if response.status_code == 404 and sdk_type in CHANGELOG_PATHS: + changelog_path = CHANGELOG_PATHS[sdk_type] + changelog_url = f"https://raw.githubusercontent.com/{repo}/main/{changelog_path}" + + logger.info(f"[SDK Doctor] Trying CHANGELOG fallback for {sdk_type}@{version}") + changelog_response = requests.get(changelog_url, timeout=10) + + if changelog_response.status_code == 200: + changelog_content = changelog_response.text + version_pattern = re.compile(r"^## (\d+\.\d+\.\d+) - (\d{4}-\d{2}-\d{2})", re.MULTILINE) + matches = version_pattern.findall(changelog_content) + + for found_version, date in matches: + if found_version == version: + release_date = f"{date}T00:00:00Z" + redis_client.set(cache_key, release_date) + logger.info( + f"[SDK Doctor] Lazy-loaded date for {sdk_type}@{version} from CHANGELOG: {release_date}" + ) + return JsonResponse({"releaseDate": release_date, "cached": False}) + + logger.warning( + f"[SDK Doctor] Could not fetch release date for {sdk_type}@{version}", status=response.status_code + ) + return JsonResponse({"error": "Release date not available"}, status=404) + + except Exception as e: + logger.exception(f"[SDK Doctor] Error fetching release date for {sdk_type}@{version}") + capture_exception(e, {"sdk_type": sdk_type, "version": version}) + return JsonResponse({"error": "Failed to fetch release date"}, status=500) diff --git a/posthog/api/team_sdk_versions.py b/posthog/api/team_sdk_versions.py index b620b992dd600..21c20e152199a 100644 --- a/posthog/api/team_sdk_versions.py +++ b/posthog/api/team_sdk_versions.py @@ -24,7 
+24,7 @@ @permission_classes([IsAuthenticated]) def team_sdk_versions(request: Request) -> JsonResponse: """ - Serve team SDK versions. Data is cached by Dagster job (runs every 6 hours). + Serve team SDK versions. Data is cached by Dagster job (runs daily at midnight UTC). Supports force_refresh=true for on-demand detection. Protected by sdk-doctor-beta feature flag. """ @@ -52,11 +52,23 @@ cached_data = redis_client.get(cache_key) if cached_data: try: - sdk_versions = json.loads( + cache_payload = json.loads( cached_data.decode("utf-8") if isinstance(cached_data, bytes) else cached_data ) + # Handle both old format (just sdk_versions) and new format (with cachedAt) + if isinstance(cache_payload, dict) and "sdk_versions" in cache_payload: + sdk_versions = cache_payload["sdk_versions"] + cached_at = cache_payload.get("cachedAt") + else: + # Old cache format - just the sdk_versions dict + sdk_versions = cache_payload + cached_at = None + logger.info(f"[SDK Doctor] Team {team_id} SDK versions successfully read from cache") - return JsonResponse({"sdk_versions": sdk_versions, "cached": True}, safe=False) + response = {"sdk_versions": sdk_versions, "cached": True} + if cached_at: + response["cachedAt"] = cached_at + return JsonResponse(response, safe=False) except (json.JSONDecodeError, AttributeError) as e: logger.warning(f"[SDK Doctor] Cache corrupted for team {team_id}", error=str(e)) capture_exception(e) @@ -67,7 +79,23 @@ try: sdk_versions = get_and_cache_team_sdk_versions(team_id, redis_client) if sdk_versions is not None: - return JsonResponse({"sdk_versions": sdk_versions, "cached": False}, safe=False) + # After force refresh, re-read the cache to get the cachedAt timestamp + cached_data = redis_client.get(cache_key) + cached_at = None + if cached_data: + try: + cache_payload = json.loads( + cached_data.decode("utf-8") if isinstance(cached_data, bytes) else cached_data ) + cached_at = cache_payload.get("cachedAt") + except (json.JSONDecodeError, AttributeError): + # cachedAt is optional metadata - graceful degradation if extraction fails + pass + + response = {"sdk_versions": sdk_versions, "cached": False} + if cached_at: + response["cachedAt"] = cached_at + return JsonResponse(response, safe=False) else: logger.error(f"[SDK Doctor] No data received from ClickHouse for team {team_id}") return JsonResponse({"error": "Failed to get SDK versions. Please try again later."}, status=500) diff --git a/posthog/urls.py b/posthog/urls.py index e9a95d8a3a44d..3dec5f05cd77a 100644 --- a/posthog/urls.py +++ b/posthog/urls.py @@ -30,7 +30,7 @@ uploaded_media, user, ) -from posthog.api.github_sdk_versions import github_sdk_versions +from posthog.api.github_sdk_versions import github_sdk_versions, sdk_version_date from posthog.api.query import progress from posthog.api.slack import slack_interactivity_callback from posthog.api.survey import public_survey_page, surveys @@ -174,6 +174,7 @@ def authorize_and_redirect(request: HttpRequest) -> HttpResponse: path("api/unsubscribe", unsubscribe.unsubscribe), path("api/alerts/github", github.SecretAlert.as_view()), path("api/sdk_versions/", github_sdk_versions), + path("api/sdk_version_date/<str:sdk_type>/<str:version>/", sdk_version_date), path("api/team_sdk_versions/", team_sdk_versions), opt_slash_path("api/support/ensure-zendesk-organization", csrf_exempt(ensure_zendesk_organization)), path("api/", include(router.urls)),
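A closing note on the `teamVersionIsNewer` guard added to `computeAugmentedInfoRelease`: the in-diff comment pins the sign convention as `diffVersions(a, b) = a.version - b.version`, so a negative `diff.diff` means the team already runs something newer than the cached "latest" (for example, an SDK released after the midnight UTC cache run). A rough sketch of that convention; this `diffVersions` and the `Parsed` type are hypothetical stand-ins for the app's helpers, whose exact signatures are not shown in the diff:

```typescript
type Parsed = { major: number; minor: number; patch: number }

// Stand-in: report the first differing semver part and (a - b) for that part
function diffVersions(a: Parsed, b: Parsed): { kind: 'major' | 'minor' | 'patch'; diff: number } | null {
    if (a.major !== b.major) return { kind: 'major', diff: a.major - b.major }
    if (a.minor !== b.minor) return { kind: 'minor', diff: a.minor - b.minor }
    if (a.patch !== b.patch) return { kind: 'patch', diff: a.patch - b.patch }
    return null // versions are identical
}

// cached latest (a) vs. version seen in team events (b):
const diff = diffVersions({ major: 1, minor: 240, patch: 0 }, { major: 1, minor: 241, patch: 0 })
// → { kind: 'minor', diff: -1 }: diff.diff < 0, so the team version is newer than the
// cached latest and is rendered as "Current" until the next midnight UTC refresh.
```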