diff --git a/.dev/mailpit/Dockerfile b/.dev/mailpit/Dockerfile new file mode 100644 index 000000000..6e79e1309 --- /dev/null +++ b/.dev/mailpit/Dockerfile @@ -0,0 +1,18 @@ +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +FROM axllent/mailpit:v1.28.0 + +EXPOSE 1025 +EXPOSE 8025 diff --git a/.dev/mailpit/manifests/pod.yaml b/.dev/mailpit/manifests/pod.yaml new file mode 100644 index 000000000..e96ec3a17 --- /dev/null +++ b/.dev/mailpit/manifests/pod.yaml @@ -0,0 +1,41 @@ +# Copyright 2025 Google LLC + +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at + +# https://www.apache.org/licenses/LICENSE-2.0 + +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +apiVersion: v1 +kind: Pod +metadata: + name: mailpit + labels: + app.kubernetes.io/name: mailpit +spec: + containers: + - name: mailpit + image: mailpit + imagePullPolicy: Never # Need this for pushing directly into minikube + ports: + - containerPort: 1025 + name: smtp-port + - containerPort: 8025 + name: web-ui-port + readinessProbe: + tcpSocket: + port: 1025 + initialDelaySeconds: 10 + resources: + limits: + cpu: 250m + memory: 128Mi + requests: + cpu: 100m + memory: 64Mi diff --git a/.dev/mailpit/manifests/service.yaml b/.dev/mailpit/manifests/service.yaml new file mode 100644 index 000000000..3e1666611 --- /dev/null +++ b/.dev/mailpit/manifests/service.yaml @@ -0,0 +1,30 @@ +# Copyright 2025 Google LLC + +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at + +# https://www.apache.org/licenses/LICENSE-2.0 + +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +apiVersion: v1 +kind: Service +metadata: + name: mailpit +spec: + selector: + app.kubernetes.io/name: mailpit + ports: + - name: smtp + protocol: TCP + port: 1025 + targetPort: smtp-port + - name: web-ui + protocol: TCP + port: 8025 + targetPort: web-ui-port diff --git a/.dev/mailpit/skaffold.yaml b/.dev/mailpit/skaffold.yaml new file mode 100644 index 000000000..62e1da612 --- /dev/null +++ b/.dev/mailpit/skaffold.yaml @@ -0,0 +1,31 @@ +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +apiVersion: skaffold/v4beta9 +kind: Config +metadata: + name: mailpit-config +profiles: + - name: local + build: + artifacts: + - image: mailpit + context: . + local: + useBuildkit: true + manifests: + rawYaml: + - manifests/* + deploy: + kubectl: {} diff --git a/.dev/pubsub/run.sh b/.dev/pubsub/run.sh index 03f96c72d..2c9bf8332 100755 --- a/.dev/pubsub/run.sh +++ b/.dev/pubsub/run.sh @@ -32,9 +32,11 @@ create_topic() { } # Function to create a Subscription (Pull). gcloud does not support subscription creation in the emulator, so we use curl. +# Usage: create_subscription <topic_name> <sub_name> [dlq_topic_name] create_subscription() { local topic_name=$1 local sub_name=$2 + local dlq_topic_name=$3 if [[ -z "$topic_name" || -z "$sub_name" ]]; then echo "Error: Topic name and Subscription name required." 
@@ -46,25 +48,60 @@ create_subscription() { # The emulator requires the full path to the topic in the JSON body local topic_path="projects/${PROJECT_ID}/topics/${topic_name}" + # Base JSON payload + local json_payload="{\"topic\": \"$topic_path\"" + + # If a DLQ topic is provided, add the deadLetterPolicy + if [[ -n "$dlq_topic_name" ]]; then + local dlq_topic_path="projects/${PROJECT_ID}/topics/${dlq_topic_name}" + # MaxDeliveryAttempts is set to 5 as a standard default + json_payload="${json_payload}, \"deadLetterPolicy\": {\"deadLetterTopic\": \"${dlq_topic_path}\", \"maxDeliveryAttempts\": 5}" + echo " -> With Dead Letter Queue: ${dlq_topic_name}" + fi + + # Close the JSON object + json_payload="${json_payload}}" + curl -s -X PUT "http://0.0.0.0:${PORT}/v1/projects/${PROJECT_ID}/subscriptions/${sub_name}" \ -H "Content-Type: application/json" \ - -d "{\"topic\": \"$topic_path\"}" + -d "$json_payload" echo -e "\nSubscription ${sub_name} for topic: ${topic_name} created." } gcloud beta emulators pubsub start --project="$PROJECT_ID" --host-port="0.0.0.0:$PORT" & -while ! curl -s -o /dev/null "localhost:$PORT"; do +while ! curl -s -f "http://0.0.0.0:${PORT}/v1/projects/${PROJECT_ID}/topics"; do sleep 1 # Wait 1 second before checking again echo "waiting until pubsub emulator responds before finishing setup" done +# --- 1. Dead Letter Queues (Create these first so main subs can reference them) --- + +# Ingestion DLQ +create_topic "ingestion-jobs-dead-letter-topic-id" +create_subscription "ingestion-jobs-dead-letter-topic-id" "ingestion-jobs-dead-letter-sub-id" + +# Notification/Fan-out DLQ +create_topic "notification-events-dead-letter-topic-id" +create_subscription "notification-events-dead-letter-topic-id" "notification-events-dead-letter-sub-id" + +# Delivery DLQ +create_topic "delivery-dead-letter-topic-id" +create_subscription "delivery-dead-letter-topic-id" "delivery-dead-letter-sub-id" + + +# --- 2. 
Main Topics and Subscriptions --- +create_topic "batch-updates-topic-id" +create_subscription "batch-updates-topic-id" "batch-updates-sub-id" "ingestion-jobs-dead-letter-topic-id" + create_topic "ingestion-jobs-topic-id" -create_subscription "ingestion-jobs-topic-id" "ingestion-jobs-sub-id" +create_subscription "ingestion-jobs-topic-id" "ingestion-jobs-sub-id" "ingestion-jobs-dead-letter-topic-id" + create_topic "notification-events-topic-id" -create_subscription "notification-events-topic-id" "notification-events-sub-id" +create_subscription "notification-events-topic-id" "notification-events-sub-id" "notification-events-dead-letter-topic-id" + create_topic "chime-delivery-topic-id" -create_subscription "chime-delivery-topic-id" "chime-delivery-sub-id" +create_subscription "chime-delivery-topic-id" "chime-delivery-sub-id" "delivery-dead-letter-topic-id" echo "Pub/Sub setup for webstatus.dev finished" diff --git a/.prettierignore b/.prettierignore index 3573c80a9..bfb12f32a 100644 --- a/.prettierignore +++ b/.prettierignore @@ -7,6 +7,7 @@ jsonschema/mdn_browser-compat-data jsonschema/web-platform-dx_web-features jsonschema/web-platform-dx_web-features-mappings docs/schema +workers/email/pkg/digest/testdata/digest.golden.html workflows/steps/services/bcd_consumer/pkg/data/testdata/data.json workflows/steps/services/web_feature_consumer/pkg/data/testdata/v3.data.json workflows/steps/services/developer_signals_consumer/pkg/data/testdata/web-features-signals.json diff --git a/DEVELOPMENT.md b/DEVELOPMENT.md index ecc7fa7d9..178a7737b 100644 --- a/DEVELOPMENT.md +++ b/DEVELOPMENT.md @@ -93,7 +93,7 @@ The above skaffold command deploys multiple resources: | valkey | Valkey | N/A | valkey:6379 | | auth | Auth Emulator | http://localhost:9099
http://localhost:9100/auth (ui) | http://auth:9099
http://auth:9100/auth (ui) | | wiremock | Wiremock | http://localhost:8087 | http://api-github-mock.default.svc.cluster.local:8080 (GitHub Mock) | -| pubsub | Pub/Sub Emulator | N/A | http://pubsub:8086 | +| pubsub | Pub/Sub Emulator | http://localhost:8060 | http://pubsub:8060 | | gcs | GCS Emulator | N/A | http://gcs:4443 | _In the event the servers are not responsive, make a temporary change to a file_ diff --git a/GEMINI.md b/GEMINI.md index bd4054d2f..945f9702c 100644 --- a/GEMINI.md +++ b/GEMINI.md @@ -69,6 +69,8 @@ The frontend Single Page Application (SPA). - **DON'T** introduce other UI frameworks like React or Vue. - **DO** use the Lit Context and service container pattern to access shared services. - **DON'T** create new global state management solutions. + - **DON'T** render the full page layout (header, sidebar, main container) inside a page component. DO focus on rendering only the specific content for that route. The main `webstatus-app` component provides the overall page shell. + - **DON'T** add generic class names (e.g., `.card`, `.container`) to `shared-css.ts`. DO leverage Shadow DOM encapsulation by defining component-specific styles within the component's `static styles` block. Use composition with slots (e.g., a dedicated card component) for reusable layout patterns instead of shared global CSS classes. - **DO** add Playwright tests for new user-facing features and unit tests for component logic. #### Workflows / Data Ingestion Jobs (`workflows/`) @@ -165,17 +167,17 @@ This section describes the components related to user accounts, authentication, - **Authentication**: User authentication is handled via Firebase Authentication on the frontend, which integrates with GitHub as an OAuth provider. When a user signs in, the frontend receives a Firebase token and a GitHub token. The GitHub token is sent to the backend during a login sync process. 
- **User Profile Sync**: On login, the backend synchronizes the user's verified GitHub emails with their notification channels in the database. This is an example of a transactional update using the mapper pattern. The flow is as follows: - 1. A user signs in on the frontend. The frontend sends the user's GitHub token to the backend's `/v1/users/me/ping` endpoint. - 2. The `httpserver.PingUser` handler receives the request. It uses a `UserGitHubClient` to fetch the user's profile and verified emails from the GitHub API. - 3. The handler then calls `spanneradapters.Backend.SyncUserProfileInfo` with the user's profile information. - 4. The `Backend` adapter translates the `backendtypes.UserProfile` to a `gcpspanner.UserProfile` and calls `gcpspanner.Client.SyncUserProfileInfo`. - 5. `SyncUserProfileInfo` starts a `ReadWriteTransaction`. - 6. Inside the transaction, it fetches the user's existing `NotificationChannels` and `NotificationChannelStates`. - 7. It then compares the existing channels with the verified emails from GitHub. - - New emails result in new `NotificationChannel` and `NotificationChannelState` records, created using `createWithTransaction`. - - Emails that were previously disabled are re-enabled using `updateWithTransaction`. - - Channels for emails that are no longer verified are disabled, also using `updateWithTransaction`. - 8. The entire set of operations is committed atomically. If any step fails, the entire transaction is rolled back. + 1. A user signs in on the frontend. The frontend sends the user's GitHub token to the backend's `/v1/users/me/ping` endpoint. + 2. The `httpserver.PingUser` handler receives the request. It uses a `UserGitHubClient` to fetch the user's profile and verified emails from the GitHub API. + 3. The handler then calls `spanneradapters.Backend.SyncUserProfileInfo` with the user's profile information. + 4. 
The `Backend` adapter translates the `backendtypes.UserProfile` to a `gcpspanner.UserProfile` and calls `gcpspanner.Client.SyncUserProfileInfo`. + 5. `SyncUserProfileInfo` starts a `ReadWriteTransaction`. + 6. Inside the transaction, it fetches the user's existing `NotificationChannels` and `NotificationChannelStates`. + 7. It then compares the existing channels with the verified emails from GitHub. + - New emails result in new `NotificationChannel` and `NotificationChannelState` records, created using `createWithTransaction`. + - Emails that were previously disabled are re-enabled using `updateWithTransaction`. + - Channels for emails that are no longer verified are disabled, also using `updateWithTransaction`. + 8. The entire set of operations is committed atomically. If any step fails, the entire transaction is rolled back. - **Notification Channels**: The `NotificationChannels` table stores the destinations for notifications (e.g., email addresses). Each channel has a corresponding entry in the `NotificationChannelStates` table, which tracks whether the channel is enabled or disabled. - **Saved Search Subscriptions**: Authenticated users can save feature search queries and subscribe to receive notifications when the results of that query change. This is managed through the `UserSavedSearches` and `UserSavedSearchBookmarks` tables. @@ -232,6 +234,7 @@ This practice decouples the core application logic from the exact structure of t - **DO** add E2E tests for critical user journeys. - **DON'T** write E2E tests for small component-level interactions. - **DO** use resilient selectors like `data-testid`. + - **DO** move the mouse to a neutral position (e.g., `page.mouse.move(0, 0)`) before taking a screenshot to avoid flaky tests caused by unintended hover effects. - **Go Unit & Integration Tests**: - **DO** use table-driven unit tests with mocks for dependencies at the adapter layer (`spanneradapters`). 
- **DO** write **integration tests using `testcontainers-go`** for any changes to the `lib/gcpspanner` layer. This is especially critical when implementing or modifying a mapper. These tests must spin up a Spanner emulator and verify the mapper's logic against a real database. @@ -239,6 +242,7 @@ This practice decouples the core application logic from the exact structure of t - **TypeScript Unit Tests**: - **TypeScript**: Use `npm run test -w frontend`. - **ES Module Testing**: When testing components that use ES module exports directly (e.g., `signInWithPopup` from `firebase/auth`), direct stubbing with Sinon (e.g., `sinon.stub(firebaseAuth, 'signInWithPopup')`) is problematic due to module immutability. Instead, introduce a helper property (e.g., `credentialGetter`) in the component that defaults to the original ES module function but can be overridden with a Sinon stub in tests. This allows for effective mocking of ES module interactions. + - **DO** prefer using generic arguments for `querySelector` in tests (e.g., `querySelector('slot[name="content"]')`) to improve type safety and avoid unnecessary casting. ### 5.3. 
CI/CD (`.github/`) diff --git a/Makefile b/Makefile index e7aa27c9c..aa25edd17 100644 --- a/Makefile +++ b/Makefile @@ -89,6 +89,8 @@ check-local-ports: $(call wait_for_port,9010,spanner) $(call wait_for_port,8086,datastore) $(call wait_for_port,8087,wiremock) + $(call wait_for_port,8060,pubsub) + $(call wait_for_port,8025,mailpit) port-forward-manual: port-forward-terminate @@ -98,6 +100,8 @@ port-forward-manual: port-forward-terminate kubectl wait --for=condition=ready pod/datastore kubectl wait --for=condition=ready pod/spanner kubectl wait --for=condition=ready pod/wiremock + kubectl wait --for=condition=ready pod/pubsub + kubectl wait --for=condition=ready pod/mailpit kubectl port-forward --address 127.0.0.1 pod/frontend 5555:5555 2>&1 >/dev/null & kubectl port-forward --address 127.0.0.1 pod/backend 8080:8080 2>&1 >/dev/null & kubectl port-forward --address 127.0.0.1 pod/auth 9099:9099 2>&1 >/dev/null & @@ -105,6 +109,8 @@ port-forward-manual: port-forward-terminate kubectl port-forward --address 127.0.0.1 pod/spanner 9010:9010 2>&1 >/dev/null & kubectl port-forward --address 127.0.0.1 pod/datastore 8086:8086 2>&1 >/dev/null & kubectl port-forward --address 127.0.0.1 pod/wiremock 8087:8080 2>&1 >/dev/null & + kubectl port-forward --address 127.0.0.1 pod/pubsub 8060:8060 2>&1 >/dev/null & + kubectl port-forward --address 127.0.0.1 pod/mailpit 8025:8025 2>&1 >/dev/null & make check-local-ports port-forward-terminate: @@ -115,6 +121,8 @@ port-forward-terminate: fuser -k 9010/tcp || true fuser -k 8086/tcp || true fuser -k 8087/tcp || true + fuser -k 8060/tcp || true + fuser -k 8025/tcp || true # Prerequisite target to start minikube if necessary minikube-running: @@ -337,7 +345,8 @@ ADDLICENSE_ARGS := -c "${COPYRIGHT_NAME}" \ -ignore 'node_modules/**' \ -ignore 'infra/storage/spanner/schema.sql' \ -ignore 'antlr/.antlr/**' \ - -ignore '.devcontainer/cache/**' + -ignore '.devcontainer/cache/**' \ + -ignore 'workers/email/pkg/digest/testdata/digest.golden.html' 
license-check: go-install-tools go tool addlicense -check $(ADDLICENSE_ARGS) . diff --git a/backend/cmd/server/main.go b/backend/cmd/server/main.go index efe66f401..e8a670f5e 100644 --- a/backend/cmd/server/main.go +++ b/backend/cmd/server/main.go @@ -28,6 +28,8 @@ import ( "github.com/GoogleChrome/webstatus.dev/backend/pkg/httpserver" "github.com/GoogleChrome/webstatus.dev/lib/auth" "github.com/GoogleChrome/webstatus.dev/lib/cachetypes" + "github.com/GoogleChrome/webstatus.dev/lib/gcppubsub" + "github.com/GoogleChrome/webstatus.dev/lib/gcppubsub/gcppubsubadapters" "github.com/GoogleChrome/webstatus.dev/lib/gcpspanner" "github.com/GoogleChrome/webstatus.dev/lib/gcpspanner/spanneradapters" "github.com/GoogleChrome/webstatus.dev/lib/gds" @@ -197,11 +199,30 @@ func main() { } + pubsubProjectID := os.Getenv("PUBSUB_PROJECT_ID") + if pubsubProjectID == "" { + slog.ErrorContext(ctx, "missing pubsub project id") + os.Exit(1) + } + + ingestionTopicID := os.Getenv("INGESTION_TOPIC_ID") + if ingestionTopicID == "" { + slog.ErrorContext(ctx, "missing ingestion topic id") + os.Exit(1) + } + + queueClient, err := gcppubsub.NewClient(ctx, pubsubProjectID) + if err != nil { + slog.ErrorContext(ctx, "unable to create pub sub client", "error", err) + os.Exit(1) + } + srv := httpserver.NewHTTPServer( "8080", baseURL, datastoreadapters.NewBackend(fs), spanneradapters.NewBackend(spannerClient), + gcppubsubadapters.NewBackendAdapter(queueClient, ingestionTopicID), cache, routeCacheOptions, func(token string) *httpserver.UserGitHubClient { diff --git a/backend/go.mod b/backend/go.mod index bf8b724db..5b01b940d 100644 --- a/backend/go.mod +++ b/backend/go.mod @@ -23,6 +23,7 @@ require ( cloud.google.com/go/logging v1.13.1 // indirect cloud.google.com/go/longrunning v0.7.0 // indirect cloud.google.com/go/monitoring v1.24.3 // indirect + cloud.google.com/go/pubsub/v2 v2.0.0 // indirect cloud.google.com/go/secretmanager v1.16.0 // indirect cloud.google.com/go/spanner v1.86.1 // indirect 
cloud.google.com/go/storage v1.57.2 // indirect diff --git a/backend/go.sum b/backend/go.sum index 26b4d8057..2bdce011c 100644 --- a/backend/go.sum +++ b/backend/go.sum @@ -445,6 +445,8 @@ cloud.google.com/go/pubsub v1.26.0/go.mod h1:QgBH3U/jdJy/ftjPhTkyXNj543Tin1pRYcd cloud.google.com/go/pubsub v1.27.1/go.mod h1:hQN39ymbV9geqBnfQq6Xf63yNhUAhv9CZhzp5O6qsW0= cloud.google.com/go/pubsub v1.28.0/go.mod h1:vuXFpwaVoIPQMGXqRyUQigu/AX1S3IWugR9xznmcXX8= cloud.google.com/go/pubsub v1.30.0/go.mod h1:qWi1OPS0B+b5L+Sg6Gmc9zD1Y+HaM0MdUr7LsupY1P4= +cloud.google.com/go/pubsub/v2 v2.0.0 h1:0qS6mRJ41gD1lNmM/vdm6bR7DQu6coQcVwD+VPf0Bz0= +cloud.google.com/go/pubsub/v2 v2.0.0/go.mod h1:0aztFxNzVQIRSZ8vUr79uH2bS3jwLebwK6q1sgEub+E= cloud.google.com/go/pubsublite v1.5.0/go.mod h1:xapqNQ1CuLfGi23Yda/9l4bBCKz/wC3KIJ5gKcxveZg= cloud.google.com/go/pubsublite v1.6.0/go.mod h1:1eFCS0U11xlOuMFV/0iBqw3zP12kddMeCbj/F3FSj9k= cloud.google.com/go/pubsublite v1.7.0/go.mod h1:8hVMwRXfDfvGm3fahVbtDbiLePT3gpoiJYJY+vxWxVM= @@ -1105,6 +1107,8 @@ github.com/yusufpapurcu/wmi v1.2.4 h1:zFUKzehAFReQwLys1b/iSMl+JQGSCSjtVqQn9bBrPo github.com/yusufpapurcu/wmi v1.2.4/go.mod h1:SBZ9tNy3G9/m5Oi98Zks0QjeHVDvuK0qfxQmPyzfmi0= github.com/zeebo/assert v1.3.0/go.mod h1:Pq9JiuJQpG8JLJdtkwrJESF0Foym2/D9XMU5ciN/wJ0= github.com/zeebo/xxh3 v1.0.2/go.mod h1:5NWz9Sef7zIDm2JHfFlcQvNekmcEl9ekUZQQKCYaDcA= +go.einride.tech/aip v0.68.1 h1:16/AfSxcQISGN5z9C5lM+0mLYXihrHbQ1onvYTr93aQ= +go.einride.tech/aip v0.68.1/go.mod h1:XaFtaj4HuA3Zwk9xoBtTWgNubZ0ZZXv9BZJCkuKuWbg= go.opencensus.io v0.21.0/go.mod h1:mSImk1erAIZhrmZN+AvHh14ztQfjbGwt4TtuofqLduU= go.opencensus.io v0.22.0/go.mod h1:+kGneAE2xo2IficOXnaByMWTGM9T73dGwxeWcUqIpI8= go.opencensus.io v0.22.2/go.mod h1:yxeiOL68Rb0Xd1ddK5vPZ/oVn4vY4Ynel7k9FzqtOIw= diff --git a/backend/manifests/pod.yaml b/backend/manifests/pod.yaml index 9a003c83a..59c292cf6 100644 --- a/backend/manifests/pod.yaml +++ b/backend/manifests/pod.yaml @@ -59,6 +59,12 @@ spec: value: auth:9099 - name: 
GITHUB_API_BASE_URL value: http://api-github-mock.default.svc.cluster.local:8080/ + - name: PUBSUB_PROJECT_ID + value: local + - name: PUBSUB_EMULATOR_HOST + value: pubsub:8060 + - name: INGESTION_TOPIC_ID + value: 'ingestion-jobs-topic-id' resources: limits: cpu: 250m diff --git a/backend/pkg/httpserver/create_saved_search.go b/backend/pkg/httpserver/create_saved_search.go index 5896b99f8..3c6507c62 100644 --- a/backend/pkg/httpserver/create_saved_search.go +++ b/backend/pkg/httpserver/create_saved_search.go @@ -176,5 +176,11 @@ func (s *Server) CreateSavedSearch(ctx context.Context, request backend.CreateSa }, nil } + err = s.eventPublisher.PublishSearchConfigurationChanged(ctx, output, user.ID, true) + if err != nil { + // We should not mark this as a failure. Only log it. + slog.WarnContext(ctx, "unable to publish search configuration changed event during create", "error", err) + } + return backend.CreateSavedSearch201JSONResponse(*output), nil } diff --git a/backend/pkg/httpserver/create_saved_search_test.go b/backend/pkg/httpserver/create_saved_search_test.go index cc7840a63..519304f74 100644 --- a/backend/pkg/httpserver/create_saved_search_test.go +++ b/backend/pkg/httpserver/create_saved_search_test.go @@ -44,6 +44,7 @@ func TestCreateSavedSearch(t *testing.T) { testCases := []struct { name string mockCreateUserSavedSearchConfig *MockCreateUserSavedSearchConfig + mockPublishConfig *MockPublishSearchConfigurationChangedConfig authMiddlewareOption testServerOption request *http.Request expectedResponse *http.Response @@ -51,6 +52,7 @@ func TestCreateSavedSearch(t *testing.T) { { name: "name is 33 characters long, missing query", mockCreateUserSavedSearchConfig: nil, + mockPublishConfig: nil, request: httptest.NewRequest( http.MethodPost, "/v1/saved-searches", @@ -70,6 +72,7 @@ func TestCreateSavedSearch(t *testing.T) { { name: "name is empty", mockCreateUserSavedSearchConfig: nil, + mockPublishConfig: nil, authMiddlewareOption: 
withAuthMiddleware(mockAuthMiddleware(testUser)), request: httptest.NewRequest( http.MethodPost, @@ -88,6 +91,7 @@ func TestCreateSavedSearch(t *testing.T) { { name: "name is missing", mockCreateUserSavedSearchConfig: nil, + mockPublishConfig: nil, authMiddlewareOption: withAuthMiddleware(mockAuthMiddleware(testUser)), request: httptest.NewRequest( http.MethodPost, @@ -106,6 +110,7 @@ func TestCreateSavedSearch(t *testing.T) { { name: "query is empty", mockCreateUserSavedSearchConfig: nil, + mockPublishConfig: nil, authMiddlewareOption: withAuthMiddleware(mockAuthMiddleware(testUser)), request: httptest.NewRequest( http.MethodPost, @@ -124,6 +129,7 @@ func TestCreateSavedSearch(t *testing.T) { { name: "query is 257 characters long", mockCreateUserSavedSearchConfig: nil, + mockPublishConfig: nil, authMiddlewareOption: withAuthMiddleware(mockAuthMiddleware(testUser)), request: httptest.NewRequest( http.MethodPost, @@ -142,6 +148,7 @@ func TestCreateSavedSearch(t *testing.T) { { name: "description is empty", mockCreateUserSavedSearchConfig: nil, + mockPublishConfig: nil, authMiddlewareOption: withAuthMiddleware(mockAuthMiddleware(testUser)), request: httptest.NewRequest( http.MethodPost, @@ -160,6 +167,7 @@ func TestCreateSavedSearch(t *testing.T) { { name: "description is 1025 characters long", mockCreateUserSavedSearchConfig: nil, + mockPublishConfig: nil, authMiddlewareOption: withAuthMiddleware(mockAuthMiddleware(testUser)), request: httptest.NewRequest( http.MethodPost, @@ -179,6 +187,7 @@ func TestCreateSavedSearch(t *testing.T) { { name: "query has bad syntax", mockCreateUserSavedSearchConfig: nil, + mockPublishConfig: nil, authMiddlewareOption: withAuthMiddleware(mockAuthMiddleware(testUser)), request: httptest.NewRequest( http.MethodPost, @@ -197,6 +206,7 @@ func TestCreateSavedSearch(t *testing.T) { { name: "missing body creation error", mockCreateUserSavedSearchConfig: nil, + mockPublishConfig: nil, authMiddlewareOption: 
withAuthMiddleware(mockAuthMiddleware(testUser)), request: httptest.NewRequest( http.MethodPost, @@ -226,6 +236,7 @@ func TestCreateSavedSearch(t *testing.T) { output: nil, err: errTest, }, + mockPublishConfig: nil, authMiddlewareOption: withAuthMiddleware(mockAuthMiddleware(testUser)), request: httptest.NewRequest( http.MethodPost, @@ -251,6 +262,7 @@ func TestCreateSavedSearch(t *testing.T) { output: nil, err: errors.Join(backendtypes.ErrUserMaxSavedSearches, errTest), }, + mockPublishConfig: nil, authMiddlewareOption: withAuthMiddleware(mockAuthMiddleware(testUser)), request: httptest.NewRequest( http.MethodPost, @@ -289,6 +301,25 @@ func TestCreateSavedSearch(t *testing.T) { }, err: nil, }, + mockPublishConfig: &MockPublishSearchConfigurationChangedConfig{ + expectedResp: &backend.SavedSearchResponse{ + Id: "searchID1", + Name: "test name", + Query: `name:"test"`, + Description: nil, + Permissions: &backend.UserSavedSearchPermissions{ + Role: valuePtr(backend.SavedSearchOwner), + }, + BookmarkStatus: &backend.UserSavedSearchBookmark{ + Status: backend.BookmarkActive, + }, + CreatedAt: time.Date(2000, time.January, 1, 0, 0, 0, 0, time.UTC), + UpdatedAt: time.Date(2000, time.January, 1, 0, 0, 0, 0, time.UTC), + }, + expectedUserID: "testID1", + expectedIsCreation: true, + err: nil, + }, authMiddlewareOption: withAuthMiddleware(mockAuthMiddleware(testUser)), request: httptest.NewRequest( http.MethodPost, @@ -308,7 +339,7 @@ func TestCreateSavedSearch(t *testing.T) { ), }, { - name: "successful with name, query and description", + name: "successful with name, query and description, failed publish", mockCreateUserSavedSearchConfig: &MockCreateUserSavedSearchConfig{ expectedSavedSearch: backend.SavedSearch{ Name: "test name", @@ -332,6 +363,25 @@ func TestCreateSavedSearch(t *testing.T) { }, err: nil, }, + mockPublishConfig: &MockPublishSearchConfigurationChangedConfig{ + expectedResp: &backend.SavedSearchResponse{ + Id: "searchID1", + Name: "test name", + Query: 
`name:"test"`, + Description: valuePtr("test description"), + Permissions: &backend.UserSavedSearchPermissions{ + Role: valuePtr(backend.SavedSearchOwner), + }, + BookmarkStatus: &backend.UserSavedSearchBookmark{ + Status: backend.BookmarkActive, + }, + CreatedAt: time.Date(2000, time.January, 1, 0, 0, 0, 0, time.UTC), + UpdatedAt: time.Date(2000, time.January, 1, 0, 0, 0, 0, time.UTC), + }, + expectedUserID: "testID1", + expectedIsCreation: true, + err: errTest, + }, authMiddlewareOption: withAuthMiddleware(mockAuthMiddleware(testUser)), request: httptest.NewRequest( http.MethodPost, @@ -365,8 +415,13 @@ func TestCreateSavedSearch(t *testing.T) { createUserSavedSearchCfg: tc.mockCreateUserSavedSearchConfig, t: t, } + mockPublisher := &MockEventPublisher{ + t: t, + callCountPublishSearchConfigurationChanged: 0, + publishSearchConfigurationChangedCfg: tc.mockPublishConfig, + } myServer := Server{wptMetricsStorer: mockStorer, metadataStorer: nil, userGitHubClientFactory: nil, - operationResponseCaches: nil, baseURL: getTestBaseURL(t)} + operationResponseCaches: nil, baseURL: getTestBaseURL(t), eventPublisher: mockPublisher} assertTestServerRequest(t, &myServer, tc.request, tc.expectedResponse, []testServerOption{tc.authMiddlewareOption}...) }) diff --git a/backend/pkg/httpserver/create_subscription.go b/backend/pkg/httpserver/create_subscription.go index 502d229ec..4a8633896 100644 --- a/backend/pkg/httpserver/create_subscription.go +++ b/backend/pkg/httpserver/create_subscription.go @@ -29,9 +29,10 @@ import ( // The exhaustive linter is configured to check that this map is complete. 
func getAllSubscriptionTriggersSet() map[backend.SubscriptionTriggerWritable]any { return map[backend.SubscriptionTriggerWritable]any{ - backend.SubscriptionTriggerFeatureAnyBrowserImplementationComplete: nil, - backend.SubscriptionTriggerFeatureBaselineLimitedToNewly: nil, - backend.SubscriptionTriggerFeatureBaselineRegressionNewlyToLimited: nil, + backend.SubscriptionTriggerFeatureBaselineToNewly: nil, + backend.SubscriptionTriggerFeatureBaselineToWidely: nil, + backend.SubscriptionTriggerFeatureBrowserImplementationAnyComplete: nil, + backend.SubscriptionTriggerFeatureBaselineRegressionToLimited: nil, } } @@ -76,7 +77,9 @@ func validateSubscriptionTrigger(trigger *[]backend.SubscriptionTriggerWritable, // The exhaustive linter is configured to check that this map is complete. func getAllSubscriptionFrequenciesSet() map[backend.SubscriptionFrequency]any { return map[backend.SubscriptionFrequency]any{ - backend.SubscriptionFrequencyDaily: nil, + backend.SubscriptionFrequencyImmediate: nil, + backend.SubscriptionFrequencyWeekly: nil, + backend.SubscriptionFrequencyMonthly: nil, } } diff --git a/backend/pkg/httpserver/create_subscription_test.go b/backend/pkg/httpserver/create_subscription_test.go index 083bd3e8c..f7af1476e 100644 --- a/backend/pkg/httpserver/create_subscription_test.go +++ b/backend/pkg/httpserver/create_subscription_test.go @@ -51,8 +51,8 @@ func TestCreateSubscription(t *testing.T) { ChannelId: "channel-id", SavedSearchId: "search-id", Triggers: []backend.SubscriptionTriggerWritable{ - backend.SubscriptionTriggerFeatureAnyBrowserImplementationComplete}, - Frequency: "daily", + backend.SubscriptionTriggerFeatureBrowserImplementationAnyComplete}, + Frequency: "immediate", }, output: &backend.SubscriptionResponse{ Id: "sub-id", @@ -61,11 +61,11 @@ func TestCreateSubscription(t *testing.T) { Triggers: []backend.SubscriptionTriggerResponseItem{ { Value: backendtypes.AttemptToStoreSubscriptionTrigger( - 
backend.SubscriptionTriggerFeatureAnyBrowserImplementationComplete), + backend.SubscriptionTriggerFeatureBrowserImplementationAnyComplete), RawValue: nil, }, }, - Frequency: "daily", + Frequency: "immediate", CreatedAt: now, UpdatedAt: now, }, @@ -79,16 +79,16 @@ func TestCreateSubscription(t *testing.T) { strings.NewReader(`{ "channel_id": "channel-id", "saved_search_id": "search-id", - "triggers": ["feature_any_browser_implementation_complete"], - "frequency": "daily" + "triggers": ["feature_browser_implementation_any_complete"], + "frequency": "immediate" }`), ), expectedResponse: testJSONResponse(http.StatusCreated, `{ "id":"sub-id", "channel_id":"channel-id", "saved_search_id":"search-id", - "triggers": [{"value":"feature_any_browser_implementation_complete"}], - "frequency":"daily", + "triggers": [{"value":"feature_browser_implementation_any_complete"}], + "frequency":"immediate", "created_at":"`+now.Format(time.RFC3339Nano)+`", "updated_at":"`+now.Format(time.RFC3339Nano)+`" }`), @@ -103,8 +103,8 @@ func TestCreateSubscription(t *testing.T) { "/v1/users/me/subscriptions", strings.NewReader(`{ "saved_search_id": "search-id", - "triggers": ["feature_any_browser_implementation_complete"], - "frequency": "daily" + "triggers": ["feature_browser_implementation_any_complete"], + "frequency": "immediate" }`), ), expectedResponse: testJSONResponse(http.StatusBadRequest, ` @@ -124,8 +124,8 @@ func TestCreateSubscription(t *testing.T) { ChannelId: "channel-id", SavedSearchId: "search-id", Triggers: []backend.SubscriptionTriggerWritable{ - backend.SubscriptionTriggerFeatureAnyBrowserImplementationComplete}, - Frequency: "daily", + backend.SubscriptionTriggerFeatureBrowserImplementationAnyComplete}, + Frequency: "immediate", }, output: nil, err: backendtypes.ErrUserNotAuthorizedForAction, @@ -138,8 +138,8 @@ func TestCreateSubscription(t *testing.T) { strings.NewReader(`{ "channel_id": "channel-id", "saved_search_id": "search-id", - "triggers": 
["feature_any_browser_implementation_complete"], - "frequency": "daily" + "triggers": ["feature_browser_implementation_any_complete"], + "frequency": "immediate" }`)), expectedResponse: testJSONResponse(http.StatusForbidden, `{ "code":403, @@ -154,8 +154,8 @@ func TestCreateSubscription(t *testing.T) { ChannelId: "channel-id", SavedSearchId: "search-id", Triggers: []backend.SubscriptionTriggerWritable{ - backend.SubscriptionTriggerFeatureAnyBrowserImplementationComplete}, - Frequency: "daily", + backend.SubscriptionTriggerFeatureBrowserImplementationAnyComplete}, + Frequency: "immediate", }, output: nil, err: fmt.Errorf("database error"), @@ -168,8 +168,8 @@ func TestCreateSubscription(t *testing.T) { strings.NewReader(`{ "channel_id": "channel-id", "saved_search_id": "search-id", - "triggers": ["feature_any_browser_implementation_complete"], - "frequency": "daily" + "triggers": ["feature_browser_implementation_any_complete"], + "frequency": "immediate" }`)), expectedResponse: testJSONResponse(http.StatusInternalServerError, `{ "code":500, @@ -190,6 +190,7 @@ func TestCreateSubscription(t *testing.T) { metadataStorer: nil, userGitHubClientFactory: nil, operationResponseCaches: nil, + eventPublisher: nil, baseURL: getTestBaseURL(t), } assertTestServerRequest(t, &myServer, tc.request, tc.expectedResponse, tc.authMiddlewareOption) @@ -214,8 +215,8 @@ func TestValidateSubscriptionCreation(t *testing.T) { ChannelId: "channel-id", SavedSearchId: "search-id", Triggers: []backend.SubscriptionTriggerWritable{ - backend.SubscriptionTriggerFeatureAnyBrowserImplementationComplete}, - Frequency: backend.SubscriptionFrequencyDaily, + backend.SubscriptionTriggerFeatureBrowserImplementationAnyComplete}, + Frequency: backend.SubscriptionFrequencyImmediate, }, want: nil, }, @@ -225,8 +226,8 @@ func TestValidateSubscriptionCreation(t *testing.T) { ChannelId: "", SavedSearchId: "searchid", Triggers: []backend.SubscriptionTriggerWritable{ - 
backend.SubscriptionTriggerFeatureAnyBrowserImplementationComplete}, - Frequency: backend.SubscriptionFrequencyDaily, + backend.SubscriptionTriggerFeatureBrowserImplementationAnyComplete}, + Frequency: backend.SubscriptionFrequencyImmediate, }, want: &fieldValidationErrors{ fieldErrorMap: map[string]string{ @@ -240,8 +241,8 @@ func TestValidateSubscriptionCreation(t *testing.T) { ChannelId: "channelid", SavedSearchId: "", Triggers: []backend.SubscriptionTriggerWritable{ - backend.SubscriptionTriggerFeatureAnyBrowserImplementationComplete}, - Frequency: backend.SubscriptionFrequencyDaily, + backend.SubscriptionTriggerFeatureBrowserImplementationAnyComplete}, + Frequency: backend.SubscriptionFrequencyImmediate, }, want: &fieldValidationErrors{ fieldErrorMap: map[string]string{ @@ -256,7 +257,7 @@ func TestValidateSubscriptionCreation(t *testing.T) { SavedSearchId: "searchid", Triggers: []backend.SubscriptionTriggerWritable{ "invalid_trigger"}, - Frequency: backend.SubscriptionFrequencyDaily, + Frequency: backend.SubscriptionFrequencyImmediate, }, want: &fieldValidationErrors{ fieldErrorMap: map[string]string{ @@ -270,7 +271,7 @@ func TestValidateSubscriptionCreation(t *testing.T) { ChannelId: "channelid", SavedSearchId: "searchid", Triggers: []backend.SubscriptionTriggerWritable{ - backend.SubscriptionTriggerFeatureAnyBrowserImplementationComplete}, + backend.SubscriptionTriggerFeatureBrowserImplementationAnyComplete}, Frequency: "invalid_frequency", }, want: &fieldValidationErrors{ diff --git a/backend/pkg/httpserver/delete_notification_channel_test.go b/backend/pkg/httpserver/delete_notification_channel_test.go index 8133dac13..e69f2b4a0 100644 --- a/backend/pkg/httpserver/delete_notification_channel_test.go +++ b/backend/pkg/httpserver/delete_notification_channel_test.go @@ -95,6 +95,7 @@ func TestDeleteNotificationChannel(t *testing.T) { metadataStorer: nil, operationResponseCaches: nil, userGitHubClientFactory: nil, + eventPublisher: nil, } 
assertTestServerRequest(t, &myServer, tc.request, tc.expectedResponse, []testServerOption{tc.authMiddlewareOption}...) diff --git a/backend/pkg/httpserver/delete_subscription_test.go b/backend/pkg/httpserver/delete_subscription_test.go index 2e3028a76..1f959ef96 100644 --- a/backend/pkg/httpserver/delete_subscription_test.go +++ b/backend/pkg/httpserver/delete_subscription_test.go @@ -117,6 +117,7 @@ func TestDeleteSubscription(t *testing.T) { metadataStorer: nil, userGitHubClientFactory: nil, operationResponseCaches: nil, + eventPublisher: nil, baseURL: getTestBaseURL(t), } assertTestServerRequest(t, &myServer, tc.request, tc.expectedResponse, tc.authMiddlewareOption) diff --git a/backend/pkg/httpserver/get_feature.go b/backend/pkg/httpserver/get_feature.go index 339f8c5f8..0d6adeffb 100644 --- a/backend/pkg/httpserver/get_feature.go +++ b/backend/pkg/httpserver/get_feature.go @@ -82,7 +82,7 @@ func (s *Server) GetFeature( } result, err := s.wptMetricsStorer.GetFeature(ctx, request.FeatureId, getWPTMetricViewOrDefault(request.Params.WptMetricView), - defaultBrowsers(), + backendtypes.DefaultBrowsers(), ) if err != nil { if errors.Is(err, gcpspanner.ErrQueryReturnedNoResults) { diff --git a/backend/pkg/httpserver/get_feature_metadata_test.go b/backend/pkg/httpserver/get_feature_metadata_test.go index fc46bb853..c4c0c1e88 100644 --- a/backend/pkg/httpserver/get_feature_metadata_test.go +++ b/backend/pkg/httpserver/get_feature_metadata_test.go @@ -114,7 +114,9 @@ func TestGetFeatureMetadata(t *testing.T) { mockCacher := NewMockRawBytesDataCacher(t, tc.expectedCacheCalls, tc.expectedGetCalls) myServer := Server{wptMetricsStorer: mockStorer, metadataStorer: mockMetadataStorer, userGitHubClientFactory: nil, operationResponseCaches: initOperationResponseCaches(mockCacher, getTestRouteCacheOptions()), - baseURL: getTestBaseURL(t)} + baseURL: getTestBaseURL(t), + eventPublisher: nil, + } assertTestServerRequest(t, &myServer, tc.request, tc.expectedResponse) 
mockCacher.AssertExpectations() // TODO: Start tracking call count and assert call count. diff --git a/backend/pkg/httpserver/get_feature_test.go b/backend/pkg/httpserver/get_feature_test.go index 550c66c48..dad407e50 100644 --- a/backend/pkg/httpserver/get_feature_test.go +++ b/backend/pkg/httpserver/get_feature_test.go @@ -413,6 +413,7 @@ func TestGetFeature(t *testing.T) { mockCacher := NewMockRawBytesDataCacher(t, tc.expectedCacheCalls, tc.expectedGetCalls) myServer := Server{wptMetricsStorer: mockStorer, metadataStorer: nil, userGitHubClientFactory: nil, operationResponseCaches: initOperationResponseCaches(mockCacher, getTestRouteCacheOptions()), + eventPublisher: nil, baseURL: getTestBaseURL(t)} assertTestServerRequest(t, &myServer, tc.request, tc.expectedResponse) assertMocksExpectations(t, tc.expectedCallCount, mockStorer.callCountGetFeature, diff --git a/backend/pkg/httpserver/get_features.go b/backend/pkg/httpserver/get_features.go index 4e35d06b2..c0fe766d0 100644 --- a/backend/pkg/httpserver/get_features.go +++ b/backend/pkg/httpserver/get_features.go @@ -69,7 +69,7 @@ func (s *Server) ListFeatures( node, req.Params.Sort, getWPTMetricViewOrDefault(req.Params.WptMetricView), - defaultBrowsers(), + backendtypes.DefaultBrowsers(), ) if err != nil { diff --git a/backend/pkg/httpserver/get_features_test.go b/backend/pkg/httpserver/get_features_test.go index dec6bad80..4c115e004 100644 --- a/backend/pkg/httpserver/get_features_test.go +++ b/backend/pkg/httpserver/get_features_test.go @@ -537,6 +537,7 @@ func TestListFeatures(t *testing.T) { mockCacher := NewMockRawBytesDataCacher(t, tc.expectedCacheCalls, tc.expectedGetCalls) myServer := Server{wptMetricsStorer: mockStorer, metadataStorer: nil, userGitHubClientFactory: nil, operationResponseCaches: initOperationResponseCaches(mockCacher, getTestRouteCacheOptions()), + eventPublisher: nil, baseURL: getTestBaseURL(t)} assertTestServerRequest(t, &myServer, tc.request, tc.expectedResponse) 
assertMocksExpectations(t, tc.expectedCallCount, mockStorer.callCountFeaturesSearch, diff --git a/backend/pkg/httpserver/get_notification_channel_test.go b/backend/pkg/httpserver/get_notification_channel_test.go index 4768dcb3b..fc544d1da 100644 --- a/backend/pkg/httpserver/get_notification_channel_test.go +++ b/backend/pkg/httpserver/get_notification_channel_test.go @@ -118,6 +118,7 @@ func TestGetNotificationChannel(t *testing.T) { metadataStorer: nil, operationResponseCaches: nil, userGitHubClientFactory: nil, + eventPublisher: nil, } assertTestServerRequest(t, &myServer, tc.request, tc.expectedResponse, []testServerOption{tc.authMiddlewareOption}...) diff --git a/backend/pkg/httpserver/get_saved_search_test.go b/backend/pkg/httpserver/get_saved_search_test.go index 8c36d86c6..0f2811e3a 100644 --- a/backend/pkg/httpserver/get_saved_search_test.go +++ b/backend/pkg/httpserver/get_saved_search_test.go @@ -73,6 +73,7 @@ func TestGetSavedSearch(t *testing.T) { t: t, } myServer := Server{wptMetricsStorer: mockStorer, metadataStorer: nil, userGitHubClientFactory: nil, + eventPublisher: nil, operationResponseCaches: nil, baseURL: getTestBaseURL(t)} assertTestServerRequest(t, &myServer, tc.request, tc.expectedResponse, []testServerOption{tc.authMiddlewareOption}...) 
diff --git a/backend/pkg/httpserver/get_subscription_test.go b/backend/pkg/httpserver/get_subscription_test.go index 68c44b182..6c4b3aa8e 100644 --- a/backend/pkg/httpserver/get_subscription_test.go +++ b/backend/pkg/httpserver/get_subscription_test.go @@ -144,6 +144,7 @@ func TestGetSubscription(t *testing.T) { metadataStorer: nil, userGitHubClientFactory: nil, operationResponseCaches: nil, + eventPublisher: nil, baseURL: getTestBaseURL(t), } assertTestServerRequest(t, &myServer, tc.request, tc.expectedResponse, tc.authMiddlewareOption) diff --git a/backend/pkg/httpserver/list_aggregated_baseline_status_counts_test.go b/backend/pkg/httpserver/list_aggregated_baseline_status_counts_test.go index 6eb0b8ee0..bf5c8827c 100644 --- a/backend/pkg/httpserver/list_aggregated_baseline_status_counts_test.go +++ b/backend/pkg/httpserver/list_aggregated_baseline_status_counts_test.go @@ -298,6 +298,7 @@ func TestListAggregatedBaselineStatusCounts(t *testing.T) { mockCacher := NewMockRawBytesDataCacher(t, tc.expectedCacheCalls, tc.expectedGetCalls) myServer := Server{wptMetricsStorer: mockStorer, metadataStorer: nil, userGitHubClientFactory: nil, operationResponseCaches: initOperationResponseCaches(mockCacher, getTestRouteCacheOptions()), + eventPublisher: nil, baseURL: getTestBaseURL(t)} assertTestServerRequest(t, &myServer, tc.request, tc.expectedResponse) assertMocksExpectations(t, tc.expectedCallCount, mockStorer.callCountListBaselineStatusCounts, diff --git a/backend/pkg/httpserver/list_aggregated_feature_support_test.go b/backend/pkg/httpserver/list_aggregated_feature_support_test.go index 76a1ea62c..033eafee0 100644 --- a/backend/pkg/httpserver/list_aggregated_feature_support_test.go +++ b/backend/pkg/httpserver/list_aggregated_feature_support_test.go @@ -315,6 +315,7 @@ func TestListAggregatedFeatureSupport(t *testing.T) { mockCacher := NewMockRawBytesDataCacher(t, tc.expectedCacheCalls, tc.expectedGetCalls) myServer := Server{wptMetricsStorer: mockStorer, 
metadataStorer: nil, userGitHubClientFactory: nil, operationResponseCaches: initOperationResponseCaches(mockCacher, getTestRouteCacheOptions()), + eventPublisher: nil, baseURL: getTestBaseURL(t)} assertTestServerRequest(t, &myServer, tc.request, tc.expectedResponse) assertMocksExpectations(t, tc.expectedCallCount, mockStorer.callCountListBrowserFeatureCountMetric, diff --git a/backend/pkg/httpserver/list_aggregated_wpt_metrics_test.go b/backend/pkg/httpserver/list_aggregated_wpt_metrics_test.go index 8ed40a45b..60651627e 100644 --- a/backend/pkg/httpserver/list_aggregated_wpt_metrics_test.go +++ b/backend/pkg/httpserver/list_aggregated_wpt_metrics_test.go @@ -301,6 +301,7 @@ func TestListAggregatedWPTMetrics(t *testing.T) { mockCacher := NewMockRawBytesDataCacher(t, tc.expectedCacheCalls, tc.expectedGetCalls) myServer := Server{wptMetricsStorer: mockStorer, metadataStorer: nil, userGitHubClientFactory: nil, operationResponseCaches: initOperationResponseCaches(mockCacher, getTestRouteCacheOptions()), + eventPublisher: nil, baseURL: getTestBaseURL(t)} assertTestServerRequest(t, &myServer, tc.request, tc.expectedResponse) assertMocksExpectations(t, tc.expectedCallCount, mockStorer.callCountListMetricsOverTimeWithAggregatedTotals, diff --git a/backend/pkg/httpserver/list_chromium_usage_test.go b/backend/pkg/httpserver/list_chromium_usage_test.go index f86a482fd..33a2c9a3c 100644 --- a/backend/pkg/httpserver/list_chromium_usage_test.go +++ b/backend/pkg/httpserver/list_chromium_usage_test.go @@ -155,6 +155,7 @@ func TestListChromeDailyUsageStats(t *testing.T) { mockCacher := NewMockRawBytesDataCacher(t, tc.expectedCacheCalls, tc.expectedGetCalls) myServer := Server{wptMetricsStorer: mockStorer, metadataStorer: nil, userGitHubClientFactory: nil, operationResponseCaches: initOperationResponseCaches(mockCacher, getTestRouteCacheOptions()), + eventPublisher: nil, baseURL: getTestBaseURL(t)} assertTestServerRequest(t, &myServer, tc.request, tc.expectedResponse) 
assertMocksExpectations(t, tc.expectedCallCount, mockStorer.callCountListChromeDailyUsageStats, diff --git a/backend/pkg/httpserver/list_feature_wpt_metrics_test.go b/backend/pkg/httpserver/list_feature_wpt_metrics_test.go index f85090bc7..d4efef3f4 100644 --- a/backend/pkg/httpserver/list_feature_wpt_metrics_test.go +++ b/backend/pkg/httpserver/list_feature_wpt_metrics_test.go @@ -297,6 +297,7 @@ func TestListFeatureWPTMetrics(t *testing.T) { mockCacher := NewMockRawBytesDataCacher(t, tc.expectedCacheCalls, tc.expectedGetCalls) myServer := Server{wptMetricsStorer: mockStorer, metadataStorer: nil, userGitHubClientFactory: nil, operationResponseCaches: initOperationResponseCaches(mockCacher, getTestRouteCacheOptions()), + eventPublisher: nil, baseURL: getTestBaseURL(t)} assertTestServerRequest(t, &myServer, tc.request, tc.expectedResponse) assertMocksExpectations(t, tc.expectedCallCount, mockStorer.callCountListMetricsForFeatureIDBrowserAndChannel, diff --git a/backend/pkg/httpserver/list_missing_one_implementation_counts_test.go b/backend/pkg/httpserver/list_missing_one_implementation_counts_test.go index 5af48d8e0..baa1a88de 100644 --- a/backend/pkg/httpserver/list_missing_one_implementation_counts_test.go +++ b/backend/pkg/httpserver/list_missing_one_implementation_counts_test.go @@ -346,6 +346,7 @@ func TestListMissingOneImplementationCounts(t *testing.T) { mockCacher := NewMockRawBytesDataCacher(t, tc.expectedCacheCalls, tc.expectedGetCalls) myServer := Server{wptMetricsStorer: mockStorer, metadataStorer: nil, userGitHubClientFactory: nil, operationResponseCaches: initOperationResponseCaches(mockCacher, getTestRouteCacheOptions()), + eventPublisher: nil, baseURL: getTestBaseURL(t)} assertTestServerRequest(t, &myServer, tc.request, tc.expectedResponse) assertMocksExpectations(t, tc.expectedCallCount, mockStorer.callCountListMissingOneImplCounts, diff --git a/backend/pkg/httpserver/list_missing_one_implementation_features_test.go 
b/backend/pkg/httpserver/list_missing_one_implementation_features_test.go index f7cc04b2a..da4cbb234 100644 --- a/backend/pkg/httpserver/list_missing_one_implementation_features_test.go +++ b/backend/pkg/httpserver/list_missing_one_implementation_features_test.go @@ -199,6 +199,7 @@ func TestListMissingOneImplementationFeatures(t *testing.T) { mockCacher := NewMockRawBytesDataCacher(t, nil, nil) myServer := Server{wptMetricsStorer: mockStorer, metadataStorer: nil, userGitHubClientFactory: nil, operationResponseCaches: initOperationResponseCaches(mockCacher, getTestRouteCacheOptions()), + eventPublisher: nil, baseURL: getTestBaseURL(t)} assertTestServerRequest(t, &myServer, tc.request, tc.expectedResponse) assertMocksExpectations(t, tc.expectedCallCount, mockStorer.callCountListMissingOneImplFeatures, diff --git a/backend/pkg/httpserver/list_notification_channels_test.go b/backend/pkg/httpserver/list_notification_channels_test.go index 1b6336f37..a875370f3 100644 --- a/backend/pkg/httpserver/list_notification_channels_test.go +++ b/backend/pkg/httpserver/list_notification_channels_test.go @@ -138,6 +138,7 @@ func TestListNotificationChannels(t *testing.T) { metadataStorer: nil, operationResponseCaches: nil, userGitHubClientFactory: nil, + eventPublisher: nil, } assertTestServerRequest(t, &myServer, tc.request, tc.expectedResponse, []testServerOption{tc.authMiddlewareOption}...) 
diff --git a/backend/pkg/httpserver/list_subscriptions_test.go b/backend/pkg/httpserver/list_subscriptions_test.go index c534e9fc1..02116721b 100644 --- a/backend/pkg/httpserver/list_subscriptions_test.go +++ b/backend/pkg/httpserver/list_subscriptions_test.go @@ -180,6 +180,7 @@ func TestListSubscriptions(t *testing.T) { metadataStorer: nil, userGitHubClientFactory: nil, operationResponseCaches: nil, + eventPublisher: nil, baseURL: getTestBaseURL(t), } assertTestServerRequest(t, &myServer, tc.request, tc.expectedResponse, tc.authMiddlewareOption) diff --git a/backend/pkg/httpserver/list_user_saved_searches_test.go b/backend/pkg/httpserver/list_user_saved_searches_test.go index de374ddd2..fa102007b 100644 --- a/backend/pkg/httpserver/list_user_saved_searches_test.go +++ b/backend/pkg/httpserver/list_user_saved_searches_test.go @@ -180,7 +180,8 @@ func TestListUserSavedSearches(t *testing.T) { t: t, } myServer := Server{wptMetricsStorer: mockStorer, metadataStorer: nil, userGitHubClientFactory: nil, - operationResponseCaches: nil, baseURL: getTestBaseURL(t)} + operationResponseCaches: nil, baseURL: getTestBaseURL(t), + eventPublisher: nil} assertTestServerRequest(t, &myServer, tc.request, tc.expectedResponse, []testServerOption{tc.authMiddlewareOption}...) 
assertMocksExpectations(t, tc.expectedCallCount, mockStorer.callCountListUserSavedSearches, diff --git a/backend/pkg/httpserver/ping_user_test.go b/backend/pkg/httpserver/ping_user_test.go index aa2a6e6d4..ea160252a 100644 --- a/backend/pkg/httpserver/ping_user_test.go +++ b/backend/pkg/httpserver/ping_user_test.go @@ -267,6 +267,7 @@ func TestPingUser(t *testing.T) { ), operationResponseCaches: nil, baseURL: getTestBaseURL(t), + eventPublisher: nil, } req := httptest.NewRequest(http.MethodPost, "/v1/users/me/ping", tc.body) diff --git a/backend/pkg/httpserver/put_user_saved_search_bookmark_test.go b/backend/pkg/httpserver/put_user_saved_search_bookmark_test.go index f336e115a..d2f9ff829 100644 --- a/backend/pkg/httpserver/put_user_saved_search_bookmark_test.go +++ b/backend/pkg/httpserver/put_user_saved_search_bookmark_test.go @@ -107,6 +107,7 @@ func TestPutUserSavedSearchBookmark(t *testing.T) { } myServer := Server{wptMetricsStorer: mockStorer, metadataStorer: nil, userGitHubClientFactory: nil, operationResponseCaches: nil, + eventPublisher: nil, baseURL: getTestBaseURL(t)} assertTestServerRequest(t, &myServer, tc.request, tc.expectedResponse, []testServerOption{authMiddlewareOption}...) diff --git a/backend/pkg/httpserver/remove_saved_search_test.go b/backend/pkg/httpserver/remove_saved_search_test.go index 919504950..98465a43b 100644 --- a/backend/pkg/httpserver/remove_saved_search_test.go +++ b/backend/pkg/httpserver/remove_saved_search_test.go @@ -106,7 +106,7 @@ func TestRemoveSavedSearch(t *testing.T) { t: t, } myServer := Server{wptMetricsStorer: mockStorer, metadataStorer: nil, userGitHubClientFactory: nil, - operationResponseCaches: nil, baseURL: getTestBaseURL(t)} + operationResponseCaches: nil, baseURL: getTestBaseURL(t), eventPublisher: nil} assertTestServerRequest(t, &myServer, tc.request, tc.expectedResponse, []testServerOption{authMiddlewareOption}...) 
assertMocksExpectations(t, 1, mockStorer.callCountDeleteUserSavedSearch, diff --git a/backend/pkg/httpserver/remove_user_saved_search_bookmark_test.go b/backend/pkg/httpserver/remove_user_saved_search_bookmark_test.go index ca3c9a0e2..7c09d0b7a 100644 --- a/backend/pkg/httpserver/remove_user_saved_search_bookmark_test.go +++ b/backend/pkg/httpserver/remove_user_saved_search_bookmark_test.go @@ -106,7 +106,7 @@ func TestRemoveUserSavedSearchBookmark(t *testing.T) { t: t, } myServer := Server{wptMetricsStorer: mockStorer, metadataStorer: nil, userGitHubClientFactory: nil, - operationResponseCaches: nil, baseURL: getTestBaseURL(t)} + operationResponseCaches: nil, baseURL: getTestBaseURL(t), eventPublisher: nil} assertTestServerRequest(t, &myServer, tc.request, tc.expectedResponse, []testServerOption{authMiddlewareOption}...) assertMocksExpectations(t, 1, mockStorer.callCountRemoveUserSavedSearchBookmark, diff --git a/backend/pkg/httpserver/server.go b/backend/pkg/httpserver/server.go index 1bc48deaf..d87685d53 100644 --- a/backend/pkg/httpserver/server.go +++ b/backend/pkg/httpserver/server.go @@ -170,6 +170,7 @@ type Server struct { operationResponseCaches *operationResponseCaches baseURL *url.URL userGitHubClientFactory UserGitHubClientFactory + eventPublisher EventPublisher } type GitHubUserClient interface { @@ -183,18 +184,6 @@ type UserGitHubClient struct { type UserGitHubClientFactory func(token string) *UserGitHubClient -func defaultBrowsers() []backend.BrowserPathParam { - return []backend.BrowserPathParam{ - backend.Chrome, - backend.Edge, - backend.Firefox, - backend.Safari, - backend.ChromeAndroid, - backend.FirefoxAndroid, - backend.SafariIos, - } -} - func getPageSizeOrDefault(pageSize *int) int { // maxPageSize comes from the /openapi/backend/openapi.yaml maxPageSize := 100 @@ -233,11 +222,17 @@ type RouteCacheOptions struct { AggregatedFeatureStatsOptions []cachetypes.CacheOption } +type EventPublisher interface { + 
PublishSearchConfigurationChanged(ctx context.Context, resp *backend.SavedSearchResponse, + userID string, isCreation bool) error +} + func NewHTTPServer( port string, baseURL *url.URL, metadataStorer WebFeatureMetadataStorer, wptMetricsStorer WPTMetricsStorer, + eventPublisher EventPublisher, rawBytesDataCacher RawBytesDataCacher, routeCacheOptions RouteCacheOptions, userGitHubClientFactory UserGitHubClientFactory, @@ -247,6 +242,7 @@ func NewHTTPServer( srv := &Server{ metadataStorer: metadataStorer, wptMetricsStorer: wptMetricsStorer, + eventPublisher: eventPublisher, operationResponseCaches: initOperationResponseCaches(rawBytesDataCacher, routeCacheOptions), baseURL: baseURL, userGitHubClientFactory: userGitHubClientFactory, diff --git a/backend/pkg/httpserver/server_test.go b/backend/pkg/httpserver/server_test.go index 6cc6e3c04..2db9c5926 100644 --- a/backend/pkg/httpserver/server_test.go +++ b/backend/pkg/httpserver/server_test.go @@ -855,6 +855,38 @@ func (m *MockWPTMetricsStorer) DeleteNotificationChannel( return m.deleteNotificationChannelCfg.err } +type MockPublishSearchConfigurationChangedConfig struct { + expectedResp *backend.SavedSearchResponse + expectedUserID string + expectedIsCreation bool + err error +} + +type MockEventPublisher struct { + t *testing.T + callCountPublishSearchConfigurationChanged int + publishSearchConfigurationChangedCfg *MockPublishSearchConfigurationChangedConfig +} + +func (m *MockEventPublisher) PublishSearchConfigurationChanged( + _ context.Context, + resp *backend.SavedSearchResponse, + userID string, + isCreation bool) error { + m.callCountPublishSearchConfigurationChanged++ + if !reflect.DeepEqual(resp, m.publishSearchConfigurationChangedCfg.expectedResp) { + m.t.Errorf("unexpected response %+v", resp) + } + if userID != m.publishSearchConfigurationChangedCfg.expectedUserID { + m.t.Errorf("unexpected user id %s", userID) + } + if isCreation != m.publishSearchConfigurationChangedCfg.expectedIsCreation { + 
m.t.Errorf("unexpected is creation %t", isCreation) + } + + return m.publishSearchConfigurationChangedCfg.err +} + func TestGetPageSizeOrDefault(t *testing.T) { testCases := []struct { name string diff --git a/backend/pkg/httpserver/update_saved_search.go b/backend/pkg/httpserver/update_saved_search.go index 06d67fff5..220ec220c 100644 --- a/backend/pkg/httpserver/update_saved_search.go +++ b/backend/pkg/httpserver/update_saved_search.go @@ -128,5 +128,11 @@ func (s *Server) UpdateSavedSearch( }, nil } + err = s.eventPublisher.PublishSearchConfigurationChanged(ctx, output, user.ID, false) + if err != nil { + // We should not mark this as a failure. Only log it. + slog.ErrorContext(ctx, "unable to publish search configuration changed event during update", "error", err) + } + return backend.UpdateSavedSearch200JSONResponse(*output), nil } diff --git a/backend/pkg/httpserver/update_saved_search_test.go b/backend/pkg/httpserver/update_saved_search_test.go index 6c32e03be..3eb4eee08 100644 --- a/backend/pkg/httpserver/update_saved_search_test.go +++ b/backend/pkg/httpserver/update_saved_search_test.go @@ -61,6 +61,7 @@ func TestUpdateSavedSearch(t *testing.T) { testCases := []struct { name string cfg *MockUpdateUserSavedSearchConfig + publishCfg *MockPublishSearchConfigurationChangedConfig authMiddlewareOption testServerOption request *http.Request expectedResponse *http.Response @@ -68,6 +69,7 @@ func TestUpdateSavedSearch(t *testing.T) { { name: "missing body update error", cfg: nil, + publishCfg: nil, authMiddlewareOption: withAuthMiddleware(mockAuthMiddleware(testUser)), request: httptest.NewRequest( http.MethodPatch, @@ -81,6 +83,7 @@ func TestUpdateSavedSearch(t *testing.T) { { name: "empty update mask error", cfg: nil, + publishCfg: nil, authMiddlewareOption: withAuthMiddleware(mockAuthMiddleware(testUser)), request: httptest.NewRequest( http.MethodPatch, @@ -94,6 +97,7 @@ func TestUpdateSavedSearch(t *testing.T) { { name: "update with invalid masks error", cfg: 
nil, + publishCfg: nil, authMiddlewareOption: withAuthMiddleware(mockAuthMiddleware(testUser)), request: httptest.NewRequest( http.MethodPatch, @@ -110,6 +114,7 @@ func TestUpdateSavedSearch(t *testing.T) { { name: "missing fields, all update masks set, update error", cfg: nil, + publishCfg: nil, authMiddlewareOption: withAuthMiddleware(mockAuthMiddleware(testUser)), request: httptest.NewRequest( http.MethodPatch, @@ -138,6 +143,7 @@ func TestUpdateSavedSearch(t *testing.T) { output: nil, err: backendtypes.ErrUserNotAuthorizedForAction, }, + publishCfg: nil, authMiddlewareOption: withAuthMiddleware(mockAuthMiddleware(testUser)), request: httptest.NewRequest( @@ -161,6 +167,7 @@ func TestUpdateSavedSearch(t *testing.T) { output: nil, err: backendtypes.ErrEntityDoesNotExist, }, + publishCfg: nil, authMiddlewareOption: withAuthMiddleware(mockAuthMiddleware(testUser)), request: httptest.NewRequest( @@ -184,6 +191,7 @@ func TestUpdateSavedSearch(t *testing.T) { output: nil, err: errTest, }, + publishCfg: nil, authMiddlewareOption: withAuthMiddleware(mockAuthMiddleware(testUser)), request: httptest.NewRequest( @@ -220,6 +228,25 @@ func TestUpdateSavedSearch(t *testing.T) { }, err: nil, }, + publishCfg: &MockPublishSearchConfigurationChangedConfig{ + expectedResp: &backend.SavedSearchResponse{ + Id: "saved-search-id", + Name: "test name", + Query: `name:"test"`, + Description: valuePtr("test description"), + Permissions: &backend.UserSavedSearchPermissions{ + Role: valuePtr(backend.SavedSearchOwner), + }, + BookmarkStatus: &backend.UserSavedSearchBookmark{ + Status: backend.BookmarkActive, + }, + CreatedAt: time.Date(2000, time.January, 1, 0, 0, 0, 0, time.UTC), + UpdatedAt: time.Date(2000, time.January, 1, 0, 0, 0, 0, time.UTC), + }, + expectedIsCreation: false, + expectedUserID: "testID1", + err: nil, + }, authMiddlewareOption: withAuthMiddleware(mockAuthMiddleware(testUser)), request: httptest.NewRequest( @@ -245,7 +272,7 @@ func TestUpdateSavedSearch(t *testing.T) { 
), }, { - name: "success, all fields, clear description with explicit null", + name: "success, all fields, clear description with explicit null, failed publish", cfg: &MockUpdateUserSavedSearchConfig{ expectedSavedSearchID: "saved-search-id", expectedUserID: "testID1", @@ -266,6 +293,25 @@ func TestUpdateSavedSearch(t *testing.T) { }, err: nil, }, + publishCfg: &MockPublishSearchConfigurationChangedConfig{ + expectedResp: &backend.SavedSearchResponse{ + Id: "saved-search-id", + Name: "test name", + Query: `name:"test"`, + Description: nil, + Permissions: &backend.UserSavedSearchPermissions{ + Role: valuePtr(backend.SavedSearchOwner), + }, + BookmarkStatus: &backend.UserSavedSearchBookmark{ + Status: backend.BookmarkActive, + }, + CreatedAt: time.Date(2000, time.January, 1, 0, 0, 0, 0, time.UTC), + UpdatedAt: time.Date(2000, time.January, 1, 0, 0, 0, 0, time.UTC), + }, + expectedIsCreation: false, + expectedUserID: "testID1", + err: errTest, + }, authMiddlewareOption: withAuthMiddleware(mockAuthMiddleware(testUser)), request: httptest.NewRequest( @@ -318,6 +364,25 @@ func TestUpdateSavedSearch(t *testing.T) { }, err: nil, }, + publishCfg: &MockPublishSearchConfigurationChangedConfig{ + expectedResp: &backend.SavedSearchResponse{ + Id: "saved-search-id", + Name: "test name", + Query: `name:"test"`, + Description: nil, + Permissions: &backend.UserSavedSearchPermissions{ + Role: valuePtr(backend.SavedSearchOwner), + }, + BookmarkStatus: &backend.UserSavedSearchBookmark{ + Status: backend.BookmarkActive, + }, + CreatedAt: time.Date(2000, time.January, 1, 0, 0, 0, 0, time.UTC), + UpdatedAt: time.Date(2000, time.January, 1, 0, 0, 0, 0, time.UTC), + }, + expectedUserID: "testID1", + expectedIsCreation: false, + err: nil, + }, authMiddlewareOption: withAuthMiddleware(mockAuthMiddleware(testUser)), request: httptest.NewRequest( @@ -355,8 +420,13 @@ func TestUpdateSavedSearch(t *testing.T) { updateUserSavedSearchCfg: tc.cfg, t: t, } + mockPublisher := &MockEventPublisher{ + 
t: t, + callCountPublishSearchConfigurationChanged: 0, + publishSearchConfigurationChangedCfg: tc.publishCfg, + } myServer := Server{wptMetricsStorer: mockStorer, metadataStorer: nil, userGitHubClientFactory: nil, - operationResponseCaches: nil, baseURL: getTestBaseURL(t)} + operationResponseCaches: nil, baseURL: getTestBaseURL(t), eventPublisher: mockPublisher} assertTestServerRequest(t, &myServer, tc.request, tc.expectedResponse, []testServerOption{tc.authMiddlewareOption}...) }) diff --git a/backend/pkg/httpserver/update_subscription_test.go b/backend/pkg/httpserver/update_subscription_test.go index f7d0a3b14..8e0686429 100644 --- a/backend/pkg/httpserver/update_subscription_test.go +++ b/backend/pkg/httpserver/update_subscription_test.go @@ -50,7 +50,7 @@ func TestUpdateSubscription(t *testing.T) { expectedSubscriptionID: "sub-id", expectedUpdateRequest: backend.UpdateSubscriptionRequest{ Triggers: &[]backend.SubscriptionTriggerWritable{ - backend.SubscriptionTriggerFeatureAnyBrowserImplementationComplete}, + backend.SubscriptionTriggerFeatureBrowserImplementationAnyComplete}, UpdateMask: []backend.UpdateSubscriptionRequestUpdateMask{ backend.UpdateSubscriptionRequestMaskTriggers}, Frequency: nil, @@ -62,7 +62,7 @@ func TestUpdateSubscription(t *testing.T) { Triggers: []backend.SubscriptionTriggerResponseItem{ { Value: backendtypes.AttemptToStoreSubscriptionTrigger( - backend.SubscriptionTriggerFeatureAnyBrowserImplementationComplete), + backend.SubscriptionTriggerFeatureBrowserImplementationAnyComplete), RawValue: nil, }, }, @@ -79,7 +79,7 @@ func TestUpdateSubscription(t *testing.T) { strings.NewReader(` { "triggers": - ["feature_any_browser_implementation_complete"], + ["feature_browser_implementation_any_complete"], "update_mask": ["triggers"] }`)), expectedResponse: testJSONResponse(http.StatusOK, @@ -87,7 +87,7 @@ func TestUpdateSubscription(t *testing.T) { "id":"sub-id", "channel_id":"channel-id", "saved_search_id":"search-id", - "triggers": 
[{"value":"feature_any_browser_implementation_complete"}], + "triggers": [{"value":"feature_browser_implementation_any_complete"}], "frequency":"daily", "created_at":"`+now.Format(time.RFC3339Nano)+`", "updated_at":"`+now.Format(time.RFC3339Nano)+`" @@ -101,7 +101,7 @@ func TestUpdateSubscription(t *testing.T) { expectedSubscriptionID: "sub-id", expectedUpdateRequest: backend.UpdateSubscriptionRequest{ Triggers: &[]backend.SubscriptionTriggerWritable{ - backend.SubscriptionTriggerFeatureAnyBrowserImplementationComplete, + backend.SubscriptionTriggerFeatureBrowserImplementationAnyComplete, }, Frequency: nil, UpdateMask: []backend.UpdateSubscriptionRequestUpdateMask{ @@ -116,7 +116,7 @@ func TestUpdateSubscription(t *testing.T) { "/v1/users/me/subscriptions/sub-id", strings.NewReader(` { - "triggers": ["feature_any_browser_implementation_complete"], + "triggers": ["feature_browser_implementation_any_complete"], "update_mask": ["triggers"] }`)), expectedResponse: testJSONResponse(http.StatusNotFound, ` @@ -134,7 +134,7 @@ func TestUpdateSubscription(t *testing.T) { "/v1/users/me/subscriptions/sub-id", strings.NewReader(` { - "triggers": ["feature_any_browser_implementation_complete"], + "triggers": ["feature_browser_implementation_any_complete"], "update_mask": ["invalid_field"] }`)), expectedResponse: testJSONResponse(http.StatusBadRequest, ` @@ -155,7 +155,7 @@ func TestUpdateSubscription(t *testing.T) { expectedSubscriptionID: "sub-id", expectedUpdateRequest: backend.UpdateSubscriptionRequest{ Triggers: &[]backend.SubscriptionTriggerWritable{ - backend.SubscriptionTriggerFeatureAnyBrowserImplementationComplete}, + backend.SubscriptionTriggerFeatureBrowserImplementationAnyComplete}, UpdateMask: []backend.UpdateSubscriptionRequestUpdateMask{ backend.UpdateSubscriptionRequestMaskTriggers}, Frequency: nil, @@ -169,7 +169,7 @@ func TestUpdateSubscription(t *testing.T) { "/v1/users/me/subscriptions/sub-id", strings.NewReader(` { - "triggers": 
["feature_any_browser_implementation_complete"], + "triggers": ["feature_browser_implementation_any_complete"], "update_mask": ["triggers"] }`)), expectedResponse: testJSONResponse(http.StatusForbidden, ` @@ -186,7 +186,7 @@ func TestUpdateSubscription(t *testing.T) { expectedSubscriptionID: "sub-id", expectedUpdateRequest: backend.UpdateSubscriptionRequest{ Triggers: &[]backend.SubscriptionTriggerWritable{ - backend.SubscriptionTriggerFeatureAnyBrowserImplementationComplete}, + backend.SubscriptionTriggerFeatureBrowserImplementationAnyComplete}, UpdateMask: []backend.UpdateSubscriptionRequestUpdateMask{ backend.UpdateSubscriptionRequestMaskTriggers}, Frequency: nil, @@ -200,7 +200,7 @@ func TestUpdateSubscription(t *testing.T) { "/v1/users/me/subscriptions/sub-id", strings.NewReader(` { - "triggers": ["feature_any_browser_implementation_complete"], + "triggers": ["feature_browser_implementation_any_complete"], "update_mask": ["triggers"] }`)), expectedResponse: testJSONResponse(http.StatusInternalServerError, ` @@ -227,6 +227,7 @@ func TestUpdateSubscription(t *testing.T) { metadataStorer: nil, operationResponseCaches: nil, userGitHubClientFactory: nil, + eventPublisher: nil, } assertTestServerRequest(t, &myServer, tc.request, tc.expectedResponse, tc.authMiddlewareOption) assertMocksExpectations(t, @@ -250,7 +251,7 @@ func TestValidateSubscriptionUpdate(t *testing.T) { name: "valid update", input: &backend.UpdateSubscriptionRequest{ Triggers: &[]backend.SubscriptionTriggerWritable{ - backend.SubscriptionTriggerFeatureAnyBrowserImplementationComplete}, + backend.SubscriptionTriggerFeatureBrowserImplementationAnyComplete}, UpdateMask: []backend.UpdateSubscriptionRequestUpdateMask{ backend.UpdateSubscriptionRequestMaskTriggers}, Frequency: nil, @@ -261,7 +262,7 @@ func TestValidateSubscriptionUpdate(t *testing.T) { name: "invalid update mask", input: &backend.UpdateSubscriptionRequest{ Triggers: &[]backend.SubscriptionTriggerWritable{ - 
backend.SubscriptionTriggerFeatureAnyBrowserImplementationComplete}, + backend.SubscriptionTriggerFeatureBrowserImplementationAnyComplete}, UpdateMask: []backend.UpdateSubscriptionRequestUpdateMask{ "invalid_field"}, Frequency: nil, @@ -290,7 +291,7 @@ func TestValidateSubscriptionUpdate(t *testing.T) { name: "nil update mask", input: &backend.UpdateSubscriptionRequest{ Triggers: &[]backend.SubscriptionTriggerWritable{ - backend.SubscriptionTriggerFeatureAnyBrowserImplementationComplete}, + backend.SubscriptionTriggerFeatureBrowserImplementationAnyComplete}, UpdateMask: nil, Frequency: nil, }, diff --git a/backend/skaffold.yaml b/backend/skaffold.yaml index 2d7390e64..44a2f3403 100644 --- a/backend/skaffold.yaml +++ b/backend/skaffold.yaml @@ -22,6 +22,7 @@ requires: - path: ../.dev/valkey - path: ../.dev/spanner - path: ../.dev/wiremock + - path: ../.dev/pubsub profiles: - name: local build: diff --git a/e2e/tests/notification-channels.spec.ts b/e2e/tests/notification-channels.spec.ts new file mode 100644 index 000000000..d45d5113f --- /dev/null +++ b/e2e/tests/notification-channels.spec.ts @@ -0,0 +1,70 @@ +/** + * Copyright 2026 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +import {test, expect} from '@playwright/test'; +import {loginAsUser, BASE_URL} from './utils'; + +test.describe('Notification Channels Page', () => { + test('redirects unauthenticated user to home and shows toast', async ({ + page, + }) => { + await page.goto(`${BASE_URL}/settings/notification-channels`); + + // Expect to be redirected to the home page. + await expect(page).toHaveURL(BASE_URL); + // FYI: We do not assert the toast because it flashes on the screen due to the redirect. + }); + + test('authenticated user sees their email channel and coming soon messages', async ({ + page, + }) => { + // Log in as a test user + await loginAsUser(page, 'test user 1'); + + // Navigate to the notification channels page + await page.goto(`${BASE_URL}/settings/notification-channels`); + + // Move the mouse to a neutral position to avoid hover effects on the screenshot + await page.mouse.move(0, 0); + + // Expect the URL to be correct + await expect(page).toHaveURL(`${BASE_URL}/settings/notification-channels`); + + // Verify Email panel content + const emailPanel = page.locator('webstatus-notification-email-channels'); + await expect(emailPanel).toBeVisible(); + await expect(emailPanel).toContainText('test.user.1@example.com'); + await expect(emailPanel).toContainText('Enabled'); + + // Verify RSS panel content + const rssPanel = page.locator('webstatus-notification-rss-channels'); + await expect(rssPanel).toBeVisible(); + await expect(rssPanel).toContainText('Coming soon'); + + // Verify Webhook panel content + const webhookPanel = page.locator( + 'webstatus-notification-webhook-channels', + ); + await expect(webhookPanel).toBeVisible(); + await expect(webhookPanel).toContainText('Coming soon'); + + // Take a screenshot for visual regression + const pageContainer = page.locator('.page-container'); + await expect(pageContainer).toHaveScreenshot( + 'notification-channels-authenticated.png', + ); + }); +}); diff --git 
a/e2e/tests/notification-channels.spec.ts-snapshots/notification-channels-authenticated-chromium-linux.png b/e2e/tests/notification-channels.spec.ts-snapshots/notification-channels-authenticated-chromium-linux.png new file mode 100644 index 000000000..c9922b225 Binary files /dev/null and b/e2e/tests/notification-channels.spec.ts-snapshots/notification-channels-authenticated-chromium-linux.png differ diff --git a/e2e/tests/notification-channels.spec.ts-snapshots/notification-channels-authenticated-firefox-linux.png b/e2e/tests/notification-channels.spec.ts-snapshots/notification-channels-authenticated-firefox-linux.png new file mode 100644 index 000000000..a60ecc594 Binary files /dev/null and b/e2e/tests/notification-channels.spec.ts-snapshots/notification-channels-authenticated-firefox-linux.png differ diff --git a/e2e/tests/notification-channels.spec.ts-snapshots/notification-channels-authenticated-webkit-linux.png b/e2e/tests/notification-channels.spec.ts-snapshots/notification-channels-authenticated-webkit-linux.png new file mode 100644 index 000000000..45af20445 Binary files /dev/null and b/e2e/tests/notification-channels.spec.ts-snapshots/notification-channels-authenticated-webkit-linux.png differ diff --git a/e2e/tests/notifications.spec.ts b/e2e/tests/notifications.spec.ts new file mode 100644 index 000000000..b08337955 --- /dev/null +++ b/e2e/tests/notifications.spec.ts @@ -0,0 +1,236 @@ +// Copyright 2026 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+// See the License for the specific language governing permissions and +// limitations under the License. + +import {test, expect} from '@playwright/test'; +import {resetUserData, loginAsUser, gotoOverviewPageUrl} from './utils.js'; +import { + getLatestEmail, + triggerBatchJob, + triggerNonMatchingChange, + triggerMatchingChange, + triggerBatchChange, +} from './test-data-util.js'; + +const TEST_USER_1 = { + username: 'test user 1', + email: 'test.user.1@example.com', +}; + +test.describe('Notifications', () => { + test.beforeEach(async ({page}) => { + await loginAsUser(page, TEST_USER_1.username); + await resetUserData(); + }); + + test('Immediate Edit Flow', async ({page}) => { + await gotoOverviewPageUrl(page, 'http://localhost:5555/'); + await page.getByLabel('Search features').fill('group:css'); + await page.getByLabel('Search features').press('Enter'); + await page.getByRole('button', {name: 'Save Search'}).click(); + await page.getByLabel('Name').fill('Browser Features'); + await page.getByRole('button', {name: 'Save'}).click(); + await page.getByRole('button', {name: 'Subscribe to updates'}).click(); + await page.getByRole('radio', {name: 'Immediately'}).click(); + await page.getByRole('button', {name: 'Save'}).click(); + await expect(page.getByText('Subscription saved!')).toBeVisible(); + + // Trigger the notification + await page.getByRole('button', {name: 'Edit Search'}).click(); + await page.getByLabel('Query').fill('group:html'); + await page.getByRole('button', {name: 'Save'}).click(); + + // Verify email + const email = await test.step('Poll for email', async () => { + for (let i = 0; i < 10; i++) { + const email = await getLatestEmail(TEST_USER_1.email); + if (email) { + return email; + } + await new Promise(resolve => setTimeout(resolve, 1000)); + } + return null; + }); + + expect(email).not.toBeNull(); + expect(email.Content.Headers.Subject[0]).toContain('Update:'); + }); + + test('Batch Schedule Flow', async ({page}) => { + await 
gotoOverviewPageUrl(page, 'http://localhost:5555/'); + await page.getByLabel('Search features').fill('group:css'); + await page.getByLabel('Search features').press('Enter'); + await page.getByRole('button', {name: 'Save Search'}).click(); + await page.getByLabel('Name').fill('Browser Features'); + await page.getByRole('button', {name: 'Save'}).click(); + await page.getByRole('button', {name: 'Subscribe to updates'}).click(); + await page.getByRole('radio', {name: 'Weekly updates'}).click(); + await page.getByRole('button', {name: 'Save'}).click(); + await expect(page.getByText('Subscription saved!')).toBeVisible(); + + // Backdoor data change + triggerBatchChange(); + + // Trigger the batch job + await triggerBatchJob('weekly'); + + // Verify email + const email = await test.step('Poll for email', async () => { + for (let i = 0; i < 10; i++) { + const email = await getLatestEmail(TEST_USER_1.email); + if ( + email && + email.Content.Headers.Subject[0].includes('Weekly Digest') + ) { + return email; + } + await new Promise(resolve => setTimeout(resolve, 1000)); + } + return null; + }); + + expect(email).not.toBeNull(); + expect(email.Content.Headers.Subject[0]).toContain('Weekly Digest'); + }); + + test('2-Click Unsubscribe Flow', async ({page}) => { + // 1. Setup: Run the "Immediate Edit" flow to generate an email. 
+ await gotoOverviewPageUrl(page, 'http://localhost:5555/'); + await page.getByLabel('Search features').fill('group:css'); + await page.getByLabel('Search features').press('Enter'); + await page.getByRole('button', {name: 'Save Search'}).click(); + await page.getByLabel('Name').fill('Browser Features'); + await page.getByRole('button', {name: 'Save'}).click(); + await page.getByRole('button', {name: 'Subscribe to updates'}).click(); + await page.getByRole('radio', {name: 'Immediately'}).click(); + await page.getByRole('button', {name: 'Save'}).click(); + await expect(page.getByText('Subscription saved!')).toBeVisible(); + await page.getByRole('button', {name: 'Edit Search'}).click(); + await page.getByLabel('Query').fill('group:html'); + await page.getByRole('button', {name: 'Save'}).click(); + const email = await test.step('Poll for email', async () => { + for (let i = 0; i < 10; i++) { + const email = await getLatestEmail(TEST_USER_1.email); + if (email) { + return email; + } + await new Promise(resolve => setTimeout(resolve, 1000)); + } + return null; + }); + expect(email).not.toBeNull(); + + // 2. Extract Unsubscribe Link + const unsubscribeLinkMatch = email.Content.Body.match( + /href="([^"]+action=unsubscribe[^"]+)"/, + ); + expect(unsubscribeLinkMatch).not.toBeNull(); + const unsubscribeUrl = unsubscribeLinkMatch[1]; + + // 3. Action: Navigate to the link + await page.goto(unsubscribeUrl); + + // 4. Interact: Confirm the unsubscription + await page.getByRole('button', {name: 'Confirm Unsubscribe'}).click(); + await expect(page.getByText('Subscription deleted!')).toBeVisible(); + + // 5. Verify: Go to the subscriptions page and check that the subscription is gone. + await page.goto('/settings/subscriptions'); + await expect(page.getByText('No subscriptions found.')).toBeVisible(); + }); + + test('Noise Filter Flow (Negative Test)', async ({page}) => { + // 1. 
Setup + await gotoOverviewPageUrl(page, 'http://localhost:5555/'); + await page.getByLabel('Search features').fill('group:css'); + await page.getByLabel('Search features').press('Enter'); + await page.getByRole('button', {name: 'Save Search'}).click(); + await page.getByLabel('Name').fill('Browser Features'); + await page.getByRole('button', {name: 'Save'}).click(); + await page.getByRole('button', {name: 'Subscribe to updates'}).click(); + await page.getByRole('radio', {name: 'Weekly updates'}).click(); + await page + .getByRole('checkbox', {name: '...becomes widely available'}) + .check(); + await page.getByRole('button', {name: 'Save'}).click(); + await expect(page.getByText('Subscription saved!')).toBeVisible(); + + // 2. Backdoor Action 1 (Non-matching change) + triggerNonMatchingChange(); + + // 3. Trigger + await triggerBatchJob('weekly'); + + // 4. Verify NO email is received + await new Promise(resolve => setTimeout(resolve, 5000)); // Wait for a reasonable time + let email = await getLatestEmail(TEST_USER_1.email); + expect(email).toBeNull(); + + // 5. Backdoor Action 2 (Matching change) + triggerMatchingChange(); + + // 6. Trigger + await triggerBatchJob('weekly'); + + // 7. Verify email IS received + email = await test.step('Poll for email', async () => { + for (let i = 0; i < 10; i++) { + const email = await getLatestEmail(TEST_USER_1.email); + if (email) { + return email; + } + await new Promise(resolve => setTimeout(resolve, 1000)); + } + return null; + }); + expect(email).not.toBeNull(); + }); + + test('Idempotency Flow', async ({page}) => { + // 1. 
Setup + await gotoOverviewPageUrl(page, 'http://localhost:5555/'); + await page.getByLabel('Search features').fill('group:css'); + await page.getByLabel('Search features').press('Enter'); + await page.getByRole('button', {name: 'Save Search'}).click(); + await page.getByLabel('Name').fill('Browser Features'); + await page.getByRole('button', {name: 'Save'}).click(); + await page.getByRole('button', {name: 'Subscribe to updates'}).click(); + await page.getByRole('radio', {name: 'Weekly updates'}).click(); + await page.getByRole('button', {name: 'Save'}).click(); + await expect(page.getByText('Subscription saved!')).toBeVisible(); + + // Placeholder for backdoor data change + triggerBatchChange(); + await triggerBatchJob('weekly'); + const firstEmail = await test.step('Poll for first email', async () => { + for (let i = 0; i < 10; i++) { + const email = await getLatestEmail(TEST_USER_1.email); + if (email) { + return email; + } + await new Promise(resolve => setTimeout(resolve, 1000)); + } + return null; + }); + expect(firstEmail).not.toBeNull(); + + // 2. Action: Trigger again + await triggerBatchJob('weekly'); + + // 3. Verify: No new email + await new Promise(resolve => setTimeout(resolve, 5000)); // Wait for a reasonable time + const secondEmail = await getLatestEmail(TEST_USER_1.email); + expect(secondEmail).not.toBeNull(); + expect(secondEmail.ID).toEqual(firstEmail.ID); // No new email, so latest is the same. 
+ }); +}); diff --git a/e2e/tests/sidebar.spec.ts-snapshots/sidebar-authenticated-chromium-linux.png b/e2e/tests/sidebar.spec.ts-snapshots/sidebar-authenticated-chromium-linux.png index 2dcb90025..3290834fb 100644 Binary files a/e2e/tests/sidebar.spec.ts-snapshots/sidebar-authenticated-chromium-linux.png and b/e2e/tests/sidebar.spec.ts-snapshots/sidebar-authenticated-chromium-linux.png differ diff --git a/e2e/tests/sidebar.spec.ts-snapshots/sidebar-authenticated-firefox-linux.png b/e2e/tests/sidebar.spec.ts-snapshots/sidebar-authenticated-firefox-linux.png index 7b3d6e2cb..efbde13eb 100644 Binary files a/e2e/tests/sidebar.spec.ts-snapshots/sidebar-authenticated-firefox-linux.png and b/e2e/tests/sidebar.spec.ts-snapshots/sidebar-authenticated-firefox-linux.png differ diff --git a/e2e/tests/sidebar.spec.ts-snapshots/sidebar-authenticated-webkit-linux.png b/e2e/tests/sidebar.spec.ts-snapshots/sidebar-authenticated-webkit-linux.png index 95dfba4cd..860e2a41b 100644 Binary files a/e2e/tests/sidebar.spec.ts-snapshots/sidebar-authenticated-webkit-linux.png and b/e2e/tests/sidebar.spec.ts-snapshots/sidebar-authenticated-webkit-linux.png differ diff --git a/e2e/tests/test-data-util.ts b/e2e/tests/test-data-util.ts new file mode 100644 index 000000000..d66d26695 --- /dev/null +++ b/e2e/tests/test-data-util.ts @@ -0,0 +1,63 @@ +// Copyright 2025 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +import {execSync} from 'child_process'; + +const LOAD_FAKE_DATA_CMD = + "make dev_fake_data LOAD_FAKE_DATA_FLAGS='-trigger-scenario=%s'"; + +export async function triggerBatchJob(frequency: string) { + const message = { + messages: [ + { + data: Buffer.from(JSON.stringify({frequency})).toString('base64'), + }, + ], + }; + + await fetch( + 'http://localhost:8060/v1/projects/local/topics/batch-updates-topic-id:publish', + { + method: 'POST', + headers: { + 'Content-Type': 'application/json', + }, + body: JSON.stringify(message), + }, + ); +} + +export async function getLatestEmail(recipient: string): Promise { + const response = await fetch('http://localhost:8025/api/v1/messages'); + const data = await response.json(); + const messages = data.messages || []; + for (const message of messages) { + if (message.To.some((r: any) => r.Address === recipient)) { + return message; + } + } + return null; +} + +export function triggerNonMatchingChange() { + execSync(LOAD_FAKE_DATA_CMD.replace('%s', 'non-matching')); +} + +export function triggerMatchingChange() { + execSync(LOAD_FAKE_DATA_CMD.replace('%s', 'matching')); +} + +export function triggerBatchChange() { + execSync(LOAD_FAKE_DATA_CMD.replace('%s', 'batch-change')); +} diff --git a/frontend/nginx.conf b/frontend/nginx.conf index 981aece17..ec33e8155 100644 --- a/frontend/nginx.conf +++ b/frontend/nginx.conf @@ -44,6 +44,14 @@ http { try_files $uri $uri/ /index.html; } + location = /settings/notification-channels { + try_files $uri $uri/ /index.html; + } + + location = /settings/subscriptions { + try_files $uri $uri/ /index.html; + } + location = / { try_files $uri $uri/ =404; } diff --git a/frontend/src/static/img/email/chrome.png b/frontend/src/static/img/email/chrome.png new file mode 100644 index 000000000..eb9dc42ff Binary files /dev/null and b/frontend/src/static/img/email/chrome.png differ diff --git a/frontend/src/static/img/email/edge.png b/frontend/src/static/img/email/edge.png new file mode 100644 index 
000000000..a516b9b87 Binary files /dev/null and b/frontend/src/static/img/email/edge.png differ diff --git a/frontend/src/static/img/email/firefox.png b/frontend/src/static/img/email/firefox.png new file mode 100644 index 000000000..b7c3dd364 Binary files /dev/null and b/frontend/src/static/img/email/firefox.png differ diff --git a/frontend/src/static/img/email/limited.png b/frontend/src/static/img/email/limited.png new file mode 100644 index 000000000..7a3ee8f0b Binary files /dev/null and b/frontend/src/static/img/email/limited.png differ diff --git a/frontend/src/static/img/email/newly.png b/frontend/src/static/img/email/newly.png new file mode 100644 index 000000000..d16304181 Binary files /dev/null and b/frontend/src/static/img/email/newly.png differ diff --git a/frontend/src/static/img/email/safari.png b/frontend/src/static/img/email/safari.png new file mode 100644 index 000000000..ba9254d18 Binary files /dev/null and b/frontend/src/static/img/email/safari.png differ diff --git a/frontend/src/static/img/email/widely.png b/frontend/src/static/img/email/widely.png new file mode 100644 index 000000000..67f1480b5 Binary files /dev/null and b/frontend/src/static/img/email/widely.png differ diff --git a/frontend/src/static/img/shoelace/assets/icons/envelope.svg b/frontend/src/static/img/shoelace/assets/icons/envelope.svg new file mode 100644 index 000000000..78bf1ded1 --- /dev/null +++ b/frontend/src/static/img/shoelace/assets/icons/envelope.svg @@ -0,0 +1,3 @@ + + + \ No newline at end of file diff --git a/frontend/src/static/img/shoelace/assets/icons/mailbox-flag.svg b/frontend/src/static/img/shoelace/assets/icons/mailbox-flag.svg new file mode 100644 index 000000000..8e24db05f --- /dev/null +++ b/frontend/src/static/img/shoelace/assets/icons/mailbox-flag.svg @@ -0,0 +1,4 @@ + + + + \ No newline at end of file diff --git a/frontend/src/static/img/shoelace/assets/icons/plus-lg.svg b/frontend/src/static/img/shoelace/assets/icons/plus-lg.svg new file mode 100644 
index 000000000..531e86cd0 --- /dev/null +++ b/frontend/src/static/img/shoelace/assets/icons/plus-lg.svg @@ -0,0 +1,3 @@ + + + \ No newline at end of file diff --git a/frontend/src/static/img/shoelace/assets/icons/rss.svg b/frontend/src/static/img/shoelace/assets/icons/rss.svg new file mode 100644 index 000000000..18dc9f1be --- /dev/null +++ b/frontend/src/static/img/shoelace/assets/icons/rss.svg @@ -0,0 +1,4 @@ + + + + \ No newline at end of file diff --git a/frontend/src/static/img/shoelace/assets/icons/webhook.svg b/frontend/src/static/img/shoelace/assets/icons/webhook.svg new file mode 100644 index 000000000..6fbfb0543 --- /dev/null +++ b/frontend/src/static/img/shoelace/assets/icons/webhook.svg @@ -0,0 +1,5 @@ + + webhook + + + diff --git a/frontend/src/static/js/api/client.ts b/frontend/src/static/js/api/client.ts index 07d33430f..0470929a2 100644 --- a/frontend/src/static/js/api/client.ts +++ b/frontend/src/static/js/api/client.ts @@ -54,8 +54,10 @@ export type BrowsersParameter = components['parameters']['browserPathParam']; type PageablePath = | '/v1/features' | '/v1/features/{feature_id}/stats/wpt/browsers/{browser}/channels/{channel}/{metric_view}' + | '/v1/users/me/notification-channels' | '/v1/stats/features/browsers/{browser}/feature_counts' | '/v1/users/me/saved-searches' + | '/v1/users/me/subscriptions' | '/v1/stats/baseline_status/low_date_feature_counts'; type SuccessResponsePageableData< @@ -421,6 +423,28 @@ export class APIClient { }); } + public async listNotificationChannels( + token: string, + ): Promise { + type NotificationChannelPage = SuccessResponsePageableData< + paths['/v1/users/me/notification-channels']['get'], + FetchOptions< + FilterKeys + >, + 'application/json', + '/v1/users/me/notification-channels' + >; + + return this.getAllPagesOfData< + '/v1/users/me/notification-channels', + NotificationChannelPage + >('/v1/users/me/notification-channels', { + headers: { + Authorization: `Bearer ${token}`, + }, + }); + } + public async 
pingUser( token: string, pingOptions?: {githubToken?: string}, @@ -831,4 +855,143 @@ export class APIClient { } return response.data; } + + public async getSubscription( + subscriptionId: string, + token: string, + ): Promise { + const options = { + ...temporaryFetchOptions, + params: { + path: { + subscription_id: subscriptionId, + }, + }, + headers: { + Authorization: `Bearer ${token}`, + }, + }; + const response = await this.client.GET( + '/v1/users/me/subscriptions/{subscription_id}', + options, + ); + const error = response.error; + if (error !== undefined) { + throw createAPIError(error); + } + + return response.data; + } + + public async deleteSubscription(subscriptionId: string, token: string) { + const options = { + ...temporaryFetchOptions, + params: { + path: { + subscription_id: subscriptionId, + }, + }, + headers: { + Authorization: `Bearer ${token}`, + }, + }; + const response = await this.client.DELETE( + '/v1/users/me/subscriptions/{subscription_id}', + options, + ); + const error = response.error; + if (error !== undefined) { + throw createAPIError(error); + } + + return response.data; + } + + public async listSubscriptions( + token: string, + ): Promise { + type SubscriptionPage = SuccessResponsePageableData< + paths['/v1/users/me/subscriptions']['get'], + ParamsOption<'/v1/users/me/subscriptions'>, + 'application/json', + '/v1/users/me/subscriptions' + >; + + return this.getAllPagesOfData< + '/v1/users/me/subscriptions', + SubscriptionPage + >('/v1/users/me/subscriptions', { + headers: { + Authorization: `Bearer ${token}`, + }, + }); + } + + public async createSubscription( + token: string, + subscription: components['schemas']['Subscription'], + ): Promise { + const options: FetchOptions< + FilterKeys + > = { + headers: { + Authorization: `Bearer ${token}`, + }, + body: subscription, + credentials: temporaryFetchOptions.credentials, + }; + const response = await this.client.POST( + '/v1/users/me/subscriptions', + options, + ); + const error = 
response.error; + if (error !== undefined) { + throw createAPIError(error); + } + return response.data; + } + + public async updateSubscription( + subscriptionId: string, + token: string, + updates: { + triggers?: components['schemas']['SubscriptionTriggerWritable'][]; + frequency?: components['schemas']['SubscriptionFrequency']; + }, + ): Promise { + const req: components['schemas']['UpdateSubscriptionRequest'] = { + update_mask: [], + }; + if (updates.triggers !== undefined) { + req.update_mask.push('triggers'); + req.triggers = updates.triggers; + } + if (updates.frequency !== undefined) { + req.update_mask.push('frequency'); + req.frequency = updates.frequency; + } + const options: FetchOptions< + FilterKeys + > = { + headers: { + Authorization: `Bearer ${token}`, + }, + params: { + path: { + subscription_id: subscriptionId, + }, + }, + body: req, + credentials: temporaryFetchOptions.credentials, + }; + const response = await this.client.PATCH( + '/v1/users/me/subscriptions/{subscription_id}', + options, + ); + const error = response.error; + if (error !== undefined) { + throw createAPIError(error); + } + return response.data; + } } diff --git a/frontend/src/static/js/components/test/webstatus-manage-subscriptions-dialog.test.ts b/frontend/src/static/js/components/test/webstatus-manage-subscriptions-dialog.test.ts new file mode 100644 index 000000000..a3c28843c --- /dev/null +++ b/frontend/src/static/js/components/test/webstatus-manage-subscriptions-dialog.test.ts @@ -0,0 +1,368 @@ +/** + * Copyright 2025 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +import {fixture, html} from '@open-wc/testing'; +import {expect} from '@esm-bundle/chai'; +import sinon from 'sinon'; +import {APIClient} from '../../api/client.js'; +import {User} from '../../contexts/firebase-user-context.js'; +import '../webstatus-manage-subscriptions-dialog.js'; +import { + ManageSubscriptionsDialog, +} from '../webstatus-manage-subscriptions-dialog.js'; +import {type components} from 'webstatus.dev-backend'; + +describe('webstatus-manage-subscriptions-dialog', () => { + let sandbox: sinon.SinonSandbox; + let apiClient: APIClient; + let user: User; + let element: ManageSubscriptionsDialog; + + const mockSavedSearch: components['schemas']['SavedSearchResponse'] = { + id: 'test-search-id', + name: 'Test Saved Search', + query: 'is:test', + created_at: new Date().toISOString(), + updated_at: new Date().toISOString(), + permissions: {role: 'saved_search_owner'}, + bookmark_status: {status: 'bookmark_none'}, + }; + + const mockNotificationChannels: components['schemas']['NotificationChannelResponse'][] = [ + { + id: 'test-channel-id', + type: 'email', + name: 'test@example.com', + value: 'test@example.com', + status: 'enabled', + created_at: new Date().toISOString(), + updated_at: new Date().toISOString(), + }, + { + id: 'other-channel-id', + type: 'email', + name: 'other@example.com', + value: 'other@example.com', + status: 'enabled', + created_at: new Date().toISOString(), + updated_at: new Date().toISOString(), + }, + ]; + + const mockInitialSubscription: + components['schemas']['SubscriptionResponse'] = { + id: 
'initial-sub-id', + saved_search_id: 'test-search-id', + channel_id: 'initial-channel-id', + frequency: 'weekly', + triggers: [{value: 'feature_baseline_to_newly'}], + created_at: new Date().toISOString(), + updated_at: new Date().toISOString(), + }; + + const mockOtherSubscription: + components['schemas']['SubscriptionResponse'] = { + id: 'other-sub-id', + saved_search_id: 'test-search-id', + channel_id: 'other-channel-id', + frequency: 'monthly', + triggers: [{value: 'feature_baseline_to_widely'}], + created_at: new Date().toISOString(), + updated_at: new Date().toISOString(), + }; + + beforeEach(async () => { + sandbox = sinon.createSandbox(); + apiClient = { + listNotificationChannels: sandbox.stub().resolves(mockNotificationChannels), + getSavedSearchByID: sandbox.stub().resolves(mockSavedSearch), + listSubscriptions: sandbox + .stub() + .resolves([mockInitialSubscription, mockOtherSubscription]), + createSubscription: sandbox.stub().resolves(mockInitialSubscription), + updateSubscription: sandbox.stub().resolves(mockInitialSubscription), + deleteSubscription: sandbox.stub().resolves(undefined), + getSubscription: sandbox.stub().resolves(mockInitialSubscription), + } as any as APIClient; + user = {getIdToken: async () => 'test-token'} as User; + + element = await fixture(html` + + `); + + // Ensure the loading task completes and the component re-renders + await element['_loadingTask'].taskComplete; + await element.updateComplete; + }); + + afterEach(() => { + sandbox.restore(); + }); + + it('renders correctly initially', async () => { + expect(element).to.be.instanceOf(ManageSubscriptionsDialog); + // Dialog is initially closed, so its content should not be visible. + expect(element.open).to.be.false; + expect(element.shadowRoot?.querySelector('sl-dialog[open]')).to.be.null; + // We will test content when the dialog is explicitly opened in another test. 
+ expect(element.isDirty).to.be.false; + }); + + it('shows content when opened', async () => { + element.open = true; + await element['_loadingTask'].run(); + await element.updateComplete; + expect(element.shadowRoot?.querySelector('sl-spinner')).to.be.null; + expect(element.shadowRoot?.textContent).to.include('Test Saved Search'); + expect(element.shadowRoot?.textContent).to.include('Notification channels'); + }); + + + it('fetches data when opened for a saved search', async () => { + element.open = true; + await element['_loadingTask'].run(); // Explicitly re-run the task + + // The beforeEach already triggers the loading task + // We just need to assert the calls and state. + expect(apiClient.listNotificationChannels).to.have.been.calledWith('test-token'); + expect(apiClient.getSavedSearchByID).to.have.been.calledWith( + 'test-search-id', + 'test-token' + ); + expect(apiClient.listSubscriptions).to.have.been.calledWith('test-token'); + // Also verify that the dialog's internal state is updated + expect(element['_notificationChannels']).to.deep.equal(mockNotificationChannels); + expect(element['_savedSearch']).to.deep.equal(mockSavedSearch); + expect(element['_subscriptionsForSavedSearch']).to.deep.equal([ + mockInitialSubscription, + mockOtherSubscription, + ]); + }); + + it('is dirty when frequency changes', async () => { + element.open = true; + await element['_loadingTask'].run(); + await element.updateComplete; + + element['_selectedFrequency'] = 'monthly'; // Change from initial 'weekly' + expect(element.isDirty).to.be.true; + }); + + it('is dirty when triggers change', async () => { + element.open = true; + await element['_loadingTask'].run(); + await element.updateComplete; + + element['_selectedTriggers'] = ['feature_baseline_to_widely']; // Change from initial [ { value: 'feature_baseline_to_newly' } ] + expect(element.isDirty).to.be.true; + }); + + it('is not dirty when changes are reverted', async () => { + element.open = true; + await 
element['_loadingTask'].run(); + await element.updateComplete; + + // Simulate selecting the initial channel to set the baseline state correctly. + element['_handleChannelChange'](mockInitialSubscription.channel_id); + await element.updateComplete; + expect(element.isDirty, 'Should not be dirty after initialization').to.be + .false; + + // Make a change + element['_selectedFrequency'] = 'monthly'; + await element.updateComplete; + expect(element.isDirty, 'Should be dirty after change').to.be.true; + + // Revert the change + element['_selectedFrequency'] = mockInitialSubscription.frequency; + await element.updateComplete; + expect(element.isDirty, 'Should not be dirty after reverting change').to.be + .false; + }); + + it('dispatches SubscriptionSaveSuccessEvent on successful create', async () => { + const eventSpy = sandbox.spy(); + element.addEventListener('subscription-save-success', eventSpy); + + // Make it dirty for creation. + element.savedSearchId = 'new-saved-search-id'; + element['_activeChannelId'] = mockNotificationChannels[0].id; + element['_selectedFrequency'] = 'monthly'; + element['_selectedTriggers'] = ['feature_baseline_to_newly']; + element['_initialSelectedFrequency'] = 'immediate'; + element['_initialSelectedTriggers'] = []; + await element.updateComplete; + expect(element.isDirty).to.be.true; + + await element['_handleSave'](); + + expect(apiClient.createSubscription).to.have.been.calledWith('test-token', { + saved_search_id: 'new-saved-search-id', + channel_id: mockNotificationChannels[0].id, + frequency: 'monthly', + triggers: ['feature_baseline_to_newly'], + }); + expect(eventSpy).to.have.been.calledOnce; + }); + + it('dispatches SubscriptionSaveSuccessEvent on successful update', async () => { + const eventSpy = sandbox.spy(); + element.addEventListener('subscription-save-success', eventSpy); + + // Setup existing subscription + element['_subscriptionsForSavedSearch'] = [mockInitialSubscription]; + element['_activeChannelId'] = 
mockInitialSubscription.channel_id; + element['_selectedFrequency'] = 'immediate'; // Change frequency from 'weekly' + element['_initialSelectedFrequency'] = mockInitialSubscription.frequency; // Set initial + element['_initialSelectedTriggers'] = mockInitialSubscription.triggers.map(t => t.value) as components['schemas']['SubscriptionTriggerWritable'][]; + element['_selectedTriggers'] = [...mockInitialSubscription.triggers.map(t => t.value), 'feature_baseline_to_widely'] as components['schemas']['SubscriptionTriggerWritable'][]; + + await element.updateComplete; + expect(element.isDirty).to.be.true; + + await element['_handleSave'](); + + expect(apiClient.updateSubscription).to.have.been.calledWith( + mockInitialSubscription.id, + 'test-token', + { + frequency: 'immediate', + triggers: [...mockInitialSubscription.triggers.map(t => t.value), 'feature_baseline_to_widely'], + } + ); + expect(eventSpy).to.have.been.calledOnce; + }); + + it('dispatches SubscriptionSaveErrorEvent on save failure', async () => { + (apiClient.createSubscription as sinon.SinonStub) + .returns(Promise.reject(new Error('Save failed'))); + + const eventSpy = sandbox.spy(); + element.addEventListener('subscription-save-error', eventSpy); + + element.savedSearchId = 'test-search-id'; + element['_activeChannelId'] = mockNotificationChannels[0].id; + element['_selectedFrequency'] = 'monthly'; // Make it dirty + element['_initialSelectedFrequency'] = 'immediate'; + + await element['_handleSave'](); + + expect(eventSpy).to.have.been.calledOnce; + expect(eventSpy.args[0][0].detail.message).to.equal('Save failed'); + }); + + it('dispatches SubscriptionDeleteSuccessEvent on successful delete', async () => { + const eventSpy = sandbox.spy(); + element.addEventListener('subscription-delete-success', eventSpy); + + element.subscriptionId = 'test-sub-id'; + + await element['_handleDelete'](); + + expect(apiClient.deleteSubscription).to.have.been.calledWith('test-sub-id', 'test-token'); + 
expect(eventSpy).to.have.been.calledOnce; + }); + + it('dispatches SubscriptionDeleteErrorEvent on delete failure', async () => { + (apiClient.deleteSubscription as sinon.SinonStub) + .returns(Promise.reject(new Error('Delete failed'))); + + const eventSpy = sandbox.spy(); + element.addEventListener('subscription-delete-error', eventSpy); + + element.subscriptionId = 'test-sub-id'; + + await element['_handleDelete'](); + + expect(eventSpy).to.have.been.calledOnce; + expect(eventSpy.args[0][0].detail.message).to.equal('Delete failed'); + }); + + describe('_handleChannelChange', () => { + let confirmStub: sinon.SinonStub; + + beforeEach(async () => { + confirmStub = sandbox.stub(window, 'confirm'); + // listSubscriptions is already stubbed in the top-level beforeEach + + element.savedSearchId = 'test-search-id'; + element.open = true; + await element['_loadingTask'].taskComplete; + + // Manually set initial state for this test suite + element['_activeChannelId'] = mockInitialSubscription.channel_id; + element['_subscription'] = mockInitialSubscription; + element['_selectedTriggers'] = mockInitialSubscription.triggers.map(t => t.value) as components['schemas']['SubscriptionTriggerWritable'][]; + element['_selectedFrequency'] = mockInitialSubscription.frequency; + element['_initialSelectedTriggers'] = mockInitialSubscription.triggers.map(t => t.value) as components['schemas']['SubscriptionTriggerWritable'][]; + element['_initialSelectedFrequency'] = mockInitialSubscription.frequency; + await element.updateComplete; + + // Make it dirty by changing something on the initial channel + element['_selectedFrequency'] = 'immediate'; + await element.updateComplete; + expect(element.isDirty).to.be.true; + }); + + it('prompts user to discard changes when switching channels while dirty (cancel)', async () => { + confirmStub.returns(false); // User clicks cancel + + const originalActiveChannelId = element['_activeChannelId']; + const originalSelectedFrequency = 
element['_selectedFrequency']; + + element['_handleChannelChange'](mockOtherSubscription.channel_id); + await element.updateComplete; + + expect(confirmStub).to.have.been.calledOnce; + // Should revert to original channel + expect(element['_activeChannelId']).to.equal(originalActiveChannelId); + // Should keep original dirty changes + expect(element['_selectedFrequency']).to.equal(originalSelectedFrequency); + expect(element.isDirty).to.be.true; + }); + + it('discards changes and switches channels when switching channels while dirty (ok)', async () => { + confirmStub.returns(true); // User clicks OK + + element['_handleChannelChange'](mockOtherSubscription.channel_id); + await element.updateComplete; + + expect(confirmStub).to.have.been.calledOnce; + // Should switch to the new channel + expect(element['_activeChannelId']).to.equal(mockOtherSubscription.channel_id); + // Should have new settings from otherSubscription, thus no longer dirty + expect(element['_selectedFrequency']).to.equal(mockOtherSubscription.frequency); + expect(element.isDirty).to.be.false; + }); + + it('does not prompt user when switching channels while not dirty', async () => { + element['_selectedFrequency'] = mockInitialSubscription.frequency; // Make it not dirty + await element.updateComplete; + expect(element.isDirty).to.be.false; + + element['_handleChannelChange'](mockOtherSubscription.channel_id); + await element.updateComplete; + + expect(confirmStub).to.not.have.been.called; + expect(element['_activeChannelId']).to.equal(mockOtherSubscription.channel_id); + expect(element.isDirty).to.be.false; + }); + }); +}); diff --git a/frontend/src/static/js/components/test/webstatus-notification-email-channels.test.ts b/frontend/src/static/js/components/test/webstatus-notification-email-channels.test.ts new file mode 100644 index 000000000..22faf8ac6 --- /dev/null +++ b/frontend/src/static/js/components/test/webstatus-notification-email-channels.test.ts @@ -0,0 +1,86 @@ +/** + * Copyright 2026 
Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +import {assert, fixture, html} from '@open-wc/testing'; +import type {WebstatusNotificationEmailChannels} from '../../components/webstatus-notification-email-channels.js'; +import '../../components/webstatus-notification-email-channels.js'; +import {components} from 'webstatus.dev-backend'; +import {WebstatusNotificationPanel} from '../webstatus-notification-panel.js'; + +type NotificationChannelResponse = + components['schemas']['NotificationChannelResponse']; + +describe('webstatus-notification-email-channels', () => { + it('renders email channels correctly', async () => { + const mockChannels: NotificationChannelResponse[] = [ + { + id: '1', + type: 'email', + value: 'test1@example.com', + name: 'Email 1', + status: 'enabled', + created_at: '2023-01-01T00:00:00Z', + updated_at: '2023-01-01T00:00:00Z', + }, + { + id: '2', + type: 'email', + value: 'test2@example.com', + name: 'Email 2', + status: 'disabled', + created_at: '2023-01-01T00:00:00Z', + updated_at: '2023-01-01T00:00:00Z', + }, + ]; + + const el = await fixture(html` + + `); + + const emailItems = el.shadowRoot!.querySelectorAll('.channel-item'); + assert.equal(emailItems.length, mockChannels.length); + + // Test first email channel + const email1Name = emailItems[0].querySelector('.name'); + assert.include(email1Name!.textContent, 'test1@example.com'); + const email1Badge = emailItems[0].querySelector('sl-badge'); + 
assert.isNotNull(email1Badge); + assert.include(email1Badge!.textContent, 'Enabled'); + + // Test second email channel (disabled, so no badge) + const email2Name = emailItems[1].querySelector('.name'); + assert.include(email2Name!.textContent, 'test2@example.com'); + const email2Badge = emailItems[1].querySelector('sl-badge'); + assert.isNotNull(email2Badge); + assert.include(email2Badge!.textContent, 'Disabled'); + }); + + it('passes loading state to the base panel', async () => { + const el = await fixture(html` + + `); + + const basePanel = el.shadowRoot!.querySelector( + 'webstatus-notification-panel', + ); + assert.isNotNull(basePanel); + assert.isTrue(basePanel!.loading); + }); +}); diff --git a/frontend/src/static/js/components/test/webstatus-notification-panel.test.ts b/frontend/src/static/js/components/test/webstatus-notification-panel.test.ts new file mode 100644 index 000000000..cfda47b22 --- /dev/null +++ b/frontend/src/static/js/components/test/webstatus-notification-panel.test.ts @@ -0,0 +1,117 @@ +/** + * Copyright 2026 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +import {assert, fixture, html} from '@open-wc/testing'; +import '../../components/webstatus-notification-panel.js'; +import type {WebstatusNotificationPanel} from '../../components/webstatus-notification-panel.js'; + +describe('webstatus-notification-panel', () => { + it('renders content in the content slot', async () => { + const el = await fixture(html` + +
Test Content
+
+ `); + const contentSlot = el.shadowRoot!.querySelector( + 'slot[name="content"]', + ); + assert.isNotNull(contentSlot); + const assignedNodes = contentSlot.assignedNodes({ + flatten: true, + }) as HTMLElement[]; + assert.include(assignedNodes[0].textContent, 'Test Content'); + }); + + it('renders an icon in the icon slot', async () => { + const el = await fixture(html` + + + + `); + const iconSlot = + el.shadowRoot!.querySelector('slot[name="icon"]'); + assert.isNotNull(iconSlot); + const assignedNodes = iconSlot.assignedNodes({ + flatten: true, + }) as HTMLElement[]; + assert.equal(assignedNodes[0].tagName, 'SL-ICON'); + assert.equal(assignedNodes[0].getAttribute('name'), 'test-icon'); + }); + + it('renders a title in the title slot', async () => { + const el = await fixture(html` + + Test Title + + `); + const titleSlot = + el.shadowRoot!.querySelector('slot[name="title"]'); + assert.isNotNull(titleSlot); + const assignedNodes = titleSlot.assignedNodes({ + flatten: true, + }) as HTMLElement[]; + assert.include(assignedNodes[0].textContent, 'Test Title'); + }); + + it('renders actions in the actions slot', async () => { + const el = await fixture(html` + + Action Button + + `); + const actionsSlot = el.shadowRoot!.querySelector( + 'slot[name="actions"]', + ); + assert.isNotNull(actionsSlot); + const assignedNodes = actionsSlot.assignedNodes({ + flatten: true, + }) as HTMLElement[]; + assert.equal(assignedNodes[0].tagName, 'SL-BUTTON'); + assert.include(assignedNodes[0].textContent, 'Action Button'); + }); + + it('displays skeletons when loading is true and hides content', async () => { + const el = await fixture(html` + +
Test Content
+
+ `); + const skeletons = el.shadowRoot!.querySelectorAll('sl-skeleton'); + assert.equal(skeletons.length, 2); + const contentSlot = el.shadowRoot!.querySelector( + 'slot[name="content"]', + ); + assert.isNull(contentSlot); + }); + + it('hides skeletons when loading is false and shows content', async () => { + const el = await fixture(html` + +
Test Content
+
+ `); + const skeletons = el.shadowRoot!.querySelectorAll('sl-skeleton'); + assert.equal(skeletons.length, 0); + const contentSlot = el.shadowRoot!.querySelector( + 'slot[name="content"]', + ); + assert.isNotNull(contentSlot); + const assignedNodes = contentSlot.assignedNodes({ + flatten: true, + }) as HTMLElement[]; + assert.include(assignedNodes[0].textContent, 'Test Content'); + }); +}); diff --git a/frontend/src/static/js/components/test/webstatus-notification-rss-channels.test.ts b/frontend/src/static/js/components/test/webstatus-notification-rss-channels.test.ts new file mode 100644 index 000000000..93ab3fec8 --- /dev/null +++ b/frontend/src/static/js/components/test/webstatus-notification-rss-channels.test.ts @@ -0,0 +1,59 @@ +/** + * Copyright 2026 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +import {assert, fixture, html} from '@open-wc/testing'; +import type {WebstatusNotificationRssChannels} from '../../components/webstatus-notification-rss-channels.js'; +import '../../components/webstatus-notification-rss-channels.js'; +import '../../components/webstatus-notification-panel.js'; + +describe('webstatus-notification-rss-channels', () => { + it('displays "Coming soon" message', async () => { + const el = await fixture(html` + + `); + + const basePanel = el.shadowRoot!.querySelector( + 'webstatus-notification-panel', + ); + assert.isNotNull(basePanel); + + const comingSoonText = basePanel!.querySelector( + '[slot="content"] p', + ) as HTMLParagraphElement; + assert.isNotNull(comingSoonText); + assert.include(comingSoonText.textContent, 'Coming soon'); + }); + + it('displays "Create RSS channel" button', async () => { + const el = await fixture(html` + + `); + + const basePanel = el.shadowRoot!.querySelector( + 'webstatus-notification-panel', + ); + assert.isNotNull(basePanel); + + const createButton = basePanel!.querySelector( + '[slot="actions"] sl-button', + ) as HTMLButtonElement; + assert.isNotNull(createButton); + assert.include( + createButton.textContent!.trim().replace(/\s+/g, ' '), + 'Create RSS channel', + ); + }); +}); diff --git a/frontend/src/static/js/components/test/webstatus-notification-webhook-channels.test.ts b/frontend/src/static/js/components/test/webstatus-notification-webhook-channels.test.ts new file mode 100644 index 000000000..933cda626 --- /dev/null +++ b/frontend/src/static/js/components/test/webstatus-notification-webhook-channels.test.ts @@ -0,0 +1,59 @@ +/** + * Copyright 2026 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +import {assert, fixture, html} from '@open-wc/testing'; +import type {WebstatusNotificationWebhookChannels} from '../../components/webstatus-notification-webhook-channels.js'; +import '../../components/webstatus-notification-webhook-channels.js'; +import '../../components/webstatus-notification-panel.js'; + +describe('webstatus-notification-webhook-channels', () => { + it('displays "Coming soon" message', async () => { + const el = await fixture(html` + + `); + + const basePanel = el.shadowRoot!.querySelector( + 'webstatus-notification-panel', + ); + assert.isNotNull(basePanel); + + const comingSoonText = basePanel!.querySelector( + '[slot="content"] p', + ) as HTMLParagraphElement; + assert.isNotNull(comingSoonText); + assert.include(comingSoonText.textContent, 'Coming soon'); + }); + + it('displays "Create Webhook channel" button', async () => { + const el = await fixture(html` + + `); + + const basePanel = el.shadowRoot!.querySelector( + 'webstatus-notification-panel', + ); + assert.isNotNull(basePanel); + + const createButton = basePanel!.querySelector( + '[slot="actions"] sl-button', + ) as HTMLButtonElement; + assert.isNotNull(createButton); + assert.include( + createButton.textContent!.trim().replace(/\s+/g, ' '), + 'Create Webhook channel', + ); + }); +}); diff --git a/frontend/src/static/js/components/test/webstatus-subscribe-button.test.ts b/frontend/src/static/js/components/test/webstatus-subscribe-button.test.ts new file mode 100644 index 000000000..a70607876 --- /dev/null +++ 
b/frontend/src/static/js/components/test/webstatus-subscribe-button.test.ts @@ -0,0 +1,43 @@ +/** + * Copyright 2025 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +import {fixture, html} from '@open-wc/testing'; +import {expect} from '@esm-bundle/chai'; +import sinon from 'sinon'; +import '../webstatus-subscribe-button.js'; +import { + SubscribeButton, + SubscribeEvent, +} from '../webstatus-subscribe-button.js'; + +describe('webstatus-subscribe-button', () => { + it('dispatches subscribe event on click', async () => { + const savedSearchId = 'test-search-id'; + const element = await fixture(html` + + `); + const eventSpy = sinon.spy(); + element.addEventListener('subscribe', eventSpy); + + element.shadowRoot?.querySelector('sl-button')?.click(); + + expect(eventSpy).to.have.been.calledOnce; + const event = eventSpy.args[0][0] as SubscribeEvent; + expect(event.detail.savedSearchId).to.equal(savedSearchId); + }); +}); diff --git a/frontend/src/static/js/components/test/webstatus-subscriptions-page.test.ts b/frontend/src/static/js/components/test/webstatus-subscriptions-page.test.ts new file mode 100644 index 000000000..9f4e81a65 --- /dev/null +++ b/frontend/src/static/js/components/test/webstatus-subscriptions-page.test.ts @@ -0,0 +1,144 @@ +/** + * Copyright 2025 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +import {fixture, html} from '@open-wc/testing'; +import {expect} from '@esm-bundle/chai'; +import sinon from 'sinon'; +import {APIClient} from '../../api/client.js'; +import {User} from '../../contexts/firebase-user-context.js'; +import '../webstatus-subscriptions-page.js'; +import {SubscriptionsPage} from '../webstatus-subscriptions-page.js'; +import {type components} from 'webstatus.dev-backend'; + +function mockLocation() { + let search = ''; + return { + setSearch: (s: string) => { + search = s; + }, + getLocation: (): Location => ({search} as Location), + }; +} + +describe('webstatus-subscriptions-page', () => { + let sandbox: sinon.SinonSandbox; + let apiClient: APIClient; + let user: User; + let element: SubscriptionsPage; + let mockLocationHelper: ReturnType; + + const mockSubscriptions: components['schemas']['SubscriptionResponse'][] = [ + { + id: 'sub1', + saved_search_id: 'search1', + channel_id: 'channel1', + frequency: 'weekly', + triggers: [], + created_at: new Date().toISOString(), + updated_at: new Date().toISOString(), + }, + ]; + + const mockSavedSearches: components['schemas']['SavedSearchResponse'][] = [ + { + id: 'search1', + name: 'Test Search 1', + query: 'is:test', + created_at: new Date().toISOString(), + updated_at: new Date().toISOString(), + permissions: {role: 'saved_search_owner'}, + bookmark_status: {status: 'bookmark_none'}, + }, + ]; + + beforeEach(async () => { + sandbox = sinon.createSandbox(); + apiClient = { + listSubscriptions: sandbox.stub().resolves(mockSubscriptions), + getAllUserSavedSearches: 
sandbox.stub().resolves(mockSavedSearches), + } as any as APIClient; + user = {getIdToken: async () => 'test-token'} as User; + mockLocationHelper = mockLocation(); + + element = await fixture(html` + + `); + element.toaster = sandbox.stub(); + await element.updateComplete; + }); + + afterEach(() => { + sandbox.restore(); + }); + + it('renders a loading spinner initially', () => { + // This is hard to test reliably as the task starts immediately. + // We'll focus on the complete and error states. + }); + + it('fetches and renders subscriptions', async () => { + await element['_loadingTask'].taskComplete; + await element.updateComplete; + + expect(apiClient.listSubscriptions).to.have.been.calledWith('test-token'); + expect(apiClient.getAllUserSavedSearches).to.have.been.calledWith( + 'test-token' + ); + const renderedText = element.shadowRoot?.textContent; + expect(renderedText).to.include('Test Search 1'); + expect(renderedText).to.include('channel1'); + expect(renderedText).to.include('weekly'); + }); + + it('opens dialog on unsubscribe link', async () => { + mockLocationHelper.setSearch('?unsubscribe=test-sub-id'); + // willUpdate is called before update, so we need to trigger an update. 
+ element.requestUpdate(); + await element.updateComplete; + expect(element['_isSubscriptionDialogOpen']).to.be.true; + expect(element['_activeSubscriptionId']).to.equal('test-sub-id'); + }); + + it('refreshes on subscription save event', async () => { + await element['_loadingTask'].taskComplete; + await element.updateComplete; + const runSpy = sandbox.spy(element['_loadingTask'], 'run'); + + const dialog = element.shadowRoot?.querySelector( + 'webstatus-manage-subscriptions-dialog' + ); + dialog?.dispatchEvent(new CustomEvent('subscription-save-success')); + + expect(runSpy).to.have.been.calledOnce; + }); + + it('refreshes on subscription delete event', async () => { + await element['_loadingTask'].taskComplete; + await element.updateComplete; + const runSpy = sandbox.spy(element['_loadingTask'], 'run'); + + const dialog = element.shadowRoot?.querySelector( + 'webstatus-manage-subscriptions-dialog' + ); + dialog?.dispatchEvent(new CustomEvent('subscription-delete-success')); + + expect(runSpy).to.have.been.calledOnce; + }); +}); \ No newline at end of file diff --git a/frontend/src/static/js/components/webstatus-manage-subscriptions-dialog.ts b/frontend/src/static/js/components/webstatus-manage-subscriptions-dialog.ts new file mode 100644 index 000000000..f0642493a --- /dev/null +++ b/frontend/src/static/js/components/webstatus-manage-subscriptions-dialog.ts @@ -0,0 +1,409 @@ +/** + * Copyright 2025 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ + +import {consume} from '@lit/context'; +import {Task} from '@lit/task'; +import {LitElement, html, css, TemplateResult} from 'lit'; +import {customElement, property, state} from 'lit/decorators.js'; +import {apiClientContext} from '../contexts/api-client-context.js'; +import {APIClient} from '../api/client.js'; +import {SHARED_STYLES} from '../css/shared-css.js'; +import {type components} from 'webstatus.dev-backend'; +import {User, firebaseUserContext} from '../contexts/firebase-user-context.js'; + +export class SubscriptionSaveSuccessEvent extends CustomEvent { + constructor() { + super('subscription-save-success', {bubbles: true, composed: true}); + } +} + +export class SubscriptionSaveErrorEvent extends CustomEvent { + constructor(error: Error) { + super('subscription-save-error', { + bubbles: true, + composed: true, + detail: error, + }); + } +} + +export class SubscriptionDeleteSuccessEvent extends CustomEvent { + constructor() { + super('subscription-delete-success', {bubbles: true, composed: true}); + } +} + +export class SubscriptionDeleteErrorEvent extends CustomEvent { + constructor(error: Error) { + super('subscription-delete-error', { + bubbles: true, + composed: true, + detail: error, + }); + } +} + +@customElement('webstatus-manage-subscriptions-dialog') +export class ManageSubscriptionsDialog extends LitElement { + _loadingTask: Task; + + @consume({context: apiClientContext}) + @state() + apiClient!: APIClient; + + @consume({context: firebaseUserContext, subscribe: true}) + @state() + user: User | null | undefined; + + @property({type: String, attribute: 'saved-search-id'}) + savedSearchId = ''; + + @property({type: String, attribute: 'subscription-id'}) + subscriptionId = ''; + + @property({type: Boolean}) + open = false; + + @state() + private _notificationChannels: components['schemas']['NotificationChannelResponse'][] = + []; + + 
@state() + private _savedSearch: components['schemas']['SavedSearchResponse'] | null = + null; + + @state() + private _subscription: components['schemas']['SubscriptionResponse'] | null = + null; + + @state() + private _selectedTriggers: components['schemas']['SubscriptionTriggerWritable'][] = + []; + + @state() + private _selectedFrequency: components['schemas']['SubscriptionFrequency'] = + 'immediate'; + + @state() + private _initialSelectedTriggers: components['schemas']['SubscriptionTriggerWritable'][] = + []; + @state() + private _initialSelectedFrequency: components['schemas']['SubscriptionFrequency'] = + 'immediate'; + @state() + private _subscriptionsForSavedSearch: components['schemas']['SubscriptionResponse'][] = + []; + @state() + private _activeChannelId: string | undefined = undefined; + + static _TRIGGER_CONFIG: { + value: components['schemas']['SubscriptionTriggerWritable']; + label: string; + }[] = [ + { + value: 'feature_baseline_to_widely', + label: 'becomes widely available', + }, + { + value: 'feature_baseline_to_newly', + label: 'becomes newly available', + }, + { + value: 'feature_browser_implementation_any_complete', + label: 'gets a new browser implementation', + }, + { + value: 'feature_baseline_regression_to_limited', + label: 'regresses to limited availability', + }, + ]; + + static get styles() { + return [ + SHARED_STYLES, + css` + .dialog-overview { + --sl-dialog-width: 80vw; + } + `, + ]; + } + + get isDirty() { + console.log('isDirty check:'); + console.log(' _selectedFrequency:', this._selectedFrequency); + console.log(' _initialSelectedFrequency:', this._initialSelectedFrequency); + console.log(' _selectedTriggers:', this._selectedTriggers); + console.log(' _initialSelectedTriggers:', this._initialSelectedTriggers); + + if (this._selectedFrequency !== this._initialSelectedFrequency) { + return true; + } + + const sortedCurrent = [...this._selectedTriggers].sort(); + const sortedInitial = [...this._initialSelectedTriggers].sort(); + 
+ if (sortedCurrent.length !== sortedInitial.length) { + return true; + } + + for (let i = 0; i < sortedCurrent.length; i++) { + if (sortedCurrent[i] !== sortedInitial[i]) { + return true; + } + } + + return false; + } + + constructor() { + super(); + this._loadingTask = new Task(this, { + args: () => [ + this.apiClient, + this.savedSearchId, + this.subscriptionId, + this.open, + ], + task: async ([apiClient, savedSearchId, subscriptionId, open]) => { + if (!open || !apiClient || !this.user) { + return; + } + const token = await this.user.getIdToken(); + + const promises = []; + promises.push( + apiClient.listNotificationChannels(token).then(r => { + this._notificationChannels = r || []; + }), + ); + + if (savedSearchId) { + promises.push( + apiClient.getSavedSearchByID(savedSearchId, token).then(r => { + this._savedSearch = r; + }), + ); + promises.push( + apiClient.listSubscriptions(token).then(r => { + this._subscriptionsForSavedSearch = + r.filter(s => s.saved_search_id === savedSearchId) || []; + }), + ); + } + + if (subscriptionId) { + // TODO: Fetch subscription details + } + + await Promise.all(promises); + }, + }); + } + + render(): TemplateResult { + return html` + (this.open = false)} + > + ${this._loadingTask.render({ + pending: () => html``, + complete: () => this.renderContent(), + error: e => html`Error: ${e}`, + })} + + `; + } + + renderContent(): TemplateResult { + const confirmDeletion = this.subscriptionId && !this.savedSearchId; + if (confirmDeletion) { + return html` +

Are you sure you want to unsubscribe?

+ + Confirm Unsubscribe + + `; + } + + return html` +

+ Select how and when you want to get updates for + ${this._subscription?.saved_search_id ?? + this._savedSearch?.name}. +

+ +
+
+

Notification channels

+ ${this._notificationChannels.map( + channel => html` + { + this._handleChannelChange(channel.id); + }} + >${channel.name} (${channel.type}) + `, + )} +
+
+

Triggers

+

Get an update when a feature...

+ ${ManageSubscriptionsDialog._TRIGGER_CONFIG.map( + trigger => html` + { + const checkbox = e.target as HTMLInputElement; + if (checkbox.checked) { + this._selectedTriggers.push(trigger.value); + } else { + this._selectedTriggers = this._selectedTriggers.filter( + t => t !== trigger.value, + ); + } + }} + >...${trigger.label} + `, + )} +
+
+

Frequency

+ { + const radioGroup = e.target as HTMLInputElement; + this._selectedFrequency = + radioGroup.value as components['schemas']['SubscriptionFrequency']; + }} + > + Immediately + Weekly updates + Monthly updates + +
+
+ + Save + `; + } + + private async _handleSave() { + if (!this.user || !this.isDirty || !this._activeChannelId) { + return; + } + try { + const token = await this.user.getIdToken(); + const existingSub = this._subscriptionsForSavedSearch.find( + s => s.channel_id === this._activeChannelId, + ); + + if (existingSub) { + // Update + const updates: { + triggers?: components['schemas']['SubscriptionTriggerWritable'][]; + frequency?: components['schemas']['SubscriptionFrequency']; + } = {}; + if (this._selectedFrequency !== this._initialSelectedFrequency) { + updates.frequency = this._selectedFrequency; + } + const triggersChanged = + this._selectedTriggers.length !== + this._initialSelectedTriggers.length || + [...this._selectedTriggers].sort().join(',') !== + [...this._initialSelectedTriggers].sort().join(','); + + if (triggersChanged) { + updates.triggers = this._selectedTriggers; + } + + await this.apiClient.updateSubscription(existingSub.id, token, updates); + } else { + // Create + await this.apiClient.createSubscription(token, { + saved_search_id: this.savedSearchId, + channel_id: this._activeChannelId, + frequency: this._selectedFrequency, + triggers: this._selectedTriggers, + }); + } + this.dispatchEvent(new SubscriptionSaveSuccessEvent()); + } catch (e) { + this.dispatchEvent(new SubscriptionSaveErrorEvent(e as Error)); + } + } + + private async _handleDelete() { + if (!this.subscriptionId || !this.user) { + return; + } + try { + const token = await this.user.getIdToken(); + await this.apiClient.deleteSubscription(this.subscriptionId, token); + this.dispatchEvent(new SubscriptionDeleteSuccessEvent()); + } catch (e) { + this.dispatchEvent(new SubscriptionDeleteErrorEvent(e as Error)); + } + } + + private _handleChannelChange(channelId: string) { + const previousActiveChannelId = this._activeChannelId; + if (this.isDirty) { + if (!confirm('You have unsaved changes. Discard them?')) { + // If user cancels, prevent channel change. 
The UI will naturally + // remain on the previously active radio button due to Lit's rendering. + // Alternatives include explicitly re-checking the old radio button or + // presenting a more advanced 'Save/Discard/Cancel' dialog. + this._activeChannelId = previousActiveChannelId; // Explicitly revert UI + return; + } + } + + this._activeChannelId = channelId; + const sub = this._subscriptionsForSavedSearch.find( + s => s.channel_id === channelId, + ); + if (sub) { + this._subscription = sub; + this._activeChannelId = sub.channel_id; + this._selectedTriggers = sub.triggers.map( + t => t.value as components['schemas']['SubscriptionTriggerWritable'], + ); + this._selectedFrequency = sub.frequency; + } else { + this._subscription = null; + this._selectedTriggers = []; + this._selectedFrequency = 'immediate'; + } + this._initialSelectedTriggers = [...this._selectedTriggers]; + this._initialSelectedFrequency = this._selectedFrequency; + } +} diff --git a/frontend/src/static/js/components/webstatus-notification-channels-page.ts b/frontend/src/static/js/components/webstatus-notification-channels-page.ts new file mode 100644 index 000000000..c524be7d1 --- /dev/null +++ b/frontend/src/static/js/components/webstatus-notification-channels-page.ts @@ -0,0 +1,118 @@ +/** + * Copyright 2026 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law of a an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +import {consume} from '@lit/context'; +import {LitElement, css, html} from 'lit'; +import {customElement, state} from 'lit/decorators.js'; +import {User} from 'firebase/auth'; +import {Task} from '@lit/task'; + +import {firebaseUserContext} from '../contexts/firebase-user-context.js'; +import {apiClientContext} from '../contexts/api-client-context.js'; +import {APIClient} from '../api/client.js'; +import {components} from 'webstatus.dev-backend'; +import {toast} from '../utils/toast.js'; +import {navigateToUrl} from '../utils/app-router.js'; + +import './webstatus-notification-email-channels.js'; +import './webstatus-notification-rss-channels.js'; +import './webstatus-notification-webhook-channels.js'; + +type NotificationChannelResponse = + components['schemas']['NotificationChannelResponse']; + +@customElement('webstatus-notification-channels-page') +export class WebstatusNotificationChannelsPage extends LitElement { + static styles = css` + .container { + display: flex; + flex-direction: column; + gap: 16px; + } + `; + + @consume({context: firebaseUserContext, subscribe: true}) + @state() + user: User | null | undefined; + + @consume({context: apiClientContext}) + @state() + apiClient!: APIClient; + + @state() + private emailChannels: NotificationChannelResponse[] = []; + + private _channelsTask = new Task(this, { + task: async () => { + if (this.user === null) { + navigateToUrl('/'); + void toast('You must be logged in to view this page.', 'danger'); + return; + } + if (this.user === undefined) { + return; + } + + const token = await this.user.getIdToken(); + const channels = await this.apiClient + .listNotificationChannels(token) + .catch(e => { + const errorMessage = e instanceof Error ? 
e.message : 'unknown error'; + void toast( + `Failed to load notification channels: ${errorMessage}`, + 'danger', + ); + return []; + }); + this.emailChannels = channels.filter(c => c.type === 'email'); + }, + args: () => [this.user], + }); + + render() { + return html` +
+ ${this._channelsTask.render({ + pending: () => html` + + + + + + + + + `, + + complete: () => html` + + + + + + + `, + error: e => { + const errorMessage = + e instanceof Error ? e.message : 'unknown error'; + return html`

Error: ${errorMessage}

`; + }, + })} +
+ `; + } +} diff --git a/frontend/src/static/js/components/webstatus-notification-email-channels.ts b/frontend/src/static/js/components/webstatus-notification-email-channels.ts new file mode 100644 index 000000000..60223adf8 --- /dev/null +++ b/frontend/src/static/js/components/webstatus-notification-email-channels.ts @@ -0,0 +1,97 @@ +/** + * Copyright 2026 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +import {LitElement, css, html} from 'lit'; +import {customElement, property} from 'lit/decorators.js'; +import {repeat} from 'lit/directives/repeat.js'; +import {components} from 'webstatus.dev-backend'; +import './webstatus-notification-panel.js'; + +type NotificationChannelResponse = + components['schemas']['NotificationChannelResponse']; + +@customElement('webstatus-notification-email-channels') +export class WebstatusNotificationEmailChannels extends LitElement { + static styles = css` + .channel-item { + background-color: #f9f9f9; + display: flex; + align-items: center; + justify-content: space-between; + padding: 8px 16px; + border-bottom: 1px solid #e4e4e7; + } + + .channel-item:last-child { + border-bottom: none; + } + + .channel-info { + display: flex; + flex-direction: column; + } + + .channel-info .name { + font-size: 14px; + } + + .info-icon-button { + font-size: 1.2rem; + } + `; + + @property({type: Array}) + channels: NotificationChannelResponse[] = []; + + @property({type: Boolean}) + loading = false; + + render() { + 
return html` + + + Email +
+ + + +
+
+ ${repeat( + this.channels, + channel => channel.id, + channel => html` +
+
+ ${channel.value} +
+ ${channel.status === 'enabled' + ? html`Enabled` + : html`Disabled`} +
+ `, + )} +
+
+ `; + } +} diff --git a/frontend/src/static/js/components/webstatus-notification-panel.ts b/frontend/src/static/js/components/webstatus-notification-panel.ts new file mode 100644 index 000000000..c1d577000 --- /dev/null +++ b/frontend/src/static/js/components/webstatus-notification-panel.ts @@ -0,0 +1,84 @@ +/** + * Copyright 2024 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +import {LitElement, css, html} from 'lit'; +import {customElement, property} from 'lit/decorators.js'; + +@customElement('webstatus-notification-panel') +export class WebstatusNotificationPanel extends LitElement { + static styles = css` + .card { + border: 1px solid #e4e4e7; + border-radius: 4px; + overflow: hidden; + } + + .card-header { + display: flex; + align-items: center; + justify-content: space-between; + padding: 8px 16px; + } + + .card-header .title { + display: flex; + align-items: center; + gap: 8px; + font-weight: bold; + font-size: 16px; + } + + .card-body { + padding: 0 20px 20px 20px; + } + + .loading-skeleton { + display: flex; + flex-direction: column; + gap: 10px; + padding: 16px; + } + `; + + @property({type: Boolean}) + loading = false; + + render() { + return html` +
+
+
+ + +
+
+ +
+
+
+ ${this.loading + ? html` +
+ + +
+ ` + : html``} +
+
+ `; + } +} diff --git a/frontend/src/static/js/components/webstatus-notification-rss-channels.ts b/frontend/src/static/js/components/webstatus-notification-rss-channels.ts new file mode 100644 index 000000000..a7d25d04b --- /dev/null +++ b/frontend/src/static/js/components/webstatus-notification-rss-channels.ts @@ -0,0 +1,47 @@ +/** + * Copyright 2026 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +import {LitElement, css, html} from 'lit'; +import {customElement} from 'lit/decorators.js'; +import './webstatus-notification-panel.js'; + +@customElement('webstatus-notification-rss-channels') +export class WebstatusNotificationRssChannels extends LitElement { + static styles = css` + .card-body { + padding: 20px; + color: #71717a; + } + `; + + render() { + return html` + + + RSS +
+ Create RSS + channel +
+
+

Coming soon

+
+
+ `; + } +} diff --git a/frontend/src/static/js/components/webstatus-notification-webhook-channels.ts b/frontend/src/static/js/components/webstatus-notification-webhook-channels.ts new file mode 100644 index 000000000..ac02f9b0a --- /dev/null +++ b/frontend/src/static/js/components/webstatus-notification-webhook-channels.ts @@ -0,0 +1,47 @@ +/** + * Copyright 2026 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +import {LitElement, css, html} from 'lit'; +import {customElement} from 'lit/decorators.js'; +import './webstatus-notification-panel.js'; + +@customElement('webstatus-notification-webhook-channels') +export class WebstatusNotificationWebhookChannels extends LitElement { + static styles = css` + .card-body { + padding: 20px; + color: #71717a; + } + `; + + render() { + return html` + + + Webhook +
+ Create Webhook + channel +
+
+

Coming soon

+
+
+ `; + } +} diff --git a/frontend/src/static/js/components/webstatus-overview-content.ts b/frontend/src/static/js/components/webstatus-overview-content.ts index d57532f17..ce24ace28 100644 --- a/frontend/src/static/js/components/webstatus-overview-content.ts +++ b/frontend/src/static/js/components/webstatus-overview-content.ts @@ -30,6 +30,8 @@ import {type components} from 'webstatus.dev-backend'; import './webstatus-overview-data-loader.js'; import './webstatus-overview-filters.js'; import './webstatus-overview-pagination.js'; +import './webstatus-subscribe-button.js'; +import './webstatus-manage-subscriptions-dialog.js'; import {SHARED_STYLES} from '../css/shared-css.js'; import {TaskTracker} from '../utils/task-tracker.js'; import {ApiError} from '../api/errors.js'; @@ -53,6 +55,13 @@ import { SavedSearchOperationType, UserSavedSearch, } from '../utils/constants.js'; +import {SubscribeEvent} from './webstatus-subscribe-button.js'; +import {ifDefined} from 'lit/directives/if-defined.js'; +import {toast} from '../utils/toast.js'; +import { + SubscriptionSaveErrorEvent, + SubscriptionDeleteErrorEvent, +} from './webstatus-manage-subscriptions-dialog.js'; @customElement('webstatus-overview-content') export class WebstatusOverviewContent extends LitElement { @@ -86,6 +95,12 @@ export class WebstatusOverviewContent extends LitElement { @query('webstatus-saved-search-editor') savedSearchEditor!: WebstatusSavedSearchEditor; + @state() + private _isSubscriptionDialogOpen = false; + + @state() + private _activeSavedSearchId: string | undefined = undefined; + static get styles(): CSSResultGroup { return [ SHARED_STYLES, @@ -235,6 +250,12 @@ export class WebstatusOverviewContent extends LitElement { .user=${this.user} .apiClient=${this.apiClient} > + ${userSavedSearch + ? html`` + : nothing}
+ (this._isSubscriptionDialogOpen = false)} + @subscription-save-success=${this._handleSubscriptionSaveSuccess} + @subscription-save-error=${this._handleSubscriptionSaveError} + @subscription-delete-success=${this._handleSubscriptionDeleteSuccess} + @subscription-delete-error=${this._handleSubscriptionDeleteError} + > + `; } + + private _handleSubscribe(e: SubscribeEvent) { + this._activeSavedSearchId = e.detail.savedSearchId; + this._isSubscriptionDialogOpen = true; + } + + private _handleSubscriptionSaveSuccess() { + this._isSubscriptionDialogOpen = false; + void toast('Subscription saved!', 'success'); + } + + private _handleSubscriptionSaveError(e: SubscriptionSaveErrorEvent) { + void toast(`Error saving subscription: ${e.detail.message}`, 'danger'); + } + + private _handleSubscriptionDeleteSuccess() { + this._isSubscriptionDialogOpen = false; + void toast('Subscription deleted!', 'success'); + } + + private _handleSubscriptionDeleteError(e: SubscriptionDeleteErrorEvent) { + void toast(`Error deleting subscription: ${e.detail.message}`, 'danger'); + } } diff --git a/frontend/src/static/js/components/webstatus-sidebar-menu.ts b/frontend/src/static/js/components/webstatus-sidebar-menu.ts index 3b285ebfe..9e47c318a 100644 --- a/frontend/src/static/js/components/webstatus-sidebar-menu.ts +++ b/frontend/src/static/js/components/webstatus-sidebar-menu.ts @@ -50,11 +50,14 @@ import { savedSearchHelpers, } from '../contexts/app-bookmark-info-context.js'; import {TaskStatus} from '@lit/task'; +import {User} from 'firebase/auth'; +import {firebaseUserContext} from '../contexts/firebase-user-context.js'; // Map from sl-tree-item ids to paths. 
enum NavigationItemKey { FEATURES = 'features-item', STATISTICS = 'statistics-item', + NOTIFICATION_CHANNELS = 'notification-channels-item', } interface NavigationItem { @@ -75,6 +78,10 @@ const navigationMap: NavigationMap = { id: NavigationItemKey.STATISTICS, path: '/stats', }, + [NavigationItemKey.NOTIFICATION_CHANNELS]: { + id: NavigationItemKey.NOTIFICATION_CHANNELS, + path: '/settings/notification-channels', + }, }; interface GetLocationFunction { @@ -152,6 +159,10 @@ export class WebstatusSidebarMenu extends LitElement { @state() appBookmarkInfo?: AppBookmarkInfo; + @consume({context: firebaseUserContext, subscribe: true}) + @state() + user: User | null | undefined; + // For now, unconditionally open the features dropdown. @state() private isFeaturesDropdownExpanded: boolean = true; @@ -371,6 +382,28 @@ export class WebstatusSidebarMenu extends LitElement { `; } + renderSettingsMenu(): TemplateResult { + if (this.user === undefined) { + return html`${nothing}`; + } + if (this.user === null) { + return html`${nothing}`; + } + + return html` + + + + + Notification Channels + + + `; + } + render(): TemplateResult { return html` @@ -396,7 +429,7 @@ export class WebstatusSidebarMenu extends LitElement { Statistics --> - ${this.renderUserSavedSearches()} + ${this.renderUserSavedSearches()} ${this.renderSettingsMenu()} diff --git a/frontend/src/static/js/components/webstatus-subscribe-button.ts b/frontend/src/static/js/components/webstatus-subscribe-button.ts new file mode 100644 index 000000000..5617d76fc --- /dev/null +++ b/frontend/src/static/js/components/webstatus-subscribe-button.ts @@ -0,0 +1,53 @@ +/** + * Copyright 2025 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +import {LitElement, html, css, TemplateResult} from 'lit'; +import {customElement, property} from 'lit/decorators.js'; + +export class SubscribeEvent extends CustomEvent<{savedSearchId: string}> { + constructor(savedSearchId: string) { + super('subscribe', { + bubbles: true, + composed: true, + detail: {savedSearchId}, + }); + } +} + +@customElement('webstatus-subscribe-button') +export class SubscribeButton extends LitElement { + @property({type: String, attribute: 'saved-search-id'}) + savedSearchId = ''; + + static styles = css` + sl-button::part(base) { + font-size: var(--sl-button-font-size-medium); + } + `; + + private _handleClick() { + this.dispatchEvent(new SubscribeEvent(this.savedSearchId)); + } + + render(): TemplateResult { + return html` + + + Subscribe to updates + + `; + } +} diff --git a/frontend/src/static/js/components/webstatus-subscriptions-page.ts b/frontend/src/static/js/components/webstatus-subscriptions-page.ts new file mode 100644 index 000000000..9d92bb941 --- /dev/null +++ b/frontend/src/static/js/components/webstatus-subscriptions-page.ts @@ -0,0 +1,183 @@ +/** + * Copyright 2025 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +import {LitElement, html, TemplateResult} from 'lit'; +import {customElement, state, property} from 'lit/decorators.js'; +import {Task} from '@lit/task'; +import {consume} from '@lit/context'; +import {APIClient} from '../api/client.js'; +import {apiClientContext} from '../contexts/api-client-context.js'; +import {User, firebaseUserContext} from '../contexts/firebase-user-context.js'; +import {toast} from '../utils/toast.js'; +import { + SubscriptionSaveErrorEvent, + SubscriptionDeleteErrorEvent, +} from './webstatus-manage-subscriptions-dialog.js'; +import {ifDefined} from 'lit/directives/if-defined.js'; +import {type components} from 'webstatus.dev-backend'; + +interface GetLocationFunction { + (): Location; +} + +@customElement('webstatus-subscriptions-page') +export class SubscriptionsPage extends LitElement { + _loadingTask: Task; + + @consume({context: apiClientContext}) + @state() + apiClient!: APIClient; + + @consume({context: firebaseUserContext, subscribe: true}) + @state() + user: User | null | undefined; + + @property({attribute: false}) + getLocation: GetLocationFunction = () => window.location; + + @property({attribute: false}) + toaster = toast; + + @state() + private _isSubscriptionDialogOpen = false; + + @state() + private _activeSubscriptionId: string | undefined = undefined; + + @state() + private _subscriptions: components['schemas']['SubscriptionResponse'][] = []; + + @state() + private _savedSearches: Map< + string, + components['schemas']['SavedSearchResponse'] + > = new Map(); + + constructor() { + super(); + 
this._loadingTask = new Task(this, { + args: () => [this.apiClient, this.user], + task: async ([apiClient, user]) => { + if (!apiClient || !user) { + return; + } + const token = await user.getIdToken(); + + const [subscriptions, savedSearches] = await Promise.all([ + apiClient.listSubscriptions(token), + apiClient.getAllUserSavedSearches(token), + ]); + + this._subscriptions = subscriptions; + this._savedSearches = new Map(savedSearches.map(ss => [ss.id, ss])); + }, + }); + } + + willUpdate() { + const urlParams = new URLSearchParams(this.getLocation().search); + const unsubscribeToken = urlParams.get('unsubscribe'); + if (unsubscribeToken) { + this._activeSubscriptionId = unsubscribeToken; + this._isSubscriptionDialogOpen = true; + } + } + + render(): TemplateResult { + return html` +

My Subscriptions

+ ${this._loadingTask.render({ + pending: () => html``, + complete: () => this.renderSubscriptions(), + error: e => html`Error: ${e}`, + })} + (this._isSubscriptionDialogOpen = false)} + @subscription-save-success=${this._handleSubscriptionSaveSuccess} + @subscription-save-error=${this._handleSubscriptionSaveError} + @subscription-delete-success=${this._handleSubscriptionDeleteSuccess} + @subscription-delete-error=${this._handleSubscriptionDeleteError} + > + + `; + } + + private renderSubscriptions(): TemplateResult { + if (this._subscriptions.length === 0) { + return html`

No subscriptions found.

`; + } + + return html` +
    + ${this._subscriptions.map(sub => { + const savedSearch = this._savedSearches.get(sub.saved_search_id); + return html` +
  • + ${savedSearch?.name ?? sub.saved_search_id} + (Channel: ${sub.channel_id}, Frequency: ${sub.frequency}) + this._openEditDialog(sub.id)} + >Edit + this._openDeleteDialog(sub.id)} + >Delete +
  • + `; + })} +
+ `; + } + + private _openEditDialog(subscriptionId: string) { + this._activeSubscriptionId = subscriptionId; + this._isSubscriptionDialogOpen = true; + } + + private _openDeleteDialog(subscriptionId: string) { + this._activeSubscriptionId = subscriptionId; + // In this case, we're initiating a delete from the list, not an unsubscribe link. + // The dialog itself will handle the confirmation internally. + this._isSubscriptionDialogOpen = true; + // The dialog should internally check if it's a delete scenario via subscriptionId only + // and render the confirmation view if savedSearchId is not present. + } + + private _handleSubscriptionSaveSuccess() { + this._isSubscriptionDialogOpen = false; + void this.toaster('Subscription saved!', 'success'); + void this._loadingTask.run(); + } + + private _handleSubscriptionSaveError(e: SubscriptionSaveErrorEvent) { + void this.toaster(`Error saving subscription: ${e.detail.message}`, 'danger'); + } + + private _handleSubscriptionDeleteSuccess() { + this._isSubscriptionDialogOpen = false; + void this.toaster('Subscription deleted!', 'success'); + void this._loadingTask.run(); + } + + private _handleSubscriptionDeleteError(e: SubscriptionDeleteErrorEvent) { + void this.toaster(`Error deleting subscription: ${e.detail.message}`, 'danger'); + } +} diff --git a/frontend/src/static/js/contexts/api-client-context.ts b/frontend/src/static/js/contexts/api-client-context.ts index eea6dab01..973678a29 100644 --- a/frontend/src/static/js/contexts/api-client-context.ts +++ b/frontend/src/static/js/contexts/api-client-context.ts @@ -17,6 +17,6 @@ import {createContext} from '@lit/context'; import type {APIClient} from '../api/client.js'; -export type {APIClient} from '../api/client.js'; +export {APIClient} from '../api/client.js'; export const apiClientContext = createContext('api-client'); diff --git a/frontend/src/static/js/utils/app-router.ts b/frontend/src/static/js/utils/app-router.ts index 96f316b87..709056f07 100644 --- 
a/frontend/src/static/js/utils/app-router.ts +++ b/frontend/src/static/js/utils/app-router.ts @@ -21,6 +21,8 @@ import '../components/webstatus-feature-page.js'; import '../components/webstatus-stats-page.js'; import '../components/webstatus-notfound-error-page.js'; import '../components/webstatus-feature-gone-split-page.js'; +import '../components/webstatus-notification-channels-page.js'; +import '../components/webstatus-subscriptions-page.js'; export const initRouter = async (element: HTMLElement): Promise => { const router = new Router(element); @@ -37,6 +39,14 @@ export const initRouter = async (element: HTMLElement): Promise => { component: 'webstatus-stats-page', path: '/stats', }, + { + component: 'webstatus-notification-channels-page', + path: '/settings/notification-channels', + }, + { + component: 'webstatus-subscriptions-page', + path: '/settings/subscriptions', + }, { component: 'webstatus-feature-gone-split-page', path: '/errors-410/feature-gone-split', diff --git a/infra/storage/spanner/migrations/000029.sql b/infra/storage/spanner/migrations/000029.sql new file mode 100644 index 000000000..5b1257938 --- /dev/null +++ b/infra/storage/spanner/migrations/000029.sql @@ -0,0 +1,17 @@ +-- Copyright 2025 Google LLC +-- +-- Licensed under the Apache License, Version 2.0 (the "License"); +-- you may not use this file except in compliance with the License. +-- You may obtain a copy of the License at +-- +-- http://www.apache.org/licenses/LICENSE-2.0 +-- +-- Unless required by applicable law or agreed to in writing, software +-- distributed under the License is distributed on an "AS IS" BASIS, +-- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +-- See the License for the specific language governing permissions and +-- limitations under the License. + +-- This index helps get the latest events for a search for the event producer worker. 
+CREATE INDEX IF NOT EXISTS SavedSearchNotificationEvents_BySearchAndSnapshotType +ON SavedSearchNotificationEvents (SavedSearchId, SnapshotType, Timestamp DESC); \ No newline at end of file diff --git a/lib/backendtypes/types.go b/lib/backendtypes/types.go index 8382a2e0a..b1dd3eae3 100644 --- a/lib/backendtypes/types.go +++ b/lib/backendtypes/types.go @@ -86,3 +86,15 @@ func AttemptToStoreSubscriptionTriggerUnknown() backend.SubscriptionTriggerRespo return ret } + +func DefaultBrowsers() []backend.BrowserPathParam { + return []backend.BrowserPathParam{ + backend.Chrome, + backend.Edge, + backend.Firefox, + backend.Safari, + backend.ChromeAndroid, + backend.FirefoxAndroid, + backend.SafariIos, + } +} diff --git a/lib/blobtypes/featurelistdiff/v1/reconciler.go b/lib/blobtypes/featurelistdiff/v1/reconciler.go index cc180bc78..14b50779f 100644 --- a/lib/blobtypes/featurelistdiff/v1/reconciler.go +++ b/lib/blobtypes/featurelistdiff/v1/reconciler.go @@ -38,16 +38,18 @@ func (w *FeatureDiffWorkflow) ReconcileHistory(ctx context.Context) error { // Phase 1: Investigation // Iterate through all removed features to build a map of their historical outcomes. - for i := range w.diff.Removed { - r := &w.diff.Removed[i] - + remainingRemoved := make([]FeatureRemoved, 0, len(w.diff.Removed)) + for _, r := range w.diff.Removed { // Check the current status of the removed feature ID in the database. result, err := w.fetcher.GetFeature(ctx, r.ID) if err != nil { // If the entity is completely gone from the DB, it's a true deletion. - // We update the reason to allow for specific UI messaging (e.g. "Deleted from platform"). 
if errors.Is(err, backendtypes.ErrEntityDoesNotExist) { - r.Reason = ReasonDeleted + w.diff.Deleted = append(w.diff.Deleted, FeatureDeleted{ + ID: r.ID, + Name: r.Name, + Reason: ReasonDeleted, + }) continue } @@ -55,6 +57,9 @@ func (w *FeatureDiffWorkflow) ReconcileHistory(ctx context.Context) error { return err } + // If the feature was not deleted, keep it in the list to check for moves/splits. + remainingRemoved = append(remainingRemoved, r) + // Update the visitor context so it knows which OldID owns the result we are about to visit. visitor.currentID = r.ID @@ -63,6 +68,10 @@ func (w *FeatureDiffWorkflow) ReconcileHistory(ctx context.Context) error { return err } } + // Update the diff with the (now smaller) list of removed items to check. + if len(remainingRemoved) > 0 { + w.diff.Removed = remainingRemoved + } // Phase 2: Correlation // If we found any history records, try to match them with the 'Added' list. diff --git a/lib/blobtypes/featurelistdiff/v1/reconciler_test.go b/lib/blobtypes/featurelistdiff/v1/reconciler_test.go index 3f6249a89..6493de594 100644 --- a/lib/blobtypes/featurelistdiff/v1/reconciler_test.go +++ b/lib/blobtypes/featurelistdiff/v1/reconciler_test.go @@ -63,6 +63,7 @@ func TestReconcileHistory(t *testing.T) { name: "Scenario 1: Feature Moved (Rename)", initialDiff: &FeatureDiff{ Removed: []FeatureRemoved{{ID: "old-id", Name: "Old Name", Reason: ReasonUnmatched}}, + Deleted: nil, Added: []FeatureAdded{{ID: "new-id", Name: "New Name", Reason: ReasonNewMatch, Docs: nil}}, QueryChanged: false, Modified: nil, @@ -84,6 +85,7 @@ func TestReconcileHistory(t *testing.T) { QueryChanged: false, Modified: nil, Splits: nil, + Deleted: nil, }, wantErr: false, }, @@ -99,6 +101,7 @@ func TestReconcileHistory(t *testing.T) { Modified: nil, Moves: nil, Splits: nil, + Deleted: nil, }, mockResults: map[string]*backendtypes.GetFeatureResult{ "monolith": backendtypes.NewGetFeatureResult( @@ -127,6 +130,7 @@ func TestReconcileHistory(t *testing.T) { 
QueryChanged: false, Modified: nil, Moves: nil, + Deleted: nil, }, wantErr: false, }, @@ -142,6 +146,7 @@ func TestReconcileHistory(t *testing.T) { Modified: nil, Moves: nil, Splits: nil, + Deleted: nil, }, mockResults: map[string]*backendtypes.GetFeatureResult{ "monolith": backendtypes.NewGetFeatureResult( @@ -170,6 +175,7 @@ func TestReconcileHistory(t *testing.T) { QueryChanged: false, Modified: nil, Moves: nil, + Deleted: nil, }, wantErr: false, }, @@ -182,6 +188,7 @@ func TestReconcileHistory(t *testing.T) { Modified: nil, Moves: nil, Splits: nil, + Deleted: nil, }, mockResults: map[string]*backendtypes.GetFeatureResult{ "removed-id": backendtypes.NewGetFeatureResult( @@ -208,26 +215,29 @@ func TestReconcileHistory(t *testing.T) { Modified: nil, Moves: nil, Splits: nil, + Deleted: nil, }, wantErr: false, }, { name: "Scenario 5: Hard Delete (EntityDoesNotExist)", initialDiff: &FeatureDiff{ - Removed: []FeatureRemoved{{ID: "deleted-id", Name: "Deleted Feature", Reason: ReasonUnmatched}}, + Deleted: []FeatureDeleted{{ID: "deleted-id", Name: "Deleted Feature", Reason: ReasonDeleted}}, Added: nil, QueryChanged: false, Modified: nil, Moves: nil, Splits: nil, + Removed: nil, }, mockResults: nil, mockErrors: map[string]error{ "deleted-id": backendtypes.ErrEntityDoesNotExist, }, expectedDiff: &FeatureDiff{ - // Remains in Removed list, but Reason updated to Deleted - Removed: []FeatureRemoved{{ID: "deleted-id", Name: "Deleted Feature", Reason: ReasonDeleted}}, + // Should be moved to Deleted list + Removed: nil, + Deleted: []FeatureDeleted{{ID: "deleted-id", Name: "Deleted Feature", Reason: ReasonDeleted}}, Added: nil, QueryChanged: false, Modified: nil, @@ -247,6 +257,7 @@ func TestReconcileHistory(t *testing.T) { Modified: nil, Moves: nil, Splits: nil, + Deleted: nil, }, mockResults: map[string]*backendtypes.GetFeatureResult{ "old-id": backendtypes.NewGetFeatureResult( @@ -261,6 +272,7 @@ func TestReconcileHistory(t *testing.T) { Modified: nil, Moves: nil, Splits: 
nil, + Deleted: nil, }, wantErr: false, }, @@ -273,6 +285,7 @@ func TestReconcileHistory(t *testing.T) { Modified: nil, Moves: nil, Splits: nil, + Deleted: nil, }, mockResults: nil, mockErrors: map[string]error{ @@ -292,6 +305,7 @@ func TestReconcileHistory(t *testing.T) { Modified: nil, Moves: nil, Splits: nil, + Deleted: nil, }, mockResults: map[string]*backendtypes.GetFeatureResult{ "monolith": backendtypes.NewGetFeatureResult( @@ -310,6 +324,7 @@ func TestReconcileHistory(t *testing.T) { Modified: nil, Moves: nil, Splits: nil, + Deleted: nil, }, wantErr: false, }, @@ -327,6 +342,7 @@ func TestReconcileHistory(t *testing.T) { Modified: nil, Moves: nil, Splits: nil, + Deleted: nil, }, mockResults: map[string]*backendtypes.GetFeatureResult{ "old-id": backendtypes.NewGetFeatureResult( @@ -343,6 +359,51 @@ func TestReconcileHistory(t *testing.T) { QueryChanged: false, Modified: nil, Splits: nil, + Deleted: nil, + }, + wantErr: false, + }, + { + name: "Scenario 10: Mixed Removed and Deleted", + initialDiff: &FeatureDiff{ + Removed: []FeatureRemoved{ + {ID: "deleted-id", Name: "Deleted Feature", Reason: ReasonUnmatched}, + {ID: "removed-id", Name: "Removed Feature", Reason: ReasonUnmatched}, + }, + Added: nil, + QueryChanged: false, + Modified: nil, + Moves: nil, + Splits: nil, + Deleted: nil, + }, + mockResults: map[string]*backendtypes.GetFeatureResult{ + "removed-id": backendtypes.NewGetFeatureResult( + backendtypes.NewRegularFeatureResult(&backend.Feature{ + FeatureId: "removed-id", + Name: "", + Spec: nil, + Baseline: nil, + BrowserImplementations: nil, + Discouraged: nil, + Usage: nil, + Wpt: nil, + VendorPositions: nil, + DeveloperSignals: nil, + }), + ), + }, + mockErrors: map[string]error{ + "deleted-id": backendtypes.ErrEntityDoesNotExist, + }, + expectedDiff: &FeatureDiff{ + Removed: []FeatureRemoved{{ID: "removed-id", Name: "Removed Feature", Reason: ReasonUnmatched}}, + Deleted: []FeatureDeleted{{ID: "deleted-id", Name: "Deleted Feature", Reason: 
ReasonDeleted}}, + Added: nil, + QueryChanged: false, + Modified: nil, + Moves: nil, + Splits: nil, }, wantErr: false, }, diff --git a/lib/blobtypes/featurelistdiff/v1/types.go b/lib/blobtypes/featurelistdiff/v1/types.go index a09a1fb26..271e7b09e 100644 --- a/lib/blobtypes/featurelistdiff/v1/types.go +++ b/lib/blobtypes/featurelistdiff/v1/types.go @@ -94,6 +94,7 @@ type FeatureDiff struct { QueryChanged bool `json:"queryChanged,omitempty"` Added []FeatureAdded `json:"added,omitempty"` Removed []FeatureRemoved `json:"removed,omitempty"` + Deleted []FeatureDeleted `json:"deleted,omitempty"` Modified []FeatureModified `json:"modified,omitempty"` Moves []FeatureMoved `json:"moves,omitempty"` Splits []FeatureSplit `json:"splits,omitempty"` @@ -125,6 +126,13 @@ func (d *FeatureDiff) Sort() { return d.Removed[i].ID < d.Removed[j].ID }) + sort.Slice(d.Deleted, func(i, j int) bool { + if d.Deleted[i].Name != d.Deleted[j].Name { + return d.Deleted[i].Name < d.Deleted[j].Name + } + + return d.Deleted[i].ID < d.Deleted[j].ID + }) sort.Slice(d.Modified, func(i, j int) bool { if d.Modified[i].Name != d.Modified[j].Name { return d.Modified[i].Name < d.Modified[j].Name @@ -182,6 +190,12 @@ type FeatureRemoved struct { Reason ChangeReason `json:"reason"` } +type FeatureDeleted struct { + ID string `json:"id"` + Name string `json:"name"` + Reason ChangeReason `json:"reason"` +} + type FeatureMoved struct { FromID string `json:"fromId"` ToID string `json:"toId"` @@ -223,7 +237,7 @@ func (d FeatureDiff) HasChanges() bool { } func (d FeatureDiff) HasDataChanges() bool { - return len(d.Added) > 0 || len(d.Removed) > 0 || + return len(d.Added) > 0 || len(d.Removed) > 0 || len(d.Deleted) > 0 || len(d.Modified) > 0 || len(d.Moves) > 0 || len(d.Splits) > 0 } diff --git a/lib/blobtypes/featurelistdiff/v1/types_test.go b/lib/blobtypes/featurelistdiff/v1/types_test.go index 831ecb8a4..c75802da2 100644 --- a/lib/blobtypes/featurelistdiff/v1/types_test.go +++ 
b/lib/blobtypes/featurelistdiff/v1/types_test.go @@ -36,6 +36,7 @@ func TestHasChanges(t *testing.T) { Modified: nil, Moves: nil, Splits: nil, + Deleted: nil, }, expected: false, }, @@ -48,6 +49,7 @@ func TestHasChanges(t *testing.T) { Modified: nil, Moves: nil, Splits: nil, + Deleted: nil, }, expected: true, }, @@ -60,6 +62,7 @@ func TestHasChanges(t *testing.T) { Modified: nil, Moves: nil, Splits: nil, + Deleted: nil, }, expected: true, }, @@ -72,6 +75,22 @@ func TestHasChanges(t *testing.T) { Modified: nil, Moves: nil, Splits: nil, + Deleted: nil, + }, + expected: true, + }, + { + name: "Deleted", + diff: FeatureDiff{ + QueryChanged: false, + Added: nil, + Removed: nil, + Modified: nil, + Moves: nil, + Splits: nil, + Deleted: []FeatureDeleted{ + {ID: "1", Name: "A", Reason: ReasonDeleted}, + }, }, expected: true, }, @@ -101,8 +120,9 @@ func TestHasChanges(t *testing.T) { BrowserChanges: nil, DocsChange: nil, }}, - Moves: nil, - Splits: nil, + Moves: nil, + Splits: nil, + Deleted: nil, }, expected: true, }, @@ -115,6 +135,7 @@ func TestHasChanges(t *testing.T) { Modified: nil, Moves: []FeatureMoved{{FromID: "A", ToID: "B", FromName: "A", ToName: "B"}}, Splits: nil, + Deleted: nil, }, expected: true, }, @@ -127,6 +148,7 @@ func TestHasChanges(t *testing.T) { Modified: nil, Moves: nil, Splits: []FeatureSplit{{FromID: "A", FromName: "A", To: nil}}, + Deleted: nil, }, expected: true, }, @@ -176,6 +198,10 @@ func TestFeatureDiff_Sort(t *testing.T) { To: nil, }, }, + Deleted: []FeatureDeleted{ + {ID: "2", Name: "B", Reason: ReasonDeleted}, + {ID: "1", Name: "A", Reason: ReasonDeleted}, + }, } diff.Sort() @@ -190,6 +216,11 @@ func TestFeatureDiff_Sort(t *testing.T) { t.Errorf("Removed sort failed: %+v", diff.Removed) } + // Deleted: A(1), B(2) + if diff.Deleted[0].ID != "1" || diff.Deleted[1].ID != "2" { + t.Errorf("Deleted sort failed: %+v", diff.Deleted) + } + // Modified: A(1), B(2) if diff.Modified[0].ID != "1" || diff.Modified[1].ID != "2" { t.Errorf("Modified sort 
failed: %+v", diff.Modified) diff --git a/lib/email/chime/chimeadapters/email_worker.go b/lib/email/chime/chimeadapters/email_worker.go new file mode 100644 index 000000000..326b9f250 --- /dev/null +++ b/lib/email/chime/chimeadapters/email_worker.go @@ -0,0 +1,58 @@ +// Copyright 2025 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package chimeadapters + +import ( + "context" + "errors" + + "github.com/GoogleChrome/webstatus.dev/lib/email/chime" + "github.com/GoogleChrome/webstatus.dev/lib/workertypes" +) + +type EmailSender interface { + Send(ctx context.Context, id string, to string, subject string, htmlBody string) error +} + +type EmailWorkerChimeAdapter struct { + chimeSender EmailSender +} + +// NewEmailWorkerChimeAdapter creates a new adapter for the email worker to use Chime. +func NewEmailWorkerChimeAdapter(chimeSender EmailSender) *EmailWorkerChimeAdapter { + return &EmailWorkerChimeAdapter{ + chimeSender: chimeSender, + } +} + +// Send implements the EmailSender interface for the email worker. 
+func (a *EmailWorkerChimeAdapter) Send(ctx context.Context, id string, to string, + subject string, htmlBody string) error { + err := a.chimeSender.Send(ctx, id, to, subject, htmlBody) + if err != nil { + if errors.Is(err, chime.ErrPermanentUser) { + return errors.Join(workertypes.ErrUnrecoverableUserFailureEmailSending, err) + } else if errors.Is(err, chime.ErrPermanentSystem) { + return errors.Join(workertypes.ErrUnrecoverableSystemFailureEmailSending, err) + } else if errors.Is(err, chime.ErrDuplicate) { + return errors.Join(workertypes.ErrUnrecoverableSystemFailureEmailSending, err) + } + + // Will be recorded as a transient error + return err + } + + return nil +} diff --git a/lib/email/chime/chimeadapters/email_worker_test.go b/lib/email/chime/chimeadapters/email_worker_test.go new file mode 100644 index 000000000..181682222 --- /dev/null +++ b/lib/email/chime/chimeadapters/email_worker_test.go @@ -0,0 +1,100 @@ +// Copyright 2025 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package chimeadapters + +import ( + "context" + "errors" + "testing" + + "github.com/GoogleChrome/webstatus.dev/lib/email/chime" + "github.com/GoogleChrome/webstatus.dev/lib/workertypes" +) + +// mockChimeSender is a mock implementation of the EmailSender for testing. 
+type mockChimeSender struct { + sendErr error +} + +func (m *mockChimeSender) Send(_ context.Context, _ string, _ string, _ string, _ string) error { + return m.sendErr +} + +var errTest = errors.New("test error") + +func TestEmailWorkerChimeAdapter_Send(t *testing.T) { + ctx := context.Background() + testCases := []struct { + name string + chimeError error + expectedError error + }{ + { + name: "Success", + chimeError: nil, + expectedError: nil, + }, + { + name: "Permanent User Error", + chimeError: chime.ErrPermanentUser, + expectedError: workertypes.ErrUnrecoverableUserFailureEmailSending, + }, + { + name: "Permanent System Error", + chimeError: chime.ErrPermanentSystem, + expectedError: workertypes.ErrUnrecoverableSystemFailureEmailSending, + }, + { + name: "Duplicate Error", + chimeError: chime.ErrDuplicate, + expectedError: workertypes.ErrUnrecoverableSystemFailureEmailSending, + }, + { + name: "Transient Error", + chimeError: chime.ErrTransient, + expectedError: chime.ErrTransient, // Should be passed through + }, + { + name: "Other Error", + chimeError: errTest, + expectedError: errTest, // Should be passed through + }, + } + + for _, tc := range testCases { + t.Run(tc.name, func(t *testing.T) { + // Setup + mockSender := &mockChimeSender{sendErr: tc.chimeError} + adapter := NewEmailWorkerChimeAdapter(mockSender) + + // Execute + err := adapter.Send(ctx, "test-id", "to@example.com", "Test Subject", "

Hello

") + + // Verify + if tc.expectedError != nil { + if err == nil { + t.Fatal("Expected an error, but got nil") + } + if !errors.Is(err, tc.expectedError) { + t.Errorf("Expected error wrapping %v, but got %v", tc.expectedError, err) + } + } else { + if err != nil { + t.Errorf("Expected no error, but got %v", err) + } + } + }) + } +} diff --git a/lib/email/chime/client.go b/lib/email/chime/client.go new file mode 100644 index 000000000..4a0ac0ba1 --- /dev/null +++ b/lib/email/chime/client.go @@ -0,0 +1,367 @@ +// Copyright 2025 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package chime + +import ( + "bytes" + "context" + "encoding/json" + "errors" + "fmt" + "io" + "log/slog" + "net/http" + "strings" + "time" + + "golang.org/x/oauth2" + "golang.org/x/oauth2/google" +) + +// Env type for environment selection. +type Env int + +const ( + // EnvAutopush uses the autopush environment. + EnvAutopush Env = iota + // EnvProd uses the production environment. + EnvProd +) + +func getChimeURL(env Env) string { + switch env { + case EnvAutopush: + return "https://autopush-notifications-pa-googleapis.sandbox.google.com" + case EnvProd: + return "https://notifications-pa.googleapis.com" + default: + return "" + } +} + +// ClientID and other constants. +const ( + clientID = "webstatus_dev" + notificationType = "SUBSCRIPTION_NOTIFICATION" + defaultFromAddr = "noreply-webstatus-dev@google.com" +) + +// Sentinel Errors. 
+var ( + ErrPermanentUser = errors.New("permanent error due to user/target issue") + ErrPermanentSystem = errors.New("permanent error due to system/config issue") + ErrTransient = errors.New("transient error, can be retried") + ErrDuplicate = errors.New("duplicate notification") +) + +// HTTPClient interface to allow mocking http.Client. +type HTTPClient interface { + Do(req *http.Request) (*http.Response, error) +} + +type Sender struct { + bcc []string + tokenSource oauth2.TokenSource + httpClient HTTPClient + fromAddress string + baseURL string +} + +// NewChimeSender creates a new ChimeSender instance. +func NewChimeSender(ctx context.Context, env Env, bcc []string, fromAddr string, + customHTTPClient HTTPClient) (*Sender, error) { + baseURL := getChimeURL(env) + if baseURL == "" { + return nil, fmt.Errorf("%w: invalid ChimeEnv: %v", ErrPermanentSystem, env) + } + + ts, err := google.FindDefaultCredentials(ctx, "https://www.googleapis.com/auth/notifications") + if err != nil { + return nil, fmt.Errorf("%w: failed to find default credentials: %w", ErrPermanentSystem, err) + } + + httpClient := customHTTPClient + if httpClient == nil { + client := oauth2.NewClient(ctx, ts.TokenSource) + client.Timeout = 30 * time.Second + httpClient = client + } + + if fromAddr == "" { + fromAddr = defaultFromAddr + } + + return &Sender{ + bcc: bcc, + tokenSource: ts.TokenSource, + httpClient: httpClient, + fromAddress: fromAddr, + baseURL: baseURL, + }, nil +} + +type NotifyTargetSyncRequest struct { + Notification Notification `json:"notification"` + Target Target `json:"target"` +} +type Notification struct { + ClientID string `json:"client_id"` + ExternalID string `json:"external_id"` + TypeID string `json:"type_id"` + Payload Payload `json:"payload"` +} +type Source struct { + SystemName string `json:"system_name"` +} +type Payload struct { + TypeURL string `json:"@type"` + EmailMessage EmailMessage `json:"email_message"` +} +type EmailMessage struct { + FromAddress string 
`json:"from_address"` + Subject string `json:"subject"` + BodyPart []BodyPart `json:"body_part"` + BccRecipient []string `json:"bcc_recipient,omitempty"` +} +type BodyPart struct { + Content string `json:"content"` + ContentType string `json:"content_type"` +} +type Target struct { + ChannelType string `json:"channel_type"` + DeliveryAddress DeliveryAddress `json:"delivery_address"` +} +type DeliveryAddress struct { + EmailAddress EmailAddress `json:"email_address"` +} +type EmailAddress struct { + ToAddress string `json:"to_address"` +} +type NotifyTargetSyncResponse struct { + ExternalID string `json:"externalId"` + Identifier string `json:"identifier"` + Details struct { + Outcome string `json:"outcome"` + Reason string `json:"reason"` + } `json:"details"` +} + +// --- Send method and its helpers --- + +func (s *Sender) Send(ctx context.Context, id string, to string, subject string, htmlBody string) error { + if id == "" { + return fmt.Errorf("%w: id (externalID) cannot be empty", ErrPermanentSystem) + } + + reqBodyData, err := s.buildRequestBody(id, to, subject, htmlBody) + if err != nil { + return err + } + + httpReq, err := s.createHTTPRequest(ctx, reqBodyData) + if err != nil { + return err + } + + resp, bodyBytes, err := s.executeRequest(httpReq) + if err != nil { + return err // errors from executeRequest are already wrapped + } + defer resp.Body.Close() + + err = s.handleResponse(ctx, resp, bodyBytes, id) + handleSendResult(ctx, err, id) + + return err +} + +func (s *Sender) buildRequestBody(id string, to string, subject string, htmlBody string) ([]byte, error) { + reqBody := NotifyTargetSyncRequest{ + Notification: Notification{ + ClientID: clientID, + ExternalID: id, + TypeID: notificationType, + Payload: Payload{ + TypeURL: "type.googleapis.com/notifications.backend.common.message.RenderedMessage", + EmailMessage: EmailMessage{ + FromAddress: s.fromAddress, + Subject: subject, + BodyPart: []BodyPart{ + {Content: htmlBody, ContentType: "text/html"}, + 
}, + BccRecipient: s.bcc, + }, + }, + }, + Target: Target{ + ChannelType: "EMAIL", + DeliveryAddress: DeliveryAddress{ + EmailAddress: EmailAddress{ToAddress: to}, + }, + }, + } + jsonData, err := json.Marshal(reqBody) + if err != nil { + return nil, fmt.Errorf("%w: failed to marshal request body: %w", ErrPermanentSystem, err) + } + + return jsonData, nil +} + +func (s *Sender) createHTTPRequest(ctx context.Context, body []byte) (*http.Request, error) { + apiURL := fmt.Sprintf("%s/v1/notifytargetsync", s.baseURL) + req, err := http.NewRequestWithContext(ctx, http.MethodPost, apiURL, bytes.NewBuffer(body)) + if err != nil { + return nil, fmt.Errorf("%w: failed to create HTTP request: %w", ErrPermanentSystem, err) + } + + token, err := s.tokenSource.Token() + if err != nil { + return nil, fmt.Errorf("%w: failed to retrieve access token: %w", ErrPermanentSystem, err) + } + req.Header.Set("Authorization", "Bearer "+token.AccessToken) + req.Header.Set("Content-Type", "application/json") + + return req, nil +} + +func (s *Sender) executeRequest(req *http.Request) (*http.Response, []byte, error) { + resp, err := s.httpClient.Do(req) + if err != nil { + return nil, nil, fmt.Errorf("%w: network error sending to Chime: %w", ErrTransient, err) + } + if resp.Body != nil { + defer resp.Body.Close() + } + + bodyBytes, err := io.ReadAll(resp.Body) + if err != nil { + return nil, nil, fmt.Errorf("%w: failed to read response body: %w", ErrTransient, err) + } + + return resp, bodyBytes, nil +} + +func (s *Sender) handleResponse(ctx context.Context, + resp *http.Response, bodyBytes []byte, externalID string) error { + bodyStr := string(bodyBytes) + + if resp.StatusCode == http.StatusConflict { // 409 + return fmt.Errorf("%w: external_id %s: %s", ErrDuplicate, externalID, bodyStr) + } + + if resp.StatusCode >= 400 && resp.StatusCode < 500 { + return classifyHTTPClientError(resp.StatusCode, bodyStr) + } else if resp.StatusCode >= 500 { + return fmt.Errorf("%w: Chime server error (%d): 
%s", ErrTransient, resp.StatusCode, bodyStr) + } + + var responseBody NotifyTargetSyncResponse + if err := json.Unmarshal(bodyBytes, &responseBody); err != nil { + // Chime accepted it, but response is not what we expected. Log and treat as success. + slog.WarnContext(ctx, "Chime call OK, but failed to parse response body", + "externalID", externalID, "error", err, "body", bodyStr) + + return nil + } + + return classifyChimeOutcome(ctx, externalID, responseBody) +} + +func classifyHTTPClientError(statusCode int, bodyStr string) error { + switch statusCode { + case http.StatusBadRequest: // 400 + return fmt.Errorf("%w: bad request (400): %s", ErrPermanentSystem, bodyStr) + case http.StatusUnauthorized: // 401 + return fmt.Errorf("%w: unauthorized (401): %s", ErrPermanentSystem, bodyStr) + case http.StatusForbidden: // 403 + return fmt.Errorf("%w: forbidden (403): %s", ErrPermanentSystem, bodyStr) + default: + return fmt.Errorf("%w: client error (%d): %s", ErrPermanentSystem, statusCode, bodyStr) + } +} + +func classifyChimeOutcome(ctx context.Context, externalID string, responseBody NotifyTargetSyncResponse) error { + outcome := responseBody.Details.Outcome + reason := responseBody.Details.Reason + chimeID := responseBody.Identifier + slog.DebugContext(ctx, "Chime Response", "externalID", externalID, + "chimeID", chimeID, "outcome", outcome, "reason", reason) + + switch outcome { + case "SENT": + return nil // Success + case "PREFERENCE_DROPPED", "INVALID_AUTH_SUB_TOKEN_DROPPED": + return fmt.Errorf("%w: outcome %s, reason: %s", ErrPermanentUser, outcome, reason) + case "EXPLICITLY_DROPPED", "MESSAGE_TOO_LARGE_DROPPED", "INVALID_REQUEST_DROPPED": + return fmt.Errorf("%w: outcome %s, reason: %s", ErrPermanentSystem, outcome, reason) + case "DELIVERY_FAILURE_DROPPED": + if isUserCausedDeliveryFailure(reason) { + return fmt.Errorf("%w: outcome %s, reason: %s", ErrPermanentUser, outcome, reason) + } else if isSystemCausedDeliveryFailure(reason) { + return 
fmt.Errorf("%w: outcome %s, reason: %s", ErrPermanentSystem, outcome, reason) + } + + return fmt.Errorf("%w: outcome %s, reason: %s", ErrTransient, outcome, reason) + case "QUOTA_DROPPED": + return fmt.Errorf("%w: outcome %s, reason: %s", ErrTransient, outcome, reason) + default: // Unknown outcome + return fmt.Errorf("%w: unknown outcome %s, reason: %s", ErrTransient, outcome, reason) + } +} + +func isUserCausedDeliveryFailure(reason string) bool { + userKeywords := []string{"invalid_mailbox", "no such user", "invalid_domain", "domain not found", "unroutable address"} + lowerReason := strings.ToLower(reason) + for _, kw := range userKeywords { + if strings.Contains(lowerReason, kw) { + return true + } + } + + return strings.Contains(lowerReason, "perm_fail") && !isSystemCausedDeliveryFailure(reason) +} + +func isSystemCausedDeliveryFailure(reason string) bool { + systemKeywords := []string{"perm_fail_sender_denied", "mail loop"} + lowerReason := strings.ToLower(reason) + for _, kw := range systemKeywords { + if strings.Contains(lowerReason, kw) { + return true + } + } + + return false +} + +func handleSendResult(ctx context.Context, err error, externalID string) { + if err == nil { + slog.InfoContext(ctx, "Email sending process initiated and reported as SENT.", "externalID", externalID) + + return + } + slog.ErrorContext(ctx, "Error sending email", "externalID", externalID, "error", err) + if errors.Is(err, ErrDuplicate) { + slog.ErrorContext(ctx, "Result: This was a DUPLICATE send.", "externalID", externalID) + } else if errors.Is(err, ErrPermanentUser) { + slog.ErrorContext(ctx, "Result: PERMANENT error due to USER issue.", "externalID", externalID) + } else if errors.Is(err, ErrPermanentSystem) { + slog.ErrorContext(ctx, "Result: PERMANENT error due to SYSTEM issue.", "externalID", externalID) + } else if errors.Is(err, ErrTransient) { + slog.ErrorContext(ctx, "Result: TRANSIENT error.", "externalID", externalID) + } else { + slog.ErrorContext(ctx, "Result: 
Unknown error type.", "externalID", externalID) + } +} diff --git a/lib/email/chime/client_test.go b/lib/email/chime/client_test.go new file mode 100644 index 000000000..d6eed6bb9 --- /dev/null +++ b/lib/email/chime/client_test.go @@ -0,0 +1,202 @@ +// Copyright 2025 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package chime + +import ( + "context" + "errors" + "fmt" + "io" + "net/http" + "strings" + "testing" + + "golang.org/x/oauth2" +) + +// mockHTTPClient allows faking HTTP responses for tests. +type mockHTTPClient struct { + response *http.Response + err error +} + +func (m *mockHTTPClient) Do(_ *http.Request) (*http.Response, error) { + return m.response, m.err +} + +// mockTokenSource is a dummy token source for tests. +type mockTokenSource struct { + token *oauth2.Token + err error +} + +func (m *mockTokenSource) Token() (*oauth2.Token, error) { + return m.token, m.err +} + +func newTestSender(mockClient HTTPClient) *Sender { + return &Sender{ + bcc: []string{"bcc@example.com"}, + // nolint:exhaustruct // WONTFIX - external struct. 
+ tokenSource: &mockTokenSource{token: &oauth2.Token{AccessToken: "fake-token"}, err: nil}, + httpClient: mockClient, + fromAddress: "test-from@example.com", + baseURL: "https://fake-chime.googleapis.com", + } +} + +func TestSend(t *testing.T) { + ctx := context.Background() + testCases := []struct { + name string + mockClient *mockHTTPClient + id string + expectedError error + }{ + { + name: "Success - SENT outcome", + mockClient: &mockHTTPClient{ + // nolint:exhaustruct // WONTFIX - external struct. + response: &http.Response{ + StatusCode: http.StatusOK, + Body: io.NopCloser(strings.NewReader(`{"details": {"outcome": "SENT"}}`)), + Header: make(http.Header), + }, + err: nil, + }, + id: "success-id", + expectedError: nil, + }, + { + name: "Duplicate Notification - 409 Conflict", + mockClient: &mockHTTPClient{ + // nolint:exhaustruct // WONTFIX - external struct. + response: &http.Response{ + StatusCode: http.StatusConflict, + Body: io.NopCloser(strings.NewReader("Duplicate")), + Header: make(http.Header), + }, + err: nil, + }, + id: "duplicate-id", + expectedError: ErrDuplicate, + }, + { + name: "Permanent User Error - PREFERENCE_DROPPED", + mockClient: &mockHTTPClient{ + // nolint:exhaustruct // WONTFIX - external struct. + response: &http.Response{ + StatusCode: http.StatusOK, + Body: io.NopCloser(strings.NewReader(`{"details": {"outcome": "PREFERENCE_DROPPED"}}`)), + Header: make(http.Header), + }, + err: nil, + }, + id: "user-error-id", + expectedError: ErrPermanentUser, + }, + { + name: "Permanent System Error - INVALID_REQUEST_DROPPED", + mockClient: &mockHTTPClient{ + // nolint:exhaustruct // WONTFIX - external struct. 
+ response: &http.Response{ + StatusCode: http.StatusOK, + Body: io.NopCloser(strings.NewReader(`{"details": {"outcome": "INVALID_REQUEST_DROPPED"}}`)), + Header: make(http.Header), + }, + err: nil, + }, + id: "system-error-id", + expectedError: ErrPermanentSystem, + }, + { + name: "Permanent System Error - 400 Bad Request", + mockClient: &mockHTTPClient{ + // nolint:exhaustruct // WONTFIX - external struct. + response: &http.Response{ + StatusCode: http.StatusBadRequest, + Body: io.NopCloser(strings.NewReader("Bad Request")), + Header: make(http.Header), + }, + err: nil, + }, + id: "bad-request-id", + expectedError: ErrPermanentSystem, + }, + { + name: "Transient Error - QUOTA_DROPPED", + mockClient: &mockHTTPClient{ + // nolint:exhaustruct // WONTFIX - external struct. + response: &http.Response{ + StatusCode: http.StatusOK, + Body: io.NopCloser(strings.NewReader(`{"details": {"outcome": "QUOTA_DROPPED"}}`)), + Header: make(http.Header), + }, + err: nil, + }, + id: "transient-quota-id", + expectedError: ErrTransient, + }, + { + name: "Transient Error - 503 Server Error", + mockClient: &mockHTTPClient{ + // nolint:exhaustruct // WONTFIX - external struct. + response: &http.Response{ + StatusCode: http.StatusServiceUnavailable, + Body: io.NopCloser(strings.NewReader("Server Error")), + Header: make(http.Header), + }, + err: nil, + }, + id: "transient-server-error-id", + expectedError: ErrTransient, + }, + { + name: "Network Error", + mockClient: &mockHTTPClient{ + response: nil, + err: fmt.Errorf("network connection failed"), + }, + id: "network-error-id", + expectedError: ErrTransient, + }, + { + name: "Empty ID Error", + mockClient: &mockHTTPClient{response: nil, err: nil}, + id: "", + expectedError: ErrPermanentSystem, + }, + } + + for _, tc := range testCases { + t.Run(tc.name, func(t *testing.T) { + sender := newTestSender(tc.mockClient) + err := sender.Send(ctx, tc.id, "to@example.com", "Test Subject", "

Test

") + + if tc.expectedError != nil { + if err == nil { + t.Fatalf("Expected error but got nil") + } + if !errors.Is(err, tc.expectedError) { + t.Errorf("Expected error wrapping %v, but got %v", tc.expectedError, err) + } + } else { + if err != nil { + t.Errorf("Expected no error, but got %v", err) + } + } + }) + } +} diff --git a/lib/email/smtpsender/client.go b/lib/email/smtpsender/client.go new file mode 100644 index 000000000..a42be3607 --- /dev/null +++ b/lib/email/smtpsender/client.go @@ -0,0 +1,71 @@ +// Copyright 2025 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package smtpsender + +import ( + "errors" + "fmt" + "net/smtp" +) + +// SMTPClientConfig holds configuration for the SMTP client. +type SMTPClientConfig struct { + Host string + Port int + Username string + Password string +} + +type Client struct { + config SMTPClientConfig + send sendFunc + addr string + auth smtp.Auth + from string +} + +type sendFunc func(addr string, a smtp.Auth, from string, to []string, msg []byte) error + +// NewClient creates a new Client. 
+func NewClient(cfg SMTPClientConfig, from string) (*Client, error) {
+	if cfg.Host == "" || cfg.Port == 0 {
+		return nil, fmt.Errorf("%w: SMTP host and port are required", ErrSMTPConfig)
+	}
+
+	// Authentication is optional: PLAIN auth is enabled only when both
+	// credentials are supplied.
+	var authMech smtp.Auth
+	if cfg.Username != "" && cfg.Password != "" {
+		authMech = smtp.PlainAuth("", cfg.Username, cfg.Password, cfg.Host)
+	}
+
+	c := &Client{
+		config: cfg,
+		send:   smtp.SendMail,
+		auth:   authMech,
+		addr:   fmt.Sprintf("%s:%d", cfg.Host, cfg.Port),
+		from:   from,
+	}
+
+	return c, nil
+}
+
+// From reports the configured sender address.
+func (c *Client) From() string {
+	return c.from
+}
+
+// SendMail delivers msg to the given recipients, wrapping any transport
+// failure in ErrSMTPFailedSend so callers can match it with errors.Is.
+func (c *Client) SendMail(to []string, msg []byte) error {
+	if err := c.send(c.addr, c.auth, c.from, to, msg); err != nil {
+		return fmt.Errorf("%w: failed to send email: %w", ErrSMTPFailedSend, err)
+	}
+
+	return nil
+}
+
+// Sentinel errors returned by this package.
+var (
+	ErrSMTPConfig     = errors.New("smtp configuration error")
+	ErrSMTPFailedSend = errors.New("smtp failed to send email")
+)
diff --git a/lib/email/smtpsender/client_test.go b/lib/email/smtpsender/client_test.go
new file mode 100644
index 000000000..6d982c334
--- /dev/null
+++ b/lib/email/smtpsender/client_test.go
@@ -0,0 +1,106 @@
+// Copyright 2025 Google LLC
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package smtpsender
+
+import (
+	"errors"
+	"net/smtp"
+	"testing"
+)
+
+// TestNewSMTPClient exercises NewClient's configuration validation:
+// host and port are required; username/password are optional.
+func TestNewSMTPClient(t *testing.T) {
+	t.Parallel()
+	var emptyCfg SMTPClientConfig
+
+	testCases := []struct {
+		name      string
+		config    SMTPClientConfig
+		expectErr error
+	}{
+		{
+			name:      "Valid config",
+			config:    SMTPClientConfig{Host: "localhost", Port: 1025, Username: "", Password: ""},
+			expectErr: nil,
+		},
+		{
+			name:      "Missing host",
+			config:    SMTPClientConfig{Host: "", Port: 1025, Username: "", Password: ""},
+			expectErr: ErrSMTPConfig,
+		},
+		{
+			name:      "Missing port",
+			config:    SMTPClientConfig{Host: "localhost", Port: 0, Username: "", Password: ""},
+			expectErr: ErrSMTPConfig,
+		},
+		{
+			name:      "Empty config",
+			config:    emptyCfg,
+			expectErr: ErrSMTPConfig,
+		},
+	}
+
+	for _, tc := range testCases {
+		t.Run(tc.name, func(t *testing.T) {
+			client, err := NewClient(tc.config, "from@example.com")
+			// errors.Is(nil, nil) is true, so this covers both the
+			// success and failure expectations.
+			if !errors.Is(err, tc.expectErr) {
+				t.Errorf("Expected error wrapping %v, but got %v", tc.expectErr, err)
+			}
+			if err == nil && client == nil {
+				t.Fatal("Expected client, but got nil")
+			}
+		})
+	}
+}
+
+// TestSMTPClient_SendMail verifies that SendMail wraps transport
+// failures in ErrSMTPFailedSend by stubbing the client's send function.
+func TestSMTPClient_SendMail(t *testing.T) {
+	cfg := SMTPClientConfig{Host: "localhost", Port: 1025, Username: "fake", Password: "fake"}
+
+	testCases := []struct {
+		name          string
+		mockSendMail  func(addr string, a smtp.Auth, from string, to []string, msg []byte) error
+		expectedError error
+	}{
+		{
+			name: "Successful send",
+			mockSendMail: func(_ string, _ smtp.Auth, _ string, _ []string, _ []byte) error {
+				return nil
+			},
+			expectedError: nil,
+		},
+		{
+			name: "Some error",
+			mockSendMail: func(_ string, _ smtp.Auth, _ string, _ []string, _ []byte) error {
+				return errors.New("connection refused")
+			},
+			expectedError: ErrSMTPFailedSend,
+		},
+	}
+
+	for _, tc := range testCases {
+		t.Run(tc.name, func(t *testing.T) {
+			client, err := NewClient(cfg, "from@example.com")
+			if err != nil {
+				t.Fatalf("Failed to create client: %v", err)
+			}
+			// Replace the real smtp.SendMail with the test stub; no
+			// network connection is made.
+			client.send = tc.mockSendMail
+
+			err = client.SendMail([]string{"to@example.com"}, []byte("body"))
+
+			if !errors.Is(err, tc.expectedError) {
+				t.Errorf("Expected error wrapping %v, but got %v", tc.expectedError, err)
+			}
+		})
+	}
+}
diff --git a/lib/email/smtpsender/smtpsenderadapters/email_worker.go b/lib/email/smtpsender/smtpsenderadapters/email_worker.go
new file mode 100644
index 000000000..da0d79ad1
--- /dev/null
+++ b/lib/email/smtpsender/smtpsenderadapters/email_worker.go
@@ -0,0 +1,64 @@
+// Copyright 2025 Google LLC
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package smtpsenderadapters
+
+import (
+	"context"
+	"errors"
+	"log/slog"
+
+	"github.com/GoogleChrome/webstatus.dev/lib/workertypes"
+)
+
+// EmailWorkerSMTPAdapter implements the interface for the email worker
+// using the SMTP client.
+type EmailWorkerSMTPAdapter struct {
+	// sender performs the actual SMTP delivery.
+	sender Sender
+}
+
+// Sender is the consumer-side subset of the SMTP client used by the
+// adapter: raw message delivery plus the configured sender address.
+type Sender interface {
+	SendMail(to []string, msg []byte) error
+	From() string
+}
+
+// NewEmailWorkerSMTPAdapter creates a new adapter for the email worker to use SMTP.
+func NewEmailWorkerSMTPAdapter(client Sender) *EmailWorkerSMTPAdapter {
+	return &EmailWorkerSMTPAdapter{
+		sender: client,
+	}
+}
+
+// Send implements the EmailSender interface for the email worker.
+func (a *EmailWorkerSMTPAdapter) Send(ctx context.Context, id string,
+	to string,
+	subject string,
+	htmlBody string) error {
+	slog.InfoContext(ctx, "sending email via SMTP", "to", to, "id", id)
+
+	// Reject CR/LF in header-bound values: interpolating them into the
+	// headers below would allow SMTP header injection via
+	// attacker-controlled addresses or subjects.
+	if containsCRLF(to) || containsCRLF(subject) {
+		return errors.Join(workertypes.ErrUnrecoverableSystemFailureEmailSending,
+			errors.New("email header value contains CR or LF"))
+	}
+
+	// Minimal message: headers, blank line, then the HTML body.
+	msg := []byte("To: " + to + "\r\n" +
+		"From: " + a.sender.From() + "\r\n" +
+		"Subject: " + subject + "\r\n" +
+		"Content-Type: text/html; charset=UTF-8\r\n" +
+		"\r\n" + htmlBody)
+
+	// NOTE(review): every SMTP failure is joined with the unrecoverable
+	// sentinel, so the worker will not retry even transient failures
+	// (e.g. connection refused) — confirm this is intended.
+	if err := a.sender.SendMail([]string{to}, msg); err != nil {
+		return errors.Join(workertypes.ErrUnrecoverableSystemFailureEmailSending, err)
+	}
+
+	return nil
+}
+
+// containsCRLF reports whether s contains a carriage return or line
+// feed character.
+func containsCRLF(s string) bool {
+	for i := 0; i < len(s); i++ {
+		if s[i] == '\r' || s[i] == '\n' {
+			return true
+		}
+	}
+
+	return false
+}
diff --git a/lib/email/smtpsender/smtpsenderadapters/email_worker_test.go b/lib/email/smtpsender/smtpsenderadapters/email_worker_test.go
new file mode 100644
index 000000000..eeda90036
--- /dev/null
+++ b/lib/email/smtpsender/smtpsenderadapters/email_worker_test.go
@@ -0,0 +1,75 @@
+// Copyright 2025 Google LLC
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package smtpsenderadapters
+
+import (
+	"context"
+	"errors"
+	"testing"
+
+	"github.com/GoogleChrome/webstatus.dev/lib/email/smtpsender"
+	"github.com/GoogleChrome/webstatus.dev/lib/workertypes"
+)
+
+// mockSMTPSenderClient is a mock implementation of Sender for testing.
+type mockSMTPSenderClient struct { + sendMailErr error +} + +func (m *mockSMTPSenderClient) SendMail(_ []string, _ []byte) error { + return m.sendMailErr +} + +func (m *mockSMTPSenderClient) From() string { + return "from@example.com" +} + +func TestEmailWorkerSmtpAdapter_Send(t *testing.T) { + ctx := context.Background() + + testCases := []struct { + name string + smtpSendErr error + expectedError error + }{ + { + name: "Success", + smtpSendErr: nil, + expectedError: nil, + }, + { + name: "SMTP Failed Send Error", + smtpSendErr: smtpsender.ErrSMTPFailedSend, + expectedError: workertypes.ErrUnrecoverableSystemFailureEmailSending, + }, + { + name: "SMTP Config Error", + smtpSendErr: smtpsender.ErrSMTPConfig, + expectedError: workertypes.ErrUnrecoverableSystemFailureEmailSending, + }, + } + + for _, tc := range testCases { + t.Run(tc.name, func(t *testing.T) { + mockClient := &mockSMTPSenderClient{sendMailErr: tc.smtpSendErr} + adapter := NewEmailWorkerSMTPAdapter(mockClient) + + err := adapter.Send(ctx, "test-id", "to@example.com", "Test Subject", "

Hello

") + if !errors.Is(err, tc.expectedError) { + t.Errorf("Expected error wrapping %v, but got %v (raw: %v)", tc.expectedError, err, errors.Unwrap(err)) + } + }) + } +} diff --git a/lib/event/batchrefreshtrigger/v1/types.go b/lib/event/batchrefreshtrigger/v1/types.go index aba9f421d..f943a46bb 100644 --- a/lib/event/batchrefreshtrigger/v1/types.go +++ b/lib/event/batchrefreshtrigger/v1/types.go @@ -22,7 +22,6 @@ type JobFrequency string const ( FrequencyUnknown JobFrequency = "UNKNOWN" FrequencyImmediate JobFrequency = "IMMEDIATE" - FrequencyDaily JobFrequency = "DAILY" FrequencyWeekly JobFrequency = "WEEKLY" FrequencyMonthly JobFrequency = "MONTHLY" ) @@ -31,8 +30,6 @@ func (f JobFrequency) ToWorkerTypeJobFrequency() workertypes.JobFrequency { switch f { case FrequencyImmediate: return workertypes.FrequencyImmediate - case FrequencyDaily: - return workertypes.FrequencyDaily case FrequencyWeekly: return workertypes.FrequencyWeekly case FrequencyMonthly: diff --git a/lib/event/emailjob/v1/types.go b/lib/event/emailjob/v1/types.go new file mode 100644 index 000000000..45df72341 --- /dev/null +++ b/lib/event/emailjob/v1/types.go @@ -0,0 +1,160 @@ +// Copyright 2025 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package v1 + +import ( + "time" + + "github.com/GoogleChrome/webstatus.dev/lib/workertypes" +) + +// EmailJobEvent represents an email job event. 
+type EmailJobEvent struct { + // SubscriptionID is the ID of the subscription that triggered this job. + SubscriptionID string `json:"subscription_id"` + // RecipientEmail is the email address of the recipient. + RecipientEmail string `json:"recipient_email"` + // SummaryRaw is the raw JSON bytes of the event summary. + SummaryRaw []byte `json:"summary_raw"` + // Metadata contains additional metadata about the event. + Metadata EmailJobEventMetadata `json:"metadata"` + // ChannelID is the ID of the channel associated with this job. + ChannelID string `json:"channel_id"` + // Triggers is a list of triggers associated with this job. + Triggers []JobTrigger `json:"triggers"` +} + +type EmailJobEventMetadata struct { + // EventID is the ID of the original event that triggered this job. + EventID string `json:"event_id"` + // SearchID is the ID of the search that generated the event. + SearchID string `json:"search_id"` + // Query is the query string used for the search. + Query string `json:"query"` + // Frequency is the frequency of the job (e.g., "daily", "weekly"). + Frequency JobFrequency `json:"frequency"` + // GeneratedAt is the timestamp when the original event was generated. 
+ GeneratedAt time.Time `json:"generated_at"` +} + +func (EmailJobEvent) Kind() string { return "EmailJobEvent" } +func (EmailJobEvent) APIVersion() string { return "v1" } + +type JobFrequency string + +const ( + FrequencyUnknown JobFrequency = "UNKNOWN" + FrequencyImmediate JobFrequency = "IMMEDIATE" + FrequencyWeekly JobFrequency = "WEEKLY" + FrequencyMonthly JobFrequency = "MONTHLY" +) + +func (f JobFrequency) ToWorkerTypeJobFrequency() workertypes.JobFrequency { + switch f { + case FrequencyImmediate: + return workertypes.FrequencyImmediate + case FrequencyWeekly: + return workertypes.FrequencyWeekly + case FrequencyMonthly: + return workertypes.FrequencyMonthly + case FrequencyUnknown: + return workertypes.FrequencyUnknown + } + + return workertypes.FrequencyUnknown +} + +func ToJobFrequency(freq workertypes.JobFrequency) JobFrequency { + switch freq { + case workertypes.FrequencyImmediate: + return FrequencyImmediate + case workertypes.FrequencyWeekly: + return FrequencyWeekly + case workertypes.FrequencyMonthly: + return FrequencyMonthly + case workertypes.FrequencyUnknown: + return FrequencyUnknown + } + + return FrequencyUnknown +} + +type JobTrigger string + +const ( + FeaturePromotedToNewly JobTrigger = "FEATURE_PROMOTED_TO_NEWLY" + FeaturePromotedToWidely JobTrigger = "FEATURE_PROMOTED_TO_WIDELY" + FeatureRegressedToLimited JobTrigger = "FEATURE_REGRESSED_TO_LIMITED" + BrowserImplementationAnyComplete JobTrigger = "BROWSER_IMPLEMENTATION_ANY_COMPLETE" + UnknownJobTrigger JobTrigger = "UNKNOWN" +) + +func (e EmailJobEvent) ToWorkerTypeJobTriggers() []workertypes.JobTrigger { + if e.Triggers == nil { + return nil + } + + triggers := make([]workertypes.JobTrigger, 0, len(e.Triggers)) + for _, t := range e.Triggers { + triggers = append(triggers, t.ToWorkerTypeJobTrigger()) + } + + return triggers +} + +func (t JobTrigger) ToWorkerTypeJobTrigger() workertypes.JobTrigger { + switch t { + case FeaturePromotedToNewly: + return 
workertypes.FeaturePromotedToNewly + case FeaturePromotedToWidely: + return workertypes.FeaturePromotedToWidely + case FeatureRegressedToLimited: + return workertypes.FeatureRegressedToLimited + case BrowserImplementationAnyComplete: + return workertypes.BrowserImplementationAnyComplete + case UnknownJobTrigger: + break + } + + return "" +} + +func ToJobTrigger(trigger workertypes.JobTrigger) JobTrigger { + switch trigger { + case workertypes.FeaturePromotedToNewly: + return FeaturePromotedToNewly + case workertypes.FeaturePromotedToWidely: + return FeaturePromotedToWidely + case workertypes.FeatureRegressedToLimited: + return FeatureRegressedToLimited + case workertypes.BrowserImplementationAnyComplete: + return BrowserImplementationAnyComplete + } + + return UnknownJobTrigger +} + +func ToJobTriggers(triggers []workertypes.JobTrigger) []JobTrigger { + if triggers == nil { + return nil + } + + jobTriggers := make([]JobTrigger, 0, len(triggers)) + for _, t := range triggers { + jobTriggers = append(jobTriggers, ToJobTrigger(t)) + } + + return jobTriggers +} diff --git a/lib/event/featurediff/v1/types.go b/lib/event/featurediff/v1/types.go index 5782f15cf..4df9d7633 100644 --- a/lib/event/featurediff/v1/types.go +++ b/lib/event/featurediff/v1/types.go @@ -25,7 +25,6 @@ type JobFrequency string const ( FrequencyUnknown JobFrequency = "UNKNOWN" FrequencyImmediate JobFrequency = "IMMEDIATE" - FrequencyDaily JobFrequency = "DAILY" FrequencyWeekly JobFrequency = "WEEKLY" FrequencyMonthly JobFrequency = "MONTHLY" ) @@ -69,12 +68,25 @@ type FeatureDiffEvent struct { func (FeatureDiffEvent) Kind() string { return "FeatureDiffEvent" } func (FeatureDiffEvent) APIVersion() string { return "v1" } +func (f JobFrequency) ToWorkertypes() workertypes.JobFrequency { + switch f { + case FrequencyImmediate: + return workertypes.FrequencyImmediate + case FrequencyWeekly: + return workertypes.FrequencyWeekly + case FrequencyMonthly: + return workertypes.FrequencyMonthly + case 
FrequencyUnknown: + return workertypes.FrequencyUnknown + } + + return workertypes.FrequencyUnknown +} + func ToJobFrequency(freq workertypes.JobFrequency) JobFrequency { switch freq { case workertypes.FrequencyImmediate: return FrequencyImmediate - case workertypes.FrequencyDaily: - return FrequencyDaily case workertypes.FrequencyWeekly: return FrequencyWeekly case workertypes.FrequencyMonthly: diff --git a/lib/event/refreshsearchcommand/v1/types.go b/lib/event/refreshsearchcommand/v1/types.go index 1a23ceeb4..5d03b3686 100644 --- a/lib/event/refreshsearchcommand/v1/types.go +++ b/lib/event/refreshsearchcommand/v1/types.go @@ -26,7 +26,6 @@ type JobFrequency string const ( FrequencyUnknown JobFrequency = "UNKNOWN" FrequencyImmediate JobFrequency = "IMMEDIATE" - FrequencyDaily JobFrequency = "DAILY" FrequencyWeekly JobFrequency = "WEEKLY" FrequencyMonthly JobFrequency = "MONTHLY" ) @@ -35,8 +34,6 @@ func (f JobFrequency) ToWorkerTypeJobFrequency() workertypes.JobFrequency { switch f { case FrequencyImmediate: return workertypes.FrequencyImmediate - case FrequencyDaily: - return workertypes.FrequencyDaily case FrequencyWeekly: return workertypes.FrequencyWeekly case FrequencyMonthly: diff --git a/lib/event/searchconfigurationchanged/v1/types.go b/lib/event/searchconfigurationchanged/v1/types.go index 24849399b..b26a55134 100644 --- a/lib/event/searchconfigurationchanged/v1/types.go +++ b/lib/event/searchconfigurationchanged/v1/types.go @@ -26,7 +26,6 @@ type JobFrequency string const ( FrequencyUnknown JobFrequency = "UNKNOWN" FrequencyImmediate JobFrequency = "IMMEDIATE" - FrequencyDaily JobFrequency = "DAILY" FrequencyWeekly JobFrequency = "WEEKLY" FrequencyMonthly JobFrequency = "MONTHLY" ) @@ -35,8 +34,6 @@ func (f JobFrequency) ToWorkerTypeJobFrequency() workertypes.JobFrequency { switch f { case FrequencyImmediate: return workertypes.FrequencyImmediate - case FrequencyDaily: - return workertypes.FrequencyDaily case FrequencyWeekly: return 
workertypes.FrequencyWeekly case FrequencyMonthly: diff --git a/lib/gcpgcs/gcpgcsadapters/event_producer.go b/lib/gcpgcs/gcpgcsadapters/event_producer.go new file mode 100644 index 000000000..46729cb7b --- /dev/null +++ b/lib/gcpgcs/gcpgcsadapters/event_producer.go @@ -0,0 +1,57 @@ +// Copyright 2025 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package gcpgcsadapters + +import ( + "context" + "path" + + "github.com/GoogleChrome/webstatus.dev/lib/blobtypes" +) + +type EventProducerBlobStorageClient interface { + WriteBlob(ctx context.Context, path string, data []byte, opts ...blobtypes.WriteOption) error + ReadBlob(ctx context.Context, path string, opts ...blobtypes.ReadOption) (*blobtypes.Blob, error) +} + +type EventProducer struct { + client EventProducerBlobStorageClient + bucketName string +} + +func NewEventProducer(client EventProducerBlobStorageClient, bucketName string) *EventProducer { + return &EventProducer{client: client, bucketName: bucketName} +} + +func (e *EventProducer) Store(ctx context.Context, dirs []string, key string, data []byte) (string, error) { + filepath := append([]string{e.bucketName}, dirs...) + // Add the key as the final element. + filepath = append(filepath, key) + path := path.Join(filepath...) 
+ if err := e.client.WriteBlob(ctx, path, data, blobtypes.WithContentType("application/json")); err != nil { + return "", err + } + + return path, nil +} + +func (e *EventProducer) Get(ctx context.Context, fullpath string) ([]byte, error) { + blob, err := e.client.ReadBlob(ctx, fullpath) + if err != nil { + return nil, err + } + + return blob.Data, nil +} diff --git a/lib/gcpgcs/gcpgcsadapters/event_producer_test.go b/lib/gcpgcs/gcpgcsadapters/event_producer_test.go new file mode 100644 index 000000000..3a9968da9 --- /dev/null +++ b/lib/gcpgcs/gcpgcsadapters/event_producer_test.go @@ -0,0 +1,203 @@ +// Copyright 2025 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +//     http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +package gcpgcsadapters + +import ( + "context" + "errors" + "testing" + + "github.com/GoogleChrome/webstatus.dev/lib/blobtypes" +) + +type mockBlobStorageClient struct { + writeBlobCalled bool + writeBlobReq struct { + path string + data []byte + opts []blobtypes.WriteOption + } + writeBlobErr error + + readBlobCalled bool + readBlobReq struct { + path string + } + readBlobResp *blobtypes.Blob + readBlobErr error +} + +func (m *mockBlobStorageClient) WriteBlob(_ context.Context, path string, data []byte, + opts ...blobtypes.WriteOption) error { + m.writeBlobCalled = true + m.writeBlobReq.path = path + m.writeBlobReq.data = data + m.writeBlobReq.opts = opts + + return m.writeBlobErr +} + +func (m *mockBlobStorageClient) ReadBlob(_ context.Context, path string, + _ ...blobtypes.ReadOption) (*blobtypes.Blob, error) { + m.readBlobCalled = true + m.readBlobReq.path = path + + return m.readBlobResp, m.readBlobErr +} + +func TestStore(t *testing.T) { + bucketName := "test-bucket" + data := []byte("test-data") + + tests := []struct { + name string + dirs []string + key string + mockErr error + expectedPath string + wantErr bool + }{ + { + name: "root directory", + dirs: []string{}, + key: "file.json", + mockErr: nil, + expectedPath: "test-bucket/file.json", + wantErr: false, + }, + { + name: "nested directory", + dirs: []string{"folder1", "folder2"}, + key: "file.json", + mockErr: nil, + expectedPath: "test-bucket/folder1/folder2/file.json", + wantErr: false, + }, + { + name: "write error", + dirs: []string{"folder"}, + key: "file.json", + mockErr: errors.New("gcs error"), + expectedPath: "test-bucket/folder/file.json", + wantErr: true, + }, + } + + for _, tc := range tests { + t.Run(tc.name, func(t *testing.T) { + mock := new(mockBlobStorageClient) + mock.writeBlobErr = tc.mockErr + adapter := NewEventProducer(mock, bucketName) + + path, err := adapter.Store(context.Background(), tc.dirs, tc.key, data) + + if (err != nil) != tc.wantErr { + t.Errorf("Store() error = %v, 
wantErr %v", err, tc.wantErr) + } + + if !mock.writeBlobCalled { + t.Fatal("WriteBlob not called") + } + + if mock.writeBlobReq.path != tc.expectedPath { + t.Errorf("path mismatch: got %q, want %q", mock.writeBlobReq.path, tc.expectedPath) + } + if string(mock.writeBlobReq.data) != string(data) { + t.Errorf("data mismatch") + } + + // Verify returned path matches the full path sent to GCS + if err == nil && path != tc.expectedPath { + t.Errorf("returned path mismatch: got %q, want %q", path, tc.expectedPath) + } + + // Verify the options include the correct content type + foundContentType := false + for _, opt := range mock.writeBlobReq.opts { + var config blobtypes.WriteSettings + opt(&config) + if config.ContentType != nil && *config.ContentType == "application/json" { + foundContentType = true + + break + } + } + if !foundContentType { + t.Error("content type option not set to application/json") + } + }) + } +} + +func TestGet(t *testing.T) { + bucketName := "test-bucket" + fullPath := "test-bucket/folder/file.json" + data := []byte("test-data") + + tests := []struct { + name string + mockResp *blobtypes.Blob + mockErr error + wantData []byte + wantErr bool + }{ + { + name: "success", + mockResp: &blobtypes.Blob{ + Data: data, + ContentType: "application/json", + Metadata: nil, + Generation: 1, + }, + mockErr: nil, + wantData: data, + wantErr: false, + }, + { + name: "read error", + mockResp: nil, + mockErr: errors.New("gcs error"), + wantData: nil, + wantErr: true, + }, + } + + for _, tc := range tests { + t.Run(tc.name, func(t *testing.T) { + mock := new(mockBlobStorageClient) + mock.readBlobResp = tc.mockResp + mock.readBlobErr = tc.mockErr + adapter := NewEventProducer(mock, bucketName) + + gotData, err := adapter.Get(context.Background(), fullPath) + + if (err != nil) != tc.wantErr { + t.Errorf("Get() error = %v, wantErr %v", err, tc.wantErr) + } + + if !mock.readBlobCalled { + t.Fatal("ReadBlob not called") + } + + if mock.readBlobReq.path != fullPath { + 
t.Errorf("path mismatch: got %q, want %q", mock.readBlobReq.path, fullPath) + } + + if err == nil && string(gotData) != string(tc.wantData) { + t.Errorf("data mismatch: got %q, want %q", gotData, tc.wantData) + } + }) + } +} diff --git a/lib/gcppubsub/client.go b/lib/gcppubsub/client.go index 88c29f55f..99ea9b74c 100644 --- a/lib/gcppubsub/client.go +++ b/lib/gcppubsub/client.go @@ -73,6 +73,7 @@ func (c *Client) Subscribe(ctx context.Context, subID string, msg.Ack() } else if errors.Is(workErr, event.ErrTransientFailure) { // NACK: Retry later + slog.WarnContext(ctx, "transient failure, will retry", "error", workErr) msg.Nack() } else { // ACK: Permanent failure or unknown error, do not retry diff --git a/lib/gcppubsub/gcppubsubadapters/backend.go b/lib/gcppubsub/gcppubsubadapters/backend.go new file mode 100644 index 000000000..8b75feb43 --- /dev/null +++ b/lib/gcppubsub/gcppubsubadapters/backend.go @@ -0,0 +1,67 @@ +// Copyright 2026 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+
+package gcppubsubadapters
+
+import (
+	"context"
+	"fmt"
+	"log/slog"
+
+	"github.com/GoogleChrome/webstatus.dev/lib/event"
+	searchconfigv1 "github.com/GoogleChrome/webstatus.dev/lib/event/searchconfigurationchanged/v1"
+	"github.com/GoogleChrome/webstatus.dev/lib/gen/openapi/backend"
+)
+
+// BackendAdapter publishes backend-originated events to a Pub/Sub topic.
+type BackendAdapter struct {
+	// client publishes event messages.
+	client EventPublisher
+	// topicID is the destination topic for all events from this adapter.
+	topicID string
+}
+
+// NewBackendAdapter creates a BackendAdapter that publishes to topicID
+// via the given client.
+func NewBackendAdapter(client EventPublisher, topicID string) *BackendAdapter {
+	return &BackendAdapter{client: client, topicID: topicID}
+}
+
+// PublishSearchConfigurationChanged emits a v1
+// SearchConfigurationChangedEvent for a created or updated saved
+// search. Frequency is always IMMEDIATE for events from this source.
+// Returns an error if the event cannot be built or published.
+func (p *BackendAdapter) PublishSearchConfigurationChanged(
+	ctx context.Context,
+	resp *backend.SavedSearchResponse,
+	userID string,
+	isCreation bool) error {
+
+	evt := searchconfigv1.SearchConfigurationChangedEvent{
+		SearchID:   resp.Id,
+		Query:      resp.Query,
+		UserID:     userID,
+		Timestamp:  resp.UpdatedAt,
+		IsCreation: isCreation,
+		Frequency:  searchconfigv1.FrequencyImmediate,
+	}
+
+	msg, err := event.New(evt)
+	if err != nil {
+		return fmt.Errorf("failed to create event: %w", err)
+	}
+
+	id, err := p.client.Publish(ctx, p.topicID, msg)
+	if err != nil {
+		return fmt.Errorf("failed to publish message: %w", err)
+	}
+
+	slog.InfoContext(ctx, "published search configuration changed event",
+		"msgID", id,
+		"searchID", evt.SearchID,
+		"isCreation", evt.IsCreation)
+
+	return nil
+}
diff --git a/lib/gcppubsub/gcppubsubadapters/backend_test.go b/lib/gcppubsub/gcppubsubadapters/backend_test.go
new file mode 100644
index 000000000..061d5a462
--- /dev/null
+++ b/lib/gcppubsub/gcppubsubadapters/backend_test.go
@@ -0,0 +1,136 @@
+// Copyright 2025 Google LLC
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package gcppubsubadapters + +import ( + "context" + "encoding/json" + "errors" + "testing" + "time" + + "github.com/GoogleChrome/webstatus.dev/lib/gen/openapi/backend" + "github.com/google/go-cmp/cmp" +) + +func testSavedSearchResponse(id string, query string, updatedAt time.Time) *backend.SavedSearchResponse { + var resp backend.SavedSearchResponse + resp.Id = id + resp.Query = query + resp.UpdatedAt = updatedAt + + return &resp +} +func TestSearchConfigurationPublisherAdapter_Publish(t *testing.T) { + fixedTime := time.Date(2025, 1, 1, 0, 0, 0, 0, time.UTC) + + tests := []struct { + name string + resp *backend.SavedSearchResponse + userID string + isCreation bool + publishErr error + wantErr bool + expectedJSON string + }{ + { + name: "success creation", + resp: testSavedSearchResponse("search-123", "group:css", fixedTime), + userID: "user-1", + isCreation: true, + publishErr: nil, + wantErr: false, + expectedJSON: `{ + "apiVersion": "v1", + "kind": "SearchConfigurationChangedEvent", + "data": { + "search_id": "search-123", + "query": "group:css", + "user_id": "user-1", + "timestamp": "2025-01-01T00:00:00Z", + "is_creation": true, + "frequency": "IMMEDIATE" + } + }`, + }, + { + name: "success update", + resp: testSavedSearchResponse("search-456", "group:html", fixedTime.Add(24*time.Hour)), + userID: "user-1", + isCreation: false, + publishErr: nil, + wantErr: false, + expectedJSON: `{ + "apiVersion": "v1", + "kind": "SearchConfigurationChangedEvent", + "data": { + "search_id": "search-456", + "query": "group:html", + "user_id": "user-1", + 
"timestamp": "2025-01-02T00:00:00Z", + "is_creation": false, + "frequency": "IMMEDIATE" + } + }`, + }, + { + name: "publish error", + resp: testSavedSearchResponse("search-err", "group:html", fixedTime), + userID: "user-1", + isCreation: false, + publishErr: errors.New("pubsub error"), + wantErr: true, + expectedJSON: "", + }, + } + + for _, tc := range tests { + t.Run(tc.name, func(t *testing.T) { + publisher := new(mockPublisher) + publisher.err = tc.publishErr + adapter := NewBackendAdapter(publisher, "test-topic") + + err := adapter.PublishSearchConfigurationChanged(context.Background(), tc.resp, tc.userID, tc.isCreation) + + if (err != nil) != tc.wantErr { + t.Errorf("PublishSearchConfigurationChanged() error = %v, wantErr %v", err, tc.wantErr) + } + + if tc.wantErr { + return + } + + if publisher.publishedTopic != "test-topic" { + t.Errorf("Topic mismatch: got %s, want test-topic", publisher.publishedTopic) + } + + // Unmarshal actual data + var actual interface{} + if err := json.Unmarshal(publisher.publishedData, &actual); err != nil { + t.Fatalf("failed to unmarshal published data: %v", err) + } + + // Unmarshal expected data + var expected interface{} + if err := json.Unmarshal([]byte(tc.expectedJSON), &expected); err != nil { + t.Fatalf("failed to unmarshal expected data: %v", err) + } + + if diff := cmp.Diff(expected, actual); diff != "" { + t.Errorf("Payload mismatch (-want +got):\n%s", diff) + } + }) + } +} diff --git a/lib/gcppubsub/gcppubsubadapters/batch_event_producer.go b/lib/gcppubsub/gcppubsubadapters/batch_event_producer.go new file mode 100644 index 000000000..f6aeb709c --- /dev/null +++ b/lib/gcppubsub/gcppubsubadapters/batch_event_producer.go @@ -0,0 +1,54 @@ +// Copyright 2025 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +//     http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package gcppubsubadapters + +import ( + "context" + "fmt" + + "github.com/GoogleChrome/webstatus.dev/lib/event" + refreshv1 "github.com/GoogleChrome/webstatus.dev/lib/event/refreshsearchcommand/v1" + "github.com/GoogleChrome/webstatus.dev/lib/workertypes" +) + +type BatchFanOutPublisherAdapter struct { + client EventPublisher + topicID string +} + +func NewBatchFanOutPublisherAdapter(client EventPublisher, topicID string) *BatchFanOutPublisherAdapter { + return &BatchFanOutPublisherAdapter{client: client, topicID: topicID} +} + +func (a *BatchFanOutPublisherAdapter) PublishRefreshCommand(ctx context.Context, + cmd workertypes.RefreshSearchCommand) error { + evt := refreshv1.RefreshSearchCommand{ + SearchID: cmd.SearchID, + Query: cmd.Query, + Frequency: refreshv1.JobFrequency(cmd.Frequency), + Timestamp: cmd.Timestamp, + } + + msg, err := event.New(evt) + if err != nil { + return fmt.Errorf("failed to create event: %w", err) + } + + if _, err := a.client.Publish(ctx, a.topicID, msg); err != nil { + return fmt.Errorf("failed to publish message: %w", err) + } + + return nil +} diff --git a/lib/gcppubsub/gcppubsubadapters/batch_event_producer_test.go b/lib/gcppubsub/gcppubsubadapters/batch_event_producer_test.go new file mode 100644 index 000000000..b357ec6c0 --- /dev/null +++ b/lib/gcppubsub/gcppubsubadapters/batch_event_producer_test.go @@ -0,0 +1,106 @@ +// Copyright 2025 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +//     http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package gcppubsubadapters + +import ( + "context" + "encoding/json" + "errors" + "testing" + "time" + + "github.com/GoogleChrome/webstatus.dev/lib/workertypes" + "github.com/google/go-cmp/cmp" +) + +func TestBatchFanOutPublisherAdapter_PublishRefreshCommand(t *testing.T) { + tests := []struct { + name string + cmd workertypes.RefreshSearchCommand + publishErr error + wantErr bool + expectedJSON string + }{ + { + name: "success", + cmd: workertypes.RefreshSearchCommand{ + SearchID: "search-123", + Query: "query=abc", + Frequency: workertypes.FrequencyImmediate, + Timestamp: time.Date(2025, 1, 1, 0, 0, 0, 0, time.UTC), + }, + publishErr: nil, + wantErr: false, + expectedJSON: `{ + "apiVersion": "v1", + "kind": "RefreshSearchCommand", + "data": { + "search_id": "search-123", + "query": "query=abc", + "frequency": "IMMEDIATE", + "timestamp": "2025-01-01T00:00:00Z" + } + }`, + }, + { + name: "publish error", + cmd: workertypes.RefreshSearchCommand{ + SearchID: "search-123", + Query: "query=abc", + Frequency: workertypes.FrequencyImmediate, + Timestamp: time.Date(2025, 1, 1, 0, 0, 0, 0, time.UTC), + }, + publishErr: errors.New("pubsub error"), + wantErr: true, + expectedJSON: "", + }, + } + + for _, tc := range tests { + t.Run(tc.name, func(t *testing.T) { + publisher := new(mockPublisher) + publisher.err = tc.publishErr + adapter := NewBatchFanOutPublisherAdapter(publisher, "test-topic") + + err := adapter.PublishRefreshCommand(context.Background(), tc.cmd) + + if (err != nil) != tc.wantErr { + t.Errorf("PublishRefreshCommand() error = %v, 
wantErr %v", err, tc.wantErr) + } + + if !tc.wantErr { + verifyJSONPayload(t, publisher.publishedData, tc.expectedJSON) + } + }) + } +} + +func verifyJSONPayload(t *testing.T, actualBytes []byte, expectedJSON string) { + t.Helper() + + var actual interface{} + if err := json.Unmarshal(actualBytes, &actual); err != nil { + t.Fatalf("failed to unmarshal actual data: %v", err) + } + + var expected interface{} + if err := json.Unmarshal([]byte(expectedJSON), &expected); err != nil { + t.Fatalf("failed to unmarshal expected data: %v", err) + } + + if diff := cmp.Diff(expected, actual); diff != "" { + t.Errorf("Payload mismatch (-want +got):\n%s", diff) + } +} diff --git a/lib/gcppubsub/gcppubsubadapters/email_worker.go b/lib/gcppubsub/gcppubsubadapters/email_worker.go new file mode 100644 index 000000000..cdcb88ddd --- /dev/null +++ b/lib/gcppubsub/gcppubsubadapters/email_worker.go @@ -0,0 +1,85 @@ +// Copyright 2025 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package gcppubsubadapters + +import ( + "context" + + "github.com/GoogleChrome/webstatus.dev/lib/event" + v1 "github.com/GoogleChrome/webstatus.dev/lib/event/emailjob/v1" + "github.com/GoogleChrome/webstatus.dev/lib/workertypes" +) + +// EmailWorkerMessageHandler defines the interface for the Sender logic. 
+type EmailWorkerMessageHandler interface { + ProcessMessage(ctx context.Context, job workertypes.IncomingEmailDeliveryJob) error +} + +type EmailWorkerSubscriberAdapter struct { + sender EmailWorkerMessageHandler + eventSubscriber EventSubscriber + subscriptionID string + router *event.Router +} + +func NewEmailWorkerSubscriberAdapter( + sender EmailWorkerMessageHandler, + eventSubscriber EventSubscriber, + subscriptionID string, +) *EmailWorkerSubscriberAdapter { + router := event.NewRouter() + + ret := &EmailWorkerSubscriberAdapter{ + sender: sender, + eventSubscriber: eventSubscriber, + subscriptionID: subscriptionID, + router: router, + } + + event.Register(router, ret.handleEmailJobEvent) + + return ret +} + +func (a *EmailWorkerSubscriberAdapter) Subscribe(ctx context.Context) error { + return a.eventSubscriber.Subscribe(ctx, a.subscriptionID, func(ctx context.Context, + msgID string, data []byte) error { + return a.router.HandleMessage(ctx, msgID, data) + }) +} + +func (a *EmailWorkerSubscriberAdapter) handleEmailJobEvent( + ctx context.Context, msgID string, event v1.EmailJobEvent) error { + + incomingJob := workertypes.IncomingEmailDeliveryJob{ + EmailDeliveryJob: workertypes.EmailDeliveryJob{ + SubscriptionID: event.SubscriptionID, + RecipientEmail: event.RecipientEmail, + ChannelID: event.ChannelID, + SummaryRaw: event.SummaryRaw, + Metadata: workertypes.DeliveryMetadata{ + EventID: event.Metadata.EventID, + SearchID: event.Metadata.SearchID, + Query: event.Metadata.Query, + Frequency: event.Metadata.Frequency.ToWorkerTypeJobFrequency(), + GeneratedAt: event.Metadata.GeneratedAt, + }, + Triggers: event.ToWorkerTypeJobTriggers(), + }, + EmailEventID: msgID, + } + + return a.sender.ProcessMessage(ctx, incomingJob) +} diff --git a/lib/gcppubsub/gcppubsubadapters/email_worker_test.go b/lib/gcppubsub/gcppubsubadapters/email_worker_test.go new file mode 100644 index 000000000..0805f21cb --- /dev/null +++ b/lib/gcppubsub/gcppubsubadapters/email_worker_test.go 
@@ -0,0 +1,178 @@ +// Copyright 2025 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package gcppubsubadapters + +import ( + "context" + "encoding/json" + "sync" + "testing" + "time" + + v1 "github.com/GoogleChrome/webstatus.dev/lib/event/emailjob/v1" + "github.com/GoogleChrome/webstatus.dev/lib/workertypes" + "github.com/google/go-cmp/cmp" +) + +// --- Mocks --- + +type mockEmailWorkerMessageHandler struct { + calls []workertypes.IncomingEmailDeliveryJob + mu sync.Mutex + err error +} + +func (m *mockEmailWorkerMessageHandler) ProcessMessage( + _ context.Context, job workertypes.IncomingEmailDeliveryJob) error { + m.mu.Lock() + defer m.mu.Unlock() + m.calls = append(m.calls, job) + + return m.err +} + +// --- Tests --- + +type emailTestEnv struct { + sender *mockEmailWorkerMessageHandler + adapter *EmailWorkerSubscriberAdapter + handleFn func(context.Context, string, []byte) error + stop func() +} + +func setupEmailTestAdapter(t *testing.T) *emailTestEnv { + t.Helper() + sender := new(mockEmailWorkerMessageHandler) + subscriber := &mockSubscriber{block: make(chan struct{}), mu: sync.Mutex{}, handlers: nil} + subscriptionID := "email-sub" + + adapter := NewEmailWorkerSubscriberAdapter(sender, subscriber, subscriptionID) + + ctx, cancel := context.WithCancel(context.Background()) + + errChan := make(chan error) + go func() { + errChan <- adapter.Subscribe(ctx) + }() + + // 
Wait briefly for Subscribe to start and register the handler + time.Sleep(50 * time.Millisecond) + + subscriber.mu.Lock() + handleFn := subscriber.handlers[subscriptionID] + subscriber.mu.Unlock() + + if handleFn == nil { + cancel() + t.Fatalf("Subscribe did not register a handler for subscription %s", subscriptionID) + } + + return &emailTestEnv{ + sender: sender, + adapter: adapter, + handleFn: func(ctx context.Context, msgID string, data []byte) error { + return handleFn(ctx, msgID, data) + }, + stop: func() { + close(subscriber.block) + cancel() + <-errChan + }, + } +} + +func TestEmailWorkerSubscriberAdapter_RoutesEmailJobEvent(t *testing.T) { + env := setupEmailTestAdapter(t) + defer env.stop() + + now := time.Now() + emailJobEvent := v1.EmailJobEvent{ + SubscriptionID: "sub-123", + RecipientEmail: "test@example.com", + ChannelID: "chan-456", + SummaryRaw: []byte(`{"key":"value"}`), + Metadata: v1.EmailJobEventMetadata{ + EventID: "event-789", + SearchID: "search-abc", + Query: "is:open", + Frequency: v1.FrequencyMonthly, + GeneratedAt: now, + }, + Triggers: []v1.JobTrigger{ + v1.BrowserImplementationAnyComplete, + }, + } + + ceWrapper := map[string]interface{}{ + "apiVersion": "v1", + "kind": "EmailJobEvent", + "data": emailJobEvent, + } + ceBytes, err := json.Marshal(ceWrapper) + if err != nil { + t.Fatalf("Failed to marshal event: %v", err) + } + + msgID := "msg-xyz" + if err := env.handleFn(context.Background(), msgID, ceBytes); err != nil { + t.Errorf("handleFn failed: %v", err) + } + + if len(env.sender.calls) != 1 { + t.Fatalf("Expected 1 call to ProcessMessage, got %d", len(env.sender.calls)) + } + + expectedJob := workertypes.IncomingEmailDeliveryJob{ + EmailDeliveryJob: workertypes.EmailDeliveryJob{ + SubscriptionID: "sub-123", + RecipientEmail: "test@example.com", + ChannelID: "chan-456", + SummaryRaw: []byte(`{"key":"value"}`), + Metadata: workertypes.DeliveryMetadata{ + EventID: "event-789", + SearchID: "search-abc", + Query: "is:open", + 
Frequency: workertypes.FrequencyMonthly, + GeneratedAt: now, + }, + Triggers: []workertypes.JobTrigger{ + workertypes.BrowserImplementationAnyComplete, + }, + }, + EmailEventID: msgID, + } + + if diff := cmp.Diff(expectedJob, env.sender.calls[0]); diff != "" { + t.Errorf("ProcessMessage call mismatch (-want +got):\n%s", diff) + } +} + +func TestEmailWorkerSubscriberAdapter_ReturnsErrorOnUnknownEvent(t *testing.T) { + env := setupEmailTestAdapter(t) + defer env.stop() + + ceWrapper := map[string]interface{}{ + "apiVersion": "v1", + "kind": "UnknownEvent", + "data": map[string]string{"foo": "bar"}, + } + ceBytes, _ := json.Marshal(ceWrapper) + + err := env.handleFn(context.Background(), "msg-1", ceBytes) + if err == nil { + t.Error("Expected an error for an unknown event, but got nil") + } +} diff --git a/lib/gcppubsub/gcppubsubadapters/event_producer.go b/lib/gcppubsub/gcppubsubadapters/event_producer.go new file mode 100644 index 000000000..37bef1286 --- /dev/null +++ b/lib/gcppubsub/gcppubsubadapters/event_producer.go @@ -0,0 +1,169 @@ +// Copyright 2025 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +package gcppubsubadapters + +import ( + "context" + "log/slog" + + "github.com/GoogleChrome/webstatus.dev/lib/event" + batchrefreshv1 "github.com/GoogleChrome/webstatus.dev/lib/event/batchrefreshtrigger/v1" + featurediffv1 "github.com/GoogleChrome/webstatus.dev/lib/event/featurediff/v1" + refreshv1 "github.com/GoogleChrome/webstatus.dev/lib/event/refreshsearchcommand/v1" + searchconfigv1 "github.com/GoogleChrome/webstatus.dev/lib/event/searchconfigurationchanged/v1" + "github.com/GoogleChrome/webstatus.dev/lib/workertypes" +) + +type EventProducerSearchMessageHandler interface { + ProcessSearch(ctx context.Context, searchID string, query string, + frequency workertypes.JobFrequency, triggerID string) error +} + +type EventProducerBatchUpdateHandler interface { + ProcessBatchUpdate(ctx context.Context, triggerID string, frequency workertypes.JobFrequency) error +} + +type EventSubscriber interface { + Subscribe(ctx context.Context, subID string, + handler func(ctx context.Context, msgID string, data []byte) error) error +} + +type SubscriberConfig struct { + SearchSubscriptionID string + BatchUpdateSubscriptionID string +} + +type EventProducerSubscriberAdapter struct { + searchEventHandler EventProducerSearchMessageHandler + batchUpdateHandler EventProducerBatchUpdateHandler + eventSubscriber EventSubscriber + config SubscriberConfig + searchEventRouter *event.Router + batchUpdateRouter *event.Router +} + +func NewEventProducerSubscriberAdapter( + searchMessageHandler EventProducerSearchMessageHandler, + batchUpdateHandler EventProducerBatchUpdateHandler, + eventSubscriber EventSubscriber, + config SubscriberConfig, +) *EventProducerSubscriberAdapter { + searchEventRouter := event.NewRouter() + + batchUpdateRouter := event.NewRouter() + + ret := &EventProducerSubscriberAdapter{ + searchEventHandler: searchMessageHandler, + batchUpdateHandler: batchUpdateHandler, + eventSubscriber: eventSubscriber, + config: config, + searchEventRouter: searchEventRouter, + 
batchUpdateRouter: batchUpdateRouter, + } + + event.Register(searchEventRouter, ret.processRefreshSearchCommand) + event.Register(searchEventRouter, ret.processSearchConfigurationChangedEvent) + + event.Register(batchUpdateRouter, ret.processBatchUpdateCommand) + + return ret +} + +func (a *EventProducerSubscriberAdapter) processRefreshSearchCommand(ctx context.Context, + eventID string, event refreshv1.RefreshSearchCommand) error { + slog.InfoContext(ctx, "received refresh search command", "eventID", eventID, "event", event) + + return a.searchEventHandler.ProcessSearch(ctx, event.SearchID, event.Query, + event.Frequency.ToWorkerTypeJobFrequency(), eventID) +} + +func (a *EventProducerSubscriberAdapter) processSearchConfigurationChangedEvent(ctx context.Context, + eventID string, event searchconfigv1.SearchConfigurationChangedEvent) error { + slog.InfoContext(ctx, "received search configuration changed event", "eventID", eventID, "event", event) + + return a.searchEventHandler.ProcessSearch(ctx, event.SearchID, event.Query, + event.Frequency.ToWorkerTypeJobFrequency(), eventID) +} + +func (a *EventProducerSubscriberAdapter) Subscribe(ctx context.Context) error { + return RunGroup(ctx, + // Handler 1: Search + func(ctx context.Context) error { + return a.eventSubscriber.Subscribe(ctx, a.config.SearchSubscriptionID, + func(ctx context.Context, msgID string, data []byte) error { + return a.searchEventRouter.HandleMessage(ctx, msgID, data) + }) + }, + // Handler 2: Batch Update + func(ctx context.Context) error { + return a.eventSubscriber.Subscribe(ctx, a.config.BatchUpdateSubscriptionID, + func(ctx context.Context, msgID string, data []byte) error { + return a.batchUpdateRouter.HandleMessage(ctx, msgID, data) + }) + }, + ) +} + +func (a *EventProducerSubscriberAdapter) processBatchUpdateCommand(ctx context.Context, + eventID string, event batchrefreshv1.BatchRefreshTrigger) error { + slog.InfoContext(ctx, "received batch update command", "eventID", eventID, "event", 
event) + + return a.batchUpdateHandler.ProcessBatchUpdate(ctx, eventID, + event.Frequency.ToWorkerTypeJobFrequency()) +} + +type EventPublisher interface { + Publish(ctx context.Context, topicID string, data []byte) (string, error) +} + +type EventProducerPublisherAdapter struct { + eventPublisher EventPublisher + topicID string +} + +func NewEventProducerPublisherAdapter(eventPublisher EventPublisher, topicID string) *EventProducerPublisherAdapter { + return &EventProducerPublisherAdapter{ + eventPublisher: eventPublisher, + topicID: topicID, + } +} + +func (a *EventProducerPublisherAdapter) Publish(ctx context.Context, + req workertypes.PublishEventRequest) (string, error) { + b, err := event.New(featurediffv1.FeatureDiffEvent{ + EventID: req.EventID, + SearchID: req.SearchID, + Query: req.Query, + Summary: req.Summary, + StateID: req.StateID, + StateBlobPath: req.StateBlobPath, + DiffID: req.DiffID, + DiffBlobPath: req.DiffBlobPath, + GeneratedAt: req.GeneratedAt, + Frequency: featurediffv1.ToJobFrequency(req.Frequency), + Reasons: featurediffv1.ToReasons(req.Reasons), + }) + if err != nil { + return "", err + } + + id, err := a.eventPublisher.Publish(ctx, a.topicID, b) + if err != nil { + return "", err + } + slog.InfoContext(ctx, "published feature diff event", "id", id, "eventID", req.EventID) + + return id, nil +} diff --git a/lib/gcppubsub/gcppubsubadapters/event_producer_test.go b/lib/gcppubsub/gcppubsubadapters/event_producer_test.go new file mode 100644 index 000000000..72bc5e33a --- /dev/null +++ b/lib/gcppubsub/gcppubsubadapters/event_producer_test.go @@ -0,0 +1,342 @@ +// Copyright 2025 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package gcppubsubadapters + +import ( + "context" + "encoding/base64" + "encoding/json" + "sync" + "testing" + "time" + + batchrefreshv1 "github.com/GoogleChrome/webstatus.dev/lib/event/batchrefreshtrigger/v1" + refreshv1 "github.com/GoogleChrome/webstatus.dev/lib/event/refreshsearchcommand/v1" + "github.com/GoogleChrome/webstatus.dev/lib/workertypes" + "github.com/google/go-cmp/cmp" +) + +// --- Mocks --- + +type mockSearchHandler struct { + calls []searchCall + mu sync.Mutex + err error +} + +type searchCall struct { + SearchID string + Query string + Frequency workertypes.JobFrequency + TriggerID string +} + +func (m *mockSearchHandler) ProcessSearch(_ context.Context, searchID, query string, + freq workertypes.JobFrequency, triggerID string) error { + m.mu.Lock() + defer m.mu.Unlock() + m.calls = append(m.calls, searchCall{searchID, query, freq, triggerID}) + + return m.err +} + +type mockBatchHandler struct { + calls []batchCall + mu sync.Mutex + err error +} + +type batchCall struct { + TriggerID string + Frequency workertypes.JobFrequency +} + +func (m *mockBatchHandler) ProcessBatchUpdate(_ context.Context, triggerID string, + freq workertypes.JobFrequency) error { + m.mu.Lock() + defer m.mu.Unlock() + m.calls = append(m.calls, batchCall{triggerID, freq}) + + return m.err +} + +type mockSubscriber struct { + handlers map[string]func(context.Context, string, []byte) error + mu sync.Mutex + // block allows us to simulate a long-running Subscribe call so RunGroup doesn't exit immediately + block chan struct{} +} + +func (m *mockSubscriber) 
Subscribe(ctx context.Context, subID string, + handler func(context.Context, string, []byte) error) error { + m.mu.Lock() + if m.handlers == nil { + m.handlers = make(map[string]func(context.Context, string, []byte) error) + } + m.handlers[subID] = handler + m.mu.Unlock() + + // Simulate blocking behavior of a real subscriber logic + if m.block != nil { + select { + case <-m.block: + return nil + case <-ctx.Done(): + return ctx.Err() + } + } + + return nil +} + +type mockPublisher struct { + publishedData []byte + publishedTopic string + err error +} + +func (m *mockPublisher) Publish(_ context.Context, topicID string, data []byte) (string, error) { + m.publishedData = data + m.publishedTopic = topicID + + return "msg-id", m.err +} + +// --- Tests --- + +type testEnv struct { + searchHandler *mockSearchHandler + batchHandler *mockBatchHandler + subscriber *mockSubscriber + adapter *EventProducerSubscriberAdapter + searchFn func(context.Context, string, []byte) error + batchFn func(context.Context, string, []byte) error + stop func() +} + +func setupTestAdapter(t *testing.T) *testEnv { + t.Helper() + searchHandler := new(mockSearchHandler) + batchHandler := new(mockBatchHandler) + subscriber := &mockSubscriber{block: make(chan struct{}), mu: sync.Mutex{}, handlers: nil} + config := SubscriberConfig{ + SearchSubscriptionID: "search-sub", + BatchUpdateSubscriptionID: "batch-sub", + } + + adapter := NewEventProducerSubscriberAdapter(searchHandler, batchHandler, subscriber, config) + + // Run Subscribe in a goroutine because it blocks + ctx, cancel := context.WithCancel(context.Background()) + + errChan := make(chan error) + go func() { + errChan <- adapter.Subscribe(ctx) + }() + + // Wait briefly for RunGroup to start and handlers to be registered + time.Sleep(50 * time.Millisecond) + + subscriber.mu.Lock() + searchFn := subscriber.handlers["search-sub"] + batchFn := subscriber.handlers["batch-sub"] + subscriber.mu.Unlock() + + if searchFn == nil || batchFn == nil { + 
cancel() + t.Fatal("Subscribe did not register handlers for both subscriptions") + } + + return &testEnv{ + searchHandler: searchHandler, + batchHandler: batchHandler, + subscriber: subscriber, + adapter: adapter, + searchFn: searchFn, + batchFn: batchFn, + stop: func() { + close(subscriber.block) // Unblock the subscriber + cancel() // Cancel the context + <-errChan // Wait for adapter.Subscribe to return + }, + } +} + +func TestSubscribe_RoutesRefreshSearchCommand(t *testing.T) { + env := setupTestAdapter(t) + defer env.stop() + + refreshCmd := refreshv1.RefreshSearchCommand{ + SearchID: "s1", + Query: "q1", + Frequency: "IMMEDIATE", + Timestamp: time.Time{}, + } + ceWrapper := map[string]interface{}{ + "apiVersion": "v1", + "kind": "RefreshSearchCommand", + "data": refreshCmd, + } + ceBytes, _ := json.Marshal(ceWrapper) + + if err := env.searchFn(context.Background(), "msg-1", ceBytes); err != nil { + t.Errorf("searchFn failed: %v", err) + } + + if len(env.searchHandler.calls) != 1 { + t.Fatalf("Expected 1 search call, got %d", len(env.searchHandler.calls)) + } + + expectedCall := searchCall{ + SearchID: "s1", + Query: "q1", + Frequency: workertypes.FrequencyImmediate, + TriggerID: "msg-1", + } + + if diff := cmp.Diff(expectedCall, env.searchHandler.calls[0]); diff != "" { + t.Errorf("Search call mismatch (-want +got):\n%s", diff) + } +} + +func TestSubscribe_RoutesBatchUpdate(t *testing.T) { + env := setupTestAdapter(t) + defer env.stop() + + batchTrig := batchrefreshv1.BatchRefreshTrigger{ + Frequency: "WEEKLY", + } + ceWrapperBatch := map[string]interface{}{ + "apiVersion": "v1", + "kind": "BatchRefreshTrigger", + "data": batchTrig, + } + ceBytesBatch, _ := json.Marshal(ceWrapperBatch) + + if err := env.batchFn(context.Background(), "msg-2", ceBytesBatch); err != nil { + t.Errorf("batchFn failed: %v", err) + } + + if len(env.batchHandler.calls) != 1 { + t.Fatalf("Expected 1 batch call, got %d", len(env.batchHandler.calls)) + } + + expectedCall := batchCall{ + 
TriggerID: "msg-2", + Frequency: workertypes.FrequencyWeekly, + } + + if diff := cmp.Diff(expectedCall, env.batchHandler.calls[0]); diff != "" { + t.Errorf("Batch call mismatch (-want +got):\n%s", diff) + } +} + +func TestSubscribe_RoutesSearchConfigurationChanged(t *testing.T) { + env := setupTestAdapter(t) + defer env.stop() + + // We construct the payload manually for the test execution + configEventPayload := map[string]interface{}{ + "search_id": "s2", + "query": "q2", + "user_id": "user-1", + "timestamp": "0001-01-01T00:00:00Z", + "is_creation": false, + "frequency": "IMMEDIATE", + } + + ceWrapperConfig := map[string]interface{}{ + "apiVersion": "v1", + "kind": "SearchConfigurationChangedEvent", + "data": configEventPayload, + } + ceBytesConfig, _ := json.Marshal(ceWrapperConfig) + + if err := env.searchFn(context.Background(), "msg-3", ceBytesConfig); err != nil { + t.Errorf("searchFn (config event) failed: %v", err) + } + + if len(env.searchHandler.calls) != 1 { + t.Fatalf("Expected 1 search call, got %d", len(env.searchHandler.calls)) + } + + expectedCall := searchCall{ + SearchID: "s2", + Query: "q2", + Frequency: workertypes.FrequencyImmediate, + TriggerID: "msg-3", + } + + if diff := cmp.Diff(expectedCall, env.searchHandler.calls[0]); diff != "" { + t.Errorf("Search call mismatch (-want +got):\n%s", diff) + } +} + +func TestPublisher_Publish(t *testing.T) { + publisher := new(mockPublisher) + adapter := NewEventProducerPublisherAdapter(publisher, "topic-1") + now := time.Date(2025, 1, 1, 12, 0, 0, 0, time.UTC) + + req := workertypes.PublishEventRequest{ + EventID: "evt-1", + SearchID: "search-1", + Query: "query-1", + Frequency: workertypes.FrequencyImmediate, + Reasons: []workertypes.Reason{workertypes.ReasonDataUpdated}, + Summary: []byte(`{"added": 1}`), + StateID: "state-id-1", + DiffID: "diff-id-1", + StateBlobPath: "gs://bucket/state-blob", + DiffBlobPath: "gs://bucket/diff-blob", + GeneratedAt: now, + } + + _, err := 
adapter.Publish(context.Background(), req) + if err != nil { + t.Fatalf("Publish failed: %v", err) + } + + if publisher.publishedTopic != "topic-1" { + t.Errorf("Topic mismatch: got %s, want topic-1", publisher.publishedTopic) + } + + var actualEnvelope map[string]interface{} + if err := json.Unmarshal(publisher.publishedData, &actualEnvelope); err != nil { + t.Fatalf("Failed to unmarshal published data: %v", err) + } + + expectedEnvelope := map[string]interface{}{ + "apiVersion": "v1", + "kind": "FeatureDiffEvent", + "data": map[string]interface{}{ + "event_id": "evt-1", + "search_id": "search-1", + "query": "query-1", + // go encodes/decodes []byte as base64 strings + "summary": base64.StdEncoding.EncodeToString([]byte(`{"added": 1}`)), + "state_id": "state-id-1", + "diff_id": "diff-id-1", + "state_blob_path": "gs://bucket/state-blob", + "diff_blob_path": "gs://bucket/diff-blob", + "reasons": []interface{}{"DATA_UPDATED"}, + "generated_at": now.Format(time.RFC3339), + "frequency": "IMMEDIATE", + }, + } + + if diff := cmp.Diff(expectedEnvelope, actualEnvelope); diff != "" { + t.Errorf("Payload mismatch (-want +got):\n%s", diff) + } +} diff --git a/lib/gcppubsub/gcppubsubadapters/push_delivery.go b/lib/gcppubsub/gcppubsubadapters/push_delivery.go new file mode 100644 index 000000000..0d4386747 --- /dev/null +++ b/lib/gcppubsub/gcppubsubadapters/push_delivery.go @@ -0,0 +1,119 @@ +// Copyright 2025 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +//     http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+// See the License for the specific language governing permissions and +// limitations under the License. + +package gcppubsubadapters + +import ( + "context" + "fmt" + "log/slog" + + "github.com/GoogleChrome/webstatus.dev/lib/event" + v1 "github.com/GoogleChrome/webstatus.dev/lib/event/emailjob/v1" + featurediffv1 "github.com/GoogleChrome/webstatus.dev/lib/event/featurediff/v1" + "github.com/GoogleChrome/webstatus.dev/lib/workertypes" +) + +type PushDeliveryPublisher struct { + client EventPublisher + emailTopic string +} + +func NewPushDeliveryPublisher(client EventPublisher, emailTopic string) *PushDeliveryPublisher { + return &PushDeliveryPublisher{ + client: client, + emailTopic: emailTopic, + } +} + +func (p *PushDeliveryPublisher) PublishEmailJob(ctx context.Context, job workertypes.EmailDeliveryJob) error { + b, err := event.New(v1.EmailJobEvent{ + SubscriptionID: job.SubscriptionID, + RecipientEmail: job.RecipientEmail, + SummaryRaw: job.SummaryRaw, + Metadata: v1.EmailJobEventMetadata{ + EventID: job.Metadata.EventID, + SearchID: job.Metadata.SearchID, + Query: job.Metadata.Query, + Frequency: v1.ToJobFrequency(job.Metadata.Frequency), + GeneratedAt: job.Metadata.GeneratedAt, + }, + Triggers: v1.ToJobTriggers(job.Triggers), + ChannelID: job.ChannelID, + }) + if err != nil { + return err + } + + id, err := p.client.Publish(ctx, p.emailTopic, b) + if err != nil { + return fmt.Errorf("failed to publish email job: %w", err) + } + slog.InfoContext(ctx, "published email job", "id", id, "eventID", job.Metadata.EventID) + + return nil +} + +// PushDeliveryMessageHandler defines the interface for the Dispatcher logic. 
+type PushDeliveryMessageHandler interface { + ProcessEvent(ctx context.Context, metadata workertypes.DispatchEventMetadata, summary []byte) error +} + +type PushDeliverySubscriberAdapter struct { + dispatcher PushDeliveryMessageHandler + eventSubscriber EventSubscriber + subscriptionID string + router *event.Router +} + +func NewPushDeliverySubscriberAdapter( + dispatcher PushDeliveryMessageHandler, + eventSubscriber EventSubscriber, + subscriptionID string, +) *PushDeliverySubscriberAdapter { + router := event.NewRouter() + + ret := &PushDeliverySubscriberAdapter{ + dispatcher: dispatcher, + eventSubscriber: eventSubscriber, + subscriptionID: subscriptionID, + router: router, + } + + event.Register(router, ret.processFeatureDiffEvent) + + return ret +} + +func (a *PushDeliverySubscriberAdapter) Subscribe(ctx context.Context) error { + return a.eventSubscriber.Subscribe(ctx, a.subscriptionID, func(ctx context.Context, + msgID string, data []byte) error { + return a.router.HandleMessage(ctx, msgID, data) + }) +} + +func (a *PushDeliverySubscriberAdapter) processFeatureDiffEvent(ctx context.Context, + eventID string, event featurediffv1.FeatureDiffEvent) error { + slog.InfoContext(ctx, "received feature diff event", "eventID", eventID) + + metadata := workertypes.DispatchEventMetadata{ + EventID: event.EventID, + SearchID: event.SearchID, + Query: event.Query, + Frequency: event.Frequency.ToWorkertypes(), + GeneratedAt: event.GeneratedAt, + } + + return a.dispatcher.ProcessEvent(ctx, metadata, event.Summary) +} diff --git a/lib/gcppubsub/gcppubsubadapters/push_delivery_test.go b/lib/gcppubsub/gcppubsubadapters/push_delivery_test.go new file mode 100644 index 000000000..a138e2b61 --- /dev/null +++ b/lib/gcppubsub/gcppubsubadapters/push_delivery_test.go @@ -0,0 +1,261 @@ +// Copyright 2025 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package gcppubsubadapters
+
+import (
+	"context"
+	"encoding/base64"
+	"encoding/json"
+	"sync"
+	"testing"
+	"time"
+
+	featurediffv1 "github.com/GoogleChrome/webstatus.dev/lib/event/featurediff/v1"
+	"github.com/GoogleChrome/webstatus.dev/lib/workertypes"
+	"github.com/google/go-cmp/cmp"
+)
+
+// --- Mocks ---
+
+type mockPushDeliveryPublisher struct {
+	publishedData  []byte
+	publishedTopic string
+	err            error
+	mu             sync.Mutex // Guards publishedData/publishedTopic for concurrent access
+}
+
+func (m *mockPushDeliveryPublisher) Publish(_ context.Context, topicID string, data []byte) (string, error) {
+	m.mu.Lock()
+	defer m.mu.Unlock()
+	m.publishedData = data
+	m.publishedTopic = topicID
+
+	return "msg-id", m.err
+}
+
+type mockDispatcher struct {
+	calls []processEventCall
+	mu    sync.Mutex
+	err   error
+}
+
+type processEventCall struct {
+	Metadata workertypes.DispatchEventMetadata
+	Summary  []byte
+}
+
+func (m *mockDispatcher) ProcessEvent(_ context.Context,
+	metadata workertypes.DispatchEventMetadata, summary []byte) error {
+	m.mu.Lock()
+	defer m.mu.Unlock()
+	m.calls = append(m.calls, processEventCall{Metadata: metadata, Summary: summary})
+
+	return m.err
+}
+
+type mockPushDeliverySubscriber struct {
+	handlers map[string]func(context.Context, string, []byte) error
+	mu       sync.Mutex
+	// block allows us to simulate a long-running Subscribe call so RunGroup doesn't exit immediately
+	block chan struct{}
+}
+
+func (m *mockPushDeliverySubscriber) Subscribe(ctx context.Context, subID string,
+	handler func(context.Context, string, []byte) error) error {
+	m.mu.Lock()
+	if m.handlers == nil {
+		m.handlers = make(map[string]func(context.Context, string, []byte) error)
+	}
+	m.handlers[subID] = handler
+	m.mu.Unlock()
+
+	// Simulate the blocking behavior of a real subscriber.
+	if m.block != nil {
+		select {
+		case <-m.block:
+			return nil
+		case <-ctx.Done():
+			return ctx.Err()
+		}
+	}
+
+	return nil
+}
+
+// --- Tests ---
+
+func TestPushDeliveryPublisher_PublishEmailJob(t *testing.T) {
+	mockPub := new(mockPushDeliveryPublisher)
+	publisher := NewPushDeliveryPublisher(mockPub, "email-topic")
+
+	job := workertypes.EmailDeliveryJob{
+		SubscriptionID: "sub-1",
+		RecipientEmail: "test@example.com",
+		SummaryRaw:     []byte(`{"text": "Test Body"}`),
+		Metadata: workertypes.DeliveryMetadata{
+			EventID:     "event-1",
+			SearchID:    "search-1",
+			Query:       "query-string",
+			Frequency:   workertypes.FrequencyMonthly,
+			GeneratedAt: time.Date(2025, 1, 1, 12, 0, 0, 0, time.UTC),
+		},
+		ChannelID: "chan-1",
+		Triggers: []workertypes.JobTrigger{
+			workertypes.FeaturePromotedToNewly,
+			workertypes.FeaturePromotedToWidely,
+		},
+	}
+
+	err := publisher.PublishEmailJob(context.Background(), job)
+	if err != nil {
+		t.Fatalf("PublishEmailJob failed: %v", err)
+	}
+
+	if mockPub.publishedTopic != "email-topic" {
+		t.Errorf("Topic mismatch: got %s, want email-topic", mockPub.publishedTopic)
+	}
+
+	var actualEnvelope map[string]interface{}
+	if err := json.Unmarshal(mockPub.publishedData, &actualEnvelope); err != nil {
+		t.Fatalf("Failed to unmarshal published data: %v", err)
+	}
+
+	expectedEnvelope := map[string]interface{}{
+		"apiVersion": "v1",
+		"kind":       "EmailJobEvent",
+		"data": map[string]interface{}{
+			"subscription_id": "sub-1",
+			"recipient_email": "test@example.com",
+			"summary_raw":     base64.StdEncoding.EncodeToString([]byte(`{"text": "Test Body"}`)),
+			"triggers":        []any{"FEATURE_PROMOTED_TO_NEWLY", "FEATURE_PROMOTED_TO_WIDELY"},
+			"metadata": map[string]interface{}{
+				"event_id":     "event-1",
+				"search_id":    "search-1",
+				"query":        "query-string",
+				"frequency":    "MONTHLY",
+				"generated_at": "2025-01-01T12:00:00Z",
+			},
+			"channel_id": "chan-1",
+		},
+	}
+
+	if diff := cmp.Diff(expectedEnvelope, actualEnvelope); diff != "" {
+		t.Errorf("Email job mismatch (-want +got):\n%s", diff)
+	}
+}
+
+type pushDeliveryTestEnv struct {
+	dispatcher    *mockDispatcher
+	subscriber    *mockPushDeliverySubscriber
+	adapter       *PushDeliverySubscriberAdapter
+	featureDiffFn func(context.Context, string, []byte) error
+	stop          func()
+}
+
+func setupPushDeliveryTestAdapter(t *testing.T) *pushDeliveryTestEnv {
+	t.Helper()
+	dispatcher := new(mockDispatcher)
+	subscriber := &mockPushDeliverySubscriber{block: make(chan struct{}), mu: sync.Mutex{}, handlers: nil}
+	subscriptionID := "feature-diff-sub"
+
+	adapter := NewPushDeliverySubscriberAdapter(dispatcher, subscriber, subscriptionID)
+
+	ctx, cancel := context.WithCancel(context.Background())
+
+	errChan := make(chan error, 1) // Buffered channel to prevent goroutine leak on t.Fatal
+	go func() {
+		errChan <- adapter.Subscribe(ctx)
+	}()
+
+	// Wait briefly for Subscribe to start and the handler to be registered.
+	time.Sleep(50 * time.Millisecond)
+
+	subscriber.mu.Lock()
+	featureDiffFn := subscriber.handlers[subscriptionID]
+	subscriber.mu.Unlock()
+
+	if featureDiffFn == nil {
+		cancel()
+		close(subscriber.block)
+		<-errChan
+		t.Fatal("Subscribe did not register handler for subscription")
+	}
+
+	return &pushDeliveryTestEnv{
+		dispatcher:    dispatcher,
+		subscriber:    subscriber,
+		adapter:       adapter,
+		featureDiffFn: featureDiffFn,
+		stop: func() {
+			close(subscriber.block) // Unblock the subscriber
+			cancel()                // Cancel the context
+			<-errChan               // Wait for adapter.Subscribe to return
+		},
+	}
+}
+
+func TestPushDeliverySubscriber_RoutesFeatureDiffEvent(t *testing.T) {
+	env := setupPushDeliveryTestAdapter(t)
+	defer env.stop()
+
+	now := time.Date(2025, 1, 1, 12, 0, 0, 0, time.UTC)
+	featureDiffEvent := featurediffv1.FeatureDiffEvent{
+		EventID:       "evt-1",
+		SearchID:      "s1",
+		Query:         "q1",
+		Summary:       []byte(`{"added": 1}`),
+		StateID:       "state-id-1",
+		StateBlobPath: "gs://bucket/state-blob",
+		DiffID:        "diff-id-1",
+		DiffBlobPath:  "gs://bucket/diff-blob",
+		GeneratedAt:   now,
+		Frequency:     featurediffv1.FrequencyMonthly,
+		Reasons:       []featurediffv1.Reason{featurediffv1.ReasonDataUpdated},
+	}
+	ceWrapper := map[string]interface{}{
+		"apiVersion": "v1",
+		"kind":       "FeatureDiffEvent",
+		"data":       featureDiffEvent,
+	}
+	ceBytes, _ := json.Marshal(ceWrapper)
+
+	if err := env.featureDiffFn(context.Background(), "msg-1", ceBytes); err != nil {
+		t.Errorf("featureDiffFn failed: %v", err)
+	}
+
+	if len(env.dispatcher.calls) != 1 {
+		t.Fatalf("Expected 1 dispatcher call, got %d", len(env.dispatcher.calls))
+	}
+
+	expectedMetadata := workertypes.DispatchEventMetadata{
+		EventID:     "evt-1",
+		SearchID:    "s1",
+		Query:       "q1",
+		Frequency:   workertypes.FrequencyMonthly,
+		GeneratedAt: now,
+	}
+
+	// Compare summary as string since cmp.Diff might struggle with []byte directly within interface{}
+	actualSummaryStr := string(env.dispatcher.calls[0].Summary)
+	expectedSummaryStr := string(featureDiffEvent.Summary)
+
+	if diff := cmp.Diff(expectedMetadata, env.dispatcher.calls[0].Metadata); diff != "" {
+		t.Errorf("Dispatcher metadata mismatch (-want +got):\n%s", diff)
+	}
+
+	if diff := cmp.Diff(expectedSummaryStr, actualSummaryStr); diff != "" {
+		t.Errorf("Dispatcher summary mismatch (-want +got):\n%s", diff)
+	}
+}
diff --git a/lib/gcppubsub/gcppubsubadapters/utils.go b/lib/gcppubsub/gcppubsubadapters/utils.go
new file mode 100644
index 000000000..8dfe2bbf6
--- /dev/null
+++ b/lib/gcppubsub/gcppubsubadapters/utils.go
@@ -0,0 +1,58 @@
+// Copyright 2025 Google LLC
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package gcppubsubadapters
+
+import (
+	"context"
+	"sync"
+)
+
+// RunGroup runs multiple blocking functions concurrently.
+// - It returns the first error encountered.
+// - If one function fails, it cancels the context for the others.
+// - It waits for all functions to exit before returning.
+func RunGroup(ctx context.Context, fns ...func(ctx context.Context) error) error {
+	// 1. Create a derived context so we can signal cancellation to all siblings
+	ctx, cancel := context.WithCancel(ctx)
+	defer cancel()
+
+	var wg sync.WaitGroup
+	errChan := make(chan error, len(fns))
+
+	for _, fn := range fns {
+		wg.Add(1)
+		// Pass fn as an argument so the goroutine does not share the loop variable (pre-Go 1.22 safety)
+		go func(f func(context.Context) error) {
+			defer wg.Done()
+
+			// Pass the cancellable context to the function
+			if err := f(ctx); err != nil {
+				// Record the error; the buffer has one slot per fn, so this send never blocks; default is defensive
+				select {
+				case errChan <- err:
+					// Signal other routines to stop
+					cancel()
+				default:
+				}
+			}
+		}(fn)
+	}
+
+	wg.Wait()
+	close(errChan)
+
+	// Return the first error (if any)
+	return <-errChan
+}
diff --git a/lib/gcppubsub/gcppubsubadapters/utils_test.go b/lib/gcppubsub/gcppubsubadapters/utils_test.go
new file mode 100644
index 000000000..32a5e518b
--- /dev/null
+++ b/lib/gcppubsub/gcppubsubadapters/utils_test.go
@@ -0,0 +1,101 @@
+// Copyright 2025 Google LLC
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package gcppubsubadapters
+
+import (
+	"context"
+	"errors"
+	"testing"
+	"time"
+)
+
+func TestRunGroup(t *testing.T) {
+	tests := []struct {
+		name        string
+		fns         []func(context.Context) error
+		expectedErr error
+	}{
+		{
+			name: "All functions succeed",
+			fns: []func(context.Context) error{
+				func(_ context.Context) error { return nil },
+				func(_ context.Context) error { return nil },
+			},
+			expectedErr: nil,
+		},
+		{
+			name: "One function fails immediately",
+			fns: []func(context.Context) error{
+				func(_ context.Context) error { return errors.New("fail") },
+				func(ctx context.Context) error {
+					// Simulate work
+					select {
+					case <-ctx.Done():
+						return ctx.Err()
+					case <-time.After(100 * time.Millisecond):
+						return nil
+					}
+				},
+			},
+			expectedErr: errors.New("fail"),
+		},
+		{
+			name: "Multiple failures return first error",
+			fns: []func(context.Context) error{
+				func(_ context.Context) error { return errors.New("error 1") },
+				func(_ context.Context) error {
+					time.Sleep(10 * time.Millisecond) // Ensure this happens slightly later
+
+					return errors.New("error 2")
+				},
+			},
+			expectedErr: errors.New("error 1"),
+		},
+		{
+			name: "Cancellation propagates",
+			fns: []func(context.Context) error{
+				func(_ context.Context) error {
+					return errors.New("trigger cancel")
+				},
+				func(ctx context.Context) error {
+					select {
+					case <-ctx.Done():
+						// Correct behavior: context was cancelled
+						return nil
+					case <-time.After(1 * time.Second):
+						return errors.New("timeout: context was not cancelled")
+					}
+				},
+			},
+			expectedErr: errors.New("trigger cancel"),
+		},
+	}
+
+	for _, tc := range tests {
+		t.Run(tc.name, func(t *testing.T) {
+			err := RunGroup(context.Background(), tc.fns...)
+
+			if tc.expectedErr == nil {
+				if err != nil {
+					t.Errorf("RunGroup() unexpected error: %v", err)
+				}
+			} else {
+				if err == nil || err.Error() != tc.expectedErr.Error() {
+					t.Errorf("RunGroup() error = %v, want %v", err, tc.expectedErr)
+				}
+			}
+		})
+	}
+}
diff --git a/lib/gcpspanner/client.go b/lib/gcpspanner/client.go
index f6cdd12e5..045d372e4 100644
--- a/lib/gcpspanner/client.go
+++ b/lib/gcpspanner/client.go
@@ -81,6 +81,7 @@ type Client struct {
 	featureSearchQuery FeatureSearchBaseQuery
 	missingOneImplQuery MissingOneImplementationQuery
 	searchCfg searchConfig
+	notificationCfg notificationConfig
 	batchWriter
 	batchSize int
 	batchWriters int
@@ -140,10 +141,17 @@ type searchConfig struct {
 	maxBookmarksPerUser uint32
 }
 
+// notificationConfig holds the application configuration for notifications.
+type notificationConfig struct {
+	// Max number of consecutive failures per channel.
+	maxConsecutiveFailuresPerChannel uint32
+}
+
 const defaultMaxOwnedSearchesPerUser = 25
 const defaultMaxBookmarksPerUser = 25
 const defaultBatchSize = 5000
 const defaultBatchWriters = 8
+const defaultMaxConsecutiveFailuresPerChannel = 5
 
 func combineAndDeduplicate(excluded []string, discouraged []string) []string {
 	if excluded == nil && discouraged == nil {
@@ -219,6 +227,9 @@ func NewSpannerClient(projectID string, instanceID string, name string) (*Client
 		maxOwnedSearchesPerUser: defaultMaxOwnedSearchesPerUser,
 		maxBookmarksPerUser: defaultMaxBookmarksPerUser,
 		},
+		notificationConfig{
+			maxConsecutiveFailuresPerChannel: defaultMaxConsecutiveFailuresPerChannel,
+		},
 		bw,
 		defaultBatchSize,
 		defaultBatchWriters,
diff --git a/lib/gcpspanner/feature_group_lookups.go b/lib/gcpspanner/feature_group_lookups.go
index 120c15926..6fb81e239 100644
--- a/lib/gcpspanner/feature_group_lookups.go
+++ b/lib/gcpspanner/feature_group_lookups.go
@@ -124,3 +124,30 @@ func
calculateAllFeatureGroupLookups( } } } + +// AddFeatureToGroup adds a feature to a group. +func (c *Client) AddFeatureToGroup(ctx context.Context, featureKey, groupKey string) error { + _, err := c.ReadWriteTransaction(ctx, func(ctx context.Context, txn *spanner.ReadWriteTransaction) error { + featureID, err := c.GetIDFromFeatureKey(ctx, NewFeatureKeyFilter(featureKey)) + if err != nil { + return err + } + groupID, err := c.GetGroupIDFromGroupKey(ctx, groupKey) + if err != nil { + return err + } + m, err := spanner.InsertStruct(featureGroupKeysLookupTable, spannerFeatureGroupKeysLookup{ + GroupID: *groupID, + GroupKeyLowercase: groupKey, + WebFeatureID: *featureID, + Depth: 0, + }) + if err != nil { + return err + } + + return txn.BufferWrite([]*spanner.Mutation{m}) + }) + + return err +} diff --git a/lib/gcpspanner/list_user_saved_searches.go b/lib/gcpspanner/list_user_saved_searches.go index c10ccc712..34da59a5a 100644 --- a/lib/gcpspanner/list_user_saved_searches.go +++ b/lib/gcpspanner/list_user_saved_searches.go @@ -168,29 +168,20 @@ func (c *Client) ListUserSavedSearches( }, nil } -type savedSearchIDContainer struct { - ID string `spanner:"ID"` +type SavedSearchBriefDetails struct { + ID string `spanner:"ID"` + Query string `spanner:"Query"` } func (m userSavedSearchListerMapper) SelectAll() spanner.Statement { return spanner.Statement{ - SQL: "SELECT ID FROM SavedSearches", + SQL: "SELECT ID, Query FROM SavedSearches", Params: nil, } } // Used by the Cloud Scheduler batch job to find all entities to process. 
-func (c *Client) ListAllSavedSearchIDs( - ctx context.Context) ([]string, error) { - ret, err := newAllEntityReader[userSavedSearchListerMapper, savedSearchIDContainer](c).readAll(ctx) - if err != nil { - return nil, err - } - - ids := make([]string, 0, len(ret)) - for _, r := range ret { - ids = append(ids, r.ID) - } - - return ids, nil +func (c *Client) ListAllSavedSearches( + ctx context.Context) ([]SavedSearchBriefDetails, error) { + return newAllEntityReader[userSavedSearchListerMapper, SavedSearchBriefDetails](c).readAll(ctx) } diff --git a/lib/gcpspanner/list_user_saved_searches_test.go b/lib/gcpspanner/list_user_saved_searches_test.go index 87b7473c3..0850a59ec 100644 --- a/lib/gcpspanner/list_user_saved_searches_test.go +++ b/lib/gcpspanner/list_user_saved_searches_test.go @@ -281,30 +281,33 @@ func userSavedSearchesPageEquality(left, right *UserSavedSearchesPage) bool { }) } -func TestListAllSavedSearchIDs(t *testing.T) { +func TestListAllSavedSearches(t *testing.T) { restartDatabaseContainer(t) searches := loadFakeSavedSearches(t) t.Run("list all saved search IDs", func(t *testing.T) { - ids, err := spannerClient.ListAllSavedSearchIDs(context.Background()) + details, err := spannerClient.ListAllSavedSearches(context.Background()) if err != nil { t.Errorf("expected nil error. received %s", err) } - if len(ids) != len(searches) { - t.Errorf("expected %d results. received %d", len(searches), len(ids)) + if len(details) != len(searches) { + t.Errorf("expected %d results. 
received %d", len(searches), len(details)) } - expectedIDs := make([]string, len(searches)) + // Build expected details list from created searches (which have generated IDs) + expectedDetails := make([]SavedSearchBriefDetails, len(searches)) for idx, search := range searches { - expectedIDs[idx] = search.ID + expectedDetails[idx] = SavedSearchBriefDetails{ID: search.ID, Query: search.Query} } - slices.Sort(ids) - slices.Sort(expectedIDs) + slices.SortFunc(expectedDetails, sortSavedSearchBriefDetails) + slices.SortFunc(details, sortSavedSearchBriefDetails) - if !slices.Equal(ids, expectedIDs) { - t.Errorf("expected IDs %v but received %v", expectedIDs, ids) + if !slices.Equal(details, expectedDetails) { + t.Errorf("expected IDs %v but received %v", expectedDetails, details) } }) } + +func sortSavedSearchBriefDetails(a, b SavedSearchBriefDetails) int { return cmp.Compare(a.ID, b.ID) } diff --git a/lib/gcpspanner/notification_channel.go b/lib/gcpspanner/notification_channel.go index 7f9c9d3eb..22f6425f7 100644 --- a/lib/gcpspanner/notification_channel.go +++ b/lib/gcpspanner/notification_channel.go @@ -144,17 +144,34 @@ func (c *NotificationChannel) toSpanner() *spannerNotificationChannel { // toPublic converts the internal spannerNotificationChannel to the public NotificationChannel for reading. 
func (sc *spannerNotificationChannel) toPublic() (*NotificationChannel, error) { ret := &sc.NotificationChannel - if sc.Config.Valid { - bytes, err := json.Marshal(sc.Config.Value) - if err != nil { - return nil, err - } - var emailConfig EmailConfig - if err := json.Unmarshal(bytes, &emailConfig); err != nil { - return nil, err - } - ret.EmailConfig = &emailConfig + subscriptionConfigs, err := loadSubscriptionConfigs(sc.Type, sc.Config) + if err != nil { + return nil, err + } + ret.EmailConfig = subscriptionConfigs.EmailConfig + + return ret, nil +} + +type subscriptionConfigs struct { + EmailConfig *EmailConfig +} + +func loadSubscriptionConfigs(_ string, config spanner.NullJSON) (subscriptionConfigs, error) { + var ret subscriptionConfigs + if !config.Valid { + return ret, nil + } + // For now, only email config is supported. + bytes, err := json.Marshal(config.Value) + if err != nil { + return ret, err + } + var emailConfig EmailConfig + if err := json.Unmarshal(bytes, &emailConfig); err != nil { + return ret, err } + ret.EmailConfig = &emailConfig return ret, nil } diff --git a/lib/gcpspanner/notification_channel_delivery_attempt.go b/lib/gcpspanner/notification_channel_delivery_attempt.go index 4432a0e40..79484e49a 100644 --- a/lib/gcpspanner/notification_channel_delivery_attempt.go +++ b/lib/gcpspanner/notification_channel_delivery_attempt.go @@ -16,6 +16,7 @@ package gcpspanner import ( "context" + "encoding/json" "fmt" "time" @@ -25,13 +26,45 @@ import ( const notificationChannelDeliveryAttemptTable = "NotificationChannelDeliveryAttempts" const maxDeliveryAttemptsToKeep = 10 -// NotificationChannelDeliveryAttempt represents a row in the NotificationChannelDeliveryAttempt table. -type NotificationChannelDeliveryAttempt struct { +// spannerNotificationChannelDeliveryAttempt represents a row in the spannerNotificationChannelDeliveryAttempt table. 
+type spannerNotificationChannelDeliveryAttempt struct { ID string `spanner:"ID"` ChannelID string `spanner:"ChannelID"` AttemptTimestamp time.Time `spanner:"AttemptTimestamp"` Status NotificationChannelDeliveryAttemptStatus `spanner:"Status"` Details spanner.NullJSON `spanner:"Details"` + AttemptDetails *AttemptDetails `spanner:"-"` +} + +func (s spannerNotificationChannelDeliveryAttempt) toPublic() (*NotificationChannelDeliveryAttempt, error) { + var attemptDetails *AttemptDetails + if s.Details.Valid { + attemptDetails = new(AttemptDetails) + b, err := json.Marshal(s.Details.Value) + if err != nil { + return nil, err + } + err = json.Unmarshal(b, &attemptDetails) + if err != nil { + return nil, err + } + } + + return &NotificationChannelDeliveryAttempt{ + ID: s.ID, + ChannelID: s.ChannelID, + AttemptTimestamp: s.AttemptTimestamp, + Status: s.Status, + AttemptDetails: attemptDetails, + }, nil +} + +type NotificationChannelDeliveryAttempt struct { + ID string `spanner:"ID"` + ChannelID string `spanner:"ChannelID"` + AttemptTimestamp time.Time `spanner:"AttemptTimestamp"` + Status NotificationChannelDeliveryAttemptStatus `spanner:"Status"` + AttemptDetails *AttemptDetails `spanner:"AttemptDetails"` } type NotificationChannelDeliveryAttemptStatus string @@ -71,13 +104,14 @@ func (m notificationChannelDeliveryAttemptMapper) Table() string { func (m notificationChannelDeliveryAttemptMapper) NewEntity( id string, - req CreateNotificationChannelDeliveryAttemptRequest) (NotificationChannelDeliveryAttempt, error) { - return NotificationChannelDeliveryAttempt{ + req CreateNotificationChannelDeliveryAttemptRequest) (spannerNotificationChannelDeliveryAttempt, error) { + return spannerNotificationChannelDeliveryAttempt{ ID: id, ChannelID: req.ChannelID, AttemptTimestamp: req.AttemptTimestamp, Status: req.Status, Details: req.Details, + AttemptDetails: nil, }, nil } @@ -132,7 +166,8 @@ type notificationChannelDeliveryAttemptCursor struct { } // EncodePageToken returns the ID 
of the delivery attempt as a page token. -func (m notificationChannelDeliveryAttemptMapper) EncodePageToken(item NotificationChannelDeliveryAttempt) string { +func (m notificationChannelDeliveryAttemptMapper) EncodePageToken( + item spannerNotificationChannelDeliveryAttempt) string { return encodeCursor(notificationChannelDeliveryAttemptCursor{ LastID: item.ID, LastAttemptTimestamp: item.AttemptTimestamp, @@ -144,61 +179,65 @@ func (c *Client) CreateNotificationChannelDeliveryAttempt( ctx context.Context, req CreateNotificationChannelDeliveryAttemptRequest) (*string, error) { var newID *string _, err := c.ReadWriteTransaction(ctx, func(ctx context.Context, txn *spanner.ReadWriteTransaction) error { - // 1. Create the new attempt - id, err := newEntityCreator[notificationChannelDeliveryAttemptMapper](c).createWithTransaction(ctx, txn, req) - if err != nil { - return err - } - newID = id + var err error + newID, err = c.createNotificationChannelDeliveryAttemptWithTransaction(ctx, txn, req) + + return err + }) + + return newID, err +} +func (c *Client) createNotificationChannelDeliveryAttemptWithTransaction( + ctx context.Context, txn *spanner.ReadWriteTransaction, + req CreateNotificationChannelDeliveryAttemptRequest) (*string, error) { + var newID *string + // 1. Create the new attempt + id, err := newEntityCreator[notificationChannelDeliveryAttemptMapper](c).createWithTransaction(ctx, txn, req) + if err != nil { + return nil, err + } + newID = id - // 2. Count existing attempts for the channel. Note: This count does not include the new attempt just buffered. - countStmt := spanner.NewStatement(` + // 2. Count existing attempts for the channel. Note: This count does not include the new attempt just buffered. 
+ countStmt := spanner.NewStatement(` SELECT COUNT(*) FROM NotificationChannelDeliveryAttempts WHERE ChannelID = @channelID`) - countStmt.Params["channelID"] = req.ChannelID - var count int64 - err = txn.Query(ctx, countStmt).Do(func(r *spanner.Row) error { - return r.Column(0, &count) - }) - if err != nil { - return err - } + countStmt.Params["channelID"] = req.ChannelID + var count int64 + err = txn.Query(ctx, countStmt).Do(func(r *spanner.Row) error { + return r.Column(0, &count) + }) + if err != nil { + return nil, err + } - // 3. If the pre-insert count is at the limit, fetch the oldest attempts to delete. - if count >= maxDeliveryAttemptsToKeep { - // We need to delete enough to make room for the one we are adding. - deleteCount := count - maxDeliveryAttemptsToKeep + 1 - deleteStmt := spanner.NewStatement(` + // 3. If the pre-insert count is at the limit, fetch the oldest attempts to delete. + // We need to delete enough to make room for the one we are adding. + + if count < maxDeliveryAttemptsToKeep { + return newID, nil + } + + deleteCount := count - maxDeliveryAttemptsToKeep + 1 + deleteStmt := spanner.NewStatement(` SELECT ID FROM NotificationChannelDeliveryAttempts WHERE ChannelID = @channelID ORDER BY AttemptTimestamp ASC LIMIT @deleteCount`) - deleteStmt.Params["channelID"] = req.ChannelID - deleteStmt.Params["deleteCount"] = deleteCount - - var mutations []*spanner.Mutation - err := txn.Query(ctx, deleteStmt).Do(func(r *spanner.Row) error { - var attemptID string - if err := r.Column(0, &attemptID); err != nil { - return err - } - mutations = append(mutations, - spanner.Delete(notificationChannelDeliveryAttemptTable, - spanner.Key{attemptID, req.ChannelID})) - - return nil - }) - if err != nil { - return err - } - - // 4. 
Buffer delete mutations - if len(mutations) > 0 { - return txn.BufferWrite(mutations) - } + deleteStmt.Params["channelID"] = req.ChannelID + deleteStmt.Params["deleteCount"] = deleteCount + + var mutations []*spanner.Mutation + err = txn.Query(ctx, deleteStmt).Do(func(r *spanner.Row) error { + var attemptID string + if err := r.Column(0, &attemptID); err != nil { + return err } + mutations = append(mutations, + spanner.Delete(notificationChannelDeliveryAttemptTable, + spanner.Key{attemptID, req.ChannelID})) return nil }) @@ -206,6 +245,14 @@ func (c *Client) CreateNotificationChannelDeliveryAttempt( return nil, err } + // 4. Buffer delete mutations + if len(mutations) > 0 { + err := txn.BufferWrite(mutations) + if err != nil { + return nil, err + } + } + return newID, nil } @@ -214,8 +261,13 @@ func (c *Client) GetNotificationChannelDeliveryAttempt( ctx context.Context, attemptID string, channelID string) (*NotificationChannelDeliveryAttempt, error) { key := deliveryAttemptKey{ID: attemptID, ChannelID: channelID} - return newEntityReader[notificationChannelDeliveryAttemptMapper, - NotificationChannelDeliveryAttempt, deliveryAttemptKey](c).readRowByKey(ctx, key) + attempt, err := newEntityReader[notificationChannelDeliveryAttemptMapper, + spannerNotificationChannelDeliveryAttempt, deliveryAttemptKey](c).readRowByKey(ctx, key) + if err != nil { + return nil, err + } + + return attempt.toPublic() } // ListNotificationChannelDeliveryAttempts lists all delivery attempts for a channel. 
@@ -223,5 +275,19 @@ func (c *Client) ListNotificationChannelDeliveryAttempts( ctx context.Context, req ListNotificationChannelDeliveryAttemptsRequest, ) ([]NotificationChannelDeliveryAttempt, *string, error) { - return newEntityLister[notificationChannelDeliveryAttemptMapper](c).list(ctx, req) + attempts, nextPageToken, err := newEntityLister[notificationChannelDeliveryAttemptMapper](c).list(ctx, req) + if err != nil { + return nil, nil, err + } + + publicAttempts := make([]NotificationChannelDeliveryAttempt, 0, len(attempts)) + for _, attempt := range attempts { + publicAttempt, err := attempt.toPublic() + if err != nil { + return nil, nil, err + } + publicAttempts = append(publicAttempts, *publicAttempt) + } + + return publicAttempts, nextPageToken, nil } diff --git a/lib/gcpspanner/notification_channel_delivery_attempt_test.go b/lib/gcpspanner/notification_channel_delivery_attempt_test.go index a536391a8..e34520e6a 100644 --- a/lib/gcpspanner/notification_channel_delivery_attempt_test.go +++ b/lib/gcpspanner/notification_channel_delivery_attempt_test.go @@ -43,9 +43,10 @@ func TestCreateNotificationChannelDeliveryAttempt(t *testing.T) { channelID := *channelIDPtr req := CreateNotificationChannelDeliveryAttemptRequest{ - ChannelID: channelID, - Status: "SUCCESS", - Details: spanner.NullJSON{Value: map[string]interface{}{"info": "delivered"}, Valid: true}, + ChannelID: channelID, + Status: "SUCCESS", + Details: spanner.NullJSON{Value: map[string]interface{}{ + "event_id": "evt-123", "message": "delivered"}, Valid: true}, AttemptTimestamp: time.Now(), } @@ -71,6 +72,15 @@ func TestCreateNotificationChannelDeliveryAttempt(t *testing.T) { if retrieved.AttemptTimestamp.IsZero() { t.Error("expected a non-zero commit timestamp") } + if retrieved.AttemptDetails == nil { + t.Fatal("expected details to be non-nil") + } + if retrieved.AttemptDetails.Message != "delivered" { + t.Errorf("expected details info to be 'delivered', got %s", retrieved.AttemptDetails.Message) + } 
+ if retrieved.AttemptDetails.EventID != "evt-123" { + t.Errorf("expected details eventID to be 'evt-123', got %s", retrieved.AttemptDetails.EventID) + } } func TestCreateNotificationChannelDeliveryAttemptPruning(t *testing.T) { @@ -202,3 +212,126 @@ func TestCreateNotificationChannelDeliveryAttemptConcurrency(t *testing.T) { t.Errorf("expected %d attempts, got %d", maxDeliveryAttemptsToKeep, len(attempts)) } } + +func TestListNotificationChannelDeliveryAttemptsPagination(t *testing.T) { + ctx := context.Background() + restartDatabaseContainer(t) + // We need a channel to associate the attempt with. + userID := uuid.NewString() + createReq := CreateNotificationChannelRequest{ + UserID: userID, + Name: "Test Channel", + Type: "EMAIL", + EmailConfig: &EmailConfig{Address: "test@example.com", IsVerified: true, VerificationToken: nil}, + } + channelIDPtr, err := spannerClient.CreateNotificationChannel(ctx, createReq) + if err != nil { + t.Fatalf("failed to create notification channel: %v", err) + } + channelID := *channelIDPtr + + // Create more attempts than the page size to test pagination. + totalAttempts := 5 + for i := 0; i < totalAttempts; i++ { + // The sleep is a simple way to ensure distinct AttemptTimestamps for ordering. + time.Sleep(1 * time.Millisecond) + req := CreateNotificationChannelDeliveryAttemptRequest{ + ChannelID: channelID, + Status: "SUCCESS", + Details: spanner.NullJSON{Value: nil, Valid: false}, + AttemptTimestamp: time.Now(), + } + _, err := spannerClient.CreateNotificationChannelDeliveryAttempt(ctx, req) + if err != nil { + t.Fatalf("CreateNotificationChannelDeliveryAttempt (pagination test) failed: %v", err) + } + } + + // 1. 
First Page + pageSize := 2 + listReq1 := ListNotificationChannelDeliveryAttemptsRequest{ + ChannelID: channelID, + PageSize: pageSize, + PageToken: nil, + } + attempts1, nextToken1, err := spannerClient.ListNotificationChannelDeliveryAttempts(ctx, listReq1) + if err != nil { + t.Fatalf("ListNotificationChannelDeliveryAttempts (page 1) failed: %v", err) + } + if len(attempts1) != pageSize { + t.Errorf("expected %d attempts on page 1, got %d", pageSize, len(attempts1)) + } + if nextToken1 == nil { + t.Fatal("expected a next page token, but got nil") + } + + // 2. Second Page + listReq2 := ListNotificationChannelDeliveryAttemptsRequest{ + ChannelID: channelID, + PageSize: pageSize, + PageToken: nextToken1, + } + attempts2, nextToken2, err := spannerClient.ListNotificationChannelDeliveryAttempts(ctx, listReq2) + if err != nil { + t.Fatalf("ListNotificationChannelDeliveryAttempts (page 2) failed: %v", err) + } + if len(attempts2) != pageSize { + t.Errorf("expected %d attempts on page 2, got %d", pageSize, len(attempts2)) + } + if nextToken2 == nil { + t.Fatal("expected a next page token, but got nil") + } + + // 3. 
Third and Final Page + listReq3 := ListNotificationChannelDeliveryAttemptsRequest{ + ChannelID: channelID, + PageSize: pageSize, + PageToken: nextToken2, + } + attempts3, nextToken3, err := spannerClient.ListNotificationChannelDeliveryAttempts(ctx, listReq3) + if err != nil { + t.Fatalf("ListNotificationChannelDeliveryAttempts (page 3) failed: %v", err) + } + if len(attempts3) != 1 { + t.Errorf("expected 1 attempt on page 3, got %d", len(attempts3)) + } + if nextToken3 != nil { + t.Errorf("expected no next page token, but got one: %s", *nextToken3) + } +} + +func TestToPublic(t *testing.T) { + attempt := spannerNotificationChannelDeliveryAttempt{ + ID: "test-id", + ChannelID: "test-channel-id", + AttemptTimestamp: time.Now(), + Status: DeliveryAttemptStatusSuccess, + Details: spanner.NullJSON{Value: map[string]interface{}{"message": "test-info", + "event_id": "test-event-id"}, Valid: true}, + AttemptDetails: nil, + } + + publicAttempt, err := attempt.toPublic() + if err != nil { + t.Fatalf("toPublic() failed: %v", err) + } + + if publicAttempt.ID != attempt.ID { + t.Errorf("expected ID %s, got %s", attempt.ID, publicAttempt.ID) + } + if publicAttempt.ChannelID != attempt.ChannelID { + t.Errorf("expected ChannelID %s, got %s", attempt.ChannelID, publicAttempt.ChannelID) + } + if publicAttempt.Status != attempt.Status { + t.Errorf("expected Status %s, got %s", attempt.Status, publicAttempt.Status) + } + if publicAttempt.AttemptDetails == nil { + t.Fatal("expected AttemptDetails to be non-nil") + } + if publicAttempt.AttemptDetails.Message != "test-info" { + t.Errorf("expected AttemptDetails.Message %s, got %s", "test-info", publicAttempt.AttemptDetails.Message) + } + if publicAttempt.AttemptDetails.EventID != "test-event-id" { + t.Errorf("expected AttemptDetails.EventID %s, got %s", "test-event-id", publicAttempt.AttemptDetails.EventID) + } +} diff --git a/lib/gcpspanner/notification_channel_state.go b/lib/gcpspanner/notification_channel_state.go index 
087af22b8..fe7e2351b 100644 --- a/lib/gcpspanner/notification_channel_state.go +++ b/lib/gcpspanner/notification_channel_state.go @@ -16,6 +16,7 @@ package gcpspanner import ( "context" + "errors" "fmt" "time" @@ -78,3 +79,105 @@ func (c *Client) GetNotificationChannelState( return newEntityReader[notificationChannelStateMapper, NotificationChannelState, string](c).readRowByKey(ctx, channelID) } + +// RecordNotificationChannelSuccess resets the consecutive failures count in the NotificationChannelStates table +// and logs a successful delivery attempt in the NotificationChannelDeliveryAttempts table. +func (c *Client) RecordNotificationChannelSuccess( + ctx context.Context, channelID string, timestamp time.Time, eventID string) error { + _, err := c.ReadWriteTransaction(ctx, func(ctx context.Context, txn *spanner.ReadWriteTransaction) error { + // Update NotificationChannelStates + err := newEntityWriter[notificationChannelStateMapper](c).upsertWithTransaction(ctx, txn, + NotificationChannelState{ + ChannelID: channelID, + IsDisabledBySystem: false, + ConsecutiveFailures: 0, + CreatedAt: timestamp, + UpdatedAt: timestamp, + }) + if err != nil { + return err + } + + _, err = c.createNotificationChannelDeliveryAttemptWithTransaction(ctx, txn, + CreateNotificationChannelDeliveryAttemptRequest{ + ChannelID: channelID, + AttemptTimestamp: timestamp, + Status: DeliveryAttemptStatusSuccess, + Details: spanner.NullJSON{Value: AttemptDetails{ + EventID: eventID, + Message: "delivered"}, Valid: true}, + }) + + return err + }) + + return err + +} + +// RecordNotificationChannelFailure increments the consecutive failures count in the NotificationChannelStates table +// and logs a failure delivery attempt in the NotificationChannelDeliveryAttempts table. +// If isPermanent is true, it increments the failure count and potentially disables the channel. +// If isPermanent is false (transient), it logs the error but does not penalize the channel health. 
+func (c *Client) RecordNotificationChannelFailure( + ctx context.Context, channelID string, errorMsg string, timestamp time.Time, + isPermanent bool, eventID string) error { + _, err := c.ReadWriteTransaction(ctx, func(ctx context.Context, txn *spanner.ReadWriteTransaction) error { + // Read current state + state, err := newEntityReader[notificationChannelStateMapper, NotificationChannelState, string](c). + readRowByKeyWithTransaction(ctx, channelID, txn) + if err != nil && !errors.Is(err, ErrQueryReturnedNoResults) { + return err + } else if errors.Is(err, ErrQueryReturnedNoResults) { + state = &NotificationChannelState{ + ChannelID: channelID, + CreatedAt: timestamp, + UpdatedAt: timestamp, + IsDisabledBySystem: false, + ConsecutiveFailures: 0, + } + } + + // Calculate new state + if isPermanent { + state.ConsecutiveFailures++ + } + state.UpdatedAt = timestamp + state.IsDisabledBySystem = state.ConsecutiveFailures >= int64( + c.notificationCfg.maxConsecutiveFailuresPerChannel) + + // Update NotificationChannelStates + err = newEntityWriter[notificationChannelStateMapper](c).upsertWithTransaction(ctx, + txn, NotificationChannelState{ + ChannelID: channelID, + IsDisabledBySystem: state.IsDisabledBySystem, + ConsecutiveFailures: state.ConsecutiveFailures, + CreatedAt: state.CreatedAt, + UpdatedAt: state.UpdatedAt, + }) + if err != nil { + return err + } + + // Log attempt + _, err = c.createNotificationChannelDeliveryAttemptWithTransaction(ctx, txn, + CreateNotificationChannelDeliveryAttemptRequest{ + ChannelID: channelID, + AttemptTimestamp: timestamp, + Status: DeliveryAttemptStatusFailure, + Details: spanner.NullJSON{Value: AttemptDetails{ + EventID: eventID, + Message: errorMsg}, Valid: true}, + }) + + return err + + }) + + return err +} + +type AttemptDetails struct { + Message string `json:"message"` + EventID string `json:"event_id"` +} diff --git a/lib/gcpspanner/notification_channel_state_test.go b/lib/gcpspanner/notification_channel_state_test.go index 
d9178d421..a9815a579 100644 --- a/lib/gcpspanner/notification_channel_state_test.go +++ b/lib/gcpspanner/notification_channel_state_test.go @@ -17,6 +17,7 @@ package gcpspanner import ( "context" "testing" + "time" "cloud.google.com/go/spanner" "github.com/google/go-cmp/cmp" @@ -115,4 +116,151 @@ func TestNotificationChannelStateOperations(t *testing.T) { t.Errorf("GetNotificationChannelState after update mismatch (-want +got):\n%s", diff) } }) + + t.Run("RecordNotificationChannelSuccess", func(t *testing.T) { + testRecordNotificationChannelSuccess(t, channelID) + }) + + t.Run("RecordNotificationChannelFailure", func(t *testing.T) { + testRecordNotificationChannelFailure(t, channelID) + }) +} + +func testRecordNotificationChannelSuccess(t *testing.T, channelID string) { + ctx := t.Context() + // First, set up a channel state with some failures. + initialState := &NotificationChannelState{ + ChannelID: channelID, + IsDisabledBySystem: true, + ConsecutiveFailures: 3, + CreatedAt: spanner.CommitTimestamp, + UpdatedAt: spanner.CommitTimestamp, + } + err := spannerClient.UpsertNotificationChannelState(ctx, *initialState) + if err != nil { + t.Fatalf("pre-test UpsertNotificationChannelState failed: %v", err) + } + + testTime := time.Now() + eventID := "evt-1" + err = spannerClient.RecordNotificationChannelSuccess(ctx, channelID, testTime, eventID) + if err != nil { + t.Fatalf("RecordNotificationChannelSuccess failed: %v", err) + } + + // Verify state update. + retrievedState, err := spannerClient.GetNotificationChannelState(ctx, channelID) + if err != nil { + t.Fatalf("GetNotificationChannelState after success failed: %v", err) + } + if retrievedState.IsDisabledBySystem != false { + t.Errorf("expected IsDisabledBySystem to be false, got %t", retrievedState.IsDisabledBySystem) + } + if retrievedState.ConsecutiveFailures != 0 { + t.Errorf("expected ConsecutiveFailures to be 0, got %d", retrievedState.ConsecutiveFailures) + } + + // Verify delivery attempt log. 
+ listAttemptsReq := ListNotificationChannelDeliveryAttemptsRequest{ + ChannelID: channelID, + PageSize: 1, + PageToken: nil, + } + attempts, _, err := spannerClient.ListNotificationChannelDeliveryAttempts(ctx, listAttemptsReq) + if err != nil { + t.Fatalf("ListNotificationChannelDeliveryAttempts after success failed: %v", err) + } + if len(attempts) != 1 { + t.Fatalf("expected 1 delivery attempt, got %d", len(attempts)) + } + if attempts[0].Status != DeliveryAttemptStatusSuccess { + t.Errorf("expected status SUCCESS, got %s", attempts[0].Status) + } + if attempts[0].AttemptDetails == nil || attempts[0].AttemptDetails.Message != "delivered" || + attempts[0].AttemptDetails.EventID != "evt-1" { + t.Errorf("expected details message 'delivered', got %v", attempts[0].AttemptDetails) + } +} + +func testRecordNotificationChannelFailure(t *testing.T, channelID string) { + ctx := t.Context() + // Reset state for new test + initialState := &NotificationChannelState{ + ChannelID: channelID, + IsDisabledBySystem: false, + ConsecutiveFailures: 0, + CreatedAt: spanner.CommitTimestamp, + UpdatedAt: spanner.CommitTimestamp, + } + err := spannerClient.UpsertNotificationChannelState(ctx, *initialState) + if err != nil { + t.Fatalf("pre-test UpsertNotificationChannelState failed: %v", err) + } + + t.Run("Permanent Failure", func(t *testing.T) { + _ = spannerClient.UpsertNotificationChannelState(ctx, *initialState) // Ensure clean state + testTime := time.Now() + errorMsg := "permanent error" + eventID := "evt-124" + err = spannerClient.RecordNotificationChannelFailure(ctx, channelID, errorMsg, testTime, true, eventID) + if err != nil { + t.Fatalf("RecordNotificationChannelFailure (permanent) failed: %v", err) + } + + verifyFailureAttemptAndState(t, channelID, 1, false, errorMsg, eventID) + }) + + t.Run("Transient Failure", func(t *testing.T) { + _ = spannerClient.UpsertNotificationChannelState(ctx, *initialState) // Ensure clean state + testTime := time.Now() + errorMsg := "transient 
error" + eventID := "evt-125" + err = spannerClient.RecordNotificationChannelFailure(ctx, channelID, errorMsg, testTime, false, eventID) + if err != nil { + t.Fatalf("RecordNotificationChannelFailure (transient) failed: %v", err) + } + + verifyFailureAttemptAndState(t, channelID, 0, false, errorMsg, eventID) + }) +} + +// verifyFailureAttemptAndState is a helper function to verify the state and delivery attempt after a failure. +func verifyFailureAttemptAndState(t *testing.T, channelID string, + expectedFailures int64, expectedIsDisabled bool, expectedAttemptMessage string, expectedEventID string) { + t.Helper() + ctx := t.Context() + + // Verify state update. + retrievedState, err := spannerClient.GetNotificationChannelState(ctx, channelID) + if err != nil { + t.Fatalf("GetNotificationChannelState after failure failed: %v", err) + } + if retrievedState.ConsecutiveFailures != expectedFailures { + t.Errorf("expected ConsecutiveFailures to be %d, got %d", expectedFailures, retrievedState.ConsecutiveFailures) + } + if retrievedState.IsDisabledBySystem != expectedIsDisabled { + t.Errorf("expected IsDisabledBySystem to be %t, got %t", expectedIsDisabled, retrievedState.IsDisabledBySystem) + } + + // Verify delivery attempt log. 
+ listAttemptsReq := ListNotificationChannelDeliveryAttemptsRequest{ + ChannelID: channelID, + PageSize: 1, + PageToken: nil, + } + attempts, _, err := spannerClient.ListNotificationChannelDeliveryAttempts(ctx, listAttemptsReq) + if err != nil { + t.Fatalf("ListNotificationChannelDeliveryAttempts after failure failed: %v", err) + } + if len(attempts) != 1 { + t.Fatalf("expected 1 delivery attempt, got %d", len(attempts)) + } + if attempts[0].Status != DeliveryAttemptStatusFailure { + t.Errorf("expected status FAILURE, got %s", attempts[0].Status) + } + if attempts[0].AttemptDetails == nil || attempts[0].AttemptDetails.Message != expectedAttemptMessage || + attempts[0].AttemptDetails.EventID != expectedEventID { + t.Errorf("expected details message '%s' eventID '%s', got %v", expectedAttemptMessage, expectedEventID, + attempts[0].AttemptDetails) + } } diff --git a/lib/gcpspanner/saved_search_notification_events.go b/lib/gcpspanner/saved_search_notification_events.go index ea10c8f24..c2543621e 100644 --- a/lib/gcpspanner/saved_search_notification_events.go +++ b/lib/gcpspanner/saved_search_notification_events.go @@ -133,3 +133,42 @@ func (c *Client) PublishSavedSearchNotificationEvent(ctx context.Context, return id, err } + +type savedSearchNotificationEventBySearchAndSnapshotTypeKey struct { + SavedSearchID string + SnapshotType SavedSearchSnapshotType +} + +type savedSearchNotificationEventBySearchAndSnapshotTypeMapper struct{} + +func (m savedSearchNotificationEventBySearchAndSnapshotTypeMapper) Table() string { + return savedSearchNotificationEventsTable +} + +func (m savedSearchNotificationEventBySearchAndSnapshotTypeMapper) SelectOne( + key savedSearchNotificationEventBySearchAndSnapshotTypeKey) spanner.Statement { + return spanner.Statement{ + SQL: `SELECT * FROM SavedSearchNotificationEvents + WHERE SavedSearchId = @SavedSearchId AND SnapshotType = @SnapshotType + ORDER BY Timestamp DESC + LIMIT 1`, + Params: map[string]any{ + "SavedSearchId": 
key.SavedSearchID, + "SnapshotType": key.SnapshotType, + }, + } +} + +func (c *Client) GetLatestSavedSearchNotificationEvent( + ctx context.Context, savedSearchID string, + snapshotType SavedSearchSnapshotType) (*SavedSearchNotificationEvent, error) { + r := newEntityReader[savedSearchNotificationEventBySearchAndSnapshotTypeMapper, SavedSearchNotificationEvent, + savedSearchNotificationEventBySearchAndSnapshotTypeKey](c) + + key := savedSearchNotificationEventBySearchAndSnapshotTypeKey{ + SavedSearchID: savedSearchID, + SnapshotType: snapshotType, + } + + return r.readRowByKey(ctx, key) +} diff --git a/lib/gcpspanner/saved_search_notification_events_test.go b/lib/gcpspanner/saved_search_notification_events_test.go index ebb784fd6..8ea64afa4 100644 --- a/lib/gcpspanner/saved_search_notification_events_test.go +++ b/lib/gcpspanner/saved_search_notification_events_test.go @@ -349,3 +349,55 @@ func TestGetSavedSearchNotificationEvent_NotFound(t *testing.T) { t.Errorf("expected ErrQueryReturnedNoResults, got %v", err) } } + +func TestGetLatestSavedSearchNotificationEvent(t *testing.T) { + ctx := t.Context() + restartDatabaseContainer(t) + // Insert multiple events for the same saved search and snapshot type + savedSearchID := createSavedSearchForNotificationTests(ctx, t) + snapshotType := SavedSearchSnapshotType("compat-stats") + + eventTimes := []time.Time{ + time.Date(2025, 1, 1, 10, 0, 0, 0, time.UTC), + time.Date(2025, 1, 1, 11, 0, 0, 0, time.UTC), + time.Date(2025, 1, 1, 12, 0, 0, 0, time.UTC), + } + // Setup and acquire lock + setupLockAndInitialState(ctx, t, savedSearchID, string(snapshotType), "worker-1", "path/initial", 10*time.Minute, + time.Date(2025, 1, 1, 9, 0, 0, 0, time.UTC)) + + var latestEventID string + for i, eventTime := range eventTimes { + eventID := "event-" + string(rune('A'+i)) + _, err := spannerClient.PublishSavedSearchNotificationEvent(ctx, SavedSearchNotificationCreateRequest{ + SavedSearchID: savedSearchID, + SnapshotType: snapshotType, + 
Timestamp: eventTime, + EventType: "IMMEDIATE_DIFF", + Reasons: []string{"DATA_UPDATED"}, + BlobPath: "path/" + eventID, + DiffBlobPath: "diff/path/" + eventID, + Summary: spanner.NullJSON{ + Value: nil, + Valid: false, + }, + }, "path/"+eventID, "worker-1", WithID(eventID)) + if err != nil { + t.Fatalf("PublishSavedSearchNotificationEvent() failed: %v", err) + } + latestEventID = eventID + } + + // Now retrieve the latest event + latestEvent, err := spannerClient.GetLatestSavedSearchNotificationEvent(ctx, savedSearchID, snapshotType) + if err != nil { + t.Fatalf("GetLatestSavedSearchNotificationEvent() failed: %v", err) + } + + if latestEvent.ID != latestEventID { + t.Errorf("Latest event ID mismatch: got %s, want %s", latestEvent.ID, latestEventID) + } + if !latestEvent.Timestamp.Equal(eventTimes[2]) { + t.Errorf("Latest event Timestamp mismatch: got %v, want %v", latestEvent.Timestamp, eventTimes[2]) + } +} diff --git a/lib/gcpspanner/saved_search_state.go b/lib/gcpspanner/saved_search_state.go index d2ac701bf..279134abc 100644 --- a/lib/gcpspanner/saved_search_state.go +++ b/lib/gcpspanner/saved_search_state.go @@ -37,6 +37,7 @@ const ( SavedSearchSnapshotTypeImmediate SavedSearchSnapshotType = "IMMEDIATE" SavedSearchSnapshotTypeWeekly SavedSearchSnapshotType = "WEEKLY" SavedSearchSnapshotTypeMonthly SavedSearchSnapshotType = "MONTHLY" + SavedSearchSnapshotTypeUnknown SavedSearchSnapshotType = "UNKNOWN" ) type SavedSearchState struct { diff --git a/lib/gcpspanner/saved_search_subscription.go b/lib/gcpspanner/saved_search_subscription.go index 718474ee6..a69ab756d 100644 --- a/lib/gcpspanner/saved_search_subscription.go +++ b/lib/gcpspanner/saved_search_subscription.go @@ -29,30 +29,41 @@ const savedSearchSubscriptionTable = "SavedSearchSubscriptions" // SavedSearchSubscription represents a row in the SavedSearchSubscription table. 
type SavedSearchSubscription struct { - ID string `spanner:"ID"` - ChannelID string `spanner:"ChannelID"` - SavedSearchID string `spanner:"SavedSearchID"` - Triggers []string `spanner:"Triggers"` - Frequency string `spanner:"Frequency"` - CreatedAt time.Time `spanner:"CreatedAt"` - UpdatedAt time.Time `spanner:"UpdatedAt"` + ID string `spanner:"ID"` + ChannelID string `spanner:"ChannelID"` + SavedSearchID string `spanner:"SavedSearchID"` + Triggers []SubscriptionTrigger `spanner:"Triggers"` + Frequency SavedSearchSnapshotType `spanner:"Frequency"` + CreatedAt time.Time `spanner:"CreatedAt"` + UpdatedAt time.Time `spanner:"UpdatedAt"` } +type SubscriptionTrigger string + +const ( + SubscriptionTriggerBrowserImplementationAnyComplete SubscriptionTrigger = "feature.browser_implementation." + + "any_complete" + SubscriptionTriggerFeatureBaselinePromoteToNewly SubscriptionTrigger = "feature.baseline.promote_to_newly" + SubscriptionTriggerFeatureBaselinePromoteToWidely SubscriptionTrigger = "feature.baseline.promote_to_widely" + SubscriptionTriggerFeatureBaselineRegressionToLimited SubscriptionTrigger = "feature.baseline.regression_to_limited" + SubscriptionTriggerUnknown SubscriptionTrigger = "unknown" +) + // CreateSavedSearchSubscriptionRequest is the request to create a subscription. type CreateSavedSearchSubscriptionRequest struct { UserID string ChannelID string SavedSearchID string - Triggers []string - Frequency string + Triggers []SubscriptionTrigger + Frequency SavedSearchSnapshotType } // UpdateSavedSearchSubscriptionRequest is a request to update a saved search subscription. type UpdateSavedSearchSubscriptionRequest struct { ID string UserID string - Triggers OptionallySet[[]string] - Frequency OptionallySet[string] + Triggers OptionallySet[[]SubscriptionTrigger] + Frequency OptionallySet[SavedSearchSnapshotType] } // ListSavedSearchSubscriptionsRequest is a request to list saved search subscriptions. 
@@ -304,12 +315,23 @@ func (c *Client) ListSavedSearchSubscriptions( return newEntityLister[savedSearchSubscriptionMapper](c).list(ctx, req) } +type spannerSubscriberDestination struct { + SubscriptionID string `spanner:"ID"` + UserID string `spanner:"UserID"` + ChannelID string `spanner:"ChannelID"` + Type string `spanner:"Type"` + Triggers []SubscriptionTrigger `spanner:"Triggers"` + Config spanner.NullJSON `spanner:"Config"` +} + type SubscriberDestination struct { - SubscriptionID string `spanner:"ID"` - UserID string `spanner:"UserID"` - ChannelID string `spanner:"ChannelID"` - Type string `spanner:"Type"` - Config spanner.NullJSON `spanner:"Config"` + SubscriptionID string + UserID string + ChannelID string + Type string + Triggers []SubscriptionTrigger + // If type is EMAIL, EmailConfig is set. + EmailConfig *EmailConfig } type readAllActivePushSubscriptionsMapper struct { @@ -318,7 +340,7 @@ type readAllActivePushSubscriptionsMapper struct { type activePushSubscriptionKey struct { SavedSearchID string - Frequency string + Frequency SavedSearchSnapshotType } func (m readAllActivePushSubscriptionsMapper) SelectAllByKeys(key activePushSubscriptionKey) spanner.Statement { @@ -330,6 +352,7 @@ func (m readAllActivePushSubscriptionsMapper) SelectAllByKeys(key activePushSubs sc.ID, nc.UserID, sc.ChannelID, + sc.Triggers, nc.Type, nc.Config FROM SavedSearchSubscriptions sc @@ -354,16 +377,62 @@ func (m readAllActivePushSubscriptionsMapper) SelectAllByKeys(key activePushSubs func (c *Client) FindAllActivePushSubscriptions( ctx context.Context, savedSearchID string, - frequency string, + frequency SavedSearchSnapshotType, ) ([]SubscriberDestination, error) { - return newAllByKeysEntityReader[ + values, err := newAllByKeysEntityReader[ readAllActivePushSubscriptionsMapper, activePushSubscriptionKey, - SubscriberDestination](c).readAllByKeys( + spannerSubscriberDestination](c).readAllByKeys( ctx, activePushSubscriptionKey{ SavedSearchID: savedSearchID, Frequency: 
frequency, }, ) + if err != nil { + return nil, err + } + results := make([]SubscriberDestination, 0, len(values)) + for _, v := range values { + dest := SubscriberDestination{ + SubscriptionID: v.SubscriptionID, + UserID: v.UserID, + ChannelID: v.ChannelID, + Type: v.Type, + Triggers: v.Triggers, + EmailConfig: nil, + } + subscriptionConfigs, err := loadSubscriptionConfigs(v.Type, v.Config) + if err != nil { + return nil, err + } + dest.EmailConfig = subscriptionConfigs.EmailConfig + results = append(results, dest) + } + + return results, nil +} + +// DeleteUserSubscriptions deletes all saved search subscriptions for a given list of user IDs. +// Used for E2E tests. +func (c *Client) DeleteUserSubscriptions(ctx context.Context, userIDs []string) error { + _, err := c.ReadWriteTransaction(ctx, func(ctx context.Context, txn *spanner.ReadWriteTransaction) error { + _, err := txn.Update(ctx, spanner.Statement{ + SQL: `DELETE FROM SavedSearchSubscriptions WHERE ChannelID IN + (SELECT ID FROM NotificationChannels WHERE UserID IN UNNEST(@userIDs))`, + Params: map[string]interface{}{ + "userIDs": userIDs, + }, + }) + if err != nil { + return errors.Join(ErrInternalQueryFailure, err) + } + + return nil + }) + if err != nil { + return fmt.Errorf("failed to delete user subscriptions: %w", err) + } + + return nil } diff --git a/lib/gcpspanner/saved_search_subscription_test.go b/lib/gcpspanner/saved_search_subscription_test.go index 74570ecd9..9fcd8ca8b 100644 --- a/lib/gcpspanner/saved_search_subscription_test.go +++ b/lib/gcpspanner/saved_search_subscription_test.go @@ -16,6 +16,7 @@ package gcpspanner import ( "context" + "slices" "testing" "time" @@ -58,8 +59,8 @@ func TestCreateAndGetSavedSearchSubscription(t *testing.T) { UserID: userID, ChannelID: channelID, SavedSearchID: savedSearchID, - Triggers: []string{"spec.links"}, - Frequency: "DAILY", + Triggers: []SubscriptionTrigger{SubscriptionTriggerFeatureBaselineRegressionToLimited}, + Frequency: 
SavedSearchSnapshotTypeImmediate, } subIDPtr, err := spannerClient.CreateSavedSearchSubscription(ctx, createReq) if err != nil { @@ -128,8 +129,8 @@ func TestGetSavedSearchSubscriptionFailsForWrongUser(t *testing.T) { UserID: userID, ChannelID: channelID, SavedSearchID: savedSearchID, - Triggers: []string{"baseline.status"}, - Frequency: "IMMEDIATE", + Triggers: []SubscriptionTrigger{SubscriptionTriggerFeatureBaselineRegressionToLimited}, + Frequency: SavedSearchSnapshotTypeImmediate, } subToUpdateIDPtr, err := spannerClient.CreateSavedSearchSubscription(ctx, baseCreateReq) @@ -178,8 +179,8 @@ func TestUpdateSavedSearchSubscription(t *testing.T) { UserID: userID, ChannelID: channelID, SavedSearchID: savedSearchID, - Triggers: []string{"baseline.status"}, - Frequency: "IMMEDIATE", + Triggers: []SubscriptionTrigger{SubscriptionTriggerFeatureBaselinePromoteToNewly}, + Frequency: SavedSearchSnapshotTypeImmediate, } subToUpdateIDPtr, err := spannerClient.CreateSavedSearchSubscription(ctx, baseCreateReq) @@ -189,10 +190,11 @@ func TestUpdateSavedSearchSubscription(t *testing.T) { subToUpdateID := *subToUpdateIDPtr updateReq := UpdateSavedSearchSubscriptionRequest{ - ID: subToUpdateID, - UserID: userID, - Triggers: OptionallySet[[]string]{Value: []string{"developer_signals.upvotes"}, IsSet: true}, - Frequency: OptionallySet[string]{Value: "WEEKLY_DIGEST", IsSet: true}, + ID: subToUpdateID, + UserID: userID, + Triggers: OptionallySet[[]SubscriptionTrigger]{Value: []SubscriptionTrigger{ + SubscriptionTriggerBrowserImplementationAnyComplete}, IsSet: true}, + Frequency: OptionallySet[SavedSearchSnapshotType]{Value: SavedSearchSnapshotTypeWeekly, IsSet: true}, } err = spannerClient.UpdateSavedSearchSubscription(ctx, updateReq) if err != nil { @@ -203,9 +205,13 @@ func TestUpdateSavedSearchSubscription(t *testing.T) { if err != nil { t.Fatalf("GetSavedSearchSubscription after update failed: %v", err) } - if retrieved.Frequency != "WEEKLY_DIGEST" { + if retrieved.Frequency != 
SavedSearchSnapshotTypeWeekly { t.Errorf("expected updated frequency, got %s", retrieved.Frequency) } + expectedTriggers := []SubscriptionTrigger{SubscriptionTriggerBrowserImplementationAnyComplete} + if diff := cmp.Diff(expectedTriggers, retrieved.Triggers); diff != "" { + t.Errorf("updated triggers mismatch (-want +got):\n%s", diff) + } } func TestDeleteSavedSearchSubscription(t *testing.T) { @@ -242,8 +248,8 @@ func TestDeleteSavedSearchSubscription(t *testing.T) { UserID: userID, ChannelID: channelID, SavedSearchID: savedSearchID, - Triggers: []string{"baseline.status"}, - Frequency: "IMMEDIATE", + Triggers: []SubscriptionTrigger{SubscriptionTriggerFeatureBaselinePromoteToNewly}, + Frequency: SavedSearchSnapshotTypeImmediate, } subToDeleteIDPtr, err := spannerClient.CreateSavedSearchSubscription(ctx, baseCreateReq) @@ -297,8 +303,8 @@ func TestListSavedSearchSubscriptions(t *testing.T) { UserID: userID, ChannelID: channelID, SavedSearchID: savedSearchID, - Triggers: []string{"baseline.status"}, - Frequency: "IMMEDIATE", + Triggers: []SubscriptionTrigger{SubscriptionTriggerFeatureBaselinePromoteToNewly}, + Frequency: SavedSearchSnapshotTypeImmediate, } // Create a few subscriptions to list @@ -410,7 +416,13 @@ func TestFindAllActivePushSubscriptions(t *testing.T) { // Subscription 1: Correct, on active EMAIL channel _, err = spannerClient.CreateSavedSearchSubscription(ctx, CreateSavedSearchSubscriptionRequest{ - UserID: userID, ChannelID: emailChannelID, SavedSearchID: savedSearchID, Frequency: "IMMEDIATE", Triggers: nil, + UserID: userID, ChannelID: emailChannelID, SavedSearchID: savedSearchID, + Frequency: SavedSearchSnapshotTypeImmediate, Triggers: []SubscriptionTrigger{ + SubscriptionTriggerFeatureBaselineRegressionToLimited, + SubscriptionTriggerBrowserImplementationAnyComplete, + SubscriptionTriggerFeatureBaselinePromoteToNewly, + SubscriptionTriggerFeatureBaselinePromoteToWidely, + }, }) if err != nil { t.Fatalf("failed to create sub 1: %v", err) @@ 
-419,7 +431,8 @@ func TestFindAllActivePushSubscriptions(t *testing.T) { // Subscription 2: Correct, on active WEBHOOK channel // TODO: Enable webhook channel tests once webhooks are supported. // _, err = spannerClient.CreateSavedSearchSubscription(ctx, CreateSavedSearchSubscriptionRequest{ - // UserID: userID, ChannelID: webhookChannelID, SavedSearchID: savedSearchID, Frequency: "IMMEDIATE", + // UserID: userID, ChannelID: webhookChannelID, SavedSearchID: savedSearchID, + // Frequency: SavedSearchSnapshotTypeImmediate, // }) // if err != nil { // t.Fatalf("failed to create sub 2: %v", err) @@ -435,7 +448,8 @@ func TestFindAllActivePushSubscriptions(t *testing.T) { // Subscription 4: Non-push channel (RSS) _, err = spannerClient.CreateSavedSearchSubscription(ctx, CreateSavedSearchSubscriptionRequest{ - UserID: userID, ChannelID: rssChannelID, SavedSearchID: savedSearchID, Frequency: "IMMEDIATE", Triggers: nil, + UserID: userID, ChannelID: rssChannelID, SavedSearchID: savedSearchID, + Frequency: SavedSearchSnapshotTypeImmediate, Triggers: nil, }) if err != nil { t.Fatalf("failed to create sub 4: %v", err) @@ -443,15 +457,17 @@ func TestFindAllActivePushSubscriptions(t *testing.T) { // Subscription 5: Disabled channel _, err = spannerClient.CreateSavedSearchSubscription(ctx, CreateSavedSearchSubscriptionRequest{ - UserID: userID, ChannelID: disabledChannelID, SavedSearchID: savedSearchID, Frequency: "IMMEDIATE", - Triggers: nil, + UserID: userID, ChannelID: disabledChannelID, SavedSearchID: savedSearchID, + Frequency: SavedSearchSnapshotTypeImmediate, + Triggers: nil, }) if err != nil { t.Fatalf("failed to create sub 5: %v", err) } // Find subscribers - subscribers, err := spannerClient.FindAllActivePushSubscriptions(ctx, savedSearchID, "IMMEDIATE") + subscribers, err := spannerClient.FindAllActivePushSubscriptions(ctx, savedSearchID, + SavedSearchSnapshotTypeImmediate) if err != nil { t.Fatalf("FindAllActivePushSubscriptions failed: %v", err) } @@ -467,6 +483,34 @@ 
func TestFindAllActivePushSubscriptions(t *testing.T) { // foundWebhook := false for _, sub := range subscribers { if sub.ChannelID == emailChannelID { + // Do the comparison for the email subscriber details + if sub.Type != "EMAIL" { + t.Errorf("expected EMAIL type for email channel, got %s", sub.Type) + + continue + } + if sub.EmailConfig == nil { + t.Error("expected EmailConfig to be set for email subscriber, got nil") + + continue + } + if sub.EmailConfig.Address != "active@example.com" { + t.Errorf("expected address to be active@example.com, got %s", sub.EmailConfig.Address) + + continue + } + expectedTriggers := []SubscriptionTrigger{ + SubscriptionTriggerFeatureBaselineRegressionToLimited, + SubscriptionTriggerBrowserImplementationAnyComplete, + SubscriptionTriggerFeatureBaselinePromoteToNewly, + SubscriptionTriggerFeatureBaselinePromoteToWidely, + } + if !slices.Equal(expectedTriggers, sub.Triggers) { + t.Errorf("expected triggers %v, got %v", expectedTriggers, sub.Triggers) + + continue + } + foundEmail = true } // if sub.ChannelID == webhookChannelID { diff --git a/lib/gcpspanner/spanneradapters/backend.go b/lib/gcpspanner/spanneradapters/backend.go index e3c2841c3..8ca58c377 100644 --- a/lib/gcpspanner/spanneradapters/backend.go +++ b/lib/gcpspanner/spanneradapters/backend.go @@ -1332,29 +1332,49 @@ func (s *Backend) GetIDFromFeatureKey( return id, nil } -func backendTriggersToSpannerTriggers(backendTriggers []backend.SubscriptionTriggerWritable) []string { - triggers := make([]string, 0, len(backendTriggers)) +func backendTriggersToSpannerTriggers( + backendTriggers []backend.SubscriptionTriggerWritable) []gcpspanner.SubscriptionTrigger { + triggers := make([]gcpspanner.SubscriptionTrigger, 0, len(backendTriggers)) for _, trigger := range backendTriggers { - triggers = append(triggers, string(trigger)) + triggers = append(triggers, toSpannerSubscriptionTrigger(trigger)) } return triggers } -func spannerTriggersToBackendTriggers(spannerTriggers []string) 
[]backend.SubscriptionTriggerResponseItem { +func spannerTriggersToBackendTriggers( + spannerTriggers []gcpspanner.SubscriptionTrigger) []backend.SubscriptionTriggerResponseItem { triggers := make([]backend.SubscriptionTriggerResponseItem, 0, len(spannerTriggers)) for _, trigger := range spannerTriggers { - input := backend.SubscriptionTriggerWritable(trigger) - switch input { - case backend.SubscriptionTriggerFeatureAnyBrowserImplementationComplete, - backend.SubscriptionTriggerFeatureBaselineLimitedToNewly, - backend.SubscriptionTriggerFeatureBaselineRegressionNewlyToLimited: + switch trigger { + case gcpspanner.SubscriptionTriggerBrowserImplementationAnyComplete: triggers = append(triggers, backend.SubscriptionTriggerResponseItem{ - Value: backendtypes.AttemptToStoreSubscriptionTrigger(input), + Value: backendtypes.AttemptToStoreSubscriptionTrigger( + backend.SubscriptionTriggerFeatureBrowserImplementationAnyComplete), RawValue: nil, }) + case gcpspanner.SubscriptionTriggerFeatureBaselinePromoteToNewly: + triggers = append(triggers, backend.SubscriptionTriggerResponseItem{ + Value: backendtypes.AttemptToStoreSubscriptionTrigger( + backend.SubscriptionTriggerFeatureBaselineToNewly), + RawValue: nil, + }) + case gcpspanner.SubscriptionTriggerFeatureBaselinePromoteToWidely: + triggers = append(triggers, backend.SubscriptionTriggerResponseItem{ + Value: backendtypes.AttemptToStoreSubscriptionTrigger( + backend.SubscriptionTriggerFeatureBaselineToWidely), + RawValue: nil, + }) + case gcpspanner.SubscriptionTriggerFeatureBaselineRegressionToLimited: + triggers = append(triggers, backend.SubscriptionTriggerResponseItem{ + Value: backendtypes.AttemptToStoreSubscriptionTrigger( + backend.SubscriptionTriggerFeatureBaselineRegressionToLimited), + RawValue: nil, + }) + case gcpspanner.SubscriptionTriggerUnknown: + fallthrough default: - value := trigger + value := string(trigger) triggers = append(triggers, backend.SubscriptionTriggerResponseItem{ Value: 
backendtypes.AttemptToStoreSubscriptionTriggerUnknown(), RawValue: &value, @@ -1365,14 +1385,63 @@ func spannerTriggersToBackendTriggers(spannerTriggers []string) []backend.Subscr return triggers } +func toBackendSubscriptionFrequency(freq gcpspanner.SavedSearchSnapshotType) backend.SubscriptionFrequency { + switch freq { + case gcpspanner.SavedSearchSnapshotTypeImmediate: + return backend.SubscriptionFrequencyImmediate + case gcpspanner.SavedSearchSnapshotTypeWeekly: + return backend.SubscriptionFrequencyWeekly + case gcpspanner.SavedSearchSnapshotTypeMonthly: + return backend.SubscriptionFrequencyMonthly + case gcpspanner.SavedSearchSnapshotTypeUnknown: + break + } + + slog.WarnContext(context.TODO(), "unknown subscription frequency from spanner", "frequency", freq) + + // Should not reach here normally. The database should not have unknown frequencies. + return backend.SubscriptionFrequencyImmediate +} + +func toSpannerSubscriptionFrequency(freq backend.SubscriptionFrequency) gcpspanner.SavedSearchSnapshotType { + switch freq { + case backend.SubscriptionFrequencyImmediate: + return gcpspanner.SavedSearchSnapshotTypeImmediate + case backend.SubscriptionFrequencyWeekly: + return gcpspanner.SavedSearchSnapshotTypeWeekly + case backend.SubscriptionFrequencyMonthly: + return gcpspanner.SavedSearchSnapshotTypeMonthly + } + + // Should not reach here normally. The http layer already checks for valid frequencies, default to unknown. 
+ return gcpspanner.SavedSearchSnapshotTypeUnknown +} + +func toSpannerSubscriptionTrigger(trigger backend.SubscriptionTriggerWritable) gcpspanner.SubscriptionTrigger { + switch trigger { + case backend.SubscriptionTriggerFeatureBrowserImplementationAnyComplete: + return gcpspanner.SubscriptionTriggerBrowserImplementationAnyComplete + case backend.SubscriptionTriggerFeatureBaselineToNewly: + return gcpspanner.SubscriptionTriggerFeatureBaselinePromoteToNewly + case backend.SubscriptionTriggerFeatureBaselineToWidely: + return gcpspanner.SubscriptionTriggerFeatureBaselinePromoteToWidely + case backend.SubscriptionTriggerFeatureBaselineRegressionToLimited: + return gcpspanner.SubscriptionTriggerFeatureBaselineRegressionToLimited + } + + // Should not reach here normally. The http layer already checks for valid triggers, default to unknown. + return gcpspanner.SubscriptionTriggerUnknown +} + func (s *Backend) CreateSavedSearchSubscription(ctx context.Context, userID string, req backend.Subscription) (*backend.SubscriptionResponse, error) { + spannerFreq := toSpannerSubscriptionFrequency(req.Frequency) createReq := gcpspanner.CreateSavedSearchSubscriptionRequest{ UserID: userID, ChannelID: req.ChannelId, SavedSearchID: req.SavedSearchId, Triggers: backendTriggersToSpannerTriggers(req.Triggers), - Frequency: string(req.Frequency), + Frequency: spannerFreq, } id, err := s.client.CreateSavedSearchSubscription(ctx, createReq) @@ -1439,11 +1508,11 @@ func (s *Backend) UpdateSavedSearchSubscription(ctx context.Context, updateReq := gcpspanner.UpdateSavedSearchSubscriptionRequest{ ID: subscriptionID, UserID: userID, - Triggers: gcpspanner.OptionallySet[[]string]{ + Triggers: gcpspanner.OptionallySet[[]gcpspanner.SubscriptionTrigger]{ IsSet: false, Value: nil, }, - Frequency: gcpspanner.OptionallySet[string]{ + Frequency: gcpspanner.OptionallySet[gcpspanner.SavedSearchSnapshotType]{ IsSet: false, Value: "", }, @@ -1452,11 +1521,12 @@ func (s *Backend) 
UpdateSavedSearchSubscription(ctx context.Context, for _, field := range req.UpdateMask { switch field { case backend.UpdateSubscriptionRequestMaskTriggers: - updateReq.Triggers = gcpspanner.OptionallySet[[]string]{ + updateReq.Triggers = gcpspanner.OptionallySet[[]gcpspanner.SubscriptionTrigger]{ Value: backendTriggersToSpannerTriggers(*req.Triggers), IsSet: true} case backend.UpdateSubscriptionRequestMaskFrequency: - updateReq.Frequency = gcpspanner.OptionallySet[string]{Value: string(*req.Frequency), IsSet: true} + updateReq.Frequency = gcpspanner.OptionallySet[gcpspanner.SavedSearchSnapshotType]{ + Value: toSpannerSubscriptionFrequency(*req.Frequency), IsSet: true} } } @@ -1505,7 +1575,7 @@ func toBackendSubscription(sub *gcpspanner.SavedSearchSubscription) *backend.Sub ChannelId: sub.ChannelID, SavedSearchId: sub.SavedSearchID, Triggers: spannerTriggersToBackendTriggers(sub.Triggers), - Frequency: backend.SubscriptionFrequency(sub.Frequency), + Frequency: toBackendSubscriptionFrequency(sub.Frequency), CreatedAt: sub.CreatedAt, UpdatedAt: sub.UpdatedAt, } diff --git a/lib/gcpspanner/spanneradapters/backend_test.go b/lib/gcpspanner/spanneradapters/backend_test.go index dfbce5af4..c47e9f074 100644 --- a/lib/gcpspanner/spanneradapters/backend_test.go +++ b/lib/gcpspanner/spanneradapters/backend_test.go @@ -3653,16 +3653,17 @@ func TestCreateSavedSearchSubscription(t *testing.T) { ChannelId: channelID, SavedSearchId: savedSearchID, Triggers: []backend.SubscriptionTriggerWritable{ - backend.SubscriptionTriggerFeatureAnyBrowserImplementationComplete}, - Frequency: backend.SubscriptionFrequencyDaily, + backend.SubscriptionTriggerFeatureBrowserImplementationAnyComplete}, + Frequency: backend.SubscriptionFrequencyImmediate, }, createCfg: &mockCreateSavedSearchSubscriptionConfig{ expectedRequest: gcpspanner.CreateSavedSearchSubscriptionRequest{ UserID: userID, ChannelID: channelID, SavedSearchID: savedSearchID, - Triggers: 
[]string{"feature_any_browser_implementation_complete"}, - Frequency: string(backend.SubscriptionFrequencyDaily), + Triggers: []gcpspanner.SubscriptionTrigger{ + gcpspanner.SubscriptionTriggerBrowserImplementationAnyComplete}, + Frequency: gcpspanner.SavedSearchSnapshotTypeImmediate, }, result: valuePtr(subID), returnedError: nil, @@ -3674,8 +3675,8 @@ func TestCreateSavedSearchSubscription(t *testing.T) { ID: subID, ChannelID: channelID, SavedSearchID: savedSearchID, - Triggers: []string{"feature_any_browser_implementation_complete"}, - Frequency: string(backend.SubscriptionFrequencyDaily), + Triggers: []gcpspanner.SubscriptionTrigger{gcpspanner.SubscriptionTriggerBrowserImplementationAnyComplete}, + Frequency: gcpspanner.SavedSearchSnapshotTypeImmediate, CreatedAt: now, UpdatedAt: now, }, @@ -3688,11 +3689,11 @@ func TestCreateSavedSearchSubscription(t *testing.T) { Triggers: []backend.SubscriptionTriggerResponseItem{ { Value: backendtypes.AttemptToStoreSubscriptionTrigger( - backend.SubscriptionTriggerFeatureAnyBrowserImplementationComplete), + backend.SubscriptionTriggerFeatureBrowserImplementationAnyComplete), RawValue: nil, }, }, - Frequency: backend.SubscriptionFrequencyDaily, + Frequency: backend.SubscriptionFrequencyImmediate, CreatedAt: now, UpdatedAt: now, }, @@ -3704,16 +3705,16 @@ func TestCreateSavedSearchSubscription(t *testing.T) { ChannelId: channelID, SavedSearchId: savedSearchID, Triggers: []backend.SubscriptionTriggerWritable{ - backend.SubscriptionTriggerFeatureAnyBrowserImplementationComplete}, - Frequency: backend.SubscriptionFrequencyDaily, + backend.SubscriptionTriggerFeatureBrowserImplementationAnyComplete}, + Frequency: backend.SubscriptionFrequencyImmediate, }, createCfg: &mockCreateSavedSearchSubscriptionConfig{ expectedRequest: gcpspanner.CreateSavedSearchSubscriptionRequest{ UserID: userID, ChannelID: channelID, SavedSearchID: savedSearchID, - Triggers: []string{"feature_any_browser_implementation_complete"}, - Frequency: 
string(backend.SubscriptionFrequencyDaily), + Triggers: []gcpspanner.SubscriptionTrigger{gcpspanner.SubscriptionTriggerBrowserImplementationAnyComplete}, + Frequency: gcpspanner.SavedSearchSnapshotTypeImmediate, }, result: nil, returnedError: gcpspanner.ErrMissingRequiredRole, @@ -3728,16 +3729,16 @@ func TestCreateSavedSearchSubscription(t *testing.T) { ChannelId: channelID, SavedSearchId: savedSearchID, Triggers: []backend.SubscriptionTriggerWritable{ - backend.SubscriptionTriggerFeatureAnyBrowserImplementationComplete}, - Frequency: backend.SubscriptionFrequencyDaily, + backend.SubscriptionTriggerFeatureBrowserImplementationAnyComplete}, + Frequency: backend.SubscriptionFrequencyImmediate, }, createCfg: &mockCreateSavedSearchSubscriptionConfig{ expectedRequest: gcpspanner.CreateSavedSearchSubscriptionRequest{ UserID: userID, ChannelID: channelID, SavedSearchID: savedSearchID, - Triggers: []string{"feature_any_browser_implementation_complete"}, - Frequency: string(backend.SubscriptionFrequencyDaily), + Triggers: []gcpspanner.SubscriptionTrigger{gcpspanner.SubscriptionTriggerBrowserImplementationAnyComplete}, + Frequency: gcpspanner.SavedSearchSnapshotTypeImmediate, }, result: nil, returnedError: errTest, @@ -3797,8 +3798,8 @@ func TestListSavedSearchSubscriptions(t *testing.T) { ID: "sub1", ChannelID: "chan1", SavedSearchID: "search1", - Triggers: []string{"feature_any_browser_implementation_complete"}, - Frequency: "daily", + Triggers: []gcpspanner.SubscriptionTrigger{gcpspanner.SubscriptionTriggerBrowserImplementationAnyComplete}, + Frequency: gcpspanner.SavedSearchSnapshotTypeImmediate, CreatedAt: now, UpdatedAt: now, }, @@ -3815,11 +3816,11 @@ func TestListSavedSearchSubscriptions(t *testing.T) { Triggers: []backend.SubscriptionTriggerResponseItem{ { Value: backendtypes.AttemptToStoreSubscriptionTrigger( - backend.SubscriptionTriggerFeatureAnyBrowserImplementationComplete), + backend.SubscriptionTriggerFeatureBrowserImplementationAnyComplete), RawValue: nil, 
}, }, - Frequency: "daily", + Frequency: backend.SubscriptionFrequencyImmediate, CreatedAt: now, UpdatedAt: now, }, @@ -3890,8 +3891,8 @@ func TestGetSavedSearchSubscription(t *testing.T) { ID: subID, ChannelID: "chan1", SavedSearchID: "search1", - Triggers: []string{"feature_any_browser_implementation_complete"}, - Frequency: "daily", + Triggers: []gcpspanner.SubscriptionTrigger{gcpspanner.SubscriptionTriggerBrowserImplementationAnyComplete}, + Frequency: gcpspanner.SavedSearchSnapshotTypeImmediate, CreatedAt: now, UpdatedAt: now, }, @@ -3904,11 +3905,11 @@ func TestGetSavedSearchSubscription(t *testing.T) { Triggers: []backend.SubscriptionTriggerResponseItem{ { Value: backendtypes.AttemptToStoreSubscriptionTrigger( - backend.SubscriptionTriggerFeatureAnyBrowserImplementationComplete), + backend.SubscriptionTriggerFeatureBrowserImplementationAnyComplete), RawValue: nil, }, }, - Frequency: "daily", + Frequency: backend.SubscriptionFrequencyImmediate, CreatedAt: now, UpdatedAt: now, }, @@ -3964,10 +3965,11 @@ func TestUpdateSavedSearchSubscription(t *testing.T) { ) now := time.Now() updatedTriggers := []backend.SubscriptionTriggerWritable{ - backend.SubscriptionTriggerFeatureBaselineLimitedToNewly, - backend.SubscriptionTriggerFeatureBaselineRegressionNewlyToLimited, + backend.SubscriptionTriggerFeatureBaselineToNewly, + backend.SubscriptionTriggerFeatureBaselineRegressionToLimited, } - updatedFrequency := backend.SubscriptionFrequencyDaily + updatedFrequency := backend.SubscriptionFrequencyImmediate + updatedSpannerFrequency := gcpspanner.SavedSearchSnapshotTypeImmediate testCases := []struct { name string @@ -3989,13 +3991,13 @@ func TestUpdateSavedSearchSubscription(t *testing.T) { expectedRequest: gcpspanner.UpdateSavedSearchSubscriptionRequest{ ID: subID, UserID: userID, - Triggers: gcpspanner.OptionallySet[[]string]{ - Value: []string{ - "feature_baseline_limited_to_newly", - "feature_baseline_regression_newly_to_limited", + Triggers: 
gcpspanner.OptionallySet[[]gcpspanner.SubscriptionTrigger]{ + Value: []gcpspanner.SubscriptionTrigger{ + gcpspanner.SubscriptionTriggerFeatureBaselinePromoteToNewly, + gcpspanner.SubscriptionTriggerFeatureBaselineRegressionToLimited, }, IsSet: true, }, - Frequency: gcpspanner.OptionallySet[string]{IsSet: false, Value: ""}, + Frequency: gcpspanner.OptionallySet[gcpspanner.SavedSearchSnapshotType]{IsSet: false, Value: ""}, }, returnedError: nil, }, @@ -4003,11 +4005,12 @@ func TestUpdateSavedSearchSubscription(t *testing.T) { expectedSubscriptionID: subID, expectedUserID: userID, result: &gcpspanner.SavedSearchSubscription{ - ID: subID, - Triggers: []string{"feature_baseline_limited_to_newly"}, + ID: subID, + Triggers: []gcpspanner.SubscriptionTrigger{ + gcpspanner.SubscriptionTriggerFeatureBaselinePromoteToNewly}, ChannelID: "channel", SavedSearchID: "savedsearch", - Frequency: "daily", + Frequency: gcpspanner.SavedSearchSnapshotTypeImmediate, CreatedAt: now, UpdatedAt: now, }, @@ -4018,13 +4021,13 @@ func TestUpdateSavedSearchSubscription(t *testing.T) { Triggers: []backend.SubscriptionTriggerResponseItem{ { Value: backendtypes.AttemptToStoreSubscriptionTrigger( - backend.SubscriptionTriggerFeatureBaselineLimitedToNewly), + backend.SubscriptionTriggerFeatureBaselineToNewly), RawValue: nil, }, }, ChannelId: "channel", SavedSearchId: "savedsearch", - Frequency: "daily", + Frequency: backend.SubscriptionFrequencyImmediate, CreatedAt: now, UpdatedAt: now, }, @@ -4042,9 +4045,9 @@ func TestUpdateSavedSearchSubscription(t *testing.T) { expectedRequest: gcpspanner.UpdateSavedSearchSubscriptionRequest{ ID: subID, UserID: userID, - Triggers: gcpspanner.OptionallySet[[]string]{IsSet: false, Value: nil}, - Frequency: gcpspanner.OptionallySet[string]{ - Value: "daily", IsSet: true, + Triggers: gcpspanner.OptionallySet[[]gcpspanner.SubscriptionTrigger]{IsSet: false, Value: nil}, + Frequency: gcpspanner.OptionallySet[gcpspanner.SavedSearchSnapshotType]{ + Value: 
gcpspanner.SavedSearchSnapshotTypeImmediate, IsSet: true, }, }, returnedError: nil, @@ -4056,10 +4059,11 @@ func TestUpdateSavedSearchSubscription(t *testing.T) { ID: subID, ChannelID: "channel", SavedSearchID: "savedsearchid", - Triggers: []string{"feature_any_browser_implementation_complete"}, - Frequency: string(updatedFrequency), - CreatedAt: now, - UpdatedAt: now, + Triggers: []gcpspanner.SubscriptionTrigger{ + gcpspanner.SubscriptionTriggerBrowserImplementationAnyComplete}, + Frequency: updatedSpannerFrequency, + CreatedAt: now, + UpdatedAt: now, }, returnedError: nil, }, @@ -4070,7 +4074,7 @@ func TestUpdateSavedSearchSubscription(t *testing.T) { Triggers: []backend.SubscriptionTriggerResponseItem{ { Value: backendtypes.AttemptToStoreSubscriptionTrigger( - backend.SubscriptionTriggerFeatureAnyBrowserImplementationComplete), + backend.SubscriptionTriggerFeatureBrowserImplementationAnyComplete), RawValue: nil, }, }, @@ -4092,13 +4096,13 @@ func TestUpdateSavedSearchSubscription(t *testing.T) { expectedRequest: gcpspanner.UpdateSavedSearchSubscriptionRequest{ ID: subID, UserID: userID, - Triggers: gcpspanner.OptionallySet[[]string]{ - Value: []string{ - "feature_baseline_limited_to_newly", - "feature_baseline_regression_newly_to_limited", + Triggers: gcpspanner.OptionallySet[[]gcpspanner.SubscriptionTrigger]{ + Value: []gcpspanner.SubscriptionTrigger{ + gcpspanner.SubscriptionTriggerFeatureBaselinePromoteToNewly, + gcpspanner.SubscriptionTriggerFeatureBaselineRegressionToLimited, }, IsSet: true, }, - Frequency: gcpspanner.OptionallySet[string]{ + Frequency: gcpspanner.OptionallySet[gcpspanner.SavedSearchSnapshotType]{ Value: "", IsSet: false, }, @@ -4223,7 +4227,7 @@ func assertUnknownTrigger(t *testing.T, itemIndex int, func TestSpannerTriggersToBackendTriggers(t *testing.T) { testCases := []struct { name string - inputTriggers []string + inputTriggers []gcpspanner.SubscriptionTrigger expectedItems []struct { IsUnknown bool Value string @@ -4232,9 +4236,9 @@ 
func TestSpannerTriggersToBackendTriggers(t *testing.T) { }{ { name: "All Valid Triggers", - inputTriggers: []string{ - string(backend.SubscriptionTriggerFeatureAnyBrowserImplementationComplete), - string(backend.SubscriptionTriggerFeatureBaselineLimitedToNewly), + inputTriggers: []gcpspanner.SubscriptionTrigger{ + gcpspanner.SubscriptionTriggerBrowserImplementationAnyComplete, + gcpspanner.SubscriptionTriggerFeatureBaselinePromoteToNewly, }, expectedItems: []struct { IsUnknown bool @@ -4242,19 +4246,19 @@ func TestSpannerTriggersToBackendTriggers(t *testing.T) { RawValue *string }{ {IsUnknown: false, - Value: string(backend.SubscriptionTriggerFeatureAnyBrowserImplementationComplete), + Value: string(backend.SubscriptionTriggerFeatureBrowserImplementationAnyComplete), RawValue: nil}, {IsUnknown: false, - Value: string(backend.SubscriptionTriggerFeatureBaselineLimitedToNewly), + Value: string(backend.SubscriptionTriggerFeatureBaselineToNewly), RawValue: nil}, }, }, { name: "Mixed Valid and Unknown Triggers", - inputTriggers: []string{ - string(backend.SubscriptionTriggerFeatureAnyBrowserImplementationComplete), + inputTriggers: []gcpspanner.SubscriptionTrigger{ + gcpspanner.SubscriptionTriggerBrowserImplementationAnyComplete, "deprecated_trigger", - string(backend.SubscriptionTriggerFeatureBaselineRegressionNewlyToLimited), + gcpspanner.SubscriptionTriggerFeatureBaselineRegressionToLimited, "another_unknown", }, expectedItems: []struct { @@ -4263,13 +4267,13 @@ func TestSpannerTriggersToBackendTriggers(t *testing.T) { RawValue *string }{ {IsUnknown: false, - Value: string(backend.SubscriptionTriggerFeatureAnyBrowserImplementationComplete), + Value: string(backend.SubscriptionTriggerFeatureBrowserImplementationAnyComplete), RawValue: nil}, {IsUnknown: true, Value: string(backend.EnumUnknownValue), RawValue: valuePtr("deprecated_trigger")}, {IsUnknown: false, - Value: string(backend.SubscriptionTriggerFeatureBaselineRegressionNewlyToLimited), + Value: 
string(backend.SubscriptionTriggerFeatureBaselineRegressionToLimited), RawValue: nil}, {IsUnknown: true, Value: string(backend.EnumUnknownValue), @@ -4278,7 +4282,7 @@ func TestSpannerTriggersToBackendTriggers(t *testing.T) { }, { name: "All Unknown Triggers", - inputTriggers: []string{"unknown1", "unknown2"}, + inputTriggers: []gcpspanner.SubscriptionTrigger{"unknown1", "unknown2"}, expectedItems: []struct { IsUnknown bool Value string @@ -4290,7 +4294,7 @@ func TestSpannerTriggersToBackendTriggers(t *testing.T) { }, { name: "Empty Triggers", - inputTriggers: []string{}, + inputTriggers: []gcpspanner.SubscriptionTrigger{}, expectedItems: []struct { IsUnknown bool Value string diff --git a/lib/gcpspanner/spanneradapters/email_worker.go b/lib/gcpspanner/spanneradapters/email_worker.go new file mode 100644 index 000000000..74d63c68d --- /dev/null +++ b/lib/gcpspanner/spanneradapters/email_worker.go @@ -0,0 +1,49 @@ +// Copyright 2025 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +package spanneradapters + +import ( + "context" + "time" +) + +type EmailWorkerSpannerClient interface { + RecordNotificationChannelSuccess(ctx context.Context, channelID string, timestamp time.Time, eventID string) error + RecordNotificationChannelFailure(ctx context.Context, channelID string, errorMsg string, timestamp time.Time, + isPermanent bool, eventID string) error +} + +type EmailWorkerChannelStateManager struct { + client EmailWorkerSpannerClient +} + +func NewEmailWorkerChannelStateManager(client EmailWorkerSpannerClient) *EmailWorkerChannelStateManager { + return &EmailWorkerChannelStateManager{client: client} +} + +func (s *EmailWorkerChannelStateManager) RecordSuccess(ctx context.Context, channelID string, + timestamp time.Time, eventID string) error { + return s.client.RecordNotificationChannelSuccess(ctx, channelID, timestamp, eventID) +} + +func (s *EmailWorkerChannelStateManager) RecordFailure(ctx context.Context, channelID string, err error, + timestamp time.Time, permanentUserFailure bool, emailEventID string) error { + msg := "" + if err != nil { + msg = err.Error() + } + + return s.client.RecordNotificationChannelFailure(ctx, channelID, msg, timestamp, permanentUserFailure, emailEventID) +} diff --git a/lib/gcpspanner/spanneradapters/email_worker_test.go b/lib/gcpspanner/spanneradapters/email_worker_test.go new file mode 100644 index 000000000..c25264dc8 --- /dev/null +++ b/lib/gcpspanner/spanneradapters/email_worker_test.go @@ -0,0 +1,133 @@ +// Copyright 2025 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+// See the License for the specific language governing permissions and +// limitations under the License. + +package spanneradapters + +import ( + "context" + "errors" + "testing" + "time" + + "github.com/google/go-cmp/cmp" +) + +type mockEmailWorkerSpannerClient struct { + successCalled bool + successReq struct { + ChannelID string + Timestamp time.Time + EventID string + } + successErr error + + failureCalled bool + failureReq struct { + ChannelID string + Msg string + Timestamp time.Time + IsPermanent bool + EventID string + } + failureErr error +} + +func (m *mockEmailWorkerSpannerClient) RecordNotificationChannelSuccess( + _ context.Context, channelID string, timestamp time.Time, eventID string) error { + m.successCalled = true + m.successReq.ChannelID = channelID + m.successReq.Timestamp = timestamp + m.successReq.EventID = eventID + + return m.successErr +} + +func (m *mockEmailWorkerSpannerClient) RecordNotificationChannelFailure( + _ context.Context, channelID, errorMsg string, timestamp time.Time, isPermanent bool, eventID string) error { + m.failureCalled = true + m.failureReq.ChannelID = channelID + m.failureReq.Msg = errorMsg + m.failureReq.Timestamp = timestamp + m.failureReq.IsPermanent = isPermanent + m.failureReq.EventID = eventID + + return m.failureErr +} + +func TestRecordSuccess(t *testing.T) { + mock := new(mockEmailWorkerSpannerClient) + adapter := NewEmailWorkerChannelStateManager(mock) + + ts := time.Now() + eventID := "evt-1" + err := adapter.RecordSuccess(context.Background(), "chan-1", ts, eventID) + if err != nil { + t.Fatalf("unexpected error: %v", err) + } + + if !mock.successCalled { + t.Error("RecordNotificationChannelSuccess not called") + } + + expectedReq := struct { + ChannelID string + Timestamp time.Time + EventID string + }{ + ChannelID: "chan-1", + Timestamp: ts, + EventID: eventID, + } + + if diff := cmp.Diff(expectedReq, mock.successReq); diff != "" { + t.Errorf("RecordNotificationChannelSuccess request mismatch (-want 
+got):\n%s", diff) + } +} + +func TestRecordFailure(t *testing.T) { + mock := new(mockEmailWorkerSpannerClient) + adapter := NewEmailWorkerChannelStateManager(mock) + + testErr := errors.New("smtp error") + ts := time.Now() + eventID := "evt-2" + isPermanent := true + + err := adapter.RecordFailure(context.Background(), "chan-1", testErr, ts, isPermanent, eventID) + if err != nil { + t.Fatalf("unexpected error: %v", err) + } + + if !mock.failureCalled { + t.Error("RecordNotificationChannelFailure not called") + } + + expectedReq := struct { + ChannelID string + Msg string + Timestamp time.Time + IsPermanent bool + EventID string + }{ + ChannelID: "chan-1", + Msg: testErr.Error(), + Timestamp: ts, + IsPermanent: isPermanent, + EventID: eventID, + } + + if diff := cmp.Diff(expectedReq, mock.failureReq); diff != "" { + t.Errorf("RecordNotificationChannelFailure request mismatch (-want +got):\n%s", diff) + } +} diff --git a/lib/gcpspanner/spanneradapters/event_producer.go b/lib/gcpspanner/spanneradapters/event_producer.go new file mode 100644 index 000000000..ff801c483 --- /dev/null +++ b/lib/gcpspanner/spanneradapters/event_producer.go @@ -0,0 +1,254 @@ +// Copyright 2025 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +package spanneradapters + +import ( + "context" + "encoding/json" + "fmt" + "log/slog" + "time" + + "cloud.google.com/go/spanner" + "github.com/GoogleChrome/webstatus.dev/lib/backendtypes" + "github.com/GoogleChrome/webstatus.dev/lib/gcpspanner" + "github.com/GoogleChrome/webstatus.dev/lib/gcpspanner/searchtypes" + "github.com/GoogleChrome/webstatus.dev/lib/gen/openapi/backend" + "github.com/GoogleChrome/webstatus.dev/lib/workertypes" +) + +type BackendAdapterForEventProducerDiffer interface { + GetFeature( + ctx context.Context, + featureID string, + wptMetricView backend.WPTMetricView, + browsers []backend.BrowserPathParam, + ) (*backendtypes.GetFeatureResult, error) + FeaturesSearch( + ctx context.Context, + pageToken *string, + pageSize int, + searchNode *searchtypes.SearchNode, + sortOrder *backend.ListFeaturesParamsSort, + wptMetricView backend.WPTMetricView, + browsers []backend.BrowserPathParam, + ) (*backend.FeaturePage, error) +} + +type EventProducerDiffer struct { + backendAdapter BackendAdapterForEventProducerDiffer +} + +type EventProducerDifferSpannerClient interface { + BackendSpannerClient +} + +// NewEventProducerDiffer constructs an adapter for the differ in the event producer service. 
+func NewEventProducerDiffer(adapter BackendAdapterForEventProducerDiffer) *EventProducerDiffer { + return &EventProducerDiffer{backendAdapter: adapter} +} + +func (e *EventProducerDiffer) GetFeature( + ctx context.Context, + featureID string) (*backendtypes.GetFeatureResult, error) { + return e.backendAdapter.GetFeature(ctx, featureID, backend.TestCounts, + backendtypes.DefaultBrowsers()) +} + +func (e *EventProducerDiffer) FetchFeatures(ctx context.Context, query string) ([]backend.Feature, error) { + parser := searchtypes.FeaturesSearchQueryParser{} + node, err := parser.Parse(query) + if err != nil { + return nil, err + } + var features []backend.Feature + + s := backend.NameAsc + var pageToken *string + for { + featurePage, err := e.backendAdapter.FeaturesSearch( + ctx, + pageToken, + // TODO: Use helper for page size https://github.com/GoogleChrome/webstatus.dev/issues/2122 + 100, + node, + &s, + // TODO: Use helper for test type https://github.com/GoogleChrome/webstatus.dev/issues/2122 + backend.TestCounts, + backendtypes.DefaultBrowsers(), + ) + if err != nil { + return nil, err + } + features = append(features, featurePage.Data...)
+ if featurePage.Metadata.NextPageToken == nil { + break + } + pageToken = featurePage.Metadata.NextPageToken + } + + return features, nil +} + +type EventProducerSpannerClient interface { + TryAcquireSavedSearchStateWorkerLock( + ctx context.Context, + savedSearchID string, + snapshotType gcpspanner.SavedSearchSnapshotType, + workerID string, + ttl time.Duration) (bool, error) + PublishSavedSearchNotificationEvent(ctx context.Context, + event gcpspanner.SavedSearchNotificationCreateRequest, newStatePath, workerID string, + opts ...gcpspanner.CreateOption) (*string, error) + GetLatestSavedSearchNotificationEvent( + ctx context.Context, + savedSearchID string, + snapshotType gcpspanner.SavedSearchSnapshotType, + ) (*gcpspanner.SavedSearchNotificationEvent, error) + ReleaseSavedSearchStateWorkerLock( + ctx context.Context, + savedSearchID string, + snapshotType gcpspanner.SavedSearchSnapshotType, + workerID string) error +} + +type EventProducer struct { + client EventProducerSpannerClient +} + +func NewEventProducer(client EventProducerSpannerClient) *EventProducer { + return &EventProducer{client: client} +} + +func convertFrequencyToSnapshotType(freq workertypes.JobFrequency) gcpspanner.SavedSearchSnapshotType { + switch freq { + case workertypes.FrequencyImmediate: + return gcpspanner.SavedSearchSnapshotTypeImmediate + case workertypes.FrequencyWeekly: + return gcpspanner.SavedSearchSnapshotTypeWeekly + case workertypes.FrequencyMonthly: + return gcpspanner.SavedSearchSnapshotTypeMonthly + case workertypes.FrequencyUnknown: + return gcpspanner.SavedSearchSnapshotTypeUnknown + } + + return gcpspanner.SavedSearchSnapshotTypeUnknown +} + +func convertWorktypeReasonsToSpanner(reasons []workertypes.Reason) []string { + if reasons == nil { + return nil + } + spannerReasons := make([]string, 0, len(reasons)) + for _, r := range reasons { + spannerReasons = append(spannerReasons, string(r)) + } + + return spannerReasons +} + +func (e *EventProducer) AcquireLock(ctx 
context.Context, searchID string, frequency workertypes.JobFrequency, + workerID string, lockTTL time.Duration) error { + snapshotType := convertFrequencyToSnapshotType(frequency) + _, err := e.client.TryAcquireSavedSearchStateWorkerLock( + ctx, + searchID, + snapshotType, + workerID, + lockTTL, + ) + + return err +} + +func (e *EventProducer) GetLatestEvent(ctx context.Context, frequency workertypes.JobFrequency, + searchID string) (*workertypes.LatestEventInfo, error) { + snapshotType := convertFrequencyToSnapshotType(frequency) + + event, err := e.client.GetLatestSavedSearchNotificationEvent(ctx, searchID, snapshotType) + if err != nil { + return nil, err + } + + return &workertypes.LatestEventInfo{ + EventID: event.ID, + StateBlobPath: event.BlobPath, + }, nil +} + +func (e *EventProducer) ReleaseLock(ctx context.Context, searchID string, frequency workertypes.JobFrequency, + workerID string) error { + snapshotType := convertFrequencyToSnapshotType(frequency) + + return e.client.ReleaseSavedSearchStateWorkerLock(ctx, searchID, snapshotType, workerID) +} + +func (e *EventProducer) PublishEvent(ctx context.Context, req workertypes.PublishEventRequest) error { + var summaryObj interface{} + if req.Summary != nil { + if err := json.Unmarshal(req.Summary, &summaryObj); err != nil { + return fmt.Errorf("failed to unmarshal summary JSON: %w", err) + } + } + snapshotType := convertFrequencyToSnapshotType(req.Frequency) + _, err := e.client.PublishSavedSearchNotificationEvent(ctx, gcpspanner.SavedSearchNotificationCreateRequest{ + SavedSearchID: req.SearchID, + SnapshotType: snapshotType, + Timestamp: req.GeneratedAt, + EventType: "", // TODO: Set appropriate event type + Reasons: convertWorktypeReasonsToSpanner(req.Reasons), + BlobPath: req.StateBlobPath, + DiffBlobPath: req.DiffBlobPath, + Summary: spanner.NullJSON{Value: map[string]any{"summary": summaryObj}, Valid: req.Summary != nil}, + }, + req.StateBlobPath, + req.EventID, + gcpspanner.WithID(req.EventID), + ) + 
if err != nil { + slog.ErrorContext(ctx, "unable to publish notification event", "error", err, "eventID", req.EventID) + + return err + } + + return nil +} + +type BatchEventProducerSpannerClient interface { + ListAllSavedSearches( + ctx context.Context) ([]gcpspanner.SavedSearchBriefDetails, error) +} + +type BatchEventProducer struct { + client BatchEventProducerSpannerClient +} + +func NewBatchEventProducer(client BatchEventProducerSpannerClient) *BatchEventProducer { + return &BatchEventProducer{client: client} +} + +func (b *BatchEventProducer) ListAllSavedSearches(ctx context.Context) ([]workertypes.SearchJob, error) { + details, err := b.client.ListAllSavedSearches(ctx) + if err != nil { + return nil, err + } + + jobs := make([]workertypes.SearchJob, 0, len(details)) + for _, detail := range details { + jobs = append(jobs, workertypes.SearchJob{ID: detail.ID, Query: detail.Query}) + } + + return jobs, nil +} diff --git a/lib/gcpspanner/spanneradapters/event_producer_test.go b/lib/gcpspanner/spanneradapters/event_producer_test.go new file mode 100644 index 000000000..983a2b10b --- /dev/null +++ b/lib/gcpspanner/spanneradapters/event_producer_test.go @@ -0,0 +1,724 @@ +// Copyright 2025 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +//     http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +package spanneradapters + +import ( + "context" + "errors" + "testing" + "time" + + "cloud.google.com/go/spanner" + "github.com/GoogleChrome/webstatus.dev/lib/backendtypes" + "github.com/GoogleChrome/webstatus.dev/lib/gcpspanner" + "github.com/GoogleChrome/webstatus.dev/lib/gcpspanner/searchtypes" + "github.com/GoogleChrome/webstatus.dev/lib/gen/openapi/backend" + "github.com/GoogleChrome/webstatus.dev/lib/generic" + "github.com/GoogleChrome/webstatus.dev/lib/workertypes" + "github.com/google/go-cmp/cmp" +) + +// mockSpannerClient implements EventProducerSpannerClient for testing. +type mockSpannerClient struct { + tryAcquireLockCalled bool + acquireLockReq struct { + SavedSearchID string + SnapshotType gcpspanner.SavedSearchSnapshotType + WorkerID string + TTL time.Duration + } + acquireLockResp bool + acquireLockErr error + + releaseLockCalled bool + releaseLockReq struct { + SavedSearchID string + SnapshotType gcpspanner.SavedSearchSnapshotType + WorkerID string + } + releaseLockErr error + + publishEventCalled bool + publishEventReq struct { + Event gcpspanner.SavedSearchNotificationCreateRequest + NewStatePath string + WorkerID string + } + publishEventResp *string + publishEventErr error + + getLatestEventCalled bool + getLatestEventReq struct { + SavedSearchID string + SnapshotType gcpspanner.SavedSearchSnapshotType + } + getLatestEventResp *gcpspanner.SavedSearchNotificationEvent + getLatestEventErr error +} + +func (m *mockSpannerClient) TryAcquireSavedSearchStateWorkerLock( + _ context.Context, + savedSearchID string, + snapshotType gcpspanner.SavedSearchSnapshotType, + workerID string, + ttl time.Duration) (bool, error) { + m.tryAcquireLockCalled = true + m.acquireLockReq.SavedSearchID = savedSearchID + m.acquireLockReq.SnapshotType = snapshotType + m.acquireLockReq.WorkerID = workerID + m.acquireLockReq.TTL = ttl + + return m.acquireLockResp, m.acquireLockErr +} + +func (m *mockSpannerClient) PublishSavedSearchNotificationEvent(_ context.Context, + 
event gcpspanner.SavedSearchNotificationCreateRequest, newStatePath, workerID string, + _ ...gcpspanner.CreateOption) (*string, error) { + m.publishEventCalled = true + m.publishEventReq.Event = event + m.publishEventReq.NewStatePath = newStatePath + m.publishEventReq.WorkerID = workerID + + return m.publishEventResp, m.publishEventErr +} + +func (m *mockSpannerClient) GetLatestSavedSearchNotificationEvent( + _ context.Context, + savedSearchID string, + snapshotType gcpspanner.SavedSearchSnapshotType, +) (*gcpspanner.SavedSearchNotificationEvent, error) { + m.getLatestEventCalled = true + m.getLatestEventReq.SavedSearchID = savedSearchID + m.getLatestEventReq.SnapshotType = snapshotType + + return m.getLatestEventResp, m.getLatestEventErr +} + +func (m *mockSpannerClient) ReleaseSavedSearchStateWorkerLock( + _ context.Context, + savedSearchID string, + snapshotType gcpspanner.SavedSearchSnapshotType, + workerID string) error { + m.releaseLockCalled = true + m.releaseLockReq.SavedSearchID = savedSearchID + m.releaseLockReq.SnapshotType = snapshotType + m.releaseLockReq.WorkerID = workerID + + return m.releaseLockErr +} + +func TestEventProducer_AcquireLock(t *testing.T) { + tests := []struct { + name string + freq workertypes.JobFrequency + wantSnapshotType gcpspanner.SavedSearchSnapshotType + mockResp bool + mockErr error + wantErr bool + }{ + { + name: "Immediate maps to Immediate", + freq: workertypes.FrequencyImmediate, + wantSnapshotType: gcpspanner.SavedSearchSnapshotTypeImmediate, + mockResp: true, + mockErr: nil, + wantErr: false, + }, + { + name: "Weekly maps to Weekly", + freq: workertypes.FrequencyWeekly, + wantSnapshotType: gcpspanner.SavedSearchSnapshotTypeWeekly, + mockResp: true, + mockErr: nil, + wantErr: false, + }, + { + name: "Error propagation", + freq: workertypes.FrequencyMonthly, + wantSnapshotType: gcpspanner.SavedSearchSnapshotTypeMonthly, + mockResp: false, + mockErr: errors.New("spanner error"), + wantErr: true, + }, + } + + for _, tc := 
range tests { + t.Run(tc.name, func(t *testing.T) { + mock := new(mockSpannerClient) + mock.acquireLockResp = tc.mockResp + mock.acquireLockErr = tc.mockErr + + adapter := NewEventProducer(mock) + + err := adapter.AcquireLock(context.Background(), "search-1", tc.freq, "worker-1", time.Minute) + + if (err != nil) != tc.wantErr { + t.Errorf("AcquireLock() error = %v, wantErr %v", err, tc.wantErr) + } + + if !mock.tryAcquireLockCalled { + t.Fatal("TryAcquireSavedSearchStateWorkerLock not called") + } + + expectedReq := struct { + SavedSearchID string + SnapshotType gcpspanner.SavedSearchSnapshotType + WorkerID string + TTL time.Duration + }{ + SavedSearchID: "search-1", + SnapshotType: tc.wantSnapshotType, + WorkerID: "worker-1", + TTL: time.Minute, + } + + if diff := cmp.Diff(expectedReq, mock.acquireLockReq); diff != "" { + t.Errorf("TryAcquireSavedSearchStateWorkerLock request mismatch (-want +got):\n%s", diff) + } + }) + } +} + +func TestEventProducer_ReleaseLock(t *testing.T) { + tests := []struct { + name string + freq workertypes.JobFrequency + wantSnapshotType gcpspanner.SavedSearchSnapshotType + mockErr error + wantErr bool + }{ + { + name: "Regular release", + freq: workertypes.FrequencyImmediate, + wantSnapshotType: gcpspanner.SavedSearchSnapshotTypeImmediate, + mockErr: nil, + wantErr: false, + }, + { + name: "Error propagation", + freq: workertypes.FrequencyWeekly, + wantSnapshotType: gcpspanner.SavedSearchSnapshotTypeWeekly, + mockErr: errors.New("lock lost"), + wantErr: true, + }, + } + + for _, tc := range tests { + t.Run(tc.name, func(t *testing.T) { + mock := new(mockSpannerClient) + mock.releaseLockErr = tc.mockErr + + adapter := NewEventProducer(mock) + + err := adapter.ReleaseLock(context.Background(), "search-1", tc.freq, "worker-1") + + if (err != nil) != tc.wantErr { + t.Errorf("ReleaseLock() error = %v, wantErr %v", err, tc.wantErr) + } + + if !mock.releaseLockCalled { + t.Fatal("ReleaseSavedSearchStateWorkerLock not called") + } + + 
expectedReq := struct { + SavedSearchID string + SnapshotType gcpspanner.SavedSearchSnapshotType + WorkerID string + }{ + SavedSearchID: "search-1", + SnapshotType: tc.wantSnapshotType, + WorkerID: "worker-1", + } + + if diff := cmp.Diff(expectedReq, mock.releaseLockReq); diff != "" { + t.Errorf("ReleaseSavedSearchStateWorkerLock request mismatch (-want +got):\n%s", diff) + } + }) + } +} + +func TestEventProducer_PublishEvent(t *testing.T) { + generatedAt := time.Now() + summaryJSON := `{"added": 1, "removed": 2}` + + // Defined named struct for better type safety and comparison in tests + type expectedRequest struct { + Event gcpspanner.SavedSearchNotificationCreateRequest + NewStatePath string + WorkerID string + } + + tests := []struct { + name string + req workertypes.PublishEventRequest + mockPublishErr error + mockPublishResp *string + wantErr bool + expectCall bool + expectedReq *expectedRequest + }{ + { + name: "success", + req: workertypes.PublishEventRequest{ + EventID: "event-1", + SearchID: "search-1", + StateID: "state-1", + StateBlobPath: "gs://bucket/state", + DiffID: "diff-1", + DiffBlobPath: "gs://bucket/diff", + Summary: []byte(summaryJSON), + Reasons: []workertypes.Reason{workertypes.ReasonDataUpdated}, + Frequency: workertypes.FrequencyWeekly, + Query: "query", + GeneratedAt: generatedAt, + }, + mockPublishErr: nil, + mockPublishResp: generic.ValuePtr("new-event-id"), + wantErr: false, + expectCall: true, + expectedReq: &expectedRequest{ + Event: gcpspanner.SavedSearchNotificationCreateRequest{ + SavedSearchID: "search-1", + SnapshotType: gcpspanner.SavedSearchSnapshotTypeWeekly, + Timestamp: generatedAt, + EventType: "", // Matches TODO in implementation + Reasons: []string{"DATA_UPDATED"}, + BlobPath: "gs://bucket/state", + DiffBlobPath: "gs://bucket/diff", + Summary: spanner.NullJSON{Value: map[string]any{ + "summary": map[string]any{ + "added": float64(1), + "removed": float64(2), + }, + }, Valid: true}, + }, + NewStatePath: 
"gs://bucket/state", + WorkerID: "event-1", + }, + }, + { + name: "spanner publish error", + req: workertypes.PublishEventRequest{ + EventID: "event-1", + SearchID: "search-1", + StateID: "state-1", + StateBlobPath: "gs://bucket/state", + DiffID: "diff-1", + DiffBlobPath: "gs://bucket/diff", + Summary: []byte(summaryJSON), + Reasons: []workertypes.Reason{workertypes.ReasonDataUpdated}, + Frequency: workertypes.FrequencyWeekly, + Query: "query", + GeneratedAt: generatedAt, + }, + mockPublishErr: errors.New("spanner failure"), + mockPublishResp: nil, + wantErr: true, + expectCall: true, + expectedReq: &expectedRequest{ + Event: gcpspanner.SavedSearchNotificationCreateRequest{ + SavedSearchID: "search-1", + SnapshotType: gcpspanner.SavedSearchSnapshotTypeWeekly, + Timestamp: generatedAt, + EventType: "", + Reasons: []string{"DATA_UPDATED"}, + BlobPath: "gs://bucket/state", + DiffBlobPath: "gs://bucket/diff", + Summary: spanner.NullJSON{Value: map[string]any{ + "summary": map[string]any{ + "added": float64(1), + "removed": float64(2), + }, + }, Valid: true}, + }, + NewStatePath: "gs://bucket/state", + WorkerID: "event-1", + }, + }, + { + name: "invalid json summary", + req: workertypes.PublishEventRequest{ + EventID: "event-1", + SearchID: "search-1", + Summary: []byte("invalid-json"), + Frequency: workertypes.FrequencyWeekly, + StateID: "", + StateBlobPath: "", + DiffID: "", + DiffBlobPath: "", + Reasons: nil, + Query: "", + GeneratedAt: time.Time{}, + }, + mockPublishResp: nil, + mockPublishErr: nil, + wantErr: true, + expectCall: false, + expectedReq: nil, + }, + } + + for _, tc := range tests { + t.Run(tc.name, func(t *testing.T) { + mock := new(mockSpannerClient) + mock.publishEventErr = tc.mockPublishErr + mock.publishEventResp = tc.mockPublishResp + + adapter := NewEventProducer(mock) + + err := adapter.PublishEvent(context.Background(), tc.req) + + if (err != nil) != tc.wantErr { + t.Errorf("PublishEvent() error = %v, wantErr %v", err, tc.wantErr) + } + + if 
mock.publishEventCalled != tc.expectCall { + t.Errorf("PublishSavedSearchNotificationEvent called = %v, expected %v", mock.publishEventCalled, tc.expectCall) + } + + if tc.expectCall && tc.expectedReq != nil { + // Verify mapping by converting mock data to our expected type for type-safe comparison + actualReq := expectedRequest{ + Event: mock.publishEventReq.Event, + NewStatePath: mock.publishEventReq.NewStatePath, + WorkerID: mock.publishEventReq.WorkerID, + } + + if diff := cmp.Diff(*tc.expectedReq, actualReq); diff != "" { + t.Errorf("PublishSavedSearchNotificationEvent request mismatch (-want +got):\n%s", diff) + } + } + }) + } +} + +func TestEventProducer_GetLatestEvent(t *testing.T) { + testEvent := new(gcpspanner.SavedSearchNotificationEvent) + testEvent.ID = "event-123" + testEvent.BlobPath = "gs://bucket/blob" + + tests := []struct { + name string + freq workertypes.JobFrequency + wantSnapshotType gcpspanner.SavedSearchSnapshotType + mockResp *gcpspanner.SavedSearchNotificationEvent + mockErr error + wantInfo *workertypes.LatestEventInfo + wantErr bool + }{ + { + name: "Found event", + freq: workertypes.FrequencyWeekly, + wantSnapshotType: gcpspanner.SavedSearchSnapshotTypeWeekly, + mockResp: testEvent, + mockErr: nil, + wantInfo: &workertypes.LatestEventInfo{ + EventID: "event-123", + StateBlobPath: "gs://bucket/blob", + }, + wantErr: false, + }, + { + name: "Spanner error", + freq: workertypes.FrequencyImmediate, + wantSnapshotType: gcpspanner.SavedSearchSnapshotTypeImmediate, + mockResp: nil, + mockErr: errors.New("db error"), + wantInfo: nil, + wantErr: true, + }, + } + + for _, tc := range tests { + t.Run(tc.name, func(t *testing.T) { + mock := new(mockSpannerClient) + mock.getLatestEventResp = tc.mockResp + mock.getLatestEventErr = tc.mockErr + + adapter := NewEventProducer(mock) + + info, err := adapter.GetLatestEvent(context.Background(), tc.freq, "search-1") + + if (err != nil) != tc.wantErr { + t.Errorf("GetLatestEvent() error = %v, wantErr %v", 
err, tc.wantErr) + } + + if !mock.getLatestEventCalled { + t.Fatal("GetLatestSavedSearchNotificationEvent not called") + } + + if mock.getLatestEventReq.SavedSearchID != "search-1" { + t.Errorf("GetLatestSavedSearchNotificationEvent called with wrong ID: got %q, want %q", + mock.getLatestEventReq.SavedSearchID, "search-1") + } + if mock.getLatestEventReq.SnapshotType != tc.wantSnapshotType { + t.Errorf("GetLatestSavedSearchNotificationEvent called with wrong SnapshotType: got %v, want %v", + mock.getLatestEventReq.SnapshotType, tc.wantSnapshotType) + } + + if diff := cmp.Diff(tc.wantInfo, info); diff != "" { + t.Errorf("GetLatestEvent() mismatch (-want +got):\n%s", diff) + } + }) + } +} + +type mockBackendAdapterForEventProducer struct { + BackendSpannerClient + + getFeatureCalled bool + GetFeatureReq struct { + FeatureID string + } + getFeatureResp *backendtypes.GetFeatureResult + getFeatureErr error + + featuresSearchCalled bool + FeaturesSearchReqs []struct { + PageToken *string + PageSize int + QueryNode *searchtypes.SearchNode + } + // A sequence of pages to return. 
+ featuresSearchPages []*backend.FeaturePage + featuresSearchErr error + // Call count for search to iterate through pages + searchCallCount int +} + +func (m *mockBackendAdapterForEventProducer) GetFeature( + _ context.Context, + featureID string, + _ backend.WPTMetricView, + _ []backend.BrowserPathParam, +) (*backendtypes.GetFeatureResult, error) { + m.getFeatureCalled = true + m.GetFeatureReq.FeatureID = featureID + + return m.getFeatureResp, m.getFeatureErr +} + +func (m *mockBackendAdapterForEventProducer) FeaturesSearch( + _ context.Context, + pageToken *string, + pageSize int, + queryNode *searchtypes.SearchNode, + _ *backend.ListFeaturesParamsSort, + _ backend.WPTMetricView, + _ []backend.BrowserPathParam, +) (*backend.FeaturePage, error) { + m.featuresSearchCalled = true + m.FeaturesSearchReqs = append(m.FeaturesSearchReqs, struct { + PageToken *string + PageSize int + QueryNode *searchtypes.SearchNode + }{pageToken, pageSize, queryNode}) + + if m.featuresSearchErr != nil { + return nil, m.featuresSearchErr // Return nil for FeaturePage on error + } + + if m.searchCallCount < len(m.featuresSearchPages) { + page := m.featuresSearchPages[m.searchCallCount] + m.searchCallCount++ + + return page, nil + } + + return new(backend.FeaturePage), nil // End of results +} + +func TestEventProducerDiffer_GetFeature(t *testing.T) { + mock := new(mockBackendAdapterForEventProducer) + f := new(backend.Feature) + f.FeatureId = "fx" + f.Name = "Feature x" + mock.getFeatureResp = backendtypes.NewGetFeatureResult(backendtypes.NewRegularFeatureResult(f)) + + adapter := NewEventProducerDiffer(mock) + + res, err := adapter.GetFeature(context.Background(), "fx") + if err != nil { + t.Fatalf("GetFeature() unexpected error: %v", err) + } + + if !mock.getFeatureCalled { + t.Error("GetFeature on backend client not called") + } + if mock.GetFeatureReq.FeatureID != "fx" { + t.Errorf("GetFeature called with %q, want %q", mock.GetFeatureReq.FeatureID, "fx") + } + visitor := 
new(simpleRegularFeatureVisitor) + if err := res.Visit(context.Background(), visitor); err != nil { + t.Fatalf("Visit failed: %v", err) + } + + if visitor.feature == nil { + t.Fatal("Visitor did not receive a regular feature") + } + if visitor.feature.FeatureId != "fx" { + t.Errorf("Feature ID mismatch: got %q, want fx", visitor.feature.FeatureId) + } + if visitor.feature.Name != "Feature x" { + t.Errorf("Feature Name mismatch: got %q, want 'Feature x'", visitor.feature.Name) + } +} + +type simpleRegularFeatureVisitor struct { + feature *backend.Feature +} + +func (v *simpleRegularFeatureVisitor) VisitRegularFeature(_ context.Context, + res backendtypes.RegularFeatureResult) error { + v.feature = res.Feature() + + return nil +} +func (v *simpleRegularFeatureVisitor) VisitMovedFeature(_ context.Context, _ backendtypes.MovedFeatureResult) error { + return nil +} +func (v *simpleRegularFeatureVisitor) VisitSplitFeature(_ context.Context, _ backendtypes.SplitFeatureResult) error { + return nil +} + +func TestEventProducerDiffer_FetchFeatures(t *testing.T) { + mock := new(mockBackendAdapterForEventProducer) + feature1 := new(backend.Feature) + feature1.FeatureId = "f1" + feature1.Name = "Feature 1" + feature2 := new(backend.Feature) + feature2.FeatureId = "f2" + feature2.Name = "Feature 2" + mock.featuresSearchPages = []*backend.FeaturePage{ + // First page + { + Data: []backend.Feature{ + *feature1, + }, + Metadata: backend.PageMetadataWithTotal{ + NextPageToken: generic.ValuePtr("token-1"), + Total: 1000, + }, + }, + // Second page + { + Data: []backend.Feature{ + *feature2, + }, + Metadata: backend.PageMetadataWithTotal{ + Total: 1000, + NextPageToken: nil, // End of iteration + + }, + }, + } + + adapter := NewEventProducerDiffer(mock) + + // A simple query that parses successfully + query := "name:foo" + features, err := adapter.FetchFeatures(context.Background(), query) + if err != nil { + t.Fatalf("FetchFeatures() unexpected error: %v", err) + } + + if 
len(features) != 2 { + t.Errorf("Expected 2 features (across 2 pages), got %d", len(features)) + } + if features[0].FeatureId != "f1" || features[1].FeatureId != "f2" { + t.Error("Feature ID mismatch in results") + } + + if len(mock.FeaturesSearchReqs) != 2 { + t.Errorf("Expected 2 calls to FeaturesSearch, got %d", len(mock.FeaturesSearchReqs)) + } + // First call should have nil token + if mock.FeaturesSearchReqs[0].PageToken != nil { + t.Error("First page token should be nil") + } + // Second call should have token-1 + if mock.FeaturesSearchReqs[1].PageToken == nil || *mock.FeaturesSearchReqs[1].PageToken != "token-1" { + t.Error("Second page token mismatch") + } +} + +type mockBatchEventProducerSpannerClient struct { + listAllSavedSearchesCalled bool + listAllSavedSearchesResp []gcpspanner.SavedSearchBriefDetails + listAllSavedSearchesErr error +} + +func (m *mockBatchEventProducerSpannerClient) ListAllSavedSearches( + _ context.Context) ([]gcpspanner.SavedSearchBriefDetails, error) { + m.listAllSavedSearchesCalled = true + + return m.listAllSavedSearchesResp, m.listAllSavedSearchesErr +} + +func TestBatchEventProducer_ListAllSavedSearches(t *testing.T) { + tests := []struct { + name string + mockResp []gcpspanner.SavedSearchBriefDetails + mockErr error + wantSearchJobs []workertypes.SearchJob + wantErr bool + }{ + { + name: "success with searches", + mockResp: []gcpspanner.SavedSearchBriefDetails{{ID: "s1", Query: "q1"}, {ID: "s2", Query: "q2"}}, + mockErr: nil, + wantSearchJobs: []workertypes.SearchJob{{ID: "s1", Query: "q1"}, {ID: "s2", Query: "q2"}}, + wantErr: false, + }, + { + name: "success empty list", + mockResp: []gcpspanner.SavedSearchBriefDetails{}, + mockErr: nil, + wantSearchJobs: []workertypes.SearchJob{}, + wantErr: false, + }, + { + name: "lister error", + mockResp: nil, + mockErr: errors.New("db error"), + wantSearchJobs: nil, + wantErr: true, + }, + } + + for _, tc := range tests { + t.Run(tc.name, func(t *testing.T) { + mock := 
new(mockBatchEventProducerSpannerClient) + mock.listAllSavedSearchesResp = tc.mockResp + mock.listAllSavedSearchesErr = tc.mockErr + + adapter := NewBatchEventProducer(mock) + + searchJobs, err := adapter.ListAllSavedSearches(context.Background()) + + if (err != nil) != tc.wantErr { + t.Errorf("ListAllSavedSearchIDs() error = %v, wantErr %v", err, tc.wantErr) + } + + if !mock.listAllSavedSearchesCalled { + t.Fatal("ListAllSavedSearchIDs not called") + } + + if diff := cmp.Diff(tc.wantSearchJobs, searchJobs); diff != "" { + t.Errorf("ListAllSavedSearchIDs() mismatch (-want +got):\n%s", diff) + } + }) + } +} diff --git a/lib/gcpspanner/spanneradapters/push_delivery.go b/lib/gcpspanner/spanneradapters/push_delivery.go new file mode 100644 index 000000000..54bb8545b --- /dev/null +++ b/lib/gcpspanner/spanneradapters/push_delivery.go @@ -0,0 +1,100 @@ +// Copyright 2025 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +package spanneradapters + +import ( + "context" + "log/slog" + + "github.com/GoogleChrome/webstatus.dev/lib/gcpspanner" + "github.com/GoogleChrome/webstatus.dev/lib/workertypes" +) + +type PushDeliverySpannerClient interface { + FindAllActivePushSubscriptions( + ctx context.Context, + savedSearchID string, + frequency gcpspanner.SavedSearchSnapshotType, + ) ([]gcpspanner.SubscriberDestination, error) +} + +type PushDeliverySubscriberFinder struct { + client PushDeliverySpannerClient +} + +func NewPushDeliverySubscriberFinder(client PushDeliverySpannerClient) *PushDeliverySubscriberFinder { + return &PushDeliverySubscriberFinder{client: client} +} + +func (f *PushDeliverySubscriberFinder) FindSubscribers(ctx context.Context, searchID string, + frequency workertypes.JobFrequency) (*workertypes.SubscriberSet, error) { + spannerFrequency := convertFrequencyToSnapshotType(frequency) + + dests, err := f.client.FindAllActivePushSubscriptions(ctx, searchID, spannerFrequency) + if err != nil { + return nil, err + } + + set := &workertypes.SubscriberSet{ + Emails: make([]workertypes.EmailSubscriber, 0), + } + + for _, dest := range dests { + // If EmailConfig is set, it's an email subscriber. 
+ if dest.EmailConfig != nil { + set.Emails = append(set.Emails, workertypes.EmailSubscriber{ + SubscriptionID: dest.SubscriptionID, + UserID: dest.UserID, + Triggers: convertSpannerTriggersToJobTriggers(dest.Triggers), + EmailAddress: dest.EmailConfig.Address, + ChannelID: dest.ChannelID, + }) + } + } + + return set, nil +} + +func convertSpannerTriggersToJobTriggers(triggers []gcpspanner.SubscriptionTrigger) []workertypes.JobTrigger { + if triggers == nil { + return nil + } + jobTriggers := make([]workertypes.JobTrigger, 0, len(triggers)) + for _, t := range triggers { + jobTriggers = append(jobTriggers, convertSpannerTriggerToJobTrigger(t)) + } + + return jobTriggers +} + +func convertSpannerTriggerToJobTrigger(trigger gcpspanner.SubscriptionTrigger) workertypes.JobTrigger { + switch trigger { + case gcpspanner.SubscriptionTriggerFeatureBaselinePromoteToNewly: + return workertypes.FeaturePromotedToNewly + case gcpspanner.SubscriptionTriggerFeatureBaselinePromoteToWidely: + return workertypes.FeaturePromotedToWidely + case gcpspanner.SubscriptionTriggerFeatureBaselineRegressionToLimited: + return workertypes.FeatureRegressedToLimited + case gcpspanner.SubscriptionTriggerBrowserImplementationAnyComplete: + return workertypes.BrowserImplementationAnyComplete + case gcpspanner.SubscriptionTriggerUnknown: + break + } + // Should not reach here. + slog.WarnContext(context.TODO(), "unknown subscription trigger encountered in push deliveryspanner adapter", + "trigger", trigger) + + return "" +} diff --git a/lib/gcpspanner/spanneradapters/push_delivery_test.go b/lib/gcpspanner/spanneradapters/push_delivery_test.go new file mode 100644 index 000000000..8e4cf46ad --- /dev/null +++ b/lib/gcpspanner/spanneradapters/push_delivery_test.go @@ -0,0 +1,245 @@ +// Copyright 2025 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package spanneradapters + +import ( + "context" + "errors" + "testing" + + "github.com/GoogleChrome/webstatus.dev/lib/gcpspanner" + "github.com/GoogleChrome/webstatus.dev/lib/workertypes" + "github.com/google/go-cmp/cmp" +) + +type mockPushDeliverySpannerClient struct { + findAllCalledWith *findAllCalledWith + findAllReturns findAllReturns +} + +type findAllCalledWith struct { + SearchID string + Frequency gcpspanner.SavedSearchSnapshotType +} + +type findAllReturns struct { + dests []gcpspanner.SubscriberDestination + err error +} + +func (m *mockPushDeliverySpannerClient) FindAllActivePushSubscriptions( + _ context.Context, + savedSearchID string, + frequency gcpspanner.SavedSearchSnapshotType, +) ([]gcpspanner.SubscriberDestination, error) { + m.findAllCalledWith = &findAllCalledWith{ + SearchID: savedSearchID, + Frequency: frequency, + } + + return m.findAllReturns.dests, m.findAllReturns.err +} + +func TestFindSubscribers(t *testing.T) { + tests := []struct { + name string + dests []gcpspanner.SubscriberDestination + clientErr error + expectedCall *findAllCalledWith + expectedSet *workertypes.SubscriberSet + expectedError error + }{ + { + name: "Success with Email and Triggers", + expectedCall: &findAllCalledWith{ + SearchID: "search-1", + Frequency: gcpspanner.SavedSearchSnapshotTypeImmediate, + }, + dests: []gcpspanner.SubscriberDestination{ + { + SubscriptionID: "sub-1", + UserID: "user-1", + Type: "EMAIL", + ChannelID: "chan-1", + EmailConfig: &gcpspanner.EmailConfig{ + Address: "test@example.com", + IsVerified: true, + 
VerificationToken: nil, + }, + Triggers: []gcpspanner.SubscriptionTrigger{ + gcpspanner.SubscriptionTriggerFeatureBaselinePromoteToNewly, + gcpspanner.SubscriptionTriggerFeatureBaselinePromoteToWidely, + }, + }, + }, + expectedSet: &workertypes.SubscriberSet{ + Emails: []workertypes.EmailSubscriber{ + { + SubscriptionID: "sub-1", + UserID: "user-1", + EmailAddress: "test@example.com", + Triggers: []workertypes.JobTrigger{ + workertypes.FeaturePromotedToNewly, + workertypes.FeaturePromotedToWidely, + }, + ChannelID: "chan-1", + }, + }, + }, + clientErr: nil, + expectedError: nil, + }, + { + name: "Mixed types (Webhook ignored)", + expectedCall: &findAllCalledWith{ + SearchID: "search-1", + Frequency: gcpspanner.SavedSearchSnapshotTypeImmediate, + }, + clientErr: nil, + dests: []gcpspanner.SubscriberDestination{ + { + UserID: "user-1", + SubscriptionID: "sub-1", + Type: "EMAIL", + ChannelID: "chan-1", + EmailConfig: &gcpspanner.EmailConfig{ + Address: "test@example.com", + IsVerified: true, + VerificationToken: nil, + }, + Triggers: []gcpspanner.SubscriptionTrigger{ + gcpspanner.SubscriptionTriggerBrowserImplementationAnyComplete, + }, + }, + { + SubscriptionID: "sub-2", + Type: "WEBHOOK", + ChannelID: "chan-2", + EmailConfig: nil, // Webhooks don't have EmailConfig + Triggers: nil, + UserID: "user-3", + }, + }, + expectedSet: &workertypes.SubscriberSet{ + Emails: []workertypes.EmailSubscriber{ + { + SubscriptionID: "sub-1", + UserID: "user-1", + EmailAddress: "test@example.com", + Triggers: []workertypes.JobTrigger{ + workertypes.BrowserImplementationAnyComplete, + }, + ChannelID: "chan-1", + }, + }, + }, + expectedError: nil, + }, + { + name: "Client Error", + expectedCall: &findAllCalledWith{ + SearchID: "search-1", + Frequency: gcpspanner.SavedSearchSnapshotTypeImmediate, + }, + expectedSet: nil, + dests: nil, + clientErr: errTest, + expectedError: errTest, + }, + { + name: "Nil Email Config (Should Skip)", + expectedCall: &findAllCalledWith{ + SearchID: 
"search-1", + Frequency: gcpspanner.SavedSearchSnapshotTypeImmediate, + }, + dests: []gcpspanner.SubscriberDestination{ + { + UserID: "user-1", + SubscriptionID: "sub-1", + Type: "EMAIL", + ChannelID: "chan-1", + Triggers: nil, + EmailConfig: nil, // Missing config should be skipped + }, + }, + clientErr: nil, + expectedSet: &workertypes.SubscriberSet{ + Emails: []workertypes.EmailSubscriber{}, + }, + expectedError: nil, + }, + { + name: "Unknown Trigger (Should be logged/ignored/empty string)", + expectedCall: &findAllCalledWith{ + SearchID: "search-1", + Frequency: gcpspanner.SavedSearchSnapshotTypeImmediate, + }, + dests: []gcpspanner.SubscriberDestination{ + { + UserID: "user-1", + SubscriptionID: "sub-1", + Type: "EMAIL", + ChannelID: "chan-1", + EmailConfig: &gcpspanner.EmailConfig{ + Address: "test@example.com", + IsVerified: true, + VerificationToken: nil, + }, + Triggers: []gcpspanner.SubscriptionTrigger{ + "some_unknown_trigger", + }, + }, + }, + clientErr: nil, + expectedSet: &workertypes.SubscriberSet{ + Emails: []workertypes.EmailSubscriber{ + { + UserID: "user-1", + SubscriptionID: "sub-1", + EmailAddress: "test@example.com", + Triggers: []workertypes.JobTrigger{ + "", // Unknown triggers map to empty string/zero value in current implementation + }, + ChannelID: "chan-1", + }, + }, + }, + expectedError: nil, + }, + } + + for _, tc := range tests { + t.Run(tc.name, func(t *testing.T) { + mock := new(mockPushDeliverySpannerClient) + mock.findAllReturns.dests = tc.dests + mock.findAllReturns.err = tc.clientErr + + finder := NewPushDeliverySubscriberFinder(mock) + set, err := finder.FindSubscribers(context.Background(), "search-1", workertypes.FrequencyImmediate) + + if !errors.Is(err, tc.expectedError) { + t.Errorf("FindSubscribers error = %v, wantErr %v", err, tc.expectedError) + } + + if diff := cmp.Diff(tc.expectedSet, set); diff != "" { + t.Errorf("SubscriberSet mismatch (-want +got):\n%s", diff) + } + + if diff := cmp.Diff(tc.expectedCall, 
mock.findAllCalledWith); diff != "" { + t.Errorf("findAllCalledWith mismatch (-want +got):\n%s", diff) + } + }) + } +} diff --git a/lib/gcpspanner/web_features.go b/lib/gcpspanner/web_features.go index 292499b9e..4aa005392 100644 --- a/lib/gcpspanner/web_features.go +++ b/lib/gcpspanner/web_features.go @@ -513,6 +513,22 @@ func (c *Client) FetchAllFeatureKeys(ctx context.Context) ([]string, error) { return fetchSingleColumnValuesWithTransaction[string](ctx, txn, webFeaturesTable, "FeatureKey") } +// UpdateFeatureDescription updates the description of a web feature. +// Useful for e2e tests. +func (c *Client) UpdateFeatureDescription( + ctx context.Context, featureKey, newDescription string) error { + _, err := c.ReadWriteTransaction(ctx, func(_ context.Context, txn *spanner.ReadWriteTransaction) error { + return txn.BufferWrite([]*spanner.Mutation{ + spanner.Update(webFeaturesTable, + []string{"FeatureKey", "Description"}, + []any{featureKey, newDescription}, + ), + }) + }) + + return err +} + type SpannerFeatureIDAndKey struct { ID string `spanner:"ID"` FeatureKey string `spanner:"FeatureKey"` diff --git a/lib/workertypes/types.go b/lib/workertypes/types.go index 2d0277cba..a855e043c 100644 --- a/lib/workertypes/types.go +++ b/lib/workertypes/types.go @@ -32,6 +32,17 @@ var ( const ( // VersionEventSummaryV1 defines the schema version for v1 of the EventSummary. VersionEventSummaryV1 = "v1" + // MaxHighlights caps the number of detailed items stored in Spanner (The full highlights are stored in GCS). + // Spanner's 10MB limit can easily accommodate this. + // Calculation details: + // A typical highlight contains: + // - Feature info (ID, Name): ~50-80 bytes + // - 2 DocLinks (URL, Title, Slug): ~250 bytes + // - Changes metadata: ~50 bytes + // - JSON structure overhead: ~50 bytes + // Total ≈ 450-500 bytes. + // 10,000 highlights * 500 bytes = 5MB, which is 50% of the 10MB column limit. 
+ MaxHighlights = 10000 ) type SavedSearchState struct { @@ -67,6 +78,7 @@ type SummaryCategories struct { QueryChanged int `json:"query_changed,omitzero"` Added int `json:"added,omitzero"` Removed int `json:"removed,omitzero"` + Deleted int `json:"deleted,omitzero"` Moved int `json:"moved,omitzero"` Split int `json:"split,omitzero"` Updated int `json:"updated,omitzero"` @@ -77,9 +89,125 @@ type SummaryCategories struct { // EventSummary matches the JSON structure stored in the database 'Summary' column. type EventSummary struct { - SchemaVersion string `json:"schemaVersion"` - Text string `json:"text"` - Categories SummaryCategories `json:"categories,omitzero"` + SchemaVersion string `json:"schemaVersion"` + Text string `json:"text"` + Categories SummaryCategories `json:"categories,omitzero"` + Truncated bool `json:"truncated"` + Highlights []SummaryHighlight `json:"highlights"` +} + +func (h SummaryHighlight) MatchesTrigger(t JobTrigger) bool { + switch t { + case FeaturePromotedToNewly: + return h.BaselineChange != nil && h.BaselineChange.To.Status == BaselineStatusNewly + case FeaturePromotedToWidely: + return h.BaselineChange != nil && h.BaselineChange.To.Status == BaselineStatusWidely + case FeatureRegressedToLimited: + return h.BaselineChange != nil && h.BaselineChange.To.Status == BaselineStatusLimited + case BrowserImplementationAnyComplete: + for _, change := range h.BrowserChanges { + if change == nil { + continue + } + if change.To.Status == BrowserStatusAvailable { + return true + } + } + } + + return false +} + +// Change represents a value transition from Old to New. 
+type Change[T any] struct { + From T `json:"from"` + To T `json:"to"` +} + +type FeatureRef struct { + ID string `json:"id"` + Name string `json:"name"` +} + +type DocLink struct { + URL string `json:"url"` + Title *string `json:"title,omitempty"` + Slug *string `json:"slug,omitempty"` +} + +type Docs struct { + MDNDocs []DocLink `json:"mdn_docs,omitempty"` +} + +type BaselineStatus string + +const ( + BaselineStatusLimited BaselineStatus = "limited" + BaselineStatusNewly BaselineStatus = "newly" + BaselineStatusWidely BaselineStatus = "widely" + BaselineStatusUnknown BaselineStatus = "unknown" +) + +type BaselineValue struct { + Status BaselineStatus `json:"status"` + LowDate *time.Time `json:"low_date,omitempty"` + HighDate *time.Time `json:"high_date,omitempty"` +} + +type BrowserStatus string + +const ( + BrowserStatusAvailable BrowserStatus = "available" + BrowserStatusUnavailable BrowserStatus = "unavailable" + BrowserStatusUnknown BrowserStatus = "" +) + +type BrowserValue struct { + Status BrowserStatus `json:"status"` + Version *string `json:"version,omitempty"` + Date *time.Time `json:"date,omitempty"` +} + +type BrowserName string + +const ( + BrowserChrome BrowserName = "chrome" + BrowserChromeAndroid BrowserName = "chrome_android" + BrowserEdge BrowserName = "edge" + BrowserFirefox BrowserName = "firefox" + BrowserFirefoxAndroid BrowserName = "firefox_android" + BrowserSafari BrowserName = "safari" + BrowserSafariIos BrowserName = "safari_ios" +) + +type SummaryHighlightType string + +const ( + SummaryHighlightTypeAdded SummaryHighlightType = "Added" + SummaryHighlightTypeRemoved SummaryHighlightType = "Removed" + SummaryHighlightTypeChanged SummaryHighlightType = "Changed" + SummaryHighlightTypeMoved SummaryHighlightType = "Moved" + SummaryHighlightTypeSplit SummaryHighlightType = "Split" + SummaryHighlightTypeDeleted SummaryHighlightType = "Deleted" +) + +type SplitChange struct { + From FeatureRef `json:"from"` + To []FeatureRef `json:"to"` +} + 
+type SummaryHighlight struct { + Type SummaryHighlightType `json:"type"` + FeatureID string `json:"feature_id"` + FeatureName string `json:"feature_name"` + Docs *Docs `json:"docs,omitempty"` + + // Strongly typed change fields to support i18n and avoid interface{} + NameChange *Change[string] `json:"name_change,omitempty"` + BaselineChange *Change[BaselineValue] `json:"baseline_change,omitempty"` + BrowserChanges map[BrowserName]*Change[BrowserValue] `json:"browser_changes,omitempty"` + Moved *Change[FeatureRef] `json:"moved,omitempty"` + Split *SplitChange `json:"split,omitempty"` } // SummaryVisitor defines the contract for consuming immutable Event Summaries. @@ -122,59 +250,360 @@ func (g FeatureDiffV1SummaryGenerator) GenerateJSONSummary( d v1.FeatureDiff) ([]byte, error) { var s EventSummary s.SchemaVersion = VersionEventSummaryV1 + + s.Categories, s.Text = g.calculateCategoriesAndText(d) + s.Highlights, s.Truncated = g.generateHighlights(d) + + b, err := json.Marshal(s) + if err != nil { + return nil, fmt.Errorf("%w: %w", ErrFailedToSerializeSummary, err) + } + + return b, nil +} + +func (g FeatureDiffV1SummaryGenerator) calculateCategoriesAndText(d v1.FeatureDiff) (SummaryCategories, string) { + var c SummaryCategories var parts []string + // 1. 
Populate Counts (Categories) if d.QueryChanged { parts = append(parts, "Search criteria updated") - s.Categories.QueryChanged = 1 + c.QueryChanged = 1 } - if len(d.Added) > 0 { parts = append(parts, fmt.Sprintf("%d features added", len(d.Added))) - s.Categories.Added = len(d.Added) + c.Added = len(d.Added) } if len(d.Removed) > 0 { parts = append(parts, fmt.Sprintf("%d features removed", len(d.Removed))) - s.Categories.Removed = len(d.Removed) + c.Removed = len(d.Removed) + } + if len(d.Deleted) > 0 { + parts = append(parts, fmt.Sprintf("%d features deleted", len(d.Deleted))) + c.Deleted = len(d.Deleted) } if len(d.Moves) > 0 { parts = append(parts, fmt.Sprintf("%d features moved/renamed", len(d.Moves))) - s.Categories.Moved = len(d.Moves) + c.Moved = len(d.Moves) } if len(d.Splits) > 0 { parts = append(parts, fmt.Sprintf("%d features split", len(d.Splits))) - s.Categories.Split = len(d.Splits) + c.Split = len(d.Splits) } - if len(d.Modified) > 0 { parts = append(parts, fmt.Sprintf("%d features updated", len(d.Modified))) - s.Categories.Updated = len(d.Modified) - + c.Updated = len(d.Modified) for _, m := range d.Modified { if len(m.BrowserChanges) > 0 { - s.Categories.UpdatedImpl++ + c.UpdatedImpl++ } if m.NameChange != nil { - s.Categories.UpdatedRename++ + c.UpdatedRename++ } if m.BaselineChange != nil { - s.Categories.UpdatedBaseline++ + c.UpdatedBaseline++ } } } - if len(parts) == 0 { - s.Text = "No changes detected" - } else { - s.Text = strings.Join(parts, ", ") + text := "No changes detected" + if len(parts) > 0 { + text = strings.Join(parts, ", ") } - b, err := json.Marshal(s) - if err != nil { - return nil, fmt.Errorf("%w: %w", ErrFailedToSerializeSummary, err) + return c, text +} + +func (g FeatureDiffV1SummaryGenerator) generateHighlights(d v1.FeatureDiff) ([]SummaryHighlight, bool) { + var highlights []SummaryHighlight + var truncated bool + + if highlights, truncated = g.processModified(highlights, d.Modified); truncated { + return highlights, true } 
- return b, nil + if highlights, truncated = g.processAdded(highlights, d.Added); truncated { + return highlights, true + } + + if highlights, truncated = g.processRemoved(highlights, d.Removed); truncated { + return highlights, true + } + + if highlights, truncated = g.processDeleted(highlights, d.Deleted); truncated { + return highlights, true + } + + if highlights, truncated = g.processMoves(highlights, d.Moves); truncated { + return highlights, true + } + + if highlights, truncated = g.processSplits(highlights, d.Splits); truncated { + return highlights, true + } + + return highlights, false +} + +func (g FeatureDiffV1SummaryGenerator) processModified(highlights []SummaryHighlight, + modified []v1.FeatureModified) ([]SummaryHighlight, bool) { + for _, m := range modified { + if len(highlights) >= MaxHighlights { + return highlights, true + } + + h := SummaryHighlight{ + Type: SummaryHighlightTypeChanged, + FeatureID: m.ID, + FeatureName: m.Name, + Docs: toDocLinks(m.Docs), + NameChange: nil, + BaselineChange: nil, + BrowserChanges: nil, + Moved: nil, + Split: nil, + } + + if m.BaselineChange != nil { + h.BaselineChange = &Change[BaselineValue]{ + From: toBaselineValue(m.BaselineChange.From), + To: toBaselineValue(m.BaselineChange.To), + } + } + if m.NameChange != nil { + h.NameChange = &Change[string]{ + From: m.NameChange.From, + To: m.NameChange.To, + } + } + + if len(m.BrowserChanges) > 0 { + h.BrowserChanges = make(map[BrowserName]*Change[BrowserValue]) + for b, c := range m.BrowserChanges { + if c == nil { + continue + } + var key BrowserName + switch b { + case v1.Chrome: + key = BrowserChrome + case v1.ChromeAndroid: + key = BrowserChromeAndroid + case v1.Edge: + key = BrowserEdge + case v1.Firefox: + key = BrowserFirefox + case v1.FirefoxAndroid: + key = BrowserFirefoxAndroid + case v1.Safari: + key = BrowserSafari + case v1.SafariIos: + key = BrowserSafariIos + default: + continue + } + h.BrowserChanges[key] = &Change[BrowserValue]{ + From: 
toBrowserValue(c.From), + To: toBrowserValue(c.To), + } + } + } + + highlights = append(highlights, h) + } + + return highlights, false +} + +func (g FeatureDiffV1SummaryGenerator) processAdded(highlights []SummaryHighlight, + added []v1.FeatureAdded) ([]SummaryHighlight, bool) { + for _, a := range added { + if len(highlights) >= MaxHighlights { + return highlights, true + } + highlights = append(highlights, SummaryHighlight{ + Type: SummaryHighlightTypeAdded, + FeatureID: a.ID, + FeatureName: a.Name, + Docs: toDocLinks(a.Docs), + NameChange: nil, + BaselineChange: nil, + BrowserChanges: nil, + Moved: nil, + Split: nil, + }) + } + + return highlights, false +} + +func (g FeatureDiffV1SummaryGenerator) processRemoved(highlights []SummaryHighlight, + removed []v1.FeatureRemoved) ([]SummaryHighlight, bool) { + for _, r := range removed { + if len(highlights) >= MaxHighlights { + return highlights, true + } + highlights = append(highlights, SummaryHighlight{ + Type: SummaryHighlightTypeRemoved, + FeatureID: r.ID, + FeatureName: r.Name, + Docs: nil, + Moved: nil, + Split: nil, + BaselineChange: nil, + NameChange: nil, + BrowserChanges: nil, + }) + } + + return highlights, false +} + +func (g FeatureDiffV1SummaryGenerator) processDeleted(highlights []SummaryHighlight, + deleted []v1.FeatureDeleted) ([]SummaryHighlight, bool) { + for _, r := range deleted { + if len(highlights) >= MaxHighlights { + return highlights, true + } + highlights = append(highlights, SummaryHighlight{ + Type: SummaryHighlightTypeDeleted, + FeatureID: r.ID, + FeatureName: r.Name, + Docs: nil, + Moved: nil, + Split: nil, + BaselineChange: nil, + NameChange: nil, + BrowserChanges: nil, + }) + } + + return highlights, false +} + +func (g FeatureDiffV1SummaryGenerator) processMoves(highlights []SummaryHighlight, + moves []v1.FeatureMoved) ([]SummaryHighlight, bool) { + for _, m := range moves { + if len(highlights) >= MaxHighlights { + return highlights, true + } + highlights = append(highlights, 
SummaryHighlight{ + Type: SummaryHighlightTypeMoved, + FeatureID: m.ToID, // Use new ID after move + FeatureName: m.ToName, + Moved: &Change[FeatureRef]{ + From: FeatureRef{ID: m.FromID, Name: m.FromName}, + To: FeatureRef{ID: m.ToID, Name: m.ToName}, + }, + BrowserChanges: nil, + BaselineChange: nil, + NameChange: nil, + Docs: nil, + Split: nil, + }) + } + + return highlights, false +} + +func (g FeatureDiffV1SummaryGenerator) processSplits(highlights []SummaryHighlight, + splits []v1.FeatureSplit) ([]SummaryHighlight, bool) { + for _, split := range splits { + if len(highlights) >= MaxHighlights { + return highlights, true + } + var to []FeatureRef + for _, t := range split.To { + to = append(to, FeatureRef{ID: t.ID, Name: t.Name}) + } + highlights = append(highlights, SummaryHighlight{ + Type: SummaryHighlightTypeSplit, + FeatureID: split.FromID, + FeatureName: split.FromName, + Split: &SplitChange{ + From: FeatureRef{ID: split.FromID, Name: split.FromName}, + To: to, + }, + Moved: nil, + BrowserChanges: nil, + BaselineChange: nil, + NameChange: nil, + Docs: nil, + }) + } + + return highlights, false +} + +func toDocLinks(docs *v1.Docs) *Docs { + if docs == nil { + return nil + } + ret := new(Docs) + mdnDocs := make([]DocLink, 0, len(docs.MdnDocs)) + for _, d := range docs.MdnDocs { + mdnDocs = append(mdnDocs, DocLink{ + URL: d.URL, + Title: d.Title, + Slug: d.Slug, + }) + } + ret.MDNDocs = mdnDocs + + return ret +} + +func toBaselineValue(s v1.BaselineState) BaselineValue { + val := BaselineValue{ + Status: BaselineStatusUnknown, + LowDate: nil, + HighDate: nil, + } + if s.Status.IsSet { + switch s.Status.Value { + case v1.Limited: + val.Status = BaselineStatusLimited + case v1.Newly: + val.Status = BaselineStatusNewly + case v1.Widely: + val.Status = BaselineStatusWidely + } + } + + if s.LowDate.IsSet { + val.LowDate = s.LowDate.Value + } + if s.HighDate.IsSet { + val.HighDate = s.HighDate.Value + } + + return val +} + +func toBrowserValue(s v1.BrowserState) 
BrowserValue { + val := BrowserValue{ + Status: BrowserStatusUnknown, + Version: nil, + Date: nil, + } + if s.Status.IsSet { + switch s.Status.Value { + case v1.Available: + val.Status = BrowserStatusAvailable + case v1.Unavailable: + val.Status = BrowserStatusUnavailable + } + } + if s.Version.IsSet { + val.Version = s.Version.Value + } + + if s.Date.IsSet { + val.Date = s.Date.Value + } + + return val } type Reason string @@ -209,7 +638,90 @@ type JobFrequency string const ( FrequencyUnknown JobFrequency = "UNKNOWN" FrequencyImmediate JobFrequency = "IMMEDIATE" - FrequencyDaily JobFrequency = "DAILY" FrequencyWeekly JobFrequency = "WEEKLY" FrequencyMonthly JobFrequency = "MONTHLY" ) + +type RefreshSearchCommand struct { + SearchID string + Query string + Frequency JobFrequency + Timestamp time.Time +} + +type SearchJob struct { + ID string + Query string +} + +type JobTrigger string + +const ( + FeaturePromotedToNewly JobTrigger = "FEATURE_PROMOTED_TO_NEWLY" + FeaturePromotedToWidely JobTrigger = "FEATURE_PROMOTED_TO_WIDELY" + FeatureRegressedToLimited JobTrigger = "FEATURE_REGRESSED_TO_LIMITED" + BrowserImplementationAnyComplete JobTrigger = "BROWSER_IMPLEMENTATION_ANY_COMPLETE" +) + +// EmailSubscriber represents a subscriber using an Email channel. +type EmailSubscriber struct { + SubscriptionID string + UserID string + Triggers []JobTrigger + EmailAddress string + ChannelID string +} + +// SubscriberSet groups subscribers by channel type to avoid runtime type assertions. +type SubscriberSet struct { + Emails []EmailSubscriber + // Future channel types (e.g. Webhook) can be added here. +} + +// DeliveryMetadata contains the necessary context from the original event +// required for rendering notifications (e.g. generating links), decoupled from the upstream event format. 
+type DeliveryMetadata struct { + EventID string + SearchID string + Query string + Frequency JobFrequency + GeneratedAt time.Time +} + +type DispatchEventMetadata struct { + EventID string + SearchID string + Frequency JobFrequency + Query string + GeneratedAt time.Time +} + +// EmailDeliveryJob represents a task to send an email. +type EmailDeliveryJob struct { + SubscriptionID string + RecipientEmail string + ChannelID string + Triggers []JobTrigger + // SummaryRaw is the opaque JSON payload describing the event. + SummaryRaw []byte + // Metadata contains context for links and tracking. + Metadata DeliveryMetadata +} + +type IncomingEmailDeliveryJob struct { + EmailDeliveryJob + // The ID from the queued event for this specific email job. + // This will be generated by the queuing service. + // This is different from the EventID in the Metadata which is for the original event that triggered + // the event producer in the very beginning. + EmailEventID string +} + +var ( + // ErrUnrecoverableSystemFailureEmailSending indicates that there's a system failure that should not be retried. + // Examples: System auth issue. + ErrUnrecoverableSystemFailureEmailSending = errors.New("unrecoverable system failure trying to send email") + // ErrUnrecoverableUserFailureEmailSending indicates that there's a user failure that should not be retried. + // Examples: Bad email address. 
+ ErrUnrecoverableUserFailureEmailSending = errors.New("unrecoverable user failure trying to send email") +) diff --git a/lib/workertypes/types_test.go b/lib/workertypes/types_test.go index ea2ad6f99..0d684dd89 100644 --- a/lib/workertypes/types_test.go +++ b/lib/workertypes/types_test.go @@ -60,6 +60,7 @@ func TestParseEventSummary(t *testing.T) { QueryChanged: 0, Added: 0, Removed: 0, + Deleted: 0, Moved: 0, Split: 0, Updated: 0, @@ -67,6 +68,8 @@ func TestParseEventSummary(t *testing.T) { UpdatedRename: 0, UpdatedBaseline: 0, }, + Truncated: false, + Highlights: nil, }, wantErr: false, }, @@ -118,6 +121,8 @@ func TestParseEventSummary(t *testing.T) { } func TestGenerateJSONSummaryFeatureDiffV1(t *testing.T) { + newlyDate := time.Date(2025, 1, 1, 0, 0, 0, 0, time.UTC) + browserImplDate := time.Date(2024, 1, 1, 0, 0, 0, 0, time.UTC) tests := []struct { name string diff v1.FeatureDiff @@ -133,8 +138,9 @@ func TestGenerateJSONSummaryFeatureDiffV1(t *testing.T) { Modified: nil, Moves: nil, Splits: nil, + Deleted: nil, }, - expected: `{"schemaVersion":"v1","text":"No changes detected"}`, + expected: `{"schemaVersion":"v1","text":"No changes detected","truncated":false,"highlights":null}`, expectedError: nil, }, { @@ -143,11 +149,16 @@ func TestGenerateJSONSummaryFeatureDiffV1(t *testing.T) { QueryChanged: true, Added: []v1.FeatureAdded{ {ID: "1", Name: "A", Reason: v1.ReasonNewMatch, Docs: nil}, - {ID: "2", Name: "B", Reason: v1.ReasonNewMatch, Docs: nil}, + {ID: "2", Name: "B", Reason: v1.ReasonNewMatch, Docs: &v1.Docs{ + MdnDocs: []v1.MdnDoc{{URL: "https://mdn.io/B", Title: generic.ValuePtr("B"), Slug: generic.ValuePtr("slug-b")}}, + }}, }, Removed: []v1.FeatureRemoved{ {ID: "3", Name: "C", Reason: v1.ReasonUnmatched}, }, + Deleted: []v1.FeatureDeleted{ + {ID: "4", Name: "D", Reason: v1.ReasonDeleted}, + }, Moves: []v1.FeatureMoved{ {FromID: "4", ToID: "5", FromName: "D", ToName: "E"}, }, @@ -167,7 +178,7 @@ func TestGenerateJSONSummaryFeatureDiffV1(t *testing.T) { 
}, To: v1.BaselineState{ Status: generic.SetOpt(v1.Newly), - LowDate: generic.UnsetOpt[*time.Time](), + LowDate: generic.SetOpt(&newlyDate), HighDate: generic.UnsetOpt[*time.Time](), }, }, @@ -187,8 +198,8 @@ func TestGenerateJSONSummaryFeatureDiffV1(t *testing.T) { Version: generic.UnsetOpt[*string](), }, To: v1.BrowserState{ Status: generic.SetOpt(v1.Available), - Date: generic.UnsetOpt[*time.Time](), - Version: generic.UnsetOpt[*string](), + Date: generic.SetOpt(&browserImplDate), + Version: generic.SetOpt(generic.ValuePtr("123")), }}, v1.ChromeAndroid: nil, v1.Edge: nil, @@ -213,21 +224,125 @@ func TestGenerateJSONSummaryFeatureDiffV1(t *testing.T) { }, expected: `{ -"schemaVersion":"v1", -"text":"Search criteria updated, 2 features added, 1 features removed, ` + - `1 features moved/renamed, 1 features split, 3 features updated", -"categories": - { - "query_changed":1, - "added":2, - "removed":1, - "moved":1, - "split":1, - "updated":3, - "updated_impl":1, - "updated_rename":1, - "updated_baseline":1 - } + "schemaVersion": "v1", + "text": "Search criteria updated, 2 features added, 1 features removed, ` + + `1 features deleted, 1 features moved/renamed, 1 features split, 3 features updated", + "categories": { + "query_changed": 1, + "added": 2, + "removed": 1, + "deleted": 1, + "moved": 1, + "split": 1, + "updated": 3, + "updated_impl": 1, + "updated_rename": 1, + "updated_baseline": 1 + }, + "truncated": false, + "highlights": [ + { + "type": "Changed", + "feature_id": "8", + "feature_name": "H", + "baseline_change": { + "from": { + "status": "limited" + }, + "to": { + "status": "newly", + "low_date": "2025-01-01T00:00:00Z" + } + } + }, + { + "type": "Changed", + "feature_id": "9", + "feature_name": "I", + "browser_changes": { + "chrome": { + "from": { + "status": "unavailable" + }, + "to": { + "date": "2024-01-01T00:00:00Z", + "status": "available", + "version": "123" + } + } + } + }, + { + "type": "Changed", + "feature_id": "10", + "feature_name": "J", + 
"name_change": { + "from": "Old", + "to": "New" + } + }, + { + "type": "Added", + "feature_id": "1", + "feature_name": "A" + }, + { + "type": "Added", + "feature_id": "2", + "feature_name": "B", + "docs": { + "mdn_docs": [ + { + "url": "https://mdn.io/B", + "title": "B", + "slug": "slug-b" + } + ] + } + }, + { + "type": "Removed", + "feature_id": "3", + "feature_name": "C" + }, + { + "type": "Deleted", + "feature_id": "4", + "feature_name": "D" + }, + { + "type": "Moved", + "feature_id": "5", + "feature_name": "E", + "moved": { + "from": { + "id": "4", + "name": "D" + }, + "to": { + "id": "5", + "name": "E" + } + } + }, + { + "type": "Split", + "feature_id": "6", + "feature_name": "F", + "split": { + "from": { + "id": "6", + "name": "F" + }, + "to": [ + { + "id": "7", + "name": "G" + } + ] + } + } + ] }`, expectedError: nil, }, diff --git a/openapi/backend/openapi.yaml b/openapi/backend/openapi.yaml index 2cf6b4f40..efacfe7ca 100644 --- a/openapi/backend/openapi.yaml +++ b/openapi/backend/openapi.yaml @@ -1813,22 +1813,28 @@ components: - EnumUnknownValue SubscriptionFrequency: type: string - description: The frequency for a subscription. Currently, only 'daily' is supported. + description: The frequency for a subscription. enum: - - daily + - immediate + - weekly + - monthly x-enumNames: - - SubscriptionFrequencyDaily + - SubscriptionFrequencyImmediate + - SubscriptionFrequencyWeekly + - SubscriptionFrequencyMonthly SubscriptionTriggerWritable: type: string description: The set of valid, user-selectable triggers for a subscription. 
enum: - - feature_baseline_limited_to_newly - - feature_any_browser_implementation_complete - - feature_baseline_regression_newly_to_limited + - feature_baseline_to_newly + - feature_baseline_to_widely + - feature_browser_implementation_any_complete + - feature_baseline_regression_to_limited x-enumNames: - - SubscriptionTriggerFeatureBaselineLimitedToNewly - - SubscriptionTriggerFeatureAnyBrowserImplementationComplete - - SubscriptionTriggerFeatureBaselineRegressionNewlyToLimited + - SubscriptionTriggerFeatureBaselineToNewly + - SubscriptionTriggerFeatureBaselineToWidely + - SubscriptionTriggerFeatureBrowserImplementationAnyComplete + - SubscriptionTriggerFeatureBaselineRegressionToLimited SubscriptionTriggerResponseValue: description: > Represents a subscription trigger value. Includes 'unknown' for handling deprecated triggers. diff --git a/skaffold.yaml b/skaffold.yaml index be8cdd607..8dff3e4f4 100644 --- a/skaffold.yaml +++ b/skaffold.yaml @@ -19,3 +19,4 @@ metadata: requires: - path: ./backend - path: ./frontend + - path: ./workers diff --git a/util/cmd/load_fake_data/main.go b/util/cmd/load_fake_data/main.go index 53186d190..4f2ca617d 100644 --- a/util/cmd/load_fake_data/main.go +++ b/util/cmd/load_fake_data/main.go @@ -248,6 +248,13 @@ func resetTestData(ctx context.Context, spannerClient *gcpspanner.Client, authCl return nil } + // Delete all subscriptions for the test users. 
+ err := spannerClient.DeleteUserSubscriptions(ctx, userIDs) + if err != nil { + return fmt.Errorf("failed to delete test user subscriptions: %w", err) + } + slog.InfoContext(ctx, "Deleted subscriptions for test users") + for _, userID := range userIDs { page, err := spannerClient.ListUserSavedSearches(ctx, userID, 1000, nil) if err != nil { @@ -740,6 +747,59 @@ func generateSavedSearchBookmarks(ctx context.Context, spannerClient *gcpspanner return len(bookmarksToInsert), nil } +func generateSubscriptions(ctx context.Context, spannerClient *gcpspanner.Client, + authClient *auth.Client) (int, error) { + // Get the channel ID for test.user.1@example.com's primary email. + userID, err := findUserIDByEmail(ctx, "test.user.1@example.com", authClient) + if err != nil { + return 0, fmt.Errorf("could not find userID for test.user.1@example.com: %w", err) + } + channels, _, err := spannerClient.ListNotificationChannels(ctx, gcpspanner.ListNotificationChannelsRequest{ + UserID: userID, + PageSize: 100, + PageToken: nil, + }) + if err != nil { + return 0, fmt.Errorf("could not list notification channels for user %s: %w", userID, err) + } + if len(channels) == 0 { + return 0, fmt.Errorf("no notification channels found for user %s", userID) + } + primaryChannelID := channels[0].ID + + subscriptionsToInsert := []struct { + SavedSearchUUID string + ChannelID string + Frequency gcpspanner.SavedSearchSnapshotType + Triggers []gcpspanner.SubscriptionTrigger + }{ + { + // Subscription for "my first project query" + SavedSearchUUID: "74bdb85f-59d3-43b0-8061-20d5818e8c97", + ChannelID: primaryChannelID, + Frequency: gcpspanner.SavedSearchSnapshotTypeWeekly, + Triggers: []gcpspanner.SubscriptionTrigger{ + gcpspanner.SubscriptionTriggerFeatureBaselinePromoteToWidely, + }, + }, + } + + for _, sub := range subscriptionsToInsert { + _, err := spannerClient.CreateSavedSearchSubscription(ctx, gcpspanner.CreateSavedSearchSubscriptionRequest{ + UserID: userID, + ChannelID: sub.ChannelID, + 
SavedSearchID: sub.SavedSearchUUID, + Triggers: sub.Triggers, + Frequency: sub.Frequency, + }) + if err != nil { + return 0, fmt.Errorf("failed to create subscription for saved search %s: %w", sub.SavedSearchUUID, err) + } + } + + return len(subscriptionsToInsert), nil +} + func generateUserData(ctx context.Context, spannerClient *gcpspanner.Client, authClient *auth.Client) error { savedSearchesCount, err := generateSavedSearches(ctx, spannerClient, authClient) @@ -757,6 +817,13 @@ func generateUserData(ctx context.Context, spannerClient *gcpspanner.Client, slog.InfoContext(ctx, "saved search bookmarks generated", "amount of bookmarks created", bookmarkCount) + subscriptionsCount, err := generateSubscriptions(ctx, spannerClient, authClient) + if err != nil { + return fmt.Errorf("subscriptions generation failed %w", err) + } + slog.InfoContext(ctx, "subscriptions generated", + "amount of subscriptions created", subscriptionsCount) + return nil } func generateData(ctx context.Context, spannerClient *gcpspanner.Client, datastoreClient *gds.Client) error { @@ -1354,6 +1421,7 @@ func main() { datastoreDatabase = flag.String("datastore_database", "", "Datastore Database") scope = flag.String("scope", "all", "Scope of data generation: all, user") resetFlag = flag.Bool("reset", false, "Reset test user data before loading") + triggerScenario = flag.String("trigger-scenario", "", "Trigger a specific data change scenario for E2E tests") ) flag.Parse() @@ -1387,6 +1455,16 @@ func main() { gofakeit.GlobalFaker = gofakeit.New(seedValue) ctx := context.Background() + if *triggerScenario != "" { + err := triggerDataChange(ctx, spannerClient, *triggerScenario) + if err != nil { + slog.ErrorContext(ctx, "Failed to trigger data change", "scenario", *triggerScenario, "error", err) + os.Exit(1) + } + slog.InfoContext(ctx, "Data change triggered successfully", "scenario", *triggerScenario) + + return // Exit immediately after triggering the change + } var finalErr error @@ -1425,5 
+1503,36 @@ func main() { slog.ErrorContext(ctx, "Data generation failed", "scope", *scope, "reset", *resetFlag, "error", finalErr) os.Exit(1) } + slog.InfoContext(ctx, "loading fake data successful") } + +func triggerDataChange(ctx context.Context, spannerClient *gcpspanner.Client, scenario string) error { + slog.InfoContext(ctx, "Triggering data change", "scenario", scenario) + // These feature keys are used in the E2E tests. + const nonMatchingFeatureKey = "popover" + const matchingFeatureKey = "popover" + const batchChangeFeatureKey = "dialog" + const batchChangeGroupKey = "css" + + switch scenario { + case "non-matching": + // Change a property that the test subscription is not listening for. + return spannerClient.UpdateFeatureDescription(ctx, nonMatchingFeatureKey, "A non-matching change") + case "matching": + // Change the BaselineStatus to 'widely' to match the test subscription's trigger. + status := gcpspanner.BaselineStatusHigh + + return spannerClient.UpsertFeatureBaselineStatus(ctx, matchingFeatureKey, gcpspanner.FeatureBaselineStatus{ + Status: &status, + // In reality, these would be set, but we are strictly testing the transition of baseline status. + LowDate: nil, + HighDate: nil, + }) + case "batch-change": + // Change a feature to match the 'group:css' search criteria. 
+ return spannerClient.AddFeatureToGroup(ctx, batchChangeFeatureKey, batchChangeGroupKey) + default: + return fmt.Errorf("unknown trigger scenario: %s", scenario) + } +} diff --git a/workers/email/cmd/job/main.go b/workers/email/cmd/job/main.go index 36a8b15f3..6ad983f4d 100644 --- a/workers/email/cmd/job/main.go +++ b/workers/email/cmd/job/main.go @@ -17,12 +17,50 @@ package main import ( "context" "log/slog" + "net/url" "os" + "strconv" + "strings" + "github.com/GoogleChrome/webstatus.dev/lib/email/chime" + "github.com/GoogleChrome/webstatus.dev/lib/email/chime/chimeadapters" + "github.com/GoogleChrome/webstatus.dev/lib/email/smtpsender" + "github.com/GoogleChrome/webstatus.dev/lib/email/smtpsender/smtpsenderadapters" "github.com/GoogleChrome/webstatus.dev/lib/gcppubsub" + "github.com/GoogleChrome/webstatus.dev/lib/gcppubsub/gcppubsubadapters" "github.com/GoogleChrome/webstatus.dev/lib/gcpspanner" + "github.com/GoogleChrome/webstatus.dev/lib/gcpspanner/spanneradapters" + "github.com/GoogleChrome/webstatus.dev/workers/email/pkg/digest" + "github.com/GoogleChrome/webstatus.dev/workers/email/pkg/sender" ) +func getSMTPSender(ctx context.Context, smtpHost string) *smtpsenderadapters.EmailWorkerSMTPAdapter { + slog.InfoContext(ctx, "using smtp email sender") + smtpPortStr := os.Getenv("SMTP_PORT") + smtpPort, err := strconv.Atoi(smtpPortStr) + if err != nil { + slog.ErrorContext(ctx, "invalid SMTP_PORT", "error", err) + os.Exit(1) + } + smtpUsername := os.Getenv("SMTP_USERNAME") + smtpPassword := os.Getenv("SMTP_PASSWORD") + fromAddress := os.Getenv("FROM_ADDRESS") + + smtpCfg := smtpsender.SMTPClientConfig{ + Host: smtpHost, + Port: smtpPort, + Username: smtpUsername, + Password: smtpPassword, + } + smtpClient, err := smtpsender.NewClient(smtpCfg, fromAddress) + if err != nil { + slog.ErrorContext(ctx, "failed to create smtp client", "error", err) + os.Exit(1) + } + + return smtpsenderadapters.NewEmailWorkerSMTPAdapter(smtpClient) +} + func main() { ctx := 
context.Background() @@ -48,6 +86,18 @@ func main() { spannerClient.SetMisingOneImplementationQuery(gcpspanner.LocalMissingOneImplementationQuery{}) } + baseURL := os.Getenv("FRONTEND_BASE_URL") + if baseURL == "" { + slog.ErrorContext(ctx, "FRONTEND_BASE_URL is not set. exiting...") + os.Exit(1) + } + + parsedBaseURL, err := url.Parse(baseURL) + if err != nil { + slog.ErrorContext(ctx, "failed to parse FRONTEND_BASE_URL", "error", err.Error()) + os.Exit(1) + } + // For subscribing to email events emailSubID := os.Getenv("EMAIL_SUBSCRIPTION_ID") if emailSubID == "" { @@ -61,11 +111,45 @@ func main() { os.Exit(1) } - // TODO: https://github.com/GoogleChrome/webstatus.dev/issues/1852 - // Nil handler for now. Will fix later - err = queueClient.Subscribe(ctx, emailSubID, nil) + renderer, err := digest.NewHTMLRenderer(parsedBaseURL.String()) if err != nil { - slog.ErrorContext(ctx, "unable to connect to subscription", "error", err) + // If the template is not valid, the renderer will fail. + slog.ErrorContext(ctx, "unable to create renderer", "error", err) + os.Exit(1) + } + + var emailSender sender.EmailSender + smtpHost := os.Getenv("SMTP_HOST") + if smtpHost != "" { + emailSender = getSMTPSender(ctx, smtpHost) + } else { + slog.InfoContext(ctx, "using chime email sender") + chimeEnvStr := os.Getenv("CHIME_ENV") + chimeEnv := chime.EnvProd + if chimeEnvStr == "autopush" { + chimeEnv = chime.EnvAutopush + } + chimeBCC := os.Getenv("CHIME_BCC") + bccList := []string{} + if chimeBCC != "" { + bccList = strings.Split(chimeBCC, ",") + } + fromAddress := os.Getenv("FROM_ADDRESS") + chimeSender, err := chime.NewChimeSender(ctx, chimeEnv, bccList, fromAddress, nil) + if err != nil { + slog.ErrorContext(ctx, "failed to create chime sender", "error", err) + os.Exit(1) + } + emailSender = chimeadapters.NewEmailWorkerChimeAdapter(chimeSender) + } + + listener := gcppubsubadapters.NewEmailWorkerSubscriberAdapter(sender.NewSender( + emailSender, + 
spanneradapters.NewEmailWorkerChannelStateManager(spannerClient), + renderer, + ), queueClient, emailSubID) + if err := listener.Subscribe(ctx); err != nil { + slog.ErrorContext(ctx, "worker subscriber failed", "error", err) os.Exit(1) } } diff --git a/workers/email/go.mod b/workers/email/go.mod index 7c9352e96..f1e99c9b6 100644 --- a/workers/email/go.mod +++ b/workers/email/go.mod @@ -6,37 +6,66 @@ replace github.com/GoogleChrome/webstatus.dev/lib => ../../lib replace github.com/GoogleChrome/webstatus.dev/lib/gen => ../../lib/gen -require github.com/GoogleChrome/webstatus.dev/lib v0.0.0-00010101000000-000000000000 +require ( + github.com/GoogleChrome/webstatus.dev/lib v0.0.0-00010101000000-000000000000 + github.com/google/go-cmp v0.7.0 +) require ( cel.dev/expr v0.25.1 // indirect cloud.google.com/go v0.123.0 // indirect cloud.google.com/go/auth v0.17.0 // indirect cloud.google.com/go/auth/oauth2adapt v0.2.8 // indirect + cloud.google.com/go/cloudtasks v1.13.7 // indirect cloud.google.com/go/compute/metadata v0.9.0 // indirect + cloud.google.com/go/datastore v1.21.0 // indirect cloud.google.com/go/iam v1.5.3 // indirect + cloud.google.com/go/logging v1.13.1 // indirect + cloud.google.com/go/longrunning v0.7.0 // indirect cloud.google.com/go/monitoring v1.24.3 // indirect cloud.google.com/go/pubsub/v2 v2.0.0 // indirect + cloud.google.com/go/secretmanager v1.16.0 // indirect cloud.google.com/go/spanner v1.86.1 // indirect github.com/GoogleChrome/webstatus.dev/lib/gen v0.0.0-20251119220853-b545639c35ae // indirect github.com/GoogleCloudPlatform/grpc-gcp-go/grpcgcp v1.5.3 // indirect github.com/GoogleCloudPlatform/opentelemetry-operations-go/detectors/gcp v1.30.0 // indirect github.com/antlr4-go/antlr/v4 v4.13.1 // indirect + github.com/apapsch/go-jsonmerge/v2 v2.0.0 // indirect github.com/cespare/xxhash/v2 v2.3.0 // indirect github.com/cncf/xds/go v0.0.0-20251110193048-8bfbf64dc13e // indirect + github.com/deckarep/golang-set v1.8.0 // indirect 
github.com/envoyproxy/go-control-plane/envoy v1.36.0 // indirect github.com/envoyproxy/protoc-gen-validate v1.2.1 // indirect github.com/felixge/httpsnoop v1.0.4 // indirect + github.com/getkin/kin-openapi v0.133.0 // indirect github.com/go-jose/go-jose/v4 v4.1.3 // indirect github.com/go-logr/logr v1.4.3 // indirect github.com/go-logr/stdr v1.2.2 // indirect + github.com/go-openapi/jsonpointer v0.22.3 // indirect + github.com/go-openapi/swag/jsonname v0.25.3 // indirect github.com/golang/groupcache v0.0.0-20241129210726-2c02b8208cf8 // indirect + github.com/gomodule/redigo v1.9.3 // indirect + github.com/google/go-github/v77 v77.0.0 // indirect + github.com/google/go-querystring v1.1.0 // indirect github.com/google/s2a-go v0.1.9 // indirect github.com/google/uuid v1.6.0 // indirect github.com/googleapis/enterprise-certificate-proxy v0.3.7 // indirect github.com/googleapis/gax-go/v2 v2.15.0 // indirect + github.com/gorilla/handlers v1.5.2 // indirect + github.com/gorilla/mux v1.8.1 // indirect + github.com/gorilla/securecookie v1.1.2 // indirect + github.com/josharian/intern v1.0.0 // indirect + github.com/mailru/easyjson v0.9.1 // indirect + github.com/mohae/deepcopy v0.0.0-20170929034955-c48cc78d4826 // indirect + github.com/oapi-codegen/runtime v1.1.2 // indirect + github.com/oasdiff/yaml v0.0.0-20250309154309-f31be36b4037 // indirect + github.com/oasdiff/yaml3 v0.0.0-20250309153720-d2182401db90 // indirect + github.com/perimeterx/marshmallow v1.1.5 // indirect github.com/planetscale/vtprotobuf v0.6.1-0.20240319094008-0393e58bdf10 // indirect + github.com/sirupsen/logrus v1.9.3 // indirect github.com/spiffe/go-spiffe/v2 v2.6.0 // indirect + github.com/web-platform-tests/wpt.fyi v0.0.0-20251118162843-54f805c8a632 // indirect + github.com/woodsbury/decimal128 v1.4.0 // indirect go.opencensus.io v0.24.0 // indirect go.opentelemetry.io/auto/sdk v1.2.1 // indirect go.opentelemetry.io/contrib/detectors/gcp v1.38.0 // indirect @@ -61,4 +90,5 @@ require ( 
google.golang.org/genproto/googleapis/rpc v0.0.0-20251111163417-95abcf5c77ba // indirect google.golang.org/grpc v1.77.0 // indirect google.golang.org/protobuf v1.36.10 // indirect + gopkg.in/yaml.v3 v3.0.1 // indirect ) diff --git a/workers/email/go.sum b/workers/email/go.sum index af0a25e77..672b53cf0 100644 --- a/workers/email/go.sum +++ b/workers/email/go.sum @@ -637,6 +637,7 @@ github.com/JohnCGriffin/overflow v0.0.0-20211019200055-46fa312c352c/go.mod h1:X0 github.com/Microsoft/go-winio v0.6.2 h1:F2VQgta7ecxGYO8k3ZZz3RS8fVIXVxONVUPlNERoyfY= github.com/Microsoft/go-winio v0.6.2/go.mod h1:yd8OoFMLzJbo9gZq8j5qaps8bJ9aShtEA8Ipt1oGCvU= github.com/OneOfOne/xxhash v1.2.2/go.mod h1:HSdplMjZKSmBqAxg5vPj2TmRDmfkzw+cTzAElWljhcU= +github.com/RaveNoX/go-jsoncommentstrip v1.0.0/go.mod h1:78ihd09MekBnJnxpICcwzCMzGrKSKYe4AqU6PDYYpjk= github.com/ajstarks/deck v0.0.0-20200831202436-30c9fc6549a9/go.mod h1:JynElWSGnm/4RlzPXRlREEwqTHAN3T56Bv2ITsFT3gY= github.com/ajstarks/deck/generate v0.0.0-20210309230005-c3f852c02e19/go.mod h1:T13YZdzov6OU0A1+RfKZiZN9ca6VeKdBdyDV+BY97Tk= github.com/ajstarks/svgo v0.0.0-20180226025133-644b8db467af/go.mod h1:K08gAheRH3/J6wwsYMMT4xOr94bZjxIelGM0+d/wbFw= @@ -648,6 +649,9 @@ github.com/antlr4-go/antlr/v4 v4.13.1/go.mod h1:GKmUxMtwp6ZgGwZSva4eWPC5mS6vUAmO github.com/apache/arrow/go/v10 v10.0.1/go.mod h1:YvhnlEePVnBS4+0z3fhPfUy7W1Ikj0Ih0vcRo/gZ1M0= github.com/apache/arrow/go/v11 v11.0.0/go.mod h1:Eg5OsL5H+e299f7u5ssuXsuHQVEGC4xei5aX110hRiI= github.com/apache/thrift v0.16.0/go.mod h1:PHK3hniurgQaNMZYaCLEqXKsYK8upmhPbmdP2FXSqgU= +github.com/apapsch/go-jsonmerge/v2 v2.0.0 h1:axGnT1gRIfimI7gJifB699GoE/oq+F2MU7Dml6nw9rQ= +github.com/apapsch/go-jsonmerge/v2 v2.0.0/go.mod h1:lvDnEdqiQrp0O42VQGgmlKpxL1AP2+08jFMw88y4klk= +github.com/bmatcuk/doublestar v1.1.1/go.mod h1:UD6OnuiIn0yFxxA2le/rnRU1G4RaI4UvFv1sNto9p6w= github.com/boombuler/barcode v1.0.0/go.mod h1:paBWMcWSl3LHKBqUq+rly7CNSldXjb2rDl3JlRe0mD8= github.com/boombuler/barcode v1.0.1/go.mod 
h1:paBWMcWSl3LHKBqUq+rly7CNSldXjb2rDl3JlRe0mD8= github.com/cenkalti/backoff/v4 v4.3.0 h1:MyRJ/UdXutAwSAT+s3wNd7MfTIcy71VQueUuFK343L8= @@ -735,6 +739,8 @@ github.com/felixge/httpsnoop v1.0.4 h1:NFTV2Zj1bL4mc9sqWACXbQFVBBg2W3GPvqp8/ESS2 github.com/felixge/httpsnoop v1.0.4/go.mod h1:m8KPJKqk1gH5J9DgRY2ASl2lWCfGKXixSwevea8zH2U= github.com/fogleman/gg v1.2.1-0.20190220221249-0403632d5b90/go.mod h1:R/bRT+9gY/C5z7JzPU0zXsXHKM4/ayA+zqcVNZzPa1k= github.com/fogleman/gg v1.3.0/go.mod h1:R/bRT+9gY/C5z7JzPU0zXsXHKM4/ayA+zqcVNZzPa1k= +github.com/getkin/kin-openapi v0.133.0 h1:pJdmNohVIJ97r4AUFtEXRXwESr8b0bD721u/Tz6k8PQ= +github.com/getkin/kin-openapi v0.133.0/go.mod h1:boAciF6cXk5FhPqe/NQeBTeenbjqU4LhWBf09ILVvWE= github.com/ghodss/yaml v1.0.0/go.mod h1:4dBDuWmgqj2HViK6kFavaiC9ZROes6MMH2rRYeMEF04= github.com/go-fonts/dejavu v0.1.0/go.mod h1:4Wt4I4OU2Nq9asgDCteaAaWZOV24E+0/Pwo0gppep4g= github.com/go-fonts/latin-modern v0.2.0/go.mod h1:rQVLdDMK+mK1xscDwsqM5J8U2jrRa3T0ecnM9pNujks= @@ -755,8 +761,16 @@ github.com/go-logr/stdr v1.2.2 h1:hSWxHoqTgW2S2qGc0LTAI563KZ5YKYRhT3MFKZMbjag= github.com/go-logr/stdr v1.2.2/go.mod h1:mMo/vtBO5dYbehREoey6XUKy/eSumjCCveDpRre4VKE= github.com/go-ole/go-ole v1.2.6 h1:/Fpf6oFPoeFik9ty7siob0G6Ke8QvQEuVcuChpwXzpY= github.com/go-ole/go-ole v1.2.6/go.mod h1:pprOEPIfldk/42T2oK7lQ4v4JSDwmV0As9GaiUsvbm0= +github.com/go-openapi/jsonpointer v0.22.3 h1:dKMwfV4fmt6Ah90zloTbUKWMD+0he+12XYAsPotrkn8= +github.com/go-openapi/jsonpointer v0.22.3/go.mod h1:0lBbqeRsQ5lIanv3LHZBrmRGHLHcQoOXQnf88fHlGWo= +github.com/go-openapi/swag/jsonname v0.25.3 h1:U20VKDS74HiPaLV7UZkztpyVOw3JNVsit+w+gTXRj0A= +github.com/go-openapi/swag/jsonname v0.25.3/go.mod h1:GPVEk9CWVhNvWhZgrnvRA6utbAltopbKwDu8mXNUMag= +github.com/go-openapi/testify/v2 v2.0.2 h1:X999g3jeLcoY8qctY/c/Z8iBHTbwLz7R2WXd6Ub6wls= +github.com/go-openapi/testify/v2 v2.0.2/go.mod h1:HCPmvFFnheKK2BuwSA0TbbdxJ3I16pjwMkYkP4Ywn54= github.com/go-pdf/fpdf v0.5.0/go.mod h1:HzcnA+A23uwogo0tp9yU+l3V+KXhiESpt1PMayhOh5M= 
github.com/go-pdf/fpdf v0.6.0/go.mod h1:HzcnA+A23uwogo0tp9yU+l3V+KXhiESpt1PMayhOh5M= +github.com/go-test/deep v1.0.8 h1:TDsG77qcSprGbC6vTN8OuXp5g+J+b5Pcguhf7Zt61VM= +github.com/go-test/deep v1.0.8/go.mod h1:5C2ZWiW0ErCdrYzpqxLbTX7MG14M9iiw8DgHncVwcsE= github.com/goccy/go-json v0.9.11/go.mod h1:6MelG93GURQebXPDq3khkgXZkazVtN9CRI+MGFi0w8I= github.com/gogo/protobuf v1.3.2 h1:Ov1cvc58UF3b5XjBnZv7+opcTcQFZebYjWzi34vdm4Q= github.com/gogo/protobuf v1.3.2/go.mod h1:P1XiOD3dCwIKUDQYPy72D8LYyHL2YPYrpS2s69NZV8Q= @@ -828,6 +842,8 @@ github.com/google/go-github/v77 v77.0.0 h1:9DsKKbZqil5y/4Z9mNpZDQnpli6PJbqipSuuN github.com/google/go-github/v77 v77.0.0/go.mod h1:c8VmGXRUmaZUqbctUcGEDWYnMrtzZfJhDSylEf1wfmA= github.com/google/go-querystring v1.1.0 h1:AnCroh3fv4ZBgVIf1Iwtovgjaw/GiKJo8M8yD/fhyJ8= github.com/google/go-querystring v1.1.0/go.mod h1:Kcdr2DB4koayq7X8pmAG4sNG59So17icRSOU623lUBU= +github.com/google/gofuzz v1.2.0 h1:xRy4A+RhZaiKjJ1bPfwQ8sedCA+YS2YcCHW6ec7JMi0= +github.com/google/gofuzz v1.2.0/go.mod h1:dBl0BpW6vV/+mYPU4Po3pmUjxk6FQPldtuIdl/M65Eg= github.com/google/martian v2.1.0+incompatible/go.mod h1:9I4somxYTbIHy5NJKHRl3wXiIaQGbYVAs8BPL6v8lEs= github.com/google/martian/v3 v3.0.0/go.mod h1:y5Zk1BBys9G+gd6Jrk0W3cC1+ELVxBWuIGO+w/tUAp0= github.com/google/martian/v3 v3.1.0/go.mod h1:y5Zk1BBys9G+gd6Jrk0W3cC1+ELVxBWuIGO+w/tUAp0= @@ -891,8 +907,11 @@ github.com/hashicorp/golang-lru v0.5.1/go.mod h1:/m3WP610KZHVQ1SGc6re/UDhFvYD7pJ github.com/iancoleman/strcase v0.2.0/go.mod h1:iwCmte+B7n89clKwxIoIXy/HfoL7AsD47ZCWhYzw7ho= github.com/ianlancetaylor/demangle v0.0.0-20181102032728-5e5cf60278f6/go.mod h1:aSSvb/t6k1mPoxDqO4vJh6VOCGPwU4O0C2/Eqndh1Sc= github.com/ianlancetaylor/demangle v0.0.0-20200824232613-28f6c0f3b639/go.mod h1:aSSvb/t6k1mPoxDqO4vJh6VOCGPwU4O0C2/Eqndh1Sc= +github.com/josharian/intern v1.0.0 h1:vlS4z54oSdjm0bgjRigI+G1HpF+tI+9rE5LLzOg8HmY= +github.com/josharian/intern v1.0.0/go.mod h1:5DoeVV0s6jJacbCEi61lwdGj/aVlrQvzHFFd8Hwg//Y= github.com/jstemmer/go-junit-report 
v0.0.0-20190106144839-af01ea7f8024/go.mod h1:6v2b51hI/fHJwM22ozAgKL4VKDeJcHhJFhtBdhmNjmU= github.com/jstemmer/go-junit-report v0.9.1/go.mod h1:Brl9GWCQeLvo8nXZwPNNblvFj/XSXhF0NWZEnDohbsk= +github.com/juju/gnuflag v0.0.0-20171113085948-2ce1bb71843d/go.mod h1:2PavIy+JPciBPrBUjwbNvtwB6RQlve+hkpll6QSNmOE= github.com/jung-kurt/gofpdf v1.0.0/go.mod h1:7Id9E/uU8ce6rXgefFLlgrJj/GYY22cpxn+r32jIOes= github.com/jung-kurt/gofpdf v1.0.3-0.20190309125859-24315acbbda5/go.mod h1:7Id9E/uU8ce6rXgefFLlgrJj/GYY22cpxn+r32jIOes= github.com/kballard/go-shellquote v0.0.0-20180428030007-95032a82bc51/go.mod h1:CzGEWj7cYgsdH8dAjBGEr58BoE7ScuLd+fwFZ44+/x8= @@ -906,8 +925,11 @@ github.com/kr/fs v0.1.0/go.mod h1:FFnZGqtBN9Gxj7eW1uZ42v5BccTP0vu6NEaFoC2HwRg= github.com/kr/pretty v0.1.0/go.mod h1:dAy3ld7l9f0ibDNOQOHHMYYIIbhfbHSm3C4ZsoJORNo= github.com/kr/pretty v0.2.1/go.mod h1:ipq/a2n7PKx3OHsz4KJII5eveXtPO4qwEXGdVfWzfnI= github.com/kr/pretty v0.3.0/go.mod h1:640gp4NfQd8pI5XOwp5fnNeVWj67G7CFk/SaSQn7NBk= +github.com/kr/pretty v0.3.1 h1:flRD4NNwYAUpkphVc1HcthR4KEIFJ65n8Mw5qdRn3LE= +github.com/kr/pretty v0.3.1/go.mod h1:hoEshYVHaxMs3cyo3Yncou5ZscifuDolrwPKZanG3xk= github.com/kr/pty v1.1.1/go.mod h1:pFQYn66WHrOpPYNljwOMqo10TkYh1fy3cYio2l3bCsQ= github.com/kr/text v0.1.0/go.mod h1:4Jbv+DJW3UT/LiOwJeYQe1efqtUx/iVham/4vfdArNI= +github.com/kr/text v0.2.0 h1:5Nx0Ya0ZqY2ygV366QzturHI13Jq95ApcVaJBhpS+AY= github.com/kr/text v0.2.0/go.mod h1:eLer722TekiGuMkidMxC/pM04lWEeraHUUmBw8l2grE= github.com/lufia/plan9stats v0.0.0-20211012122336-39d0f177ccd0 h1:6E+4a0GO5zZEnZ81pIr0yLvtUWk2if982qA3F3QD6H4= github.com/lufia/plan9stats v0.0.0-20211012122336-39d0f177ccd0/go.mod h1:zJYVVT2jmtg6P3p1VtQj7WsuWi/y4VnjVBn7F8KPB3I= @@ -916,6 +938,8 @@ github.com/lyft/protoc-gen-star v0.6.1/go.mod h1:TGAoBVkt8w7MPG72TrKIu85MIdXwDuz github.com/lyft/protoc-gen-star/v2 v2.0.1/go.mod h1:RcCdONR2ScXaYnQC5tUzxzlpA3WVYF7/opLeUgcQs/o= github.com/magiconair/properties v1.8.10 h1:s31yESBquKXCV9a/ScB3ESkOjUYYv+X0rg8SYxI99mE= 
github.com/magiconair/properties v1.8.10/go.mod h1:Dhd985XPs7jluiymwWYZ0G4Z61jb3vdS329zhj2hYo0= +github.com/mailru/easyjson v0.9.1 h1:LbtsOm5WAswyWbvTEOqhypdPeZzHavpZx96/n553mR8= +github.com/mailru/easyjson v0.9.1/go.mod h1:1+xMtQp2MRNVL/V1bOzuP3aP8VNwRW55fQUto+XFtTU= github.com/mattn/go-isatty v0.0.12/go.mod h1:cbi8OIDigv2wuxKPP5vlRcQ1OAZbq2CE4Kysco4FUpU= github.com/mattn/go-isatty v0.0.16/go.mod h1:kYGgaQfpe5nmfYZH+SKPsOc2e4SrIfOl2e/yFXSvRLM= github.com/mattn/go-sqlite3 v1.14.14/go.mod h1:NyWgC/yNuGj7Q9rpYnZvas74GogHl5/Z4A/KQRfk6bU= @@ -935,12 +959,24 @@ github.com/moby/sys/userns v0.1.0 h1:tVLXkFOxVu9A64/yh59slHVv9ahO9UIev4JZusOLG/g github.com/moby/sys/userns v0.1.0/go.mod h1:IHUYgu/kao6N8YZlp9Cf444ySSvCmDlmzUcYfDHOl28= github.com/moby/term v0.5.0 h1:xt8Q1nalod/v7BqbG21f8mQPqH+xAaC9C3N3wfWbVP0= github.com/moby/term v0.5.0/go.mod h1:8FzsFHVUBGZdbDsJw/ot+X+d5HLUbvklYLJ9uGfcI3Y= +github.com/mohae/deepcopy v0.0.0-20170929034955-c48cc78d4826 h1:RWengNIwukTxcDr9M+97sNutRR1RKhG96O6jWumTTnw= +github.com/mohae/deepcopy v0.0.0-20170929034955-c48cc78d4826/go.mod h1:TaXosZuwdSHYgviHp1DAtfrULt5eUgsSMsZf+YrPgl8= github.com/morikuni/aec v1.0.0 h1:nP9CBfwrvYnBRgY6qfDQkygYDmYwOilePFkwzv4dU8A= github.com/morikuni/aec v1.0.0/go.mod h1:BbKIizmSmc5MMPqRYbxO4ZU0S0+P200+tUnFx7PXmsc= +github.com/oapi-codegen/runtime v1.1.2 h1:P2+CubHq8fO4Q6fV1tqDBZHCwpVpvPg7oKiYzQgXIyI= +github.com/oapi-codegen/runtime v1.1.2/go.mod h1:SK9X900oXmPWilYR5/WKPzt3Kqxn/uS/+lbpREv+eCg= +github.com/oasdiff/yaml v0.0.0-20250309154309-f31be36b4037 h1:G7ERwszslrBzRxj//JalHPu/3yz+De2J+4aLtSRlHiY= +github.com/oasdiff/yaml v0.0.0-20250309154309-f31be36b4037/go.mod h1:2bpvgLBZEtENV5scfDFEtB/5+1M4hkQhDQrccEJ/qGw= +github.com/oasdiff/yaml3 v0.0.0-20250309153720-d2182401db90 h1:bQx3WeLcUWy+RletIKwUIt4x3t8n2SxavmoclizMb8c= +github.com/oasdiff/yaml3 v0.0.0-20250309153720-d2182401db90/go.mod h1:y5+oSEHCPT/DGrS++Wc/479ERge0zTFxaF8PbGKcg2o= github.com/opencontainers/go-digest v1.0.0 
h1:apOUWs51W5PlhuyGyz9FCeeBIOUDA/6nW8Oi/yOhh5U= github.com/opencontainers/go-digest v1.0.0/go.mod h1:0JzlMkj0TRzQZfJkVvzbP0HBR3IKzErnv2BNG4W4MAM= github.com/opencontainers/image-spec v1.1.1 h1:y0fUlFfIZhPF1W537XOLg0/fcx6zcHCJwooC2xJA040= github.com/opencontainers/image-spec v1.1.1/go.mod h1:qpqAh3Dmcf36wStyyWU+kCeDgrGnAve2nCC8+7h8Q0M= +github.com/perimeterx/marshmallow v1.1.5 h1:a2LALqQ1BlHM8PZblsDdidgv1mWi1DgC2UmX50IvK2s= +github.com/perimeterx/marshmallow v1.1.5/go.mod h1:dsXbUu8CRzfYP5a87xpp0xq9S3u0Vchtcl8we9tYaXw= +github.com/phayes/freeport v0.0.0-20220201140144-74d24b5ae9f5 h1:Ii+DKncOVM8Cu1Hc+ETb5K+23HdAMvESYE3ZJ5b5cMI= +github.com/phayes/freeport v0.0.0-20220201140144-74d24b5ae9f5/go.mod h1:iIss55rKnNBTvrwdmkUpLnDpZoAHvWaiq5+iMmen4AE= github.com/phpdave11/gofpdf v1.4.2/go.mod h1:zpO6xFn9yxo3YLyMvW8HcKWVdbNqgIfOOp2dXMnm1mY= github.com/phpdave11/gofpdi v1.0.12/go.mod h1:vBmVV0Do6hSBHC8uKUQ71JGW+ZGQq74llk/7bXwjDoI= github.com/phpdave11/gofpdi v1.0.13/go.mod h1:vBmVV0Do6hSBHC8uKUQ71JGW+ZGQq74llk/7bXwjDoI= @@ -966,6 +1002,8 @@ github.com/rogpeppe/fastuuid v1.2.0/go.mod h1:jVj6XXZzXRy/MSR5jhDC/2q6DgLz+nrA6L github.com/rogpeppe/go-internal v1.3.0/go.mod h1:M8bDsm7K2OlrFYOpmOWEs/qY81heoFRclV5y23lUDJ4= github.com/rogpeppe/go-internal v1.6.1/go.mod h1:xXDCJY+GAPziupqXw64V24skbSoqbTEfhy4qGm1nDQc= github.com/rogpeppe/go-internal v1.9.0/go.mod h1:WtVeX8xhTBvf0smdhujwtBcq4Qrzq/fJaraNFVN+nFs= +github.com/rogpeppe/go-internal v1.14.1 h1:UQB4HGPB6osV0SQTLymcB4TgvyWu6ZyliaW0tI/otEQ= +github.com/rogpeppe/go-internal v1.14.1/go.mod h1:MaRKkUm5W0goXpeCfT7UZI6fk/L7L7so1lCWt35ZSgc= github.com/ruudk/golang-pdf417 v0.0.0-20181029194003-1af4ab5afa58/go.mod h1:6lfFZQK844Gfx8o5WFuvpxWRwnSoipWe/p622j1v06w= github.com/ruudk/golang-pdf417 v0.0.0-20201230142125-a7e3863a1245/go.mod h1:pQAZKsJ8yyVxGRWYNEm9oFB8ieLgKFnamEyDmSA0BRk= github.com/shirou/gopsutil/v4 v4.25.6 h1:kLysI2JsKorfaFPcYmcJqbzROzsBWEOAtw6A7dIfqXs= @@ -978,10 +1016,12 @@ github.com/spf13/afero v1.6.0/go.mod 
h1:Ai8FlHk4v/PARR026UzYexafAt9roJ7LcLMAmO6Z github.com/spf13/afero v1.9.2/go.mod h1:iUV7ddyEEZPO5gA3zD4fJt6iStLlL+Lg4m2cihcDf8Y= github.com/spiffe/go-spiffe/v2 v2.6.0 h1:l+DolpxNWYgruGQVV0xsfeya3CsC7m8iBzDnMpsbLuo= github.com/spiffe/go-spiffe/v2 v2.6.0/go.mod h1:gm2SeUoMZEtpnzPNs2Csc0D/gX33k1xIx7lEzqblHEs= +github.com/spkg/bom v0.0.0-20160624110644-59b7046e48ad/go.mod h1:qLr4V1qq6nMqFKkMo8ZTx3f+BZEkzsRUY10Xsm2mwU0= github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME= github.com/stretchr/objx v0.4.0/go.mod h1:YvHI0jy2hoMjB+UWwv71VJQ9isScKT/TqJzVSSt89Yw= github.com/stretchr/objx v0.5.0/go.mod h1:Yh+to48EsGEfYuaHDzXPcE3xhTkx73EhmCGUpEOglKo= github.com/stretchr/testify v1.2.2/go.mod h1:a8OnRcib4nhh0OaRAV+Yts87kKdq0PP7pXfy6kDkUVs= +github.com/stretchr/testify v1.3.0/go.mod h1:M5WIy9Dh21IEIfnGCwXGc5bZfKNJtfHm1UVUgZn+9EI= github.com/stretchr/testify v1.4.0/go.mod h1:j7eGeouHqKxXV5pUuKE4zz7dFj8WfuZ+81PSLYec5m4= github.com/stretchr/testify v1.5.1/go.mod h1:5W2xD1RspED5o8YsWQXVCued0rvSQ+mT+I5cxcmMvtA= github.com/stretchr/testify v1.6.1/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg= @@ -998,8 +1038,12 @@ github.com/tklauser/go-sysconf v0.3.12 h1:0QaGUFOdQaIVdPgfITYzaTegZvdCjmYO52cSFA github.com/tklauser/go-sysconf v0.3.12/go.mod h1:Ho14jnntGE1fpdOqQEEaiKRpvIavV0hSfmBq8nJbHYI= github.com/tklauser/numcpus v0.6.1 h1:ng9scYS7az0Bk4OZLvrNXNSAO2Pxr1XXRAPyjhIx+Fk= github.com/tklauser/numcpus v0.6.1/go.mod h1:1XfjsgE2zo8GVw7POkMbHENHzVg3GzmoZ9fESEdAacY= +github.com/ugorji/go/codec v1.2.11 h1:BMaWp1Bb6fHwEtbplGBGJ498wD+LKlNSl25MjdZY4dU= +github.com/ugorji/go/codec v1.2.11/go.mod h1:UNopzCgEMSXjBc6AOMqYvWC1ktqTAfzJZUZgYf6w6lg= github.com/web-platform-tests/wpt.fyi v0.0.0-20251118162843-54f805c8a632 h1:9t4b2caqsFRUWK4k9+lM/PkxLTCvo46ZQPVaoHPaCxY= github.com/web-platform-tests/wpt.fyi v0.0.0-20251118162843-54f805c8a632/go.mod h1:YpCvJq5JKA+aa4+jwK1S2uQ7r0horYVl6DHcl/G9S3s= +github.com/woodsbury/decimal128 v1.4.0 
h1:xJATj7lLu4f2oObouMt2tgGiElE5gO6mSWUjQsBgUlc= +github.com/woodsbury/decimal128 v1.4.0/go.mod h1:BP46FUrVjVhdTbKT+XuQh2xfQaGki9LMIRJSFuh6THU= github.com/yuin/goldmark v1.1.25/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74= github.com/yuin/goldmark v1.1.27/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74= github.com/yuin/goldmark v1.1.32/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74= @@ -1043,6 +1087,8 @@ go.opentelemetry.io/otel/trace v1.38.0/go.mod h1:j1P9ivuFsTceSWe1oY+EeW3sc+Pp42s go.opentelemetry.io/proto/otlp v0.7.0/go.mod h1:PqfVotwruBrMGOCsRd/89rSnXhoiJIqeYNgFYFoEGnI= go.opentelemetry.io/proto/otlp v0.15.0/go.mod h1:H7XAot3MsfNsj7EXtrA2q5xSNQ10UqI405h3+duxN4U= go.opentelemetry.io/proto/otlp v0.19.0/go.mod h1:H7XAot3MsfNsj7EXtrA2q5xSNQ10UqI405h3+duxN4U= +go.uber.org/mock v0.6.0 h1:hyF9dfmbgIX5EfOdasqLsWD6xqpNZlXblLB/Dbnwv3Y= +go.uber.org/mock v0.6.0/go.mod h1:KiVJ4BqZJaMj4svdfmHM0AUx4NJYO8ZNpPnZn1Z+BBU= golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w= golang.org/x/crypto v0.0.0-20190510104115-cbcb75029529/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI= golang.org/x/crypto v0.0.0-20190605123033-f99c8df09eb5/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI= @@ -1288,6 +1334,7 @@ golang.org/x/sys v0.0.0-20220520151302-bc2c85ada10a/go.mod h1:oPkhp1MJrh7nUepCBc golang.org/x/sys v0.0.0-20220610221304-9f5ed59c137d/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20220615213510-4f61da869c0c/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20220624220833-87e55d714810/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20220715151400-c0bba94af5f8/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20220722155257-8c9f86f7a55f/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20220728004956-3c1f35247d10/go.mod 
h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20220811171246-fbc7d0a398ab/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= @@ -1682,6 +1729,7 @@ google.golang.org/protobuf v1.36.10 h1:AYd7cD/uASjIL6Q9LiTjz8JLcrh/88q5UObnmY3aO google.golang.org/protobuf v1.36.10/go.mod h1:HTf+CrKn2C3g5S8VImy6tdcUvCska2kB7j23XfzDpco= gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= gopkg.in/check.v1 v1.0.0-20180628173108-788fd7840127/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= +gopkg.in/check.v1 v1.0.0-20201130134442-10cb98267c6c h1:Hei/4ADfdWqJk1ZMxUNpqntNwaWcugrBjAiHlqqRiVk= gopkg.in/check.v1 v1.0.0-20201130134442-10cb98267c6c/go.mod h1:JHkPIbrfpd72SG/EVd6muEfDQjcINNoR0C8j2r3qZ4Q= gopkg.in/errgo.v2 v2.1.0/go.mod h1:hNsd1EY+bozCKY1Ytp96fpM3vjJbqLJn88ws8XvfDNI= gopkg.in/yaml.v2 v2.2.2/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= diff --git a/workers/email/manifests/pod.yaml b/workers/email/manifests/pod.yaml index 0e8b1744e..b5a1e80f7 100644 --- a/workers/email/manifests/pod.yaml +++ b/workers/email/manifests/pod.yaml @@ -33,9 +33,17 @@ spec: - name: SPANNER_EMULATOR_HOST value: 'spanner:9010' - name: PUBSUB_EMULATOR_HOST - value: 'http://pubsub:8086' + value: 'pubsub:8060' - name: EMAIL_SUBSCRIPTION_ID value: 'chime-delivery-sub-id' + - name: FRONTEND_BASE_URL + value: 'http://localhost:5555' + - name: SMTP_HOST + value: 'mailpit' + - name: SMTP_PORT + value: '1025' + - name: FROM_ADDRESS + value: 'test@webstatus.dev' resources: limits: cpu: 250m diff --git a/workers/email/pkg/digest/components.go b/workers/email/pkg/digest/components.go new file mode 100644 index 000000000..40eb13337 --- /dev/null +++ b/workers/email/pkg/digest/components.go @@ -0,0 +1,230 @@ +// Copyright 2025 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +// nolint:lll // WONTFIX - for readability +package digest + +const componentStyles = `{{- define "style_badge_wrapper" -}}align-self: stretch; padding-top: 12px; padding-bottom: 11px; padding-left: 15px; padding-right: 16px; overflow: hidden; border-top-left-radius: 4px; border-top-right-radius: 4px; justify-content: flex-start; align-items: center; display: flex;{{- end -}} +{{- define "style_badge_inner_wrapper" -}}flex: 1 1 0; flex-direction: column; justify-content: center; align-items: flex-start; display: inline-flex;{{- end -}} +{{- define "style_change_detail_wrapper" -}}align-self: stretch; justify-content: flex-start; align-items: center; gap: 10px; display: inline-flex; width: 100%;{{- end -}} +{{- define "style_change_detail_inner" -}}flex: 1 1 0;{{- end -}} +{{- define "style_banner_wrapper" -}}align-self: stretch; height: 50px; padding-top: 12px; padding-bottom: 11px; padding-left: 15px; padding-right: 16px; overflow: hidden; border-top-left-radius: 4px; border-top-right-radius: 4px; justify-content: flex-start; align-items: center; gap: 8px; display: flex;{{- end -}} +{{- define "style_banner_icon_wrapper_28" -}}height: 28px; position: relative; overflow: hidden; display: flex; align-items: center;{{- end -}} +{{- define "style_banner_icon_wrapper_20" -}}height: 20px; position: relative; margin-right: 4px;{{- end -}} +{{- define "style_img_responsive" -}}display: block; width: auto;{{- end -}} +{{- define "style_banner_text_wrapper" -}}flex: 1 1 0;{{- end -}} +{{- define "style_banner_browser_logos_wrapper" -}}justify-content: 
flex-start; align-items: center; display: flex; margin-right: 8px;{{- end -}} +{{- define "style_browser_item_row" -}}align-self: stretch; justify-content: flex-start; align-items: center; gap: 10px; display: flex;{{- end -}} +{{- define "style_browser_item_logo_wrapper" -}}justify-content: flex-start; align-items: center; display: flex;{{- end -}} +{{- define "style_browser_item_feature_link_wrapper" -}}align-self: stretch; justify-content: flex-start; align-items: center; gap: 10px; display: inline-flex; margin-top: 8px;{{- end -}} +{{- define "style_button_wrapper" -}}margin: 20px 0; text-align: center;{{- end -}} +{{- define "style_footer_wrapper" -}}align-self: stretch; padding-top: 16px; flex-direction: column; justify-content: flex-start; align-items: flex-start; gap: 12px; display: flex; {{- template "font_family_main" -}};{{- end -}} +{{- define "style_footer_hr" -}}align-self: stretch; height: 1px; background: #E4E4E7;{{- end -}} +{{- define "style_footer_text_wrapper" -}}align-self: stretch;{{- end -}} +{{- define "style_feature_title_row_wrapper" -}}align-self: stretch; justify-content: flex-start; align-items: center; gap: 10px; display: flex;{{- end -}} +{{- define "style_feature_title_row_inner" -}}flex: 1 1 0;{{- end -}}` + +const badgeComponent = `{{- define "badge" -}} +
+
+
{{.Title}}
+ {{- if .Description -}} +
{{.Description}}
+ {{- end -}} +
+
+{{- end -}}` + +const introTextComponent = `{{- define "intro_text" -}} +
+

{{.Subject}}

+
+ Here is your update for the saved search '{{.Query}}'. + {{.SummaryText}}. +
+
+{{- end -}}` + +const changeDetailComponent = `{{- define "change_detail" -}} +
+
+ {{.Label}} + ({{.From}} → {{.To}}) +
+
+{{- end -}}` + +const baselineChangeItemComponent = `{{- define "baseline_change_item" -}} +
+
+
+ {{.To}} +
+
+ Baseline + {{.To}} +
+
+
+
+
+ {{.FeatureName}} +
+ +
+
+
+{{- end -}}` + +const browserItemComponent = `{{- define "browser_item" -}} +
+
+
+ {{.Name}} +
+
+ {{.Name}}: {{ template "browser_status_detail" .From }} → {{ template "browser_status_detail" .To -}} +
+
+ {{- if .FeatureName -}} + + {{- end -}} +
+{{- end -}}` + +const buttonComponent = `{{- define "button" -}} + +{{- end -}}` + +const footerComponent = `{{- define "footer" -}} +
+
+
+ You can + unsubscribe + or change any of your alerts on + webstatus.dev +
+
+{{- end -}}` + +const bannerComponents = `{{- define "banner_baseline_widely" -}} +
+
+ Widely Available +
+
+ Baseline + Widely available +
+
+{{- end -}} +{{- define "banner_baseline_newly" -}} +
+
+ Newly Available +
+
+ Baseline + Newly available +
+
+{{- end -}} +{{- define "banner_baseline_regression" -}} +
+
+ Regressed +
+
+ Regressed + to limited availability +
+
+{{- end -}} +{{- define "banner_browser_implementation" -}} +
+
+ {{- /* Always display the 4 main browser logos as requested */ -}} +
+ +
+
+ +
+
+ +
+
+ +
+
+
Browser support changed
+
+{{- end -}} +{{- define "banner_generic" -}} +
+
+ {{.Type}} +
+
+{{- end -}}` +const featureTitleRowComponent = `{{- define "feature_title_row" -}} +
+
+ {{.Name}} + {{- with .Docs -}} + {{- if .MDNDocs -}} + ( + {{- range $i, $doc := .MDNDocs }} + {{- if $i }}, {{ end -}} + MDN + {{- end -}} + ) + {{- end -}} + {{- end -}} +
+ {{- if .Date -}} +
{{.Date}}
+ {{- end -}} +
+{{- end -}}` + +const browserStatusDetailComponent = `{{- define "browser_status_detail" -}} + {{- formatBrowserStatus .Status -}} + {{- if .Version -}} + in {{.Version}} + {{- end -}} + {{- if .Date -}} + (on {{ formatDate .Date -}}) + {{- end -}} +{{- end -}}` + +const EmailComponents = badgeComponent + + introTextComponent + + changeDetailComponent + + baselineChangeItemComponent + + browserItemComponent + + buttonComponent + + footerComponent + + bannerComponents + + featureTitleRowComponent + + browserStatusDetailComponent diff --git a/workers/email/pkg/digest/renderer.go b/workers/email/pkg/digest/renderer.go new file mode 100644 index 000000000..c80d75b3f --- /dev/null +++ b/workers/email/pkg/digest/renderer.go @@ -0,0 +1,398 @@ +// Copyright 2025 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package digest + +import ( + "bytes" + "errors" + "fmt" + "html/template" + "strings" + "time" + + "github.com/GoogleChrome/webstatus.dev/lib/workertypes" +) + +// HTMLRenderer implements the email_handler.TemplateRenderer interface. +type HTMLRenderer struct { + tmpl *template.Template + webStatusBaseURL string +} + +// NewHTMLRenderer creates a new renderer with the default template. 
+func NewHTMLRenderer(webStatusBaseURL string) (*HTMLRenderer, error) { + r := &HTMLRenderer{ + webStatusBaseURL: webStatusBaseURL, + tmpl: nil, + } + + // Register helper functions for the template + funcMap := template.FuncMap{ + "toLower": strings.ToLower, + "browserLogoURL": r.browserLogoURL, + "browserDisplayName": r.browserDisplayName, + "statusLogoURL": r.statusLogoURL, + "dict": dict, + "list": list, + "append": appendList, + "formatDate": formatDate, + "formatBrowserStatus": r.formatBrowserStatus, + "formatBaselineStatus": r.formatBaselineStatus, + "badgeBackgroundColor": badgeBackgroundColor, + } + + // Parse both the components and the main template + tmpl, err := template.New("email").Funcs(funcMap).Parse( + EmailStyles + componentStyles + EmailComponents + defaultEmailTemplate) + if err != nil { + return nil, fmt.Errorf("failed to parse email templates: %w", err) + } + r.tmpl = tmpl + + return r, nil +} + +// dict helper function to creating maps in templates. +func dict(values ...any) (map[string]any, error) { + if len(values)%2 != 0 { + return nil, errors.New("invalid dict call: odd number of arguments") + } + dict := make(map[string]any, len(values)/2) + for i := 0; i < len(values); i += 2 { + key, ok := values[i].(string) + if !ok { + return nil, errors.New("dict keys must be strings") + } + dict[key] = values[i+1] + } + + return dict, nil +} + +func list(values ...any) []any { + return values +} + +func appendList(l []any, v any) []any { + return append(l, v) +} + +func formatDate(t *time.Time) string { + if t == nil { + return "" + } + + return t.Format("2006-01-02") +} + +func (r *HTMLRenderer) formatBrowserStatus(status workertypes.BrowserStatus) string { + switch status { + case workertypes.BrowserStatusAvailable: + return "Available" + case workertypes.BrowserStatusUnavailable: + return "Unavailable" + case workertypes.BrowserStatusUnknown: + break + } + + return "Unknown" +} + +func (r *HTMLRenderer) formatBaselineStatus(status 
workertypes.BaselineStatus) string { + switch status { + case workertypes.BaselineStatusLimited: + return "Limited" + case workertypes.BaselineStatusNewly: + return "Newly" + case workertypes.BaselineStatusWidely: + return "Widely" + case workertypes.BaselineStatusUnknown: + break + } + + return "Unknown" +} + +func badgeBackgroundColor(title string) string { + switch title { + case "Added": + return "#E6F4EA" // Green + case "Removed": + return "#E4E4E7" // Neutral Gray + case "Deleted": + return "#FCE8E6" // Red + default: + return "#E8F0FE" // Default: Moved/Split (Blue-ish) + } +} + +// templateData is the struct passed to the HTML template. +type BrowserChangeRenderData struct { + BrowserName workertypes.BrowserName + Change *workertypes.Change[workertypes.BrowserValue] + FeatureName string + FeatureID string +} + +type templateData struct { + Subject string + Query string + SummaryText string + BaselineNewlyChanges []workertypes.SummaryHighlight + BaselineWidelyChanges []workertypes.SummaryHighlight + BaselineRegressionChanges []workertypes.SummaryHighlight + AllBrowserChanges []BrowserChangeRenderData + AddedFeatures []workertypes.SummaryHighlight + RemovedFeatures []workertypes.SummaryHighlight + DeletedFeatures []workertypes.SummaryHighlight + MovedFeatures []workertypes.SummaryHighlight + SplitFeatures []workertypes.SummaryHighlight + Truncated bool + BaseURL string + UnsubscribeURL string +} + +// RenderDigest processes the delivery job and returns the subject and HTML body. +func (r *HTMLRenderer) RenderDigest(job workertypes.IncomingEmailDeliveryJob) (string, string, error) { + // 1. Generate Subject + subject := r.generateSubject(job.Metadata.Frequency, job.Metadata.Query) + + // 2. 
Prepare Template Data using the visitor + generator := new(templateDataGenerator) + generator.job = job + generator.baseURL = r.webStatusBaseURL + generator.subject = subject + + if err := workertypes.ParseEventSummary(job.SummaryRaw, generator); err != nil { + return "", "", fmt.Errorf("failed to parse event summary: %w", err) + } + + // 3. Render Body + var body bytes.Buffer + if err := r.tmpl.Execute(&body, generator.data); err != nil { + return "", "", fmt.Errorf("failed to execute email template: %w", err) + } + + return subject, body.String(), nil +} + +// templateDataGenerator implements workertypes.SummaryVisitor to prepare the data for the template. +type templateDataGenerator struct { + job workertypes.IncomingEmailDeliveryJob + subject string + baseURL string + data templateData +} + +// VisitV1 is called when a V1 summary is parsed. +func (g *templateDataGenerator) VisitV1(summary workertypes.EventSummary) error { + g.data = templateData{ + Subject: g.subject, + Query: g.job.Metadata.Query, + SummaryText: summary.Text, + Truncated: summary.Truncated, + BaseURL: g.baseURL, + UnsubscribeURL: fmt.Sprintf("%s/subscriptions/%s?action=unsubscribe", + g.baseURL, g.job.SubscriptionID), + BaselineNewlyChanges: nil, + BaselineWidelyChanges: nil, + BaselineRegressionChanges: nil, + AllBrowserChanges: nil, + AddedFeatures: nil, + RemovedFeatures: nil, + DeletedFeatures: nil, + SplitFeatures: nil, + MovedFeatures: nil, + } + // 2. Filter Content (Content Filtering) + // We only show highlights that match the user's specific triggers. + filteredHighlights := filterHighlights(summary.Highlights, g.job.Triggers) + if len(filteredHighlights) != 0 { + // As long as we have some filtered highlights, override it. + // This should be the common case unless there's some logic error. 
+ summary.Highlights = filteredHighlights + } + + g.categorizeHighlights(summary.Highlights) + + return nil +} + +func (g *templateDataGenerator) categorizeHighlights(highlights []workertypes.SummaryHighlight) { + for _, highlight := range highlights { + g.processHighlight(highlight) + } +} + +func (g *templateDataGenerator) processHighlight(highlight workertypes.SummaryHighlight) { + g.routeHighlightToCategory(highlight) +} + +func (g *templateDataGenerator) routeHighlightToCategory(highlight workertypes.SummaryHighlight) { + switch highlight.Type { + case workertypes.SummaryHighlightTypeMoved: + g.data.MovedFeatures = append(g.data.MovedFeatures, highlight) + case workertypes.SummaryHighlightTypeSplit: + g.data.SplitFeatures = append(g.data.SplitFeatures, highlight) + case workertypes.SummaryHighlightTypeAdded: + g.data.AddedFeatures = append(g.data.AddedFeatures, highlight) + case workertypes.SummaryHighlightTypeRemoved: + g.data.RemovedFeatures = append(g.data.RemovedFeatures, highlight) + case workertypes.SummaryHighlightTypeDeleted: + g.data.DeletedFeatures = append(g.data.DeletedFeatures, highlight) + case workertypes.SummaryHighlightTypeChanged: + g.processChangedData(highlight) + } +} + +func (g *templateDataGenerator) processChangedData(highlight workertypes.SummaryHighlight) { + // Consolidate browser changes into their own list + if len(highlight.BrowserChanges) > 0 { + for browserName, change := range highlight.BrowserChanges { + // If a feature regresses AND loses a browser impl, it will be in two sections. 
+ if change == nil { + continue + } + g.data.AllBrowserChanges = append(g.data.AllBrowserChanges, BrowserChangeRenderData{ + BrowserName: browserName, + Change: change, + FeatureName: highlight.FeatureName, + FeatureID: highlight.FeatureID, + }) + } + } + + if highlight.BaselineChange != nil { + g.processBaselineChange(highlight) + } +} + +func (g *templateDataGenerator) processBaselineChange(highlight workertypes.SummaryHighlight) { + switch highlight.BaselineChange.To.Status { + case workertypes.BaselineStatusNewly: + g.data.BaselineNewlyChanges = append(g.data.BaselineNewlyChanges, highlight) + case workertypes.BaselineStatusWidely: + g.data.BaselineWidelyChanges = append(g.data.BaselineWidelyChanges, highlight) + case workertypes.BaselineStatusLimited: + g.data.BaselineRegressionChanges = append(g.data.BaselineRegressionChanges, highlight) + case workertypes.BaselineStatusUnknown: + // Do nothing + } +} + +func filterHighlights( + highlights []workertypes.SummaryHighlight, triggers []workertypes.JobTrigger) []workertypes.SummaryHighlight { + // If no triggers are specified (e.g. legacy or "all"), return everything. + if len(triggers) == 0 { + return highlights + } + + var filtered []workertypes.SummaryHighlight + for _, h := range highlights { + matched := false + for _, t := range triggers { + if h.MatchesTrigger(t) { + matched = true + + break + } + } + if matched { + filtered = append(filtered, h) + } + } + + return filtered +} + +func (r *HTMLRenderer) generateSubject(frequency workertypes.JobFrequency, query string) string { + prefix := "Update:" + switch frequency { + case workertypes.FrequencyWeekly: + prefix = "Weekly Digest:" + case workertypes.FrequencyMonthly: + prefix = "Monthly Digest:" + case workertypes.FrequencyImmediate: + // Do nothing + case workertypes.FrequencyUnknown: + // Do nothing + } + + displayQuery := query + if len(displayQuery) > 50 { + displayQuery = displayQuery[:47] + "..." 
+ } + + return fmt.Sprintf("%s %s", prefix, displayQuery) +} + +// browserToString helps handle the any passed from templates which could be +// string or workertypes.BrowserName. +func (r *HTMLRenderer) browserToString(browser any) string { + switch v := browser.(type) { + case string: + return v + case workertypes.BrowserName: + return string(v) + default: + return fmt.Sprintf("%v", v) + } +} + +// browserLogoURL returns the URL for the browser logo. +// Maps mobile browsers to their desktop equivalents since we share logos. +func (r *HTMLRenderer) browserLogoURL(browser any) string { + b := strings.ToLower(r.browserToString(browser)) + + switch b { + case "chrome_android": + b = "chrome" + case "firefox_android": + b = "firefox" + case "safari_ios": + b = "safari" + } + + return fmt.Sprintf("%s/public/img/email/%s.png", r.webStatusBaseURL, b) +} + +// browserDisplayName returns a human-readable name for the browser. +func (r *HTMLRenderer) browserDisplayName(browser any) string { + b := strings.ToLower(r.browserToString(browser)) + + switch b { + case "chrome": + return "Chrome" + case "chrome_android": + return "Chrome Android" + case "edge": + return "Edge" + case "firefox": + return "Firefox" + case "firefox_android": + return "Firefox Android" + case "safari": + return "Safari" + case "safari_ios": + return "Safari iOS" + } + // Fallback for unknown + return r.browserToString(browser) +} + +func (r *HTMLRenderer) statusLogoURL(status string) string { + return fmt.Sprintf("%s/public/img/email/%s.png", r.webStatusBaseURL, strings.ToLower(status)) + +} diff --git a/workers/email/pkg/digest/renderer_test.go b/workers/email/pkg/digest/renderer_test.go new file mode 100644 index 000000000..e3b0153dc --- /dev/null +++ b/workers/email/pkg/digest/renderer_test.go @@ -0,0 +1,525 @@ +// Copyright 2025 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package digest + +import ( + "encoding/json" + "flag" + "os" + "path/filepath" + "testing" + "time" + + "github.com/GoogleChrome/webstatus.dev/lib/generic" + "github.com/GoogleChrome/webstatus.dev/lib/workertypes" + "github.com/google/go-cmp/cmp" +) + +// nolint:gochecknoglobals // WONTFIX - used for testing only +var updateGolden = flag.Bool("update", false, "update golden files") + +func TestRenderDigest_Golden(t *testing.T) { + newlyDate := time.Date(2025, 1, 1, 0, 0, 0, 0, time.UTC) + widelyDate := time.Date(2025, 12, 27, 0, 0, 0, 0, time.UTC) + + // Setup complex test data to exercise all templates + summary := workertypes.EventSummary{ + SchemaVersion: "v1", + Text: "11 features changed", + Categories: workertypes.SummaryCategories{ + Updated: 5, + Added: 2, + Removed: 1, + Moved: 1, + Split: 1, + Deleted: 1, + QueryChanged: 0, + UpdatedImpl: 0, + UpdatedRename: 0, + UpdatedBaseline: 3, + }, + Truncated: false, + Highlights: []workertypes.SummaryHighlight{ + { + // Case 1: Baseline Widely (with multiple docs) + Type: workertypes.SummaryHighlightTypeChanged, + FeatureName: "Container queries", + FeatureID: "container-queries", + Docs: &workertypes.Docs{ + MDNDocs: []workertypes.DocLink{ + {URL: "https://developer.mozilla.org/docs/Web/CSS/CSS_Container_Queries", Title: nil, Slug: nil}, + {URL: "https://developer.mozilla.org/docs/Web/CSS/container-queries", Title: nil, Slug: nil}, + }, + }, + BaselineChange: &workertypes.Change[workertypes.BaselineValue]{ + From: workertypes.BaselineValue{Status: workertypes.BaselineStatusNewly, LowDate: 
&newlyDate, + HighDate: nil}, + To: workertypes.BaselineValue{Status: workertypes.BaselineStatusWidely, LowDate: &newlyDate, + HighDate: &widelyDate}, + }, + NameChange: nil, + BrowserChanges: nil, + Moved: nil, + Split: nil, + }, + { + // Case 2: Baseline Newly + Type: workertypes.SummaryHighlightTypeChanged, + FeatureName: "Newly Available Feature", + FeatureID: "newly-feature", + BaselineChange: &workertypes.Change[workertypes.BaselineValue]{ + From: workertypes.BaselineValue{Status: workertypes.BaselineStatusLimited, LowDate: nil, + HighDate: nil}, + To: workertypes.BaselineValue{Status: workertypes.BaselineStatusNewly, LowDate: &newlyDate, + HighDate: nil}, + }, + Docs: nil, + NameChange: nil, + BrowserChanges: nil, + Moved: nil, + Split: nil, + }, + { + // Case 3: Baseline Regression + Type: workertypes.SummaryHighlightTypeChanged, + FeatureName: "Regressed Feature", + FeatureID: "regressed-feature", + BaselineChange: &workertypes.Change[workertypes.BaselineValue]{ + From: workertypes.BaselineValue{Status: workertypes.BaselineStatusWidely, + LowDate: &newlyDate, HighDate: &widelyDate}, + To: workertypes.BaselineValue{Status: workertypes.BaselineStatusLimited, + LowDate: nil, HighDate: nil}, + }, + BrowserChanges: map[workertypes.BrowserName]*workertypes.Change[workertypes.BrowserValue]{ + workertypes.BrowserChrome: { + From: workertypes.BrowserValue{Status: workertypes.BrowserStatusAvailable, + Version: generic.ValuePtr("120"), Date: nil}, + To: workertypes.BrowserValue{Status: workertypes.BrowserStatusUnavailable, Version: nil, + Date: nil}, + }, + workertypes.BrowserFirefox: nil, + workertypes.BrowserChromeAndroid: nil, + workertypes.BrowserEdge: nil, + workertypes.BrowserFirefoxAndroid: nil, + workertypes.BrowserSafari: nil, + workertypes.BrowserSafariIos: nil, + }, + Docs: nil, + NameChange: nil, + Moved: nil, + Split: nil, + }, + { + // Case 4: Browser Implementation with version + Type: workertypes.SummaryHighlightTypeChanged, + FeatureName: 
"content-visibility", + FeatureID: "content-visibility", + BrowserChanges: map[workertypes.BrowserName]*workertypes.Change[workertypes.BrowserValue]{ + workertypes.BrowserSafariIos: { + From: workertypes.BrowserValue{Status: workertypes.BrowserStatusUnavailable, Version: nil, + Date: nil}, + To: workertypes.BrowserValue{Status: workertypes.BrowserStatusAvailable, + Version: generic.ValuePtr("17.2"), + // Purposefully set to nil to test that it doesn't crash. + Date: nil}, + }, + workertypes.BrowserChrome: nil, + workertypes.BrowserChromeAndroid: nil, + workertypes.BrowserEdge: nil, + workertypes.BrowserFirefoxAndroid: nil, + workertypes.BrowserSafari: nil, + workertypes.BrowserFirefox: nil, + }, + Docs: nil, + NameChange: nil, + BaselineChange: nil, + Moved: nil, + Split: nil, + }, + { + // Case 5: Browser Implementation with date + Type: workertypes.SummaryHighlightTypeChanged, + FeatureName: "another-feature", + FeatureID: "another-feature", + BrowserChanges: map[workertypes.BrowserName]*workertypes.Change[workertypes.BrowserValue]{ + workertypes.BrowserChrome: { + From: workertypes.BrowserValue{Status: workertypes.BrowserStatusUnavailable, + Date: nil, Version: nil}, + To: workertypes.BrowserValue{Status: workertypes.BrowserStatusAvailable, Date: &newlyDate, + // Purposefully set to nil so that we can see that it doesn't crash. 
+ Version: nil}, + }, + workertypes.BrowserFirefox: nil, + workertypes.BrowserChromeAndroid: nil, + workertypes.BrowserEdge: nil, + workertypes.BrowserFirefoxAndroid: nil, + workertypes.BrowserSafari: nil, + workertypes.BrowserSafariIos: nil, + }, + Docs: nil, + NameChange: nil, + BaselineChange: nil, + Moved: nil, + Split: nil, + }, + { + // Case 6: Added + Type: workertypes.SummaryHighlightTypeAdded, + FeatureName: "New Feature", + FeatureID: "new-feature", + Docs: nil, + NameChange: nil, + BaselineChange: nil, + BrowserChanges: nil, + Moved: nil, + Split: nil, + }, + { + // Case 6b: Another Added + Type: workertypes.SummaryHighlightTypeAdded, + FeatureName: "Another New Feature", + FeatureID: "another-new-feature", + Docs: nil, + NameChange: nil, + BaselineChange: nil, + BrowserChanges: nil, + Moved: nil, + Split: nil, + }, + { + // Case 7: Removed + Type: workertypes.SummaryHighlightTypeRemoved, + FeatureName: "Removed Feature", + FeatureID: "removed-feature", + Docs: nil, + NameChange: nil, + BaselineChange: nil, + BrowserChanges: nil, + Moved: nil, + Split: nil, + }, + { + // Case 8: Moved + Type: workertypes.SummaryHighlightTypeMoved, + FeatureName: "New Cool Name", + FeatureID: "new-cool-name", + Moved: &workertypes.Change[workertypes.FeatureRef]{ + From: workertypes.FeatureRef{ID: "old-name", Name: "Old Name"}, + To: workertypes.FeatureRef{ID: "new-cool-name", Name: "New Cool Name"}, + }, + Docs: nil, + NameChange: nil, + BaselineChange: nil, + BrowserChanges: nil, + Split: nil, + }, + { + // Case 9: Split + Type: workertypes.SummaryHighlightTypeSplit, + FeatureName: "Feature To Split", + FeatureID: "feature-to-split", + Split: &workertypes.SplitChange{ + From: workertypes.FeatureRef{ID: "feature-to-split", Name: "Feature To Split"}, + To: []workertypes.FeatureRef{ + {ID: "sub-feature-1", Name: "Sub Feature 1"}, + {ID: "sub-feature-2", Name: "Sub Feature 2"}, + }, + }, + Docs: nil, + NameChange: nil, + BaselineChange: nil, + BrowserChanges: nil, + Moved: 
nil, + }, + { + // Case 10: Browser Implementation with version and date + Type: workertypes.SummaryHighlightTypeChanged, + FeatureName: "new-browser-feature", + FeatureID: "new-browser-feature", + BrowserChanges: map[workertypes.BrowserName]*workertypes.Change[workertypes.BrowserValue]{ + workertypes.BrowserFirefox: { + From: workertypes.BrowserValue{Status: workertypes.BrowserStatusUnavailable, + Version: nil, Date: nil}, + To: workertypes.BrowserValue{Status: workertypes.BrowserStatusAvailable, + Version: generic.ValuePtr("123"), Date: &newlyDate}, + }, + workertypes.BrowserChrome: nil, + workertypes.BrowserChromeAndroid: nil, + workertypes.BrowserEdge: nil, + workertypes.BrowserFirefoxAndroid: nil, + workertypes.BrowserSafari: nil, + workertypes.BrowserSafariIos: nil, + }, + NameChange: nil, + BaselineChange: nil, + Moved: nil, + Split: nil, + Docs: nil, + }, + { + // Case 11: Deleted + Type: workertypes.SummaryHighlightTypeDeleted, + FeatureName: "Deleted Feature", + FeatureID: "deleted-feature", + Docs: nil, + NameChange: nil, + BaselineChange: nil, + BrowserChanges: nil, + Moved: nil, + Split: nil, + }, + }, + } + summaryBytes, _ := json.Marshal(summary) + + job := workertypes.IncomingEmailDeliveryJob{ + EmailDeliveryJob: workertypes.EmailDeliveryJob{ + SummaryRaw: summaryBytes, + RecipientEmail: "rick@example.com", + SubscriptionID: "sub-123", + ChannelID: "chan-1", + Metadata: workertypes.DeliveryMetadata{ + Query: "group:css", + Frequency: workertypes.FrequencyWeekly, + EventID: "evt-123", + SearchID: "s-1", + GeneratedAt: time.Now(), + }, + Triggers: nil, + }, + EmailEventID: "email-event-id", + } + + // Initialize Renderer + renderer, err := NewHTMLRenderer("http://localhost:5555") + if err != nil { + t.Fatalf("NewHTMLRenderer failed: %v", err) + } + + // Execute + _, body, err := renderer.RenderDigest(job) + if err != nil { + t.Fatalf("RenderDigest failed: %v", err) + } + + goldenFile := filepath.Join("testdata", "digest.golden.html") + + if 
*updateGolden { + if err := os.MkdirAll("testdata", 0755); err != nil { + t.Fatal(err) + } + if err := os.WriteFile(goldenFile, []byte(body), 0600); err != nil { + t.Fatal(err) + } + } + + expected, err := os.ReadFile(goldenFile) + if err != nil { + t.Fatalf("failed to read golden file: %v", err) + } + + if diff := cmp.Diff(string(expected), body); diff != "" { + t.Errorf("HTML mismatch (-want +got):\n%s", diff) + } +} + +func TestFilterHighlights(t *testing.T) { + newlyDate := time.Date(2025, 1, 1, 0, 0, 0, 0, time.UTC) + widelyDate := time.Date(2025, 12, 27, 0, 0, 0, 0, time.UTC) + availableDate := time.Date(2025, 12, 28, 0, 0, 0, 0, time.UTC) + + // Reusable highlight definitions + hNewly := workertypes.SummaryHighlight{ + Type: workertypes.SummaryHighlightTypeChanged, + FeatureID: "h1", + FeatureName: "Newly Feature", + BaselineChange: &workertypes.Change[workertypes.BaselineValue]{ + From: workertypes.BaselineValue{Status: workertypes.BaselineStatusLimited, LowDate: nil, HighDate: nil}, + To: workertypes.BaselineValue{Status: workertypes.BaselineStatusNewly, LowDate: &newlyDate, + HighDate: nil}, + }, + Docs: nil, + NameChange: nil, + BrowserChanges: nil, + Moved: nil, + Split: nil, + } + hWidely := workertypes.SummaryHighlight{ + Type: workertypes.SummaryHighlightTypeChanged, + FeatureID: "h2", + FeatureName: "Widely Feature", + BaselineChange: &workertypes.Change[workertypes.BaselineValue]{ + From: workertypes.BaselineValue{Status: workertypes.BaselineStatusNewly, LowDate: &newlyDate, + HighDate: nil}, + To: workertypes.BaselineValue{Status: workertypes.BaselineStatusWidely, LowDate: &newlyDate, + HighDate: &widelyDate}, + }, + Docs: nil, + NameChange: nil, + BrowserChanges: nil, + Moved: nil, + Split: nil, + } + hRegression := workertypes.SummaryHighlight{ + Type: workertypes.SummaryHighlightTypeChanged, + FeatureID: "h3", + FeatureName: "Regression Feature", + BaselineChange: &workertypes.Change[workertypes.BaselineValue]{ + From: 
workertypes.BaselineValue{Status: workertypes.BaselineStatusWidely, LowDate: &newlyDate, + HighDate: &widelyDate}, + To: workertypes.BaselineValue{Status: workertypes.BaselineStatusLimited, LowDate: nil, HighDate: nil}, + }, + Docs: nil, + NameChange: nil, + BrowserChanges: nil, + Moved: nil, + Split: nil, + } + hBrowser := workertypes.SummaryHighlight{ + Type: workertypes.SummaryHighlightTypeChanged, + FeatureID: "h4", + FeatureName: "Browser Feature", + BrowserChanges: map[workertypes.BrowserName]*workertypes.Change[workertypes.BrowserValue]{ + workertypes.BrowserChrome: { + From: workertypes.BrowserValue{Status: workertypes.BrowserStatusUnavailable, Version: nil, Date: nil}, + To: workertypes.BrowserValue{Status: workertypes.BrowserStatusAvailable, Version: nil, + Date: &availableDate}, + }, + workertypes.BrowserEdge: nil, + workertypes.BrowserFirefox: nil, + workertypes.BrowserSafari: nil, + workertypes.BrowserChromeAndroid: nil, + workertypes.BrowserFirefoxAndroid: nil, + workertypes.BrowserSafariIos: nil, + }, + BaselineChange: nil, + Docs: nil, + NameChange: nil, + Moved: nil, + Split: nil, + } + hGenericAdded := workertypes.SummaryHighlight{ + Type: workertypes.SummaryHighlightTypeAdded, + FeatureID: "h5", + FeatureName: "Generic Added", + Docs: nil, + NameChange: nil, + BaselineChange: nil, + BrowserChanges: nil, + Moved: nil, + Split: nil, + } + + allHighlights := []workertypes.SummaryHighlight{hNewly, hWidely, hRegression, hBrowser, hGenericAdded} + + tests := []struct { + name string + triggers []workertypes.JobTrigger + // IDs of expected highlights + wantIDs []string + }{ + { + name: "No Triggers (Default) - Should Return All", + triggers: nil, + wantIDs: []string{"h1", "h2", "h3", "h4", "h5"}, + }, + { + name: "Empty Triggers List - Should Return All (Same as nil)", + triggers: []workertypes.JobTrigger{}, + wantIDs: []string{"h1", "h2", "h3", "h4", "h5"}, + }, + { + name: "Newly Trigger", + triggers: 
[]workertypes.JobTrigger{workertypes.FeaturePromotedToNewly}, + wantIDs: []string{"h1"}, + }, + { + name: "Widely Trigger", + triggers: []workertypes.JobTrigger{workertypes.FeaturePromotedToWidely}, + wantIDs: []string{"h2"}, + }, + { + name: "Regression Trigger", + triggers: []workertypes.JobTrigger{workertypes.FeatureRegressedToLimited}, + wantIDs: []string{"h3"}, + }, + { + name: "Browser Implementation Trigger", + triggers: []workertypes.JobTrigger{workertypes.BrowserImplementationAnyComplete}, + wantIDs: []string{"h4"}, + }, + { + name: "Multiple Triggers (Newly + Widely)", + triggers: []workertypes.JobTrigger{ + workertypes.FeaturePromotedToNewly, + workertypes.FeaturePromotedToWidely, + }, + wantIDs: []string{"h1", "h2"}, + }, + { + name: "No Matches", + triggers: []workertypes.JobTrigger{workertypes.FeaturePromotedToWidely}, // Only h2 matches + // Pass in only h1 (Newly) + wantIDs: nil, // If input is only h1, result is empty. + // But for this test, we run against 'allHighlights' by default unless customized. 
+ // Let's customize the logic below to handle "wantIDs subset of allHighlights" + }, + } + + for _, tc := range tests { + t.Run(tc.name, func(t *testing.T) { + input := allHighlights + // Special case for "No Matches" test to be clear: pass in only items that definitely don't match + if tc.name == "No Matches" { + input = []workertypes.SummaryHighlight{hNewly} + } + + got := filterHighlights(input, tc.triggers) + + if len(got) != len(tc.wantIDs) { + t.Errorf("Count mismatch: got %d, want %d", len(got), len(tc.wantIDs)) + } + + for i, h := range got { + if i < len(tc.wantIDs) && h.FeatureID != tc.wantIDs[i] { + t.Errorf("Index %d mismatch: got ID %s, want %s", i, h.FeatureID, tc.wantIDs[i]) + } + } + }) + } +} + +func TestRenderDigest_InvalidJSON(t *testing.T) { + var metadata workertypes.DeliveryMetadata + + job := workertypes.IncomingEmailDeliveryJob{ + EmailDeliveryJob: workertypes.EmailDeliveryJob{ + SummaryRaw: []byte("invalid-json"), + SubscriptionID: "", + RecipientEmail: "", + Metadata: metadata, + ChannelID: "", + Triggers: nil, + }, + EmailEventID: "email-event-id", + } + + renderer, _ := NewHTMLRenderer("https://test.dev") + _, _, err := renderer.RenderDigest(job) + + if err == nil { + t.Error("Expected error for invalid JSON, got nil") + } +} diff --git a/workers/email/pkg/digest/styles.go b/workers/email/pkg/digest/styles.go new file mode 100644 index 000000000..6a013f7bb --- /dev/null +++ b/workers/email/pkg/digest/styles.go @@ -0,0 +1,54 @@ +// Copyright 2025 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
// See the License for the specific language governing permissions and
// limitations under the License.

// Package digest renders subscription digest emails as HTML.
// This file holds the inline-CSS building blocks shared by the email
// components; each named template expands to a CSS declaration list that is
// spliced into style="" attributes (email clients ignore <style> sheets).
// nolint:lll // WONTFIX - for readability
package digest

// styleSnippets defines the lowest-level named fragments: font stacks,
// font weights and the color palette. Higher-level styles compose these via
// {{- template "..." -}} so a palette change only happens in one place.
const styleSnippets = `{{- define "font_family_main" -}}font-family: SF Pro, system-ui, sans-serif;{{- end -}}
{{- define "font_family_monospace" -}}font-family: Menlo, monospace;{{- end -}}
{{- define "font_weight_bold" -}}font-weight: 700;{{- end -}}
{{- define "font_weight_normal" -}}font-weight: 400;{{- end -}}
{{- define "color_text_dark" -}}color: #18181B;{{- end -}}
{{- define "color_text_medium" -}}color: #52525B;{{- end -}}
{{- define "color_bg_success" -}}background: #E6F4EA;{{- end -}}
{{- define "color_bg_info" -}}background: #E8F0FE;{{- end -}}
{{- define "color_bg_neutral" -}}background: #E4E4E7;{{- end -}}
{{- define "color_bg_light_neutral" -}}background: #F4F4F5;{{- end -}}`

// layoutStyles defines structural styles: the outer wrapper, section and
// card containers, and the call-to-action button.
const layoutStyles = `{{- define "style_body_wrapper" -}}max-width: 600px; margin: 0 auto; padding: 20px;{{- end -}}
{{- define "style_subject_header" -}}{{- template "style_text_normal" -}}; align-self: stretch; margin: 0;{{- end -}}
{{- define "style_query_text" -}}{{- template "style_text_normal" -}}; align-self: stretch;{{- end -}}
{{- define "style_section_wrapper" -}}align-self: stretch; padding-top: 8px; padding-bottom: 8px; flex-direction: column; justify-content: flex-start; align-items: flex-start; display: flex;{{- end -}}
{{- define "style_card_body" -}}align-self: stretch; padding-top: 12px; padding-bottom: 15px; padding-left: 15px; padding-right: 15px; overflow: hidden; border-bottom-right-radius: 4px; border-bottom-left-radius: 4px; border-left: 1px solid #E4E4E7; border-right: 1px solid #E4E4E7; border-bottom: 1px solid #E4E4E7; flex-direction: column; justify-content: center; align-items: flex-start; display: flex; background: #FFFFFF;{{- end -}}
{{- define "style_button_link" -}}display: inline-block; padding: 10px 20px; background: #18181B; color: white; text-decoration: none; border-radius: 4px; {{- template "font_family_main" -}}; font-weight: 500; font-size: 14px;{{- end -}}`

// composedTextStyles defines the typography variants (titles, body text,
// links, dates, footer) built on top of the styleSnippets primitives.
const composedTextStyles = `{{- define "style_text_badge_title" -}}{{- template "color_text_dark" -}}; font-size: 14px; {{- template "font_family_main" -}}; {{- template "font_weight_bold" -}}; word-wrap: break-word;{{- end -}}
{{- define "style_text_badge_description" -}}{{- template "color_text_medium" -}}; font-size: 12px; {{- template "font_family_main" -}}; {{- template "font_weight_normal" -}}; word-wrap: break-word;{{- end -}}
{{- define "style_text_normal" -}}{{- template "color_text_dark" -}}; font-size: 14px; {{- template "font_family_main" -}}; {{- template "font_weight_normal" -}}; line-height: 21px; word-wrap: break-word;{{- end -}}
{{- define "style_text_body" -}}{{- template "color_text_dark" -}}; font-size: 16px; {{- template "font_family_main" -}}; {{- template "font_weight_normal" -}}; line-height: 30.40px; word-wrap: break-word;{{- end -}}
{{- define "style_text_body_subtle" -}}{{- template "color_text_medium" -}}; font-size: 14px; {{- template "font_family_main" -}}; {{- template "font_weight_normal" -}}; line-height: 26.60px; word-wrap: break-word;{{- end -}}
{{- define "style_text_banner_bold" -}}{{- template "color_text_dark" -}}; font-size: 14px; {{- template "font_family_main" -}}; {{- template "font_weight_bold" -}}; word-wrap: break-word;{{- end -}}
{{- define "style_text_banner_normal" -}}{{- template "color_text_dark" -}}; font-size: 14px; {{- template "font_family_main" -}}; {{- template "font_weight_normal" -}}; word-wrap: break-word;{{- end -}}
{{- define "style_text_feature_link" -}}{{- template "color_text_dark" -}}; font-size: 16px; {{- template "font_family_main" -}}; {{- template "font_weight_normal" -}}; text-decoration: underline; line-height: 30.40px; word-wrap: break-word;{{- end -}}
{{- define "style_text_doc_link" -}}{{- template "color_text_medium" -}}; font-size: 14px; {{- template "font_family_main" -}}; {{- template "font_weight_normal" -}}; text-decoration: underline; line-height: 26.60px;{{- end -}}
{{- define "style_text_doc_punctuation" -}}{{- template "color_text_medium" -}}; font-size: 14px; {{- template "font_family_main" -}}; {{- template "font_weight_normal" -}}; line-height: 26.60px;{{- end -}}
{{- define "style_text_date" -}}{{- template "color_text_medium" -}}; font-size: 12px; {{- template "font_family_monospace" -}}; {{- template "font_weight_normal" -}}; word-wrap: break-word;{{- end -}}
{{- define "style_text_browser_item" -}}{{- template "color_text_dark" -}}; font-size: 14px; {{- template "font_family_main" -}}; {{- template "font_weight_normal" -}}; word-wrap: break-word;{{- end -}}
{{- define "style_text_footer" -}}{{- template "color_text_medium" -}}; font-size: 11px; {{- template "font_weight_normal" -}}; word-wrap: break-word;{{- end -}}
{{- define "style_text_footer_link" -}}{{- template "color_text_medium" -}}; font-size: 11px; {{- template "font_weight_normal" -}}; text-decoration: underline; word-wrap: break-word;{{- end -}}`

// EmailStyles is the full style sheet parsed into the email template tree:
// the primitive snippets, layout styles, text styles, plus a few top-level
// body/detail styles defined inline below.
const EmailStyles = styleSnippets + layoutStyles + composedTextStyles + `{{- define "style_body" -}}{{- template "font_family_main" -}}; line-height: 1.5; color: #333; margin: 0; padding: 0;{{- end -}}
{{- define "style_change_detail_div" -}}align-self: stretch; justify-content: flex-start; align-items: center; gap: 10px; display: inline-flex; width: 100%;{{- end -}}
{{- define "style_change_detail_inner_div" -}}flex: 1 1 0;{{- end -}}
{{- define "style_split_into" -}}flex: 1 1 0;{{- end -}}`
diff --git a/workers/email/pkg/digest/templates.go b/workers/email/pkg/digest/templates.go
new file mode 100644
index 000000000..480f6dc04
--- /dev/null
+++ b/workers/email/pkg/digest/templates.go
@@ -0,0 +1,162 @@
// Copyright 2025 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package digest + +// defaultEmailTemplate is the main layout. It uses {{template "name" .}} to include +// the components defined in emailComponents. +// nolint: lll // WONTFIX - Keeping for readability. +const defaultEmailTemplate = ` + + + + {{.Subject}} + + +
+ {{- template "intro_text" . -}} + + {{- if .BaselineNewlyChanges -}} +
+ {{- template "banner_baseline_newly" dict "LogoURL" (statusLogoURL "newly") -}} + {{- range .BaselineNewlyChanges -}} + {{- $date := "" -}} + {{- if .BaselineChange.To.LowDate -}} + {{- $date = formatDate .BaselineChange.To.LowDate -}} + {{- end -}} +
+ {{- template "feature_title_row" dict "Name" .FeatureName "URL" (printf "%s/features/%s" $.BaseURL .FeatureID) "Docs" .Docs "Date" $date -}} +
+ {{- end -}} +
+ {{- end -}} + + {{- if .BaselineWidelyChanges -}} +
+ {{- template "banner_baseline_widely" dict "LogoURL" (statusLogoURL "widely") -}} + {{- range .BaselineWidelyChanges -}} + {{- $date := "" -}} + {{- if .BaselineChange.To.HighDate -}} + {{- $date = formatDate .BaselineChange.To.HighDate -}} + {{- end -}} +
+ {{- template "feature_title_row" dict "Name" .FeatureName "URL" (printf "%s/features/%s" $.BaseURL .FeatureID) "Docs" .Docs "Date" $date -}} +
+ {{- end -}} +
+ {{- end -}} + + {{- if .BaselineRegressionChanges -}} +
+ {{- template "banner_baseline_regression" dict "LogoURL" (statusLogoURL "limited") -}} + {{- range .BaselineRegressionChanges -}} + {{- $date := "" -}} +
+ {{- template "feature_title_row" dict "Name" .FeatureName "URL" (printf "%s/features/%s" $.BaseURL .FeatureID) "Docs" .Docs "Date" $date -}} +
+
+ + {{- with .BaselineChange -}} + From {{formatBaselineStatus .From.Status}} + {{- end -}} + +
+
+
+ {{- end -}} +
+ {{- end -}} + + {{- if .AllBrowserChanges -}} +
+ {{- template "banner_browser_implementation" -}} + {{- range .AllBrowserChanges -}} + {{- template "browser_item" dict "Name" (browserDisplayName .BrowserName) "LogoURL" (browserLogoURL .BrowserName) "From" .Change.From "To" .Change.To "FeatureName" .FeatureName "FeatureURL" (printf "%s/features/%s" $.BaseURL .FeatureID) -}} + {{- end -}} +
+ {{- end -}} + + {{- if .AddedFeatures -}} +
+ {{- template "badge" (dict "Title" "Added" "Description" "These features now match your search criteria.") -}} + {{- range .AddedFeatures -}} +
+ {{- template "feature_title_row" dict "Name" .FeatureName "URL" (printf "%s/features/%s" $.BaseURL .FeatureID) "Docs" .Docs -}} +
+ {{- end -}} +
+ {{- end -}} + + {{- if .RemovedFeatures -}} +
+ {{- template "badge" (dict "Title" "Removed" "Description" "These features no longer match your search criteria.") -}} + {{- range .RemovedFeatures -}} +
+ {{- template "feature_title_row" dict "Name" .FeatureName "URL" (printf "%s/features/%s" $.BaseURL .FeatureID) "Docs" .Docs -}} +
+ {{- end -}} +
+ {{- end -}} + + {{- if .DeletedFeatures -}} +
+ {{- template "badge" (dict "Title" "Deleted" "Description" "These features have been removed from the web platform.") -}} + {{- range .DeletedFeatures -}} +
+ {{- template "feature_title_row" dict "Name" .FeatureName "URL" (printf "%s/features/%s" $.BaseURL .FeatureID) "Docs" .Docs -}} +
+ {{- end -}} +
+ {{- end -}} + + {{- if .MovedFeatures -}} +
+ {{- template "badge" (dict "Title" "Moved" "Description" "These features have been renamed or merged with another feature.") -}} + {{- range .MovedFeatures -}} +
+ {{- template "feature_title_row" dict "Name" .FeatureName "URL" (printf "%s/features/%s" $.BaseURL .FeatureID) "Docs" .Docs -}} + {{- template "change_detail" dict "Label" "Moved from" "From" .Moved.From.Name "To" .Moved.To.Name -}} +
+ {{- end -}} +
+ {{- end -}} + + {{- if .SplitFeatures -}} +
+ {{- template "badge" (dict "Title" "Split" "Description" "This feature has been split into multiple, more granular features.") -}} + {{- range .SplitFeatures -}} +
+ {{- template "feature_title_row" dict "Name" .FeatureName "URL" (printf "%s/features/%s" $.BaseURL .FeatureID) "Docs" .Docs -}} +
+
+ Split into + {{ range $i, $feature := .Split.To -}} + {{- if $i }}, {{ end -}} + {{$feature.Name}} + {{- end -}} +
+
+
+ {{- end -}} +
+ {{- end -}} + + {{- if .Truncated -}} + {{- template "button" dict "URL" (printf "%s/saved-searches" $.BaseURL) "Text" "View All Changes" -}} + {{- end -}} + + {{- template "footer" dict "UnsubscribeURL" $.UnsubscribeURL "ManageURL" (printf "%s/saved-searches" $.BaseURL) -}} +
+ +` diff --git a/workers/email/pkg/digest/testdata/digest.golden.html b/workers/email/pkg/digest/testdata/digest.golden.html new file mode 100644 index 000000000..bb69497c8 --- /dev/null +++ b/workers/email/pkg/digest/testdata/digest.golden.html @@ -0,0 +1,155 @@ + + + + + Weekly Digest: group:css + + +
+

Weekly Digest: group:css

+
+ Here is your update for the saved search 'group:css'. + 11 features changed. +
+
+
+ Newly Available +
+
+ Baseline + Newly available +
+
+
+ Widely Available +
+
+ Baseline + Widely available +
+
+
2025-12-27
+
+ Regressed +
+
+ Regressed + to limited availability +
+
+
+ From Widely +
+
+
+
+ +
+
+ +
+
+ +
+
+ +
+
+
Browser support changed
+
+
+
+ Chrome +
+
+ Chrome: Available in 120 → Unavailable
+
+
+
+ Safari iOS +
+
+ Safari iOS: Unavailable → Available in 17.2
+
+
+
+ Chrome +
+
+ Chrome: Unavailable → Available (on 2025-01-01)
+
+ +
+
+
+ Firefox +
+
+ Firefox: Unavailable → Available in 123 (on 2025-01-01)
+
+
+
Added
These features now match your search criteria.
+
+
+
Removed
These features no longer match your search criteria.
+
+
+
Deleted
These features have been removed from the web platform.
+
+
+
Moved
These features have been renamed or merged with another feature.
+
+
+ Moved from + (Old Name → New Cool Name) +
+
+
+
Split
This feature has been split into multiple, more granular features.
+
+
+ Split into + Sub Feature 1, Sub Feature 2
+
+
+
+
+ You can + unsubscribe + or change any of your alerts on + webstatus.dev +
+
+ + \ No newline at end of file diff --git a/workers/email/pkg/sender/sender.go b/workers/email/pkg/sender/sender.go new file mode 100644 index 000000000..eda8f1c69 --- /dev/null +++ b/workers/email/pkg/sender/sender.go @@ -0,0 +1,99 @@ +// Copyright 2025 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package sender + +import ( + "context" + "errors" + "log/slog" + "time" + + "github.com/GoogleChrome/webstatus.dev/lib/event" + "github.com/GoogleChrome/webstatus.dev/lib/workertypes" +) + +type EmailSender interface { + Send(ctx context.Context, id string, to string, subject string, htmlBody string) error +} + +type ChannelStateManager interface { + RecordSuccess(ctx context.Context, channelID string, timestamp time.Time, eventID string) error + RecordFailure(ctx context.Context, channelID string, err error, + timestamp time.Time, permanentUserFailure bool, emailEventID string) error +} + +type TemplateRenderer interface { + RenderDigest(job workertypes.IncomingEmailDeliveryJob) (string, string, error) +} + +type Sender struct { + sender EmailSender + stateManager ChannelStateManager + renderer TemplateRenderer + now func() time.Time +} + +func NewSender( + sender EmailSender, + stateManager ChannelStateManager, + renderer TemplateRenderer, +) *Sender { + return &Sender{ + sender: sender, + stateManager: stateManager, + renderer: renderer, + now: time.Now, + } +} + +func (s *Sender) ProcessMessage(ctx context.Context, job 
workertypes.IncomingEmailDeliveryJob) error { + // 1. Render (Parsing happens inside RenderDigest implementation) + subject, body, err := s.renderer.RenderDigest(job) + if err != nil { + slog.ErrorContext(ctx, "failed to render email", "subscription_id", job.SubscriptionID, "error", err) + if dbErr := s.stateManager.RecordFailure(ctx, job.ChannelID, err, s.now(), false, job.EmailEventID); dbErr != nil { + slog.ErrorContext(ctx, "failed to record channel failure", "channel_id", job.ChannelID, "error", dbErr) + } + + return err + } + + // 2. Send + if err := s.sender.Send(ctx, job.EmailEventID, job.RecipientEmail, subject, body); err != nil { + isPermanentUserError := errors.Is(err, workertypes.ErrUnrecoverableUserFailureEmailSending) + isPermanent := errors.Is(err, workertypes.ErrUnrecoverableSystemFailureEmailSending) || + isPermanentUserError + slog.ErrorContext(ctx, "failed to send email", "recipient", job.RecipientEmail, "error", err) + // Record failure in DB + if dbErr := s.stateManager.RecordFailure(ctx, job.ChannelID, err, s.now(), + isPermanentUserError, job.EmailEventID); dbErr != nil { + slog.ErrorContext(ctx, "failed to record channel failure", "channel_id", job.ChannelID, "error", dbErr) + } + if isPermanent { + return err + } + + // If not permanent, wrap with ErrTransient to trigger NACK (which will retry) + return errors.Join(event.ErrTransientFailure, err) + } + + // 3. 
Success + if err := s.stateManager.RecordSuccess(ctx, job.ChannelID, s.now(), job.EmailEventID); err != nil { + // Non-critical error, but good to log + slog.WarnContext(ctx, "failed to record channel success", "channel_id", job.ChannelID, "error", err) + } + + return nil +} diff --git a/workers/email/pkg/sender/sender_test.go b/workers/email/pkg/sender/sender_test.go new file mode 100644 index 000000000..7cbe0bb41 --- /dev/null +++ b/workers/email/pkg/sender/sender_test.go @@ -0,0 +1,310 @@ +// Copyright 2025 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +package sender + +import ( + "context" + "errors" + "testing" + "time" + + "github.com/GoogleChrome/webstatus.dev/lib/event" + "github.com/GoogleChrome/webstatus.dev/lib/workertypes" + "github.com/google/go-cmp/cmp" +) + +// --- Mocks --- + +type mockEmailSender struct { + sentCalls []sentCall + sendErr error +} + +type sentCall struct { + id string + to string + subject string + body string +} + +func (m *mockEmailSender) Send(_ context.Context, id, to, subject, body string) error { + m.sentCalls = append(m.sentCalls, sentCall{id, to, subject, body}) + + return m.sendErr +} + +type successCall struct { + channelID string + emailEventID string + timestamp time.Time +} + +type mockChannelStateManager struct { + successCalls []successCall + failureCalls []failureCall + recordErr error +} + +type failureCall struct { + channelID string + emailEventID string + err error + isPermanentUserError bool + timestamp time.Time +} + +func (m *mockChannelStateManager) RecordSuccess(_ context.Context, channelID string, + timestamp time.Time, emailEventID string) error { + m.successCalls = append(m.successCalls, successCall{channelID, emailEventID, timestamp}) + + return m.recordErr +} + +func (m *mockChannelStateManager) RecordFailure(_ context.Context, channelID string, err error, + timestamp time.Time, isPermanentUserError bool, emailEventID string, +) error { + m.failureCalls = append(m.failureCalls, failureCall{channelID, emailEventID, err, isPermanentUserError, timestamp}) + + return m.recordErr +} + +type mockTemplateRenderer struct { + renderSubject string + renderBody string + renderErr error + renderInput workertypes.IncomingEmailDeliveryJob +} + +func (m *mockTemplateRenderer) RenderDigest(job workertypes.IncomingEmailDeliveryJob) (string, string, error) { + m.renderInput = job + + return m.renderSubject, m.renderBody, m.renderErr +} + +func testGeneratedAt() time.Time { + return time.Date(2025, 1, 1, 12, 0, 0, 0, time.UTC) +} + +const testChannelID = "chan-1" + 
+func testMetadata() workertypes.DeliveryMetadata { + return workertypes.DeliveryMetadata{ + EventID: "event-1", + SearchID: "search-1", + Query: "query-string", + Frequency: workertypes.FrequencyMonthly, + GeneratedAt: testGeneratedAt(), + } +} + +func fakeNow() time.Time { + return time.Date(2025, 1, 1, 12, 0, 0, 0, time.UTC) +} + +// --- Tests --- + +const testEmailEventID = "job-id" + +func TestProcessMessage_Success(t *testing.T) { + ctx := context.Background() + job := workertypes.IncomingEmailDeliveryJob{ + EmailDeliveryJob: workertypes.EmailDeliveryJob{ + SubscriptionID: "sub-1", + Metadata: testMetadata(), + RecipientEmail: "user@example.com", + SummaryRaw: []byte("{}"), + ChannelID: "chan-1", + Triggers: []workertypes.JobTrigger{ + workertypes.BrowserImplementationAnyComplete, + }, + }, + EmailEventID: testEmailEventID, + } + + sender := new(mockEmailSender) + stateManager := new(mockChannelStateManager) + renderer := new(mockTemplateRenderer) + renderer.renderSubject = "Subject" + renderer.renderBody = "Body" + + h := NewSender(sender, stateManager, renderer) + h.now = fakeNow + + err := h.ProcessMessage(ctx, job) + if err != nil { + t.Fatalf("ProcessMessage failed: %v", err) + } + + // Verify Renderer Input + if diff := cmp.Diff(job, renderer.renderInput); diff != "" { + t.Errorf("Renderer input mismatch (-want +got):\n%s", diff) + } + + // Verify Send + if len(sender.sentCalls) != 1 { + t.Fatalf("Expected 1 email sent, got %d", len(sender.sentCalls)) + } + if sender.sentCalls[0].to != "user@example.com" { + t.Errorf("Recipient mismatch: %s", sender.sentCalls[0].to) + } + if sender.sentCalls[0].id != testEmailEventID { + t.Errorf("Event ID mismatch: %s", sender.sentCalls[0].id) + } + if sender.sentCalls[0].subject != "Subject" { + t.Errorf("Subject mismatch: %s", sender.sentCalls[0].subject) + } + if sender.sentCalls[0].body != "Body" { + t.Errorf("Body mismatch: %s", sender.sentCalls[0].body) + } + + // Verify State + if len(stateManager.successCalls) 
!= 1 { + t.Errorf("Expected 1 success record, got %d", len(stateManager.successCalls)) + } + if stateManager.successCalls[0].channelID != testChannelID { + t.Errorf("Success recorded for wrong channel: %v", stateManager.successCalls[0]) + } + if stateManager.successCalls[0].emailEventID != "job-id" { + t.Errorf("Success recorded for wrong event: %v", stateManager.successCalls[0]) + } + if !stateManager.successCalls[0].timestamp.Equal(fakeNow()) { + t.Errorf("Success recorded with wrong timestamp: %v", stateManager.successCalls[0]) + } +} + +func TestProcessMessage_RenderError(t *testing.T) { + ctx := context.Background() + job := workertypes.IncomingEmailDeliveryJob{ + EmailDeliveryJob: workertypes.EmailDeliveryJob{ + SubscriptionID: "sub-1", + Metadata: testMetadata(), + RecipientEmail: "user@example.com", + SummaryRaw: []byte("{}"), + ChannelID: "chan-1", + Triggers: nil, + }, + EmailEventID: "job-id", + } + + sender := new(mockEmailSender) + stateManager := new(mockChannelStateManager) + renderer := new(mockTemplateRenderer) + renderer.renderErr = errors.New("template error") + + h := NewSender(sender, stateManager, renderer) + h.now = fakeNow + + // Should return non transient error (ACK) for rendering error + err := h.ProcessMessage(ctx, job) + if errors.Is(err, event.ErrTransientFailure) { + t.Errorf("Expected non transient error for render failure, got %v", err) + } + + if !errors.Is(err, renderer.renderErr) { + t.Errorf("Expected configured renderer error, got %v", err) + } + + // Should record failure + if len(stateManager.failureCalls) != 1 { + t.Fatal("Expected failure recording") + } + + // Should NOT send + if len(sender.sentCalls) > 0 { + t.Error("Should not send email on render error") + } +} + +func TestProcessMessage_SendError(t *testing.T) { + ctx := context.Background() + job := workertypes.IncomingEmailDeliveryJob{ + EmailDeliveryJob: workertypes.EmailDeliveryJob{ + SubscriptionID: "sub-1", + Metadata: testMetadata(), + RecipientEmail: 
"user@example.com", + SummaryRaw: []byte("{}"), + ChannelID: "chan-1", + Triggers: nil, + }, + EmailEventID: "job-id", + } + + testCases := []struct { + name string + sendErr error + isPermanentUserError bool + wantNack bool + }{ + { + "regular error = NACK", + errors.New("send error"), + false, + true, + }, + { + "user error = ACK", + workertypes.ErrUnrecoverableUserFailureEmailSending, + true, + false, + }, + { + "system error = ACK", + workertypes.ErrUnrecoverableSystemFailureEmailSending, + false, + false, + }, + } + + for _, tc := range testCases { + t.Run(tc.name, func(t *testing.T) { + sender := &mockEmailSender{sendErr: tc.sendErr, sentCalls: nil} + stateManager := new(mockChannelStateManager) + renderer := new(mockTemplateRenderer) + renderer.renderSubject = "S" + renderer.renderBody = "B" + + h := NewSender(sender, stateManager, renderer) + h.now = fakeNow + + err := h.ProcessMessage(ctx, job) + if !errors.Is(err, tc.sendErr) { + t.Errorf("Expected send error %v, got %v", tc.sendErr, err) + } + // Should record failure in DB as well + if len(stateManager.failureCalls) != 1 { + t.Fatal("Expected failure recording") + } + if stateManager.failureCalls[0].channelID != testChannelID { + t.Errorf("Recorded failure for wrong channel") + } + if stateManager.failureCalls[0].emailEventID != "job-id" { + t.Errorf("Recorded failure for wrong event") + } + if tc.isPermanentUserError != stateManager.failureCalls[0].isPermanentUserError { + t.Errorf("Recorded failure for wrong error type") + } + if !stateManager.failureCalls[0].timestamp.Equal(fakeNow()) { + t.Errorf("Recorded failure for wrong timestamp") + } + + if tc.wantNack { + if !errors.Is(err, event.ErrTransientFailure) { + t.Errorf("Expected transient failure for NACK, got %v", err) + } + } + }) + } + +} diff --git a/workers/email/skaffold.yaml b/workers/email/skaffold.yaml index 1e4c3c6ad..4650b090b 100644 --- a/workers/email/skaffold.yaml +++ b/workers/email/skaffold.yaml @@ -19,6 +19,7 @@ metadata: requires: 
- path: ../../.dev/pubsub - path: ../../.dev/spanner + - path: ../../.dev/mailpit profiles: - name: local build: diff --git a/workers/event_producer/cmd/job/main.go b/workers/event_producer/cmd/job/main.go index 449f0459d..24f83636f 100644 --- a/workers/event_producer/cmd/job/main.go +++ b/workers/event_producer/cmd/job/main.go @@ -20,8 +20,12 @@ import ( "os" "github.com/GoogleChrome/webstatus.dev/lib/gcpgcs" + "github.com/GoogleChrome/webstatus.dev/lib/gcpgcs/gcpgcsadapters" "github.com/GoogleChrome/webstatus.dev/lib/gcppubsub" + "github.com/GoogleChrome/webstatus.dev/lib/gcppubsub/gcppubsubadapters" "github.com/GoogleChrome/webstatus.dev/lib/gcpspanner" + "github.com/GoogleChrome/webstatus.dev/lib/gcpspanner/spanneradapters" + "github.com/GoogleChrome/webstatus.dev/workers/event_producer/pkg/producer" ) func main() { @@ -56,6 +60,20 @@ func main() { os.Exit(1) } + // For publishing to ingestion events to fan-out + ingestionTopicID := os.Getenv("INGESTION_TOPIC_ID") + if ingestionTopicID == "" { + slog.ErrorContext(ctx, "INGESTION_TOPIC_ID is not set. exiting...") + os.Exit(1) + } + + // For subscribing to batch events + batchSubID := os.Getenv("BATCH_UPDATE_SUBSCRIPTION_ID") + if batchSubID == "" { + slog.ErrorContext(ctx, "BATCH_UPDATE_SUBSCRIPTION_ID is not set. exiting...") + os.Exit(1) + } + // For publishing to notification events notificationTopicID := os.Getenv("NOTIFICATION_TOPIC_ID") if notificationTopicID == "" { @@ -75,17 +93,30 @@ func main() { os.Exit(1) } - _, err = gcpgcs.NewClient(ctx, stateBlobBucket) + blobClient, err := gcpgcs.NewClient(ctx, stateBlobBucket) if err != nil { slog.ErrorContext(ctx, "unable to create gcs client", "error", err) os.Exit(1) } + blobAdapter := gcpgcsadapters.NewEventProducer(blobClient, stateBlobBucket) - // TODO: https://github.com/GoogleChrome/webstatus.dev/issues/1848 - // Nil handler for now. 
Will fix later - err = queueClient.Subscribe(ctx, ingestionSubID, nil) + p := producer.NewEventProducer( + producer.NewDiffer(spanneradapters.NewEventProducerDiffer(spanneradapters.NewBackend(spannerClient))), + blobAdapter, spanneradapters.NewEventProducer(spannerClient), + gcppubsubadapters.NewEventProducerPublisherAdapter(queueClient, notificationTopicID)) + batch := producer.NewBatchUpdateHandler(spanneradapters.NewBatchEventProducer(spannerClient), + gcppubsubadapters.NewBatchFanOutPublisherAdapter(queueClient, ingestionTopicID)) + listener := gcppubsubadapters.NewEventProducerSubscriberAdapter( + p, batch, queueClient, gcppubsubadapters.SubscriberConfig{ + SearchSubscriptionID: ingestionSubID, + BatchUpdateSubscriptionID: batchSubID, + }) + // Start listening to ingestion events + slog.InfoContext(ctx, "starting event producer subscriber for ingestion events") + err = listener.Subscribe(ctx) if err != nil { - slog.ErrorContext(ctx, "unable to connect to subscription", "error", err) + slog.ErrorContext(ctx, "unable to start subscriber", "error", err) os.Exit(1) } + } diff --git a/workers/event_producer/go.mod b/workers/event_producer/go.mod index 52cfcc2ec..9059db8fc 100644 --- a/workers/event_producer/go.mod +++ b/workers/event_producer/go.mod @@ -19,10 +19,15 @@ require ( cloud.google.com/go v0.123.0 // indirect cloud.google.com/go/auth v0.17.0 // indirect cloud.google.com/go/auth/oauth2adapt v0.2.8 // indirect + cloud.google.com/go/cloudtasks v1.13.7 // indirect cloud.google.com/go/compute/metadata v0.9.0 // indirect + cloud.google.com/go/datastore v1.21.0 // indirect cloud.google.com/go/iam v1.5.3 // indirect + cloud.google.com/go/logging v1.13.1 // indirect + cloud.google.com/go/longrunning v0.7.0 // indirect cloud.google.com/go/monitoring v1.24.3 // indirect cloud.google.com/go/pubsub/v2 v2.0.0 // indirect + cloud.google.com/go/secretmanager v1.16.0 // indirect cloud.google.com/go/spanner v1.86.1 // indirect cloud.google.com/go/storage v1.57.2 // 
indirect github.com/GoogleCloudPlatform/grpc-gcp-go/grpcgcp v1.5.3 // indirect @@ -33,6 +38,7 @@ require ( github.com/apapsch/go-jsonmerge/v2 v2.0.0 // indirect github.com/cespare/xxhash/v2 v2.3.0 // indirect github.com/cncf/xds/go v0.0.0-20251110193048-8bfbf64dc13e // indirect + github.com/deckarep/golang-set v1.8.0 // indirect github.com/envoyproxy/go-control-plane/envoy v1.36.0 // indirect github.com/envoyproxy/protoc-gen-validate v1.2.1 // indirect github.com/felixge/httpsnoop v1.0.4 // indirect @@ -43,9 +49,15 @@ require ( github.com/go-openapi/jsonpointer v0.22.3 // indirect github.com/go-openapi/swag/jsonname v0.25.3 // indirect github.com/golang/groupcache v0.0.0-20241129210726-2c02b8208cf8 // indirect + github.com/gomodule/redigo v1.9.3 // indirect + github.com/google/go-github/v77 v77.0.0 // indirect + github.com/google/go-querystring v1.1.0 // indirect github.com/google/s2a-go v0.1.9 // indirect github.com/googleapis/enterprise-certificate-proxy v0.3.7 // indirect github.com/googleapis/gax-go/v2 v2.15.0 // indirect + github.com/gorilla/handlers v1.5.2 // indirect + github.com/gorilla/mux v1.8.1 // indirect + github.com/gorilla/securecookie v1.1.2 // indirect github.com/josharian/intern v1.0.0 // indirect github.com/mailru/easyjson v0.9.1 // indirect github.com/mohae/deepcopy v0.0.0-20170929034955-c48cc78d4826 // indirect @@ -53,7 +65,9 @@ require ( github.com/oasdiff/yaml3 v0.0.0-20250309153720-d2182401db90 // indirect github.com/perimeterx/marshmallow v1.1.5 // indirect github.com/planetscale/vtprotobuf v0.6.1-0.20240319094008-0393e58bdf10 // indirect + github.com/sirupsen/logrus v1.9.3 // indirect github.com/spiffe/go-spiffe/v2 v2.6.0 // indirect + github.com/web-platform-tests/wpt.fyi v0.0.0-20251118162843-54f805c8a632 // indirect github.com/woodsbury/decimal128 v1.4.0 // indirect go.opencensus.io v0.24.0 // indirect go.opentelemetry.io/auto/sdk v1.2.1 // indirect @@ -79,4 +93,5 @@ require ( google.golang.org/genproto/googleapis/rpc 
v0.0.0-20251111163417-95abcf5c77ba // indirect google.golang.org/grpc v1.77.0 // indirect google.golang.org/protobuf v1.36.10 // indirect + gopkg.in/yaml.v3 v3.0.1 // indirect ) diff --git a/workers/event_producer/go.sum b/workers/event_producer/go.sum index dfb79e895..304967e9e 100644 --- a/workers/event_producer/go.sum +++ b/workers/event_producer/go.sum @@ -852,6 +852,8 @@ github.com/google/go-github/v77 v77.0.0 h1:9DsKKbZqil5y/4Z9mNpZDQnpli6PJbqipSuuN github.com/google/go-github/v77 v77.0.0/go.mod h1:c8VmGXRUmaZUqbctUcGEDWYnMrtzZfJhDSylEf1wfmA= github.com/google/go-querystring v1.1.0 h1:AnCroh3fv4ZBgVIf1Iwtovgjaw/GiKJo8M8yD/fhyJ8= github.com/google/go-querystring v1.1.0/go.mod h1:Kcdr2DB4koayq7X8pmAG4sNG59So17icRSOU623lUBU= +github.com/google/gofuzz v1.2.0 h1:xRy4A+RhZaiKjJ1bPfwQ8sedCA+YS2YcCHW6ec7JMi0= +github.com/google/gofuzz v1.2.0/go.mod h1:dBl0BpW6vV/+mYPU4Po3pmUjxk6FQPldtuIdl/M65Eg= github.com/google/martian v2.1.0+incompatible h1:/CP5g8u/VJHijgedC/Legn3BAbAaWPgecwXBIDzw5no= github.com/google/martian v2.1.0+incompatible/go.mod h1:9I4somxYTbIHy5NJKHRl3wXiIaQGbYVAs8BPL6v8lEs= github.com/google/martian/v3 v3.0.0/go.mod h1:y5Zk1BBys9G+gd6Jrk0W3cC1+ELVxBWuIGO+w/tUAp0= @@ -986,6 +988,8 @@ github.com/opencontainers/image-spec v1.1.1 h1:y0fUlFfIZhPF1W537XOLg0/fcx6zcHCJw github.com/opencontainers/image-spec v1.1.1/go.mod h1:qpqAh3Dmcf36wStyyWU+kCeDgrGnAve2nCC8+7h8Q0M= github.com/perimeterx/marshmallow v1.1.5 h1:a2LALqQ1BlHM8PZblsDdidgv1mWi1DgC2UmX50IvK2s= github.com/perimeterx/marshmallow v1.1.5/go.mod h1:dsXbUu8CRzfYP5a87xpp0xq9S3u0Vchtcl8we9tYaXw= +github.com/phayes/freeport v0.0.0-20220201140144-74d24b5ae9f5 h1:Ii+DKncOVM8Cu1Hc+ETb5K+23HdAMvESYE3ZJ5b5cMI= +github.com/phayes/freeport v0.0.0-20220201140144-74d24b5ae9f5/go.mod h1:iIss55rKnNBTvrwdmkUpLnDpZoAHvWaiq5+iMmen4AE= github.com/phpdave11/gofpdf v1.4.2/go.mod h1:zpO6xFn9yxo3YLyMvW8HcKWVdbNqgIfOOp2dXMnm1mY= github.com/phpdave11/gofpdi v1.0.12/go.mod h1:vBmVV0Do6hSBHC8uKUQ71JGW+ZGQq74llk/7bXwjDoI= 
github.com/phpdave11/gofpdi v1.0.13/go.mod h1:vBmVV0Do6hSBHC8uKUQ71JGW+ZGQq74llk/7bXwjDoI= @@ -1098,6 +1102,8 @@ go.opentelemetry.io/otel/trace v1.38.0/go.mod h1:j1P9ivuFsTceSWe1oY+EeW3sc+Pp42s go.opentelemetry.io/proto/otlp v0.7.0/go.mod h1:PqfVotwruBrMGOCsRd/89rSnXhoiJIqeYNgFYFoEGnI= go.opentelemetry.io/proto/otlp v0.15.0/go.mod h1:H7XAot3MsfNsj7EXtrA2q5xSNQ10UqI405h3+duxN4U= go.opentelemetry.io/proto/otlp v0.19.0/go.mod h1:H7XAot3MsfNsj7EXtrA2q5xSNQ10UqI405h3+duxN4U= +go.uber.org/mock v0.6.0 h1:hyF9dfmbgIX5EfOdasqLsWD6xqpNZlXblLB/Dbnwv3Y= +go.uber.org/mock v0.6.0/go.mod h1:KiVJ4BqZJaMj4svdfmHM0AUx4NJYO8ZNpPnZn1Z+BBU= golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w= golang.org/x/crypto v0.0.0-20190510104115-cbcb75029529/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI= golang.org/x/crypto v0.0.0-20190605123033-f99c8df09eb5/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI= @@ -1343,6 +1349,7 @@ golang.org/x/sys v0.0.0-20220520151302-bc2c85ada10a/go.mod h1:oPkhp1MJrh7nUepCBc golang.org/x/sys v0.0.0-20220610221304-9f5ed59c137d/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20220615213510-4f61da869c0c/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20220624220833-87e55d714810/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20220715151400-c0bba94af5f8/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20220722155257-8c9f86f7a55f/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20220728004956-3c1f35247d10/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20220811171246-fbc7d0a398ab/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= diff --git a/workers/event_producer/manifests/pod.yaml b/workers/event_producer/manifests/pod.yaml index df7c8f41f..4a7fc3500 100644 --- a/workers/event_producer/manifests/pod.yaml +++ 
b/workers/event_producer/manifests/pod.yaml @@ -33,7 +33,7 @@ spec: - name: SPANNER_EMULATOR_HOST value: 'spanner:9010' - name: PUBSUB_EMULATOR_HOST - value: 'http://pubsub:8086' + value: 'pubsub:8060' - name: INGESTION_SUBSCRIPTION_ID value: 'ingestion-jobs-sub-id' - name: NOTIFICATION_TOPIC_ID @@ -42,6 +42,10 @@ spec: value: 'state-bucket' - name: STORAGE_EMULATOR_HOST value: 'http://gcs:4443' + - name: BATCH_UPDATE_SUBSCRIPTION_ID + value: 'batch-updates-sub-id' + - name: INGESTION_TOPIC_ID + value: 'ingestion-jobs-topic-id' resources: limits: cpu: 250m diff --git a/workers/event_producer/pkg/producer/batch_handler.go b/workers/event_producer/pkg/producer/batch_handler.go new file mode 100644 index 000000000..652ee4e20 --- /dev/null +++ b/workers/event_producer/pkg/producer/batch_handler.go @@ -0,0 +1,88 @@ +// Copyright 2025 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +package producer + +import ( + "context" + "fmt" + "log/slog" + "time" + + "github.com/GoogleChrome/webstatus.dev/lib/event" + "github.com/GoogleChrome/webstatus.dev/lib/workertypes" +) + +type SearchLister interface { + ListAllSavedSearches(ctx context.Context) ([]workertypes.SearchJob, error) +} + +type CommandPublisher interface { + PublishRefreshCommand(ctx context.Context, cmd workertypes.RefreshSearchCommand) error +} + +type BatchUpdateHandler struct { + lister SearchLister + publisher CommandPublisher + now func() time.Time +} + +func NewBatchUpdateHandler(lister SearchLister, publisher CommandPublisher) *BatchUpdateHandler { + return &BatchUpdateHandler{ + lister: lister, + publisher: publisher, + now: time.Now, + } +} + +func (h *BatchUpdateHandler) ProcessBatchUpdate(ctx context.Context, triggerID string, + frequency workertypes.JobFrequency) error { + slog.InfoContext(ctx, "starting batch update fan-out", "trigger_id", triggerID, "frequency", frequency) + + // 1. List all Saved Searches + searches, err := h.lister.ListAllSavedSearches(ctx) + if err != nil { + // Transient db error should be retried + return fmt.Errorf("%w: failed to list saved searches: %w", event.ErrTransientFailure, err) + } + + slog.InfoContext(ctx, "found saved searches to refresh", "count", len(searches)) + + // 2. Fan-out + for _, search := range searches { + cmd := workertypes.RefreshSearchCommand{ + SearchID: search.ID, + Query: search.Query, + Frequency: frequency, + Timestamp: h.now(), + } + + if err := h.publisher.PublishRefreshCommand(ctx, cmd); err != nil { + // If we fail to publish one, we should probably fail the whole batch so it retries. + // But we don't want to re-publish successfully published ones if possible. + // Pub/Sub doesn't support transactional batch publishes across messages easily. + // Ideally, we just return error and let the handler retry. + // Idempotency in ProcessSearch handles the duplicates. 
+ slog.ErrorContext(ctx, "failed to publish refresh command", "search_id", search.ID, "error", err, + "trigger_id", triggerID, "frequency", frequency) + + return fmt.Errorf("%w: failed to publish refresh command for search %s: %w", + event.ErrTransientFailure, search.ID, err) + } + } + + slog.InfoContext(ctx, "batch update fan-out complete", "trigger_id", triggerID) + + return nil +} diff --git a/workers/event_producer/pkg/producer/batch_handler_test.go b/workers/event_producer/pkg/producer/batch_handler_test.go new file mode 100644 index 000000000..d0955d9dc --- /dev/null +++ b/workers/event_producer/pkg/producer/batch_handler_test.go @@ -0,0 +1,130 @@ +// Copyright 2025 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +package producer + +import ( + "context" + "errors" + "testing" + + "github.com/GoogleChrome/webstatus.dev/lib/event" + "github.com/GoogleChrome/webstatus.dev/lib/workertypes" +) + +type mockSearchLister struct { + searches []workertypes.SearchJob + err error +} + +func (m *mockSearchLister) ListAllSavedSearches(_ context.Context) ([]workertypes.SearchJob, error) { + return m.searches, m.err +} + +type mockCommandPublisher struct { + commands []workertypes.RefreshSearchCommand + err error +} + +func (m *mockCommandPublisher) PublishRefreshCommand(_ context.Context, cmd workertypes.RefreshSearchCommand) error { + m.commands = append(m.commands, cmd) + + return m.err +} + +func TestProcessBatchUpdate(t *testing.T) { + tests := []struct { + name string + searches []workertypes.SearchJob + listerErr error + pubErr error + expectPubCalls int + wantErr bool + transient bool + }{ + { + name: "success with searches", + searches: []workertypes.SearchJob{ + {ID: "s1", Query: "q=1"}, + {ID: "s2", Query: "q=2"}, + }, + listerErr: nil, + pubErr: nil, + expectPubCalls: 2, + wantErr: false, + transient: false, + }, + { + name: "success empty list", + searches: []workertypes.SearchJob{}, + expectPubCalls: 0, + listerErr: nil, + pubErr: nil, + wantErr: false, + transient: false, + }, + { + name: "lister error", + listerErr: errors.New("db error"), + wantErr: true, + transient: true, + searches: nil, + pubErr: nil, + expectPubCalls: 0, + }, + { + name: "publisher error", + listerErr: nil, + searches: []workertypes.SearchJob{{ID: "s1", Query: "q=1"}}, + pubErr: errors.New("pub error"), + wantErr: true, + transient: true, + expectPubCalls: 1, + }, + } + + for _, tc := range tests { + t.Run(tc.name, func(t *testing.T) { + lister := &mockSearchLister{searches: tc.searches, err: tc.listerErr} + pub := &mockCommandPublisher{err: tc.pubErr, commands: nil} + handler := NewBatchUpdateHandler(lister, pub) + + err := handler.ProcessBatchUpdate(context.Background(), "trigger-1", 
workertypes.FrequencyImmediate) + + if (err != nil) != tc.wantErr { + t.Errorf("ProcessBatchUpdate() error = %v, wantErr %v", err, tc.wantErr) + } + + if tc.wantErr && tc.transient { + if !errors.Is(err, event.ErrTransientFailure) { + t.Errorf("Expected error to be transient") + } + } + + if len(pub.commands) != tc.expectPubCalls { + t.Errorf("Expected %d publish calls, got %d", tc.expectPubCalls, len(pub.commands)) + } + + // Verify command content for success case + if !tc.wantErr && len(tc.searches) > 0 { + if pub.commands[0].SearchID != "s1" { + t.Errorf("Command data mismatch") + } + if pub.commands[0].Frequency != workertypes.FrequencyImmediate { + t.Errorf("Frequency mismatch") + } + } + }) + } +} diff --git a/workers/event_producer/pkg/producer/diff_test.go b/workers/event_producer/pkg/producer/diff_test.go index bfb265a48..348229a42 100644 --- a/workers/event_producer/pkg/producer/diff_test.go +++ b/workers/event_producer/pkg/producer/diff_test.go @@ -163,6 +163,7 @@ func TestV1DiffSerializer_Serialize(t *testing.T) { diff := &featurelistdiffv1.FeatureDiff{ QueryChanged: false, Added: []featurelistdiffv1.FeatureAdded{{ID: "feat-a", Name: "Feature A", Reason: "", Docs: nil}}, + Deleted: nil, Removed: nil, Modified: nil, Moves: nil, diff --git a/workers/event_producer/pkg/producer/producer.go b/workers/event_producer/pkg/producer/producer.go index 6e3483069..dd72d89e3 100644 --- a/workers/event_producer/pkg/producer/producer.go +++ b/workers/event_producer/pkg/producer/producer.go @@ -19,7 +19,9 @@ import ( "errors" "fmt" "log/slog" + "time" + "github.com/GoogleChrome/webstatus.dev/lib/event" "github.com/GoogleChrome/webstatus.dev/lib/workertypes" "github.com/GoogleChrome/webstatus.dev/workers/event_producer/pkg/differ" ) @@ -37,6 +39,10 @@ type BlobStorage interface { // EventMetadataStore handles the publishing and retrieval of event metadata. 
type EventMetadataStore interface { + AcquireLock(ctx context.Context, searchID string, frequency workertypes.JobFrequency, + workerID string, lockTTL time.Duration) error + ReleaseLock(ctx context.Context, searchID string, frequency workertypes.JobFrequency, + workerID string) error PublishEvent(ctx context.Context, req workertypes.PublishEventRequest) error // GetLatestEvent retrieves the last known event for a search to establish continuity. GetLatestEvent(ctx context.Context, @@ -74,10 +80,28 @@ func baseblobname(id string) string { return fmt.Sprintf("%s.json", id) } +func getDefaultLockTTL() time.Duration { + return 2 * time.Minute +} + // ProcessSearch is the main entry point triggered when a search query needs to be checked. // triggerID is the unique ID for this execution (e.g., from a Pub/Sub message). func (p *EventProducer) ProcessSearch(ctx context.Context, searchID string, query string, frequency workertypes.JobFrequency, triggerID string) error { + // 0. Acquire Lock + // TODO: For now, use the triggerID as the worker ID. + // https://github.com/GoogleChrome/webstatus.dev/issues/2123 + workerID := triggerID + if err := p.metaStore.AcquireLock(ctx, searchID, frequency, workerID, getDefaultLockTTL()); err != nil { + slog.ErrorContext(ctx, "failed to acquire lock", "search_id", searchID, "worker_id", workerID, "error", err) + + return fmt.Errorf("%w: failed to acquire lock: %w", event.ErrTransientFailure, err) + } + defer func() { + if err := p.metaStore.ReleaseLock(ctx, searchID, frequency, workerID); err != nil { + slog.ErrorContext(ctx, "failed to release lock", "search_id", searchID, "worker_id", workerID, "error", err) + } + }() // 1. Fetch Previous State // We need the last known state to compute the diff. 
lastEvent, err := p.metaStore.GetLatestEvent(ctx, frequency, searchID) diff --git a/workers/event_producer/pkg/producer/producer_test.go b/workers/event_producer/pkg/producer/producer_test.go index bc79fbc91..297d41763 100644 --- a/workers/event_producer/pkg/producer/producer_test.go +++ b/workers/event_producer/pkg/producer/producer_test.go @@ -94,6 +94,17 @@ type mockEventMetadataStore struct { info *workertypes.LatestEventInfo err error } + acquireLockReturns error + releaseLockReturns error +} + +func (m *mockEventMetadataStore) AcquireLock(_ context.Context, _ string, _ workertypes.JobFrequency, _ string, + _ time.Duration) error { + return m.acquireLockReturns +} + +func (m *mockEventMetadataStore) ReleaseLock(_ context.Context, _ string, _ workertypes.JobFrequency, _ string) error { + return m.releaseLockReturns } func (m *mockEventMetadataStore) PublishEvent(_ context.Context, req workertypes.PublishEventRequest) error { diff --git a/workers/push_delivery/cmd/job/main.go b/workers/push_delivery/cmd/job/main.go index e92d2aad3..764fbfef3 100644 --- a/workers/push_delivery/cmd/job/main.go +++ b/workers/push_delivery/cmd/job/main.go @@ -19,9 +19,11 @@ import ( "log/slog" "os" - "github.com/GoogleChrome/webstatus.dev/lib/gcpgcs" "github.com/GoogleChrome/webstatus.dev/lib/gcppubsub" + "github.com/GoogleChrome/webstatus.dev/lib/gcppubsub/gcppubsubadapters" "github.com/GoogleChrome/webstatus.dev/lib/gcpspanner" + "github.com/GoogleChrome/webstatus.dev/lib/gcpspanner/spanneradapters" + "github.com/GoogleChrome/webstatus.dev/workers/push_delivery/pkg/dispatcher" ) func main() { @@ -64,29 +66,22 @@ func main() { os.Exit(1) } - stateBlobBucket := os.Getenv("STATE_BLOB_BUCKET") - if stateBlobBucket == "" { - slog.ErrorContext(ctx, "STATE_BLOB_BUCKET is not set. 
exiting...") - os.Exit(1) - } - queueClient, err := gcppubsub.NewClient(ctx, projectID) if err != nil { slog.ErrorContext(ctx, "unable to create pub sub client", "error", err) os.Exit(1) } - _, err = gcpgcs.NewClient(ctx, stateBlobBucket) - if err != nil { - slog.ErrorContext(ctx, "unable to create gcs client", "error", err) - os.Exit(1) - } - - // TODO: https://github.com/GoogleChrome/webstatus.dev/issues/1851 - // Nil handler for now. Will fix later - err = queueClient.Subscribe(ctx, notificationSubID, nil) - if err != nil { - slog.ErrorContext(ctx, "unable to connect to subscription", "error", err) + listener := gcppubsubadapters.NewPushDeliverySubscriberAdapter( + dispatcher.NewDispatcher( + spanneradapters.NewPushDeliverySubscriberFinder(spannerClient), + gcppubsubadapters.NewPushDeliveryPublisher(queueClient, emailTopicID), + ), + queueClient, + notificationSubID, + ) + if err := listener.Subscribe(ctx); err != nil { + slog.ErrorContext(ctx, "Push delivery subscriber failed", "error", err) os.Exit(1) } } diff --git a/workers/push_delivery/go.mod b/workers/push_delivery/go.mod index f41db9c1f..2caa01c2c 100644 --- a/workers/push_delivery/go.mod +++ b/workers/push_delivery/go.mod @@ -6,40 +6,66 @@ replace github.com/GoogleChrome/webstatus.dev/lib => ../../lib replace github.com/GoogleChrome/webstatus.dev/lib/gen => ../../lib/gen -require github.com/GoogleChrome/webstatus.dev/lib v0.0.0-00010101000000-000000000000 +require ( + github.com/GoogleChrome/webstatus.dev/lib v0.0.0-00010101000000-000000000000 + github.com/google/go-cmp v0.7.0 +) require ( cel.dev/expr v0.25.1 // indirect cloud.google.com/go v0.123.0 // indirect cloud.google.com/go/auth v0.17.0 // indirect cloud.google.com/go/auth/oauth2adapt v0.2.8 // indirect + cloud.google.com/go/cloudtasks v1.13.7 // indirect cloud.google.com/go/compute/metadata v0.9.0 // indirect + cloud.google.com/go/datastore v1.21.0 // indirect cloud.google.com/go/iam v1.5.3 // indirect + cloud.google.com/go/logging v1.13.1 // 
indirect + cloud.google.com/go/longrunning v0.7.0 // indirect cloud.google.com/go/monitoring v1.24.3 // indirect cloud.google.com/go/pubsub/v2 v2.0.0 // indirect + cloud.google.com/go/secretmanager v1.16.0 // indirect cloud.google.com/go/spanner v1.86.1 // indirect - cloud.google.com/go/storage v1.57.2 // indirect github.com/GoogleChrome/webstatus.dev/lib/gen v0.0.0-20251209015135-23da3f0e7b76 // indirect github.com/GoogleCloudPlatform/grpc-gcp-go/grpcgcp v1.5.3 // indirect github.com/GoogleCloudPlatform/opentelemetry-operations-go/detectors/gcp v1.30.0 // indirect - github.com/GoogleCloudPlatform/opentelemetry-operations-go/exporter/metric v0.54.0 // indirect - github.com/GoogleCloudPlatform/opentelemetry-operations-go/internal/resourcemapping v0.54.0 // indirect github.com/antlr4-go/antlr/v4 v4.13.1 // indirect + github.com/apapsch/go-jsonmerge/v2 v2.0.0 // indirect github.com/cespare/xxhash/v2 v2.3.0 // indirect github.com/cncf/xds/go v0.0.0-20251110193048-8bfbf64dc13e // indirect + github.com/deckarep/golang-set v1.8.0 // indirect github.com/envoyproxy/go-control-plane/envoy v1.36.0 // indirect github.com/envoyproxy/protoc-gen-validate v1.2.1 // indirect github.com/felixge/httpsnoop v1.0.4 // indirect + github.com/getkin/kin-openapi v0.133.0 // indirect github.com/go-jose/go-jose/v4 v4.1.3 // indirect github.com/go-logr/logr v1.4.3 // indirect github.com/go-logr/stdr v1.2.2 // indirect + github.com/go-openapi/jsonpointer v0.22.3 // indirect + github.com/go-openapi/swag/jsonname v0.25.3 // indirect github.com/golang/groupcache v0.0.0-20241129210726-2c02b8208cf8 // indirect + github.com/gomodule/redigo v1.9.3 // indirect + github.com/google/go-github/v77 v77.0.0 // indirect + github.com/google/go-querystring v1.1.0 // indirect github.com/google/s2a-go v0.1.9 // indirect github.com/google/uuid v1.6.0 // indirect github.com/googleapis/enterprise-certificate-proxy v0.3.7 // indirect github.com/googleapis/gax-go/v2 v2.15.0 // indirect + github.com/gorilla/handlers 
v1.5.2 // indirect + github.com/gorilla/mux v1.8.1 // indirect + github.com/gorilla/securecookie v1.1.2 // indirect + github.com/josharian/intern v1.0.0 // indirect + github.com/mailru/easyjson v0.9.1 // indirect + github.com/mohae/deepcopy v0.0.0-20170929034955-c48cc78d4826 // indirect + github.com/oapi-codegen/runtime v1.1.2 // indirect + github.com/oasdiff/yaml v0.0.0-20250309154309-f31be36b4037 // indirect + github.com/oasdiff/yaml3 v0.0.0-20250309153720-d2182401db90 // indirect + github.com/perimeterx/marshmallow v1.1.5 // indirect github.com/planetscale/vtprotobuf v0.6.1-0.20240319094008-0393e58bdf10 // indirect + github.com/sirupsen/logrus v1.9.3 // indirect github.com/spiffe/go-spiffe/v2 v2.6.0 // indirect + github.com/web-platform-tests/wpt.fyi v0.0.0-20251118162843-54f805c8a632 // indirect + github.com/woodsbury/decimal128 v1.4.0 // indirect go.opencensus.io v0.24.0 // indirect go.opentelemetry.io/auto/sdk v1.2.1 // indirect go.opentelemetry.io/contrib/detectors/gcp v1.38.0 // indirect @@ -64,4 +90,5 @@ require ( google.golang.org/genproto/googleapis/rpc v0.0.0-20251111163417-95abcf5c77ba // indirect google.golang.org/grpc v1.77.0 // indirect google.golang.org/protobuf v1.36.10 // indirect + gopkg.in/yaml.v3 v3.0.1 // indirect ) diff --git a/workers/push_delivery/go.sum b/workers/push_delivery/go.sum index 181f6edc7..672b53cf0 100644 --- a/workers/push_delivery/go.sum +++ b/workers/push_delivery/go.sum @@ -555,8 +555,6 @@ cloud.google.com/go/storage v1.23.0/go.mod h1:vOEEDNFnciUMhBeT6hsJIn3ieU5cFRmzeL cloud.google.com/go/storage v1.27.0/go.mod h1:x9DOL8TK/ygDUMieqwfhdpQryTeEkhGKMi80i/iqR2s= cloud.google.com/go/storage v1.28.1/go.mod h1:Qnisd4CqDdo6BGs2AD5LLnEsmSQ80wQ5ogcBBKhU86Y= cloud.google.com/go/storage v1.29.0/go.mod h1:4puEjyTKnku6gfKoTfNOU/W+a9JyuVNxjpS5GBrB8h4= -cloud.google.com/go/storage v1.57.2 h1:sVlym3cHGYhrp6XZKkKb+92I1V42ks2qKKpB0CF5Mb4= -cloud.google.com/go/storage v1.57.2/go.mod h1:n5ijg4yiRXXpCu0sJTD6k+eMf7GRrJmPyr9YxLXGHOk= 
cloud.google.com/go/storagetransfer v1.5.0/go.mod h1:dxNzUopWy7RQevYFHewchb29POFv3/AaBgnhqzqiK0w= cloud.google.com/go/storagetransfer v1.6.0/go.mod h1:y77xm4CQV/ZhFZH75PLEXY0ROiS7Gh6pSKrM8dJyg6I= cloud.google.com/go/storagetransfer v1.7.0/go.mod h1:8Giuj1QNb1kfLAiWM1bN6dHzfdlDAVC9rv9abHot2W4= @@ -576,8 +574,6 @@ cloud.google.com/go/trace v1.3.0/go.mod h1:FFUE83d9Ca57C+K8rDl/Ih8LwOzWIV1krKgxg cloud.google.com/go/trace v1.4.0/go.mod h1:UG0v8UBqzusp+z63o7FK74SdFE+AXpCLdFb1rshXG+Y= cloud.google.com/go/trace v1.8.0/go.mod h1:zH7vcsbAhklH8hWFig58HvxcxyQbaIqMarMg9hn5ECA= cloud.google.com/go/trace v1.9.0/go.mod h1:lOQqpE5IaWY0Ixg7/r2SjixMuc6lfTFeO4QGM4dQWOk= -cloud.google.com/go/trace v1.11.7 h1:kDNDX8JkaAG3R2nq1lIdkb7FCSi1rCmsEtKVsty7p+U= -cloud.google.com/go/trace v1.11.7/go.mod h1:TNn9d5V3fQVf6s4SCveVMIBS2LJUqo73GACmq/Tky0s= cloud.google.com/go/translate v1.3.0/go.mod h1:gzMUwRjvOqj5i69y/LYLd8RrNQk+hOmIXTi9+nb3Djs= cloud.google.com/go/translate v1.4.0/go.mod h1:06Dn/ppvLD6WvA5Rhdp029IX2Mi3Mn7fpMRLPvXT5Wg= cloud.google.com/go/translate v1.5.0/go.mod h1:29YDSYveqqpA1CQFD7NQuP49xymq17RXNaUDdc0mNu0= @@ -637,16 +633,11 @@ github.com/GoogleCloudPlatform/grpc-gcp-go/grpcgcp v1.5.3 h1:2afWGsMzkIcN8Qm4mgP github.com/GoogleCloudPlatform/grpc-gcp-go/grpcgcp v1.5.3/go.mod h1:dppbR7CwXD4pgtV9t3wD1812RaLDcBjtblcDF5f1vI0= github.com/GoogleCloudPlatform/opentelemetry-operations-go/detectors/gcp v1.30.0 h1:sBEjpZlNHzK1voKq9695PJSX2o5NEXl7/OL3coiIY0c= github.com/GoogleCloudPlatform/opentelemetry-operations-go/detectors/gcp v1.30.0/go.mod h1:P4WPRUkOhJC13W//jWpyfJNDAIpvRbAUIYLX/4jtlE0= -github.com/GoogleCloudPlatform/opentelemetry-operations-go/exporter/metric v0.54.0 h1:lhhYARPUu3LmHysQ/igznQphfzynnqI3D75oUyw1HXk= -github.com/GoogleCloudPlatform/opentelemetry-operations-go/exporter/metric v0.54.0/go.mod h1:l9rva3ApbBpEJxSNYnwT9N4CDLrWgtq3u8736C5hyJw= -github.com/GoogleCloudPlatform/opentelemetry-operations-go/internal/cloudmock v0.54.0 h1:xfK3bbi6F2RDtaZFtUdKO3osOBIhNb+xTs8lFW6yx9o= 
-github.com/GoogleCloudPlatform/opentelemetry-operations-go/internal/cloudmock v0.54.0/go.mod h1:vB2GH9GAYYJTO3mEn8oYwzEdhlayZIdQz6zdzgUIRvA= -github.com/GoogleCloudPlatform/opentelemetry-operations-go/internal/resourcemapping v0.54.0 h1:s0WlVbf9qpvkh1c/uDAPElam0WrL7fHRIidgZJ7UqZI= -github.com/GoogleCloudPlatform/opentelemetry-operations-go/internal/resourcemapping v0.54.0/go.mod h1:Mf6O40IAyB9zR/1J8nGDDPirZQQPbYJni8Yisy7NTMc= github.com/JohnCGriffin/overflow v0.0.0-20211019200055-46fa312c352c/go.mod h1:X0CRv0ky0k6m906ixxpzmDRLvX58TFUKS2eePweuyxk= github.com/Microsoft/go-winio v0.6.2 h1:F2VQgta7ecxGYO8k3ZZz3RS8fVIXVxONVUPlNERoyfY= github.com/Microsoft/go-winio v0.6.2/go.mod h1:yd8OoFMLzJbo9gZq8j5qaps8bJ9aShtEA8Ipt1oGCvU= github.com/OneOfOne/xxhash v1.2.2/go.mod h1:HSdplMjZKSmBqAxg5vPj2TmRDmfkzw+cTzAElWljhcU= +github.com/RaveNoX/go-jsoncommentstrip v1.0.0/go.mod h1:78ihd09MekBnJnxpICcwzCMzGrKSKYe4AqU6PDYYpjk= github.com/ajstarks/deck v0.0.0-20200831202436-30c9fc6549a9/go.mod h1:JynElWSGnm/4RlzPXRlREEwqTHAN3T56Bv2ITsFT3gY= github.com/ajstarks/deck/generate v0.0.0-20210309230005-c3f852c02e19/go.mod h1:T13YZdzov6OU0A1+RfKZiZN9ca6VeKdBdyDV+BY97Tk= github.com/ajstarks/svgo v0.0.0-20180226025133-644b8db467af/go.mod h1:K08gAheRH3/J6wwsYMMT4xOr94bZjxIelGM0+d/wbFw= @@ -658,6 +649,9 @@ github.com/antlr4-go/antlr/v4 v4.13.1/go.mod h1:GKmUxMtwp6ZgGwZSva4eWPC5mS6vUAmO github.com/apache/arrow/go/v10 v10.0.1/go.mod h1:YvhnlEePVnBS4+0z3fhPfUy7W1Ikj0Ih0vcRo/gZ1M0= github.com/apache/arrow/go/v11 v11.0.0/go.mod h1:Eg5OsL5H+e299f7u5ssuXsuHQVEGC4xei5aX110hRiI= github.com/apache/thrift v0.16.0/go.mod h1:PHK3hniurgQaNMZYaCLEqXKsYK8upmhPbmdP2FXSqgU= +github.com/apapsch/go-jsonmerge/v2 v2.0.0 h1:axGnT1gRIfimI7gJifB699GoE/oq+F2MU7Dml6nw9rQ= +github.com/apapsch/go-jsonmerge/v2 v2.0.0/go.mod h1:lvDnEdqiQrp0O42VQGgmlKpxL1AP2+08jFMw88y4klk= +github.com/bmatcuk/doublestar v1.1.1/go.mod h1:UD6OnuiIn0yFxxA2le/rnRU1G4RaI4UvFv1sNto9p6w= github.com/boombuler/barcode v1.0.0/go.mod 
h1:paBWMcWSl3LHKBqUq+rly7CNSldXjb2rDl3JlRe0mD8= github.com/boombuler/barcode v1.0.1/go.mod h1:paBWMcWSl3LHKBqUq+rly7CNSldXjb2rDl3JlRe0mD8= github.com/cenkalti/backoff/v4 v4.3.0 h1:MyRJ/UdXutAwSAT+s3wNd7MfTIcy71VQueUuFK343L8= @@ -745,6 +739,8 @@ github.com/felixge/httpsnoop v1.0.4 h1:NFTV2Zj1bL4mc9sqWACXbQFVBBg2W3GPvqp8/ESS2 github.com/felixge/httpsnoop v1.0.4/go.mod h1:m8KPJKqk1gH5J9DgRY2ASl2lWCfGKXixSwevea8zH2U= github.com/fogleman/gg v1.2.1-0.20190220221249-0403632d5b90/go.mod h1:R/bRT+9gY/C5z7JzPU0zXsXHKM4/ayA+zqcVNZzPa1k= github.com/fogleman/gg v1.3.0/go.mod h1:R/bRT+9gY/C5z7JzPU0zXsXHKM4/ayA+zqcVNZzPa1k= +github.com/getkin/kin-openapi v0.133.0 h1:pJdmNohVIJ97r4AUFtEXRXwESr8b0bD721u/Tz6k8PQ= +github.com/getkin/kin-openapi v0.133.0/go.mod h1:boAciF6cXk5FhPqe/NQeBTeenbjqU4LhWBf09ILVvWE= github.com/ghodss/yaml v1.0.0/go.mod h1:4dBDuWmgqj2HViK6kFavaiC9ZROes6MMH2rRYeMEF04= github.com/go-fonts/dejavu v0.1.0/go.mod h1:4Wt4I4OU2Nq9asgDCteaAaWZOV24E+0/Pwo0gppep4g= github.com/go-fonts/latin-modern v0.2.0/go.mod h1:rQVLdDMK+mK1xscDwsqM5J8U2jrRa3T0ecnM9pNujks= @@ -765,8 +761,16 @@ github.com/go-logr/stdr v1.2.2 h1:hSWxHoqTgW2S2qGc0LTAI563KZ5YKYRhT3MFKZMbjag= github.com/go-logr/stdr v1.2.2/go.mod h1:mMo/vtBO5dYbehREoey6XUKy/eSumjCCveDpRre4VKE= github.com/go-ole/go-ole v1.2.6 h1:/Fpf6oFPoeFik9ty7siob0G6Ke8QvQEuVcuChpwXzpY= github.com/go-ole/go-ole v1.2.6/go.mod h1:pprOEPIfldk/42T2oK7lQ4v4JSDwmV0As9GaiUsvbm0= +github.com/go-openapi/jsonpointer v0.22.3 h1:dKMwfV4fmt6Ah90zloTbUKWMD+0he+12XYAsPotrkn8= +github.com/go-openapi/jsonpointer v0.22.3/go.mod h1:0lBbqeRsQ5lIanv3LHZBrmRGHLHcQoOXQnf88fHlGWo= +github.com/go-openapi/swag/jsonname v0.25.3 h1:U20VKDS74HiPaLV7UZkztpyVOw3JNVsit+w+gTXRj0A= +github.com/go-openapi/swag/jsonname v0.25.3/go.mod h1:GPVEk9CWVhNvWhZgrnvRA6utbAltopbKwDu8mXNUMag= +github.com/go-openapi/testify/v2 v2.0.2 h1:X999g3jeLcoY8qctY/c/Z8iBHTbwLz7R2WXd6Ub6wls= +github.com/go-openapi/testify/v2 v2.0.2/go.mod h1:HCPmvFFnheKK2BuwSA0TbbdxJ3I16pjwMkYkP4Ywn54= 
github.com/go-pdf/fpdf v0.5.0/go.mod h1:HzcnA+A23uwogo0tp9yU+l3V+KXhiESpt1PMayhOh5M= github.com/go-pdf/fpdf v0.6.0/go.mod h1:HzcnA+A23uwogo0tp9yU+l3V+KXhiESpt1PMayhOh5M= +github.com/go-test/deep v1.0.8 h1:TDsG77qcSprGbC6vTN8OuXp5g+J+b5Pcguhf7Zt61VM= +github.com/go-test/deep v1.0.8/go.mod h1:5C2ZWiW0ErCdrYzpqxLbTX7MG14M9iiw8DgHncVwcsE= github.com/goccy/go-json v0.9.11/go.mod h1:6MelG93GURQebXPDq3khkgXZkazVtN9CRI+MGFi0w8I= github.com/gogo/protobuf v1.3.2 h1:Ov1cvc58UF3b5XjBnZv7+opcTcQFZebYjWzi34vdm4Q= github.com/gogo/protobuf v1.3.2/go.mod h1:P1XiOD3dCwIKUDQYPy72D8LYyHL2YPYrpS2s69NZV8Q= @@ -838,14 +842,13 @@ github.com/google/go-github/v77 v77.0.0 h1:9DsKKbZqil5y/4Z9mNpZDQnpli6PJbqipSuuN github.com/google/go-github/v77 v77.0.0/go.mod h1:c8VmGXRUmaZUqbctUcGEDWYnMrtzZfJhDSylEf1wfmA= github.com/google/go-querystring v1.1.0 h1:AnCroh3fv4ZBgVIf1Iwtovgjaw/GiKJo8M8yD/fhyJ8= github.com/google/go-querystring v1.1.0/go.mod h1:Kcdr2DB4koayq7X8pmAG4sNG59So17icRSOU623lUBU= -github.com/google/martian v2.1.0+incompatible h1:/CP5g8u/VJHijgedC/Legn3BAbAaWPgecwXBIDzw5no= +github.com/google/gofuzz v1.2.0 h1:xRy4A+RhZaiKjJ1bPfwQ8sedCA+YS2YcCHW6ec7JMi0= +github.com/google/gofuzz v1.2.0/go.mod h1:dBl0BpW6vV/+mYPU4Po3pmUjxk6FQPldtuIdl/M65Eg= github.com/google/martian v2.1.0+incompatible/go.mod h1:9I4somxYTbIHy5NJKHRl3wXiIaQGbYVAs8BPL6v8lEs= github.com/google/martian/v3 v3.0.0/go.mod h1:y5Zk1BBys9G+gd6Jrk0W3cC1+ELVxBWuIGO+w/tUAp0= github.com/google/martian/v3 v3.1.0/go.mod h1:y5Zk1BBys9G+gd6Jrk0W3cC1+ELVxBWuIGO+w/tUAp0= github.com/google/martian/v3 v3.2.1/go.mod h1:oBOf6HBosgwRXnUGWUB05QECsc6uvmMiJ3+6W4l/CUk= github.com/google/martian/v3 v3.3.2/go.mod h1:oBOf6HBosgwRXnUGWUB05QECsc6uvmMiJ3+6W4l/CUk= -github.com/google/martian/v3 v3.3.3 h1:DIhPTQrbPkgs2yJYdXU/eNACCG5DVQjySNRNlflZ9Fc= -github.com/google/martian/v3 v3.3.3/go.mod h1:iEPrYcgCF7jA9OtScMFQyAlZZ4YXTKEtJ1E6RWzmBA0= github.com/google/pprof v0.0.0-20181206194817-3ea8567a2e57/go.mod h1:zfwlbNMJ+OItoe0UupaVj+oy1omPYYDuagoSzA8v9mc= 
github.com/google/pprof v0.0.0-20190515194954-54271f7e092f/go.mod h1:zfwlbNMJ+OItoe0UupaVj+oy1omPYYDuagoSzA8v9mc= github.com/google/pprof v0.0.0-20191218002539-d4f498aebedc/go.mod h1:ZgVRPoUq/hfqzAqh7sHMqb3I9Rq5C59dIz2SbBwJ4eM= @@ -904,8 +907,11 @@ github.com/hashicorp/golang-lru v0.5.1/go.mod h1:/m3WP610KZHVQ1SGc6re/UDhFvYD7pJ github.com/iancoleman/strcase v0.2.0/go.mod h1:iwCmte+B7n89clKwxIoIXy/HfoL7AsD47ZCWhYzw7ho= github.com/ianlancetaylor/demangle v0.0.0-20181102032728-5e5cf60278f6/go.mod h1:aSSvb/t6k1mPoxDqO4vJh6VOCGPwU4O0C2/Eqndh1Sc= github.com/ianlancetaylor/demangle v0.0.0-20200824232613-28f6c0f3b639/go.mod h1:aSSvb/t6k1mPoxDqO4vJh6VOCGPwU4O0C2/Eqndh1Sc= +github.com/josharian/intern v1.0.0 h1:vlS4z54oSdjm0bgjRigI+G1HpF+tI+9rE5LLzOg8HmY= +github.com/josharian/intern v1.0.0/go.mod h1:5DoeVV0s6jJacbCEi61lwdGj/aVlrQvzHFFd8Hwg//Y= github.com/jstemmer/go-junit-report v0.0.0-20190106144839-af01ea7f8024/go.mod h1:6v2b51hI/fHJwM22ozAgKL4VKDeJcHhJFhtBdhmNjmU= github.com/jstemmer/go-junit-report v0.9.1/go.mod h1:Brl9GWCQeLvo8nXZwPNNblvFj/XSXhF0NWZEnDohbsk= +github.com/juju/gnuflag v0.0.0-20171113085948-2ce1bb71843d/go.mod h1:2PavIy+JPciBPrBUjwbNvtwB6RQlve+hkpll6QSNmOE= github.com/jung-kurt/gofpdf v1.0.0/go.mod h1:7Id9E/uU8ce6rXgefFLlgrJj/GYY22cpxn+r32jIOes= github.com/jung-kurt/gofpdf v1.0.3-0.20190309125859-24315acbbda5/go.mod h1:7Id9E/uU8ce6rXgefFLlgrJj/GYY22cpxn+r32jIOes= github.com/kballard/go-shellquote v0.0.0-20180428030007-95032a82bc51/go.mod h1:CzGEWj7cYgsdH8dAjBGEr58BoE7ScuLd+fwFZ44+/x8= @@ -919,8 +925,11 @@ github.com/kr/fs v0.1.0/go.mod h1:FFnZGqtBN9Gxj7eW1uZ42v5BccTP0vu6NEaFoC2HwRg= github.com/kr/pretty v0.1.0/go.mod h1:dAy3ld7l9f0ibDNOQOHHMYYIIbhfbHSm3C4ZsoJORNo= github.com/kr/pretty v0.2.1/go.mod h1:ipq/a2n7PKx3OHsz4KJII5eveXtPO4qwEXGdVfWzfnI= github.com/kr/pretty v0.3.0/go.mod h1:640gp4NfQd8pI5XOwp5fnNeVWj67G7CFk/SaSQn7NBk= +github.com/kr/pretty v0.3.1 h1:flRD4NNwYAUpkphVc1HcthR4KEIFJ65n8Mw5qdRn3LE= +github.com/kr/pretty v0.3.1/go.mod 
h1:hoEshYVHaxMs3cyo3Yncou5ZscifuDolrwPKZanG3xk= github.com/kr/pty v1.1.1/go.mod h1:pFQYn66WHrOpPYNljwOMqo10TkYh1fy3cYio2l3bCsQ= github.com/kr/text v0.1.0/go.mod h1:4Jbv+DJW3UT/LiOwJeYQe1efqtUx/iVham/4vfdArNI= +github.com/kr/text v0.2.0 h1:5Nx0Ya0ZqY2ygV366QzturHI13Jq95ApcVaJBhpS+AY= github.com/kr/text v0.2.0/go.mod h1:eLer722TekiGuMkidMxC/pM04lWEeraHUUmBw8l2grE= github.com/lufia/plan9stats v0.0.0-20211012122336-39d0f177ccd0 h1:6E+4a0GO5zZEnZ81pIr0yLvtUWk2if982qA3F3QD6H4= github.com/lufia/plan9stats v0.0.0-20211012122336-39d0f177ccd0/go.mod h1:zJYVVT2jmtg6P3p1VtQj7WsuWi/y4VnjVBn7F8KPB3I= @@ -929,6 +938,8 @@ github.com/lyft/protoc-gen-star v0.6.1/go.mod h1:TGAoBVkt8w7MPG72TrKIu85MIdXwDuz github.com/lyft/protoc-gen-star/v2 v2.0.1/go.mod h1:RcCdONR2ScXaYnQC5tUzxzlpA3WVYF7/opLeUgcQs/o= github.com/magiconair/properties v1.8.10 h1:s31yESBquKXCV9a/ScB3ESkOjUYYv+X0rg8SYxI99mE= github.com/magiconair/properties v1.8.10/go.mod h1:Dhd985XPs7jluiymwWYZ0G4Z61jb3vdS329zhj2hYo0= +github.com/mailru/easyjson v0.9.1 h1:LbtsOm5WAswyWbvTEOqhypdPeZzHavpZx96/n553mR8= +github.com/mailru/easyjson v0.9.1/go.mod h1:1+xMtQp2MRNVL/V1bOzuP3aP8VNwRW55fQUto+XFtTU= github.com/mattn/go-isatty v0.0.12/go.mod h1:cbi8OIDigv2wuxKPP5vlRcQ1OAZbq2CE4Kysco4FUpU= github.com/mattn/go-isatty v0.0.16/go.mod h1:kYGgaQfpe5nmfYZH+SKPsOc2e4SrIfOl2e/yFXSvRLM= github.com/mattn/go-sqlite3 v1.14.14/go.mod h1:NyWgC/yNuGj7Q9rpYnZvas74GogHl5/Z4A/KQRfk6bU= @@ -948,12 +959,24 @@ github.com/moby/sys/userns v0.1.0 h1:tVLXkFOxVu9A64/yh59slHVv9ahO9UIev4JZusOLG/g github.com/moby/sys/userns v0.1.0/go.mod h1:IHUYgu/kao6N8YZlp9Cf444ySSvCmDlmzUcYfDHOl28= github.com/moby/term v0.5.0 h1:xt8Q1nalod/v7BqbG21f8mQPqH+xAaC9C3N3wfWbVP0= github.com/moby/term v0.5.0/go.mod h1:8FzsFHVUBGZdbDsJw/ot+X+d5HLUbvklYLJ9uGfcI3Y= +github.com/mohae/deepcopy v0.0.0-20170929034955-c48cc78d4826 h1:RWengNIwukTxcDr9M+97sNutRR1RKhG96O6jWumTTnw= +github.com/mohae/deepcopy v0.0.0-20170929034955-c48cc78d4826/go.mod h1:TaXosZuwdSHYgviHp1DAtfrULt5eUgsSMsZf+YrPgl8= 
github.com/morikuni/aec v1.0.0 h1:nP9CBfwrvYnBRgY6qfDQkygYDmYwOilePFkwzv4dU8A= github.com/morikuni/aec v1.0.0/go.mod h1:BbKIizmSmc5MMPqRYbxO4ZU0S0+P200+tUnFx7PXmsc= +github.com/oapi-codegen/runtime v1.1.2 h1:P2+CubHq8fO4Q6fV1tqDBZHCwpVpvPg7oKiYzQgXIyI= +github.com/oapi-codegen/runtime v1.1.2/go.mod h1:SK9X900oXmPWilYR5/WKPzt3Kqxn/uS/+lbpREv+eCg= +github.com/oasdiff/yaml v0.0.0-20250309154309-f31be36b4037 h1:G7ERwszslrBzRxj//JalHPu/3yz+De2J+4aLtSRlHiY= +github.com/oasdiff/yaml v0.0.0-20250309154309-f31be36b4037/go.mod h1:2bpvgLBZEtENV5scfDFEtB/5+1M4hkQhDQrccEJ/qGw= +github.com/oasdiff/yaml3 v0.0.0-20250309153720-d2182401db90 h1:bQx3WeLcUWy+RletIKwUIt4x3t8n2SxavmoclizMb8c= +github.com/oasdiff/yaml3 v0.0.0-20250309153720-d2182401db90/go.mod h1:y5+oSEHCPT/DGrS++Wc/479ERge0zTFxaF8PbGKcg2o= github.com/opencontainers/go-digest v1.0.0 h1:apOUWs51W5PlhuyGyz9FCeeBIOUDA/6nW8Oi/yOhh5U= github.com/opencontainers/go-digest v1.0.0/go.mod h1:0JzlMkj0TRzQZfJkVvzbP0HBR3IKzErnv2BNG4W4MAM= github.com/opencontainers/image-spec v1.1.1 h1:y0fUlFfIZhPF1W537XOLg0/fcx6zcHCJwooC2xJA040= github.com/opencontainers/image-spec v1.1.1/go.mod h1:qpqAh3Dmcf36wStyyWU+kCeDgrGnAve2nCC8+7h8Q0M= +github.com/perimeterx/marshmallow v1.1.5 h1:a2LALqQ1BlHM8PZblsDdidgv1mWi1DgC2UmX50IvK2s= +github.com/perimeterx/marshmallow v1.1.5/go.mod h1:dsXbUu8CRzfYP5a87xpp0xq9S3u0Vchtcl8we9tYaXw= +github.com/phayes/freeport v0.0.0-20220201140144-74d24b5ae9f5 h1:Ii+DKncOVM8Cu1Hc+ETb5K+23HdAMvESYE3ZJ5b5cMI= +github.com/phayes/freeport v0.0.0-20220201140144-74d24b5ae9f5/go.mod h1:iIss55rKnNBTvrwdmkUpLnDpZoAHvWaiq5+iMmen4AE= github.com/phpdave11/gofpdf v1.4.2/go.mod h1:zpO6xFn9yxo3YLyMvW8HcKWVdbNqgIfOOp2dXMnm1mY= github.com/phpdave11/gofpdi v1.0.12/go.mod h1:vBmVV0Do6hSBHC8uKUQ71JGW+ZGQq74llk/7bXwjDoI= github.com/phpdave11/gofpdi v1.0.13/go.mod h1:vBmVV0Do6hSBHC8uKUQ71JGW+ZGQq74llk/7bXwjDoI= @@ -979,6 +1002,8 @@ github.com/rogpeppe/fastuuid v1.2.0/go.mod h1:jVj6XXZzXRy/MSR5jhDC/2q6DgLz+nrA6L github.com/rogpeppe/go-internal 
v1.3.0/go.mod h1:M8bDsm7K2OlrFYOpmOWEs/qY81heoFRclV5y23lUDJ4= github.com/rogpeppe/go-internal v1.6.1/go.mod h1:xXDCJY+GAPziupqXw64V24skbSoqbTEfhy4qGm1nDQc= github.com/rogpeppe/go-internal v1.9.0/go.mod h1:WtVeX8xhTBvf0smdhujwtBcq4Qrzq/fJaraNFVN+nFs= +github.com/rogpeppe/go-internal v1.14.1 h1:UQB4HGPB6osV0SQTLymcB4TgvyWu6ZyliaW0tI/otEQ= +github.com/rogpeppe/go-internal v1.14.1/go.mod h1:MaRKkUm5W0goXpeCfT7UZI6fk/L7L7so1lCWt35ZSgc= github.com/ruudk/golang-pdf417 v0.0.0-20181029194003-1af4ab5afa58/go.mod h1:6lfFZQK844Gfx8o5WFuvpxWRwnSoipWe/p622j1v06w= github.com/ruudk/golang-pdf417 v0.0.0-20201230142125-a7e3863a1245/go.mod h1:pQAZKsJ8yyVxGRWYNEm9oFB8ieLgKFnamEyDmSA0BRk= github.com/shirou/gopsutil/v4 v4.25.6 h1:kLysI2JsKorfaFPcYmcJqbzROzsBWEOAtw6A7dIfqXs= @@ -991,10 +1016,12 @@ github.com/spf13/afero v1.6.0/go.mod h1:Ai8FlHk4v/PARR026UzYexafAt9roJ7LcLMAmO6Z github.com/spf13/afero v1.9.2/go.mod h1:iUV7ddyEEZPO5gA3zD4fJt6iStLlL+Lg4m2cihcDf8Y= github.com/spiffe/go-spiffe/v2 v2.6.0 h1:l+DolpxNWYgruGQVV0xsfeya3CsC7m8iBzDnMpsbLuo= github.com/spiffe/go-spiffe/v2 v2.6.0/go.mod h1:gm2SeUoMZEtpnzPNs2Csc0D/gX33k1xIx7lEzqblHEs= +github.com/spkg/bom v0.0.0-20160624110644-59b7046e48ad/go.mod h1:qLr4V1qq6nMqFKkMo8ZTx3f+BZEkzsRUY10Xsm2mwU0= github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME= github.com/stretchr/objx v0.4.0/go.mod h1:YvHI0jy2hoMjB+UWwv71VJQ9isScKT/TqJzVSSt89Yw= github.com/stretchr/objx v0.5.0/go.mod h1:Yh+to48EsGEfYuaHDzXPcE3xhTkx73EhmCGUpEOglKo= github.com/stretchr/testify v1.2.2/go.mod h1:a8OnRcib4nhh0OaRAV+Yts87kKdq0PP7pXfy6kDkUVs= +github.com/stretchr/testify v1.3.0/go.mod h1:M5WIy9Dh21IEIfnGCwXGc5bZfKNJtfHm1UVUgZn+9EI= github.com/stretchr/testify v1.4.0/go.mod h1:j7eGeouHqKxXV5pUuKE4zz7dFj8WfuZ+81PSLYec5m4= github.com/stretchr/testify v1.5.1/go.mod h1:5W2xD1RspED5o8YsWQXVCued0rvSQ+mT+I5cxcmMvtA= github.com/stretchr/testify v1.6.1/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg= @@ -1011,8 +1038,12 @@ 
github.com/tklauser/go-sysconf v0.3.12 h1:0QaGUFOdQaIVdPgfITYzaTegZvdCjmYO52cSFA github.com/tklauser/go-sysconf v0.3.12/go.mod h1:Ho14jnntGE1fpdOqQEEaiKRpvIavV0hSfmBq8nJbHYI= github.com/tklauser/numcpus v0.6.1 h1:ng9scYS7az0Bk4OZLvrNXNSAO2Pxr1XXRAPyjhIx+Fk= github.com/tklauser/numcpus v0.6.1/go.mod h1:1XfjsgE2zo8GVw7POkMbHENHzVg3GzmoZ9fESEdAacY= +github.com/ugorji/go/codec v1.2.11 h1:BMaWp1Bb6fHwEtbplGBGJ498wD+LKlNSl25MjdZY4dU= +github.com/ugorji/go/codec v1.2.11/go.mod h1:UNopzCgEMSXjBc6AOMqYvWC1ktqTAfzJZUZgYf6w6lg= github.com/web-platform-tests/wpt.fyi v0.0.0-20251118162843-54f805c8a632 h1:9t4b2caqsFRUWK4k9+lM/PkxLTCvo46ZQPVaoHPaCxY= github.com/web-platform-tests/wpt.fyi v0.0.0-20251118162843-54f805c8a632/go.mod h1:YpCvJq5JKA+aa4+jwK1S2uQ7r0horYVl6DHcl/G9S3s= +github.com/woodsbury/decimal128 v1.4.0 h1:xJATj7lLu4f2oObouMt2tgGiElE5gO6mSWUjQsBgUlc= +github.com/woodsbury/decimal128 v1.4.0/go.mod h1:BP46FUrVjVhdTbKT+XuQh2xfQaGki9LMIRJSFuh6THU= github.com/yuin/goldmark v1.1.25/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74= github.com/yuin/goldmark v1.1.27/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74= github.com/yuin/goldmark v1.1.32/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74= @@ -1045,8 +1076,6 @@ go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp v0.63.0 h1:RbKq8BG go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp v0.63.0/go.mod h1:h06DGIukJOevXaj/xrNjhi/2098RZzcLTbc0jDAUbsg= go.opentelemetry.io/otel v1.38.0 h1:RkfdswUDRimDg0m2Az18RKOsnI8UDzppJAtj01/Ymk8= go.opentelemetry.io/otel v1.38.0/go.mod h1:zcmtmQ1+YmQM9wrNsTGV/q/uyusom3P8RxwExxkZhjM= -go.opentelemetry.io/otel/exporters/stdout/stdoutmetric v1.38.0 h1:wm/Q0GAAykXv83wzcKzGGqAnnfLFyFe7RslekZuv+VI= -go.opentelemetry.io/otel/exporters/stdout/stdoutmetric v1.38.0/go.mod h1:ra3Pa40+oKjvYh+ZD3EdxFZZB0xdMfuileHAm4nNN7w= go.opentelemetry.io/otel/metric v1.38.0 h1:Kl6lzIYGAh5M159u9NgiRkmoMKjvbsKtYRwgfrA6WpA= go.opentelemetry.io/otel/metric v1.38.0/go.mod 
h1:kB5n/QoRM8YwmUahxvI3bO34eVtQf2i4utNVLr9gEmI= go.opentelemetry.io/otel/sdk v1.38.0 h1:l48sr5YbNf2hpCUj/FoGhW9yDkl+Ma+LrVl8qaM5b+E= @@ -1058,6 +1087,8 @@ go.opentelemetry.io/otel/trace v1.38.0/go.mod h1:j1P9ivuFsTceSWe1oY+EeW3sc+Pp42s go.opentelemetry.io/proto/otlp v0.7.0/go.mod h1:PqfVotwruBrMGOCsRd/89rSnXhoiJIqeYNgFYFoEGnI= go.opentelemetry.io/proto/otlp v0.15.0/go.mod h1:H7XAot3MsfNsj7EXtrA2q5xSNQ10UqI405h3+duxN4U= go.opentelemetry.io/proto/otlp v0.19.0/go.mod h1:H7XAot3MsfNsj7EXtrA2q5xSNQ10UqI405h3+duxN4U= +go.uber.org/mock v0.6.0 h1:hyF9dfmbgIX5EfOdasqLsWD6xqpNZlXblLB/Dbnwv3Y= +go.uber.org/mock v0.6.0/go.mod h1:KiVJ4BqZJaMj4svdfmHM0AUx4NJYO8ZNpPnZn1Z+BBU= golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w= golang.org/x/crypto v0.0.0-20190510104115-cbcb75029529/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI= golang.org/x/crypto v0.0.0-20190605123033-f99c8df09eb5/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI= @@ -1303,6 +1334,7 @@ golang.org/x/sys v0.0.0-20220520151302-bc2c85ada10a/go.mod h1:oPkhp1MJrh7nUepCBc golang.org/x/sys v0.0.0-20220610221304-9f5ed59c137d/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20220615213510-4f61da869c0c/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20220624220833-87e55d714810/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20220715151400-c0bba94af5f8/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20220722155257-8c9f86f7a55f/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20220728004956-3c1f35247d10/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20220811171246-fbc7d0a398ab/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= @@ -1697,6 +1729,7 @@ google.golang.org/protobuf v1.36.10 h1:AYd7cD/uASjIL6Q9LiTjz8JLcrh/88q5UObnmY3aO google.golang.org/protobuf v1.36.10/go.mod 
h1:HTf+CrKn2C3g5S8VImy6tdcUvCska2kB7j23XfzDpco= gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= gopkg.in/check.v1 v1.0.0-20180628173108-788fd7840127/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= +gopkg.in/check.v1 v1.0.0-20201130134442-10cb98267c6c h1:Hei/4ADfdWqJk1ZMxUNpqntNwaWcugrBjAiHlqqRiVk= gopkg.in/check.v1 v1.0.0-20201130134442-10cb98267c6c/go.mod h1:JHkPIbrfpd72SG/EVd6muEfDQjcINNoR0C8j2r3qZ4Q= gopkg.in/errgo.v2 v2.1.0/go.mod h1:hNsd1EY+bozCKY1Ytp96fpM3vjJbqLJn88ws8XvfDNI= gopkg.in/yaml.v2 v2.2.2/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= diff --git a/workers/push_delivery/manifests/pod.yaml b/workers/push_delivery/manifests/pod.yaml index ff2504835..0b80a3f3c 100644 --- a/workers/push_delivery/manifests/pod.yaml +++ b/workers/push_delivery/manifests/pod.yaml @@ -33,15 +33,11 @@ spec: - name: SPANNER_EMULATOR_HOST value: 'spanner:9010' - name: PUBSUB_EMULATOR_HOST - value: 'http://pubsub:8086' + value: 'pubsub:8060' - name: NOTIFICATION_SUBSCRIPTION_ID value: 'notification-events-sub-id' - name: EMAIL_TOPIC_ID value: 'chime-delivery-topic-id' - - name: STATE_BLOB_BUCKET - value: 'state-bucket' - - name: STORAGE_EMULATOR_HOST - value: 'http://gcs:4443' resources: limits: cpu: 250m diff --git a/workers/push_delivery/pkg/dispatcher/dispatcher.go b/workers/push_delivery/pkg/dispatcher/dispatcher.go new file mode 100644 index 000000000..dbd889460 --- /dev/null +++ b/workers/push_delivery/pkg/dispatcher/dispatcher.go @@ -0,0 +1,212 @@ +// Copyright 2025 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package dispatcher + +import ( + "context" + "fmt" + "log/slog" + + "github.com/GoogleChrome/webstatus.dev/lib/workertypes" +) + +type SubscriptionFinder interface { + // FindSubscribers retrieves all active subscriptions for the given search and frequency. + // The adapter is responsible for unmarshalling channel configs and sorting them into the set. + FindSubscribers(ctx context.Context, searchID string, + frequency workertypes.JobFrequency) (*workertypes.SubscriberSet, error) +} + +type DeliveryPublisher interface { + PublishEmailJob(ctx context.Context, job workertypes.EmailDeliveryJob) error +} + +// SummaryParser abstracts the logic for parsing the event summary blob. +type SummaryParser func(data []byte, v workertypes.SummaryVisitor) error + +type Dispatcher struct { + finder SubscriptionFinder + publisher DeliveryPublisher + parser SummaryParser +} + +func NewDispatcher(finder SubscriptionFinder, publisher DeliveryPublisher) *Dispatcher { + return &Dispatcher{ + finder: finder, + publisher: publisher, + parser: workertypes.ParseEventSummary, + } +} + +// ProcessEvent is the main entry point for the worker. +// It handles the "Fan-Out" logic: One Event -> Many Delivery Jobs. +func (d *Dispatcher) ProcessEvent(ctx context.Context, + metadata workertypes.DispatchEventMetadata, summary []byte) error { + slog.InfoContext(ctx, "processing event", "event_id", metadata.EventID, "search_id", metadata.SearchID) + + // 1. 
Generate Delivery Jobs from Event Summary + gen := &deliveryJobGenerator{ + finder: d.finder, + metadata: metadata, + // We pass the raw summary bytes down so it can be attached to the jobs + // without needing to re-marshal the struct. + rawSummary: summary, + emailJobs: nil, + } + + if err := d.parser(gen.rawSummary, gen); err != nil { + return fmt.Errorf("failed to parse event summary: %w", err) + } + + totalJobs := gen.JobCount() + if totalJobs == 0 { + slog.InfoContext(ctx, "no delivery jobs generated", "event_id", metadata.EventID) + + return nil + } + + slog.InfoContext(ctx, "dispatching jobs", "count", totalJobs) + + // 2. Publish Delivery Jobs + successCount := 0 + failCount := 0 + + // Publish Email Jobs + for _, job := range gen.emailJobs { + if err := d.publisher.PublishEmailJob(ctx, job); err != nil { + slog.ErrorContext(ctx, "failed to publish email job", + "subscription_id", job.SubscriptionID, "error", err) + failCount++ + } else { + successCount++ + } + } + + // TODO: Webhook jobs would be published here similarly + // https://github.com/GoogleChrome/webstatus.dev/issues/1859 + + slog.InfoContext(ctx, "dispatch complete", + "event_id", metadata.EventID, + "sent", successCount, + "failed", failCount, + "total_candidates", totalJobs) + + if failCount > 0 { + return fmt.Errorf("partial failure: %d/%d jobs failed to publish", failCount, totalJobs) + } + + return nil +} + +// deliveryJobGenerator implements workertypes.SummaryVisitor to generate jobs from V1 summaries. +type deliveryJobGenerator struct { + finder SubscriptionFinder + metadata workertypes.DispatchEventMetadata + rawSummary []byte + emailJobs []workertypes.EmailDeliveryJob +} + +func (g *deliveryJobGenerator) VisitV1(s workertypes.EventSummary) error { + // 1. 
Find Subscribers + subscribers, err := g.finder.FindSubscribers( + // TODO: modify Visitor to pass context down + // https://github.com/GoogleChrome/webstatus.dev/issues/2132 + context.TODO(), + g.metadata.SearchID, + g.metadata.Frequency) + if err != nil { + return fmt.Errorf("failed to find subscribers: %w", err) + } + + if subscribers == nil { + return nil + } + + deliveryMetadata := workertypes.DeliveryMetadata{ + EventID: g.metadata.EventID, + SearchID: g.metadata.SearchID, + Query: g.metadata.Query, + Frequency: g.metadata.Frequency, + GeneratedAt: g.metadata.GeneratedAt, + } + + // 2. Filter & Create Jobs + // Iterate Emails + for _, sub := range subscribers.Emails { + if !shouldNotifyV1(sub.Triggers, s) { + continue + } + g.emailJobs = append(g.emailJobs, workertypes.EmailDeliveryJob{ + SubscriptionID: sub.SubscriptionID, + RecipientEmail: sub.EmailAddress, + SummaryRaw: g.rawSummary, + Metadata: deliveryMetadata, + ChannelID: sub.ChannelID, + Triggers: sub.Triggers, + }) + } + + // TODO: Iterate Webhooks when supported. + // https://github.com/GoogleChrome/webstatus.dev/issues/1859 + + return nil +} + +// JobCount returns the total number of delivery jobs generated. +func (g *deliveryJobGenerator) JobCount() int { + // TODO: When we add Webhook jobs, sum them here too. + + return len(g.emailJobs) +} + +// shouldNotifyV1 determines if the V1 event summary matches any of the user's triggers. +func shouldNotifyV1(triggers []workertypes.JobTrigger, summary workertypes.EventSummary) bool { + // 1. Determine if summary has changes. + hasChanges := summary.Categories.Added > 0 || + summary.Categories.Removed > 0 || + summary.Categories.Updated > 0 || + summary.Categories.Moved > 0 || + summary.Categories.Split > 0 || + summary.Categories.QueryChanged > 0 + + if !hasChanges { + return false + } + + // 2. If user has no triggers. + // Assuming empty triggers = no notifications for safety. + if len(triggers) == 0 { + return false + } + + // 3. 
Iterate triggers and check highlights + for _, t := range triggers { + if matchesTrigger(t, summary) { + return true + } + } + + return false +} + +func matchesTrigger(t workertypes.JobTrigger, summary workertypes.EventSummary) bool { + for _, h := range summary.Highlights { + if h.MatchesTrigger(t) { + return true + } + } + + return false +} diff --git a/workers/push_delivery/pkg/dispatcher/dispatcher_test.go b/workers/push_delivery/pkg/dispatcher/dispatcher_test.go new file mode 100644 index 000000000..813cc8e29 --- /dev/null +++ b/workers/push_delivery/pkg/dispatcher/dispatcher_test.go @@ -0,0 +1,608 @@ +// Copyright 2025 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +package dispatcher + +import ( + "context" + "errors" + "testing" + "time" + + "github.com/GoogleChrome/webstatus.dev/lib/generic" + "github.com/GoogleChrome/webstatus.dev/lib/workertypes" + "github.com/google/go-cmp/cmp" +) + +// --- Mocks --- + +type findSubscribersReq struct { + SearchID string + Frequency string +} + +type mockSubscriptionFinder struct { + findCalledWith *findSubscribersReq + findReturnSet *workertypes.SubscriberSet + findReturnErr error +} + +func (m *mockSubscriptionFinder) FindSubscribers(_ context.Context, searchID string, + frequency workertypes.JobFrequency) (*workertypes.SubscriberSet, error) { + m.findCalledWith = &findSubscribersReq{ + SearchID: searchID, + Frequency: string(frequency), + } + + return m.findReturnSet, m.findReturnErr +} + +type mockDeliveryPublisher struct { + emailJobs []workertypes.EmailDeliveryJob + emailJobErr func(job workertypes.EmailDeliveryJob) error +} + +func (m *mockDeliveryPublisher) PublishEmailJob(_ context.Context, job workertypes.EmailDeliveryJob) error { + if m.emailJobErr != nil { + if err := m.emailJobErr(job); err != nil { + return err + } + } + m.emailJobs = append(m.emailJobs, job) + + return nil +} + +// --- Test Helpers --- + +// createTestSummary returns a populated EventSummary for testing. +func createTestSummary(hasChanges bool) workertypes.EventSummary { + categories := workertypes.SummaryCategories{ + QueryChanged: 0, + Added: 0, + Deleted: 0, + Removed: 0, + Moved: 0, + Split: 0, + Updated: 0, + UpdatedImpl: 0, + UpdatedRename: 0, + UpdatedBaseline: 0, + } + + if hasChanges { + categories.Added = 1 + } + + return workertypes.EventSummary{ + SchemaVersion: "v1", + Text: "Test Summary", + Categories: categories, + Truncated: false, + Highlights: nil, + } +} + +// mockParserFactory creates a SummaryParser that injects the given summary directly. 
+func mockParserFactory(summary workertypes.EventSummary, err error) SummaryParser { + return func(_ []byte, v workertypes.SummaryVisitor) error { + if err != nil { + return err + } + + return v.VisitV1(summary) + } +} + +// --- Tests --- + +func emptyFinderReq() findSubscribersReq { + return findSubscribersReq{ + SearchID: "", + Frequency: "", + } +} + +func TestProcessEvent_Success(t *testing.T) { + ctx := context.Background() + eventID := "evt-123" + searchID := "search-abc" + frequency := workertypes.FrequencyImmediate + generatedAt := time.Date(2025, 1, 1, 0, 0, 0, 0, time.UTC) + summaryBytes := []byte("{}") + + metadata := workertypes.DispatchEventMetadata{ + EventID: eventID, + SearchID: searchID, + Query: "q=test", + Frequency: frequency, + GeneratedAt: generatedAt, + } + + // Two subscribers: one matching trigger, one not. + subSet := &workertypes.SubscriberSet{ + Emails: []workertypes.EmailSubscriber{ + { + SubscriptionID: "sub-1", + UserID: "user-1", + Triggers: []workertypes.JobTrigger{workertypes.FeaturePromotedToNewly}, // Matches + EmailAddress: "user1@example.com", + ChannelID: "chan-1", + }, + { + SubscriptionID: "sub-2", + UserID: "user-2", + // Does not match (summary is Newly) + Triggers: []workertypes.JobTrigger{workertypes.FeaturePromotedToWidely}, + EmailAddress: "user2@example.com", + ChannelID: "chan-2", + }, + }, + } + + finder := &mockSubscriptionFinder{ + findReturnSet: subSet, + findReturnErr: nil, + findCalledWith: nil, + } + publisher := &mockDeliveryPublisher{ + emailJobs: nil, + emailJobErr: nil, + } + + // Create a summary that HAS changes so notification logic proceeds. 
+ summary := createTestSummary(true) + summary.Categories.UpdatedBaseline = 1 + summary.Categories.Updated = 1 + summary.Highlights = []workertypes.SummaryHighlight{ + { + Type: workertypes.SummaryHighlightTypeChanged, + FeatureID: "test-feature-id", + FeatureName: "Test Feature", + Docs: nil, + NameChange: nil, + BaselineChange: &workertypes.Change[workertypes.BaselineValue]{ + From: newBaselineValue(workertypes.BaselineStatusLimited), + To: newBaselineValue(workertypes.BaselineStatusNewly), + }, + BrowserChanges: nil, + Moved: nil, + Split: nil, + }, + } + parser := mockParserFactory(summary, nil) + + d := NewDispatcher(finder, publisher) + d.parser = parser + + if err := d.ProcessEvent(ctx, metadata, summaryBytes); err != nil { + t.Fatalf("ProcessEvent unexpected error: %v", err) + } + + // Assertions + expectedFinderReq := findSubscribersReq{ + SearchID: searchID, + Frequency: string(frequency), + } + assertFindSubscribersCalledWith(t, finder, &expectedFinderReq) + + if len(publisher.emailJobs) != 1 { + t.Fatalf("Expected 1 email job, got %d", len(publisher.emailJobs)) + } + + job := publisher.emailJobs[0] + expectedJob := workertypes.EmailDeliveryJob{ + SubscriptionID: "sub-1", + RecipientEmail: "user1@example.com", + SummaryRaw: summaryBytes, + Triggers: []workertypes.JobTrigger{workertypes.FeaturePromotedToNewly}, + Metadata: workertypes.DeliveryMetadata{ + EventID: eventID, + SearchID: searchID, + Query: "q=test", + Frequency: frequency, + GeneratedAt: generatedAt, + }, + ChannelID: "chan-1", + } + + if diff := cmp.Diff(expectedJob, job); diff != "" { + t.Errorf("Job mismatch (-want +got):\n%s", diff) + } +} + +func assertFindSubscribersCalledWith(t *testing.T, finder *mockSubscriptionFinder, expected *findSubscribersReq) { + t.Helper() + if diff := cmp.Diff(expected, finder.findCalledWith); diff != "" { + t.Errorf("FindSubscribers called with mismatch (-want +got):\n%s", diff) + } +} + +func TestProcessEvent_NoChanges_FiltersAll(t *testing.T) { + ctx := 
context.Background() + metadata := workertypes.DispatchEventMetadata{ + EventID: "evt-1", + SearchID: "search-1", + Frequency: workertypes.FrequencyImmediate, + Query: "", + GeneratedAt: time.Time{}, + } + + subSet := &workertypes.SubscriberSet{ + Emails: []workertypes.EmailSubscriber{ + { + SubscriptionID: "sub-1", + UserID: "user-1", + Triggers: []workertypes.JobTrigger{"any_change"}, + EmailAddress: "user1@example.com", + ChannelID: "chan-1", + }, + }, + } + + finder := &mockSubscriptionFinder{ + findReturnSet: subSet, + findReturnErr: nil, + findCalledWith: nil, + } + publisher := &mockDeliveryPublisher{ + emailJobs: nil, + emailJobErr: nil, + } + + // Summary with NO changes + summary := createTestSummary(false) + parser := mockParserFactory(summary, nil) + + d := NewDispatcher(finder, publisher) + d.parser = parser + + if err := d.ProcessEvent(ctx, metadata, []byte("{}")); err != nil { + t.Fatalf("ProcessEvent unexpected error: %v", err) + } + + if len(publisher.emailJobs) != 0 { + t.Errorf("Expected 0 jobs due to no changes, got %d", len(publisher.emailJobs)) + } +} + +func TestProcessEvent_ParserError(t *testing.T) { + d := NewDispatcher(nil, nil) + var summary workertypes.EventSummary + d.parser = mockParserFactory(summary, errors.New("parse error")) + + metadata := workertypes.DispatchEventMetadata{ + EventID: "", + SearchID: "", + Query: "", + Frequency: workertypes.FrequencyImmediate, + GeneratedAt: time.Time{}, + } + + err := d.ProcessEvent(context.Background(), metadata, []byte("{}")) + if err == nil { + t.Error("Expected error from parser, got nil") + } +} + +func TestProcessEvent_FinderError(t *testing.T) { + finder := &mockSubscriptionFinder{ + findReturnSet: nil, + findReturnErr: errors.New("db error"), + findCalledWith: nil, + } + + d := NewDispatcher(finder, nil) + // Provide a valid summary struct so parser succeeds + var summary workertypes.EventSummary + d.parser = mockParserFactory(summary, nil) + + metadata := 
workertypes.DispatchEventMetadata{ + EventID: "", + SearchID: "", + Query: "", + Frequency: "", + GeneratedAt: time.Time{}, + } + + err := d.ProcessEvent(context.Background(), metadata, []byte("{}")) + if err == nil { + t.Error("Expected error from finder, got nil") + } + assertFindSubscribersCalledWith(t, finder, generic.ValuePtr(emptyFinderReq())) +} + +func TestProcessEvent_PublisherPartialFailure(t *testing.T) { + ctx := context.Background() + // Two subscribers + subSet := &workertypes.SubscriberSet{ + Emails: []workertypes.EmailSubscriber{ + {SubscriptionID: "sub-1", Triggers: []workertypes.JobTrigger{workertypes.FeaturePromotedToNewly}, + UserID: "u1", EmailAddress: "e1", ChannelID: "chan-1"}, + {SubscriptionID: "sub-2", Triggers: []workertypes.JobTrigger{workertypes.FeaturePromotedToNewly}, + UserID: "u2", EmailAddress: "e2", ChannelID: "chan-2"}, + }, + } + + finder := &mockSubscriptionFinder{ + findReturnSet: subSet, + findReturnErr: nil, + findCalledWith: nil, + } + + // Publisher returns error for first job, success for second + publisher := &mockDeliveryPublisher{ + emailJobs: nil, + emailJobErr: func(job workertypes.EmailDeliveryJob) error { + if job.SubscriptionID == "sub-1" { + return errors.New("queue full") + } + + return nil + }, + } + + summaryWithNewly := withBaselineHighlight(createTestSummary(false), + workertypes.BaselineStatusLimited, workertypes.BaselineStatusNewly) + d := NewDispatcher(finder, publisher) + d.parser = mockParserFactory(summaryWithNewly, nil) + + metadata := workertypes.DispatchEventMetadata{ + EventID: "", + SearchID: "", + Query: "", + Frequency: "", + GeneratedAt: time.Time{}, + } + + err := d.ProcessEvent(ctx, metadata, []byte("{}")) + if err == nil { + t.Error("Expected error due to partial publish failure") + } + + if len(publisher.emailJobs) != 1 { + t.Errorf("Expected 1 successful job recorded, got %d", len(publisher.emailJobs)) + } + if publisher.emailJobs[0].SubscriptionID != "sub-2" { + t.Errorf("Expected sub-2 
to succeed, got %s", publisher.emailJobs[0].SubscriptionID) + } + if publisher.emailJobs[0].ChannelID != "chan-2" { + t.Errorf("Expected chan-2 to succeed, got %s", publisher.emailJobs[0].ChannelID) + } + assertFindSubscribersCalledWith(t, finder, generic.ValuePtr(emptyFinderReq())) +} + +func TestProcessEvent_JobCount(t *testing.T) { + // Verify that if no jobs are generated (e.g. no matching triggers), ProcessEvent returns early/cleanly. + subSet := &workertypes.SubscriberSet{ + Emails: []workertypes.EmailSubscriber{ + {SubscriptionID: "sub-1", Triggers: []workertypes.JobTrigger{}, EmailAddress: "e1", UserID: "u1", + ChannelID: "chan-1"}, // No match + }, + } + finder := &mockSubscriptionFinder{ + findReturnSet: subSet, + findReturnErr: nil, + findCalledWith: nil, + } + publisher := new(mockDeliveryPublisher) + d := NewDispatcher(finder, publisher) + d.parser = mockParserFactory(createTestSummary(true), nil) + + metadata := workertypes.DispatchEventMetadata{ + EventID: "", + SearchID: "", + Query: "", + Frequency: "", + GeneratedAt: time.Time{}, + } + + if err := d.ProcessEvent(context.Background(), metadata, []byte("{}")); err != nil { + t.Errorf("Expected no error for 0 jobs, got %v", err) + } + if len(publisher.emailJobs) != 0 { + t.Error("Expected 0 jobs") + } + assertFindSubscribersCalledWith(t, finder, generic.ValuePtr(emptyFinderReq())) +} + +// --- shouldNotifyV1 Test Helpers --- + +func newBaselineValue(status workertypes.BaselineStatus) workertypes.BaselineValue { + t := time.Date(2025, 1, 1, 0, 0, 0, 0, time.UTC) + + return workertypes.BaselineValue{ + Status: status, + LowDate: &t, + HighDate: nil, + } +} + +func newBrowserValue(status workertypes.BrowserStatus) workertypes.BrowserValue { + version := "100" + testDate := time.Date(2025, 1, 1, 0, 0, 0, 0, time.UTC) + + return workertypes.BrowserValue{ + Status: status, + Version: &version, + Date: &testDate, + } +} + +func withBaselineHighlight( + s workertypes.EventSummary, from, to 
workertypes.BaselineStatus) workertypes.EventSummary { + s.Highlights = append(s.Highlights, workertypes.SummaryHighlight{ + Type: workertypes.SummaryHighlightTypeChanged, + FeatureID: "test-feature-id", + FeatureName: "Test Feature", + Docs: nil, + BaselineChange: &workertypes.Change[workertypes.BaselineValue]{ + From: newBaselineValue(from), + To: newBaselineValue(to), + }, + BrowserChanges: nil, + NameChange: nil, + Moved: nil, + Split: nil, + }) + s.Categories.Updated = 1 + s.Categories.UpdatedBaseline = 1 + + return s +} + +func withBrowserChangeHighlight( + s workertypes.EventSummary, from, to workertypes.BrowserStatus) workertypes.EventSummary { + s.Highlights = append(s.Highlights, workertypes.SummaryHighlight{ + Type: workertypes.SummaryHighlightTypeChanged, + FeatureID: "test-feature-id", + FeatureName: "Test Feature", + Docs: nil, + BaselineChange: nil, + BrowserChanges: map[workertypes.BrowserName]*workertypes.Change[workertypes.BrowserValue]{ + workertypes.BrowserChrome: { + From: newBrowserValue(from), + To: newBrowserValue(to), + }, + workertypes.BrowserChromeAndroid: nil, + workertypes.BrowserFirefox: nil, + workertypes.BrowserFirefoxAndroid: nil, + workertypes.BrowserEdge: nil, + workertypes.BrowserSafari: nil, + workertypes.BrowserSafariIos: nil, + }, + NameChange: nil, + Moved: nil, + Split: nil, + }) + s.Categories.Updated = 1 + s.Categories.UpdatedImpl = 1 + + return s +} + +func TestShouldNotifyV1(t *testing.T) { + summaryWithNewly := withBaselineHighlight(createTestSummary(false), + workertypes.BaselineStatusLimited, workertypes.BaselineStatusNewly) + summaryWithWidely := withBaselineHighlight(createTestSummary(false), + workertypes.BaselineStatusNewly, workertypes.BaselineStatusWidely) + summaryWithLimited := withBaselineHighlight(createTestSummary(false), + workertypes.BaselineStatusWidely, workertypes.BaselineStatusLimited) + summaryWithBrowserAvailable := withBrowserChangeHighlight(createTestSummary(false), + 
workertypes.BrowserStatusUnknown, workertypes.BrowserStatusAvailable) + summaryWithBrowserInDev := withBrowserChangeHighlight(createTestSummary(false), + workertypes.BrowserStatusUnknown, workertypes.BrowserStatusUnknown) + summaryQueryChanged := createTestSummary(false) + summaryQueryChanged.Categories.QueryChanged = 1 + + testCases := []struct { + name string + triggers []workertypes.JobTrigger + summary workertypes.EventSummary + want bool + }{ + { + name: "no changes should return false", + triggers: []workertypes.JobTrigger{workertypes.FeaturePromotedToNewly}, + summary: createTestSummary(false), + want: false, + }, + { + name: "changes but no triggers should return false", + triggers: []workertypes.JobTrigger{}, + summary: createTestSummary(true), + want: false, + }, + { + name: "changes and triggers but no highlights should return false", + triggers: []workertypes.JobTrigger{workertypes.FeaturePromotedToNewly}, + summary: createTestSummary(true), + want: false, + }, + { + name: "changes, triggers, highlights, but no match should return false", + triggers: []workertypes.JobTrigger{workertypes.FeaturePromotedToWidely}, + summary: summaryWithNewly, + want: false, + }, + { + name: "match on FeaturePromotedToNewly should return true", + triggers: []workertypes.JobTrigger{workertypes.FeaturePromotedToNewly}, + summary: summaryWithNewly, + want: true, + }, + { + name: "match on FeaturePromotedToWidely should return true", + triggers: []workertypes.JobTrigger{workertypes.FeaturePromotedToWidely}, + summary: summaryWithWidely, + want: true, + }, + { + name: "match on FeatureRegressedToLimited should return true", + triggers: []workertypes.JobTrigger{workertypes.FeatureRegressedToLimited}, + summary: summaryWithLimited, + want: true, + }, + { + name: "match on BrowserImplementationAnyComplete should return true", + triggers: []workertypes.JobTrigger{workertypes.BrowserImplementationAnyComplete}, + summary: summaryWithBrowserAvailable, + want: true, + }, + { + name: 
"no match on BrowserImplementation when status is not Available", + triggers: []workertypes.JobTrigger{workertypes.BrowserImplementationAnyComplete}, + summary: summaryWithBrowserInDev, + want: false, + }, + { + name: "multiple triggers with one match should return true", + triggers: []workertypes.JobTrigger{ + workertypes.FeaturePromotedToWidely, workertypes.FeaturePromotedToNewly}, + summary: summaryWithNewly, + want: true, + }, + { + name: "multiple highlights with one match should return true", + triggers: []workertypes.JobTrigger{workertypes.FeaturePromotedToWidely}, + summary: withBaselineHighlight(summaryWithNewly, + workertypes.BaselineStatusNewly, workertypes.BaselineStatusWidely), + want: true, + }, + { + name: "QueryChanged is considered a change and matches with highlight", + triggers: []workertypes.JobTrigger{ + workertypes.FeaturePromotedToNewly, + }, + summary: withBaselineHighlight(summaryQueryChanged, + workertypes.BaselineStatusLimited, workertypes.BaselineStatusNewly), + want: true, + }, + { + name: "no match when baseline highlight has wrong status", + triggers: []workertypes.JobTrigger{workertypes.FeaturePromotedToNewly}, + summary: summaryWithWidely, + want: false, + }, + } + + for _, tc := range testCases { + t.Run(tc.name, func(t *testing.T) { + got := shouldNotifyV1(tc.triggers, tc.summary) + if got != tc.want { + t.Errorf("shouldNotifyV1() = %v, want %v", got, tc.want) + } + }) + } +} diff --git a/workers/push_delivery/skaffold.yaml b/workers/push_delivery/skaffold.yaml index ce757f5fe..42cd614d2 100644 --- a/workers/push_delivery/skaffold.yaml +++ b/workers/push_delivery/skaffold.yaml @@ -19,7 +19,6 @@ metadata: requires: - path: ../../.dev/pubsub - path: ../../.dev/spanner - - path: ../../.dev/gcs profiles: - name: local build: diff --git a/workers/skaffold.yaml b/workers/skaffold.yaml new file mode 100644 index 000000000..13045a967 --- /dev/null +++ b/workers/skaffold.yaml @@ -0,0 +1,22 @@ +# Copyright 2026 Google LLC + +# Licensed 
under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at + +# https://www.apache.org/licenses/LICENSE-2.0 + +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +apiVersion: skaffold/v4beta9 +kind: Config +metadata: + name: workers +requires: + - path: ./email + - path: ./event_producer + - path: ./push_delivery