[Issue #3536] Add saved opportunity notifications to backend job #3639

Merged

Changes from all commits (43 commits)
ae394d2
Add JobTable, track in tasks
mikehgrantsgov Jan 17, 2025
ea3d22f
Create ERD diagram and Update OpenAPI spec
nava-platform-bot Jan 17, 2025
98257e3
Update to enums / add metrics to table
mikehgrantsgov Jan 17, 2025
8fa48f5
Merge branch 'mikehgrantsgov/3527-modify-load-opp-logic-never-delete'…
mikehgrantsgov Jan 17, 2025
71a80d9
Lint
mikehgrantsgov Jan 17, 2025
e65f8d7
Lint
mikehgrantsgov Jan 17, 2025
53d7b2e
Merge branch 'main' into mikehgrantsgov/3527-modify-load-opp-logic-ne…
mikehgrantsgov Jan 17, 2025
ac89d27
Create ERD diagram and Update OpenAPI spec
nava-platform-bot Jan 17, 2025
4193c2b
Update api/src/task/task.py
mikehgrantsgov Jan 21, 2025
4f72d43
Update api/src/task/task.py
mikehgrantsgov Jan 21, 2025
238e8fd
Update api/src/task/task.py
mikehgrantsgov Jan 21, 2025
43f90be
Remove last_loaded_at and use updated_at instead
mikehgrantsgov Jan 21, 2025
5c18c81
Merge branch 'mikehgrantsgov/3527-modify-load-opp-logic-never-delete'…
mikehgrantsgov Jan 21, 2025
2865fbd
Create ERD diagram and Update OpenAPI spec
nava-platform-bot Jan 21, 2025
b5c8d05
Lint
mikehgrantsgov Jan 21, 2025
68030d4
Merge branch 'mikehgrantsgov/3527-modify-load-opp-logic-never-delete'…
mikehgrantsgov Jan 21, 2025
54b5d10
Fix
mikehgrantsgov Jan 21, 2025
55922b8
Merge branch 'main' into mikehgrantsgov/3527-modify-load-opp-logic-ne…
mikehgrantsgov Jan 21, 2025
92b21fb
Update migration
mikehgrantsgov Jan 21, 2025
4fe3095
Fix migration
mikehgrantsgov Jan 21, 2025
9efeb56
Update to JobLog, remove has_update
mikehgrantsgov Jan 23, 2025
578fa09
Format
mikehgrantsgov Jan 23, 2025
e62f4e6
Remove query on non-null column
mikehgrantsgov Jan 23, 2025
ad38093
Add task tests
mikehgrantsgov Jan 23, 2025
562c2a7
Catch db errors and rollback/start new transaction to store failed state
mikehgrantsgov Jan 23, 2025
624bd98
Merge branch 'main' into mikehgrantsgov/3527-modify-load-opp-logic-ne…
mikehgrantsgov Jan 23, 2025
e57fc43
Fix head
mikehgrantsgov Jan 23, 2025
aa65d75
Create ERD diagram and Update OpenAPI spec
nava-platform-bot Jan 23, 2025
4342630
Fix migration
mikehgrantsgov Jan 23, 2025
9d51544
Merge branch 'mikehgrantsgov/3527-modify-load-opp-logic-never-delete'…
mikehgrantsgov Jan 23, 2025
aa6f5b7
Update transaction management
mikehgrantsgov Jan 24, 2025
2761cdf
Fix test
mikehgrantsgov Jan 24, 2025
6c26a5d
Wrap failed update with db_session.begin
mikehgrantsgov Jan 24, 2025
35817e7
Detect and notify when an opportunity is changed
mikehgrantsgov Jan 24, 2025
e9ad242
Create ERD diagram and Update OpenAPI spec
nava-platform-bot Jan 24, 2025
f20a8a1
Merge branch 'main' into mikehgrantsgov/3536-add-saved-notifications-…
mikehgrantsgov Jan 27, 2025
ac20a08
Add new tests / PR feedback
mikehgrantsgov Jan 27, 2025
2b089a9
Merge branch 'main' into mikehgrantsgov/3536-add-saved-notifications-…
mikehgrantsgov Jan 27, 2025
aff14dd
Create ERD diagram and Update OpenAPI spec
nava-platform-bot Jan 27, 2025
7e1fd70
Change to enum
mikehgrantsgov Jan 28, 2025
269a2a4
Merge branch 'main' into mikehgrantsgov/3536-add-saved-notifications-…
mikehgrantsgov Jan 29, 2025
3b95ed6
Fix revision
mikehgrantsgov Jan 29, 2025
1807ad9
Create ERD diagram and Update OpenAPI spec
nava-platform-bot Jan 29, 2025

34 changes: 34 additions & 0 deletions (new Alembic migration adding last_notified_at to user_saved_opportunity)
@@ -0,0 +1,34 @@
"""Add last_notified_at to user saved opportunity table

Revision ID: 43b179a7c92e
Revises: dc04ce955a9a
Create Date: 2025-01-24 17:15:14.064880

"""

import sqlalchemy as sa
from alembic import op

# revision identifiers, used by Alembic.
revision = "43b179a7c92e"
down_revision = "9e7fc937646a"
branch_labels = None
depends_on = None


def upgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.add_column(
"user_saved_opportunity",
sa.Column(
"last_notified_at", sa.TIMESTAMP(timezone=True), server_default="NOW()", nullable=False
),
schema="api",
)
# ### end Alembic commands ###


def downgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.drop_column("user_saved_opportunity", "last_notified_at", schema="api")
# ### end Alembic commands ###
8 changes: 7 additions & 1 deletion api/src/db/models/user_models.py
@@ -84,6 +84,10 @@ class UserSavedOpportunity(ApiSchemaTable, TimestampMixin):
BigInteger, ForeignKey(Opportunity.opportunity_id), primary_key=True
)

last_notified_at: Mapped[datetime] = mapped_column(
default=datetime.utcnow, server_default="NOW()", nullable=False
)

user: Mapped[User] = relationship(User, back_populates="saved_opportunities")
opportunity: Mapped[Opportunity] = relationship(
"Opportunity", back_populates="saved_opportunities_by_users"
@@ -115,7 +119,9 @@ class UserSavedSearch(ApiSchemaTable, TimestampMixin):
class UserNotificationLog(ApiSchemaTable, TimestampMixin):
__tablename__ = "user_notification_log"

user_notification_log_id: Mapped[uuid.UUID] = mapped_column(UUID, primary_key=True)
user_notification_log_id: Mapped[uuid.UUID] = mapped_column(
UUID, primary_key=True, default=uuid.uuid4
)

user_id: Mapped[uuid.UUID] = mapped_column(ForeignKey(User.user_id), index=True)
user: Mapped[User] = relationship(User)
74 changes: 62 additions & 12 deletions api/src/task/notifications/generate_notifications.py
@@ -3,12 +3,16 @@
from dataclasses import dataclass, field
from enum import StrEnum

from sqlalchemy import select, update

import src.adapters.db as db
import src.adapters.db.flask_db as flask_db
from src.db.models.user_models import User
from src.db.models.opportunity_models import OpportunityChangeAudit
from src.db.models.user_models import UserNotificationLog, UserSavedOpportunity
from src.task.ecs_background_task import ecs_background_task
from src.task.task import Task
from src.task.task_blueprint import task_blueprint
from src.util import datetime_util

logger = logging.getLogger(__name__)

@@ -24,12 +28,15 @@ def run_notification_task(db_session: db.Session) -> None:
task.run()


class NotificationConstants:
OPPORTUNITY_UPDATES = "opportunity_updates"


@dataclass
class NotificationContainer:
"""Container for collecting notifications for a single user"""

user: User
updated_opportunity_ids: list[int] = field(default_factory=list)
saved_opportunities: list[UserSavedOpportunity] = field(default_factory=list)
# TODO: Change from str to something else
updated_searches: list[str] = field(default_factory=list)

@@ -55,11 +62,35 @@ def run_task(self) -> None:
self._send_notifications()

def _collect_opportunity_notifications(self) -> None:
"""Collect notifications for changed opportunities
To be implemented in future ticket
"""
logger.info("Opportunity notification collection not yet implemented")
pass
"""Collect notifications for changed opportunities that users are tracking"""
stmt = (
select(UserSavedOpportunity)
.join(
OpportunityChangeAudit,
OpportunityChangeAudit.opportunity_id == UserSavedOpportunity.opportunity_id,
)
.where(OpportunityChangeAudit.updated_at > UserSavedOpportunity.last_notified_at)
.distinct()
)

results = self.db_session.execute(stmt)

for row in results.scalars():
user_id = row.user_id
if user_id not in self.user_notification_map:
self.user_notification_map[user_id] = NotificationContainer()
self.user_notification_map[user_id].saved_opportunities.append(row)

logger.info(
"Collected opportunity notifications",
extra={
"user_count": len(self.user_notification_map),
"total_notifications": sum(
len(container.saved_opportunities)
for container in self.user_notification_map.values()
),
},
)

def _collect_search_notifications(self) -> None:
"""Collect notifications for changed saved searches
@@ -71,22 +102,41 @@ def _collect_search_notifications(self) -> None:
def _send_notifications(self) -> None:
"""Send collected notifications to users"""
for user_id, container in self.user_notification_map.items():
if not container.updated_opportunity_ids and not container.updated_searches:
if not container.saved_opportunities and not container.updated_searches:
continue

# TODO: Implement actual notification sending in future ticket
logger.info(
"Would send notification to user",
extra={
"user_id": user_id,
"opportunity_count": len(container.updated_opportunity_ids),
"opportunity_count": len(container.saved_opportunities),
"search_count": len(container.updated_searches),
},
)

self.increment(
self.Metrics.OPPORTUNITIES_TRACKED, len(container.updated_opportunity_ids)
# Create notification log entry
notification_log = UserNotificationLog(
user_id=user_id,
notification_reason=NotificationConstants.OPPORTUNITY_UPDATES,
notification_sent=True,
)
self.db_session.add(notification_log)

# Update last_notified_at for all opportunities we just notified about
opportunity_ids = [
saved_opp.opportunity_id for saved_opp in container.saved_opportunities
]
self.db_session.execute(
update(UserSavedOpportunity)
.where(
UserSavedOpportunity.user_id == user_id,
UserSavedOpportunity.opportunity_id.in_(opportunity_ids),
)
.values(last_notified_at=datetime_util.utcnow())
)

self.increment(self.Metrics.OPPORTUNITIES_TRACKED, len(container.saved_opportunities))
self.increment(self.Metrics.SEARCHES_TRACKED, len(container.updated_searches))
self.increment(self.Metrics.NOTIFICATIONS_SENT)
self.increment(self.Metrics.USERS_NOTIFIED)
9 changes: 7 additions & 2 deletions api/src/task/task.py
@@ -35,8 +35,10 @@ def run(self) -> None:
job_succeeded = True

try:
logger.info("Starting %s", self.cls_name())
start = time.perf_counter()
# Create initial job record
self.job = JobLog(job_type=self.cls_name(), job_status=JobStatus.STARTED)
self.db_session.add(self.job)
self.db_session.commit()

# Create initial job record
self.job = JobLog(job_type=self.cls_name(), job_status=JobStatus.STARTED)
@@ -49,6 +51,9 @@ def run(self) -> None:
# Run the actual task
self.run_task()

logger.info("Starting %s", self.cls_name())
start = time.perf_counter()

# Calculate and set a duration
end = time.perf_counter()
duration = round((end - start), 3)
127 changes: 127 additions & 0 deletions api/tests/src/task/notifications/test_generate_notifications.py
@@ -1,5 +1,132 @@
from datetime import timedelta

import pytest

import tests.src.db.models.factories as factories
from src.db.models.user_models import UserNotificationLog
from src.task.notifications.generate_notifications import NotificationConstants


@pytest.fixture
def clear_notification_logs(db_session):
"""Clear all notification logs"""
db_session.query(UserNotificationLog).delete()


def test_via_cli(cli_runner, db_session, enable_factory_create, user):
"""Simple test that verifies we can invoke the notification task via CLI"""
result = cli_runner.invoke(args=["task", "generate-notifications"])

assert result.exit_code == 0


def test_collect_notifications_cli(cli_runner, db_session, enable_factory_create, user, caplog):
"""Simple test that verifies we can invoke the notification task via CLI"""
# Create a saved opportunity that needs notification
opportunity = factories.OpportunityFactory.create()
saved_opportunity = factories.UserSavedOpportunityFactory.create(
user=user,
opportunity=opportunity,
last_notified_at=opportunity.updated_at - timedelta(days=1),
)
factories.OpportunityChangeAuditFactory.create(
opportunity=opportunity,
updated_at=saved_opportunity.last_notified_at + timedelta(minutes=1),
)

result = cli_runner.invoke(args=["task", "generate-notifications"])

assert result.exit_code == 0

# Verify expected log messages
assert "Collected opportunity notifications" in caplog.text
assert "Would send notification to user" in caplog.text

# Verify the log contains the correct metrics
log_records = [r for r in caplog.records if "Would send notification to user" in r.message]
assert len(log_records) == 1
extra = log_records[0].__dict__
Comment on lines +46 to +48

Collaborator:
If we want tests that are a bit less log-focused at the moment, I did add the notification table which we'll use as a sort of auditing table. Could start populating that with something.

Collaborator (Author):
Added some new tests here and added logging in generate_notifications.py.
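
For illustration, a minimal sketch of the audit-table pattern discussed in this thread, assuming a SQLAlchemy session and the models added in this PR (the helper functions themselves are hypothetical, not part of the diff):

from src.db.models.user_models import UserNotificationLog
from src.task.notifications.generate_notifications import NotificationConstants


def record_notification(db_session, user_id):
    # Hypothetical helper: write one audit row per notification sent,
    # mirroring what _send_notifications does in this PR.
    db_session.add(
        UserNotificationLog(
            user_id=user_id,
            notification_reason=NotificationConstants.OPPORTUNITY_UPDATES,
            notification_sent=True,
        )
    )
    db_session.commit()


def sent_notifications_for(db_session, user_id):
    # Audit check: query the table rather than scraping log output.
    return (
        db_session.query(UserNotificationLog)
        .filter(UserNotificationLog.user_id == user_id)
        .all()
    )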

assert extra["user_id"] == user.user_id
assert extra["opportunity_count"] == 1
assert extra["search_count"] == 0


def test_last_notified_at_updates(cli_runner, db_session, enable_factory_create, user):
"""Test that last_notified_at gets updated after sending notifications"""
# Create an opportunity that was updated after the last notification
opportunity = factories.OpportunityFactory.create()
saved_opp = factories.UserSavedOpportunityFactory.create(
user=user,
opportunity=opportunity,
last_notified_at=opportunity.updated_at - timedelta(days=1),
)
factories.OpportunityChangeAuditFactory.create(
opportunity=opportunity,
updated_at=saved_opp.last_notified_at + timedelta(minutes=1),
)
# Store the original notification time
original_notification_time = saved_opp.last_notified_at

# Run the notification task
result = cli_runner.invoke(args=["task", "generate-notifications"])
assert result.exit_code == 0

# Refresh the saved opportunity from the database
db_session.refresh(saved_opp)

# Verify last_notified_at was updated
assert saved_opp.last_notified_at > original_notification_time
# Verify last_notified_at is now after the opportunity's updated_at
assert saved_opp.last_notified_at > opportunity.updated_at


def test_notification_log_creation(
cli_runner, db_session, enable_factory_create, clear_notification_logs, user
):
"""Test that notification logs are created when notifications are sent"""
# Create a saved opportunity that needs notification
opportunity = factories.OpportunityFactory.create()
saved_opportunity = factories.UserSavedOpportunityFactory.create(
user=user,
opportunity=opportunity,
last_notified_at=opportunity.updated_at - timedelta(days=1),
)

factories.OpportunityChangeAuditFactory.create(
opportunity=opportunity,
updated_at=saved_opportunity.last_notified_at + timedelta(minutes=1),
)

# Run the notification task
result = cli_runner.invoke(args=["task", "generate-notifications"])
assert result.exit_code == 0

# Verify notification log was created
notification_logs = db_session.query(UserNotificationLog).all()
assert len(notification_logs) == 1

log = notification_logs[0]
assert log.user_id == user.user_id
assert log.notification_reason == NotificationConstants.OPPORTUNITY_UPDATES
assert log.notification_sent is True


def test_no_notification_log_when_no_updates(
cli_runner, db_session, enable_factory_create, clear_notification_logs, user
):
"""Test that no notification log is created when there are no updates"""
# Create a saved opportunity that doesn't need notification
opportunity = factories.OpportunityFactory.create()
factories.UserSavedOpportunityFactory.create(
user=user,
opportunity=opportunity,
last_notified_at=opportunity.updated_at + timedelta(minutes=1), # After the update
)

# Run the notification task
result = cli_runner.invoke(args=["task", "generate-notifications"])
assert result.exit_code == 0

# Verify no notification log was created
notification_logs = db_session.query(UserNotificationLog).all()
assert len(notification_logs) == 0
Binary file modified documentation/api/database/erds/api-schema.png