diff --git a/epictrack-api/src/api/models/event_type.py b/epictrack-api/src/api/models/event_type.py
index 8758edfaf..ea5f09e49 100644
--- a/epictrack-api/src/api/models/event_type.py
+++ b/epictrack-api/src/api/models/event_type.py
@@ -27,6 +27,8 @@ class EventTypeEnum(enum.Enum):  # pylint: disable=C0103
     ADM = 16
     CEAO_DECISION = 15
+    COMMENT_PERIOD = 22
+    EAC_MINISTER = 13
     MINISTER_DECISION = 14
     REFERRAL = 5
     TIME_LIMIT_RESUMPTION = 38
diff --git a/epictrack-api/src/api/reports/report_factory.py b/epictrack-api/src/api/reports/report_factory.py
index 0e2d0785a..4922cba9f 100644
--- a/epictrack-api/src/api/reports/report_factory.py
+++ b/epictrack-api/src/api/reports/report_factory.py
@@ -15,13 +15,13 @@ class ReportFactory(ABC):
 
     def __init__(
         self,
+        color_intensity=None,
         data_keys=None,
+        filters=None,
         group_by=None,
         group_sort_order=None,
         item_sort_key=None,
-        template_name=None,
-        filters=None,
-        color_intensity=None
+        template_name=None
     ):
         """Constructor"""
         self.data_keys = data_keys
@@ -62,7 +62,7 @@ def _format_data(self, data, report_title=None):
         else:
             for item in data:
                 obj = {
-                    k: getattr(item, k) for k in self.data_keys if k not in excluded_items
+                    k: getattr(item, k, None) for k in self.data_keys if k not in excluded_items
                 }
                 if self.group_by:
                     formatted_data[obj.get(self.group_by, -1)].append(obj)
diff --git a/epictrack-api/src/api/reports/resource_forecast_report.py b/epictrack-api/src/api/reports/resource_forecast_report.py
index 1ca783c36..ae1993a5c 100644
--- a/epictrack-api/src/api/reports/resource_forecast_report.py
+++ b/epictrack-api/src/api/reports/resource_forecast_report.py
@@ -74,11 +74,11 @@ def __init__(self, filters, color_intensity):
         ]
         group_by = "work_id"
         super().__init__(
+            color_intensity=color_intensity,
             data_keys=data_keys,
-            group_by=group_by,
-            template_name=None,
             filters=filters,
-            color_intensity=color_intensity
+            group_by=group_by,
+            template_name=None
         )
         self.excluded_items = []
         if self.filters and "exclude" in self.filters:
diff --git a/epictrack-api/src/api/reports/thirty_sixty_ninety_report.py b/epictrack-api/src/api/reports/thirty_sixty_ninety_report.py
index b2984bdb5..b046fd37b 100644
--- a/epictrack-api/src/api/reports/thirty_sixty_ninety_report.py
+++ b/epictrack-api/src/api/reports/thirty_sixty_ninety_report.py
@@ -2,8 +2,8 @@
 from datetime import datetime, timedelta
 from io import BytesIO
-from typing import Dict, List
 from os import path
+from typing import Dict, List, Optional
 from operator import attrgetter
 
 from dateutil import parser
@@ -11,7 +11,7 @@
 from reportlab.lib import colors
 from reportlab.lib.enums import TA_CENTER
 from reportlab.lib.pagesizes import A4
-from reportlab.lib.styles import getSampleStyleSheet
+from reportlab.lib.styles import getSampleStyleSheet, ParagraphStyle
 from reportlab.pdfbase import pdfmetrics
 from reportlab.pdfbase.ttfonts import TTFont
 from reportlab.platypus import NextPageTemplate, Paragraph, Table, TableStyle
@@ -27,6 +27,7 @@
 from api.models.special_field import EntityEnum
 from api.models.work import WorkStateEnum
 from api.models.work_issues import WorkIssues
+from api.models.work_issue_updates import WorkIssueUpdates
 from api.services.special_field import SpecialFieldService
 from api.services.work_issues import WorkIssuesService
 from api.schemas import response as res
@@ -35,7 +36,6 @@
 from api.utils.util import process_data
 
 from .report_factory import ReportFactory
-from flask import current_app
 
 # pylint:disable=not-callable,no-member
 
@@ -47,23 +47,39 @@ class ThirtySixtyNinetyReport(ReportFactory):
     def __init__(self, filters, color_intensity):
         """Initialize the ReportFactory"""
         data_keys = [
-            "project_name",
-            "work_report_title",
+            "actual_date",
             "anticipated_decision_date",
-            "work_short_description",
-            "work_status_text",
             "decision_information",
-            "pecp_explanation",
+            "event_configuration_id",
+            "event_date",
             "event_description",
-            "work_id",
-            "milestone_id",
             "event_id",
             "event_title",
-            "event_date",
+            "event_type_id",
+            "milestone_id",
+            "pecp_explanation",
             "project_id",
+            "project_name",
             "status_date_updated",
+            "work_id",
+            "work_report_title",
+            "work_short_description",
+            "work_status_text",
         ]
-        super().__init__(data_keys=data_keys, filters=filters, color_intensity=color_intensity)
+        group_by = "work_id"
+        self.event_order = {
+            "decision_referral": 1,
+            "work_issue": 2,
+            "pcp": 3,
+            "other": 4,
+        }
+        super().__init__(
+            color_intensity=color_intensity,
+            filters=filters,
+            data_keys=data_keys,
+            group_by=group_by,
+            template_name=None
+        )
         self.report_date = None
         self.report_title = "30-60-90"
         self.pecp_configuration_ids = (
@@ -75,7 +91,6 @@ def __init__(self, filters, color_intensity):
             .scalars()
             .all()
         )
-
         self.decision_configuration_ids = (
             db.session.execute(
                 select(EventConfiguration.id).where(
@@ -83,6 +98,7 @@ def __init__(self, filters, color_intensity):
                         [
                             EventTypeEnum.MINISTER_DECISION.value,
                             EventTypeEnum.CEAO_DECISION.value,
+                            EventTypeEnum.EAC_MINISTER.value
                         ]
                     )
                 )
@@ -90,12 +106,24 @@ def __init__(self, filters, color_intensity):
             .scalars()
             .all()
         )
+        self.high_profile_work_issue_work_ids = (
+            db.session.execute(
+                select(WorkIssues.work_id).where(
+                    WorkIssues.is_active.is_(True),
+                    WorkIssues.is_high_priority.is_(True),
+                    WorkIssues.is_resolved.is_(False)
+                )
+            )
+            .scalars()
+            .all()
+        )
 
     def _fetch_data(self, report_date):
         """Fetches the relevant data for EA 30-60-90 Report"""
-        max_date = report_date + timedelta(days=90)
+        max_date = report_date + timedelta(days=93)
         next_pecp_query = self._get_next_pcp_query(report_date, max_date)
         valid_event_ids = self._get_valid_event_ids(report_date, max_date)
+        work_issue_work_ids = self._get_valid_work_issue_work_ids(report_date, max_date)
         latest_status_updates = self._get_latest_status_update_query()
 
         results_qry = (
@@ -106,34 +134,37 @@ def _fetch_data(self, report_date):
                     [WorkStateEnum.IN_PROGRESS.value, WorkStateEnum.SUSPENDED.value]
                 ),
             )
-            .join(Project)
-            .join(WorkType)
-            .join(
+            .join(Project, Work.project)
+            .join(WorkType, Work.work_type)
+            .outerjoin(
                 Event,
                 and_(
-                    Event.work_id == Work.id,
+                    Event.work,
                     Event.id.in_(valid_event_ids),
-                ),
-            )
-            .join(
-                EventConfiguration,
-                EventConfiguration.id == Event.event_configuration_id,
-            )
+                ),)
+            .outerjoin(EventConfiguration, EventConfiguration.id == Event.event_configuration_id)
             .outerjoin(latest_status_updates, latest_status_updates.c.work_id == Work.id)
-            .outerjoin(
-                next_pecp_query,
-                and_(
-                    next_pecp_query.c.work_id == Work.id,
-                ),
-            )
+            .outerjoin(next_pecp_query, next_pecp_query.c.work_id == Work.id)
             .outerjoin(WorkStatus)
+            .filter(
+                or_(
+                    # Include Works with valid work issues
+                    Work.id.in_(work_issue_work_ids),
+                    # Include Works with valid events
+                    and_(
+                        Event.id.isnot(None),
+                        Event.id.in_(valid_event_ids),
+                    ),
+                )
+            )
             .add_columns(
                 Project.name.label("project_name"),
                 WorkType.report_title.label("work_report_title"),
                 (
                     Event.anticipated_date
-                    + func.cast(func.concat(Event.number_of_days, " DAYS"), INTERVAL)
+                    + func.cast(func.concat(func.coalesce(Event.number_of_days, 0), " DAYS"), INTERVAL)
                 ).label("anticipated_decision_date"),
+                Event.actual_date.label("actual_date"),
                 Work.report_description.label("work_short_description"),
                 Event.notes.label("decision_information"),
                 Event.description.label("event_description"),
@@ -146,6 +177,8 @@ def _fetch_data(self, report_date):
                 func.coalesce(Event.actual_date, Event.anticipated_date).label(
                     "event_date"
                 ),
+                EventConfiguration.id.label("event_configuration_id"),
+                EventConfiguration.event_type_id.label("event_type_id"),
                 EventConfiguration.event_category_id.label("milestone_id"),
                 Project.id.label("project_id"),
             )
@@ -153,65 +186,61 @@ def _fetch_data(self, report_date):
 
         return results_qry.all()
 
-    def _format_data(self, data, report_title=None):
-        current_app.logger.debug(f"Formatting data for {self.report_title} report")
+    def _format_data(self, data: List[Dict], report_title: Optional[str] = None) -> Dict:
+        """
+        Formats and categorizes event data into 30-, 60-, and 90-day intervals based on event dates.
+
+        This method processes the provided data by:
+        1. Resolving multiple events using `_resolve_multiple_events`.
+        2. Updating work issues for the first event using `_update_work_issues`.
+        3. Categorizing the formatted events into 30-, 60-, and 90-day intervals relative to the report date.
+        4. Retrieving and applying project-specific history to the earliest event in each interval.
+        """
         data = super()._format_data(data)
+        # Get work issues for work first event
         data = self._update_work_issues(data)
+        data = self._resolve_multiple_events(data)
         response = {
             "30": [],
             "60": [],
             "90": [],
         }
-        project_special_history = self._get_project_special_history(data)
-        for work in data:
-            next_major_decision_event = self._get_next_major_decision_event(
-                work["work_id"], work["anticipated_decision_date"]
-            )
-            event_decision_date = work["anticipated_decision_date"]
-            work["anticipated_decision_date"] = (
-                next_major_decision_event.anticipated_date
-            )
-            work.update(
-                {
-                    "is_decision_event": False,
-                    "is_pecp_event": False,
-                    "is_reportable_event": False,
-                }
-            )
-            if work["milestone_id"] in self.decision_configuration_ids:
-                work["is_decision_event"] = True
-            elif work["milestone_id"] in self.pecp_configuration_ids:
-                work["is_pecp_event"] = True
-            else:
-                work["is_reportable_event"] = True
-            if event_decision_date <= (self.report_date + timedelta(days=30)):
+        works = [group["items"][0] for group in data]
+        project_special_history = self._get_project_special_history(works)
+        for group in data:
+            first_event = group["items"][0]
+            first_event_date = first_event["event_date"]
+            if first_event_date <= (self.report_date + timedelta(days=30)):
                 special_history = self._get_project_special_history_id(
-                    work["project_id"], project_special_history[30], event_decision_date
+                    first_event["project_id"], project_special_history[30], first_event_date
                 )
                 if special_history:
-                    work["project_name"] = special_history.field_value
-                response["30"].append(work)
-            elif event_decision_date <= (self.report_date + timedelta(days=60)):
+                    first_event["project_name"] = special_history.field_value
+                response["30"].append(group)
+            elif first_event_date <= (self.report_date + timedelta(days=60)):
                 special_history = self._get_project_special_history_id(
-                    work["project_id"], project_special_history[60], event_decision_date
+                    first_event["project_id"], project_special_history[60], first_event_date
                )
                 if special_history:
-                    work["project_name"] = special_history.field_value
-                response["60"].append(work)
-            elif event_decision_date <= (self.report_date + timedelta(days=90)):
+                    first_event["project_name"] = special_history.field_value
+                response["60"].append(group)
+            elif first_event_date <= (self.report_date + timedelta(days=93)):
                 special_history = self._get_project_special_history_id(
-                    work["project_id"], project_special_history[90], event_decision_date
+                    first_event["project_id"], project_special_history[90], first_event_date
                 )
                 if special_history:
-                    work["project_name"] = special_history.field_value
-                response["90"].append(work)
+                    first_event["project_name"] = special_history.field_value
+                response["90"].append(group)
+        for _, value in response.items():
+            value.sort(key=lambda work: work["items"][0]["event_date"])
         return response
 
     def _update_work_issues(self, data) -> List[WorkIssues]:
         """Combine the result with work issues"""
-        work_ids = set((work.get("work_id") for work in data))
+        work_ids = set((work["group"] for work in data))
         work_issues = WorkIssuesService.find_work_issues_by_work_ids(work_ids)
-        for result_item in data:
+        for group in data:
+            result_item = group["items"][0]
             issue_per_work = [
                 issue
                 for issue in work_issues
@@ -241,6 +270,10 @@ def _update_work_issues(self, data) -> List[WorkIssues]:
                 result_item["oldest_update"] = None
             result_item["work_issues"] = issues
 
+            # Update all works with the work issues
+            for work in group["items"][1:]:
+                work["work_issues"] = result_item["work_issues"]
+                work["oldest_update"] = result_item["oldest_update"]
         return data
 
     def generate_report(
@@ -248,7 +281,7 @@ def generate_report(
     ):  # pylint: disable=too-many-locals
         """Generates a report and returns it"""
         self.report_date = report_date.astimezone(utc)
-        data = self._fetch_data(report_date)
+        data = self._fetch_data(report_date + timedelta(days=-3))
         data = self._format_data(data)
         data = self._update_staleness(data, report_date)
         if return_type == "json" or not data:
@@ -274,22 +307,30 @@ def generate_report(
             id="LaterPages", frames=[page_table_frame], onPage=self.add_default_info
         )
         doc.addPageTemplates(page_template)
-        heading_style = stylesheet["Heading2"]
+        title_style = stylesheet["Heading2"]
+        title_style.fontName = 'BCSans'
+        title_style.alignment = TA_CENTER
+        heading_style = stylesheet["Heading3"]
         heading_style.fontName = 'BCSans'
         heading_style.alignment = TA_CENTER
         story = [NextPageTemplate(["*", "LaterPages"])]
-        story.append(Paragraph("30-60-90", heading_style))
+        story.append(Paragraph("30-60-90", title_style))
         story.append(Paragraph("Environmental Assessment Office", heading_style))
         story.append(
             Paragraph(f"Submitted for: {report_date:%B %d, %Y}", heading_style)
         )
-
-        table_data = [["Issue", "Status/Key Milestones/Next Steps"]]
-
         normal_style = stylesheet["Normal"]
         normal_style.fontSize = 6.5
         normal_style.fontName = 'BCSans'
-        data, styles = self._get_table_data_and_styles(data, normal_style)
+        subheading_style = ParagraphStyle(
+            "subheadings",
+            parent=normal_style,
+            fontName="BCSans-Bold",
+            fontSize=9
+        )
+        table_data = [[Paragraph("Issue", subheading_style), Paragraph("Status/Key Milestones/Next Steps", subheading_style)]]
+
+        data, styles = self._get_table_data_and_styles(data, normal_style, subheading_style)
         table_data.extend(data)
         table = Table(table_data)
         table.setStyle(
@@ -361,107 +402,174 @@ def _get_next_pcp_query(self, start_date, end_date):
         )
         return next_pecp_query
 
-    def _get_valid_event_ids(self, start_date, end_date):
-        """Find and return set of valid decision or high priority event ids"""
-        valid_events = db.session.query(Event).join(
-            Work, Work.id == Event.work_id
-        ).filter(
-            func.coalesce(Event.actual_date, Event.anticipated_date).between(
-                start_date.date(), end_date.date()
-            ),
-            or_(
-                Event.event_configuration_id.in_(self.decision_configuration_ids),
-                and_(Work.is_high_priority.is_(True), Event.high_priority.is_(True)),
-            ),
-        )
-        valid_events = {x.id for x in valid_events}
-        return valid_events
-
-    def _get_next_major_decision_event(self, work_id, anticipated_decision_date):
-        """Find and return the next major decision event based on work id and anticipated decision date"""
-        next_major_decision_event_query = (
-            db.session.query(
-                Event.work_id,
-                func.min(Event.anticipated_date).label("min_anticipated_start_date"),
-            )
+    def _get_valid_work_issue_work_ids(self, start_date, end_date):
+        """Find and return set of valid work ids that have a high priority work issue"""
+        work_ids = (
+            db.session.query(WorkIssues.work_id)
+            .join(WorkIssueUpdates.work_issue)
             .filter(
-                Event.anticipated_date >= anticipated_decision_date,
-                Event.event_configuration_id.in_(self.decision_configuration_ids),
+                WorkIssueUpdates.is_approved.is_(True),
+                WorkIssueUpdates.posted_date.between(start_date.date(), end_date.date()),
+                WorkIssues.work_id.in_(self.high_profile_work_issue_work_ids)
             )
-            .group_by(Event.work_id)
-            .subquery()
         )
-        next_major_decision_event = (
-            Event.query.filter(Event.work_id == work_id)
-            .join(
-                next_major_decision_event_query,
-                and_(
-                    Event.work_id == next_major_decision_event_query.c.work_id,
-                    Event.anticipated_date
-                    == next_major_decision_event_query.c.min_anticipated_start_date,
+        return work_ids
+
+    def _get_valid_event_ids(self, start_date, end_date):
+        """Find and return set of valid decision or high priority event ids"""
+        valid_events = (
+            db.session.query(Event.id)
+            .join(EventConfiguration, Event.event_configuration)
+            .join(Work, Event.work)
+            .filter(
+                func.coalesce(Event.actual_date, Event.anticipated_date).between(
+                    start_date.date(), end_date.date()
+                ),
+                or_(
+                    Event.event_configuration_id.in_(self.decision_configuration_ids),  # Decision events
+                    and_(  # High profile work with pcp
+                        Work.is_high_priority.is_(True),
+                        EventConfiguration.event_category_id == EventCategoryEnum.PCP.value,
+                        EventConfiguration.event_type_id == EventTypeEnum.COMMENT_PERIOD.value
+                    ),
+                    and_(  # High profile events
+                        Work.is_high_priority.is_(True),
+                        Event.high_priority.is_(True),
+                        EventConfiguration.event_category_id.not_in([EventCategoryEnum.CALENDAR.value, EventCategoryEnum.FINANCE.value])
+                    ),
+                    and_(
+                        Work.work_type_id == 1,  # Project Notification
+                        EventConfiguration.event_category_id == EventCategoryEnum.MILESTONE.value,
+                        EventConfiguration.event_type_id == EventTypeEnum.REFERRAL.value,
+                        EventConfiguration.name == "Project Notification Report referred to Decision Maker",
+                    ),
+                    and_(
+                        Work.work_type_id == 2,  # Minister's Designation
+                        EventConfiguration.event_category_id == EventCategoryEnum.MILESTONE.value,
+                        EventConfiguration.event_type_id == EventTypeEnum.REFERRAL.value,
+                        EventConfiguration.name != "Minister's Designation Report referred to Decision Maker",
+                    ),
+                    and_(
+                        Work.work_type_id == 5,  # Exemption Order
+                        EventConfiguration.event_category_id == EventCategoryEnum.MILESTONE.value,
+                        EventConfiguration.event_type_id == EventTypeEnum.REFERRAL.value,
+                        EventConfiguration.name != "Exemption Request Package Referred to Minister",
+                    ),
+                    and_(
+                        Work.work_type_id == 6,  # Assessment
+                        EventConfiguration.event_category_id == EventCategoryEnum.MILESTONE.value,
+                        EventConfiguration.event_type_id == EventTypeEnum.REFERRAL.value,
+                        EventConfiguration.name.in_(["EAC Referral Package sent to Ministers",
"Termination Package Referred to Minister"]) + ), + and_( + Work.work_type_id == 7, # Ammendment + EventConfiguration.event_category_id == EventCategoryEnum.MILESTONE.value, + EventConfiguration.event_type_id == EventTypeEnum.REFERRAL.value, + EventConfiguration.name == "Amendment Decision Package Referred to Decision Maker" + ), ), ) - .first() ) - return next_major_decision_event + return valid_events + + def _format_table_data_events(self, events, style): + """Generates styled paragraphs for all events relating to the same work""" + data = [] + # List events in chronological order + sorted_events = sorted(events, key=lambda event: event.get("event_date")) + for event in sorted_events: + event_description = "" + event_description_label = "" + if event["event_type"] == "decision_referral" and event.get("decision_information"): + event_description = event["decision_information"] + event_description_label = "Decision Information" + elif event["event_type"] == "pcp" and event.get("pecp_explanation"): + event_description = event["pecp_explanation"] + event_description_label = "PCP Explanation" + elif event["event_type"] == "work_issue": + event_description = event["event_description"] + event_description_label = "Issue Description" + elif event.get("event_description"): + event_description = event["event_description"] + event_description_label = "Event Description" + # Add paragraphs for each event + data.append([ + Paragraph( + f"{event.get('event_date', 'Unknown Date'): %B %d, %Y} - {event.get('event_title', 'Unknown Title')}", + style, + ), + Paragraph( + f"{event_description_label}: {event_description}" if event_description else "", + style, + ), + ]) + return data def _format_table_data(self, period_data, row_index, style): """Generates styled table rows for the given period data""" + # Define a bold style for labels + style_bold = ParagraphStyle( + "Bold-Labels", + parent=style, + fontName="BCSans-Bold", + ) data = [] - for work in period_data: - event_description = "" - if work["is_decision_event"] and work["decision_information"]: - event_description = work["decision_information"] - elif work["is_pecp_event"] and work["pecp_explanation"]: - event_description = work["pecp_explanation"] - elif work["event_description"]: - event_description = work["event_description"] + for group in period_data: + events = group["items"] + events_data = self._format_table_data_events(events, style) + work = events[0] data.append( [ [ Paragraph( f"{work['project_name']} - {work['work_report_title']}", - style, + style_bold, ), Paragraph( - f"{work['anticipated_decision_date']: %B %d, %Y}", + f"{work['event_date']: %B %d, %Y}", style, ), ], [ + Paragraph( + ("Description"), style_bold + ), Paragraph( ( - work["work_short_description"] + work['work_short_description'] if work["work_short_description"] else "" ), style, ), + Paragraph( + ("Status"), style_bold + ), Paragraph( ( - work["work_status_text"] + work['work_status_text'] if work["work_status_text"] else "" ), style, ), Paragraph( - event_description if event_description else "", - style, + ("Key Milestones"), style_bold ), + events_data ], ] ) row_index += 1 return data, row_index - def _get_table_data_and_styles(self, data, normal_style): + def _get_table_data_and_styles(self, data, normal_style, subheading_style): """Create and return table data and styles""" table_data = [] styles = [] row_index = 1 - for period, item in data.items(): - table_data.append([f"{period} days", ""]) + for period, groups in data.items(): + 
table_data.append([Paragraph(f"{period} days", subheading_style)]) styles.append( ( "SPAN", @@ -471,11 +579,11 @@ def _get_table_data_and_styles(self, data, normal_style): ) styles.append(("ALIGN", (0, row_index), (-1, row_index), "LEFT")) styles.append( - ("FONTNAME", (0, row_index), (-1, row_index), "Helvetica-Bold"), + ("FONTNAME", (0, row_index), (-1, row_index), "BCSans-Bold"), ) row_index += 1 period_data, row_index = self._format_table_data( - item, row_index, normal_style + groups, row_index, normal_style ) table_data.extend(period_data) return table_data, styles @@ -500,10 +608,11 @@ def _get_project_ids_by_period(self, data: List[dict]) -> Dict[int, List[int]]: periods = {30: [], 60: [], 90: []} for index, period in enumerate(periods): periods[period] = [ - x["project_id"] + x.get("project_id") for x in data - if x["anticipated_decision_date"] <= self.report_date + timedelta(days=period) - and x["anticipated_decision_date"] >= self.report_date + timedelta(days=index * 30) + if x.get("anticipated_decision_date") is not None + and x.get("anticipated_decision_date") <= self.report_date + timedelta(days=period) + and x.get("anticipated_decision_date") >= self.report_date + timedelta(days=index * 30) ] return periods @@ -521,19 +630,75 @@ def _get_project_special_history(self, data: List[dict]) -> Dict[int, List]: ) return periods + def _categorize_event(self, event: dict, work_id: int) -> str: + if event.get("event_configuration_id") in self.decision_configuration_ids or event.get("event_type_id") == EventTypeEnum.REFERRAL.value: + return "decision_referral" + if event.get("event_configuration_id") in self.pecp_configuration_ids: + return "pcp" + if work_id in self.high_profile_work_issue_work_ids: + return "work_issue" + return "other" + + def _resolve_multiple_events(self, data: List[Dict]) -> List[Dict]: + """ + Resolves duplicates and orders multiple events for each work. + + Processes a list of event groups, where each group contains a work_id (`group`) and + associated events (`items`). Events are categorized and ordered and only one decision or referral is kept + Events are sorted within each work_id group by type and anticipated date. 
+ """ + resolved_data = [] + for group in data: + work_id = group['group'] + events = group['items'] + resolved_events = [] + decision_referral_event = None + for event in events: + event_type = self._categorize_event(event, work_id) + event["event_type"] = event_type + if event_type == "decision_referral": + if not decision_referral_event: + decision_referral_event = event + elif event.get("event_type_id") == EventTypeEnum.REFERRAL.value and event.get("actual_date") is None: + decision_referral_event = event + continue + if event_type == "work_issue": + first_work_issue = event.get("work_issues")[0] + latest_update = first_work_issue.get("latest_update") + event["event_date"] = datetime.fromisoformat(latest_update.get("posted_date")) + event["event_title"] = first_work_issue.get("title") + event["event_description"] = latest_update.get("description") + resolved_events.append(event) + if decision_referral_event: + resolved_events.append(decision_referral_event) + # Sort the events for each work + sorted_events = sorted( + resolved_events, + key=lambda x: ( + self.event_order.get(x['event_type']), + x.get('anticipated_date', datetime.max) + ) + ) + resolved_data.append({ + 'group': work_id, + 'items': sorted_events + }) + return resolved_data + def _update_staleness(self, data: dict, report_date: datetime) -> dict: """Calculate the staleness based on report date""" date = report_date.astimezone(CANADA_TIMEZONE) for _, work_type_data in data.items(): - for work in work_type_data: - if work["status_date_updated"]: - diff = (date - work["status_date_updated"]).days + for group in work_type_data: + first_event = group["items"][0] + if first_event["status_date_updated"]: + diff = (date - first_event["status_date_updated"]).days if diff > 10: - work["status_staleness"] = StalenessEnum.CRITICAL.value + first_event["status_staleness"] = StalenessEnum.CRITICAL.value elif diff > 5: - work["status_staleness"] = StalenessEnum.WARN.value + first_event["status_staleness"] = StalenessEnum.WARN.value else: - work["status_staleness"] = StalenessEnum.GOOD.value + first_event["status_staleness"] = StalenessEnum.GOOD.value else: - work["status_staleness"] = StalenessEnum.CRITICAL.value + first_event["status_staleness"] = StalenessEnum.CRITICAL.value return data diff --git a/epictrack-web/src/components/reports/30-60-90Report/ThirtySixtyNinety.tsx b/epictrack-web/src/components/reports/30-60-90Report/ThirtySixtyNinety.tsx index b454813bc..156643493 100644 --- a/epictrack-web/src/components/reports/30-60-90Report/ThirtySixtyNinety.tsx +++ b/epictrack-web/src/components/reports/30-60-90Report/ThirtySixtyNinety.tsx @@ -1,4 +1,4 @@ -import React, { useCallback, useMemo } from "react"; +import React from "react"; import { Container } from "@mui/system"; import { Accordion, @@ -35,9 +35,22 @@ import { ETPageContainer } from "../../shared"; import { staleLevel } from "utils/uiUtils"; import { If } from "react-if"; +interface Group { + group: string; + items: any[]; +} + +interface Period { + [key: string]: Group[]; +} + +interface ReportData { + data: Period; +} + const IndicatorIcon: React.FC = Icons["IndicatorIcon"]; export default function ThirtySixtyNinety() { - const [reports, setReports] = React.useState({}); + const [reports, setReports] = React.useState(); const [showReportDateBanner, setShowReportDateBanner] = React.useState(false); const [selectedTab, setSelectedTab] = React.useState(0); @@ -63,15 +76,20 @@ export default function ThirtySixtyNinety() { ); setResultStatus(RESULT_STATUS.LOADED); if 
(reportData.status === 200) { - const result = (reportData.data as never)["data"]; - Object.keys(result).forEach((key) => { - (result[key] as []).forEach((resultItem: any) => { - (resultItem.work_issues as []).forEach((workIssue: any) => { - if (stalenessLevel(workIssue) === StalenessEnum.CRITICAL) - workIssue["staleness"] = StalenessEnum.CRITICAL; - else if (stalenessLevel(workIssue) === StalenessEnum.WARN) - workIssue["staleness"] = StalenessEnum.WARN; - else workIssue["staleness"] = StalenessEnum.GOOD; + const result = (reportData.data as ReportData).data; + // Iterate through each period: "30", "60", "90" + Object.keys(result).forEach((key: string) => { + const periodGroups = result[key] as Group[]; + periodGroups.forEach((group) => { + // Check and set staleness for each work issue in the work + (group.items[0].work_issues as any[]).forEach((workIssue) => { + const staleness = stalenessLevel(workIssue); + workIssue.staleness = + staleness === StalenessEnum.CRITICAL + ? StalenessEnum.CRITICAL + : staleness === StalenessEnum.WARN + ? StalenessEnum.WARN + : StalenessEnum.GOOD; }); }); }); @@ -212,6 +230,7 @@ export default function ThirtySixtyNinety() { {resultStatus === RESULT_STATUS.LOADED && + reports && Object.keys(reports).map((key) => { return ( <> @@ -220,7 +239,10 @@ export default function ThirtySixtyNinety() { {key} - {((reports as any)[key] as []).map((item, itemIndex) => { + {/* Iterate over groups within the period */} + {reports[key].map((group, groupIndex) => { + const item = group.items[0]; + const itemIndex = groupIndex; return ( }> @@ -255,7 +277,7 @@ export default function ThirtySixtyNinety() { } /> - {item["project_name"]} - {item["event_title"]}: + {item["project_name"]} - {item["event_title"]}:{" "} {dateUtils.formatDate( item["event_date"], DISPLAY_DATE_FORMAT @@ -322,11 +344,13 @@ export default function ThirtySixtyNinety() { - Anticipated Decision Date + {item["event_type"] === "work_issue" + ? "Work Issue Update Date" + : "Anticipated Decision Date"}{" "} {dateUtils.formatDate( - item["anticipated_decision_date"], + item["event_date"], DISPLAY_DATE_FORMAT )}
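
For reference, a minimal sketch (not part of the patch) of the grouped 30-60-90 payload the updated frontend consumes: each period key maps to a list of groups, where `group` carries the work id and `items` holds that work's events, with `items[0]` treated as the primary event. The `Group` and `Period` shapes mirror the interfaces added in ThirtySixtyNinety.tsx; the sample payload and the `logReport` helper are illustrative assumptions, not code from this repository.

// Illustrative TypeScript sketch only; sample values and logReport() are assumptions.
interface Group {
  group: string; // work id the events belong to
  items: any[];  // events for that work; items[0] drives the summary row
}

interface Period {
  [key: string]: Group[]; // "30" | "60" | "90" -> groups falling due in that window
}

const sample: Period = {
  "30": [
    {
      group: "101",
      items: [{ event_title: "Referral", event_date: "2024-05-01", work_issues: [] }],
    },
  ],
  "60": [],
  "90": [],
};

// Traverse the payload the same way the updated component does.
function logReport(report: Period): void {
  Object.keys(report).forEach((period) => {
    report[period].forEach((group) => {
      const primary = group.items[0];
      console.log(`${period} days`, group.group, primary?.event_title, primary?.event_date);
    });
  });
}

logReport(sample);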