Skip to content

Commit

Permalink
93214: Update DR SavedClaim updater jobs logging and statsd metrics
Browse files Browse the repository at this point in the history
  • Loading branch information
dfong-adh committed Sep 23, 2024
1 parent b818f92 commit 509e9c5
Show file tree
Hide file tree
Showing 6 changed files with 327 additions and 10 deletions.
17 changes: 15 additions & 2 deletions app/sidekiq/decision_review/saved_claim_hlr_status_updater_job.rb
Original file line number Diff line number Diff line change
Expand Up @@ -14,6 +14,8 @@ class SavedClaimHlrStatusUpdaterJob

SUCCESSFUL_STATUS = %w[complete].freeze

ERROR_STATUS = 'error'

STATSD_KEY_PREFIX = 'worker.decision_review.saved_claim_hlr_status_updater'

def perform
Expand All @@ -33,7 +35,7 @@ def perform
StatsD.increment("#{STATSD_KEY_PREFIX}.delete_date_update")
Rails.logger.info("#{self.class.name} updated delete_date", guid:)
else
StatsD.increment("#{STATSD_KEY_PREFIX}.status", tags: ["status:#{status}"])
handle_form_status_metrics_and_logging(hlr, status)
end

hlr.update(params)
Expand All @@ -57,12 +59,23 @@ def higher_level_reviews

# Fetches the current Higher-Level Review state from Lighthouse.
#
# @param guid [String] submission guid of the Higher-Level Review
# @return [Array(String, Hash)] the current status string and the full
#   'data.attributes' hash from the API response body
def get_status_and_attributes(guid)
  response = decision_review_service.get_higher_level_review(guid).body
  attributes = response.dig('data', 'attributes')
  # Read status off the already-extracted attributes hash; the earlier
  # second dig into the response was a dead store and has been removed.
  status = attributes['status']

  [status, attributes]
end

# Emits a StatsD status metric for the HLR form and logs newly-errored
# forms. A form whose stored metadata already carries the error status is
# treated as stale and skipped entirely (neither logged nor counted).
def handle_form_status_metrics_and_logging(hlr, status)
  if status == ERROR_STATUS
    previous_status = JSON.parse(hlr.metadata || '{}')['status']
    return if previous_status == ERROR_STATUS # stale error — already reported

    Rails.logger.info('DecisionReview::SavedClaimHlrStatusUpdaterJob form status error', guid: hlr.guid)
  end

  StatsD.increment("#{STATSD_KEY_PREFIX}.status", tags: ["status:#{status}"])
end

# Feature-flag gate: the job's perform body is a no-op when this returns false.
def enabled?
  Flipper.enabled?(:decision_review_saved_claim_hlr_status_updater_job_enabled)
end
Expand Down
36 changes: 32 additions & 4 deletions app/sidekiq/decision_review/saved_claim_nod_status_updater_job.rb
Original file line number Diff line number Diff line change
Expand Up @@ -14,6 +14,8 @@ class SavedClaimNodStatusUpdaterJob

SUCCESSFUL_STATUS = %w[complete].freeze

ERROR_STATUS = 'error'

UPLOAD_SUCCESSFUL_STATUS = %w[vbms].freeze

ATTRIBUTES_TO_STORE = %w[status detail createDate updateDate].freeze
Expand All @@ -34,11 +36,11 @@ def perform
params = { metadata: attributes.merge(uploads: uploads_metadata).to_json, metadata_updated_at: timestamp }

# only set delete date if attachments are all successful as well
if all_uploads_successful?(uploads_metadata) && SUCCESSFUL_STATUS.include?(status)
if check_attachments_status(nod, uploads_metadata) && SUCCESSFUL_STATUS.include?(status)
params[:delete_date] = timestamp + RETENTION_PERIOD
StatsD.increment("#{STATSD_KEY_PREFIX}.delete_date_update")
else
StatsD.increment("#{STATSD_KEY_PREFIX}.status", tags: ["status:#{status}"])
handle_form_status_metrics_and_logging(nod, status)
end

nod.update(params)
Expand Down Expand Up @@ -83,18 +85,44 @@ def get_evidence_uploads_statuses(submitted_appeal_uuid)
result
end

def all_uploads_successful?(uploads_metadata)
# Emits a StatsD status metric for the NOD form and logs newly-errored
# forms. A form whose stored metadata already carries the error status is
# treated as stale and skipped entirely (neither logged nor counted).
def handle_form_status_metrics_and_logging(nod, status)
  if status == ERROR_STATUS
    previous_status = JSON.parse(nod.metadata || '{}')['status']
    return if previous_status == ERROR_STATUS # stale error — already reported

    Rails.logger.info('DecisionReview::SavedClaimNodStatusUpdaterJob form status error', guid: nod.guid)
  end

  StatsD.increment("#{STATSD_KEY_PREFIX}.status", tags: ["status:#{status}"])
end

# Checks every evidence upload attached to the NOD submission.
#
# Returns true only when all uploads are in a successful state. For each
# upload, the metric and log line are emitted only when the upload's error
# is new — i.e. the claim's previously stored metadata does not already
# record it as errored — so stale errors are not re-reported.
#
# NOTE(review): the 'evidence status error' log line fires for any upload
# whose previous status was not error, even when the new status is a
# success — confirm whether the log should also be gated on
# status == ERROR_STATUS.
#
# @param nod [SavedClaim] NoticeOfDisagreement record holding prior metadata
# @param uploads_metadata [Array<Hash>] current upload statuses from Lighthouse
# @return [Boolean] true when every upload status is in UPLOAD_SUCCESSFUL_STATUS
def check_attachments_status(nod, uploads_metadata)
  result = true

  old_uploads_metadata = extract_uploads_metadata(nod.metadata)

  uploads_metadata.each do |upload|
    status = upload['status']
    result = false unless UPLOAD_SUCCESSFUL_STATUS.include? status

    upload_id = upload['id']
    # Increment StatsD and log only for new errors. (The unconditional
    # increment that previously preceded this guard double-counted every
    # upload status and has been removed.)
    unless old_uploads_metadata.dig(upload_id, 'status') == ERROR_STATUS
      StatsD.increment("#{STATSD_KEY_PREFIX}_upload.status", tags: ["status:#{status}"])
      Rails.logger.info('DecisionReview::SavedClaimNodStatusUpdaterJob evidence status error',
                        { guid: nod.guid, lighthouse_upload_id: upload_id, detail: upload['detail'] })
    end
  end

  result
end

# Parses a SavedClaim's stored metadata JSON and returns its upload
# entries indexed by upload id.
#
# @param metadata [String, nil] JSON-encoded metadata (may be nil)
# @return [Hash{String => Hash}] uploads keyed by their 'id' field
def extract_uploads_metadata(metadata)
  return {} if metadata.nil?

  uploads = JSON.parse(metadata).fetch('uploads', [])
  uploads.each_with_object({}) { |upload, indexed| indexed[upload['id']] = upload }
end

# Feature-flag gate: the job's perform body is a no-op when this returns false.
def enabled?
  Flipper.enabled?(:decision_review_saved_claim_nod_status_updater_job_enabled)
end
Expand Down
36 changes: 32 additions & 4 deletions app/sidekiq/decision_review/saved_claim_sc_status_updater_job.rb
Original file line number Diff line number Diff line change
Expand Up @@ -14,6 +14,8 @@ class SavedClaimScStatusUpdaterJob

SUCCESSFUL_STATUS = %w[complete].freeze

ERROR_STATUS = 'error'

UPLOAD_SUCCESSFUL_STATUS = %w[vbms].freeze

ATTRIBUTES_TO_STORE = %w[status detail createDate updateDate].freeze
Expand All @@ -34,11 +36,11 @@ def perform
params = { metadata: attributes.merge(uploads: uploads_metadata).to_json, metadata_updated_at: timestamp }

# only set delete date if attachments are all successful as well
if all_uploads_successful?(uploads_metadata) && SUCCESSFUL_STATUS.include?(status)
if check_attachments_status(sc, uploads_metadata) && SUCCESSFUL_STATUS.include?(status)
params[:delete_date] = timestamp + RETENTION_PERIOD
StatsD.increment("#{STATSD_KEY_PREFIX}.delete_date_update")
else
StatsD.increment("#{STATSD_KEY_PREFIX}.status", tags: ["status:#{status}"])
handle_form_status_metrics_and_logging(sc, status)
end

sc.update(params)
Expand Down Expand Up @@ -83,18 +85,44 @@ def get_evidence_uploads_statuses(submitted_appeal_uuid)
result
end

def all_uploads_successful?(uploads_metadata)
# Emits a StatsD status metric for the SC form and logs newly-errored
# forms. A form whose stored metadata already carries the error status is
# treated as stale and skipped entirely (neither logged nor counted).
def handle_form_status_metrics_and_logging(sc, status)
  if status == ERROR_STATUS
    previous_status = JSON.parse(sc.metadata || '{}')['status']
    return if previous_status == ERROR_STATUS # stale error — already reported

    Rails.logger.info('DecisionReview::SavedClaimScStatusUpdaterJob form status error', guid: sc.guid)
  end

  StatsD.increment("#{STATSD_KEY_PREFIX}.status", tags: ["status:#{status}"])
end

# Checks every evidence upload attached to the SC submission.
#
# Returns true only when all uploads are in a successful state. For each
# upload, the metric and log line are emitted only when the upload's error
# is new — i.e. the claim's previously stored metadata does not already
# record it as errored — so stale errors are not re-reported.
#
# NOTE(review): the 'evidence status error' log line fires for any upload
# whose previous status was not error, even when the new status is a
# success — confirm whether the log should also be gated on
# status == ERROR_STATUS.
#
# @param sc [SavedClaim] SupplementalClaim record holding prior metadata
# @param uploads_metadata [Array<Hash>] current upload statuses from Lighthouse
# @return [Boolean] true when every upload status is in UPLOAD_SUCCESSFUL_STATUS
def check_attachments_status(sc, uploads_metadata)
  result = true

  old_uploads_metadata = extract_uploads_metadata(sc.metadata)

  uploads_metadata.each do |upload|
    status = upload['status']
    result = false unless UPLOAD_SUCCESSFUL_STATUS.include? status

    upload_id = upload['id']
    # Increment StatsD and log only for new errors. (The unconditional
    # increment that previously preceded this guard double-counted every
    # upload status and has been removed.)
    unless old_uploads_metadata.dig(upload_id, 'status') == ERROR_STATUS
      StatsD.increment("#{STATSD_KEY_PREFIX}_upload.status", tags: ["status:#{status}"])
      Rails.logger.info('DecisionReview::SavedClaimScStatusUpdaterJob evidence status error',
                        { guid: sc.guid, lighthouse_upload_id: upload_id, detail: upload['detail'] })
    end
  end

  result
end

# Parses a SavedClaim's stored metadata JSON and returns its upload
# entries indexed by upload id.
#
# @param metadata [String, nil] JSON-encoded metadata (may be nil)
# @return [Hash{String => Hash}] uploads keyed by their 'id' field
def extract_uploads_metadata(metadata)
  return {} if metadata.nil?

  uploads = JSON.parse(metadata).fetch('uploads', [])
  uploads.each_with_object({}) { |upload, indexed| indexed[upload['id']] = upload }
end

# Feature-flag gate: the job's perform body is a no-op when this returns false.
def enabled?
  Flipper.enabled?(:decision_review_saved_claim_sc_status_updater_job_enabled)
end
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -22,6 +22,12 @@
instance_double(Faraday::Response, body: VetsJsonSchema::EXAMPLES.fetch('HLR-SHOW-RESPONSE-200_V2'))
end

# NOTE(review): this HLR spec builds its error response from the SC example
# fixture ('SC-SHOW-RESPONSE-200_V2'). The job under test only reads
# data.attributes.status, so this works, but confirm whether
# 'HLR-SHOW-RESPONSE-200_V2' was intended for consistency.
let(:response_error) do
  response = JSON.parse(VetsJsonSchema::EXAMPLES.fetch('SC-SHOW-RESPONSE-200_V2').to_json) # deep copy
  response['data']['attributes']['status'] = 'error'
  instance_double(Faraday::Response, body: response)
end

before do
allow(DecisionReviewV1::Service).to receive(:new).and_return(service)
end
Expand Down Expand Up @@ -84,6 +90,39 @@
.with('worker.decision_review.saved_claim_hlr_status_updater.error').exactly(2).times
end
end

# Verifies that a form-level 'error' status is logged/metered only when it is
# new: guid1 already has error in its stored metadata (stale — skipped), while
# guid2 transitions into error (new — logged and counted once).
context 'SavedClaim record with previous metadata' do
  before do
    allow(Rails.logger).to receive(:info)
  end

  it 'does not log or increment metrics for stale form error status' do
    SavedClaim::HigherLevelReview.create(guid: guid1, form: '{}', metadata: '{"status":"error"}')
    SavedClaim::HigherLevelReview.create(guid: guid2, form: '{}', metadata: '{"status":"submitted"}')

    expect(service).to receive(:get_higher_level_review).with(guid1).and_return(response_error)
    expect(service).to receive(:get_higher_level_review).with(guid2).and_return(response_error)

    subject.new.perform

    # Neither claim gets a delete_date: an errored form is never complete.
    claim1 = SavedClaim::HigherLevelReview.find_by(guid: guid1)
    expect(claim1.delete_date).to be_nil
    expect(claim1.metadata).to include 'error'

    claim2 = SavedClaim::HigherLevelReview.find_by(guid: guid2)
    expect(claim2.delete_date).to be_nil
    expect(claim2.metadata).to include 'error'

    # Only the NEW error (guid2) produces a status metric...
    expect(StatsD).to have_received(:increment)
      .with('worker.decision_review.saved_claim_hlr_status_updater.status', tags: ['status:error'])
      .exactly(1).time

    # ...and only guid2 is logged; guid1's stale error stays silent.
    expect(Rails.logger).not_to have_received(:info)
      .with('DecisionReview::SavedClaimHlrStatusUpdaterJob form status error', guid: guid1)
    expect(Rails.logger).to have_received(:info)
      .with('DecisionReview::SavedClaimHlrStatusUpdaterJob form status error', guid: guid2)
  end
end
end

context 'with flag disabled' do
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -22,6 +22,12 @@
instance_double(Faraday::Response, body: VetsJsonSchema::EXAMPLES.fetch('NOD-SHOW-RESPONSE-200_V2'))
end

# NOTE(review): this NOD spec builds its error response from the SC example
# fixture ('SC-SHOW-RESPONSE-200_V2'). The job under test only reads
# data.attributes.status, so this works, but confirm whether
# 'NOD-SHOW-RESPONSE-200_V2' was intended for consistency.
let(:response_error) do
  response = JSON.parse(VetsJsonSchema::EXAMPLES.fetch('SC-SHOW-RESPONSE-200_V2').to_json) # deep copy
  response['data']['attributes']['status'] = 'error'
  instance_double(Faraday::Response, body: response)
end

let(:upload_response_vbms) do
response = JSON.parse(File.read('spec/fixtures/notice_of_disagreements/NOD_upload_show_response_200.json'))
instance_double(Faraday::Response, body: response)
Expand All @@ -33,6 +39,13 @@
instance_double(Faraday::Response, body: response)
end

# Evidence-upload response forced into the error state with a human-readable
# detail. NOTE(review): loads the SC upload fixture inside the NOD spec —
# confirm the NOD upload fixture wasn't intended.
let(:upload_response_error) do
  response = JSON.parse(File.read('spec/fixtures/supplemental_claims/SC_upload_show_response_200.json'))
  response['data']['attributes']['status'] = 'error'
  response['data']['attributes']['detail'] = 'Invalid PDF'
  instance_double(Faraday::Response, body: response)
end

before do
allow(DecisionReviewV1::Service).to receive(:new).and_return(service)
end
Expand Down Expand Up @@ -167,6 +180,98 @@
end
end

# Verifies stale-error suppression at both levels: the form status itself and
# each evidence upload. Only errors that are new relative to the claim's
# previously stored metadata should be logged and counted.
context 'SavedClaim record with previous metadata' do
  before do
    allow(Rails.logger).to receive(:info)
  end

  let(:upload_id) { SecureRandom.uuid }
  let(:upload_id2) { SecureRandom.uuid }

  # Stored metadata whose upload is ALREADY errored (stale error case).
  let(:metadata1) do
    {
      'status' => 'submitted',
      'uploads' => [
        {
          'status' => 'error',
          'detail' => 'Invalid PDF',
          'id' => upload_id
        }
      ]
    }
  end

  # Stored metadata whose upload was previously pending (new error case).
  let(:metadata2) do
    {
      'status' => 'submitted',
      'uploads' => [
        {
          'status' => 'pending',
          'detail' => nil,
          'id' => upload_id2
        }
      ]
    }
  end

  it 'does not log or increment metrics for stale form error status' do
    SavedClaim::NoticeOfDisagreement.create(guid: guid1, form: '{}', metadata: '{"status":"error","uploads":[]}')
    SavedClaim::NoticeOfDisagreement.create(guid: guid2, form: '{}',
                                            metadata: '{"status":"submitted","uploads":[]}')

    expect(service).to receive(:get_notice_of_disagreement).with(guid1).and_return(response_error)
    expect(service).to receive(:get_notice_of_disagreement).with(guid2).and_return(response_error)

    subject.new.perform

    # Errored forms never receive a delete_date.
    claim1 = SavedClaim::NoticeOfDisagreement.find_by(guid: guid1)
    expect(claim1.delete_date).to be_nil
    expect(claim1.metadata).to include 'error'

    claim2 = SavedClaim::NoticeOfDisagreement.find_by(guid: guid2)
    expect(claim2.delete_date).to be_nil
    expect(claim2.metadata).to include 'error'

    # Only guid2's transition into error counts toward the metric.
    expect(StatsD).to have_received(:increment)
      .with('worker.decision_review.saved_claim_nod_status_updater.status', tags: ['status:error'])
      .exactly(1).time

    expect(Rails.logger).not_to have_received(:info)
      .with('DecisionReview::SavedClaimNodStatusUpdaterJob form status error', guid: guid1)
    expect(Rails.logger).to have_received(:info)
      .with('DecisionReview::SavedClaimNodStatusUpdaterJob form status error', guid: guid2)
  end

  it 'does not log or increment metrics for stale evidence error status' do
    # guid1's upload already errored in stored metadata (stale).
    SavedClaim::NoticeOfDisagreement.create(guid: guid1, form: '{}', metadata: metadata1.to_json)
    appeal_submission = create(:appeal_submission, submitted_appeal_uuid: guid1)
    create(:appeal_submission_upload, appeal_submission:, lighthouse_upload_id: upload_id)

    # guid2's upload was pending and now errors (new).
    SavedClaim::NoticeOfDisagreement.create(guid: guid2, form: '{}', metadata: metadata2.to_json)
    appeal_submission2 = create(:appeal_submission, submitted_appeal_uuid: guid2)
    create(:appeal_submission_upload, appeal_submission: appeal_submission2, lighthouse_upload_id: upload_id2)

    expect(service).to receive(:get_notice_of_disagreement).with(guid1).and_return(response_pending)
    expect(service).to receive(:get_notice_of_disagreement).with(guid2).and_return(response_error)
    expect(service).to receive(:get_notice_of_disagreement_upload).with(guid: upload_id)
      .and_return(upload_response_error)
    expect(service).to receive(:get_notice_of_disagreement_upload).with(guid: upload_id2)
      .and_return(upload_response_error)

    subject.new.perform

    # Only the NEW upload error (upload_id2) is counted and logged.
    expect(StatsD).to have_received(:increment)
      .with('worker.decision_review.saved_claim_nod_status_updater_upload.status', tags: ['status:error'])
      .exactly(1).times
    expect(Rails.logger).not_to have_received(:info)
      .with('DecisionReview::SavedClaimNodStatusUpdaterJob evidence status error',
            guid: anything, lighthouse_upload_id: upload_id, detail: anything)
    expect(Rails.logger).to have_received(:info)
      .with('DecisionReview::SavedClaimNodStatusUpdaterJob evidence status error',
            guid: guid2, lighthouse_upload_id: upload_id2, detail: 'Invalid PDF')
  end
end

context 'an error occurs while processing' do
before do
SavedClaim::NoticeOfDisagreement.create(guid: SecureRandom.uuid, form: '{}')
Expand Down
Loading

0 comments on commit 509e9c5

Please sign in to comment.