index
int64 0
0
| repo_id
stringclasses 829
values | file_path
stringlengths 34
254
| content
stringlengths 6
5.38M
|
|---|---|---|---|
0
|
code_files/vets-api-private
|
code_files/vets-api-private/rakelib/persistent_attachment_remediation.rake
|
# frozen_string_literal: true
# Probes each attachment under +key+ for decryption failures. Accessing
# +attachment.file_data+ forces decryption; when it raises, the attachment is
# destroyed (or only reported under dry_run), the in-progress-form data is
# scrubbed of the attachment entry, and the claim is flagged for deletion.
#
# @param claim [SavedClaim] claim owning the attachments
# @param key [Symbol] attachment key on the claim's form data
# @param attachments [ActiveRecord::Relation] PersistentAttachment records to probe
# @param ipf_form_data [OpenStruct, nil] parsed InProgressForm data (may be nil)
# @param destroyed_names [Array] accumulator of destroyed attachments' file names
# @param delete_claim [Boolean, nil] carried flag; becomes true once any attachment fails
# @param dry_run [Boolean] when true, report actions without destroying records
# @return [Array(Boolean, OpenStruct)] updated [delete_claim, ipf_form_data] pair
def sanitize_attachments_for_key(claim, key, attachments, ipf_form_data, destroyed_names, delete_claim, dry_run) # rubocop:disable Metrics/ParameterLists
  attachments.each do |attachment|
    # Touch file_data to trigger decryption; the expression's value is
    # intentionally discarded — only the potential exception matters here.
    attachment.file_data || attachment.saved_claim_id.nil?
  rescue => e
    puts "Attachment #{attachment.id} failed to decrypt: #{e.class}"
    if dry_run
      puts "[DRY RUN] Would destroy attachment #{attachment.id}"
    else
      attachment.delete
    end
    ipf_form_data = update_ipf_form_data(attachment, claim, ipf_form_data, key, destroyed_names)
    delete_claim = true
  end
  [delete_claim, ipf_form_data || nil]
end
# Removes a destroyed attachment's entry from the matching form-data section
# and records its file name in +destroyed_names+.
#
# @param attachment [PersistentAttachment] the attachment that was destroyed
# @param claim [SavedClaim] claim the attachment belonged to
# @param ipf_form_data [OpenStruct, nil] parsed InProgressForm data, if one was found
# @param key [Symbol] attachment key on the claim's form
# @param destroyed_names [Array] accumulator; the destroyed file name is appended
# @return [OpenStruct, nil] ipf_form_data with the attachment entry removed (nil when none given)
def update_ipf_form_data(attachment, claim, ipf_form_data, key, destroyed_names)
  # Retrieve the proper key for the form data section.
  form_key = claim.respond_to?(:attachment_key_map) ? (claim.attachment_key_map[key] || key) : key
  # Get the section of the form data either from ipf_form_data or by parsing claim.form.
  form_data_section = if ipf_form_data
                        ipf_form_data.send(form_key)
                      elsif claim.form.present?
                        JSON.parse(claim.form, object_class: OpenStruct).send(key)
                      end
  # Find the destroyed attachment in the form data section. Entries may carry
  # snake_case or camelCase confirmation codes depending on their source.
  destroyed_attachment = form_data_section&.find do |att|
    (att.respond_to?(:confirmation_code) && att.confirmation_code == attachment.guid) ||
      (att.respond_to?(:confirmationCode) && att.confirmationCode == attachment.guid)
  end
  puts "Add destroyed attachment file to list: #{destroyed_attachment&.name}"
  destroyed_names << destroyed_attachment&.name
  # Remove the destroyed attachment from the section.
  form_data_section&.reject! do |att|
    (att.respond_to?(:confirmation_code) && att.confirmation_code == attachment.guid) ||
      (att.respond_to?(:confirmationCode) && att.confirmationCode == attachment.guid)
  end
  # Update ipf_form_data if available.
  ipf_form_data&.send("#{form_key}=", form_data_section)
  ipf_form_data
end
# Masks the local part of an email address for safe logging: every character
# after the first is replaced with '*' (e.g. 'john@example.com' -> 'j***@example.com').
#
# @param email [String, nil] address to scrub
# @return [String, nil] masked address; inputs with no maskable local part
#   (nil, empty, missing '@', or empty local part) are returned unchanged.
#   Previously '' / '@x.com' raised (ArgumentError from '*' * -1) and a
#   missing domain produced a dangling 'j***@'.
def scrub_email(email)
  return email if email.nil? || email.empty?

  prefix, domain = email.split('@')
  # Nothing to mask when the local part or domain is absent.
  return email if prefix.nil? || prefix.empty? || domain.nil?

  masked_local = prefix[0] + ('*' * (prefix.length - 1))
  "#{masked_local}@#{domain}"
end
# Masks the middle of a file name for safe logging, keeping the first two and
# last two characters of the stem plus the extension
# (e.g. 'document.pdf' -> 'do****nt.pdf').
#
# @param filename [String, nil] name to mask
# @return [String, nil] masked name; blank names and stems of four characters
#   or fewer are returned unchanged
def mask_file_name(filename)
  # Blank input passes through untouched.
  return filename if filename.nil? || filename.strip.empty?

  extension = File.extname(filename)
  stem = File.basename(filename, extension)
  # Too short to mask meaningfully.
  return filename if stem.length <= 4

  middle = '*' * (stem.length - 4)
  "#{stem[0, 2]}#{middle}#{stem[-2, 2]}#{extension}"
end
#
# bundle exec rake persistent_attachment_remediation:run['264 265 267',true] Burials
# bundle exec rake persistent_attachment_remediation:run['273 274 275',true] Pensions
namespace :persistent_attachment_remediation do
  desc 'Remediate SavedClaims and attachments by claim id. Deletes claims with bad attachments and notifies by email.'
  task :run, %i[claim_ids dry_run] => :environment do |_, args|
    # claim_ids arrives as a single space-separated string (see examples above).
    claim_ids = args[:claim_ids]&.split&.map(&:strip)
    dry_run = args[:dry_run].to_s == 'true'
    unless claim_ids&.any?
      puts 'Usage: rake persistent_attachment_remediation:run[CLAIM_ID1,CLAIM_ID2,...]'
      exit 1
    end
    # Only InProgressForms updated on/after this date are considered.
    updated_time = Time.zone.local(2025, 6, 18, 0, 0, 0)
    unique_emails_for_notification = Set.new
    vanotify_service = ''
    personalization = {}
    claim_ids.each do |claim_id|
      puts '========================================'
      claim = SavedClaim.find_by(id: claim_id)
      unless claim
        puts "SavedClaim with id #{claim_id} not found."
        next
      end
      # If the claim is a type-casted STI base class, try the module-specific classes
      if claim && ['SavedClaim::Burial', 'SavedClaim::Pension'].include?(claim.type)
        type_map = {
          'SavedClaim::Burial' => Burials::SavedClaim,
          'SavedClaim::Pension' => Pensions::SavedClaim
        }
        claim = claim.becomes(type_map[claim.type])
      end
      puts "Step 1: Start processing a #{claim.class.name} with id: #{claim_id}"
      # e.g. '21P-527EZ' -> '21p_527ez'; used to look up VANotify settings in Step 6.
      vanotify_service = claim&.form_id&.downcase&.gsub(/-/, '_')
      # Find InProgressForm with matching email and SSN from Claim
      puts 'Step 2: Searching InProgressForms against claim\'s email and SSN...'
      claim_email = claim.email || claim.open_struct_form&.claimantEmail # Pensions || Burial
      claim_veteran_ssn = claim.open_struct_form&.veteranSocialSecurityNumber
      ipf = InProgressForm.where(form_id: claim.form_id)
                          .where('updated_at >= ?', updated_time)
                          .find do |ipf|
        JSON.parse(ipf.form_data).then do |data|
          email = data['email'] || data['claimant_email']
          email == claim_email && data['veteran_social_security_number'] == claim_veteran_ssn
        end
      rescue
        # Unparseable form_data is simply treated as a non-match.
        false
      end
      if ipf
        puts "Found InProgressForm: #{ipf.id}"
        ipf_form_data = JSON.parse(ipf&.form_data, object_class: OpenStruct)
      end
      # Gather expected attachment GUIDs from the claim's form data
      updated_ipf_form_data = ipf_form_data
      if claim.respond_to?(:attachment_keys) && claim.respond_to?(:open_struct_form)
        delete_claim_array = []
        destroyed_names = []
        puts "Step 3: Sanitizing attachments for claim id: #{claim.id}"
        claim.attachment_keys.each do |key|
          guids = Array(claim.open_struct_form.send(key)).map do |att|
            att.try(:confirmationCode) || att.try(:confirmation_code)
          end
          attachments = PersistentAttachment.where(guid: guids)
          # delete_claim is nil on the first iteration and carried forward after.
          delete_claim, updated_ipf_form_data =
            sanitize_attachments_for_key(claim, key, attachments, updated_ipf_form_data, destroyed_names, delete_claim,
                                         dry_run)
          delete_claim_array << delete_claim
        end
      end
      if dry_run
        puts "[DRY RUN] Would update InProgressForm #{ipf&.id}"
      elsif updated_ipf_form_data
        puts "Step 4: Updating InProgressForm #{ipf&.id} with sanitized form data"
        ipf.update!(form_data: Common::HashHelpers.deep_to_h(updated_ipf_form_data).to_json)
      end
      # NOTE(review): delete_claim_array and destroyed_names stay nil when the
      # claim does not respond to attachment_keys, so `delete_claim_array.any?`
      # below would raise NoMethodError — confirm all processed claim types
      # implement attachment_keys/open_struct_form.
      if delete_claim_array.any?
        if claim.email.present?
          unique_emails_for_notification << claim.email
          data = JSON.parse(claim.form)
          if claim.form_id == '21P-527EZ'
            claim_type = 'Application for Veterans Pension (VA Form 21P-527EZ)'
            url = 'http://va.gov/pension/apply-for-veteran-pension-form-21p-527ez'
          else
            claim_type = 'Application for Veterans Burial (VA Form 21P-530EZ)'
            url = 'http://va.gov/burials-and-memorials/apply-burial-benefits-form-21p-530ez'
          end
          first_name = data.dig('claimantFullName', 'first') || data.dig('veteranFullName', 'first')
          # Template variables for the VANotify remediation email sent in Step 6.
          personalization = {
            first_name:,
            claim_type:,
            url:,
            file_count: destroyed_names.size,
            file_names: destroyed_names.map { |name| mask_file_name(name) }.join(",\n ")
          }
        end
        puts "Step 5: Destroying claim #{claim.id} due to invalid attachments"
        if dry_run
          puts "[DRY RUN] Would destroy SavedClaim #{claim.id}"
        else
          claim.destroy!
        end
      else
        puts "All attachments for SavedClaim #{claim.id} are valid and decryptable"
      end
    end
    # Send out emails to unique email list
    if unique_emails_for_notification.size.positive?
      scrubbed_emails = unique_emails_for_notification.to_a.map { |email| scrub_email(email) }.join(', ')
      puts "Step 6: Sending remediation email to unique email(s): #{scrubbed_emails}"
      unique_emails_for_notification.each do |email|
        if dry_run
          puts "[DRY RUN] Would send remediation email to #{scrub_email(email)}"
        else
          service_config = Settings.vanotify.services[vanotify_service]
          VANotify::EmailJob.perform_async(
            email,
            service_config.email.persistent_attachment_error.template_id,
            personalization,
            service_config.api_key
          )
        end
      end
    end
  end
end
|
0
|
code_files/vets-api-private
|
code_files/vets-api-private/rakelib/backfill_user_account_for_tud_accounts.rake
|
# frozen_string_literal: true
desc 'Backfill user account id records for TestUserDashboard::TudAccount'
task backfill_user_account_for_tud_accounts: :environment do
  # TudAccounts that have not yet been linked to a UserAccount.
  def get_nil_user_account_ids
    TestUserDashboard::TudAccount.where(user_account_id: nil)
  end

  # Logs how many TudAccounts are still missing a user_account_id.
  def nil_user_account_ids_count_message(nil_user_account_id_count)
    Rails.logger.info('[BackfillUserAccountForTudAccounts] TestUserDashboard::TudAccount ' \
                      "with user_account_id: nil, count: #{nil_user_account_id_count}")
  end

  Rails.logger.info('[BackfillUserAccountForTudAccounts] Starting rake task')
  starting_nil_user_account_ids = get_nil_user_account_ids
  nil_user_account_ids_count_message(starting_nil_user_account_ids.count)
  starting_nil_user_account_ids.each do |record|
    account = Account.find_by(uuid: record.account_uuid)
    # Resolve via ICN first, then fall back through UserVerification uuid columns.
    user_account = UserAccount.find_by(icn: account&.icn) ||
                   UserVerification.find_by(idme_uuid: record.idme_uuid)&.user_account ||
                   UserVerification.find_by(backing_idme_uuid: record.idme_uuid)&.user_account ||
                   UserVerification.find_by(logingov_uuid: record.logingov_uuid)&.user_account
    record.user_account_id = user_account&.id
    record.save!
  end
  Rails.logger.info('[BackfillUserAccountForTudAccounts] Finished rake task')
  nil_user_account_ids_count_message(get_nil_user_account_ids.count)
end
|
0
|
code_files/vets-api-private
|
code_files/vets-api-private/rakelib/jobs.rake
|
# frozen_string_literal: true
# Manually enqueue EducationForm background jobs, plus test-env-only helpers
# to clear today's run markers so the daily jobs can be rerun.
namespace :jobs do
  desc 'Create daily spool files'
  task create_daily_spool_files: :environment do
    EducationForm::CreateDailySpoolFiles.perform_async
  end

  desc 'Email daily year to date report'
  task create_daily_year_to_date_report: :environment do
    EducationForm::CreateDailyYearToDateReport.perform_async(Time.zone.today)
  end

  desc 'Process 10203 submissions for automated decision'
  task process_10203_submissions: :environment do
    EducationForm::Process10203Submissions.perform_async
  end

  desc 'Remove SpoolFileEvent rows for today so the create_daily_spool_files rake task can rerun'
  task reset_daily_spool_files_for_today: :environment do
    raise Common::Exceptions::Unauthorized if Settings.vsp_environment.eql?('production') # only allowed for test envs

    SpoolFileEvent.where('DATE(successful_at) = ?', Date.current).delete_all
  end

  desc 'Create daily excel files'
  task create_daily_excel_files: :environment do
    EducationForm::CreateDailyExcelFiles.perform_async
  end

  desc 'Remove ExcelFileEvent rows for today so the create_daily_excel_files rake task can rerun'
  task reset_daily_excel_files_for_today: :environment do
    raise Common::Exceptions::Unauthorized if Settings.vsp_environment.eql?('production') # only allowed for test envs

    ExcelFileEvent.where('DATE(successful_at) = ?', Date.current).delete_all
  end
end
|
0
|
code_files/vets-api-private
|
code_files/vets-api-private/rakelib/import_disability_contentions.rake
|
# frozen_string_literal: true
desc 'imports conditions into disability_contentions table'
# Reads a headered CSV (code, medical_term, lay_term) and upserts each row
# into disability_contentions keyed by code.
task :import_conditions, [:csv_path] => [:environment] do |_, args|
  raise 'No CSV path provided' unless args[:csv_path]
  # Fail fast with a clear message instead of CSV.foreach's raw ENOENT.
  raise "No CSV file at #{args[:csv_path]}" unless File.exist?(args[:csv_path])

  CSV.foreach(args[:csv_path], headers: true) do |row|
    # Upsert by code: create the row if needed, then refresh its terms.
    condition = DisabilityContention.find_or_create_by(code: row['code'])
    condition.update!(medical_term: row['medical_term'], lay_term: row['lay_term'])
  end
end
|
0
|
code_files/vets-api-private
|
code_files/vets-api-private/rakelib/yardoc.rake
|
# frozen_string_literal: true
desc 'run yardoc against changed files'
# Runs yardoc only over files changed in the current PR branch that fall under
# paths watched by the yardoc GitHub Action; exits non-zero on parse errors or
# incomplete documentation so the check can be made required.
task :yardoc do # rubocop:disable Rails/RakeEnvironment
  require 'rainbow'
  require 'yaml'
  head_sha = `git rev-parse --abbrev-ref HEAD`.chomp.freeze
  # GITHUB_BASE_REF points to the target branch for the pull request
  base_sha = "origin/#{ENV.fetch('GITHUB_BASE_REF', 'master')}"
  # git diff the glob list - only want to check the changed files
  globs = ['*.rb']
  globs = globs.map { |g| "'#{g}'" }.join(' ')
  cmd = "git diff #{base_sha}...#{head_sha} --name-only -- #{globs}"
  puts "\n#{cmd}\n"
  # filter to only ruby files
  # lots of false positives if yardoc is run on other files
  files = `#{cmd}`.split("\n").select { |f| %w[.rb .rake].include?(File.extname(f)) }
  if files.empty?
    puts Rainbow('Finished. No RUBY files changed.').yellow
    exit!
  end
  # only run on paths specified in GH action yaml
  yardoc_yaml = Rails.root.join('.github', 'workflows', 'yardoc.yml')
  config = YAML.load_file(yardoc_yaml)
  # true == 'on' in GH action yaml (YAML 1.1 parses the bare `on` key as boolean true)
  paths = config[true]['pull_request']['paths'].select do |path|
    files.find { |file| File.fnmatch(path, file) }
  end
  if paths.empty?
    puts Rainbow('Finished. No watched paths changed.').yellow
    exit!
  end
  paths = paths.map { |g| "'#{g}'" }.join(' ')
  cmd = "yardoc #{paths}"
  puts "#{cmd}\n\n"
  yardoc_output = `#{cmd}`.strip.split("\n")
  # non zero exit == parsing error
  # NOTE(review): $CHILD_STATUS comes from the 'English' library — presumably
  # loaded elsewhere in the app; confirm before running this standalone.
  if (yardoc_result = $CHILD_STATUS.exitstatus).positive?
    puts yardoc_output
    puts "\n"
    puts Rainbow('Failed. Documentation issues were found.').red
    exit(yardoc_result)
  end
  # 'fail' if not 100% - mark this task as required in github to block merging
  percentage = yardoc_output.last.strip[/\d+\.\d+/].to_f
  if percentage < 100.0
    cmd = "yard stats --list-undoc #{paths}"
    puts "#{cmd}\n\n"
    yardoc_stats = `#{cmd}`.strip.split("\n")
    puts yardoc_stats
    puts "\n"
    puts Rainbow('Warning. Documentation is missing.').yellow
    exit(1)
  end
  puts yardoc_output
  puts "\n"
  puts Rainbow('Passed. Everything looks documented!').green
end
|
0
|
code_files/vets-api-private
|
code_files/vets-api-private/rakelib/piilog_repl.rb
|
# frozen_string_literal: true
# run this by starting a rails console and running: require_relative 'rakelib/piilog_repl'
# see "spec/rakelib/piilog_repl/piilog_helpers_spec.rb" for more examples of using
# the PersonalInformationLogQueryBuilder
require_relative 'piilog_repl/piilog_helpers'

# Short console alias for building PersonalInformationLog queries interactively.
Q = PersonalInformationLogQueryBuilder
|
0
|
code_files/vets-api-private
|
code_files/vets-api-private/rakelib/lint.rake
|
# frozen_string_literal: true
require 'open3'
require './rakelib/support/shell_command'
desc 'shortcut to run all linting tools, at the same time.'
# Runs rubocop over the given files (or everything when none are given) and
# exits non-zero on lint failures.
task :lint, [:files] => [:environment] do |_, args|
  require 'rainbow'
  files = args[:files]
  # CI gets machine-readable output (JUnit XML + clang format) and parallel
  # execution; local runs get cop names plus autocorrect.
  opts = if ENV['CI']
           "-r rubocop/formatter/junit_formatter.rb \
--format RuboCop::Formatter::JUnitFormatter --out log/rubocop.xml \
--format clang \
--parallel"
         else
           '--display-cop-names --autocorrect'
         end
  # Honor .rubocop.yml excludes even when explicit files are passed.
  opts += ' --force-exclusion' if files.present?
  puts 'running rubocop...'
  rubocop_result = ShellCommand.run("rubocop #{opts} --color #{files}")
  puts "\n"
  if rubocop_result
    puts Rainbow('Passed. Everything looks stylish!').green
  else
    puts Rainbow('Failed. Linting issues were found.').red
    exit!(1)
  end
end
|
0
|
code_files/vets-api-private
|
code_files/vets-api-private/rakelib/rswag.rake
|
# frozen_string_literal: true
require 'fileutils'
# Directory holding the rswag spec files that generate appeals API docs.
APPEALS_API_DOCS_DIR = 'modules/appeals_api/spec/docs'
# Each appeals API (by module name) and the doc version generated for it.
APPEALS_API_DOCS = [
  { name: 'appealable_issues', version: 'v0' },
  { name: 'appeals_status', version: 'v1' },
  { name: 'decision_reviews', version: 'v2' },
  { name: 'higher_level_reviews', version: 'v0' },
  { name: 'legacy_appeals', version: 'v0' },
  { name: 'notice_of_disagreements', version: 'v0' },
  { name: 'supplemental_claims', version: 'v0' }
].freeze
# Just the API names, in the same order as APPEALS_API_DOCS.
APPEALS_API_NAMES = APPEALS_API_DOCS.pluck(:name).freeze
# Builds the list of swagger output file paths for every appeals API.
#
# @param dev [Boolean] when true, return the swagger_dev.json variants
# @return [Array<String>] one output path per entry in APPEALS_API_DOCS
def appeals_api_output_files(dev: false)
  filename = dev ? 'swagger_dev.json' : 'swagger.json'
  APPEALS_API_DOCS.map do |config|
    "modules/appeals_api/app/swagger/#{config[:name]}/#{config[:version]}/#{filename}"
  end
end
# Invokes the given rake tasks concurrently via the Parallel gem.
# @param task_names [Array<String>] fully-qualified rake task names
def run_tasks_in_parallel(task_names)
  Parallel.each(task_names) { |task_name| Rake::Task[task_name].invoke }
end
# Abbreviates a snake_case name to the first character of each word,
# e.g. 'higher_level_reviews' -> 'hlr'.
#
# @param name [String] snake_case identifier
# @return [String] the initials, concatenated
def abbreviate_snake_case_name(name)
  # Capture the first non-space character at the start and after each '_'.
  leading_chars = name.scan(/(?<=^|_)(\S)/)
  leading_chars.join
end
# Doc-generation entry points. All of these steer rswag's swaggerize task via
# the PATTERN / RAILS_MODULE / SWAGGER_DRY_RUN environment variables.
namespace :rswag do
  namespace :openapi do
    desc 'Generate rswag docs for all VA.gov APIs'
    task build: :environment do
      ENV['PATTERN'] = 'spec/rswag/v0/*_spec.rb'
      ENV['RAILS_MODULE'] = 'public'
      ENV['SWAGGER_DRY_RUN'] = '0'
      Rake::Task['rswag:specs:swaggerize'].invoke
    end
  end

  namespace :claims_api do
    desc 'Generate rswag docs by environment for the claims_api'
    task build: :environment do
      ENV['PATTERN'] = 'modules/claims_api/spec/requests/**/*_spec.rb'
      ENV['RAILS_MODULE'] = 'claims_api'
      ENV['SWAGGER_DRY_RUN'] = '0'
      %w[dev production].each do |environment|
        ENV['DOCUMENTATION_ENVIRONMENT'] = environment
        Rake::Task['rswag:specs:swaggerize'].invoke
        # v2 output is per-environment; v1 has a single file (env passed as nil).
        %w[v1 v2].each { |version| format_for_swagger(version, version.eql?('v2') ? environment : nil) }
        # Re-enable so swaggerize can run again for the next environment.
        Rake::Task['rswag:specs:swaggerize'].reenable
      end
    end
  end

  namespace :appeals_api do
    desc 'Generate production docs for all appeals APIs'
    task prod: :environment do
      generate_appeals_docs
    end

    desc 'Generate development docs for all appeals APIs'
    task dev: :environment do
      generate_appeals_docs(dev: true)
    end

    desc 'Generate all docs for all appeals APIs'
    task all: :environment do
      run_tasks_in_parallel(%w[rswag:appeals_api:prod rswag:appeals_api:dev])
    end
  end

  namespace :representation_management do
    desc 'Generate rswag docs for representation_management'
    task build: :environment do
      ENV['PATTERN'] = 'modules/representation_management/spec/requests/**/*_spec.rb'
      ENV['RAILS_MODULE'] = 'representation_management'
      ENV['SWAGGER_DRY_RUN'] = '0'
      Rake::Task['rswag:specs:swaggerize'].invoke
    end
  end
end
# Runs the rswag swaggerize task for the appeals APIs, then normalizes each
# output file to valid OAS v3.
#
# @param dev [Boolean] when true, generate the *_dev.json variants with
#   work-in-progress docs enabled
def generate_appeals_docs(dev: false)
  ENV['RAILS_MODULE'] = 'appeals_api'
  ENV['SWAGGER_DRY_RUN'] = '0'
  ENV['PATTERN'] = APPEALS_API_DOCS_DIR
  if dev
    ENV['RSWAG_ENV'] = 'dev'
    ENV['WIP_DOCS_ENABLED'] = Settings.modules_appeals_api.documentation.wip_docs&.join(',') || ''
  end
  begin
    Rake::Task['rswag:specs:swaggerize'].invoke
  rescue => e
    warn 'Rswag doc generation failed:'
    puts e.full_message(highlight: true, order: :top)
    exit 1
  end
  # Correct formatting on rswag output so that it matches the expected OAS format
  appeals_api_output_files(dev:).each { |file_path| rswag_to_oas!(file_path) }
end
# To validate null values for JSON fields correctly we use type: ['string', 'null'].
# Swagger displays that as "stringnull", so to keep the docs readable we remove the 'null' before writing.
# Post-processes a generated claims_api swagger file: for v2, strips 'null'
# from type arrays and enum lists, then pretty-prints the JSON back in place.
#
# @param version [String] 'v1' or 'v2'
# @param env [String, nil] environment subdirectory; only used for v2 paths
def format_for_swagger(version, env = nil)
  path = "app/swagger/claims_api/#{version}/swagger.json"
  path = "app/swagger/claims_api/#{version}/#{env}/swagger.json" if version.eql?('v2')
  swagger_file_path = ClaimsApi::Engine.root.join(path)
  oas = JSON.parse(File.read(swagger_file_path.to_s))
  clear_null_types!(oas) if version == 'v2'
  clear_null_enums!(oas) if version == 'v2'
  File.write(swagger_file_path, JSON.pretty_generate(oas))
end
# Recursively applies +transformer+ to every key/value pair of a nested
# hash/array structure, returning a new structure.
#
# The transformer receives (key, value, root) where +root+ is the key path
# from the top of the structure down to (but excluding) the current key.
#
# Fixes a bug in the previous implementation: Array input hit the
# `is_a?(Hash)` guard and returned nil, so arrays nested inside arrays were
# silently replaced with nil. Arrays are now walked element-wise.
#
# @param node [Hash, Array] structure to transform
# @param transformer [#call] callable of (key, value, root) -> new value
# @param root [Array] key path accumulated so far
# @return [Hash, Array, nil] transformed copy; nil for non-Hash/Array input
def deep_transform(node, transformer:, root: [])
  if node.is_a?(Array)
    return node.map do |val|
      next deep_transform(val, root: root.dup, transformer:) if val.is_a?(Hash) || val.is_a?(Array)

      val
    end
  end
  return unless node.is_a?(Hash)

  node.map do |key, v|
    # Transform the value first, then recurse into whatever it became.
    v = transformer.call(key, v, root)
    v = deep_transform(v, root: root.dup.push(key), transformer:) if v.is_a?(Hash) || v.is_a?(Array)
    [key, v]
  end.to_h
end
# Removes 'null' from every array-valued 'type' under 'paths', collapsing
# single-element results to a scalar. Mutates +data+ in place.
#
# @param data [Hash] parsed swagger document
# @return [Hash] the same hash, transformed
def clear_null_types!(data)
  transformer = lambda do |key, value, path|
    next value unless key == 'type' && value.is_a?(Array) && path[0] == 'paths'

    remaining = value.excluding('null')
    remaining.size > 1 ? remaining : remaining[0]
  end
  data.replace deep_transform(data, transformer:)
end
# Drops nil entries from every 'enum' array found under a 'schema' key path.
# Mutates +data+ in place.
#
# @param data [Hash] parsed swagger document
# @return [Hash] the same hash, transformed
def clear_null_enums!(data)
  transformer = lambda do |key, value, path|
    next value unless key == 'enum' && value.is_a?(Array) && path.include?('schema')

    value.compact
  end
  data.replace deep_transform(data, transformer:)
end
# Does file manipulation to make an rswag-output json file compatible with OAS v3
# Rswag still generates `basePath`, which is invalid in OAS v3 (https://github.com/rswag/rswag/issues/318)
# Strips any line containing `basePath` from the file at +filepath+,
# rewriting through a temp file and moving it back over the original.
#
# @param filepath [String] path to the swagger JSON file to fix in place
def rswag_to_oas!(filepath)
  temp_path = "/tmp/#{SecureRandom.urlsafe_base64}.json"
  File.open(temp_path, 'w') do |outfile|
    File.foreach(filepath) do |line|
      outfile.puts line unless line.include?('basePath')
    end
  end
  FileUtils.mv(temp_path, filepath)
end
|
0
|
code_files/vets-api-private
|
code_files/vets-api-private/rakelib/appeals_api_event_subscriber.rake
|
# frozen_string_literal: true
require 'appeals_api/data_migrations/event_subscription_subscriber'

namespace :data_migration do
  desc 'subscribe event callbacks to corresponding topics - AppealsApi'
  task appeals_api_event_subscriber: :environment do
    # Delegates entirely to the AppealsApi data migration.
    AppealsApi::DataMigrations::EventSubscriptionSubscriber.run
  end
end
|
0
|
code_files/vets-api-private
|
code_files/vets-api-private/rakelib/id_card_announcement_subscriptions.rake
|
# frozen_string_literal: true
# Export and report on ID card announcement subscription emails.
namespace :id_card_announcement_subscriptions do
  # Example: To export the second set of 100 non-VA emails, provide offset=100
  # $ rake id_card_announcement_subscriptions:export_non_va[100]
  # rubocop:disable Style/FormatStringToken
  desc 'Export distinct email addresses'
  task :export_non_va, %i[offset limit] => [:environment] do |_, args|
    # Paginate via offset/limit; defaults export the first 100 rows.
    offset = (args[:offset] || 0).to_i
    limit = (args[:limit] || 100).to_i
    emails = IdCardAnnouncementSubscription.non_va.offset(offset)
                                           .limit(limit)
                                           .order(:created_at)
                                           .pluck(:email)
    puts emails.join("\n")
  end

  # Example: To export 50 VA emails after 200 have already been processed:
  # $ rake id_card_announcement_subscriptions:export_va[200,50]
  desc 'Export distinct VA email addresses'
  task :export_va, %i[offset limit] => [:environment] do |_, args|
    offset = (args[:offset] || 0).to_i
    limit = (args[:limit] || 100).to_i
    emails = IdCardAnnouncementSubscription.va.offset(offset)
                                           .limit(limit)
                                           .order(:created_at)
                                           .pluck(:email)
    puts emails.join("\n")
  end

  desc 'Export report'
  task report: :environment do
    # Cap query time at 60s for the counts below.
    ActiveRecord::Base.connection.execute 'set statement_timeout to 60000'
    email_count = IdCardAnnouncementSubscription.count
    va_email_count = IdCardAnnouncementSubscription.va.count
    printf "%-20s %d\n", 'Email Count:', email_count
    printf "%-20s %d\n", 'VA Email Count:', va_email_count
  end
  # rubocop:enable Style/FormatStringToken
end
|
0
|
code_files/vets-api-private
|
code_files/vets-api-private/rakelib/backfill_user_account_for_tud_account_availability_logs.rake
|
# frozen_string_literal: true
desc 'Backfill user account id records for TestUserDashboard::TudAccountAvailabilityLogs'
task backfill_user_account_for_tud_account_availability_logs: :environment do
  # Availability logs that have not yet been linked to a UserAccount.
  def get_nil_user_account_ids
    TestUserDashboard::TudAccountAvailabilityLog.where(user_account_id: nil)
  end

  # Logs how many availability logs are still missing a user_account_id.
  def nil_user_account_ids_count_message(nil_user_account_id_count)
    Rails.logger.info('[BackfillUserAccountForTudAccountAvailabilityLogs] ' \
                      'TestUserDashboard::TudAccountAvailabilityLog ' \
                      "with user_account_id: nil, count: #{nil_user_account_id_count}")
  end

  Rails.logger.info('[BackfillUserAccountForTudAccountAvailabilityLogs] Starting rake task')
  starting_nil_user_account_ids = get_nil_user_account_ids
  nil_user_account_ids_count_message(starting_nil_user_account_ids.count)
  starting_nil_user_account_ids.each do |record|
    account = Account.find_by(uuid: record.account_uuid)
    user_account = UserAccount.find_by(icn: account&.icn)
    unless user_account
      tud_account = TestUserDashboard::TudAccount.find_by(account_uuid: record.account_uuid)
      # Guard against a missing TudAccount: previously `tud_account.idme_uuid`
      # raised NoMethodError and aborted the whole backfill mid-run. The guard
      # also avoids nil-keyed UserVerification lookups matching wrong rows.
      if tud_account
        user_account = UserVerification.find_by(idme_uuid: tud_account.idme_uuid)&.user_account ||
                       UserVerification.find_by(backing_idme_uuid: tud_account.idme_uuid)&.user_account ||
                       UserVerification.find_by(logingov_uuid: tud_account.logingov_uuid)&.user_account
      end
    end
    record.user_account_id = user_account&.id
    record.save!
  end
  Rails.logger.info('[BackfillUserAccountForTudAccountAvailabilityLogs] Finished rake task')
  nil_user_account_ids_count_message(get_nil_user_account_ids.count)
end
|
0
|
code_files/vets-api-private
|
code_files/vets-api-private/rakelib/edu.rake
|
# frozen_string_literal: true
namespace :edu do
  desc 'Given a confirmation number, print a spool file entry'
  task :print, [:id] => [:environment] do |_t, args|
    raise 'need to give an id. edu:print[{id}]' if args[:id].blank?

    # Keep only the digits from the supplied id before looking up the claim.
    id = args[:id].gsub(/\D/, '').to_i
    app = EducationBenefitsClaim.find(id)
    puts EducationForm::Forms::Base.build(app).text
  end
end
|
0
|
code_files/vets-api-private
|
code_files/vets-api-private/rakelib/secure_messaging.rake
|
# frozen_string_literal: true
namespace :sm do
  desc 'set up test users'
  # Correlates a mock vets.gov test user with an MHV account: resolves the
  # user's ICN via MPI, creates/loads the user verification, accepts the
  # current terms of use if needed, and primes the MHV account-creation cache.
  task setup_test_user: :environment do
    user_number = ENV.fetch('user_number', nil)
    mhv_correlation_id = ENV.fetch('mhv_id', nil)
    unless user_number && mhv_correlation_id
      raise 'Run this task like this: bundle exec rake sm:setup_test_user user_number=210 mhv_id=22336066'
    end

    Rails.logger.info("Correlating mock user: vets.gov.user+#{user_number}@gmail.com to MHV ID: #{mhv_correlation_id}")
    idme_uuid = get_idme_uuid(user_number)
    # Resolve the user's ICN from MPI using the ID.me UUID.
    icn = MPI::Service.new.find_profile_by_identifier(
      identifier: idme_uuid,
      identifier_type: MPI::Constants::IDME_UUID
    )&.profile&.icn
    Rails.logger.info("ID.me UUID: #{idme_uuid}")
    Rails.logger.info("ICN: #{icn}")
    user_verification = Login::UserVerifier.new(
      login_type: SAML::User::IDME_CSID,
      auth_broker: nil,
      mhv_uuid: nil,
      idme_uuid:,
      dslogon_uuid: nil,
      logingov_uuid: nil,
      icn:
    ).perform
    user_account = user_verification.user_account
    Rails.logger.info('User verification: ')
    Rails.logger.info(user_verification.attributes)
    Rails.logger.info('User Account: ')
    Rails.logger.info(user_account.attributes)
    # Accept the current terms of use if the account hasn't already.
    if user_account.needs_accepted_terms_of_use?
      Rails.logger.info('Accepting Terms of Use...')
      user_account.terms_of_use_agreements.new(
        agreement_version: IdentitySettings.terms_of_use.current_version
      ).accepted!
    end
    Rails.logger.info('Accepted TOU:')
    Rails.logger.info(user_account.terms_of_use_agreements.current.last.attributes)
    Rails.logger.info('Caching MHV account... (this is the important part)')
    cache_mhv_account(icn, mhv_correlation_id)
    Rails.logger.info('Cached MHV account:')
    Rails.logger.info(Rails.cache.read("mhv_account_creation_#{icn}"))
  end

  # Reads the ID.me UUID for a mock user from the betamocks credential cache.
  # @param number [String] the vets.gov test user number
  def get_idme_uuid(number)
    path = File.join(Settings.betamocks.cache_dir, 'credentials', 'idme', "vetsgovuser#{number}.json")
    json = JSON.parse(File.read(path))
    json['uuid']
  rescue => e
    puts 'Encountered an error while trying to source ID.me UUID. Is the user number you provided legitimate?'
    raise e
  end

  # Writes a premium MHV account entry into the Rails cache under the key
  # downstream code reads ("mhv_account_creation_<icn>").
  def cache_mhv_account(icn, mhv_correlation_id)
    Rails.cache.write(
      "mhv_account_creation_#{icn}",
      {
        user_profile_id: mhv_correlation_id,
        premium: true,
        champ_va: true,
        patient: true,
        sm_account_created: true,
        message: 'This cache entry was created by rakelib/secure_messaging.rake'
      },
      expires_in: 1.year
    )
  rescue => e
    puts "Something went wrong while trying to cache mhv_account for user with ICN: #{icn}."
    raise e
  end
end
|
0
|
code_files/vets-api-private
|
code_files/vets-api-private/rakelib/mockdata_synchronize.rake
|
# frozen_string_literal: true
namespace :mockdata_synchronize do
  desc 'Iterate through mockdata records and update them with actual MPI calls'
  task :mpi_profile_icn, [:icn] => [:environment] do |_, args|
    require 'net/http'
    require 'uri'
    require 'openssl'

    # Fetches a live MPI profile for +icn+ and writes it to the cache file.
    def create_cache_from_profile(icn, file_path)
      response = create_curl(icn)
      save_response(response, file_path)
    end

    # Serializes the HTTP response into the betamocks YAML cache format.
    def save_response(env, file_path)
      response = {
        method: :post,
        body: Nokogiri::XML(env.body).root.to_xml,
        headers: {
          connection: 'close',
          date: Time.zone.now.strftime('%a, %d %b %Y %H:%M:%S %Z'),
          'content-type' => 'text/xml'
        },
        status: 200
      }
      File.write(file_path, response.to_yaml)
    end

    # Issues the SOAP "find person by ICN" request against the MVI endpoint.
    def create_curl(icn)
      uri = URI.parse(IdentitySettings.mvi.url)
      request = Net::HTTP::Post.new(uri)
      request.content_type = 'text/xml;charset=UTF-8'
      request['Connection'] = 'close'
      request['User-Agent'] = 'Vets.gov Agent'
      request['Soapaction'] = 'PRPA_IN201305UV02'
      template = Liquid::Template.parse(File.read(File.join('config',
                                                            'mpi_schema',
                                                            'mpi_find_person_icn_template.xml')))
      xml = template.render!('icn' => icn)
      request.body = xml
      # NOTE(review): VERIFY_NONE disables TLS certificate verification —
      # presumably acceptable for this internal tooling; confirm before reuse.
      req_options = { use_ssl: uri.scheme == 'https', verify_mode: OpenSSL::SSL::VERIFY_NONE }
      Net::HTTP.start(uri.hostname, uri.port, req_options) { |http| http.request(request) }
    end

    def update_mpi_record_for_icn(icn, file_name)
      create_cache_from_profile(icn, file_name)
      puts "Updated record for #{file_name}"
    end

    # With an explicit ICN refresh just that record; otherwise refresh every
    # cached profile, sleeping between requests.
    if args[:icn].present?
      icn = args[:icn]
      file_name = "#{Settings.betamocks.cache_dir}/mvi/profile_icn/#{icn}.yml"
      update_mpi_record_for_icn(icn, file_name)
    else
      Dir.glob("#{Settings.betamocks.cache_dir}/mvi/profile_icn/*.yml").each do |file_name|
        icn = File.basename(file_name, '.yml')
        update_mpi_record_for_icn(icn, file_name)
        sleep 1
      end
    end
  end
end
|
0
|
code_files/vets-api-private
|
code_files/vets-api-private/rakelib/migrate_old_async_transaction_data.rake
|
# frozen_string_literal: true
namespace :VA526ez_submit_transaction do
  # Deletes all legacy EVSS VA526ez submit transactions. The previous desc
  # ('Rotate the encryption keys') described a different task entirely and is
  # corrected here to match the behavior.
  desc 'Remove old AsyncTransaction::EVSS::VA526ezSubmitTransaction records'
  task remove_old_data: :environment do
    AsyncTransaction::Base.where(type: 'AsyncTransaction::EVSS::VA526ezSubmitTransaction').delete_all
  end
end
|
0
|
code_files/vets-api-private
|
code_files/vets-api-private/rakelib/breakers_outage.rake
|
# frozen_string_literal: true
# Inspect breakers services and manually force outages on/off.
namespace :breakers do
  desc 'List out all breakers-compatible service names'
  task list_services: :environment do
    services = Breakers.client.services.map(&:name)
    puts "Available Services:\n#{services}"
  end

  # e.g. bundle exec rake breakers:begin_forced_outage service=EVSS/Documents
  desc 'Begin a forced outage (requires: service=<service_name>)'
  task begin_forced_outage: :environment do
    services = Breakers.client.services.map(&:name)
    service = ENV.fetch('service', nil)
    # Use the already-fetched `service` local (previously re-read ENV['service'])
    raise ArgumentError, "[#{service}] is not a valid service in: #{services}" unless services.include?(service)

    # `find` replaces the previous `select { ... }.first` — same lookup, idiomatic.
    Breakers.client.services.find { |s| s.name == service }.begin_forced_outage!
    puts "Successfully forced outage of: [#{service}]"
  end

  # e.g. bundle exec rake breakers:end_forced_outage service=EVSS/Documents
  desc 'End a forced outage (requires: service=<service_name>)'
  task end_forced_outage: :environment do
    services = Breakers.client.services.map(&:name)
    service = ENV.fetch('service', nil)
    raise ArgumentError, "[#{service}] is not a valid service in: #{services}" unless services.include?(service)

    Breakers.client.services.find { |s| s.name == service }.end_forced_outage!
    puts "Successfully ended forced outage of: [#{service}]"
  end
end
|
0
|
code_files/vets-api-private
|
code_files/vets-api-private/rakelib/swagger.rake
|
# frozen_string_literal: true
namespace :swagger do
  desc 'Given a json schema file generates a swagger block: `bundle exec rake swagger:generate_block[letters.json]`'
  task :generate_block, [:json_schema_file] => [:environment] do |_, args|
    raise IOError, 'No json-schema file provided' unless args[:json_schema_file]

    # Schemas are looked up relative to spec/support/schemas.
    schema_path = Rails.root.join('spec', 'support', 'schemas', args[:json_schema_file])
    raise IOError, "No json-schema file at #{schema_path}" unless File.exist? schema_path

    schema = File.read(schema_path)
    json = JSON.parse(schema)
    # Emit a swagger-DSL skeleton to stdout for copy/paste.
    puts "\n-----START BLOCK-----\n\n"
    render_required(json) if json.key?('required')
    render_properties json
    puts "\n-----END BLOCK-----\n"
  end
end
# Recursively prints a `property` DSL line for each entry under 'properties',
# descending into nested objects and arrays.
#
# @param json [Hash] json-schema node that may contain a 'properties' map
# @param indent [Integer] current nesting depth (one space per level)
def render_properties(json, indent = 0)
  return unless json.respond_to?(:key?) && json.key?('properties')

  json['properties'].each do |key, value|
    render_property(key, value, indent)
    render_required(value, indent + 1)
    render_properties(value, indent + 1) if value.key?('properties')
    render_items(value, indent + 1) if value.key?('items')
    # Nested object/array properties opened a `do` block that must be closed.
    puts "#{render_indent(indent)}end" if requires_end?(value)
  end
end
# Prints a single `property :name` line. Nested object/array properties open
# a `do` block; leaf properties get a TODO example placeholder.
#
# @param key [String] property name
# @param value [Hash] property's json-schema definition
# @param indent [Integer] current nesting depth
def render_property(key, value, indent)
  type = value['type']
  enum = value['enum']
  items = value['items']
  prop = "#{render_indent(indent)}property :#{key}"
  # Array properties get their type from the nested `items` block instead.
  prop += ", type: #{render_type(type, enum)}" unless items
  prop += if requires_end?(value)
            ' do'
          else
            ", example: 'TODO'"
          end
  puts prop
end
# Whether the property opens a `do` block (nested object or array) that the
# caller must close with `end`.
#
# @param value [Hash] property's json-schema definition
# @return [Boolean]
def requires_end?(value)
  %w[properties items].any? { |nested_key| value.key?(nested_key) }
end
# Prints the `items` block for an array property; '$ref' items produce a
# placeholder reference, inline items render their own properties.
#
# @param value [Hash] array property definition containing 'items'
# @param indent [Integer] current nesting depth
def render_items(value, indent = 0)
  items = value['items']
  if items.key? '$ref'
    render_ref(indent)
  else
    render_item(indent, items)
  end
end
# Prints an `items` block referencing another schema; the '$ref' target is
# left as a TODO for the author to fill in.
#
# @param indent [Integer] current nesting depth
def render_ref(indent)
  puts "#{render_indent(indent)}items do"
  puts "#{render_indent(indent)}key :type, :array"
  puts "#{render_indent(indent)} key :'$ref', 'TODO'"
  puts "#{render_indent(indent)}end"
end
# Prints an inline `items` block, recursing into the item schema's properties.
#
# @param indent [Integer] current nesting depth
# @param items [Hash] the item json-schema definition
def render_item(indent, items)
  puts "#{render_indent(indent)}items do"
  render_properties(items, indent + 1)
  puts "#{render_indent(indent)}end"
end
# Prints `key :required, [...]` when the schema node lists required fields.
#
# @param value [Hash] json-schema node that may contain 'required'
# @param indent [Integer] current nesting depth
def render_required(value, indent = 0)
  puts "#{render_indent(indent)}key :required, #{value['required'].map(&:to_sym)}" if value['required']
end
# Renders the swagger DSL type fragment for a property.
#
# @param type [String, Array<String>, nil] json-schema type(s)
# @param enum [Array, nil] allowed values, if the schema declares an enum
# @return [String, Array<Symbol>] DSL fragment (e.g. ":string"), or the symbol
#   array itself when multiple types remain after normalization
def render_type(type, enum)
  type = [*type].map(&:to_sym)
  type = [:object] if type == %i[object null] # [object, null] is valid json-schema but swagger throws error
  return type if type.count > 1

  if enum
    # Previously `enum.map { |x| x }.join(' ')` — the map was a no-op.
    ":string, enum: %w(#{enum.join(' ')})"
  else
    ":#{type.first}"
  end
end
# Returns the indentation prefix for the given nesting depth — one space per
# level. Replaces the roundabout `Array.new(indent).map { ' ' }.join`.
#
# @param indent [Integer] nesting depth
# @return [String] +indent+ spaces
def render_indent(indent)
  ' ' * indent
end
|
0
|
code_files/vets-api-private
|
code_files/vets-api-private/rakelib/in_progress_forms.rake
|
# frozen_string_literal: true
# Reporting tasks over InProgressForm: last page reached, error pages,
# abandonment, validation errors, and pending FSR submissions.
# Date arguments default to roughly the last month (see forms_with_args).
namespace :form_progress do
  desc 'Get the last page a user completed before leaving the form'
  # bundle exec rake form_progress:return_url[21-526EZ,2020-10-06,2020-11-06]
  task :return_url, %i[form_id start_date end_date] => [:environment] do |_, args|
    forms = forms_with_args(args)
    # Counts grouped by return_url, ordered by frequency.
    data = forms.select(InProgressForm::RETURN_URL_SQL)
                .group(InProgressForm::RETURN_URL_SQL).order(Arel.sql('count(*)')).count
    puts data
  end
  desc 'Get counts of last page a user completed before validation errors'
  # bundle exec rake form_progress:error_url[21-526EZ,2020-10-06,2020-11-06]
  task :error_url, %i[form_id start_date end_date] => [:environment] do |_, args|
    forms = forms_with_args(args)
    data = forms.has_errors.select(InProgressForm::RETURN_URL_SQL)
                .group(InProgressForm::RETURN_URL_SQL).order(Arel.sql('count(*)')).count
    puts data
  end
  desc 'Get counts of last page a user completed before abandoning (without errors)'
  # bundle exec rake form_progress:abandon_url[21-526EZ,2020-10-06,2020-11-06]
  task :abandon_url, %i[form_id start_date end_date] => [:environment] do |_, args|
    forms = forms_with_args(args)
    data = forms.has_no_errors.select(InProgressForm::RETURN_URL_SQL)
                .group(InProgressForm::RETURN_URL_SQL).order(Arel.sql('count(*)')).count
    puts data
  end
  desc 'Get validation errors for a given form return_url'
  # bundle exec rake form_progress:errors_for_return_url[21-526EZ,2020-10-06,2020-11-06,/review-and-submit]
  task :errors_for_return_url, %i[form_id start_date end_date return_url] => [:environment] do |_, args|
    forms = forms_with_args(args)
    data = forms.has_errors.return_url(args[:return_url]).pluck(:metadata)
    puts data
  end
  desc 'Get the metadata for users who got an error_message on submission'
  # bundle exec rake form_progress:error_messages[21-526EZ,2020-10-06,2020-11-06]
  task :error_messages, %i[form_id start_date end_date] => [:environment] do |_, args|
    forms = forms_with_args(args)
    data = forms.has_error_message.pluck(:metadata)
    puts data
  end
  desc 'Get metadata for in-progress FSR forms that haven\'t been submitted'
  task :pending_fsr, %i[start_date end_date] => [:environment] do |_, args|
    start_date = args[:start_date]&.to_date || 31.days.ago.utc
    end_date = args[:end_date]&.to_date || 1.day.ago
    forms = InProgressForm.unsubmitted_fsr.where(updated_at: [start_date.beginning_of_day..end_date.end_of_day])
    puts '------------------------------------------------------------'
    puts "* #{forms.length} unsubmitted FSR form#{forms.length == 1 ? '' : 's'} from #{start_date} to #{end_date} *"
    puts '------------------------------------------------------------'
    puts forms.pluck :metadata
  end
  # Shared query builder: prints the reporting-window banner and returns the
  # InProgressForm scope for [form_id, start_date, end_date] (with defaults).
  def forms_with_args(args)
    form_id = args[:form_id] || '21-526EZ'
    start_date = args[:start_date]&.to_date || 31.days.ago.utc
    end_date = args[:end_date]&.to_date || 1.day.ago
    puts '------------------------------------------------------------'
    puts "* #{form_id} from #{start_date} to #{end_date} *"
    puts '------------------------------------------------------------'
    InProgressForm.where(updated_at: [start_date.beginning_of_day..end_date.end_of_day], form_id:)
  end
end
|
0
|
code_files/vets-api-private
|
code_files/vets-api-private/rakelib/lockbox_migration.rake
|
# frozen_string_literal: true
# Lockbox encryption maintenance tasks: key-rotation re-encryption and the
# one-time attr_encrypted -> lockbox migration.
namespace :lockbox do
  desc 're-encrypt existing db attributes after key rotation'
  task re_encrypt_records: :environment do
    # Leaf models that declare lockbox-encrypted attributes.
    models = ApplicationRecord.descendants.select do |model|
      model.descendants.empty? && model.try(:lockbox_attributes) && !model.lockbox_attributes.empty?
    end
    # Iterate the classes directly instead of the former name -> constantize
    # round-trip, which re-resolved each class three times per iteration.
    models.each do |model|
      encrypted_attributes = model.lockbox_attributes.keys
      puts "re-encrypting model..... #{model} Total records: #{model.count}"
      Lockbox.rotate(model, attributes: encrypted_attributes)
    end
  end
  desc 'migrate from attr_encrypted to lockbox'
  task migrate_db: :environment do
    # Leaf models still carrying attr_encrypted attributes.
    models = ApplicationRecord.descendants.select do |model|
      model.descendants.empty? && !model.encrypted_attributes.empty?
    end
    models.each do |model|
      puts "migrating model..... #{model}"
      Lockbox.migrate(model)
    end
  end
end
|
0
|
code_files/vets-api-private
|
code_files/vets-api-private/rakelib/security.rake
|
# frozen_string_literal: true
require 'open3'
require './rakelib/support/shell_command'
# Runs the Brakeman static-analysis scan and bundler-audit dependency check,
# then prints a colorized pass/fail summary. Exits non-zero on any failure
# (including a failed bundle-audit database update).
desc 'shortcut to run all linting tools, at the same time.'
task security: :environment do
  require 'rainbow'
  puts 'running Brakeman security scan...'
  brakeman_passed = ShellCommand.run(
    'brakeman --exit-on-warn --run-all-checks --confidence-level=2 --format=plain'
  )
  puts 'running bundle-audit to check for insecure dependencies...'
  exit!(1) unless ShellCommand.run('bundle-audit update')
  audit_passed = ShellCommand.run('bundle-audit check')
  puts "\n"
  unless brakeman_passed && audit_passed
    puts Rainbow('Failed. Security vulnerabilities were found.').red
    exit!(1)
  end
  puts Rainbow('Passed. No obvious security vulnerabilities.').green
end
|
0
|
code_files/vets-api-private
|
code_files/vets-api-private/rakelib/service_tags.rake
|
# frozen_string_literal: true
# Audits controllers for the Traceable concern (service tags). The CI task
# treats missing tags on files changed in the PR as errors; everything else
# is reported as a warning.
namespace :service_tags do
  # Files changed in the current PR (supplied by CI); empty when unset.
  def changed_files
    ENV['CHANGED_FILES'] || []
  end

  # Resolves a route to its controller class and defining source file.
  # Returns { name:, path: } or nil when the controller cannot be resolved.
  def controller_info_from_route(route)
    return unless route.defaults[:controller]

    controller_name = "#{route.defaults[:controller].camelize}Controller"
    return unless Object.const_defined?(controller_name)

    controller_class = controller_name.constantize
    exclusive_methods = controller_class.instance_methods(false)
    # Controllers defining no methods of their own have no file to report.
    return if exclusive_methods.empty?

    method_name = exclusive_methods.first
    file_path = controller_class.instance_method(method_name).source_location.first
    relative_path = Pathname.new(file_path).relative_path_from(Rails.root).to_s
    {
      name: controller_name,
      path: relative_path
    }
  end

  # Unique controller infos for all resolvable routes.
  def controllers_from_routes(routes)
    routes.map { |route| controller_info_from_route(route) }.compact.uniq { |info| info[:name] }
  end

  # True when the controller or an ancestor declares a service tag via Traceable.
  def valid_service_tag?(klass)
    klass.ancestors.any? do |ancestor|
      ancestor.included_modules.include?(Traceable) &&
        ancestor.respond_to?(:trace_service_tag) &&
        ancestor.try(:trace_service_tag).present?
    end
  end

  # Partitions untagged controllers into [errors, warnings]: errors for files
  # changed in this PR, warnings otherwise.
  def find_invalid_controllers(controllers)
    # Hoisted out of the loop: previously rebuilt (and re-frozen) on every
    # iteration. Framework/health-check controllers are exempt.
    excluded_prefixes = %w[ActionMailbox:: ActiveStorage:: ApplicationController OkComputer:: Rails::].freeze
    errors = []
    warnings = []
    controllers.each do |controller|
      next if excluded_prefixes.any? { |prefix| controller[:name].start_with?(prefix) }

      klass = controller[:name].constantize
      next if valid_service_tag?(klass)

      if changed_files.include?(controller[:path])
        errors << controller
      else
        warnings << controller
      end
    end
    [errors, warnings]
  end

  desc 'Audit controllers for Traceable concern usage locally (outside of the CI pipeline)'
  task audit_controllers: :environment do
    main_app_controllers = controllers_from_routes(Rails.application.routes.routes)
    engine_controllers = Rails::Engine.subclasses.flat_map { |engine| controllers_from_routes(engine.routes.routes) }
    _, warnings = find_invalid_controllers(main_app_controllers + engine_controllers)
    if warnings.any?
      puts "\n\nThe following #{warnings.count} controllers are missing service tags. Please associate all " \
           'controllers with a new or existing service catalog entry using the service_tag method from the ' \
           "Traceable concern:\n\n"
      warnings.each do |controller|
        puts controller[:name]
      end
    else
      puts 'All controllers have a service tag!'
    end
  end

  desc 'Audit controllers for Traceable concern usage within a CI pipeline'
  task audit_controllers_ci: :environment do
    main_app_controllers = controllers_from_routes(Rails.application.routes.routes)
    engine_controllers = Rails::Engine.subclasses.flat_map { |engine| controllers_from_routes(engine.routes.routes) }
    errors, warnings = find_invalid_controllers(main_app_controllers + engine_controllers)
    # GitHub Actions workflow-command annotations.
    errors.each do |controller|
      puts "::error file=#{controller[:path]}::#{controller[:name]} is missing a service tag. " \
           'Please associate with a service catalog entry using the Traceable#service_tag method.'
    end
    warnings.each do |controller|
      puts "::warning file=#{controller[:path]}::#{controller[:name]} is missing a service tag. " \
           'Please associate with a service catalog entry using the Traceable#service_tag method.'
    end
    exit(errors.any? ? 1 : 0)
  end
end
|
0
|
code_files/vets-api-private
|
code_files/vets-api-private/rakelib/enable_online_submission_for_vso.rake
|
# frozen_string_literal: true
# Enables online VA Form 21-22 submission for organizations matching the
# given POA codes, e.g.:
#   bundle exec rake "vso:enable_online_submission_for_vso[A1,B2]"
namespace :vso do
  desc 'Enable online submission of VA Form 21-22 for the given POA code(s)'
  task :enable_online_submission_for_vso, [:poa_codes] => :environment do |_t, args|
    raise ArgumentError, 'POA codes required (comma-separated)' if args[:poa_codes].blank?
    # args.extras captures comma-separated codes beyond the first bracket arg.
    poa_codes = [args[:poa_codes], *args.extras].compact.uniq
    Rails.logger.tagged('rake:vso:enable_online_submission_for_vso') do
      Rails.logger.info("Received POA codes: #{poa_codes.join(', ')}")
      Rails.logger.info('Enabling online submission for matching organization(s)...')
      result = AccreditedRepresentativePortal::EnableOnlineSubmission2122Service.call(poa_codes:)
      Rails.logger.info("Enabled online submission for #{result} organization(s).")
    end
  end
end
|
0
|
code_files/vets-api-private
|
code_files/vets-api-private/rakelib/education_benefits_submission.rake
|
# frozen_string_literal: true
desc 'update all records in education_benefits_submissions table'
# One-off data fix: force `vettec` to false on every submission record.
task education_benefits_submission: :environment do
  # `update_all` is being used because the `vettec` field will reset to `false`
  # as the form isn't live on staging and prod
  # rubocop:disable Rails/SkipsModelValidations
  EducationBenefitsSubmission.update_all(vettec: false)
  # rubocop:enable Rails/SkipsModelValidations
end
|
0
|
code_files/vets-api-private
|
code_files/vets-api-private/rakelib/backfill_new_id_for_digital_dispute_submissions.rake
|
# frozen_string_literal: true
desc 'Backfill new_id column for digital_dispute_submissions records using sequence'
# One-off backfill: assigns each row missing new_id the next value from the
# digital_dispute_submissions_new_id_seq sequence, in batches of 1000.
task backfill_new_id_for_digital_dispute_submissions: :environment do
  Rails.logger.info('[BackfillNewIdForDigitalDisputeSubmissions] Starting rake task')
  initial_count = DebtsApi::V0::DigitalDisputeSubmission.where(new_id: nil).count
  Rails.logger.info("[BackfillNewIdForDigitalDisputeSubmissions] Records to backfill: #{initial_count}")
  updated_count = 0
  DebtsApi::V0::DigitalDisputeSubmission.where(new_id: nil).find_in_batches(batch_size: 1000) do |batch|
    batch.each do |submission|
      # nextval is fetched per row so ids stay unique across batches.
      new_id_value = ActiveRecord::Base.connection.select_value(
        "SELECT nextval('digital_dispute_submissions_new_id_seq')"
      )
      submission.update_column(:new_id, new_id_value) # rubocop:disable Rails/SkipsModelValidations
      updated_count += 1
    end
  end
  Rails.logger.info("[BackfillNewIdForDigitalDisputeSubmissions] Finished - updated #{updated_count} records")
  remaining_count = DebtsApi::V0::DigitalDisputeSubmission.where(new_id: nil).count
  Rails.logger.info("[BackfillNewIdForDigitalDisputeSubmissions] Remaining null new_id: #{remaining_count}")
end
|
0
|
code_files/vets-api-private
|
code_files/vets-api-private/rakelib/connectivity.rake
|
# frozen_string_literal: true
# Checks backend connectivity to the various VA machines. Run with
# `RAILS_ENV=production bundle exec rake connectivity:all`
# Also does sanity check to ensure that logs directory is writeable
# Allows running on development machines
# Log to stdout so connectivity results are visible when run interactively.
Rails.logger = Logger.new($stdout)
# Raised by checks (e.g. the MVI task below) whose failure mode does not
# itself raise — lets `check` report it via the shared rescue path.
class ConnectivityError < StandardError; end
# Convenience function that runs a connectivity example and prints out
# success/error messages
# Runs a connectivity probe (the block) and prints a success or failure line.
# All StandardErrors are swallowed so one failing check never aborts the run.
def check(name, config)
  begin
    yield
  rescue => e
    puts "#{name} connection unsuccessful for #{config}!"
    puts " - Error encountered: `#{e}`"
    return
  end
  puts "#{name} connection success for #{config}."
end
# Connectivity checks for each backend dependency. Each task wraps its probe
# in `check`, so failures print a message instead of aborting the run.
namespace :connectivity do
  desc 'Check connectivity to all backend services'
  task all: %i[db edu evss gi hca logs mvi redis rx sm statsd]
  desc 'Check DB'
  task db: :environment do
    check 'DB', Settings.database_url do
      # Any query proves the connection; the result is discarded.
      EVSSClaim.all.length
    end
  end
  desc 'Check Edu SFTP'
  task edu: :environment do
    check 'Edu SFTP', Settings.edu.sftp.host do
      Net::SFTP.start(
        Settings.edu.sftp.host,
        Settings.edu.sftp.user,
        password: Settings.edu.sftp.pass,
        port: Settings.edu.sftp.port,
        non_interactive: true
      )
    end
  end
  desc 'Check EVSS'
  task evss: :environment do
    check 'EVSS', Settings.evss.url do
      EVSS::ClaimsService.new({}).all_claims
      # Should return an XML 403 response, which Faraday fails parsing,
      # since it expects JSON
      puts "EVSS connection super success for #{Settings.evss.url}!"
    rescue Faraday::ParsingError
      puts "EVSS connection success for #{Settings.evss.url}."
    rescue => e
      puts "EVSS connection unsuccessful for #{Settings.evss.url}!"
      puts " - Error encountered: `#{e}`"
    end
  end
  desc 'Check GI'
  task gi: :environment do
    check 'GIDS', Settings.gids.url do
      GI::Client.new.get_autocomplete_suggestions(term: 'university')
    end
  end
  desc 'Check HCA'
  task hca: :environment do
    check 'HCA', Settings.hca.endpoint do
      HCA::Service.new.health_check
    end
  end
  desc 'Check that logs are writeable'
  task logs: :environment do
    if Rails.root.join('log').writable?
      puts 'Logging directory is writeable.'
    else
      puts 'Logging directory is not writeable!'
    end
  end
  desc 'Check MVI'
  task mvi: :environment do
    check 'MVI', IdentitySettings.mvi.url do
      # Fabricated LOA3 user, only used to exercise the MPI lookup.
      user = User.new(
        first_name: 'John',
        last_name: 'Smith',
        middle_name: 'W',
        birth_date: '1945-01-25',
        gender: 'M',
        ssn: '555443333',
        email: 'foo@bar.com',
        uuid: SecureRandom.uuid,
        loa: {
          current: LOA::THREE,
          highest: LOA::THREE
        }
      )
      raise ConnectivityError if user.mpi_status == :server_error
    end
  end
  desc 'Check Redis'
  task redis: :environment do
    check 'Redis', "#{Settings.redis.host}:#{Settings.redis.port}" do
      # The key need not exist; a successful round-trip is enough.
      $redis.get('asdf')
    end
  end
  desc 'Check Rx'
  task rx: :environment do
    check 'Rx', Settings.mhv.rx.host do
      Rx::Client.new(session: { user_id: '12210827' }).authenticate
    end
  end
  desc 'Check SM'
  task sm: :environment do
    require 'sm/client'
    check 'SM', Settings.mhv.sm.host do
      SM::Client.new(session: { user_id: '12210827' }).authenticate
    end
  end
  desc 'Check StatsD'
  task statsd: :environment do
    # Configuration-only check; no network traffic is sent.
    if ENV['STATSD_ADDR'].present?
      puts "StatsD configured for #{ENV['STATSD_ADDR']}."
    else
      puts 'StatsD not configured!'
    end
  end
end
|
0
|
code_files/vets-api-private
|
code_files/vets-api-private/rakelib/form_upload_add.rake
|
# frozen_string_literal: true
# Wires a new form into the Form Upload Tool by editing three source files in
# place: FormProfile::ALL_FORMS / FORM_ID_TO_CLASS, the VAForm page-count
# CONFIGS, and VANotify's FormUploadEmail SUPPORTED_FORMS. Idempotent: each
# insertion is skipped when the entry already exists.
namespace :form_upload do
  desc 'Add a new form to the Form Upload Tool and integrate with VANotify'
  task :add, %i[form_number min_pages max_pages] => :environment do |_, args|
    require 'fileutils'
    require 'yaml'
    unless args.form_number && args.min_pages && args.max_pages
      abort 'Usage: rake "form_upload:add[FORM_NUMBER,MIN_PAGES,MAX_PAGES]"'
    end
    form_number = args.form_number.strip
    min_pages = args.min_pages.to_i
    max_pages = args.max_pages.to_i
    # 1. Add to form_upload array in FormProfile::ALL_FORMS
    form_profile_path = 'app/models/form_profile.rb'
    form_upload_marker = 'form_upload: %w['
    form_upload_entry = " #{form_number}-UPLOAD"
    inserted = false
    lines = File.readlines(form_profile_path)
    File.open(form_profile_path, 'w') do |f|
      lines.each do |line|
        f.puts line
        # Insert right after the marker line, only if not already present.
        if !inserted && line.include?(form_upload_marker) && lines.none? { |l| l.include?(form_upload_entry) }
          f.puts form_upload_entry
          inserted = true
        end
      end
    end
    # 1b. Add to FORM_ID_TO_CLASS
    form_id_to_class_marker = 'FORM_ID_TO_CLASS = {'
    form_id_to_class_entry = " '#{form_number}-UPLOAD' => ::FormProfiles::FormUpload,"
    lines = File.readlines(form_profile_path)
    inserted = false
    inside_hash = false
    output_lines = []
    entries = []
    lines.each do |line|
      if line.include?(form_id_to_class_marker)
        inside_hash = true
        output_lines << line
        next
      end
      if inside_hash
        # End of hash
        if line.strip == '}.freeze'
          inside_hash = false
          # Insert the new entry in order
          # Collect all upload entries, add the new one if not present, sort, then output
          entries << form_id_to_class_entry unless entries.any? { |l| l.include?(form_id_to_class_entry.strip) }
          # Sort numerically by the digits embedded in the form number.
          entries = (entries + []).uniq.sort_by do |entry|
            entry.match(/'([\dA-Za-zP-]+)-UPLOAD'/)[1].gsub(/\D/, '').to_i
          end
          # Remove trailing comma from last entry for Rubocop compliance
          entries = entries.map.with_index do |e, i|
            i == entries.length - 1 ? e.chomp.sub(/,+\s*$/, '') : e
          end
          # Remove blank line if present before closing }.freeze
          output_lines += entries.reject(&:empty?).map { |e| e.end_with?("\n") ? e : "#{e}\n" }
          output_lines << line
          next
        end
        # Collect upload entries
        if line.include?("-UPLOAD' => ::FormProfiles::FormUpload,")
          entries << line.chomp unless entries.any? { |l| l.strip == line.strip }
          next
        end
      end
      output_lines << line
    end
    File.write(form_profile_path, output_lines.join)
    # 2. Add to PersistentAttachments::VAForm::CONFIGS
    va_form_path = 'app/models/persistent_attachments/va_form.rb'
    va_form_entry = " '#{form_number}' => { max_pages: #{max_pages}, min_pages: #{min_pages} },"
    lines = File.readlines(va_form_path)
    inserted = false
    File.open(va_form_path, 'w') do |f|
      lines.each_with_index do |line, _|
        # Look for the closing bracket of the merge hash
        if !inserted && line.strip == '}' && lines.none? { |l| l.include?(va_form_entry) }
          f.puts va_form_entry
          inserted = true
        end
        f.puts line
      end
    end
    # 3. Add to SUPPORTED_FORMS in FormUploadEmail
    form_upload_email_path = 'modules/simple_forms_api/app/services/simple_forms_api/notification/form_upload_email.rb'
    supported_forms_marker = 'SUPPORTED_FORMS = %w['
    supported_forms_entry = " #{form_number}"
    inserted = false
    lines = File.readlines(form_upload_email_path)
    File.open(form_upload_email_path, 'w') do |f|
      lines.each do |line|
        f.puts line
        if !inserted && line.include?(supported_forms_marker) && lines.none? { |l| l.include?(supported_forms_entry) }
          f.puts supported_forms_entry
          inserted = true
        end
      end
    end
    puts "\n#{'-' * 72}"
    puts "Form #{form_number} added to Form Upload Tool with min_pages: #{min_pages}, max_pages: #{max_pages}."
    puts 'Please review and commit the changes and download the PDF into modules/simple_forms_api/spec/fixtures/pdfs/'
    puts "#{'-' * 72}\n"
  end
end
|
0
|
code_files/vets-api-private
|
code_files/vets-api-private/rakelib/mvi.rake
|
# frozen_string_literal: true
require 'csv'
require 'mpi/models/mvi_profile'
require 'identity/parsers/gc_ids'
namespace :mvi do
desc 'Given user attributes, run a find candidate query'
# Builds a throwaway LOA3 identity from env vars and pretty-prints the MPI
# profile lookup result. Required env vars are listed in the error message.
task find: :environment do
  unless valid_user_vars
    raise(
      ArgumentError,
      'Run the task with all required attributes: bundle exec rake mvi:find first_name="John" middle_name="W" ' \
      'last_name="Smith" birth_date="1945-01-25" gender="M" ssn="555443333"'
    )
  end
  begin
    uuid = SecureRandom.uuid
    identity = UserIdentity.new(
      uuid:,
      first_name: ENV.fetch('first_name', nil),
      middle_name: ENV.fetch('middle_name', nil),
      last_name: ENV.fetch('last_name', nil),
      birth_date: ENV.fetch('birth_date', nil),
      gender: ENV.fetch('gender', nil),
      ssn: ENV.fetch('ssn', nil),
      email: 'foo@bar.com',
      loa: {
        current: LOA::THREE,
        highest: LOA::THREE
      }
    )
    identity.save
    user = User.new(
      uuid:,
      identity:
    )
    user.last_signed_in = Time.now.utc
    # Pretty-print the MPI profile for the constructed user.
    pp MPIData.for_user(user).profile
  rescue => e
    puts "User query failed: #{e.message}"
  end
end
# BUG FIX: the original wrapped this `task` definition in a block passed to
# `desc` (`desc '...' do ... end`). Rake's `desc` ignores blocks, so the
# task was never actually defined. The block wrapper has been removed.
desc 'Given a CSV with ICNs, append attributes needed to stage the user as ID.me LOA3'
task :idme_saml_stage_attributes, [:csvfile] => [:environment] do |_, args|
  raise 'No input CSV provided' unless args[:csvfile]

  # Writes the original columns plus looked-up identity attributes to
  # "<csvfile>.out".
  CSV.open("#{args[:csvfile]}.out", 'w', write_headers: true) do |dest|
    existing_headers = CSV.open(args[:csvfile], &:readline)
    appended_headers = %w[first_name middle_name last_name gender birth_date ssn address]
    CSV.open(args[:csvfile], headers: true) do |source|
      dest << (existing_headers + appended_headers)
      source.each do |row|
        user_identity = UserIdentity.new(
          uuid: SecureRandom.uuid,
          email: 'fakeemail@needed_for_object_validation.gov',
          mhv_icn: row['icn'], # HACK: because the presence of this attribute results in ICN based MVI lookup
          loa: {
            current: LOA::THREE,
            highest: LOA::THREE
          }
        )
        # Not persisting any users, user_identities, or caching MVI by doing it this way.
        user = User.new(uuid: user_identity.uuid)
        user.instance_variable_set(:@identity, user_identity)
        mpi = MPIData.for_user(user)
        response = mpi.send(:mpi_service).find_profile_by_identifier(identifier: user_identity.mhv_icn,
                                                                     identifier_type: MPI::Constants::ICN)
        appended_headers.each do |column_name|
          case column_name
          when 'address'
            row['address'] = response.profile.address.to_json
          when 'first_name'
            row['first_name'] = response.profile.given_names.first
          when 'middle_name'
            # Everything after the first given name, space-joined.
            row['middle_name'] = response.profile.given_names.to_a[1..]&.join(' ')
          when 'last_name'
            row['last_name'] = response.profile.family_name
          else
            row[column_name] = response.profile.send(column_name.to_sym)
          end
        end
        dest << row
      end
    end
  end
end
desc 'Build mock MVI yaml database for users in given CSV'
task :mock_database, [:csvfile] => [:environment] do |_, args|
  raise 'No input CSV provided' unless args[:csvfile]
  csv = CSV.open(args[:csvfile], headers: true)
  csv.each_with_index do |row, i|
    # Normalize the CSV's ISO-8601 timestamp to YYYY-MM-DD.
    bd = Time.iso8601(row['birth_date']).strftime('%Y-%m-%d')
    user = User.new(
      first_name: row['first_name'],
      last_name: row['last_name'],
      middle_name: row['middle_name'],
      birth_date: bd,
      gender: row['gender'],
      ssn: row['ssn'],
      email: row['email'],
      uuid: SecureRandom.uuid,
      loa: { current: LOA::THREE, highest: LOA::THREE }
    )
    # NOTE(review): presumably mpi_profile? triggers (and caches) the MVI
    # lookup that writes the mock yaml — confirm against User/MPIData.
    unless user.mpi_profile?
      puts "Row #{i} #{row['first_name']} #{row['last_name']}: No MVI profile"
      next
    end
  rescue => e
    # Per-row rescue so one bad row doesn't abort the whole import.
    puts "Row #{i} #{row['first_name']} #{row['last_name']}: #{e.message}"
  end
end
desc "Given a ssn update a mocked user's correlation ids"
# Rewrites the correlation ids inside the betamocks MVI cache file for `ssn`.
# At least one id env var (icn, edipi, participant_id, mhv_ids,
# vha_facility_ids) must be supplied.
task update_ids: :environment do
  ssn = ENV.fetch('ssn', nil)
  raise ArgumentError, 'ssn is required, usage: `rake mvi:update_ids ssn=111223333 icn=abc123`' unless ssn

  ids = {}
  ids['icn'] = ENV.fetch('icn', nil)
  ids['edipi'] = ENV.fetch('edipi', nil)
  ids['participant_id'] = ENV.fetch('participant_id', nil)
  ids['mhv_ids'] = ENV['mhv_ids']&.split
  ids['vha_facility_ids'] = ENV['vha_facility_ids']&.split
  # 5343578988
  if ids.values.all?(&:nil?)
    message = 'at least one correlation id is required, e.g. `rake mvi:update_ids ssn=111223333 icn=abc123`'
    raise ArgumentError, message
  end
  path = File.join(Settings.betamocks.cache_dir, 'mvi', 'profile', "#{ssn}.yml")
  yaml = YAML.safe_load(File.read(path))
  # BUG FIX: the original read `xml` inside its own guard before assigning it
  # (`xml = yaml[:body].dup.prepend(...) unless xml.match?(...)`), so `xml`
  # was always nil and the task crashed with NoMethodError. Inspect the
  # cached body itself and only prepend the declaration when it is missing.
  xml = yaml[:body].dup
  xml = xml.prepend('<?xml version="1.0" encoding="UTF-8"?>') unless xml.match?(/^<\?xml/)
  yaml[:body] = update_ids(xml, ids)
  File.write(path, yaml.to_yaml)
  puts 'ids updated!'
end
desc 'Create missing cache files from mock_mvi_responses.yml'
task migrate_mock_data: :environment do
  yaml = YAML.safe_load(
    File.read(File.join('config', 'mvi_schema', 'mock_mvi_responses.yml'))
  )
  template = Liquid::Template.parse(
    File.read(File.join('config', 'mvi_schema', 'mvi_template.xml'))
  )
  # One cache file per ssn key; only files that don't exist are generated.
  yaml['find_candidate'].each do |k, v|
    cache_file = File.join(Settings.betamocks.cache_dir, 'mvi', 'profile', "#{k}.yml")
    unless File.exist? cache_file
      puts "user with ssn #{k} not found, generating cache file"
      profile = MPI::Models::MviProfile.new(v)
      create_cache_from_profile(cache_file, profile, template)
    end
  end
  puts 'cache files migrated!'
end
end
# Rewrites the correlation <id> elements inside a cached MVI response XML.
# Parses the existing ids, strips VA/DoD-rooted <id> nodes, merges in the
# non-nil values from `ids` (string-keyed hash), and re-serializes the doc.
def update_ids(xml, ids)
  doc = Ox.load(xml)
  # The <patient> element that owns the correlation <id> children.
  el = doc.locate(
    'env:Envelope/env:Body/idm:PRPA_IN201306UV02/controlActProcess/subject/registrationEvent/subject1/patient'
  ).first
  # Anonymous class just to borrow the GCIds parsing helpers.
  temp_parse_class = Class.new { extend Identity::Parsers::GCIds }
  current_ids = temp_parse_class.parse_xml_gcids(el.locate('id'))
  current_ids[:participant_id] = current_ids[:vba_corp_id]
  # Drop existing VA- and DoD-rooted ids; they are rebuilt below.
  el.nodes.delete_if do |n|
    [Identity::Parsers::GCIds::VA_ROOT_OID, Identity::Parsers::GCIds::DOD_ROOT_OID].include? n.attributes[:root]
  end
  new_ids = {
    icn: ids['icn'], edipi: ids['edipi'], participant_id: ids['participant_id'],
    mhv_ids: ids['mhv_ids'], vha_facility_ids: ids['vha_facility_ids']
  }
  new_ids.compact!
  current_ids.merge!(new_ids)
  updated_ids_element(current_ids, el)
  Ox.dump(doc)
end
# Appends one <id> element per known correlation id to the patient element.
# The %s in each pattern is replaced with the id value by create_element.
def updated_ids_element(ids, el)
  el.nodes << create_element(ids[:icn], :correlation, '%s^NI^200M^USVHA^P') if ids[:icn]
  el.nodes << create_element(ids[:edipi], :edipi, '%s^NI^200DOD^USDOD^A') if ids[:edipi]
  el.nodes << create_element(ids[:participant_id], :correlation, '%s^PI^200CORP^USVBA^A') if ids[:participant_id]
  el.nodes.concat create_multiple_elements(ids[:mhv_ids], '%s^PI^200MH^USVHA^A') if ids[:mhv_ids]
  el.nodes.concat create_multiple_elements(ids[:vha_facility_ids], '123456^PI^%s^USVHA^A') if ids[:vha_facility_ids]
end
# Builds an <id> element of the given type whose extension attribute is
# `pattern` with the id substituted for %s.
def create_element(id, type, pattern)
  create_root_id(type).tap { |element| element[:extension] = pattern % id }
end
# One correlation <id> element per id, all sharing the same pattern.
def create_multiple_elements(ids, pattern)
  ids.map do |correlation_id|
    create_element(correlation_id, :correlation, pattern)
  end
end
# Builds a bare <id> element whose root OID matches the id type: the DoD OID
# for EDIPIs, the VA OID for everything else.
def create_root_id(type)
  element = Ox::Element.new('id')
  element[:root] = if type == :edipi
                     Identity::Parsers::GCIds::DOD_ROOT_OID
                   else
                     Identity::Parsers::GCIds::VA_ROOT_OID
                   end
  element
end
# Renders the MVI response template for `profile` and writes it to
# `cache_file` as a betamocks-style cached HTTP response hash (YAML).
def create_cache_from_profile(cache_file, profile, template)
  xml = template.render!('profile' => profile.as_json.stringify_keys)
  # Inject the profile's correlation ids into the rendered XML.
  xml = update_ids(xml, profile.as_json)
  response = {
    method: :post,
    body: xml,
    headers: {
      connection: 'close',
      date: Time.now.utc.strftime('%a, %d %b %Y %H:%M:%S %Z'),
      'content-length' => xml.bytesize,
      'content-type' => 'text/xml',
      'set-cookie' => '',
      'x-powered-by' => 'Servlet/2.5 JSP/2.1'
    },
    status: 200
  }
  File.write(cache_file, response.to_yaml)
end
# True(-thy) when every env var needed for an mvi:find query is present and
# birth_date parses as YYYY-MM-DD.
def valid_user_vars
  date_ok = validate_date(ENV.fetch('birth_date', nil))
  required = %w[first_name middle_name last_name gender ssn]
  date_ok && required.all? { |var| ENV.fetch(var, nil) }
end
# Validates that `s` is a YYYY-MM-DD date string that also parses as a real
# time. Prints the failure reason and returns false instead of raising
# (nil input is caught by the rescue as well).
def validate_date(s)
  # \A/\z anchor the whole string; the unanchored original accepted any
  # string merely containing a date (e.g. 'x1945-01-25y').
  raise ArgumentError, 'Date string must be of format YYYY-MM-DD' unless s.match?(/\A\d{4}-\d{2}-\d{2}\z/)

  Time.parse(s).utc
  true
rescue => e
  puts e.message
  false
end
|
0
|
code_files/vets-api-private
|
code_files/vets-api-private/rakelib/in_progress_forms_status_change.rake
|
# frozen_string_literal: true
# Thin wrapper so the InProgressForm status-default migration can be run via
# rake: bundle exec rake data_migration:in_progress_forms_status
namespace :data_migration do
  task in_progress_forms_status: :environment do
    DataMigrations::InProgressFormStatusDefault.run
  end
end
|
0
|
code_files/vets-api-private
|
code_files/vets-api-private/rakelib/test_user_account.rake
|
# frozen_string_literal: true
namespace :test_user_account do
  desc 'Load the test user accounts from CSV'
  # Usage: bundle exec rake "test_user_account:load[/path/to/accounts.csv]"
  task :load, [:path] => [:environment] do |_t, args|
    # One CreateTestUserAccount call per CSV row (headers required).
    CSV.foreach(args[:path], headers: true) do |row|
      TestUserDashboard::CreateTestUserAccount.new(row).call
    end
  end
end
|
0
|
code_files/vets-api-private
|
code_files/vets-api-private/rakelib/evss.rake
|
# frozen_string_literal: true
desc 'retry failed evss jobs'
# One-off: re-enqueues EVSS::DocumentUpload jobs that died with S3 NoSuchKey
# after the 2017-09-20 release, then removes them from the Sidekiq dead set.
task evss_retry_jobs: :environment do
  RELEASE_TIME = Time.zone.parse('2017-09-20T21:59:58.486Z')
  ERROR_CLASS = 'Aws::S3::Errors::NoSuchKey'
  Sidekiq::DeadSet.new.each do |job|
    if job.klass == 'EVSS::DocumentUpload'
      # Sidekiq stores created_at as epoch seconds.
      created_at = DateTime.strptime(job['created_at'].to_s, '%s')
      if created_at >= RELEASE_TIME && job['error_class'] == ERROR_CLASS
        EVSS::DocumentUpload.perform_async(*job.args)
        job.delete
      end
    end
  end
end
# Reporting/export tasks around EVSS lookup failures.
namespace :evss do
  desc 'print GIBS not found users in CSV format for last n days with a limit, usage: rake evss:gibs_not_found[7,100]'
  task :gibs_not_found, %i[days limit] => [:environment] do |_, args|
    args.with_defaults(days: 7, limit: 100)
    result = PersonalInformationLog
             .where('created_at >= ?', args[:days].to_i.day.ago)
             .where(error_class: 'EVSS::GiBillStatus::NotFound')
             .limit(args[:limit].to_i)
    result.each_with_index do |r, i|
      user = JSON.parse(r.data['user'])
      # Header row on the first record only.
      puts user.keys.push('created_at').join(',') if i.zero?
      puts user.values.push(r.created_at).join(',')
    end
  end

  # Typo fix in the task description: 'addresss' -> 'address'.
  desc 'export EDIPIs users with invalid address, usage: rake evss:export_invalid_address_edipis[/export/path.csv]'
  task :export_invalid_address_edipis, [:csv_path] => [:environment] do |_, args|
    raise 'No CSV path provided' unless args[:csv_path]

    CSV.open(args[:csv_path], 'wb') do |csv|
      csv << %w[edipi created_at]
      InvalidLetterAddressEdipi.find_each do |i|
        csv << [
          i.edipi,
          i.created_at.iso8601
        ]
      end
    end
  end

  desc 'export post 911 not found users for the last week, usage: rake evss:export_post_911_not_found[/export/path.csv]'
  task :export_post_911_not_found, [:file_path] => [:environment] do |_, args|
    raise 'No JSON file path provided' unless args[:file_path]

    # One JSON document per line.
    File.open(args[:file_path], 'w+') do |f|
      PersonalInformationLog.where(error_class: 'EVSS::GiBillStatus::NotFound').last_week.find_each do |error|
        f.puts(error.data.to_json)
      end
    end
  end
end
|
0
|
code_files/vets-api-private
|
code_files/vets-api-private/rakelib/decision_reviews_recovery_emails.rake
|
# frozen_string_literal: true
require 'csv'
require 'stringio'
require 'decision_reviews/v1/constants'
namespace :decision_reviews do
namespace :remediation do
# Process evidence recovery emails
# Sends "evidence recovery" follow-up emails via VA Notify for failed
# evidence uploads identified by Lighthouse upload id. Uploads are skipped
# when no failure notification was sent, no AppealSubmission exists, or no
# email address is on file. When dry_run is true, logs what would be sent
# without calling VA Notify. Returns { stats:, results: } for the run; all
# output also goes to output_buffer when one is provided.
def self.process_evidence_recovery_emails(lighthouse_upload_ids, dry_run, output_buffer = nil) # rubocop:disable Metrics/MethodLength,Metrics/AbcSize
  # Helper to log to both console and buffer
  log = lambda do |message|
    puts message
    output_buffer&.puts message
  end
  stats = {
    processed: 0,
    sent: 0,
    skipped: 0,
    errors: 0
  }
  results = {
    emails_sent: [],
    skipped: [],
    errors: []
  }
  return { stats:, results: } if lighthouse_upload_ids.empty?

  log.call "\n#{'📧 ' * 40}"
  log.call 'SENDING EVIDENCE RECOVERY EMAILS'
  log.call '📧 ' * 40
  # Get template ID from settings
  template_id = Settings.vanotify.services.benefits_decision_review.template_id.evidence_recovery_email
  if template_id.blank?
    log.call "\n❌ ERROR: Evidence recovery email template ID not configured"
    log.call 'Please set: vanotify__services__benefits_decision_review__template_id__evidence_recovery_email'
    return { stats:, results: }
  end
  log.call "Template ID: #{template_id}"
  evidence_uploads = AppealSubmissionUpload.where(lighthouse_upload_id: lighthouse_upload_ids).includes(
    appeal_submission: %i[saved_claim_sc saved_claim_hlr saved_claim_nod user_account]
  )
  log.call "\nFound #{evidence_uploads.count} AppealSubmissionUpload records"
  evidence_uploads.each do |upload|
    stats[:processed] += 1
    begin
      # Check if failure notification was sent
      if upload.failure_notification_sent_at.blank?
        stats[:skipped] += 1
        log.call "\n ⚠️ Upload ##{upload.id}: No failure notification sent - skipping"
        results[:skipped] << { id: upload.id, reason: 'No failure notification sent' }
        next
      end
      # Get associated submission
      submission = upload.appeal_submission
      unless submission
        stats[:skipped] += 1
        log.call "\n ⚠️ Upload ##{upload.id}: No AppealSubmission found - skipping"
        results[:skipped] << { id: upload.id, reason: 'No AppealSubmission found' }
        next
      end
      # Get email address
      email_address = submission.current_email_address
      if email_address.blank?
        stats[:skipped] += 1
        log.call "\n ⚠️ Upload ##{upload.id}: No email address - skipping"
        results[:skipped] << { id: upload.id, reason: 'No email address' }
        next
      end
      # Get user info
      mpi_profile = submission.get_mpi_profile
      first_name = mpi_profile&.given_names&.first || 'Veteran'
      # Format dates
      failure_notification_date = upload.failure_notification_sent_at.strftime('%B %d, %Y')
      date_submitted = upload.created_at.strftime('%B %d, %Y')
      # Get filename
      filename = upload.masked_attachment_filename || 'your evidence'
      log.call "\n Processing Upload ##{upload.id}"
      log.call " First name: #{first_name}"
      # BUG FIX: this interpolation was broken ('#(unknown)'); the masked
      # filename computed above was never logged.
      log.call " Filename: #{filename}"
      log.call " Date submitted: #{date_submitted}"
      log.call " Failure notification sent: #{failure_notification_date}"
      if dry_run
        log.call ' [DRY RUN] Would send evidence recovery email'
        stats[:sent] += 1
        results[:emails_sent] << { id: upload.id, dry_run: true }
      else
        # Send email via VA Notify
        appeal_type = submission.type_of_appeal
        reference = "#{appeal_type}-evidence-recovery-#{upload.lighthouse_upload_id}"
        callback_options = {
          callback_metadata: {
            email_type: :evidence_recovery,
            service_name: DecisionReviews::V1::APPEAL_TYPE_TO_SERVICE_MAP[appeal_type],
            function: 'recovered evidence upload follow up email',
            submitted_appeal_uuid: submission.submitted_appeal_uuid,
            lighthouse_upload_id: upload.lighthouse_upload_id,
            email_template_id: template_id,
            reference:,
            statsd_tags: ["service:#{DecisionReviews::V1::APPEAL_TYPE_TO_SERVICE_MAP[appeal_type]}",
                          'function:evidence_recovery_email']
          }
        }
        vanotify_service = VaNotify::Service.new(
          Settings.vanotify.services.benefits_decision_review.api_key,
          callback_options
        )
        vanotify_service.send_email(
          email_address:,
          template_id:,
          personalisation: {
            'first_name' => first_name,
            'failure_notification_sent_at' => failure_notification_date,
            'filename' => filename,
            'date_submitted' => date_submitted
          }
        )
        stats[:sent] += 1
        log.call ' ✅ Email sent'
        results[:emails_sent] << { id: upload.id }
      end
    rescue => e
      stats[:errors] += 1
      # Redact anything that looks like an email before logging/recording.
      error_msg = e.message.gsub(/\b[A-Za-z0-9._%+-]+@[A-Za-z0-9.-]+\.[A-Z|a-z]{2,}\b/, '[EMAIL_REDACTED]')
      log.call " ❌ Error for Upload #{upload.id}: #{error_msg}"
      results[:errors] << { id: upload.id, error: error_msg }
    end
  end
  { stats:, results: }
end
# Sends a "form recovery" follow-up email for each AppealSubmission whose
# original submission previously failed (and was notified about) but has
# since been recovered.
#
# @param appeal_submission_ids [Array<Integer>] AppealSubmission ids to process
# @param dry_run [Boolean] when true, logs what would be sent without calling VA Notify
# @param output_buffer [StringIO, nil] optional buffer that mirrors all console output
# @return [Hash] { stats: { processed:, sent:, skipped:, errors: },
#                  results: { emails_sent:, skipped:, errors: } }
def self.process_form_recovery_emails(appeal_submission_ids, dry_run, output_buffer = nil) # rubocop:disable Metrics/MethodLength
  # Helper to log to both console and buffer
  log = lambda do |message|
    puts message
    output_buffer&.puts message
  end
  stats = { processed: 0, sent: 0, skipped: 0, errors: 0 }
  results = { emails_sent: [], skipped: [], errors: [] }
  return { stats:, results: } if appeal_submission_ids.empty?

  log.call "\n#{'📧 ' * 40}"
  log.call 'SENDING FORM RECOVERY EMAILS'
  log.call '📧 ' * 40
  # Get template ID from settings; bail out early with an actionable message.
  template_id = Settings.vanotify.services.benefits_decision_review.template_id.form_recovery_email
  if template_id.blank?
    log.call "\n❌ ERROR: Form recovery email template ID not configured"
    log.call 'Please set: vanotify__services__benefits_decision_review__template_id__form_recovery_email'
    return { stats:, results: }
  end

  log.call "Template ID: #{template_id}"
  form_submissions = AppealSubmission.where(id: appeal_submission_ids)
                                     .includes(:saved_claim_sc, :saved_claim_hlr, :saved_claim_nod, :user_account)
  log.call "\nFound #{form_submissions.count} AppealSubmission records"
  form_submissions.each do |submission|
    stats[:processed] += 1
    begin
      # Only follow up on submissions we previously sent a failure notification for.
      if submission.failure_notification_sent_at.blank?
        stats[:skipped] += 1
        log.call "\n ⚠️ Submission ##{submission.id}: No failure notification sent - skipping"
        results[:skipped] << { id: submission.id, reason: 'No failure notification sent' }
        next
      end
      # Get email address
      email_address = submission.current_email_address
      if email_address.blank?
        stats[:skipped] += 1
        log.call "\n ⚠️ Submission ##{submission.id}: No email address - skipping"
        results[:skipped] << { id: submission.id, reason: 'No email address' }
        next
      end
      # Get user info (fall back to a generic salutation when MPI has no name)
      mpi_profile = submission.get_mpi_profile
      first_name = mpi_profile&.given_names&.first || 'Veteran'
      # Map the appeal type to its human-readable name and form id
      decision_review_type, decision_review_form_id =
        case submission.type_of_appeal
        when 'HLR' then ['Higher-Level Review', 'VA Form 20-0996']
        when 'SC' then ['Supplemental Claim', 'VA Form 20-0995']
        when 'NOD' then ['Notice of Disagreement (Board Appeal)', 'VA Form 10182']
        else ['Decision Review', 'Decision Review Form']
        end
      # Format dates
      failure_notification_date = submission.failure_notification_sent_at.strftime('%B %d, %Y')
      date_submitted = submission.created_at.strftime('%B %d, %Y')
      log.call "\n Processing Submission ##{submission.id}"
      log.call " First name: #{first_name}"
      log.call " Decision review type: #{decision_review_type}"
      log.call " Form ID: #{decision_review_form_id}"
      log.call " Date submitted: #{date_submitted}"
      log.call " Failure notification sent: #{failure_notification_date}"
      if dry_run
        log.call ' [DRY RUN] Would send form recovery email'
        stats[:sent] += 1
        results[:emails_sent] << { id: submission.id, dry_run: true }
      else
        # Send email via VA Notify
        appeal_type = submission.type_of_appeal
        reference = "#{appeal_type}-form-recovery-#{submission.submitted_appeal_uuid}"
        callback_options = {
          callback_metadata: {
            email_type: :form_recovery,
            service_name: DecisionReviews::V1::APPEAL_TYPE_TO_SERVICE_MAP[appeal_type],
            function: 'recovered form submission follow up email',
            submitted_appeal_uuid: submission.submitted_appeal_uuid,
            email_template_id: template_id,
            reference:,
            statsd_tags: ["service:#{DecisionReviews::V1::APPEAL_TYPE_TO_SERVICE_MAP[appeal_type]}",
                          'function:form_recovery_email']
          }
        }
        vanotify_service = VaNotify::Service.new(
          Settings.vanotify.services.benefits_decision_review.api_key,
          callback_options
        )
        vanotify_service.send_email(
          email_address:,
          template_id:,
          personalisation: {
            'first_name' => first_name,
            'decision_review_type' => decision_review_type,
            'decision_review_form_id' => decision_review_form_id,
            'date_submitted' => date_submitted
          }
        )
        stats[:sent] += 1
        log.call ' ✅ Email sent'
        results[:emails_sent] << { id: submission.id }
      end
    rescue => e
      stats[:errors] += 1
      # Redact anything that looks like an email address before logging.
      # Fix: the TLD character class previously contained a stray '|'
      # ([A-Z|a-z]), which also matched literal pipe characters.
      error_msg = e.message.gsub(/\b[A-Za-z0-9._%+-]+@[A-Za-z0-9.-]+\.[A-Za-z]{2,}\b/, '[EMAIL_REDACTED]')
      log.call " ❌ Error for Submission #{submission.id}: #{error_msg}"
      results[:errors] << { id: submission.id, error: error_msg }
    end
  end
  { stats:, results: }
end
# Persists the captured run output to tmp/ and mirrors it to the reports S3
# bucket, then prints cleanup instructions for both copies.
def self.upload_email_results_to_s3(content, dry_run) # rubocop:disable Metrics/MethodLength
  puts "\n#{'💾 ' * 40}"
  puts 'SAVING RESULTS TO S3'
  puts '💾 ' * 40
  run_mode = dry_run ? 'dry_run' : 'live'
  stamp = Time.current.strftime('%Y%m%d_%H%M%S')
  report_name = "decision_reviews_recovery_emails_#{run_mode}_#{stamp}.txt"
  key_prefix = 'remediation/decision_reviews/recovery_emails'
  # Write locally first so the report survives even if the upload fails.
  local_path = Rails.root.join('tmp', report_name)
  File.write(local_path, content)
  puts "\n✅ File written to: #{local_path}"
  puts " File size: #{File.size(local_path)} bytes"
  begin
    aws = Settings.reports.aws
    resource = Aws::S3::Resource.new(
      region: aws.region,
      access_key_id: aws.access_key_id,
      secret_access_key: aws.secret_access_key
    )
    object_key = "#{key_prefix}/#{report_name}"
    remote_object = resource.bucket(aws.bucket).object(object_key)
    remote_object.put(body: File.read(local_path), content_type: 'text/plain')
    puts "\n✅ File uploaded to S3:"
    puts " Bucket: #{aws.bucket}"
    puts " Key: #{object_key}"
    puts " Region: #{aws.region}"
    puts "\n📝 To delete the S3 file later, run in Rails console:"
    puts " s3_resource = Aws::S3::Resource.new(region: '#{aws.region}', " \
         'access_key_id: Settings.reports.aws.access_key_id, ' \
         'secret_access_key: Settings.reports.aws.secret_access_key)'
    puts " obj = s3_resource.bucket('#{aws.bucket}').object('#{object_key}')"
    puts ' obj.delete'
    puts "\n📝 To delete the local file, run:"
    puts " File.delete('#{local_path}')"
  rescue => e
    # Upload failure is non-fatal; the local copy remains available.
    puts "\n❌ Error uploading to S3: #{e.class} - #{e.message}"
    puts " File is still available locally at: #{local_path}"
    puts " Backtrace: #{e.backtrace.first(3)}"
  end
  puts "\n#{'=' * 80}"
end
desc 'Send follow-up emails for recovered evidence uploads that had failure notifications'
task send_evidence_recovery_emails: :environment do
  # Read task configuration from the environment.
  upload_ids = ENV.fetch('LIGHTHOUSE_UPLOAD_IDS', '').split(',').map(&:strip)
  dry_run = ENV['DRY_RUN'] == 'true'
  upload_to_s3 = ENV.fetch('UPLOAD_TO_S3', 'true') == 'true'
  if upload_ids.empty?
    puts "\n❌ ERROR: No lighthouse upload IDs provided"
    exit 1
  end
  # Mirror console output into a buffer so it can be archived afterwards.
  capture = StringIO.new
  outcome = process_evidence_recovery_emails(upload_ids, dry_run, capture)
  stats = outcome[:stats]
  # Human-readable run summary.
  puts "\n#{'📊 ' * 40}"
  puts 'EMAIL SENDING COMPLETE'
  puts '📊 ' * 40
  puts "\nEvidence Recovery Emails:"
  puts " Processed: #{stats[:processed]}"
  puts " Sent: #{stats[:sent]}"
  puts " Skipped: #{stats[:skipped]}"
  puts " Errors: #{stats[:errors]}"
  puts "\nFinished at: #{Time.current}"
  puts '=' * 80
  if dry_run
    puts "\n⚠️ DRY RUN MODE - No emails were sent"
    puts 'Run without DRY_RUN=true to send emails'
  end
  # Archive the captured output unless explicitly disabled.
  upload_email_results_to_s3(capture.string, dry_run) if upload_to_s3
end
desc 'Send follow-up emails for recovered form submissions that had failure notifications'
task send_form_recovery_emails: :environment do
  # Read task configuration from the environment.
  submission_ids = ENV.fetch('APPEAL_SUBMISSION_IDS', '').split(',').map(&:strip).map(&:to_i)
  dry_run = ENV['DRY_RUN'] == 'true'
  upload_to_s3 = ENV.fetch('UPLOAD_TO_S3', 'true') == 'true'
  if submission_ids.empty?
    puts "\n❌ ERROR: No appeal submission IDs provided"
    exit 1
  end
  # Mirror console output into a buffer so it can be archived afterwards.
  capture = StringIO.new
  outcome = process_form_recovery_emails(submission_ids, dry_run, capture)
  stats = outcome[:stats]
  # Human-readable run summary.
  puts "\n#{'📊 ' * 40}"
  puts 'EMAIL SENDING COMPLETE'
  puts '📊 ' * 40
  puts "\nForm Recovery Emails:"
  puts " Processed: #{stats[:processed]}"
  puts " Sent: #{stats[:sent]}"
  puts " Skipped: #{stats[:skipped]}"
  puts " Errors: #{stats[:errors]}"
  puts "\nFinished at: #{Time.current}"
  puts '=' * 80
  if dry_run
    puts "\n⚠️ DRY RUN MODE - No emails were sent"
    puts 'Run without DRY_RUN=true to send emails'
  end
  # Archive the captured output unless explicitly disabled.
  upload_email_results_to_s3(capture.string, dry_run) if upload_to_s3
end
desc 'One-time task: Send recovery emails for specific recovered submissions (November 2025)'
task send_november_2025_recovery_emails: :environment do
# DRY_RUN=true logs what would be sent without calling VA Notify.
dry_run = ENV['DRY_RUN'] == 'true'
upload_to_s3 = ENV.fetch('UPLOAD_TO_S3', 'true') == 'true'
# Hardcoded list of lighthouse upload IDs that need correction evidence emails sent out
# (only those we've previously sent out failure notification emails to)
# NOTE(review): one-time remediation data — treat as immutable; do not edit by hand.
lighthouse_upload_ids = %w[
a29fbac2-6c54-4043-8767-bb84df914e33
df70f6ce-ff74-4178-8457-42053dcc9840
6edf7e27-0001-4ed1-bc9c-76d0969f2ada
0bf6184f-0118-419b-a26b-0c1f7c2841c8
f05d6fdf-1a29-4489-9721-3dc23c3a56af
22d0ea5d-fe4d-4041-a5c6-87c867df7af0
33fc299e-e072-4084-a623-f2cc5636befc
8cc19bc7-3ecd-4bc3-8d7f-8a6809d79942
4eaa30e1-2555-4431-b267-5c869c1cd210
76eaf5b9-53b1-45bf-ba8b-b5c98fc7b22d
eecc88ba-cec1-4df4-99f2-2b422eca6719
e7eb8517-4816-49fb-afec-62195c02c7f0
410a1c59-fdba-47bb-9265-8d44bf8ac36d
12f14a48-e617-4f84-851c-a71b6b05ed58
1931739c-ab71-4f4b-8d51-5b72c668f9c1
2fff768e-d576-4531-b3ca-c9b85ce2057b
e9c5826f-4dba-4fdd-8e1e-f781a5198b20
2942de40-9a90-41d6-a088-da84d33581b2
f4770ae6-f4af-4da0-a555-ec9707b04969
4d69992f-b534-4125-9345-3eba4c6d012d
2ebee1ce-a488-476a-950a-59d2b2b5969b
bb94c80e-dbb8-4a76-94e6-46f3bb5f36eb
a32b4771-c78d-4fff-b73d-41c12d51ca23
1d8bb436-c490-4783-ad21-7a9afd6803c7
d2702166-1274-41bf-858c-ee6dea8a527d
acd2f702-b36d-4208-b3c2-65eeffb1d3f8
db84604e-079e-4a19-9ec5-ca45a40b6042
bd634edc-dde2-40b6-8a5d-cbd80aca598e
f7e89f05-98f1-4f6a-a772-c44894d7b1a6
253ac638-3619-47d8-8c73-d7cf80d94ecf
c75d0def-6ddb-4378-9a80-c180ce19adc1
fec0cdc0-bdfa-48f3-bd15-154f17828dfb
17d7eb10-a7bc-42d4-8b58-b28dc4ec52ac
00100085-bbd7-4358-95ae-18fdb0adec8b
ce04fc84-898d-4f62-8b4b-d90f7ff5b2e7
7f192b97-661a-4730-8d39-b9f3d1f24a64
0982a2d4-ce35-4f26-b7e6-a7d018fc462a
2a9898e7-6e32-48a3-b241-d38098c99d5a
1ca47b96-12cd-4087-8d9f-7f437cdfd9de
fe3298cc-f5d3-4a6e-864b-7e27aaed25d5
cc5ff4f2-3a23-4445-a620-daf66e007bc9
964095ca-5719-4ae0-86e3-368ef5bb88f6
9bc52b5d-eb94-4ea5-8d42-7c3bea56187e
c140ec86-87ba-4d3e-9792-1a46f691b63a
c9083858-dbba-4576-8ddc-de09365db3aa
832af903-290b-46ef-830c-65808cb7d8b2
855948bf-0c9f-485e-ad36-a626461a2107
cab728c5-648a-472f-a812-a666e734595c
8b174564-5dbf-4cac-aab6-e17cbe183ddc
b569a88e-2674-4345-8e68-43ff72ddc3e6
5e16a8d7-75e7-4a91-9bad-e95c344ee5d8
c49d50e2-ce64-4f1f-a6b2-ccfda3d55eec
4490a9d0-165e-456f-a6df-634edec18c18
83c1f391-1afc-426f-8d88-fd47de6dd148
d8c475c7-69db-40a4-a07f-8fc7dc685d71
81317e09-6962-4b68-a248-afdcb8118e73
9c5652e7-6cc0-4a5a-b83b-639e9e750542
3d051836-fd57-43b1-a66c-08ce5df8b082
5875acac-7908-42f9-a979-c515658bc3a0
2661d705-f4e3-4cff-81ff-1ff79937691a
7db2659f-92e2-4182-8bdd-b0d49c63dcdb
2ce8bf3a-d82c-4c58-be3e-7821eceaf006
9b342f6d-7f28-4142-894b-a9c9c5064cda
85a0826c-9beb-4ec7-b9d5-5a0a7be2bb36
aee9a5cd-c7dc-4d48-ad3d-ce1148d484f6
059551e1-bcc4-404c-b1ca-f56bed479b0f
7b03150c-885a-44e3-a10e-00944d576271
cb4bcbaa-fb5e-44a2-82cd-01851ce98315
4b3ff3a8-964f-4c11-b688-46ae5f3a0e60
33054ef6-0243-4178-9a6b-bf0f38d8369d
d0a2863f-14f6-4762-9b3f-35845bb8d4e5
2b3c1faf-ff27-4ab9-b87f-01ce74f535e5
0fdecdbd-575f-4e85-b104-9a8253da811e
854805fb-68df-412d-bd32-e0334f05340e
ddd37618-33d1-475d-91c5-82b2d5e6212d
98b384fe-4caf-4135-8ee8-933b84c3aa48
e832345d-26fb-4e32-9527-4e78bda7b8fe
ea4b9a8a-b8bb-47aa-8183-2efaf7eb92a1
10f2d184-c6c6-4d6f-840a-65a646cad06d
fd801dfe-c9fb-4e6b-87fa-be3fd4866145
6fa93ca3-ea82-4fe7-aeaf-34e9017b5289
227cdae3-9171-44b1-9499-8f53ee434aa9
b87acb07-3cbe-4aaa-ae88-bc026e17d514
f399640c-66d6-4b4c-8d41-a5622f695954
78607d2b-2005-4ac6-9a49-1268aeaae556
9414bdce-e4a2-42de-98ad-77916e0d4950
4e4a70c7-09c5-46ba-b6dc-8325df09f9bf
3729e2e6-004f-4edd-8c28-11a3ea93af9f
a9927362-1f93-463f-bf69-2cb450dbc063
045d38de-d106-49f9-818f-eb7d452271ec
7bafbd33-084d-486b-9d20-b0559babbc89
e227cf9c-65a0-474e-ba54-756f852a75be
d08b5cd0-ca71-459f-82b3-01ab9a5c4287
cb95e186-4d8b-4ffa-bfe9-1fd8075d52dd
70d48af3-a087-42cf-b044-0456a45a6b09
f331d233-68a9-4e9c-a9f1-0ecc2eb6d1ea
f1c69ab0-0486-42f5-a086-2762eb8fb623
a002a98f-c4a3-4703-a2b1-deb4c0a56c6e
a4358865-10ed-4c17-9f80-843b1b0b207c
7a547657-c1b8-4ba9-bf82-c57501bea357
55d3534a-6245-4cff-bdb8-78d5867c8897
00283909-0632-40c1-9b0a-385a9e3f454d
a0322c99-ff99-4af4-a34d-e6b840aa45da
ddc963b5-232c-4c8b-886c-5a96737998d4
03f47240-1926-4d0a-be29-89043302e5a8
2c5daf14-5b39-4991-9613-ad696685677c
2fa066e4-8047-4d34-8475-acef16792532
65635419-e19c-4e89-8056-597d0b1274f4
dd496360-f432-40d7-bec6-569a2fb801dc
e57047a5-6822-4ec0-8027-85c252c2978f
e8c9b978-bc68-46fa-8ef2-f79d6a20b527
a75a3ca4-fff7-49cb-91d7-25675cd2fce8
57ab5d2e-8681-477e-bf5e-8f52886eda96
a75935cc-8dbb-4ff8-8202-e6e85f55cce8
56f6f36c-54b8-4887-918b-14de872e6a98
464e38d6-34f6-4ff2-b953-3c7d26725558
4d87cf48-bafc-47fa-8f8b-e14874fd7de6
7471fac3-549d-4b2c-b99b-b80a6897605d
ce93f551-b52f-4c96-9e14-864fe0cd3e4a
c4e15b63-9f3a-4a2f-bd24-dc6ecf405c5f
7a98da46-c066-40d5-a356-1d56bec72993
685b52c5-8349-444b-81e2-f30aab549172
1c17c047-2c62-469d-b17d-21c4d0d41061
360f7468-d243-49c4-9337-781ffdc018cd
91593cfb-b82d-471b-b7ed-85182cd1248c
1fa93f3d-0753-4375-8363-2fa6d6fe403e
7f11f094-374d-4e6d-8c87-868678965b1c
40865228-eacd-4927-b526-4e558f153e85
46fcae11-59b7-4d60-b0fa-52e14af61337
16330175-2df2-47b5-bbd6-a4b91c66a848
87cfbe2e-a09e-450a-b009-d9183be74c2b
4dedeffc-ff6d-49e8-bd7e-e0360baca21d
9a73a4d9-328c-4375-b1bf-914529f07a6b
068cd836-5a24-426d-aed9-5ea2908298e0
5e49e92c-4c51-41ab-ab18-72faad8ddf27
96202c83-6837-4a5b-868f-40ba0ea877b0
8bed7392-912a-4685-8072-0356b85a3c15
5a4847fa-fc77-4f18-bcae-bfcfd5c00333
c9285afa-bc69-43fb-a029-71078803c39c
982402bf-cb84-4ed6-b9f0-9d344a4a8666
0c2aed28-349c-4827-9b21-219302b4d3f2
d61e60ed-d3d9-4aa2-a66a-1d11bd3c2d5f
31dfdb02-78da-4b4a-bd27-ce3090f8caea
4a29d520-6492-4766-b038-3aca353429ae
fb3ecac1-9732-460d-91dc-32f1ca65cf06
cf60f010-c7dc-452a-bc81-26392db7a488
d5d0c0b1-ca95-497b-8a81-6a228b97bed3
39b33b3d-beb5-4861-95a2-71f98cbc906f
60b146b2-859b-467f-abe4-34c0858e2977
89465a8d-85e2-4ee0-b6cd-b3d35ff6f8e9
e809cffa-32e1-43d5-8af9-5fad3eb9fc04
215547c5-9820-4c24-bbca-b790d56d9a7b
3080bf51-ef12-43e0-920c-5a87d92e9855
31c5ca22-61c0-4f54-bf87-8903e70f93f7
bfe96f85-52a2-49a6-80ec-e2ca5aa1be36
73fc7d35-50e0-424f-a5d2-86ac9c6c9cde
20a2d01d-1b75-48cc-bd13-a77272b88c70
7a3954fa-0b10-47e0-bd53-9ed806520269
673f417d-3ca8-42c5-b47c-b3d7c98ff229
492d6b4c-5f62-4878-b296-a702706f2d7e
31481750-1dff-4fdb-b1cc-8c618dbf7079
8ce22ad5-769f-4c03-a8f5-fcf3571ba82d
d9472e85-341b-4dce-9cca-737c15b4ddeb
cc38bc5f-a47d-4b9b-a186-fca7eca4e1e2
74cd28a9-8693-495e-b4e2-de7138edd2d8
78a49eaf-0b68-41b5-af79-d3220c37cc3b
f929dbfe-df34-47c3-861e-e418fb3a75c3
c1cbbf23-13a0-48f6-820c-b29bef8a06b9
24a0fa9f-041d-4d4f-97f5-163643b8bc15
23619646-dedf-48aa-9bdd-b3f349bfd446
9c7a434c-0c0a-43d4-8002-de1bb14de847
abf228b2-f23b-405e-bce3-98d0b11b4247
fc8f1e01-2406-43ee-a39b-4841b8c2abab
578047b4-76eb-40cd-995c-c3c3c0a61764
4aaea75b-2d2b-4289-9df6-bbd63d5d9cc0
c4402836-04f7-4335-bd50-66a39e617055
6125f6b6-dcfa-4d34-97ec-cc9c5986fefd
d2b69121-1a72-47bc-8b9b-e945f16eef0b
5c8f942c-12d4-451c-bdcc-9d81b3a07f3c
caf0ab94-3def-4ef0-be15-8a95b5aeb1c1
d4a94cea-ecd1-4b4b-83ac-d7b68c48cac6
d043f648-ac84-4902-8fde-5ed1e8069b7c
0171475b-77bc-40a7-b00a-2cc7d24aeab7
ece435df-bd24-4616-bdbc-1431bc1207ff
c8d65641-f136-4ae0-85c5-68f885477f02
30ae5ce2-6626-4908-b476-9988954495dd
ba03f27b-4707-4795-883e-8bd49ccf2a66
8c72bf35-00a3-4513-922c-0c862fac9679
bc860195-2f15-44f2-b24c-743e300b0df7
ea32c8e6-6046-4675-b997-75d8d92edee1
35a2e82e-a6cc-455a-aaf9-534aacde44aa
195ad110-060a-4caf-b0d4-287981ee8c4b
83dcc83f-3029-4dde-9f99-5e9517b3a7b7
a7f71997-aa53-4860-a865-5019d6eaa6ab
55b610f2-1451-423f-a98c-e8728da52908
0f07692a-81c3-4559-802f-fa0dda9293b0
aa5101d5-77d0-4f43-9929-b5c330b6b716
e0279a0b-f7cf-4d9c-be9f-c831f748fdc2
9c85833a-8db5-4b3d-b198-49ce28b6401b
3fa30331-808f-4421-b51e-afec88d7d51f
00156a7f-b69e-4ccc-b549-728af8e3a2a3
6836bbaf-5c43-44bd-b397-0d9d4195522d
ce60d2bb-a667-4709-b66b-f6ffbfef4a84
5ead4556-b2b8-4a15-9d00-5a2cd8990900
07d15bf9-816f-42cf-b9bb-b7f3a4e2f4e6
ba9eae88-9131-499f-be9a-275250467afe
c2c41c31-3dbd-4ff1-b6a9-963e567316dd
4d514a82-baa9-40f4-8661-a93617379944
3d3440f9-847a-4dce-a3a8-4f77f9813f74
b7b46349-1635-4d6a-a4f6-719d9c1e903c
7d169cc8-b6c1-41e7-a3fb-ad788371ab91
26f2614a-3f51-423c-a4ee-5b9e12e37dd8
f298f217-5926-4dd2-9c09-5b669c3a116f
688c0bb3-a9ba-4dff-b3c5-ee63d1c46691
a4289af0-86e9-4881-8c2a-3f0d46c43379
20922452-11c6-42e3-ab94-567b006490ef
6b547f5e-00e9-486e-b0c0-d10118a26df7
f1933ea0-d67a-408d-9ef2-e243881000a6
2d8af34a-c945-4e9a-bdda-d00251b3a3b7
eebf06de-2404-453a-b1bd-a5698e4ba25f
bc24e0e8-21bb-44c1-a7ad-1393cc9fa552
3932f097-55f1-40fb-9d8e-858af6bee507
6fda76e5-4aee-4250-9320-ea20e1a45a1a
ab6cfcb7-9f19-4e93-a422-978756115951
0189c8cb-76c1-4449-b28a-d8ba7f7ee2c1
0b97582c-365a-419c-8cce-028f3213577b
c1edfbb4-d8e7-45b7-b13d-5ef519bd2ddc
b6d5315e-2b5a-4911-94f7-9a2a351d444e
ba724383-1c57-4d03-ae97-9a237c0d2fdd
7429916f-7f4b-4f49-86c5-4fe0ee97b666
e685f8eb-e252-41bb-8462-bdc1576e2a8e
31b0ebd7-9d44-428c-9900-a0c8f37b4d5e
984310d9-058a-49bf-8fdd-6c8f65383266
19af3d40-71b8-4f90-ab8b-13a7e47f639e
ca3bfd6e-c268-4ffd-b258-33c836e8f043
2711f1dc-1339-480a-8931-e3975a3684cb
eb150d56-ab17-4d4e-8394-0480e21e5688
a7701c39-40ba-47e1-b042-e22680149cf6
8b9979e3-8008-4587-bddd-ba0d82457bf9
adc83a90-9418-4fc8-9d7b-c91c87c3749f
8941b8c5-3120-47c8-84e9-599a533c601f
051df8a0-7d63-4229-8da8-2e75d4090e74
c93fce30-bfcf-4517-ad67-d462913d393c
9fe6f1b5-6fa3-4733-a9a2-4c40d2bb7ba1
7bc27d7c-df16-4461-a534-ca8bc08349ac
3bad79c2-dcd5-4668-ae35-81b5a15bee42
fe350077-3fd9-49a0-9947-b38fb192ddc4
e04ae3b8-cf45-42b4-942a-17866c0d4a58
63ef1673-615f-4557-8e0d-6fd05f2d1fd7
6841f882-91bb-40c6-8265-39f0130914e3
99e0c690-7a42-4497-88d6-f442d174e6ed
3c36ee42-6ffa-47a6-a720-45db7c1c94d5
669bd626-37ba-4ae6-aed8-051775e38676
bda26468-5420-4f29-b506-3c7e8a368223
17abd23f-8393-4044-b675-efc6acd19b4d
2525bca7-81de-475a-8e88-35cf412258d4
1699d004-5418-4ab2-a2dd-5c23c643b883
cebaaf40-7df6-4521-83ac-170b52cae049
fc9e90ae-f0b6-4cf6-9619-fd7ea2434269
a8fd923f-4c40-4558-a770-fcff065725fa
43ae2473-aa46-48f6-add7-b963488d4216
07e2956f-5703-43f3-a2d9-0779536ffb7e
cd3b7f12-d97a-42e3-a0a9-59eef58453d8
0458ed82-93f1-45de-b747-2590cee76ea9
90b9f710-6a24-4535-ae71-79aa49a4a7e2
79106c75-6e75-4116-946c-1e651d57c2a0
8196c200-a35c-473d-adf0-c772f607092e
57d0b0c4-844e-4eaa-ae37-ade0b97a1828
a6b3b267-a049-414b-9e8a-87a4ff1d7f6c
69f70bb2-c07a-4b20-b25f-605d89d23f8e
bf45c535-4517-4b42-8138-9d6965115607
c0720c7d-ce28-4cec-a215-9417c70ab3d2
82d69102-3b15-45cc-9567-958f940970e3
81c468c2-6829-4f82-9a35-add31141c7f8
8e65cb34-d4a6-4d7c-9712-214ceee7f226
36b2e2aa-bb8f-4abd-8f26-391dd1db2935
85f3c11a-7a04-47d7-89bd-fcc917995546
3b356213-e54e-41af-9ef0-3369219cae8e
1faada60-e3e1-4045-9229-c62c01f4854d
7172f922-ceb2-4822-9567-8ed519dbbb1f
c6b9ffac-abd7-4fcf-9a4a-4239695ceed0
53567a84-4675-4aad-9297-ec6cc81b7441
2365c720-aef4-460a-b111-87e4e0feaa15
7bbb8097-db77-49c9-9ab1-d11c6870f345
1fcd0eeb-78b2-40b5-8354-bad7843bd6b8
0764a15b-4529-4b0a-b976-da6da28ddeba
ba6bb392-1548-40e8-a1b2-690b4f2ac95b
a859d1f0-b854-4165-8d0f-6d3ea35027bc
f3836972-a633-4b5e-be59-1a2cb4a269c5
6cae8ede-7348-40be-98a9-3ab3c3425695
52aa98a4-87db-4c88-8b27-8f724f8465c1
e6b2d598-c835-48ba-98c2-4fc90c40350d
9a75f6cd-b61f-49b3-9682-bb976c5bdef5
bce4a209-870f-4f3a-ab2e-62295e09d7ae
ea3bb41b-a1a1-4739-9caf-74f1d54d43cf
4d02e73d-d2a6-42b7-a361-5d3d688df1ee
838088f0-545b-4871-9a90-5c15fbd81aae
6c6ab538-d280-41bf-9779-603f0d446774
40e6b49f-eff8-44d4-a272-19cd1ac6723e
eefaa81e-c381-4cd0-a325-8b158a273ea1
e678f180-c679-44bd-b7b2-c81ebf46a46a
5b544a6d-8f88-4e26-be8f-6bda40a3dfc0
dbe195cd-4474-4ef3-9039-b57190526fca
f9faafae-e122-4c37-a811-82e59be1d4e7
99079c19-19ed-4006-b669-ac6bead87de8
74147a22-e8e7-4402-b928-d59fb28744fa
59ef89fa-5c10-45fe-9ac8-bb1d06560b4d
d2297020-db10-4e57-8206-3b46e511f024
67d1babd-81fd-46ca-b089-9774d5fa3c06
c968fc19-e799-4dec-a683-8ddecfd9059f
14ab3078-c786-4d43-a45d-4482c51d65a1
fd7b1731-bd24-47a1-93db-b9bc3922e15d
f51c0987-7f08-4afa-86d0-dca1d407909a
6f3ce74a-64b3-4b46-91ac-44a57f9b8ad1
6226bfb4-cfd9-4778-b286-71bb2a7d5ecd
ea66a6f2-92cc-4a5f-bf5a-1be5daf48bae
f0443baa-6b69-45eb-8894-fd282d6e799d
b1412eb8-be2d-4676-804b-872f267ec659
3f1f6357-9dce-4b2c-af58-e16ff7294bf8
3229284c-5260-4d0e-ae73-172583c6c28e
efd857d0-5c53-468f-8d14-1cac10decb7a
afad6e10-9f23-490e-9935-1fef4c5ef817
0a3ec44f-7f3f-4752-bed9-97ef9756e3a8
4276965d-0f03-42d0-a39b-7d036299d280
0bcf7ef1-6bf6-4e89-abc7-9a5bcface79b
3765b29c-f198-4053-8cc6-6eb188286d83
c309c291-9d3b-4fa6-a6d7-26d08fe3c480
0e81fe77-5244-4197-8d96-850f50043e47
9725139e-cd05-4d10-bd13-8bd589d3c537
52220228-bae7-4b78-b5db-205c3fe25979
624a0905-838b-43d4-b5eb-e0a79d05b787
24d0823c-29c6-4c49-8448-153255a5662d
ac99febe-7127-4dfc-aaec-fe2e5589360a
7667b82e-07f6-4456-8f14-50fa36868da1
b8831303-95c5-43aa-bba2-7f3ea6fe5eba
e1635131-1390-4665-89d7-3c4b5e384072
f0d33a94-59f5-401b-b8cb-425e72795944
d2b6e4d2-b9dc-4cca-bd66-76a951a50084
f8296da1-c118-4a36-a994-b6de8a3e4f5f
2ca2d902-35a2-4c48-bb95-11fb6739ff57
9765d501-86e4-49b0-9a42-703b493f9215
fae7513a-68e4-40c2-a7d6-07b3ca7a77f9
e85ad13d-c3b9-43a7-9188-42b2ee77b130
3c81a146-2b06-4441-a2af-94ecb8153ea2
3651a991-7dda-468c-b4d6-78940a171472
db2a3b17-50cd-45fb-b2e6-209fc04cb421
25c1ee35-7370-4c35-8ba7-3dffbe90b2b1
cc8bfdea-27f1-4119-ae24-ed06afae7c02
6fcb5c99-266b-4de7-9c24-07b23c7f120c
a08bbf67-b443-4ed3-aee6-e28982bcc716
0daa4e46-f895-422e-8f41-03522b3bd377
2771393d-7451-44fe-ac87-093b8eb84b9f
2b216e44-f2f5-4ea8-8dba-3cb46a90f8b3
64cbcef5-e5dc-4690-94bc-81ea6b1db66a
2dd9b90e-c48b-44d7-a191-a4c781afe42a
fee9ac65-c250-41b6-b9bc-49e6da79f44c
c96f622b-0d25-488b-afb2-877a1e26c650
adf832bf-73d9-471f-a1d8-82ecf061679e
7c46a0cf-2c1c-4909-b91a-30faec2507ee
748b9826-bc78-470c-9687-5c72be99f249
15e0579b-d282-4c0e-a161-cc7374b4fd9c
4c6ca965-314a-4bb8-8ee6-659e5b644dfc
35519bd7-cea2-4bd9-9057-9c9b7d8b81b7
bb4e8335-130b-48eb-9faa-592b1290552b
9e588d97-55d4-4f02-bea0-5689df08220b
a9a0e32d-70e7-4f5e-895a-25c5236cfb92
adaba3fe-d035-404e-80e3-255b30544d36
ad121e94-e8ce-4af0-ba91-7e00cc0ed0ed
a021ca28-f379-4a46-97e6-403ab015132c
29bd6fd9-14cb-447b-927f-618a722536c8
8b961569-5c52-4f23-96f5-98c360d6cbe6
08d8be63-f292-4622-9386-15c3e7e978e7
63504a50-4465-422f-bae4-cfb53e9e892f
9a6616da-cb6a-41de-97d2-bde96c84551e
a4812582-4c74-49ea-aa4c-ee6651cb61ba
9a608d31-0592-4202-948f-25084d122174
f903ae2e-9e23-43bf-b2ad-4f97ddcc02b5
784c13d6-1156-472f-9e55-a534eac4f0d0
da69c646-9683-4e77-bcea-02a7738215cb
40d08425-d4ba-4921-8cd0-ca39c5a28840
5e452364-2362-49f4-b220-7298370175ac
b32b8dd8-eab2-43e4-92db-9d25475524a2
257fdb72-4a63-41ea-a27e-df2ec57ac834
b31125b9-ec2e-444e-b0ed-7cd8d950acf6
9d9073cc-2abb-411a-a5a0-60413fa1a210
2e63dea0-6979-41d5-90e6-fb65df9949ce
397e9604-c2f7-4668-9f6a-f9bd52a2919d
3c7c3041-bd5a-40aa-9ebc-018d3ed6a400
857a8a92-0d1b-4b34-b921-10dbf12e07da
54656991-159c-4e60-97ca-d95634ea54ce
59cb75ec-e543-497d-9ccf-e84b3ddfed24
812e129d-d281-4eb6-b43f-33fc43ee3766
429715da-4c5c-4c28-bcd8-40f79900ac5c
05495054-16ee-4a07-898b-c6c1f7d5d0d0
7ac7c91c-cc60-438c-aa63-98a7f874df28
8b5ef3c7-4f68-487c-98ab-2331fff41182
b86a26d2-4a49-4faa-9f3b-51511c3b12ca
d0f58937-b8cb-4482-aa57-1ebc6f33a347
f9494786-da67-4f6a-94ca-26d29b734f5d
75cad579-63f8-4e02-a01c-fb744f51761b
081c9b50-4df6-47a6-acea-ec5274155c78
d325bbae-e038-4105-8515-09884e786c0b
8f8edee0-32e8-4a1f-a0b0-5b283d92e069
2288a458-a431-4219-a509-5c0d23ec80ca
918170ad-cf2e-4b2a-88e2-97621fa54a28
abd1db34-4001-418c-95b8-7064ad403dbc
78b0c1c1-6956-4a3e-9dfe-068078b2314d
d2aab71c-725f-47db-b6a1-65f100b5af26
c65e7ca4-6072-489b-9838-76e8f0f9a550
3dfb88d8-391d-4b61-8481-c59f192f954d
c32c89f0-7c6b-446f-988e-324b8a2b2456
74141146-3fb7-4709-bb00-a8283886e0fd
38923ce2-4e1c-463f-bddb-3bef9c1cd69f
7c42b00d-a0b6-46f5-a1fc-9efeda7e3e5d
db3facf7-e36f-4927-88f7-a897b67e6ea8
8312e152-3a33-4e24-a2d8-acf3fb3fa6ea
e281fcda-6b31-4848-9735-7a1dd08f79d3
a0a3f28c-2b0d-47f2-a94f-18740b6b69bc
ba4216d0-973b-431d-a074-586a97f48774
1f6865e2-0793-4446-8542-eaf307a6a933
670de3c0-ec57-4b89-b398-e1bcd57be071
0a66b139-a49d-4f27-8ea3-1da4cb4aeb07
d56014e7-7ebb-4f9a-934d-269db82aebc3
221c2568-361f-4855-bb04-94634fdb336f
72cb2f5b-808e-4ea3-863b-4474ba12a8a7
0a8e7521-2f6b-4611-940e-2e5a17804bc4
759ee1f1-248f-43e3-a3b2-90c9e7323bc9
b7258755-496a-4a1b-9910-766d2728a76d
60f26b32-08f4-4f44-b91b-f772a2e52296
eb1c407d-f1ec-425d-b1ae-8655f8a9f2d1
5ef2f2ca-d086-46be-950a-72dee2f6f6f3
4140c72a-8bd3-4571-aed4-4388b4bf5542
0384b53d-20ab-4a05-a15e-6e738db7182f
13c9dabb-a7ec-4a64-8f85-e03c1d221087
debf806f-1b4a-41ad-9908-a6d4ca168dc0
819b0ee4-974a-4dda-827d-6dc76b4714e7
e12de5ac-ee2c-4dea-b8d2-d39c0ce90685
a1a6b0e5-d835-48de-a982-70de8c598a15
50037304-49bb-4ab7-88c8-038cc0913c4b
03ca2663-fe6b-4a62-b424-9e9029cb4a47
6541fb39-c173-4834-8c77-87d4e0001538
495ee68e-439c-4297-b7a2-0acbf6634098
44dfbc0b-8c21-4039-9ee4-777d52d8dde6
35124598-83bf-4778-800f-30cd499db24e
241bc6fc-5e7d-40bf-af7c-9e175e675beb
47ff65d6-6103-4036-abb4-1b9bd98b9d88
9ab92fd0-01ff-467b-b855-8a8fda71b56f
4ddb3dcd-1c9a-42d2-8fd6-aee0043b6c2b
c41c6e0a-d070-421a-9dd3-70c9bee34fb8
92391cc7-cbf6-4b15-94ab-37f3e38c457c
62d8d520-f3a2-4df0-8e57-265a8a00e708
00ffcb4a-3d1b-493a-9701-1c9838cc81f3
915bc8e3-9be3-41b0-b7f7-91a920790626
ef8e956d-0983-4d97-a315-4a987bbe65ab
034bfa35-38dc-46e1-832a-a1b19bc9469f
d833ee72-efd0-4695-81e8-e3b9764cbd2f
fbce009f-e438-4da8-a178-07a7f321e5d1
79e51770-2ce5-486b-ba81-08bc6c45a7c6
60906db9-63af-46e9-bb3d-c1b312eaa812
]
# Hardcoded list of appeal submission IDs that need correction form emails sent out
# (only those we've previously sent out failure notification emails to)
appeal_submission_ids = [740595, 740600, 740603, 748840, 748845, 748848, 748850, 748861, 748863, 748870, 748874, 740605, 740611, 740612, 740613, 740614, 740618, 748836, 748839, 748875, 748877, 748880, 748882, 748885, 748889, 740615, 740590, 740596, 740597, 740598, 740601, 740602, 740608, 740609, 748835, 748837, 748844, 748846, 748838, 748843, 748881, 740607, 740591, 740610, 740592, 740616, 740606, 740593, 740594, 748857, 748856, 748858, 748859, 748860, 748862, 748865, 748868, 748871, 748872, 748873, 748878, 748879, 748883, 748884, 748886, 748887, 747735, 740599, 740604, 748842, 748888, 748876, 754401, 754403, 754478, 754460, 754441, 754429, 754430, 754418, 754459, 754470, 754468, 754475, 754487, 754471, 754490, 754405, 754350, 754362, 754419, 754367, 754509, 754446, 754423, 754329, 754585, 754500, 754512, 754576, 754577, 754588, 754480, 754543, 754462, 754546, 754453, 754560, 754599, 754604, 754609, 754610, 754665, 754632, 754650, 754655, 754657, 754661, 754601, 754574, 754622, 754578, 754600, 754317, 754301, 754312, 754318, 754335, 754383, 754351, 754358, 754370, 754377, 754379, 754368, 754390, 754296, 754307, 754331, 754333, 754399, 754467, 754476, 754477, 754485, 754541, 754553, 754617, 754627, 754663, 754671, 754692, 754742, 754746, 754283, 754310, 754455, 754287, 754319, 754309, 754303, 754352, 754353, 754355, 754363, 754365, 754369, 754488, 754416, 754384, 754289, 754295, 754425, 754426, 754428, 754440, 754450, 754538, 754498, 754503, 754504, 754505, 754506, 754507, 754779, 754516, 754556, 754562, 754564, 754573, 754575, 754579, 754586, 754580, 754581, 754583, 754584, 754587, 754589, 754594, 754635, 754638, 754639, 754662, 754660, 754675, 754648, 754652, 754669, 754653, 754654, 754658, 754664, 754668, 754670, 754672, 754690, 754693, 754698, 754729, 754763, 754704, 754706, 754709, 754710, 754711, 754712, 754743, 754721, 754723, 754725, 754726, 754727, 754731, 754734, 754738, 754740, 754749, 754751, 754685, 754680, 754695, 754754, 754760, 754767, 754768, 
754770, 754771, 754772, 754774, 754775, 754780, 754773, 754759, 754386, 754313, 754316, 754320, 754354, 754323, 754325, 754332, 754338, 754340, 754452, 754394, 754395, 754396, 754400, 754414, 754443, 754456, 754458, 754465, 754466, 754469, 754496, 754479, 754481, 754483, 754484, 754527, 754424, 754421, 754417, 754412, 754520, 754521, 754526, 754536, 754542, 754544, 754547, 754550, 754551, 754598, 754603, 754605, 754607, 754464, 754472, 754566, 754611, 754612, 754614, 754636, 754621, 754625, 754626, 754673, 754634, 754684, 754613, 754645, 754686, 754715] # rubocop:disable Style/NumericLiterals, Layout/LineLength
# Create output buffer to capture all output
output_buffer = StringIO.new
# Process evidence recovery emails
evidence_result = process_evidence_recovery_emails(lighthouse_upload_ids, dry_run, output_buffer)
# Process form recovery emails
form_result = process_form_recovery_emails(appeal_submission_ids, dry_run, output_buffer)
# Print summary
puts "\n#{'📊 ' * 40}"
puts 'EMAIL SENDING COMPLETE'
puts '📊 ' * 40
puts "\nEvidence Recovery Emails:"
puts " Processed: #{evidence_result[:stats][:processed]}"
puts " Sent: #{evidence_result[:stats][:sent]}"
puts " Skipped: #{evidence_result[:stats][:skipped]}"
puts " Errors: #{evidence_result[:stats][:errors]}"
puts "\nForm Recovery Emails:"
puts " Processed: #{form_result[:stats][:processed]}"
puts " Sent: #{form_result[:stats][:sent]}"
puts " Skipped: #{form_result[:stats][:skipped]}"
puts " Errors: #{form_result[:stats][:errors]}"
puts "\nFinished at: #{Time.current}"
puts '=' * 80
if dry_run
puts "\n⚠️ DRY RUN MODE - No emails were sent"
puts 'Run without DRY_RUN=true to send emails'
end
# Upload to S3 if enabled
upload_email_results_to_s3(output_buffer.string, dry_run) if upload_to_s3
end
end
end
|
0
|
code_files/vets-api-private
|
code_files/vets-api-private/rakelib/decision_review_repl.rb
|
# frozen_string_literal: true
# run this by starting a rails console and running: require_relative 'rakelib/decision_review_repl'
# rubocop:disable Style/MultilineBlockChain
require_relative 'piilog_repl/piilog_helpers'
# Back up Kernel#p and Kernel#pp under alternate names (_p/_pp).
# NOTE(review): presumably so the originals stay callable if this REPL
# session redefines p/pp — confirm against the rest of the file.
alias _p p
alias _pp pp
# Returns the multi-line "recipes" banner (example queries) that is printed
# when this REPL helper file is required into a Rails console. Pure text —
# nothing here executes a query.
def recipes_string
  <<~RECIPES
    #############
    # RECIPES #
    #############
    # 🍝 get an HLR piilog that was created right now (roughly)
    logs = Q.(:hlr, :now) # builds/returns a PersonalInformationLog relation
    # this will give you the time range of the current minute.
    # beside timestamp strings you can use :now, :today, :yesterday,
    # :tomorrow as well as any time/date obj
    # 🍛 get all piilogs that have an error_class that includes either the string
    # "DocumentUploader" or "nod" (ILIKE) that was created between two dates
    logs = Q.('DocumentUploader', 'nod', '2021-03-14', '2021-03-15') # you can pass up to 2 time arguments
    # which can be time, date, duration
    # or even nil (to express an open-ended
    # range)
    # 🌯 get all NOD piilogs that occurred on the day before yesterday
    logs = Q.(2.days.ago.to_date, :nod) # Note the 'to_date'! Passing a single time arg that is a date means
    # everything on that date. When passing a single time arg that is a
    # Time class (with hour, min, sec), the resulting range depends on
    # how precise the time is. Examples: If the time is 00:00:00, then
    # time range of that date. If minutes and second are 0 (but hour is not),
    # the time range is that hour. If second is 0, time range is that minute.
    # And so on.. See TIMES_TO_WHERE_ARGS for the complete rules.
    # You can always use .to_sql to see the effect.
    # 🥗 get all DecisionReview piilogs that occurred in the past week
    logs = Q.(1.week, :nod, :hlr) # Note: this will go back 1.week from the current moment.
    # You might want Q.(:nod, :hlr, 1.week.ago.to_date, nil)
    # Use `nil` for open-ended time ranges
    # 🍔 get the error counts for the last week of NOD errors
    # print the error counts by category:
    puts_by_error_message_category_counts wrap Q.(:nod, 7.days)
    # {"Gateway timeout"=>7,
    # "BackendServiceException [DR_422]"=>7,
    # "Outage..."=>6,
    # "BackendServiceException [DR_404]"=>2,
    # "BackendServiceException [unmapped_service_exception]"=>1}
    # print the same counts, but first organize by where the exception occurred
    puts_by_error_class_counts wrap Q.(:nod, 7.days)
    # {"V0::HigherLevelReviews::ContestableIssuesController#index exception Common::Exceptions::GatewayTimeout (HLR)"=>
    # {"Gateway timeout"=>7},
    # "V0::HigherLevelReviewsController#create exception DecisionReview::ServiceException (HLR)"=>
    # {"BackendServiceException [DR_422]"=>7},
    # "V0::HigherLevelReviews::ContestableIssuesController#index exception Breakers::OutageException (HLR)"=>
    # {"Outage..."=>4},
    # "V0::HigherLevelReviews::ContestableIssuesController#index exception DecisionReview::ServiceException (HLR)"=>
    # {"BackendServiceException [DR_404]"=>2,
    # "BackendServiceException [unmapped_service_exception]"=>1},
    # "V0::HigherLevelReviewsController#create exception Breakers::OutageException (HLR)"=>
    # {"Outage..."=>2}}
    # now you can see that some of those 6 outages occurred at the ContestableIssuesController#index and
    # some at HigherLevelReviewsController#create.
    # Notice the 'wrap' method? 'wrap' adds helper methods to PersonalInformationLog objects. Type 'more' for details.
    # 🍱 get the DecisionReview error counts for the past 15 days
    15.downto(1).each do |d|
    date = d.days.ago.to_date
    count = Q.(:hlr, :nod, date).count
    puts "%s %s %3d" % [date.strftime('%A')[0..2], date, count]
    end
    # 🍕 what helper methods were added to my PiiLog?
    piilogs = wrap Q.(:hlr, 1.week)
    piilogs.first.helper_methods
    (type 'more' for more info)
  RECIPES
end
# Prints the recipes banner to stdout.
def recipes
  puts recipes_string
end
# Returns the extended help text shown by the `more` command.
# FIX: the third example was missing the comma between the SQL fragment and
# its bind value (`'updated_at > ?' Time.zone.yesterday`) — copy-pasting it
# into a console raised a SyntaxError.
def more_string
  <<~MORE
    # Q is an alias for the PersonalInformationLogQueryBuilder --a lambda that makes it easier to write
    # queries for PersonalInformationLog.
    logs = Q.(:hlr, 7.days) # returns a PersonalInformationLog relation
    logs = Q.(:hlr, updated_at: nil) # kwargs work just as they do with where
    logs = Q.(:hlr, :nod).where('updated_at > ?', Time.zone.yesterday) # chainable. (that's HLR /or/ NOD PiiLogs)
    # PersonalInformationLogs are hard to wrangle because so much data gets dumped into their 'data' attributes.
    # In 'decision_review_repl.rb', there are a handful of PersonalInformationLog "wrapper classes" that add
    # helper methods for "specific types" of PersonalInformationLogs (PiiLogs recorded in the controller as
    # opposed to the service, for instance) (Note: I use PiiLog and PersonalInformationLog interchangeably
    # although there /is/ a PiiLog class). You can use '.helper_methods' on a PiiLog or scroll through
    # 'decision_review_repl.rb' to see what they offer, /BUT/, you do *not* need to explicitly call them. The
    # wrap command will /wrap/ a PersonalInformationLog (or Logs) with the appropriate wrapper class. With `wrap`,
    # you end up with an array of PersonalInformationLog /SimpleDelegator/ objects that are simply
    # PersonalInformationLogs with added methods --added methods that are appropriate for each PiiLog.
    # For instance, a PiiLog logged in a controller will have user info helper methods (as the 'data' attribute
    # has user info). If the PiiLog was recorded near the schema code in the Service class, the PiiLog will have
    # schema helper methods that help you navigate the PiiLog's data.
    logs = wrap Q.(:hlr) # returns an array with helper methods added to each PersonalInformationLog
    # depending on its type (was it thrown in a controller, the service, etc.)
    # There are a slew of methods that work with an array of wrapped PiiLogs.
    # for instance:
    hash = by_error_message_category(wrap(Q.call(:hlr, :yesterday))) # notice the "wrap" nestled in there
    # this will return a hash that organizes the array of PiiLogs by their error message category
    # if you pretty-print it, it will look something like:
    p hash
    # {"Outage..."=>
    # [#<PersonalInformationLog:0x0000000000000000
    # id: 0000000,
    # data:
    # {"user"=>
    # {"icn"=> ...
    # "ssn"=> ...
    # ...
    # [#<PersonalInformationLog:0x0000000000000000
    # ...
    # "Timeout"=> ...
    # see "spec/rakelib/piilog_repl/piilog_helpers_spec.rb" for more examples of using
    # the PersonalInformationLogQueryBuilder
  MORE
end
# Prints the extended help text to stdout.
def more
  puts more_string
end
# Ruby pretty print that doesn't echo the value after pretty printing it
# NOTE: intentionally shadows Kernel#p for this console session (the original
# is preserved above as _p). Unlike Kernel#p, takes exactly one argument and
# always returns nil so the console doesn't echo the object a second time.
def p(object)
  _pp object
  nil
end
# pretty print using pretty_generate
# NOTE: intentionally shadows Kernel#pp (original preserved above as _pp).
# Assumes the object is JSON-serializable — TODO confirm for non-Hash inputs.
def pp(object)
  puts JSON.pretty_generate object
end
# Recursively walks a hash: every Array value is replaced by its length and
# every sub-hash is processed the same way. The result is sorted descending
# by total count (hashes are ranked by the sum of all integers inside them).
def counts(hash)
  counted = hash.transform_values do |value|
    case value
    when Array then value.count
    when Hash then counts(value)
    else value
    end
  end
  counted.sort_by do |_key, value|
    -(value.is_a?(Hash) ? total_in_tree(value) : value)
  end.to_h
end
# Recursively sums every Integer value found anywhere in the (possibly
# nested) hash; non-integer leaves contribute 0.
def total_in_tree(hash)
  hash.sum(0) do |_key, value|
    case value
    when Array, Hash then total_in_tree(value)
    when Integer then value
    else 0
    end
  end
end
# PersonalInformationLog wrapper (SimpleDelegator) adding convenience
# accessors for the 'data' payload of decision-review PiiLogs.
# FIX: use core String#start_with? instead of the ActiveSupport-only
# `starts_with?` alias (same semantics under Rails, portable elsewhere),
# and `error&.[]` instead of the indirect `error&.send(:[], ...)`.
class PiiLog < SimpleDelegator
  # Raw error hash recorded in the log's data payload (may be nil).
  def error
    data['error']
  end

  # Error message string; '' when no error/message was recorded.
  def error_message
    error&.[]('message').to_s
  end

  # Collapses noisy messages into a short grouping category: outage banners
  # become 'Outage...', BackendServiceExceptions are grouped by their :code,
  # anything else is returned verbatim.
  def error_message_category
    if error_message.start_with? 'Outage detected on DecisionReview beginning at'
      'Outage...'
    elsif error_message.start_with? 'BackendServiceException: {:'
      match = error_message.match(/:code=>"(?<code>.*?)"/)
      match ? "BackendServiceException [#{match[:code]}]" : error_message
    else
      error_message
    end
  end

  # Prints the methods this wrapper adds on top of PersonalInformationLog.
  def helper_methods
    puts (methods - PersonalInformationLog.instance_methods - SimpleDelegator.instance_methods).sort
  end
end
# When using a PersonalInformationLog, everything pretty much gets dumped into 'data'.
# The shape of 'data' depends on where in the code a PersonalInformationLog is being created.
# The following classes help wrangle these deep hashes.
# When you use 'new' with the following classes (and pass in a PersonalInformationLog)
# what you get back out is still a PersonalInformationLog, just with some added helper methods.
# Wrapper for PersonalInformationLogs created from DecisionReview controller exceptions
# PersonalInformationLog wrapper
class ControllerException < PiiLog
  # Hash of user identifiers captured with the exception ('data' -> 'user').
  def user_ids
    data['user']
  end

  # All user identifier values joined into one pipe-delimited string.
  def user_id
    # user_ids.values.reduce('') { |acc, id| acc + (id ? id.to_s : '|') }
    user_ids.values.join('|')
  end

  # Individual error entries nested under the logged error hash.
  def errors
    error['errors']
  end

  # Backtrace captured with the logged error.
  def backtrace
    error['backtrace']
  end
end
# for HigherLevelReviewsController#create
# and NoticeOfDisagreementsController#create
# PersonalInformationLog wrapper
class ControllerCreateException < ControllerException
  # Request body that was submitted when the exception occurred.
  def body
    data.dig('additional_data', 'request', 'body')
  end

  # Raw response body Lighthouse returned, as captured on the logged error.
  def lighthouse_response
    data.dig('error', 'original_body')
  end
end
# PersonalInformationLog wrapper for HLR ContestableIssuesController#index
# exceptions.
class HlrContestableIssuesControllerIndexException < ControllerException
  # Benefit type supplied on the request. FIX: uses dig so a log without
  # 'additional_data' yields nil instead of raising NoMethodError —
  # consistent with the dig style used by the sibling wrappers.
  def benefit_type
    data.dig('additional_data', 'benefit_type')
  end
end
# PersonalInformationLog wrapper
# For logs recorded when a GetContestableIssues response fails schema validation.
class GetContestableIssuesSchemaValidationError < PiiLog
  # The JSON document that failed validation.
  def json
    data['json']
  end

  # The failing JSON *is* the Lighthouse response — alias-style accessor.
  def lighthouse_response
    json
  end

  # Schema the response was validated against.
  def schema
    data['schema']
  end

  # Validation error entries produced by the schema check.
  def errors
    data['errors']
  end

  # Contestable issues contained in the response ('json' -> 'data').
  def contestable_issues
    data.dig('json', 'data')
  end

  # JSON pointer into the document for each validation error.
  def data_pointers
    errors.pluck('data_pointer')
  end

  # The sub-schema each validation error was checked against.
  def local_schemas
    errors.pluck('schema')
  end
end
# Picks the wrapper class that matches this PersonalInformationLog and wraps
# it. When nothing matches: raises by default, or returns the log unwrapped
# when raise_if_no_suitable_wrapper_found is false.
def wrap_personal_information_log(value, raise_if_no_suitable_wrapper_found: true)
  wrapper_class =
    if value.data.dig('schema', 'properties', 'data', 'items', 'properties', 'type', 'enum', 0) == 'contestableIssue'
      GetContestableIssuesSchemaValidationError
    elsif value.error_class.include?('HigherLevelReviews::ContestableIssuesController#index exception')
      HlrContestableIssuesControllerIndexException
    elsif value.error_class.include?('ContestableIssuesController#index exception')
      ControllerException
    elsif value.error_class.include?('HigherLevelReviewsController#create exception')
      ControllerCreateException
    elsif raise_if_no_suitable_wrapper_found
      raise "couldn't find a suitable wrapper for #{value.inspect}"
    else
      return value
    end
  wrapper_class.new value
end
# Maps a PersonalInformationLog relation into an array of wrapped logs.
def wrap_personal_information_logs(relation)
  relation.map { |log| wrap_personal_information_log log }
end
# Wraps either a single PersonalInformationLog or a whole relation of them.
def wrap(value)
  return wrap_personal_information_log(value) if value.is_a?(PersonalInformationLog)

  wrap_personal_information_logs(value)
end
# the following methods take an array of wrapped-PersonalInformationLogs
# Groups an array of wrapped PiiLogs into a two-level hash:
# { error_class => { error_message_category => [piilogs] } }.
# FIX: the inner Hash.new block reused the outer block's parameter names
# (hash, key), shadowing them (Ruby emits a warning and it invites bugs) —
# the nested default blocks now use distinct names.
def by_error_class(array)
  grouped = Hash.new do |outer, error_class|
    outer[error_class] = Hash.new { |inner, category| inner[category] = [] }
  end
  array.each do |piilog|
    grouped[piilog.error_class][piilog.error_message_category] << piilog
  end
  grouped
end
# Buckets an array of wrapped PiiLogs by error message category:
# { category => [piilogs] }. Unknown keys default to a fresh empty array.
def by_error_message_category(array)
  array.each_with_object(Hash.new { |buckets, category| buckets[category] = [] }) do |piilog, grouped|
    grouped[piilog.error_message_category] << piilog
  end
end
# Prints the two-level error-class grouping (see #by_error_class).
def puts_by_error_class(array)
  puts by_error_class array
end

# Two-level error-class grouping with each PiiLog list replaced by its count.
def by_error_class_counts(array)
  counts by_error_class array
end

# Prints the counted two-level error-class grouping.
def puts_by_error_class_counts(array)
  puts counts by_error_class array
end

# Prints the per-category grouping (see #by_error_message_category).
def puts_by_error_message_category(array)
  puts by_error_message_category array
end

# Per-category grouping with each PiiLog list replaced by its count.
def by_error_message_category_counts(array)
  counts by_error_message_category array
end

# Prints the counted per-category grouping.
def puts_by_error_message_category_counts(array)
  puts counts by_error_message_category array
end
# Terse alias so console queries read as Q.(...).
Q = PersonalInformationLogQueryBuilder
# Show the recipes banner whenever this file is required into a console.
puts recipes
# rubocop:enable Style/MultilineBlockChain
|
0
|
code_files/vets-api-private
|
code_files/vets-api-private/rakelib/form526.rake
|
# frozen_string_literal: true
require 'csv'
require 'date'
require 'reports/uploader'
# Used to provide feedback while processing
# a collection of Form526Submission instances.
@form526_verbose = ENV.key?('FORM526_VERBOSE')
namespace :form526 do
desc <<~HEREDOC
Get all submissions within a date period:
rake form526:submissions[2021-01-23,2021-01-24]
Last 30 days:
rake form526:submissions[]
BDD stats mode: (argument order doesn't matter)
rake form526:submissions[bdd]
rake form526:submissions[bdd,2021-02-11] # this date and beyond
rake form526:submissions[bdd,2021-02-10,2021-02-11]
rake form526:submissions[bdd,2021-02-10,2021-02-11,unredacted]
HEREDOC
task submissions: :environment do |_, args|
# rubocop:disable Style/FormatStringToken
# This forces string token formatting. Our examples don't match
# what this style is enforcing
# rubocop: format('%<greeting>s', greeting: 'Hello')
# vets-api example: printf "%-20s %s\n", header, total
##### RUN-IN-CONSOLE HELPER CODE ####
## When pasting this task into a console, this snippet saves up the output to
## print at the very end (instead of being printed within the code your pasting).
## After playing around with $stdout, I found temporarily redefining "puts" more
## closely accomplishes the behavior I was trying to achieve.
#
# OUTPUT = ""
# def puts(string = "")
# OUTPUT << string
# OUTPUT << "\n"
# end
#
## Set the args:
#
# args = { first: '2020-12-25' }
# args[:second] = args[:first]
#######################################
unless defined? F526_ROW
F526_ROW = {
order: %i[created_at updated_at id c_id p_id complete version],
format_strings: {
created_at: '%-24s',
updated_at: '%-24s',
id: '%-15s',
c_id: '%-10s',
p_id: '%-15s',
complete: '%-18s',
version: '%s'
},
headers: {
created_at: 'created at:',
updated_at: 'updated at:',
id: 'submission id:',
c_id: 'claim id:',
p_id: 'participant id:',
complete: 'workflow complete:',
version: 'form version:'
}
}.freeze
end
unless defined? F526_OPTIONS_STRUCT
F526_OPTIONS_STRUCT = Struct.new(
:print_header,
:print_hr,
:print_row,
:print_total,
:ignore_submission,
:submissions,
:success_failure_totals_header_string,
keyword_init: true
)
end
# Builds the options struct for plain date-range mode: fixed-width columns,
# submissions created between the (optional) first/second dates in args_array
# (defaulting to the last 30 days), and no submissions ignored.
def date_range_mode(args_array)
  start_date = args_array.first&.to_date || 30.days.ago.utc
  end_date = args_array.second&.to_date || Time.zone.now.utc
  separator = ' '
  printf_string = F526_ROW[:order].map { |key| F526_ROW[:format_strings][key] }.join(separator)
  print_row = ->(**fields) { puts format(printf_string, *F526_ROW[:order].map { |key| fields[key] }) }
  F526_OPTIONS_STRUCT.new(
    print_header: -> { print_row.call(**F526_ROW[:headers]) },
    print_hr: -> { puts '------------------------------------------------------------' },
    print_row:,
    print_total: ->(header, total) { puts format("%-20s#{separator}%s", "#{header}:", total) },
    ignore_submission: ->(_) { false },
    submissions: Form526Submission.where(created_at: [start_date.beginning_of_day..end_date.end_of_day]),
    success_failure_totals_header_string: "* Job Success/Failure counts between #{start_date} - #{end_date} *"
  )
end
# Builds the options struct for BDD stats mode: CSV-style output, non-BDD
# submissions ignored, and participant ids redacted unless the 'unr' flag
# is present in args_array.
def bdd_stats_mode(args_array)
  dates = dates_from_array args_array
  # rubocop:disable Layout/LineLength
  prnt = ->(**fields) { puts F526_ROW[:order].map { |key| fields[key].try(:iso8601) || fields[key].inspect }.join(',') }
  # rubocop:enable Layout/LineLength
  F526_OPTIONS_STRUCT.new(
    print_header: -> { puts F526_ROW[:order].map { |key| F526_ROW[:headers][key] }.join(',') },
    print_hr: -> { puts },
    print_row: (
      if unredacted_flag_present?(args_array)
        prnt
      else
        # mask the first 5 characters of the participant id
        ->(**fields) { prnt.call(**fields.merge(p_id: "*****#{fields[:p_id].to_s[5..]}")) }
      end
    ),
    print_total: ->(header, total) { puts "#{header.to_s.strip},#{total}" },
    ignore_submission: ->(submission) { submission.bdd? ? false : submission.id },
    submissions: Form526Submission.where(created_at: [
      (((dates.first || '2020-11-01'.to_date).beginning_of_day)..
      ((dates.second || (Time.zone.now.utc + 1.day)).end_of_day))
    ]),
    success_failure_totals_header_string: '* Job Success/Failure counts *'
  )
end
# Resolves the [start, end] dates for BDD stats mode from raw task args;
# [] when the 'bdd' flag is absent. NOTE(review): duplicates the date
# defaulting inside #bdd_stats_mode and appears unused within this file —
# TODO confirm callers before removing.
def bdd_stats_mode_dates_from_args(args)
  args_array = args.values_at :first, :second, :third
  return [] unless bdd_flag_present? args_array

  dates = dates_from_array args_array
  start_date = dates.first || '2020-11-01'.to_date
  end_date = dates.second || (Time.zone.now.utc + 1.day)
  [start_date, end_date]
end
# Given a hash keyed by ISO date strings, returns a new hash covering every
# day from the earliest to the latest key, filling absent days with 0.
# Relies on the sibling #tomorrow helper to advance one day at a time.
def missing_dates_as_zero(hash_with_date_keys)
  dates = hash_with_date_keys.keys.sort
  return {} if dates.blank?

  earliest_date = dates.first
  latest_date = dates.last
  # sanity check — the keys must be coercible, ordered dates or the loop
  # below could never terminate
  raise unless earliest_date.to_date <= latest_date.to_date

  new_hash = {}
  date = earliest_date
  loop do
    new_hash[date] = hash_with_date_keys[date] || 0
    break if date == latest_date

    date = tomorrow date
  end
  new_hash
end
# ISO-8601 date string for anything date-coercible; nil otherwise
# (ActiveSupport #try swallows the NoMethodError for non-coercible values).
def to_date_string(value)
  value.try(:to_date)&.iso8601
end

# ISO-8601 string for the day after the given date string.
def tomorrow(date_string)
  to_date_string date_string.to_date.tomorrow
end
# True when any CLI arg mentions 'bdd' (switches the task into BDD stats mode).
def bdd_flag_present?(array)
  flag_present_in_array? 'bdd', array
end

# True when any CLI arg mentions 'unr' (print unredacted participant ids).
def unredacted_flag_present?(array)
  flag_present_in_array? 'unr', array
end
# True when any element's string form contains +flag+, case-insensitively.
# nil elements are skipped via the safe-navigation chain.
def flag_present_in_array?(flag, array)
  array.any? do |value|
    value&.to_s&.downcase&.include?(flag)
  end
end
# Collects, in order, every element that can be coerced via #to_date,
# silently skipping anything that raises during coercion.
def dates_from_array(array)
  array.filter_map do |value|
    value.to_date
  rescue
    nil
  end
end
options = bdd_flag_present?(args.extras) ? bdd_stats_mode(args.extras) : date_range_mode(args.extras)
options.print_hr.call
options.print_header.call
outage_errors = 0
ancillary_job_errors = Hash.new 0
other_errors = 0
submissions_per_day = Hash.new 0
ids_to_ignore = []
# Scoped order are ignored for find_each. Its forced to be batch order (on primary key)
# This should be fine as created_at dates correlate directly to PKs
options.submissions.find_each do |submission|
if (id_to_ignore = options.ignore_submission.call(submission))
ids_to_ignore << id_to_ignore
next
end
submissions_per_day[to_date_string(submission.created_at)] += 1
submission.form526_job_statuses.where.not(error_message: [nil, '']).find_each do |job_status|
if job_status.job_class == 'SubmitForm526AllClaim'
job_status.error_message.include?('.serviceError') ? (outage_errors += 1) : (other_errors += 1)
else
ancillary_job_errors[job_status.job_class] += 1
end
end
version = submission.bdd? ? 'BDD' : 'ALL'
options.print_row.call(
created_at: submission.created_at,
updated_at: submission.updated_at,
id: submission.id,
c_id: submission.submitted_claim_id,
p_id: submission.auth_headers['va_eauth_pid'],
complete: submission.workflow_complete,
version:
)
end
options.submissions = options.submissions.where.not(id: ids_to_ignore) if ids_to_ignore.present?
total_jobs = options.submissions.count
success_jobs = options.submissions.where(workflow_complete: true)
success_jobs_count = success_jobs.count
fail_jobs = total_jobs - success_jobs.count
total_users_submitting = options.submissions.count('DISTINCT user_uuid')
total_successful_users_submitting = success_jobs.count('DISTINCT user_uuid')
user_success_rate = (total_successful_users_submitting.to_f / total_users_submitting)
options.print_hr.call
puts options.success_failure_totals_header_string
options.print_total.call('Total Jobs', total_jobs)
options.print_total.call('Successful Jobs', success_jobs_count)
options.print_total.call('Failed Jobs', fail_jobs)
options.print_total.call('User Success Rate', user_success_rate)
options.print_hr.call
options.print_total.call('Total Users Submitted', total_users_submitting)
options.print_total.call('Total Users Submitted Successfully', total_successful_users_submitting)
options.print_total.call('User Success rate', user_success_rate)
options.print_hr.call
puts '* Failure Counts for form526 Submission Job (not including uploads/cleanup/etc...) *'
options.print_total.call('Outage Failures', outage_errors)
options.print_total.call('Other Failures', other_errors)
puts 'Ancillary Job Errors:'
ancillary_job_errors.each do |class_name, error_count|
options.print_total.call " #{class_name}", error_count
end
options.print_hr.call
puts '* Daily Totals *'
missing_dates_as_zero(submissions_per_day).each do |date, submission_count|
options.print_total.call date, submission_count
end
#### RUN-IN-CONSOLE HELPER CODE ####
# STDOUT.puts OUTPUT;nil
######################################
end
desc 'Get an error report within a given date period. [<start date: yyyy-mm-dd>,<end date: yyyy-mm-dd>,<flag>]'
task :errors, %i[start_date end_date flag] => [:environment] do |_, args|
# rubocop:disable Metrics/ParameterLists
def print_row(sub_id, evss_id, user_uuid, created_at, is_bdd, job_class)
printf "%-15s %-45s %-35s %-25s %-10s %-20s\n", sub_id, evss_id, user_uuid, created_at, is_bdd, job_class
end
# rubocop:enable Metrics/ParameterLists
# rubocop:enable Style/FormatStringToken
# Prints each error group, most-affected first: the message, its unique
# participant count, then one formatted row per affected submission.
def print_errors(errors)
  errors.sort_by { |_message, hash| -hash[:submission_ids].length }.each do |(k, v)|
    puts k
    puts '*****************'
    puts "Unique Participant ID count: #{v[:participant_ids].count}"
    print_row('submission_id:', 'evss_id', 'user_uuid', 'created_at:', 'is_bdd?', 'job_class')
    v[:submission_ids].each do |submission|
      print_row(submission[:sub_id],
                submission[:evss_id],
                submission[:user_uuid],
                submission[:date],
                submission[:is_bdd],
                submission[:job_class])
    end
    puts '*****************'
    puts ''
  end
end
# Builds a grouping key for one EVSS error-message hash.
# Returns nil for warnings. Strips array indexes and backslashes from the
# key, and appends the text (minus any trailing GUID) — except for the
# treatedDisabilityNames message, which is grouped by key alone.
# FIX: the code already anticipated a nil 'key' (&.gsub) but then crashed on
# `message +=`; nil 'key'/'text' are now handled without raising.
def message_string(msg)
  return nil if msg['severity'] == 'WARN'

  message = msg['key']&.gsub(/\[(\d*)\]|\\/, '')
  # strip the GUID from BGS errors for grouping purposes
  # don't show disability names, for better grouping. Can be removed after we fix inflection issue
  unless message == 'form526.treatments.treatedDisabilityNames.isInvalidValue'
    message = "#{message}#{msg['text']&.gsub(/GUID.*/, '')}"
  end
  message
end
start_date = args[:start_date]&.to_date || 30.days.ago.utc
end_date = args[:end_date]&.to_date || Time.zone.now.utc
errors = Hash.new { |hash, message_name| hash[message_name] = { submission_ids: [], participant_ids: Set[] } }
submissions = Form526Submission.where(
'created_at BETWEEN ? AND ?', start_date.beginning_of_day, end_date.end_of_day
)
submissions.find_each do |submission|
submit_jobs = submission.form526_job_statuses.where(
job_class: Form526Submission::SUBMIT_FORM_526_JOB_CLASSES
)
ancillary_jobs = submission.form526_job_statuses.where.not(
job_class: Form526Submission::SUBMIT_FORM_526_JOB_CLASSES
)
unsuccessful_submit_jobs, unsuccessful_ancillary_jobs = [submit_jobs, ancillary_jobs].map do |jobs|
jobs.where.not status: [Form526JobStatus::STATUS[:try], Form526JobStatus::STATUS[:success]]
end
in_progress_submit_jobs = submit_jobs.where status: Form526JobStatus::STATUS[:try]
the_submission_has_been_successfully_submitted = submission.a_submit_form_526_job_succeeded?
it_is_still_trying = in_progress_submit_jobs.present?
# we're not interested in unsuccessful submit jobs if submit eventually succeeded (or is still being attempted)
unsuccessful_jobs = if the_submission_has_been_successfully_submitted || it_is_still_trying
unsuccessful_ancillary_jobs
else
unsuccessful_ancillary_jobs.or unsuccessful_submit_jobs
end
unsuccessful_jobs.each do |job_status|
# Check if its an EVSS error and parse, otherwise store the entire message
messages = if job_status.error_message.include?('=>') &&
job_status.error_message.exclude?('BackendServiceException')
JSON.parse(job_status.error_message.gsub('=>', ':')).collect { |message| message_string(message) }
else
[job_status.error_message]
end
messages.each do |message|
errors[message][:submission_ids].append(
sub_id: submission.id,
p_id: submission.auth_headers['va_eauth_pid'],
evss_id: submission.auth_headers['va_eauth_service_transaction_id'],
user_uuid: submission.user_uuid,
date: submission.created_at,
is_bdd: submission.bdd?,
job_class: job_status.job_class
)
errors[message][:participant_ids].add(submission.auth_headers['va_eauth_pid'])
end
end
end
if args[:flag]&.downcase&.include?('j')
puts errors.to_json
next
end
puts '------------------------------------------------------------'
puts "* Form526 Submission Errors from #{start_date} to #{end_date} *"
puts '------------------------------------------------------------'
puts ''
print_errors(errors)
end
desc 'Get one or more submission details given an array of ids (either submission_ids or job_ids)'
task submission: :environment do |_, args|
raise 'No submission ids provided' unless args.extras.count.positive?
# True when obj round-trips unchanged through Integer conversion, i.e. it is
# a plain base-10 integer (submission id) rather than a job-id string.
def integer?(obj)
  obj.to_i.to_s == obj.to_s
end
Rails.application.eager_load!
args.extras.each do |id|
submission = if integer?(id)
Form526Submission.find(id)
else
Form526JobStatus.where(job_id: id).first.form526_submission
end
saved_claim_form = submission.saved_claim.parsed_form
saved_claim_form['veteran'] = 'FILTERED'
submitted_claim_form = submission.form
submitted_claim_form['form526']['form526']['directDeposit'] = 'FILTERED'
submitted_claim_form['form526']['form526']['veteran'] = 'FILTERED'
auth_headers = JSON.parse(submission.auth_headers_json)
# There have been prod instances of users not having a ssn
ssn = auth_headers['va_eauth_pnid'] || ''
puts '------------------------------------------------------------'
puts "Submission (#{submission.id}):\n\n"
puts "user uuid: #{submission.user_uuid}"
puts "user edipi: #{auth_headers['va_eauth_dodedipnid']}"
puts "user participant id: #{auth_headers['va_eauth_pid']}"
puts "user ssn: #{ssn.gsub(/(?=\d{5})\d/, '*')}"
puts "saved claim id: #{submission.saved_claim_id}"
puts "submitted claim id: #{submission.submitted_claim_id}"
puts "workflow complete: #{submission.workflow_complete}"
puts "created at: #{submission.created_at}"
puts "updated at: #{submission.updated_at}"
puts "\n"
puts '----------------------------------------'
puts "Jobs:\n\n"
submission.form526_job_statuses.each do |s|
puts s.job_class
puts " status: #{s.status}"
puts " error: #{s.error_class}" if s.error_class
puts " message: #{s.error_message}" if s.error_message
puts " updated at: #{s.updated_at}"
puts "\n"
end
puts '----------------------------------------'
puts "Form From User JSON:\n\n"
puts JSON.pretty_generate(saved_claim_form)
puts "\n\n"
puts '----------------------------------------'
puts "Translated form for EVSS JSON:\n\n"
puts JSON.pretty_generate(submitted_claim_form)
puts "\n\n"
end
end
# context in https://github.com/department-of-veterans-affairs/va.gov-team/issues/29651
desc 'get a csv of all vets affected by BIRLS id mismatch errors since date'
task :birls_errors, [:start_date] => [:environment] do |_, args|
start_date = args[:start_date]&.to_date || 30.days.ago.utc
fss = Form526JobStatus.where(status: 'exhausted',
updated_at: [start_date..Time.now.utc])
CSV.open('tmp/birls_errors.csv', 'wb') do |csv|
csv << %w[veteran_name edipi birls_id ssn]
fss.each do |form_status|
fs = form_status.submission
next unless fs
ssn = fs.auth_headers['va_eauth_pnid']
birls_id = fs.auth_headers['va_eauth_birlsfilenumber']
edipi = fs.auth_headers['va_eauth_dodedipnid']
vname = "#{fs.auth_headers['va_eauth_firstName']} #{fs.auth_headers['va_eauth_lastName']}"
diff = StringHelpers.levenshtein_distance(birls_id, ssn)
csv << [vname, edipi, birls_id, ssn] if diff.positive? && diff < 3
end
end
puts 'tmp/birls_errors.csv'
end
# context in https://github.com/department-of-veterans-affairs/va.gov-team/issues/11353
desc 'get a csv of all vets affected by payee code errors with multiple corp ids since date'
task :corp_id_errors, [:start_date] => [:environment] do |_, args|
start_date = args[:start_date]&.to_date || 30.days.ago.utc
fss = Form526JobStatus.where(status: 'non_retryable_error',
updated_at: [start_date..Time.now.utc]).where("error_message like '%Payee code%'")
file_path = 'tmp/corp_errors.csv'
edipis = []
CSV.open(file_path, 'wb') do |csv|
csv << %w[veteran_name edipi corp_ids]
fss.each do |form_status|
fs = form_status.submission
next unless fs
edipi = fs.auth_headers['va_eauth_dodedipnid']
if edipis.include? edipi
next
else
edipis << edipi
end
response = MPI::Service.new.find_profile_by_edipi(edipi:).profile
active_corp_ids = response.full_mvi_ids.grep(/\d*\^PI\^200CORP\^USVBA\^A/)
vname = "#{fs.auth_headers['va_eauth_firstName']} #{fs.auth_headers['va_eauth_lastName']}"
csv << [vname, edipi, active_corp_ids] if active_corp_ids.count > 1
end
end
puts file_path
end
desc 'get a csv of all vets affected by PIF errors since date'
# Writes tmp/pif_errors.csv with one row per distinct veteran whose 526
# submission exhausted with a PIF error since start_date.
# FIXES: (1) the MPI fallback read `fs['va_eauth_dodedipnid']` — a
# nonexistent model attribute, so the EDIPI passed to MPI was always nil;
# it now reads from auth_headers like every other lookup in this task.
# (2) the CSV header declares 4 columns but rows only had 3 — the
# submission id is now appended to match `form526_submission_id`.
task :pif_errors, [:start_date] => [:environment] do |_, args|
  start_date = args[:start_date]&.to_date || 30.days.ago.utc
  fss = Form526JobStatus.where(status: 'exhausted',
                               updated_at: [start_date..Time.now.utc]).where("error_message like '%PIF%'")
  ssns = []
  CSV.open('tmp/pif_errors.csv', 'wb') do |csv|
    csv << %w[veteran_name ssn soj form526_submission_id]
    fss.each do |form_status|
      fs = form_status.submission
      ssn = fs.auth_headers['va_eauth_pnid']
      # report each veteran only once
      next if ssns.include? ssn

      ssns << ssn
      vname = "#{fs.auth_headers['va_eauth_firstName']} #{fs.auth_headers['va_eauth_lastName']}"
      icn = fs.user_account&.icn
      if icn.blank?
        # fall back to an MPI lookup by EDIPI when no ICN is on file
        mpi_response = MPI::Service.new.find_profile_by_edipi(edipi: fs.auth_headers['va_eauth_dodedipnid'])
        if mpi_response.ok? && mpi_response.profile.icn.present?
          icn = mpi_response.profile.icn
        else
          puts "icn blank #{fs.id}"
          next
        end
      end
      user = OpenStruct.new(participant_id: fs.auth_headers['va_eauth_pid'], icn:, common_name: vname,
                            ssn:)
      # station of jurisdiction: prefer the award record, else derive it
      # from the mailing address on the submitted form
      award_response = BGS::AwardsService.new(user).get_awards
      if award_response
        soj = award_response[:award_stn_nbr]
      else
        addr = fs.form.dig('form526', 'form526', 'veteran', 'currentMailingAddress')
        soj = BGS::Service.new(user).get_regional_office_by_zip_code(addr['zipFirstFive'], addr['country'],
                                                                     addr['state'], 'CP', ssn)
      end
      csv << [vname, ssn, soj, fs.id]
    rescue
      # best-effort report: skip any veteran whose lookups fail
      puts "failed for #{form_status.id}"
    end
  end
  puts 'csv complete in tmp/pif_errors.csv'
end
# EVSS has asked us to re-upload files that were corrupted upstream
desc 'Resubmit uploads to EVSS for submitted claims given an array of saved_claim_ids'
task retry_corrupted_uploads: :environment do |_, args|
raise 'No saved_claim_ids provided' unless args.extras.count.positive?
form_submissions = Form526Submission.where(saved_claim_id: args.extras)
form_submissions.each do |form_submission|
form_submission.send(:submit_uploads)
puts "reuploaded files for saved_claim_id #{form_submission.saved_claim_id}"
end
puts "reuploaded files for #{form_submissions.count} submissions"
end
desc 'Convert SIP data to camel case and fix checkboxes [/export/path.csv]'
task :convert_sip_data, [:csv_path] => :environment do |_, args|
raise 'No CSV path provided' unless args[:csv_path]
ids = args.extras || []
# Camelizes every key in val using OliveBranch's recursive transform.
def to_olivebranch_case(val)
  OliveBranch::Transformations.transform(
    val,
    OliveBranch::Transformations.method(:camelize)
  )
end
# Re-applies VA-specific key casing ('VA' -> 'Va') that OliveBranch
# camelization mangles, by round-tripping the hash through JSON.
# NOTE(review): despite the bang name this does NOT mutate its argument —
# it returns a newly parsed hash.
def un_camel_va_keys!(hash)
  json = hash.to_json
  # rubocop:disable Style/PerlBackrefs
  # gsub with a block explicitly sets backrefs correctly https://ruby-doc.org/core-2.6.6/String.html#method-i-gsub
  json.gsub!(OliveBranch::Middleware::VA_KEY_REGEX) do
    key = $1
    "#{key.gsub('VA', 'Va')}:"
  end
  JSON.parse(json)
  # rubocop:enable Style/PerlBackrefs
end
# Every disability name on the form: new conditions followed by rated
# disabilities; missing sections contribute nothing.
def get_disability_array(form_data_hash)
  conditions = form_data_hash['newDisabilities']&.pluck('condition')
  rated = form_data_hash['ratedDisabilities']&.pluck('name')
  (conditions || []) + (rated || [])
end
# downcase and remove everything but letters and numbers; nil stays nil
def simplify_string(string)
  return nil if string.nil?

  string.downcase.gsub(/[^a-z0-9]/, '')
end
# We want the original version of the string, downcased as the json key for checkboxes
# Maps simplified disability names back to their original (downcased) form.
def get_dis_translation_hash(disability_array)
  disability_array.each_with_object({}) do |dis, translation|
    translation[simplify_string(dis)] = dis&.downcase
  end
end
# Rewrites each vaTreatmentFacilities -> treatedDisabilityNames checkbox key
# back to its original (downcased) disability name via dis_translation_hash.
# Keys that cannot be mapped are dropped. Mutates form_data_hash in place;
# returns true when anything was changed.
def fix_treatment_facilities_disability_name(form_data_hash, dis_translation_hash)
  transformed = false
  # fix vaTreatmentFacilities -> treatedDisabilityNames
  # this should never happen, just want to confirm
  form_data_hash['vaTreatmentFacilities']&.each do |va_treatment_facilities|
    new_treated_disability_names = {}
    if va_treatment_facilities['treatedDisabilityNames']
      va_treatment_facilities['treatedDisabilityNames'].each do |disability_name, value|
        if dis_translation_hash.values.include? disability_name
          # already the canonical downcased name — keep as-is
          new_treated_disability_names[disability_name] = value
        else
          transformed = true
          original_disability_name = dis_translation_hash[simplify_string(disability_name)]
          # unmappable keys are intentionally dropped
          new_treated_disability_names[original_disability_name] = value unless original_disability_name.nil?
        end
      end
      va_treatment_facilities['treatedDisabilityNames'] = new_treated_disability_names
    end
  end
  transformed
end
# Applies the same checkbox-key canonicalization as
# fix_treatment_facilities_disability_name, but for the POW disabilities
# stored under view:isPow -> powDisabilities.
# Returns true when at least one key was translated or dropped.
def fix_pow_disabilities(form_data_hash, dis_translation_hash)
  transformed = false
  pow = form_data_hash.dig('view:isPow', 'powDisabilities')
  return transformed unless pow
  remapped = pow.each_with_object({}) do |(disability_name, value), acc|
    if dis_translation_hash.value?(disability_name)
      acc[disability_name] = value
    else
      transformed = true
      canonical = dis_translation_hash[simplify_string(disability_name)]
      acc[canonical] = value unless canonical.nil?
    end
  end
  form_data_hash['view:isPow']['powDisabilities'] = remapped
  transformed
end
# get all of the forms that have not yet been converted.
ipf = InProgressForm.where(form_id: FormProfiles::VA526ez::FORM_ID)
# Either the explicitly requested IDs, or every form with a return_url in metadata.
in_progress_forms = if ids.present?
ipf.where(id: ids)
else
ipf.where("metadata -> 'return_url' is not null")
end
# Writes a CSV of every form whose disability checkbox keys were transformed,
# and persists the re-inflected form data for every selected form.
CSV.open(args[:csv_path], 'wb') do |csv|
csv << %w[in_progress_form_id in_progress_form_user_uuid email_address]
in_progress_forms.each do |in_progress_form|
in_progress_form.metadata = to_olivebranch_case(in_progress_form.metadata)
form_data_hash = un_camel_va_keys!(to_olivebranch_case(JSON.parse(in_progress_form.form_data)))
disability_array = get_disability_array(form_data_hash)
dis_translation_hash = get_dis_translation_hash(disability_array)
treatment_facilities_transformed = fix_treatment_facilities_disability_name(form_data_hash,
dis_translation_hash)
pow_transformed = fix_pow_disabilities(form_data_hash,
dis_translation_hash)
fixed_va_inflection = OliveBranch::Middleware.send(:un_camel_va_keys!, form_data_hash.to_json)
# Only forms that actually changed are recorded in the CSV report.
if treatment_facilities_transformed || pow_transformed
csv << [in_progress_form.id,
in_progress_form.user_uuid,
form_data_hash.dig('phoneAndEmail', 'emailAddress')]
end
in_progress_form.form_data = fixed_va_inflection
in_progress_form.save!
end
end
end
desc 'pretty print MPI profile for submission'
task mpi: :environment do |_, args|
# Pretty-prints the MPI profile for one Form526Submission: looks up by ICN when
# the submission has a user account, otherwise by EDIPI from the auth headers.
def puts_mpi_profile(submission)
edipi = submission.auth_headers['va_eauth_dodedipnid']
# NOTE(review): `Error` is not defined anywhere in this file — if edipi is
# missing this will surface as a NameError unless an `Error` constant is
# resolvable at runtime; confirm the intended exception class.
raise Error, 'no edipi' unless edipi
ids = { edipi:, icn: submission.user_account&.icn }
pp mpi_profile(ids).as_json
end
# Fetches the MPI profile, preferring ICN lookup over EDIPI lookup.
# Raises the response's error when the MPI lookup failed.
def mpi_profile(ids)
if ids[:icn]
find_profile_response = MPI::Service.new.find_profile_by_identifier(identifier: ids[:icn],
identifier_type: MPI::Constants::ICN)
else
find_profile_response = MPI::Service.new.find_profile_by_edipi(edipi: ids[:edipi])
end
raise find_profile_response.error if find_profile_response.error
find_profile_response.profile
end
Form526Submission.where(id: args.extras).find_each { |sub| puts_mpi_profile sub }
end
# Check a selected collection of form526_submissions
# (class: Form526Submission) for valid form526 content.
#
desc 'Check selected form526 submissions for errors'
task :lh_validate, %i[local_file start_date end_date] => :environment do |task_name, args|
params = args.to_h
# Require 2 or 3 args (end_date is optional); otherwise print usage and abort.
unless (2..3).include?(params.size)
abort_with_message <<~USAGE
Send records from the form526_submissions table through
the lighthouse validate endpoint selecting records based
on their created_at timestamp. Produces a CSV
file that shows the results.
Usage: bundle exec rake #{task_name}[local_file,YYYYMMDD,YYYYNNDD]
local_file(String) when 'local' the CSV file is saved to
the local file system. Otherwise it is uploaded to S3.
The filename will be in the form of
"form526_YYYY-MM-DD_YYYY-MM-DD_validation.csv" using the
start and end dates. If the end date is not
provided the value "9999-99-99" will be used
in the file name.
These two parameters control which records from the
form526_submissions table are selected based upon
the record's created_at value.
start_date(YYYYMMDD)
end_date(YYYYMMDD) **Optional**
When the end date is not provided the selections
of records will be on or after start date.
When present the query is between start and end dates inclusive.
form526_verbose? is #{form526_verbose?}
Export or unset system environment variable FORM526_VERGOSE as desired
to get feedback while processing records.
USAGE
end
# NOTE(review): the usage text above says "FORM526_VERGOSE" (sic) and
# "YYYYNNDD" (sic); also form526_verbose? reads @form526_verbose, not an env
# var — the wiring looks stale; confirm before relying on verbosity.
@local_file = validate_local_file(params[:local_file])
start_date = validate_yyyymmdd(params[:start_date])
end_date = (validate_yyyymmdd(params[:end_date]) if params[:end_date])
if params.size == 3 && (start_date > end_date)
abort_with_message "ERROR: start_date (#{start_date}) is after end_date (#{end_date})"
end
csv_filename = "form526_#{start_date}_#{end_date || '9999-99-99'}_validation.csv"
csv_header = %w[RecID Original Valid? Error]
# SMELL: created_at is not indexed
# Not a problem because this is
# a manually launched task with
# an expected low number of records
submissions = if end_date.nil?
Form526Submission.where('created_at >= ?', start_date)
else
Form526Submission.where(created_at: start_date..end_date)
end
# One CSV row per submission when valid, or one row per validation error.
csv_content = CSV.generate do |csv|
csv << csv_header
submissions.each do |submission|
base_row = [submission.id]
base_row << original_success_indicator(submission)
base_row << submission.form_content_valid?
# if it was valid then append no errors and
# do the next submission
if base_row.last
base_row << ''
csv << base_row
puts base_row.join(', ') if form526_verbose?
next
end
errors = submission.lighthouse_validation_errors
if form526_verbose?
print base_row.join(', ')
puts " has #{errors.size} errors."
end
errors.each do |error|
row = base_row.dup
row << error['title']
csv << row
next
end
end
end
# Persist the report either locally or to the reports S3 bucket.
print "Saving #{csv_filename} " if form526_verbose?
if local_file?
print '... ' if form526_verbose?
csv_file = File.new(csv_filename, 'w')
csv_file.write csv_content
csv_file.close
else
print 'to S3 ... ' if form526_verbose?
s3_resource = Reports::Uploader.new_s3_resource
target_bucket = Reports::Uploader.s3_bucket
object = s3_resource.bucket(target_bucket).object(csv_filename)
object.put(body: csv_content)
end
puts 'Done.' if form526_verbose?
end
############################################
## Utility Methods

# True when the rake arg requests the local file system ('local', any case).
# Fix: treat nil/missing input as "not local" instead of raising NoMethodError
# on nil.downcase. casecmp? is equivalent to downcase == 'local' for ASCII.
def validate_local_file(a_string) = a_string.to_s.casecmp?('local')
# Predicate over the @local_file flag set at the top of the lh_validate task.
def local_file? = @local_file
# Ensure that a date string is correctly formatted (exactly 8 digits, YYYYMMDD).
# Returns a Date object; aborts via abort_with_message when the string is
# malformed or not a real calendar date.
# Fix: the identical error-message literal was duplicated in two branches;
# build it once and use a def-level rescue for the Date::Error path.
def validate_yyyymmdd(a_string)
  error_message = "ERROR: bad date (#{a_string}) must be 8 digits in format YYYYMMDD"
  abort_with_message(error_message) unless a_string.match?(/\A[0-9]{8}\z/)
  Date.strptime(a_string, '%Y%m%d')
rescue Date::Error
  abort_with_message(error_message)
end
# Print the error message to STDOUT (padded with blank lines), then abort
# the rake process with a non-zero exit status.
def abort_with_message(a_string)
  $stdout.print "\n#{a_string}\n\n"
  abort
end
# Verbosity flag for the lh_validate task.
# NOTE(review): @form526_verbose is never assigned anywhere in this chunk
# (the usage text mentions a FORM526_VERGOSE env var instead), so this
# currently always returns nil — confirm the intended wiring.
def form526_verbose?
@form526_verbose
end
# Use the form526_job_statuses has_many link
# to get the OSI value
#
# Builds a {job_class => status} map ordered by updated_at (to_h keeps the
# last value per duplicate key, i.e. the most recent status) and classifies
# how the submission was originally processed.
def original_success_indicator(a_submission_record)
job_status = a_submission_record
.form526_job_statuses
.order(:updated_at)
.pluck(:job_class, :status)
.to_h
if job_status.empty?
'Not Processed'
elsif job_status['SubmitForm526AllClaim'] == 'success'
'Primary Success'
elsif job_status['BackupSubmission'] == 'success'
'Backup Success'
else
'Unknown'
end
end
end
|
0
|
code_files/vets-api-private
|
code_files/vets-api-private/rakelib/ci.rake
|
# frozen_string_literal: true
desc 'Runs the continuous integration scripts'
# Aggregate task: runs lint, security, and spec in order.
task ci: %i[lint security spec]
# NOTE(review): disabling stdout sync at load time buffers all rake output —
# confirm this is intentional (CI log streaming may lag).
$stdout.sync = false
|
0
|
code_files/vets-api-private
|
code_files/vets-api-private/rakelib/vet360.rake
|
# frozen_string_literal: true
require 'va_profile/contact_information/v2/service'
require 'va_profile/exceptions/builder'
require 'va_profile/models/email'
require 'va_profile/models/telephone'
require 'va_profile/person/service'
namespace :vet360 do
###########
## TASKS ##
###########
# Name of the env var holding the JSON request payload for the PUT/POST tasks.
ENV_VAR_NAME = 'VET360_RAKE_DATA'
## GETs
desc 'Request Vet360 person contact information'
task :get_person, [:vet360_id] => [:environment] do |_, args|
ensure_arg(:vet360_id, args)
trx = VAProfile::ContactInformation::V2::Service.new(user_struct(args[:vet360_id])).get_person
pp trx.to_h
end
# The four *_transaction_status tasks below are parallel: each requires a
# vet360_id plus a tx_audit_id and pretty-prints the service response hash.
desc 'GET Vet360 email transaction status'
task :get_email_transaction_status, %i[vet360_id tx_audit_id] => [:environment] do |_, args|
ensure_arg(:vet360_id, args)
ensure_arg(:tx_audit_id, args)
trx = VAProfile::ContactInformation::V2::Service
.new(user_struct(args[:vet360_id]))
.get_email_transaction_status(args[:tx_audit_id])
pp trx.to_h
end
desc 'GET Vet360 address transaction status'
task :get_address_transaction_status, %i[vet360_id tx_audit_id] => [:environment] do |_, args|
ensure_arg(:vet360_id, args)
ensure_arg(:tx_audit_id, args)
trx = VAProfile::ContactInformation::V2::Service
.new(user_struct(args[:vet360_id]))
.get_address_transaction_status(args[:tx_audit_id])
pp trx.to_h
end
desc 'GET Vet360 telephone transaction status'
task :get_telephone_transaction_status, %i[vet360_id tx_audit_id] => [:environment] do |_, args|
ensure_arg(:vet360_id, args)
ensure_arg(:tx_audit_id, args)
trx = VAProfile::ContactInformation::V2::Service
.new(user_struct(args[:vet360_id]))
.get_telephone_transaction_status(args[:tx_audit_id])
pp trx.to_h
end
desc 'GET Vet360 permission transaction status'
task :get_permission_transaction_status, %i[vet360_id tx_audit_id] => [:environment] do |_, args|
ensure_arg(:vet360_id, args)
ensure_arg(:tx_audit_id, args)
trx = VAProfile::ContactInformation::V2::Service
.new(user_struct(args[:vet360_id]))
.get_permission_transaction_status(args[:tx_audit_id])
pp trx.to_h
end
## PUTs
# Each PUT task reads a JSON payload from ENV[VET360_RAKE_DATA] (see the
# EXPECTED FORMAT comments), builds the corresponding model from it, and
# submits the update, pretty-printing the response hash.
desc "Update Vet360 email (from #{ENV_VAR_NAME})"
task put_email: [:environment] do
# EXPECTED FORMAT OF VET360_RAKE_DATA:
# {
#   "email_address": "string",
#   "email_id": 0,
#   "email_perm_ind": true,
#   "vet360_id": 0
#   ...
#   [ see lib/vet360/models/email.rb ]
# }
ensure_data_var
data = JSON.parse(ENV.fetch(ENV_VAR_NAME, nil))
vet360_id = data['vet360_id']
email = VAProfile::Models::Email.build_from(data)
trx = VAProfile::ContactInformation::V2::Service
.new(user_struct(vet360_id))
.put_email(email)
pp trx.to_h
end
desc "Update Vet360 telephone (from #{ENV_VAR_NAME})"
task put_telephone: [:environment] do
# EXPECTED FORMAT OF VET360_RAKE_DATA:
# {
#   "area_code": "string",
#   "country_code": "string",
#   "phone_number": "string",
#   ...
#   [ see lib/vet360/models/telephone.rb ]
# }
ensure_data_var
body = JSON.parse(ENV.fetch(ENV_VAR_NAME, nil))
vet360_id = body['vet360_id']
telephone = VAProfile::Models::Telephone.build_from(body)
trx = VAProfile::ContactInformation::V2::Service
.new(user_struct(vet360_id))
.put_telephone(telephone)
pp trx.to_h
end
desc "Update Vet360 address (from #{ENV_VAR_NAME})"
task put_address: [:environment] do
# EXPECTED FORMAT OF VET360_RAKE_DATA:
# {
#   "address_id": 0,
#   "address_line1": "string",
#   "address_line2": "string",
#   "address_line3": "string",
#   "address_pou": "RESIDENCE/CHOICE",
#   ...
#   [ see lib/vet360/models/address.rb ]
# }
ensure_data_var
body = JSON.parse(ENV.fetch(ENV_VAR_NAME, nil))
vet360_id = body['vet360_id']
address = VAProfile::Models::Address.build_from(body)
trx = VAProfile::ContactInformation::V2::Service
.new(user_struct(vet360_id))
.put_address(address)
pp trx.to_h
end
## POSTs
# The POST tasks mirror the PUT tasks above but create new records instead of
# updating existing ones; payload comes from the same env var.
desc "Create Vet360 email (from #{ENV_VAR_NAME})"
task post_email: [:environment] do
# EXPECTED FORMAT OF VET360_RAKE_DATA:
# {
#   "email_address_text": "string",
#   "email_perm_ind": true,
#   "vet360_id": 0
#   ...
#   [ see lib/vet360/models/email.rb ]
# }
ensure_data_var
body = JSON.parse(ENV.fetch(ENV_VAR_NAME, nil))
vet360_id = body['vet360_id']
email = VAProfile::Models::Email.build_from(body)
trx = VAProfile::ContactInformation::V2::Service
.new(user_struct(vet360_id))
.post_email(email)
pp trx.to_h
end
desc "Create Vet360 telephone (from #{ENV_VAR_NAME})"
task post_telephone: [:environment] do
# EXPECTED FORMAT OF BODY:
# {
#   "area_code": "string",
#   "phone_number": "string",
#   "phone_number_ext": "string",
#   "phone_type": "MOBILE",
#   "vet360_id": 0,
#   ...
#   [ see lib/vet360/models/telephone.rb ]
# }
ensure_data_var
body = JSON.parse(ENV.fetch(ENV_VAR_NAME, nil))
vet360_id = body['vet360_id']
telephone = VAProfile::Models::Telephone.build_from(body)
trx = VAProfile::ContactInformation::V2::Service
.new(user_struct(vet360_id))
.post_telephone(telephone)
pp trx.to_h
end
desc "Create Vet360 address (from #{ENV_VAR_NAME})"
task post_address: [:environment] do
# EXPECTED FORMAT OF BODY:
# {
#   "address_line1": "string",
#   "address_line2": "string",
#   "vet360_id": 0,
#   ...
#   [ see lib/vet360/models/address.rb ]
# }
ensure_data_var
body = JSON.parse(ENV.fetch(ENV_VAR_NAME, nil))
vet360_id = body['vet360_id']
address = VAProfile::Models::Address.build_from(body)
trx = VAProfile::ContactInformation::V2::Service
.new(user_struct(vet360_id))
.post_address(address)
pp trx.to_h
end
desc <<~DESCRIPTION
Initializes a vet360_id for the passed in ICNs.
Takes a comma-separated list of ICNs as an argument. Prints an array of hash results.
Sample way to call this rake task:
rake vet360:init_vet360_id[123456,1312312,134234234,4234234]'
Note: There *cannot* be any spaces around the commas (i.e. [123456, 1312312, 134234234, 4234234])
DESCRIPTION
task :init_vet360_id, [:icns] => [:environment] do |_, args|
service = VAProfile::Person::Service.new('rake_user')
# The first ICN arrives via the named arg; any further comma-separated ICNs
# arrive via args.extras — combine them into one list.
icns = args.extras.prepend(args[:icns])
results = []
puts "#{icns.size} to be initialized"
# Per-ICN rescue: a failure records the error message in place of the
# vet360_id and processing continues with the next ICN.
icns.each do |icn|
response = service.init_vet360_id(icn)
vet360_id = response&.person&.vet360_id
results << { icn:, vet360_id: }
rescue => e
results << { icn:, vet360_id: e.message }
end
puts "Results:\n\n#{results}"
end
desc <<~DESCRIPTION
Prep Vet360 error codes for locales.exceptions.en.yml file.
This rake task is idempotent. It takes all of the current error code
csv data that you import, and converts it into the proper error code
format for the config/locales/exceptions.en.yml file.
This requires a developer to follow some manual steps. Here are the
instructions:
1. The full set of error codes are located at: https://github.com/department-of-veterans-affairs/mdm-cuf-person/blob/development/mdm-cuf-person-server/src/inttest/resources/mdm/cuf/person/testData/error_codes.csv
2. Copy and paste this full set of raw error code csv data into
spec/support/vet360/api_response_error_messages.csv (not just the diff,
as this rake task is idempotent.)
3. Make sure these header columns are present in the csv (no spaces after commas):
Message Code,Sub Code,Message Key,Type,Status,State,Queue,Message Description
Here is an example that matches up the headers with one row of matching data:
Message Code,Sub Code,Message Key,Type,Status,State,Queue,Message Description
EMAIL200,emailId,emailId.Null,ERROR,400,REJECTED,RECEIVED_ERROR_QUEUE,Email ID in email bio must be null for inserts/adds
4. Run this rake task.
5. test.yml will now contain all of the formatted error codes. You will need to make the
following adjustments
- Manually reformat each one of these rows to remove the leading : and double quotes
:<<: "*external_defaults" becomes
<<: *external_defaults
- Change all of the "status" values from strings to integers (i.e. '400' => 400)
- Remove all of the leading dashes (-) before each VET360_ key.
For example, - VET360_ADDR101: becomes VET360_ADDR101:
6. The rake task will output any "titles" or "details" that are missing. If any are
missing, you will need to come up with them, and type them in.
7. Delete all of the VET360_ error codes from exceptions.en.yml
8. Paste in all of the updated VET360_ error codes from test.yml, into exceptions.en.yml
9. Delete test.yml
DESCRIPTION
# See the DESCRIPTION heredoc above for the full manual workflow this supports.
task prep_error_codes: :environment do
VAProfile::Exceptions::Builder.new.construct_exceptions_from_csv
end
# Aborts unless the VET360_RAKE_DATA env var is set (used by PUT/POST tasks).
def ensure_data_var
abort "Env var: #{ENV_VAR_NAME} not set" if ENV[ENV_VAR_NAME].blank?
end
# Aborts unless the named rake argument was provided.
def ensure_arg(arg_symbol, args)
abort "No #{arg_symbol} argument provided" if args[arg_symbol].blank?
end
# Aborts when +var+ is blank. NOTE(review): not called anywhere in this file —
# possibly dead code; confirm before removing.
def ensure_var(name, var)
abort "No #{name} included" if var.blank?
end
# Minimal stand-in for a user object — the contact-information service only
# needs an object responding to #vet360_id.
def user_struct(vet360_id)
  OpenStruct.new(vet360_id: vet360_id)
end
end
|
0
|
code_files/vets-api-private
|
code_files/vets-api-private/rakelib/routes_csv.rake
|
# frozen_string_literal: true
namespace :routes do
desc 'Print out all defined routes in CSV format.'
# Feeds every Rails route through RoutesInspector with the custom
# CSVFormatter defined below, and prints the resulting CSV.
task csv: :environment do
all_routes = Rails.application.routes.routes
require 'action_dispatch/routing/inspector'
inspector = ActionDispatch::Routing::RoutesInspector.new(all_routes)
puts inspector.format(CSVFormatter.new)
end
end
# Formatter object handed to ActionDispatch's RoutesInspector#format; it
# accumulates one CSV line per callback and exposes the document via #result.
class CSVFormatter
  def initialize
    @buffer = []
  end

  # All accumulated lines joined into a single newline-separated document.
  def result
    @buffer.join("\n")
  end

  # Emits a section heading preceded by a blank line.
  def section_title(title)
    @buffer << "\n#{title}:"
  end

  # Emits one "path,controller#action" line per route hash.
  def section(routes)
    routes.map { |route| @buffer << "#{route[:path]},#{route[:reqs]}" }
  end

  # Emits the CSV header row; the routes argument is unused.
  def header(_routes)
    @buffer << 'Prefix,Controller#Action'
  end

  # Emitted by the inspector when the app defines no routes at all.
  def no_routes
    @buffer << <<~MESSAGE
      You don't have any routes defined!
      Please add some routes in config/routes.rb.
      For more information about routes, see the Rails guide: http://guides.rubyonrails.org/routing.html.
    MESSAGE
  end
end
|
0
|
code_files/vets-api-private/rakelib
|
code_files/vets-api-private/rakelib/prod/backfill_user_account_for_appeal_submissions.rake
|
# frozen_string_literal: true
desc 'Backill user account id records for AppealSubmission forms'
# NOTE(review): desc typo "Backill" (should be "Backfill") — cosmetic only.
task backfill_user_account_for_appeal_submissions: :environment do
# Log line reporting how many AppealSubmissions still lack a user_account_id.
def null_user_account_id_count_message
'[BackfillUserAccountForAppealSubmissions] AppealSubmission with user_account_id: nil, ' \
"count: #{user_account_nil.count}"
end
# Relation of AppealSubmissions missing a user account.
def user_account_nil
AppealSubmission.where(user_account: nil)
end
Rails.logger.info('[BackfillUserAccountForAppealSubmissions] Starting rake task')
Rails.logger.info(null_user_account_id_count_message)
mpi_service = MPI::Service.new
user_account_nil.find_in_batches(batch_size: 1000) do |batch|
batch.each do |sub|
user_uuid = sub.user_uuid
# Try each credential UUID column in turn; fall back to an MPI ICN lookup.
user_account = UserVerification.find_by(idme_uuid: user_uuid)&.user_account ||
UserVerification.find_by(backing_idme_uuid: user_uuid)&.user_account ||
UserVerification.find_by(logingov_uuid: user_uuid)&.user_account
unless user_account
icn = mpi_service.find_profile_by_identifier(identifier: user_uuid, identifier_type: 'idme')&.profile&.icn
user_account = UserAccount.find_or_create_by(icn:) if icn
end
sub.user_account = user_account
sub.save!
end
end
Rails.logger.info('[BackfillUserAccountForAppealSubmissions] Finished rake task')
Rails.logger.info(null_user_account_id_count_message)
end
|
0
|
code_files/vets-api-private/rakelib
|
code_files/vets-api-private/rakelib/prod/backfill_user_account_id_for_in_progress_forms.rake
|
# frozen_string_literal: true
desc 'Backill user account id records for InProgressForm'
task backfill_user_account_for_in_progress_forms: :environment do
# Log line reporting how many InProgressForms still lack a user_account.
def in_progress_form_rails_logger_message
"[BackfillUserAccountIdForInProgressForms] InProgressForm with user_account_id: nil, count: #{user_account_nil}"
end
def user_account_nil
InProgressForm.where(user_account: nil).count
end
# Resolves a UserAccount from either an ID.me or Login.gov credential UUID.
def get_user_verification(uuid)
user_verification = UserVerification.find_by(idme_uuid: uuid) || UserVerification.find_by(logingov_uuid: uuid)
user_verification&.user_account
end
Rails.logger.info('[BackfillUserAccountIdForInProgressForms] Starting rake task')
Rails.logger.info(in_progress_form_rails_logger_message)
InProgressForm.where(user_account: nil).find_each do |form|
user_account = get_user_verification(form.user_uuid)
# Forms with no resolvable credential are skipped, not failed.
next unless user_account
form.update!(user_account:)
end
Rails.logger.info('[BackfillUserAccountIdForInProgressForms] Finished rake task')
Rails.logger.info(in_progress_form_rails_logger_message)
end
|
0
|
code_files/vets-api-private/rakelib
|
code_files/vets-api-private/rakelib/prod/README.md
|
This folder should be used to store and obtain approval from the BE Tools team for production database changes.
The approval process can be found here: https://github.com/department-of-veterans-affairs/va.gov-team/blob/master/platform/documentation/Prod_db_change_process.md
|
0
|
code_files/vets-api-private/rakelib
|
code_files/vets-api-private/rakelib/prod/backfill_user_account_for_form_email_matches_profile_logs.rake
|
# frozen_string_literal: true
desc 'Backfill user account id records for FormEmailMatchesProfileLog'
task backfill_user_account_for_form_email_matches_profile_logs: :environment do
# Relation of log rows missing a user_account_id.
def get_nil_user_account_ids
FormEmailMatchesProfileLog.where(user_account_id: nil)
end
def nil_user_account_ids_count_message(nil_user_account_id_count)
Rails.logger.info('[BackfillUserAccountForFormEmailMatchesProfileLog] FormEmailMatchesProfileLog ' \
"with user_account_id: nil, count: #{nil_user_account_id_count}")
end
Rails.logger.info('[BackfillUserAccountForFormEmailMatchesProfileLog] Starting rake task')
mpi_service = MPI::Service.new
starting_nil_user_account_ids = get_nil_user_account_ids
nil_user_account_ids_count_message(starting_nil_user_account_ids.count)
starting_nil_user_account_ids.find_in_batches(batch_size: 1000) do |batch|
batch.each do |sub|
user_uuid = sub.user_uuid
# Try each credential UUID column in turn; fall back to an MPI ICN lookup.
user_account = UserVerification.find_by(idme_uuid: user_uuid)&.user_account ||
UserVerification.find_by(backing_idme_uuid: user_uuid)&.user_account ||
UserVerification.find_by(logingov_uuid: user_uuid)&.user_account
unless user_account
icn = mpi_service.find_profile_by_identifier(identifier: user_uuid, identifier_type: 'idme')&.profile&.icn
user_account = UserAccount.find_or_create_by(icn:) if icn
end
# May remain nil when no account was resolvable; the row is saved either way.
sub.user_account_id = user_account&.id
sub.save!
end
end
Rails.logger.info('[BackfillUserAccountForFormEmailMatchesProfileLog] Finished rake task')
nil_user_account_ids_count_message(get_nil_user_account_ids.count)
end
|
0
|
code_files/vets-api-private/rakelib
|
code_files/vets-api-private/rakelib/prod/user_credential.rake
|
# frozen_string_literal: true
desc 'Lock and unlock user credentials'
namespace :user_credential do
# Lock/unlock a single credential identified by type + credential_id.
task :lock, %i[type credential_id requested_by] => :environment do |_, args|
run_task(:lock, args)
end
task :unlock, %i[type credential_id requested_by] => :environment do |_, args|
run_task(:unlock, args)
end
# Lock/unlock every credential attached to the account with the given ICN.
task :lock_all, %i[icn requested_by] => :environment do |_, args|
run_task(:lock_all, args, all_credentials: true)
end
task :unlock_all, %i[icn requested_by] => :environment do |_, args|
run_task(:unlock_all, args, all_credentials: true)
end
# Shared driver: validates args, locks/unlocks the targeted credential(s),
# and logs start/complete/failure with a JSON context.
def run_task(action, args, all_credentials: false)
namespace = "UserCredential::#{action.to_s.camelize}"
lock_action = %i[lock lock_all].include?(action) ? :lock : :unlock
validate_args(args, all_credentials)
context = build_context(args)
log_message(level: 'info', message: "[#{namespace}] rake task start, context: #{context.to_json}")
if all_credentials
UserAccount.find_by(icn: args[:icn]).user_verifications.each do |user_verification|
update_credential(user_verification, lock_action, namespace, context)
end
else
user_verification = UserVerification.where(["#{args[:type]}_uuid = ?", args[:credential_id]]).first
update_credential(user_verification, lock_action, namespace, context)
end
log_message(level: 'info', message: "[#{namespace}] rake task complete, context: #{context.to_json}")
rescue => e
log_message(level: 'error', message: "[#{namespace}] failed - #{e.message}")
end
# Raises unless the required args for the mode are present and (for the
# single-credential mode) the credential type is a known CSP type.
def validate_args(args, all_credentials)
missing_args = all_credentials ? %i[icn requested_by] : %i[type credential_id requested_by]
raise 'Missing required arguments' unless args.values_at(*missing_args).all?
raise 'Invalid type' if SignIn::Constants::Auth::CSP_TYPES.exclude?(args[:type]) && !all_credentials
end
def build_context(args)
{ icn: args[:icn],
type: args[:type],
credential_id: args[:credential_id],
requested_by: args[:requested_by] }.compact
end
# Performs the lock!/unlock! and logs the resulting credential state.
def update_credential(user_verification, lock_action, namespace, context)
user_verification.send("#{lock_action}!")
credential_context = context.merge({ type: user_verification.credential_type,
credential_id: user_verification.credential_identifier,
locked: user_verification.locked }).compact
log_message(level: 'info',
message: "[#{namespace}] credential #{lock_action}, context: #{credential_context.to_json}")
end
# In production, echoes a JSON log line to PID 1's stdout (container log
# stream) via a shell; always also prints the plain message.
# NOTE(review): the message is interpolated into a backtick shell command —
# shell metacharacters in the message could be interpreted by the shell.
# Input here is operator-supplied rake args, but consider IO.write to
# /proc/1/fd/1 instead of shelling out.
def log_message(level:, message:)
`echo "#{datadog_log(level:, message:).to_json.dump}" >> /proc/1/fd/1` if Rails.env.production?
puts message
end
# Builds the Datadog-shaped structured log payload.
def datadog_log(level:, message:)
{
level:,
message:,
application: 'vets-api-server',
environment: Rails.env,
timestamp: Time.zone.now.iso8601,
file: 'rakelib/prod/user_credential.rake',
named_tags: {
dd: {
env: ENV.fetch('DD_ENV', nil),
service: 'vets-api'
},
ddsource: 'ruby'
},
name: 'Rails'
}
end
end
|
0
|
code_files/vets-api-private/rakelib
|
code_files/vets-api-private/rakelib/prod/backfill_user_account_for_async_transactions.rake
|
# frozen_string_literal: true
desc 'Backill user account id records for AsyncTransaction::Base records'
task backfill_user_account_for_async_transactions: :environment do
# Log line reporting how many AsyncTransaction rows still lack a user_account.
def null_user_account_id_count_message
'[BackfillUserAccountForAsyncTransactions] AsyncTransaction::Base with user_account_id: nil, ' \
"count: #{user_account_nil.count}"
end
def user_account_nil
AsyncTransaction::Base.where(user_account: nil)
end
Rails.logger.info('[BackfillUserAccountForAsyncTransactions] Starting rake task')
Rails.logger.info(null_user_account_id_count_message)
mpi_service = MPI::Service.new
user_account_nil.find_in_batches(batch_size: 1000) do |batch|
batch.each do |sub|
user_uuid = sub.user_uuid
# Try each credential UUID column; unlike the other backfills, the MPI
# fallback here tries both idme and logingov identifier types.
user_account = UserVerification.find_by(idme_uuid: user_uuid)&.user_account ||
UserVerification.find_by(backing_idme_uuid: user_uuid)&.user_account ||
UserVerification.find_by(logingov_uuid: user_uuid)&.user_account
unless user_account
icn = mpi_service.find_profile_by_identifier(identifier: user_uuid, identifier_type: 'idme')&.profile&.icn ||
mpi_service.find_profile_by_identifier(identifier: user_uuid, identifier_type: 'logingov')&.profile&.icn
user_account = UserAccount.find_or_create_by(icn:) if icn
end
sub.user_account = user_account
sub.save!
end
end
Rails.logger.info('[BackfillUserAccountForAsyncTransactions] Finished rake task')
Rails.logger.info(null_user_account_id_count_message)
end
|
0
|
code_files/vets-api-private/rakelib
|
code_files/vets-api-private/rakelib/prod/backfill_user_acceptable_credential.rake
|
# frozen_string_literal: true
desc 'Backill acceptable_verified_credential_at/idme_verified_credential_at for user_acceptable_verified_credentials'
task :backfill_user_acceptable_credential, [:start_uuid] => :environment do |_, args|
# Progress log: counts of UserAVC rows with each timestamp still nil, plus total.
def user_acceptable_credential_rails_logger_message
'[BackfillUserAcceptableCredential] acceptable_verified_credential_at: nil, ' \
"count: #{acceptable_verified_credential_at} " \
'idme_verified_credential_at: nil, ' \
"count: #{idme_verified_credential_at}" \
"total UserAVC records, count: #{total_avc}"
end
def batch_range_log(start_uuid, end_uuid)
Rails.logger.info("[BackfillUserAcceptableCredential] Batch start_uuid: #{start_uuid}, end_uuid: #{end_uuid}")
end
def acceptable_verified_credential_at
UserAcceptableVerifiedCredential.where(acceptable_verified_credential_at: nil).count
end
def idme_verified_credential_at
UserAcceptableVerifiedCredential.where(idme_verified_credential_at: nil).count
end
def total_avc
UserAcceptableVerifiedCredential.count
end
Rails.logger.info('[BackfillUserAcceptableCredential] Starting rake task')
Rails.logger.info(user_acceptable_credential_rails_logger_message)
# Optional start_uuid allows resuming a previous run mid-way.
start_uuid = args[:start_uuid].presence
UserAccount.where.not(icn: nil).find_in_batches(start: start_uuid, order: :asc) do |user_account_batch|
batch_range_log(user_account_batch.first.id, user_account_batch.last.id)
# Silence per-row SQL logging inside each batch to keep logs readable.
ActiveRecord::Base.logger.silence do
user_account_batch.each do |user_account|
Login::UserAcceptableVerifiedCredentialUpdater.new(user_account:).perform
end
end
end
Rails.logger.info('[BackfillUserAcceptableCredential] Finished rake task')
Rails.logger.info(user_acceptable_credential_rails_logger_message)
end
|
0
|
code_files/vets-api-private/rakelib
|
code_files/vets-api-private/rakelib/piilog_repl/piilog_helpers.rb
|
# frozen_string_literal: true
# rubocop:disable Metrics/BlockNesting
# A classified time argument: +time+ is the value, +type+ is one of
# :open (nil placeholder), :time, :date, or :duration.
TimeArg = Struct.new(:time, :type, keyword_init: true)
# Returns a lambda (closing over the given array) that tries to interpret one
# REPL argument as a time-like value. On success it appends a TimeArg to the
# array and returns true; on failure it returns false so the caller can treat
# the argument as something else. At most two time args (start/stop) are
# accepted; a third raises.
TIME_ARGS_ARRAY_BUILDER = lambda do |array_for_collecting_time_args|
# closure for array_for_collecting_time_args
lambda do |arg|
if array_for_collecting_time_args.length == 2
raise "can only specify a start and a stop time. extra time param: #{arg.inspect}"
end
time_arg = case arg
when nil
# nil marks an open-ended side of the range.
TimeArg.new type: :open
when String, Symbol
string = arg.to_s.downcase
case string
when 'now'
TimeArg.new time: Time.zone.now, type: :time
when 'yesterday', 'today', 'tomorrow'
TimeArg.new time: Time.zone.send(string), type: :date
else
# A ':' suggests a time-of-day; otherwise try parsing as a date.
parsed = if string.include?(':')
begin
{ time: string.in_time_zone, type: :time }
rescue
nil
end
else
begin
{ time: string.to_date, type: :date }
rescue
nil
end
end
return false unless parsed
TimeArg.new(**parsed)
end
else
# Anything with strftime is a Time/Date-like object; durations pass through.
if arg.respond_to?(:strftime)
TimeArg.new time: arg, type: arg.respond_to?(:min) ? :time : :date
elsif arg.is_a?(ActiveSupport::Duration)
TimeArg.new time: arg, type: :duration
else
return false
end
end
array_for_collecting_time_args << time_arg
true
end
end
# Converts the collected TimeArg pair (0, 1 or 2 entries) into arguments for
# ActiveRecord's #where against created_at. Every combination of
# time/date/duration/open on each side is enumerated explicitly; dates expand
# to whole days, a single time expands to a window whose width depends on its
# precision (day / hour / 5 minutes / 1 minute).
TIMES_TO_WHERE_ARGS = lambda do |times|
a, b = times.map(&:time)
types = times.map(&:type)
start_time, stop_time = case types
when [], %i[open open]
[nil, nil]
when %i[date]
[a.beginning_of_day, a.end_of_day]
when %i[duration], %i[duration open]
[Time.zone.now - a, nil]
when %i[open]
raise "open-ended time range wasn't completed"
when %i[time]
# Window width is inferred from the precision of the given time.
if a.min.zero? && a.hour.zero?
[a.beginning_of_day, a.end_of_day]
elsif a.sec != 0
[a.beginning_of_minute, a.end_of_minute]
elsif a.min.zero?
[a.beginning_of_hour, a.end_of_hour]
elsif (a.min % 5).zero?
[a - 5.minutes, a + 5.minutes]
else
[a - 1.minute, a + 1.minute]
end
when %i[date date]
[a.beginning_of_day, b.end_of_day]
when %i[date duration]
start = a.beginning_of_day
[start, start + b]
when %i[date open]
[a.beginning_of_day, nil]
when %i[date time]
[a.beginning_of_day, b]
when %i[duration date]
stop = b.end_of_day
[stop - a, stop]
when %i[duration duration]
start = Time.zone.now - a
[start, start + b]
when %i[duration time]
[b - a, b]
when %i[open date]
[nil, b.end_of_day]
when %i[open duration]
[nil, Time.zone.now - a]
when %i[open time]
[nil, b]
when %i[time date]
[a, b.end_of_day]
when %i[time duration]
[a, a + b]
when %i[time open]
[a, nil]
when %i[time time]
[a, b]
else
raise "unknown types: #{types.inspect}"
end
# Result carries either positional where-args or keyword where-args.
result_struct = Struct.new(:args, :kwargs, keyword_init: true)
return result_struct.new unless start_time || stop_time
return result_struct.new args: ['created_at >= ?', start_time] unless stop_time
return result_struct.new args: ['created_at <= ?', stop_time] unless start_time
result_struct.new kwargs: { created_at: [start_time..stop_time] }
end
# REPL helper: builds a PersonalInformationLog query from loosely-typed args.
# An optional leading relation seeds the query; time-like args become a
# created_at range; remaining strings/symbols become ILIKE patterns against
# error_class; keyword args are passed straight to #where.
PersonalInformationLogQueryBuilder = lambda do |*args, **where_kwargs|
query, args = if args.first.respond_to? :to_sql
[args.first, args[1..]]
else
[PersonalInformationLog.all, args]
end
query = query.where(**where_kwargs) if where_kwargs.present?
times = []
# add_time is a lambda that takes in an arg
# --if it's a time, adds it to times and returns true. otherwise returns false
add_time = TIME_ARGS_ARRAY_BUILDER.call(times)
error_class = []
args.each do |arg|
next if add_time.call(arg)
case arg
when String, Symbol
error_class << "%#{arg}%"
else
raise "don't know what to do with arg: #{arg.inspect}"
end
end
query = query.where('error_class ILIKE ANY (array[?])', error_class) if error_class.present?
where_args = TIMES_TO_WHERE_ARGS.call(times)
query = query.where(*where_args.args) if where_args.args
query = query.where(**where_args.kwargs) if where_args.kwargs
query
end
# rubocop:enable Metrics/BlockNesting
|
0
|
code_files/vets-api-private/rakelib
|
code_files/vets-api-private/rakelib/support/shell_command.rb
|
# frozen_string_literal: true
require 'open3'
class ShellCommand
  # Runs +command+, streaming its combined stdout/stderr to this process's
  # stdout line by line.
  # Returns true when the command exited successfully, false otherwise.
  def self.run(command)
    success = false
    old_sync = $stdout.sync
    $stdout.sync = true
    Open3.popen2e(command) do |_stdin, stdout_and_stderr, thread|
      while (line = stdout_and_stderr.gets)
        puts(line)
      end
      success = thread.value.success?
    end
    success
  ensure
    # Fix: restore sync even if the command or IO raises (previously leaked).
    $stdout.sync = old_sync
  end

  # Runs +command+ printing only its stderr output.
  # Returns true when the command exited with status 0.
  def self.run_quiet(command)
    success = false
    Open3.popen3(command) do |_stdin, stdout, stderr, wait_thr|
      # Fix: the previous implementation never read the child's stdout, which
      # can deadlock once a chatty command fills the stdout pipe buffer.
      # Drain stdout concurrently and discard it.
      drain = Thread.new { stdout.read }
      error = stderr.read
      drain.join
      puts error unless error.empty?
      success = wait_thr.value.exitstatus.zero?
    end
    success
  end
end
|
0
|
code_files/vets-api-private/rakelib
|
code_files/vets-api-private/rakelib/support/vic_load_test.rb
|
# frozen_string_literal: true
require 'faraday/multipart'
module LoadTest
  module_function

  # Times the given block, then prints start, end, and elapsed seconds.
  def measure_elapsed
    t0 = Time.zone.now
    yield
    t1 = Time.zone.now
    elapsed = t1 - t0
    puts
    puts "Start: #{t0}"
    puts "End: #{t1}"
    puts "Elapsed: #{elapsed.round(2)} seconds"
  end

  # Builds a Faraday connection to the VIC endpoint `route` on `host`,
  # configured for multipart and URL-encoded requests.
  def conn(host, route)
    Faraday.new("#{host}/v0/vic/#{route}") do |c|
      c.request :multipart
      c.request :url_encoded
      c.adapter Faraday.default_adapter
    end
  end

  # Multipart payload: 10MB PNG for profile-photo upload load tests.
  def photo_payload
    {
      profile_photo_attachment: {
        file_data: Faraday::UploadIO.new(
          Rails.root.join('rakelib', 'support', 'files', 'example_10mb.png').to_s,
          'image/png'
        )
      }
    }
  end

  # Multipart payload: 10MB PNG supporting document.
  def doc_png_payload
    {
      supporting_documentation_attachment: {
        file_data: Faraday::UploadIO.new(
          Rails.root.join('rakelib', 'support', 'files', 'example_10mb.png').to_s,
          'image/png'
        )
      }
    }
  end

  # Multipart payload: 25MB PDF supporting document.
  def doc_pdf_payload
    {
      supporting_documentation_attachment: {
        file_data: Faraday::UploadIO.new(
          Rails.root.join('rakelib', 'support', 'files', 'example_25mb.pdf').to_s,
          # Fixed content type: this is a PDF; it was mislabeled 'image/png'.
          'application/pdf'
        )
      }
    }
  end
end
|
0
|
code_files/vets-api-private/rakelib/support
|
code_files/vets-api-private/rakelib/support/files/dod_facilities.csv
|
name,city,state,country
US Naval Hospital Guantanamo Bay,Guantanamo Bay,,Cuba
US Naval Hospital Guam,Agana,,Guam
Landstuhl Regional Medical Center,Landstuhl,,Germany
Aviano Air Base(31st Medical Group),Aviano,,Italy
US Naval Hospital Naples,Naples,,Italy
US Naval Hospital Sigonella,Sicily,,Italy
US Army Vicenza Health Center,Vicenza,,Italy
Misawa Airbase Medical Facility(35th Medical Group),Misawa,,Japan
US Naval Hospital Okinawa,Okinawa,,Japan
US Naval Hospital Yokosuka,Yokosuka,,Japan
Yokota Air Base Medical Facility(374th Medical Group),Yokota,,Japan
Brian AllGood Army Community Hospital(65th Medical Brigade),Seoul,,Korea
US Naval Hospital Rota,Rota,,Spain
Royal Air Force Lakenheath(48th Medical Group),Lakenheath,,UK
Bassett Army Community Hospital,Fort Wainwright,AK,USA
Elmendorf Air Force Base Joint Base Elmendorf-Richardson(673rd Medical Group) ,Elmendorf AFB,AK,USA
David Grant USAF Medical Center(60th Medical Group),Travis Air Force Base ,CA,USA
Naval Hospital Camp Pendleton,Camp Pendleton,CA,USA
Naval Medical Center San Diego,San Diego,CA,USA
Naval Hospital Lemoore,Lemoore,CA,USA
Robert E. Bush Naval Hospital,Twentynine Palms,CA,USA
Weed Army Community Hospital,Fort Irwin,CA,USA
Evans Army Community Hospital,Fort Carson,CO,USA
Eglin Air Force Base Hospital(96th Medical Group),Eglin AFB,FL,USA
Naval Hospital Jacksonville,Jacksonville,FL,USA
Naval Hospital Pensacola,Pensacola,FL,USA
Martin Army Community Hospital Fort Benning Medical Activity,Fort Benning,GA,USA
Winn Army Community Hospital,Fort Stewart,GA,USA
Tripler Army Medical Center,Honolulu,HI,USA
Mountain Home Air Force Base Medical Facility(366th Medical Group),Mountain Home AFB,ID,USA
Irwin Army Community Hospital,Fort Riley,KS,USA
Blanchfield Army Community Hospital,Fort Campbell,KY,USA
Ireland Army Community Hospital,Fort Knox,KY,USA
Bayne-Jones Army Community Hospital,Fort Polk,LA,USA
Walter Reed National Military Medical Center,Bethesda,MD,USA
General Leonard Wood Army Community Hospital,Fort Leonard Wood,MO,USA
Keesler Air Force Base Medical Center(81st Medical Group),Biloxi,MS,USA
Womack Army Medical Center,Fort Bragg,NC,USA
Naval Hospital Camp Lejeune,Camp Lejeune,NC,USA
Keller Army Community Hospital,West Point,NY,USA
Mike O'Callaghan Federal Medical Center(99th Medical Group),Nellis Air Force Base ,NV,USA
Wright-Patterson Medical Center(88th Medical Group),Wright Patterson AFB,OH,USA
Reynolds Army Community Hospital,Fort Sill,OK,USA
Brook Army Medical Center,Fort Sam Houston,TX,USA
Carl R. Darnall Army Medical Center,Fort Hood,TX,USA
San Antonio Military Medical Center(SAMMC),San Antonio,TX,USA
William Beaumont Army Medical Center,El Paso,TX,USA
Fort Belvoir Community Hospital,Fort Belvoir,VA,USA
Langley Air Force Hospital Joint Base Langley-Eustis(633rd Medical Group),Langley,VA,USA
Naval Medical Center Portsmouth,Portsmouth,VA,USA
Madigan Army Medical Center(Madigan Healthcare System),Tacoma,WA,USA
Naval Hospital Bremerton,Bremerton,WA,USA
Naval Hospital Oak Harbor,Oak Harbor,WA,USA
|
0
|
code_files/vets-api-private/rakelib/support
|
code_files/vets-api-private/rakelib/support/files/conditions.csv
|
code,medical_term,lay_term
10,abnormal heart,
20,abnormal mitral valve, abnormal mitral valve in heart
30,abnormal weight loss,
40,"abscess, brain", bacterial or fungal infection in a part of the brain
50,"abscess, kidney", bacterial or fungal infection in a part of the kidney
60,"abscess, liver",bacterial or fungal infection in a part of the liver
70,acne,
80,acromegaly,hormonal disorder causing excess growth hormone
90,acute nonlymphocytic leukemia,
110,adenocarcinoma,
120,adenomyoepithelioma,
130,adhesions, tough scar tissue
140,aging,
150,agoraphobia, afraid or in fear of crowded or public spaces
160,AIDS,
170,alcohol abuse,
180,allergies,
190,ALS,
200,alveolar abscesses,dental or tooth infection
210,amebiasis,parasitic infection of the colon
220,amputation,
230,amputation - above knee,
240,"amputation - arm, forearm, hand",
250,amputation - below knee,
260,amputation - foot (feet),
270,amputation - leg(s),
280,amputation - multiple fingers,
290,amputation - single finger,
300,amputation - toe (toes),
310,amyotrophic lateral sclerosis, ALS
320,anatomical loss, loss of a body part
330,"anemia, primary",
340,aneurysm,
350,angina, chest pain
360,"ankle condition, bilateral (both ankles)", ankle condition in both ankles
370,"ankle condition, left",
380,"ankle condition, right",
390,ankle sprain,
400,ankle strain,
410,anxiety condition,
420,"arm condition, bilateral (both arms)", arm condition in both arms
430,"arm condition, left",
440,"arm condition, right",
450,arrhythmia, irregular heartbeat
460,arteriosclerosis, hardening or thickening of arteries
470,arteriosclerosis obliterans, blocked artery(ies)
480,arteriosclerotic heart disease,hardening or thickening of arteries in heart
490,arthritis,
500,"arthritis, degenerative",
510,"arthritis, due to trauma",
520,asbestosis (asbestos disease),
540,asthma,
550,atherosclerosis, fat or cholesterol buildup in arteries
560,atherosclerotic peripheral vascular disease,
570,athlete's foot,
580,"atrophy, progressive muscular",
590,avitaminosis,vitamin deficiency
600,back condition,
610,back injury,
620,back sprain,
630,back strain,
640,basaloma,skin tumor
650,Bell's palsy,
660,beriberi (including beriberi heart disease), thiamine deficiency
670,bipolar disorder,
680,birth defect in child,
690,blackwater fever,
700,bladder condition,
710,bladder stone(s),
720,blindness (including light perception only),
730,blood clot(s),
740,Boeck's disease,
750,bone atrophy,weakening of bones
760,bone condition,
770,bone fracture(s),
780,bowel condition,
790,brain hemorrhage,
800,brain thrombosis,clot in brain's blood vessel
810,brain tumor(s),
820,"breast condition, bilateral (both)", breast condition in both breasts
830,"breast condition, left",
840,"breast condition, right",
850,bronchiectasis,damaged airway from inflammation or infection
860,bronchitis,
870,bruise,
880,Buerger's disease,
890,bulbar palsy,
900,burn(s),
910,bursitis, inflammation near the joints
920,CAD, coronary artery disease
930,Caisson disease, decompression sickness
940,calculus - bladder, bladder stones
950,calculus - gallbladder, gallbladder stones
960,calculus - kidney, kidney stones
970,cancer,
980,cancer - bile ducts,
990,cancer - bone,
1000,cancer - brain,
1010,cancer - breast,
1020,cancer - bronchus,
1030,cancer - cheek,
1040,cancer - colon,
1050,cancer - esophagus,
1060,cancer - gall bladder,
1070,cancer - gastrointestinal tract,
1080,cancer - gum,
1090,cancer - kidney,
1100,cancer - larynx,
1110,cancer - lip,
1120,cancer - liver,
1130,cancer - lung,
1140,cancer - lymph nodes,
1150,cancer - nasopharyngeal,
1160,cancer - ovarian,
1170,cancer - pancreas,
1180,cancer - pharyngolaryngeal,
1190,cancer - pharynx,
1200,cancer - prostate,
1210,cancer - rectum,
1220,cancer - renal pelves,
1230,Cancer - respiratory,
1240,cancer - salivary gland,
1250,Cancer - skin,
1260,cancer - small intestine,
1270,"cancer - squamous cell, of the skin",
1280,cancer - stomach,
1290,cancer - testicle,
1300,cancer - thyroid,
1310,cancer - tongue,
1320,cancer - trachea,
1330,cancer - ureters,
1340,cancer - urethra,
1350,cancer - urinary bladder,
1360,cancer - urinary tract,
1370,cancer - urogenital system (except prostate),
1380,carate (also known as pinta), skin disease
1390,carcinoma,
1400,cardiovascular disease,
1410,cardiovascular signs or symptoms,
1420,carpal tunnel syndrome,
1440,cataract,
1450,"cataracts, posterior subcapsular",
1460,catarrhal fever (common cold),
1470,cavity,
1480,central nervous system tumors,
1490,cerebral accident,
1500,cerebral hemorrhage,
1510,cerebral vascular accident, stroke
1520,cerebrovascular accident, stroke
1530,cerebrovascular hemorrhage,stroke from ruptured blood vessel in brain
1540,cervical sprain, neck sprain
1550,cervical strain, neck strain
1560,cervicitis, inflammation of cervix
1570,CHF,congestive heart failure
1580,chest pain,
1590,Chloracne or other acneform disease consistent with chloracne, skin condition
1600,chloroma,acute leukemia
1610,cholangiocarcinoma,bile duct cancer
1620,cholangioma, bile duct tumor
1630,cholangitis,bacterial infection of bile duct
1640,cholecystitis,inflammation of gallbladder
1650,cholera,diarrhea illness
1660,choriocarcinoma,uterine cancer
1670,choroiditis,inflammation of parts of the eye
1680,chronic fatigue syndrome,
1690,chronic lymphocytic leukemia,
1700,chronic obstructive pulmonary disease,
1710,chronic organic brain syndrome,
1720,chronic renal or kidney disease,
1730,cirrhosis of liver,
1750,clots,
1760,coccidiodomycosis, infection from soil fungus
1770,cold,
1780,colitis, inflammation of inner colon lining
1790,"colon syndrome, irritable", irritable bowel syndrome
1800,combat fatigue,
1810,combat neurosis,anxiety from traumatic events
1820,comedocarcinoma,early-stage breast cancer
1830,congestive heart failure,
1840,conjunctivitis,pinkeye
1850,cor pulmonale,pulmonary heart disease
1860,corneal opacities, eye disorder from cornea scars
1870,coronary,
1880,coronary accident,
1890,coronary artery bypass,
1900,coronary artery disease,
1910,costochondritis,inflammation of cartilage between rib and breastbone
1920,Cushing's syndrome, hormonal disorder
1930,CVA,stroke
1940,cyclitis,inflammation in middle part of eye
1950,cylindroma,benign skin tumors in head or neck
1960,cyst,
1970,cystitis, bladder infection
1990,degenerative arthritis,
2000,dehydration,
2010,dementia,
2020,dental abscesses,
2030,depression,
2040,depressive neurosis,depression with unstable emotions
2050,dermatitis,skin inflammation
2060,diabetes,
2070,diabetes insipidus,
2080,diabetes mellitus,
2090,diarrhea,
2100,digestive disorder,
2110,disarticulation,separation of bones at the joint
2120,discoid lupus,autoimmune disease affecting the skin
2130,diverticulitis,inflammation or infection of pouches in colon
2140,dizziness,
2150,dracontiasis,infection by the Guinea worm
2160,dracunculiasis,infection by the Guinea worm (combine with above 'dracontiasis or dracunculiasis')
2170,drug abuse,
2180,dysentery,inflammatory intestine disease
2190,dysthymic disorder,chronic depression
2200,ear condition,
2210,ear infection,
2220,ears ringing,
2230,eating disorder,
2240,eczema,
2250,"elbow condition, bilateral(both elbows)", elbow condition in both elbows
2260,"elbow condition, left",
2270,"elbow condition, right",
2280,elbow sprain,
2290,elbow strain,
2300,embolism, blocking of an artery
2310,"embolism, brain",
2320,emphysema, lung condition
2330,"encephalitis, lethargica residuals", form of brain inflammation
2340,endocarditis,infection of endocardium in heart
2350,endocrinopathy,disease of the endocrine gland
2360,endometriosis,uterine lining tissue grows outside the uterus
2370,endothelioma,
2380,enucleation, removal or loss of the eye
2390,epididymitis,inflammation at back of testicles
2400,epilepsy,
2410,epithelioma,
2420,erythematosus lupus,
2430,esophageal condition,
2440,"eye condition, bilateral (both eyes)", eye condition in both eyes
2450,"eye condition, left",
2460,"eye condition, right",
2470,eye disease,
2480,facial injury,
2490,"fallen arches, bilateral (both arches)", fallen arches in both feet
2500,"fallen arches, left",
2510,"fallen arches, right",
2520,fatigue,
2540,fibromyalgia, pain disorder that causes muscle pain and tenderness in whole body
2560,fibrosis,scarring of connective tissue
2570,"fibrosis, interstitial pulmonary",
2580,filariasis,parasitic disease from roundworm
2600,"flatfoot, bilateral (both feet)", flat feet in both feet
2610,"flatfoot, left",
2620,"flatfoot, right",
2630,flu,
2640,flu-like conditions,
2650,"foot condition, bilateral (both feet)", foot condition in both feet
2660,"foot condition, left",
2670,"foot condition, right",
2680,foot pain,
2690,"forearm condition, bilateral (both forearms)", forearm condition in both forearms
2700,"forearm condition, left",
2710,"forearm condition, right",
2720,fracture(s),
2730,"frostbite, organic residuals of",
2740,frozen feet,
2750,fungal condition,
2760,gall bladder condition,
2770,gallstone(s),
2780,gangrene, dead tissue caused by an infection
2790,gastritis, stomach inflammation
2800,gastroenteritis,irritation or inflammation of stomach and intestines
2810,gastrointestinal problems,
2820,gastrointestinal signs or symptoms,
2830,generalized arteriosclerosis,hardening of artery walls
2840,genitourinary condition,
2850,GERD,Gastroesophageal reflux disease
2860,gingivitis,gum disease
2870,"glaucoma, bilateral (both eyes)", damage to optic nerve in both eyes
2880,"glaucoma, left",damage to optic nerve in left eye
2890,"glaucoma, right",damage to optic nerve in right eye
2900,glomerulonephritis, a type of kidney inflammation
3000,gout, a form of inflammatory arthritis
3010,grand mal epilepsy,
3020,groin condition,
3030,Gulf War Veteran with unexplained chronic multisymptom illness,
3050,"gums, sore", sore gums
3060,gynecological condition,
3070,hair loss,
3080,"hand condition, bilateral (both hands)", hand condition in both hands
3090,"hand condition, left",
3100,"hand condition, right",
3110,Hansen's disease,
3120,head injury,
3130,headaches,
3140,hearing loss,
3150,"hearing loss, sensorineural",
3160,heart attack,
3170,heart condition,
3180,heart disease,
3190,heart failure,
3200,heat stroke,
3210,helminthiasis,parasitic worm infection
3220,hematuria,blood in urine
3230,hemic-lymphatic condition,blood and lymph condition
3240,hemorrhage,ruptured blood vessel
3250,hemorrhagic purpura idiopathic,excessive bruising or bleeding
3260,hemorrhoids,
3270,hepatitis,
3280,hepatocarcinoma,
3290,hepatoma,
3300,hernia condition,
3310,"hernia, femoral",
3320,"hernia, hiatal", stomach hernia
3330,"hernia, inguinal", groin hernia
3360,herpes,
3370,high blood pressure,
3380,"hip condition, bilateral (both hips)", hip condition in both hips
3390,"hip condition, left", hip condition in left hip
3400,"hip condition, right", hip condition in right hip
3410,HIV,
3420,Hodgkin's disease,
3430,human immunodeficiency virus,
3440,hypercholesterolemia,high cholesterol levels in blood
3450,hyperlipidemia,high levels of fats in blood
3460,hypertension,high blood pressure
3470,hyperthyroidism,overactive thyroid
3480,hypothyroidism,underactive thyroid
3490,hypovitaminosis,vitamin deficiency
3500,hysterectomy,removal of uterus
3510,immune disorder,
3520,infarction,death of tissue from loss of blood supply
3530,infection(s),
3540,intestinal condition,
3550,involutional changes,age-related changes
3560,iritis,inflammation of iris in eye
3570,irritable bowel syndrome,
3580,irritable colon,
3590,jaw condition,
3600,joint disease,
3610,joint pains,
3620,kala azar,black fever
3630,keratitis,inflammation of cornea in eye
3640,kidney disease,
3650,kidney failure,
3660,kidney stone(s),
3670,kidney transplant,
3680,"kidney, removal of",
3690,"knee condition, bilateral (both knees)", knee condition in both knees
3700,"knee condition, left", knee condition in left knee
3710,"knee condition, right", knee condition in right knee
3720,knee sprain,
3730,knee strain,
3740,labyrinthitis,inner ear disorder
3750,laryngectomy,removal of larynx or voice box
3760,laryngitis,inflammation of larynx or voice box
3770,larynx condition,voice box condition
3780,"leg condition, bilateral (both legs)", leg condition in both legs
3790,"leg condition, left", leg condition in left leg
3800,"leg condition, right", leg condition in right leg
3810,leishmaniasis, parasitic disease from a sandfly
3820,leprosy,bacterial infection of skin
3830,leukemia,
3840,leukosarcoma,
3850,liver condition,
3860,loiasis,parasitic infection from an African eyeworm
3870,loose teeth,
3890,Lou Gehrig's disease,
3900,lower back condition,
3910,lower back sprain,
3920,lower back strain,
3930,lumbar sprain,
3940,lumbar strain,
3950,lung condition,
3960,lupus,
3970,"lupus, discoid",autoimmune disease affecting skin
3980,"lupus, erythematosus",systemic lupus
3990,"lupus, systemic",
4000,Lyme disease,
4010,lymphogranulomatosis,Hodgkin's lymphoma
4020,Lymphoma - except Hodgkin's disease,
4030,lymphomas,
4040,lymphosarcoma,
4050,major depression,
4060,malaria,
4080,malignant leiomyblastoma,
4090,malignant neoplasm,
4100,malignant tumor(s),
4110,malnutrition,
4120,manic-depressive reaction,
4130,mastoiditis,bacterial infection of inner and middle ear
4140,melanocarcinoma,
4150,melanoepithelioma,
4160,melanoma, type of skin cancer
4170,"melanoma, malignant",
4180,melanosarcoma,
4190,melanoscirrhus,
4200,memory loss,
4210,Meniere's disease, inner ear disorder causing vertigo and ringing in ears
4220,menstrual disorders,
4230,mental disease,
4240,mental disorder,
4250,mesenchymoma,
4260,mesonephroma,
4270,mesotheliomas of peritoneum,
4280,mesotheliomas of pleura,
4290,metritis,inflammation of uterus
4300,migraine,
4310,Morton's disease,
4320,mouth condition,
4330,multiple sclerosis, MS
4340,muscle atrophy, wasting away of muscles
4350,muscle condition,
4360,muscle injury,
4370,muscle pain,
4380,musculoskeletal condition,
4390,myasthenia gravis, autoimmune disease that causes muscle weakness and tiredness
4400,Mycosis fungoides, Alibert-Bazin syndrome or cutaneous T-cell lymphoma
4410,myelitis (a spinal cord condition), inflammation of the spinal cord
4420,myeloma, cancer of the plasma cells
4430,"myeloma, multiple", cancer in the bone marrow
4440,myocardial infarction (MI), heart attack
4450,myocarditis, inflammation of the heart muscle
4460,myositis, inflammation of the muscles
4470,myxosarcoma, malignant heart tumor
4480,nausea,
4490,neck condition,
4500,neck sprain,
4510,neck strain,
4520,"neoplasm(s), benign", abnormal mass of tissue (not cancerous)
4530,"neoplasm(s), malignant", abnormal mass of tissue (cancerous)
4540,nephritis, inflammation of the kidneys
4550,nerve damage,
4560,nerve deafness, hearing loss due to damage to the cochlea
4580,nervous system disease,
4590,nervousness,
4600,neuralgia, sharp pain from damaged or irritated nerves
4610,neuritis, inflammation of a peripheral nerve
4620,neurologic signs or symptoms,
4630,neurological disorder,
4640,"neuropathy, peripheral", pain and nerve damage beyond the brain and spine
4650,neuropsychological signs or symptoms,
4660,"new growths, malignant", new growths (cancerous)
4670,niacin deficiency,
4680,Non-Hodgkin's lymphoma (NHL),
4690,non-specific digestive complaints,
4700,non-specific ear condition,
4710,non-specific genitourinary complaints,
4720,non-specific gynecological complaints,
4730,non-specific nervous complaints,
4740,non-specific neurological complaints,
4750,non-specific orthopedic claims,
4760,non-specific respiratory condition,
4770,nose condition,
4780,nutritional deficiency,
4790,OBS, organic brain syndrome
4800,onchocerciasis, river blindness
4810,oophoritis, inflammation of an ovary
4820,optic atrophy associated with malnutrition, vision loss due to malnutrition
4830,organic brain syndrome, neurocognitive disorder
4840,organic diseases of the nervous system,
4850,organic heart disease,
4860,oroya fever, Carrion's disease
4870,orthopedic condition,
4880,Osgood Schlatter disease, knee pain
4890,osteitis deformans (Paget's disease), chronic bone disorder
4900,"osteoarthritis, post-traumatic", trauma causing the cartilage in the joint to wear away
4910,osteomalacia, softening of the bones
4920,otitis externa,swimmer's ear
5000,otitis media, ear pain and inflammation of the middle ear
5010,otosclerosis,disease of the bones of the middle and inner ear
5020,"ovarian condition, bilateral (both ovaries)", ovarian condition in both ovaries
5030,"ovary (disease, injury, or adhesions of)", disease or injury of the ovary
5040,ovary (removal of), removal of the ovary
5050,painful joints,
5060,palsy,paralysis and involuntary tremors
5070,"palsy, bulbar", problems controlling muscles in the face
5080,pancreatitis,inflammation in the pancreas
5090,paralysis,loss of muscle function in part of your body
5100,paralysis agitans,Parkinson's disease
5110,"paralysis, nerve",
5120,paranoid reaction,gradual formation of delusions
5130,parasites,
5140,parathyroid adenoma,benign tumor of the parathyroid gland
5150,parenchymal lung disease, interstitial lung disease
5160,Parkinson's disease, tremors or stiffness of movement
5170,pellagra,dementia diarrhea and dermatitis
5180,penile condition,
5190,peptic ulcer disease, stomach ulcer
5200,pericarditis,swelling and irritation of the pericardium
5210,periodontal disease (pyorrhea),gum disease
5220,periostitis,shin splints
5230,peripheral neuropathy,damage to the peripheral nerves
5240,"peripheral neuropathy, acute and subacute", weakness or pain in the hands and feet
5250,peripheral vascular disease, circulatory condition
5260,peritonitis, inflammation of the peritoneum (membrane that lines the inner abdominal wall and covers the abdominal organs)
5270,personality disorder, mental disorder with unstable moods and behavior
5280,petit mal epilepsy,Absence seizures with brief sudden lapses of consciousness
5290,pharyngeal condition,throat problems
5300,phlebitis, inflammation of a vein
5310,phobias,an extreme or irrational fear of or aversion to something
5320,pinta,skin disease
5330,plague,
5340,plantar fasciitis, stabbing foot pain or heel pain
5350,pleural effusions, unusual amount of fluid around the lung
5360,pleural plaques, asbestos exposure causing scarring in the lungs
5370,pleurisy, inflammation of the tissue that lines the lung and chest cavity
5380,pneumocystis carini pneumonia, lung infection that can affect people with weakened immune systems
5390,pneumonia,
5400,polymyositis,a rare form of arthritis that causes muscle weakness and pain
5410,porphyria cutanea tarda (PCT) (a skin condition), skin condition with painful blistering lesions
5420,PTSD (posttraumatic stress disorder),
5430,posterior subcapsular cataracts,cataract on the back surface of the lens beneath the lens capsule
5440,"pregnancy, surgical complications of", complications from surgical pregnancy
5450,primary anemia,
5460,progressive muscular atrophy,
5470,"prostate gland (injuries, infections, hypertrophy)",
5480,psoriasis, skin condition that results in scales and itchy dry patches
5490,psychomotor epilepsy,
5500,psychophysiological disorder,
5510,psychoses,
5520,psychosis, severe mental disorder that results in a disconnection from reality
5530,psychotic depressive reaction,
5540,psychotic reaction,
5550,"purpura idiopathic, hemorrhagic", autoimmune disease that can lead to easy or excessive bruising and bleeding
5560,Raynaud's disease,
5570,rectal bleeding,
5580,removal of kidneys,
5590,renal disease, kidney disease
5600,renal failure, kidney failure
5610,respiratory complaints,
5620,Reticulum cell sarcoma,
5630,retina (detachment of), detached retina
5640,"retina (scars, atrophy, or irregularities of)",
5650,retinitis,inflammation of the retina
5660,rheumatic fever,
5670,rheumatic heart disease,
5680,rheumatism, inflammation in the joints
5690,rhinitis, stuffy nose
5710,ringing in ears,
5720,"saliva, thickened",
5730,salpingitis, inflammation of the fallopian tubes
5740,sarcoidosis,
5750,sarcoma, tumor in the bones or soft tissue
5760,"sarcoma, reticulum cell",
5770,"sarcoma, soft-tissue",
5780,"sarcoma, soft-tissue - adult fibrosarcoma",
5790,"sarcoma, soft-tissue - alveolar soft part sarcoma",
5800,"sarcoma, soft-tissue - angiosarcoma",
5810,"sarcoma, soft-tissue - chondrosarcoma",
5820,"sarcoma, soft-tissue - clear cell, of tendons and aponeuroses",
5830,"sarcoma, soft-tissue - congenital fibrosarcoma",
5840,"sarcoma, soft-tissue - dermatofibrosarcoma protuberans",
5850,"sarcoma, soft-tissue - ectomesenchymoma",
5860,"sarcoma, soft-tissue - epithelioid leiomyosarcoma",
5870,"sarcoma, soft-tissue - epithelioid sarcoma",
5880,"sarcoma, soft-tissue - extraskeletal Ewing's sarcoma",
5890,"sarcoma, soft-tissue - fibrosarcoma",
5900,"sarcoma, soft-tissue - hemangiosarcoma",
5910,"sarcoma, soft-tissue - infantile fibrosarcoma",
5920,"sarcoma, soft-tissue - Kaposi's sarcoma",
5930,"sarcoma, soft-tissue - leiomyosarcoma",
5940,"sarcoma, soft-tissue - liposarcoma",
5950,"sarcoma, soft-tissue - lymphangiosarcoma",
5960,"sarcoma, soft-tissue - malignant fibrous histiocytoma",
5970,"sarcoma, soft-tissue - malignant ganglioneuroma",
5980,"sarcoma, soft-tissue - malignant glomus tumor",
5990,"sarcoma, soft-tissue - malignant granular cell tumor",
6000,"sarcoma, soft-tissue - malignant hemangiopericytoma",
6010,"sarcoma, soft-tissue - malignant leiomyoblastoma",
6020,"sarcoma, soft-tissue - malignant mesenchymoma",
6030,"sarcoma, soft-tissue - malignant schwannoma",
6040,"sarcoma, soft-tissue - malignant Triton tumor",
6050,"sarcoma, soft-tissue - mesothelioma",
6060,"sarcoma, soft-tissue - osteosarcoma",
6070,"sarcoma, soft-tissue - proliferating angioendotheliomatosis",
6080,"sarcoma, soft-tissue - rhabdomyosarcoma",
6090,"sarcoma, soft-tissue - synovial sarcoma",
6100,"sarcoma, soft-tissue - synovioma, malignant",
6110,"sarcoma,soft-tissue-malignant giant cell tumor/tendon sheath",
6130,scar formation,
6140,scar(s),
6150,"scar(s), burns",
6160,schistosomiasis, parasitic disease
6170,schizophrenia,
6180,"schwannoma, epitheloid, malignant",
6190,"schwannoma, glandular, malignant",
6200,"schwannoma, malignant with rhabdomyoblastic differentiation",
6210,scleritis, inflammatory eye disease
6220,scleroderma, hardening or tightening of the skin
6230,sebaceous cyst, non-cancerous bump on the skin
6240,seizure disorder,
6250,seminoma,
6260,shell shock,
6270,shin splints,
6280,shortness of breath,
6290,"shoulder condition, bilateral (both shoulders)", shoulder condition of both shoulders
6300,"shoulder condition, left",
6310,"shoulder condition, right",
6320,shoulder replacement,
6330,shoulder sprain,
6340,shoulder strain,
6350,sickle cell anemia,
6360,signs or symptoms involving cardiovascular system,
6370,signs or symptoms involving digestive system,
6380,signs or symptoms involving nervous system,
6390,signs or symptoms involving respiratory system,
6400,signs or symptoms involving skin,
6410,sinus condition,
6420,sinusitis,
6430,skin condition,
6440,sleep apnea,
6450,sleep disturbances,
6460,"smell, loss of sense of", no sense of smell
6470,spine,
6480,spleen condition,
6490,sprains,
6500,Sternberg's sarcoma,
6510,stomach condition,
6530,strains,
6540,strep throat,
6550,stress,
6560,stress disorder,
6570,stroke,
6590,"surgery, residuals",
6600,synoviosarcoma,
6610,synovitis, inflammation of the synovial membrane
6620,syphilis,
6630,syringomyelia, cyst in the spinal cord
6640,systemic lupus,
6650,"taste, loss of sense of", no sense of taste
6660,tendinitis,
6670,tendonitis,
6680,tenosynovitis,
6690,"testicular condition, bilateral (both testicles)", testicular condition of both testicles
6700,"testicular condition, left", testicular condition left testicle
6710,"testicular condition, right", testicular condition right testicle
6720,thiamine deficiency,
6730,thigh condition,
6740,thoracic sprain,
6750,thoracic strain,
6760,throat condition,
6770,"thromboangiitis, obliterans (Buerger's disease)",
6780,thrombocytopenia, low blood platelet count
6790,thrombocytopenic purpura,
6800,"thrombosis, brain",
6820,thyroid condition,
6830,"thyroid nodular disease, non-malignant", non-cancerous lump in thyroid
6840,tinea pedis, athlete's foot
6850,tinnitus, ringing in the ears
6870,tongue condition,
6880,tooth condition,
6890,Tuberculosis,
6900,tumor(s) - malignant glomus,
6910,tumor(s) - malignant granular cell,
6920,tumors in brain,
6930,tumors in central nervous system,
6940,tumors - malignant,
6950,tumors in peripheral nerves,
6960,tumors in spinal cord,
6970,ulcer condition,
6980,Gulf War Veteran with undiagnosed illness,
6990,Unemployability,
7000,"uterus (disease, injury, or adhesions of)", disease or injury of the uterus
7010,uterus (displacement of), displaced uterus
7020,uterus (prolapse), uterine prolapse
7030,uterus (removal of),
7040,uveitis, inflammation of middle layer of eye
7050,vaginal condition,
7060,vaginitis,
7070,valvular heart disease,
7080,varicose veins,
7090,vasectomy,
7100,Vincent's stomatitis, trench mouth
7120,visually impaired,
7130,vitamin deficiency,
7140,voice loss,
7150,vulvovaginitis,
7160,war neurosis, combat fatigue
7170,wasting syndrome, ill health and weight and muscle loss
7180,worms, intestinal or parasitic worms
7190,"wound, bullet",
7200,"wound, shell fragment",
7210,"wrist condition, bilateral (both wrists)", wrist condition of both wrists
7220,"wrist condition, left",
7230,"wrist condition, right",
7240,wrist sprain,
7250,wrist strain,
7260,yaws, infectious tropical disease
7270,yellow fever,
7280,hepatitis B,
7290,PTSD personal trauma,
7300,hepatitis C,
100,Addison's disease,
3350,"hernia, ventral",
2065,"diabetes, adult onset", adult-onset diabetes
2085,"diabetes mellitus, type 2", Type 2 diabetes
3035,Gulf War Veteran with diagnosed illness as qualifying chronic disability,
530,ASHD, thickening and hardening of the arteries
8918,hip replacement,
8919,knee replacement,
8920,adhesions - digestive,
8921,adhesions - gynecological,
8922,adhesions - heart/veins/arteries,
8923,adhesions - neurological other system,
8924,adhesions - respiratory,
8926,aneurysm - heart/veins/arteries,
8927,aneurysm - neurological other system,
8928,arm condition - heart/veins/arteries,
8929,arm condition - neurological other system,
8930,Caisson disease, decompression sickness
8931,cancer - dental and oral,
8932,cancer - digestive,
8933,cancer - endocrine,
8934,cancer - eye (vision),
8935,cancer - genitourinary, cancer of the genitals or urinary organs
8937,cancer - gynecological,
8938,cancer - heart/veins/arteries,
8939,cancer - hemic and lymphatic,
8940,cancer - musculoskeletal - other,
8941,cancer - musculoskeletal and muscle injuries,
8942,cancer - neurological other system,
8943,cold injury residuals,
8945,cyst/benign growth - dental and oral,
8946,cyst/benign growth - digestive,
8947,cyst/benign growth - ear disease and other sense organs,
8948,cyst/benign growth - endocrine,
8949,cyst/benign growth - eye (vision),
8950,cyst/benign growth - genitourinary,cyst/benign growth in the genitals or urinary organs
8951,cyst/benign growth - gynecological,
8952,cyst/benign growth - musculoskeletal - ankle,
8953,cyst/benign growth - musculoskeletal - elbow,
8954,cyst/benign growth - musculoskeletal - foot,
8955,cyst/benign growth - musculoskeletal - hand,
8956,cyst/benign growth - musculoskeletal - hip,
8957,cyst/benign growth - musculoskeletal - knee,
8958,cyst/benign growth - musculoskeletal - mid- or lower back (thoracolumbar spine),
8959,cyst/benign growth - musculoskeletal - neck or upper back (cervical spine),
8960,cyst/benign growth - musculoskeletal - other,
8961,cyst/benign growth - musculoskeletal - shoulder,
8962,cyst/benign growth - musculoskeletal - wrist,
8963,cyst/benign growth - musculoskeletal and muscle injuries,
8964,cyst/benign growth - neurological other system,
8965,cyst/benign growth - respiratory,
8966,cyst/benign growth - skin,
8970,eating disorders,
8971,embolism/infarction - heart/veins/arteries,
8972,embolism/infarction - neurological other system,
8973,endocrine,
8975,genitourinary, genital or urinary organs
8976,Gulf War unexplained chronic multisymptom illness,
8977,Gulf War undiagnosed illness,
8980,child not capable of self-support, disabled child
8982,hemorrhage - eye (vision),
8983,hemorrhage - neurological other system,
8984,herpes - eye (vision),
8985,herpes - skin,
8986,"infectious diseases, immune disorders and nutritional deficiencies",
8987,leg condition - heart/veins/arteries,
8988,leg condition - neurological other system,
8989,mental disorders,
8990,musculoskeletal - amputations,
8991,musculoskeletal - ankle,
8992,musculoskeletal - arthritis,
8993,musculoskeletal - elbow,
8995,musculoskeletal - hand,
8996,musculoskeletal - hip,
8997,musculoskeletal - knee,
8998,musculoskeletal - mid- or lower back (thoracolumbar spine),
8999,musculoskeletal - neck or upper back (cervical spine),
9000,musculoskeletal - osteomyelitis, bone infection
9001,musculoskeletal - other,
9002,musculoskeletal - shoulder,
9003,musculoskeletal - toe amputations,
9004,musculoskeletal - wrist,
9005,musculoskeletal and muscle injuries,
9006,neurological - cranial or peripheral nerves,
9007,neurological other system,
9010,Posttraumatic Stress Disorder (PTSD) combat - mental disorders,
9011,Posttraumatic Stress Disorder (PTSD) personal trauma - mental disorders,
9012,respiratory,
9013,scars (extremities and trunk/torso),
9014,"scars (head, face, neck)",
9015,service-connected death,
9019,Spina bifida,
9020,Traumatic Brain Injury,
9021,"wound, bullet/shell fragment",
8994,musculoskeletal - foot,
249468,cancer - ear disease and other sense organs,
249469,cancer - musculoskeletal - ankle,
249470,cancer - musculoskeletal - elbow,
249471,cancer - musculoskeletal - hand,
249472,cancer - musculoskeletal - foot,
249473,cancer - musculoskeletal - knee,
249474,cancer - musculoskeletal - hip,
249475,cancer - musculoskeletal - mid- or lower back (thoracolumbar spine),
249476,cancer - musculoskeletal - neck or upper back (cervical spine),
249477,cancer - musculoskeletal - shoulder,
249478,cancer - musculoskeletal - wrist,
249479,cyst/benign growth - heart/veins/arteries,
249480,cyst/benign growth - hemic and lymphatic,
|
0
|
code_files/vets-api-private
|
code_files/vets-api-private/spec/spec_helper.rb
|
# frozen_string_literal: true
require 'i18n'
require 'support/codeowners_parser'
require 'support/spec_builders'
require 'support/matchers'
require 'support/spool_helpers'
require 'support/excel_helpers'
require 'support/fixture_helpers'
require 'support/silence_stream'
require 'simplecov_helper'
require 'sidekiq-pro' if Gem.loaded_specs.key?('sidekiq-pro')
require 'support/rswag/text_helpers'
require 'support/sidekiq/batch'
require 'support/stub_va_profile'
require 'pundit/rspec'
require 'rspec/its'
require 'rspec/retry'
require 'aasm/rspec'
require 'mock_redis'
# By default run SimpleCov, but allow an environment variable to disable.
unless ENV['NOCOVERAGE']
require 'simplecov'
SimpleCovHelper.start_coverage
end
# @see https://medium.com/@petro.yakubiv/testing-time-and-date-in-rspec-98483ce7a138
# Passes when +actual+ and +expected+ represent the same instant at
# one-second precision (sub-second differences are ignored).
RSpec::Matchers.define :be_the_same_time_as do |expected|
  match do |actual|
    formatted = '%d/%m/%Y %H:%M:%S'
    # Compare actual against expected (not the reverse) so RSpec's failure
    # output labels the expected/actual values correctly.
    expect(actual.strftime(formatted)).to eq(expected.strftime(formatted))
  end
end
# This file was generated by the `rails generate rspec:install` command. Conventionally, all
# specs live under a `spec` directory, which RSpec adds to the `$LOAD_PATH`.
# The generated `.rspec` file contains `--require spec_helper` which will cause
# this file to always be loaded, without a need to explicitly require it in any
# files.
#
# Given that it is always loaded, you are encouraged to keep this file as
# light-weight as possible. Requiring heavyweight dependencies from this file
# will add to the boot time of your test suite on EVERY test run, even for an
# individual file that may not need all of that loaded. Instead, consider making
# a separate helper file that requires the additional dependencies and performs
# the additional setup, and require it from the spec files that actually need
# it.
#
# The `.rspec` file also contains a few flags that are not defaults but that
# users commonly want.
#
# See http://rubydoc.info/gems/rspec-core/RSpec/Core/Configuration
RSpec.configure do |config|
  # Randomize spec order to surface order-dependent examples; the seed is
  # printed so a failing ordering can be reproduced with --seed.
  config.order = :random
  Kernel.srand config.seed
  config.filter_run focus: true
  config.filter_run_excluding skip: true unless ENV['PENDING'] == 'true'
  config.run_all_when_everything_filtered = true
  config.example_status_persistence_file_path = 'tmp/specs.txt'
  # rspec-retry: retry flaky examples, but only on CI.
  config.default_retry_count = 3 if ENV['CI']
  # rspec-expectations config goes here. You can use an alternate
  # assertion/expectation library such as wrong or the stdlib/minitest
  # assertions if you prefer.
  config.expect_with :rspec do |expectations|
    # This option will default to `true` in RSpec 4. It makes the `description`
    # and `failure_message` of custom matchers include text for helper methods
    # defined using `chain`, e.g.:
    # be_bigger_than(2).and_smaller_than(4).description
    # # => "be bigger than 2 and smaller than 4"
    # ...rather than:
    # # => "be bigger than 2"
    expectations.include_chain_clauses_in_custom_matcher_descriptions = true
  end
  # rspec-mocks config goes here. You can use an alternate test double
  # library (such as bogus or mocha) by changing the `mock_with` option here.
  config.mock_with :rspec do |mocks|
    # Prevents you from mocking or stubbing a method that does not exist on
    # a real object. This is generally recommended, and will default to
    # `true` in RSpec 4.
    mocks.verify_partial_doubles = true
  end
  config.before(:suite) do
    # Some specs stub out `YAML.load_file`, which I18n uses to load the
    # translation files. Because rspec runs things in random order, it's
    # possible that the YAML.load_file that's stubbed out for a spec
    # could actually be called by I18n if translations are required before
    # the functionality being tested. Once loaded, the translations stay
    # loaded, so we may as well take the hit and load them right away.
    # Verified working on --seed 11101, commit e378e8
    I18n.locale_available?(:en)
  end
  config.include SpecBuilders
  config.include SpoolHelpers
  config.include ExcelHelpers
  config.include FixtureHelpers
  # Freeze time for any example tagged `run_at: '<timestamp>'`.
  config.around(:example, :run_at) do |example|
    Timecop.freeze(Time.zone.parse(example.metadata[:run_at]))
    example.run
    Timecop.return
  end
  # enable `allow_forgery_protection` in Lighthouse specs to ensure that the endpoints
  # in those modules have explicitly skipped the CSRF protection functionality
  lighthouse_dirs = %r{
    modules/
    (appeals_api|apps_api|claims_api|openid_auth|vba_documents|
    veteran)/
  }x
  config.define_derived_metadata(file_path: lighthouse_dirs) do |metadata|
    metadata[:enable_csrf_protection] = true
  end
  config.before(:all, :enable_csrf_protection) do
    @original_allow_forgery_protection = ActionController::Base.allow_forgery_protection
    ActionController::Base.allow_forgery_protection = true
  end
  config.after(:all, :enable_csrf_protection) do
    ActionController::Base.allow_forgery_protection = @original_allow_forgery_protection
  end
  # Safety net: undo any time freezing an example left behind.
  config.after do
    Timecop.return
  end
  # Each example starts with an empty test Redis database.
  config.before do
    $redis.flushdb
  end
end
|
0
|
code_files/vets-api-private
|
code_files/vets-api-private/spec/rswag_override.rb
|
# frozen_string_literal: true
# Monkey-patch of Rswag::Specs::SwaggerFormatter. Local changes vs. the gem:
#   * docs can be suppressed per documentation environment via metadata,
#   * request-body `examples` are carried into the generated OpenAPI 3 spec,
#   * only the doc whose path matches ENV['RAILS_MODULE'] is written to disk.
module Rswag
  module Specs
    class SwaggerFormatter < ::RSpec::Core::Formatters::BaseTextFormatter
      # Merges a finished example group's swagger metadata into the
      # appropriate openapi_spec document (upgrading swagger 2 constructs
      # to OpenAPI 3 where the target doc version requires it).
      def example_group_finished(notification)
        metadata = if RSPEC_VERSION > 2
                     notification.group.metadata
                   else
                     notification.metadata
                   end
        # !metadata[:document] won't work, since nil means we should generate
        # docs.
        return if metadata[ENV['DOCUMENTATION_ENVIRONMENT']&.to_sym] == false
        return if metadata[:document] == false
        return unless metadata.key?(:response)
        openapi_spec = @config.get_openapi_spec(metadata[:openapi_spec])
        puts "metadata[:swagger_doc] => #{metadata[:swagger_doc]}" if metadata[:swagger_doc].present?
        unless doc_version(openapi_spec).start_with?('2')
          # This is called multiple times per file!
          # metadata[:operation] is also re-used between examples within file
          # therefore be careful NOT to modify its content here.
          upgrade_request_type!(metadata)
          upgrade_servers!(openapi_spec)
          upgrade_oauth!(openapi_spec)
          upgrade_response_produces!(openapi_spec, metadata)
        end
        openapi_spec.deep_merge!(metadata_to_swagger(metadata))
      end
      # rubocop:disable Layout/LineLength, Style/CommentedKeyword, Metrics/MethodLength
      # Final pass at end of run: converts body/formData parameters to
      # OpenAPI 3 requestBody entries and writes each relevant doc to disk.
      def stop(_notification = nil)
        @config.openapi_specs.each do |url_path, doc|
          unless doc_version(doc).start_with?('2')
            doc[:paths]&.each_pair do |_k, v|
              v.each_pair do |_verb, value|
                is_hash = value.is_a?(Hash)
                if is_hash && value[:parameters]
                  schema_param = value[:parameters]&.find { |p| %i[body formData].include?(p[:in]) && p[:schema] }
                  mime_list = value[:consumes] || doc[:consumes]
                  if value && schema_param && mime_list
                    value[:requestBody] = { content: {} } unless value.dig(:requestBody, :content)
                    value[:requestBody][:required] = true if schema_param[:required]
                    mime_list.each do |mime|
                      value[:requestBody][:content][mime] = { schema: schema_param[:schema] }.merge(request_examples(value)) # Changed line
                    end
                  end
                  # Body/formData params are now represented by requestBody.
                  value[:parameters].reject! { |p| %i[body formData].include?(p[:in]) }
                end
                remove_invalid_operation_keys!(value)
              end
            end
          end
          if relevant_path?(url_path) # Added conditional
            file_path = File.join(@config.openapi_root, url_path)
            dirname = File.dirname(file_path)
            FileUtils.mkdir_p dirname
            File.write(file_path, pretty_generate(doc))
            @output.puts "Swagger doc generated at #{file_path}"
          end # Added conditional
        end
      end
      # rubocop:enable Layout/LineLength, Style/CommentedKeyword, Metrics/MethodLength
      private # Added methods
      # Pulls any `examples` declared on the body/formData parameter so they
      # can be attached to the OpenAPI 3 requestBody content entry.
      def request_examples(value)
        examples = value[:parameters]&.find { |p| %i[body formData].include?(p[:in]) && p[:examples] }
        if examples && examples[:examples]
          { examples: examples[:examples] }
        else
          {}
        end
      end
      # Only write docs whose path belongs to the engine under test.
      # NOTE(review): ENV.fetch('RAILS_MODULE') raises KeyError when unset —
      # confirm the swaggerize rake task always exports it.
      def relevant_path?(url_path)
        url_path.include?(ENV.fetch('RAILS_MODULE'))
      end
    end
  end
end
|
0
|
code_files/vets-api-private
|
code_files/vets-api-private/spec/simplecov_helper.rb
|
# frozen_string_literal: true
# spec/simplecov_helper.rb
require 'active_support/inflector'
require 'simplecov'
require_relative 'support/codeowners_parser'
# Central SimpleCov setup: starts coverage tracking, filters files that
# should not count toward coverage, and groups results by engine/module.
class SimpleCovHelper
  # Boots SimpleCov with the 'rails' profile. Under parallel_tests
  # (TEST_ENV_NUMBER set) each process gets its own command name and
  # formats its own result at exit.
  def self.start_coverage
    SimpleCov.start 'rails' do
      # NOTE(review): the result of this fetch is unused — looks like a
      # leftover from the commented-out minimum-coverage logic below.
      ENV.fetch('SKIP_COVERAGE_CHECK', 'false')
      # NOTE(review): debug print of the parallel process number; consider
      # removing or routing through a logger.
      print(ENV.fetch('TEST_ENV_NUMBER', nil))
      # parallel_tests_count = ParallelTests.number_of_running_processes
      # SimpleCov.command_name "(#{ENV['TEST_ENV_NUMBER'] || '1'}/#{parallel_tests_count})"
      SimpleCov.command_name "rspec-#{ENV['TEST_ENV_NUMBER'] || '0'}"
      track_files '{app,lib}/**/*.rb'
      add_filters
      add_modules
      # parse_codeowners
      # skip_check_coverage = ENV.fetch('SKIP_COVERAGE_CHECK', 'false')
      # minimum_coverage(90) unless skip_check_coverage
      # refuse_coverage_drop unless skip_check_coverage
      # merge_timeout(3600)
      # Enforce the coverage floor only on CI.
      if ENV['CI']
        SimpleCov.minimum_coverage 90
        SimpleCov.refuse_coverage_drop
      end
    end
    if ENV['TEST_ENV_NUMBER'] # parallel specs
      SimpleCov.at_exit do
        # SimpleCovHelper.report_coverage
        result = SimpleCov.result
        result.format!
        # SimpleCovHelper.report_coverage # merge and format
      end
    end
  end
  # Collates the per-process .resultset JSON files into a single merged
  # report. Silently returns nil when SimpleCov raises (e.g. no result
  # sets found).
  def self.report_coverage(base_dir: './coverage')
    SimpleCov.collate Dir["#{base_dir}/.resultset*.json"] do
      add_filters
      add_modules
    end
  rescue RuntimeError
    nil
  end
  # Paths excluded from coverage accounting (config shims, generated
  # swagger artifacts, rake tasks, etc.).
  def self.add_filters
    add_filter 'app/models/in_progress_disability_compensation_form.rb'
    add_filter 'lib/apps/configuration.rb'
    add_filter 'lib/apps/responses/response.rb'
    add_filter 'lib/config_helper.rb'
    add_filter 'lib/clamav'
    add_filter 'lib/feature_flipper.rb'
    add_filter 'lib/gibft/configuration.rb'
    add_filter 'lib/salesforce/configuration.rb'
    add_filter 'lib/search/response.rb'
    add_filter 'lib/search_gsa/response.rb'
    add_filter 'lib/va_profile/v3/address_validation/configuration.rb'
    add_filter 'lib/va_profile/exceptions/builder.rb'
    add_filter 'lib/va_profile/response.rb'
    add_filter 'lib/vet360/address_validation/configuration.rb'
    add_filter 'lib/vet360/exceptions/builder.rb'
    add_filter 'lib/vet360/response.rb'
    add_filter 'lib/rubocop/*'
    add_filter 'modules/appeals_api/app/swagger'
    add_filter 'modules/apps_api/app/controllers/apps_api/docs/v0/api_controller.rb'
    add_filter 'modules/apps_api/app/swagger'
    add_filter 'modules/burials/lib/benefits_intake/submission_handler.rb'
    add_filter 'modules/check_in/config/initializers/statsd.rb'
    add_filter 'modules/claims_api/app/controllers/claims_api/v1/forms/disability_compensation_controller.rb'
    add_filter 'modules/claims_api/app/swagger/*'
    add_filter 'modules/pensions/app/swagger'
    add_filter 'modules/pensions/lib/benefits_intake/submission_handler.rb'
    add_filter 'modules/vre/app/services/vre'
    add_filter 'modules/**/db/*'
    add_filter 'modules/**/lib/tasks/*'
    add_filter 'rakelib/'
    add_filter '**/rakelib/**/*'
    add_filter '**/rakelib/*'
    add_filter 'version.rb'
  end
  # Coverage report groups — mostly one group per Rails engine under
  # modules/, plus a few app/ subdirectories.
  def self.add_modules
    # Modules
    add_group 'AccreditedRepresentativePortal', 'modules/accredited_representative_portal/'
    add_group 'AppealsApi', 'modules/appeals_api/'
    add_group 'AppsApi', 'modules/apps_api'
    add_group 'AskVAApi', 'modules/ask_va_api/'
    add_group 'Avs', 'modules/avs/'
    add_group 'BPDS', 'modules/bpds/'
    add_group 'Banners', 'modules/banners/'
    add_group 'Burials', 'modules/burials/'
    add_group 'CheckIn', 'modules/check_in/'
    add_group 'ClaimsApi', 'modules/claims_api/'
    add_group 'ClaimsEvidenceApi', 'modules/claims_evidence_api/'
    add_group 'CovidResearch', 'modules/covid_research/'
    add_group 'DebtsApi', 'modules/debts_api/'
    add_group 'DecisionReviews', 'modules/decision_reviews'
    add_group 'DependentsBenefits', 'modules/dependents_benefits/'
    add_group 'DependentsVerification', 'modules/dependents_verification/'
    add_group 'DhpConnectedDevices', 'modules/dhp_connected_devices/'
    add_group 'DigitalFormsApi', 'modules/digital_forms_api/'
    add_group 'EmploymentQuestionnaires', 'modules/employment_questionnaires/'
    add_group 'FacilitiesApi', 'modules/facilities_api/'
    add_group 'IncomeAndAssets', 'modules/income_and_assets/'
    add_group 'IncreaseCompensation', 'modules/increase_compensation/'
    add_group 'IvcChampva', 'modules/ivc_champva/'
    add_group 'MedicalExpenseReports', 'modules/medical_expense_reports/'
    add_group 'RepresentationManagement', 'modules/representation_management/'
    add_group 'SimpleFormsApi', 'modules/simple_forms_api/'
    add_group 'IncomeLimits', 'modules/income_limits/'
    add_group 'MebApi', 'modules/meb_api/'
    add_group 'Mobile', 'modules/mobile/'
    add_group 'MyHealth', 'modules/my_health/'
    add_group 'Pensions', 'modules/pensions/'
    add_group 'Policies', 'app/policies'
    add_group 'Serializers', 'app/serializers'
    add_group 'Services', 'app/services'
    add_group 'Sob', 'modules/sob/'
    add_group 'SurvivorsBenefits', 'modules/survivors_benefits/'
    add_group 'Swagger', 'app/swagger'
    add_group 'TestUserDashboard', 'modules/test_user_dashboard/'
    add_group 'TravelPay', 'modules/travel_pay/'
    add_group 'Uploaders', 'app/uploaders'
    add_group 'VRE', 'modules/vre/'
    add_group 'VaNotify', 'modules/va_notify/'
    add_group 'VAOS', 'modules/vaos/'
    add_group 'VBADocuments', 'modules/vba_documents/'
    add_group 'Veteran', 'modules/veteran/'
    add_group 'VeteranVerification', 'modules/veteran_verification/'
    add_group 'Vye', 'modules/vye/'
  end
  # Groups files by CODEOWNERS team; currently unused (see the commented
  # call in start_coverage).
  def self.parse_codeowners
    # Team Groups
    codeowners_parser = CodeownersParser.new
    octo_identity_files = codeowners_parser.perform('octo-identity')
    add_group 'OctoIdentity', octo_identity_files
  end
end
|
0
|
code_files/vets-api-private
|
code_files/vets-api-private/spec/rails_helper.rb
|
# frozen_string_literal: true
# This file is copied to spec/ when you run 'rails generate rspec:install'
ENV['RAILS_ENV'] ||= 'test'
ENV['RACK_ENV'] ||= 'test' # Shrine uses this to determine log levels
require File.expand_path('../config/environment', __dir__)
# Prevent database truncation if the environment is production
abort('The Rails environment is running in production mode!') if Rails.env.production?
require 'statsd-instrument'
require 'statsd/instrument/matchers'
require 'rspec/rails'
require 'webmock/rspec'
require 'shoulda/matchers'
require 'sidekiq/semantic_logging'
require 'sidekiq/error_tag'
require 'support/stub_va_profile'
require 'support/mpi/stub_mpi'
require 'support/va_profile/stub_vaprofile_user'
require 'support/factory_bot'
require 'support/serializer_spec_helper'
require 'support/validation_helpers'
require 'support/model_helpers'
require 'support/authenticated_session_helper'
require 'support/aws_helpers'
require 'support/vcr'
require 'support/mdot_helpers'
require 'support/financial_status_report_helpers'
require 'support/poa_stub'
require 'support/sm_spec_helper'
require 'support/vcr_multipart_matcher_helper'
require 'support/request_helper'
require 'support/uploader_helpers'
require 'support/sign_in'
require 'super_diff/rspec-rails'
require 'super_diff/active_support'
require './spec/support/default_configuration_helper'
WebMock.disable_net_connect!(allow_localhost: true)
SemanticLogger.sync!
# Helper for testing changes to the global Settings object: temporarily
# overrides the given keys for the duration of the block, then restores
# the original values — even if the block raises.
#
#   with_settings(Settings.some_group, foo: 'temp1', bar: 'temp2') do
#     expect(something).to equal(2)
#   end
#
# @param settings [#[], #[]=] the settings object (or sub-group) to mutate
# @param temp_values [Hash] key => temporary value pairs
# @return the block's return value
def with_settings(settings, temp_values)
  # Snapshot the current values with plain Ruby (`to_h` with a block)
  # instead of ActiveSupport's `index_with`, removing an implicit
  # dependency without changing behavior.
  old_settings = temp_values.keys.to_h { |k| [k, settings[k]] }
  # The `Config` object doesn't support `.merge!`, so manually copy
  # the updated values.
  temp_values.each do |k, v|
    settings[k] = v
  end
  yield
ensure
  # Guard: if the snapshot itself raised, there is nothing to restore.
  old_settings&.each do |k, v|
    settings[k] = v
  end
end
# Strictest possible VCR matching: method, URI, headers, and body must all match.
VCR::MATCH_EVERYTHING = { match_requests_on: %i[method uri headers body] }.freeze
# Convenience matcher list for specs: match recorded requests on HTTP
# method, URI, and body (but not headers).
module VCR
  # @return [Array<Symbol>] the request attributes to match on
  def self.all_matches
    [:method, :uri, :body]
  end
end
# Enable rspec metadata integration (e.g. `:vcr` tags on examples).
VCR.configure(&:configure_rspec_metadata!)
VCR.configure do |c|
  # Force cassette response bodies to UTF-8 for interactions recorded
  # under the :force_utf8 tag.
  c.before_record(:force_utf8) do |interaction|
    interaction.response.body.force_encoding('UTF-8')
  end
end
VCR.configure do |config|
  # Requests that should never be recorded or matched against cassettes.
  ignored_uris = [
    'http://169.254.169.254/latest/api/token' # ec2
  ]
  config.ignore_request do |request|
    ignored_uris.include?(request.uri)
  end
end
# Disable Datadog APM tracing in the test environment.
Datadog.configure do |c|
  c.tracing.enabled = false
end
ActiveRecord::Migration.maintain_test_schema!
require 'sidekiq/testing'
# Queue jobs in-memory instead of executing them; specs drain explicitly.
Sidekiq::Testing.fake!
Sidekiq::Testing.server_middleware do |chain|
  chain.add Sidekiq::SemanticLogging
  chain.add SidekiqStatsInstrumentation::ServerMiddleware
  chain.add Sidekiq::ErrorTag
end
require 'shrine/storage/memory'
# Keep Shrine uploads in memory so specs never touch disk or S3.
Shrine.storages = {
  cache: Shrine::Storage::Memory.new,
  store: Shrine::Storage::Memory.new
}
# Isolate CarrierWave uploads per parallel test process (TEST_ENV_NUMBER).
CarrierWave.root = Rails.root.join('spec', 'support', "uploads#{ENV.fetch('TEST_ENV_NUMBER', nil)}")
# Allow RSpec mock/stub helpers inside FactoryBot definitions.
FactoryBot::SyntaxRunner.class_eval do
  include RSpec::Mocks::ExampleMethods
end
RSpec.configure do |config|
  # Remove this line if you're not using ActiveRecord or ActiveRecord fixtures
  # config.fixture_paths = Array(Rails.root / 'spec/fixtures')
  config.include(ValidationHelpers, type: :model)
  %i[controller model].each do |type|
    config.include(ModelHelpers, type:)
  end
  config.include(SAML, type: :controller)
  config.include(AwsHelpers, type: :aws_helpers)
  config.include(UploaderHelpers, uploader_helpers: true)
  %i[controller mdot_helpers request].each do |type|
    config.include(MDOTHelpers, type:)
  end
  # Allows setting of filenet_id in the FinancialStatusReport model
  config.include FinancialStatusReportHelpers, type: :controller
  config.include FinancialStatusReportHelpers, type: :service
  config.include FinancialStatusReportHelpers, type: :request
  # Adding support for url_helper
  config.include Rails.application.routes.url_helpers
  # If you're not using ActiveRecord, or you'd prefer not to run each of your
  # examples within a transaction, remove the following line or assign false
  # instead of true.
  config.use_transactional_fixtures = true
  config.include FactoryBot::Syntax::Methods
  # RSpec Rails can automatically mix in different behaviours to your tests
  # based on their file location, for example enabling you to call `get` and
  # `post` in specs under `spec/controllers`.
  #
  # You can disable this behaviour by removing the line below, and instead
  # explicitly tag your specs with their type, e.g.:
  #
  # RSpec.describe UsersController, :type => :controller do
  # # ...
  # end
  #
  # The different available types are documented in the features, such as in
  # https://relishapp.com/rspec/rspec-rails/docs
  config.infer_spec_type_from_file_location!
  # set `:type` for serializers directory
  config.define_derived_metadata(file_path: Regexp.new('/spec/serializers/')) do |metadata|
    metadata[:type] = :serializer
  end
  # Filter lines from Rails gems in backtraces.
  config.filter_rails_from_backtrace!
  # arbitrary gems may also be filtered via:
  # config.filter_gems_from_backtrace("gem name")
  # serializer_spec_helper
  config.include SerializerSpecHelper, type: :serializer
  # authentication_session_helper
  config.include AuthenticatedSessionHelper, type: :request
  config.include AuthenticatedSessionHelper, type: :controller
  # ability to test options
  config.include RequestHelper, type: :request
  config.include StatsD::Instrument::Matchers
  # Controller specs need the configured hostname for URL generation.
  config.before :each, type: :controller do
    request.host = Settings.hostname
  end
  # Stub external identity/profile services unless a spec opts out via
  # metadata, and clear any enqueued Sidekiq jobs between examples.
  config.before do |example|
    stub_mpi unless example.metadata[:skip_mvi]
    stub_va_profile unless example.metadata[:skip_va_profile]
    stub_vaprofile_user unless example.metadata[:skip_va_profile_user]
    Sidekiq::Job.clear_all
  end
  # clean up carrierwave uploads
  # https://github.com/carrierwaveuploader/carrierwave/wiki/How-to:-Cleanup-after-your-Rspec-tests
  config.after(:all) do
    FileUtils.rm_rf(Rails.root.glob("spec/support/uploads#{ENV.fetch('TEST_ENV_NUMBER', nil)}")) if Rails.env.test?
  end
end
# Route BGS SOAP client logging through the Rails logger.
BGS.configure do |config|
  config.logger = Rails.logger
end
# Silence gem deprecation warnings during test runs.
Gem::Deprecate.skip = true
# Wire shoulda-matchers into RSpec with Rails integrations.
Shoulda::Matchers.configure do |config|
  config.integrate do |with|
    with.test_framework :rspec
    with.library :rails
  end
end
|
0
|
code_files/vets-api-private
|
code_files/vets-api-private/spec/swagger_helper.rb
|
# frozen_string_literal: true
require 'rails_helper'
require_relative 'support/rswag_config'
RSpec.configure do |config|
  # Specify a root folder where Swagger JSON files are generated
  # NOTE: If you're using the rswag-api to serve API descriptions, you'll need
  # to ensure that it's configured to serve Swagger from the same folder
  config.openapi_root = Rails.root
  # Define one or more Swagger documents and provide global metadata for each one
  # When you run the 'rswag:specs:swaggerize' rake task, the complete Swagger will
  # be generated at the provided relative path under openapi_root
  # By default, the operations defined in spec files are added to the first
  # document below. You can override this behavior by adding a openapi_spec tag to the
  # the root example_group in your specs e.g.
  # describe '...', openapi_spec: 'modules/claims_api/app/swagger/claims_api/v2/swagger.json'
  # Engines that maintain their own rswag configuration.
  mods = [RepresentationManagement, ClaimsApi, AppealsApi]
  # Load each engine’s rswag config file
  mods.each do |m|
    require_relative m::Engine.root.join('spec', 'support', 'rswag_config')
  end
  # Merge base + per-engine configs
  combined = mods
             .map { |m| m::RswagConfig.new.config }
             .reduce({}, :deep_merge)
  config.openapi_specs = RswagConfig.new.config.merge(combined)
  # Specify the format of the output Swagger file when running 'rswag:specs:swaggerize'.
  # The openapi_specs configuration option has the filename including format in
  # the key, this may want to be changed to avoid putting yaml in json files.
  # Defaults to json. Accepts ':json' and ':yaml'.
  config.openapi_format = :json
end
|
0
|
code_files/vets-api-private/spec
|
code_files/vets-api-private/spec/uploaders/supporting_evidence_attachment_uploader_spec.rb
|
# frozen_string_literal: true
require 'rails_helper'
RSpec.describe SupportingEvidenceAttachmentUploader do
  subject { described_class.new(guid) }
  let(:guid) { '1234' }
  it 'allows image, pdf, and text files' do
    expect(subject.extension_allowlist).to match_array %w[pdf png gif tiff tif jpeg jpg bmp txt]
  end
  it 'returns a store directory containing guid' do
    expect(subject.store_dir).to eq "disability_compensation_supporting_form/#{guid}"
  end
  it 'throws an error if no guid is given' do
    blank_uploader = described_class.new(nil)
    expect { blank_uploader.store_dir }.to raise_error(RuntimeError, 'missing guid')
  end
  # The transaction-logging methods must never emit request headers, which
  # can carry PII (filenames, user agents).
  describe 'logging methods' do
    let(:mock_file) do
      double('uploaded_file', size: 1024, headers: {
               'Content-Type' => 'application/pdf',
               'User-Agent' => 'Mozilla/5.0',
               'filename' => 'PII.pdf'
             })
    end
    describe '#log_transaction_start' do
      it 'logs process_id, filesize, and upload_start without file_headers' do
        freeze_time = Time.parse('2025-08-26 12:00:00 UTC')
        allow(Time).to receive(:current).and_return(freeze_time)
        allow(Rails.logger).to receive(:info)
        subject.log_transaction_start(mock_file)
        expected_log = {
          process_id: Process.pid,
          filesize: 1024,
          upload_start: freeze_time
        }
        expect(Rails.logger).to have_received(:info).with(expected_log)
      end
      it 'does not log file headers which could contain PII' do
        allow(Rails.logger).to receive(:info) do |log_data|
          expect(log_data).not_to have_key(:file_headers)
          expect(log_data.values.join).not_to include('Mozilla')
          expect(log_data.values.join).not_to include('User-Agent')
          expect(log_data.values.join).not_to include('PII.pdf')
        end
        subject.log_transaction_start(mock_file)
        expect(Rails.logger).to have_received(:info)
      end
    end
    describe '#log_transaction_complete' do
      it 'logs process_id, filesize, and upload_complete without file_headers' do
        freeze_time = Time.parse('2025-08-26 12:00:00 UTC')
        allow(Time).to receive(:current).and_return(freeze_time)
        allow(Rails.logger).to receive(:info)
        subject.log_transaction_complete(mock_file)
        expected_log = {
          process_id: Process.pid,
          filesize: 1024,
          upload_complete: freeze_time
        }
        expect(Rails.logger).to have_received(:info).with(expected_log)
      end
      it 'does not log file headers which could contain PII' do
        allow(Rails.logger).to receive(:info) do |log_data|
          expect(log_data).not_to have_key(:file_headers)
          expect(log_data.values.join).not_to include('Mozilla')
          expect(log_data.values.join).not_to include('User-Agent')
        end
        subject.log_transaction_complete(mock_file)
        expect(Rails.logger).to have_received(:info)
      end
    end
  end
end
|
0
|
code_files/vets-api-private/spec
|
code_files/vets-api-private/spec/uploaders/hca_attachment_uploader_spec.rb
|
# frozen_string_literal: true
require 'rails_helper'
require 'carrierwave/test/matchers'
RSpec.describe HCAAttachmentUploader, type: :uploader do
  include CarrierWave::Test::Matchers
  let(:uploader) { described_class.new(guid) }
  let(:guid) { 'test-guid' }
  let(:file) do
    Rack::Test::UploadedFile.new(
      Rails.root.join('spec', 'fixtures', 'files', 'doctors-note.png'),
      'image/png'
    )
  end
  # Enable CarrierWave processing (image conversion) for these examples
  # only, and store the fixture up-front.
  before do
    CarrierWave.configure do |config|
      config.enable_processing = true
    end
    uploader.store!(file)
  end
  after do
    uploader.remove!
    CarrierWave.configure do |config|
      config.enable_processing = false
    end
  end
  describe '#initialize' do
    context 'when Rails.env is production' do
      let(:settings) do
        OpenStruct.new(
          aws_access_key_id: 'access-key',
          aws_secret_access_key: 'shh-its-a-secret',
          region: 'my-region',
          bucket: 'bucket/path'
        )
      end
      before do
        allow(Settings).to receive(:hca).and_return(OpenStruct.new(s3: settings))
        allow(Rails.env).to receive(:production?).and_return(true)
      end
      it 'sets AWS config with production settings' do
        expect_any_instance_of(HCAAttachmentUploader).to receive(:set_aws_config).with(
          Settings.hca.s3.aws_access_key_id,
          Settings.hca.s3.aws_secret_access_key,
          Settings.hca.s3.region,
          Settings.hca.s3.bucket
        )
        described_class.new('test-guid')
      end
    end
    context 'when Rails.env is not production' do
      before do
        allow(Rails.env).to receive(:production?).and_return(false)
      end
      it 'does not set AWS config' do
        expect_any_instance_of(HCAAttachmentUploader).not_to receive(:set_aws_config)
        described_class.new('test-guid')
      end
    end
  end
  describe '#size_range' do
    it 'has a valid size range' do
      expect(uploader.size_range).to eq((1.byte)...(10.megabytes))
    end
  end
  # HEIC/HEIF support is gated behind the :hca_heif_attachments_enabled
  # Flipper feature flag.
  describe '#extension_allowlist' do
    context ':hca_heif_attachments_enabled enabled' do
      before do
        allow(Flipper).to receive(:enabled?).with(:hca_heif_attachments_enabled).and_return(true)
      end
      it 'allows valid file extensions' do
        expect(uploader.extension_allowlist).to include('pdf', 'doc', 'docx', 'jpg', 'jpeg', 'rtf', 'png', 'heic',
                                                        'heif')
      end
      it 'does not allow invalid file extensions' do
        expect(uploader.extension_allowlist).not_to include('exe', 'bat', 'zip')
      end
    end
    context ':hca_heif_attachments_enabled disabled' do
      before do
        allow(Flipper).to receive(:enabled?).with(:hca_heif_attachments_enabled).and_return(false)
      end
      it 'allows valid file extensions - no heic files' do
        expect(uploader.extension_allowlist).to include('pdf', 'doc', 'docx', 'jpg', 'jpeg', 'rtf', 'png')
      end
      it 'does not allow invalid file extensions' do
        expect(uploader.extension_allowlist).not_to include('exe', 'bat', 'zip', 'heic', 'heif')
      end
    end
  end
  describe '#store_dir' do
    it 'sets the correct store directory' do
      expect(uploader.store_dir).to eq('hca_attachments')
    end
  end
  describe '#filename' do
    it 'sets the filename to the guid' do
      expect(uploader.filename).to eq(guid)
    end
  end
  # Raster images are normalized to JPG on upload.
  describe 'processing' do
    context 'when the file is a PNG' do
      it 'converts the file to JPG' do
        expect(uploader).to receive(:convert).with('jpg')
        uploader.store!(file)
      end
    end
    context 'when the file is a HEIC' do
      let(:file) do
        Rack::Test::UploadedFile.new(
          Rails.root.join('spec', 'fixtures', 'files', 'steelers.heic'),
          'image/heic'
        )
      end
      context ':hca_heif_attachments_enabled enabled' do
        before do
          allow(Flipper).to receive(:enabled?).with(:hca_heif_attachments_enabled).and_return(true)
        end
        it 'converts the file to jpg' do
          expect(uploader).to receive(:convert).with('jpg')
          uploader.store!(file)
        end
      end
      context ':hca_heif_attachments_enabled disabled' do
        before do
          allow(Flipper).to receive(:enabled?).with(:hca_heif_attachments_enabled).and_return(false)
        end
        it 'raises invalid file type error' do
          expect { uploader.store!(file) }.to raise_error do |error|
            expect(error).to be_instance_of(CarrierWave::IntegrityError)
            expect(error.message).to eq(
              'You can’t upload "heic" files. The allowed file types are: pdf, doc, docx, jpg, jpeg, rtf, png'
            )
          end
        end
      end
    end
    context 'when the file is a HEIF' do
      let(:file) do
        Rack::Test::UploadedFile.new(
          Rails.root.join('spec', 'fixtures', 'files', 'steelers.heif'),
          'image/heif'
        )
      end
      context ':hca_heif_attachments_enabled enabled' do
        before do
          allow(Flipper).to receive(:enabled?).with(:hca_heif_attachments_enabled).and_return(true)
        end
        it 'converts the file to jpg' do
          expect(uploader).to receive(:convert).with('jpg')
          uploader.store!(file)
        end
      end
      context ':hca_heif_attachments_enabled disabled' do
        before do
          allow(Flipper).to receive(:enabled?).with(:hca_heif_attachments_enabled).and_return(false)
        end
        it 'raises invalid file type error' do
          expect { uploader.store!(file) }.to raise_error do |error|
            expect(error).to be_instance_of(CarrierWave::IntegrityError)
            expect(error.message).to eq(
              'You can’t upload "heif" files. The allowed file types are: pdf, doc, docx, jpg, jpeg, rtf, png'
            )
          end
        end
      end
    end
  end
end
|
0
|
code_files/vets-api-private/spec
|
code_files/vets-api-private/spec/uploaders/log_metrics_spec.rb
|
# frozen_string_literal: true
require 'rails_helper'
RSpec.describe LogMetrics do
  # NOTE(review): defining named constants inside a spec file leaks them into
  # the global namespace for the rest of the suite; consider stub_const or
  # anonymous classes (Class.new(CarrierWave::Uploader::Base)) instead.
  class LogMetricsUploader < CarrierWave::Uploader::Base
    include LogMetrics
  end
  module MyApp
    class LogMetricsUploader < CarrierWave::Uploader::Base
      include LogMetrics
    end
  end
  let(:test_uploader) { LogMetricsUploader.new }
  # The metric key is derived from the uploader class name; the stored
  # file's size and content type are reported to StatsD.
  it 'logs metrics of uploaded file' do
    expect(StatsD).to receive(:measure).with(
      'api.upload.log_metrics_uploader.size',
      90_537,
      tags: ['content_type:gif']
    )
    test_uploader.store!(
      Rack::Test::UploadedFile.new('spec/fixtures/files/va.gif', 'image/gif')
    )
  end
  describe 'metric key' do
    let(:test_uploader) { MyApp::LogMetricsUploader.new }
    context 'with module namespace' do
      # Namespaced uploaders fold the module name into the metric key.
      it 'logs metric with module name' do
        expect(StatsD).to receive(:measure).with(
          'api.upload.my_app_log_metrics_uploader.size',
          90_537,
          tags: ['content_type:gif']
        )
        test_uploader.store!(
          Rack::Test::UploadedFile.new('spec/fixtures/files/va.gif', 'image/gif')
        )
      end
    end
  end
end
|
0
|
code_files/vets-api-private/spec
|
code_files/vets-api-private/spec/uploaders/evss_claim_document_uploader_spec.rb
|
# frozen_string_literal: true
require 'rails_helper'
RSpec.describe EVSSClaimDocumentUploader do
subject { document_uploader }
let(:user_uuid) { SecureRandom.uuid }
let(:document_uploader) { described_class.new(user_uuid, ['11', nil]) }
let(:uploader_with_tiff) do
f = Rack::Test::UploadedFile.new('spec/fixtures/evss_claim/image.TIF', 'image/tiff')
document_uploader.store!(f)
document_uploader
end
let(:uploader_with_jpg) do
f = Rack::Test::UploadedFile.new('spec/fixtures/evss_claim/converted_image.TIF.jpg', 'image/jpeg')
document_uploader.store!(f)
document_uploader
end
after do
FileUtils.rm_rf(Rails.root.glob(
'tmp/uploads/evss_claim_documents/**/*/*/**/*/*/evss_claim_documents/**/*/*/**/*/*'
))
end
# Storage backend selection is driven by Settings.evss.s3.uploads_enabled:
# false or nil => local file storage, true => AWS storage with the stubbed
# test credentials below.
describe 'initialize' do
  context 'when uploads are disabled' do
    it 'sets storage to file' do
      with_settings(Settings.evss.s3, uploads_enabled: false) do
        expect(subject.class.storage).to eq(CarrierWave::Storage::File)
      end
    end
  end

  context 'when uploads are set to nil' do
    it 'sets storage to file' do
      with_settings(Settings.evss.s3, uploads_enabled: nil) do
        expect(subject.class.storage).to eq(CarrierWave::Storage::File)
      end
    end
  end

  context 'when uploads are enabled' do
    it 'sets storage to fog' do
      with_settings(Settings.evss.s3, uploads_enabled: true) do
        expect(subject.class.storage).to eq(CarrierWave::Storage::AWS)
        expect(subject.aws_credentials).to eq(access_key_id: 'EVSS_S3_AWS_ACCESS_KEY_ID_XYZ',
                                              secret_access_key: 'EVSS_S3_AWS_SECRET_ACCESS_KEY_XYZ',
                                              region: 'evss_s3_region')
        expect(subject.aws_acl).to eq('private')
        expect(subject.aws_bucket).to eq('evss_s3_bucket')
      end
    end
  end
end
describe '#read_for_upload' do
  let(:converted) { double }

  before do
    allow(subject).to receive(:converted).and_return(converted)
  end

  context 'with a converted image' do
    before do
      expect(converted).to receive(:present?).and_return(true)
      expect(converted).to receive(:file).and_return(OpenStruct.new(exists?: true))
    end

    it 'reads from converted' do
      expect(converted).to receive(:read)
      subject.read_for_upload
    end
  end

  context 'with no converted image' do
    before do
      # NOTE(review): present? is stubbed true here as well; this context
      # exercises the "converted file missing on disk" branch (exists? false),
      # despite the context description — confirm against the implementation.
      expect(converted).to receive(:present?).and_return(true)
      expect(converted).to receive(:file).and_return(OpenStruct.new(exists?: false))
    end

    it 'reads from the base file' do
      expect(subject).to receive(:read)
      subject.read_for_upload
    end
  end
end
# Skipped: relies on the same flakey conversion machinery as the
# 'converted version' describe below.
describe '#final_filename', skip: 'flakey spec' do
  it 'returns the right filename' do
    [uploader_with_tiff, uploader_with_jpg].each do |uploader|
      expect(uploader.final_filename).to eq('converted_image_TIF.jpg')
    end
  end
end
describe 'converted version', skip: 'flakey specs' do
  it 'converts tiff files to jpg' do
    expect(MimeMagic.by_magic(uploader_with_tiff.converted.file.read).type).to eq(
      'image/jpeg'
    )
  end

  it 'shouldnt convert if the file isnt tiff' do
    expect(uploader_with_jpg.converted_exists?).to be(false)
  end

  # Table of fixtures covering extension/content mismatches. Each entry states
  # the expected final filename and whether conversion (binary or name change)
  # should occur when the file is stored.
  [
    {
      path: 'files/doctors-note.gif',
      final_filename: 'converted_doctors-note_gif.png',
      description: 'misnamed png',
      binary_or_name_changes: true
    },
    {
      path: 'files/doctors-note.jpg',
      final_filename: 'converted_doctors-note_jpg.png',
      description: 'misnamed png',
      binary_or_name_changes: true
    },
    {
      path: 'files/va.gif',
      final_filename: 'va.gif',
      description: 'no change',
      binary_or_name_changes: false
    },
    {
      path: 'evss_claim/image.TIF',
      final_filename: 'converted_image_TIF.jpg',
      description: 'ext and filetype match /BUT/ tifs not allowed',
      binary_or_name_changes: true
    },
    {
      path: 'evss_claim/secretly_a_jpg.tif',
      final_filename: 'converted_secretly_a_jpg_tif.jpg',
      description: 'misnamed jpg',
      binary_or_name_changes: true
    },
    {
      path: 'evss_claim/secretly_a_tif.jpg',
      final_filename: 'converted_secretly_a_tif.jpg',
      description: "converted, but file extension doesn't change",
      binary_or_name_changes: true
    }
  ].each do |args|
    path, final_filename, description, binary_or_name_changes = args.values_at(
      :path, :final_filename, :description, :binary_or_name_changes
    )

    it "#{description}: #{path.split('/').last} -> #{final_filename}" do
      uploader = described_class.new '1234', ['11', nil]
      file = Rack::Test::UploadedFile.new "spec/fixtures/#{path}", "image/#{path.split('.').last}"
      uploader.store! file
      expect(uploader.converted_exists?).to eq binary_or_name_changes
      expect(uploader.final_filename).to eq(final_filename)
    end
  end
end
describe '#store_dir' do
  let(:user_uuid) { SecureRandom.uuid }
  let(:tracked_item_id) { '13' }
  let(:secondary_id) { SecureRandom.uuid }

  it 'omits the tracked item id if it is nil' do
    uploader = described_class.new(user_uuid, [nil, nil])
    expect(uploader.store_dir).to eq("evss_claim_documents/#{user_uuid}")
  end

  it 'includes the tracked item id if provided' do
    uploader = described_class.new(user_uuid, [tracked_item_id, nil])
    expect(uploader.store_dir).to eq("evss_claim_documents/#{user_uuid}/#{tracked_item_id}")
  end

  it 'includes both tracked item id and secondary id if provided' do
    uploader = described_class.new(user_uuid, [tracked_item_id, secondary_id])
    expect(uploader.store_dir).to eq("evss_claim_documents/#{user_uuid}/#{tracked_item_id}/#{secondary_id}")
  end

  it 'handles a case where user_uuid is missing or nil' do
    uploader = described_class.new(nil, [tracked_item_id, secondary_id])
    # Documents current behavior: a nil uuid yields an empty path segment
    # (double slash) rather than raising.
    expect(uploader.store_dir).to eq("evss_claim_documents//#{tracked_item_id}/#{secondary_id}")
  end
end
end
|
0
|
code_files/vets-api-private/spec
|
code_files/vets-api-private/spec/uploaders/validate_pdf_spec.rb
|
# frozen_string_literal: true

require 'rails_helper'

# The ValidatePdf concern should only validate files that claim to be PDFs:
# non-PDF files pass through, valid PDFs pass, encrypted or corrupted PDFs
# are rejected with an UnprocessableEntity error.
describe ValidatePdf, :uploader_helpers do
  class ValidatePdfTest < CarrierWave::Uploader::Base
    include ValidatePdf
  end

  # Stores the current `file` let through a fresh test uploader.
  def store_image
    ValidatePdfTest.new.store!(file)
  end

  context 'with a file that is not a PDF' do
    let(:file) { Rack::Test::UploadedFile.new('spec/fixtures/files/va.gif', 'image/gif') }

    it 'does not raise an error' do
      expect { store_image }.not_to raise_error
    end
  end

  context 'with a valid PDF' do
    let(:file) { Rack::Test::UploadedFile.new('spec/fixtures/files/doctors-note.pdf', 'application/pdf') }

    it 'does not raise an error' do
      expect { store_image }.not_to raise_error
    end
  end

  context 'with an encrypted PDF' do
    let(:file) do
      Rack::Test::UploadedFile.new('spec/fixtures/files/locked_pdf_password_is_test.pdf',
                                   'application/pdf')
    end

    it 'raises an error' do
      expect { store_image }
        .to raise_error(Common::Exceptions::UnprocessableEntity)
    end
  end

  context 'with a corrupted PDF' do
    let(:file) { Rack::Test::UploadedFile.new('spec/fixtures/files/malformed-pdf.pdf', 'application/pdf') }

    it 'raises an error' do
      expect { store_image }
        .to raise_error(Common::Exceptions::UnprocessableEntity)
    end
  end
end
|
0
|
code_files/vets-api-private/spec
|
code_files/vets-api-private/spec/uploaders/evss_claim_document_uploader_base_spec.rb
|
# frozen_string_literal: true

require 'rails_helper'

# The non-PDF size cap is stubbed down to 100 bytes so the gif fixture
# exceeds it; PDFs are not subject to that cap and should store cleanly.
describe EVSSClaimDocumentUploaderBase, :uploader_helpers do
  before do
    allow_any_instance_of(described_class).to receive(:max_file_size_non_pdf).and_return(100)
  end

  # Stores the current `file` let through a fresh base uploader.
  def store_image
    EVSSClaimDocumentUploaderBase.new.store!(file)
  end

  context 'with a too large file that is not a PDF' do
    let(:file) { Rack::Test::UploadedFile.new('spec/fixtures/files/va.gif', 'image/gif') }

    it 'raises an error' do
      expect { store_image }.to raise_error CarrierWave::IntegrityError
    end
  end

  context 'with a valid PDF' do
    let(:file) { Rack::Test::UploadedFile.new('spec/fixtures/files/doctors-note.pdf', 'application/pdf') }

    it 'does not raise an error' do
      expect { store_image }.not_to raise_error
    end
  end
end
|
0
|
code_files/vets-api-private/spec
|
code_files/vets-api-private/spec/uploaders/uploader_virus_scan_spec.rb
|
# frozen_string_literal: true

require 'rails_helper'

# Verifies that UploaderVirusScan runs the virus scan in production and, on a
# positive scan result, deletes the file and raises VirusFoundError.
describe UploaderVirusScan, :uploader_helpers do
  class UploaderVirusScanTest < CarrierWave::Uploader::Base
    include UploaderVirusScan
  end

  let(:file) { Rack::Test::UploadedFile.new('spec/fixtures/files/va.gif', 'image/gif') }

  # Stores the `file` let through a fresh test uploader.
  def store_image
    UploaderVirusScanTest.new.store!(file)
  end

  context 'in production' do
    stub_virus_scan

    context 'with no virus' do
      it 'runs the virus scan' do
        expect(Rails.env).to receive(:production?).and_return(true)
        store_image
      end
    end

    context 'with a virus' do
      # Scan outcome for this context; previously an unused let shadowed by a
      # hard-coded `false` in the stub below — now wired through.
      let(:result) { false }

      it 'raises an error' do
        allow(Common::VirusScan).to receive(:scan).and_return(result)
        expect(Rails.env).to receive(:production?).and_return(true)
        # The infected file must be removed before the error is raised.
        expect(file).to receive(:delete)
        expect { store_image }.to raise_error(
          UploaderVirusScan::VirusFoundError
        )
      end
    end
  end
end
|
0
|
code_files/vets-api-private/spec/uploaders/simple_forms_api
|
code_files/vets-api-private/spec/uploaders/simple_forms_api/form_remediation/uploader_spec.rb
|
# frozen_string_literal: true
require 'rails_helper'
require SimpleFormsApi::Engine.root.join('spec', 'spec_helper.rb')
require 'simple_forms_api/form_remediation/configuration/vff_config'
RSpec.describe SimpleFormsApi::FormRemediation::Uploader do
let(:config) do
instance_double(
SimpleFormsApi::FormRemediation::Configuration::VffConfig,
s3_settings: OpenStruct.new(region: 'region', bucket: bucket_name)
)
end
let(:directory) { '/some/path' }
let(:bucket_name) { 'bucket' }
let(:mock_config) { instance_double(Config::Options) }
let(:uploader_instance) { described_class.new(directory:, config:) }
before do
allow(Rails.logger).to receive(:error).and_call_original
allow(Rails.logger).to receive(:info).and_call_original
end
describe '#initialize' do
  subject(:new) { uploader_instance }

  it 'uses an AWS store', skip: 'TODO: Fix Flaky Test' do
    expect(described_class.storage).to eq(CarrierWave::Storage::AWS)
    expect(new._storage?).to be(true)
    expect(new._storage).to eq(CarrierWave::Storage::AWS)
  end

  it 'sets aws config' do
    expect(new.aws_acl).to eq('private')
    expect(new.aws_bucket).to eq(bucket_name)
    expect(new.aws_attributes).to eq(server_side_encryption: 'AES256')
    expect(new.aws_credentials).to eq(region: 'region')
  end

  # Both the config and the target S3 directory are required constructor args.
  context 'when config is nil' do
    let(:config) { nil }

    it 'throws an error' do
      expect { new }.to raise_exception(RuntimeError, a_string_including('The configuration is missing.'))
    end
  end

  context 'when directory is nil' do
    let(:directory) { nil }

    it 'throws an error' do
      expect { new }.to raise_exception(RuntimeError, a_string_including('The S3 directory is missing.'))
    end
  end
end
describe '#size_range' do
  subject(:size_range) { uploader_instance.size_range }

  it 'returns a range from 1 byte to 150 megabytes' do
    # Exclusive upper bound: files must be strictly smaller than 150 MB.
    expect(size_range).to eq((1.byte)...(150.megabytes))
  end
end
describe '#extension_allowlist' do
  subject(:extension_allowlist) { uploader_instance.extension_allowlist }

  it 'allows image, pdf, json, csv, and text files' do
    expect(extension_allowlist).to match_array %w[bmp csv gif jpeg jpg json pdf png tif tiff txt zip]
  end
end
describe '#store_dir' do
  subject(:store_dir) { uploader_instance.store_dir }

  it 'returns a store directory containing the given directory' do
    # The store dir is exactly the directory passed to the constructor.
    expect(store_dir).to eq(directory)
  end
end
describe '#store!' do
  subject(:store!) { uploader_instance.store!(file) }

  let(:file) { instance_double(CarrierWave::SanitizedFile, filename: 'test_file.txt') }

  before { allow(config).to receive(:handle_error) }

  context 'when the file is nil' do
    let(:file) { nil }
    let(:error_message) { 'An error occurred while uploading the file.' }

    it 'logs an error and returns' do
      # Errors are routed through the config's handle_error hook, not raised.
      store!
      expect(config).to have_received(:handle_error).with(error_message, an_instance_of(RuntimeError))
    end
  end

  context 'when the file is not nil' do
    it 'stores the file' do
      expect { store! }.not_to raise_exception
    end
  end

  context 'when an aws service error occurs' do
    let(:aws_service_error) { Aws::S3::Errors::ServiceError.new(nil, 'Service error') }

    before do
      allow_any_instance_of(CarrierWave::Uploader::Base).to receive(:store!).and_raise(aws_service_error)
      allow(SimpleFormsApi::FormRemediation::UploadRetryJob).to receive(:perform_async)
    end

    it 'logs an error and retries the upload' do
      # S3 failures are swallowed, logged, and re-enqueued via UploadRetryJob.
      expect { store! }.not_to raise_error
      expect(Rails.logger).to(
        have_received(:error).with("Upload failed for #{file.filename}. Enqueuing for retry.", aws_service_error)
      )
      expect(SimpleFormsApi::FormRemediation::UploadRetryJob).to(
        have_received(:perform_async).with(file, directory, config)
      )
    end
  end
end
describe '#get_s3_link' do
  subject(:get_s3_link) { uploader_instance.get_s3_link(file_path, filename) }

  let(:file_path) { 'file_path' }
  let(:filename) { 'filename' }
  let(:s3_obj) { instance_double(Aws::S3::Object) }

  before do
    allow(uploader_instance).to receive(:s3_obj).with(file_path).and_return(s3_obj)
    allow(s3_obj).to receive(:presigned_url).with(
      :get,
      expires_in: 30.minutes.to_i,
      # The requested filename is interpolated into the Content-Disposition
      # header (was a broken "#(unknown)" literal, which could never match the
      # header the implementation builds).
      response_content_disposition: "attachment; filename=\"#{filename}\""
    ).and_return('url')
  end

  it 'returns a presigned URL' do
    expect(get_s3_link).to eq('url')
  end
end
describe '#get_s3_file' do
  subject(:get_s3_file) { uploader_instance.get_s3_file(from_path, to_path) }

  let(:from_path) { 'from_path' }
  let(:to_path) { 'to_path' }
  let(:s3_obj) { instance_double(Aws::S3::Object) }

  before do
    allow(uploader_instance).to receive(:s3_obj).with(from_path).and_return(s3_obj)
    allow(s3_obj).to receive(:get).with(response_target: to_path)
  end

  it 'downloads the file to the given path' do
    # S3 Object#get with response_target writes to disk and returns nil here.
    expect(get_s3_file).to be_nil
  end
end
end
|
0
|
code_files/vets-api-private/spec/uploaders
|
code_files/vets-api-private/spec/uploaders/form1010cg/poa_uploader_spec.rb
|
# frozen_string_literal: true
require 'rails_helper'
require_relative '../../support/form1010cg_helpers/test_file_helpers'
describe Form1010cg::PoaUploader, :uploader_helpers do
let(:form_attachment_guid) { 'cdbaedd7-e268-49ed-b714-ec543fbb1fb8' }
let(:subject) { described_class.new(form_attachment_guid) }
let(:source_file_name) { 'doctors-note.jpg' }
let(:source_file_path) { "spec/fixtures/files/#{source_file_name}" }
let(:source_file) { Form1010cgHelpers::TestFileHelpers.create_test_uploaded_file(source_file_name, 'image/jpg') }
let(:vcr_options) do
{
record: :none,
allow_unused_http_interactions: false,
match_requests_on: %i[method host body]
}
end
describe 'configuration' do
  it 'uses an AWS store' do
    expect(described_class.storage).to eq(CarrierWave::Storage::AWS)
    expect(subject._storage?).to be(true)
    expect(subject._storage).to eq(CarrierWave::Storage::AWS)
  end

  it 'sets aws config' do
    # Values come from the test settings fixtures.
    expect(subject.aws_acl).to eq('private')
    expect(subject.aws_bucket).to eq('my-bucket')
    expect(subject.aws_attributes).to eq(server_side_encryption: 'AES256')
    expect(subject.aws_credentials).to eq(
      access_key_id: 'my-aws-key-id',
      secret_access_key: 'my-aws-access-key',
      region: 'us-gov-west-1'
    )
  end
end
describe '#size_range' do
  # Description was copy-pasted from #store_dir; this example asserts the
  # allowed upload size range (exclusive 10 MB upper bound).
  it 'returns the allowed file size range' do
    expect(subject.size_range).to eq((1.byte)...(10.megabytes))
  end
end
describe '#store_dir' do
  it 'sets the store_dir to the initialized argument' do
    # Files are keyed under the form attachment GUID passed to the constructor.
    expect(subject.store_dir).to eq(form_attachment_guid)
  end
end
describe '#store!' do
  context 'with invalid extension' do
    let(:source_file) { Rack::Test::UploadedFile.new('spec/fixtures/files/va.gif', 'image/gif') }

    it 'raises an error' do
      expect { subject.store!(source_file) }.to raise_error do |error|
        expect(error).to be_instance_of(CarrierWave::IntegrityError)
        expect(error.message).to eq(
          'You can’t upload "gif" files. The allowed file types are: jpg, jpeg, png, pdf'
        )
      end
    end
  end

  context 'with invalid content-type' do
    # The file extension is allowed but the declared content-type is not.
    let(:source_file) do
      Rack::Test::UploadedFile.new('spec/fixtures/files/invalid_content_type.jpg', 'application/json')
    end

    it 'raises an error' do
      expect { subject.store!(source_file) }.to raise_error do |error|
        expect(error).to be_instance_of(CarrierWave::IntegrityError)
        expect(error.message).to eq(
          # rubocop:disable Layout/LineLength
          'You can’t upload application/json files. The allowed file types are: image/jpg, image/jpeg, image/png, application/pdf'
          # rubocop:enable Layout/LineLength
        )
      end
    end
  end

  context 'with file size below the minimum' do
    let(:source_file) { Rack::Test::UploadedFile.new('spec/fixtures/files/empty-file.jpg', 'image/jpg') }

    it 'raises an error' do
      expect { subject.store!(source_file) }.to raise_error do |error|
        expect(error).to be_instance_of(CarrierWave::IntegrityError)
        expect(error.message).to eq(
          'We couldn’t upload your file because it’s too small. File size needs to be greater than 1 Byte'
        )
      end
    end
  end

  context 'with file size above the maximum' do
    let(:source_file) { Rack::Test::UploadedFile.new('spec/fixtures/files/doctors-note.jpg', 'image/jpg') }

    before do
      # Shrink the allowed range so the fixture trips the upper bound.
      expect(subject).to receive(:size_range).and_return((1.byte)...(3.bytes)) # rubocop:disable RSpec/SubjectStub
    end

    it 'raises an error' do
      expect { subject.store!(source_file) }.to raise_error do |error|
        expect(error).to be_instance_of(CarrierWave::IntegrityError)
        expect(error.message).to eq(
          'We couldn’t upload your file because it’s too large. File size needs to be less than 2 Bytes'
        )
      end
    end
  end

  context 'with valid data' do
    before do
      expect(StatsD).to receive(:measure).with(
        'api.upload.form1010cg_poa_uploader.size',
        83_403,
        {
          tags: [
            'content_type:jpg'
          ]
        }
      )
    end

    it 'stores file in aws' do
      VCR.use_cassette("s3/object/put/#{form_attachment_guid}/doctors-note.jpg", vcr_options) do
        expect(subject.filename).to be_nil
        expect(subject.file).to be_nil
        expect(subject.versions).to eq({})
        subject.store!(source_file)
        expect(subject.filename).to eq('doctors-note.jpg')
        expect(subject.file.path).to eq("#{form_attachment_guid}/#{source_file.original_filename}")
        # Should not store version objects, so files can be permanently destroyed
        expect(subject.versions).to eq({})
      end
    end
  end
end
describe '#retrieve_from_store!' do
  it 'retrieves the stored file in s3' do
    VCR.use_cassette("s3/object/get/#{form_attachment_guid}/doctors-note.jpg", vcr_options) do
      subject.retrieve_from_store!(source_file_name)
      expect(subject.file.filename).to eq('doctors-note.jpg')
      expect(subject.file.path).to eq("#{form_attachment_guid}/#{source_file_name}")
      expect(subject.versions).to eq({})
      # Byte-for-byte comparison against the local fixture.
      expect(subject.file.read.force_encoding('BINARY')).to eq(
        File.read(source_file_path).force_encoding('BINARY')
      )
    end
  end
end
end
|
0
|
code_files/vets-api-private/spec/middleware
|
code_files/vets-api-private/spec/middleware/faraday/middleware_default_options_spec.rb
|
# frozen_string_literal: true

require 'rails_helper'

# Verifies the raise_error middleware's include_request option: when enabled,
# the raised error's response hash carries the originating request details.
describe Faraday::Middleware do
  before do
    @stubs = Faraday::Adapter::Test::Stubs.new
    # Every GET to this path returns a bare 500 so raise_error always fires.
    @stubs.get('/imminent-failure') do
      [500, {}, '']
    end
  end

  context 'include_request: false (default)' do
    it 'raises an error that does not include the request in the response body' do
      conn = Faraday.new do |c|
        c.adapter :test, @stubs
        c.response :raise_error
      end
      expect { conn.get('/imminent-failure') }
        .to raise_error do |error|
        expect(error.response[:request]).to be_nil
      end
    end
  end

  context 'include_request: true' do
    it 'raises an error that includes the request in the response body' do
      conn = Faraday.new do |c|
        c.adapter :test, @stubs
        c.response :raise_error, include_request: true
      end
      expect { conn.get('/imminent-failure') }
        .to raise_error do |error|
        expect(error.response[:request]).not_to be_nil
      end
    end
  end
end
|
0
|
code_files/vets-api-private/spec/middleware
|
code_files/vets-api-private/spec/middleware/rack/attack_spec.rb
|
# frozen_string_literal: true
require 'rails_helper'
RSpec.describe Rack::Attack do
include Rack::Test::Methods
let(:headers) { { 'REMOTE_ADDR' => '1.2.3.4' } }
# Run the full Rails app so the real Rack::Attack middleware is exercised.
def app
  Rails.application
end

before do
  # Reset throttle counters between examples.
  Rack::Attack.cache.store.flushdb
end

# before(:all) runs before any per-example before hook despite being declared
# after it, so the Redis-backed store is in place before the first flushdb.
before(:all) do
  Rack::Attack.cache.store = Rack::Attack::StoreProxy::RedisStoreProxy.new($redis)
end
describe '#throttled_response' do
  it 'adds X-RateLimit-* headers to the response' do
    # First request is within the /v0/limited throttle; the second exceeds it.
    post('/v0/limited', headers:)
    expect(last_response).not_to have_http_status(:too_many_requests)
    post('/v0/limited', headers:)
    expect(last_response).to have_http_status(:too_many_requests)
    expect(last_response.headers).to include(
      'X-RateLimit-Limit',
      'X-RateLimit-Remaining',
      'X-RateLimit-Reset'
    )
  end
end
# check_in endpoints allow 10 requests per IP per window; the 11th is throttled.
describe 'check_in/ip' do
  let(:data) { { data: 'foo', status: 200 } }

  context 'when more than 10 requests' do
    context 'when GET endpoint' do
      before do
        allow_any_instance_of(CheckIn::V2::Session).to receive(:authorized?).and_return(true)
        allow_any_instance_of(V2::Lorota::Service).to receive(:check_in_data).and_return(data)
        allow_any_instance_of(V2::Chip::Service).to receive(:set_echeckin_started).and_return(data)
        10.times do
          get('/check_in/v2/patient_check_ins/d602d9eb-9a31-484f-9637-13ab0b507e0d', headers:)
          expect(last_response).to have_http_status(:ok)
        end
      end

      it 'throttles with status 429' do
        get('/check_in/v2/patient_check_ins/d602d9eb-9a31-484f-9637-13ab0b507e0d', headers:)
        expect(last_response).to have_http_status(:too_many_requests)
      end
    end

    context 'when POST endpoint' do
      let(:post_params) do
        { patient_check_ins: { uuid: 'd602d9eb-9a31-484f-9637-13ab0b507e0d', appointment_ien: '450' } }
      end

      before do
        allow_any_instance_of(V2::Chip::Service).to receive(:create_check_in).and_return(data)
        10.times do
          post '/check_in/v2/patient_check_ins', post_params, headers
          expect(last_response).to have_http_status(:ok)
        end
      end

      it 'throttles with status 429' do
        post '/check_in/v2/patient_check_ins', post_params, headers
        expect(last_response).to have_http_status(:too_many_requests)
      end
    end
  end
end
# medical_copays allows 20 requests per IP per window; the 21st is throttled.
describe 'medical_copays/ip' do
  before do
    allow_any_instance_of(MedicalCopays::VBS::Service).to receive(:get_copays).and_return([])
  end

  context 'when more than 20 requests' do
    before do
      20.times do
        # 401 (no session) still counts against the throttle.
        get('/v0/medical_copays', headers:)
        expect(last_response).to have_http_status(:unauthorized)
      end
    end

    it 'throttles with status 429' do
      get('/v0/medical_copays', headers:)
      expect(last_response).to have_http_status(:too_many_requests)
    end
  end
end
# Facilities VA search: 30 requests per IP (keyed on X-Real-Ip); the request
# after the limit is throttled only when it comes from the same IP.
describe 'facilities_api/v2/va/ip' do
  let(:endpoint) { '/facilities_api/v2/va' }
  let(:headers) { { 'X-Real-Ip' => '1.2.3.4' } }
  let(:limit) { 30 }

  before do
    limit.times do
      post endpoint, nil, headers
      expect(last_response).not_to have_http_status(:too_many_requests)
    end
    # The request under test; contexts vary its source IP via other_headers.
    post endpoint, nil, other_headers
  end

  context 'response status for repeated requests from the same IP' do
    let(:other_headers) { headers }

    it 'limits requests' do
      expect(last_response).to have_http_status(:too_many_requests)
    end
  end

  context 'response status for request from different IP' do
    let(:other_headers) { { 'X-Real-Ip' => '4.3.2.1' } }

    it 'does not limit request' do
      expect(last_response).not_to have_http_status(:too_many_requests)
    end
  end
end
# CCP provider search: 8 requests per IP (keyed on X-Real-Ip); the 9th is
# throttled only when it comes from the same IP.
describe 'facilities_api/v2/ccp/ip' do
  let(:endpoint) { '/facilities_api/v2/ccp/provider' }
  let(:headers) { { 'X-Real-Ip' => '1.2.3.4' } }
  let(:limit) { 8 }

  before do
    limit.times do
      get endpoint, nil, headers
      expect(last_response).not_to have_http_status(:too_many_requests)
    end
    # The request under test; contexts vary its source IP via other_headers.
    get endpoint, nil, other_headers
  end

  context 'response status for repeated requests from the same IP' do
    let(:other_headers) { headers }

    it 'limits requests' do
      expect(last_response).to have_http_status(:too_many_requests)
    end
  end

  context 'response status for request from different IP' do
    let(:other_headers) { { 'X-Real-Ip' => '4.3.2.1' } }

    # Description fixed: it previously read 'limits requests' while asserting
    # the opposite (matches the sibling va/ip spec wording).
    it 'does not limit requests' do
      expect(last_response).not_to have_http_status(:too_many_requests)
    end
  end
end
# Education benefits claims: 15 requests per IP (keyed on X-Real-Ip); the 16th
# is throttled only when it comes from the same IP.
describe 'education_benefits_claims/v0/ip' do
  let(:endpoint) { '/v0/education_benefits_claims/1995' }
  let(:headers) { { 'X-Real-Ip' => '1.2.3.4' } }
  let(:limit) { 15 }

  before do
    limit.times do
      post endpoint, nil, headers
      expect(last_response).not_to have_http_status(:too_many_requests)
    end
    # The request under test; contexts vary its source IP via other_headers.
    post endpoint, nil, other_headers
  end

  context 'response status for repeated requests from the same IP' do
    let(:other_headers) { headers }

    it 'limits requests' do
      expect(last_response).to have_http_status(:too_many_requests)
    end
  end

  context 'response status for request from different IP' do
    let(:other_headers) { { 'X-Real-Ip' => '4.3.2.1' } }

    # Description fixed: it previously read 'limits requests' while asserting
    # the opposite (matches the sibling va/ip spec wording).
    it 'does not limit requests' do
      expect(last_response).not_to have_http_status(:too_many_requests)
    end
  end
end
# VIC endpoints; each nested context supplies its own endpoint and limit, and
# the shared before hook exhausts the limit so the final post should throttle.
describe 'vic rate-limits', run_at: 'Thu, 26 Dec 2015 15:54:20 GMT' do
  before do
    limit.times do
      post(endpoint, headers:)
      expect(last_response).not_to have_http_status(:too_many_requests)
    end
    post endpoint, headers:
  end

  context 'profile photo upload' do
    let(:limit) { 8 }
    let(:endpoint) { '/v0/vic/profile_photo_attachments' }

    it 'limits requests' do
      expect(last_response).to have_http_status(:too_many_requests)
    end
  end

  context 'supporting doc upload' do
    let(:limit) { 8 }
    let(:endpoint) { '/v0/vic/supporting_documentation_attachments' }

    it 'limits requests' do
      expect(last_response).to have_http_status(:too_many_requests)
    end
  end

  context 'form submission' do
    let(:limit) { 10 }
    let(:endpoint) { '/v0/vic/vic_submissions' }

    it 'limits requests' do
      expect(last_response).to have_http_status(:too_many_requests)
    end
  end
end
end
|
0
|
code_files/vets-api-private/spec
|
code_files/vets-api-private/spec/sidekiq/va_notify_email_job_spec.rb
|
# frozen_string_literal: true

require 'rails_helper'

# VANotifyEmailJob should send the email via VaNotify::Service and, on a 400
# from the notify API, log the exception (with personalisation redacted to nil)
# instead of retrying blindly.
RSpec.describe VANotifyEmailJob, type: :model do
  let(:email) { 'user@example.com' }
  let(:template_id) { 'template_id' }

  before do
    allow_any_instance_of(VaNotify::Configuration).to receive(:base_path).and_return('http://fakeapi.com')
    allow(Settings.vanotify.services.va_gov).to receive(:api_key).and_return(
      'test-aaaaaaaa-aaaa-aaaa-aaaa-aaaaaaaaaaaa-bbbbbbbb-bbbb-bbbb-bbbb-bbbbbbbbbbbb'
    )
  end

  describe '#perform' do
    it 'sends an email using the template id' do
      client = double
      expect(VaNotify::Service).to receive(:new).with(Settings.vanotify.services.va_gov.api_key).and_return(client)
      expect(client).to receive(:send_email).with(
        email_address: email,
        template_id:
      )
      described_class.new.perform(email, template_id)
    end

    context 'when vanotify returns a 400 error' do
      it 'rescues and logs the error' do
        VCR.use_cassette('va_notify/bad_request_invalid_template_id') do
          job = described_class.new
          # Note: the logged args omit the email address itself.
          expect(job).to receive(:log_exception_to_sentry).with(
            instance_of(VANotify::BadRequest),
            {
              args: {
                template_id:,
                personalisation: nil
              }
            },
            {
              error: :va_notify_email_job
            }
          )
          expect(job).to receive(:log_exception_to_rails).with(
            instance_of(VANotify::BadRequest)
          )
          job.perform(email, template_id)
        end
      end
    end
  end
end
|
0
|
code_files/vets-api-private/spec
|
code_files/vets-api-private/spec/sidekiq/form526_submission_failure_email_job_spec.rb
|
# frozen_string_literal: true
require 'rails_helper'
RSpec.describe Form526SubmissionFailureEmailJob, type: :job do
subject { described_class }
# Callback metadata forwarded to VaNotify for delivery-status tracking.
let(:callback_metadata) do
  { callback_metadata: { form_number: 'form526',
                         notification_type: 'error',
                         statsd_tags: {
                           function: '526_backup_submission_to_lighthouse',
                           service: 'disability-application'
                         } } }
end
let(:email_service) { double('VaNotify::Service') }
let(:timestamp) { Time.now.utc }
# Human-readable failure time, e.g. "March 4, 2024 1:05 p.m. UTC".
let(:failure_timestamp) { timestamp.strftime('%B %-d, %Y %-l:%M %P %Z').sub(/([ap])m/, '\1.m.') }

before do
  Sidekiq::Job.clear_all
  allow(VaNotify::Service)
    .to receive(:new)
    .with(Settings.vanotify.services.benefits_disability.api_key, callback_metadata)
    .and_return(email_service)
end
describe '#perform' do
  context 'when a user has additional forms and files with their submission' do
    let!(:form526_submission) { create(:form526_submission, :with_uploads_and_ancillary_forms) }
    let(:expected_params) do
      {
        email_address: 'test@email.com',
        template_id: 'form526_submission_failure_notification_template_id',
        personalisation: {
          first_name: form526_submission.get_first_name,
          date_submitted: form526_submission.format_creation_time_for_mailers,
          date_of_failure: failure_timestamp,
          files_submitted: ['extXas.pdf', 'extXas.pdf', 'extXas.pdf'],
          forms_submitted: [
            'VA Form 21-4142',
            'VA Form 21-0781',
            'VA Form 21-0781a',
            'VA Form 21-8940'
          ]
        }
      }
    end

    context 'when a timestamp is not passed' do
      it 'marks the current time as the date_of_failure' do
        Timecop.freeze(timestamp) do
          expect(email_service).to receive(:send_email).with(expected_params)
          subject.perform_async(form526_submission.id)
          subject.drain
        end
      end
    end

    it 'dispatches a failure notification email with the expected params' do
      Timecop.freeze(timestamp) do
        expect(email_service).to receive(:send_email).with(expected_params)
        subject.perform_async(form526_submission.id, timestamp.to_s)
        subject.drain
      end
    end

    it 'creates a remediation record for the submission' do
      Timecop.freeze(timestamp) do
        allow(email_service).to receive(:send_email)
        expect { subject.new.perform(form526_submission.id) }.to change(Form526SubmissionRemediation, :count)
        remediation = Form526SubmissionRemediation.where(form526_submission_id: form526_submission.id)
        expect(remediation.present?).to be true
      end
    end
  end

  context 'when a user has no additional forms with their submission' do
    let!(:form526_submission) { create(:form526_submission, :with_uploads) }
    let(:expected_params) do
      {
        email_address: 'test@email.com',
        template_id: 'form526_submission_failure_notification_template_id',
        personalisation: {
          first_name: form526_submission.get_first_name,
          date_submitted: form526_submission.format_creation_time_for_mailers,
          date_of_failure: failure_timestamp,
          files_submitted: ['extXas.pdf', 'extXas.pdf', 'extXas.pdf'],
          # Empty forms list is rendered as the literal placeholder 'None'.
          forms_submitted: 'None'
        }
      }
    end

    before do
      # Persist attachments matching the submission's upload confirmation codes.
      form526_submission.form['form526_uploads'].each do |upload|
        create(:supporting_evidence_attachment, :with_file_data, guid: upload['confirmationCode'])
      end
    end

    it 'replaces the forms list variable with a placeholder' do
      Timecop.freeze(timestamp) do
        expect(email_service).to receive(:send_email).with(expected_params)
        subject.perform_async(form526_submission.id, timestamp.to_s)
        subject.drain
      end
    end
  end

  context 'when a user has no additional user-uploaded files with their submission' do
    context 'when using v1 of form 0781' do
      let(:expected_params) do
        {
          email_address: 'test@email.com',
          template_id: 'form526_submission_failure_notification_template_id',
          personalisation: {
            first_name: form526_submission.get_first_name,
            date_submitted: form526_submission.format_creation_time_for_mailers,
            date_of_failure: failure_timestamp,
            files_submitted: 'None',
            forms_submitted: [
              'VA Form 21-4142',
              'VA Form 21-0781',
              'VA Form 21-0781a',
              'VA Form 21-8940'
            ]
          }
        }
      end
      let!(:form526_submission) { create(:form526_submission, :with_everything) }

      it 'replaces the files list variable with a placeholder' do
        Timecop.freeze(timestamp) do
          expect(email_service).to receive(:send_email).with(expected_params)
          subject.perform_async(form526_submission.id, timestamp.to_s)
          subject.drain
        end
      end
    end

    context 'when using v2 of form 0781' do
      let(:expected_params) do
        {
          email_address: 'test@email.com',
          template_id: 'form526_submission_failure_notification_template_id',
          personalisation: {
            first_name: form526_submission.get_first_name,
            date_submitted: form526_submission.format_creation_time_for_mailers,
            date_of_failure: failure_timestamp,
            files_submitted: 'None',
            # v2 of form 0781 has no separate 0781a entry.
            forms_submitted: [
              'VA Form 21-4142',
              'VA Form 21-0781',
              'VA Form 21-8940'
            ]
          }
        }
      end
      let!(:form526_submission) { create(:form526_submission, :with_0781v2) }

      it 'replaces the files list variable with a placeholder' do
        Timecop.freeze(timestamp) do
          expect(email_service).to receive(:send_email).with(expected_params)
          subject.perform_async(form526_submission.id, timestamp.to_s)
          subject.drain
        end
      end
    end
  end
end
# Observability contract of the failure-email job: StatsD increments and
# structured Rails log lines on success, failure, and retries-exhausted.
# NOTE(review): `email_service` is stubbed earlier in this file — confirm.
describe 'logging' do
  let!(:form526_submission) { create(:form526_submission, :with_uploads_and_ancillary_forms) }
  let(:timestamp) { Time.now.utc }
  let(:tags) { described_class::DD_ZSF_TAGS }

  context 'on success' do
    before do
      allow(email_service).to receive(:send_email)
    end

    it 'increments StatsD' do
      expect(StatsD).to receive(:increment).with("#{described_class::STATSD_PREFIX}.success")
      subject.new.perform(form526_submission.id)
      subject.drain
    end

    it 'logs success' do
      # Freeze time so the logged timestamp matches the expectation exactly.
      Timecop.freeze(timestamp) do
        expect(Rails.logger).to receive(:info).with(
          'Form526SubmissionFailureEmailJob notification dispatched',
          { form526_submission_id: form526_submission.id, timestamp: }
        )
        subject.new.perform(form526_submission.id)
      end
    end
  end

  context 'on failure' do
    let(:error_message) { 'oh gosh oh jeeze oh no' }
    let(:expected_log) do
      [
        'Form526SubmissionFailureEmailJob notification dispatched',
        {
          form526_submission_id: form526_submission.id,
          error_message:,
          timestamp:
        }
      ]
    end

    before do
      allow(email_service).to receive(:send_email).and_raise error_message
    end

    it 'increments StatsD' do
      expect(StatsD).to receive(:increment).with("#{described_class::STATSD_PREFIX}.error")
      # The error re-raises so Sidekiq can retry the job.
      expect { subject.new.perform(form526_submission.id) }.to raise_error(error_message)
    end

    it 'logs error' do
      Timecop.freeze(timestamp) do
        expect(Rails.logger).to receive(:error).with(
          'Form526SubmissionFailureEmailJob notification failed',
          {
            form526_submission_id: form526_submission.id,
            error_message:,
            timestamp:
          }
        )
        expect { subject.new.perform(form526_submission.id) }.to raise_error(error_message)
      end
    end
  end

  context 'on exhaustion' do
    let!(:form526_job_status) { create(:form526_job_status, :retryable_error, form526_submission:, job_id: 1) }
    let(:expected_log) do
      {
        job_id: form526_job_status.job_id,
        form526_submission_id: form526_submission.id,
        error_class: 'WhoopsieDasiy',
        error_message: 'aww shucks',
        timestamp:
      }
    end
    # Shape of the msg hash Sidekiq passes to sidekiq_retries_exhausted.
    let(:exhaustion_block_args) do
      {
        'jid' => form526_job_status.job_id,
        'args' => [form526_submission.id],
        'error_class' => 'WhoopsieDasiy',
        'error_message' => 'aww shucks'
      }
    end

    it 'logs' do
      Timecop.freeze(timestamp) do
        subject.within_sidekiq_retries_exhausted_block(exhaustion_block_args) do
          expect(Rails.logger).to receive(:warn).with(
            'Form526SubmissionFailureEmailJob retries exhausted',
            expected_log
          )
        end
      end
    end

    it 'increments StatsD' do
      Timecop.freeze(timestamp) do
        subject.within_sidekiq_retries_exhausted_block(exhaustion_block_args) do
          expect(StatsD).to receive(:increment).with("#{described_class::STATSD_PREFIX}.exhausted")
          # Exhaustion is a "zero silent failures" event and is tagged as such.
          expect(StatsD).to receive(:increment).with('silent_failure', tags: described_class::DD_ZSF_TAGS)
        end
      end
    end
  end
end
end
|
0
|
code_files/vets-api-private/spec
|
code_files/vets-api-private/spec/sidekiq/user_actions_cleanup_job_spec.rb
|
# frozen_string_literal: true

require 'rails_helper'

# Spec for UserActionsCleanupJob, which purges UserAction records older than
# the 1-year retention window while leaving newer records intact.
#
# Fix: this is a Sidekiq job spec (lives under spec/sidekiq), so it is tagged
# `type: :job` for consistency with the sibling job specs instead of the
# previous, misleading `type: :model`.
RSpec.describe UserActionsCleanupJob, type: :job do
  let!(:user_action_event) { create(:user_action_event) }
  # One record on each side of the 1-year cutoff to exercise the boundary.
  let!(:old_user_action) { create(:user_action, user_action_event:, created_at: 2.years.ago) }
  let!(:recent_user_action) { create(:user_action, user_action_event:, created_at: 6.months.ago) }

  it 'removes user actions older than 1 year' do
    expect { subject.perform }.to change(UserAction, :count).by(-1)
    expect(model_exists?(old_user_action)).to be_falsey
    expect(model_exists?(recent_user_action)).to be_truthy
  end
end
|
0
|
code_files/vets-api-private/spec
|
code_files/vets-api-private/spec/sidekiq/delete_in_progress_form_job_spec.rb
|
# frozen_string_literal: true

require 'rails_helper'

# Spec for DeleteInProgressFormJob: removes a user's saved in-progress form
# for a given form id, and is a harmless no-op when nothing matches.
RSpec.describe DeleteInProgressFormJob, type: :job do
  subject { described_class.new }

  let(:current_user) { build(:user, :loa3) }
  let(:form_id) { '1010ez' }
  let!(:in_progress_form) do
    create(:in_progress_form, form_id:, user_uuid: current_user.uuid)
  end

  describe '#perform' do
    context 'when current_user exists' do
      context 'and has an in-progress form' do
        it 'deletes the in-progress form' do
          expect { subject.perform(form_id, current_user.uuid) }
            .to change { InProgressForm.count }.by(-1)
          expect(InProgressForm.form_for_user(form_id, current_user)).to be_nil
        end
      end

      context 'and no in-progress form' do
        # Remove the record up front so the job has nothing to delete.
        before { in_progress_form.destroy }

        it 'does not raise an error' do
          expect { subject.perform(form_id, current_user.uuid) }.not_to raise_error
        end
      end
    end

    context 'when user_uuid is nil' do
      it 'does not attempt to delete and logs appropriately' do
        expect { subject.perform(form_id, nil) }.not_to change { InProgressForm.count }
      end
    end
  end

  describe 'sidekiq configuration' do
    it 'has retry configured' do
      expect(described_class.sidekiq_options_hash['retry']).to eq(5)
    end
  end
end
|
0
|
code_files/vets-api-private/spec
|
code_files/vets-api-private/spec/sidekiq/delete_old_transactions_job_spec.rb
|
# frozen_string_literal: true

require 'rails_helper'

# Verifies DeleteOldTransactionsJob survives a failed destroy: the error is
# rescued and reported to Sentry instead of aborting the run.
RSpec.describe DeleteOldTransactionsJob do
  context 'if an exception happens' do
    before do
      allow_any_instance_of(AsyncTransaction::VAProfile::AddressTransaction)
        .to receive(:destroy!)
        .and_raise(ActiveRecord::RecordNotDestroyed, 'BOOM!')
    end

    it 'rescues and logs the details' do
      # A completed transaction old enough to be eligible for deletion.
      eligible_created_at = (Time.current - AsyncTransaction::Base::DELETE_COMPLETED_AFTER - 1.day).iso8601
      create(:address_transaction,
             created_at: eligible_created_at,
             status: AsyncTransaction::Base::COMPLETED)

      worker = described_class.new
      expect(worker).to receive(:log_message_to_sentry).once
      worker.perform
    end
  end
end
|
0
|
code_files/vets-api-private/spec
|
code_files/vets-api-private/spec/sidekiq/form526_confirmation_email_job_spec.rb
|
# frozen_string_literal: true

require 'rails_helper'

# Spec for Form526ConfirmationEmailJob, the Sidekiq worker that sends the
# 526 "submission received" confirmation email through VA Notify.
RSpec.describe Form526ConfirmationEmailJob, type: :worker do
  before do
    Sidekiq::Job.clear_all
    # Pin the notification-creation flag off so the job takes the legacy
    # send path exercised below.
    allow(Flipper).to receive(:enabled?).with(:va_notify_notification_creation).and_return(false)
  end

  describe '#perform' do
    let(:notification_client) { instance_double(Notifications::Client) }
    let(:va_notify_client) { instance_double(VaNotify::Client) }

    context 'with default attributes' do
      let(:email_address) { 'foo@example.com' }
      # Canned VA Notify API response returned by the stubbed client.
      let(:email_response) do
        {
          content: {
            body: '<html><body><h1>Hello</h1> World.</body></html>',
            from_email: 'from_email',
            subject: 'Hello World'
          },
          id: '123456789',
          reference: nil,
          scheduled_for: nil,
          template: {
            id: Settings.vanotify.services.va_gov.template_id.form526_confirmation_email,
            uri: 'template_url',
            version: 1
          },
          uri: 'url'
        }
      end
      # Job argument hash as produced upstream when the job is enqueued.
      let(:personalization_parameters) do
        {
          'email' => email_address,
          'submitted_claim_id' => '600191990',
          'date_submitted' => 'July 12, 2020',
          'date_received' => 'July 15, 2020',
          'first_name' => 'firstname'
        }
      end

      before do
        allow(Notifications::Client).to receive(:new).and_return(notification_client)
        allow(VaNotify::Client).to receive(:new).and_return(va_notify_client)
      end

      it 'the service is initialized with the correct parameters' do
        test_service_api_key = 'aaaaaaaa-aaaa-aaaa-aaaa-aaaaaaaaaaaa-aaaaaaaa-aaaa-aaaa-aaaa-aaaaaaaaaaaa'
        with_settings(
          Settings.vanotify.services.va_gov, { api_key: test_service_api_key }
        ) do
          mocked_notification_service = instance_double(VaNotify::Service)
          allow(VaNotify::Service).to receive(:new).and_return(mocked_notification_service)
          allow(mocked_notification_service).to receive(:send_email).and_return(email_response)
          subject.perform('')
          # The service must be constructed with the configured API key.
          expect(VaNotify::Service).to have_received(:new).with(test_service_api_key)
        end
      end

      it 'sends a confirmation email' do
        # The job renames 'submitted_claim_id' to 'claim_id' and drops the
        # 'email' key before handing personalisation to the client.
        requirements = {
          email_address:,
          template_id: Settings.vanotify
                               .services
                               .va_gov
                               .template_id
                               .form526_confirmation_email,
          personalisation: {
            'claim_id' => '600191990',
            'date_submitted' => 'July 12, 2020',
            'date_received' => 'July 15, 2020',
            'first_name' => 'firstname'
          }
        }
        allow(notification_client).to receive(:send_email).and_return(email_response)
        expect(notification_client).to receive(:send_email).with(requirements)
        subject.perform(personalization_parameters)
      end

      it 'handles 4xx errors when sending an email' do
        error = Common::Exceptions::BackendServiceException.new(
          'VANOTIFY_400',
          { source: VaNotify::Service.to_s },
          400,
          'Error'
        )
        allow(notification_client).to receive(:send_email).and_raise(error)
        # 4xx: logged and counted, but swallowed (no Sidekiq retry).
        expect(Rails.logger).to receive(:error).with('Form526ConfirmationEmailJob error', error:)
        expect { subject.perform(personalization_parameters) }
          .to trigger_statsd_increment('worker.form526_confirmation_email.error')
      end

      it 'handles 5xx errors when sending an email' do
        error = Common::Exceptions::BackendServiceException.new(
          'VANOTIFY_500',
          { source: VaNotify::Service.to_s },
          500,
          'Error'
        )
        allow(notification_client).to receive(:send_email).and_raise(error)
        # 5xx: logged, counted, AND re-raised so Sidekiq retries.
        expect(Rails.logger).to receive(:error).with('Form526ConfirmationEmailJob error', error:)
        expect { subject.perform(personalization_parameters) }
          .to raise_error(Common::Exceptions::BackendServiceException)
          .and trigger_statsd_increment('worker.form526_confirmation_email.error')
      end

      it 'returns one job triggered' do
        allow(notification_client).to receive(:send_email).and_return(email_response)
        expect do
          Form526ConfirmationEmailJob.perform_async(personalization_parameters)
        end.to change(Form526ConfirmationEmailJob.jobs, :size).by(1)
      end
    end
  end
end
|
0
|
code_files/vets-api-private/spec
|
code_files/vets-api-private/spec/sidekiq/email_verification_job_spec.rb
|
# frozen_string_literal: true

require 'rails_helper'
require 'sidekiq/testing'
require 'sidekiq/attr_package'

Sidekiq::Testing.fake!

# Spec for EmailVerificationJob: reads PII personalisation data out of
# Sidekiq::AttrPackage (Redis), builds a VA Notify email for one of several
# verification template types, and cleans the cache entry up afterwards.
RSpec.describe EmailVerificationJob, type: :job do
  subject { described_class }

  let(:template_type) { 'initial_verification' }
  let(:cache_key) { 'test_cache_key_123' }
  # PII payload the job expects to find under cache_key in Redis.
  let(:personalisation_data) do
    {
      verification_link: 'https://va.gov/verify/123',
      first_name: 'John',
      email: 'veteran@example.com'
    }
  end
  let(:notify_client) { instance_double(VaNotify::Service) }

  before do
    allow(StatsD).to receive(:increment)
    allow(Rails.logger).to receive(:info)
    allow(Rails.logger).to receive(:error)
    allow(VaNotify::Service).to receive(:new).and_return(notify_client)
    allow(notify_client).to receive(:send_email)
    allow(Sidekiq::AttrPackage).to receive(:find).with(cache_key).and_return(personalisation_data)
    allow(Sidekiq::AttrPackage).to receive(:delete).with(cache_key)
  end

  describe '#perform' do
    context 'when feature flag is enabled' do
      before do
        allow(Flipper).to receive(:enabled?).with(:auth_exp_email_verification_enabled).and_return(true)
      end

      it 'retrieves PII data from cache and logs email verification and increments success metric' do
        subject.new.perform(template_type, cache_key)
        expect(Sidekiq::AttrPackage).to have_received(:find).with(cache_key)
        expect(Rails.logger).to have_received(:info).with(
          'Email verification sent (logging only - not actually sent)',
          hash_including(template_type:)
        )
        expect(StatsD).to have_received(:increment).with('api.vanotify.email_verification.success')
        # The PII cache entry must be removed once the email is handled.
        expect(Sidekiq::AttrPackage).to have_received(:delete).with(cache_key)
      end

      it 'raises ArgumentError when cache data is missing' do
        allow(Sidekiq::AttrPackage).to receive(:find).with(cache_key).and_return(nil)
        expect do
          subject.new.perform(template_type, cache_key)
        end.to raise_error(ArgumentError, 'Missing personalisation data in Redis')
        expect(Rails.logger).to have_received(:error).with(
          'EmailVerificationJob failed: Missing personalisation data in Redis',
          hash_including(template_type:, cache_key_present: true)
        )
      end

      # The three link-based verification templates share one data structure.
      %w[initial_verification annual_verification email_change_verification].each do |type|
        it "handles #{type} template type with verification templates data structure" do
          verification_data = {
            verification_link: 'https://va.gov/verify/123',
            first_name: 'John',
            email: 'veteran@example.com'
          }
          allow(Sidekiq::AttrPackage).to receive(:find).with(cache_key).and_return(verification_data)
          expect { subject.new.perform(type, cache_key) }.not_to raise_error
        end
      end

      it 'handles verification_success template type with success data structure' do
        # The success template needs no verification link.
        success_data = { first_name: 'John', email: 'veteran@example.com' }
        allow(Sidekiq::AttrPackage).to receive(:find).with(cache_key).and_return(success_data)
        expect { subject.new.perform('verification_success', cache_key) }.not_to raise_error
      end
    end

    context 'when feature flag is disabled' do
      before do
        allow(Flipper).to receive(:enabled?).with(:auth_exp_email_verification_enabled).and_return(false)
      end

      it 'returns early without logging or metrics' do
        subject.new.perform(template_type, cache_key)
        expect(Rails.logger).not_to have_received(:info)
        expect(StatsD).not_to have_received(:increment)
      end
    end
  end

  describe 'sidekiq configuration' do
    it 'is configured with retry: 5' do
      expect(subject.sidekiq_options['retry']).to eq(5)
    end

    it 'enqueues the job' do
      expect do
        subject.perform_async(template_type, cache_key)
      end.to change(subject.jobs, :size).by(1)
    end
  end

  describe 'error handling' do
    before do
      allow(Flipper).to receive(:enabled?).with(:auth_exp_email_verification_enabled).and_return(true)
    end

    it 'handles general errors with failure metrics and logging' do
      allow_any_instance_of(described_class).to receive(:build_personalisation).and_raise(StandardError,
                                                                                          'Service error')
      expect do
        subject.new.perform(template_type, cache_key)
      end.to raise_error(StandardError, 'Service error')
      expect(StatsD).to have_received(:increment).with('api.vanotify.email_verification.failure')
      expect(Rails.logger).to have_received(:error).with('EmailVerificationJob failed', {
                                                           error: 'Service error', template_type:
                                                         })
    end

    it 'does not increment failure metrics for ArgumentError' do
      # ArgumentError signals bad input, not a transient failure, so it must
      # not count against the failure metric.
      allow(Sidekiq::AttrPackage).to receive(:find).with(cache_key).and_return(nil)
      expect do
        subject.new.perform(template_type, cache_key)
      end.to raise_error(ArgumentError)
      expect(StatsD).not_to have_received(:increment)
        .with('api.vanotify.email_verification.failure')
      expect(Rails.logger).to have_received(:error)
        .with(
          'EmailVerificationJob failed: Missing personalisation data in Redis', {
            template_type:,
            cache_key_present: true
          }
        )
    end

    it 'handles Sidekiq::AttrPackageError as ArgumentError (no retries)' do
      allow(Sidekiq::AttrPackage).to receive(:find).with(cache_key).and_raise(
        Sidekiq::AttrPackageError.new('find', 'Redis connection failed')
      )
      expect do
        subject.new.perform(template_type, cache_key)
      end.to raise_error(ArgumentError, '[Sidekiq] [AttrPackage] find error: Redis connection failed')
      expect(Rails.logger)
        .to have_received(:error)
        .with(
          'EmailVerificationJob AttrPackage error', {
            error: '[Sidekiq] [AttrPackage] find error: Redis connection failed',
            template_type:
          }
        )
      expect(StatsD).not_to have_received(:increment).with('api.vanotify.email_verification.failure')
    end
  end

  describe 'retries exhausted' do
    it 'logs exhaustion with proper context and cleans up cache' do
      # Simulate sidekiq_retries_exhausted callback with new parameter structure
      msg = {
        'jid' => 'test_job_id',
        'class' => 'EmailVerificationJob',
        'error_class' => 'StandardError',
        'error_message' => 'Connection failed',
        'args' => [template_type, cache_key]
      }
      expect(Sidekiq::AttrPackage).to receive(:delete).with(cache_key)
      described_class.sidekiq_retries_exhausted_block.call(msg, nil)
      expect(Rails.logger).to have_received(:error).with(
        'EmailVerificationJob retries exhausted',
        hash_including(
          job_id: 'test_job_id',
          error_class: 'StandardError',
          error_message: 'Connection failed',
          template_type:
        )
      )
      expect(StatsD).to have_received(:increment).with('api.vanotify.email_verification.retries_exhausted')
    end

    it 'handles cache cleanup failure gracefully in retries exhausted' do
      msg = {
        'jid' => 'test_job_id',
        'class' => 'EmailVerificationJob',
        'error_class' => 'StandardError',
        'error_message' => 'Connection failed',
        'args' => [template_type, cache_key]
      }
      allow(Sidekiq::AttrPackage).to receive(:delete).with(cache_key).and_raise(
        Sidekiq::AttrPackageError.new('delete', 'Redis failed')
      )
      allow(Rails.logger).to receive(:warn)
      # Cleanup failure is logged as a warning but never raises out of the hook.
      expect { described_class.sidekiq_retries_exhausted_block.call(msg, nil) }.not_to raise_error
      expect(Rails.logger).to have_received(:warn).with(
        'Failed to clean up AttrPackage after retries exhausted',
        hash_including(cache_key:, error: '[Sidekiq] [AttrPackage] delete error: Redis failed')
      )
    end
  end

  describe '#callback_options' do
    let(:job_instance) { subject.new }

    it 'returns properly structured callback options for each template type' do
      %w[initial_verification annual_verification email_change_verification verification_success].each do |type|
        options = job_instance.send(:callback_options, type)
        expect(options).to eq({
                                callback_klass: 'EmailVerificationCallback',
                                callback_metadata: {
                                  statsd_tags: {
                                    service: 'vagov-profile-email-verification',
                                    function: "#{type}_email"
                                  }
                                }
                              })
      end
    end

    it 'references a valid callback class' do
      options = job_instance.send(:callback_options, 'initial_verification')
      callback_klass = options[:callback_klass]
      expect { callback_klass.constantize }.not_to raise_error
      expect(callback_klass.constantize).to respond_to(:call)
    end
  end

  describe '#validate_personalisation!' do
    let(:job_instance) { subject.new }

    context 'validates required fields per template type' do
      it 'requires verification_link, first_name, email_address for verification templates' do
        %w[initial_verification annual_verification email_change_verification].each do |type|
          incomplete_personalisation = { 'first_name' => 'John' } # missing verification_link and email_address
          expect do
            job_instance.send(:validate_personalisation!, type, incomplete_personalisation)
          end.to raise_error(ArgumentError, /Missing required personalisation fields/)
        end
      end

      it 'requires only first_name for verification_success' do
        valid_personalisation = { 'first_name' => 'John' }
        expect do
          job_instance.send(:validate_personalisation!, 'verification_success', valid_personalisation)
        end.not_to raise_error
      end

      it 'raises ArgumentError when personalisation is nil' do
        expect do
          job_instance.send(:validate_personalisation!, template_type, nil)
        end.to raise_error(ArgumentError, 'Personalisation cannot be nil')
      end
    end
  end

  describe '#build_personalisation' do
    let(:job_instance) { subject.new }

    it 'builds correct personalisation for verification templates' do
      data = { verification_link: 'https://va.gov/verify/123', first_name: 'John', email: 'test@va.gov' }
      %w[initial_verification annual_verification email_change_verification].each do |type|
        result = job_instance.send(:build_personalisation, type, data)
        # Note the key rename: :email in the cache becomes 'email_address'.
        expect(result).to eq({
                               'verification_link' => 'https://va.gov/verify/123',
                               'first_name' => 'John',
                               'email_address' => 'test@va.gov'
                             })
      end
    end

    it 'builds correct personalisation for verification_success' do
      data = { first_name: 'John', email: 'test@va.gov' }
      result = job_instance.send(:build_personalisation, 'verification_success', data)
      expect(result).to eq({ 'first_name' => 'John' })
    end

    it 'raises ArgumentError for unknown template type' do
      data = { first_name: 'John' }
      expect do
        job_instance.send(:build_personalisation, 'unknown_type', data)
      end.to raise_error(ArgumentError, 'Unknown template type')
    end
  end

  describe '#get_template_id' do
    let(:job_instance) { subject.new }

    it 'returns correct template IDs for each type' do
      # initial and annual verification intentionally share one template.
      expect(job_instance.send(:get_template_id, 'initial_verification')).to eq(
        Settings.vanotify.services.va_gov.template_id.contact_email_address_confirmation_needed_email
      )
      expect(job_instance.send(:get_template_id, 'annual_verification')).to eq(
        Settings.vanotify.services.va_gov.template_id.contact_email_address_confirmation_needed_email
      )
      expect(job_instance.send(:get_template_id, 'email_change_verification')).to eq(
        Settings.vanotify.services.va_gov.template_id.contact_email_address_change_confirmation_needed_email
      )
      expect(job_instance.send(:get_template_id, 'verification_success')).to eq(
        Settings.vanotify.services.va_gov.template_id.contact_email_address_confirmed_email
      )
    end

    it 'raises ArgumentError for unknown template type' do
      expect do
        job_instance.send(:get_template_id, 'unknown')
      end.to raise_error(ArgumentError, 'Unknown template type')
    end
  end
end
|
0
|
code_files/vets-api-private/spec
|
code_files/vets-api-private/spec/sidekiq/export_breaker_status_spec.rb
|
# frozen_string_literal: true

require 'rails_helper'

# Spec for ExportBreakerStatus, which exports each Breakers-tracked external
# service's availability to StatsD as a 0/1 gauge.
RSpec.describe ExportBreakerStatus do
  describe '#perform' do
    # Any registered Breakers service works; we only need one to flip up/down.
    let(:service) do
      Breakers.client.services.first
    end
    let(:metric) { "api.external_service.#{service.name}.up" }

    before do
      # Reset breakers before each test
      Breakers.client.redis_connection.redis.flushdb
    end

    after(:all) do
      Breakers.client.redis_connection.redis.flushdb
    end

    context 'no failures on test service' do
      it 'reports up to statsd' do
        expect { subject.perform }.to trigger_statsd_gauge(metric, value: 1)
      end
    end

    context 'around outage on test service' do
      it 'reports down during an outage' do
        now = Time.current
        # Fix: use the block form of Timecop.freeze so wall-clock time is
        # always restored even if an expectation fails mid-example; the
        # previous manual freeze/return pair leaked frozen time on failure
        # and could poison subsequent examples.
        Timecop.freeze(now - 120) do
          service.add_error # create outage
          expect { subject.perform }.to trigger_statsd_gauge(metric, value: 0)
        end
        Timecop.freeze(now) do
          service.latest_outage.end!
          expect { subject.perform }.to trigger_statsd_gauge(metric, value: 1)
        end
      end
    end
  end
end
|
0
|
code_files/vets-api-private/spec
|
code_files/vets-api-private/spec/sidekiq/gi_bill_feedback_submission_job_spec.rb
|
# frozen_string_literal: true

require 'rails_helper'

# Spec for GIBillFeedbackSubmissionJob: submits a transformed GIBillFeedback
# payload to the Gibft service and records success/failure on the model.
RSpec.describe GIBillFeedbackSubmissionJob do
  let(:gi_bill_feedback) { create(:gi_bill_feedback) }

  describe '#perform' do
    before do
      # Bypass the real form transformation; the payload content is
      # irrelevant to these examples.
      expect_any_instance_of(GIBillFeedback).to receive(:transform_form).and_return({})
    end

    context 'with a valid submission' do
      it 'updates the gi bill feedback model' do
        expect_any_instance_of(Gibft::Service).to receive(:submit).with(
          {}
        ).and_return(case_id: 'case_id')
        described_class.new.perform(gi_bill_feedback.guid, {}, nil)
        updated_feedback = GIBillFeedback.find(gi_bill_feedback.guid)
        expect(updated_feedback.state).to eq('success')
        expect(updated_feedback.parsed_response).to eq('case_id' => 'case_id')
      end
    end

    context 'when the service has an error' do
      it 'sets the submission to failed' do
        expect_any_instance_of(Gibft::Service).to receive(:submit).and_raise('foo')
        # The error re-raises (so Sidekiq retries) but the failed state is
        # persisted first.
        expect do
          described_class.new.perform(gi_bill_feedback.guid, {}, nil)
        end.to raise_error('foo')
        updated_feedback = GIBillFeedback.find(gi_bill_feedback.guid)
        expect(updated_feedback.state).to eq('failed')
      end
    end
  end
end
|
0
|
code_files/vets-api-private/spec
|
code_files/vets-api-private/spec/sidekiq/external_services_status_job_spec.rb
|
# frozen_string_literal: true

require 'rails_helper'
require 'support/pagerduty/services/spec_setup'

# Spec for ExternalServicesStatusJob: refreshes the Redis-cached external
# service statuses, which in turn pulls current data from PagerDuty.
RSpec.describe ExternalServicesStatusJob do
  include_context 'simulating Redis caching of PagerDuty#get_services'

  describe '#perform' do
    subject(:status_job) { described_class.new }

    it 'calls ExternalServicesRedis::Status.new.fetch_or_cache' do
      expect_any_instance_of(ExternalServicesRedis::Status).to receive(:fetch_or_cache)
      status_job.perform
    end

    it 'calls PagerDuty::ExternalServices::Service.new.get_services' do
      expect_any_instance_of(PagerDuty::ExternalServices::Service).to receive(:get_services)
      status_job.perform
    end
  end
end
|
0
|
code_files/vets-api-private/spec
|
code_files/vets-api-private/spec/sidekiq/benefits_intake_status_job_spec.rb
|
# frozen_string_literal: true
require 'rails_helper'
RSpec.describe BenefitsIntakeStatusJob, type: :job do
describe '#perform' do
describe 'job lifecycle metrics' do
it 'increments job.started and job.completed when job begins and finishes' do
allow_any_instance_of(BenefitsIntake::Service).to receive(:bulk_status)
.and_return(double(body: { 'data' => [] }, success?: true))
expect(StatsD).to receive(:increment)
.with("#{described_class::STATS_KEY}.job.started")
expect(StatsD).to receive(:increment)
.with("#{described_class::STATS_KEY}.job.completed")
BenefitsIntakeStatusJob.new.perform
end
it 'increments job.failed when batch_process returns false' do
allow_any_instance_of(described_class).to receive(:batch_process).and_return([0, false])
expect(StatsD).to receive(:increment)
.with("#{described_class::STATS_KEY}.job.started")
expect(StatsD).to receive(:increment)
.with("#{described_class::STATS_KEY}.job.failed")
BenefitsIntakeStatusJob.new.perform
end
it 'increments job.failed and re-raises exception on unexpected errors' do
allow(FormSubmissionAttempt).to receive(:where).and_raise(StandardError.new('Database error'))
expect(StatsD).to receive(:increment)
.with("#{described_class::STATS_KEY}.job.started")
expect(StatsD).to receive(:increment)
.with("#{described_class::STATS_KEY}.job.failed")
expect(Rails.logger).to receive(:error)
.with('BenefitsIntakeStatusJob failed with exception',
hash_including(class: 'BenefitsIntakeStatusJob', message: 'Database error'))
expect { BenefitsIntakeStatusJob.new.perform }.to raise_error(StandardError, 'Database error')
end
end
describe 'submission to the bulk status report endpoint' do
context 'multiple attempts and multiple form submissions' do
before do
create_list(:form_submission, 2, :success)
create_list(:form_submission, 2, :failure)
end
let(:pending_form_submission_attempts_ids) do
create_list(:form_submission_attempt, 2,
:pending).map(&:benefits_intake_uuid)
end
it 'submits only pending form submissions' do
response = double(body: { 'data' => [] }, success?: true)
expect_any_instance_of(BenefitsIntake::Service).to receive(:bulk_status)
.with(uuids: pending_form_submission_attempts_ids).and_return(response)
BenefitsIntakeStatusJob.new.perform
end
end
context 'multiple attempts on one form submission' do
before do
create(:form_submission_attempt, :success, form_submission:)
end
let(:form_submission) { create(:form_submission) }
let(:pending_form_submission_attempts_ids) do
create_list(:form_submission_attempt, 2,
:pending, form_submission:).map(&:benefits_intake_uuid)
end
it 'submits only pending form submissions' do
response = double(body: { 'data' => [] }, success?: true)
expect_any_instance_of(BenefitsIntake::Service).to receive(:bulk_status)
.with(uuids: pending_form_submission_attempts_ids).and_return(response)
BenefitsIntakeStatusJob.new.perform
end
end
end
describe 'when batch size is less than or equal to max batch size' do
it 'successfully submits batch intake' do
pending_form_submission_attempts_ids = create_list(:form_submission_attempt, 2,
:pending).map(&:benefits_intake_uuid)
response = double(body: { 'data' => [] }, success?: true)
expect_any_instance_of(BenefitsIntake::Service).to receive(:bulk_status)
.with(uuids: pending_form_submission_attempts_ids).and_return(response)
BenefitsIntakeStatusJob.new.perform
end
end
describe 'when batch size is greater than max batch size' do
it 'successfully submits batch intake via batch' do
create_list(:form_submission, 4, :pending)
response = double(body: { 'data' => [] }, success?: true)
service = double(bulk_status: response)
allow(BenefitsIntake::Service).to receive(:new).and_return(service)
BenefitsIntakeStatusJob.new(batch_size: 2).perform
expect(service).to have_received(:bulk_status).twice
end
end
describe 'when bulk status update fails' do
let(:service) { instance_double(BenefitsIntake::Service) }
let(:form_submissions) { create_list(:form_submission, 4, :pending) }
let(:success_response) { double(body: success_body, success?: true) }
let(:failure_response) { double(body: failure_body, success?: false) }
let(:success_body) do
{ 'data' =>
[{
'id' => form_submissions.first.form_submission_attempts.first.benefits_intake_uuid,
'type' => 'document_upload',
'attributes' => {
'guid' => form_submissions.first.form_submission_attempts.first.benefits_intake_uuid,
'status' => 'pending',
'code' => 'DOC108',
'detail' => 'Maximum page size exceeded. Limit is 78 in x 101 in.',
'updated_at' => '2018-07-30T17:31:15.958Z',
'created_at' => '2018-07-30T17:31:15.958Z'
}
}] }
end
let(:failure_body) { 'error' }
before do
allow(Rails.logger).to receive(:info)
allow(Rails.logger).to receive(:error)
allow_any_instance_of(SimpleFormsApi::Notification::Email).to receive(:send)
allow(BenefitsIntake::Service).to receive(:new).and_return(service)
allow(service).to(
receive(:bulk_status).and_return(success_response, success_response, failure_response, success_response)
)
described_class.new(batch_size: 1).perform
end
it 'logs the error' do
expect(Rails.logger).to have_received(:error).with('Errors occurred while processing Intake Status batch',
class: 'BenefitsIntakeStatusJob', errors: [failure_body])
end
it 'does not short circuit the batch processing job' do
expect(service).to have_received(:bulk_status).exactly(4).times
end
end
describe 'updating the form submission status' do
before { allow_any_instance_of(SimpleFormsApi::Notification::Email).to receive(:send) }
it 'updates the status with vbms from the bulk status report endpoint' do
pending_form_submission_attempts = create_list(:form_submission_attempt, 1, :pending)
batch_uuids = pending_form_submission_attempts.map(&:benefits_intake_uuid)
data = batch_uuids.map { |id| { 'id' => id, 'attributes' => { 'status' => 'vbms' } } }
response = double(success?: true, body: { 'data' => data })
status_job = BenefitsIntakeStatusJob.new
pfsa = pending_form_submission_attempts.first
expect(status_job).to receive(:log_result).with('success', pfsa.form_submission.form_type,
pfsa.benefits_intake_uuid, anything)
expect_any_instance_of(BenefitsIntake::Service).to receive(:bulk_status)
.with(uuids: batch_uuids).and_return(response)
status_job.perform
pending_form_submission_attempts.each do |form_submission_attempt|
expect(form_submission_attempt.reload.aasm_state).to eq 'vbms'
end
end
it 'updates the status with error from the bulk status report endpoint' do
pending_form_submission_attempts = create_list(:form_submission_attempt, 1, :pending)
batch_uuids = pending_form_submission_attempts.map(&:benefits_intake_uuid)
error_code = 'error-code'
error_detail = 'error-detail'
data = batch_uuids.map do |id|
{ 'id' => id, 'attributes' => { 'code' => error_code, 'detail' => error_detail, 'status' => 'error' } }
end
response = double(success?: true, body: { 'data' => data })
status_job = BenefitsIntakeStatusJob.new
pfsa = pending_form_submission_attempts.first
expect(status_job).to receive(:log_result).with('failure', pfsa.form_submission.form_type,
pfsa.benefits_intake_uuid, anything,
"#{error_code}: #{error_detail}")
expect_any_instance_of(BenefitsIntake::Service).to receive(:bulk_status)
.with(uuids: batch_uuids).and_return(response)
status_job.perform
pending_form_submission_attempts.each do |form_submission_attempt|
expect(form_submission_attempt.reload.aasm_state).to eq 'failure'
end
end
it 'updates the status with expired from the bulk status report endpoint' do
pending_form_submission_attempts = create_list(:form_submission_attempt, 1, :pending)
batch_uuids = pending_form_submission_attempts.map(&:benefits_intake_uuid)
data = batch_uuids.map { |id| { 'id' => id, 'attributes' => { 'status' => 'expired' } } }
response = double(success?: true, body: { 'data' => data })
status_job = BenefitsIntakeStatusJob.new
pfsa = pending_form_submission_attempts.first
expect(status_job).to receive(:log_result).with('failure', pfsa.form_submission.form_type,
pfsa.benefits_intake_uuid, anything,
'expired')
expect_any_instance_of(BenefitsIntake::Service).to receive(:bulk_status)
.with(uuids: batch_uuids).and_return(response)
status_job.perform
pending_form_submission_attempts.each do |form_submission_attempt|
expect(form_submission_attempt.reload.aasm_state).to eq 'failure'
end
end
# A :stale attempt is past the SLA window but the API still reports a
# non-terminal status, so it is logged as 'stale' without changing state.
it 'logs a stale submission if over the number of SLA days' do
  pending_form_submission_attempts = create_list(:form_submission_attempt, 1, :stale)
  batch_uuids = pending_form_submission_attempts.map(&:benefits_intake_uuid)
  # 'ANYTHING-ELSE' exercises the catch-all branch for unrecognized statuses.
  data = batch_uuids.map { |id| { 'id' => id, 'attributes' => { 'status' => 'ANYTHING-ELSE' } } }
  response = double(success?: true, body: { 'data' => data })
  status_job = BenefitsIntakeStatusJob.new
  pfsa = pending_form_submission_attempts.first
  expect(status_job).to receive(:log_result).with('stale', pfsa.form_submission.form_type,
                                                  pfsa.benefits_intake_uuid, anything)
  expect_any_instance_of(BenefitsIntake::Service).to receive(:bulk_status)
    .with(uuids: batch_uuids).and_return(response)
  status_job.perform
  # Stale submissions are only logged; the attempt stays 'pending'.
  pending_form_submission_attempts.each do |form_submission_attempt|
    expect(form_submission_attempt.reload.aasm_state).to eq 'pending'
  end
end
# A recent (non-stale) attempt with an unrecognized status is logged as
# 'pending' — note log_result is called without the time/reason arguments here.
it 'logs a pending submission' do
  pending_form_submission_attempts = create_list(:form_submission_attempt, 1, :pending)
  batch_uuids = pending_form_submission_attempts.map(&:benefits_intake_uuid)
  data = batch_uuids.map { |id| { 'id' => id, 'attributes' => { 'status' => 'ANYTHING-ELSE' } } }
  response = double(success?: true, body: { 'data' => data })
  status_job = BenefitsIntakeStatusJob.new
  pfsa = pending_form_submission_attempts.first
  expect(status_job).to receive(:log_result).with('pending', pfsa.form_submission.form_type,
                                                  pfsa.benefits_intake_uuid)
  expect_any_instance_of(BenefitsIntake::Service).to receive(:bulk_status)
    .with(uuids: batch_uuids).and_return(response)
  status_job.perform
  # No state transition for pending results.
  pending_form_submission_attempts.each do |form_submission_attempt|
    expect(form_submission_attempt.reload.aasm_state).to eq 'pending'
  end
end
# end 'updating the form submission status'
end
# end #perform
end
describe '#log_result' do
  # log_result is a private helper: it bumps two StatsD counters (a per-form
  # key and an 'all_forms' aggregate) and emits one structured log line.
  it 'increments StatsD and logs result' do
    expect(StatsD).to receive(:increment).with("#{described_class::STATS_KEY}.FORM_ID.RESULT")
    expect(StatsD).to receive(:increment).with("#{described_class::STATS_KEY}.all_forms.RESULT")
    expect(Rails.logger).to receive(:info).with('BenefitsIntakeStatusJob',
                                                hash_including(result: 'RESULT', form_id: 'FORM_ID', uuid: 'UUID',
                                                               time_to_transition: nil))
    # Consistency fix: the example already relies on described_class::STATS_KEY,
    # so instantiate via described_class instead of hard-coding the class name.
    described_class.new.send(:log_result, 'RESULT', 'FORM_ID', 'UUID')
  end
end
# end RSpec.describe
end
|
0
|
code_files/vets-api-private/spec
|
code_files/vets-api-private/spec/sidekiq/form526_failure_state_snapshot_job_spec.rb
|
# frozen_string_literal: true
require 'rails_helper'
RSpec.describe Form526FailureStateSnapshotJob, type: :worker do
before do
Sidekiq::Job.clear_all
allow(Flipper).to receive(:enabled?).and_call_original
end
let!(:olden_times) { (Form526Submission::MAX_PENDING_TIME + 1.day).ago }
let!(:modern_times) { 2.days.ago }
let!(:end_date) { Time.zone.today.beginning_of_day }
let!(:start_date) { end_date - 1.week }
describe '526 state logging' do
let!(:new_unprocessed) do
Timecop.freeze(modern_times) do
create(:form526_submission)
end
end
let!(:old_unprocessed) do
Timecop.freeze(olden_times) do
create(:form526_submission)
end
end
let!(:new_primary_success) do
Timecop.freeze(modern_times) do
create(:form526_submission, :with_submitted_claim_id, :with_one_succesful_job)
end
end
let!(:old_primary_success) do
Timecop.freeze(olden_times) do
create(:form526_submission, :with_submitted_claim_id, :with_one_succesful_job)
end
end
let!(:new_backup_pending) do
Timecop.freeze(modern_times) do
create(:form526_submission, :backup_path, :with_failed_primary_job)
end
end
let!(:old_backup_pending) do
Timecop.freeze(olden_times) do
create(:form526_submission, :backup_path, :with_failed_primary_job)
end
end
let!(:new_backup_success) do
Timecop.freeze(modern_times) do
create(:form526_submission, :backup_path, :paranoid_success, :with_failed_primary_job)
end
end
let!(:old_backup_success) do
Timecop.freeze(olden_times) do
create(:form526_submission, :backup_path, :paranoid_success, :with_failed_primary_job)
end
end
let!(:new_backup_vbms) do
Timecop.freeze(modern_times) do
create(:form526_submission, :backup_path, :backup_accepted, :with_failed_primary_job)
end
end
let!(:old_backup_vbms) do
Timecop.freeze(olden_times) do
create(:form526_submission, :backup_path, :backup_accepted, :with_failed_primary_job)
end
end
let!(:new_backup_rejected) do
Timecop.freeze(modern_times) do
create(:form526_submission, :backup_path, :backup_rejected, :with_failed_primary_job)
end
end
let!(:old_backup_rejected) do
Timecop.freeze(olden_times) do
create(:form526_submission, :backup_path, :backup_rejected, :with_failed_primary_job)
end
end
let!(:new_double_job_failure) do
Timecop.freeze(modern_times) do
create(:form526_submission, :with_failed_primary_job, :with_failed_backup_job)
end
end
let!(:old_double_job_failure) do
Timecop.freeze(olden_times) do
create(:form526_submission, :with_failed_primary_job, :with_failed_backup_job)
end
end
let!(:new_double_job_failure_remediated) do
Timecop.freeze(modern_times) do
create(:form526_submission, :with_failed_primary_job, :with_failed_backup_job, :remediated)
end
end
let!(:old_double_job_failure_remediated) do
Timecop.freeze(olden_times) do
create(:form526_submission, :with_failed_primary_job, :with_failed_backup_job, :remediated)
end
end
let!(:new_double_job_failure_de_remediated) do
Timecop.freeze(modern_times) do
create(:form526_submission, :with_failed_primary_job, :with_failed_backup_job, :no_longer_remediated)
end
end
let!(:old_double_job_failure_de_remediated) do
Timecop.freeze(olden_times) do
create(:form526_submission, :with_failed_primary_job, :with_failed_backup_job, :no_longer_remediated)
end
end
let!(:new_no_job_remediated) do
Timecop.freeze(modern_times) do
create(:form526_submission, :remediated)
end
end
let!(:old_no_job_remediated) do
Timecop.freeze(olden_times) do
create(:form526_submission, :remediated)
end
end
let!(:new_backup_paranoid) do
Timecop.freeze(modern_times) do
create(:form526_submission, :backup_path, :with_failed_primary_job, :paranoid_success)
end
end
let!(:old_backup_paranoid) do
Timecop.freeze(olden_times) do
create(:form526_submission, :backup_path, :with_failed_primary_job, :paranoid_success)
end
end
let!(:still_running_with_retryable_errors) do
Timecop.freeze(modern_times) do
create(:form526_submission, :with_one_failed_job)
end
end
# RARE EDGECASES
let!(:new_no_job_de_remediated) do
Timecop.freeze(modern_times) do
create(:form526_submission, :no_longer_remediated)
end
end
let!(:old_no_job_de_remediated) do
Timecop.freeze(olden_times) do
create(:form526_submission, :no_longer_remediated)
end
end
let!(:new_double_success) do
Timecop.freeze(modern_times) do
create(:form526_submission, :with_submitted_claim_id, :backup_path)
end
end
let!(:old_double_success) do
Timecop.freeze(olden_times) do
create(:form526_submission, :with_submitted_claim_id, :backup_path)
end
end
let!(:new_triple_success) do
Timecop.freeze(modern_times) do
create(:form526_submission, :with_submitted_claim_id, :backup_path, :remediated)
end
end
let!(:old_triple_success) do
Timecop.freeze(olden_times) do
create(:form526_submission, :with_submitted_claim_id, :backup_path, :remediated)
end
end
let!(:new_double_success_de_remediated) do
Timecop.freeze(modern_times) do
create(:form526_submission, :with_submitted_claim_id, :backup_path, :no_longer_remediated)
end
end
let!(:old_double_success_de_remediated) do
Timecop.freeze(olden_times) do
create(:form526_submission, :with_submitted_claim_id, :backup_path, :no_longer_remediated)
end
end
let!(:new_remediated_and_de_remediated) do
sub = Timecop.freeze(modern_times) do
create(:form526_submission, :remediated)
end
Timecop.freeze(modern_times + 1.hour) do
create(:form526_submission_remediation,
form526_submission: sub,
lifecycle: ['i am no longer remediated'],
success: false)
end
sub
end
let!(:old_remediated_and_de_remediated) do
sub = Timecop.freeze(olden_times) do
create(:form526_submission, :remediated)
end
Timecop.freeze(olden_times + 1.hour) do
create(:form526_submission_remediation,
form526_submission: sub,
lifecycle: ['i am no longer remediated'],
success: false)
end
sub
end
it 'logs 526 state metrics correctly' do
  # Expected grouping of the fixture submissions declared above:
  # - awaiting backup: recent submissions still pending on the backup path
  # - incomplete: recent submissions with no terminal success outcome yet
  # - failure: submissions past MAX_PENDING_TIME, or rejected/failed with
  #   no surviving remediation
  expected_log = {
    total_awaiting_backup_status: [
      new_backup_pending.id
    ].sort,
    total_incomplete_type: [
      still_running_with_retryable_errors.id,
      new_unprocessed.id,
      new_backup_pending.id,
      new_no_job_de_remediated.id,
      new_remediated_and_de_remediated.id
    ].sort,
    total_failure_type: [
      old_unprocessed.id,
      old_backup_pending.id,
      new_backup_rejected.id,
      old_backup_rejected.id,
      old_double_job_failure.id,
      old_double_job_failure_de_remediated.id,
      old_no_job_de_remediated.id,
      old_remediated_and_de_remediated.id,
      new_double_job_failure.id,
      new_double_job_failure_de_remediated.id
    ].sort
  }
  expect(described_class.new.snapshot_state).to eq(expected_log)
end
# Description typo fixed: the metrics library is "StatsD", not "Stats D".
# Gauge counts mirror the three groups asserted in the snapshot_state example.
it 'writes counts as StatsD gauges' do
  prefix = described_class::STATSD_PREFIX
  expect(StatsD).to receive(:gauge).with("#{prefix}.total_awaiting_backup_status_count", 1)
  expect(StatsD).to receive(:gauge).with("#{prefix}.total_incomplete_type_count", 5)
  expect(StatsD).to receive(:gauge).with("#{prefix}.total_failure_type_count", 10)
  described_class.new.perform
end
end
end
|
0
|
code_files/vets-api-private/spec
|
code_files/vets-api-private/spec/sidekiq/in_progress_form_cleaner_spec.rb
|
# frozen_string_literal: true
require 'rails_helper'
RSpec.describe InProgressFormCleaner do
describe '#perform' do
let(:now) { Time.now.utc }
context 'when there is a set of records' do
  before do
    # Use Timecop's block form: time is restored automatically even if a
    # factory raises. The original sequential freeze/…/Timecop.return would
    # leave the clock frozen for subsequent examples on failure.
    @form_expired = Timecop.freeze(now - 61.days) { create(:in_progress_form) }
    @form_active = Timecop.freeze(now - 59.days) { create(:in_progress_form) }
    @form_new = Timecop.freeze(now) { create(:in_progress_form) }
  end

  it 'deletes old records' do
    # Only the 61-day-old record is past the expiry window; the 59-day-old
    # and brand-new records survive.
    expect { subject.perform }.to change(InProgressForm, :count).by(-1)
    expect { @form_expired.reload }.to raise_exception(ActiveRecord::RecordNotFound)
  end
end
context 'when there is a form526 record older than 60 days' do
  before do
    # Block form restores the clock even if the factory raises, unlike the
    # original freeze + Timecop.return pair.
    @form526_active = Timecop.freeze(now - 61.days) { create(:in_progress_526_form) }
  end

  it 'does not delete the record' do
    # Form 526 drafts get a longer retention window than the default 60 days
    # (see the 365-day context below) — presumably one year; confirm against
    # the cleaner's implementation.
    expect { subject.perform }.not_to change(InProgressForm, :count)
    expect { @form526_active.reload }.not_to raise_exception
  end
end
context 'when there is a form526 record older than 365 days' do
  before do
    # Block form restores the clock even if the factory raises.
    @form526_expired = Timecop.freeze(now - 366.days) { create(:in_progress_526_form) }
  end

  it 'deletes the record' do
    # Past the extended retention window, Form 526 drafts are purged too.
    expect { subject.perform }.to change(InProgressForm, :count).by(-1)
    expect { @form526_expired.reload }.to raise_exception(ActiveRecord::RecordNotFound)
  end
end
context 'when tracking form deletions' do
  it 'increments stats for each form type' do
    # Block form restores the clock even if a create raises mid-setup;
    # all three records are created well past the expiry window.
    Timecop.freeze(now - 366.days) do
      create(:in_progress_form, form_id: 'form-1')
      create(:in_progress_form, form_id: 'form-1')
      create(:in_progress_form, form_id: 'form-2')
    end

    # Expect StatsD to be called for each form type with correct count
    expect(StatsD).to receive(:increment)
      .with('worker.in_progress_form_cleaner.form_1_deleted', 2)
    expect(StatsD).to receive(:increment)
      .with('worker.in_progress_form_cleaner.form_2_deleted', 1)

    subject.perform
  end
end
end
end
|
0
|
code_files/vets-api-private/spec
|
code_files/vets-api-private/spec/sidekiq/transactional_email_analytics_job_spec.rb
|
# frozen_string_literal: true
require 'rails_helper'
RSpec.describe TransactionalEmailAnalyticsJob, type: :job do
subject do
described_class.new
end
before do
allow(Settings.govdelivery).to receive(:token).and_return('asdf')
allow(Settings.google_analytics).to receive(:tracking_id).and_return('UA-XXXXXXXXX-1')
end
describe '#perform', run_at: '2018-05-30 18:18:56' do
context 'GovDelivery token is missing from settings' do
it 'raises an error' do
allow(FeatureFlipper).to receive(:send_email?).and_return(false)
expect { subject.perform }.to raise_error(Common::Exceptions::ParameterMissing)
end
end
context 'Google Analytics tracking ID is missing from settings' do
it 'raises an error' do
allow(Settings.google_analytics).to receive(:tracking_id).and_return(nil)
expect { subject.perform }.to raise_error(Common::Exceptions::ParameterMissing)
end
end
it 'retrieves messages at least once, and stop when loop-break conditions are met' do
VCR.use_cassette('govdelivery_emails', allow_playback_repeats: true) do
expect(subject).to receive(:relevant_emails).twice.and_call_original
subject.perform
end
end
it 'processes transactional emails for Google Analytics evaluation' do
VCR.use_cassette('govdelivery_emails', allow_playback_repeats: true) do
expect(subject).to receive(:eval_email).once
subject.perform
end
end
it 'sends events to Google Analytics' do
VCR.use_cassette('govdelivery_emails', allow_playback_repeats: true) do
expect_any_instance_of(Staccato::Tracker).to receive(:event).once
subject.perform
end
end
end
# we_should_break? decides when the GovDelivery pagination loop stops:
# either the oldest fetched email predates the polling window, or the page
# came back short (fewer than 50 emails).
describe '#we_should_break?', run_at: '2018-05-30 18:27:56' do
  before do
    VCR.use_cassette('govdelivery_emails', allow_playback_repeats: true) do
      subject.send(:relevant_emails, 1)
      # Pull out the accumulated collection so examples can mutate it directly.
      @emails = subject.instance_variable_get(:@all_emails)
    end
  end

  context 'last email created_at > time-range start time and 50 emails in collection' do
    it 'returns false' do
      # 1440 minutes = 24 hours, still inside the polling window.
      @emails.collection.last.attributes[:created_at] = 1440.minutes.ago.to_s
      expect(subject.send(:we_should_break?)).to be false
    end
  end

  context 'last email created_at < time-range start time' do
    it 'returns true' do
      # 25 hours ago falls before the window start, so the loop should stop.
      @emails.collection.last.attributes[:created_at] = 25.hours.ago.to_s
      expect(subject.send(:we_should_break?)).to be true
    end
  end

  context 'less than 50 emails were returned by govdelivery' do
    it 'returns true' do
      # Dropping one email leaves a short page, signaling the last page.
      @emails.collection.delete_at(0)
      expect(subject.send(:we_should_break?)).to be true
    end
  end
end
describe '.mailers' do
  it 'returns all the possible TransactionalEmailMailer descendants' do
    # constantize all mailers so they are loaded
    # (Ruby only knows about descendants once their classes are evaluated;
    # the mapped result itself is intentionally discarded.)
    Dir['app/mailers/*.rb']
      .collect { |mailer| %r{app/mailers/(.*)\.rb}.match(mailer)[1] }
      .map { |mailer_name| mailer_name.camelize.constantize }
    expect(TransactionalEmailAnalyticsJob.mailers).to match_array(TransactionalEmailMailer.descendants)
  end
end
end
|
0
|
code_files/vets-api-private/spec
|
code_files/vets-api-private/spec/sidekiq/va_notify_dd_email_job_spec.rb
|
# frozen_string_literal: true
require 'rails_helper'
RSpec.describe VANotifyDdEmailJob, type: :model do
let(:email) { 'user@example.com' }
describe '.send_to_emails' do
  context 'when multiple emails are passed in' do
    it 'sends an email for each address' do
      emails = %w[
        email1@mail.com
        email2@mail.com
      ]
      # One async job is enqueued per address.
      emails.each do |email|
        expect(described_class).to receive(:perform_async).with(email)
      end
      described_class.send_to_emails(emails)
    end
  end

  context 'when no emails are passed in' do
    it 'logs info message' do
      # An empty list is not an error — the skip is recorded as structured info.
      expect(Rails.logger).to receive(:info).with(
        event: 'direct_deposit_confirmation_skipped',
        reason: 'missing_email',
        context: {
          feature: 'direct_deposit',
          job: described_class.name
        },
        message: 'No email address present for Direct Deposit confirmation email'
      )
      described_class.send_to_emails([])
    end
  end
end
describe '#perform' do
  let(:notification_client) { instance_double(Notifications::Client) }
  let(:va_notify_client) { instance_double(VaNotify::Client) }

  context 'with default email template' do
    it 'sends a confirmation email using the direct_deposit template' do
      allow(VaNotify::Service).to receive(:new)
        .with(Settings.vanotify.services.va_gov.api_key).and_return(notification_client)
      expect(notification_client).to receive(:send_email).with(
        email_address: email, template_id: 'direct_deposit_template_id'
      )
      described_class.new.perform(email)
    end
  end

  it 'handles 4xx errors when sending an email' do
    allow(Notifications::Client).to receive(:new).and_return(notification_client)
    allow(VaNotify::Client).to receive(:new).and_return(va_notify_client)
    error = Common::Exceptions::BackendServiceException.new(
      'VANOTIFY_400',
      { source: VaNotify::Service.to_s },
      400,
      'Error'
    )
    allow(notification_client).to receive(:send_email).and_raise(error)
    expect(Rails.logger).to receive(:error)
    # A 4xx error is logged and counted but not re-raised (no raise_error
    # expectation here, unlike the 5xx case below).
    expect { subject.perform(email) }
      .to trigger_statsd_increment('worker.direct_deposit_confirmation_email.error')
  end

  it 'handles 5xx errors when sending an email' do
    allow(Notifications::Client).to receive(:new).and_return(notification_client)
    allow(VaNotify::Client).to receive(:new).and_return(va_notify_client)
    error = Common::Exceptions::BackendServiceException.new(
      'VANOTIFY_500',
      { source: VaNotify::Service.to_s },
      500,
      'Error'
    )
    allow(notification_client).to receive(:send_email).and_raise(error)
    expect(Rails.logger).to receive(:error)
    # A 5xx error is logged, counted, and re-raised — presumably so Sidekiq
    # retries the job; confirm against the job's retry configuration.
    expect { subject.perform(email) }
      .to raise_error(Common::Exceptions::BackendServiceException)
      .and trigger_statsd_increment('worker.direct_deposit_confirmation_email.error')
  end
end
end
|
0
|
code_files/vets-api-private/spec
|
code_files/vets-api-private/spec/sidekiq/form526_paranoid_success_polling_job_spec.rb
|
# frozen_string_literal: true
require 'rails_helper'
RSpec.describe Form526ParanoidSuccessPollingJob, type: :job do
describe '#perform' do
let!(:new_submission) { create(:form526_submission) }
let!(:backup_submission) { create(:form526_submission, :backup_path) }
let!(:paranoid_submission1) { create(:form526_submission, :backup_path, :paranoid_success) }
let!(:paranoid_submission2) { create(:form526_submission, :backup_path, :paranoid_success) }
let!(:paranoid_submission3) { create(:form526_submission, :backup_path, :paranoid_success) }
let!(:paranoid_submission4) { create(:form526_submission, :backup_path, :paranoid_success) }
let!(:paranoid_submission5) { create(:form526_submission, :backup_path, :paranoid_success) }
let!(:paranoid_submission6) { create(:form526_submission, :backup_path, :paranoid_success) }
let!(:success_by_age) do
Timecop.freeze((1.year + 1.day).ago) do
create(:form526_submission, :backup_path, :paranoid_success)
end
end
let!(:accepted_backup_submission) do
create(:form526_submission, :backup_path, :backup_accepted)
end
let!(:rejected_backup_submission) do
create(:form526_submission, :backup_path, :backup_rejected)
end
context 'polling on paranoid success type submissions' do
let(:api_response) do
{
'data' => [
{
'id' => paranoid_submission1.backup_submitted_claim_id,
'attributes' => {
'guid' => paranoid_submission1.backup_submitted_claim_id,
'status' => 'success'
}
},
{
'id' => paranoid_submission2.backup_submitted_claim_id,
'attributes' => {
'guid' => paranoid_submission2.backup_submitted_claim_id,
'status' => 'processing'
}
},
{
'id' => paranoid_submission3.backup_submitted_claim_id,
'attributes' => {
'guid' => paranoid_submission3.backup_submitted_claim_id,
'status' => 'error'
}
},
{
'id' => paranoid_submission4.backup_submitted_claim_id,
'attributes' => {
'guid' => paranoid_submission4.backup_submitted_claim_id,
'status' => 'expired'
}
},
{
'id' => paranoid_submission5.backup_submitted_claim_id,
'attributes' => {
'guid' => paranoid_submission5.backup_submitted_claim_id,
'status' => 'something_crazy'
}
},
{
'id' => paranoid_submission6.backup_submitted_claim_id,
'attributes' => {
'guid' => paranoid_submission6.backup_submitted_claim_id,
'status' => 'vbms'
}
}
]
}
end
describe 'submission to the bulk status report endpoint' do
it 'submits only paranoid_success form submissions' do
paranoid_claim_ids = Form526Submission.paranoid_success_type.pluck(:backup_submitted_claim_id)
response = double
allow(response).to receive(:body).and_return(api_response)
allow_any_instance_of(BenefitsIntakeService::Service)
.to receive(:get_bulk_status_of_uploads)
.with(paranoid_claim_ids)
.and_return(response)
expect(paranoid_claim_ids).to contain_exactly(
paranoid_submission1.backup_submitted_claim_id,
paranoid_submission2.backup_submitted_claim_id,
paranoid_submission3.backup_submitted_claim_id,
paranoid_submission4.backup_submitted_claim_id,
paranoid_submission5.backup_submitted_claim_id,
paranoid_submission6.backup_submitted_claim_id
)
expect_any_instance_of(BenefitsIntakeService::Service)
.to receive(:get_bulk_status_of_uploads)
.with(paranoid_claim_ids)
.and_return(response)
Form526ParanoidSuccessPollingJob.new.perform
end
end
describe 'updating changed states' do
it 'updates paranoid submissions to their correct state' do
paranoid_claim_ids = Form526Submission.paranoid_success_type.pluck(:backup_submitted_claim_id)
response = double
allow(response).to receive(:body).and_return(api_response)
allow_any_instance_of(BenefitsIntakeService::Service)
.to receive(:get_bulk_status_of_uploads)
.with(paranoid_claim_ids)
.and_return(response)
expect(paranoid_claim_ids).to contain_exactly(
paranoid_submission1.backup_submitted_claim_id,
paranoid_submission2.backup_submitted_claim_id,
paranoid_submission3.backup_submitted_claim_id,
paranoid_submission4.backup_submitted_claim_id,
paranoid_submission5.backup_submitted_claim_id,
paranoid_submission6.backup_submitted_claim_id
)
Form526ParanoidSuccessPollingJob.new.perform
paranoid_submission1.reload
paranoid_submission2.reload
paranoid_submission3.reload
paranoid_submission4.reload
paranoid_submission5.reload
paranoid_submission6.reload
expect(paranoid_submission1.backup_submitted_claim_status).to eq 'paranoid_success'
expect(paranoid_submission2.backup_submitted_claim_status).to be_nil
expect(paranoid_submission3.backup_submitted_claim_status).to eq 'rejected'
expect(paranoid_submission4.backup_submitted_claim_status).to eq 'rejected'
expect(paranoid_submission5.backup_submitted_claim_status).to eq 'rejected'
expect(paranoid_submission6.backup_submitted_claim_status).to eq 'accepted'
end
end
end
end
end
|
0
|
code_files/vets-api-private/spec
|
code_files/vets-api-private/spec/sidekiq/form526_status_polling_job_spec.rb
|
# frozen_string_literal: true
require 'rails_helper'
RSpec.describe Form526StatusPollingJob, type: :job do
describe '#perform' do
let!(:new_submission) { create(:form526_submission) }
let!(:backup_submission_a) { create(:form526_submission, :backup_path) }
let!(:backup_submission_b) { create(:form526_submission, :backup_path) }
let!(:backup_submission_c) { create(:form526_submission, :backup_path) }
let!(:backup_submission_d) { create(:form526_submission, :backup_path) }
let!(:paranoid_submission_a) { create(:form526_submission, :backup_path, :paranoid_success) }
let!(:paranoid_submission_b) { create(:form526_submission, :backup_path, :paranoid_success) }
let!(:paranoid_submission_c) { create(:form526_submission, :backup_path, :paranoid_success) }
let!(:old_paranoid_submission) do
Timecop.freeze((1.year + 1.day).ago) do
create(:form526_submission, :backup_path, :paranoid_success)
end
end
let!(:accepted_backup_submission) do
create(:form526_submission, :backup_path, :backup_accepted)
end
let!(:rejected_backup_submission) do
create(:form526_submission, :backup_path, :backup_rejected)
end
context 'polling on pending submissions' do
let(:api_response) do
{
'data' => [
{
'id' => backup_submission_a.backup_submitted_claim_id,
'attributes' => {
'guid' => backup_submission_a.backup_submitted_claim_id,
'status' => 'vbms'
}
},
{
'id' => backup_submission_b.backup_submitted_claim_id,
'attributes' => {
'guid' => backup_submission_b.backup_submitted_claim_id,
'status' => 'success'
}
},
{
'id' => backup_submission_c.backup_submitted_claim_id,
'attributes' => {
'guid' => backup_submission_c.backup_submitted_claim_id,
'status' => 'error'
}
},
{
'id' => backup_submission_d.backup_submitted_claim_id,
'attributes' => {
'guid' => backup_submission_d.backup_submitted_claim_id,
'status' => 'expired'
}
}
]
}
end
describe 'submission to the bulk status report endpoint' do
it 'submits only pending form submissions' do
pending_claim_ids = Form526Submission.pending_backup.pluck(:backup_submitted_claim_id)
response = double
allow(response).to receive(:body).and_return({ 'data' => [] })
expect(pending_claim_ids).to contain_exactly(
backup_submission_a.backup_submitted_claim_id,
backup_submission_b.backup_submitted_claim_id,
backup_submission_c.backup_submitted_claim_id,
backup_submission_d.backup_submitted_claim_id
)
expect_any_instance_of(BenefitsIntakeService::Service)
.to receive(:get_bulk_status_of_uploads)
.with(pending_claim_ids)
.and_return(response)
Form526StatusPollingJob.new.perform
end
end
describe 'when batch size is greater than max batch size' do
it 'successfully submits batch intake via batch' do
response = double
service = double(get_bulk_status_of_uploads: response)
allow(response).to receive(:body).and_return({ 'data' => [] })
allow(BenefitsIntakeService::Service).to receive(:new).and_return(service)
Form526StatusPollingJob.new(max_batch_size: 3).perform
expect(service).to have_received(:get_bulk_status_of_uploads).twice
end
end
describe 'when bulk status update fails' do
it 'logs the error' do
service = double
message = 'error'
allow(BenefitsIntakeService::Service).to receive(:new).and_return(service)
allow(service).to receive(:get_bulk_status_of_uploads).and_raise(message)
allow(Rails.logger).to receive(:info)
allow(Rails.logger).to receive(:error)
Form526StatusPollingJob.new.perform
expect(Rails.logger)
.to have_received(:error)
.with(
'Error processing 526 Intake Status batch',
class: 'Form526StatusPollingJob',
message:
)
expect(Rails.logger)
.not_to have_received(:info).with('Form 526 Intake Status polling complete')
end
end
describe 'updating the form 526s local submission state' do
let(:pending_claim_ids) do
Form526Submission.pending_backup
.pluck(:backup_submitted_claim_id)
end
let(:response) { double }
before do
allow(response).to receive(:body).and_return(api_response)
allow_any_instance_of(BenefitsIntakeService::Service)
.to receive(:get_bulk_status_of_uploads)
.with(pending_claim_ids)
.and_return(response)
allow(Flipper).to receive(:enabled?).and_call_original
end
# if a backup submission hits paranoid_success!, it should send the received email to the Veteran
context 'when disability_526_send_received_email_from_backup_path is enabled' do
  before do
    allow(Flipper).to receive(:enabled?)
      .with(:disability_526_send_received_email_from_backup_path)
      .and_return(true)
  end

  # Description grammar fixed: was 'behaves sends the received email'.
  it 'sends the received email' do
    # Only backup_submission_b reaches paranoid_success in the stubbed
    # api_response, so exactly one confirmation email job is enqueued.
    expect(Form526ConfirmationEmailJob).to receive(:perform_async).once
    Form526StatusPollingJob.new.perform
  end
end
context 'when disability_526_send_received_email_from_backup_path is disabled' do
  before do
    allow(Flipper).to receive(:enabled?)
      .with(:disability_526_send_received_email_from_backup_path)
      .and_return(false)
  end

  # Description grammar fixed: was 'behaves does not send the received email'.
  it 'does not send the received email' do
    expect(Form526ConfirmationEmailJob).not_to receive(:perform_async)
    Form526StatusPollingJob.new.perform
  end
end
it 'updates local state to reflect the returned statuses' do
Form526StatusPollingJob.new.perform
expect(backup_submission_a.reload.backup_submitted_claim_status).to eq 'accepted'
expect(backup_submission_b.reload.backup_submitted_claim_status).to eq 'paranoid_success'
expect(backup_submission_c.reload.backup_submitted_claim_status).to eq 'rejected'
expect(backup_submission_d.reload.backup_submitted_claim_status).to eq 'rejected'
end
end
context 'when a failure type response is returned from the API' do
context 'when form526_send_backup_submission_polling_failure_email_notice is enabled' do
let(:timestamp) { Time.now.utc }
before do
Flipper.enable(:form526_send_backup_submission_polling_failure_email_notice)
end
it 'enqueues a failure notification email job' do
Timecop.freeze(timestamp) do
pending_claim_ids = Form526Submission.pending_backup.pluck(:backup_submitted_claim_id)
response = double
allow(response).to receive(:body).and_return(api_response)
allow_any_instance_of(BenefitsIntakeService::Service)
.to receive(:get_bulk_status_of_uploads)
.with(pending_claim_ids)
.and_return(response)
expect(Form526SubmissionFailureEmailJob)
.not_to receive(:perform_async).with(backup_submission_a.id, timestamp.to_s)
expect(Form526SubmissionFailureEmailJob)
.not_to receive(:perform_async).with(backup_submission_b.id, timestamp.to_s)
expect(Form526SubmissionFailureEmailJob)
.to receive(:perform_async).once.ordered.with(backup_submission_c.id, timestamp.to_s)
expect(Form526SubmissionFailureEmailJob)
.to receive(:perform_async).once.ordered.with(backup_submission_d.id, timestamp.to_s)
Form526StatusPollingJob.new.perform
end
end
end
context 'when form526_send_backup_submission_polling_failure_email_notice is disabled' do
let!(:pending_claim_ids) { Form526Submission.pending_backup.pluck(:backup_submitted_claim_id) }
before do
Flipper.disable(:form526_send_backup_submission_polling_failure_email_notice)
end
it 'does not enqueue a failure notification email job' do
response = double
allow(response).to receive(:body).and_return(api_response)
allow_any_instance_of(BenefitsIntakeService::Service)
.to receive(:get_bulk_status_of_uploads)
.with(pending_claim_ids)
.and_return(response)
expect(Form526SubmissionFailureEmailJob).not_to receive(:perform_async)
Form526StatusPollingJob.new.perform
end
it 'logs submission failure' do
response = double
allow(response).to receive(:body).and_return(api_response)
allow_any_instance_of(BenefitsIntakeService::Service)
.to receive(:get_bulk_status_of_uploads)
.with(pending_claim_ids)
.and_return(response)
expect(Rails.logger).to receive(:warn).with(
'Form526StatusPollingJob submission failure',
{
result: 'failure',
submission_id: backup_submission_c.id
}
).once
expect(Rails.logger).to receive(:warn).with(
'Form526StatusPollingJob submission failure',
{
result: 'failure',
submission_id: backup_submission_d.id
}
).once
Form526StatusPollingJob.new.perform
end
end
end
end
end
end
|
0
|
code_files/vets-api-private/spec
|
code_files/vets-api-private/spec/sidekiq/form_pdf_change_detection_job_spec.rb
|
# frozen_string_literal: true
require 'rails_helper'
RSpec.describe FormPdfChangeDetectionJob, type: :job do
let(:meta_data) { "[#{described_class.name}]" }
let(:forms_response) do
{
'data' => [
{
'id' => '10-10EZ',
'attributes' => {
'form_name' => '10-10EZ',
'sha256' => 'abc123def456',
'last_revision_on' => '2024-01-15',
'url' => 'some-url.com'
}
},
{
'id' => '21-526EZ',
'attributes' => {
'form_name' => '21-526EZ',
'sha256' => 'xyz789uvw012',
'last_revision_on' => '2024-01-10',
'url' => 'some-other-url.com'
}
}
]
}
end
let(:client) { instance_double(Forms::Client) }
let(:response) { double(body: forms_response) }
before do
allow(Forms::Client).to receive(:new).with(nil).and_return(client)
allow(client).to receive(:get_all).and_return(response)
@mock_cache = {}
allow(Rails.cache).to receive(:read_multi) do |*keys|
result = {}
keys.each do |key|
result[key] = @mock_cache[key] if @mock_cache.key?(key)
end
result
end
allow(Rails.cache).to receive(:write_multi) do |data, **_options|
@mock_cache.merge!(data)
true
end
end
describe '#perform' do
context ':form_pdf_change_detection disabled' do
before do
allow(Flipper).to receive(:enabled?).with(:form_pdf_change_detection).and_return(false)
end
it 'does not run' do
expect(Forms::Client).not_to receive(:new)
expect(Rails.cache).not_to receive(:read_multi)
expect(Rails.cache).not_to receive(:write_multi)
expect(StatsD).not_to receive(:increment)
expect(Rails.logger).not_to receive(:info).with(
"#{meta_data} - Job started."
)
subject.perform
end
end
# Behavior with the :form_pdf_change_detection feature flag enabled:
# the job fetches form metadata, compares SHA256s against cached values,
# and records metrics/logs when a form's PDF has been revised.
context ':form_pdf_change_detection enabled' do
before do
allow(Flipper).to receive(:enabled?).with(:form_pdf_change_detection).and_return(true)
end
# First run: cache is empty, so nothing counts as "changed"; SHA values are
# written so later runs have a baseline.
it 'sets cache values without triggering change detection metrics on initial run' do
expect(@mock_cache).to be_empty
expect(StatsD).not_to receive(:increment)
.with('form.pdf.change.detected', anything)
expect(Rails.logger).not_to receive(:info)
.with(a_string_including('was revised'))
subject.perform
expect(@mock_cache['form_pdf_revision_sha256:10-10EZ']).to eq('abc123def456')
expect(@mock_cache['form_pdf_revision_sha256:21-526EZ']).to eq('xyz789uvw012')
end
it 'completes successfully with valid form data' do
expect(Rails.logger).to receive(:info).with(
"#{meta_data} - Job started."
)
expect(Rails.logger).to receive(:info).with(
"#{meta_data} - Job finished successfully."
)
expect { subject.perform }.not_to raise_error
end
# Cache access must be batched (read_multi/write_multi), not per-form.
it 'uses batch operations to read and write cache' do
expected_keys = [
'form_pdf_revision_sha256:10-10EZ',
'form_pdf_revision_sha256:21-526EZ'
]
expect(Rails.cache).to receive(:read_multi).with(*expected_keys)
expected_data = {
'form_pdf_revision_sha256:10-10EZ' => 'abc123def456',
'form_pdf_revision_sha256:21-526EZ' => 'xyz789uvw012'
}
expect(Rails.cache).to receive(:write_multi).with(expected_data, expires_in: 7.days.to_i)
subject.perform
end
it 'caches SHA256 values for forms using batch write' do
subject.perform
expect(@mock_cache['form_pdf_revision_sha256:10-10EZ']).to eq('abc123def456')
expect(@mock_cache['form_pdf_revision_sha256:21-526EZ']).to eq('xyz789uvw012')
end
# A cached SHA differing from the fetched SHA marks the form as revised.
context 'when form has changed' do
before do
@mock_cache['form_pdf_revision_sha256:10-10EZ'] = 'old_sha_value'
end
it 'detects changes using batch read and records metrics' do
expect(StatsD).to receive(:increment)
.with('form.pdf.change.detected', tags: ['form:10-10EZ', 'form_id:10-10EZ'])
subject.perform
end
it 'logs revision information' do
form = forms_response['data'][0]
attributes = form['attributes']
expect(Rails.logger).to receive(:info).with(
"#{meta_data} - Job started."
)
expect(Rails.logger).to receive(:info).with(
"#{meta_data} - Job finished successfully."
)
expect(Rails.logger).to receive(:info).with(
a_string_including(
"#{meta_data} - PDF form #{attributes['form_name']} (form_id: #{form['id']}) was revised. " \
"Last revised on date: #{attributes['last_revision_on']}. " \
"URL: #{attributes['url']}"
)
)
subject.perform
end
it 'updates cache with new SHA256 value using batch write' do
subject.perform
expect(@mock_cache['form_pdf_revision_sha256:10-10EZ']).to eq('abc123def456')
end
end
# Matching SHA: no "changed" signal, but the cache entry is rewritten
# (refreshing its TTL).
context 'when form has not changed' do
before do
@mock_cache['form_pdf_revision_sha256:10-10EZ'] = 'abc123def456'
end
it 'does not trigger change detection metrics' do
expect(StatsD).not_to receive(:increment)
.with('form.pdf.change.detected', anything)
subject.perform
end
it 'still updates cache using batch write to refresh TTL' do
expect(Rails.cache).to receive(:write_multi)
subject.perform
expect(@mock_cache['form_pdf_revision_sha256:10-10EZ']).to eq('abc123def456')
end
end
# Upstream API failure: the error is logged and re-raised; no cache I/O runs.
context 'when API call fails' do
before do
allow(client).to receive(:get_all).and_raise(StandardError, 'API Error')
end
it 'logs the error and re-raises' do
expect(Rails.logger).to receive(:info).with(
"#{meta_data} - Job started."
)
expect(Rails.logger).not_to receive(:info).with(
"#{meta_data} - Job finished successfully."
)
expect(Rails.logger).to receive(:error)
.with("#{meta_data} - Job raised an error: API Error")
expect { subject.perform }.to raise_error(StandardError, 'API Error')
end
it 'does not perform any cache operations' do
expect(Rails.cache).not_to receive(:read_multi)
expect(Rails.cache).not_to receive(:write_multi)
expect { subject.perform }.to raise_error(StandardError, 'API Error')
end
end
# Per-form processing errors are caught and logged so one bad form does not
# abort the whole job.
context 'when form processing raises an exception despite valid id' do
let(:forms_response) do
{
'data' => [
{
'id' => '10-10EZ',
'attributes' => {
'form_name' => '10-10EZ',
'sha256' => 'abc123def456'
}
}
]
}
end
before do
allow_any_instance_of(Hash).to receive(:dig).and_call_original
allow_any_instance_of(Hash).to receive(:dig).with('attributes', 'sha256') do |form|
if form['id'] == '10-10EZ'
raise StandardError, 'Simulated processing error'
else
form.dig('attributes', 'sha256')
end
end
end
it 'catches the exception and logs the error' do
expect(Rails.logger).to receive(:error)
.with("#{meta_data} - Error processing form 10-10EZ: Simulated processing error")
expect { subject.perform }.not_to raise_error
end
end
context 'with empty forms array' do
let(:forms_response) { { 'data' => [] } }
it 'completes without performing cache operations' do
expect(Rails.cache).to receive(:read_multi).with(no_args).and_return({})
expect(Rails.cache).not_to receive(:write_multi)
expect { subject.perform }.not_to raise_error
end
end
# NOTE(review): a NoMethodError on a missing 'data' key is asserted as current
# behavior; the job presumably should guard this — confirm before changing.
context 'with missing forms data' do
let(:forms_response) { {} }
it 'handles missing data structure gracefully' do
expect { subject.perform }.to raise_error(NoMethodError)
end
end
end
end
end
|
0
|
code_files/vets-api-private/spec
|
code_files/vets-api-private/spec/sidekiq/delete_old_pii_logs_job_spec.rb
|
# frozen_string_literal: true
require 'rails_helper'
# Specs for DeleteOldPiiLogsJob: purges PersonalInformationLog rows older
# than the retention window (older than ~2 weeks per the assertions below).
RSpec.describe DeleteOldPiiLogsJob, type: :model do
# One record outside the retention window, one inside it.
let!(:old_log) { create(:personal_information_log, created_at: 3.weeks.ago) }
let!(:new_log) { create(:personal_information_log, created_at: 1.week.ago) }
describe '#perform' do
it 'deletes old records' do
expect { subject.perform }.to change(PersonalInformationLog, :count).from(2).to(1)
expect(model_exists?(new_log)).to be(true)
end
# NOTE(review): this example only asserts the end state; it does not observe
# batching itself — confirm against the job implementation.
it 'deletes old records in batches' do
expect { subject.perform }.to change { PersonalInformationLog.where('created_at < ?', 2.weeks.ago).count }.to(0)
expect(model_exists?(new_log)).to be(true)
end
it 'does not delete new records' do
subject.perform
expect(PersonalInformationLog.exists?(new_log.id)).to be(true)
end
end
end
|
0
|
code_files/vets-api-private/spec
|
code_files/vets-api-private/spec/sidekiq/load_average_days_for_claim_completion_job_spec.rb
|
# frozen_string_literal: true

require 'rails_helper'
require 'webmock/rspec'

# Specs for LoadAverageDaysForClaimCompletionJob: scrapes the VA.gov
# "after you file" page for the average-days figure and persists it.
RSpec.describe LoadAverageDaysForClaimCompletionJob, type: :job do
  describe '#perform' do
    describe 'should load data into the table' do
      let(:claim_page_url) { 'https://www.va.gov/disability/after-you-file-claim/' }

      # Stubs the VA.gov page with the given response and runs one job instance.
      def run_job_with(status, body)
        stub_request(:get, claim_page_url).to_return(status: status, body: body)
        described_class.new.perform
      end

      it 'queries the appropriate web page' do
        run_job_with(200, '>100.0 days</')
        assert_requested :get, claim_page_url, times: 1
      end

      it 'inserts the record into the database' do
        run_job_with(200, '>101.0 days</')
        latest = AverageDaysForClaimCompletion.order('created_at DESC').first
        expect(latest.present?).to be(true)
        expect(latest.average_days).to eq(101.0)
      end

      it 'does not perform an insert if the record fails to parse' do
        run_job_with(200, 'no match days')
        latest = AverageDaysForClaimCompletion.order('created_at DESC').first
        expect(latest.present?).to be(false)
      end

      it 'does not perform an insert if the web failure' do
        run_job_with(404, 'error back')
        latest = AverageDaysForClaimCompletion.order('created_at DESC').first
        expect(latest.present?).to be(false)
      end
    end
  end
end
|
0
|
code_files/vets-api-private/spec
|
code_files/vets-api-private/spec/sidekiq/process_file_job_spec.rb
|
# frozen_string_literal: true
require 'rails_helper'
# Specs for ProcessFileJob: reprocesses a stored CarrierWave file under a new
# uploader class and removes the original file.
RSpec.describe ProcessFileJob do
# NOTE(review): these helper uploader classes are defined at the top level, so
# the constants leak across the entire suite; consider stub_const — confirm
# nothing else depends on them before changing.
class TestUploader < CarrierWave::Uploader::Base
def store_dir
'store'
end
def filename
'filename'
end
end
# Second uploader with a different target filename, used as the "processed"
# destination returned by the mocked constantize below.
class TestUploader2 < CarrierWave::Uploader::Base
def store_dir
'store'
end
def filename
'filename2'
end
end
let(:test_uploader) { TestUploader.new }
# Stores a small fixture image under the original uploader.
def store_image
test_uploader.store!(
Rack::Test::UploadedFile.new('spec/fixtures/files/va.gif', 'image/gif')
)
end
describe '#perform' do
it 'saves the new processed file and delete the old file' do
store_image
test_class_string = double
expect(test_class_string).to receive(:constantize).and_return(TestUploader2)
ProcessFileJob.new.perform(test_class_string, test_uploader.store_dir, test_uploader.filename)
test_uploader.retrieve_from_store!('filename2')
expect(test_uploader.file.exists?).to be(true)
# test old file deleted
test_uploader.retrieve_from_store!('filename')
expect(test_uploader.file.exists?).to be(false)
end
end
end
|
0
|
code_files/vets-api-private/spec
|
code_files/vets-api-private/spec/sidekiq/form0781_state_snapshot_job_spec.rb
|
# frozen_string_literal: true
require 'rails_helper'
# Specs for Form0781StateSnapshotJob: snapshots counts of new- vs old-format
# 0781 submissions and in-progress forms relative to a rollout date, then
# emits the counts as StatsD gauges.
RSpec.describe Form0781StateSnapshotJob, type: :worker do
before do
Sidekiq::Job.clear_all
allow(Flipper).to receive(:enabled?).and_call_original
# Make sure tests use a fixed date for ROLLOUT_DATE
stub_const('Form0781StateSnapshotJob::ROLLOUT_DATE', Date.new(2025, 4, 2))
end
# Ensure modern_times is after ROLLOUT_DATE
let!(:modern_times) { Date.new(2025, 4, 15).to_time }
# Ensure olden_times is before ROLLOUT_DATE
let!(:olden_times) { Date.new(2025, 3, 1).to_time }
describe '0781 state logging' do
# Create test data for new 0781 forms
# (form_to_json returning "form0781v2" marks a submission as new-format).
let!(:in_progress_new_form) do
Timecop.freeze(modern_times) do
ipf = create(:in_progress_form, form_id: '21-526EZ')
form_data = JSON.parse(ipf.form_data)
form_data['view:mental_health_workflow_choice'] = 'optForOnlineForm0781'
ipf.update(form_data: form_data.to_json)
ipf
end
end
let!(:new_0781_submission) do
Timecop.freeze(modern_times) do
sub = create(:form526_submission)
allow(sub).to receive(:form_to_json).with(Form526Submission::FORM_0781).and_return('{"form0781v2": {}}')
allow(sub).to receive(:created_at).and_return(modern_times)
sub
end
end
let!(:new_0781_successful_submission) do
Timecop.freeze(modern_times) do
sub = create(:form526_submission, :with_one_succesful_job)
allow(sub).to receive(:form_to_json).with(Form526Submission::FORM_0781).and_return('{"form0781v2": {}}')
allow(sub).to receive(:created_at).and_return(modern_times)
allow(sub.form526_job_statuses.first).to receive_messages(job_class: 'SubmitForm0781')
sub
end
end
let!(:new_0781_failed_submission) do
Timecop.freeze(modern_times) do
sub = create(:form526_submission, :with_one_failed_job)
allow(sub).to receive(:form_to_json).with(Form526Submission::FORM_0781).and_return('{"form0781v2": {}}')
allow(sub).to receive(:created_at).and_return(modern_times)
allow(sub.form526_job_statuses.first).to receive_messages(
job_class: 'SubmitForm0781',
status: 'non_retryable_error'
)
sub
end
end
let!(:new_0781_primary_path) do
Timecop.freeze(modern_times) do
sub = create(:form526_submission, :with_submitted_claim_id)
allow(sub).to receive(:form_to_json).with(Form526Submission::FORM_0781).and_return('{"form0781v2": {}}')
allow(sub).to receive(:created_at).and_return(modern_times)
sub
end
end
let!(:new_0781_secondary_path) do
Timecop.freeze(modern_times) do
sub = create(:form526_submission)
allow(sub).to receive(:form_to_json).with(Form526Submission::FORM_0781).and_return('{"form0781v2": {}}')
allow(sub).to receive(:created_at).and_return(modern_times)
sub
end
end
# Create test data for old 0781 forms - these are BEFORE the rollout date
# (form_to_json returning "form0781" marks a submission as old-format).
let!(:in_progress_old_form) do
Timecop.freeze(olden_times) do
ipf = create(:in_progress_form, form_id: '21-526EZ')
form_data = JSON.parse(ipf.form_data)
form_data['view:selectable_ptsd_types'] = { 'PTSD_COMBAT' => true }
ipf.update(form_data: form_data.to_json)
ipf
end
end
let!(:old_0781_submission) do
Timecop.freeze(olden_times) do
sub = create(:form526_submission)
allow(sub).to receive(:form_to_json).with(Form526Submission::FORM_0781).and_return('{"form0781": {}}')
allow(sub).to receive(:created_at).and_return(olden_times)
sub
end
end
let!(:old_0781_successful_submission) do
Timecop.freeze(olden_times) do
sub = create(:form526_submission, :with_one_succesful_job)
allow(sub).to receive(:form_to_json).with(Form526Submission::FORM_0781).and_return('{"form0781": {}}')
allow(sub).to receive(:created_at).and_return(olden_times)
allow(sub.form526_job_statuses.first).to receive_messages(job_class: 'SubmitForm0781')
sub
end
end
let!(:old_0781_failed_submission) do
Timecop.freeze(olden_times) do
sub = create(:form526_submission, :with_one_failed_job)
allow(sub).to receive(:form_to_json).with(Form526Submission::FORM_0781).and_return('{"form0781": {}}')
allow(sub).to receive(:created_at).and_return(olden_times)
allow(sub.form526_job_statuses.first).to receive_messages(
job_class: 'SubmitForm0781',
status: 'non_retryable_error'
)
sub
end
end
# Create a new old-form submission AFTER the rollout date
let!(:new_old_submission) do
Timecop.freeze(modern_times) do
sub = create(:form526_submission)
allow(sub).to receive(:form_to_json).with(Form526Submission::FORM_0781).and_return('{"form0781": {}}')
allow(sub).to receive(:created_at).and_return(modern_times)
sub
end
end
# NOTE(review): this example stubs every query method AND load_snapshot_state,
# then asserts snapshot_state equals the stubbed value — it is largely
# self-fulfilling and exercises little real job logic; consider tightening.
it 'logs 0781 state metrics correctly' do
# Allow each method to return its expected values
allow_any_instance_of(described_class)
.to receive(:new_0781_in_progress_forms)
.and_return([in_progress_new_form.id])
allow_any_instance_of(described_class)
.to receive(:new_0781_submissions)
.and_return([
new_0781_submission.id,
new_0781_successful_submission.id,
new_0781_failed_submission.id,
new_0781_primary_path.id,
new_0781_secondary_path.id
])
allow_any_instance_of(described_class)
.to receive(:new_0781_successful_submissions)
.and_return([new_0781_successful_submission.id])
allow_any_instance_of(described_class)
.to receive(:new_0781_failed_submissions)
.and_return([new_0781_failed_submission.id])
allow_any_instance_of(described_class)
.to receive(:new_0781_primary_path_submissions)
.and_return([new_0781_primary_path.id])
allow_any_instance_of(described_class)
.to receive(:new_0781_secondary_path_submissions)
.and_return([
new_0781_submission.id,
new_0781_secondary_path.id
])
allow_any_instance_of(described_class)
.to receive(:old_0781_in_progress_forms)
.and_return([in_progress_old_form.id])
# Only returns the one submission that's after the rollout date
allow_any_instance_of(described_class)
.to receive(:old_0781_submissions)
.and_return([new_old_submission.id])
allow_any_instance_of(described_class)
.to receive(:old_0781_successful_submissions)
.and_return([])
allow_any_instance_of(described_class)
.to receive(:old_0781_failed_submissions)
.and_return([])
# Expected log with the correct data based on our setup
expected_log = {
new_0781_in_progress_forms: [in_progress_new_form.id],
new_0781_submissions: [
new_0781_submission.id,
new_0781_successful_submission.id,
new_0781_failed_submission.id,
new_0781_primary_path.id,
new_0781_secondary_path.id
],
new_0781_successful_submissions: [new_0781_successful_submission.id],
new_0781_failed_submissions: [new_0781_failed_submission.id],
new_0781_primary_path_submissions: [new_0781_primary_path.id],
new_0781_secondary_path_submissions: [
new_0781_submission.id,
new_0781_secondary_path.id
],
old_0781_in_progress_forms: [in_progress_old_form.id],
old_0781_submissions: [new_old_submission.id], # Only includes submission after rollout date
old_0781_successful_submissions: [],
old_0781_failed_submissions: []
}
# Mock the load_snapshot_state method to return our expected data
allow_any_instance_of(described_class).to receive(:load_snapshot_state).and_return(expected_log)
expect(described_class.new.snapshot_state).to eq(expected_log)
end
it 'writes counts as Stats D gauges' do
prefix = described_class::STATSD_PREFIX
# Create a mock snapshot_state result
mock_snapshot = {
new_0781_in_progress_forms: [1],
new_0781_submissions: [1, 2, 3, 4, 5],
new_0781_successful_submissions: [2],
new_0781_failed_submissions: [3],
new_0781_primary_path_submissions: [4],
new_0781_secondary_path_submissions: [1, 5],
old_0781_in_progress_forms: [6],
old_0781_submissions: [7, 8, 9],
old_0781_successful_submissions: [8],
old_0781_failed_submissions: [9]
}
# Stub the snapshot_state method to return our mock data
allow_any_instance_of(described_class).to receive(:snapshot_state).and_return(mock_snapshot)
# Expect StatsD.gauge to be called for each metric with the correct count
expect(StatsD).to receive(:gauge).with("#{prefix}.new_0781_in_progress_forms_count", 1)
expect(StatsD).to receive(:gauge).with("#{prefix}.new_0781_submissions_count", 5)
expect(StatsD).to receive(:gauge).with("#{prefix}.new_0781_successful_submissions_count", 1)
expect(StatsD).to receive(:gauge).with("#{prefix}.new_0781_failed_submissions_count", 1)
expect(StatsD).to receive(:gauge).with("#{prefix}.new_0781_primary_path_submissions_count", 1)
expect(StatsD).to receive(:gauge).with("#{prefix}.new_0781_secondary_path_submissions_count", 2)
expect(StatsD).to receive(:gauge).with("#{prefix}.old_0781_in_progress_forms_count", 1)
expect(StatsD).to receive(:gauge).with("#{prefix}.old_0781_submissions_count", 3)
expect(StatsD).to receive(:gauge).with("#{prefix}.old_0781_successful_submissions_count", 1)
expect(StatsD).to receive(:gauge).with("#{prefix}.old_0781_failed_submissions_count", 1)
described_class.new.perform
end
end
end
|
0
|
code_files/vets-api-private/spec
|
code_files/vets-api-private/spec/sidekiq/form526_submission_processing_report_job_spec.rb
|
# frozen_string_literal: true
require 'rails_helper'
# Specs for Form526SubmissionProcessingReportJob: buckets Form526Submission
# records created within the reporting window and logs counts per state.
RSpec.describe Form526SubmissionProcessingReportJob, type: :worker do
before do
Sidekiq::Job.clear_all
end
# "olden" records fall outside MAX_PENDING_TIME (and the report window);
# "modern" records are recent enough to be timeboxed.
let!(:olden_times) { (Form526Submission::MAX_PENDING_TIME + 1.day).ago }
let!(:modern_times) { 2.days.ago }
# Reporting window: the week ending at the start of today.
let!(:end_date) { Time.zone.today.beginning_of_day }
let!(:start_date) { end_date - 1.week }
describe '526 state logging' do
# Each fixture pair below creates the same submission shape twice: once inside
# the report window (modern) and once outside it (olden), so the report's
# timeboxing can be asserted.
let!(:new_unprocessed) do
Timecop.freeze(modern_times) do
create(:form526_submission)
end
end
let!(:old_unprocessed) do
Timecop.freeze(olden_times) do
create(:form526_submission)
end
end
let!(:new_primary_success) do
Timecop.freeze(modern_times) do
create(:form526_submission, :with_submitted_claim_id, :with_one_succesful_job)
end
end
let!(:old_primary_success) do
Timecop.freeze(olden_times) do
create(:form526_submission, :with_submitted_claim_id, :with_one_succesful_job)
end
end
let!(:new_backup_pending) do
Timecop.freeze(modern_times) do
create(:form526_submission, :backup_path, :with_failed_primary_job)
end
end
let!(:old_backup_pending) do
Timecop.freeze(olden_times) do
create(:form526_submission, :backup_path, :with_failed_primary_job)
end
end
let!(:new_backup_success) do
Timecop.freeze(modern_times) do
create(:form526_submission, :backup_path, :paranoid_success, :with_failed_primary_job)
end
end
let!(:old_backup_success) do
Timecop.freeze(olden_times) do
create(:form526_submission, :backup_path, :paranoid_success, :with_failed_primary_job)
end
end
let!(:new_backup_vbms) do
Timecop.freeze(modern_times) do
create(:form526_submission, :backup_path, :backup_accepted, :with_failed_primary_job)
end
end
let!(:old_backup_vbms) do
Timecop.freeze(olden_times) do
create(:form526_submission, :backup_path, :backup_accepted, :with_failed_primary_job)
end
end
let!(:new_backup_rejected) do
Timecop.freeze(modern_times) do
create(:form526_submission, :backup_path, :backup_rejected, :with_failed_primary_job)
end
end
let!(:old_backup_rejected) do
Timecop.freeze(olden_times) do
create(:form526_submission, :backup_path, :backup_rejected, :with_failed_primary_job)
end
end
# Both the primary and backup submission jobs exhausted their retries.
let!(:new_double_job_failure) do
Timecop.freeze(modern_times) do
create(:form526_submission, :with_failed_primary_job, :with_failed_backup_job)
end
end
let!(:old_double_job_failure) do
Timecop.freeze(olden_times) do
create(:form526_submission, :with_failed_primary_job, :with_failed_backup_job)
end
end
let!(:new_double_job_failure_remediated) do
Timecop.freeze(modern_times) do
create(:form526_submission, :with_failed_primary_job, :with_failed_backup_job, :remediated)
end
end
let!(:old_double_job_failure_remediated) do
Timecop.freeze(olden_times) do
create(:form526_submission, :with_failed_primary_job, :with_failed_backup_job, :remediated)
end
end
let!(:new_double_job_failure_de_remediated) do
Timecop.freeze(modern_times) do
create(:form526_submission, :with_failed_primary_job, :with_failed_backup_job, :no_longer_remediated)
end
end
let!(:old_double_job_failure_de_remediated) do
Timecop.freeze(olden_times) do
create(:form526_submission, :with_failed_primary_job, :with_failed_backup_job, :no_longer_remediated)
end
end
let!(:new_no_job_remediated) do
Timecop.freeze(modern_times) do
create(:form526_submission, :remediated)
end
end
let!(:old_no_job_remediated) do
Timecop.freeze(olden_times) do
create(:form526_submission, :remediated)
end
end
let!(:new_backup_paranoid) do
Timecop.freeze(modern_times) do
create(:form526_submission, :backup_path, :with_failed_primary_job, :paranoid_success)
end
end
let!(:old_backup_paranoid) do
Timecop.freeze(olden_times) do
create(:form526_submission, :backup_path, :with_failed_primary_job, :paranoid_success)
end
end
let!(:still_running_with_retryable_errors) do
Timecop.freeze(modern_times) do
create(:form526_submission, :with_one_failed_job)
end
end
# RARE EDGECASES
let!(:new_no_job_de_remediated) do
Timecop.freeze(modern_times) do
create(:form526_submission, :no_longer_remediated)
end
end
let!(:old_no_job_de_remediated) do
Timecop.freeze(olden_times) do
create(:form526_submission, :no_longer_remediated)
end
end
let!(:new_double_success) do
Timecop.freeze(modern_times) do
create(:form526_submission, :with_submitted_claim_id, :backup_path)
end
end
let!(:old_double_success) do
Timecop.freeze(olden_times) do
create(:form526_submission, :with_submitted_claim_id, :backup_path)
end
end
let!(:new_triple_success) do
Timecop.freeze(modern_times) do
create(:form526_submission, :with_submitted_claim_id, :backup_path, :remediated)
end
end
let!(:old_triple_success) do
Timecop.freeze(olden_times) do
create(:form526_submission, :with_submitted_claim_id, :backup_path, :remediated)
end
end
let!(:new_double_success_de_remediated) do
Timecop.freeze(modern_times) do
create(:form526_submission, :with_submitted_claim_id, :backup_path, :no_longer_remediated)
end
end
let!(:old_double_success_de_remediated) do
Timecop.freeze(olden_times) do
create(:form526_submission, :with_submitted_claim_id, :backup_path, :no_longer_remediated)
end
end
# Remediated, then de-remediated an hour later via a second remediation row.
let!(:new_remediated_and_de_remediated) do
sub = Timecop.freeze(modern_times) do
create(:form526_submission, :remediated)
end
Timecop.freeze(modern_times + 1.hour) do
create(:form526_submission_remediation,
form526_submission: sub,
lifecycle: ['i am no longer remediated'],
success: false)
end
sub
end
let!(:old_remediated_and_de_remediated) do
sub = Timecop.freeze(olden_times) do
create(:form526_submission, :remediated)
end
Timecop.freeze(olden_times + 1.hour) do
create(:form526_submission_remediation,
form526_submission: sub,
lifecycle: ['i am no longer remediated'],
success: false)
end
sub
end
# Asserts timeboxed_state returns only modern-window submission IDs, grouped
# by processing outcome; olden fixtures must be excluded from every bucket.
it 'logs 526 state metrics correctly' do
expected_log = {
timeboxed: [
still_running_with_retryable_errors.id,
new_unprocessed.id,
new_primary_success.id,
new_backup_pending.id,
new_backup_success.id,
new_backup_vbms.id,
new_backup_rejected.id,
new_double_job_failure.id,
new_double_job_failure_remediated.id,
new_double_job_failure_de_remediated.id,
new_no_job_remediated.id,
new_no_job_de_remediated.id,
new_backup_paranoid.id,
new_double_success.id,
new_triple_success.id,
new_double_success_de_remediated.id,
new_remediated_and_de_remediated.id
].sort,
timeboxed_primary_successes: [
new_primary_success.id,
new_triple_success.id,
new_double_success_de_remediated.id,
new_double_success.id
].sort,
timeboxed_exhausted_primary_job: [
new_backup_pending.id,
new_backup_success.id,
new_backup_vbms.id,
new_backup_rejected.id,
new_backup_paranoid.id,
new_double_job_failure.id,
new_double_job_failure_remediated.id,
new_double_job_failure_de_remediated.id
].sort,
timeboxed_exhausted_backup_job: [
new_double_job_failure_remediated.id,
new_double_job_failure_de_remediated.id,
new_double_job_failure.id
].sort,
timeboxed_incomplete_type: [
still_running_with_retryable_errors.id,
new_unprocessed.id,
new_backup_pending.id,
new_remediated_and_de_remediated.id,
new_no_job_de_remediated.id
].sort
}
expect(described_class.new.timeboxed_state).to eq(expected_log)
end
it 'converts the logs to counts where prefered' do
expected_log = {
state_log: {
timeboxed_count: 17,
timeboxed_primary_successes_count: 4,
timeboxed_exhausted_primary_job_count: 8,
timeboxed_exhausted_backup_job_count: 3,
timeboxed_incomplete_type_count: 5
},
start_date:,
end_date:
}
expect(Rails.logger).to receive(:info) do |label, log|
expect(label).to eq('Form 526 State Data')
expect(log).to eq(expected_log)
end
described_class.new.perform
end
end
end
|
0
|
code_files/vets-api-private/spec/sidekiq
|
code_files/vets-api-private/spec/sidekiq/copay_notifications/new_statement_notification_job_spec.rb
|
# frozen_string_literal: true
require 'rails_helper'
require 'debt_management_center/statement_identifier_service'
require 'debt_management_center/sidekiq/va_notify_email_job'
# Specs for CopayNotifications::NewStatementNotificationJob: notifies a veteran
# about a new copay statement after resolving their identity via MPI.
RSpec.describe CopayNotifications::NewStatementNotificationJob, type: :worker do
before do
Sidekiq::Job.clear_all
end
describe '#perform' do
# A statement payload keyed by EDIPI (the default identifier type).
let(:statement) do
{
'veteranIdentifier' => '492031291',
'identifierType' => 'edipi',
'facilityNum' => '123',
'facilityName' => 'VA Medical Center',
'statementDate' => '01/01/2023'
}
end
let(:personalisation) do
{
'icn' => '1234',
'first_name' => 'Guy'
}
end
# Bypass the real MPI lookup; the service just returns the personalisation.
before do
allow_any_instance_of(DebtManagementCenter::StatementIdentifierService)
.to receive(:get_mpi_data).and_return(personalisation)
end
# Email dispatch is intentionally paused, so the job must NOT enqueue a
# VANotifyEmailJob; the original positive expectation is kept commented out.
# Fix: description typo "frome" corrected to "from".
it 'sends a new mcp notification email job from edipi' do
  job = described_class.new
  # pausing until further notice
  expect { job.perform(statement) }
    .not_to change { DebtManagementCenter::VANotifyEmailJob.jobs.size }
    .from(0)
  # expect { job.perform(statement) }
  #   .to change { DebtManagementCenter::VANotifyEmailJob.jobs.size }
  #   .from(0)
  #   .to(1)
end
# Same paused-dispatch expectation, but for a statement keyed by VistA
# DFN + facility instead of EDIPI.
# Fix: description typo "frome" corrected to "from".
context 'veteran identifier is a vista id' do
  let(:icn) { '1234' }
  let(:statement) do
    {
      'veteranIdentifier' => '348530923',
      'identifierType' => 'dfn',
      'facilityNum' => '456',
      'facilityName' => 'VA Medical Center',
      'statementDate' => '01/01/2023'
    }
  end
  it 'sends a new mcp notification email job from facility and vista id' do
    job = described_class.new
    # pausing until further notice
    expect { job.perform(statement) }
      .not_to change { DebtManagementCenter::VANotifyEmailJob.jobs.size }
      .from(0)
    # expect { job.perform(statement) }
    #   .to change { DebtManagementCenter::VANotifyEmailJob.jobs.size }
    #   .from(0)
    #   .to(1)
  end
end
# A statement missing the veteranIdentifier/facilityNum keys must raise a
# MalformedMCPStatement rather than being processed.
context 'with malformed statement' do
let(:statement) do
{
'identifierType' => 'dfn',
'facilityName' => 'VA Medical Center',
'statementDate' => '01/01/2023'
}
end
it 'throws an error' do
job = described_class.new
expect { job.perform(statement) }.to raise_error do |error|
expect(error).to be_instance_of(DebtManagementCenter::StatementIdentifierService::MalformedMCPStatement)
end
end
end
# RetryableError routes the job back to the retry queue (fixed 10s backoff).
context 'with retryable error' do
subject(:config) { described_class }
let(:error) { OpenStruct.new(message: 'oh shoot') }
let(:exception) { DebtManagementCenter::StatementIdentifierService::RetryableError.new(error) }
it 'sends job to retry queue' do
expect(config.sidekiq_retry_in_block.call(0, exception, nil)).to eq(10)
end
end
# When retries run out, the exhausted block must bump both StatsD counters
# and log the exception with its backtrace.
context 'with retries exhausted' do
subject(:config) { described_class }
let(:error) { OpenStruct.new(message: 'oh shoot') }
let(:exception) do
e = DebtManagementCenter::StatementIdentifierService::RetryableError.new(error)
allow(e).to receive(:backtrace).and_return(['line 1', 'line 2', 'line 3'])
e
end
# Shape of the Sidekiq job hash passed to sidekiq_retries_exhausted_block.
let(:msg) do
{
'class' => 'YourJobClassName',
'args' => [statement],
'jid' => '12345abcde',
'retry_count' => 5
}
end
it 'logs the error' do
expected_log_message = <<~LOG
NewStatementNotificationJob retries exhausted:
Exception: #{exception.class} - #{exception.message}
Backtrace: #{exception.backtrace.join("\n")}
LOG
statsd_key = CopayNotifications::NewStatementNotificationJob::STATSD_KEY_PREFIX
["#{statsd_key}.failure", "#{statsd_key}.retries_exhausted"].each do |key|
expect(StatsD).to receive(:increment).with(key)
end
expect(Rails.logger).to receive(:error).with(expected_log_message)
config.sidekiq_retries_exhausted_block.call(msg, exception)
end
end
# Non-retryable errors (e.g. malformed statement) kill the job outright.
context 'with any other error' do
subject(:config) { described_class }
let(:exception) { DebtManagementCenter::StatementIdentifierService::MalformedMCPStatement.new(nil) }
it 'kills the job' do
expect(config.sidekiq_retry_in_block.call(0, exception, nil)).to eq(:kill)
end
end
end
end
|
0
|
code_files/vets-api-private/spec/sidekiq
|
code_files/vets-api-private/spec/sidekiq/copay_notifications/parse_new_statements_job_spec.rb
|
# frozen_string_literal: true
require 'rails_helper'
# Specs for CopayNotifications::ParseNewStatementsJob: decodes a Base64 JSON
# payload of copay statements and fans out one NewStatementNotificationJob
# per unique veteran identifier, spaced by a configured interval.
RSpec.describe CopayNotifications::ParseNewStatementsJob, type: :worker do
before do
Sidekiq::Job.clear_all
end
describe '#perform' do
# Fixture contains two statements for two distinct identifiers.
let(:statements_json_byte) { Base64.encode64(File.read('spec/fixtures/medical_copays/new_statements.json')) }
it 'parses and creates individual new statement jobs' do
job = described_class.new
expect { job.perform(statements_json_byte) }
.to change { CopayNotifications::NewStatementNotificationJob.jobs.size }
.from(0)
.to(2)
end
# Statements sharing an identifier must be de-duplicated to one job.
context 'duplicate identifiers' do
let(:statements_json_byte) do
Base64.encode64(File.read('spec/fixtures/medical_copays/duplicate_new_statements.json'))
end
it 'only creates a single job for duplicate identifiers' do
job = described_class.new
expect { job.perform(statements_json_byte) }
.to change { CopayNotifications::NewStatementNotificationJob.jobs.size }
.from(0)
.to(1)
end
end
# With BATCH_SIZE forced to 1, each batch is scheduled one job_interval later.
context 'batch processing' do
let(:statements_json_byte) do
Base64.encode64(File.read('spec/fixtures/medical_copays/new_statements.json'))
end
let(:job_interval) { Settings.mcp.notifications.job_interval }
before do
stub_const('CopayNotifications::ParseNewStatementsJob::BATCH_SIZE', 1)
end
it 'starts the jobs at different times' do
job = described_class.new
statement_json = Oj.load(Base64.decode64(statements_json_byte))
first_statement = statement_json[0]
expect(CopayNotifications::NewStatementNotificationJob).to receive(:perform_in).with(0, first_statement)
second_statement = statement_json[1]
expect(CopayNotifications::NewStatementNotificationJob).to receive(:perform_in).with(job_interval,
second_statement)
job.perform(statements_json_byte)
end
end
context 'with retries exhausted' do
let(:config) { described_class }
let(:exception) do
e = StandardError.new('Test error')
allow(e).to receive(:backtrace).and_return(%w[backtrace1 backtrace2])
e
end
let(:msg) do
{
'class' => 'YourJobClassName',
'args' => [statement],
'jid' => '12345abcde',
'retry_count' => 5
}
end
it 'logs the error' do
expected_log_message = <<~LOG
CopayNotifications::ParseNewStatementsJob retries exhausted:
Exception: #{exception.class} - #{exception.message}
Backtrace: #{exception.backtrace.join("\n")}
LOG
expect(StatsD).to receive(:increment).with(
"#{CopayNotifications::ParseNewStatementsJob::STATSD_KEY_PREFIX}.retries_exhausted"
)
expect(Rails.logger).to receive(:error).with(expected_log_message)
config.sidekiq_retries_exhausted_block.call(statements_json_byte, exception)
end
end
end
end
|
0
|
code_files/vets-api-private/spec/sidekiq
|
code_files/vets-api-private/spec/sidekiq/kms_key_rotation/rotate_keys_job_spec.rb
|
# frozen_string_literal: true

require 'rails_helper'

GeneralError = Class.new(StandardError)

# Specs for KmsKeyRotation::RotateKeysJob: re-encrypts each record's KMS key
# without touching its updated_at timestamp.
# NOTE(review): GeneralError above appears unused in this file — confirm
# nothing else requires it before removing.
RSpec.describe KmsKeyRotation::RotateKeysJob, type: :job do
  let(:rotation_job) { described_class.new }
  let(:claims) { create_list(:fake_saved_claim, 3) }
  let(:claim_gids) { claims.map { |claim| claim.to_global_id } }

  describe '#perform' do
    it 'calls rotate_kms_key! on each record' do
      allow(GlobalID::Locator).to receive(:locate_many).and_return(claims)
      expect(claims).to all(receive(:rotate_kms_key!))
      rotation_job.perform(claim_gids)
    end
  end

  describe '#rotate_kms_key' do
    it 'rotating keys without updating timestamps' do
      claim = claims.first
      timestamp_before = claim.updated_at
      key_before = claim.encrypted_kms_key
      claim.rotate_kms_key!
      expect(claim.updated_at).to eq(timestamp_before)
      expect(claim.encrypted_kms_key).not_to eq(key_before)
    end
  end
end
|
0
|
code_files/vets-api-private/spec/sidekiq
|
code_files/vets-api-private/spec/sidekiq/kms_key_rotation/batch_initiator_job_spec.rb
|
# frozen_string_literal: true
require 'rails_helper'
# Specs for KmsKeyRotation::BatchInitiatorJob: flags records for KMS key
# rotation and enqueues RotateKeysJob batches for flagged records.
RSpec.describe KmsKeyRotation::BatchInitiatorJob, type: :job do
let(:job) { described_class.new }
# 6 + 1 + 5 = 12 flagged records across three models; with MAX_RECORDS_PER_JOB
# stubbed to 2 below, that yields ceil per model = 3 + 1 + 3 = 7 batch jobs.
let!(:claim_records) { create_list(:fake_saved_claim, 6, needs_kms_rotation: true) }
let!(:form_1095_b_records) { create(:form1095_b, needs_kms_rotation: true) }
let!(:supplemental_claim_records) { create_list(:supplemental_claim, 5, needs_kms_rotation: true) }
before do
  # Fix: use stub_const instead of remove_const/const_set. The original
  # mutated the real constants permanently, leaking the test values into
  # every subsequent spec and emitting "already initialized constant"
  # warnings; stub_const is scoped to each example and auto-restored.
  stub_const('KmsKeyRotation::BatchInitiatorJob::MAX_RECORDS_PER_BATCH', 10)
  stub_const('KmsKeyRotation::BatchInitiatorJob::MAX_RECORDS_PER_JOB', 2)
  allow_any_instance_of(described_class).to receive(:models)
    .and_return([SavedClaim, Form1095B, AppealsApi::SupplementalClaim])
  # Force a version mismatch so every record needs rotation.
  allow_any_instance_of(KmsEncryptedModelPatch).to receive(:kms_version).and_return('other_version')
  KmsKeyRotation::RotateKeysJob.jobs.clear
end
describe '#perform' do
# On the annual rotation date, every model is re-flagged before enqueueing.
context 'on the rotation date (Oct 12)' do
before do
allow(job).to receive(:rotation_date?).and_return(true)
end
it 'flags all eligible models for rotation' do
expect(SavedClaim).to receive(:update_all).with(needs_kms_rotation: true)
expect(Form1095B).to receive(:update_all).with(needs_kms_rotation: true)
expect(AppealsApi::SupplementalClaim).to receive(:update_all).with(needs_kms_rotation: true)
job.perform
end
it 'then enqueues RotateKeysJob jobs for each flagged record' do
job.perform
expect(KmsKeyRotation::RotateKeysJob.jobs.size).to eq(7)
end
end
# Off the rotation date, nothing new is flagged but already-flagged records
# are still processed.
context 'when not the rotation date' do
before do
allow(job).to receive(:rotation_date?).and_return(false)
end
it 'does not flag any records for rotation' do
expect(SavedClaim).not_to receive(:update_all)
expect(Form1095B).not_to receive(:update_all)
expect(AppealsApi::SupplementalClaim).not_to receive(:update_all)
job.perform
end
it 'still enqueues RotateKeysJob jobs for pre-flagged records' do
job.perform
expect(KmsKeyRotation::RotateKeysJob.jobs.size).to eq(7)
end
end
# Each enqueued batch carries at most MAX_RECORDS_PER_JOB record GIDs.
it 'creates RotateKeysJob jobs with the correct batch size' do
allow(job).to receive(:rotation_date?).and_return(true)
job.perform
rotate_jobs = KmsKeyRotation::RotateKeysJob.jobs
expect(rotate_jobs.size).to eq(7)
expect(rotate_jobs.first['class']).to eq('KmsKeyRotation::RotateKeysJob')
expect(rotate_jobs.first['args'].first.size).to eq(KmsKeyRotation::BatchInitiatorJob::MAX_RECORDS_PER_JOB)
end
end
end
|
0
|
code_files/vets-api-private/spec/sidekiq
|
code_files/vets-api-private/spec/sidekiq/hca/health_facilities_import_job_spec.rb
|
# frozen_string_literal: true
require 'rails_helper'
# Specs for the job that syncs the HealthFacilities table from the Lighthouse
# facilities API, cross-referencing StdState / StdInstitutionFacility records.
RSpec.describe HCA::HealthFacilitiesImportJob, type: :worker do
describe '#perform' do
let(:statsd_key_prefix) { HCA::Service::STATSD_KEY_PREFIX }
# First page of a two-page paginated Lighthouse response (3 facilities).
let(:mock_get_facilities_page_one) do
FacilitiesApi::V2::Lighthouse::Response.new(
{
'data' => [
{ 'id' => 'vha_635HB', 'attributes' => { 'name' => 'My Fake VA Clinic' } },
{ 'id' => 'vha_463GA', 'attributes' => { 'name' => 'Yet Another Clinic Name' } },
{ 'id' => 'vha_499', 'attributes' => { 'name' => 'My Great New VA Clinic Name' } }
],
'meta' => { 'pagination' => { 'currentPage' => 1, 'perPage' => 1000, 'totalPages' => 2,
'totalEntries' => 4 } }
}.to_json, 200
).facilities
end
# Second (final) page of the paginated response.
let(:mock_get_facilities_page_two) do
FacilitiesApi::V2::Lighthouse::Response.new(
{
'data' => [
{ 'id' => 'vha_463GE', 'attributes' => { 'name' => 'My Other Fake VA Clinic' } }
],
'meta' => { 'pagination' => { 'currentPage' => 2, 'perPage' => 2, 'totalPages' => 2, 'totalEntries' => 4 } }
}.to_json, 200
).facilities
end
# Station numbers here line up with the Lighthouse ids above (vha_<station>).
let(:mock_institution_data) do
[
{ name: 'MY FAKE VA CLINIC', station_number: '635HB', street_state_id: 'AK' },
{ name: 'YET ANOTHER CLINIC NAME', station_number: '463GA', street_state_id: 'OH' },
{ name: 'MY OLD VA CLINIC NAME', station_number: '499', street_state_id: 'NY' },
{ name: 'MY OTHER FAKE VA CLINIC', station_number: '463GE', street_state_id: 'FL' }
]
end
let(:lighthouse_service) { double('FacilitiesApi::V2::Lighthouse::Client') }
before do
# Setup std_state and std_institution data
mock_institution_data.each_with_index do |institution, i|
street_state_id = i + 1
create(:std_state, postal_name: institution[:street_state_id], id: street_state_id)
create(:std_institution_facility, name: institution[:name],
station_number: institution[:station_number],
street_state_id:)
end
# Add existing health_facility record
create(:health_facility, name: 'My Fake VA Clinic',
station_number: mock_institution_data.first[:station_number],
postal_name: mock_institution_data.first[:street_state_id])
# Add existing health_facility record with stale name
create(:health_facility, name: 'My Old VA Clinic Name',
station_number: mock_institution_data.third[:station_number],
postal_name: mock_institution_data.third[:street_state_id])
# Add existing health_facility record that is no longer returned in the api call
create(:health_facility, name: 'My Really Old VA Clinic Name',
station_number: '111HB',
postal_name: 'OH')
allow(FacilitiesApi::V2::Lighthouse::Client).to receive(:new).and_return(lighthouse_service)
# Sequential stub: first call returns page one, second call page two — order matters.
allow(lighthouse_service).to receive(:get_facilities)
.and_return(mock_get_facilities_page_one, mock_get_facilities_page_two)
allow(Rails.logger).to receive(:info)
allow(StatsD).to receive(:increment)
end
it 'has a retry count of 10' do
expect(described_class.get_sidekiq_options['retry']).to eq(10)
end
context 'success' do
it 'updates HealthFacilities table without duplicating existing records' do
expect(Rails.logger).to receive(:info).with(
'[HCA] - Job started with 3 existing health facilities.'
)
expect(Rails.logger).to receive(:info).with(
'[HCA] - Job ended with 3 health facilities.'
)
expect(Rails.logger).to receive(:info).with(
'[HCA] - Deleted 1 health facilities not present in import.'
)
expect(StatsD).to receive(:increment).with("#{statsd_key_prefix}.health_facilities_import_job_complete")
# Net count is unchanged: one stale row is deleted, one new row is added.
expect do
described_class.new.perform
end.not_to change(HealthFacility, :count)
station_numbers = mock_institution_data.map { |institution| institution[:station_number] }
expect(HealthFacility
.where(station_number: station_numbers)
.pluck(:name)).to contain_exactly(
'My Fake VA Clinic',
'Yet Another Clinic Name',
'My Great New VA Clinic Name' # Validates name is updated from Lighthouse api response
)
end
context 'pagination' do
mock_per_page = 2
before { stub_const("#{described_class}::PER_PAGE", mock_per_page) }
it 'fetches multiple pages of facilities' do
expect(lighthouse_service).to receive(:get_facilities).with(
type: 'health', per_page: mock_per_page,
page: 1, mobile: false
).and_return(mock_get_facilities_page_one)
expect(lighthouse_service).to receive(:get_facilities).with(
type: 'health', per_page: mock_per_page,
page: 2, mobile: false
).and_return(mock_get_facilities_page_two)
expect do
described_class.new.perform
end.to change(HealthFacility, :count).by(1)
# Verify the correct facilities are added
expect(HealthFacility.pluck(:name)).to contain_exactly(
'My Fake VA Clinic',
'Yet Another Clinic Name',
'My Great New VA Clinic Name',
'My Other Fake VA Clinic'
)
end
end
end
context 'error' do
it 'logs errors when API call fails' do
expect(lighthouse_service).to receive(:get_facilities).and_raise(StandardError, 'something broke')
expect(Rails.logger).to receive(:info).with(
'[HCA] - Job started with 3 existing health facilities.'
)
expect(Rails.logger).to receive(:error).with(
"[HCA] - Error occurred in #{described_class.name}: something broke"
)
# The job wraps the original error in a RuntimeError so Sidekiq retries it.
expect do
described_class.new.perform
end.to raise_error(RuntimeError, "Failed to import health facilities in #{described_class.name}")
end
describe 'when retries are exhausted' do
it 'logs error and increments StatsD' do
described_class.within_sidekiq_retries_exhausted_block do
expect(Rails.logger).to receive(:error).with(
"[HCA] - #{described_class.name} failed with no retries left."
)
expect(StatsD).to receive(:increment).with(
"#{statsd_key_prefix}.health_facilities_import_job_failed_no_retries"
)
end
end
end
end
context 'std_states table is empty' do
before do
StdState.destroy_all
end
it 'enqueues IncomeLimits::StdStateImport and raises error' do
expect(Rails.logger).to receive(:info).with(
'[HCA] - Job started with 3 existing health facilities.'
)
expect(Rails.logger).to receive(:error).with(
"[HCA] - Error occurred in #{described_class.name}: StdStates missing – triggered import and retrying job"
)
import_job = instance_double(IncomeLimits::StdStateImport)
expect(IncomeLimits::StdStateImport).to receive(:new).and_return(import_job)
expect(import_job).to receive(:perform)
expect do
described_class.new.perform
end.to raise_error(RuntimeError, "Failed to import health facilities in #{described_class.name}")
end
end
end
end
|
0
|
code_files/vets-api-private/spec/sidekiq
|
code_files/vets-api-private/spec/sidekiq/hca/log_email_diff_job_spec.rb
|
# frozen_string_literal: true

require 'rails_helper'

# Specs for the job that compares an in-progress 10-10EZ form's email with the
# user's VA Profile email and logs whether they match.
RSpec.describe HCA::LogEmailDiffJob, type: :job do
  let!(:user) { create(:user, :loa3, :with_terms_of_use_agreement) }
  let!(:in_progress_form) do
    create(:in_progress_1010ez_form_with_email, user_uuid: user.uuid, user_account_id: user.user_account_uuid)
  end

  before do
    allow(User).to receive(:find).with(user.uuid).and_return(user)
  end

  # Shared example: the job neither increments StatsD nor records a new
  # FormEmailMatchesProfileLog row.
  def self.expect_does_nothing
    it 'does nothing' do
      # FIX: the original set a negative `find_or_create_by` message expectation
      # AFTER invoking `subject`, so it could never fail. A count-change check
      # around the call asserts the real post-condition (no row created) and
      # stays valid even where the job legitimately calls find_or_create_by on
      # an already-existing row.
      expect(StatsD).not_to receive(:increment)
      expect do
        subject
      end.not_to change(FormEmailMatchesProfileLog, :count)
    end
  end

  # Shared example: the job increments the statsd metric tagged with `tag`
  # ("same" or "different") and leaves the form in place.
  def self.expect_email_tag(tag)
    it "logs that email is #{tag}" do
      expect do
        subject
      end.to trigger_statsd_increment("api.1010ez.in_progress_form_email.#{tag}")
      expect(InProgressForm.where(user_uuid: user.uuid, user_account_id: user.user_account_uuid,
                                  id: in_progress_form.id)).to exist
    end
  end

  describe '#perform' do
    subject { described_class.new.perform(in_progress_form.id, user.uuid, user.user_account_uuid) }

    context 'when the form has been deleted' do
      before do
        in_progress_form.destroy!
      end

      expect_does_nothing
    end

    context 'when form email is present' do
      context 'when va profile email is different' do
        expect_email_tag('different')
      end

      context 'when va profile is the same' do
        before do
          # Match differs only in case — comparison is case-insensitive.
          allow(user).to receive(:va_profile_email).and_return('Email@email.com')
        end

        expect_email_tag('same')

        context 'when FormEmailMatchesProfileLog already exists' do
          before do
            FormEmailMatchesProfileLog.create(user_uuid: user.uuid, in_progress_form_id: in_progress_form.id,
                                              user_account_id: user.user_account_uuid)
          end

          expect_does_nothing
        end

        context 'when FormEmailMatchesProfileLog find_or_create_by returns model' do
          before do
            # An unpersisted model simulates find_or_create_by failing to save.
            form_email_matches_profile_log = FormEmailMatchesProfileLog.new(
              user_uuid: user.uuid,
              in_progress_form_id: in_progress_form.id,
              user_account_id: user.user_account_uuid
            )
            allow(FormEmailMatchesProfileLog).to receive(:find_or_create_by).and_return(form_email_matches_profile_log)
          end

          it 'does not increment statsd' do
            expect(StatsD).not_to receive(:increment)
            subject
          end
        end
      end

      context 'when va profile email is blank' do
        before do
          expect(user).to receive(:va_profile_email).and_return(nil)
        end

        expect_email_tag('different')
      end
    end

    context 'when form email is blank' do
      before do
        in_progress_form.update!(
          form_data: JSON.parse(in_progress_form.form_data).except('email').to_json
        )
      end

      expect_does_nothing
    end
  end
end
|
0
|
code_files/vets-api-private/spec/sidekiq
|
code_files/vets-api-private/spec/sidekiq/hca/ezr_submission_job_spec.rb
|
# frozen_string_literal: true
require 'rails_helper'
require 'sidekiq'
require 'sidekiq/job_retry'
# Specs for the async 10-10EZR submission job: success path, enrollment-system
# validation failures, non-retryable Ox parse errors, and retries-exhausted
# handling (including the VANotify failure email behind a Flipper flag).
RSpec.describe HCA::EzrSubmissionJob, type: :job do
let(:user) { create(:evss_user, :loa3, icn: '1013032368V065534') }
let(:form) do
get_fixture('form1010_ezr/valid_form')
end
# The job receives the form Lockbox-encrypted, mirroring production enqueueing.
let(:encrypted_form) do
HealthCareApplication::LOCKBOX.encrypt(form.to_json)
end
let(:ezr_service) { double }
let(:tags) { described_class::DD_ZSF_TAGS }
let(:form_id) { described_class::FORM_ID }
let(:api_key) { Settings.vanotify.services.health_apps_1010.api_key }
let(:failure_email_template_id) { Settings.vanotify.services.health_apps_1010.template_id.form1010_ezr_failure_email }
# Positional argument list expected by VANotify::EmailJob.perform_async.
let(:failure_email_template_params) do
[
form['email'],
failure_email_template_id,
{
'salutation' => "Dear #{form.dig('veteranFullName', 'first')},"
},
api_key,
{
callback_metadata: {
notification_type: 'error',
form_number: form_id,
statsd_tags: tags
}
}
]
end
# Helper: expect the failure email plus its StatsD counter.
def expect_submission_failure_email_and_statsd_increments
expect(VANotify::EmailJob).to receive(:perform_async).with(*failure_email_template_params)
expect(StatsD).to receive(:increment).with('api.1010ezr.submission_failure_email_sent')
end
# Helper: negative counterpart of the above.
def dont_expect_submission_failure_email_and_statsd_increments
expect(VANotify::EmailJob).not_to receive(:perform_async)
expect(StatsD).not_to receive(:increment).with('api.1010ezr.submission_failure_email_sent')
end
describe 'when retries are exhausted' do
before do
Flipper.enable(:ezr_use_va_notify_on_submission_failure)
end
after do
Flipper.disable(:ezr_use_va_notify_on_submission_failure)
end
context 'when the parsed form is not present' do
it 'only increments StatsD' do
msg = {
'args' => [HealthCareApplication::LOCKBOX.encrypt({}.to_json), nil]
}
described_class.within_sidekiq_retries_exhausted_block(msg) do
allow(StatsD).to receive(:increment)
expect(StatsD).to receive(:increment).with('api.1010ezr.failed_wont_retry')
expect(described_class).not_to receive(:send_failure_email)
end
end
end
context 'when the parsed form is present' do
context 'the send failure email flipper is enabled' do
it 'tracks the errors and sends the failure email' do
msg = {
'args' => [encrypted_form, nil]
}
described_class.within_sidekiq_retries_exhausted_block(msg) do
allow(VANotify::EmailJob).to receive(:perform_async)
expect(StatsD).to receive(:increment).with('api.1010ezr.failed_wont_retry')
expect_submission_failure_email_and_statsd_increments
end
# The decrypted form is preserved in a PersonalInformationLog for triage.
pii_log = PersonalInformationLog.last
expect(pii_log.error_class).to eq('Form1010Ezr FailedWontRetry')
expect(pii_log.data).to eq(form)
end
it 'logs the errors' do
msg = {
'args' => [encrypted_form, nil]
}
described_class.within_sidekiq_retries_exhausted_block(msg) do
allow(VANotify::EmailJob).to receive(:perform_async)
# Only initials are logged — no PII in Rails logs.
expect_any_instance_of(Vets::SharedLogging).to receive(:log_message_to_rails).with(
'[10-10EZR] total failure',
:error,
{
first_initial: 'F',
middle_initial: 'M',
last_initial: 'Z'
}
)
end
end
it 'does not send the failure notification if email is blank' do
form['email'] = nil
msg = {
'args' => [encrypted_form, nil]
}
described_class.within_sidekiq_retries_exhausted_block(msg) do
expect(VANotify::EmailJob).not_to receive(:perform_async)
expect(StatsD).not_to receive(:increment).with('api.1010ezr.submission_failure_email_sent')
end
end
end
context 'the send failure email flipper is disabled' do
it 'does not send the email' do
msg = {
'args' => [encrypted_form, nil]
}
Flipper.disable(:ezr_use_va_notify_on_submission_failure)
described_class.within_sidekiq_retries_exhausted_block(msg) do
expect(VANotify::EmailJob).not_to receive(:perform_async).with(*failure_email_template_params)
expect(StatsD).not_to receive(:increment).with('api.1010ezr.submission_failure_email_sent')
end
end
end
end
end
describe '#perform' do
subject do
described_class.new.perform(encrypted_form, user.uuid)
end
before do
allow(User).to receive(:find).with(user.uuid).and_return(user)
allow(Form1010Ezr::Service).to receive(:new).with(user).once.and_return(ezr_service)
end
context 'when submission has an error' do
context 'with an enrollment system validation error' do
let(:error) { HCA::SOAPParser::ValidationError }
it "increments StatsD, creates a 'PersonalInformationLog', logs the submission failure, " \
'and sends a failure email' do
allow(ezr_service).to receive(:submit_sync).with(form).once.and_raise(error)
allow(StatsD).to receive(:increment)
expect(StatsD).to receive(:increment).with('api.1010ezr.enrollment_system_validation_error')
expect_submission_failure_email_and_statsd_increments
subject
personal_information_log = PersonalInformationLog.last
expect(personal_information_log.error_class).to eq('Form1010Ezr EnrollmentSystemValidationFailure')
expect(personal_information_log.data).to eq(form)
end
it 'logs the error' do
allow(ezr_service).to receive(:submit_sync).with(form).once.and_raise(error)
expect(HCA::EzrSubmissionJob).to receive(:log_exception_to_rails)
expect(Form1010Ezr::Service).to receive(:log_submission_failure).with(
form,
'[10-10EZR] failure'
)
subject
end
end
context 'with an Ox::ParseError' do
let(:error_msg) { 'invalid format, elements overlap at line 1, column 212 [parse.c:626]' }
let(:full_log_msg) { "Form1010Ezr FailedDidNotRetry: #{error_msg}" }
before do
allow(User).to receive(:find).with(user.uuid).and_return(user)
allow(StatsD).to receive(:increment)
allow(Rails.logger).to receive(:info)
allow(VANotify::EmailJob).to receive(:perform_async)
allow(Form1010Ezr::Service).to receive(:new).with(user).once.and_return(ezr_service)
allow(ezr_service).to receive(:submit_sync).and_raise(Ox::ParseError.new(error_msg))
end
it 'increments StatsD, creates a PersonalInformationLog, logs the error, and does not retry' do
expect(StatsD).to receive(:increment).with('api.1010ezr.failed_did_not_retry')
expect(Rails.logger).to receive(:info).with(full_log_msg)
expect(PersonalInformationLog).to receive(:create!).with(
data: form,
error_class: 'Form1010Ezr FailedDidNotRetry'
)
# The Sidekiq::JobRetry::Skip error will fail the job and not retry it
expect { subject }.to raise_error(Sidekiq::JobRetry::Skip)
end
context 'when the send failure email flipper is enabled' do
before do
allow(Flipper).to receive(:enabled?).with(:ezr_use_va_notify_on_submission_failure).and_return(true)
end
it 'sends a failure email, and does not retry' do
expect { subject }.to raise_error(Sidekiq::JobRetry::Skip)
end
end
context 'when the send failure email flipper is disabled' do
before do
allow(Flipper).to receive(:enabled?).with(:ezr_use_va_notify_on_submission_failure).and_return(false)
end
# NOTE(review): description says "sends a failure email" but the helper asserts
# the opposite (flipper disabled) — the wording looks copy-pasted; verify intent.
it 'sends a failure email, and does not retry' do
dont_expect_submission_failure_email_and_statsd_increments
expect { subject }.to raise_error(Sidekiq::JobRetry::Skip)
end
end
end
context 'with any other error' do
let(:error) { Common::Client::Errors::HTTPError }
it 'logs the retry' do
allow(ezr_service).to receive(:submit_sync).with(form).once.and_raise(error)
expect { subject }.to trigger_statsd_increment(
'api.1010ezr.async.retries'
).and raise_error(error)
end
end
end
context 'with a successful submission' do
it 'calls the service' do
expect(ezr_service).to receive(:submit_sync).with(form)
subject
end
end
end
end
|
0
|
code_files/vets-api-private/spec/sidekiq
|
code_files/vets-api-private/spec/sidekiq/hca/submission_job_spec.rb
|
# frozen_string_literal: true
require 'rails_helper'
# Specs for the async 10-10EZ submission job: retry notifications, the
# retries-exhausted block, and the success/validation-error perform paths.
RSpec.describe HCA::SubmissionJob, type: :job do
let(:user) { create(:user) }
let(:user_identifier) { HealthCareApplication.get_user_identifier(user) }
let(:health_care_application) { create(:health_care_application) }
let(:form) do
{
foo: true,
email: 'foo@example.com',
veteranFullName: {
first: 'first',
last: 'last'
}
}.deep_stringify_keys
end
# The job is enqueued with the Lockbox-encrypted form, as in production.
let(:encrypted_form) { HealthCareApplication::LOCKBOX.encrypt(form.to_json) }
let(:google_analytics_client_id) { '123456789' }
# Shape of a successful enrollment-system response.
let(:result) do
{
formSubmissionId: 123,
timestamp: '2017-08-03 22:02:18 -0400'
}
end
let(:hca_service) { double }
describe '#notify' do
subject(:notify) { described_class.new.notify(params) }
let(:tags) { ["health_care_application_id:#{health_care_application.id}"] }
let(:args) do
[
user_identifier,
encrypted_form,
health_care_application.id,
google_analytics_client_id
]
end
before { allow(StatsD).to receive(:increment) }
context 'retry_count is not 9' do
let(:params) { { 'retry_count' => 5, 'args' => args } }
it 'does not increment failed_ten_retries statsd' do
expect(StatsD).not_to receive(:increment).with('api.1010ez.async.failed_ten_retries', tags:)
notify
end
end
# retry_count is zero-based, so 9 means the tenth attempt just failed.
context 'retry_count is 9' do
let(:params) { { 'retry_count' => 9, 'args' => args } }
it 'increments failed_ten_retries statsd' do
expect(StatsD).to receive(:increment).with('api.1010ez.async.failed_ten_retries', tags:)
notify
end
end
end
it 'returns an array of integers from retry_limits_for_notification' do
expect(described_class.new.retry_limits_for_notification).to eq([10])
end
describe 'when job has failed' do
# Sidekiq death-handler message: args mirror the perform signature.
let(:msg) do
{
'args' => [nil, encrypted_form, health_care_application.id, 'google_analytics_client_id']
}
end
it 'passes unencrypted form to health_care_application' do
expect_any_instance_of(HealthCareApplication).to receive(:update!).with(
state: 'failed',
form: form.to_json,
google_analytics_client_id: 'google_analytics_client_id'
)
described_class.new.sidekiq_retries_exhausted_block.call(msg)
end
it 'sets the health_care_application state to failed' do
described_class.new.sidekiq_retries_exhausted_block.call(msg)
expect(health_care_application.reload.state).to eq('failed')
end
end
describe '#perform' do
subject do
described_class.new.perform(
user_identifier,
encrypted_form,
health_care_application.id,
google_analytics_client_id
)
end
before do
expect(HCA::Service).to receive(:new).with(user_identifier).once.and_return(hca_service)
end
context 'when submission has an error' do
let(:error) { Common::Client::Errors::HTTPError }
before do
expect(hca_service).to receive(:submit_form).with(form).once.and_raise(error)
end
# Transient HTTP errors mark the record 'error' and re-raise so Sidekiq retries.
it 'sets the health_care_application state to error' do
expect { subject }.to raise_error(error)
health_care_application.reload
expect(health_care_application.state).to eq('error')
end
# Validation errors are terminal: the record goes straight to 'failed'.
context 'with a validation error' do
let(:error) { HCA::SOAPParser::ValidationError }
it 'passes unencrypted form to health_care_application' do
expect_any_instance_of(HealthCareApplication).to receive(:update!).with(
state: 'failed',
form: form.to_json,
google_analytics_client_id:
)
subject
end
it 'sets the health_care_application state to failed' do
subject
expect(health_care_application.reload.state).to eq('failed')
end
it 'creates a pii log' do
subject
log = PersonalInformationLog.where(error_class: 'HCA::SOAPParser::ValidationError').last
expect(log.data['form']).to eq(form)
end
it 'increments statsd' do
expect { subject }.to trigger_statsd_increment('api.1010ez.enrollment_system_validation_error')
end
end
end
context 'with a successful submission' do
before do
expect(hca_service).to receive(:submit_form).with(form).once.and_return(result)
expect(Rails.logger).to receive(:info).with("[10-10EZ] - SubmissionID=#{result[:formSubmissionId]}")
end
it 'calls the service and save the results' do
subject
health_care_application.reload
expect(health_care_application.success?).to be(true)
expect(health_care_application.form_submission_id).to eq(result[:formSubmissionId])
expect(health_care_application.timestamp).to eq(result[:timestamp])
end
end
end
end
|
0
|
code_files/vets-api-private/spec/sidekiq
|
code_files/vets-api-private/spec/sidekiq/hca/std_institution_import_job_spec.rb
|
# frozen_string_literal: true
require 'rails_helper'
require 'csv'
# Specs for the job that imports StdInstitutionFacility rows from an S3-hosted
# CSV and then kicks off the health facilities import.
RSpec.describe HCA::StdInstitutionImportJob, type: :worker do
describe '#fetch_csv_data' do
let(:job) { described_class.new }
context 'when CSV fetch is successful' do
it 'returns the CSV data' do
csv_data = <<~CSV
header1,header2
value1,value2
CSV
stub_request(:get, 'https://sitewide-public-websites-income-limits-data.s3-us-gov-west-1.amazonaws.com/std_institution.csv')
.to_return(status: 200, body: csv_data)
result = job.fetch_csv_data
expect(result).to eq(csv_data)
end
end
context 'when CSV fetch fails' do
# A non-200 response is logged as a warning and surfaces as nil.
it 'logs an error and returns nil' do
stub_request(:get, 'https://sitewide-public-websites-income-limits-data.s3-us-gov-west-1.amazonaws.com/std_institution.csv')
.to_return(status: 404)
expect(Rails.logger).to receive(:warn).with('[HCA] - CSV retrieval failed with response code 404')
result = job.fetch_csv_data
expect(result).to be_nil
end
end
end
describe '#perform' do
context 'when :hca_health_facilities_update_job is disabled' do
it 'does nothing' do
allow(Flipper).to receive(:enabled?).with(:hca_health_facilities_update_job).and_return(false)
expect(Rails.logger).not_to receive(:info)
described_class.new.perform
end
end
context 'when :hca_health_facilities_update_job is enabled' do
before { allow(Flipper).to receive(:enabled?).with(:hca_health_facilities_update_job).and_return(true) }
context 'actual records' do
# Two real-shaped CSV rows: one active facility (671) and one
# deactivated facility (420, DEACTIVATIONDATE populated).
let(:csv_data) do
<<~CSV
ID,ACTIVATIONDATE,DEACTIVATIONDATE,NAME,STATIONNUMBER,VISTANAME,AGENCY_ID,STREETCOUNTRY_ID,STREETADDRESSLINE1,STREETADDRESSLINE2,STREETADDRESSLINE3,STREETCITY,STREETSTATE_ID,STREETCOUNTY_ID,STREETPOSTALCODE,MAILINGCOUNTRY_ID,MAILINGADDRESSLINE1,MAILINGADDRESSLINE2,MAILINGADDRESSLINE3,MAILINGCITY,MAILINGSTATE_ID,MAILINGCOUNTY_ID,MAILINGPOSTALCODE,FACILITYTYPE_ID,MFN_ZEG_RECIPIENT,PARENT_ID,REALIGNEDFROM_ID,REALIGNEDTO_ID,VISN_ID,VERSION,CREATED,UPDATED,CREATEDBY,UPDATEDBY
1000250,,,AUDIE L. MURPHY MEMORIAL HOSP,671,AUDIE L. MURPHY MEMORIAL HOSP,1009121,1006840,7400 MERTON MINTER BLVD,,,SAN ANTONIO,1009348,,78229-4404,1006840,7400 MERTON MINTER BLVD,,,SAN ANTONIO,1009348,,78229-4404,1009231,1,1002217,,,1002217,0,2004-06-04 13:18:48 +0000,2015-12-28 10:05:46 +0000,Initial Load,DataBroker - CQ# 0938 12/09/2015
1000090,,1969-12-31 00:00:00 +0000,CRAWFORD COUNTY CBOC (420),420,ZZ CRAWFORD COUNTY CBOC,1009121,1006840,,,,,1009342,,,,,,,,,,,1009197,0,,,,,0,2004-06-04 13:18:48 +0000,2007-05-07 10:18:36 +0000,Initial Load,Cleanup For Inactive Rows
CSV
end
# Verifies both CSV rows are imported with name and deactivation_date mapped.
it 'populates institutions with the relevant attributes' do
  allow_any_instance_of(HCA::StdInstitutionImportJob).to receive(:fetch_csv_data).and_return(csv_data)

  expect do
    described_class.new.perform
  end.to change(StdInstitutionFacility, :count).by(2)

  san_antonio_facility = StdInstitutionFacility.find_by(station_number: '671')
  expect(san_antonio_facility.name).to eq 'AUDIE L. MURPHY MEMORIAL HOSP'
  expect(san_antonio_facility.deactivation_date).to be_nil

  # FIX: corrected the misspelled local variable ("deacrivated" -> "deactivated").
  deactivated_crawford_facility = StdInstitutionFacility.find_by(station_number: '420')
  expect(deactivated_crawford_facility.name).to eq 'CRAWFORD COUNTY CBOC (420)'
  expect(deactivated_crawford_facility.deactivation_date).to eq Date.new(1969, 12, 31)
end
it 'logs newly created facilities' do
allow_any_instance_of(HCA::StdInstitutionImportJob).to receive(:fetch_csv_data).and_return(csv_data)
expect(Rails.logger).to receive(:info).with('[HCA] - Job started with 0 existing facilities.')
expect(Rails.logger).to receive(:info).with('[HCA] - 2 new institutions: [1000250, 1000090]')
expect(Rails.logger).to receive(:info).with('[HCA] - Job ended with 2 existing facilities.')
expect do
described_class.new.perform
end.to change(StdInstitutionFacility, :count).by(2)
end
# Running the import twice must be idempotent: the second run logs the
# pre-existing counts and creates no new rows.
it 'logs when receiving preexisting facilities' do
allow_any_instance_of(HCA::StdInstitutionImportJob).to receive(:fetch_csv_data).and_return(csv_data)
expect(Rails.logger).to receive(:info).with('[HCA] - Job started with 0 existing facilities.')
expect(Rails.logger).to receive(:info).with('[HCA] - 2 new institutions: [1000250, 1000090]')
expect(Rails.logger).to receive(:info).with('[HCA] - Job started with 2 existing facilities.')
expect(Rails.logger).to receive(:info).with('[HCA] - Job ended with 2 existing facilities.').twice
expect do
described_class.new.perform
end.to change(StdInstitutionFacility, :count).by(2)
expect do
described_class.new.perform
end.not_to change(StdInstitutionFacility, :count)
end
end
context 'maximum record' do
# A single CSV row with every column populated exercises the full mapping.
it 'sets the attributes correctly' do
csv_data = <<~CSV
ID,ACTIVATIONDATE,DEACTIVATIONDATE,NAME,STATIONNUMBER,VISTANAME,AGENCY_ID,STREETCOUNTRY_ID,STREETADDRESSLINE1,STREETADDRESSLINE2,STREETADDRESSLINE3,STREETCITY,STREETSTATE_ID,STREETCOUNTY_ID,STREETPOSTALCODE,MAILINGCOUNTRY_ID,MAILINGADDRESSLINE1,MAILINGADDRESSLINE2,MAILINGADDRESSLINE3,MAILINGCITY,MAILINGSTATE_ID,MAILINGCOUNTY_ID,MAILINGPOSTALCODE,FACILITYTYPE_ID,MFN_ZEG_RECIPIENT,PARENT_ID,REALIGNEDFROM_ID,REALIGNEDTO_ID,VISN_ID,VERSION,CREATED,UPDATED,CREATEDBY,UPDATEDBY
1001304,2001-05-21 00:00:00 +0000,2015-06-30 00:00:00 +0000,ZZ-SENECA CLINIC,589GT,ZZ-SENECA CLINIC,1009121,1006840,1600 COMMUNITY DRIVE,,,SENECA,1009320,,66538-9739,1006840,1600 COMMUNITY DRIVE,,,SENECA,1009320,,66538-9739,1009148,0,1001263,1001956,,1002215,0,2004-06-04 13:18:48 +0000,2021-04-12 14:58:11 +0000,Initial Load,DataBroker - CQ# 0998 3/02/2021
CSV
allow_any_instance_of(HCA::StdInstitutionImportJob).to receive(:fetch_csv_data).and_return(csv_data)
described_class.new.perform
facility = StdInstitutionFacility.find_by(station_number: '589GT')
expect(facility.id).to eq 1_001_304
expect(facility.activation_date).to eq Date.new(2001, 5, 21)
expect(facility.deactivation_date).to eq Date.new(2015, 6, 30)
expect(facility.name).to eq 'ZZ-SENECA CLINIC'
expect(facility.station_number).to eq '589GT'
expect(facility.vista_name).to eq 'ZZ-SENECA CLINIC'
expect(facility.agency_id).to eq 1_009_121
expect(facility.street_country_id).to eq 1_006_840
expect(facility.street_address_line1).to eq '1600 COMMUNITY DRIVE'
expect(facility.street_address_line2).to be_nil
expect(facility.street_address_line3).to be_nil
expect(facility.street_city).to eq 'SENECA'
expect(facility.street_state_id).to eq 1_009_320
expect(facility.street_county_id).to be_nil
expect(facility.street_postal_code).to eq '66538-9739'
expect(facility.mailing_country_id).to eq 1_006_840
expect(facility.mailing_address_line1).to eq '1600 COMMUNITY DRIVE'
expect(facility.mailing_address_line2).to be_nil
expect(facility.mailing_address_line3).to be_nil
expect(facility.mailing_city).to eq 'SENECA'
expect(facility.mailing_state_id).to eq 1_009_320
expect(facility.mailing_county_id).to be_nil
expect(facility.mailing_postal_code).to eq '66538-9739'
expect(facility.facility_type_id).to eq 1_009_148
expect(facility.mfn_zeg_recipient).to eq 0
expect(facility.parent_id).to eq 1_001_263
expect(facility.realigned_from_id).to eq 1_001_956
expect(facility.realigned_to_id).to be_nil
expect(facility.visn_id).to eq 1_002_215
expect(facility.version).to eq 0
expect(facility.created).to eq '2004-06-04 13:18:48 +0000'
expect(facility.updated).to eq '2021-04-12 14:58:11 +0000'
expect(facility.created_by).to eq 'Initial Load'
expect(facility.updated_by).to eq 'DataBroker - CQ# 0998 3/02/2021'
end
end
context 'when fetch_csv_data returns nil' do
it 'raises an error' do
allow_any_instance_of(HCA::StdInstitutionImportJob).to receive(:fetch_csv_data).and_return(nil)
expect do
described_class.new.perform
end.to raise_error(RuntimeError, 'Failed to fetch CSV data.')
end
end
end
# After a successful import the job chains into the health facilities import,
# either async (perform_async) or inline (run_sync: true).
context 'HealthFacilitiesImportJob' do
let(:csv_data) do
<<~CSV
ID,ACTIVATIONDATE,DEACTIVATIONDATE,NAME,STATIONNUMBER,VISTANAME,AGENCY_ID,STREETCOUNTRY_ID,STREETADDRESSLINE1,STREETADDRESSLINE2,STREETADDRESSLINE3,STREETCITY,STREETSTATE_ID,STREETCOUNTY_ID,STREETPOSTALCODE,MAILINGCOUNTRY_ID,MAILINGADDRESSLINE1,MAILINGADDRESSLINE2,MAILINGADDRESSLINE3,MAILINGCITY,MAILINGSTATE_ID,MAILINGCOUNTY_ID,MAILINGPOSTALCODE,FACILITYTYPE_ID,MFN_ZEG_RECIPIENT,PARENT_ID,REALIGNEDFROM_ID,REALIGNEDTO_ID,VISN_ID,VERSION,CREATED,UPDATED,CREATEDBY,UPDATEDBY
1001304,2001-05-21 00:00:00 +0000,2015-06-30 00:00:00 +0000,ZZ-SENECA CLINIC,589GT,ZZ-SENECA CLINIC,1009121,1006840,1600 COMMUNITY DRIVE,,,SENECA,1009320,,66538-9739,1006840,1600 COMMUNITY DRIVE,,,SENECA,1009320,,66538-9739,1009148,0,1001263,1001956,,1002215,0,2004-06-04 13:18:48 +0000,2021-04-12 14:58:11 +0000,Initial Load,DataBroker - CQ# 0998 3/02/2021
CSV
end
it 'enqueues HCA::HealthFacilitiesImportJob' do
allow_any_instance_of(HCA::StdInstitutionImportJob).to receive(:fetch_csv_data).and_return(csv_data)
expect(HCA::HealthFacilitiesImportJob).to receive(:perform_async)
described_class.new.perform
end
it 'runs HCA::HealthFacilitiesImportJob immediately when specified' do
allow_any_instance_of(HCA::StdInstitutionImportJob).to receive(:fetch_csv_data).and_return(csv_data)
mock_health_facilities_job = instance_double(HCA::HealthFacilitiesImportJob)
expect(HCA::HealthFacilitiesImportJob).to receive(:new).and_return(mock_health_facilities_job)
expect(mock_health_facilities_job).to receive(:perform)
described_class.new.import_facilities(run_sync: true)
end
end
end
end
|
0
|
code_files/vets-api-private/spec/sidekiq
|
code_files/vets-api-private/spec/sidekiq/identity/cerner_provisioner_job_spec.rb
|
# frozen_string_literal: true

require 'rails_helper'

# Specs for the Sidekiq job that delegates Cerner provisioning to the
# Identity::CernerProvisioner service and logs service failures.
RSpec.describe Identity::CernerProvisionerJob, type: :job do
  subject(:job) { described_class.new }

  let(:icn) { '123456789' }
  let(:source) { :some_source }
  # Verified double with #perform stubbed as a no-op; examples override it.
  let(:cerner_provisioner) { instance_double(Identity::CernerProvisioner, perform: nil) }

  before do
    allow(Identity::CernerProvisioner).to receive(:new).and_return(cerner_provisioner)
  end

  it 'is unique for 5 minutes' do
    expect(described_class.sidekiq_options['unique_for']).to eq(5.minutes)
  end

  it 'does not retry' do
    expect(described_class.sidekiq_options['retry']).to be(false)
  end

  describe '#perform' do
    it 'calls the CernerProvisioner service class' do
      expect(cerner_provisioner).to receive(:perform)
      job.perform(icn, source)
    end

    context 'when an error occurs' do
      let(:error_message) { 'Some error occurred' }

      before do
        allow(cerner_provisioner)
          .to receive(:perform)
          .and_raise(Identity::Errors::CernerProvisionerError, error_message)
        allow(Rails.logger).to receive(:error)
      end

      it 'logs the error message' do
        expect(Rails.logger)
          .to receive(:error)
          .with('[Identity] [CernerProvisionerJob] error', { icn:, error_message:, source: })
        job.perform(icn, source)
      end
    end
  end
end
|
0
|
code_files/vets-api-private/spec/sidekiq
|
code_files/vets-api-private/spec/sidekiq/identity/get_ssoe_traits_by_cspid_job_spec.rb
|
# frozen_string_literal: true
require 'rails_helper'
require 'sidekiq/attr_package'
require 'ssoe/service'
require 'ssoe/models/user'
require 'ssoe/models/address'
# rubocop:disable RSpec/SpecFilePathFormat
# Specs for Identity::GetSSOeTraitsByCspidJob. Per these examples, the job:
#   1. loads a user-attribute package from Redis via Sidekiq::AttrPackage.find,
#   2. calls SSOe::Service#get_traits,
#   3. logs + increments StatsD success/failure metrics, and
#   4. deletes the cached package only on success.
# Missing/invalid attributes fail soft (no raise); service errors re-raise.
RSpec.describe Identity::GetSSOeTraitsByCspidJob, type: :job do
let(:job) { described_class.new }
# Key under which the caller stashed the attribute package in Redis.
let(:cache_key) { SecureRandom.hex(32) }
# Fully populated attribute hash, as the job expects to find it cached.
let(:valid_attrs) do
{
first_name: 'Jane',
last_name: 'Doe',
birth_date: '1980-01-01',
ssn: '123456789',
email: 'jane.doe@example.com',
phone: '5551234567',
street1: '123 Main St',
city: 'Anytown',
state: 'CA',
zipcode: '90210'
}
end
let(:credential_method) { 'idme' }
let(:credential_id) { 'abc-123' }
# NOTE(review): this icn let is shadowed by an identical let inside the
# success context below — one of the two is redundant.
let(:icn) { '1234567890V123456' }
before do
allow(Sidekiq::AttrPackage).to receive(:find).with(cache_key).and_return(valid_attrs)
allow(Sidekiq::AttrPackage).to receive(:delete).with(cache_key)
allow(StatsD).to receive(:increment)
end
# Shared failure assertions. should_raise: false covers soft failures
# (validation errors) that log/increment but swallow the exception.
shared_examples 'service call failure' do |should_raise: true|
it 'logs failure and increments failure metric' do
expect(StatsD).to receive(:increment).with(
'worker.get_ssoe_traits_by_cspid.failure',
tags: ["credential_method:#{credential_method}"]
)
expect(Rails.logger).to receive(:error).with(
/\[GetSSOeTraitsByCspidJob\] .*/,
hash_including(credential_method:, credential_id:)
)
if should_raise
expect do
job.perform(cache_key, credential_method, credential_id)
end.to raise_error(StandardError)
else
expect do
job.perform(cache_key, credential_method, credential_id)
end.not_to raise_error
end
end
end
context 'when service call is successful' do
let(:icn) { '1234567890V123456' }
before do
allow_any_instance_of(SSOe::Service).to receive(:get_traits).and_return({ success: true, icn: })
end
# Success is the only path on which the cached package is deleted.
it 'logs success and increments success metric' do
expect(Rails.logger).to receive(:info).with(
'[GetSSOeTraitsByCspidJob] SSOe::Service.get_traits success',
hash_including(icn:, credential_method:, credential_id:)
)
expect(StatsD).to receive(:increment).with('worker.get_ssoe_traits_by_cspid.success',
tags: ["credential_method:#{credential_method}"])
expect(Sidekiq::AttrPackage).to receive(:delete).with(cache_key)
job.perform(cache_key, credential_method, credential_id)
end
end
context 'when service call fails' do
before do
allow_any_instance_of(SSOe::Service).to receive(:get_traits).and_raise(
SSOe::Errors::RequestError, 'Something went wrong'
)
end
# Cache miss: the job logs and returns before ever calling the service,
# so nothing raises and the (absent) package is not deleted.
context 'when attributes are missing from Redis' do
before do
allow(Sidekiq::AttrPackage).to receive(:find).with(cache_key).and_return(nil)
end
it 'logs failure and returns early without raising' do
expect(Rails.logger).to receive(:error).with(
'[GetSSOeTraitsByCspidJob] Missing attributes in Redis for key',
hash_including(credential_method:, credential_id:)
)
expect(StatsD).to receive(:increment).with('worker.get_ssoe_traits_by_cspid.failure',
tags: ["credential_method:#{credential_method}"])
expect(Sidekiq::AttrPackage).not_to receive(:delete)
job.perform(cache_key, credential_method, credential_id)
end
end
# Validation failure: attributes present but incomplete; fails soft.
context 'when user is invalid' do
before do
invalid_attrs = valid_attrs.merge(first_name: nil)
allow(Sidekiq::AttrPackage).to receive(:find).with(cache_key).and_return(invalid_attrs)
end
it 'logs validation failure and returns early' do
expect(Rails.logger).to receive(:error).with(
/Invalid user attributes/,
hash_including(credential_method:, credential_id:)
)
expect(StatsD).to receive(:increment).with('worker.get_ssoe_traits_by_cspid.failure',
tags: ["credential_method:#{credential_method}"])
expect(Sidekiq::AttrPackage).not_to receive(:delete)
job.perform(cache_key, credential_method, credential_id)
end
it_behaves_like 'service call failure', should_raise: false
end
# Unhandled service exception: logged with class + message, then re-raised.
context 'when an unhandled exception occurs' do
before do
allow_any_instance_of(SSOe::Service).to receive(:get_traits).and_raise(SSOe::Errors::Error, 'Unexpected crash')
end
it 'logs and re-raises the exception' do
expect(Rails.logger).to receive(:error).with(
/\[GetSSOeTraitsByCspidJob\] .* error: SSOe::Errors::Error - Unexpected crash/,
hash_including(credential_method:, credential_id:)
)
expect(StatsD).to receive(:increment).with('worker.get_ssoe_traits_by_cspid.failure',
tags: ["credential_method:#{credential_method}"])
expect { job.perform(cache_key, credential_method, credential_id) }
.to raise_error(SSOe::Errors::Error, /Unexpected crash/)
end
it_behaves_like 'service call failure'
end
# RequestError from the outer before block: logged, metric bumped, cache
# kept (so a retry could still find the attributes), and re-raised.
it 'logs failure, increments metric, does not delete cache, and raises' do
expect(Rails.logger).to receive(:error).with(
/\[GetSSOeTraitsByCspidJob\] .* error: SSOe::Errors::RequestError - Something went wrong/,
hash_including(credential_method:, credential_id:)
)
expect(StatsD).to receive(:increment).with('worker.get_ssoe_traits_by_cspid.failure',
tags: ["credential_method:#{credential_method}"])
expect(Sidekiq::AttrPackage).not_to receive(:delete)
expect { job.perform(cache_key, credential_method, credential_id) }
.to raise_error(SSOe::Errors::RequestError, /Something went wrong/)
end
end
end
# rubocop:enable RSpec/SpecFilePathFormat
|
0
|
code_files/vets-api-private/spec/sidekiq
|
code_files/vets-api-private/spec/sidekiq/identity/user_acceptable_verified_credential_totals_job_spec.rb
|
# frozen_string_literal: true
require 'rails_helper'
# Specs for Identity::UserAcceptableVerifiedCredentialTotalsJob: for each
# credential provider (idme, logingov, dslogon, mhv, and combined mhv_dslogon)
# and each scope trait, the job must emit exactly one StatsD gauge keyed
# "worker.user_avc_totals.<provider>.<scope>.total" with the record count.
RSpec.describe Identity::UserAcceptableVerifiedCredentialTotalsJob do
  # Shared assertion: one gauge call with the expected key and count.
  # The shared example takes no arguments (the former `|_params|` block
  # parameter was unused and no caller passes one); expectations come from
  # the surrounding lets.
  shared_examples 'a StatsD gauge call' do
    it 'calls gauge with expected keys and counts' do
      allow(StatsD).to receive(:gauge)
      subject.perform
      # `.once` is the idiomatic spelling of `.exactly(1).time`.
      expect(StatsD).to have_received(:gauge).with(expected_statsd_key, expected_statsd_count).once
    end
  end
  # Each shared context pins the scope trait under test; the including
  # context supplies the provider.
  shared_context 'when there are avc' do
    let(:expected_scope) { :with_avc }
    it_behaves_like 'a StatsD gauge call'
  end
  shared_context 'when there are ivc' do
    let(:expected_scope) { :with_ivc }
    it_behaves_like 'a StatsD gauge call'
  end
  shared_context 'when there are no avc' do
    let(:expected_scope) { :without_avc }
    it_behaves_like 'a StatsD gauge call'
  end
  shared_context 'when there are no ivc' do
    let(:expected_scope) { :without_ivc }
    it_behaves_like 'a StatsD gauge call'
  end
  shared_context 'when there are no avc and ivc' do
    let(:expected_scope) { :without_avc_ivc }
    it_behaves_like 'a StatsD gauge call'
  end
  # Seed records for the provider/scope under test. expected_scope is already
  # a symbol, so it is passed directly (the former :"#{expected_scope}" was a
  # redundant re-symbolization).
  # NOTE(review): expected_count is memoized from a DB count evaluated BEFORE
  # create_list inserts anything — confirm the intended record volume.
  let!(:user_avcs) do
    create_list(:user_acceptable_verified_credential,
                expected_count,
                :"#{expected_provider}_verified_account",
                expected_scope)
  end
  let(:expected_provider) { nil }
  let(:expected_scope) { nil }
  let(:expected_statsd_key) { "worker.user_avc_totals.#{expected_provider}.#{expected_scope}.total" }
  let(:expected_statsd_count) { expected_count }
  let(:expected_count) { UserAcceptableVerifiedCredential.all.count }
  describe '#perform' do
    subject { described_class.new }
    context 'idme verified accounts' do
      let(:expected_provider) { :idme }
      include_context 'when there are avc'
      include_context 'when there are ivc'
      include_context 'when there are no avc'
      include_context 'when there are no ivc'
      include_context 'when there are no avc and ivc'
    end
    context 'logingov verified accounts' do
      let(:expected_provider) { :logingov }
      include_context 'when there are avc'
      include_context 'when there are ivc'
      include_context 'when there are no avc'
      include_context 'when there are no ivc'
      include_context 'when there are no avc and ivc'
    end
    context 'dslogon verified accounts' do
      let(:expected_provider) { :dslogon }
      include_context 'when there are avc'
      include_context 'when there are ivc'
      include_context 'when there are no avc'
      include_context 'when there are no ivc'
      include_context 'when there are no avc and ivc'
    end
    context 'mhv verified accounts' do
      let(:expected_provider) { :mhv }
      include_context 'when there are avc'
      include_context 'when there are ivc'
      include_context 'when there are no avc'
      include_context 'when there are no ivc'
      include_context 'when there are no avc and ivc'
    end
    # The combined metric aggregates mhv and dslogon accounts into one key.
    context 'combined mhv and dslogon accounts' do
      let(:expected_provider) { :mhv }
      let!(:dslogon_avcs) do
        create_list(:user_acceptable_verified_credential,
                    expected_count,
                    :dslogon_verified_account,
                    expected_scope)
      end
      let(:expected_statsd_key) { "worker.user_avc_totals.mhv_dslogon.#{expected_scope}.total" }
      # Recounted AFTER both create_list calls, so it covers both providers.
      let(:expected_statsd_count) { UserAcceptableVerifiedCredential.all.count }
      include_context 'when there are avc'
      include_context 'when there are ivc'
      include_context 'when there are no avc'
      include_context 'when there are no ivc'
      include_context 'when there are no avc and ivc'
    end
  end
end
|
0
|
code_files/vets-api-private/spec/sidekiq
|
code_files/vets-api-private/spec/sidekiq/vre/submit1900_job_spec.rb
|
# frozen_string_literal: true
require 'rails_helper'
require 'vre/notification_email'
require 'vre/notification_callback'
require 'vre/vre_monitor'
# Specs for VRE::Submit1900Job (Veteran Readiness & Employment form 28-1900):
# #perform decrypts the serialized user, loads the saved claim, and submits it;
# exhausted Sidekiq retries trigger a failure-notification email, routed through
# either the new notification library or the legacy monitor + VANotify path
# depending on the vre_use_new_vfs_notification_library toggle.
describe VRE::Submit1900Job do
# Minimal stand-in for the signed-in user, in the shape the job expects after
# decryption.
let(:user_struct) do
OpenStruct.new(
edipi: '1007697216',
birls_id: '796043735',
participant_id: '600061742',
pid: '600061742',
birth_date: '1986-05-06T00:00:00+00:00'.to_date,
ssn: '796043735',
vet360_id: '1781151',
loa3?: true,
icn: '1013032368V065534',
uuid: 'b2fab2b5-6af0-45e1-a9e2-394347af91ef',
va_profile_email: 'test@test.com'
)
end
# KMS round-trip mirrors how the enqueuing code serializes the user and how
# the job deserializes it.
let(:encrypted_user) { KmsEncrypted::Box.new.encrypt(user_struct.to_h.to_json) }
let(:user) { OpenStruct.new(JSON.parse(KmsEncrypted::Box.new.decrypt(encrypted_user))) }
let(:monitor) { double('monitor') }
# Skeleton of the sidekiq_retries_exhausted message; 'args' is filled in per
# example with the real claim id + encrypted user.
let(:exhaustion_msg) do
{ 'args' => [], 'class' => 'VRE::Submit1900Job', 'error_message' => 'An error occurred',
'queue' => 'default' }
end
let(:claim) { create(:veteran_readiness_employment_claim) }
describe '#perform' do
subject { described_class.new.perform(claim.id, encrypted_user) }
before do
allow(SavedClaim::VeteranReadinessEmploymentClaim).to receive(:find).and_return(claim)
end
# The job runs in the after hook so each example sets its message
# expectations first, then #perform executes against them.
after do
subject
end
it 'calls claim.add_claimant_info' do
allow(claim).to receive(:send_to_lighthouse!)
allow(claim).to receive(:send_to_res)
expect(claim).to receive(:add_claimant_info).with(user)
end
it 'calls claim.send_to_vre' do
expect(claim).to receive(:send_to_vre).with(user)
end
end
describe 'queue exhaustion' do
before do
allow(SavedClaim::VeteranReadinessEmploymentClaim).to receive(:find).and_return(claim)
end
# New path: VRE::NotificationEmail delivers the claim's error template.
describe 'with feature toggle vre_use_new_vfs_notification_library enabled' do
before do
allow(Flipper).to receive(:enabled?)
.with(:vre_use_new_vfs_notification_library).and_return(true)
end
it 'sends a failure email when retries are exhausted' do
notification_email = double('notification_email')
expect(VRE::NotificationEmail).to receive(:new).with(claim.id).and_return(notification_email)
expect(notification_email).to receive(:deliver).with(SavedClaim::VeteranReadinessEmploymentClaim::ERROR_EMAIL_TEMPLATE)
VRE::Submit1900Job.within_sidekiq_retries_exhausted_block({ 'args' => [claim.id, encrypted_user] }) do
exhaustion_msg['args'] = [claim.id, encrypted_user]
end
end
end
# Legacy path: monitor tracks exhaustion and VANotify sends the email.
describe 'with feature toggle disabled' do
before do
allow(Flipper).to receive(:enabled?)
.with(:vre_use_new_vfs_notification_library).and_return(false)
allow(VRE::Monitor).to receive(:new).and_return(monitor)
allow(monitor).to receive :track_submission_exhaustion
end
it 'sends a failure email' do
# Expectations are declared inside the exhausted block, before the
# helper invokes the retries_exhausted handler.
VRE::Submit1900Job.within_sidekiq_retries_exhausted_block({ 'args' => [claim.id, encrypted_user] }) do
exhaustion_msg['args'] = [claim.id, encrypted_user]
expect(monitor).to receive(:track_submission_exhaustion).with(exhaustion_msg, claim.email)
# NOTE(review): 'email@test.com' and 'First' presumably come from the
# claim factory's form data, not from user_struct — confirm against
# the :veteran_readiness_employment_claim factory.
expect(VANotify::EmailJob).to receive(:perform_async).with(
'email@test.com',
'form1900_action_needed_email_template_id',
{
'first_name' => 'First',
'date_submitted' => Time.zone.today.strftime('%B %d, %Y'),
'confirmation_number' => claim.confirmation_number
}
)
end
end
end
end
end
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.