diff --git a/.github/workflows/test.yml b/.github/workflows/test.yml index 38896fc21..1eacd7b52 100644 --- a/.github/workflows/test.yml +++ b/.github/workflows/test.yml @@ -40,7 +40,7 @@ jobs: run: bundle exec rake db:migrate db:test:prepare - name: Run rspec - run: bundle exec rake + run: bundle exec rake spec - name: Upload coverage results uses: actions/upload-artifact@v2 diff --git a/Rakefile b/Rakefile index 41f4f9cd1..6dc616de5 100644 --- a/Rakefile +++ b/Rakefile @@ -25,18 +25,18 @@ require 'bundler/gem_tasks' require 'solr_wrapper/rake_task' unless Rails.env.production? +require 'rubocop/rake_task' + +RuboCop::RakeTask.new(:rubocop) do |t| + t.options = ['--display-cop-names', '--ignore-parent-exclusion', '-a'] +end + begin require 'rspec/core/rake_task' RSpec::Core::RakeTask.new(:spec) - task default: :spec + task default: [:rubocop, :spec] rescue LoadError # rubocop:disable Lint/HandleExceptions # no rspec available end - -require 'rubocop/rake_task' - -RuboCop::RakeTask.new(:rubocop) do |t| - t.options = ['--display-cop-names'] -end diff --git a/app/assets/javascripts/bulkrax/datatables.js b/app/assets/javascripts/bulkrax/datatables.js new file mode 100644 index 000000000..eabd2f716 --- /dev/null +++ b/app/assets/javascripts/bulkrax/datatables.js @@ -0,0 +1,139 @@ +Blacklight.onLoad(function() { + if($('#importer-show-table').length) { + $('#importer-show-table').DataTable( { + 'processing': true, + 'serverSide': true, + "ajax": window.location.href.replace(/(\/(importers|exporters)\/\d+)/, "$1/entry_table.json"), + "pageLength": 30, + "lengthMenu": [[30, 100, 200], [30, 100, 200]], + "columns": [ + { "data": "identifier" }, + { "data": "id" }, + { "data": "status_message" }, + { "data": "type" }, + { "data": "updated_at" }, + { "data": "errors", "orderable": false }, + { "data": "actions", "orderable": false } + ], + initComplete: function () { + // Add entry class filter + entrySelect.bind(this)() + // Add status filter + statusSelect.bind(this)() + // 
Add refresh link + refreshLink.bind(this)() + } + } ); + } + + if($('#importers-table').length) { + $('#importers-table').DataTable( { + 'processing': true, + 'serverSide': true, + "ajax": window.location.href.replace(/(\/importers)/, "$1/importer_table.json"), + "pageLength": 30, + "lengthMenu": [[30, 100, 200], [30, 100, 200]], + "columns": [ + { "data": "name" }, + { "data": "status_message" }, + { "data": "last_imported_at" }, + { "data": "next_import_at" }, + { "data": "enqueued_records", "orderable": false }, + { "data": "processed_records", "orderable": false }, + { "data": "failed_records", "orderable": false }, + { "data": "deleted_records", "orderable": false }, + { "data": "total_collection_entries", "orderable": false }, + { "data": "total_work_entries", "orderable": false }, + { "data": "total_file_set_entries", "orderable": false }, + { "data": "actions", "orderable": false } + ], + initComplete: function () { + // Add status filter + statusSelect.bind(this)() + // Add refresh link + refreshLink.bind(this)() + } + } ); + } + + if($('#exporters-table').length) { + $('#exporters-table').DataTable( { + 'processing': true, + 'serverSide': true, + "ajax": window.location.href.replace(/(\/exporters)/, "$1/exporter_table.json"), + "pageLength": 30, + "lengthMenu": [[30, 100, 200], [30, 100, 200]], + "columns": [ + { "data": "name" }, + { "data": "status_message" }, + { "data": "created_at" }, + { "data": "download" }, + { "data": "actions", "orderable": false } + ], + initComplete: function () { + // Add status filter + statusSelect.bind(this)() + // Add refresh link + refreshLink.bind(this)() + } + } ); + } + +}) + +function entrySelect() { + let entrySelect = document.createElement('select') + entrySelect.id = 'entry-filter' + entrySelect.classList.value = 'form-control input-sm' + entrySelect.style.marginRight = '10px' + + entrySelect.add(new Option('Filter by Entry Class', '')) + // Read the options from the footer and add them to the entrySelect + 
$('#importer-entry-classes').text().split('|').forEach(function (col, i) { + entrySelect.add(new Option(col.trim())) + }) + document.querySelector('div#importer-show-table_filter').firstChild.prepend(entrySelect) + + // Apply listener for user change in value + entrySelect.addEventListener('change', function () { + var val = entrySelect.value; + this.api() + .search(val ? val : '', false, false) + .draw(); + }.bind(this)); +} + +function statusSelect() { + let statusSelect = document.createElement('select'); + statusSelect.id = 'status-filter' + statusSelect.classList.value = 'form-control input-sm' + statusSelect.style.marginRight = '10px' + + statusSelect.add(new Option('Filter by Status', '')); + statusSelect.add(new Option('Complete')) + statusSelect.add(new Option('Pending')) + statusSelect.add(new Option('Failed')) + statusSelect.add(new Option('Skipped')) + statusSelect.add(new Option('Deleted')) + statusSelect.add(new Option('Complete (with failures)')) + + document.querySelector('div.dataTables_filter').firstChild.prepend(statusSelect) + + // Apply listener for user change in value + statusSelect.addEventListener('change', function () { + var val = statusSelect.value; + this.api() + .search(val ? 
val : '', false, false) + .draw(); + }.bind(this)); +} + +function refreshLink() { + let refreshLink = document.createElement('a'); + refreshLink.onclick = function() { + this.api().ajax.reload(null, false) + }.bind(this) + refreshLink.classList.value = 'glyphicon glyphicon-refresh' + refreshLink.style.marginLeft = '10px' + document.querySelector('div.dataTables_filter').firstChild.append(refreshLink) +} diff --git a/app/assets/javascripts/bulkrax/exporters.js b/app/assets/javascripts/bulkrax/exporters.js index 67944d41a..eaf90b26a 100644 --- a/app/assets/javascripts/bulkrax/exporters.js +++ b/app/assets/javascripts/bulkrax/exporters.js @@ -26,14 +26,14 @@ function removeRequired(allSources) { // hide all export_source function hide(allSources) { - allSources.addClass('d-none'); - allSources.find('#exporter_export_source').addClass('.d-none').attr('type', 'd-none'); + allSources.addClass('d-none hidden'); + allSources.find('#exporter_export_source').addClass('.d-none hidden').attr('type', 'd-none hidden'); } // unhide selected export_source function unhideSelected(selectedSource) { - selectedSource.removeClass('d-none').removeAttr('type'); - selectedSource.parent().removeClass('d-none').removeAttr('type'); + selectedSource.removeClass('d-none hidden').removeAttr('type'); + selectedSource.parent().removeClass('d-none hidden').removeAttr('type'); }; // add the autocomplete javascript diff --git a/app/assets/javascripts/bulkrax/importers.js.erb b/app/assets/javascripts/bulkrax/importers.js.erb index 87c0f7b1d..e254c60e2 100644 --- a/app/assets/javascripts/bulkrax/importers.js.erb +++ b/app/assets/javascripts/bulkrax/importers.js.erb @@ -74,12 +74,14 @@ function handleFileToggle(file_path) { $('#file_path').hide() $('#file_upload').hide() $('#cloud').hide() + $('#existing_options').hide() $('#file_path input').attr('required', null) $('#file_upload input').attr('required', null) } else { $('#file_path').show() $('#file_upload').hide() $('#cloud').hide() + 
$('#existing_options').hide() $('#file_path input').attr('required', 'required') $('#file_upload input').attr('required', null) $('#importer_parser_fields_file_style_specify_a_path_on_the_server').attr('checked', true) @@ -89,6 +91,7 @@ function handleFileToggle(file_path) { $('#file_path').hide() $('#file_upload').show() $('#cloud').hide() + $('#existing_options').hide() $('#file_path input').attr('required', null) $('#file_upload input').attr('required', 'required') }) @@ -96,6 +99,7 @@ function handleFileToggle(file_path) { $('#file_path').show() $('#file_upload').hide() $('#cloud').hide() + $('#existing_options').hide() $('#file_path input').attr('required', 'required') $('#file_upload input').attr('required', null) }) @@ -103,9 +107,19 @@ function handleFileToggle(file_path) { $('#file_path').hide() $('#file_upload').hide() $('#cloud').show() + $('#existing_options').hide() $('#file_path input').attr('required', null) $('#file_upload input').attr('required', null) }) + $('#importer_parser_fields_file_style_existing_entries').click(function(e){ + $('#file_path').hide() + $('#file_upload').hide() + $('#cloud').hide() + $('#existing_options').show() + $('#file_path input').attr('required', null) + $('#file_upload input').attr('required', null) + }) + } function handleParserKlass() { @@ -189,4 +203,4 @@ function setError(selector, error) { selector.attr('disabled', true) } -$(document).on({'ready': prepBulkrax, 'turbolinks:load': prepBulkrax}) \ No newline at end of file +$(document).on({'ready': prepBulkrax, 'turbolinks:load': prepBulkrax}) diff --git a/app/assets/stylesheets/bulkrax/import_export.scss b/app/assets/stylesheets/bulkrax/import_export.scss index 1834840ac..0e182842f 100644 --- a/app/assets/stylesheets/bulkrax/import_export.scss +++ b/app/assets/stylesheets/bulkrax/import_export.scss @@ -34,4 +34,9 @@ div#s2id_exporter_export_source_collection { .bulkrax-clear-toggles { clear: both; -} \ No newline at end of file +} + +#existing_options 
.collection_check_boxes { + margin-left: 10px; + margin-right: 10px; +} diff --git a/app/controllers/bulkrax/entries_controller.rb b/app/controllers/bulkrax/entries_controller.rb index 4328ed0bf..ea1e4feea 100644 --- a/app/controllers/bulkrax/entries_controller.rb +++ b/app/controllers/bulkrax/entries_controller.rb @@ -17,13 +17,29 @@ def show def update @entry = Entry.find(params[:id]) - @entry.factory&.find&.destroy if params[:destroy_first] - @entry.build - @entry.save + type = case @entry.type.downcase + when /fileset/ + 'file_set' + when /collection/ + 'collection' + else + 'work' + end item = @entry.importerexporter + # do not run counters as it loads the whole parser + current_run = item.current_run(skip_counts: true) + @entry.set_status_info('Pending', current_run) + ScheduleRelationshipsJob.set(wait: 5.minutes).perform_later(importer_id: @entry.importer.id) + + if params[:destroy_first] + "Bulkrax::DeleteAndImport#{type.camelize}Job".constantize.perform_later(@entry, current_run) + else + "Bulkrax::Import#{type.camelize}Job".constantize.perform_later(@entry.id, current_run.id) + end + entry_path = item.class.to_s.include?('Importer') ? bulkrax.importer_entry_path(item.id, @entry.id) : bulkrax.exporter_entry_path(item.id, @entry.id) - redirect_back fallback_location: entry_path, notice: "Entry update ran, new status is #{@entry.status}" + redirect_back fallback_location: entry_path, notice: "Entry #{@entry.id} update has been queued" end def destroy diff --git a/app/controllers/bulkrax/exporters_controller.rb b/app/controllers/bulkrax/exporters_controller.rb index 02ab2b95f..ea4ed99b1 100644 --- a/app/controllers/bulkrax/exporters_controller.rb +++ b/app/controllers/bulkrax/exporters_controller.rb @@ -4,9 +4,10 @@ module Bulkrax class ExportersController < ApplicationController include Hyrax::ThemedLayoutController if defined?(::Hyrax) include Bulkrax::DownloadBehavior + include Bulkrax::DatatablesBehavior before_action :authenticate_user! 
before_action :check_permissions - before_action :set_exporter, only: [:show, :edit, :update, :destroy] + before_action :set_exporter, only: [:show, :entry_table, :edit, :update, :destroy] with_themed_layout 'dashboard' if defined?(::Hyrax) # GET /exporters @@ -17,16 +18,29 @@ def index add_exporter_breadcrumbs if defined?(::Hyrax) end + def exporter_table + @exporters = Exporter.order(table_order).page(table_page).per(table_per_page) + @exporters = @exporters.where(exporter_table_search) if exporter_table_search.present? + respond_to do |format| + format.json { render json: format_exporters(@exporters) } + end + end + # GET /exporters/1 def show if defined?(::Hyrax) add_exporter_breadcrumbs add_breadcrumb @exporter.name end + @first_entry = @exporter.entries.first + end - @work_entries = @exporter.entries.where(type: @exporter.parser.entry_class.to_s).page(params[:work_entries_page]).per(30) - @collection_entries = @exporter.entries.where(type: @exporter.parser.collection_entry_class.to_s).page(params[:collections_entries_page]).per(30) - @file_set_entries = @exporter.entries.where(type: @exporter.parser.file_set_entry_class.to_s).page(params[:file_set_entries_page]).per(30) + def entry_table + @entries = @exporter.entries.order(table_order).page(table_page).per(table_per_page) + @entries = @entries.where(entry_table_search) if entry_table_search.present? + respond_to do |format| + format.json { render json: format_entries(@entries, @exporter) } + end end # GET /exporters/new @@ -100,7 +114,7 @@ def download # Use callbacks to share common setup or constraints between actions. def set_exporter - @exporter = Exporter.find(params[:id]) + @exporter = Exporter.find(params[:id] || params[:exporter_id]) end # Only allow a trusted parameters through. 
diff --git a/app/controllers/bulkrax/importers_controller.rb b/app/controllers/bulkrax/importers_controller.rb index 8248975cd..7fcbeff04 100644 --- a/app/controllers/bulkrax/importers_controller.rb +++ b/app/controllers/bulkrax/importers_controller.rb @@ -6,26 +6,35 @@ class ImportersController < ::Bulkrax::ApplicationController include Hyrax::ThemedLayoutController if defined?(::Hyrax) include Bulkrax::DownloadBehavior include Bulkrax::API + include Bulkrax::DatatablesBehavior include Bulkrax::ValidationHelper protect_from_forgery unless: -> { api_request? } before_action :token_authenticate!, if: -> { api_request? }, only: [:create, :update, :delete] before_action :authenticate_user!, unless: -> { api_request? } before_action :check_permissions - before_action :set_importer, only: [:show, :edit, :update, :destroy] + before_action :set_importer, only: [:show, :entry_table, :edit, :update, :destroy] with_themed_layout 'dashboard' if defined?(::Hyrax) # GET /importers def index # NOTE: We're paginating this in the browser. - @importers = Importer.order(created_at: :desc).all if api_request? + @importers = Importer.order(created_at: :desc).all json_response('index') elsif defined?(::Hyrax) add_importer_breadcrumbs end end + def importer_table + @importers = Importer.order(table_order).page(table_page).per(table_per_page) + @importers = @importers.where(importer_table_search) if importer_table_search.present? + respond_to do |format| + format.json { render json: format_importers(@importers) } + end + end + # GET /importers/1 def show if api_request? 
@@ -34,9 +43,15 @@ def show add_importer_breadcrumbs add_breadcrumb @importer.name end - @work_entries = @importer.entries.where(type: @importer.parser.entry_class.to_s).page(params[:work_entries_page]).per(30) - @collection_entries = @importer.entries.where(type: @importer.parser.collection_entry_class.to_s).page(params[:collections_entries_page]).per(30) - @file_set_entries = @importer.entries.where(type: @importer.parser.file_set_entry_class.to_s).page(params[:file_set_entries_page]).per(30) + @first_entry = @importer.entries.first + end + + def entry_table + @entries = @importer.entries.order(table_order).page(table_page).per(table_per_page) + @entries = @entries.where(entry_table_search) if entry_table_search.present? + respond_to do |format| + format.json { render json: format_entries(@entries, @importer) } + end end # GET /importers/new @@ -210,7 +225,7 @@ def files_for_import(file, cloud_files) # Use callbacks to share common setup or constraints between actions. def set_importer - @importer = Importer.find(params[:id]) + @importer = Importer.find(params[:id] || params[:importer_id]) end def importable_params @@ -218,7 +233,7 @@ def importable_params end def importable_parser_fields - params&.[](:importer)&.[](:parser_fields)&.except(:file)&.keys + params&.[](:importer)&.[](:parser_fields)&.except(:file, :entry_statuses)&.keys&. + [{ "entry_statuses" => [] }] end # Only allow a trusted parameters through. diff --git a/app/controllers/concerns/bulkrax/datatables_behavior.rb b/app/controllers/concerns/bulkrax/datatables_behavior.rb new file mode 100644 index 000000000..d130de6df --- /dev/null +++ b/app/controllers/concerns/bulkrax/datatables_behavior.rb @@ -0,0 +1,201 @@ +# frozen_string_literal: true + +module Bulkrax + # rubocop:disable Metrics/ModuleLength + module DatatablesBehavior + extend ActiveSupport::Concern + + def table_per_page + per_page = params[:length].to_i + per_page < 1 ? 
30 : per_page + end + + def order_value(column) + params['columns']&.[](column)&.[]('data') + end + + def table_order + "#{order_value(params&.[]('order')&.[]('0')&.[]('column'))} #{params&.[]('order')&.[]('0')&.[]('dir')}" if params&.[]('order')&.[]('0')&.[]('column').present? + end + + # convert offset to page number + def table_page + params[:start].blank? ? 1 : (params[:start].to_i / params[:length].to_i) + 1 + end + + def entry_table_search + return @entry_table_search if @entry_table_search + return @entry_table_search = false if params['search']&.[]('value').blank? + + table_search_value = params['search']&.[]('value')&.downcase + + ['identifier', 'id', 'status_message', 'type', 'updated_at'].map do |col| + column = Bulkrax::Entry.arel_table[col] + column = Arel::Nodes::NamedFunction.new('CAST', [column.as('text')]) + column = Arel::Nodes::NamedFunction.new('LOWER', [column]) + @entry_table_search = if @entry_table_search + @entry_table_search.or(column.matches("%#{table_search_value}%")) + else + column.matches("%#{table_search_value}%") + end + end + + @entry_table_search + end + + def importer_table_search + return @importer_table_search if @importer_table_search + return @importer_table_search = false if params['search']&.[]('value').blank? 
+ + table_search_value = params['search']&.[]('value')&.downcase + + ['name', 'id', 'status_message', 'last_error_at', 'last_succeeded_at', 'updated_at'].map do |col| + column = Bulkrax::Importer.arel_table[col] + column = Arel::Nodes::NamedFunction.new('CAST', [column.as('text')]) + column = Arel::Nodes::NamedFunction.new('LOWER', [column]) + @importer_table_search = if @importer_table_search + @importer_table_search.or(column.matches("%#{table_search_value}%")) + else + column.matches("%#{table_search_value}%") + end + end + + @importer_table_search + end + + def exporter_table_search + return @exporter_table_search if @exporter_table_search + return @exporter_table_search = false if params['search']&.[]('value').blank? + + table_search_value = params['search']&.[]('value')&.downcase + + ['name', 'status_message', 'created_at'].map do |col| + column = Bulkrax::Exporter.arel_table[col] + column = Arel::Nodes::NamedFunction.new('CAST', [column.as('text')]) + column = Arel::Nodes::NamedFunction.new('LOWER', [column]) + @exporter_table_search = if @exporter_table_search + @exporter_table_search.or(column.matches("%#{table_search_value}%")) + else + column.matches("%#{table_search_value}%") + end + end + + @exporter_table_search + end + + def format_importers(importers) + result = importers.map do |i| + { + name: view_context.link_to(i.name, view_context.importer_path(i)), + status_message: status_message_for(i), + last_imported_at: i.last_imported_at&.strftime("%b %d, %Y"), + next_import_at: i.next_import_at&.strftime("%b %d, %Y"), + enqueued_records: i.last_run&.enqueued_records, + processed_records: i.last_run&.processed_records || 0, + failed_records: i.last_run&.failed_records || 0, + deleted_records: i.last_run&.deleted_records, + total_collection_entries: i.last_run&.total_collection_entries, + total_work_entries: i.last_run&.total_work_entries, + total_file_set_entries: i.last_run&.total_file_set_entries, + actions: importer_util_links(i) + } + end + { + data: 
result, + recordsTotal: Bulkrax::Importer.count, + recordsFiltered: importers.size + } + end + + def format_exporters(exporters) + result = exporters.map do |e| + { + name: view_context.link_to(e.name, view_context.exporter_path(e)), + status_message: status_message_for(e), + created_at: e.created_at, + download: download_zip(e), + actions: exporter_util_links(e) + } + end + { + data: result, + recordsTotal: Bulkrax::Exporter.count, + recordsFiltered: exporters.size + } + end + + def format_entries(entries, item) + result = entries.map do |e| + { + identifier: view_context.link_to(e.identifier, view_context.item_entry_path(item, e)), + id: e.id, + status_message: status_message_for(e), + type: e.type, + updated_at: e.updated_at, + errors: e.latest_status&.error_class&.present? ? view_context.link_to(e.latest_status.error_class, view_context.item_entry_path(item, e), title: e.latest_status.error_message) : "", + actions: entry_util_links(e, item) + } + end + { + data: result, + recordsTotal: item.entries.size, + recordsFiltered: item.entries.size + } + end + + def entry_util_links(e, item) + links = [] + links << view_context.link_to(view_context.raw(''), view_context.item_entry_path(item, e)) + links << "" if view_context.an_importer?(item) + links << view_context.link_to(view_context.raw(''), view_context.item_entry_path(item, e), method: :delete, data: { confirm: 'This will delete the entry and any work associated with it. Are you sure?' 
}) + links.join(" ") + end + + def status_message_for(e) + if e.status_message == "Complete" + " #{e.status_message}" + elsif e.status_message == "Pending" + " #{e.status_message}" + elsif e.status_message == "Skipped" + " #{e.status_message}" + else + " #{e.status_message}" + end + end + + def importer_util_links(i) + links = [] + links << view_context.link_to(view_context.raw(''), importer_path(i)) + links << view_context.link_to(view_context.raw(''), edit_importer_path(i)) + links << view_context.link_to(view_context.raw(''), i, method: :delete, data: { confirm: 'Are you sure?' }) + links.join(" ") + end + + def exporter_util_links(i) + links = [] + links << view_context.link_to(view_context.raw(''), exporter_path(i)) + links << view_context.link_to(view_context.raw(''), edit_exporter_path(i), data: { turbolinks: false }) + links << view_context.link_to(view_context.raw(''), i, method: :delete, data: { confirm: 'Are you sure?' }) + links.join(" ") + end + + def download_zip(e) + return unless File.exist?(e.exporter_export_zip_path) + + options_html = e.exporter_export_zip_files.flatten.map do |file_name| + "" + end.join + + form_html = "
" + form_html += "" + form_html += "\n" # add newline here to add a space between the dropdown and the download button + form_html += "" + form_html += "
" + + form_html + end + end + # rubocop:enable Metrics/ModuleLength +end diff --git a/app/jobs/bulkrax/create_relationships_job.rb b/app/jobs/bulkrax/create_relationships_job.rb index cc954947f..7ad684a79 100644 --- a/app/jobs/bulkrax/create_relationships_job.rb +++ b/app/jobs/bulkrax/create_relationships_job.rb @@ -40,7 +40,7 @@ class CreateRelationshipsJob < ApplicationJob include DynamicRecordLookup - queue_as :import + queue_as Bulkrax.config.ingest_queue_name # @param parent_identifier [String] Work/Collection ID or Bulkrax::Entry source_identifiers # @param importer_run [Bulkrax::ImporterRun] current importer run (needed to properly update counters) diff --git a/app/jobs/bulkrax/delete_and_import_collection_job.rb b/app/jobs/bulkrax/delete_and_import_collection_job.rb new file mode 100644 index 000000000..2e434fb6b --- /dev/null +++ b/app/jobs/bulkrax/delete_and_import_collection_job.rb @@ -0,0 +1,8 @@ +# frozen_string_literal: true + +module Bulkrax + class DeleteAndImportCollectionJob < DeleteAndImportJob + DELETE_CLASS = Bulkrax::DeleteCollectionJob + IMPORT_CLASS = Bulkrax::ImportCollectionJob + end +end diff --git a/app/jobs/bulkrax/delete_and_import_file_set_job.rb b/app/jobs/bulkrax/delete_and_import_file_set_job.rb new file mode 100644 index 000000000..8660a082e --- /dev/null +++ b/app/jobs/bulkrax/delete_and_import_file_set_job.rb @@ -0,0 +1,8 @@ +# frozen_string_literal: true + +module Bulkrax + class DeleteAndImportFileSetJob < DeleteAndImportJob + DELETE_CLASS = Bulkrax::DeleteFileSetJob + IMPORT_CLASS = Bulkrax::ImportFileSetJob + end +end diff --git a/app/jobs/bulkrax/delete_and_import_job.rb b/app/jobs/bulkrax/delete_and_import_job.rb new file mode 100644 index 000000000..03be3f142 --- /dev/null +++ b/app/jobs/bulkrax/delete_and_import_job.rb @@ -0,0 +1,20 @@ +# frozen_string_literal: true + +module Bulkrax + class DeleteAndImportJob < ApplicationJob + queue_as :import + + def perform(entry, importer_run) + status = 
self.class::DELETE_CLASS.perform_now(entry, importer_run) + if status.status_message == "Deleted" + entry = Bulkrax::Entry.find(entry.id) # maximum reload + self.class::IMPORT_CLASS.perform_now(entry.id, importer_run.id) + end + + rescue => e + entry.set_status_info(e) + # this causes caught exception to be reraised + raise + end + end +end diff --git a/app/jobs/bulkrax/delete_and_import_work_job.rb b/app/jobs/bulkrax/delete_and_import_work_job.rb new file mode 100644 index 000000000..318982cf3 --- /dev/null +++ b/app/jobs/bulkrax/delete_and_import_work_job.rb @@ -0,0 +1,8 @@ +# frozen_string_literal: true + +module Bulkrax + class DeleteAndImportWorkJob < DeleteAndImportJob + DELETE_CLASS = Bulkrax::DeleteWorkJob + IMPORT_CLASS = Bulkrax::ImportWorkJob + end +end diff --git a/app/jobs/bulkrax/delete_job.rb b/app/jobs/bulkrax/delete_job.rb index f1c389fc7..6f213634c 100644 --- a/app/jobs/bulkrax/delete_job.rb +++ b/app/jobs/bulkrax/delete_job.rb @@ -2,7 +2,7 @@ module Bulkrax class DeleteJob < ApplicationJob - queue_as :import + queue_as Bulkrax.config.ingest_queue_name def perform(entry, importer_run) user = importer_run.importer.user @@ -16,6 +16,10 @@ def perform(entry, importer_run) entry.importer.current_run = ImporterRun.find(importer_run.id) entry.importer.record_status entry.set_status_info("Deleted", ImporterRun.find(importer_run.id)) + rescue => e + entry.set_status_info(e) + # this causes caught exception to be reraised + raise end end end diff --git a/app/jobs/bulkrax/download_cloud_file_job.rb b/app/jobs/bulkrax/download_cloud_file_job.rb index f56e81285..313c2f010 100644 --- a/app/jobs/bulkrax/download_cloud_file_job.rb +++ b/app/jobs/bulkrax/download_cloud_file_job.rb @@ -2,7 +2,7 @@ module Bulkrax class DownloadCloudFileJob < ApplicationJob - queue_as :import + queue_as Bulkrax.config.ingest_queue_name # Retrieve cloud file and write to the imports directory # Note: if using the file system, the mounted directory in diff --git 
a/app/jobs/bulkrax/import_collection_job.rb b/app/jobs/bulkrax/import_collection_job.rb index 03405180c..8bbdfc430 100644 --- a/app/jobs/bulkrax/import_collection_job.rb +++ b/app/jobs/bulkrax/import_collection_job.rb @@ -2,7 +2,7 @@ module Bulkrax class ImportCollectionJob < ApplicationJob - queue_as :import + queue_as Bulkrax.config.ingest_queue_name # rubocop:disable Rails/SkipsModelValidations def perform(*args) diff --git a/app/jobs/bulkrax/import_file_set_job.rb b/app/jobs/bulkrax/import_file_set_job.rb index 07fc6a388..b29c57bbb 100644 --- a/app/jobs/bulkrax/import_file_set_job.rb +++ b/app/jobs/bulkrax/import_file_set_job.rb @@ -6,7 +6,7 @@ class MissingParentError < ::StandardError; end class ImportFileSetJob < ApplicationJob include DynamicRecordLookup - queue_as :import + queue_as Bulkrax.config.ingest_queue_name attr_reader :importer_run_id diff --git a/app/jobs/bulkrax/import_job.rb b/app/jobs/bulkrax/import_job.rb new file mode 100644 index 000000000..b8ff2d5dd --- /dev/null +++ b/app/jobs/bulkrax/import_job.rb @@ -0,0 +1,7 @@ +# frozen_string_literal: true + +module Bulkrax + class ImportJob < ApplicationJob + queue_as :import + end +end diff --git a/app/jobs/bulkrax/import_work_job.rb b/app/jobs/bulkrax/import_work_job.rb index a3a22ea86..8374aca2a 100644 --- a/app/jobs/bulkrax/import_work_job.rb +++ b/app/jobs/bulkrax/import_work_job.rb @@ -2,7 +2,7 @@ module Bulkrax class ImportWorkJob < ApplicationJob - queue_as :import + queue_as Bulkrax.config.ingest_queue_name # rubocop:disable Rails/SkipsModelValidations # diff --git a/app/jobs/bulkrax/importer_job.rb b/app/jobs/bulkrax/importer_job.rb index 42691b4b1..9fb0f4456 100644 --- a/app/jobs/bulkrax/importer_job.rb +++ b/app/jobs/bulkrax/importer_job.rb @@ -2,7 +2,7 @@ module Bulkrax class ImporterJob < ApplicationJob - queue_as :import + queue_as Bulkrax.config.ingest_queue_name def perform(importer_id, only_updates_since_last_import = false) importer = Importer.find(importer_id) diff --git 
a/app/models/bulkrax/csv_entry.rb b/app/models/bulkrax/csv_entry.rb index 602a63237..903b016c2 100644 --- a/app/models/bulkrax/csv_entry.rb +++ b/app/models/bulkrax/csv_entry.rb @@ -16,11 +16,12 @@ def self.fields_from_data(data) class_attribute(:csv_read_data_options, default: {}) # there's a risk that this reads the whole file into memory and could cause a memory leak + # we strip any special characters out of the headers. looking at you Excel def self.read_data(path) raise StandardError, 'CSV path empty' if path.blank? options = { headers: true, - header_converters: ->(h) { h.to_s.strip.to_sym }, + header_converters: ->(h) { h.to_s.gsub(/[^\w\d\. -]+/, '').strip.to_sym }, encoding: 'utf-8' }.merge(csv_read_data_options) diff --git a/app/models/bulkrax/exporter.rb b/app/models/bulkrax/exporter.rb index 42b62c13e..de054a593 100644 --- a/app/models/bulkrax/exporter.rb +++ b/app/models/bulkrax/exporter.rb @@ -23,6 +23,10 @@ def export set_status_info(e) end + def remove_and_rerun + self.parser_fields['remove_and_rerun'] + end + # #export_source accessors # Used in form to prevent it from getting confused as to which value to populate #export_source with. # Also, used to display the correct selected value when rendering edit form. @@ -102,9 +106,12 @@ def importers_list Importer.all.map { |i| [i.name, i.id] } end - def current_run + def current_run(skip_counts: false) + @current_run ||= self.exporter_runs.create! if skip_counts + return @current_run if @current_run + total = self.limit || parser.total - @current_run ||= self.exporter_runs.create!(total_work_entries: total, enqueued_records: total) + @current_run = self.exporter_runs.create!(total_work_entries: total, enqueued_records: total) end def last_run diff --git a/app/models/bulkrax/importer.rb b/app/models/bulkrax/importer.rb index c577638ba..75474eba7 100644 --- a/app/models/bulkrax/importer.rb +++ b/app/models/bulkrax/importer.rb @@ -103,11 +103,12 @@ def schedulable? 
frequency.to_seconds != 0 end - def current_run + def current_run(skip_counts: false) return @current_run if @current_run.present? @current_run = self.importer_runs.create! return @current_run if file? && zip? + return @current_run if skip_counts entry_counts = { total_work_entries: self.limit || parser.works_total, @@ -123,16 +124,16 @@ def last_run @last_run ||= self.importer_runs.last end + def failed_entries? + entries.failed.any? + end + def failed_statuses @failed_statuses ||= Bulkrax::Status.latest_by_statusable .includes(:statusable) .where('bulkrax_statuses.statusable_id IN (?) AND bulkrax_statuses.statusable_type = ? AND status_message = ?', self.entries.pluck(:id), 'Bulkrax::Entry', 'Failed') end - def failed_entries - @failed_entries ||= failed_statuses.map(&:statusable) - end - def failed_messages failed_statuses.each_with_object({}) do |e, i| i[e.error_message] ||= [] @@ -146,10 +147,6 @@ def completed_statuses .where('bulkrax_statuses.statusable_id IN (?) AND bulkrax_statuses.statusable_type = ? AND status_message = ?', self.entries.pluck(:id), 'Bulkrax::Entry', 'Complete') end - def completed_entries - @completed_entries ||= completed_statuses.map(&:statusable) - end - def seen @seen ||= {} end @@ -170,6 +167,10 @@ def metadata_only? parser.parser_fields['metadata_only'] == true end + def existing_entries? + parser.parser_fields['file_style']&.match(/Existing Entries/) + end + def import_works import_objects(['work']) end @@ -192,17 +193,20 @@ def import_objects(types_array = nil) self.only_updates ||= false self.save if self.new_record? # Object needs to be saved for statuses types = types_array || DEFAULT_OBJECT_TYPES - if remove_and_rerun - self.entries.find_each do |e| - e.factory.find&.destroy! - e.destroy! - end - end - parser.create_objects(types) + existing_entries? ? 
parser.rebuild_entries(types) : parser.create_objects(types) + mark_unseen_as_skipped rescue StandardError => e set_status_info(e) end + # After an import any entries we did not touch are skipped. + # They are not really pending, complete for the last run, or failed + def mark_unseen_as_skipped + entries.where.not(identifier: seen.keys).find_each do |entry| + entry.set_status_info('Skipped') + end + end + # Prepend the base_url to ensure unique set identifiers # @todo - move to parser, as this is OAI specific def unique_collection_identifier(id) diff --git a/app/models/bulkrax/status.rb b/app/models/bulkrax/status.rb index fbcb5a872..86c850b0f 100644 --- a/app/models/bulkrax/status.rb +++ b/app/models/bulkrax/status.rb @@ -2,7 +2,7 @@ module Bulkrax class Status < ApplicationRecord - belongs_to :statusable, polymorphic: true + belongs_to :statusable, polymorphic: true, denormalize: { fields: %i[status_message], if: :latest? } belongs_to :runnable, polymorphic: true serialize :error_backtrace, Array @@ -21,5 +21,14 @@ def self.latest_by_statusable_subtable status_table.join(latest_status_query.as(latest_status_table.name.to_s), Arel::Nodes::InnerJoin) .on(status_table[:id].eq(latest_status_table[:latest_status_id])) end + + def latest? 
+ # TODO: remove if statement when we stop supporting Hyrax < 4 + self.id == if Gem::Version.new(Rails::VERSION::STRING) >= Gem::Version.new('6.0.0') + self.class.where(statusable_id: self.statusable_id, statusable_type: self.statusable_type).order('id desc').pick(:id) + else + self.class.where(statusable_id: self.statusable_id, statusable_type: self.statusable_type).order('id desc').pluck(:id).first # rubocop:disable Rails/Pick + end + end end end diff --git a/app/models/concerns/bulkrax/status_info.rb b/app/models/concerns/bulkrax/status_info.rb index 5edc22388..c48dc58ea 100644 --- a/app/models/concerns/bulkrax/status_info.rb +++ b/app/models/concerns/bulkrax/status_info.rb @@ -10,6 +10,10 @@ module StatusInfo as: :statusable, class_name: "Bulkrax::Status", inverse_of: :statusable + scope :failed, -> { where(status_message: 'Failed') } + scope :complete, -> { where(status_message: 'Complete') } + scope :pending, -> { where(status_message: 'Pending') } + scope :skipped, -> { where(status_message: 'Skipped') } end def current_status @@ -25,6 +29,10 @@ def succeeded? current_status&.status_message&.match(/^Complete$/) end + def skipped? 
+ current_status&.status_message&.match('Skipped') + end + def status current_status&.status_message || 'Pending' end diff --git a/app/parsers/bulkrax/application_parser.rb b/app/parsers/bulkrax/application_parser.rb index f6c05c1fb..a514787cb 100644 --- a/app/parsers/bulkrax/application_parser.rb +++ b/app/parsers/bulkrax/application_parser.rb @@ -14,7 +14,7 @@ class ApplicationParser # rubocop:disable Metrics/ClassLength :seen, :increment_counters, :parser_fields, :user, :keys_without_numbers, :key_without_numbers, :status, :set_status_info, :status_info, :status_at, :exporter_export_path, :exporter_export_zip_path, :importer_unzip_path, :validate_only, - :zip?, :file?, + :zip?, :file?, :remove_and_rerun, to: :importerexporter # @todo Convert to `class_attribute :parser_fiels, default: {}` @@ -47,6 +47,10 @@ def entry_class raise NotImplementedError, 'must be defined' end + def work_entry_class + entry_class + end + # @api public # @abstract Subclass and override {#collection_entry_class} to implement behavior for the parser. def collection_entry_class @@ -157,6 +161,22 @@ def visibility @visibility ||= self.parser_fields['visibility'] || 'open' end + def create_collections + create_objects(['collection']) + end + + def create_works + create_objects(['work']) + end + + def create_file_sets + create_objects(['file_set']) + end + + def create_relationships + create_objects(['relationship']) + end + # @api public # # @param types [Array] the types of objects that we'll create. 
@@ -166,30 +186,77 @@ def visibility # @see #create_works # @see #create_file_sets # @see #create_relationships - def create_objects(types = []) - types.each do |object_type| - send("create_#{object_type.pluralize}") + def create_objects(types_array = nil) + index = 0 + (types_array || %w[collection work file_set relationship]).each do |type| + if type.eql?('relationship') + ScheduleRelationshipsJob.set(wait: 5.minutes).perform_later(importer_id: importerexporter.id) + next + end + send(type.pluralize).each do |current_record| + next unless record_has_source_identifier(current_record, index) + break if limit_reached?(limit, index) + seen[current_record[source_identifier]] = true + create_entry_and_job(current_record, type) + increment_counters(index, "#{type}": true) + index += 1 + end + importer.record_status + end + true + rescue StandardError => e + set_status_info(e) + end + + def rebuild_entries(types_array = nil) + index = 0 + (types_array || %w[collection work file_set relationship]).each do |type| + # works are not guaranteed to have Work in the type + + importer.entries.where(rebuild_entry_query(type, parser_fields['entry_statuses'])).find_each do |e| + seen[e.identifier] = true + e.status_info('Pending', importer.current_run) + if remove_and_rerun + delay = calculate_type_delay(type) + "Bulkrax::DeleteAndImport#{type.camelize}Job".constantize.set(wait: delay).send(perform_method, e, current_run) + else + "Bulkrax::Import#{type.camelize}Job".constantize.send(perform_method, e.id, current_run.id) + end + increment_counters(index) + index += 1 + end end end - # @abstract Subclass and override {#create_collections} to implement behavior for the parser. - def create_collections - raise NotImplementedError, 'must be defined' if importer? 
- end + def rebuild_entry_query(type, statuses) + type_col = Bulkrax::Entry.arel_table['type'] + status_col = Bulkrax::Entry.arel_table['status_message'] - # @abstract Subclass and override {#create_works} to implement behavior for the parser. - def create_works - raise NotImplementedError, 'must be defined' if importer? + query = (type == 'work' ? type_col.not.matches(%w[collection file_set]) : type_col.matches(type.camelize)) + query.and(status_col.in(statuses)) end - # @abstract Subclass and override {#create_file_sets} to implement behavior for the parser. - def create_file_sets - raise NotImplementedError, 'must be defined' if importer? + def calculate_type_delay(type) + return 2.minutes if type == 'file_set' + return 1.minute if type == 'work' + return 0 end - # @abstract Subclass and override {#create_relationships} to implement behavior for the parser. - def create_relationships - raise NotImplementedError, 'must be defined' if importer? + def create_entry_and_job(current_record, type, identifier = nil) + identifier ||= current_record[source_identifier] + new_entry = find_or_create_entry(send("#{type}_entry_class"), + identifier, + 'Bulkrax::Importer', + current_record.to_h) + new_entry.status_info('Pending', importer.current_run) + if current_record[:delete].present? + "Bulkrax::Delete#{type.camelize}Job".constantize.send(perform_method, new_entry, current_run) + elsif current_record[:remove_and_rerun].present? 
|| remove_and_rerun + delay = calculate_type_delay(type) + "Bulkrax::DeleteAndImport#{type.camelize}Job".constantize.set(wait: delay).send(perform_method, new_entry, current_run) + else + "Bulkrax::Import#{type.camelize}Job".constantize.send(perform_method, new_entry.id, current_run.id) + end end # Optional, define if using browse everything for file upload @@ -305,11 +372,15 @@ def new_entry(entryclass, type) end def find_or_create_entry(entryclass, identifier, type, raw_metadata = nil) - entry = entryclass.where( + # limit entry search to just this importer or exporter. Don't go moving them + entry = importerexporter.entries.where( + identifier: identifier + ).first + entry ||= entryclass.new( importerexporter_id: importerexporter.id, importerexporter_type: type, identifier: identifier - ).first_or_create! + ) entry.raw_metadata = raw_metadata # Setting parsed_metadata specifically for the id so we can find the object via the # id in a delete. This is likely to get clobbered in a regular import, which is fine. diff --git a/app/parsers/bulkrax/bagit_parser.rb b/app/parsers/bulkrax/bagit_parser.rb index 29b2f5809..1b51f302f 100644 --- a/app/parsers/bulkrax/bagit_parser.rb +++ b/app/parsers/bulkrax/bagit_parser.rb @@ -63,29 +63,6 @@ def get_data(bag, data) data end - def create_works - entry_class == CsvEntry ? super : create_rdf_works - end - - def create_rdf_works - records.each_with_index do |record, index| - next unless record_has_source_identifier(record, index) - break if limit_reached?(limit, index) - - seen[record[source_identifier]] = true - new_entry = find_or_create_entry(entry_class, record[source_identifier], 'Bulkrax::Importer', record) - if record[:delete].present? 
- DeleteWorkJob.send(perform_method, new_entry, current_run) - else - ImportWorkJob.send(perform_method, new_entry.id, current_run.id) - end - increment_counters(index, work: true) - end - importer.record_status - rescue StandardError => e - set_status_info(e) - end - # export methods # rubocop:disable Metrics/MethodLength, Metrics/AbcSize diff --git a/app/parsers/bulkrax/csv_parser.rb b/app/parsers/bulkrax/csv_parser.rb index f93fe32fe..c6409e8d9 100644 --- a/app/parsers/bulkrax/csv_parser.rb +++ b/app/parsers/bulkrax/csv_parser.rb @@ -113,57 +113,6 @@ def valid_import? false end - def create_collections - create_objects(['collection']) - end - - def create_works - create_objects(['work']) - end - - def create_file_sets - create_objects(['file_set']) - end - - def create_relationships - create_objects(['relationship']) - end - - def create_objects(types_array = nil) - index = 0 - (types_array || %w[collection work file_set relationship]).each do |type| - if type.eql?('relationship') - ScheduleRelationshipsJob.set(wait: 5.minutes).perform_later(importer_id: importerexporter.id) - next - end - send(type.pluralize).each do |current_record| - next unless record_has_source_identifier(current_record, index) - break if limit_reached?(limit, index) - - seen[current_record[source_identifier]] = true - create_entry_and_job(current_record, type) - increment_counters(index, "#{type}": true) - index += 1 - end - importer.record_status - end - true - rescue StandardError => e - set_status_info(e) - end - - def create_entry_and_job(current_record, type) - new_entry = find_or_create_entry(send("#{type}_entry_class"), - current_record[source_identifier], - 'Bulkrax::Importer', - current_record.to_h) - if current_record[:delete].present? 
- "Bulkrax::Delete#{type.camelize}Job".constantize.send(perform_method, new_entry, current_run) - else - "Bulkrax::Import#{type.camelize}Job".constantize.send(perform_method, new_entry.id, current_run.id) - end - end - def write_partial_import_file(file) import_filename = import_file_path.split('/').last partial_import_filename = "#{File.basename(import_filename, '.csv')}_corrected_entries.csv" @@ -204,7 +153,6 @@ def create_new_entries def entry_class CsvEntry end - alias work_entry_class entry_class def collection_entry_class CsvCollectionEntry diff --git a/app/parsers/bulkrax/oai_dc_parser.rb b/app/parsers/bulkrax/oai_dc_parser.rb index 4319ab3f1..03a3a663d 100644 --- a/app/parsers/bulkrax/oai_dc_parser.rb +++ b/app/parsers/bulkrax/oai_dc_parser.rb @@ -63,6 +63,12 @@ def import_fields delegate :list_sets, to: :client + def create_objects(types = []) + types.each do |object_type| + send("create_#{object_type.pluralize}") + end + end + def create_collections metadata = { visibility: 'open' @@ -86,27 +92,31 @@ def create_works results = self.records(quick: true) return if results.blank? results.full.each_with_index do |record, index| - identifier = record.send(source_identifier) - if identifier.blank? - if Bulkrax.fill_in_blank_source_identifiers.present? - identifier = Bulkrax.fill_in_blank_source_identifiers.call(self, index) - else - invalid_record("Missing #{source_identifier} for #{record.to_h}\n") - next - end - end - + identifier = record_has_source_identifier(record, index) + next unless identifier break if limit_reached?(limit, index) + seen[identifier] = true - new_entry = entry_class.where(importerexporter: self.importerexporter, identifier: identifier).first_or_create! - if record.deleted? 
- DeleteWorkJob.send(perform_method, new_entry, importerexporter.current_run) - else - ImportWorkJob.send(perform_method, new_entry.id, importerexporter.current_run.id) - end + create_entry_and_job(record, 'work', identifier) increment_counters(index, work: true) end importer.record_status + rescue StandardError => e + set_status_info(e) + end + + # OAI records do not let us set the source identifier easily + def record_has_source_identifier(record, index) + identifier = record.send(source_identifier) + if identifier.blank? + if Bulkrax.fill_in_blank_source_identifiers.present? + identifier = Bulkrax.fill_in_blank_source_identifiers.call(self, index) + else + invalid_record("Missing #{source_identifier} for #{record.to_h}\n") + return false + end + end + identifier end def collections diff --git a/app/parsers/bulkrax/parser_export_record_set.rb b/app/parsers/bulkrax/parser_export_record_set.rb index 55abfae38..414ff4430 100644 --- a/app/parsers/bulkrax/parser_export_record_set.rb +++ b/app/parsers/bulkrax/parser_export_record_set.rb @@ -113,14 +113,14 @@ def each # # @see #file_sets def candidate_file_set_ids - @candidate_file_set_ids ||= works.flat_map { |work| work.fetch("#{Bulkrax.file_model_class.to_s.underscore}_ids_ssim", []) } + @candidate_file_set_ids ||= works.flat_map { |work| work.fetch(Bulkrax.solr_key_for_member_file_ids, []) } end # @note Specifically not memoizing this so we can merge values without changing the object. # # No sense attempting to query for more than the limit. 
def query_kwargs - { fl: "id,#{Bulkrax.file_model_class.to_s.underscore}_ids_ssim", method: :post, rows: row_limit } + { fl: "id,#{Bulkrax.solr_key_for_member_file_ids}", method: :post, rows: row_limit } end # If we have a limit, we need not query beyond that limit diff --git a/app/parsers/bulkrax/xml_parser.rb b/app/parsers/bulkrax/xml_parser.rb index c76d9a485..a5854e3c2 100644 --- a/app/parsers/bulkrax/xml_parser.rb +++ b/app/parsers/bulkrax/xml_parser.rb @@ -11,13 +11,29 @@ def entry_class def collection_entry_class; end # @todo not yet supported - def create_collections; end + def create_collections + raise NotImplementedError + end # @todo not yet supported def file_set_entry_class; end # @todo not yet supported - def create_file_sets; end + def create_file_sets + raise NotImplementedError + end + + def file_sets + raise NotImplementedError + end + + def collections + raise NotImplementedError + end + + def works + records + end # TODO: change to differentiate between collection and work records when adding ability to import collection metadata def works_total @@ -92,25 +108,6 @@ def good_file_type?(path) %w[.xml .xls .xsd].include?(File.extname(path)) || ::Marcel::MimeType.for(path).include?('application/xml') end - def create_works - records.each_with_index do |record, index| - next unless record_has_source_identifier(record, index) - break if !limit.nil? && index >= limit - - seen[record[source_identifier]] = true - new_entry = find_or_create_entry(entry_class, record[source_identifier], 'Bulkrax::Importer', record) - if record[:delete].present? 
- DeleteWorkJob.send(perform_method, new_entry, current_run) - else - ImportWorkJob.send(perform_method, new_entry.id, current_run.id) - end - increment_counters(index, work: true) - end - importer.record_status - rescue StandardError => e - set_status_info(e) - end - def total records.size end diff --git a/app/views/bulkrax/exporters/_form.html.erb b/app/views/bulkrax/exporters/_form.html.erb index df3af0b38..72477b19f 100644 --- a/app/views/bulkrax/exporters/_form.html.erb +++ b/app/views/bulkrax/exporters/_form.html.erb @@ -33,8 +33,8 @@ label: t('bulkrax.exporter.labels.importer'), required: true, prompt: 'Select from the list', - label_html: { class: 'importer export-source-option d-none' }, - input_html: { class: 'importer export-source-option d-none form-control' }, + label_html: { class: 'importer export-source-option d-none hidden' }, + input_html: { class: 'importer export-source-option d-none hidden form-control' }, collection: form.object.importers_list.sort %> <%= form.input :export_source_collection, @@ -42,9 +42,9 @@ label: t('bulkrax.exporter.labels.collection'), required: true, placeholder: @collection&.title&.first, - label_html: { class: 'collection export-source-option d-none' }, + label_html: { class: 'collection export-source-option d-none hidden' }, input_html: { - class: 'collection export-source-option d-none form-control', + class: 'collection export-source-option d-none hidden form-control', data: { 'autocomplete-url' => '/authorities/search/collections', 'autocomplete' => 'collection' @@ -56,8 +56,8 @@ label: t('bulkrax.exporter.labels.worktype'), required: true, prompt: 'Select from the list', - label_html: { class: 'worktype export-source-option d-none' }, - input_html: { class: 'worktype export-source-option d-none form-control' }, + label_html: { class: 'worktype export-source-option d-none hidden' }, + input_html: { class: 'worktype export-source-option d-none hidden form-control' }, collection: Bulkrax.curation_concerns.map { |cc| 
[cc.to_s, cc.to_s] } %> <%= form.input :limit, @@ -80,7 +80,7 @@ as: :boolean, label: t('bulkrax.exporter.labels.filter_by_date') %> -
+