diff --git a/app/controllers/export_controller.rb b/app/controllers/export_controller.rb new file mode 100644 index 000000000..0f004070b --- /dev/null +++ b/app/controllers/export_controller.rb @@ -0,0 +1,45 @@ +# This controller handles exporting data from the application to various formats. +class ExportController < ApplicationController + before_action :export_params + + def index + klass = params[:class].constantize + + render json: { + mandatory_fields: klass.mandatory_fields, + optional_fields: klass.optional_fields, + external_fields: klass.external_fields + }, status: :ok + rescue StandardError => e + render json: { error: e.message }, status: :unprocessable_entity + end + + def export + # Parse ordered fields from JSON, if provided + ordered_fields = + begin + JSON.parse(params[:ordered_fields]) if params[:ordered_fields] + rescue JSON::ParserError + render json: { error: "Invalid JSON for ordered_fields" }, status: :unprocessable_entity + return + end + + klass = params[:class].constantize + + csv_file = Export.perform(klass, ordered_fields) + + render json: { + message: "#{params[:class]} has been exported!", + file: csv_file + }, status: :ok + + rescue StandardError => e + render json: { error: e.message }, status: :unprocessable_entity + end + + private + + def export_params + params.permit(:class, :ordered_fields) + end +end diff --git a/app/controllers/import_controller.rb b/app/controllers/import_controller.rb new file mode 100644 index 000000000..fb7ad4357 --- /dev/null +++ b/app/controllers/import_controller.rb @@ -0,0 +1,86 @@ +# This controller handles importing CSV data into any supported model. +# It exposes two endpoints: +# • GET /import -> returns field requirements for the selected class +# • POST /import -> processes the uploaded CSV file +# +# The controller delegates actual import logic to: +# klass.try_import_records(...) +# +# Each model that supports importing must implement: +# mandatory_fields +# optional_fields +# external_fields +# try_import_records(file, ordered_fields, use_header:) +# + +class ImportController < ApplicationController + # Ensure strong parameters are processed before each action + before_action :import_params + + ## + # GET /import + # + # Returns metadata about which fields a given class requires or accepts. + # The frontend uses this to build the mapping UI (drag/drop field matching). + # + def index + imported_class = params[:class].constantize + + render json: { + mandatory_fields: imported_class.mandatory_fields, + optional_fields: imported_class.optional_fields, + external_fields: imported_class.external_fields, + + # Import does not provide duplicate-resolution strategies (those apply to export) + available_actions_on_dup: imported_class.available_actions_on_duplicate.map{|klass| klass.class.name}, + }, status: :ok + end + + ## + # POST /import + # + # This action performs the actual import process. It: + # 1. Reads the uploaded CSV file + # 2. Determines whether the CSV includes headers + # 3. Applies user-chosen field ordering (if provided) + # 4. Hands off import logic to the model via `try_import_records` + # + def import + uploaded_file = params[:csv_file] + + # Convert use_headers ("true"/"false") into actual boolean + use_headers = ActiveRecord::Type::Boolean.new.deserialize(params[:use_headers]) + + # If the user provided a custom field ordering, load it from JSON + ordered_fields = JSON.parse(params[:ordered_fields]) if params[:ordered_fields] + + # Dynamically load the model class (e.g., "User", "Team", etc.) 
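+    # For example (illustrative): "User".constantize resolves to the User model,
+    # while an unrecognised class name raises NameError, which the rescue
+    # below reports as a 422 error.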
+ klass = params[:class].constantize + + # Load the chosen duplicate action (Skip, Update, Change) + dup_action = params[:dup_action].constantize + + pp dup_action + + importService = Import.new(klass: klass, file: uploaded_file, headers: ordered_fields, dup_action: dup_action.new) + result = importService.perform(use_headers) + + # If no exceptions occur, return success + render json: { message: "#{klass.name} has been imported!", **result }, status: :created + + rescue StandardError => e + # Catch any unexpected runtime errors + puts "An unexpected error occurred during import: #{e.message}" + + render json: { error: e.message }, status: :unprocessable_entity + end + + private + + ## + # Strong parameters for import operations + # + def import_params + params.permit(:csv_file, :use_headers, :class, :ordered_fields, :dup_action) + end +end diff --git a/app/helpers/importable_exportable_helper.rb b/app/helpers/importable_exportable_helper.rb new file mode 100644 index 000000000..0a1ef8e84 --- /dev/null +++ b/app/helpers/importable_exportable_helper.rb @@ -0,0 +1,409 @@ +# importable_exportable_helper.rb +# +# =============================================================== +# ExternalClass +# +# Represents a class referenced by another class during import. +# For example: +# - Importing Teams may also need to create Users or Roles +# - Importing Assignments may need to create Topics +# +# This object encodes: +# • Which class is referenced +# • Whether it should be LOOKED UP or CREATED +# • What field should be used to perform look_ups +# +# The importer uses this information to: +# • Map CSV fields to the external class +# • Attempt to find existing referenced objects +# • Create new referenced objects when required +# +# Example: +# ExternalClass.new(User, should_look_up: true, should_create: false, look_up_field: :email) +# =============================================================== +class ExternalClass + attr_accessor :ref_class, :should_look_up, :should_create + + def initialize(ref_class, should_look_up = false, should_create = true, look_up_field = nil) + @ref_class = ref_class # The class being referenced (e.g., User) + @should_look_up = should_look_up # Whether existing objects should be searched for + @should_create = should_create # Whether new objects should be created if no match found + @look_up_field = look_up_field # Column used to identify existing objects + end + + # -------------------------------------------------------------- + # Resolve what fields belong to the external class. + # + # If the class itself includes ImportableExportable, we use its + # internal_fields. Otherwise, we fall back to: + # - the look_up field, or + # - the primary key + # + # All returned fields are namespaced (role_name, user_email, etc.) + # -------------------------------------------------------------- + def fields + if @ref_class.respond_to?(:internal_fields) + @ref_class.internal_fields.map { |field| self.class.append_class_name(@ref_class, field) } + else + [self.class.append_class_name(@ref_class, @look_up_field.to_s), self.class.append_class_name(@ref_class, @ref_class.primary_key)] + end + end + + # -------------------------------------------------------------- + # look_up an external object in the database. + # + # Uses either: + # • a look_up field, or + # • the primary key + # + # It will try both the namespaced version (e.g. role_name) + # and the raw version (name) depending on what exists in the model. 
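+  # A minimal sketch of the intended behaviour (values hypothetical; the Role
+  # configuration mirrors the one declared in user.rb):
+  #   ext = ExternalClass.new(Role, true, false, :name)
+  #   ext.look_up({ "role_name" => "Instructor" })
+  #   # => Role.find_by(name: "Instructor"), or nil when no match exists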
+ # -------------------------------------------------------------- + def look_up(class_values) + class_name_look_up_field = self.class.append_class_name(@ref_class, @look_up_field.to_s) + class_name_primary_field = self.class.append_class_name(@ref_class, @ref_class.primary_key) + + value = nil + + # ---------- Try look_up field ---------- + if @look_up_field && class_values[class_name_look_up_field] + if @ref_class.attribute_method?(@look_up_field) + value = @ref_class.find_by(@look_up_field => class_values[class_name_look_up_field]) + elsif @ref_class.attribute_method?(class_name_look_up_field) + value = @ref_class.find_by(class_name_look_up_field => class_values[class_name_look_up_field]) + end + + # ---------- Try primary key ---------- + elsif class_values[class_name_primary_field] + if @ref_class.attribute_method?(@ref_class.primary_key) + value = @ref_class.find_by(@ref_class.primary_key => class_values[class_name_primary_field]) + elsif @ref_class.attribute_method?(class_name_primary_field) + value = @ref_class.find_by(class_name_primary_field => class_values[class_name_primary_field]) + end + end + + value + end + + # -------------------------------------------------------------- + # Convert CSV attributes (namespaced) into attributes that match + # the external class (un-namespaced). + # -------------------------------------------------------------- + def from_hash(attrs) + fixed = {} + attrs.each { |k, v| fixed[self.class.unappended_class_name(@ref_class, k)] = v } + @ref_class.new(fixed) + end + + # Prefix column with the class name ("role_name", "user_email") + def self.append_class_name(ref_class, field) + "#{ref_class.name.underscore}_#{field}" + end + + # Remove class name prefix + def self.unappended_class_name(ref_class, name) + name.delete_prefix("#{ref_class.name.underscore}_") + end +end + +# =============================================================== +# ImportableExportableHelper +# +# This module adds import/export metadata and behavior to models. +# +# It supports: +# • mandatory fields +# • optional fields +# • external class definitions +# • combining internal and external fields +# • row-level import logic +# +# Any model including this module becomes import/export capable. +# +# Example: +# +# class Team < ApplicationRecord +# extend ImportableExportableHelper +# mandatory_fields :name +# external_classes ExternalClass.new(User, true, true, :email) +# end +# +# =============================================================== +module ImportableExportableHelper + + # -------------------------------------------------------------- + # When extended by a class, inherit parent import settings. + # + # This allows STI or subclassed models to reuse configuration. + # -------------------------------------------------------------- + def self.extended(base) + if base.superclass.respond_to?(:mandatory_fields) + base.instance_variable_set(:@mandatory_fields, base.superclass.mandatory_fields) + base.instance_variable_set(:@external_classes, base.superclass.external_classes) + base.instance_variable_set(:@class_name, base.superclass.name) + base.instance_variable_set(:@available_actions_on_duplicate, base.superclass.available_actions_on_duplicate) + else + base.instance_variable_set(:@class_name, base.name) + end + end + + # -------------------------------------------------------------- + # Define or retrieve mandatory fields. + # These must be present in the CSV. 
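+  # A short usage sketch (mirroring the Team model in this change):
+  #   class Team < ApplicationRecord
+  #     extend ImportableExportableHelper
+  #     mandatory_fields :name, :type
+  #   end
+  #   Team.mandatory_fields   # => ["name", "type"]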
+ # -------------------------------------------------------------- + def mandatory_fields(*fields) + if fields.any? + @mandatory_fields = fields.map(&:to_s) + else + @mandatory_fields + end + end + + # -------------------------------------------------------------- + # Optional = internal fields - mandatory + # -------------------------------------------------------------- + def optional_fields + internal_fields - mandatory_fields + end + + # -------------------------------------------------------------- + # Define or retrieve external classes. + # + # Example: + # external_classes ExternalClass.new(Role, true, false, :name) + # -------------------------------------------------------------- + def external_classes(*fields) + if fields.any? + @external_classes = fields + else + @external_classes + end + end + + # -------------------------------------------------------------- + # Define or retrieve available duplicate actions. + # + # Example: + # available_actions_on_duplicate DuplicateAction, SkipRecordAction + # -------------------------------------------------------------- + def available_actions_on_duplicate(*fields) + if fields.any? + @available_actions_on_duplicate = fields + else + @available_actions_on_duplicate + end + end + + # -------------------------------------------------------------- + # INTERNAL FIELDS + # + # Internal fields come from: + # • database column names + # • mandatory_fields + # + # Then external fields are removed (to prevent duplication). + # -------------------------------------------------------------- + def internal_fields + (column_names + (mandatory_fields || [])).uniq - external_fields + end + + # -------------------------------------------------------------- + # EXTERNAL FIELDS + # + # Flatten all internal fields from all external class definitions. + # -------------------------------------------------------------- + def external_fields + fields = [] + external_classes&.each { |external_class| fields += external_class.fields } + + fields + end + + # Combined fields for full CSV mapping + def internal_and_external_fields + internal_fields + external_fields + end + + # -------------------------------------------------------------- + # Construct an object from a CSV row hash. + # + # For internal fields, the value is stored as an array during + # parsing, so we take the first element. + # -------------------------------------------------------------- + def from_hash(attrs) + cleaned = {} + attrs.each { |k, v| cleaned[k] = v[0] } + new(cleaned) + end + + # -------------------------------------------------------------- + # Export helper + # Returns a hash of internal fields → values + # -------------------------------------------------------------- + def to_hash(fields = self.class.internal_fields) + fields.to_h { |f| [f, send(f)] } + end + + + # -------------------------------------------------------------- + # MAIN IMPORT WORKFLOW + # + # Creates a temporary file with normalized headers, + # then iterates through rows, importing them one by one. + # + # Duplicate objects are collected and returned. 
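+  # Illustrative call (file path and header order hypothetical):
+  #   User.try_import_records("users.csv", ["name", "email", "password", "full_name"], false)
+  #   # => [] on a clean import, or the duplicate records that were detected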
+ # -------------------------------------------------------------- + def try_import_records(file, headers, use_header) + temp_file = 'output.csv' + csv_file = CSV.read(file) + + mapping = [] + + # ---- Normalize header row ---- + CSV.open(temp_file, "w") do |csv| + if use_header + headers = csv_file.shift.map { |h| h.parameterize.underscore } + else + headers = headers.map { |header| header.parameterize.underscore } + end + + mapping = FieldMapping.from_header(self, headers) + + csv << headers + csv_file.each { |row| csv << row } + end + + temp_contents = CSV.read(temp_file) + temp_contents.shift # drop header + + duplicate_records = [] + + ActiveRecord::Base.transaction do + temp_contents.each do |row| + dup = import_row(row, mapping) + duplicate_records << dup if dup && dup != true + end + + rescue StandardError + raise ActiveRecord::Rollback + end + + File.delete(temp_file) + duplicate_records + end + + # -------------------------------------------------------------- + # Import a single row into the current model. + # + # Handles: + # • mapping values + # • building internal object + # • external object look_up/creation + # • save + duplicate capture + # + # Returns: + # • true if saved successfully + # • duplicate object if duplicate occurred + # -------------------------------------------------------------- + def import_row(row, mapping) + + # Build row_hash where each key maps to all found values + row_hash = {} + mapping.ordered_fields.zip(row).each do |key, value| + row_hash[key] ||= [] + row_hash[key] << value + end + + # Create object for this class + current_class_attrs = row_hash.slice(*internal_fields) + created_object = from_hash(current_class_attrs) + + # for each external class, try to look them up + external_classes&.each do |external_class| + look_up_external_class(row_hash, external_class, created_object) + end + + duplicate = save_object(created_object) + return duplicate if duplicate && duplicate != true + + return unless external_classes + + external_classes.each do |external_class| + create_external_class(row_hash, external_class, created_object) + end + + end + + private + + # -------------------------------------------------------------- + # Attempt to find an external object via look_up rules. + # If found, attach it to the parent object. + # -------------------------------------------------------------- + def look_up_external_class(row_hash, external_class, parent_obj) + if external_class.should_look_up && (found = external_class.look_up(row_hash)) + parent_obj.send("#{external_class.ref_class.name.downcase}=", found) + nil + end + end + + # -------------------------------------------------------------- + # When look_ups fail AND the external class allows creation, + # build and save new external objects. 
+ # + # Handles multi-row data such as: + # field1: ["A", "B"] + # field2: ["X", "Y"] + # + # Which turns into: + # [{field1: "A", field2: "X"}, {field1: "B", field2: "Y"}] + # -------------------------------------------------------------- + def create_external_class(row_hash, external_class, parent_obj) + return unless external_class.should_create + + current_class_attrs = row_hash.slice(*external_class.fields) + + object_sets = current_class_attrs.values.transpose + object_sets_with_keys = object_sets.map do |row_values| + Hash[current_class_attrs.keys.zip(row_values)] + end + + object_sets_with_keys.each do |attrs| + created_object = external_class.from_hash(attrs) + + # Set relationship to parent + created_object.send("#{@class_name.underscore}=", parent_obj) + + save_object(created_object) + end + end + + # -------------------------------------------------------------- + # Save an object safely, detecting: + # • Validation errors + # • Uniqueness violations + # + # Returns: + # • created_object on uniqueness error (for duplicate workflow) + # • true if saved + # -------------------------------------------------------------- + def save_object(created_object) + created_object.save! + rescue ActiveRecord::RecordInvalid => e + # Check if a specific attribute has a :uniqueness error + is_taken = created_object.errors.details.any? { |attribute, error_details_array| error_details_array.any? { |detail_hash| detail_hash[:error] == :taken } } + multiple_field_errors = created_object.errors.details.size > 1 + single_field_errors = created_object.errors.details.any? { |attribute, error_details_array| error_details_array.size > 1} + + if is_taken && !multiple_field_errors && !single_field_errors + return created_object + end + + raise StandardError.new(e.message) + + + rescue ActiveRecord::RecordNotUnique => e + puts "Unique constraint violation: #{e.message}" + created_object + end +end diff --git a/app/models/Item.rb b/app/models/Item.rb index 7d8cf2ed5..7e5eab28d 100644 --- a/app/models/Item.rb +++ b/app/models/Item.rb @@ -1,6 +1,12 @@ # frozen_string_literal: true class Item < ApplicationRecord + extend ImportableExportableHelper + mandatory_fields :txt, :weight, :seq, :question_type, :break_before + external_classes ExternalClass.new(Questionnaire, true, false, :name), + ExternalClass.new(QuestionAdvice, false, true) + + before_create :set_seq belongs_to :questionnaire # each item belongs to a specific questionnaire has_many :answers, dependent: :destroy, foreign_key: 'item_id' diff --git a/app/models/multiple_choice_checkbox.rb b/app/models/multiple_choice_checkbox.rb index a3ec77e99..e31ebddfb 100644 --- a/app/models/multiple_choice_checkbox.rb +++ b/app/models/multiple_choice_checkbox.rb @@ -4,7 +4,7 @@ class MultipleChoiceCheckbox < QuizItem def edit - quiz_question_choices = QuizQuestionChoice.where(question_id: id) + quiz_question_choices = QuizQuestionChoice.where(item_id: id) data = { id: id, @@ -24,7 +24,7 @@ def edit end def complete - quiz_question_choices = QuizQuestionChoice.where(question_id: id) + quiz_question_choices = QuizQuestionChoice.where(item_id: id) data = { id: id, @@ -38,7 +38,7 @@ def complete end def view_completed_item(user_answer) - quiz_question_choices = QuizQuestionChoice.where(question_id: id) + quiz_question_choices = QuizQuestionChoice.where(item_id: id) data = { question_choices: quiz_question_choices.map do |choice| diff --git a/app/models/multiple_choice_radio.rb b/app/models/multiple_choice_radio.rb index 910659074..0c5fdbfbd 100644 --- 
a/app/models/multiple_choice_radio.rb +++ b/app/models/multiple_choice_radio.rb @@ -4,7 +4,7 @@ class MultipleChoiceRadio < QuizItem def edit - quiz_question_choices = QuizQuestionChoice.where(question_id: id) + quiz_question_choices = QuizQuestionChoice.where(item_id: id) choices = quiz_question_choices.map.with_index(1) do |choice, index| { @@ -24,7 +24,7 @@ def edit end def complete - quiz_question_choices = QuizQuestionChoice.where(question_id: id) + quiz_question_choices = QuizQuestionChoice.where(item_id: id) choices = quiz_question_choices.map.with_index(1) do |choice, index| { @@ -35,14 +35,14 @@ def complete end { - question_id: id, + item_id: id, question_text: txt, choices: choices }.to_json end def view_completed_item(user_answer) - quiz_question_choices = QuizQuestionChoice.where(question_id: id) + quiz_question_choices = QuizQuestionChoice.where(item_id: id) choices = quiz_question_choices.map do |choice| { diff --git a/app/models/question_advice.rb b/app/models/question_advice.rb index 76b54c56d..65b1c7bab 100644 --- a/app/models/question_advice.rb +++ b/app/models/question_advice.rb @@ -1,6 +1,9 @@ # frozen_string_literal: true class QuestionAdvice < ApplicationRecord + extend ImportableExportableHelper + mandatory_fields :score, :advice + belongs_to :item def self.export_fields(_options) QuestionAdvice.columns.map(&:name) @@ -9,14 +12,14 @@ def self.export_fields(_options) def self.export(csv, parent_id, _options) questionnaire = Questionnaire.find(parent_id) questionnaire.items.each do |item| - QuestionAdvice.where(question_id: item.id).each do |advice| + QuestionAdvice.where(item_id: item.id).each do |advice| csv << advice.attributes.values end end end def self.to_json_by_question_id(question_id) - question_advices = QuestionAdvice.where(question_id: question_id).order(:id) + question_advices = QuestionAdvice.where(item_id: question_id).order(:id) question_advices.map do |advice| { score: advice.score, advice: advice.advice } end diff --git a/app/models/quiz_item.rb b/app/models/quiz_item.rb index 8367ca20e..60b3d9ef7 100644 --- a/app/models/quiz_item.rb +++ b/app/models/quiz_item.rb @@ -3,7 +3,8 @@ require 'json' class QuizItem < Item - has_many :quiz_question_choices, class_name: 'QuizQuestionChoice', foreign_key: 'question_id', inverse_of: false, dependent: :nullify + extend ImportableExportableHelper + has_many :quiz_question_choices, class_name: 'QuizQuestionChoice', foreign_key: 'item_id', inverse_of: false, dependent: :nullify def edit end diff --git a/app/models/role.rb b/app/models/role.rb index 3cce77975..2ae587aa6 100644 --- a/app/models/role.rb +++ b/app/models/role.rb @@ -6,11 +6,11 @@ class Role < ApplicationRecord has_many :users, dependent: :nullify # Role IDs - STUDENT_ID = 1 - TEACHING_ASSISTANT_ID = 2 + STUDENT_ID = 5 + TEACHING_ASSISTANT_ID = 4 INSTRUCTOR_ID = 3 - ADMINISTRATOR_ID = 4 - SUPER_ADMINISTRATOR_ID = 5 + ADMINISTRATOR_ID = 2 + SUPER_ADMINISTRATOR_ID = 1 def super_administrator? 
name['Super Administrator'] diff --git a/app/models/team.rb b/app/models/team.rb index 9c8813c08..927f5ff2a 100644 --- a/app/models/team.rb +++ b/app/models/team.rb @@ -1,6 +1,11 @@ # frozen_string_literal: true class Team < ApplicationRecord + extend ImportableExportableHelper + mandatory_fields :name, :type + external_classes ExternalClass.new(Assignment, true, false, :title), + ExternalClass.new(Course, true, false, :name), + ExternalClass.new(User, true, false, :name) # Core associations has_many :signed_up_teams, dependent: :destroy diff --git a/app/models/user.rb b/app/models/user.rb index 0e77e25dc..ef0ea7f11 100644 --- a/app/models/user.rb +++ b/app/models/user.rb @@ -1,9 +1,17 @@ # frozen_string_literal: true class User < ApplicationRecord + extend ImportableExportableHelper + mandatory_fields :name, :email, :password, :full_name + external_classes ExternalClass.new(Role, true, false, :name), + ExternalClass.new(Institution, true, false, :name) + available_actions_on_duplicate SkipRecordAction.new, UpdateExistingRecordAction.new, ChangeOffendingFieldAction.new + + has_secure_password after_initialize :set_defaults + # name must be lowercase and unique validates :name, presence: true, uniqueness: true, allow_blank: false # format: { with: /\A[a-z]+\z/, message: 'must be in lowercase' } @@ -34,14 +42,14 @@ class User < ApplicationRecord delegate :super_administrator?, to: :role def self.instantiate(record) - case record.role - when Role::TEACHING_ASSISTANT + case record.role.id + when Role::TEACHING_ASSISTANT_ID record.becomes(Ta) - when Role::INSTRUCTOR + when Role::INSTRUCTOR_ID record.becomes(Instructor) - when Role::ADMINISTRATOR + when Role::ADMINISTRATOR_ID record.becomes(Administrator) - when Role::SUPER_ADMINISTRATOR + when Role::SUPER_ADMINISTRATOR_ID record.becomes(SuperAdministrator) else super diff --git a/app/services/change_offending_field_action.rb b/app/services/change_offending_field_action.rb new file mode 100644 index 000000000..73b2480bb --- /dev/null +++ b/app/services/change_offending_field_action.rb @@ -0,0 +1,85 @@ +# =============================================================== +# ChangeOffendingFieldAction +# +# Strategy: **Automatically adjust the offending (unique) fields** +# so the imported row can still be inserted. +# +# This is the default strategy used by Import unless overridden. +# +# Example: +# existing.name = "Alice" +# incoming.name = "Alice" +# +# → incoming.name becomes "Alice_copy" +# +# If still not unique: +# "Alice_copy2", "Alice_copy3", etc. +# +# How it works: +# 1. Collect all fields with uniqueness validators +# 2. If incoming[field] == existing[field], mutate it +# 3. 
Keep incrementing until the value no longer exists in the DB +# +# =============================================================== +class ChangeOffendingFieldAction + def on_duplicate_record(klass, records) + # Normalize both existing and incoming row formats + existing = records[:existing] + incoming = records[:incoming].dup + + # Determine fields with uniqueness validators + unique_fields = unique_constraint_fields(klass) + + # For each unique field, adjust if conflict detected + unique_fields.each do |field| + next unless incoming[field] == existing[field] + + incoming[field] = generate_unique_value( + klass: klass, + field: field, + base: incoming[field] + ) + end + + incoming # Returning one resolved record + end + + private + + # Standardize input into a symbolized hash + def normalize(record) + return record.symbolize_keys if record.is_a?(Hash) + record.attributes.symbolize_keys + end + + # Extract all attributes validated as unique via ActiveRecord + def unique_constraint_fields(klass) + klass.validators + .select { |v| v.is_a?(ActiveRecord::Validations::UniquenessValidator) } + .flat_map(&:attributes) + .map(&:to_sym) + end + + # Generate a new unique value by appending suffixes until unique + # + # Example: + # base = "Alice" + # → "Alice_copy" + # → "Alice_copy2" + # → "Alice_copy3" + # + def generate_unique_value(klass:, field:, base:) + candidate = base.to_s + counter = 1 + + # Keep generating values until one does not exist in DB + while klass.exists?(field => candidate) + candidate = + "#{base}_copy#{counter == 1 ? '' : counter}" + + counter += 1 + end + + candidate + end +end diff --git a/app/services/export.rb b/app/services/export.rb new file mode 100644 index 000000000..75ceffb43 --- /dev/null +++ b/app/services/export.rb @@ -0,0 +1,61 @@ +# app/services/export.rb + +## +# Export +# +# This service provides simple, consistent export functionality for any +# array of hashes. Each hash represents one “row” of data, and the keys +# represent column names. The Export class can convert these rows +# into CSV, JSON, or XML. +# +# Example input format: +# [ +# { id: 1, name: "Team 1", members: "Alice,Bob" }, +# { id: 2, name: "Team 2", members: "Carol,Dan" } +# ] +# +# The class intentionally does NOT perform queries itself — it expects +# the controller or the caller to assemble the dataset. +# +class Export + + ## + # Convert the dataset into CSV format. 
+ # + # This generates: + # • A header row using the keys of the first hash + # • One CSV row for each hash using its values + # + # Example output: + # id,name,members + # 1,Team 1,Alice; Bob + # 2,Team 2,Carol; Dan + # + def self.perform(export_class, ordered_headers) + mapping = FieldMapping.from_header(export_class, ordered_headers) + + CSV.generate do |csv| + class_fields = mapping.ordered_fields.select{ |ele| export_class.internal_fields.include?(ele) } + + + # Extract column headers from the first row's keys + csv << ordered_headers + + # Insert each row in order, using the values of the hash + export_class.all.each do |record| + row = class_fields.map{|f| record.send(f)} + + export_class.external_classes.each do |external_class| + ext_class_fields = mapping.ordered_fields.select{ |ele| external_class.fields.include?(ele) } + found_record = record.send(external_class.ref_class.name.underscore) + row += ext_class_fields.map do |f| + found_record.send(ExternalClass.unappended_class_name(external_class.ref_class, f)) if f + end + end + + csv << row + end + end + end + +end diff --git a/app/services/field_mapping.rb b/app/services/field_mapping.rb new file mode 100644 index 000000000..8df4a8975 --- /dev/null +++ b/app/services/field_mapping.rb @@ -0,0 +1,140 @@ +# app/services/field_mapping.rb +# +# =============================================================== +# FieldMapping +# +# This class defines how CSV fields are mapped to an internal +# ActiveRecord model’s attributes. It is used by the import/export +# service layer to: +# +# • Determine the order of fields in an exported CSV +# • Interpret CSV rows and produce attribute hashes +# • Build mappings based on CSV headers, if the import uses headers +# +# The mapping is intentionally simple: it stores an array of field +# names (strings) in the order that the import/export process should +# follow. +# +# =============================================================== +class FieldMapping + attr_reader :model_class, :ordered_fields + + # -------------------------------------------------------------- + # Initialize a new mapping. + # + # model_class: + # An ActiveRecord model class (e.g., User, Team, Assignment) + # + # ordered_fields: + # Array of field names that define the order CSV fields appear + # in. We convert everything to strings to ensure consistent + # look_ups (symbols vs strings cause unnecessary mismatches). + # + # Example: + # FieldMapping.new(User, [:email, "first_name", :last_name]) + # + # Output: + # @ordered_fields = ["email", "first_name", "last_name"] + # -------------------------------------------------------------- + def initialize(model_class, ordered_fields) + @model_class = model_class + @ordered_fields = ordered_fields.map(&:to_s) + end + + # -------------------------------------------------------------- + # Build a mapping using the header row from a CSV file. + # + # header_row: + # Array of strings taken from the first row of a CSV file: + # ["Email", "Last Name", "First Name"] + # + # How matching works: + # - Normalize headers (strip whitespace, lowercase comparison) + # - Compare headers case-insensitively against all internal + external + # fields allowed by the model. + # - Only headers that match valid model fields are kept. 
+ # + # Example: + # model_class.internal_and_external_fields = [:email, :first_name, :last_name] + # + # headers = ["EMAIL", "First Name", "Ignored Column"] + # + # matched = ["email", "first_name"] + # + # -------------------------------------------------------------- + def self.from_header(model_class, header_row) + # Normalize header strings + header_row = header_row.map { |h| h.to_s.strip } + + # Retrieve valid model fields (convert to strings for comparison) + valid_fields = model_class.internal_and_external_fields.map(&:to_s) + + # Match CSV headers to valid model fields (case-insensitive) + matched = header_row.filter_map do |h| + valid_fields.find { |f| f.casecmp?(h) } + end + + new(model_class, matched) + end + + # -------------------------------------------------------------- + # Returns the internal CSV header row for export. + # + # This is simply the list of ordered fields. + # -------------------------------------------------------------- + def headers + ordered_fields + end + + # -------------------------------------------------------------- + # Detect duplicate headers in the mapping. + # + # Useful for import validation, e.g., if a CSV contains: + # ["name", "email", "email"] + # + # Returns: + # ["email"] + # + # -------------------------------------------------------------- + def duplicate_headers + ordered_fields + .group_by { |h| h } + .select { |_k, v| v.size > 1 } + .keys + end + + # -------------------------------------------------------------- + # Given an ActiveRecord instance, extract values in the order needed + # for CSV export. + # + # Example: + # ordered_fields = ["email", "first_name"] + # record.email → "bob@example.com" + # record.first_name → "Bob" + # + # Output: + # ["bob@example.com", "Bob"] + # + # -------------------------------------------------------------- + def values_for(record) + ordered_fields.map { |f| record.public_send(f) } + end + + # -------------------------------------------------------------- + # Convert mapping to a JSON-friendly structure. + # + # Used by APIs or import UI to remember mapping preferences. + # + # Example output: + # { + # model_class: "User", + # ordered_fields: ["email", "first_name"] + # } + # -------------------------------------------------------------- + def to_h + { + model_class: model_class.name, + ordered_fields: ordered_fields + } + end +end diff --git a/app/services/import.rb b/app/services/import.rb new file mode 100644 index 000000000..639951835 --- /dev/null +++ b/app/services/import.rb @@ -0,0 +1,142 @@ +# frozen_string_literal: true + +require 'csv' + +# By default, if the caller does not specify a duplicate action, +# we use ChangeOffendingFieldAction. This ensures the importer +# always has a duplicate-resolution strategy. +DEFAULT_DUPLICATE_ACTION = ChangeOffendingFieldAction.new + +## +# Import class +# +# This class handles end-to-end CSV importing for any model that includes +# the ImportableExportable mixin. Its responsibilities include: +# +# • Loading CSV data +# • Mapping CSV columns into model attributes +# • Attempting to save each record +# • Collecting duplicate rows +# • Handling duplicates through a DuplicateAction strategy object +# +# The importer does NOT save the duplicates immediately. Instead it delegates +# conflict resolution to DuplicateAction subclasses. 
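+# A minimal usage sketch (file name, field order and duplicate strategy are
+# illustrative):
+#   service = Import.new(klass: User, file: "users.csv",
+#                        headers: ["name", "email", "password", "full_name"],
+#                        dup_action: SkipRecordAction.new)
+#   service.perform(false)   # => { imported: <count>, duplicates: <count> }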
+# +class Import + ## + # Initializes an Import instance + # + # @param klass [Class] ActiveRecord model to import into + # @param file [String] path to CSV file + # @param mapping [FieldMapping, nil] optional mapping override + # @param dup_action [DuplicateAction, nil] optional duplicate handler override + # + def initialize(klass:, file:, headers: nil, dup_action: nil) + @klass = klass + @file = file + @headers = headers + @duplicate_action = dup_action || DEFAULT_DUPLICATE_ACTION + end + + # -------------------------------------------------------------- + # MAIN IMPORT PROCESS + # -------------------------------------------------------------- + + ## + # Runs the full import: + # 1. Builds or uses existing field mapping + # 2. Parses the CSV into attribute hashes + # 3. Attempts to insert each row + # 4. On failure, collects duplicates into groups + # 5. Processes duplicate groups with assigned DuplicateAction + # + # Returns a summary with :imported and :duplicates count + # + def perform(use_headers) + duplicate_groups = [] # Will hold duplicate row sets + successful_inserts = 0 # Counter for successful saves + + # Call the model-level importer (defined in each model using the import mixin) + dups = @klass.try_import_records( + @file, + @headers, + use_headers + ) + + dups.each {|dup| duplicate_groups << normalize_duplicate(dup)} + + + # Let the duplicate action process all collected conflicts + process_duplicates(@klass, duplicate_groups) + + + # Return summary of import results + { + imported: successful_inserts, + duplicates: duplicate_groups.length + } + end + + private + + # -------------------------------------------------------------- + # DUPLICATE PROCESSING + # -------------------------------------------------------------- + + ## + # Normalizes duplicate information into a two-element array: + # + # [ existing_record_hash, incoming_record_hash ] + # + # Where: + # • existing_record_hash may be {} if not found in DB + # • incoming_record_hash is always the failed attributes + # + # This format is used by DuplicateAction subclasses to determine + # how the conflict should be resolved. + # + def normalize_duplicate(incoming_obj) + # Try to find the existing record using the primary key value + field = find_offending_field(incoming_obj) + + value = {} + value[field] = incoming_obj.as_json()[field.to_s] + + existing = @klass.find_by(value) + { + existing: existing, # Existing row (maybe empty) + incoming: incoming_obj # Incoming row + } + end + + def find_offending_field(incoming_obj) + incoming_obj.validate + incoming_obj.errors.details.each do |attribute, error_details_array| + return attribute if error_details_array.any? { |detail_hash| detail_hash[:error] == :taken } + end + end + + ## + # For every duplicate group (existing, incoming), call the provided duplicate + # action strategy. If the strategy returns an array of cleaned/merged rows, + # reinsert them into the DB. + # + # A duplicate action is expected to implement: + # + # on_duplicate_record(klass:, records:) + # + def process_duplicates(klass, groups) + groups.each do |records| + processed = @duplicate_action.on_duplicate_record( + klass, + records + ) + + # If the duplicate action returns nil, it means “skip insertion” + next if processed.nil? + + processed.save! 
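+      # (Note on the strategies shipped with this change: SkipRecordAction
+      #  returns nil and is skipped above, while UpdateExistingRecordAction and
+      #  ChangeOffendingFieldAction return a record that is persisted here.)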
+ end + end + +end diff --git a/app/services/skip_record_action.rb b/app/services/skip_record_action.rb new file mode 100644 index 000000000..e231c93f5 --- /dev/null +++ b/app/services/skip_record_action.rb @@ -0,0 +1,18 @@ +# =============================================================== +# SkipRecordAction +# +# Strategy: **Ignore the incoming row entirely.** +# +# Usage example: +# - User chooses "Skip duplicates" on import +# - Any row that violates uniqueness constraints is dropped +# +# Behavior: +# Returning `nil` instructs Import.perform to do nothing. +# =============================================================== +class SkipRecordAction + def on_duplicate_record(klass, + records) + nil + end +end diff --git a/app/services/update_existing_record_action.rb b/app/services/update_existing_record_action.rb new file mode 100644 index 000000000..d079b76bb --- /dev/null +++ b/app/services/update_existing_record_action.rb @@ -0,0 +1,37 @@ +# =============================================================== +# UpdateExistingRecordAction +# +# Strategy: **Merge all duplicates into a single updated record.** +# +# Meaning: +# - If both existing and incoming records have data, +# incoming values overwrite existing ones (unless nil). +# +# Example: +# existing = { id: 5, name: "Alice", score: 80 } +# incoming = { id: 5, name: "Alice B.", score: nil } +# +# result = { id: 5, name: "Alice B.", score: 80 } +# +# Use case: +# "Update existing records with imported values" +# +# Importer will delete the original conflicting record and replace it +# with the merged one. +# =============================================================== +class UpdateExistingRecordAction + + def on_duplicate_record(klass, records) + merged = {} + + existing = records[:existing] + + klass.mandatory_fields.each do |field| + value = {} + value[field] = records[:incoming].send(field) + existing.send(:assign_attributes, value) + end + + existing + end +end diff --git a/config/routes.rb b/config/routes.rb index 25642363c..4ed345a3e 100644 --- a/config/routes.rb +++ b/config/routes.rb @@ -149,4 +149,16 @@ get '/:participant_id/instructor_review', to: 'grades#instructor_review' end end -end \ No newline at end of file + resources :import, path: :import, only: [] do + collection do + get "/:class", to: "import#index" + post "/:class", to: "import#import" + end + end + resources :export, path: :export, only: [] do + collection do + get "/:class", to: "export#index" + post "/:class", to: "export#export" + end + end +end diff --git a/db/migrate/20251129040855_rename_item_id_in_question_tables.rb b/db/migrate/20251129040855_rename_item_id_in_question_tables.rb new file mode 100644 index 000000000..ec37dc86a --- /dev/null +++ b/db/migrate/20251129040855_rename_item_id_in_question_tables.rb @@ -0,0 +1,7 @@ +class RenameItemIdInQuestionTables < ActiveRecord::Migration[8.0] + def change + rename_column :answers, :question_id, :item_id + rename_column :question_advices, :question_id, :item_id + rename_column :quiz_question_choices, :question_id, :item_id + end +end diff --git a/db/schema.rb b/db/schema.rb index d3a15fcfa..73da0b160 100644 --- a/db/schema.rb +++ b/db/schema.rb @@ -10,7 +10,7 @@ # # It's strongly recommended that you check this file into your version control system. 
-ActiveRecord::Schema[8.0].define(version: 2025_10_29_071649) do +ActiveRecord::Schema[8.0].define(version: 2025_11_29_040855) do create_table "account_requests", charset: "utf8mb4", collation: "utf8mb4_0900_ai_ci", force: :cascade do |t| t.string "username" t.string "full_name" @@ -248,12 +248,12 @@ end create_table "question_advices", charset: "utf8mb4", collation: "utf8mb4_0900_ai_ci", force: :cascade do |t| - t.bigint "question_id", null: false + t.bigint "item_id", null: false t.integer "score" t.text "advice" t.datetime "created_at", null: false t.datetime "updated_at", null: false - t.index ["question_id"], name: "index_question_advices_on_question_id" + t.index ["item_id"], name: "index_question_advices_on_item_id" end create_table "question_types", charset: "utf8mb4", collation: "utf8mb4_0900_ai_ci", force: :cascade do |t| @@ -282,7 +282,7 @@ end create_table "quiz_question_choices", id: :integer, charset: "latin1", force: :cascade do |t| - t.integer "question_id" + t.integer "item_id" t.text "txt" t.boolean "iscorrect", default: false t.datetime "created_at", null: false @@ -428,7 +428,7 @@ add_foreign_key "participants", "join_team_requests" add_foreign_key "participants", "teams" add_foreign_key "participants", "users" - add_foreign_key "question_advices", "items", column: "question_id" + add_foreign_key "question_advices", "items" add_foreign_key "roles", "roles", column: "parent_id", on_delete: :cascade add_foreign_key "sign_up_topics", "assignments" add_foreign_key "signed_up_teams", "sign_up_topics" diff --git a/spec/fixtures/files/empty.csv b/spec/fixtures/files/empty.csv new file mode 100644 index 000000000..e69de29bb diff --git a/spec/fixtures/files/empty_with_headers.csv b/spec/fixtures/files/empty_with_headers.csv new file mode 100644 index 000000000..ed444a5d4 --- /dev/null +++ b/spec/fixtures/files/empty_with_headers.csv @@ -0,0 +1 @@ +Name,Email,Password,Full Name,Role ID \ No newline at end of file diff --git a/spec/fixtures/files/import_test.csv b/spec/fixtures/files/import_test.csv new file mode 100644 index 000000000..896171415 --- /dev/null +++ b/spec/fixtures/files/import_test.csv @@ -0,0 +1,2 @@ +id,name +1,Alice \ No newline at end of file diff --git a/spec/fixtures/files/multiple_users_no_headers.csv b/spec/fixtures/files/multiple_users_no_headers.csv new file mode 100644 index 000000000..5c4114233 --- /dev/null +++ b/spec/fixtures/files/multiple_users_no_headers.csv @@ -0,0 +1,2 @@ +John,jdoe@email.com,password,John Doe,Student +Jane,jndoe@email.com,password,Jane Doe,Teaching Assistant diff --git a/spec/fixtures/files/multiple_users_with_headers.csv b/spec/fixtures/files/multiple_users_with_headers.csv new file mode 100644 index 000000000..22e7a7172 --- /dev/null +++ b/spec/fixtures/files/multiple_users_with_headers.csv @@ -0,0 +1,3 @@ +Name,Email,Password,Full Name,Role ID +John,jdoe@email.com,password,John Doe,4 +Jane,jndoe@email.com,password,Jane Doe,5 diff --git a/spec/fixtures/files/questionnaire_item_with_headers.csv b/spec/fixtures/files/questionnaire_item_with_headers.csv new file mode 100644 index 000000000..6c4d21c72 --- /dev/null +++ b/spec/fixtures/files/questionnaire_item_with_headers.csv @@ -0,0 +1,2 @@ +txt,weight,seq,Question type,Break before,Questionnaire Name,Question Advice Advice,Question Advice Score,Question Advice Advice,Question Advice Score +test,10,2,dropdown,TRUE,Test Questionnaire,okay,1,good,2 diff --git a/spec/fixtures/files/single_user_email_invalid.csv b/spec/fixtures/files/single_user_email_invalid.csv new file mode 100644 
index 000000000..3e63bb2ff --- /dev/null +++ b/spec/fixtures/files/single_user_email_invalid.csv @@ -0,0 +1,2 @@ +Name,Email,Password,Full Name,Role ID +John,wrong,password,John Doe,4 diff --git a/spec/fixtures/files/single_user_no_headers.csv b/spec/fixtures/files/single_user_no_headers.csv new file mode 100644 index 000000000..525ea96d4 --- /dev/null +++ b/spec/fixtures/files/single_user_no_headers.csv @@ -0,0 +1 @@ +John,jdoe@email.com,password,John Doe,Student diff --git a/spec/fixtures/files/single_user_role_doe_not_exist.csv b/spec/fixtures/files/single_user_role_doe_not_exist.csv new file mode 100644 index 000000000..2c1ace5b3 --- /dev/null +++ b/spec/fixtures/files/single_user_role_doe_not_exist.csv @@ -0,0 +1,2 @@ +Name,Email,Password,Full Name,Role ID +John,jdoe@email.com,password,John Doe,10 diff --git a/spec/fixtures/files/single_user_with_headers.csv b/spec/fixtures/files/single_user_with_headers.csv new file mode 100644 index 000000000..687abaaac --- /dev/null +++ b/spec/fixtures/files/single_user_with_headers.csv @@ -0,0 +1,2 @@ +Name,Email,Password,Full Name,Role ID +John,jdoe@email.com,password,John Doe,4 diff --git a/spec/fixtures/files/single_user_with_headers_changed.csv b/spec/fixtures/files/single_user_with_headers_changed.csv new file mode 100644 index 000000000..f9765a73a --- /dev/null +++ b/spec/fixtures/files/single_user_with_headers_changed.csv @@ -0,0 +1,2 @@ +Name,Email,Password,Full Name,Role ID +John,jdoe@email.com,password,John Mulch,4 diff --git a/spec/fixtures/files/users_duplicate_records.csv b/spec/fixtures/files/users_duplicate_records.csv new file mode 100644 index 000000000..374fe5079 --- /dev/null +++ b/spec/fixtures/files/users_duplicate_records.csv @@ -0,0 +1,6 @@ +Name,Email,Password,Full Name,Role ID +John,jdoe@email.com,password,John Doe,4 +John,jdoe@email.com,password,John Doe,4 +John,jdoe@email.com,password,John Doe,4 +Jane,jndoe@email.com,password,Jane Doe,5 +Jane,jndoe@email.com,password,Jane Doe,5 diff --git a/spec/helpers/import_export_spec.rb b/spec/helpers/import_export_spec.rb new file mode 100644 index 000000000..1ae3bec77 --- /dev/null +++ b/spec/helpers/import_export_spec.rb @@ -0,0 +1,132 @@ +# frozen_string_literal: true + +require 'rails_helper' +require 'csv' + +RSpec.describe ImportableExportableHelper, type: :helper do + include RolesHelper + + + before(:all) do + @roles = create_roles_hierarchy + + @institution = Institution.create!(name: 'NC State') + + + @instructor = Instructor.create!( + name: 'instructor', + full_name: 'Instructor User', + email: 'instructor@example.com', + password_digest: 'password', + role: @roles[:instructor], + institution: @institution + ) + + @questionnaire = Questionnaire.create!( + name: 'Test Questionnaire', + questionnaire_type: '', + private: true, + min_question_score: 1, + max_question_score: 10, + instructor: @instructor + ) + end + + + describe 'Create tests for each of the different importable classes' do + it 'Import a class with no headers' do + expect(User.count).to eq(1) + + csv_file = file_fixture('single_user_no_headers.csv') + headers = ['Name', 'Email', 'Password', 'Full Name', 'Role Name'] + + pp User.try_import_records(csv_file, headers, false) + + expect(User.count).to eq(2) + expect(User.find_by(email: 'jdoe@email.com')).to be_present + end + + it 'Import a class with headers' do + expect(User.count).to eq(1) + + csv_file = file_fixture('single_user_with_headers.csv') + + User.try_import_records(csv_file, nil, use_header: true) + + expect(User.count).to eq(2) + 
expect(User.find_by(email: 'jdoe@email.com')).to be_present + end + + it 'Import a file with multiple records' do + expect(User.count).to eq(1) + + csv_file = file_fixture('multiple_users_with_headers.csv') + + User.try_import_records(csv_file, nil, use_header: true) + + expect(User.count).to eq(3) + expect(User.find_by(email: 'jdoe@email.com')).to be_present + expect(User.find_by(email: 'jndoe@email.com')).to be_present + end + + it 'Import a class with external lookup and create classes, and can take duplicate headers' do + expect(Questionnaire.count).to eq(1) + expect(Questionnaire.find_by(name: 'Test Questionnaire')).to be_present + expect(QuizItem.count).to eq(0) + expect(QuestionAdvice.count).to eq(0) + + csv_file = file_fixture('questionnaire_item_with_headers.csv') + QuizItem.try_import_records(csv_file, nil, true) + + expect(QuizItem.count).to eq(1) + expect(QuizItem.find_by(txt: 'test')).to be_present + + expect(QuestionAdvice.count).to eq(2) + + advice_one = QuestionAdvice.find_by(advice: 'okay') + expect(advice_one).to be_present + expect(advice_one.score).to eq(1) + expect(advice_one.item.txt).to eq('test') + + advice_two = QuestionAdvice.find_by(advice: 'good') + expect(advice_two).to be_present + expect(advice_two.score).to eq(2) + expect(advice_two.item.txt).to eq('test') + end + end + + + # * Create a test with external lookup class that doesn't exist + # * Create a test with an empty CSV (With Headers) + # * Create a test with an empty CSV (Without Headers) + describe 'Create Tests to test Errors/Edge Cases' do + it 'Import a class with an invalid field (User with invalid email)' do + csv_file = file_fixture('single_user_email_invalid.csv') + + expect {User.try_import_records(csv_file, nil, true)}.not_to change(User, :count) + end + + it 'Import a class with external lookup class that does not exist' do + expect(User.count).to eq(1) + + csv_file = file_fixture('single_user_role_doe_not_exist.csv') + + expect { User.try_import_records(csv_file, nil, true) }.not_to change(User, :count) + + expect(User.count).to eq(1) + expect(User.find_by(email: 'jdoe@email.com')).not_to be_present + end + + it 'Import an empty CSV (With Headers)' do + csv_file = file_fixture('empty_with_headers.csv') + + expect{User.try_import_records(csv_file, nil, true)}.not_to change(User, :count) + end + + it 'Import an empty CSV (Without Headers)' do + csv_file = file_fixture('empty.csv') + + expect{User.try_import_records(csv_file, [], false)}.not_to change(User, :count) + end + end +end diff --git a/spec/integration/export_controller_spec.rb b/spec/integration/export_controller_spec.rb new file mode 100644 index 000000000..90d22c122 --- /dev/null +++ b/spec/integration/export_controller_spec.rb @@ -0,0 +1,100 @@ +# frozen_string_literal: true +require "rails_helper" + +RSpec.describe "Export API", type: :request do + # + # Authentication + authorization bypass + # + before do + allow_any_instance_of(JwtToken) + .to receive(:authenticate_request!) 
+ .and_return(true) + + allow_any_instance_of(Authorization) + .to receive(:authorize) + .and_return(true) + end + + # + # Fake model used for constantize + # + class FakeModel + def self.mandatory_fields; ["id", "name"]; end + def self.optional_fields; ["email"]; end + def self.external_fields; ["institution"]; end + end + + describe "GET /export/:class" do + it "returns mandatory, optional, and external fields with status 200" do + get "/export/FakeModel" + + expect(response).to have_http_status(:ok) + + json = JSON.parse(response.body) + + expect(json["mandatory_fields"]).to eq(["id", "name"]) + expect(json["optional_fields"]).to eq(["email"]) + expect(json["external_fields"]).to eq(["institution"]) + end + end + + describe "POST /export/:class" do + it "returns 200 and calls Export.perform with ordered fields" do + ordered_fields = ["id", "name"] + export_return = "fake_csv_data" + + expect(Export).to receive(:perform) + .with(FakeModel, ordered_fields) + .and_return(export_return) + + post "/export/FakeModel", params: { + ordered_fields: ordered_fields.to_json + } + + expect(response).to have_http_status(:ok) + json = JSON.parse(response.body) + + expect(json["message"]).to eq("FakeModel has been exported!") + expect(json["file"]).to eq("fake_csv_data") + end + + it "passes nil ordered_fields when none are provided" do + export_return = "csv_without_ordering" + + expect(Export).to receive(:perform) + .with(FakeModel, nil) + .and_return(export_return) + + post "/export/FakeModel" + + expect(response).to have_http_status(:ok) + json = JSON.parse(response.body) + expect(json["file"]).to eq("csv_without_ordering") + end + + it "returns 422 if constantize fails" do + post "/export/DoesNotExist" + + expect(response.status).to eq(422) + end + + it "returns 422 if Export.perform raises an error" do + allow(Export).to receive(:perform) + .and_raise(StandardError.new("Boom!")) + + post "/export/FakeModel", params: { ordered_fields: ["id"].to_json } + + expect(response.status).to eq(422) + json = JSON.parse(response.body) + expect(json["error"]).to eq("Boom!") + end + + it "returns 422 if ordered_fields is invalid JSON" do + post "/export/FakeModel", params: { + ordered_fields: "not-json" + } + + expect(response.status).to eq(422) + end + end +end diff --git a/spec/integration/import_controller_spec.rb b/spec/integration/import_controller_spec.rb new file mode 100644 index 000000000..bae8d9958 --- /dev/null +++ b/spec/integration/import_controller_spec.rb @@ -0,0 +1,149 @@ +require "rails_helper" + +RSpec.describe "Import API", type: :request do + # + # Disable BOTH authentication layers: + # • JwtToken.authenticate_request! + # • Authorization.authorize + # + before do + allow_any_instance_of(JwtToken) + .to receive(:authenticate_request!) 
+ .and_return(true) + + allow_any_instance_of(Authorization) + .to receive(:authorize) + .and_return(true) + end + + # + # Stub a fake model class for import operations + # + before do + stub_const("FakeModel", Class.new do + class << self + attr_accessor :mandatory_fields, :optional_fields, :external_fields + end + + def self.try_import_records(*args); end + end) + + allow(FakeModel).to receive(:mandatory_fields).and_return(["id", "name"]) + allow(FakeModel).to receive(:optional_fields).and_return(["email"]) + allow(FakeModel).to receive(:external_fields).and_return(["mentor_id"]) + allow(FakeModel).to receive(:try_import_records) + end + + # + # Fixture file used for import + # + let(:file_path) { Rails.root.join("spec/fixtures/files/import_test.csv") } + let(:uploaded_file) { Rack::Test::UploadedFile.new(file_path, "text/csv") } + + # ------------------------------------------------------------ + # BASIC TESTS + # ------------------------------------------------------------ + + describe "GET /import/:class" do + it "returns metadata with status 200" do + get "/import/FakeModel" + + expect(response.status).to eq(200) + + json = JSON.parse(response.body) + expect(json["mandatory_fields"]).to eq(["id", "name"]) + expect(json["optional_fields"]).to eq(["email"]) + expect(json["external_fields"]).to eq(["mentor_id"]) + expect(json["available_actions_on_dup"]).to eq([]) + end + end + + describe "POST /import/:class" do + it "returns 201 when import succeeds" do + post "/import/FakeModel", + params: { + csv_file: uploaded_file, + use_headers: true, + ordered_fields: ["id", "name"].to_json + } + + expect(response.status).to eq(201) + expect(JSON.parse(response.body)["message"]) + .to eq("FakeModel has been imported!") + end + + it "returns 422 when import raises an error" do + allow(FakeModel).to receive(:try_import_records) + .and_raise(StandardError.new("BOOM")) + + post "/import/FakeModel", + params: { + csv_file: uploaded_file, + use_headers: true + } + + expect(response.status).to eq(422) + expect(JSON.parse(response.body)["error"]).to eq("BOOM") + end + end + + # ------------------------------------------------------------ + # ADDITIONAL EDGE CASE TESTS + # ------------------------------------------------------------ + + describe "Additional ImportController tests" do + it "returns 500 if class constantization fails" do + get "/import/ThisModelDoesNotExist" + + expect(response.status).to eq(500) + expect(response.body).to include("uninitialized constant") + end + + it "returns 201 even if csv_file is missing (controller allows nil file)" do + post "/import/FakeModel", params: { use_headers: true } + + expect(response.status).to eq(201) + expect(JSON.parse(response.body)["message"]) + .to eq("FakeModel has been imported!") + end + + it "allows POST without ordered_fields" do + post "/import/FakeModel", + params: { + csv_file: uploaded_file, + use_headers: "false" + } + + expect(response.status).to eq(201) + end + + it "correctly passes use_headers as boolean" do + post "/import/FakeModel", + params: { + csv_file: uploaded_file, + use_headers: "false", + ordered_fields: ["id"].to_json + } + + expect(FakeModel) + .to have_received(:try_import_records) + .with( + kind_of(ActionDispatch::Http::UploadedFile), + ["id"], + use_header: false + ) + end + + it "returns 422 for malformed ordered_fields JSON" do + post "/import/FakeModel", + params: { + csv_file: uploaded_file, + use_headers: true, + ordered_fields: "{ this is invalid json" + } + + expect(response.status).to eq(422) + 
expect(JSON.parse(response.body)["error"]).to be_present + end + end +end diff --git a/spec/requests/api/v1/export_spec.rb b/spec/requests/api/v1/export_spec.rb new file mode 100644 index 000000000..75368e956 --- /dev/null +++ b/spec/requests/api/v1/export_spec.rb @@ -0,0 +1,51 @@ +# frozen_string_literal: true + +require 'swagger_helper' +require 'json_web_token' + +RSpec.describe 'Export API', type: :request do + before(:all) do + @roles = create_roles_hierarchy + end + + let(:adm) { + User.create( + name: "adma", + password_digest: "password", + role_id: @roles[:admin].id, + full_name: "Admin A", + email: "testuser@example.com", + mru_directory_path: "/home/testuser", + ) + } + + let(:token) { JsonWebToken.encode({id: adm.id}) } + let(:Authorization) { "Bearer #{token}" } + + path '/export/{id}' do + parameter name: 'id', in: :path, type: :string, description: 'class name' + + let(:id) { "User" } + + get('Show Class Fields for Export') do + tags 'Export' + response(200, 'successful') do + + after do |example| + example.metadata[:response][:content] = { + 'application/json' => { + example: JSON.parse(response.body, symbolize_names: true) + } + } + end + run_test! do |response| + data = JSON.parse(response.body) + pp data + expect(data["mandatory_fields"].length).to eq(4) + expect(data).to have_key("optional_fields") + expect(data).to have_key('external_fields') + end + end + end + end +end diff --git a/spec/requests/api/v1/import_spec.rb b/spec/requests/api/v1/import_spec.rb new file mode 100644 index 000000000..24c454696 --- /dev/null +++ b/spec/requests/api/v1/import_spec.rb @@ -0,0 +1,49 @@ +require 'swagger_helper' + +RSpec.describe 'api/v1/import', type: :request do + before(:all) do + @roles = create_roles_hierarchy + end + + let(:adm) { + User.create( + name: "adma", + password_digest: "password", + role_id: @roles[:admin].id, + full_name: "Admin A", + email: "testuser@example.com", + mru_directory_path: "/home/testuser", + ) + } + + let(:token) { JsonWebToken.encode({id: adm.id}) } + let(:Authorization) { "Bearer #{token}" } + + path '/import/{id}' do + parameter name: 'id', in: :path, type: :string, description: 'class name' + + let(:id) { "User" } + + get('Show Class Fields for Import') do + tags 'Import' + response(200, 'successful') do + + after do |example| + example.metadata[:response][:content] = { + 'application/json' => { + example: JSON.parse(response.body, symbolize_names: true) + } + } + end + run_test! do |response| + data = JSON.parse(response.body) + pp data + expect(data["mandatory_fields"].length).to eq(4) + expect(data).to have_key("optional_fields") + expect(data).to have_key('external_fields') + expect(data["available_actions_on_dup"].length).to eq(3) + end + end + end + end +end