diff --git a/cypress.config.ts b/cypress.config.ts index 668f4100a..106a068eb 100644 --- a/cypress.config.ts +++ b/cypress.config.ts @@ -15,7 +15,8 @@ export default defineConfig({ supportFile: 'tests/support/e2e.ts', setupNodeEvents(on) { on('file:preprocessor', vitePreprocessor(path.resolve(__dirname, './vite.config.ts'))) - } + }, + defaultCommandTimeout: 10_000 // https://docs.cypress.io/guides/references/configuration#Timeouts }, component: { indexHtmlFile: 'tests/support/component-index.html', diff --git a/package-lock.json b/package-lock.json index 7513fce0b..6af6ac5b3 100644 --- a/package-lock.json +++ b/package-lock.json @@ -9,7 +9,7 @@ "version": "0.1.0", "dependencies": { "@faker-js/faker": "7.6.0", - "@iqss/dataverse-client-javascript": "2.0.0-pr169.aa49f06", + "@iqss/dataverse-client-javascript": "2.0.0-pr187.f29c0e6", "@iqss/dataverse-design-system": "*", "@istanbuljs/nyc-config-typescript": "1.0.2", "@tanstack/react-table": "8.9.2", @@ -19,12 +19,14 @@ "@types/node": "16.18.12", "@types/react": "18.0.27", "@types/react-dom": "18.0.10", + "async-mutex": "0.5.0", "bootstrap": "5.2.3", "classnames": "2.5.1", "html-react-parser": "3.0.16", "i18next": "22.4.9", "i18next-browser-languagedetector": "7.0.1", "i18next-http-backend": "2.1.1", + "js-md5": "0.8.3", "lodash": "^4.17.21", "moment-timezone": "0.5.43", "react-bootstrap": "2.7.2", @@ -3672,9 +3674,9 @@ }, "node_modules/@iqss/dataverse-client-javascript": { "name": "@IQSS/dataverse-client-javascript", - "version": "2.0.0-pr169.aa49f06", - "resolved": "https://npm.pkg.github.com/download/@IQSS/dataverse-client-javascript/2.0.0-pr169.aa49f06/d8061b0af0068e530c6ef78b89e0a4ce668df4b3", - "integrity": "sha512-2D3wxWA87kU8EXltK7pBMGX9OoK7aecX869zbalbdtSNcPz9ATbZOOhDYbfaU+J526AyNTVQ6xlPTY5hWIRFvQ==", + "version": "2.0.0-pr187.f29c0e6", + "resolved": "https://npm.pkg.github.com/download/@IQSS/dataverse-client-javascript/2.0.0-pr187.f29c0e6/324f487a2b1437df668e34159404161888279638", + "integrity": "sha512-0OX9nmh7dY3Gg5euE7buCqTeyh+1B+GhFcDz2gJoND4oM3kIZalYS+bLsEoEekR2o25agP6b+ANyQ5kvZeFuig==", "license": "MIT", "dependencies": { "@types/node": "^18.15.11", @@ -17815,6 +17817,21 @@ "integrity": "sha512-csOlWGAcRFJaI6m+F2WKdnMKr4HhdhFVBk0H/QbJFMCr+uO2kwohwXQPxw/9OCxp05r5ghVBFSyioixx3gfkNQ==", "dev": true }, + "node_modules/async-mutex": { + "version": "0.5.0", + "resolved": "https://registry.npmjs.org/async-mutex/-/async-mutex-0.5.0.tgz", + "integrity": "sha512-1A94B18jkJ3DYq284ohPxoXbfTA5HsQ7/Mf4DEhcyLx3Bz27Rh59iScbB6EPiP+B+joue6YCxcMXSbFC1tZKwA==", + "license": "MIT", + "dependencies": { + "tslib": "^2.4.0" + } + }, + "node_modules/async-mutex/node_modules/tslib": { + "version": "2.7.0", + "resolved": "https://registry.npmjs.org/tslib/-/tslib-2.7.0.tgz", + "integrity": "sha512-gLXCKdN1/j47AiHiOkJN69hJmcbGTHI0ImLmbYLHykhgeN0jVGola9yVjFgzCUklsZQMW55o+dW7IXv3RCXDzA==", + "license": "0BSD" + }, "node_modules/asynckit": { "version": "0.4.0", "resolved": "https://registry.npmjs.org/asynckit/-/asynckit-0.4.0.tgz", @@ -29214,6 +29231,11 @@ "@sideway/pinpoint": "^2.0.0" } }, + "node_modules/js-md5": { + "version": "0.8.3", + "resolved": "https://registry.npmjs.org/js-md5/-/js-md5-0.8.3.tgz", + "integrity": "sha512-qR0HB5uP6wCuRMrWPTrkMaev7MJZwJuuw4fnwAzRgP4J4/F8RwtodOKpGp4XpqsLBFzzgqIO42efFAyz2Et6KQ==" + }, "node_modules/js-sdsl": { "version": "4.4.2", "resolved": "https://registry.npmjs.org/js-sdsl/-/js-sdsl-4.4.2.tgz", diff --git a/package.json b/package.json index 894d361f3..446462aaa 100644 --- a/package.json +++ b/package.json @@ -13,7 +13,7 @@ }, "dependencies": { "@faker-js/faker": "7.6.0", - "@iqss/dataverse-client-javascript": "2.0.0-pr169.aa49f06", + "@iqss/dataverse-client-javascript": "2.0.0-pr187.f29c0e6", "@iqss/dataverse-design-system": "*", "@istanbuljs/nyc-config-typescript": "1.0.2", "@tanstack/react-table": "8.9.2", @@ -44,7 +44,9 @@ "typescript": "4.9.5", "use-deep-compare": "1.2.1", "vite-plugin-istanbul": "4.0.1", - "web-vitals": "2.1.4" + "web-vitals": "2.1.4", + "js-md5": "0.8.3", + "async-mutex": "0.5.0" }, "scripts": { "start": "vite --base=/spa", diff --git a/src/files/domain/models/FileUploadState.ts b/src/files/domain/models/FileUploadState.ts index 854cb3035..672e9c42b 100644 --- a/src/files/domain/models/FileUploadState.ts +++ b/src/files/domain/models/FileUploadState.ts @@ -1,4 +1,6 @@ +import { UploadedFileDTO } from '@iqss/dataverse-client-javascript' import { FileSize, FileSizeUnit } from './FileMetadata' +import { UploadedFileDTOMapper } from '../../infrastructure/mappers/UploadedFileDTOMapper' export interface FileUploadState { progress: number @@ -17,6 +19,7 @@ export interface FileUploadState { description?: string tags: string[] restricted: boolean + checksumValue?: string } export interface FileUploaderState { @@ -94,6 +97,18 @@ export class FileUploadTools { return { state: oldState.state, uploaded: this.toUploaded(oldState.state) } } + static checksum( + file: File, + checksumValue: string, + oldState: FileUploaderState + ): FileUploaderState { + const fileUploadState = oldState.state.get(this.key(file)) + if (fileUploadState) { + fileUploadState.checksumValue = checksumValue + } + return { state: oldState.state, uploaded: this.toUploaded(oldState.state) } + } + static failed(file: File, oldState: FileUploaderState): FileUploaderState { const fileUploadState = oldState.state.get(this.key(file)) if (fileUploadState) { @@ -127,6 +142,21 @@ export class FileUploadTools { return { state: oldState.state, uploaded: this.toUploaded(oldState.state) } } + static mapToUploadedFilesDTOs(state: FileUploadState[]): UploadedFileDTO[] { + return state.map((uploadedFile) => + UploadedFileDTOMapper.toUploadedFileDTO( + uploadedFile.fileName, + uploadedFile.description, + uploadedFile.fileDir, + uploadedFile.tags, + uploadedFile.restricted, + uploadedFile.storageId as string, + uploadedFile.checksumValue as string, + uploadedFile.fileType === '' ? 'application/octet-stream' : uploadedFile.fileType // some browsers (e.g., chromium for .java files) fail to detect the mime type for some files and leave the fileType as an empty string, we use the default value 'application/octet-stream' in that case + ) + ) + } + private static toNewState( file: File, oldState: FileUploaderState diff --git a/src/files/domain/repositories/FileRepository.ts b/src/files/domain/repositories/FileRepository.ts index 9c2c30478..6888bd3a1 100644 --- a/src/files/domain/repositories/FileRepository.ts +++ b/src/files/domain/repositories/FileRepository.ts @@ -7,7 +7,7 @@ import { FilePaginationInfo } from '../models/FilePaginationInfo' import { FilePreview } from '../models/FilePreview' import { FilesWithCount } from '../models/FilesWithCount' import { FileHolder } from '../models/FileHolder' -import { FileUploadState } from '../models/FileUploadState' +import { UploadedFileDTO } from '@iqss/dataverse-client-javascript' export interface FileRepository { getAllByDatasetPersistentId: ( @@ -42,10 +42,5 @@ export interface FileRepository { abortController: AbortController, storageIdSetter: (storageId: string) => void ) => Promise - addUploadedFiles: (datasetId: number | string, files: FileUploadState[]) => Promise - addUploadedFile: ( - datasetId: number | string, - file: FileHolder, - storageId: string - ) => Promise + addUploadedFiles: (datasetId: number | string, files: UploadedFileDTO[]) => Promise } diff --git a/src/files/domain/useCases/addUploadedFiles.ts b/src/files/domain/useCases/addUploadedFiles.ts index b1ef42065..360a8497a 100644 --- a/src/files/domain/useCases/addUploadedFiles.ts +++ b/src/files/domain/useCases/addUploadedFiles.ts @@ -1,31 +1,16 @@ -import { FileUploadState } from '../models/FileUploadState' +import { UploadedFileDTO } from '@iqss/dataverse-client-javascript' import { FileRepository } from '../repositories/FileRepository' export function addUploadedFiles( fileRepository: FileRepository, datasetId: number | string, - files: FileUploadState[], + files: UploadedFileDTO[], done: () => void ): void { fileRepository .addUploadedFiles(datasetId, files) + .then(done) .catch((error: Error) => { throw new Error(error.message) }) - .finally(done) -} - -export function addUploadedFile( - fileRepository: FileRepository, - datasetId: number | string, - file: File, - storageId: string, - done: () => void -): void { - fileRepository - .addUploadedFile(datasetId, { file: file }, storageId) - .catch((error: Error) => { - throw new Error(error.message) - }) - .finally(done) } diff --git a/src/files/infrastructure/FileJSDataverseRepository.ts b/src/files/infrastructure/FileJSDataverseRepository.ts index de9ece5e6..2b204a17f 100644 --- a/src/files/infrastructure/FileJSDataverseRepository.ts +++ b/src/files/infrastructure/FileJSDataverseRepository.ts @@ -15,6 +15,8 @@ import { getFileDownloadCount, getFileUserPermissions, uploadFile as jsUploadFile, + addUploadedFilesToDataset, + UploadedFileDTO, ReadError } from '@iqss/dataverse-client-javascript' import { FileCriteria } from '../domain/models/FileCriteria' @@ -31,7 +33,6 @@ import { FilePermissions } from '../domain/models/FilePermissions' import { JSFilePermissionsMapper } from './mappers/JSFilePermissionsMapper' import { FilesWithCount } from '../domain/models/FilesWithCount' import { FileHolder } from '../domain/models/FileHolder' -import { FileUploadState } from '../domain/models/FileUploadState' const includeDeaccessioned = true @@ -300,13 +301,7 @@ export class FileJSDataverseRepository implements FileRepository { }) } - addUploadedFiles(_datasetId: number | string, _files: FileUploadState[]): Promise { - // TODO: not yet implemented - return new Promise(() => {}) - } - - addUploadedFile(datasetId: number | string, file: FileHolder, storageId: string): Promise { - return new Promise(() => {}) - // return addUploadedFilesToDataset.execute(datasetId, file.file) + addUploadedFiles(datasetId: number | string, uploadedFiles: UploadedFileDTO[]): Promise { + return addUploadedFilesToDataset.execute(datasetId, uploadedFiles) } } diff --git a/src/files/infrastructure/mappers/UploadedFileDTOMapper.ts b/src/files/infrastructure/mappers/UploadedFileDTOMapper.ts new file mode 100644 index 000000000..238027eba --- /dev/null +++ b/src/files/infrastructure/mappers/UploadedFileDTOMapper.ts @@ -0,0 +1,26 @@ +import { UploadedFileDTO } from '@iqss/dataverse-client-javascript' + +export class UploadedFileDTOMapper { + static toUploadedFileDTO( + fileName: string, + description: string | undefined, + fileDir: string, + tags: string[], + restricted: boolean, + storageId: string, + checksumValue: string, + fileType: string + ): UploadedFileDTO { + return { + fileName: fileName, + description: description, + directoryLabel: fileDir, + categories: tags, + restrict: restricted, + storageId: storageId, + checksumValue: checksumValue, + checksumType: 'md5', + mimeType: fileType + } + } +} diff --git a/src/sections/upload-dataset-files/FileUploader.tsx b/src/sections/upload-dataset-files/FileUploader.tsx index 2807722a3..99846397a 100644 --- a/src/sections/upload-dataset-files/FileUploader.tsx +++ b/src/sections/upload-dataset-files/FileUploader.tsx @@ -6,7 +6,7 @@ import { FileUploadTools, FileUploaderState } from '../../files/domain/models/Fi import styles from './FileUploader.module.scss' export interface FileUploaderProps { - upload: (files: File[]) => void + upload: (file: File) => void cancelTitle: string info: string selectText: string @@ -30,21 +30,14 @@ export function FileUploader({ const addFiles = (selectedFiles: FileList | null) => { if (selectedFiles && selectedFiles.length > 0) { - setFiles((alreadyAdded) => { - const selectedFilesArray = Array.from(selectedFiles) - const selectedFilesSet = new Set(selectedFilesArray.map((x) => FileUploadTools.key(x))) - const alreadyAddedFiltered = alreadyAdded.filter( - /* istanbul ignore next */ - (x) => !selectedFilesSet.has(FileUploadTools.key(x)) - ) - return [...alreadyAddedFiltered, ...selectedFilesArray] - }) + Array.from(selectedFiles).forEach((file) => addFile(file)) } } const addFile = (file: File) => { if (!files.some((x) => FileUploadTools.key(x) === FileUploadTools.key(file))) { setFiles((oldFiles) => [...oldFiles, file]) + upload(file) } } @@ -106,15 +99,11 @@ export function FileUploader({ } useEffect(() => { - upload(files) - }, [files, upload]) - - useEffect(() => { - setFiles((newFiles) => - newFiles.filter((x) => { - const res = !FileUploadTools.get(x, fileUploaderState).removed + setFiles((currentFiles) => + currentFiles.filter((file) => { + const res = !FileUploadTools.get(file, fileUploaderState).removed if (!res) { - cleanFileState(x) + cleanFileState(file) } return res }) diff --git a/src/sections/upload-dataset-files/UploadDatasetFiles.tsx b/src/sections/upload-dataset-files/UploadDatasetFiles.tsx index 7a21e05ed..6846a8a1c 100644 --- a/src/sections/upload-dataset-files/UploadDatasetFiles.tsx +++ b/src/sections/upload-dataset-files/UploadDatasetFiles.tsx @@ -1,5 +1,8 @@ import { useEffect, useState } from 'react' import { useTranslation } from 'react-i18next' +import { md5 } from 'js-md5' +import { Semaphore } from 'async-mutex' +import { useNavigate } from 'react-router-dom' import { FileRepository } from '../../files/domain/repositories/FileRepository' import { useLoading } from '../loading/LoadingContext' import { useDataset } from '../dataset/DatasetContext' @@ -9,7 +12,9 @@ import { FileUploader } from './FileUploader' import { FileUploadState, FileUploadTools } from '../../files/domain/models/FileUploadState' import { uploadFile } from '../../files/domain/useCases/uploadFile' import { UploadedFiles } from './uploaded-files-list/UploadedFiles' -import { addUploadedFile, addUploadedFiles } from '../../files/domain/useCases/addUploadedFiles' +import { addUploadedFiles } from '../../files/domain/useCases/addUploadedFiles' +import { Route } from '../Route.enum' +import { Stack } from '@iqss/dataverse-design-system' interface UploadDatasetFilesProps { fileRepository: FileRepository @@ -21,34 +26,39 @@ export const UploadDatasetFiles = ({ fileRepository: fileRepository }: UploadDat const { t } = useTranslation('uploadDatasetFiles') const [fileUploaderState, setState] = useState(FileUploadTools.createNewState([])) const [uploadingToCancelMap, setUploadingToCancelMap] = useState(new Map void>()) - const [semaphore, setSemaphore] = useState(new Set()) + const navigate = useNavigate() - const sleep = (delay: number) => new Promise((res) => setTimeout(res, delay)) const limit = 6 + const semaphore = new Semaphore(limit) - const acquireSemaphore = async (file: File) => { - const key = FileUploadTools.key(file) - setSemaphore((x) => (x.size >= limit ? x : x.add(key))) - while (!semaphore.has(key)) { - await sleep(500) - setSemaphore((x) => (x.size >= limit ? x : x.add(key))) - } - } - - const releaseSemaphore = (file: File) => { - setSemaphore((x) => { + const fileUploadFailed = (file: File) => { + setUploadingToCancelMap((x) => { x.delete(FileUploadTools.key(file)) return x }) + semaphore.release(1) } const fileUploadFinished = (file: File) => { - const key = FileUploadTools.key(file) - setUploadingToCancelMap((x) => { - x.delete(key) - return x - }) - releaseSemaphore(file) + const hash = md5.create() + const reader = file.stream().getReader() + reader + .read() + .then(async function updateHash({ done, value }) { + if (done) { + FileUploadTools.checksum(file, hash.hex(), fileUploaderState) + } else { + hash.update(value) + await updateHash(await reader.read()) + } + }) + .finally(() => { + setUploadingToCancelMap((x) => { + x.delete(FileUploadTools.key(file)) + return x + }) + semaphore.release(1) + }) } const canUpload = (file: File) => @@ -71,17 +81,10 @@ export const UploadDatasetFiles = ({ fileRepository: fileRepository }: UploadDat () => { setState(FileUploadTools.done(file, fileUploaderState)) fileUploadFinished(file) - addUploadedFile( - fileRepository, - dataset?.persistentId as string, - file, - FileUploadTools.get(file, fileUploaderState).storageId as string, - () => {} - ) }, () => { setState(FileUploadTools.failed(file, fileUploaderState)) - fileUploadFinished(file) + fileUploadFailed(file) }, (now) => setState(FileUploadTools.progress(file, now, fileUploaderState)), (storageId) => setState(FileUploadTools.storageId(file, storageId, fileUploaderState)) @@ -89,13 +92,9 @@ export const UploadDatasetFiles = ({ fileRepository: fileRepository }: UploadDat setUploadingToCancelMap((x) => x.set(key, cancel)) } - const upload = async (files: File[]) => { - for (const file of files) { - if (canUpload(file)) { - await acquireSemaphore(file) - uploadOneFile(file) - } - } + const upload = async (file: File) => { + await semaphore.acquire(1) + uploadOneFile(file) } const cleanup = (file: File) => { @@ -108,7 +107,6 @@ export const UploadDatasetFiles = ({ fileRepository: fileRepository }: UploadDat x.delete(key) return x }) - releaseSemaphore(file) } const cancelUpload = (file: File) => { @@ -141,8 +139,12 @@ export const UploadDatasetFiles = ({ fileRepository: fileRepository }: UploadDat const addFiles = (state: FileUploadState[]) => { setIsLoading(true) - const done = () => setIsLoading(false) - addUploadedFiles(fileRepository, dataset?.persistentId as string, state, done) + const done = () => { + setIsLoading(false) + navigate(`${Route.DATASETS}?persistentId=${dataset?.persistentId as string}&version=:draft`) + } + const uploadedFiles = FileUploadTools.mapToUploadedFilesDTOs(state) + addUploadedFiles(fileRepository, dataset?.persistentId as string, uploadedFiles, done) cleanAllState() } @@ -169,23 +171,25 @@ export const UploadDatasetFiles = ({ fileRepository: fileRepository }: UploadDat actionItemText={t('breadcrumbActionItem')} />
- - + + + +
)} diff --git a/src/sections/upload-dataset-files/uploaded-files-list/UploadedFiles.tsx b/src/sections/upload-dataset-files/uploaded-files-list/UploadedFiles.tsx index afd54715d..0dc60a345 100644 --- a/src/sections/upload-dataset-files/uploaded-files-list/UploadedFiles.tsx +++ b/src/sections/upload-dataset-files/uploaded-files-list/UploadedFiles.tsx @@ -79,14 +79,13 @@ export function UploadedFiles({ updateFiles([file]) } const updateSelected = (file: FileUploadState) => { - setSelected((current) => { - if (current.has(file)) { - current.delete(file) - } else { - current.add(file) - } - return new Set(current) - }) + const newSelected = new Set(selected) + if (newSelected.has(file)) { + newSelected.delete(file) + } else { + newSelected.add(file) + } + setSelected(newSelected) } const save = () => { addFiles(fileUploadState) diff --git a/src/stories/file/FileMockRepository.ts b/src/stories/file/FileMockRepository.ts index 48e7064b9..9bc8aba55 100644 --- a/src/stories/file/FileMockRepository.ts +++ b/src/stories/file/FileMockRepository.ts @@ -13,7 +13,7 @@ import { FilePreview } from '../../files/domain/models/FilePreview' import { FakerHelper } from '../../../tests/component/shared/FakerHelper' import { FilesWithCount } from '../../files/domain/models/FilesWithCount' import { FileHolder } from '../../files/domain/models/FileHolder' -import { FileUploadState } from '../../files/domain/models/FileUploadState' +import { UploadedFileDTO } from '@iqss/dataverse-client-javascript' export class FileMockRepository implements FileRepository { constructor(public readonly fileMock?: File) {} @@ -107,19 +107,7 @@ export class FileMockRepository implements FileRepository { return res() } - addUploadedFiles(_datasetId: number | string, _files: FileUploadState[]): Promise { - return new Promise((resolve) => { - setTimeout(() => { - resolve() - }, FakerHelper.loadingTimout()) - }) - } - - addUploadedFile( - _datasetId: number | string, - _file: FileHolder, - _storageId: string - ): Promise { + addUploadedFiles(_datasetId: number | string, _files: UploadedFileDTO[]): Promise { return new Promise((resolve) => { setTimeout(() => { resolve() diff --git a/tests/component/sections/upload-dataset-files/UploadDatasetFiles.spec.tsx b/tests/component/sections/upload-dataset-files/UploadDatasetFiles.spec.tsx index 7de43e55a..5a30bc95d 100644 --- a/tests/component/sections/upload-dataset-files/UploadDatasetFiles.spec.tsx +++ b/tests/component/sections/upload-dataset-files/UploadDatasetFiles.spec.tsx @@ -217,58 +217,6 @@ describe('UploadDatasetFiles', () => { cy.findByText('Select files to add').should('exist') }) - it('prevents more than 6 simultaneous uploads', () => { - const testDataset = DatasetMother.create() - - mountWithDataset(, testDataset) - - cy.findByTestId('drag-and-drop').as('dnd') - cy.get('@dnd').should('exist') - - const filenames: string[] = [ - 'users1.json', - 'users2.json', - 'users3.json', - 'users4.json', - 'users5.json', - 'users6.json', - 'users7.json', - 'users8.json', - 'users9.json', - 'users10.json' - ] - filenames.forEach((element) => { - cy.get('@dnd').selectFile( - { fileName: element, contents: [{ name: 'John Doe' }] }, - { action: 'drag-drop' } - ) - }) - cy.findAllByTitle('Cancel upload').should('have.length', 10) - cy.findAllByRole('progressbar').should('have.length', 6) - cy.findByText('Select files to add').should('exist') - const filenames2: string[] = [ - 'users11.json', - 'users12.json', - 'users13.json', - 'users14.json', - 'users15.json', - 'users16.json', - 'users17.json', - 'users18.json', - 'users19.json', - 'users20.json' - ] - filenames2.forEach((element) => { - cy.get('@dnd').selectFile( - { fileName: element, contents: [{ name: 'John Doe' }] }, - { action: 'drag-drop' } - ) - }) - cy.findByText('users20.json').should('exist') - cy.findAllByRole('progressbar').should('have.length', 6) - cy.findByText('Select files to add').should('exist') - }) - it('saves uploaded files', () => { const testDataset = DatasetMother.create() diff --git a/tests/e2e-integration/integration/files/FileUpload.spec.ts b/tests/e2e-integration/integration/files/FileUpload.spec.ts new file mode 100644 index 000000000..64b2ed688 --- /dev/null +++ b/tests/e2e-integration/integration/files/FileUpload.spec.ts @@ -0,0 +1,186 @@ +import { TestsUtils } from '../../shared/TestsUtils' +import { FileJSDataverseRepository } from '../../../../src/files/infrastructure/FileJSDataverseRepository' +import { DatasetJSDataverseRepository } from '../../../../src/dataset/infrastructure/repositories/DatasetJSDataverseRepository' +import { DatasetHelper } from '../../shared/datasets/DatasetHelper' +import { FileHelper } from '../../shared/files/FileHelper' +import { DatasetNonNumericVersion } from '../../../../src/dataset/domain/models/Dataset' +import chaiAsPromised from 'chai-as-promised' + +chai.use(chaiAsPromised) +const expect = chai.expect + +const fileRepository = new FileJSDataverseRepository() +const datasetRepository = new DatasetJSDataverseRepository() + +describe('DirectUpload', () => { + before(() => { + TestsUtils.setup() + }) + + beforeEach(() => { + TestsUtils.login() + }) + + it('should upload file and add it to the dataset', async () => { + const dataset = await DatasetHelper.create().then((datasetResponse) => + datasetRepository.getByPersistentId( + datasetResponse.persistentId, + DatasetNonNumericVersion.DRAFT + ) + ) + if (!dataset) throw new Error('Dataset not found') + + const singlePartFile = FileHelper.createSinglePartFileBlob() + const controller = new AbortController() + let storageId: string | undefined = undefined + + await fileRepository.uploadFile( + dataset.persistentId, + { file: singlePartFile }, + () => {}, + controller, + (sId) => { + storageId = sId + } + ) + + expect(storageId).to.be.not.undefined + if (storageId == undefined) { + throw new Error('storageId is undefined') + } + + await fileRepository.addUploadedFiles(dataset.persistentId, [ + { + fileName: 'test.json', + description: 'description text', + directoryLabel: '', + categories: ['tag'], + restrict: false, + storageId: storageId, + checksumValue: 'abc123', + checksumType: 'md5', + mimeType: 'application/json' + } + ]) + + const files = await fileRepository.getAllByDatasetPersistentId( + dataset.persistentId, + dataset.version + ) + expect(files).to.be.not.empty + expect(files[0].name).to.be.equal('test.json') + + await DatasetHelper.destroy(dataset.persistentId) + }) + + it('should upload 2 files and add it to the dataset', async () => { + const dataset = await DatasetHelper.create().then((datasetResponse) => + datasetRepository.getByPersistentId( + datasetResponse.persistentId, + DatasetNonNumericVersion.DRAFT + ) + ) + if (!dataset) throw new Error('Dataset not found') + + const singlePartFile1 = FileHelper.createSinglePartFileBlob() + const singlePartFile2 = FileHelper.createSinglePartFileBlob() + let storageId1: string | undefined = undefined + let storageId2: string | undefined = undefined + + const upload1 = fileRepository.uploadFile( + dataset.persistentId, + { file: singlePartFile1 }, + () => {}, + new AbortController(), + (sId) => { + storageId1 = sId + } + ) + + const upload2 = fileRepository.uploadFile( + dataset.persistentId, + { file: singlePartFile2 }, + () => {}, + new AbortController(), + (sId) => { + storageId2 = sId + } + ) + + await upload1 + await upload2 + + expect(storageId1).to.be.not.undefined + expect(storageId2).to.be.not.undefined + if (storageId1 == undefined) { + throw new Error('storageId1 is undefined') + } + if (storageId2 == undefined) { + throw new Error('storageId2 is undefined') + } + + await fileRepository.addUploadedFiles(dataset.persistentId, [ + { + fileName: 'test1.json', + description: 'description text', + directoryLabel: '', + categories: ['tag'], + restrict: false, + storageId: storageId1, + checksumValue: 'abc123', + checksumType: 'md5', + mimeType: 'application/json' + }, + { + fileName: 'test2.json', + description: 'description text', + directoryLabel: '', + categories: ['tag'], + restrict: false, + storageId: storageId2, + checksumValue: 'def456', + checksumType: 'md5', + mimeType: 'application/json' + } + ]) + + const files = await fileRepository.getAllByDatasetPersistentId( + dataset.persistentId, + dataset.version + ) + expect(files).to.be.not.empty + expect(files[0].name).to.be.equal('test1.json') + expect(files[1].name).to.be.equal('test2.json') + + await DatasetHelper.destroy(dataset.persistentId) + }) + + it('should not finish uploading file to destinations when user cancels immediately', async () => { + const dataset = await DatasetHelper.create().then((datasetResponse) => + datasetRepository.getByPersistentId( + datasetResponse.persistentId, + DatasetNonNumericVersion.DRAFT + ) + ) + if (!dataset) throw new Error('Dataset not found') + + const multipartFile = FileHelper.createMultipartFileBlob() + const controller = new AbortController() + + const upload = fileRepository.uploadFile( + dataset.persistentId, + { file: multipartFile }, + () => {}, + controller, + () => {} + ) + controller.abort() + await upload + .then(() => { + throw new Error('upload succeeded') + }) + .catch(() => {}) + + await DatasetHelper.destroy(dataset.persistentId) + }) +}) diff --git a/tests/e2e-integration/shared/files/FileHelper.ts b/tests/e2e-integration/shared/files/FileHelper.ts index fbf783dbb..a36958217 100644 --- a/tests/e2e-integration/shared/files/FileHelper.ts +++ b/tests/e2e-integration/shared/files/FileHelper.ts @@ -156,4 +156,30 @@ export class FileHelper extends DataverseApiHelper { static async delete(id: number) { return this.request(`/files/${id}`, 'DELETE') } + + static createSinglePartFileBlob(): File { + try { + return FileHelper.createFileBlobWithSize(1000, 'singlepart-file') + } catch (error) { + throw new Error(`Error while creating test singlepart file`) + } + } + + static createMultipartFileBlob(): File { + try { + return FileHelper.createFileBlobWithSize(1273741824, 'multipart-file') + } catch (error) { + throw new Error(`Error while creating test multipart file`) + } + } + + private static createFileBlobWithSize(fileSizeInBytes: number, fileName: string): File { + const blob = FileHelper.createBlobWithSize(fileSizeInBytes) + return new File([blob], fileName, { type: 'text/plain' }) + } + + private static createBlobWithSize(size: number): Blob { + const arrayBuffer = new ArrayBuffer(size) + return new Blob([arrayBuffer]) + } }