From 209371da22ef56ab32462570ae5f9f45e719e08a Mon Sep 17 00:00:00 2001
From: "brian.kim"
Date: Tue, 26 Aug 2025 13:25:10 -0700
Subject: [PATCH 01/11] update registerUpload to fss4.0

---
 .../services/file-management-system/index.ts  |  1 -
 .../services/file-storage-service/index.ts    | 19 +++++--------
 .../test/file-storage-service.test.ts         | 27 ++++++++-----------
 3 files changed, 17 insertions(+), 30 deletions(-)

diff --git a/src/renderer/services/file-management-system/index.ts b/src/renderer/services/file-management-system/index.ts
index bc4833ff..23788f1a 100644
--- a/src/renderer/services/file-management-system/index.ts
+++ b/src/renderer/services/file-management-system/index.ts
@@ -147,7 +147,6 @@ export default class FileManagementSystem {
     const registration = await this.fss.registerUpload(
       fileName,
       fileType,
-      fileSize,
       upload.serviceFields.localNasShortcut ? this.posixPath(source) : undefined,
       isMultifile,
       shouldBeInLocal,
diff --git a/src/renderer/services/file-storage-service/index.ts b/src/renderer/services/file-storage-service/index.ts
index d5d39a38..dee6a403 100644
--- a/src/renderer/services/file-storage-service/index.ts
+++ b/src/renderer/services/file-storage-service/index.ts
@@ -69,7 +69,7 @@ interface FileRecord {
  * This acts as an interface for interacting with the File Storage Service (FSS).
  */
 export default class FileStorageService extends HttpCacheClient {
-  public static readonly ENDPOINT = "fss2/v3.0";
+  public static readonly ENDPOINT = "fss2/v4.0";
   private static readonly BASE_FILE_PATH = `${FileStorageService.ENDPOINT}/file`;
   private static readonly BASE_UPLOAD_PATH = `${FileStorageService.ENDPOINT}/upload`;
   constructor(httpClient: HttpClient, localStorage: LocalStorage) {
@@ -100,24 +100,17 @@ export default class FileStorageService extends HttpCacheClient {
   public registerUpload(
     fileName: string,
     fileType: FileType,
-    fileSize: number,
     localNasPath?: string,
     isMultifile?: boolean,
     shouldBeInLocal?: boolean,
   ): Promise<UploadStatusResponse> {
-    const url = `${FileStorageService.BASE_UPLOAD_PATH}/register`;
+    const url = `${FileStorageService.BASE_UPLOAD_PATH}`;
     const postBody = {
-      // Unfortunately FSS expects snake_case
-      // so the conversion must be manual each request
-      file_name: fileName,
-      file_type: fileType,
-      // Unfortunately FSS expects snake_case
-      // so the conversion must be manual each request
-      file_size: fileSize,
-      local_nas_shortcut: localNasPath !== undefined,
-      local_nas_path: localNasPath,
+      fileName: fileName,
+      fileType: fileType,
+      localNasPath: localNasPath,
       multifile: !!isMultifile,
-      should_be_in_local: shouldBeInLocal,
+      shouldBeInLocal: shouldBeInLocal,
     };
     return this.post(
       url,
diff --git a/src/renderer/services/file-storage-service/test/file-storage-service.test.ts b/src/renderer/services/file-storage-service/test/file-storage-service.test.ts
index 7df52afb..10888e48 100644
--- a/src/renderer/services/file-storage-service/test/file-storage-service.test.ts
+++ b/src/renderer/services/file-storage-service/test/file-storage-service.test.ts
@@ -42,20 +42,17 @@ describe("FileStorageService", () => {
     const postStub = sandbox.stub().resolves(response);
     const fileName = "my_cool_czi.czi";
     const fileType = FileType.IMAGE;
-    const fileSize = 13941234;
     const expectedPostBody = {
-      file_name: fileName,
-      file_type: fileType,
-      file_size: fileSize,
-      local_nas_path: undefined,
-      local_nas_shortcut: false,
+      fileName: fileName,
+      fileType: fileType,
+      localNasPath: undefined,
       multifile: false,
-      should_be_in_local: undefined
+      shouldBeInLocal: undefined
     };
     sandbox.replace(httpClient, "post", postStub as SinonStub);

     // Act
-    const actual = await fss.registerUpload(fileName, fileType, fileSize);
+    const actual = await fss.registerUpload(fileName, fileType);

     // Assert
     expect(actual).to.deep.equal(expectedResponse);
@@ -79,20 +76,17 @@ describe("FileStorageService", () => {
     const postStub = sandbox.stub().resolves(response);
     const fileName = "my_cool_czi.czi";
     const fileType = FileType.IMAGE;
-    const fileSize = 13941234;
     const expectedPostBody = {
-      file_name: fileName,
-      file_type: fileType,
-      file_size: fileSize,
-      local_nas_path: localNasPath,
-      local_nas_shortcut: true,
+      fileName: fileName,
+      fileType: fileType,
+      localNasPath: localNasPath,
       multifile: false,
-      should_be_in_local: undefined
+      shouldBeInLocal: undefined
     };
     sandbox.replace(httpClient, "post", postStub as SinonStub);

     // Act
-    const actual = await fss.registerUpload(fileName, fileType, fileSize, localNasPath);
+    const actual = await fss.registerUpload(fileName, fileType, localNasPath);

     // Assert
     expect(actual).to.deep.equal(expectedResponse);

From 7007dca0400c1b09da85e4ad2249af234807a8b5 Mon Sep 17 00:00:00 2001
From: "brian.kim"
Date: Mon, 15 Sep 2025 14:43:44 -0700
Subject: [PATCH 02/11] lint

---
 package-lock.json                             | 272 ++++--
 package.json                                  |   6 +-
 src/main/index.ts                             |  42 +-
 .../containers/App/handleUploadJobUpdates.ts  |  83 +-
 src/renderer/containers/App/test/App.test.ts  |  76 +-
 .../Table/CustomCells/StatusCell/Step.tsx     |   4 +-
 .../Table/CustomCells/StatusCell/index.tsx    |   6 +-
 .../StatusCell/test/StatusCell.test.tsx       |  31 +-
 .../services/file-management-system/index.ts  | 443 ++--------
 .../test/file-management-system.test.ts       | 775 +-----------------
 .../services/file-storage-service/index.ts    | 179 +---
 .../test/file-storage-service.test.ts         | 141 +---
 src/renderer/state/configure-store.ts         |   4 +-
 13 files changed, 472 insertions(+), 1590 deletions(-)

diff --git a/package-lock.json b/package-lock.json
index 2192b89c..c817b50f 100644
--- a/package-lock.json
+++ b/package-lock.json
@@ -77,6 +77,7 @@
       "chai": "~4.3.4",
       "classnames": "~2.3.1",
       "clean-webpack-plugin": "4.x",
+      "concurrently": "^9.2.1",
       "cross-env": "~7.0.3",
       "css-loader": "7.x",
       "electron": "~16.0.6",
@@ -108,6 +109,7 @@
       "sinon-chai": "~3.7.0",
       "ts-loader": "^6.2.2",
       "typescript": "~4.5.4",
+      "wait-on": "^8.0.4",
       "webpack": "5.x",
       "webpack-cli": "5.x",
       "webpack-dev-server": "5.x"
@@ -2979,6 +2981,21 @@
         "url": "https://opencollective.com/eslint"
       }
     },
+    "node_modules/@hapi/hoek": {
+      "version": "9.3.0",
+      "resolved": "https://registry.npmjs.org/@hapi/hoek/-/hoek-9.3.0.tgz",
+      "integrity": "sha512-/c6rf4UJlmHlC9b5BaNvzAcFv7HZ2QHaV0D4/HNlBdvFnvQq8RI4kYdhyPCl7Xj+oWvTWQ8ujhqS53LIgAe6KQ==",
+      "dev": true
+    },
+    "node_modules/@hapi/topo": {
+      "version": "5.1.0",
+      "resolved": "https://registry.npmjs.org/@hapi/topo/-/topo-5.1.0.tgz",
+      "integrity": "sha512-foQZKJig7Ob0BMAYBfcJk8d77QtOe7Wo4ox7ff1lQYoNNAb6jwcY1ncdoy2e9wQZzvNy7ODZCYJkK8kzmcAnAg==",
+      "dev": true,
+      "dependencies": {
+        "@hapi/hoek": "^9.0.0"
+      }
+    },
     "node_modules/@humanwhocodes/config-array": {
       "version": "0.9.5",
       "resolved": "https://registry.npmjs.org/@humanwhocodes/config-array/-/config-array-0.9.5.tgz",
@@ -3271,6 +3288,27 @@
         "node": ">=14"
       }
     },
+    "node_modules/@sideway/address": {
+      "version": "4.1.5",
+      "resolved": "https://registry.npmjs.org/@sideway/address/-/address-4.1.5.tgz",
+      "integrity": "sha512-IqO/DUQHUkPeixNQ8n0JA6102hT9CmaljNTPmQ1u8MEhBo/R4Q8eKLN/vGZxuebwOroDB4cbpjheD4+/sKFK4Q==",
+      "dev": true,
+      "dependencies": {
"@hapi/hoek": "^9.0.0" + } + }, + "node_modules/@sideway/formula": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/@sideway/formula/-/formula-3.0.1.tgz", + "integrity": "sha512-/poHZJJVjx3L+zVD6g9KgHfYnb443oi7wLu/XKojDviHy6HOEOA6z1Trk5aR1dGcmPenJEgb2sK2I80LeS3MIg==", + "dev": true + }, + "node_modules/@sideway/pinpoint": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/@sideway/pinpoint/-/pinpoint-2.0.0.tgz", + "integrity": "sha512-RNiOoTPkptFtSVzQevY/yWtZwf/RxyVnPy/OcA9HBM3MlGDnBEYL5B41H0MTn0Uec8Hi+2qUtTfG2WWZBmMejQ==", + "dev": true + }, "node_modules/@sindresorhus/is": { "version": "0.14.0", "resolved": "https://registry.npmjs.org/@sindresorhus/is/-/is-0.14.0.tgz", @@ -5659,6 +5697,18 @@ "url": "https://github.com/sponsors/ljharb" } }, + "node_modules/call-bind-apply-helpers": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/call-bind-apply-helpers/-/call-bind-apply-helpers-1.0.2.tgz", + "integrity": "sha512-Sp1ablJ0ivDkSzjcaJdxEunN5/XvksFJ2sMBFfq6x0ryhQV/2b/KwFe21cMpmHtPOSij8K99/wSfoEuTObmuMQ==", + "dependencies": { + "es-errors": "^1.3.0", + "function-bind": "^1.1.2" + }, + "engines": { + "node": ">= 0.4" + } + }, "node_modules/callsites": { "version": "3.1.0", "resolved": "https://registry.npmjs.org/callsites/-/callsites-3.1.0.tgz", @@ -6167,6 +6217,45 @@ "typedarray": "^0.0.6" } }, + "node_modules/concurrently": { + "version": "9.2.1", + "resolved": "https://registry.npmjs.org/concurrently/-/concurrently-9.2.1.tgz", + "integrity": "sha512-fsfrO0MxV64Znoy8/l1vVIjjHa29SZyyqPgQBwhiDcaW8wJc2W3XWVOGx4M3oJBnv/zdUZIIp1gDeS98GzP8Ng==", + "dev": true, + "dependencies": { + "chalk": "4.1.2", + "rxjs": "7.8.2", + "shell-quote": "1.8.3", + "supports-color": "8.1.1", + "tree-kill": "1.2.2", + "yargs": "17.7.2" + }, + "bin": { + "conc": "dist/bin/concurrently.js", + "concurrently": "dist/bin/concurrently.js" + }, + "engines": { + "node": ">=18" + }, + "funding": { + "url": "https://github.com/open-cli-tools/concurrently?sponsor=1" + } + }, + "node_modules/concurrently/node_modules/supports-color": { + "version": "8.1.1", + "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-8.1.1.tgz", + "integrity": "sha512-MpUEN2OodtUzxvKQl72cUF7RQ5EiHsGvSsVG0ia9c5RbWGL2CI4C7EpPS8UTBIplnlzZiNuV56w+FuNxy3ty2Q==", + "dev": true, + "dependencies": { + "has-flag": "^4.0.0" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/chalk/supports-color?sponsor=1" + } + }, "node_modules/conf": { "version": "10.2.0", "resolved": "https://registry.npmjs.org/conf/-/conf-10.2.0.tgz", @@ -7587,6 +7676,19 @@ "integrity": "sha512-YXQl1DSa4/PQyRfgrv6aoNjhasp/p4qs9FjJ4q4cQk+8m4r6k4ZSiEyytKG8f8W9gi8WsQtIObNmKd+tMzNTmA==", "dev": true }, + "node_modules/dunder-proto": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/dunder-proto/-/dunder-proto-1.0.1.tgz", + "integrity": "sha512-KIN/nDJBQRcXw0MLVhZE9iQHmG68qAVIBg9CqmUYjmQIhgij9U5MFvrqkUL5FbtyyzZuOeOt0zdeRe4UY7ct+A==", + "dependencies": { + "call-bind-apply-helpers": "^1.0.1", + "es-errors": "^1.3.0", + "gopd": "^1.2.0" + }, + "engines": { + "node": ">= 0.4" + } + }, "node_modules/duplexer3": { "version": "0.1.5", "resolved": "https://registry.npmjs.org/duplexer3/-/duplexer3-0.1.5.tgz", @@ -8133,13 +8235,9 @@ "dev": true }, "node_modules/es-define-property": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/es-define-property/-/es-define-property-1.0.0.tgz", - "integrity": 
"sha512-jxayLKShrEqqzJ0eumQbVhTYQM27CfT1T35+gCgDFoL82JLsXqTJ76zv6A0YLOgEnLUMvLzsDsGIrl8NFpT2gQ==", - "dev": true, - "dependencies": { - "get-intrinsic": "^1.2.4" - }, + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/es-define-property/-/es-define-property-1.0.1.tgz", + "integrity": "sha512-e3nRfgfUZ4rNGL232gUgX06QNyyez04KdjFrF+LTRoOXmrOgFKDg4BCdsjW8EnT69eqdYGmRpJwiPVYNrCaW3g==", "engines": { "node": ">= 0.4" } @@ -8148,7 +8246,6 @@ "version": "1.3.0", "resolved": "https://registry.npmjs.org/es-errors/-/es-errors-1.3.0.tgz", "integrity": "sha512-Zf5H2Kxt2xjTvbJvP2ZWLEICxA6j+hAmMzIlypy4xcBg1vKVnx89Wy0GbS+kf5cwCVFFzdCFh2XSCFNULS6csw==", - "dev": true, "engines": { "node": ">= 0.4" } @@ -8159,10 +8256,9 @@ "integrity": "sha512-i1gCgmR9dCl6Vil6UKPI/trA69s08g/syhiDK9TG0Nf1RJjjFI+AzoWW7sPufzkgYAn861skuCwJa0pIIHYxvg==" }, "node_modules/es-object-atoms": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/es-object-atoms/-/es-object-atoms-1.0.0.tgz", - "integrity": "sha512-MZ4iQ6JwHOBQjahnjwaC1ZtIBH+2ohjamzAO3oaHcXYup7qxjF2fixyH+Q71voWHeOkI2q/TnJao/KfXYIZWbw==", - "dev": true, + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/es-object-atoms/-/es-object-atoms-1.1.1.tgz", + "integrity": "sha512-FGgH2h8zKNim9ljj7dankFPcICIK9Cp5bm+c2gQSYePhpaG5+esrLODihIorn+Pe6FGJzWhXQotPv73jTaldXA==", "dependencies": { "es-errors": "^1.3.0" }, @@ -8171,14 +8267,14 @@ } }, "node_modules/es-set-tostringtag": { - "version": "2.0.3", - "resolved": "https://registry.npmjs.org/es-set-tostringtag/-/es-set-tostringtag-2.0.3.tgz", - "integrity": "sha512-3T8uNMC3OQTHkFUsFq8r/BwAXLHvU/9O9mE0fBc/MY5iq/8H7ncvO947LmYA6ldWw9Uh8Yhf25zu6n7nML5QWQ==", - "dev": true, + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/es-set-tostringtag/-/es-set-tostringtag-2.1.0.tgz", + "integrity": "sha512-j6vWzfrGVfyXxge+O0x5sh6cvxAog0a/4Rdd2K36zCMV5eJ+/+tOAngRO8cODMNWbVRdVlmGZQL2YS3yR8bIUA==", "dependencies": { - "get-intrinsic": "^1.2.4", + "es-errors": "^1.3.0", + "get-intrinsic": "^1.2.6", "has-tostringtag": "^1.0.2", - "hasown": "^2.0.1" + "hasown": "^2.0.2" }, "engines": { "node": ">= 0.4" @@ -9364,12 +9460,14 @@ } }, "node_modules/form-data": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/form-data/-/form-data-4.0.0.tgz", - "integrity": "sha512-ETEklSGi5t0QMZuiXoA/Q6vcnxcLQP5vdugSpuAyi6SVGi2clPPp+xgEhuMaHC+zGgn31Kd235W35f7Hykkaww==", + "version": "4.0.4", + "resolved": "https://registry.npmjs.org/form-data/-/form-data-4.0.4.tgz", + "integrity": "sha512-KrGhL9Q4zjj0kiUt5OO4Mr/A/jlI2jDYs5eHBpYHPcBEVSiipAvn2Ko2HnPe20rmcuuvMHNdZFp+4IlGTMF0Ow==", "dependencies": { "asynckit": "^0.4.0", "combined-stream": "^1.0.8", + "es-set-tostringtag": "^2.1.0", + "hasown": "^2.0.2", "mime-types": "^2.1.12" }, "engines": { @@ -9474,7 +9572,6 @@ "version": "1.1.2", "resolved": "https://registry.npmjs.org/function-bind/-/function-bind-1.1.2.tgz", "integrity": "sha512-7XHNxH7qX9xG5mIwxkhumTox/MIRNcOgDrxWsMt2pAr23WHp6MrRlN7FBSFpCpr+oVO0F744iUgR82nJMfG2SA==", - "dev": true, "funding": { "url": "https://github.com/sponsors/ljharb" } @@ -9553,16 +9650,20 @@ } }, "node_modules/get-intrinsic": { - "version": "1.2.4", - "resolved": "https://registry.npmjs.org/get-intrinsic/-/get-intrinsic-1.2.4.tgz", - "integrity": "sha512-5uYhsJH8VJBTv7oslg4BznJYhDoRI6waYCxMmCdnTrcCrHA/fCFKoTFz2JKKE0HdDFUF7/oQuhzumXJK7paBRQ==", - "dev": true, + "version": "1.3.0", + "resolved": "https://registry.npmjs.org/get-intrinsic/-/get-intrinsic-1.3.0.tgz", + "integrity": 
"sha512-9fSjSaos/fRIVIp+xSJlE6lfwhES7LNtKaCBIamHsjr2na1BiABJPo0mOjjz8GJDURarmCPGqaiVg5mfjb98CQ==", "dependencies": { + "call-bind-apply-helpers": "^1.0.2", + "es-define-property": "^1.0.1", "es-errors": "^1.3.0", + "es-object-atoms": "^1.1.1", "function-bind": "^1.1.2", - "has-proto": "^1.0.1", - "has-symbols": "^1.0.3", - "hasown": "^2.0.0" + "get-proto": "^1.0.1", + "gopd": "^1.2.0", + "has-symbols": "^1.1.0", + "hasown": "^2.0.2", + "math-intrinsics": "^1.1.0" }, "engines": { "node": ">= 0.4" @@ -9577,6 +9678,18 @@ "integrity": "sha512-I0UBV/XOz1XkIJHEUDMZAbzCThU/H8DxmSfmdGcKPnVhu2VfFqr34jr9777IyaTYvxjedWhqVIilEDsCdP5G6g==", "dev": true }, + "node_modules/get-proto": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/get-proto/-/get-proto-1.0.1.tgz", + "integrity": "sha512-sTSfBjoXBp89JvIKIefqw7U2CCebsc74kiY6awiGogKtoSGbgjYE/G/+l9sF3MWFPNc9IcoOC4ODfKHfxFmp0g==", + "dependencies": { + "dunder-proto": "^1.0.1", + "es-object-atoms": "^1.0.0" + }, + "engines": { + "node": ">= 0.4" + } + }, "node_modules/get-stream": { "version": "4.1.0", "resolved": "https://registry.npmjs.org/get-stream/-/get-stream-4.1.0.tgz", @@ -9780,12 +9893,11 @@ } }, "node_modules/gopd": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/gopd/-/gopd-1.0.1.tgz", - "integrity": "sha512-d65bNlIadxvpb/A2abVdlqKqV563juRnZ1Wtk6s1sIR8uNsXR70xqIzVqxVf1eTqDunwT2MkczEeaezCKTZhwA==", - "dev": true, - "dependencies": { - "get-intrinsic": "^1.1.3" + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/gopd/-/gopd-1.2.0.tgz", + "integrity": "sha512-ZUKRh6/kUFoAiTAtTYPZJ3hw9wNxx+BIBOijnlG9PnrJsCcSjs1wyyD6vJpaYtgnzDrKYRSqf3OO6Rfa93xsRg==", + "engines": { + "node": ">= 0.4" }, "funding": { "url": "https://github.com/sponsors/ljharb" @@ -9902,10 +10014,9 @@ } }, "node_modules/has-symbols": { - "version": "1.0.3", - "resolved": "https://registry.npmjs.org/has-symbols/-/has-symbols-1.0.3.tgz", - "integrity": "sha512-l3LCuF6MgDNwTDKkdYGEihYjt5pRPbEg46rtlmnSPlUbgmB8LOIrKJbYYFBSbnPaJexMKtiPO8hmeRjRz2Td+A==", - "dev": true, + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/has-symbols/-/has-symbols-1.1.0.tgz", + "integrity": "sha512-1cDNdwJ2Jaohmb3sg4OmKaMBwuC48sYni5HUw2DvsC8LjGTLK9h+eb1X6RyuOHe4hT0ULCW68iomhjUoKUqlPQ==", "engines": { "node": ">= 0.4" }, @@ -9917,7 +10028,6 @@ "version": "1.0.2", "resolved": "https://registry.npmjs.org/has-tostringtag/-/has-tostringtag-1.0.2.tgz", "integrity": "sha512-NqADB8VjPFLM2V0VvHUewwwsw0ZWBaIdgo+ieHtK3hasLz4qeCRjYcqfB6AQrBggRKppKF8L52/VqdVsO47Dlw==", - "dev": true, "dependencies": { "has-symbols": "^1.0.3" }, @@ -9946,7 +10056,6 @@ "version": "2.0.2", "resolved": "https://registry.npmjs.org/hasown/-/hasown-2.0.2.tgz", "integrity": "sha512-0hJU9SCPvmMzIBdZFqNPXWa6dqh7WdH0cII9y+CyS8rG3nL48Bclra9HmKhVVUHyPWNH5Y7xDwAB7bfgSjkUMQ==", - "dev": true, "dependencies": { "function-bind": "^1.1.2" }, @@ -11296,6 +11405,19 @@ "url": "https://github.com/chalk/supports-color?sponsor=1" } }, + "node_modules/joi": { + "version": "17.13.3", + "resolved": "https://registry.npmjs.org/joi/-/joi-17.13.3.tgz", + "integrity": "sha512-otDA4ldcIx+ZXsKHWmp0YizCweVRZG96J10b0FevjfuncLO1oX59THoAmHkNubYJ+9gWsYsp5k8v4ib6oDv1fA==", + "dev": true, + "dependencies": { + "@hapi/hoek": "^9.3.0", + "@hapi/topo": "^5.1.0", + "@sideway/address": "^4.1.5", + "@sideway/formula": "^3.0.1", + "@sideway/pinpoint": "^2.0.0" + } + }, "node_modules/js-tokens": { "version": "4.0.0", "resolved": "https://registry.npmjs.org/js-tokens/-/js-tokens-4.0.0.tgz", @@ -12150,6 +12272,14 @@ "node": ">=10" } 
}, + "node_modules/math-intrinsics": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/math-intrinsics/-/math-intrinsics-1.1.0.tgz", + "integrity": "sha512-/IXtbwEk5HTPyEwyKX6hGkYXxM9nbj64B+ilVJnC/R6B0pH5G4V3b0pVbL7DBj4tkhBAppbQUlf6F6Xl9LHu1g==", + "engines": { + "node": ">= 0.4" + } + }, "node_modules/media-typer": { "version": "0.3.0", "resolved": "https://registry.npmjs.org/media-typer/-/media-typer-0.3.0.tgz", @@ -14750,6 +14880,12 @@ "node": ">= 0.10" } }, + "node_modules/proxy-from-env": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/proxy-from-env/-/proxy-from-env-1.1.0.tgz", + "integrity": "sha512-D+zkORCbA9f1tdWRK0RaCR3GPv50cMxcrz4X8k5LTSUD1Dkw47mKJEZQNunItRTkWwgtaUSo1RVFRIG9ZXiFYg==", + "dev": true + }, "node_modules/prr": { "version": "1.0.1", "resolved": "https://registry.npmjs.org/prr/-/prr-1.0.1.tgz", @@ -16415,9 +16551,9 @@ } }, "node_modules/rxjs": { - "version": "7.8.1", - "resolved": "https://registry.npmjs.org/rxjs/-/rxjs-7.8.1.tgz", - "integrity": "sha512-AA3TVj+0A2iuIoQkWEK/tqFjBq2j+6PO6Y0zJcvzLAFhEFIO3HL0vls9hWLncZbAAbK0mar7oZ4V079I/qPMxg==", + "version": "7.8.2", + "resolved": "https://registry.npmjs.org/rxjs/-/rxjs-7.8.2.tgz", + "integrity": "sha512-dhKf903U/PQZY6boNNtAGdWbG85WAbjT/1xYoZIC7FAY0yWapOBQVsVrDl58W86//e1VpMNBtRV4MaXfdMySFA==", "dev": true, "dependencies": { "tslib": "^2.1.0" @@ -16888,10 +17024,13 @@ } }, "node_modules/shell-quote": { - "version": "1.8.1", - "resolved": "https://registry.npmjs.org/shell-quote/-/shell-quote-1.8.1.tgz", - "integrity": "sha512-6j1W9l1iAs/4xYBI1SYOVZyFcCis9b4KCLQ8fgAGG07QvzaRLVVRQvAy85yNmmZSjYjg4MWh4gNvlPujU/5LpA==", + "version": "1.8.3", + "resolved": "https://registry.npmjs.org/shell-quote/-/shell-quote-1.8.3.tgz", + "integrity": "sha512-ObmnIF4hXNg1BqhnHmgbDETF8dLPCggZWBjkQfhZpbszZnYur5DUljTcCHii5LC3J5E0yeO/1LIMyH+UvHQgyw==", "dev": true, + "engines": { + "node": ">= 0.4" + }, "funding": { "url": "https://github.com/sponsors/ljharb" } @@ -17668,6 +17807,15 @@ "node": ">=12" } }, + "node_modules/tree-kill": { + "version": "1.2.2", + "resolved": "https://registry.npmjs.org/tree-kill/-/tree-kill-1.2.2.tgz", + "integrity": "sha512-L0Orpi8qGpRG//Nd+H90vFB+3iHnue1zSSGmNOOCh1GLJ7rUKVwV2HvijphGQS2UmhUZewS9VgvxYIdgr+fG1A==", + "dev": true, + "bin": { + "tree-kill": "cli.js" + } + }, "node_modules/truncate-utf8-bytes": { "version": "1.0.2", "resolved": "https://registry.npmjs.org/truncate-utf8-bytes/-/truncate-utf8-bytes-1.0.2.tgz", @@ -18404,6 +18552,36 @@ "node": ">=12" } }, + "node_modules/wait-on": { + "version": "8.0.4", + "resolved": "https://registry.npmjs.org/wait-on/-/wait-on-8.0.4.tgz", + "integrity": "sha512-8f9LugAGo4PSc0aLbpKVCVtzayd36sSCp4WLpVngkYq6PK87H79zt77/tlCU6eKCLqR46iFvcl0PU5f+DmtkwA==", + "dev": true, + "dependencies": { + "axios": "^1.11.0", + "joi": "^17.13.3", + "lodash": "^4.17.21", + "minimist": "^1.2.8", + "rxjs": "^7.8.2" + }, + "bin": { + "wait-on": "bin/wait-on" + }, + "engines": { + "node": ">=12.0.0" + } + }, + "node_modules/wait-on/node_modules/axios": { + "version": "1.11.0", + "resolved": "https://registry.npmjs.org/axios/-/axios-1.11.0.tgz", + "integrity": "sha512-1Lx3WLFQWm3ooKDYZD1eXmoGO9fxYQjrycfHFC8P0sCfQVXyROp0p9PFWBehewBOdCwHc+f/b8I0fMto5eSfwA==", + "dev": true, + "dependencies": { + "follow-redirects": "^1.15.6", + "form-data": "^4.0.4", + "proxy-from-env": "^1.1.0" + } + }, "node_modules/walkdir": { "version": "0.4.1", "resolved": "https://registry.npmjs.org/walkdir/-/walkdir-0.4.1.tgz", diff --git a/package.json b/package.json index b640f17b..04d8dbd2 
100644 --- a/package.json +++ b/package.json @@ -38,7 +38,9 @@ } }, "scripts": { - "dev": "node scripts/start-dev.js", + "start:renderer": "webpack serve --config webpack/webpack.renderer.config.js --hot --port 8080", + "start:main": "cross-env NODE_ENV=development electron ./dist/main/main.js", + "dev": "concurrently \"cross-env NODE_ENV=development npm run start:renderer\" \"wait-on http://localhost:8080 && cross-env NODE_ENV=development npm run start:main\"", "compile": "webpack --config webpack/webpack.main.config.js && webpack --config webpack/webpack.renderer.config.js", "compile-prod": "webpack --config webpack/webpack.main.config.js --env production && webpack --config webpack/webpack.renderer.config.js --env production", "build-executable": "npm run compile-prod && npx electron-builder", @@ -129,6 +131,7 @@ "chai": "~4.3.4", "classnames": "~2.3.1", "clean-webpack-plugin": "4.x", + "concurrently": "^9.2.1", "cross-env": "~7.0.3", "css-loader": "7.x", "electron": "~16.0.6", @@ -160,6 +163,7 @@ "sinon-chai": "~3.7.0", "ts-loader": "^6.2.2", "typescript": "~4.5.4", + "wait-on": "^8.0.4", "webpack": "5.x", "webpack-cli": "5.x", "webpack-dev-server": "5.x" diff --git a/src/main/index.ts b/src/main/index.ts index 8b233df1..c95411c5 100644 --- a/src/main/index.ts +++ b/src/main/index.ts @@ -15,7 +15,9 @@ import { import { setMenu } from "./menu"; -const isDevelopment = process.env.NODE_ENV !== "production"; +const isDevelopment = process.env.NODE_ENV === "development"; + + ElectronStore.initRenderer(); @@ -23,7 +25,7 @@ ElectronStore.initRenderer(); let mainWindow: BrowserWindow | undefined; function createMainWindow() { - const window = new BrowserWindow({ + const window = new BrowserWindow({ height: 750, webPreferences: { contextIsolation: false, @@ -41,34 +43,23 @@ function createMainWindow() { const { webContents } = window; setMenu(webContents); - if (isDevelopment) { + if (isDevelopment) { installExtension(REACT_DEVELOPER_TOOLS) .then((name: string) => { - console.log(`Added extension: ${name}`); - - if (!mainWindow) { - throw new Error("mainWindow not defined"); - } - - mainWindow - .loadURL(`http://${devServer.host}:${devServer.port}`) // todo pull from constants file - .then(() => { - if (mainWindow) { - mainWindow.webContents.openDevTools(); - } - }) - .catch((error: Error) => { - console.error("Failed to load from webpack-dev-server", error); - }); + console.log(`Added extension: ${name}`); }) .catch((err: Error) => { - console.log(`Failed to load React devtools \n ${err}`); - window.loadFile(path.join("dist", "renderer", "index.html")).catch((error: Error) => { - console.error("Failed to load from file", error); + console.log(`Failed to load React devtools \n ${err}`); + }) + .finally(() => { + window.loadURL(`http://${devServer.host}:${devServer.port}`) + .then(() => { + window.webContents.openDevTools(); + }) + .catch((error: Error) => { + console.error("Failed to load from webpack-dev-server", error); }); - } - ) - .finally(() => window.webContents.openDevTools()); + }); } else { window.loadFile(path.join("dist", "renderer", "index.html")).catch((error: Error) => { console.error("Failed to load from file", error); @@ -91,6 +82,7 @@ function createMainWindow() { }); }); + mainWindow = window; return window; } diff --git a/src/renderer/containers/App/handleUploadJobUpdates.ts b/src/renderer/containers/App/handleUploadJobUpdates.ts index d879fefd..f2b5c272 100644 --- a/src/renderer/containers/App/handleUploadJobUpdates.ts +++ 
b/src/renderer/containers/App/handleUploadJobUpdates.ts @@ -18,40 +18,55 @@ import { Step } from "../Table/CustomCells/StatusCell/Step"; * Reports progress on the pre-upload MD5 step, the upload step, and then the post-upload MD5 step. */ function handleFSSJobUpdate(job: FSSUpload, dispatch: Dispatch) { - const totalBytes = job.serviceFields.fileSize || 0; // 0 is a safe default, but in practice filesize is initialized immediately after job creation. - if ( - job.serviceFields?.preUploadMd5 && - job.serviceFields.preUploadMd5 !== job.serviceFields.fileSize - ) { - dispatch( - updateUploadProgressInfo(job.jobId, { - bytesUploaded: job.serviceFields?.preUploadMd5, - totalBytes, - step: Step.ONE, - }) - ); - } else if ( - job.serviceFields?.currentFileSize && - job.serviceFields.currentFileSize !== job.serviceFields?.fileSize - ) { - dispatch( - updateUploadProgressInfo(job.jobId, { - bytesUploaded: job.serviceFields?.currentFileSize, - totalBytes, + const totalBytes = job.serviceFields.fileSize ?? 0; + const copyProgress = job.serviceFields.copyToFmsCacheProgress; // number | undefined + const checksumProgress = job.serviceFields.checksumProgress ?? 0; + const s3Progress = job.serviceFields.s3UploadProgress ?? 0; + + const isHybrid = typeof copyProgress === "number"; + + // cloud only upload + if (!isHybrid) { + if (checksumProgress < totalBytes) { + // update checksum progress as step 1 + dispatch(updateUploadProgressInfo(job.jobId, { + bytesUploaded: checksumProgress, + totalBytes: totalBytes, + step: Step.ONE_CHECKSUM, + })); + } else if (checksumProgress === totalBytes && s3Progress < totalBytes) { + // update s3 upload progress as step 2 + dispatch(updateUploadProgressInfo(job.jobId, { + bytesUploaded: s3Progress, + totalBytes: totalBytes, step: Step.TWO, - }) - ); - } else if ( - job.serviceFields?.postUploadMd5 && - job.serviceFields.postUploadMd5 !== job.serviceFields?.fileSize - ) { - dispatch( - updateUploadProgressInfo(job.jobId, { - bytesUploaded: job.serviceFields?.postUploadMd5, - totalBytes, - step: Step.THREE, - }) - ); + })); + } + + // hybrid uploads + } else { + if (copyProgress < totalBytes) { + // copy progress + dispatch(updateUploadProgressInfo(job.jobId, { + bytesUploaded: copyProgress, + totalBytes: totalBytes * 2, // double totalBytes to account for copy + checksum for step 1 + step: Step.ONE_COPY, + })); + } else if (copyProgress === totalBytes && checksumProgress < totalBytes) { + // checksum progress + dispatch(updateUploadProgressInfo(job.jobId, { + bytesUploaded: totalBytes + checksumProgress, + totalBytes: totalBytes * 2, // double totalBytes to account for copy + checksum for step 1 + step: Step.ONE_CHECKSUM, + })); + } else if (checksumProgress === totalBytes && s3Progress < totalBytes) { + // upload progress + dispatch(updateUploadProgressInfo(job.jobId, { + bytesUploaded: s3Progress, + totalBytes: totalBytes, + step: Step.TWO, // Upload + })); + } } } @@ -69,7 +84,7 @@ function handleFSSMultifileJobUpdate(job: FSSUpload, dispatch: Dispatch) { updateUploadProgressInfo(job.jobId, { bytesUploaded: totalBytesUploaded, totalBytes: job.serviceFields?.fileSize || 0, - step: Step.THREE, + step: Step.TWO, }) ); } diff --git a/src/renderer/containers/App/test/App.test.ts b/src/renderer/containers/App/test/App.test.ts index d8f5484b..8b857758 100644 --- a/src/renderer/containers/App/test/App.test.ts +++ b/src/renderer/containers/App/test/App.test.ts @@ -12,54 +12,48 @@ describe("App", () => { [ { serviceFields: { - preUploadMd5: 5, - fileSize: 10 - }, - step: 
Step.ONE, - progressField: "preUploadMd5" + checksumProgress: 5, + fileSize: 10 + }, + step: Step.ONE_CHECKSUM, + progressField: "checksumProgress" }, { serviceFields: { - preUploadMd5: 10, - fileSize: 10, - currentFileSize: 5, - }, + checksumProgress: 10, + fileSize: 10, + s3UploadProgress: 5, + }, step: Step.TWO, - progressField: "currentFileSize" - }, - { - serviceFields: { - preUploadMd5: 10, - currentFileSize: 10, - postUploadMd5: 5, - fileSize: 10, - }, - step: Step.THREE, - progressField: "postUploadMd5" + progressField: "s3UploadProgress" }, - ].forEach(({serviceFields, step, progressField}) => { - it("dispatches updateUploadProgressInfo when pre-upload-md5 is in progress", () => { - // Arrange + ].forEach(({ serviceFields, step, progressField }) => { + it("dispatches updateUploadProgressInfo when upload is in progress", () => { const fssJob: JSSJob = { - created: new Date(), - jobId: "foo123", - jobName: "test_file.txt", - modified: new Date(), - originationHost: "dev-aics-fup-001", - service: Service.FILE_STORAGE_SERVICE, - updateParent: false, - user: "fakeuser", - status: JSSJobStatus.WORKING, - serviceFields, + created: new Date(), + jobId: "foo123", + jobName: "test_file.txt", + modified: new Date(), + originationHost: "dev-aics-fup-001", + service: Service.FILE_STORAGE_SERVICE, + updateParent: false, + user: "fakeuser", + status: JSSJobStatus.WORKING, + serviceFields, }; - let actionPersisted = undefined; - const dispatch = (action: Action)=>{ - actionPersisted = action; - }; - const expectedAction = updateUploadProgressInfo(fssJob.jobId, { bytesUploaded: fssJob.serviceFields && fssJob.serviceFields[progressField], totalBytes: fssJob.serviceFields?.fileSize, step: step }) - // Act + + let actionPersisted: any = undefined; + const dispatch = (action: Action) => { + actionPersisted = action; + }; + + const expectedAction = updateUploadProgressInfo(fssJob.jobId, { + bytesUploaded: fssJob.serviceFields?.[progressField], + totalBytes: fssJob.serviceFields?.fileSize, + step, + }); + handleUploadJobUpdates(fssJob, dispatch); - // Assert expect(actionPersisted).to.deep.equal(expectedAction); }); }); @@ -90,7 +84,7 @@ describe("App", () => { const dispatch = (action: Action)=>{ actionPersisted = action; }; - const expectedAction = updateUploadProgressInfo(fssJob.jobId, { bytesUploaded: 35, totalBytes: fssJob.serviceFields?.fileSize, step: Step.THREE }) + const expectedAction = updateUploadProgressInfo(fssJob.jobId, { bytesUploaded: 35, totalBytes: fssJob.serviceFields?.fileSize, step: Step.TWO }) // Act handleUploadJobUpdates(fssJob, dispatch); // Assert diff --git a/src/renderer/containers/Table/CustomCells/StatusCell/Step.tsx b/src/renderer/containers/Table/CustomCells/StatusCell/Step.tsx index b38f5c80..0e577fc6 100644 --- a/src/renderer/containers/Table/CustomCells/StatusCell/Step.tsx +++ b/src/renderer/containers/Table/CustomCells/StatusCell/Step.tsx @@ -1,5 +1,5 @@ export enum Step { - ONE, + ONE_COPY, + ONE_CHECKSUM, TWO, - THREE } diff --git a/src/renderer/containers/Table/CustomCells/StatusCell/index.tsx b/src/renderer/containers/Table/CustomCells/StatusCell/index.tsx index aa2304a1..a9b863cc 100644 --- a/src/renderer/containers/Table/CustomCells/StatusCell/index.tsx +++ b/src/renderer/containers/Table/CustomCells/StatusCell/index.tsx @@ -24,9 +24,9 @@ const POWER_OF_1000_TO_ABBREV = new Map([ ]); const STEP_INFO = { - [Step.ONE]: "Step 1 of 3: Pre-upload tasks", - [Step.TWO]: "Step 2 of 3: Uploading file", - [Step.THREE]: "Step 3 of 3: Post-upload tasks", + [Step.ONE_COPY]: 
"Step 1 of 2: Pre-upload, copying to FMS cache", + [Step.ONE_CHECKSUM]: "Step 1 of 2: Pre-upload, calculating MD5 checksum", + [Step.TWO]: "Step 2 of 2: Uploading file", }; function getBytesDisplay(bytes: number): string { diff --git a/src/renderer/containers/Table/CustomCells/StatusCell/test/StatusCell.test.tsx b/src/renderer/containers/Table/CustomCells/StatusCell/test/StatusCell.test.tsx index e1d0d987..a7e6067e 100644 --- a/src/renderer/containers/Table/CustomCells/StatusCell/test/StatusCell.test.tsx +++ b/src/renderer/containers/Table/CustomCells/StatusCell/test/StatusCell.test.tsx @@ -71,7 +71,7 @@ describe("", () => { progress: { bytesUploaded: 4245, totalBytes: 82341, - step: Step.ONE, + step: Step.ONE_CHECKSUM, }, }, }; @@ -83,7 +83,7 @@ describe("", () => { // Assert expect(wrapper.find(Tooltip).prop("title")).to.equal( - "WORKING - Step 1 of 3: Pre-upload tasks" + "WORKING - Step 1 of 2: Pre-upload, calculating MD5 checksum" ); expect(wrapper.find(Progress).prop("percent")).to.equal(5); }); @@ -107,7 +107,7 @@ describe("", () => { // Assert expect(wrapper.find(Tooltip).prop("title")).to.equal( - "WORKING - Step 2 of 3: Uploading file" + "WORKING - Step 2 of 2: Uploading file" ); expect(wrapper.find(Progress).prop("percent")).to.equal(0); }); @@ -131,31 +131,8 @@ describe("", () => { // Assert expect(wrapper.find(Tooltip).prop("title")).to.equal( - "WORKING - Step 2 of 3: Uploading file" + "WORKING - Step 2 of 2: Uploading file" ); expect(wrapper.find(Progress).prop("percent")).to.equal(60); }); - - it("shows step 3 when in third step of upload", () => { - // Arrange - const row = { - original: { - progress: { - bytesUploaded: 82341, - totalBytes: 82341, - step: Step.THREE, - }, - }, - }; - - // Act - const wrapper = mount( - - ); - - // Assert - expect(wrapper.find(Tooltip).prop("title")).to.equal( - "WORKING - Step 3 of 3: Post-upload tasks" - ); - }); }); diff --git a/src/renderer/services/file-management-system/index.ts b/src/renderer/services/file-management-system/index.ts index 23788f1a..58a85389 100644 --- a/src/renderer/services/file-management-system/index.ts +++ b/src/renderer/services/file-management-system/index.ts @@ -1,14 +1,11 @@ -import * as fs from "fs"; import * as path from "path"; -import { uniq } from "lodash"; import * as uuid from "uuid"; import { Step } from "../../containers/Table/CustomCells/StatusCell/Step"; -import { extensionToFileTypeMap, FileType, getDirectorySize} from "../../util"; +import { extensionToFileTypeMap, FileType} from "../../util"; import FileStorageService, { UploadStatus, - UploadStatusResponse, } from "../file-storage-service"; import JobStatusService from "../job-status-service"; import { @@ -21,11 +18,7 @@ import { import MetadataManagementService from "../metadata-management-service"; import { UploadRequest } from "../types"; -import ChunkedFileReader, { CancellationError } from "./ChunkedFileReader"; -import Md5Hasher from "./Md5Hasher"; - interface FileManagementClientConfig { - fileReader: ChunkedFileReader; fss: FileStorageService; jss: JobStatusService; mms: MetadataManagementService; @@ -44,13 +37,10 @@ export interface UploadProgressInfo { * the FMS. 
*/ export default class FileManagementSystem { - private readonly fileReader: ChunkedFileReader; private readonly fss: FileStorageService; private readonly jss: JobStatusService; private readonly mms: MetadataManagementService; - private static readonly CHUNKS_CEILING_INFLIGHT_REQUEST_CEILING = 20; //ceiling on concurrent chunk requests (even if more can fit in memory) - /** * Returns JSS friendly UUID to group files * uploaded together @@ -59,12 +49,7 @@ export default class FileManagementSystem { return uuid.v1().replace(/-/g, ""); } - private static sleep(timeoutInMs = 2000){ - return new Promise(resolve => setTimeout(resolve, timeoutInMs)) - } - public constructor(config: FileManagementClientConfig) { - this.fileReader = config.fileReader; this.fss = config.fss; this.jss = config.jss; this.mms = config.mms; @@ -113,94 +98,6 @@ export default class FileManagementSystem { return replaced; } - private async register( - upload: UploadJob, - ): Promise<[UploadStatusResponse, string, number]> { - // Grab file details - const source = upload.serviceFields.files[0]?.file.originalPath; - const fileName = path.basename(source); - const isMultifile = upload.serviceFields?.multifile; - - const shouldBeInLocal = upload.serviceFields.files[0]?.file.shouldBeInLocal; - const sourceStat = await fs.promises.stat(source); - let { size: fileSize } = sourceStat; - const { mtime: fileLastModified } = sourceStat - - // Multifile uploads require us to get the total size of all relevant sub-files - // For any other upload, we can just grab the size returned by fs - if (isMultifile) { - fileSize = await getDirectorySize(source); - } - - const fileLastModifiedInMs = fileLastModified.getTime(); - // Heuristic which in most cases, prevents attempting to upload a duplicate - if (await this.fss.fileExistsByNameAndSize(fileName, fileSize)) { - throw new Error( - `File ${fileName} with size ${fileSize} already exists in FMS` - ); - } - - const fileType = extensionToFileTypeMap[ - path.extname(upload.serviceFields.files[0]?.file.originalPath).toLowerCase() - ] || FileType.OTHER; - - const registration = await this.fss.registerUpload( - fileName, - fileType, - upload.serviceFields.localNasShortcut ? this.posixPath(source) : undefined, - isMultifile, - shouldBeInLocal, - ); - - // Update parent job with upload job created by FSS - // for tracking in the event of a retry - await this.jss.updateJob(upload.jobId, { - serviceFields: { - fssUploadId: registration.uploadId, - lastModifiedInMS: fileLastModifiedInMs, - }, - }); - return [registration, source, fileSize]; - } - /** - * Uploads the file at the given path and metadata. - * Sends upload in chunks reporting the total bytes - * read on each chunk submission. - * Does not complete the upload, FSS must do some work asynchronously - * before we can do so. This app will track the FSS upload job to - * determine when it is time to complete the upload. - */ - public async upload( - upload: UploadJob, - ): Promise { - try { - const [fssStatus, source] = await this.register(upload); - if (!upload.serviceFields.localNasShortcut) { - const isAlreadyInProgress = fssStatus.chunkStatuses && fssStatus.chunkStatuses[0]; - if(isAlreadyInProgress) { - //Handles the case where FUA believes this is a new upload, - //but actually, it is partially complete already. 
- await this.retry(upload.jobId); - } else { - await this.uploadInChunks({ - fssStatus, - source, - user: upload.user - }); - } - } - } catch (error) { - // Ignore cancellation errors - if (!(error instanceof CancellationError)) { - // Fail job in JSS with error - const errMsg = `Something went wrong uploading ${upload.jobName}. Details: ${error?.message}`; - console.error(errMsg); - await this.failUpload(upload.jobId, errMsg); - throw error; - } - } - } - /** * Finishes the remaining work to finalize the upload after * FSS's portion has been completed asynchronously @@ -252,117 +149,39 @@ export default class FileManagementSystem { } /** - * Attempts to retry the upload for the given failed job. The job will not - * be reused, but will instead be replaced. - * - * Will attempt to first resume an ongoing upload before retrying it completely. - * - * Backwards compatible with uploads that have a many to many relationship - * with files in which case this will split the uploads into many different uploads. + * Attempts to retry the upload for the given failed job. */ - public async retry( - uploadId: string - ): Promise { - // Request job from JSS & validate if it is retryable + public async retry(uploadId: string): Promise { const fuaUpload = (await this.jss.getJob(uploadId)) as UploadJob; - // Avoid attempting to retry a successful job, should be an update in that case if (fuaUpload.status === JSSJobStatus.SUCCEEDED) { - throw new Error( - `Upload cannot be retried if already successful, actual status is ${fuaUpload.status}.` + this.succeedUpload(uploadId, + fuaUpload.serviceFields.result?.[0].fileId || '', + fuaUpload.serviceFields.result?.[0].fileName || '', + fuaUpload.serviceFields.result?.[0].readPath || '', ); + throw new Error(`Upload cannot be retried if already successful.`); } - // Attempt to resume an ongoing upload if possible before scraping this one entirely const { fssUploadId } = fuaUpload.serviceFields; - let resumeError: Error | undefined; if (fssUploadId) { - try { - // In case the req fails, handle by cancelling in the catch, - // so that the FUA can then create a new upload for the file - const fssStatus = await this.fss.getStatus(fssUploadId); - if (fssStatus?.status !== UploadStatus.INACTIVE) { - await this.resume(fuaUpload, fssStatus); - return; - } - } catch (error) { - // Cancel FSS upload to retry again from scratch - resumeError = error - try{ - await this.fss.cancelUpload(fssUploadId); - } catch (calcelError){ - // In case fssUploadId does not exist on server, - // no-op so that control continues to routine where new jobs are created (below) - } - } + // let fss try retry + await this.fss.retryUpload(fssUploadId); + return; } - // Start new upload jobs that will replace the current one - const newJobServiceFields = { - groupId: - fuaUpload.serviceFields?.groupId || - FileManagementSystem.createUploadGroupId(), - originalJobId: uploadId, - localNasShortcut: fuaUpload.serviceFields?.localNasShortcut - }; - - // Create a separate upload for each file in this job - // One job for multiple files is deprecated, this is here - // for backwards-compatibility - const results = await Promise.all( - (fuaUpload.serviceFields?.files || []).map(async (metadata) => { - try { - // Get a fresh upload job to track the upload with - const newUpload = await this.initiateUpload( - metadata, - fuaUpload.user, - newJobServiceFields - ); - - try { - // Update the current job with information about the replacement - let errorMessage = `This job has been replaced with Job ID: 
${newUpload.jobId}` - if (resumeError) { - errorMessage += ` after attempting to resume resulting in error ${resumeError?.message}` - } - await this.jss.updateJob( - uploadId, - { - status: JSSJobStatus.FAILED, - serviceFields: { - error: errorMessage, - replacementJobIds: uniq([ - ...(fuaUpload?.serviceFields?.replacementJobIds || []), - newUpload.jobId, - ]), - }, - }, - false - ); - } catch (error) { - // Cancel the new job if unable to update the old one - await this.cancel(newUpload.jobId); - throw error; - } - - // Perform upload with new job and current job's metadata, forgoing the current job - await this.upload(newUpload); - return; - } catch (error) { - // Catch exceptions to allow other jobs to run before re-throwing the error - return { error }; - } - }) - ); - - // Evaluate the results throwing the first error seen (if any) - results.forEach((result) => { - const errorCase = result as { error?: Error }; - if (errorCase?.error) { - throw errorCase.error; - } + // update existing job with retry info + await this.jss.updateJob(uploadId, { + status: JSSJobStatus.WAITING, + serviceFields: { + ...fuaUpload.serviceFields, + error: undefined, // clear previous error + cancelled: false, + }, }); + + await this.upload(fuaUpload); } /** @@ -381,9 +200,6 @@ export default class FileManagementSystem { ); } - // Cancel any web worker currently active just in case - this.fileReader.cancel(uploadId); - // If we haven't saved the FSS Job ID this either failed miserably or hasn't progressed much let fssStatus; const { fssUploadId } = upload.serviceFields; @@ -428,180 +244,67 @@ export default class FileManagementSystem { } /** - * Attempts to resume the given "in progress" chunked upload. This is *not* meant to be - * used in regular upload circumstances, but rather should be used - * when an upload was stopped while it was ongoing due to an event - * like an app crash. - * - * This will try to take advantage of any work - * already done to upload the file. + * Marks the given upload as a Success */ - private async resume( - fuaUpload: UploadJob, - fssStatus: UploadStatusResponse + public async succeedUpload( + uploadId: string, + fileId: string, + fileName: string, + readPath: string, ): Promise { - const { localNasShortcut, lastModifiedInMS, files } = fuaUpload.serviceFields; - const { mtime: fileLastModified } = await fs.promises.stat(files[0].file.originalPath); - const fileLastModifiedInMs = fileLastModified.getTime(); - if (lastModifiedInMS !== fileLastModifiedInMs) { - throw new Error("File has been modified since last upload attempt"); - } - switch (fssStatus.status) { - case UploadStatus.WORKING: - // Update status to reflect the resume going smoothly - await this.jss.updateJob(fuaUpload.jobId, { - status: JSSJobStatus.RETRYING, + await this.jss.updateJob(uploadId, { + status: JSSJobStatus.SUCCEEDED, + serviceFields: { + result: [ + { + fileId, + fileName, + readPath, + }, + ], + }, }); - if (localNasShortcut) { - // For localNasShortcut uploads, the way to reume an in progress upload is to call /register on it again. 
- // eslint-disable-next-line @typescript-eslint/no-unused-vars - await this.register(fuaUpload); - } else { - await this.resumeUploadInChunks(fuaUpload, fssStatus); - } - break; - case UploadStatus.RETRY: - if (localNasShortcut) { - await this.fss.retryFinalizeForLocalNasShortcutUpload(fssStatus.uploadId); - } else { - await this.retryFinalizeForChunkedUpload(fssStatus); - } - break; - case UploadStatus.COMPLETE: - // If an FSS upload status is complete it has performed everything - // it needs to and may just need the client to finish its portion - const { fileId } = fssStatus; - if(!fileId){ - throw new Error("FileId was not published on COMPLETE upload: " + fssStatus.uploadId) - } - await this.complete(fuaUpload, fileId) - break; - case UploadStatus.POST_PROCESSING: - break; - default: - throw new Error(`Unexpected FSS UploadStatus encountered: ${fssStatus?.status}`); - } - return; - } - - private async getChunkedUploadProgress( - fssStatus: UploadStatusResponse - ): Promise<[number, string?]> { - // If FSS is still available to continue receiving chunks of this upload - // simply continue sending the chunks - let lastChunkNumber = fssStatus.chunkStatuses.findIndex( - (status) => status !== UploadStatus.COMPLETE - ); - if (lastChunkNumber === -1) { - lastChunkNumber = fssStatus.chunkStatuses.length; - } - - let partiallyCalculatedMd5 = undefined; - if (lastChunkNumber > 0) { - const chunkResponse = await this.fss.getChunkInfo(fssStatus.uploadId, lastChunkNumber); - partiallyCalculatedMd5 = chunkResponse.cumulativeMD5; - if (!partiallyCalculatedMd5) { - throw new Error('No partial MD5 for chunk ' + lastChunkNumber); } - } - return [lastChunkNumber, partiallyCalculatedMd5]; - } - - private async retryFinalizeForChunkedUpload( - fssStatus: UploadStatusResponse - ) { - const [lastChunkNumber, partiallyCalculatedMd5] = await this.getChunkedUploadProgress(fssStatus); - if (!partiallyCalculatedMd5) { - throw new Error('No partial MD5 for chunk ' + lastChunkNumber); - } - const deserailizedMd5Hasher = await Md5Hasher.deserialize(partiallyCalculatedMd5); - await this.fss.retryFinalizeMd5(fssStatus.uploadId, deserailizedMd5Hasher.digest()); - } - - private async resumeUploadInChunks( - upload: UploadJob, - fssStatus: UploadStatusResponse, - ) { - const { originalPath } = upload.serviceFields.files[0].file; - const [lastChunkNumber, partiallyCalculatedMd5] = await this.getChunkedUploadProgress(fssStatus); - - await this.uploadInChunks({ - fssStatus, - source: originalPath, - user: upload.user, - initialChunkNumber: lastChunkNumber, - partiallyCalculatedMd5 - }); - } /** - * Uploads the given file to FSS in chunks asynchronously using a NodeJS. + * Uploads the given file to FSS. 
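   * In FSS v4 the copy, checksum, and S3 upload steps all happen
   * service-side; the client makes a single upload call and then tracks
   * progress through the returned fssUploadId (see handleFSSJobUpdate).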
   */
  public async upload(upload: UploadJob): Promise<void> {
    try {
      const source = upload.serviceFields.files[0]?.file.originalPath;
      const fileName = path.basename(source);
      const isMultifile = upload.serviceFields?.multifile;
      const shouldBeInLocal = upload.serviceFields.files[0]?.file.shouldBeInLocal;

      const fileType = extensionToFileTypeMap[
        path.extname(fileName).toLowerCase()
      ] || FileType.OTHER;

      // v4: single upload call
      const fssStatus = await this.fss.upload(
        fileName,
        fileType,
        this.posixPath(source),
        "VAST", // hard-coded for now since we're not planning on bucket to bucket uploads
        isMultifile,
        shouldBeInLocal,
      );

      // track using this upload's jobId
      await this.jss.updateJob(upload.jobId, {
        serviceFields: {
          fssUploadId: fssStatus.uploadId,
        },
      });
    } catch (error) {
      await this.jss.updateJob(upload.jobId, {
        status: JSSJobStatus.FAILED,
        serviceFields: {
          ...upload.serviceFields,
          error: error?.message ??
"Upload failed", + }, + }); + throw error; } - const offset = chunkSize * initialChunkNumber; - //TODO SWE-865 only read if offset < fssStatus - const md5 = await this.fileReader.read({ - uploadId: fssUploadId, - source, - onProgress: onChunkRead, - chunkSize, - offset, - partiallyCalculatedMd5, - }); - - //Block until all chunk uploads have completed - await Promise.all(uploadChunkPromises); - - // Trigger asynchrous finalize step in FSS - await this.fss.finalize(fssUploadId, md5); } } diff --git a/src/renderer/services/file-management-system/test/file-management-system.test.ts b/src/renderer/services/file-management-system/test/file-management-system.test.ts index a73265e9..ea4f0542 100644 --- a/src/renderer/services/file-management-system/test/file-management-system.test.ts +++ b/src/renderer/services/file-management-system/test/file-management-system.test.ts @@ -12,24 +12,14 @@ import { MetadataManagementService, } from "../.."; import { mockJob, mockWorkingUploadJob } from "../../../state/test/mocks"; -import { FSSUpload, UploadStatus } from "../../file-storage-service"; +import { UploadStatus } from "../../file-storage-service"; import { - JSSJob, JSSJobStatus, UploadJob, } from "../../job-status-service/types"; -import ChunkedFileReader from "../ChunkedFileReader"; - -class TestError extends Error { - constructor() { - super("Test."); - this.name = "TestError"; - } -} describe("FileManagementSystem", () => { const sandbox = createSandbox(); - let fileReader: SinonStubbedInstance; let fss: SinonStubbedInstance; let jss: SinonStubbedInstance; let mms: SinonStubbedInstance; @@ -46,13 +36,11 @@ describe("FileManagementSystem", () => { }); beforeEach(() => { - fileReader = sandbox.createStubInstance(ChunkedFileReader); fss = sandbox.createStubInstance(FileStorageService); jss = sandbox.createStubInstance(JobStatusService); mms = sandbox.createStubInstance(MetadataManagementService); fms = new FileManagementSystem({ - fileReader: fileReader as any, fss: fss as any, jss: jss as any, mms: mms as any, @@ -81,682 +69,27 @@ describe("FileManagementSystem", () => { }); describe("upload", () => { - - it("restarts 'WORKING' job in FSS", async () => { - // Arrange - const uploadId = "elephant091234124"; - const { mtime: fileLastModified } = - await fs.promises.stat(testFilePath); - const fileLastModifiedInMs = fileLastModified.getTime(); - const uploadJob: UploadJob = { - ...mockJob, - serviceFields: { - files: [ - { - file: { - fileType: "text", - originalPath: testFilePath, - }, - }, - ], - type: "upload", - }, - }; - const workingUploadJob: UploadJob = { - ...mockJob, - status: JSSJobStatus.WORKING, - serviceFields: { - fssUploadId: uploadId, - lastModifiedInMS: fileLastModifiedInMs, - files: [ - { - file: { - fileType: "text", - originalPath: testFilePath, - }, - }, - ], - type: "upload", - }, - }; - const completeUploadJob: UploadJob = { - ...mockJob, - status: JSSJobStatus.SUCCEEDED, - serviceFields: { - fssUploadId: uploadId, - lastModifiedInMS: fileLastModifiedInMs, - files: [ - { - file: { - fileType: "text", - originalPath: testFilePath, - }, - }, - ], - type: "upload", - }, - }; - fss.fileExistsByNameAndSize.resolves(false); - fss.registerUpload.onFirstCall().resolves({ status: UploadStatus.WORKING, chunkStatuses: [UploadStatus.WORKING], uploadId, chunkSize: 2424, currentFileSize: -1, fileSize: -1}) - .onSecondCall().resolves({ status: UploadStatus.WORKING, chunkStatuses: [UploadStatus.WORKING,UploadStatus.WORKING], uploadId, chunkSize: 2424, currentFileSize: -1, fileSize: -1 }); - 
jss.updateJob.resolves(); - jss.createJob.resolves(workingUploadJob); - jss.getJob - .onFirstCall().resolves(uploadJob) - .onSecondCall().resolves(workingUploadJob) - .onThirdCall().resolves(completeUploadJob); - fss.getStatus - .onFirstCall().resolves({ status: UploadStatus.WORKING, chunkStatuses: [UploadStatus.WORKING], uploadId: "091234124", chunkSize: 2424, currentFileSize: -1, fileSize: -1 }) - .onSecondCall().resolves({ status: UploadStatus.COMPLETE, chunkStatuses: [UploadStatus.COMPLETE], uploadId: "091234124", chunkSize: 2424, currentFileSize: -1, fileSize: -1}) - const fileId = "12343124"; - const localPath = "/some/path/into/fms/at/test_file.txt"; - fss.finalize.resolves({ - errorCount: 0, - chunkNumber: 14, - uploadId: uploadJob.jobId, - }); - fss.getFileAttributes.resolves({ - fileId, - localPath, - name: "", - size: 4, - md5: "", - }); - - // Act - await fms.upload(uploadJob); - - // Assert - // not called by upload unless redirected to retry - expect(fss.getStatus.called).to.be.true; - // Make sure the job get set to state RETRYING - expect(jss.updateJob.calledWith(uploadJob.jobId, { - status: JSSJobStatus.RETRYING, - })).to.be.true; - }); - - it("Inits chunked upload instead of local_nas_shortcut", async () => { - // Arrange - const upload: UploadJob = { - ...mockJob, - serviceFields: { - files: [ - { - file: { - fileType: "text", - originalPath: testFilePath, - }, - }, - ], - type: "upload", - localNasShortcut: false - }, - }; - const uploadId = "091234124"; - const expectedMd5 = "testMd5"; - fss.fileExistsByNameAndSize.resolves(false); - fss.registerUpload.resolves({ status: UploadStatus.WORKING, uploadId, chunkSize: 2424, chunkStatuses: [], currentFileSize: -1, fileSize: -1 }); - fileReader.read.resolves(expectedMd5) - - // Act - await fms.upload(upload); - - // Assert - expect(fileReader.read).to.have.been.calledOnce; - expect(fss.finalize.calledOnceWithExactly(uploadId, expectedMd5)).to.be.true; - }); - - it("Inits local_nas_shortcut instead of chunked.", async () => { - // Arrange - const upload: UploadJob = { - ...mockJob, - serviceFields: { - files: [ - { - file: { - fileType: "text", - originalPath: testFilePath, - }, - }, - ], - type: "upload", - localNasShortcut: true - }, - }; - const uploadId = "091234124"; - fss.fileExistsByNameAndSize.resolves(false); - fss.registerUpload.resolves({ status: UploadStatus.WORKING, uploadId, chunkSize: 2424, chunkStatuses: [], currentFileSize: -1, fileSize: -1 }); - fss.getStatus.resolves({ - status: UploadStatus.COMPLETE, - uploadId, - chunkSize: 2424, - chunkStatuses: [], - currentFileSize: 99, - fileSize: -1 - }); - // Act - await fms.upload(upload); - - // Assert - expect(fileReader.read).to.have.not.been.called; - expect(fss.finalize).to.have.not.been.called; - }); - - it("creates appropriate metadata & completes tracking job", async () => { - // Arrange - const upload: UploadJob = { - ...mockJob, - serviceFields: { - files: [ - { - file: { - fileType: "text", - originalPath: testFilePath, - }, - }, - ], - type: "upload", - }, - }; - const uploadId = "091234124"; - const expectedMd5 = "testMd5"; - fss.fileExistsByNameAndSize.resolves(false); - fss.registerUpload.resolves({ status: UploadStatus.WORKING, uploadId, chunkSize: 2424, chunkStatuses: [], currentFileSize: -1, fileSize: -1 }); - fileReader.read.resolves(expectedMd5) - - // Act - await fms.upload(upload); - - // Assert - expect( - fss.fileExistsByNameAndSize.calledOnceWithExactly( - path.basename(testFilePath), - testFileSize - ) - ).to.be.true; - 
expect(fileReader.read).to.have.been.calledOnce; - expect(fss.finalize.calledOnceWithExactly(uploadId, expectedMd5)).to.be.true; - }); - - it("makes requests to FSS asyncronously", async () => { - // Arrange - const md5 = "09k2341234k"; + it("stores FSS uploadId in JSS after starting upload", async () => { const upload: UploadJob = { ...mockJob, serviceFields: { - files: [ - { - file: { - fileType: "text", - originalPath: testFilePath, - }, - }, - ], + files: [{ file: { fileType: "text", originalPath: testFilePath } }], type: "upload", }, }; - const uploadId = "091234124"; - fss.fileExistsByNameAndSize.resolves(false); - fss.registerUpload.resolves({ status: UploadStatus.WORKING, uploadId, chunkSize: 2424, chunkStatuses: [], currentFileSize: -1, fileSize: -1 }); - fileReader.read.callsFake( - async ( - args:{uploadId: string, source: string, onProgress: (chunk: Uint8Array, partialMd5: string) => Promise}):Promise=>{ - for(let i = 0; i < 5; i++){ - await args.onProgress(new Uint8Array(), ""); - } - return md5; - }); - let inFlightFssRequests = 0; - let wasParallelising = false; - fss.sendUploadChunk.callsFake(async ()=>{ - inFlightFssRequests++; - await new Promise((resolve)=>setTimeout(resolve, 25)); - if(inFlightFssRequests > 1){ - wasParallelising = true; - } - inFlightFssRequests--; - }); - // Act - await fms.upload(upload); - // Assert - expect(wasParallelising).to.be.true; - expect(inFlightFssRequests).to.be.equal(0); - }); - - it("fails upload if error occurs during read", async () => { - // Arrange - const error = "Test failure during read"; - const upload: UploadJob = { - ...mockJob, - serviceFields: { - files: [ - { - file: { - fileType: "text", - originalPath: testFilePath, - }, - }, - ], - type: "upload", - }, - }; - fss.fileExistsByNameAndSize.resolves(false); - fss.registerUpload.resolves({ status: UploadStatus.WORKING, uploadId: "091234124", chunkSize: 2424, chunkStatuses: [], currentFileSize: -1, fileSize: -1 }); - fileReader.read.rejects(new Error(error)); - - // Act - await expect(fms.upload(upload)).to.be.rejectedWith(Error); - - // Assert - expect( - fss.fileExistsByNameAndSize.calledOnceWithExactly( - path.basename(testFilePath), - testFileSize - ) - ).to.be.true; - expect( - jss.updateJob.calledWithExactly(upload.jobId, { - status: JSSJobStatus.FAILED, - serviceFields: { - error: `Something went wrong uploading ${upload.jobName}. 
Details: ${error}`, - cancelled: false, - }, - }) - ).to.be.true; - expect(fileReader.read).to.have.been.calledOnce; - }); - - it("fails upload if fss errors bubble up from reader", async () => { - // Arrange - const upload: UploadJob = { - ...mockJob, - serviceFields: { - files: [ - { - file: { - fileType: "text", - originalPath: testFilePath, - }, - }, - ], - type: "upload", - }, - }; - const uploadId = "091234124"; - fss.fileExistsByNameAndSize.resolves(false); - fss.registerUpload.resolves({ status: UploadStatus.WORKING, uploadId, chunkSize: 2424, chunkStatuses: [], currentFileSize: -1, fileSize: -1 }); - // p.getName.callsFake(() => { return "Alex Smith"; }); - fileReader.read.callsFake(async (args:{uploadId: string, source: string, onProgress: (chunk: Uint8Array, partialMd5: string) => Promise}):Promise=>{ - await args.onProgress(new Uint8Array(), "testMd5"); - return "completeMd5"; - }); - fss.sendUploadChunk.callsFake(async ()=>{ - throw new TestError(); - }); - // Act, Assert - expect(fms.upload(upload)).to.be.rejectedWith(TestError); - }); - }); - - describe("retry", () => { - it("creates new upload if fss upload not tracked", async () => { - // Arrange - const upload: UploadJob = { - ...mockJob, - serviceFields: { - files: [ - { - file: { - fileType: "text", - originalPath: testFilePath, - }, - }, - ], - type: "upload", - }, - }; - const fileId = "12343124"; - const localPath = "/some/path/into/fms/at/test_file.txt"; - jss.getJob.resolves(upload); - jss.createJob.resolves(upload); - fss.fileExistsByNameAndSize.resolves(false); - fss.registerUpload.resolves({ status: UploadStatus.WORKING, uploadId: "091234124", chunkSize: 2424, chunkStatuses: [], currentFileSize: -1, fileSize: -1 }); - fss.finalize.resolves({ - errorCount: 0, - chunkNumber: 14, - uploadId: upload.jobId, - }); - fss.getFileAttributes.resolves({ - fileId, - localPath, - name: "", - size: 4, - md5: "", - }); - - // Act - await fms.retry("mockUploadId"); - - // Assert - expect(jss.createJob).to.have.been.calledOnce; - }); - - it("creates new upload if fss upload not in progress (able to resume)", async () => { - // Arrange - const upload: UploadJob = { - ...mockJob, - serviceFields: { - files: [ - { - file: { - fileType: "text", - originalPath: testFilePath, - }, - }, - ], - fssUploadId: "234124141", - type: "upload", - }, - }; - const fileId = "12343124"; - const localPath = "/some/path/into/fms/at/test_file.txt"; - const inactiveUploadId = "mockUploadId"; - const newUploadId = "091234124"; - jss.getJob.resolves(upload); - jss.createJob.resolves(upload); - fss.getStatus.onFirstCall().resolves({ - uploadId: inactiveUploadId, - chunkSize: -1, - status: UploadStatus.INACTIVE, - chunkStatuses: [], - currentFileSize: -1, - fileSize: -1 - }); - fss.fileExistsByNameAndSize.resolves(false); - fss.registerUpload.resolves({ status: UploadStatus.WORKING, uploadId: newUploadId, chunkSize: 2424, chunkStatuses: [], currentFileSize: -1, fileSize: -1 }); - fss.finalize.resolves({ - errorCount: 0, - chunkNumber: 14, - uploadId: upload.jobId, - }); - fss.getFileAttributes.resolves({ - fileId, - localPath, - name: "", - size: 4, - md5: "", - }); - - // Act - await fms.retry(inactiveUploadId); - - // Assert - expect(jss.createJob).to.have.been.calledOnce; - }); - - it("Recreated upload succeeds if fss upload not in progress (able to resume)", async () => { - // Arrange - const upload: UploadJob = { - ...mockJob, - serviceFields: { - files: [ - { - file: { - fileType: "text", - originalPath: testFilePath, - }, - }, - ], - fssUploadId: 
"234124141", - type: "upload", - }, - }; - const md5 = "test_md5"; - const fileId = "12343124"; - const localPath = "/some/path/into/fms/at/test_file.txt"; - const inactiveUploadId = "mockUploadId"; - const newUploadId = "091234124"; - jss.getJob.resolves(upload); - jss.createJob.resolves(upload); - fss.getStatus.onFirstCall().resolves({ - uploadId: inactiveUploadId, - chunkSize: -1, - status: UploadStatus.INACTIVE, - chunkStatuses: [], - currentFileSize: -1, - fileSize: -1 - }).onSecondCall().resolves( - { - uploadId: inactiveUploadId, - chunkSize: -1, - status: UploadStatus.COMPLETE, - chunkStatuses: [], - currentFileSize: -1, - fileSize: -1 - }); - fss.fileExistsByNameAndSize.resolves(false); - fss.registerUpload.resolves({ status: UploadStatus.WORKING, uploadId: newUploadId, chunkSize: 2424, chunkStatuses: [], currentFileSize: -1, fileSize: -1 }); - fss.finalize.resolves({ - errorCount: 0, - chunkNumber: 14, - uploadId: upload.jobId, - }); - fss.getFileAttributes.resolves({ - fileId, - localPath, - name: "", - size: 4, - md5: "", - }); - fileReader.read.resolves(md5) - - // Act - await fms.retry(inactiveUploadId); - - // Assert - expect(fss.finalize.calledWith(newUploadId, md5)).to.be.true; - expect(jss.createJob).to.have.been.calledOnce; - }); - - it("creates multiple new uploads for backwards compatibility", async () => { - // Arrange - const upload: UploadJob = { - ...mockJob, - serviceFields: { - files: [ - { - file: { - fileType: "text", - originalPath: testFilePath, - }, - }, - { - file: { - fileType: "text", - originalPath: testFilePath, - }, - }, - ], - type: "upload", - }, - }; - const fileId = "12343124"; - const localPath = "/some/path/into/fms/at/test_file.txt"; - jss.getJob.resolves(upload); - jss.createJob.resolves(upload); - fss.fileExistsByNameAndSize.resolves(false); - fss.registerUpload.resolves({ status: UploadStatus.WORKING, uploadId: "091234124", chunkSize: 2424, chunkStatuses: [], currentFileSize: -1, fileSize: -1 }); - fss.finalize.resolves({ - errorCount: 0, - chunkNumber: 14, - uploadId: upload.jobId, - }); - fss.getFileAttributes.resolves({ - fileId, - localPath, - name: "", - size: 4, - md5: "", - }); - - // Act - await fms.retry("mockUploadId"); - - // Assert - expect(fss.finalize.calledTwice).to.be.true; - expect(fss.registerUpload.calledTwice).to.be.true; - expect(jss.createJob.getCalls()).to.be.lengthOf(2); - }); - - it(`resumes sending chunks for an upload with an WORKING FSS status`, async () => { - // Arrange - const { mtime: fileLastModified } = - await fs.promises.stat(testFilePath); - const fileLastModifiedInMs = fileLastModified.getTime(); - const upload: UploadJob = { - ...mockJob, - serviceFields: { - files: [ - { - file: { - fileType: "text", - originalPath: testFilePath, - }, - }, - ], - fssUploadId: "234124141", - type: "upload", - lastModifiedInMS: fileLastModifiedInMs, - }, - }; - const fssUpload: JSSJob = { - ...mockJob, - }; - fss.getChunkInfo.resolves({ cumulativeMD5: "anyMd5", size: 0, status: UploadStatus.COMPLETE }) - jss.getJob.onFirstCall().resolves(upload); - fss.getStatus.resolves({ + fss.upload.resolves({ status: UploadStatus.WORKING, - chunkSize: -1, - uploadId: "-1", - chunkStatuses: [], - currentFileSize: -1, - fileSize: -1 + uploadId: "mockUploadId", + fileId: "mockFileId", }); - jss.getJob.onSecondCall().resolves(fssUpload); - - // Act - await fms.retry("mockUploadId"); - - // Assert - expect(jss.createJob.called).to.be.false; - expect(fileReader.read).to.have.been.calledOnce; - }); - it(`resumes local_nas_shortcut upload 
with an WORKING FSS status`, async () => { - // Arrange - const uploadId = "234124141"; - const { mtime: fileLastModified } = - await fs.promises.stat(testFilePath); - const fileLastModifiedInMs = fileLastModified.getTime(); - const upload: UploadJob = { - ...mockJob, - serviceFields: { - files: [ - { - file: { - fileType: "text", - originalPath: testFilePath, - }, - }, - ], - localNasShortcut: true, - fssUploadId: uploadId, - type: "upload", - lastModifiedInMS: fileLastModifiedInMs, - }, - }; - const fssUpload: JSSJob = { - ...mockJob, - }; - jss.getJob.onFirstCall().resolves(upload); - jss.getJob.onSecondCall().resolves(fssUpload); - fss.registerUpload.resolves({ - status: UploadStatus.WORKING, uploadId, chunkSize: 2424, chunkStatuses: [], currentFileSize: -1, fileSize: -1 - }); - fss.getStatus.onFirstCall().resolves({ - status: UploadStatus.WORKING, - chunkSize: -1, - uploadId: "-1", - chunkStatuses: [], - currentFileSize: -1, - fileSize: -1 - }); - fss.getStatus.onSecondCall().resolves({ - status: UploadStatus.COMPLETE, - chunkSize: -1, - uploadId: "-1", - chunkStatuses: [], - currentFileSize: -1, - fileSize: -1 - }); - - // Act - await fms.retry(uploadId); + await fms.upload(upload); - // Assert - // expect(fss.cancelUpload.called).to.be.false; - expect(jss.createJob.called).to.be.false; + expect(fss.upload.calledOnce).to.be.true; expect(jss.updateJob.calledWith(upload.jobId, { - status: JSSJobStatus.RETRYING, + serviceFields: { fssUploadId: "mockUploadId" }, })).to.be.true; - expect(fss.registerUpload.called).to.be.true; - expect(fileReader.read.called).to.be.false; - }); - - it("calls retryFinalize on a chunked upload", async () => { - // Arrange - const { mtime: fileLastModified } = - await fs.promises.stat(testFilePath); - const fileLastModifiedInMs = fileLastModified.getTime(); - const fssUploadId = "234124141"; - const fuaUploadJob: UploadJob = { - ...mockJob, - serviceFields: { - files: [ - { - file: { - fileType: "text", - originalPath: testFilePath, - }, - }, - ], - localNasShortcut: false, - fssUploadId, - type: "upload", - lastModifiedInMS: fileLastModifiedInMs, - }, - }; - jss.getJob.onFirstCall().resolves(fuaUploadJob); - fss.getStatus.resolves({ - uploadId: fssUploadId, - chunkSize: 5, - status: UploadStatus.RETRY, - chunkStatuses: [UploadStatus.COMPLETE], - currentFileSize: 5, - fileSize: -1 - }); - fss.getChunkInfo.resolves({ - cumulativeMD5: "155,15,172,125,232,3,0,0,0,0,0,0,251,202,102,144,7,248,49,200,135,184,123,11,17,82,191,19,96,32,156,6,0,0,0,0,1,0,128,19,0,0,0,0,100,0,0,0,153,6,0,102,144,31,156,6,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0", - size: 5, - status: UploadStatus.COMPLETE, - }); - - // Act - await fms.retry(fssUploadId); - - // Assert - expect(fss.retryFinalizeMd5.calledWith(fssUploadId, "770134b98f3fc804f593ea0098af8490")).to.be.true; }); it("calls retryFinalize on a localNasShortcut upload", async () => { @@ -785,18 +118,12 @@ describe("FileManagementSystem", () => { jss.getJob.onFirstCall().resolves(fuaUploadJob); fss.getStatus.onFirstCall().resolves({ uploadId: fssUploadId, - chunkSize: -1, + fileId: "mockFileId", status: UploadStatus.RETRY, - chunkStatuses: [], - currentFileSize: -1, - fileSize: -1 }).onSecondCall().resolves({ uploadId: fssUploadId, - chunkSize: -1, + fileId: "mockFileId", status: UploadStatus.COMPLETE, - chunkStatuses: [], - currentFileSize: -1, - fileSize: -1 }); // Act @@ 
-804,71 +131,10 @@ describe("FileManagementSystem", () => { // Assert expect(jss.createJob.called).to.be.false; - expect(fss.retryFinalizeForLocalNasShortcutUpload.calledWith(fssUploadId)).to.be.true; - }); - - it("resumes an upload that just needs finalizing", async () => { - // Arrange - const { mtime: fileLastModified } = - await fs.promises.stat(testFilePath); - const fileLastModifiedInMs = fileLastModified.getTime(); - const fuaUploadJob: UploadJob = { - ...mockJob, - serviceFields: { - files: [ - { - file: { - fileType: "text", - originalPath: testFilePath, - }, - }, - ], - fssUploadId: "234124141", - type: "upload", - lastModifiedInMS: fileLastModifiedInMs, - }, - }; - const fssUploadJob: FSSUpload = { - ...mockJob, - serviceFields: { - fileId: "testFileId" - }, - }; - const fileId = "12343124"; - const localPath = "/some/path/into/fms/at/test_file.txt"; - jss.getJob.onFirstCall().resolves(fuaUploadJob).onSecondCall().resolves(fssUploadJob); - fss.getStatus.resolves({ - uploadId: "-1", - chunkSize: -1, - status: UploadStatus.WORKING, - chunkStatuses: [], - currentFileSize: -1, - fileSize: -1 - }); - fss.finalize.resolves({ - errorCount: 0, - chunkNumber: 14, - uploadId: fuaUploadJob.jobId, - }); - fss.getFileAttributes.resolves({ - fileId, - localPath, - name: "", - size: 4, - md5: "", - }); - - // Act - await fms.retry("mockUploadId"); - - // Assert - expect(jss.createJob.called).to.be.false; - // TODO SWE-865 update so that read is skipped if all chunks are uploaded - // expect(fileReader.read.called).to.be.false; - expect(fss.finalize.called).to.be.true; }); }); + describe("complete", () => { it("fails upload job on error", async () => { // Arrange @@ -897,21 +163,19 @@ describe("FileManagementSystem", () => { fssUploadId: "12412m4413", }, }); + + // still in progress fss.getStatus.resolves({ - uploadId: "-1", - chunkSize: -1, + uploadId: "12412m4413", status: UploadStatus.WORKING, - chunkStatuses: [], - currentFileSize: -1, - fileSize: -1 + fileId: "mockFileId", }); // Act await fms.cancel(mockUploadId); // Assert - expect(fileReader.cancel).to.have.been.calledOnce; - expect(fss.cancelUpload).to.have.been.calledOnce; + expect(fss.cancelUpload).to.have.been.calledOnceWith("12412m4413"); }); it("sets job status to FAILED with cancellation flag", async () => { @@ -945,11 +209,8 @@ describe("FileManagementSystem", () => { }); fss.getStatus.resolves({ uploadId: "-1", - chunkSize: -1, status: UploadStatus.COMPLETE, - chunkStatuses: [], - currentFileSize: -1, - fileSize: -1 + fileId: "mockFileId", }); // Act / Assert diff --git a/src/renderer/services/file-storage-service/index.ts b/src/renderer/services/file-storage-service/index.ts index dee6a403..96942181 100644 --- a/src/renderer/services/file-storage-service/index.ts +++ b/src/renderer/services/file-storage-service/index.ts @@ -11,12 +11,12 @@ import { HttpClient } from "../types"; export interface FSSUpload extends JSSJob { serviceFields: { fileId?: string; - preUploadMd5?: number; - postUploadMd5?: number; fileSize?: number; - currentFileSize?: number; + copyToFmsCacheProgress?: number; // hybrid only + checksumProgress?: number; // cloud + hybrid + s3UploadProgress?: number; // final stage multifile?: boolean; - subfiles?: { [fileId: string]: number }; // mapping of subfile IDs to their respective # of bytes uploaded + subfiles?: { [fileId: string]: number }; }; } @@ -31,27 +31,8 @@ export enum UploadStatus { POST_PROCESSING = "POST_PROCESSING" // Chunks were all recieved, /finalize was called, and post upload processing is 
happening. } -// RESPONSE TYPES - -// Receipt of chunk submission -interface UploadChunkResponse { - chunkNumber: number; - errorCount: number; - uploadId: string; -} - -interface ChunkInfoResponse { - cumulativeMD5?: string; - size: number; - status: UploadStatus; -} - export interface UploadStatusResponse { - chunkSize: number; - chunkStatuses: UploadStatus[]; - currentFileSize: number; - fileSize: number; - fileId?: string; + fileId: string; status: UploadStatus; uploadId: string; } @@ -69,9 +50,10 @@ interface FileRecord { * This acts as an interface for interacting with the File Storage Service (FSS). */ export default class FileStorageService extends HttpCacheClient { - public static readonly ENDPOINT = "fss2/v4.0"; - private static readonly BASE_FILE_PATH = `${FileStorageService.ENDPOINT}/file`; - private static readonly BASE_UPLOAD_PATH = `${FileStorageService.ENDPOINT}/upload`; + public static readonly ENDPOINT_V4 = "fss2/v4.0"; + public static readonly ENDPOINT_V3 = "fss2/v3.0"; + private static readonly BASE_FILE_PATH = `${FileStorageService.ENDPOINT_V3}/file`; + private static readonly BASE_UPLOAD_PATH = `${FileStorageService.ENDPOINT_V4}/upload`; constructor(httpClient: HttpClient, localStorage: LocalStorage) { super(httpClient, localStorage, false); } @@ -92,25 +74,27 @@ export default class FileStorageService extends HttpCacheClient { } } + /** - * This is the first step to an upload. Before the app can start sending - * chunks of the file to upload it must first make the service aware of the - * file itself. - */ - public registerUpload( - fileName: string, - fileType: FileType, - localNasPath?: string, - isMultifile?: boolean, - shouldBeInLocal?: boolean, - ): Promise { + * FSS v4: Create a new upload. + * This replaces registerUpload and chunked upload logic from v3. + */ +public upload( + fileName: string, + fileType: FileType, + path: string, + source = "VAST", // hardcoded for now + isMultifile?: boolean, + shouldBeInLocal?: boolean, +): Promise { const url = `${FileStorageService.BASE_UPLOAD_PATH}`; const postBody = { - fileName: fileName, - fileType: fileType, - localNasPath: localNasPath, + fileName, + fileType, + source, + path, multifile: !!isMultifile, - shouldBeInLocal: shouldBeInLocal, + shouldBeInLocal, }; return this.post( url, @@ -120,102 +104,11 @@ export default class FileStorageService extends HttpCacheClient { } /** - * This is an incremental upload act, after an upload has been registered - * the file can be send in chunked of pretermined size to the service. 
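To make the new single-request flow concrete before the chunking machinery is deleted below, here is a hedged usage sketch of the `upload()` method added above. The interfaces are local stand-ins for illustration; only the method signature and the example path mirror this patch:

```ts
interface UploadStatusResponse {
  fileId: string;
  status: string;
  uploadId: string;
}

interface FssV4Client {
  upload(
    fileName: string,
    fileType: string,
    path: string,
    source?: string,
    isMultifile?: boolean,
    shouldBeInLocal?: boolean
  ): Promise<UploadStatusResponse>;
}

// One POST registers the upload and hands copy, checksum, and S3 work to
// the service; the client no longer streams chunks or computes an MD5.
async function startUpload(fss: FssV4Client): Promise<string> {
  const { uploadId } = await fss.upload(
    "my_cool_czi.czi",
    "image",
    "/allen/aics/test/my_cool_czi.czi" // POSIX path visible to FSS
  );
  return uploadId; // persisted to JSS so the upload can be polled or retried
}
```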
- */ - public async sendUploadChunk( - uploadId: string, - chunkNumber: number, - rangeStart: number, - md5ThusFar: string, - postBody: Uint8Array, - user: string - ): Promise { - const url = `${FileStorageService.BASE_UPLOAD_PATH}/${uploadId}/chunk/${chunkNumber}`; - const rangeEnd = rangeStart + postBody.byteLength - 1; - const attemptRequest = () => ( - this.post(url, postBody, { - ...FileStorageService.getHttpRequestConfig(), - headers: { - "Content-Type": "application/octet-stream", - "Cumulative-MD5": md5ThusFar, - Range: `bytes=${rangeStart}-${rangeEnd}`, - "X-User-Id": user, - }, - } - ) - ); - try { - await attemptRequest(); - } catch (error) { - // Re-throw error if status can't be determine (like when the error doesn't have to do with the server/HTTP request - // or if the status is between acceptable ranges indicating another problem is afoot - if (!error.response || !error.response.status || (error.response.status >= 200 && error.response.status < 300)) { - throw error; - } - - // Currently there are infrastructure performance bottlenecks that cause the upload chunk endpoint - // to timeout unexpectedly. In this case this client needs to be robust - // enough to wait to see what ended up happening the chunk (within a reasonable timeframe) - // Additionally, this includes a feature to auto-retry chunks that are determined to need it - for (let statusCheckAttemptNumber = 0; statusCheckAttemptNumber < 8; statusCheckAttemptNumber++) { - // Geometric backoff up to 15 minutes - await new Promise((resolve) => setTimeout(resolve, statusCheckAttemptNumber * 30 * 1000)) - - const { chunkStatuses } = await this.getStatus(uploadId); - const chunkStatusForThisChunk = chunkStatuses[chunkNumber - 1]; - if (chunkStatusForThisChunk === UploadStatus.INACTIVE) { - throw new Error( - `Something went wrong uploading chunk number ${chunkNumber} for upload ${uploadId}. ` + - "Chunk was determined to have failed uploading." - ) - } else if (chunkStatusForThisChunk === UploadStatus.COMPLETE) { - return; - } else if (chunkStatusForThisChunk === UploadStatus.RETRY) { - try { - await attemptRequest(); - return; - } catch (error) { - // no-op, continue loop - } - } - } - - throw new Error( - `Timed out while waiting for chunk ${chunkNumber} to upload` - ); - } - } - - /** - * This is the final step to an upload. The MD5 is included, and will be used by the server for a checksum. - * Other post upload tasks may also occur. - */ - public finalize(uploadId: string, md5: string): Promise { - const url = `${FileStorageService.BASE_UPLOAD_PATH}/${uploadId}/finalize?md5=${md5}`; - return this.patch(url, undefined); - } - - /** - * This is a retry of the final asynchronous step of the upload, this might be necessary in cases where something goes awry - * on the server's side during this step of the upload. - * The MD5 is included, and will be used by the server for a checksum. - * Other post upload tasks may also occur. + * Retry a failed or canceled upload (FSS v4). */ - public retryFinalizeMd5(uploadId: string, md5?: string): Promise { - const url = `${FileStorageService.BASE_UPLOAD_PATH}/${uploadId}/finalize?md5=${md5}`; - return this.patch(url, undefined); - } - - /** - * This is a retry of the final asynchronous step of the upload, this might be necessary in cases where something goes awry - * on the server's side during this step of the upload. - * - * This method is meant for locaNasShortcut upload only; MD5 is not included. 
- */ - public retryFinalizeForLocalNasShortcutUpload(uploadId: string): Promise { - const url = `${FileStorageService.BASE_UPLOAD_PATH}/${uploadId}/finalize?localNasShortcut=true`; //TODO SWE-867 - return this.patch(url, undefined); + public retryUpload(uploadId: string): Promise { + const url = `${FileStorageService.BASE_UPLOAD_PATH}/${uploadId}/retry`; + return this.put(url, undefined, FileStorageService.getHttpRequestConfig()); } /** @@ -223,16 +116,8 @@ export default class FileStorageService extends HttpCacheClient { * service's perspective. */ public cancelUpload(uploadId: string): Promise { - const url = `${FileStorageService.BASE_UPLOAD_PATH}/${uploadId}`; - return this.delete(url, undefined); - } - - /** - * Get information about the specific chunk requested - */ - public getChunkInfo(uploadId: string, chunkNumber: number): Promise { - const url = `${FileStorageService.BASE_UPLOAD_PATH}/${uploadId}/chunk/${chunkNumber}`; - return this.get(url, undefined); + const url = `${FileStorageService.BASE_UPLOAD_PATH}/${uploadId}/cancel`; + return this.put(url, undefined, FileStorageService.getHttpRequestConfig()); } /** diff --git a/src/renderer/services/file-storage-service/test/file-storage-service.test.ts b/src/renderer/services/file-storage-service/test/file-storage-service.test.ts index 10888e48..1213afb3 100644 --- a/src/renderer/services/file-storage-service/test/file-storage-service.test.ts +++ b/src/renderer/services/file-storage-service/test/file-storage-service.test.ts @@ -28,147 +28,22 @@ describe("FileStorageService", () => { sandbox.restore(); }); - describe("registerUpload", () => { - it("sends correctly formatted post body", async () => { - // Arrange - const expectedResponse = { - uploadId: "12930132", - chunkSize: 14, - }; - const response = { - status: 200, - data: expectedResponse, - }; + describe("upload", () => { + it("check to see if we send correctly formatted post body", async () => { + const expectedResponse = { uploadId: "12930132", status: UploadStatus.WORKING, fileId: "abc123" }; + const response = { status: 200, data: expectedResponse }; const postStub = sandbox.stub().resolves(response); - const fileName = "my_cool_czi.czi"; - const fileType = FileType.IMAGE; - const expectedPostBody = { - fileName: fileName, - fileType: fileType, - localNasPath: undefined, - localNasShortcut: false, - multifile: false, - shouldBeInLocal: undefined - }; sandbox.replace(httpClient, "post", postStub as SinonStub); - // Act - const actual = await fss.registerUpload(fileName, fileType); - - // Assert - expect(actual).to.deep.equal(expectedResponse); - const actualPostBody = postStub.firstCall.args[1]; - expect(actualPostBody).to.deep.equal(expectedPostBody); - }); - }); - - describe("registerUpload", () => { - it("sends correctly formatted post body", async () => { - // Arrange - const expectedResponse = { - uploadId: "12930132", - chunkSize: 14, - }; - const response = { - status: 200, - data: expectedResponse, - }; - const localNasPath = '/test/nas/path'; - const postStub = sandbox.stub().resolves(response); const fileName = "my_cool_czi.czi"; const fileType = FileType.IMAGE; - const expectedPostBody = { - fileName: fileName, - fileType: fileType, - localNasPath: localNasPath, - multifile: false, - shouldBeInLocal: undefined - }; - sandbox.replace(httpClient, "post", postStub as SinonStub); + const sourcePath = "/allen/aics/test/my_cool_czi.czi"; - // Act - const actual = await fss.registerUpload(fileName, fileType, localNasPath); + const actual = await fss.upload(fileName, 
fileType, sourcePath, "VAST"); - // Assert expect(actual).to.deep.equal(expectedResponse); const actualPostBody = postStub.firstCall.args[1]; - expect(actualPostBody).to.deep.equal(expectedPostBody); - }); - }); - - describe("sendUploadChunk", () => { - class AxiosError extends Error { - public response: any; - - constructor(response: any) { - super("test error"); - this.response = response; - } - } - - it("retries chunk when error occurs and server status indicates RETRY", async () => { - // Arrange - const chunkNumber = 2; - httpClient.get.resolves({ - data: { - chunkStatuses: [UploadStatus.COMPLETE, UploadStatus.RETRY], - status: UploadStatus.WORKING, - uploadId: "anyId", - chunkSize: 2, - } - }) - httpClient.post.onFirstCall().rejects(new AxiosError({status: 400})); - httpClient.post.onSecondCall().resolves({ - status: 200, - data: { - uploadId: "anyId", - chunkNumber: 0, - }, - }); - - // Act - await fss.sendUploadChunk( - "9021312", - chunkNumber, - 1, - "anyMd5", - new Uint8Array(), - "testUser" - ); - - // Assert - expect(httpClient.post.callCount).to.equal(2); - }); - - it("creates the correct range header for the chunk", async () => { - // Arrange - const uploadId = "132390123"; - const chunkSize = 132413; - const chunkNumber = 9; - const postBody = new Uint8Array(); - const rangeStart = (chunkNumber - 1) * chunkSize; - const expectedRange = `bytes=${rangeStart}-${rangeStart - 1}`; - httpClient.post.resolves({ - status: 200, - data: { - uploadId, - chunkNumber: chunkNumber + 1, - }, - }); - - // Act - await fss.sendUploadChunk( - uploadId, - chunkNumber, - rangeStart, - "anyMd5", - postBody, - "testUser" - ); - - // Assert - const actualRange = httpClient.post.firstCall.args[2]?.headers?.Range; - expect(actualRange).to.deep.equal(expectedRange); + expect(actualPostBody).to.include({ fileName, fileType }); }); }); -}); +}); \ No newline at end of file diff --git a/src/renderer/state/configure-store.ts b/src/renderer/state/configure-store.ts index 896ef80c..0cd706ea 100644 --- a/src/renderer/state/configure-store.ts +++ b/src/renderer/state/configure-store.ts @@ -23,7 +23,6 @@ import { } from "../services"; import { FileManagementSystem, FileStorageService } from "../services"; import ApplicationInfoService from "../services/application-info-service"; -import ChunkedFileReader from "../services/file-management-system/ChunkedFileReader"; import EnvironmentAwareStorage from "./EnvironmentAwareStorage"; import { addEvent } from "./feedback/actions"; @@ -77,7 +76,7 @@ const storage = new EnvironmentAwareStorage(new ElectronStore()); // issues with Electron and/or Node running on // Linux (https://github.com/electron/electron/issues/10570). 
 axios.defaults.adapter = require("axios/lib/adapters/xhr");
 
-const resourcesValidForRetryPaths = [FileStorageService.ENDPOINT];
+const resourcesValidForRetryPaths = [FileStorageService.ENDPOINT_V4];
 axiosRetry(axios, {
   retries: 3,
   retryDelay: () => 10000,
@@ -101,7 +100,6 @@ const applicationInfoService = new ApplicationInfoService(
 export const reduxLogicDependencies: Partial = {
   applicationInfoService,
   fms: new FileManagementSystem({
-    fileReader: new ChunkedFileReader(),
     fss: new FileStorageService(httpClient, storage),
     jss: jssClient,
     mms: mmsClient,

From 391f955436da432485bc4ed6eec9b81adee621d7 Mon Sep 17 00:00:00 2001
From: "brian.kim"
Date: Thu, 18 Sep 2025 09:24:47 -0700
Subject: [PATCH 03/11] update dev docs

---
 README.md | 11 +++++++++++
 1 file changed, 11 insertions(+)

diff --git a/README.md b/README.md
index 038e5baf..ce6ffda8 100644
--- a/README.md
+++ b/README.md
@@ -14,10 +14,21 @@ npm i
 
 ### Step 2: Run Dev Server
 
+Use the following command to start the dev server (recommended):
 ```bash
 npm run dev
 ```
 
+Alternatively, you can start the React renderer
+```bash
+npm run start:renderer
+```
+or the Electron main process separately:
+```bash
+npm run start:main
+```
+
+
 ### Step 3: Switch to Staging Environment
 
 Switch to the staging environment after the app starts up through the File menu: File > Switch Environments, select the "Staging" button in the dialog.

From 190d1b46080314f44d6f0fe970d2814b289e8c35 Mon Sep 17 00:00:00 2001
From: "brian.kim"
Date: Thu, 18 Sep 2025 09:31:30 -0700
Subject: [PATCH 04/11] invert if statement

---
 .../containers/App/handleUploadJobUpdates.ts | 39 +++++++++----------
 1 file changed, 19 insertions(+), 20 deletions(-)

diff --git a/src/renderer/containers/App/handleUploadJobUpdates.ts b/src/renderer/containers/App/handleUploadJobUpdates.ts
index f2b5c272..15e00e8e 100644
--- a/src/renderer/containers/App/handleUploadJobUpdates.ts
+++ b/src/renderer/containers/App/handleUploadJobUpdates.ts
@@ -25,26 +25,8 @@ function handleFSSJobUpdate(job: FSSUpload, dispatch: Dispatch) {
 
   const isHybrid = typeof copyProgress === "number";
 
-  // cloud only upload
-  if (!isHybrid) {
-    if (checksumProgress < totalBytes) {
-      // update checksum progress as step 1
-      dispatch(updateUploadProgressInfo(job.jobId, {
-        bytesUploaded: checksumProgress,
-        totalBytes: totalBytes,
-        step: Step.ONE_CHECKSUM,
-      }));
-    } else if (checksumProgress === totalBytes && s3Progress < totalBytes) {
-      // update s3 upload progress as step 2
-      dispatch(updateUploadProgressInfo(job.jobId, {
-        bytesUploaded: s3Progress,
-        totalBytes: totalBytes,
-        step: Step.TWO,
-      }));
-    }
-
-    // hybrid uploads
-  } else {
+  // hybrid upload
+  if (isHybrid) {
     if (copyProgress < totalBytes) {
       // copy progress
       dispatch(updateUploadProgressInfo(job.jobId, {
@@ -67,6 +49,23 @@ function handleFSSJobUpdate(job: FSSUpload, dispatch: Dispatch) {
       step: Step.TWO, // Upload
     }));
   }
+  // cloud-only uploads
+  } else {
+    if (checksumProgress < totalBytes) {
+      // update checksum progress as step 1
+      dispatch(updateUploadProgressInfo(job.jobId, {
+        bytesUploaded: checksumProgress,
+        totalBytes: totalBytes,
+        step: Step.ONE_CHECKSUM,
+      }));
+    } else if (checksumProgress === totalBytes && s3Progress < totalBytes) {
+      // update s3 upload progress as step 2
+      dispatch(updateUploadProgressInfo(job.jobId, {
+        bytesUploaded: s3Progress,
+        totalBytes: totalBytes,
+        step: Step.TWO,
+      }));
+    }
   }
 }

From 020c4e6a6329958c3d4454d4ba9268f09c3e780d Mon Sep 17 00:00:00 2001
From: "brian.kim"
Date: Thu, 18 Sep 2025 09:34:43 -0700
Subject: [PATCH
05/11] clean up if statement --- .../containers/App/handleUploadJobUpdates.ts | 32 ++++++++++--------- 1 file changed, 17 insertions(+), 15 deletions(-) diff --git a/src/renderer/containers/App/handleUploadJobUpdates.ts b/src/renderer/containers/App/handleUploadJobUpdates.ts index 15e00e8e..4a8b9801 100644 --- a/src/renderer/containers/App/handleUploadJobUpdates.ts +++ b/src/renderer/containers/App/handleUploadJobUpdates.ts @@ -34,21 +34,23 @@ function handleFSSJobUpdate(job: FSSUpload, dispatch: Dispatch) { totalBytes: totalBytes * 2, // double totalBytes to account for copy + checksum for step 1 step: Step.ONE_COPY, })); - } else if (copyProgress === totalBytes && checksumProgress < totalBytes) { - // checksum progress - dispatch(updateUploadProgressInfo(job.jobId, { - bytesUploaded: totalBytes + checksumProgress, - totalBytes: totalBytes * 2, // double totalBytes to account for copy + checksum for step 1 - step: Step.ONE_CHECKSUM, - })); - } else if (checksumProgress === totalBytes && s3Progress < totalBytes) { - // upload progress - dispatch(updateUploadProgressInfo(job.jobId, { - bytesUploaded: s3Progress, - totalBytes: totalBytes, - step: Step.TWO, // Upload - })); - } + } else if (copyProgress === totalBytes) { + if (checksumProgress < totalBytes) { + // checksum progress + dispatch(updateUploadProgressInfo(job.jobId, { + bytesUploaded: totalBytes + checksumProgress, + totalBytes: totalBytes * 2, // double totalBytes to account for copy + checksum for step 1 + step: Step.ONE_CHECKSUM, + })); + + } else if (s3Progress < totalBytes) { + // upload progress + dispatch(updateUploadProgressInfo(job.jobId, { + bytesUploaded: s3Progress, + totalBytes: totalBytes, + step: Step.TWO, // Upload + })); + } // cloud-only uploads } else { if (checksumProgress < totalBytes) { From 1f251a66512860fd28218d5136c06c69537dacae Mon Sep 17 00:00:00 2001 From: "brian.kim" Date: Thu, 18 Sep 2025 09:38:18 -0700 Subject: [PATCH 06/11] oops --- .../containers/App/handleUploadJobUpdates.ts | 60 +------------------ 1 file changed, 1 insertion(+), 59 deletions(-) diff --git a/src/renderer/containers/App/handleUploadJobUpdates.ts b/src/renderer/containers/App/handleUploadJobUpdates.ts index 4a8b9801..041fdac0 100644 --- a/src/renderer/containers/App/handleUploadJobUpdates.ts +++ b/src/renderer/containers/App/handleUploadJobUpdates.ts @@ -42,7 +42,6 @@ function handleFSSJobUpdate(job: FSSUpload, dispatch: Dispatch) { totalBytes: totalBytes * 2, // double totalBytes to account for copy + checksum for step 1 step: Step.ONE_CHECKSUM, })); - } else if (s3Progress < totalBytes) { // upload progress dispatch(updateUploadProgressInfo(job.jobId, { @@ -51,6 +50,7 @@ function handleFSSJobUpdate(job: FSSUpload, dispatch: Dispatch) { step: Step.TWO, // Upload })); } + } // cloud-only uploads } else { if (checksumProgress < totalBytes) { @@ -70,61 +70,3 @@ function handleFSSJobUpdate(job: FSSUpload, dispatch: Dispatch) { } } } - -/** - * Handles a job update for a multifile (.zarr, .sldy) upload. - * Jumps straight to step 3 of 3 and reports progress as the sum of all bytes uploaded for relevant subfiles divided by - * the expected total size of the upload. 
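Patches 04–06 reshape `handleFSSJobUpdate`'s branching, and the arithmetic is easiest to see in isolation. An illustrative condensation (not the app's code) of the hybrid path, where step 1 reports copy plus checksum bytes against a doubled denominator:

```ts
interface ProgressInfo {
  bytesUploaded: number;
  totalBytes: number;
  step: "ONE_COPY" | "ONE_CHECKSUM" | "TWO";
}

function hybridProgress(
  copied: number,
  checksummed: number,
  s3Uploaded: number,
  totalBytes: number
): ProgressInfo {
  if (copied < totalBytes) {
    return { bytesUploaded: copied, totalBytes: totalBytes * 2, step: "ONE_COPY" };
  }
  if (checksummed < totalBytes) {
    // Copy is done, so the copied bytes count as the first half of step 1.
    return {
      bytesUploaded: totalBytes + checksummed,
      totalBytes: totalBytes * 2,
      step: "ONE_CHECKSUM",
    };
  }
  // Step 2 reports S3 bytes against the true file size.
  return { bytesUploaded: s3Uploaded, totalBytes, step: "TWO" };
}

// e.g. a 100-byte file fully copied and half checksummed reports 150/200
// for ONE_CHECKSUM, i.e. 75% of stage one.
```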
- */
-function handleFSSMultifileJobUpdate(job: FSSUpload, dispatch: Dispatch) {
-  if (job.serviceFields?.subfiles) {
-    const totalBytesUploaded: number = Object.values(
-      job.serviceFields.subfiles
-    ).reduce((accum: number, value: number) => accum + value, 0);
-    dispatch(
-      updateUploadProgressInfo(job.jobId, {
-        bytesUploaded: totalBytesUploaded,
-        totalBytes: job.serviceFields?.fileSize || 0,
-        step: Step.TWO,
-      })
-    );
-  }
-  // TODO?: Maybe raise error here if there is no subfiles?
-}
-
-/**
- * Updates the upload progress UI when JSS jobs get updated.
- *
- * @param job JSSJob that has been recently updated.
- * @param dispatch The Redux Dispatch function.
- */
-export function handleUploadJobUpdates(job: JSSJob, dispatch: Dispatch) {
-  if (job.service === Service.FILE_STORAGE_SERVICE) {
-    // An FSS job update happens when:
-    //   * fileId has been published
-    //   * progress has been published on a pre-upload md5, file upload, or post-upload md5
-    //   * progress has been published on a multifile upload's subfile
-    //   * the upload app has initialized a retry
-    const fssJob = job as FSSUpload;
-
-    // If a fileId is present, the upload has completed and should be marked as such.
-    // If the upload job has become inactive or requires a retry, mark it as "failed".
-    if (
-      job.serviceFields?.fileId ||
-      job.currentStage === UploadStatus.INACTIVE ||
-      job.currentStage === UploadStatus.RETRY
-    ) {
-      dispatch(receiveFSSJobCompletionUpdate(fssJob));
-    } else {
-      // Otherwise, report progress
-      if (fssJob.serviceFields?.multifile) {
-        handleFSSMultifileJobUpdate(fssJob, dispatch);
-      } else {
-        handleFSSJobUpdate(fssJob, dispatch);
-      }
-    }
-  } else if (job.serviceFields?.type === "upload") {
-    // Otherwise separate user's other jobs from ones created by this app
-    dispatch(receiveJobUpdate(job as UploadJob));
-  }
-}

From 8f12b855b3977c34d0d5bcf6294b007cef420e3e Mon Sep 17 00:00:00 2001
From: "brian.kim"
Date: Thu, 18 Sep 2025 09:44:08 -0700
Subject: [PATCH 07/11] fix handleUploadJobUpdates

---
 .../containers/App/handleUploadJobUpdates.ts  | 31 +++++++++++++++++++
 1 file changed, 31 insertions(+)

diff --git a/src/renderer/containers/App/handleUploadJobUpdates.ts b/src/renderer/containers/App/handleUploadJobUpdates.ts
index 041fdac0..4d3fc33a 100644
--- a/src/renderer/containers/App/handleUploadJobUpdates.ts
+++ b/src/renderer/containers/App/handleUploadJobUpdates.ts
@@ -3,6 +3,7 @@ import { Dispatch } from "react";
 import { FSSUpload, UploadStatus } from "../../services/file-storage-service";
 import {
   JSSJob,
+  JSSJobStatus,
   Service,
   UploadJob,
 } from "../../services/job-status-service/types";
@@ -70,3 +71,33 @@ function handleFSSJobUpdate(job: FSSUpload, dispatch: Dispatch) {
     }
   }
 }
+
+/**
+ * Updates the upload progress UI when JSS jobs get updated.
+ *
+ * @param job JSSJob that has been recently updated.
+ * @param dispatch The Redux Dispatch function.
+ */
+export function handleUploadJobUpdates(job: JSSJob, dispatch: Dispatch) {
+  if (job.service === Service.FILE_STORAGE_SERVICE) {
+    // An FSS job update happens when:
+    //   * fileId has been published
+    //   * progress has been published on a pre-upload md5, file upload, or post-upload md5
+    //   * progress has been published on a multifile upload's subfile
+    //   * the upload app has initialized a retry
+    const fssJob = job as FSSUpload;
+
+    // A terminal job status means the upload has finished (successfully or
+    // not) and its completion should be dispatched; otherwise report progress.
+ if (job.status === JSSJobStatus.SUCCEEDED || job.status === JSSJobStatus.FAILED) { + // Job is finished, either successfully or with failure + dispatch(receiveFSSJobCompletionUpdate(fssJob)); + } else { + // Job still in progress, report progress + handleFSSJobUpdate(fssJob, dispatch); + } + } else if (job.serviceFields?.type === "upload") { + // Otherwise separate user's other jobs from ones created by this app + dispatch(receiveJobUpdate(job as UploadJob)); + } +} From 0b91b769ad3905037ac47e3eb1b5539c57cf372b Mon Sep 17 00:00:00 2001 From: "brian.kim" Date: Thu, 18 Sep 2025 09:45:46 -0700 Subject: [PATCH 08/11] remove subfiles --- src/renderer/services/file-storage-service/index.ts | 1 - 1 file changed, 1 deletion(-) diff --git a/src/renderer/services/file-storage-service/index.ts b/src/renderer/services/file-storage-service/index.ts index 96942181..e84e5c6f 100644 --- a/src/renderer/services/file-storage-service/index.ts +++ b/src/renderer/services/file-storage-service/index.ts @@ -16,7 +16,6 @@ export interface FSSUpload extends JSSJob { checksumProgress?: number; // cloud + hybrid s3UploadProgress?: number; // final stage multifile?: boolean; - subfiles?: { [fileId: string]: number }; }; } From dc185583cfee390111da2eae4fefbd32bad22dac Mon Sep 17 00:00:00 2001 From: "brian.kim" Date: Thu, 18 Sep 2025 09:49:40 -0700 Subject: [PATCH 09/11] fix multifile --- src/renderer/services/file-storage-service/index.ts | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/renderer/services/file-storage-service/index.ts b/src/renderer/services/file-storage-service/index.ts index e84e5c6f..7a697421 100644 --- a/src/renderer/services/file-storage-service/index.ts +++ b/src/renderer/services/file-storage-service/index.ts @@ -92,7 +92,7 @@ public upload( fileType, source, path, - multifile: !!isMultifile, + multifile: isMultifile ?? 
false, shouldBeInLocal, }; return this.post( From 281d1b13fd4743788c32e5ca5f679cd656db2387 Mon Sep 17 00:00:00 2001 From: "brian.kim" Date: Thu, 18 Sep 2025 09:51:02 -0700 Subject: [PATCH 10/11] lint --- src/renderer/containers/App/handleUploadJobUpdates.ts | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/renderer/containers/App/handleUploadJobUpdates.ts b/src/renderer/containers/App/handleUploadJobUpdates.ts index 4d3fc33a..b541f7d9 100644 --- a/src/renderer/containers/App/handleUploadJobUpdates.ts +++ b/src/renderer/containers/App/handleUploadJobUpdates.ts @@ -1,6 +1,6 @@ import { Dispatch } from "react"; -import { FSSUpload, UploadStatus } from "../../services/file-storage-service"; +import { FSSUpload } from "../../services/file-storage-service"; import { JSSJob, JSSJobStatus, From 2a43ef6a61b72853851021d21bc62fd0a3d1ab0d Mon Sep 17 00:00:00 2001 From: "brian.kim" Date: Thu, 18 Sep 2025 10:08:16 -0700 Subject: [PATCH 11/11] removing this multifile test since we no longer do multifile status updates --- src/renderer/containers/App/test/App.test.ts | 32 -------------------- 1 file changed, 32 deletions(-) diff --git a/src/renderer/containers/App/test/App.test.ts b/src/renderer/containers/App/test/App.test.ts index 8b857758..1321e716 100644 --- a/src/renderer/containers/App/test/App.test.ts +++ b/src/renderer/containers/App/test/App.test.ts @@ -58,36 +58,4 @@ describe("App", () => { }); }); }); - it("dispatches updateUploadProgressInfo when multifile progress is updated", () => { - // Arrange - const fssJob: JSSJob = { - created: new Date(), - jobId: "foo123", - jobName: "test_file.txt", - modified: new Date(), - originationHost: "dev-aics-fup-001", - service: Service.FILE_STORAGE_SERVICE, - updateParent: false, - user: "fakeuser", - status: JSSJobStatus.WORKING, - serviceFields: { - multifile: true, - fileSize: 100, - subfiles: { - 'fileid1': 10, - 'fileid2': 10, - 'fileid3': 15 - }, - }, - }; - let actionPersisted = undefined; - const dispatch = (action: Action)=>{ - actionPersisted = action; - }; - const expectedAction = updateUploadProgressInfo(fssJob.jobId, { bytesUploaded: 35, totalBytes: fssJob.serviceFields?.fileSize, step: Step.TWO }) - // Act - handleUploadJobUpdates(fssJob, dispatch); - // Assert - expect(actionPersisted).to.deep.equal(expectedAction); - }); }); \ No newline at end of file
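A closing note on the one-character churn in PATCH 09: for a field typed `boolean | undefined`, `isMultifile ?? false` and `!!isMultifile` produce identical results; `??` simply states the default explicitly and would preserve a non-boolean truthy value if the type ever widened. A quick check:

```ts
const cases: Array<boolean | undefined> = [true, false, undefined];
for (const isMultifile of cases) {
  // `?? false` substitutes the default only for null/undefined, while `!!`
  // coerces every falsy value; for boolean | undefined the results agree.
  console.log(isMultifile, "->", isMultifile ?? false, !!isMultifile);
}
// true      -> true  true
// false     -> false false
// undefined -> false false
```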