From e3ca7df6b30a02cf2cb5fd1f162f72b31291fa15 Mon Sep 17 00:00:00 2001 From: Eryk Kullikowski Date: Fri, 5 Dec 2025 16:02:11 +0100 Subject: [PATCH 1/8] feat: add standalone DVWebloader V2 uploader bundle --- .gitignore | 1 + CHANGELOG.md | 4 +- package-lock.json | 92 ++-- package.json | 3 +- src/files/domain/models/FixityAlgorithm.ts | 1 + src/files/domain/useCases/addUploadedFiles.ts | 8 +- src/files/domain/useCases/replaceFile.ts | 8 +- src/files/domain/useCases/uploadFile.ts | 8 +- src/sections/Route.enum.ts | 4 +- .../edit-file-metadata/EditFileMetadata.tsx | 10 +- .../EditFileMetadataReferrer.ts | 8 + src/sections/replace-file/ReplaceFile.tsx | 10 +- .../replace-file/ReplaceFileReferrer.ts | 8 + .../shared/file-uploader/FileUploader.tsx | 2 +- .../file-uploader/FileUploaderHelper.ts | 5 +- .../file-uploader/FileUploaderPanel.tsx | 96 ++-- .../file-uploader/FileUploaderPanelCore.tsx | 99 +++++ .../file-upload-input/FileUploadInput.tsx | 171 +++---- src/sections/shared/file-uploader/types.ts | 24 + .../uploaded-files-list/UploadedFilesList.tsx | 17 +- .../useAddUploadedFilesToDataset.ts | 22 +- .../file-uploader/useFileUploadOperations.ts | 252 +++++++++++ .../file-uploader/useFileUploadState.ts | 203 +++++++++ .../file-uploader/useGetFixityAlgorithm.tsx | 5 +- .../shared/file-uploader/useReplaceFile.ts | 33 +- .../StandaloneFileRepository.ts | 130 ++++++ .../StandaloneFileUploaderPanel.tsx | 72 +++ src/standalone-uploader/config.ts | 106 +++++ src/standalone-uploader/dvwebloaderV2.html | 13 + src/standalone-uploader/index.tsx | 183 ++++++++ src/standalone-uploader/standalone.scss | 78 ++++ .../UploadedFilesList.stories.tsx | 1 + .../useFileUploadOperations.spec.tsx | 206 +++++++++ .../file-uploader/useFileUploadState.spec.tsx | 420 ++++++++++++++++++ vite.config.uploader.ts | 65 +++ 35 files changed, 2103 insertions(+), 265 deletions(-) create mode 100644 src/sections/edit-file-metadata/EditFileMetadataReferrer.ts create mode 100644 
src/sections/replace-file/ReplaceFileReferrer.ts create mode 100644 src/sections/shared/file-uploader/FileUploaderPanelCore.tsx create mode 100644 src/sections/shared/file-uploader/types.ts create mode 100644 src/sections/shared/file-uploader/useFileUploadOperations.ts create mode 100644 src/sections/shared/file-uploader/useFileUploadState.ts create mode 100644 src/standalone-uploader/StandaloneFileRepository.ts create mode 100644 src/standalone-uploader/StandaloneFileUploaderPanel.tsx create mode 100644 src/standalone-uploader/config.ts create mode 100644 src/standalone-uploader/dvwebloaderV2.html create mode 100644 src/standalone-uploader/index.tsx create mode 100644 src/standalone-uploader/standalone.scss create mode 100644 tests/component/sections/shared/file-uploader/useFileUploadOperations.spec.tsx create mode 100644 tests/component/sections/shared/file-uploader/useFileUploadState.spec.tsx create mode 100644 vite.config.uploader.ts diff --git a/.gitignore b/.gitignore index 8f36e4547..6857817ad 100644 --- a/.gitignore +++ b/.gitignore @@ -21,6 +21,7 @@ # production /dist +/dist-uploader # storybook /storybook-static diff --git a/CHANGELOG.md b/CHANGELOG.md index b9d236d0b..3f640922b 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -8,7 +8,9 @@ This changelog follows the principles of [Keep a Changelog](https://keepachangel ### Added -- Added the value entered by the user in the error messages for metadata field validation errors in EMAIL and URL type fields. For example, instead of showing “Point of Contact E-mail is not a valid email address.“, we now show “Point of Contact E-mail foo is not a valid email address.” +- DVWebloader V2: A standalone file uploader build that reuses React file upload components, supporting S3 direct uploads with configurable tagging. +- Shared file upload hooks (`useFileUploadState`, `useFileUploadOperations`) for better code reuse between the main SPA and standalone uploader. 
+- Added the value entered by the user in the error messages for metadata field validation errors in EMAIL and URL type fields. For example, instead of showing "Point of Contact E-mail is not a valid email address.", we now show "Point of Contact E-mail foo is not a valid email address." - Contact Owner button in File Page. - Share button in File Page. - Link Collection and Link Dataset features. diff --git a/package-lock.json b/package-lock.json index ccf50a516..428d38675 100644 --- a/package-lock.json +++ b/package-lock.json @@ -13,7 +13,7 @@ "@dnd-kit/sortable": "8.0.0", "@dnd-kit/utilities": "3.2.2", "@faker-js/faker": "7.6.0", - "@iqss/dataverse-client-javascript": "2.0.0-alpha.79", + "@iqss/dataverse-client-javascript": "file:../dataverse-client-javascript", "@iqss/dataverse-design-system": "*", "@istanbuljs/nyc-config-typescript": "1.0.2", "@tanstack/react-table": "8.9.2", @@ -127,6 +127,38 @@ ] } }, + "../dataverse-client-javascript": { + "name": "@iqss/dataverse-client-javascript", + "version": "2.1.0", + "license": "MIT", + "dependencies": { + "@types/node": "^18.15.11", + "@types/turndown": "^5.0.1", + "axios": "^1.12.2", + "turndown": "^7.1.2", + "typescript": "^4.9.5" + }, + "devDependencies": { + "@types/jest": "^29.5.12", + "@typescript-eslint/eslint-plugin": "5.51.0", + "@typescript-eslint/parser": "5.51.0", + "@web-std/file": "3.0.3", + "eslint": "8.33.0", + "eslint-config-prettier": "8.6.0", + "eslint-plugin-import": "2.27.5", + "eslint-plugin-jest": "27.2.1", + "eslint-plugin-prettier": "4.2.1", + "eslint-plugin-simple-import-sort": "10.0.0", + "eslint-plugin-unused-imports": "2.0.0", + "husky": "9.1.7", + "jest": "^29.4.3", + "jest-environment-jsdom": "29.7.0", + "prettier": "2.8.4", + "testcontainers": "^10.11.0", + "ts-jest": "^29.0.5", + "ts-node": "^10.9.2" + } + }, "node_modules/@adobe/css-tools": { "version": "4.4.4", "resolved": "https://registry.npmjs.org/@adobe/css-tools/-/css-tools-4.4.4.tgz", @@ -1953,40 +1985,8 @@ } }, 
"node_modules/@iqss/dataverse-client-javascript": { - "name": "@IQSS/dataverse-client-javascript", - "version": "2.0.0-alpha.79", - "resolved": "https://npm.pkg.github.com/download/@IQSS/dataverse-client-javascript/2.0.0-alpha.79/4128665172f9569fa40f60ca1c1d205f7fe8a401", - "integrity": "sha512-NfjzwOz06QJzSYAQ2eZ20tRINO49LrBaWQ9JTQNK0cLPTRdmYzUJgB2zzGzH/c/bi7LfGgfkPqO+6MH9HPFxpg==", - "license": "MIT", - "dependencies": { - "@types/node": "^18.15.11", - "@types/turndown": "^5.0.1", - "axios": "^1.12.2", - "turndown": "^7.1.2", - "typescript": "^4.9.5" - } - }, - "node_modules/@iqss/dataverse-client-javascript/node_modules/@types/node": { - "version": "18.19.127", - "resolved": "https://registry.npmjs.org/@types/node/-/node-18.19.127.tgz", - "integrity": "sha512-gSjxjrnKXML/yo0BO099uPixMqfpJU0TKYjpfLU7TrtA2WWDki412Np/RSTPRil1saKBhvVVKzVx/p/6p94nVA==", - "license": "MIT", - "dependencies": { - "undici-types": "~5.26.4" - } - }, - "node_modules/@iqss/dataverse-client-javascript/node_modules/typescript": { - "version": "4.9.5", - "resolved": "https://registry.npmjs.org/typescript/-/typescript-4.9.5.tgz", - "integrity": "sha512-1FXk9E2Hm+QzZQ7z+McJiHL4NW1F2EzMu9Nq9i3zAaGqibafqYwCVU6WyWAuyQRRzOlxou8xZSyXLEN8oKj24g==", - "license": "Apache-2.0", - "bin": { - "tsc": "bin/tsc", - "tsserver": "bin/tsserver" - }, - "engines": { - "node": ">=4.2.0" - } + "resolved": "../dataverse-client-javascript", + "link": true }, "node_modules/@iqss/dataverse-design-system": { "resolved": "packages/design-system", @@ -8569,12 +8569,6 @@ "license": "MIT", "optional": true }, - "node_modules/@types/turndown": { - "version": "5.0.5", - "resolved": "https://registry.npmjs.org/@types/turndown/-/turndown-5.0.5.tgz", - "integrity": "sha512-TL2IgGgc7B5j78rIccBtlYAnkuv8nUQqhQc+DSYV5j9Be9XOcm/SKOVRuA47xAVI3680Tk9B1d8flK2GWT2+4w==", - "license": "MIT" - }, "node_modules/@types/unist": { "version": "2.0.11", "resolved": "https://registry.npmjs.org/@types/unist/-/unist-2.0.11.tgz", @@ -9670,6 +9664,7 
@@ "version": "0.4.0", "resolved": "https://registry.npmjs.org/asynckit/-/asynckit-0.4.0.tgz", "integrity": "sha512-Oei9OH4tRh0YqU3GxhX79dM/mwVgvbZJaSNaRk+bshkj0S5cfHcgYakreBjrHwatXKbz+IoIdYLxrKim2MjW0Q==", + "dev": true, "license": "MIT" }, "node_modules/at-least-node": { @@ -9760,6 +9755,7 @@ "version": "1.12.2", "resolved": "https://registry.npmjs.org/axios/-/axios-1.12.2.tgz", "integrity": "sha512-vMJzPewAlRyOgxV2dU0Cuz2O8zzzx9VYtbJOaBgXFeLc4IV/Eg50n4LowmehOOR61S8ZMpc2K5Sa7g6A4jfkUw==", + "dev": true, "license": "MIT", "dependencies": { "follow-redirects": "^1.15.6", @@ -11017,6 +11013,7 @@ "version": "1.0.8", "resolved": "https://registry.npmjs.org/combined-stream/-/combined-stream-1.0.8.tgz", "integrity": "sha512-FQN4MRfuJeHf7cBbBMJFXhKSDq+2kAArBlmRBvcvFE5BB1HZKXtSFASDhdlz9zOYwxh8lDdnvmMOe/+5cdoEdg==", + "dev": true, "license": "MIT", "dependencies": { "delayed-stream": "~1.0.0" @@ -12026,6 +12023,7 @@ "version": "1.0.0", "resolved": "https://registry.npmjs.org/delayed-stream/-/delayed-stream-1.0.0.tgz", "integrity": "sha512-ZySD7Nf91aLB0RxL4KGrKHBXl7Eds1DAmEdcoVawXnLD7SDhpNgtuII2aAkg7a7QS41jxPSZ17p4VdGnMHk3MQ==", + "dev": true, "license": "MIT", "engines": { "node": ">=0.4.0" @@ -12693,6 +12691,7 @@ "version": "2.1.0", "resolved": "https://registry.npmjs.org/es-set-tostringtag/-/es-set-tostringtag-2.1.0.tgz", "integrity": "sha512-j6vWzfrGVfyXxge+O0x5sh6cvxAog0a/4Rdd2K36zCMV5eJ+/+tOAngRO8cODMNWbVRdVlmGZQL2YS3yR8bIUA==", + "dev": true, "license": "MIT", "dependencies": { "es-errors": "^1.3.0", @@ -13964,6 +13963,7 @@ "version": "1.15.11", "resolved": "https://registry.npmjs.org/follow-redirects/-/follow-redirects-1.15.11.tgz", "integrity": "sha512-deG2P0JfjrTxl50XGCDyfI97ZGVCxIpfKYmfyrQ54n5FO/0gfIES8C/Psl6kWVDolizcaaxZJnTS0QSMxvnsBQ==", + "dev": true, "funding": [ { "type": "individual", @@ -14039,6 +14039,7 @@ "version": "4.0.4", "resolved": "https://registry.npmjs.org/form-data/-/form-data-4.0.4.tgz", "integrity": 
"sha512-KrGhL9Q4zjj0kiUt5OO4Mr/A/jlI2jDYs5eHBpYHPcBEVSiipAvn2Ko2HnPe20rmcuuvMHNdZFp+4IlGTMF0Ow==", + "dev": true, "license": "MIT", "dependencies": { "asynckit": "^0.4.0", @@ -21346,6 +21347,7 @@ "version": "1.52.0", "resolved": "https://registry.npmjs.org/mime-db/-/mime-db-1.52.0.tgz", "integrity": "sha512-sPU4uV7dYlvtWJxwwxHD0PuihVNiE7TyAbQ5SWxDCB9mUYvOgroQOwYQQOKPJ8CIbE+1ETVlOoK1UC2nU3gYvg==", + "dev": true, "license": "MIT", "engines": { "node": ">= 0.6" @@ -21355,6 +21357,7 @@ "version": "2.1.35", "resolved": "https://registry.npmjs.org/mime-types/-/mime-types-2.1.35.tgz", "integrity": "sha512-ZDY+bPm5zTTF+YpCrAU9nK0UgICYPT0QtT1NZWFv4s++TNkcgVaT0g6+4R2uI4MjQjzysHB1zxuWL50hzaeXiw==", + "dev": true, "license": "MIT", "dependencies": { "mime-db": "1.52.0" @@ -24527,6 +24530,7 @@ "version": "1.1.0", "resolved": "https://registry.npmjs.org/proxy-from-env/-/proxy-from-env-1.1.0.tgz", "integrity": "sha512-D+zkORCbA9f1tdWRK0RaCR3GPv50cMxcrz4X8k5LTSUD1Dkw47mKJEZQNunItRTkWwgtaUSo1RVFRIG9ZXiFYg==", + "dev": true, "license": "MIT" }, "node_modules/psl": { @@ -28414,12 +28418,6 @@ "react": ">=15.0.0" } }, - "node_modules/undici-types": { - "version": "5.26.5", - "resolved": "https://registry.npmjs.org/undici-types/-/undici-types-5.26.5.tgz", - "integrity": "sha512-JlCMO+ehdEIKqlFxk6IfVoAUVmgz7cU7zD/h9XZ0qzeosSHmUJVOzSQvvYSYWXkFXC+IfLKSIffhv0sVZup6pA==", - "license": "MIT" - }, "node_modules/unified": { "version": "10.1.2", "resolved": "https://registry.npmjs.org/unified/-/unified-10.1.2.tgz", diff --git a/package.json b/package.json index 45cfb2198..10412c3e5 100644 --- a/package.json +++ b/package.json @@ -18,7 +18,7 @@ "@dnd-kit/sortable": "8.0.0", "@dnd-kit/utilities": "3.2.2", "@faker-js/faker": "7.6.0", - "@iqss/dataverse-client-javascript": "2.0.0-alpha.79", + "@iqss/dataverse-client-javascript": "file:../dataverse-client-javascript", "@iqss/dataverse-design-system": "*", "@istanbuljs/nyc-config-typescript": "1.0.2", "@tanstack/react-table": "8.9.2", @@ -64,6 +64,7 
@@ "scripts": { "start": "vite --base=/spa", "build": "tsc && vite build", + "build-uploader": "vite build --config vite.config.uploader.ts && cp -r public/locales dist-uploader/ && cp src/standalone-uploader/dvwebloaderV2.html dist-uploader/", "build-keycloak-theme": "npm run build && keycloakify build", "preview": "vite preview", "lint": "npm run typecheck && npm run lint:eslint && npm run lint:stylelint && npm run lint:prettier", diff --git a/src/files/domain/models/FixityAlgorithm.ts b/src/files/domain/models/FixityAlgorithm.ts index 3f062fcde..a676439ec 100644 --- a/src/files/domain/models/FixityAlgorithm.ts +++ b/src/files/domain/models/FixityAlgorithm.ts @@ -1,4 +1,5 @@ export enum FixityAlgorithm { + NONE = 'NONE', MD5 = 'MD5', SHA1 = 'SHA-1', SHA256 = 'SHA-256', diff --git a/src/files/domain/useCases/addUploadedFiles.ts b/src/files/domain/useCases/addUploadedFiles.ts index 552455b96..1a8970998 100644 --- a/src/files/domain/useCases/addUploadedFiles.ts +++ b/src/files/domain/useCases/addUploadedFiles.ts @@ -1,8 +1,14 @@ import { UploadedFileDTO } from '@iqss/dataverse-client-javascript' import { FileRepository } from '../repositories/FileRepository' +/** + * Minimal repository type for addUploadedFiles. + * Only requires the addUploadedFiles method. + */ +type AddUploadedFilesRepository = Pick + export function addUploadedFiles( - fileRepository: FileRepository, + fileRepository: AddUploadedFilesRepository, datasetId: number | string, files: UploadedFileDTO[] ): Promise { diff --git a/src/files/domain/useCases/replaceFile.ts b/src/files/domain/useCases/replaceFile.ts index 9240d84d8..29aa537dd 100644 --- a/src/files/domain/useCases/replaceFile.ts +++ b/src/files/domain/useCases/replaceFile.ts @@ -1,8 +1,14 @@ import { UploadedFileDTO } from '@iqss/dataverse-client-javascript' import { FileRepository } from '../repositories/FileRepository' +/** + * Minimal repository type for replaceFile. + * Only requires the replace method. 
+ */ +type ReplaceFileRepository = Pick + export function replaceFile( - fileRepository: FileRepository, + fileRepository: ReplaceFileRepository, fileId: number | string, newFile: UploadedFileDTO ): Promise { diff --git a/src/files/domain/useCases/uploadFile.ts b/src/files/domain/useCases/uploadFile.ts index 0b4febeac..f90919fc8 100644 --- a/src/files/domain/useCases/uploadFile.ts +++ b/src/files/domain/useCases/uploadFile.ts @@ -1,7 +1,13 @@ import { FileRepository } from '../repositories/FileRepository' +/** + * Minimal repository type for uploadFile. + * Only requires the uploadFile method. + */ +type UploadFileRepository = Pick + export function uploadFile( - fileRepository: FileRepository, + fileRepository: UploadFileRepository, datasetId: number | string, file: File, done: () => void, diff --git a/src/sections/Route.enum.ts b/src/sections/Route.enum.ts index d7d64fb7a..a112b4744 100644 --- a/src/sections/Route.enum.ts +++ b/src/sections/Route.enum.ts @@ -1,5 +1,5 @@ -import { ReplaceFileReferrer } from './replace-file/ReplaceFile' -import { EditFileMetadataReferrer } from '@/sections/edit-file-metadata/EditFileMetadata' +import { ReplaceFileReferrer } from './replace-file/ReplaceFileReferrer' +import { EditFileMetadataReferrer } from '@/sections/edit-file-metadata/EditFileMetadataReferrer' export enum Route { HOME = '/', diff --git a/src/sections/edit-file-metadata/EditFileMetadata.tsx b/src/sections/edit-file-metadata/EditFileMetadata.tsx index 04e11b855..7f4c1f356 100644 --- a/src/sections/edit-file-metadata/EditFileMetadata.tsx +++ b/src/sections/edit-file-metadata/EditFileMetadata.tsx @@ -12,20 +12,18 @@ import { } from '@/sections/edit-file-metadata/EditFilesList' import { useLoading } from '../../shared/contexts/loading/LoadingContext' import { useFile } from '@/sections/file/useFile' +import { EditFileMetadataReferrer } from './EditFileMetadataReferrer' import styles from './EditFileMetadata.module.scss' +// Re-export for backwards compatibility 
+export { EditFileMetadataReferrer } from './EditFileMetadataReferrer' + interface EditFileMetadataProps { fileId: number fileRepository: FileRepository referrer: EditFileMetadataReferrer } -// From where the user is coming from -export enum EditFileMetadataReferrer { - DATASET = 'dataset', - FILE = 'file' -} - export const EditFileMetadata = ({ fileId, fileRepository, referrer }: EditFileMetadataProps) => { const { t: tEditFileMetadata } = useTranslation('editFileMetadata') const { t: tFiles } = useTranslation('files') diff --git a/src/sections/edit-file-metadata/EditFileMetadataReferrer.ts b/src/sections/edit-file-metadata/EditFileMetadataReferrer.ts new file mode 100644 index 000000000..553ecd82f --- /dev/null +++ b/src/sections/edit-file-metadata/EditFileMetadataReferrer.ts @@ -0,0 +1,8 @@ +/** + * Enum indicating where the user came from when editing file metadata. + * Extracted to its own file to avoid circular import issues. + */ +export enum EditFileMetadataReferrer { + DATASET = 'dataset', + FILE = 'file' +} diff --git a/src/sections/replace-file/ReplaceFile.tsx b/src/sections/replace-file/ReplaceFile.tsx index ea8382456..40a969631 100644 --- a/src/sections/replace-file/ReplaceFile.tsx +++ b/src/sections/replace-file/ReplaceFile.tsx @@ -9,8 +9,12 @@ import { BreadcrumbsGenerator } from '../shared/hierarchy/BreadcrumbsGenerator' import { AppLoader } from '../shared/layout/app-loader/AppLoader' import { NotFoundPage } from '../not-found-page/NotFoundPage' import { FileUploader, OperationType } from '../shared/file-uploader/FileUploader' +import { ReplaceFileReferrer } from './ReplaceFileReferrer' import styles from './ReplaceFile.module.scss' +// Re-export for backwards compatibility +export { ReplaceFileReferrer } from './ReplaceFileReferrer' + interface ReplaceFileProps { fileRepository: FileRepository fileIdFromParams: number @@ -19,12 +23,6 @@ interface ReplaceFileProps { referrer?: ReplaceFileReferrer } -// From where the user is coming from -export 
enum ReplaceFileReferrer { - DATASET = 'dataset', - FILE = 'file' -} - export const ReplaceFile = ({ fileRepository, fileIdFromParams, diff --git a/src/sections/replace-file/ReplaceFileReferrer.ts b/src/sections/replace-file/ReplaceFileReferrer.ts new file mode 100644 index 000000000..357ca74cf --- /dev/null +++ b/src/sections/replace-file/ReplaceFileReferrer.ts @@ -0,0 +1,8 @@ +/** + * Enum indicating where the user came from when replacing a file. + * Extracted to its own file to avoid circular import issues. + */ +export enum ReplaceFileReferrer { + DATASET = 'dataset', + FILE = 'file' +} diff --git a/src/sections/shared/file-uploader/FileUploader.tsx b/src/sections/shared/file-uploader/FileUploader.tsx index b49a5101e..d522c4e56 100644 --- a/src/sections/shared/file-uploader/FileUploader.tsx +++ b/src/sections/shared/file-uploader/FileUploader.tsx @@ -1,6 +1,6 @@ import { File as FileModel } from '@/files/domain/models/File' import { FileRepository } from '@/files/domain/repositories/FileRepository' -import { ReplaceFileReferrer } from '@/sections/replace-file/ReplaceFile' +import { ReplaceFileReferrer } from '@/sections/replace-file/ReplaceFileReferrer' import { FileUploaderProvider } from './context/FileUploaderContext' import { useGetFixityAlgorithm } from './useGetFixityAlgorithm' import { FileUploaderGlobalConfig } from './context/fileUploaderReducer' diff --git a/src/sections/shared/file-uploader/FileUploaderHelper.ts b/src/sections/shared/file-uploader/FileUploaderHelper.ts index 0067fed89..f6edadee8 100644 --- a/src/sections/shared/file-uploader/FileUploaderHelper.ts +++ b/src/sections/shared/file-uploader/FileUploaderHelper.ts @@ -46,7 +46,10 @@ export class FileUploaderHelper { } public static async getChecksum(blob: Blob, algorithm: FixityAlgorithm): Promise { - if (algorithm === FixityAlgorithm.MD5) { + if (algorithm === FixityAlgorithm.NONE) { + // No checksum calculation needed + return '' + } else if (algorithm === FixityAlgorithm.MD5) { return 
await this.getMD5Checksum(blob) } else { return await this.getSubtleDigestChecksum(blob, algorithm) diff --git a/src/sections/shared/file-uploader/FileUploaderPanel.tsx b/src/sections/shared/file-uploader/FileUploaderPanel.tsx index 5fbb1986b..2fc94e90a 100644 --- a/src/sections/shared/file-uploader/FileUploaderPanel.tsx +++ b/src/sections/shared/file-uploader/FileUploaderPanel.tsx @@ -1,16 +1,18 @@ -import { useMemo } from 'react' -import { useDeepCompareEffect } from 'use-deep-compare' -import { toast } from 'react-toastify' -import { useTranslation } from 'react-i18next' +/** + * SPA File Uploader Panel + * + * This is the React Router-aware wrapper for FileUploaderPanelCore. + * It handles SPA-specific concerns: route navigation, useBlocker for unsaved changes. + */ + +import { useMemo, useCallback } from 'react' import { useBlocker, useNavigate } from 'react-router-dom' -import { Stack } from '@iqss/dataverse-design-system' import { FileRepository } from '@/files/domain/repositories/FileRepository' import { QueryParamKey, Route } from '@/sections/Route.enum' import { DatasetNonNumericVersionSearchParam } from '@/dataset/domain/models/Dataset' -import { ReplaceFileReferrer } from '@/sections/replace-file/ReplaceFile' +import { ReplaceFileReferrer } from '@/sections/replace-file/ReplaceFileReferrer' import { useFileUploaderContext } from './context/FileUploaderContext' -import FileUploadInput from './file-upload-input/FileUploadInput' -import { UploadedFilesList } from './uploaded-files-list/UploadedFilesList' +import { FileUploaderPanelCore } from './FileUploaderPanelCore' import { ConfirmLeaveModal } from './confirm-leave-modal/ConfirmLeaveModal' interface FileUploaderPanelProps { @@ -24,21 +26,14 @@ const FileUploaderPanel = ({ datasetPersistentId, referrer }: FileUploaderPanelProps) => { - const { t } = useTranslation('shared') const navigate = useNavigate() const { - fileUploaderState: { - files, - isSaving, - uploadingToCancelMap, - replaceOperationInfo, - 
addFilesToDatasetOperationInfo - }, - uploadedFiles, + fileUploaderState: { files, isSaving, uploadingToCancelMap }, removeAllFiles } = useFileUploaderContext() + // Block navigation when there are unsaved changes const shouldBlockAwayNavigation = useMemo(() => { return Object.keys(files).length > 0 || isSaving || uploadingToCancelMap.size > 0 }, [files, isSaving, uploadingToCancelMap.size]) @@ -47,15 +42,9 @@ const FileUploaderPanel = ({ const handleConfirmLeavePage = () => { if (navigationBlocker.state === 'blocked') { - // TODO - Remove the files from the S3 bucket we need an API endpoint for this. - removeAllFiles() - - // Cancel all the uploading files if there are any if (uploadingToCancelMap.size > 0) { - uploadingToCancelMap.forEach((cancel) => { - cancel() - }) + uploadingToCancelMap.forEach((cancel) => cancel()) } navigationBlocker.proceed() } @@ -67,55 +56,44 @@ const FileUploaderPanel = ({ } } - useDeepCompareEffect(() => { - const datasetPageRedirectUrl = `${Route.DATASETS}?${QueryParamKey.PERSISTENT_ID}=${datasetPersistentId}&${QueryParamKey.VERSION}=${DatasetNonNumericVersionSearchParam.DRAFT}` + // Navigation callbacks for the core component + const handleCancel = useCallback(() => navigate(-1), [navigate]) - // Listens to the replace operation info result and navigates to the new file page if the operation was successful - if (replaceOperationInfo.success && replaceOperationInfo.newFileIdentifier) { - toast.success(t('fileUploader.fileReplacedSuccessfully')) + const datasetPageUrl = `${Route.DATASETS}?${QueryParamKey.PERSISTENT_ID}=${datasetPersistentId}&${QueryParamKey.VERSION}=${DatasetNonNumericVersionSearchParam.DRAFT}` - if (referrer === ReplaceFileReferrer.DATASET) { - navigate(datasetPageRedirectUrl) - } + const handleFilesAddedSuccess = useCallback(() => { + navigate(datasetPageUrl) + }, [navigate, datasetPageUrl]) - if (referrer === ReplaceFileReferrer.FILE) { + const handleFileReplacedSuccess = useCallback( + (newFileId: number) => { + if 
(referrer === ReplaceFileReferrer.DATASET) { + navigate(datasetPageUrl) + } else if (referrer === ReplaceFileReferrer.FILE) { navigate( - `${Route.FILES}?id=${replaceOperationInfo.newFileIdentifier}&${QueryParamKey.DATASET_VERSION}=${DatasetNonNumericVersionSearchParam.DRAFT}` + `${Route.FILES}?id=${newFileId}&${QueryParamKey.DATASET_VERSION}=${DatasetNonNumericVersionSearchParam.DRAFT}` ) } - } - - // Listens to the add files to dataset operation info result and navigates to the dataset page if the operation was successful - if (addFilesToDatasetOperationInfo.success) { - toast.success(t('fileUploader.filesAddedToDatasetSuccessfully')) - navigate(datasetPageRedirectUrl) - } - }, [ - replaceOperationInfo, - addFilesToDatasetOperationInfo, - datasetPersistentId, - t, - navigate, - referrer - ]) + }, + [navigate, datasetPageUrl, referrer] + ) return ( - - - - {uploadedFiles.length > 0 && ( - - )} + <> + - + ) } diff --git a/src/sections/shared/file-uploader/FileUploaderPanelCore.tsx b/src/sections/shared/file-uploader/FileUploaderPanelCore.tsx new file mode 100644 index 000000000..5a42c524f --- /dev/null +++ b/src/sections/shared/file-uploader/FileUploaderPanelCore.tsx @@ -0,0 +1,99 @@ +/** + * Core File Uploader Panel + * + * This is the shared core component used by both the SPA (via FileUploaderPanel) + * and standalone mode (DVWebloader V2). It contains all the UI and logic, + * but delegates navigation/blocking behavior to the parent via callbacks. 
+ */ + +import { useEffect } from 'react' +import { useDeepCompareEffect } from 'use-deep-compare' +import { toast } from 'react-toastify' +import { useTranslation } from 'react-i18next' +import { Stack } from '@iqss/dataverse-design-system' +import { useFileUploaderContext } from './context/FileUploaderContext' +import FileUploadInput from './file-upload-input/FileUploadInput' +import { UploadedFilesList } from './uploaded-files-list/UploadedFilesList' +import { UploaderFileRepository } from './types' + +export interface FileUploaderPanelCoreProps { + fileRepository: UploaderFileRepository + datasetPersistentId: string + /** Called when user clicks Cancel */ + onCancel: () => void + /** Called when files are successfully added to dataset */ + onFilesAddedSuccess: () => void + /** Called when file is successfully replaced (for replace mode) */ + onFileReplacedSuccess?: (newFileId: number) => void + /** + * Called to register a cleanup function that should be invoked before leaving. + * The parent can use this with useBlocker (SPA) or beforeunload (standalone). 
+ */ + onRegisterUnsavedChangesCheck?: (hasUnsavedChanges: () => boolean) => void +} + +export const FileUploaderPanelCore = ({ + fileRepository, + datasetPersistentId, + onCancel, + onFilesAddedSuccess, + onFileReplacedSuccess, + onRegisterUnsavedChangesCheck +}: FileUploaderPanelCoreProps) => { + const { t } = useTranslation('shared') + + const { + fileUploaderState: { + files, + isSaving, + uploadingToCancelMap, + replaceOperationInfo, + addFilesToDatasetOperationInfo + }, + uploadedFiles + } = useFileUploaderContext() + + // Register the unsaved changes check with parent + useEffect(() => { + if (onRegisterUnsavedChangesCheck) { + onRegisterUnsavedChangesCheck(() => { + return Object.keys(files).length > 0 || isSaving || uploadingToCancelMap.size > 0 + }) + } + }, [files, isSaving, uploadingToCancelMap.size, onRegisterUnsavedChangesCheck]) + + // Handle successful operations + useDeepCompareEffect(() => { + // Handle replace file success + if (replaceOperationInfo.success && replaceOperationInfo.newFileIdentifier) { + toast.success(t('fileUploader.fileReplacedSuccessfully')) + onFileReplacedSuccess?.(replaceOperationInfo.newFileIdentifier) + } + + // Handle add files success + if (addFilesToDatasetOperationInfo.success) { + toast.success(t('fileUploader.filesAddedToDatasetSuccessfully')) + onFilesAddedSuccess() + } + }, [ + replaceOperationInfo, + addFilesToDatasetOperationInfo, + t, + onFilesAddedSuccess, + onFileReplacedSuccess + ]) + + return ( + + + + {uploadedFiles.length > 0 && ( + + )} + + ) +} diff --git a/src/sections/shared/file-uploader/file-upload-input/FileUploadInput.tsx b/src/sections/shared/file-uploader/file-upload-input/FileUploadInput.tsx index e24995e0c..5c60bdcf4 100644 --- a/src/sections/shared/file-uploader/file-upload-input/FileUploadInput.tsx +++ b/src/sections/shared/file-uploader/file-upload-input/FileUploadInput.tsx @@ -1,28 +1,24 @@ -import { ChangeEventHandler, DragEventHandler, memo, useRef, useState } from 'react' +import { 
ChangeEventHandler, DragEventHandler, memo, useCallback, useRef, useState } from 'react' import { Accordion, Button, Card, ProgressBar } from '@iqss/dataverse-design-system' import { ExclamationTriangle, Plus, XLg } from 'react-bootstrap-icons' import { Trans, useTranslation } from 'react-i18next' -import { Semaphore } from 'async-mutex' import { toast } from 'react-toastify' import cn from 'classnames' -import { FileRepository } from '@/files/domain/repositories/FileRepository' import MimeTypeDisplay from '@/files/domain/models/FileTypeToFriendlyTypeMap' -import { uploadFile } from '@/files/domain/useCases/uploadFile' import { useFileUploaderContext } from '../context/FileUploaderContext' -import { FileUploadState, FileUploadStatus } from '../context/fileUploaderReducer' +import { FileUploadStatus } from '../context/fileUploaderReducer' import { OperationType } from '../FileUploader' import { FileUploaderHelper } from '../FileUploaderHelper' +import { useFileUploadOperations } from '../useFileUploadOperations' import { SwalModal } from '../../swal-modal/SwalModal' +import { UploaderFileRepository } from '../types' import styles from './FileUploadInput.module.scss' type FileUploadInputProps = { - fileRepository: FileRepository + fileRepository: UploaderFileRepository datasetPersistentId: string } -const limit = 6 -const semaphore = new Semaphore(limit) - const FileUploadInput = ({ fileRepository, datasetPersistentId }: FileUploadInputProps) => { const { fileUploaderState, @@ -54,81 +50,56 @@ const FileUploadInput = ({ fileRepository, datasetPersistentId }: FileUploadInpu const canKeepUploading = operationType === OperationType.ADD_FILES_TO_DATASET ? 
true : totalFiles === 0 - const onFileUploadFailed = (file: File) => { - removeUploadingToCancel(FileUploaderHelper.getFileKey(file)) - semaphore.release(1) - } - - const onFileUploadFinished = async (file: File) => { - const fileKey = FileUploaderHelper.getFileKey(file) - - try { - const checksumValue = await FileUploaderHelper.getChecksum(file, checksumAlgorithm) - updateFile(fileKey, { checksumValue }) - } finally { - removeUploadingToCancel(fileKey) - semaphore.release(1) - } - } - - const uploadOneFile = async (file: File) => { - if (FileUploaderHelper.isDS_StoreFile(file)) { - toast.info(t('fileUploader.fileUploadSkipped.dsStore')) - return - } - - if ( - operationType === OperationType.REPLACE_FILE && - originalFile.metadata.type.value !== file.type - ) { - const shouldContinue = await requestFileTypeDifferentConfirmation( - originalFile.metadata.type.value, - file.type - ) - - if (!shouldContinue) { - // Reset the file input, otherwise in case user cancels but then tries to upload the same file again, the input will not trigger the change event - if (inputRef.current) { - inputRef.current.value = '' + // File type validation for replace operation + const validateBeforeUpload = useCallback( + async (file: File): Promise => { + if ( + operationType === OperationType.REPLACE_FILE && + originalFile.metadata.type.value !== file.type + ) { + const shouldContinue = await requestFileTypeDifferentConfirmation( + originalFile.metadata.type.value, + file.type + ) + + if (!shouldContinue) { + // Reset the file input + if (inputRef.current) { + inputRef.current.value = '' + } + return false } - // Stop the upload process for this file - return } - } - // File already uploaded - if (getFileByKey(FileUploaderHelper.getFileKey(file))) { - const fileInfo = getFileByKey(FileUploaderHelper.getFileKey(file)) as FileUploadState - toast.info( - t('fileUploader.fileUploadSkipped.alreadyUploaded', { fileName: fileInfo.fileName }) - ) + return true + }, + // 
eslint-disable-next-line react-hooks/exhaustive-deps -- requestFileTypeDifferentConfirmation is stable within the component + [operationType, originalFile] + ) - return + // Use the shared upload operations hook + const { uploadOneFile, handleDroppedItems } = useFileUploadOperations({ + fileRepository, + datasetPersistentId, + checksumAlgorithm, + addFile, + updateFile, + getFileByKey, + addUploadingToCancel, + removeUploadingToCancel, + validateBeforeUpload, + onFileSkipped: (reason, file) => { + if (reason === 'ds_store') { + toast.info(t('fileUploader.fileUploadSkipped.dsStore')) + } else if (reason === 'already_uploaded') { + const fileInfo = getFileByKey(FileUploaderHelper.getFileKey(file)) + if (fileInfo) { + toast.info( + t('fileUploader.fileUploadSkipped.alreadyUploaded', { fileName: fileInfo.fileName }) + ) + } + } } - - await semaphore.acquire(1) - - const fileKey = FileUploaderHelper.getFileKey(file) - - addFile(file) - - const cancelFunction = uploadFile( - fileRepository, - datasetPersistentId, - file, - () => { - updateFile(fileKey, { status: FileUploadStatus.DONE }) - void onFileUploadFinished(file) - }, - () => { - updateFile(fileKey, { status: FileUploadStatus.FAILED }) - onFileUploadFailed(file) - }, - (now) => updateFile(fileKey, { progress: now }), - (storageId) => updateFile(fileKey, { storageId }) - ) - - addUploadingToCancel(fileKey, cancelFunction) - } + }) const handleInputFileChange: ChangeEventHandler = (event) => { const filesArray = Array.from(event.target.files || []) @@ -144,35 +115,6 @@ const FileUploadInput = ({ fileRepository, datasetPersistentId }: FileUploadInpu } } - // waiting on the possibility to test folder drop: https://github.com/cypress-io/cypress/issues/19696 - const addFromDir = (dir: FileSystemDirectoryEntry) => { - /* istanbul ignore next */ - const reader = dir.createReader() - - reader.readEntries((entries) => { - entries.forEach((entry) => { - if (entry.isFile) { - const fse = entry as FileSystemFileEntry - 
fse.file((file) => { - const fileWithPath = new File([file], file.name, { - type: file.type, - lastModified: file.lastModified - }) - - Object.defineProperty(fileWithPath, 'webkitRelativePath', { - value: entry.fullPath, - writable: true - }) - - void uploadOneFile(fileWithPath) - }) - } else if (entry.isDirectory) { - addFromDir(entry as FileSystemDirectoryEntry) - } - }) - }) - } - const handleDropFiles: DragEventHandler = (event) => { event.preventDefault() event.stopPropagation() @@ -193,16 +135,7 @@ const FileUploadInput = ({ fileRepository, datasetPersistentId }: FileUploadInpu return } - Array.from(droppedItems).forEach((droppedFile) => { - if (droppedFile.webkitGetAsEntry()?.isDirectory) { - addFromDir(droppedFile.webkitGetAsEntry() as FileSystemDirectoryEntry) - } else if (droppedFile.webkitGetAsEntry()?.isFile) { - const fse = droppedFile.webkitGetAsEntry() as FileSystemFileEntry - fse.file((file) => { - void uploadOneFile(file) - }) - } - }) + handleDroppedItems(droppedItems) } } diff --git a/src/sections/shared/file-uploader/types.ts b/src/sections/shared/file-uploader/types.ts new file mode 100644 index 000000000..327186b69 --- /dev/null +++ b/src/sections/shared/file-uploader/types.ts @@ -0,0 +1,24 @@ +/** + * Minimal File Repository Types + * + * These types define the minimal interface needed by the file uploader components. + * This allows the uploader to work with both the full FileRepository (SPA mode) + * and a partial implementation (standalone mode). + */ + +import { FileRepository } from '@/files/domain/repositories/FileRepository' + +/** + * Minimal file repository interface needed by the uploader components. + * Standalone mode only implements these methods. + */ +export type UploaderFileRepository = Pick< + FileRepository, + 'uploadFile' | 'addUploadedFiles' | 'getFixityAlgorithm' +> + +/** + * Extended file repository interface that includes replace functionality. + * Used by components that support file replacement (SPA mode only). 
+ */ +export type FullUploaderFileRepository = UploaderFileRepository & Pick diff --git a/src/sections/shared/file-uploader/uploaded-files-list/UploadedFilesList.tsx b/src/sections/shared/file-uploader/uploaded-files-list/UploadedFilesList.tsx index 8786c6494..5db4b46a8 100644 --- a/src/sections/shared/file-uploader/uploaded-files-list/UploadedFilesList.tsx +++ b/src/sections/shared/file-uploader/uploaded-files-list/UploadedFilesList.tsx @@ -1,6 +1,5 @@ import { KeyboardEvent, useMemo, useState } from 'react' import { useTranslation } from 'react-i18next' -import { useNavigate } from 'react-router-dom' import { useDeepCompareEffect } from 'use-deep-compare' import { FormProvider, useFieldArray, useForm } from 'react-hook-form' import { @@ -17,9 +16,9 @@ import { useReplaceFile } from '../useReplaceFile' import { useAddUploadedFilesToDataset } from '../useAddUploadedFilesToDataset' import { UploadedFileRow } from './uploaded-file-row/UploadedFileRow' import { useFileUploaderContext } from '../context/FileUploaderContext' -import { FileRepository } from '@/files/domain/repositories/FileRepository' import { FileUploadStatus, UploadedFile } from '../context/fileUploaderReducer' import { OperationType } from '../FileUploader' +import { UploaderFileRepository } from '../types' import styles from './UploadedFilesList.module.scss' export interface FilesListFormData { @@ -27,16 +26,22 @@ export interface FilesListFormData { } interface UploadedFilesListProps { - fileRepository: FileRepository + fileRepository: UploaderFileRepository datasetPersistentId: string + /** + * Cancel handler. Required - typically navigates back. 
+ * In SPA mode: use `() => navigate(-1)` from React Router's useNavigate + * In standalone mode: use `() => window.history.back()` or redirect to dataset + */ + onCancel: () => void } export const UploadedFilesList = ({ fileRepository, - datasetPersistentId + datasetPersistentId, + onCancel }: UploadedFilesListProps) => { const { t } = useTranslation('shared') - const navigate = useNavigate() const { fileUploaderState: { @@ -130,7 +135,7 @@ export const UploadedFilesList = ({ }) } - const handleCancel = () => navigate(-1) + const handleCancel = () => onCancel() const handleKeyDown = (e: KeyboardEvent) => { if (e.key !== 'Enter') return diff --git a/src/sections/shared/file-uploader/useAddUploadedFilesToDataset.ts b/src/sections/shared/file-uploader/useAddUploadedFilesToDataset.ts index 1b1bba053..77092d802 100644 --- a/src/sections/shared/file-uploader/useAddUploadedFilesToDataset.ts +++ b/src/sections/shared/file-uploader/useAddUploadedFilesToDataset.ts @@ -1,19 +1,25 @@ import { toast } from 'react-toastify' import { useTranslation } from 'react-i18next' -import { UploadedFileDTO, WriteError } from '@iqss/dataverse-client-javascript' +import { UploadedFileDTO } from '@iqss/dataverse-client-javascript' import { addUploadedFiles } from '@/files/domain/useCases/addUploadedFiles' -import { FileRepository } from '@/files/domain/repositories/FileRepository' import { UploadedFileDTOMapper } from '@/files/infrastructure/mappers/UploadedFileDTOMapper' import { JSDataverseWriteErrorHandler } from '@/shared/helpers/JSDataverseWriteErrorHandler' import { useFileUploaderContext } from './context/FileUploaderContext' import { UploadedFile } from './context/fileUploaderReducer' +import { UploaderFileRepository } from './types' + +// WriteError type for error handling - avoid importing from client library due to CommonJS issues with local linked package +interface WriteErrorLike { + reason?: string + message?: string +} interface UseAddUploadedFilesToDatasetReturn { 
submitUploadedFilesToDataset: (uploadedFiles: UploadedFile[]) => Promise } export const useAddUploadedFilesToDataset = ( - fileRepository: FileRepository, + fileRepository: UploaderFileRepository, datasetPersistentId: string ): UseAddUploadedFilesToDatasetReturn => { const { setIsSaving, setAddFilesToDatasetOperationInfo, removeAllFiles } = @@ -42,9 +48,13 @@ export const useAddUploadedFilesToDataset = ( removeAllFiles() setAddFilesToDatasetOperationInfo({ success: true }) - } catch (err: WriteError | unknown) { - if (err instanceof WriteError) { - const error = new JSDataverseWriteErrorHandler(err) + } catch (err: unknown) { + // Check if error has reason property (WriteError-like) + const writeError = err as WriteErrorLike + if (writeError && (writeError.reason || writeError.message)) { + // Cast to any to satisfy JSDataverseWriteErrorHandler which expects WriteError + // eslint-disable-next-line @typescript-eslint/no-unsafe-argument, @typescript-eslint/no-explicit-any + const error = new JSDataverseWriteErrorHandler(writeError as any) const formattedError = error.getReasonWithoutStatusCode() ?? /* istanbul ignore next */ error.getErrorMessage() diff --git a/src/sections/shared/file-uploader/useFileUploadOperations.ts b/src/sections/shared/file-uploader/useFileUploadOperations.ts new file mode 100644 index 000000000..cbd5b8d11 --- /dev/null +++ b/src/sections/shared/file-uploader/useFileUploadOperations.ts @@ -0,0 +1,252 @@ +/** + * useFileUploadOperations - Shared hook for file upload operations + * + * This hook provides the core upload logic (uploading files, handling directories, + * computing checksums) that can be shared between the main SPA and standalone uploader. 
+ */ + +import { useCallback, useRef } from 'react' +import { Semaphore } from 'async-mutex' +import { uploadFile } from '@/files/domain/useCases/uploadFile' +import { FixityAlgorithm } from '@/files/domain/models/FixityAlgorithm' +import { FileUploaderHelper } from './FileUploaderHelper' +import { FileUploadStatus } from './useFileUploadState' +import { UploaderFileRepository } from './types' + +export const CONCURRENT_UPLOADS_LIMIT = 6 + +export interface FileUploadOperationsConfig { + fileRepository: UploaderFileRepository + datasetPersistentId: string + checksumAlgorithm: FixityAlgorithm + // Callbacks for state updates - compatible with both context and hook-based state + addFile: (file: File) => void + updateFile: ( + key: string, + updates: { + status?: FileUploadStatus + progress?: number + storageId?: string + checksumValue?: string + } + ) => void + getFileByKey: (key: string) => { status: string } | undefined + addUploadingToCancel: (key: string, cancel: () => void) => void + removeUploadingToCancel: (key: string) => void + // Optional callbacks for notifications + onFileSkipped?: (reason: 'ds_store' | 'already_uploaded', file: File) => void + onUploadCanceled?: (fileName: string) => void + // Optional callback for pre-upload validation (e.g., file type check for replace) + validateBeforeUpload?: (file: File) => Promise +} + +export interface FileUploadOperations { + /** Upload a single file */ + uploadOneFile: (file: File) => Promise + /** Recursively upload files from a directory */ + addFromDir: (dir: FileSystemDirectoryEntry) => void + /** Handle dropped items (files or directories) */ + handleDroppedItems: (items: DataTransferItemList) => void + /** Retry a failed upload */ + retryUpload: (file: File) => Promise + /** The semaphore used to limit concurrent uploads */ + semaphore: Semaphore +} + +/** + * Hook that provides file upload operations. + * Manages the upload process, directory traversal, and checksum calculation. 
+ */ +export function useFileUploadOperations(config: FileUploadOperationsConfig): FileUploadOperations { + const { + fileRepository, + datasetPersistentId, + checksumAlgorithm, + addFile, + updateFile, + getFileByKey, + addUploadingToCancel, + removeUploadingToCancel, + onFileSkipped, + validateBeforeUpload + } = config + + // Use a ref to persist semaphore across renders + const semaphoreRef = useRef(new Semaphore(CONCURRENT_UPLOADS_LIMIT)) + + const onFileUploadFailed = useCallback( + (file: File) => { + removeUploadingToCancel(FileUploaderHelper.getFileKey(file)) + semaphoreRef.current.release(1) + }, + [removeUploadingToCancel] + ) + + const onFileUploadFinished = useCallback( + async (file: File) => { + const fileKey = FileUploaderHelper.getFileKey(file) + + try { + // Skip checksum calculation if algorithm is NONE + if (checksumAlgorithm !== FixityAlgorithm.NONE) { + const checksumValue = await FileUploaderHelper.getChecksum(file, checksumAlgorithm) + updateFile(fileKey, { checksumValue }) + } + } finally { + removeUploadingToCancel(fileKey) + semaphoreRef.current.release(1) + } + }, + [checksumAlgorithm, updateFile, removeUploadingToCancel] + ) + + const uploadOneFile = useCallback( + async (file: File) => { + // Skip .DS_Store files + if (FileUploaderHelper.isDS_StoreFile(file)) { + onFileSkipped?.('ds_store', file) + return + } + + // Check if file already uploaded + const fileKey = FileUploaderHelper.getFileKey(file) + if (getFileByKey(fileKey)) { + onFileSkipped?.('already_uploaded', file) + return + } + + // Run optional pre-upload validation + if (validateBeforeUpload) { + const shouldContinue = await validateBeforeUpload(file) + if (!shouldContinue) { + return + } + } + + await semaphoreRef.current.acquire(1) + + addFile(file) + + const cancelFunction = uploadFile( + fileRepository, + datasetPersistentId, + file, + () => { + updateFile(fileKey, { status: FileUploadStatus.DONE }) + void onFileUploadFinished(file) + }, + () => { + updateFile(fileKey, { 
status: FileUploadStatus.FAILED }) + onFileUploadFailed(file) + }, + (now) => updateFile(fileKey, { progress: now }), + (storageId) => updateFile(fileKey, { storageId }) + ) + + addUploadingToCancel(fileKey, cancelFunction) + }, + [ + fileRepository, + datasetPersistentId, + addFile, + updateFile, + getFileByKey, + addUploadingToCancel, + onFileUploadFinished, + onFileUploadFailed, + onFileSkipped, + validateBeforeUpload + ] + ) + + const addFromDir = useCallback( + (dir: FileSystemDirectoryEntry) => { + const reader = dir.createReader() + + reader.readEntries((entries) => { + entries.forEach((entry) => { + if (entry.isFile) { + const fse = entry as FileSystemFileEntry + fse.file((file) => { + const fileWithPath = new File([file], file.name, { + type: file.type, + lastModified: file.lastModified + }) + + Object.defineProperty(fileWithPath, 'webkitRelativePath', { + value: entry.fullPath, + writable: true + }) + + void uploadOneFile(fileWithPath) + }) + } else if (entry.isDirectory) { + addFromDir(entry as FileSystemDirectoryEntry) + } + }) + }) + }, + [uploadOneFile] + ) + + const handleDroppedItems = useCallback( + (items: DataTransferItemList) => { + Array.from(items).forEach((item) => { + const entry = item.webkitGetAsEntry() + if (entry?.isDirectory) { + addFromDir(entry as FileSystemDirectoryEntry) + } else if (entry?.isFile) { + const fse = entry as FileSystemFileEntry + fse.file((file) => { + void uploadOneFile(file) + }) + } + }) + }, + [addFromDir, uploadOneFile] + ) + + const retryUpload = useCallback( + async (file: File) => { + const fileKey = FileUploaderHelper.getFileKey(file) + // Reset status to uploading before retry + updateFile(fileKey, { status: FileUploadStatus.UPLOADING, progress: 0 }) + + await semaphoreRef.current.acquire(1) + + const cancelFunction = uploadFile( + fileRepository, + datasetPersistentId, + file, + () => { + updateFile(fileKey, { status: FileUploadStatus.DONE }) + void onFileUploadFinished(file) + }, + () => { + 
updateFile(fileKey, { status: FileUploadStatus.FAILED }) + onFileUploadFailed(file) + }, + (now) => updateFile(fileKey, { progress: now }), + (storageId) => updateFile(fileKey, { storageId }) + ) + + addUploadingToCancel(fileKey, cancelFunction) + }, + [ + fileRepository, + datasetPersistentId, + updateFile, + addUploadingToCancel, + onFileUploadFinished, + onFileUploadFailed + ] + ) + + return { + uploadOneFile, + addFromDir, + handleDroppedItems, + retryUpload, + semaphore: semaphoreRef.current + } +} diff --git a/src/sections/shared/file-uploader/useFileUploadState.ts b/src/sections/shared/file-uploader/useFileUploadState.ts new file mode 100644 index 000000000..3031db361 --- /dev/null +++ b/src/sections/shared/file-uploader/useFileUploadState.ts @@ -0,0 +1,203 @@ +/** + * useFileUploadState - Shared hook for managing file upload state + * + * This hook provides the core state management logic for file uploads, + * usable by both the main SPA (via context) and the standalone uploader. + */ + +import { useState, useMemo, useCallback } from 'react' +import { FixityAlgorithm } from '@/files/domain/models/FixityAlgorithm' +import { FileUploaderHelper } from './FileUploaderHelper' + +// Re-export from reducer for backward compatibility, but define our own minimal enum +// for the standalone uploader that doesn't need the REMOVED status +export enum FileUploadStatus { + UPLOADING = 'uploading', + DONE = 'done', + FAILED = 'failed' +} + +export interface FileUploadState { + key: string + progress: number + status: FileUploadStatus + fileName: string + fileDir: string + fileType: string + fileSizeString: string + fileSize: number + fileLastModified: number + description: string + tags: string[] + restricted: boolean + storageId?: string + checksumValue?: string + checksumAlgorithm: FixityAlgorithm +} + +export type UploadedFile = FileUploadState & { storageId: string; checksumValue: string } + +/** + * Interface for the file upload state and actions. 
+ * This is what the useFileUploadState hook returns. + */ +export interface FileUploadStateActions { + /** All files keyed by their unique file key */ + files: Record + /** Files that have completed upload and have both storageId and checksumValue */ + uploadedFiles: UploadedFile[] + /** Files that are still uploading or have failed */ + uploadingFilesInProgress: FileUploadState[] + /** True if any file is currently uploading */ + anyFileUploading: boolean + /** Map of file keys to cancel functions for in-progress uploads */ + uploadingToCancelMap: Map void> + /** True while saving files to the dataset */ + isSaving: boolean + setIsSaving: (isSaving: boolean) => void + addFile: ( + file: File, + checksumAlgorithm: FixityAlgorithm, + defaults?: Partial + ) => void + updateFile: (key: string, updates: Partial) => void + removeFile: (key: string) => void + removeAllFiles: () => void + getFileByKey: (key: string) => FileUploadState | undefined + addUploadingToCancel: (key: string, cancel: () => void) => void + removeUploadingToCancel: (key: string) => void + /** Reset all state to initial values */ + reset: () => void +} + +export function formatFileSize(bytes: number): string { + if (bytes === 0) return '0 Bytes' + const k = 1024 + const sizes = ['Bytes', 'KB', 'MB', 'GB', 'TB'] + const i = Math.floor(Math.log(bytes) / Math.log(k)) + return `${parseFloat((bytes / Math.pow(k, i)).toFixed(2))} ${sizes[i]}` +} + +/** + * Hook that manages file upload state. + * Can be used standalone or integrated with React Context. 
+ */ +export function useFileUploadState(): FileUploadStateActions { + const [files, setFiles] = useState>({}) + const [uploadingToCancelMap, setUploadingToCancelMap] = useState void>>( + new Map() + ) + const [isSaving, setIsSaving] = useState(false) + + // Computed values + const uploadedFiles = useMemo(() => { + return Object.values(files).filter( + (f): f is UploadedFile => + f.status === FileUploadStatus.DONE && !!f.storageId && !!f.checksumValue + ) + }, [files]) + + const uploadingFilesInProgress = useMemo(() => { + return Object.values(files).filter((file) => file.status !== FileUploadStatus.DONE) + }, [files]) + + const anyFileUploading = useMemo(() => { + return Object.values(files).some((file) => file.status === FileUploadStatus.UPLOADING) + }, [files]) + + // Actions + const addFile = useCallback( + (file: File, checksumAlgorithm: FixityAlgorithm, defaults?: Partial) => { + const fileKey = FileUploaderHelper.getFileKey(file) + const fileDir = file.webkitRelativePath + ? file.webkitRelativePath.substring(0, file.webkitRelativePath.lastIndexOf('/')) + : '' + + setFiles((prev) => { + if (prev[fileKey]) return prev // Already exists + + return { + ...prev, + [fileKey]: { + key: fileKey, + progress: 0, + status: FileUploadStatus.UPLOADING, + fileName: file.name, + fileDir: defaults?.fileDir ?? fileDir, + fileType: file.type, + fileSizeString: formatFileSize(file.size), + fileSize: file.size, + fileLastModified: file.lastModified, + description: defaults?.description ?? '', + tags: defaults?.tags ?? [], + restricted: defaults?.restricted ?? 
false, + checksumAlgorithm + } + } + }) + }, + [] + ) + + const updateFile = useCallback((key: string, updates: Partial) => { + setFiles((prev) => { + if (!prev[key]) return prev + return { + ...prev, + [key]: { ...prev[key], ...updates } + } + }) + }, []) + + const removeFile = useCallback((key: string) => { + setFiles((prev) => { + const newFiles = { ...prev } + delete newFiles[key] + return newFiles + }) + }, []) + + const removeAllFiles = useCallback(() => { + setFiles({}) + }, []) + + const getFileByKey = useCallback((key: string) => files[key], [files]) + + const addUploadingToCancel = useCallback((key: string, cancel: () => void) => { + setUploadingToCancelMap((prev) => new Map(prev).set(key, cancel)) + }, []) + + const removeUploadingToCancel = useCallback((key: string) => { + setUploadingToCancelMap((prev) => { + const newMap = new Map(prev) + newMap.delete(key) + return newMap + }) + }, []) + + const reset = useCallback(() => { + // Cancel all in-progress uploads before resetting + uploadingToCancelMap.forEach((cancel) => cancel()) + setFiles({}) + setUploadingToCancelMap(new Map()) + setIsSaving(false) + }, [uploadingToCancelMap]) + + return { + files, + uploadedFiles, + uploadingFilesInProgress, + anyFileUploading, + uploadingToCancelMap, + isSaving, + setIsSaving, + addFile, + updateFile, + removeFile, + removeAllFiles, + getFileByKey, + addUploadingToCancel, + removeUploadingToCancel, + reset + } +} diff --git a/src/sections/shared/file-uploader/useGetFixityAlgorithm.tsx b/src/sections/shared/file-uploader/useGetFixityAlgorithm.tsx index ad283e81a..f729b0086 100644 --- a/src/sections/shared/file-uploader/useGetFixityAlgorithm.tsx +++ b/src/sections/shared/file-uploader/useGetFixityAlgorithm.tsx @@ -2,7 +2,10 @@ import { useEffect, useState } from 'react' import { FixityAlgorithm } from '@/files/domain/models/FixityAlgorithm' import { FileRepository } from '@/files/domain/repositories/FileRepository' -export const useGetFixityAlgorithm = 
(fileRepository: FileRepository) => { +/** Minimal interface for fixity algorithm fetching */ +type FixityAlgorithmProvider = Pick + +export const useGetFixityAlgorithm = (fileRepository: FixityAlgorithmProvider) => { const [fixityAlgorithm, setFixityAlgorithm] = useState(FixityAlgorithm.MD5) const [isLoadingFixityAlgorithm, setIsLoadingFixityAlgorithm] = useState(true) const [errorLoadingFixityAlgorithm, setErrorLoadingFixityAlgorithm] = useState(false) diff --git a/src/sections/shared/file-uploader/useReplaceFile.ts b/src/sections/shared/file-uploader/useReplaceFile.ts index 5b4f7357c..6291907a8 100644 --- a/src/sections/shared/file-uploader/useReplaceFile.ts +++ b/src/sections/shared/file-uploader/useReplaceFile.ts @@ -1,22 +1,39 @@ import { toast } from 'react-toastify' import { useTranslation } from 'react-i18next' -import { UploadedFileDTO, WriteError } from '@iqss/dataverse-client-javascript' +import { UploadedFileDTO } from '@iqss/dataverse-client-javascript' import { replaceFile } from '@/files/domain/useCases/replaceFile' -import { FileRepository } from '@/files/domain/repositories/FileRepository' import { UploadedFileDTOMapper } from '@/files/infrastructure/mappers/UploadedFileDTOMapper' import { JSDataverseWriteErrorHandler } from '@/shared/helpers/JSDataverseWriteErrorHandler' import { useFileUploaderContext } from './context/FileUploaderContext' import { UploadedFile } from './context/fileUploaderReducer' +import { UploaderFileRepository, FullUploaderFileRepository } from './types' + +// WriteError type for error handling - avoid importing from client library due to CommonJS issues +interface WriteErrorLike { + reason?: string + message?: string +} interface UseReplaceFileReturn { submitReplaceFile: (originalFileID: number, file: UploadedFile) => Promise } -export const useReplaceFile = (fileRepository: FileRepository): UseReplaceFileReturn => { +/** Type guard to check if repository supports replace */ +function hasReplaceMethod(repo: 
UploaderFileRepository): repo is FullUploaderFileRepository { + return 'replace' in repo && typeof repo.replace === 'function' +} + +export const useReplaceFile = (fileRepository: UploaderFileRepository): UseReplaceFileReturn => { const { setIsSaving, setReplaceOperationInfo, removeAllFiles } = useFileUploaderContext() const { t } = useTranslation('shared') const submitReplaceFile = async (originalFileID: number, newFileInfo: UploadedFile) => { + // Check if replace is supported + if (!hasReplaceMethod(fileRepository)) { + toast.error('File replacement is not supported in standalone mode') + return + } + setIsSaving(true) const newFileDTO: UploadedFileDTO = UploadedFileDTOMapper.toUploadedFileDTO( @@ -37,9 +54,13 @@ export const useReplaceFile = (fileRepository: FileRepository): UseReplaceFileRe removeAllFiles() setReplaceOperationInfo({ success: true, newFileIdentifier }) - } catch (err: WriteError | unknown) { - if (err instanceof WriteError) { - const error = new JSDataverseWriteErrorHandler(err) + } catch (err: unknown) { + // Check if error has reason property (WriteError-like) + const writeError = err as WriteErrorLike + if (writeError && (writeError.reason || writeError.message)) { + // Cast to any to satisfy JSDataverseWriteErrorHandler which expects WriteError + // eslint-disable-next-line @typescript-eslint/no-unsafe-argument, @typescript-eslint/no-explicit-any + const error = new JSDataverseWriteErrorHandler(writeError as any) const formattedError = error.getReasonWithoutStatusCode() ?? /* istanbul ignore next */ error.getErrorMessage() diff --git a/src/standalone-uploader/StandaloneFileRepository.ts b/src/standalone-uploader/StandaloneFileRepository.ts new file mode 100644 index 000000000..e1976d891 --- /dev/null +++ b/src/standalone-uploader/StandaloneFileRepository.ts @@ -0,0 +1,130 @@ +/** + * Standalone File Repository + * + * A simplified file repository for the standalone uploader that doesn't depend on + * the main app's config.js. 
It only implements the methods needed for uploading files. + */ + +import { + uploadFile as jsUploadFile, + addUploadedFilesToDataset, + UploadedFileDTO +} from '@iqss/dataverse-client-javascript' +import { FileRepository } from '@/files/domain/repositories/FileRepository' +import { FixityAlgorithm } from '@/files/domain/models/FixityAlgorithm' + +export class StandaloneFileRepository + implements Pick +{ + private siteUrl: string + + constructor(siteUrl: string) { + this.siteUrl = siteUrl + } + + async uploadFile( + datasetId: string | number, + fileHolder: { file: File }, + progress: (now: number) => void, + abortController: AbortController, + getStorageId: (storageId: string) => void + ): Promise { + const storageId = await jsUploadFile.execute( + datasetId, + fileHolder.file, + progress, + abortController + ) + getStorageId(storageId) + } + + async addUploadedFiles( + datasetId: string | number, + uploadedFileDTOs: UploadedFileDTO[] + ): Promise { + await addUploadedFilesToDataset.execute(datasetId, uploadedFileDTOs) + } + + async getFixityAlgorithm(): Promise { + try { + const response = await fetch(`${this.siteUrl}/api/files/fixityAlgorithm`) + if (!response.ok) { + console.warn('Could not fetch fixity algorithm, defaulting to MD5') + return FixityAlgorithm.MD5 + } + const data = (await response.json()) as { data?: { message?: string } } + const algorithm: string = data?.data?.message || 'MD5' + + // Map the string to FixityAlgorithm enum + switch (algorithm.toUpperCase()) { + case 'MD5': + return FixityAlgorithm.MD5 + case 'SHA-1': + case 'SHA1': + return FixityAlgorithm.SHA1 + case 'SHA-256': + case 'SHA256': + return FixityAlgorithm.SHA256 + case 'SHA-512': + case 'SHA512': + return FixityAlgorithm.SHA512 + default: + return FixityAlgorithm.MD5 + } + } catch (error) { + console.warn('Error fetching fixity algorithm:', error) + return FixityAlgorithm.MD5 + } + } + + // These methods are not used by the standalone uploader but are required by the interface + 
// They will throw if called + getById(): Promise { + throw new Error('Not implemented in standalone mode') + } + getByDatasetPersistentId(): Promise { + throw new Error('Not implemented in standalone mode') + } + getByDatasetPersistentIdAndVersion(): Promise { + throw new Error('Not implemented in standalone mode') + } + getFilesCountInfoByDatasetPersistentId(): Promise { + throw new Error('Not implemented in standalone mode') + } + getFilesTotalDownloadSizeByDatasetPersistentId(): Promise { + throw new Error('Not implemented in standalone mode') + } + getMultipleFileDownloadUrl(): never { + throw new Error('Not implemented in standalone mode') + } + getUserPermissionsById(): Promise { + throw new Error('Not implemented in standalone mode') + } + getDataTablesById(): Promise { + throw new Error('Not implemented in standalone mode') + } + getFileCitation(): Promise { + throw new Error('Not implemented in standalone mode') + } + deleteFile(): Promise { + throw new Error('Not implemented in standalone mode') + } + replaceFile(): Promise { + throw new Error('Not implemented in standalone mode') + } + restrictFile(): Promise { + throw new Error('Not implemented in standalone mode') + } + updateMetadata(): Promise { + throw new Error('Not implemented in standalone mode') + } + getVersionSummaries(): Promise { + throw new Error('Not implemented in standalone mode') + } + updateTabularTags(): Promise { + throw new Error('Not implemented in standalone mode') + } + updateFileCategories(): Promise { + throw new Error('Not implemented in standalone mode') + } +} diff --git a/src/standalone-uploader/StandaloneFileUploaderPanel.tsx b/src/standalone-uploader/StandaloneFileUploaderPanel.tsx new file mode 100644 index 000000000..862184587 --- /dev/null +++ b/src/standalone-uploader/StandaloneFileUploaderPanel.tsx @@ -0,0 +1,72 @@ +/** + * Standalone File Uploader Panel + * + * A thin wrapper around FileUploaderPanelCore for standalone mode (DVWebloader V2). 
+ * Handles standalone-specific concerns: beforeunload warning, redirect to JSF pages. + */ + +import { useEffect, useCallback } from 'react' +import { UploaderFileRepository } from '@/sections/shared/file-uploader/types' +import { useFileUploaderContext } from '@/sections/shared/file-uploader/context/FileUploaderContext' +import { FileUploaderPanelCore } from '@/sections/shared/file-uploader/FileUploaderPanelCore' + +interface StandaloneFileUploaderPanelProps { + fileRepository: UploaderFileRepository + datasetPersistentId: string + siteUrl: string +} + +export const StandaloneFileUploaderPanel = ({ + fileRepository, + datasetPersistentId, + siteUrl +}: StandaloneFileUploaderPanelProps) => { + const { + fileUploaderState: { files, isSaving, uploadingToCancelMap } + } = useFileUploaderContext() + + // Warn before leaving page if there are unsaved changes + useEffect(() => { + const hasUnsavedChanges = + Object.keys(files).length > 0 || isSaving || uploadingToCancelMap.size > 0 + + const handleBeforeUnload = (e: BeforeUnloadEvent) => { + if (hasUnsavedChanges) { + e.preventDefault() + e.returnValue = '' + return '' + } + } + + window.addEventListener('beforeunload', handleBeforeUnload) + return () => window.removeEventListener('beforeunload', handleBeforeUnload) + }, [files, isSaving, uploadingToCancelMap.size]) + + // Build the dataset page URL (JSF page) + const getDatasetUrl = useCallback(() => { + return `${siteUrl}/dataset.xhtml?persistentId=${encodeURIComponent( + datasetPersistentId + )}&version=DRAFT` + }, [siteUrl, datasetPersistentId]) + + // Navigation callbacks + const handleCancel = useCallback(() => { + window.location.href = getDatasetUrl() + }, [getDatasetUrl]) + + const handleFilesAddedSuccess = useCallback(() => { + // Small delay to let toast show before redirect + setTimeout(() => { + window.location.href = getDatasetUrl() + }, 1500) + }, [getDatasetUrl]) + + return ( + + ) +} diff --git a/src/standalone-uploader/config.ts 
b/src/standalone-uploader/config.ts
new file mode 100644
index 000000000..4cf403082
--- /dev/null
+++ b/src/standalone-uploader/config.ts
@@ -0,0 +1,106 @@
+/**
+ * Standalone Uploader Configuration
+ *
+ * Parses URL parameters for the standalone file uploader.
+ * Compatible with DVWebloader v1 URL params:
+ * - siteUrl: Base URL of the Dataverse instance
+ * - datasetPid: Persistent ID of the dataset
+ * - key: API key for authentication
+ * - dvLocale: Optional locale code (e.g., 'en', 'de')
+ * - useS3Tagging: Optional, set to 'false' to disable S3 tagging (for S3-compatible storage that doesn't support tagging)
+ * - maxRetries: Optional, maximum number of retries for multipart upload parts (default: 3)
+ * - uploadTimeoutMs: Optional, timeout in milliseconds for file upload operations (default: 0 = unlimited)
+ * - disableMD5Checksum: Optional, set to 'true' to disable MD5 checksum calculation
+ */
+
+/** Fully parsed, validated configuration for one standalone-uploader session. */
+export interface StandaloneUploaderConfig {
+  /** Base URL of the Dataverse instance, e.g. 'https://demo.dataverse.org' (from the 'siteUrl' param). */
+  siteUrl: string
+  /** Persistent ID of the target dataset, e.g. 'doi:10.5072/FK2/XXXXX' (from the 'datasetPid' param). */
+  datasetPid: string
+  /** Dataverse API key used for authentication (from the 'key' param). */
+  apiKey: string
+  /** Locale code for translations, e.g. 'en', 'de'; defaults to 'en' when the 'dvLocale' param is absent. */
+  dvLocale: string
+  /** Whether to use S3 object tagging. Set to false for S3-compatible storage that doesn't support tagging. Default: true */
+  useS3Tagging: boolean
+  /** Maximum number of retries for multipart upload parts. Default: 3 */
+  maxRetries: number
+  /** Timeout in milliseconds for file upload operations. 0 means unlimited. Default: 0 (unlimited) */
+  uploadTimeoutMs: number
+  /** Whether to disable MD5 checksum calculation. Default: false */
+  disableMD5Checksum: boolean
+}
+
+/** Successful parse result carrying the complete configuration. */
+export interface ConfigResult {
+  // Literal discriminant: lets callers narrow the union with `if (result.ok)`.
+  ok: true
+  config: StandaloneUploaderConfig
+}
+
+/** Failed parse result describing which required URL parameters were absent. */
+export interface ConfigError {
+  // Literal discriminant paired with ConfigResult.ok for exhaustive narrowing.
+  ok: false
+  /** Human-readable summary of the failure. */
+  error: string
+  /** Names of the required params that were missing (subset of 'siteUrl', 'datasetPid', 'key'). */
+  missingParams: string[]
+}
+
+/** Discriminated union returned by parseUrlConfig; discriminate on the `ok` field. */
+export type ConfigParseResult = ConfigResult | ConfigError
+
+/**
+ * Parse URL parameters and return configuration for the standalone uploader.
+ */ +export function parseUrlConfig(): ConfigParseResult { + const queryParams = new URLSearchParams(window.location.search) + + const siteUrl = queryParams.get('siteUrl') + const datasetPid = queryParams.get('datasetPid') + const apiKey = queryParams.get('key') + const dvLocale = queryParams.get('dvLocale') || 'en' + + // Parse useS3Tagging - default to true (enabled), only false if explicitly set to 'false' + const useS3TaggingParam = queryParams.get('useS3Tagging') + const useS3Tagging = useS3TaggingParam !== 'false' + + // Parse maxRetries - default to 3 + const maxRetriesParam = queryParams.get('maxRetries') + const maxRetries = maxRetriesParam ? parseInt(maxRetriesParam, 10) : 3 + + // Parse uploadTimeoutMs - default to 0 (unlimited) + const uploadTimeoutMsParam = queryParams.get('uploadTimeoutMs') + const uploadTimeoutMs = uploadTimeoutMsParam ? parseInt(uploadTimeoutMsParam, 10) : 0 + + // Parse disableMD5Checksum - default to false + const disableMD5ChecksumParam = queryParams.get('disableMD5Checksum') + const disableMD5Checksum = disableMD5ChecksumParam === 'true' + + const missingParams: string[] = [] + + if (!siteUrl) missingParams.push('siteUrl') + if (!datasetPid) missingParams.push('datasetPid') + if (!apiKey) missingParams.push('key') + + if (missingParams.length > 0) { + return { + ok: false, + error: `Missing required URL parameters: ${missingParams.join(', ')}`, + missingParams + } + } + + return { + ok: true, + config: { + siteUrl: siteUrl as string, + datasetPid: datasetPid as string, + apiKey: apiKey as string, + dvLocale, + useS3Tagging, + maxRetries, + uploadTimeoutMs, + disableMD5Checksum + } + } +} + +/** + * Extract the dataset ID from either a persistent ID or numeric ID. + * The API accepts both formats. 
+ */ +export function getDatasetIdentifier(datasetPid: string): string { + return datasetPid +} diff --git a/src/standalone-uploader/dvwebloaderV2.html b/src/standalone-uploader/dvwebloaderV2.html new file mode 100644 index 000000000..a6efcb5c2 --- /dev/null +++ b/src/standalone-uploader/dvwebloaderV2.html @@ -0,0 +1,13 @@ + + + + + + Dataverse WebLoader V2 + + + + +
+ + diff --git a/src/standalone-uploader/index.tsx b/src/standalone-uploader/index.tsx new file mode 100644 index 000000000..37f4a398f --- /dev/null +++ b/src/standalone-uploader/index.tsx @@ -0,0 +1,183 @@ +/** + * Standalone Uploader Entry Point + * + * This is the entry point for the standalone DVWebloader V2 bundle. + * It initializes React, i18n, and the API client, then mounts the uploader. + * + * This standalone version reuses the SPA's FileUploader components to avoid code duplication. + */ + +import { createRoot } from 'react-dom/client' +import { StrictMode } from 'react' +import i18next from 'i18next' +import { initReactI18next } from 'react-i18next' +import I18NextHttpBackend from 'i18next-http-backend' +import { ApiConfig, FilesConfig } from '@iqss/dataverse-client-javascript' +import { DataverseApiAuthMechanism } from '@iqss/dataverse-client-javascript/dist/core/infra/repositories/ApiConfig' +import { ToastContainer } from 'react-toastify' +import { parseUrlConfig } from './config' +import { StandaloneFileUploaderPanel } from './StandaloneFileUploaderPanel' +import { StandaloneFileRepository } from './StandaloneFileRepository' +import { FileUploaderProvider } from '@/sections/shared/file-uploader/context/FileUploaderContext' +import { FileUploaderGlobalConfig } from '@/sections/shared/file-uploader/context/fileUploaderReducer' +import { OperationType, StorageType } from '@/sections/shared/file-uploader/FileUploader' +import { LoadingConfigSpinner } from '@/sections/shared/file-uploader/loading-config-spinner/LoadingConfigSpinner' +import { useGetFixityAlgorithm } from '@/sections/shared/file-uploader/useGetFixityAlgorithm' +import { FixityAlgorithm } from '@/files/domain/models/FixityAlgorithm' + +// Import design system styles - use relative path for build compatibility +import '../../packages/design-system/dist/style.css' +import 'bootstrap/dist/css/bootstrap.min.css' +import 'react-toastify/dist/ReactToastify.css' +import './standalone.scss' + +// 
Error display component +function ConfigErrorDisplay({ error, missingParams }: { error: string; missingParams: string[] }) { + return ( +
+

Configuration Error

+

{error}

+ {missingParams.length > 0 && ( +
+

Expected URL format:

+ + ?siteUrl=https://your-dataverse.edu&datasetPid=doi:10.5072/FK2/XXXXX&key=your-api-key + +
+ )} +
+ ) +} + +// Standalone uploader wrapper that loads fixity algorithm and provides context +interface StandaloneUploaderWrapperProps { + fileRepository: StandaloneFileRepository + datasetPersistentId: string + siteUrl: string + disableMD5Checksum?: boolean +} + +function StandaloneUploaderWrapper({ + fileRepository, + datasetPersistentId, + siteUrl, + disableMD5Checksum +}: StandaloneUploaderWrapperProps) { + const { fixityAlgorithm: fetchedAlgorithm, isLoadingFixityAlgorithm } = + useGetFixityAlgorithm(fileRepository) + + // If checksum is disabled, use NONE. Otherwise use the fetched algorithm. + const fixityAlgorithm = disableMD5Checksum ? FixityAlgorithm.NONE : fetchedAlgorithm + + if (isLoadingFixityAlgorithm) { + return + } + + const initialConfig: FileUploaderGlobalConfig = { + storageType: 'S3' as StorageType, + operationType: OperationType.ADD_FILES_TO_DATASET, + checksumAlgorithm: fixityAlgorithm + } + + return ( + + + + ) +} + +// Initialize the application +async function init() { + const container = document.getElementById('root') + if (!container) { + console.error('Root element not found') + return + } + + const root = createRoot(container) + + // Parse URL configuration + const configResult = parseUrlConfig() + + if (!configResult.ok) { + root.render( + + + + ) + return + } + + const config = configResult.config + + // Initialize the API client with API key authentication + ApiConfig.init(`${config.siteUrl}/api/v1`, DataverseApiAuthMechanism.API_KEY, config.apiKey) + + // Configure file upload settings + // These are critical for S3-compatible storage that may not support all S3 features + FilesConfig.init({ + // useS3Tagging: Set to false for MinIO/S3-compatible storage without tagging support + useS3Tagging: config.useS3Tagging, + // maxMultipartRetries: Number of retry attempts for multipart upload failures + maxMultipartRetries: config.maxRetries, + // fileUploadTimeoutMs: Timeout for upload operations (0 = use axios default) + 
fileUploadTimeoutMs: config.uploadTimeoutMs || undefined + }) + + // Determine the base path for loading translation files + // In standalone mode, translations are bundled or loaded from the same origin + const basePath = window.location.pathname.substring( + 0, + window.location.pathname.lastIndexOf('/') + 1 + ) + + // Initialize i18next for translations + await i18next + .use(initReactI18next) + .use(I18NextHttpBackend) + .init({ + lng: config.dvLocale || 'en', + fallbackLng: 'en', + supportedLngs: ['en', 'de', 'fr', 'es', 'it', 'nl', 'pt', 'uk'], + lowerCaseLng: true, + ns: ['shared'], + defaultNS: 'shared', + returnNull: false, + backend: { + loadPath: `${basePath}locales/{{lng}}/{{ns}}.json` + } + }) + + // Create standalone file repository (doesn't depend on config.js) + const fileRepository = new StandaloneFileRepository(config.siteUrl) + + // Render the uploader with context provider + root.render( + +
+ +
+

Upload Files

+

+ Uploading to dataset: {config.datasetPid} +

+
+ +
+
+ ) +} + +// Start the application +init().catch((error) => { + console.error('Failed to initialize standalone uploader:', error) +}) diff --git a/src/standalone-uploader/standalone.scss b/src/standalone-uploader/standalone.scss new file mode 100644 index 000000000..8f350468c --- /dev/null +++ b/src/standalone-uploader/standalone.scss @@ -0,0 +1,78 @@ +/** + * Standalone Uploader Base Styles + */ + +// Reset and base styles for standalone mode +html, body { + margin: 0; + padding: 0; + font-family: -apple-system, BlinkMacSystemFont, 'Segoe UI', Roboto, 'Helvetica Neue', Arial, sans-serif; + background-color: #f5f5f5; + min-height: 100vh; +} + +#root { + min-height: 100vh; +} + +.standalone-uploader-container { + max-width: 1200px; + margin: 0 auto; + padding: 2rem; +} + +.standalone-uploader-header { + margin-bottom: 2rem; + + h1 { + margin: 0 0 0.5rem 0; + font-size: 1.75rem; + color: #333; + } +} + +.standalone-uploader-dataset-info { + margin: 0; + color: #666; + + code { + background-color: #f8f9fa; + padding: 0.2rem 0.4rem; + border-radius: 4px; + font-family: monospace; + font-size: 0.9rem; + } +} + +.standalone-error { + max-width: 600px; + margin: 2rem auto; + padding: 2rem; + background-color: #fff5f5; + border: 1px solid #dc3545; + border-radius: 8px; + text-align: center; + + h1 { + color: #dc3545; + margin-bottom: 1rem; + font-size: 1.5rem; + } + + p { + color: #721c24; + margin-bottom: 1rem; + } + + code { + display: block; + background-color: #f8f9fa; + padding: 1rem; + border-radius: 4px; + font-family: monospace; + text-align: left; + overflow-x: auto; + font-size: 0.85rem; + word-break: break-all; + } +} diff --git a/src/stories/shared/file-uploader/UploadedFilesList.stories.tsx b/src/stories/shared/file-uploader/UploadedFilesList.stories.tsx index 5c95ed48d..5dd0d7cf3 100644 --- a/src/stories/shared/file-uploader/UploadedFilesList.stories.tsx +++ b/src/stories/shared/file-uploader/UploadedFilesList.stories.tsx @@ -69,6 +69,7 @@ export const 
Default: Story = { console.log('Cancel clicked')} /> ) diff --git a/tests/component/sections/shared/file-uploader/useFileUploadOperations.spec.tsx b/tests/component/sections/shared/file-uploader/useFileUploadOperations.spec.tsx new file mode 100644 index 000000000..f956e564b --- /dev/null +++ b/tests/component/sections/shared/file-uploader/useFileUploadOperations.spec.tsx @@ -0,0 +1,206 @@ +import { act, renderHook } from '@testing-library/react' +import { + useFileUploadOperations, + FileUploadOperationsConfig, + CONCURRENT_UPLOADS_LIMIT +} from '@/sections/shared/file-uploader/useFileUploadOperations' +import { FileUploadStatus } from '@/sections/shared/file-uploader/useFileUploadState' +import { FileRepository } from '@/files/domain/repositories/FileRepository' +import { FixityAlgorithm } from '@/files/domain/models/FixityAlgorithm' +import { FileMockRepository } from '@/stories/file/FileMockRepository' + +describe('useFileUploadOperations', () => { + const createMockFile = (name: string, size = 1024): File => { + const content = new Array(size).fill('x').join('') + return new File([content], name, { type: 'text/plain', lastModified: Date.now() }) + } + + const createConfig = ( + overrides: Partial = {} + ): FileUploadOperationsConfig => ({ + fileRepository: new FileMockRepository() as unknown as FileRepository, + datasetPersistentId: 'doi:10.5072/FK2/TEST', + checksumAlgorithm: FixityAlgorithm.MD5, + addFile: cy.stub(), + updateFile: cy.stub(), + getFileByKey: cy.stub().returns(undefined), + addUploadingToCancel: cy.stub(), + removeUploadingToCancel: cy.stub(), + ...overrides + }) + + describe('constants', () => { + it('should have CONCURRENT_UPLOADS_LIMIT defined', () => { + expect(CONCURRENT_UPLOADS_LIMIT).to.equal(6) + }) + }) + + describe('uploadOneFile', () => { + it('should skip .DS_Store files', async () => { + const onFileSkipped = cy.stub() + const addFile = cy.stub() + const config = createConfig({ onFileSkipped, addFile }) + + const { result } = 
renderHook(() => useFileUploadOperations(config)) + + const dsStoreFile = createMockFile('.DS_Store') + + await act(async () => { + await result.current.uploadOneFile(dsStoreFile) + }) + + expect(onFileSkipped).to.have.been.calledWith('ds_store', dsStoreFile) + expect(addFile).to.not.have.been.called + }) + + it('should skip already uploaded files', async () => { + const onFileSkipped = cy.stub() + const addFile = cy.stub() + const getFileByKey = cy.stub().returns({ status: FileUploadStatus.DONE }) + const config = createConfig({ onFileSkipped, addFile, getFileByKey }) + + const { result } = renderHook(() => useFileUploadOperations(config)) + + const mockFile = createMockFile('test.txt') + + await act(async () => { + await result.current.uploadOneFile(mockFile) + }) + + expect(onFileSkipped).to.have.been.calledWith('already_uploaded', mockFile) + expect(addFile).to.not.have.been.called + }) + + it('should call addFile for new files', async () => { + const addFile = cy.stub() + const config = createConfig({ addFile }) + + const { result } = renderHook(() => useFileUploadOperations(config)) + + const mockFile = createMockFile('test.txt') + + await act(async () => { + await result.current.uploadOneFile(mockFile) + }) + + expect(addFile).to.have.been.calledWith(mockFile) + }) + + it('should call addUploadingToCancel with cancel function', async () => { + const addUploadingToCancel = cy.stub() + const config = createConfig({ addUploadingToCancel }) + + const { result } = renderHook(() => useFileUploadOperations(config)) + + const mockFile = createMockFile('test.txt') + + await act(async () => { + await result.current.uploadOneFile(mockFile) + }) + + expect(addUploadingToCancel).to.have.been.called + const [key, cancelFn] = addUploadingToCancel.firstCall.args + expect(key).to.be.a('string') + expect(cancelFn).to.be.a('function') + }) + + it('should run validateBeforeUpload if provided', async () => { + const validateBeforeUpload = cy.stub().resolves(true) + const addFile 
= cy.stub() + const config = createConfig({ validateBeforeUpload, addFile }) + + const { result } = renderHook(() => useFileUploadOperations(config)) + + const mockFile = createMockFile('test.txt') + + await act(async () => { + await result.current.uploadOneFile(mockFile) + }) + + expect(validateBeforeUpload).to.have.been.calledWith(mockFile) + expect(addFile).to.have.been.called + }) + + it('should not upload if validateBeforeUpload returns false', async () => { + const validateBeforeUpload = cy.stub().resolves(false) + const addFile = cy.stub() + const config = createConfig({ validateBeforeUpload, addFile }) + + const { result } = renderHook(() => useFileUploadOperations(config)) + + const mockFile = createMockFile('test.txt') + + await act(async () => { + await result.current.uploadOneFile(mockFile) + }) + + expect(validateBeforeUpload).to.have.been.calledWith(mockFile) + expect(addFile).to.not.have.been.called + }) + }) + + describe('semaphore', () => { + it('should return a semaphore for concurrent upload control', () => { + const config = createConfig() + const { result } = renderHook(() => useFileUploadOperations(config)) + + expect(result.current.semaphore).to.exist + expect(result.current.semaphore.acquire).to.be.a('function') + expect(result.current.semaphore.release).to.be.a('function') + }) + }) + + describe('retryUpload', () => { + it('should reset file status to uploading before retry', async () => { + const updateFile = cy.stub() + const config = createConfig({ updateFile }) + + const { result } = renderHook(() => useFileUploadOperations(config)) + + const mockFile = createMockFile('test.txt') + + await act(async () => { + await result.current.retryUpload(mockFile) + }) + + // First call should be to reset status + expect(updateFile.firstCall.args[1]).to.deep.include({ + status: FileUploadStatus.UPLOADING, + progress: 0 + }) + }) + + it('should register cancel function for retry', async () => { + const addUploadingToCancel = cy.stub() + const config 
= createConfig({ addUploadingToCancel }) + + const { result } = renderHook(() => useFileUploadOperations(config)) + + const mockFile = createMockFile('test.txt') + + await act(async () => { + await result.current.retryUpload(mockFile) + }) + + expect(addUploadingToCancel).to.have.been.called + }) + }) + + describe('handleDroppedItems', () => { + it('should be a function', () => { + const config = createConfig() + const { result } = renderHook(() => useFileUploadOperations(config)) + + expect(result.current.handleDroppedItems).to.be.a('function') + }) + }) + + describe('addFromDir', () => { + it('should be a function', () => { + const config = createConfig() + const { result } = renderHook(() => useFileUploadOperations(config)) + + expect(result.current.addFromDir).to.be.a('function') + }) + }) +}) diff --git a/tests/component/sections/shared/file-uploader/useFileUploadState.spec.tsx b/tests/component/sections/shared/file-uploader/useFileUploadState.spec.tsx new file mode 100644 index 000000000..e7fd4fc42 --- /dev/null +++ b/tests/component/sections/shared/file-uploader/useFileUploadState.spec.tsx @@ -0,0 +1,420 @@ +import { act, renderHook } from '@testing-library/react' +import { + useFileUploadState, + FileUploadStatus, + formatFileSize +} from '@/sections/shared/file-uploader/useFileUploadState' +import { FixityAlgorithm } from '@/files/domain/models/FixityAlgorithm' + +describe('useFileUploadState', () => { + const createMockFile = (name: string, size = 1024): File => { + const content = new Array(size).fill('x').join('') + return new File([content], name, { type: 'text/plain', lastModified: Date.now() }) + } + + describe('formatFileSize', () => { + it('should format 0 bytes correctly', () => { + expect(formatFileSize(0)).to.equal('0 Bytes') + }) + + it('should format bytes correctly', () => { + expect(formatFileSize(500)).to.equal('500 Bytes') + }) + + it('should format KB correctly', () => { + expect(formatFileSize(1024)).to.equal('1 KB') + 
expect(formatFileSize(2048)).to.equal('2 KB') + }) + + it('should format MB correctly', () => { + expect(formatFileSize(1024 * 1024)).to.equal('1 MB') + expect(formatFileSize(1.5 * 1024 * 1024)).to.equal('1.5 MB') + }) + + it('should format GB correctly', () => { + expect(formatFileSize(1024 * 1024 * 1024)).to.equal('1 GB') + }) + + it('should format TB correctly', () => { + expect(formatFileSize(1024 * 1024 * 1024 * 1024)).to.equal('1 TB') + }) + }) + + describe('initial state', () => { + it('should return empty files initially', () => { + const { result } = renderHook(() => useFileUploadState()) + + expect(result.current.files).to.deep.equal({}) + }) + + it('should return empty uploadedFiles initially', () => { + const { result } = renderHook(() => useFileUploadState()) + + expect(result.current.uploadedFiles).to.deep.equal([]) + }) + + it('should return empty uploadingFilesInProgress initially', () => { + const { result } = renderHook(() => useFileUploadState()) + + expect(result.current.uploadingFilesInProgress).to.deep.equal([]) + }) + + it('should return false for anyFileUploading initially', () => { + const { result } = renderHook(() => useFileUploadState()) + + expect(result.current.anyFileUploading).to.equal(false) + }) + + it('should return empty uploadingToCancelMap initially', () => { + const { result } = renderHook(() => useFileUploadState()) + + expect(result.current.uploadingToCancelMap.size).to.equal(0) + }) + + it('should return false for isSaving initially', () => { + const { result } = renderHook(() => useFileUploadState()) + + expect(result.current.isSaving).to.equal(false) + }) + }) + + describe('addFile', () => { + it('should add a file to the state', () => { + const { result } = renderHook(() => useFileUploadState()) + const mockFile = createMockFile('test.txt') + + act(() => { + result.current.addFile(mockFile, FixityAlgorithm.MD5) + }) + + const filesKeys = Object.keys(result.current.files) + expect(filesKeys).to.have.length(1) + + const 
addedFile = Object.values(result.current.files)[0] + expect(addedFile.fileName).to.equal('test.txt') + expect(addedFile.status).to.equal(FileUploadStatus.UPLOADING) + expect(addedFile.progress).to.equal(0) + expect(addedFile.checksumAlgorithm).to.equal(FixityAlgorithm.MD5) + }) + + it('should set anyFileUploading to true after adding a file', () => { + const { result } = renderHook(() => useFileUploadState()) + const mockFile = createMockFile('test.txt') + + act(() => { + result.current.addFile(mockFile, FixityAlgorithm.MD5) + }) + + expect(result.current.anyFileUploading).to.equal(true) + }) + + it('should not add duplicate files with the same key', () => { + const { result } = renderHook(() => useFileUploadState()) + const mockFile = createMockFile('test.txt') + + act(() => { + result.current.addFile(mockFile, FixityAlgorithm.MD5) + result.current.addFile(mockFile, FixityAlgorithm.MD5) + }) + + const filesKeys = Object.keys(result.current.files) + expect(filesKeys).to.have.length(1) + }) + + it('should use custom defaults when provided', () => { + const { result } = renderHook(() => useFileUploadState()) + const mockFile = createMockFile('test.txt') + + act(() => { + result.current.addFile(mockFile, FixityAlgorithm.MD5, { + description: 'Custom description', + tags: ['tag1', 'tag2'], + restricted: true, + fileDir: 'custom/path' + }) + }) + + const addedFile = Object.values(result.current.files)[0] + expect(addedFile.description).to.equal('Custom description') + expect(addedFile.tags).to.deep.equal(['tag1', 'tag2']) + expect(addedFile.restricted).to.equal(true) + expect(addedFile.fileDir).to.equal('custom/path') + }) + }) + + describe('updateFile', () => { + it('should update file properties', () => { + const { result } = renderHook(() => useFileUploadState()) + const mockFile = createMockFile('test.txt') + + act(() => { + result.current.addFile(mockFile, FixityAlgorithm.MD5) + }) + + const fileKey = Object.keys(result.current.files)[0] + + act(() => { + 
result.current.updateFile(fileKey, { progress: 50 }) + }) + + expect(result.current.files[fileKey].progress).to.equal(50) + }) + + it('should update status to DONE', () => { + const { result } = renderHook(() => useFileUploadState()) + const mockFile = createMockFile('test.txt') + + act(() => { + result.current.addFile(mockFile, FixityAlgorithm.MD5) + }) + + const fileKey = Object.keys(result.current.files)[0] + + act(() => { + result.current.updateFile(fileKey, { + status: FileUploadStatus.DONE, + storageId: 'storage-123', + checksumValue: 'abc123' + }) + }) + + expect(result.current.files[fileKey].status).to.equal(FileUploadStatus.DONE) + expect(result.current.anyFileUploading).to.equal(false) + }) + + it('should not update non-existent file', () => { + const { result } = renderHook(() => useFileUploadState()) + + const initialFiles = { ...result.current.files } + + act(() => { + result.current.updateFile('non-existent-key', { progress: 50 }) + }) + + expect(result.current.files).to.deep.equal(initialFiles) + }) + + it('should add file to uploadedFiles when status is DONE with storageId and checksumValue', () => { + const { result } = renderHook(() => useFileUploadState()) + const mockFile = createMockFile('test.txt') + + act(() => { + result.current.addFile(mockFile, FixityAlgorithm.MD5) + }) + + const fileKey = Object.keys(result.current.files)[0] + + act(() => { + result.current.updateFile(fileKey, { + status: FileUploadStatus.DONE, + storageId: 'storage-123', + checksumValue: 'abc123' + }) + }) + + expect(result.current.uploadedFiles).to.have.length(1) + expect(result.current.uploadedFiles[0].storageId).to.equal('storage-123') + expect(result.current.uploadedFiles[0].checksumValue).to.equal('abc123') + }) + }) + + describe('removeFile', () => { + it('should remove a file from the state', () => { + const { result } = renderHook(() => useFileUploadState()) + const mockFile = createMockFile('test.txt') + + act(() => { + result.current.addFile(mockFile, 
FixityAlgorithm.MD5) + }) + + const fileKey = Object.keys(result.current.files)[0] + + act(() => { + result.current.removeFile(fileKey) + }) + + expect(Object.keys(result.current.files)).to.have.length(0) + }) + }) + + describe('removeAllFiles', () => { + it('should remove all files from the state', () => { + const { result } = renderHook(() => useFileUploadState()) + + act(() => { + result.current.addFile(createMockFile('test1.txt'), FixityAlgorithm.MD5) + result.current.addFile(createMockFile('test2.txt'), FixityAlgorithm.MD5) + }) + + expect(Object.keys(result.current.files)).to.have.length(2) + + act(() => { + result.current.removeAllFiles() + }) + + expect(Object.keys(result.current.files)).to.have.length(0) + }) + }) + + describe('getFileByKey', () => { + it('should return file by key', () => { + const { result } = renderHook(() => useFileUploadState()) + const mockFile = createMockFile('test.txt') + + act(() => { + result.current.addFile(mockFile, FixityAlgorithm.MD5) + }) + + const fileKey = Object.keys(result.current.files)[0] + const file = result.current.getFileByKey(fileKey) + + expect(file?.fileName).to.equal('test.txt') + }) + + it('should return undefined for non-existent key', () => { + const { result } = renderHook(() => useFileUploadState()) + + const file = result.current.getFileByKey('non-existent-key') + + expect(file).to.be.undefined + }) + }) + + describe('uploadingToCancelMap', () => { + it('should add cancel function to map', () => { + const { result } = renderHook(() => useFileUploadState()) + const cancelFn = cy.stub() + + act(() => { + result.current.addUploadingToCancel('file-key', cancelFn) + }) + + expect(result.current.uploadingToCancelMap.size).to.equal(1) + expect(result.current.uploadingToCancelMap.get('file-key')).to.equal(cancelFn) + }) + + it('should remove cancel function from map', () => { + const { result } = renderHook(() => useFileUploadState()) + const cancelFn = cy.stub() + + act(() => { + 
result.current.addUploadingToCancel('file-key', cancelFn) + }) + + act(() => { + result.current.removeUploadingToCancel('file-key') + }) + + expect(result.current.uploadingToCancelMap.size).to.equal(0) + }) + }) + + describe('isSaving', () => { + it('should set isSaving state', () => { + const { result } = renderHook(() => useFileUploadState()) + + expect(result.current.isSaving).to.equal(false) + + act(() => { + result.current.setIsSaving(true) + }) + + expect(result.current.isSaving).to.equal(true) + + act(() => { + result.current.setIsSaving(false) + }) + + expect(result.current.isSaving).to.equal(false) + }) + }) + + describe('reset', () => { + it('should reset all state to initial values', () => { + const { result } = renderHook(() => useFileUploadState()) + const cancelFn = cy.stub() + + act(() => { + result.current.addFile(createMockFile('test.txt'), FixityAlgorithm.MD5) + result.current.addUploadingToCancel('file-key', cancelFn) + result.current.setIsSaving(true) + }) + + expect(Object.keys(result.current.files)).to.have.length(1) + expect(result.current.uploadingToCancelMap.size).to.equal(1) + expect(result.current.isSaving).to.equal(true) + + act(() => { + result.current.reset() + }) + + expect(Object.keys(result.current.files)).to.have.length(0) + expect(result.current.uploadingToCancelMap.size).to.equal(0) + expect(result.current.isSaving).to.equal(false) + }) + + it('should call cancel functions when resetting', () => { + const { result } = renderHook(() => useFileUploadState()) + const cancelFn = cy.stub() + + act(() => { + result.current.addUploadingToCancel('file-key', cancelFn) + }) + + act(() => { + result.current.reset() + }) + + expect(cancelFn).to.have.been.called + }) + }) + + describe('uploadingFilesInProgress', () => { + it('should include uploading files', () => { + const { result } = renderHook(() => useFileUploadState()) + + act(() => { + result.current.addFile(createMockFile('test.txt'), FixityAlgorithm.MD5) + }) + + 
expect(result.current.uploadingFilesInProgress).to.have.length(1) + }) + + it('should include failed files', () => { + const { result } = renderHook(() => useFileUploadState()) + + act(() => { + result.current.addFile(createMockFile('test.txt'), FixityAlgorithm.MD5) + }) + + const fileKey = Object.keys(result.current.files)[0] + + act(() => { + result.current.updateFile(fileKey, { status: FileUploadStatus.FAILED }) + }) + + expect(result.current.uploadingFilesInProgress).to.have.length(1) + expect(result.current.uploadingFilesInProgress[0].status).to.equal(FileUploadStatus.FAILED) + }) + + it('should exclude completed files', () => { + const { result } = renderHook(() => useFileUploadState()) + + act(() => { + result.current.addFile(createMockFile('test.txt'), FixityAlgorithm.MD5) + }) + + const fileKey = Object.keys(result.current.files)[0] + + act(() => { + result.current.updateFile(fileKey, { + status: FileUploadStatus.DONE, + storageId: 'storage-123', + checksumValue: 'abc123' + }) + }) + + expect(result.current.uploadingFilesInProgress).to.have.length(0) + }) + }) +}) diff --git a/vite.config.uploader.ts b/vite.config.uploader.ts new file mode 100644 index 000000000..292093227 --- /dev/null +++ b/vite.config.uploader.ts @@ -0,0 +1,65 @@ +/** + * Vite Configuration for Standalone DVWebloader V2 Bundle + * + * This configuration builds the file uploader as a standalone bundle + * that can be used independently from the main Dataverse SPA. 
+ */ + +import { defineConfig } from 'vite' +import react from '@vitejs/plugin-react' +import cssInjectedByJsPlugin from 'vite-plugin-css-injected-by-js' +import * as path from 'path' + +export default defineConfig({ + plugins: [ + react(), + // Inject CSS into the JS bundle so we only have a single file to load + cssInjectedByJsPlugin() + ], + // Don't copy public folder contents + publicDir: false, + // Optimize deps to properly handle the local linked CommonJS package + optimizeDeps: { + include: ['@iqss/dataverse-client-javascript'] + }, + build: { + outDir: 'dist-uploader', + emptyOutDir: true, + // Target modern browsers for smaller bundle size + target: 'es2020', + // Force CommonJS interop for linked packages + commonjsOptions: { + include: [/node_modules/, /dataverse-client-javascript/], + transformMixedEsModules: true + }, + rollupOptions: { + input: path.resolve(__dirname, 'src/standalone-uploader/index.tsx'), + output: { + // Single entry file + entryFileNames: 'dvwebloader-v2.js', + // Inline all chunks into the main bundle + inlineDynamicImports: true, + // Asset file naming + assetFileNames: 'assets/[name].[ext]' + } + }, + // Don't copy the public dir to dist; translation files are copied separately by the build-uploader npm script + copyPublicDir: false, + // Increase chunk size warning limit since we're bundling everything + chunkSizeWarningLimit: 2000, + // Enable minification + minify: 'esbuild', + // Generate sourcemaps for debugging + sourcemap: true + }, + resolve: { + alias: { + '@': path.resolve(__dirname, 'src'), + '@tests': path.resolve(__dirname, 'tests') + } + }, + define: { + // Define production mode + 'process.env.NODE_ENV': '"production"' + } +}) From 806a25492abb6ad064381a1d39735355ec3599da Mon Sep 17 00:00:00 2001 From: Eryk Kullikowski Date: Fri, 5 Dec 2025 16:48:20 +0100 Subject: [PATCH 2/8] feat: enhance standalone uploader configuration with window variables support --- src/standalone-uploader/config.ts | 55 ++++++++++++++-------- src/standalone-uploader/dvwebloaderV2.html | 9 ++++ 2 files changed, 45
insertions(+), 19 deletions(-) diff --git a/src/standalone-uploader/config.ts b/src/standalone-uploader/config.ts index 4cf403082..b561a22e7 100644 --- a/src/standalone-uploader/config.ts +++ b/src/standalone-uploader/config.ts @@ -1,18 +1,38 @@ /** * Standalone Uploader Configuration * - * Parses URL parameters for the standalone file uploader. - * Compatible with DVWebloader v1 URL params: + * Configuration can be set in two ways: + * 1. Window variables (set via script tag in HTML before the bundle loads): + * - window.dvWebloaderConfig = { useS3Tagging: false, maxRetries: 5, ... } + * 2. URL parameters (for siteUrl, datasetPid, key, dvLocale) + * + * URL Parameters (passed by Dataverse): * - siteUrl: Base URL of the Dataverse instance * - datasetPid: Persistent ID of the dataset * - key: API key for authentication * - dvLocale: Optional locale code (e.g., 'en', 'de') - * - useS3Tagging: Optional, set to 'false' to disable S3 tagging (for S3-compatible storage that doesn't support tagging) - * - maxRetries: Optional, maximum number of retries for multipart upload parts (default: 3) - * - uploadTimeoutMs: Optional, timeout in milliseconds for file upload operations (default: 0 = unlimited) - * - disableMD5Checksum: Optional, set to 'true' to disable MD5 checksum calculation + * + * Window config options (set in HTML): + * - useS3Tagging: Set to false to disable S3 tagging (default: true) + * - maxRetries: Maximum retries for multipart upload parts (default: 3) + * - uploadTimeoutMs: Timeout in ms for uploads, 0 = unlimited (default: 0) + * - disableMD5Checksum: Set to true to skip checksum calculation (default: false) */ +/** Window config interface for type safety */ +interface DvWebloaderWindowConfig { + useS3Tagging?: boolean + maxRetries?: number + uploadTimeoutMs?: number + disableMD5Checksum?: boolean +} + +declare global { + interface Window { + dvWebloaderConfig?: DvWebloaderWindowConfig + } +} + export interface StandaloneUploaderConfig { siteUrl: string 
datasetPid: string @@ -42,31 +62,28 @@ export interface ConfigError { export type ConfigParseResult = ConfigResult | ConfigError /** - * Parse URL parameters and return configuration for the standalone uploader. + * Parse URL parameters and window config, return configuration for the standalone uploader. */ export function parseUrlConfig(): ConfigParseResult { const queryParams = new URLSearchParams(window.location.search) + const windowConfig = window.dvWebloaderConfig || {} const siteUrl = queryParams.get('siteUrl') const datasetPid = queryParams.get('datasetPid') const apiKey = queryParams.get('key') const dvLocale = queryParams.get('dvLocale') || 'en' - // Parse useS3Tagging - default to true (enabled), only false if explicitly set to 'false' - const useS3TaggingParam = queryParams.get('useS3Tagging') - const useS3Tagging = useS3TaggingParam !== 'false' + // Parse useS3Tagging - window config takes precedence, then default to true + const useS3Tagging = windowConfig.useS3Tagging ?? true - // Parse maxRetries - default to 3 - const maxRetriesParam = queryParams.get('maxRetries') - const maxRetries = maxRetriesParam ? parseInt(maxRetriesParam, 10) : 3 + // Parse maxRetries - window config takes precedence, then default to 3 + const maxRetries = windowConfig.maxRetries ?? 3 - // Parse uploadTimeoutMs - default to 0 (unlimited) - const uploadTimeoutMsParam = queryParams.get('uploadTimeoutMs') - const uploadTimeoutMs = uploadTimeoutMsParam ? parseInt(uploadTimeoutMsParam, 10) : 0 + // Parse uploadTimeoutMs - window config takes precedence, then default to 0 (unlimited) + const uploadTimeoutMs = windowConfig.uploadTimeoutMs ?? 0 - // Parse disableMD5Checksum - default to false - const disableMD5ChecksumParam = queryParams.get('disableMD5Checksum') - const disableMD5Checksum = disableMD5ChecksumParam === 'true' + // Parse disableMD5Checksum - window config takes precedence, then default to false + const disableMD5Checksum = windowConfig.disableMD5Checksum ?? 
false const missingParams: string[] = [] diff --git a/src/standalone-uploader/dvwebloaderV2.html b/src/standalone-uploader/dvwebloaderV2.html index a6efcb5c2..475ad1cde 100644 --- a/src/standalone-uploader/dvwebloaderV2.html +++ b/src/standalone-uploader/dvwebloaderV2.html @@ -4,6 +4,15 @@ Dataverse WebLoader V2 + + From 06dfa4dfbb16a47568a937784d156e1d2575ba86 Mon Sep 17 00:00:00 2001 From: Eryk Kullikowski Date: Fri, 5 Dec 2025 17:02:53 +0100 Subject: [PATCH 3/8] feat: enhance standalone uploader UI with improved layout and dataset display --- src/standalone-uploader/dvwebloaderV2.html | 68 +++++++++++++++++++++- src/standalone-uploader/index.tsx | 6 -- 2 files changed, 67 insertions(+), 7 deletions(-) diff --git a/src/standalone-uploader/dvwebloaderV2.html b/src/standalone-uploader/dvwebloaderV2.html index 475ad1cde..573a7795a 100644 --- a/src/standalone-uploader/dvwebloaderV2.html +++ b/src/standalone-uploader/dvwebloaderV2.html @@ -13,10 +13,76 @@ disableMD5Checksum: false // Calculate MD5 checksums }; + -
+
+ + + +
+ + +
+ + diff --git a/src/standalone-uploader/index.tsx b/src/standalone-uploader/index.tsx index 37f4a398f..55f7e638c 100644 --- a/src/standalone-uploader/index.tsx +++ b/src/standalone-uploader/index.tsx @@ -160,12 +160,6 @@ async function init() {
-
-

Upload Files

-

- Uploading to dataset: {config.datasetPid} -

-
Date: Fri, 5 Dec 2025 19:45:46 +0100 Subject: [PATCH 4/8] feat: add helper text to standalone file uploader and update build script --- package.json | 2 +- .../FileUploaderPanel.module.scss | 16 ++++++++++ .../file-uploader/FileUploaderPanel.tsx | 18 +++++++++++ .../file-upload-input/FileUploadInput.tsx | 18 +---------- .../StandaloneFileUploaderPanel.module.scss | 16 ++++++++++ .../StandaloneFileUploaderPanel.tsx | 32 +++++++++++++++---- .../embeddedDvWebloader.html | 29 +++++++++++++++++ 7 files changed, 107 insertions(+), 24 deletions(-) create mode 100644 src/sections/shared/file-uploader/FileUploaderPanel.module.scss create mode 100644 src/standalone-uploader/StandaloneFileUploaderPanel.module.scss create mode 100644 src/standalone-uploader/embeddedDvWebloader.html diff --git a/package.json b/package.json index 10412c3e5..75b4d3554 100644 --- a/package.json +++ b/package.json @@ -64,7 +64,7 @@ "scripts": { "start": "vite --base=/spa", "build": "tsc && vite build", - "build-uploader": "vite build --config vite.config.uploader.ts && cp -r public/locales dist-uploader/ && cp src/standalone-uploader/dvwebloaderV2.html dist-uploader/", + "build-uploader": "vite build --config vite.config.uploader.ts && cp -r public/locales dist-uploader/ && cp src/standalone-uploader/dvwebloaderV2.html src/standalone-uploader/embeddedDvWebloader.html dist-uploader/", "build-keycloak-theme": "npm run build && keycloakify build", "preview": "vite preview", "lint": "npm run typecheck && npm run lint:eslint && npm run lint:stylelint && npm run lint:prettier", diff --git a/src/sections/shared/file-uploader/FileUploaderPanel.module.scss b/src/sections/shared/file-uploader/FileUploaderPanel.module.scss new file mode 100644 index 000000000..c1442ddb0 --- /dev/null +++ b/src/sections/shared/file-uploader/FileUploaderPanel.module.scss @@ -0,0 +1,16 @@ +@import 'node_modules/@iqss/dataverse-design-system/src/lib/assets/styles/design-tokens/colors.module'; + +.helper_text { + color: 
$dv-subtext-color; + font-size: 14px; + margin-bottom: 1rem; + + a { + color: $dv-primary-color; + text-decoration: underline; + + &:hover { + text-decoration: none; + } + } +} diff --git a/src/sections/shared/file-uploader/FileUploaderPanel.tsx b/src/sections/shared/file-uploader/FileUploaderPanel.tsx index 2fc94e90a..62544133a 100644 --- a/src/sections/shared/file-uploader/FileUploaderPanel.tsx +++ b/src/sections/shared/file-uploader/FileUploaderPanel.tsx @@ -6,6 +6,7 @@ */ import { useMemo, useCallback } from 'react' +import { Trans, useTranslation } from 'react-i18next' import { useBlocker, useNavigate } from 'react-router-dom' import { FileRepository } from '@/files/domain/repositories/FileRepository' import { QueryParamKey, Route } from '@/sections/Route.enum' @@ -14,6 +15,7 @@ import { ReplaceFileReferrer } from '@/sections/replace-file/ReplaceFileReferrer import { useFileUploaderContext } from './context/FileUploaderContext' import { FileUploaderPanelCore } from './FileUploaderPanelCore' import { ConfirmLeaveModal } from './confirm-leave-modal/ConfirmLeaveModal' +import styles from './FileUploaderPanel.module.scss' interface FileUploaderPanelProps { fileRepository: FileRepository @@ -27,6 +29,7 @@ const FileUploaderPanel = ({ referrer }: FileUploaderPanelProps) => { const navigate = useNavigate() + const { t } = useTranslation('shared') const { fileUploaderState: { files, isSaving, uploadingToCancelMap }, @@ -80,6 +83,21 @@ const FileUploaderPanel = ({ return ( <> +

+ + ) + }} + /> +

-

- - ) - }} - /> -

- {t('fileUploader.accordionTitle')} diff --git a/src/standalone-uploader/StandaloneFileUploaderPanel.module.scss b/src/standalone-uploader/StandaloneFileUploaderPanel.module.scss new file mode 100644 index 000000000..c1442ddb0 --- /dev/null +++ b/src/standalone-uploader/StandaloneFileUploaderPanel.module.scss @@ -0,0 +1,16 @@ +@import 'node_modules/@iqss/dataverse-design-system/src/lib/assets/styles/design-tokens/colors.module'; + +.helper_text { + color: $dv-subtext-color; + font-size: 14px; + margin-bottom: 1rem; + + a { + color: $dv-primary-color; + text-decoration: underline; + + &:hover { + text-decoration: none; + } + } +} diff --git a/src/standalone-uploader/StandaloneFileUploaderPanel.tsx b/src/standalone-uploader/StandaloneFileUploaderPanel.tsx index 862184587..57887cbae 100644 --- a/src/standalone-uploader/StandaloneFileUploaderPanel.tsx +++ b/src/standalone-uploader/StandaloneFileUploaderPanel.tsx @@ -6,9 +6,11 @@ */ import { useEffect, useCallback } from 'react' +import { Trans, useTranslation } from 'react-i18next' import { UploaderFileRepository } from '@/sections/shared/file-uploader/types' import { useFileUploaderContext } from '@/sections/shared/file-uploader/context/FileUploaderContext' import { FileUploaderPanelCore } from '@/sections/shared/file-uploader/FileUploaderPanelCore' +import styles from './StandaloneFileUploaderPanel.module.scss' interface StandaloneFileUploaderPanelProps { fileRepository: UploaderFileRepository @@ -21,6 +23,7 @@ export const StandaloneFileUploaderPanel = ({ datasetPersistentId, siteUrl }: StandaloneFileUploaderPanelProps) => { + const { t } = useTranslation('shared') const { fileUploaderState: { files, isSaving, uploadingToCancelMap } } = useFileUploaderContext() @@ -62,11 +65,28 @@ export const StandaloneFileUploaderPanel = ({ }, [getDatasetUrl]) return ( - + <> +

+ + ) + }} + /> +

+ + ) } diff --git a/src/standalone-uploader/embeddedDvWebloader.html b/src/standalone-uploader/embeddedDvWebloader.html new file mode 100644 index 000000000..01f8d1c1a --- /dev/null +++ b/src/standalone-uploader/embeddedDvWebloader.html @@ -0,0 +1,29 @@ + + + + + + Upload Files + + + + + + + +
+ + From c1b0c4bbe0292831572db3b6a97ece0333742001 Mon Sep 17 00:00:00 2001 From: Eryk Kullikowski Date: Fri, 5 Dec 2025 20:43:48 +0100 Subject: [PATCH 5/8] feat: improve iframe dynamic resizing and adjust body padding in standalone uploader --- .../embeddedDvWebloader.html | 25 +++++++++++++++++-- 1 file changed, 23 insertions(+), 2 deletions(-) diff --git a/src/standalone-uploader/embeddedDvWebloader.html b/src/standalone-uploader/embeddedDvWebloader.html index 01f8d1c1a..28aa47c36 100644 --- a/src/standalone-uploader/embeddedDvWebloader.html +++ b/src/standalone-uploader/embeddedDvWebloader.html @@ -14,14 +14,35 @@ }; +
From 9d42cdc2f413397a5df93a385a1f465579ce48f6 Mon Sep 17 00:00:00 2001 From: Eryk Kullikowski Date: Mon, 8 Dec 2025 12:22:36 +0100 Subject: [PATCH 6/8] feat: add folder selection support in file uploader --- public/locales/en/shared.json | 1 + .../file-upload-input/FileUploadInput.tsx | 33 +++++++++++++++++++ 2 files changed, 34 insertions(+) diff --git a/public/locales/en/shared.json b/public/locales/en/shared.json index d4dbc4e26..6f63dc582 100644 --- a/public/locales/en/shared.json +++ b/public/locales/en/shared.json @@ -218,6 +218,7 @@ "accordionTitle": "Upload with HTTP via your browser", "selectFileSingle": "Select file to add", "selectFileMultiple": "Select files to add", + "selectFolder": "Select folder to add", "dragDropSingle": "Drag and drop file here.", "dragDropMultiple": "Drag and drop files and/or directories here.", "cancelUpload": "Cancel upload", diff --git a/src/sections/shared/file-uploader/file-upload-input/FileUploadInput.tsx b/src/sections/shared/file-uploader/file-upload-input/FileUploadInput.tsx index 724c9fc00..220dbe523 100644 --- a/src/sections/shared/file-uploader/file-upload-input/FileUploadInput.tsx +++ b/src/sections/shared/file-uploader/file-upload-input/FileUploadInput.tsx @@ -38,6 +38,7 @@ const FileUploadInput = ({ fileRepository, datasetPersistentId }: FileUploadInpu const { t } = useTranslation('shared') const inputRef = useRef(null) + const folderInputRef = useRef(null) const [isDragging, setIsDragging] = useState(false) @@ -115,6 +116,20 @@ const FileUploadInput = ({ fileRepository, datasetPersistentId }: FileUploadInpu } } + const handleFolderInputChange: ChangeEventHandler = (event) => { + const filesArray = Array.from(event.target.files || []) + + if (filesArray && filesArray.length > 0) { + for (const file of filesArray) { + void uploadOneFile(file) + } + } + + if (folderInputRef.current) { + folderInputRef.current.value = '' + } + } + const handleDropFiles: DragEventHandler = (event) => { event.preventDefault() 
event.stopPropagation() @@ -195,6 +210,15 @@ const FileUploadInput = ({ fileRepository, datasetPersistentId }: FileUploadInpu ? t('fileUploader.selectFileMultiple') : t('fileUploader.selectFileSingle')} + {operationType === OperationType.ADD_FILES_TO_DATASET && ( + + )}