diff --git a/src/services/upload/fileService.ts b/src/services/upload/fileService.ts
index 939e769e6..bdb793e38 100644
--- a/src/services/upload/fileService.ts
+++ b/src/services/upload/fileService.ts
@@ -68,6 +68,7 @@ export async function readFile(
 }
 
 export async function extractFileMetadata(
+    worker,
     parsedMetadataJSONMap: ParsedMetadataJSONMap,
     rawFile: File | ElectronFile,
     collectionID: number,
@@ -79,6 +80,7 @@ export async function extractFileMetadata(
             getMetadataJSONMapKey(collectionID, originalName)
         ) ?? {};
     const extractedMetadata: Metadata = await extractMetadata(
+        worker,
         rawFile,
         fileTypeInfo
     );
diff --git a/src/services/upload/hashService.tsx b/src/services/upload/hashService.tsx
index fb8dfb0a5..ed5ba6c5a 100644
--- a/src/services/upload/hashService.tsx
+++ b/src/services/upload/hashService.tsx
@@ -1,11 +1,13 @@
 import { FILE_READER_CHUNK_SIZE } from 'constants/upload';
 import { getFileStream, getElectronFileStream } from 'services/readerService';
 import { ElectronFile, DataStream } from 'types/upload';
-import CryptoWorker from 'utils/crypto';
+import { CustomError } from 'utils/error';
+import { addLogLine, getFileNameSize } from 'utils/logging';
 import { logError } from 'utils/sentry';
 
-export async function getFileHash(file: File | ElectronFile) {
+export async function getFileHash(worker, file: File | ElectronFile) {
     try {
+        addLogLine(`getFileHash called for ${getFileNameSize(file)}`);
         let filedata: DataStream;
         if (file instanceof File) {
             filedata = getFileStream(file, FILE_READER_CHUNK_SIZE);
@@ -15,22 +17,29 @@ export async function getFileHash(file: File | ElectronFile) {
                 FILE_READER_CHUNK_SIZE
             );
         }
-        const cryptoWorker = await new CryptoWorker();
-        const hashState = await cryptoWorker.initChunkHashing();
+        const hashState = await worker.initChunkHashing();
 
-        const reader = filedata.stream.getReader();
-        // eslint-disable-next-line no-constant-condition
-        while (true) {
-            const { done, value: chunk } = await reader.read();
+        const streamReader = filedata.stream.getReader();
+        for (let i = 0; i < filedata.chunkCount; i++) {
+            const { done, value: chunk } = await streamReader.read();
             if (done) {
                 break;
             }
-            await cryptoWorker.hashFileChunk(hashState, Uint8Array.from(chunk));
+            await worker.hashFileChunk(hashState, Uint8Array.from(chunk));
         }
-        const hash = await cryptoWorker.completeChunkHashing(hashState);
+        const { done } = await streamReader.read();
+        if (!done) {
+            throw Error(CustomError.CHUNK_MORE_THAN_EXPECTED);
+        }
+        const hash = await worker.completeChunkHashing(hashState);
+        addLogLine(
+            `file hashing completed successfully ${getFileNameSize(file)}`
+        );
         return hash;
     } catch (e) {
         logError(e, 'getFileHash failed');
-        throw e;
+        addLogLine(
+            `file hashing failed ${getFileNameSize(file)} ,${e.message} `
+        );
     }
 }
diff --git a/src/services/upload/metadataService.ts b/src/services/upload/metadataService.ts
index a6cf979ae..a49a383db 100644
--- a/src/services/upload/metadataService.ts
+++ b/src/services/upload/metadataService.ts
@@ -30,6 +30,7 @@ const NULL_PARSED_METADATA_JSON: ParsedMetadataJSON = {
 };
 
 export async function extractMetadata(
+    worker,
     receivedFile: File | ElectronFile,
     fileTypeInfo: FileTypeInfo
 ) {
@@ -39,7 +40,7 @@ export async function extractMetadata(
     } else if (fileTypeInfo.fileType === FILE_TYPE.VIDEO) {
         extractedMetadata = await getVideoMetadata(receivedFile);
     }
-    const fileHash = await getFileHash(receivedFile);
+    const fileHash = await getFileHash(worker, receivedFile);
 
     const metadata: Metadata = {
         title: receivedFile.name,
diff --git a/src/services/upload/uploadManager.ts b/src/services/upload/uploadManager.ts
index b5768b100..c17e86395 100644
--- a/src/services/upload/uploadManager.ts
+++ b/src/services/upload/uploadManager.ts
@@ -107,6 +107,9 @@ class UploadManager {
             throw Error("can't run multiple uploads at once");
         }
         this.uploadInProgress = true;
+        for (let i = 0; i < MAX_CONCURRENT_UPLOADS; i++) {
+            this.cryptoWorkers[i] = getDedicatedCryptoWorker();
+        }
         await this.updateExistingFilesAndCollections(collections);
         addLogLine(
             `received ${filesWithCollectionToUploadIn.length} files to upload`
         );
@@ -135,7 +138,8 @@ class UploadManager {
             }
             if (mediaFiles.length) {
                 UIService.setUploadStage(UPLOAD_STAGES.EXTRACTING_METADATA);
-                await this.extractMetadataFromFiles(mediaFiles);
+                const worker = await new this.cryptoWorkers[0].comlink();
+                await this.extractMetadataFromFiles(worker, mediaFiles);
 
                 UploadService.setMetadataAndFileTypeInfoMap(
                     this.metadataAndFileTypeInfoMap
@@ -273,7 +277,10 @@ class UploadManager {
         }
     }
 
-    private async extractMetadataFromFiles(mediaFiles: FileWithCollection[]) {
+    private async extractMetadataFromFiles(
+        worker,
+        mediaFiles: FileWithCollection[]
+    ) {
         try {
             addLogLine(`extractMetadataFromFiles executed`);
             UIService.reset(mediaFiles.length);
@@ -290,6 +297,7 @@ class UploadManager {
                         `metadata extraction started ${getFileNameSize(file)} `
                     );
                     const result = await this.extractFileTypeAndMetadata(
+                        worker,
                         file,
                         collectionID
                     );
@@ -331,6 +339,7 @@ class UploadManager {
     }
 
     private async extractFileTypeAndMetadata(
+        worker,
         file: File | ElectronFile,
         collectionID: number
     ) {
@@ -354,6 +363,7 @@ class UploadManager {
         let metadata: Metadata;
         try {
             metadata = await UploadService.extractFileMetadata(
+                worker,
                 file,
                 collectionID,
                 fileTypeInfo
@@ -386,16 +396,8 @@ class UploadManager {
             i < MAX_CONCURRENT_UPLOADS && this.filesToBeUploaded.length > 0;
             i++
         ) {
-            const cryptoWorker = getDedicatedCryptoWorker();
-            if (!cryptoWorker) {
-                throw Error(CustomError.FAILED_TO_LOAD_WEB_WORKER);
-            }
-            this.cryptoWorkers[i] = cryptoWorker;
-            uploadProcesses.push(
-                this.uploadNextFileInQueue(
-                    await new this.cryptoWorkers[i].comlink()
-                )
-            );
+            const worker = await new this.cryptoWorkers[i].comlink();
+            uploadProcesses.push(this.uploadNextFileInQueue(worker));
         }
         await Promise.all(uploadProcesses);
     }
diff --git a/src/services/upload/uploadService.ts b/src/services/upload/uploadService.ts
index 429dfde98..22439f394 100644
--- a/src/services/upload/uploadService.ts
+++ b/src/services/upload/uploadService.ts
@@ -92,11 +92,13 @@ class UploadService {
     }
 
     async extractFileMetadata(
+        worker,
         file: File | ElectronFile,
         collectionID: number,
         fileTypeInfo: FileTypeInfo
     ): Promise<Metadata> {
         return extractFileMetadata(
+            worker,
             this.parsedMetadataJSONMap,
             file,
             collectionID,
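
Note (not part of the diff): the sketch below illustrates the worker-reuse pattern this change introduces — a dedicated comlink-wrapped crypto worker is created once and threaded through metadata extraction and hashing, instead of `getFileHash` constructing a new `CryptoWorker` on every call. The `'utils/comlink'` import path and the shape of the object returned by `getDedicatedCryptoWorker` (a `.comlink` constructor) are assumptions inferred from how they are used in `uploadManager.ts` above, not confirmed APIs.

```ts
// Illustrative sketch only; import paths are assumed, mirroring the
// absolute-import style used elsewhere in this diff.
import { getDedicatedCryptoWorker } from 'utils/comlink'; // assumed location
import { getFileHash } from 'services/upload/hashService';
import { ElectronFile } from 'types/upload';

async function hashFiles(files: (File | ElectronFile)[]) {
    // Spawn one dedicated worker up front, as UploadManager now does before
    // starting an upload, rather than one CryptoWorker per getFileHash call.
    const cryptoWorker = getDedicatedCryptoWorker();
    const worker = await new cryptoWorker.comlink();

    for (const file of files) {
        // The same remote worker instance is passed into every hash call.
        const hash = await getFileHash(worker, file);
        console.log(hash);
    }
}
```

One behavioral consequence of the `hashService.tsx` change worth flagging: `getFileHash` no longer rethrows in its catch block, so on failure it logs and returns `undefined` to the caller instead of propagating the error.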