Merge pull request #816 from ente-io/add-better-check-file-hashing

prevent possible infinite loop during file hash generation
Abhinav Kumar 2022-12-13 12:55:02 +05:30 committed by GitHub
commit 08b9f35306
5 changed files with 40 additions and 24 deletions
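
Why this change: getFileHash previously read the file's stream in a `while (true)` loop, trusting the reader to eventually report done. If the underlying stream never signalled completion, hashing could spin forever and the upload would never progress. The fix bounds the loop by the stream's declared chunkCount, then performs one final read that must report done; anything more throws CHUNK_MORE_THAN_EXPECTED instead of silently hashing extra data. A minimal sketch of the bounded-read pattern used in the getFileHash diff below (hashChunk here stands in for the crypto worker's hashFileChunk):

    async function hashBounded(
        stream: ReadableStream<Uint8Array>,
        chunkCount: number,
        hashChunk: (chunk: Uint8Array) => Promise<void>
    ) {
        const reader = stream.getReader();
        // Bounded loop: at most chunkCount reads, so a stream that never
        // reports done can no longer hang the hashing step.
        for (let i = 0; i < chunkCount; i++) {
            const { done, value } = await reader.read();
            if (done || !value) break;
            await hashChunk(value);
        }
        // Exactly one more read is expected, and it must report done;
        // otherwise the stream produced more data than it promised.
        const { done } = await reader.read();
        if (!done) throw new Error('stream yielded more chunks than expected');
    }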

@@ -68,6 +68,7 @@ export async function readFile(
 }

 export async function extractFileMetadata(
+    worker,
     parsedMetadataJSONMap: ParsedMetadataJSONMap,
     rawFile: File | ElectronFile,
     collectionID: number,
@@ -79,6 +80,7 @@ export async function extractFileMetadata(
             getMetadataJSONMapKey(collectionID, originalName)
         ) ?? {};
     const extractedMetadata: Metadata = await extractMetadata(
+        worker,
         rawFile,
         fileTypeInfo
     );

@@ -1,11 +1,13 @@
 import { FILE_READER_CHUNK_SIZE } from 'constants/upload';
 import { getFileStream, getElectronFileStream } from 'services/readerService';
 import { ElectronFile, DataStream } from 'types/upload';
-import CryptoWorker from 'utils/crypto';
+import { CustomError } from 'utils/error';
 import { addLogLine, getFileNameSize } from 'utils/logging';
 import { logError } from 'utils/sentry';

-export async function getFileHash(file: File | ElectronFile) {
+export async function getFileHash(worker, file: File | ElectronFile) {
     try {
+        addLogLine(`getFileHash called for ${getFileNameSize(file)}`);
         let filedata: DataStream;
         if (file instanceof File) {
             filedata = getFileStream(file, FILE_READER_CHUNK_SIZE);
@@ -15,22 +17,29 @@ export async function getFileHash(file: File | ElectronFile) {
                 FILE_READER_CHUNK_SIZE
             );
         }
-        const cryptoWorker = await new CryptoWorker();
-        const hashState = await cryptoWorker.initChunkHashing();
+        const hashState = await worker.initChunkHashing();
-        const reader = filedata.stream.getReader();
-        // eslint-disable-next-line no-constant-condition
-        while (true) {
-            const { done, value: chunk } = await reader.read();
+        const streamReader = filedata.stream.getReader();
+        for (let i = 0; i < filedata.chunkCount; i++) {
+            const { done, value: chunk } = await streamReader.read();
             if (done) {
                 break;
             }
-            await cryptoWorker.hashFileChunk(hashState, Uint8Array.from(chunk));
+            await worker.hashFileChunk(hashState, Uint8Array.from(chunk));
         }
-        const hash = await cryptoWorker.completeChunkHashing(hashState);
+        const { done } = await streamReader.read();
+        if (!done) {
+            throw Error(CustomError.CHUNK_MORE_THAN_EXPECTED);
+        }
+        const hash = await worker.completeChunkHashing(hashState);
+        addLogLine(
+            `file hashing completed successfully ${getFileNameSize(file)}`
+        );
         return hash;
     } catch (e) {
         logError(e, 'getFileHash failed');
-        throw e;
+        addLogLine(
+            `file hashing failed ${getFileNameSize(file)} ,${e.message} `
+        );
     }
 }
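
The worker methods called above (initChunkHashing, hashFileChunk, completeChunkHashing) are not part of this diff. For context, a plausible worker-side implementation, assuming a libsodium-backed crypto worker using the streaming generichash (BLAKE2b) API, could look like this (illustrative only, not the code in this repository):

    import sodium, { StateAddress } from 'libsodium-wrappers';

    // Hypothetical counterparts of the three methods the diff calls.
    export async function initChunkHashing(): Promise<StateAddress> {
        await sodium.ready;
        // Unkeyed BLAKE2b streaming state.
        return sodium.crypto_generichash_init(
            null,
            sodium.crypto_generichash_BYTES_MAX
        );
    }

    export async function hashFileChunk(state: StateAddress, chunk: Uint8Array) {
        sodium.crypto_generichash_update(state, chunk);
    }

    export async function completeChunkHashing(state: StateAddress) {
        const hash = sodium.crypto_generichash_final(
            state,
            sodium.crypto_generichash_BYTES_MAX
        );
        return sodium.to_base64(hash, sodium.base64_variants.ORIGINAL);
    }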

@@ -30,6 +30,7 @@ const NULL_PARSED_METADATA_JSON: ParsedMetadataJSON = {
 };

 export async function extractMetadata(
+    worker,
     receivedFile: File | ElectronFile,
     fileTypeInfo: FileTypeInfo
 ) {
@@ -39,7 +40,7 @@ export async function extractMetadata(
     } else if (fileTypeInfo.fileType === FILE_TYPE.VIDEO) {
         extractedMetadata = await getVideoMetadata(receivedFile);
     }
-    const fileHash = await getFileHash(receivedFile);
+    const fileHash = await getFileHash(worker, receivedFile);
     const metadata: Metadata = {
         title: receivedFile.name,

@@ -107,6 +107,9 @@ class UploadManager {
             throw Error("can't run multiple uploads at once");
         }
         this.uploadInProgress = true;
+        for (let i = 0; i < MAX_CONCURRENT_UPLOADS; i++) {
+            this.cryptoWorkers[i] = getDedicatedCryptoWorker();
+        }
         await this.updateExistingFilesAndCollections(collections);
         addLogLine(
             `received ${filesWithCollectionToUploadIn.length} files to upload`
@@ -135,7 +138,8 @@ class UploadManager {
             }
             if (mediaFiles.length) {
                 UIService.setUploadStage(UPLOAD_STAGES.EXTRACTING_METADATA);
-                await this.extractMetadataFromFiles(mediaFiles);
+                const worker = await new this.cryptoWorkers[0].comlink();
+                await this.extractMetadataFromFiles(worker, mediaFiles);
                 UploadService.setMetadataAndFileTypeInfoMap(
                     this.metadataAndFileTypeInfoMap
                 );
@@ -273,7 +277,10 @@ class UploadManager {
         }
     }

-    private async extractMetadataFromFiles(mediaFiles: FileWithCollection[]) {
+    private async extractMetadataFromFiles(
+        worker,
+        mediaFiles: FileWithCollection[]
+    ) {
         try {
             addLogLine(`extractMetadataFromFiles executed`);
             UIService.reset(mediaFiles.length);
@@ -290,6 +297,7 @@ class UploadManager {
                     `metadata extraction started ${getFileNameSize(file)} `
                 );
                 const result = await this.extractFileTypeAndMetadata(
+                    worker,
                     file,
                     collectionID
                 );
@@ -331,6 +339,7 @@ class UploadManager {
     }

     private async extractFileTypeAndMetadata(
+        worker,
         file: File | ElectronFile,
         collectionID: number
     ) {
@@ -354,6 +363,7 @@ class UploadManager {
         let metadata: Metadata;
         try {
             metadata = await UploadService.extractFileMetadata(
+                worker,
                 file,
                 collectionID,
                 fileTypeInfo
@@ -386,16 +396,8 @@ class UploadManager {
             i < MAX_CONCURRENT_UPLOADS && this.filesToBeUploaded.length > 0;
             i++
         ) {
-            const cryptoWorker = getDedicatedCryptoWorker();
-            if (!cryptoWorker) {
-                throw Error(CustomError.FAILED_TO_LOAD_WEB_WORKER);
-            }
-            this.cryptoWorkers[i] = cryptoWorker;
-            uploadProcesses.push(
-                this.uploadNextFileInQueue(
-                    await new this.cryptoWorkers[i].comlink()
-                )
-            );
+            const worker = await new this.cryptoWorkers[i].comlink();
+            uploadProcesses.push(this.uploadNextFileInQueue(worker));
         }
         await Promise.all(uploadProcesses);
     }
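
Worker lifecycle also changes in this file: instead of each upload batch constructing fresh dedicated crypto workers, the manager now creates MAX_CONCURRENT_UPLOADS of them once per upload; cryptoWorkers[0] is shared with the metadata-extraction pass, and each upload process reuses its own slot. A hypothetical sketch of such an up-front pool built with comlink (the interface, pool shape, and worker URL are assumptions for illustration, not code from this diff):

    import { wrap, Remote } from 'comlink';

    // Minimal surface of the worker that matters for hashing.
    interface ChunkHasher {
        initChunkHashing(): Promise<unknown>;
    }

    const MAX_CONCURRENT_UPLOADS = 4;

    // Create every worker proxy up front, then hand the same instances to
    // both the metadata pass and the upload loop, instead of spawning new
    // workers inside the loop on every batch.
    const pool: Remote<ChunkHasher>[] = [];
    for (let i = 0; i < MAX_CONCURRENT_UPLOADS; i++) {
        pool[i] = wrap<ChunkHasher>(
            new Worker(new URL('./crypto.worker', import.meta.url))
        );
    }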

@@ -92,11 +92,13 @@ class UploadService {
     }

     async extractFileMetadata(
+        worker,
         file: File | ElectronFile,
         collectionID: number,
         fileTypeInfo: FileTypeInfo
     ): Promise<Metadata> {
         return extractFileMetadata(
+            worker,
             this.parsedMetadataJSONMap,
             file,
             collectionID,