persist file paths for uploaded files

Rushikesh Tote authored on 2022-06-21 14:02:20 +05:30
parent 380768c4c7
commit 59e8112bd2
6 changed files with 128 additions and 36 deletions


@@ -19,7 +19,7 @@ import UIService from './uiService';
 import UploadService from './uploadService';
 import { CustomError } from 'utils/error';
 import { Collection } from 'types/collection';
-import { EnteFile } from 'types/file';
+import { EnteFile, FileMagicMetadata } from 'types/file';
 import {
     FileWithCollection,
     MetadataAndFileTypeInfo,
@@ -40,6 +40,7 @@ import isElectron from 'is-electron';
 import ImportService from 'services/importService';
 import watchFolderService from 'services/watchFolder/watchFolderService';
 import { ProgressUpdater } from 'types/upload/ui';
+import { NEW_FILE_MAGIC_METADATA } from 'types/magicMetadata';
 
 const MAX_CONCURRENT_UPLOADS = 4;
 const FILE_UPLOAD_COMPLETED = 100;
@@ -229,7 +230,8 @@ class UploadManager {
         UIService.reset(mediaFiles.length);
         for (const { file, localID, collectionID } of mediaFiles) {
             try {
-                const { fileTypeInfo, metadata } = await (async () => {
+                const { fileTypeInfo, metadata, magicMetadata } =
+                    await (async () => {
                     if (file.size >= MAX_FILE_SIZE_SUPPORTED) {
                         logUploadInfo(
                             `${getFileNameSize(
@@ -239,9 +241,8 @@ class UploadManager {
                         return { fileTypeInfo: null, metadata: null };
                     }
-                    const fileTypeInfo = await UploadService.getFileType(
-                        file
-                    );
+                    const fileTypeInfo =
+                        await UploadService.getFileType(file);
                     if (fileTypeInfo.fileType === FILE_TYPE.OTHERS) {
                         logUploadInfo(
                             `${getFileNameSize(
@@ -259,7 +260,13 @@ class UploadManager {
                             collectionID,
                             fileTypeInfo
                         )) || null;
-                    return { fileTypeInfo, metadata };
+                    const magicMetadata = {
+                        ...NEW_FILE_MAGIC_METADATA,
+                        data: {
+                            filePaths: [(file as any).path as string],
+                        },
+                    } as FileMagicMetadata;
+                    return { fileTypeInfo, metadata, magicMetadata };
                 })();
 
                 logUploadInfo(
@@ -270,6 +277,7 @@ class UploadManager {
                 this.metadataAndFileTypeInfoMap.set(localID, {
                     fileTypeInfo: fileTypeInfo && { ...fileTypeInfo },
                     metadata: metadata && { ...metadata },
+                    magicMetadata: magicMetadata && { ...magicMetadata },
                 });
                 UIService.increaseFileUploaded();
             } catch (e) {
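
The notable bit in this hunk is the `(file as any).path` cast: in the desktop (Electron) build the File objects handed to the upload manager carry an absolute path, while plain browser File objects do not, so on web this property is simply absent. A minimal, hypothetical alternative to the untyped cast; the interface and helper names below are illustrative, not part of this commit:

```ts
// Sketch only: a typed way to read the Electron-provided `path` property
// instead of casting the File to `any`. Names are invented for illustration.
interface FileWithOptionalPath extends File {
    path?: string;
}

function getLocalFilePath(file: File): string | undefined {
    const path = (file as FileWithOptionalPath).path;
    // Return undefined (rather than an empty string) when running in a plain
    // browser, so callers can decide whether to record a filePaths entry.
    return typeof path === 'string' && path.length > 0 ? path : undefined;
}
```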


@@ -1,9 +1,10 @@
-import { EnteFile } from 'types/file';
+import { EnteFile, FileMagicMetadata } from 'types/file';
 import { handleUploadError, CustomError } from 'utils/error';
 import { logError } from 'utils/sentry';
 import {
     findSameFileInCollection,
     findSameFileInOtherCollection,
+    getMergedMagicMetadataFilePaths,
     shouldDedupeAcrossCollection,
 } from 'utils/upload';
 import UploadHttpClient from './uploadHttpClient';
@@ -16,12 +17,36 @@ import { logUploadInfo } from 'utils/upload';
 import { convertBytesToHumanReadable } from 'utils/billing';
 import { sleep } from 'utils/common';
 import { addToCollection } from 'services/collectionService';
+import { updateMagicMetadataProps } from 'utils/magicMetadata';
+import { updateFileMagicMetadata } from 'services/fileService';
+import { NEW_FILE_MAGIC_METADATA } from 'types/magicMetadata';
+import { getFileKey } from 'utils/file';
 
 interface UploadResponse {
     fileUploadResult: UPLOAD_RESULT;
     uploadedFile?: EnteFile;
     skipDecryption?: boolean;
 }
+
+const updateMagicMetadata = async (
+    file: EnteFile,
+    magicMetadata: FileMagicMetadata,
+    collectionKey: string
+) => {
+    magicMetadata.data.filePaths = getMergedMagicMetadataFilePaths(
+        file.magicMetadata,
+        magicMetadata
+    );
+    file.key = await getFileKey(file, collectionKey);
+    const updatedMagicMetadata = await updateMagicMetadataProps(
+        file.magicMetadata ?? NEW_FILE_MAGIC_METADATA,
+        file.key,
+        { filePaths: magicMetadata.data.filePaths }
+    );
+    file.magicMetadata = updatedMagicMetadata;
+    await updateFileMagicMetadata([file]);
+};
+
 export default async function uploader(
     worker: any,
     existingFilesInCollection: EnteFile[],
@@ -36,7 +61,7 @@ export default async function uploader(
     logUploadInfo(`uploader called for ${fileNameSize}`);
     UIService.setFileProgress(localID, 0);
     await sleep(0);
-    const { fileTypeInfo, metadata } =
+    const { fileTypeInfo, metadata, magicMetadata } =
         UploadService.getFileMetadataAndFileTypeInfo(localID);
     try {
         const fileSize = UploadService.getAssetSize(uploadAsset);
@@ -56,6 +81,11 @@ export default async function uploader(
             );
             if (sameFileInSameCollection) {
                 logUploadInfo(`skipped upload for ${fileNameSize}`);
+                await updateMagicMetadata(
+                    sameFileInSameCollection,
+                    magicMetadata,
+                    fileWithCollection.collection.key
+                );
                 return {
                     fileUploadResult: UPLOAD_RESULT.ALREADY_UPLOADED,
                     uploadedFile: sameFileInSameCollection,
@@ -74,6 +104,11 @@ export default async function uploader(
                 const resultFile = Object.assign({}, sameFileInOtherCollection);
                 resultFile.collectionID = collection.id;
                 await addToCollection(collection, [resultFile]);
+                await updateMagicMetadata(
+                    resultFile,
+                    magicMetadata,
+                    fileWithCollection.collection.key
+                );
                 return {
                     fileUploadResult: UPLOAD_RESULT.UPLOADED,
                     uploadedFile: resultFile,
@@ -91,6 +126,11 @@ export default async function uploader(
                     metadata
                 );
                 if (sameFileInOtherCollection) {
+                    await updateMagicMetadata(
+                        sameFileInOtherCollection,
+                        magicMetadata,
+                        fileWithCollection.collection.key
+                    );
                     return {
                         fileUploadResult: UPLOAD_RESULT.ALREADY_UPLOADED,
                         uploadedFile: sameFileInOtherCollection,
@@ -136,6 +176,11 @@ export default async function uploader(
         UIService.increaseFileUploaded();
         logUploadInfo(`${fileNameSize} successfully uploaded`);
 
+        await updateMagicMetadata(
+            uploadedFile,
+            magicMetadata,
+            fileWithCollection.collection.key
+        );
         return {
             fileUploadResult: metadata.hasStaticThumbnail
                 ? UPLOAD_RESULT.UPLOADED_WITH_STATIC_THUMBNAIL
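
Taken together, the new `updateMagicMetadata` helper and its call sites mean every dedupe branch (same file in the same collection, same file restored into another collection, cross-collection match) now records the local path of the file it skipped, and a fresh upload records its path too. Below is a self-contained restatement of that flow using only identifiers visible in this diff; the wrapper function name is invented and the exact behaviour of `updateMagicMetadataProps` / `updateFileMagicMetadata` is assumed from their usage here.

```ts
import { EnteFile, FileMagicMetadata } from 'types/file';
import { NEW_FILE_MAGIC_METADATA } from 'types/magicMetadata';
import { getMergedMagicMetadataFilePaths } from 'utils/upload';
import { getFileKey } from 'utils/file';
import { updateMagicMetadataProps } from 'utils/magicMetadata';
import { updateFileMagicMetadata } from 'services/fileService';

// Hypothetical wrapper mirroring updateMagicMetadata above.
async function persistPathsOnExistingFile(
    existingFile: EnteFile,
    newPaths: string[],
    collectionKey: string
) {
    // Wrap the freshly seen paths in a draft so the merge helper can union
    // them with whatever filePaths the file already carries.
    const draft = {
        ...NEW_FILE_MAGIC_METADATA,
        data: { filePaths: newPaths },
    } as FileMagicMetadata;
    const mergedPaths = getMergedMagicMetadataFilePaths(
        existingFile.magicMetadata,
        draft
    );
    // The per-file key is required to encrypt the metadata update.
    const fileKey = await getFileKey(existingFile, collectionKey);
    existingFile.magicMetadata = await updateMagicMetadataProps(
        existingFile.magicMetadata ?? NEW_FILE_MAGIC_METADATA,
        fileKey,
        { filePaths: mergedPaths }
    );
    // Push the re-encrypted magic metadata to the server.
    await updateFileMagicMetadata([existingFile]);
}
```

One design note: `updateMagicMetadata` mutates both its `magicMetadata` argument and the passed file (`key` and `magicMetadata`), which is presumably acceptable here since both are per-upload objects.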


@@ -9,6 +9,7 @@ export interface fileAttribute {
 export interface FileMagicMetadataProps {
     visibility?: VISIBILITY_STATE;
+    filePaths?: string[];
 }
 
 export interface FileMagicMetadata extends Omit<MagicMetadataCore, 'data'> {
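
The hunk cuts off right after the `FileMagicMetadata` declaration. Judging from the new `typeof` guards in `decryptFile` further down, its `data` field presumably admits both the still-encrypted payload and the decoded props; a stubbed, inference-only sketch (not part of the commit, with placeholder types):

```ts
// Placeholder shapes for illustration; the real MagicMetadataCore and
// VISIBILITY_STATE definitions are outside this diff.
type FileMagicMetadataProps = {
    visibility?: number; // stand-in for VISIBILITY_STATE
    filePaths?: string[];
};

type MagicMetadataCore = {
    data: unknown;
    header: string; // decryption header, as used by decryptFile below
};

type FileMagicMetadata = Omit<MagicMetadataCore, 'data'> & {
    // Encrypted string straight from the server, or the decoded props once
    // decryptFile has run — hence the new `typeof data === 'string'` checks.
    data: string | FileMagicMetadataProps;
};
```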


@@ -1,6 +1,6 @@
 import { FILE_TYPE } from 'constants/file';
 import { Collection } from 'types/collection';
-import { fileAttribute } from 'types/file';
+import { fileAttribute, FileMagicMetadata } from 'types/file';
 
 export interface DataStream {
     stream: ReadableStream<Uint8Array>;
@@ -92,6 +92,7 @@ export interface FileWithCollection extends UploadAsset {
 export interface MetadataAndFileTypeInfo {
     metadata: Metadata;
     fileTypeInfo: FileTypeInfo;
+    magicMetadata: FileMagicMetadata;
 }
 
 export type MetadataAndFileTypeInfoMap = Map<number, MetadataAndFileTypeInfo>;
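
With `magicMetadata` added to `MetadataAndFileTypeInfo`, the per-upload map is what carries the path draft from UploadManager's metadata pass to `uploader()`. A type-level illustration; values are declared rather than constructed since the full `Metadata` and `FileTypeInfo` shapes are outside this diff, and the `FileTypeInfo` import path is an assumption:

```ts
import { FileMagicMetadata } from 'types/file';
import {
    Metadata,
    FileTypeInfo,
    MetadataAndFileTypeInfoMap,
} from 'types/upload';

declare const metadata: Metadata;
declare const fileTypeInfo: FileTypeInfo;
declare const magicMetadata: FileMagicMetadata;

// UploadManager populates the map keyed by localID during metadata
// extraction; uploader() reads it back via getFileMetadataAndFileTypeInfo.
const infoMap: MetadataAndFileTypeInfoMap = new Map();
infoMap.set(42, { metadata, fileTypeInfo, magicMetadata });
```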


@@ -274,14 +274,20 @@ export async function decryptFile(file: EnteFile, collectionKey: string) {
         encryptedMetadata.decryptionHeader,
         file.key
     );
-    if (file.magicMetadata?.data) {
+    if (
+        file.magicMetadata?.data &&
+        typeof file.magicMetadata.data === 'string'
+    ) {
         file.magicMetadata.data = await worker.decryptMetadata(
             file.magicMetadata.data,
             file.magicMetadata.header,
             file.key
         );
     }
-    if (file.pubMagicMetadata?.data) {
+    if (
+        file.pubMagicMetadata?.data &&
+        typeof file.pubMagicMetadata.data === 'string'
+    ) {
         file.pubMagicMetadata.data = await worker.decryptMetadata(
             file.pubMagicMetadata.data,
             file.pubMagicMetadata.header,
@@ -295,6 +301,21 @@ export async function decryptFile(file: EnteFile, collectionKey: string) {
     }
 }
 
+export async function getFileKey(file: EnteFile, collectionKey: string) {
+    try {
+        const worker = await new CryptoWorker();
+        file.key = await worker.decryptB64(
+            file.encryptedKey,
+            file.keyDecryptionNonce,
+            collectionKey
+        );
+        return file.key;
+    } catch (e) {
+        logError(e, 'get file key failed');
+        throw e;
+    }
+}
+
 export const preservePhotoswipeProps =
     (newFiles: EnteFile[]) =>
     (currentFiles: EnteFile[]): EnteFile[] => {
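
Two things happen in this file: the `typeof` guards keep `decryptFile` from re-decrypting magic metadata that is already in decoded object form (which can now occur once the uploader updates it locally), and `getFileKey` factors out just the key-decryption step so the uploader can recover `file.key` for a dedupe hit without decrypting the whole record. A tiny sketch of the guard as a reusable predicate; the helper name is invented:

```ts
// Only still-encrypted payloads (strings) should be handed to
// worker.decryptMetadata; already-decoded props objects are left alone.
function isEncryptedMetadataPayload(data: unknown): data is string {
    return typeof data === 'string';
}
```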


@@ -1,5 +1,5 @@
 import { ElectronFile, FileWithCollection, Metadata } from 'types/upload';
-import { EnteFile } from 'types/file';
+import { EnteFile, FileMagicMetadata } from 'types/file';
 import { convertBytesToHumanReadable } from 'utils/billing';
 import { formatDateTime } from 'utils/file';
 import { getLogs, saveLogLine } from 'utils/storage';
@@ -141,3 +141,19 @@ export function areFileWithCollectionsSame(
 ): boolean {
     return firstFile.localID === secondFile.localID;
 }
+
+export function getMergedMagicMetadataFilePaths(
+    oldMetadata: FileMagicMetadata,
+    newMetadata: FileMagicMetadata
+): string[] {
+    if (!oldMetadata || !oldMetadata.data.filePaths) {
+        return newMetadata.data.filePaths;
+    }
+    const mergedMetadataFilePaths = [...oldMetadata.data.filePaths];
+    newMetadata.data.filePaths.forEach((newMetadataFilePath) => {
+        if (!mergedMetadataFilePaths.includes(newMetadataFilePath)) {
+            mergedMetadataFilePaths.push(newMetadataFilePath);
+        }
+    });
+    return mergedMetadataFilePaths;
+}
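
A hedged usage example of the merge helper; the path literals are invented and the double assertions skip the magic-metadata fields the function never reads:

```ts
import { FileMagicMetadata } from 'types/file';
import { getMergedMagicMetadataFilePaths } from 'utils/upload';

const existing = {
    data: { filePaths: ['/home/user/Photos/beach.jpg'] },
} as unknown as FileMagicMetadata;
const incoming = {
    data: {
        filePaths: ['/home/user/Photos/beach.jpg', '/mnt/backup/beach.jpg'],
    },
} as unknown as FileMagicMetadata;

// Existing entries are kept, new ones appended, duplicates skipped:
const merged = getMergedMagicMetadataFilePaths(existing, incoming);
// merged === ['/home/user/Photos/beach.jpg', '/mnt/backup/beach.jpg']
```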