refactor and create typeDetectionService and readerService

This commit is contained in:
Abhinav 2022-03-07 17:25:07 +05:30
parent 326370ab00
commit 47737d2dea
10 changed files with 163 additions and 168 deletions

View file

@ -8,7 +8,7 @@ import HTTPService from 'services/HTTPService';
import CryptoWorker from 'utils/crypto';
import uploadHttpClient from 'services/upload/uploadHttpClient';
import { SetProgressTracker } from 'components/FixLargeThumbnail';
import { getFileType } from './upload/readFileService';
import { getFileType } from 'services/typeDetectionService';
import { getLocalTrash, getTrashedFiles } from './trashService';
import { EncryptionResult, UploadURL } from 'types/upload';
import { fileAttribute } from 'types/file';

View file

@ -0,0 +1,57 @@
/**
 * Read the contents of a Blob into a Uint8Array using the supplied FileReader.
 *
 * Rejects if the read is aborted or fails. A string result (possible when the
 * reader produces text) is UTF-8 encoded; an ArrayBuffer result is wrapped
 * without copying.
 */
export async function getUint8ArrayView(
    reader: FileReader,
    file: Blob
): Promise<Uint8Array> {
    return await new Promise((resolve, reject) => {
        reader.onabort = () => reject(Error('file reading was aborted'));
        reader.onerror = () => reject(Error('file reading has failed'));
        reader.onload = () => {
            const data = reader.result;
            resolve(
                typeof data === 'string'
                    ? new TextEncoder().encode(data)
                    : new Uint8Array(data)
            );
        };
        reader.readAsArrayBuffer(file);
    });
}
export function getFileStream(
reader: FileReader,
file: File,
chunkSize: number
) {
const fileChunkReader = fileChunkReaderMaker(reader, file, chunkSize);
const stream = new ReadableStream<Uint8Array>({
async pull(controller: ReadableStreamDefaultController) {
const chunk = await fileChunkReader.next();
if (chunk.done) {
controller.close();
} else {
controller.enqueue(chunk.value);
}
},
});
const chunkCount = Math.ceil(file.size / chunkSize);
return {
stream,
chunkCount,
};
}
/**
 * Async generator yielding consecutive chunkSize-byte Uint8Array slices of
 * file, from offset 0 to the end of the file.
 */
async function* fileChunkReaderMaker(
    reader: FileReader,
    file: File,
    chunkSize: number
) {
    for (let start = 0; start < file.size; start += chunkSize) {
        const blob = file.slice(start, start + chunkSize);
        yield await getUint8ArrayView(reader, blob);
    }
    return null;
}

View file

@ -0,0 +1,63 @@
import { FILE_TYPE } from 'constants/file';
import { FORMAT_MISSED_BY_FILE_TYPE_LIB } from 'constants/upload';
import { FileTypeInfo } from 'types/upload';
import { CustomError } from 'utils/error';
import { getFileExtension } from 'utils/file';
import { logError } from 'utils/sentry';
import { getUint8ArrayView } from './readerService';
import FileType from 'file-type/browser';
// mime top-level types we map to dedicated FILE_TYPE values
const TYPE_VIDEO = 'video';
const TYPE_IMAGE = 'image';
// number of leading bytes handed to the type sniffer (see extractFileType);
// presumably the minimum the file-type library needs — TODO confirm against its docs
const CHUNK_SIZE_FOR_TYPE_DETECTION = 4100;
/**
 * Detect the FILE_TYPE (image / video / other) and exact extension of a file
 * by sniffing its leading bytes.
 *
 * When detection fails, fall back to the extension in the file name: known
 * formats missed by the file-type library are mapped via
 * FORMAT_MISSED_BY_FILE_TYPE_LIB; anything else is logged and reported as
 * FILE_TYPE.OTHERS.
 */
export async function getFileType(
    reader: FileReader,
    receivedFile: File
): Promise<FileTypeInfo> {
    try {
        const typeResult = await extractFileType(reader, receivedFile);
        // The sniffer may yield no result for unrecognized data; a valid mime
        // is always "<type>/<subtype>". Treat anything else as a detection
        // failure so we raise our own error instead of a raw TypeError.
        const mimeTypeParts = typeResult?.mime?.split('/');
        if (mimeTypeParts?.length !== 2) {
            throw Error(CustomError.TYPE_DETECTION_FAILED);
        }
        let fileType: FILE_TYPE;
        switch (mimeTypeParts[0]) {
            case TYPE_IMAGE:
                fileType = FILE_TYPE.IMAGE;
                break;
            case TYPE_VIDEO:
                fileType = FILE_TYPE.VIDEO;
                break;
            default:
                fileType = FILE_TYPE.OTHERS;
        }
        return { fileType, exactType: typeResult.ext };
    } catch (e) {
        // Fall back to the file name's extension.
        const fileFormat = getFileExtension(receivedFile.name);
        const formatMissedByTypeDetection = FORMAT_MISSED_BY_FILE_TYPE_LIB.find(
            (a) => a.exactType === fileFormat
        );
        if (formatMissedByTypeDetection) {
            return formatMissedByTypeDetection;
        }
        logError(e, CustomError.TYPE_DETECTION_FAILED, {
            fileFormat,
        });
        return { fileType: FILE_TYPE.OTHERS, exactType: fileFormat };
    }
}
// Sniff the type from just the first CHUNK_SIZE_FOR_TYPE_DETECTION bytes,
// avoiding a read of the whole file.
async function extractFileType(reader: FileReader, file: File) {
    return getFileTypeFromBlob(
        reader,
        file.slice(0, CHUNK_SIZE_FOR_TYPE_DETECTION)
    );
}
/**
 * Run the file-type library over the given blob's bytes.
 * Any read or parse failure surfaces as CustomError.TYPE_DETECTION_FAILED.
 */
export async function getFileTypeFromBlob(reader: FileReader, fileBlob: Blob) {
    try {
        const bytes = await getUint8ArrayView(reader, fileBlob);
        return await FileType.fromBuffer(bytes);
    } catch (e) {
        throw Error(CustomError.TYPE_DETECTION_FAILED);
    }
}

View file

@ -11,7 +11,7 @@ import { updatePublicMagicMetadata } from './fileService';
import { EnteFile } from 'types/file';
import { getRawExif } from './upload/exifService';
import { getFileType } from './upload/readFileService';
import { getFileType } from 'services/typeDetectionService';
import { FILE_TYPE } from 'constants/file';
import { getUnixTimeInMicroSeconds } from 'utils/time';

View file

@ -1,3 +1,4 @@
import { MULTIPART_PART_SIZE, FILE_READER_CHUNK_SIZE } from 'constants/upload';
import {
FileTypeInfo,
FileInMemory,
@ -7,14 +8,18 @@ import {
EncryptionResult,
FileWithMetadata,
ParsedMetadataJSONMap,
DataStream,
} from 'types/upload';
import { splitFilenameAndExtension } from 'utils/file';
import { logError } from 'utils/sentry';
import { getFileNameSize, logUploadInfo } from 'utils/upload';
import { encryptFiledata } from './encryptionService';
import { extractMetadata, getMetadataJSONMapKey } from './metadataService';
import { getFileData, getFileOriginalName } from './readFileService';
import { getFileStream, getUint8ArrayView } from '../readerService';
import { generateThumbnail } from './thumbnailService';
const EDITED_FILE_SUFFIX = '-edited';
/** Size of the given file, in bytes. */
export function getFileSize(file: File) {
    const { size } = file;
    return size;
}
@ -34,8 +39,12 @@ export async function readFile(
fileTypeInfo
);
logUploadInfo(`reading file datal${getFileNameSize(rawFile)} `);
const filedata = await getFileData(reader, rawFile);
let filedata: Uint8Array | DataStream;
if (rawFile.size > MULTIPART_PART_SIZE) {
filedata = getFileStream(reader, rawFile, FILE_READER_CHUNK_SIZE);
} else {
filedata = await getUint8ArrayView(reader, rawFile);
}
logUploadInfo(`read file data successfully ${getFileNameSize(rawFile)} `);
@ -107,3 +116,28 @@ export async function encryptFile(
throw e;
}
}
/*
    Get the original file name for edited file to associate it to original file's metadataJSON file
    as edited file doesn't have their own metadata file
*/
function getFileOriginalName(file: File) {
    const [nameWithoutExtension, extension] = splitFilenameAndExtension(
        file.name
    );
    // Strip the "-edited" suffix (if present) to recover the original name.
    const originalName = nameWithoutExtension.endsWith(EDITED_FILE_SUFFIX)
        ? nameWithoutExtension.slice(0, -EDITED_FILE_SUFFIX.length)
        : nameWithoutExtension;
    return extension ? originalName + '.' + extension : originalName;
}

View file

@ -10,7 +10,7 @@ import {
import { CustomError } from 'utils/error';
import { isImageOrVideo, splitFilenameAndExtension } from 'utils/file';
import { logError } from 'utils/sentry';
import { getUint8ArrayView } from './readFileService';
import { getUint8ArrayView } from '../readerService';
import { generateThumbnail } from './thumbnailService';
import uploadService from './uploadService';
import UploadService from './uploadService';

View file

@ -1,159 +0,0 @@
import { FILE_TYPE } from 'constants/file';
import { logError } from 'utils/sentry';
import {
FILE_READER_CHUNK_SIZE,
FORMAT_MISSED_BY_FILE_TYPE_LIB,
MULTIPART_PART_SIZE,
} from 'constants/upload';
import FileType from 'file-type/browser';
import { CustomError } from 'utils/error';
import { getFileExtension, splitFilenameAndExtension } from 'utils/file';
import { FileTypeInfo } from 'types/upload';
// NOTE(review): file deleted in this commit; constants moved to
// typeDetectionService / fileService.
const TYPE_VIDEO = 'video';
const TYPE_IMAGE = 'image';
// suffix an edited copy carries in its name, before the extension
const EDITED_FILE_SUFFIX = '-edited';
// number of leading bytes handed to the type sniffer (see extractFileType)
const CHUNK_SIZE_FOR_TYPE_DETECTION = 4100;
// Read a file fully into memory for small files, or as a chunked stream when
// the file exceeds MULTIPART_PART_SIZE. (Deleted in this commit; logic
// inlined into readFile in fileService.)
export async function getFileData(reader: FileReader, file: File) {
    if (file.size > MULTIPART_PART_SIZE) {
        return getFileStream(reader, file, FILE_READER_CHUNK_SIZE);
    } else {
        return await getUint8ArrayView(reader, file);
    }
}
/**
 * Detect the FILE_TYPE (image / video / other) and exact extension of a file
 * by sniffing its leading bytes; falls back to the file name's extension when
 * detection fails. (Deleted in this commit; moved to typeDetectionService.)
 */
export async function getFileType(
    reader: FileReader,
    receivedFile: File
): Promise<FileTypeInfo> {
    try {
        let fileType: FILE_TYPE;
        const typeResult = await extractFileType(reader, receivedFile);
        // a valid mime is always "<type>/<subtype>"
        const mimTypeParts = typeResult.mime?.split('/');
        if (mimTypeParts?.length !== 2) {
            throw Error(CustomError.TYPE_DETECTION_FAILED);
        }
        switch (mimTypeParts[0]) {
            case TYPE_IMAGE:
                fileType = FILE_TYPE.IMAGE;
                break;
            case TYPE_VIDEO:
                fileType = FILE_TYPE.VIDEO;
                break;
            default:
                fileType = FILE_TYPE.OTHERS;
        }
        return { fileType, exactType: typeResult.ext };
    } catch (e) {
        // fall back to the extension embedded in the file name
        const fileFormat = getFileExtension(receivedFile.name);
        const formatMissedByTypeDetection = FORMAT_MISSED_BY_FILE_TYPE_LIB.find(
            (a) => a.exactType === fileFormat
        );
        if (formatMissedByTypeDetection) {
            return formatMissedByTypeDetection;
        }
        logError(e, CustomError.TYPE_DETECTION_FAILED, {
            fileFormat,
        });
        return { fileType: FILE_TYPE.OTHERS, exactType: fileFormat };
    }
}
/*
    Get the original file name for edited file to associate it to original file's metadataJSON file
    as edited file doesn't have their own metadata file
*/
// (Deleted in this commit; moved as a private helper into fileService.)
export function getFileOriginalName(file: File) {
    let originalName: string = null;
    const [nameWithoutExtension, extension] = splitFilenameAndExtension(
        file.name
    );
    const isEditedFile = nameWithoutExtension.endsWith(EDITED_FILE_SUFFIX);
    if (isEditedFile) {
        // strip the "-edited" suffix to recover the original name
        originalName = nameWithoutExtension.slice(
            0,
            -1 * EDITED_FILE_SUFFIX.length
        );
    } else {
        originalName = nameWithoutExtension;
    }
    if (extension) {
        originalName += '.' + extension;
    }
    return originalName;
}
// Sniff the type from only the first CHUNK_SIZE_FOR_TYPE_DETECTION bytes.
// (Deleted in this commit; moved to typeDetectionService.)
async function extractFileType(reader: FileReader, file: File) {
    const fileChunkBlob = file.slice(0, CHUNK_SIZE_FOR_TYPE_DETECTION);
    return getFileTypeFromBlob(reader, fileChunkBlob);
}
// Run the file-type library over the blob's bytes; any failure surfaces as
// CustomError.TYPE_DETECTION_FAILED. (Deleted in this commit; moved to
// typeDetectionService.)
export async function getFileTypeFromBlob(reader: FileReader, fileBlob: Blob) {
    try {
        const initialFiledata = await getUint8ArrayView(reader, fileBlob);
        return await FileType.fromBuffer(initialFiledata);
    } catch (e) {
        throw Error(CustomError.TYPE_DETECTION_FAILED);
    }
}
// Wrap a File in a ReadableStream emitting chunkSize-byte Uint8Array chunks,
// along with the total chunk count. (Deleted in this commit; moved to
// readerService.)
function getFileStream(reader: FileReader, file: File, chunkSize: number) {
    const fileChunkReader = fileChunkReaderMaker(reader, file, chunkSize);
    const stream = new ReadableStream<Uint8Array>({
        async pull(controller: ReadableStreamDefaultController) {
            const chunk = await fileChunkReader.next();
            if (chunk.done) {
                controller.close();
            } else {
                controller.enqueue(chunk.value);
            }
        },
    });
    const chunkCount = Math.ceil(file.size / chunkSize);
    return {
        stream,
        chunkCount,
    };
}
// Async generator yielding consecutive chunkSize-byte slices of file.
// (Deleted in this commit; moved to readerService.)
async function* fileChunkReaderMaker(
    reader: FileReader,
    file: File,
    chunkSize: number
) {
    let offset = 0;
    while (offset < file.size) {
        const blob = file.slice(offset, chunkSize + offset);
        const fileChunk = await getUint8ArrayView(reader, blob);
        yield fileChunk;
        offset += chunkSize;
    }
    return null;
}
// Read a Blob into a Uint8Array via the given FileReader; rejects on
// abort/error. (Deleted in this commit; moved to readerService — note the
// moved copy drops this try/catch + logError wrapper.)
export async function getUint8ArrayView(
    reader: FileReader,
    file: Blob
): Promise<Uint8Array> {
    try {
        return await new Promise((resolve, reject) => {
            reader.onabort = () => reject(Error('file reading was aborted'));
            reader.onerror = () => reject(Error('file reading has failed'));
            reader.onload = () => {
                // string results are UTF-8 encoded; ArrayBuffer results are
                // wrapped without copying
                const result =
                    typeof reader.result === 'string'
                        ? new TextEncoder().encode(reader.result)
                        : new Uint8Array(reader.result);
                resolve(result);
            };
            reader.readAsArrayBuffer(file);
        });
    } catch (e) {
        logError(e, 'error reading file to byte-array');
        throw e;
    }
}

View file

@ -6,7 +6,7 @@ import FFmpegService from 'services/ffmpegService';
import { convertToHumanReadable } from 'utils/billing';
import { isFileHEIC } from 'utils/file';
import { FileTypeInfo } from 'types/upload';
import { getUint8ArrayView } from './readFileService';
import { getUint8ArrayView } from '../readerService';
import HEICConverter from 'services/HEICConverter';
import { getFileNameSize, logUploadInfo } from 'utils/upload';

View file

@ -2,7 +2,7 @@ import { Collection } from 'types/collection';
import { logError } from 'utils/sentry';
import UploadHttpClient from './uploadHttpClient';
import { extractFileMetadata, getFilename } from './fileService';
import { getFileType } from './readFileService';
import { getFileType } from '../typeDetectionService';
import { handleUploadError } from 'utils/error';
import {
B64EncryptionResult,

View file

@ -7,7 +7,7 @@ import {
PublicMagicMetadataProps,
} from 'types/file';
import { decodeMotionPhoto } from 'services/motionPhotoService';
import { getFileTypeFromBlob } from 'services/upload/readFileService';
import { getFileTypeFromBlob } from 'services/typeDetectionService';
import DownloadManager from 'services/downloadManager';
import { logError } from 'utils/sentry';
import { User } from 'types/user';