Merge pull request #281 from ente-io/reuse-file-readers

reuse file readers
abhinavkgrd 2022-01-09 18:00:51 +05:30 committed by GitHub
commit 71a97caa03
No known key found for this signature in database
GPG key ID: 4AEE18F83AFDEB23
13 changed files with 73 additions and 57 deletions

View file

@@ -62,6 +62,7 @@ class ExportService {
     private stopExport: boolean = false;
     private pauseExport: boolean = false;
     private allElectronAPIsExist: boolean = false;
+    private fileReader: FileReader = null;
     constructor() {
         this.ElectronAPIs = runningInBrowser() && window['ElectronAPIs'];
@@ -438,7 +439,11 @@ class ExportService {
                 (fileType === TYPE_JPEG || fileType === TYPE_JPG)
             ) {
                 const fileBlob = await new Response(fileStream).blob();
+                if (!this.fileReader) {
+                    this.fileReader = new FileReader();
+                }
                 const updatedFileBlob = await updateFileCreationDateInEXIF(
+                    this.fileReader,
                     fileBlob,
                     new Date(file.pubMagicMetadata.data.editedTime / 1000)
                 );
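
For context, the pattern this file (and the services below) converges on is to lazily create a single FileReader per service and thread it into every helper that reads a Blob, instead of constructing a fresh reader on each call. A minimal TypeScript sketch of that shape, assuming only the updateFileCreationDateInEXIF signature introduced later in this diff; the class body and names here are illustrative, not the real ExportService:

// Signature as introduced by this PR (see the exifService hunks below); the
// Promise<Blob> return type is assumed.
declare function updateFileCreationDateInEXIF(
    reader: FileReader,
    fileBlob: Blob,
    updatedDate: Date
): Promise<Blob>;

class ExportServiceSketch {
    // Created on first use and reused for every subsequent read.
    private fileReader: FileReader = null;

    private getReader(): FileReader {
        if (!this.fileReader) {
            this.fileReader = new FileReader();
        }
        return this.fileReader;
    }

    async fixCreationDate(fileBlob: Blob, editedTimeMicros: number) {
        // The shared reader is passed into the helper instead of the helper
        // allocating its own FileReader internally.
        return updateFileCreationDateInEXIF(
            this.getReader(),
            fileBlob,
            new Date(editedTimeMicros / 1000)
        );
    }
}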

View file

@@ -7,6 +7,7 @@ import { getUint8ArrayView } from './upload/readFileService';
 class FFmpegService {
     private ffmpeg: FFmpeg = null;
     private isLoading = null;
+    private fileReader: FileReader = null;
     private generateThumbnailProcessor = new QueueProcessor<Uint8Array>(1);
     async init() {
@@ -29,11 +30,19 @@ class FFmpegService {
         if (!this.ffmpeg) {
             await this.init();
         }
+        if (!this.fileReader) {
+            this.fileReader = new FileReader();
+        }
         if (this.isLoading) {
             await this.isLoading;
         }
         const response = this.generateThumbnailProcessor.queueUpRequest(
-            generateThumbnailHelper.bind(null, this.ffmpeg, file)
+            generateThumbnailHelper.bind(
+                null,
+                this.ffmpeg,
+                this.fileReader,
+                file
+            )
         );
         try {
             return await response.promise;
@@ -49,14 +58,18 @@
     }
 }
-async function generateThumbnailHelper(ffmpeg: FFmpeg, file: File) {
+async function generateThumbnailHelper(
+    ffmpeg: FFmpeg,
+    reader: FileReader,
+    file: File
+) {
     try {
         const inputFileName = `${Date.now().toString()}-${file.name}`;
         const thumbFileName = `${Date.now().toString()}-thumb.jpeg`;
         ffmpeg.FS(
             'writeFile',
             inputFileName,
-            await getUint8ArrayView(new FileReader(), file)
+            await getUint8ArrayView(reader, file)
         );
         let seekTime = 1.0;
         let thumb = null;
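
A FileReader can only service one read at a time, so sharing this.fileReader across thumbnail jobs is safe here only because QueueProcessor<Uint8Array>(1) runs the queued generateThumbnailHelper calls strictly one after another. A hedged sketch of that reuse-plus-serialization idea, using a stand-in serial queue rather than the app's QueueProcessor, and an assumed getUint8ArrayView signature (its body is sketched after the readFileService hunks below):

// Stand-in for the app's QueueProcessor: chains jobs so they run one at a
// time, which is the property that makes a shared FileReader safe to reuse.
class SerialQueue {
    private tail: Promise<unknown> = Promise.resolve();

    queueUpRequest<T>(job: () => Promise<T>): { promise: Promise<T> } {
        const promise = this.tail.then(job);
        // Keep the chain alive even if a job rejects.
        this.tail = promise.catch(() => undefined);
        return { promise };
    }
}

// Assumed signature of the shared read helper.
declare function getUint8ArrayView(
    reader: FileReader,
    file: Blob
): Promise<Uint8Array>;

class ThumbnailerSketch {
    private fileReader: FileReader = null;
    private queue = new SerialQueue();

    generateThumbnail(file: File): Promise<Uint8Array> {
        if (!this.fileReader) {
            this.fileReader = new FileReader();
        }
        const reader = this.fileReader;
        // Mirrors generateThumbnailHelper.bind(null, ffmpeg, fileReader, file):
        // the shared reader is captured into the queued job.
        const { promise } = this.queue.queueUpRequest(() =>
            getUint8ArrayView(reader, file)
        );
        return promise;
    }
}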

View file

@@ -44,6 +44,7 @@ export async function replaceThumbnail(
     try {
         const token = getToken();
         const worker = await new CryptoWorker();
+        const reader = new FileReader();
         const files = await getLocalFiles();
         const trash = await getLocalTrash();
         const trashFiles = getTrashedFiles(trash);
@@ -76,9 +77,10 @@ export async function replaceThumbnail(
                 [originalThumbnail],
                 file.metadata.title
             );
-            const fileTypeInfo = await getFileType(worker, dummyImageFile);
+            const fileTypeInfo = await getFileType(reader, dummyImageFile);
             const { thumbnail: newThumbnail } = await generateThumbnail(
                 worker,
+                reader,
                 dummyImageFile,
                 fileTypeInfo
             );

View file

@@ -1,6 +1,5 @@
 import { FIX_OPTIONS } from 'components/FixCreationTime';
 import { SetProgressTracker } from 'components/FixLargeThumbnail';
-import CryptoWorker from 'utils/crypto';
 import {
     changeFileCreationTime,
     getFileFromURL,
@@ -38,8 +37,8 @@ export async function updateCreationTimeWithExif(
         } else {
            const fileURL = await downloadManager.getFile(file);
            const fileObject = await getFileFromURL(fileURL);
-           const worker = await new CryptoWorker();
-           const fileTypeInfo = await getFileType(worker, fileObject);
+           const reader = new FileReader();
+           const fileTypeInfo = await getFileType(reader, fileObject);
            const exifData = await getRawExif(fileObject, fileTypeInfo);
            if (fixOption === FIX_OPTIONS.DATE_TIME_ORIGINAL) {
                correctCreationTime = getUNIXTime(

View file

@@ -57,12 +57,13 @@ export async function getExifData(
 }
 export async function updateFileCreationDateInEXIF(
+    reader: FileReader,
     fileBlob: Blob,
     updatedDate: Date
 ) {
     try {
         const fileURL = URL.createObjectURL(fileBlob);
-        let imageDataURL = await convertImageToDataURL(fileURL);
+        let imageDataURL = await convertImageToDataURL(reader, fileURL);
         imageDataURL =
             'data:image/jpeg;base64' +
             imageDataURL.slice(imageDataURL.indexOf(','));
@@ -82,10 +83,9 @@ export async function updateFileCreationDateInEXIF(
     }
 }
-export async function convertImageToDataURL(url: string) {
+export async function convertImageToDataURL(reader: FileReader, url: string) {
     const blob = await fetch(url).then((r) => r.blob());
     const dataUrl = await new Promise<string>((resolve) => {
-        const reader = new FileReader();
         reader.onload = () => resolve(reader.result as string);
         reader.readAsDataURL(blob);
     });

View file

@@ -44,10 +44,12 @@ export async function extractMetadata(
 export const getMetadataMapKey = (collectionID: number, title: string) =>
     `${collectionID}_${title}`;
-export async function parseMetadataJSON(receivedFile: File) {
+export async function parseMetadataJSON(
+    reader: FileReader,
+    receivedFile: File
+) {
     try {
         const metadataJSON: object = await new Promise((resolve, reject) => {
-            const reader = new FileReader();
             reader.onabort = () => reject(Error('file reading was aborted'));
             reader.onerror = () => reject(Error('file reading has failed'));
             reader.onload = () => {

View file

@@ -15,21 +15,21 @@ const TYPE_IMAGE = 'image';
 const EDITED_FILE_SUFFIX = '-edited';
 const CHUNK_SIZE_FOR_TYPE_DETECTION = 4100;
-export async function getFileData(worker, file: File) {
+export async function getFileData(reader: FileReader, file: File) {
     if (file.size > MULTIPART_PART_SIZE) {
-        return getFileStream(worker, file, FILE_READER_CHUNK_SIZE);
+        return getFileStream(reader, file, FILE_READER_CHUNK_SIZE);
     } else {
-        return await worker.getUint8ArrayView(file);
+        return await getUint8ArrayView(reader, file);
     }
 }
 export async function getFileType(
-    worker,
+    reader: FileReader,
     receivedFile: File
 ): Promise<FileTypeInfo> {
     try {
         let fileType: FILE_TYPE;
-        const mimeType = await getMimeType(worker, receivedFile);
+        const mimeType = await getMimeType(reader, receivedFile);
         const typeParts = mimeType?.split('/');
         if (typeParts?.length !== 2) {
             throw Error(CustomError.TYPE_DETECTION_FAILED);
@@ -85,14 +85,14 @@ export function getFileOriginalName(file: File) {
     return originalName;
 }
-async function getMimeType(worker, file: File) {
+async function getMimeType(reader: FileReader, file: File) {
     const fileChunkBlob = file.slice(0, CHUNK_SIZE_FOR_TYPE_DETECTION);
-    return getMimeTypeFromBlob(worker, fileChunkBlob);
+    return getMimeTypeFromBlob(reader, fileChunkBlob);
 }
-export async function getMimeTypeFromBlob(worker, fileBlob: Blob) {
+export async function getMimeTypeFromBlob(reader: FileReader, fileBlob: Blob) {
     try {
-        const initialFiledata = await worker.getUint8ArrayView(fileBlob);
+        const initialFiledata = await getUint8ArrayView(reader, fileBlob);
         const result = await FileType.fromBuffer(initialFiledata);
         return result.mime;
     } catch (e) {
@@ -100,8 +100,8 @@ export async function getMimeTypeFromBlob(worker, fileBlob: Blob) {
     }
 }
-function getFileStream(worker, file: File, chunkSize: number) {
-    const fileChunkReader = fileChunkReaderMaker(worker, file, chunkSize);
+function getFileStream(reader: FileReader, file: File, chunkSize: number) {
+    const fileChunkReader = fileChunkReaderMaker(reader, file, chunkSize);
     const stream = new ReadableStream<Uint8Array>({
         async pull(controller: ReadableStreamDefaultController) {
@@ -120,11 +120,15 @@ function getFileStream(worker, file: File, chunkSize: number) {
     };
 }
-async function* fileChunkReaderMaker(worker, file: File, chunkSize: number) {
+async function* fileChunkReaderMaker(
+    reader: FileReader,
+    file: File,
+    chunkSize: number
+) {
     let offset = 0;
     while (offset < file.size) {
         const blob = file.slice(offset, chunkSize + offset);
-        const fileChunk = await worker.getUint8ArrayView(blob);
+        const fileChunk = await getUint8ArrayView(reader, blob);
         yield fileChunk;
         offset += chunkSize;
     }
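
These helpers now call a shared getUint8ArrayView(reader, blob) instead of worker.getUint8ArrayView(blob). The new function body is not part of this diff; a plausible sketch, modeled on the worker method that the last hunk of this commit removes, would be:

// Sketch only: the actual readFileService implementation is not shown in this
// diff. Handlers are reassigned on every call, which is what lets a single
// FileReader instance be reused, provided reads never overlap.
export async function getUint8ArrayView(
    reader: FileReader,
    file: Blob
): Promise<Uint8Array> {
    return await new Promise<Uint8Array>((resolve, reject) => {
        reader.onabort = () => reject(Error('file reading was aborted'));
        reader.onerror = () => reject(Error('file reading has failed'));
        reader.onload = () => {
            const result =
                typeof reader.result === 'string'
                    ? new TextEncoder().encode(reader.result)
                    : new Uint8Array(reader.result as ArrayBuffer);
            resolve(result);
        };
        reader.readAsArrayBuffer(file);
    });
}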

View file

@@ -6,6 +6,7 @@ import FFmpegService from 'services/ffmpegService';
 import { convertToHumanReadable } from 'utils/billing';
 import { isFileHEIC } from 'utils/file';
 import { FileTypeInfo } from 'types/upload';
+import { getUint8ArrayView } from './readFileService';
 const MAX_THUMBNAIL_DIMENSION = 720;
 const MIN_COMPRESSION_PERCENTAGE_SIZE_DIFF = 10;
@@ -22,6 +23,7 @@ interface Dimension {
 export async function generateThumbnail(
     worker,
+    reader: FileReader,
     file: File,
     fileTypeInfo: FileTypeInfo
 ): Promise<{ thumbnail: Uint8Array; hasStaticThumbnail: boolean }> {
@@ -50,7 +52,7 @@ export async function generateThumbnail(
         }
     }
     const thumbnailBlob = await thumbnailCanvasToBlob(canvas);
-    thumbnail = await worker.getUint8ArrayView(thumbnailBlob);
+    thumbnail = await getUint8ArrayView(reader, thumbnailBlob);
     if (thumbnail.length === 0) {
         throw Error('EMPTY THUMBNAIL');
     }

View file

@@ -92,9 +92,10 @@ class UploadManager {
     private async seedMetadataMap(metadataFiles: FileWithCollection[]) {
         try {
             UIService.reset(metadataFiles.length);
+            const reader = new FileReader();
             for (const fileWithCollection of metadataFiles) {
                 const parsedMetaDataJSONWithTitle = await parseMetadataJSON(
+                    reader,
                     fileWithCollection.file
                 );
                 if (parsedMetaDataJSONWithTitle) {
@@ -137,14 +138,15 @@
             this.cryptoWorkers[i] = cryptoWorker;
             uploadProcesses.push(
                 this.uploadNextFileInQueue(
-                    await new this.cryptoWorkers[i].comlink()
+                    await new this.cryptoWorkers[i].comlink(),
+                    new FileReader()
                 )
             );
         }
         await Promise.all(uploadProcesses);
     }
-    private async uploadNextFileInQueue(worker: any) {
+    private async uploadNextFileInQueue(worker: any, reader: FileReader) {
         while (this.filesToBeUploaded.length > 0) {
             const fileWithCollection = this.filesToBeUploaded.pop();
             const existingFilesInCollection =
@@ -157,6 +159,7 @@
             fileWithCollection.collection = collection;
             const { fileUploadResult, file } = await uploader(
                 worker,
+                reader,
                 existingFilesInCollection,
                 fileWithCollection
             );
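
Note that UploadManager does not share one FileReader globally: each concurrent uploadNextFileInQueue loop gets its own reader alongside its own crypto worker, since a FileReader can only service one read at a time, and reuse happens within a lane rather than across lanes. A rough sketch of that pairing, with the uploader signature simplified for illustration (the real one takes the existing collection files and a FileWithCollection):

// Hypothetical stand-in for the per-lane uploader call.
declare function uploader(
    worker: unknown,
    reader: FileReader,
    file: File
): Promise<void>;

async function runUploads(workers: unknown[], files: File[]): Promise<void> {
    const lanes = workers.map((worker) => {
        // Each lane owns its reader, so reads within a lane are serial and
        // never contend with reads in other lanes.
        const reader = new FileReader();
        return (async () => {
            let file: File | undefined;
            while ((file = files.pop())) {
                await uploader(worker, reader, file);
            }
        })();
    });
    await Promise.all(lanes);
}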

View file

@@ -38,16 +38,18 @@ class UploadService {
     async readFile(
         worker: any,
+        reader: FileReader,
         rawFile: File,
         fileTypeInfo: FileTypeInfo
     ): Promise<FileInMemory> {
         const { thumbnail, hasStaticThumbnail } = await generateThumbnail(
             worker,
+            reader,
             rawFile,
             fileTypeInfo
         );
-        const filedata = await getFileData(worker, rawFile);
+        const filedata = await getFileData(reader, rawFile);
         return {
             filedata,

View file

@@ -30,6 +30,7 @@ interface UploadResponse {
 }
 export default async function uploader(
     worker: any,
+    reader: FileReader,
     existingFilesInCollection: EnteFile[],
     fileWithCollection: FileWithCollection
 ): Promise<UploadResponse> {
@@ -53,7 +54,7 @@ export default async function uploader(
             await sleep(TwoSecondInMillSeconds);
             return { fileUploadResult: FileUploadResults.TOO_LARGE };
         }
-        fileTypeInfo = await getFileType(worker, rawFile);
+        fileTypeInfo = await getFileType(reader, rawFile);
         if (fileTypeInfo.fileType === FILE_TYPE.OTHERS) {
             throw Error(CustomError.UNSUPPORTED_FILE_FORMAT);
         }
@@ -70,7 +71,12 @@ export default async function uploader(
             return { fileUploadResult: FileUploadResults.SKIPPED };
         }
-        file = await UploadService.readFile(worker, rawFile, fileTypeInfo);
+        file = await UploadService.readFile(
+            worker,
+            reader,
+            rawFile,
+            fileTypeInfo
+        );
         if (file.hasStaticThumbnail) {
             metadata.hasStaticThumbnail = true;
         }

View file

@@ -60,6 +60,7 @@ export async function downloadFile(file: EnteFile) {
         let fileBlob = await (await fetch(fileURL)).blob();
         fileBlob = await updateFileCreationDateInEXIF(
+            new FileReader(),
             fileBlob,
             new Date(file.pubMagicMetadata.data.editedTime / 1000)
         );
@@ -295,9 +296,10 @@ export async function convertForPreview(file: EnteFile, fileBlob: Blob) {
     const typeFromExtension = getFileExtension(file.metadata.title);
     const worker = await new CryptoWorker();
+    const reader = new FileReader();
     const mimeType =
-        (await getMimeTypeFromBlob(worker, fileBlob)) ?? typeFromExtension;
+        (await getMimeTypeFromBlob(reader, fileBlob)) ?? typeFromExtension;
     if (isFileHEIC(mimeType)) {
         fileBlob = await worker.convertHEIC2JPEG(fileBlob);
     }

View file

@@ -149,30 +149,6 @@ export class Crypto {
         return libsodium.fromHex(string);
     }
-    // temporary fix for https://github.com/vercel/next.js/issues/25484
-    async getUint8ArrayView(file) {
-        try {
-            return await new Promise((resolve, reject) => {
-                const reader = new FileReader();
-                reader.onabort = () =>
-                    reject(Error('file reading was aborted'));
-                reader.onerror = () => reject(Error('file reading has failed'));
-                reader.onload = () => {
-                    // Do whatever you want with the file contents
-                    const result =
-                        typeof reader.result === 'string'
-                            ? new TextEncoder().encode(reader.result)
-                            : new Uint8Array(reader.result);
-                    resolve(result);
-                };
-                reader.readAsArrayBuffer(file);
-            });
-        } catch (e) {
-            console.log(e, 'error reading file to byte-array');
-            throw e;
-        }
-    }
     async convertHEIC2JPEG(file) {
         return convertHEIC2JPEG(file);
     }