refactored code to not create a stream for live photo data assets, as the zipping library doesn't support streams

Abhinav 2022-02-15 09:20:41 +05:30
parent 8b668b924a
commit 80db61133a
3 changed files with 43 additions and 24 deletions

Changed file 1 of 3

@@ -41,3 +41,5 @@ export enum FileUploadResults {
 }
 export const MAX_FILE_SIZE_SUPPORTED = 5 * 1024 * 1024 * 1024; // 5 GB
+export const LIVE_PHOTO_ASSET_SIZE_LIMIT = 20 * 1024 * 1024; // 20MB
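For intuition, a minimal sketch (not part of this commit) of how a byte-size limit like this gates an in-memory code path; the helper name below is hypothetical:

// Hypothetical helper, not from the repository: returns whether an asset is
// small enough to be read fully into memory as a live photo component.
const LIVE_PHOTO_ASSET_SIZE_LIMIT = 20 * 1024 * 1024; // 20 MB, as defined above

function isWithinLivePhotoAssetLimit(sizeInBytes: number): boolean {
    return sizeInBytes <= LIVE_PHOTO_ASSET_SIZE_LIMIT;
}

// Example: isWithinLivePhotoAssetLimit(12 * 1024 * 1024) -> true  (12 MB)
//          isWithinLivePhotoAssetLimit(25 * 1024 * 1024) -> false (25 MB)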

Changed file 2 of 3

@@ -1,16 +1,17 @@
 import { FILE_TYPE } from 'constants/file';
-import { MULTIPART_PART_SIZE } from 'constants/upload';
+import { LIVE_PHOTO_ASSET_SIZE_LIMIT } from 'constants/upload';
 import { encodeMotionPhoto } from 'services/motionPhotoService';
 import {
     FileTypeInfo,
     FileWithCollection,
-    isDataStream,
     LivePhotoAssets,
     Metadata,
 } from 'types/upload';
+import { CustomError } from 'utils/error';
 import { splitFilenameAndExtension } from 'utils/file';
-import { readFile } from './fileService';
-import { getFileData } from './readFileService';
+import { logError } from 'utils/sentry';
+import { getUint8ArrayView } from './readFileService';
+import { generateThumbnail } from './thumbnailService';
 import uploadService from './uploadService';
 import UploadService from './uploadService';
@@ -53,31 +54,26 @@ export async function readLivePhoto(
     fileTypeInfo: FileTypeInfo,
     livePhotoAssets: LivePhotoAssets
 ) {
-    const image = await readFile(
+    const { thumbnail, hasStaticThumbnail } = await generateThumbnail(
         worker,
         reader,
-        { exactType: fileTypeInfo.exactType, fileType: FILE_TYPE.IMAGE },
-        livePhotoAssets.image
+        livePhotoAssets.image,
+        { exactType: fileTypeInfo.exactType, fileType: FILE_TYPE.IMAGE }
     );
-    const video = await getFileData(reader, livePhotoAssets.video);
-    /*
-        did it based on the assumption that live photo assets ideally would not be larger than MULTIPART_PART_SIZE and hence not require to be streamed
-        also, allowing that would require a small amount of code changes as the zipping library doesn't support stream as a input
-    */
-    if (isDataStream(video) || isDataStream(image.filedata)) {
-        throw new Error('too large live photo assets');
-    }
+    const image = await getUint8ArrayView(reader, livePhotoAssets.image);
+    const video = await getUint8ArrayView(reader, livePhotoAssets.video);
     return {
         filedata: await encodeMotionPhoto({
-            image: image.filedata as Uint8Array,
-            video: video as Uint8Array,
+            image,
+            video,
             imageNameTitle: livePhotoAssets.image.name,
             videoNameTitle: livePhotoAssets.video.name,
         }),
-        thumbnail: image.thumbnail,
-        hasStaticThumbnail: image.hasStaticThumbnail,
+        thumbnail,
+        hasStaticThumbnail,
     };
 }
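For context, a hedged sketch of the buffer-only read that the new readLivePhoto path depends on; this is an assumption about what getUint8ArrayView roughly amounts to in a browser environment, not the project's actual implementation:

// Assumed illustration only: read a File fully into a Uint8Array. The
// motion-photo zip step needs a contiguous buffer, so a stream cannot be
// handed to it; oversized assets are therefore filtered out earlier instead.
async function readFileAsUint8Array(file: File): Promise<Uint8Array> {
    const buffer = await file.arrayBuffer(); // loads the whole file into memory
    return new Uint8Array(buffer);
}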
@@ -171,15 +167,35 @@ function areFilesLivePhotoAssets(
     firstFileIdentifier: LivePhotoIdentifier,
     secondFileIdentifier: LivePhotoIdentifier
 ) {
-    return (
+    if (
         firstFileIdentifier.collectionID ===
             secondFileIdentifier.collectionID &&
         firstFileIdentifier.fileType !== secondFileIdentifier.fileType &&
         firstFileIdentifier.fileType !== FILE_TYPE.OTHERS &&
         secondFileIdentifier.fileType !== FILE_TYPE.OTHERS &&
         splitFilenameAndExtension(firstFileIdentifier.name)[0] ===
-            splitFilenameAndExtension(secondFileIdentifier.name)[0] &&
-        firstFileIdentifier.size <= MULTIPART_PART_SIZE && // so that they are small enough to be read and uploaded in single chunk
-        secondFileIdentifier.size <= MULTIPART_PART_SIZE
-    );
+            splitFilenameAndExtension(secondFileIdentifier.name)[0]
+    ) {
+        // check that the live photo assets are within the allowed size limit
+        // (based on the assumption that live photo assets would ideally not be larger than LIVE_PHOTO_ASSET_SIZE_LIMIT;
+        // also, the zipping library doesn't support a stream as input)
+        if (
+            firstFileIdentifier.size <= LIVE_PHOTO_ASSET_SIZE_LIMIT &&
+            secondFileIdentifier.size <= LIVE_PHOTO_ASSET_SIZE_LIMIT
+        ) {
+            return true;
+        } else {
+            logError(
+                new Error(CustomError.TOO_LARGE_LIVE_PHOTO_ASSETS),
+                CustomError.TOO_LARGE_LIVE_PHOTO_ASSETS,
+                {
+                    fileSizes: [
+                        firstFileIdentifier.size,
+                        secondFileIdentifier.size,
+                    ],
+                }
+            );
+        }
+    }
+    return false;
 }
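As a usage illustration, a hypothetical pair of identifiers that would satisfy the pairing check above; the field values (and FILE_TYPE.VIDEO) are made up for the example and are not taken from the commit:

import { FILE_TYPE } from 'constants/file';

// Hypothetical values: same collection, different file types, same base name,
// and both sizes (in bytes) under LIVE_PHOTO_ASSET_SIZE_LIMIT (20 MB), so the
// pairing check above would return true.
const imageIdentifier = {
    collectionID: 1,
    fileType: FILE_TYPE.IMAGE,
    name: 'IMG_0001.HEIC',
    size: 4 * 1024 * 1024, // 4 MB
};
const videoIdentifier = {
    collectionID: 1,
    fileType: FILE_TYPE.VIDEO,
    name: 'IMG_0001.MOV',
    size: 12 * 1024 * 1024, // 12 MB
};
// areFilesLivePhotoAssets(imageIdentifier, videoIdentifier) === true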

Changed file 3 of 3

@@ -39,6 +39,7 @@ export enum CustomError {
     SUBSCRIPTION_NEEDED = 'subscription not present',
     NOT_FOUND = 'not found ',
     NO_METADATA = 'no metadata',
+    TOO_LARGE_LIVE_PHOTO_ASSETS = 'too large live photo assets',
 }
 function parseUploadErrorCodes(error) {