created s3Service layer
parent 141924bbf6
commit b95a560072
@@ -3,8 +3,7 @@ import { retryAsyncFunction } from 'utils/common';
 import { getEndpoint } from 'utils/common/apiUtil';
 import { getToken } from 'utils/common/key';
 import { logError } from 'utils/sentry';
-import { CHUNKS_COMBINED_FOR_UPLOAD, MultipartUploadURLs, RANDOM_PERCENTAGE_PROGRESS_FOR_PUT, UploadFile } from './uploadService';
-import * as convert from 'xml-js';
+import { MultipartUploadURLs, UploadFile, UploadURL } from './uploadService';
 import { File } from '../fileService';
 import { CustomError } from 'utils/common/errorUtil';
 
@@ -12,10 +11,6 @@ const ENDPOINT = getEndpoint();
 const MAX_URL_REQUESTS = 50;
 
-export interface UploadURL {
-    url: string;
-    objectKey: string;
-}
 class NetworkClient {
     private uploadURLFetchInProgress=null;
 
@@ -116,72 +111,49 @@ class NetworkClient {
         }
     }
 
-    async putFileInParts(
-        multipartUploadURLs: MultipartUploadURLs,
-        file: ReadableStream<Uint8Array>,
-        filename: string,
-        uploadPartCount: number,
-        trackUploadProgress,
-    ) {
-        try {
-            const streamEncryptedFileReader = file.getReader();
-            const percentPerPart = Math.round(
-                RANDOM_PERCENTAGE_PROGRESS_FOR_PUT() / uploadPartCount,
-            );
-            const resParts = [];
-            for (const [
-                index,
-                fileUploadURL,
-            ] of multipartUploadURLs.partURLs.entries()) {
-                const combinedChunks = [];
-                for (let i = 0; i < CHUNKS_COMBINED_FOR_UPLOAD; i++) {
-                    const { done, value: chunk } =
-                        await streamEncryptedFileReader.read();
-                    if (done) {
-                        break;
-                    }
-                    for (let index = 0; index < chunk.length; index++) {
-                        combinedChunks.push(chunk[index]);
-                    }
-                }
-                const uploadChunk = Uint8Array.from(combinedChunks);
-                const response=await retryAsyncFunction(async ()=>{
-                    const resp =await HTTPService.put(
-                        fileUploadURL,
-                        uploadChunk,
-                        null,
-                        null,
-                        trackUploadProgress(filename, percentPerPart, index),
-                    );
-                    if (!resp?.headers?.etag) {
-                        const err=Error(CustomError.ETAG_MISSING);
-                        logError(err);
-                        throw err;
-                    }
-                    return resp;
-                });
-                resParts.push({
-                    PartNumber: index + 1,
-                    ETag: response.headers.etag,
-                });
-            }
-            const options = { compact: true, ignoreComment: true, spaces: 4 };
-            const body = convert.js2xml(
-                { CompleteMultipartUpload: { Part: resParts } },
-                options,
-            );
-            await retryAsyncFunction(()=>
-                HTTPService.post(multipartUploadURLs.completeURL, body, null, {
-                    'content-type': 'text/xml',
-                }),
-            );
-            return multipartUploadURLs.objectKey;
-        } catch (e) {
-            logError(e, 'put file in parts failed');
-            throw e;
-        }
-    }
+    async putFilePart(
+        partUploadURL: string,
+        filePart: Uint8Array,
+        progressTracker,
+    ) {
+        try {
+            const response=await retryAsyncFunction(async ()=>{
+                const resp =await HTTPService.put(
+                    partUploadURL,
+                    filePart,
+                    null,
+                    null,
+                    progressTracker(),
+                );
+                if (!resp?.headers?.etag) {
+                    const err=Error(CustomError.ETAG_MISSING);
+                    logError(err);
+                    throw err;
+                }
+                return resp;
+            });
+            return response.headers.etag;
+        } catch (e) {
+            logError(e, 'put filePart failed');
+            throw e;
+        }
+    }
+
+    async completeMultipartUpload(completeURL:string, reqBody:any) {
+        try {
+            await retryAsyncFunction(()=>
+                HTTPService.post(completeURL, reqBody, null, {
+                    'content-type': 'text/xml',
+                }),
+            );
+        } catch (e) {
+            logError(e, 'put file in parts failed');
+            throw e;
+        }
+    }
 }
 
 export default new NetworkClient();
@@ -1,11 +1,11 @@
 import { FILE_TYPE } from 'pages/gallery';
 import { ENCRYPTION_CHUNK_SIZE } from 'types';
 import { logError } from 'utils/sentry';
+import { MIN_STREAM_FILE_SIZE } from './uploadService';
 
 const TYPE_VIDEO = 'video';
 const TYPE_HEIC = 'HEIC';
 export const TYPE_IMAGE = 'image';
-const MIN_STREAM_FILE_SIZE = 20 * 1024 * 1024;
 const EDITED_FILE_SUFFIX = '-edited';
 
src/services/upload/s3Service.ts (new file, 87 lines)
@@ -0,0 +1,87 @@
+import { CHUNKS_COMBINED_FOR_A_UPLOAD_PART, DataStream, MultipartUploadURLs, RANDOM_PERCENTAGE_PROGRESS_FOR_PUT } from './uploadService';
+import NetworkClient from './networkClient';
+import * as convert from 'xml-js';
+
+interface PartEtag{
+    PartNumber:number;
+    Etag:string;
+}
+
+export function calculatePartCount(encryptedChunkCount: number) {
+    const partCount = Math.ceil(
+        encryptedChunkCount / CHUNKS_COMBINED_FOR_A_UPLOAD_PART,
+    );
+    return partCount;
+}
+
+export async function uploadStreamUsingMultipart(filename:string, encryptedData:DataStream, progressTracker) {
+    const { chunkCount, stream } = encryptedData;
+    const uploadPartCount = calculatePartCount(chunkCount);
+    const filePartUploadURLs = await NetworkClient.fetchMultipartUploadURLs(
+        uploadPartCount,
+    );
+    const fileObjectKey = await uploadStreamInParts(
+        filePartUploadURLs,
+        stream,
+        filename,
+        uploadPartCount,
+        progressTracker,
+    );
+    return fileObjectKey;
+}
+
+export async function uploadStreamInParts(
+    multipartUploadURLs: MultipartUploadURLs,
+    file: ReadableStream<Uint8Array>,
+    filename: string,
+    uploadPartCount: number,
+    progressTracker,
+) {
+    const encryptedFileStreamReader = file.getReader();
+    const percentPerPart = getRandomProgressPerPartUpload(uploadPartCount);
+
+    const partEtags:PartEtag[] = [];
+    for (const [
+        index,
+        fileUploadURL,
+    ] of multipartUploadURLs.partURLs.entries()) {
+        const uploadChunk = await combineChunksToFormUploadPart(encryptedFileStreamReader);
+        const eTag= await NetworkClient.putFilePart(fileUploadURL, uploadChunk, progressTracker.bind(null, filename, percentPerPart, index));
+        partEtags.push({ PartNumber: index+1, Etag: eTag });
+    }
+    await completeMultipartUpload(partEtags, multipartUploadURLs.completeURL);
+    return multipartUploadURLs.objectKey;
+}
+
+export function getRandomProgressPerPartUpload(uploadPartCount:number) {
+    const percentPerPart = Math.round(
+        RANDOM_PERCENTAGE_PROGRESS_FOR_PUT() / uploadPartCount,
+    );
+    return percentPerPart;
+}
+
+export async function combineChunksToFormUploadPart(dataStreamReader:ReadableStreamDefaultReader<Uint8Array>) {
+    const combinedChunks = [];
+    for (let i = 0; i < CHUNKS_COMBINED_FOR_A_UPLOAD_PART; i++) {
+        const { done, value: chunk } =
+            await dataStreamReader.read();
+        if (done) {
+            break;
+        }
+        for (let index = 0; index < chunk.length; index++) {
+            combinedChunks.push(chunk[index]);
+        }
+    }
+    return Uint8Array.from(combinedChunks);
+}
+
+async function completeMultipartUpload(partEtags:PartEtag[], completeURL:string) {
+    const options = { compact: true, ignoreComment: true, spaces: 4 };
+    const body = convert.js2xml(
+        { CompleteMultipartUpload: { Part: partEtags } },
+        options,
+    );
+    await NetworkClient.completeMultipartUpload(completeURL, body);
+}
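For reference, the module-private completeMultipartUpload above serializes the collected PartEtag entries into the XML document that S3-compatible stores expect when finishing a multipart upload. A minimal sketch of the body it is meant to produce, assuming xml-js in compact mode turns the object keys directly into element names (the exact serialization is an assumption and is not shown in this diff):

    // Hypothetical illustration of the request body for two uploaded parts,
    // derived from the { CompleteMultipartUpload: { Part: partEtags } } object above.
    const exampleCompletionBody = `
    <CompleteMultipartUpload>
        <Part>
            <PartNumber>1</PartNumber>
            <Etag>etag-of-part-1</Etag>
        </Part>
        <Part>
            <PartNumber>2</PartNumber>
            <Etag>etag-of-part-2</Etag>
        </Part>
    </CompleteMultipartUpload>`;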
@@ -16,11 +16,13 @@ import {
 import { logError } from 'utils/sentry';
 import localForage from 'utils/storage/localForage';
 import { sleep } from 'utils/common';
-import NetworkClient, { UploadURL } from './networkClient';
+import NetworkClient from './networkClient';
 import { extractMetatdata, ParsedMetaDataJSON, parseMetadataJSON } from './metadataService';
 import { generateThumbnail } from './thumbnailService';
 import { getFileType, getFileOriginalName, getFileData } from './readFileService';
 import { encryptFiledata } from './encryptionService';
+import { ENCRYPTION_CHUNK_SIZE } from 'types';
+import { uploadStreamUsingMultipart } from './s3Service';
 
 const MAX_CONCURRENT_UPLOADS = 4;
@@ -28,7 +30,8 @@ const TYPE_JSON = 'json';
 const FILE_UPLOAD_COMPLETED = 100;
 const TwoSecondInMillSeconds = 2000;
 export const RANDOM_PERCENTAGE_PROGRESS_FOR_PUT = () => 90 + 10 * Math.random();
-export const CHUNKS_COMBINED_FOR_UPLOAD = 5;
+export const MIN_STREAM_FILE_SIZE = 20 * 1024 * 1024;
+export const CHUNKS_COMBINED_FOR_A_UPLOAD_PART = Math.floor(MIN_STREAM_FILE_SIZE/ENCRYPTION_CHUNK_SIZE);
 
 export enum FileUploadResults {
     FAILED = -1,
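The replacement constants tie the multipart part size to the encryption chunk size instead of a hard-coded chunk count. A quick worked example, assuming ENCRYPTION_CHUNK_SIZE is 4 MiB (its actual value comes from 'types' and is not shown in this diff):

    // Assumed for illustration only; see ENCRYPTION_CHUNK_SIZE in 'types'.
    const ENCRYPTION_CHUNK_SIZE = 4 * 1024 * 1024;
    const MIN_STREAM_FILE_SIZE = 20 * 1024 * 1024;

    // 20 MiB / 4 MiB => 5 encrypted chunks are combined into each multipart part,
    // so every part passed to putFilePart is roughly MIN_STREAM_FILE_SIZE in size.
    const CHUNKS_COMBINED_FOR_A_UPLOAD_PART = Math.floor(
        MIN_STREAM_FILE_SIZE / ENCRYPTION_CHUNK_SIZE,
    ); // 5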
@@ -38,6 +41,11 @@ export enum FileUploadResults {
     UPLOADED = 100,
 }
 
+export interface UploadURL {
+    url: string;
+    objectKey: string;
+}
+
 export interface FileWithCollection {
     file: globalThis.File;
     collection: Collection;
@@ -87,7 +95,7 @@ interface EncryptedFile {
     file: ProcessedFile;
     fileKey: B64EncryptionResult;
 }
-interface ProcessedFile {
+export interface ProcessedFile {
     file: fileAttribute;
     thumbnail: fileAttribute;
     metadata: fileAttribute;
@@ -420,25 +428,13 @@ class UploadService {
 
     private async uploadToBucket(file: ProcessedFile): Promise<BackupedFile> {
         try {
-            let fileObjectKey;
+            let fileObjectKey:string=null;
             if (isDataStream(file.file.encryptedData)) {
-                const { chunkCount, stream } = file.file.encryptedData;
-                const uploadPartCount = Math.ceil(
-                    chunkCount / CHUNKS_COMBINED_FOR_UPLOAD,
-                );
-                const filePartUploadURLs = await NetworkClient.fetchMultipartUploadURLs(
-                    uploadPartCount,
-                );
-                fileObjectKey = await NetworkClient.putFileInParts(
-                    filePartUploadURLs,
-                    stream,
-                    file.filename,
-                    uploadPartCount,
-                    this.trackUploadProgress.bind(this),
-                );
+                const progressTracker=this.trackUploadProgress.bind(this);
+                fileObjectKey=await uploadStreamUsingMultipart(file.filename, file.file.encryptedData, progressTracker);
             } else {
-                const fileUploadURL = await this.getUploadURL();
                 const progressTracker=this.trackUploadProgress.bind(this, file.filename);
+                const fileUploadURL = await this.getUploadURL();
                 fileObjectKey = await NetworkClient.putFile(
                     fileUploadURL,
                     file.file.encryptedData,
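Taken together, the commit moves multipart orchestration out of NetworkClient and into the new s3Service layer; uploadToBucket now only chooses between the streaming and single-PUT paths. A simplified sketch of the resulting call chain for a streamed file, using only names introduced in this commit (retries, progress bookkeeping, and error handling omitted):

    // uploadService.uploadToBucket (streaming branch)
    //   progressTracker = this.trackUploadProgress.bind(this)
    //   fileObjectKey   = await uploadStreamUsingMultipart(filename, encryptedData, progressTracker)
    //
    // s3Service.uploadStreamUsingMultipart
    //   uploadPartCount    = calculatePartCount(chunkCount)
    //   filePartUploadURLs = await NetworkClient.fetchMultipartUploadURLs(uploadPartCount)
    //   return uploadStreamInParts(filePartUploadURLs, stream, filename, uploadPartCount, progressTracker)
    //
    // s3Service.uploadStreamInParts, per part URL
    //   uploadChunk = await combineChunksToFormUploadPart(reader)      // merges up to CHUNKS_COMBINED_FOR_A_UPLOAD_PART chunks
    //   eTag        = await NetworkClient.putFilePart(url, uploadChunk, tracker)
    //   partEtags.push({ PartNumber: index + 1, Etag: eTag })
    // and finally
    //   await completeMultipartUpload(partEtags, completeURL)          // POSTs the CompleteMultipartUpload XML
    //   return multipartUploadURLs.objectKey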