This commit is contained in:
Manav Rathi 2024-04-26 18:16:22 +05:30
parent 3074bc108f
commit e786bed078
No known key found for this signature in database
3 changed files with 27 additions and 24 deletions

View file

@@ -1,15 +1,5 @@
import { ENCRYPTION_CHUNK_SIZE } from "@ente/shared/crypto/constants";
import { Location } from "types/metadata";
// this is the chunk size of the un-encrypted file which is read and encrypted before uploading it as a single part.
export const MULTIPART_PART_SIZE = 20 * 1024 * 1024;
export const FILE_READER_CHUNK_SIZE = ENCRYPTION_CHUNK_SIZE;
export const FILE_CHUNKS_COMBINED_FOR_A_UPLOAD_PART = Math.floor(
MULTIPART_PART_SIZE / FILE_READER_CHUNK_SIZE,
);
export const RANDOM_PERCENTAGE_PROGRESS_FOR_PUT = () => 90 + 10 * Math.random();
export const NULL_LOCATION: Location = { latitude: null, longitude: null };

View file

@@ -6,6 +6,7 @@ import { basename } from "@/next/file";
import log from "@/next/log";
import { CustomErrorMessage } from "@/next/types/ipc";
import { ensure } from "@/utils/ensure";
import { ENCRYPTION_CHUNK_SIZE } from "@ente/shared/crypto/constants";
import { DedicatedCryptoWorker } from "@ente/shared/crypto/internal/crypto.worker";
import {
B64EncryptionResult,
@@ -16,9 +17,6 @@ import { CustomError, handleUploadError } from "@ente/shared/error";
import { isDataStream, type DataStream } from "@ente/shared/utils/data-stream";
import { Remote } from "comlink";
import {
FILE_CHUNKS_COMBINED_FOR_A_UPLOAD_PART,
FILE_READER_CHUNK_SIZE,
MULTIPART_PART_SIZE,
NULL_LOCATION,
RANDOM_PERCENTAGE_PROGRESS_FOR_PUT,
UPLOAD_RESULT,
@@ -61,6 +59,18 @@ import {
import UploadHttpClient from "./uploadHttpClient";
import type { UploadableFile } from "./uploadManager";
/** Allow up to 5 ENCRYPTION_CHUNK_SIZE chunks in an upload part */
const maximumChunksPerUploadPart = 5;
/**
 * The chunk size of the un-encrypted file which is read and encrypted before
 * uploading it as a single part of a multipart upload.
 *
 * ENCRYPTION_CHUNK_SIZE is 4 MB, and maximum number of chunks in a single
 * upload part is 5, so this is 20 MB.
 */
const multipartPartSize = ENCRYPTION_CHUNK_SIZE * maximumChunksPerUploadPart;
/** Upload files to cloud storage */
class UploadService {
private uploadURLs: UploadURL[] = [];
@@ -464,8 +474,8 @@ const readFileOrPath = async (
fileSize = file.size;
lastModifiedMs = file.lastModified;
dataOrStream =
fileSize > MULTIPART_PART_SIZE
? getFileStream(file, FILE_READER_CHUNK_SIZE)
fileSize > multipartPartSize
? getFileStream(file, ENCRYPTION_CHUNK_SIZE)
: new Uint8Array(await file.arrayBuffer());
} else {
const path = fileOrPath;
@@ -476,8 +486,8 @@
} = await readStream(ensureElectron(), path);
fileSize = size;
lastModifiedMs = lm;
if (size > MULTIPART_PART_SIZE) {
const chunkCount = Math.ceil(size / FILE_READER_CHUNK_SIZE);
if (size > multipartPartSize) {
const chunkCount = Math.ceil(size / ENCRYPTION_CHUNK_SIZE);
dataOrStream = { stream: response.body, chunkCount };
} else {
dataOrStream = new Uint8Array(await response.arrayBuffer());
@@ -492,13 +502,13 @@ const readFileOrPathStream = async (
fileOrPath: File | string,
): Promise<DataStream> => {
if (fileOrPath instanceof File) {
return getFileStream(fileOrPath, FILE_READER_CHUNK_SIZE);
return getFileStream(fileOrPath, ENCRYPTION_CHUNK_SIZE);
} else {
const { response, size } = await readStream(
ensureElectron(),
fileOrPath,
);
const chunkCount = Math.ceil(size / FILE_READER_CHUNK_SIZE);
const chunkCount = Math.ceil(size / ENCRYPTION_CHUNK_SIZE);
return { stream: response.body, chunkCount };
}
};
@@ -760,6 +770,8 @@ const computeHash = async (
worker: Remote<DedicatedCryptoWorker>,
) => {
const { stream, chunkCount } = await readFileOrPathStream(fileOrPath);
// TODO(MR): ElectronFile
console.log("got stream and chunks", stream, chunkCount);
const hashState = await worker.initChunkHashing();
const streamReader = stream.getReader();
@@ -1195,7 +1207,7 @@ async function uploadStreamUsingMultipart(
abortIfCancelled: () => void,
) {
const uploadPartCount = Math.ceil(
dataStream.chunkCount / FILE_CHUNKS_COMBINED_FOR_A_UPLOAD_PART,
dataStream.chunkCount / maximumChunksPerUploadPart,
);
const multipartUploadURLs =
await uploadService.fetchMultipartUploadURLs(uploadPartCount);
@@ -1255,7 +1267,7 @@ async function combineChunksToFormUploadPart(
streamReader: ReadableStreamDefaultReader<Uint8Array>,
) {
const combinedChunks = [];
for (let i = 0; i < FILE_CHUNKS_COMBINED_FOR_A_UPLOAD_PART; i++) {
for (let i = 0; i < maximumChunksPerUploadPart; i++) {
const { done, value: chunk } = await streamReader.read();
if (done) {
break;

View file

@@ -1,6 +1,7 @@
import { getFileNameSize } from "@/next/file";
import { ENCRYPTION_CHUNK_SIZE } from "@ente/shared/crypto/constants";
import type { DataStream } from "@ente/shared/utils/data-stream";
import { FILE_READER_CHUNK_SIZE, PICKED_UPLOAD_TYPE } from "constants/upload";
import { PICKED_UPLOAD_TYPE } from "constants/upload";
import { getElectronFileStream, getFileStream } from "services/readerService";
import { getImportSuggestion } from "utils/upload";
@@ -35,11 +36,11 @@ export const testZipFileReading = async () => {
i++;
let filedata: DataStream;
if (file instanceof File) {
filedata = getFileStream(file, FILE_READER_CHUNK_SIZE);
filedata = getFileStream(file, ENCRYPTION_CHUNK_SIZE);
} else {
filedata = await getElectronFileStream(
file,
FILE_READER_CHUNK_SIZE,
ENCRYPTION_CHUNK_SIZE,
);
}
const streamReader = filedata.stream.getReader();