Inline

This commit is contained in:
parent 3074bc108f
commit e786bed078
@@ -1,15 +1,5 @@
-import { ENCRYPTION_CHUNK_SIZE } from "@ente/shared/crypto/constants";
 import { Location } from "types/metadata";
 
-// this is the chunk size of the un-encrypted file which is read and encrypted before uploading it as a single part.
-export const MULTIPART_PART_SIZE = 20 * 1024 * 1024;
-
-export const FILE_READER_CHUNK_SIZE = ENCRYPTION_CHUNK_SIZE;
-
-export const FILE_CHUNKS_COMBINED_FOR_A_UPLOAD_PART = Math.floor(
-    MULTIPART_PART_SIZE / FILE_READER_CHUNK_SIZE,
-);
-
 export const RANDOM_PERCENTAGE_PROGRESS_FOR_PUT = () => 90 + 10 * Math.random();
 
 export const NULL_LOCATION: Location = { latitude: null, longitude: null };
@@ -6,6 +6,7 @@ import { basename } from "@/next/file";
 import log from "@/next/log";
 import { CustomErrorMessage } from "@/next/types/ipc";
 import { ensure } from "@/utils/ensure";
+import { ENCRYPTION_CHUNK_SIZE } from "@ente/shared/crypto/constants";
 import { DedicatedCryptoWorker } from "@ente/shared/crypto/internal/crypto.worker";
 import {
     B64EncryptionResult,
@@ -16,9 +17,6 @@ import { CustomError, handleUploadError } from "@ente/shared/error";
 import { isDataStream, type DataStream } from "@ente/shared/utils/data-stream";
 import { Remote } from "comlink";
 import {
-    FILE_CHUNKS_COMBINED_FOR_A_UPLOAD_PART,
-    FILE_READER_CHUNK_SIZE,
-    MULTIPART_PART_SIZE,
     NULL_LOCATION,
     RANDOM_PERCENTAGE_PROGRESS_FOR_PUT,
     UPLOAD_RESULT,
@@ -61,6 +59,18 @@ import {
 import UploadHttpClient from "./uploadHttpClient";
 import type { UploadableFile } from "./uploadManager";
 
+/** Allow up to 5 ENCRYPTION_CHUNK_SIZE chunks in an upload part. */
+const maximumChunksPerUploadPart = 5;
+
+/**
+ * The chunk size of the un-encrypted file which is read and encrypted before
+ * uploading it as a single part of a multipart upload.
+ *
+ * ENCRYPTION_CHUNK_SIZE is 4 MB, and the maximum number of chunks in a single
+ * upload part is 5, so this is 20 MB.
+ */
+const multipartPartSize = ENCRYPTION_CHUNK_SIZE * maximumChunksPerUploadPart;
+
 /** Upload files to cloud storage */
 class UploadService {
     private uploadURLs: UploadURL[] = [];
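
Note: the inlined values are numerically identical to the constants removed above; a minimal equivalence sketch in TypeScript (standalone, so ENCRYPTION_CHUNK_SIZE is stubbed at the 4 MB value the new doc comment states):

    // Stub for the imported constant; 4 MB, per the doc comment above.
    const ENCRYPTION_CHUNK_SIZE = 4 * 1024 * 1024;
    const maximumChunksPerUploadPart = 5;
    const multipartPartSize = ENCRYPTION_CHUNK_SIZE * maximumChunksPerUploadPart;
    // Same value as the removed MULTIPART_PART_SIZE (20 * 1024 * 1024)...
    console.assert(multipartPartSize === 20 * 1024 * 1024);
    // ...and the removed FILE_CHUNKS_COMBINED_FOR_A_UPLOAD_PART.
    console.assert(
        maximumChunksPerUploadPart ===
            Math.floor(multipartPartSize / ENCRYPTION_CHUNK_SIZE),
    );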
@@ -464,8 +474,8 @@ const readFileOrPath = async (
         fileSize = file.size;
         lastModifiedMs = file.lastModified;
         dataOrStream =
-            fileSize > MULTIPART_PART_SIZE
-                ? getFileStream(file, FILE_READER_CHUNK_SIZE)
+            fileSize > multipartPartSize
+                ? getFileStream(file, ENCRYPTION_CHUNK_SIZE)
                 : new Uint8Array(await file.arrayBuffer());
     } else {
         const path = fileOrPath;
@@ -476,8 +486,8 @@ const readFileOrPath = async (
         } = await readStream(ensureElectron(), path);
         fileSize = size;
         lastModifiedMs = lm;
-        if (size > MULTIPART_PART_SIZE) {
-            const chunkCount = Math.ceil(size / FILE_READER_CHUNK_SIZE);
+        if (size > multipartPartSize) {
+            const chunkCount = Math.ceil(size / ENCRYPTION_CHUNK_SIZE);
             dataOrStream = { stream: response.body, chunkCount };
         } else {
             dataOrStream = new Uint8Array(await response.arrayBuffer());
@@ -492,13 +502,13 @@ const readFileOrPathStream = async (
     fileOrPath: File | string,
 ): Promise<DataStream> => {
     if (fileOrPath instanceof File) {
-        return getFileStream(fileOrPath, FILE_READER_CHUNK_SIZE);
+        return getFileStream(fileOrPath, ENCRYPTION_CHUNK_SIZE);
     } else {
         const { response, size } = await readStream(
             ensureElectron(),
             fileOrPath,
         );
-        const chunkCount = Math.ceil(size / FILE_READER_CHUNK_SIZE);
+        const chunkCount = Math.ceil(size / ENCRYPTION_CHUNK_SIZE);
         return { stream: response.body, chunkCount };
     }
 };
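
For reference, a sketch of the DataStream shape flowing through readFileOrPathStream (the actual type is imported from "@ente/shared/utils/data-stream"; the fields below are inferred from how the diff destructures and constructs it):

    interface DataStream {
        stream: ReadableStream<Uint8Array>;
        chunkCount: number;
    }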
@@ -760,6 +770,8 @@ const computeHash = async (
     worker: Remote<DedicatedCryptoWorker>,
 ) => {
     const { stream, chunkCount } = await readFileOrPathStream(fileOrPath);
+    // TODO(MR): ElectronFile
+    console.log("got stream and chunks", stream, chunkCount);
     const hashState = await worker.initChunkHashing();
 
     const streamReader = stream.getReader();
@@ -1195,7 +1207,7 @@ async function uploadStreamUsingMultipart(
     abortIfCancelled: () => void,
 ) {
     const uploadPartCount = Math.ceil(
-        dataStream.chunkCount / FILE_CHUNKS_COMBINED_FOR_A_UPLOAD_PART,
+        dataStream.chunkCount / maximumChunksPerUploadPart,
     );
     const multipartUploadURLs =
         await uploadService.fetchMultipartUploadURLs(uploadPartCount);
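
A worked example of the part-count arithmetic, using a hypothetical 100 MB file and the 4 MB chunk size assumed above:

    const fileSize = 100 * 1024 * 1024; // hypothetical 100 MB file
    const chunkCount = Math.ceil(fileSize / ENCRYPTION_CHUNK_SIZE); // 25 chunks of 4 MB
    const uploadPartCount = Math.ceil(chunkCount / maximumChunksPerUploadPart); // ceil(25 / 5) = 5 parts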
@@ -1255,7 +1267,7 @@ async function combineChunksToFormUploadPart(
     streamReader: ReadableStreamDefaultReader<Uint8Array>,
 ) {
     const combinedChunks = [];
-    for (let i = 0; i < FILE_CHUNKS_COMBINED_FOR_A_UPLOAD_PART; i++) {
+    for (let i = 0; i < maximumChunksPerUploadPart; i++) {
         const { done, value: chunk } = await streamReader.read();
         if (done) {
             break;
@@ -1,6 +1,7 @@
 import { getFileNameSize } from "@/next/file";
+import { ENCRYPTION_CHUNK_SIZE } from "@ente/shared/crypto/constants";
 import type { DataStream } from "@ente/shared/utils/data-stream";
-import { FILE_READER_CHUNK_SIZE, PICKED_UPLOAD_TYPE } from "constants/upload";
+import { PICKED_UPLOAD_TYPE } from "constants/upload";
 import { getElectronFileStream, getFileStream } from "services/readerService";
 import { getImportSuggestion } from "utils/upload";
@@ -35,11 +36,11 @@ export const testZipFileReading = async () => {
         i++;
         let filedata: DataStream;
         if (file instanceof File) {
-            filedata = getFileStream(file, FILE_READER_CHUNK_SIZE);
+            filedata = getFileStream(file, ENCRYPTION_CHUNK_SIZE);
         } else {
             filedata = await getElectronFileStream(
                 file,
-                FILE_READER_CHUNK_SIZE,
+                ENCRYPTION_CHUNK_SIZE,
             );
         }
         const streamReader = filedata.stream.getReader();