Merge pull request #694 from ente-io/cancel-upload-without-reload

Cancel upload without reload
This commit is contained in:
Abhinav Kumar 2022-09-05 15:43:15 +05:30 committed by GitHub
commit 0a8f2f798b
No known key found for this signature in database
GPG key ID: 4AEE18F83AFDEB23
11 changed files with 201 additions and 73 deletions

View file

@ -22,7 +22,6 @@ import {
ElectronFile,
FileWithCollection,
} from 'types/upload';
import Router from 'next/router';
import { isCanvasBlocked } from 'utils/upload/isCanvasBlocked';
import { downloadApp } from 'utils/common';
import DiscFullIcon from '@mui/icons-material/DiscFull';
@ -328,10 +327,14 @@ export default function Uploader(props: Props) {
)
);
}
await uploadManager.queueFilesForUpload(
filesWithCollectionToUploadIn,
collections
);
const shouldCloseUploadProgress =
await uploadManager.queueFilesForUpload(
filesWithCollectionToUploadIn,
collections
);
if (shouldCloseUploadProgress) {
closeUploadProgress();
}
} catch (err) {
showUserFacingError(err.message);
closeUploadProgress();
@ -482,13 +485,8 @@ export default function Uploader(props: Props) {
}
};
const cancelUploads = async () => {
closeUploadProgress();
if (isElectron()) {
ImportService.cancelRemainingUploads();
}
props.setUploadInProgress(false);
Router.reload();
// Delegate cancelation to the upload manager. Unlike the deleted
// implementation above, this no longer closes the progress bar or calls
// Router.reload(); the manager drives the CANCELLING stage instead.
const cancelUploads = () => {
uploadManager.cancelRunningUpload();
};
const handleUpload = (type) => () => {

View file

@ -32,6 +32,7 @@ export enum UPLOAD_STAGES {
READING_GOOGLE_METADATA_FILES,
EXTRACTING_METADATA,
UPLOADING,
CANCELLING,
FINISH,
}
@ -45,6 +46,7 @@ export enum UPLOAD_RESULT {
UPLOADED,
UPLOADED_WITH_STATIC_THUMBNAIL,
ADDED_SYMLINK,
CANCELLED,
}
export enum UPLOAD_STRATEGY {

View file

@ -8,6 +8,7 @@ import UploadHttpClient from './uploadHttpClient';
import * as convert from 'xml-js';
import { CustomError } from 'utils/error';
import { DataStream, MultipartUploadURLs } from 'types/upload';
import uploadCancelService from './uploadCancelService';
interface PartEtag {
PartNumber: number;
@ -51,6 +52,9 @@ export async function uploadStreamInParts(
index,
fileUploadURL,
] of multipartUploadURLs.partURLs.entries()) {
if (uploadCancelService.isUploadCancelationRequested()) {
throw Error(CustomError.UPLOAD_CANCELLED);
}
const uploadChunk = await combineChunksToFormUploadPart(streamReader);
const progressTracker = UIService.trackUploadProgress(
fileLocalID,

View file

@ -10,7 +10,11 @@ import {
ProgressUpdater,
SegregatedFinishedUploads,
} from 'types/upload/ui';
import { Canceler } from 'axios';
import { CustomError } from 'utils/error';
import uploadCancelService from './uploadCancelService';
const REQUEST_TIMEOUT_TIME = 30 * 1000; // 30 sec;
class UIService {
private perFileProgress: number;
private filesUploaded: number;
@ -72,7 +76,19 @@ class UIService {
this.updateProgressBarUI();
}
updateProgressBarUI() {
hasFilesInResultList() {
    // True iff at least one finished-result bucket is non-empty, i.e. at
    // least one file has actually completed with some outcome.
    const resultBuckets = segregatedFinishedUploadsToList(
        this.finishedUploads
    );
    return [...resultBuckets.values()].some((ids) => ids.length > 0);
}
private updateProgressBarUI() {
const {
setPercentComplete,
setUploadCounter,
@ -99,10 +115,10 @@ class UIService {
setPercentComplete(percentComplete);
setInProgressUploads(
this.convertInProgressUploadsToList(this.inProgressUploads)
convertInProgressUploadsToList(this.inProgressUploads)
);
setFinishedUploads(
this.segregatedFinishedUploadsToList(this.finishedUploads)
segregatedFinishedUploadsToList(this.finishedUploads)
);
}
@ -111,13 +127,19 @@ class UIService {
percentPerPart = RANDOM_PERCENTAGE_PROGRESS_FOR_PUT(),
index = 0
) {
const cancel = { exec: null };
const cancel: { exec: Canceler } = { exec: () => {} };
const cancelTimedOutRequest = () =>
cancel.exec(CustomError.REQUEST_TIMEOUT);
const cancelCancelledUploadRequest = () =>
cancel.exec(CustomError.UPLOAD_CANCELLED);
let timeout = null;
const resetTimeout = () => {
if (timeout) {
clearTimeout(timeout);
}
timeout = setTimeout(() => cancel.exec(), 30 * 1000);
timeout = setTimeout(cancelTimedOutRequest, REQUEST_TIMEOUT_TIME);
};
return {
cancel,
@ -138,31 +160,33 @@ class UIService {
} else {
resetTimeout();
}
if (uploadCancelService.isUploadCancelationRequested()) {
cancelCancelledUploadRequest();
}
},
};
}
convertInProgressUploadsToList(inProgressUploads) {
return [...inProgressUploads.entries()].map(
([localFileID, progress]) =>
({
localFileID,
progress,
} as InProgressUpload)
);
}
segregatedFinishedUploadsToList(finishedUploads: FinishedUploads) {
const segregatedFinishedUploads =
new Map() as SegregatedFinishedUploads;
for (const [localID, result] of finishedUploads) {
if (!segregatedFinishedUploads.has(result)) {
segregatedFinishedUploads.set(result, []);
}
segregatedFinishedUploads.get(result).push(localID);
}
return segregatedFinishedUploads;
}
}
export default new UIService();
/**
 * Convert the in-progress uploads map (localFileID -> upload progress
 * percentage) into the list shape consumed by the upload progress UI.
 */
function convertInProgressUploadsToList(inProgressUploads) {
    // Annotating the arrow's return type (instead of an `as` assertion)
    // lets the compiler actually check the object literal against
    // InProgressUpload rather than silencing mismatches.
    return [...inProgressUploads.entries()].map(
        ([localFileID, progress]): InProgressUpload => ({
            localFileID,
            progress,
        })
    );
}
/**
 * Group finished uploads by result: maps each upload result to the list of
 * local file IDs that ended with that result, preserving encounter order.
 */
function segregatedFinishedUploadsToList(finishedUploads: FinishedUploads) {
    // A typed declaration (rather than `new Map() as ...`) keeps the
    // compiler checking the subsequent set/get/push calls for real.
    const segregatedFinishedUploads: SegregatedFinishedUploads = new Map();
    for (const [localID, result] of finishedUploads) {
        if (!segregatedFinishedUploads.has(result)) {
            segregatedFinishedUploads.set(result, []);
        }
        segregatedFinishedUploads.get(result).push(localID);
    }
    return segregatedFinishedUploads;
}

View file

@ -0,0 +1,23 @@
/**
 * Tracks whether the user has asked for the in-flight upload to be
 * cancelled. Upload pipeline stages poll this flag (via the shared
 * singleton exported below) and throw CustomError.UPLOAD_CANCELLED when
 * it is set.
 */
class UploadCancelService {
    // A plain boolean suffices; the previous `{ value: boolean }` wrapper
    // object added indirection without benefit for a private field.
    private shouldUploadBeCancelled = false;

    /** Clear any pending cancelation request (called before a new upload). */
    reset() {
        this.shouldUploadBeCancelled = false;
    }

    /** Ask the running upload to stop at its next cancelation checkpoint. */
    requestUploadCancelation() {
        this.shouldUploadBeCancelled = true;
    }

    /** Poll whether a cancelation has been requested. */
    isUploadCancelationRequested(): boolean {
        return this.shouldUploadBeCancelled;
    }
}

export default new UploadCancelService();

View file

@ -92,18 +92,22 @@ class UploadHttpClient {
progressTracker
): Promise<string> {
try {
await retryHTTPCall(() =>
HTTPService.put(
fileUploadURL.url,
file,
null,
null,
progressTracker
)
await retryHTTPCall(
() =>
HTTPService.put(
fileUploadURL.url,
file,
null,
null,
progressTracker
),
handleUploadError
);
return fileUploadURL.objectKey;
} catch (e) {
logError(e, 'putFile to dataStore failed ');
if (e.message !== CustomError.UPLOAD_CANCELLED) {
logError(e, 'putFile to dataStore failed ');
}
throw e;
}
}
@ -127,7 +131,9 @@ class UploadHttpClient {
);
return fileUploadURL.objectKey;
} catch (e) {
logError(e, 'putFile to dataStore failed ');
if (e.message !== CustomError.UPLOAD_CANCELLED) {
logError(e, 'putFile to dataStore failed ');
}
throw e;
}
}
@ -152,10 +158,12 @@ class UploadHttpClient {
throw err;
}
return resp;
});
}, handleUploadError);
return response.headers.etag as string;
} catch (e) {
logError(e, 'put filePart failed');
if (e.message !== CustomError.UPLOAD_CANCELLED) {
logError(e, 'put filePart failed');
}
throw e;
}
}
@ -185,7 +193,9 @@ class UploadHttpClient {
});
return response.data.etag as string;
} catch (e) {
logError(e, 'put filePart failed');
if (e.message !== CustomError.UPLOAD_CANCELLED) {
logError(e, 'put filePart failed');
}
throw e;
}
}

View file

@ -40,6 +40,7 @@ import { addLogLine, getFileNameSize } from 'utils/logging';
import isElectron from 'is-electron';
import ImportService from 'services/importService';
import { ProgressUpdater } from 'types/upload/ui';
import uploadCancelService from './uploadCancelService';
const MAX_CONCURRENT_UPLOADS = 4;
const FILE_UPLOAD_COMPLETED = 100;
@ -78,6 +79,7 @@ class UploadManager {
prepareForNewUpload() {
this.resetState();
UIService.reset();
uploadCancelService.reset();
UIService.setUploadStage(UPLOAD_STAGES.START);
}
@ -173,21 +175,37 @@ class UploadManager {
await this.uploadMediaFiles(allFiles);
}
} catch (e) {
if (e.message === CustomError.UPLOAD_CANCELLED) {
if (isElectron()) {
ImportService.cancelRemainingUploads();
}
} else {
logError(e, 'uploading failed with error');
addLogLine(
`uploading failed with error -> ${e.message}
${(e as Error).stack}`
);
throw e;
}
} finally {
UIService.setUploadStage(UPLOAD_STAGES.FINISH);
UIService.setPercentComplete(FILE_UPLOAD_COMPLETED);
} catch (e) {
logError(e, 'uploading failed with error');
addLogLine(
`uploading failed with error -> ${e.message}
${(e as Error).stack}`
);
throw e;
} finally {
for (let i = 0; i < MAX_CONCURRENT_UPLOADS; i++) {
this.cryptoWorkers[i]?.worker.terminate();
}
this.uploadInProgress = false;
}
try {
if (!UIService.hasFilesInResultList()) {
return true;
} else {
return false;
}
} catch (e) {
logError(e, ' failed to return shouldCloseProgressBar');
return false;
}
}
private async parseMetadataJSONFiles(metadataFiles: FileWithCollection[]) {
@ -198,6 +216,9 @@ class UploadManager {
for (const { file, collectionID } of metadataFiles) {
try {
if (uploadCancelService.isUploadCancelationRequested()) {
throw Error(CustomError.UPLOAD_CANCELLED);
}
addLogLine(
`parsing metadata json file ${getFileNameSize(file)}`
);
@ -220,7 +241,12 @@ class UploadManager {
)}`
);
} catch (e) {
logError(e, 'parsing failed for a file');
if (e.message === CustomError.UPLOAD_CANCELLED) {
throw e;
} else {
// and don't break for subsequent files just log and move on
logError(e, 'parsing failed for a file');
}
addLogLine(
`failed to parse metadata json file ${getFileNameSize(
file
@ -229,8 +255,10 @@ class UploadManager {
}
}
} catch (e) {
logError(e, 'error seeding MetadataMap');
// silently ignore the error
if (e.message !== CustomError.UPLOAD_CANCELLED) {
logError(e, 'error seeding MetadataMap');
}
throw e;
}
}
@ -239,6 +267,9 @@ class UploadManager {
addLogLine(`extractMetadataFromFiles executed`);
UIService.reset(mediaFiles.length);
for (const { file, localID, collectionID } of mediaFiles) {
if (uploadCancelService.isUploadCancelationRequested()) {
throw Error(CustomError.UPLOAD_CANCELLED);
}
let fileTypeInfo = null;
let metadata = null;
try {
@ -257,7 +288,12 @@ class UploadManager {
)} `
);
} catch (e) {
logError(e, 'extractFileTypeAndMetadata failed');
if (e.message === CustomError.UPLOAD_CANCELLED) {
throw e;
} else {
// and don't break for subsequent files just log and move on
logError(e, 'extractFileTypeAndMetadata failed');
}
addLogLine(
`metadata extraction failed ${getFileNameSize(
file
@ -271,7 +307,9 @@ class UploadManager {
UIService.increaseFileUploaded();
}
} catch (e) {
logError(e, 'error extracting metadata');
if (e.message !== CustomError.UPLOAD_CANCELLED) {
logError(e, 'error extracting metadata');
}
throw e;
}
}
@ -347,6 +385,9 @@ class UploadManager {
private async uploadNextFileInQueue(worker: any) {
while (this.filesToBeUploaded.length > 0) {
if (uploadCancelService.isUploadCancelationRequested()) {
throw Error(CustomError.UPLOAD_CANCELLED);
}
let fileWithCollection = this.filesToBeUploaded.pop();
const { collectionID } = fileWithCollection;
const collection = this.collections.get(collectionID);
@ -397,6 +438,9 @@ class UploadManager {
);
break;
case UPLOAD_RESULT.ALREADY_UPLOADED:
case UPLOAD_RESULT.UNSUPPORTED:
case UPLOAD_RESULT.TOO_LARGE:
case UPLOAD_RESULT.CANCELLED:
// no-op
break;
default:
@ -422,6 +466,11 @@ class UploadManager {
}
}
// Flip the UI into the CANCELLING stage and raise the shared cancelation
// flag; the upload loops poll that flag and abort at their next checkpoint.
public cancelRunningUpload() {
UIService.setUploadStage(UPLOAD_STAGES.CANCELLING);
uploadCancelService.requestUploadCancelation();
}
async getFailedFilesWithCollections() {
return {
files: this.failedFiles,

View file

@ -3,7 +3,6 @@ import { logError } from 'utils/sentry';
import UploadHttpClient from './uploadHttpClient';
import { extractFileMetadata, getFilename } from './fileService';
import { getFileType } from '../typeDetectionService';
import { handleUploadError } from 'utils/error';
import {
B64EncryptionResult,
BackupedFile,
@ -33,6 +32,7 @@ import { encryptFile, getFileSize, readFile } from './fileService';
import { uploadStreamUsingMultipart } from './multiPartUploadService';
import UIService from './uiService';
import { USE_CF_PROXY } from 'constants/upload';
import { CustomError, handleUploadError } from 'utils/error';
class UploadService {
private uploadURLs: UploadURL[] = [];
@ -185,7 +185,9 @@ class UploadService {
};
return backupedFile;
} catch (e) {
logError(e, 'error uploading to bucket');
if (e.message !== CustomError.UPLOAD_CANCELLED) {
logError(e, 'error uploading to bucket');
}
throw e;
}
}

View file

@ -12,6 +12,7 @@ import { addLocalLog, addLogLine } from 'utils/logging';
import { convertBytesToHumanReadable } from 'utils/file/size';
import { sleep } from 'utils/common';
import { addToCollection } from 'services/collectionService';
import uploadCancelService from './uploadCancelService';
interface UploadResponse {
fileUploadResult: UPLOAD_RESULT;
@ -82,6 +83,9 @@ export default async function uploader(
};
}
}
if (uploadCancelService.isUploadCancelationRequested()) {
throw Error(CustomError.UPLOAD_CANCELLED);
}
addLogLine(`reading asset ${fileNameSize}`);
@ -96,6 +100,9 @@ export default async function uploader(
thumbnail: file.thumbnail,
metadata,
};
if (uploadCancelService.isUploadCancelationRequested()) {
throw Error(CustomError.UPLOAD_CANCELLED);
}
addLogLine(`encryptAsset ${fileNameSize}`);
const encryptedFile = await UploadService.encryptAsset(
@ -104,6 +111,9 @@ export default async function uploader(
collection.key
);
if (uploadCancelService.isUploadCancelationRequested()) {
throw Error(CustomError.UPLOAD_CANCELLED);
}
addLogLine(`uploadToBucket ${fileNameSize}`);
const backupedFile: BackupedFile = await UploadService.uploadToBucket(
@ -130,12 +140,15 @@ export default async function uploader(
};
} catch (e) {
addLogLine(`upload failed for ${fileNameSize} ,error: ${e.message}`);
logError(e, 'file upload failed', {
fileFormat: fileTypeInfo?.exactType,
});
if (e.message !== CustomError.UPLOAD_CANCELLED) {
logError(e, 'file upload failed', {
fileFormat: fileTypeInfo?.exactType,
});
}
const error = handleUploadError(e);
switch (error.message) {
case CustomError.UPLOAD_CANCELLED:
return { fileUploadResult: UPLOAD_RESULT.CANCELLED };
case CustomError.ETAG_MISSING:
return { fileUploadResult: UPLOAD_RESULT.BLOCKED };
case CustomError.UNSUPPORTED_FILE_FORMAT:

View file

@ -45,6 +45,8 @@ export enum CustomError {
FILE_ID_NOT_FOUND = 'file with id not found',
WEAK_DEVICE = 'password decryption failed on the device',
INCORRECT_PASSWORD = 'incorrect password',
UPLOAD_CANCELLED = 'upload cancelled',
REQUEST_TIMEOUT = 'request taking too long',
}
function parseUploadErrorCodes(error) {
@ -81,6 +83,7 @@ export function handleUploadError(error): Error {
case CustomError.SUBSCRIPTION_EXPIRED:
case CustomError.STORAGE_QUOTA_EXCEEDED:
case CustomError.SESSION_EXPIRED:
case CustomError.UPLOAD_CANCELLED:
throw parsedError;
}
return parsedError;

View file

@ -110,8 +110,8 @@ const englishConstants = {
2: 'Reading file metadata',
3: (fileCounter) =>
`${fileCounter.finished} / ${fileCounter.total} files backed up`,
4: 'Backup complete',
5: 'Cancelling remaining uploads',
4: 'Cancelling remaining uploads',
5: 'Backup complete',
},
UPLOADING_FILES: 'File upload',
FILE_NOT_UPLOADED_LIST: 'The following files were not uploaded',