Add API batching (#971)

Abhinav Kumar 2023-03-06 15:23:45 +05:30 committed by GitHub
commit 07c51dd5d0
5 changed files with 105 additions and 111 deletions
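
In brief: the ad-hoc accumulate-and-flush logic built around MAX_TRASH_BATCH_SIZE is replaced by a shared batch() helper in utils/common plus a single REQUEST_BATCH_SIZE constant, and the collection and trash endpoints now send their payloads in chunks of up to 1000 items. A condensed sketch of the pattern follows, as an editor's illustration rather than part of the diff; postChunk is a hypothetical stand-in for the HTTPService.post calls in the services.

const REQUEST_BATCH_SIZE = 1000;

// Same helper the commit adds to utils/common: split an array into fixed-size chunks.
function batch<T>(arr: T[], batchSize: number): T[][] {
    const batches: T[][] = [];
    for (let i = 0; i < arr.length; i += batchSize) {
        batches.push(arr.slice(i, i + batchSize));
    }
    return batches;
}

// Hypothetical stand-in for one HTTPService.post call against a batched endpoint.
async function postChunk(fileIDs: number[]): Promise<void> {
    // ...send up to REQUEST_BATCH_SIZE ids per request
}

async function removeFiles(fileIDs: number[]) {
    // Call sites now slice their input up front and issue one request per chunk,
    // instead of pushing into a buffer and flushing at a threshold.
    for (const chunk of batch(fileIDs, REQUEST_BATCH_SIZE)) {
        await postChunk(chunk);
    }
}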

constants/api (new file)

@@ -0,0 +1 @@
+export const REQUEST_BATCH_SIZE = 1000;

constants/file

@@ -3,7 +3,6 @@ export const MAX_EDITED_CREATION_TIME = new Date();
 export const MAX_EDITED_FILE_NAME_LENGTH = 100;
 export const MAX_CAPTION_SIZE = 5000;
-export const MAX_TRASH_BATCH_SIZE = 1000;
 export const TYPE_HEIC = 'heic';
 export const TYPE_HEIF = 'heif';

collectionService

@@ -66,6 +66,8 @@ import {
 } from 'utils/collection';
 import ComlinkCryptoWorker from 'utils/comlink/ComlinkCryptoWorker';
 import { getLocalFiles } from './fileService';
+import { REQUEST_BATCH_SIZE } from 'constants/api';
+import { batch } from 'utils/common';
 const ENDPOINT = getEndpoint();
 const COLLECTION_TABLE = 'collections';
@@ -405,21 +407,24 @@ export const addToCollection = async (
 ) => {
     try {
         const token = getToken();
-        const fileKeysEncryptedWithNewCollection =
-            await encryptWithNewCollectionKey(collection, files);
-        const requestBody: AddToCollectionRequest = {
-            collectionID: collection.id,
-            files: fileKeysEncryptedWithNewCollection,
-        };
-        await HTTPService.post(
-            `${ENDPOINT}/collections/add-files`,
-            requestBody,
-            null,
-            {
-                'X-Auth-Token': token,
-            }
-        );
+        const batchedFiles = batch(files, REQUEST_BATCH_SIZE);
+        for (const batch of batchedFiles) {
+            const fileKeysEncryptedWithNewCollection =
+                await encryptWithNewCollectionKey(collection, batch);
+            const requestBody: AddToCollectionRequest = {
+                collectionID: collection.id,
+                files: fileKeysEncryptedWithNewCollection,
+            };
+            await HTTPService.post(
+                `${ENDPOINT}/collections/add-files`,
+                requestBody,
+                null,
+                {
+                    'X-Auth-Token': token,
+                }
+            );
+        }
     } catch (e) {
         logError(e, 'Add to collection Failed ');
         throw e;
@@ -432,21 +437,24 @@ export const restoreToCollection = async (
 ) => {
     try {
         const token = getToken();
-        const fileKeysEncryptedWithNewCollection =
-            await encryptWithNewCollectionKey(collection, files);
-        const requestBody: AddToCollectionRequest = {
-            collectionID: collection.id,
-            files: fileKeysEncryptedWithNewCollection,
-        };
-        await HTTPService.post(
-            `${ENDPOINT}/collections/restore-files`,
-            requestBody,
-            null,
-            {
-                'X-Auth-Token': token,
-            }
-        );
+        const batchedFiles = batch(files, REQUEST_BATCH_SIZE);
+        for (const batch of batchedFiles) {
+            const fileKeysEncryptedWithNewCollection =
+                await encryptWithNewCollectionKey(collection, batch);
+            const requestBody: AddToCollectionRequest = {
+                collectionID: collection.id,
+                files: fileKeysEncryptedWithNewCollection,
+            };
+            await HTTPService.post(
+                `${ENDPOINT}/collections/restore-files`,
+                requestBody,
+                null,
+                {
+                    'X-Auth-Token': token,
+                }
+            );
+        }
     } catch (e) {
         logError(e, 'restore to collection Failed ');
         throw e;
@@ -459,22 +467,25 @@ export const moveToCollection = async (
 ) => {
     try {
         const token = getToken();
-        const fileKeysEncryptedWithNewCollection =
-            await encryptWithNewCollectionKey(toCollection, files);
-        const requestBody: MoveToCollectionRequest = {
-            fromCollectionID: fromCollectionID,
-            toCollectionID: toCollection.id,
-            files: fileKeysEncryptedWithNewCollection,
-        };
-        await HTTPService.post(
-            `${ENDPOINT}/collections/move-files`,
-            requestBody,
-            null,
-            {
-                'X-Auth-Token': token,
-            }
-        );
+        const batchedFiles = batch(files, REQUEST_BATCH_SIZE);
+        for (const batch of batchedFiles) {
+            const fileKeysEncryptedWithNewCollection =
+                await encryptWithNewCollectionKey(toCollection, batch);
+            const requestBody: MoveToCollectionRequest = {
+                fromCollectionID: fromCollectionID,
+                toCollectionID: toCollection.id,
+                files: fileKeysEncryptedWithNewCollection,
+            };
+            await HTTPService.post(
+                `${ENDPOINT}/collections/move-files`,
+                requestBody,
+                null,
+                {
+                    'X-Auth-Token': token,
+                }
+            );
+        }
     } catch (e) {
         logError(e, 'move to collection Failed ');
         throw e;
@@ -605,18 +616,20 @@ export const removeNonUserFiles = async (
     try {
         const fileIDs = nonUserFiles.map((f) => f.id);
         const token = getToken();
-        const request: RemoveFromCollectionRequest = {
-            collectionID,
-            fileIDs,
-        };
-        await HTTPService.post(
-            `${ENDPOINT}/collections/v3/remove-files`,
-            request,
-            null,
-            { 'X-Auth-Token': token }
-        );
+        const batchedFileIDs = batch(fileIDs, REQUEST_BATCH_SIZE);
+        for (const batch of batchedFileIDs) {
+            const request: RemoveFromCollectionRequest = {
+                collectionID,
+                fileIDs: batch,
+            };
+            await HTTPService.post(
+                `${ENDPOINT}/collections/v3/remove-files`,
+                request,
+                null,
+                { 'X-Auth-Token': token }
+            );
+        }
     } catch (e) {
         logError(e, 'remove non user files failed ');
         throw e;

fileService

@@ -14,7 +14,6 @@ import {
 import { eventBus, Events } from './events';
 import { EnteFile, EncryptedEnteFile, TrashRequest } from 'types/file';
 import { SetFiles } from 'types/gallery';
-import { MAX_TRASH_BATCH_SIZE } from 'constants/file';
 import { BulkUpdateMagicMetadataRequest } from 'types/magicMetadata';
 import { addLogLine } from 'utils/logging';
 import { isCollectionHidden } from 'utils/collection';
@@ -24,6 +23,8 @@ import {
     getCollectionLastSyncTime,
     setCollectionLastSyncTime,
 } from './collectionService';
+import { REQUEST_BATCH_SIZE } from 'constants/api';
+import { batch } from 'utils/common';
 const ENDPOINT = getEndpoint();
 const FILES_TABLE = 'files';
@@ -161,24 +162,22 @@ export const trashFiles = async (filesToTrash: EnteFile[]) => {
         if (!token) {
             return;
         }
-        const trashBatch: TrashRequest = {
-            items: [],
-        };
-        for (const file of filesToTrash) {
-            trashBatch.items.push({
-                collectionID: file.collectionID,
-                fileID: file.id,
-            });
-            if (trashBatch.items.length >= MAX_TRASH_BATCH_SIZE) {
-                await trashFilesFromServer(trashBatch, token);
-                trashBatch.items = [];
-            }
-        }
-        if (trashBatch.items.length > 0) {
-            await trashFilesFromServer(trashBatch, token);
+        const batchedFilesToTrash = batch(filesToTrash, REQUEST_BATCH_SIZE);
+        for (const batch of batchedFilesToTrash) {
+            const trashRequest: TrashRequest = {
+                items: batch.map((file) => ({
+                    fileID: file.id,
+                    collectionID: file.collectionID,
+                })),
+            };
+            await HTTPService.post(
+                `${ENDPOINT}/files/trash`,
+                trashRequest,
+                null,
+                {
+                    'X-Auth-Token': token,
+                }
+            );
         }
     } catch (e) {
         logError(e, 'trash file failed');
@@ -192,16 +191,17 @@ export const deleteFromTrash = async (filesToDelete: number[]) => {
         if (!token) {
             return;
         }
-        let deleteBatch: number[] = [];
-        for (const fileID of filesToDelete) {
-            deleteBatch.push(fileID);
-            if (deleteBatch.length >= MAX_TRASH_BATCH_SIZE) {
-                await deleteBatchFromTrash(token, deleteBatch);
-                deleteBatch = [];
-            }
-        }
-        if (deleteBatch.length > 0) {
-            await deleteBatchFromTrash(token, deleteBatch);
+        const batchedFilesToDelete = batch(filesToDelete, REQUEST_BATCH_SIZE);
+        for (const batch of batchedFilesToDelete) {
+            await HTTPService.post(
+                `${ENDPOINT}/trash/delete`,
+                { fileIDs: batch },
+                null,
+                {
+                    'X-Auth-Token': token,
+                }
+            );
         }
     } catch (e) {
         logError(e, 'deleteFromTrash failed');
@@ -209,22 +209,6 @@ export const deleteFromTrash = async (filesToDelete: number[]) => {
     }
 };
-const deleteBatchFromTrash = async (token: string, deleteBatch: number[]) => {
-    try {
-        await HTTPService.post(
-            `${ENDPOINT}/trash/delete`,
-            { fileIDs: deleteBatch },
-            null,
-            {
-                'X-Auth-Token': token,
-            }
-        );
-    } catch (e) {
-        logError(e, 'deleteBatchFromTrash failed');
-        throw e;
-    }
-};
 export const updateFileMagicMetadata = async (files: EnteFile[]) => {
     const token = getToken();
     if (!token) {
@@ -303,14 +287,3 @@ export const updateFilePublicMagicMetadata = async (files: EnteFile[]) => {
         })
     );
 };
-async function trashFilesFromServer(trashBatch: TrashRequest, token: any) {
-    try {
-        await HTTPService.post(`${ENDPOINT}/files/trash`, trashBatch, null, {
-            'X-Auth-Token': token,
-        });
-    } catch (e) {
-        logError(e, 'trash files from server failed');
-        throw e;
-    }
-}

utils/common

@@ -125,3 +125,11 @@ function isPromise(p: any) {
 export function isClipboardItemPresent() {
     return typeof ClipboardItem !== 'undefined';
 }
+
+export function batch<T>(arr: T[], batchSize: number): T[][] {
+    const batches: T[][] = [];
+    for (let i = 0; i < arr.length; i += batchSize) {
+        batches.push(arr.slice(i, i + batchSize));
+    }
+    return batches;
+}
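
For reference, a quick usage note on the new helper (editor's example, not part of the diff):

batch([1, 2, 3, 4, 5], 2); // => [[1, 2], [3, 4], [5]]
batch([], 2);              // => []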