ente/src/services/deduplicationService.ts

import { EnteFile } from 'types/file';
import { getEndpoint } from 'utils/common/apiUtil';
import { getToken } from 'utils/common/key';
import { logError } from 'utils/sentry';
import { getLocalCollections } from './collectionService';
import { getLocalFiles } from './fileService';
import HTTPService from './HTTPService';
const ENDPOINT = getEndpoint();
interface DuplicatesResponse {
    duplicates: Array<{
        fileIDs: number[];
        size: number;
    }>;
}
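// Preference order for files within a duplicate group, keyed by the
// lower-cased collection name. Higher values sort first in
// sortDuplicateFiles (descending order); collections not listed here
// sort after these system albums.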
const DuplicateItemSortingOrderDesc = new Map(
    Object.entries({
        'icloud library': 0,
        icloudlibrary: 1,
        recents: 2,
        'recently added': 3,
        'my photo stream': 4,
    })
);
interface DuplicateFiles {
    files: EnteFile[];
    size: number;
}
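/**
 * Builds the list of duplicate file groups.
 *
 * Fetches duplicate file IDs (and the group size) from remote, resolves the
 * IDs against locally available files, sorts each group by collection
 * preference, and keeps only groups that still contain more than one local
 * file. Returns undefined if anything fails; the error is logged.
 */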
export async function getDuplicateFiles() {
    try {
        const dupes = await fetchDuplicateFileIDs();
        const localFiles = await getLocalFiles();
        const fileMap = new Map<number, EnteFile>();
        for (const file of localFiles) {
            fileMap.set(file.id, file);
        }
        const result: DuplicateFiles[] = [];

        for (const dupe of dupes) {
            let files: EnteFile[] = [];
            for (const fileID of dupe.fileIDs) {
                if (fileMap.has(fileID)) {
                    files.push(fileMap.get(fileID));
                }
            }
            files = await sortDuplicateFiles(files);
            if (files.length > 1) {
                result.push({
                    files,
                    size: dupe.size,
                });
            }
        }
        return result;
    } catch (e) {
        logError(e, 'failed to get duplicate files');
    }
}
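/**
 * Narrows each duplicate group down to the files that share the group's most
 * frequent creation time, dropping groups that are left with a single file.
 * Note that the reported size is carried over from the original group and is
 * not recomputed for the files that remain.
 */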
export function clubDuplicatesByTime(dupes: DuplicateFiles[]) {
    const result: DuplicateFiles[] = [];
    for (const dupe of dupes) {
        let files: EnteFile[] = [];
        const creationTimeCounter = new Map<number, number>();
        let mostFreqCreationTime = 0;
        let mostFreqCreationTimeCount = 0;
        for (const file of dupe.files) {
            const creationTime = file.metadata.creationTime;
            if (creationTimeCounter.has(creationTime)) {
                creationTimeCounter.set(
                    creationTime,
                    creationTimeCounter.get(creationTime) + 1
                );
            } else {
                creationTimeCounter.set(creationTime, 1);
            }
            if (
                creationTimeCounter.get(creationTime) >
                mostFreqCreationTimeCount
            ) {
                mostFreqCreationTime = creationTime;
                mostFreqCreationTimeCount =
                    creationTimeCounter.get(creationTime);
            }
            files.push(file);
        }
        files = files.filter((file) => {
            return file.metadata.creationTime === mostFreqCreationTime;
        });
        if (files.length > 1) {
            result.push({
                files,
                size: dupe.size,
            });
        }
    }
    return result;
}
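/**
 * Fetches the duplicate groups (file IDs plus their total size) from
 * GET /files/duplicates, authenticated with the user's token. Returns
 * undefined if the request fails; the error is logged.
 */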
async function fetchDuplicateFileIDs() {
    try {
        const response = await HTTPService.get(
            `${ENDPOINT}/files/duplicates`,
            null,
            {
                'X-Auth-Token': getToken(),
            }
        );
        return (response.data as DuplicatesResponse).duplicates;
    } catch (e) {
        logError(e, 'failed to fetch duplicate file IDs');
    }
}
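/**
 * Sorts the files of a duplicate group by the name of the collection they
 * belong to: collections with a higher value in DuplicateItemSortingOrderDesc
 * come first, and collections not in the map sort last.
 */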
async function sortDuplicateFiles(files: EnteFile[]) {
    const localCollections = await getLocalCollections();
    const collectionMap = new Map<number, string>();
    for (const collection of localCollections) {
        collectionMap.set(collection.id, collection.name);
    }
    return files.sort((firstFile, secondFile) => {
        const firstCollectionName = (
            collectionMap.get(firstFile.collectionID) ?? ''
        ).toLocaleLowerCase();
        const secondCollectionName = (
            collectionMap.get(secondFile.collectionID) ?? ''
        ).toLocaleLowerCase();
        // Map values have to be read with .get(); bracket indexing on a Map
        // always yields undefined. Collections absent from the priority map
        // fall back to -1 (assumed default) so they sort after the listed
        // system albums.
        return (
            (DuplicateItemSortingOrderDesc.get(secondCollectionName) ?? -1) -
            (DuplicateItemSortingOrderDesc.get(firstCollectionName) ?? -1)
        );
    });
}