Use IndexedDB directly, without localForage, to make use of indexes

Shailesh Pandit 2022-01-04 11:26:16 +05:30
parent fa7453323d
commit 4cb552046a
11 changed files with 409 additions and 103 deletions

View file

@ -45,6 +45,7 @@
"hdbscanjs": "^1.0.12",
"heic-convert": "^1.2.4",
"http-proxy-middleware": "^1.0.5",
"idb": "^7.0.0",
"is-electron": "^2.2.0",
"jpeg-js": "^0.4.3",
"jszip": "3.7.1",

View file

@ -7,13 +7,14 @@ import { PAGES } from 'types';
import * as Comlink from 'comlink';
import { runningInBrowser } from 'utils/common';
import TFJSImage from './TFJSImage';
import { Face, MLDebugResult, MLLibraryData } from 'types/machineLearning';
import { Face, MLDebugResult } from 'types/machineLearning';
import Tree from 'react-d3-tree';
import MLFileDebugView from './MLFileDebugView';
import mlWorkManager from 'services/machineLearning/mlWorkManager';
import { getAllFacesMap, mlLibraryStore } from 'utils/storage/mlStorage';
// import { getAllFacesMap, mlLibraryStore } from 'utils/storage/mlStorage';
import { getAllFacesFromMap } from 'utils/machineLearning';
import { FaceImagesRow, ImageBlobView } from './ImageViews';
import mlIDbStorage from 'utils/storage/mlIDbStorage';
interface TSNEProps {
mlResult: MLDebugResult;
@ -170,10 +171,12 @@ export default function MLDebug() {
};
const onLoadNoiseFaces = async () => {
const mlLibraryData = await mlLibraryStore.getItem<MLLibraryData>(
'data'
);
const allFacesMap = await getAllFacesMap();
// const mlLibraryData = await mlLibraryStore.getItem<MLLibraryData>(
// 'data'
// );
const mlLibraryData = await mlIDbStorage.getLibraryData();
const allFacesMap = await mlIDbStorage.getAllFacesMap();
const allFaces = getAllFacesFromMap(allFacesMap);
const noiseFaces = mlLibraryData?.faceClusteringResults?.noise
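
The debug page now reads the library data and the per-file faces map straight from the new IndexedDB wrapper, then flattens that map with getAllFacesFromMap. That helper is not part of this diff; a plausible shape for it, assuming it simply flattens Map<number, Face[]> into one array, would be:

import { Face } from 'types/machineLearning';

// Assumed implementation of getAllFacesFromMap (the real helper lives in
// utils/machineLearning and is unchanged by this commit).
export function getAllFacesFromMap(allFacesMap: Map<number, Face[]>): Face[] {
    return [...allFacesMap.values()]
        .filter((faces) => faces?.length > 0)
        .flat();
}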

View file

@ -94,7 +94,7 @@ export class LocalMLSyncContext implements MLSyncContext {
public faceEmbeddingService: FaceEmbeddingService;
public faceClusteringService: ClusteringService;
public localFiles: Promise<File[]>;
public localFilesMap: Map<number, File>;
public outOfSyncFiles: File[];
public syncedFiles: File[];
public syncedFaces: Face[];
@ -167,7 +167,7 @@ export class LocalMLSyncContext implements MLSyncContext {
// await this.faceDetectionService.dispose();
// await this.faceEmbeddingService.dispose();
this.localFiles = undefined;
this.localFilesMap = undefined;
await this.syncQueue.onIdle();
this.syncQueue.removeAllListeners();
for (const enteComlinkWorker of this.enteComlinkWorkers) {

View file

@ -9,7 +9,6 @@ import '@tensorflow/tfjs-backend-cpu';
import {
Face,
MlFileData,
MLLibraryData,
MLSyncConfig,
MLSyncContext,
MLSyncFileContext,
@ -18,16 +17,10 @@ import {
} from 'types/machineLearning';
import { toTSNE } from 'utils/machineLearning/visualization';
import {
getIndexVersion,
incrementIndexVersion,
mlFilesStore,
mlPeopleStore,
mlLibraryStore,
newMlData,
setIndexVersion,
getAllFacesMap,
} from 'utils/storage/mlStorage';
// import {
// incrementIndexVersion,
// mlFilesStore
// } from 'utils/storage/mlStorage';
import {
findFirstIfSorted,
getAllFacesFromMap,
@ -39,6 +32,7 @@ import {
isDifferentOrOld,
} from 'utils/machineLearning';
import { MLFactory } from './machineLearningFactory';
import mlIDbStorage from 'utils/storage/mlIDbStorage';
class MachineLearningService {
private initialized = false;
@ -77,7 +71,7 @@ class MachineLearningService {
true
);
await this.syncRemovedFiles(syncContext);
await this.syncLocalFiles(syncContext);
await this.getOutOfSyncFiles(syncContext);
@ -112,47 +106,105 @@ class MachineLearningService {
return mlSyncResult;
}
private async getMLFileVersion(file: File) {
const mlFileData: MlFileData = await mlFilesStore.getItem(
file.id.toString()
);
return mlFileData && mlFileData.mlVersion;
private newMlData(fileId: number) {
return {
fileId,
mlVersion: 0,
errorCount: 0,
} as MlFileData;
}
private async getLocalFiles(syncContext: MLSyncContext) {
if (!syncContext.localFiles) {
syncContext.localFiles = getLocalFiles();
private async getLocalFilesMap(syncContext: MLSyncContext) {
if (!syncContext.localFilesMap) {
const localFiles = await getLocalFiles();
syncContext.localFilesMap = new Map<number, File>();
localFiles.forEach((f) => syncContext.localFilesMap.set(f.id, f));
}
return syncContext.localFiles;
return syncContext.localFilesMap;
}
private async syncLocalFiles(syncContext: MLSyncContext) {
console.time('syncLocalFiles');
const localFilesMap = await this.getLocalFilesMap(syncContext);
const db = await mlIDbStorage.db;
const tx = db.transaction('files', 'readwrite');
const mlFileIdsArr = await mlIDbStorage.getAllFileIdsForUpdate(tx);
const mlFileIds = new Set<number>();
mlFileIdsArr.forEach((mlFileId) => mlFileIds.add(mlFileId));
const newFileIds: Array<number> = [];
for (const localFileId of localFilesMap.keys()) {
if (!mlFileIds.has(localFileId)) {
newFileIds.push(localFileId);
}
}
if (newFileIds.length > 0) {
console.log('newFiles: ', newFileIds.length);
const newFiles = newFileIds.map((fileId) => this.newMlData(fileId));
await mlIDbStorage.putAllFiles(newFiles, tx);
}
const removedFileIds: Array<number> = [];
for (const mlFileId of mlFileIds) {
if (!localFilesMap.has(mlFileId)) {
removedFileIds.push(mlFileId);
}
}
if (removedFileIds.length > 0) {
console.log('removedFiles: ', removedFileIds.length);
await mlIDbStorage.removeAllFiles(removedFileIds, tx);
}
await tx.done;
console.timeEnd('syncLocalFiles');
}
// TODO: not required if ml data is stored as field inside ente file object
// it removes ml data for files in trash, they will be resynced if restored
private async syncRemovedFiles(syncContext: MLSyncContext) {
const localFiles = await this.getLocalFiles(syncContext);
const localFileIdMap = new Map<number, boolean>();
localFiles.forEach((f) => localFileIdMap.set(f.id, true));
// private async syncRemovedFiles(syncContext: MLSyncContext) {
// const db = await mlIDbStorage.db;
// const localFileIdMap = await this.getLocalFilesMap(syncContext);
const removedFileIds: Array<string> = [];
await mlFilesStore.iterate((file, idStr) => {
if (!localFileIdMap.has(parseInt(idStr))) {
removedFileIds.push(idStr);
}
});
// const removedFileIds: Array<string> = [];
// await mlFilesStore.iterate((file, idStr) => {
// if (!localFileIdMap.has(parseInt(idStr))) {
// removedFileIds.push(idStr);
// }
// });
if (removedFileIds.length < 1) {
return;
}
// if (removedFileIds.length < 1) {
// return;
// }
removedFileIds.forEach((fileId) => mlFilesStore.removeItem(fileId));
console.log('Removed local file ids: ', removedFileIds);
// removedFileIds.forEach((fileId) => mlFilesStore.removeItem(fileId));
// console.log('Removed local file ids: ', removedFileIds);
await incrementIndexVersion('files');
// await incrementIndexVersion('files');
// }
private async getOutOfSyncFiles(syncContext: MLSyncContext) {
console.time('getOutOfSyncFiles');
const fileIds = await mlIDbStorage.getFileIds(
syncContext.config.batchSize,
syncContext.config.mlVersion,
2
);
console.log('fileIds: ', fileIds);
const localFilesMap = await this.getLocalFilesMap(syncContext);
syncContext.outOfSyncFiles = fileIds.map((fileId) =>
localFilesMap.get(fileId)
);
console.timeEnd('getOutOfSyncFiles');
}
// TODO: optimize, use indexdb indexes, move facecrops to cache to reduce io
private async getUniqueOutOfSyncFiles(
private async getUniqueOutOfSyncFilesNoIdx(
syncContext: MLSyncContext,
files: File[]
) {
@ -160,7 +212,7 @@ class MachineLearningService {
const mlVersion = syncContext.config.mlVersion;
const uniqueFiles: Map<number, File> = new Map<number, File>();
for (let i = 0; uniqueFiles.size < limit && i < files.length; i++) {
const mlFileData = await this.getMLFileData(files[i].id.toString());
const mlFileData = await this.getMLFileData(files[i].id);
const mlFileVersion = mlFileData?.mlVersion || 0;
if (
!uniqueFiles.has(files[i].id) &&
@ -175,15 +227,15 @@ class MachineLearningService {
return [...uniqueFiles.values()];
}
private async getOutOfSyncFiles(syncContext: MLSyncContext) {
const existingFiles = await this.getLocalFiles(syncContext);
private async getOutOfSyncFilesNoIdx(syncContext: MLSyncContext) {
const existingFilesMap = await this.getLocalFilesMap(syncContext);
// existingFiles.sort(
// (a, b) => b.metadata.creationTime - a.metadata.creationTime
// );
console.time('getUniqueOutOfSyncFiles');
syncContext.outOfSyncFiles = await this.getUniqueOutOfSyncFiles(
syncContext.outOfSyncFiles = await this.getUniqueOutOfSyncFilesNoIdx(
syncContext,
existingFiles
[...existingFilesMap.values()]
);
console.timeEnd('getUniqueOutOfSyncFiles');
console.log(
@ -225,7 +277,7 @@ class MachineLearningService {
await syncContext.syncQueue.onIdle();
console.log('allFaces: ', syncContext.syncedFaces);
await incrementIndexVersion('files');
await mlIDbStorage.incrementIndexVersion('files');
// await this.disposeMLModels();
}
@ -263,11 +315,9 @@ class MachineLearningService {
localFile?: globalThis.File
) {
const fileContext: MLSyncFileContext = { enteFile, localFile };
fileContext.oldMLFileData = await this.getMLFileData(
enteFile.id.toString()
);
fileContext.oldMLFileData = await this.getMLFileData(enteFile.id);
if (!fileContext.oldMLFileData) {
fileContext.newMLFileData = newMlData(syncContext, enteFile);
fileContext.newMLFileData = this.newMlData(enteFile.id);
} else if (
fileContext.oldMLFileData?.mlVersion ===
syncContext.config.mlVersion &&
@ -276,6 +326,7 @@ class MachineLearningService {
) {
return fileContext.oldMLFileData;
} else {
// TODO: let rest of sync populate new file data correctly
fileContext.newMLFileData = { ...fileContext.oldMLFileData };
fileContext.newMLFileData.imageSource =
syncContext.config.imageSource;
@ -367,6 +418,10 @@ class MachineLearningService {
fileContext.oldMLFileData?.imageSource !==
syncContext.config.imageSource
) {
fileContext.newMLFileData.detectionMethod =
syncContext.faceDetectionService.method;
fileContext.newMLFileData.imageSource =
syncContext.config.imageSource;
fileContext.newDetection = true;
await this.getImageBitmap(syncContext, fileContext);
const detectedFaces =
@ -400,6 +455,8 @@ class MachineLearningService {
) {
return;
}
fileContext.newMLFileData.faceCropMethod =
syncContext.faceCropService.method;
for (const face of fileContext.filtertedFaces) {
face.faceCrop = await syncContext.faceCropService.getFaceCrop(
@ -421,6 +478,8 @@ class MachineLearningService {
syncContext.faceAlignmentService.method
)
) {
fileContext.newMLFileData.alignmentMethod =
syncContext.faceAlignmentService.method;
fileContext.newAlignment = true;
fileContext.alignedFaces =
syncContext.faceAlignmentService.getAlignedFaces(
@ -447,6 +506,8 @@ class MachineLearningService {
syncContext.faceEmbeddingService.method
)
) {
fileContext.newMLFileData.embeddingMethod =
syncContext.faceEmbeddingService.method;
// TODO: when not storing face crops image will be needed to extract faces
// fileContext.imageBitmap ||
// (await this.getImageBitmap(syncContext, fileContext));
@ -495,15 +556,17 @@ class MachineLearningService {
// console.log('13 TF Memory stats: ', tf.memory());
}
private async getMLFileData(fileId: string) {
return mlFilesStore.getItem<MlFileData>(fileId);
private async getMLFileData(fileId: number) {
// return mlFilesStore.getItem<MlFileData>(fileId);
return mlIDbStorage.getFile(fileId);
}
private async persistMLFileData(
syncContext: MLSyncContext,
mlFileData: MlFileData
) {
return mlFilesStore.setItem(mlFileData.fileId.toString(), mlFileData);
// return mlFilesStore.setItem(mlFileData.fileId.toString(), mlFileData);
mlIDbStorage.putFile(mlFileData);
}
private async persistMLFileSyncError(
@ -512,19 +575,14 @@ class MachineLearningService {
e: Error
) {
try {
const oldMlFileData = await this.getMLFileData(
enteFile.id.toString()
);
const oldMlFileData = await this.getMLFileData(enteFile.id);
let mlFileData = oldMlFileData;
if (!mlFileData) {
mlFileData = newMlData(syncContext, enteFile);
mlFileData = this.newMlData(enteFile.id);
}
mlFileData.errorCount = (mlFileData.errorCount || 0) + 1;
mlFileData.lastErrorMessage = e.message;
return mlFilesStore.setItem(
mlFileData.fileId.toString(),
mlFileData
);
return this.persistMLFileData(syncContext, mlFileData);
} catch (e) {
// TODO: logError or stop sync job after most of the requests are failed
console.error('Error while storing ml sync error', e);
@ -532,16 +590,15 @@ class MachineLearningService {
}
private async getMLLibraryData(syncContext: MLSyncContext) {
syncContext.mlLibraryData = await mlLibraryStore.getItem<MLLibraryData>(
'data'
);
syncContext.mlLibraryData = await mlIDbStorage.getLibraryData();
if (!syncContext.mlLibraryData) {
syncContext.mlLibraryData = {};
}
}
private async persistMLLibraryData(syncContext: MLSyncContext) {
return mlLibraryStore.setItem('data', syncContext.mlLibraryData);
// return mlLibraryStore.setItem('data', syncContext.mlLibraryData);
return mlIDbStorage.putLibraryData(syncContext.mlLibraryData);
}
public async syncIndex(syncContext: MLSyncContext) {
@ -554,9 +611,9 @@ class MachineLearningService {
}
private async syncPeopleIndex(syncContext: MLSyncContext) {
const filesVersion = await getIndexVersion('files');
const filesVersion = await mlIDbStorage.getIndexVersion('files');
if (
filesVersion <= (await getIndexVersion('people')) &&
filesVersion <= (await mlIDbStorage.getIndexVersion('people')) &&
!isDifferentOrOld(
syncContext.mlLibraryData?.faceClusteringMethod,
syncContext.faceClusteringService.method
@ -577,7 +634,7 @@ class MachineLearningService {
await this.runFaceClustering(syncContext, allFaces);
await this.syncPeopleFromClusters(syncContext, allFacesMap, allFaces);
await setIndexVersion('people', filesVersion);
await mlIDbStorage.setIndexVersion('people', filesVersion);
}
private async getAllSyncedFacesMap(syncContext: MLSyncContext) {
@ -585,7 +642,7 @@ class MachineLearningService {
return syncContext.allSyncedFacesMap;
}
syncContext.allSyncedFacesMap = await getAllFacesMap();
syncContext.allSyncedFacesMap = await mlIDbStorage.getAllFacesMap();
return syncContext.allSyncedFacesMap;
}
@ -639,7 +696,7 @@ class MachineLearningService {
return;
}
await mlPeopleStore.clear();
await mlIDbStorage.clearAllPeople();
for (const [index, cluster] of clusters.entries()) {
const faces = cluster.map((f) => allFaces[f]).filter((f) => f);
@ -659,7 +716,7 @@ class MachineLearningService {
faceImage,
};
await mlPeopleStore.setItem(person.id.toString(), person);
await mlIDbStorage.putPerson(person);
faces.forEach((face) => {
face.personId = person.id;
@ -667,10 +724,7 @@ class MachineLearningService {
// console.log("Creating person: ", person, faces);
}
await mlFilesStore.iterate((mlFileData: MlFileData, key) => {
mlFileData.faces = allFacesMap.get(mlFileData.fileId);
mlFilesStore.setItem(key, mlFileData);
});
await mlIDbStorage.updateFaces(allFacesMap);
}
private async runTSNE(syncContext: MLSyncContext) {
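
One thing worth noting in syncLocalFiles above: the id diffing (new files get a stub record, stale ids are deleted) runs inside a single 'readwrite' transaction, and an IndexedDB transaction only stays open while requests on it are pending, so the set computations have to stay synchronous between the reads and the writes. A self-contained sketch of the same pattern on a throwaway database (names here are illustrative):

import { openDB, DBSchema } from 'idb';

interface Db extends DBSchema {
    files: {
        key: number;
        value: { fileId: number; mlVersion: number; errorCount: number };
    };
}

// Reconcile the indexed file ids with the current local ids in one transaction.
async function reconcile(localIds: Set<number>) {
    const db = await openDB<Db>('mldata-demo', 1, {
        upgrade(db) {
            db.createObjectStore('files', { keyPath: 'fileId' });
        },
    });
    const tx = db.transaction('files', 'readwrite');
    const indexed = new Set(await tx.store.getAllKeys());
    // stub records for files we have not seen yet, deletions for files that are gone
    const puts = [...localIds]
        .filter((id) => !indexed.has(id))
        .map((fileId) => tx.store.put({ fileId, mlVersion: 0, errorCount: 0 }));
    const dels = [...indexed]
        .filter((id) => !localIds.has(id))
        .map((id) => tx.store.delete(id));
    await Promise.all([...puts, ...dels]);
    await tx.done; // commit
}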

View file

@ -32,12 +32,14 @@ class MLSyncJob {
console.log('Running ML Sync');
try {
const results = await mlWorker.sync(token);
const mlSyncConfig = await this.mlSyncConfig;
if (results.nOutOfSyncFiles < 1) {
const mlSyncConfig = await this.mlSyncConfig;
this.intervalSec = Math.min(
mlSyncConfig.maxSyncIntervalSec,
this.intervalSec * 2
);
} else {
this.intervalSec = mlSyncConfig.syncIntervalSec;
}
console.log('Ran machine learning sync from worker', results);
} catch (e) {
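
The other change in this file is the retry cadence: when a sync finds nothing to do the interval doubles (capped at maxSyncIntervalSec), and as soon as a run does process files it snaps back to the base syncIntervalSec. A tiny sketch of that rule, with placeholder config values (the real ones come from mlSyncConfig and are not shown in this diff):

// next interval in seconds, given the current one and how much work the last run found
function nextIntervalSec(
    current: number,
    nOutOfSyncFiles: number,
    config = { syncIntervalSec: 5 * 60, maxSyncIntervalSec: 4 * 60 * 60 } // assumed values
) {
    return nOutOfSyncFiles < 1
        ? Math.min(config.maxSyncIntervalSec, current * 2) // idle: back off
        : config.syncIntervalSec; // did work: return to the base cadence
}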

View file

@ -7,7 +7,7 @@ import { getToken } from 'utils/common/key';
import { getDedicatedMLWorker } from 'utils/machineLearning/worker';
import { logError } from 'utils/sentry';
import { getData, LS_KEYS } from 'utils/storage/localStorage';
import { clearMLStorage } from 'utils/storage/mlStorage';
import mlIDbStorage from 'utils/storage/mlIDbStorage';
import MLSyncJob from './mlSyncJob';
class MLWorkManager {
@ -48,7 +48,7 @@ class MLWorkManager {
private async logoutHandler() {
try {
await this.stopSyncJob();
await clearMLStorage();
await mlIDbStorage.clearMLDB();
} catch (e) {
logError(e, 'Failed in ML logout Handler');
}

View file

@ -164,14 +164,14 @@ export interface Person {
export interface MlFileData {
fileId: number;
faces?: Face[];
imageSource: ImageType;
imageSource?: ImageType;
imageDimentions?: Dimensions;
detectionMethod: Versioned<FaceDetectionMethod>;
faceCropMethod: Versioned<FaceCropMethod>;
alignmentMethod: Versioned<FaceAlignmentMethod>;
embeddingMethod: Versioned<FaceEmbeddingMethod>;
detectionMethod?: Versioned<FaceDetectionMethod>;
faceCropMethod?: Versioned<FaceCropMethod>;
alignmentMethod?: Versioned<FaceAlignmentMethod>;
embeddingMethod?: Versioned<FaceEmbeddingMethod>;
mlVersion: number;
errorCount?: number;
errorCount: number;
lastErrorMessage?: string;
}
@ -240,7 +240,7 @@ export interface MLSyncContext {
faceEmbeddingService: FaceEmbeddingService;
faceClusteringService: ClusteringService;
localFiles: Promise<File[]>;
localFilesMap: Map<number, File>;
outOfSyncFiles: File[];
syncedFiles: File[];
syncedFaces: Face[];
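
Making the per-stage fields optional (and errorCount required) lets a file be recorded in the 'files' store before any ML pass has run; the sync then fills in detection, crop, alignment and embedding data as each stage completes. For example, a freshly discovered file now type-checks as:

import { MlFileData } from 'types/machineLearning';

// minimal record for a file that has not been processed yet (illustrative fileId)
const fresh: MlFileData = { fileId: 42, mlVersion: 0, errorCount: 0 };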

View file

@ -12,12 +12,13 @@ import {
Versioned,
} from 'types/machineLearning';
import { ibExtractFaceImage } from './faceAlign';
import { mlFilesStore, mlPeopleStore } from 'utils/storage/mlStorage';
// import { mlFilesStore, mlPeopleStore } from 'utils/storage/mlStorage';
import { convertForPreview, needsConversionForPreview } from 'utils/file';
import { cached } from 'utils/storage/cache';
import { imageBitmapToBlob } from 'utils/image';
import { NormalizedFace } from '@tensorflow-models/blazeface';
import PQueue from 'p-queue';
import mlIDbStorage from 'utils/storage/mlIDbStorage';
export function f32Average(descriptors: Float32Array[]) {
if (descriptors.length < 1) {
@ -300,9 +301,7 @@ export async function getLocalFileImageBitmap(localFile: globalThis.File) {
export async function getPeopleList(file: File): Promise<Array<Person>> {
console.time('getPeopleList:mlFilesStore:getItem');
const mlFileData: MlFileData = await mlFilesStore.getItem(
file.id.toString()
);
const mlFileData: MlFileData = await mlIDbStorage.getFile(file.id);
console.timeEnd('getPeopleList:mlFilesStore:getItem');
if (!mlFileData?.faces || mlFileData.faces.length < 1) {
return [];
@ -317,7 +316,7 @@ export async function getPeopleList(file: File): Promise<Array<Person>> {
// console.log("peopleIds: ", peopleIds);
console.time('getPeopleList:mlPeopleStore:getItems');
const peoplePromises = peopleIds.map(
(p) => mlPeopleStore.getItem(p.toString()) as Promise<Person>
(p) => mlIDbStorage.getPerson(p) as Promise<Person>
);
const peopleList = await Promise.all(peoplePromises);
console.timeEnd('getPeopleList:mlPeopleStore:getItems');
@ -329,9 +328,7 @@ export async function getPeopleList(file: File): Promise<Array<Person>> {
export async function getUnidentifiedFaces(
file: File
): Promise<Array<FaceImageBlob>> {
const mlFileData: MlFileData = await mlFilesStore.getItem(
file.id.toString()
);
const mlFileData: MlFileData = await mlIDbStorage.getFile(file.id);
const faceImages = mlFileData?.faces
?.filter((f) => f.personId === null || f.personId === undefined)
@ -342,10 +339,10 @@ export async function getUnidentifiedFaces(
}
export async function getAllPeople() {
const people: Array<Person> = [];
await mlPeopleStore.iterate<Person, void>((person) => {
people.push(person);
});
const people: Array<Person> = await mlIDbStorage.getAllPeople();
// await mlPeopleStore.iterate<Person, void>((person) => {
// people.push(person);
// });
return people.sort((p1, p2) => p2.files.length - p1.files.length);
}
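
getPeopleList, getUnidentifiedFaces and getAllPeople keep their signatures; only the backing store changes, so callers are untouched. A sketch of a consumer of getAllPeople (the summary shape is made up for illustration, and the import path is assumed from the other imports in this commit):

import { getAllPeople } from 'utils/machineLearning';

// people come back already sorted by how many files they appear in
async function peopleSummary() {
    const people = await getAllPeople();
    return people.map((p) => ({ id: p.id, fileCount: p.files.length }));
}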

View file

@ -0,0 +1,243 @@
import {
openDB,
deleteDB,
DBSchema,
IDBPDatabase,
IDBPTransaction,
StoreNames,
} from 'idb';
import { Face, MlFileData, MLLibraryData, Person } from 'types/machineLearning';
import { runningInBrowser } from 'utils/common';
import { mlFilesStore } from './mlStorage';
interface MLDb extends DBSchema {
files: {
key: number;
value: MlFileData;
indexes: { mlVersion: [number, number] };
};
people: {
key: number;
value: Person;
};
versions: {
key: string;
value: number;
};
library: {
key: string;
value: MLLibraryData;
};
}
class MLIDbStorage {
public db: Promise<IDBPDatabase<MLDb>>;
constructor() {
if (!runningInBrowser()) {
return;
}
this.db = openDB<MLDb>('mldata', 1, {
upgrade(db) {
const filesStore = db.createObjectStore('files', {
keyPath: 'fileId',
});
filesStore.createIndex('mlVersion', [
'mlVersion',
'errorCount',
]);
db.createObjectStore('people', {
keyPath: 'id',
});
db.createObjectStore('versions');
db.createObjectStore('library');
// TODO: for migrating existing data, to be removed
mlFilesStore.iterate((mlFileData: MlFileData) => {
mlFileData.errorCount = mlFileData.errorCount || 0;
db.put('files', mlFileData);
});
db.put('versions', 1, 'files');
},
});
}
public async clearMLDB() {
const db = await this.db;
db.close();
return deleteDB('mldata');
}
public async getAllFileIds1() {
const db = await this.db;
return db.getAllKeys('files');
}
public async putAllFiles1(mlFiles: Array<MlFileData>) {
const db = await this.db;
const tx = db.transaction('files', 'readwrite');
await Promise.all(mlFiles.map((mlFile) => tx.store.put(mlFile)));
await tx.done;
}
public async removeAllFiles1(fileIds: Array<number>) {
const db = await this.db;
const tx = db.transaction('files', 'readwrite');
await Promise.all(fileIds.map((fileId) => tx.store.delete(fileId)));
await tx.done;
}
public async newTransaction<
Name extends StoreNames<MLDb>,
Mode extends IDBTransactionMode = 'readonly'
>(storeNames: Name, mode?: Mode) {
const db = await this.db;
return db.transaction(storeNames, mode);
}
public async commit(tx: IDBPTransaction<MLDb>) {
return tx.done;
}
public async getAllFileIdsForUpdate(
tx: IDBPTransaction<MLDb, ['files'], 'readwrite'>
) {
return tx.store.getAllKeys();
}
public async getFileIds(
count: number,
limitMlVersion: number,
maxErrorCount: number
) {
const db = await this.db;
const tx = db.transaction('files', 'readonly');
const index = tx.store.index('mlVersion');
let cursor = await index.openKeyCursor(
IDBKeyRange.upperBound([limitMlVersion], true)
);
const fileIds: number[] = [];
while (cursor && fileIds.length < count) {
if (
cursor.key[0] < limitMlVersion &&
cursor.key[1] <= maxErrorCount
) {
fileIds.push(cursor.primaryKey);
}
cursor = await cursor.continue();
}
await tx.done;
return fileIds;
}
public async getFile(fileId: number) {
const db = await this.db;
return db.get('files', fileId);
}
public async putFile(mlFile: MlFileData) {
const db = await this.db;
return db.put('files', mlFile);
}
public async putAllFiles(
mlFiles: Array<MlFileData>,
tx: IDBPTransaction<MLDb, ['files'], 'readwrite'>
) {
await Promise.all(mlFiles.map((mlFile) => tx.store.put(mlFile)));
}
public async removeAllFiles(
fileIds: Array<number>,
tx: IDBPTransaction<MLDb, ['files'], 'readwrite'>
) {
await Promise.all(fileIds.map((fileId) => tx.store.delete(fileId)));
}
public async getAllFacesMap() {
console.time('getAllFacesMap');
const db = await this.db;
const allFiles = await db.getAll('files');
const allFacesMap = new Map<number, Array<Face>>();
allFiles.forEach((mlFileData) =>
allFacesMap.set(mlFileData.fileId, mlFileData.faces)
);
console.timeEnd('getAllFacesMap');
return allFacesMap;
}
public async updateFaces(allFacesMap: Map<number, Face[]>) {
console.time('updateFaces');
const db = await this.db;
const tx = db.transaction('files', 'readwrite');
let cursor = await tx.store.openCursor();
while (cursor) {
const mlFileData = { ...cursor.value };
mlFileData.faces = allFacesMap.get(cursor.key);
cursor.update(mlFileData);
cursor = await cursor.continue();
}
await tx.done;
console.timeEnd('updateFaces');
}
public async getPerson(id: number) {
const db = await this.db;
return db.get('people', id);
}
public async getAllPeople() {
const db = await this.db;
return db.getAll('people');
}
public async putPerson(person: Person) {
const db = await this.db;
return db.put('people', person);
}
public async clearAllPeople() {
const db = await this.db;
return db.clear('people');
}
public async getIndexVersion(index: string) {
const db = await this.db;
return db.get('versions', index);
}
public async incrementIndexVersion(index: string) {
const db = await this.db;
const tx = db.transaction('versions', 'readwrite');
let version = await tx.store.get(index);
version = (version || 0) + 1;
tx.store.put(version, index);
await tx.done;
return version;
}
public async setIndexVersion(index: string, version: number) {
const db = await this.db;
return db.put('versions', version, index);
}
public async getLibraryData() {
const db = await this.db;
return db.get('library', 'data');
}
public async putLibraryData(data: MLLibraryData) {
const db = await this.db;
return db.put('library', data, 'data');
}
}
export default new MLIDbStorage();
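
The most interesting part of the new wrapper is getFileIds, which is what makes "use indexes" in the commit title pay off: instead of checking files one by one (the older getOutOfSyncFilesNoIdx path), it opens a key cursor on the compound ['mlVersion', 'errorCount'] index. Because IndexedDB compares array keys element-wise and a shorter prefix sorts first, IDBKeyRange.upperBound([limitMlVersion], true) already restricts the cursor to entries with mlVersion < limitMlVersion; the loop then only skips entries whose errorCount exceeds the threshold, and it never loads full MlFileData records. A standalone sketch of the same query on a demo database (names are illustrative):

import { openDB, DBSchema } from 'idb';

interface Db extends DBSchema {
    files: {
        key: number;
        value: { fileId: number; mlVersion: number; errorCount: number };
        indexes: { mlVersion: [number, number] };
    };
}

// up to `count` primary keys with mlVersion < limit and errorCount <= maxErrors
async function outOfSyncIds(count: number, limit: number, maxErrors: number) {
    const db = await openDB<Db>('mldata-demo', 1, {
        upgrade(db) {
            const files = db.createObjectStore('files', { keyPath: 'fileId' });
            files.createIndex('mlVersion', ['mlVersion', 'errorCount']);
        },
    });
    const tx = db.transaction('files', 'readonly');
    const index = tx.store.index('mlVersion');
    // [limit] sorts before [limit, anything], so this open bound means mlVersion < limit
    let cursor = await index.openKeyCursor(IDBKeyRange.upperBound([limit], true));
    const ids: number[] = [];
    while (cursor && ids.length < count) {
        const [, errorCount] = cursor.key;
        if (errorCount <= maxErrors) ids.push(cursor.primaryKey);
        cursor = await cursor.continue();
    }
    await tx.done;
    return ids;
}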

View file

@ -81,6 +81,7 @@ export function newMlData(
faceCropMethod: syncContext.faceCropService.method,
alignmentMethod: syncContext.faceAlignmentService.method,
embeddingMethod: syncContext.faceEmbeddingService.method,
errorCount: 0,
mlVersion: 0,
};
}

View file

@ -4248,6 +4248,11 @@ idb@^6.0.0:
resolved "https://registry.npmjs.org/idb/-/idb-6.1.3.tgz"
integrity sha512-oIRDpVcs5KXpI1hRnTJUwkY63RB/7iqu9nSNuzXN8TLHjs7oO20IoPFbBTsqxIL5IjzIUDi+FXlVcK4zm26J8A==
idb@^7.0.0:
version "7.0.0"
resolved "https://registry.yarnpkg.com/idb/-/idb-7.0.0.tgz#f349b418c128f625961147a7d6b0e4b526fd34ed"
integrity sha512-jSx0WOY9Nj+QzP6wX5e7g64jqh8ExtDs/IAuOrOEZCD/h6+0HqyrKsDMfdJc0hqhSvh0LsrwqrkDn+EtjjzSRA==
ieee754@^1.1.4, ieee754@^1.2.1:
version "1.2.1"
resolved "https://registry.npmjs.org/ieee754/-/ieee754-1.2.1.tgz"