Merge branch 'main' into mobile_face

Neeraj Gupta 2024-04-11 11:10:54 +05:30
commit b18734f63e
91 changed files with 2455 additions and 739 deletions

desktop/build/icon.icns: new binary file (not shown)


@ -1,5 +1,9 @@
# Dependencies
* [Electron](#electron)
* [Dev dependencies](#dev)
* [Functionality](#functionality)
## Electron
[Electron](https://www.electronjs.org) is a cross-platform (Linux, Windows,
@ -73,7 +77,7 @@ Electron process. This allows us to directly use the output produced by
## Dev
See [web/docs/dependencies#DX](../../web/docs/dependencies.md#dev) for the
See [web/docs/dependencies#dev](../../web/docs/dependencies.md#dev) for the
general development experience related dependencies like TypeScript etc, which
are similar to that in the web code.
@ -88,7 +92,7 @@ Some extra ones specific to the code here are:
## Functionality
### Conversion
### Format conversion
The main tool we use for arbitrary conversions is FFMPEG. To bundle a
(platform specific) static binary of ffmpeg with our app, we use
@ -104,20 +108,23 @@ resources (`build`) folder. This is used for thumbnail generation on Linux.
On macOS, we use the `sips` CLI tool for conversion, but that is already
available on the host machine, and is not bundled with our app.
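As a rough illustration (a minimal sketch, not the app's actual code; the helper name and the ffmpeg arguments are made up), the binary path exported by ffmpeg-static can be handed directly to Node's `child_process`:

```ts
import { execFile } from "node:child_process";
import { promisify } from "node:util";
import pathToFfmpeg from "ffmpeg-static";

const execFileAsync = promisify(execFile);

// Hypothetical helper: create a thumbnail using the bundled ffmpeg binary.
// ffmpeg-static's default export is the path to the platform specific binary.
export const makeThumbnail = async (input: string, output: string) => {
    if (!pathToFfmpeg) throw new Error("ffmpeg binary not available");
    await execFileAsync(pathToFfmpeg, [
        "-i", input, // input image or video
        "-vf", "scale=720:-1", // scale to 720px wide, keeping aspect ratio
        output,
    ]);
};
```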
### AI/ML
[onnxruntime-node](https://github.com/Microsoft/onnxruntime) is used as the
AI/ML runtime. It powers both natural language searches (using CLIP) and face
detection (using YOLO).
[jpeg-js](https://github.com/jpeg-js/jpeg-js#readme) is used for decoding
JPEG data into raw RGB bytes before passing it to ONNX.
html-entities is used by the bundled clip-bpe-ts tokenizer for CLIP.
### Watch Folders
[chokidar](https://github.com/paulmillr/chokidar) is used as a file system
watcher for the watch folders functionality.
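A minimal sketch of this kind of usage, assuming an illustrative folder path and options:

```ts
import chokidar from "chokidar";

// Watch a folder for changes, skipping the initial scan of existing files.
const watcher = chokidar.watch("/path/to/watched/folder", {
    ignoreInitial: true,
    awaitWriteFinish: true, // wait until a file has finished being written
});

watcher
    .on("add", (path) => console.log(`added: ${path}`))
    .on("unlink", (path) => console.log(`removed: ${path}`));
```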
### AI/ML
- [onnxruntime-node](https://github.com/Microsoft/onnxruntime) is used for
natural language searches based on CLIP.
- html-entities is used by the bundled clip-bpe-ts tokenizer.
- [jpeg-js](https://github.com/jpeg-js/jpeg-js#readme) is used for decoding
JPEG data into raw RGB bytes before passing it to ONNX.
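A minimal sketch of that decode step (the tensor shape, layout, and lack of normalization here are illustrative; the actual models may expect different preprocessing):

```ts
import jpeg from "jpeg-js";
import * as ort from "onnxruntime-node";

// Decode JPEG bytes to RGBA, strip the alpha channel, and wrap the RGB
// bytes in an ONNX tensor (NHWC layout here; real models may differ).
export const jpegToTensor = (bytes: Buffer): ort.Tensor => {
    const { width, height, data } = jpeg.decode(bytes, { useTArray: true });
    const rgb = new Uint8Array(width * height * 3);
    for (let i = 0, j = 0; i < data.length; i += 4, j += 3) {
        rgb[j] = data[i]; // R
        rgb[j + 1] = data[i + 1]; // G
        rgb[j + 2] = data[i + 2]; // B
    }
    return new ort.Tensor("uint8", rgb, [1, height, width, 3]);
};
```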
## ZIP
### ZIP
[node-stream-zip](https://github.com/antelle/node-stream-zip) is used for
reading large ZIP files (e.g. during imports of Google Takeout ZIPs).
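For instance, a sketch of listing the entries of a large ZIP without reading the whole archive into memory, using node-stream-zip's promise-based API (the function name is made up):

```ts
import StreamZip from "node-stream-zip";

// Enumerate ZIP entries lazily; the archive is read on demand, not whole.
const listZipEntries = async (zipPath: string) => {
    const zip = new StreamZip.async({ file: zipPath });
    try {
        const entries = await zip.entries();
        for (const entry of Object.values(entries)) {
            if (!entry.isDirectory) console.log(entry.name, entry.size);
        }
    } finally {
        await zip.close();
    }
};
```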


@ -1,5 +1,15 @@
appId: io.ente.bhari-frame
artifactName: ${productName}-${version}-${arch}.${ext}
files:
- app/**/*
- out
extraFiles:
- from: build
to: resources
win:
target:
- target: nsis
arch: [x64, arm64]
nsis:
deleteAppDataOnUninstall: true
linux:
@ -20,9 +30,3 @@ mac:
category: public.app-category.photography
hardenedRuntime: true
afterSign: electron-builder-notarize
extraFiles:
- from: build
to: resources
files:
- app/**/*
- out


@ -26,9 +26,9 @@ import {
import { attachFSWatchIPCHandlers, attachIPCHandlers } from "./main/ipc";
import log, { initLogging } from "./main/log";
import { createApplicationMenu } from "./main/menu";
import { setupAutoUpdater } from "./main/services/app-update";
import { initWatcher } from "./main/services/chokidar";
import { isDev } from "./main/util";
import { setupAutoUpdater } from "./services/appUpdater";
import { initWatcher } from "./services/chokidar";
let appIsQuitting = false;
@ -142,9 +142,10 @@ const deleteLegacyDiskCacheDirIfExists = async () => {
};
const attachEventHandlers = (mainWindow: BrowserWindow) => {
// Let ipcRenderer know when mainWindow is in the foreground.
// Let ipcRenderer know when mainWindow is in the foreground so that it can
// in turn inform the renderer process.
mainWindow.on("focus", () =>
mainWindow.webContents.send("app-in-foreground"),
mainWindow.webContents.send("mainWindowFocus"),
);
};


@ -1,8 +1,8 @@
import { dialog } from "electron/main";
import path from "node:path";
import { getDirFilePaths, getElectronFile } from "../services/fs";
import { getElectronFilesFromGoogleZip } from "../services/upload";
import type { ElectronFile } from "../types/ipc";
import { getDirFilePaths, getElectronFile } from "./services/fs";
import { getElectronFilesFromGoogleZip } from "./services/upload";
export const selectDirectory = async () => {
const result = await dialog.showOpenDialog({


@ -1,12 +1,12 @@
import { app, BrowserWindow, nativeImage, Tray } from "electron";
import { BrowserWindow, Tray, app, nativeImage, shell } from "electron";
import { existsSync } from "node:fs";
import path from "node:path";
import { isAppQuitting, rendererURL } from "../main";
import autoLauncher from "../services/autoLauncher";
import { getHideDockIconPreference } from "../services/userPreference";
import { isPlatform } from "../utils/common/platform";
import log from "./log";
import { createTrayContextMenu } from "./menu";
import { isPlatform } from "./platform";
import autoLauncher from "./services/autoLauncher";
import { getHideDockIconPreference } from "./services/userPreference";
import { isDev } from "./util";
/**
@ -77,16 +77,24 @@ export const createWindow = async () => {
};
export const setupTrayItem = (mainWindow: BrowserWindow) => {
const iconName = isPlatform("mac")
? "taskbar-icon-Template.png"
: "taskbar-icon.png";
// There are a total of 6 files corresponding to this tray icon.
//
// On macOS, use template images (filename needs to end with "Template.ext")
// https://www.electronjs.org/docs/latest/api/native-image#template-image-macos
//
// And for each (template or otherwise), there are 3 "retina" variants
// https://www.electronjs.org/docs/latest/api/native-image#high-resolution-image
const iconName =
process.platform == "darwin"
? "taskbar-icon-Template.png"
: "taskbar-icon.png";
const trayImgPath = path.join(
isDev ? "build" : process.resourcesPath,
iconName,
);
const trayIcon = nativeImage.createFromPath(trayImgPath);
const tray = new Tray(trayIcon);
tray.setToolTip("ente");
tray.setToolTip("Ente Photos");
tray.setContextMenu(createTrayContextMenu(mainWindow));
};
@ -101,7 +109,7 @@ export function handleDownloads(mainWindow: BrowserWindow) {
export function handleExternalLinks(mainWindow: BrowserWindow) {
mainWindow.webContents.setWindowOpenHandler(({ url }) => {
if (!url.startsWith(rendererURL)) {
require("electron").shell.openExternal(url);
shell.openExternal(url);
return { action: "deny" };
} else {
return { action: "allow" };


@ -10,40 +10,6 @@
import type { FSWatcher } from "chokidar";
import { ipcMain } from "electron/main";
import {
appVersion,
muteUpdateNotification,
skipAppUpdate,
updateAndRestart,
} from "../services/appUpdater";
import {
clipImageEmbedding,
clipTextEmbedding,
} from "../services/clip-service";
import { runFFmpegCmd } from "../services/ffmpeg";
import { getDirFiles } from "../services/fs";
import {
convertToJPEG,
generateImageThumbnail,
} from "../services/imageProcessor";
import {
clearElectronStore,
getEncryptionKey,
setEncryptionKey,
} from "../services/store";
import {
getElectronFilesFromGoogleZip,
getPendingUploads,
setToUploadCollection,
setToUploadFiles,
} from "../services/upload";
import {
addWatchMapping,
getWatchMappings,
removeWatchMapping,
updateWatchMappingIgnoredFiles,
updateWatchMappingSyncedFiles,
} from "../services/watch";
import type { ElectronFile, FILE_PATH_TYPE, WatchMapping } from "../types/ipc";
import {
selectDirectory,
@ -64,6 +30,37 @@ import {
saveStreamToDisk,
} from "./fs";
import { logToDisk } from "./log";
import {
appVersion,
skipAppUpdate,
updateAndRestart,
updateOnNextRestart,
} from "./services/app-update";
import { clipImageEmbedding, clipTextEmbedding } from "./services/clip";
import { runFFmpegCmd } from "./services/ffmpeg";
import { getDirFiles } from "./services/fs";
import {
convertToJPEG,
generateImageThumbnail,
} from "./services/imageProcessor";
import {
clearStores,
encryptionKey,
saveEncryptionKey,
} from "./services/store";
import {
getElectronFilesFromGoogleZip,
getPendingUploads,
setToUploadCollection,
setToUploadFiles,
} from "./services/upload";
import {
addWatchMapping,
getWatchMappings,
removeWatchMapping,
updateWatchMappingIgnoredFiles,
updateWatchMappingSyncedFiles,
} from "./services/watch";
import { openDirectory, openLogDirectory } from "./util";
/**
@ -98,26 +95,24 @@ export const attachIPCHandlers = () => {
// See [Note: Catching exception during .send/.on]
ipcMain.on("logToDisk", (_, message) => logToDisk(message));
ipcMain.on("clear-electron-store", () => {
clearElectronStore();
});
ipcMain.on("clearStores", () => clearStores());
ipcMain.handle("setEncryptionKey", (_, encryptionKey) =>
setEncryptionKey(encryptionKey),
ipcMain.handle("saveEncryptionKey", (_, encryptionKey) =>
saveEncryptionKey(encryptionKey),
);
ipcMain.handle("getEncryptionKey", () => getEncryptionKey());
ipcMain.handle("encryptionKey", () => encryptionKey());
// - App update
ipcMain.on("update-and-restart", () => updateAndRestart());
ipcMain.on("updateAndRestart", () => updateAndRestart());
ipcMain.on("skip-app-update", (_, version) => skipAppUpdate(version));
ipcMain.on("mute-update-notification", (_, version) =>
muteUpdateNotification(version),
ipcMain.on("updateOnNextRestart", (_, version) =>
updateOnNextRestart(version),
);
ipcMain.on("skipAppUpdate", (_, version) => skipAppUpdate(version));
// - Conversion
ipcMain.handle("convertToJPEG", (_, fileData, filename) =>


@ -19,6 +19,16 @@ export const initLogging = () => {
log.transports.file.format = "[{y}-{m}-{d}T{h}:{i}:{s}{z}] {text}";
log.transports.console.level = false;
// Log unhandled errors and promise rejections.
log.errorHandler.startCatching({
onError: ({ error, errorName }) => {
logError(errorName, error);
// Prevent the default electron-log actions (e.g. showing a dialog)
// from getting triggered.
return false;
},
});
};
/**


@ -6,12 +6,12 @@ import {
shell,
} from "electron";
import { setIsAppQuitting } from "../main";
import { forceCheckForUpdateAndNotify } from "../services/appUpdater";
import autoLauncher from "../services/autoLauncher";
import { forceCheckForAppUpdates } from "./services/app-update";
import autoLauncher from "./services/autoLauncher";
import {
getHideDockIconPreference,
setHideDockIconPreference,
} from "../services/userPreference";
} from "./services/userPreference";
import { openLogDirectory } from "./util";
/** Create and return the entries in the app's main menu bar */
@ -26,8 +26,7 @@ export const createApplicationMenu = async (mainWindow: BrowserWindow) => {
const macOSOnly = (options: MenuItemConstructorOptions[]) =>
process.platform == "darwin" ? options : [];
const handleCheckForUpdates = () =>
forceCheckForUpdateAndNotify(mainWindow);
const handleCheckForUpdates = () => forceCheckForAppUpdates(mainWindow);
const handleViewChangelog = () =>
shell.openExternal(
@ -54,7 +53,7 @@ export const createApplicationMenu = async (mainWindow: BrowserWindow) => {
return Menu.buildFromTemplate([
{
label: "ente",
label: "Ente Photos",
submenu: [
...macOSOnly([
{
@ -156,7 +155,7 @@ export const createApplicationMenu = async (mainWindow: BrowserWindow) => {
{ type: "separator" },
{ label: "Bring All to Front", role: "front" },
{ type: "separator" },
{ label: "Ente", role: "window" },
{ label: "Ente Photos", role: "window" },
]),
],
},


@ -0,0 +1,98 @@
import { compareVersions } from "compare-versions";
import { app, BrowserWindow } from "electron";
import { default as electronLog } from "electron-log";
import { autoUpdater } from "electron-updater";
import { setIsAppQuitting, setIsUpdateAvailable } from "../../main";
import { AppUpdateInfo } from "../../types/ipc";
import log from "../log";
import { userPreferencesStore } from "../stores/user-preferences";
export const setupAutoUpdater = (mainWindow: BrowserWindow) => {
autoUpdater.logger = electronLog;
autoUpdater.autoDownload = false;
const oneDay = 1 * 24 * 60 * 60 * 1000;
setInterval(() => checkForUpdatesAndNotify(mainWindow), oneDay);
checkForUpdatesAndNotify(mainWindow);
};
/**
* Check for app updates, ignoring any previously saved skips / mutes.
*/
export const forceCheckForAppUpdates = (mainWindow: BrowserWindow) => {
userPreferencesStore.delete("skipAppVersion");
userPreferencesStore.delete("muteUpdateNotificationVersion");
checkForUpdatesAndNotify(mainWindow);
};
const checkForUpdatesAndNotify = async (mainWindow: BrowserWindow) => {
const updateCheckResult = await autoUpdater.checkForUpdates();
if (!updateCheckResult) {
log.error("Failed to check for updates");
return;
}
const { version } = updateCheckResult.updateInfo;
log.debug(() => `Update check found version ${version}`);
if (compareVersions(version, app.getVersion()) <= 0) {
log.debug(() => "Skipping update, already at latest version");
return;
}
if (version === userPreferencesStore.get("skipAppVersion")) {
log.info(`User chose to skip version ${version}`);
return;
}
const mutedVersion = userPreferencesStore.get(
"muteUpdateNotificationVersion",
);
if (version === mutedVersion) {
log.info(`User has muted update notifications for version ${version}`);
return;
}
const showUpdateDialog = (updateInfo: AppUpdateInfo) =>
mainWindow.webContents.send("appUpdateAvailable", updateInfo);
log.debug(() => "Attempting auto update");
autoUpdater.downloadUpdate();
let timeout: NodeJS.Timeout;
const fiveMinutes = 5 * 60 * 1000;
autoUpdater.on("update-downloaded", () => {
timeout = setTimeout(
() => showUpdateDialog({ autoUpdatable: true, version }),
fiveMinutes,
);
});
autoUpdater.on("error", (error) => {
clearTimeout(timeout);
log.error("Auto update failed", error);
showUpdateDialog({ autoUpdatable: false, version });
});
setIsUpdateAvailable(true);
};
/**
* Return the version of the desktop app
*
* The return value is of the form `v1.2.3`.
*/
export const appVersion = () => `v${app.getVersion()}`;
export const updateAndRestart = () => {
log.info("Restarting the app to apply update");
setIsAppQuitting(true);
autoUpdater.quitAndInstall();
};
export const updateOnNextRestart = (version: string) =>
userPreferencesStore.set("muteUpdateNotificationVersion", version);
export const skipAppUpdate = (version: string) =>
userPreferencesStore.set("skipAppVersion", version);


@ -1,5 +1,5 @@
import { AutoLauncherClient } from "../types/main";
import { isPlatform } from "../utils/common/platform";
import { AutoLauncherClient } from "../../types/main";
import { isPlatform } from "../platform";
import linuxAndWinAutoLauncher from "./autoLauncherClients/linuxAndWinAutoLauncher";
import macAutoLauncher from "./autoLauncherClients/macAutoLauncher";


@ -1,6 +1,6 @@
import AutoLaunch from "auto-launch";
import { app } from "electron";
import { AutoLauncherClient } from "../../types/main";
import { AutoLauncherClient } from "../../../types/main";
const LAUNCHED_AS_HIDDEN_FLAG = "hidden";


@ -1,5 +1,5 @@
import { app } from "electron";
import { AutoLauncherClient } from "../../types/main";
import { AutoLauncherClient } from "../../../types/main";
class MacAutoLauncher implements AutoLauncherClient {
async isEnabled() {


@ -1,9 +1,9 @@
import chokidar from "chokidar";
import { BrowserWindow } from "electron";
import path from "path";
import log from "../main/log";
import { getWatchMappings } from "../services/watch";
import log from "../log";
import { getElectronFile } from "./fs";
import { getWatchMappings } from "./watch";
/**
* Convert a file system {@link filePath} that uses the local system specific


@ -11,16 +11,16 @@
*/
import { app, net } from "electron/main";
import { existsSync } from "fs";
import jpeg from "jpeg-js";
import fs from "node:fs/promises";
import path from "node:path";
import { writeStream } from "../main/fs";
import log from "../main/log";
import { CustomErrors } from "../types/ipc";
import Tokenizer from "../utils/clip-bpe-ts/mod";
import { generateTempFilePath } from "../utils/temp";
import * as ort from "onnxruntime-node";
import Tokenizer from "../../thirdparty/clip-bpe-ts/mod";
import { CustomErrors } from "../../types/ipc";
import { writeStream } from "../fs";
import log from "../log";
import { generateTempFilePath } from "../temp";
import { deleteTempFile } from "./ffmpeg";
const jpeg = require("jpeg-js");
const ort = require("onnxruntime-node");
const textModelName = "clip-text-vit-32-uint8.onnx";
const textModelByteSize = 64173509; // 61.2 MB


@ -1,11 +1,11 @@
import pathToFfmpeg from "ffmpeg-static";
import { existsSync } from "node:fs";
import fs from "node:fs/promises";
import { writeStream } from "../main/fs";
import log from "../main/log";
import { execAsync } from "../main/util";
import { ElectronFile } from "../types/ipc";
import { generateTempFilePath, getTempDirPath } from "../utils/temp";
import { ElectronFile } from "../../types/ipc";
import { writeStream } from "../fs";
import log from "../log";
import { generateTempFilePath, getTempDirPath } from "../temp";
import { execAsync } from "../util";
const INPUT_PATH_PLACEHOLDER = "INPUT";
const FFMPEG_PLACEHOLDER = "FFMPEG";


@ -2,8 +2,8 @@ import StreamZip from "node-stream-zip";
import { existsSync } from "node:fs";
import fs from "node:fs/promises";
import path from "node:path";
import log from "../main/log";
import { ElectronFile } from "../types/ipc";
import { ElectronFile } from "../../types/ipc";
import log from "../log";
const FILE_STREAM_CHUNK_SIZE: number = 4 * 1024 * 1024;


@ -1,12 +1,12 @@
import { existsSync } from "fs";
import fs from "node:fs/promises";
import path from "path";
import { writeStream } from "../main/fs";
import log from "../main/log";
import { execAsync, isDev } from "../main/util";
import { CustomErrors, ElectronFile } from "../types/ipc";
import { isPlatform } from "../utils/common/platform";
import { generateTempFilePath } from "../utils/temp";
import { CustomErrors, ElectronFile } from "../../types/ipc";
import { writeStream } from "../fs";
import log from "../log";
import { isPlatform } from "../platform";
import { generateTempFilePath } from "../temp";
import { execAsync, isDev } from "../util";
import { deleteTempFile } from "./ffmpeg";
const IMAGE_MAGICK_PLACEHOLDER = "IMAGE_MAGICK";


@ -4,23 +4,22 @@ import { safeStorageStore } from "../stores/safeStorage.store";
import { uploadStatusStore } from "../stores/upload.store";
import { watchStore } from "../stores/watch.store";
export const clearElectronStore = () => {
export const clearStores = () => {
uploadStatusStore.clear();
keysStore.clear();
safeStorageStore.clear();
watchStore.clear();
};
export async function setEncryptionKey(encryptionKey: string) {
export const saveEncryptionKey = async (encryptionKey: string) => {
const encryptedKey: Buffer = await safeStorage.encryptString(encryptionKey);
const b64EncryptedKey = Buffer.from(encryptedKey).toString("base64");
safeStorageStore.set("encryptionKey", b64EncryptedKey);
}
};
export async function getEncryptionKey(): Promise<string> {
export const encryptionKey = async (): Promise<string | undefined> => {
const b64EncryptedKey = safeStorageStore.get("encryptionKey");
if (b64EncryptedKey) {
const keyBuffer = Buffer.from(b64EncryptedKey, "base64");
return await safeStorage.decryptString(keyBuffer);
}
}
if (!b64EncryptedKey) return undefined;
const keyBuffer = Buffer.from(b64EncryptedKey, "base64");
return await safeStorage.decryptString(keyBuffer);
};


@ -1,10 +1,9 @@
import StreamZip from "node-stream-zip";
import path from "path";
import { getElectronFile } from "../services/fs";
import { ElectronFile, FILE_PATH_TYPE } from "../../types/ipc";
import { FILE_PATH_KEYS } from "../../types/main";
import { uploadStatusStore } from "../stores/upload.store";
import { ElectronFile, FILE_PATH_TYPE } from "../types/ipc";
import { FILE_PATH_KEYS } from "../types/main";
import { getValidPaths, getZipFileStream } from "./fs";
import { getElectronFile, getValidPaths, getZipFileStream } from "./fs";
export const getPendingUploads = async () => {
const filePaths = getSavedFilePaths(FILE_PATH_TYPE.FILES);


@ -0,0 +1,9 @@
import { userPreferencesStore } from "../stores/user-preferences";
export function getHideDockIconPreference() {
return userPreferencesStore.get("hideDockIcon");
}
export function setHideDockIconPreference(shouldHideDockIcon: boolean) {
userPreferencesStore.set("hideDockIcon", shouldHideDockIcon);
}


@ -1,8 +1,7 @@
import type { FSWatcher } from "chokidar";
import ElectronLog from "electron-log";
import { WatchMapping, WatchStoreType } from "../../types/ipc";
import { watchStore } from "../stores/watch.store";
import { WatchMapping, WatchStoreType } from "../types/ipc";
import { isMappingPresent } from "../utils/watch";
export const addWatchMapping = async (
watcher: FSWatcher,
@ -29,6 +28,13 @@ export const addWatchMapping = async (
setWatchMappings(watchMappings);
};
function isMappingPresent(watchMappings: WatchMapping[], folderPath: string) {
const watchMapping = watchMappings?.find(
(mapping) => mapping.folderPath === folderPath,
);
return !!watchMapping;
}
export const removeWatchMapping = async (
watcher: FSWatcher,
folderPath: string,


@ -1,5 +1,5 @@
import Store, { Schema } from "electron-store";
import type { KeysStoreType } from "../types/main";
import type { KeysStoreType } from "../../types/main";
const keysStoreSchema: Schema<KeysStoreType> = {
AnonymizeUserID: {


@ -1,5 +1,5 @@
import Store, { Schema } from "electron-store";
import type { SafeStorageStoreType } from "../types/main";
import type { SafeStorageStoreType } from "../../types/main";
const safeStorageSchema: Schema<SafeStorageStoreType> = {
encryptionKey: {


@ -1,5 +1,5 @@
import Store, { Schema } from "electron-store";
import type { UploadStoreType } from "../types/main";
import type { UploadStoreType } from "../../types/main";
const uploadStoreSchema: Schema<UploadStoreType> = {
filePaths: {


@ -1,7 +1,12 @@
import Store, { Schema } from "electron-store";
import type { UserPreferencesType } from "../types/main";
const userPreferencesSchema: Schema<UserPreferencesType> = {
interface UserPreferencesSchema {
hideDockIcon: boolean;
skipAppVersion?: string;
muteUpdateNotificationVersion?: string;
}
const userPreferencesSchema: Schema<UserPreferencesSchema> = {
hideDockIcon: {
type: "boolean",
},


@ -1,5 +1,5 @@
import Store, { Schema } from "electron-store";
import { WatchStoreType } from "../types/ipc";
import { WatchStoreType } from "../../types/ipc";
const watchStoreSchema: Schema<WatchStoreType> = {
mappings: {


@ -0,0 +1,9 @@
/**
* Types for [onnxruntime-node](https://onnxruntime.ai/docs/api/js/index.html).
*
* Note: these are not the official types but are based on a temporary
* [workaround](https://github.com/microsoft/onnxruntime/issues/17979).
*/
declare module "onnxruntime-node" {
export * from "onnxruntime-common";
}
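With this declaration in place, the runtime can be driven through the usual onnxruntime JS API. A hedged sketch (the model path and the `input`/`output` tensor names are placeholders that depend on the exported model):

```ts
import * as ort from "onnxruntime-node";

// Load a model and run a single inference. "input"/"output" are placeholder
// names; the real ones depend on how the ONNX model was exported.
export const runModel = async (modelPath: string, input: ort.Tensor) => {
    const session = await ort.InferenceSession.create(modelPath);
    const results = await session.run({ input });
    return results.output.data; // typed array holding the model output
};
```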


@ -52,58 +52,55 @@ import type {
const appVersion = (): Promise<string> => ipcRenderer.invoke("appVersion");
const logToDisk = (message: string): void =>
ipcRenderer.send("logToDisk", message);
const openDirectory = (dirPath: string): Promise<void> =>
ipcRenderer.invoke("openDirectory", dirPath);
const openLogDirectory = (): Promise<void> =>
ipcRenderer.invoke("openLogDirectory");
const logToDisk = (message: string): void =>
ipcRenderer.send("logToDisk", message);
const clearStores = () => ipcRenderer.send("clearStores");
const encryptionKey = (): Promise<string | undefined> =>
ipcRenderer.invoke("encryptionKey");
const saveEncryptionKey = (encryptionKey: string): Promise<void> =>
ipcRenderer.invoke("saveEncryptionKey", encryptionKey);
const onMainWindowFocus = (cb?: () => void) => {
ipcRenderer.removeAllListeners("mainWindowFocus");
if (cb) ipcRenderer.on("mainWindowFocus", cb);
};
// - App update
const onAppUpdateAvailable = (
cb?: ((updateInfo: AppUpdateInfo) => void) | undefined,
) => {
ipcRenderer.removeAllListeners("appUpdateAvailable");
if (cb) {
ipcRenderer.on("appUpdateAvailable", (_, updateInfo: AppUpdateInfo) =>
cb(updateInfo),
);
}
};
const updateAndRestart = () => ipcRenderer.send("updateAndRestart");
const updateOnNextRestart = (version: string) =>
ipcRenderer.send("updateOnNextRestart", version);
const skipAppUpdate = (version: string) => {
ipcRenderer.send("skipAppUpdate", version);
};
const fsExists = (path: string): Promise<boolean> =>
ipcRenderer.invoke("fsExists", path);
// - AUDIT below this
const registerForegroundEventListener = (onForeground: () => void) => {
ipcRenderer.removeAllListeners("app-in-foreground");
ipcRenderer.on("app-in-foreground", onForeground);
};
const clearElectronStore = () => {
ipcRenderer.send("clear-electron-store");
};
const setEncryptionKey = (encryptionKey: string): Promise<void> =>
ipcRenderer.invoke("setEncryptionKey", encryptionKey);
const getEncryptionKey = (): Promise<string> =>
ipcRenderer.invoke("getEncryptionKey");
// - App update
const registerUpdateEventListener = (
showUpdateDialog: (updateInfo: AppUpdateInfo) => void,
) => {
ipcRenderer.removeAllListeners("show-update-dialog");
ipcRenderer.on("show-update-dialog", (_, updateInfo: AppUpdateInfo) => {
showUpdateDialog(updateInfo);
});
};
const updateAndRestart = () => {
ipcRenderer.send("update-and-restart");
};
const skipAppUpdate = (version: string) => {
ipcRenderer.send("skip-app-update", version);
};
const muteUpdateNotification = (version: string) => {
ipcRenderer.send("mute-update-notification", version);
};
// - Conversion
const convertToJPEG = (
@ -303,21 +300,19 @@ const getDirFiles = (dirPath: string): Promise<ElectronFile[]> =>
contextBridge.exposeInMainWorld("electron", {
// - General
appVersion,
openDirectory,
registerForegroundEventListener,
clearElectronStore,
getEncryptionKey,
setEncryptionKey,
// - Logging
openLogDirectory,
logToDisk,
openDirectory,
openLogDirectory,
clearStores,
encryptionKey,
saveEncryptionKey,
onMainWindowFocus,
// - App update
onAppUpdateAvailable,
updateAndRestart,
updateOnNextRestart,
skipAppUpdate,
muteUpdateNotification,
registerUpdateEventListener,
// - Conversion
convertToJPEG,


@ -1,120 +0,0 @@
import { compareVersions } from "compare-versions";
import { app, BrowserWindow } from "electron";
import { default as electronLog } from "electron-log";
import { autoUpdater } from "electron-updater";
import { setIsAppQuitting, setIsUpdateAvailable } from "../main";
import log from "../main/log";
import { AppUpdateInfo } from "../types/ipc";
import {
clearMuteUpdateNotificationVersion,
clearSkipAppVersion,
getMuteUpdateNotificationVersion,
getSkipAppVersion,
setMuteUpdateNotificationVersion,
setSkipAppVersion,
} from "./userPreference";
const FIVE_MIN_IN_MICROSECOND = 5 * 60 * 1000;
const ONE_DAY_IN_MICROSECOND = 1 * 24 * 60 * 60 * 1000;
export function setupAutoUpdater(mainWindow: BrowserWindow) {
autoUpdater.logger = electronLog;
autoUpdater.autoDownload = false;
checkForUpdateAndNotify(mainWindow);
setInterval(
() => checkForUpdateAndNotify(mainWindow),
ONE_DAY_IN_MICROSECOND,
);
}
export function forceCheckForUpdateAndNotify(mainWindow: BrowserWindow) {
try {
clearSkipAppVersion();
clearMuteUpdateNotificationVersion();
checkForUpdateAndNotify(mainWindow);
} catch (e) {
log.error("forceCheckForUpdateAndNotify failed", e);
}
}
async function checkForUpdateAndNotify(mainWindow: BrowserWindow) {
try {
log.debug(() => "checkForUpdateAndNotify");
const { updateInfo } = await autoUpdater.checkForUpdates();
log.debug(() => `Update version ${updateInfo.version}`);
if (compareVersions(updateInfo.version, app.getVersion()) <= 0) {
log.debug(() => "Skipping update, already at latest version");
return;
}
const skipAppVersion = getSkipAppVersion();
if (skipAppVersion && updateInfo.version === skipAppVersion) {
log.info(`User chose to skip version ${updateInfo.version}`);
return;
}
let timeout: NodeJS.Timeout;
log.debug(() => "Attempting auto update");
autoUpdater.downloadUpdate();
const muteUpdateNotificationVersion =
getMuteUpdateNotificationVersion();
if (
muteUpdateNotificationVersion &&
updateInfo.version === muteUpdateNotificationVersion
) {
log.info(
`User has muted update notifications for version ${updateInfo.version}`,
);
return;
}
autoUpdater.on("update-downloaded", () => {
timeout = setTimeout(
() =>
showUpdateDialog(mainWindow, {
autoUpdatable: true,
version: updateInfo.version,
}),
FIVE_MIN_IN_MICROSECOND,
);
});
autoUpdater.on("error", (error) => {
clearTimeout(timeout);
log.error("Auto update failed", error);
showUpdateDialog(mainWindow, {
autoUpdatable: false,
version: updateInfo.version,
});
});
setIsUpdateAvailable(true);
} catch (e) {
log.error("checkForUpdateAndNotify failed", e);
}
}
export function updateAndRestart() {
log.info("user quit the app");
setIsAppQuitting(true);
autoUpdater.quitAndInstall();
}
/**
* Return the version of the desktop app
*
* The return value is of the form `v1.2.3`.
*/
export const appVersion = () => `v${app.getVersion()}`;
export function skipAppUpdate(version: string) {
setSkipAppVersion(version);
}
export function muteUpdateNotification(version: string) {
setMuteUpdateNotificationVersion(version);
}
function showUpdateDialog(
mainWindow: BrowserWindow,
updateInfo: AppUpdateInfo,
) {
mainWindow.webContents.send("show-update-dialog", updateInfo);
}


@ -1,33 +0,0 @@
import { userPreferencesStore } from "../stores/userPreferences.store";
export function getHideDockIconPreference() {
return userPreferencesStore.get("hideDockIcon");
}
export function setHideDockIconPreference(shouldHideDockIcon: boolean) {
userPreferencesStore.set("hideDockIcon", shouldHideDockIcon);
}
export function getSkipAppVersion() {
return userPreferencesStore.get("skipAppVersion");
}
export function setSkipAppVersion(version: string) {
userPreferencesStore.set("skipAppVersion", version);
}
export function getMuteUpdateNotificationVersion() {
return userPreferencesStore.get("muteUpdateNotificationVersion");
}
export function setMuteUpdateNotificationVersion(version: string) {
userPreferencesStore.set("muteUpdateNotificationVersion", version);
}
export function clearSkipAppVersion() {
userPreferencesStore.delete("skipAppVersion");
}
export function clearMuteUpdateNotificationVersion() {
userPreferencesStore.delete("muteUpdateNotificationVersion");
}


@ -29,9 +29,3 @@ export const FILE_PATH_KEYS: {
export interface SafeStorageStoreType {
encryptionKey: string;
}
export interface UserPreferencesType {
hideDockIcon: boolean;
skipAppVersion: string;
muteUpdateNotificationVersion: string;
}


@ -1,11 +0,0 @@
import { WatchMapping } from "../types/ipc";
export function isMappingPresent(
watchMappings: WatchMapping[],
folderPath: string,
) {
const watchMapping = watchMappings?.find(
(mapping) => mapping.folderPath === folderPath,
);
return !!watchMapping;
}


@ -139,7 +139,17 @@ export const sidebar = [
text: "Auth",
items: [
{ text: "Introduction", link: "/auth/" },
{ text: "FAQ", link: "/auth/faq/" },
{
text: "FAQ",
collapsed: true,
items: [
{ text: "General", link: "/auth/faq/" },
{
text: "Enteception",
link: "/auth/faq/enteception/",
},
],
},
{
text: "Migration",
collapsed: true,


@ -0,0 +1,51 @@
---
title: Enteception
description: Using Ente Auth to store 2FA for your Ente account
---
# Enteception
Your 2FA codes are in Ente Auth, but if you enable 2FA for your Ente account
itself, where should the 2FA for your Ente account be stored?
There are multiple answers, none inherently better or worse; the right one
depends on your situation and risk tolerance.
If you are using the same account for both Ente Photos and Ente Auth and have
enabled 2FA from the Ente Photos app, we recommend storing your recovery key in
a safe place (writing it down on paper is a good idea). This key can be used to
bypass Ente 2FA in case you are locked out.
Another option is to use a separate account for Ente Auth.
Exporting an encrypted backup is yet another good way to reduce the risk (you
can import an encrypted backup without signing in).
Finally, we have on our roadmap some features like adding support for
emergency/legacy-contacts, passkeys, and hardware security keys. Beyond other
benefits, all of these would further reduce the risk of users getting locked out
of their accounts.
## Email verification for Ente Auth
There is a related ouroboros scenario: if email verification is enabled in the
Ente Auth app _and_ the 2FA for your email provider is stored in Ente Auth,
then you might need a code from your email to log into Ente Auth, but to log
into your email you need the Auth code.
To prevent people from accidentally locking themselves out this way, email
verification is disabled by default in the auth app. We also show a warning
when you try to enable email verification in the auth app:
<div align="center">
![Warning shown when enabling 2FA in Ente Auth](warning.png){width=400px}
</div>
The solutions here are the same as in the Ente-in-Ente case.
## TL;DR
Ideally, you should **note down your recovery key in a safe place (maybe on
paper)**, using which you will be able to bypass the two-factor.

New binary file (image, 516 KiB) not shown.


@ -31,3 +31,22 @@ You can enable FaceID lock under Settings → Security → Lockscreen.
### Why do the desktop and mobile apps display different codes?
Please verify that the time on both your mobile and desktop is the same.
### Does Ente Authenticator require an account?
No, Ente Authenticator does not require an account. You can choose to use the
app without backups if you prefer.
### Can I use the Ente 2FA app on multiple devices and sync them?
Yes, you can download the Ente app on multiple devices and sync the codes,
end-to-end encrypted.
### What does it mean when I receive a message saying my current device is not powerful enough to verify my password?
This means that the parameters that were used to derive your master key on your
original device are incompatible with your current device (likely because it's
less powerful).
If you recover your account via your current device and reset the password, it
will regenerate a key that is compatible with both devices.


@ -109,3 +109,13 @@ or "dog playing at the beach".
Check the sections within the upload progress bar for "Failed Uploads," "Ignored
Uploads," and "Unsuccessful Uploads."
## How do I keep my NAS and Ente Photos synced?
Please try using our [CLI](https://github.com/ente-io/ente/tree/main/cli#readme)
to pull data into your NAS.
## Is there a way to view all albums on the map view?
Currently, the Ente mobile app allows you to see a map view of all the albums by
clicking on "Your map" under "Locations" on the search screen.


@ -80,3 +80,10 @@ and is never sent to our servers.
Please note that only users on the paid plan are allowed to share albums. The
receiver just needs a free Ente account.
## Has the Ente Photos app been audited by a credible source?
Yes, Ente Photos has undergone a thorough security audit conducted by Cure53, in
collaboration with Symbolic Software. Cure53 is a prominent German cybersecurity
firm, while Symbolic Software specializes in applied cryptography. Please find
the full report here: https://ente.io/blog/cryptography-audit/


@ -64,6 +64,6 @@ data reflects the latest album states with new files, moves, and deletions.
If you run into any issues during your data export, please reach out to
[support@ente.io](mailto:support@ente.io) and we will be happy to help you!
Note that we also provide a [CLI
tool](https://github.com/ente-io/ente/tree/main/cli#export) to export your data.
Please find more details [here](/photos/faq/export).
Note that we also provide a
[CLI tool](https://github.com/ente-io/ente/tree/main/cli#export) to export your
data. Please find more details [here](/photos/faq/export).


@ -573,31 +573,34 @@ class FilesDB {
bool applyOwnerCheck = false,
}) async {
final stopWatch = EnteWatch('getAllPendingOrUploadedFiles')..start();
late String whereQuery;
late List<Object?>? whereArgs;
final order = (asc ?? false ? 'ASC' : 'DESC');
late String query;
late List<Object?>? args;
if (applyOwnerCheck) {
whereQuery = '$columnCreationTime >= ? AND $columnCreationTime <= ? '
query =
'SELECT * FROM $filesTable WHERE $columnCreationTime >= ? AND $columnCreationTime <= ? '
'AND ($columnOwnerID IS NULL OR $columnOwnerID = ?) '
'AND ($columnCollectionID IS NOT NULL AND $columnCollectionID IS NOT -1)'
' AND $columnMMdVisibility = ?';
whereArgs = [startTime, endTime, ownerID, visibility];
' AND $columnMMdVisibility = ? ORDER BY $columnCreationTime $order, $columnModificationTime $order';
args = [startTime, endTime, ownerID, visibility];
} else {
whereQuery =
'$columnCreationTime >= ? AND $columnCreationTime <= ? AND ($columnCollectionID IS NOT NULL AND $columnCollectionID IS NOT -1)'
' AND $columnMMdVisibility = ?';
whereArgs = [startTime, endTime, visibility];
query =
'SELECT * FROM $filesTable WHERE $columnCreationTime >= ? AND $columnCreationTime <= ? '
'AND ($columnCollectionID IS NOT NULL AND $columnCollectionID IS NOT -1)'
' AND $columnMMdVisibility = ? ORDER BY $columnCreationTime $order, $columnModificationTime $order';
args = [startTime, endTime, visibility];
}
final db = await instance.database;
final order = (asc ?? false ? 'ASC' : 'DESC');
final results = await db.query(
filesTable,
where: whereQuery,
whereArgs: whereArgs,
orderBy:
'$columnCreationTime ' + order + ', $columnModificationTime ' + order,
limit: limit,
);
if (limit != null) {
query += ' LIMIT ?';
args.add(limit);
}
final db = await instance.sqliteAsyncDB;
final results = await db.getAll(query, args);
_logger.info("message");
stopWatch.log('queryDone');
final files = convertToFiles(results);
stopWatch.log('convertDone');
@ -609,23 +612,25 @@ class FilesDB {
Future<FileLoadResult> getAllLocalAndUploadedFiles(
int startTime,
int endTime,
int ownerID, {
int endTime, {
int? limit,
bool? asc,
required DBFilterOptions filterOptions,
}) async {
final db = await instance.database;
final db = await instance.sqliteAsyncDB;
final order = (asc ?? false ? 'ASC' : 'DESC');
final results = await db.query(
filesTable,
where:
'$columnCreationTime >= ? AND $columnCreationTime <= ? AND ($columnMMdVisibility IS NULL OR $columnMMdVisibility = ?)'
' AND ($columnLocalID IS NOT NULL OR ($columnCollectionID IS NOT NULL AND $columnCollectionID IS NOT -1))',
whereArgs: [startTime, endTime, visibleVisibility],
orderBy:
'$columnCreationTime ' + order + ', $columnModificationTime ' + order,
limit: limit,
final args = [startTime, endTime, visibleVisibility];
String query =
'SELECT * FROM $filesTable WHERE $columnCreationTime >= ? AND $columnCreationTime <= ? AND ($columnMMdVisibility IS NULL OR $columnMMdVisibility = ?)'
' AND ($columnLocalID IS NOT NULL OR ($columnCollectionID IS NOT NULL AND $columnCollectionID IS NOT -1))'
' ORDER BY $columnCreationTime $order, $columnModificationTime $order';
if (limit != null) {
query += ' LIMIT ?';
args.add(limit);
}
final results = await db.getAll(
query,
args,
);
final files = convertToFiles(results);
final List<EnteFile> filteredFiles =
@ -658,19 +663,18 @@ class FilesDB {
bool? asc,
int visibility = visibleVisibility,
}) async {
final db = await instance.database;
final db = await instance.sqliteAsyncDB;
final order = (asc ?? false ? 'ASC' : 'DESC');
const String whereClause =
'$columnCollectionID = ? AND $columnCreationTime >= ? AND $columnCreationTime <= ?';
final List<Object> whereArgs = [collectionID, startTime, endTime];
final results = await db.query(
filesTable,
where: whereClause,
whereArgs: whereArgs,
orderBy:
'$columnCreationTime ' + order + ', $columnModificationTime ' + order,
limit: limit,
String query =
'SELECT * FROM $filesTable WHERE $columnCollectionID = ? AND $columnCreationTime >= ? AND $columnCreationTime <= ? ORDER BY $columnCreationTime $order, $columnModificationTime $order';
final List<Object> args = [collectionID, startTime, endTime];
if (limit != null) {
query += ' LIMIT ?';
args.add(limit);
}
final results = await db.getAll(
query,
args,
);
final files = convertToFiles(results);
return FileLoadResult(files, files.length == limit);
@ -1618,7 +1622,6 @@ class FilesDB {
bool dedupeByUploadId = true,
}) async {
final db = await instance.sqliteAsyncDB;
final result = await db.getAll(
'SELECT * FROM $filesTable ORDER BY $columnCreationTime DESC',
);


@ -46,7 +46,6 @@ class HomeGalleryWidget extends StatelessWidget {
result = await FilesDB.instance.getAllLocalAndUploadedFiles(
creationStartTime,
creationEndTime,
ownerID!,
limit: limit,
asc: asc,
filterOptions: filterOptions,


@ -108,6 +108,7 @@ class GalleryState extends State<Gallery> {
@override
void initState() {
super.initState();
_logTag =
"Gallery_${widget.tagPrefix}${kDebugMode ? "_" + widget.albumName! : ""}";
_logger = Logger(_logTag);
@ -172,7 +173,6 @@ class GalleryState extends State<Gallery> {
_setFilesAndReload(result.files);
}
});
super.initState();
}
void _setFilesAndReload(List<EnteFile> files) {


@ -2,6 +2,7 @@ import "dart:math";
import "package:flutter/foundation.dart";
import "package:flutter/material.dart";
import "package:flutter_animate/flutter_animate.dart";
import "package:modal_bottom_sheet/modal_bottom_sheet.dart";
import "package:photos/core/configuration.dart";
import "package:photos/db/files_db.dart";
@ -15,6 +16,7 @@ import "package:photos/theme/colors.dart";
import "package:photos/theme/ente_theme.dart";
import "package:photos/ui/actions/collection/collection_file_actions.dart";
import "package:photos/ui/actions/collection/collection_sharing_actions.dart";
import "package:photos/ui/common/loading_widget.dart";
import "package:photos/ui/components/bottom_of_title_bar_widget.dart";
import "package:photos/ui/components/buttons/button_widget.dart";
import "package:photos/ui/components/models/button_type.dart";
@ -91,33 +93,9 @@ class AddPhotosPhotoWidget extends StatelessWidget {
showCloseButton: true,
),
Expanded(
child: Gallery(
inSelectionMode: true,
asyncLoader: (
creationStartTime,
creationEndTime, {
limit,
asc,
}) {
return FilesDB.instance
.getAllPendingOrUploadedFiles(
creationStartTime,
creationEndTime,
Configuration.instance.getUserID()!,
limit: limit,
asc: asc,
filterOptions: DBFilterOptions(
hideIgnoredForUpload: true,
dedupeUploadID: true,
ignoredCollectionIDs: hiddenCollectionIDs,
),
applyOwnerCheck: true,
);
},
tagPrefix: "pick_add_photos_gallery",
child: DelayedGallery(
hiddenCollectionIDs: hiddenCollectionIDs,
selectedFiles: selectedFiles,
showSelectAllByDefault: true,
sortAsyncFn: () => false,
),
),
],
@ -227,3 +205,71 @@ class AddPhotosPhotoWidget extends StatelessWidget {
}
}
}
class DelayedGallery extends StatefulWidget {
const DelayedGallery({
super.key,
required this.hiddenCollectionIDs,
required this.selectedFiles,
});
final Set<int> hiddenCollectionIDs;
final SelectedFiles selectedFiles;
@override
State<DelayedGallery> createState() => _DelayedGalleryState();
}
class _DelayedGalleryState extends State<DelayedGallery> {
bool _showGallery = false;
@override
void initState() {
super.initState();
Future.delayed(const Duration(milliseconds: 500), () {
if (mounted) {
setState(() {
_showGallery = true;
});
}
});
}
@override
Widget build(BuildContext context) {
if (_showGallery) {
return Gallery(
inSelectionMode: true,
asyncLoader: (
creationStartTime,
creationEndTime, {
limit,
asc,
}) {
return FilesDB.instance.getAllPendingOrUploadedFiles(
creationStartTime,
creationEndTime,
Configuration.instance.getUserID()!,
limit: limit,
asc: asc,
filterOptions: DBFilterOptions(
hideIgnoredForUpload: true,
dedupeUploadID: true,
ignoredCollectionIDs: widget.hiddenCollectionIDs,
),
applyOwnerCheck: true,
);
},
tagPrefix: "pick_add_photos_gallery",
selectedFiles: widget.selectedFiles,
showSelectAllByDefault: true,
sortAsyncFn: () => false,
).animate().fadeIn(
duration: const Duration(milliseconds: 175),
curve: Curves.easeOutCirc,
);
} else {
return const EnteLoadingWidget();
}
}
}


@ -12,7 +12,7 @@ description: ente photos application
# Read more about iOS versioning at
# https://developer.apple.com/library/archive/documentation/General/Reference/InfoPlistKeyReference/Articles/CoreFoundationKeys.html
version: 0.8.77+597
version: 0.8.78+598
publish_to: none
environment:


@ -52,7 +52,7 @@ func (c *Controller) PaymentUpgradeOrDowngradeCron() {
return
}
if len(bonusPenaltyCandidates) > 0 {
logger.WithField("count", len(bonusPenaltyCandidates)).Error("candidates found for downgrade penalty")
// todo: implement downgrade penalty
logger.WithField("count", len(bonusPenaltyCandidates)).Warn("candidates found for downgrade penalty")
}
}


@ -2,6 +2,8 @@ package user
import (
"context"
"database/sql"
"errors"
"github.com/ente-io/museum/ente"
"github.com/ente-io/museum/pkg/utils/auth"
"github.com/ente-io/stacktrace"
@ -88,7 +90,11 @@ func (c *UserController) UpdateSrpAndKeyAttributes(context *gin.Context,
func (c *UserController) GetSRPAttributes(context *gin.Context, email string) (*ente.GetSRPAttributesResponse, error) {
userID, err := c.UserRepo.GetUserIDWithEmail(email)
if err != nil {
return nil, stacktrace.Propagate(err, "user does not exist")
if errors.Is(err, sql.ErrNoRows) {
return nil, stacktrace.Propagate(ente.ErrNotFound, "user does not exist")
} else {
return nil, stacktrace.Propagate(err, "failed to get user")
}
}
srpAttributes, err := c.UserAuthRepo.GetSRPAttributes(userID)
if err != nil {


@ -30,6 +30,7 @@ func Error(c *gin.Context, err error) {
// echo "GET /ping HTTP/1.0\r\nContent-Length: 300\r\n\r\n" | nc localhost 8080
if errors.Is(err, ente.ErrStorageLimitExceeded) ||
errors.Is(err, ente.ErrNoActiveSubscription) ||
errors.Is(err, ente.ErrInvalidPassword) ||
errors.Is(err, io.ErrUnexpectedEOF) ||
errors.Is(err, syscall.EPIPE) ||
errors.Is(err, syscall.ECONNRESET) {


@ -1,5 +1,6 @@
import { CustomHead } from "@/next/components/Head";
import { setupI18n } from "@/next/i18n";
import { logUnhandledErrorsAndRejections } from "@/next/log-web";
import { APPS, APP_TITLES } from "@ente/shared/apps/constants";
import { Overlay } from "@ente/shared/components/Container";
import DialogBoxV2 from "@ente/shared/components/DialogBoxV2";
@ -54,6 +55,8 @@ export default function App({ Component, pageProps }: AppProps) {
useEffect(() => {
setupI18n().finally(() => setIsI18nReady(true));
logUnhandledErrorsAndRejections(true);
return () => logUnhandledErrorsAndRejections(false);
}, []);
const setupPackageName = () => {


@ -1,6 +1,9 @@
import { CustomHead } from "@/next/components/Head";
import { setupI18n } from "@/next/i18n";
import { logStartupBanner } from "@/next/log-web";
import {
logStartupBanner,
logUnhandledErrorsAndRejections,
} from "@/next/log-web";
import {
APPS,
APP_TITLES,
@ -68,9 +71,11 @@ export default function App({ Component, pageProps }: AppProps) {
setupI18n().finally(() => setIsI18nReady(true));
const userId = (getData(LS_KEYS.USER) as User)?.id;
logStartupBanner(APPS.AUTH, userId);
logUnhandledErrorsAndRejections(true);
HTTPService.setHeaders({
"X-Client-Package": CLIENT_PACKAGE_NAMES.get(APPS.AUTH),
});
return () => logUnhandledErrorsAndRejections(false);
}, []);
const setUserOnline = () => setOffline(false);


@ -1,12 +1,20 @@
import { CustomHead } from "@/next/components/Head";
import { logUnhandledErrorsAndRejections } from "@/next/log-web";
import { APPS, APP_TITLES } from "@ente/shared/apps/constants";
import { getTheme } from "@ente/shared/themes";
import { THEME_COLOR } from "@ente/shared/themes/constants";
import { CssBaseline, ThemeProvider } from "@mui/material";
import type { AppProps } from "next/app";
import { useEffect } from "react";
import "styles/global.css";
export default function App({ Component, pageProps }: AppProps) {
useEffect(() => {
logUnhandledErrorsAndRejections(true);
return () => logUnhandledErrorsAndRejections(false);
}, []);
return (
<>
<CustomHead title={APP_TITLES.get(APPS.PHOTOS)} />


@ -12,8 +12,7 @@ export const DEFAULT_ML_SYNC_CONFIG: MLSyncConfig = {
batchSize: 200,
imageSource: "Original",
faceDetection: {
method: "BlazeFace",
minFaceSize: 32,
method: "YoloFace",
},
faceCrop: {
enabled: true,
@ -28,6 +27,10 @@ export const DEFAULT_ML_SYNC_CONFIG: MLSyncConfig = {
faceAlignment: {
method: "ArcFace",
},
blurDetection: {
method: "Laplacian",
threshold: 15,
},
faceEmbedding: {
method: "MobileFaceNet",
faceSize: 112,
@ -70,7 +73,7 @@ export const ML_SYNC_DOWNLOAD_TIMEOUT_MS = 300000;
export const MAX_FACE_DISTANCE_PERCENT = Math.sqrt(2) / 100;
export const MAX_ML_SYNC_ERROR_COUNT = 4;
export const MAX_ML_SYNC_ERROR_COUNT = 1;
export const TEXT_DETECTION_TIMEOUT_MS = [10000, 30000, 60000, 120000, 240000];
@ -81,6 +84,7 @@ export const BLAZEFACE_SCORE_THRESHOLD = 0.75;
export const BLAZEFACE_PASS1_SCORE_THRESHOLD = 0.4;
export const BLAZEFACE_FACE_SIZE = 112;
export const MOBILEFACENET_FACE_SIZE = 112;
export const MOBILEFACENET_EMBEDDING_SIZE = 192;
// scene detection model takes fixed-shaped (224x224) inputs
// https://tfhub.dev/sayannath/lite-model/image-scene/1


@ -1,7 +1,10 @@
import { CustomHead } from "@/next/components/Head";
import { setupI18n } from "@/next/i18n";
import log from "@/next/log";
import { logStartupBanner } from "@/next/log-web";
import {
logStartupBanner,
logUnhandledErrorsAndRejections,
} from "@/next/log-web";
import { AppUpdateInfo } from "@/next/types/ipc";
import {
APPS,
@ -147,35 +150,35 @@ export default function App({ Component, pageProps }: AppProps) {
setupI18n().finally(() => setIsI18nReady(true));
const userId = (getData(LS_KEYS.USER) as User)?.id;
logStartupBanner(APPS.PHOTOS, userId);
logUnhandledErrorsAndRejections(true);
HTTPService.setHeaders({
"X-Client-Package": CLIENT_PACKAGE_NAMES.get(APPS.PHOTOS),
});
return () => logUnhandledErrorsAndRejections(false);
}, []);
useEffect(() => {
const electron = globalThis.electron;
if (electron) {
const showUpdateDialog = (updateInfo: AppUpdateInfo) => {
if (updateInfo.autoUpdatable) {
setDialogMessage(
getUpdateReadyToInstallMessage(updateInfo),
);
} else {
setNotificationAttributes({
endIcon: <ArrowForward />,
variant: "secondary",
message: t("UPDATE_AVAILABLE"),
onClick: () =>
setDialogMessage(
getUpdateAvailableForDownloadMessage(
updateInfo,
),
),
});
}
};
electron.registerUpdateEventListener(showUpdateDialog);
}
if (!electron) return;
const showUpdateDialog = (updateInfo: AppUpdateInfo) => {
if (updateInfo.autoUpdatable) {
setDialogMessage(getUpdateReadyToInstallMessage(updateInfo));
} else {
setNotificationAttributes({
endIcon: <ArrowForward />,
variant: "secondary",
message: t("UPDATE_AVAILABLE"),
onClick: () =>
setDialogMessage(
getUpdateAvailableForDownloadMessage(updateInfo),
),
});
}
};
electron.onAppUpdateAvailable(showUpdateDialog);
return () => electron.onAppUpdateAvailable(undefined);
}, []);
useEffect(() => {


@ -105,7 +105,7 @@ import { AppContext } from "pages/_app";
import { clipService } from "services/clip-service";
import { constructUserIDToEmailMap } from "services/collectionService";
import downloadManager from "services/download";
import { syncEmbeddings } from "services/embeddingService";
import { syncEmbeddings, syncFileEmbeddings } from "services/embeddingService";
import { syncEntities } from "services/entityService";
import locationSearchService from "services/locationSearchService";
import { getLocalTrashedFiles, syncTrash } from "services/trashService";
@ -363,16 +363,14 @@ export default function Gallery() {
}, SYNC_INTERVAL_IN_MICROSECONDS);
if (electron) {
void clipService.setupOnFileUploadListener();
electron.registerForegroundEventListener(() => {
syncWithRemote(false, true);
});
electron.onMainWindowFocus(() => syncWithRemote(false, true));
}
};
main();
return () => {
clearInterval(syncInterval.current);
if (electron) {
electron.registerForegroundEventListener(() => {});
electron.onMainWindowFocus(undefined);
clipService.removeOnFileUploadListener();
}
};
@ -704,6 +702,10 @@ export default function Gallery() {
await syncEntities();
await syncMapEnabled();
await syncEmbeddings();
const electron = globalThis.electron;
if (electron) {
await syncFileEmbeddings();
}
if (clipService.isPlatformSupported()) {
void clipService.scheduleImageEmbeddingExtraction();
}


@ -133,9 +133,9 @@ export default function LandingPage() {
const electron = globalThis.electron;
if (!key && electron) {
try {
key = await electron.getEncryptionKey();
key = await electron.encryptionKey();
} catch (e) {
log.error("getEncryptionKey failed", e);
log.error("Failed to get encryption key from electron", e);
}
if (key) {
await saveKeyInSessionStore(


@ -13,7 +13,11 @@ import type {
PutEmbeddingRequest,
} from "types/embedding";
import { EnteFile } from "types/file";
import { getLatestVersionEmbeddings } from "utils/embedding";
import {
getLatestVersionEmbeddings,
getLatestVersionFileEmbeddings,
} from "utils/embedding";
import { FileML } from "utils/machineLearning/mldataMappers";
import { getLocalCollections } from "./collectionService";
import { getAllLocalFiles } from "./fileService";
import { getLocalTrashedFiles } from "./trashService";
@ -24,6 +28,7 @@ const DIFF_LIMIT = 500;
const EMBEDDINGS_TABLE_V1 = "embeddings";
const EMBEDDINGS_TABLE = "embeddings_v2";
const FILE_EMBEDING_TABLE = "file_embeddings";
const EMBEDDING_SYNC_TIME_TABLE = "embedding_sync_time";
export const getAllLocalEmbeddings = async () => {
@ -38,6 +43,15 @@ export const getAllLocalEmbeddings = async () => {
return embeddings;
};
export const getFileMLEmbeddings = async (): Promise<FileML[]> => {
const embeddings: Array<FileML> =
await localForage.getItem<FileML[]>(FILE_EMBEDING_TABLE);
if (!embeddings) {
return [];
}
return embeddings;
};
export const getLocalEmbeddings = async () => {
const embeddings = await getAllLocalEmbeddings();
return embeddings.filter((embedding) => embedding.model === "onnx-clip");
@ -140,6 +154,83 @@ export const syncEmbeddings = async () => {
}
};
export const syncFileEmbeddings = async () => {
const models: EmbeddingModel[] = ["file-ml-clip-face"];
try {
let allEmbeddings: FileML[] = await getFileMLEmbeddings();
const localFiles = await getAllLocalFiles();
const hiddenAlbums = await getLocalCollections("hidden");
const localTrashFiles = await getLocalTrashedFiles();
const fileIdToKeyMap = new Map<number, string>();
const allLocalFiles = [...localFiles, ...localTrashFiles];
allLocalFiles.forEach((file) => {
fileIdToKeyMap.set(file.id, file.key);
});
await cleanupDeletedEmbeddings(allLocalFiles, allEmbeddings);
log.info(`Syncing embeddings localCount: ${allEmbeddings.length}`);
for (const model of models) {
let modelLastSinceTime = await getModelEmbeddingSyncTime(model);
log.info(
`Syncing ${model} model's embeddings sinceTime: ${modelLastSinceTime}`,
);
let response: GetEmbeddingDiffResponse;
do {
response = await getEmbeddingsDiff(modelLastSinceTime, model);
if (!response.diff?.length) {
return;
}
const newEmbeddings = await Promise.all(
response.diff.map(async (embedding) => {
try {
const worker =
await ComlinkCryptoWorker.getInstance();
const fileKey = fileIdToKeyMap.get(
embedding.fileID,
);
if (!fileKey) {
throw Error(CustomError.FILE_NOT_FOUND);
}
const decryptedData = await worker.decryptMetadata(
embedding.encryptedEmbedding,
embedding.decryptionHeader,
fileIdToKeyMap.get(embedding.fileID),
);
return {
...decryptedData,
updatedAt: embedding.updatedAt,
} as unknown as FileML;
} catch (e) {
let hasHiddenAlbums = false;
if (e.message === CustomError.FILE_NOT_FOUND) {
hasHiddenAlbums = hiddenAlbums?.length > 0;
}
log.error(
`decryptEmbedding failed for file (hasHiddenAlbums: ${hasHiddenAlbums})`,
e,
);
}
}),
);
allEmbeddings = getLatestVersionFileEmbeddings([
...allEmbeddings,
...newEmbeddings,
]);
if (response.diff.length) {
modelLastSinceTime = response.diff.slice(-1)[0].updatedAt;
}
await localForage.setItem(FILE_EMBEDING_TABLE, allEmbeddings);
await setModelEmbeddingSyncTime(model, modelLastSinceTime);
log.info(
`Syncing embeddings syncedEmbeddingsCount: ${allEmbeddings.length}`,
);
} while (response.diff.length === DIFF_LIMIT);
}
} catch (e) {
log.error("Sync embeddings failed", e);
}
};
export const getEmbeddingsDiff = async (
sinceTime: number,
model: EmbeddingModel,
@ -173,7 +264,8 @@ export const putEmbedding = async (
try {
const token = getToken();
if (!token) {
return;
log.info("putEmbedding failed: token not found");
throw Error(CustomError.TOKEN_MISSING);
}
const resp = await HTTPService.put(
`${ENDPOINT}/embeddings`,
@ -192,7 +284,7 @@ export const putEmbedding = async (
export const cleanupDeletedEmbeddings = async (
allLocalFiles: EnteFile[],
allLocalEmbeddings: Embedding[],
allLocalEmbeddings: Embedding[] | FileML[],
) => {
const activeFileIds = new Set<number>();
allLocalFiles.forEach((file) => {


@ -51,6 +51,11 @@ class BlazeFaceDetectionService implements FaceDetectionService {
this.desiredFaceSize = desiredFaceSize;
}
public getRelativeDetection(): FaceDetection {
// TODO(MR): onnx-yolo
throw new Error();
}
private async init() {
this.blazeFaceModel = blazeFaceLoad({
maxFaces: BLAZEFACE_MAX_FACES,

View file

@ -8,7 +8,7 @@ import {
import { imageBitmapToBlob } from "utils/image";
import {
areFaceIdsSame,
extractFaceImages,
extractFaceImagesToFloat32,
getFaceId,
getLocalFile,
getOriginalImageBitmap,
@ -49,8 +49,12 @@ class FaceService {
syncContext,
fileContext,
);
const timerId = `faceDetection-${fileContext.enteFile.id}`;
console.time(timerId);
const faceDetections =
await syncContext.faceDetectionService.detectFaces(imageBitmap);
console.timeEnd(timerId);
console.log("faceDetections: ", faceDetections?.length);
// log.info('3 TF Memory stats: ',JSON.stringify(tf.memory()));
// TODO: reenable faces filtering based on width
const detectedFaces = faceDetections?.map((detection) => {
@ -104,7 +108,7 @@ class FaceService {
async syncFileFaceAlignments(
syncContext: MLSyncContext,
fileContext: MLSyncFileContext,
) {
): Promise<Float32Array> {
const { oldMlFile, newMlFile } = fileContext;
if (
!fileContext.newDetection &&
@ -123,18 +127,37 @@ class FaceService {
newMlFile.faceAlignmentMethod = syncContext.faceAlignmentService.method;
fileContext.newAlignment = true;
const imageBitmap =
fileContext.imageBitmap ||
(await ReaderService.getImageBitmap(syncContext, fileContext));
// Execute the face alignment calculations
for (const face of newMlFile.faces) {
face.alignment = syncContext.faceAlignmentService.getFaceAlignment(
face.detection,
);
}
// Extract face images and convert to Float32Array
const faceAlignments = newMlFile.faces.map((f) => f.alignment);
const faceImages = await extractFaceImagesToFloat32(
faceAlignments,
syncContext.faceEmbeddingService.faceSize,
imageBitmap,
);
const blurValues =
syncContext.blurDetectionService.detectBlur(faceImages);
newMlFile.faces.forEach((f, i) => (f.blurValue = blurValues[i]));
imageBitmap.close();
log.info("[MLService] alignedFaces: ", newMlFile.faces?.length);
// log.info('4 TF Memory stats: ',JSON.stringify(tf.memory()));
return faceImages;
}
async syncFileFaceEmbeddings(
syncContext: MLSyncContext,
fileContext: MLSyncFileContext,
alignedFacesInput: Float32Array,
) {
const { oldMlFile, newMlFile } = fileContext;
if (
@ -156,22 +179,43 @@ class FaceService {
// TODO: when not storing face crops, image will be needed to extract faces
// fileContext.imageBitmap ||
// (await this.getImageBitmap(syncContext, fileContext));
const faceImages = await extractFaceImages(
newMlFile.faces,
syncContext.faceEmbeddingService.faceSize,
);
const embeddings =
await syncContext.faceEmbeddingService.getFaceEmbeddings(
faceImages,
alignedFacesInput,
);
faceImages.forEach((faceImage) => faceImage.close());
newMlFile.faces.forEach((f, i) => (f.embedding = embeddings[i]));
log.info("[MLService] facesWithEmbeddings: ", newMlFile.faces.length);
// log.info('5 TF Memory stats: ',JSON.stringify(tf.memory()));
}
async syncFileFaceMakeRelativeDetections(
syncContext: MLSyncContext,
fileContext: MLSyncFileContext,
) {
const { oldMlFile, newMlFile } = fileContext;
if (
!fileContext.newAlignment &&
!isDifferentOrOld(
oldMlFile?.faceEmbeddingMethod,
syncContext.faceEmbeddingService.method,
) &&
areFaceIdsSame(newMlFile.faces, oldMlFile?.faces)
) {
return;
}
for (let i = 0; i < newMlFile.faces.length; i++) {
const face = newMlFile.faces[i];
if (face.detection.box.x + face.detection.box.width < 2) continue; // Skip if somehow already relative
face.detection =
syncContext.faceDetectionService.getRelativeDetection(
face.detection,
newMlFile.imageDimensions,
);
}
}
async saveFaceCrop(
imageBitmap: ImageBitmap,
face: Face,

View file

@ -0,0 +1,131 @@
import { MOBILEFACENET_FACE_SIZE } from "constants/mlConfig";
import {
BlurDetectionMethod,
BlurDetectionService,
Versioned,
} from "types/machineLearning";
import { createGrayscaleIntMatrixFromNormalized2List } from "utils/image";
class LaplacianBlurDetectionService implements BlurDetectionService {
public method: Versioned<BlurDetectionMethod>;
public constructor() {
this.method = {
value: "Laplacian",
version: 1,
};
}
public detectBlur(alignedFaces: Float32Array): number[] {
const numFaces = Math.round(
alignedFaces.length /
(MOBILEFACENET_FACE_SIZE * MOBILEFACENET_FACE_SIZE * 3),
);
const blurValues: number[] = [];
for (let i = 0; i < numFaces; i++) {
const faceImage = createGrayscaleIntMatrixFromNormalized2List(
alignedFaces,
i,
);
const laplacian = this.applyLaplacian(faceImage);
const variance = this.calculateVariance(laplacian);
blurValues.push(variance);
}
return blurValues;
}
private calculateVariance(matrix: number[][]): number {
const numRows = matrix.length;
const numCols = matrix[0].length;
const totalElements = numRows * numCols;
// Calculate the mean
let mean: number = 0;
matrix.forEach((row) => {
row.forEach((value) => {
mean += value;
});
});
mean /= totalElements;
// Calculate the variance
let variance: number = 0;
matrix.forEach((row) => {
row.forEach((value) => {
const diff: number = value - mean;
variance += diff * diff;
});
});
variance /= totalElements;
return variance;
}
private padImage(image: number[][]): number[][] {
const numRows = image.length;
const numCols = image[0].length;
// Create a new matrix with extra padding
const paddedImage: number[][] = Array.from(
{ length: numRows + 2 },
() => new Array(numCols + 2).fill(0),
);
// Copy original image into the center of the padded image
for (let i = 0; i < numRows; i++) {
for (let j = 0; j < numCols; j++) {
paddedImage[i + 1][j + 1] = image[i][j];
}
}
// Reflect padding
// Top and bottom rows
for (let j = 1; j <= numCols; j++) {
paddedImage[0][j] = paddedImage[2][j]; // Top row
paddedImage[numRows + 1][j] = paddedImage[numRows - 1][j]; // Bottom row
}
// Left and right columns
for (let i = 0; i < numRows + 2; i++) {
paddedImage[i][0] = paddedImage[i][2]; // Left column
paddedImage[i][numCols + 1] = paddedImage[i][numCols - 1]; // Right column
}
return paddedImage;
}
private applyLaplacian(image: number[][]): number[][] {
const paddedImage: number[][] = this.padImage(image);
const numRows = image.length;
const numCols = image[0].length;
// Create an output image initialized to 0
const outputImage: number[][] = Array.from({ length: numRows }, () =>
new Array(numCols).fill(0),
);
// Define the Laplacian kernel
const kernel: number[][] = [
[0, 1, 0],
[1, -4, 1],
[0, 1, 0],
];
// Apply the kernel to each pixel
for (let i = 0; i < numRows; i++) {
for (let j = 0; j < numCols; j++) {
let sum = 0;
for (let ki = 0; ki < 3; ki++) {
for (let kj = 0; kj < 3; kj++) {
sum += paddedImage[i + ki][j + kj] * kernel[ki][kj];
}
}
// Adjust the output value if necessary (e.g., clipping)
outputImage[i][j] = sum;
}
}
return outputImage;
}
}
export default new LaplacianBlurDetectionService();
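The blur score returned per face is the variance of the Laplacian response: sharp faces have strong edges and hence high variance, blurry ones low. A hedged usage sketch follows; the import path and threshold value are illustrative, not the app's actual blurDetection config:

import laplacianBlurDetectionService from "services/machineLearning/laplacianBlurDetectionService";

const BLUR_THRESHOLD = 15; // hypothetical; lower variance means blurrier

// alignedFaces packs N faces of 112x112x3 normalized floats, as produced
// by extractFaceImagesToFloat32.
function flagBlurryFaces(alignedFaces: Float32Array): boolean[] {
    const variances = laplacianBlurDetectionService.detectBlur(alignedFaces);
    return variances.map((v) => v < BLUR_THRESHOLD);
}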

View file

@ -6,6 +6,8 @@ import { DedicatedCryptoWorker } from "@ente/shared/crypto/internal/crypto.worke
import PQueue from "p-queue";
import { EnteFile } from "types/file";
import {
BlurDetectionMethod,
BlurDetectionService,
ClusteringMethod,
ClusteringService,
Face,
@ -28,19 +30,20 @@ import {
import { logQueueStats } from "utils/machineLearning";
import arcfaceAlignmentService from "./arcfaceAlignmentService";
import arcfaceCropService from "./arcfaceCropService";
import blazeFaceDetectionService from "./blazeFaceDetectionService";
import dbscanClusteringService from "./dbscanClusteringService";
import hdbscanClusteringService from "./hdbscanClusteringService";
import imageSceneService from "./imageSceneService";
import laplacianBlurDetectionService from "./laplacianBlurDetectionService";
import mobileFaceNetEmbeddingService from "./mobileFaceNetEmbeddingService";
import ssdMobileNetV2Service from "./ssdMobileNetV2Service";
import yoloFaceDetectionService from "./yoloFaceDetectionService";
export class MLFactory {
public static getFaceDetectionService(
method: FaceDetectionMethod,
): FaceDetectionService {
if (method === "BlazeFace") {
return blazeFaceDetectionService;
if (method === "YoloFace") {
return yoloFaceDetectionService;
}
throw Error("Unknon face detection method: " + method);
@ -84,6 +87,16 @@ export class MLFactory {
throw Error("Unknon face alignment method: " + method);
}
public static getBlurDetectionService(
method: BlurDetectionMethod,
): BlurDetectionService {
if (method === "Laplacian") {
return laplacianBlurDetectionService;
}
throw Error("Unknon blur detection method: " + method);
}
public static getFaceEmbeddingService(
method: FaceEmbeddingMethod,
): FaceEmbeddingService {
@ -131,6 +144,7 @@ export class LocalMLSyncContext implements MLSyncContext {
public faceDetectionService: FaceDetectionService;
public faceCropService: FaceCropService;
public faceAlignmentService: FaceAlignmentService;
public blurDetectionService: BlurDetectionService;
public faceEmbeddingService: FaceEmbeddingService;
public faceClusteringService: ClusteringService;
public objectDetectionService: ObjectDetectionService;
@ -178,6 +192,9 @@ export class LocalMLSyncContext implements MLSyncContext {
this.faceAlignmentService = MLFactory.getFaceAlignmentService(
this.config.faceAlignment.method,
);
this.blurDetectionService = MLFactory.getBlurDetectionService(
this.config.blurDetection.method,
);
this.faceEmbeddingService = MLFactory.getFaceEmbeddingService(
this.config.faceEmbedding.method,
);
@ -196,7 +213,7 @@ export class LocalMLSyncContext implements MLSyncContext {
this.nSyncedFiles = 0;
this.nSyncedFaces = 0;
this.concurrency = concurrency || getConcurrency();
this.concurrency = concurrency ?? getConcurrency();
log.info("Using concurrency: ", this.concurrency);
// timeout is added on downloads
@ -212,6 +229,7 @@ export class LocalMLSyncContext implements MLSyncContext {
public async getEnteWorker(id: number): Promise<any> {
const wid = id % this.enteWorkers.length;
console.log("getEnteWorker: ", id, wid);
if (!this.enteWorkers[wid]) {
this.comlinkCryptoWorker[wid] = getDedicatedCryptoWorker();
this.enteWorkers[wid] = await this.comlinkCryptoWorker[wid].remote;

View file

@ -1,11 +1,13 @@
import log from "@/next/log";
import { APPS } from "@ente/shared/apps/constants";
import ComlinkCryptoWorker from "@ente/shared/crypto";
import { CustomError, parseUploadErrorCodes } from "@ente/shared/error";
import "@tensorflow/tfjs-backend-cpu";
import "@tensorflow/tfjs-backend-webgl";
import * as tf from "@tensorflow/tfjs-core";
import { MAX_ML_SYNC_ERROR_COUNT } from "constants/mlConfig";
import downloadManager from "services/download";
import { putEmbedding } from "services/embeddingService";
import { getLocalFiles } from "services/fileService";
import { EnteFile } from "types/file";
import {
@ -15,6 +17,7 @@ import {
MlFileData,
} from "types/machineLearning";
import { getMLSyncConfig } from "utils/machineLearning/config";
import { LocalFileMlDataToServerFileMl } from "utils/machineLearning/mldataMappers";
import mlIDbStorage from "utils/storage/mlIDbStorage";
import FaceService from "./faceService";
import { MLFactory } from "./machineLearningFactory";
@ -34,11 +37,6 @@ class MachineLearningService {
}
await downloadManager.init(APPS.PHOTOS, { token });
// await this.init();
// Used to debug tf memory leak, all tf memory
// needs to be cleaned using tf.dispose or tf.tidy
// tf.engine().startScope();
const syncContext = await this.getSyncContext(token, userID);
@ -185,6 +183,50 @@ class MachineLearningService {
log.info("getOutOfSyncFiles", Date.now() - startTime, "ms");
}
// TODO: optimize, use indexdb indexes, move facecrops to cache to reduce io
// remove, already done
private async getUniqueOutOfSyncFilesNoIdx(
syncContext: MLSyncContext,
files: EnteFile[],
) {
const limit = syncContext.config.batchSize;
const mlVersion = syncContext.config.mlVersion;
const uniqueFiles: Map<number, EnteFile> = new Map<number, EnteFile>();
for (let i = 0; uniqueFiles.size < limit && i < files.length; i++) {
const mlFileData = await this.getMLFileData(files[i].id);
const mlFileVersion = mlFileData?.mlVersion || 0;
if (
!uniqueFiles.has(files[i].id) &&
(!mlFileData?.errorCount || mlFileData.errorCount < 2) &&
(mlFileVersion < mlVersion ||
syncContext.config.imageSource !== mlFileData.imageSource)
) {
uniqueFiles.set(files[i].id, files[i]);
}
}
return [...uniqueFiles.values()];
}
private async getOutOfSyncFilesNoIdx(syncContext: MLSyncContext) {
const existingFilesMap = await this.getLocalFilesMap(syncContext);
// existingFiles.sort(
// (a, b) => b.metadata.creationTime - a.metadata.creationTime
// );
console.time("getUniqueOutOfSyncFiles");
syncContext.outOfSyncFiles = await this.getUniqueOutOfSyncFilesNoIdx(
syncContext,
[...existingFilesMap.values()],
);
log.info("getUniqueOutOfSyncFiles");
log.info(
"Got unique outOfSyncFiles: ",
syncContext.outOfSyncFiles.length,
"for batchSize: ",
syncContext.config.batchSize,
);
}
private async syncFiles(syncContext: MLSyncContext) {
try {
const functions = syncContext.outOfSyncFiles.map(
@ -283,6 +325,11 @@ class MachineLearningService {
textDetectionTimeoutIndex?: number,
): Promise<MlFileData> {
try {
console.log(
"Start index for ",
enteFile.title ?? "no title",
enteFile.id,
);
const mlFileData = await this.syncFile(
syncContext,
enteFile,
@ -319,6 +366,12 @@ class MachineLearningService {
await this.persistMLFileSyncError(syncContext, enteFile, error);
syncContext.nSyncedFiles += 1;
} finally {
console.log(
"done index for ",
enteFile.title ?? "no title",
enteFile.id,
);
// addLogLine('TF Memory stats: ', JSON.stringify(tf.memory()));
log.info("TF Memory stats: ", JSON.stringify(tf.memory()));
}
}
@ -330,6 +383,7 @@ class MachineLearningService {
// eslint-disable-next-line @typescript-eslint/no-unused-vars
textDetectionTimeoutIndex?: number,
) {
console.log("Syncing for file" + enteFile.title);
const fileContext: MLSyncFileContext = { enteFile, localFile };
const oldMlFile =
(fileContext.oldMlFile = await this.getMLFileData(enteFile.id)) ??
@ -351,14 +405,20 @@ class MachineLearningService {
try {
await ReaderService.getImageBitmap(syncContext, fileContext);
await Promise.all([
this.syncFaceDetections(syncContext, fileContext),
ObjectService.syncFileObjectDetections(
syncContext,
fileContext,
),
this.syncFileAnalyzeFaces(syncContext, fileContext),
// ObjectService.syncFileObjectDetections(
// syncContext,
// fileContext
// ),
// TextService.syncFileTextDetections(
// syncContext,
// fileContext,
// textDetectionTimeoutIndex
// ),
]);
newMlFile.errorCount = 0;
newMlFile.lastErrorMessage = undefined;
await this.persistOnServer(newMlFile, enteFile);
await this.persistMLFileData(syncContext, newMlFile);
} catch (e) {
log.error("ml detection failed", e);
@ -379,6 +439,25 @@ class MachineLearningService {
return newMlFile;
}
private async persistOnServer(mlFileData: MlFileData, enteFile: EnteFile) {
const serverMl = LocalFileMlDataToServerFileMl(mlFileData);
log.info(mlFileData);
const comlinkCryptoWorker = await ComlinkCryptoWorker.getInstance();
const { file: encryptedEmbeddingData } =
await comlinkCryptoWorker.encryptMetadata(serverMl, enteFile.key);
log.info(
`putEmbedding embedding to server for file: ${enteFile.metadata.title} fileID: ${enteFile.id}`,
);
const res = await putEmbedding({
fileID: enteFile.id,
encryptedEmbedding: encryptedEmbeddingData.encryptedData,
decryptionHeader: encryptedEmbeddingData.decryptionHeader,
model: "file-ml-clip-face",
});
log.info("putEmbedding response: ", res);
}
public async init() {
if (this.initialized) {
return;
@ -448,7 +527,7 @@ class MachineLearningService {
await this.persistMLLibraryData(syncContext);
}
private async syncFaceDetections(
private async syncFileAnalyzeFaces(
syncContext: MLSyncContext,
fileContext: MLSyncFileContext,
) {
@ -459,9 +538,21 @@ class MachineLearningService {
if (newMlFile.faces && newMlFile.faces.length > 0) {
await FaceService.syncFileFaceCrops(syncContext, fileContext);
await FaceService.syncFileFaceAlignments(syncContext, fileContext);
const alignedFacesData = await FaceService.syncFileFaceAlignments(
syncContext,
fileContext,
);
await FaceService.syncFileFaceEmbeddings(syncContext, fileContext);
await FaceService.syncFileFaceEmbeddings(
syncContext,
fileContext,
alignedFacesData,
);
await FaceService.syncFileFaceMakeRelativeDetections(
syncContext,
fileContext,
);
}
log.info(
`face detection time taken ${fileContext.enteFile.id}`,

View file

@ -1,23 +1,38 @@
import log from "@/next/log";
import * as tf from "@tensorflow/tfjs-core";
import { TFLiteModel } from "@tensorflow/tfjs-tflite";
import { MOBILEFACENET_FACE_SIZE } from "constants/mlConfig";
import PQueue from "p-queue";
import {
MOBILEFACENET_EMBEDDING_SIZE,
MOBILEFACENET_FACE_SIZE,
} from "constants/mlConfig";
// import { TFLiteModel } from "@tensorflow/tfjs-tflite";
// import PQueue from "p-queue";
import {
FaceEmbedding,
FaceEmbeddingMethod,
FaceEmbeddingService,
Versioned,
} from "types/machineLearning";
import { imageBitmapsToTensor4D } from "utils/machineLearning";
// TODO(MR): onnx-yolo
// import * as ort from "onnxruntime-web";
// import { env } from "onnxruntime-web";
const ort: any = {};
import {
clamp,
getPixelBilinear,
normalizePixelBetweenMinus1And1,
} from "utils/image";
// TODO(MR): onnx-yolo
// env.wasm.wasmPaths = "/js/onnx/";
class MobileFaceNetEmbeddingService implements FaceEmbeddingService {
// TODO(MR): onnx-yolo
// private onnxInferenceSession?: ort.InferenceSession;
private onnxInferenceSession?: any;
public method: Versioned<FaceEmbeddingMethod>;
public faceSize: number;
private mobileFaceNetModel: Promise<TFLiteModel>;
private serialQueue: PQueue;
public constructor(faceSize: number = MOBILEFACENET_FACE_SIZE) {
this.method = {
value: "MobileFaceNet",
@ -25,81 +40,156 @@ class MobileFaceNetEmbeddingService implements FaceEmbeddingService {
};
this.faceSize = faceSize;
// TODO: set timeout
this.serialQueue = new PQueue({ concurrency: 1 });
}
private async init() {
// TODO: can also create new instance per new syncContext
const tflite = await import("@tensorflow/tfjs-tflite");
tflite.setWasmPath("/js/tflite/");
private async initOnnx() {
console.log("start ort mobilefacenet");
this.onnxInferenceSession = await ort.InferenceSession.create(
"/models/mobilefacenet/mobilefacenet_opset15.onnx",
);
const faceBatchSize = 1;
const data = new Float32Array(
faceBatchSize * 3 * this.faceSize * this.faceSize,
);
const inputTensor = new ort.Tensor("float32", data, [
faceBatchSize,
this.faceSize,
this.faceSize,
3,
]);
// TODO(MR): onnx-yolo
// const feeds: Record<string, ort.Tensor> = {};
const feeds: Record<string, any> = {};
const name = this.onnxInferenceSession.inputNames[0];
feeds[name] = inputTensor;
await this.onnxInferenceSession.run(feeds);
console.log("start end mobilefacenet");
}
this.mobileFaceNetModel = tflite.loadTFLiteModel(
"/models/mobilefacenet/mobilefacenet.tflite",
private async getOnnxInferenceSession() {
if (!this.onnxInferenceSession) {
await this.initOnnx();
}
return this.onnxInferenceSession;
}
private preprocessImageBitmapToFloat32(
imageBitmap: ImageBitmap,
requiredWidth: number = this.faceSize,
requiredHeight: number = this.faceSize,
maintainAspectRatio: boolean = true,
normFunction: (
pixelValue: number,
) => number = normalizePixelBetweenMinus1And1,
) {
// Create an OffscreenCanvas and set its size
const offscreenCanvas = new OffscreenCanvas(
imageBitmap.width,
imageBitmap.height,
);
const ctx = offscreenCanvas.getContext("2d");
ctx.drawImage(imageBitmap, 0, 0, imageBitmap.width, imageBitmap.height);
const imageData = ctx.getImageData(
0,
0,
imageBitmap.width,
imageBitmap.height,
);
const pixelData = imageData.data;
let scaleW = requiredWidth / imageBitmap.width;
let scaleH = requiredHeight / imageBitmap.height;
if (maintainAspectRatio) {
const scale = Math.min(
requiredWidth / imageBitmap.width,
requiredHeight / imageBitmap.height,
);
scaleW = scale;
scaleH = scale;
}
const scaledWidth = clamp(
Math.round(imageBitmap.width * scaleW),
0,
requiredWidth,
);
const scaledHeight = clamp(
Math.round(imageBitmap.height * scaleH),
0,
requiredHeight,
);
const processedImage = new Float32Array(
1 * requiredWidth * requiredHeight * 3,
);
log.info("loaded mobileFaceNetModel: ", tf.getBackend());
}
private async getMobileFaceNetModel() {
if (!this.mobileFaceNetModel) {
await this.init();
// Populate the Float32Array with normalized pixel values
for (let h = 0; h < requiredHeight; h++) {
for (let w = 0; w < requiredWidth; w++) {
let pixel: {
r: number;
g: number;
b: number;
};
if (w >= scaledWidth || h >= scaledHeight) {
pixel = { r: 114, g: 114, b: 114 };
} else {
pixel = getPixelBilinear(
w / scaleW,
h / scaleH,
pixelData,
imageBitmap.width,
imageBitmap.height,
);
}
const pixelIndex = 3 * (h * requiredWidth + w);
processedImage[pixelIndex] = normFunction(pixel.r);
processedImage[pixelIndex + 1] = normFunction(pixel.g);
processedImage[pixelIndex + 2] = normFunction(pixel.b);
}
}
return this.mobileFaceNetModel;
}
public getFaceEmbeddingTF(
faceTensor: tf.Tensor4D,
mobileFaceNetModel: TFLiteModel,
): tf.Tensor2D {
return tf.tidy(() => {
const normalizedFace = tf.sub(tf.div(faceTensor, 127.5), 1.0);
return mobileFaceNetModel.predict(normalizedFace) as tf.Tensor2D;
});
}
// Do not use this, use getFaceEmbedding which calls this through serialqueue
private async getFaceEmbeddingNoQueue(
faceImage: ImageBitmap,
): Promise<FaceEmbedding> {
const mobileFaceNetModel = await this.getMobileFaceNetModel();
const embeddingTensor = tf.tidy(() => {
const faceTensor = imageBitmapsToTensor4D([faceImage]);
const embeddingsTensor = this.getFaceEmbeddingTF(
faceTensor,
mobileFaceNetModel,
);
return tf.squeeze(embeddingsTensor, [0]);
});
const embedding = new Float32Array(await embeddingTensor.data());
embeddingTensor.dispose();
return embedding;
}
// TODO: TFLiteModel seems to not work concurrently,
// remove serialqueue if that is not the case
private async getFaceEmbedding(
faceImage: ImageBitmap,
): Promise<FaceEmbedding> {
// @ts-expect-error "TODO: Fix ML related type errors"
return this.serialQueue.add(() =>
this.getFaceEmbeddingNoQueue(faceImage),
);
return processedImage;
}
public async getFaceEmbeddings(
faceImages: Array<ImageBitmap>,
faceData: Float32Array,
): Promise<Array<FaceEmbedding>> {
return Promise.all(
faceImages.map((faceImage) => this.getFaceEmbedding(faceImage)),
const inputTensor = new ort.Tensor("float32", faceData, [
Math.round(faceData.length / (this.faceSize * this.faceSize * 3)),
this.faceSize,
this.faceSize,
3,
]);
// TODO(MR): onnx-yolo
// const feeds: Record<string, ort.Tensor> = {};
const feeds: Record<string, any> = {};
feeds["img_inputs"] = inputTensor;
const inferenceSession = await this.getOnnxInferenceSession();
// TODO(MR): onnx-yolo
// const runout: ort.InferenceSession.OnnxValueMapType =
const runout: any = await inferenceSession.run(feeds);
// const test = runout.embeddings;
// const test2 = test.cpuData;
const outputData = runout.embeddings["cpuData"] as Float32Array;
const embeddings = new Array<FaceEmbedding>(
outputData.length / MOBILEFACENET_EMBEDDING_SIZE,
);
for (let i = 0; i < embeddings.length; i++) {
embeddings[i] = new Float32Array(
outputData.slice(
i * MOBILEFACENET_EMBEDDING_SIZE,
(i + 1) * MOBILEFACENET_EMBEDDING_SIZE,
),
);
}
return embeddings;
}
public async dispose() {
this.mobileFaceNetModel = undefined;
const inferenceSession = await this.getOnnxInferenceSession();
inferenceSession?.release();
this.onnxInferenceSession = undefined;
}
}
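A hedged usage sketch of the new Float32Array-based API (the import path and packing assumptions are illustrative; the real call site is FaceService.syncFileFaceEmbeddings):

import mobileFaceNetEmbeddingService from "services/machineLearning/mobileFaceNetEmbeddingService";

// N aligned 112x112 RGB faces packed (HWC) into one Float32Array, e.g. via
// extractFaceImagesToFloat32, yield N vectors of MOBILEFACENET_EMBEDDING_SIZE.
async function embedFaces(packedFaces: Float32Array) {
    return mobileFaceNetEmbeddingService.getFaceEmbeddings(packedFaces);
}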

View file

@ -0,0 +1,331 @@
import {
BLAZEFACE_FACE_SIZE,
MAX_FACE_DISTANCE_PERCENT,
} from "constants/mlConfig";
import { Dimensions } from "types/image";
import {
FaceDetection,
FaceDetectionMethod,
FaceDetectionService,
Versioned,
} from "types/machineLearning";
import {
clamp,
getPixelBilinear,
normalizePixelBetween0And1,
} from "utils/image";
import { newBox } from "utils/machineLearning";
import { removeDuplicateDetections } from "utils/machineLearning/faceDetection";
import {
computeTransformToBox,
transformBox,
transformPoints,
} from "utils/machineLearning/transform";
import { Box, Point } from "../../../thirdparty/face-api/classes";
// TODO(MR): onnx-yolo
// import * as ort from "onnxruntime-web";
// import { env } from "onnxruntime-web";
const ort: any = {};
// TODO(MR): onnx-yolo
// env.wasm.wasmPaths = "/js/onnx/";
class YoloFaceDetectionService implements FaceDetectionService {
// TODO(MR): onnx-yolo
// private onnxInferenceSession?: ort.InferenceSession;
private onnxInferenceSession?: any;
public method: Versioned<FaceDetectionMethod>;
private desiredFaceSize;
public constructor(desiredFaceSize: number = BLAZEFACE_FACE_SIZE) {
this.method = {
value: "YoloFace",
version: 1,
};
this.desiredFaceSize = desiredFaceSize;
}
private async initOnnx() {
console.log("start ort");
this.onnxInferenceSession = await ort.InferenceSession.create(
"/models/yoloface/yolov5s_face_640_640_dynamic.onnx",
);
const data = new Float32Array(1 * 3 * 640 * 640);
const inputTensor = new ort.Tensor("float32", data, [1, 3, 640, 640]);
// TODO(MR): onnx-yolo
// const feeds: Record<string, ort.Tensor> = {};
const feeds: Record<string, any> = {};
const name = this.onnxInferenceSession.inputNames[0];
feeds[name] = inputTensor;
await this.onnxInferenceSession.run(feeds);
console.log("start end");
}
private async getOnnxInferenceSession() {
if (!this.onnxInferenceSession) {
await this.initOnnx();
}
return this.onnxInferenceSession;
}
private preprocessImageBitmapToFloat32ChannelsFirst(
imageBitmap: ImageBitmap,
requiredWidth: number,
requiredHeight: number,
maintainAspectRatio: boolean = true,
normFunction: (
pixelValue: number,
) => number = normalizePixelBetween0And1,
) {
// Create an OffscreenCanvas and set its size
const offscreenCanvas = new OffscreenCanvas(
imageBitmap.width,
imageBitmap.height,
);
const ctx = offscreenCanvas.getContext("2d");
ctx.drawImage(imageBitmap, 0, 0, imageBitmap.width, imageBitmap.height);
const imageData = ctx.getImageData(
0,
0,
imageBitmap.width,
imageBitmap.height,
);
const pixelData = imageData.data;
let scaleW = requiredWidth / imageBitmap.width;
let scaleH = requiredHeight / imageBitmap.height;
if (maintainAspectRatio) {
const scale = Math.min(
requiredWidth / imageBitmap.width,
requiredHeight / imageBitmap.height,
);
scaleW = scale;
scaleH = scale;
}
const scaledWidth = clamp(
Math.round(imageBitmap.width * scaleW),
0,
requiredWidth,
);
const scaledHeight = clamp(
Math.round(imageBitmap.height * scaleH),
0,
requiredHeight,
);
const processedImage = new Float32Array(
1 * 3 * requiredWidth * requiredHeight,
);
// Populate the Float32Array with normalized pixel values
let pixelIndex = 0;
const channelOffsetGreen = requiredHeight * requiredWidth;
const channelOffsetBlue = 2 * requiredHeight * requiredWidth;
for (let h = 0; h < requiredHeight; h++) {
for (let w = 0; w < requiredWidth; w++) {
let pixel: {
r: number;
g: number;
b: number;
};
if (w >= scaledWidth || h >= scaledHeight) {
pixel = { r: 114, g: 114, b: 114 };
} else {
pixel = getPixelBilinear(
w / scaleW,
h / scaleH,
pixelData,
imageBitmap.width,
imageBitmap.height,
);
}
processedImage[pixelIndex] = normFunction(pixel.r);
processedImage[pixelIndex + channelOffsetGreen] = normFunction(
pixel.g,
);
processedImage[pixelIndex + channelOffsetBlue] = normFunction(
pixel.b,
);
pixelIndex++;
}
}
return {
data: processedImage,
originalSize: {
width: imageBitmap.width,
height: imageBitmap.height,
},
newSize: { width: scaledWidth, height: scaledHeight },
};
}
/**
* @deprecated The method should not be used
*/
private imageBitmapToTensorData(imageBitmap) {
// Create an OffscreenCanvas and set its size
const offscreenCanvas = new OffscreenCanvas(
imageBitmap.width,
imageBitmap.height,
);
const ctx = offscreenCanvas.getContext("2d");
ctx.drawImage(imageBitmap, 0, 0, imageBitmap.width, imageBitmap.height);
const imageData = ctx.getImageData(
0,
0,
imageBitmap.width,
imageBitmap.height,
);
const pixelData = imageData.data;
const data = new Float32Array(
1 * 3 * imageBitmap.width * imageBitmap.height,
);
// Populate the Float32Array with normalized pixel values
for (let i = 0; i < pixelData.length; i += 4) {
// Normalize pixel values to the range [0, 1]
data[i / 4] = pixelData[i] / 255.0; // Red channel
data[i / 4 + imageBitmap.width * imageBitmap.height] =
pixelData[i + 1] / 255.0; // Green channel
data[i / 4 + 2 * imageBitmap.width * imageBitmap.height] =
pixelData[i + 2] / 255.0; // Blue channel
}
return {
data: data,
shape: [1, 3, imageBitmap.width, imageBitmap.height],
};
}
// The rowOutput is a Float32Array of shape [25200, 16], where each row represents a bounding box.
private getFacesFromYoloOutput(
rowOutput: Float32Array,
minScore: number,
): Array<FaceDetection> {
const faces: Array<FaceDetection> = [];
// iterate over each row
for (let i = 0; i < rowOutput.length; i += 16) {
const score = rowOutput[i + 4];
if (score < minScore) {
continue;
}
// The first 4 values represent the bounding box in center form (xCenter, yCenter, width, height)
const xCenter = rowOutput[i];
const yCenter = rowOutput[i + 1];
const width = rowOutput[i + 2];
const height = rowOutput[i + 3];
const xMin = xCenter - width / 2.0; // topLeft
const yMin = yCenter - height / 2.0; // topLeft
const leftEyeX = rowOutput[i + 5];
const leftEyeY = rowOutput[i + 6];
const rightEyeX = rowOutput[i + 7];
const rightEyeY = rowOutput[i + 8];
const noseX = rowOutput[i + 9];
const noseY = rowOutput[i + 10];
const leftMouthX = rowOutput[i + 11];
const leftMouthY = rowOutput[i + 12];
const rightMouthX = rowOutput[i + 13];
const rightMouthY = rowOutput[i + 14];
const box = new Box({
x: xMin,
y: yMin,
width: width,
height: height,
});
const probability = score as number;
const landmarks = [
new Point(leftEyeX, leftEyeY),
new Point(rightEyeX, rightEyeY),
new Point(noseX, noseY),
new Point(leftMouthX, leftMouthY),
new Point(rightMouthX, rightMouthY),
];
const face: FaceDetection = {
box,
landmarks,
probability,
// detectionMethod: this.method,
};
faces.push(face);
}
return faces;
}
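Decoding one hypothetical 16-float row from that output (values are illustrative): the box arrives in center form and is converted to a top-left origin, and the ten values after the score are the five landmark points.

// Illustrative row only; real rows come from the [25200, 16] model output.
const row = [
    320, 240, 100, 120, // xCenter, yCenter, width, height
    0.92,               // face score
    300, 220, 340, 220, 320, 245, 305, 270, 335, 270, // 5 landmarks (x, y)
    0.99,               // final cell, unused here
];
const [xCenter, yCenter, width, height, score] = row;
const topLeft = { x: xCenter - width / 2, y: yCenter - height / 2 }; // { x: 270, y: 180 }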
public getRelativeDetection(
faceDetection: FaceDetection,
dimensions: Dimensions,
): FaceDetection {
const oldBox: Box = faceDetection.box;
const box = new Box({
x: oldBox.x / dimensions.width,
y: oldBox.y / dimensions.height,
width: oldBox.width / dimensions.width,
height: oldBox.height / dimensions.height,
});
const oldLandmarks: Point[] = faceDetection.landmarks;
const landmarks = oldLandmarks.map((l) => {
return new Point(l.x / dimensions.width, l.y / dimensions.height);
});
return {
box,
landmarks,
probability: faceDetection.probability,
};
}
private async estimateOnnx(imageBitmap: ImageBitmap) {
const maxFaceDistance = imageBitmap.width * MAX_FACE_DISTANCE_PERCENT;
const preprocessResult =
this.preprocessImageBitmapToFloat32ChannelsFirst(
imageBitmap,
640,
640,
);
const data = preprocessResult.data;
const resized = preprocessResult.newSize;
const inputTensor = new ort.Tensor("float32", data, [1, 3, 640, 640]);
// TODO(MR): onnx-yolo
// const feeds: Record<string, ort.Tensor> = {};
const feeds: Record<string, any> = {};
feeds["input"] = inputTensor;
const inferenceSession = await this.getOnnxInferenceSession();
const runout = await inferenceSession.run(feeds);
const outputData = runout.output.data;
const faces = this.getFacesFromYoloOutput(
outputData as Float32Array,
0.7,
);
const inBox = newBox(0, 0, resized.width, resized.height);
const toBox = newBox(0, 0, imageBitmap.width, imageBitmap.height);
const transform = computeTransformToBox(inBox, toBox);
const faceDetections: Array<FaceDetection> = faces?.map((f) => {
const box = transformBox(f.box, transform);
const normLandmarks = f.landmarks;
const landmarks = transformPoints(normLandmarks, transform);
return {
box,
landmarks,
probability: f.probability as number,
} as FaceDetection;
});
return removeDuplicateDetections(faceDetections, maxFaceDistance);
}
public async detectFaces(
imageBitmap: ImageBitmap,
): Promise<Array<FaceDetection>> {
// measure time taken
const facesFromOnnx = await this.estimateOnnx(imageBitmap);
return facesFromOnnx;
}
public async dispose() {
const inferenceSession = await this.getOnnxInferenceSession();
inferenceSession?.release();
this.onnxInferenceSession = undefined;
}
}
export default new YoloFaceDetectionService();
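A hedged end-to-end sketch (the import path is an assumption): detectFaces already transforms boxes and landmarks back into absolute image coordinates, so normalizing to relative [0, 1] coordinates is a separate, explicit step.

import yoloFaceDetectionService from "services/machineLearning/yoloFaceDetectionService";

async function detectRelativeFaces(bitmap: ImageBitmap) {
    const detections = await yoloFaceDetectionService.detectFaces(bitmap);
    return detections.map((d) =>
        yoloFaceDetectionService.getRelativeDetection(d, {
            width: bitmap.width,
            height: bitmap.height,
        }),
    );
}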

View file

@ -332,8 +332,10 @@ function searchCollection(
}
function searchFilesByName(searchPhrase: string, files: EnteFile[]) {
return files.filter((file) =>
file.metadata.title.toLowerCase().includes(searchPhrase),
return files.filter(
(file) =>
file.id.toString().includes(searchPhrase) ||
file.metadata.title.toLowerCase().includes(searchPhrase),
);
}

View file

@ -5,7 +5,7 @@
* embeddings on the server. However, we should be prepared to receive an
* {@link EncryptedEmbedding} with a model value distinct from one of these.
*/
export type EmbeddingModel = "onnx-clip";
export type EmbeddingModel = "onnx-clip" | "file-ml-clip-face";
export interface EncryptedEmbedding {
fileID: number;
@ -21,7 +21,7 @@ export interface Embedding
EncryptedEmbedding,
"encryptedEmbedding" | "decryptionHeader"
> {
embedding: Float32Array;
embedding?: Float32Array;
}
export interface GetEmbeddingDiffResponse {

View file

@ -6,3 +6,11 @@ export const ARCFACE_LANDMARKS = [
] as Array<[number, number]>;
export const ARCFACE_LANDMARKS_FACE_SIZE = 112;
export const ARC_FACE_5_LANDMARKS = [
[38.2946, 51.6963],
[73.5318, 51.5014],
[56.0252, 71.7366],
[41.5493, 92.3655],
[70.7299, 92.2041],
] as Array<[number, number]>;

View file

@ -0,0 +1,4 @@
export interface ClipEmbedding {
embedding: Float32Array;
model: "ggml-clip" | "onnx-clip";
}

View file

@ -0,0 +1,27 @@
/// [x] and [y] are the coordinates of the top left corner of the box, i.e. its minimum values.
/// [width] and [height] are the width and height of the box.
/// All values are in absolute pixels relative to the original image size.
export interface CenterBox {
x: number;
y: number;
height: number;
width: number;
}
export interface Point {
x: number;
y: number;
}
export interface Detection {
box: CenterBox;
landmarks: Point[];
}
export interface Face {
id: string;
confidence: number;
blur: number;
embedding: Float32Array;
detection: Detection;
}

View file

@ -0,0 +1,12 @@
import { ClipEmbedding } from "./clip";
import { Face } from "./face";
export interface FileML {
fileID: number;
clip?: ClipEmbedding;
faces: Face[];
height: number;
width: number;
version: number;
error?: string;
}

View file

@ -1,4 +1,5 @@
import * as tf from "@tensorflow/tfjs-core";
import { DebugInfo } from "hdbscan";
import PQueue from "p-queue";
import { EnteFile } from "types/file";
@ -15,6 +16,14 @@ export interface MLSyncResult {
error?: Error;
}
export interface DebugFace {
fileId: string;
// face: FaceApiResult;
face: AlignedFace;
embedding: FaceEmbedding;
faceImage: FaceImage;
}
export declare type FaceImage = Array<Array<Array<number>>>;
export declare type FaceImageBlob = Blob;
@ -50,7 +59,10 @@ export declare type Landmark = Point;
export declare type ImageType = "Original" | "Preview";
export declare type FaceDetectionMethod = "BlazeFace" | "FaceApiSSD";
export declare type FaceDetectionMethod =
| "BlazeFace"
| "FaceApiSSD"
| "YoloFace";
export declare type ObjectDetectionMethod = "SSDMobileNetV2";
@ -65,6 +77,8 @@ export declare type FaceAlignmentMethod =
export declare type FaceEmbeddingMethod = "MobileFaceNet" | "FaceApiDlib";
export declare type BlurDetectionMethod = "Laplacian";
export declare type ClusteringMethod = "Hdbscan" | "Dbscan";
export class AlignedBox {
@ -120,6 +134,7 @@ export interface FaceAlignment {
export interface AlignedFace extends CroppedFace {
alignment?: FaceAlignment;
blurValue?: number;
}
export declare type FaceEmbedding = Float32Array;
@ -186,7 +201,6 @@ export interface MlFileData {
export interface FaceDetectionConfig {
method: FaceDetectionMethod;
minFaceSize: number;
}
export interface ObjectDetectionConfig {
@ -215,6 +229,11 @@ export interface FaceAlignmentConfig {
method: FaceAlignmentMethod;
}
export interface BlurDetectionConfig {
method: BlurDetectionMethod;
threshold: number;
}
export interface FaceEmbeddingConfig {
method: FaceEmbeddingMethod;
faceSize: number;
@ -241,6 +260,7 @@ export interface MLSyncConfig {
faceDetection: FaceDetectionConfig;
faceCrop: FaceCropConfig;
faceAlignment: FaceAlignmentConfig;
blurDetection: BlurDetectionConfig;
faceEmbedding: FaceEmbeddingConfig;
faceClustering: FaceClusteringConfig;
objectDetection: ObjectDetectionConfig;
@ -263,6 +283,7 @@ export interface MLSyncContext {
faceCropService: FaceCropService;
faceAlignmentService: FaceAlignmentService;
faceEmbeddingService: FaceEmbeddingService;
blurDetectionService: BlurDetectionService;
faceClusteringService: ClusteringService;
objectDetectionService: ObjectDetectionService;
sceneDetectionService: SceneDetectionService;
@ -312,6 +333,10 @@ export interface FaceDetectionService {
method: Versioned<FaceDetectionMethod>;
// init(): Promise<void>;
detectFaces(image: ImageBitmap): Promise<Array<FaceDetection>>;
getRelativeDetection(
faceDetection: FaceDetection,
imageDimensions: Dimensions,
): FaceDetection;
dispose(): Promise<void>;
}
@ -354,12 +379,15 @@ export interface FaceEmbeddingService {
method: Versioned<FaceEmbeddingMethod>;
faceSize: number;
// init(): Promise<void>;
getFaceEmbeddings(
faceImages: Array<ImageBitmap>,
): Promise<Array<FaceEmbedding>>;
getFaceEmbeddings(faceImages: Float32Array): Promise<Array<FaceEmbedding>>;
dispose(): Promise<void>;
}
export interface BlurDetectionService {
method: Versioned<BlurDetectionMethod>;
detectBlur(alignedFaces: Float32Array): number[];
}
export interface ClusteringService {
method: Versioned<ClusteringMethod>;
@ -396,18 +424,3 @@ export interface MachineLearningWorker {
close(): void;
}
// export class TFImageBitmap {
// imageBitmap: ImageBitmap;
// tfImage: tf.Tensor3D;
// constructor(imageBitmap: ImageBitmap, tfImage: tf.Tensor3D) {
// this.imageBitmap = imageBitmap;
// this.tfImage = tfImage;
// }
// async dispose() {
// this.tfImage && (await tf.dispose(this.tfImage));
// this.imageBitmap && this.imageBitmap.close();
// }
// }

View file

@ -1,4 +1,5 @@
import { Embedding } from "types/embedding";
import { FileML } from "./machineLearning/mldataMappers";
export const getLatestVersionEmbeddings = (embeddings: Embedding[]) => {
const latestVersionEntities = new Map<number, Embedding>();
@ -16,3 +17,20 @@ export const getLatestVersionEmbeddings = (embeddings: Embedding[]) => {
});
return Array.from(latestVersionEntities.values());
};
export const getLatestVersionFileEmbeddings = (embeddings: FileML[]) => {
const latestVersionEntities = new Map<number, FileML>();
embeddings.forEach((embedding) => {
if (!embedding?.fileID) {
return;
}
const existingEmbeddings = latestVersionEntities.get(embedding.fileID);
if (
!existingEmbeddings ||
existingEmbeddings.updatedAt < embedding.updatedAt
) {
latestVersionEntities.set(embedding.fileID, embedding);
}
});
return Array.from(latestVersionEntities.values());
};

View file

@ -1,9 +1,324 @@
// these utils only work in env where OffscreenCanvas is available
import { Matrix, inverse } from "ml-matrix";
import { BlobOptions, Dimensions } from "types/image";
import { FaceAlignment } from "types/machineLearning";
import { enlargeBox } from "utils/machineLearning";
import { Box } from "../../../thirdparty/face-api/classes";
export function normalizePixelBetween0And1(pixelValue: number) {
return pixelValue / 255.0;
}
export function normalizePixelBetweenMinus1And1(pixelValue: number) {
return pixelValue / 127.5 - 1.0;
}
export function unnormalizePixelFromBetweenMinus1And1(pixelValue: number) {
return clamp(Math.round((pixelValue + 1.0) * 127.5), 0, 255);
}
export function readPixelColor(
imageData: Uint8ClampedArray,
width: number,
height: number,
x: number,
y: number,
) {
if (x < 0 || x >= width || y < 0 || y >= height) {
return { r: 0, g: 0, b: 0, a: 0 };
}
const index = (y * width + x) * 4;
return {
r: imageData[index],
g: imageData[index + 1],
b: imageData[index + 2],
a: imageData[index + 3],
};
}
export function clamp(value: number, min: number, max: number) {
return Math.min(max, Math.max(min, value));
}
export function getPixelBicubic(
fx: number,
fy: number,
imageData: Uint8ClampedArray,
imageWidth: number,
imageHeight: number,
) {
// Clamp to image boundaries
fx = clamp(fx, 0, imageWidth - 1);
fy = clamp(fy, 0, imageHeight - 1);
const x = Math.trunc(fx) - (fx >= 0.0 ? 0 : 1);
const px = x - 1;
const nx = x + 1;
const ax = x + 2;
const y = Math.trunc(fy) - (fy >= 0.0 ? 0 : 1);
const py = y - 1;
const ny = y + 1;
const ay = y + 2;
const dx = fx - x;
const dy = fy - y;
function cubic(
dx: number,
ipp: number,
icp: number,
inp: number,
iap: number,
) {
return (
icp +
0.5 *
(dx * (-ipp + inp) +
dx * dx * (2 * ipp - 5 * icp + 4 * inp - iap) +
dx * dx * dx * (-ipp + 3 * icp - 3 * inp + iap))
);
}
const icc = readPixelColor(imageData, imageWidth, imageHeight, x, y);
const ipp =
px < 0 || py < 0
? icc
: readPixelColor(imageData, imageWidth, imageHeight, px, py);
const icp =
px < 0
? icc
: readPixelColor(imageData, imageWidth, imageHeight, x, py);
const inp =
py < 0 || nx >= imageWidth
? icc
: readPixelColor(imageData, imageWidth, imageHeight, nx, py);
const iap =
ax >= imageWidth || py < 0
? icc
: readPixelColor(imageData, imageWidth, imageHeight, ax, py);
const ip0 = cubic(dx, ipp.r, icp.r, inp.r, iap.r);
const ip1 = cubic(dx, ipp.g, icp.g, inp.g, iap.g);
const ip2 = cubic(dx, ipp.b, icp.b, inp.b, iap.b);
// const ip3 = cubic(dx, ipp.a, icp.a, inp.a, iap.a);
const ipc =
px < 0
? icc
: readPixelColor(imageData, imageWidth, imageHeight, px, y);
const inc =
nx >= imageWidth
? icc
: readPixelColor(imageData, imageWidth, imageHeight, nx, y);
const iac =
ax >= imageWidth
? icc
: readPixelColor(imageData, imageWidth, imageHeight, ax, y);
const ic0 = cubic(dx, ipc.r, icc.r, inc.r, iac.r);
const ic1 = cubic(dx, ipc.g, icc.g, inc.g, iac.g);
const ic2 = cubic(dx, ipc.b, icc.b, inc.b, iac.b);
// const ic3 = cubic(dx, ipc.a, icc.a, inc.a, iac.a);
const ipn =
px < 0 || ny >= imageHeight
? icc
: readPixelColor(imageData, imageWidth, imageHeight, px, ny);
const icn =
ny >= imageHeight
? icc
: readPixelColor(imageData, imageWidth, imageHeight, x, ny);
const inn =
nx >= imageWidth || ny >= imageHeight
? icc
: readPixelColor(imageData, imageWidth, imageHeight, nx, ny);
const ian =
ax >= imageWidth || ny >= imageHeight
? icc
: readPixelColor(imageData, imageWidth, imageHeight, ax, ny);
const in0 = cubic(dx, ipn.r, icn.r, inn.r, ian.r);
const in1 = cubic(dx, ipn.g, icn.g, inn.g, ian.g);
const in2 = cubic(dx, ipn.b, icn.b, inn.b, ian.b);
// const in3 = cubic(dx, ipn.a, icn.a, inn.a, ian.a);
const ipa =
px < 0 || ay >= imageHeight
? icc
: readPixelColor(imageData, imageWidth, imageHeight, px, ay);
const ica =
ay >= imageHeight
? icc
: readPixelColor(imageData, imageWidth, imageHeight, x, ay);
const ina =
nx >= imageWidth || ay >= imageHeight
? icc
: readPixelColor(imageData, imageWidth, imageHeight, nx, ay);
const iaa =
ax >= imageWidth || ay >= imageHeight
? icc
: readPixelColor(imageData, imageWidth, imageHeight, ax, ay);
const ia0 = cubic(dx, ipa.r, ica.r, ina.r, iaa.r);
const ia1 = cubic(dx, ipa.g, ica.g, ina.g, iaa.g);
const ia2 = cubic(dx, ipa.b, ica.b, ina.b, iaa.b);
// const ia3 = cubic(dx, ipa.a, ica.a, ina.a, iaa.a);
const c0 = Math.trunc(clamp(cubic(dy, ip0, ic0, in0, ia0), 0, 255));
const c1 = Math.trunc(clamp(cubic(dy, ip1, ic1, in1, ia1), 0, 255));
const c2 = Math.trunc(clamp(cubic(dy, ip2, ic2, in2, ia2), 0, 255));
// const c3 = cubic(dy, ip3, ic3, in3, ia3);
return { r: c0, g: c1, b: c2 };
}
/// Returns the pixel value (RGB) at the given coordinates using bilinear interpolation.
export function getPixelBilinear(
fx: number,
fy: number,
imageData: Uint8ClampedArray,
imageWidth: number,
imageHeight: number,
) {
// Clamp to image boundaries
fx = clamp(fx, 0, imageWidth - 1);
fy = clamp(fy, 0, imageHeight - 1);
// Get the surrounding coordinates and their weights
const x0 = Math.floor(fx);
const x1 = Math.ceil(fx);
const y0 = Math.floor(fy);
const y1 = Math.ceil(fy);
const dx = fx - x0;
const dy = fy - y0;
const dx1 = 1.0 - dx;
const dy1 = 1.0 - dy;
// Get the original pixels
const pixel1 = readPixelColor(imageData, imageWidth, imageHeight, x0, y0);
const pixel2 = readPixelColor(imageData, imageWidth, imageHeight, x1, y0);
const pixel3 = readPixelColor(imageData, imageWidth, imageHeight, x0, y1);
const pixel4 = readPixelColor(imageData, imageWidth, imageHeight, x1, y1);
function bilinear(val1: number, val2: number, val3: number, val4: number) {
return Math.round(
val1 * dx1 * dy1 +
val2 * dx * dy1 +
val3 * dx1 * dy +
val4 * dx * dy,
);
}
// Interpolate the pixel values
const red = bilinear(pixel1.r, pixel2.r, pixel3.r, pixel4.r);
const green = bilinear(pixel1.g, pixel2.g, pixel3.g, pixel4.g);
const blue = bilinear(pixel1.b, pixel2.b, pixel3.b, pixel4.b);
return { r: red, g: green, b: blue };
}
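A worked example of the bilinear sampling: at (fx, fy) = (1.25, 2.5) the fractional offsets are dx = 0.25 and dy = 0.5, so the four neighbours (1,2), (2,2), (1,3) and (2,3) contribute weights 0.375, 0.125, 0.375 and 0.125 per channel.

// Hypothetical 4x4 all-white RGBA buffer, just to make the call runnable.
const rgba = new Uint8ClampedArray(4 * 4 * 4).fill(255);
const pixel = getPixelBilinear(1.25, 2.5, rgba, 4, 4); // { r: 255, g: 255, b: 255 }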
export function warpAffineFloat32List(
imageBitmap: ImageBitmap,
faceAlignment: FaceAlignment,
faceSize: number,
inputData: Float32Array,
inputStartIndex: number,
): void {
// Get the pixel data
const offscreenCanvas = new OffscreenCanvas(
imageBitmap.width,
imageBitmap.height,
);
const ctx = offscreenCanvas.getContext("2d");
ctx.drawImage(imageBitmap, 0, 0, imageBitmap.width, imageBitmap.height);
const imageData = ctx.getImageData(
0,
0,
imageBitmap.width,
imageBitmap.height,
);
const pixelData = imageData.data;
const transformationMatrix = faceAlignment.affineMatrix.map((row) =>
row.map((val) => (val != 1.0 ? val * faceSize : 1.0)),
); // 3x3
const A: Matrix = new Matrix([
[transformationMatrix[0][0], transformationMatrix[0][1]],
[transformationMatrix[1][0], transformationMatrix[1][1]],
]);
const Ainverse = inverse(A);
const b00 = transformationMatrix[0][2];
const b10 = transformationMatrix[1][2];
const a00Prime = Ainverse.get(0, 0);
const a01Prime = Ainverse.get(0, 1);
const a10Prime = Ainverse.get(1, 0);
const a11Prime = Ainverse.get(1, 1);
for (let yTrans = 0; yTrans < faceSize; ++yTrans) {
for (let xTrans = 0; xTrans < faceSize; ++xTrans) {
// Perform inverse affine transformation
const xOrigin =
a00Prime * (xTrans - b00) + a01Prime * (yTrans - b10);
const yOrigin =
a10Prime * (xTrans - b00) + a11Prime * (yTrans - b10);
// Get the pixel from interpolation
const pixel = getPixelBicubic(
xOrigin,
yOrigin,
pixelData,
imageBitmap.width,
imageBitmap.height,
);
// Set the pixel in the input data
const index = (yTrans * faceSize + xTrans) * 3;
inputData[inputStartIndex + index] =
normalizePixelBetweenMinus1And1(pixel.r);
inputData[inputStartIndex + index + 1] =
normalizePixelBetweenMinus1And1(pixel.g);
inputData[inputStartIndex + index + 2] =
normalizePixelBetweenMinus1And1(pixel.b);
}
}
}
export function createGrayscaleIntMatrixFromNormalized2List(
imageList: Float32Array,
faceNumber: number,
width: number = 112,
height: number = 112,
): number[][] {
const startIndex = faceNumber * width * height * 3;
return Array.from({ length: height }, (_, y) =>
Array.from({ length: width }, (_, x) => {
// 0.299 ∙ Red + 0.587 ∙ Green + 0.114 ∙ Blue
const pixelIndex = startIndex + 3 * (y * width + x);
return clamp(
Math.round(
0.299 *
unnormalizePixelFromBetweenMinus1And1(
imageList[pixelIndex],
) +
0.587 *
unnormalizePixelFromBetweenMinus1And1(
imageList[pixelIndex + 1],
) +
0.114 *
unnormalizePixelFromBetweenMinus1And1(
imageList[pixelIndex + 2],
),
),
0,
255,
);
}),
);
}
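A worked example of the grayscale conversion: each channel is un-normalized from [-1, 1] back to [0, 255] before the Rec. 601 luma weights are applied.

// A normalized pixel (-1.0, 0.0, 1.0) un-normalizes to (0, 128, 255),
// giving luma round(0.299 * 0 + 0.587 * 128 + 0.114 * 255) = 104.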
export function resizeToSquare(img: ImageBitmap, size: number) {
const scale = size / Math.max(img.height, img.width);
const width = scale * img.width;

View file

@ -6,6 +6,7 @@ import { FaceAlignment, FaceDetection } from "types/machineLearning";
import {
ARCFACE_LANDMARKS,
ARCFACE_LANDMARKS_FACE_SIZE,
ARC_FACE_5_LANDMARKS,
} from "types/machineLearning/archface";
import { cropWithRotation, transform } from "utils/image";
import {
@ -21,7 +22,7 @@ import { Box, Point } from "../../../thirdparty/face-api/classes";
export function normalizeLandmarks(
landmarks: Array<[number, number]>,
faceSize: number,
) {
): Array<[number, number]> {
return landmarks.map((landmark) =>
landmark.map((p) => p / faceSize),
) as Array<[number, number]>;
@ -74,9 +75,13 @@ export function getFaceAlignmentUsingSimilarityTransform(
export function getArcfaceAlignment(
faceDetection: FaceDetection,
): FaceAlignment {
const landmarkCount = faceDetection.landmarks.length;
return getFaceAlignmentUsingSimilarityTransform(
faceDetection,
normalizeLandmarks(ARCFACE_LANDMARKS, ARCFACE_LANDMARKS_FACE_SIZE),
normalizeLandmarks(
landmarkCount === 5 ? ARC_FACE_5_LANDMARKS : ARCFACE_LANDMARKS,
ARCFACE_LANDMARKS_FACE_SIZE,
),
);
}
@ -161,6 +166,7 @@ export function ibExtractFaceImage(
);
}
// Used in MLDebugViewOnly
export function ibExtractFaceImageUsingTransform(
image: ImageBitmap,
alignment: FaceAlignment,
@ -183,42 +189,6 @@ export function ibExtractFaceImages(
);
}
export function extractArcfaceAlignedFaceImage(
image: tf.Tensor4D,
faceDetection: FaceDetection,
faceSize: number,
): tf.Tensor4D {
const alignment = getFaceAlignmentUsingSimilarityTransform(
faceDetection,
ARCFACE_LANDMARKS,
);
return extractFaceImage(image, alignment, faceSize);
}
export function extractArcfaceAlignedFaceImages(
image: tf.Tensor3D | tf.Tensor4D,
faceDetections: Array<FaceDetection>,
faceSize: number,
): tf.Tensor4D {
return tf.tidy(() => {
const tf4dFloat32Image = toTensor4D(image, "float32");
const faceImages = new Array<tf.Tensor3D>(faceDetections.length);
for (let i = 0; i < faceDetections.length; i++) {
faceImages[i] = tf.squeeze(
extractArcfaceAlignedFaceImage(
tf4dFloat32Image,
faceDetections[i],
faceSize,
),
[0],
);
}
return tf.stack(faceImages) as tf.Tensor4D;
});
}
const BLAZEFACE_LEFT_EYE_INDEX = 0;
const BLAZEFACE_RIGHT_EYE_INDEX = 1;
// const BLAZEFACE_NOSE_INDEX = 2;

View file

@ -35,6 +35,18 @@ export function getDetectionCenter(detection: FaceDetection) {
return center.div({ x: 4, y: 4 });
}
/**
* Finds the nearest face detection from a list of detections to a specified detection.
*
* This function calculates the center of each detection and then finds the detection whose center is nearest to the center of the specified detection.
* If a maximum distance is specified, only detections within that distance are considered.
*
* @param toDetection - The face detection to find the nearest detection to.
* @param fromDetections - An array of face detections to search in.
* @param maxDistance - The maximum distance between the centers of the two detections for a detection to be considered. If not specified, all detections are considered.
*
* @returns The nearest face detection from the list, or `undefined` if no detection is within the maximum distance.
*/
export function getNearestDetection(
toDetection: FaceDetection,
fromDetections: Array<FaceDetection>,
@ -47,7 +59,18 @@ export function getNearestDetection(
return nearestIndex >= 0 && fromDetections[nearestIndex];
}
// TODO: can also be done through tf.image.nonMaxSuppression
/**
* Removes duplicate face detections from an array of detections.
*
* This function sorts the detections by their probability in descending order, then iterates over them.
* For each detection, it calculates the Euclidean distance to all other detections.
* If the distance is less than or equal to the specified threshold (`withinDistance`), the other detection is considered a duplicate and is removed.
*
* @param detections - An array of face detections to remove duplicates from.
* @param withinDistance - The maximum Euclidean distance between two detections for them to be considered duplicates.
*
* @returns An array of face detections with duplicates removed.
*/
export function removeDuplicateDetections(
detections: Array<FaceDetection>,
withinDistance: number,

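The hunk above ends before the function body; for reference, a minimal standalone sketch of the greedy dedup the doc comment describes. This is hypothetical: it assumes precomputed {x, y} centers, whereas the real implementation works on FaceDetection via getDetectionCenter.

interface Det {
    center: { x: number; y: number };
    probability: number;
}

function dedupSketch(detections: Det[], withinDistance: number): Det[] {
    // Highest-probability detections win; anything within the distance
    // threshold of an already-kept detection is dropped as a duplicate.
    const sorted = [...detections].sort((a, b) => b.probability - a.probability);
    const kept: Det[] = [];
    for (const d of sorted) {
        const isDuplicate = kept.some(
            (k) =>
                Math.hypot(k.center.x - d.center.x, k.center.y - d.center.y) <=
                withinDistance,
        );
        if (!isDuplicate) kept.push(d);
    }
    return kept;
}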
View file

@ -17,6 +17,7 @@ import {
DetectedFace,
DetectedObject,
Face,
FaceAlignment,
FaceImageBlob,
MlFileData,
Person,
@ -24,18 +25,11 @@ import {
Versioned,
} from "types/machineLearning";
import { getRenderableImage } from "utils/file";
import { imageBitmapToBlob } from "utils/image";
import { clamp, imageBitmapToBlob, warpAffineFloat32List } from "utils/image";
import mlIDbStorage from "utils/storage/mlIDbStorage";
import { Box, Point } from "../../../thirdparty/face-api/classes";
import {
getArcfaceAlignment,
ibExtractFaceImage,
ibExtractFaceImages,
} from "./faceAlign";
import {
getFaceCropBlobFromStorage,
ibExtractFaceImagesFromCrops,
} from "./faceCrop";
import { ibExtractFaceImage, ibExtractFaceImages } from "./faceAlign";
import { getFaceCropBlobFromStorage } from "./faceCrop";
export function f32Average(descriptors: Float32Array[]) {
if (descriptors.length < 1) {
@ -241,9 +235,10 @@ export async function extractFaceImages(
faceSize: number,
image?: ImageBitmap,
) {
if (faces.length === faces.filter((f) => f.crop).length) {
return ibExtractFaceImagesFromCrops(faces, faceSize);
} else if (image) {
// if (faces.length === faces.filter((f) => f.crop).length) {
// return ibExtractFaceImagesFromCrops(faces, faceSize);
// } else
if (image) {
const faceAlignments = faces.map((f) => f.alignment);
return ibExtractFaceImages(image, faceAlignments, faceSize);
} else {
@ -253,31 +248,68 @@ export async function extractFaceImages(
}
}
export async function extractFaceImagesToFloat32(
faceAlignments: Array<FaceAlignment>,
faceSize: number,
image: ImageBitmap,
): Promise<Float32Array> {
const faceData = new Float32Array(
faceAlignments.length * faceSize * faceSize * 3,
);
for (let i = 0; i < faceAlignments.length; i++) {
const alignedFace = faceAlignments[i];
const faceDataOffset = i * faceSize * faceSize * 3;
warpAffineFloat32List(
image,
alignedFace,
faceSize,
faceData,
faceDataOffset,
);
}
return faceData;
}
export function leftFillNum(num: number, length: number, padding: number) {
return num.toString().padStart(length, padding.toString());
}
// TODO: the same face cannot be identified based on this id alone,
// this gives the same id to faces whose arcface center lies in the same box of a 1% image grid
// maximum distance for the same id will be around √2%
// will give the same id in most cases, except for face centers lying near grid edges
// faces with the same id should be treated as the same face, and different ids should be tested further
// further tests can rely on the nearest face within a certain threshold in the same image
// could also explore a spatial index similar to Geohash, but that is overkill
// for the mostly single-digit number of faces in one image
// also check whether this needs to be globally unique or unique per user
export function getFaceId(detectedFace: DetectedFace, imageDims: Dimensions) {
const arcFaceAlignedFace = getArcfaceAlignment(detectedFace.detection);
const imgDimPoint = new Point(imageDims.width, imageDims.height);
const gridPt = arcFaceAlignedFace.center
.mul(new Point(100, 100))
.div(imgDimPoint)
.floor()
.bound(0, 99);
const gridPaddedX = leftFillNum(gridPt.x, 2, 0);
const gridPaddedY = leftFillNum(gridPt.y, 2, 0);
const xMin = clamp(
detectedFace.detection.box.x / imageDims.width,
0.0,
0.999999,
)
.toFixed(5)
.substring(2);
const yMin = clamp(
detectedFace.detection.box.y / imageDims.height,
0.0,
0.999999,
)
.toFixed(5)
.substring(2);
const xMax = clamp(
(detectedFace.detection.box.x + detectedFace.detection.box.width) /
imageDims.width,
0.0,
0.999999,
)
.toFixed(5)
.substring(2);
const yMax = clamp(
(detectedFace.detection.box.y + detectedFace.detection.box.height) /
imageDims.height,
0.0,
0.999999,
)
.toFixed(5)
.substring(2);
return `${detectedFace.fileId}-${gridPaddedX}-${gridPaddedY}`;
const rawFaceID = `${xMin}_${yMin}_${xMax}_${yMax}`;
const faceID = `${detectedFace.fileId}_${rawFaceID}`;
return faceID;
}
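A worked example of the new id format (hypothetical values): a 200x200 box at (100, 50) in a 1000x500 image has relative extents 0.1, 0.1, 0.3 and 0.5, which toFixed(5).substring(2) turns into five-digit strings.

// getFaceId({ fileId: 42, detection: { box: { x: 100, y: 50, width: 200, height: 200 } } } as DetectedFace,
//           { width: 1000, height: 500 })
// → xMin "10000", yMin "10000", xMax "30000", yMax "50000"
// → "42_10000_10000_30000_50000"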
export function getObjectId(

View file

@ -0,0 +1,265 @@
import {
Face,
FaceDetection,
Landmark,
MlFileData,
} from "types/machineLearning";
import { ClipEmbedding } from "types/machineLearning/data/clip";
export interface FileML extends ServerFileMl {
updatedAt: number;
}
class ServerFileMl {
public fileID: number;
public height?: number;
public width?: number;
public faceEmbedding: ServerFaceEmbeddings;
public clipEmbedding?: ClipEmbedding;
public constructor(
fileID: number,
faceEmbedding: ServerFaceEmbeddings,
clipEmbedding?: ClipEmbedding,
height?: number,
width?: number,
) {
this.fileID = fileID;
this.height = height;
this.width = width;
this.faceEmbedding = faceEmbedding;
this.clipEmbedding = clipEmbedding;
}
toJson(): string {
return JSON.stringify(this);
}
static fromJson(json: string): ServerFileMl {
return JSON.parse(json);
}
}
class ServerFaceEmbeddings {
public faces: ServerFace[];
public version: number;
public client?: string;
public error?: boolean;
public constructor(
faces: ServerFace[],
version: number,
client?: string,
error?: boolean,
) {
this.faces = faces;
this.version = version;
this.client = client;
this.error = error;
}
toJson(): string {
return JSON.stringify(this);
}
static fromJson(json: string): ServerFaceEmbeddings {
return JSON.parse(json);
}
}
class ServerFace {
public fileID: number;
public faceID: string;
public embeddings: number[];
public detection: ServerDetection;
public score: number;
public blur: number;
public fileInfo?: ServerFileInfo;
public constructor(
fileID: number,
faceID: string,
embeddings: number[],
detection: ServerDetection,
score: number,
blur: number,
fileInfo?: ServerFileInfo,
) {
this.fileID = fileID;
this.faceID = faceID;
this.embeddings = embeddings;
this.detection = detection;
this.score = score;
this.blur = blur;
this.fileInfo = fileInfo;
}
toJson(): string {
return JSON.stringify(this);
}
static fromJson(json: string): ServerFace {
return JSON.parse(json);
}
}
class ServerFileInfo {
public imageWidth?: number;
public imageHeight?: number;
public constructor(imageWidth?: number, imageHeight?: number) {
this.imageWidth = imageWidth;
this.imageHeight = imageHeight;
}
}
class ServerDetection {
public box: ServerFaceBox;
public landmarks: Landmark[];
public constructor(box: ServerFaceBox, landmarks: Landmark[]) {
this.box = box;
this.landmarks = landmarks;
}
toJson(): string {
return JSON.stringify(this);
}
static fromJson(json: string): ServerDetection {
return JSON.parse(json);
}
}
class ServerFaceBox {
public xMin: number;
public yMin: number;
public width: number;
public height: number;
public constructor(
xMin: number,
yMin: number,
width: number,
height: number,
) {
this.xMin = xMin;
this.yMin = yMin;
this.width = width;
this.height = height;
}
toJson(): string {
return JSON.stringify(this);
}
static fromJson(json: string): ServerFaceBox {
return JSON.parse(json);
}
}
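// Note: JSON.parse in the fromJson methods above returns a plain object, so
// the result will not actually carry class methods such as toJson. Where real
// instances are needed, one minimal sketch (an assumption, not what this code
// currently does) is to rehydrate the prototype:
//
//     static fromJson(json: string): ServerFaceBox {
//         return Object.assign(
//             Object.create(ServerFaceBox.prototype),
//             JSON.parse(json) as ServerFaceBox,
//         );
//     }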
export function LocalFileMlDataToServerFileMl(
localFileMlData: MlFileData,
): ServerFileMl {
if (
localFileMlData.errorCount > 0 &&
localFileMlData.lastErrorMessage !== undefined
) {
return null;
}
const imageDimensions = localFileMlData.imageDimensions;
const fileInfo = new ServerFileInfo(
imageDimensions.width,
imageDimensions.height,
);
const faces: ServerFace[] = [];
for (let i = 0; i < localFileMlData.faces.length; i++) {
const face: Face = localFileMlData.faces[i];
const faceID = face.id;
const embedding = face.embedding;
const score = face.detection.probability;
const blur = face.blurValue;
const detection: FaceDetection = face.detection;
const box = detection.box;
const landmarks = detection.landmarks;
const newBox = new ServerFaceBox(box.x, box.y, box.width, box.height);
const newLandmarks: Landmark[] = [];
for (let j = 0; j < landmarks.length; j++) {
newLandmarks.push({
x: landmarks[j].x,
y: landmarks[j].y,
} as Landmark);
}
const newFaceObject = new ServerFace(
localFileMlData.fileId,
faceID,
Array.from(embedding),
new ServerDetection(newBox, newLandmarks),
score,
blur,
fileInfo,
);
faces.push(newFaceObject);
}
const faceEmbeddings = new ServerFaceEmbeddings(
faces,
1,
localFileMlData.lastErrorMessage,
);
return new ServerFileMl(
localFileMlData.fileId,
faceEmbeddings,
null,
imageDimensions.height,
imageDimensions.width,
);
}
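// Example usage (a sketch; `putEmbedding` is a hypothetical upload helper,
// not something defined in this diff). Note that the function returns null
// when the local data recorded errors, so the result needs a guard:
//
//     const serverMl = LocalFileMlDataToServerFileMl(mlFileData);
//     if (serverMl) {
//         await putEmbedding(serverMl.toJson());
//     }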
// // Not sure if this actually works
// export function ServerFileMlToLocalFileMlData(
// serverFileMl: ServerFileMl,
// ): MlFileData {
// const faces: Face[] = [];
// const mlVersion: number = serverFileMl.faceEmbeddings.version;
// const errorCount = serverFileMl.faceEmbeddings.error ? 1 : 0;
// for (let i = 0; i < serverFileMl.faceEmbeddings.faces.length; i++) {
// const face = serverFileMl.faceEmbeddings.faces[i];
// if(face.detection.landmarks.length === 0) {
// continue;
// }
// const detection = face.detection;
// const box = detection.box;
// const landmarks = detection.landmarks;
// const newBox = new FaceBox(
// box.xMin,
// box.yMin,
// box.width,
// box.height,
// );
// const newLandmarks: Landmark[] = [];
// for (let j = 0; j < landmarks.length; j++) {
// newLandmarks.push(
// {
// x: landmarks[j].x,
// y: landmarks[j].y,
// } as Landmark
// );
// }
// const newDetection = new Detection(newBox, newLandmarks);
// const newFace = {
// } as Face
// faces.push(newFace);
// }
// return {
// fileId: serverFileMl.fileID,
// imageDimensions: {
// width: serverFileMl.width,
// height: serverFileMl.height,
// },
// faces,
// mlVersion,
// errorCount,
// };
// }

View file

@@ -1,3 +1,4 @@
import { ensureElectron } from "@/next/electron";
import { AppUpdateInfo } from "@/next/types/ipc";
import { logoutUser } from "@ente/accounts/services/user";
import { DialogBoxAttributes } from "@ente/shared/components/DialogBox/types";
@@ -52,35 +53,34 @@ export const getTrashFileMessage = (deleteFileHelper): DialogBoxAttributes => ({
close: { text: t("CANCEL") },
});
export const getUpdateReadyToInstallMessage = (
updateInfo: AppUpdateInfo,
): DialogBoxAttributes => ({
export const getUpdateReadyToInstallMessage = ({
version,
}: AppUpdateInfo): DialogBoxAttributes => ({
icon: <AutoAwesomeOutlinedIcon />,
title: t("UPDATE_AVAILABLE"),
content: t("UPDATE_INSTALLABLE_MESSAGE"),
proceed: {
action: () => globalThis.electron?.updateAndRestart(),
action: () => ensureElectron().updateAndRestart(),
text: t("INSTALL_NOW"),
variant: "accent",
},
close: {
text: t("INSTALL_ON_NEXT_LAUNCH"),
variant: "secondary",
action: () =>
globalThis.electron?.muteUpdateNotification(updateInfo.version),
action: () => ensureElectron().updateOnNextRestart(version),
},
});
export const getUpdateAvailableForDownloadMessage = (
updateInfo: AppUpdateInfo,
): DialogBoxAttributes => ({
export const getUpdateAvailableForDownloadMessage = ({
version,
}: AppUpdateInfo): DialogBoxAttributes => ({
icon: <AutoAwesomeOutlinedIcon />,
title: t("UPDATE_AVAILABLE"),
content: t("UPDATE_AVAILABLE_MESSAGE"),
close: {
text: t("IGNORE_THIS_VERSION"),
variant: "secondary",
action: () => globalThis.electron?.skipAppUpdate(updateInfo.version),
action: () => ensureElectron().skipAppUpdate(version),
},
proceed: {
action: downloadApp,

View file

@@ -1,4 +1,3 @@
import log from "@/next/log";
import {
RecoveryKey,
TwoFactorRecoveryResponse,
@@ -62,7 +61,6 @@ export const _logout = async () => {
) {
return;
}
log.error("/users/logout failed", e);
throw e;
}
};

View file

@@ -70,9 +70,9 @@ export default function Credentials({ appContext, appName }: PageProps) {
const electron = globalThis.electron;
if (!key && electron) {
try {
key = await electron.getEncryptionKey();
key = await electron.encryptionKey();
} catch (e) {
log.error("getEncryptionKey failed", e);
log.error("Failed to get encryption key from electron", e);
}
if (key) {
await saveKeyInSessionStore(

View file

@@ -11,49 +11,44 @@ import { PAGES } from "../constants/pages";
export const logoutUser = async () => {
try {
try {
await _logout();
} catch (e) {
// ignore
}
try {
InMemoryStore.clear();
} catch (e) {
// ignore
log.error("clear InMemoryStore failed", e);
}
try {
clearKeys();
} catch (e) {
log.error("clearKeys failed", e);
}
try {
clearData();
} catch (e) {
log.error("clearData failed", e);
}
try {
await deleteAllCache();
} catch (e) {
log.error("deleteAllCache failed", e);
}
try {
await clearFiles();
} catch (e) {
log.error("clearFiles failed", e);
}
try {
globalThis.electron?.clearElectronStore();
} catch (e) {
log.error("clearElectronStore failed", e);
}
try {
eventBus.emit(Events.LOGOUT);
} catch (e) {
log.error("Error in logout handlers", e);
}
router.push(PAGES.ROOT);
await _logout();
} catch (e) {
log.error("logoutUser failed", e);
log.error("Ignoring error during POST /users/logout", e);
}
try {
InMemoryStore.clear();
} catch (e) {
log.error("Ignoring error when clearing in-memory store", e);
}
try {
clearKeys();
} catch (e) {
log.error("Ignoring error when clearing keys", e);
}
try {
clearData();
} catch (e) {
log.error("Ignoring error when clearing data", e);
}
try {
await deleteAllCache();
} catch (e) {
log.error("Ignoring error when clearing caches", e);
}
try {
await clearFiles();
} catch (e) {
log.error("Ignoring error when clearing files", e);
}
try {
globalThis.electron?.clearStores();
} catch (e) {
log.error("Ignoring error when clearing electron stores", e);
}
try {
eventBus.emit(Events.LOGOUT);
} catch (e) {
log.error("Ignoring error in event-bus logout handlers", e);
}
router.push(PAGES.ROOT);
};
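// One possible tightening (a sketch, not what this diff does): the repeated
// try/catch blocks above could be folded into a small helper that runs each
// cleanup step, logs any failure, and never throws:
//
//     const ignoringErrors = async (
//         label: string,
//         f: () => void | Promise<void>,
//     ) => {
//         try {
//             await f();
//         } catch (e) {
//             log.error(`Ignoring error when ${label}`, e);
//         }
//     };
//
//     await ignoringErrors("clearing keys", clearKeys);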

View file

@@ -24,7 +24,8 @@ module.exports = {
"max-len": "off",
"new-cap": "off",
"no-invalid-this": "off",
eqeqeq: "error",
// TODO(MR): We want this off anyway, for now forcing it here
eqeqeq: "off",
"object-curly-spacing": ["error", "always"],
"space-before-function-paren": "off",
"operator-linebreak": [

View file

@@ -18,6 +18,33 @@ export const logStartupBanner = (appId: string, userId?: number) => {
log.info(`Starting ente-${appIdL}-web ${buildId}uid ${userId ?? 0}`);
};
/**
* Attach handlers to log any unhandled exceptions and promise rejections.
*
* @param attach If true, attach handlers, and if false, remove them. This
* allows us to use this in a React hook that cleans up after itself.
*/
export const logUnhandledErrorsAndRejections = (attach: boolean) => {
const handleError = (event: ErrorEvent) => {
log.error("Unhandled error", event.error);
};
const handleUnhandledRejection = (event: PromiseRejectionEvent) => {
log.error("Unhandled promise rejection", event.reason);
};
if (attach) {
window.addEventListener("error", handleError);
window.addEventListener("unhandledrejection", handleUnhandledRejection);
} else {
window.removeEventListener("error", handleError);
window.removeEventListener(
"unhandledrejection",
handleUnhandledRejection,
);
}
};
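// Example usage (illustrative): the React hook alluded to in the doc comment
// above would attach the handlers on mount and detach them on unmount:
//
//     useEffect(() => {
//         logUnhandledErrorsAndRejections(true);
//         return () => logUnhandledErrorsAndRejections(false);
//     }, []);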
interface LogEntry {
timestamp: number;
logLine: string;

View file

@@ -37,9 +37,22 @@ export enum PICKED_UPLOAD_TYPE {
export interface Electron {
// - General
/** Return the version of the desktop app. */
/**
* Return the version of the desktop app.
*
* The return value is of the form `v1.2.3`.
*/
appVersion: () => Promise<string>;
/**
* Log the given {@link message} to the on-disk log file maintained by the
* desktop app.
*
* Note: Unlike the other functions exposed over the Electron bridge,
* logToDisk is fire-and-forget and does not return a promise.
*/
logToDisk: (message: string) => void;
/**
* Open the given {@link dirPath} in the system's folder viewer.
*
@@ -55,13 +68,75 @@
openLogDirectory: () => Promise<void>;
/**
* Log the given {@link message} to the on-disk log file maintained by the
* desktop app.
* Clear any stored data.
*
* Note: Unlike the other functions exposed over the Electron bridge,
* logToDisk is fire-and-forget and does not return a promise.
* This is a coarse, single-shot cleanup, meant for use in clearing any
* Electron side state during logout.
*/
logToDisk: (message: string) => void;
clearStores: () => void;
/**
* Return the previously saved encryption key from persistent safe storage.
*
* If no such key is found, return `undefined`.
*
* @see {@link saveEncryptionKey}.
*/
encryptionKey: () => Promise<string | undefined>;
/**
* Save the given {@link encryptionKey} into persistent safe storage.
*/
saveEncryptionKey: (encryptionKey: string) => Promise<void>;
/**
* Set or clear the callback {@link cb} to invoke whenever the app comes
* into the foreground. More precisely, the callback gets invoked when the
* main window gets focus.
*
* Note: Setting a callback clears any previous callbacks.
*
* @param cb The function to call when the main window gets focus. Pass
* `undefined` to clear the callback.
*/
onMainWindowFocus: (cb?: () => void) => void;
// - App update
/**
* Set or clear the callback {@link cb} to invoke whenever a new
* (actionable) app update is available. This allows the Node.js layer to
* ask the renderer to show an "Update available" dialog to the user.
*
* Note: Setting a callback clears any previous callbacks.
*/
onAppUpdateAvailable: (
cb?: ((updateInfo: AppUpdateInfo) => void) | undefined,
) => void;
/**
* Restart the app to apply the latest available update.
*
* This is expected to be called in response to {@link onAppUpdateAvailable}
* if the user so wishes.
*/
updateAndRestart: () => void;
/**
* Mute update notifications for the given {@link version}. This allows us
* to implement the "Install on next launch" functionality in response to
* the {@link onAppUpdateAvailable} event.
*/
updateOnNextRestart: (version: string) => void;
/**
* Skip the app update with the given {@link version}.
*
* This is expected to be called in response to {@link onAppUpdateAvailable}
* if the user so wishes. It will remember this {@link version} as having
* been marked as skipped so that we don't prompt the user again.
*/
skipAppUpdate: (version: string) => void;
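// Example (illustrative): a renderer-side wiring of the update flow
// described above, reusing the dialog builder changed earlier in this
// diff; `showDialog` is an assumed app-level helper:
//
//     electron.onAppUpdateAvailable((updateInfo) =>
//         showDialog(getUpdateReadyToInstallMessage(updateInfo)),
//     );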
/**
* A subset of filesystem access APIs.
@@ -98,28 +173,6 @@
* the dataflow.
*/
// - General
registerForegroundEventListener: (onForeground: () => void) => void;
clearElectronStore: () => void;
setEncryptionKey: (encryptionKey: string) => Promise<void>;
getEncryptionKey: () => Promise<string>;
// - App update
updateAndRestart: () => void;
skipAppUpdate: (version: string) => void;
muteUpdateNotification: (version: string) => void;
registerUpdateEventListener: (
showUpdateDialog: (updateInfo: AppUpdateInfo) => void,
) => void;
// - Conversion
convertToJPEG: (

View file

@@ -103,7 +103,7 @@ export const saveKeyInSessionStore = async (
setKey(keyType, sessionKeyAttributes);
const electron = globalThis.electron;
if (electron && !fromDesktop && keyType === SESSION_KEYS.ENCRYPTION_KEY) {
electron.setEncryptionKey(key);
electron.saveEncryptionKey(key);
}
};
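// Round trip (illustrative): the key saved here is what the Credentials page
// reads back on the next launch via the renamed getter:
//
//     await electron.saveEncryptionKey(key);
//     const restored = await electron.encryptionKey(); // resolves to `key`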