Merge pull request #842 from ente-io/ml-alpha

ML Alpha
Abhinav Kumar 2023-02-10 19:09:02 +05:30 committed by GitHub
commit 02b059b5c4
169 changed files with 27583 additions and 624 deletions

View file

@ -33,6 +33,12 @@
"after", "after",
{ "overrides": { "?": "before", ":": "before" } } { "overrides": { "?": "before", ":": "before" } }
], ],
"import/no-anonymous-default-export": [
"error",
{
"allowNew": true
}
],
"@typescript-eslint/no-unsafe-member-access": "off", "@typescript-eslint/no-unsafe-member-access": "off",
"@typescript-eslint/no-unsafe-return": "off", "@typescript-eslint/no-unsafe-return": "off",
"@typescript-eslint/no-unsafe-assignment": "off", "@typescript-eslint/no-unsafe-assignment": "off",

4
.gitmodules vendored
View file

@ -2,6 +2,10 @@
path = thirdparty/photoswipe
url = https://github.com/ente-io/PhotoSwipe.git
branch = master
[submodule "thirdparty/tesseract"]
path = thirdparty/tesseract
url = git@github.com:abhinavkgrd/tesseract.js.git
branch = worker-support
[submodule "ffmpeg-wasm"] [submodule "ffmpeg-wasm"]
path = thirdparty/ffmpeg-wasm path = thirdparty/ffmpeg-wasm
url = https://github.com/abhinavkgrd/ffmpeg.wasm.git url = https://github.com/abhinavkgrd/ffmpeg.wasm.git

View file

@ -21,36 +21,64 @@
"@mui/x-date-pickers": "^5.0.0-alpha.6", "@mui/x-date-pickers": "^5.0.0-alpha.6",
"@sentry/nextjs": "^6.7.1", "@sentry/nextjs": "^6.7.1",
"@stripe/stripe-js": "^1.13.2", "@stripe/stripe-js": "^1.13.2",
"@tensorflow-models/coco-ssd": "^2.2.2",
"@tensorflow/tfjs-backend-cpu": "^3.13.0",
"@tensorflow/tfjs-backend-webgl": "^3.11.0",
"@tensorflow/tfjs-converter": "^3.11.0",
"@tensorflow/tfjs-core": "^3.11.0",
"@tensorflow/tfjs-tflite": "^0.0.1-alpha.7",
"@zip.js/zip.js": "^2.4.2",
"axios": "^0.21.3", "axios": "^0.21.3",
"bip39": "^3.0.4", "bip39": "^3.0.4",
"blazeface-back": "^0.0.8",
"bootstrap": "^4.5.2", "bootstrap": "^4.5.2",
"bs58": "^4.0.1", "bs58": "^4.0.1",
"chrono-node": "^2.2.6", "chrono-node": "^2.2.6",
"comlink": "^4.3.0", "comlink": "^4.3.0",
"debounce-promise": "^3.1.2", "debounce-promise": "^3.1.2",
"density-clustering": "^1.3.0",
"eventemitter3": "^4.0.7",
"exifr": "^7.1.3", "exifr": "^7.1.3",
"ffmpeg-wasm": "file:./thirdparty/ffmpeg-wasm", "ffmpeg-wasm": "file:./thirdparty/ffmpeg-wasm",
"file-type": "^16.5.4", "file-type": "^16.5.4",
"formik": "^2.1.5", "formik": "^2.1.5",
"hdbscan": "0.0.1-alpha.5",
"heic-convert": "^1.2.4", "heic-convert": "^1.2.4",
"http-proxy-middleware": "^1.0.5",
"idb": "^7.0.0",
"is-electron": "^2.2.0", "is-electron": "^2.2.0",
"jszip": "3.8.0", "jszip": "3.8.0",
"libsodium-wrappers": "^0.7.8", "libsodium-wrappers": "^0.7.8",
"localforage": "^1.9.0", "localforage": "^1.9.0",
"ml-matrix": "^6.8.2",
"next": "^13.1.2", "next": "^13.1.2",
"next-transpile-modules": "^10.0.0",
"p-queue": "^7.1.0",
"photoswipe": "file:./thirdparty/photoswipe", "photoswipe": "file:./thirdparty/photoswipe",
"piexifjs": "^1.0.6", "piexifjs": "^1.0.6",
"react": "^18.2.0", "react": "^18.2.0",
"react-bootstrap": "^1.3.0", "react-bootstrap": "^1.3.0",
"react-d3-tree": "^3.1.1",
"react-datepicker": "^4.3.0",
"react-dom": "^18.2.0", "react-dom": "^18.2.0",
"react-dropzone": "^11.2.4", "react-dropzone": "^11.2.4",
"react-otp-input": "^2.3.1", "react-otp-input": "^2.3.1",
"react-select": "^4.3.1", "react-select": "^4.3.1",
"react-simple-code-editor": "^0.11.0",
"react-top-loading-bar": "^2.0.1", "react-top-loading-bar": "^2.0.1",
"react-virtualized-auto-sizer": "^1.0.2", "react-virtualized-auto-sizer": "^1.0.2",
"react-window": "^1.8.6", "react-window": "^1.8.6",
"sanitize-filename": "^1.6.3", "sanitize-filename": "^1.6.3",
"similarity-transformation": "^0.0.1",
"styled-components": "^5.3.5", "styled-components": "^5.3.5",
"tesseract.js": "file:./thirdparty/tesseract",
"transformation-matrix": "^2.10.0",
"tsne-js": "^1.0.3",
"workbox-precaching": "^6.1.5",
"workbox-recipes": "^6.1.5",
"workbox-routing": "^6.1.5",
"workbox-strategies": "^6.1.5",
"workbox-window": "^6.1.5",
"xml-js": "^1.6.11", "xml-js": "^1.6.11",
"yup": "^0.29.3" "yup": "^0.29.3"
}, },
@ -68,6 +96,7 @@
"@types/react-window": "^1.8.2", "@types/react-window": "^1.8.2",
"@types/react-window-infinite-loader": "^1.0.3", "@types/react-window-infinite-loader": "^1.0.3",
"@types/styled-components": "^5.1.25", "@types/styled-components": "^5.1.25",
"@types/wicg-file-system-access": "^2020.9.5",
"@types/yup": "^0.29.7", "@types/yup": "^0.29.7",
"@typescript-eslint/eslint-plugin": "^5.43.0", "@typescript-eslint/eslint-plugin": "^5.43.0",
"eslint": "^8.28.0", "eslint": "^8.28.0",
@ -76,7 +105,6 @@
"husky": "^7.0.1", "husky": "^7.0.1",
"lint-staged": "^11.1.2", "lint-staged": "^11.1.2",
"prettier": "2.3.2", "prettier": "2.3.2",
"react-icons": "^4.3.1",
"typescript": "^4.1.3" "typescript": "^4.1.3"
}, },
"standard": { "standard": {

File diff suppressed because one or more lines are too long

10
public/js/tesseract/worker.min.js vendored Normal file

File diff suppressed because one or more lines are too long

Binary file not shown.

Binary file not shown.

Binary file not shown.

File diff suppressed because one or more lines are too long

Binary file not shown.

File diff suppressed because one or more lines are too long

Binary file not shown.

File diff suppressed because one or more lines are too long

Binary file not shown.

View file

@ -0,0 +1 @@
"use strict";var Module={};var initializedJS=false;function threadPrintErr(){var text=Array.prototype.slice.call(arguments).join(" ");console.error(text)}function threadAlert(){var text=Array.prototype.slice.call(arguments).join(" ");postMessage({cmd:"alert",text:text,threadId:Module["_pthread_self"]()})}var err=threadPrintErr;self.alert=threadAlert;Module["instantiateWasm"]=function(info,receiveInstance){var instance=new WebAssembly.Instance(Module["wasmModule"],info);receiveInstance(instance);Module["wasmModule"]=null;return instance.exports};function moduleLoaded(){}self.onmessage=function(e){try{if(e.data.cmd==="load"){Module["wasmModule"]=e.data.wasmModule;Module["wasmMemory"]=e.data.wasmMemory;Module["buffer"]=Module["wasmMemory"].buffer;Module["ENVIRONMENT_IS_PTHREAD"]=true;if(typeof e.data.urlOrBlob==="string"){importScripts(e.data.urlOrBlob)}else{var objectUrl=URL.createObjectURL(e.data.urlOrBlob);importScripts(objectUrl);URL.revokeObjectURL(objectUrl)}tflite_web_api_ModuleFactory(Module).then(function(instance){Module=instance;moduleLoaded()})}else if(e.data.cmd==="objectTransfer"){Module["PThread"].receiveObjectTransfer(e.data)}else if(e.data.cmd==="run"){Module["__performance_now_clock_drift"]=performance.now()-e.data.time;Module["__emscripten_thread_init"](e.data.threadInfoStruct,0,0);var max=e.data.stackBase;var top=e.data.stackBase+e.data.stackSize;Module["establishStackSpace"](top,max);Module["PThread"].receiveObjectTransfer(e.data);Module["PThread"].threadInit();if(!initializedJS){Module["___embind_register_native_and_builtin_types"]();initializedJS=true}try{var result=Module["invokeEntryPoint"](e.data.start_routine,e.data.arg);if(Module["keepRuntimeAlive"]()){Module["PThread"].setExitStatus(result)}else{Module["PThread"].threadExit(result)}}catch(ex){if(ex==="Canceled!"){Module["PThread"].threadCancel()}else if(ex!="unwind"){if(ex instanceof Module["ExitStatus"]){if(Module["keepRuntimeAlive"]()){}else{Module["PThread"].threadExit(ex.status)}}else{Module["PThread"].threadExit(-2);throw ex}}}}else if(e.data.cmd==="cancel"){if(Module["_pthread_self"]()){Module["PThread"].threadCancel()}}else if(e.data.target==="setimmediate"){}else if(e.data.cmd==="processThreadQueue"){if(Module["_pthread_self"]()){Module["_emscripten_current_thread_process_queued_calls"]()}}else{err("worker.js received unknown command "+e.data.cmd);err(e.data)}}catch(ex){err("worker.js onmessage() captured an uncaught exception: "+ex);if(ex&&ex.stack)err(ex.stack);throw ex}};

File diff suppressed because one or more lines are too long

Binary file not shown.

View file

@ -0,0 +1 @@
"use strict";var Module={};var initializedJS=false;function threadPrintErr(){var text=Array.prototype.slice.call(arguments).join(" ");console.error(text)}function threadAlert(){var text=Array.prototype.slice.call(arguments).join(" ");postMessage({cmd:"alert",text:text,threadId:Module["_pthread_self"]()})}var err=threadPrintErr;self.alert=threadAlert;Module["instantiateWasm"]=function(info,receiveInstance){var instance=new WebAssembly.Instance(Module["wasmModule"],info);receiveInstance(instance);Module["wasmModule"]=null;return instance.exports};function moduleLoaded(){}self.onmessage=function(e){try{if(e.data.cmd==="load"){Module["wasmModule"]=e.data.wasmModule;Module["wasmMemory"]=e.data.wasmMemory;Module["buffer"]=Module["wasmMemory"].buffer;Module["ENVIRONMENT_IS_PTHREAD"]=true;if(typeof e.data.urlOrBlob==="string"){importScripts(e.data.urlOrBlob)}else{var objectUrl=URL.createObjectURL(e.data.urlOrBlob);importScripts(objectUrl);URL.revokeObjectURL(objectUrl)}tflite_web_api_ModuleFactory(Module).then(function(instance){Module=instance;moduleLoaded()})}else if(e.data.cmd==="objectTransfer"){Module["PThread"].receiveObjectTransfer(e.data)}else if(e.data.cmd==="run"){Module["__performance_now_clock_drift"]=performance.now()-e.data.time;Module["__emscripten_thread_init"](e.data.threadInfoStruct,0,0);var max=e.data.stackBase;var top=e.data.stackBase+e.data.stackSize;Module["establishStackSpace"](top,max);Module["PThread"].receiveObjectTransfer(e.data);Module["PThread"].threadInit();if(!initializedJS){Module["___embind_register_native_and_builtin_types"]();initializedJS=true}try{var result=Module["invokeEntryPoint"](e.data.start_routine,e.data.arg);if(Module["keepRuntimeAlive"]()){Module["PThread"].setExitStatus(result)}else{Module["PThread"].threadExit(result)}}catch(ex){if(ex==="Canceled!"){Module["PThread"].threadCancel()}else if(ex!="unwind"){if(ex instanceof Module["ExitStatus"]){if(Module["keepRuntimeAlive"]()){}else{Module["PThread"].threadExit(ex.status)}}else{Module["PThread"].threadExit(-2);throw ex}}}}else if(e.data.cmd==="cancel"){if(Module["_pthread_self"]()){Module["PThread"].threadCancel()}}else if(e.data.target==="setimmediate"){}else if(e.data.cmd==="processThreadQueue"){if(Module["_pthread_self"]()){Module["_emscripten_current_thread_process_queued_calls"]()}}else{err("worker.js received unknown command "+e.data.cmd);err(e.data)}}catch(ex){err("worker.js onmessage() captured an uncaught exception: "+ex);if(ex&&ex.stack)err(ex.stack);throw ex}};

Binary file not shown.

File diff suppressed because one or more lines are too long

Binary file not shown.

Binary file not shown.

Binary file not shown.

Binary file not shown.

Binary file not shown.

Binary file not shown.

Binary file not shown.

File diff suppressed because one or more lines are too long

View file

@ -0,0 +1,32 @@
{
"0": "waterfall",
"1": "snow",
"2": "landscape",
"3": "underwater",
"4": "architecture",
"5": "sunset / sunrise",
"6": "blue sky",
"7": "cloudy sky",
"8": "greenery",
"9": "autumn leaves",
"10": "potrait",
"11": "flower",
"12": "night shot",
"13": "stage concert",
"14": "fireworks",
"15": "candle light",
"16": "neon lights",
"17": "indoor",
"18": "backlight",
"19": "text documents",
"20": "qr images",
"21": "group potrait",
"22": "computer screens",
"23": "kids",
"24": "dog",
"25": "cat",
"26": "macro",
"27": "food",
"28": "beach",
"29": "mountain"
}
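This JSON, added alongside the image scene model assets, maps the scene classifier's output indices to human-readable labels. A minimal sketch, not part of this commit, of how such a map might be applied to the classifier's score vector; the function name and the threshold are illustrative only:

type SceneLabelMap = Record<string, string>;

// Keep the labels whose scores clear a threshold, best score first.
export const topSceneLabels = (
    scores: number[],
    labelMap: SceneLabelMap,
    minScore = 0.1
): string[] =>
    scores
        .map((score, index) => ({ score, index }))
        .filter(({ score }) => score >= minScore)
        .sort((a, b) => b.score - a.score)
        .map(({ index }) => labelMap[String(index)])
        .filter((label): label is string => Boolean(label));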

Binary file not shown.

Binary file not shown.

Binary file not shown.

Binary file not shown.

Binary file not shown.

Binary file not shown.

Binary file not shown.

Binary file not shown.

File diff suppressed because it is too large Load diff

File diff suppressed because one or more lines are too long

View file

@ -0,0 +1,70 @@
import React, { useEffect, useState } from 'react';
import { Row, Col, Button } from 'react-bootstrap';
import Editor from 'react-simple-code-editor';
import { Config } from 'types/common/config';
export function ConfigEditor(props: {
name: string;
getConfig: () => Promise<Config>;
defaultConfig: () => Promise<Config>;
setConfig: (config: Config) => Promise<string>;
}) {
const [configStr, setConfigStr] = useState('');
useEffect(() => {
loadConfig();
}, []);
const loadConfig = async () => {
const config = await props.getConfig();
setConfigStr(JSON.stringify(config, null, '\t'));
};
const loadDefaultConfig = async () => {
const config = await props.defaultConfig();
setConfigStr(JSON.stringify(config, null, '\t'));
};
const updateConfig = async () => {
const configObj = JSON.parse(configStr);
props.setConfig(configObj);
};
return (
<>
<Row>{props.name} Config:</Row>
<Row
style={{
height: '200px',
overflow: 'auto',
marginTop: '15px',
marginBottom: '15px',
}}>
<Col>
<Editor
value={configStr}
onValueChange={(config) => setConfigStr(config)}
highlight={(code) => code}
padding={10}
style={{
background: 'white',
}}
/>
</Col>
</Row>
<Row>
<Col>
<Button onClick={() => loadConfig()}>Reload</Button>
</Col>
<Col>
<Button onClick={() => loadDefaultConfig()}>
Defaults
</Button>
</Col>
<Col>
<Button onClick={() => updateConfig()}>Update</Button>
</Col>
</Row>
</>
);
}
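A usage sketch for the ConfigEditor above, wired to a hypothetical in-memory config store; the real callers in this commit (for example the commented-out MLDebug page further down) pass getMLSyncConfig / updateMLSyncConfig instead. The import path and the example Config value are assumptions:

import React from 'react';
import { Config } from 'types/common/config';
import { ConfigEditor } from './ConfigEditor';

// Hypothetical store standing in for the persisted ML sync config.
const DEFAULTS = { batchSize: 200 } as Config;
let stored: Config = { ...DEFAULTS };

export const ExampleConfigPanel = () => (
    <ConfigEditor
        name="Example"
        getConfig={async () => stored}
        defaultConfig={async () => DEFAULTS}
        setConfig={async (config) => {
            stored = config;
            return 'saved';
        }}
    />
);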

View file

@ -0,0 +1,100 @@
import React, { useState, useEffect } from 'react';
import styled from 'styled-components';
import { imageBitmapToBlob } from 'utils/image';
import { logError } from 'utils/sentry';
import { getBlobFromCache } from 'utils/storage/cache';
export const Image = styled.img``;
export const FaceCropsRow = styled.div`
& > img {
width: 256px;
height: 256px;
}
`;
export const FaceImagesRow = styled.div`
& > img {
width: 112px;
height: 112px;
}
`;
export function ImageCacheView(props: { url: string; cacheName: string }) {
const [imageBlob, setImageBlob] = useState<Blob>();
useEffect(() => {
let didCancel = false;
async function loadImage() {
try {
let blob: Blob;
if (!props.url || !props.cacheName) {
blob = undefined;
} else {
blob = await getBlobFromCache(props.cacheName, props.url);
}
!didCancel && setImageBlob(blob);
} catch (e) {
logError(e, 'ImageCacheView useEffect failed');
}
}
loadImage();
return () => {
didCancel = true;
};
}, [props.url, props.cacheName]);
return (
<>
<ImageBlobView blob={imageBlob}></ImageBlobView>
</>
);
}
export function ImageBitmapView(props: { image: ImageBitmap }) {
const [imageBlob, setImageBlob] = useState<Blob>();
useEffect(() => {
let didCancel = false;
async function loadImage() {
const blob = props.image && (await imageBitmapToBlob(props.image));
!didCancel && setImageBlob(blob);
}
loadImage();
return () => {
didCancel = true;
};
}, [props.image]);
return (
<>
<ImageBlobView blob={imageBlob}></ImageBlobView>
</>
);
}
export function ImageBlobView(props: { blob: Blob }) {
const [imgUrl, setImgUrl] = useState<string>();
useEffect(() => {
try {
setImgUrl(props.blob && URL.createObjectURL(props.blob));
} catch (e) {
console.error(
'ImageBlobView: can not create object url for blob: ',
props.blob,
e
);
}
}, [props.blob]);
return (
<>
<Image src={imgUrl}></Image>
</>
);
}
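A small usage sketch combining the views above: render a row of face crops read from the browser cache. The cache name and URLs are placeholders, not values defined by this commit:

import React from 'react';
import { FaceCropsRow, ImageCacheView } from './ImageViews';

export const CachedFaceCrops = (props: { urls: string[]; cacheName: string }) => (
    <FaceCropsRow>
        {props.urls.map((url) => (
            <ImageCacheView key={url} url={url} cacheName={props.cacheName} />
        ))}
    </FaceCropsRow>
);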

View file

@ -0,0 +1,236 @@
import React, { useEffect, useRef, useState } from 'react';
import '@tensorflow/tfjs-backend-webgl';
import '@tensorflow/tfjs-backend-cpu';
import arcfaceAlignmentService from 'services/machineLearning/arcfaceAlignmentService';
import arcfaceCropService from 'services/machineLearning/arcfaceCropService';
import blazeFaceDetectionService from 'services/machineLearning/blazeFaceDetectionService';
import { AlignedFace, FaceCrop, ObjectDetection } from 'types/machineLearning';
import { getMLSyncConfig } from 'utils/machineLearning/config';
import {
getAlignedFaceBox,
ibExtractFaceImage,
ibExtractFaceImageUsingTransform,
} from 'utils/machineLearning/faceAlign';
import { ibExtractFaceImageFromCrop } from 'utils/machineLearning/faceCrop';
import { FaceCropsRow, FaceImagesRow, ImageBitmapView } from './ImageViews';
import ssdMobileNetV2Service from 'services/machineLearning/ssdMobileNetV2Service';
import { DEFAULT_ML_SYNC_CONFIG } from 'constants/machineLearning/config';
// import tesseractService from 'services/machineLearning/tesseractService';
import imageSceneService from 'services/machineLearning/imageSceneService';
import { addLogLine } from 'utils/logging';
interface MLFileDebugViewProps {
file: File;
}
function drawFaceDetection(face: AlignedFace, ctx: CanvasRenderingContext2D) {
const pointSize = Math.ceil(
Math.max(ctx.canvas.width / 512, face.detection.box.width / 32)
);
ctx.save();
ctx.strokeStyle = 'rgba(255, 0, 0, 0.8)';
ctx.lineWidth = pointSize;
ctx.strokeRect(
face.detection.box.x,
face.detection.box.y,
face.detection.box.width,
face.detection.box.height
);
ctx.restore();
ctx.save();
ctx.strokeStyle = 'rgba(0, 255, 0, 0.8)';
ctx.lineWidth = Math.round(pointSize * 1.5);
const alignedBox = getAlignedFaceBox(face.alignment);
ctx.strokeRect(
alignedBox.x,
alignedBox.y,
alignedBox.width,
alignedBox.height
);
ctx.restore();
ctx.save();
ctx.fillStyle = 'rgba(0, 0, 255, 0.8)';
face.detection.landmarks.forEach((l) => {
ctx.beginPath();
ctx.arc(l.x, l.y, pointSize, 0, Math.PI * 2, true);
ctx.fill();
});
ctx.restore();
}
function drawBbox(object: ObjectDetection, ctx: CanvasRenderingContext2D) {
ctx.font = '100px Arial';
ctx.save();
ctx.restore();
ctx.rect(...object.bbox);
ctx.lineWidth = 10;
ctx.strokeStyle = 'green';
ctx.fillStyle = 'green';
ctx.stroke();
ctx.fillText(
object.score.toFixed(3) + ' ' + object.class,
object.bbox[0],
object.bbox[1] > 10 ? object.bbox[1] - 5 : 10
);
}
export default function MLFileDebugView(props: MLFileDebugViewProps) {
// const [imageBitmap, setImageBitmap] = useState<ImageBitmap>();
const [faceCrops, setFaceCrops] = useState<FaceCrop[]>();
const [facesUsingCrops, setFacesUsingCrops] = useState<ImageBitmap[]>();
const [facesUsingImage, setFacesUsingImage] = useState<ImageBitmap[]>();
const [facesUsingTransform, setFacesUsingTransform] =
useState<ImageBitmap[]>();
const canvasRef = useRef(null);
useEffect(() => {
let didCancel = false;
const loadFile = async () => {
// TODO: go through worker for these apis, to not include ml code in main bundle
const imageBitmap = await createImageBitmap(props.file);
const faceDetections = await blazeFaceDetectionService.detectFaces(
imageBitmap
);
addLogLine('detectedFaces: ', faceDetections.length);
const objectDetections = await ssdMobileNetV2Service.detectObjects(
imageBitmap,
DEFAULT_ML_SYNC_CONFIG.objectDetection.maxNumBoxes,
DEFAULT_ML_SYNC_CONFIG.objectDetection.minScore
);
addLogLine('detectedObjects: ', JSON.stringify(objectDetections));
const sceneDetections = await imageSceneService.detectScenes(
imageBitmap,
DEFAULT_ML_SYNC_CONFIG.sceneDetection.minScore
);
addLogLine('detectedScenes: ', JSON.stringify(sceneDetections));
// const textDetections = await tesseractService.detectText(
// imageBitmap,
// DEFAULT_ML_SYNC_CONFIG.textDetection.minAccuracy,
// 0
// );
// addLogLine('detectedTexts: ', textDetections);
const mlSyncConfig = await getMLSyncConfig();
const faceCropPromises = faceDetections.map(async (faceDetection) =>
arcfaceCropService.getFaceCrop(
imageBitmap,
faceDetection,
mlSyncConfig.faceCrop
)
);
const faceCrops = await Promise.all(faceCropPromises);
if (didCancel) return;
setFaceCrops(faceCrops);
const faceAlignments = faceDetections.map((detection) =>
arcfaceAlignmentService.getFaceAlignment(detection)
);
addLogLine('alignedFaces: ', JSON.stringify(faceAlignments));
const canvas: HTMLCanvasElement = canvasRef.current;
canvas.width = imageBitmap.width;
canvas.height = imageBitmap.height;
const ctx = canvas.getContext('2d');
if (didCancel) return;
ctx.drawImage(imageBitmap, 0, 0);
const alignedFaces = faceAlignments.map((alignment, i) => {
return {
detection: faceDetections[i],
alignment,
} as AlignedFace;
});
alignedFaces.forEach((alignedFace) =>
drawFaceDetection(alignedFace, ctx)
);
objectDetections.forEach((object) => drawBbox(object, ctx));
const facesUsingCrops = await Promise.all(
alignedFaces.map((face, i) => {
return ibExtractFaceImageFromCrop(
faceCrops[i],
face.alignment,
112
);
})
);
const facesUsingImage = await Promise.all(
alignedFaces.map((face) => {
return ibExtractFaceImage(imageBitmap, face.alignment, 112);
})
);
const facesUsingTransform = await Promise.all(
alignedFaces.map((face) => {
return ibExtractFaceImageUsingTransform(
imageBitmap,
face.alignment,
112
);
})
);
if (didCancel) return;
setFacesUsingCrops(facesUsingCrops);
setFacesUsingImage(facesUsingImage);
setFacesUsingTransform(facesUsingTransform);
};
props.file && loadFile();
return () => {
didCancel = true;
};
}, [props.file]);
return (
<div>
<p></p>
{/* <ImageBitmapView image={imageBitmap}></ImageBitmapView> */}
<canvas
ref={canvasRef}
width={0}
height={0}
style={{ maxWidth: '100%' }}
/>
<p></p>
<div>Face Crops:</div>
<FaceCropsRow>
{faceCrops?.map((faceCrop, i) => (
<ImageBitmapView
key={i}
image={faceCrop.image}></ImageBitmapView>
))}
</FaceCropsRow>
<p></p>
<div>Face Images using face crops:</div>
<FaceImagesRow>
{facesUsingCrops?.map((image, i) => (
<ImageBitmapView key={i} image={image}></ImageBitmapView>
))}
</FaceImagesRow>
<div>Face Images using original image:</div>
<FaceImagesRow>
{facesUsingImage?.map((image, i) => (
<ImageBitmapView key={i} image={image}></ImageBitmapView>
))}
</FaceImagesRow>
<div>Face Images using transfrom:</div>
<FaceImagesRow>
{facesUsingTransform?.map((image, i) => (
<ImageBitmapView key={i} image={image}></ImageBitmapView>
))}
</FaceImagesRow>
</div>
);
}

View file

@ -0,0 +1,95 @@
import {
Stack,
Box,
Button,
FormGroup,
Checkbox,
FormControlLabel,
DialogProps,
} from '@mui/material';
import { EnteDrawer } from 'components/EnteDrawer';
import Titlebar from 'components/Titlebar';
import { useEffect, useState } from 'react';
import constants from 'utils/strings/constants';
export default function EnableFaceSearch({
open,
onClose,
enableFaceSearch,
onRootClose,
}) {
const [acceptTerms, setAcceptTerms] = useState(false);
useEffect(() => {
setAcceptTerms(false);
}, [open]);
const handleRootClose = () => {
onClose();
onRootClose();
};
const handleDrawerClose: DialogProps['onClose'] = (_, reason) => {
if (reason === 'backdropClick') {
handleRootClose();
} else {
onClose();
}
};
return (
<EnteDrawer
transitionDuration={0}
open={open}
onClose={handleDrawerClose}
BackdropProps={{
sx: { '&&&': { backgroundColor: 'transparent' } },
}}>
<Stack spacing={'4px'} py={'12px'}>
<Titlebar
onClose={onClose}
title={constants.ENABLE_FACE_SEARCH_TITLE}
onRootClose={handleRootClose}
/>
<Stack py={'20px'} px={'8px'} spacing={'32px'}>
<Box px={'8px'}>
{constants.ENABLE_FACE_SEARCH_DESCRIPTION()}
</Box>
<FormGroup sx={{ width: '100%' }}>
<FormControlLabel
sx={{
color: 'text.secondary',
ml: 0,
mt: 2,
}}
control={
<Checkbox
size="small"
checked={acceptTerms}
onChange={(e) =>
setAcceptTerms(e.target.checked)
}
/>
}
label={constants.FACE_SEARCH_CONFIRMATION}
/>
</FormGroup>
<Stack px={'8px'} spacing={'8px'}>
<Button
color={'accent'}
size="large"
disabled={!acceptTerms}
onClick={enableFaceSearch}>
{constants.ENABLE_FACE_SEARCH}
</Button>
<Button
color={'secondary'}
size="large"
onClick={onClose}>
{constants.CANCEL}
</Button>
</Stack>
</Stack>
</Stack>
</EnteDrawer>
);
}

View file

@ -0,0 +1,38 @@
import { Stack, Box, Button } from '@mui/material';
import Titlebar from 'components/Titlebar';
import { ML_BLOG_LINK } from 'constants/urls';
import { openLink } from 'utils/common';
import constants from 'utils/strings/constants';
export default function EnableMLSearch({
onClose,
enableMlSearch,
onRootClose,
}) {
return (
<Stack spacing={'4px'} py={'12px'}>
<Titlebar
onClose={onClose}
title={constants.ML_SEARCH}
onRootClose={onRootClose}
/>
<Stack py={'20px'} px={'8px'} spacing={'32px'}>
<Box px={'8px'}>{constants.ML_SEARCH_DESCRIPTION()}</Box>
<Stack px={'8px'} spacing={'8px'}>
<Button
color={'accent'}
size="large"
onClick={enableMlSearch}>
{constants.ENABLE}
</Button>
<Button
color={'secondary'}
size="large"
onClick={() => openLink(ML_BLOG_LINK, true)}>
{constants.ML_MORE_DETAILS}
</Button>
</Stack>
</Stack>
</Stack>
);
}

View file

@ -0,0 +1,145 @@
import { Box, DialogProps } from '@mui/material';
import { EnteDrawer } from 'components/EnteDrawer';
import { AppContext } from 'pages/_app';
import { useContext, useState } from 'react';
import {
getFaceSearchEnabledStatus,
updateFaceSearchEnabledStatus,
} from 'services/userService';
import { logError } from 'utils/sentry';
import constants from 'utils/strings/constants';
import EnableFaceSearch from './enableFaceSearch';
import EnableMLSearch from './enableMLSearch';
import ManageMLSearch from './manageMLSearch';
const MLSearchSettings = ({ open, onClose, onRootClose }) => {
const {
updateMlSearchEnabled,
mlSearchEnabled,
setDialogMessage,
somethingWentWrong,
startLoading,
finishLoading,
} = useContext(AppContext);
const [enableFaceSearchView, setEnableFaceSearchView] = useState(false);
const openEnableFaceSearch = () => {
setEnableFaceSearchView(true);
};
const closeEnableFaceSearch = () => {
setEnableFaceSearchView(false);
};
const enableMlSearch = async () => {
try {
const hasEnabledFaceSearch = await getFaceSearchEnabledStatus();
if (!hasEnabledFaceSearch) {
openEnableFaceSearch();
} else {
updateMlSearchEnabled(true);
}
} catch (e) {
logError(e, 'Enable ML search failed');
somethingWentWrong();
}
};
const enableFaceSearch = async () => {
try {
startLoading();
await updateFaceSearchEnabledStatus(true);
updateMlSearchEnabled(true);
closeEnableFaceSearch();
finishLoading();
} catch (e) {
logError(e, 'Enable face search failed');
somethingWentWrong();
}
};
const disableMlSearch = async () => {
try {
await updateMlSearchEnabled(false);
onClose();
} catch (e) {
logError(e, 'Disable ML search failed');
somethingWentWrong();
}
};
const disableFaceSearch = async () => {
try {
startLoading();
await updateFaceSearchEnabledStatus(false);
await disableMlSearch();
finishLoading();
} catch (e) {
logError(e, 'Disable face search failed');
somethingWentWrong();
}
};
const confirmDisableFaceSearch = () => {
setDialogMessage({
title: constants.DISABLE_FACE_SEARCH_TITLE,
content: constants.DISABLE_FACE_SEARCH_DESCRIPTION(),
close: { text: constants.CANCEL },
proceed: {
variant: 'primary',
text: constants.DISABLE_FACE_SEARCH,
action: disableFaceSearch,
},
});
};
const handleRootClose = () => {
onClose();
onRootClose();
};
const handleDrawerClose: DialogProps['onClose'] = (_, reason) => {
if (reason === 'backdropClick') {
handleRootClose();
} else {
onClose();
}
};
return (
<Box>
<EnteDrawer
anchor="left"
transitionDuration={0}
open={open}
onClose={handleDrawerClose}
BackdropProps={{
sx: { '&&&': { backgroundColor: 'transparent' } },
}}>
{mlSearchEnabled ? (
<ManageMLSearch
onClose={onClose}
disableMlSearch={disableMlSearch}
handleDisableFaceSearch={confirmDisableFaceSearch}
onRootClose={handleRootClose}
/>
) : (
<EnableMLSearch
onClose={onClose}
enableMlSearch={enableMlSearch}
onRootClose={handleRootClose}
/>
)}
</EnteDrawer>
<EnableFaceSearch
open={enableFaceSearchView}
onClose={closeEnableFaceSearch}
enableFaceSearch={enableFaceSearch}
onRootClose={handleRootClose}
/>
</Box>
);
};
export default MLSearchSettings;

View file

@ -0,0 +1,42 @@
import { Stack, Box, ButtonProps, TypographyVariant } from '@mui/material';
import SidebarButton from 'components/Sidebar/Button';
import Titlebar from 'components/Titlebar';
import constants from 'utils/strings/constants';
type Iprops = ButtonProps<'button', { typographyVariant?: TypographyVariant }>;
const ManageOptions = (props: Iprops) => {
return (
<SidebarButton
variant="contained"
color="secondary"
{...props}></SidebarButton>
);
};
export default function ManageMLSearch({
onClose,
disableMlSearch,
handleDisableFaceSearch,
onRootClose,
}) {
return (
<Stack spacing={'4px'} py={'12px'}>
<Titlebar
onClose={onClose}
title={constants.ML_SEARCH}
onRootClose={onRootClose}
/>
<Box px={'16px'}>
<Stack py={'20px'} spacing={'24px'}>
<ManageOptions onClick={disableMlSearch}>
{constants.DISABLE_BETA}
</ManageOptions>
<ManageOptions onClick={handleDisableFaceSearch}>
{constants.DISABLE_FACE_SEARCH}
</ManageOptions>
</Stack>
</Box>
</Stack>
);
}

View file

@ -0,0 +1,58 @@
import React, { useState } from 'react';
import { Button, Spinner } from 'react-bootstrap';
import { EnteFile } from 'types/file';
import { getToken, getUserID } from 'utils/common/key';
import mlService from '../../services/machineLearning/machineLearningService';
function MLServiceFileInfoButton({
file,
updateMLDataIndex,
setUpdateMLDataIndex,
}: {
file: EnteFile;
updateMLDataIndex: number;
setUpdateMLDataIndex: (num: number) => void;
}) {
const [mlServiceRunning, setMlServiceRunning] = useState(false);
const runMLService = async () => {
setMlServiceRunning(true);
const token = getToken();
const userID = getUserID();
// index 4 is for timeout of 240 seconds
await mlService.syncLocalFile(token, userID, file as EnteFile, null, 4);
setUpdateMLDataIndex(updateMLDataIndex + 1);
setMlServiceRunning(false);
};
return (
<div
style={{
marginTop: '18px',
}}>
<Button
onClick={runMLService}
disabled={mlServiceRunning}
variant={mlServiceRunning ? 'secondary' : 'primary'}>
{!mlServiceRunning ? (
'Run ML Service'
) : (
<>
ML Service Running{' '}
<Spinner
animation="border"
size="sm"
style={{
marginLeft: '5px',
}}
/>
</>
)}
</Button>
</div>
);
}
export default MLServiceFileInfoButton;

View file

@ -0,0 +1,531 @@
export {};
// import React, { useState, useEffect, useContext, ChangeEvent } from 'react';
// import { getData, LS_KEYS } from 'utils/storage/localStorage';
// import { useRouter } from 'next/router';
// import { ComlinkWorker } from 'utils/comlink';
// import { AppContext } from 'pages/_app';
// import { PAGES } from 'constants/pages';
// import * as Comlink from 'comlink';
// import { runningInBrowser } from 'utils/common';
// import TFJSImage from './TFJSImage';
// import {
// Face,
// MLDebugResult,
// MLSyncConfig,
// Person,
// } from 'types/machineLearning';
// import Tree from 'react-d3-tree';
// import MLFileDebugView from './MLFileDebugView';
// import mlWorkManager from 'services/machineLearning/mlWorkManager';
// // import { getAllFacesMap, mlLibraryStore } from 'utils/storage/mlStorage';
// import { getAllFacesFromMap, getAllPeople } from 'utils/machineLearning';
// import { FaceImagesRow, ImageBlobView, ImageCacheView } from './ImageViews';
// import mlIDbStorage from 'utils/storage/mlIDbStorage';
// import { getFaceCropBlobFromStorage } from 'utils/machineLearning/faceCrop';
// import { PeopleList } from './PeopleList';
// import styled from 'styled-components';
// import { RawNodeDatum } from 'react-d3-tree/lib/types/common';
// import { DebugInfo, mstToBinaryTree } from 'hdbscan';
// import { toD3Tree } from 'utils/machineLearning/clustering';
// import {
// getMLSyncConfig,
// getMLSyncJobConfig,
// updateMLSyncConfig,
// updateMLSyncJobConfig,
// } from 'utils/machineLearning/config';
// import { Button, Col, Container, Form, Row } from 'react-bootstrap';
// import { JobConfig } from 'types/common/job';
// import { ConfigEditor } from './ConfigEditor';
// import {
// DEFAULT_ML_SYNC_CONFIG,
// DEFAULT_ML_SYNC_JOB_CONFIG,
// } from 'constants/machineLearning/config';
// import { exportMlData, importMlData } from 'utils/machineLearning/mldataExport';
// import { FACE_CROPS_CACHE } from 'constants/cache';
// interface TSNEProps {
// mlResult: MLDebugResult;
// }
// function TSNEPlot(props: TSNEProps) {
// return (
// <svg
// width={props.mlResult.tsne.width + 40}
// height={props.mlResult.tsne.height + 40}>
// {props.mlResult.tsne.dataset.map((data, i) => (
// <foreignObject
// key={i}
// x={data.x - 20}
// y={data.y - 20}
// width={40}
// height={40}>
// <TFJSImage
// faceImage={props.mlResult.allFaces[i]?.faceImage}
// width={40}
// height={40}></TFJSImage>
// </foreignObject>
// ))}
// </svg>
// );
// }
// const D3ImageContainer = styled.div`
// & > img {
// width: 100%;
// height: 100%;
// }
// `;
// const renderForeignObjectNode = ({ nodeDatum, foreignObjectProps }) => (
// <g>
// <circle r={15}></circle>
// {/* `foreignObject` requires width & height to be explicitly set. */}
// <foreignObject {...foreignObjectProps}>
// <div
// style={{
// border: '1px solid black',
// backgroundColor: '#dedede',
// }}>
// <h3 style={{ textAlign: 'center', color: 'black' }}>
// {nodeDatum.name}
// </h3>
// {!nodeDatum.children && nodeDatum.name && (
// <D3ImageContainer>
// <ImageCacheView
// url={nodeDatum.attributes.face.crop?.imageUrl}
// cacheName={FACE_CROPS_CACHE}
// />
// </D3ImageContainer>
// )}
// </div>
// </foreignObject>
// </g>
// );
// const getFaceCrops = async (faces: Face[]) => {
// const faceCropPromises = faces
// .filter((f) => f?.crop)
// .map((f) => getFaceCropBlobFromStorage(f.crop));
// return Promise.all(faceCropPromises);
// };
// const ClusterFacesRow = styled(FaceImagesRow)`
// display: flex;
// max-width: 100%;
// overflow: auto;
// `;
// const RowWithGap = styled(Row)`
// justify-content: center;
// & > * {
// margin: 10px;
// }
// `;
// export default function MLDebug() {
// const [token, setToken] = useState<string>();
// const [clusterFaceDistance] = useState<number>(0.4);
// // const [minClusterSize, setMinClusterSize] = useState<number>(5);
// // const [minFaceSize, setMinFaceSize] = useState<number>(32);
// // const [batchSize, setBatchSize] = useState<number>(200);
// const [maxFaceDistance] = useState<number>(0.5);
// const [mlResult, setMlResult] = useState<MLDebugResult>({
// allFaces: [],
// clustersWithNoise: {
// clusters: [],
// noise: [],
// },
// tree: null,
// tsne: null,
// });
// const [allPeople, setAllPeople] = useState<Array<Person>>([]);
// const [clusters, setClusters] = useState<Array<Array<Blob>>>([]);
// const [noiseFaces, setNoiseFaces] = useState<Array<Blob>>([]);
// const [minProbability, setMinProbability] = useState<number>(0);
// const [maxProbability, setMaxProbability] = useState<number>(1);
// const [filteredFaces, setFilteredFaces] = useState<Array<Blob>>([]);
// const [mstD3Tree, setMstD3Tree] = useState<RawNodeDatum>(null);
// const [debugFile, setDebugFile] = useState<File>();
// const router = useRouter();
// const appContext = useContext(AppContext);
// const getDedicatedMLWorker = (): ComlinkWorker => {
// if (token) {
// addLogLine('Toen present');
// }
// if (runningInBrowser()) {
// addLogLine('initiating worker');
// const worker = new Worker(
// new URL('worker/machineLearning.worker', import.meta.url),
// { name: 'ml-worker' }
// );
// addLogLine('initiated worker');
// const comlink = Comlink.wrap(worker);
// return { comlink, worker };
// }
// };
// let MLWorker: ComlinkWorker;
// useEffect(() => {
// const user = getData(LS_KEYS.USER);
// if (!user?.token) {
// router.push(PAGES.ROOT);
// } else {
// setToken(user.token);
// }
// appContext.showNavBar(true);
// }, []);
// const onSync = async () => {
// try {
// if (!MLWorker) {
// MLWorker = getDedicatedMLWorker();
// addLogLine('initiated MLWorker');
// }
// const mlWorker = await new MLWorker.comlink();
// const result = await mlWorker.sync(
// token,
// clusterFaceDistance,
// // minClusterSize,
// // minFaceSize,
// // batchSize,
// maxFaceDistance
// );
// setMlResult(result);
// } catch (e) {
// console.error(e);
// throw e;
// } finally {
// // setTimeout(()=>{
// // addLogLine('terminating ml-worker');
// MLWorker.worker.terminate();
// // }, 30000);
// }
// };
// const onStartMLSync = async () => {
// mlWorkManager.startSyncJob();
// };
// const onStopMLSync = async () => {
// mlWorkManager.stopSyncJob();
// };
// // for debug purpose, not a memory efficient implementation
// const onExportMLData = async () => {
// let mlDataZipHandle: FileSystemFileHandle;
// try {
// mlDataZipHandle = await showSaveFilePicker({
// suggestedName: `ente-mldata-${Date.now()}`,
// types: [
// {
// accept: {
// 'application/zip': ['.zip'],
// },
// },
// ],
// });
// } catch (e) {
// console.error(e);
// return;
// }
// try {
// const mlDataZipWritable = await mlDataZipHandle.createWritable();
// await exportMlData(mlDataZipWritable);
// } catch (e) {
// console.error('Error while exporting: ', e);
// }
// };
// const onImportMLData = async () => {
// let mlDataZipHandle: FileSystemFileHandle;
// try {
// [mlDataZipHandle] = await showOpenFilePicker({
// types: [
// {
// accept: {
// 'application/zip': ['.zip'],
// },
// },
// ],
// });
// } catch (e) {
// console.error(e);
// return;
// }
// try {
// const mlDataZipFile = await mlDataZipHandle.getFile();
// await importMlData(mlDataZipFile);
// } catch (e) {
// console.error('Error while importing: ', e);
// }
// };
// const onClearPeopleIndex = async () => {
// mlIDbStorage.setIndexVersion('people', 0);
// };
// const onDebugFile = async (event: ChangeEvent<HTMLInputElement>) => {
// setDebugFile(event.target.files[0]);
// };
// const onLoadAllPeople = async () => {
// const allPeople = await getAllPeople(100);
// setAllPeople(allPeople);
// };
// const onLoadClusteringResults = async () => {
// const mlLibraryData = await mlIDbStorage.getLibraryData();
// const allFacesMap = await mlIDbStorage.getAllFacesMap();
// const allFaces = getAllFacesFromMap(allFacesMap);
// const clusterPromises = mlLibraryData?.faceClusteringResults?.clusters
// .map((cluster) => cluster?.slice(0, 200).map((f) => allFaces[f]))
// .map((faces) => getFaceCrops(faces));
// setClusters(await Promise.all(clusterPromises));
// const noiseFaces = mlLibraryData?.faceClusteringResults?.noise
// ?.slice(0, 200)
// .map((n) => allFaces[n]);
// setNoiseFaces(await getFaceCrops(noiseFaces));
// // TODO: disabling mst binary tree display for faces > 1000
// // can enable once toD3Tree is non recursive
// // and only important part of tree is retrieved
// const clusteringDebugInfo: DebugInfo =
// mlLibraryData?.faceClusteringResults['debugInfo'];
// if (allFaces.length <= 1000 && clusteringDebugInfo) {
// const mstBinaryTree = mstToBinaryTree(clusteringDebugInfo.mst);
// const d3Tree = toD3Tree(mstBinaryTree, allFaces);
// setMstD3Tree(d3Tree);
// }
// };
// const showFilteredFaces = async () => {
// addLogLine('Filtering with: ', minProbability, maxProbability);
// const allFacesMap = await mlIDbStorage.getAllFacesMap();
// const allFaces = getAllFacesFromMap(allFacesMap);
// const filteredFaces = allFaces
// .filter(
// (f) =>
// f.detection.probability >= minProbability &&
// f.detection.probability <= maxProbability
// )
// .slice(0, 200);
// setFilteredFaces(await getFaceCrops(filteredFaces));
// };
// const nodeSize = { x: 180, y: 180 };
// const foreignObjectProps = { width: 112, height: 150, x: -56 };
// // TODO: Remove debug page or config editor from prod
// return (
// <Container>
// {/* <div>ClusterFaceDistance: {clusterFaceDistance}</div>
// <button onClick={() => setClusterFaceDistance(0.35)}>0.35</button>
// <button onClick={() => setClusterFaceDistance(0.4)}>0.4</button>
// <button onClick={() => setClusterFaceDistance(0.45)}>0.45</button>
// <button onClick={() => setClusterFaceDistance(0.5)}>0.5</button>
// <button onClick={() => setClusterFaceDistance(0.55)}>0.55</button>
// <button onClick={() => setClusterFaceDistance(0.6)}>0.6</button>
// <p></p> */}
// <hr />
// <Row>
// <Col>
// <ConfigEditor
// name="ML Sync"
// getConfig={() => getMLSyncConfig()}
// defaultConfig={() =>
// Promise.resolve(DEFAULT_ML_SYNC_CONFIG)
// }
// setConfig={(mlSyncConfig) =>
// updateMLSyncConfig(mlSyncConfig as MLSyncConfig)
// }></ConfigEditor>
// </Col>
// <Col>
// <ConfigEditor
// name="ML Sync Job"
// getConfig={() => getMLSyncJobConfig()}
// defaultConfig={() =>
// Promise.resolve(DEFAULT_ML_SYNC_JOB_CONFIG)
// }
// setConfig={(mlSyncJobConfig) =>
// updateMLSyncJobConfig(mlSyncJobConfig as JobConfig)
// }></ConfigEditor>
// </Col>
// </Row>
// {/* <div>MinFaceSize: {minFaceSize}</div>
// <button onClick={() => setMinFaceSize(16)}>16</button>
// <button onClick={() => setMinFaceSize(24)}>24</button>
// <button onClick={() => setMinFaceSize(32)}>32</button>
// <button onClick={() => setMinFaceSize(64)}>64</button>
// <button onClick={() => setMinFaceSize(112)}>112</button>
// <p></p>
// <div>MinClusterSize: {minClusterSize}</div>
// <button onClick={() => setMinClusterSize(2)}>2</button>
// <button onClick={() => setMinClusterSize(3)}>3</button>
// <button onClick={() => setMinClusterSize(4)}>4</button>
// <button onClick={() => setMinClusterSize(5)}>5</button>
// <button onClick={() => setMinClusterSize(8)}>8</button>
// <button onClick={() => setMinClusterSize(12)}>12</button>
// <p></p>
// <div>Number of Images in Batch: {batchSize}</div>
// <button onClick={() => setBatchSize(50)}>50</button>
// <button onClick={() => setBatchSize(100)}>100</button>
// <button onClick={() => setBatchSize(200)}>200</button>
// <button onClick={() => setBatchSize(500)}>500</button> */}
// {/* <p></p>
// <div>MaxFaceDistance: {maxFaceDistance}</div>
// <button onClick={() => setMaxFaceDistance(0.45)}>0.45</button>
// <button onClick={() => setMaxFaceDistance(0.5)}>0.5</button>
// <button onClick={() => setMaxFaceDistance(0.55)}>0.55</button>
// <button onClick={() => setMaxFaceDistance(0.6)}>0.6</button> */}
// <hr />
// <RowWithGap>
// <Button onClick={onSync} disabled>
// Run ML Sync
// </Button>
// <Button onClick={onStartMLSync}>Start ML Sync</Button>
// <Button onClick={onStopMLSync}>Stop ML Sync</Button>
// </RowWithGap>
// <hr />
// <RowWithGap>
// <Button onClick={onExportMLData}>Export ML Data</Button>
// <Button onClick={onImportMLData}>Import ML Data</Button>
// <Button onClick={onClearPeopleIndex}>Clear People Index</Button>
// </RowWithGap>
// <hr />
// <RowWithGap>
// <Button onClick={onLoadAllPeople}>
// Load All Identified People
// </Button>
// </RowWithGap>
// <Row>All identified people:</Row>
// <PeopleList people={allPeople}></PeopleList>
// <hr />
// <RowWithGap>
// <Button onClick={onLoadClusteringResults}>
// Load Clustering Results
// </Button>
// </RowWithGap>
// <Row>Clusters:</Row>
// {clusters.map((cluster, index) => (
// <ClusterFacesRow key={index}>
// {cluster?.map((face, i) => (
// <ImageBlobView key={i} blob={face}></ImageBlobView>
// ))}
// </ClusterFacesRow>
// ))}
// <p></p>
// <Row>Noise:</Row>
// <ClusterFacesRow>
// {noiseFaces?.map((face, i) => (
// <ImageBlobView key={i} blob={face}></ImageBlobView>
// ))}
// </ClusterFacesRow>
// <hr />
// <Row>Show Faces based on detection probability:</Row>
// <Row style={{ alignItems: 'end' }}>
// <Col>
// <Form.Label htmlFor="minProbability">Min: </Form.Label>
// <Form.Control
// type="number"
// id="minProbability"
// placeholder="e.g. 70"
// onChange={(e) =>
// setMinProbability(
// (parseFloat(e.target.value) || 0) / 100
// )
// }
// />
// </Col>
// <Col>
// <Form.Label htmlFor="maxProbability">Max: </Form.Label>
// <Form.Control
// type="number"
// id="maxProbability"
// placeholder="e.g. 80"
// onChange={(e) =>
// setMaxProbability(
// (parseFloat(e.target.value) || 100) / 100
// )
// }
// />
// </Col>
// <Col>
// <Button onClick={showFilteredFaces}>Show Faces</Button>
// </Col>
// </Row>
// <p></p>
// <ClusterFacesRow>
// {filteredFaces?.map((face, i) => (
// <ImageBlobView key={i} blob={face}></ImageBlobView>
// ))}
// </ClusterFacesRow>
// <hr />
// <Row>Debug File:</Row>
// <input id="debugFile" type="file" onChange={onDebugFile} />
// <MLFileDebugView file={debugFile} />
// <hr />
// <Row>Hdbscan MST: </Row>
// <div
// id="treeWrapper"
// style={{
// width: '100%',
// height: '50em',
// backgroundColor: 'white',
// }}>
// {mstD3Tree && (
// <Tree
// data={mstD3Tree}
// orientation={'vertical'}
// nodeSize={nodeSize}
// zoom={0.25}
// renderCustomNodeElement={(rd3tProps) =>
// renderForeignObjectNode({
// ...rd3tProps,
// foreignObjectProps,
// })
// }
// />
// )}
// </div>
// <hr />
// <Row>TSNE of embeddings: </Row>
// <Row>
// <div
// id="tsneWrapper"
// style={{
// width: '840px',
// height: '840px',
// backgroundColor: 'white',
// overflow: 'auto',
// }}>
// {mlResult.tsne && <TSNEPlot mlResult={mlResult} />}
// </div>
// </Row>
// </Container>
// );
// }

View file

@ -0,0 +1,52 @@
import Box from '@mui/material/Box';
import { Chip } from 'components/Chip';
import { Legend } from 'components/PhotoViewer/styledComponents/Legend';
import React, { useState, useEffect } from 'react';
import { EnteFile } from 'types/file';
import mlIDbStorage from 'utils/storage/mlIDbStorage';
import constants from 'utils/strings/constants';
export function ObjectLabelList(props: {
file: EnteFile;
updateMLDataIndex: number;
}) {
const [objects, setObjects] = useState<Array<string>>([]);
useEffect(() => {
let didCancel = false;
const main = async () => {
const objects = await mlIDbStorage.getAllObjectsMap();
const uniqueObjectNames = [
...new Set(
(objects.get(props.file.id) ?? []).map(
(object) => object.detection.class
)
),
];
!didCancel && setObjects(uniqueObjectNames);
};
main();
return () => {
didCancel = true;
};
}, [props.file, props.updateMLDataIndex]);
if (objects.length === 0) return <></>;
return (
<div>
<Legend sx={{ pb: 1, display: 'block' }}>
{constants.OBJECTS}
</Legend>
<Box
display={'flex'}
gap={1}
flexWrap="wrap"
justifyContent={'flex-start'}
alignItems={'flex-start'}>
{objects.map((object) => (
<Chip key={object}>{object}</Chip>
))}
</Box>
</div>
);
}

View file

@ -0,0 +1,183 @@
import React, { useState, useEffect } from 'react';
import { Face, Person } from 'types/machineLearning';
import {
getAllPeople,
getPeopleList,
getUnidentifiedFaces,
} from 'utils/machineLearning';
import styled from 'styled-components';
import { EnteFile } from 'types/file';
import { ImageCacheView } from './ImageViews';
import { CACHES } from 'constants/cache';
import { Legend } from 'components/PhotoViewer/styledComponents/Legend';
import constants from 'utils/strings/constants';
import { addLogLine } from 'utils/logging';
import { logError } from 'utils/sentry';
const FaceChipContainer = styled.div`
display: flex;
flex-wrap: wrap;
justify-content: center;
align-items: center;
margin-top: 5px;
margin-bottom: 5px;
overflow: auto;
`;
const FaceChip = styled.div<{ clickable?: boolean }>`
width: 112px;
height: 112px;
margin: 5px;
border-radius: 50%;
overflow: hidden;
position: relative;
cursor: ${({ clickable }) => (clickable ? 'pointer' : 'normal')};
& > img {
width: 100%;
height: 100%;
}
`;
interface PeopleListPropsBase {
onSelect?: (person: Person, index: number) => void;
}
export interface PeopleListProps extends PeopleListPropsBase {
people: Array<Person>;
maxRows?: number;
}
export function PeopleList(props: PeopleListProps) {
return (
<FaceChipContainer
style={
props.maxRows && {
maxHeight: props.maxRows * 122 + 28,
}
}>
{props.people.map((person, index) => (
<FaceChip
key={index}
clickable={!!props.onSelect}
onClick={() =>
props.onSelect && props.onSelect(person, index)
}>
<ImageCacheView
url={person.displayImageUrl}
cacheName={CACHES.FACE_CROPS}
/>
</FaceChip>
))}
</FaceChipContainer>
);
}
export interface PhotoPeopleListProps extends PeopleListPropsBase {
file: EnteFile;
updateMLDataIndex: number;
}
export function PhotoPeopleList(props: PhotoPeopleListProps) {
const [people, setPeople] = useState<Array<Person>>([]);
useEffect(() => {
let didCancel = false;
async function updateFaceImages() {
addLogLine('calling getPeopleList');
const startTime = Date.now();
const people = await getPeopleList(props.file);
addLogLine('getPeopleList', Date.now() - startTime, 'ms');
addLogLine('getPeopleList done, didCancel: ', didCancel);
!didCancel && setPeople(people);
}
updateFaceImages();
return () => {
didCancel = true;
};
}, [props.file, props.updateMLDataIndex]);
if (people.length === 0) return <></>;
return (
<div>
<Legend>{constants.PEOPLE}</Legend>
<PeopleList people={people} onSelect={props.onSelect}></PeopleList>
</div>
);
}
export interface AllPeopleListProps extends PeopleListPropsBase {
limit?: number;
}
export function AllPeopleList(props: AllPeopleListProps) {
const [people, setPeople] = useState<Array<Person>>([]);
useEffect(() => {
let didCancel = false;
async function updateFaceImages() {
try {
let people = await getAllPeople();
if (props.limit) {
people = people.slice(0, props.limit);
}
!didCancel && setPeople(people);
} catch (e) {
logError(e, 'updateFaceImages failed');
}
}
updateFaceImages();
return () => {
didCancel = true;
};
}, [props.limit]);
return <PeopleList people={people} onSelect={props.onSelect}></PeopleList>;
}
export function UnidentifiedFaces(props: {
file: EnteFile;
updateMLDataIndex: number;
}) {
const [faces, setFaces] = useState<Array<Face>>([]);
useEffect(() => {
let didCancel = false;
async function updateFaceImages() {
const faces = await getUnidentifiedFaces(props.file);
!didCancel && setFaces(faces);
}
updateFaceImages();
return () => {
didCancel = true;
};
}, [props.file, props.updateMLDataIndex]);
if (!faces || faces.length === 0) return <></>;
return (
<>
<div>
<Legend>{constants.UNIDENTIFIED_FACES}</Legend>
</div>
<FaceChipContainer>
{faces &&
faces.map((face, index) => (
<FaceChip key={index}>
<ImageCacheView
url={face.crop?.imageUrl}
cacheName={CACHES.FACE_CROPS}
/>
</FaceChip>
))}
</FaceChipContainer>
</>
);
}

View file

@ -0,0 +1,39 @@
import React, { useEffect, useRef } from 'react';
import * as tf from '@tensorflow/tfjs-core';
import { FaceImage } from 'types/machineLearning';
interface FaceImageProps {
faceImage: FaceImage;
width?: number;
height?: number;
}
export default function TFJSImage(props: FaceImageProps) {
const canvasRef = useRef(null);
useEffect(() => {
if (!props || !props.faceImage) {
return;
}
const canvas = canvasRef.current;
const faceTensor = tf.tensor3d(props.faceImage);
const resized =
props.width && props.height
? tf.image.resizeBilinear(faceTensor, [
props.width,
props.height,
])
: faceTensor;
const normFaceImage = tf.div(tf.add(resized, 1.0), 2);
tf.browser.toPixels(normFaceImage as tf.Tensor3D, canvas);
}, [props]);
return (
<canvas
ref={canvasRef}
width={112}
height={112}
style={{ display: 'inline' }}
/>
);
}

View file

@ -0,0 +1,48 @@
import Box from '@mui/material/Box';
import { Chip } from 'components/Chip';
import { Legend } from 'components/PhotoViewer/styledComponents/Legend';
import React, { useState, useEffect } from 'react';
import { EnteFile } from 'types/file';
import mlIDbStorage from 'utils/storage/mlIDbStorage';
import constants from 'utils/strings/constants';
export function WordList(props: { file: EnteFile; updateMLDataIndex: number }) {
const [words, setWords] = useState<string[]>([]);
useEffect(() => {
let didCancel = false;
const main = async () => {
const texts = await mlIDbStorage.getAllTextMap();
const uniqueDetectedWords = [
...new Set(
(texts.get(props.file.id) ?? []).map(
(text) => text.detection.word
)
),
];
!didCancel && setWords(uniqueDetectedWords);
};
main();
return () => {
didCancel = true;
};
}, [props.file, props.updateMLDataIndex]);
if (words.length === 0) return <></>;
return (
<>
<Legend>{constants.TEXT}</Legend>
<Box
display={'flex'}
gap={1}
flexWrap="wrap"
justifyContent={'flex-start'}
alignItems={'flex-start'}>
{words.map((word) => (
<Chip key={word}>{word}</Chip>
))}
</Box>
</>
);
}

View file

@ -0,0 +1,27 @@
import { Box, Stack, Typography } from '@mui/material';
interface Iprops {
title: string;
icon?: JSX.Element;
}
export default function MenuSectionTitle({ title, icon }: Iprops) {
return (
<Stack px="8px" py={'6px'} direction="row" spacing={'8px'}>
{icon && (
<Box
sx={{
'& > svg': {
fontSize: '17px',
color: 'text.secondary',
},
}}>
{icon}
</Box>
)}
<Typography variant="body2" color="text.secondary">
{title}
</Typography>
</Stack>
);
}

View file

@ -82,7 +82,6 @@ const PhotoFrame = ({
openUploader,
isInSearchMode,
search,
resetSearch,
deletedFileIds,
setDeletedFileIds,
activeCollection,
@ -154,8 +153,30 @@ const PhotoFrame = ({
) {
return false;
}
if (
search?.person &&
search.person.files.indexOf(item.id) === -1
) {
return false;
}
if (
search?.thing &&
search.thing.files.indexOf(item.id) === -1
) {
return false;
}
if (
search?.text &&
search.text.files.indexOf(item.id) === -1
) {
return false;
}
if (search?.files && search.files.indexOf(item.id) === -1) {
return false;
}
if (
!isDeduplicating &&
!isInSearchMode &&
activeCollection === ALL_SECTION &&
(IsArchived(item) ||
archivedCollections?.has(item.collectionID))
@ -163,6 +184,7 @@ const PhotoFrame = ({
return false;
}
if (
!isInSearchMode &&
activeCollection === ARCHIVE_SECTION &&
!IsArchived(item)
) {
@ -170,15 +192,24 @@ const PhotoFrame = ({
}
if (
isSharedFile(user, item) &&
activeCollection !== item.collectionID
(isInSearchMode ||
activeCollection !== item.collectionID) &&
isSharedFile(user, item)
) {
return false;
}
if (activeCollection === TRASH_SECTION && !item.isTrashed) {
if (
!isInSearchMode &&
activeCollection === TRASH_SECTION &&
!item.isTrashed
) {
return false;
}
if (activeCollection !== TRASH_SECTION && item.isTrashed) {
if (
(isInSearchMode ||
activeCollection !== TRASH_SECTION) &&
item.isTrashed
) {
return false;
}
if (!idSet.has(item.id)) {
@ -186,8 +217,8 @@ const PhotoFrame = ({
activeCollection === ALL_SECTION ||
activeCollection === ARCHIVE_SECTION ||
activeCollection === TRASH_SECTION ||
activeCollection === item.collectionID ||
isInSearchMode
isInSearchMode ||
activeCollection === item.collectionID
) {
idSet.add(item.id);
return true;
@ -235,7 +266,11 @@ const PhotoFrame = ({
files,
deletedFileIds,
search?.date,
search?.files,
search?.location,
search?.person,
search?.thing,
search?.text,
activeCollection,
]);
@ -315,18 +350,6 @@ const PhotoFrame = ({
};
}, []);
useEffect(() => {
if (!isNaN(search?.file)) {
const filteredDataIdx = filteredData.findIndex((file) => {
return file.id === search.file;
});
if (!isNaN(filteredDataIdx)) {
onThumbnailClick(filteredDataIdx)();
}
resetSearch();
}
}, [search, filteredData]);
useEffect(() => {
if (selected.count === 0) {
setRangeStart(null);

View file

@ -1,4 +1,4 @@
import React, { useEffect, useState } from 'react';
import React, { useContext, useEffect, useState } from 'react';
import constants from 'utils/strings/constants';
import { RenderFileName } from './RenderFileName';
import { RenderCreationTime } from './RenderCreationTime';
@ -24,6 +24,16 @@ import TextSnippetOutlined from '@mui/icons-material/TextSnippetOutlined';
import FolderOutlined from '@mui/icons-material/FolderOutlined';
import BackupOutlined from '@mui/icons-material/BackupOutlined';
import {
PhotoPeopleList,
UnidentifiedFaces,
} from 'components/MachineLearning/PeopleList';
import { ObjectLabelList } from 'components/MachineLearning/ObjectList';
import { WordList } from 'components/MachineLearning/WordList';
// import MLServiceFileInfoButton from 'components/MachineLearning/MLServiceFileInfoButton';
import { AppContext } from 'pages/_app';
export const FileInfoSidebar = styled((props: DialogProps) => (
<EnteDrawer {...props} anchor="right" />
))({
@ -79,9 +89,12 @@ export function FileInfo({
collectionNameMap,
isTrashCollection,
}: Iprops) {
const appContext = useContext(AppContext);
const [location, setLocation] = useState<Location>(null);
const [parsedExifData, setParsedExifData] = useState<Record<string, any>>();
const [showExif, setShowExif] = useState(false);
// eslint-disable-next-line @typescript-eslint/no-unused-vars
const [updateMLDataIndex, setUpdateMLDataIndex] = useState(0);
const openExif = () => setShowExif(true);
const closeExif = () => setShowExif(false);
@ -266,6 +279,33 @@ export function FileInfo({
</Box> </Box>
</InfoItem> </InfoItem>
)} )}
{appContext.mlSearchEnabled && (
<>
<PhotoPeopleList
file={file}
updateMLDataIndex={updateMLDataIndex}
/>
<UnidentifiedFaces
file={file}
updateMLDataIndex={updateMLDataIndex}
/>
<ObjectLabelList
file={file}
updateMLDataIndex={updateMLDataIndex}
/>
<WordList
file={file}
updateMLDataIndex={updateMLDataIndex}
/>
{/* <Box pt={1}>
<MLServiceFileInfoButton
file={file}
updateMLDataIndex={updateMLDataIndex}
setUpdateMLDataIndex={setUpdateMLDataIndex}
/>
</Box> */}
</>
)}
</Stack> </Stack>
<ExifData <ExifData
exif={exif} exif={exif}
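The FileInfo hunks above gate the new ML panels behind appContext.mlSearchEnabled and hand each panel the same updateMLDataIndex counter, which a parent can bump to make the panels re-read their data. A minimal sketch of that gating pattern; the panel contract and context shape here are assumptions modelled on the diff, not the real component signatures:

import React, { useContext, useState } from 'react';

// Assumed context shape; the real AppContext exposes much more than this flag.
const AppContext = React.createContext<{ mlSearchEnabled: boolean }>({
    mlSearchEnabled: false,
});

// Assumed panel contract, mirroring how the diff passes props to
// PhotoPeopleList, UnidentifiedFaces, ObjectLabelList and WordList.
type MLPanel = React.FC<{ file: { id: number }; updateMLDataIndex: number }>;

export function MLInfoSections(props: {
    file: { id: number };
    panels: MLPanel[];
}) {
    const { mlSearchEnabled } = useContext(AppContext);
    // Bump this counter (setter omitted here, as in the diff) to refresh panels.
    const [updateMLDataIndex] = useState(0);
    if (!mlSearchEnabled) {
        return null;
    }
    return (
        <>
            {props.panels.map((Panel, i) => (
                <Panel
                    key={i}
                    file={props.file}
                    updateMLDataIndex={updateMLDataIndex}
                />
            ))}
        </>
    );
}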

View file

@ -0,0 +1,832 @@
export {};
// import React, { useContext, useEffect, useRef, useState } from 'react';
// import Photoswipe from 'photoswipe';
// import PhotoswipeUIDefault from 'photoswipe/dist/photoswipe-ui-default';
// import classnames from 'classnames';
// import FavButton from 'components/FavButton';
// import {
// addToFavorites,
// removeFromFavorites,
// } from 'services/collectionService';
// import { updatePublicMagicMetadata } from 'services/fileService';
// import { EnteFile } from 'types/file';
// import constants from 'utils/strings/constants';
// import exifr from 'exifr';
// import Modal from 'react-bootstrap/Modal';
// import Button from 'react-bootstrap/Button';
// import styled from 'styled-components';
// import events from './events';
// import {
// changeFileCreationTime,
// changeFileName,
// downloadFile,
// formatDateTime,
// splitFilenameAndExtension,
// updateExistingFilePubMetadata,
// } from 'utils/file';
// import { Col, Form, FormCheck, FormControl } from 'react-bootstrap';
// import { prettyPrintExif } from 'utils/exif';
// import EditIcon from 'components/icons/EditIcon';
// import {
// FlexWrapper,
// FreeFlowText,
// IconButton,
// Label,
// Row,
// Value,
// } from 'components/Container';
// import { logError } from 'utils/sentry';
// import CloseIcon from 'components/icons/CloseIcon';
// import TickIcon from 'components/icons/TickIcon';
// import {
// PhotoPeopleList,
// UnidentifiedFaces,
// } from 'components/MachineLearning/PeopleList';
// import { Formik } from 'formik';
// import * as Yup from 'yup';
// import EnteSpinner from 'components/EnteSpinner';
// import EnteDateTimePicker from 'components/EnteDateTimePicker';
// // import { AppContext } from 'pages/_app';
// import { MAX_EDITED_FILE_NAME_LENGTH } from 'constants/file';
// import { sleep } from 'utils/common';
// import { PublicCollectionGalleryContext } from 'utils/publicCollectionGallery';
// import { GalleryContext } from 'pages/gallery';
// import { ObjectLabelList } from 'components/MachineLearning/ObjectList';
// import { WordList } from 'components/MachineLearning/WordList';
// import MLServiceFileInfoButton from 'components/MachineLearning/MLServiceFileInfoButton';
// const SmallLoadingSpinner = () => (
// <EnteSpinner
// style={{
// width: '20px',
// height: '20px',
// }}
// />
// );
// interface Iprops {
// isOpen: boolean;
// items: EnteFile[];
// currentIndex?: number;
// onClose?: (needUpdate: boolean) => void;
// gettingData: (instance: any, index: number, item: EnteFile) => void;
// id?: string;
// className?: string;
// favItemIds: Set<number>;
// isSharedCollection: boolean;
// isTrashCollection: boolean;
// }
// const LegendContainer = styled.div`
// display: flex;
// justify-content: space-between;
// `;
// const Legend = styled.span`
// font-size: 20px;
// color: #ddd;
// display: inline;
// `;
// const Pre = styled.pre`
// color: #aaa;
// padding: 7px 15px;
// `;
// const renderInfoItem = (label: string, value: string | JSX.Element) => (
// <Row>
// <Label width="30%">{label}</Label>
// <Value width="70%">{value}</Value>
// </Row>
// );
// function RenderCreationTime({
// shouldDisableEdits,
// file,
// scheduleUpdate,
// }: {
// shouldDisableEdits: boolean;
// file: EnteFile;
// scheduleUpdate: () => void;
// }) {
// const [loading, setLoading] = useState(false);
// const originalCreationTime = new Date(file?.metadata.creationTime / 1000);
// const [isInEditMode, setIsInEditMode] = useState(false);
// const [pickedTime, setPickedTime] = useState(originalCreationTime);
// const openEditMode = () => setIsInEditMode(true);
// const closeEditMode = () => setIsInEditMode(false);
// const saveEdits = async () => {
// try {
// setLoading(true);
// if (isInEditMode && file) {
// const unixTimeInMicroSec = pickedTime.getTime() * 1000;
// if (unixTimeInMicroSec === file?.metadata.creationTime) {
// closeEditMode();
// return;
// }
// let updatedFile = await changeFileCreationTime(
// file,
// unixTimeInMicroSec
// );
// updatedFile = (
// await updatePublicMagicMetadata([updatedFile])
// )[0];
// updateExistingFilePubMetadata(file, updatedFile);
// scheduleUpdate();
// }
// } catch (e) {
// logError(e, 'failed to update creationTime');
// } finally {
// closeEditMode();
// setLoading(false);
// }
// };
// const discardEdits = () => {
// setPickedTime(originalCreationTime);
// closeEditMode();
// };
// const handleChange = (newDate: Date) => {
// if (newDate instanceof Date) {
// setPickedTime(newDate);
// }
// };
// return (
// <>
// <Row>
// <Label width="30%">{constants.CREATION_TIME}</Label>
// <Value width={isInEditMode ? '50%' : '60%'}>
// {isInEditMode ? (
// <EnteDateTimePicker
// loading={loading}
// isInEditMode={isInEditMode}
// pickedTime={pickedTime}
// handleChange={handleChange}
// />
// ) : (
// formatDateTime(pickedTime)
// )}
// </Value>
// <Value
// width={isInEditMode ? '20%' : '10%'}
// style={{ cursor: 'pointer', marginLeft: '10px' }}>
// {!shouldDisableEdits &&
// (!isInEditMode ? (
// <IconButton onClick={openEditMode}>
// <EditIcon />
// </IconButton>
// ) : (
// <>
// <IconButton onClick={saveEdits}>
// {loading ? (
// <SmallLoadingSpinner />
// ) : (
// <TickIcon />
// )}
// </IconButton>
// <IconButton onClick={discardEdits}>
// <CloseIcon />
// </IconButton>
// </>
// ))}
// </Value>
// </Row>
// </>
// );
// }
// const getFileTitle = (filename, extension) => {
// if (extension) {
// return filename + '.' + extension;
// } else {
// return filename;
// }
// };
// interface formValues {
// filename: string;
// }
// const FileNameEditForm = ({ filename, saveEdits, discardEdits, extension }) => {
// const [loading, setLoading] = useState(false);
// const onSubmit = async (values: formValues) => {
// try {
// setLoading(true);
// await saveEdits(values.filename);
// } finally {
// setLoading(false);
// }
// };
// return (
// <Formik<formValues>
// initialValues={{ filename }}
// validationSchema={Yup.object().shape({
// filename: Yup.string()
// .required(constants.REQUIRED)
// .max(
// MAX_EDITED_FILE_NAME_LENGTH,
// constants.FILE_NAME_CHARACTER_LIMIT
// ),
// })}
// validateOnBlur={false}
// onSubmit={onSubmit}>
// {({ values, errors, handleChange, handleSubmit }) => (
// <Form noValidate onSubmit={handleSubmit}>
// <Form.Row>
// <Form.Group
// bsPrefix="ente-form-group"
// as={Col}
// xs={extension ? 7 : 8}>
// <Form.Control
// as="textarea"
// placeholder={constants.FILE_NAME}
// value={values.filename}
// onChange={handleChange('filename')}
// isInvalid={Boolean(errors.filename)}
// autoFocus
// disabled={loading}
// />
// <FormControl.Feedback
// type="invalid"
// style={{ textAlign: 'center' }}>
// {errors.filename}
// </FormControl.Feedback>
// </Form.Group>
// {extension && (
// <Form.Group
// bsPrefix="ente-form-group"
// as={Col}
// xs={1}
// controlId="formHorizontalFileName">
// <FlexWrapper style={{ padding: '5px' }}>
// {`.${extension}`}
// </FlexWrapper>
// </Form.Group>
// )}
// <Form.Group bsPrefix="ente-form-group" as={Col} xs={2}>
// <Value width={'16.67%'}>
// <IconButton type="submit" disabled={loading}>
// {loading ? (
// <SmallLoadingSpinner />
// ) : (
// <TickIcon />
// )}
// </IconButton>
// <IconButton
// onClick={discardEdits}
// disabled={loading}>
// <CloseIcon />
// </IconButton>
// </Value>
// </Form.Group>
// </Form.Row>
// </Form>
// )}
// </Formik>
// );
// };
// function RenderFileName({
// shouldDisableEdits,
// file,
// scheduleUpdate,
// }: {
// shouldDisableEdits: boolean;
// file: EnteFile;
// scheduleUpdate: () => void;
// }) {
// const originalTitle = file?.metadata.title;
// const [isInEditMode, setIsInEditMode] = useState(false);
// const [originalFileName, extension] =
// splitFilenameAndExtension(originalTitle);
// const [filename, setFilename] = useState(originalFileName);
// const openEditMode = () => setIsInEditMode(true);
// const closeEditMode = () => setIsInEditMode(false);
// const saveEdits = async (newFilename: string) => {
// try {
// if (file) {
// if (filename === newFilename) {
// closeEditMode();
// return;
// }
// setFilename(newFilename);
// const newTitle = getFileTitle(newFilename, extension);
// let updatedFile = await changeFileName(file, newTitle);
// updatedFile = (
// await updatePublicMagicMetadata([updatedFile])
// )[0];
// updateExistingFilePubMetadata(file, updatedFile);
// scheduleUpdate();
// }
// } catch (e) {
// logError(e, 'failed to update file name');
// } finally {
// closeEditMode();
// }
// };
// return (
// <>
// <Row>
// <Label width="30%">{constants.FILE_NAME}</Label>
// {!isInEditMode ? (
// <>
// <Value width="60%">
// <FreeFlowText>
// {getFileTitle(filename, extension)}
// </FreeFlowText>
// </Value>
// {!shouldDisableEdits && (
// <Value
// width="10%"
// style={{
// cursor: 'pointer',
// marginLeft: '10px',
// }}>
// <IconButton onClick={openEditMode}>
// <EditIcon />
// </IconButton>
// </Value>
// )}
// </>
// ) : (
// <FileNameEditForm
// extension={extension}
// filename={filename}
// saveEdits={saveEdits}
// discardEdits={closeEditMode}
// />
// )}
// </Row>
// </>
// );
// }
// function ExifData(props: { exif: any }) {
// const { exif } = props;
// const [showAll, setShowAll] = useState(false);
// const changeHandler = (e: React.ChangeEvent<HTMLInputElement>) => {
// setShowAll(e.target.checked);
// };
// const renderAllValues = () => <Pre>{exif.raw}</Pre>;
// const renderSelectedValues = () => (
// <>
// {exif?.Make &&
// exif?.Model &&
// renderInfoItem(constants.DEVICE, `${exif.Make} ${exif.Model}`)}
// {exif?.ImageWidth &&
// exif?.ImageHeight &&
// renderInfoItem(
// constants.IMAGE_SIZE,
// `${exif.ImageWidth} x ${exif.ImageHeight}`
// )}
// {exif?.Flash && renderInfoItem(constants.FLASH, exif.Flash)}
// {exif?.FocalLength &&
// renderInfoItem(
// constants.FOCAL_LENGTH,
// exif.FocalLength.toString()
// )}
// {exif?.ApertureValue &&
// renderInfoItem(
// constants.APERTURE,
// exif.ApertureValue.toString()
// )}
// {exif?.ISOSpeedRatings &&
// renderInfoItem(constants.ISO, exif.ISOSpeedRatings.toString())}
// </>
// );
// return (
// <>
// <LegendContainer>
// <Legend>{constants.EXIF}</Legend>
// <FormCheck>
// <FormCheck.Label>
// <FormCheck.Input onChange={changeHandler} />
// {constants.SHOW_ALL}
// </FormCheck.Label>
// </FormCheck>
// </LegendContainer>
// {showAll ? renderAllValues() : renderSelectedValues()}
// </>
// );
// }
// function InfoModal({
// shouldDisableEdits,
// showInfo,
// handleCloseInfo,
// items,
// photoSwipe,
// metadata,
// exif,
// scheduleUpdate,
// }) {
// // const appContext = useContext(AppContext);
// const [updateMLDataIndex, setUpdateMLDataIndex] = useState(0);
// return (
// <Modal show={showInfo} onHide={handleCloseInfo}>
// <Modal.Header closeButton>
// <Modal.Title>{constants.INFO}</Modal.Title>
// </Modal.Header>
// <Modal.Body>
// <div>
// <Legend>{constants.METADATA}</Legend>
// </div>
// {renderInfoItem(
// constants.FILE_ID,
// items[photoSwipe?.getCurrentIndex()]?.id
// )}
// {metadata?.title && (
// <RenderFileName
// shouldDisableEdits={shouldDisableEdits}
// file={items[photoSwipe?.getCurrentIndex()]}
// scheduleUpdate={scheduleUpdate}
// />
// )}
// {metadata?.creationTime && (
// <RenderCreationTime
// shouldDisableEdits={shouldDisableEdits}
// file={items[photoSwipe?.getCurrentIndex()]}
// scheduleUpdate={scheduleUpdate}
// />
// )}
// {metadata?.modificationTime &&
// renderInfoItem(
// constants.UPDATED_ON,
// formatDateTime(metadata.modificationTime / 1000)
// )}
// {metadata?.latitude > 0 &&
// metadata?.longitude > 0 &&
// renderInfoItem(
// constants.LOCATION,
// <a
// href={`https://www.openstreetmap.org/?mlat=${metadata.latitude}&mlon=${metadata.longitude}#map=15/${metadata.latitude}/${metadata.longitude}`}
// target="_blank"
// rel="noopener noreferrer">
// {constants.SHOW_MAP}
// </a>
// )}
// {/* {appContext.mlSearchEnabled && ( */}
// <>
// <div>
// <Legend>{constants.PEOPLE}</Legend>
// </div>
// <PhotoPeopleList
// file={items[photoSwipe?.getCurrentIndex()]}
// updateMLDataIndex={updateMLDataIndex}
// />
// <div>
// <Legend>{constants.UNIDENTIFIED_FACES}</Legend>
// </div>
// <UnidentifiedFaces
// file={items[photoSwipe?.getCurrentIndex()]}
// updateMLDataIndex={updateMLDataIndex}
// />
// <div>
// <Legend>{constants.OBJECTS}</Legend>
// <ObjectLabelList
// file={items[photoSwipe?.getCurrentIndex()]}
// updateMLDataIndex={updateMLDataIndex}
// />
// </div>
// <div>
// <Legend>{constants.TEXT}</Legend>
// <WordList
// file={items[photoSwipe?.getCurrentIndex()]}
// updateMLDataIndex={updateMLDataIndex}
// />
// </div>
// <MLServiceFileInfoButton
// file={items[photoSwipe?.getCurrentIndex()]}
// updateMLDataIndex={updateMLDataIndex}
// setUpdateMLDataIndex={setUpdateMLDataIndex}
// />
// </>
// {/* )} */}
// {exif && (
// <>
// <ExifData exif={exif} />
// </>
// )}
// </Modal.Body>
// <Modal.Footer>
// <Button variant="outline-secondary" onClick={handleCloseInfo}>
// {constants.CLOSE}
// </Button>
// </Modal.Footer>
// </Modal>
// );
// }
// function PhotoSwipe(props: Iprops) {
// const pswpElement = useRef<HTMLDivElement>();
// const [photoSwipe, setPhotoSwipe] = useState<Photoswipe<any>>();
// const { isOpen, items } = props;
// const [isFav, setIsFav] = useState(false);
// const [showInfo, setShowInfo] = useState(false);
// const [metadata, setMetaData] = useState<EnteFile['metadata']>(null);
// const [exif, setExif] = useState<any>(null);
// const needUpdate = useRef(false);
// const publicCollectionGalleryContext = useContext(
// PublicCollectionGalleryContext
// );
// const galleryContext = useContext(GalleryContext);
// useEffect(() => {
// if (!pswpElement) return;
// if (isOpen) {
// openPhotoSwipe();
// }
// if (!isOpen) {
// closePhotoSwipe();
// }
// return () => {
// closePhotoSwipe();
// };
// }, [isOpen]);
// useEffect(() => {
// updateItems(items);
// }, [items]);
// // useEffect(() => {
// // if (photoSwipe) {
// // photoSwipe.options.arrowKeys = !showInfo;
// // photoSwipe.options.escKey = !showInfo;
// // }
// // }, [showInfo]);
// function updateFavButton() {
// setIsFav(isInFav(this?.currItem));
// }
// const openPhotoSwipe = () => {
// const { items, currentIndex } = props;
// const options = {
// history: false,
// maxSpreadZoom: 5,
// index: currentIndex,
// showHideOpacity: true,
// getDoubleTapZoom(isMouseClick, item) {
// if (isMouseClick) {
// return 2.5;
// }
// // zoom to original if initial zoom is less than 0.7x,
// // otherwise to 1.5x, to make sure that double-tap gesture always zooms image
// return item.initialZoomLevel < 0.7 ? 1 : 1.5;
// },
// getThumbBoundsFn: (index) => {
// try {
// const file = items[index];
// const ele = document.getElementById(`thumb-${file.id}`);
// if (ele) {
// const rect = ele.getBoundingClientRect();
// const pageYScroll =
// window.pageYOffset ||
// document.documentElement.scrollTop;
// return {
// x: rect.left,
// y: rect.top + pageYScroll,
// w: rect.width,
// };
// }
// return null;
// } catch (e) {
// return null;
// }
// },
// };
// const photoSwipe = new Photoswipe(
// pswpElement.current,
// PhotoswipeUIDefault,
// items,
// options
// );
// events.forEach((event) => {
// const callback = props[event];
// if (callback || event === 'destroy') {
// photoSwipe.listen(event, function (...args) {
// if (callback) {
// args.unshift(this);
// callback(...args);
// }
// if (event === 'destroy') {
// handleClose();
// }
// if (event === 'close') {
// handleClose();
// }
// });
// }
// });
// photoSwipe.listen('beforeChange', function () {
// updateInfo.call(this);
// updateFavButton.call(this);
// });
// photoSwipe.listen('resize', checkExifAvailable);
// photoSwipe.init();
// needUpdate.current = false;
// setPhotoSwipe(photoSwipe);
// };
// const closePhotoSwipe = () => {
// if (photoSwipe) photoSwipe.close();
// };
// const handleClose = () => {
// const { onClose } = props;
// if (typeof onClose === 'function') {
// onClose(needUpdate.current);
// }
// const videoTags = document.getElementsByTagName('video');
// for (const videoTag of videoTags) {
// videoTag.pause();
// }
// handleCloseInfo();
// };
// const isInFav = (file) => {
// const { favItemIds } = props;
// if (favItemIds && file) {
// return favItemIds.has(file.id);
// }
// return false;
// };
// const onFavClick = async (file) => {
// const { favItemIds } = props;
// if (!isInFav(file)) {
// favItemIds.add(file.id);
// addToFavorites(file);
// setIsFav(true);
// } else {
// favItemIds.delete(file.id);
// removeFromFavorites(file);
// setIsFav(false);
// }
// needUpdate.current = true;
// };
// const updateItems = (items = []) => {
// if (photoSwipe) {
// photoSwipe.items.length = 0;
// items.forEach((item) => {
// photoSwipe.items.push(item);
// });
// photoSwipe.invalidateCurrItems();
// // photoSwipe.updateSize(true);
// }
// };
// const checkExifAvailable = async () => {
// setExif(null);
// await sleep(100);
// try {
// const img: HTMLImageElement = document.querySelector(
// '.pswp__img:not(.pswp__img--placeholder)'
// );
// if (img) {
// const exifData = await exifr.parse(img);
// if (!exifData) {
// return;
// }
// exifData.raw = prettyPrintExif(exifData);
// setExif(exifData);
// }
// } catch (e) {
// logError(e, 'exifr parsing failed');
// }
// };
// function updateInfo() {
// const file: EnteFile = this?.currItem;
// if (file?.metadata) {
// setMetaData(file.metadata);
// setExif(null);
// checkExifAvailable();
// }
// }
// const handleCloseInfo = () => {
// setShowInfo(false);
// };
// const handleOpenInfo = () => {
// setShowInfo(true);
// };
// const downloadFileHelper = async (file) => {
// galleryContext.startLoading();
// await downloadFile(
// file,
// publicCollectionGalleryContext.accessedThroughSharedURL,
// publicCollectionGalleryContext.token
// );
// galleryContext.finishLoading();
// };
// const scheduleUpdate = () => (needUpdate.current = true);
// const { id } = props;
// let { className } = props;
// className = classnames(['pswp', className]).trim();
// return (
// <>
// <div
// id={id}
// className={className}
// tabIndex={Number('-1')}
// role="dialog"
// aria-hidden="true"
// ref={pswpElement}>
// <div className="pswp__bg" />
// <div className="pswp__scroll-wrap">
// <div className="pswp__container">
// <div className="pswp__item" />
// <div className="pswp__item" />
// <div className="pswp__item" />
// </div>
// <div className="pswp__ui pswp__ui--hidden">
// <div className="pswp__top-bar">
// <div className="pswp__counter" />
// <button
// className="pswp__button pswp__button--close"
// title={constants.CLOSE}
// />
// <button
// className="pswp-custom download-btn"
// title={constants.DOWNLOAD}
// onClick={() =>
// downloadFileHelper(photoSwipe.currItem)
// }
// />
// <button
// className="pswp__button pswp__button--fs"
// title={constants.TOGGLE_FULLSCREEN}
// />
// <button
// className="pswp__button pswp__button--zoom"
// title={constants.ZOOM_IN_OUT}
// />
// {!props.isSharedCollection &&
// !props.isTrashCollection && (
// <FavButton
// size={44}
// isClick={isFav}
// onClick={() => {
// onFavClick(photoSwipe?.currItem);
// }}
// />
// )}
// <button
// className="pswp-custom info-btn"
// title={constants.INFO}
// onClick={handleOpenInfo}
// />
// <div className="pswp__preloader">
// <div className="pswp__preloader__icn">
// <div className="pswp__preloader__cut">
// <div className="pswp__preloader__donut" />
// </div>
// </div>
// </div>
// </div>
// <div className="pswp__share-modal pswp__share-modal--hidden pswp__single-tap">
// <div className="pswp__share-tooltip" />
// </div>
// <button
// className="pswp__button pswp__button--arrow--left"
// title={constants.PREVIOUS}
// />
// <button
// className="pswp__button pswp__button--arrow--right"
// title={constants.NEXT}
// />
// <div className="pswp__caption">
// <div />
// </div>
// </div>
// </div>
// </div>
// <InfoModal
// shouldDisableEdits={props.isSharedCollection}
// showInfo={showInfo}
// handleCloseInfo={handleCloseInfo}
// items={items}
// photoSwipe={photoSwipe}
// metadata={metadata}
// exif={exif}
// scheduleUpdate={scheduleUpdate}
// />
// </>
// );
// }
// export default PhotoSwipe;

View file

@ -0,0 +1,175 @@
export {};
// import React, { useContext, useEffect, useState } from 'react';
// import constants from 'utils/strings/constants';
// import { formatDateTime } from 'utils/time';
// import { RenderFileName } from './RenderFileName';
// import { ExifData } from './ExifData';
// import { RenderCreationTime } from './RenderCreationTime';
// import { RenderInfoItem } from './RenderInfoItem';
// import DialogTitleWithCloseButton from 'components/DialogBox/TitleWithCloseButton';
// import { Dialog, DialogContent, Link, styled, Typography } from '@mui/material';
// import { AppContext } from 'pages/_app';
// import { Location, Metadata } from 'types/upload';
// import Photoswipe from 'photoswipe';
// import { getEXIFLocation } from 'services/upload/exifService';
// import {
// PhotoPeopleList,
// UnidentifiedFaces,
// } from 'components/MachineLearning/PeopleList';
// import { ObjectLabelList } from 'components/MachineLearning/ObjectList';
// import { WordList } from 'components/MachineLearning/WordList';
// import MLServiceFileInfoButton from 'components/MachineLearning/MLServiceFileInfoButton';
// const FileInfoDialog = styled(Dialog)(({ theme }) => ({
// zIndex: 1501,
// '& .MuiDialog-container': {
// alignItems: 'flex-start',
// },
// '& .MuiDialog-paper': {
// padding: theme.spacing(2),
// },
// }));
// const Legend = styled('span')`
// font-size: 20px;
// color: #ddd;
// display: inline;
// `;
// interface Iprops {
// shouldDisableEdits: boolean;
// showInfo: boolean;
// handleCloseInfo: () => void;
// items: any[];
// photoSwipe: Photoswipe<Photoswipe.Options>;
// metadata: Metadata;
// exif: any;
// scheduleUpdate: () => void;
// }
// export function FileInfo({
// shouldDisableEdits,
// showInfo,
// handleCloseInfo,
// items,
// photoSwipe,
// metadata,
// exif,
// scheduleUpdate,
// }: Iprops) {
// const appContext = useContext(AppContext);
// const [location, setLocation] = useState<Location>(null);
// const [updateMLDataIndex, setUpdateMLDataIndex] = useState(0);
// useEffect(() => {
// if (!location && metadata) {
// if (metadata.longitude || metadata.longitude === 0) {
// setLocation({
// latitude: metadata.latitude,
// longitude: metadata.longitude,
// });
// }
// }
// }, [metadata]);
// useEffect(() => {
// if (!location && exif) {
// const exifLocation = getEXIFLocation(exif);
// if (exifLocation.latitude || exifLocation.latitude === 0) {
// setLocation(exifLocation);
// }
// }
// }, [exif]);
// return (
// <FileInfoDialog
// open={showInfo}
// onClose={handleCloseInfo}
// fullScreen={appContext.isMobile}>
// <DialogTitleWithCloseButton onClose={handleCloseInfo}>
// {constants.INFO}
// </DialogTitleWithCloseButton>
// <DialogContent>
// <Typography variant="subtitle" mb={1}>
// {constants.METADATA}
// </Typography>
// {RenderInfoItem(
// constants.FILE_ID,
// items[photoSwipe?.getCurrentIndex()]?.id
// )}
// {metadata?.title && (
// <RenderFileName
// shouldDisableEdits={shouldDisableEdits}
// file={items[photoSwipe?.getCurrentIndex()]}
// scheduleUpdate={scheduleUpdate}
// />
// )}
// {metadata?.creationTime && (
// <RenderCreationTime
// shouldDisableEdits={shouldDisableEdits}
// file={items[photoSwipe?.getCurrentIndex()]}
// scheduleUpdate={scheduleUpdate}
// />
// )}
// {metadata?.modificationTime &&
// RenderInfoItem(
// constants.UPDATED_ON,
// formatDateTime(metadata.modificationTime / 1000)
// )}
// {location &&
// RenderInfoItem(
// constants.LOCATION,
// <Link
// href={`https://www.openstreetmap.org/?mlat=${metadata.latitude}&mlon=${metadata.longitude}#map=15/${metadata.latitude}/${metadata.longitude}`}
// target="_blank"
// rel="noopener noreferrer">
// {constants.SHOW_MAP}
// </Link>
// )}
// {appContext.mlSearchEnabled && (
// <>
// <div>
// <Legend>{constants.PEOPLE}</Legend>
// </div>
// <PhotoPeopleList
// file={items[photoSwipe?.getCurrentIndex()]}
// updateMLDataIndex={updateMLDataIndex}
// />
// <div>
// <Legend>{constants.UNIDENTIFIED_FACES}</Legend>
// </div>
// <UnidentifiedFaces
// file={items[photoSwipe?.getCurrentIndex()]}
// updateMLDataIndex={updateMLDataIndex}
// />
// <div>
// <Legend>{constants.OBJECTS}</Legend>
// <ObjectLabelList
// file={items[photoSwipe?.getCurrentIndex()]}
// updateMLDataIndex={updateMLDataIndex}
// />
// </div>
// <div>
// <Legend>{constants.TEXT}</Legend>
// <WordList
// file={items[photoSwipe?.getCurrentIndex()]}
// updateMLDataIndex={updateMLDataIndex}
// />
// </div>
// <MLServiceFileInfoButton
// file={items[photoSwipe?.getCurrentIndex()]}
// updateMLDataIndex={updateMLDataIndex}
// setUpdateMLDataIndex={setUpdateMLDataIndex}
// />
// </>
// )}
// {exif && (
// <>
// <ExifData exif={exif} />
// </>
// )}
// </DialogContent>
// </FileInfoDialog>
// );
// }

View file

@ -0,0 +1,77 @@
import React, { useContext } from 'react';
import { PeopleList } from 'components/MachineLearning/PeopleList';
import { IndexStatus } from 'types/machineLearning/ui';
import { SuggestionType, Suggestion } from 'types/search';
import { components } from 'react-select';
import { Row } from 'components/Container';
import { Col } from 'react-bootstrap';
import { AppContext } from 'pages/_app';
import styled from '@mui/styled-engine';
import constants from 'utils/strings/constants';
const { Menu } = components;
const LegendRow = styled(Row)`
align-items: center;
justify-content: space-between;
margin-bottom: 0px;
`;
const Legend = styled('span')`
font-size: 20px;
color: #ddd;
display: inline;
`;
const Caption = styled('span')`
font-size: 12px;
display: inline;
padding: 8px 12px;
`;
const MenuWithPeople = (props) => {
const appContext = useContext(AppContext);
// addLogLine("props.selectProps.options: ", selectRef);
const peopleSuggestions = props.selectProps.options.filter(
(o) => o.type === SuggestionType.PERSON
);
const people = peopleSuggestions.map((o) => o.value);
const indexStatusSuggestion = props.selectProps.options.filter(
(o) => o.type === SuggestionType.INDEX_STATUS
)[0] as Suggestion;
const indexStatus = indexStatusSuggestion?.value as IndexStatus;
return (
<Menu {...props}>
<Col>
{((appContext.mlSearchEnabled && indexStatus) ||
(people && people.length > 0)) && (
<LegendRow>
<Legend>{constants.PEOPLE}</Legend>
</LegendRow>
)}
{appContext.mlSearchEnabled && indexStatus && (
<LegendRow>
<Caption>{indexStatusSuggestion.label}</Caption>
</LegendRow>
)}
{people && people.length > 0 && (
<Row>
<PeopleList
people={people}
maxRows={2}
onSelect={(_, index) => {
props.selectRef.current.blur();
props.setValue(peopleSuggestions[index]);
}}
/>
</Row>
)}
</Col>
{props.children}
</Menu>
);
};
export default MenuWithPeople;
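MenuWithPeople above wraps react-select's stock Menu, pulling PERSON and INDEX_STATUS suggestions out of selectProps.options and rendering a people strip before the normal option list. A usage sketch, assuming an AsyncSelect wired the way the next file's diff wires it (loadOptions and the option shapes are placeholders):

import React, { useRef, useState } from 'react';
import AsyncSelect from 'react-select/async';
import MenuWithPeople from './MenuWithPeople';

export function SearchSelectSketch(props: {
    loadOptions: (input: string) => Promise<any[]>;
}) {
    const selectRef = useRef(null);
    const [value, setValue] = useState<any>(null);
    return (
        <AsyncSelect
            ref={selectRef}
            value={value}
            onChange={(selected) => setValue(selected)}
            loadOptions={props.loadOptions}
            components={{
                // react-select lets a custom Menu replace the stock one; the
                // extra setValue / selectRef props are threaded through manually.
                Menu: (menuProps) => (
                    <MenuWithPeople
                        {...menuProps}
                        setValue={setValue}
                        selectRef={selectRef}
                    />
                ),
            }}
        />
    );
}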

View file

@ -1,8 +1,11 @@
import { IconButton } from '@mui/material'; import { IconButton } from '@mui/material';
import debounce from 'debounce-promise'; import debounce from 'debounce-promise';
import { AppContext } from 'pages/_app'; import { AppContext } from 'pages/_app';
import React, { useContext, useEffect, useState } from 'react'; import React, { useContext, useEffect, useRef, useState } from 'react';
import { getAutoCompleteSuggestions } from 'services/searchService'; import {
getAutoCompleteSuggestions,
getDefaultOptions,
} from 'services/searchService';
import { import {
Bbox, Bbox,
DateValue, DateValue,
@ -20,6 +23,8 @@ import { EnteFile } from 'types/file';
import { Collection } from 'types/collection'; import { Collection } from 'types/collection';
import { OptionWithInfo } from './optionWithInfo'; import { OptionWithInfo } from './optionWithInfo';
import { SearchInputWrapper } from '../styledComponents'; import { SearchInputWrapper } from '../styledComponents';
import MenuWithPeople from './MenuWithPeople';
import { Person, Thing, WordGroup } from 'types/machineLearning';
interface Iprops { interface Iprops {
isOpen: boolean; isOpen: boolean;
@ -31,16 +36,27 @@ interface Iprops {
} }
export default function SearchInput(props: Iprops) { export default function SearchInput(props: Iprops) {
const selectRef = useRef(null);
const [value, setValue] = useState<SearchOption>(null); const [value, setValue] = useState<SearchOption>(null);
const appContext = useContext(AppContext); const appContext = useContext(AppContext);
const handleChange = (value: SearchOption) => { const handleChange = (value: SearchOption) => {
setValue(value); setValue(value);
}; };
const [defaultOptions, setDefaultOptions] = useState([]);
useEffect(() => { useEffect(() => {
search(value); search(value);
}, [value]); }, [value]);
useEffect(() => {
refreshDefaultOptions();
}, []);
async function refreshDefaultOptions() {
const defaultOptions = await getDefaultOptions(props.files);
setDefaultOptions(defaultOptions);
}
const resetSearch = () => { const resetSearch = () => {
if (props.isOpen) { if (props.isOpen) {
appContext.startLoading(); appContext.startLoading();
@ -80,11 +96,17 @@ export default function SearchInput(props: Iprops) {
search = { collection: selectedOption.value as number }; search = { collection: selectedOption.value as number };
setValue(null); setValue(null);
break; break;
case SuggestionType.IMAGE: case SuggestionType.FILE_NAME:
case SuggestionType.VIDEO: search = { files: selectedOption.value as number[] };
search = { file: selectedOption.value as number };
setValue(null);
break; break;
case SuggestionType.PERSON:
search = { person: selectedOption.value as Person };
break;
case SuggestionType.THING:
search = { thing: selectedOption.value as Thing };
break;
case SuggestionType.TEXT:
search = { text: selectedOption.value as WordGroup };
} }
props.updateSearch(search, { props.updateSearch(search, {
optionName: selectedOption.label, optionName: selectedOption.label,
@ -92,20 +114,39 @@ export default function SearchInput(props: Iprops) {
}); });
}; };
// TODO: HACK as AsyncSelect does not support default options reloading on focus/click
// unwanted side effect: placeholder is not shown on focus/click
// https://github.com/JedWatson/react-select/issues/1879
// for correct fix AsyncSelect can be extended to support default options reloading on focus/click
const handleOnFocus = () => {
refreshDefaultOptions();
};
return ( return (
<SearchInputWrapper isOpen={props.isOpen}> <SearchInputWrapper isOpen={props.isOpen}>
<AsyncSelect <AsyncSelect
ref={selectRef}
value={value} value={value}
components={{ components={{
Option: OptionWithInfo, Option: OptionWithInfo,
ValueContainer: ValueContainerWithIcon, ValueContainer: ValueContainerWithIcon,
Menu: (props) => (
<MenuWithPeople
{...props}
setValue={setValue}
selectRef={selectRef}
/>
),
}} }}
placeholder={constants.SEARCH_HINT()} placeholder={constants.SEARCH_HINT()}
loadOptions={getOptions} loadOptions={getOptions}
onChange={handleChange} onChange={handleChange}
onFocus={handleOnFocus}
isClearable isClearable
escapeClearsValue escapeClearsValue
styles={SelectStyles} styles={SelectStyles}
defaultOptions={
appContext.mlSearchEnabled ? defaultOptions : null
}
noOptionsMessage={() => null} noOptionsMessage={() => null}
/> />
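The TODO above names the actual technique: AsyncSelect caches its defaultOptions, so the diff recomputes them in refreshDefaultOptions and calls it again from onFocus to fake a reload, at the cost of the placeholder not showing while focused. A condensed sketch of that workaround, with getDefaultOptions treated as an opaque async function:

import React, { useEffect, useState } from 'react';
import AsyncSelect from 'react-select/async';

export function SearchInputSketch(props: {
    getDefaultOptions: () => Promise<any[]>;
    loadOptions: (input: string) => Promise<any[]>;
}) {
    const [defaultOptions, setDefaultOptions] = useState<any[]>([]);

    const refreshDefaultOptions = async () => {
        setDefaultOptions(await props.getDefaultOptions());
    };

    // Compute once on mount, then again on every focus, since AsyncSelect
    // will not re-run defaultOptions on its own.
    useEffect(() => {
        refreshDefaultOptions();
    }, []);

    return (
        <AsyncSelect
            defaultOptions={defaultOptions}
            loadOptions={props.loadOptions}
            onFocus={refreshDefaultOptions}
        />
    );
}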

View file

@ -15,36 +15,37 @@ export const OptionWithInfo = (props) => (
</Option> </Option>
); );
const LabelWithInfo = ({ data }: { data: SearchOption }) => ( const LabelWithInfo = ({ data }: { data: SearchOption }) =>
<> !data.hide && (
<Box className="main" px={2} py={1}> <>
<Typography variant="caption" mb={1}> <Box className="main" px={2} py={1}>
{constants.SEARCH_TYPE(data.type)} <Typography variant="caption" mb={1}>
</Typography> {constants.SEARCH_TYPE(data.type)}
<SpaceBetweenFlex> </Typography>
<Box mr={1}> <SpaceBetweenFlex>
<FreeFlowText> <Box mr={1}>
<Typography fontWeight={'bold'}> <FreeFlowText>
{data.label} <Typography fontWeight={'bold'}>
{data.label}
</Typography>
</FreeFlowText>
<Typography color="text.secondary">
{constants.PHOTO_COUNT(data.fileCount)}
</Typography> </Typography>
</FreeFlowText> </Box>
<Typography color="text.secondary">
{constants.PHOTO_COUNT(data.fileCount)}
</Typography>
</Box>
<Stack direction={'row'} spacing={1}> <Stack direction={'row'} spacing={1}>
{data.previewFiles.map((file) => ( {data.previewFiles.map((file) => (
<CollectionCard <CollectionCard
key={file.id} key={file.id}
latestFile={file} latestFile={file}
onClick={() => null} onClick={() => null}
collectionTile={ResultPreviewTile} collectionTile={ResultPreviewTile}
/> />
))} ))}
</Stack> </Stack>
</SpaceBetweenFlex> </SpaceBetweenFlex>
</Box> </Box>
<Divider sx={{ mx: 2, my: 1 }} /> <Divider sx={{ mx: 2, my: 1 }} />
</> </>
); );

View file

@ -3,7 +3,6 @@ import FolderIcon from '@mui/icons-material/Folder';
import CalendarIcon from '@mui/icons-material/CalendarMonth'; import CalendarIcon from '@mui/icons-material/CalendarMonth';
import ImageIcon from '@mui/icons-material/Image'; import ImageIcon from '@mui/icons-material/Image';
import LocationIcon from '@mui/icons-material/LocationOn'; import LocationIcon from '@mui/icons-material/LocationOn';
import VideoFileIcon from '@mui/icons-material/VideoFile';
import { components } from 'react-select'; import { components } from 'react-select';
import { SearchOption, SuggestionType } from 'types/search'; import { SearchOption, SuggestionType } from 'types/search';
import SearchIcon from '@mui/icons-material/SearchOutlined'; import SearchIcon from '@mui/icons-material/SearchOutlined';
@ -21,10 +20,8 @@ const getIconByType = (type: SuggestionType) => {
return <LocationIcon />; return <LocationIcon />;
case SuggestionType.COLLECTION: case SuggestionType.COLLECTION:
return <FolderIcon />; return <FolderIcon />;
case SuggestionType.IMAGE: case SuggestionType.FILE_NAME:
return <ImageIcon />; return <ImageIcon />;
case SuggestionType.VIDEO:
return <VideoFileIcon />;
default: default:
return <SearchIcon />; return <SearchIcon />;
} }

View file

@ -0,0 +1,496 @@
export {};
// import React, { useContext, useEffect, useRef, useState } from 'react';
// import styled from 'styled-components';
// import AsyncSelect from 'react-select/async';
// import { components } from 'react-select';
// import debounce from 'debounce-promise';
// import {
// getAllPeopleSuggestion,
// getHolidaySuggestion,
// getIndexStatusSuggestion,
// getYearSuggestion,
// parseHumanDate,
// searchCollection,
// searchFiles,
// searchLocation,
// searchText,
// searchThing,
// } from 'services/searchService';
// import { getFormattedDate, isInsideBox } from 'utils/search';
// import constants from 'utils/strings/constants';
// import LocationIcon from './icons/LocationIcon';
// import DateIcon from './icons/DateIcon';
// import SearchIcon from './icons/SearchIcon';
// import CloseIcon from './icons/CloseIcon';
// import { Collection } from 'types/collection';
// import CollectionIcon from './icons/CollectionIcon';
// import ImageIcon from './icons/ImageIcon';
// import VideoIcon from './icons/VideoIcon';
// import { IconButton, Row } from './Container';
// import { EnteFile } from 'types/file';
// import { Suggestion, SuggestionType, DateValue, Bbox } from 'types/search';
// import { Search, SearchStats } from 'types/gallery';
// import { FILE_TYPE } from 'constants/file';
// import { GalleryContext } from 'pages/gallery';
// import { AppContext } from 'pages/_app';
// import { Col } from 'react-bootstrap';
// import { Person, Thing, WordGroup } from 'types/machineLearning';
// import { IndexStatus } from 'types/machineLearning/ui';
// import { PeopleList } from './MachineLearning/PeopleList';
// import ObjectIcon from './icons/ObjectIcon';
// import TextIcon from './icons/TextIcon';
// const Wrapper = styled.div<{ isDisabled: boolean; isOpen: boolean }>`
// position: fixed;
// top: 0;
// z-index: 1000;
// display: ${({ isOpen }) => (isOpen ? 'flex' : 'none')};
// width: 100%;
// background: #111;
// @media (min-width: 625px) {
// display: flex;
// width: calc(100vw - 140px);
// margin: 0 70px;
// }
// align-items: center;
// min-height: 64px;
// transition: opacity 1s ease;
// opacity: ${(props) => (props.isDisabled ? 0 : 1)};
// margin-bottom: 10px;
// `;
// const SearchButton = styled.div<{ isOpen: boolean }>`
// display: none;
// @media (max-width: 624px) {
// display: ${({ isOpen }) => (!isOpen ? 'flex' : 'none')};
// right: 80px;
// cursor: pointer;
// position: fixed;
// top: 0;
// z-index: 1000;
// align-items: center;
// min-height: 64px;
// }
// `;
// const SearchStatsContainer = styled.div`
// display: flex;
// justify-content: center;
// align-items: center;
// color: #979797;
// margin-bottom: 8px;
// `;
// const SearchInput = styled.div`
// width: 100%;
// display: flex;
// align-items: center;
// max-width: 484px;
// margin: auto;
// `;
// const Legend = styled.span`
// font-size: 20px;
// color: #ddd;
// display: inline;
// padding: 8px 12px;
// `;
// const Caption = styled.span`
// font-size: 12px;
// display: inline;
// padding: 8px 12px;
// `;
// const LegendRow = styled(Row)`
// align-items: center;
// justify-content: space-between;
// margin-bottom: 0px;
// `;
// interface Props {
// isOpen: boolean;
// isFirstFetch: boolean;
// setOpen: (value: boolean) => void;
// setSearch: (search: Search) => void;
// searchStats: SearchStats;
// collections: Collection[];
// setActiveCollection: (id: number) => void;
// files: EnteFile[];
// }
// export default function SearchBar(props: Props) {
// const selectRef = useRef(null);
// const [value, setValue] = useState<Suggestion>(null);
// const appContext = useContext(AppContext);
// const galleryContext = useContext(GalleryContext);
// const handleChange = (value) => {
// setValue(value);
// };
// // TODO: HACK as AsyncSelect does not support default options reloading on focus/click
// // unwanted side effect: placeholder is not shown on focus/click
// // https://github.com/JedWatson/react-select/issues/1879
// // for correct fix AsyncSelect can be extended to support default options reloading on focus/click
// const handleOnFocus = () => {
// if (appContext.mlSearchEnabled) {
// const emptySearch = ' ';
// selectRef.current.state.inputValue = emptySearch;
// selectRef.current.select.state.inputValue = emptySearch;
// selectRef.current.handleInputChange(emptySearch);
// }
// };
// useEffect(() => search(value), [value]);
// // = =========================
// // Functionality
// // = =========================
// const getAutoCompleteSuggestions = async (searchPhrase: string) => {
// const options: Array<Suggestion> = [];
// searchPhrase = searchPhrase.trim().toLowerCase();
// if (appContext.mlSearchEnabled) {
// options.push(await getIndexStatusSuggestion());
// options.push(...(await getAllPeopleSuggestion()));
// }
// if (!searchPhrase?.length) {
// return options;
// }
// options.push(...getHolidaySuggestion(searchPhrase));
// options.push(...getYearSuggestion(searchPhrase));
// const searchedDates = parseHumanDate(searchPhrase);
// options.push(
// ...searchedDates.map((searchedDate) => ({
// type: SuggestionType.DATE,
// value: searchedDate,
// label: getFormattedDate(searchedDate),
// }))
// );
// const collectionResults = searchCollection(
// searchPhrase,
// props.collections
// );
// options.push(
// ...collectionResults.map(
// (searchResult) =>
// ({
// type: SuggestionType.COLLECTION,
// value: searchResult.id,
// label: searchResult.name,
// } as Suggestion)
// )
// );
// const fileResults = searchFiles(searchPhrase, props.files);
// options.push(
// ...fileResults.map((file) => ({
// type:
// file.type === FILE_TYPE.IMAGE
// ? SuggestionType.IMAGE
// : SuggestionType.VIDEO,
// value: file.index,
// label: file.title,
// }))
// );
// const locationResults = await searchLocation(searchPhrase);
// const filteredLocationWithFiles = locationResults.filter(
// (locationResult) =>
// props.files.find((file) =>
// isInsideBox(file.metadata, locationResult.bbox)
// )
// );
// options.push(
// ...filteredLocationWithFiles.map(
// (searchResult) =>
// ({
// type: SuggestionType.LOCATION,
// value: searchResult.bbox,
// label: searchResult.place,
// } as Suggestion)
// )
// );
// const thingResults = await searchThing(searchPhrase);
// options.push(
// ...thingResults.map(
// (searchResult) =>
// ({
// type: SuggestionType.THING,
// value: searchResult,
// label: searchResult.className,
// } as Suggestion)
// )
// );
// const textResults = await searchText(searchPhrase);
// options.push(
// ...textResults.map(
// (searchResult) =>
// ({
// type: SuggestionType.TEXT,
// value: searchResult,
// label: searchResult.word,
// } as Suggestion)
// )
// );
// return options;
// };
// const getOptions = debounce(getAutoCompleteSuggestions, 250);
// const search = (selectedOption: Suggestion) => {
// // addLogLine('search...');
// if (!selectedOption) {
// return;
// }
// switch (selectedOption.type) {
// case SuggestionType.DATE:
// props.setSearch({
// date: selectedOption.value as DateValue,
// });
// props.setOpen(true);
// break;
// case SuggestionType.LOCATION:
// props.setSearch({
// location: selectedOption.value as Bbox,
// });
// props.setOpen(true);
// break;
// case SuggestionType.COLLECTION:
// props.setActiveCollection(selectedOption.value as number);
// setValue(null);
// break;
// case SuggestionType.IMAGE:
// case SuggestionType.VIDEO:
// props.setSearch({ fileIndex: selectedOption.value as number });
// setValue(null);
// break;
// case SuggestionType.PERSON:
// props.setSearch({ person: selectedOption.value as Person });
// props.setOpen(true);
// break;
// case SuggestionType.THING:
// props.setSearch({ thing: selectedOption.value as Thing });
// props.setOpen(true);
// break;
// case SuggestionType.TEXT:
// props.setSearch({ text: selectedOption.value as WordGroup });
// props.setOpen(true);
// break;
// }
// };
// const resetSearch = () => {
// if (props.isOpen) {
// galleryContext.startLoading();
// props.setSearch({});
// setTimeout(() => {
// galleryContext.finishLoading();
// }, 10);
// props.setOpen(false);
// setValue(null);
// }
// };
// // = =========================
// // UI
// // = =========================
// const getIconByType = (type: SuggestionType) => {
// switch (type) {
// case SuggestionType.DATE:
// return <DateIcon />;
// case SuggestionType.LOCATION:
// return <LocationIcon />;
// case SuggestionType.COLLECTION:
// return <CollectionIcon />;
// case SuggestionType.IMAGE:
// return <ImageIcon />;
// case SuggestionType.VIDEO:
// return <VideoIcon />;
// case SuggestionType.THING:
// return <ObjectIcon />;
// case SuggestionType.TEXT:
// return <TextIcon />;
// default:
// return <SearchIcon />;
// }
// };
// const LabelWithIcon = (props: { type: SuggestionType; label: string }) => (
// <div style={{ display: 'flex', alignItems: 'center' }}>
// <span style={{ paddingRight: '10px', paddingBottom: '4px' }}>
// {getIconByType(props.type)}
// </span>
// <span>{props.label}</span>
// </div>
// );
// const { Option, Control, Menu } = components;
// const OptionWithIcon = (props) =>
// !props.data.hide && (
// <Option {...props}>
// <LabelWithIcon
// type={props.data.type}
// label={props.data.label}
// />
// </Option>
// );
// const ControlWithIcon = (props) => (
// <Control {...props}>
// <span
// className="icon"
// style={{
// paddingLeft: '10px',
// paddingBottom: '4px',
// }}>
// {getIconByType(props.getValue()[0]?.type)}
// </span>
// {props.children}
// </Control>
// );
// const CustomMenu = (props) => {
// // addLogLine("props.selectProps.options: ", selectRef);
// const peopleSuggestions = props.selectProps.options.filter(
// (o) => o.type === SuggestionType.PERSON
// );
// const people = peopleSuggestions.map((o) => o.value);
// const indexStatusSuggestion = props.selectProps.options.filter(
// (o) => o.type === SuggestionType.INDEX_STATUS
// )[0] as Suggestion;
// const indexStatus = indexStatusSuggestion?.value as IndexStatus;
// return (
// <Menu {...props}>
// {appContext.mlSearchEnabled && (
// <Col>
// <LegendRow>
// <Legend>{constants.PEOPLE}</Legend>
// {indexStatus && (
// <Caption>{indexStatusSuggestion.label}</Caption>
// )}
// </LegendRow>
// {people && people.length > 0 && (
// <Row>
// <PeopleList
// people={people}
// maxRows={2}
// onSelect={(person, index) => {
// selectRef.current.blur();
// setValue(peopleSuggestions[index]);
// }}></PeopleList>
// </Row>
// )}
// </Col>
// )}
// {props.children}
// </Menu>
// );
// };
// const customStyles = {
// control: (style, { isFocused }) => ({
// ...style,
// backgroundColor: '#282828',
// color: '#d1d1d1',
// borderColor: isFocused ? '#51cd7c' : '#444',
// boxShadow: 'none',
// ':hover': {
// borderColor: '#51cd7c',
// cursor: 'text',
// '&>.icon': { color: '#51cd7c' },
// },
// }),
// input: (style) => ({
// ...style,
// color: '#d1d1d1',
// }),
// menu: (style) => ({
// ...style,
// marginTop: '10px',
// backgroundColor: '#282828',
// }),
// option: (style, { isFocused }) => ({
// ...style,
// backgroundColor: isFocused && '#343434',
// }),
// dropdownIndicator: (style) => ({
// ...style,
// display: 'none',
// }),
// indicatorSeparator: (style) => ({
// ...style,
// display: 'none',
// }),
// clearIndicator: (style) => ({
// ...style,
// display: 'none',
// }),
// singleValue: (style, state) => ({
// ...style,
// backgroundColor: '#282828',
// color: '#d1d1d1',
// display: state.selectProps.menuIsOpen ? 'none' : 'block',
// }),
// placeholder: (style) => ({
// ...style,
// color: '#686868',
// wordSpacing: '2px',
// whiteSpace: 'nowrap',
// }),
// };
// return (
// <>
// {props.searchStats && (
// <SearchStatsContainer>
// {constants.SEARCH_STATS(props.searchStats)}
// </SearchStatsContainer>
// )}
// <Wrapper isDisabled={props.isFirstFetch} isOpen={props.isOpen}>
// <SearchInput>
// <div
// style={{
// flex: 1,
// margin: '10px',
// }}>
// <AsyncSelect
// ref={selectRef}
// value={value}
// components={{
// Menu: CustomMenu,
// Option: OptionWithIcon,
// Control: ControlWithIcon,
// }}
// placeholder={constants.SEARCH_HINT()}
// loadOptions={getOptions}
// onChange={handleChange}
// onFocus={handleOnFocus}
// isClearable
// escapeClearsValue
// styles={customStyles}
// noOptionsMessage={() => null}
// />
// </div>
// {props.isOpen && (
// <IconButton onClick={() => resetSearch()}>
// <CloseIcon />
// </IconButton>
// )}
// </SearchInput>
// </Wrapper>
// <SearchButton
// isOpen={props.isOpen}
// onClick={() => !props.isFirstFetch && props.setOpen(true)}>
// <SearchIcon />
// </SearchButton>
// </>
// );
// }

View file

@ -0,0 +1,455 @@
export {};
// import React, { useContext, useEffect, useState } from 'react';
// import { slide as Menu } from 'react-burger-menu';
// import constants from 'utils/strings/constants';
// import { getData, LS_KEYS, setData } from 'utils/storage/localStorage';
// import { getToken } from 'utils/common/key';
// import { getEndpoint } from 'utils/common/apiUtil';
// import { Button } from 'react-bootstrap';
// import {
// isSubscriptionActive,
// getUserSubscription,
// isOnFreePlan,
// isSubscriptionCancelled,
// isSubscribed,
// convertToHumanReadable,
// } from 'utils/billing';
// import isElectron from 'is-electron';
// import { Collection } from 'types/collection';
// import { useRouter } from 'next/router';
// import LinkButton from './pages/gallery/LinkButton';
// import { downloadApp } from 'utils/common';
// import { getUserDetails, logoutUser } from 'services/userService';
// import { LogoImage } from 'pages/_app';
// import { SetDialogMessage } from './MessageDialog';
// import EnteSpinner from './EnteSpinner';
// import RecoveryKeyModal from './RecoveryKeyModal';
// import TwoFactorModal from './TwoFactorModal';
// import ExportModal from './ExportModal';
// import { GalleryContext } from 'pages/gallery';
// import InProgressIcon from './icons/InProgressIcon';
// import exportService from 'services/exportService';
// import { Subscription } from 'types/billing';
// import { PAGES } from 'constants/pages';
// import { ARCHIVE_SECTION, TRASH_SECTION } from 'constants/collection';
// import FixLargeThumbnails from './FixLargeThumbnail';
// import { AppContext } from 'pages/_app';
// import { canEnableMlSearch } from 'utils/machineLearning/compatibility';
// import { SetLoading } from 'types/gallery';
// import mlIDbStorage from 'utils/storage/mlIDbStorage';
// interface Props {
// collections: Collection[];
// setDialogMessage: SetDialogMessage;
// setLoading: SetLoading;
// }
// export default function Sidebar(props: Props) {
// const [usage, SetUsage] = useState<string>(null);
// const [user, setUser] = useState(null);
// const [subscription, setSubscription] = useState<Subscription>(null);
// useEffect(() => {
// setUser(getData(LS_KEYS.USER));
// setSubscription(getUserSubscription());
// }, []);
// const [isOpen, setIsOpen] = useState(false);
// const [recoverModalView, setRecoveryModalView] = useState(false);
// const [twoFactorModalView, setTwoFactorModalView] = useState(false);
// const [exportModalView, setExportModalView] = useState(false);
// const [fixLargeThumbsView, setFixLargeThumbsView] = useState(false);
// const galleryContext = useContext(GalleryContext);
// const appContext = useContext(AppContext);
// const enableMlSearch = async () => {
// await appContext.updateMlSearchEnabled(true);
// };
// const disableMlSearch = async () => {
// await appContext.updateMlSearchEnabled(false);
// };
// const clearMLDB = async () => {
// await mlIDbStorage.clearMLDB();
// };
// useEffect(() => {
// const main = async () => {
// if (!isOpen) {
// return;
// }
// const userDetails = await getUserDetails();
// setUser({ ...user, email: userDetails.email });
// SetUsage(convertToHumanReadable(userDetails.usage));
// setSubscription(userDetails.subscription);
// setData(LS_KEYS.USER, {
// ...getData(LS_KEYS.USER),
// email: userDetails.email,
// });
// setData(LS_KEYS.SUBSCRIPTION, userDetails.subscription);
// };
// main();
// }, [isOpen]);
// function openFeedbackURL() {
// const feedbackURL: string = `${getEndpoint()}/users/feedback?token=${encodeURIComponent(
// getToken()
// )}`;
// const win = window.open(feedbackURL, '_blank');
// win.focus();
// }
// function initiateEmail(email: string) {
// const a = document.createElement('a');
// a.href = 'mailto:' + email;
// a.rel = 'noreferrer noopener';
// a.click();
// }
// // eslint-disable-next-line @typescript-eslint/no-unused-vars
// function exportFiles() {
// if (isElectron()) {
// setExportModalView(true);
// } else {
// props.setDialogMessage({
// title: constants.DOWNLOAD_APP,
// content: constants.DOWNLOAD_APP_MESSAGE(),
// staticBackdrop: true,
// proceed: {
// text: constants.DOWNLOAD,
// action: downloadApp,
// variant: 'success',
// },
// close: {
// text: constants.CLOSE,
// },
// });
// }
// }
// const router = useRouter();
// function onManageClick() {
// setIsOpen(false);
// galleryContext.showPlanSelectorModal();
// }
// const Divider = () => (
// <div
// style={{
// height: '1px',
// marginTop: '40px',
// background: '#242424',
// width: '100%',
// }}
// />
// );
// return (
// <Menu
// isOpen={isOpen}
// onStateChange={(state) => setIsOpen(state.isOpen)}
// itemListElement="div">
// <div
// style={{
// display: 'flex',
// outline: 'none',
// textAlign: 'center',
// }}>
// <LogoImage
// style={{ height: '24px', padding: '3px' }}
// alt="logo"
// src="/icon.svg"
// />
// </div>
// <div
// style={{
// outline: 'none',
// color: 'rgb(45, 194, 98)',
// fontSize: '16px',
// }}>
// {user?.email}
// </div>
// <div
// style={{
// flex: 1,
// overflow: 'auto',
// outline: 'none',
// paddingTop: '0',
// }}>
// <div style={{ outline: 'none' }}>
// <div style={{ display: 'flex' }}>
// <h5 style={{ margin: '4px 0 12px 2px' }}>
// {constants.SUBSCRIPTION_PLAN}
// </h5>
// </div>
// <div style={{ color: '#959595' }}>
// {isSubscriptionActive(subscription) ? (
// isOnFreePlan(subscription) ? (
// constants.FREE_SUBSCRIPTION_INFO(
// subscription?.expiryTime
// )
// ) : isSubscriptionCancelled(subscription) ? (
// constants.RENEWAL_CANCELLED_SUBSCRIPTION_INFO(
// subscription?.expiryTime
// )
// ) : (
// constants.RENEWAL_ACTIVE_SUBSCRIPTION_INFO(
// subscription?.expiryTime
// )
// )
// ) : (
// <p>{constants.SUBSCRIPTION_EXPIRED}</p>
// )}
// <Button
// variant="outline-success"
// block
// size="sm"
// onClick={onManageClick}>
// {isSubscribed(subscription)
// ? constants.MANAGE
// : constants.SUBSCRIBE}
// </Button>
// </div>
// </div>
// <div style={{ outline: 'none', marginTop: '30px' }} />
// <div>
// <h5 style={{ marginBottom: '12px' }}>
// {constants.USAGE_DETAILS}
// </h5>
// <div style={{ color: '#959595' }}>
// {usage ? (
// constants.USAGE_INFO(
// usage,
// convertToHumanReadable(subscription?.storage)
// )
// ) : (
// <div style={{ textAlign: 'center' }}>
// <EnteSpinner
// style={{
// borderWidth: '2px',
// width: '20px',
// height: '20px',
// }}
// />
// </div>
// )}
// </div>
// </div>
// <Divider />
// <LinkButton
// style={{ marginTop: '30px' }}
// onClick={() => {
// galleryContext.setActiveCollection(ARCHIVE_SECTION);
// setIsOpen(false);
// }}>
// {constants.ARCHIVE}
// </LinkButton>
// <LinkButton
// style={{ marginTop: '30px' }}
// onClick={() => {
// galleryContext.setActiveCollection(TRASH_SECTION);
// setIsOpen(false);
// }}>
// {constants.TRASH}
// </LinkButton>
// <>
// <RecoveryKeyModal
// show={recoverModalView}
// onHide={() => setRecoveryModalView(false)}
// somethingWentWrong={() =>
// props.setDialogMessage({
// title: constants.ERROR,
// content:
// constants.RECOVER_KEY_GENERATION_FAILED,
// close: { variant: 'danger' },
// })
// }
// />
// <LinkButton
// style={{ marginTop: '30px' }}
// onClick={() => setRecoveryModalView(true)}>
// {constants.DOWNLOAD_RECOVERY_KEY}
// </LinkButton>
// </>
// <>
// <TwoFactorModal
// show={twoFactorModalView}
// onHide={() => setTwoFactorModalView(false)}
// setDialogMessage={props.setDialogMessage}
// closeSidebar={() => setIsOpen(false)}
// setLoading={props.setLoading}
// />
// <LinkButton
// style={{ marginTop: '30px' }}
// onClick={() => setTwoFactorModalView(true)}>
// {constants.TWO_FACTOR}
// </LinkButton>
// </>
// <LinkButton
// style={{ marginTop: '30px' }}
// onClick={() => {
// router.push(PAGES.CHANGE_PASSWORD);
// }}>
// {constants.CHANGE_PASSWORD}
// </LinkButton>
// <LinkButton
// style={{ marginTop: '30px' }}
// onClick={() => {
// router.push(PAGES.CHANGE_EMAIL);
// }}>
// {constants.UPDATE_EMAIL}
// </LinkButton>
// <Divider />
// <>
// <FixLargeThumbnails
// isOpen={fixLargeThumbsView}
// hide={() => setFixLargeThumbsView(false)}
// show={() => setFixLargeThumbsView(true)}
// />
// <LinkButton
// style={{ marginTop: '30px' }}
// onClick={() => setFixLargeThumbsView(true)}>
// {constants.FIX_LARGE_THUMBNAILS}
// </LinkButton>
// </>
// <LinkButton
// style={{ marginTop: '30px' }}
// onClick={openFeedbackURL}>
// {constants.REQUEST_FEATURE}
// </LinkButton>
// <LinkButton
// style={{ marginTop: '30px' }}
// onClick={() => {
// if (!appContext.mlSearchEnabled) {
// if (!canEnableMlSearch()) {
// props.setDialogMessage({
// title: constants.ENABLE_ML_SEARCH,
// content: constants.ML_SEARCH_NOT_COMPATIBLE,
// close: { text: constants.OK },
// });
// return;
// }
// props.setDialogMessage({
// title: `${constants.CONFIRM} ${constants.ENABLE_ML_SEARCH}`,
// content: constants.ENABLE_ML_SEARCH_MESSAGE,
// staticBackdrop: true,
// proceed: {
// text: constants.ENABLE_ML_SEARCH,
// action: enableMlSearch,
// variant: 'success',
// },
// close: { text: constants.CANCEL },
// });
// } else {
// disableMlSearch();
// }
// }}>
// {appContext.mlSearchEnabled
// ? constants.DISABLE_ML_SEARCH
// : constants.ENABLE_ML_SEARCH}
// </LinkButton>
// <LinkButton
// style={{ marginTop: '30px' }}
// onClick={() => {
// if (!appContext.mlSearchEnabled) {
// if (!canEnableMlSearch()) {
// props.setDialogMessage({
// title: constants.ENABLE_ML_SEARCH,
// content: constants.ML_SEARCH_NOT_COMPATIBLE,
// close: { text: constants.OK },
// });
// return;
// }
// props.setDialogMessage({
// title: 'clear ML db',
// content: 'clear ML db',
// staticBackdrop: true,
// proceed: {
// text: 'clear',
// action: clearMLDB,
// variant: 'success',
// },
// close: { text: constants.CANCEL },
// });
// } else {
// disableMlSearch();
// }
// }}>
// {'clear ML db'}
// </LinkButton>
// {appContext.mlSearchEnabled && (
// <LinkButton
// style={{ marginTop: '30px' }}
// onClick={() => {
// router.push(PAGES.ML_DEBUG);
// }}>
// {constants.ML_DEBUG}
// </LinkButton>
// )}
// <LinkButton
// style={{ marginTop: '30px' }}
// onClick={() => initiateEmail('contact@ente.io')}>
// {constants.SUPPORT}
// </LinkButton>
// <>
// <ExportModal
// show={exportModalView}
// onHide={() => setExportModalView(false)}
// usage={usage}
// />
// <LinkButton
// style={{ marginTop: '30px' }}
// onClick={exportFiles}>
// <div style={{ display: 'flex' }}>
// {constants.EXPORT}
// <div style={{ width: '20px' }} />
// {exportService.isExportInProgress() && (
// <InProgressIcon />
// )}
// </div>
// </LinkButton>
// </>
// <Divider />
// <LinkButton
// variant="danger"
// style={{ marginTop: '30px' }}
// onClick={() =>
// props.setDialogMessage({
// title: `${constants.CONFIRM} ${constants.LOGOUT}`,
// content: constants.LOGOUT_MESSAGE,
// staticBackdrop: true,
// proceed: {
// text: constants.LOGOUT,
// action: logoutUser,
// variant: 'danger',
// },
// close: { text: constants.CANCEL },
// })
// }>
// {constants.LOGOUT}
// </LinkButton>
// <LinkButton
// variant="danger"
// style={{ marginTop: '30px' }}
// onClick={() =>
// props.setDialogMessage({
// title: `${constants.DELETE_ACCOUNT}`,
// content: constants.DELETE_ACCOUNT_MESSAGE(),
// staticBackdrop: true,
// proceed: {
// text: constants.DELETE_ACCOUNT,
// action: () => {
// initiateEmail('account-deletion@ente.io');
// },
// variant: 'danger',
// },
// close: { text: constants.CANCEL },
// })
// }>
// {constants.DELETE_ACCOUNT}
// </LinkButton>
// <div
// style={{
// marginTop: '40px',
// width: '100%',
// }}
// />
// </div>
// </Menu>
// );
// }

View file

@ -0,0 +1,71 @@
import ChevronRight from '@mui/icons-material/ChevronRight';
import ScienceIcon from '@mui/icons-material/Science';
import { Box, DialogProps, Stack } from '@mui/material';
import { EnteDrawer } from 'components/EnteDrawer';
import MLSearchSettings from 'components/MachineLearning/MLSearchSettings';
import MenuSectionTitle from 'components/Menu/MenuSectionTitle';
import Titlebar from 'components/Titlebar';
import { useState } from 'react';
import constants from 'utils/strings/constants';
import SidebarButton from './Button';
export default function AdvancedSettings({ open, onClose, onRootClose }) {
const [mlSearchSettingsView, setMlSearchSettingsView] = useState(false);
const openMlSearchSettings = () => setMlSearchSettingsView(true);
const closeMlSearchSettings = () => setMlSearchSettingsView(false);
const handleRootClose = () => {
onClose();
onRootClose();
};
const handleDrawerClose: DialogProps['onClose'] = (_, reason) => {
if (reason === 'backdropClick') {
handleRootClose();
} else {
onClose();
}
};
return (
<EnteDrawer
transitionDuration={0}
open={open}
onClose={handleDrawerClose}
BackdropProps={{
sx: { '&&&': { backgroundColor: 'transparent' } },
}}>
<Stack spacing={'4px'} py={'12px'}>
<Titlebar
onClose={onClose}
title={constants.ADVANCED}
onRootClose={handleRootClose}
/>
<Box px={'8px'}>
<Stack py="20px" spacing="24px">
<Box>
<MenuSectionTitle
title={constants.LABS}
icon={<ScienceIcon />}
/>
<SidebarButton
variant="contained"
color="secondary"
endIcon={<ChevronRight />}
onClick={openMlSearchSettings}>
{constants.ML_SEARCH}
</SidebarButton>
</Box>
</Stack>
</Box>
</Stack>
<MLSearchSettings
open={mlSearchSettingsView}
onClose={closeMlSearchSettings}
onRootClose={handleRootClose}
/>
</EnteDrawer>
);
}

View file

@ -7,6 +7,7 @@ import TwoFactorModal from 'components/TwoFactor/Modal';
import { PAGES } from 'constants/pages'; import { PAGES } from 'constants/pages';
import { useRouter } from 'next/router'; import { useRouter } from 'next/router';
import { AppContext } from 'pages/_app'; import { AppContext } from 'pages/_app';
// import mlIDbStorage from 'utils/storage/mlIDbStorage';
import isElectron from 'is-electron'; import isElectron from 'is-electron';
import WatchFolder from 'components/WatchFolder'; import WatchFolder from 'components/WatchFolder';
import { getDownloadAppMessage } from 'utils/ui'; import { getDownloadAppMessage } from 'utils/ui';
@ -14,6 +15,7 @@ import { getDownloadAppMessage } from 'utils/ui';
import ThemeSwitcher from './ThemeSwitcher'; import ThemeSwitcher from './ThemeSwitcher';
import { SpaceBetweenFlex } from 'components/Container'; import { SpaceBetweenFlex } from 'components/Container';
import { isInternalUser } from 'utils/user'; import { isInternalUser } from 'utils/user';
import AdvancedSettings from './AdvancedSettings';
export default function UtilitySection({ closeSidebar }) { export default function UtilitySection({ closeSidebar }) {
const router = useRouter(); const router = useRouter();
@ -28,7 +30,10 @@ export default function UtilitySection({ closeSidebar }) {
const [recoverModalView, setRecoveryModalView] = useState(false); const [recoverModalView, setRecoveryModalView] = useState(false);
const [twoFactorModalView, setTwoFactorModalView] = useState(false); const [twoFactorModalView, setTwoFactorModalView] = useState(false);
// const [fixLargeThumbsView, setFixLargeThumbsView] = useState(false); const [advancedSettingsView, setAdvancedSettingsView] = useState(false);
const openAdvancedSettings = () => setAdvancedSettingsView(true);
const closeAdvancedSettings = () => setAdvancedSettingsView(false);
const openRecoveryKeyModal = () => setRecoveryModalView(true); const openRecoveryKeyModal = () => setRecoveryModalView(true);
const closeRecoveryKeyModal = () => setRecoveryModalView(false); const closeRecoveryKeyModal = () => setRecoveryModalView(false);
@ -57,8 +62,6 @@ export default function UtilitySection({ closeSidebar }) {
const redirectToDeduplicatePage = () => router.push(PAGES.DEDUPLICATE); const redirectToDeduplicatePage = () => router.push(PAGES.DEDUPLICATE);
// const openThumbnailCompressModal = () => setFixLargeThumbsView(true);
const somethingWentWrong = () => const somethingWentWrong = () =>
setDialogMessage({ setDialogMessage({
title: constants.ERROR, title: constants.ERROR,
@ -94,9 +97,11 @@ export default function UtilitySection({ closeSidebar }) {
<SidebarButton onClick={redirectToDeduplicatePage}> <SidebarButton onClick={redirectToDeduplicatePage}>
{constants.DEDUPLICATE_FILES} {constants.DEDUPLICATE_FILES}
</SidebarButton> </SidebarButton>
{/* <SidebarButton onClick={openThumbnailCompressModal}> {isElectron() && (
{constants.COMPRESS_THUMBNAILS} <SidebarButton onClick={openAdvancedSettings}>
</SidebarButton> */} {constants.ADVANCED}
</SidebarButton>
)}
<RecoveryKey <RecoveryKey
show={recoverModalView} show={recoverModalView}
onHide={closeRecoveryKeyModal} onHide={closeRecoveryKeyModal}
@ -109,11 +114,12 @@ export default function UtilitySection({ closeSidebar }) {
setLoading={startLoading} setLoading={startLoading}
/> />
<WatchFolder open={watchFolderView} onClose={closeWatchFolder} /> <WatchFolder open={watchFolderView} onClose={closeWatchFolder} />
{/* <FixLargeThumbnails
isOpen={fixLargeThumbsView} <AdvancedSettings
hide={() => setFixLargeThumbsView(false)} open={advancedSettingsView}
show={() => setFixLargeThumbsView(true)} onClose={closeAdvancedSettings}
/> */} onRootClose={closeSidebar}
/>
</> </>
); );
} }

View file

@ -0,0 +1,19 @@
import React from 'react';
export default function ObjectIcon(props) {
return (
<svg
xmlns="http://www.w3.org/2000/svg"
height={props.height}
viewBox={props.viewBox}
width={props.width}>
<path d="M11.499 12.03v11.971l-10.5-5.603v-11.835l10.5 5.467zm11.501 6.368l-10.501 5.602v-11.968l10.501-5.404v11.77zm-16.889-15.186l10.609 5.524-4.719 2.428-10.473-5.453 4.583-2.499zm16.362 2.563l-4.664 2.4-10.641-5.54 4.831-2.635 10.474 5.775z" />
</svg>
);
}
ObjectIcon.defaultProps = {
height: 20,
width: 20,
viewBox: '0 0 24 24',
};

View file

@ -0,0 +1,19 @@
import React from 'react';
export default function TextIcon(props) {
return (
<svg
xmlns="http://www.w3.org/2000/svg"
height={props.height}
viewBox={props.viewBox}
width={props.width}>
<path d="M22 0h-20v6h1.999c0-1.174.397-3 2.001-3h4v16.874c0 1.174-.825 2.126-2 2.126h-1v2h9.999v-2h-.999c-1.174 0-2-.952-2-2.126v-16.874h4c1.649 0 2.02 1.826 2.02 3h1.98v-6z" />
</svg>
);
}
TextIcon.defaultProps = {
height: 16,
width: 16,
viewBox: '0 0 28 28',
};

View file

@ -0,0 +1,13 @@
import React from 'react';
// import dynamic from 'next/dynamic';
// const MLDebugWithNoSSR = dynamic(
// () => import('components/MachineLearning/MlDebug-disabled'),
// {
// ssr: false,
// }
// );
export default function MLDebug() {
return <div>{/* <MLDebugWithNoSSR></MLDebugWithNoSSR> */}</div>;
}

View file

@ -43,6 +43,7 @@ interface Props {
isFavoriteCollection: boolean; isFavoriteCollection: boolean;
isUncategorizedCollection: boolean; isUncategorizedCollection: boolean;
isIncomingSharedCollection: boolean; isIncomingSharedCollection: boolean;
isInSearchMode: boolean;
} }
const SelectedFileOptions = ({ const SelectedFileOptions = ({
@ -64,6 +65,7 @@ const SelectedFileOptions = ({
isFavoriteCollection, isFavoriteCollection,
isUncategorizedCollection, isUncategorizedCollection,
isIncomingSharedCollection, isIncomingSharedCollection,
isInSearchMode,
}: Props) => { }: Props) => {
const { setDialogMessage } = useContext(AppContext); const { setDialogMessage } = useContext(AppContext);
const addToCollection = () => const addToCollection = () =>
@ -147,7 +149,30 @@ const SelectedFileOptions = ({
</Box> </Box>
</FluidContainer> </FluidContainer>
<Stack spacing={2} direction="row" mr={2}> <Stack spacing={2} direction="row" mr={2}>
{activeCollection === TRASH_SECTION ? ( {isInSearchMode ? (
<>
<Tooltip title={constants.FIX_CREATION_TIME}>
<IconButton onClick={fixTimeHelper}>
<ClockIcon />
</IconButton>
</Tooltip>
<Tooltip title={constants.DOWNLOAD}>
<IconButton onClick={downloadHelper}>
<DownloadIcon />
</IconButton>
</Tooltip>
<Tooltip title={constants.ADD}>
<IconButton onClick={addToCollection}>
<AddIcon />
</IconButton>
</Tooltip>
<Tooltip title={constants.ARCHIVE}>
<IconButton onClick={archiveFilesHelper}>
<ArchiveIcon />
</IconButton>
</Tooltip>
</>
) : activeCollection === TRASH_SECTION ? (
<> <>
<Tooltip title={constants.RESTORE}> <Tooltip title={constants.RESTORE}>
<IconButton onClick={restoreHandler}> <IconButton onClick={restoreHandler}>

5
src/constants/cache/index.ts
View file

@ -0,0 +1,5 @@
export enum CACHES {
THUMBS = 'thumbs',
FACE_CROPS = 'face-crops',
FILES = 'files',
}

View file

@ -0,0 +1,101 @@
import { JobConfig } from 'types/common/job';
import { MLSearchConfig, MLSyncConfig } from 'types/machineLearning';
export const DEFAULT_ML_SYNC_JOB_CONFIG: JobConfig = {
intervalSec: 5,
// TODO: finalize this after checking the effects on, and from, machine sleep
maxItervalSec: 960,
backoffMultiplier: 2,
};
export const DEFAULT_ML_SYNC_CONFIG: MLSyncConfig = {
batchSize: 200,
imageSource: 'Original',
faceDetection: {
method: 'BlazeFace',
minFaceSize: 32,
},
faceCrop: {
enabled: true,
method: 'ArcFace',
padding: 0.25,
maxSize: 256,
blobOptions: {
type: 'image/jpeg',
quality: 0.8,
},
},
faceAlignment: {
method: 'ArcFace',
},
faceEmbedding: {
method: 'MobileFaceNet',
faceSize: 112,
generateTsne: true,
},
faceClustering: {
method: 'Hdbscan',
minClusterSize: 3,
minSamples: 5,
clusterSelectionEpsilon: 0.6,
clusterSelectionMethod: 'leaf',
minInputSize: 50,
// maxDistanceInsideCluster: 0.4,
generateDebugInfo: true,
},
objectDetection: {
method: 'SSDMobileNetV2',
maxNumBoxes: 20,
minScore: 0.2,
},
sceneDetection: {
method: 'ImageScene',
minScore: 0.1,
},
textDetection: {
method: 'Tesseract',
minAccuracy: 75,
},
// tsne: {
// samples: 200,
// dim: 2,
// perplexity: 10.0,
// learningRate: 10.0,
// metric: 'euclidean',
// },
mlVersion: 3,
};
export const DEFAULT_ML_SEARCH_CONFIG: MLSearchConfig = {
enabled: false,
};
export const ML_SYNC_DOWNLOAD_TIMEOUT_MS = 300000;
export const MAX_FACE_DISTANCE_PERCENT = Math.sqrt(2) / 100;
export const MAX_ML_SYNC_ERROR_COUNT = 4;
export const TEXT_DETECTION_TIMEOUT_MS = [10000, 30000, 60000, 120000, 240000];
export const BLAZEFACE_MAX_FACES = 50;
export const BLAZEFACE_INPUT_SIZE = 256;
export const BLAZEFACE_IOU_THRESHOLD = 0.3;
export const BLAZEFACE_SCORE_THRESHOLD = 0.75;
export const BLAZEFACE_PASS1_SCORE_THRESHOLD = 0.4;
export const BLAZEFACE_FACE_SIZE = 112;
export const MOBILEFACENET_FACE_SIZE = 112;
export const TESSERACT_MIN_IMAGE_WIDTH = 44;
export const TESSERACT_MIN_IMAGE_HEIGHT = 20;
export const TESSERACT_MAX_IMAGE_DIMENSION = 720;
// the scene detection model takes fixed-shape (224x224) inputs
// https://tfhub.dev/sayannath/lite-model/image-scene/1
export const SCENE_DETECTION_IMAGE_SIZE = 224;
// SSD with Mobilenet v2 initialized from Imagenet classification checkpoint. Trained on COCO 2017 dataset (images scaled to 320x320 resolution).
// https://tfhub.dev/tensorflow/ssd_mobilenet_v2/2
export const OBJECT_DETECTION_IMAGE_SIZE = 320;
export const BATCHES_BEFORE_SYNCING_INDEX = 5;
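
DEFAULT_ML_SYNC_JOB_CONFIG above presumably drives an exponential backoff in the ML job scheduler; a minimal sketch, assuming the scheduler multiplies the interval on every idle run (the nextIntervalSec helper is illustrative, not part of this diff):

import { JobConfig } from 'types/common/job';

// Hypothetical helper: grow the polling interval by backoffMultiplier each run,
// capped at maxItervalSec (field name as spelled in JobConfig).
function nextIntervalSec(config: JobConfig, currentIntervalSec: number): number {
    return Math.min(
        currentIntervalSec * config.backoffMultiplier,
        config.maxItervalSec
    );
}

// With DEFAULT_ML_SYNC_JOB_CONFIG this would step 5 -> 10 -> 20 -> ... -> 960 seconds.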

View file

@ -13,5 +13,6 @@ export enum PAGES {
VERIFY = '/verify', VERIFY = '/verify',
ROOT = '/', ROOT = '/',
SHARED_ALBUMS = '/shared-albums', SHARED_ALBUMS = '/shared-albums',
// ML_DEBUG = '/ml-debug',
DEDUPLICATE = '/deduplicate', DEDUPLICATE = '/deduplicate',
} }

View file

@ -0,0 +1,3 @@
export const ALL_SECTION_NAME = 'All';
export const ARCHIVE_SECTION_NAME = 'Archive';
export const TRASH_SECTION_NAME = 'Trash';

View file

@ -1 +1,6 @@
export const ENTE_WEBSITE_LINK = 'https://ente.io'; export const ENTE_WEBSITE_LINK = 'https://ente.io';
export const ML_BLOG_LINK = 'https://ente.io/blog/desktop-ml-beta';
export const FACE_SEARCH_PRIVACY_POLICY_LINK =
'https://ente.io/privacy#8-biometric-information-privacy-policy';

View file

@ -12,6 +12,12 @@ import { getData, LS_KEYS } from 'utils/storage/localStorage';
import HTTPService from 'services/HTTPService'; import HTTPService from 'services/HTTPService';
import FlashMessageBar from 'components/FlashMessageBar'; import FlashMessageBar from 'components/FlashMessageBar';
import Head from 'next/head'; import Head from 'next/head';
import { eventBus, Events } from 'services/events';
import mlWorkManager from 'services/machineLearning/mlWorkManager';
import {
getMLSearchConfig,
updateMLSearchConfig,
} from 'utils/machineLearning/config';
import LoadingBar from 'react-top-loading-bar'; import LoadingBar from 'react-top-loading-bar';
import DialogBox from 'components/DialogBox'; import DialogBox from 'components/DialogBox';
import { styled, ThemeProvider } from '@mui/material/styles'; import { styled, ThemeProvider } from '@mui/material/styles';
@ -69,6 +75,8 @@ type AppContextType = {
setDisappearingFlashMessage: (message: FlashMessage) => void; setDisappearingFlashMessage: (message: FlashMessage) => void;
redirectURL: string; redirectURL: string;
setRedirectURL: (url: string) => void; setRedirectURL: (url: string) => void;
mlSearchEnabled: boolean;
updateMlSearchEnabled: (enabled: boolean) => Promise<void>;
startLoading: () => void; startLoading: () => void;
finishLoading: () => void; finishLoading: () => void;
closeMessageDialog: () => void; closeMessageDialog: () => void;
@ -83,6 +91,7 @@ type AppContextType = {
isMobile: boolean; isMobile: boolean;
theme: THEME_COLOR; theme: THEME_COLOR;
setTheme: SetTheme; setTheme: SetTheme;
somethingWentWrong: () => void;
}; };
export enum FLASH_MESSAGE_TYPE { export enum FLASH_MESSAGE_TYPE {
@ -113,6 +122,7 @@ export default function App({ Component, err }) {
const [redirectName, setRedirectName] = useState<string>(null); const [redirectName, setRedirectName] = useState<string>(null);
const [flashMessage, setFlashMessage] = useState<FlashMessage>(null); const [flashMessage, setFlashMessage] = useState<FlashMessage>(null);
const [redirectURL, setRedirectURL] = useState(null); const [redirectURL, setRedirectURL] = useState(null);
const [mlSearchEnabled, setMlSearchEnabled] = useState(false);
const isLoadingBarRunning = useRef(false); const isLoadingBarRunning = useRef(false);
const loadingBar = useRef(null); const loadingBar = useRef(null);
const [dialogMessage, setDialogMessage] = useState<DialogBoxAttributes>(); const [dialogMessage, setDialogMessage] = useState<DialogBoxAttributes>();
@ -167,6 +177,27 @@ export default function App({ Component, err }) {
} }
}, []); }, []);
useEffect(() => {
const loadMlSearchState = async () => {
try {
const mlSearchConfig = await getMLSearchConfig();
setMlSearchEnabled(mlSearchConfig.enabled);
mlWorkManager.setMlSearchEnabled(mlSearchConfig.enabled);
} catch (e) {
logError(e, 'Error while loading mlSearchEnabled');
}
};
loadMlSearchState();
try {
eventBus.on(Events.LOGOUT, () => {
setMlSearchEnabled(false);
mlWorkManager.setMlSearchEnabled(false);
});
} catch (e) {
logError(e, 'Error while subscribing to logout event');
}
}, []);
const setUserOnline = () => setOffline(false); const setUserOnline = () => setOffline(false);
const setUserOffline = () => setOffline(true); const setUserOffline = () => setOffline(true);
const resetSharedFiles = () => setSharedFiles(null); const resetSharedFiles = () => setSharedFiles(null);
@ -251,6 +282,17 @@ export default function App({ Component, err }) {
setFlashMessage(flashMessages); setFlashMessage(flashMessages);
setTimeout(() => setFlashMessage(null), 5000); setTimeout(() => setFlashMessage(null), 5000);
}; };
const updateMlSearchEnabled = async (enabled: boolean) => {
try {
const mlSearchConfig = await getMLSearchConfig();
mlSearchConfig.enabled = enabled;
await updateMLSearchConfig(mlSearchConfig);
setMlSearchEnabled(enabled);
mlWorkManager.setMlSearchEnabled(enabled);
} catch (e) {
logError(e, 'Error while updating mlSearchEnabled');
}
};
const startLoading = () => { const startLoading = () => {
!isLoadingBarRunning.current && loadingBar.current?.continuousStart(); !isLoadingBarRunning.current && loadingBar.current?.continuousStart();
@ -265,6 +307,13 @@ export default function App({ Component, err }) {
const closeMessageDialog = () => setMessageDialogView(false); const closeMessageDialog = () => setMessageDialogView(false);
const somethingWentWrong = () =>
setDialogMessage({
title: constants.ERROR,
close: { variant: 'danger' },
content: constants.UNKNOWN_ERROR,
});
return ( return (
<> <>
<Head> <Head>
@ -322,6 +371,8 @@ export default function App({ Component, err }) {
<AppContext.Provider <AppContext.Provider
value={{ value={{
showNavBar, showNavBar,
mlSearchEnabled,
updateMlSearchEnabled,
sharedFiles, sharedFiles,
resetSharedFiles, resetSharedFiles,
setDisappearingFlashMessage, setDisappearingFlashMessage,
@ -341,6 +392,7 @@ export default function App({ Component, err }) {
setNotificationAttributes, setNotificationAttributes,
theme, theme,
setTheme, setTheme,
somethingWentWrong,
}}> }}>
{loading ? ( {loading ? (
<VerticallyCentered> <VerticallyCentered>

View file

@ -530,7 +530,7 @@ export default function Gallery() {
} else { } else {
setSearch(newSearch); setSearch(newSearch);
} }
if (!newSearch?.collection && !newSearch?.file) { if (!newSearch?.collection) {
setIsInSearchMode(!!newSearch); setIsInSearchMode(!!newSearch);
setSetSearchResultSummary(summary); setSetSearchResultSummary(summary);
} else { } else {
@ -552,11 +552,6 @@ export default function Gallery() {
finishLoading(); finishLoading();
}; };
const resetSearch = () => {
setSearch(null);
setSetSearchResultSummary(null);
};
const openUploader = () => { const openUploader = () => {
setUploadTypeSelectorView(true); setUploadTypeSelectorView(true);
}; };
@ -697,7 +692,6 @@ export default function Gallery() {
CollectionSummaryType.incomingShare CollectionSummaryType.incomingShare
} }
enableDownload={true} enableDownload={true}
resetSearch={resetSearch}
/> />
{selected.count > 0 && {selected.count > 0 &&
selected.collectionID === activeCollection && ( selected.collectionID === activeCollection && (
@ -756,6 +750,7 @@ export default function Gallery() {
?.type === ?.type ===
CollectionSummaryType.incomingShare CollectionSummaryType.incomingShare
} }
isInSearchMode={isInSearchMode}
/> />
)} )}
</FullScreenDropZone> </FullScreenDropZone>

View file

@ -0,0 +1,41 @@
import { LimitedCacheStorage } from 'types/cache/index';
import { ElectronCacheStorage } from 'services/electron/cache';
import { runningInElectron, runningInWorker } from 'utils/common';
import { WorkerElectronCacheStorageService } from 'services/workerElectronCache/service';
class cacheStorageFactory {
workerElectronCacheStorageServiceInstance: WorkerElectronCacheStorageService;
getCacheStorage(): LimitedCacheStorage {
if (runningInElectron()) {
if (runningInWorker()) {
if (!this.workerElectronCacheStorageServiceInstance) {
this.workerElectronCacheStorageServiceInstance =
new WorkerElectronCacheStorageService();
}
return this.workerElectronCacheStorageServiceInstance;
} else {
return ElectronCacheStorage;
}
} else {
return transformBrowserCacheStorageToLimitedCacheStorage(caches);
}
}
}
export const CacheStorageFactory = new cacheStorageFactory();
function transformBrowserCacheStorageToLimitedCacheStorage(
caches: CacheStorage
): LimitedCacheStorage {
return {
async open(cacheName) {
const cache = await caches.open(cacheName);
return {
match: cache.match.bind(cache),
put: cache.put.bind(cache),
delete: cache.delete.bind(cache),
};
},
delete: caches.delete.bind(caches),
};
}
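
The factory above adapts both the Electron disk cache and the browser CacheStorage to one narrow interface; the types imported from 'types/cache' are presumably shaped along these lines (a sketch of the assumed shape, not the actual type file):

// Assumed shape of 'types/cache', inferred from how the adapters above bind
// match/put/delete; the real declarations may differ.
export interface LimitedCache {
    match: (key: string) => Promise<Response>;
    put: (key: string, data: Response) => Promise<void>;
    delete: (key: string) => Promise<boolean>;
}

export interface LimitedCacheStorage {
    open: (cacheName: string) => Promise<LimitedCache>;
    delete: (cacheName: string) => Promise<boolean>;
}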

View file

@ -0,0 +1,21 @@
import { logError } from 'utils/sentry';
import { CacheStorageFactory } from './cacheStorageFactory';
async function openCache(cacheName: string) {
try {
return await CacheStorageFactory.getCacheStorage().open(cacheName);
} catch (e) {
// log and ignore; we don't want to break the caller flow when the cache is not available
logError(e, 'openCache failed');
}
}
async function deleteCache(cacheName: string) {
try {
return await CacheStorageFactory.getCacheStorage().delete(cacheName);
} catch (e) {
// log and ignore; we don't want to break the caller flow when the cache is not available
logError(e, 'deleteCache failed');
}
}
export const CacheStorageService = { open: openCache, delete: deleteCache };
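
openCache logs and returns undefined when no cache backend is available, so callers are expected to optional-chain into the result, as DownloadManager does; a short usage sketch (getCachedThumb is illustrative):

import { CacheStorageService } from 'services/cache/cacheStorageService';
import { CACHES } from 'constants/cache';

// Sketch: look up a cached thumbnail, tolerating an unavailable cache.
async function getCachedThumb(fileID: number): Promise<Response | undefined> {
    const thumbsCache = await CacheStorageService.open(CACHES.THUMBS);
    return await thumbsCache?.match(fileID.toString());
}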

View file

@ -1,31 +0,0 @@
import electronService from './electron/common';
import electronCacheService from './electron/cache';
import { logError } from 'utils/sentry';
const THUMB_CACHE = 'thumbs';
export function getCacheProvider() {
if (electronService.checkIsBundledApp()) {
return electronCacheService;
} else {
return caches;
}
}
export async function openThumbnailCache() {
try {
return await getCacheProvider().open(THUMB_CACHE);
} catch (e) {
logError(e, 'openThumbnailCache failed');
// log and ignore
}
}
export async function deleteThumbnailCache() {
try {
return await getCacheProvider().delete(THUMB_CACHE);
} catch (e) {
logError(e, 'deleteThumbnailCache failed');
// dont throw
}
}

View file

@ -42,12 +42,18 @@ import {
FAVORITE_COLLECTION_NAME, FAVORITE_COLLECTION_NAME,
DUMMY_UNCATEGORIZED_SECTION, DUMMY_UNCATEGORIZED_SECTION,
} from 'constants/collection'; } from 'constants/collection';
// plain constant strings are used here instead of the english strings to avoid importing MUI components,
// which reference the window object, which is not available in a web worker
import {
ALL_SECTION_NAME,
ARCHIVE_SECTION_NAME,
TRASH_SECTION_NAME,
} from 'constants/strings';
import { import {
NEW_COLLECTION_MAGIC_METADATA, NEW_COLLECTION_MAGIC_METADATA,
SUB_TYPE, SUB_TYPE,
UpdateMagicMetadataRequest, UpdateMagicMetadataRequest,
} from 'types/magicMetadata'; } from 'types/magicMetadata';
import constants from 'utils/strings/constants';
import { IsArchived, updateMagicMetadataProps } from 'utils/magicMetadata'; import { IsArchived, updateMagicMetadataProps } from 'utils/magicMetadata';
import { User } from 'types/user'; import { User } from 'types/user';
import { import {
@ -652,7 +658,7 @@ export const leaveSharedAlbum = async (collectionID: number) => {
{ 'X-Auth-Token': token } { 'X-Auth-Token': token }
); );
} catch (e) { } catch (e) {
logError(e, constants.LEAVE_SHARED_ALBUM_FAILED); logError(e, 'leave shared album failed ');
throw e; throw e;
} }
}; };
@ -1037,7 +1043,7 @@ function getAllCollectionSummaries(
): CollectionSummary { ): CollectionSummary {
return { return {
id: ALL_SECTION, id: ALL_SECTION,
name: constants.ALL_SECTION_NAME, name: ALL_SECTION_NAME,
type: CollectionSummaryType.all, type: CollectionSummaryType.all,
latestFile: collectionsLatestFile.get(ALL_SECTION), latestFile: collectionsLatestFile.get(ALL_SECTION),
fileCount: collectionFilesCount.get(ALL_SECTION) || 0, fileCount: collectionFilesCount.get(ALL_SECTION) || 0,
@ -1062,7 +1068,7 @@ function getArchivedCollectionSummaries(
): CollectionSummary { ): CollectionSummary {
return { return {
id: ARCHIVE_SECTION, id: ARCHIVE_SECTION,
name: constants.ARCHIVE_SECTION_NAME, name: ARCHIVE_SECTION_NAME,
type: CollectionSummaryType.archive, type: CollectionSummaryType.archive,
latestFile: collectionsLatestFile.get(ARCHIVE_SECTION), latestFile: collectionsLatestFile.get(ARCHIVE_SECTION),
fileCount: collectionFilesCount.get(ARCHIVE_SECTION) ?? 0, fileCount: collectionFilesCount.get(ARCHIVE_SECTION) ?? 0,
@ -1076,7 +1082,7 @@ function getTrashedCollectionSummaries(
): CollectionSummary { ): CollectionSummary {
return { return {
id: TRASH_SECTION, id: TRASH_SECTION,
name: constants.TRASH, name: TRASH_SECTION_NAME,
type: CollectionSummaryType.trash, type: CollectionSummaryType.trash,
latestFile: collectionsLatestFile.get(TRASH_SECTION), latestFile: collectionsLatestFile.get(TRASH_SECTION),
fileCount: collectionFilesCount.get(TRASH_SECTION) ?? 0, fileCount: collectionFilesCount.get(TRASH_SECTION) ?? 0,

View file

@ -11,10 +11,13 @@ import { EnteFile } from 'types/file';
import { logError } from 'utils/sentry'; import { logError } from 'utils/sentry';
import { FILE_TYPE } from 'constants/file'; import { FILE_TYPE } from 'constants/file';
import { CustomError } from 'utils/error'; import { CustomError } from 'utils/error';
import { openThumbnailCache } from './cacheService';
import QueueProcessor, { PROCESSING_STRATEGY } from './queueProcessor'; import QueueProcessor, { PROCESSING_STRATEGY } from './queueProcessor';
import ComlinkCryptoWorker from 'utils/comlink/ComlinkCryptoWorker'; import ComlinkCryptoWorker from 'utils/comlink/ComlinkCryptoWorker';
import { addLogLine } from 'utils/logging'; import { addLogLine } from 'utils/logging';
import { CacheStorageService } from './cache/cacheStorageService';
import { CACHES } from 'constants/cache';
import { Remote } from 'comlink';
import { DedicatedCryptoWorker } from 'worker/crypto.worker';
const MAX_PARALLEL_DOWNLOADS = 10; const MAX_PARALLEL_DOWNLOADS = 10;
@ -30,10 +33,15 @@ class DownloadManager {
PROCESSING_STRATEGY.LIFO PROCESSING_STRATEGY.LIFO
); );
public async getThumbnail(file: EnteFile) { public async getThumbnail(
file: EnteFile,
tokenOverride?: string,
usingWorker?: Remote<DedicatedCryptoWorker>,
timeout?: number
) {
try { try {
addLogLine(`[${file.id}] [DownloadManager] getThumbnail called`); addLogLine(`[${file.id}] [DownloadManager] getThumbnail called`);
const token = getToken(); const token = tokenOverride || getToken();
if (!token) { if (!token) {
return null; return null;
} }
@ -44,7 +52,9 @@ class DownloadManager {
} }
if (!this.thumbnailObjectURLPromise.has(file.id)) { if (!this.thumbnailObjectURLPromise.has(file.id)) {
const downloadPromise = async () => { const downloadPromise = async () => {
const thumbnailCache = await openThumbnailCache(); const thumbnailCache = await CacheStorageService.open(
CACHES.THUMBS
);
const cacheResp: Response = await thumbnailCache?.match( const cacheResp: Response = await thumbnailCache?.match(
file.id.toString() file.id.toString()
@ -60,7 +70,13 @@ class DownloadManager {
); );
const thumb = const thumb =
await this.thumbnailDownloadRequestsProcessor.queueUpRequest( await this.thumbnailDownloadRequestsProcessor.queueUpRequest(
() => this.downloadThumb(token, file) () =>
this.downloadThumb(
token,
file,
usingWorker,
timeout
)
).promise; ).promise;
const thumbBlob = new Blob([thumb]); const thumbBlob = new Blob([thumb]);
@ -83,17 +99,23 @@ class DownloadManager {
} }
} }
downloadThumb = async (token: string, file: EnteFile) => { downloadThumb = async (
token: string,
file: EnteFile,
usingWorker?: Remote<DedicatedCryptoWorker>,
timeout?: number
) => {
const resp = await HTTPService.get( const resp = await HTTPService.get(
getThumbnailURL(file.id), getThumbnailURL(file.id),
null, null,
{ 'X-Auth-Token': token }, { 'X-Auth-Token': token },
{ responseType: 'arraybuffer' } { responseType: 'arraybuffer', timeout }
); );
if (typeof resp.data === 'undefined') { if (typeof resp.data === 'undefined') {
throw Error(CustomError.REQUEST_FAILED); throw Error(CustomError.REQUEST_FAILED);
} }
const cryptoWorker = await ComlinkCryptoWorker.getInstance(); const cryptoWorker =
usingWorker || (await ComlinkCryptoWorker.getInstance());
const decrypted = await cryptoWorker.decryptThumbnail( const decrypted = await cryptoWorker.decryptThumbnail(
new Uint8Array(resp.data), new Uint8Array(resp.data),
await cryptoWorker.fromB64(file.thumbnail.decryptionHeader), await cryptoWorker.fromB64(file.thumbnail.decryptionHeader),
@ -140,10 +162,15 @@ class DownloadManager {
return await this.fileObjectURLPromise.get(file.id.toString()); return await this.fileObjectURLPromise.get(file.id.toString());
} }
async downloadFile(file: EnteFile) { async downloadFile(
const cryptoWorker = await ComlinkCryptoWorker.getInstance(); file: EnteFile,
tokenOverride?: string,
const token = getToken(); usingWorker?: Remote<DedicatedCryptoWorker>,
timeout?: number
) {
const cryptoWorker =
usingWorker || (await ComlinkCryptoWorker.getInstance());
const token = tokenOverride || getToken();
if (!token) { if (!token) {
return null; return null;
} }
@ -155,7 +182,7 @@ class DownloadManager {
getFileURL(file.id), getFileURL(file.id),
null, null,
{ 'X-Auth-Token': token }, { 'X-Auth-Token': token },
{ responseType: 'arraybuffer' } { responseType: 'arraybuffer', timeout }
); );
if (typeof resp.data === 'undefined') { if (typeof resp.data === 'undefined') {
throw Error(CustomError.REQUEST_FAILED); throw Error(CustomError.REQUEST_FAILED);
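
The new optional tokenOverride, usingWorker and timeout parameters let the ML sync code, which runs in a web worker with its own crypto worker and auth token, reuse this download path; a hedged call sketch, assuming the module's default export is the DownloadManager singleton and that the caller already holds the file, token and crypto worker:

import DownloadManager from 'services/downloadManager';
import { ML_SYNC_DOWNLOAD_TIMEOUT_MS } from 'constants/machineLearning/config';
import { EnteFile } from 'types/file';
import { Remote } from 'comlink';
import { DedicatedCryptoWorker } from 'worker/crypto.worker';

// Sketch: fetch a thumbnail from ML worker context with an explicit token,
// a dedicated crypto worker, and the ML download timeout.
async function getThumbForML(
    file: EnteFile,
    token: string,
    cryptoWorker: Remote<DedicatedCryptoWorker>
) {
    return await DownloadManager.getThumbnail(
        file,
        token,
        cryptoWorker,
        ML_SYNC_DOWNLOAD_TIMEOUT_MS
    );
}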

View file

@ -1,15 +1,16 @@
import { LimitedCache, LimitedCacheStorage } from 'types/cache';
import { ElectronAPIs } from 'types/electron'; import { ElectronAPIs } from 'types/electron';
import { runningInBrowser } from 'utils/common';
class ElectronCacheService { class ElectronCacheStorageService implements LimitedCacheStorage {
private electronAPIs: ElectronAPIs; private electronAPIs: ElectronAPIs;
private allElectronAPIsExist: boolean = false; private allElectronAPIsExist: boolean = false;
constructor() { constructor() {
this.electronAPIs = runningInBrowser() && window['ElectronAPIs']; this.electronAPIs = globalThis['ElectronAPIs'];
this.allElectronAPIsExist = !!this.electronAPIs?.openDiskCache; this.allElectronAPIsExist = !!this.electronAPIs?.openDiskCache;
} }
async open(cacheName: string): Promise<Cache> {
async open(cacheName: string): Promise<LimitedCache> {
if (this.allElectronAPIsExist) { if (this.allElectronAPIsExist) {
return await this.electronAPIs.openDiskCache(cacheName); return await this.electronAPIs.openDiskCache(cacheName);
} }
@ -22,4 +23,4 @@ class ElectronCacheService {
} }
} }
export default new ElectronCacheService(); export const ElectronCacheStorage = new ElectronCacheStorageService();

View file

@ -1,12 +1,11 @@
import { ElectronAPIs } from 'types/electron'; import { ElectronAPIs } from 'types/electron';
import { runningInBrowser } from 'utils/common';
import { logError } from 'utils/sentry'; import { logError } from 'utils/sentry';
class SafeStorageService { class SafeStorageService {
private electronAPIs: ElectronAPIs; private electronAPIs: ElectronAPIs;
private allElectronAPIsExist: boolean = false; private allElectronAPIsExist: boolean = false;
constructor() { constructor() {
this.electronAPIs = runningInBrowser() && window['ElectronAPIs']; this.electronAPIs = globalThis['ElectronAPIs'];
this.allElectronAPIsExist = !!this.electronAPIs?.getEncryptionKey; this.allElectronAPIsExist = !!this.electronAPIs?.getEncryptionKey;
} }

12
src/services/events.ts
View file

@ -0,0 +1,12 @@
import { EventEmitter } from 'eventemitter3';
// When registering event handlers,
// handle errors inside the handler to avoid unhandled rejections or errors propagating back to the emit call
export enum Events {
LOGOUT = 'logout',
FILE_UPLOADED = 'fileUploaded',
LOCAL_FILES_UPDATED = 'localFilesUpdated',
}
export const eventBus = new EventEmitter<Events>();
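
Per the comment above, each listener should catch its own failures so a bad handler neither rejects nor breaks the emit call; a minimal subscription sketch mirroring the LOGOUT handler added in _app.tsx:

import { eventBus, Events } from 'services/events';
import { logError } from 'utils/sentry';

// Sketch: react to the local files table changing, swallowing handler errors.
eventBus.on(Events.LOCAL_FILES_UPDATED, () => {
    try {
        // e.g. schedule an ML sync for the newly added files
    } catch (e) {
        logError(e, 'LOCAL_FILES_UPDATED handler failed');
    }
});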

View file

@ -32,7 +32,6 @@ import { EnteFile } from 'types/file';
import { decodeMotionPhoto } from './motionPhotoService'; import { decodeMotionPhoto } from './motionPhotoService';
import { import {
fileNameWithoutExtension,
generateStreamFromArrayBuffer, generateStreamFromArrayBuffer,
getFileExtension, getFileExtension,
mergeMetadata, mergeMetadata,
@ -466,8 +465,7 @@ class ExportService {
collectionPath: string collectionPath: string
) { ) {
const fileBlob = await new Response(fileStream).blob(); const fileBlob = await new Response(fileStream).blob();
const originalName = fileNameWithoutExtension(file.metadata.title); const motionPhoto = await decodeMotionPhoto(file, fileBlob);
const motionPhoto = await decodeMotionPhoto(fileBlob, originalName);
const imageStream = generateStreamFromArrayBuffer(motionPhoto.image); const imageStream = generateStreamFromArrayBuffer(motionPhoto.image);
const imageSaveName = getUniqueFileSaveName( const imageSaveName = getUniqueFileSaveName(
collectionPath, collectionPath,

View file

@ -11,6 +11,7 @@ import {
mergeMetadata, mergeMetadata,
sortFiles, sortFiles,
} from 'utils/file'; } from 'utils/file';
import { eventBus, Events } from './events';
import { EnteFile, EncryptedEnteFile, TrashRequest } from 'types/file'; import { EnteFile, EncryptedEnteFile, TrashRequest } from 'types/file';
import { SetFiles } from 'types/gallery'; import { SetFiles } from 'types/gallery';
import { MAX_TRASH_BATCH_SIZE } from 'constants/file'; import { MAX_TRASH_BATCH_SIZE } from 'constants/file';
@ -36,6 +37,11 @@ export const getLocalFiles = async () => {
const setLocalFiles = async (files: EnteFile[]) => { const setLocalFiles = async (files: EnteFile[]) => {
try { try {
await localForage.setItem(FILES_TABLE, files); await localForage.setItem(FILES_TABLE, files);
try {
eventBus.emit(Events.LOCAL_FILES_UPDATED);
} catch (e) {
logError(e, 'Error in localFileUpdated handlers');
}
} catch (e1) { } catch (e1) {
try { try {
const storageEstimate = await navigator.storage.estimate(); const storageEstimate = await navigator.storage.estimate();

View file

@ -0,0 +1,25 @@
import {
FaceAlignment,
FaceAlignmentMethod,
FaceAlignmentService,
FaceDetection,
Versioned,
} from 'types/machineLearning';
import { getArcfaceAlignment } from 'utils/machineLearning/faceAlign';
class ArcfaceAlignmentService implements FaceAlignmentService {
public method: Versioned<FaceAlignmentMethod>;
constructor() {
this.method = {
value: 'ArcFace',
version: 1,
};
}
public getFaceAlignment(faceDetection: FaceDetection): FaceAlignment {
return getArcfaceAlignment(faceDetection);
}
}
export default new ArcfaceAlignmentService();

View file

@ -0,0 +1,34 @@
import {
FaceCrop,
FaceCropConfig,
FaceCropMethod,
FaceCropService,
FaceDetection,
Versioned,
} from 'types/machineLearning';
import { getArcfaceAlignment } from 'utils/machineLearning/faceAlign';
import { getFaceCrop } from 'utils/machineLearning/faceCrop';
class ArcFaceCropService implements FaceCropService {
public method: Versioned<FaceCropMethod>;
constructor() {
this.method = {
value: 'ArcFace',
version: 1,
};
}
public async getFaceCrop(
imageBitmap: ImageBitmap,
faceDetection: FaceDetection,
config: FaceCropConfig
): Promise<FaceCrop> {
const alignedFace = getArcfaceAlignment(faceDetection);
const faceCrop = getFaceCrop(imageBitmap, alignedFace, config);
return faceCrop;
}
}
export default new ArcFaceCropService();

View file

@ -0,0 +1,252 @@
import {
load as blazeFaceLoad,
BlazeFaceModel,
NormalizedFace,
} from 'blazeface-back';
import * as tf from '@tensorflow/tfjs-core';
import { GraphModel } from '@tensorflow/tfjs-converter';
import {
FaceDetection,
FaceDetectionMethod,
FaceDetectionService,
Versioned,
} from 'types/machineLearning';
import { Box, Point } from '../../../thirdparty/face-api/classes';
import { addPadding, crop, resizeToSquare } from 'utils/image';
import {
computeTransformToBox,
transformBox,
transformPoints,
} from 'utils/machineLearning/transform';
import { enlargeBox, newBox, normFaceBox } from 'utils/machineLearning';
import {
getNearestDetection,
removeDuplicateDetections,
transformPaddedToImage,
} from 'utils/machineLearning/faceDetection';
import {
BLAZEFACE_FACE_SIZE,
BLAZEFACE_INPUT_SIZE,
BLAZEFACE_IOU_THRESHOLD,
BLAZEFACE_MAX_FACES,
BLAZEFACE_PASS1_SCORE_THRESHOLD,
BLAZEFACE_SCORE_THRESHOLD,
MAX_FACE_DISTANCE_PERCENT,
} from 'constants/machineLearning/config';
import { addLogLine } from 'utils/logging';
class BlazeFaceDetectionService implements FaceDetectionService {
private blazeFaceModel: Promise<BlazeFaceModel>;
private blazeFaceBackModel: GraphModel;
public method: Versioned<FaceDetectionMethod>;
private desiredLeftEye = [0.36, 0.45];
private desiredFaceSize;
public constructor(desiredFaceSize: number = BLAZEFACE_FACE_SIZE) {
this.method = {
value: 'BlazeFace',
version: 1,
};
this.desiredFaceSize = desiredFaceSize;
}
private async init() {
this.blazeFaceModel = blazeFaceLoad({
maxFaces: BLAZEFACE_MAX_FACES,
scoreThreshold: BLAZEFACE_PASS1_SCORE_THRESHOLD,
iouThreshold: BLAZEFACE_IOU_THRESHOLD,
modelUrl: '/models/blazeface/back/model.json',
inputHeight: BLAZEFACE_INPUT_SIZE,
inputWidth: BLAZEFACE_INPUT_SIZE,
});
addLogLine(
'loaded blazeFaceModel: ',
// await this.blazeFaceModel,
// eslint-disable-next-line @typescript-eslint/await-thenable
await tf.getBackend()
);
}
private getDlibAlignedFace(normFace: NormalizedFace): Box {
const relX = 0.5;
const relY = 0.43;
const relScale = 0.45;
const leftEyeCenter = normFace.landmarks[0];
const rightEyeCenter = normFace.landmarks[1];
const mountCenter = normFace.landmarks[3];
const distToMouth = (pt) => {
const dy = mountCenter[1] - pt[1];
const dx = mountCenter[0] - pt[0];
return Math.sqrt(dx * dx + dy * dy);
};
const eyeToMouthDist =
(distToMouth(leftEyeCenter) + distToMouth(rightEyeCenter)) / 2;
const size = Math.floor(eyeToMouthDist / relScale);
const center = [
(leftEyeCenter[0] + rightEyeCenter[0] + mountCenter[0]) / 3,
(leftEyeCenter[1] + rightEyeCenter[1] + mountCenter[1]) / 3,
];
const left = center[0] - relX * size;
const top = center[1] - relY * size;
const right = center[0] + relX * size;
const bottom = center[1] + relY * size;
return new Box({
left: left,
top: top,
right: right,
bottom: bottom,
});
}
private getAlignedFace(normFace: NormalizedFace): Box {
const leftEye = normFace.landmarks[0];
const rightEye = normFace.landmarks[1];
// const noseTip = normFace.landmarks[2];
const dy = rightEye[1] - leftEye[1];
const dx = rightEye[0] - leftEye[0];
const desiredRightEyeX = 1.0 - this.desiredLeftEye[0];
// const eyesCenterX = (leftEye[0] + rightEye[0]) / 2;
// const yaw = Math.abs(noseTip[0] - eyesCenterX)
const dist = Math.sqrt(dx * dx + dy * dy);
let desiredDist = desiredRightEyeX - this.desiredLeftEye[0];
desiredDist *= this.desiredFaceSize;
const scale = desiredDist / dist;
// addLogLine("scale: ", scale);
const eyesCenter = [];
eyesCenter[0] = Math.floor((leftEye[0] + rightEye[0]) / 2);
eyesCenter[1] = Math.floor((leftEye[1] + rightEye[1]) / 2);
// addLogLine("eyesCenter: ", eyesCenter);
const faceWidth = this.desiredFaceSize / scale;
const faceHeight = this.desiredFaceSize / scale;
// addLogLine("faceWidth: ", faceWidth, "faceHeight: ", faceHeight)
const tx = eyesCenter[0] - faceWidth * 0.5;
const ty = eyesCenter[1] - faceHeight * this.desiredLeftEye[1];
// addLogLine("tx: ", tx, "ty: ", ty);
return new Box({
left: tx,
top: ty,
right: tx + faceWidth,
bottom: ty + faceHeight,
});
}
public async detectFacesUsingModel(image: tf.Tensor3D) {
const resizedImage = tf.image.resizeBilinear(image, [256, 256]);
const reshapedImage = tf.reshape(resizedImage, [
1,
resizedImage.shape[0],
resizedImage.shape[1],
3,
]);
const normalizedImage = tf.sub(tf.div(reshapedImage, 127.5), 1.0);
// eslint-disable-next-line @typescript-eslint/await-thenable
const results = await this.blazeFaceBackModel.predict(normalizedImage);
// addLogLine('onFacesDetected: ', results);
return results;
}
private async getBlazefaceModel() {
if (!this.blazeFaceModel) {
await this.init();
}
return this.blazeFaceModel;
}
private async estimateFaces(
imageBitmap: ImageBitmap
): Promise<Array<FaceDetection>> {
const resized = resizeToSquare(imageBitmap, BLAZEFACE_INPUT_SIZE);
const tfImage = tf.browser.fromPixels(resized.image);
const blazeFaceModel = await this.getBlazefaceModel();
// TODO: check if this works concurrently, else use serialqueue
const faces = await blazeFaceModel.estimateFaces(tfImage);
tf.dispose(tfImage);
const inBox = newBox(0, 0, resized.width, resized.height);
const toBox = newBox(0, 0, imageBitmap.width, imageBitmap.height);
const transform = computeTransformToBox(inBox, toBox);
// addLogLine("1st pass: ", { transform });
const faceDetections: Array<FaceDetection> = faces?.map((f) => {
const box = transformBox(normFaceBox(f), transform);
const normLandmarks = (f.landmarks as number[][])?.map(
(l) => new Point(l[0], l[1])
);
const landmarks = transformPoints(normLandmarks, transform);
return {
box,
landmarks,
probability: f.probability as number,
// detectionMethod: this.method,
} as FaceDetection;
});
return faceDetections;
}
public async detectFaces(
imageBitmap: ImageBitmap
): Promise<Array<FaceDetection>> {
const maxFaceDistance = imageBitmap.width * MAX_FACE_DISTANCE_PERCENT;
const pass1Detections = await this.estimateFaces(imageBitmap);
// run 2nd pass for accuracy
const detections: Array<FaceDetection> = [];
for (const pass1Detection of pass1Detections) {
const imageBox = enlargeBox(pass1Detection.box, 2);
const faceImage = crop(
imageBitmap,
imageBox,
BLAZEFACE_INPUT_SIZE / 2
);
const paddedImage = addPadding(faceImage, 0.5);
const paddedBox = enlargeBox(imageBox, 2);
const pass2Detections = await this.estimateFaces(paddedImage);
pass2Detections?.forEach((d) =>
transformPaddedToImage(d, faceImage, imageBox, paddedBox)
);
let selected = pass2Detections?.[0];
if (pass2Detections?.length > 1) {
// addLogLine('2nd pass >1 face', pass2Detections.length);
selected = getNearestDetection(
pass1Detection,
pass2Detections
// maxFaceDistance
);
}
// we might miss a 1st pass face that actually has a score within the threshold
// that is ok, as the results will be consistent with 2nd-pass-only detections
if (selected && selected.probability >= BLAZEFACE_SCORE_THRESHOLD) {
// addLogLine("pass2: ", { imageBox, paddedBox, transform, selected });
detections.push(selected);
}
}
return removeDuplicateDetections(detections, maxFaceDistance);
}
public async dispose() {
const blazeFaceModel = await this.getBlazefaceModel();
blazeFaceModel?.dispose();
this.blazeFaceModel = undefined;
}
}
export default new BlazeFaceDetectionService();
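
A hedged usage sketch of the two-pass detector above (the module path and the blob source are assumptions; detectFaces itself is as defined in this file):

import blazeFaceDetectionService from 'services/machineLearning/blazeFaceDetectionService';
import { FaceDetection } from 'types/machineLearning';

// Sketch: decode an image blob and run both detection passes on it.
async function detectFacesInBlob(imageBlob: Blob): Promise<FaceDetection[]> {
    const imageBitmap = await createImageBitmap(imageBlob);
    try {
        return await blazeFaceDetectionService.detectFaces(imageBitmap);
    } finally {
        imageBitmap.close();
    }
}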

View file

@ -0,0 +1,88 @@
import { DBSCAN, OPTICS, KMEANS } from 'density-clustering';
import {
ClusteringConfig,
ClusteringInput,
ClusteringMethod,
ClusteringResults,
HdbscanResults,
Versioned,
} from 'types/machineLearning';
import { Hdbscan } from 'hdbscan';
import { HdbscanInput } from 'hdbscan/dist/types';
class ClusteringService {
private dbscan: DBSCAN;
private optics: OPTICS;
private kmeans: KMEANS;
constructor() {
this.dbscan = new DBSCAN();
this.optics = new OPTICS();
this.kmeans = new KMEANS();
}
public clusterUsingDBSCAN(
dataset: Array<Array<number>>,
epsilon: number = 1.0,
minPts: number = 2
): ClusteringResults {
// addLogLine("distanceFunction", DBSCAN._);
const clusters = this.dbscan.run(dataset, epsilon, minPts);
const noise = this.dbscan.noise;
return { clusters, noise };
}
public clusterUsingOPTICS(
dataset: Array<Array<number>>,
epsilon: number = 1.0,
minPts: number = 2
) {
const clusters = this.optics.run(dataset, epsilon, minPts);
return { clusters, noise: [] };
}
public clusterUsingKMEANS(
dataset: Array<Array<number>>,
numClusters: number = 5
) {
const clusters = this.kmeans.run(dataset, numClusters);
return { clusters, noise: [] };
}
public clusterUsingHdbscan(hdbscanInput: HdbscanInput): HdbscanResults {
if (hdbscanInput.input.length < 10) {
throw Error('too few samples to run Hdbscan');
}
const hdbscan = new Hdbscan(hdbscanInput);
const clusters = hdbscan.getClusters();
const noise = hdbscan.getNoise();
const debugInfo = hdbscan.getDebugInfo();
return { clusters, noise, debugInfo };
}
public cluster(
method: Versioned<ClusteringMethod>,
input: ClusteringInput,
config: ClusteringConfig
) {
if (method.value === 'Hdbscan') {
return this.clusterUsingHdbscan({
input,
minClusterSize: config.minClusterSize,
debug: config.generateDebugInfo,
});
} else if (method.value === 'Dbscan') {
return this.clusterUsingDBSCAN(
input,
config.maxDistanceInsideCluster,
config.minClusterSize
);
} else {
throw Error('Unknown clustering method: ' + method.value);
}
}
}
export default ClusteringService;

View file

@ -0,0 +1,37 @@
import { DBSCAN } from 'density-clustering';
import {
ClusteringConfig,
ClusteringInput,
ClusteringMethod,
ClusteringService,
HdbscanResults,
Versioned,
} from 'types/machineLearning';
class DbscanClusteringService implements ClusteringService {
public method: Versioned<ClusteringMethod>;
constructor() {
this.method = {
value: 'Dbscan',
version: 1,
};
}
public async cluster(
input: ClusteringInput,
config: ClusteringConfig
): Promise<HdbscanResults> {
// addLogLine('Clustering input: ', input);
const dbscan = new DBSCAN();
const clusters = dbscan.run(
input,
config.clusterSelectionEpsilon,
config.minClusterSize
);
const noise = dbscan.noise;
return { clusters, noise };
}
}
export default new DbscanClusteringService();

View file

@ -0,0 +1,239 @@
import {
MLSyncContext,
MLSyncFileContext,
DetectedFace,
Face,
} from 'types/machineLearning';
import { addLogLine } from 'utils/logging';
import {
isDifferentOrOld,
getFaceId,
areFaceIdsSame,
extractFaceImages,
} from 'utils/machineLearning';
import { storeFaceCrop } from 'utils/machineLearning/faceCrop';
import mlIDbStorage from 'utils/storage/mlIDbStorage';
import ReaderService from './readerService';
class FaceService {
async syncFileFaceDetections(
syncContext: MLSyncContext,
fileContext: MLSyncFileContext
) {
const { oldMlFile, newMlFile } = fileContext;
if (
!isDifferentOrOld(
oldMlFile?.faceDetectionMethod,
syncContext.faceDetectionService.method
) &&
oldMlFile?.imageSource === syncContext.config.imageSource
) {
newMlFile.faces = oldMlFile?.faces?.map((existingFace) => ({
id: existingFace.id,
fileId: existingFace.fileId,
detection: existingFace.detection,
}));
newMlFile.imageSource = oldMlFile.imageSource;
newMlFile.imageDimensions = oldMlFile.imageDimensions;
newMlFile.faceDetectionMethod = oldMlFile.faceDetectionMethod;
return;
}
newMlFile.faceDetectionMethod = syncContext.faceDetectionService.method;
fileContext.newDetection = true;
const imageBitmap = await ReaderService.getImageBitmap(
syncContext,
fileContext
);
const faceDetections =
await syncContext.faceDetectionService.detectFaces(imageBitmap);
// addLogLine('3 TF Memory stats: ',JSON.stringify(tf.memory()));
// TODO: reenable faces filtering based on width
const detectedFaces = faceDetections?.map((detection) => {
return {
fileId: fileContext.enteFile.id,
detection,
} as DetectedFace;
});
newMlFile.faces = detectedFaces?.map((detectedFace) => ({
...detectedFace,
id: getFaceId(detectedFace, newMlFile.imageDimensions),
}));
// ?.filter((f) =>
// f.box.width > syncContext.config.faceDetection.minFaceSize
// );
addLogLine('[MLService] Detected Faces: ', newMlFile.faces?.length);
}
async syncFileFaceCrops(
syncContext: MLSyncContext,
fileContext: MLSyncFileContext
) {
const { oldMlFile, newMlFile } = fileContext;
if (
// !syncContext.config.faceCrop.enabled ||
!fileContext.newDetection &&
!isDifferentOrOld(
oldMlFile?.faceCropMethod,
syncContext.faceCropService.method
) &&
areFaceIdsSame(newMlFile.faces, oldMlFile?.faces)
) {
for (const [index, face] of newMlFile.faces.entries()) {
face.crop = oldMlFile.faces[index].crop;
}
newMlFile.faceCropMethod = oldMlFile.faceCropMethod;
return;
}
const imageBitmap = await ReaderService.getImageBitmap(
syncContext,
fileContext
);
newMlFile.faceCropMethod = syncContext.faceCropService.method;
for (const face of newMlFile.faces) {
await this.saveFaceCrop(imageBitmap, face, syncContext);
}
}
async syncFileFaceAlignments(
syncContext: MLSyncContext,
fileContext: MLSyncFileContext
) {
const { oldMlFile, newMlFile } = fileContext;
if (
!fileContext.newDetection &&
!isDifferentOrOld(
oldMlFile?.faceAlignmentMethod,
syncContext.faceAlignmentService.method
) &&
areFaceIdsSame(newMlFile.faces, oldMlFile?.faces)
) {
for (const [index, face] of newMlFile.faces.entries()) {
face.alignment = oldMlFile.faces[index].alignment;
}
newMlFile.faceAlignmentMethod = oldMlFile.faceAlignmentMethod;
return;
}
newMlFile.faceAlignmentMethod = syncContext.faceAlignmentService.method;
fileContext.newAlignment = true;
for (const face of newMlFile.faces) {
face.alignment = syncContext.faceAlignmentService.getFaceAlignment(
face.detection
);
}
addLogLine('[MLService] alignedFaces: ', newMlFile.faces?.length);
// addLogLine('4 TF Memory stats: ',JSON.stringify(tf.memory()));
}
async syncFileFaceEmbeddings(
syncContext: MLSyncContext,
fileContext: MLSyncFileContext
) {
const { oldMlFile, newMlFile } = fileContext;
if (
!fileContext.newAlignment &&
!isDifferentOrOld(
oldMlFile?.faceEmbeddingMethod,
syncContext.faceEmbeddingService.method
) &&
areFaceIdsSame(newMlFile.faces, oldMlFile?.faces)
) {
for (const [index, face] of newMlFile.faces.entries()) {
face.embedding = oldMlFile.faces[index].embedding;
}
newMlFile.faceEmbeddingMethod = oldMlFile.faceEmbeddingMethod;
return;
}
newMlFile.faceEmbeddingMethod = syncContext.faceEmbeddingService.method;
// TODO: when not storing face crops, image will be needed to extract faces
// fileContext.imageBitmap ||
// (await this.getImageBitmap(syncContext, fileContext));
const faceImages = await extractFaceImages(
newMlFile.faces,
syncContext.faceEmbeddingService.faceSize
);
const embeddings =
await syncContext.faceEmbeddingService.getFaceEmbeddings(
faceImages
);
faceImages.forEach((faceImage) => faceImage.close());
newMlFile.faces.forEach((f, i) => (f.embedding = embeddings[i]));
addLogLine('[MLService] facesWithEmbeddings: ', newMlFile.faces.length);
// addLogLine('5 TF Memory stats: ',JSON.stringify(tf.memory()));
}
async saveFaceCrop(
imageBitmap: ImageBitmap,
face: Face,
syncContext: MLSyncContext
) {
const faceCrop = await syncContext.faceCropService.getFaceCrop(
imageBitmap,
face.detection,
syncContext.config.faceCrop
);
face.crop = await storeFaceCrop(
face.id,
faceCrop,
syncContext.config.faceCrop.blobOptions
);
faceCrop.image.close();
}
async getAllSyncedFacesMap(syncContext: MLSyncContext) {
if (syncContext.allSyncedFacesMap) {
return syncContext.allSyncedFacesMap;
}
syncContext.allSyncedFacesMap = await mlIDbStorage.getAllFacesMap();
return syncContext.allSyncedFacesMap;
}
public async runFaceClustering(
syncContext: MLSyncContext,
allFaces: Array<Face>
) {
// await this.init();
const clusteringConfig = syncContext.config.faceClustering;
if (!allFaces || allFaces.length < clusteringConfig.minInputSize) {
addLogLine(
'[MLService] Too few faces to cluster, not running clustering: ',
allFaces.length
);
return;
}
addLogLine('Running clustering allFaces: ', allFaces.length);
syncContext.mlLibraryData.faceClusteringResults =
await syncContext.faceClusteringService.cluster(
allFaces.map((f) => Array.from(f.embedding)),
syncContext.config.faceClustering
);
syncContext.mlLibraryData.faceClusteringMethod =
syncContext.faceClusteringService.method;
addLogLine(
'[MLService] Got face clustering results: ',
JSON.stringify(syncContext.mlLibraryData.faceClusteringResults)
);
// syncContext.faceClustersWithNoise = {
// clusters: syncContext.faceClusteringResults.clusters.map(
// (faces) => ({
// faces,
// })
// ),
// noise: syncContext.faceClusteringResults.noise,
// };
}
}
export default new FaceService();
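
The four syncFileFace* steps above are presumably chained per file by the ML sync orchestrator (not part of this excerpt), each step skipping work when its method and inputs are unchanged; a hedged sketch of that ordering:

import FaceService from 'services/machineLearning/faceService';
import { MLSyncContext, MLSyncFileContext } from 'types/machineLearning';

// Sketch of the assumed per-file orchestration: detect first, then crop, align
// and embed only when faces were found. The real caller may differ.
async function syncFileFaces(
    syncContext: MLSyncContext,
    fileContext: MLSyncFileContext
) {
    await FaceService.syncFileFaceDetections(syncContext, fileContext);
    if (fileContext.newMlFile.faces?.length) {
        await FaceService.syncFileFaceCrops(syncContext, fileContext);
        await FaceService.syncFileFaceAlignments(syncContext, fileContext);
        await FaceService.syncFileFaceEmbeddings(syncContext, fileContext);
    }
}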

View file

@ -0,0 +1,44 @@
import { Hdbscan } from 'hdbscan';
import {
ClusteringConfig,
ClusteringInput,
ClusteringMethod,
ClusteringService,
HdbscanResults,
Versioned,
} from 'types/machineLearning';
class HdbscanClusteringService implements ClusteringService {
public method: Versioned<ClusteringMethod>;
constructor() {
this.method = {
value: 'Hdbscan',
version: 1,
};
}
public async cluster(
input: ClusteringInput,
config: ClusteringConfig
): Promise<HdbscanResults> {
// addLogLine('Clustering input: ', input);
const hdbscan = new Hdbscan({
input,
minClusterSize: config.minClusterSize,
minSamples: config.minSamples,
clusterSelectionEpsilon: config.clusterSelectionEpsilon,
clusterSelectionMethod: config.clusterSelectionMethod,
debug: config.generateDebugInfo,
});
return {
clusters: hdbscan.getClusters(),
noise: hdbscan.getNoise(),
debugInfo: hdbscan.getDebugInfo(),
};
}
}
export default new HdbscanClusteringService();
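
A usage sketch of the clustering wrapper above, wired to the faceClustering block of DEFAULT_ML_SYNC_CONFIG the way FaceService.runFaceClustering does (the module path is assumed):

import hdbscanClusteringService from 'services/machineLearning/hdbscanClusteringService';
import { DEFAULT_ML_SYNC_CONFIG } from 'constants/machineLearning/config';

// Sketch: cluster face embeddings (one number[] per face) with the default config.
async function clusterFaceEmbeddings(embeddings: Array<Array<number>>) {
    const { clusters, noise } = await hdbscanClusteringService.cluster(
        embeddings,
        DEFAULT_ML_SYNC_CONFIG.faceClustering
    );
    return { clusters, noise };
}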

View file

@ -0,0 +1,111 @@
import * as tf from '@tensorflow/tfjs-core';
import * as tfjsConverter from '@tensorflow/tfjs-converter';
import {
ObjectDetection,
SceneDetectionMethod,
SceneDetectionService,
Versioned,
} from 'types/machineLearning';
import { SCENE_DETECTION_IMAGE_SIZE } from 'constants/machineLearning/config';
import { resizeToSquare } from 'utils/image';
import { addLogLine } from 'utils/logging';
class ImageScene implements SceneDetectionService {
method: Versioned<SceneDetectionMethod>;
private model: tfjsConverter.GraphModel;
private sceneMap: { [key: string]: string };
private ready: Promise<void>;
private workerID: number;
public constructor() {
this.method = {
value: 'ImageScene',
version: 1,
};
this.workerID = Math.round(Math.random() * 1000);
}
private async init() {
addLogLine(`[${this.workerID}]`, 'ImageScene init called');
if (this.model) {
return;
}
this.sceneMap = await (
await fetch('/models/imagescene/sceneMap.json')
).json();
this.model = await tfjsConverter.loadGraphModel(
'/models/imagescene/model.json'
);
addLogLine(
`[${this.workerID}]`,
'loaded ImageScene model',
tf.getBackend()
);
tf.tidy(() => {
const zeroTensor = tf.zeros([1, 224, 224, 3]);
// warmup the model
this.model.predict(zeroTensor) as tf.Tensor;
});
}
private async getImageSceneModel() {
addLogLine(
`[${this.workerID}]`,
'ImageScene getImageSceneModel called'
);
if (!this.ready) {
this.ready = this.init();
}
await this.ready;
return this.model;
}
async detectScenes(image: ImageBitmap, minScore: number) {
const resized = resizeToSquare(image, SCENE_DETECTION_IMAGE_SIZE);
const model = await this.getImageSceneModel();
const output = tf.tidy(() => {
const tfImage = tf.browser.fromPixels(resized.image);
const input = tf.expandDims(tf.cast(tfImage, 'float32'));
const output = model.predict(input) as tf.Tensor;
return output;
});
const data = (await output.data()) as Float32Array;
output.dispose();
const scenes = this.parseSceneDetectionResult(
data,
minScore,
image.width,
image.height
);
return scenes;
}
private parseSceneDetectionResult(
outputData: Float32Array,
minScore: number,
width: number,
height: number
): ObjectDetection[] {
const scenes = [];
for (let i = 0; i < outputData.length; i++) {
if (outputData[i] >= minScore) {
scenes.push({
class: this.sceneMap[i.toString()],
score: outputData[i],
bbox: [0, 0, width, height],
});
}
}
return scenes;
}
}
export default new ImageScene();
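
A hedged usage sketch of the scene detector above, using the default minScore from DEFAULT_ML_SYNC_CONFIG (the module path is assumed):

import imageSceneService from 'services/machineLearning/imageSceneService';
import { DEFAULT_ML_SYNC_CONFIG } from 'constants/machineLearning/config';

// Sketch: run scene detection on a decoded image; results carry class, score
// and a bbox spanning the whole image, as built in parseSceneDetectionResult.
async function detectScenesInImage(imageBitmap: ImageBitmap) {
    return await imageSceneService.detectScenes(
        imageBitmap,
        DEFAULT_ML_SYNC_CONFIG.sceneDetection.minScore
    );
}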

View file

@ -0,0 +1,252 @@
import PQueue from 'p-queue';
import { EnteFile } from 'types/file';
import {
Face,
FaceAlignmentMethod,
FaceAlignmentService,
FaceCropMethod,
FaceCropService,
FaceDetectionMethod,
FaceDetectionService,
FaceEmbeddingMethod,
FaceEmbeddingService,
MLSyncConfig,
MLSyncContext,
ClusteringMethod,
ClusteringService,
MLLibraryData,
ObjectDetectionService,
ObjectDetectionMethod,
TextDetectionMethod,
TextDetectionService,
SceneDetectionService,
SceneDetectionMethod,
} from 'types/machineLearning';
import { getConcurrency } from 'utils/common/concurrency';
import { logQueueStats } from 'utils/machineLearning';
import arcfaceAlignmentService from './arcfaceAlignmentService';
import arcfaceCropService from './arcfaceCropService';
import hdbscanClusteringService from './hdbscanClusteringService';
import blazeFaceDetectionService from './blazeFaceDetectionService';
import mobileFaceNetEmbeddingService from './mobileFaceNetEmbeddingService';
import dbscanClusteringService from './dbscanClusteringService';
import ssdMobileNetV2Service from './ssdMobileNetV2Service';
import tesseractService from './tesseractService';
import imageSceneService from './imageSceneService';
import { getDedicatedCryptoWorker } from 'utils/comlink/ComlinkCryptoWorker';
import { ComlinkWorker } from 'utils/comlink/comlinkWorker';
import { DedicatedCryptoWorker } from 'worker/crypto.worker';
import { addLogLine } from 'utils/logging';
export class MLFactory {
public static getFaceDetectionService(
method: FaceDetectionMethod
): FaceDetectionService {
if (method === 'BlazeFace') {
return blazeFaceDetectionService;
}
throw Error('Unknown face detection method: ' + method);
}
public static getObjectDetectionService(
method: ObjectDetectionMethod
): ObjectDetectionService {
if (method === 'SSDMobileNetV2') {
return ssdMobileNetV2Service;
}
throw Error('Unknown object detection method: ' + method);
}
public static getSceneDetectionService(
method: SceneDetectionMethod
): SceneDetectionService {
if (method === 'ImageScene') {
return imageSceneService;
}
throw Error('Unknown scene detection method: ' + method);
}
public static getTextDetectionService(
method: TextDetectionMethod
): TextDetectionService {
if (method === 'Tesseract') {
return tesseractService;
}
throw Error('Unknown text detection method: ' + method);
}
public static getFaceCropService(method: FaceCropMethod): FaceCropService {
if (method === 'ArcFace') {
return arcfaceCropService;
}
throw Error('Unknown face crop method: ' + method);
}
public static getFaceAlignmentService(
method: FaceAlignmentMethod
): FaceAlignmentService {
if (method === 'ArcFace') {
return arcfaceAlignmentService;
}
throw Error('Unknown face alignment method: ' + method);
}
public static getFaceEmbeddingService(
method: FaceEmbeddingMethod
): FaceEmbeddingService {
if (method === 'MobileFaceNet') {
return mobileFaceNetEmbeddingService;
}
throw Error('Unknown face embedding method: ' + method);
}
public static getClusteringService(
method: ClusteringMethod
): ClusteringService {
if (method === 'Hdbscan') {
return hdbscanClusteringService;
}
if (method === 'Dbscan') {
return dbscanClusteringService;
}
throw Error('Unknown clustering method: ' + method);
}
public static getMLSyncContext(
token: string,
userID: number,
config: MLSyncConfig,
shouldUpdateMLVersion: boolean = true
) {
return new LocalMLSyncContext(
token,
userID,
config,
shouldUpdateMLVersion
);
}
}
export class LocalMLSyncContext implements MLSyncContext {
public token: string;
public userID: number;
public config: MLSyncConfig;
public shouldUpdateMLVersion: boolean;
public faceDetectionService: FaceDetectionService;
public faceCropService: FaceCropService;
public faceAlignmentService: FaceAlignmentService;
public faceEmbeddingService: FaceEmbeddingService;
public faceClusteringService: ClusteringService;
public objectDetectionService: ObjectDetectionService;
public sceneDetectionService: SceneDetectionService;
public textDetectionService: TextDetectionService;
public localFilesMap: Map<number, EnteFile>;
public outOfSyncFiles: EnteFile[];
public nSyncedFiles: number;
public nSyncedFaces: number;
public allSyncedFacesMap?: Map<number, Array<Face>>;
public tsne?: any;
public error?: Error;
public mlLibraryData: MLLibraryData;
public syncQueue: PQueue;
// TODO: whether to limit concurrent downloads
// private downloadQueue: PQueue;
private concurrency: number;
private comlinkCryptoWorker: Array<
ComlinkWorker<typeof DedicatedCryptoWorker>
>;
private enteWorkers: Array<any>;
constructor(
token: string,
userID: number,
config: MLSyncConfig,
shouldUpdateMLVersion: boolean = true,
concurrency?: number
) {
this.token = token;
this.userID = userID;
this.config = config;
this.shouldUpdateMLVersion = shouldUpdateMLVersion;
this.faceDetectionService = MLFactory.getFaceDetectionService(
this.config.faceDetection.method
);
this.faceCropService = MLFactory.getFaceCropService(
this.config.faceCrop.method
);
this.faceAlignmentService = MLFactory.getFaceAlignmentService(
this.config.faceAlignment.method
);
this.faceEmbeddingService = MLFactory.getFaceEmbeddingService(
this.config.faceEmbedding.method
);
this.faceClusteringService = MLFactory.getClusteringService(
this.config.faceClustering.method
);
this.objectDetectionService = MLFactory.getObjectDetectionService(
this.config.objectDetection.method
);
this.sceneDetectionService = MLFactory.getSceneDetectionService(
this.config.sceneDetection.method
);
this.textDetectionService = MLFactory.getTextDetectionService(
this.config.textDetection.method
);
this.outOfSyncFiles = [];
this.nSyncedFiles = 0;
this.nSyncedFaces = 0;
this.concurrency = concurrency || getConcurrency();
addLogLine('Using concurrency: ', this.concurrency);
// timeout is added on downloads
// timeout on queue will keep the operation open till worker is terminated
this.syncQueue = new PQueue({ concurrency: this.concurrency });
logQueueStats(this.syncQueue, 'sync');
// this.downloadQueue = new PQueue({ concurrency: 1 });
// logQueueStats(this.downloadQueue, 'download');
this.comlinkCryptoWorker = new Array(this.concurrency);
this.enteWorkers = new Array(this.concurrency);
}
public async getEnteWorker(id: number): Promise<any> {
const wid = id % this.enteWorkers.length;
if (!this.enteWorkers[wid]) {
this.comlinkCryptoWorker[wid] = getDedicatedCryptoWorker();
this.enteWorkers[wid] = await this.comlinkCryptoWorker[wid].remote;
}
return this.enteWorkers[wid];
}
public async dispose() {
// await this.faceDetectionService.dispose();
// await this.faceEmbeddingService.dispose();
this.localFilesMap = undefined;
await this.syncQueue.onIdle();
this.syncQueue.removeAllListeners();
for (const enteComlinkWorker of this.comlinkCryptoWorker) {
enteComlinkWorker?.terminate();
}
}
}
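A sketch of how a sync context built by this factory is typically created and torn down, mirroring the pattern used in machineLearningService.ts; the import paths match the ones used above, but the surrounding function is illustrative.
// Illustrative sketch; not part of the diff above.
import { getMLSyncConfig } from 'utils/machineLearning/config';
import { MLFactory } from './machineLearningFactory';

async function withSyncContext(token: string, userID: number) {
    const config = await getMLSyncConfig();
    const syncContext = MLFactory.getMLSyncContext(token, userID, config, true);
    try {
        // ...schedule per-file work on syncContext.syncQueue here...
    } finally {
        // Waits for the queue to go idle and terminates the pooled crypto workers.
        await syncContext.dispose();
    }
}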

View file

@ -0,0 +1,597 @@
import { getLocalFiles } from 'services/fileService';
import { EnteFile } from 'types/file';
import * as tf from '@tensorflow/tfjs-core';
import '@tensorflow/tfjs-backend-webgl';
import '@tensorflow/tfjs-backend-cpu';
// import '@tensorflow/tfjs-backend-wasm';
// import { setWasmPaths } from '@tensorflow/tfjs-backend-wasm';
// import '@tensorflow/tfjs-backend-cpu';
import {
MlFileData,
MLSyncContext,
MLSyncFileContext,
MLSyncResult,
} from 'types/machineLearning';
import { toTSNE } from 'utils/machineLearning/visualization';
// import {
// incrementIndexVersion,
// mlFilesStore
// } from 'utils/storage/mlStorage';
import { getAllFacesFromMap } from 'utils/machineLearning';
import { MLFactory } from './machineLearningFactory';
import mlIDbStorage from 'utils/storage/mlIDbStorage';
import { getMLSyncConfig } from 'utils/machineLearning/config';
import { CustomError, parseUploadErrorCodes } from 'utils/error';
import { MAX_ML_SYNC_ERROR_COUNT } from 'constants/machineLearning/config';
import FaceService from './faceService';
import PeopleService from './peopleService';
import ObjectService from './objectService';
// import TextService from './textService';
import ReaderService from './readerService';
import { logError } from 'utils/sentry';
import { addLogLine } from 'utils/logging';
class MachineLearningService {
private initialized = false;
// private faceDetectionService: FaceDetectionService;
// private faceLandmarkService: FAPIFaceLandmarksService;
// private faceAlignmentService: FaceAlignmentService;
// private faceEmbeddingService: FaceEmbeddingService;
// private faceEmbeddingService: FAPIFaceEmbeddingService;
// private clusteringService: ClusteringService;
private localSyncContext: Promise<MLSyncContext>;
private syncContext: Promise<MLSyncContext>;
public constructor() {
// setWasmPaths('/js/tfjs/');
// this.faceDetectionService = new TFJSFaceDetectionService();
// this.faceLandmarkService = new FAPIFaceLandmarksService();
// this.faceAlignmentService = new ArcfaceAlignmentService();
// this.faceEmbeddingService = new TFJSFaceEmbeddingService();
// this.faceEmbeddingService = new FAPIFaceEmbeddingService();
// this.clusteringService = new ClusteringService();
}
public async sync(token: string, userID: number): Promise<MLSyncResult> {
if (!token) {
throw Error('Token needed by ml service to sync file');
}
// await this.init();
// Used to debug tf memory leak, all tf memory
// needs to be cleaned using tf.dispose or tf.tidy
// tf.engine().startScope();
const syncContext = await this.getSyncContext(token, userID);
await this.syncLocalFiles(syncContext);
await this.getOutOfSyncFiles(syncContext);
if (syncContext.outOfSyncFiles.length > 0) {
await this.syncFiles(syncContext);
}
// TODO: index may run before all files are on the latest ml version;
// we may need to consider only files already synced to the latest ml version for indexing
if (
syncContext.outOfSyncFiles.length <= 0 ||
(syncContext.nSyncedFiles === syncContext.config.batchSize &&
Math.random() < 0.2)
) {
await this.syncIndex(syncContext);
}
// tf.engine().endScope();
if (syncContext.config.tsne) {
await this.runTSNE(syncContext);
}
const mlSyncResult: MLSyncResult = {
nOutOfSyncFiles: syncContext.outOfSyncFiles.length,
nSyncedFiles: syncContext.nSyncedFiles,
nSyncedFaces: syncContext.nSyncedFaces,
nFaceClusters:
syncContext.mlLibraryData?.faceClusteringResults?.clusters
.length,
nFaceNoise:
syncContext.mlLibraryData?.faceClusteringResults?.noise.length,
tsne: syncContext.tsne,
error: syncContext.error,
};
// addLogLine('[MLService] sync results: ', mlSyncResult);
// await syncContext.dispose();
addLogLine('Final TF Memory stats: ', JSON.stringify(tf.memory()));
return mlSyncResult;
}
private newMlData(fileId: number) {
return {
fileId,
mlVersion: 0,
errorCount: 0,
} as MlFileData;
}
private async getLocalFilesMap(syncContext: MLSyncContext) {
if (!syncContext.localFilesMap) {
const localFiles = await getLocalFiles();
const personalFiles = localFiles.filter(
(f) => f.ownerID === syncContext.userID
);
syncContext.localFilesMap = new Map<number, EnteFile>();
personalFiles.forEach((f) =>
syncContext.localFilesMap.set(f.id, f)
);
}
return syncContext.localFilesMap;
}
private async syncLocalFiles(syncContext: MLSyncContext) {
const startTime = Date.now();
const localFilesMap = await this.getLocalFilesMap(syncContext);
const db = await mlIDbStorage.db;
const tx = db.transaction('files', 'readwrite');
const mlFileIdsArr = await mlIDbStorage.getAllFileIdsForUpdate(tx);
const mlFileIds = new Set<number>();
mlFileIdsArr.forEach((mlFileId) => mlFileIds.add(mlFileId));
const newFileIds: Array<number> = [];
for (const localFileId of localFilesMap.keys()) {
if (!mlFileIds.has(localFileId)) {
newFileIds.push(localFileId);
}
}
let updated = false;
if (newFileIds.length > 0) {
addLogLine('newFiles: ', newFileIds.length);
const newFiles = newFileIds.map((fileId) => this.newMlData(fileId));
await mlIDbStorage.putAllFiles(newFiles, tx);
updated = true;
}
const removedFileIds: Array<number> = [];
for (const mlFileId of mlFileIds) {
if (!localFilesMap.has(mlFileId)) {
removedFileIds.push(mlFileId);
}
}
if (removedFileIds.length > 0) {
addLogLine('removedFiles: ', removedFileIds.length);
await mlIDbStorage.removeAllFiles(removedFileIds, tx);
updated = true;
}
await tx.done;
if (updated) {
// TODO: should do in same transaction
await mlIDbStorage.incrementIndexVersion('files');
}
addLogLine('syncLocalFiles', Date.now() - startTime, 'ms');
}
// TODO: not required if ml data is stored as field inside ente file object
// remove, not required now
// it removes ml data for files in trash; they will be resynced if restored
// private async syncRemovedFiles(syncContext: MLSyncContext) {
// const db = await mlIDbStorage.db;
// const localFileIdMap = await this.getLocalFilesMap(syncContext);
// const removedFileIds: Array<string> = [];
// await mlFilesStore.iterate((file, idStr) => {
// if (!localFileIdMap.has(parseInt(idStr))) {
// removedFileIds.push(idStr);
// }
// });
// if (removedFileIds.length < 1) {
// return;
// }
// removedFileIds.forEach((fileId) => mlFilesStore.removeItem(fileId));
// addLogLine('Removed local file ids: ', removedFileIds);
// await incrementIndexVersion('files');
// }
private async getOutOfSyncFiles(syncContext: MLSyncContext) {
const startTime = Date.now();
const fileIds = await mlIDbStorage.getFileIds(
syncContext.config.batchSize,
syncContext.config.mlVersion,
MAX_ML_SYNC_ERROR_COUNT
);
addLogLine('fileIds: ', JSON.stringify(fileIds));
const localFilesMap = await this.getLocalFilesMap(syncContext);
syncContext.outOfSyncFiles = fileIds.map((fileId) =>
localFilesMap.get(fileId)
);
addLogLine('getOutOfSyncFiles', Date.now() - startTime, 'ms');
}
// TODO: optimize, use IndexedDB indexes, move face crops to cache to reduce IO
// remove, already done
private async getUniqueOutOfSyncFilesNoIdx(
syncContext: MLSyncContext,
files: EnteFile[]
) {
const limit = syncContext.config.batchSize;
const mlVersion = syncContext.config.mlVersion;
const uniqueFiles: Map<number, EnteFile> = new Map<number, EnteFile>();
for (let i = 0; uniqueFiles.size < limit && i < files.length; i++) {
const mlFileData = await this.getMLFileData(files[i].id);
const mlFileVersion = mlFileData?.mlVersion || 0;
if (
!uniqueFiles.has(files[i].id) &&
(!mlFileData?.errorCount || mlFileData.errorCount < 2) &&
(mlFileVersion < mlVersion ||
syncContext.config.imageSource !== mlFileData.imageSource)
) {
uniqueFiles.set(files[i].id, files[i]);
}
}
return [...uniqueFiles.values()];
}
// private async getOutOfSyncFilesNoIdx(syncContext: MLSyncContext) {
// const existingFilesMap = await this.getLocalFilesMap(syncContext);
// // existingFiles.sort(
// // (a, b) => b.metadata.creationTime - a.metadata.creationTime
// // );
// console.time('getUniqueOutOfSyncFiles');
// syncContext.outOfSyncFiles = await this.getUniqueOutOfSyncFilesNoIdx(
// syncContext,
// [...existingFilesMap.values()]
// );
// addLogLine('getUniqueOutOfSyncFiles');
// addLogLine(
// 'Got unique outOfSyncFiles: ',
// syncContext.outOfSyncFiles.length,
// 'for batchSize: ',
// syncContext.config.batchSize
// );
// }
private async syncFiles(syncContext: MLSyncContext) {
try {
const functions = syncContext.outOfSyncFiles.map(
(outOfSyncfile) => async () => {
await this.syncFileWithErrorHandler(
syncContext,
outOfSyncfile
);
// TODO: just store file and faces count in syncContext
}
);
syncContext.syncQueue.on('error', () => {
syncContext.syncQueue.clear();
});
await syncContext.syncQueue.addAll(functions);
} catch (error) {
console.error('Error in sync job: ', error);
syncContext.error = error;
}
await syncContext.syncQueue.onIdle();
addLogLine('allFaces: ', syncContext.nSyncedFaces);
// TODO: In case syncJob has to use multiple ml workers
// do in same transaction with each file update
// or keep in files store itself
await mlIDbStorage.incrementIndexVersion('files');
// await this.disposeMLModels();
}
private async getSyncContext(token: string, userID: number) {
if (!this.syncContext) {
addLogLine('Creating syncContext');
this.syncContext = getMLSyncConfig().then((mlSyncConfig) =>
MLFactory.getMLSyncContext(token, userID, mlSyncConfig, true)
);
} else {
addLogLine('reusing existing syncContext');
}
return this.syncContext;
}
private async getLocalSyncContext(token: string, userID: number) {
if (!this.localSyncContext) {
addLogLine('Creating localSyncContext');
this.localSyncContext = getMLSyncConfig().then((mlSyncConfig) =>
MLFactory.getMLSyncContext(token, userID, mlSyncConfig, false)
);
} else {
addLogLine('reusing existing localSyncContext');
}
return this.localSyncContext;
}
public async closeLocalSyncContext() {
if (this.localSyncContext) {
addLogLine('Closing localSyncContext');
const syncContext = await this.localSyncContext;
await syncContext.dispose();
this.localSyncContext = undefined;
}
}
public async syncLocalFile(
token: string,
userID: number,
enteFile: EnteFile,
localFile?: globalThis.File,
textDetectionTimeoutIndex?: number
): Promise<MlFileData | Error> {
const syncContext = await this.getLocalSyncContext(token, userID);
try {
const mlFileData = await this.syncFileWithErrorHandler(
syncContext,
enteFile,
localFile,
textDetectionTimeoutIndex
);
if (syncContext.nSyncedFiles >= syncContext.config.batchSize) {
await this.closeLocalSyncContext();
}
// await syncContext.dispose();
return mlFileData;
} catch (e) {
console.error('Error while syncing local file: ', enteFile.id, e);
return e;
}
}
private async syncFileWithErrorHandler(
syncContext: MLSyncContext,
enteFile: EnteFile,
localFile?: globalThis.File,
textDetectionTimeoutIndex?: number
): Promise<MlFileData> {
try {
const mlFileData = await this.syncFile(
syncContext,
enteFile,
localFile,
textDetectionTimeoutIndex
);
syncContext.nSyncedFaces += mlFileData.faces?.length || 0;
syncContext.nSyncedFiles += 1;
return mlFileData;
} catch (e) {
logError(e, 'ML syncFile failed');
let error = e;
console.error(
'Error in ml sync, fileId: ',
enteFile.id,
'name: ',
enteFile.metadata.title,
error
);
if ('status' in error) {
const parsedMessage = parseUploadErrorCodes(error);
error = parsedMessage;
}
// TODO: throw errors not related to specific file
// sync job run should stop after these errors
// don't persist these errors against file,
// can include indexeddb/cache errors too
switch (error.message) {
case CustomError.SESSION_EXPIRED:
case CustomError.NETWORK_ERROR:
throw error;
}
await this.persistMLFileSyncError(syncContext, enteFile, error);
syncContext.nSyncedFiles += 1;
} finally {
addLogLine('TF Memory stats: ', JSON.stringify(tf.memory()));
}
}
private async syncFile(
syncContext: MLSyncContext,
enteFile: EnteFile,
localFile?: globalThis.File,
// eslint-disable-next-line @typescript-eslint/no-unused-vars
textDetectionTimeoutIndex?: number
) {
const fileContext: MLSyncFileContext = { enteFile, localFile };
const oldMlFile =
(fileContext.oldMlFile = await this.getMLFileData(enteFile.id)) ??
this.newMlData(enteFile.id);
if (
fileContext.oldMlFile?.mlVersion === syncContext.config.mlVersion
// TODO: reset mlversion of all files when user changes image source
) {
return fileContext.oldMlFile;
}
const newMlFile = (fileContext.newMlFile = this.newMlData(enteFile.id));
if (syncContext.shouldUpdateMLVersion) {
newMlFile.mlVersion = syncContext.config.mlVersion;
} else if (fileContext.oldMlFile?.mlVersion) {
newMlFile.mlVersion = fileContext.oldMlFile.mlVersion;
}
try {
await ReaderService.getImageBitmap(syncContext, fileContext);
// await this.syncFaceDetections(syncContext, fileContext);
// await ObjectService.syncFileObjectDetections(
// syncContext,
// fileContext
// );
await Promise.all([
this.syncFaceDetections(syncContext, fileContext),
ObjectService.syncFileObjectDetections(
syncContext,
fileContext
),
// TextService.syncFileTextDetections(
// syncContext,
// fileContext,
// textDetectionTimeoutIndex
// ),
]);
newMlFile.errorCount = 0;
newMlFile.lastErrorMessage = undefined;
await this.persistMLFileData(syncContext, newMlFile);
} catch (e) {
logError(e, 'ml detection failed');
newMlFile.mlVersion = oldMlFile.mlVersion;
throw e;
} finally {
fileContext.tfImage && fileContext.tfImage.dispose();
fileContext.imageBitmap && fileContext.imageBitmap.close();
// addLogLine('8 TF Memory stats: ',JSON.stringify(tf.memory()));
// TODO: enable once faceId changes go in
// await removeOldFaceCrops(
// fileContext.oldMlFile,
// fileContext.newMlFile
// );
}
return newMlFile;
}
public async init() {
if (this.initialized) {
return;
}
await tf.ready();
addLogLine('01 TF Memory stats: ', JSON.stringify(tf.memory()));
// await tfjsFaceDetectionService.init();
// // addLogLine('02 TF Memory stats: ',JSON.stringify(tf.memory()));
// await this.faceLandmarkService.init();
// await faceapi.nets.faceLandmark68Net.loadFromUri('/models/face-api/');
// // addLogLine('03 TF Memory stats: ',JSON.stringify(tf.memory()));
// await tfjsFaceEmbeddingService.init();
// await faceapi.nets.faceRecognitionNet.loadFromUri('/models/face-api/');
// addLogLine('04 TF Memory stats: ',JSON.stringify(tf.memory()));
this.initialized = true;
}
public async dispose() {
this.initialized = false;
// await this.faceDetectionService.dispose();
// addLogLine('11 TF Memory stats: ',JSON.stringify(tf.memory()));
// await this.faceLandmarkService.dispose();
// addLogLine('12 TF Memory stats: ',JSON.stringify(tf.memory()));
// await this.faceEmbeddingService.dispose();
// addLogLine('13 TF Memory stats: ',JSON.stringify(tf.memory()));
}
private async getMLFileData(fileId: number) {
// return mlFilesStore.getItem<MlFileData>(fileId);
return mlIDbStorage.getFile(fileId);
}
private async persistMLFileData(
syncContext: MLSyncContext,
mlFileData: MlFileData
) {
// return mlFilesStore.setItem(mlFileData.fileId.toString(), mlFileData);
mlIDbStorage.putFile(mlFileData);
}
private async persistMLFileSyncError(
syncContext: MLSyncContext,
enteFile: EnteFile,
e: Error
) {
try {
await mlIDbStorage.upsertFileInTx(enteFile.id, (mlFileData) => {
if (!mlFileData) {
mlFileData = this.newMlData(enteFile.id);
}
mlFileData.errorCount = (mlFileData.errorCount || 0) + 1;
mlFileData.lastErrorMessage = e.message;
return mlFileData;
});
} catch (e) {
// TODO: logError or stop sync job after most of the requests are failed
console.error('Error while storing ml sync error', e);
}
}
private async getMLLibraryData(syncContext: MLSyncContext) {
syncContext.mlLibraryData = await mlIDbStorage.getLibraryData();
if (!syncContext.mlLibraryData) {
syncContext.mlLibraryData = {};
}
}
private async persistMLLibraryData(syncContext: MLSyncContext) {
// return mlLibraryStore.setItem('data', syncContext.mlLibraryData);
return mlIDbStorage.putLibraryData(syncContext.mlLibraryData);
}
public async syncIndex(syncContext: MLSyncContext) {
await this.getMLLibraryData(syncContext);
// await this.init();
await PeopleService.syncPeopleIndex(syncContext);
await ObjectService.syncThingsIndex(syncContext);
await this.persistMLLibraryData(syncContext);
}
private async runTSNE(syncContext: MLSyncContext) {
const allFacesMap = await FaceService.getAllSyncedFacesMap(syncContext);
const allFaces = getAllFacesFromMap(allFacesMap);
const input = allFaces
.slice(0, syncContext.config.tsne.samples)
.map((f) => Array.from(f.embedding));
syncContext.tsne = toTSNE(input, syncContext.config.tsne);
addLogLine('tsne: ', syncContext.tsne);
}
private async syncFaceDetections(
syncContext: MLSyncContext,
fileContext: MLSyncFileContext
) {
const { newMlFile } = fileContext;
const startTime = Date.now();
await FaceService.syncFileFaceDetections(syncContext, fileContext);
if (newMlFile.faces && newMlFile.faces.length > 0) {
await FaceService.syncFileFaceCrops(syncContext, fileContext);
await FaceService.syncFileFaceAlignments(syncContext, fileContext);
await FaceService.syncFileFaceEmbeddings(syncContext, fileContext);
}
addLogLine(
`face detection time taken ${fileContext.enteFile.id}`,
Date.now() - startTime,
'ms'
);
}
}
export default new MachineLearningService();
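For orientation, a sketch of a direct call into the service above; in the app this is invoked through the dedicated ML web worker (see mlWorkManager.ts), so the direct import and the wrapper function here are purely illustrative.
// Illustrative sketch; normally this runs inside the dedicated ML worker.
import machineLearningService from './machineLearningService';
import { getToken, getUserID } from 'utils/common/key';
import { addLogLine } from 'utils/logging';

async function runOneSyncBatch() {
    const result = await machineLearningService.sync(getToken(), getUserID());
    // nOutOfSyncFiles and error are what the caller uses to decide whether to back off.
    addLogLine('synced files:', result.nSyncedFiles, 'faces:', result.nSyncedFaces);
    return result;
}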

View file

@ -0,0 +1,9 @@
import { SimpleJob } from 'utils/common/job';
import { MLSyncResult } from 'types/machineLearning';
import { JobResult } from 'types/common/job';
export interface MLSyncJobResult extends JobResult {
mlSyncResult: MLSyncResult;
}
export class MLSyncJob extends SimpleJob<MLSyncJobResult> {}
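A small sketch of wiring this job class to a runner, following the pattern in mlWorkManager.ts; the scheduling and backoff behaviour lives in SimpleJob, which is not shown in this diff, so the comment about rescheduling is an assumption.
// Illustrative sketch; SimpleJob's exact semantics are assumed, not shown here.
import { getMLSyncJobConfig } from 'utils/machineLearning/config';
import { MLSyncJob, MLSyncJobResult } from './mlSyncJob';

async function startBackgroundSync(run: () => Promise<MLSyncJobResult>) {
    const jobConfig = await getMLSyncJobConfig();
    const job = new MLSyncJob(jobConfig, run);
    // Presumably SimpleJob reschedules itself and backs off when shouldBackoff is true.
    job.start();
    return job;
}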

View file

@ -0,0 +1,269 @@
import debounce from 'debounce-promise';
import PQueue from 'p-queue';
import { eventBus, Events } from 'services/events';
import { EnteFile } from 'types/file';
import { FILE_TYPE } from 'constants/file';
import { getToken, getUserID } from 'utils/common/key';
import { logQueueStats } from 'utils/machineLearning';
import { getMLSyncJobConfig } from 'utils/machineLearning/config';
import { logError } from 'utils/sentry';
import mlIDbStorage from 'utils/storage/mlIDbStorage';
import { MLSyncJobResult, MLSyncJob } from './mlSyncJob';
import { ComlinkWorker } from 'utils/comlink/comlinkWorker';
import { DedicatedMLWorker } from 'worker/ml.worker';
import { getDedicatedMLWorker } from 'utils/comlink/ComlinkMLWorker';
import { addLogLine } from 'utils/logging';
const LIVE_SYNC_IDLE_DEBOUNCE_SEC = 30;
const LIVE_SYNC_QUEUE_TIMEOUT_SEC = 300;
const LOCAL_FILES_UPDATED_DEBOUNCE_SEC = 30;
class MLWorkManager {
private mlSyncJob: MLSyncJob;
private syncJobWorker: ComlinkWorker<typeof DedicatedMLWorker>;
private debouncedLiveSyncIdle: () => void;
private debouncedFilesUpdated: () => void;
private liveSyncQueue: PQueue;
private liveSyncWorker: ComlinkWorker<typeof DedicatedMLWorker>;
private mlSearchEnabled: boolean;
constructor() {
this.liveSyncQueue = new PQueue({
concurrency: 1,
// TODO: temp, remove
timeout: LIVE_SYNC_QUEUE_TIMEOUT_SEC * 1000,
throwOnTimeout: true,
});
this.mlSearchEnabled = false;
eventBus.on(Events.LOGOUT, this.logoutHandler.bind(this), this);
this.debouncedLiveSyncIdle = debounce(
() => this.onLiveSyncIdle(),
LIVE_SYNC_IDLE_DEBOUNCE_SEC * 1000
);
this.debouncedFilesUpdated = debounce(
() => this.mlSearchEnabled && this.localFilesUpdatedHandler(),
LOCAL_FILES_UPDATED_DEBOUNCE_SEC * 1000
);
}
public async setMlSearchEnabled(enabled: boolean) {
if (!this.mlSearchEnabled && enabled) {
addLogLine('Enabling MLWorkManager');
this.mlSearchEnabled = true;
logQueueStats(this.liveSyncQueue, 'livesync');
this.liveSyncQueue.on('idle', this.debouncedLiveSyncIdle, this);
eventBus.on(
Events.FILE_UPLOADED,
this.fileUploadedHandler.bind(this),
this
);
eventBus.on(
Events.LOCAL_FILES_UPDATED,
this.debouncedFilesUpdated,
this
);
await this.startSyncJob();
} else if (this.mlSearchEnabled && !enabled) {
addLogLine('Disabling MLWorkManager');
this.mlSearchEnabled = false;
this.liveSyncQueue.removeAllListeners();
eventBus.removeListener(
Events.FILE_UPLOADED,
this.fileUploadedHandler.bind(this),
this
);
eventBus.removeListener(
Events.LOCAL_FILES_UPDATED,
this.debouncedFilesUpdated,
this
);
this.stopSyncJob();
}
}
// Handlers
private async appStartHandler() {
addLogLine('appStartHandler');
try {
this.startSyncJob();
} catch (e) {
logError(e, 'Failed in ML appStart Handler');
}
}
private async logoutHandler() {
addLogLine('logoutHandler');
try {
this.stopSyncJob();
this.mlSyncJob = undefined;
await this.terminateLiveSyncWorker();
await mlIDbStorage.clearMLDB();
} catch (e) {
logError(e, 'Failed in ML logout Handler');
}
}
private async fileUploadedHandler(arg: {
enteFile: EnteFile;
localFile: globalThis.File;
}) {
if (!this.mlSearchEnabled) {
return;
}
addLogLine('fileUploadedHandler: ', arg.enteFile.id);
if (arg.enteFile.metadata.fileType !== FILE_TYPE.IMAGE) {
addLogLine('Skipping non image file for local file processing');
return;
}
try {
await this.syncLocalFile(arg.enteFile, arg.localFile);
} catch (error) {
console.error('Error in syncLocalFile: ', arg.enteFile.id, error);
this.liveSyncQueue.clear();
// logError(e, 'Failed in ML fileUploaded Handler');
}
}
private async localFilesUpdatedHandler() {
addLogLine('Local files updated');
this.startSyncJob();
}
// Live Sync
private async getLiveSyncWorker() {
if (!this.liveSyncWorker) {
this.liveSyncWorker = getDedicatedMLWorker('ml-sync-live');
}
return await this.liveSyncWorker.remote;
}
private async terminateLiveSyncWorker() {
if (!this.liveSyncWorker) {
return;
}
try {
const liveSyncWorker = await this.liveSyncWorker.remote;
await liveSyncWorker.closeLocalSyncContext();
} catch (error) {
console.error(
'Error while closing local sync context, terminating worker',
error
);
}
this.liveSyncWorker?.terminate();
this.liveSyncWorker = undefined;
}
private async onLiveSyncIdle() {
addLogLine('Live sync idle');
await this.terminateLiveSyncWorker();
this.mlSearchEnabled && this.startSyncJob();
}
public async syncLocalFile(enteFile: EnteFile, localFile: globalThis.File) {
const result = await this.liveSyncQueue.add(async () => {
this.stopSyncJob();
const token = getToken();
const userID = getUserID();
const mlWorker = await this.getLiveSyncWorker();
return mlWorker.syncLocalFile(token, userID, enteFile, localFile);
});
if ('message' in result) {
// TODO: redirect/refresh to gallery in case of session_expired
// may not be required as the uploader should anyway take care of this
console.error('Error while syncing local file: ', result);
}
}
// Sync Job
private async getSyncJobWorker() {
if (!this.syncJobWorker) {
this.syncJobWorker = getDedicatedMLWorker('ml-sync-job');
}
return await this.syncJobWorker.remote;
}
private terminateSyncJobWorker() {
this.syncJobWorker?.terminate();
this.syncJobWorker = undefined;
}
private async runMLSyncJob(): Promise<MLSyncJobResult> {
// TODO: skipping is not required if we are caching chunks through the service worker
// currently the worker chunk itself is not loaded when the network is unavailable
if (!navigator.onLine) {
addLogLine(
'Skipping ml-sync job run as not connected to internet.'
);
return {
shouldBackoff: true,
mlSyncResult: undefined,
};
}
const token = getToken();
const userID = getUserID();
const jobWorkerProxy = await this.getSyncJobWorker();
const mlSyncResult = await jobWorkerProxy.sync(token, userID);
// this.terminateSyncJobWorker();
const jobResult: MLSyncJobResult = {
shouldBackoff:
!!mlSyncResult.error || mlSyncResult.nOutOfSyncFiles < 1,
mlSyncResult,
};
addLogLine('ML Sync Job result: ', JSON.stringify(jobResult));
// TODO: redirect/refresh to gallery in case of session_expired, stop ml sync job
return jobResult;
}
public async startSyncJob() {
try {
addLogLine('MLWorkManager.startSyncJob');
if (!this.mlSearchEnabled) {
addLogLine('ML Search disabled, not starting ml sync job');
return;
}
if (!getToken()) {
addLogLine('User not logged in, not starting ml sync job');
return;
}
const mlSyncJobConfig = await getMLSyncJobConfig();
if (!this.mlSyncJob) {
this.mlSyncJob = new MLSyncJob(mlSyncJobConfig, () =>
this.runMLSyncJob()
);
}
this.mlSyncJob.start();
} catch (e) {
logError(e, 'Failed to start MLSync Job');
}
}
public stopSyncJob(terminateWorker: boolean = true) {
try {
addLogLine('MLWorkManager.stopSyncJob');
this.mlSyncJob?.stop();
terminateWorker && this.terminateSyncJobWorker();
} catch (e) {
logError(e, 'Failed to stop MLSync Job');
}
}
}
export default new MLWorkManager();
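A sketch of how the exported manager above is meant to be toggled from application code; the import path is assumed for illustration.
// Illustrative sketch; import path is an assumption.
import mlWorkManager from 'services/machineLearning/mlWorkManager';

export async function onMlSearchToggled(enabled: boolean) {
    // Starts the background sync job and live-sync listeners, or tears them down.
    await mlWorkManager.setMlSearchEnabled(enabled);
}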

View file

@ -0,0 +1,105 @@
import * as tf from '@tensorflow/tfjs-core';
import { TFLiteModel } from '@tensorflow/tfjs-tflite';
import { MOBILEFACENET_FACE_SIZE } from 'constants/machineLearning/config';
import PQueue from 'p-queue';
import {
FaceEmbedding,
FaceEmbeddingMethod,
FaceEmbeddingService,
Versioned,
} from 'types/machineLearning';
import { addLogLine } from 'utils/logging';
import { imageBitmapsToTensor4D } from 'utils/machineLearning';
class MobileFaceNetEmbeddingService implements FaceEmbeddingService {
public method: Versioned<FaceEmbeddingMethod>;
public faceSize: number;
private mobileFaceNetModel: Promise<TFLiteModel>;
private serialQueue: PQueue;
public constructor(faceSize: number = MOBILEFACENET_FACE_SIZE) {
this.method = {
value: 'MobileFaceNet',
version: 2,
};
this.faceSize = faceSize;
// TODO: set timeout
this.serialQueue = new PQueue({ concurrency: 1 });
}
private async init() {
// TODO: can also create new instance per new syncContext
const tflite = await import('@tensorflow/tfjs-tflite');
tflite.setWasmPath('/js/tflite/');
this.mobileFaceNetModel = tflite.loadTFLiteModel(
'/models/mobilefacenet/mobilefacenet.tflite'
);
addLogLine('loaded mobileFaceNetModel: ', tf.getBackend());
}
private async getMobileFaceNetModel() {
if (!this.mobileFaceNetModel) {
await this.init();
}
return this.mobileFaceNetModel;
}
public getFaceEmbeddingTF(
faceTensor: tf.Tensor4D,
mobileFaceNetModel: TFLiteModel
): tf.Tensor2D {
return tf.tidy(() => {
const normalizedFace = tf.sub(tf.div(faceTensor, 127.5), 1.0);
return mobileFaceNetModel.predict(normalizedFace) as tf.Tensor2D;
});
}
// Do not use this directly; use getFaceEmbedding, which calls this through the serial queue
private async getFaceEmbeddingNoQueue(
faceImage: ImageBitmap
): Promise<FaceEmbedding> {
const mobileFaceNetModel = await this.getMobileFaceNetModel();
const embeddingTensor = tf.tidy(() => {
const faceTensor = imageBitmapsToTensor4D([faceImage]);
const embeddingsTensor = this.getFaceEmbeddingTF(
faceTensor,
mobileFaceNetModel
);
return tf.squeeze(embeddingsTensor, [0]);
});
const embedding = new Float32Array(await embeddingTensor.data());
embeddingTensor.dispose();
return embedding;
}
// TODO: TFLiteModel does not seem to work concurrently,
// remove the serial queue if that is not the case
private async getFaceEmbedding(
faceImage: ImageBitmap
): Promise<FaceEmbedding> {
return this.serialQueue.add(() =>
this.getFaceEmbeddingNoQueue(faceImage)
);
}
public async getFaceEmbeddings(
faceImages: Array<ImageBitmap>
): Promise<Array<FaceEmbedding>> {
return Promise.all(
faceImages.map((faceImage) => this.getFaceEmbedding(faceImage))
);
}
public async dispose() {
this.mobileFaceNetModel = undefined;
}
}
export default new MobileFaceNetEmbeddingService();
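A sketch of embedding a batch of aligned face crops with the service above; the relative import path is illustrative, and the crops are expected to already be square faces of the configured faceSize (MOBILEFACENET_FACE_SIZE).
// Illustrative sketch; import path is an assumption.
import mobileFaceNetEmbeddingService from './mobileFaceNetEmbeddingService';

async function embedFaces(faceCrops: ImageBitmap[]) {
    // Returns one Float32Array per crop; calls are serialised internally via the PQueue.
    return mobileFaceNetEmbeddingService.getFaceEmbeddings(faceCrops);
}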

View file

@ -0,0 +1,146 @@
import {
MLSyncContext,
MLSyncFileContext,
DetectedObject,
Thing,
} from 'types/machineLearning';
import { addLogLine } from 'utils/logging';
import {
isDifferentOrOld,
getObjectId,
getAllObjectsFromMap,
} from 'utils/machineLearning';
import mlIDbStorage from 'utils/storage/mlIDbStorage';
import ReaderService from './readerService';
class ObjectService {
async syncFileObjectDetections(
syncContext: MLSyncContext,
fileContext: MLSyncFileContext
) {
const startTime = Date.now();
const { oldMlFile, newMlFile } = fileContext;
if (
!isDifferentOrOld(
oldMlFile?.objectDetectionMethod,
syncContext.objectDetectionService.method
) &&
!isDifferentOrOld(
oldMlFile?.sceneDetectionMethod,
syncContext.sceneDetectionService.method
) &&
oldMlFile?.imageSource === syncContext.config.imageSource
) {
newMlFile.objects = oldMlFile?.objects;
newMlFile.imageSource = oldMlFile.imageSource;
newMlFile.imageDimensions = oldMlFile.imageDimensions;
newMlFile.objectDetectionMethod = oldMlFile.objectDetectionMethod;
newMlFile.sceneDetectionMethod = oldMlFile.sceneDetectionMethod;
return;
}
newMlFile.objectDetectionMethod =
syncContext.objectDetectionService.method;
newMlFile.sceneDetectionMethod =
syncContext.sceneDetectionService.method;
fileContext.newDetection = true;
const imageBitmap = await ReaderService.getImageBitmap(
syncContext,
fileContext
);
const objectDetections =
await syncContext.objectDetectionService.detectObjects(
imageBitmap,
syncContext.config.objectDetection.maxNumBoxes,
syncContext.config.objectDetection.minScore
);
objectDetections.push(
...(await syncContext.sceneDetectionService.detectScenes(
imageBitmap,
syncContext.config.sceneDetection.minScore
))
);
// addLogLine('3 TF Memory stats: ',JSON.stringify(tf.memory()));
// TODO: reenable faces filtering based on width
const detectedObjects = objectDetections?.map((detection) => {
return {
fileID: fileContext.enteFile.id,
detection,
} as DetectedObject;
});
newMlFile.objects = detectedObjects?.map((detectedObject) => ({
...detectedObject,
id: getObjectId(detectedObject, newMlFile.imageDimensions),
className: detectedObject.detection.class,
}));
// ?.filter((f) =>
// f.box.width > syncContext.config.faceDetection.minFaceSize
// );
addLogLine(
`object detection time taken ${fileContext.enteFile.id}`,
Date.now() - startTime,
'ms'
);
addLogLine('[MLService] Detected Objects: ', newMlFile.objects?.length);
}
async getAllSyncedObjectsMap(syncContext: MLSyncContext) {
if (syncContext.allSyncedObjectsMap) {
return syncContext.allSyncedObjectsMap;
}
syncContext.allSyncedObjectsMap = await mlIDbStorage.getAllObjectsMap();
return syncContext.allSyncedObjectsMap;
}
public async clusterThings(syncContext: MLSyncContext): Promise<Thing[]> {
const allObjectsMap = await this.getAllSyncedObjectsMap(syncContext);
const allObjects = getAllObjectsFromMap(allObjectsMap);
const objectClusters = new Map<string, number[]>();
allObjects.map((object) => {
if (!objectClusters.has(object.detection.class)) {
objectClusters.set(object.detection.class, []);
}
const objectsInCluster = objectClusters.get(object.detection.class);
objectsInCluster.push(object.fileID);
});
return [...objectClusters.entries()].map(([className, files], id) => ({
id,
name: className,
files,
}));
}
async syncThingsIndex(syncContext: MLSyncContext) {
const filesVersion = await mlIDbStorage.getIndexVersion('files');
addLogLine('things', await mlIDbStorage.getIndexVersion('things'));
if (filesVersion <= (await mlIDbStorage.getIndexVersion('things'))) {
addLogLine(
'[MLService] Skipping things index as already synced to latest version'
);
return;
}
const things = await this.clusterThings(syncContext);
if (!things || things.length < 1) {
return;
}
await mlIDbStorage.clearAllThings();
for (const thing of things) {
await mlIDbStorage.putThing(thing);
}
await mlIDbStorage.setIndexVersion('things', filesVersion);
}
async getAllThings() {
return await mlIDbStorage.getAllThings();
}
}
export default new ObjectService();
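For readability, a self-contained restatement of the grouping that clusterThings performs (objects bucketed by detection class, each bucket becoming a Thing); MinimalObject is a placeholder type, not the project's DetectedObject.
// Standalone illustration of the clusterThings grouping; types are placeholders.
type MinimalObject = { fileID: number; detection: { class: string } };

function groupByClass(objects: MinimalObject[]) {
    const clusters = new Map<string, number[]>();
    for (const object of objects) {
        const files = clusters.get(object.detection.class) ?? [];
        files.push(object.fileID);
        clusters.set(object.detection.class, files);
    }
    // Same shape as the Thing objects persisted by syncThingsIndex.
    return [...clusters.entries()].map(([name, files], id) => ({ id, name, files }));
}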

Some files were not shown because too many files have changed in this diff.