Add batch size, return all info to derive face images later

Required to reduce worker -> caller payload size later
Author: Shailesh Pandit
Date: 2021-11-06 10:59:05 +05:30
parent 4a94ecbbed
commit 833f25e07f
4 changed files with 77 additions and 32 deletions
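
The payload concern is concrete: a FaceImage is a raw Array<Array<Array<number>>> of pixel values, so every face crop returned from the worker gets copied through structured clone. A rough sketch of the cost, assuming an illustrative 112x112 RGB crop (the actual crop size is not stated in this commit):

    // Back-of-envelope size of one face crop crossing the
    // worker -> caller boundary as a nested number array.
    const height = 112; // assumed crop size, for illustration only
    const width = 112;
    const channels = 3;
    const valuesPerFace = height * width * channels; // 37,632 numbers
    const approxBytesPerFace = valuesPerFace * 8; // ~300 KB per face
    console.log({ valuesPerFace, approxBytesPerFace });

At a few faces per image and batches of up to 500 images, that adds up quickly, which is why keeping enough info to derive face images later matters.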

MLDebug page component:

@@ -15,6 +15,7 @@ export default function MLDebug() {
         useState<number>(0.45);
     const [minClusterSize, setMinClusterSize] = useState<number>(4);
     const [minFaceSize, setMinFaceSize] = useState<number>(24);
+    const [batchSize, setBatchSize] = useState<number>(50);
     const [mlResult, setMlResult] = useState<MLSyncResult>({
         allFaces: [],
         clusterResults: {
@@ -63,7 +64,8 @@ export default function MLDebug() {
                 token,
                 clusterFaceDistance,
                 minClusterSize,
-                minFaceSize
+                minFaceSize,
+                batchSize
             );
             setMlResult(result);
         } catch (e) {
@@ -99,6 +101,13 @@ export default function MLDebug() {
             <button onClick={() => setMinClusterSize(8)}>8</button>
             <button onClick={() => setMinClusterSize(12)}>12</button>
+            <p></p>
+            <div>Number of Images in Batch: {batchSize}</div>
+            <button onClick={() => setBatchSize(50)}>50</button>
+            <button onClick={() => setBatchSize(100)}>100</button>
+            <button onClick={() => setBatchSize(200)}>200</button>
+            <button onClick={() => setBatchSize(500)}>500</button>
             <p></p>
             <button onClick={onSync}>Run ML Sync</button>
@@ -111,7 +120,7 @@ export default function MLDebug() {
                 <div key={ind}>
                     <TFJSImage
                         faceImage={
-                            mlResult.allFaces[faceIndex]
+                            mlResult.allFaces[faceIndex].faceImage
                         }></TFJSImage>
                 </div>
             ))}
@@ -124,7 +133,7 @@ export default function MLDebug() {
                 <div key={index}>
                     <TFJSImage
                         faceImage={
-                            mlResult.allFaces[faceIndex]
+                            mlResult.allFaces[faceIndex].faceImage
                         }></TFJSImage>
                 </div>
             ))}

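The debug page now reads .faceImage off each FaceWithEmbedding rather than receiving bare FaceImage arrays. For reference, a minimal sketch of how such a nested pixel array can be drawn, assuming int32 RGB values in [0, 255] (this is an assumption, not TFJSImage's actual source):

    import * as tf from '@tensorflow/tfjs-core';
    import { FaceImage } from 'utils/machineLearning/types';

    // Hypothetical renderer for a FaceImage (number[][][]).
    async function drawFaceImage(
        faceImage: FaceImage,
        canvas: HTMLCanvasElement
    ) {
        // Build an HxWx3 int32 tensor from the nested pixel array.
        const t = tf.tensor3d(faceImage, undefined, 'int32');
        // toPixels expects int32 in [0, 255] or float in [0, 1].
        await tf.browser.toPixels(t, canvas);
        tf.dispose(t);
    }
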
services/machineLearning/machineLearningService.ts:

@@ -6,7 +6,12 @@ import * as tf from '@tensorflow/tfjs-core';
 // import TFJSFaceDetectionService from './tfjsFaceDetectionService';
 // import TFJSFaceEmbeddingService from './tfjsFaceEmbeddingService';
-import { FaceImage, MLSyncResult } from 'utils/machineLearning/types';
+import {
+    FaceApiResult,
+    FaceImage,
+    FaceWithEmbedding,
+    MLSyncResult,
+} from 'utils/machineLearning/types';
 import * as jpeg from 'jpeg-js';
 import ClusteringService from './clusteringService';
@@ -23,15 +28,9 @@ class MachineLearningService {
     private clusterFaceDistance = 0.45;
     private minClusterSize = 4;
     private minFaceSize = 24;
+    private batchSize = 50;
-    public allFaces: faceapi.WithFaceDescriptor<
-        faceapi.WithFaceLandmarks<
-            {
-                detection: faceapi.FaceDetection;
-            },
-            faceapi.FaceLandmarks68
-        >
-    >[];
+    public allFaces: FaceWithEmbedding[];
     private allFaceImages: FaceImage[];

     public constructor() {
@@ -46,11 +45,13 @@ class MachineLearningService {
     public async init(
         clusterFaceDistance: number,
         minClusterSize: number,
-        minFaceSize: number
+        minFaceSize: number,
+        batchSize: number
     ) {
         this.clusterFaceDistance = clusterFaceDistance;
         this.minClusterSize = minClusterSize;
         this.minFaceSize = minFaceSize;
+        this.batchSize = batchSize;

         // setWasmPath('/js/tfjs/');
         await tf.ready();
@@ -86,17 +87,22 @@ class MachineLearningService {
         existingFiles.sort(
             (a, b) => b.metadata.creationTime - a.metadata.creationTime
         );
-        const files = this.getUniqueFiles(existingFiles, 50);
-        console.log('Got unique files: ', files.size);
+        const files = this.getUniqueFiles(existingFiles, this.batchSize);
+        console.log(
+            'Got unique files: ',
+            files.size,
+            'for batchSize: ',
+            this.batchSize
+        );

         this.allFaces = [];
         for (const file of files.values()) {
             try {
                 const result = await this.syncFile(file, token);
-                this.allFaces = this.allFaces.concat(result.faceApiResults);
-                this.allFaceImages = this.allFaceImages.concat(
-                    result.faceImages
-                );
+                this.allFaces = this.allFaces.concat(result);
+                // this.allFaceImages = this.allFaceImages.concat(
+                //     result.faceImages
+                // );
                 console.log('TF Memory stats: ', tf.memory());
             } catch (e) {
                 console.error(
@@ -120,7 +126,7 @@ class MachineLearningService {
         // this.allFaces[0].alignedRect.imageDims
         const clusterResults = this.clusteringService.clusterUsingDBSCAN(
-            this.allFaces.map((f) => Array.from(f.descriptor)),
+            this.allFaces.map((f) => Array.from(f.face.descriptor)),
             this.clusterFaceDistance,
             this.minClusterSize
         );
@@ -132,7 +138,7 @@ class MachineLearningService {
         console.log('[MLService] Got cluster results: ', clusterResults);

         return {
-            allFaces: this.allFaceImages,
+            allFaces: this.allFaces,
             clusterResults,
         };
     }
@@ -164,7 +170,7 @@ class MachineLearningService {
         // const faceApiInput = tfImage.expandDims(0) as tf.Tensor4D;
         // tf.dispose(tfImage);
         // console.log('4 TF Memory stats: ', tf.memory());
-        const faces = await faceapi
+        const faces = (await faceapi
             .detectAllFaces(
                 tfImage as any,
                 new SsdMobilenetv1Options({
@@ -173,7 +179,7 @@ class MachineLearningService {
                 })
             )
             .withFaceLandmarks()
-            .withFaceDescriptors();
+            .withFaceDescriptors()) as FaceApiResult[];

         // console.log('5 TF Memory stats: ', tf.memory());
@@ -218,10 +224,13 @@ class MachineLearningService {
         tf.dispose(tfImage);
         // console.log('8 TF Memory stats: ', tf.memory());

-        return {
-            faceApiResults: filtertedFaces,
-            faceImages: faceImages,
-        };
+        return filtertedFaces.map((ff, index) => {
+            return {
+                fileId: file.id.toString(),
+                face: ff,
+                faceImage: faceImages[index],
+            } as FaceWithEmbedding;
+        });

         // console.log('[MLService] Got faces: ', filtertedFaces, embeddingResults);

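syncFile now returns one flat FaceWithEmbedding per detected face, each tagged with its fileId, instead of parallel faceApiResults/faceImages arrays. That lets a caller regroup faces per file on its own; a small sketch of such regrouping (the helper name is hypothetical):

    import { FaceWithEmbedding } from 'utils/machineLearning/types';

    // Hypothetical helper: bucket the flat allFaces array by the
    // fileId carried on each face.
    function groupFacesByFile(
        allFaces: FaceWithEmbedding[]
    ): Map<string, FaceWithEmbedding[]> {
        const byFile = new Map<string, FaceWithEmbedding[]>();
        for (const face of allFaces) {
            const bucket = byFile.get(face.fileId) ?? [];
            bucket.push(face);
            byFile.set(face.fileId, bucket);
        }
        return byFile;
    }
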
utils/machineLearning/types.ts:

@@ -1,7 +1,13 @@
 import { NormalizedFace } from '@tensorflow-models/blazeface';
+import {
+    FaceDetection,
+    FaceLandmarks68,
+    WithFaceDescriptor,
+    WithFaceLandmarks,
+} from 'face-api.js';

 export interface MLSyncResult {
-    allFaces: FaceImage[];
+    allFaces: FaceWithEmbedding[];
     clusterResults: ClusteringResults;
 }
@@ -13,10 +19,20 @@ export declare type FaceEmbedding = Array<number>;

 export declare type FaceImage = Array<Array<Array<number>>>;

+export declare type FaceApiResult = WithFaceDescriptor<
+    WithFaceLandmarks<
+        {
+            detection: FaceDetection;
+        },
+        FaceLandmarks68
+    >
+>;

 export interface FaceWithEmbedding {
     fileId: string;
-    face: AlignedFace;
-    embedding: FaceEmbedding;
+    face: FaceApiResult;
+    // face: AlignedFace;
+    // embedding: FaceEmbedding;
     faceImage: FaceImage;
 }

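Because FaceApiResult keeps the full face-api.js detection, a face crop can later be re-derived from the original image instead of being stored and shipped, which is the "derive face images later" direction in the commit message. A sketch of that idea (the helper is hypothetical; detection.box is face-api.js's bounding box):

    import { FaceApiResult } from 'utils/machineLearning/types';

    // Hypothetical: crop a face out of the source image using the
    // detection box retained on FaceApiResult.
    function cropFace(
        image: CanvasImageSource,
        face: FaceApiResult
    ): HTMLCanvasElement {
        const { x, y, width, height } = face.detection.box;
        const canvas = document.createElement('canvas');
        canvas.width = width;
        canvas.height = height;
        const ctx = canvas.getContext('2d');
        if (!ctx) throw new Error('2d canvas context unavailable');
        ctx.drawImage(image, x, y, width, height, 0, 0, width, height);
        return canvas;
    }
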
MachineLearningWorker (Comlink web worker):

@@ -2,7 +2,13 @@ import * as Comlink from 'comlink';
 import MachineLearningService from 'services/machineLearning/machineLearningService';

 export class MachineLearningWorker {
-    async sync(token, clusterFaceDistance, minClusterSize, minFaceSize) {
+    async sync(
+        token,
+        clusterFaceDistance,
+        minClusterSize,
+        minFaceSize,
+        batchSize
+    ) {
         if (!(typeof navigator !== 'undefined')) {
             console.log(
                 'MachineLearning worker will only run in web worker env.'
@@ -12,7 +18,12 @@ export class MachineLearningWorker {
         console.log('Running machine learning sync from worker');
         const mlService = new MachineLearningService();
-        await mlService.init(clusterFaceDistance, minClusterSize, minFaceSize);
+        await mlService.init(
+            clusterFaceDistance,
+            minClusterSize,
+            minFaceSize,
+            batchSize
+        );
         const results = await mlService.sync(token);
         console.log('Ran machine learning sync from worker', results);
         return results;
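
On the caller side, batchSize simply becomes a fifth argument to the proxied sync call. A sketch of driving the worker through Comlink, assuming the worker script calls Comlink.expose(MachineLearningWorker) and the worker URL and import path shown here (none of which appear in this commit):

    import * as Comlink from 'comlink';
    import type { MachineLearningWorker } from 'worker/machineLearning.worker';

    async function runMLSync(token: string) {
        // Worker URL is an assumption; bundler setups differ.
        const worker = new Worker('machineLearning.worker.js');
        const WorkerClass = Comlink.wrap<typeof MachineLearningWorker>(worker);
        // Comlink lets callers construct the exposed class remotely.
        const instance = await new WorkerClass();
        // token, clusterFaceDistance, minClusterSize, minFaceSize, batchSize
        return instance.sync(token, 0.45, 4, 24, 100);
    }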