Add batch size; return all info needed to derive face images later

This is required to reduce the worker -> caller payload size later
This commit is contained in:
Shailesh Pandit 2021-11-06 10:59:05 +05:30
parent 4a94ecbbed
commit 833f25e07f
4 changed files with 77 additions and 32 deletions

View file

@ -15,6 +15,7 @@ export default function MLDebug() {
useState<number>(0.45);
const [minClusterSize, setMinClusterSize] = useState<number>(4);
const [minFaceSize, setMinFaceSize] = useState<number>(24);
const [batchSize, setBatchSize] = useState<number>(50);
const [mlResult, setMlResult] = useState<MLSyncResult>({
allFaces: [],
clusterResults: {
@ -63,7 +64,8 @@ export default function MLDebug() {
token,
clusterFaceDistance,
minClusterSize,
minFaceSize
minFaceSize,
batchSize
);
setMlResult(result);
} catch (e) {
@ -99,6 +101,13 @@ export default function MLDebug() {
<button onClick={() => setMinClusterSize(8)}>8</button>
<button onClick={() => setMinClusterSize(12)}>12</button>
<p></p>
<div>Number of Images in Batch: {batchSize}</div>
<button onClick={() => setBatchSize(50)}>50</button>
<button onClick={() => setBatchSize(100)}>100</button>
<button onClick={() => setBatchSize(200)}>200</button>
<button onClick={() => setBatchSize(500)}>500</button>
<p></p>
<button onClick={onSync}>Run ML Sync</button>
@ -111,7 +120,7 @@ export default function MLDebug() {
<div key={ind}>
<TFJSImage
faceImage={
mlResult.allFaces[faceIndex]
mlResult.allFaces[faceIndex].faceImage
}></TFJSImage>
</div>
))}
@ -124,7 +133,7 @@ export default function MLDebug() {
<div key={index}>
<TFJSImage
faceImage={
mlResult.allFaces[faceIndex]
mlResult.allFaces[faceIndex].faceImage
}></TFJSImage>
</div>
))}

View file

@ -6,7 +6,12 @@ import * as tf from '@tensorflow/tfjs-core';
// import TFJSFaceDetectionService from './tfjsFaceDetectionService';
// import TFJSFaceEmbeddingService from './tfjsFaceEmbeddingService';
import { FaceImage, MLSyncResult } from 'utils/machineLearning/types';
import {
FaceApiResult,
FaceImage,
FaceWithEmbedding,
MLSyncResult,
} from 'utils/machineLearning/types';
import * as jpeg from 'jpeg-js';
import ClusteringService from './clusteringService';
@ -23,15 +28,9 @@ class MachineLearningService {
private clusterFaceDistance = 0.45;
private minClusterSize = 4;
private minFaceSize = 24;
private batchSize = 50;
public allFaces: faceapi.WithFaceDescriptor<
faceapi.WithFaceLandmarks<
{
detection: faceapi.FaceDetection;
},
faceapi.FaceLandmarks68
>
>[];
public allFaces: FaceWithEmbedding[];
private allFaceImages: FaceImage[];
public constructor() {
@ -46,11 +45,13 @@ class MachineLearningService {
public async init(
clusterFaceDistance: number,
minClusterSize: number,
minFaceSize: number
minFaceSize: number,
batchSize: number
) {
this.clusterFaceDistance = clusterFaceDistance;
this.minClusterSize = minClusterSize;
this.minFaceSize = minFaceSize;
this.batchSize = batchSize;
// setWasmPath('/js/tfjs/');
await tf.ready();
@ -86,17 +87,22 @@ class MachineLearningService {
existingFiles.sort(
(a, b) => b.metadata.creationTime - a.metadata.creationTime
);
const files = this.getUniqueFiles(existingFiles, 50);
console.log('Got unique files: ', files.size);
const files = this.getUniqueFiles(existingFiles, this.batchSize);
console.log(
'Got unique files: ',
files.size,
'for batchSize: ',
this.batchSize
);
this.allFaces = [];
for (const file of files.values()) {
try {
const result = await this.syncFile(file, token);
this.allFaces = this.allFaces.concat(result.faceApiResults);
this.allFaceImages = this.allFaceImages.concat(
result.faceImages
);
this.allFaces = this.allFaces.concat(result);
// this.allFaceImages = this.allFaceImages.concat(
// result.faceImages
// );
console.log('TF Memory stats: ', tf.memory());
} catch (e) {
console.error(
@ -120,7 +126,7 @@ class MachineLearningService {
// this.allFaces[0].alignedRect.imageDims
const clusterResults = this.clusteringService.clusterUsingDBSCAN(
this.allFaces.map((f) => Array.from(f.descriptor)),
this.allFaces.map((f) => Array.from(f.face.descriptor)),
this.clusterFaceDistance,
this.minClusterSize
);
@ -132,7 +138,7 @@ class MachineLearningService {
console.log('[MLService] Got cluster results: ', clusterResults);
return {
allFaces: this.allFaceImages,
allFaces: this.allFaces,
clusterResults,
};
}
@ -164,7 +170,7 @@ class MachineLearningService {
// const faceApiInput = tfImage.expandDims(0) as tf.Tensor4D;
// tf.dispose(tfImage);
// console.log('4 TF Memory stats: ', tf.memory());
const faces = await faceapi
const faces = (await faceapi
.detectAllFaces(
tfImage as any,
new SsdMobilenetv1Options({
@ -173,7 +179,7 @@ class MachineLearningService {
})
)
.withFaceLandmarks()
.withFaceDescriptors();
.withFaceDescriptors()) as FaceApiResult[];
// console.log('5 TF Memory stats: ', tf.memory());
@ -218,10 +224,13 @@ class MachineLearningService {
tf.dispose(tfImage);
// console.log('8 TF Memory stats: ', tf.memory());
return {
faceApiResults: filtertedFaces,
faceImages: faceImages,
};
return filtertedFaces.map((ff, index) => {
return {
fileId: file.id.toString(),
face: ff,
faceImage: faceImages[index],
} as FaceWithEmbedding;
});
// console.log('[MLService] Got faces: ', filtertedFaces, embeddingResults);

View file

@ -1,7 +1,13 @@
import { NormalizedFace } from '@tensorflow-models/blazeface';
import {
FaceDetection,
FaceLandmarks68,
WithFaceDescriptor,
WithFaceLandmarks,
} from 'face-api.js';
export interface MLSyncResult {
allFaces: FaceImage[];
allFaces: FaceWithEmbedding[];
clusterResults: ClusteringResults;
}
@ -13,10 +19,20 @@ export declare type FaceEmbedding = Array<number>;
export declare type FaceImage = Array<Array<Array<number>>>;
export declare type FaceApiResult = WithFaceDescriptor<
WithFaceLandmarks<
{
detection: FaceDetection;
},
FaceLandmarks68
>
>;
export interface FaceWithEmbedding {
fileId: string;
face: AlignedFace;
embedding: FaceEmbedding;
face: FaceApiResult;
// face: AlignedFace;
// embedding: FaceEmbedding;
faceImage: FaceImage;
}

View file

@ -2,7 +2,13 @@ import * as Comlink from 'comlink';
import MachineLearningService from 'services/machineLearning/machineLearningService';
export class MachineLearningWorker {
async sync(token, clusterFaceDistance, minClusterSize, minFaceSize) {
async sync(
token,
clusterFaceDistance,
minClusterSize,
minFaceSize,
batchSize
) {
if (!(typeof navigator !== 'undefined')) {
console.log(
'MachineLearning worker will only run in web worker env.'
@ -12,7 +18,12 @@ export class MachineLearningWorker {
console.log('Running machine learning sync from worker');
const mlService = new MachineLearningService();
await mlService.init(clusterFaceDistance, minClusterSize, minFaceSize);
await mlService.init(
clusterFaceDistance,
minClusterSize,
minFaceSize,
batchSize
);
const results = await mlService.sync(token);
console.log('Ran machine learning sync from worker', results);
return results;