Add TFJSFaceEmbeddingService and tflite model

Set wasm path
Shailesh Pandit 2021-11-04 16:35:09 +05:30
parent 748d914e93
commit 551e91d278
4 changed files with 109 additions and 0 deletions

Binary file not shown (presumably the mobilefacenet.tflite model referenced by the code below).


@@ -2,18 +2,22 @@ import { MLSyncResult } from 'utils/machineLearning/types';
import * as tf from '@tensorflow/tfjs';
import { setWasmPaths } from '@tensorflow/tfjs-backend-wasm';
import TFJSFaceDetectionService from './tfjsFaceDetectionService';
import TFJSFaceEmbeddingService from './tfjsFaceEmbeddingService';

class MachineLearningService {
    private faceDetectionService: TFJSFaceDetectionService;
    private faceEmbeddingService: TFJSFaceEmbeddingService;

    public constructor() {
        this.faceDetectionService = new TFJSFaceDetectionService();
        this.faceEmbeddingService = new TFJSFaceEmbeddingService();
    }

    public async init() {
        await tf.ready();
        setWasmPaths('/js/tfjs/');

        await this.faceDetectionService.init();
        await this.faceEmbeddingService.init();
    }

    public async sync(token: string): Promise<MLSyncResult> {
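
The init() hunk above registers '/js/tfjs/' as the location of the tfjs wasm binaries via setWasmPaths. For reference, tfjs only picks that path up if it is set before the wasm backend is selected; the following is a minimal sketch of that flow, purely as an illustration of the @tensorflow/tfjs-backend-wasm API and not code from this commit:

import * as tf from '@tensorflow/tfjs';
import { setWasmPaths } from '@tensorflow/tfjs-backend-wasm';

// Illustration only: explicitly opting into the wasm backend.
// setWasmPaths has to run before the backend is chosen, so that the
// runtime fetches its .wasm binaries from the configured URL prefix.
async function initWasmBackend() {
    setWasmPaths('/js/tfjs/'); // the .wasm files must be served under this path
    await tf.setBackend('wasm');
    await tf.ready();
    console.log('active tf backend: ', tf.getBackend());
}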


@@ -0,0 +1,103 @@
import * as tf from '@tensorflow/tfjs';
import * as tflite from '@tensorflow/tfjs-tflite';
import { AlignedFace, FaceEmbedding } from 'utils/machineLearning/types';

class TFJSFaceEmbeddingService {
    private mobileFaceNetModel: tflite.TFLiteModel;

    public constructor() {}

    public async init() {
        tflite.setWasmPath('/js/tflite/');

        this.mobileFaceNetModel = await tflite.loadTFLiteModel(
            '/models/mobilefacenet/mobilefacenet.tflite'
        );

        console.log(
            'loaded mobileFaceNetModel: ',
            this.mobileFaceNetModel,
            await tf.getBackend()
        );
    }

    private async getEmbeddingsBatch(faceImagesTensor, boxes) {
        const embeddings = [];
        for (let i = 0; i < boxes.length; i++) {
            const face = tf.gather(faceImagesTensor, i).expandDims();
            const embedding = (await this.mobileFaceNetModel.predict(
                face
            )) as any;
            embeddings[i] = embedding.gather(0);
        }

        return tf.stack(embeddings);
    }

    public async getEmbeddings(image: tf.Tensor3D, faces: AlignedFace[]) {
        if (!faces || faces.length < 1) {
            return {
                embeddings: [],
                faceImages: [],
            };
        }

        const reshapedImage = tf.tidy(() => {
            if (!(image instanceof tf.Tensor)) {
                image = tf.browser.fromPixels(image);
            }
            return tf.expandDims(
                tf.cast(image as tf.Tensor, 'float32'),
                0
            ) as tf.Tensor4D;
        });

        const width = reshapedImage.shape[2];
        const height = reshapedImage.shape[1];
        console.log(
            'width: ',
            width,
            height,
            faces[0].topLeft,
            faces[0].bottomRight
        );

        const boxes = faces.map((face) => {
            return [
                face.alignedBox[1] / height,
                face.alignedBox[0] / width,
                face.alignedBox[3] / height,
                face.alignedBox[2] / width,
            ];
        });
        console.log('boxes: ', boxes[0]);

        const normalizedImage = tf.sub(
            tf.div(reshapedImage, 127.5),
            1.0
        ) as tf.Tensor4D;
        tf.dispose(reshapedImage);

        const faceImagesTensor = tf.image.cropAndResize(
            normalizedImage,
            boxes,
            tf.fill([boxes.length], 0, 'int32'),
            [112, 112]
        );
        tf.dispose(normalizedImage);

        // const embeddingsTensor = await this.mobileFaceNetModel.predict(faceImagesTensor);
        const embeddingsTensor = await this.getEmbeddingsBatch(
            faceImagesTensor,
            boxes
        );
        const embeddings = await embeddingsTensor.array();
        const faceImages = await faceImagesTensor.array();
        tf.dispose(faceImagesTensor);
        tf.dispose(embeddingsTensor);
        // console.log('embeddings: ', embeddings[0]);

        return {
            embeddings: embeddings as FaceEmbedding[],
            faceImages: faceImages,
        };
    }
}

export default TFJSFaceEmbeddingService;
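
For orientation, here is a rough sketch of how this service might be driven from the existing detection service. The detectFaces call and the ImageBitmap input are assumptions made for illustration; they are not part of this commit:

import * as tf from '@tensorflow/tfjs';
import TFJSFaceDetectionService from './tfjsFaceDetectionService';
import TFJSFaceEmbeddingService from './tfjsFaceEmbeddingService';

// Hypothetical driver. Assumes the detection service exposes a
// detectFaces(image) method returning AlignedFace[]; this commit does
// not define that method.
async function embedFacesInImage(imageBitmap: ImageBitmap) {
    const detectionService = new TFJSFaceDetectionService();
    const embeddingService = new TFJSFaceEmbeddingService();
    await detectionService.init();
    await embeddingService.init();

    // uint8 RGB Tensor3D; getEmbeddings casts to float32 and rescales
    // pixel values to [-1, 1] before cropping 112x112 faces for MobileFaceNet.
    const image = tf.browser.fromPixels(imageBitmap);
    const faces = await detectionService.detectFaces(image); // assumed API
    const { embeddings } = await embeddingService.getEmbeddings(image, faces);

    tf.dispose(image);
    return embeddings; // one FaceEmbedding (Array<number>) per detected face
}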


@@ -7,3 +7,5 @@ export interface MLSyncResult {
export interface AlignedFace extends NormalizedFace {
    alignedBox: [number, number, number, number];
}

export declare type FaceEmbedding = Array<number>;
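
Nothing in this commit consumes FaceEmbedding values yet. Purely as an illustration of the type (a flat numeric vector per face), two embeddings could later be compared with cosine similarity:

import { FaceEmbedding } from 'utils/machineLearning/types';

// Illustration only: cosine similarity between two face embeddings.
// Values near 1 mean the vectors point in nearly the same direction.
function cosineSimilarity(a: FaceEmbedding, b: FaceEmbedding): number {
    let dot = 0;
    let normA = 0;
    let normB = 0;
    for (let i = 0; i < a.length; i++) {
        dot += a[i] * b[i];
        normA += a[i] * a[i];
        normB += b[i] * b[i];
    }
    return dot / (Math.sqrt(normA) * Math.sqrt(normB));
}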