[mob][photos] Remove old TODOs

laurenspriem 2024-05-17 10:17:01 +05:30
parent 725e628537
commit 345eed5209
5 changed files with 0 additions and 173 deletions


@@ -1,6 +1,5 @@
import "dart:math" show min, max;
import "package:logging/logging.dart";
import "package:photos/face/model/box.dart";
import "package:photos/face/model/landmark.dart";
import "package:photos/services/machine_learning/face_ml/face_detection/detection.dart";
@@ -51,48 +50,6 @@ class Detection {
    return (box.width * imageWidth * box.height * imageHeight).toInt();
  }

  // TODO: iterate on better scoring logic, current is a placeholder
  int getVisibilityScore() {
    try {
      if (isEmpty) {
        return -1;
      }
      final double aspectRatio = box.width / box.height;
      final double eyeDistance = (landmarks[1].x - landmarks[0].x).abs();
      final double mouthDistance = (landmarks[4].x - landmarks[3].x).abs();
      final double noseEyeDistance =
          (landmarks[2].y - ((landmarks[0].y + landmarks[1].y) / 2)).abs();
      final double normalizedEyeDistance = eyeDistance / box.width;
      final double normalizedMouthDistance = mouthDistance / box.width;
      final double normalizedNoseEyeDistance = noseEyeDistance / box.height;
      const double aspectRatioThreshold = 0.8;
      const double eyeDistanceThreshold = 0.2;
      const double mouthDistanceThreshold = 0.3;
      const double noseEyeDistanceThreshold = 0.1;
      double score = 0;
      if (aspectRatio >= aspectRatioThreshold) {
        score += 50;
      }
      if (normalizedEyeDistance >= eyeDistanceThreshold) {
        score += 20;
      }
      if (normalizedMouthDistance >= mouthDistanceThreshold) {
        score += 20;
      }
      if (normalizedNoseEyeDistance >= noseEyeDistanceThreshold) {
        score += 10;
      }
      return score.clamp(0, 100).toInt();
    } catch (e) {
      Logger("FaceDetection").warning('Error calculating visibility score:', e);
      return -1;
    }
  }

  FaceDirection getFaceDirection() {
    if (isEmpty) {
      return FaceDirection.straight;
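Note on the deleted heuristic: it assumes the detector's five-point landmark order (left eye, right eye, nose, left mouth corner, right mouth corner) and awards fixed points per visibility cue. A minimal standalone sketch with hypothetical landmark values (the real method reads the box and landmarks from the detection object):

void main() {
  // Hypothetical relative landmarks for a roughly frontal face:
  // [leftEye, rightEye, nose, leftMouth, rightMouth]
  final lm = [
    (x: 0.30, y: 0.40), (x: 0.70, y: 0.40), // eyes
    (x: 0.50, y: 0.55), // nose
    (x: 0.35, y: 0.70), (x: 0.65, y: 0.70), // mouth corners
  ];
  const w = 0.9, h = 1.0; // relative box width and height

  double score = 0;
  if (w / h >= 0.8) score += 50; // near-frontal aspect ratio
  if ((lm[1].x - lm[0].x).abs() / w >= 0.2) score += 20; // eyes spread apart
  if ((lm[4].x - lm[3].x).abs() / w >= 0.3) score += 20; // mouth spread apart
  if ((lm[2].y - (lm[0].y + lm[1].y) / 2).abs() / h >= 0.1) score += 10; // nose sits below eyes
  print(score.clamp(0, 100).toInt()); // prints 100 (50 + 20 + 20 + 10)
}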


@@ -26,8 +26,6 @@ class Face {
  bool get isHighQuality => (!isBlurry) && hasHighScore;

  int get visibility => detection.getVisibilityScore();

  int area({int? w, int? h}) {
    return detection.getFaceArea(
      fileInfo?.imageWidth ?? w ?? 0,
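For scale: getFaceArea above multiplies the relative box straight out to pixels, so an illustrative 0.2 × 0.3 relative box on a 1000 × 800 px image gives (0.2 · 1000 · 0.3 · 800).toInt() = 48000 px².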


@@ -818,122 +818,6 @@ class FaceMlService {
    isImageIndexRunning = false;
  }

  /// Analyzes the given image data by running the full pipeline (face detection, face alignment, face embedding).
  ///
  /// [enteFile] The ente file to analyze.
  ///
  /// [preferUsingThumbnailForEverything] If true, the thumbnail will be used for everything (face detection, face alignment, face embedding), and file data will be used only if a thumbnail is unavailable.
  /// If false, thumbnail will only be used for detection, and the original image will be used for face alignment and face embedding.
  ///
  /// Returns an immutable [FaceMlResult] instance containing the results of the analysis.
  /// Does not store the result in the database, for that you should use [indexImage].
  /// Throws [CouldNotRetrieveAnyFileData] or [GeneralFaceMlException] if something goes wrong.
  /// TODO: improve function such that it only uses full image if it is already on the device, otherwise it uses thumbnail. And make sure to store what is used!
  Future<FaceMlResult> analyzeImageInComputerAndImageIsolate(
    EnteFile enteFile, {
    bool preferUsingThumbnailForEverything = false,
    bool disposeImageIsolateAfterUse = true,
  }) async {
    _checkEnteFileForID(enteFile);

    final String? thumbnailPath = await _getImagePathForML(
      enteFile,
      typeOfData: FileDataForML.thumbnailData,
    );
    String? filePath;
    // // TODO: remove/optimize this later. Not now though: premature optimization
    // fileData =
    //     await _getDataForML(enteFile, typeOfData: FileDataForML.fileData);
    if (thumbnailPath == null) {
      filePath = await _getImagePathForML(
        enteFile,
        typeOfData: FileDataForML.fileData,
      );
      if (thumbnailPath == null && filePath == null) {
        _logger.severe(
          "Failed to get any data for enteFile with uploadedFileID ${enteFile.uploadedFileID}",
        );
        throw CouldNotRetrieveAnyFileData();
      }
    }
    // TODO: use smallData and largeData instead of thumbnailData and fileData again!
    final String smallDataPath = thumbnailPath ?? filePath!;

    final resultBuilder = FaceMlResultBuilder.fromEnteFile(enteFile);
    _logger.info(
      "Analyzing image with uploadedFileID: ${enteFile.uploadedFileID} ${kDebugMode ? enteFile.displayName : ''}",
    );
    final stopwatch = Stopwatch()..start();

    try {
      // Get the faces
      final List<FaceDetectionRelative> faceDetectionResult =
          await _detectFacesIsolate(
        smallDataPath,
        resultBuilder: resultBuilder,
      );
      _logger.info("Completed `detectFaces` function");

      // If no faces were detected, return a result with no faces. Otherwise, continue.
      if (faceDetectionResult.isEmpty) {
        _logger.info(
            "No faceDetectionResult, Completed analyzing image with uploadedFileID ${enteFile.uploadedFileID}, in "
            "${stopwatch.elapsedMilliseconds} ms");
        return resultBuilder.buildNoFaceDetected();
      }

      if (!preferUsingThumbnailForEverything) {
        filePath ??= await _getImagePathForML(
          enteFile,
          typeOfData: FileDataForML.fileData,
        );
      }
      resultBuilder.onlyThumbnailUsed = filePath == null;
      final String largeDataPath = filePath ?? thumbnailPath!;

      // Align the faces
      final Float32List faceAlignmentResult = await _alignFaces(
        largeDataPath,
        faceDetectionResult,
        resultBuilder: resultBuilder,
      );
      _logger.info("Completed `alignFaces` function");

      // Get the embeddings of the faces
      final embeddings = await _embedFaces(
        faceAlignmentResult,
        resultBuilder: resultBuilder,
      );
      _logger.info("Completed `embedBatchFaces` function");

      stopwatch.stop();
      _logger.info("Finished Analyze image (${embeddings.length} faces) with "
          "uploadedFileID ${enteFile.uploadedFileID}, in "
          "${stopwatch.elapsedMilliseconds} ms");

      if (disposeImageIsolateAfterUse) {
        // Close the image conversion isolate
        ImageMlIsolate.instance.dispose();
      }

      return resultBuilder.build();
    } catch (e, s) {
      _logger.severe(
        "Could not analyze image with ID ${enteFile.uploadedFileID} \n",
        e,
        s,
      );
      // throw GeneralFaceMlException("Could not analyze image");
      return resultBuilder.buildErrorOccurred();
    }
  }

  /// Analyzes the given image data by running the full pipeline for faces, using [analyzeImageSync] in the isolate.
  Future<FaceMlResult?> analyzeImageInSingleIsolate(EnteFile enteFile) async {
    _checkEnteFileForID(enteFile);
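A hedged sketch of the shape the deleted method had — detect on the cheapest image available, then align and embed on the best one — with stub helpers that are illustrative, not the service's real private API:

import 'dart:typed_data';

// Illustrative stubs; the real service runs these stages in isolates
// against its bundled detection and embedding models.
Future<List<String>> detectFaces(String path) async => ['face_0'];
Future<Float32List> alignFaces(String path, List<String> faces) async =>
    Float32List(faces.length * 112 * 112 * 3); // stub: one RGB crop per face
Future<List<List<double>>> embedFaces(Float32List crops) async =>
    [[0.1, 0.2, 0.3]]; // stub: a single short vector

Future<List<List<double>>> analyzePipeline(
  String? thumbnailPath,
  String? filePath,
) async {
  // Detection runs on the cheapest image available.
  final smallPath = thumbnailPath ?? filePath;
  if (smallPath == null) throw StateError('no image data for this file');
  final detections = await detectFaces(smallPath);
  if (detections.isEmpty) return const []; // nothing to align or embed

  // Alignment and embedding prefer the full-resolution image.
  final largePath = filePath ?? thumbnailPath!;
  final aligned = await alignFaces(largePath, detections);
  return embedFaces(aligned);
}

void main() async {
  final embeddings = await analyzePipeline('thumb.jpg', null);
  print('embedded ${embeddings.length} face(s)');
}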


@@ -1,11 +0,0 @@
mixin ModelFile {
  static const String faceDetectionBackWeb =
      'assets/models/blazeface/blazeface_back_ente_web.tflite';
  // TODO: which of the two mobilefacenet model should I use now??
  // static const String faceEmbeddingEnte =
  //     'assets/models/mobilefacenet/mobilefacenet_ente_web.tflite';
  static const String faceEmbeddingEnte =
      'assets/models/mobilefacenet/mobilefacenet_unq_TF211.tflite';
  static const String yoloV5FaceS640x640DynamicBatchonnx =
      'assets/models/yolov5face/yolov5s_face_640_640_dynamic.onnx';
}
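The deleted mixin only held asset-path constants; loading happens elsewhere. A hedged sketch of how such a path is typically consumed in Flutter, assuming the models are declared as assets in pubspec.yaml (the helper name is hypothetical):

import 'dart:typed_data';

import 'package:flutter/services.dart' show rootBundle;

// Reads model bytes from the app bundle so a TFLite/ONNX runtime can
// consume them; the runtime call itself is out of scope here.
Future<Uint8List> loadModelBytes(String assetPath) async {
  final ByteData data = await rootBundle.load(assetPath);
  return data.buffer.asUint8List(data.offsetInBytes, data.lengthInBytes);
}

Before the removal this would have been called as, e.g., loadModelBytes(ModelFile.faceEmbeddingEnte).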


@@ -93,7 +93,6 @@ class _FacesItemWidgetState extends State<FacesItemWidget> {
      ];
    }

    // TODO: add deduplication of faces of same person
    final faceIdsToClusterIds = await FaceMLDataDB.instance
        .getFaceIdsToClusterIds(faces.map((face) => face.faceID));
    final Map<String, PersonEntity> persons =
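For the dedup TODO removed above, the face-to-cluster mapping fetched here is already enough to collapse duplicates: keep one representative face per cluster ID. A hedged sketch with illustrative names and types:

// Keeps the first face seen for each cluster; unclustered faces pass through.
List<String> dedupeByCluster(
  List<String> faceIds,
  Map<String, int> faceIdToClusterId,
) {
  final seenClusters = <int>{};
  return [
    for (final id in faceIds)
      if (faceIdToClusterId[id] == null ||
          seenClusters.add(faceIdToClusterId[id]!))
        id,
  ];
}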