Mobile face (#1799)

## Description

One fix related to the DB; the rest is just more logging.
This commit is contained in:
Neeraj Gupta 2024-05-21 17:02:40 +05:30 committed by GitHub
commit 608c97603b
No known key found for this signature in database
GPG key ID: B5690EEEBB952194
10 changed files with 84 additions and 77 deletions

View file

@ -27,7 +27,7 @@ class FaceMLDataDB {
static final Logger _logger = Logger("FaceMLDataDB"); static final Logger _logger = Logger("FaceMLDataDB");
static const _databaseName = "ente.face_ml_db.db"; static const _databaseName = "ente.face_ml_db.db";
static const _databaseVersion = 1; // static const _databaseVersion = 1;
FaceMLDataDB._privateConstructor(); FaceMLDataDB._privateConstructor();
@ -53,18 +53,12 @@ class FaceMLDataDB {
} }
Future<void> _onCreate(SqliteDatabase asyncDBConnection) async { Future<void> _onCreate(SqliteDatabase asyncDBConnection) async {
final migrations = SqliteMigrations() await asyncDBConnection.execute(createFacesTable);
..add( await asyncDBConnection.execute(createFaceClustersTable);
SqliteMigration(_databaseVersion, (tx) async { await asyncDBConnection.execute(createClusterPersonTable);
await tx.execute(createFacesTable); await asyncDBConnection.execute(createClusterSummaryTable);
await tx.execute(createFaceClustersTable); await asyncDBConnection.execute(createNotPersonFeedbackTable);
await tx.execute(createClusterPersonTable); await asyncDBConnection.execute(fcClusterIDIndex);
await tx.execute(createClusterSummaryTable);
await tx.execute(createNotPersonFeedbackTable);
await tx.execute(fcClusterIDIndex);
}),
);
await migrations.migrate(asyncDBConnection);
} }
// bulkInsertFaces inserts the faces in the database in batches of 1000. // bulkInsertFaces inserts the faces in the database in batches of 1000.
@ -408,7 +402,7 @@ class FaceMLDataDB {
final clusterID = map[fcClusterID] as int; final clusterID = map[fcClusterID] as int;
final faceID = map[fcFaceId] as String; final faceID = map[fcFaceId] as String;
result.putIfAbsent(personID, () => {}).putIfAbsent(clusterID, () => {}) result.putIfAbsent(personID, () => {}).putIfAbsent(clusterID, () => {})
..add(faceID); .add(faceID);
} }
return result; return result;
} }
@ -922,7 +916,7 @@ class FaceMLDataDB {
await db.execute(dropNotPersonFeedbackTable); await db.execute(dropNotPersonFeedbackTable);
await db.execute(dropClusterSummaryTable); await db.execute(dropClusterSummaryTable);
await db.execute(dropFaceClustersTable); await db.execute(dropFaceClustersTable);
await db.execute(createClusterPersonTable); await db.execute(createClusterPersonTable);
await db.execute(createNotPersonFeedbackTable); await db.execute(createNotPersonFeedbackTable);
await db.execute(createClusterSummaryTable); await db.execute(createClusterSummaryTable);

View file

@ -1,42 +1,34 @@
/// Bounding box of a face. /// Bounding box of a face.
/// ///
/// [xMin] and [yMin] are the coordinates of the top left corner of the box, and /// [x] and [y] are the minimum coordinates, so the top left corner of the box.
/// [width] and [height] are the width and height of the box. /// [width] and [height] are the width and height of the box.
/// ///
/// WARNING: All values are relative to the original image size, so in the range [0, 1]. /// WARNING: All values are relative to the original image size, so in the range [0, 1].
class FaceBox { class FaceBox {
final double xMin; final double x;
final double yMin; final double y;
final double width; final double width;
final double height; final double height;
FaceBox({ FaceBox({
required this.xMin, required this.x,
required this.yMin, required this.y,
required this.width, required this.width,
required this.height, required this.height,
}); });
factory FaceBox.fromJson(Map<String, dynamic> json) { factory FaceBox.fromJson(Map<String, dynamic> json) {
return FaceBox( return FaceBox(
xMin: (json['xMin'] is int x: (json['x'] as double?) ?? (json['xMin'] as double),
? (json['xMin'] as int).toDouble() y: (json['y'] as double?) ?? (json['yMin'] as double),
: json['xMin'] as double), width: json['width'] as double,
yMin: (json['yMin'] is int height: json['height'] as double,
? (json['yMin'] as int).toDouble()
: json['yMin'] as double),
width: (json['width'] is int
? (json['width'] as int).toDouble()
: json['width'] as double),
height: (json['height'] is int
? (json['height'] as int).toDouble()
: json['height'] as double),
); );
} }
Map<String, dynamic> toJson() => { Map<String, dynamic> toJson() => {
'xMin': xMin, 'x': x,
'yMin': yMin, 'y': y,
'width': width, 'width': width,
'height': height, 'height': height,
}; };

View file

@ -6,7 +6,7 @@ import "package:photos/services/machine_learning/face_ml/face_detection/detectio
/// Stores the face detection data, notably the bounding box and landmarks. /// Stores the face detection data, notably the bounding box and landmarks.
/// ///
/// - Bounding box: [FaceBox] with xMin, yMin (so top left corner), width, height /// - Bounding box: [FaceBox] with x, y (minimum, so top left corner), width, height
/// - Landmarks: list of [Landmark]s, namely leftEye, rightEye, nose, leftMouth, rightMouth /// - Landmarks: list of [Landmark]s, namely leftEye, rightEye, nose, leftMouth, rightMouth
/// ///
/// WARNING: All coordinates are relative to the image size, so in the range [0, 1]! /// WARNING: All coordinates are relative to the image size, so in the range [0, 1]!
@ -24,8 +24,8 @@ class Detection {
// empty box // empty box
Detection.empty() Detection.empty()
: box = FaceBox( : box = FaceBox(
xMin: 0, x: 0,
yMin: 0, y: 0,
width: 0, width: 0,
height: 0, height: 0,
), ),

View file

@ -143,8 +143,13 @@ class FaceMlService {
} }
canRunMLController = event.shouldRun; canRunMLController = event.shouldRun;
if (canRunMLController) { if (canRunMLController) {
_logger.info(
"MLController allowed running ML, faces indexing starting",
);
unawaited(indexAndClusterAll()); unawaited(indexAndClusterAll());
} else { } else {
_logger
.info("MLController stopped running ML, faces indexing paused");
pauseIndexing(); pauseIndexing();
} }
}); });
@ -527,9 +532,10 @@ class FaceMlService {
Bus.instance.fire(PeopleChangedEvent()); Bus.instance.fire(PeopleChangedEvent());
_logger.info('clusterAllImages() finished, in ' _logger.info('clusterAllImages() finished, in '
'${DateTime.now().difference(clusterAllImagesTime).inSeconds} seconds'); '${DateTime.now().difference(clusterAllImagesTime).inSeconds} seconds');
isClusteringRunning = false;
} catch (e, s) { } catch (e, s) {
_logger.severe("`clusterAllImages` failed", e, s); _logger.severe("`clusterAllImages` failed", e, s);
} finally {
isClusteringRunning = false;
} }
} }
@ -793,8 +799,8 @@ class FaceMlService {
final FaceResult faceRes = result.faces[i]; final FaceResult faceRes = result.faces[i];
final detection = face_detection.Detection( final detection = face_detection.Detection(
box: FaceBox( box: FaceBox(
xMin: faceRes.detection.xMinBox, x: faceRes.detection.xMinBox,
yMin: faceRes.detection.yMinBox, y: faceRes.detection.yMinBox,
width: faceRes.detection.width, width: faceRes.detection.width,
height: faceRes.detection.height, height: faceRes.detection.height,
), ),
@ -1011,6 +1017,7 @@ class FaceMlService {
file = await getThumbnailForUploadedFile(enteFile); file = await getThumbnailForUploadedFile(enteFile);
} else { } else {
file = await getFile(enteFile, isOrigin: true); file = await getFile(enteFile, isOrigin: true);
// TODO: This is returning null for Pragadees for all files, so something is wrong here!
} }
if (file == null) { if (file == null) {
_logger.warning("Could not get file for $enteFile"); _logger.warning("Could not get file for $enteFile");

View file

@ -3,6 +3,7 @@ import "dart:math" show max, min;
import "package:flutter/material.dart"; import "package:flutter/material.dart";
import "package:intl/intl.dart"; import "package:intl/intl.dart";
import "package:logging/logging.dart";
import "package:photos/core/event_bus.dart"; import "package:photos/core/event_bus.dart";
import 'package:photos/events/embedding_updated_event.dart'; import 'package:photos/events/embedding_updated_event.dart';
import "package:photos/face/db.dart"; import "package:photos/face/db.dart";
@ -26,6 +27,8 @@ import "package:photos/ui/components/toggle_switch_widget.dart";
import "package:photos/utils/data_util.dart"; import "package:photos/utils/data_util.dart";
import "package:photos/utils/local_settings.dart"; import "package:photos/utils/local_settings.dart";
final _logger = Logger("MachineLearningSettingsPage");
class MachineLearningSettingsPage extends StatefulWidget { class MachineLearningSettingsPage extends StatefulWidget {
const MachineLearningSettingsPage({super.key}); const MachineLearningSettingsPage({super.key});
@ -65,6 +68,7 @@ class _MachineLearningSettingsPageState
@override @override
Widget build(BuildContext context) { Widget build(BuildContext context) {
final bool facesFlag = flagService.faceSearchEnabled; final bool facesFlag = flagService.faceSearchEnabled;
_logger.info("On page open, facesFlag: $facesFlag");
return Scaffold( return Scaffold(
body: CustomScrollView( body: CustomScrollView(
primary: false, primary: false,
@ -435,16 +439,22 @@ class FaceRecognitionStatusWidgetState
} }
Future<(int, int, int, double)> getIndexStatus() async { Future<(int, int, int, double)> getIndexStatus() async {
final indexedFiles = await FaceMLDataDB.instance try {
.getIndexedFileCount(minimumMlVersion: faceMlVersion); final indexedFiles = await FaceMLDataDB.instance
final indexableFiles = (await FaceMlService.getIndexableFileIDs()).length; .getIndexedFileCount(minimumMlVersion: faceMlVersion);
final showIndexedFiles = min(indexedFiles, indexableFiles); final indexableFiles = (await FaceMlService.getIndexableFileIDs()).length;
final pendingFiles = max(indexableFiles - indexedFiles, 0); final showIndexedFiles = min(indexedFiles, indexableFiles);
final foundFaces = await FaceMLDataDB.instance.getTotalFaceCount(); final pendingFiles = max(indexableFiles - indexedFiles, 0);
final clusteredFaces = await FaceMLDataDB.instance.getClusteredFaceCount(); final foundFaces = await FaceMLDataDB.instance.getTotalFaceCount();
final clusteringDoneRatio = clusteredFaces / foundFaces; final clusteredFaces =
await FaceMLDataDB.instance.getClusteredFaceCount();
final clusteringDoneRatio = clusteredFaces / foundFaces;
return (showIndexedFiles, pendingFiles, foundFaces, clusteringDoneRatio); return (showIndexedFiles, pendingFiles, foundFaces, clusteringDoneRatio);
} catch (e, s) {
_logger.severe('Error getting face recognition status', e, s);
rethrow;
}
} }
@override @override

View file

@ -49,10 +49,8 @@ class CroppedFaceImageView extends StatelessWidget {
final faceBox = face.detection.box; final faceBox = face.detection.box;
final double relativeFaceCenterX = final double relativeFaceCenterX = faceBox.x + faceBox.width / 2;
faceBox.xMin + faceBox.width / 2; final double relativeFaceCenterY = faceBox.y + faceBox.height / 2;
final double relativeFaceCenterY =
faceBox.yMin + faceBox.height / 2;
const double desiredFaceHeightRelativeToWidget = 8 / 10; const double desiredFaceHeightRelativeToWidget = 8 / 10;
final double scale = final double scale =

View file

@ -110,8 +110,8 @@ FaceBoxImage _getSquareFaceBoxImage(img.Image image, FaceBox faceBox) {
final width = (image.width * faceBox.width).round(); final width = (image.width * faceBox.width).round();
final height = (image.height * faceBox.height).round(); final height = (image.height * faceBox.height).round();
final side = max(width, height); final side = max(width, height);
final xImage = (image.width * faceBox.xMin).round(); final xImage = (image.width * faceBox.x).round();
final yImage = (image.height * faceBox.yMin).round(); final yImage = (image.height * faceBox.y).round();
if (height >= width) { if (height >= width) {
final xImageAdj = (xImage - (height - width) / 2).round(); final xImageAdj = (xImage - (height - width) / 2).round();

View file

@ -37,25 +37,30 @@ Future<File?> getFile(
bool isOrigin = false, bool isOrigin = false,
} // only relevant for live photos } // only relevant for live photos
) async { ) async {
if (file.isRemoteFile) { try {
return getFileFromServer(file, liveVideo: liveVideo); if (file.isRemoteFile) {
} else { return getFileFromServer(file, liveVideo: liveVideo);
final String key = file.tag + liveVideo.toString() + isOrigin.toString(); } else {
final cachedFile = FileLruCache.get(key); final String key = file.tag + liveVideo.toString() + isOrigin.toString();
if (cachedFile == null) { final cachedFile = FileLruCache.get(key);
final diskFile = await _getLocalDiskFile( if (cachedFile == null) {
file, final diskFile = await _getLocalDiskFile(
liveVideo: liveVideo, file,
isOrigin: isOrigin, liveVideo: liveVideo,
); isOrigin: isOrigin,
// do not cache origin file for IOS as they are immediately deleted );
// after usage // do not cache origin file for IOS as they are immediately deleted
if (!(isOrigin && Platform.isIOS) && diskFile != null) { // after usage
FileLruCache.put(key, diskFile); if (!(isOrigin && Platform.isIOS) && diskFile != null) {
FileLruCache.put(key, diskFile);
}
return diskFile;
} }
return diskFile; return cachedFile;
} }
return cachedFile; } catch (e, s) {
_logger.warning("Failed to get file", e, s);
return null;
} }
} }

View file

@ -559,6 +559,7 @@ Future<Image> cropImageWithCanvasSimple(
} }
@Deprecated('Old image processing method, use `cropImage` instead!') @Deprecated('Old image processing method, use `cropImage` instead!')
/// Crops an [image] based on the specified [x], [y], [width] and [height]. /// Crops an [image] based on the specified [x], [y], [width] and [height].
/// Optionally, the cropped image can be resized to comply with a [maxSize] and/or [minSize]. /// Optionally, the cropped image can be resized to comply with a [maxSize] and/or [minSize].
/// Optionally, the cropped image can be rotated from the center by [rotation] radians. /// Optionally, the cropped image can be rotated from the center by [rotation] radians.
@ -1276,8 +1277,8 @@ Future<List<Uint8List>> generateFaceThumbnails(
for (final faceBox in faceBoxes) { for (final faceBox in faceBoxes) {
// Note that the faceBox values are relative to the image size, so we need to convert them to absolute values first // Note that the faceBox values are relative to the image size, so we need to convert them to absolute values first
final double xMinAbs = faceBox.xMin * img.width; final double xMinAbs = faceBox.x * img.width;
final double yMinAbs = faceBox.yMin * img.height; final double yMinAbs = faceBox.y * img.height;
final double widthAbs = faceBox.width * img.width; final double widthAbs = faceBox.width * img.width;
final double heightAbs = faceBox.height * img.height; final double heightAbs = faceBox.height * img.height;
@ -1323,8 +1324,8 @@ Future<List<Uint8List>> generateFaceThumbnailsUsingCanvas(
final futureFaceThumbnails = <Future<Uint8List>>[]; final futureFaceThumbnails = <Future<Uint8List>>[];
for (final faceBox in faceBoxes) { for (final faceBox in faceBoxes) {
// Note that the faceBox values are relative to the image size, so we need to convert them to absolute values first // Note that the faceBox values are relative to the image size, so we need to convert them to absolute values first
final double xMinAbs = faceBox.xMin * img.width; final double xMinAbs = faceBox.x * img.width;
final double yMinAbs = faceBox.yMin * img.height; final double yMinAbs = faceBox.y * img.height;
final double widthAbs = faceBox.width * img.width; final double widthAbs = faceBox.width * img.width;
final double heightAbs = faceBox.height * img.height; final double heightAbs = faceBox.height * img.height;

View file

@ -12,7 +12,7 @@ description: ente photos application
# Read more about iOS versioning at # Read more about iOS versioning at
# https://developer.apple.com/library/archive/documentation/General/Reference/InfoPlistKeyReference/Articles/CoreFoundationKeys.html # https://developer.apple.com/library/archive/documentation/General/Reference/InfoPlistKeyReference/Articles/CoreFoundationKeys.html
version: 0.8.97+617 version: 0.8.98+618
publish_to: none publish_to: none
environment: environment: