diff --git a/mobile/lib/face/db.dart b/mobile/lib/face/db.dart index 9316916a3..c72b197b4 100644 --- a/mobile/lib/face/db.dart +++ b/mobile/lib/face/db.dart @@ -27,7 +27,7 @@ class FaceMLDataDB { static final Logger _logger = Logger("FaceMLDataDB"); static const _databaseName = "ente.face_ml_db.db"; - static const _databaseVersion = 1; + // static const _databaseVersion = 1; FaceMLDataDB._privateConstructor(); @@ -53,18 +53,12 @@ class FaceMLDataDB { } Future _onCreate(SqliteDatabase asyncDBConnection) async { - final migrations = SqliteMigrations() - ..add( - SqliteMigration(_databaseVersion, (tx) async { - await tx.execute(createFacesTable); - await tx.execute(createFaceClustersTable); - await tx.execute(createClusterPersonTable); - await tx.execute(createClusterSummaryTable); - await tx.execute(createNotPersonFeedbackTable); - await tx.execute(fcClusterIDIndex); - }), - ); - await migrations.migrate(asyncDBConnection); + await asyncDBConnection.execute(createFacesTable); + await asyncDBConnection.execute(createFaceClustersTable); + await asyncDBConnection.execute(createClusterPersonTable); + await asyncDBConnection.execute(createClusterSummaryTable); + await asyncDBConnection.execute(createNotPersonFeedbackTable); + await asyncDBConnection.execute(fcClusterIDIndex); } // bulkInsertFaces inserts the faces in the database in batches of 1000. 
@@ -408,7 +402,7 @@ class FaceMLDataDB { final clusterID = map[fcClusterID] as int; final faceID = map[fcFaceId] as String; result.putIfAbsent(personID, () => {}).putIfAbsent(clusterID, () => {}) - ..add(faceID); + .add(faceID); } return result; } @@ -922,7 +916,7 @@ class FaceMLDataDB { await db.execute(dropNotPersonFeedbackTable); await db.execute(dropClusterSummaryTable); await db.execute(dropFaceClustersTable); - + await db.execute(createClusterPersonTable); await db.execute(createNotPersonFeedbackTable); await db.execute(createClusterSummaryTable); diff --git a/mobile/lib/face/model/box.dart b/mobile/lib/face/model/box.dart index 73d7dea38..3c5be3f9f 100644 --- a/mobile/lib/face/model/box.dart +++ b/mobile/lib/face/model/box.dart @@ -1,42 +1,34 @@ /// Bounding box of a face. /// -/// [xMin] and [yMin] are the coordinates of the top left corner of the box, and +/// [x] and [y] are the minimum coordinates, so the top left corner of the box. /// [width] and [height] are the width and height of the box. /// /// WARNING: All values are relative to the original image size, so in the range [0, 1]. class FaceBox { - final double xMin; - final double yMin; + final double x; + final double y; final double width; final double height; FaceBox({ - required this.xMin, - required this.yMin, + required this.x, + required this.y, required this.width, required this.height, }); factory FaceBox.fromJson(Map json) { return FaceBox( - xMin: (json['xMin'] is int - ? (json['xMin'] as int).toDouble() - : json['xMin'] as double), - yMin: (json['yMin'] is int - ? (json['yMin'] as int).toDouble() - : json['yMin'] as double), - width: (json['width'] is int - ? (json['width'] as int).toDouble() - : json['width'] as double), - height: (json['height'] is int - ? (json['height'] as int).toDouble() - : json['height'] as double), + x: (json['x'] as double?) ?? (json['xMin'] as double), + y: (json['y'] as double?) ?? 
(json['yMin'] as double), + width: json['width'] as double, + height: json['height'] as double, ); } Map toJson() => { - 'xMin': xMin, - 'yMin': yMin, + 'x': x, + 'y': y, 'width': width, 'height': height, }; diff --git a/mobile/lib/face/model/detection.dart b/mobile/lib/face/model/detection.dart index 6fc5fa07b..44329196a 100644 --- a/mobile/lib/face/model/detection.dart +++ b/mobile/lib/face/model/detection.dart @@ -6,7 +6,7 @@ import "package:photos/services/machine_learning/face_ml/face_detection/detectio /// Stores the face detection data, notably the bounding box and landmarks. /// -/// - Bounding box: [FaceBox] with xMin, yMin (so top left corner), width, height +/// - Bounding box: [FaceBox] with x, y (minimum, so top left corner), width, height /// - Landmarks: list of [Landmark]s, namely leftEye, rightEye, nose, leftMouth, rightMouth /// /// WARNING: All coordinates are relative to the image size, so in the range [0, 1]! @@ -24,8 +24,8 @@ class Detection { // empty box Detection.empty() : box = FaceBox( - xMin: 0, - yMin: 0, + x: 0, + y: 0, width: 0, height: 0, ), diff --git a/mobile/lib/services/machine_learning/face_ml/face_ml_service.dart b/mobile/lib/services/machine_learning/face_ml/face_ml_service.dart index 0bfbfcd22..9e72f4c55 100644 --- a/mobile/lib/services/machine_learning/face_ml/face_ml_service.dart +++ b/mobile/lib/services/machine_learning/face_ml/face_ml_service.dart @@ -143,8 +143,13 @@ class FaceMlService { } canRunMLController = event.shouldRun; if (canRunMLController) { + _logger.info( + "MLController allowed running ML, faces indexing starting", + ); unawaited(indexAndClusterAll()); } else { + _logger + .info("MLController stopped running ML, faces indexing paused"); pauseIndexing(); } }); @@ -527,9 +532,10 @@ class FaceMlService { Bus.instance.fire(PeopleChangedEvent()); _logger.info('clusterAllImages() finished, in ' '${DateTime.now().difference(clusterAllImagesTime).inSeconds} seconds'); - isClusteringRunning = false; } catch (e, s) 
{ _logger.severe("`clusterAllImages` failed", e, s); + } finally { + isClusteringRunning = false; } } @@ -793,8 +799,8 @@ class FaceMlService { final FaceResult faceRes = result.faces[i]; final detection = face_detection.Detection( box: FaceBox( - xMin: faceRes.detection.xMinBox, - yMin: faceRes.detection.yMinBox, + x: faceRes.detection.xMinBox, + y: faceRes.detection.yMinBox, width: faceRes.detection.width, height: faceRes.detection.height, ), @@ -1011,6 +1017,7 @@ class FaceMlService { file = await getThumbnailForUploadedFile(enteFile); } else { file = await getFile(enteFile, isOrigin: true); + // TODO: This is returning null for Pragadees for all files, so something is wrong here! } if (file == null) { _logger.warning("Could not get file for $enteFile"); diff --git a/mobile/lib/ui/settings/machine_learning_settings_page.dart b/mobile/lib/ui/settings/machine_learning_settings_page.dart index 9fa98e46f..1e63cf645 100644 --- a/mobile/lib/ui/settings/machine_learning_settings_page.dart +++ b/mobile/lib/ui/settings/machine_learning_settings_page.dart @@ -3,6 +3,7 @@ import "dart:math" show max, min; import "package:flutter/material.dart"; import "package:intl/intl.dart"; +import "package:logging/logging.dart"; import "package:photos/core/event_bus.dart"; import 'package:photos/events/embedding_updated_event.dart'; import "package:photos/face/db.dart"; @@ -26,6 +27,8 @@ import "package:photos/ui/components/toggle_switch_widget.dart"; import "package:photos/utils/data_util.dart"; import "package:photos/utils/local_settings.dart"; +final _logger = Logger("MachineLearningSettingsPage"); + class MachineLearningSettingsPage extends StatefulWidget { const MachineLearningSettingsPage({super.key}); @@ -65,6 +68,7 @@ class _MachineLearningSettingsPageState @override Widget build(BuildContext context) { final bool facesFlag = flagService.faceSearchEnabled; + _logger.info("On page open, facesFlag: $facesFlag"); return Scaffold( body: CustomScrollView( primary: false, @@ -435,16 
+439,22 @@ class FaceRecognitionStatusWidgetState } Future<(int, int, int, double)> getIndexStatus() async { - final indexedFiles = await FaceMLDataDB.instance - .getIndexedFileCount(minimumMlVersion: faceMlVersion); - final indexableFiles = (await FaceMlService.getIndexableFileIDs()).length; - final showIndexedFiles = min(indexedFiles, indexableFiles); - final pendingFiles = max(indexableFiles - indexedFiles, 0); - final foundFaces = await FaceMLDataDB.instance.getTotalFaceCount(); - final clusteredFaces = await FaceMLDataDB.instance.getClusteredFaceCount(); - final clusteringDoneRatio = clusteredFaces / foundFaces; + try { + final indexedFiles = await FaceMLDataDB.instance + .getIndexedFileCount(minimumMlVersion: faceMlVersion); + final indexableFiles = (await FaceMlService.getIndexableFileIDs()).length; + final showIndexedFiles = min(indexedFiles, indexableFiles); + final pendingFiles = max(indexableFiles - indexedFiles, 0); + final foundFaces = await FaceMLDataDB.instance.getTotalFaceCount(); + final clusteredFaces = + await FaceMLDataDB.instance.getClusteredFaceCount(); + final clusteringDoneRatio = clusteredFaces / foundFaces; - return (showIndexedFiles, pendingFiles, foundFaces, clusteringDoneRatio); + return (showIndexedFiles, pendingFiles, foundFaces, clusteringDoneRatio); + } catch (e, s) { + _logger.severe('Error getting face recognition status', e, s); + rethrow; + } } @override diff --git a/mobile/lib/ui/viewer/people/cropped_face_image_view.dart b/mobile/lib/ui/viewer/people/cropped_face_image_view.dart index 823a979fc..a76dbe5f0 100644 --- a/mobile/lib/ui/viewer/people/cropped_face_image_view.dart +++ b/mobile/lib/ui/viewer/people/cropped_face_image_view.dart @@ -49,10 +49,8 @@ class CroppedFaceImageView extends StatelessWidget { final faceBox = face.detection.box; - final double relativeFaceCenterX = - faceBox.xMin + faceBox.width / 2; - final double relativeFaceCenterY = - faceBox.yMin + faceBox.height / 2; + final double relativeFaceCenterX = 
faceBox.x + faceBox.width / 2; + final double relativeFaceCenterY = faceBox.y + faceBox.height / 2; const double desiredFaceHeightRelativeToWidget = 8 / 10; final double scale = diff --git a/mobile/lib/utils/face/face_util.dart b/mobile/lib/utils/face/face_util.dart index c49d57b40..56dc8f3bf 100644 --- a/mobile/lib/utils/face/face_util.dart +++ b/mobile/lib/utils/face/face_util.dart @@ -110,8 +110,8 @@ FaceBoxImage _getSquareFaceBoxImage(img.Image image, FaceBox faceBox) { final width = (image.width * faceBox.width).round(); final height = (image.height * faceBox.height).round(); final side = max(width, height); - final xImage = (image.width * faceBox.xMin).round(); - final yImage = (image.height * faceBox.yMin).round(); + final xImage = (image.width * faceBox.x).round(); + final yImage = (image.height * faceBox.y).round(); if (height >= width) { final xImageAdj = (xImage - (height - width) / 2).round(); diff --git a/mobile/lib/utils/file_util.dart b/mobile/lib/utils/file_util.dart index b845d2ff6..35240a3cc 100644 --- a/mobile/lib/utils/file_util.dart +++ b/mobile/lib/utils/file_util.dart @@ -37,25 +37,30 @@ Future getFile( bool isOrigin = false, } // only relevant for live photos ) async { - if (file.isRemoteFile) { - return getFileFromServer(file, liveVideo: liveVideo); - } else { - final String key = file.tag + liveVideo.toString() + isOrigin.toString(); - final cachedFile = FileLruCache.get(key); - if (cachedFile == null) { - final diskFile = await _getLocalDiskFile( - file, - liveVideo: liveVideo, - isOrigin: isOrigin, - ); - // do not cache origin file for IOS as they are immediately deleted - // after usage - if (!(isOrigin && Platform.isIOS) && diskFile != null) { - FileLruCache.put(key, diskFile); + try { + if (file.isRemoteFile) { + return getFileFromServer(file, liveVideo: liveVideo); + } else { + final String key = file.tag + liveVideo.toString() + isOrigin.toString(); + final cachedFile = FileLruCache.get(key); + if (cachedFile == null) { + final 
diskFile = await _getLocalDiskFile( + file, + liveVideo: liveVideo, + isOrigin: isOrigin, + ); + // do not cache origin file for IOS as they are immediately deleted + // after usage + if (!(isOrigin && Platform.isIOS) && diskFile != null) { + FileLruCache.put(key, diskFile); + } + return diskFile; } - return diskFile; + return cachedFile; } - return cachedFile; + } catch (e, s) { + _logger.warning("Failed to get file", e, s); + return null; } } diff --git a/mobile/lib/utils/image_ml_util.dart b/mobile/lib/utils/image_ml_util.dart index 8a6051793..916b9099c 100644 --- a/mobile/lib/utils/image_ml_util.dart +++ b/mobile/lib/utils/image_ml_util.dart @@ -559,6 +559,7 @@ Future cropImageWithCanvasSimple( } @Deprecated('Old image processing method, use `cropImage` instead!') + /// Crops an [image] based on the specified [x], [y], [width] and [height]. /// Optionally, the cropped image can be resized to comply with a [maxSize] and/or [minSize]. /// Optionally, the cropped image can be rotated from the center by [rotation] radians. 
@@ -1276,8 +1277,8 @@ Future> generateFaceThumbnails( for (final faceBox in faceBoxes) { // Note that the faceBox values are relative to the image size, so we need to convert them to absolute values first - final double xMinAbs = faceBox.xMin * img.width; - final double yMinAbs = faceBox.yMin * img.height; + final double xMinAbs = faceBox.x * img.width; + final double yMinAbs = faceBox.y * img.height; final double widthAbs = faceBox.width * img.width; final double heightAbs = faceBox.height * img.height; @@ -1323,8 +1324,8 @@ Future> generateFaceThumbnailsUsingCanvas( final futureFaceThumbnails = >[]; for (final faceBox in faceBoxes) { // Note that the faceBox values are relative to the image size, so we need to convert them to absolute values first - final double xMinAbs = faceBox.xMin * img.width; - final double yMinAbs = faceBox.yMin * img.height; + final double xMinAbs = faceBox.x * img.width; + final double yMinAbs = faceBox.y * img.height; final double widthAbs = faceBox.width * img.width; final double heightAbs = faceBox.height * img.height; diff --git a/mobile/pubspec.yaml b/mobile/pubspec.yaml index 6464496f5..b9d5345c3 100644 --- a/mobile/pubspec.yaml +++ b/mobile/pubspec.yaml @@ -12,7 +12,7 @@ description: ente photos application # Read more about iOS versioning at # https://developer.apple.com/library/archive/documentation/General/Reference/InfoPlistKeyReference/Articles/CoreFoundationKeys.html -version: 0.8.97+617 +version: 0.8.98+618 publish_to: none environment: