Mobile face (#1799)

## Description

One fix related to the DB; the rest is just more logging
This commit is contained in:
Neeraj Gupta 2024-05-21 17:02:40 +05:30 committed by GitHub
commit 608c97603b
No known key found for this signature in database
GPG key ID: B5690EEEBB952194
10 changed files with 84 additions and 77 deletions

View file

@@ -27,7 +27,7 @@ class FaceMLDataDB {
static final Logger _logger = Logger("FaceMLDataDB");
static const _databaseName = "ente.face_ml_db.db";
static const _databaseVersion = 1;
// static const _databaseVersion = 1;
FaceMLDataDB._privateConstructor();
@@ -53,18 +53,12 @@ class FaceMLDataDB {
}
Future<void> _onCreate(SqliteDatabase asyncDBConnection) async {
final migrations = SqliteMigrations()
..add(
SqliteMigration(_databaseVersion, (tx) async {
await tx.execute(createFacesTable);
await tx.execute(createFaceClustersTable);
await tx.execute(createClusterPersonTable);
await tx.execute(createClusterSummaryTable);
await tx.execute(createNotPersonFeedbackTable);
await tx.execute(fcClusterIDIndex);
}),
);
await migrations.migrate(asyncDBConnection);
await asyncDBConnection.execute(createFacesTable);
await asyncDBConnection.execute(createFaceClustersTable);
await asyncDBConnection.execute(createClusterPersonTable);
await asyncDBConnection.execute(createClusterSummaryTable);
await asyncDBConnection.execute(createNotPersonFeedbackTable);
await asyncDBConnection.execute(fcClusterIDIndex);
}
// bulkInsertFaces inserts the faces in the database in batches of 1000.
@@ -408,7 +402,7 @@ class FaceMLDataDB {
final clusterID = map[fcClusterID] as int;
final faceID = map[fcFaceId] as String;
result.putIfAbsent(personID, () => {}).putIfAbsent(clusterID, () => {})
..add(faceID);
.add(faceID);
}
return result;
}
@@ -922,7 +916,7 @@ class FaceMLDataDB {
await db.execute(dropNotPersonFeedbackTable);
await db.execute(dropClusterSummaryTable);
await db.execute(dropFaceClustersTable);
await db.execute(createClusterPersonTable);
await db.execute(createNotPersonFeedbackTable);
await db.execute(createClusterSummaryTable);

View file

@@ -1,42 +1,34 @@
/// Bounding box of a face.
///
/// [xMin] and [yMin] are the coordinates of the top left corner of the box, and
/// [x] and [y] are the minimum coordinates, so the top left corner of the box.
/// [width] and [height] are the width and height of the box.
///
/// WARNING: All values are relative to the original image size, so in the range [0, 1].
class FaceBox {
final double xMin;
final double yMin;
final double x;
final double y;
final double width;
final double height;
FaceBox({
required this.xMin,
required this.yMin,
required this.x,
required this.y,
required this.width,
required this.height,
});
factory FaceBox.fromJson(Map<String, dynamic> json) {
return FaceBox(
xMin: (json['xMin'] is int
? (json['xMin'] as int).toDouble()
: json['xMin'] as double),
yMin: (json['yMin'] is int
? (json['yMin'] as int).toDouble()
: json['yMin'] as double),
width: (json['width'] is int
? (json['width'] as int).toDouble()
: json['width'] as double),
height: (json['height'] is int
? (json['height'] as int).toDouble()
: json['height'] as double),
x: (json['x'] as double?) ?? (json['xMin'] as double),
y: (json['y'] as double?) ?? (json['yMin'] as double),
width: json['width'] as double,
height: json['height'] as double,
);
}
Map<String, dynamic> toJson() => {
'xMin': xMin,
'yMin': yMin,
'x': x,
'y': y,
'width': width,
'height': height,
};

View file

@@ -6,7 +6,7 @@ import "package:photos/services/machine_learning/face_ml/face_detection/detectio
/// Stores the face detection data, notably the bounding box and landmarks.
///
/// - Bounding box: [FaceBox] with xMin, yMin (so top left corner), width, height
/// - Bounding box: [FaceBox] with x, y (minimum, so top left corner), width, height
/// - Landmarks: list of [Landmark]s, namely leftEye, rightEye, nose, leftMouth, rightMouth
///
/// WARNING: All coordinates are relative to the image size, so in the range [0, 1]!
@@ -24,8 +24,8 @@ class Detection {
// empty box
Detection.empty()
: box = FaceBox(
xMin: 0,
yMin: 0,
x: 0,
y: 0,
width: 0,
height: 0,
),

View file

@@ -143,8 +143,13 @@ class FaceMlService {
}
canRunMLController = event.shouldRun;
if (canRunMLController) {
_logger.info(
"MLController allowed running ML, faces indexing starting",
);
unawaited(indexAndClusterAll());
} else {
_logger
.info("MLController stopped running ML, faces indexing paused");
pauseIndexing();
}
});
@@ -527,9 +532,10 @@ class FaceMlService {
Bus.instance.fire(PeopleChangedEvent());
_logger.info('clusterAllImages() finished, in '
'${DateTime.now().difference(clusterAllImagesTime).inSeconds} seconds');
isClusteringRunning = false;
} catch (e, s) {
_logger.severe("`clusterAllImages` failed", e, s);
} finally {
isClusteringRunning = false;
}
}
@@ -793,8 +799,8 @@ class FaceMlService {
final FaceResult faceRes = result.faces[i];
final detection = face_detection.Detection(
box: FaceBox(
xMin: faceRes.detection.xMinBox,
yMin: faceRes.detection.yMinBox,
x: faceRes.detection.xMinBox,
y: faceRes.detection.yMinBox,
width: faceRes.detection.width,
height: faceRes.detection.height,
),
@@ -1011,6 +1017,7 @@ class FaceMlService {
file = await getThumbnailForUploadedFile(enteFile);
} else {
file = await getFile(enteFile, isOrigin: true);
// TODO: This is returning null for Pragadees for all files, so something is wrong here!
}
if (file == null) {
_logger.warning("Could not get file for $enteFile");

View file

@@ -3,6 +3,7 @@ import "dart:math" show max, min;
import "package:flutter/material.dart";
import "package:intl/intl.dart";
import "package:logging/logging.dart";
import "package:photos/core/event_bus.dart";
import 'package:photos/events/embedding_updated_event.dart';
import "package:photos/face/db.dart";
@@ -26,6 +27,8 @@ import "package:photos/ui/components/toggle_switch_widget.dart";
import "package:photos/utils/data_util.dart";
import "package:photos/utils/local_settings.dart";
final _logger = Logger("MachineLearningSettingsPage");
class MachineLearningSettingsPage extends StatefulWidget {
const MachineLearningSettingsPage({super.key});
@@ -65,6 +68,7 @@ class _MachineLearningSettingsPageState
@override
Widget build(BuildContext context) {
final bool facesFlag = flagService.faceSearchEnabled;
_logger.info("On page open, facesFlag: $facesFlag");
return Scaffold(
body: CustomScrollView(
primary: false,
@@ -435,16 +439,22 @@ class FaceRecognitionStatusWidgetState
}
Future<(int, int, int, double)> getIndexStatus() async {
final indexedFiles = await FaceMLDataDB.instance
.getIndexedFileCount(minimumMlVersion: faceMlVersion);
final indexableFiles = (await FaceMlService.getIndexableFileIDs()).length;
final showIndexedFiles = min(indexedFiles, indexableFiles);
final pendingFiles = max(indexableFiles - indexedFiles, 0);
final foundFaces = await FaceMLDataDB.instance.getTotalFaceCount();
final clusteredFaces = await FaceMLDataDB.instance.getClusteredFaceCount();
final clusteringDoneRatio = clusteredFaces / foundFaces;
try {
final indexedFiles = await FaceMLDataDB.instance
.getIndexedFileCount(minimumMlVersion: faceMlVersion);
final indexableFiles = (await FaceMlService.getIndexableFileIDs()).length;
final showIndexedFiles = min(indexedFiles, indexableFiles);
final pendingFiles = max(indexableFiles - indexedFiles, 0);
final foundFaces = await FaceMLDataDB.instance.getTotalFaceCount();
final clusteredFaces =
await FaceMLDataDB.instance.getClusteredFaceCount();
final clusteringDoneRatio = clusteredFaces / foundFaces;
return (showIndexedFiles, pendingFiles, foundFaces, clusteringDoneRatio);
return (showIndexedFiles, pendingFiles, foundFaces, clusteringDoneRatio);
} catch (e, s) {
_logger.severe('Error getting face recognition status', e, s);
rethrow;
}
}
@override

View file

@@ -49,10 +49,8 @@ class CroppedFaceImageView extends StatelessWidget {
final faceBox = face.detection.box;
final double relativeFaceCenterX =
faceBox.xMin + faceBox.width / 2;
final double relativeFaceCenterY =
faceBox.yMin + faceBox.height / 2;
final double relativeFaceCenterX = faceBox.x + faceBox.width / 2;
final double relativeFaceCenterY = faceBox.y + faceBox.height / 2;
const double desiredFaceHeightRelativeToWidget = 8 / 10;
final double scale =

View file

@@ -110,8 +110,8 @@ FaceBoxImage _getSquareFaceBoxImage(img.Image image, FaceBox faceBox) {
final width = (image.width * faceBox.width).round();
final height = (image.height * faceBox.height).round();
final side = max(width, height);
final xImage = (image.width * faceBox.xMin).round();
final yImage = (image.height * faceBox.yMin).round();
final xImage = (image.width * faceBox.x).round();
final yImage = (image.height * faceBox.y).round();
if (height >= width) {
final xImageAdj = (xImage - (height - width) / 2).round();

View file

@@ -37,25 +37,30 @@ Future<File?> getFile(
bool isOrigin = false,
} // only relevant for live photos
) async {
if (file.isRemoteFile) {
return getFileFromServer(file, liveVideo: liveVideo);
} else {
final String key = file.tag + liveVideo.toString() + isOrigin.toString();
final cachedFile = FileLruCache.get(key);
if (cachedFile == null) {
final diskFile = await _getLocalDiskFile(
file,
liveVideo: liveVideo,
isOrigin: isOrigin,
);
// do not cache origin file for IOS as they are immediately deleted
// after usage
if (!(isOrigin && Platform.isIOS) && diskFile != null) {
FileLruCache.put(key, diskFile);
try {
if (file.isRemoteFile) {
return getFileFromServer(file, liveVideo: liveVideo);
} else {
final String key = file.tag + liveVideo.toString() + isOrigin.toString();
final cachedFile = FileLruCache.get(key);
if (cachedFile == null) {
final diskFile = await _getLocalDiskFile(
file,
liveVideo: liveVideo,
isOrigin: isOrigin,
);
// do not cache origin file for IOS as they are immediately deleted
// after usage
if (!(isOrigin && Platform.isIOS) && diskFile != null) {
FileLruCache.put(key, diskFile);
}
return diskFile;
}
return diskFile;
return cachedFile;
}
return cachedFile;
} catch (e, s) {
_logger.warning("Failed to get file", e, s);
return null;
}
}

View file

@@ -559,6 +559,7 @@ Future<Image> cropImageWithCanvasSimple(
}
@Deprecated('Old image processing method, use `cropImage` instead!')
/// Crops an [image] based on the specified [x], [y], [width] and [height].
/// Optionally, the cropped image can be resized to comply with a [maxSize] and/or [minSize].
/// Optionally, the cropped image can be rotated from the center by [rotation] radians.
@@ -1276,8 +1277,8 @@ Future<List<Uint8List>> generateFaceThumbnails(
for (final faceBox in faceBoxes) {
// Note that the faceBox values are relative to the image size, so we need to convert them to absolute values first
final double xMinAbs = faceBox.xMin * img.width;
final double yMinAbs = faceBox.yMin * img.height;
final double xMinAbs = faceBox.x * img.width;
final double yMinAbs = faceBox.y * img.height;
final double widthAbs = faceBox.width * img.width;
final double heightAbs = faceBox.height * img.height;
@@ -1323,8 +1324,8 @@ Future<List<Uint8List>> generateFaceThumbnailsUsingCanvas(
final futureFaceThumbnails = <Future<Uint8List>>[];
for (final faceBox in faceBoxes) {
// Note that the faceBox values are relative to the image size, so we need to convert them to absolute values first
final double xMinAbs = faceBox.xMin * img.width;
final double yMinAbs = faceBox.yMin * img.height;
final double xMinAbs = faceBox.x * img.width;
final double yMinAbs = faceBox.y * img.height;
final double widthAbs = faceBox.width * img.width;
final double heightAbs = faceBox.height * img.height;

View file

@@ -12,7 +12,7 @@ description: ente photos application
# Read more about iOS versioning at
# https://developer.apple.com/library/archive/documentation/General/Reference/InfoPlistKeyReference/Articles/CoreFoundationKeys.html
version: 0.8.97+617
version: 0.8.98+618
publish_to: none
environment: