import "dart:async"; import "dart:collection"; import "dart:io"; import "package:clip_ggml/clip_ggml.dart"; import "package:computer/computer.dart"; import "package:logging/logging.dart"; import "package:photos/core/configuration.dart"; import "package:photos/core/event_bus.dart"; import "package:photos/db/files_db.dart"; import "package:photos/db/object_box.dart"; import "package:photos/events/diff_sync_complete_event.dart"; import 'package:photos/events/embedding_updated_event.dart'; import "package:photos/events/file_uploaded_event.dart"; import "package:photos/models/embedding.dart"; import "package:photos/models/file/file.dart"; import "package:photos/services/semantic_search/embedding_store.dart"; import "package:photos/services/semantic_search/model_loader.dart"; import "package:photos/utils/local_settings.dart"; import "package:photos/utils/thumbnail_util.dart"; import "package:shared_preferences/shared_preferences.dart"; class SemanticSearchService { SemanticSearchService._privateConstructor(); static final SemanticSearchService instance = SemanticSearchService._privateConstructor(); static final Computer _computer = Computer.shared(); static const kModelName = "ggml-clip"; static const kEmbeddingLength = 512; static const kScoreThreshold = 0.23; final _logger = Logger("SemanticSearchService"); final _queue = Queue(); final _modelLoadFuture = Completer(); final _cachedEmbeddings = []; bool _isComputingEmbeddings = false; bool _isSyncing = false; Future>? _ongoingRequest; PendingQuery? _nextQuery; Future init(SharedPreferences preferences) async { try { final response = CLIP.ping("ping"); _logger.info("Ping succeeded, response: " + response); } catch (e, s) { _logger.severe("Ping failed", e, s); } if (Platform.isIOS) { return; } await EmbeddingStore.instance.init(preferences); await ModelLoader.instance.init(_computer); _setupCachedEmbeddings(); Bus.instance.on().listen((event) async { // Diff sync is complete, we can now pull embeddings from remote sync(); }); if (Configuration.instance.hasConfiguredAccount()) { EmbeddingStore.instance.pushEmbeddings(); } _loadModels().then((v) async { _logger.info("Getting text embedding"); await _getTextEmbedding("warm up text encoder"); _logger.info("Got text embedding"); }); Bus.instance.on().listen((event) async { _addToQueue(event.file); }); } Future sync() async { if (_isSyncing) { return; } _isSyncing = true; await EmbeddingStore.instance.pullEmbeddings(); await _backFill(); _isSyncing = false; } Future> search(String query) async { if (!LocalSettings.instance.hasEnabledMagicSearch() || !_modelLoadFuture.isCompleted) { return []; } if (_ongoingRequest == null) { _ongoingRequest = _getMatchingFiles(query).then((result) { _ongoingRequest = null; if (_nextQuery != null) { final next = _nextQuery; _nextQuery = null; search(next!.query).then((nextResult) { next.completer.complete(nextResult); }); } return result; }); return _ongoingRequest!; } else { // If there's an ongoing request, create or replace the nextCompleter. _nextQuery?.completer.future .timeout(const Duration(seconds: 0)); // Cancels the previous future. 
  Future<IndexStatus> getIndexStatus() async {
    return IndexStatus(
      _cachedEmbeddings.length,
      (await _getFileIDsToBeIndexed()).length,
    );
  }

  void _setupCachedEmbeddings() {
    ObjectBox.instance
        .getEmbeddingBox()
        .query()
        .watch(triggerImmediately: true)
        .map((query) => query.find())
        .listen((embeddings) {
      _logger.info("Updated embeddings: " + embeddings.length.toString());
      _cachedEmbeddings.clear();
      _cachedEmbeddings.addAll(embeddings);
      Bus.instance.fire(EmbeddingUpdatedEvent());
    });
  }

  Future<void> _backFill() async {
    if (!LocalSettings.instance.hasEnabledMagicSearch()) {
      return;
    }
    await _modelLoadFuture.future;
    _logger.info("Attempting backfill");
    final fileIDs = await _getFileIDsToBeIndexed();
    final files = await FilesDB.instance.getUploadedFiles(fileIDs);
    _logger.info(files.length.toString() + " to be embedded");
    _queue.addAll(files);
    _pollQueue();
  }

  Future<List<int>> _getFileIDsToBeIndexed() async {
    final uploadedFileIDs = await FilesDB.instance
        .getOwnedFileIDs(Configuration.instance.getUserID()!);
    final embeddedFileIDs = _cachedEmbeddings.map((e) => e.fileID).toSet();
    final queuedFileIDs = _queue.map((e) => e.uploadedFileID).toSet();
    uploadedFileIDs.removeWhere(
      (id) => embeddedFileIDs.contains(id) || queuedFileIDs.contains(id),
    );
    return uploadedFileIDs;
  }

  Future<void> clearQueue() async {
    _queue.clear();
  }

  Future<List<EnteFile>> _getMatchingFiles(String query) async {
    final textEmbedding = await _getTextEmbedding(query);
    final queryResults = await _getScores(textEmbedding);
    final filesMap = await FilesDB.instance
        .getFilesFromIDs(queryResults.map((e) => e.id).toList());
    final results = <EnteFile>[];
    for (final result in queryResults) {
      if (filesMap.containsKey(result.id)) {
        results.add(filesMap[result.id]!);
      }
    }
    _logger.info(results.length.toString() + " results");
    return results;
  }

  void _addToQueue(EnteFile file) {
    if (!LocalSettings.instance.hasEnabledMagicSearch()) {
      return;
    }
    _logger.info("Adding " + file.toString() + " to the queue");
    _queue.add(file);
    _pollQueue();
  }

  Future<void> _loadModels() async {
    _logger.info("Loading models");
    try {
      await ModelLoader.instance.loadImageModel();
      await ModelLoader.instance.loadTextModel();
      _modelLoadFuture.complete();
    } catch (e, s) {
      _logger.severe("Model loading failed", e, s);
    }
    _logger.info("Models loaded");
  }

  Future<void> _pollQueue() async {
    if (_isComputingEmbeddings) {
      return;
    }
    _isComputingEmbeddings = true;
    while (_queue.isNotEmpty) {
      await _computeImageEmbedding(_queue.removeLast());
    }
    _isComputingEmbeddings = false;
  }

  Future<void> _computeImageEmbedding(EnteFile file) async {
    if (!_modelLoadFuture.isCompleted) {
      return;
    }
    try {
      final filePath = (await getThumbnailForUploadedFile(file))!.path;
      _logger.info("Running clip over $file");
      final startTime = DateTime.now();
      final result = await _computer.compute(
        createImageEmbedding,
        param: {
          "imagePath": filePath,
        },
        taskName: "createImageEmbedding",
      ) as List<double>;
      final endTime = DateTime.now();
      _logger.info(
        "createImageEmbedding took: ${(endTime.millisecondsSinceEpoch - startTime.millisecondsSinceEpoch)}ms",
      );
      if (result.length != kEmbeddingLength) {
        _logger.severe("Discovered incorrect embedding for $file - $result");
        return;
      }
      final embedding = Embedding(
        fileID: file.uploadedFileID!,
        model: kModelName,
        embedding: result,
      );
      await EmbeddingStore.instance.storeEmbedding(
        file,
        embedding,
      );
    } catch (e, s) {
      _logger.severe(e, s);
    }
  }

  Future<List<double>> _getTextEmbedding(String query) async {
    _logger.info("Searching for " + query);
    final startTime = DateTime.now();
    final embedding = await _computer.compute(
      createTextEmbedding,
      param: {
        "text": query,
      },
      taskName: "createTextEmbedding",
    );
    final endTime = DateTime.now();
    _logger.info(
      "createTextEmbedding took: " +
          (endTime.millisecondsSinceEpoch - startTime.millisecondsSinceEpoch)
              .toString() +
          "ms",
    );
    return embedding;
  }

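  /// Scores [textEmbedding] against every cached image embedding via
  /// [_computer], keeping only matches above [kScoreThreshold], sorted by
  /// descending score.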
  Future<List<QueryResult>> _getScores(List<double> textEmbedding) async {
    final startTime = DateTime.now();
    final List<QueryResult> queryResults = await _computer.compute(
      computeBulkScore,
      param: {
        "imageEmbeddings": _cachedEmbeddings,
        "textEmbedding": textEmbedding,
      },
      taskName: "computeBulkScore",
    );
    final endTime = DateTime.now();
    _logger.info(
      "computingScores took: " +
          (endTime.millisecondsSinceEpoch - startTime.millisecondsSinceEpoch)
              .toString() +
          "ms",
    );
    return queryResults;
  }
}

List<double> createImageEmbedding(Map args) {
  return CLIP.createImageEmbedding(args["imagePath"]);
}

List<double> createTextEmbedding(Map args) {
  return CLIP.createTextEmbedding(args["text"]);
}

List<QueryResult> computeBulkScore(Map args) {
  final queryResults = <QueryResult>[];
  final imageEmbeddings = args["imageEmbeddings"] as List<Embedding>;
  final textEmbedding = args["textEmbedding"] as List<double>;
  for (final imageEmbedding in imageEmbeddings) {
    final score = computeScore(
      imageEmbedding.embedding,
      textEmbedding,
    );
    if (score >= SemanticSearchService.kScoreThreshold) {
      queryResults.add(QueryResult(imageEmbedding.fileID, score));
    }
  }

  queryResults.sort((first, second) => second.score.compareTo(first.score));
  return queryResults;
}

double computeScore(List<double> imageEmbedding, List<double> textEmbedding) {
  assert(
    imageEmbedding.length == textEmbedding.length,
    "The two embeddings should have the same length",
  );
  double score = 0;
  for (int index = 0; index < imageEmbedding.length; index++) {
    score += imageEmbedding[index] * textEmbedding[index];
  }
  return score;
}

class QueryResult {
  final int id;
  final double score;

  QueryResult(this.id, this.score);
}

class PendingQuery {
  final String query;
  final Completer<List<EnteFile>> completer;

  PendingQuery(this.query, this.completer);
}

class IndexStatus {
  final int indexedItems, pendingItems;

  IndexStatus(this.indexedItems, this.pendingItems);
}
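// Example usage (illustrative sketch; assumes magic search has been enabled in
// LocalSettings and that `preferences` is the SharedPreferences instance
// obtained during app startup):
//
//   await SemanticSearchService.instance.init(preferences);
//   await SemanticSearchService.instance.sync();
//   final results = await SemanticSearchService.instance.search("sunset");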