diff --git a/.github/workflows/auth-lint.yml b/.github/workflows/auth-lint.yml index 63d644c2e..e7c42e1a6 100644 --- a/.github/workflows/auth-lint.yml +++ b/.github/workflows/auth-lint.yml @@ -3,7 +3,7 @@ name: "Lint (auth)" on: # Run on every push to a branch other than main that changes auth/ push: - branches-ignore: [main, "deploy/**", "deploy-f/**"] + branches-ignore: [main] paths: - "auth/**" - ".github/workflows/auth-lint.yml" diff --git a/.github/workflows/auth-release.yml b/.github/workflows/auth-release.yml index 174b6c1d3..cf3749ae6 100644 --- a/.github/workflows/auth-release.yml +++ b/.github/workflows/auth-release.yml @@ -85,30 +85,21 @@ jobs: - name: Install dependencies for desktop build run: | sudo apt-get update -y - sudo apt-get install -y libsecret-1-dev libsodium-dev libwebkit2gtk-4.0-dev libfuse2 ninja-build libgtk-3-dev dpkg-dev pkg-config rpm patchelf libsqlite3-dev locate appindicator3-0.1 libappindicator3-dev libffi-dev libtiff5 + sudo apt-get install -y libsecret-1-dev libsodium-dev libwebkit2gtk-4.0-dev libfuse2 ninja-build libgtk-3-dev dpkg-dev pkg-config libsqlite3-dev locate appindicator3-0.1 libappindicator3-dev libffi-dev libtiff5 sudo updatedb --localpaths='/usr/lib/x86_64-linux-gnu' - - name: Install appimagetool - run: | - wget -O appimagetool "https://github.com/AppImage/AppImageKit/releases/download/continuous/appimagetool-x86_64.AppImage" - chmod +x appimagetool - mv appimagetool /usr/local/bin/ - - name: Build desktop app run: | flutter config --enable-linux-desktop dart pub global activate flutter_distributor flutter_distributor package --platform=linux --targets=deb --skip-clean - flutter_distributor package --platform=linux --targets=rpm --skip-clean - flutter_distributor package --platform=linux --targets=appimage --skip-clean mv dist/**/*-*-linux.deb artifacts/ente-${{ github.ref_name }}-x86_64.deb - mv dist/**/*-*-linux.rpm artifacts/ente-${{ github.ref_name }}-x86_64.rpm - mv dist/**/*-*-linux.AppImage artifacts/ente-${{ 
github.ref_name }}-x86_64.AppImage env: LIBSODIUM_USE_PKGCONFIG: 1 - - name: Generate checksums - run: sha256sum artifacts/ente-* > artifacts/sha256sum + - name: Generate checksums and push to artifacts + run: | + sha256sum artifacts/ente-* > artifacts/sha256sum-apk-deb - name: Create a draft GitHub release uses: ncipollo/release-action@v1 @@ -128,6 +119,61 @@ jobs: releaseFiles: auth/build/app/outputs/bundle/playstoreRelease/app-playstore-release.aab track: internal + build-fedora-etc: + runs-on: ubuntu-latest + + defaults: + run: + working-directory: auth + + steps: + - name: Checkout code and submodules + uses: actions/checkout@v4 + with: + submodules: recursive + + - name: Install Flutter ${{ env.FLUTTER_VERSION }} + uses: subosito/flutter-action@v2 + with: + channel: "stable" + flutter-version: ${{ env.FLUTTER_VERSION }} + cache: true + + - name: Create artifacts directory + run: mkdir artifacts + + - name: Install dependencies for desktop build + run: | + sudo apt-get update -y + sudo apt-get install -y libsecret-1-dev libsodium-dev libwebkit2gtk-4.0-dev libfuse2 ninja-build libgtk-3-dev dpkg-dev pkg-config rpm patchelf libsqlite3-dev locate libayatana-appindicator3-dev libffi-dev libtiff5 + sudo updatedb --localpaths='/usr/lib/x86_64-linux-gnu' + + - name: Install appimagetool + run: | + wget -O appimagetool "https://github.com/AppImage/AppImageKit/releases/download/continuous/appimagetool-x86_64.AppImage" + chmod +x appimagetool + mv appimagetool /usr/local/bin/ + + - name: Build desktop app + run: | + flutter config --enable-linux-desktop + dart pub global activate flutter_distributor + flutter_distributor package --platform=linux --targets=rpm --skip-clean + flutter_distributor package --platform=linux --targets=appimage --skip-clean + mv dist/**/*-*-linux.rpm artifacts/ente-${{ github.ref_name }}-x86_64.rpm + mv dist/**/*-*-linux.AppImage artifacts/ente-${{ github.ref_name }}-x86_64.AppImage + + - name: Generate checksums + run: sha256sum 
artifacts/ente-* >> artifacts/sha256sum-rpm-appimage + + - name: Create a draft GitHub release + uses: ncipollo/release-action@v1 + with: + artifacts: "auth/artifacts/*" + draft: true + allowUpdates: true + updateOnlyUnreleased: true + build-windows: runs-on: windows-latest diff --git a/.github/workflows/desktop-lint.yml b/.github/workflows/desktop-lint.yml index 0c1929e6a..d1cfda884 100644 --- a/.github/workflows/desktop-lint.yml +++ b/.github/workflows/desktop-lint.yml @@ -3,7 +3,7 @@ name: "Lint (desktop)" on: # Run on every push to a branch other than main that changes desktop/ push: - branches-ignore: [main, "deploy/**", "deploy-f/**"] + branches-ignore: [main] paths: - "desktop/**" - ".github/workflows/desktop-lint.yml" diff --git a/.github/workflows/docs-deploy.yml b/.github/workflows/docs-deploy.yml index 01b0c2254..b824fe5c3 100644 --- a/.github/workflows/docs-deploy.yml +++ b/.github/workflows/docs-deploy.yml @@ -37,11 +37,8 @@ jobs: run: yarn build - name: Publish - uses: cloudflare/pages-action@1 + uses: cloudflare/wrangler-action@v3 with: accountId: ${{ secrets.CLOUDFLARE_ACCOUNT_ID }} apiToken: ${{ secrets.CLOUDFLARE_API_TOKEN }} - projectName: ente - branch: help - directory: docs/docs/.vitepress/dist - wranglerVersion: "3" + command: pages deploy --project-name=ente --commit-dirty=true --branch=help docs/docs/.vitepress/dist diff --git a/.github/workflows/docs-verify-build.yml b/.github/workflows/docs-verify-build.yml index 5d31ff837..addb52a05 100644 --- a/.github/workflows/docs-verify-build.yml +++ b/.github/workflows/docs-verify-build.yml @@ -6,7 +6,7 @@ name: "Verify build (docs)" on: # Run on every push to a branch other than main that changes docs/ push: - branches-ignore: [main, "deploy/**", "deploy-f/**"] + branches-ignore: [main] paths: - "docs/**" - ".github/workflows/docs-verify-build.yml" diff --git a/.github/workflows/mobile-internal-release.yml b/.github/workflows/mobile-internal-release.yml index 4ee736742..c2e43d34f 100644 --- 
a/.github/workflows/mobile-internal-release.yml +++ b/.github/workflows/mobile-internal-release.yml @@ -1,4 +1,4 @@ -name: "Internal Release - Photos" +name: "Internal release (photos)" on: workflow_dispatch: # Allow manually running the action @@ -54,4 +54,3 @@ jobs: packageName: io.ente.photos releaseFiles: mobile/build/app/outputs/bundle/playstoreRelease/app-playstore-release.aab track: internal - changesNotSentForReview: true diff --git a/.github/workflows/mobile-lint.yml b/.github/workflows/mobile-lint.yml index 8abc6f0c7..493185b6b 100644 --- a/.github/workflows/mobile-lint.yml +++ b/.github/workflows/mobile-lint.yml @@ -3,7 +3,7 @@ name: "Lint (mobile)" on: # Run on every push to a branch other than main that changes mobile/ push: - branches-ignore: [main, f-droid, "deploy/**", "deploy-f/**"] + branches-ignore: [main, f-droid] paths: - "mobile/**" - ".github/workflows/mobile-lint.yml" diff --git a/.github/workflows/server-lint.yml b/.github/workflows/server-lint.yml index 30038b3b9..c051d0290 100644 --- a/.github/workflows/server-lint.yml +++ b/.github/workflows/server-lint.yml @@ -3,7 +3,7 @@ name: "Lint (server)" on: # Run on every push to a branch other than main that changes server/ push: - branches-ignore: [main, "deploy/**", "deploy-f/**"] + branches-ignore: [main] paths: - "server/**" - ".github/workflows/server-lint.yml" diff --git a/.github/workflows/web-deploy-accounts.yml b/.github/workflows/web-deploy-accounts.yml deleted file mode 100644 index 33da5ee6f..000000000 --- a/.github/workflows/web-deploy-accounts.yml +++ /dev/null @@ -1,43 +0,0 @@ -name: "Deploy (accounts)" - -on: - push: - # Run workflow on pushes to the deploy/accounts - branches: [deploy/accounts, deploy-f/accounts] - -jobs: - deploy: - runs-on: ubuntu-latest - - defaults: - run: - working-directory: web - - steps: - - name: Checkout code - uses: actions/checkout@v4 - with: - submodules: recursive - - - name: Setup node and enable yarn caching - uses: actions/setup-node@v4 - with: 
- node-version: 20 - cache: "yarn" - cache-dependency-path: "web/yarn.lock" - - - name: Install dependencies - run: yarn install - - - name: Build accounts - run: yarn build:accounts - - - name: Publish accounts - uses: cloudflare/pages-action@1 - with: - accountId: ${{ secrets.CLOUDFLARE_ACCOUNT_ID }} - apiToken: ${{ secrets.CLOUDFLARE_API_TOKEN }} - projectName: ente - branch: deploy/accounts - directory: web/apps/accounts/out - wranglerVersion: "3" diff --git a/.github/workflows/web-deploy-auth.yml b/.github/workflows/web-deploy-auth.yml deleted file mode 100644 index d195b62f8..000000000 --- a/.github/workflows/web-deploy-auth.yml +++ /dev/null @@ -1,43 +0,0 @@ -name: "Deploy (auth)" - -on: - push: - # Run workflow on pushes to the deploy/auth - branches: [deploy/auth] - -jobs: - deploy: - runs-on: ubuntu-latest - - defaults: - run: - working-directory: web - - steps: - - name: Checkout code - uses: actions/checkout@v4 - with: - submodules: recursive - - - name: Setup node and enable yarn caching - uses: actions/setup-node@v4 - with: - node-version: 20 - cache: "yarn" - cache-dependency-path: "web/yarn.lock" - - - name: Install dependencies - run: yarn install - - - name: Build auth - run: yarn build:auth - - - name: Publish auth - uses: cloudflare/pages-action@1 - with: - accountId: ${{ secrets.CLOUDFLARE_ACCOUNT_ID }} - apiToken: ${{ secrets.CLOUDFLARE_API_TOKEN }} - projectName: ente - branch: deploy/auth - directory: web/apps/auth/out - wranglerVersion: "3" diff --git a/.github/workflows/web-deploy-cast.yml b/.github/workflows/web-deploy-cast.yml deleted file mode 100644 index 01e17486d..000000000 --- a/.github/workflows/web-deploy-cast.yml +++ /dev/null @@ -1,43 +0,0 @@ -name: "Deploy (cast)" - -on: - push: - # Run workflow on pushes to the deploy/cast - branches: [deploy/cast, deploy-f/cast] - -jobs: - deploy: - runs-on: ubuntu-latest - - defaults: - run: - working-directory: web - - steps: - - name: Checkout code - uses: actions/checkout@v4 - with: - 
submodules: recursive - - name: Setup node and enable yarn caching - uses: actions/setup-node@v4 - with: - node-version: 20 - cache: "yarn" - cache-dependency-path: "web/yarn.lock" - - - name: Install dependencies - run: yarn install - - - name: Build cast - run: yarn build:cast - - - name: Publish cast - uses: cloudflare/pages-action@1 - with: - accountId: ${{ secrets.CLOUDFLARE_ACCOUNT_ID }} - apiToken: ${{ secrets.CLOUDFLARE_API_TOKEN }} - projectName: ente - branch: deploy/cast - directory: web/apps/cast/out - wranglerVersion: "3" diff --git a/.github/workflows/web-deploy-one.yml b/.github/workflows/web-deploy-one.yml new file mode 100644 index 000000000..77c338513 --- /dev/null +++ b/.github/workflows/web-deploy-one.yml @@ -0,0 +1,61 @@ +name: "Deploy one (web)" + +on: + workflow_dispatch: + inputs: + app: + description: "App to build and deploy" + type: choice + required: true + default: "photos" + options: + - "accounts" + - "auth" + - "cast" + - "payments" + - "photos" + +jobs: + deploy: + runs-on: ubuntu-latest + + defaults: + run: + working-directory: web + + steps: + - name: Checkout code + uses: actions/checkout@v4 + with: + submodules: recursive + + - name: Setup node and enable yarn caching + uses: actions/setup-node@v4 + with: + node-version: 20 + cache: "yarn" + cache-dependency-path: "web/yarn.lock" + + - name: Install dependencies + run: yarn install + + - name: Build ${{ inputs.app }} + run: yarn build:${{ inputs.app }} + + - name: Publish ${{ inputs.app }} to preview + uses: cloudflare/wrangler-action@v3 + with: + accountId: ${{ secrets.CLOUDFLARE_ACCOUNT_ID }} + apiToken: ${{ secrets.CLOUDFLARE_API_TOKEN }} + # [Note: Wrangler commit-dirty] + # + # Without the --commit-dirty flag, running the wrangler-action + # always prints a warning when used: + # + # Warning: Your working directory is a git repo and has uncommitted changes + # To silence this warning, pass in --commit-dirty=true + # + # There is no clear documentation of whether passing this 
is + # harmless, but all indications and in-practice tests seem to + # indicate so. + command: pages deploy --project-name=ente --commit-dirty=true --branch=deploy/${{ inputs.app }} web/apps/${{ inputs.app }}/out diff --git a/.github/workflows/web-deploy-payments.yml b/.github/workflows/web-deploy-payments.yml deleted file mode 100644 index 367e1db18..000000000 --- a/.github/workflows/web-deploy-payments.yml +++ /dev/null @@ -1,43 +0,0 @@ -name: "Deploy (payments)" - -on: - push: - # Run workflow on pushes to the deploy/payments - branches: [deploy/payments] - -jobs: - deploy: - runs-on: ubuntu-latest - - defaults: - run: - working-directory: web - - steps: - - name: Checkout code - uses: actions/checkout@v4 - with: - submodules: recursive - - - name: Setup node and enable yarn caching - uses: actions/setup-node@v4 - with: - node-version: 20 - cache: "yarn" - cache-dependency-path: "web/yarn.lock" - - - name: Install dependencies - run: yarn install - - - name: Build payments - run: yarn build:payments - - - name: Publish payments - uses: cloudflare/pages-action@1 - with: - accountId: ${{ secrets.CLOUDFLARE_ACCOUNT_ID }} - apiToken: ${{ secrets.CLOUDFLARE_API_TOKEN }} - projectName: ente - branch: deploy/payments - directory: web/apps/payments/dist - wranglerVersion: "3" diff --git a/.github/workflows/web-deploy-photos.yml b/.github/workflows/web-deploy-photos.yml deleted file mode 100644 index cb3a9db86..000000000 --- a/.github/workflows/web-deploy-photos.yml +++ /dev/null @@ -1,43 +0,0 @@ -name: "Deploy (photos)" - -on: - push: - # Run workflow on pushes to the deploy/photos - branches: [deploy/photos] - -jobs: - deploy: - runs-on: ubuntu-latest - - defaults: - run: - working-directory: web - - steps: - - name: Checkout code - uses: actions/checkout@v4 - with: - submodules: recursive - - - name: Setup node and enable yarn caching - uses: actions/setup-node@v4 - with: - node-version: 20 - cache: "yarn" - cache-dependency-path: "web/yarn.lock" - - - name: Install 
dependencies - run: yarn install - - - name: Build photos - run: yarn build:photos - - - name: Publish photos - uses: cloudflare/pages-action@1 - with: - accountId: ${{ secrets.CLOUDFLARE_ACCOUNT_ID }} - apiToken: ${{ secrets.CLOUDFLARE_API_TOKEN }} - projectName: ente - branch: deploy/photos - directory: web/apps/photos/out - wranglerVersion: "3" diff --git a/.github/workflows/web-preview.yml b/.github/workflows/web-deploy-preview.yml similarity index 84% rename from .github/workflows/web-preview.yml rename to .github/workflows/web-deploy-preview.yml index 8f39c0247..4bb187072 100644 --- a/.github/workflows/web-preview.yml +++ b/.github/workflows/web-deploy-preview.yml @@ -1,4 +1,4 @@ -name: "Preview (web)" +name: "Deploy preview (web)" on: workflow_dispatch: @@ -43,11 +43,8 @@ jobs: run: yarn build:${{ inputs.app }} - name: Publish ${{ inputs.app }} to preview - uses: cloudflare/pages-action@1 + uses: cloudflare/wrangler-action@v3 with: accountId: ${{ secrets.CLOUDFLARE_ACCOUNT_ID }} apiToken: ${{ secrets.CLOUDFLARE_API_TOKEN }} - projectName: ente - branch: preview - directory: web/apps/${{ inputs.app }}/out - wranglerVersion: "3" + command: pages deploy --project-name=ente --commit-dirty=true --branch=preview web/apps/${{ inputs.app }}/out diff --git a/.github/workflows/web-deploy-staff.yml b/.github/workflows/web-deploy-staff.yml index 4d386344d..854e16364 100644 --- a/.github/workflows/web-deploy-staff.yml +++ b/.github/workflows/web-deploy-staff.yml @@ -38,11 +38,8 @@ jobs: run: yarn build:staff - name: Publish staff - uses: cloudflare/pages-action@1 + uses: cloudflare/wrangler-action@v3 with: accountId: ${{ secrets.CLOUDFLARE_ACCOUNT_ID }} apiToken: ${{ secrets.CLOUDFLARE_API_TOKEN }} - projectName: ente - branch: deploy/staff - directory: web/apps/staff/dist - wranglerVersion: "3" + command: pages deploy --project-name=ente --commit-dirty=true --branch=deploy/staff web/apps/staff/dist diff --git a/.github/workflows/web-deploy-staging.yml 
b/.github/workflows/web-deploy-staging.yml new file mode 100644 index 000000000..ca3a6142b --- /dev/null +++ b/.github/workflows/web-deploy-staging.yml @@ -0,0 +1,86 @@ +name: "Deploy staging (web)" + +on: + schedule: + # Run everyday at ~3:00 PM IST + # + # See: [Note: Run workflow every 24 hours] + - cron: "25 9 * * *" + # Also allow manually running the workflow + workflow_dispatch: + +jobs: + deploy: + runs-on: ubuntu-latest + + defaults: + run: + working-directory: web + + steps: + - name: Checkout code + uses: actions/checkout@v4 + with: + submodules: recursive + + - name: Setup node and enable yarn caching + uses: actions/setup-node@v4 + with: + node-version: 20 + cache: "yarn" + cache-dependency-path: "web/yarn.lock" + + - name: Install dependencies + run: yarn install + + - name: Build photos + run: yarn build:photos + env: + NEXT_PUBLIC_ENTE_ALBUMS_ENDPOINT: https://albums.ente.sh + + - name: Publish photos + uses: cloudflare/wrangler-action@v3 + with: + accountId: ${{ secrets.CLOUDFLARE_ACCOUNT_ID }} + apiToken: ${{ secrets.CLOUDFLARE_API_TOKEN }} + command: pages deploy --project-name=ente --commit-dirty=true --branch=n-photos web/apps/photos/out + + - name: Build accounts + run: yarn build:accounts + + - name: Publish accounts + uses: cloudflare/wrangler-action@v3 + with: + accountId: ${{ secrets.CLOUDFLARE_ACCOUNT_ID }} + apiToken: ${{ secrets.CLOUDFLARE_API_TOKEN }} + command: pages deploy --project-name=ente --commit-dirty=true --branch=n-accounts web/apps/accounts/out + + - name: Build auth + run: yarn build:auth + + - name: Publish auth + uses: cloudflare/wrangler-action@v3 + with: + accountId: ${{ secrets.CLOUDFLARE_ACCOUNT_ID }} + apiToken: ${{ secrets.CLOUDFLARE_API_TOKEN }} + command: pages deploy --project-name=ente --commit-dirty=true --branch=n-auth web/apps/auth/out + + - name: Build cast + run: yarn build:cast + + - name: Publish cast + uses: cloudflare/wrangler-action@v3 + with: + accountId: ${{ secrets.CLOUDFLARE_ACCOUNT_ID }} + 
apiToken: ${{ secrets.CLOUDFLARE_API_TOKEN }} + command: pages deploy --project-name=ente --commit-dirty=true --branch=n-cast web/apps/cast/out + + - name: Build payments + run: yarn build:payments + + - name: Publish payments + uses: cloudflare/wrangler-action@v3 + with: + accountId: ${{ secrets.CLOUDFLARE_ACCOUNT_ID }} + apiToken: ${{ secrets.CLOUDFLARE_API_TOKEN }} + command: pages deploy --project-name=ente --commit-dirty=true --branch=n-payments web/apps/payments/dist diff --git a/.github/workflows/web-nightly.yml b/.github/workflows/web-deploy.yml similarity index 60% rename from .github/workflows/web-nightly.yml rename to .github/workflows/web-deploy.yml index 949738292..6f6a113f2 100644 --- a/.github/workflows/web-nightly.yml +++ b/.github/workflows/web-deploy.yml @@ -1,17 +1,21 @@ -name: "Nightly (web)" +name: "Deploy (web)" on: schedule: # [Note: Run workflow every 24 hours] # - # Run every 24 hours - First field is minute, second is hour of the day - # This runs 23:15 UTC everyday - 1 and 15 are just arbitrary offset to - # avoid scheduling it on the exact hour, as suggested by GitHub. + # Run everyday at ~8:00 AM IST (except Sundays). + # + # First field is minute, second is hour of the day. Last is day of week, + # 0 being Sunday. + # + # Add a few minutes of offset to avoid scheduling on exact hourly + # boundaries (recommended by GitHub to avoid congestion). 
# # https://docs.github.com/en/actions/using-workflows/events-that-trigger-workflows#schedule # https://crontab.guru/ # - - cron: "15 23 * * *" + - cron: "25 2 * * 1-6" # Also allow manually running the workflow workflow_dispatch: @@ -39,69 +43,52 @@ jobs: - name: Install dependencies run: yarn install + - name: Build photos + run: yarn build:photos + + - name: Publish photos + uses: cloudflare/wrangler-action@v3 + with: + accountId: ${{ secrets.CLOUDFLARE_ACCOUNT_ID }} + apiToken: ${{ secrets.CLOUDFLARE_API_TOKEN }} + command: pages deploy --project-name=ente --commit-dirty=true --branch=deploy/photos web/apps/photos/out + - name: Build accounts run: yarn build:accounts - name: Publish accounts - uses: cloudflare/pages-action@1 + uses: cloudflare/wrangler-action@v3 with: accountId: ${{ secrets.CLOUDFLARE_ACCOUNT_ID }} apiToken: ${{ secrets.CLOUDFLARE_API_TOKEN }} - projectName: ente - branch: n-accounts - directory: web/apps/accounts/out - wranglerVersion: "3" + command: pages deploy --project-name=ente --commit-dirty=true --branch=deploy/accounts web/apps/accounts/out - name: Build auth run: yarn build:auth - name: Publish auth - uses: cloudflare/pages-action@1 + uses: cloudflare/wrangler-action@v3 with: accountId: ${{ secrets.CLOUDFLARE_ACCOUNT_ID }} apiToken: ${{ secrets.CLOUDFLARE_API_TOKEN }} - projectName: ente - branch: n-auth - directory: web/apps/auth/out - wranglerVersion: "3" + command: pages deploy --project-name=ente --commit-dirty=true --branch=deploy/auth web/apps/auth/out - name: Build cast run: yarn build:cast - name: Publish cast - uses: cloudflare/pages-action@1 + uses: cloudflare/wrangler-action@v3 with: accountId: ${{ secrets.CLOUDFLARE_ACCOUNT_ID }} apiToken: ${{ secrets.CLOUDFLARE_API_TOKEN }} - projectName: ente - branch: n-cast - directory: web/apps/cast/out - wranglerVersion: "3" + command: pages deploy --project-name=ente --commit-dirty=true --branch=deploy/cast web/apps/cast/out - name: Build payments run: yarn build:payments - name: 
Publish payments - uses: cloudflare/pages-action@1 + uses: cloudflare/wrangler-action@v3 with: accountId: ${{ secrets.CLOUDFLARE_ACCOUNT_ID }} apiToken: ${{ secrets.CLOUDFLARE_API_TOKEN }} - projectName: ente - branch: n-payments - directory: web/apps/payments/dist - wranglerVersion: "3" - - - name: Build photos - run: yarn build:photos - env: - NEXT_PUBLIC_ENTE_ALBUMS_ENDPOINT: https://albums.ente.sh - - - name: Publish photos - uses: cloudflare/pages-action@1 - with: - accountId: ${{ secrets.CLOUDFLARE_ACCOUNT_ID }} - apiToken: ${{ secrets.CLOUDFLARE_API_TOKEN }} - projectName: ente - branch: n-photos - directory: web/apps/photos/out - wranglerVersion: "3" + command: pages deploy --project-name=ente --commit-dirty=true --branch=deploy/payments web/apps/payments/dist diff --git a/.github/workflows/web-lint.yml b/.github/workflows/web-lint.yml index baf2a98ab..7f5d27002 100644 --- a/.github/workflows/web-lint.yml +++ b/.github/workflows/web-lint.yml @@ -3,7 +3,7 @@ name: "Lint (web)" on: # Run on every push to a branch other than main that changes web/ push: - branches-ignore: [main, "deploy/**", "deploy-f/**"] + branches-ignore: [main] paths: - "web/**" - ".github/workflows/web-lint.yml" diff --git a/.gitignore b/.gitignore index 35ef93d42..8699b46ee 100644 --- a/.gitignore +++ b/.gitignore @@ -3,3 +3,6 @@ # macOS .DS_Store +.idea +.ente.authenticator.db +.ente.offline_authenticator.db diff --git a/auth/android/app/build.gradle b/auth/android/app/build.gradle index 5621b08b6..a0179af5b 100644 --- a/auth/android/app/build.gradle +++ b/auth/android/app/build.gradle @@ -1,3 +1,9 @@ +plugins { + id "com.android.application" + id "kotlin-android" + id "dev.flutter.flutter-gradle-plugin" +} + def localProperties = new Properties() def localPropertiesFile = rootProject.file('local.properties') if (localPropertiesFile.exists()) { @@ -6,11 +12,6 @@ if (localPropertiesFile.exists()) { } } -def flutterRoot = localProperties.getProperty('flutter.sdk') -if (flutterRoot == 
null) { - throw new GradleException("Flutter SDK not found. Define location with flutter.sdk in the local.properties file.") -} - def flutterVersionCode = localProperties.getProperty('flutter.versionCode') if (flutterVersionCode == null) { flutterVersionCode = '1' @@ -21,10 +22,6 @@ if (flutterVersionName == null) { flutterVersionName = '1.0' } -apply plugin: 'com.android.application' -apply plugin: 'kotlin-android' -apply from: "$flutterRoot/packages/flutter_tools/gradle/flutter.gradle" - def keystoreProperties = new Properties() def keystorePropertiesFile = rootProject.file('key.properties') if (keystorePropertiesFile.exists()) { @@ -32,7 +29,18 @@ if (keystorePropertiesFile.exists()) { } android { - compileSdkVersion 34 + namespace "io.ente.auth" + compileSdk 34 + ndkVersion flutter.ndkVersion + + compileOptions { + sourceCompatibility JavaVersion.VERSION_1_8 + targetCompatibility JavaVersion.VERSION_1_8 + } + + kotlinOptions { + jvmTarget = '1.8' + } sourceSets { main.java.srcDirs += 'src/main/kotlin' @@ -46,6 +54,8 @@ android { defaultConfig { applicationId "io.ente.auth" + // You can update the following values to match your application needs. + // For more information, see: https://docs.flutter.dev/deployment/android#reviewing-the-gradle-build-configuration. minSdkVersion 21 targetSdkVersion 33 versionCode flutterVersionCode.toInteger() @@ -105,13 +115,4 @@ flutter { source '../..' 
} -dependencies { - implementation 'io.sentry:sentry-android:2.0.0' - implementation "org.jetbrains.kotlin:kotlin-stdlib-jdk7:$kotlin_version" - implementation 'com.android.support:multidex:1.0.3' - implementation 'com.google.guava:guava:28.2-android' - implementation 'com.google.guava:listenablefuture:9999.0-empty-to-avoid-conflict-with-guava' - testImplementation 'junit:junit:4.12' - androidTestImplementation 'androidx.test:runner:1.1.1' - androidTestImplementation 'androidx.test.espresso:espresso-core:3.1.1' -} +dependencies {} diff --git a/auth/android/app/src/debug/AndroidManifest.xml b/auth/android/app/src/debug/AndroidManifest.xml index 68e4e89c4..399f6981d 100644 --- a/auth/android/app/src/debug/AndroidManifest.xml +++ b/auth/android/app/src/debug/AndroidManifest.xml @@ -1,6 +1,6 @@ - - diff --git a/auth/android/app/src/main/AndroidManifest.xml b/auth/android/app/src/main/AndroidManifest.xml index abe72b565..a7b34f1ad 100644 --- a/auth/android/app/src/main/AndroidManifest.xml +++ b/auth/android/app/src/main/AndroidManifest.xml @@ -1,6 +1,5 @@ + xmlns:tools="http://schemas.android.com/tools"> - + - diff --git a/auth/android/app/src/main/res/mipmap-hdpi/ic_launcher.png b/auth/android/app/src/main/res/mipmap-hdpi/ic_launcher.png deleted file mode 100644 index fbfe92399..000000000 Binary files a/auth/android/app/src/main/res/mipmap-hdpi/ic_launcher.png and /dev/null differ diff --git a/auth/android/app/src/main/res/mipmap-hdpi/launcher_icon.png b/auth/android/app/src/main/res/mipmap-hdpi/launcher_icon.png index 6fbcb6df9..be000c8b3 100644 Binary files a/auth/android/app/src/main/res/mipmap-hdpi/launcher_icon.png and b/auth/android/app/src/main/res/mipmap-hdpi/launcher_icon.png differ diff --git a/auth/android/app/src/main/res/mipmap-mdpi/ic_launcher.png b/auth/android/app/src/main/res/mipmap-mdpi/ic_launcher.png deleted file mode 100644 index 6105c4a2b..000000000 Binary files a/auth/android/app/src/main/res/mipmap-mdpi/ic_launcher.png and /dev/null differ diff 
--git a/auth/android/app/src/main/res/mipmap-mdpi/launcher_icon.png b/auth/android/app/src/main/res/mipmap-mdpi/launcher_icon.png index 13fdf3b88..f49d34bb5 100644 Binary files a/auth/android/app/src/main/res/mipmap-mdpi/launcher_icon.png and b/auth/android/app/src/main/res/mipmap-mdpi/launcher_icon.png differ diff --git a/auth/android/app/src/main/res/mipmap-xhdpi/ic_launcher.png b/auth/android/app/src/main/res/mipmap-xhdpi/ic_launcher.png deleted file mode 100644 index b34272b61..000000000 Binary files a/auth/android/app/src/main/res/mipmap-xhdpi/ic_launcher.png and /dev/null differ diff --git a/auth/android/app/src/main/res/mipmap-xhdpi/launcher_icon.png b/auth/android/app/src/main/res/mipmap-xhdpi/launcher_icon.png index 5f852e4a3..ef950b6e9 100644 Binary files a/auth/android/app/src/main/res/mipmap-xhdpi/launcher_icon.png and b/auth/android/app/src/main/res/mipmap-xhdpi/launcher_icon.png differ diff --git a/auth/android/app/src/main/res/mipmap-xxhdpi/ic_launcher.png b/auth/android/app/src/main/res/mipmap-xxhdpi/ic_launcher.png deleted file mode 100644 index faa2e9c60..000000000 Binary files a/auth/android/app/src/main/res/mipmap-xxhdpi/ic_launcher.png and /dev/null differ diff --git a/auth/android/app/src/main/res/mipmap-xxhdpi/launcher_icon.png b/auth/android/app/src/main/res/mipmap-xxhdpi/launcher_icon.png index 5c82f386a..e97eba5d2 100644 Binary files a/auth/android/app/src/main/res/mipmap-xxhdpi/launcher_icon.png and b/auth/android/app/src/main/res/mipmap-xxhdpi/launcher_icon.png differ diff --git a/auth/android/app/src/main/res/mipmap-xxxhdpi/ic_launcher.png b/auth/android/app/src/main/res/mipmap-xxxhdpi/ic_launcher.png deleted file mode 100644 index 9814894c6..000000000 Binary files a/auth/android/app/src/main/res/mipmap-xxxhdpi/ic_launcher.png and /dev/null differ diff --git a/auth/android/app/src/main/res/mipmap-xxxhdpi/launcher_icon.png b/auth/android/app/src/main/res/mipmap-xxxhdpi/launcher_icon.png index 3bea3482c..a37c745ae 100644 Binary files 
a/auth/android/app/src/main/res/mipmap-xxxhdpi/launcher_icon.png and b/auth/android/app/src/main/res/mipmap-xxxhdpi/launcher_icon.png differ diff --git a/auth/android/app/src/main/res/values-night-v31/styles.xml b/auth/android/app/src/main/res/values-night-v31/styles.xml index 2c379953f..c4a573dfe 100644 --- a/auth/android/app/src/main/res/values-night-v31/styles.xml +++ b/auth/android/app/src/main/res/values-night-v31/styles.xml @@ -4,7 +4,10 @@ diff --git a/auth/android/build.gradle b/auth/android/build.gradle index 47890036d..bc157bd1a 100644 --- a/auth/android/build.gradle +++ b/auth/android/build.gradle @@ -1,16 +1,3 @@ -buildscript { - ext.kotlin_version = '1.8.22' - repositories { - google() - mavenCentral() - } - - dependencies { - classpath 'com.android.tools.build:gradle:7.1.2' - classpath "org.jetbrains.kotlin:kotlin-gradle-plugin:$kotlin_version" - } -} - allprojects { repositories { google() @@ -21,6 +8,8 @@ allprojects { rootProject.buildDir = '../build' subprojects { project.buildDir = "${rootProject.buildDir}/${project.name}" +} +subprojects { project.evaluationDependsOn(':app') } diff --git a/auth/android/gradle.properties b/auth/android/gradle.properties index 94adc3a3f..598d13fee 100644 --- a/auth/android/gradle.properties +++ b/auth/android/gradle.properties @@ -1,3 +1,3 @@ -org.gradle.jvmargs=-Xmx1536M +org.gradle.jvmargs=-Xmx4G android.useAndroidX=true android.enableJetifier=true diff --git a/auth/android/gradle/wrapper/gradle-wrapper.properties b/auth/android/gradle/wrapper/gradle-wrapper.properties index cc5527d78..e1ca574ef 100644 --- a/auth/android/gradle/wrapper/gradle-wrapper.properties +++ b/auth/android/gradle/wrapper/gradle-wrapper.properties @@ -1,6 +1,5 @@ -#Fri Jun 23 08:50:38 CEST 2017 distributionBase=GRADLE_USER_HOME distributionPath=wrapper/dists zipStoreBase=GRADLE_USER_HOME zipStorePath=wrapper/dists -distributionUrl=https\://services.gradle.org/distributions/gradle-7.4-all.zip 
+distributionUrl=https\://services.gradle.org/distributions/gradle-7.6.3-all.zip diff --git a/auth/android/settings.gradle b/auth/android/settings.gradle index 44e62bcf0..748caceba 100644 --- a/auth/android/settings.gradle +++ b/auth/android/settings.gradle @@ -1,11 +1,26 @@ -include ':app' +pluginManagement { + def flutterSdkPath = { + def properties = new Properties() + file("local.properties").withInputStream { properties.load(it) } + def flutterSdkPath = properties.getProperty("flutter.sdk") + assert flutterSdkPath != null, "flutter.sdk not set in local.properties" + return flutterSdkPath + } + settings.ext.flutterSdkPath = flutterSdkPath() -def localPropertiesFile = new File(rootProject.projectDir, "local.properties") -def properties = new Properties() + includeBuild("${settings.ext.flutterSdkPath}/packages/flutter_tools/gradle") -assert localPropertiesFile.exists() -localPropertiesFile.withReader("UTF-8") { reader -> properties.load(reader) } + repositories { + google() + mavenCentral() + gradlePluginPortal() + } +} -def flutterSdkPath = properties.getProperty("flutter.sdk") -assert flutterSdkPath != null, "flutter.sdk not set in local.properties" -apply from: "$flutterSdkPath/packages/flutter_tools/gradle/app_plugin_loader.gradle" +plugins { + id "dev.flutter.flutter-plugin-loader" version "1.0.0" + id "com.android.application" version "7.3.0" apply false + id "org.jetbrains.kotlin.android" version "1.8.22" apply false +} + +include ":app" diff --git a/auth/assets/generation-icons/icon-light-adaptive-bg.png b/auth/assets/generation-icons/icon-light-adaptive-bg.png new file mode 100644 index 000000000..d7bde2bdd Binary files /dev/null and b/auth/assets/generation-icons/icon-light-adaptive-bg.png differ diff --git a/auth/assets/generation-icons/icon-light-adaptive-fg.png b/auth/assets/generation-icons/icon-light-adaptive-fg.png index c3899f446..6c1121a49 100644 Binary files a/auth/assets/generation-icons/icon-light-adaptive-fg.png and 
b/auth/assets/generation-icons/icon-light-adaptive-fg.png differ diff --git a/auth/assets/generation-icons/icon-light.png b/auth/assets/generation-icons/icon-light.png index 5ef7b5a8a..cccf23a2c 100644 Binary files a/auth/assets/generation-icons/icon-light.png and b/auth/assets/generation-icons/icon-light.png differ diff --git a/auth/assets/simple-icons b/auth/assets/simple-icons index 8e7701d6a..8a3731352 160000 --- a/auth/assets/simple-icons +++ b/auth/assets/simple-icons @@ -1 +1 @@ -Subproject commit 8e7701d6a40462733043f54b3849faf35af70a83 +Subproject commit 8a3731352af133a02223a6c7b1f37c4abb096af0 diff --git a/auth/assets/splash-screen-dark.png b/auth/assets/splash-screen-dark.png deleted file mode 100644 index 5401a47ad..000000000 Binary files a/auth/assets/splash-screen-dark.png and /dev/null differ diff --git a/auth/assets/splash-screen-light.png b/auth/assets/splash-screen-light.png deleted file mode 100644 index a97df13b3..000000000 Binary files a/auth/assets/splash-screen-light.png and /dev/null differ diff --git a/auth/assets/splash/splash-icon-fg-12.png b/auth/assets/splash/splash-icon-fg-12.png new file mode 100644 index 000000000..1a82d32f2 Binary files /dev/null and b/auth/assets/splash/splash-icon-fg-12.png differ diff --git a/auth/assets/splash/splash-icon-fg.png b/auth/assets/splash/splash-icon-fg.png new file mode 100644 index 000000000..58139acb2 Binary files /dev/null and b/auth/assets/splash/splash-icon-fg.png differ diff --git a/auth/assets/svg/button-tint.svg b/auth/assets/svg/button-tint.svg new file mode 100644 index 000000000..1751aece1 --- /dev/null +++ b/auth/assets/svg/button-tint.svg @@ -0,0 +1,11 @@ + + + + + + + + + + + diff --git a/auth/assets/svg/pin-active.svg b/auth/assets/svg/pin-active.svg new file mode 100644 index 000000000..3ba870f5d --- /dev/null +++ b/auth/assets/svg/pin-active.svg @@ -0,0 +1,4 @@ + + + + diff --git a/auth/assets/svg/pin-card.svg b/auth/assets/svg/pin-card.svg new file mode 100644 index 
000000000..59b6e15e4 --- /dev/null +++ b/auth/assets/svg/pin-card.svg @@ -0,0 +1,11 @@ + + + + + + + + + + + diff --git a/auth/assets/svg/pin-inactive.svg b/auth/assets/svg/pin-inactive.svg new file mode 100644 index 000000000..2cc59a362 --- /dev/null +++ b/auth/assets/svg/pin-inactive.svg @@ -0,0 +1,3 @@ + + + diff --git a/auth/flutter b/auth/flutter new file mode 160000 index 000000000..ba3931984 --- /dev/null +++ b/auth/flutter @@ -0,0 +1 @@ +Subproject commit ba393198430278b6595976de84fe170f553cc728 diff --git a/auth/ios/Podfile.lock b/auth/ios/Podfile.lock index 991f52b42..814568fba 100644 --- a/auth/ios/Podfile.lock +++ b/auth/ios/Podfile.lock @@ -6,35 +6,35 @@ PODS: - ReachabilitySwift - device_info_plus (0.0.1): - Flutter - - DKImagePickerController/Core (4.3.4): + - DKImagePickerController/Core (4.3.9): - DKImagePickerController/ImageDataManager - DKImagePickerController/Resource - - DKImagePickerController/ImageDataManager (4.3.4) - - DKImagePickerController/PhotoGallery (4.3.4): + - DKImagePickerController/ImageDataManager (4.3.9) + - DKImagePickerController/PhotoGallery (4.3.9): - DKImagePickerController/Core - DKPhotoGallery - - DKImagePickerController/Resource (4.3.4) - - DKPhotoGallery (0.0.17): - - DKPhotoGallery/Core (= 0.0.17) - - DKPhotoGallery/Model (= 0.0.17) - - DKPhotoGallery/Preview (= 0.0.17) - - DKPhotoGallery/Resource (= 0.0.17) + - DKImagePickerController/Resource (4.3.9) + - DKPhotoGallery (0.0.19): + - DKPhotoGallery/Core (= 0.0.19) + - DKPhotoGallery/Model (= 0.0.19) + - DKPhotoGallery/Preview (= 0.0.19) + - DKPhotoGallery/Resource (= 0.0.19) - SDWebImage - SwiftyGif - - DKPhotoGallery/Core (0.0.17): + - DKPhotoGallery/Core (0.0.19): - DKPhotoGallery/Model - DKPhotoGallery/Preview - SDWebImage - SwiftyGif - - DKPhotoGallery/Model (0.0.17): + - DKPhotoGallery/Model (0.0.19): - SDWebImage - SwiftyGif - - DKPhotoGallery/Preview (0.0.17): + - DKPhotoGallery/Preview (0.0.19): - DKPhotoGallery/Model - DKPhotoGallery/Resource - SDWebImage - 
SwiftyGif - - DKPhotoGallery/Resource (0.0.17): + - DKPhotoGallery/Resource (0.0.19): - SDWebImage - SwiftyGif - file_picker (0.0.1): @@ -81,17 +81,15 @@ PODS: - qr_code_scanner (0.2.0): - Flutter - MTBBarcodeScanner - - ReachabilitySwift (5.2.1) - - SDWebImage (5.19.0): - - SDWebImage/Core (= 5.19.0) - - SDWebImage/Core (5.19.0) - - Sentry/HybridSDK (8.21.0): - - SentryPrivate (= 8.21.0) - - sentry_flutter (7.19.0): + - ReachabilitySwift (5.2.2) + - SDWebImage (5.19.2): + - SDWebImage/Core (= 5.19.2) + - SDWebImage/Core (5.19.2) + - Sentry/HybridSDK (8.25.0) + - sentry_flutter (7.20.1): - Flutter - FlutterMacOS - - Sentry/HybridSDK (= 8.21.0) - - SentryPrivate (8.21.0) + - Sentry/HybridSDK (= 8.25.0) - share_plus (0.0.1): - Flutter - shared_preferences_foundation (0.0.1): @@ -102,23 +100,23 @@ PODS: - sqflite (0.0.3): - Flutter - FlutterMacOS - - sqlite3 (3.45.1): - - sqlite3/common (= 3.45.1) - - sqlite3/common (3.45.1) - - sqlite3/fts5 (3.45.1): + - "sqlite3 (3.45.3+1)": + - "sqlite3/common (= 3.45.3+1)" + - "sqlite3/common (3.45.3+1)" + - "sqlite3/fts5 (3.45.3+1)": - sqlite3/common - - sqlite3/perf-threadsafe (3.45.1): + - "sqlite3/perf-threadsafe (3.45.3+1)": - sqlite3/common - - sqlite3/rtree (3.45.1): + - "sqlite3/rtree (3.45.3+1)": - sqlite3/common - sqlite3_flutter_libs (0.0.1): - Flutter - - sqlite3 (~> 3.45.1) + - "sqlite3 (~> 3.45.3+1)" - sqlite3/fts5 - sqlite3/perf-threadsafe - sqlite3/rtree - - SwiftyGif (5.4.4) - - Toast (4.1.0) + - SwiftyGif (5.4.5) + - Toast (4.1.1) - url_launcher_ios (0.0.1): - Flutter @@ -160,7 +158,6 @@ SPEC REPOS: - ReachabilitySwift - SDWebImage - Sentry - - SentryPrivate - sqlite3 - SwiftyGif - Toast @@ -225,19 +222,19 @@ SPEC CHECKSUMS: app_links: e70ca16b4b0f88253b3b3660200d4a10b4ea9795 connectivity_plus: bf0076dd84a130856aa636df1c71ccaff908fa1d device_info_plus: c6fb39579d0f423935b0c9ce7ee2f44b71b9fce6 - DKImagePickerController: b512c28220a2b8ac7419f21c491fc8534b7601ac - DKPhotoGallery: 
fdfad5125a9fdda9cc57df834d49df790dbb4179 + DKImagePickerController: 946cec48c7873164274ecc4624d19e3da4c1ef3c + DKPhotoGallery: b3834fecb755ee09a593d7c9e389d8b5d6deed60 file_picker: 15fd9539e4eb735dc54bae8c0534a7a9511a03de file_saver: 503e386464dbe118f630e17b4c2e1190fa0cf808 fk_user_agent: 1f47ec39291e8372b1d692b50084b0d54103c545 Flutter: e0871f40cf51350855a761d2e70bf5af5b9b5de7 - flutter_email_sender: 02d7443217d8c41483223627972bfdc09f74276b + flutter_email_sender: 10a22605f92809a11ef52b2f412db806c6082d40 flutter_inappwebview_ios: 97215cf7d4677db55df76782dbd2930c5e1c1ea0 flutter_local_authentication: 1172a4dd88f6306dadce067454e2c4caf07977bb flutter_local_notifications: 4cde75091f6327eb8517fa068a0a5950212d2086 flutter_native_splash: edf599c81f74d093a4daf8e17bd7a018854bc778 flutter_secure_storage: 23fc622d89d073675f2eaa109381aefbcf5a49be - fluttertoast: 31b00dabfa7fb7bacd9e7dbee580d7a2ff4bf265 + fluttertoast: 9f2f8e81bb5ce18facb9748d7855bf5a756fe3db local_auth_darwin: c7e464000a6a89e952235699e32b329457608d98 move_to_background: 39a5b79b26d577b0372cbe8a8c55e7aa9fcd3a2d MTBBarcodeScanner: f453b33c4b7dfe545d8c6484ed744d55671788cb @@ -246,19 +243,18 @@ SPEC CHECKSUMS: path_provider_foundation: 3784922295ac71e43754bd15e0653ccfd36a147c privacy_screen: 1a131c052ceb3c3659934b003b0d397c2381a24e qr_code_scanner: bb67d64904c3b9658ada8c402e8b4d406d5d796e - ReachabilitySwift: 5ae15e16814b5f9ef568963fb2c87aeb49158c66 - SDWebImage: 981fd7e860af070920f249fd092420006014c3eb - Sentry: ebc12276bd17613a114ab359074096b6b3725203 - sentry_flutter: 88ebea3f595b0bc16acc5bedacafe6d60c12dcd5 - SentryPrivate: d651efb234cf385ec9a1cdd3eff94b5e78a0e0fe + ReachabilitySwift: 2128f3a8c9107e1ad33574c6e58e8285d460b149 + SDWebImage: dfe95b2466a9823cf9f0c6d01217c06550d7b29a + Sentry: cd86fc55628f5b7c572cabe66cc8f95a9d2f165a + sentry_flutter: 4cb24c1055c556d7b27262ab2e179d1e5a0b9b0c share_plus: c3fef564749587fc939ef86ffb283ceac0baf9f5 shared_preferences_foundation: b4c3b4cddf1c21f02770737f147a3f5da9d39695 
sodium_libs: 1faae17af662384acbd13e41867a0008cd2e2318 sqflite: 673a0e54cc04b7d6dba8d24fb8095b31c3a99eec - sqlite3: 73b7fc691fdc43277614250e04d183740cb15078 - sqlite3_flutter_libs: af0e8fe9bce48abddd1ffdbbf839db0302d72d80 - SwiftyGif: 93a1cc87bf3a51916001cf8f3d63835fb64c819f - Toast: ec33c32b8688982cecc6348adeae667c1b9938da + sqlite3: 02d1f07eaaa01f80a1c16b4b31dfcbb3345ee01a + sqlite3_flutter_libs: 9bfe005308998aeca155330bbc2ea6dddf834a3b + SwiftyGif: 706c60cf65fa2bc5ee0313beece843c8eb8194d4 + Toast: 1f5ea13423a1e6674c4abdac5be53587ae481c4e url_launcher_ios: 6116280ddcfe98ab8820085d8d76ae7449447586 PODFILE CHECKSUM: b4e3a7eabb03395b66e81fc061789f61526ee6bb diff --git a/auth/ios/Runner/Assets.xcassets/AppIcon.appiconset/Icon-App-1024x1024@1x.png b/auth/ios/Runner/Assets.xcassets/AppIcon.appiconset/Icon-App-1024x1024@1x.png index 23ac5355e..c3d5e0675 100644 Binary files a/auth/ios/Runner/Assets.xcassets/AppIcon.appiconset/Icon-App-1024x1024@1x.png and b/auth/ios/Runner/Assets.xcassets/AppIcon.appiconset/Icon-App-1024x1024@1x.png differ diff --git a/auth/ios/Runner/Assets.xcassets/AppIcon.appiconset/Icon-App-20x20@1x.png b/auth/ios/Runner/Assets.xcassets/AppIcon.appiconset/Icon-App-20x20@1x.png index 233c57d84..92a287d03 100644 Binary files a/auth/ios/Runner/Assets.xcassets/AppIcon.appiconset/Icon-App-20x20@1x.png and b/auth/ios/Runner/Assets.xcassets/AppIcon.appiconset/Icon-App-20x20@1x.png differ diff --git a/auth/ios/Runner/Assets.xcassets/AppIcon.appiconset/Icon-App-20x20@2x.png b/auth/ios/Runner/Assets.xcassets/AppIcon.appiconset/Icon-App-20x20@2x.png index 8dfb32a97..73c2972e7 100644 Binary files a/auth/ios/Runner/Assets.xcassets/AppIcon.appiconset/Icon-App-20x20@2x.png and b/auth/ios/Runner/Assets.xcassets/AppIcon.appiconset/Icon-App-20x20@2x.png differ diff --git a/auth/ios/Runner/Assets.xcassets/AppIcon.appiconset/Icon-App-20x20@3x.png b/auth/ios/Runner/Assets.xcassets/AppIcon.appiconset/Icon-App-20x20@3x.png index 780cae73a..45a215602 100644 Binary files 
a/auth/ios/Runner/Assets.xcassets/AppIcon.appiconset/Icon-App-20x20@3x.png and b/auth/ios/Runner/Assets.xcassets/AppIcon.appiconset/Icon-App-20x20@3x.png differ diff --git a/auth/ios/Runner/Assets.xcassets/AppIcon.appiconset/Icon-App-29x29@1x.png b/auth/ios/Runner/Assets.xcassets/AppIcon.appiconset/Icon-App-29x29@1x.png index 09f8c298d..8a871c8e1 100644 Binary files a/auth/ios/Runner/Assets.xcassets/AppIcon.appiconset/Icon-App-29x29@1x.png and b/auth/ios/Runner/Assets.xcassets/AppIcon.appiconset/Icon-App-29x29@1x.png differ diff --git a/auth/ios/Runner/Assets.xcassets/AppIcon.appiconset/Icon-App-29x29@2x.png b/auth/ios/Runner/Assets.xcassets/AppIcon.appiconset/Icon-App-29x29@2x.png index d198bb082..3655056e3 100644 Binary files a/auth/ios/Runner/Assets.xcassets/AppIcon.appiconset/Icon-App-29x29@2x.png and b/auth/ios/Runner/Assets.xcassets/AppIcon.appiconset/Icon-App-29x29@2x.png differ diff --git a/auth/ios/Runner/Assets.xcassets/AppIcon.appiconset/Icon-App-29x29@3x.png b/auth/ios/Runner/Assets.xcassets/AppIcon.appiconset/Icon-App-29x29@3x.png index 90060839d..3cdcbe923 100644 Binary files a/auth/ios/Runner/Assets.xcassets/AppIcon.appiconset/Icon-App-29x29@3x.png and b/auth/ios/Runner/Assets.xcassets/AppIcon.appiconset/Icon-App-29x29@3x.png differ diff --git a/auth/ios/Runner/Assets.xcassets/AppIcon.appiconset/Icon-App-40x40@1x.png b/auth/ios/Runner/Assets.xcassets/AppIcon.appiconset/Icon-App-40x40@1x.png index 8dfb32a97..73c2972e7 100644 Binary files a/auth/ios/Runner/Assets.xcassets/AppIcon.appiconset/Icon-App-40x40@1x.png and b/auth/ios/Runner/Assets.xcassets/AppIcon.appiconset/Icon-App-40x40@1x.png differ diff --git a/auth/ios/Runner/Assets.xcassets/AppIcon.appiconset/Icon-App-40x40@2x.png b/auth/ios/Runner/Assets.xcassets/AppIcon.appiconset/Icon-App-40x40@2x.png index fe8e47ed3..7bf74dea0 100644 Binary files a/auth/ios/Runner/Assets.xcassets/AppIcon.appiconset/Icon-App-40x40@2x.png and b/auth/ios/Runner/Assets.xcassets/AppIcon.appiconset/Icon-App-40x40@2x.png 
differ diff --git a/auth/ios/Runner/Assets.xcassets/AppIcon.appiconset/Icon-App-40x40@3x.png b/auth/ios/Runner/Assets.xcassets/AppIcon.appiconset/Icon-App-40x40@3x.png index 14e9af73d..6cb3e22cd 100644 Binary files a/auth/ios/Runner/Assets.xcassets/AppIcon.appiconset/Icon-App-40x40@3x.png and b/auth/ios/Runner/Assets.xcassets/AppIcon.appiconset/Icon-App-40x40@3x.png differ diff --git a/auth/ios/Runner/Assets.xcassets/AppIcon.appiconset/Icon-App-50x50@1x.png b/auth/ios/Runner/Assets.xcassets/AppIcon.appiconset/Icon-App-50x50@1x.png new file mode 100644 index 000000000..8fb6f13c6 Binary files /dev/null and b/auth/ios/Runner/Assets.xcassets/AppIcon.appiconset/Icon-App-50x50@1x.png differ diff --git a/auth/ios/Runner/Assets.xcassets/AppIcon.appiconset/Icon-App-50x50@2x.png b/auth/ios/Runner/Assets.xcassets/AppIcon.appiconset/Icon-App-50x50@2x.png new file mode 100644 index 000000000..63c4f03db Binary files /dev/null and b/auth/ios/Runner/Assets.xcassets/AppIcon.appiconset/Icon-App-50x50@2x.png differ diff --git a/auth/ios/Runner/Assets.xcassets/AppIcon.appiconset/Icon-App-57x57@1x.png b/auth/ios/Runner/Assets.xcassets/AppIcon.appiconset/Icon-App-57x57@1x.png new file mode 100644 index 000000000..6ab8f0dc2 Binary files /dev/null and b/auth/ios/Runner/Assets.xcassets/AppIcon.appiconset/Icon-App-57x57@1x.png differ diff --git a/auth/ios/Runner/Assets.xcassets/AppIcon.appiconset/Icon-App-57x57@2x.png b/auth/ios/Runner/Assets.xcassets/AppIcon.appiconset/Icon-App-57x57@2x.png new file mode 100644 index 000000000..9d2b175ed Binary files /dev/null and b/auth/ios/Runner/Assets.xcassets/AppIcon.appiconset/Icon-App-57x57@2x.png differ diff --git a/auth/ios/Runner/Assets.xcassets/AppIcon.appiconset/Icon-App-60x60@2x.png b/auth/ios/Runner/Assets.xcassets/AppIcon.appiconset/Icon-App-60x60@2x.png index 14e9af73d..6cb3e22cd 100644 Binary files a/auth/ios/Runner/Assets.xcassets/AppIcon.appiconset/Icon-App-60x60@2x.png and 
b/auth/ios/Runner/Assets.xcassets/AppIcon.appiconset/Icon-App-60x60@2x.png differ diff --git a/auth/ios/Runner/Assets.xcassets/AppIcon.appiconset/Icon-App-60x60@3x.png b/auth/ios/Runner/Assets.xcassets/AppIcon.appiconset/Icon-App-60x60@3x.png index 21b297f8d..5c75eab74 100644 Binary files a/auth/ios/Runner/Assets.xcassets/AppIcon.appiconset/Icon-App-60x60@3x.png and b/auth/ios/Runner/Assets.xcassets/AppIcon.appiconset/Icon-App-60x60@3x.png differ diff --git a/auth/ios/Runner/Assets.xcassets/AppIcon.appiconset/Icon-App-72x72@1x.png b/auth/ios/Runner/Assets.xcassets/AppIcon.appiconset/Icon-App-72x72@1x.png new file mode 100644 index 000000000..f36ab4838 Binary files /dev/null and b/auth/ios/Runner/Assets.xcassets/AppIcon.appiconset/Icon-App-72x72@1x.png differ diff --git a/auth/ios/Runner/Assets.xcassets/AppIcon.appiconset/Icon-App-72x72@2x.png b/auth/ios/Runner/Assets.xcassets/AppIcon.appiconset/Icon-App-72x72@2x.png new file mode 100644 index 000000000..8dc12384b Binary files /dev/null and b/auth/ios/Runner/Assets.xcassets/AppIcon.appiconset/Icon-App-72x72@2x.png differ diff --git a/auth/ios/Runner/Assets.xcassets/AppIcon.appiconset/Icon-App-76x76@1x.png b/auth/ios/Runner/Assets.xcassets/AppIcon.appiconset/Icon-App-76x76@1x.png index f7ef5fa1b..cccb2c4fe 100644 Binary files a/auth/ios/Runner/Assets.xcassets/AppIcon.appiconset/Icon-App-76x76@1x.png and b/auth/ios/Runner/Assets.xcassets/AppIcon.appiconset/Icon-App-76x76@1x.png differ diff --git a/auth/ios/Runner/Assets.xcassets/AppIcon.appiconset/Icon-App-76x76@2x.png b/auth/ios/Runner/Assets.xcassets/AppIcon.appiconset/Icon-App-76x76@2x.png index e2ed1b283..1355c5b74 100644 Binary files a/auth/ios/Runner/Assets.xcassets/AppIcon.appiconset/Icon-App-76x76@2x.png and b/auth/ios/Runner/Assets.xcassets/AppIcon.appiconset/Icon-App-76x76@2x.png differ diff --git a/auth/ios/Runner/Assets.xcassets/AppIcon.appiconset/Icon-App-83.5x83.5@2x.png b/auth/ios/Runner/Assets.xcassets/AppIcon.appiconset/Icon-App-83.5x83.5@2x.png index 
450115a34..15e1f2c68 100644 Binary files a/auth/ios/Runner/Assets.xcassets/AppIcon.appiconset/Icon-App-83.5x83.5@2x.png and b/auth/ios/Runner/Assets.xcassets/AppIcon.appiconset/Icon-App-83.5x83.5@2x.png differ diff --git a/auth/ios/Runner/Assets.xcassets/AppIcon.appiconset/ItunesArtwork@2x.png b/auth/ios/Runner/Assets.xcassets/AppIcon.appiconset/ItunesArtwork@2x.png new file mode 100644 index 000000000..f04fe3978 Binary files /dev/null and b/auth/ios/Runner/Assets.xcassets/AppIcon.appiconset/ItunesArtwork@2x.png differ diff --git a/auth/ios/Runner/Assets.xcassets/LaunchBackground.imageset/Contents.json b/auth/ios/Runner/Assets.xcassets/LaunchBackground.imageset/Contents.json index fa3132785..8bb185b10 100644 --- a/auth/ios/Runner/Assets.xcassets/LaunchBackground.imageset/Contents.json +++ b/auth/ios/Runner/Assets.xcassets/LaunchBackground.imageset/Contents.json @@ -2,8 +2,7 @@ "images" : [ { "filename" : "background.png", - "idiom" : "universal", - "scale" : "1x" + "idiom" : "universal" }, { "appearances" : [ @@ -13,36 +12,7 @@ } ], "filename" : "darkbackground.png", - "idiom" : "universal", - "scale" : "1x" - }, - { - "idiom" : "universal", - "scale" : "2x" - }, - { - "appearances" : [ - { - "appearance" : "luminosity", - "value" : "dark" - } - ], - "idiom" : "universal", - "scale" : "2x" - }, - { - "idiom" : "universal", - "scale" : "3x" - }, - { - "appearances" : [ - { - "appearance" : "luminosity", - "value" : "dark" - } - ], - "idiom" : "universal", - "scale" : "3x" + "idiom" : "universal" } ], "info" : { diff --git a/auth/ios/Runner/Assets.xcassets/LaunchBackground.imageset/background.png b/auth/ios/Runner/Assets.xcassets/LaunchBackground.imageset/background.png index e29b3b59f..3107d37fa 100644 Binary files a/auth/ios/Runner/Assets.xcassets/LaunchBackground.imageset/background.png and b/auth/ios/Runner/Assets.xcassets/LaunchBackground.imageset/background.png differ diff --git a/auth/ios/Runner/Assets.xcassets/LaunchBackground.imageset/darkbackground.png 
b/auth/ios/Runner/Assets.xcassets/LaunchBackground.imageset/darkbackground.png index 1b5df34e7..71e9c817e 100644 Binary files a/auth/ios/Runner/Assets.xcassets/LaunchBackground.imageset/darkbackground.png and b/auth/ios/Runner/Assets.xcassets/LaunchBackground.imageset/darkbackground.png differ diff --git a/auth/ios/Runner/Assets.xcassets/LaunchImage.imageset/Contents.json b/auth/ios/Runner/Assets.xcassets/LaunchImage.imageset/Contents.json index f3387d4ae..00cabce83 100644 --- a/auth/ios/Runner/Assets.xcassets/LaunchImage.imageset/Contents.json +++ b/auth/ios/Runner/Assets.xcassets/LaunchImage.imageset/Contents.json @@ -5,48 +5,15 @@ "idiom" : "universal", "scale" : "1x" }, - { - "appearances" : [ - { - "appearance" : "luminosity", - "value" : "dark" - } - ], - "filename" : "LaunchImageDark.png", - "idiom" : "universal", - "scale" : "1x" - }, { "filename" : "LaunchImage@2x.png", "idiom" : "universal", "scale" : "2x" }, - { - "appearances" : [ - { - "appearance" : "luminosity", - "value" : "dark" - } - ], - "filename" : "LaunchImageDark@2x.png", - "idiom" : "universal", - "scale" : "2x" - }, { "filename" : "LaunchImage@3x.png", "idiom" : "universal", "scale" : "3x" - }, - { - "appearances" : [ - { - "appearance" : "luminosity", - "value" : "dark" - } - ], - "filename" : "LaunchImageDark@3x.png", - "idiom" : "universal", - "scale" : "3x" } ], "info" : { diff --git a/auth/ios/Runner/Assets.xcassets/LaunchImage.imageset/LaunchImage.png b/auth/ios/Runner/Assets.xcassets/LaunchImage.imageset/LaunchImage.png index 899cecf22..91acb41ae 100644 Binary files a/auth/ios/Runner/Assets.xcassets/LaunchImage.imageset/LaunchImage.png and b/auth/ios/Runner/Assets.xcassets/LaunchImage.imageset/LaunchImage.png differ diff --git a/auth/ios/Runner/Assets.xcassets/LaunchImage.imageset/LaunchImage@2x.png b/auth/ios/Runner/Assets.xcassets/LaunchImage.imageset/LaunchImage@2x.png index 4bb7a5751..9a7c72afa 100644 Binary files 
a/auth/ios/Runner/Assets.xcassets/LaunchImage.imageset/LaunchImage@2x.png and b/auth/ios/Runner/Assets.xcassets/LaunchImage.imageset/LaunchImage@2x.png differ diff --git a/auth/ios/Runner/Assets.xcassets/LaunchImage.imageset/LaunchImage@3x.png b/auth/ios/Runner/Assets.xcassets/LaunchImage.imageset/LaunchImage@3x.png index 176f0c723..5b4d99582 100644 Binary files a/auth/ios/Runner/Assets.xcassets/LaunchImage.imageset/LaunchImage@3x.png and b/auth/ios/Runner/Assets.xcassets/LaunchImage.imageset/LaunchImage@3x.png differ diff --git a/auth/ios/Runner/Assets.xcassets/LaunchImage.imageset/LaunchImageDark.png b/auth/ios/Runner/Assets.xcassets/LaunchImage.imageset/LaunchImageDark.png deleted file mode 100644 index 87f84c70e..000000000 Binary files a/auth/ios/Runner/Assets.xcassets/LaunchImage.imageset/LaunchImageDark.png and /dev/null differ diff --git a/auth/ios/Runner/Assets.xcassets/LaunchImage.imageset/LaunchImageDark@2x.png b/auth/ios/Runner/Assets.xcassets/LaunchImage.imageset/LaunchImageDark@2x.png deleted file mode 100644 index ce01bec05..000000000 Binary files a/auth/ios/Runner/Assets.xcassets/LaunchImage.imageset/LaunchImageDark@2x.png and /dev/null differ diff --git a/auth/ios/Runner/Assets.xcassets/LaunchImage.imageset/LaunchImageDark@3x.png b/auth/ios/Runner/Assets.xcassets/LaunchImage.imageset/LaunchImageDark@3x.png deleted file mode 100644 index 75f4b1f3c..000000000 Binary files a/auth/ios/Runner/Assets.xcassets/LaunchImage.imageset/LaunchImageDark@3x.png and /dev/null differ diff --git a/auth/ios/Runner/Base.lproj/LaunchScreen.storyboard b/auth/ios/Runner/Base.lproj/LaunchScreen.storyboard index 8d2b7d51a..9e6bc010b 100644 --- a/auth/ios/Runner/Base.lproj/LaunchScreen.storyboard +++ b/auth/ios/Runner/Base.lproj/LaunchScreen.storyboard @@ -38,7 +38,7 @@ - + diff --git a/auth/ios/Runner/Info.plist b/auth/ios/Runner/Info.plist index 35921ba0c..f24fa7f9e 100644 --- a/auth/ios/Runner/Info.plist +++ b/auth/ios/Runner/Info.plist @@ -1,86 +1,86 @@ - - 
CADisableMinimumFrameDurationOnPhone - - CFBundleDevelopmentRegion - $(DEVELOPMENT_LANGUAGE) - CFBundleDisplayName - auth - CFBundleExecutable - $(EXECUTABLE_NAME) - CFBundleIdentifier - $(PRODUCT_BUNDLE_IDENTIFIER) - CFBundleInfoDictionaryVersion - 6.0 - CFBundleLocalizations - - en - es - - CFBundleName - auth - CFBundlePackageType - APPL - CFBundleShortVersionString - $(FLUTTER_BUILD_NAME) - CFBundleSignature - ???? - CFBundleURLTypes - - - CFBundleTypeRole - Editor - CFBundleURLName - $(PRODUCT_BUNDLE_IDENTIFIER) - CFBundleURLSchemes - - otpauth - enteauth - - - - CFBundleVersion - $(FLUTTER_BUILD_NUMBER) - ITSAppUsesNonExemptEncryption - - LSRequiresIPhoneOS - - MinimumOSVersion - 12.0 - NSCameraUsageDescription - This app needs camera access to scan QR codes - NSFaceIDUsageDescription - Please allow auth to lock itself with FaceID or TouchID - NSPhotoLibraryUsageDescription - Please allow auth to pick a file to import data from - UIApplicationSupportsIndirectInputEvents - - UILaunchStoryboardName - LaunchScreen - UIMainStoryboardFile - Main - UIStatusBarHidden - - UISupportedInterfaceOrientations - - UIInterfaceOrientationPortrait - UIInterfaceOrientationLandscapeLeft - UIInterfaceOrientationLandscapeRight - - UISupportedInterfaceOrientations~ipad - - UIInterfaceOrientationPortrait - UIInterfaceOrientationPortraitUpsideDown - UIInterfaceOrientationLandscapeLeft - UIInterfaceOrientationLandscapeRight - - UIViewControllerBasedStatusBarAppearance - - LSSupportsOpeningDocumentsInPlace - - UIFileSharingEnabled - - + + CADisableMinimumFrameDurationOnPhone + + CFBundleDevelopmentRegion + $(DEVELOPMENT_LANGUAGE) + CFBundleDisplayName + auth + CFBundleExecutable + $(EXECUTABLE_NAME) + CFBundleIdentifier + $(PRODUCT_BUNDLE_IDENTIFIER) + CFBundleInfoDictionaryVersion + 6.0 + CFBundleLocalizations + + en + es + + CFBundleName + auth + CFBundlePackageType + APPL + CFBundleShortVersionString + $(FLUTTER_BUILD_NAME) + CFBundleSignature + ???? 
+ CFBundleURLTypes + + + CFBundleTypeRole + Editor + CFBundleURLName + $(PRODUCT_BUNDLE_IDENTIFIER) + CFBundleURLSchemes + + otpauth + enteauth + + + + CFBundleVersion + $(FLUTTER_BUILD_NUMBER) + ITSAppUsesNonExemptEncryption + + LSRequiresIPhoneOS + + MinimumOSVersion + 12.0 + NSCameraUsageDescription + This app needs camera access to scan QR codes + NSFaceIDUsageDescription + Please allow auth to lock itself with FaceID or TouchID + NSPhotoLibraryUsageDescription + Please allow auth to pick a file to import data from + UIApplicationSupportsIndirectInputEvents + + UILaunchStoryboardName + LaunchScreen + UIMainStoryboardFile + Main + UIStatusBarHidden + + UISupportedInterfaceOrientations + + UIInterfaceOrientationPortrait + UIInterfaceOrientationLandscapeLeft + UIInterfaceOrientationLandscapeRight + + UISupportedInterfaceOrientations~ipad + + UIInterfaceOrientationPortrait + UIInterfaceOrientationPortraitUpsideDown + UIInterfaceOrientationLandscapeLeft + UIInterfaceOrientationLandscapeRight + + UIViewControllerBasedStatusBarAppearance + + LSSupportsOpeningDocumentsInPlace + + UIFileSharingEnabled + + diff --git a/auth/lib/ente_theme_data.dart b/auth/lib/ente_theme_data.dart index 0316d014f..2eb19bf27 100644 --- a/auth/lib/ente_theme_data.dart +++ b/auth/lib/ente_theme_data.dart @@ -427,6 +427,10 @@ extension CustomColorScheme on ColorScheme { ? const Color.fromRGBO(246, 246, 246, 1) : const Color.fromRGBO(40, 40, 40, 0.6); + Color get primaryColor => brightness == Brightness.light + ? const Color(0xFF9610D6) + : const Color(0xFF9610D6); + EnteTheme get enteTheme => brightness == Brightness.light ? 
lightTheme : darkTheme; @@ -493,7 +497,7 @@ ElevatedButtonThemeData buildElevatedButtonThemeData({ ), padding: const EdgeInsets.symmetric(vertical: 18), shape: const RoundedRectangleBorder( - borderRadius: BorderRadius.all(Radius.circular(8)), + borderRadius: BorderRadius.all(Radius.circular(4)), ), ), ); diff --git a/auth/lib/l10n/arb/app_en.arb b/auth/lib/l10n/arb/app_en.arb index c22bac930..e4d1a07a5 100644 --- a/auth/lib/l10n/arb/app_en.arb +++ b/auth/lib/l10n/arb/app_en.arb @@ -20,6 +20,8 @@ "codeIssuerHint": "Issuer", "codeSecretKeyHint": "Secret Key", "codeAccountHint": "Account (you@domain.com)", + "codeTagHint": "Tag", + "accountKeyType": "Type of key", "sessionExpired": "Session expired", "@sessionExpired": { "description": "Title of the dialog when the users current session is invalid/expired" @@ -156,6 +158,7 @@ } } }, + "invalidQRCode": "Invalid QR code", "noRecoveryKeyTitle": "No recovery key?", "enterEmailHint": "Enter your email address", "invalidEmailTitle": "Invalid email address", @@ -420,5 +423,18 @@ "invalidEndpoint": "Invalid endpoint", "invalidEndpointMessage": "Sorry, the endpoint you entered is invalid. Please enter a valid endpoint and try again.", "endpointUpdatedMessage": "Endpoint updated successfully", - "customEndpoint": "Connected to {endpoint}" + "customEndpoint": "Connected to {endpoint}", + "pinText": "Pin", + "unpinText": "Unpin", + "pinnedCodeMessage": "{code} has been pinned", + "unpinnedCodeMessage": "{code} has been unpinned", + "tags": "Tags", + "createNewTag": "Create New Tag", + "tag": "Tag", + "create": "Create", + "editTag": "Edit Tag", + "deleteTagTitle": "Delete tag?", + "deleteTagMessage": "Are you sure you want to delete this tag? 
This action is irreversible.", + "somethingWentWrongParsingCode": "We were unable to parse {x} codes.", + "updateNotAvailable": "Update not available" } \ No newline at end of file diff --git a/auth/lib/l10n/arb/app_pl.arb b/auth/lib/l10n/arb/app_pl.arb index 3132f6660..796623def 100644 --- a/auth/lib/l10n/arb/app_pl.arb +++ b/auth/lib/l10n/arb/app_pl.arb @@ -185,6 +185,8 @@ "recoveryKeySaveDescription": "Nie przechowujemy tego klucza, proszę zachować ten 24 wyrazowy klucz w bezpiecznym miejscu.", "doThisLater": "Zrób To Później", "saveKey": "Zapisz klucz", + "save": "Zapisz", + "send": "Wyślij", "back": "Wstecz", "createAccount": "Utwórz konto", "passwordStrength": "Siła hasła: {passwordStrengthValue}", @@ -335,6 +337,10 @@ "@androidBiometricNotRecognized": { "description": "Message to let the user know that authentication was failed. It is used on Android side. Maximum 60 characters." }, + "androidCancelButton": "Anuluj", + "@androidCancelButton": { + "description": "Message showed on a button that the user can click to leave the current dialog. It is used on Android side. Maximum 30 characters." + }, "androidSignInTitle": "Wymagana autoryzacja", "@androidSignInTitle": { "description": "Message showed as a title in a dialog which indicates the user that they need to scan biometric to continue. It is used on Android side. Maximum 60 characters." 
diff --git a/auth/lib/l10n/arb/app_ru.arb b/auth/lib/l10n/arb/app_ru.arb index ca98611ee..42571a166 100644 --- a/auth/lib/l10n/arb/app_ru.arb +++ b/auth/lib/l10n/arb/app_ru.arb @@ -188,6 +188,8 @@ "recoveryKeySaveDescription": "Мы не храним этот ключ, пожалуйста, сохраните этот ключ в безопасном месте.", "doThisLater": "Сделать позже", "saveKey": "Сохранить ключ", + "save": "Сохранить", + "send": "Отправить", "back": "Вернуться", "createAccount": "Создать аккаунт", "passwordStrength": "Мощность пароля: {passwordStrengthValue}", @@ -394,5 +396,13 @@ "signOutOtherDevices": "Выйти из других устройств", "doNotSignOut": "Не выходить", "hearUsWhereTitle": "Как вы узнали о Ente? (необязательно)", - "hearUsExplanation": "Будет полезно, если вы укажете, где нашли нас, так как мы не отслеживаем установки приложения" + "hearUsExplanation": "Будет полезно, если вы укажете, где нашли нас, так как мы не отслеживаем установки приложения", + "waitingForVerification": "Ожидание подтверждения...", + "developerSettingsWarning": "Вы уверены, что хотите изменить настройки разработчика?", + "developerSettings": "Настройки разработчика", + "serverEndpoint": "Конечная точка сервера", + "invalidEndpoint": "Неверная конечная точка", + "invalidEndpointMessage": "Извините, введенная вами конечная точка неверна. 
Пожалуйста, введите корректную конечную точку и повторите попытку.", + "endpointUpdatedMessage": "Конечная точка успешно обновлена", + "customEndpoint": "Подключено к {endpoint}" } \ No newline at end of file diff --git a/auth/lib/main.dart b/auth/lib/main.dart index d8d22ca4f..9fa2841ff 100644 --- a/auth/lib/main.dart +++ b/auth/lib/main.dart @@ -17,6 +17,7 @@ import 'package:ente_auth/services/update_service.dart'; import 'package:ente_auth/services/user_remote_flag_service.dart'; import 'package:ente_auth/services/user_service.dart'; import 'package:ente_auth/services/window_listener_service.dart'; +import 'package:ente_auth/store/code_display_store.dart'; import 'package:ente_auth/store/code_store.dart'; import 'package:ente_auth/ui/tools/app_lock.dart'; import 'package:ente_auth/ui/tools/lock_screen.dart'; @@ -145,6 +146,7 @@ Future _init(bool bool, {String? via}) async { await PreferenceService.instance.init(); await CodeStore.instance.init(); + await CodeDisplayStore.instance.init(); await Configuration.instance.init(); await Network.instance.init(); await UserService.instance.init(); @@ -157,7 +159,7 @@ Future _init(bool bool, {String? via}) async { } Future _setupPrivacyScreen() async { - if (!PlatformUtil.isMobile()) return; + if (!PlatformUtil.isMobile() || kDebugMode) return; final brightness = SchedulerBinding.instance.platformDispatcher.platformBrightness; bool isInDarkMode = brightness == Brightness.dark; diff --git a/auth/lib/models/code.dart b/auth/lib/models/code.dart index bd6077326..852d1dd78 100644 --- a/auth/lib/models/code.dart +++ b/auth/lib/models/code.dart @@ -1,3 +1,6 @@ +import 'dart:convert'; + +import 'package:ente_auth/models/code_display.dart'; import 'package:ente_auth/utils/totp_util.dart'; class Code { @@ -13,10 +16,19 @@ class Code { final String secret; final Algorithm algorithm; final Type type; + + /// otpauth url in the code final String rawData; final int counter; bool? 
hasSynced; + final CodeDisplay display; + + bool get isPinned => display.pinned; + + final Object? err; + bool get hasError => err != null; + Code( this.account, this.issuer, @@ -28,8 +40,26 @@ class Code { this.counter, this.rawData, { this.generatedID, + required this.display, + this.err, }); + factory Code.withError(Object error, String rawData) { + return Code( + "", + "", + 0, + 0, + "", + Algorithm.sha1, + Type.totp, + 0, + rawData, + err: error, + display: CodeDisplay(), + ); + } + Code copyWith({ String? account, String? issuer, @@ -39,6 +69,7 @@ class Code { Algorithm? algorithm, Type? type, int? counter, + CodeDisplay? display, }) { final String updateAccount = account ?? this.account; final String updateIssuer = issuer ?? this.issuer; @@ -48,6 +79,7 @@ class Code { final Algorithm updatedAlgo = algorithm ?? this.algorithm; final Type updatedType = type ?? this.type; final int updatedCounter = counter ?? this.counter; + final CodeDisplay updatedDisplay = display ?? this.display; return Code( updateAccount, @@ -62,6 +94,7 @@ class Code { "&digits=$updatedDigits&issuer=$updateIssuer" "&period=$updatePeriod&secret=$updatedSecret${updatedType == Type.hotp ? "&counter=$updatedCounter" : ""}", generatedID: generatedID, + display: updatedDisplay, ); } @@ -70,6 +103,7 @@ class Code { String account, String issuer, String secret, + CodeDisplay? display, int digits, ) { return Code( @@ -82,10 +116,11 @@ class Code { type, 0, "otpauth://${type.name}/$issuer:$account?algorithm=SHA1&digits=$digits&issuer=$issuer&period=30&secret=$secret", + display: display ?? CodeDisplay(), ); } - static Code fromRawData(String rawData) { + static Code fromOTPAuthUrl(String rawData, {CodeDisplay? display}) { Uri uri = Uri.parse(rawData); final issuer = _getIssuer(uri); @@ -100,12 +135,13 @@ class Code { _getType(uri), _getCounter(uri), rawData, + display: CodeDisplay.fromUri(uri) ?? 
CodeDisplay(), ); } catch (e) { // if account name contains # without encoding, // rest of the url are treated as url fragment if (rawData.contains("#")) { - return Code.fromRawData(rawData.replaceAll("#", '%23')); + return Code.fromOTPAuthUrl(rawData.replaceAll("#", '%23')); } else { rethrow; } @@ -129,6 +165,24 @@ class Code { } } + static Code fromExportJson(Map rawJson) { + Code resultCode = Code.fromOTPAuthUrl( + rawJson['rawData'], + display: CodeDisplay.fromJson(rawJson['display']), + ); + return resultCode; + } + + String toOTPAuthUrlFormat() { + final uri = Uri.parse(rawData); + final query = {...uri.queryParameters}; + query["codeDisplay"] = jsonEncode(display.toJson()); + + final newUri = uri.replace(queryParameters: query); + + return jsonEncode(newUri.toString()); + } + static String _getIssuer(Uri uri) { try { if (uri.queryParameters.containsKey("issuer")) { diff --git a/auth/lib/models/code_display.dart b/auth/lib/models/code_display.dart new file mode 100644 index 000000000..84deb916b --- /dev/null +++ b/auth/lib/models/code_display.dart @@ -0,0 +1,96 @@ +import 'dart:convert'; + +import 'package:flutter/foundation.dart'; + +/// Used to store the display settings of a code. +class CodeDisplay { + final bool pinned; + final bool trashed; + final int lastUsedAt; + final int tapCount; + final List tags; + + CodeDisplay({ + this.pinned = false, + this.trashed = false, + this.lastUsedAt = 0, + this.tapCount = 0, + this.tags = const [], + }); + + // copyWith + CodeDisplay copyWith({ + bool? pinned, + bool? trashed, + int? lastUsedAt, + int? tapCount, + List? tags, + }) { + final bool updatedPinned = pinned ?? this.pinned; + final bool updatedTrashed = trashed ?? this.trashed; + final int updatedLastUsedAt = lastUsedAt ?? this.lastUsedAt; + final int updatedTapCount = tapCount ?? this.tapCount; + final List updatedTags = tags ?? 
this.tags; + + return CodeDisplay( + pinned: updatedPinned, + trashed: updatedTrashed, + lastUsedAt: updatedLastUsedAt, + tapCount: updatedTapCount, + tags: updatedTags, + ); + } + + factory CodeDisplay.fromJson(Map? json) { + if (json == null) { + return CodeDisplay(); + } + return CodeDisplay( + pinned: json['pinned'] ?? false, + trashed: json['trashed'] ?? false, + lastUsedAt: json['lastUsedAt'] ?? 0, + tapCount: json['tapCount'] ?? 0, + tags: List.from(json['tags'] ?? []), + ); + } + + static CodeDisplay? fromUri(Uri uri) { + if (!uri.queryParameters.containsKey("codeDisplay")) return null; + final String codeDisplay = + uri.queryParameters['codeDisplay']!.replaceAll('%2C', ','); + final decodedDisplay = jsonDecode(codeDisplay); + + return CodeDisplay.fromJson(decodedDisplay); + } + + Map toJson() { + return { + 'pinned': pinned, + 'trashed': trashed, + 'lastUsedAt': lastUsedAt, + 'tapCount': tapCount, + 'tags': tags, + }; + } + + @override + bool operator ==(Object other) { + if (identical(this, other)) return true; + + return other is CodeDisplay && + other.pinned == pinned && + other.trashed == trashed && + other.lastUsedAt == lastUsedAt && + other.tapCount == tapCount && + listEquals(other.tags, tags); + } + + @override + int get hashCode { + return pinned.hashCode ^ + trashed.hashCode ^ + lastUsedAt.hashCode ^ + tapCount.hashCode ^ + tags.hashCode; + } +} diff --git a/auth/lib/onboarding/model/tag_enums.dart b/auth/lib/onboarding/model/tag_enums.dart new file mode 100644 index 000000000..6661b6770 --- /dev/null +++ b/auth/lib/onboarding/model/tag_enums.dart @@ -0,0 +1,10 @@ +enum TagChipState { + selected, + unselected, +} + +enum TagChipAction { + none, + menu, + check, +} diff --git a/auth/lib/onboarding/view/common/add_chip.dart b/auth/lib/onboarding/view/common/add_chip.dart new file mode 100644 index 000000000..39971f416 --- /dev/null +++ b/auth/lib/onboarding/view/common/add_chip.dart @@ -0,0 +1,26 @@ +import 
"package:ente_auth/theme/ente_theme.dart"; +import "package:flutter/material.dart"; + +class AddChip extends StatelessWidget { + final VoidCallback? onTap; + + const AddChip({ + super.key, + this.onTap, + }); + + @override + Widget build(BuildContext context) { + return GestureDetector( + onTap: onTap, + child: Padding( + padding: const EdgeInsets.symmetric(vertical: 8.0), + child: Icon( + Icons.add_circle_outline, + size: 30, + color: getEnteColorScheme(context).iconButtonColor, + ), + ), + ); + } +} diff --git a/auth/lib/onboarding/view/common/add_tag.dart b/auth/lib/onboarding/view/common/add_tag.dart new file mode 100644 index 000000000..716515ad4 --- /dev/null +++ b/auth/lib/onboarding/view/common/add_tag.dart @@ -0,0 +1,78 @@ +import "package:ente_auth/l10n/l10n.dart"; +import "package:flutter/material.dart"; + +class AddTagDialog extends StatefulWidget { + const AddTagDialog({ + super.key, + required this.onTap, + }); + + final void Function(String) onTap; + + @override + State createState() => _AddTagDialogState(); +} + +class _AddTagDialogState extends State { + String _tag = ""; + + @override + Widget build(BuildContext context) { + final l10n = context.l10n; + return AlertDialog( + title: Text(l10n.createNewTag), + content: SingleChildScrollView( + child: Column( + mainAxisAlignment: MainAxisAlignment.start, + crossAxisAlignment: CrossAxisAlignment.start, + children: [ + TextFormField( + maxLength: 100, + decoration: InputDecoration( + hintText: l10n.tag, + hintStyle: const TextStyle( + color: Colors.white30, + ), + contentPadding: const EdgeInsets.all(12), + ), + onChanged: (value) { + setState(() { + _tag = value; + }); + }, + autocorrect: false, + initialValue: _tag, + autofocus: true, + ), + ], + ), + ), + actions: [ + TextButton( + child: Text( + l10n.cancel, + style: const TextStyle( + color: Colors.redAccent, + ), + ), + onPressed: () { + Navigator.pop(context); + }, + ), + TextButton( + child: Text( + l10n.create, + style: const TextStyle( + 
color: Colors.purple, + ), + ), + onPressed: () { + if (_tag.trim().isEmpty) return; + + widget.onTap(_tag); + }, + ), + ], + ); + } +} diff --git a/auth/lib/onboarding/view/common/edit_tag.dart b/auth/lib/onboarding/view/common/edit_tag.dart new file mode 100644 index 000000000..3885312d2 --- /dev/null +++ b/auth/lib/onboarding/view/common/edit_tag.dart @@ -0,0 +1,90 @@ +import "package:ente_auth/l10n/l10n.dart"; +import 'package:ente_auth/store/code_display_store.dart'; +import 'package:ente_auth/utils/dialog_util.dart'; +import 'package:flutter/material.dart'; + +class EditTagDialog extends StatefulWidget { + const EditTagDialog({ + super.key, + required this.tag, + }); + + final String tag; + + @override + State createState() => _EditTagDialogState(); +} + +class _EditTagDialogState extends State { + late String _tag = widget.tag; + + @override + Widget build(BuildContext context) { + final l10n = context.l10n; + return AlertDialog( + title: Text(l10n.editTag), + content: SingleChildScrollView( + child: Column( + mainAxisAlignment: MainAxisAlignment.start, + crossAxisAlignment: CrossAxisAlignment.start, + children: [ + TextFormField( + maxLength: 100, + decoration: InputDecoration( + hintText: l10n.tag, + hintStyle: const TextStyle( + color: Colors.white30, + ), + contentPadding: const EdgeInsets.all(12), + ), + onChanged: (value) { + setState(() { + _tag = value; + }); + }, + autocorrect: false, + initialValue: _tag, + autofocus: true, + ), + ], + ), + ), + actions: [ + TextButton( + child: Text( + l10n.cancel, + style: const TextStyle( + color: Colors.redAccent, + ), + ), + onPressed: () { + Navigator.pop(context); + }, + ), + TextButton( + child: Text( + l10n.saveAction, + style: const TextStyle( + color: Colors.purple, + ), + ), + onPressed: () async { + if (_tag.trim().isEmpty) return; + + final dialog = createProgressDialog( + context, + context.l10n.pleaseWait, + ); + await dialog.show(); + + await CodeDisplayStore.instance.editTag(widget.tag, _tag); + + 
await dialog.hide(); + + Navigator.pop(context); + }, + ), + ], + ); + } +} diff --git a/auth/lib/onboarding/view/common/tag_chip.dart b/auth/lib/onboarding/view/common/tag_chip.dart new file mode 100644 index 000000000..7f71e68b8 --- /dev/null +++ b/auth/lib/onboarding/view/common/tag_chip.dart @@ -0,0 +1,132 @@ +import "package:ente_auth/l10n/l10n.dart"; +import "package:ente_auth/onboarding/model/tag_enums.dart"; +import "package:ente_auth/store/code_display_store.dart"; +import "package:ente_auth/theme/ente_theme.dart"; +import "package:flutter/material.dart"; +import "package:gradient_borders/box_borders/gradient_box_border.dart"; + +class TagChip extends StatelessWidget { + final String label; + final VoidCallback? onTap; + final TagChipState state; + final TagChipAction action; + + const TagChip({ + super.key, + required this.label, + this.state = TagChipState.unselected, + this.action = TagChipAction.none, + this.onTap, + }); + + @override + Widget build(BuildContext context) { + final colorScheme = getEnteColorScheme(context); + + return GestureDetector( + onTap: onTap, + child: Container( + decoration: BoxDecoration( + color: state == TagChipState.selected + ? colorScheme.tagChipSelectedColor + : colorScheme.tagChipUnselectedColor, + borderRadius: BorderRadius.circular(100), + border: GradientBoxBorder( + gradient: LinearGradient( + colors: state == TagChipState.selected + ? colorScheme.tagChipSelectedGradient + : colorScheme.tagChipUnselectedGradient, + begin: Alignment.topLeft, + end: Alignment.bottomRight, + ), + ), + ), + margin: const EdgeInsets.symmetric(vertical: 4), + padding: const EdgeInsets.symmetric(vertical: 6, horizontal: 16) + .copyWith(right: 0), + child: Row( + mainAxisSize: MainAxisSize.min, + children: [ + Text( + label, + style: TextStyle( + color: state == TagChipState.selected || + Theme.of(context).brightness == Brightness.dark + ? 
Colors.white + : colorScheme.tagTextUnselectedColor, + ), + ), + if (state == TagChipState.selected && + action == TagChipAction.check) ...[ + const SizedBox(width: 16), + const Icon( + Icons.check, + size: 16, + color: Colors.white, + ), + const SizedBox(width: 16), + ] else if (state == TagChipState.selected && + action == TagChipAction.menu) ...[ + SizedBox( + width: 48, + child: PopupMenuButton( + iconSize: 16, + padding: const EdgeInsets.symmetric(horizontal: 16), + shape: RoundedRectangleBorder( + borderRadius: BorderRadius.circular(8), + ), + surfaceTintColor: Theme.of(context).cardColor, + iconColor: Colors.white, + initialValue: -1, + onSelected: (value) { + if (value == 0) { + CodeDisplayStore.instance.showEditDialog(context, label); + } else if (value == 1) { + CodeDisplayStore.instance + .showDeleteTagDialog(context, label); + } + }, + itemBuilder: (BuildContext context) { + return [ + PopupMenuItem( + child: Row( + children: [ + const Icon(Icons.edit_outlined, size: 16), + const SizedBox(width: 12), + Text(context.l10n.edit), + ], + ), + value: 0, + ), + PopupMenuItem( + child: Row( + children: [ + Icon( + Icons.delete_outline, + size: 16, + color: colorScheme.deleteTagIconColor, + ), + const SizedBox(width: 12), + Text( + context.l10n.delete, + style: TextStyle( + color: colorScheme.deleteTagTextColor, + ), + ), + ], + ), + value: 1, + ), + ]; + }, + ), + ), + ] else ...[ + const SizedBox(width: 16), + ], + ], + ), + ), + ); + } +} diff --git a/auth/lib/onboarding/view/setup_enter_secret_key_page.dart b/auth/lib/onboarding/view/setup_enter_secret_key_page.dart index 57edcc2e1..6741788c3 100644 --- a/auth/lib/onboarding/view/setup_enter_secret_key_page.dart +++ b/auth/lib/onboarding/view/setup_enter_secret_key_page.dart @@ -1,5 +1,15 @@ +import 'dart:async'; + +import 'package:ente_auth/core/event_bus.dart'; +import 'package:ente_auth/events/codes_updated_event.dart'; import "package:ente_auth/l10n/l10n.dart"; import 
'package:ente_auth/models/code.dart'; +import 'package:ente_auth/models/code_display.dart'; +import 'package:ente_auth/onboarding/model/tag_enums.dart'; +import 'package:ente_auth/onboarding/view/common/add_chip.dart'; +import 'package:ente_auth/onboarding/view/common/add_tag.dart'; +import 'package:ente_auth/onboarding/view/common/tag_chip.dart'; +import 'package:ente_auth/store/code_display_store.dart'; import 'package:ente_auth/ui/components/buttons/button_widget.dart'; import 'package:ente_auth/ui/components/models/button_result.dart'; import 'package:ente_auth/utils/dialog_util.dart'; @@ -21,6 +31,9 @@ class _SetupEnterSecretKeyPageState extends State { late TextEditingController _accountController; late TextEditingController _secretController; late bool _secretKeyObscured; + late List tags = [...?widget.code?.display.tags]; + List allTags = []; + StreamSubscription? _streamSubscription; @override void initState() { @@ -35,9 +48,26 @@ class _SetupEnterSecretKeyPageState extends State { text: widget.code?.secret, ); _secretKeyObscured = widget.code != null; + _loadTags(); + _streamSubscription = Bus.instance.on().listen((event) { + _loadTags(); + }); super.initState(); } + @override + void dispose() { + _streamSubscription?.cancel(); + super.dispose(); + } + + Future _loadTags() async { + allTags = await CodeDisplayStore.instance.getAllTags(); + if (mounted) { + setState(() {}); + } + } + @override Widget build(BuildContext context) { final l10n = context.l10n; @@ -50,6 +80,7 @@ class _SetupEnterSecretKeyPageState extends State { child: Padding( padding: const EdgeInsets.symmetric(vertical: 40.0, horizontal: 40), child: Column( + crossAxisAlignment: CrossAxisAlignment.start, children: [ TextFormField( // The validator receives the text that the user has entered. 
@@ -115,6 +146,65 @@ class _SetupEnterSecretKeyPageState extends State { controller: _accountController, ), const SizedBox(height: 40), + const SizedBox( + height: 20, + ), + Text( + l10n.tags, + style: const TextStyle( + fontWeight: FontWeight.bold, + ), + ), + const SizedBox(height: 10), + Wrap( + spacing: 12, + alignment: WrapAlignment.start, + children: [ + ...allTags.map( + (e) => TagChip( + label: e, + action: TagChipAction.check, + state: tags.contains(e) + ? TagChipState.selected + : TagChipState.unselected, + onTap: () { + if (tags.contains(e)) { + tags.remove(e); + } else { + tags.add(e); + } + setState(() {}); + }, + ), + ), + AddChip( + onTap: () { + showDialog( + context: context, + builder: (BuildContext context) { + return AddTagDialog( + onTap: (tag) { + if (allTags.contains(tag) && + tags.contains(tag)) { + return; + } + allTags.add(tag); + tags.add(tag); + setState(() {}); + Navigator.pop(context); + }, + ); + }, + barrierColor: Colors.black.withOpacity(0.85), + barrierDismissible: false, + ); + }, + ), + ], + ), + const SizedBox( + height: 40, + ), SizedBox( width: 400, child: OutlinedButton( @@ -134,13 +224,7 @@ class _SetupEnterSecretKeyPageState extends State { } await _saveCode(); }, - child: Padding( - padding: const EdgeInsets.symmetric( - horizontal: 16.0, - vertical: 4, - ), - child: Text(l10n.saveAction), - ), + child: Text(l10n.saveAction), ), ), ], @@ -171,18 +255,22 @@ class _SetupEnterSecretKeyPageState extends State { return; } } + final CodeDisplay display = + widget.code?.display.copyWith(tags: tags) ?? CodeDisplay(tags: tags); final Code newCode = widget.code == null ? Code.fromAccountAndSecret( isStreamCode ? Type.steam : Type.totp, account, issuer, secret, + display, isStreamCode ? 
Code.steamDigits : Code.defaultDigits, ) : widget.code!.copyWith( account: account, issuer: issuer, secret: secret, + display: display, ); // Verify the validity of the code getOTP(newCode); diff --git a/auth/lib/store/code_display_store.dart b/auth/lib/store/code_display_store.dart new file mode 100644 index 000000000..74972f5a2 --- /dev/null +++ b/auth/lib/store/code_display_store.dart @@ -0,0 +1,112 @@ +import 'package:ente_auth/l10n/l10n.dart'; +import 'package:ente_auth/models/code.dart'; +import 'package:ente_auth/onboarding/view/common/edit_tag.dart'; +import 'package:ente_auth/services/authenticator_service.dart'; +import 'package:ente_auth/store/code_store.dart'; +import 'package:ente_auth/utils/dialog_util.dart'; +import 'package:flutter/material.dart'; + +class CodeDisplayStore { + static final CodeDisplayStore instance = + CodeDisplayStore._privateConstructor(); + + CodeDisplayStore._privateConstructor(); + + late CodeStore _codeStore; + + Future init() async { + _codeStore = CodeStore.instance; + } + + Future> getAllTags({ + AccountMode? accountMode, + List? allCodes, + }) async { + final codes = allCodes ?? 
+ await _codeStore.getAllCodes( + accountMode: accountMode, + sortCodes: false, + ); + final tags = {}; + for (final code in codes) { + if (code.hasError) continue; + tags.addAll(code.display.tags); + } + return tags.toList(); + } + + Future showDeleteTagDialog(BuildContext context, String tag) async { + FocusScope.of(context).requestFocus(); + final l10n = context.l10n; + + await showChoiceActionSheet( + context, + title: l10n.deleteTagTitle, + body: l10n.deleteTagMessage, + firstButtonLabel: l10n.delete, + isCritical: true, + firstButtonOnTap: () async { + // traverse through all the codes and edit this tag's value + final relevantCodes = await _getCodesByTag(tag); + + final tasks = []; + + for (final code in relevantCodes) { + final tags = code.display.tags; + tags.remove(tag); + tasks.add( + _codeStore.addCode( + code.copyWith( + display: code.display.copyWith(tags: tags), + ), + ), + ); + } + + await Future.wait(tasks); + }, + ); + } + + Future showEditDialog(BuildContext context, String tag) async { + await showDialog( + context: context, + builder: (BuildContext context) { + return EditTagDialog(tag: tag); + }, + barrierColor: Colors.black.withOpacity(0.85), + barrierDismissible: false, + ); + } + + Future> _getCodesByTag(String tag) async { + final codes = await _codeStore.getAllCodes(sortCodes: false); + return codes + .where( + (element) => !element.hasError && element.display.tags.contains(tag), + ) + .toList(); + } + + Future editTag(String previousTag, String updatedTag) async { + // traverse through all the codes and edit this tag's value + final relevantCodes = await _getCodesByTag(previousTag); + + final tasks = []; + + for (final code in relevantCodes) { + final tags = code.display.tags; + tags.remove(previousTag); + tags.add(updatedTag); + tasks.add( + CodeStore.instance.addCode( + code.copyWith( + display: code.display.copyWith(tags: tags), + ), + ), + ); + } + + await Future.wait(tasks); + } +} diff --git a/auth/lib/store/code_store.dart 
b/auth/lib/store/code_store.dart index 9b199f165..43d882281 100644 --- a/auth/lib/store/code_store.dart +++ b/auth/lib/store/code_store.dart @@ -22,27 +22,52 @@ class CodeStore { _authenticatorService = AuthenticatorService.instance; } - Future> getAllCodes({AccountMode? accountMode}) async { + Future> getAllCodes({ + AccountMode? accountMode, + bool sortCodes = true, + }) async { final mode = accountMode ?? _authenticatorService.getAccountMode(); final List entities = await _authenticatorService.getEntities(mode); final List codes = []; + for (final entity in entities) { - final decodeJson = jsonDecode(entity.rawData); - final code = Code.fromRawData(decodeJson); + late Code code; + try { + final decodeJson = jsonDecode(entity.rawData); + + if (decodeJson is String && decodeJson.startsWith('otpauth://')) { + code = Code.fromOTPAuthUrl(decodeJson); + } else { + code = Code.fromExportJson(decodeJson); + } + } catch (e) { + code = Code.withError(e, entity.rawData); + _logger.severe("Could not parse code", code.err); + } code.generatedID = entity.generatedID; code.hasSynced = entity.hasSynced; codes.add(code); } - // sort codes by issuer,account - codes.sort((a, b) { - final issuerComparison = compareAsciiLowerCaseNatural(a.issuer, b.issuer); - if (issuerComparison != 0) { - return issuerComparison; - } - return compareAsciiLowerCaseNatural(a.account, b.account); - }); + if (sortCodes) { + // sort codes by issuer,account + codes.sort((firstCode, secondCode) { + if (secondCode.isPinned && !firstCode.isPinned) return 1; + if (!secondCode.isPinned && firstCode.isPinned) return -1; + + final issuerComparison = + compareAsciiLowerCaseNatural(firstCode.issuer, secondCode.issuer); + if (issuerComparison != 0) { + return issuerComparison; + } + return compareAsciiLowerCaseNatural( + firstCode.account, + secondCode.account, + ); + }); + } + return codes; } @@ -52,30 +77,36 @@ class CodeStore { AccountMode? accountMode, }) async { final mode = accountMode ?? 
_authenticatorService.getAccountMode(); - final codes = await getAllCodes(accountMode: mode); + final allCodes = await getAllCodes(accountMode: mode); bool isExistingCode = false; - for (final existingCode in codes) { - if (existingCode == code) { - _logger.info("Found duplicate code, skipping add"); - return AddResult.duplicate; - } else if (existingCode.generatedID == code.generatedID) { + bool hasSameCode = false; + for (final existingCode in allCodes) { + if (existingCode.hasError) continue; + if (code.generatedID != null && + existingCode.generatedID == code.generatedID) { isExistingCode = true; break; } + if (existingCode == code) { + hasSameCode = true; + } + } + if (!isExistingCode && hasSameCode) { + return AddResult.duplicate; } late AddResult result; if (isExistingCode) { result = AddResult.updateCode; await _authenticatorService.updateEntry( code.generatedID!, - jsonEncode(code.rawData), + code.toOTPAuthUrlFormat(), shouldSync, mode, ); } else { result = AddResult.newCode; code.generatedID = await _authenticatorService.addEntry( - jsonEncode(code.rawData), + code.toOTPAuthUrlFormat(), shouldSync, mode, ); @@ -93,7 +124,7 @@ class CodeStore { bool _isOfflineImportRunning = false; Future importOfflineCodes() async { - if(_isOfflineImportRunning) { + if (_isOfflineImportRunning) { return; } _isOfflineImportRunning = true; @@ -107,8 +138,10 @@ class CodeStore { } logger.info('start import'); - List offlineCodes = await CodeStore.instance - .getAllCodes(accountMode: AccountMode.offline); + List offlineCodes = (await CodeStore.instance + .getAllCodes(accountMode: AccountMode.offline)) + .where((element) => !element.hasError) + .toList(); if (offlineCodes.isEmpty) { return; } @@ -117,8 +150,10 @@ class CodeStore { logger.info("skip as online sync is not done"); return; } - final List onlineCodes = - await CodeStore.instance.getAllCodes(accountMode: AccountMode.online); + final List onlineCodes = (await CodeStore.instance + .getAllCodes(accountMode: 
AccountMode.online)) + .where((element) => !element.hasError) + .toList(); logger.info( 'importing ${offlineCodes.length} offline codes with ${onlineCodes.length} online codes', ); diff --git a/auth/lib/theme/colors.dart b/auth/lib/theme/colors.dart index 9ac9d2d7e..278c00777 100644 --- a/auth/lib/theme/colors.dart +++ b/auth/lib/theme/colors.dart @@ -1,5 +1,3 @@ -import 'dart:ui'; - import 'package:flutter/material.dart'; class EnteColorScheme { @@ -41,6 +39,8 @@ class EnteColorScheme { final Color primary400; final Color primary300; + final Color iconButtonColor; + final Color warning700; final Color warning500; final Color warning400; @@ -48,6 +48,28 @@ class EnteColorScheme { final Color caution500; final List avatarColors; + + // Tags + final Color tagChipSelectedColor; + final Color tagChipUnselectedColor; + final List tagChipSelectedGradient; + final List tagChipUnselectedGradient; + final Color tagTextUnselectedColor; + final Color deleteTagIconColor; + final Color deleteTagTextColor; + + // Code Widget + final Color errorCodeProgressColor; + final Color infoIconColor; + final Color errorCardTextColor; + final Color deleteCodeTextColor; + final List pinnedCardBoxShadow; + final Color pinnedBgColor; + + // Gradient Button + final Color gradientButtonBgColor; + final List gradientButtonBgColors; + const EnteColorScheme( this.backgroundBase, this.backgroundElevated, @@ -70,7 +92,23 @@ class EnteColorScheme { this.blurStrokeBase, this.blurStrokeFaint, this.blurStrokePressed, - this.avatarColors, { + this.avatarColors, + this.iconButtonColor, + this.tagChipUnselectedColor, + this.tagChipSelectedGradient, + this.tagChipUnselectedGradient, + this.pinnedBgColor, { + this.tagChipSelectedColor = _tagChipSelectedColor, + this.tagTextUnselectedColor = _tagTextUnselectedColor, + this.deleteTagIconColor = _deleteTagIconColor, + this.deleteTagTextColor = _deleteTagTextColor, + this.errorCodeProgressColor = _errorCodeProgressColor, + this.infoIconColor = _infoIconColor, + 
this.errorCardTextColor = _errorCardTextColor, + this.deleteCodeTextColor = _deleteCodeTextColor, + this.pinnedCardBoxShadow = _pinnedCardBoxShadow, + this.gradientButtonBgColor = _gradientButtonBgColor, + this.gradientButtonBgColors = _gradientButtonBgColors, this.primaryGreen = _primaryGreen, this.primary700 = _primary700, this.primary500 = _primary500, @@ -107,6 +145,11 @@ const EnteColorScheme lightScheme = EnteColorScheme( blurStrokeFaintLight, blurStrokePressedLight, avatarLight, + _iconButtonBrightColor, + _tagChipUnselectedColorLight, + _tagChipSelectedGradientLight, + _tagChipUnselectedGradientLight, + _pinnedBgColorLight, ); const EnteColorScheme darkScheme = EnteColorScheme( @@ -132,6 +175,11 @@ const EnteColorScheme darkScheme = EnteColorScheme( blurStrokeFaintDark, blurStrokePressedDark, avatarDark, + _iconButtonDarkColor, + _tagChipUnselectedColorDark, + _tagChipSelectedGradientDark, + _tagChipUnselectedGradientDark, + _pinnedBgColorDark, ); // Background Colors @@ -200,7 +248,10 @@ const Color _primary500 = Color.fromARGB(255, 204, 10, 101); const Color _primary400 = Color.fromARGB(255, 122, 41, 193); const Color _primary300 = Color.fromARGB(255, 152, 77, 244); -const Color _warning700 = Color.fromRGBO(234, 63, 63, 1); +const Color _iconButtonBrightColor = Color.fromRGBO(130, 50, 225, 1); +const Color _iconButtonDarkColor = Color.fromRGBO(255, 150, 16, 1); + +const Color _warning700 = Color.fromRGBO(245, 52, 52, 1); const Color _warning500 = Color.fromRGBO(255, 101, 101, 1); const Color _warning800 = Color(0xFFF53434); const Color warning500 = Color.fromRGBO(255, 101, 101, 1); @@ -260,3 +311,64 @@ const List avatarDark = [ Color.fromRGBO(209, 132, 132, 1), Color.fromRGBO(120, 181, 167, 1), ]; + +// Tags +const Color _tagChipUnselectedColorLight = Color(0xFFFCF5FF); +const Color _tagChipUnselectedColorDark = Color(0xFF1C0F22); +const List _tagChipUnselectedGradientLight = [ + Color(0x33AD00FF), + Color(0x338609C2), +]; +const List 
_tagChipUnselectedGradientDark = [ + Color(0xFFAD00FF), + Color(0x87A269BD), +]; +const Color _tagChipSelectedColor = Color(0xFF722ED1); +const List _tagChipSelectedGradientLight = [ + Color(0xFFB37FEB), + Color(0xFFAE40E3), +]; +const List _tagChipSelectedGradientDark = [ + Color(0xFFB37FEB), + Color(0x87AE40E3), +]; +const Color _tagTextUnselectedColor = Color(0xFF8232E1); +const Color _deleteTagIconColor = Color(0xFFF53434); +const Color _deleteTagTextColor = Color(0xFFF53434); + +// Code Widget +const Color _pinnedBgColorLight = Color(0xFFF9ECFF); +const Color _pinnedBgColorDark = Color(0xFF390C4F); +const Color _errorCodeProgressColor = Color(0xFFF53434); +const Color _infoIconColor = Color(0xFFF53434); +const Color _errorCardTextColor = Color(0xFFF53434); +const Color _deleteCodeTextColor = Color(0xFFFE4A49); +const List _pinnedCardBoxShadow = [ + BoxShadow( + color: Color(0x08000000), + blurRadius: 2, + offset: Offset(0, 7), + ), + BoxShadow( + color: Color(0x17000000), + blurRadius: 2, + offset: Offset(0, 4), + ), + BoxShadow( + color: Color(0x29000000), + blurRadius: 1, + offset: Offset(0, 1), + ), + BoxShadow( + color: Color(0x2E000000), + blurRadius: 1, + offset: Offset(0, 0), + ), +]; + +// Gradient Button +const Color _gradientButtonBgColor = Color(0xFF531DAB); +const List _gradientButtonBgColors = [ + Color(0xFFB37FEB), + Color(0xFF22075E), +]; diff --git a/auth/lib/ui/code_error_widget.dart b/auth/lib/ui/code_error_widget.dart new file mode 100644 index 000000000..ec532ccba --- /dev/null +++ b/auth/lib/ui/code_error_widget.dart @@ -0,0 +1,111 @@ +import 'package:ente_auth/ente_theme_data.dart'; +import 'package:ente_auth/l10n/l10n.dart'; +import 'package:ente_auth/theme/ente_theme.dart'; +import 'package:ente_auth/ui/common/gradient_button.dart'; +import 'package:ente_auth/ui/linear_progress_widget.dart'; +import 'package:ente_auth/utils/dialog_util.dart'; +import 'package:flutter/material.dart'; + +class CodeErrorWidget extends StatelessWidget { + 
const CodeErrorWidget({ + super.key, + required this.errors, + }); + + final int errors; + + @override + Widget build(BuildContext context) { + final colorScheme = getEnteColorScheme(context); + + return Container( + height: 132, + width: double.infinity, + decoration: BoxDecoration( + color: Theme.of(context).colorScheme.codeCardBackgroundColor, + borderRadius: BorderRadius.circular(8), + ), + margin: const EdgeInsets.only( + left: 16, + right: 16, + bottom: 8, + top: 8, + ), + child: ClipRRect( + borderRadius: BorderRadius.circular(8), + child: Column( + crossAxisAlignment: CrossAxisAlignment.start, + children: [ + SizedBox( + height: 3, + child: LinearProgressWidget( + color: colorScheme.errorCodeProgressColor, + fractionOfStorage: 1, + ), + ), + const SizedBox(height: 16), + Row( + children: [ + const SizedBox(width: 8), + Align( + alignment: Alignment.center, + child: Icon( + Icons.info, + size: 18, + color: colorScheme.infoIconColor, + ), + ), + const SizedBox(width: 8), + Text( + context.l10n.error, + style: TextStyle( + fontSize: 18, + fontWeight: FontWeight.w600, + color: colorScheme.errorCardTextColor, + ), + ), + ], + ), + const SizedBox(height: 8), + Padding( + padding: const EdgeInsets.symmetric(horizontal: 8.0), + child: Text( + context.l10n.somethingWentWrongParsingCode(errors), + style: const TextStyle( + fontSize: 14, + fontWeight: FontWeight.w500, + ), + ), + ), + const Spacer(), + Row( + mainAxisAlignment: MainAxisAlignment.end, + children: [ + SizedBox( + width: 102, + height: 28, + child: GradientButton( + text: context.l10n.contactSupport, + fontSize: 10, + onTap: () async { + await showErrorDialog( + context, + context.l10n.contactSupport, + context.l10n + .contactSupportViaEmailMessage("support@ente.io"), + ); + }, + borderWidth: 0.6, + borderRadius: 6, + ), + ), + const SizedBox(width: 6), + ], + ), + const SizedBox(height: 12), + ], + ), + ), + ); + } +} diff --git a/auth/lib/ui/code_timer_progress.dart 
b/auth/lib/ui/code_timer_progress.dart index b524a0c23..a215f0ca0 100644 --- a/auth/lib/ui/code_timer_progress.dart +++ b/auth/lib/ui/code_timer_progress.dart @@ -1,3 +1,4 @@ +import 'package:ente_auth/theme/ente_theme.dart'; import 'package:ente_auth/ui/linear_progress_widget.dart'; import 'package:flutter/material.dart'; import 'package:flutter/scheduler.dart'; @@ -47,9 +48,14 @@ class _CodeTimerProgressState extends State @override Widget build(BuildContext context) { - return LinearProgressWidget( - color: _progress > 0.4 ? Colors.green : Colors.orange, - fractionOfStorage: _progress, + return SizedBox( + height: 3, + child: LinearProgressWidget( + color: _progress > 0.4 + ? getEnteColorScheme(context).primary700 + : Colors.orange, + fractionOfStorage: _progress, + ), ); } } diff --git a/auth/lib/ui/code_widget.dart b/auth/lib/ui/code_widget.dart index d989edf18..7a9eae46f 100644 --- a/auth/lib/ui/code_widget.dart +++ b/auth/lib/ui/code_widget.dart @@ -1,5 +1,6 @@ import 'dart:async'; import 'dart:io'; +import 'dart:ui' as ui; import 'package:clipboard/clipboard.dart'; import 'package:ente_auth/core/configuration.dart'; @@ -11,6 +12,7 @@ import 'package:ente_auth/onboarding/view/view_qr_page.dart'; import 'package:ente_auth/services/local_authentication_service.dart'; import 'package:ente_auth/services/preference_service.dart'; import 'package:ente_auth/store/code_store.dart'; +import 'package:ente_auth/theme/ente_theme.dart'; import 'package:ente_auth/ui/code_timer_progress.dart'; import 'package:ente_auth/ui/utils/icon_utils.dart'; import 'package:ente_auth/utils/dialog_util.dart'; @@ -20,13 +22,17 @@ import 'package:ente_auth/utils/totp_util.dart'; import 'package:flutter/material.dart'; import 'package:flutter_context_menu/flutter_context_menu.dart'; import 'package:flutter_slidable/flutter_slidable.dart'; +import 'package:flutter_svg/flutter_svg.dart'; import 'package:logging/logging.dart'; import 'package:move_to_background/move_to_background.dart'; class 
CodeWidget extends StatefulWidget { final Code code; - const CodeWidget(this.code, {super.key}); + const CodeWidget( + this.code, { + super.key, + }); @override State createState() => _CodeWidgetState(); @@ -42,6 +48,7 @@ class _CodeWidgetState extends State { late bool _shouldShowLargeIcon; late bool _hideCode; bool isMaskingEnabled = false; + late final colorScheme = getEnteColorScheme(context); @override void initState() { @@ -97,6 +104,13 @@ class _CodeWidgetState extends State { icon: Icons.qr_code_2_outlined, onSelected: () => _onShowQrPressed(null), ), + MenuItem( + label: widget.code.isPinned ? l10n.unpinText : l10n.pinText, + icon: widget.code.isPinned + ? Icons.push_pin + : Icons.push_pin_outlined, + onSelected: () => _onPinPressed(null), + ), MenuItem( label: l10n.edit, icon: Icons.edit, @@ -119,16 +133,16 @@ class _CodeWidgetState extends State { return Slidable( key: ValueKey(widget.code.hashCode), endActionPane: ActionPane( - extentRatio: 0.60, + extentRatio: 0.90, motion: const ScrollMotion(), children: [ const SizedBox( - width: 4, + width: 14, ), SlidableAction( onPressed: _onShowQrPressed, backgroundColor: Colors.grey.withOpacity(0.1), - borderRadius: const BorderRadius.all(Radius.circular(12.0)), + borderRadius: const BorderRadius.all(Radius.circular(8)), foregroundColor: Theme.of(context).colorScheme.inverseBackgroundColor, icon: Icons.qr_code_2_outlined, @@ -137,12 +151,48 @@ class _CodeWidgetState extends State { spacing: 8, ), const SizedBox( - width: 4, + width: 14, + ), + CustomSlidableAction( + onPressed: _onPinPressed, + backgroundColor: Colors.grey.withOpacity(0.1), + borderRadius: const BorderRadius.all(Radius.circular(8)), + foregroundColor: + Theme.of(context).colorScheme.inverseBackgroundColor, + child: Column( + mainAxisAlignment: MainAxisAlignment.center, + children: [ + if (widget.code.isPinned) + SvgPicture.asset( + "assets/svg/pin-active.svg", + colorFilter: ui.ColorFilter.mode( + Theme.of(context).colorScheme.primary, + 
BlendMode.srcIn, + ), + ) + else + SvgPicture.asset( + "assets/svg/pin-inactive.svg", + colorFilter: ui.ColorFilter.mode( + Theme.of(context).colorScheme.primary, + BlendMode.srcIn, + ), + ), + const SizedBox(height: 8), + Text( + widget.code.isPinned ? l10n.unpinText : l10n.pinText, + ), + ], + ), + padding: const EdgeInsets.only(left: 4, right: 0), + ), + const SizedBox( + width: 14, ), SlidableAction( onPressed: _onEditPressed, backgroundColor: Colors.grey.withOpacity(0.1), - borderRadius: const BorderRadius.all(Radius.circular(12.0)), + borderRadius: const BorderRadius.all(Radius.circular(8)), foregroundColor: Theme.of(context).colorScheme.inverseBackgroundColor, icon: Icons.edit_outlined, @@ -151,13 +201,13 @@ class _CodeWidgetState extends State { spacing: 8, ), const SizedBox( - width: 4, + width: 14, ), SlidableAction( onPressed: _onDeletePressed, backgroundColor: Colors.grey.withOpacity(0.1), - borderRadius: const BorderRadius.all(Radius.circular(12.0)), - foregroundColor: const Color(0xFFFE4A49), + borderRadius: const BorderRadius.all(Radius.circular(8)), + foregroundColor: colorScheme.deleteCodeTextColor, icon: Icons.delete, label: l10n.delete, padding: const EdgeInsets.only(left: 0, right: 0), @@ -175,10 +225,15 @@ class _CodeWidgetState extends State { } Widget _clippedCard(AppLocalizations l10n) { - return ClipRRect( - borderRadius: BorderRadius.circular(8), - child: Container( + return Container( + height: 132, + decoration: BoxDecoration( + borderRadius: BorderRadius.circular(8), color: Theme.of(context).colorScheme.codeCardBackgroundColor, + boxShadow: widget.code.isPinned ? 
colorScheme.pinnedCardBoxShadow : [], + ), + child: ClipRRect( + borderRadius: BorderRadius.circular(8), child: Material( color: Colors.transparent, child: InkWell( @@ -208,37 +263,56 @@ class _CodeWidgetState extends State { } Widget _getCardContents(AppLocalizations l10n) { - return SizedBox( - child: Column( - crossAxisAlignment: CrossAxisAlignment.start, - mainAxisAlignment: MainAxisAlignment.center, - children: [ - if (widget.code.type.isTOTPCompatible) - CodeTimerProgress( - period: widget.code.period, - ), - const SizedBox( - height: 16, - ), - Row( - children: [ - _shouldShowLargeIcon ? _getIcon() : const SizedBox.shrink(), - Expanded( - child: Column( - children: [ - _getTopRow(), - const SizedBox(height: 4), - _getBottomRow(l10n), - ], - ), + return Stack( + children: [ + if (widget.code.isPinned) + Align( + alignment: Alignment.topRight, + child: CustomPaint( + painter: PinBgPainter( + color: colorScheme.pinnedBgColor, ), - ], + size: const Size(39, 39), + ), ), - const SizedBox( - height: 20, + Column( + crossAxisAlignment: CrossAxisAlignment.start, + mainAxisAlignment: MainAxisAlignment.spaceBetween, + children: [ + if (widget.code.type == Type.totp) + CodeTimerProgress( + period: widget.code.period, + ), + const SizedBox(height: 16), + Row( + children: [ + _shouldShowLargeIcon ? _getIcon() : const SizedBox.shrink(), + Expanded( + child: Column( + children: [ + _getTopRow(), + const SizedBox(height: 4), + _getBottomRow(l10n), + ], + ), + ), + ], + ), + const SizedBox( + height: 20, + ), + ], + ), + if (widget.code.isPinned) ...[ + Align( + alignment: Alignment.topRight, + child: Padding( + padding: const EdgeInsets.only(right: 6, top: 6), + child: SvgPicture.asset("assets/svg/pin-card.svg"), + ), ), ], - ), + ], ); } @@ -422,7 +496,9 @@ class _CodeWidgetState extends State { final Code? 
code = await Navigator.of(context).push( MaterialPageRoute( builder: (BuildContext context) { - return SetupEnterSecretKeyPage(code: widget.code); + return SetupEnterSecretKeyPage( + code: widget.code, + ); }, ), ); @@ -448,6 +524,24 @@ class _CodeWidgetState extends State { ); } + Future _onPinPressed(_) async { + bool currentlyPinned = widget.code.isPinned; + final display = widget.code.display; + final Code code = widget.code.copyWith( + display: display.copyWith(pinned: !currentlyPinned), + ); + unawaited( + CodeStore.instance.addCode(code).then( + (value) => showToast( + context, + !currentlyPinned + ? context.l10n.pinnedCodeMessage(widget.code.issuer) + : context.l10n.unpinnedCodeMessage(widget.code.issuer), + ), + ), + ); + } + void _onDeletePressed(_) async { bool isAuthSuccessful = await LocalAuthenticationService.instance.requestLocalAuthentication( @@ -499,3 +593,36 @@ class _CodeWidgetState extends State { return code; } } + +class PinBgPainter extends CustomPainter { + final Color color; + final PaintingStyle paintingStyle; + + PinBgPainter({ + this.color = Colors.black, + this.paintingStyle = PaintingStyle.fill, + }); + + @override + void paint(Canvas canvas, Size size) { + Paint paint = Paint() + ..color = color + ..style = paintingStyle; + + canvas.drawPath(getTrianglePath(size.width, size.height), paint); + } + + Path getTrianglePath(double x, double y) { + return Path() + ..moveTo(0, 0) + ..lineTo(x, 0) + ..lineTo(x, y) + ..lineTo(0, 0); + } + + @override + bool shouldRepaint(PinBgPainter oldDelegate) { + return oldDelegate.color != color || + oldDelegate.paintingStyle != paintingStyle; + } +} diff --git a/auth/lib/ui/common/gradient_button.dart b/auth/lib/ui/common/gradient_button.dart index 8a24c6832..436e1bfb9 100644 --- a/auth/lib/ui/common/gradient_button.dart +++ b/auth/lib/ui/common/gradient_button.dart @@ -1,7 +1,9 @@ +import 'package:ente_auth/theme/ente_theme.dart'; import 'package:flutter/material.dart'; +import 
'package:flutter_svg/flutter_svg.dart'; +import 'package:gradient_borders/box_borders/gradient_box_border.dart'; -class GradientButton extends StatelessWidget { - final List linearGradientColors; +class GradientButton extends StatefulWidget { final Function? onTap; // text is ignored if child is specified @@ -13,33 +15,39 @@ class GradientButton extends StatelessWidget { // padding between the text and icon final double paddingValue; - // used when two icons are in row - final bool reversedGradient; + final double fontSize; + final double borderRadius; + final double borderWidth; const GradientButton({ super.key, - this.linearGradientColors = const [ - Color.fromARGB(255, 133, 44, 210), - Color.fromARGB(255, 187, 26, 93), - ], - this.reversedGradient = false, this.onTap, this.text = '', this.iconData, this.paddingValue = 0.0, + this.fontSize = 18, + this.borderRadius = 4, + this.borderWidth = 1, }); + @override + State createState() => _GradientButtonState(); +} + +class _GradientButtonState extends State { + bool isTapped = false; + @override Widget build(BuildContext context) { Widget buttonContent; - if (iconData == null) { + if (widget.iconData == null) { buttonContent = Text( - text, - style: const TextStyle( + widget.text, + style: TextStyle( color: Colors.white, fontWeight: FontWeight.w600, fontFamily: 'Inter-SemiBold', - fontSize: 18, + fontSize: widget.fontSize, ), ); } else { @@ -48,38 +56,79 @@ class GradientButton extends StatelessWidget { crossAxisAlignment: CrossAxisAlignment.center, children: [ Icon( - iconData, + widget.iconData, size: 20, color: Colors.white, ), const Padding(padding: EdgeInsets.symmetric(horizontal: 6)), Text( - text, - style: const TextStyle( + widget.text, + style: TextStyle( color: Colors.white, fontWeight: FontWeight.w600, fontFamily: 'Inter-SemiBold', - fontSize: 18, + fontSize: widget.fontSize, ), ), ], ); } + final colorScheme = getEnteColorScheme(context); + return InkWell( - onTap: onTap as void Function()?, - child: 
Container( - height: 56, - decoration: BoxDecoration( - gradient: LinearGradient( - begin: const Alignment(0.1, -0.9), - end: const Alignment(-0.6, 0.9), - colors: reversedGradient - ? linearGradientColors.reversed.toList() - : linearGradientColors, + onTapDown: (_) { + setState(() { + isTapped = true; + }); + }, + onTapUp: (_) { + setState(() { + isTapped = false; + }); + }, + onTapCancel: () { + setState(() { + isTapped = false; + }); + }, + borderRadius: BorderRadius.circular(widget.borderRadius), + onTap: widget.onTap as void Function()?, + child: Stack( + children: [ + Container( + height: 56, + width: double.infinity, + decoration: BoxDecoration( + borderRadius: BorderRadius.circular(widget.borderRadius), + color: colorScheme.gradientButtonBgColor, + ), ), - borderRadius: BorderRadius.circular(8), - ), - child: Center(child: buttonContent), + if (!isTapped) + ClipRRect( + borderRadius: BorderRadius.circular(widget.borderRadius), + child: SvgPicture.asset( + 'assets/svg/button-tint.svg', + fit: BoxFit.fill, + width: double.infinity, + height: 56, + ), + ), + Container( + height: 56, + decoration: BoxDecoration( + border: GradientBoxBorder( + width: widget.borderWidth, + gradient: LinearGradient( + colors: colorScheme.gradientButtonBgColors, + begin: Alignment.topLeft, + end: Alignment.bottomRight, + ), + ), + borderRadius: BorderRadius.circular(widget.borderRadius), + ), + child: Center(child: buttonContent), + ), + ], ), ); } diff --git a/auth/lib/ui/home_page.dart b/auth/lib/ui/home_page.dart index c3397d79a..4110a5f88 100644 --- a/auth/lib/ui/home_page.dart +++ b/auth/lib/ui/home_page.dart @@ -2,6 +2,7 @@ import 'dart:async'; import 'dart:io'; import 'package:app_links/app_links.dart'; +import 'package:collection/collection.dart'; import 'package:ente_auth/core/configuration.dart'; import 'package:ente_auth/core/event_bus.dart'; import 'package:ente_auth/ente_theme_data.dart'; @@ -10,11 +11,15 @@ import 'package:ente_auth/events/icons_changed_event.dart'; 
import 'package:ente_auth/events/trigger_logout_event.dart'; import "package:ente_auth/l10n/l10n.dart"; import 'package:ente_auth/models/code.dart'; +import 'package:ente_auth/onboarding/model/tag_enums.dart'; +import 'package:ente_auth/onboarding/view/common/tag_chip.dart'; import 'package:ente_auth/onboarding/view/setup_enter_secret_key_page.dart'; import 'package:ente_auth/services/preference_service.dart'; import 'package:ente_auth/services/user_service.dart'; +import 'package:ente_auth/store/code_display_store.dart'; import 'package:ente_auth/store/code_store.dart'; import 'package:ente_auth/ui/account/logout_dialog.dart'; +import 'package:ente_auth/ui/code_error_widget.dart'; import 'package:ente_auth/ui/code_widget.dart'; import 'package:ente_auth/ui/common/loading_widget.dart'; import 'package:ente_auth/ui/home/coach_mark_widget.dart'; @@ -54,11 +59,13 @@ class _HomePageState extends State { final FocusNode searchInputFocusNode = FocusNode(); bool _showSearchBox = false; String _searchText = ""; - List _codes = []; + List? _allCodes; + List tags = []; List _filteredCodes = []; StreamSubscription? _streamSubscription; StreamSubscription? _triggerLogoutEvent; StreamSubscription? 
_iconsChangedEvent; + String selectedTag = ""; @override void initState() { @@ -96,14 +103,26 @@ class _HomePageState extends State { void _loadCodes() { CodeStore.instance.getAllCodes().then((codes) { - _codes = codes; - _hasLoaded = true; - _applyFilteringAndRefresh(); + _allCodes = codes; + + CodeDisplayStore.instance.getAllTags(allCodes: _allCodes).then((value) { + tags = value; + + if (mounted) { + if (!tags.contains(selectedTag)) { + selectedTag = ""; + } + _hasLoaded = true; + _applyFilteringAndRefresh(); + } + }); + }).onError((error, stackTrace) { + _logger.severe('Error while loading codes', error, stackTrace); }); } void _applyFilteringAndRefresh() { - if (_searchText.isNotEmpty && _showSearchBox) { + if (_searchText.isNotEmpty && _showSearchBox && _allCodes != null) { final String val = _searchText.toLowerCase(); // Prioritize issuer match above account for better UX while searching // for a specific TOTP for email providers. Searching for "emailProvider" like (gmail, proton) should @@ -112,17 +131,31 @@ class _HomePageState extends State { final List issuerMatch = []; final List accountMatch = []; - for (final Code code in _codes) { - if (code.issuer.toLowerCase().contains(val)) { - issuerMatch.add(code); - } else if (code.account.toLowerCase().contains(val)) { - accountMatch.add(code); + for (final Code codeState in _allCodes!) { + if (codeState.hasError || + selectedTag != "" && + !codeState.display.tags.contains(selectedTag)) { + continue; + } + + if (codeState.issuer.toLowerCase().contains(val)) { + issuerMatch.add(codeState); + } else if (codeState.account.toLowerCase().contains(val)) { + accountMatch.add(codeState); } } _filteredCodes = issuerMatch; _filteredCodes.addAll(accountMatch); } else { - _filteredCodes = _codes; + _filteredCodes = _allCodes + ?.where( + (element) => + !element.hasError && + (selectedTag == "" || + element.display.tags.contains(selectedTag)), + ) + .toList() ?? 
+ []; } if (mounted) { setState(() {}); @@ -149,7 +182,7 @@ class _HomePageState extends State { if (code != null) { await CodeStore.instance.addCode(code); // Focus the new code by searching - if (_codes.length > 2) { + if ((_allCodes?.where((e) => !e.hasError).length ?? 0) > 2) { _focusNewCode(code); } } @@ -171,6 +204,7 @@ class _HomePageState extends State { @override Widget build(BuildContext context) { final l10n = context.l10n; + return PopScope( onPopInvoked: (_) async { if (_isSettingsOpen) { @@ -217,6 +251,7 @@ class _HomePageState extends State { focusedBorder: InputBorder.none, ), ), + centerTitle: true, actions: [ IconButton( icon: _showSearchBox @@ -241,7 +276,7 @@ class _HomePageState extends State { ], ), floatingActionButton: !_hasLoaded || - _codes.isEmpty || + (_allCodes?.isEmpty ?? true) || !PreferenceService.instance.hasShownCoachMark() ? null : _getFab(), @@ -258,18 +293,86 @@ class _HomePageState extends State { onManuallySetupTap: _redirectToManualEntryPage, ); } else { - final list = AlignedGridView.count( - crossAxisCount: (MediaQuery.sizeOf(context).width ~/ 400) - .clamp(1, double.infinity) - .toInt(), - itemBuilder: ((context, index) { - try { - return ClipRect(child: CodeWidget(_filteredCodes[index])); - } catch (e) { - return const Text("Failed"); - } - }), - itemCount: _filteredCodes.length, + final anyCodeHasError = + _allCodes?.firstWhereOrNull((element) => element.hasError) != null; + final indexOffset = anyCodeHasError ? 1 : 0; + + final list = Column( + crossAxisAlignment: CrossAxisAlignment.start, + children: [ + if (!anyCodeHasError) + SizedBox( + height: 48, + child: ListView.separated( + scrollDirection: Axis.horizontal, + padding: + const EdgeInsets.symmetric(horizontal: 16, vertical: 2), + separatorBuilder: (context, index) => + const SizedBox(width: 8), + itemCount: tags.length + 1, + itemBuilder: (context, index) { + if (index == 0) { + return TagChip( + label: "All", + state: selectedTag == "" + ? 
TagChipState.selected + : TagChipState.unselected, + onTap: () { + selectedTag = ""; + setState(() {}); + _applyFilteringAndRefresh(); + }, + ); + } + return TagChip( + label: tags[index - 1], + action: TagChipAction.menu, + state: selectedTag == tags[index - 1] + ? TagChipState.selected + : TagChipState.unselected, + onTap: () { + if (selectedTag == tags[index - 1]) { + selectedTag = ""; + setState(() {}); + _applyFilteringAndRefresh(); + return; + } + selectedTag = tags[index - 1]; + setState(() {}); + _applyFilteringAndRefresh(); + }, + ); + }, + ), + ), + Expanded( + child: AlignedGridView.count( + crossAxisCount: (MediaQuery.sizeOf(context).width ~/ 400) + .clamp(1, double.infinity) + .toInt(), + physics: const AlwaysScrollableScrollPhysics(), + padding: const EdgeInsets.only(bottom: 80), + itemBuilder: ((context, index) { + if (index == 0 && anyCodeHasError) { + return CodeErrorWidget( + errors: _allCodes + ?.where((element) => element.hasError) + .length ?? + 0, + ); + } + final newIndex = index - indexOffset; + + return ClipRect( + child: CodeWidget( + _filteredCodes[newIndex], + ), + ); + }), + itemCount: _filteredCodes.length + indexOffset, + ), + ), + ], ); if (!PreferenceService.instance.hasShownCoachMark()) { return Stack( @@ -288,22 +391,12 @@ class _HomePageState extends State { (MediaQuery.sizeOf(context).width ~/ 400) .clamp(1, double.infinity) .toInt(), + padding: const EdgeInsets.only(bottom: 80), itemBuilder: ((context, index) { - Code? code; - try { - code = _filteredCodes[index]; - return CodeWidget(code); - } catch (e, s) { - _logger.severe("code widget error", e, s); - return Center( - child: Padding( - padding: const EdgeInsets.all(8.0), - child: Text( - l10n.sorryUnableToGenCode(code?.issuer ?? 
""), - ), - ), - ); - } + final codeState = _filteredCodes[index]; + return CodeWidget( + codeState, + ); }), itemCount: _filteredCodes.length, ) @@ -360,7 +453,7 @@ class _HomePageState extends State { } if (mounted && link.toLowerCase().startsWith("otpauth://")) { try { - final newCode = Code.fromRawData(link); + final newCode = Code.fromOTPAuthUrl(link); getNextTotp(newCode); CodeStore.instance.addCode(newCode); _focusNewCode(newCode); diff --git a/auth/lib/ui/scanner_page.dart b/auth/lib/ui/scanner_page.dart index 6a7793631..a0f88b7c8 100644 --- a/auth/lib/ui/scanner_page.dart +++ b/auth/lib/ui/scanner_page.dart @@ -2,6 +2,7 @@ import 'dart:io'; import 'package:ente_auth/l10n/l10n.dart'; import 'package:ente_auth/models/code.dart'; +import 'package:ente_auth/utils/toast_util.dart'; import 'package:flutter/material.dart'; import 'package:qr_code_scanner/qr_code_scanner.dart'; @@ -66,11 +67,12 @@ class ScannerPageState extends State { } controller.scannedDataStream.listen((scanData) { try { - final code = Code.fromRawData(scanData.code!); + final code = Code.fromOTPAuthUrl(scanData.code!); controller.dispose(); Navigator.of(context).pop(code); } catch (e) { // Log + showToast(context, context.l10n.invalidQRCode); } }); } diff --git a/auth/lib/ui/settings/data/export_widget.dart b/auth/lib/ui/settings/data/export_widget.dart index ef438301c..0df748289 100644 --- a/auth/lib/ui/settings/data/export_widget.dart +++ b/auth/lib/ui/settings/data/export_widget.dart @@ -171,10 +171,12 @@ Future _exportCodes(BuildContext context, String fileContent) async { } Future _getAuthDataForExport() async { - final codes = await CodeStore.instance.getAllCodes(); + final allCodes = await CodeStore.instance.getAllCodes(); String data = ""; - for (final code in codes) { - data += "${code.rawData}\n"; + for (final code in allCodes) { + if (code.hasError) continue; + data += "${code.rawData.replaceAll(',', '%2C')}\n"; } + return data; } diff --git 
a/auth/lib/ui/settings/data/import/aegis_import.dart b/auth/lib/ui/settings/data/import/aegis_import.dart index b801e64a5..f6dd87252 100644 --- a/auth/lib/ui/settings/data/import/aegis_import.dart +++ b/auth/lib/ui/settings/data/import/aegis_import.dart @@ -2,8 +2,8 @@ import 'dart:async'; import 'dart:convert'; import 'dart:io'; import 'dart:typed_data'; -import 'package:convert/convert.dart'; +import 'package:convert/convert.dart'; import 'package:ente_auth/l10n/l10n.dart'; import 'package:ente_auth/models/code.dart'; import 'package:ente_auth/services/authenticator_service.dart'; @@ -150,7 +150,7 @@ Future _processAegisExportFile( } else { throw Exception('Invalid OTP type'); } - parsedCodes.add(Code.fromRawData(otpUrl)); + parsedCodes.add(Code.fromOTPAuthUrl(otpUrl)); } for (final code in parsedCodes) { diff --git a/auth/lib/ui/settings/data/import/bitwarden_import.dart b/auth/lib/ui/settings/data/import/bitwarden_import.dart index 7a562d82b..6878fa9f0 100644 --- a/auth/lib/ui/settings/data/import/bitwarden_import.dart +++ b/auth/lib/ui/settings/data/import/bitwarden_import.dart @@ -86,7 +86,7 @@ Future _processBitwardenExportFile( Code code; if (totp.contains("otpauth://")) { - code = Code.fromRawData(totp); + code = Code.fromOTPAuthUrl(totp); } else { var issuer = item['name']; var account = item['login']['username']; @@ -96,6 +96,7 @@ Future _processBitwardenExportFile( account, issuer, totp, + null, Code.defaultDigits, ); } diff --git a/auth/lib/ui/settings/data/import/encrypted_ente_import.dart b/auth/lib/ui/settings/data/import/encrypted_ente_import.dart index 511c9bbf9..3d7896f88 100644 --- a/auth/lib/ui/settings/data/import/encrypted_ente_import.dart +++ b/auth/lib/ui/settings/data/import/encrypted_ente_import.dart @@ -110,7 +110,7 @@ Future _decryptExportData( final parsedCodes = []; for (final code in splitCodes) { try { - parsedCodes.add(Code.fromRawData(code)); + parsedCodes.add(Code.fromOTPAuthUrl(code)); } catch (e) { 
Logger('EncryptedText').severe("Could not parse code", e); } diff --git a/auth/lib/ui/settings/data/import/google_auth_import.dart b/auth/lib/ui/settings/data/import/google_auth_import.dart index 12df41a14..c14752fa4 100644 --- a/auth/lib/ui/settings/data/import/google_auth_import.dart +++ b/auth/lib/ui/settings/data/import/google_auth_import.dart @@ -1,5 +1,6 @@ import 'dart:async'; import 'dart:convert'; + import 'package:base32/base32.dart'; import 'package:ente_auth/l10n/l10n.dart'; import 'package:ente_auth/models/code.dart'; @@ -124,7 +125,7 @@ List parseGoogleAuth(String qrCodeData) { } else { throw Exception('Invalid OTP type'); } - codes.add(Code.fromRawData(otpUrl)); + codes.add(Code.fromOTPAuthUrl(otpUrl)); } return codes; } catch (e, s) { diff --git a/auth/lib/ui/settings/data/import/lastpass_import.dart b/auth/lib/ui/settings/data/import/lastpass_import.dart index 53f8b453d..8c36f0253 100644 --- a/auth/lib/ui/settings/data/import/lastpass_import.dart +++ b/auth/lib/ui/settings/data/import/lastpass_import.dart @@ -89,8 +89,8 @@ Future _processLastpassExportFile( // Build the OTP URL String otpUrl = - 'otpauth://totp/$issuer:$account?secret=$secret&issuer=$issuer&algorithm=$algorithm&digits=$digits&period=$timer'; - parsedCodes.add(Code.fromRawData(otpUrl)); + 'otpauth://totp/$issuer:$account?secret=$secret&issuer=$issuer&algorithm=$algorithm&digits=$digits&period=$timer'; + parsedCodes.add(Code.fromOTPAuthUrl(otpUrl)); } for (final code in parsedCodes) { diff --git a/auth/lib/ui/settings/data/import/plain_text_import.dart b/auth/lib/ui/settings/data/import/plain_text_import.dart index 03bc50dce..6867584b0 100644 --- a/auth/lib/ui/settings/data/import/plain_text_import.dart +++ b/auth/lib/ui/settings/data/import/plain_text_import.dart @@ -13,12 +13,15 @@ import 'package:file_picker/file_picker.dart'; import 'package:flutter/material.dart'; import 'package:logging/logging.dart'; +final _logger = Logger('PlainText'); + class PlainTextImport extends 
StatelessWidget { const PlainTextImport({super.key}); @override Widget build(BuildContext context) { final l10n = context.l10n; + return Column( children: [ Text( @@ -101,20 +104,35 @@ Future _pickImportFile(BuildContext context) async { final progressDialog = createProgressDialog(context, l10n.pleaseWait); await progressDialog.show(); try { + final parsedCodes = []; File file = File(result.files.single.path!); final codes = await file.readAsString(); - List splitCodes = codes.split(","); - if (splitCodes.length == 1) { - splitCodes = const LineSplitter().convert(codes); - } - final parsedCodes = []; - for (final code in splitCodes) { - try { - parsedCodes.add(Code.fromRawData(code)); - } catch (e) { - Logger('PlainText').severe("Could not parse code", e); + + if (codes.startsWith('otpauth://')) { + List splitCodes = codes.split(","); + if (splitCodes.length == 1) { + splitCodes = const LineSplitter().convert(codes); + } + for (final code in splitCodes) { + try { + parsedCodes.add(Code.fromOTPAuthUrl(code)); + } catch (e) { + Logger('PlainText').severe("Could not parse code", e); + } + } + } else { + final decodedCodes = jsonDecode(codes); + List splitCodes = List.from(decodedCodes["items"]); + + for (final code in splitCodes) { + try { + parsedCodes.add(Code.fromExportJson(code)); + } catch (e) { + _logger.severe("Could not parse code", e); + } } } + for (final code in parsedCodes) { await CodeStore.instance.addCode(code, shouldSync: false); } diff --git a/auth/lib/ui/settings/data/import/raivo_plain_text_import.dart b/auth/lib/ui/settings/data/import/raivo_plain_text_import.dart index 48fc74888..3590a38b3 100644 --- a/auth/lib/ui/settings/data/import/raivo_plain_text_import.dart +++ b/auth/lib/ui/settings/data/import/raivo_plain_text_import.dart @@ -57,7 +57,7 @@ Future _pickRaivoJsonFile(BuildContext context) async { String path = result.files.single.path!; int? 
count = await _processRaivoExportFile(context, path); await progressDialog.hide(); - if(count != null) { + if (count != null) { await importSuccessDialog(context, count); } } catch (e) { @@ -70,9 +70,9 @@ Future _pickRaivoJsonFile(BuildContext context) async { } } -Future _processRaivoExportFile(BuildContext context,String path) async { +Future _processRaivoExportFile(BuildContext context, String path) async { File file = File(path); - if(path.endsWith('.zip')) { + if (path.endsWith('.zip')) { await showErrorDialog( context, context.l10n.sorry, @@ -105,7 +105,7 @@ Future _processRaivoExportFile(BuildContext context,String path) async { } else { throw Exception('Invalid OTP type'); } - parsedCodes.add(Code.fromRawData(otpUrl)); + parsedCodes.add(Code.fromOTPAuthUrl(otpUrl)); } for (final code in parsedCodes) { diff --git a/auth/lib/ui/settings/data/import/two_fas_import.dart b/auth/lib/ui/settings/data/import/two_fas_import.dart index ae5a05b0b..710d898d4 100644 --- a/auth/lib/ui/settings/data/import/two_fas_import.dart +++ b/auth/lib/ui/settings/data/import/two_fas_import.dart @@ -158,7 +158,7 @@ Future _process2FasExportFile( } else { throw Exception('Invalid OTP type'); } - parsedCodes.add(Code.fromRawData(otpUrl)); + parsedCodes.add(Code.fromOTPAuthUrl(otpUrl)); } for (final code in parsedCodes) { diff --git a/auth/lib/ui/settings_page.dart b/auth/lib/ui/settings_page.dart index 48fd6467c..0e99a1ea3 100644 --- a/auth/lib/ui/settings_page.dart +++ b/auth/lib/ui/settings_page.dart @@ -108,8 +108,9 @@ class SettingsPage extends StatelessWidget { await handleExportClick(context); } else { if (result.action == ButtonAction.second) { - bool hasCodes = - (await CodeStore.instance.getAllCodes()).isNotEmpty; + bool hasCodes = (await CodeStore.instance.getAllCodes()) + .where((element) => !element.hasError) + .isNotEmpty; if (hasCodes) { final hasAuthenticated = await LocalAuthenticationService .instance diff --git a/auth/lib/utils/email_util.dart 
b/auth/lib/utils/email_util.dart index 582449edb..8b0412228 100644 --- a/auth/lib/utils/email_util.dart +++ b/auth/lib/utils/email_util.dart @@ -146,7 +146,7 @@ Future getZippedLogsFile(BuildContext context) async { final encoder = ZipFileEncoder(); encoder.create(zipFilePath); await encoder.addDirectory(logsDirectory); - encoder.close(); + await encoder.close(); await dialog.hide(); return zipFilePath; } diff --git a/auth/linux/packaging/appimage/make_config.yaml b/auth/linux/packaging/appimage/make_config.yaml index 90db9c587..9a3004dcd 100644 --- a/auth/linux/packaging/appimage/make_config.yaml +++ b/auth/linux/packaging/appimage/make_config.yaml @@ -24,5 +24,6 @@ startup_notify: false # include: # - libcurl.so.4 include: - - libffi.so.7 + - libffi.so.8 - libtiff.so.5 + - libjpeg.so.8 diff --git a/auth/macos/Podfile.lock b/auth/macos/Podfile.lock index a5b6eb77c..92d05104e 100644 --- a/auth/macos/Podfile.lock +++ b/auth/macos/Podfile.lock @@ -26,40 +26,36 @@ PODS: - path_provider_foundation (0.0.1): - Flutter - FlutterMacOS - - ReachabilitySwift (5.0.0) + - ReachabilitySwift (5.2.2) - screen_retriever (0.0.1): - FlutterMacOS - - Sentry/HybridSDK (8.21.0): - - SentryPrivate (= 8.21.0) - - sentry_flutter (0.0.1): + - Sentry/HybridSDK (8.25.0) + - sentry_flutter (7.20.1): - Flutter - FlutterMacOS - - Sentry/HybridSDK (= 8.21.0) - - SentryPrivate (8.21.0) + - Sentry/HybridSDK (= 8.25.0) - share_plus (0.0.1): - FlutterMacOS - shared_preferences_foundation (0.0.1): - Flutter - FlutterMacOS - - smart_auth (0.0.1): - - FlutterMacOS - sodium_libs (2.2.1): - FlutterMacOS - sqflite (0.0.3): - Flutter - FlutterMacOS - - sqlite3 (3.45.1): - - sqlite3/common (= 3.45.1) - - sqlite3/common (3.45.1) - - sqlite3/fts5 (3.45.1): + - "sqlite3 (3.45.3+1)": + - "sqlite3/common (= 3.45.3+1)" + - "sqlite3/common (3.45.3+1)" + - "sqlite3/fts5 (3.45.3+1)": - sqlite3/common - - sqlite3/perf-threadsafe (3.45.1): + - "sqlite3/perf-threadsafe (3.45.3+1)": - sqlite3/common - - sqlite3/rtree 
(3.45.1): + - "sqlite3/rtree (3.45.3+1)": - sqlite3/common - sqlite3_flutter_libs (0.0.1): - FlutterMacOS - - sqlite3 (~> 3.45.1) + - "sqlite3 (~> 3.45.3+1)" - sqlite3/fts5 - sqlite3/perf-threadsafe - sqlite3/rtree @@ -87,7 +83,6 @@ DEPENDENCIES: - sentry_flutter (from `Flutter/ephemeral/.symlinks/plugins/sentry_flutter/macos`) - share_plus (from `Flutter/ephemeral/.symlinks/plugins/share_plus/macos`) - shared_preferences_foundation (from `Flutter/ephemeral/.symlinks/plugins/shared_preferences_foundation/darwin`) - - smart_auth (from `Flutter/ephemeral/.symlinks/plugins/smart_auth/macos`) - sodium_libs (from `Flutter/ephemeral/.symlinks/plugins/sodium_libs/macos`) - sqflite (from `Flutter/ephemeral/.symlinks/plugins/sqflite/darwin`) - sqlite3_flutter_libs (from `Flutter/ephemeral/.symlinks/plugins/sqlite3_flutter_libs/macos`) @@ -100,7 +95,6 @@ SPEC REPOS: - OrderedSet - ReachabilitySwift - Sentry - - SentryPrivate - sqlite3 EXTERNAL SOURCES: @@ -136,8 +130,6 @@ EXTERNAL SOURCES: :path: Flutter/ephemeral/.symlinks/plugins/share_plus/macos shared_preferences_foundation: :path: Flutter/ephemeral/.symlinks/plugins/shared_preferences_foundation/darwin - smart_auth: - :path: Flutter/ephemeral/.symlinks/plugins/smart_auth/macos sodium_libs: :path: Flutter/ephemeral/.symlinks/plugins/sodium_libs/macos sqflite: @@ -165,22 +157,20 @@ SPEC CHECKSUMS: OrderedSet: aaeb196f7fef5a9edf55d89760da9176ad40b93c package_info_plus: 02d7a575e80f194102bef286361c6c326e4c29ce path_provider_foundation: 3784922295ac71e43754bd15e0653ccfd36a147c - ReachabilitySwift: 985039c6f7b23a1da463388634119492ff86c825 + ReachabilitySwift: 2128f3a8c9107e1ad33574c6e58e8285d460b149 screen_retriever: 59634572a57080243dd1bf715e55b6c54f241a38 - Sentry: ebc12276bd17613a114ab359074096b6b3725203 - sentry_flutter: dff1df05dc39c83d04f9330b36360fc374574c5e - SentryPrivate: d651efb234cf385ec9a1cdd3eff94b5e78a0e0fe + Sentry: cd86fc55628f5b7c572cabe66cc8f95a9d2f165a + sentry_flutter: 
4cb24c1055c556d7b27262ab2e179d1e5a0b9b0c share_plus: 76dd39142738f7a68dd57b05093b5e8193f220f7 shared_preferences_foundation: b4c3b4cddf1c21f02770737f147a3f5da9d39695 - smart_auth: b38e3ab4bfe089eacb1e233aca1a2340f96c28e9 sodium_libs: d39bd76697736cb11ce4a0be73b9b4bc64466d6f sqflite: 673a0e54cc04b7d6dba8d24fb8095b31c3a99eec - sqlite3: 73b7fc691fdc43277614250e04d183740cb15078 - sqlite3_flutter_libs: 06a05802529659a272beac4ee1350bfec294f386 + sqlite3: 02d1f07eaaa01f80a1c16b4b31dfcbb3345ee01a + sqlite3_flutter_libs: 8d204ef443cf0d5c1c8b058044eab53f3943a9c5 tray_manager: 9064e219c56d75c476e46b9a21182087930baf90 url_launcher_macos: d2691c7dd33ed713bf3544850a623080ec693d95 window_manager: 3a1844359a6295ab1e47659b1a777e36773cd6e8 PODFILE CHECKSUM: f401c31c8f7c5571f6f565c78915d54338812dab -COCOAPODS: 1.14.3 +COCOAPODS: 1.15.2 diff --git a/auth/pubspec.lock b/auth/pubspec.lock index 772416042..a47858d53 100644 --- a/auth/pubspec.lock +++ b/auth/pubspec.lock @@ -45,10 +45,10 @@ packages: dependency: "direct main" description: name: archive - sha256: "22600aa1e926be775fa5fe7e6894e7fb3df9efda8891c73f70fb3262399a432d" + sha256: "0763b45fa9294197a2885c8567927e2830ade852e5c896fd4ab7e0e348d0f373" url: "https://pub.dev" source: hosted - version: "3.4.10" + version: "3.5.0" args: dependency: transitive description: @@ -318,10 +318,10 @@ packages: dependency: "direct main" description: name: dio - sha256: "639179e1cc0957779e10dd5b786ce180c477c4c0aca5aaba5d1700fa2e834801" + sha256: "11e40df547d418cc0c4900a9318b26304e665da6fa4755399a9ff9efd09034b5" url: "https://pub.dev" source: hosted - version: "5.4.3" + version: "5.4.3+1" dotted_border: dependency: "direct main" description: @@ -468,10 +468,10 @@ packages: dependency: "direct main" description: name: flutter_email_sender - sha256: "5001e9158f91a8799140fb30a11ad89cd587244f30b4f848d87085985c49b60f" + sha256: fb515d4e073d238d0daf1d765e5318487b6396d46b96e0ae9745dbc9a133f97a url: "https://pub.dev" source: hosted - version: "6.0.2" + 
version: "6.0.3" flutter_inappwebview: dependency: "direct main" description: @@ -565,10 +565,10 @@ packages: dependency: transitive description: name: flutter_local_notifications_platform_interface - sha256: "7cf643d6d5022f3baed0be777b0662cce5919c0a7b86e700299f22dc4ae660ef" + sha256: "340abf67df238f7f0ef58f4a26d2a83e1ab74c77ab03cd2b2d5018ac64db30b7" url: "https://pub.dev" source: hosted - version: "7.0.0+1" + version: "7.1.0" flutter_localizations: dependency: "direct main" description: flutter @@ -685,10 +685,10 @@ packages: dependency: "direct main" description: name: fluttertoast - sha256: dfdde255317af381bfc1c486ed968d5a43a2ded9c931e87cbecd88767d6a71c1 + sha256: "81b68579e23fcbcada2db3d50302813d2371664afe6165bc78148050ab94bf66" url: "https://pub.dev" source: hosted - version: "8.2.4" + version: "8.2.5" freezed_annotation: dependency: transitive description: @@ -721,6 +721,14 @@ packages: url: "https://pub.dev" source: hosted version: "5.0.6" + gradient_borders: + dependency: "direct main" + description: + name: gradient_borders + sha256: "69eeaff519d145a4c6c213ada1abae386bcc8981a4970d923e478ce7ba19e309" + url: "https://pub.dev" + source: hosted + version: "1.0.0" graphs: dependency: transitive description: @@ -813,18 +821,18 @@ packages: dependency: "direct main" description: name: json_annotation - sha256: b10a7b2ff83d83c777edba3c6a0f97045ddadd56c944e1a23a3fdf43a1bf4467 + sha256: "1ce844379ca14835a50d2f019a3099f419082cfdd231cd86a142af94dd5c6bb1" url: "https://pub.dev" source: hosted - version: "4.8.1" + version: "4.9.0" json_serializable: dependency: "direct dev" description: name: json_serializable - sha256: aa1f5a8912615733e0fdc7a02af03308933c93235bdc8d50d0b0c8a8ccb0b969 + sha256: ea1432d167339ea9b5bb153f0571d0039607a873d6e04e0117af043f14a1fd4b url: "https://pub.dev" source: hosted - version: "6.7.1" + version: "6.8.0" leak_tracker: dependency: transitive description: @@ -869,10 +877,10 @@ packages: dependency: "direct main" description: name: 
local_auth_android - sha256: "3bcd732dda7c75fcb7ddaef12e131230f53dcc8c00790d0d6efb3aa0fbbeda57" + sha256: e0e5b1ea247c5a0951c13a7ee13dc1beae69750e6a2e1910d1ed6a3cd4d56943 url: "https://pub.dev" source: hosted - version: "1.0.37" + version: "1.0.38" local_auth_darwin: dependency: "direct main" description: @@ -1133,10 +1141,10 @@ packages: dependency: "direct main" description: name: pointycastle - sha256: "70fe966348fe08c34bf929582f1d8247d9d9408130723206472b4687227e4333" + sha256: "79fbafed02cfdbe85ef3fd06c7f4bc2cbcba0177e61b765264853d4253b21744" url: "https://pub.dev" source: hosted - version: "3.8.0" + version: "3.9.0" pool: dependency: transitive description: @@ -1221,18 +1229,18 @@ packages: dependency: "direct main" description: name: sentry - sha256: fe99a06970b909a491b7f89d54c9b5119772e3a48a400308a6e129625b333f5b + sha256: e572d33a3ff1d69549f33ee828a8ff514047d43ca8eea4ab093d72461205aa3e url: "https://pub.dev" source: hosted - version: "7.19.0" + version: "7.20.1" sentry_flutter: dependency: "direct main" description: name: sentry_flutter - sha256: fc013d4a753447320f62989b1871fdc1f20c77befcc8be3e38774dd7402e7a62 + sha256: ac8cf6bb849f3560353ae33672e17b2713809a4e8de0d3cf372e9e9c42013757 url: "https://pub.dev" source: hosted - version: "7.19.0" + version: "7.20.1" share_plus: dependency: "direct main" description: @@ -1419,10 +1427,10 @@ packages: dependency: "direct main" description: name: sqlite3_flutter_libs - sha256: d6c31c8511c441d1f12f20b607343df1afe4eddf24a1cf85021677c8eea26060 + sha256: fb2a106a2ea6042fe57de2c47074cc31539a941819c91e105b864744605da3f5 url: "https://pub.dev" source: hosted - version: "0.5.20" + version: "0.5.21" stack_trace: dependency: transitive description: @@ -1499,10 +1507,10 @@ packages: dependency: transitive description: name: timezone - sha256: "1cfd8ddc2d1cfd836bc93e67b9be88c3adaeca6f40a00ca999104c30693cdca0" + sha256: a6ccda4a69a442098b602c44e61a1e2b4bf6f5516e875bbf0f427d5df14745d5 url: "https://pub.dev" source: hosted - 
version: "0.9.2" + version: "0.9.3" timing: dependency: transitive description: @@ -1595,10 +1603,10 @@ packages: dependency: transitive description: name: url_launcher_web - sha256: "3692a459204a33e04bc94f5fb91158faf4f2c8903281ddd82915adecdb1a901d" + sha256: "8d9e750d8c9338601e709cd0885f95825086bd8b642547f26bda435aade95d8a" url: "https://pub.dev" source: hosted - version: "2.3.0" + version: "2.3.1" url_launcher_windows: dependency: transitive description: @@ -1683,18 +1691,18 @@ packages: dependency: "direct main" description: name: win32 - sha256: "0a989dc7ca2bb51eac91e8fd00851297cfffd641aa7538b165c62637ca0eaa4a" + sha256: "0eaf06e3446824099858367950a813472af675116bf63f008a4c2a75ae13e9cb" url: "https://pub.dev" source: hosted - version: "5.4.0" + version: "5.5.0" win32_registry: dependency: transitive description: name: win32_registry - sha256: "41fd8a189940d8696b1b810efb9abcf60827b6cbfab90b0c43e8439e3a39d85a" + sha256: "10589e0d7f4e053f2c61023a31c9ce01146656a70b7b7f0828c0b46d7da2a9bb" url: "https://pub.dev" source: hosted - version: "1.1.2" + version: "1.1.3" window_manager: dependency: "direct main" description: diff --git a/auth/pubspec.yaml b/auth/pubspec.yaml index b7a35b699..148800813 100644 --- a/auth/pubspec.yaml +++ b/auth/pubspec.yaml @@ -1,6 +1,6 @@ name: ente_auth description: ente two-factor authenticator -version: 2.0.57+257 +version: 3.0.1+301 publish_to: none environment: @@ -14,7 +14,7 @@ dependencies: bip39: ^1.0.6 #done bloc: ^8.1.2 clipboard: ^0.1.3 - collection: # dart + collection: ^1.18.0 # dart confetti: ^0.7.0 connectivity_plus: ^5.0.2 convert: ^3.1.1 @@ -62,6 +62,7 @@ dependencies: flutter_svg: ^2.0.5 fluttertoast: ^8.1.1 google_nav_bar: ^5.0.5 #supported + gradient_borders: ^1.0.0 http: ^1.1.0 intl: ^0.18.0 json_annotation: ^4.5.0 @@ -129,6 +130,7 @@ flutter: - assets/simple-icons/_data/ - assets/custom-icons/icons/ - assets/custom-icons/_data/ + - assets/svg/ fonts: - family: Inter @@ -145,16 +147,38 @@ flutter: flutter_icons: android: 
"launcher_icon" adaptive_icon_foreground: "assets/generation-icons/icon-light-adaptive-fg.png" - adaptive_icon_background: "#ffffff" + adaptive_icon_background: "assets/generation-icons/icon-light-adaptive-bg.png" ios: true image_path: "assets/generation-icons/icon-light.png" remove_alpha_ios: true flutter_native_splash: - color: "#ffffff" + color: "#FFFFFF" color_dark: "#000000" - image: assets/splash-screen-light.png - image_dark: assets/splash-screen-dark.png - android_fullscreen: true + image: "assets/splash/splash-icon-fg.png" android_gravity: center ios_content_mode: center + android_12: + # The image parameter sets the splash screen icon image. If this parameter is not specified, + # the app's launcher icon will be used instead. + # Please note that the splash screen will be clipped to a circle on the center of the screen. + # App icon with an icon background: This should be 960×960 pixels, and fit within a circle + # 640 pixels in diameter. + # App icon without an icon background: This should be 1152×1152 pixels, and fit within a circle + # 768 pixels in diameter. + image: "assets/splash/splash-icon-fg-12.png" + + # Splash screen background color. + color: "#FFFFFF" + + # App icon background color. + #icon_background_color: "#111111" + + # The branding property allows you to specify an image used as branding in the splash screen. + #branding: assets/dart.png + + # The image_dark, color_dark, icon_background_color_dark, and branding_dark set values that + # apply when the device is in dark mode. If they are not specified, the app will use the + # parameters from above. 
+ color_dark: "#000000" + #icon_background_color_dark: "#eeeeee" diff --git a/auth/test/models/code_test.dart b/auth/test/models/code_test.dart index 30ea23a4f..f51364118 100644 --- a/auth/test/models/code_test.dart +++ b/auth/test/models/code_test.dart @@ -1,9 +1,12 @@ +import 'dart:convert'; + import 'package:ente_auth/models/code.dart'; +import 'package:ente_auth/models/code_display.dart'; import 'package:flutter_test/flutter_test.dart'; void main() { test("parseCodeFromRawData", () { - final code1 = Code.fromRawData( + final code1 = Code.fromOTPAuthUrl( "otpauth://totp/example%20finance%3Aee%40ff.gg?secret=ASKZNWOU6SVYAMVS", ); expect(code1.issuer, "example finance", reason: "issuerMismatch"); @@ -12,7 +15,7 @@ void main() { }); test("parseDocumentedFormat", () { - final code = Code.fromRawData( + final code = Code.fromOTPAuthUrl( "otpauth://totp/testdata@ente.io?secret=ASKZNWOU6SVYAMVS&issuer=GitHub", ); expect(code.issuer, "GitHub", reason: "issuerMismatch"); @@ -21,7 +24,7 @@ void main() { }); test("validateCount", () { - final code = Code.fromRawData( + final code = Code.fromOTPAuthUrl( "otpauth://hotp/testdata@ente.io?secret=ASKZNWOU6SVYAMVS&issuer=GitHub&counter=15", ); expect(code.issuer, "GitHub", reason: "issuerMismatch"); @@ -29,10 +32,29 @@ void main() { expect(code.secret, "ASKZNWOU6SVYAMVS"); expect(code.counter, 15); }); + + test("validateDisplay", () { + Code code = Code.fromOTPAuthUrl( + "otpauth://hotp/testdata@ente.io?secret=ASKZNWOU6SVYAMVS&issuer=GitHub&counter=15", + ); + expect(code.issuer, "GitHub", reason: "issuerMismatch"); + expect(code.account, "testdata@ente.io", reason: "accountMismatch"); + expect(code.secret, "ASKZNWOU6SVYAMVS"); + expect(code.counter, 15); + code = code.copyWith( + display: CodeDisplay(pinned: true, tags: ["tag1", "com,ma", ';;%\$']), + ); + final dataToStore = code.toOTPAuthUrlFormat(); + final restoredCode = Code.fromOTPAuthUrl(jsonDecode(dataToStore)); + expect(restoredCode.display.pinned, true); + 
expect(restoredCode.display.tags, ["tag1", "com,ma", ';;%\$']); + final secondDataToStore = restoredCode.toOTPAuthUrlFormat(); + expect(dataToStore, secondDataToStore); + }); // test("parseWithFunnyAccountName", () { - final code = Code.fromRawData( + final code = Code.fromOTPAuthUrl( "otpauth://totp/Mongo Atlas:Acc !@#444?algorithm=sha1&digits=6&issuer=Mongo Atlas&period=30&secret=NI4CTTFEV4G2JFE6", ); expect(code.issuer, "Mongo Atlas", reason: "issuerMismatch"); @@ -43,11 +65,11 @@ void main() { test("parseAndUpdateInChinese", () { const String rubberDuckQr = 'otpauth://totp/%E6%A9%A1%E7%9A%AE%E9%B8%AD?secret=2CWDCK4EOIN5DJDRMYUMYBBO4MKSR5AX&issuer=ente.io'; - final code = Code.fromRawData(rubberDuckQr); + final code = Code.fromOTPAuthUrl(rubberDuckQr); expect(code.account, '橡皮鸭'); final String updatedRawCode = code.copyWith(account: '伍迪', issuer: '鸭子').rawData; - final updateCode = Code.fromRawData(updatedRawCode); + final updateCode = Code.fromOTPAuthUrl(updatedRawCode); expect(updateCode.account, '伍迪', reason: 'updated accountMismatch'); expect(updateCode.issuer, '鸭子', reason: 'updated issuerMismatch'); }); diff --git a/auth/web/index.html b/auth/web/index.html index ef953df53..097159f9e 100644 --- a/auth/web/index.html +++ b/auth/web/index.html @@ -29,9 +29,92 @@ Auth - + - + + + + + + + + + + + + + + + @@ -40,6 +123,13 @@ + + + + + + + diff --git a/auth/web/splash/img/dark-1x.png b/auth/web/splash/img/dark-1x.png index 87f84c70e..91acb41ae 100644 Binary files a/auth/web/splash/img/dark-1x.png and b/auth/web/splash/img/dark-1x.png differ diff --git a/auth/web/splash/img/dark-2x.png b/auth/web/splash/img/dark-2x.png index ce01bec05..9a7c72afa 100644 Binary files a/auth/web/splash/img/dark-2x.png and b/auth/web/splash/img/dark-2x.png differ diff --git a/auth/web/splash/img/dark-3x.png b/auth/web/splash/img/dark-3x.png index 75f4b1f3c..5b4d99582 100644 Binary files a/auth/web/splash/img/dark-3x.png and b/auth/web/splash/img/dark-3x.png differ diff --git 
a/auth/web/splash/img/dark-4x.png b/auth/web/splash/img/dark-4x.png index 2beb1c816..1666311d2 100644 Binary files a/auth/web/splash/img/dark-4x.png and b/auth/web/splash/img/dark-4x.png differ diff --git a/auth/web/splash/img/light-1x.png b/auth/web/splash/img/light-1x.png index 899cecf22..91acb41ae 100644 Binary files a/auth/web/splash/img/light-1x.png and b/auth/web/splash/img/light-1x.png differ diff --git a/auth/web/splash/img/light-2x.png b/auth/web/splash/img/light-2x.png index 4bb7a5751..9a7c72afa 100644 Binary files a/auth/web/splash/img/light-2x.png and b/auth/web/splash/img/light-2x.png differ diff --git a/auth/web/splash/img/light-3x.png b/auth/web/splash/img/light-3x.png index 176f0c723..5b4d99582 100644 Binary files a/auth/web/splash/img/light-3x.png and b/auth/web/splash/img/light-3x.png differ diff --git a/auth/web/splash/img/light-4x.png b/auth/web/splash/img/light-4x.png index a0d1a26f7..1666311d2 100644 Binary files a/auth/web/splash/img/light-4x.png and b/auth/web/splash/img/light-4x.png differ diff --git a/desktop/CHANGELOG.md b/desktop/CHANGELOG.md index eb118a424..5fbbefaaa 100644 --- a/desktop/CHANGELOG.md +++ b/desktop/CHANGELOG.md @@ -2,11 +2,17 @@ ## v1.7.0 (Unreleased) -v1.7 is a major rewrite to improve the security of our app. We have enabled -sandboxing and disabled node integration for the renderer process. All this -required restructuring our IPC mechanisms, which resulted in a lot of under the -hood changes. The outcome is a more secure app that also uses the latest and -greatest Electron recommendations. +v1.7 is a major rewrite to improve the security of our app. In particular, the +UI and the native parts of the app now run isolated from each other and +communicate only using a predefined IPC boundary. + +Other highlights: + +- View your photos on big screens and Chromecast devices by using the "Play + album on TV" option in the album menu. +- Support Brazilian Portuguese, German and Russian. 
+- Provide a checkbox to select all photos in a day. +- Fix a case where the dedup screen would not refresh after removing items. ## v1.6.63 diff --git a/desktop/docs/dependencies.md b/desktop/docs/dependencies.md index 605235703..a9e92b50d 100644 --- a/desktop/docs/dependencies.md +++ b/desktop/docs/dependencies.md @@ -83,9 +83,6 @@ are similar to that in the web code. Some extra ones specific to the code here are: -- [concurrently](https://github.com/open-cli-tools/concurrently) for spawning - parallel tasks when we do `yarn dev`. - - [shx](https://github.com/shelljs/shx) for providing a portable way to use Unix commands in our `package.json` scripts. This allows us to use the same commands (like `ln`) across different platforms like Linux and Windows. diff --git a/desktop/docs/release.md b/desktop/docs/release.md index a062d7d40..1cda1c11b 100644 --- a/desktop/docs/release.md +++ b/desktop/docs/release.md @@ -34,16 +34,15 @@ The workflow for making such "rc" builds is: gh workflow run desktop-release.yml ``` -We can do steps 2 and 3 multiple times; each time it'll just update the +We can do steps 2 and 3 multiple times: each time it'll just update the artifacts attached to the same draft. ## Workflow - Release -1. Update `package.json` in the source repo to use version `1.x.x`. Create a - new draft release in the release repo with tag `v1.x.x`. +1. Update source repo to set version `1.x.x` in `package.json` and finialize + the CHANGELOG. -2. Push code to the `desktop/rc` branch in the source repo. Remember to update - update the CHANGELOG. +2. Push code to the `desktop/rc` branch in the source repo. 3. In the release repo @@ -51,15 +50,13 @@ artifacts attached to the same draft. ./.github/trigger-release.sh v1.x.x ``` -4. If the build is successful, tag `desktop/rc` and merge it into main: +4. If the build is successful, tag `desktop/rc` in the source repo. 
```sh # Assuming we're on desktop/rc that just got build git tag photosd-v1.x.x git push origin photosd-v1.x.x - - # Now open a PR to merge it into main ``` ## Post build diff --git a/desktop/package.json b/desktop/package.json index 7297a0c17..2f2cc8f16 100644 --- a/desktop/package.json +++ b/desktop/package.json @@ -54,5 +54,6 @@ "shx": "^0.3", "typescript": "^5" }, + "packageManager": "yarn@1.22.21", "productName": "ente" } diff --git a/desktop/src/main.ts b/desktop/src/main.ts index 9cba9178d..262dceb79 100644 --- a/desktop/src/main.ts +++ b/desktop/src/main.ts @@ -17,7 +17,11 @@ import { existsSync } from "node:fs"; import fs from "node:fs/promises"; import os from "node:os"; import path from "node:path"; -import { attachFSWatchIPCHandlers, attachIPCHandlers } from "./main/ipc"; +import { + attachFSWatchIPCHandlers, + attachIPCHandlers, + attachLogoutIPCHandler, +} from "./main/ipc"; import log, { initLogging } from "./main/log"; import { createApplicationMenu, createTrayContextMenu } from "./main/menu"; import { setupAutoUpdater } from "./main/services/app-update"; @@ -237,7 +241,7 @@ const uniqueSavePath = (dirPath: string, fileName: string) => { * * @param webContents The renderer to configure. */ -export const allowExternalLinks = (webContents: WebContents) => { +export const allowExternalLinks = (webContents: WebContents) => // By default, if the user were open a link, say // https://github.com/ente-io/ente/discussions, then it would open a _new_ // BrowserWindow within our app. @@ -249,13 +253,37 @@ export const allowExternalLinks = (webContents: WebContents) => { // Returning `action` "deny" accomplishes this. webContents.setWindowOpenHandler(({ url }) => { if (!url.startsWith(rendererURL)) { + // This does not work in Ubuntu currently: mailto links seem to just + // get ignored, and HTTP links open in the text editor instead of in + // the browser. 
+ // https://github.com/electron/electron/issues/31485 void shell.openExternal(url); return { action: "deny" }; } else { return { action: "allow" }; } }); -}; + +/** + * Allow uploading to arbitrary S3 buckets. + * + * The files in the desktop app are served over the ente:// protocol. During + * testing or self-hosting, we might be using a S3 bucket that does not allow + * whitelisting a custom URI scheme. To avoid requiring the bucket to set an + * "Access-Control-Allow-Origin: *" or do a echo-back of `Origin`, we add a + * workaround here instead, intercepting the ACAO header and allowing `*`. + */ +export const allowAllCORSOrigins = (webContents: WebContents) => + webContents.session.webRequest.onHeadersReceived( + ({ responseHeaders }, callback) => { + const headers: NonNullable = {}; + for (const [key, value] of Object.entries(responseHeaders ?? {})) + if (key.toLowerCase() != "access-control-allow-origin") + headers[key] = value; + headers["Access-Control-Allow-Origin"] = ["*"]; + callback({ responseHeaders: headers }); + }, + ); /** * Add an icon for our app in the system tray. @@ -377,13 +405,19 @@ const main = () => { void (async () => { // Create window and prepare for the renderer. mainWindow = createMainWindow(); + + // Setup IPC and streams. + const watcher = createWatcher(mainWindow); attachIPCHandlers(); - attachFSWatchIPCHandlers(createWatcher(mainWindow)); + attachFSWatchIPCHandlers(watcher); + attachLogoutIPCHandler(watcher); registerStreamProtocol(); // Configure the renderer's environment. - setDownloadPath(mainWindow.webContents); - allowExternalLinks(mainWindow.webContents); + const webContents = mainWindow.webContents; + setDownloadPath(webContents); + allowExternalLinks(webContents); + allowAllCORSOrigins(webContents); // Start loading the renderer. 
void mainWindow.loadURL(rendererURL); diff --git a/desktop/src/main/ipc.ts b/desktop/src/main/ipc.ts index 1393f4bfd..e74d5e9d2 100644 --- a/desktop/src/main/ipc.ts +++ b/desktop/src/main/ipc.ts @@ -41,16 +41,13 @@ import { fsWriteFile, } from "./services/fs"; import { convertToJPEG, generateImageThumbnail } from "./services/image"; +import { logout } from "./services/logout"; import { clipImageEmbedding, clipTextEmbeddingIfAvailable, } from "./services/ml-clip"; -import { detectFaces, faceEmbedding } from "./services/ml-face"; -import { - clearStores, - encryptionKey, - saveEncryptionKey, -} from "./services/store"; +import { detectFaces, faceEmbeddings } from "./services/ml-face"; +import { encryptionKey, saveEncryptionKey } from "./services/store"; import { clearPendingUploads, listZipItems, @@ -65,7 +62,6 @@ import { watchFindFiles, watchGet, watchRemove, - watchReset, watchUpdateIgnoredFiles, watchUpdateSyncedFiles, } from "./services/watch"; @@ -106,8 +102,6 @@ export const attachIPCHandlers = () => { ipcMain.handle("selectDirectory", () => selectDirectory()); - ipcMain.on("clearStores", () => clearStores()); - ipcMain.handle("saveEncryptionKey", (_, encryptionKey: string) => saveEncryptionKey(encryptionKey), ); @@ -171,14 +165,7 @@ export const attachIPCHandlers = () => { command: string[], dataOrPathOrZipItem: Uint8Array | string | ZipItem, outputFileExtension: string, - timeoutMS: number, - ) => - ffmpegExec( - command, - dataOrPathOrZipItem, - outputFileExtension, - timeoutMS, - ), + ) => ffmpegExec(command, dataOrPathOrZipItem, outputFileExtension), ); // - ML @@ -195,8 +182,8 @@ export const attachIPCHandlers = () => { detectFaces(input), ); - ipcMain.handle("faceEmbedding", (_, input: Float32Array) => - faceEmbedding(input), + ipcMain.handle("faceEmbeddings", (_, input: Float32Array) => + faceEmbeddings(input), ); ipcMain.handle("legacyFaceCrop", (_, faceID: string) => @@ -269,6 +256,12 @@ export const attachFSWatchIPCHandlers = (watcher: FSWatcher) => 
{ ipcMain.handle("watchFindFiles", (_, folderPath: string) => watchFindFiles(folderPath), ); - - ipcMain.handle("watchReset", () => watchReset(watcher)); +}; + +/** + * Sibling of {@link attachIPCHandlers} specifically for use with the logout + * event with needs access to the {@link FSWatcher} instance. + */ +export const attachLogoutIPCHandler = (watcher: FSWatcher) => { + ipcMain.handle("logout", () => logout(watcher)); }; diff --git a/desktop/src/main/log.ts b/desktop/src/main/log.ts index cf1404a90..9718dfea5 100644 --- a/desktop/src/main/log.ts +++ b/desktop/src/main/log.ts @@ -5,11 +5,8 @@ import { isDev } from "./utils/electron"; /** * Initialize logging in the main process. * - * This will set our underlying logger up to log to a file named `ente.log`, - * - * - on Linux at ~/.config/ente/logs/ente.log - * - on macOS at ~/Library/Logs/ente/ente.log - * - on Windows at %USERPROFILE%\AppData\Roaming\ente\logs\ente.log + * This will set our underlying logger up to log to a file named `ente.log`, see + * [Note: App log path]. * * On dev builds, it will also log to the console. */ @@ -41,36 +38,41 @@ export const logToDisk = (message: string) => { log.info(`[rndr] ${message}`); }; -const logError = (message: string, e?: unknown) => { - if (!e) { - logError_(message); - return; - } +const messageWithError = (message: string, e?: unknown) => { + if (!e) return message; let es: string; if (e instanceof Error) { // In practice, we expect ourselves to be called with Error objects, so // this is the happy path so to say. - es = `${e.name}: ${e.message}\n${e.stack}`; + es = [`${e.name}: ${e.message}`, e.stack].filter((x) => x).join("\n"); } else { // For the rest rare cases, use the default string serialization of e. 
es = String(e); } - logError_(`${message}: ${es}`); + return `${message}: ${es}`; }; -const logError_ = (message: string) => { - log.error(`[main] [error] ${message}`); - if (isDev) console.error(`[error] ${message}`); +const logError = (message: string, e?: unknown) => { + const m = `[error] ${messageWithError(message, e)}`; + console.error(m); + log.error(`[main] ${m}`); +}; + +const logWarn = (message: string, e?: unknown) => { + const m = `[warn] ${messageWithError(message, e)}`; + console.error(m); + log.error(`[main] ${m}`); }; const logInfo = (...params: unknown[]) => { const message = params .map((p) => (typeof p == "string" ? p : util.inspect(p))) .join(" "); - log.info(`[main] ${message}`); - if (isDev) console.log(`[info] ${message}`); + const m = `[info] ${message}`; + if (isDev) console.log(m); + log.info(`[main] ${m}`); }; const logDebug = (param: () => unknown) => { @@ -96,10 +98,15 @@ export default { * any arbitrary object that we obtain, say, when in a try-catch handler (in * JavaScript any arbitrary value can be thrown). * - * The log is written to disk. In development builds, the log is also - * printed to the main (Node.js) process console. + * The log is written to disk and printed to the main (Node.js) process's + * console. */ error: logError, + /** + * Sibling of {@link error}, with the same parameters and behaviour, except + * it gets prefixed with a warning instead of an error tag. + */ + warn: logWarn, /** * Log a message. * @@ -120,7 +127,7 @@ export default { * The function can return an arbitrary value which is serialized before * being logged. * - * This log is NOT written to disk. And it is printed to the main (Node.js) + * This log is NOT written to disk. It is printed to the main (Node.js) * process console, but only on development builds. 
*/ debug: logDebug, diff --git a/desktop/src/main/services/app-update.ts b/desktop/src/main/services/app-update.ts index c12e1e319..6e3890e16 100644 --- a/desktop/src/main/services/app-update.ts +++ b/desktop/src/main/services/app-update.ts @@ -11,6 +11,11 @@ import { isDev } from "../utils/electron"; export const setupAutoUpdater = (mainWindow: BrowserWindow) => { autoUpdater.logger = electronLog; autoUpdater.autoDownload = false; + // This is going to be the default at some point, right now if we don't + // explicitly set this to true then electron-builder prints a (harmless) + // warning when updating on Windows. + // See: https://github.com/electron-userland/electron-builder/pull/6575 + autoUpdater.disableWebInstaller = true; /** * [Note: Testing auto updates] @@ -137,23 +142,24 @@ const checkForUpdatesAndNotify = async (mainWindow: BrowserWindow) => { const showUpdateDialog = (update: AppUpdate) => mainWindow.webContents.send("appUpdateAvailable", update); - log.debug(() => "Attempting auto update"); - await autoUpdater.downloadUpdate(); - - let timeoutId: ReturnType; + let timeout: ReturnType; const fiveMinutes = 5 * 60 * 1000; autoUpdater.on("update-downloaded", () => { - timeoutId = setTimeout( + log.info(`Update downloaded ${version}`); + timeout = setTimeout( () => showUpdateDialog({ autoUpdatable: true, version }), fiveMinutes, ); }); autoUpdater.on("error", (error) => { - clearTimeout(timeoutId); + clearTimeout(timeout); log.error("Auto update failed", error); showUpdateDialog({ autoUpdatable: false, version }); }); + + log.info(`Downloading update ${version}`); + await autoUpdater.downloadUpdate(); }; /** diff --git a/desktop/src/main/services/dir.ts b/desktop/src/main/services/dir.ts index 293a720f0..d97cad6fb 100644 --- a/desktop/src/main/services/dir.ts +++ b/desktop/src/main/services/dir.ts @@ -64,14 +64,18 @@ export const openLogDirectory = () => openDirectory(logDirectoryPath()); * Note that Chromium also stores the browser state, e.g. 
localStorage or disk * caches, in userData. * + * https://www.electronjs.org/docs/latest/api/app + * + * [Note: App log path] + * * Finally, there is the "logs" directory. This is not within "appData" but has * a slightly different OS specific path. Since our log file is named * "ente.log", it can be found at: * * - macOS: ~/Library/Logs/ente/ente.log (production) * - macOS: ~/Library/Logs/Electron/ente.log (dev) - * - * https://www.electronjs.org/docs/latest/api/app + * - Linux: ~/.config/ente/logs/ente.log + * - Windows: %USERPROFILE%\AppData\Roaming\ente\logs\ente.log */ const logDirectoryPath = () => app.getPath("logs"); diff --git a/desktop/src/main/services/ffmpeg.ts b/desktop/src/main/services/ffmpeg.ts index 6b1171459..4803fd6f0 100644 --- a/desktop/src/main/services/ffmpeg.ts +++ b/desktop/src/main/services/ffmpeg.ts @@ -1,11 +1,10 @@ import pathToFfmpeg from "ffmpeg-static"; import fs from "node:fs/promises"; import type { ZipItem } from "../../types/ipc"; -import log from "../log"; -import { ensure, withTimeout } from "../utils/common"; +import { ensure } from "../utils/common"; import { execAsync } from "../utils/electron"; import { - deleteTempFile, + deleteTempFileIgnoringErrors, makeFileForDataOrPathOrZipItem, makeTempFilePath, } from "../utils/temp"; @@ -46,13 +45,7 @@ export const ffmpegExec = async ( command: string[], dataOrPathOrZipItem: Uint8Array | string | ZipItem, outputFileExtension: string, - timeoutMS: number, ): Promise => { - // TODO (MR): This currently copies files for both input (when - // dataOrPathOrZipItem is data) and output. This needs to be tested - // extremely large video files when invoked downstream of `convertToMP4` in - // the web code. 
- const { path: inputFilePath, isFileTemporary: isInputFileTemporary, @@ -69,17 +62,13 @@ export const ffmpegExec = async ( outputFilePath, ); - if (timeoutMS) await withTimeout(execAsync(cmd), timeoutMS); - else await execAsync(cmd); + await execAsync(cmd); return fs.readFile(outputFilePath); } finally { - try { - if (isInputFileTemporary) await deleteTempFile(inputFilePath); - await deleteTempFile(outputFilePath); - } catch (e) { - log.error("Could not clean up temp files", e); - } + if (isInputFileTemporary) + await deleteTempFileIgnoringErrors(inputFilePath); + await deleteTempFileIgnoringErrors(outputFilePath); } }; @@ -112,3 +101,32 @@ const ffmpegBinaryPath = () => { // https://github.com/eugeneware/ffmpeg-static/issues/16 return ensure(pathToFfmpeg).replace("app.asar", "app.asar.unpacked"); }; + +/** + * A variant of {@link ffmpegExec} adapted to work with streams so that it can + * handle the MP4 conversion of large video files. + * + * See: [Note: Convert to MP4] + + * @param inputFilePath The path to a file on the user's local file system. This + * is the video we want to convert. + * @param inputFilePath The path to a file on the user's local file system where + * we should write the converted MP4 video. 
+ */ +export const ffmpegConvertToMP4 = async ( + inputFilePath: string, + outputFilePath: string, +): Promise => { + const command = [ + ffmpegPathPlaceholder, + "-i", + inputPathPlaceholder, + "-preset", + "ultrafast", + outputPathPlaceholder, + ]; + + const cmd = substitutePlaceholders(command, inputFilePath, outputFilePath); + + await execAsync(cmd); +}; diff --git a/desktop/src/main/services/image.ts b/desktop/src/main/services/image.ts index 957fe8120..fca4628b6 100644 --- a/desktop/src/main/services/image.ts +++ b/desktop/src/main/services/image.ts @@ -3,10 +3,9 @@ import fs from "node:fs/promises"; import path from "node:path"; import { CustomErrorMessage, type ZipItem } from "../../types/ipc"; -import log from "../log"; import { execAsync, isDev } from "../utils/electron"; import { - deleteTempFile, + deleteTempFileIgnoringErrors, makeFileForDataOrPathOrZipItem, makeTempFilePath, } from "../utils/temp"; @@ -23,12 +22,8 @@ export const convertToJPEG = async (imageData: Uint8Array) => { await execAsync(command); return new Uint8Array(await fs.readFile(outputFilePath)); } finally { - try { - await deleteTempFile(inputFilePath); - await deleteTempFile(outputFilePath); - } catch (e) { - log.error("Could not clean up temp files", e); - } + await deleteTempFileIgnoringErrors(inputFilePath); + await deleteTempFileIgnoringErrors(outputFilePath); } }; @@ -49,6 +44,9 @@ const convertToJPEGCommand = ( ]; case "linux": + // The bundled binary is an ELF x86-64 executable. + if (process.arch != "x64") + throw new Error(CustomErrorMessage.NotAvailable); return [ imageMagickPath(), inputFilePath, @@ -79,7 +77,7 @@ export const generateImageThumbnail = async ( const outputFilePath = await makeTempFilePath("jpeg"); - // Construct the command first, it may throw `NotAvailable` on win32. + // Construct the command first, it may throw `NotAvailable`. 
let quality = 70; let command = generateImageThumbnailCommand( inputFilePath, @@ -105,12 +103,9 @@ export const generateImageThumbnail = async ( } while (thumbnail.length > maxSize && quality > 50); return thumbnail; } finally { - try { - if (isInputFileTemporary) await deleteTempFile(inputFilePath); - await deleteTempFile(outputFilePath); - } catch (e) { - log.error("Could not clean up temp files", e); - } + if (isInputFileTemporary) + await deleteTempFileIgnoringErrors(inputFilePath); + await deleteTempFileIgnoringErrors(outputFilePath); } }; @@ -138,14 +133,17 @@ const generateImageThumbnailCommand = ( ]; case "linux": + // The bundled binary is an ELF x86-64 executable. + if (process.arch != "x64") + throw new Error(CustomErrorMessage.NotAvailable); return [ imageMagickPath(), - inputFilePath, - "-auto-orient", "-define", `jpeg:size=${2 * maxDimension}x${2 * maxDimension}`, + inputFilePath, + "-auto-orient", "-thumbnail", - `${maxDimension}x${maxDimension}>`, + `${maxDimension}x${maxDimension}`, "-unsharp", "0x.5", "-quality", diff --git a/desktop/src/main/services/logout.ts b/desktop/src/main/services/logout.ts new file mode 100644 index 000000000..e6cb7666c --- /dev/null +++ b/desktop/src/main/services/logout.ts @@ -0,0 +1,30 @@ +import type { FSWatcher } from "chokidar"; +import log from "../log"; +import { clearConvertToMP4Results } from "../stream"; +import { clearStores } from "./store"; +import { watchReset } from "./watch"; + +/** + * Perform the native side logout sequence. + * + * This function is guaranteed not to throw any errors. + * + * See: [Note: Do not throw during logout]. 
+ */ +export const logout = (watcher: FSWatcher) => { + try { + watchReset(watcher); + } catch (e) { + log.error("Ignoring error during logout (FS watch)", e); + } + try { + clearConvertToMP4Results(); + } catch (e) { + log.error("Ignoring error during logout (convert-to-mp4)", e); + } + try { + clearStores(); + } catch (e) { + log.error("Ignoring error during logout (native stores)", e); + } +}; diff --git a/desktop/src/main/services/ml-face.ts b/desktop/src/main/services/ml-face.ts index 976525255..33157694f 100644 --- a/desktop/src/main/services/ml-face.ts +++ b/desktop/src/main/services/ml-face.ts @@ -32,7 +32,7 @@ const cachedFaceEmbeddingSession = makeCachedInferenceSession( 5286998 /* 5 MB */, ); -export const faceEmbedding = async (input: Float32Array) => { +export const faceEmbeddings = async (input: Float32Array) => { // Dimension of each face (alias) const mobileFaceNetFaceSize = 112; // Smaller alias diff --git a/desktop/src/main/services/upload.ts b/desktop/src/main/services/upload.ts index f7d0436c0..516fbe6dd 100644 --- a/desktop/src/main/services/upload.ts +++ b/desktop/src/main/services/upload.ts @@ -3,6 +3,7 @@ import fs from "node:fs/promises"; import path from "node:path"; import { existsSync } from "original-fs"; import type { PendingUploads, ZipItem } from "../../types/ipc"; +import log from "../log"; import { uploadStatusStore } from "../stores/upload-status"; export const listZipItems = async (zipPath: string): Promise => { @@ -64,11 +65,16 @@ export const pendingUploads = async (): Promise => { // file, but the dedup logic will kick in at that point so no harm will come // of it. if (allZipItems === undefined) { - const allZipPaths = uploadStatusStore.get("filePaths") ?? []; + const allZipPaths = uploadStatusStore.get("zipPaths") ?? 
[]; const zipPaths = allZipPaths.filter((f) => existsSync(f)); zipItems = []; - for (const zip of zipPaths) - zipItems = zipItems.concat(await listZipItems(zip)); + for (const zip of zipPaths) { + try { + zipItems = zipItems.concat(await listZipItems(zip)); + } catch (e) { + log.error("Ignoring items in malformed zip", e); + } + } } else { zipItems = allZipItems.filter(([z]) => existsSync(z)); } diff --git a/desktop/src/main/services/watch.ts b/desktop/src/main/services/watch.ts index de66dcca1..e9629ff70 100644 --- a/desktop/src/main/services/watch.ts +++ b/desktop/src/main/services/watch.ts @@ -151,6 +151,15 @@ export const watchFindFiles = async (dirPath: string) => { return paths; }; +/** + * Stop watching all existing folder watches and remove any callbacks. + * + * This function is meant to be called when the user logs out. It stops + * all existing folder watches and forgets about any "on*" callback + * functions that have been registered. + * + * The persisted state itself gets cleared via {@link clearStores}. 
+ */ export const watchReset = (watcher: FSWatcher) => { watcher.unwatch(folderWatches().map((watch) => watch.folderPath)); }; diff --git a/desktop/src/main/stream.ts b/desktop/src/main/stream.ts index bae13aa12..1c8223c87 100644 --- a/desktop/src/main/stream.ts +++ b/desktop/src/main/stream.ts @@ -3,13 +3,20 @@ */ import { net, protocol } from "electron/main"; import StreamZip from "node-stream-zip"; +import { randomUUID } from "node:crypto"; import { createWriteStream, existsSync } from "node:fs"; import fs from "node:fs/promises"; import { Readable } from "node:stream"; import { ReadableStream } from "node:stream/web"; import { pathToFileURL } from "node:url"; import log from "./log"; +import { ffmpegConvertToMP4 } from "./services/ffmpeg"; import { ensure } from "./utils/common"; +import { + deleteTempFile, + deleteTempFileIgnoringErrors, + makeTempFilePath, +} from "./utils/temp"; /** * Register a protocol handler that we use for streaming large files between the @@ -34,119 +41,117 @@ import { ensure } from "./utils/common"; * Depends on {@link registerPrivilegedSchemes}. */ export const registerStreamProtocol = () => { - protocol.handle("stream", async (request: Request) => { - const url = request.url; - // The request URL contains the command to run as the host, and the - // pathname of the file(s) as the search params. 
- const { host, searchParams } = new URL(url); - switch (host) { - case "read": - return handleRead(ensure(searchParams.get("path"))); - case "read-zip": - return handleReadZip( - ensure(searchParams.get("zipPath")), - ensure(searchParams.get("entryName")), - ); - case "write": - return handleWrite(ensure(searchParams.get("path")), request); - default: - return new Response("", { status: 404 }); + protocol.handle("stream", (request: Request) => { + try { + return handleStreamRequest(request); + } catch (e) { + log.error(`Failed to handle stream request for ${request.url}`, e); + return new Response(String(e), { status: 500 }); } }); }; -const handleRead = async (path: string) => { - try { - const res = await net.fetch(pathToFileURL(path).toString()); - if (res.ok) { - // net.fetch already seems to add "Content-Type" and "Last-Modified" - // headers, but I couldn't find documentation for this. In any case, - // since we already are stat-ting the file for the "Content-Length", - // we explicitly add the "X-Last-Modified-Ms" too, - // - // 1. Guaranteeing its presence, - // - // 2. Having it be in the exact format we want (no string <-> date - // conversions), - // - // 3. Retaining milliseconds. +const handleStreamRequest = async (request: Request): Promise => { + const url = request.url; + // The request URL contains the command to run as the host, and the + // pathname of the file(s) as the search params. + const { host, searchParams } = new URL(url); + switch (host) { + case "read": + return handleRead(ensure(searchParams.get("path"))); - const stat = await fs.stat(path); + case "read-zip": + return handleReadZip( + ensure(searchParams.get("zipPath")), + ensure(searchParams.get("entryName")), + ); - // Add the file's size as the Content-Length header. 
- const fileSize = stat.size; - res.headers.set("Content-Length", `${fileSize}`); + case "write": + return handleWrite(ensure(searchParams.get("path")), request); - // Add the file's last modified time (as epoch milliseconds). - const mtimeMs = stat.mtimeMs; - res.headers.set("X-Last-Modified-Ms", `${mtimeMs}`); + case "convert-to-mp4": { + const token = searchParams.get("token"); + const done = searchParams.get("done") !== null; + return token + ? done + ? handleConvertToMP4ReadDone(token) + : handleConvertToMP4Read(token) + : handleConvertToMP4Write(request); } - return res; - } catch (e) { - log.error(`Failed to read stream at ${path}`, e); - return new Response(`Failed to read stream: ${String(e)}`, { - status: 500, - }); + + default: + return new Response("", { status: 404 }); } }; +const handleRead = async (path: string) => { + const res = await net.fetch(pathToFileURL(path).toString()); + if (res.ok) { + // net.fetch already seems to add "Content-Type" and "Last-Modified" + // headers, but I couldn't find documentation for this. In any case, + // since we already are stat-ting the file for the "Content-Length", we + // explicitly add the "X-Last-Modified-Ms" too, + // + // 1. Guaranteeing its presence, + // + // 2. Having it be in the exact format we want (no string <-> date + // conversions), + // + // 3. Retaining milliseconds. + + const stat = await fs.stat(path); + + // Add the file's size as the Content-Length header. + const fileSize = stat.size; + res.headers.set("Content-Length", `${fileSize}`); + + // Add the file's last modified time (as epoch milliseconds). 
+ const mtimeMs = stat.mtimeMs; + res.headers.set("X-Last-Modified-Ms", `${mtimeMs}`); + } + return res; +}; + const handleReadZip = async (zipPath: string, entryName: string) => { - try { - const zip = new StreamZip.async({ file: zipPath }); - const entry = await zip.entry(entryName); - if (!entry) return new Response("", { status: 404 }); + const zip = new StreamZip.async({ file: zipPath }); + const entry = await zip.entry(entryName); + if (!entry) return new Response("", { status: 404 }); - // This returns an "old style" NodeJS.ReadableStream. - const stream = await zip.stream(entry); - // Convert it into a new style NodeJS.Readable. - const nodeReadable = new Readable().wrap(stream); - // Then convert it into a Web stream. - const webReadableStreamAny = Readable.toWeb(nodeReadable); - // However, we get a ReadableStream now. This doesn't go into the - // `BodyInit` expected by the Response constructor, which wants a - // ReadableStream. Force a cast. - const webReadableStream = - webReadableStreamAny as ReadableStream; + // This returns an "old style" NodeJS.ReadableStream. + const stream = await zip.stream(entry); + // Convert it into a new style NodeJS.Readable. + const nodeReadable = new Readable().wrap(stream); + // Then convert it into a Web stream. + const webReadableStreamAny = Readable.toWeb(nodeReadable); + // However, we get a ReadableStream now. This doesn't go into the + // `BodyInit` expected by the Response constructor, which wants a + // ReadableStream. Force a cast. + const webReadableStream = + webReadableStreamAny as ReadableStream; - // Close the zip handle when the underlying stream closes. - stream.on("end", () => void zip.close()); + // Close the zip handle when the underlying stream closes. 
+ stream.on("end", () => void zip.close()); - return new Response(webReadableStream, { - headers: { - // We don't know the exact type, but it doesn't really matter, - // just set it to a generic binary content-type so that the - // browser doesn't tinker with it thinking of it as text. - "Content-Type": "application/octet-stream", - "Content-Length": `${entry.size}`, - // While it is documented that entry.time is the modification - // time, the units are not mentioned. By seeing the source code, - // we can verify that it is indeed epoch milliseconds. See - // `parseZipTime` in the node-stream-zip source, - // https://github.com/antelle/node-stream-zip/blob/master/node_stream_zip.js - "X-Last-Modified-Ms": `${entry.time}`, - }, - }); - } catch (e) { - log.error( - `Failed to read entry ${entryName} from zip file at ${zipPath}`, - e, - ); - return new Response(`Failed to read stream: ${String(e)}`, { - status: 500, - }); - } + return new Response(webReadableStream, { + headers: { + // We don't know the exact type, but it doesn't really matter, just + // set it to a generic binary content-type so that the browser + // doesn't tinker with it thinking of it as text. + "Content-Type": "application/octet-stream", + "Content-Length": `${entry.size}`, + // While it is documented that entry.time is the modification time, + // the units are not mentioned. By seeing the source code, we can + // verify that it is indeed epoch milliseconds. 
See `parseZipTime` + // in the node-stream-zip source, + // https://github.com/antelle/node-stream-zip/blob/master/node_stream_zip.js + "X-Last-Modified-Ms": `${entry.time}`, + }, + }); }; const handleWrite = async (path: string, request: Request) => { - try { - await writeStream(path, ensure(request.body)); - return new Response("", { status: 200 }); - } catch (e) { - log.error(`Failed to write stream to ${path}`, e); - return new Response(`Failed to write stream: ${String(e)}`, { - status: 500, - }); - } + await writeStream(path, ensure(request.body)); + return new Response("", { status: 200 }); }; /** @@ -154,7 +159,7 @@ const handleWrite = async (path: string, request: Request) => { * * The returned promise resolves when the write completes. * - * @param filePath The local filesystem path where the file should be written. + * @param filePath The local file system path where the file should be written. * * @param readableStream A web * [ReadableStream](https://developer.mozilla.org/en-US/docs/Web/API/ReadableStream). @@ -181,3 +186,84 @@ const writeNodeStream = async (filePath: string, fileStream: Readable) => { }); }); }; + +/** + * A map from token to file paths for convert-to-mp4 requests that we have + * received. + */ +const convertToMP4Results = new Map(); + +/** + * Clear any in-memory state for in-flight convert-to-mp4 requests. Meant to be + * called during logout. + */ +export const clearConvertToMP4Results = () => convertToMP4Results.clear(); + +/** + * [Note: Convert to MP4] + * + * When we want to convert a video to MP4, if we were to send the entire + * contents of the video from the renderer to the main process over IPC, it just + * causes the renderer to run out of memory and restart when the videos are very + * large. So we need to stream the original video renderer → main and then + * stream back the converted video renderer ← main. 
+ * + * Currently Chromium does not support bi-directional streaming ("full" duplex + * mode for the Web fetch API). So we need to simulate that using two different + * streaming requests. + * + * renderer → main stream://convert-to-mp4 + * → request.body is the original video + * ← response is a token + * + * renderer → main stream://convert-to-mp4?token= + * ← response.body is the converted video + * + * renderer → main stream://convert-to-mp4?token=&done + * ← 200 OK + * + * Note that the conversion itself is not streaming. The conversion still + * happens in a single shot, we are just streaming the data across the IPC + * boundary to allow us to pass large amounts of data without running out of + * memory. + * + * See also: [Note: IPC streams] + */ +const handleConvertToMP4Write = async (request: Request) => { + const inputTempFilePath = await makeTempFilePath(); + await writeStream(inputTempFilePath, ensure(request.body)); + + const outputTempFilePath = await makeTempFilePath("mp4"); + try { + await ffmpegConvertToMP4(inputTempFilePath, outputTempFilePath); + } catch (e) { + log.error("Conversion to MP4 failed", e); + await deleteTempFileIgnoringErrors(outputTempFilePath); + throw e; + } finally { + await deleteTempFileIgnoringErrors(inputTempFilePath); + } + + const token = randomUUID(); + convertToMP4Results.set(token, outputTempFilePath); + return new Response(token, { status: 200 }); +}; + +const handleConvertToMP4Read = async (token: string) => { + const filePath = convertToMP4Results.get(token); + if (!filePath) + return new Response(`Unknown token ${token}`, { status: 404 }); + + return net.fetch(pathToFileURL(filePath).toString()); +}; + +const handleConvertToMP4ReadDone = async (token: string) => { + const filePath = convertToMP4Results.get(token); + if (!filePath) + return new Response(`Unknown token ${token}`, { status: 404 }); + + await deleteTempFile(filePath); + + convertToMP4Results.delete(token); + return new Response("", { status: 200 }); +}; 
diff --git a/desktop/src/main/utils/common.ts b/desktop/src/main/utils/common.ts index 1f5016e61..5ed46aa8a 100644 --- a/desktop/src/main/utils/common.ts +++ b/desktop/src/main/utils/common.ts @@ -13,32 +13,3 @@ export const ensure = (v: T | null | undefined): T => { if (v === undefined) throw new Error("Required value was not found"); return v; }; - -/** - * Wait for {@link ms} milliseconds - * - * This function is a promisified `setTimeout`. It returns a promise that - * resolves after {@link ms} milliseconds. - */ -export const wait = (ms: number) => - new Promise((resolve) => setTimeout(resolve, ms)); - -/** - * Await the given {@link promise} for {@link timeoutMS} milliseconds. If it - * does not resolve within {@link timeoutMS}, then reject with a timeout error. - */ -export const withTimeout = async (promise: Promise, ms: number) => { - let timeoutId: ReturnType; - const rejectOnTimeout = new Promise((_, reject) => { - timeoutId = setTimeout( - () => reject(new Error("Operation timed out")), - ms, - ); - }); - const promiseAndCancelTimeout = async () => { - const result = await promise; - clearTimeout(timeoutId); - return result; - }; - return Promise.race([promiseAndCancelTimeout(), rejectOnTimeout]); -}; diff --git a/desktop/src/main/utils/electron.ts b/desktop/src/main/utils/electron.ts index 93e8565ef..133edf87c 100644 --- a/desktop/src/main/utils/electron.ts +++ b/desktop/src/main/utils/electron.ts @@ -49,12 +49,12 @@ export const posixPath = (platformPath: string) => * > output, this might not be the best option and it might be better to use the * > underlying functions. */ -export const execAsync = (command: string | string[]) => { +export const execAsync = async (command: string | string[]) => { const escapedCommand = Array.isArray(command) ? 
shellescape(command) : command; const startTime = Date.now(); - const result = execAsync_(escapedCommand); + const result = await execAsync_(escapedCommand); log.debug( () => `${escapedCommand} (${Math.round(Date.now() - startTime)} ms)`, ); diff --git a/desktop/src/main/utils/temp.ts b/desktop/src/main/utils/temp.ts index 11f7a5d84..70dec844d 100644 --- a/desktop/src/main/utils/temp.ts +++ b/desktop/src/main/utils/temp.ts @@ -4,6 +4,7 @@ import { existsSync } from "node:fs"; import fs from "node:fs/promises"; import path from "node:path"; import type { ZipItem } from "../../types/ipc"; +import log from "../log"; import { ensure } from "./common"; /** @@ -62,6 +63,19 @@ export const deleteTempFile = async (tempFilePath: string) => { await fs.rm(tempFilePath, { force: true }); }; +/** + * A variant of {@link deleteTempFile} that supresses any errors, making it + * safe to call them in a sequence without needing to handle the scenario where + * one of them failing causes the rest to be skipped. + */ +export const deleteTempFileIgnoringErrors = async (tempFilePath: string) => { + try { + await deleteTempFile(tempFilePath); + } catch (e) { + log.error(`Could not delete temporary file at path ${tempFilePath}`, e); + } +}; + /** The result of {@link makeFileForDataOrPathOrZipItem}. 
*/ interface FileForDataOrPathOrZipItem { /** diff --git a/desktop/src/preload.ts b/desktop/src/preload.ts index f9147e288..c5a1d0d31 100644 --- a/desktop/src/preload.ts +++ b/desktop/src/preload.ts @@ -63,7 +63,10 @@ const openLogDirectory = () => ipcRenderer.invoke("openLogDirectory"); const selectDirectory = () => ipcRenderer.invoke("selectDirectory"); -const clearStores = () => ipcRenderer.send("clearStores"); +const logout = () => { + watchRemoveListeners(); + ipcRenderer.send("logout"); +}; const encryptionKey = () => ipcRenderer.invoke("encryptionKey"); @@ -140,14 +143,12 @@ const ffmpegExec = ( command: string[], dataOrPathOrZipItem: Uint8Array | string | ZipItem, outputFileExtension: string, - timeoutMS: number, ) => ipcRenderer.invoke( "ffmpegExec", command, dataOrPathOrZipItem, outputFileExtension, - timeoutMS, ); // - ML @@ -161,8 +162,8 @@ const clipTextEmbeddingIfAvailable = (text: string) => const detectFaces = (input: Float32Array) => ipcRenderer.invoke("detectFaces", input); -const faceEmbedding = (input: Float32Array) => - ipcRenderer.invoke("faceEmbedding", input); +const faceEmbeddings = (input: Float32Array) => + ipcRenderer.invoke("faceEmbeddings", input); const legacyFaceCrop = (faceID: string) => ipcRenderer.invoke("legacyFaceCrop", faceID); @@ -211,11 +212,10 @@ const watchOnRemoveDir = (f: (path: string, watch: FolderWatch) => void) => { const watchFindFiles = (folderPath: string) => ipcRenderer.invoke("watchFindFiles", folderPath); -const watchReset = async () => { +const watchRemoveListeners = () => { ipcRenderer.removeAllListeners("watchAddFile"); ipcRenderer.removeAllListeners("watchRemoveFile"); ipcRenderer.removeAllListeners("watchRemoveDir"); - await ipcRenderer.invoke("watchReset"); }; // - Upload @@ -307,7 +307,7 @@ contextBridge.exposeInMainWorld("electron", { openDirectory, openLogDirectory, selectDirectory, - clearStores, + logout, encryptionKey, saveEncryptionKey, onMainWindowFocus, @@ -343,7 +343,7 @@ 
contextBridge.exposeInMainWorld("electron", { clipImageEmbedding, clipTextEmbeddingIfAvailable, detectFaces, - faceEmbedding, + faceEmbeddings, legacyFaceCrop, // - Watch @@ -358,7 +358,6 @@ contextBridge.exposeInMainWorld("electron", { onRemoveFile: watchOnRemoveFile, onRemoveDir: watchOnRemoveDir, findFiles: watchFindFiles, - reset: watchReset, }, // - Upload diff --git a/desktop/yarn.lock b/desktop/yarn.lock index eee3c4b3a..2aa060efc 100644 --- a/desktop/yarn.lock +++ b/desktop/yarn.lock @@ -316,9 +316,9 @@ integrity sha512-nG96G3Wp6acyAgJqGasjODb+acrI7KltPiRxzHPXnP3NgI28bpQDRv53olbqGXbfcgF5aiiHmO3xpwEpS5Ld9g== "@types/node@*", "@types/node@^20.9.0": - version "20.12.7" - resolved "https://registry.yarnpkg.com/@types/node/-/node-20.12.7.tgz#04080362fa3dd6c5822061aa3124f5c152cff384" - integrity sha512-wq0cICSkRLVaf3UGLMGItu/PtdY7oaXaI/RVU+xliKVOtRna3PRY57ZDfztpDL0n11vfymMUnXv8QwYCO7L1wg== + version "20.12.12" + resolved "https://registry.yarnpkg.com/@types/node/-/node-20.12.12.tgz#7cbecdf902085cec634fdb362172dfe12b8f2050" + integrity sha512-eWLDGF/FOSPtAvEqeRAQ4C8LSA7M1I7i0ky1I8U7kD1J5ITyW3AsRhQrKVoWf5pFKZ2kILsEGJhsI9r93PYnOw== dependencies: undici-types "~5.26.4" @@ -1266,9 +1266,9 @@ electron-updater@^6.1: tiny-typed-emitter "^2.1.0" electron@^30: - version "30.0.2" - resolved "https://registry.yarnpkg.com/electron/-/electron-30.0.2.tgz#95ba019216bf8be9f3097580123e33ea37497733" - integrity sha512-zv7T+GG89J/hyWVkQsLH4Y/rVEfqJG5M/wOBIGNaDdqd8UV9/YZPdS7CuFeaIj0H9LhCt95xkIQNpYB/3svOkQ== + version "30.0.6" + resolved "https://registry.yarnpkg.com/electron/-/electron-30.0.6.tgz#9ddea5f68396ecca88ad7c2c466a30fc9c16144b" + integrity sha512-PkhEPFdpYcTzjAO3gMHZ+map7g2+xCrMDedo/L1i0ir2BRXvAB93IkTJX497U6Srb/09r2cFt+k20VPNVCdw3Q== dependencies: "@electron/get" "^2.0.0" "@types/node" "^20.9.0" @@ -2924,7 +2924,12 @@ semver@^6.2.0: resolved "https://registry.yarnpkg.com/semver/-/semver-6.3.1.tgz#556d2ef8689146e46dcea4bfdd095f3434dffcb4" integrity 
sha512-BR7VvDCVHO+q2xBEWskxS6DJE1qRnb7DxzUrogb71CWoSficBxYsiAGd+Kl0mmq/MprG9yArRkyrQxTO6XjMzA== -semver@^7.3.2, semver@^7.3.5, semver@^7.3.8, semver@^7.5.3, semver@^7.6.0: +semver@^7.3.2: + version "7.6.2" + resolved "https://registry.yarnpkg.com/semver/-/semver-7.6.2.tgz#1e3b34759f896e8f14d6134732ce798aeb0c6e13" + integrity sha512-FNAIBWCx9qcRhoHcgcJ0gvU7SN1lYU2ZXuSfl04bSC5OpvDHFyJCjdNHomPXxjQlCBU67YW64PzY7/VIEH7F2w== + +semver@^7.3.5, semver@^7.3.8, semver@^7.5.3, semver@^7.6.0: version "7.6.0" resolved "https://registry.yarnpkg.com/semver/-/semver-7.6.0.tgz#1a46a4db4bffcccd97b743b5005c8325f23d4e2d" integrity sha512-EnwXhrlwXMk9gKu5/flx5sv/an57AkRplG3hTK68W7FRDN+k+OWBj65M7719OkA82XLBxrcX0KSHj+X5COhOVg== diff --git a/docs/docs/.vitepress/sidebar.ts b/docs/docs/.vitepress/sidebar.ts index 6af9e3556..84ae5e0fa 100644 --- a/docs/docs/.vitepress/sidebar.ts +++ b/docs/docs/.vitepress/sidebar.ts @@ -123,6 +123,10 @@ export const sidebar = [ text: "Troubleshooting", collapsed: true, items: [ + { + text: "Desktop install", + link: "/photos/troubleshooting/desktop-install/", + }, { text: "Files not uploading", link: "/photos/troubleshooting/files-not-uploading", @@ -197,6 +201,10 @@ export const sidebar = [ text: "System requirements", link: "/self-hosting/guides/system-requirements", }, + { + text: "Configuring S3", + link: "/self-hosting/guides/configuring-s3", + }, { text: "Using external S3", link: "/self-hosting/guides/external-s3", diff --git a/docs/docs/photos/features/cast/index.md b/docs/docs/photos/features/cast/index.md index 89dc801f6..ecd91cb7c 100644 --- a/docs/docs/photos/features/cast/index.md +++ b/docs/docs/photos/features/cast/index.md @@ -1,19 +1,13 @@ --- -title: Archive -description: | - Archiving photos and albums in Ente Photos to remove them from your home - timeline +title: Cast +description: + Casting your photos on to a large screen or a TV or a Chromecast device --- -> [!CAUTION] -> -> This is preview documentation for an upcoming feature. 
This feature has not -> yet been released yet, so the steps below will not work currently. - # Cast With Ente Cast, you can play a slideshow of your favourite albums on your Google -Chromecast TVs or other Internet-connected large screen devices. +Chromecast TVs or any other internet-connected large screen devices. ## Get Started diff --git a/docs/docs/photos/troubleshooting/desktop-install/index.md b/docs/docs/photos/troubleshooting/desktop-install/index.md new file mode 100644 index 000000000..7410c7818 --- /dev/null +++ b/docs/docs/photos/troubleshooting/desktop-install/index.md @@ -0,0 +1,75 @@ +--- +title: Desktop installation +description: Troubleshooting issues when installing the Ente Photos desktop app +--- + +# Desktop app installation + +The latest version of the Ente Photos desktop app can be downloaded from +[ente.io/download](https://ente.io/download). If you're having trouble, please +see if any of the following cases apply. + +## Windows + +If the app stops with an "A JavaScript error occurred in the main process - The +specified module could not be found" error on your Windows machine when you +start it, then you might need to install the VC++ runtime from Microsoft. + +This is what the error looks like: + +![Error when VC++ runtime is not installed](windows-vc.png){width=500px} + +You can install the Microsoft VC++ redistributable runtime from here:
+https://learn.microsoft.com/en-us/cpp/windows/latest-supported-vc-redist?view=msvc-170#latest-microsoft-visual-c-redistributable-version + +## AppImages on ARM64 Linux + +If you're on an ARM64 machine running Linux, and the AppImages doesn't do +anything when you run it, you will need to run the following command on your +machine: + +```sh +sudo ln -s /usr/lib/aarch64-linux-gnu/libz.so{.1,} +``` + +It is possible that the exact path might be different on your machine. Briefly, +what we need to do is create `libz.so` as an alias for `libz.so.1`. For more +details, see the following upstream issues: + +- libz.so cannot open shared object file on ARM64 - + [AppImage/AppImageKit/issues/1092](https://github.com/AppImage/AppImageKit/issues/1092) + +- libz.so: cannot open shared object file with Ubuntu arm64 - + [electron-userland/electron-builder/issues/7835](https://github.com/electron-userland/electron-builder/issues/7835) + +## AppImage says it requires FUSE + +See +[docs.appimage.org](https://docs.appimage.org/user-guide/troubleshooting/fuse.html#the-appimage-tells-me-it-needs-fuse-to-run). + +tl;dr; for example, on Ubuntu, + +```sh +sudo apt install libfuse2 +``` + +## Linux SUID error + +On some Linux distributions, if you run the AppImage from the CLI, it might fail +with the following error: + +> The SUID sandbox helper binary was found, but is not configured correctly. + +This happens when you try to run the AppImage from the command line. If you +instead double click on the AppImage in your Files browser, then it should start +properly. + +If you do want to run it from the command line, you can do so by passing the +`--no-sandbox` flag when executing the AppImage. e.g. + +```sh +./ente.AppImage --no-sandbox +``` + +For more details, see this upstream issue on +[electron](https://github.com/electron/electron/issues/17972). 
diff --git a/docs/docs/photos/troubleshooting/desktop-install/windows-vc.png b/docs/docs/photos/troubleshooting/desktop-install/windows-vc.png new file mode 100644 index 000000000..852c037d5 Binary files /dev/null and b/docs/docs/photos/troubleshooting/desktop-install/windows-vc.png differ diff --git a/docs/docs/self-hosting/guides/configuring-s3.md b/docs/docs/self-hosting/guides/configuring-s3.md new file mode 100644 index 000000000..8e823ed2a --- /dev/null +++ b/docs/docs/self-hosting/guides/configuring-s3.md @@ -0,0 +1,80 @@ +--- +title: Configuring S3 buckets +description: + Configure S3 endpoints to fix upload errors or use your self hosted ente + from outside localhost +--- + +# Configuring S3 + +There are three components involved in uploading: + +1. The client (e.g. the web app or the mobile app) +2. Ente's server (museum) +3. The S3-compatible object storage (e.g. minio in the default starter) + +For the uploads to work, all three of them need to be able to reach each other. +This is because the client uploads directly to the object storage. The +interaction goes something like this: + +1. Client wants to upload, it asks museum where it should upload to. +2. Museum creates pre-signed URLs for the S3 bucket that was configured. +3. Client directly uploads to the S3 buckets these URLs. + +The upshot of this is that _both_ the client and museum should be able to reach +your S3 bucket. + +The URL for the S3 bucket is configured in +[scripts/compose/credentials.yaml](https://github.com/ente-io/ente/blob/main/server/scripts/compose/credentials.yaml#L10). +You can edit this file directly when testing, though it is just simpler and more +robust to create a `museum.yaml` (in the same folder as the Docker compose file) +and put your custom configuration there (in your case, you can put an entire +`s3` config object in your `museum.yaml`). 
+ +> [!TIP] +> +> For more details about these configuration objects, see the documentation for +> the `s3` object in +> [configurations/local.yaml](https://github.com/ente-io/ente/blob/main/server/configurations/local.yaml). + +By default, you only need to configure the endpoint for the first bucket. + +> [!NOTE] +> +> If you're wondering why there are 3 buckets - that's because our production +> instance uses these to perform replication. +> +> However, in a self hosted setup replication is off by default (you can turn it +> on if you want). When replication is turned off, only the first bucket is +> used, and you can remove the other two if you wish or just ignore them. + +The `endpoint` for the first bucket in the starter `credentials.yaml` is +`localhost:3200`. The way this works then is that both museum (`2`) and minio +(`3`) are running within the same Docker compose cluster, so are able to reach +each other. If at this point we were to run the web app (`1`) on localhost (say +using `yarn dev:photos`), it would also run on localhost and thus would be able +to reach `3`. + +If you were to try and connect from a mobile app, this would not work since +`localhost:3200` would not resolve on your mobile. So you'll need to modify this +endpoint to a value, say `yourserverip:3200`, so that the mobile app can also +reach it. + +The same principle applies if you're deploying to your custom domain. + +> [!NOTE] +> +> If you need to configure SSL, for example if you're running over the internet, +> you'll need to turn off `s3.are_local_buckets` (which disables SSL in the +> default starter compose template). +> +> Disabling `s3.are_local_buckets` also switches to the subdomain style URLs for +> the buckets. However, not all S3 providers support these, in particular, minio +> does not work with these in default configuration. So in such cases you'll +> also need to then enable `s3.use_path_style_urls`. 
+ +To summarize: + +Set the S3 bucket `endpoint` in `credentials.yaml` to a `yourserverip:3200` or +some such IP/hostname that is accessible from both where you are running the Ente +clients (e.g. the mobile app) and also from within the Docker compose cluster. diff --git a/docs/docs/self-hosting/guides/custom-server/index.md b/docs/docs/self-hosting/guides/custom-server/index.md index 8e16004a1..110e3dbb8 100644 --- a/docs/docs/self-hosting/guides/custom-server/index.md +++ b/docs/docs/self-hosting/guides/custom-server/index.md @@ -34,7 +34,8 @@ endpoint: api: "http://localhost:8080" ``` -(Another [example](https://github.com/ente-io/ente/blob/main/cli/config.yaml.example)) +(Another +[example](https://github.com/ente-io/ente/blob/main/cli/config.yaml.example)) ## Web appps and Photos desktop app @@ -46,5 +47,5 @@ connect to. For example: NEXT_PUBLIC_ENTE_ENDPOINT=http://localhost:8080 yarn dev:photos ``` -For more details, see [hosting the web -app](https://help.ente.io/self-hosting/guides/web-app). +For more details, see +[hosting the web app](https://help.ente.io/self-hosting/guides/web-app). diff --git a/docs/docs/self-hosting/guides/external-s3.md b/docs/docs/self-hosting/guides/external-s3.md index 505ae6fe9..87a48de27 100644 --- a/docs/docs/self-hosting/guides/external-s3.md +++ b/docs/docs/self-hosting/guides/external-s3.md @@ -164,6 +164,27 @@ EOF RUN chmod +x /docker-entrypoint.d/replace_ente_endpoints.sh ``` +This runs nginx inside to handle both the web & album URLs so we don't have to +make two web images with different ports. + +- `DOCKER_RUNTIME_REPLACE_ENDPOINT` this is your public museum API URL. +- `DOCKER_RUNTIME_REPLACE_ALBUMS_ENDPOINT` this is the shared albums URL (for + more details about configuring shared albums, see + [faq/sharing](/self-hosting/faq/sharing)). 
+ +Note how above we had updated the `compose.yaml` file for the server with + +```yaml +web: + build: + context: web + ports: + - 8081:80 + - 8082:80 +``` + +so that web and album both point to the same container and nginx will handle it. + ## 2. Set up the `.credentials.env` file Create a `.credentials.env` file at the root of the project with the following diff --git a/docs/docs/self-hosting/guides/index.md b/docs/docs/self-hosting/guides/index.md index a8a64d960..b8a73d7eb 100644 --- a/docs/docs/self-hosting/guides/index.md +++ b/docs/docs/self-hosting/guides/index.md @@ -16,5 +16,8 @@ See the sidebar for existing guides. In particular: - For various admin related tasks, e.g. increasing the storage quota on your self hosted instance, see [administering your custom server](admin). -- For self hosting both the server and web app using external S3 buckets for - object storage, see [using external S3](external-s3). +- For configuring your S3 buckets to get the object storage to work from your + mobile device or for fixing upload errors, see + [configuring S3](configuring-s3). There is also a longer + [community contributed guide](external-s3) for a more self hosted setup of + both the server and web app using external S3 buckets for object storage. 
diff --git a/docs/docs/self-hosting/guides/web-app.md b/docs/docs/self-hosting/guides/web-app.md index 49dfdd114..28802c457 100644 --- a/docs/docs/self-hosting/guides/web-app.md +++ b/docs/docs/self-hosting/guides/web-app.md @@ -1,6 +1,8 @@ --- title: Hosting the web app -description: Building and hosting Ente's web app, connecting it to your self-hosted server +description: + Building and hosting Ente's web app, connecting it to your self-hosted + server --- # Web app diff --git a/docs/docs/self-hosting/troubleshooting/uploads.md b/docs/docs/self-hosting/troubleshooting/uploads.md index 4f7273e94..435a5e93c 100644 --- a/docs/docs/self-hosting/troubleshooting/uploads.md +++ b/docs/docs/self-hosting/troubleshooting/uploads.md @@ -5,9 +5,9 @@ description: Fixing upload errors when trying to self host Ente # Uploads failing -If uploads to your self-hosted server are failing, make sure that -`credentials.yaml` has `yourserverip:3200` for all three minio locations. +If uploads to your minio are failing, you need to ensure that you've configured +the S3 bucket `endpoint` in `credentials.yaml` (or `museum.yaml`) to, say, +`yourserverip:3200`. This can be any host or port, it just needs to be a value +that is reachable from both your client and from museum. -By default it is `localhost:3200`, and it needs to be changed to an IP that is -accessible from both where you are running the Ente clients (e.g. the mobile -app) and also from within the Docker compose cluster. +For more details, see [configuring-s3](/self-hosting/guides/configuring-s3). 
diff --git a/docs/docs/self-hosting/troubleshooting/yarn.md b/docs/docs/self-hosting/troubleshooting/yarn.md index 7d8d13b00..de2b55029 100644 --- a/docs/docs/self-hosting/troubleshooting/yarn.md +++ b/docs/docs/self-hosting/troubleshooting/yarn.md @@ -8,3 +8,6 @@ description: Fixing yarn install errors when trying to self host Ente If your `yarn install` is failing, make sure you are using Yarn Classic - https://classic.yarnpkg.com/lang/en/docs/install + +For more details, see the +[getting started instructions](https://github.com/ente-io/ente/blob/main/web/docs/new.md). diff --git a/docs/package.json b/docs/package.json index 5d4dc3b19..015d79eea 100644 --- a/docs/package.json +++ b/docs/package.json @@ -10,5 +10,6 @@ "devDependencies": { "prettier": "^3", "vitepress": "^1.0.0-rc.45" - } + }, + "packageManager": "yarn@1.22.21" } diff --git a/mobile/lib/core/configuration.dart b/mobile/lib/core/configuration.dart index 5fe08826b..4809ba863 100644 --- a/mobile/lib/core/configuration.dart +++ b/mobile/lib/core/configuration.dart @@ -73,8 +73,6 @@ class Configuration { static const anonymousUserIDKey = "anonymous_user_id"; static const endPointKey = "endpoint"; - final kTempFolderDeletionTimeBuffer = const Duration(hours: 6).inMicroseconds; - static final _logger = Logger("Configuration"); String? _cachedToken; @@ -104,20 +102,7 @@ class Configuration { _documentsDirectory = (await getApplicationDocumentsDirectory()).path; _tempDocumentsDirPath = _documentsDirectory + "/temp/"; final tempDocumentsDir = Directory(_tempDocumentsDirPath); - try { - final currentTime = DateTime.now().microsecondsSinceEpoch; - if (tempDocumentsDir.existsSync() && - (_preferences.getInt(lastTempFolderClearTimeKey) ?? 
0) < - (currentTime - kTempFolderDeletionTimeBuffer)) { - await tempDocumentsDir.delete(recursive: true); - await _preferences.setInt(lastTempFolderClearTimeKey, currentTime); - _logger.info("Cleared temp folder"); - } else { - _logger.info("Skipping temp folder clear"); - } - } catch (e) { - _logger.warning(e); - } + await _cleanUpStaleFiles(tempDocumentsDir); tempDocumentsDir.createSync(recursive: true); final tempDirectoryPath = (await getTemporaryDirectory()).path; _thumbnailCacheDirectory = tempDirectoryPath + "/thumbnail-cache"; @@ -145,6 +130,42 @@ class Configuration { SuperLogging.setUserID(await _getOrCreateAnonymousUserID()).ignore(); } + // _cleanUpStaleFiles deletes all files in the temp directory that are older + // than kTempFolderDeletionTimeBuffer except the the temp encrypted files for upload. + // Those file are deleted by file uploader after the upload is complete or those + // files are not being used / tracked. + Future _cleanUpStaleFiles(Directory tempDocumentsDir) async { + try { + final currentTime = DateTime.now().microsecondsSinceEpoch; + if (tempDocumentsDir.existsSync() && + (_preferences.getInt(lastTempFolderClearTimeKey) ?? 
0) < + (currentTime - tempDirCleanUpInterval)) { + int skippedTempUploadFiles = 0; + final files = tempDocumentsDir.listSync(); + for (final file in files) { + if (file is File) { + if (file.path.contains(uploadTempFilePrefix)) { + skippedTempUploadFiles++; + continue; + } + _logger.info("Deleting file: ${file.path}"); + await file.delete(); + } else if (file is Directory) { + await file.delete(recursive: true); + } + } + await _preferences.setInt(lastTempFolderClearTimeKey, currentTime); + _logger.info( + "Cleared temp folder except $skippedTempUploadFiles upload files", + ); + } else { + _logger.info("Skipping temp folder clear"); + } + } catch (e) { + _logger.warning(e); + } + } + Future logout({bool autoLogout = false}) async { if (SyncService.instance.isSyncInProgress()) { SyncService.instance.stopSync(); diff --git a/mobile/lib/core/constants.dart b/mobile/lib/core/constants.dart index 6b911569c..02923b6c4 100644 --- a/mobile/lib/core/constants.dart +++ b/mobile/lib/core/constants.dart @@ -1,3 +1,5 @@ +import "package:flutter/foundation.dart"; + const int thumbnailSmallSize = 256; const int thumbnailQuality = 50; const int thumbnailLargeSize = 512; @@ -41,6 +43,7 @@ const supportEmail = 'support@ente.io'; // this is the chunk size of the un-encrypted file which is read and encrypted before uploading it as a single part. const multipartPartSize = 20 * 1024 * 1024; +const multipartPartSizeInternal = 8 * 1024 * 1024; const kDefaultProductionEndpoint = 'https://api.ente.io'; @@ -98,3 +101,8 @@ const blackThumbnailBase64 = '/9j/4AAQSkZJRgABAQAAAQABAAD/2wBDAAEBAQEBAQEB' const localFileServer = String.fromEnvironment("localFileServer", defaultValue: ""); + +const uploadTempFilePrefix = "upload_file_"; +final tempDirCleanUpInterval = kDebugMode + ? 
const Duration(seconds: 30).inMicroseconds + : const Duration(hours: 6).inMicroseconds; diff --git a/mobile/lib/db/embeddings_db.dart b/mobile/lib/db/embeddings_db.dart index 0eb1d3f6d..64878a2ce 100644 --- a/mobile/lib/db/embeddings_db.dart +++ b/mobile/lib/db/embeddings_db.dart @@ -63,6 +63,19 @@ class EmbeddingsDB { return _convertToEmbeddings(results); } + // Get FileIDs for a specific model + Future> getFileIDs(Model model) async { + final db = await _database; + final results = await db.getAll( + 'SELECT $columnFileID FROM $tableName WHERE $columnModel = ?', + [modelToInt(model)!], + ); + if (results.isEmpty) { + return {}; + } + return results.map((e) => e[columnFileID] as int).toSet(); + } + Future put(Embedding embedding) async { final db = await _database; await db.execute( diff --git a/mobile/lib/db/upload_locks_db.dart b/mobile/lib/db/upload_locks_db.dart index 11112d0ce..b32084b6f 100644 --- a/mobile/lib/db/upload_locks_db.dart +++ b/mobile/lib/db/upload_locks_db.dart @@ -3,16 +3,60 @@ import 'dart:io'; import 'package:path/path.dart'; import 'package:path_provider/path_provider.dart'; +import "package:photos/module/upload/model/multipart.dart"; import 'package:sqflite/sqflite.dart'; +import "package:sqflite_migration/sqflite_migration.dart"; class UploadLocksDB { static const _databaseName = "ente.upload_locks.db"; - static const _databaseVersion = 1; - static const _table = "upload_locks"; - static const _columnID = "id"; - static const _columnOwner = "owner"; - static const _columnTime = "time"; + static const _uploadLocksTable = ( + table: "upload_locks", + columnID: "id", + columnOwner: "owner", + columnTime: "time", + ); + + static const _trackUploadTable = ( + table: "track_uploads", + columnID: "id", + columnLocalID: "local_id", + columnFileHash: "file_hash", + columnCollectionID: "collection_id", + columnEncryptedFileName: "encrypted_file_name", + columnEncryptedFileSize: "encrypted_file_size", + columnEncryptedFileKey: "encrypted_file_key", + 
columnFileEncryptionNonce: "file_encryption_nonce", + columnKeyEncryptionNonce: "key_encryption_nonce", + columnObjectKey: "object_key", + columnCompleteUrl: "complete_url", + columnStatus: "status", + columnPartSize: "part_size", + columnLastAttemptedAt: "last_attempted_at", + columnCreatedAt: "created_at", + ); + + static const _partsTable = ( + table: "upload_parts", + columnObjectKey: "object_key", + columnPartNumber: "part_number", + columnPartUrl: "part_url", + columnPartETag: "part_etag", + columnPartStatus: "part_status", + ); + + static final initializationScript = [ + ..._createUploadLocksTable(), + ]; + + static final migrationScripts = [ + ..._createTrackUploadsTable(), + ]; + + final dbConfig = MigrationConfig( + initializationScript: initializationScript, + migrationScripts: migrationScripts, + ); UploadLocksDB._privateConstructor(); static final UploadLocksDB instance = UploadLocksDB._privateConstructor(); @@ -27,44 +71,82 @@ class UploadLocksDB { final Directory documentsDirectory = await getApplicationDocumentsDirectory(); final String path = join(documentsDirectory.path, _databaseName); - return await openDatabase( - path, - version: _databaseVersion, - onCreate: _onCreate, - ); + + return await openDatabaseWithMigration(path, dbConfig); } - Future _onCreate(Database db, int version) async { - await db.execute( + static List _createUploadLocksTable() { + return [ ''' - CREATE TABLE $_table ( - $_columnID TEXT PRIMARY KEY NOT NULL, - $_columnOwner TEXT NOT NULL, - $_columnTime TEXT NOT NULL + CREATE TABLE ${_uploadLocksTable.table} ( + ${_uploadLocksTable.columnID} TEXT PRIMARY KEY NOT NULL, + ${_uploadLocksTable.columnOwner} TEXT NOT NULL, + ${_uploadLocksTable.columnTime} TEXT NOT NULL ) ''', - ); + ]; + } + + static List _createTrackUploadsTable() { + return [ + ''' + CREATE TABLE IF NOT EXISTS ${_trackUploadTable.table} ( + ${_trackUploadTable.columnID} INTEGER PRIMARY KEY, + ${_trackUploadTable.columnLocalID} TEXT NOT NULL, + 
${_trackUploadTable.columnFileHash} TEXT NOT NULL, + ${_trackUploadTable.columnCollectionID} INTEGER NOT NULL, + ${_trackUploadTable.columnEncryptedFileName} TEXT NOT NULL, + ${_trackUploadTable.columnEncryptedFileSize} INTEGER NOT NULL, + ${_trackUploadTable.columnEncryptedFileKey} TEXT NOT NULL, + ${_trackUploadTable.columnFileEncryptionNonce} TEXT NOT NULL, + ${_trackUploadTable.columnKeyEncryptionNonce} TEXT NOT NULL, + ${_trackUploadTable.columnObjectKey} TEXT NOT NULL, + ${_trackUploadTable.columnCompleteUrl} TEXT NOT NULL, + ${_trackUploadTable.columnStatus} TEXT DEFAULT '${MultipartStatus.pending.name}' NOT NULL, + ${_trackUploadTable.columnPartSize} INTEGER NOT NULL, + ${_trackUploadTable.columnLastAttemptedAt} INTEGER NOT NULL, + ${_trackUploadTable.columnCreatedAt} INTEGER DEFAULT CURRENT_TIMESTAMP NOT NULL + ) + ''', + ''' + CREATE TABLE IF NOT EXISTS ${_partsTable.table} ( + ${_partsTable.columnObjectKey} TEXT NOT NULL REFERENCES ${_trackUploadTable.table}(${_trackUploadTable.columnObjectKey}) ON DELETE CASCADE, + ${_partsTable.columnPartNumber} INTEGER NOT NULL, + ${_partsTable.columnPartUrl} TEXT NOT NULL, + ${_partsTable.columnPartETag} TEXT, + ${_partsTable.columnPartStatus} TEXT NOT NULL, + PRIMARY KEY (${_partsTable.columnObjectKey}, ${_partsTable.columnPartNumber}) + ) + ''', + ]; } Future clearTable() async { final db = await instance.database; - await db.delete(_table); + await db.delete(_uploadLocksTable.table); + await db.delete(_trackUploadTable.table); + await db.delete(_partsTable.table); } Future acquireLock(String id, String owner, int time) async { final db = await instance.database; final row = {}; - row[_columnID] = id; - row[_columnOwner] = owner; - row[_columnTime] = time; - await db.insert(_table, row, conflictAlgorithm: ConflictAlgorithm.fail); + row[_uploadLocksTable.columnID] = id; + row[_uploadLocksTable.columnOwner] = owner; + row[_uploadLocksTable.columnTime] = time; + await db.insert( + _uploadLocksTable.table, + row, + 
conflictAlgorithm: ConflictAlgorithm.fail, + ); } Future isLocked(String id, String owner) async { final db = await instance.database; final rows = await db.query( - _table, - where: '$_columnID = ? AND $_columnOwner = ?', + _uploadLocksTable.table, + where: + '${_uploadLocksTable.columnID} = ? AND ${_uploadLocksTable.columnOwner} = ?', whereArgs: [id, owner], ); return rows.length == 1; @@ -73,8 +155,9 @@ class UploadLocksDB { Future releaseLock(String id, String owner) async { final db = await instance.database; return db.delete( - _table, - where: '$_columnID = ? AND $_columnOwner = ?', + _uploadLocksTable.table, + where: + '${_uploadLocksTable.columnID} = ? AND ${_uploadLocksTable.columnOwner} = ?', whereArgs: [id, owner], ); } @@ -82,8 +165,9 @@ class UploadLocksDB { Future releaseLocksAcquiredByOwnerBefore(String owner, int time) async { final db = await instance.database; return db.delete( - _table, - where: '$_columnOwner = ? AND $_columnTime < ?', + _uploadLocksTable.table, + where: + '${_uploadLocksTable.columnOwner} = ? AND ${_uploadLocksTable.columnTime} < ?', whereArgs: [owner, time], ); } @@ -91,9 +175,251 @@ class UploadLocksDB { Future releaseAllLocksAcquiredBefore(int time) async { final db = await instance.database; return db.delete( - _table, - where: '$_columnTime < ?', + _uploadLocksTable.table, + where: '${_uploadLocksTable.columnTime} < ?', whereArgs: [time], ); } + + Future<({String encryptedFileKey, String fileNonce, String keyNonce})> + getFileEncryptionData( + String localId, + String fileHash, + int collectionID, + ) async { + final db = await instance.database; + + final rows = await db.query( + _trackUploadTable.table, + where: '${_trackUploadTable.columnLocalID} = ?' + ' AND ${_trackUploadTable.columnFileHash} = ?' 
+ ' AND ${_trackUploadTable.columnCollectionID} = ?', + whereArgs: [localId, fileHash, collectionID], + ); + + if (rows.isEmpty) { + throw Exception("No cached links found for $localId and $fileHash"); + } + final row = rows.first; + + return ( + encryptedFileKey: row[_trackUploadTable.columnEncryptedFileKey] as String, + fileNonce: row[_trackUploadTable.columnFileEncryptionNonce] as String, + keyNonce: row[_trackUploadTable.columnKeyEncryptionNonce] as String, + ); + } + + Future updateLastAttempted( + String localId, + String fileHash, + int collectionID, + ) async { + final db = await instance.database; + await db.update( + _trackUploadTable.table, + { + _trackUploadTable.columnLastAttemptedAt: + DateTime.now().millisecondsSinceEpoch, + }, + where: '${_trackUploadTable.columnLocalID} = ?' + ' AND ${_trackUploadTable.columnFileHash} = ?' + ' AND ${_trackUploadTable.columnCollectionID} = ?', + whereArgs: [ + localId, + fileHash, + collectionID, + ], + ); + } + + Future getCachedLinks( + String localId, + String fileHash, + int collectionID, + ) async { + final db = await instance.database; + final rows = await db.query( + _trackUploadTable.table, + where: '${_trackUploadTable.columnLocalID} = ?' + ' AND ${_trackUploadTable.columnFileHash} = ?' 
+ ' AND ${_trackUploadTable.columnCollectionID} = ?', + whereArgs: [localId, fileHash, collectionID], + ); + if (rows.isEmpty) { + throw Exception("No cached links found for $localId and $fileHash"); + } + final row = rows.first; + final objectKey = row[_trackUploadTable.columnObjectKey] as String; + final partsStatus = await db.query( + _partsTable.table, + where: '${_partsTable.columnObjectKey} = ?', + whereArgs: [objectKey], + ); + + final List partUploadStatus = []; + final List partsURLs = List.generate( + partsStatus.length, + (index) => "", + ); + final Map partETags = {}; + + for (final part in partsStatus) { + final partNumber = part[_partsTable.columnPartNumber] as int; + final partUrl = part[_partsTable.columnPartUrl] as String; + final partStatus = part[_partsTable.columnPartStatus] as String; + partsURLs[partNumber] = partUrl; + if (part[_partsTable.columnPartETag] != null) { + partETags[partNumber] = part[_partsTable.columnPartETag] as String; + } + partUploadStatus.add(partStatus == "uploaded"); + } + final urls = MultipartUploadURLs( + objectKey: objectKey, + completeURL: row[_trackUploadTable.columnCompleteUrl] as String, + partsURLs: partsURLs, + ); + + return MultipartInfo( + urls: urls, + status: MultipartStatus.values + .byName(row[_trackUploadTable.columnStatus] as String), + partUploadStatus: partUploadStatus, + partETags: partETags, + partSize: row[_trackUploadTable.columnPartSize] as int, + ); + } + + Future createTrackUploadsEntry( + String localId, + String fileHash, + int collectionID, + MultipartUploadURLs urls, + String encryptedFileName, + int fileSize, + String fileKey, + String fileNonce, + String keyNonce, { + required int partSize, + }) async { + final db = await UploadLocksDB.instance.database; + final objectKey = urls.objectKey; + + await db.insert( + _trackUploadTable.table, + { + _trackUploadTable.columnLocalID: localId, + _trackUploadTable.columnFileHash: fileHash, + _trackUploadTable.columnCollectionID: collectionID, + 
_trackUploadTable.columnObjectKey: objectKey, + _trackUploadTable.columnCompleteUrl: urls.completeURL, + _trackUploadTable.columnEncryptedFileName: encryptedFileName, + _trackUploadTable.columnEncryptedFileSize: fileSize, + _trackUploadTable.columnEncryptedFileKey: fileKey, + _trackUploadTable.columnFileEncryptionNonce: fileNonce, + _trackUploadTable.columnKeyEncryptionNonce: keyNonce, + _trackUploadTable.columnPartSize: partSize, + _trackUploadTable.columnLastAttemptedAt: + DateTime.now().millisecondsSinceEpoch, + }, + ); + + final partsURLs = urls.partsURLs; + final partsLength = partsURLs.length; + + for (int i = 0; i < partsLength; i++) { + await db.insert( + _partsTable.table, + { + _partsTable.columnObjectKey: objectKey, + _partsTable.columnPartNumber: i, + _partsTable.columnPartUrl: partsURLs[i], + _partsTable.columnPartStatus: PartStatus.pending.name, + }, + ); + } + } + + Future updatePartStatus( + String objectKey, + int partNumber, + String etag, + ) async { + final db = await instance.database; + await db.update( + _partsTable.table, + { + _partsTable.columnPartStatus: PartStatus.uploaded.name, + _partsTable.columnPartETag: etag, + }, + where: + '${_partsTable.columnObjectKey} = ? 
AND ${_partsTable.columnPartNumber} = ?', + whereArgs: [objectKey, partNumber], + ); + } + + Future updateTrackUploadStatus( + String objectKey, + MultipartStatus status, + ) async { + final db = await instance.database; + await db.update( + _trackUploadTable.table, + { + _trackUploadTable.columnStatus: status.name, + }, + where: '${_trackUploadTable.columnObjectKey} = ?', + whereArgs: [objectKey], + ); + } + + Future deleteMultipartTrack( + String localId, + ) async { + final db = await instance.database; + return await db.delete( + _trackUploadTable.table, + where: '${_trackUploadTable.columnLocalID} = ?', + whereArgs: [localId], + ); + } + + // getFileNameToLastAttemptedAtMap returns a map of encrypted file name to last attempted at time + Future> getFileNameToLastAttemptedAtMap() { + return instance.database.then((db) async { + final rows = await db.query( + _trackUploadTable.table, + columns: [ + _trackUploadTable.columnEncryptedFileName, + _trackUploadTable.columnLastAttemptedAt, + ], + ); + final map = {}; + for (final row in rows) { + map[row[_trackUploadTable.columnEncryptedFileName] as String] = + row[_trackUploadTable.columnLastAttemptedAt] as int; + } + return map; + }); + } + + Future getEncryptedFileName( + String localId, + String fileHash, + int collectionID, + ) { + return instance.database.then((db) async { + final rows = await db.query( + _trackUploadTable.table, + where: '${_trackUploadTable.columnLocalID} = ?' + ' AND ${_trackUploadTable.columnFileHash} = ?' 
+ ' AND ${_trackUploadTable.columnCollectionID} = ?', + whereArgs: [localId, fileHash, collectionID], + ); + if (rows.isEmpty) { + return null; + } + final row = rows.first; + return row[_trackUploadTable.columnEncryptedFileName] as String; + }); + } } diff --git a/mobile/lib/events/embedding_updated_event.dart b/mobile/lib/events/embedding_updated_event.dart index 9021b8b50..736b85c17 100644 --- a/mobile/lib/events/embedding_updated_event.dart +++ b/mobile/lib/events/embedding_updated_event.dart @@ -1,3 +1,5 @@ import "package:photos/events/event.dart"; class EmbeddingUpdatedEvent extends Event {} + +class EmbeddingCacheUpdatedEvent extends Event {} diff --git a/mobile/lib/generated/intl/messages_pt.dart b/mobile/lib/generated/intl/messages_pt.dart index 29879547e..346135bb0 100644 --- a/mobile/lib/generated/intl/messages_pt.dart +++ b/mobile/lib/generated/intl/messages_pt.dart @@ -633,8 +633,9 @@ class MessageLookup extends MessageLookupByLibrary { MessageLookupByLibrary.simpleMessage("Dobre seu armazenamento"), "download": MessageLookupByLibrary.simpleMessage("Baixar"), "downloadFailed": - MessageLookupByLibrary.simpleMessage("Falha ao baixar"), - "downloading": MessageLookupByLibrary.simpleMessage("Baixando..."), + MessageLookupByLibrary.simpleMessage("Falha no download"), + "downloading": + MessageLookupByLibrary.simpleMessage("Fazendo download..."), "dropSupportEmail": m17, "duplicateFileCountWithStorageSaved": m18, "duplicateItemsGroup": m19, @@ -724,8 +725,8 @@ class MessageLookup extends MessageLookupByLibrary { MessageLookupByLibrary.simpleMessage("Falha ao aplicar o código"), "failedToCancel": MessageLookupByLibrary.simpleMessage("Falha ao cancelar"), - "failedToDownloadVideo": - MessageLookupByLibrary.simpleMessage("Falha ao baixar vídeo"), + "failedToDownloadVideo": MessageLookupByLibrary.simpleMessage( + "Falha ao fazer download do vídeo"), "failedToFetchOriginalForEdit": MessageLookupByLibrary.simpleMessage( "Falha ao obter original para edição"), 
"failedToFetchReferralDetails": MessageLookupByLibrary.simpleMessage( @@ -743,7 +744,7 @@ class MessageLookup extends MessageLookupByLibrary { "familyPlans": MessageLookupByLibrary.simpleMessage("Plano familiar"), "faq": MessageLookupByLibrary.simpleMessage("Perguntas frequentes"), "faqs": MessageLookupByLibrary.simpleMessage("Perguntas frequentes"), - "favorite": MessageLookupByLibrary.simpleMessage("Favoritar"), + "favorite": MessageLookupByLibrary.simpleMessage("Favorito"), "feedback": MessageLookupByLibrary.simpleMessage("Comentários"), "fileFailedToSaveToGallery": MessageLookupByLibrary.simpleMessage( "Falha ao salvar o arquivo na galeria"), @@ -911,8 +912,8 @@ class MessageLookup extends MessageLookupByLibrary { MessageLookupByLibrary.simpleMessage("Carregando galeria..."), "loadingMessage": MessageLookupByLibrary.simpleMessage("Carregando suas fotos..."), - "loadingModel": - MessageLookupByLibrary.simpleMessage("Baixando modelos..."), + "loadingModel": MessageLookupByLibrary.simpleMessage( + "Fazendo download de modelos..."), "localGallery": MessageLookupByLibrary.simpleMessage("Galeria local"), "location": MessageLookupByLibrary.simpleMessage("Local"), "locationName": MessageLookupByLibrary.simpleMessage("Nome do Local"), diff --git a/mobile/lib/generated/intl/messages_zh.dart b/mobile/lib/generated/intl/messages_zh.dart index f8ff74327..20f9721a8 100644 --- a/mobile/lib/generated/intl/messages_zh.dart +++ b/mobile/lib/generated/intl/messages_zh.dart @@ -124,7 +124,7 @@ class MessageLookup extends MessageLookupByLibrary { static String m37(providerName) => "如果您被收取费用,请用英语与 ${providerName} 的客服聊天"; - static String m38(endDate) => "免费试用有效期至 ${endDate}。\n之后您可以选择付费计划。"; + static String m38(endDate) => "免费试用有效期至 ${endDate}。\n您可以随后购买付费计划。"; static String m39(toEmail) => "请给我们发送电子邮件至 ${toEmail}"; diff --git a/mobile/lib/l10n/intl_pt.arb b/mobile/lib/l10n/intl_pt.arb index 08b4ef315..f47dd89e9 100644 --- a/mobile/lib/l10n/intl_pt.arb +++ b/mobile/lib/l10n/intl_pt.arb 
@@ -410,7 +410,7 @@ "machineLearning": "Aprendizagem de máquina", "magicSearch": "Busca mágica", "magicSearchDescription": "Por favor, note que isso resultará em uma largura de banda maior e uso de bateria até que todos os itens sejam indexados.", - "loadingModel": "Baixando modelos...", + "loadingModel": "Fazendo download de modelos...", "waitingForWifi": "Esperando por Wi-Fi...", "status": "Estado", "indexedItems": "Itens indexados", @@ -471,7 +471,7 @@ "criticalUpdateAvailable": "Atualização crítica disponível", "updateAvailable": "Atualização disponível", "ignoreUpdate": "Ignorar", - "downloading": "Baixando...", + "downloading": "Fazendo download...", "cannotDeleteSharedFiles": "Não é possível excluir arquivos compartilhados", "theDownloadCouldNotBeCompleted": "Não foi possível concluir o download", "retry": "Tentar novamente", @@ -660,7 +660,7 @@ "endtoendEncryptedByDefault": "Criptografia de ponta a ponta por padrão", "safelyStored": "Armazenado com segurança", "atAFalloutShelter": "em um abrigo avançado", - "designedToOutlive": "Feito para ter logenvidade", + "designedToOutlive": "Feito para ter longevidade", "available": "Disponível", "everywhere": "em todos os lugares", "androidIosWebDesktop": "Android, iOS, Web, Desktop", @@ -734,7 +734,7 @@ "moveToAlbum": "Mover para álbum", "unhide": "Desocultar", "unarchive": "Desarquivar", - "favorite": "Favoritar", + "favorite": "Favorito", "removeFromFavorite": "Remover dos favoritos", "shareLink": "Compartilhar link", "createCollage": "Criar colagem", @@ -840,7 +840,7 @@ "download": "Baixar", "pressAndHoldToPlayVideo": "Pressione e segure para reproduzir o vídeo", "pressAndHoldToPlayVideoDetailed": "Pressione e segure na imagem para reproduzir o vídeo", - "downloadFailed": "Falha ao baixar", + "downloadFailed": "Falha no download", "deduplicateFiles": "Arquivos duplicados", "deselectAll": "Desmarcar todos", "reviewDeduplicateItems": "Por favor, reveja e exclua os itens que você acredita serem duplicados.", @@ 
-1132,7 +1132,7 @@ "sharedWithYou": "Compartilhado com você", "sharedByYou": "Compartilhado por você", "inviteYourFriendsToEnte": "Convide seus amigos ao Ente", - "failedToDownloadVideo": "Falha ao baixar vídeo", + "failedToDownloadVideo": "Falha ao fazer download do vídeo", "hiding": "Ocultando...", "unhiding": "Desocultando...", "successfullyHid": "Ocultado com sucesso", diff --git a/mobile/lib/l10n/intl_zh.arb b/mobile/lib/l10n/intl_zh.arb index bd930857c..933eea126 100644 --- a/mobile/lib/l10n/intl_zh.arb +++ b/mobile/lib/l10n/intl_zh.arb @@ -569,7 +569,7 @@ "freeTrialValidTill": "免费试用有效期至 {endDate}", "validTill": "有效期至 {endDate}", "addOnValidTill": "您的 {storageAmount} 插件有效期至 {endDate}", - "playStoreFreeTrialValidTill": "免费试用有效期至 {endDate}。\n之后您可以选择付费计划。", + "playStoreFreeTrialValidTill": "免费试用有效期至 {endDate}。\n您可以随后购买付费计划。", "subWillBeCancelledOn": "您的订阅将于 {endDate} 取消", "subscription": "订阅", "paymentDetails": "付款明细", @@ -942,7 +942,7 @@ "thisActionCannotBeUndone": "此操作无法撤销", "emptyTrash": "要清空回收站吗?", "permDeleteWarning": "回收站中的所有项目将被永久删除\n\n此操作无法撤消", - "empty": "空的", + "empty": "清空", "couldNotFreeUpSpace": "无法释放空间", "permanentlyDeleteFromDevice": "要从设备中永久删除吗?", "someOfTheFilesYouAreTryingToDeleteAre": "您要删除的部分文件仅在您的设备上可用,且删除后无法恢复", diff --git a/mobile/lib/module/upload/model/multipart.dart b/mobile/lib/module/upload/model/multipart.dart new file mode 100644 index 000000000..cda72d141 --- /dev/null +++ b/mobile/lib/module/upload/model/multipart.dart @@ -0,0 +1,66 @@ +import "package:photos/module/upload/model/xml.dart"; + +class PartETag extends XmlParsableObject { + final int partNumber; + final String eTag; + + PartETag(this.partNumber, this.eTag); + + @override + String get elementName => "Part"; + + @override + Map toMap() { + return { + "PartNumber": partNumber, + "ETag": eTag, + }; + } +} + +enum MultipartStatus { + pending, + uploaded, + completed, +} + +enum PartStatus { + pending, + uploaded, +} + +class MultipartInfo { + final List? 
partUploadStatus; + final Map? partETags; + final int? partSize; + final MultipartUploadURLs urls; + final MultipartStatus status; + + MultipartInfo({ + this.partUploadStatus, + this.partETags, + this.partSize, + this.status = MultipartStatus.pending, + required this.urls, + }); +} + +class MultipartUploadURLs { + final String objectKey; + final List partsURLs; + final String completeURL; + + MultipartUploadURLs({ + required this.objectKey, + required this.partsURLs, + required this.completeURL, + }); + + factory MultipartUploadURLs.fromMap(Map map) { + return MultipartUploadURLs( + objectKey: map["urls"]["objectKey"], + partsURLs: (map["urls"]["partURLs"] as List).cast(), + completeURL: map["urls"]["completeURL"], + ); + } +} diff --git a/mobile/lib/module/upload/model/xml.dart b/mobile/lib/module/upload/model/xml.dart new file mode 100644 index 000000000..9490fc40c --- /dev/null +++ b/mobile/lib/module/upload/model/xml.dart @@ -0,0 +1,41 @@ +// ignore_for_file: implementation_imports + +import "package:xml/xml.dart"; + +// used for classes that can be converted to xml +abstract class XmlParsableObject { + Map toMap(); + String get elementName; +} + +// for converting the response to xml +String convertJs2Xml(Map json) { + final builder = XmlBuilder(); + buildXml(builder, json); + return builder.buildDocument().toXmlString( + pretty: true, + indent: ' ', + ); +} + +// for building the xml node tree recursively +void buildXml(XmlBuilder builder, dynamic node) { + if (node is Map) { + node.forEach((key, value) { + builder.element(key, nest: () => buildXml(builder, value)); + }); + } else if (node is List) { + for (var item in node) { + buildXml(builder, item); + } + } else if (node is XmlParsableObject) { + builder.element( + node.elementName, + nest: () { + buildXml(builder, node.toMap()); + }, + ); + } else { + builder.text(node.toString()); + } +} diff --git a/mobile/lib/module/upload/service/multipart.dart b/mobile/lib/module/upload/service/multipart.dart new 
file mode 100644 index 000000000..ad0d19703 --- /dev/null +++ b/mobile/lib/module/upload/service/multipart.dart @@ -0,0 +1,266 @@ +import "dart:io"; + +import "package:dio/dio.dart"; +import "package:ente_feature_flag/ente_feature_flag.dart"; +import "package:flutter/foundation.dart"; +import "package:logging/logging.dart"; +import "package:photos/core/constants.dart"; +import "package:photos/db/upload_locks_db.dart"; +import "package:photos/models/encryption_result.dart"; +import "package:photos/module/upload/model/multipart.dart"; +import "package:photos/module/upload/model/xml.dart"; +import "package:photos/services/collections_service.dart"; +import "package:photos/utils/crypto_util.dart"; + +class MultiPartUploader { + final Dio _enteDio; + final Dio _s3Dio; + final UploadLocksDB _db; + final FlagService _featureFlagService; + late final Logger _logger = Logger("MultiPartUploader"); + + MultiPartUploader( + this._enteDio, + this._s3Dio, + this._db, + this._featureFlagService, + ); + + Future getEncryptionResult( + String localId, + String fileHash, + int collectionID, + ) async { + final collectionKey = + CollectionsService.instance.getCollectionKey(collectionID); + final result = + await _db.getFileEncryptionData(localId, fileHash, collectionID); + final encryptedFileKey = CryptoUtil.base642bin(result.encryptedFileKey); + final fileNonce = CryptoUtil.base642bin(result.fileNonce); + + final encryptKeyNonce = CryptoUtil.base642bin(result.keyNonce); + + return EncryptionResult( + key: CryptoUtil.decryptSync( + encryptedFileKey, + collectionKey, + encryptKeyNonce, + ), + header: fileNonce, + ); + } + + int get multipartPartSizeForUpload { + if (_featureFlagService.internalUser) { + return multipartPartSizeInternal; + } + return multipartPartSize; + } + + Future calculatePartCount(int fileSize) async { + // Multipart upload is only enabled for internal users + // and debug builds till it's battle tested. 
+ if (!_featureFlagService.internalUser) return 1; + + final partCount = (fileSize / multipartPartSizeForUpload).ceil(); + return partCount; + } + + Future getMultipartUploadURLs(int count) async { + try { + assert( + _featureFlagService.internalUser, + "Multipart upload should not be enabled for external users.", + ); + final response = await _enteDio.get( + "/files/multipart-upload-urls", + queryParameters: { + "count": count, + }, + ); + + return MultipartUploadURLs.fromMap(response.data); + } on Exception catch (e) { + _logger.severe('failed to get multipart url', e); + rethrow; + } + } + + Future createTableEntry( + String localId, + String fileHash, + int collectionID, + MultipartUploadURLs urls, + String encryptedFileName, + int fileSize, + Uint8List fileKey, + Uint8List fileNonce, + ) async { + final collectionKey = + CollectionsService.instance.getCollectionKey(collectionID); + + final encryptedResult = CryptoUtil.encryptSync( + fileKey, + collectionKey, + ); + + await _db.createTrackUploadsEntry( + localId, + fileHash, + collectionID, + urls, + encryptedFileName, + fileSize, + CryptoUtil.bin2base64(encryptedResult.encryptedData!), + CryptoUtil.bin2base64(fileNonce), + CryptoUtil.bin2base64(encryptedResult.nonce!), + partSize: multipartPartSizeForUpload, + ); + } + + Future putExistingMultipartFile( + File encryptedFile, + String localId, + String fileHash, + int collectionID, + ) async { + final multipartInfo = + await _db.getCachedLinks(localId, fileHash, collectionID); + await _db.updateLastAttempted(localId, fileHash, collectionID); + + Map etags = multipartInfo.partETags ?? 
{}; + + if (multipartInfo.status == MultipartStatus.pending) { + // upload individual parts and get their etags + etags = await _uploadParts(multipartInfo, encryptedFile); + } + + if (multipartInfo.status != MultipartStatus.completed) { + // complete the multipart upload + await _completeMultipartUpload( + multipartInfo.urls.objectKey, + etags, + multipartInfo.urls.completeURL, + ); + } + + return multipartInfo.urls.objectKey; + } + + Future putMultipartFile( + MultipartUploadURLs urls, + File encryptedFile, + ) async { + // upload individual parts and get their etags + final etags = await _uploadParts( + MultipartInfo(urls: urls), + encryptedFile, + ); + + // complete the multipart upload + await _completeMultipartUpload(urls.objectKey, etags, urls.completeURL); + + return urls.objectKey; + } + + Future> _uploadParts( + MultipartInfo partInfo, + File encryptedFile, + ) async { + final partsURLs = partInfo.urls.partsURLs; + final partUploadStatus = partInfo.partUploadStatus; + final partsLength = partsURLs.length; + final etags = partInfo.partETags ?? {}; + + int i = 0; + final partSize = partInfo.partSize ?? multipartPartSizeForUpload; + + // Go to the first part that is not uploaded + while (i < (partUploadStatus?.length ?? 0) && + (partUploadStatus?[i] ?? false)) { + i++; + } + + final int encFileLength = encryptedFile.lengthSync(); + // Start parts upload + int count = 0; + while (i < partsLength) { + count++; + final partURL = partsURLs[i]; + final isLastPart = i == partsLength - 1; + final fileSize = isLastPart ? encFileLength % partSize : partSize; + _logger.info( + "Uploading part ${i + 1} / $partsLength of size $fileSize bytes (total size $encFileLength).", + ); + if (kDebugMode && count > 3) { + throw Exception( + 'Forced exception to test multipart upload retry mechanism.', + ); + } + final response = await _s3Dio.put( + partURL, + data: encryptedFile.openRead( + i * partSize, + isLastPart ? 
null : (i + 1) * partSize, + ), + options: Options( + headers: { + Headers.contentLengthHeader: fileSize, + }, + ), + ); + + final eTag = response.headers.value("etag"); + + if (eTag?.isEmpty ?? true) { + throw Exception('ETAG_MISSING'); + } + + etags[i] = eTag!; + + await _db.updatePartStatus(partInfo.urls.objectKey, i, eTag); + i++; + } + + await _db.updateTrackUploadStatus( + partInfo.urls.objectKey, + MultipartStatus.uploaded, + ); + + return etags; + } + + Future _completeMultipartUpload( + String objectKey, + Map partEtags, + String completeURL, + ) async { + final body = convertJs2Xml({ + 'CompleteMultipartUpload': partEtags.entries + .map( + (e) => PartETag( + e.key + 1, + e.value, + ), + ) + .toList(), + }).replaceAll('"', '').replaceAll('"', ''); + + try { + await _s3Dio.post( + completeURL, + data: body, + options: Options( + contentType: "text/xml", + ), + ); + await _db.updateTrackUploadStatus( + objectKey, + MultipartStatus.completed, + ); + } catch (e) { + Logger("MultipartUpload").severe(e); + rethrow; + } + } +} diff --git a/mobile/lib/services/machine_learning/semantic_search/embedding_store.dart b/mobile/lib/services/machine_learning/semantic_search/embedding_store.dart index 420b8c97f..485e1f2c9 100644 --- a/mobile/lib/services/machine_learning/semantic_search/embedding_store.dart +++ b/mobile/lib/services/machine_learning/semantic_search/embedding_store.dart @@ -145,9 +145,12 @@ class EmbeddingStore { } _logger.info("${remoteEmbeddings.length} embeddings fetched"); + return RemoteEmbeddings( remoteEmbeddings, - remoteEmbeddings.length == limit, + // keep fetching until we get all embeddings. 
Avoid limit check as + // some embedding fetch might fail on server + remoteEmbeddings.isNotEmpty, ); } diff --git a/mobile/lib/services/machine_learning/semantic_search/semantic_search_service.dart b/mobile/lib/services/machine_learning/semantic_search/semantic_search_service.dart index 5982da02d..d85b4ceb5 100644 --- a/mobile/lib/services/machine_learning/semantic_search/semantic_search_service.dart +++ b/mobile/lib/services/machine_learning/semantic_search/semantic_search_service.dart @@ -193,6 +193,7 @@ class SemanticSearchService { _logger.info( "Loading ${_cachedEmbeddings.length} took: ${(endTime.millisecondsSinceEpoch - startTime.millisecondsSinceEpoch)}ms", ); + Bus.instance.fire(EmbeddingCacheUpdatedEvent()); _logger.info("Cached embeddings: " + _cachedEmbeddings.length.toString()); } @@ -228,7 +229,9 @@ class SemanticSearchService { Future> _getFileIDsToBeIndexed() async { final uploadedFileIDs = await FilesDB.instance .getOwnedFileIDs(Configuration.instance.getUserID()!); - final embeddedFileIDs = _cachedEmbeddings.map((e) => e.fileID).toSet(); + final embeddedFileIDs = + await EmbeddingsDB.instance.getFileIDs(_currentModel); + uploadedFileIDs.removeWhere( (id) => embeddedFileIDs.contains(id), ); diff --git a/mobile/lib/services/remote_assets_service.dart b/mobile/lib/services/remote_assets_service.dart index 251ce6c15..1e2cb3b6d 100644 --- a/mobile/lib/services/remote_assets_service.dart +++ b/mobile/lib/services/remote_assets_service.dart @@ -1,5 +1,7 @@ +import "dart:async"; import "dart:io"; +import "package:flutter/foundation.dart"; import "package:logging/logging.dart"; import "package:path_provider/path_provider.dart"; import "package:photos/core/network/network.dart"; @@ -8,6 +10,10 @@ class RemoteAssetsService { static final _logger = Logger("RemoteAssetsService"); RemoteAssetsService._privateConstructor(); + final StreamController<(String, int, int)> _progressController = + StreamController<(String, int, int)>.broadcast(); + + Stream<(String, 
int, int)> get progressStream => _progressController.stream; static final RemoteAssetsService instance = RemoteAssetsService._privateConstructor(); @@ -57,7 +63,19 @@ class RemoteAssetsService { if (await existingFile.exists()) { await existingFile.delete(); } - await NetworkClient.instance.getDio().download(url, savePath); + + await NetworkClient.instance.getDio().download( + url, + savePath, + onReceiveProgress: (received, total) { + if (received > 0 && total > 0) { + _progressController.add((url, received, total)); + } else if (kDebugMode) { + debugPrint("$url Received: $received, Total: $total"); + } + }, + ); + _logger.info("Downloaded " + url); } } diff --git a/mobile/lib/ui/settings/machine_learning_settings_page.dart b/mobile/lib/ui/settings/machine_learning_settings_page.dart index 8cbf94c22..b74b33fe5 100644 --- a/mobile/lib/ui/settings/machine_learning_settings_page.dart +++ b/mobile/lib/ui/settings/machine_learning_settings_page.dart @@ -12,6 +12,7 @@ import "package:photos/service_locator.dart"; import "package:photos/services/machine_learning/face_ml/face_ml_service.dart"; import 'package:photos/services/machine_learning/semantic_search/frameworks/ml_framework.dart'; import 'package:photos/services/machine_learning/semantic_search/semantic_search_service.dart'; +import "package:photos/services/remote_assets_service.dart"; import "package:photos/theme/ente_theme.dart"; import "package:photos/ui/common/loading_widget.dart"; import "package:photos/ui/components/buttons/icon_button_widget.dart"; @@ -22,6 +23,7 @@ import "package:photos/ui/components/menu_section_title.dart"; import "package:photos/ui/components/title_bar_title_widget.dart"; import "package:photos/ui/components/title_bar_widget.dart"; import "package:photos/ui/components/toggle_switch_widget.dart"; +import "package:photos/utils/data_util.dart"; import "package:photos/utils/local_settings.dart"; class MachineLearningSettingsPage extends StatefulWidget { @@ -153,7 +155,7 @@ class 
_MachineLearningSettingsPageState children: [ _state == InitializationState.initialized ? const MagicSearchIndexStatsWidget() - : MagicSearchModelLoadingState(_state), + : ModelLoadingState(_state), const SizedBox( height: 12, ), @@ -227,14 +229,46 @@ class _MachineLearningSettingsPageState } } -class MagicSearchModelLoadingState extends StatelessWidget { +class ModelLoadingState extends StatefulWidget { final InitializationState state; - const MagicSearchModelLoadingState( + const ModelLoadingState( this.state, { Key? key, }) : super(key: key); + @override + State createState() => _ModelLoadingStateState(); +} + +class _ModelLoadingStateState extends State { + StreamSubscription<(String, int, int)>? _progressStream; + final Map _progressMap = {}; + @override + void initState() { + _progressStream = + RemoteAssetsService.instance.progressStream.listen((event) { + final String url = event.$1; + String title = ""; + if (url.contains("clip-image")) { + title = "Image Model"; + } else if (url.contains("clip-text")) { + title = "Text Model"; + } + if (title.isNotEmpty) { + _progressMap[title] = (event.$2, event.$3); + setState(() {}); + } + }); + super.initState(); + } + + @override + void dispose() { + super.dispose(); + _progressStream?.cancel(); + } + @override Widget build(BuildContext context) { return Column( @@ -252,12 +286,31 @@ class MagicSearchModelLoadingState extends StatelessWidget { alignCaptionedTextToLeft: true, isGestureDetectorDisabled: true, ), + // show the progress map if in debug mode + if (flagService.internalUser) + ..._progressMap.entries.map((entry) { + return MenuItemWidget( + key: ValueKey(entry.value), + captionedTextWidget: CaptionedTextWidget( + title: entry.key, + ), + trailingWidget: Text( + entry.value.$1 == entry.value.$2 + ? 
"Done" + : "${formatBytes(entry.value.$1)} / ${formatBytes(entry.value.$2)}", + style: Theme.of(context).textTheme.bodySmall, + ), + singleBorderRadius: 8, + alignCaptionedTextToLeft: true, + isGestureDetectorDisabled: true, + ); + }).toList(), ], ); } String _getTitle(BuildContext context) { - switch (state) { + switch (widget.state) { case InitializationState.waitingForNetwork: return S.of(context).waitingForWifi; default: @@ -279,13 +332,13 @@ class MagicSearchIndexStatsWidget extends StatefulWidget { class _MagicSearchIndexStatsWidgetState extends State { IndexStatus? _status; - late StreamSubscription _eventSubscription; + late StreamSubscription _eventSubscription; @override void initState() { super.initState(); _eventSubscription = - Bus.instance.on().listen((event) { + Bus.instance.on().listen((event) { _fetchIndexStatus(); }); _fetchIndexStatus(); diff --git a/mobile/lib/ui/viewer/file/file_app_bar.dart b/mobile/lib/ui/viewer/file/file_app_bar.dart index 2f2c8d061..aa46de55a 100644 --- a/mobile/lib/ui/viewer/file/file_app_bar.dart +++ b/mobile/lib/ui/viewer/file/file_app_bar.dart @@ -131,9 +131,13 @@ class FileAppBarState extends State { ), ); } - // only show fav option for files owned by the user if (!isFileHidden && isFileUploaded) { - _actions.add(FavoriteWidget(widget.file)); + _actions.add( + Padding( + padding: const EdgeInsets.all(8), + child: FavoriteWidget(widget.file), + ), + ); } if (!isFileUploaded) { _actions.add( diff --git a/mobile/lib/ui/viewer/file_details/favorite_widget.dart b/mobile/lib/ui/viewer/file_details/favorite_widget.dart index f9d643490..3371b1442 100644 --- a/mobile/lib/ui/viewer/file_details/favorite_widget.dart +++ b/mobile/lib/ui/viewer/file_details/favorite_widget.dart @@ -50,7 +50,6 @@ class _FavoriteWidgetState extends State { : LikeButton( size: 24, isLiked: isLiked, - padding: const EdgeInsets.all(2), onTap: (oldValue) async { if (widget.file.uploadedFileID == null || widget.file.ownerID != diff --git 
a/mobile/lib/utils/file_uploader.dart b/mobile/lib/utils/file_uploader.dart index ddd1bac21..9b1b37fb4 100644 --- a/mobile/lib/utils/file_uploader.dart +++ b/mobile/lib/utils/file_uploader.dart @@ -2,7 +2,7 @@ import 'dart:async'; import 'dart:collection'; import 'dart:convert'; import 'dart:io'; -import 'dart:math'; +import 'dart:math' as math; import 'package:collection/collection.dart'; import 'package:dio/dio.dart'; @@ -27,6 +27,8 @@ import 'package:photos/models/file/file_type.dart'; import "package:photos/models/metadata/file_magic.dart"; import 'package:photos/models/upload_url.dart'; import "package:photos/models/user_details.dart"; +import "package:photos/module/upload/service/multipart.dart"; +import "package:photos/service_locator.dart"; import 'package:photos/services/collections_service.dart'; import "package:photos/services/file_magic_service.dart"; import 'package:photos/services/local_sync_service.dart'; @@ -36,7 +38,6 @@ import 'package:photos/utils/crypto_util.dart'; import 'package:photos/utils/file_download_util.dart'; import 'package:photos/utils/file_uploader_util.dart'; import "package:photos/utils/file_util.dart"; -import "package:photos/utils/multipart_upload_util.dart"; import "package:photos/utils/network_util.dart"; import 'package:shared_preferences/shared_preferences.dart'; import 'package:tuple/tuple.dart'; @@ -51,7 +52,7 @@ class FileUploader { static const kBlockedUploadsPollFrequency = Duration(seconds: 2); static const kFileUploadTimeout = Duration(minutes: 50); static const k20MBStorageBuffer = 20 * 1024 * 1024; - static const kUploadTempPrefix = "upload_file_"; + static const _lastStaleFileCleanupTime = "lastStaleFileCleanupTime"; final _logger = Logger("FileUploader"); final _dio = NetworkClient.instance.getDio(); @@ -79,6 +80,7 @@ class FileUploader { // cases, we don't want to clear the stale upload files. See #removeStaleFiles // as it can result in clearing files which are still being force uploaded. 
bool _hasInitiatedForceUpload = false; + late MultiPartUploader _multiPartUploader; FileUploader._privateConstructor() { Bus.instance.on().listen((event) { @@ -114,6 +116,17 @@ class FileUploader { // ignore: unawaited_futures _pollBackgroundUploadStatus(); } + _multiPartUploader = MultiPartUploader( + _enteDio, + _dio, + UploadLocksDB.instance, + flagService, + ); + if (currentTime - (_prefs.getInt(_lastStaleFileCleanupTime) ?? 0) > + tempDirCleanUpInterval) { + await removeStaleFiles(); + await _prefs.setInt(_lastStaleFileCleanupTime, currentTime); + } Bus.instance.on().listen((event) { if (event.type == EventType.deletedFromDevice || event.type == EventType.deletedFromEverywhere) { @@ -309,13 +322,28 @@ class FileUploader { // ends with .encrypted. Fetch files in async manner final files = await Directory(dir).list().toList(); final filesToDelete = files.where((file) { - return file.path.contains(kUploadTempPrefix) && + return file.path.contains(uploadTempFilePrefix) && file.path.contains(".encrypted"); }); if (filesToDelete.isNotEmpty) { - _logger.info('cleaning up state files ${filesToDelete.length}'); + _logger.info('Deleting ${filesToDelete.length} stale upload files '); + final fileNameToLastAttempt = + await _uploadLocks.getFileNameToLastAttemptedAtMap(); for (final file in filesToDelete) { - await file.delete(); + final fileName = file.path.split('/').last; + final lastAttemptTime = fileNameToLastAttempt[fileName] != null + ? DateTime.fromMillisecondsSinceEpoch( + fileNameToLastAttempt[fileName]!, + ) + : null; + if (lastAttemptTime == null || + DateTime.now().difference(lastAttemptTime).inDays > 1) { + await file.delete(); + } else { + _logger.info( + 'Skipping file $fileName as it was attempted recently on $lastAttemptTime', + ); + } } } @@ -394,7 +422,7 @@ class FileUploader { (fileOnDisk.updationTime ?? -1) != -1 && (fileOnDisk.collectionID ?? 
-1) == collectionID; if (wasAlreadyUploaded) { - debugPrint("File is already uploaded ${fileOnDisk.tag}"); + _logger.info("File is already uploaded ${fileOnDisk.tag}"); return fileOnDisk; } } @@ -414,6 +442,7 @@ class FileUploader { } final String lockKey = file.localID!; + bool _isMultipartUpload = false; try { await _uploadLocks.acquireLock( @@ -427,12 +456,27 @@ class FileUploader { } final tempDirectory = Configuration.instance.getTempDirectory(); - final String uniqueID = const Uuid().v4().toString(); - final encryptedFilePath = - '$tempDirectory$kUploadTempPrefix${uniqueID}_file.encrypted'; - final encryptedThumbnailPath = - '$tempDirectory$kUploadTempPrefix${uniqueID}_thumb.encrypted'; MediaUploadData? mediaUploadData; + mediaUploadData = await getUploadDataFromEnteFile(file); + + final String? existingMultipartEncFileName = + mediaUploadData.hashData?.fileHash != null + ? await _uploadLocks.getEncryptedFileName( + lockKey, + mediaUploadData.hashData!.fileHash!, + collectionID, + ) + : null; + bool multipartEntryExists = existingMultipartEncFileName != null; + + final String uniqueID = const Uuid().v4().toString(); + + final encryptedFilePath = multipartEntryExists + ? '$tempDirectory$existingMultipartEncFileName' + : '$tempDirectory$uploadTempFilePrefix${uniqueID}_file.encrypted'; + final encryptedThumbnailPath = + '$tempDirectory$uploadTempFilePrefix${uniqueID}_thumb.encrypted'; + var uploadCompleted = false; // This flag is used to decide whether to clear the iOS origin file cache // or not. @@ -446,13 +490,18 @@ class FileUploader { '${isUpdatedFile ? 're-upload' : 'upload'} of ${file.toString()}', ); - mediaUploadData = await getUploadDataFromEnteFile(file); - Uint8List? key; + EncryptionResult? multiPartFileEncResult = multipartEntryExists + ? 
await _multiPartUploader.getEncryptionResult( + lockKey, + mediaUploadData.hashData!.fileHash!, + collectionID, + ) + : null; if (isUpdatedFile) { key = getFileKey(file); } else { - key = null; + key = multiPartFileEncResult?.key; // check if the file is already uploaded and can be mapped to existing // uploaded file. If map is found, it also returns the corresponding // mapped or update file entry. @@ -471,16 +520,40 @@ class FileUploader { } } - if (File(encryptedFilePath).existsSync()) { + final encryptedFileExists = File(encryptedFilePath).existsSync(); + + // If the multipart entry exists but the encrypted file doesn't, it means + // that we'll have to reupload as the nonce is lost + if (multipartEntryExists) { + final bool updateWithDiffKey = isUpdatedFile && + multiPartFileEncResult != null && + !listEquals(key, multiPartFileEncResult.key); + if (!encryptedFileExists || updateWithDiffKey) { + if (updateWithDiffKey) { + _logger.severe('multiPart update resumed with differentKey'); + } else { + _logger.warning( + 'multiPart EncryptedFile missing, discard multipart entry', + ); + } + await _uploadLocks.deleteMultipartTrack(lockKey); + multipartEntryExists = false; + multiPartFileEncResult = null; + } + } else if (encryptedFileExists) { + // otherwise just delete the file for singlepart upload await File(encryptedFilePath).delete(); } await _checkIfWithinStorageLimit(mediaUploadData.sourceFile!); final encryptedFile = File(encryptedFilePath); - final EncryptionResult fileAttributes = await CryptoUtil.encryptFile( - mediaUploadData.sourceFile!.path, - encryptedFilePath, - key: key, - ); + + final EncryptionResult fileAttributes = multiPartFileEncResult ?? + await CryptoUtil.encryptFile( + mediaUploadData.sourceFile!.path, + encryptedFilePath, + key: key, + ); + late final Uint8List? 
thumbnailData; if (mediaUploadData.thumbnail == null && file.fileType == FileType.video) { @@ -501,31 +574,63 @@ class FileUploader { await encryptedThumbnailFile .writeAsBytes(encryptedThumbnailData.encryptedData!); - final thumbnailUploadURL = await _getUploadURL(); - final String thumbnailObjectKey = - await _putFile(thumbnailUploadURL, encryptedThumbnailFile); - - // Calculate the number of parts for the file. Multiple part upload - // is only enabled for internal users and debug builds till it's battle tested. - final count = kDebugMode - ? await calculatePartCount( - await encryptedFile.length(), - ) - : 1; + // Calculate the number of parts for the file. + final count = await _multiPartUploader.calculatePartCount( + await encryptedFile.length(), + ); late String fileObjectKey; + late String thumbnailObjectKey; if (count <= 1) { + final thumbnailUploadURL = await _getUploadURL(); + thumbnailObjectKey = + await _putFile(thumbnailUploadURL, encryptedThumbnailFile); final fileUploadURL = await _getUploadURL(); fileObjectKey = await _putFile(fileUploadURL, encryptedFile); } else { - final fileUploadURLs = await getMultipartUploadURLs(count); - fileObjectKey = await putMultipartFile(fileUploadURLs, encryptedFile); + _isMultipartUpload = true; + _logger.finest( + "Init multipartUpload $multipartEntryExists, isUpdate $isUpdatedFile", + ); + if (multipartEntryExists) { + fileObjectKey = await _multiPartUploader.putExistingMultipartFile( + encryptedFile, + lockKey, + mediaUploadData.hashData!.fileHash!, + collectionID, + ); + } else { + final fileUploadURLs = + await _multiPartUploader.getMultipartUploadURLs(count); + final encFileName = encryptedFile.path.split('/').last; + await _multiPartUploader.createTableEntry( + lockKey, + mediaUploadData.hashData!.fileHash!, + collectionID, + fileUploadURLs, + encFileName, + await encryptedFile.length(), + fileAttributes.key!, + fileAttributes.header!, + ); + fileObjectKey = await _multiPartUploader.putMultipartFile( + 
fileUploadURLs, + encryptedFile, + ); + } + // in case of multipart, upload the thumbnail towards the end to avoid + // re-uploading the thumbnail in case of failure. + // In regular upload, always upload the thumbnail first to keep existing behaviour + // + final thumbnailUploadURL = await _getUploadURL(); + thumbnailObjectKey = + await _putFile(thumbnailUploadURL, encryptedThumbnailFile); } final metadata = await file.getMetadataForUpload(mediaUploadData); final encryptedMetadataResult = await CryptoUtil.encryptChaCha( - utf8.encode(jsonEncode(metadata)) as Uint8List, + utf8.encode(jsonEncode(metadata)), fileAttributes.key!, ); final fileDecryptionHeader = @@ -607,6 +712,8 @@ class FileUploader { } await FilesDB.instance.update(remoteFile); } + await UploadLocksDB.instance.deleteMultipartTrack(lockKey); + if (!_isBackground) { Bus.instance.fire( LocalPhotosUpdatedEvent( @@ -648,6 +755,7 @@ class FileUploader { encryptedFilePath, encryptedThumbnailPath, lockKey: lockKey, + isMultiPartUpload: _isMultipartUpload, ); } } @@ -792,6 +900,7 @@ class FileUploader { String encryptedFilePath, String encryptedThumbnailPath, { required String lockKey, + bool isMultiPartUpload = false, }) async { if (mediaUploadData != null && mediaUploadData.sourceFile != null) { // delete the file from app's internal cache if it was copied to app @@ -805,7 +914,14 @@ class FileUploader { } } if (File(encryptedFilePath).existsSync()) { - await File(encryptedFilePath).delete(); + if (isMultiPartUpload && !uploadCompleted) { + _logger.fine( + "skip delete for multipart encrypted file $encryptedFilePath", + ); + } else { + _logger.fine("deleting encrypted file $encryptedFilePath"); + await File(encryptedFilePath).delete(); + } } if (File(encryptedThumbnailPath).existsSync()) { await File(encryptedThumbnailPath).delete(); @@ -1028,7 +1144,7 @@ class FileUploader { if (_uploadURLs.isEmpty) { // the queue is empty, fetch at least for one file to handle force uploads // that are not in the queue. 
This is to also avoid - await fetchUploadURLs(max(_queue.length, 1)); + await fetchUploadURLs(math.max(_queue.length, 1)); } try { return _uploadURLs.removeFirst(); @@ -1050,7 +1166,7 @@ class FileUploader { final response = await _enteDio.get( "/files/upload-urls", queryParameters: { - "count": min(42, fileCount * 2), // m4gic number + "count": math.min(42, fileCount * 2), // m4gic number }, ); final urls = (response.data["urls"] as List) diff --git a/mobile/lib/utils/multipart_upload_util.dart b/mobile/lib/utils/multipart_upload_util.dart index 102c08d8d..6b9ccafb9 100644 --- a/mobile/lib/utils/multipart_upload_util.dart +++ b/mobile/lib/utils/multipart_upload_util.dart @@ -6,8 +6,8 @@ import "package:dio/dio.dart"; import "package:logging/logging.dart"; import "package:photos/core/constants.dart"; import "package:photos/core/network/network.dart"; +import 'package:photos/module/upload/model/xml.dart'; import "package:photos/service_locator.dart"; -import "package:photos/utils/xml_parser_util.dart"; final _enteDio = NetworkClient.instance.enteDio; final _dio = NetworkClient.instance.getDio(); diff --git a/mobile/lib/utils/xml_parser_util.dart b/mobile/lib/utils/xml_parser_util.dart index 9490fc40c..8b1378917 100644 --- a/mobile/lib/utils/xml_parser_util.dart +++ b/mobile/lib/utils/xml_parser_util.dart @@ -1,41 +1 @@ -// ignore_for_file: implementation_imports -import "package:xml/xml.dart"; - -// used for classes that can be converted to xml -abstract class XmlParsableObject { - Map toMap(); - String get elementName; -} - -// for converting the response to xml -String convertJs2Xml(Map json) { - final builder = XmlBuilder(); - buildXml(builder, json); - return builder.buildDocument().toXmlString( - pretty: true, - indent: ' ', - ); -} - -// for building the xml node tree recursively -void buildXml(XmlBuilder builder, dynamic node) { - if (node is Map) { - node.forEach((key, value) { - builder.element(key, nest: () => buildXml(builder, value)); - }); - } else if 
(node is List) { - for (var item in node) { - buildXml(builder, item); - } - } else if (node is XmlParsableObject) { - builder.element( - node.elementName, - nest: () { - buildXml(builder, node.toMap()); - }, - ); - } else { - builder.text(node.toString()); - } -} diff --git a/mobile/pubspec.yaml b/mobile/pubspec.yaml index fc4a89264..ff29cf622 100644 --- a/mobile/pubspec.yaml +++ b/mobile/pubspec.yaml @@ -12,7 +12,7 @@ description: ente photos application # Read more about iOS versioning at # https://developer.apple.com/library/archive/documentation/General/Reference/InfoPlistKeyReference/Articles/CoreFoundationKeys.html -version: 0.8.94+614 +version: 0.8.96+616 publish_to: none environment: diff --git a/server/cmd/museum/main.go b/server/cmd/museum/main.go index 84c34189d..8ccb43cc0 100644 --- a/server/cmd/museum/main.go +++ b/server/cmd/museum/main.go @@ -678,7 +678,7 @@ func main() { pushHandler := &api.PushHandler{PushController: pushController} privateAPI.POST("/push/token", pushHandler.AddToken) - embeddingController := &embeddingCtrl.Controller{Repo: embeddingRepo, AccessCtrl: accessCtrl, ObjectCleanupController: objectCleanupController, S3Config: s3Config, FileRepo: fileRepo, CollectionRepo: collectionRepo, QueueRepo: queueRepo, TaskLockingRepo: taskLockingRepo, HostName: hostName} + embeddingController := embeddingCtrl.New(embeddingRepo, accessCtrl, objectCleanupController, s3Config, queueRepo, taskLockingRepo, fileRepo, collectionRepo, hostName) embeddingHandler := &api.EmbeddingHandler{Controller: embeddingController} privateAPI.PUT("/embeddings", embeddingHandler.InsertOrUpdate) diff --git a/server/configurations/local.yaml b/server/configurations/local.yaml index 7785f5601..87502c271 100644 --- a/server/configurations/local.yaml +++ b/server/configurations/local.yaml @@ -125,6 +125,16 @@ s3: endpoint: region: bucket: + wasabi-eu-central-2-derived: + key: + secret: + endpoint: + region: + bucket: + # Derived storage bucket is used for storing derived 
data like embeddings, preview etc. + # By default, it is the same as the hot storage bucket. + # derived-storage: wasabi-eu-central-2-derived + # If true, enable some workarounds to allow us to use a local minio instance # for object storage. # @@ -180,6 +190,9 @@ smtp: port: username: password: + # The email address from which to send the email. Set this to an email + # address whose credentials you're providing. + email: # Zoho Zeptomail config (optional) # diff --git a/server/ente/embedding.go b/server/ente/embedding.go index 2990a779a..fabde44a5 100644 --- a/server/ente/embedding.go +++ b/server/ente/embedding.go @@ -7,6 +7,7 @@ type Embedding struct { DecryptionHeader string `json:"decryptionHeader"` UpdatedAt int64 `json:"updatedAt"` Version *int `json:"version,omitempty"` + Size *int64 } type InsertOrUpdateEmbeddingRequest struct { @@ -30,9 +31,10 @@ type GetFilesEmbeddingRequest struct { } type GetFilesEmbeddingResponse struct { - Embeddings []Embedding `json:"embeddings"` - NoDataFileIDs []int64 `json:"noDataFileIDs"` - ErrFileIDs []int64 `json:"errFileIDs"` + Embeddings []Embedding `json:"embeddings"` + PendingIndexFileIDs []int64 `json:"pendingIndexFileIDs"` + ErrFileIDs []int64 `json:"errFileIDs"` + NoEmbeddingFileIDs []int64 `json:"noEmbeddingFileIDs"` } type Model string diff --git a/server/ente/file.go b/server/ente/file.go index 4a69473e3..a0e67c71c 100644 --- a/server/ente/file.go +++ b/server/ente/file.go @@ -134,6 +134,7 @@ type UpdateMagicMetadata struct { // UpdateMultipleMagicMetadataRequest request payload for updating magic metadata for list of files type UpdateMultipleMagicMetadataRequest struct { MetadataList []UpdateMagicMetadata `json:"metadataList" binding:"required"` + SkipVersion *bool `json:"skipVersion"` } // UploadURL represents the upload url for a specific object diff --git a/server/migrations/86_add_dc_embedding.down.sql b/server/migrations/86_add_dc_embedding.down.sql new file mode 100644 index 000000000..b705b29b6 --- 
/dev/null +++ b/server/migrations/86_add_dc_embedding.down.sql @@ -0,0 +1,18 @@ +-- Add types for the new dcs that are introduced for the derived data +ALTER TABLE embeddings DROP COLUMN IF EXISTS datacenters; + +DO +$$ + BEGIN + IF NOT EXISTS (SELECT 1 FROM pg_trigger WHERE tgname = 'update_embeddings_updated_at') THEN + CREATE TRIGGER update_embeddings_updated_at + BEFORE UPDATE + ON embeddings + FOR EACH ROW + EXECUTE PROCEDURE + trigger_updated_at_microseconds_column(); + ELSE + RAISE NOTICE 'Trigger update_embeddings_updated_at already exists.'; + END IF; + END +$$; \ No newline at end of file diff --git a/server/migrations/86_add_dc_embedding.up.sql b/server/migrations/86_add_dc_embedding.up.sql new file mode 100644 index 000000000..9d8e28ba7 --- /dev/null +++ b/server/migrations/86_add_dc_embedding.up.sql @@ -0,0 +1,4 @@ +-- Add types for the new dcs that are introduced for the derived data +ALTER TYPE s3region ADD VALUE 'wasabi-eu-central-2-derived'; +DROP TRIGGER IF EXISTS update_embeddings_updated_at ON embeddings; +ALTER TABLE embeddings ADD COLUMN IF NOT EXISTS datacenters s3region[] default '{b2-eu-cen}'; diff --git a/server/pkg/api/file.go b/server/pkg/api/file.go index a253c71c2..990336e37 100644 --- a/server/pkg/api/file.go +++ b/server/pkg/api/file.go @@ -110,7 +110,7 @@ func (h *FileHandler) GetUploadURLs(c *gin.Context) { userID := auth.GetUserID(c.Request.Header) count, _ := strconv.Atoi(c.Query("count")) - urls, err := h.Controller.GetUploadURLs(c, userID, count, enteApp) + urls, err := h.Controller.GetUploadURLs(c, userID, count, enteApp, false) if err != nil { handler.Error(c, stacktrace.Propagate(err, "")) return diff --git a/server/pkg/api/public_collection.go b/server/pkg/api/public_collection.go index 7a38f4380..9290d6456 100644 --- a/server/pkg/api/public_collection.go +++ b/server/pkg/api/public_collection.go @@ -57,7 +57,7 @@ func (h *PublicCollectionHandler) GetUploadUrls(c *gin.Context) { } userID := collection.Owner.ID count, _ := 
strconv.Atoi(c.Query("count")) - urls, err := h.FileCtrl.GetUploadURLs(c, userID, count, enteApp) + urls, err := h.FileCtrl.GetUploadURLs(c, userID, count, enteApp, false) if err != nil { handler.Error(c, stacktrace.Propagate(err, "")) return diff --git a/server/pkg/controller/embedding/controller.go b/server/pkg/controller/embedding/controller.go index 342411ea3..6f3de3ca7 100644 --- a/server/pkg/controller/embedding/controller.go +++ b/server/pkg/controller/embedding/controller.go @@ -2,12 +2,16 @@ package embedding import ( "bytes" + "context" "encoding/json" "errors" "fmt" + "github.com/aws/aws-sdk-go/aws/awserr" "github.com/ente-io/museum/pkg/utils/array" "strconv" + "strings" "sync" + gTime "time" "github.com/aws/aws-sdk-go/aws" "github.com/aws/aws-sdk-go/service/s3" @@ -20,23 +24,62 @@ import ( "github.com/ente-io/museum/pkg/utils/auth" "github.com/ente-io/museum/pkg/utils/network" "github.com/ente-io/museum/pkg/utils/s3config" - "github.com/ente-io/museum/pkg/utils/time" "github.com/ente-io/stacktrace" "github.com/gin-gonic/gin" log "github.com/sirupsen/logrus" ) +const ( + // maxEmbeddingDataSize is the min size of an embedding object in bytes + minEmbeddingDataSize = 2048 + embeddingFetchTimeout = 10 * gTime.Second +) + +// _fetchConfig is the configuration for the fetching objects from S3 +type _fetchConfig struct { + RetryCount int + InitialTimeout gTime.Duration + MaxTimeout gTime.Duration +} + +var _defaultFetchConfig = _fetchConfig{RetryCount: 3, InitialTimeout: 10 * gTime.Second, MaxTimeout: 30 * gTime.Second} +var _b2FetchConfig = _fetchConfig{RetryCount: 3, InitialTimeout: 15 * gTime.Second, MaxTimeout: 30 * gTime.Second} + type Controller struct { - Repo *embedding.Repository - AccessCtrl access.Controller - ObjectCleanupController *controller.ObjectCleanupController - S3Config *s3config.S3Config - QueueRepo *repo.QueueRepository - TaskLockingRepo *repo.TaskLockRepository - FileRepo *repo.FileRepository - CollectionRepo *repo.CollectionRepository 
- HostName string - cleanupCronRunning bool + Repo *embedding.Repository + AccessCtrl access.Controller + ObjectCleanupController *controller.ObjectCleanupController + S3Config *s3config.S3Config + QueueRepo *repo.QueueRepository + TaskLockingRepo *repo.TaskLockRepository + FileRepo *repo.FileRepository + CollectionRepo *repo.CollectionRepository + HostName string + cleanupCronRunning bool + derivedStorageDataCenter string + downloadManagerCache map[string]*s3manager.Downloader +} + +func New(repo *embedding.Repository, accessCtrl access.Controller, objectCleanupController *controller.ObjectCleanupController, s3Config *s3config.S3Config, queueRepo *repo.QueueRepository, taskLockingRepo *repo.TaskLockRepository, fileRepo *repo.FileRepository, collectionRepo *repo.CollectionRepository, hostName string) *Controller { + embeddingDcs := []string{s3Config.GetHotBackblazeDC(), s3Config.GetHotWasabiDC(), s3Config.GetWasabiDerivedDC(), s3Config.GetDerivedStorageDataCenter()} + cache := make(map[string]*s3manager.Downloader, len(embeddingDcs)) + for i := range embeddingDcs { + s3Client := s3Config.GetS3Client(embeddingDcs[i]) + cache[embeddingDcs[i]] = s3manager.NewDownloaderWithClient(&s3Client) + } + return &Controller{ + Repo: repo, + AccessCtrl: accessCtrl, + ObjectCleanupController: objectCleanupController, + S3Config: s3Config, + QueueRepo: queueRepo, + TaskLockingRepo: taskLockingRepo, + FileRepo: fileRepo, + CollectionRepo: collectionRepo, + HostName: hostName, + derivedStorageDataCenter: s3Config.GetDerivedStorageDataCenter(), + downloadManagerCache: cache, + } } func (c *Controller) InsertOrUpdate(ctx *gin.Context, req ente.InsertOrUpdateEmbeddingRequest) (*ente.Embedding, error) { @@ -69,12 +112,12 @@ func (c *Controller) InsertOrUpdate(ctx *gin.Context, req ente.InsertOrUpdateEmb DecryptionHeader: req.DecryptionHeader, Client: network.GetPrettyUA(ctx.GetHeader("User-Agent")) + "/" + ctx.GetHeader("X-Client-Version"), } - size, uploadErr := c.uploadObject(obj, 
c.getObjectKey(userID, req.FileID, req.Model)) + size, uploadErr := c.uploadObject(obj, c.getObjectKey(userID, req.FileID, req.Model), c.derivedStorageDataCenter) if uploadErr != nil { log.Error(uploadErr) return nil, stacktrace.Propagate(uploadErr, "") } - embedding, err := c.Repo.InsertOrUpdate(ctx, userID, req, size, version) + embedding, err := c.Repo.InsertOrUpdate(ctx, userID, req, size, version, c.derivedStorageDataCenter) embedding.Version = &version if err != nil { return nil, stacktrace.Propagate(err, "") @@ -105,7 +148,7 @@ func (c *Controller) GetDiff(ctx *gin.Context, req ente.GetEmbeddingDiffRequest) // Fetch missing embeddings in parallel if len(objectKeys) > 0 { - embeddingObjects, err := c.getEmbeddingObjectsParallel(objectKeys) + embeddingObjects, err := c.getEmbeddingObjectsParallel(objectKeys, c.derivedStorageDataCenter) if err != nil { return nil, stacktrace.Propagate(err, "") } @@ -135,15 +178,23 @@ func (c *Controller) GetFilesEmbedding(ctx *gin.Context, req ente.GetFilesEmbedd return nil, stacktrace.Propagate(err, "") } + embeddingsWithData := make([]ente.Embedding, 0) + noEmbeddingFileIds := make([]int64, 0) dbFileIds := make([]int64, 0) - for _, embedding := range userFileEmbeddings { - dbFileIds = append(dbFileIds, embedding.FileID) + // fileIDs that were indexed, but they don't contain any embedding information + for i := range userFileEmbeddings { + dbFileIds = append(dbFileIds, userFileEmbeddings[i].FileID) + if userFileEmbeddings[i].Size != nil && *userFileEmbeddings[i].Size < minEmbeddingDataSize { + noEmbeddingFileIds = append(noEmbeddingFileIds, userFileEmbeddings[i].FileID) + } else { + embeddingsWithData = append(embeddingsWithData, userFileEmbeddings[i]) + } } - missingFileIds := array.FindMissingElementsInSecondList(req.FileIDs, dbFileIds) + pendingIndexFileIds := array.FindMissingElementsInSecondList(req.FileIDs, dbFileIds) errFileIds := make([]int64, 0) // Fetch missing userFileEmbeddings in parallel - embeddingObjects, err 
:= c.getEmbeddingObjectsParallelV2(userID, userFileEmbeddings) + embeddingObjects, err := c.getEmbeddingObjectsParallelV2(userID, embeddingsWithData, c.derivedStorageDataCenter) if err != nil { return nil, stacktrace.Propagate(err, "") } @@ -166,88 +217,13 @@ func (c *Controller) GetFilesEmbedding(ctx *gin.Context, req ente.GetFilesEmbedd } return &ente.GetFilesEmbeddingResponse{ - Embeddings: fetchedEmbeddings, - NoDataFileIDs: missingFileIds, - ErrFileIDs: errFileIds, + Embeddings: fetchedEmbeddings, + PendingIndexFileIDs: pendingIndexFileIds, + ErrFileIDs: errFileIds, + NoEmbeddingFileIDs: noEmbeddingFileIds, }, nil } -func (c *Controller) DeleteAll(ctx *gin.Context) error { - userID := auth.GetUserID(ctx.Request.Header) - - err := c.Repo.DeleteAll(ctx, userID) - if err != nil { - return stacktrace.Propagate(err, "") - } - return nil -} - -// CleanupDeletedEmbeddings clears all embeddings for deleted files from the object store -func (c *Controller) CleanupDeletedEmbeddings() { - log.Info("Cleaning up deleted embeddings") - if c.cleanupCronRunning { - log.Info("Skipping CleanupDeletedEmbeddings cron run as another instance is still running") - return - } - c.cleanupCronRunning = true - defer func() { - c.cleanupCronRunning = false - }() - items, err := c.QueueRepo.GetItemsReadyForDeletion(repo.DeleteEmbeddingsQueue, 200) - if err != nil { - log.WithError(err).Error("Failed to fetch items from queue") - return - } - for _, i := range items { - c.deleteEmbedding(i) - } -} - -func (c *Controller) deleteEmbedding(qItem repo.QueueItem) { - lockName := fmt.Sprintf("Embedding:%s", qItem.Item) - lockStatus, err := c.TaskLockingRepo.AcquireLock(lockName, time.MicrosecondsAfterHours(1), c.HostName) - ctxLogger := log.WithField("item", qItem.Item).WithField("queue_id", qItem.Id) - if err != nil || !lockStatus { - ctxLogger.Warn("unable to acquire lock") - return - } - defer func() { - err = c.TaskLockingRepo.ReleaseLock(lockName) - if err != nil { - ctxLogger.Errorf("Error 
while releasing lock %s", err) - } - }() - ctxLogger.Info("Deleting all embeddings") - - fileID, _ := strconv.ParseInt(qItem.Item, 10, 64) - ownerID, err := c.FileRepo.GetOwnerID(fileID) - if err != nil { - ctxLogger.WithError(err).Error("Failed to fetch ownerID") - return - } - prefix := c.getEmbeddingObjectPrefix(ownerID, fileID) - - err = c.ObjectCleanupController.DeleteAllObjectsWithPrefix(prefix, c.S3Config.GetHotDataCenter()) - if err != nil { - ctxLogger.WithError(err).Error("Failed to delete all objects") - return - } - - err = c.Repo.Delete(fileID) - if err != nil { - ctxLogger.WithError(err).Error("Failed to remove from db") - return - } - - err = c.QueueRepo.DeleteItem(repo.DeleteEmbeddingsQueue, qItem.Item) - if err != nil { - ctxLogger.WithError(err).Error("Failed to remove item from the queue") - return - } - - ctxLogger.Info("Successfully deleted all embeddings") -} - func (c *Controller) getObjectKey(userID int64, fileID int64, model string) string { return c.getEmbeddingObjectPrefix(userID, fileID) + model + ".json" } @@ -256,12 +232,23 @@ func (c *Controller) getEmbeddingObjectPrefix(userID int64, fileID int64) string return strconv.FormatInt(userID, 10) + "/ml-data/" + strconv.FormatInt(fileID, 10) + "/" } +// Get userId, model and fileID from the object key +func (c *Controller) getEmbeddingObjectDetails(objectKey string) (userID int64, model string, fileID int64) { + split := strings.Split(objectKey, "/") + userID, _ = strconv.ParseInt(split[0], 10, 64) + fileID, _ = strconv.ParseInt(split[2], 10, 64) + model = strings.Split(split[3], ".")[0] + return userID, model, fileID +} + // uploadObject uploads the embedding object to the object store and returns the object size -func (c *Controller) uploadObject(obj ente.EmbeddingObject, key string) (int, error) { +func (c *Controller) uploadObject(obj ente.EmbeddingObject, key string, dc string) (int, error) { embeddingObj, _ := json.Marshal(obj) - uploader := 
s3manager.NewUploaderWithClient(c.S3Config.GetHotS3Client()) + s3Client := c.S3Config.GetS3Client(dc) + s3Bucket := c.S3Config.GetBucket(dc) + uploader := s3manager.NewUploaderWithClient(&s3Client) up := s3manager.UploadInput{ - Bucket: c.S3Config.GetHotBucket(), + Bucket: s3Bucket, Key: &key, Body: bytes.NewReader(embeddingObj), } @@ -279,12 +266,10 @@ var globalDiffFetchSemaphore = make(chan struct{}, 300) var globalFileFetchSemaphore = make(chan struct{}, 400) -func (c *Controller) getEmbeddingObjectsParallel(objectKeys []string) ([]ente.EmbeddingObject, error) { +func (c *Controller) getEmbeddingObjectsParallel(objectKeys []string, dc string) ([]ente.EmbeddingObject, error) { var wg sync.WaitGroup var errs []error embeddingObjects := make([]ente.EmbeddingObject, len(objectKeys)) - downloader := s3manager.NewDownloaderWithClient(c.S3Config.GetHotS3Client()) - for i, objectKey := range objectKeys { wg.Add(1) globalDiffFetchSemaphore <- struct{}{} // Acquire from global semaphore @@ -292,7 +277,7 @@ func (c *Controller) getEmbeddingObjectsParallel(objectKeys []string) ([]ente.Em defer wg.Done() defer func() { <-globalDiffFetchSemaphore }() // Release back to global semaphore - obj, err := c.getEmbeddingObject(objectKey, downloader) + obj, err := c.getEmbeddingObject(context.Background(), objectKey, dc) if err != nil { errs = append(errs, err) log.Error("error fetching embedding object: "+objectKey, err) @@ -317,10 +302,9 @@ type embeddingObjectResult struct { err error } -func (c *Controller) getEmbeddingObjectsParallelV2(userID int64, dbEmbeddingRows []ente.Embedding) ([]embeddingObjectResult, error) { +func (c *Controller) getEmbeddingObjectsParallelV2(userID int64, dbEmbeddingRows []ente.Embedding, dc string) ([]embeddingObjectResult, error) { var wg sync.WaitGroup embeddingObjects := make([]embeddingObjectResult, len(dbEmbeddingRows)) - downloader := s3manager.NewDownloaderWithClient(c.S3Config.GetHotS3Client()) for i, dbEmbeddingRow := range dbEmbeddingRows { 
wg.Add(1) @@ -329,7 +313,7 @@ func (c *Controller) getEmbeddingObjectsParallelV2(userID int64, dbEmbeddingRows defer wg.Done() defer func() { <-globalFileFetchSemaphore }() // Release back to global semaphore objectKey := c.getObjectKey(userID, dbEmbeddingRow.FileID, dbEmbeddingRow.Model) - obj, err := c.getEmbeddingObject(objectKey, downloader) + obj, err := c.getEmbeddingObject(context.Background(), objectKey, dc) if err != nil { log.Error("error fetching embedding object: "+objectKey, err) embeddingObjects[i] = embeddingObjectResult{ @@ -349,32 +333,125 @@ func (c *Controller) getEmbeddingObjectsParallelV2(userID int64, dbEmbeddingRows return embeddingObjects, nil } -func (c *Controller) getEmbeddingObject(objectKey string, downloader *s3manager.Downloader) (ente.EmbeddingObject, error) { - return c.getEmbeddingObjectWithRetries(objectKey, downloader, 3) +func (c *Controller) getEmbeddingObject(ctx context.Context, objectKey string, dc string) (ente.EmbeddingObject, error) { + opt := _defaultFetchConfig + if dc == c.S3Config.GetHotBackblazeDC() { + opt = _b2FetchConfig + } + ctxLogger := log.WithField("objectKey", objectKey).WithField("dc", dc) + totalAttempts := opt.RetryCount + 1 + timeout := opt.InitialTimeout + for i := 0; i < totalAttempts; i++ { + if i > 0 { + timeout = timeout * 2 + if timeout > opt.MaxTimeout { + timeout = opt.MaxTimeout + } + } + fetchCtx, cancel := context.WithTimeout(ctx, timeout) + select { + case <-ctx.Done(): + cancel() + return ente.EmbeddingObject{}, stacktrace.Propagate(ctx.Err(), "") + default: + obj, err := c.downloadObject(fetchCtx, objectKey, dc) + cancel() // Ensure cancel is called to release resources + if err == nil { + if i > 0 { + ctxLogger.Infof("Fetched object after %d attempts", i) + } + return obj, nil + } + // Check if the error is due to context timeout or cancellation + if err == nil && fetchCtx.Err() != nil { + ctxLogger.Error("Fetch timed out or cancelled: ", fetchCtx.Err()) + } else { + // check if the error 
is due to object not found + if s3Err, ok := err.(awserr.RequestFailure); ok { + if s3Err.Code() == s3.ErrCodeNoSuchKey { + var srcDc, destDc string + destDc = c.S3Config.GetDerivedStorageDataCenter() + // todo:(neeraj) Refactor this later to get available the DC from the DB instead of + // querying the DB. This will help in case of multiple DCs and avoid querying the DB + // for each object. + // For initial migration, as we know that original DC was b2, and if the embedding is not found + // in the new derived DC, we can try to fetch it from the B2 DC. + if c.derivedStorageDataCenter != c.S3Config.GetHotBackblazeDC() { + // embeddings ideally should ideally be in the default hot bucket b2 + srcDc = c.S3Config.GetHotBackblazeDC() + } else { + _, modelName, fileID := c.getEmbeddingObjectDetails(objectKey) + activeDcs, err := c.Repo.GetOtherDCsForFileAndModel(context.Background(), fileID, modelName, c.derivedStorageDataCenter) + if err != nil { + return ente.EmbeddingObject{}, stacktrace.Propagate(err, "failed to get other dc") + } + if len(activeDcs) > 0 { + srcDc = activeDcs[0] + } else { + ctxLogger.Error("Object not found in any dc ", s3Err) + return ente.EmbeddingObject{}, stacktrace.Propagate(errors.New("object not found"), "") + } + } + copyEmbeddingObject, err := c.copyEmbeddingObject(ctx, objectKey, srcDc, destDc) + if err == nil { + ctxLogger.Infof("Got object from dc %s", srcDc) + return *copyEmbeddingObject, nil + } else { + ctxLogger.WithError(err).Errorf("Failed to get object from fallback dc %s", srcDc) + } + return ente.EmbeddingObject{}, stacktrace.Propagate(errors.New("object not found"), "") + } + } + ctxLogger.Error("Failed to fetch object: ", err) + } + } + } + return ente.EmbeddingObject{}, stacktrace.Propagate(errors.New("failed to fetch object"), "") } -func (c *Controller) getEmbeddingObjectWithRetries(objectKey string, downloader *s3manager.Downloader, retryCount int) (ente.EmbeddingObject, error) { +func (c *Controller) downloadObject(ctx 
context.Context, objectKey string, dc string) (ente.EmbeddingObject, error) { var obj ente.EmbeddingObject buff := &aws.WriteAtBuffer{} - _, err := downloader.Download(buff, &s3.GetObjectInput{ - Bucket: c.S3Config.GetHotBucket(), + bucket := c.S3Config.GetBucket(dc) + downloader := c.downloadManagerCache[dc] + _, err := downloader.DownloadWithContext(ctx, buff, &s3.GetObjectInput{ + Bucket: bucket, Key: &objectKey, }) if err != nil { - log.Error(err) - if retryCount > 0 { - return c.getEmbeddingObjectWithRetries(objectKey, downloader, retryCount-1) - } - return obj, stacktrace.Propagate(err, "") + return obj, err } err = json.Unmarshal(buff.Bytes(), &obj) if err != nil { - log.Error(err) - return obj, stacktrace.Propagate(err, "") + return obj, stacktrace.Propagate(err, "unmarshal failed") } return obj, nil } +// download the embedding object from hot bucket and upload to embeddings bucket +func (c *Controller) copyEmbeddingObject(ctx context.Context, objectKey string, srcDC, destDC string) (*ente.EmbeddingObject, error) { + if srcDC == destDC { + return nil, stacktrace.Propagate(errors.New("src and dest dc can not be same"), "") + } + obj, err := c.downloadObject(ctx, objectKey, srcDC) + if err != nil { + return nil, stacktrace.Propagate(err, fmt.Sprintf("failed to download object from %s", srcDC)) + } + go func() { + userID, modelName, fileID := c.getEmbeddingObjectDetails(objectKey) + size, uploadErr := c.uploadObject(obj, objectKey, c.derivedStorageDataCenter) + if uploadErr != nil { + log.WithField("object", objectKey).Error("Failed to copy to embeddings bucket: ", uploadErr) + } + updateDcErr := c.Repo.AddNewDC(context.Background(), fileID, ente.Model(modelName), userID, size, destDC) + if updateDcErr != nil { + log.WithField("object", objectKey).Error("Failed to update dc in db: ", updateDcErr) + return + } + }() + return &obj, nil +} + func (c *Controller) _validateGetFileEmbeddingsRequest(ctx *gin.Context, userID int64, req ente.GetFilesEmbeddingRequest) 
error { if req.Model == "" { return ente.NewBadRequestWithMessage("model is required") diff --git a/server/pkg/controller/embedding/delete.go b/server/pkg/controller/embedding/delete.go new file mode 100644 index 000000000..dd2027e42 --- /dev/null +++ b/server/pkg/controller/embedding/delete.go @@ -0,0 +1,126 @@ +package embedding + +import ( + "context" + "fmt" + "github.com/ente-io/museum/pkg/repo" + "github.com/ente-io/museum/pkg/utils/auth" + "github.com/ente-io/museum/pkg/utils/time" + "github.com/ente-io/stacktrace" + "github.com/gin-gonic/gin" + log "github.com/sirupsen/logrus" + "strconv" +) + +func (c *Controller) DeleteAll(ctx *gin.Context) error { + userID := auth.GetUserID(ctx.Request.Header) + + err := c.Repo.DeleteAll(ctx, userID) + if err != nil { + return stacktrace.Propagate(err, "") + } + return nil +} + +// CleanupDeletedEmbeddings clears all embeddings for deleted files from the object store +func (c *Controller) CleanupDeletedEmbeddings() { + log.Info("Cleaning up deleted embeddings") + if c.cleanupCronRunning { + log.Info("Skipping CleanupDeletedEmbeddings cron run as another instance is still running") + return + } + c.cleanupCronRunning = true + defer func() { + c.cleanupCronRunning = false + }() + items, err := c.QueueRepo.GetItemsReadyForDeletion(repo.DeleteEmbeddingsQueue, 200) + if err != nil { + log.WithError(err).Error("Failed to fetch items from queue") + return + } + for _, i := range items { + c.deleteEmbedding(i) + } +} + +func (c *Controller) deleteEmbedding(qItem repo.QueueItem) { + lockName := fmt.Sprintf("Embedding:%s", qItem.Item) + lockStatus, err := c.TaskLockingRepo.AcquireLock(lockName, time.MicrosecondsAfterHours(1), c.HostName) + ctxLogger := log.WithField("item", qItem.Item).WithField("queue_id", qItem.Id) + if err != nil || !lockStatus { + ctxLogger.Warn("unable to acquire lock") + return + } + defer func() { + err = c.TaskLockingRepo.ReleaseLock(lockName) + if err != nil { + ctxLogger.Errorf("Error while releasing 
lock %s", err) + } + }() + ctxLogger.Info("Deleting all embeddings") + + fileID, _ := strconv.ParseInt(qItem.Item, 10, 64) + ownerID, err := c.FileRepo.GetOwnerID(fileID) + if err != nil { + ctxLogger.WithError(err).Error("Failed to fetch ownerID") + return + } + prefix := c.getEmbeddingObjectPrefix(ownerID, fileID) + datacenters, err := c.Repo.GetDatacenters(context.Background(), fileID) + if err != nil { + ctxLogger.WithError(err).Error("Failed to fetch datacenters") + return + } + // Ensure that the object are deleted from active derived storage dc. Ideally, this section should never be executed + // unless there's a bug in storing the DC or the service restarts before removing the rows from the table + // todo:(neeraj): remove this section after a few weeks of deployment + if len(datacenters) == 0 { + ctxLogger.Warn("No datacenters found for file, ensuring deletion from derived storage and hot DC") + err = c.ObjectCleanupController.DeleteAllObjectsWithPrefix(prefix, c.S3Config.GetDerivedStorageDataCenter()) + if err != nil { + ctxLogger.WithError(err).Error("Failed to delete all objects") + return + } + // if Derived DC is different from hot DC, delete from hot DC as well + if c.derivedStorageDataCenter != c.S3Config.GetHotDataCenter() { + err = c.ObjectCleanupController.DeleteAllObjectsWithPrefix(prefix, c.S3Config.GetHotDataCenter()) + if err != nil { + ctxLogger.WithError(err).Error("Failed to delete all objects from hot DC") + return + } + } + } else { + ctxLogger.Infof("Deleting from all datacenters %v", datacenters) + } + + for i := range datacenters { + err = c.ObjectCleanupController.DeleteAllObjectsWithPrefix(prefix, datacenters[i]) + if err != nil { + ctxLogger.WithError(err).Errorf("Failed to delete all objects from %s", datacenters[i]) + return + } else { + removeErr := c.Repo.RemoveDatacenter(context.Background(), fileID, datacenters[i]) + if removeErr != nil { + ctxLogger.WithError(removeErr).Error("Failed to remove datacenter from db") + return + 
} + } + } + + noDcs, noDcErr := c.Repo.GetDatacenters(context.Background(), fileID) + if len(noDcs) > 0 || noDcErr != nil { + ctxLogger.Errorf("Failed to delete from all datacenters %s", noDcs) + return + } + err = c.Repo.Delete(fileID) + if err != nil { + ctxLogger.WithError(err).Error("Failed to remove from db") + return + } + err = c.QueueRepo.DeleteItem(repo.DeleteEmbeddingsQueue, qItem.Item) + if err != nil { + ctxLogger.WithError(err).Error("Failed to remove item from the queue") + return + } + ctxLogger.Info("Successfully deleted all embeddings") +} diff --git a/server/pkg/controller/file.go b/server/pkg/controller/file.go index e91d299f1..b3fec115d 100644 --- a/server/pkg/controller/file.go +++ b/server/pkg/controller/file.go @@ -258,7 +258,7 @@ func (c *FileController) Update(ctx context.Context, userID int64, file ente.Fil } // GetUploadURLs returns a bunch of presigned URLs for uploading files -func (c *FileController) GetUploadURLs(ctx context.Context, userID int64, count int, app ente.App) ([]ente.UploadURL, error) { +func (c *FileController) GetUploadURLs(ctx context.Context, userID int64, count int, app ente.App, ignoreLimit bool) ([]ente.UploadURL, error) { err := c.UsageCtrl.CanUploadFile(ctx, userID, nil, app) if err != nil { return []ente.UploadURL{}, stacktrace.Propagate(err, "") @@ -268,7 +268,7 @@ func (c *FileController) GetUploadURLs(ctx context.Context, userID int64, count bucket := c.S3Config.GetHotBucket() urls := make([]ente.UploadURL, 0) objectKeys := make([]string, 0) - if count > MaxUploadURLsLimit { + if count > MaxUploadURLsLimit && !ignoreLimit { count = MaxUploadURLsLimit } for i := 0; i < count; i++ { @@ -502,7 +502,7 @@ func (c *FileController) UpdateMagicMetadata(ctx *gin.Context, req ente.UpdateMu if err != nil { return stacktrace.Propagate(err, "") } - err = c.FileRepo.UpdateMagicAttributes(ctx, req.MetadataList, isPublicMetadata) + err = c.FileRepo.UpdateMagicAttributes(ctx, req.MetadataList, isPublicMetadata, 
req.SkipVersion) if err != nil { return stacktrace.Propagate(err, "failed to update magic attributes") } diff --git a/server/pkg/controller/file_copy/file_copy.go b/server/pkg/controller/file_copy/file_copy.go index afab10efe..4f9267e2e 100644 --- a/server/pkg/controller/file_copy/file_copy.go +++ b/server/pkg/controller/file_copy/file_copy.go @@ -92,7 +92,7 @@ func (fc *FileCopyController) CopyFiles(c *gin.Context, req ente.CopyFileSyncReq // request the uploadUrls using existing method. This is to ensure that orphan objects are automatically cleaned up // todo:(neeraj) optimize this method by removing the need for getting a signed url for each object - uploadUrls, err := fc.FileController.GetUploadURLs(c, userID, len(s3ObjectsToCopy), app) + uploadUrls, err := fc.FileController.GetUploadURLs(c, userID, len(s3ObjectsToCopy), app, true) if err != nil { return nil, err } diff --git a/server/pkg/repo/cast/repo.go b/server/pkg/repo/cast/repo.go index 2f4446c9d..823b17b2e 100644 --- a/server/pkg/repo/cast/repo.go +++ b/server/pkg/repo/cast/repo.go @@ -8,6 +8,7 @@ import ( "github.com/ente-io/stacktrace" "github.com/google/uuid" log "github.com/sirupsen/logrus" + "strings" ) type Repository struct { @@ -19,6 +20,7 @@ func (r *Repository) AddCode(ctx context.Context, pubKey string, ip string) (str if err != nil { return "", err } + codeValue = strings.ToUpper(codeValue) _, err = r.DB.ExecContext(ctx, "INSERT INTO casting (code, public_key, id, ip) VALUES ($1, $2, $3, $4)", codeValue, pubKey, uuid.New(), ip) if err != nil { return "", err @@ -28,11 +30,13 @@ func (r *Repository) AddCode(ctx context.Context, pubKey string, ip string) (str // InsertCastData insert collection_id, cast_user, token and encrypted_payload for given code if collection_id is not null func (r *Repository) InsertCastData(ctx context.Context, castUserID int64, code string, collectionID int64, castToken string, encryptedPayload string) error { + code = strings.ToUpper(code) _, err := 
r.DB.ExecContext(ctx, "UPDATE casting SET collection_id = $1, cast_user = $2, token = $3, encrypted_payload = $4 WHERE code = $5 and is_deleted=false", collectionID, castUserID, castToken, encryptedPayload, code) return err } func (r *Repository) GetPubKeyAndIp(ctx context.Context, code string) (string, string, error) { + code = strings.ToUpper(code) var pubKey, ip string row := r.DB.QueryRowContext(ctx, "SELECT public_key, ip FROM casting WHERE code = $1 and is_deleted=false", code) err := row.Scan(&pubKey, &ip) @@ -46,6 +50,7 @@ func (r *Repository) GetPubKeyAndIp(ctx context.Context, code string) (string, s } func (r *Repository) GetEncCastData(ctx context.Context, code string) (*string, error) { + code = strings.ToUpper(code) var payload sql.NullString row := r.DB.QueryRowContext(ctx, "SELECT encrypted_payload FROM casting WHERE code = $1 and is_deleted=false", code) err := row.Scan(&payload) diff --git a/server/pkg/repo/embedding/repository.go b/server/pkg/repo/embedding/repository.go index f21e3b4f1..5cfbd35c5 100644 --- a/server/pkg/repo/embedding/repository.go +++ b/server/pkg/repo/embedding/repository.go @@ -3,11 +3,11 @@ package embedding import ( "context" "database/sql" + "errors" "fmt" - "github.com/lib/pq" - "github.com/ente-io/museum/ente" "github.com/ente-io/stacktrace" + "github.com/lib/pq" "github.com/sirupsen/logrus" ) @@ -18,15 +18,26 @@ type Repository struct { } // Create inserts a new embedding - -func (r *Repository) InsertOrUpdate(ctx context.Context, ownerID int64, entry ente.InsertOrUpdateEmbeddingRequest, size int, version int) (ente.Embedding, error) { +func (r *Repository) InsertOrUpdate(ctx context.Context, ownerID int64, entry ente.InsertOrUpdateEmbeddingRequest, size int, version int, dc string) (ente.Embedding, error) { var updatedAt int64 - err := r.DB.QueryRowContext(ctx, `INSERT INTO embeddings - (file_id, owner_id, model, size, version) - VALUES ($1, $2, $3, $4, $5) - ON CONFLICT ON CONSTRAINT unique_embeddings_file_id_model - 
DO UPDATE SET updated_at = now_utc_micro_seconds(), size = $4, version = $5 - RETURNING updated_at`, entry.FileID, ownerID, entry.Model, size, version).Scan(&updatedAt) + err := r.DB.QueryRowContext(ctx, ` + INSERT INTO embeddings + (file_id, owner_id, model, size, version, datacenters) + VALUES + ($1, $2, $3, $4, $5, ARRAY[$6]::s3region[]) + ON CONFLICT ON CONSTRAINT unique_embeddings_file_id_model + DO UPDATE + SET + updated_at = now_utc_micro_seconds(), + size = $4, + version = $5, + datacenters = CASE + WHEN $6 = ANY(COALESCE(embeddings.datacenters, ARRAY['b2-eu-cen']::s3region[])) THEN embeddings.datacenters + ELSE array_append(COALESCE(embeddings.datacenters, ARRAY['b2-eu-cen']::s3region[]), $6::s3region) + END + RETURNING updated_at`, + entry.FileID, ownerID, entry.Model, size, version, dc).Scan(&updatedAt) + if err != nil { // check if error is due to model enum invalid value if err.Error() == fmt.Sprintf("pq: invalid input value for enum model: \"%s\"", entry.Model) { @@ -45,7 +56,7 @@ func (r *Repository) InsertOrUpdate(ctx context.Context, ownerID int64, entry en // GetDiff returns the embeddings that have been updated since the given time func (r *Repository) GetDiff(ctx context.Context, ownerID int64, model ente.Model, sinceTime int64, limit int16) ([]ente.Embedding, error) { - rows, err := r.DB.QueryContext(ctx, `SELECT file_id, model, encrypted_embedding, decryption_header, updated_at, version + rows, err := r.DB.QueryContext(ctx, `SELECT file_id, model, encrypted_embedding, decryption_header, updated_at, version, size FROM embeddings WHERE owner_id = $1 AND model = $2 AND updated_at > $3 ORDER BY updated_at ASC @@ -57,7 +68,7 @@ func (r *Repository) GetDiff(ctx context.Context, ownerID int64, model ente.Mode } func (r *Repository) GetFilesEmbedding(ctx context.Context, ownerID int64, model ente.Model, fileIDs []int64) ([]ente.Embedding, error) { - rows, err := r.DB.QueryContext(ctx, `SELECT file_id, model, encrypted_embedding, decryption_header, 
updated_at, version + rows, err := r.DB.QueryContext(ctx, `SELECT file_id, model, encrypted_embedding, decryption_header, updated_at, version, size FROM embeddings WHERE owner_id = $1 AND model = $2 AND file_id = ANY($3)`, ownerID, model, pq.Array(fileIDs)) if err != nil { @@ -82,6 +93,89 @@ func (r *Repository) Delete(fileID int64) error { return nil } +// GetDatacenters returns unique list of datacenters where derived embeddings are stored +func (r *Repository) GetDatacenters(ctx context.Context, fileID int64) ([]string, error) { + rows, err := r.DB.QueryContext(ctx, `SELECT datacenters FROM embeddings WHERE file_id = $1`, fileID) + if err != nil { + return nil, stacktrace.Propagate(err, "") + } + uniqueDatacenters := make(map[string]struct{}) + for rows.Next() { + var datacenters []string + err = rows.Scan(pq.Array(&datacenters)) + if err != nil { + return nil, stacktrace.Propagate(err, "") + } + for _, dc := range datacenters { + uniqueDatacenters[dc] = struct{}{} + } + } + datacenters := make([]string, 0, len(uniqueDatacenters)) + for dc := range uniqueDatacenters { + datacenters = append(datacenters, dc) + } + return datacenters, nil +} + +// GetOtherDCsForFileAndModel returns the list of datacenters where the embeddings are stored for a given file and model, excluding the ignoredDC +func (r *Repository) GetOtherDCsForFileAndModel(ctx context.Context, fileID int64, model string, ignoredDC string) ([]string, error) { + rows, err := r.DB.QueryContext(ctx, `SELECT datacenters FROM embeddings WHERE file_id = $1 AND model = $2`, fileID, model) + if err != nil { + return nil, stacktrace.Propagate(err, "") + } + uniqueDatacenters := make(map[string]bool) + for rows.Next() { + var datacenters []string + err = rows.Scan(pq.Array(&datacenters)) + if err != nil { + return nil, stacktrace.Propagate(err, "") + } + for _, dc := range datacenters { + // add to uniqueDatacenters if it is not the ignoredDC + if dc != ignoredDC { + uniqueDatacenters[dc] = true + } + } + } + 
datacenters := make([]string, 0, len(uniqueDatacenters)) + for dc := range uniqueDatacenters { + datacenters = append(datacenters, dc) + } + return datacenters, nil +} + +// RemoveDatacenter removes the given datacenter from the list of datacenters +func (r *Repository) RemoveDatacenter(ctx context.Context, fileID int64, dc string) error { + _, err := r.DB.ExecContext(ctx, `UPDATE embeddings SET datacenters = array_remove(datacenters, $1) WHERE file_id = $2`, dc, fileID) + if err != nil { + return stacktrace.Propagate(err, "") + } + return nil +} + +// AddNewDC adds the dc name to the list of datacenters, if it doesn't exist already, for a given file, model and user. It also updates the size of the embedding +func (r *Repository) AddNewDC(ctx context.Context, fileID int64, model ente.Model, userID int64, size int, dc string) error { + res, err := r.DB.ExecContext(ctx, ` + UPDATE embeddings + SET size = $1, + datacenters = CASE + WHEN $2::s3region = ANY(datacenters) THEN datacenters + ELSE array_append(datacenters, $2::s3region) + END + WHERE file_id = $3 AND model = $4 AND owner_id = $5`, size, dc, fileID, model, userID) + if err != nil { + return stacktrace.Propagate(err, "") + } + rowsAffected, err := res.RowsAffected() + if err != nil { + return stacktrace.Propagate(err, "") + } + if rowsAffected == 0 { + return stacktrace.Propagate(errors.New("no row got updated"), "") + } + return nil +} + func convertRowsToEmbeddings(rows *sql.Rows) ([]ente.Embedding, error) { defer func() { if err := rows.Close(); err != nil { @@ -94,7 +188,7 @@ func convertRowsToEmbeddings(rows *sql.Rows) ([]ente.Embedding, error) { embedding := ente.Embedding{} var encryptedEmbedding, decryptionHeader sql.NullString var version sql.NullInt32 - err := rows.Scan(&embedding.FileID, &embedding.Model, &encryptedEmbedding, &decryptionHeader, &embedding.UpdatedAt, &version) + err := rows.Scan(&embedding.FileID, &embedding.Model, &encryptedEmbedding, &decryptionHeader, &embedding.UpdatedAt, 
&version, &embedding.Size) if encryptedEmbedding.Valid && len(encryptedEmbedding.String) > 0 { embedding.EncryptedEmbedding = encryptedEmbedding.String } diff --git a/server/pkg/repo/file.go b/server/pkg/repo/file.go index eafc7b570..2ae4eafdc 100644 --- a/server/pkg/repo/file.go +++ b/server/pkg/repo/file.go @@ -311,7 +311,12 @@ func (repo *FileRepository) Update(file ente.File, fileSize int64, thumbnailSize // UpdateMagicAttributes updates the magic attributes for the list of files and update collection_files & collection // which have this file. -func (repo *FileRepository) UpdateMagicAttributes(ctx context.Context, fileUpdates []ente.UpdateMagicMetadata, isPublicMetadata bool) error { +func (repo *FileRepository) UpdateMagicAttributes( + ctx context.Context, + fileUpdates []ente.UpdateMagicMetadata, + isPublicMetadata bool, + skipVersion *bool, +) error { updationTime := time.Microseconds() tx, err := repo.DB.BeginTx(ctx, nil) if err != nil { @@ -336,6 +341,9 @@ func (repo *FileRepository) UpdateMagicAttributes(ctx context.Context, fileUpdat return stacktrace.Propagate(err, "") } } + if skipVersion != nil && *skipVersion { + return tx.Commit() + } // todo: full table scan, need to add index (for discussion: add user_id and idx {user_id, file_id}). 
updatedRows, err := tx.QueryContext(ctx, `UPDATE collection_files SET updation_time = $1 WHERE file_id = ANY($2) AND is_deleted= false RETURNING collection_id`, updationTime, diff --git a/server/pkg/utils/email/email.go b/server/pkg/utils/email/email.go index 46202313e..a19987a1d 100644 --- a/server/pkg/utils/email/email.go +++ b/server/pkg/utils/email/email.go @@ -38,6 +38,7 @@ func sendViaSMTP(toEmails []string, fromName string, fromEmail string, subject s smtpPort := viper.GetString("smtp.port") smtpUsername := viper.GetString("smtp.username") smtpPassword := viper.GetString("smtp.password") + smtpEmail := viper.GetString("smtp.email") var emailMessage string @@ -50,6 +51,11 @@ func sendViaSMTP(toEmails []string, fromName string, fromEmail string, subject s emailAddresses += email } + // If an sender email is provided use it instead of the fromEmail. + if smtpEmail != "" { + fromEmail = smtpEmail + } + header := "From: " + fromName + " <" + fromEmail + ">\n" + "To: " + emailAddresses + "\n" + "Subject: " + subject + "\n" + diff --git a/server/pkg/utils/s3config/s3config.go b/server/pkg/utils/s3config/s3config.go index 9b273bd61..a562e5181 100644 --- a/server/pkg/utils/s3config/s3config.go +++ b/server/pkg/utils/s3config/s3config.go @@ -28,6 +28,8 @@ type S3Config struct { hotDC string // Secondary (hot) data center secondaryHotDC string + //Derived data data center for derived files like ml embeddings & preview files + derivedStorageDC string // A map from data centers to S3 configurations s3Configs map[string]*aws.Config // A map from data centers to pre-created S3 clients @@ -71,6 +73,7 @@ var ( dcWasabiEuropeCentralDeprecated string = "wasabi-eu-central-2" dcWasabiEuropeCentral_v3 string = "wasabi-eu-central-2-v3" dcSCWEuropeFrance_v3 string = "scw-eu-fr-v3" + dcWasabiEuropeCentralDerived string = "wasabi-eu-central-2-derived" ) // Number of days that the wasabi bucket is configured to retain objects. 
@@ -86,9 +89,9 @@ func NewS3Config() *S3Config { } func (config *S3Config) initialize() { - dcs := [5]string{ + dcs := [6]string{ dcB2EuropeCentral, dcSCWEuropeFranceLockedDeprecated, dcWasabiEuropeCentralDeprecated, - dcWasabiEuropeCentral_v3, dcSCWEuropeFrance_v3} + dcWasabiEuropeCentral_v3, dcSCWEuropeFrance_v3, dcWasabiEuropeCentralDerived} config.hotDC = dcB2EuropeCentral config.secondaryHotDC = dcWasabiEuropeCentral_v3 @@ -99,6 +102,12 @@ func (config *S3Config) initialize() { config.secondaryHotDC = hs2 log.Infof("Hot storage: %s (secondary: %s)", hs1, hs2) } + config.derivedStorageDC = config.hotDC + embeddingsDC := viper.GetString("s3.derived-storage") + if embeddingsDC != "" && array.StringInList(embeddingsDC, dcs[:]) { + config.derivedStorageDC = embeddingsDC + log.Infof("Embeddings bucket: %s", embeddingsDC) + } config.buckets = make(map[string]string) config.s3Configs = make(map[string]*aws.Config) @@ -171,6 +180,18 @@ func (config *S3Config) GetHotS3Client() *s3.S3 { return &s3Client } +func (config *S3Config) GetDerivedStorageDataCenter() string { + return config.derivedStorageDC +} +func (config *S3Config) GetDerivedStorageBucket() *string { + return config.GetBucket(config.derivedStorageDC) +} + +func (config *S3Config) GetDerivedStorageS3Client() *s3.S3 { + s3Client := config.GetS3Client(config.derivedStorageDC) + return &s3Client +} + // Return the name of the hot Backblaze data center func (config *S3Config) GetHotBackblazeDC() string { return dcB2EuropeCentral @@ -181,6 +202,10 @@ func (config *S3Config) GetHotWasabiDC() string { return dcWasabiEuropeCentral_v3 } +func (config *S3Config) GetWasabiDerivedDC() string { + return dcWasabiEuropeCentralDerived +} + // Return the name of the cold Scaleway data center func (config *S3Config) GetColdScalewayDC() string { return dcSCWEuropeFrance_v3 diff --git a/web/README.md b/web/README.md index d33c03904..82ad85cad 100644 --- a/web/README.md +++ b/web/README.md @@ -32,8 +32,11 @@ yarn dev That's it. 
The web app will automatically hot reload when you make changes. -If you're new to web development and unsure about how to get started, or are -facing some problems when running the above steps, see [docs/new](docs/new.md). +> [!TIP] +> +> If you're new to web development and unsure about how to get started, or are +> facing some problems when running the above steps, see +> [docs/new](docs/new.md). ## Other apps @@ -54,21 +57,22 @@ As a brief overview, this directory contains the following apps: your 2FA codes using this web app. For adding and editing your 2FA codes, please use the Ente Auth [mobile/desktop app](../auth/README.md) instead. -These two are the public facing apps. There are other part of the code which are +These are the public facing apps. There are other part of the code which are accessed as features within the main apps, but in terms of code are independently maintained and deployed: - `apps/accounts`: Passkey support (Coming soon) -- `apps/cast`: Chromecast support (Coming soon) +- `apps/cast`: Browser and Chromecast casting support. +- `apps/payments`: Handle subscription payments. > [!NOTE] > -> This folder is supposed to contain all our web related code. Most of it is -> already here, but some code which is being deployed from our other -> repositories like the family portal (https://github.com/ente-io/families) -> still needs to be brought here. Also, some of the Cloudflare workers we use -> for fixing headers etc too. Hang tight, we're on it, will bring in the -> remaining bits one by one. +> Some older code is being deployed from our other repositories like the family +> portal (https://github.com/ente-io/families) and still needs to be brought +> here. Likewise, some of the Cloudflare workers we use for fixing headers etc. +> We'll gradually bring all these into this monorepo one by one. + +The apps take use various `packages/` to share code amongst themselves. 
You might also find this [overview of dependencies](docs/dependencies.md) useful. diff --git a/web/apps/accounts/.env b/web/apps/accounts/.env new file mode 100644 index 000000000..3f3b1cc9a --- /dev/null +++ b/web/apps/accounts/.env @@ -0,0 +1 @@ +NEXT_TELEMETRY_DISABLED = 1 diff --git a/web/apps/accounts/src/pages/_app.tsx b/web/apps/accounts/src/pages/_app.tsx index 40a4a1458..a1927f52b 100644 --- a/web/apps/accounts/src/pages/_app.tsx +++ b/web/apps/accounts/src/pages/_app.tsx @@ -1,6 +1,8 @@ import { CustomHead } from "@/next/components/Head"; import { setupI18n } from "@/next/i18n"; import { logUnhandledErrorsAndRejections } from "@/next/log-web"; +import { PAGES } from "@ente/accounts/constants/pages"; +import { accountLogout } from "@ente/accounts/services/logout"; import { APPS, APP_TITLES } from "@ente/shared/apps/constants"; import { Overlay } from "@ente/shared/components/Container"; import DialogBoxV2 from "@ente/shared/components/DialogBoxV2"; @@ -27,6 +29,7 @@ interface AppContextProps { isMobile: boolean; showNavBar: (show: boolean) => void; setDialogBoxAttributesV2: SetDialogBoxAttributesV2; + logout: () => void; } export const AppContext = createContext({} as AppContextProps); @@ -78,6 +81,10 @@ export default function App({ Component, pageProps }: AppProps) { const theme = getTheme(themeColor, APPS.PHOTOS); + const logout = () => { + void accountLogout().then(() => router.push(PAGES.ROOT)); + }; + const title = isI18nReady ? 
t("TITLE", { context: APPS.ACCOUNTS }) : APP_TITLES.get(APPS.ACCOUNTS); @@ -101,6 +108,7 @@ export default function App({ Component, pageProps }: AppProps) { showNavBar, setDialogBoxAttributesV2: setDialogBoxAttributesV2 as any, + logout, }} > {!isI18nReady && ( diff --git a/web/apps/auth/.env b/web/apps/auth/.env new file mode 100644 index 000000000..3f3b1cc9a --- /dev/null +++ b/web/apps/auth/.env @@ -0,0 +1 @@ +NEXT_TELEMETRY_DISABLED = 1 diff --git a/web/apps/auth/src/components/Navbar.tsx b/web/apps/auth/src/components/Navbar.tsx index 293d7fc16..87614d643 100644 --- a/web/apps/auth/src/components/Navbar.tsx +++ b/web/apps/auth/src/components/Navbar.tsx @@ -1,4 +1,3 @@ -import { logoutUser } from "@ente/accounts/services/user"; import { HorizontalFlex } from "@ente/shared/components/Container"; import { EnteLogo } from "@ente/shared/components/EnteLogo"; import NavbarBase from "@ente/shared/components/Navbar/base"; @@ -11,7 +10,7 @@ import { AppContext } from "pages/_app"; import React from "react"; export default function AuthNavbar() { - const { isMobile } = React.useContext(AppContext); + const { isMobile, logout } = React.useContext(AppContext); return ( @@ -25,7 +24,7 @@ export default function AuthNavbar() { } - onClick={logoutUser} + onClick={logout} > {t("LOGOUT")} diff --git a/web/apps/auth/src/pages/_app.tsx b/web/apps/auth/src/pages/_app.tsx index a5aa55f98..a0a579a80 100644 --- a/web/apps/auth/src/pages/_app.tsx +++ b/web/apps/auth/src/pages/_app.tsx @@ -4,6 +4,7 @@ import { logStartupBanner, logUnhandledErrorsAndRejections, } from "@/next/log-web"; +import { accountLogout } from "@ente/accounts/services/logout"; import { APPS, APP_TITLES, @@ -44,6 +45,7 @@ type AppContextType = { setThemeColor: SetTheme; somethingWentWrong: () => void; setDialogBoxAttributesV2: SetDialogBoxAttributesV2; + logout: () => void; }; export const AppContext = createContext(null); @@ -128,6 +130,10 @@ export default function App({ Component, pageProps }: AppProps) { 
content: t("UNKNOWN_ERROR"), }); + const logout = () => { + void accountLogout().then(() => router.push(PAGES.ROOT)); + }; + const title = isI18nReady ? t("TITLE", { context: APPS.AUTH }) : APP_TITLES.get(APPS.AUTH); @@ -162,6 +168,7 @@ export default function App({ Component, pageProps }: AppProps) { setThemeColor, somethingWentWrong, setDialogBoxAttributesV2, + logout, }} > {(loading || !isI18nReady) && ( diff --git a/web/apps/cast/.env b/web/apps/cast/.env new file mode 100644 index 000000000..3f3b1cc9a --- /dev/null +++ b/web/apps/cast/.env @@ -0,0 +1 @@ +NEXT_TELEMETRY_DISABLED = 1 diff --git a/web/apps/cast/src/components/Slide.tsx b/web/apps/cast/src/components/Slide.tsx deleted file mode 100644 index 8309f8bc2..000000000 --- a/web/apps/cast/src/components/Slide.tsx +++ /dev/null @@ -1,56 +0,0 @@ -interface SlideViewProps { - /** The URL of the image to show. */ - url: string; - /** The URL of the next image that we will transition to. */ - nextURL: string; -} - -/** - * Show the image at {@link url} in a full screen view. - * - * Also show {@link nextURL} in an hidden image view to prepare the browser for - * an imminent transition to it. - */ -export const SlideView: React.FC = ({ url, nextURL }) => { - return ( -
-
- - -
-
- ); -}; diff --git a/web/apps/cast/src/pages/_app.tsx b/web/apps/cast/src/pages/_app.tsx index 99b047d41..d85ac0542 100644 --- a/web/apps/cast/src/pages/_app.tsx +++ b/web/apps/cast/src/pages/_app.tsx @@ -1,4 +1,5 @@ import { CustomHead } from "@/next/components/Head"; +import { disableDiskLogs } from "@/next/log"; import { logUnhandledErrorsAndRejections } from "@/next/log-web"; import { APPS, APP_TITLES } from "@ente/shared/apps/constants"; import { getTheme } from "@ente/shared/themes"; @@ -11,6 +12,7 @@ import "styles/global.css"; export default function App({ Component, pageProps }: AppProps) { useEffect(() => { + disableDiskLogs(); logUnhandledErrorsAndRejections(true); return () => logUnhandledErrorsAndRejections(false); }, []); diff --git a/web/apps/cast/src/pages/index.tsx b/web/apps/cast/src/pages/index.tsx index e703c879f..37fcf3d4b 100644 --- a/web/apps/cast/src/pages/index.tsx +++ b/web/apps/cast/src/pages/index.tsx @@ -4,19 +4,15 @@ import { styled } from "@mui/material"; import { PairingCode } from "components/PairingCode"; import { useRouter } from "next/router"; import { useEffect, useState } from "react"; -import { storeCastData } from "services/cast"; -import { advertiseCode, getCastData, register } from "services/pair"; -import { castReceiverLoadingIfNeeded } from "../utils/cast-receiver"; +import { readCastData, storeCastData } from "services/cast-data"; +import { getCastData, register } from "services/pair"; +import { advertiseOnChromecast } from "../services/chromecast"; export default function Index() { const [publicKeyB64, setPublicKeyB64] = useState(); const [privateKeyB64, setPrivateKeyB64] = useState(); const [pairingCode, setPairingCode] = useState(); - // Keep a boolean flag to ensure that Cast Receiver starts only once even if - // pairing codes change. 
- const [haveInitializedCast, setHaveInitializedCast] = useState(false); - const router = useRouter(); useEffect(() => { @@ -27,12 +23,10 @@ export default function Index() { setPairingCode(r.pairingCode); }); } else { - if (!haveInitializedCast) { - castReceiverLoadingIfNeeded().then((cast) => { - setHaveInitializedCast(true); - advertiseCode(cast, () => pairingCode); - }); - } + advertiseOnChromecast( + () => pairingCode, + () => readCastData()?.collectionID, + ); } }, [pairingCode]); @@ -52,7 +46,6 @@ export default function Index() { return; } - log.info("Pairing complete"); storeCastData(data); await router.push("/slideshow"); } catch (e) { diff --git a/web/apps/cast/src/pages/slideshow.tsx b/web/apps/cast/src/pages/slideshow.tsx index d117f6da7..326b183d4 100644 --- a/web/apps/cast/src/pages/slideshow.tsx +++ b/web/apps/cast/src/pages/slideshow.tsx @@ -1,15 +1,16 @@ import log from "@/next/log"; +import { ensure } from "@/utils/ensure"; import { styled } from "@mui/material"; import { FilledCircleCheck } from "components/FilledCircleCheck"; -import { SlideView } from "components/Slide"; import { useRouter } from "next/router"; import { useEffect, useState } from "react"; -import { readCastData, renderableImageURLs } from "services/cast"; +import { readCastData } from "services/cast-data"; +import { isChromecast } from "services/chromecast"; +import { imageURLGenerator } from "services/render"; export default function Slideshow() { const [loading, setLoading] = useState(true); const [imageURL, setImageURL] = useState(); - const [nextImageURL, setNextImageURL] = useState(); const [isEmpty, setIsEmpty] = useState(false); const router = useRouter(); @@ -22,19 +23,18 @@ export default function Slideshow() { const loop = async () => { try { - const urlGenerator = renderableImageURLs(readCastData()); + const urlGenerator = imageURLGenerator(ensure(readCastData())); while (!stop) { - const { value: urls, done } = await urlGenerator.next(); - if (done) { + const { 
value: url, done } = await urlGenerator.next(); + if (done || !url) { // No items in this callection can be shown. setIsEmpty(true); - // Go back to pairing screen after 3 seconds. + // Go back to pairing screen after 5 seconds. setTimeout(pair, 5000); return; } - setImageURL(urls[0]); - setNextImageURL(urls[1]); + setImageURL(url); setLoading(false); } } catch (e) { @@ -50,12 +50,14 @@ export default function Slideshow() { }; }, []); - console.log("Rendering slideshow", { loading, imageURL, nextImageURL }); - if (loading) return ; if (isEmpty) return ; - return ; + return isChromecast() ? ( + + ) : ( + + ); } const PairingComplete: React.FC = () => { @@ -71,19 +73,13 @@ const PairingComplete: React.FC = () => { ); }; -const Message: React.FC = ({ children }) => { - return ( - - {children} - - ); -}; - -const Message_ = styled("div")` +const Message = styled("div")` display: flex; - min-height: 100svh; + flex-direction: column; + height: 100%; justify-content: center; align-items: center; + text-align: center; line-height: 1.5rem; @@ -92,13 +88,6 @@ const Message_ = styled("div")` } `; -const MessageItems = styled("div")` - display: flex; - flex-direction: column; - align-items: center; - text-align: center; -`; - const NoItems: React.FC = () => { return ( @@ -110,3 +99,94 @@ const NoItems: React.FC = () => { ); }; + +interface SlideViewProps { + /** The URL of the image to show. */ + url: string; +} + +const SlideView: React.FC = ({ url }) => { + return ( + + + + ); +}; + +const SlideView_ = styled("div")` + width: 100%; + height: 100%; + + background-size: cover; + background-position: center; + background-repeat: no-repeat; + background-blend-mode: multiply; + background-color: rgba(0, 0, 0, 0.5); + + /* Smooth out the transition a bit. + * + * For the img itself, we set decoding="sync" to have it switch seamlessly. 
+ * But there does not seem to be a way of setting decoding sync for the + * background image, and for large (multi-MB) images the background image + * switch is still visually non-atomic. + * + * As a workaround, add a long transition so that the background image + * transitions in a more "fade-to" manner. This effect might or might not be + * visually the best though. + * + * Does not work in Firefox, but that's fine, this is only a slight tweak, + * not a functional requirement. + */ + transition: all 2s; + + img { + width: 100%; + height: 100%; + backdrop-filter: blur(10px); + object-fit: contain; + } +`; + +/** + * Variant of {@link SlideView} for use when we're running on Chromecast. + * + * Chromecast devices have trouble with + * + * backdrop-filter: blur(10px); + * + * So emulate a cheaper approximation for use on Chromecast. + */ +const SlideViewChromecast: React.FC = ({ url }) => { + return ( + + + + + ); +}; + +const SlideViewChromecast_ = styled("div")` + width: 100%; + height: 100%; + + /* We can't set opacity of background-image, so use a wrapper */ + position: relative; + overflow: hidden; + + img.svc-bg { + position: absolute; + left: 0; + top: 0; + width: 100%; + height: 100%; + object-fit: cover; + opacity: 0.1; + } + + img.svc-content { + position: relative; + width: 100%; + height: 100%; + object-fit: contain; + } +`; diff --git a/web/apps/cast/src/services/cast-data.ts b/web/apps/cast/src/services/cast-data.ts new file mode 100644 index 000000000..587d1db32 --- /dev/null +++ b/web/apps/cast/src/services/cast-data.ts @@ -0,0 +1,41 @@ +export interface CastData { + /** The ID of the callection we are casting. */ + collectionID: string; + /** A key to decrypt the collection we are casting. */ + collectionKey: string; + /** A credential to use for fetching media files for this cast session. */ + castToken: string; +} + +/** + * Save the data received after pairing with a sender into local storage. 
+ * + * We will read in back when we start the slideshow. + */ +export const storeCastData = (payload: unknown) => { + if (!payload || typeof payload != "object") + throw new Error("Unexpected cast data"); + + // Iterate through all the keys of the payload object and save them to + // localStorage. We don't validate here, we'll validate when we read these + // values back in `readCastData`. + for (const key in payload) { + window.localStorage.setItem(key, payload[key]); + } +}; + +/** + * Read back the cast data we got after pairing. + * + * Sibling of {@link storeCastData}. It returns undefined if the expected data + * is not present in localStorage. + */ +export const readCastData = (): CastData | undefined => { + const collectionID = localStorage.getItem("collectionID"); + const collectionKey = localStorage.getItem("collectionKey"); + const castToken = localStorage.getItem("castToken"); + + return collectionID && collectionKey && castToken + ? { collectionID, collectionKey, castToken } + : undefined; +}; diff --git a/web/apps/cast/src/services/chromecast.ts b/web/apps/cast/src/services/chromecast.ts new file mode 100644 index 000000000..e7539e8c5 --- /dev/null +++ b/web/apps/cast/src/services/chromecast.ts @@ -0,0 +1,227 @@ +/// + +import log from "@/next/log"; + +export type Cast = typeof cast; + +/** + * A holder for the "cast" global object exposed by the Chromecast SDK, + * alongwith auxiliary state we need around it. + */ +class CastReceiver { + /** + * A reference to the `cast` global object that the Chromecast Web Receiver + * SDK attaches to the window. + * + * https://developers.google.com/cast/docs/web_receiver/basic + */ + cast: Cast | undefined; + /** + * A promise that allows us to ensure multiple requests to load are funneled + * through the same reified load. + */ + loader: Promise | undefined; + /** + * True if we have already attached listeners (i.e. if we have "started" the + * Chromecast SDK). 
+ * + * Note that "stopping" the Chromecast SDK causes the Chromecast device to + * reload our tab, so this is a one way flag. The stop is something that'll + * only get triggered when we're actually running on a Chromecast since it + * always happens in response to a message handler. + */ + haveStarted = false; + /** + * Cached result of the isChromecast test. + */ + isChromecast: boolean | undefined; + /** + * A callback to invoke to get the pairing code when we get a new incoming + * pairing request. + */ + pairingCode: (() => string | undefined) | undefined; + /** + * A callback to invoke to get the ID of the collection that is currently + * being shown (if any). + */ + collectionID: (() => string | undefined) | undefined; +} + +/** Singleton instance of {@link CastReceiver}. */ +const castReceiver = new CastReceiver(); + +/** + * Listen for incoming messages on the given {@link cast} receiver, replying to + * each of them with a pairing code obtained using the given {@link pairingCode} + * callback. Phase 2 of the pairing protocol. + * + * Calling this function multiple times is fine. The first time around, the + * Chromecast SDK will be loaded and will start listening. Subsequently, each + * time this is call, we'll update the callbacks, but otherwise just return + * immediately (letting the already attached listeners do their thing). + * + * @param pairingCode A callback to invoke to get the pairing code when we get a + * new incoming pairing request. + * + * @param collectionID A callback to invoke to get the ID of the collection that + * is currently being shown (if any). + * + * See: [Note: Pairing protocol]. + */ +export const advertiseOnChromecast = ( + pairingCode: () => string | undefined, + collectionID: () => string | undefined, +) => { + // Always update the callbacks. + castReceiver.pairingCode = pairingCode; + castReceiver.collectionID = collectionID; + + // No-op if we're already running. 
+ if (castReceiver.haveStarted) return; + + void loadingChromecastSDKIfNeeded().then((cast) => advertiseCode(cast)); +}; + +/** + * Load the Chromecast Web Receiver SDK and return a reference to the `cast` + * global object that the SDK attaches to the window. + * + * Calling this function multiple times is fine, once the Chromecast SDK is + * loaded it'll thereafter return the reference to the same object always. + */ +const loadingChromecastSDKIfNeeded = async (): Promise => { + if (castReceiver.cast) return castReceiver.cast; + if (castReceiver.loader) return await castReceiver.loader; + + castReceiver.loader = new Promise((resolve) => { + const script = document.createElement("script"); + script.src = + "https://www.gstatic.com/cast/sdk/libs/caf_receiver/v3/cast_receiver_framework.js"; + script.addEventListener("load", () => { + castReceiver.cast = cast; + resolve(cast); + }); + document.body.appendChild(script); + }); + + return await castReceiver.loader; +}; + +const advertiseCode = (cast: Cast) => { + if (castReceiver.haveStarted) { + // Multiple attempts raced to completion, ignore all but the first. + return; + } + + castReceiver.haveStarted = true; + + // Prepare the Chromecast "context". + const context = cast.framework.CastReceiverContext.getInstance(); + const namespace = "urn:x-cast:pair-request"; + + const options = new cast.framework.CastReceiverOptions(); + // We don't use the media features of the Cast SDK. + options.skipPlayersLoad = true; + // Do not stop the casting if the receiver is unreachable. A user should be + // able to start a cast on their phone and then put it away, leaving the + // cast running on their big screen. + options.disableIdleTimeout = true; + + type ListenerProps = { + senderId: string; + data: unknown; + }; + + // Reply with the code that we have if anyone asks over Chromecast. + const incomingMessageListener = ({ senderId, data }: ListenerProps) => { + // The collection ID with is currently paired (if any). 
+ const pairedCollectionID = castReceiver.collectionID?.(); + + // The collection ID in the request (if any). + const collectionID = + data && + typeof data == "object" && + typeof data["collectionID"] == "string" + ? data["collectionID"] + : undefined; + + // If the request does not have a collectionID (or if we're not showing + // anything currently), forego this check. + + if (collectionID && pairedCollectionID) { + // If we get another connection request for a _different_ collection + // ID, stop the app to allow the second device to reconnect using a + // freshly generated pairing code. + if (pairedCollectionID != collectionID) { + log.info(`request for a new collection ${collectionID}`); + context.stop(); + } else { + // Duplicate request for same collection that we're already + // showing. Ignore. + } + return; + } + + const code = castReceiver.pairingCode?.(); + if (!code) { + // No code, but if we're already showing a collection, then ignore. + if (pairedCollectionID) return; + + // Our caller waits until it has a pairing code before it calls + // `advertiseCode`, but there is still an edge case where we can + // find ourselves without a pairing code: + // + // 1. The current pairing code expires. We start the process to get + // a new one. + // + // 2. But before that happens, someone connects. + // + // The window where this can happen is short, so if we do find + // ourselves in this scenario, just shutdown. + log.error("got pairing request when refreshing pairing codes"); + context.stop(); + return; + } + + context.sendCustomMessage(namespace, senderId, { code }); + }; + + context.addCustomMessageListener( + namespace, + // We need to cast, the `senderId` is present in the message we get but + // not present in the TypeScript type. + incomingMessageListener as unknown as SystemEventHandler, + ); + + // Close the (chromecast) tab if the sender disconnects. + // + // Chromecast does a "shutdown" of our cast app when we call `context.stop`. 
+ // This translates into it closing the tab where it is showing our app. + context.addEventListener( + cast.framework.system.EventType.SENDER_DISCONNECTED, + () => context.stop(), + ); + + // Start listening for Chromecast connections. + context.start(options); +}; + +/** + * Return true if we're running on a Chromecast device. + * + * This allows changing our app's behaviour when we're running on Chromecast. + * Such checks are needed because during our testing we found that in practice, + * some processing is too heavy for Chromecast hardware (we tested with a 2nd + * gen device, this might not be true for newer variants). + * + * This variable is lazily updated when we enter {@link imageURLGenerator}. It + * is kept at the top level to avoid passing it around. + */ +export const isChromecast = () => { + let isCast = castReceiver.isChromecast; + if (isCast === undefined) { + isCast = window.navigator.userAgent.includes("CrKey"); + castReceiver.isChromecast = isCast; + } + return isCast; +}; diff --git a/web/apps/cast/src/services/detect-type.ts b/web/apps/cast/src/services/detect-type.ts index 187e19df8..c43529aae 100644 --- a/web/apps/cast/src/services/detect-type.ts +++ b/web/apps/cast/src/services/detect-type.ts @@ -9,6 +9,9 @@ import FileType from "file-type"; * * It first peeks into the file's initial contents to detect the MIME type. If * that doesn't give any results, it tries to deduce it from the file's name. 
+ * + * For the list of returned extensions, see (for our installed version): + * https://github.com/sindresorhus/file-type/blob/main/core.d.ts */ export const detectMediaMIMEType = async (file: File): Promise => { const chunkSizeForTypeDetection = 4100; diff --git a/web/apps/cast/src/services/pair.ts b/web/apps/cast/src/services/pair.ts index 893681d32..36b54cf75 100644 --- a/web/apps/cast/src/services/pair.ts +++ b/web/apps/cast/src/services/pair.ts @@ -1,9 +1,8 @@ import log from "@/next/log"; +import { wait } from "@/utils/promise"; import { boxSealOpen, toB64 } from "@ente/shared/crypto/internal/libsodium"; import castGateway from "@ente/shared/network/cast"; -import { wait } from "@ente/shared/utils"; import _sodium from "libsodium-wrappers"; -import { type Cast } from "../utils/cast-receiver"; export interface Registration { /** A pairing code shown on the screen. A client can use this to connect. */ @@ -100,105 +99,6 @@ export const register = async (): Promise => { return { pairingCode, publicKeyB64, privateKeyB64 }; }; -/** - * Listen for incoming messages on the given {@link cast} receiver, replying to - * each of them with a pairing code obtained using the given {@link pairingCode} - * callback. Phase 2 of the pairing protocol. - * - * See: [Note: Pairing protocol]. - */ -export const advertiseCode = ( - cast: Cast, - pairingCode: () => string | undefined, -) => { - // Prepare the Chromecast "context". - const context = cast.framework.CastReceiverContext.getInstance(); - const namespace = "urn:x-cast:pair-request"; - - const options = new cast.framework.CastReceiverOptions(); - // We don't use the media features of the Cast SDK. - options.skipPlayersLoad = true; - // Do not stop the casting if the receiver is unreachable. A user should be - // able to start a cast on their phone and then put it away, leaving the - // cast running on their big screen. - options.disableIdleTimeout = true; - - // The collection ID with which we paired. 
If we get another connection - // request for a different collection ID, restart the app to allow them to - // reconnect using a freshly generated pairing code. - // - // If the request does not have a collectionID, forego this check. - let pairedCollectionID: string | undefined; - - type ListenerProps = { - senderId: string; - data: unknown; - }; - - // Reply with the code that we have if anyone asks over Chromecast. - const incomingMessageListener = ({ senderId, data }: ListenerProps) => { - const restart = (reason: string) => { - log.error(`Restarting app because ${reason}`); - // context.stop will close the tab but it'll get reopened again - // immediately since the client app will reconnect in the scenarios - // where we're calling this function. - context.stop(); - }; - - const collectionID = - data && - typeof data == "object" && - typeof data["collectionID"] == "string" - ? data["collectionID"] - : undefined; - - if (pairedCollectionID && pairedCollectionID != collectionID) { - restart(`incoming request for a new collection ${collectionID}`); - return; - } - - pairedCollectionID = collectionID; - - const code = pairingCode(); - if (!code) { - // Our caller waits until it has a pairing code before it calls - // `advertiseCode`, but there is still an edge case where we can - // find ourselves without a pairing code: - // - // 1. The current pairing code expires. We start the process to get - // a new one. - // - // 2. But before that happens, someone connects. - // - // The window where this can happen is short, so if we do find - // ourselves in this scenario, - restart("we got a pairing request when refreshing pairing codes"); - return; - } - - context.sendCustomMessage(namespace, senderId, { code }); - }; - - context.addCustomMessageListener( - namespace, - // We need to cast, the `senderId` is present in the message we get but - // not present in the TypeScript type. 
- incomingMessageListener as unknown as SystemEventHandler, - ); - - // Close the (chromecast) tab if the sender disconnects. - // - // Chromecast does a "shutdown" of our cast app when we call `context.stop`. - // This translates into it closing the tab where it is showing our app. - context.addEventListener( - cast.framework.system.EventType.SENDER_DISCONNECTED, - () => context.stop(), - ); - - // Start listening for Chromecast connections. - context.start(options); -}; - /** * Ask museum if anyone has sent a (encrypted) payload corresponding to the * given pairing code. If so, decrypt it using our private key and return the diff --git a/web/apps/cast/src/services/cast.ts b/web/apps/cast/src/services/render.ts similarity index 62% rename from web/apps/cast/src/services/cast.ts rename to web/apps/cast/src/services/render.ts index 38f203db2..79065c2af 100644 --- a/web/apps/cast/src/services/cast.ts +++ b/web/apps/cast/src/services/render.ts @@ -1,14 +1,23 @@ import { FILE_TYPE } from "@/media/file-type"; -import { isNonWebImageFileExtension } from "@/media/formats"; +import { isHEICExtension, isNonWebImageFileExtension } from "@/media/formats"; import { decodeLivePhoto } from "@/media/live-photo"; +import { createHEICConvertComlinkWorker } from "@/media/worker/heic-convert"; +import type { DedicatedHEICConvertWorker } from "@/media/worker/heic-convert.worker"; import { nameAndExtension } from "@/next/file"; import log from "@/next/log"; +import type { ComlinkWorker } from "@/next/worker/comlink-worker"; import { shuffled } from "@/utils/array"; -import { ensure, ensureString } from "@/utils/ensure"; +import { wait } from "@/utils/promise"; import ComlinkCryptoWorker from "@ente/shared/crypto"; +import { ApiError } from "@ente/shared/error"; import HTTPService from "@ente/shared/network/HTTPService"; -import { getCastFileURL, getEndpoint } from "@ente/shared/network/api"; -import { wait } from "@ente/shared/utils"; +import { + getCastFileURL, + getCastThumbnailURL, 
+ getEndpoint, +} from "@ente/shared/network/api"; +import type { AxiosResponse } from "axios"; +import type { CastData } from "services/cast-data"; import { detectMediaMIMEType } from "services/detect-type"; import { EncryptedEnteFile, @@ -16,53 +25,20 @@ import { FileMagicMetadata, FilePublicMagicMetadata, } from "types/file"; +import { isChromecast } from "./chromecast"; /** - * Save the data received after pairing with a sender into local storage. - * - * We will read in back when we start the slideshow. + * If we're using HEIC conversion, then this variable caches the comlink web + * worker we're using to perform the actual conversion. */ -export const storeCastData = (payload: unknown) => { - if (!payload || typeof payload != "object") - throw new Error("Unexpected cast data"); - - // Iterate through all the keys of the payload object and save them to - // localStorage. We don't validate here, we'll validate when we read these - // values back in `readCastData`. - for (const key in payload) { - window.localStorage.setItem(key, payload[key]); - } -}; - -interface CastData { - /** A key to decrypt the collection we are casting. */ - collectionKey: string; - /** A credential to use for fetching media files for this cast session. */ - castToken: string; -} - -/** - * Read back the cast data we got after pairing. - * - * Sibling of {@link storeCastData}. It throws an error if the expected data is - * not present in localStorage. - */ -export const readCastData = (): CastData => { - const collectionKey = ensureString(localStorage.getItem("collectionKey")); - const castToken = ensureString(localStorage.getItem("castToken")); - return { collectionKey, castToken }; -}; - -type RenderableImageURLPair = [url: string, nextURL: string]; +let heicWorker: ComlinkWorker | undefined; /** * An async generator function that loops through all the files in the - * collection, returning renderable URLs to each that can be displayed in a - * slideshow. 
+ * collection, returning renderable image URLs to each that can be displayed in + * a slideshow. * - * Each time it resolves with a pair of URLs (a {@link RenderableImageURLPair}), - * one for the next slideshow image, and one for the slideshow image that will - * be displayed after that. It also pre-fetches the next to next URL each time. + * Each time it resolves with a (data) URL for the slideshow image to show next. * * If there are no renderable image in the collection, the sequence ends by * yielding `{done: true}`. @@ -73,14 +49,18 @@ type RenderableImageURLPair = [url: string, nextURL: string]; * * The generator ignores errors in the fetching and decoding of individual * images in the collection, skipping the erroneous ones and moving onward to - * the next one. It will however throw if there are errors when getting the - * collection itself. This can happen both the first time, or when we are about - * to loop around to the start of the collection. + * the next one. + * + * - It will however throw if there are errors when getting the collection + * itself. This can happen both the first time, or when we are about to loop + * around to the start of the collection. + * + * - It will also throw if three consecutive images fail. * * @param castData The collection to show and credentials to fetch the files * within it. */ -export const renderableImageURLs = async function* (castData: CastData) { +export const imageURLGenerator = async function* (castData: CastData) { const { collectionKey, castToken } = castData; /** @@ -89,11 +69,8 @@ export const renderableImageURLs = async function* (castData: CastData) { */ const previousURLs: string[] = []; - /** The URL pair that we will yield */ - const urls: string[] = []; - /** Number of milliseconds to keep the slide on the screen. */ - const slideDuration = 10000; /* 10 s */ + const slideDuration = 12000; /* 12 s */ /** * Time when we last yielded. 
@@ -108,6 +85,14 @@ export const renderableImageURLs = async function* (castData: CastData) { // bit, for the user to see the checkmark animation as reassurance). lastYieldTime -= slideDuration - 2500; /* wait at most 2.5 s */ + /** + * Number of times we have caught an exception while trying to generate an + * image URL for individual files. + * + * When this happens three times consecutively, we throw. + */ + let consecutiveFailures = 0; + while (true) { const encryptedFiles = shuffled( await getEncryptedCollectionFiles(castToken), @@ -118,30 +103,34 @@ for (const encryptedFile of encryptedFiles) { const file = await decryptEnteFile(encryptedFile, collectionKey); - if (!isFileEligibleForCast(file)) continue; + if (!isFileEligible(file)) continue; - console.log("will start createRenderableURL", new Date()); + let url: string; try { - urls.push(await createRenderableURL(castToken, file)); + url = await createRenderableURL(castToken, file); + consecutiveFailures = 0; haveEligibleFiles = true; } catch (e) { + consecutiveFailures += 1; + // 1, 2, bang! + if (consecutiveFailures == 3) throw e; + + if (e instanceof ApiError && e.httpStatusCode == 401) { + // The token has expired. This can happen, e.g., if the user + // opens the dialog to cast again, causing the client to + // invalidate existing tokens. + // + // Rethrow the error, which will bring us back to the + // pairing page. + throw e; + } + + // On all other errors (including temporary network issues), log.error("Skipping unrenderable file", e); + await wait(100); /* Breathe */ continue; } - console.log("did end createRenderableURL", new Date()); - - // Need at least a pair. - // - // There are two scenarios: - // - // - First run: urls will initially be empty, so gobble two. - // - // - Subsequently, urls will have the "next" / "preloaded" URL left - // over from the last time. 
We'll promote that to being the one - // that'll get displayed, and preload another one. - // if (urls.length < 2) continue; - // The last element of previousURLs is the URL that is currently // being shown on screen. // @@ -150,23 +139,14 @@ export const renderableImageURLs = async function* (castData: CastData) { if (previousURLs.length > 1) URL.revokeObjectURL(previousURLs.shift()); - // The URL that'll now get displayed on screen. - const url = ensure(urls.shift()); - // The URL that we're preloading for next time around. - const nextURL = ""; //ensure(urls[0]); - previousURLs.push(url); - const urlPair: RenderableImageURLPair = [url, nextURL]; - const elapsedTime = Date.now() - lastYieldTime; - if (elapsedTime > 0 && elapsedTime < slideDuration) { - console.log("waiting", slideDuration - elapsedTime); + if (elapsedTime > 0 && elapsedTime < slideDuration) await wait(slideDuration - elapsedTime); - } lastYieldTime = Date.now(); - yield urlPair; + yield url; } // This collection does not have any files that we can show. @@ -185,7 +165,7 @@ const getEncryptedCollectionFiles = async ( ): Promise => { let files: EncryptedEnteFile[] = []; let sinceTime = 0; - let resp; + let resp: AxiosResponse; do { resp = await HTTPService.get( `${getEndpoint()}/cast/diff`, @@ -269,12 +249,19 @@ const decryptEnteFile = async ( return file; }; -const isFileEligibleForCast = (file: EnteFile) => { +const isFileEligible = (file: EnteFile) => { if (!isImageOrLivePhoto(file)) return false; if (file.info.fileSize > 100 * 1024 * 1024) return false; + // This check is fast but potentially incorrect because in practice we do + // encounter files that are incorrectly named and have a misleading + // extension. To detect the actual type, we need to sniff the MIME type, but + // that requires downloading and decrypting the file first. 
const [, extension] = nameAndExtension(file.metadata.title); - if (isNonWebImageFileExtension(extension)) return false; + if (isNonWebImageFileExtension(extension)) { + // Of the known non-web types, we support HEIC. + return isHEICExtension(extension); + } return true; }; @@ -284,6 +271,12 @@ const isImageOrLivePhoto = (file: EnteFile) => { return fileType == FILE_TYPE.IMAGE || fileType == FILE_TYPE.LIVE_PHOTO; }; +export const heicToJPEG = async (heicBlob: Blob) => { + let worker = heicWorker; + if (!worker) heicWorker = worker = createHEICConvertComlinkWorker(); + return await (await worker.remote).heicToJPEG(heicBlob); +}; + /** * Create and return a new data URL that can be used to show the given * {@link file} in our slideshow image viewer. @@ -291,29 +284,50 @@ const isImageOrLivePhoto = (file: EnteFile) => { * Once we're done showing the file, the URL should be revoked using * {@link URL.revokeObjectURL} to free up browser resources. */ -const createRenderableURL = async (castToken: string, file: EnteFile) => - URL.createObjectURL(await renderableImageBlob(castToken, file)); +const createRenderableURL = async (castToken: string, file: EnteFile) => { + const imageBlob = await renderableImageBlob(castToken, file); + return URL.createObjectURL(imageBlob); +}; const renderableImageBlob = async (castToken: string, file: EnteFile) => { - const fileName = file.metadata.title; - let blob = await downloadFile(castToken, file); - if (file.metadata.fileType === FILE_TYPE.LIVE_PHOTO) { - const { imageData } = await decodeLivePhoto(fileName, blob); + const shouldUseThumbnail = isChromecast(); + + let blob = await downloadFile(castToken, file, shouldUseThumbnail); + + let fileName = file.metadata.title; + if (!shouldUseThumbnail && file.metadata.fileType == FILE_TYPE.LIVE_PHOTO) { + const { imageData, imageFileName } = await decodeLivePhoto( + fileName, + blob, + ); + fileName = imageFileName; blob = new Blob([imageData]); } + + // We cannot rely on the file's extension 
to detect the file type, some + // files are incorrectly named. So use a MIME type sniffer first, but if + // that fails then fall back to the extension. const mimeType = await detectMediaMIMEType(new File([blob], fileName)); if (!mimeType) throw new Error(`Could not detect MIME type for file ${fileName}`); + + if (mimeType == "image/heif" || mimeType == "image/heic") + blob = await heicToJPEG(blob); + return new Blob([blob], { type: mimeType }); }; -const downloadFile = async (castToken: string, file: EnteFile) => { +const downloadFile = async ( + castToken: string, + file: EnteFile, + shouldUseThumbnail: boolean, +) => { if (!isImageOrLivePhoto(file)) throw new Error("Can only cast images and live photos"); - const url = getCastFileURL(file.id); - // TODO(MR): Remove if usused eventually - // const url = getCastThumbnailURL(file.id); + const url = shouldUseThumbnail + ? getCastThumbnailURL(file.id) + : getCastFileURL(file.id); const resp = await HTTPService.get( url, null, @@ -327,9 +341,11 @@ const downloadFile = async (castToken: string, file: EnteFile) => { const cryptoWorker = await ComlinkCryptoWorker.getInstance(); const decrypted = await cryptoWorker.decryptFile( new Uint8Array(resp.data), - await cryptoWorker.fromB64(file.file.decryptionHeader), - // TODO(MR): Remove if usused eventually - // await cryptoWorker.fromB64(file.thumbnail.decryptionHeader), + await cryptoWorker.fromB64( + shouldUseThumbnail + ? 
file.thumbnail.decryptionHeader + : file.file.decryptionHeader, + ), file.key, ); return new Response(decrypted).blob(); diff --git a/web/apps/cast/src/utils/cast-receiver.tsx b/web/apps/cast/src/utils/cast-receiver.tsx deleted file mode 100644 index 666a085ed..000000000 --- a/web/apps/cast/src/utils/cast-receiver.tsx +++ /dev/null @@ -1,32 +0,0 @@ -/// - -export type Cast = typeof cast; - -let _cast: Cast | undefined; -let _loader: Promise | undefined; - -/** - * Load the Chromecast Web Receiver SDK and return a reference to the `cast` - * global object that the SDK attaches to the window. - * - * Calling this function multiple times is fine, once the Chromecast SDK is - * loaded it'll thereafter return the reference to the same object always. - * - * https://developers.google.com/cast/docs/web_receiver/basic - */ -export const castReceiverLoadingIfNeeded = async (): Promise => { - if (_cast) return _cast; - if (_loader) return await _loader; - - _loader = new Promise((resolve) => { - const script = document.createElement("script"); - script.src = - "https://www.gstatic.com/cast/sdk/libs/caf_receiver/v3/cast_receiver_framework.js"; - - script.addEventListener("load", () => resolve(cast)); - document.body.appendChild(script); - }); - const c = await _loader; - _cast = c; - return c; -}; diff --git a/web/apps/payments/.env b/web/apps/payments/.env new file mode 100644 index 000000000..3f3b1cc9a --- /dev/null +++ b/web/apps/payments/.env @@ -0,0 +1 @@ +NEXT_TELEMETRY_DISABLED = 1 diff --git a/web/apps/photos/.env b/web/apps/photos/.env index a039e9105..978c67776 100644 --- a/web/apps/photos/.env +++ b/web/apps/photos/.env @@ -88,3 +88,5 @@ # NEXT_PUBLIC_ENTE_TEST_EXPECTED_JSON=`cat path/to/expected.json` yarn dev # # NEXT_PUBLIC_ENTE_TEST_EXPECTED_JSON = {} + +NEXT_TELEMETRY_DISABLED = 1 diff --git a/web/apps/photos/package.json b/web/apps/photos/package.json index 0aa09f101..a200c8ef7 100644 --- a/web/apps/photos/package.json +++ b/web/apps/photos/package.json @@ 
-16,20 +16,17 @@ "chrono-node": "^2.2.6", "date-fns": "^2", "debounce": "^2.0.0", - "density-clustering": "^1.3.0", "eventemitter3": "^4.0.7", "exifr": "^7.1.3", "fast-srp-hap": "^2.0.4", "ffmpeg-wasm": "file:./thirdparty/ffmpeg-wasm", "formik": "^2.1.5", "hdbscan": "0.0.1-alpha.5", - "heic-convert": "^2.0.0", "idb": "^7.1.1", "leaflet": "^1.9.4", "leaflet-defaulticon-compatibility": "^0.1.1", "localforage": "^1.9.0", "memoize-one": "^6.0.0", - "mime-types": "^2.1.35", "ml-matrix": "^6.10.4", "otpauth": "^9.0.2", "p-debounce": "^4.0.0", diff --git a/web/apps/photos/src/components/Collections/CollectionOptions/AlbumCastDialog.tsx b/web/apps/photos/src/components/Collections/CollectionOptions/AlbumCastDialog.tsx index 3d9d06166..8b92f1cbb 100644 --- a/web/apps/photos/src/components/Collections/CollectionOptions/AlbumCastDialog.tsx +++ b/web/apps/photos/src/components/Collections/CollectionOptions/AlbumCastDialog.tsx @@ -32,7 +32,11 @@ declare global { } } -export default function AlbumCastDialog(props: Props) { +export default function AlbumCastDialog({ + show, + onHide, + currentCollection, +}: Props) { const [view, setView] = useState< "choose" | "auto" | "pin" | "auto-cast-error" >("choose"); @@ -51,7 +55,7 @@ export default function AlbumCastDialog(props: Props) { ) => { try { await doCast(value.trim()); - props.onHide(); + onHide(); } catch (e) { const error = e as Error; let fieldError: string; @@ -80,8 +84,8 @@ export default function AlbumCastDialog(props: Props) { // ok, they exist. let's give them the good stuff. 
const payload = JSON.stringify({ castToken: castToken, - collectionID: props.currentCollection.id, - collectionKey: props.currentCollection.key, + collectionID: currentCollection.id, + collectionKey: currentCollection.key, }); const encryptedPayload = await boxSeal(btoa(payload), tvPublicKeyB64); @@ -89,7 +93,7 @@ export default function AlbumCastDialog(props: Props) { await castGateway.publishCastPayload( pin, encryptedPayload, - props.currentCollection.id, + currentCollection.id, castToken, ); }; @@ -119,7 +123,7 @@ export default function AlbumCastDialog(props: Props) { doCast(code) .then(() => { setView("choose"); - props.onHide(); + onHide(); }) .catch((e) => { setView("auto-cast-error"); @@ -129,8 +133,9 @@ export default function AlbumCastDialog(props: Props) { }, ); + const collectionID = currentCollection.id; session - .sendMessage("urn:x-cast:pair-request", {}) + .sendMessage("urn:x-cast:pair-request", { collectionID }) .then(() => { log.debug(() => "Message sent successfully"); }) @@ -142,16 +147,16 @@ export default function AlbumCastDialog(props: Props) { }, [view]); useEffect(() => { - if (props.show) { + if (show) { castGateway.revokeAllTokens(); } - }, [props.show]); + }, [show]); return ( {t("LEAVE_ALBUM")} + } + onClick={handleCollectionAction( + CollectionActions.SHOW_ALBUM_CAST_DIALOG, + false, + )} + > + {t("CAST_ALBUM_TO_TV")} + ); } diff --git a/web/apps/photos/src/components/DeleteAccountModal.tsx b/web/apps/photos/src/components/DeleteAccountModal.tsx index 744fbf312..d6eb3a037 100644 --- a/web/apps/photos/src/components/DeleteAccountModal.tsx +++ b/web/apps/photos/src/components/DeleteAccountModal.tsx @@ -1,5 +1,4 @@ import log from "@/next/log"; -import { logoutUser } from "@ente/accounts/services/user"; import DialogBoxV2 from "@ente/shared/components/DialogBoxV2"; import EnteButton from "@ente/shared/components/EnteButton"; import { DELETE_ACCOUNT_EMAIL } from "@ente/shared/constants/urls"; @@ -43,7 +42,8 @@ const getReasonOptions = (): 
DropdownOption[] => { }; const DeleteAccountModal = ({ open, onClose }: Iprops) => { - const { setDialogBoxAttributesV2, isMobile } = useContext(AppContext); + const { setDialogBoxAttributesV2, isMobile, logout } = + useContext(AppContext); const { authenticateUser } = useContext(GalleryContext); const [loading, setLoading] = useState(false); const deleteAccountChallenge = useRef(); @@ -145,7 +145,7 @@ const DeleteAccountModal = ({ open, onClose }: Iprops) => { ); const { reason, feedback } = reasonAndFeedbackRef.current; await deleteAccount(decryptedChallenge, reason, feedback); - logoutUser(); + logout(); } catch (e) { log.error("solveChallengeAndDeleteAccount failed", e); somethingWentWrong(); diff --git a/web/apps/photos/src/components/PhotoViewer/ImageEditorOverlay/index.tsx b/web/apps/photos/src/components/PhotoViewer/ImageEditorOverlay/index.tsx index 42edddbf1..c4e1f5854 100644 --- a/web/apps/photos/src/components/PhotoViewer/ImageEditorOverlay/index.tsx +++ b/web/apps/photos/src/components/PhotoViewer/ImageEditorOverlay/index.tsx @@ -1,24 +1,6 @@ +import { nameAndExtension } from "@/next/file"; import log from "@/next/log"; -import { - Backdrop, - Box, - CircularProgress, - IconButton, - Tab, - Tabs, - Typography, -} from "@mui/material"; -import { - Dispatch, - MutableRefObject, - SetStateAction, - createContext, - useContext, - useEffect, - useRef, - useState, -} from "react"; - +import { ensure } from "@/utils/ensure"; import { CenteredFlex, HorizontalFlex, @@ -32,6 +14,15 @@ import CropIcon from "@mui/icons-material/Crop"; import CropOriginalIcon from "@mui/icons-material/CropOriginal"; import DownloadIcon from "@mui/icons-material/Download"; import MenuIcon from "@mui/icons-material/Menu"; +import { + Backdrop, + Box, + CircularProgress, + IconButton, + Tab, + Tabs, + Typography, +} from "@mui/material"; import { EnteDrawer } from "components/EnteDrawer"; import { EnteMenuItem } from "components/Menu/EnteMenuItem"; import MenuItemDivider from 
"components/Menu/MenuItemDivider"; @@ -39,10 +30,18 @@ import { MenuItemGroup } from "components/Menu/MenuItemGroup"; import MenuSectionTitle from "components/Menu/MenuSectionTitle"; import { CORNER_THRESHOLD, FILTER_DEFAULT_VALUES } from "constants/photoEditor"; import { t } from "i18next"; -import mime from "mime-types"; import { AppContext } from "pages/_app"; +import { + Dispatch, + MutableRefObject, + SetStateAction, + createContext, + useContext, + useEffect, + useRef, + useState, +} from "react"; import { getLocalCollections } from "services/collectionService"; -import { detectFileTypeInfo } from "services/detect-type"; import downloadManager from "services/download"; import uploadManager from "services/upload/uploadManager"; import { EnteFile } from "types/file"; @@ -72,13 +71,6 @@ export const ImageEditorOverlayContext = createContext( type OperationTab = "crop" | "transform" | "colours"; -const getEditedFileName = (fileName: string) => { - const fileNameParts = fileName.split("."); - const extension = fileNameParts.pop(); - const editedFileName = `${fileNameParts.join(".")}-edited.${extension}`; - return editedFileName; -}; - export interface CropBoxProps { x: number; y: number; @@ -94,6 +86,10 @@ const ImageEditorOverlay = (props: IProps) => { const parentRef = useRef(null); const [fileURL, setFileURL] = useState(""); + // The MIME type of the original file that we are editing. + // + // It _should_ generally be present, but it is not guaranteed to be. + const [mimeType, setMIMEType] = useState(); const [currentRotationAngle, setCurrentRotationAngle] = useState(0); @@ -372,6 +368,10 @@ const ImageEditorOverlay = (props: IProps) => { ); img.src = srcURLs.url as string; setFileURL(srcURLs.url as string); + // We're casting the srcURLs.url to string above, i.e. this code + // is not meant to run for the live photos scenario. For images, + // we usually will have the mime type. 
+ setMIMEType(srcURLs.mimeType); } else { img.src = fileURL; } @@ -430,37 +430,6 @@ const ImageEditorOverlay = (props: IProps) => { loadCanvas(); }, [props.show, props.file]); - const exportCanvasToBlob = (): Promise => { - try { - const canvas = originalSizeCanvasRef.current; - if (!canvas) return; - - const mimeType = mime.lookup(props.file.metadata.title); - - const image = new Image(); - image.src = canvas.toDataURL(); - - const context = canvas.getContext("2d"); - if (!context) return; - return new Promise((resolve) => { - canvas.toBlob(resolve, mimeType); - }); - } catch (e) { - log.error("Error exporting canvas to blob", e); - throw e; - } - }; - - const getEditedFile = async () => { - const blob = await exportCanvasToBlob(); - if (!blob) { - throw Error("no blob"); - } - const editedFileName = getEditedFileName(props.file.metadata.title); - const editedFile = new File([blob], editedFileName); - return editedFile; - }; - const handleClose = () => { setFileURL(null); props.onClose(); @@ -480,25 +449,23 @@ const ImageEditorOverlay = (props: IProps) => { return <>; } - const downloadEditedPhoto = async () => { - try { - if (!canvasRef.current) return; + const getEditedFile = async () => { + const originalSizeCanvas = ensure(originalSizeCanvasRef.current); + const originalFileName = props.file.metadata.title; + return canvasToFile(originalSizeCanvas, originalFileName, mimeType); + }; - const editedFile = await getEditedFile(); - const fileType = await detectFileTypeInfo(editedFile); - const tempImgURL = URL.createObjectURL( - new Blob([editedFile], { type: fileType.mimeType }), - ); - downloadUsingAnchor(tempImgURL, editedFile.name); - } catch (e) { - log.error("Error downloading edited photo", e); - } + const downloadEditedPhoto = async () => { + if (!canvasRef.current) return; + + const f = await getEditedFile(); + // Revokes the URL after downloading. 
+ downloadUsingAnchor(URL.createObjectURL(f), f.name); }; const saveCopyToEnte = async () => { + if (!canvasRef.current) return; try { - if (!canvasRef.current) return; - const collections = await getLocalCollections(); const collection = collections.find( @@ -678,7 +645,7 @@ const ImageEditorOverlay = (props: IProps) => { setCurrentTab(value); }} > - + { }; export default ImageEditorOverlay; + +/** + * Create a new {@link File} with the contents of the given canvas. + * + * @param canvas A {@link HTMLCanvasElement} whose contents we want to download + * as a file. + * + * @param originalFileName The name of the original file which was used to seed + * the canvas. This will be used as a base name for the generated file (with an + * "-edited" suffix). + * + * @param originalMIMEType The MIME type of the original file which was used to + * seed the canvas. When possible, we try to download a file in the same format, + * but this is not guaranteed and depends on browser support. If the original + * MIME type can not be preserved, a PNG file will be downloaded. + */ +const canvasToFile = async ( + canvas: HTMLCanvasElement, + originalFileName: string, + originalMIMEType?: string, +): Promise => { + const image = new Image(); + image.src = canvas.toDataURL(); + + // Browsers are required to support "image/png". They may also support + // "image/jpeg" and "image/webp". Potentially they may even support more + // formats, but to keep this scoped we limit to these three. 
+ let [mimeType, extension] = ["image/png", "png"]; + switch (originalMIMEType) { + case "image/jpeg": + mimeType = originalMIMEType; + extension = "jpeg"; + break; + case "image/webp": + mimeType = originalMIMEType; + extension = "webp"; + break; + default: + break; + } + + const blob = ensure( + await new Promise((resolve) => canvas.toBlob(resolve, mimeType)), + ); + + const [originalName] = nameAndExtension(originalFileName); + const fileName = `${originalName}-edited.${extension}`; + + log.debug(() => ({ a: "canvas => file", blob, type: blob.type, mimeType })); + + return new File([blob], fileName); +}; diff --git a/web/apps/photos/src/components/Search/SearchBar/searchInput/MenuWithPeople.tsx b/web/apps/photos/src/components/Search/SearchBar/searchInput/MenuWithPeople.tsx index 6ebc0d942..3b739520e 100644 --- a/web/apps/photos/src/components/Search/SearchBar/searchInput/MenuWithPeople.tsx +++ b/web/apps/photos/src/components/Search/SearchBar/searchInput/MenuWithPeople.tsx @@ -5,7 +5,7 @@ import { t } from "i18next"; import { AppContext } from "pages/_app"; import { useContext } from "react"; import { components } from "react-select"; -import { IndexStatus } from "types/machineLearning/ui"; +import { IndexStatus } from "services/face/db"; import { Suggestion, SuggestionType } from "types/search"; const { Menu } = components; diff --git a/web/apps/photos/src/components/Search/SearchBar/searchInput/index.tsx b/web/apps/photos/src/components/Search/SearchBar/searchInput/index.tsx index 3f737b3e0..1e62422dc 100644 --- a/web/apps/photos/src/components/Search/SearchBar/searchInput/index.tsx +++ b/web/apps/photos/src/components/Search/SearchBar/searchInput/index.tsx @@ -9,6 +9,7 @@ import { useCallback, useContext, useEffect, useRef, useState } from "react"; import { components } from "react-select"; import AsyncSelect from "react-select/async"; import { InputActionMeta } from "react-select/src/types"; +import { Person } from "services/face/types"; import { City } 
from "services/locationSearchService"; import { getAutoCompleteSuggestions, @@ -17,7 +18,6 @@ import { import { Collection } from "types/collection"; import { LocationTagData } from "types/entity"; import { EnteFile } from "types/file"; -import { Person } from "types/machineLearning"; import { ClipSearchScores, DateValue, diff --git a/web/apps/photos/src/components/Sidebar/AdvancedSettings.tsx b/web/apps/photos/src/components/Sidebar/AdvancedSettings.tsx index 6dc9b851e..ed03bc917 100644 --- a/web/apps/photos/src/components/Sidebar/AdvancedSettings.tsx +++ b/web/apps/photos/src/components/Sidebar/AdvancedSettings.tsx @@ -1,18 +1,17 @@ +import { VerticallyCenteredFlex } from "@ente/shared/components/Container"; import ChevronRight from "@mui/icons-material/ChevronRight"; import ScienceIcon from "@mui/icons-material/Science"; import { Box, DialogProps, Stack, Typography } from "@mui/material"; import { EnteDrawer } from "components/EnteDrawer"; +import { EnteMenuItem } from "components/Menu/EnteMenuItem"; +import { MenuItemGroup } from "components/Menu/MenuItemGroup"; import MenuSectionTitle from "components/Menu/MenuSectionTitle"; import Titlebar from "components/Titlebar"; import { MLSearchSettings } from "components/ml/MLSearchSettings"; import { t } from "i18next"; -import { useContext, useEffect, useState } from "react"; - -import { VerticallyCenteredFlex } from "@ente/shared/components/Container"; -import { EnteMenuItem } from "components/Menu/EnteMenuItem"; -import { MenuItemGroup } from "components/Menu/MenuItemGroup"; import isElectron from "is-electron"; import { AppContext } from "pages/_app"; +import { useContext, useEffect, useState } from "react"; import { CLIPIndexingStatus, clipService } from "services/clip-service"; import { formatNumber } from "utils/number/format"; diff --git a/web/apps/photos/src/components/Sidebar/ExitSection.tsx b/web/apps/photos/src/components/Sidebar/ExitSection.tsx index 6f9492b77..272f2c572 100644 --- 
a/web/apps/photos/src/components/Sidebar/ExitSection.tsx +++ b/web/apps/photos/src/components/Sidebar/ExitSection.tsx @@ -1,13 +1,11 @@ -import { t } from "i18next"; -import { useContext, useState } from "react"; - -import { logoutUser } from "@ente/accounts/services/user"; import DeleteAccountModal from "components/DeleteAccountModal"; import { EnteMenuItem } from "components/Menu/EnteMenuItem"; +import { t } from "i18next"; import { AppContext } from "pages/_app"; +import { useContext, useState } from "react"; export default function ExitSection() { - const { setDialogMessage } = useContext(AppContext); + const { setDialogMessage, logout } = useContext(AppContext); const [deleteAccountModalView, setDeleteAccountModalView] = useState(false); @@ -19,7 +17,7 @@ export default function ExitSection() { title: t("LOGOUT_MESSAGE"), proceed: { text: t("LOGOUT"), - action: logoutUser, + action: logout, variant: "critical", }, close: { text: t("CANCEL") }, diff --git a/web/apps/photos/src/components/Sidebar/Preferences/LanguageSelector.tsx b/web/apps/photos/src/components/Sidebar/Preferences/LanguageSelector.tsx index bdc0d5a84..4c4a13a50 100644 --- a/web/apps/photos/src/components/Sidebar/Preferences/LanguageSelector.tsx +++ b/web/apps/photos/src/components/Sidebar/Preferences/LanguageSelector.tsx @@ -9,9 +9,7 @@ import { t } from "i18next"; import { useRouter } from "next/router"; /** - * Human readable name for each supported locale - * - * TODO (MR): This names themselves should be localized. + * Human readable name for each supported locale. 
*/ export const localeName = (locale: SupportedLocale) => { switch (locale) { diff --git a/web/apps/photos/src/components/Sidebar/UtilitySection.tsx b/web/apps/photos/src/components/Sidebar/UtilitySection.tsx index 6b4a6f43d..32f61d976 100644 --- a/web/apps/photos/src/components/Sidebar/UtilitySection.tsx +++ b/web/apps/photos/src/components/Sidebar/UtilitySection.tsx @@ -9,7 +9,7 @@ import { t } from "i18next"; import { useRouter } from "next/router"; import { AppContext } from "pages/_app"; import { useContext, useState } from "react"; -// import mlIDbStorage from 'utils/storage/mlIDbStorage'; +// import mlIDbStorage from 'services/ml/db'; import { configurePasskeyRecovery, isPasskeyRecoveryEnabled, diff --git a/web/apps/photos/src/components/ml/PeopleList.tsx b/web/apps/photos/src/components/ml/PeopleList.tsx index 4691d4b65..9e5620a5c 100644 --- a/web/apps/photos/src/components/ml/PeopleList.tsx +++ b/web/apps/photos/src/components/ml/PeopleList.tsx @@ -3,9 +3,9 @@ import { Skeleton, styled } from "@mui/material"; import { Legend } from "components/PhotoViewer/styledComponents/Legend"; import { t } from "i18next"; import React, { useEffect, useState } from "react"; +import mlIDbStorage from "services/face/db"; +import { Face, Person, type MlFileData } from "services/face/types"; import { EnteFile } from "types/file"; -import { Face, Person } from "types/machineLearning"; -import { getPeopleList, getUnidentifiedFaces } from "utils/machineLearning"; const FaceChipContainer = styled("div")` display: flex; @@ -167,10 +167,7 @@ const FaceCropImageView: React.FC = ({ .legacyFaceCrop(faceID) /* cachedOrNew("face-crops", cacheKey, async () => { - const user = await ensureLocalUser(); return machineLearningService.regenerateFaceCrop( - user.token, - user.id, faceId, ); })*/ @@ -194,3 +191,45 @@ const FaceCropImageView: React.FC = ({ ); }; + +async function getPeopleList(file: EnteFile): Promise> { + let startTime = Date.now(); + const mlFileData: MlFileData = await 
mlIDbStorage.getFile(file.id); + log.info( + "getPeopleList:mlFilesStore:getItem", + Date.now() - startTime, + "ms", + ); + if (!mlFileData?.faces || mlFileData.faces.length < 1) { + return []; + } + + const peopleIds = mlFileData.faces + .filter((f) => f.personId !== null && f.personId !== undefined) + .map((f) => f.personId); + if (!peopleIds || peopleIds.length < 1) { + return []; + } + // log.info("peopleIds: ", peopleIds); + startTime = Date.now(); + const peoplePromises = peopleIds.map( + (p) => mlIDbStorage.getPerson(p) as Promise, + ); + const peopleList = await Promise.all(peoplePromises); + log.info( + "getPeopleList:mlPeopleStore:getItems", + Date.now() - startTime, + "ms", + ); + // log.info("peopleList: ", peopleList); + + return peopleList; +} + +async function getUnidentifiedFaces(file: EnteFile): Promise> { + const mlFileData: MlFileData = await mlIDbStorage.getFile(file.id); + + return mlFileData?.faces?.filter( + (f) => f.personId === null || f.personId === undefined, + ); +} diff --git a/web/apps/photos/src/constants/mlConfig.ts b/web/apps/photos/src/constants/mlConfig.ts deleted file mode 100644 index 929594e1c..000000000 --- a/web/apps/photos/src/constants/mlConfig.ts +++ /dev/null @@ -1,56 +0,0 @@ -import { JobConfig } from "types/common/job"; -import { MLSearchConfig, MLSyncConfig } from "types/machineLearning"; - -export const DEFAULT_ML_SYNC_JOB_CONFIG: JobConfig = { - intervalSec: 5, - // TODO: finalize this after seeing effects on and from machine sleep - maxItervalSec: 960, - backoffMultiplier: 2, -}; - -export const DEFAULT_ML_SYNC_CONFIG: MLSyncConfig = { - batchSize: 200, - imageSource: "Original", - faceDetection: { - method: "YoloFace", - }, - faceCrop: { - enabled: true, - method: "ArcFace", - padding: 0.25, - maxSize: 256, - blobOptions: { - type: "image/jpeg", - quality: 0.8, - }, - }, - faceAlignment: { - method: "ArcFace", - }, - blurDetection: { - method: "Laplacian", - threshold: 15, - }, - faceEmbedding: { - method: 
"MobileFaceNet", - faceSize: 112, - generateTsne: true, - }, - faceClustering: { - method: "Hdbscan", - minClusterSize: 3, - minSamples: 5, - clusterSelectionEpsilon: 0.6, - clusterSelectionMethod: "leaf", - minInputSize: 50, - // maxDistanceInsideCluster: 0.4, - generateDebugInfo: true, - }, - mlVersion: 3, -}; - -export const DEFAULT_ML_SEARCH_CONFIG: MLSearchConfig = { - enabled: false, -}; - -export const MAX_ML_SYNC_ERROR_COUNT = 1; diff --git a/web/apps/photos/src/pages/_app.tsx b/web/apps/photos/src/pages/_app.tsx index 0e80d0df9..7d82f7cc3 100644 --- a/web/apps/photos/src/pages/_app.tsx +++ b/web/apps/photos/src/pages/_app.tsx @@ -26,7 +26,6 @@ import EnteSpinner from "@ente/shared/components/EnteSpinner"; import { MessageContainer } from "@ente/shared/components/MessageContainer"; import AppNavbar from "@ente/shared/components/Navbar/app"; import { PHOTOS_PAGES as PAGES } from "@ente/shared/constants/pages"; -import { Events, eventBus } from "@ente/shared/events"; import { useLocalState } from "@ente/shared/hooks/useLocalState"; import HTTPService from "@ente/shared/network/HTTPService"; import { LS_KEYS, getData } from "@ente/shared/storage/localStorage"; @@ -52,7 +51,12 @@ import "photoswipe/dist/photoswipe.css"; import { createContext, useEffect, useRef, useState } from "react"; import LoadingBar from "react-top-loading-bar"; import DownloadManager from "services/download"; -import exportService, { resumeExportsIfNeeded } from "services/export"; +import { resumeExportsIfNeeded } from "services/export"; +import { photosLogout } from "services/logout"; +import { + getMLSearchConfig, + updateMLSearchConfig, +} from "services/machineLearning/machineLearningService"; import mlWorkManager from "services/machineLearning/mlWorkManager"; import { getFamilyPortalRedirectURL, @@ -64,10 +68,6 @@ import { NotificationAttributes, SetNotificationAttributes, } from "types/Notification"; -import { - getMLSearchConfig, - updateMLSearchConfig, -} from 
"utils/machineLearning/config"; import { getUpdateAvailableForDownloadMessage, getUpdateReadyToInstallMessage, @@ -100,6 +100,7 @@ type AppContextType = { setDialogBoxAttributesV2: SetDialogBoxAttributesV2; isCFProxyDisabled: boolean; setIsCFProxyDisabled: (disabled: boolean) => void; + logout: () => void; }; export const AppContext = createContext(null); @@ -188,14 +189,6 @@ export default function App({ Component, pageProps }: AppProps) { } }; loadMlSearchState(); - try { - eventBus.on(Events.LOGOUT, () => { - setMlSearchEnabled(false); - mlWorkManager.setMlSearchEnabled(false); - }); - } catch (e) { - log.error("Error while subscribing to logout event", e); - } }, []); useEffect(() => { @@ -213,13 +206,6 @@ export default function App({ Component, pageProps }: AppProps) { await resumeExportsIfNeeded(); }; initExport(); - try { - eventBus.on(Events.LOGOUT, () => { - exportService.disableContinuousExport(); - }); - } catch (e) { - log.error("Error while subscribing to logout event", e); - } }, []); const setUserOnline = () => setOffline(false); @@ -336,6 +322,11 @@ export default function App({ Component, pageProps }: AppProps) { content: t("UNKNOWN_ERROR"), }); + const logout = () => { + setMlSearchEnabled(false); + void photosLogout().then(() => router.push(PAGES.ROOT)); + }; + const title = isI18nReady ? 
t("TITLE", { context: APPS.PHOTOS }) : APP_TITLES.get(APPS.PHOTOS); @@ -394,6 +385,7 @@ export default function App({ Component, pageProps }: AppProps) { updateMapEnabled, isCFProxyDisabled, setIsCFProxyDisabled, + logout, }} > {(loading || !isI18nReady) && ( diff --git a/web/apps/photos/src/pages/gallery/index.tsx b/web/apps/photos/src/pages/gallery/index.tsx index f90d1b837..9ade12fc5 100644 --- a/web/apps/photos/src/pages/gallery/index.tsx +++ b/web/apps/photos/src/pages/gallery/index.tsx @@ -3,6 +3,7 @@ import { APPS } from "@ente/shared/apps/constants"; import { CenteredFlex } from "@ente/shared/components/Container"; import EnteSpinner from "@ente/shared/components/EnteSpinner"; import { PHOTOS_PAGES as PAGES } from "@ente/shared/constants/pages"; +import { getRecoveryKey } from "@ente/shared/crypto/helpers"; import { CustomError } from "@ente/shared/error"; import { useFileInput } from "@ente/shared/hooks/useFileInput"; import useMemoSingleThreaded from "@ente/shared/hooks/useMemoSingleThreaded"; @@ -84,17 +85,16 @@ import { getSectionSummaries, } from "services/collectionService"; import downloadManager from "services/download"; -import { syncEmbeddings, syncFileEmbeddings } from "services/embeddingService"; +import { + syncCLIPEmbeddings, + syncFaceEmbeddings, +} from "services/embeddingService"; import { syncEntities } from "services/entityService"; import { getLocalFiles, syncFiles } from "services/fileService"; import locationSearchService from "services/locationSearchService"; import { getLocalTrashedFiles, syncTrash } from "services/trashService"; import uploadManager from "services/upload/uploadManager"; -import { - isTokenValid, - syncMapEnabled, - validateKey, -} from "services/userService"; +import { isTokenValid, syncMapEnabled } from "services/userService"; import { Collection, CollectionSummaries } from "types/collection"; import { EnteFile } from "types/file"; import { @@ -130,6 +130,7 @@ import { } from "utils/file"; import { isArchivedFile } 
from "utils/magicMetadata"; import { getSessionExpiredMessage } from "utils/ui"; +import { isInternalUserForML } from "utils/user"; import { getLocalFamilyData } from "utils/user/family"; export const DeadCenter = styled("div")` @@ -245,8 +246,13 @@ export default function Gallery() { const [tempHiddenFileIds, setTempHiddenFileIds] = useState>( new Set(), ); - const { startLoading, finishLoading, setDialogMessage, ...appContext } = - useContext(AppContext); + const { + startLoading, + finishLoading, + setDialogMessage, + logout, + ...appContext + } = useContext(AppContext); const [collectionSummaries, setCollectionSummaries] = useState(); const [hiddenCollectionSummaries, setHiddenCollectionSummaries] = @@ -315,6 +321,19 @@ export default function Gallery() { const [isClipSearchResult, setIsClipSearchResult] = useState(false); + // Ensure that the keys in local storage are not malformed by verifying that + // the recoveryKey can be decrypted with the masterKey. + // Note: This is not bullet-proof. 
+ const validateKey = async () => { + try { + await getRecoveryKey(); + return true; + } catch (e) { + logout(); + return false; + } + }; + useEffect(() => { appContext.showNavBar(true); const key = getKey(SESSION_KEYS.ENCRYPTION_KEY); @@ -668,7 +687,7 @@ export default function Gallery() { }, [collections, hiddenCollections]); const showSessionExpiredMessage = () => { - setDialogMessage(getSessionExpiredMessage()); + setDialogMessage(getSessionExpiredMessage(logout)); }; const syncWithRemote = async (force = false, silent = false) => { @@ -698,10 +717,10 @@ export default function Gallery() { await syncTrash(collections, setTrashedFiles); await syncEntities(); await syncMapEnabled(); - await syncEmbeddings(); + await syncCLIPEmbeddings(); const electron = globalThis.electron; - if (electron) { - await syncFileEmbeddings(); + if (isInternalUserForML() && electron) { + await syncFaceEmbeddings(); } if (clipService.isPlatformSupported()) { void clipService.scheduleImageEmbeddingExtraction(); diff --git a/web/apps/photos/src/pages/shared-albums/index.tsx b/web/apps/photos/src/pages/shared-albums/index.tsx index ab35b23fa..d26e93ead 100644 --- a/web/apps/photos/src/pages/shared-albums/index.tsx +++ b/web/apps/photos/src/pages/shared-albums/index.tsx @@ -1,5 +1,4 @@ import log from "@/next/log"; -import { logoutUser } from "@ente/accounts/services/user"; import { APPS } from "@ente/shared/apps/constants"; import { CenteredFlex, @@ -185,7 +184,7 @@ export default function PublicCollectionGallery() { nonClosable: true, proceed: { text: t("LOGIN"), - action: logoutUser, + action: () => router.push(PAGES.ROOT), variant: "accent", }, }); diff --git a/web/apps/photos/src/services/clip-service.ts b/web/apps/photos/src/services/clip-service.ts index 703c89cf4..eb5d7ada5 100644 --- a/web/apps/photos/src/services/clip-service.ts +++ b/web/apps/photos/src/services/clip-service.ts @@ -11,7 +11,7 @@ import { Embedding } from "types/embedding"; import { EnteFile } from "types/file"; 
import { getPersonalFiles } from "utils/file"; import downloadManager from "./download"; -import { getLocalEmbeddings, putEmbedding } from "./embeddingService"; +import { localCLIPEmbeddings, putEmbedding } from "./embeddingService"; import { getAllLocalFiles, getLocalFiles } from "./fileService"; /** Status of CLIP indexing on the images in the user's local library. */ @@ -80,21 +80,20 @@ class CLIPService { this.liveEmbeddingExtractionQueue = new PQueue({ concurrency: 1, }); - eventBus.on(Events.LOGOUT, this.logoutHandler, this); } isPlatformSupported = () => { return isElectron(); }; - private logoutHandler = async () => { + async logout() { if (this.embeddingExtractionInProgress) { this.embeddingExtractionInProgress.abort(); } if (this.onFileUploadedHandler) { await this.removeOnFileUploadListener(); } - }; + } setupOnFileUploadListener = async () => { try { @@ -195,7 +194,7 @@ class CLIPService { return; } const localFiles = getPersonalFiles(await getAllLocalFiles(), user); - const existingEmbeddings = await getLocalEmbeddings(); + const existingEmbeddings = await localCLIPEmbeddings(); const pendingFiles = await getNonClipEmbeddingExtractedFiles( localFiles, existingEmbeddings, @@ -394,7 +393,7 @@ export const computeClipMatchScore = async ( const initialIndexingStatus = async (): Promise => { const user = getData(LS_KEYS.USER); if (!user) throw new Error("Orphan CLIP indexing without a login"); - const allEmbeddings = await getLocalEmbeddings(); + const allEmbeddings = await localCLIPEmbeddings(); const localFiles = getPersonalFiles(await getLocalFiles(), user); const pendingFiles = await getNonClipEmbeddingExtractedFiles( localFiles, diff --git a/web/apps/photos/src/services/download/index.ts b/web/apps/photos/src/services/download/index.ts index 7b0171da1..0618cd0e6 100644 --- a/web/apps/photos/src/services/download/index.ts +++ b/web/apps/photos/src/services/download/index.ts @@ -6,7 +6,6 @@ import { APPS } from "@ente/shared/apps/constants"; import 
ComlinkCryptoWorker from "@ente/shared/crypto"; import { DedicatedCryptoWorker } from "@ente/shared/crypto/internal/crypto.worker"; import { CustomError } from "@ente/shared/error"; -import { Events, eventBus } from "@ente/shared/events"; import { isPlaybackPossible } from "@ente/shared/media/video-playback"; import { Remote } from "comlink"; import isElectron from "is-electron"; @@ -31,6 +30,16 @@ export type SourceURLs = { isOriginal: boolean; isRenderable: boolean; type: "normal" | "livePhoto"; + /** + * Best effort attempt at obtaining the MIME type. + * + * Known cases where it is missing: + * + * - Live photos (these have a different code path for obtaining the URL). + * - A video that passes the isPlayable test in the browser. + * + */ + mimeType?: string; }; export type OnDownloadProgress = (event: { @@ -97,7 +106,6 @@ class DownloadManagerImpl { // } this.cryptoWorker = await ComlinkCryptoWorker.getInstance(); this.ready = true; - eventBus.on(Events.LOGOUT, this.logoutHandler.bind(this), this); } private ensureInitialized() { @@ -107,21 +115,15 @@ class DownloadManagerImpl { ); } - private async logoutHandler() { - try { - log.info("downloadManger logoutHandler started"); - this.ready = false; - this.cryptoWorker = null; - this.downloadClient = null; - this.fileObjectURLPromises.clear(); - this.fileConversionPromises.clear(); - this.thumbnailObjectURLPromises.clear(); - this.fileDownloadProgress.clear(); - this.progressUpdater = () => {}; - log.info("downloadManager logoutHandler completed"); - } catch (e) { - log.error("downloadManager logoutHandler failed", e); - } + async logout() { + this.ready = false; + this.cryptoWorker = null; + this.downloadClient = null; + this.fileObjectURLPromises.clear(); + this.fileConversionPromises.clear(); + this.thumbnailObjectURLPromises.clear(); + this.fileDownloadProgress.clear(); + this.progressUpdater = () => {}; } updateToken(token: string, passwordToken?: string) { @@ -304,7 +306,12 @@ class DownloadManagerImpl
{ if (cachedBlob) res = new Response(cachedBlob); else { res = await this.downloadClient.downloadFileStream(file); - this.fileCache?.put(cacheKey, await res.blob()); + // We don't have a files cache currently, so this was already a + // no-op. But even if we had a cache, this seems sus, because + // res.blob() will read the stream and I'd think then trying to do + // the subsequent read of the stream again below won't work. + + // this.fileCache?.put(cacheKey, await res.blob()); } const reader = res.body.getReader(); @@ -323,92 +330,61 @@ class DownloadManagerImpl { decryptionHeader, fileKey, ); - let data = new Uint8Array(); - // The following function handles each data chunk - const push = () => { - // "done" is a Boolean and value a "Uint8Array" - reader.read().then(async ({ done, value }) => { - try { - // Is there more data to read? - if (!done) { - downloadedBytes += value.byteLength; - onDownloadProgress({ - loaded: downloadedBytes, - total: contentLength, - }); - const buffer = new Uint8Array( - data.byteLength + value.byteLength, - ); - buffer.set(new Uint8Array(data), 0); - buffer.set( - new Uint8Array(value), - data.byteLength, - ); - if (buffer.length > decryptionChunkSize) { - const fileData = buffer.slice( - 0, - decryptionChunkSize, - ); - try { - const { decryptedData } = - await this.cryptoWorker.decryptFileChunk( - fileData, - pullState, - ); - controller.enqueue(decryptedData); - data = - buffer.slice( - decryptionChunkSize, - ); - } catch (e) { - if ( - e.message === - CustomError.PROCESSING_FAILED - ) { - log.error( - `Failed to process file ${file.id} from localID: ${file.metadata.localID} version: ${file.metadata.version} deviceFolder:${file.metadata.deviceFolder}`, - e, - ); - } - throw e; - } - } else { - data = buffer; - } - push(); - } else { - if (data) { - try { - const { decryptedData } = - await this.cryptoWorker.decryptFileChunk( - data, - pullState, - ); - controller.enqueue(decryptedData); - data = null; - } catch (e) { - if ( - 
e.message === - CustomError.PROCESSING_FAILED - ) { - log.error( - `Failed to process file ${file.id} from localID: ${file.metadata.localID} version: ${file.metadata.version} deviceFolder:${file.metadata.deviceFolder}`, - e, - ); - } - throw e; - } - } - controller.close(); - } - } catch (e) { - log.error("Failed to process file chunk", e); - controller.error(e); - } - }); - }; - push(); + let data = new Uint8Array(); + let more = true; + while (more) { + more = false; + + // "done" is a Boolean and value a "Uint8Array" + const { done, value } = await reader.read(); + + // Is there more data to read? + if (!done) { + downloadedBytes += value.length; + onDownloadProgress({ + loaded: downloadedBytes, + total: contentLength, + }); + + const buffer = new Uint8Array( + data.length + value.length, + ); + buffer.set(new Uint8Array(data), 0); + buffer.set(new Uint8Array(value), data.length); + + // Note that buffer.length might be a multiple of + // decryptionChunkSize. We let these accumulate, and + // drain it all with a nested while loop when done. + + if (buffer.length > decryptionChunkSize) { + const { decryptedData } = + await this.cryptoWorker.decryptFileChunk( + buffer.slice(0, decryptionChunkSize), + pullState, + ); + controller.enqueue(decryptedData); + data = buffer.slice(decryptionChunkSize); + } else { + data = buffer; + } + more = true; + } else { + while (data && data.length) { + const { decryptedData } = + await this.cryptoWorker.decryptFileChunk( + data.slice(0, decryptionChunkSize), + pullState, + ); + controller.enqueue(decryptedData); + data = + data.length > decryptionChunkSize + ? 
data.slice(decryptionChunkSize) + : undefined; + } + controller.close(); + } + } } catch (e) { log.error("Failed to process file stream", e); controller.error(e); @@ -475,27 +451,37 @@ async function getRenderableFileURL( originalFileURL: string, forceConvert: boolean, ): Promise { - let srcURLs: SourceURLs["url"]; + const existingOrNewObjectURL = (convertedBlob: Blob) => + convertedBlob + ? convertedBlob === fileBlob + ? originalFileURL + : URL.createObjectURL(convertedBlob) + : undefined; + + let url: SourceURLs["url"]; + let isOriginal: boolean; + let isRenderable: boolean; + let type: SourceURLs["type"] = "normal"; + let mimeType: string | undefined; + switch (file.metadata.fileType) { case FILE_TYPE.IMAGE: { const convertedBlob = await getRenderableImage( file.metadata.title, fileBlob, ); - const convertedURL = getFileObjectURL( - originalFileURL, - fileBlob, - convertedBlob, - ); - srcURLs = convertedURL; + const convertedURL = existingOrNewObjectURL(convertedBlob); + url = convertedURL; + isOriginal = convertedURL === originalFileURL; + isRenderable = !!convertedURL; + mimeType = convertedBlob?.type; break; } case FILE_TYPE.LIVE_PHOTO: { - srcURLs = await getRenderableLivePhotoURL( - file, - fileBlob, - forceConvert, - ); + url = await getRenderableLivePhotoURL(file, fileBlob, forceConvert); + isOriginal = false; + isRenderable = false; + type = "livePhoto"; break; } case FILE_TYPE.VIDEO: { @@ -504,52 +490,24 @@ async function getRenderableFileURL( fileBlob, forceConvert, ); - const convertedURL = getFileObjectURL( - originalFileURL, - fileBlob, - convertedBlob, - ); - srcURLs = convertedURL; + const convertedURL = existingOrNewObjectURL(convertedBlob); + url = convertedURL; + isOriginal = convertedURL === originalFileURL; + isRenderable = !!convertedURL; + mimeType = convertedBlob?.type; break; } default: { - srcURLs = originalFileURL; + url = originalFileURL; + isOriginal = true; + isRenderable = false; break; } } - let isOriginal: boolean; - if 
(file.metadata.fileType === FILE_TYPE.LIVE_PHOTO) { - isOriginal = false; - } else { - isOriginal = (srcURLs as string) === (originalFileURL as string); - } - - return { - url: srcURLs, - isOriginal, - isRenderable: - file.metadata.fileType !== FILE_TYPE.LIVE_PHOTO && !!srcURLs, - type: - file.metadata.fileType === FILE_TYPE.LIVE_PHOTO - ? "livePhoto" - : "normal", - }; + return { url, isOriginal, isRenderable, type, mimeType }; } -const getFileObjectURL = ( - originalFileURL: string, - originalBlob: Blob, - convertedBlob: Blob, -) => { - const convertedURL = convertedBlob - ? convertedBlob === originalBlob - ? originalFileURL - : URL.createObjectURL(convertedBlob) - : null; - return convertedURL; -}; - async function getRenderableLivePhotoURL( file: EnteFile, fileBlob: Blob, @@ -610,10 +568,9 @@ async function getPlayableVideo( if (!forceConvert && !runOnWeb && !isElectron()) { return null; } - // TODO(MR): This might not work for very large (~ GB) videos. Test. log.info(`Converting video ${videoNameTitle} to mp4`); const convertedVideoData = await ffmpeg.convertToMP4(videoBlob); - return new Blob([convertedVideoData]); + return new Blob([convertedVideoData], { type: "video/mp4" }); } } catch (e) { log.error("Video conversion failed", e); diff --git a/web/apps/photos/src/services/embeddingService.ts b/web/apps/photos/src/services/embeddingService.ts index 36af84842..17ea5a396 100644 --- a/web/apps/photos/src/services/embeddingService.ts +++ b/web/apps/photos/src/services/embeddingService.ts @@ -7,6 +7,7 @@ import HTTPService from "@ente/shared/network/HTTPService"; import { getEndpoint } from "@ente/shared/network/api"; import localForage from "@ente/shared/storage/localForage"; import { getToken } from "@ente/shared/storage/localStorage/helpers"; +import { FileML } from "services/machineLearning/machineLearningService"; import type { Embedding, EmbeddingModel, @@ -15,31 +16,30 @@ import type { PutEmbeddingRequest, } from "types/embedding"; import { EnteFile } 
from "types/file"; -import { - getLatestVersionEmbeddings, - getLatestVersionFileEmbeddings, -} from "utils/embedding"; -import { FileML } from "utils/machineLearning/mldataMappers"; import { getLocalCollections } from "./collectionService"; import { getAllLocalFiles } from "./fileService"; import { getLocalTrashedFiles } from "./trashService"; -const ENDPOINT = getEndpoint(); - const DIFF_LIMIT = 500; -const EMBEDDINGS_TABLE_V1 = "embeddings"; -const EMBEDDINGS_TABLE = "embeddings_v2"; +/** Local storage key suffix for embedding sync times */ +const embeddingSyncTimeLSKeySuffix = "embedding_sync_time"; +/** Local storage key for CLIP embeddings. */ +const clipEmbeddingsLSKey = "embeddings_v2"; const FILE_EMBEDING_TABLE = "file_embeddings"; -const EMBEDDING_SYNC_TIME_TABLE = "embedding_sync_time"; -export const getAllLocalEmbeddings = async () => { +/** Return all CLIP embeddings that we have available locally. */ +export const localCLIPEmbeddings = async () => + (await storedCLIPEmbeddings()).filter(({ model }) => model === "onnx-clip"); + +const storedCLIPEmbeddings = async () => { const embeddings: Array = - await localForage.getItem(EMBEDDINGS_TABLE); + await localForage.getItem(clipEmbeddingsLSKey); if (!embeddings) { - await localForage.removeItem(EMBEDDINGS_TABLE_V1); - await localForage.removeItem(EMBEDDING_SYNC_TIME_TABLE); - await localForage.setItem(EMBEDDINGS_TABLE, []); + // Migrate + await localForage.removeItem("embeddings"); + await localForage.removeItem("embedding_sync_time"); + await localForage.setItem(clipEmbeddingsLSKey, []); return []; } return embeddings; @@ -54,15 +54,10 @@ export const getFileMLEmbeddings = async (): Promise => { return embeddings; }; -export const getLocalEmbeddings = async () => { - const embeddings = await getAllLocalEmbeddings(); - return embeddings.filter((embedding) => embedding.model === "onnx-clip"); -}; - const getModelEmbeddingSyncTime = async (model: EmbeddingModel) => { return ( (await localForage.getItem( - 
`${model}-${EMBEDDING_SYNC_TIME_TABLE}`, + `${model}-${embeddingSyncTimeLSKeySuffix}`, )) ?? 0 ); }; @@ -71,13 +66,17 @@ const setModelEmbeddingSyncTime = async ( model: EmbeddingModel, time: number, ) => { - await localForage.setItem(`${model}-${EMBEDDING_SYNC_TIME_TABLE}`, time); + await localForage.setItem(`${model}-${embeddingSyncTimeLSKeySuffix}`, time); }; -export const syncEmbeddings = async () => { - const models: EmbeddingModel[] = ["onnx-clip"]; +/** + * Fetch new CLIP embeddings with the server and save them locally. Also prune + * local embeddings for any files no longer exist locally. + */ +export const syncCLIPEmbeddings = async () => { + const model: EmbeddingModel = "onnx-clip"; try { - let allEmbeddings = await getAllLocalEmbeddings(); + let allEmbeddings = await storedCLIPEmbeddings(); const localFiles = await getAllLocalFiles(); const hiddenAlbums = await getLocalCollections("hidden"); const localTrashFiles = await getLocalTrashedFiles(); @@ -89,79 +88,80 @@ export const syncEmbeddings = async () => { await cleanupDeletedEmbeddings( allLocalFiles, allEmbeddings, - EMBEDDINGS_TABLE, + clipEmbeddingsLSKey, ); log.info(`Syncing embeddings localCount: ${allEmbeddings.length}`); - for (const model of models) { - let modelLastSinceTime = await getModelEmbeddingSyncTime(model); - log.info( - `Syncing ${model} model's embeddings sinceTime: ${modelLastSinceTime}`, - ); - let response: GetEmbeddingDiffResponse; - do { - response = await getEmbeddingsDiff(modelLastSinceTime, model); - if (!response.diff?.length) { - return; - } - const newEmbeddings = await Promise.all( - response.diff.map(async (embedding) => { - try { - const { - encryptedEmbedding, - decryptionHeader, - ...rest - } = embedding; - const worker = - await ComlinkCryptoWorker.getInstance(); - const fileKey = fileIdToKeyMap.get( - embedding.fileID, - ); - if (!fileKey) { - throw Error(CustomError.FILE_NOT_FOUND); - } - const decryptedData = await worker.decryptEmbedding( - encryptedEmbedding, 
- decryptionHeader, - fileIdToKeyMap.get(embedding.fileID), - ); - return { - ...rest, - embedding: decryptedData, - } as Embedding; - } catch (e) { - let hasHiddenAlbums = false; - if (e.message === CustomError.FILE_NOT_FOUND) { - hasHiddenAlbums = hiddenAlbums?.length > 0; - } - log.error( - `decryptEmbedding failed for file (hasHiddenAlbums: ${hasHiddenAlbums})`, - e, - ); + let modelLastSinceTime = await getModelEmbeddingSyncTime(model); + log.info( + `Syncing ${model} model's embeddings sinceTime: ${modelLastSinceTime}`, + ); + let response: GetEmbeddingDiffResponse; + do { + response = await getEmbeddingsDiff(modelLastSinceTime, model); + if (!response.diff?.length) { + return; + } + // Note: in rare cases we might get a diff entry for an embedding + // corresponding to a file which has been deleted (but whose + // embedding is enqueued for deletion). Client should expect such a + // scenario (all it has to do is just ignore them). + const newEmbeddings = await Promise.all( + response.diff.map(async (embedding) => { + try { + const { + encryptedEmbedding, + decryptionHeader, + ...rest + } = embedding; + const worker = await ComlinkCryptoWorker.getInstance(); + const fileKey = fileIdToKeyMap.get(embedding.fileID); + if (!fileKey) { + throw Error(CustomError.FILE_NOT_FOUND); } - }), - ); - allEmbeddings = getLatestVersionEmbeddings([ - ...allEmbeddings, - ...newEmbeddings, - ]); - if (response.diff.length) { - modelLastSinceTime = response.diff.slice(-1)[0].updatedAt; - } - await localForage.setItem(EMBEDDINGS_TABLE, allEmbeddings); - await setModelEmbeddingSyncTime(model, modelLastSinceTime); - log.info( - `Syncing embeddings syncedEmbeddingsCount: ${allEmbeddings.length}`, - ); - } while (response.diff.length === DIFF_LIMIT); - } + const decryptedData = await worker.decryptEmbedding( + encryptedEmbedding, + decryptionHeader, + fileIdToKeyMap.get(embedding.fileID), + ); + + return { + ...rest, + embedding: decryptedData, + } as Embedding; + } catch (e) { + let 
hasHiddenAlbums = false; + if (e.message === CustomError.FILE_NOT_FOUND) { + hasHiddenAlbums = hiddenAlbums?.length > 0; + } + log.error( + `decryptEmbedding failed for file (hasHiddenAlbums: ${hasHiddenAlbums})`, + e, + ); + } + }), + ); + allEmbeddings = getLatestVersionEmbeddings([ + ...allEmbeddings, + ...newEmbeddings, + ]); + modelLastSinceTime = response.diff.reduce( + (max, { updatedAt }) => Math.max(max, updatedAt), + modelLastSinceTime, + ); + await localForage.setItem(clipEmbeddingsLSKey, allEmbeddings); + await setModelEmbeddingSyncTime(model, modelLastSinceTime); + log.info( + `Syncing embeddings syncedEmbeddingsCount: ${allEmbeddings.length}`, + ); + } while (response.diff.length > 0); } catch (e) { log.error("Sync embeddings failed", e); } }; -export const syncFileEmbeddings = async () => { - const models: EmbeddingModel[] = ["file-ml-clip-face"]; +export const syncFaceEmbeddings = async () => { + const model: EmbeddingModel = "file-ml-clip-face"; try { let allEmbeddings: FileML[] = await getFileMLEmbeddings(); const localFiles = await getAllLocalFiles(); @@ -178,69 +178,100 @@ export const syncFileEmbeddings = async () => { FILE_EMBEDING_TABLE, ); log.info(`Syncing embeddings localCount: ${allEmbeddings.length}`); - for (const model of models) { - let modelLastSinceTime = await getModelEmbeddingSyncTime(model); - log.info( - `Syncing ${model} model's embeddings sinceTime: ${modelLastSinceTime}`, - ); - let response: GetEmbeddingDiffResponse; - do { - response = await getEmbeddingsDiff(modelLastSinceTime, model); - if (!response.diff?.length) { - return; - } - const newEmbeddings = await Promise.all( - response.diff.map(async (embedding) => { - try { - const worker = - await ComlinkCryptoWorker.getInstance(); - const fileKey = fileIdToKeyMap.get( - embedding.fileID, - ); - if (!fileKey) { - throw Error(CustomError.FILE_NOT_FOUND); - } - const decryptedData = await worker.decryptMetadata( - embedding.encryptedEmbedding, - embedding.decryptionHeader, - 
fileIdToKeyMap.get(embedding.fileID), - ); - return { - ...decryptedData, - updatedAt: embedding.updatedAt, - } as unknown as FileML; - } catch (e) { - let hasHiddenAlbums = false; - if (e.message === CustomError.FILE_NOT_FOUND) { - hasHiddenAlbums = hiddenAlbums?.length > 0; - } - log.error( - `decryptEmbedding failed for file (hasHiddenAlbums: ${hasHiddenAlbums})`, - e, - ); + let modelLastSinceTime = await getModelEmbeddingSyncTime(model); + log.info( + `Syncing ${model} model's embeddings sinceTime: ${modelLastSinceTime}`, + ); + let response: GetEmbeddingDiffResponse; + do { + response = await getEmbeddingsDiff(modelLastSinceTime, model); + if (!response.diff?.length) { + return; + } + const newEmbeddings = await Promise.all( + response.diff.map(async (embedding) => { + try { + const worker = await ComlinkCryptoWorker.getInstance(); + const fileKey = fileIdToKeyMap.get(embedding.fileID); + if (!fileKey) { + throw Error(CustomError.FILE_NOT_FOUND); } - }), - ); - allEmbeddings = getLatestVersionFileEmbeddings([ - ...allEmbeddings, - ...newEmbeddings, - ]); - if (response.diff.length) { - modelLastSinceTime = response.diff.slice(-1)[0].updatedAt; - } - await localForage.setItem(FILE_EMBEDING_TABLE, allEmbeddings); - await setModelEmbeddingSyncTime(model, modelLastSinceTime); - log.info( - `Syncing embeddings syncedEmbeddingsCount: ${allEmbeddings.length}`, - ); - } while (response.diff.length === DIFF_LIMIT); - } + const decryptedData = await worker.decryptMetadata( + embedding.encryptedEmbedding, + embedding.decryptionHeader, + fileIdToKeyMap.get(embedding.fileID), + ); + + return { + ...decryptedData, + updatedAt: embedding.updatedAt, + } as unknown as FileML; + } catch (e) { + let hasHiddenAlbums = false; + if (e.message === CustomError.FILE_NOT_FOUND) { + hasHiddenAlbums = hiddenAlbums?.length > 0; + } + log.error( + `decryptEmbedding failed for file (hasHiddenAlbums: ${hasHiddenAlbums})`, + e, + ); + } + }), + ); + allEmbeddings = 
getLatestVersionFileEmbeddings([ + ...allEmbeddings, + ...newEmbeddings, + ]); + modelLastSinceTime = response.diff.reduce( + (max, { updatedAt }) => Math.max(max, updatedAt), + modelLastSinceTime, + ); + await localForage.setItem(FILE_EMBEDING_TABLE, allEmbeddings); + await setModelEmbeddingSyncTime(model, modelLastSinceTime); + log.info( + `Syncing embeddings syncedEmbeddingsCount: ${allEmbeddings.length}`, + ); + } while (response.diff.length > 0); } catch (e) { log.error("Sync embeddings failed", e); } }; +const getLatestVersionEmbeddings = (embeddings: Embedding[]) => { + const latestVersionEntities = new Map(); + embeddings.forEach((embedding) => { + if (!embedding?.fileID) { + return; + } + const existingEmbeddings = latestVersionEntities.get(embedding.fileID); + if ( + !existingEmbeddings || + existingEmbeddings.updatedAt < embedding.updatedAt + ) { + latestVersionEntities.set(embedding.fileID, embedding); + } + }); + return Array.from(latestVersionEntities.values()); +}; + +const getLatestVersionFileEmbeddings = (embeddings: FileML[]) => { + const latestVersionEntities = new Map(); + embeddings.forEach((embedding) => { + if (!embedding?.fileID) { + return; + } + const existingEmbeddings = latestVersionEntities.get(embedding.fileID); + if ( + !existingEmbeddings || + existingEmbeddings.updatedAt < embedding.updatedAt + ) { + latestVersionEntities.set(embedding.fileID, embedding); + } + }); + return Array.from(latestVersionEntities.values()); +}; + export const getEmbeddingsDiff = async ( sinceTime: number, model: EmbeddingModel, @@ -251,7 +282,7 @@ export const getEmbeddingsDiff = async ( return; } const response = await HTTPService.get( - `${ENDPOINT}/embeddings/diff`, + `${getEndpoint()}/embeddings/diff`, { sinceTime, limit: DIFF_LIMIT, @@ -280,7 +311,7 @@ export const putEmbedding = async ( throw Error(CustomError.TOKEN_MISSING); } const resp = await HTTPService.put( - `${ENDPOINT}/embeddings`, + `${getEndpoint()}/embeddings`, putEmbeddingReq, null, { 
diff --git a/web/apps/photos/src/services/export/index.ts b/web/apps/photos/src/services/export/index.ts index 786932ff8..3a68837e7 100644 --- a/web/apps/photos/src/services/export/index.ts +++ b/web/apps/photos/src/services/export/index.ts @@ -3,12 +3,12 @@ import { decodeLivePhoto } from "@/media/live-photo"; import type { Metadata } from "@/media/types/file"; import { ensureElectron } from "@/next/electron"; import log from "@/next/log"; +import { wait } from "@/utils/promise"; import { CustomError } from "@ente/shared/error"; import { Events, eventBus } from "@ente/shared/events"; import { LS_KEYS, getData, setData } from "@ente/shared/storage/localStorage"; import { formatDateTimeShort } from "@ente/shared/time/format"; import { User } from "@ente/shared/user/types"; -import { wait } from "@ente/shared/utils"; import QueueProcessor, { CancellationStatus, RequestCanceller, diff --git a/web/apps/photos/src/services/export/migration.ts b/web/apps/photos/src/services/export/migration.ts index 9404ddde5..0c8de03e6 100644 --- a/web/apps/photos/src/services/export/migration.ts +++ b/web/apps/photos/src/services/export/migration.ts @@ -3,9 +3,9 @@ import { decodeLivePhoto } from "@/media/live-photo"; import { ensureElectron } from "@/next/electron"; import { nameAndExtension } from "@/next/file"; import log from "@/next/log"; +import { wait } from "@/utils/promise"; import { LS_KEYS, getData } from "@ente/shared/storage/localStorage"; import { User } from "@ente/shared/user/types"; -import { wait } from "@ente/shared/utils"; import { getLocalCollections } from "services/collectionService"; import downloadManager from "services/download"; import { getAllLocalFiles } from "services/fileService"; diff --git a/web/apps/photos/src/utils/machineLearning/faceAlign.ts b/web/apps/photos/src/services/face/align.ts similarity index 84% rename from web/apps/photos/src/utils/machineLearning/faceAlign.ts rename to web/apps/photos/src/services/face/align.ts index 
beb98cea9..7a3bf7a04 100644 --- a/web/apps/photos/src/utils/machineLearning/faceAlign.ts +++ b/web/apps/photos/src/services/face/align.ts @@ -1,7 +1,7 @@ import { Matrix } from "ml-matrix"; +import { Point } from "services/face/geom"; +import { FaceAlignment, FaceDetection } from "services/face/types"; import { getSimilarityTransformation } from "similarity-transformation"; -import { FaceAlignment, FaceDetection } from "types/machineLearning"; -import { Point } from "../../../thirdparty/face-api/classes"; const ARCFACE_LANDMARKS = [ [38.2946, 51.6963], @@ -20,9 +20,12 @@ const ARC_FACE_5_LANDMARKS = [ [70.7299, 92.2041], ] as Array<[number, number]>; -export function getArcfaceAlignment( - faceDetection: FaceDetection, -): FaceAlignment { +/** + * Compute and return an {@link FaceAlignment} for the given face detection. + * + * @param faceDetection A geometry indicating a face detected in an image. + */ +export const faceAlignment = (faceDetection: FaceDetection): FaceAlignment => { const landmarkCount = faceDetection.landmarks.length; return getFaceAlignmentUsingSimilarityTransform( faceDetection, @@ -31,12 +34,11 @@ export function getArcfaceAlignment( ARCFACE_LANDMARKS_FACE_SIZE, ), ); -} +}; function getFaceAlignmentUsingSimilarityTransform( faceDetection: FaceDetection, alignedLandmarks: Array<[number, number]>, - // alignmentMethod: Versioned ): FaceAlignment { const landmarksMat = new Matrix( faceDetection.landmarks @@ -67,7 +69,6 @@ function getFaceAlignmentUsingSimilarityTransform( simTransform.rotation.get(0, 1), simTransform.rotation.get(0, 0), ); - // log.info({ affineMatrix, meanTranslation, centerMat, center, toMean: simTransform.toMean, fromMean: simTransform.fromMean, size }); return { affineMatrix, diff --git a/web/apps/photos/src/services/face/blur.ts b/web/apps/photos/src/services/face/blur.ts new file mode 100644 index 000000000..c79081297 --- /dev/null +++ b/web/apps/photos/src/services/face/blur.ts @@ -0,0 +1,187 @@ +import { Face } from 
"services/face/types"; +import { createGrayscaleIntMatrixFromNormalized2List } from "utils/image"; +import { mobileFaceNetFaceSize } from "./embed"; + +/** + * Laplacian blur detection. + */ +export const detectBlur = ( + alignedFaces: Float32Array, + faces: Face[], +): number[] => { + const numFaces = Math.round( + alignedFaces.length / + (mobileFaceNetFaceSize * mobileFaceNetFaceSize * 3), + ); + const blurValues: number[] = []; + for (let i = 0; i < numFaces; i++) { + const face = faces[i]; + const direction = faceDirection(face); + const faceImage = createGrayscaleIntMatrixFromNormalized2List( + alignedFaces, + i, + ); + const laplacian = applyLaplacian(faceImage, direction); + blurValues.push(matrixVariance(laplacian)); + } + return blurValues; +}; + +type FaceDirection = "left" | "right" | "straight"; + +const faceDirection = (face: Face): FaceDirection => { + const landmarks = face.detection.landmarks; + const leftEye = landmarks[0]; + const rightEye = landmarks[1]; + const nose = landmarks[2]; + const leftMouth = landmarks[3]; + const rightMouth = landmarks[4]; + + const eyeDistanceX = Math.abs(rightEye.x - leftEye.x); + const eyeDistanceY = Math.abs(rightEye.y - leftEye.y); + const mouthDistanceY = Math.abs(rightMouth.y - leftMouth.y); + + const faceIsUpright = + Math.max(leftEye.y, rightEye.y) + 0.5 * eyeDistanceY < nose.y && + nose.y + 0.5 * mouthDistanceY < Math.min(leftMouth.y, rightMouth.y); + + const noseStickingOutLeft = + nose.x < Math.min(leftEye.x, rightEye.x) && + nose.x < Math.min(leftMouth.x, rightMouth.x); + + const noseStickingOutRight = + nose.x > Math.max(leftEye.x, rightEye.x) && + nose.x > Math.max(leftMouth.x, rightMouth.x); + + const noseCloseToLeftEye = + Math.abs(nose.x - leftEye.x) < 0.2 * eyeDistanceX; + const noseCloseToRightEye = + Math.abs(nose.x - rightEye.x) < 0.2 * eyeDistanceX; + + if (noseStickingOutLeft || (faceIsUpright && noseCloseToLeftEye)) { + return "left"; + } else if (noseStickingOutRight || (faceIsUpright && 
noseCloseToRightEye)) { + return "right"; + } + + return "straight"; +}; + +/** + * Return a new image by applying a Laplacian blur kernel to each pixel. + */ +const applyLaplacian = ( + image: number[][], + direction: FaceDirection, +): number[][] => { + const paddedImage: number[][] = padImage(image, direction); + const numRows = paddedImage.length - 2; + const numCols = paddedImage[0].length - 2; + + // Create an output image initialized to 0. + const outputImage: number[][] = Array.from({ length: numRows }, () => + new Array(numCols).fill(0), + ); + + // Define the Laplacian kernel. + const kernel: number[][] = [ + [0, 1, 0], + [1, -4, 1], + [0, 1, 0], + ]; + + // Apply the kernel to each pixel + for (let i = 0; i < numRows; i++) { + for (let j = 0; j < numCols; j++) { + let sum = 0; + for (let ki = 0; ki < 3; ki++) { + for (let kj = 0; kj < 3; kj++) { + sum += paddedImage[i + ki][j + kj] * kernel[ki][kj]; + } + } + // Adjust the output value if necessary (e.g., clipping). + outputImage[i][j] = sum; + } + } + + return outputImage; +}; + +const padImage = (image: number[][], direction: FaceDirection): number[][] => { + const removeSideColumns = 56; /* must be even */ + + const numRows = image.length; + const numCols = image[0].length; + const paddedNumCols = numCols + 2 - removeSideColumns; + const paddedNumRows = numRows + 2; + + // Create a new matrix with extra padding. + const paddedImage: number[][] = Array.from({ length: paddedNumRows }, () => + new Array(paddedNumCols).fill(0), + ); + + if (direction === "straight") { + // Copy original image into the center of the padded image. + for (let i = 0; i < numRows; i++) { + for (let j = 0; j < paddedNumCols - 2; j++) { + paddedImage[i + 1][j + 1] = + image[i][j + Math.round(removeSideColumns / 2)]; + } + } + } else if (direction === "left") { + // If the face is facing left, we only take the right side of the face image. 
+ for (let i = 0; i < numRows; i++) { + for (let j = 0; j < paddedNumCols - 2; j++) { + paddedImage[i + 1][j + 1] = image[i][j + removeSideColumns]; + } + } + } else if (direction === "right") { + // If the face is facing right, we only take the left side of the face image. + for (let i = 0; i < numRows; i++) { + for (let j = 0; j < paddedNumCols - 2; j++) { + paddedImage[i + 1][j + 1] = image[i][j]; + } + } + } + + // Reflect padding + // Top and bottom rows + for (let j = 1; j <= paddedNumCols - 2; j++) { + paddedImage[0][j] = paddedImage[2][j]; // Top row + paddedImage[numRows + 1][j] = paddedImage[numRows - 1][j]; // Bottom row + } + // Left and right columns + for (let i = 0; i < numRows + 2; i++) { + paddedImage[i][0] = paddedImage[i][2]; // Left column + paddedImage[i][paddedNumCols - 1] = paddedImage[i][paddedNumCols - 3]; // Right column + } + + return paddedImage; +}; + +const matrixVariance = (matrix: number[][]): number => { + const numRows = matrix.length; + const numCols = matrix[0].length; + const totalElements = numRows * numCols; + + // Calculate the mean. + let mean: number = 0; + matrix.forEach((row) => { + row.forEach((value) => { + mean += value; + }); + }); + mean /= totalElements; + + // Calculate the variance. + let variance: number = 0; + matrix.forEach((row) => { + row.forEach((value) => { + const diff: number = value - mean; + variance += diff * diff; + }); + }); + variance /= totalElements; + + return variance; +}; diff --git a/web/apps/photos/src/services/face/cluster.ts b/web/apps/photos/src/services/face/cluster.ts new file mode 100644 index 000000000..9ddf156cc --- /dev/null +++ b/web/apps/photos/src/services/face/cluster.ts @@ -0,0 +1,34 @@ +import { Hdbscan, type DebugInfo } from "hdbscan"; +import { type Cluster } from "services/face/types"; + +export interface ClusterFacesResult { + clusters: Array; + noise: Cluster; + debugInfo?: DebugInfo; +} + +/** + * Cluster the given {@link faceEmbeddings}. 
+ * + * @param faceEmbeddings An array of embeddings produced by our face indexing + * pipeline. Each embedding is for a face detected in an image (a single image + * may have multiple faces detected within it). + */ +export const clusterFaces = async ( + faceEmbeddings: Array>, +): Promise => { + const hdbscan = new Hdbscan({ + input: faceEmbeddings, + minClusterSize: 3, + minSamples: 5, + clusterSelectionEpsilon: 0.6, + clusterSelectionMethod: "leaf", + debug: true, + }); + + return { + clusters: hdbscan.getClusters(), + noise: hdbscan.getNoise(), + debugInfo: hdbscan.getDebugInfo(), + }; +}; diff --git a/web/apps/photos/src/utils/machineLearning/faceCrop.ts b/web/apps/photos/src/services/face/crop.ts similarity index 54% rename from web/apps/photos/src/utils/machineLearning/faceCrop.ts rename to web/apps/photos/src/services/face/crop.ts index d437a942d..acd49228e 100644 --- a/web/apps/photos/src/utils/machineLearning/faceCrop.ts +++ b/web/apps/photos/src/services/face/crop.ts @@ -1,28 +1,32 @@ -import { FaceAlignment, FaceCrop, FaceCropConfig } from "types/machineLearning"; +import { Box, enlargeBox } from "services/face/geom"; +import { FaceCrop, FaceDetection } from "services/face/types"; import { cropWithRotation } from "utils/image"; -import { enlargeBox } from "."; -import { Box } from "../../../thirdparty/face-api/classes"; +import { faceAlignment } from "./align"; -export function getFaceCrop( +export const getFaceCrop = ( imageBitmap: ImageBitmap, - alignment: FaceAlignment, - config: FaceCropConfig, -): FaceCrop { + faceDetection: FaceDetection, +): FaceCrop => { + const alignment = faceAlignment(faceDetection); + + const padding = 0.25; + const maxSize = 256; + const alignmentBox = new Box({ x: alignment.center.x - alignment.size / 2, y: alignment.center.y - alignment.size / 2, width: alignment.size, height: alignment.size, }).round(); - const scaleForPadding = 1 + config.padding * 2; + const scaleForPadding = 1 + padding * 2; const paddedBox = 
enlargeBox(alignmentBox, scaleForPadding).round(); const faceImageBitmap = cropWithRotation(imageBitmap, paddedBox, 0, { - width: config.maxSize, - height: config.maxSize, + width: maxSize, + height: maxSize, }); return { image: faceImageBitmap, imageBox: paddedBox, }; -} +}; diff --git a/web/apps/photos/src/utils/storage/mlIDbStorage.ts b/web/apps/photos/src/services/face/db.ts similarity index 95% rename from web/apps/photos/src/utils/storage/mlIDbStorage.ts rename to web/apps/photos/src/services/face/db.ts index 766c3ac9a..399bfff1a 100644 --- a/web/apps/photos/src/utils/storage/mlIDbStorage.ts +++ b/web/apps/photos/src/services/face/db.ts @@ -1,11 +1,5 @@ import { haveWindow } from "@/next/env"; import log from "@/next/log"; -import { - DEFAULT_ML_SEARCH_CONFIG, - DEFAULT_ML_SYNC_CONFIG, - DEFAULT_ML_SYNC_JOB_CONFIG, - MAX_ML_SYNC_ERROR_COUNT, -} from "constants/mlConfig"; import { DBSchema, IDBPDatabase, @@ -15,13 +9,22 @@ import { openDB, } from "idb"; import isElectron from "is-electron"; -import { Face, MLLibraryData, MlFileData, Person } from "types/machineLearning"; -import { IndexStatus } from "types/machineLearning/ui"; +import { Face, MLLibraryData, MlFileData, Person } from "services/face/types"; +import { + DEFAULT_ML_SEARCH_CONFIG, + MAX_ML_SYNC_ERROR_COUNT, +} from "services/machineLearning/machineLearningService"; + +export interface IndexStatus { + outOfSyncFilesExists: boolean; + nSyncedFiles: number; + nTotalFiles: number; + localFilesSynced: boolean; + peopleIndexSynced: boolean; +} interface Config {} -export const ML_SYNC_JOB_CONFIG_NAME = "ml-sync-job"; -export const ML_SYNC_CONFIG_NAME = "ml-sync"; export const ML_SEARCH_CONFIG_NAME = "ml-search"; const MLDATA_DB_NAME = "mldata"; @@ -129,15 +132,18 @@ class MLIDbStorage { // TODO: update configs if version is updated in defaults db.createObjectStore("configs"); + /* await tx .objectStore("configs") .add( DEFAULT_ML_SYNC_JOB_CONFIG, - ML_SYNC_JOB_CONFIG_NAME, + "ml-sync-job", ); + await tx 
.objectStore("configs") .add(DEFAULT_ML_SYNC_CONFIG, ML_SYNC_CONFIG_NAME); + */ } if (oldVersion < 3) { await tx @@ -156,6 +162,14 @@ class MLIDbStorage { .objectStore("configs") .delete(ML_SEARCH_CONFIG_NAME); + await tx + .objectStore("configs") + .delete(""ml-sync""); + + await tx + .objectStore("configs") + .delete("ml-sync-job"); + await tx .objectStore("configs") .add( diff --git a/web/apps/photos/src/services/face/detect.ts b/web/apps/photos/src/services/face/detect.ts new file mode 100644 index 000000000..39b843062 --- /dev/null +++ b/web/apps/photos/src/services/face/detect.ts @@ -0,0 +1,316 @@ +import { workerBridge } from "@/next/worker/worker-bridge"; +import { euclidean } from "hdbscan"; +import { + Box, + Dimensions, + Point, + boxFromBoundingBox, + newBox, +} from "services/face/geom"; +import { FaceDetection } from "services/face/types"; +import { + Matrix, + applyToPoint, + compose, + scale, + translate, +} from "transformation-matrix"; +import { + clamp, + getPixelBilinear, + normalizePixelBetween0And1, +} from "utils/image"; + +/** + * Detect faces in the given {@link imageBitmap}. + * + * The model used is YOLO, running in an ONNX runtime. 
+ */ +export const detectFaces = async ( + imageBitmap: ImageBitmap, +): Promise> => { + const maxFaceDistancePercent = Math.sqrt(2) / 100; + const maxFaceDistance = imageBitmap.width * maxFaceDistancePercent; + const preprocessResult = preprocessImageBitmapToFloat32ChannelsFirst( + imageBitmap, + 640, + 640, + ); + const data = preprocessResult.data; + const resized = preprocessResult.newSize; + const outputData = await workerBridge.detectFaces(data); + const faces = getFacesFromYOLOOutput(outputData as Float32Array, 0.7); + const inBox = newBox(0, 0, resized.width, resized.height); + const toBox = newBox(0, 0, imageBitmap.width, imageBitmap.height); + const transform = computeTransformToBox(inBox, toBox); + const faceDetections: Array = faces?.map((f) => { + const box = transformBox(f.box, transform); + const normLandmarks = f.landmarks; + const landmarks = transformPoints(normLandmarks, transform); + return { + box, + landmarks, + probability: f.probability as number, + } as FaceDetection; + }); + return removeDuplicateDetections(faceDetections, maxFaceDistance); +}; + +const preprocessImageBitmapToFloat32ChannelsFirst = ( + imageBitmap: ImageBitmap, + requiredWidth: number, + requiredHeight: number, + maintainAspectRatio: boolean = true, + normFunction: (pixelValue: number) => number = normalizePixelBetween0And1, +) => { + // Create an OffscreenCanvas and set its size. 
+ const offscreenCanvas = new OffscreenCanvas( + imageBitmap.width, + imageBitmap.height, + ); + const ctx = offscreenCanvas.getContext("2d"); + ctx.drawImage(imageBitmap, 0, 0, imageBitmap.width, imageBitmap.height); + const imageData = ctx.getImageData( + 0, + 0, + imageBitmap.width, + imageBitmap.height, + ); + const pixelData = imageData.data; + + let scaleW = requiredWidth / imageBitmap.width; + let scaleH = requiredHeight / imageBitmap.height; + if (maintainAspectRatio) { + const scale = Math.min( + requiredWidth / imageBitmap.width, + requiredHeight / imageBitmap.height, + ); + scaleW = scale; + scaleH = scale; + } + const scaledWidth = clamp( + Math.round(imageBitmap.width * scaleW), + 0, + requiredWidth, + ); + const scaledHeight = clamp( + Math.round(imageBitmap.height * scaleH), + 0, + requiredHeight, + ); + + const processedImage = new Float32Array( + 1 * 3 * requiredWidth * requiredHeight, + ); + + // Populate the Float32Array with normalized pixel values + let pixelIndex = 0; + const channelOffsetGreen = requiredHeight * requiredWidth; + const channelOffsetBlue = 2 * requiredHeight * requiredWidth; + for (let h = 0; h < requiredHeight; h++) { + for (let w = 0; w < requiredWidth; w++) { + let pixel: { + r: number; + g: number; + b: number; + }; + if (w >= scaledWidth || h >= scaledHeight) { + pixel = { r: 114, g: 114, b: 114 }; + } else { + pixel = getPixelBilinear( + w / scaleW, + h / scaleH, + pixelData, + imageBitmap.width, + imageBitmap.height, + ); + } + processedImage[pixelIndex] = normFunction(pixel.r); + processedImage[pixelIndex + channelOffsetGreen] = normFunction( + pixel.g, + ); + processedImage[pixelIndex + channelOffsetBlue] = normFunction( + pixel.b, + ); + pixelIndex++; + } + } + + return { + data: processedImage, + originalSize: { + width: imageBitmap.width, + height: imageBitmap.height, + }, + newSize: { width: scaledWidth, height: scaledHeight }, + }; +}; + +/** + * @param rowOutput A Float32Array of shape [25200, 16], where each row 
+ * represents a bounding box. + */ +const getFacesFromYOLOOutput = ( + rowOutput: Float32Array, + minScore: number, +): Array => { + const faces: Array = []; + // Iterate over each row. + for (let i = 0; i < rowOutput.length; i += 16) { + const score = rowOutput[i + 4]; + if (score < minScore) { + continue; + } + // The first 4 values represent the bounding box's coordinates: + // + // (x1, y1, x2, y2) + // + const xCenter = rowOutput[i]; + const yCenter = rowOutput[i + 1]; + const width = rowOutput[i + 2]; + const height = rowOutput[i + 3]; + const xMin = xCenter - width / 2.0; // topLeft + const yMin = yCenter - height / 2.0; // topLeft + + const leftEyeX = rowOutput[i + 5]; + const leftEyeY = rowOutput[i + 6]; + const rightEyeX = rowOutput[i + 7]; + const rightEyeY = rowOutput[i + 8]; + const noseX = rowOutput[i + 9]; + const noseY = rowOutput[i + 10]; + const leftMouthX = rowOutput[i + 11]; + const leftMouthY = rowOutput[i + 12]; + const rightMouthX = rowOutput[i + 13]; + const rightMouthY = rowOutput[i + 14]; + + const box = new Box({ + x: xMin, + y: yMin, + width: width, + height: height, + }); + const probability = score as number; + const landmarks = [ + new Point(leftEyeX, leftEyeY), + new Point(rightEyeX, rightEyeY), + new Point(noseX, noseY), + new Point(leftMouthX, leftMouthY), + new Point(rightMouthX, rightMouthY), + ]; + faces.push({ box, landmarks, probability }); + } + return faces; +}; + +export const getRelativeDetection = ( + faceDetection: FaceDetection, + dimensions: Dimensions, +): FaceDetection => { + const oldBox: Box = faceDetection.box; + const box = new Box({ + x: oldBox.x / dimensions.width, + y: oldBox.y / dimensions.height, + width: oldBox.width / dimensions.width, + height: oldBox.height / dimensions.height, + }); + const oldLandmarks: Point[] = faceDetection.landmarks; + const landmarks = oldLandmarks.map((l) => { + return new Point(l.x / dimensions.width, l.y / dimensions.height); + }); + const probability = 
faceDetection.probability; + return { box, landmarks, probability }; +}; + +/** + * Removes duplicate face detections from an array of detections. + * + * This function sorts the detections by their probability in descending order, + * then iterates over them. + * + * For each detection, it calculates the Euclidean distance to all other + * detections. + * + * If the distance is less than or equal to the specified threshold + * (`withinDistance`), the other detection is considered a duplicate and is + * removed. + * + * @param detections - An array of face detections to remove duplicates from. + * + * @param withinDistance - The maximum Euclidean distance between two detections + * for them to be considered duplicates. + * + * @returns An array of face detections with duplicates removed. + */ +const removeDuplicateDetections = ( + detections: Array, + withinDistance: number, +) => { + detections.sort((a, b) => b.probability - a.probability); + const isSelected = new Map(); + for (let i = 0; i < detections.length; i++) { + if (isSelected.get(i) === false) { + continue; + } + isSelected.set(i, true); + for (let j = i + 1; j < detections.length; j++) { + if (isSelected.get(j) === false) { + continue; + } + const centeri = getDetectionCenter(detections[i]); + const centerj = getDetectionCenter(detections[j]); + const dist = euclidean( + [centeri.x, centeri.y], + [centerj.x, centerj.y], + ); + if (dist <= withinDistance) { + isSelected.set(j, false); + } + } + } + + const uniques: Array = []; + for (let i = 0; i < detections.length; i++) { + isSelected.get(i) && uniques.push(detections[i]); + } + return uniques; +}; + +function getDetectionCenter(detection: FaceDetection) { + const center = new Point(0, 0); + // TODO: first 4 landmarks is applicable to blazeface only + // this needs to consider eyes, nose and mouth landmarks to take center + detection.landmarks?.slice(0, 4).forEach((p) => { + center.x += p.x; + center.y += p.y; + }); + + return new Point(center.x / 4, 
center.y / 4); +} + +function computeTransformToBox(inBox: Box, toBox: Box): Matrix { + return compose( + translate(toBox.x, toBox.y), + scale(toBox.width / inBox.width, toBox.height / inBox.height), + ); +} + +function transformPoint(point: Point, transform: Matrix) { + const txdPoint = applyToPoint(transform, point); + return new Point(txdPoint.x, txdPoint.y); +} + +function transformPoints(points: Point[], transform: Matrix) { + return points?.map((p) => transformPoint(p, transform)); +} + +function transformBox(box: Box, transform: Matrix) { + const topLeft = transformPoint(box.topLeft, transform); + const bottomRight = transformPoint(box.bottomRight, transform); + + return boxFromBoundingBox({ + left: topLeft.x, + top: topLeft.y, + right: bottomRight.x, + bottom: bottomRight.y, + }); +} diff --git a/web/apps/photos/src/services/face/embed.ts b/web/apps/photos/src/services/face/embed.ts new file mode 100644 index 000000000..2e0977ea1 --- /dev/null +++ b/web/apps/photos/src/services/face/embed.ts @@ -0,0 +1,26 @@ +import { workerBridge } from "@/next/worker/worker-bridge"; +import { FaceEmbedding } from "services/face/types"; + +export const mobileFaceNetFaceSize = 112; + +/** + * Compute embeddings for the given {@link faceData}. + * + * The model used is MobileFaceNet, running in an ONNX runtime. 
+ */ +export const faceEmbeddings = async ( + faceData: Float32Array, +): Promise> => { + const outputData = await workerBridge.faceEmbeddings(faceData); + + const embeddingSize = 192; + const embeddings = new Array( + outputData.length / embeddingSize, + ); + for (let i = 0; i < embeddings.length; i++) { + embeddings[i] = new Float32Array( + outputData.slice(i * embeddingSize, (i + 1) * embeddingSize), + ); + } + return embeddings; +}; diff --git a/web/apps/photos/src/services/face/f-index.ts b/web/apps/photos/src/services/face/f-index.ts new file mode 100644 index 000000000..db054ac29 --- /dev/null +++ b/web/apps/photos/src/services/face/f-index.ts @@ -0,0 +1,194 @@ +import { openCache } from "@/next/blob-cache"; +import log from "@/next/log"; +import { faceAlignment } from "services/face/align"; +import mlIDbStorage from "services/face/db"; +import { detectFaces, getRelativeDetection } from "services/face/detect"; +import { faceEmbeddings, mobileFaceNetFaceSize } from "services/face/embed"; +import { + DetectedFace, + Face, + MLSyncFileContext, + type FaceAlignment, +} from "services/face/types"; +import { imageBitmapToBlob, warpAffineFloat32List } from "utils/image"; +import { detectBlur } from "./blur"; +import { getFaceCrop } from "./crop"; +import { + fetchImageBitmap, + fetchImageBitmapForContext, + getFaceId, + getLocalFile, +} from "./image"; + +export const syncFileAnalyzeFaces = async (fileContext: MLSyncFileContext) => { + const { newMlFile } = fileContext; + const startTime = Date.now(); + + await syncFileFaceDetections(fileContext); + + if (newMlFile.faces && newMlFile.faces.length > 0) { + await syncFileFaceCrops(fileContext); + + const alignedFacesData = await syncFileFaceAlignments(fileContext); + + await syncFileFaceEmbeddings(fileContext, alignedFacesData); + + await syncFileFaceMakeRelativeDetections(fileContext); + } + log.debug( + () => + `Face detection for file ${fileContext.enteFile.id} took ${Math.round(Date.now() - startTime)} ms`, + ); 
+}; + +const syncFileFaceDetections = async (fileContext: MLSyncFileContext) => { + const { newMlFile } = fileContext; + newMlFile.faceDetectionMethod = { + value: "YoloFace", + version: 1, + }; + fileContext.newDetection = true; + const imageBitmap = await fetchImageBitmapForContext(fileContext); + const faceDetections = await detectFaces(imageBitmap); + // TODO: reenable faces filtering based on width + const detectedFaces = faceDetections?.map((detection) => { + return { + fileId: fileContext.enteFile.id, + detection, + } as DetectedFace; + }); + newMlFile.faces = detectedFaces?.map((detectedFace) => ({ + ...detectedFace, + id: getFaceId(detectedFace, newMlFile.imageDimensions), + })); + // ?.filter((f) => + // f.box.width > syncContext.config.faceDetection.minFaceSize + // ); + log.info("[MLService] Detected Faces: ", newMlFile.faces?.length); +}; + +const syncFileFaceCrops = async (fileContext: MLSyncFileContext) => { + const { newMlFile } = fileContext; + const imageBitmap = await fetchImageBitmapForContext(fileContext); + newMlFile.faceCropMethod = { + value: "ArcFace", + version: 1, + }; + + for (const face of newMlFile.faces) { + await saveFaceCrop(imageBitmap, face); + } +}; + +const syncFileFaceAlignments = async ( + fileContext: MLSyncFileContext, +): Promise => { + const { newMlFile } = fileContext; + newMlFile.faceAlignmentMethod = { + value: "ArcFace", + version: 1, + }; + fileContext.newAlignment = true; + const imageBitmap = + fileContext.imageBitmap || + (await fetchImageBitmapForContext(fileContext)); + + // Execute the face alignment calculations + for (const face of newMlFile.faces) { + face.alignment = faceAlignment(face.detection); + } + // Extract face images and convert to Float32Array + const faceAlignments = newMlFile.faces.map((f) => f.alignment); + const faceImages = await extractFaceImagesToFloat32( + faceAlignments, + mobileFaceNetFaceSize, + imageBitmap, + ); + const blurValues = detectBlur(faceImages, newMlFile.faces); + 
newMlFile.faces.forEach((f, i) => (f.blurValue = blurValues[i])); + + imageBitmap.close(); + log.info("[MLService] alignedFaces: ", newMlFile.faces?.length); + + return faceImages; +}; + +const syncFileFaceEmbeddings = async ( + fileContext: MLSyncFileContext, + alignedFacesInput: Float32Array, +) => { + const { newMlFile } = fileContext; + newMlFile.faceEmbeddingMethod = { + value: "MobileFaceNet", + version: 2, + }; + // TODO: when not storing face crops, image will be needed to extract faces + // fileContext.imageBitmap || + // (await this.getImageBitmap(fileContext)); + + const embeddings = await faceEmbeddings(alignedFacesInput); + newMlFile.faces.forEach((f, i) => (f.embedding = embeddings[i])); + + log.info("[MLService] facesWithEmbeddings: ", newMlFile.faces.length); +}; + +const syncFileFaceMakeRelativeDetections = async ( + fileContext: MLSyncFileContext, +) => { + const { newMlFile } = fileContext; + for (let i = 0; i < newMlFile.faces.length; i++) { + const face = newMlFile.faces[i]; + if (face.detection.box.x + face.detection.box.width < 2) continue; // Skip if somehow already relative + face.detection = getRelativeDetection( + face.detection, + newMlFile.imageDimensions, + ); + } +}; + +export const saveFaceCrop = async (imageBitmap: ImageBitmap, face: Face) => { + const faceCrop = getFaceCrop(imageBitmap, face.detection); + + const blob = await imageBitmapToBlob(faceCrop.image); + + const cache = await openCache("face-crops"); + await cache.put(face.id, blob); + + faceCrop.image.close(); + + return blob; +}; + +export const regenerateFaceCrop = async (faceID: string) => { + const fileID = Number(faceID.split("-")[0]); + const personFace = await mlIDbStorage.getFace(fileID, faceID); + if (!personFace) { + throw Error("Face not found"); + } + + const file = await getLocalFile(personFace.fileId); + const imageBitmap = await fetchImageBitmap(file); + return await saveFaceCrop(imageBitmap, personFace); +}; + +async function extractFaceImagesToFloat32( + 
faceAlignments: Array, + faceSize: number, + image: ImageBitmap, +): Promise { + const faceData = new Float32Array( + faceAlignments.length * faceSize * faceSize * 3, + ); + for (let i = 0; i < faceAlignments.length; i++) { + const alignedFace = faceAlignments[i]; + const faceDataOffset = i * faceSize * faceSize * 3; + warpAffineFloat32List( + image, + alignedFace, + faceSize, + faceData, + faceDataOffset, + ); + } + return faceData; +} diff --git a/web/apps/photos/src/worker/ml.worker.ts b/web/apps/photos/src/services/face/face.worker.ts similarity index 50% rename from web/apps/photos/src/worker/ml.worker.ts rename to web/apps/photos/src/services/face/face.worker.ts index ed46b7bd4..8083406bf 100644 --- a/web/apps/photos/src/worker/ml.worker.ts +++ b/web/apps/photos/src/services/face/face.worker.ts @@ -1,14 +1,10 @@ -import log from "@/next/log"; +import { APPS } from "@ente/shared/apps/constants"; import { expose } from "comlink"; +import downloadManager from "services/download"; import mlService from "services/machineLearning/machineLearningService"; import { EnteFile } from "types/file"; -import { MachineLearningWorker } from "types/machineLearning"; - -export class DedicatedMLWorker implements MachineLearningWorker { - constructor() { - log.info("DedicatedMLWorker constructor called"); - } +export class DedicatedMLWorker { public async closeLocalSyncContext() { return mlService.closeLocalSyncContext(); } @@ -19,23 +15,17 @@ export class DedicatedMLWorker implements MachineLearningWorker { enteFile: EnteFile, localFile: globalThis.File, ) { - return mlService.syncLocalFile(token, userID, enteFile, localFile); + mlService.syncLocalFile(token, userID, enteFile, localFile); } public async sync(token: string, userID: number) { + await downloadManager.init(APPS.PHOTOS, { token }); return mlService.sync(token, userID); } - public async regenerateFaceCrop( - token: string, - userID: number, - faceID: string, - ) { - return mlService.regenerateFaceCrop(token, userID, 
faceID); - } - - public close() { - self.close(); + public async regenerateFaceCrop(token: string, faceID: string) { + await downloadManager.init(APPS.PHOTOS, { token }); + return mlService.regenerateFaceCrop(faceID); } } diff --git a/web/apps/photos/src/services/face/geom.ts b/web/apps/photos/src/services/face/geom.ts new file mode 100644 index 000000000..556e2b309 --- /dev/null +++ b/web/apps/photos/src/services/face/geom.ts @@ -0,0 +1,92 @@ +export class Point { + public x: number; + public y: number; + + constructor(x: number, y: number) { + this.x = x; + this.y = y; + } +} + +export interface Dimensions { + width: number; + height: number; +} + +export interface IBoundingBox { + left: number; + top: number; + right: number; + bottom: number; +} + +export interface IRect { + x: number; + y: number; + width: number; + height: number; +} + +export function newBox(x: number, y: number, width: number, height: number) { + return new Box({ x, y, width, height }); +} + +export const boxFromBoundingBox = ({ + left, + top, + right, + bottom, +}: IBoundingBox) => { + return new Box({ + x: left, + y: top, + width: right - left, + height: bottom - top, + }); +}; + +export class Box implements IRect { + public x: number; + public y: number; + public width: number; + public height: number; + + constructor({ x, y, width, height }: IRect) { + this.x = x; + this.y = y; + this.width = width; + this.height = height; + } + + public get topLeft(): Point { + return new Point(this.x, this.y); + } + + public get bottomRight(): Point { + return new Point(this.x + this.width, this.y + this.height); + } + + public round(): Box { + const [x, y, width, height] = [ + this.x, + this.y, + this.width, + this.height, + ].map((val) => Math.round(val)); + return new Box({ x, y, width, height }); + } +} + +export function enlargeBox(box: Box, factor: number = 1.5) { + const center = new Point(box.x + box.width / 2, box.y + box.height / 2); + + const size = new Point(box.width, box.height); + const 
newHalfSize = new Point((factor * size.x) / 2, (factor * size.y) / 2); + + return boxFromBoundingBox({ + left: center.x - newHalfSize.x, + top: center.y - newHalfSize.y, + right: center.x + newHalfSize.x, + bottom: center.y + newHalfSize.y, + }); +} diff --git a/web/apps/photos/src/services/face/image.ts b/web/apps/photos/src/services/face/image.ts new file mode 100644 index 000000000..1ddcc70f6 --- /dev/null +++ b/web/apps/photos/src/services/face/image.ts @@ -0,0 +1,121 @@ +import { FILE_TYPE } from "@/media/file-type"; +import { decodeLivePhoto } from "@/media/live-photo"; +import log from "@/next/log"; +import DownloadManager from "services/download"; +import { Dimensions } from "services/face/geom"; +import { DetectedFace, MLSyncFileContext } from "services/face/types"; +import { getLocalFiles } from "services/fileService"; +import { EnteFile } from "types/file"; +import { getRenderableImage } from "utils/file"; +import { clamp } from "utils/image"; + +export const fetchImageBitmapForContext = async ( + fileContext: MLSyncFileContext, +) => { + if (fileContext.imageBitmap) { + return fileContext.imageBitmap; + } + if (fileContext.localFile) { + if (fileContext.enteFile.metadata.fileType !== FILE_TYPE.IMAGE) { + throw new Error("Local file of only image type is supported"); + } + fileContext.imageBitmap = await getLocalFileImageBitmap( + fileContext.enteFile, + fileContext.localFile, + ); + } else if ( + [FILE_TYPE.IMAGE, FILE_TYPE.LIVE_PHOTO].includes( + fileContext.enteFile.metadata.fileType, + ) + ) { + fileContext.imageBitmap = await fetchImageBitmap(fileContext.enteFile); + } else { + // TODO-ML(MR): We don't do it on videos, when will we ever come + // here? 
+ fileContext.imageBitmap = await getThumbnailImageBitmap( + fileContext.enteFile, + ); + } + + fileContext.newMlFile.imageSource = "Original"; + const { width, height } = fileContext.imageBitmap; + fileContext.newMlFile.imageDimensions = { width, height }; + + return fileContext.imageBitmap; +}; + +export async function getLocalFile(fileId: number) { + const localFiles = await getLocalFiles(); + return localFiles.find((f) => f.id === fileId); +} + +export function getFaceId(detectedFace: DetectedFace, imageDims: Dimensions) { + const xMin = clamp( + detectedFace.detection.box.x / imageDims.width, + 0.0, + 0.999999, + ) + .toFixed(5) + .substring(2); + const yMin = clamp( + detectedFace.detection.box.y / imageDims.height, + 0.0, + 0.999999, + ) + .toFixed(5) + .substring(2); + const xMax = clamp( + (detectedFace.detection.box.x + detectedFace.detection.box.width) / + imageDims.width, + 0.0, + 0.999999, + ) + .toFixed(5) + .substring(2); + const yMax = clamp( + (detectedFace.detection.box.y + detectedFace.detection.box.height) / + imageDims.height, + 0.0, + 0.999999, + ) + .toFixed(5) + .substring(2); + + const rawFaceID = `${xMin}_${yMin}_${xMax}_${yMax}`; + const faceID = `${detectedFace.fileId}_${rawFaceID}`; + + return faceID; +} + +export const fetchImageBitmap = async (file: EnteFile) => + fetchRenderableBlob(file).then(createImageBitmap); + +async function fetchRenderableBlob(file: EnteFile) { + const fileStream = await DownloadManager.getFile(file); + const fileBlob = await new Response(fileStream).blob(); + if (file.metadata.fileType === FILE_TYPE.IMAGE) { + return await getRenderableImage(file.metadata.title, fileBlob); + } else { + const { imageFileName, imageData } = await decodeLivePhoto( + file.metadata.title, + fileBlob, + ); + return await getRenderableImage(imageFileName, new Blob([imageData])); + } +} + +export async function getThumbnailImageBitmap(file: EnteFile) { + const thumb = await DownloadManager.getThumbnail(file); + log.info("[MLService] 
Got thumbnail: ", file.id.toString()); + + return createImageBitmap(new Blob([thumb])); +} + +export async function getLocalFileImageBitmap( + enteFile: EnteFile, + localFile: globalThis.File, +) { + let fileBlob = localFile as Blob; + fileBlob = await getRenderableImage(enteFile.metadata.title, fileBlob); + return createImageBitmap(fileBlob); +} diff --git a/web/apps/photos/src/services/face/index.ts b/web/apps/photos/src/services/face/index.ts new file mode 100644 index 000000000..86fa9ab20 --- /dev/null +++ b/web/apps/photos/src/services/face/index.ts @@ -0,0 +1,8 @@ +import { ComlinkWorker } from "@/next/worker/comlink-worker"; +import type { DedicatedMLWorker } from "services/face/face.worker"; + +const createFaceWebWorker = () => + new Worker(new URL("face.worker.ts", import.meta.url)); + +export const createFaceComlinkWorker = (name: string) => + new ComlinkWorker(name, createFaceWebWorker()); diff --git a/web/apps/photos/src/services/face/people.ts b/web/apps/photos/src/services/face/people.ts new file mode 100644 index 000000000..416ba9e4e --- /dev/null +++ b/web/apps/photos/src/services/face/people.ts @@ -0,0 +1,111 @@ +import log from "@/next/log"; +import mlIDbStorage from "services/face/db"; +import { Face, Person } from "services/face/types"; +import { type MLSyncContext } from "services/machineLearning/machineLearningService"; +import { clusterFaces } from "./cluster"; +import { saveFaceCrop } from "./f-index"; +import { fetchImageBitmap, getLocalFile } from "./image"; + +export const syncPeopleIndex = async (syncContext: MLSyncContext) => { + const filesVersion = await mlIDbStorage.getIndexVersion("files"); + if (filesVersion <= (await mlIDbStorage.getIndexVersion("people"))) { + return; + } + + // TODO: have faces addresable through fileId + faceId + // to avoid index based addressing, which is prone to wrong results + // one way could be to match nearest face within threshold in the file + const allFacesMap = + syncContext.allSyncedFacesMap ?? 
+ (syncContext.allSyncedFacesMap = await mlIDbStorage.getAllFacesMap()); + const allFaces = [...allFacesMap.values()].flat(); + + await runFaceClustering(syncContext, allFaces); + await syncPeopleFromClusters(syncContext, allFacesMap, allFaces); + + await mlIDbStorage.setIndexVersion("people", filesVersion); +}; + +const runFaceClustering = async ( + syncContext: MLSyncContext, + allFaces: Array, +) => { + // await this.init(); + + if (!allFaces || allFaces.length < 50) { + log.info( + `Skipping clustering since number of faces (${allFaces.length}) is less than the clustering threshold (50)`, + ); + return; + } + + log.info("Running clustering allFaces: ", allFaces.length); + syncContext.mlLibraryData.faceClusteringResults = await clusterFaces( + allFaces.map((f) => Array.from(f.embedding)), + ); + syncContext.mlLibraryData.faceClusteringMethod = { + value: "Hdbscan", + version: 1, + }; + log.info( + "[MLService] Got face clustering results: ", + JSON.stringify(syncContext.mlLibraryData.faceClusteringResults), + ); + + // syncContext.faceClustersWithNoise = { + // clusters: syncContext.faceClusteringResults.clusters.map( + // (faces) => ({ + // faces, + // }) + // ), + // noise: syncContext.faceClusteringResults.noise, + // }; +}; + +const syncPeopleFromClusters = async ( + syncContext: MLSyncContext, + allFacesMap: Map>, + allFaces: Array, +) => { + const clusters = syncContext.mlLibraryData.faceClusteringResults?.clusters; + if (!clusters || clusters.length < 1) { + return; + } + + for (const face of allFaces) { + face.personId = undefined; + } + await mlIDbStorage.clearAllPeople(); + for (const [index, cluster] of clusters.entries()) { + const faces = cluster.map((f) => allFaces[f]).filter((f) => f); + + // TODO: take default display face from last leaves of hdbscan clusters + const personFace = faces.reduce((best, face) => + face.detection.probability > best.detection.probability + ? 
face + : best, + ); + + if (personFace && !personFace.crop?.cacheKey) { + const file = await getLocalFile(personFace.fileId); + const imageBitmap = await fetchImageBitmap(file); + await saveFaceCrop(imageBitmap, personFace); + } + + const person: Person = { + id: index, + files: faces.map((f) => f.fileId), + displayFaceId: personFace?.id, + faceCropCacheKey: personFace?.crop?.cacheKey, + }; + + await mlIDbStorage.putPerson(person); + + faces.forEach((face) => { + face.personId = person.id; + }); + // log.info("Creating person: ", person, faces); + } + + await mlIDbStorage.updateFaces(allFacesMap); +}; diff --git a/web/apps/photos/src/services/face/types.ts b/web/apps/photos/src/services/face/types.ts new file mode 100644 index 000000000..99244bf61 --- /dev/null +++ b/web/apps/photos/src/services/face/types.ts @@ -0,0 +1,161 @@ +import type { ClusterFacesResult } from "services/face/cluster"; +import { Dimensions } from "services/face/geom"; +import { EnteFile } from "types/file"; +import { Box, Point } from "./geom"; + +export interface MLSyncResult { + nOutOfSyncFiles: number; + nSyncedFiles: number; + nSyncedFaces: number; + nFaceClusters: number; + nFaceNoise: number; + error?: Error; +} + +export declare type FaceDescriptor = Float32Array; + +export declare type Cluster = Array; + +export interface FacesCluster { + faces: Cluster; + summary?: FaceDescriptor; +} + +export interface FacesClustersWithNoise { + clusters: Array; + noise: Cluster; +} + +export interface NearestCluster { + cluster: FacesCluster; + distance: number; +} + +export declare type Landmark = Point; + +export declare type ImageType = "Original" | "Preview"; + +export declare type FaceDetectionMethod = "YoloFace"; + +export declare type FaceCropMethod = "ArcFace"; + +export declare type FaceAlignmentMethod = "ArcFace"; + +export declare type FaceEmbeddingMethod = "MobileFaceNet"; + +export declare type BlurDetectionMethod = "Laplacian"; + +export declare type ClusteringMethod = "Hdbscan" | 
"Dbscan"; + +export class AlignedBox { + box: Box; + rotation: number; +} + +export interface Versioned { + value: T; + version: number; +} + +export interface FaceDetection { + // box and landmarks is relative to image dimentions stored at mlFileData + box: Box; + landmarks?: Array; + probability?: number; +} + +export interface DetectedFace { + fileId: number; + detection: FaceDetection; +} + +export interface DetectedFaceWithId extends DetectedFace { + id: string; +} + +export interface FaceCrop { + image: ImageBitmap; + // imageBox is relative to image dimentions stored at mlFileData + imageBox: Box; +} + +export interface StoredFaceCrop { + cacheKey: string; + imageBox: Box; +} + +export interface CroppedFace extends DetectedFaceWithId { + crop?: StoredFaceCrop; +} + +export interface FaceAlignment { + // TODO: remove affine matrix as rotation, size and center + // are simple to store and use, affine matrix adds complexity while getting crop + affineMatrix: Array>; + rotation: number; + // size and center is relative to image dimentions stored at mlFileData + size: number; + center: Point; +} + +export interface AlignedFace extends CroppedFace { + alignment?: FaceAlignment; + blurValue?: number; +} + +export declare type FaceEmbedding = Float32Array; + +export interface FaceWithEmbedding extends AlignedFace { + embedding?: FaceEmbedding; +} + +export interface Face extends FaceWithEmbedding { + personId?: number; +} + +export interface Person { + id: number; + name?: string; + files: Array; + displayFaceId?: string; + faceCropCacheKey?: string; +} + +export interface MlFileData { + fileId: number; + faces?: Face[]; + imageSource?: ImageType; + imageDimensions?: Dimensions; + faceDetectionMethod?: Versioned; + faceCropMethod?: Versioned; + faceAlignmentMethod?: Versioned; + faceEmbeddingMethod?: Versioned; + mlVersion: number; + errorCount: number; + lastErrorMessage?: string; +} + +export interface MLSearchConfig { + enabled: boolean; +} + +export interface 
MLSyncFileContext { + enteFile: EnteFile; + localFile?: globalThis.File; + + oldMlFile?: MlFileData; + newMlFile?: MlFileData; + + imageBitmap?: ImageBitmap; + + newDetection?: boolean; + newAlignment?: boolean; +} + +export interface MLLibraryData { + faceClusteringMethod?: Versioned; + faceClusteringResults?: ClusterFacesResult; + faceClustersWithNoise?: FacesClustersWithNoise; +} + +export declare type MLIndex = "files" | "people"; diff --git a/web/apps/photos/src/services/ffmpeg.ts b/web/apps/photos/src/services/ffmpeg.ts index 4dfdb3f64..85dd5db39 100644 --- a/web/apps/photos/src/services/ffmpeg.ts +++ b/web/apps/photos/src/services/ffmpeg.ts @@ -9,6 +9,11 @@ import { } from "constants/ffmpeg"; import { NULL_LOCATION } from "constants/upload"; import type { ParsedExtractedMetadata } from "types/metadata"; +import { + readConvertToMP4Done, + readConvertToMP4Stream, + writeConvertToMP4Stream, +} from "utils/native-stream"; import type { DedicatedFFmpegWorker } from "worker/ffmpeg.worker"; import { toDataOrPathOrZipEntry, @@ -31,7 +36,7 @@ import { */ export const generateVideoThumbnailWeb = async (blob: Blob) => _generateVideoThumbnail((seekTime: number) => - ffmpegExecWeb(makeGenThumbnailCommand(seekTime), blob, "jpeg", 0), + ffmpegExecWeb(makeGenThumbnailCommand(seekTime), blob, "jpeg"), ); const _generateVideoThumbnail = async ( @@ -70,7 +75,6 @@ export const generateVideoThumbnailNative = async ( makeGenThumbnailCommand(seekTime), toDataOrPathOrZipEntry(desktopUploadItem), "jpeg", - 0, ), ); @@ -98,8 +102,8 @@ const makeGenThumbnailCommand = (seekTime: number) => [ * of videos that the user is uploading. * * @param uploadItem A {@link File}, or the absolute path to a file on the - * user's local filesytem. A path can only be provided when we're running in the - * context of our desktop app. + * user's local file sytem. A path can only be provided when we're running in + * the context of our desktop app. 
*/ export const extractVideoMetadata = async ( uploadItem: UploadItem, @@ -107,12 +111,11 @@ export const extractVideoMetadata = async ( const command = extractVideoMetadataCommand; const outputData = uploadItem instanceof File - ? await ffmpegExecWeb(command, uploadItem, "txt", 0) + ? await ffmpegExecWeb(command, uploadItem, "txt") : await electron.ffmpegExec( command, toDataOrPathOrZipEntry(uploadItem), "txt", - 0, ); return parseFFmpegExtractedMetadata(outputData); @@ -219,10 +222,9 @@ const ffmpegExecWeb = async ( command: string[], blob: Blob, outputFileExtension: string, - timeoutMs: number, ) => { const worker = await workerFactory.lazy(); - return await worker.exec(command, blob, outputFileExtension, timeoutMs); + return await worker.exec(command, blob, outputFileExtension); }; /** @@ -234,61 +236,46 @@ const ffmpegExecWeb = async ( * * @param blob The video blob. * - * @returns The mp4 video data. + * @returns The mp4 video blob. */ -export const convertToMP4 = async (blob: Blob) => - ffmpegExecNativeOrWeb( - [ +export const convertToMP4 = async (blob: Blob): Promise => { + const electron = globalThis.electron; + if (electron) { + return convertToMP4Native(electron, blob); + } else { + const command = [ ffmpegPathPlaceholder, "-i", inputPathPlaceholder, "-preset", "ultrafast", outputPathPlaceholder, - ], - blob, - "mp4", - 30 * 1000, - ); + ]; + return ffmpegExecWeb(command, blob, "mp4"); + } +}; -/** - * Run the given FFmpeg command using a native FFmpeg binary when we're running - * in the context of our desktop app, otherwise using the browser based wasm - * FFmpeg implemenation. - * - * See also: {@link ffmpegExecWeb}. 
- */ -const ffmpegExecNativeOrWeb = async ( - command: string[], - blob: Blob, - outputFileExtension: string, - timeoutMs: number, -) => { - const electron = globalThis.electron; - if (electron) - return electron.ffmpegExec( - command, - new Uint8Array(await blob.arrayBuffer()), - outputFileExtension, - timeoutMs, - ); - else return ffmpegExecWeb(command, blob, outputFileExtension, timeoutMs); +const convertToMP4Native = async (electron: Electron, blob: Blob) => { + const token = await writeConvertToMP4Stream(electron, blob); + const mp4Blob = await readConvertToMP4Stream(electron, token); + readConvertToMP4Done(electron, token); + return mp4Blob; }; /** Lazily create a singleton instance of our worker */ class WorkerFactory { private instance: Promise>; + private createComlinkWorker = () => + new ComlinkWorker( + "ffmpeg-worker", + new Worker(new URL("worker/ffmpeg.worker.ts", import.meta.url)), + ); + async lazy() { - if (!this.instance) this.instance = createComlinkWorker().remote; + if (!this.instance) this.instance = this.createComlinkWorker().remote; return this.instance; } } const workerFactory = new WorkerFactory(); - -const createComlinkWorker = () => - new ComlinkWorker( - "ffmpeg-worker", - new Worker(new URL("worker/ffmpeg.worker.ts", import.meta.url)), - ); diff --git a/web/apps/photos/src/services/heic-convert.ts b/web/apps/photos/src/services/heic-convert.ts index c2ea19839..2b37c3198 100644 --- a/web/apps/photos/src/services/heic-convert.ts +++ b/web/apps/photos/src/services/heic-convert.ts @@ -1,9 +1,10 @@ +import { createHEICConvertComlinkWorker } from "@/media/worker/heic-convert"; +import type { DedicatedHEICConvertWorker } from "@/media/worker/heic-convert.worker"; import log from "@/next/log"; import { ComlinkWorker } from "@/next/worker/comlink-worker"; import { CustomError } from "@ente/shared/error"; import { retryAsyncFunction } from "@ente/shared/utils"; import QueueProcessor from "@ente/shared/utils/queueProcessor"; -import { type 
DedicatedHEICConvertWorker } from "worker/heic-convert.worker"; /** * Convert a HEIC image to a JPEG. @@ -29,7 +30,7 @@ class HEICConverter { if (this.workerPool.length > 0) return; this.workerPool = []; for (let i = 0; i < WORKER_POOL_SIZE; i++) - this.workerPool.push(createComlinkWorker()); + this.workerPool.push(createHEICConvertComlinkWorker()); } async convert(fileBlob: Blob): Promise { @@ -79,7 +80,7 @@ class HEICConverter { } catch (e) { log.error("HEIC conversion failed", e); convertWorker.terminate(); - this.workerPool.push(createComlinkWorker()); + this.workerPool.push(createHEICConvertComlinkWorker()); throw e; } }, WAIT_TIME_BEFORE_NEXT_ATTEMPT_IN_MICROSECONDS), @@ -99,9 +100,3 @@ class HEICConverter { /** The singleton instance of {@link HEICConverter}. */ const converter = new HEICConverter(); - -const createComlinkWorker = () => - new ComlinkWorker( - "heic-convert-worker", - new Worker(new URL("worker/heic-convert.worker.ts", import.meta.url)), - ); diff --git a/web/apps/photos/src/services/logout.ts b/web/apps/photos/src/services/logout.ts new file mode 100644 index 000000000..a6b155c8c --- /dev/null +++ b/web/apps/photos/src/services/logout.ts @@ -0,0 +1,50 @@ +import log from "@/next/log"; +import { accountLogout } from "@ente/accounts/services/logout"; +import { clipService } from "services/clip-service"; +import DownloadManager from "./download"; +import exportService from "./export"; +import mlWorkManager from "./machineLearning/mlWorkManager"; + +/** + * Logout sequence for the photos app. + * + * This function is guaranteed not to throw any errors. + * + * See: [Note: Do not throw during logout]. 
+ */ +export const photosLogout = async () => { + await accountLogout(); + + try { + await DownloadManager.logout(); + } catch (e) { + log.error("Ignoring error during logout (download)", e); + } + + try { + await clipService.logout(); + } catch (e) { + log.error("Ignoring error during logout (CLIP)", e); + } + + const electron = globalThis.electron; + if (electron) { + try { + await mlWorkManager.logout(); + } catch (e) { + log.error("Ignoring error during logout (ML)", e); + } + + try { + exportService.disableContinuousExport(); + } catch (e) { + log.error("Ignoring error during logout (export)", e); + } + + try { + await electron?.logout(); + } catch (e) { + log.error("Ignoring error during logout (electron)", e); + } + } +}; diff --git a/web/apps/photos/src/services/machineLearning/arcfaceAlignmentService.ts b/web/apps/photos/src/services/machineLearning/arcfaceAlignmentService.ts deleted file mode 100644 index 99063b3f2..000000000 --- a/web/apps/photos/src/services/machineLearning/arcfaceAlignmentService.ts +++ /dev/null @@ -1,25 +0,0 @@ -import { - FaceAlignment, - FaceAlignmentMethod, - FaceAlignmentService, - FaceDetection, - Versioned, -} from "types/machineLearning"; -import { getArcfaceAlignment } from "utils/machineLearning/faceAlign"; - -class ArcfaceAlignmentService implements FaceAlignmentService { - public method: Versioned; - - constructor() { - this.method = { - value: "ArcFace", - version: 1, - }; - } - - public getFaceAlignment(faceDetection: FaceDetection): FaceAlignment { - return getArcfaceAlignment(faceDetection); - } -} - -export default new ArcfaceAlignmentService(); diff --git a/web/apps/photos/src/services/machineLearning/arcfaceCropService.ts b/web/apps/photos/src/services/machineLearning/arcfaceCropService.ts deleted file mode 100644 index cb6ccd029..000000000 --- a/web/apps/photos/src/services/machineLearning/arcfaceCropService.ts +++ /dev/null @@ -1,34 +0,0 @@ -import { - FaceCrop, - FaceCropConfig, - FaceCropMethod, - 
FaceCropService, - FaceDetection, - Versioned, -} from "types/machineLearning"; -import { getArcfaceAlignment } from "utils/machineLearning/faceAlign"; -import { getFaceCrop } from "utils/machineLearning/faceCrop"; - -class ArcFaceCropService implements FaceCropService { - public method: Versioned; - - constructor() { - this.method = { - value: "ArcFace", - version: 1, - }; - } - - public async getFaceCrop( - imageBitmap: ImageBitmap, - faceDetection: FaceDetection, - config: FaceCropConfig, - ): Promise { - const alignedFace = getArcfaceAlignment(faceDetection); - const faceCrop = getFaceCrop(imageBitmap, alignedFace, config); - - return faceCrop; - } -} - -export default new ArcFaceCropService(); diff --git a/web/apps/photos/src/services/machineLearning/clusteringService.ts b/web/apps/photos/src/services/machineLearning/clusteringService.ts deleted file mode 100644 index 03931b63b..000000000 --- a/web/apps/photos/src/services/machineLearning/clusteringService.ts +++ /dev/null @@ -1,88 +0,0 @@ -import { DBSCAN, KMEANS, OPTICS } from "density-clustering"; -import { Hdbscan } from "hdbscan"; -import { HdbscanInput } from "hdbscan/dist/types"; -import { - ClusteringConfig, - ClusteringInput, - ClusteringMethod, - ClusteringResults, - HdbscanResults, - Versioned, -} from "types/machineLearning"; - -class ClusteringService { - private dbscan: DBSCAN; - private optics: OPTICS; - private kmeans: KMEANS; - - constructor() { - this.dbscan = new DBSCAN(); - this.optics = new OPTICS(); - this.kmeans = new KMEANS(); - } - - public clusterUsingDBSCAN( - dataset: Array>, - epsilon: number = 1.0, - minPts: number = 2, - ): ClusteringResults { - // log.info("distanceFunction", DBSCAN._); - const clusters = this.dbscan.run(dataset, epsilon, minPts); - const noise = this.dbscan.noise; - return { clusters, noise }; - } - - public clusterUsingOPTICS( - dataset: Array>, - epsilon: number = 1.0, - minPts: number = 2, - ) { - const clusters = this.optics.run(dataset, epsilon, minPts); - 
return { clusters, noise: [] }; - } - - public clusterUsingKMEANS( - dataset: Array>, - numClusters: number = 5, - ) { - const clusters = this.kmeans.run(dataset, numClusters); - return { clusters, noise: [] }; - } - - public clusterUsingHdbscan(hdbscanInput: HdbscanInput): HdbscanResults { - if (hdbscanInput.input.length < 10) { - throw Error("too few samples to run Hdbscan"); - } - - const hdbscan = new Hdbscan(hdbscanInput); - const clusters = hdbscan.getClusters(); - const noise = hdbscan.getNoise(); - const debugInfo = hdbscan.getDebugInfo(); - - return { clusters, noise, debugInfo }; - } - - public cluster( - method: Versioned, - input: ClusteringInput, - config: ClusteringConfig, - ) { - if (method.value === "Hdbscan") { - return this.clusterUsingHdbscan({ - input, - minClusterSize: config.minClusterSize, - debug: config.generateDebugInfo, - }); - } else if (method.value === "Dbscan") { - return this.clusterUsingDBSCAN( - input, - config.maxDistanceInsideCluster, - config.minClusterSize, - ); - } else { - throw Error("Unknown clustering method: " + method.value); - } - } -} - -export default ClusteringService; diff --git a/web/apps/photos/src/services/machineLearning/dbscanClusteringService.ts b/web/apps/photos/src/services/machineLearning/dbscanClusteringService.ts deleted file mode 100644 index 33298eef3..000000000 --- a/web/apps/photos/src/services/machineLearning/dbscanClusteringService.ts +++ /dev/null @@ -1,37 +0,0 @@ -import { DBSCAN } from "density-clustering"; -import { - ClusteringConfig, - ClusteringInput, - ClusteringMethod, - ClusteringService, - HdbscanResults, - Versioned, -} from "types/machineLearning"; - -class DbscanClusteringService implements ClusteringService { - public method: Versioned; - - constructor() { - this.method = { - value: "Dbscan", - version: 1, - }; - } - - public async cluster( - input: ClusteringInput, - config: ClusteringConfig, - ): Promise { - // log.info('Clustering input: ', input); - const dbscan = new DBSCAN(); - 
const clusters = dbscan.run( - input, - config.clusterSelectionEpsilon, - config.minClusterSize, - ); - const noise = dbscan.noise; - return { clusters, noise }; - } -} - -export default new DbscanClusteringService(); diff --git a/web/apps/photos/src/services/machineLearning/faceService.ts b/web/apps/photos/src/services/machineLearning/faceService.ts deleted file mode 100644 index 1dedadf15..000000000 --- a/web/apps/photos/src/services/machineLearning/faceService.ts +++ /dev/null @@ -1,306 +0,0 @@ -import { openCache } from "@/next/blob-cache"; -import log from "@/next/log"; -import { - DetectedFace, - Face, - MLSyncContext, - MLSyncFileContext, -} from "types/machineLearning"; -import { imageBitmapToBlob } from "utils/image"; -import { - areFaceIdsSame, - extractFaceImagesToFloat32, - getFaceId, - getLocalFile, - getOriginalImageBitmap, - isDifferentOrOld, -} from "utils/machineLearning"; -import mlIDbStorage from "utils/storage/mlIDbStorage"; -import ReaderService from "./readerService"; - -class FaceService { - async syncFileFaceDetections( - syncContext: MLSyncContext, - fileContext: MLSyncFileContext, - ) { - const { oldMlFile, newMlFile } = fileContext; - if ( - !isDifferentOrOld( - oldMlFile?.faceDetectionMethod, - syncContext.faceDetectionService.method, - ) && - oldMlFile?.imageSource === syncContext.config.imageSource - ) { - newMlFile.faces = oldMlFile?.faces?.map((existingFace) => ({ - id: existingFace.id, - fileId: existingFace.fileId, - detection: existingFace.detection, - })); - - newMlFile.imageSource = oldMlFile.imageSource; - newMlFile.imageDimensions = oldMlFile.imageDimensions; - newMlFile.faceDetectionMethod = oldMlFile.faceDetectionMethod; - return; - } - - newMlFile.faceDetectionMethod = syncContext.faceDetectionService.method; - fileContext.newDetection = true; - const imageBitmap = await ReaderService.getImageBitmap( - syncContext, - fileContext, - ); - const timerId = `faceDetection-${fileContext.enteFile.id}`; - console.time(timerId); - 
const faceDetections = - await syncContext.faceDetectionService.detectFaces(imageBitmap); - console.timeEnd(timerId); - console.log("faceDetections: ", faceDetections?.length); - - // TODO: reenable faces filtering based on width - const detectedFaces = faceDetections?.map((detection) => { - return { - fileId: fileContext.enteFile.id, - detection, - } as DetectedFace; - }); - newMlFile.faces = detectedFaces?.map((detectedFace) => ({ - ...detectedFace, - id: getFaceId(detectedFace, newMlFile.imageDimensions), - })); - // ?.filter((f) => - // f.box.width > syncContext.config.faceDetection.minFaceSize - // ); - log.info("[MLService] Detected Faces: ", newMlFile.faces?.length); - } - - async syncFileFaceCrops( - syncContext: MLSyncContext, - fileContext: MLSyncFileContext, - ) { - const { oldMlFile, newMlFile } = fileContext; - if ( - // !syncContext.config.faceCrop.enabled || - !fileContext.newDetection && - !isDifferentOrOld( - oldMlFile?.faceCropMethod, - syncContext.faceCropService.method, - ) && - areFaceIdsSame(newMlFile.faces, oldMlFile?.faces) - ) { - for (const [index, face] of newMlFile.faces.entries()) { - face.crop = oldMlFile.faces[index].crop; - } - newMlFile.faceCropMethod = oldMlFile.faceCropMethod; - return; - } - - const imageBitmap = await ReaderService.getImageBitmap( - syncContext, - fileContext, - ); - newMlFile.faceCropMethod = syncContext.faceCropService.method; - - for (const face of newMlFile.faces) { - await this.saveFaceCrop(imageBitmap, face, syncContext); - } - } - - async syncFileFaceAlignments( - syncContext: MLSyncContext, - fileContext: MLSyncFileContext, - ): Promise { - const { oldMlFile, newMlFile } = fileContext; - if ( - !fileContext.newDetection && - !isDifferentOrOld( - oldMlFile?.faceAlignmentMethod, - syncContext.faceAlignmentService.method, - ) && - areFaceIdsSame(newMlFile.faces, oldMlFile?.faces) - ) { - for (const [index, face] of newMlFile.faces.entries()) { - face.alignment = oldMlFile.faces[index].alignment; - } - 
newMlFile.faceAlignmentMethod = oldMlFile.faceAlignmentMethod; - return; - } - - newMlFile.faceAlignmentMethod = syncContext.faceAlignmentService.method; - fileContext.newAlignment = true; - const imageBitmap = - fileContext.imageBitmap || - (await ReaderService.getImageBitmap(syncContext, fileContext)); - - // Execute the face alignment calculations - for (const face of newMlFile.faces) { - face.alignment = syncContext.faceAlignmentService.getFaceAlignment( - face.detection, - ); - } - // Extract face images and convert to Float32Array - const faceAlignments = newMlFile.faces.map((f) => f.alignment); - const faceImages = await extractFaceImagesToFloat32( - faceAlignments, - syncContext.faceEmbeddingService.faceSize, - imageBitmap, - ); - const blurValues = syncContext.blurDetectionService.detectBlur( - faceImages, - newMlFile.faces, - ); - newMlFile.faces.forEach((f, i) => (f.blurValue = blurValues[i])); - - imageBitmap.close(); - log.info("[MLService] alignedFaces: ", newMlFile.faces?.length); - - return faceImages; - } - - async syncFileFaceEmbeddings( - syncContext: MLSyncContext, - fileContext: MLSyncFileContext, - alignedFacesInput: Float32Array, - ) { - const { oldMlFile, newMlFile } = fileContext; - if ( - !fileContext.newAlignment && - !isDifferentOrOld( - oldMlFile?.faceEmbeddingMethod, - syncContext.faceEmbeddingService.method, - ) && - areFaceIdsSame(newMlFile.faces, oldMlFile?.faces) - ) { - for (const [index, face] of newMlFile.faces.entries()) { - face.embedding = oldMlFile.faces[index].embedding; - } - newMlFile.faceEmbeddingMethod = oldMlFile.faceEmbeddingMethod; - return; - } - - newMlFile.faceEmbeddingMethod = syncContext.faceEmbeddingService.method; - // TODO: when not storing face crops, image will be needed to extract faces - // fileContext.imageBitmap || - // (await this.getImageBitmap(syncContext, fileContext)); - - const embeddings = - await syncContext.faceEmbeddingService.getFaceEmbeddings( - alignedFacesInput, - ); - 
newMlFile.faces.forEach((f, i) => (f.embedding = embeddings[i])); - - log.info("[MLService] facesWithEmbeddings: ", newMlFile.faces.length); - } - - async syncFileFaceMakeRelativeDetections( - syncContext: MLSyncContext, - fileContext: MLSyncFileContext, - ) { - const { oldMlFile, newMlFile } = fileContext; - if ( - !fileContext.newAlignment && - !isDifferentOrOld( - oldMlFile?.faceEmbeddingMethod, - syncContext.faceEmbeddingService.method, - ) && - areFaceIdsSame(newMlFile.faces, oldMlFile?.faces) - ) { - return; - } - for (let i = 0; i < newMlFile.faces.length; i++) { - const face = newMlFile.faces[i]; - if (face.detection.box.x + face.detection.box.width < 2) continue; // Skip if somehow already relative - face.detection = - syncContext.faceDetectionService.getRelativeDetection( - face.detection, - newMlFile.imageDimensions, - ); - } - } - - async saveFaceCrop( - imageBitmap: ImageBitmap, - face: Face, - syncContext: MLSyncContext, - ) { - const faceCrop = await syncContext.faceCropService.getFaceCrop( - imageBitmap, - face.detection, - syncContext.config.faceCrop, - ); - - const blobOptions = syncContext.config.faceCrop.blobOptions; - const blob = await imageBitmapToBlob(faceCrop.image, blobOptions); - - const cache = await openCache("face-crops"); - await cache.put(face.id, blob); - - faceCrop.image.close(); - - return blob; - } - - async getAllSyncedFacesMap(syncContext: MLSyncContext) { - if (syncContext.allSyncedFacesMap) { - return syncContext.allSyncedFacesMap; - } - - syncContext.allSyncedFacesMap = await mlIDbStorage.getAllFacesMap(); - return syncContext.allSyncedFacesMap; - } - - public async runFaceClustering( - syncContext: MLSyncContext, - allFaces: Array, - ) { - // await this.init(); - - const clusteringConfig = syncContext.config.faceClustering; - - if (!allFaces || allFaces.length < clusteringConfig.minInputSize) { - log.info( - "[MLService] Too few faces to cluster, not running clustering: ", - allFaces.length, - ); - return; - } - - 
log.info("Running clustering allFaces: ", allFaces.length); - syncContext.mlLibraryData.faceClusteringResults = - await syncContext.faceClusteringService.cluster( - allFaces.map((f) => Array.from(f.embedding)), - syncContext.config.faceClustering, - ); - syncContext.mlLibraryData.faceClusteringMethod = - syncContext.faceClusteringService.method; - log.info( - "[MLService] Got face clustering results: ", - JSON.stringify(syncContext.mlLibraryData.faceClusteringResults), - ); - - // syncContext.faceClustersWithNoise = { - // clusters: syncContext.faceClusteringResults.clusters.map( - // (faces) => ({ - // faces, - // }) - // ), - // noise: syncContext.faceClusteringResults.noise, - // }; - } - - public async regenerateFaceCrop( - syncContext: MLSyncContext, - faceID: string, - ) { - const fileID = Number(faceID.split("-")[0]); - const personFace = await mlIDbStorage.getFace(fileID, faceID); - if (!personFace) { - throw Error("Face not found"); - } - - const file = await getLocalFile(personFace.fileId); - const imageBitmap = await getOriginalImageBitmap(file); - return await this.saveFaceCrop(imageBitmap, personFace, syncContext); - } -} - -export default new FaceService(); diff --git a/web/apps/photos/src/services/machineLearning/hdbscanClusteringService.ts b/web/apps/photos/src/services/machineLearning/hdbscanClusteringService.ts deleted file mode 100644 index 21e211825..000000000 --- a/web/apps/photos/src/services/machineLearning/hdbscanClusteringService.ts +++ /dev/null @@ -1,44 +0,0 @@ -import { Hdbscan } from "hdbscan"; -import { - ClusteringConfig, - ClusteringInput, - ClusteringMethod, - ClusteringService, - HdbscanResults, - Versioned, -} from "types/machineLearning"; - -class HdbscanClusteringService implements ClusteringService { - public method: Versioned; - - constructor() { - this.method = { - value: "Hdbscan", - version: 1, - }; - } - - public async cluster( - input: ClusteringInput, - config: ClusteringConfig, - ): Promise { - // log.info('Clustering 
input: ', input); - const hdbscan = new Hdbscan({ - input, - - minClusterSize: config.minClusterSize, - minSamples: config.minSamples, - clusterSelectionEpsilon: config.clusterSelectionEpsilon, - clusterSelectionMethod: config.clusterSelectionMethod, - debug: config.generateDebugInfo, - }); - - return { - clusters: hdbscan.getClusters(), - noise: hdbscan.getNoise(), - debugInfo: hdbscan.getDebugInfo(), - }; - } -} - -export default new HdbscanClusteringService(); diff --git a/web/apps/photos/src/services/machineLearning/laplacianBlurDetectionService.ts b/web/apps/photos/src/services/machineLearning/laplacianBlurDetectionService.ts deleted file mode 100644 index 3357e21cc..000000000 --- a/web/apps/photos/src/services/machineLearning/laplacianBlurDetectionService.ts +++ /dev/null @@ -1,211 +0,0 @@ -import { - BlurDetectionMethod, - BlurDetectionService, - Face, - Versioned, -} from "types/machineLearning"; -import { createGrayscaleIntMatrixFromNormalized2List } from "utils/image"; -import { mobileFaceNetFaceSize } from "./mobileFaceNetEmbeddingService"; - -class LaplacianBlurDetectionService implements BlurDetectionService { - public method: Versioned; - - public constructor() { - this.method = { - value: "Laplacian", - version: 1, - }; - } - - public detectBlur(alignedFaces: Float32Array, faces: Face[]): number[] { - const numFaces = Math.round( - alignedFaces.length / - (mobileFaceNetFaceSize * mobileFaceNetFaceSize * 3), - ); - const blurValues: number[] = []; - for (let i = 0; i < numFaces; i++) { - const face = faces[i]; - const direction = getFaceDirection(face); - const faceImage = createGrayscaleIntMatrixFromNormalized2List( - alignedFaces, - i, - ); - const laplacian = this.applyLaplacian(faceImage, direction); - const variance = this.calculateVariance(laplacian); - blurValues.push(variance); - } - return blurValues; - } - - private calculateVariance(matrix: number[][]): number { - const numRows = matrix.length; - const numCols = matrix[0].length; - const 
totalElements = numRows * numCols; - - // Calculate the mean - let mean: number = 0; - matrix.forEach((row) => { - row.forEach((value) => { - mean += value; - }); - }); - mean /= totalElements; - - // Calculate the variance - let variance: number = 0; - matrix.forEach((row) => { - row.forEach((value) => { - const diff: number = value - mean; - variance += diff * diff; - }); - }); - variance /= totalElements; - - return variance; - } - - private padImage( - image: number[][], - removeSideColumns: number = 56, - direction: FaceDirection = "straight", - ): number[][] { - // Exception is removeSideColumns is not even - if (removeSideColumns % 2 != 0) { - throw new Error("removeSideColumns must be even"); - } - const numRows = image.length; - const numCols = image[0].length; - const paddedNumCols = numCols + 2 - removeSideColumns; - const paddedNumRows = numRows + 2; - - // Create a new matrix with extra padding - const paddedImage: number[][] = Array.from( - { length: paddedNumRows }, - () => new Array(paddedNumCols).fill(0), - ); - - // Copy original image into the center of the padded image - if (direction === "straight") { - for (let i = 0; i < numRows; i++) { - for (let j = 0; j < paddedNumCols - 2; j++) { - paddedImage[i + 1][j + 1] = - image[i][j + Math.round(removeSideColumns / 2)]; - } - } - } // If the face is facing left, we only take the right side of the face image - else if (direction === "left") { - for (let i = 0; i < numRows; i++) { - for (let j = 0; j < paddedNumCols - 2; j++) { - paddedImage[i + 1][j + 1] = image[i][j + removeSideColumns]; - } - } - } // If the face is facing right, we only take the left side of the face image - else if (direction === "right") { - for (let i = 0; i < numRows; i++) { - for (let j = 0; j < paddedNumCols - 2; j++) { - paddedImage[i + 1][j + 1] = image[i][j]; - } - } - } - - // Reflect padding - // Top and bottom rows - for (let j = 1; j <= paddedNumCols - 2; j++) { - paddedImage[0][j] = paddedImage[2][j]; // Top row - 
paddedImage[numRows + 1][j] = paddedImage[numRows - 1][j]; // Bottom row - } - // Left and right columns - for (let i = 0; i < numRows + 2; i++) { - paddedImage[i][0] = paddedImage[i][2]; // Left column - paddedImage[i][paddedNumCols - 1] = - paddedImage[i][paddedNumCols - 3]; // Right column - } - - return paddedImage; - } - - private applyLaplacian( - image: number[][], - direction: FaceDirection = "straight", - ): number[][] { - const paddedImage: number[][] = this.padImage( - image, - undefined, - direction, - ); - const numRows = paddedImage.length - 2; - const numCols = paddedImage[0].length - 2; - - // Create an output image initialized to 0 - const outputImage: number[][] = Array.from({ length: numRows }, () => - new Array(numCols).fill(0), - ); - - // Define the Laplacian kernel - const kernel: number[][] = [ - [0, 1, 0], - [1, -4, 1], - [0, 1, 0], - ]; - - // Apply the kernel to each pixel - for (let i = 0; i < numRows; i++) { - for (let j = 0; j < numCols; j++) { - let sum = 0; - for (let ki = 0; ki < 3; ki++) { - for (let kj = 0; kj < 3; kj++) { - sum += paddedImage[i + ki][j + kj] * kernel[ki][kj]; - } - } - // Adjust the output value if necessary (e.g., clipping) - outputImage[i][j] = sum; - } - } - - return outputImage; - } -} - -export default new LaplacianBlurDetectionService(); - -type FaceDirection = "left" | "right" | "straight"; - -const getFaceDirection = (face: Face): FaceDirection => { - const landmarks = face.detection.landmarks; - const leftEye = landmarks[0]; - const rightEye = landmarks[1]; - const nose = landmarks[2]; - const leftMouth = landmarks[3]; - const rightMouth = landmarks[4]; - - const eyeDistanceX = Math.abs(rightEye.x - leftEye.x); - const eyeDistanceY = Math.abs(rightEye.y - leftEye.y); - const mouthDistanceY = Math.abs(rightMouth.y - leftMouth.y); - - const faceIsUpright = - Math.max(leftEye.y, rightEye.y) + 0.5 * eyeDistanceY < nose.y && - nose.y + 0.5 * mouthDistanceY < Math.min(leftMouth.y, rightMouth.y); - - const 
noseStickingOutLeft = - nose.x < Math.min(leftEye.x, rightEye.x) && - nose.x < Math.min(leftMouth.x, rightMouth.x); - - const noseStickingOutRight = - nose.x > Math.max(leftEye.x, rightEye.x) && - nose.x > Math.max(leftMouth.x, rightMouth.x); - - const noseCloseToLeftEye = - Math.abs(nose.x - leftEye.x) < 0.2 * eyeDistanceX; - const noseCloseToRightEye = - Math.abs(nose.x - rightEye.x) < 0.2 * eyeDistanceX; - - // if (faceIsUpright && (noseStickingOutLeft || noseCloseToLeftEye)) { - if (noseStickingOutLeft || (faceIsUpright && noseCloseToLeftEye)) { - return "left"; - // } else if (faceIsUpright && (noseStickingOutRight || noseCloseToRightEye)) { - } else if (noseStickingOutRight || (faceIsUpright && noseCloseToRightEye)) { - return "right"; - } - - return "straight"; -}; diff --git a/web/apps/photos/src/services/machineLearning/machineLearningFactory.ts b/web/apps/photos/src/services/machineLearning/machineLearningFactory.ts deleted file mode 100644 index 991ae6808..000000000 --- a/web/apps/photos/src/services/machineLearning/machineLearningFactory.ts +++ /dev/null @@ -1,216 +0,0 @@ -import { haveWindow } from "@/next/env"; -import log from "@/next/log"; -import { ComlinkWorker } from "@/next/worker/comlink-worker"; -import { getDedicatedCryptoWorker } from "@ente/shared/crypto"; -import { DedicatedCryptoWorker } from "@ente/shared/crypto/internal/crypto.worker"; -import PQueue from "p-queue"; -import { EnteFile } from "types/file"; -import { - BlurDetectionMethod, - BlurDetectionService, - ClusteringMethod, - ClusteringService, - Face, - FaceAlignmentMethod, - FaceAlignmentService, - FaceCropMethod, - FaceCropService, - FaceDetectionMethod, - FaceDetectionService, - FaceEmbeddingMethod, - FaceEmbeddingService, - MLLibraryData, - MLSyncConfig, - MLSyncContext, -} from "types/machineLearning"; -import { logQueueStats } from "utils/machineLearning"; -import arcfaceAlignmentService from "./arcfaceAlignmentService"; -import arcfaceCropService from 
"./arcfaceCropService"; -import dbscanClusteringService from "./dbscanClusteringService"; -import hdbscanClusteringService from "./hdbscanClusteringService"; -import laplacianBlurDetectionService from "./laplacianBlurDetectionService"; -import mobileFaceNetEmbeddingService from "./mobileFaceNetEmbeddingService"; -import yoloFaceDetectionService from "./yoloFaceDetectionService"; - -export class MLFactory { - public static getFaceDetectionService( - method: FaceDetectionMethod, - ): FaceDetectionService { - if (method === "YoloFace") { - return yoloFaceDetectionService; - } - - throw Error("Unknon face detection method: " + method); - } - - public static getFaceCropService(method: FaceCropMethod) { - if (method === "ArcFace") { - return arcfaceCropService; - } - - throw Error("Unknon face crop method: " + method); - } - - public static getFaceAlignmentService( - method: FaceAlignmentMethod, - ): FaceAlignmentService { - if (method === "ArcFace") { - return arcfaceAlignmentService; - } - - throw Error("Unknon face alignment method: " + method); - } - - public static getBlurDetectionService( - method: BlurDetectionMethod, - ): BlurDetectionService { - if (method === "Laplacian") { - return laplacianBlurDetectionService; - } - - throw Error("Unknon blur detection method: " + method); - } - - public static getFaceEmbeddingService( - method: FaceEmbeddingMethod, - ): FaceEmbeddingService { - if (method === "MobileFaceNet") { - return mobileFaceNetEmbeddingService; - } - - throw Error("Unknon face embedding method: " + method); - } - - public static getClusteringService( - method: ClusteringMethod, - ): ClusteringService { - if (method === "Hdbscan") { - return hdbscanClusteringService; - } - if (method === "Dbscan") { - return dbscanClusteringService; - } - - throw Error("Unknon clustering method: " + method); - } - - public static getMLSyncContext( - token: string, - userID: number, - config: MLSyncConfig, - shouldUpdateMLVersion: boolean = true, - ) { - return new 
LocalMLSyncContext( - token, - userID, - config, - shouldUpdateMLVersion, - ); - } -} - -export class LocalMLSyncContext implements MLSyncContext { - public token: string; - public userID: number; - public config: MLSyncConfig; - public shouldUpdateMLVersion: boolean; - - public faceDetectionService: FaceDetectionService; - public faceCropService: FaceCropService; - public faceAlignmentService: FaceAlignmentService; - public blurDetectionService: BlurDetectionService; - public faceEmbeddingService: FaceEmbeddingService; - public faceClusteringService: ClusteringService; - - public localFilesMap: Map; - public outOfSyncFiles: EnteFile[]; - public nSyncedFiles: number; - public nSyncedFaces: number; - public allSyncedFacesMap?: Map>; - - public error?: Error; - - public mlLibraryData: MLLibraryData; - - public syncQueue: PQueue; - // TODO: wheather to limit concurrent downloads - // private downloadQueue: PQueue; - - private concurrency: number; - private comlinkCryptoWorker: Array< - ComlinkWorker - >; - private enteWorkers: Array; - - constructor( - token: string, - userID: number, - config: MLSyncConfig, - shouldUpdateMLVersion: boolean = true, - concurrency?: number, - ) { - this.token = token; - this.userID = userID; - this.config = config; - this.shouldUpdateMLVersion = shouldUpdateMLVersion; - - this.faceDetectionService = MLFactory.getFaceDetectionService( - this.config.faceDetection.method, - ); - this.faceCropService = MLFactory.getFaceCropService( - this.config.faceCrop.method, - ); - this.faceAlignmentService = MLFactory.getFaceAlignmentService( - this.config.faceAlignment.method, - ); - this.blurDetectionService = MLFactory.getBlurDetectionService( - this.config.blurDetection.method, - ); - this.faceEmbeddingService = MLFactory.getFaceEmbeddingService( - this.config.faceEmbedding.method, - ); - this.faceClusteringService = MLFactory.getClusteringService( - this.config.faceClustering.method, - ); - - this.outOfSyncFiles = []; - this.nSyncedFiles = 0; - 
this.nSyncedFaces = 0; - - this.concurrency = concurrency ?? getConcurrency(); - - log.info("Using concurrency: ", this.concurrency); - // timeout is added on downloads - // timeout on queue will keep the operation open till worker is terminated - this.syncQueue = new PQueue({ concurrency: this.concurrency }); - logQueueStats(this.syncQueue, "sync"); - // this.downloadQueue = new PQueue({ concurrency: 1 }); - // logQueueStats(this.downloadQueue, 'download'); - - this.comlinkCryptoWorker = new Array(this.concurrency); - this.enteWorkers = new Array(this.concurrency); - } - - public async getEnteWorker(id: number): Promise { - const wid = id % this.enteWorkers.length; - console.log("getEnteWorker: ", id, wid); - if (!this.enteWorkers[wid]) { - this.comlinkCryptoWorker[wid] = getDedicatedCryptoWorker(); - this.enteWorkers[wid] = await this.comlinkCryptoWorker[wid].remote; - } - - return this.enteWorkers[wid]; - } - - public async dispose() { - this.localFilesMap = undefined; - await this.syncQueue.onIdle(); - this.syncQueue.removeAllListeners(); - for (const enteComlinkWorker of this.comlinkCryptoWorker) { - enteComlinkWorker?.terminate(); - } - } -} - -export const getConcurrency = () => - haveWindow() && Math.max(2, Math.ceil(navigator.hardwareConcurrency / 2)); diff --git a/web/apps/photos/src/services/machineLearning/machineLearningService.ts b/web/apps/photos/src/services/machineLearning/machineLearningService.ts index 03a3b7e2c..43e0459ce 100644 --- a/web/apps/photos/src/services/machineLearning/machineLearningService.ts +++ b/web/apps/photos/src/services/machineLearning/machineLearningService.ts @@ -1,25 +1,151 @@ +import { haveWindow } from "@/next/env"; import log from "@/next/log"; -import { APPS } from "@ente/shared/apps/constants"; -import ComlinkCryptoWorker from "@ente/shared/crypto"; +import { ComlinkWorker } from "@/next/worker/comlink-worker"; +import ComlinkCryptoWorker, { + getDedicatedCryptoWorker, +} from "@ente/shared/crypto"; +import { 
DedicatedCryptoWorker } from "@ente/shared/crypto/internal/crypto.worker"; import { CustomError, parseUploadErrorCodes } from "@ente/shared/error"; -import { MAX_ML_SYNC_ERROR_COUNT } from "constants/mlConfig"; -import downloadManager from "services/download"; +import PQueue from "p-queue"; import { putEmbedding } from "services/embeddingService"; -import { getLocalFiles } from "services/fileService"; -import { EnteFile } from "types/file"; +import mlIDbStorage, { ML_SEARCH_CONFIG_NAME } from "services/face/db"; import { - MLSyncContext, + Face, + FaceDetection, + Landmark, + MLLibraryData, + MLSearchConfig, MLSyncFileContext, MLSyncResult, MlFileData, -} from "types/machineLearning"; -import { getMLSyncConfig } from "utils/machineLearning/config"; -import { LocalFileMlDataToServerFileMl } from "utils/machineLearning/mldataMappers"; -import mlIDbStorage from "utils/storage/mlIDbStorage"; -import FaceService from "./faceService"; -import { MLFactory } from "./machineLearningFactory"; -import PeopleService from "./peopleService"; -import ReaderService from "./readerService"; +} from "services/face/types"; +import { getLocalFiles } from "services/fileService"; +import { EnteFile } from "types/file"; +import { isInternalUserForML } from "utils/user"; +import { regenerateFaceCrop, syncFileAnalyzeFaces } from "../face/f-index"; +import { fetchImageBitmapForContext } from "../face/image"; +import { syncPeopleIndex } from "../face/people"; + +/** + * TODO-ML(MR): What and why. + * Also, needs to be 1 (in sync with mobile) when we move out of beta. 
+ */ +export const defaultMLVersion = 3; + +const batchSize = 200; + +export const MAX_ML_SYNC_ERROR_COUNT = 1; + +export const DEFAULT_ML_SEARCH_CONFIG: MLSearchConfig = { + enabled: false, +}; + +export async function getMLSearchConfig() { + if (isInternalUserForML()) { + return mlIDbStorage.getConfig( + ML_SEARCH_CONFIG_NAME, + DEFAULT_ML_SEARCH_CONFIG, + ); + } + // Force disabled for everyone else while we finalize it to avoid redundant + // reindexing for users. + return DEFAULT_ML_SEARCH_CONFIG; +} + +export async function updateMLSearchConfig(newConfig: MLSearchConfig) { + return mlIDbStorage.putConfig(ML_SEARCH_CONFIG_NAME, newConfig); +} + +export interface MLSyncContext { + token: string; + userID: number; + + localFilesMap: Map; + outOfSyncFiles: EnteFile[]; + nSyncedFiles: number; + nSyncedFaces: number; + allSyncedFacesMap?: Map>; + + error?: Error; + + // oldMLLibraryData: MLLibraryData; + mlLibraryData: MLLibraryData; + + syncQueue: PQueue; + + getEnteWorker(id: number): Promise; + dispose(): Promise; +} + +export class LocalMLSyncContext implements MLSyncContext { + public token: string; + public userID: number; + + public localFilesMap: Map; + public outOfSyncFiles: EnteFile[]; + public nSyncedFiles: number; + public nSyncedFaces: number; + public allSyncedFacesMap?: Map>; + + public error?: Error; + + public mlLibraryData: MLLibraryData; + + public syncQueue: PQueue; + // TODO: wheather to limit concurrent downloads + // private downloadQueue: PQueue; + + private concurrency: number; + private comlinkCryptoWorker: Array< + ComlinkWorker + >; + private enteWorkers: Array; + + constructor(token: string, userID: number, concurrency?: number) { + this.token = token; + this.userID = userID; + + this.outOfSyncFiles = []; + this.nSyncedFiles = 0; + this.nSyncedFaces = 0; + + this.concurrency = concurrency ?? 
getConcurrency(); + + log.info("Using concurrency: ", this.concurrency); + // timeout is added on downloads + // timeout on queue will keep the operation open till worker is terminated + this.syncQueue = new PQueue({ concurrency: this.concurrency }); + logQueueStats(this.syncQueue, "sync"); + // this.downloadQueue = new PQueue({ concurrency: 1 }); + // logQueueStats(this.downloadQueue, 'download'); + + this.comlinkCryptoWorker = new Array(this.concurrency); + this.enteWorkers = new Array(this.concurrency); + } + + public async getEnteWorker(id: number): Promise { + const wid = id % this.enteWorkers.length; + console.log("getEnteWorker: ", id, wid); + if (!this.enteWorkers[wid]) { + this.comlinkCryptoWorker[wid] = getDedicatedCryptoWorker(); + this.enteWorkers[wid] = await this.comlinkCryptoWorker[wid].remote; + } + + return this.enteWorkers[wid]; + } + + public async dispose() { + this.localFilesMap = undefined; + await this.syncQueue.onIdle(); + this.syncQueue.removeAllListeners(); + for (const enteComlinkWorker of this.comlinkCryptoWorker) { + enteComlinkWorker?.terminate(); + } + } +} + +export const getConcurrency = () => + haveWindow() && Math.max(2, Math.ceil(navigator.hardwareConcurrency / 2)); class MachineLearningService { private localSyncContext: Promise; @@ -30,8 +156,6 @@ class MachineLearningService { throw Error("Token needed by ml service to sync file"); } - await downloadManager.init(APPS.PHOTOS, { token }); - const syncContext = await this.getSyncContext(token, userID); await this.syncLocalFiles(syncContext); @@ -42,15 +166,19 @@ class MachineLearningService { await this.syncFiles(syncContext); } - // TODO: running index before all files are on latest ml version - // may be need to just take synced files on latest ml version for indexing + // TODO-ML(MR): Forced disable clustering. It doesn't currently work, + // need to finalize it before we move out of beta. 
+ // + // > Error: Failed to execute 'transferToImageBitmap' on + // > 'OffscreenCanvas': ImageBitmap construction failed + /* if ( syncContext.outOfSyncFiles.length <= 0 || - (syncContext.nSyncedFiles === syncContext.config.batchSize && - Math.random() < 0.2) + (syncContext.nSyncedFiles === batchSize && Math.random() < 0) ) { await this.syncIndex(syncContext); } + */ const mlSyncResult: MLSyncResult = { nOutOfSyncFiles: syncContext.outOfSyncFiles.length, @@ -68,14 +196,8 @@ class MachineLearningService { return mlSyncResult; } - public async regenerateFaceCrop( - token: string, - userID: number, - faceID: string, - ) { - await downloadManager.init(APPS.PHOTOS, { token }); - const syncContext = await this.getSyncContext(token, userID); - return FaceService.regenerateFaceCrop(syncContext, faceID); + public async regenerateFaceCrop(faceID: string) { + return regenerateFaceCrop(faceID); } private newMlData(fileId: number) { @@ -153,8 +275,8 @@ class MachineLearningService { private async getOutOfSyncFiles(syncContext: MLSyncContext) { const startTime = Date.now(); const fileIds = await mlIDbStorage.getFileIds( - syncContext.config.batchSize, - syncContext.config.mlVersion, + batchSize, + defaultMLVersion, MAX_ML_SYNC_ERROR_COUNT, ); @@ -200,9 +322,10 @@ class MachineLearningService { if (!this.syncContext) { log.info("Creating syncContext"); - this.syncContext = getMLSyncConfig().then((mlSyncConfig) => - MLFactory.getMLSyncContext(token, userID, mlSyncConfig, true), - ); + // TODO-ML(MR): Keep as promise for now. + this.syncContext = new Promise((resolve) => { + resolve(new LocalMLSyncContext(token, userID)); + }); } else { log.info("reusing existing syncContext"); } @@ -210,11 +333,13 @@ class MachineLearningService { } private async getLocalSyncContext(token: string, userID: number) { + // TODO-ML(MR): This is updating the file ML version. verify. 
if (!this.localSyncContext) { log.info("Creating localSyncContext"); - this.localSyncContext = getMLSyncConfig().then((mlSyncConfig) => - MLFactory.getMLSyncContext(token, userID, mlSyncConfig, false), - ); + // TODO-ML(MR): + this.localSyncContext = new Promise((resolve) => { + resolve(new LocalMLSyncContext(token, userID)); + }); } else { log.info("reusing existing localSyncContext"); } @@ -235,24 +360,22 @@ class MachineLearningService { userID: number, enteFile: EnteFile, localFile?: globalThis.File, - ): Promise { + ) { const syncContext = await this.getLocalSyncContext(token, userID); try { - const mlFileData = await this.syncFileWithErrorHandler( + await this.syncFileWithErrorHandler( syncContext, enteFile, localFile, ); - if (syncContext.nSyncedFiles >= syncContext.config.batchSize) { + if (syncContext.nSyncedFiles >= batchSize) { await this.closeLocalSyncContext(); } // await syncContext.dispose(); - return mlFileData; } catch (e) { console.error("Error while syncing local file: ", enteFile.id, e); - return e; } } @@ -260,16 +383,12 @@ class MachineLearningService { syncContext: MLSyncContext, enteFile: EnteFile, localFile?: globalThis.File, - ): Promise { + ) { try { console.log( `Indexing ${enteFile.title ?? 
""} ${enteFile.id}`, ); - const mlFileData = await this.syncFile( - syncContext, - enteFile, - localFile, - ); + const mlFileData = await this.syncFile(enteFile, localFile); syncContext.nSyncedFaces += mlFileData.faces?.length || 0; syncContext.nSyncedFiles += 1; return mlFileData; @@ -302,35 +421,20 @@ class MachineLearningService { } } - private async syncFile( - syncContext: MLSyncContext, - enteFile: EnteFile, - localFile?: globalThis.File, - ) { - console.log("Syncing for file" + enteFile.title); + private async syncFile(enteFile: EnteFile, localFile?: globalThis.File) { + log.debug(() => ({ a: "Syncing file", enteFile })); const fileContext: MLSyncFileContext = { enteFile, localFile }; - const oldMlFile = - (fileContext.oldMlFile = await this.getMLFileData(enteFile.id)) ?? - this.newMlData(enteFile.id); - if ( - fileContext.oldMlFile?.mlVersion === syncContext.config.mlVersion - // TODO: reset mlversion of all files when user changes image source - ) { - return fileContext.oldMlFile; + const oldMlFile = await this.getMLFileData(enteFile.id); + if (oldMlFile && oldMlFile.mlVersion) { + return oldMlFile; } - const newMlFile = (fileContext.newMlFile = this.newMlData(enteFile.id)); - if (syncContext.shouldUpdateMLVersion) { - newMlFile.mlVersion = syncContext.config.mlVersion; - } else if (fileContext.oldMlFile?.mlVersion) { - newMlFile.mlVersion = fileContext.oldMlFile.mlVersion; - } + const newMlFile = (fileContext.newMlFile = this.newMlData(enteFile.id)); + newMlFile.mlVersion = defaultMLVersion; try { - await ReaderService.getImageBitmap(syncContext, fileContext); - await Promise.all([ - this.syncFileAnalyzeFaces(syncContext, fileContext), - ]); + await fetchImageBitmapForContext(fileContext); + await syncFileAnalyzeFaces(fileContext); newMlFile.errorCount = 0; newMlFile.lastErrorMessage = undefined; await this.persistOnServer(newMlFile, enteFile); @@ -348,7 +452,11 @@ class MachineLearningService { private async persistOnServer(mlFileData: MlFileData, 
enteFile: EnteFile) { const serverMl = LocalFileMlDataToServerFileMl(mlFileData); - log.info(mlFileData); + log.debug(() => ({ t: "Local ML file data", mlFileData })); + log.debug(() => ({ + t: "Uploaded ML file data", + d: JSON.stringify(serverMl), + })); const comlinkCryptoWorker = await ComlinkCryptoWorker.getInstance(); const { file: encryptedEmbeddingData } = @@ -400,44 +508,168 @@ class MachineLearningService { public async syncIndex(syncContext: MLSyncContext) { await this.getMLLibraryData(syncContext); - await PeopleService.syncPeopleIndex(syncContext); + // TODO-ML(MR): Ensure this doesn't run until fixed. + await syncPeopleIndex(syncContext); await this.persistMLLibraryData(syncContext); } - - private async syncFileAnalyzeFaces( - syncContext: MLSyncContext, - fileContext: MLSyncFileContext, - ) { - const { newMlFile } = fileContext; - const startTime = Date.now(); - await FaceService.syncFileFaceDetections(syncContext, fileContext); - - if (newMlFile.faces && newMlFile.faces.length > 0) { - await FaceService.syncFileFaceCrops(syncContext, fileContext); - - const alignedFacesData = await FaceService.syncFileFaceAlignments( - syncContext, - fileContext, - ); - - await FaceService.syncFileFaceEmbeddings( - syncContext, - fileContext, - alignedFacesData, - ); - - await FaceService.syncFileFaceMakeRelativeDetections( - syncContext, - fileContext, - ); - } - log.info( - `face detection time taken ${fileContext.enteFile.id}`, - Date.now() - startTime, - "ms", - ); - } } export default new MachineLearningService(); + +export interface FileML extends ServerFileMl { + updatedAt: number; +} + +class ServerFileMl { + public fileID: number; + public height?: number; + public width?: number; + public faceEmbedding: ServerFaceEmbeddings; + + public constructor( + fileID: number, + faceEmbedding: ServerFaceEmbeddings, + height?: number, + width?: number, + ) { + this.fileID = fileID; + this.height = height; + this.width = width; + this.faceEmbedding = faceEmbedding; + } 
+} + +class ServerFaceEmbeddings { + public faces: ServerFace[]; + public version: number; + public client?: string; + public error?: boolean; + + public constructor( + faces: ServerFace[], + version: number, + client?: string, + error?: boolean, + ) { + this.faces = faces; + this.version = version; + this.client = client; + this.error = error; + } +} + +class ServerFace { + public faceID: string; + public embeddings: number[]; + public detection: ServerDetection; + public score: number; + public blur: number; + + public constructor( + faceID: string, + embeddings: number[], + detection: ServerDetection, + score: number, + blur: number, + ) { + this.faceID = faceID; + this.embeddings = embeddings; + this.detection = detection; + this.score = score; + this.blur = blur; + } +} + +class ServerDetection { + public box: ServerFaceBox; + public landmarks: Landmark[]; + + public constructor(box: ServerFaceBox, landmarks: Landmark[]) { + this.box = box; + this.landmarks = landmarks; + } +} + +class ServerFaceBox { + public xMin: number; + public yMin: number; + public width: number; + public height: number; + + public constructor( + xMin: number, + yMin: number, + width: number, + height: number, + ) { + this.xMin = xMin; + this.yMin = yMin; + this.width = width; + this.height = height; + } +} + +function LocalFileMlDataToServerFileMl( + localFileMlData: MlFileData, +): ServerFileMl { + if ( + localFileMlData.errorCount > 0 && + localFileMlData.lastErrorMessage !== undefined + ) { + return null; + } + const imageDimensions = localFileMlData.imageDimensions; + + const faces: ServerFace[] = []; + for (let i = 0; i < localFileMlData.faces.length; i++) { + const face: Face = localFileMlData.faces[i]; + const faceID = face.id; + const embedding = face.embedding; + const score = face.detection.probability; + const blur = face.blurValue; + const detection: FaceDetection = face.detection; + const box = detection.box; + const landmarks = detection.landmarks; + const newBox = new 
ServerFaceBox(box.x, box.y, box.width, box.height); + const newLandmarks: Landmark[] = []; + for (let j = 0; j < landmarks.length; j++) { + newLandmarks.push({ + x: landmarks[j].x, + y: landmarks[j].y, + } as Landmark); + } + + const newFaceObject = new ServerFace( + faceID, + Array.from(embedding), + new ServerDetection(newBox, newLandmarks), + score, + blur, + ); + faces.push(newFaceObject); + } + const faceEmbeddings = new ServerFaceEmbeddings( + faces, + 1, + localFileMlData.lastErrorMessage, + ); + return new ServerFileMl( + localFileMlData.fileId, + faceEmbeddings, + imageDimensions.height, + imageDimensions.width, + ); +} + +export function logQueueStats(queue: PQueue, name: string) { + queue.on("active", () => + log.info( + `queuestats: ${name}: Active, Size: ${queue.size} Pending: ${queue.pending}`, + ), + ); + queue.on("idle", () => log.info(`queuestats: ${name}: Idle`)); + queue.on("error", (error) => + console.error(`queuestats: ${name}: Error, `, error), + ); +} diff --git a/web/apps/photos/src/services/machineLearning/mlWorkManager.ts b/web/apps/photos/src/services/machineLearning/mlWorkManager.ts index d1c5e9db5..1cb61af00 100644 --- a/web/apps/photos/src/services/machineLearning/mlWorkManager.ts +++ b/web/apps/photos/src/services/machineLearning/mlWorkManager.ts @@ -5,25 +5,94 @@ import { eventBus, Events } from "@ente/shared/events"; import { getToken, getUserID } from "@ente/shared/storage/localStorage/helpers"; import debounce from "debounce"; import PQueue from "p-queue"; -import { JobResult } from "types/common/job"; +import { createFaceComlinkWorker } from "services/face"; +import mlIDbStorage from "services/face/db"; +import type { DedicatedMLWorker } from "services/face/face.worker"; +import { MLSyncResult } from "services/face/types"; import { EnteFile } from "types/file"; -import { MLSyncResult } from "types/machineLearning"; -import { getDedicatedMLWorker } from "utils/comlink/ComlinkMLWorker"; -import { SimpleJob } from 
"utils/common/job"; -import { logQueueStats } from "utils/machineLearning"; -import { getMLSyncJobConfig } from "utils/machineLearning/config"; -import mlIDbStorage from "utils/storage/mlIDbStorage"; -import { DedicatedMLWorker } from "worker/ml.worker"; +import { logQueueStats } from "./machineLearningService"; -const LIVE_SYNC_IDLE_DEBOUNCE_SEC = 30; -const LIVE_SYNC_QUEUE_TIMEOUT_SEC = 300; -const LOCAL_FILES_UPDATED_DEBOUNCE_SEC = 30; +export type JobState = "Scheduled" | "Running" | "NotScheduled"; -export interface MLSyncJobResult extends JobResult { +export interface MLSyncJobResult { + shouldBackoff: boolean; mlSyncResult: MLSyncResult; } -export class MLSyncJob extends SimpleJob {} +export class MLSyncJob { + private runCallback: () => Promise; + private state: JobState; + private stopped: boolean; + private intervalSec: number; + private nextTimeoutId: ReturnType; + + constructor(runCallback: () => Promise) { + this.runCallback = runCallback; + this.state = "NotScheduled"; + this.stopped = true; + this.resetInterval(); + } + + public resetInterval() { + this.intervalSec = 5; + } + + public start() { + this.stopped = false; + this.resetInterval(); + if (this.state !== "Running") { + this.scheduleNext(); + } else { + log.info("Job already running, not scheduling"); + } + } + + private scheduleNext() { + if (this.state === "Scheduled" || this.nextTimeoutId) { + this.clearScheduled(); + } + + this.nextTimeoutId = setTimeout( + () => this.run(), + this.intervalSec * 1000, + ); + this.state = "Scheduled"; + log.info("Scheduled next job after: ", this.intervalSec); + } + + async run() { + this.nextTimeoutId = undefined; + this.state = "Running"; + + try { + const jobResult = await this.runCallback(); + if (jobResult && jobResult.shouldBackoff) { + this.intervalSec = Math.min(960, this.intervalSec * 2); + } else { + this.resetInterval(); + } + log.info("Job completed"); + } catch (e) { + console.error("Error while running Job: ", e); + } finally { + this.state = 
"NotScheduled"; + !this.stopped && this.scheduleNext(); + } + } + + // currently client is responsible to terminate running job + public stop() { + this.stopped = true; + this.clearScheduled(); + } + + private clearScheduled() { + clearTimeout(this.nextTimeoutId); + this.nextTimeoutId = undefined; + this.state = "NotScheduled"; + log.info("Cleared next job"); + } +} class MLWorkManager { private mlSyncJob: MLSyncJob; @@ -40,19 +109,18 @@ class MLWorkManager { this.liveSyncQueue = new PQueue({ concurrency: 1, // TODO: temp, remove - timeout: LIVE_SYNC_QUEUE_TIMEOUT_SEC * 1000, + timeout: 300 * 1000, throwOnTimeout: true, }); this.mlSearchEnabled = false; - eventBus.on(Events.LOGOUT, this.logoutHandler.bind(this), this); this.debouncedLiveSyncIdle = debounce( () => this.onLiveSyncIdle(), - LIVE_SYNC_IDLE_DEBOUNCE_SEC * 1000, + 30 * 1000, ); this.debouncedFilesUpdated = debounce( () => this.mlSearchEnabled && this.localFilesUpdatedHandler(), - LOCAL_FILES_UPDATED_DEBOUNCE_SEC * 1000, + 30 * 1000, ); } @@ -97,26 +165,12 @@ class MLWorkManager { } } - // Handlers - private async appStartHandler() { - log.info("appStartHandler"); - try { - this.startSyncJob(); - } catch (e) { - log.error("Failed in ML appStart Handler", e); - } - } - - private async logoutHandler() { - log.info("logoutHandler"); - try { - this.stopSyncJob(); - this.mlSyncJob = undefined; - await this.terminateLiveSyncWorker(); - await mlIDbStorage.clearMLDB(); - } catch (e) { - log.error("Failed in ML logout Handler", e); - } + async logout() { + this.setMlSearchEnabled(false); + this.stopSyncJob(); + this.mlSyncJob = undefined; + await this.terminateLiveSyncWorker(); + await mlIDbStorage.clearMLDB(); } private async fileUploadedHandler(arg: { @@ -148,7 +202,7 @@ class MLWorkManager { // Live Sync private async getLiveSyncWorker() { if (!this.liveSyncWorker) { - this.liveSyncWorker = getDedicatedMLWorker("ml-sync-live"); + this.liveSyncWorker = createFaceComlinkWorker("ml-sync-live"); } return await 
this.liveSyncWorker.remote; @@ -178,25 +232,19 @@ class MLWorkManager { } public async syncLocalFile(enteFile: EnteFile, localFile: globalThis.File) { - const result = await this.liveSyncQueue.add(async () => { + await this.liveSyncQueue.add(async () => { this.stopSyncJob(); const token = getToken(); const userID = getUserID(); const mlWorker = await this.getLiveSyncWorker(); return mlWorker.syncLocalFile(token, userID, enteFile, localFile); }); - - if (result instanceof Error) { - // TODO: redirect/refresh to gallery in case of session_expired - // may not be required as uploader should anyways take care of this - console.error("Error while syncing local file: ", result); - } } // Sync Job private async getSyncJobWorker() { if (!this.syncJobWorker) { - this.syncJobWorker = getDedicatedMLWorker("ml-sync-job"); + this.syncJobWorker = createFaceComlinkWorker("ml-sync-job"); } return await this.syncJobWorker.remote; @@ -254,11 +302,8 @@ class MLWorkManager { log.info("User not logged in, not starting ml sync job"); return; } - const mlSyncJobConfig = await getMLSyncJobConfig(); if (!this.mlSyncJob) { - this.mlSyncJob = new MLSyncJob(mlSyncJobConfig, () => - this.runMLSyncJob(), - ); + this.mlSyncJob = new MLSyncJob(() => this.runMLSyncJob()); } this.mlSyncJob.start(); } catch (e) { @@ -266,11 +311,11 @@ class MLWorkManager { } } - public stopSyncJob(terminateWorker: boolean = true) { + public stopSyncJob() { try { log.info("MLWorkManager.stopSyncJob"); this.mlSyncJob?.stop(); - terminateWorker && this.terminateSyncJobWorker(); + this.terminateSyncJobWorker(); } catch (e) { log.error("Failed to stop MLSync Job", e); } diff --git a/web/apps/photos/src/services/machineLearning/mobileFaceNetEmbeddingService.ts b/web/apps/photos/src/services/machineLearning/mobileFaceNetEmbeddingService.ts deleted file mode 100644 index 818b8a5d1..000000000 --- a/web/apps/photos/src/services/machineLearning/mobileFaceNetEmbeddingService.ts +++ /dev/null @@ -1,41 +0,0 @@ -import { 
workerBridge } from "@/next/worker/worker-bridge"; -import { - FaceEmbedding, - FaceEmbeddingMethod, - FaceEmbeddingService, - Versioned, -} from "types/machineLearning"; - -export const mobileFaceNetFaceSize = 112; - -class MobileFaceNetEmbeddingService implements FaceEmbeddingService { - public method: Versioned; - public faceSize: number; - - public constructor() { - this.method = { - value: "MobileFaceNet", - version: 2, - }; - this.faceSize = mobileFaceNetFaceSize; - } - - public async getFaceEmbeddings( - faceData: Float32Array, - ): Promise> { - const outputData = await workerBridge.faceEmbedding(faceData); - - const embeddingSize = 192; - const embeddings = new Array( - outputData.length / embeddingSize, - ); - for (let i = 0; i < embeddings.length; i++) { - embeddings[i] = new Float32Array( - outputData.slice(i * embeddingSize, (i + 1) * embeddingSize), - ); - } - return embeddings; - } -} - -export default new MobileFaceNetEmbeddingService(); diff --git a/web/apps/photos/src/services/machineLearning/peopleService.ts b/web/apps/photos/src/services/machineLearning/peopleService.ts deleted file mode 100644 index ad7d7bcec..000000000 --- a/web/apps/photos/src/services/machineLearning/peopleService.ts +++ /dev/null @@ -1,94 +0,0 @@ -import log from "@/next/log"; -import { Face, MLSyncContext, Person } from "types/machineLearning"; -import { - findFirstIfSorted, - getAllFacesFromMap, - getLocalFile, - getOriginalImageBitmap, - isDifferentOrOld, -} from "utils/machineLearning"; -import mlIDbStorage from "utils/storage/mlIDbStorage"; -import FaceService from "./faceService"; - -class PeopleService { - async syncPeopleIndex(syncContext: MLSyncContext) { - const filesVersion = await mlIDbStorage.getIndexVersion("files"); - if ( - filesVersion <= (await mlIDbStorage.getIndexVersion("people")) && - !isDifferentOrOld( - syncContext.mlLibraryData?.faceClusteringMethod, - syncContext.faceClusteringService.method, - ) - ) { - log.info( - "[MLService] Skipping people 
index as already synced to latest version", - ); - return; - } - - // TODO: have faces addresable through fileId + faceId - // to avoid index based addressing, which is prone to wrong results - // one way could be to match nearest face within threshold in the file - const allFacesMap = await FaceService.getAllSyncedFacesMap(syncContext); - const allFaces = getAllFacesFromMap(allFacesMap); - - await FaceService.runFaceClustering(syncContext, allFaces); - await this.syncPeopleFromClusters(syncContext, allFacesMap, allFaces); - - await mlIDbStorage.setIndexVersion("people", filesVersion); - } - - private async syncPeopleFromClusters( - syncContext: MLSyncContext, - allFacesMap: Map>, - allFaces: Array, - ) { - const clusters = - syncContext.mlLibraryData.faceClusteringResults?.clusters; - if (!clusters || clusters.length < 1) { - return; - } - - for (const face of allFaces) { - face.personId = undefined; - } - await mlIDbStorage.clearAllPeople(); - for (const [index, cluster] of clusters.entries()) { - const faces = cluster.map((f) => allFaces[f]).filter((f) => f); - - // TODO: take default display face from last leaves of hdbscan clusters - const personFace = findFirstIfSorted( - faces, - (a, b) => b.detection.probability - a.detection.probability, - ); - - if (personFace && !personFace.crop?.cacheKey) { - const file = await getLocalFile(personFace.fileId); - const imageBitmap = await getOriginalImageBitmap(file); - await FaceService.saveFaceCrop( - imageBitmap, - personFace, - syncContext, - ); - } - - const person: Person = { - id: index, - files: faces.map((f) => f.fileId), - displayFaceId: personFace?.id, - faceCropCacheKey: personFace?.crop?.cacheKey, - }; - - await mlIDbStorage.putPerson(person); - - faces.forEach((face) => { - face.personId = person.id; - }); - // log.info("Creating person: ", person, faces); - } - - await mlIDbStorage.updateFaces(allFacesMap); - } -} - -export default new PeopleService(); diff --git 
a/web/apps/photos/src/services/machineLearning/readerService.ts b/web/apps/photos/src/services/machineLearning/readerService.ts deleted file mode 100644 index 62aebdbd1..000000000 --- a/web/apps/photos/src/services/machineLearning/readerService.ts +++ /dev/null @@ -1,57 +0,0 @@ -import { FILE_TYPE } from "@/media/file-type"; -import log from "@/next/log"; -import { MLSyncContext, MLSyncFileContext } from "types/machineLearning"; -import { - getLocalFileImageBitmap, - getOriginalImageBitmap, - getThumbnailImageBitmap, -} from "utils/machineLearning"; - -class ReaderService { - async getImageBitmap( - syncContext: MLSyncContext, - fileContext: MLSyncFileContext, - ) { - try { - if (fileContext.imageBitmap) { - return fileContext.imageBitmap; - } - if (fileContext.localFile) { - if ( - fileContext.enteFile.metadata.fileType !== FILE_TYPE.IMAGE - ) { - throw new Error( - "Local file of only image type is supported", - ); - } - fileContext.imageBitmap = await getLocalFileImageBitmap( - fileContext.enteFile, - fileContext.localFile, - ); - } else if ( - syncContext.config.imageSource === "Original" && - [FILE_TYPE.IMAGE, FILE_TYPE.LIVE_PHOTO].includes( - fileContext.enteFile.metadata.fileType, - ) - ) { - fileContext.imageBitmap = await getOriginalImageBitmap( - fileContext.enteFile, - ); - } else { - fileContext.imageBitmap = await getThumbnailImageBitmap( - fileContext.enteFile, - ); - } - - fileContext.newMlFile.imageSource = syncContext.config.imageSource; - const { width, height } = fileContext.imageBitmap; - fileContext.newMlFile.imageDimensions = { width, height }; - - return fileContext.imageBitmap; - } catch (e) { - log.error("failed to create image bitmap", e); - throw e; - } - } -} -export default new ReaderService(); diff --git a/web/apps/photos/src/services/machineLearning/yoloFaceDetectionService.ts b/web/apps/photos/src/services/machineLearning/yoloFaceDetectionService.ts deleted file mode 100644 index 4fa840749..000000000 --- 
a/web/apps/photos/src/services/machineLearning/yoloFaceDetectionService.ts +++ /dev/null @@ -1,332 +0,0 @@ -import { workerBridge } from "@/next/worker/worker-bridge"; -import { euclidean } from "hdbscan"; -import { - Matrix, - applyToPoint, - compose, - scale, - translate, -} from "transformation-matrix"; -import { Dimensions } from "types/image"; -import { - FaceDetection, - FaceDetectionMethod, - FaceDetectionService, - Versioned, -} from "types/machineLearning"; -import { - clamp, - getPixelBilinear, - normalizePixelBetween0And1, -} from "utils/image"; -import { newBox } from "utils/machineLearning"; -import { Box, Point } from "../../../thirdparty/face-api/classes"; - -class YoloFaceDetectionService implements FaceDetectionService { - public method: Versioned; - - public constructor() { - this.method = { - value: "YoloFace", - version: 1, - }; - } - - public async detectFaces( - imageBitmap: ImageBitmap, - ): Promise> { - const maxFaceDistancePercent = Math.sqrt(2) / 100; - const maxFaceDistance = imageBitmap.width * maxFaceDistancePercent; - const preprocessResult = - this.preprocessImageBitmapToFloat32ChannelsFirst( - imageBitmap, - 640, - 640, - ); - const data = preprocessResult.data; - const resized = preprocessResult.newSize; - const outputData = await workerBridge.detectFaces(data); - const faces = this.getFacesFromYoloOutput( - outputData as Float32Array, - 0.7, - ); - const inBox = newBox(0, 0, resized.width, resized.height); - const toBox = newBox(0, 0, imageBitmap.width, imageBitmap.height); - const transform = computeTransformToBox(inBox, toBox); - const faceDetections: Array = faces?.map((f) => { - const box = transformBox(f.box, transform); - const normLandmarks = f.landmarks; - const landmarks = transformPoints(normLandmarks, transform); - return { - box, - landmarks, - probability: f.probability as number, - } as FaceDetection; - }); - return removeDuplicateDetections(faceDetections, maxFaceDistance); - } - - private 
preprocessImageBitmapToFloat32ChannelsFirst( - imageBitmap: ImageBitmap, - requiredWidth: number, - requiredHeight: number, - maintainAspectRatio: boolean = true, - normFunction: ( - pixelValue: number, - ) => number = normalizePixelBetween0And1, - ) { - // Create an OffscreenCanvas and set its size - const offscreenCanvas = new OffscreenCanvas( - imageBitmap.width, - imageBitmap.height, - ); - const ctx = offscreenCanvas.getContext("2d"); - ctx.drawImage(imageBitmap, 0, 0, imageBitmap.width, imageBitmap.height); - const imageData = ctx.getImageData( - 0, - 0, - imageBitmap.width, - imageBitmap.height, - ); - const pixelData = imageData.data; - - let scaleW = requiredWidth / imageBitmap.width; - let scaleH = requiredHeight / imageBitmap.height; - if (maintainAspectRatio) { - const scale = Math.min( - requiredWidth / imageBitmap.width, - requiredHeight / imageBitmap.height, - ); - scaleW = scale; - scaleH = scale; - } - const scaledWidth = clamp( - Math.round(imageBitmap.width * scaleW), - 0, - requiredWidth, - ); - const scaledHeight = clamp( - Math.round(imageBitmap.height * scaleH), - 0, - requiredHeight, - ); - - const processedImage = new Float32Array( - 1 * 3 * requiredWidth * requiredHeight, - ); - - // Populate the Float32Array with normalized pixel values - let pixelIndex = 0; - const channelOffsetGreen = requiredHeight * requiredWidth; - const channelOffsetBlue = 2 * requiredHeight * requiredWidth; - for (let h = 0; h < requiredHeight; h++) { - for (let w = 0; w < requiredWidth; w++) { - let pixel: { - r: number; - g: number; - b: number; - }; - if (w >= scaledWidth || h >= scaledHeight) { - pixel = { r: 114, g: 114, b: 114 }; - } else { - pixel = getPixelBilinear( - w / scaleW, - h / scaleH, - pixelData, - imageBitmap.width, - imageBitmap.height, - ); - } - processedImage[pixelIndex] = normFunction(pixel.r); - processedImage[pixelIndex + channelOffsetGreen] = normFunction( - pixel.g, - ); - processedImage[pixelIndex + channelOffsetBlue] = normFunction( - 
pixel.b, - ); - pixelIndex++; - } - } - - return { - data: processedImage, - originalSize: { - width: imageBitmap.width, - height: imageBitmap.height, - }, - newSize: { width: scaledWidth, height: scaledHeight }, - }; - } - - // The rowOutput is a Float32Array of shape [25200, 16], where each row represents a bounding box. - private getFacesFromYoloOutput( - rowOutput: Float32Array, - minScore: number, - ): Array { - const faces: Array = []; - // iterate over each row - for (let i = 0; i < rowOutput.length; i += 16) { - const score = rowOutput[i + 4]; - if (score < minScore) { - continue; - } - // The first 4 values represent the bounding box's coordinates (x1, y1, x2, y2) - const xCenter = rowOutput[i]; - const yCenter = rowOutput[i + 1]; - const width = rowOutput[i + 2]; - const height = rowOutput[i + 3]; - const xMin = xCenter - width / 2.0; // topLeft - const yMin = yCenter - height / 2.0; // topLeft - - const leftEyeX = rowOutput[i + 5]; - const leftEyeY = rowOutput[i + 6]; - const rightEyeX = rowOutput[i + 7]; - const rightEyeY = rowOutput[i + 8]; - const noseX = rowOutput[i + 9]; - const noseY = rowOutput[i + 10]; - const leftMouthX = rowOutput[i + 11]; - const leftMouthY = rowOutput[i + 12]; - const rightMouthX = rowOutput[i + 13]; - const rightMouthY = rowOutput[i + 14]; - - const box = new Box({ - x: xMin, - y: yMin, - width: width, - height: height, - }); - const probability = score as number; - const landmarks = [ - new Point(leftEyeX, leftEyeY), - new Point(rightEyeX, rightEyeY), - new Point(noseX, noseY), - new Point(leftMouthX, leftMouthY), - new Point(rightMouthX, rightMouthY), - ]; - const face: FaceDetection = { - box, - landmarks, - probability, - // detectionMethod: this.method, - }; - faces.push(face); - } - return faces; - } - - public getRelativeDetection( - faceDetection: FaceDetection, - dimensions: Dimensions, - ): FaceDetection { - const oldBox: Box = faceDetection.box; - const box = new Box({ - x: oldBox.x / dimensions.width, - y: 
oldBox.y / dimensions.height, - width: oldBox.width / dimensions.width, - height: oldBox.height / dimensions.height, - }); - const oldLandmarks: Point[] = faceDetection.landmarks; - const landmarks = oldLandmarks.map((l) => { - return new Point(l.x / dimensions.width, l.y / dimensions.height); - }); - return { - box, - landmarks, - probability: faceDetection.probability, - }; - } -} - -export default new YoloFaceDetectionService(); - -/** - * Removes duplicate face detections from an array of detections. - * - * This function sorts the detections by their probability in descending order, then iterates over them. - * For each detection, it calculates the Euclidean distance to all other detections. - * If the distance is less than or equal to the specified threshold (`withinDistance`), the other detection is considered a duplicate and is removed. - * - * @param detections - An array of face detections to remove duplicates from. - * @param withinDistance - The maximum Euclidean distance between two detections for them to be considered duplicates. - * - * @returns An array of face detections with duplicates removed. 
- */ -function removeDuplicateDetections( - detections: Array, - withinDistance: number, -) { - // console.time('removeDuplicates'); - detections.sort((a, b) => b.probability - a.probability); - const isSelected = new Map(); - for (let i = 0; i < detections.length; i++) { - if (isSelected.get(i) === false) { - continue; - } - isSelected.set(i, true); - for (let j = i + 1; j < detections.length; j++) { - if (isSelected.get(j) === false) { - continue; - } - const centeri = getDetectionCenter(detections[i]); - const centerj = getDetectionCenter(detections[j]); - const dist = euclidean( - [centeri.x, centeri.y], - [centerj.x, centerj.y], - ); - if (dist <= withinDistance) { - isSelected.set(j, false); - } - } - } - - const uniques: Array = []; - for (let i = 0; i < detections.length; i++) { - isSelected.get(i) && uniques.push(detections[i]); - } - // console.timeEnd('removeDuplicates'); - return uniques; -} - -function getDetectionCenter(detection: FaceDetection) { - const center = new Point(0, 0); - // TODO: first 4 landmarks is applicable to blazeface only - // this needs to consider eyes, nose and mouth landmarks to take center - detection.landmarks?.slice(0, 4).forEach((p) => { - center.x += p.x; - center.y += p.y; - }); - - return center.div({ x: 4, y: 4 }); -} - -function computeTransformToBox(inBox: Box, toBox: Box): Matrix { - return compose( - translate(toBox.x, toBox.y), - scale(toBox.width / inBox.width, toBox.height / inBox.height), - ); -} - -function transformPoint(point: Point, transform: Matrix) { - const txdPoint = applyToPoint(transform, point); - return new Point(txdPoint.x, txdPoint.y); -} - -function transformPoints(points: Point[], transform: Matrix) { - return points?.map((p) => transformPoint(p, transform)); -} - -function transformBox(box: Box, transform: Matrix) { - const topLeft = transformPoint(box.topLeft, transform); - const bottomRight = transformPoint(box.bottomRight, transform); - - return newBoxFromPoints(topLeft.x, topLeft.y, 
bottomRight.x, bottomRight.y); -} - -function newBoxFromPoints( - left: number, - top: number, - right: number, - bottom: number, -) { - return new Box({ left, top, right, bottom }); -} diff --git a/web/apps/photos/src/services/searchService.ts b/web/apps/photos/src/services/searchService.ts index 96c574b9d..d646ecd00 100644 --- a/web/apps/photos/src/services/searchService.ts +++ b/web/apps/photos/src/services/searchService.ts @@ -2,10 +2,12 @@ import { FILE_TYPE } from "@/media/file-type"; import log from "@/next/log"; import * as chrono from "chrono-node"; import { t } from "i18next"; +import mlIDbStorage from "services/face/db"; +import { Person } from "services/face/types"; +import { defaultMLVersion } from "services/machineLearning/machineLearningService"; import { Collection } from "types/collection"; import { EntityType, LocationTag, LocationTagData } from "types/entity"; import { EnteFile } from "types/file"; -import { Person } from "types/machineLearning"; import { ClipSearchScores, DateValue, @@ -16,12 +18,9 @@ import { } from "types/search"; import ComlinkSearchWorker from "utils/comlink/ComlinkSearchWorker"; import { getUniqueFiles } from "utils/file"; -import { getAllPeople } from "utils/machineLearning"; -import { getMLSyncConfig } from "utils/machineLearning/config"; import { getFormattedDate } from "utils/search"; -import mlIDbStorage from "utils/storage/mlIDbStorage"; import { clipService, computeClipMatchScore } from "./clip-service"; -import { getLocalEmbeddings } from "./embeddingService"; +import { localCLIPEmbeddings } from "./embeddingService"; import { getLatestEntities } from "./entityService"; import locationSearchService, { City } from "./locationSearchService"; @@ -176,8 +175,7 @@ export async function getAllPeopleSuggestion(): Promise> { export async function getIndexStatusSuggestion(): Promise { try { - const config = await getMLSyncConfig(); - const indexStatus = await mlIDbStorage.getIndexStatus(config.mlVersion); + const indexStatus 
= await mlIDbStorage.getIndexStatus(defaultMLVersion); let label; if (!indexStatus.localFilesSynced) { @@ -376,7 +374,7 @@ const searchClip = async ( await clipService.getTextEmbeddingIfAvailable(searchPhrase); if (!textEmbedding) return undefined; - const imageEmbeddings = await getLocalEmbeddings(); + const imageEmbeddings = await localCLIPEmbeddings(); const clipSearchResult = new Map( ( await Promise.all( @@ -430,3 +428,14 @@ function convertSuggestionToSearchQuery(option: Suggestion): Search { return { clip: option.value as ClipSearchScores }; } } + +async function getAllPeople(limit: number = undefined) { + let people: Array = await mlIDbStorage.getAllPeople(); + // await mlPeopleStore.iterate((person) => { + // people.push(person); + // }); + people = people ?? []; + return people + .sort((p1, p2) => p2.files.length - p1.files.length) + .slice(0, limit); +} diff --git a/web/apps/photos/src/services/upload/thumbnail.ts b/web/apps/photos/src/services/upload/thumbnail.ts index 1dd448376..10da88a65 100644 --- a/web/apps/photos/src/services/upload/thumbnail.ts +++ b/web/apps/photos/src/services/upload/thumbnail.ts @@ -1,7 +1,9 @@ import { FILE_TYPE, type FileTypeInfo } from "@/media/file-type"; +import { scaledImageDimensions } from "@/media/image"; import log from "@/next/log"; import { type Electron } from "@/next/types/ipc"; -import { withTimeout } from "@ente/shared/utils"; +import { ensure } from "@/utils/ensure"; +import { withTimeout } from "@/utils/promise"; import * as ffmpeg from "services/ffmpeg"; import { heicToJPEG } from "services/heic-convert"; import { toDataOrPathOrZipEntry, type DesktopUploadItem } from "./types"; @@ -30,10 +32,10 @@ export const generateThumbnailWeb = async ( fileTypeInfo: FileTypeInfo, ): Promise => fileTypeInfo.fileType === FILE_TYPE.IMAGE - ? await generateImageThumbnailUsingCanvas(blob, fileTypeInfo) + ? 
await generateImageThumbnailWeb(blob, fileTypeInfo) : await generateVideoThumbnailWeb(blob); -const generateImageThumbnailUsingCanvas = async ( +const generateImageThumbnailWeb = async ( blob: Blob, { extension }: FileTypeInfo, ) => { @@ -42,8 +44,12 @@ const generateImageThumbnailUsingCanvas = async ( blob = await heicToJPEG(blob); } + return generateImageThumbnailUsingCanvas(blob); +}; + +const generateImageThumbnailUsingCanvas = async (blob: Blob) => { const canvas = document.createElement("canvas"); - const canvasCtx = canvas.getContext("2d"); + const canvasCtx = ensure(canvas.getContext("2d")); const imageURL = URL.createObjectURL(blob); await withTimeout( @@ -53,7 +59,7 @@ const generateImageThumbnailUsingCanvas = async ( image.onload = () => { try { URL.revokeObjectURL(imageURL); - const { width, height } = scaledThumbnailDimensions( + const { width, height } = scaledImageDimensions( image.width, image.height, maxThumbnailDimension, @@ -62,7 +68,7 @@ const generateImageThumbnailUsingCanvas = async ( canvas.height = height; canvasCtx.drawImage(image, 0, 0, width, height); resolve(undefined); - } catch (e) { + } catch (e: unknown) { reject(e); } }; @@ -73,6 +79,32 @@ const generateImageThumbnailUsingCanvas = async ( return await compressedJPEGData(canvas); }; +const compressedJPEGData = async (canvas: HTMLCanvasElement) => { + let blob: Blob | undefined | null; + let prevSize = Number.MAX_SAFE_INTEGER; + let quality = 0.7; + + do { + if (blob) prevSize = blob.size; + blob = await new Promise((resolve) => { + canvas.toBlob((blob) => resolve(blob), "image/jpeg", quality); + }); + quality -= 0.1; + } while ( + quality >= 0.5 && + blob && + blob.size > maxThumbnailSize && + percentageSizeDiff(blob.size, prevSize) >= 10 + ); + + return new Uint8Array(await ensure(blob).arrayBuffer()); +}; + +const percentageSizeDiff = ( + newThumbnailSize: number, + oldThumbnailSize: number, +) => ((oldThumbnailSize - newThumbnailSize) * 100) / oldThumbnailSize; + const 
generateVideoThumbnailWeb = async (blob: Blob) => { try { return await ffmpeg.generateVideoThumbnailWeb(blob); @@ -85,9 +117,9 @@ const generateVideoThumbnailWeb = async (blob: Blob) => { } }; -const generateVideoThumbnailUsingCanvas = async (blob: Blob) => { +export const generateVideoThumbnailUsingCanvas = async (blob: Blob) => { const canvas = document.createElement("canvas"); - const canvasCtx = canvas.getContext("2d"); + const canvasCtx = ensure(canvas.getContext("2d")); const videoURL = URL.createObjectURL(blob); await withTimeout( @@ -98,7 +130,7 @@ const generateVideoThumbnailUsingCanvas = async (blob: Blob) => { video.addEventListener("loadeddata", () => { try { URL.revokeObjectURL(videoURL); - const { width, height } = scaledThumbnailDimensions( + const { width, height } = scaledImageDimensions( video.videoWidth, video.videoHeight, maxThumbnailDimension, @@ -118,59 +150,6 @@ const generateVideoThumbnailUsingCanvas = async (blob: Blob) => { return await compressedJPEGData(canvas); }; -/** - * Compute the size of the thumbnail to create for an image with the given - * {@link width} and {@link height}. - * - * This function calculates a new size of an image for limiting it to maximum - * width and height (both specified by {@link maxDimension}), while maintaining - * aspect ratio. - * - * It returns `{0, 0}` for invalid inputs. 
- */ -const scaledThumbnailDimensions = ( - width: number, - height: number, - maxDimension: number, -): { width: number; height: number } => { - if (width === 0 || height === 0) return { width: 0, height: 0 }; - const widthScaleFactor = maxDimension / width; - const heightScaleFactor = maxDimension / height; - const scaleFactor = Math.min(widthScaleFactor, heightScaleFactor); - const thumbnailDimensions = { - width: Math.round(width * scaleFactor), - height: Math.round(height * scaleFactor), - }; - if (thumbnailDimensions.width === 0 || thumbnailDimensions.height === 0) - return { width: 0, height: 0 }; - return thumbnailDimensions; -}; - -const compressedJPEGData = async (canvas: HTMLCanvasElement) => { - let blob: Blob; - let prevSize = Number.MAX_SAFE_INTEGER; - let quality = 0.7; - - do { - if (blob) prevSize = blob.size; - blob = await new Promise((resolve) => { - canvas.toBlob((blob) => resolve(blob), "image/jpeg", quality); - }); - quality -= 0.1; - } while ( - quality >= 0.5 && - blob.size > maxThumbnailSize && - percentageSizeDiff(blob.size, prevSize) >= 10 - ); - - return new Uint8Array(await blob.arrayBuffer()); -}; - -const percentageSizeDiff = ( - newThumbnailSize: number, - oldThumbnailSize: number, -) => ((oldThumbnailSize - newThumbnailSize) * 100) / oldThumbnailSize; - /** * Generate a JPEG thumbnail for the given file or path using native tools. 
* diff --git a/web/apps/photos/src/services/upload/uploadHttpClient.ts b/web/apps/photos/src/services/upload/uploadHttpClient.ts index e8ae6de97..c23a58b52 100644 --- a/web/apps/photos/src/services/upload/uploadHttpClient.ts +++ b/web/apps/photos/src/services/upload/uploadHttpClient.ts @@ -1,9 +1,9 @@ import log from "@/next/log"; +import { wait } from "@/utils/promise"; import { CustomError, handleUploadError } from "@ente/shared/error"; import HTTPService from "@ente/shared/network/HTTPService"; import { getEndpoint, getUploadEndpoint } from "@ente/shared/network/api"; import { getToken } from "@ente/shared/storage/localStorage/helpers"; -import { wait } from "@ente/shared/utils"; import { EnteFile } from "types/file"; import { MultipartUploadURLs, UploadFile, UploadURL } from "./uploadService"; diff --git a/web/apps/photos/src/services/upload/uploadManager.ts b/web/apps/photos/src/services/upload/uploadManager.ts index 38fd7037b..0ab9ecff0 100644 --- a/web/apps/photos/src/services/upload/uploadManager.ts +++ b/web/apps/photos/src/services/upload/uploadManager.ts @@ -6,11 +6,11 @@ import log from "@/next/log"; import type { Electron } from "@/next/types/ipc"; import { ComlinkWorker } from "@/next/worker/comlink-worker"; import { ensure } from "@/utils/ensure"; +import { wait } from "@/utils/promise"; import { getDedicatedCryptoWorker } from "@ente/shared/crypto"; import { DedicatedCryptoWorker } from "@ente/shared/crypto/internal/crypto.worker"; import { CustomError } from "@ente/shared/error"; import { Events, eventBus } from "@ente/shared/events"; -import { wait } from "@ente/shared/utils"; import { Canceler } from "axios"; import { Remote } from "comlink"; import { diff --git a/web/apps/photos/src/services/userService.ts b/web/apps/photos/src/services/userService.ts index 95b1b95c9..47bda4f0a 100644 --- a/web/apps/photos/src/services/userService.ts +++ b/web/apps/photos/src/services/userService.ts @@ -1,11 +1,8 @@ import log from "@/next/log"; import { 
putAttributes } from "@ente/accounts/api/user"; -import { logoutUser } from "@ente/accounts/services/user"; -import { getRecoveryKey } from "@ente/shared/crypto/helpers"; import { ApiError } from "@ente/shared/error"; import HTTPService from "@ente/shared/network/HTTPService"; import { getEndpoint, getFamilyPortalURL } from "@ente/shared/network/api"; -import localForage from "@ente/shared/storage/localForage"; import { LS_KEYS, getData } from "@ente/shared/storage/localStorage"; import { getToken, @@ -104,10 +101,6 @@ export const getRoadmapRedirectURL = async () => { } }; -export const clearFiles = async () => { - await localForage.clear(); -}; - export const isTokenValid = async (token: string) => { try { const resp = await HTTPService.get( @@ -233,19 +226,6 @@ export const deleteAccount = async ( } }; -// Ensure that the keys in local storage are not malformed by verifying that the -// recoveryKey can be decrypted with the masterKey. -// Note: This is not bullet-proof. -export const validateKey = async () => { - try { - await getRecoveryKey(); - return true; - } catch (e) { - await logoutUser(); - return false; - } -}; - export const getFaceSearchEnabledStatus = async () => { try { const token = getToken(); diff --git a/web/apps/photos/src/types/common/job.ts b/web/apps/photos/src/types/common/job.ts deleted file mode 100644 index fe42e4aaf..000000000 --- a/web/apps/photos/src/types/common/job.ts +++ /dev/null @@ -1,11 +0,0 @@ -export type JobState = "Scheduled" | "Running" | "NotScheduled"; - -export interface JobConfig { - intervalSec: number; - maxItervalSec: number; - backoffMultiplier: number; -} - -export interface JobResult { - shouldBackoff: boolean; -} diff --git a/web/apps/photos/src/types/embedding.tsx b/web/apps/photos/src/types/embedding.tsx index d4719986b..161244c15 100644 --- a/web/apps/photos/src/types/embedding.tsx +++ b/web/apps/photos/src/types/embedding.tsx @@ -1,9 +1,9 @@ /** - * The embeddings models that we support. 
+ * The embeddings that we (the current client) knows how to handle. * * This is an exhaustive set of values we pass when PUT-ting encrypted * embeddings on the server. However, we should be prepared to receive an - * {@link EncryptedEmbedding} with a model value distinct from one of these. + * {@link EncryptedEmbedding} with a model value different from these. */ export type EmbeddingModel = "onnx-clip" | "file-ml-clip-face"; diff --git a/web/apps/photos/src/types/image/index.ts b/web/apps/photos/src/types/image/index.ts index 8c9619e2e..e69de29bb 100644 --- a/web/apps/photos/src/types/image/index.ts +++ b/web/apps/photos/src/types/image/index.ts @@ -1,9 +0,0 @@ -export interface Dimensions { - width: number; - height: number; -} - -export interface BlobOptions { - type?: string; - quality?: number; -} diff --git a/web/apps/photos/src/types/machineLearning/data/clip.ts b/web/apps/photos/src/types/machineLearning/data/clip.ts deleted file mode 100644 index 0181e89e5..000000000 --- a/web/apps/photos/src/types/machineLearning/data/clip.ts +++ /dev/null @@ -1,4 +0,0 @@ -export interface ClipEmbedding { - embedding: Float32Array; - model: "ggml-clip" | "onnx-clip"; -} diff --git a/web/apps/photos/src/types/machineLearning/data/face.ts b/web/apps/photos/src/types/machineLearning/data/face.ts deleted file mode 100644 index cac391994..000000000 --- a/web/apps/photos/src/types/machineLearning/data/face.ts +++ /dev/null @@ -1,27 +0,0 @@ -/// [`x`] and [y] are the coordinates of the top left corner of the box, so the minimim values -/// [width] and [height] are the width and height of the box. -/// All values are in absolute pixels relative to the original image size. 
-export interface CenterBox { - x: number; - y: number; - height: number; - width: number; -} - -export interface Point { - x: number; - y: number; -} - -export interface Detection { - box: CenterBox; - landmarks: Point[]; -} - -export interface Face { - id: string; - confidence: number; - blur: number; - embedding: Float32Array; - detection: Detection; -} diff --git a/web/apps/photos/src/types/machineLearning/data/fileML.ts b/web/apps/photos/src/types/machineLearning/data/fileML.ts deleted file mode 100644 index 7835450e7..000000000 --- a/web/apps/photos/src/types/machineLearning/data/fileML.ts +++ /dev/null @@ -1,12 +0,0 @@ -import { ClipEmbedding } from "./clip"; -import { Face } from "./face"; - -export interface FileML { - fileID: number; - clip?: ClipEmbedding; - faces: Face[]; - height: number; - width: number; - version: number; - error?: string; -} diff --git a/web/apps/photos/src/types/machineLearning/index.ts b/web/apps/photos/src/types/machineLearning/index.ts deleted file mode 100644 index 2c3961cdf..000000000 --- a/web/apps/photos/src/types/machineLearning/index.ts +++ /dev/null @@ -1,331 +0,0 @@ -import { DebugInfo } from "hdbscan"; -import PQueue from "p-queue"; -import { EnteFile } from "types/file"; -import { Dimensions } from "types/image"; -import { Box, Point } from "../../../thirdparty/face-api/classes"; - -export interface MLSyncResult { - nOutOfSyncFiles: number; - nSyncedFiles: number; - nSyncedFaces: number; - nFaceClusters: number; - nFaceNoise: number; - error?: Error; -} - -export declare type FaceDescriptor = Float32Array; - -export declare type Cluster = Array; - -export interface ClusteringResults { - clusters: Array; - noise: Cluster; -} - -export interface HdbscanResults extends ClusteringResults { - debugInfo?: DebugInfo; -} - -export interface FacesCluster { - faces: Cluster; - summary?: FaceDescriptor; -} - -export interface FacesClustersWithNoise { - clusters: Array; - noise: Cluster; -} - -export interface NearestCluster { - 
cluster: FacesCluster; - distance: number; -} - -export declare type Landmark = Point; - -export declare type ImageType = "Original" | "Preview"; - -export declare type FaceDetectionMethod = "YoloFace"; - -export declare type FaceCropMethod = "ArcFace"; - -export declare type FaceAlignmentMethod = "ArcFace"; - -export declare type FaceEmbeddingMethod = "MobileFaceNet"; - -export declare type BlurDetectionMethod = "Laplacian"; - -export declare type ClusteringMethod = "Hdbscan" | "Dbscan"; - -export class AlignedBox { - box: Box; - rotation: number; -} - -export interface Versioned { - value: T; - version: number; -} - -export interface FaceDetection { - // box and landmarks is relative to image dimentions stored at mlFileData - box: Box; - landmarks?: Array; - probability?: number; -} - -export interface DetectedFace { - fileId: number; - detection: FaceDetection; -} - -export interface DetectedFaceWithId extends DetectedFace { - id: string; -} - -export interface FaceCrop { - image: ImageBitmap; - // imageBox is relative to image dimentions stored at mlFileData - imageBox: Box; -} - -export interface StoredFaceCrop { - cacheKey: string; - imageBox: Box; -} - -export interface CroppedFace extends DetectedFaceWithId { - crop?: StoredFaceCrop; -} - -export interface FaceAlignment { - // TODO: remove affine matrix as rotation, size and center - // are simple to store and use, affine matrix adds complexity while getting crop - affineMatrix: Array>; - rotation: number; - // size and center is relative to image dimentions stored at mlFileData - size: number; - center: Point; -} - -export interface AlignedFace extends CroppedFace { - alignment?: FaceAlignment; - blurValue?: number; -} - -export declare type FaceEmbedding = Float32Array; - -export interface FaceWithEmbedding extends AlignedFace { - embedding?: FaceEmbedding; -} - -export interface Face extends FaceWithEmbedding { - personId?: number; -} - -export interface Person { - id: number; - name?: string; - files: 
Array; - displayFaceId?: string; - faceCropCacheKey?: string; -} - -export interface MlFileData { - fileId: number; - faces?: Face[]; - imageSource?: ImageType; - imageDimensions?: Dimensions; - faceDetectionMethod?: Versioned; - faceCropMethod?: Versioned; - faceAlignmentMethod?: Versioned; - faceEmbeddingMethod?: Versioned; - mlVersion: number; - errorCount: number; - lastErrorMessage?: string; -} - -export interface FaceDetectionConfig { - method: FaceDetectionMethod; -} - -export interface FaceCropConfig { - enabled: boolean; - method: FaceCropMethod; - padding: number; - maxSize: number; - blobOptions: { - type: string; - quality: number; - }; -} - -export interface FaceAlignmentConfig { - method: FaceAlignmentMethod; -} - -export interface BlurDetectionConfig { - method: BlurDetectionMethod; - threshold: number; -} - -export interface FaceEmbeddingConfig { - method: FaceEmbeddingMethod; - faceSize: number; - generateTsne?: boolean; -} - -export interface FaceClusteringConfig extends ClusteringConfig {} - -export declare type TSNEMetric = "euclidean" | "manhattan"; - -export interface TSNEConfig { - samples: number; - dim: number; - perplexity?: number; - earlyExaggeration?: number; - learningRate?: number; - nIter?: number; - metric?: TSNEMetric; -} - -export interface MLSyncConfig { - batchSize: number; - imageSource: ImageType; - faceDetection: FaceDetectionConfig; - faceCrop: FaceCropConfig; - faceAlignment: FaceAlignmentConfig; - blurDetection: BlurDetectionConfig; - faceEmbedding: FaceEmbeddingConfig; - faceClustering: FaceClusteringConfig; - mlVersion: number; -} - -export interface MLSearchConfig { - enabled: boolean; -} - -export interface MLSyncContext { - token: string; - userID: number; - config: MLSyncConfig; - shouldUpdateMLVersion: boolean; - - faceDetectionService: FaceDetectionService; - faceCropService: FaceCropService; - faceAlignmentService: FaceAlignmentService; - faceEmbeddingService: FaceEmbeddingService; - blurDetectionService: 
BlurDetectionService; - faceClusteringService: ClusteringService; - - localFilesMap: Map; - outOfSyncFiles: EnteFile[]; - nSyncedFiles: number; - nSyncedFaces: number; - allSyncedFacesMap?: Map>; - - error?: Error; - - // oldMLLibraryData: MLLibraryData; - mlLibraryData: MLLibraryData; - - syncQueue: PQueue; - - getEnteWorker(id: number): Promise; - dispose(): Promise; -} - -export interface MLSyncFileContext { - enteFile: EnteFile; - localFile?: globalThis.File; - - oldMlFile?: MlFileData; - newMlFile?: MlFileData; - - imageBitmap?: ImageBitmap; - - newDetection?: boolean; - newAlignment?: boolean; -} - -export interface MLLibraryData { - faceClusteringMethod?: Versioned; - faceClusteringResults?: ClusteringResults; - faceClustersWithNoise?: FacesClustersWithNoise; -} - -export declare type MLIndex = "files" | "people"; - -export interface FaceDetectionService { - method: Versioned; - - detectFaces(image: ImageBitmap): Promise>; - getRelativeDetection( - faceDetection: FaceDetection, - imageDimensions: Dimensions, - ): FaceDetection; -} - -export interface FaceCropService { - method: Versioned; - - getFaceCrop( - imageBitmap: ImageBitmap, - face: FaceDetection, - config: FaceCropConfig, - ): Promise; -} - -export interface FaceAlignmentService { - method: Versioned; - getFaceAlignment(faceDetection: FaceDetection): FaceAlignment; -} - -export interface FaceEmbeddingService { - method: Versioned; - faceSize: number; - - getFaceEmbeddings(faceImages: Float32Array): Promise>; -} - -export interface BlurDetectionService { - method: Versioned; - detectBlur(alignedFaces: Float32Array, faces: Face[]): number[]; -} - -export interface ClusteringService { - method: Versioned; - - cluster( - input: ClusteringInput, - config: ClusteringConfig, - ): Promise; -} - -export interface ClusteringConfig { - method: ClusteringMethod; - minClusterSize: number; - minSamples?: number; - clusterSelectionEpsilon?: number; - clusterSelectionMethod?: "eom" | "leaf"; - 
maxDistanceInsideCluster?: number; - minInputSize?: number; - generateDebugInfo?: boolean; -} - -export declare type ClusteringInput = Array>; - -export interface MachineLearningWorker { - closeLocalSyncContext(): Promise; - - syncLocalFile( - token: string, - userID: number, - enteFile: EnteFile, - localFile: globalThis.File, - ): Promise; - - sync(token: string, userID: number): Promise; - - close(): void; -} diff --git a/web/apps/photos/src/types/machineLearning/ui.ts b/web/apps/photos/src/types/machineLearning/ui.ts deleted file mode 100644 index cd9f63f18..000000000 --- a/web/apps/photos/src/types/machineLearning/ui.ts +++ /dev/null @@ -1,7 +0,0 @@ -export interface IndexStatus { - outOfSyncFilesExists: boolean; - nSyncedFiles: number; - nTotalFiles: number; - localFilesSynced: boolean; - peopleIndexSynced: boolean; -} diff --git a/web/apps/photos/src/types/search/index.ts b/web/apps/photos/src/types/search/index.ts index cf50f4a06..aa5f12804 100644 --- a/web/apps/photos/src/types/search/index.ts +++ b/web/apps/photos/src/types/search/index.ts @@ -1,9 +1,9 @@ import { FILE_TYPE } from "@/media/file-type"; +import { IndexStatus } from "services/face/db"; +import { Person } from "services/face/types"; import { City } from "services/locationSearchService"; import { LocationTagData } from "types/entity"; import { EnteFile } from "types/file"; -import { Person } from "types/machineLearning"; -import { IndexStatus } from "types/machineLearning/ui"; export enum SuggestionType { DATE = "DATE", diff --git a/web/apps/photos/src/utils/comlink/ComlinkMLWorker.ts b/web/apps/photos/src/utils/comlink/ComlinkMLWorker.ts deleted file mode 100644 index f312a2c5c..000000000 --- a/web/apps/photos/src/utils/comlink/ComlinkMLWorker.ts +++ /dev/null @@ -1,13 +0,0 @@ -import { haveWindow } from "@/next/env"; -import { ComlinkWorker } from "@/next/worker/comlink-worker"; -import { type DedicatedMLWorker } from "worker/ml.worker"; - -export const getDedicatedMLWorker = (name: string) 
=> { - if (haveWindow()) { - const cryptoComlinkWorker = new ComlinkWorker( - name ?? "ente-ml-worker", - new Worker(new URL("worker/ml.worker.ts", import.meta.url)), - ); - return cryptoComlinkWorker; - } -}; diff --git a/web/apps/photos/src/utils/common/job.ts b/web/apps/photos/src/utils/common/job.ts deleted file mode 100644 index 365f879e9..000000000 --- a/web/apps/photos/src/utils/common/job.ts +++ /dev/null @@ -1,82 +0,0 @@ -import log from "@/next/log"; -import { JobConfig, JobResult, JobState } from "types/common/job"; - -export class SimpleJob { - private config: JobConfig; - private runCallback: () => Promise; - private state: JobState; - private stopped: boolean; - private intervalSec: number; - private nextTimeoutId: ReturnType; - - constructor(config: JobConfig, runCallback: () => Promise) { - this.config = config; - this.runCallback = runCallback; - this.state = "NotScheduled"; - this.stopped = true; - this.intervalSec = this.config.intervalSec; - } - - public resetInterval() { - this.intervalSec = this.config.intervalSec; - } - - public start() { - this.stopped = false; - this.resetInterval(); - if (this.state !== "Running") { - this.scheduleNext(); - } else { - log.info("Job already running, not scheduling"); - } - } - - private scheduleNext() { - if (this.state === "Scheduled" || this.nextTimeoutId) { - this.clearScheduled(); - } - - this.nextTimeoutId = setTimeout( - () => this.run(), - this.intervalSec * 1000, - ); - this.state = "Scheduled"; - log.info("Scheduled next job after: ", this.intervalSec); - } - - async run() { - this.nextTimeoutId = undefined; - this.state = "Running"; - - try { - const jobResult = await this.runCallback(); - if (jobResult && jobResult.shouldBackoff) { - this.intervalSec = Math.min( - this.config.maxItervalSec, - this.intervalSec * this.config.backoffMultiplier, - ); - } else { - this.resetInterval(); - } - log.info("Job completed"); - } catch (e) { - console.error("Error while running Job: ", e); - } finally { - 
this.state = "NotScheduled"; - !this.stopped && this.scheduleNext(); - } - } - - // currently client is responsible to terminate running job - public stop() { - this.stopped = true; - this.clearScheduled(); - } - - private clearScheduled() { - clearTimeout(this.nextTimeoutId); - this.nextTimeoutId = undefined; - this.state = "NotScheduled"; - log.info("Cleared next job"); - } -} diff --git a/web/apps/photos/src/utils/embedding.ts b/web/apps/photos/src/utils/embedding.ts deleted file mode 100644 index 00012f174..000000000 --- a/web/apps/photos/src/utils/embedding.ts +++ /dev/null @@ -1,36 +0,0 @@ -import { Embedding } from "types/embedding"; -import { FileML } from "./machineLearning/mldataMappers"; - -export const getLatestVersionEmbeddings = (embeddings: Embedding[]) => { - const latestVersionEntities = new Map(); - embeddings.forEach((embedding) => { - if (!embedding?.fileID) { - return; - } - const existingEmbeddings = latestVersionEntities.get(embedding.fileID); - if ( - !existingEmbeddings || - existingEmbeddings.updatedAt < embedding.updatedAt - ) { - latestVersionEntities.set(embedding.fileID, embedding); - } - }); - return Array.from(latestVersionEntities.values()); -}; - -export const getLatestVersionFileEmbeddings = (embeddings: FileML[]) => { - const latestVersionEntities = new Map(); - embeddings.forEach((embedding) => { - if (!embedding?.fileID) { - return; - } - const existingEmbeddings = latestVersionEntities.get(embedding.fileID); - if ( - !existingEmbeddings || - existingEmbeddings.updatedAt < embedding.updatedAt - ) { - latestVersionEntities.set(embedding.fileID, embedding); - } - }); - return Array.from(latestVersionEntities.values()); -}; diff --git a/web/apps/photos/src/utils/file/index.ts b/web/apps/photos/src/utils/file/index.ts index 98a8dd948..af5c06e8e 100644 --- a/web/apps/photos/src/utils/file/index.ts +++ b/web/apps/photos/src/utils/file/index.ts @@ -5,10 +5,11 @@ import { lowercaseExtension } from "@/next/file"; import log from 
"@/next/log"; import { CustomErrorMessage, type Electron } from "@/next/types/ipc"; import { workerBridge } from "@/next/worker/worker-bridge"; +import { withTimeout } from "@/utils/promise"; import ComlinkCryptoWorker from "@ente/shared/crypto"; import { LS_KEYS, getData } from "@ente/shared/storage/localStorage"; import { User } from "@ente/shared/user/types"; -import { downloadUsingAnchor, withTimeout } from "@ente/shared/utils"; +import { downloadUsingAnchor } from "@ente/shared/utils"; import { t } from "i18next"; import isElectron from "is-electron"; import { moveToHiddenCollection } from "services/collectionService"; @@ -270,6 +271,10 @@ export function generateStreamFromArrayBuffer(data: Uint8Array) { }); } +/** + * The returned blob.type is filled in, whenever possible, with the MIME type of + * the data that we're dealing with. + */ export const getRenderableImage = async (fileName: string, imageBlob: Blob) => { try { const tempFile = new File([imageBlob], fileName); @@ -283,7 +288,16 @@ export const getRenderableImage = async (fileName: string, imageBlob: Blob) => { if (!isNonWebImageFileExtension(extension)) { // Either it is something that the browser already knows how to // render, or something we don't even about yet. - return imageBlob; + const mimeType = fileTypeInfo.mimeType; + if (!mimeType) { + log.info( + "Trying to render a file without a MIME type", + fileName, + ); + return imageBlob; + } else { + return new Blob([imageBlob], { type: mimeType }); + } } const available = !moduleState.isNativeJPEGConversionNotAvailable; @@ -324,7 +338,7 @@ const nativeConvertToJPEG = async (imageBlob: Blob) => { ? 
await electron.convertToJPEG(imageData) : await workerBridge.convertToJPEG(imageData); log.debug(() => `Native JPEG conversion took ${Date.now() - startTime} ms`); - return new Blob([jpegData]); + return new Blob([jpegData], { type: "image/jpeg" }); }; export function isSupportedRawFormat(exactType: string) { diff --git a/web/apps/photos/src/utils/image/index.ts b/web/apps/photos/src/utils/image/index.ts index e4884716c..7583f97c2 100644 --- a/web/apps/photos/src/utils/image/index.ts +++ b/web/apps/photos/src/utils/image/index.ts @@ -1,10 +1,8 @@ // these utils only work in env where OffscreenCanvas is available import { Matrix, inverse } from "ml-matrix"; -import { BlobOptions, Dimensions } from "types/image"; -import { FaceAlignment } from "types/machineLearning"; -import { enlargeBox } from "utils/machineLearning"; -import { Box } from "../../../thirdparty/face-api/classes"; +import { Box, Dimensions, enlargeBox } from "services/face/geom"; +import { FaceAlignment } from "services/face/types"; export function normalizePixelBetween0And1(pixelValue: number) { return pixelValue / 255.0; @@ -447,17 +445,22 @@ export function addPadding(image: ImageBitmap, padding: number) { return offscreen.transferToImageBitmap(); } -export async function imageBitmapToBlob( - imageBitmap: ImageBitmap, - options?: BlobOptions, -) { +export interface BlobOptions { + type?: string; + quality?: number; +} + +export async function imageBitmapToBlob(imageBitmap: ImageBitmap) { const offscreen = new OffscreenCanvas( imageBitmap.width, imageBitmap.height, ); offscreen.getContext("2d").drawImage(imageBitmap, 0, 0); - return offscreen.convertToBlob(options); + return offscreen.convertToBlob({ + type: "image/jpeg", + quality: 0.8, + }); } export async function imageBitmapFromBlob(blob: Blob) { diff --git a/web/apps/photos/src/utils/machineLearning/config.ts b/web/apps/photos/src/utils/machineLearning/config.ts deleted file mode 100644 index 0c25356ab..000000000 --- 
a/web/apps/photos/src/utils/machineLearning/config.ts +++ /dev/null @@ -1,48 +0,0 @@ -import { - DEFAULT_ML_SEARCH_CONFIG, - DEFAULT_ML_SYNC_CONFIG, - DEFAULT_ML_SYNC_JOB_CONFIG, -} from "constants/mlConfig"; -import { JobConfig } from "types/common/job"; -import { MLSearchConfig, MLSyncConfig } from "types/machineLearning"; -import mlIDbStorage, { - ML_SEARCH_CONFIG_NAME, - ML_SYNC_CONFIG_NAME, - ML_SYNC_JOB_CONFIG_NAME, -} from "utils/storage/mlIDbStorage"; -import { isInternalUserForML } from "utils/user"; - -export async function getMLSyncJobConfig() { - return mlIDbStorage.getConfig( - ML_SYNC_JOB_CONFIG_NAME, - DEFAULT_ML_SYNC_JOB_CONFIG, - ); -} - -export async function getMLSyncConfig() { - return mlIDbStorage.getConfig(ML_SYNC_CONFIG_NAME, DEFAULT_ML_SYNC_CONFIG); -} - -export async function getMLSearchConfig() { - if (isInternalUserForML()) { - return mlIDbStorage.getConfig( - ML_SEARCH_CONFIG_NAME, - DEFAULT_ML_SEARCH_CONFIG, - ); - } - // Force disabled for everyone else while we finalize it to avoid redundant - // reindexing for users. 
- return DEFAULT_ML_SEARCH_CONFIG; -} - -export async function updateMLSyncJobConfig(newConfig: JobConfig) { - return mlIDbStorage.putConfig(ML_SYNC_JOB_CONFIG_NAME, newConfig); -} - -export async function updateMLSyncConfig(newConfig: MLSyncConfig) { - return mlIDbStorage.putConfig(ML_SYNC_CONFIG_NAME, newConfig); -} - -export async function updateMLSearchConfig(newConfig: MLSearchConfig) { - return mlIDbStorage.putConfig(ML_SEARCH_CONFIG_NAME, newConfig); -} diff --git a/web/apps/photos/src/utils/machineLearning/index.ts b/web/apps/photos/src/utils/machineLearning/index.ts deleted file mode 100644 index bc9ae3974..000000000 --- a/web/apps/photos/src/utils/machineLearning/index.ts +++ /dev/null @@ -1,284 +0,0 @@ -import { FILE_TYPE } from "@/media/file-type"; -import { decodeLivePhoto } from "@/media/live-photo"; -import log from "@/next/log"; -import PQueue from "p-queue"; -import DownloadManager from "services/download"; -import { getLocalFiles } from "services/fileService"; -import { EnteFile } from "types/file"; -import { Dimensions } from "types/image"; -import { - DetectedFace, - Face, - FaceAlignment, - MlFileData, - Person, - Versioned, -} from "types/machineLearning"; -import { getRenderableImage } from "utils/file"; -import { clamp, warpAffineFloat32List } from "utils/image"; -import mlIDbStorage from "utils/storage/mlIDbStorage"; -import { Box, Point } from "../../../thirdparty/face-api/classes"; - -export function newBox(x: number, y: number, width: number, height: number) { - return new Box({ x, y, width, height }); -} - -export function getBoxCenterPt(topLeft: Point, bottomRight: Point): Point { - return topLeft.add(bottomRight.sub(topLeft).div(new Point(2, 2))); -} - -export function getBoxCenter(box: Box): Point { - return getBoxCenterPt(box.topLeft, box.bottomRight); -} - -export function enlargeBox(box: Box, factor: number = 1.5) { - const center = getBoxCenter(box); - const size = new Point(box.width, box.height); - const newHalfSize = new 
Point((factor * size.x) / 2, (factor * size.y) / 2); - - return new Box({ - left: center.x - newHalfSize.x, - top: center.y - newHalfSize.y, - right: center.x + newHalfSize.x, - bottom: center.y + newHalfSize.y, - }); -} - -export function getAllFacesFromMap(allFacesMap: Map>) { - const allFaces = [...allFacesMap.values()].flat(); - - return allFaces; -} - -export async function getLocalFile(fileId: number) { - const localFiles = await getLocalFiles(); - return localFiles.find((f) => f.id === fileId); -} - -export async function extractFaceImagesToFloat32( - faceAlignments: Array, - faceSize: number, - image: ImageBitmap, -): Promise { - const faceData = new Float32Array( - faceAlignments.length * faceSize * faceSize * 3, - ); - for (let i = 0; i < faceAlignments.length; i++) { - const alignedFace = faceAlignments[i]; - const faceDataOffset = i * faceSize * faceSize * 3; - warpAffineFloat32List( - image, - alignedFace, - faceSize, - faceData, - faceDataOffset, - ); - } - return faceData; -} - -export function getFaceId(detectedFace: DetectedFace, imageDims: Dimensions) { - const xMin = clamp( - detectedFace.detection.box.x / imageDims.width, - 0.0, - 0.999999, - ) - .toFixed(5) - .substring(2); - const yMin = clamp( - detectedFace.detection.box.y / imageDims.height, - 0.0, - 0.999999, - ) - .toFixed(5) - .substring(2); - const xMax = clamp( - (detectedFace.detection.box.x + detectedFace.detection.box.width) / - imageDims.width, - 0.0, - 0.999999, - ) - .toFixed(5) - .substring(2); - const yMax = clamp( - (detectedFace.detection.box.y + detectedFace.detection.box.height) / - imageDims.height, - 0.0, - 0.999999, - ) - .toFixed(5) - .substring(2); - - const rawFaceID = `${xMin}_${yMin}_${xMax}_${yMax}`; - const faceID = `${detectedFace.fileId}_${rawFaceID}`; - - return faceID; -} - -export async function getImageBlobBitmap(blob: Blob): Promise { - return await createImageBitmap(blob); -} - -async function getOriginalFile(file: EnteFile, queue?: PQueue) { - let 
fileStream; - if (queue) { - fileStream = await queue.add(() => DownloadManager.getFile(file)); - } else { - fileStream = await DownloadManager.getFile(file); - } - return new Response(fileStream).blob(); -} - -async function getOriginalConvertedFile(file: EnteFile, queue?: PQueue) { - const fileBlob = await getOriginalFile(file, queue); - if (file.metadata.fileType === FILE_TYPE.IMAGE) { - return await getRenderableImage(file.metadata.title, fileBlob); - } else { - const { imageFileName, imageData } = await decodeLivePhoto( - file.metadata.title, - fileBlob, - ); - return await getRenderableImage(imageFileName, new Blob([imageData])); - } -} - -export async function getOriginalImageBitmap(file: EnteFile, queue?: PQueue) { - const fileBlob = await getOriginalConvertedFile(file, queue); - log.info("[MLService] Got file: ", file.id.toString()); - return getImageBlobBitmap(fileBlob); -} - -export async function getThumbnailImageBitmap(file: EnteFile) { - const thumb = await DownloadManager.getThumbnail(file); - log.info("[MLService] Got thumbnail: ", file.id.toString()); - - return getImageBlobBitmap(new Blob([thumb])); -} - -export async function getLocalFileImageBitmap( - enteFile: EnteFile, - localFile: globalThis.File, -) { - let fileBlob = localFile as Blob; - fileBlob = await getRenderableImage(enteFile.metadata.title, fileBlob); - return getImageBlobBitmap(fileBlob); -} - -export async function getPeopleList(file: EnteFile): Promise> { - let startTime = Date.now(); - const mlFileData: MlFileData = await mlIDbStorage.getFile(file.id); - log.info( - "getPeopleList:mlFilesStore:getItem", - Date.now() - startTime, - "ms", - ); - if (!mlFileData?.faces || mlFileData.faces.length < 1) { - return []; - } - - const peopleIds = mlFileData.faces - .filter((f) => f.personId !== null && f.personId !== undefined) - .map((f) => f.personId); - if (!peopleIds || peopleIds.length < 1) { - return []; - } - // log.info("peopleIds: ", peopleIds); - startTime = Date.now(); - const 
peoplePromises = peopleIds.map( - (p) => mlIDbStorage.getPerson(p) as Promise, - ); - const peopleList = await Promise.all(peoplePromises); - log.info( - "getPeopleList:mlPeopleStore:getItems", - Date.now() - startTime, - "ms", - ); - // log.info("peopleList: ", peopleList); - - return peopleList; -} - -export async function getUnidentifiedFaces( - file: EnteFile, -): Promise> { - const mlFileData: MlFileData = await mlIDbStorage.getFile(file.id); - - return mlFileData?.faces?.filter( - (f) => f.personId === null || f.personId === undefined, - ); -} - -export async function getAllPeople(limit: number = undefined) { - let people: Array = await mlIDbStorage.getAllPeople(); - // await mlPeopleStore.iterate((person) => { - // people.push(person); - // }); - people = people ?? []; - return people - .sort((p1, p2) => p2.files.length - p1.files.length) - .slice(0, limit); -} - -export function findFirstIfSorted( - elements: Array, - comparator: (a: T, b: T) => number, -) { - if (!elements || elements.length < 1) { - return; - } - let first = elements[0]; - - for (let i = 1; i < elements.length; i++) { - const comp = comparator(elements[i], first); - if (comp < 0) { - first = elements[i]; - } - } - - return first; -} - -export function isDifferentOrOld( - method: Versioned, - thanMethod: Versioned, -) { - return ( - !method || - method.value !== thanMethod.value || - method.version < thanMethod.version - ); -} - -function primitiveArrayEquals(a, b) { - return ( - Array.isArray(a) && - Array.isArray(b) && - a.length === b.length && - a.every((val, index) => val === b[index]) - ); -} - -export function areFaceIdsSame(ofFaces: Array, toFaces: Array) { - if ( - (ofFaces === null || ofFaces === undefined) && - (toFaces === null || toFaces === undefined) - ) { - return true; - } - return primitiveArrayEquals( - ofFaces?.map((f) => f.id), - toFaces?.map((f) => f.id), - ); -} - -export function logQueueStats(queue: PQueue, name: string) { - queue.on("active", () => - log.info( - 
`queuestats: ${name}: Active, Size: ${queue.size} Pending: ${queue.pending}`, - ), - ); - queue.on("idle", () => log.info(`queuestats: ${name}: Idle`)); - queue.on("error", (error) => - console.error(`queuestats: ${name}: Error, `, error), - ); -} diff --git a/web/apps/photos/src/utils/machineLearning/mldataMappers.ts b/web/apps/photos/src/utils/machineLearning/mldataMappers.ts deleted file mode 100644 index fb91420aa..000000000 --- a/web/apps/photos/src/utils/machineLearning/mldataMappers.ts +++ /dev/null @@ -1,265 +0,0 @@ -import { - Face, - FaceDetection, - Landmark, - MlFileData, -} from "types/machineLearning"; -import { ClipEmbedding } from "types/machineLearning/data/clip"; - -export interface FileML extends ServerFileMl { - updatedAt: number; -} - -class ServerFileMl { - public fileID: number; - public height?: number; - public width?: number; - public faceEmbedding: ServerFaceEmbeddings; - public clipEmbedding?: ClipEmbedding; - - public constructor( - fileID: number, - faceEmbedding: ServerFaceEmbeddings, - clipEmbedding?: ClipEmbedding, - height?: number, - width?: number, - ) { - this.fileID = fileID; - this.height = height; - this.width = width; - this.faceEmbedding = faceEmbedding; - this.clipEmbedding = clipEmbedding; - } - - toJson(): string { - return JSON.stringify(this); - } - - static fromJson(json: string): ServerFileMl { - return JSON.parse(json); - } -} - -class ServerFaceEmbeddings { - public faces: ServerFace[]; - public version: number; - public client?: string; - public error?: boolean; - - public constructor( - faces: ServerFace[], - version: number, - client?: string, - error?: boolean, - ) { - this.faces = faces; - this.version = version; - this.client = client; - this.error = error; - } - - toJson(): string { - return JSON.stringify(this); - } - - static fromJson(json: string): ServerFaceEmbeddings { - return JSON.parse(json); - } -} - -class ServerFace { - public fileID: number; - public faceID: string; - public embeddings: number[]; 
- public detection: ServerDetection; - public score: number; - public blur: number; - public fileInfo?: ServerFileInfo; - - public constructor( - fileID: number, - faceID: string, - embeddings: number[], - detection: ServerDetection, - score: number, - blur: number, - fileInfo?: ServerFileInfo, - ) { - this.fileID = fileID; - this.faceID = faceID; - this.embeddings = embeddings; - this.detection = detection; - this.score = score; - this.blur = blur; - this.fileInfo = fileInfo; - } - - toJson(): string { - return JSON.stringify(this); - } - - static fromJson(json: string): ServerFace { - return JSON.parse(json); - } -} - -class ServerFileInfo { - public imageWidth?: number; - public imageHeight?: number; - - public constructor(imageWidth?: number, imageHeight?: number) { - this.imageWidth = imageWidth; - this.imageHeight = imageHeight; - } -} - -class ServerDetection { - public box: ServerFaceBox; - public landmarks: Landmark[]; - - public constructor(box: ServerFaceBox, landmarks: Landmark[]) { - this.box = box; - this.landmarks = landmarks; - } - - toJson(): string { - return JSON.stringify(this); - } - - static fromJson(json: string): ServerDetection { - return JSON.parse(json); - } -} - -class ServerFaceBox { - public xMin: number; - public yMin: number; - public width: number; - public height: number; - - public constructor( - xMin: number, - yMin: number, - width: number, - height: number, - ) { - this.xMin = xMin; - this.yMin = yMin; - this.width = width; - this.height = height; - } - - toJson(): string { - return JSON.stringify(this); - } - - static fromJson(json: string): ServerFaceBox { - return JSON.parse(json); - } -} - -export function LocalFileMlDataToServerFileMl( - localFileMlData: MlFileData, -): ServerFileMl { - if ( - localFileMlData.errorCount > 0 && - localFileMlData.lastErrorMessage !== undefined - ) { - return null; - } - const imageDimensions = localFileMlData.imageDimensions; - const fileInfo = new ServerFileInfo( - imageDimensions.width, - 
imageDimensions.height, - ); - const faces: ServerFace[] = []; - for (let i = 0; i < localFileMlData.faces.length; i++) { - const face: Face = localFileMlData.faces[i]; - const faceID = face.id; - const embedding = face.embedding; - const score = face.detection.probability; - const blur = face.blurValue; - const detection: FaceDetection = face.detection; - const box = detection.box; - const landmarks = detection.landmarks; - const newBox = new ServerFaceBox(box.x, box.y, box.width, box.height); - const newLandmarks: Landmark[] = []; - for (let j = 0; j < landmarks.length; j++) { - newLandmarks.push({ - x: landmarks[j].x, - y: landmarks[j].y, - } as Landmark); - } - - const newFaceObject = new ServerFace( - localFileMlData.fileId, - faceID, - Array.from(embedding), - new ServerDetection(newBox, newLandmarks), - score, - blur, - fileInfo, - ); - faces.push(newFaceObject); - } - const faceEmbeddings = new ServerFaceEmbeddings( - faces, - 1, - localFileMlData.lastErrorMessage, - ); - return new ServerFileMl( - localFileMlData.fileId, - faceEmbeddings, - null, - imageDimensions.height, - imageDimensions.width, - ); -} - -// // Not sure if this actually works -// export function ServerFileMlToLocalFileMlData( -// serverFileMl: ServerFileMl, -// ): MlFileData { -// const faces: Face[] = []; -// const mlVersion: number = serverFileMl.faceEmbeddings.version; -// const errorCount = serverFileMl.faceEmbeddings.error ? 
1 : 0; -// for (let i = 0; i < serverFileMl.faceEmbeddings.faces.length; i++) { -// const face = serverFileMl.faceEmbeddings.faces[i]; -// if(face.detection.landmarks.length === 0) { -// continue; -// } -// const detection = face.detection; -// const box = detection.box; -// const landmarks = detection.landmarks; -// const newBox = new FaceBox( -// box.xMin, -// box.yMin, -// box.width, -// box.height, -// ); -// const newLandmarks: Landmark[] = []; -// for (let j = 0; j < landmarks.length; j++) { -// newLandmarks.push( -// { -// x: landmarks[j].x, -// y: landmarks[j].y, -// } as Landmark -// ); -// } -// const newDetection = new Detection(newBox, newLandmarks); -// const newFace = { - -// } as Face -// faces.push(newFace); -// } -// return { -// fileId: serverFileMl.fileID, -// imageDimensions: { -// width: serverFileMl.width, -// height: serverFileMl.height, -// }, -// faces, -// mlVersion, -// errorCount, -// }; -// } diff --git a/web/apps/photos/src/utils/machineLearning/transform.ts b/web/apps/photos/src/utils/machineLearning/transform.ts deleted file mode 100644 index e69de29bb..000000000 diff --git a/web/apps/photos/src/utils/native-stream.ts b/web/apps/photos/src/utils/native-stream.ts index 4ed9da753..e922c2621 100644 --- a/web/apps/photos/src/utils/native-stream.ts +++ b/web/apps/photos/src/utils/native-stream.ts @@ -111,7 +111,79 @@ export const writeStream = async ( const res = await fetch(req); if (!res.ok) - throw new Error( - `Failed to write stream to ${path}: HTTP ${res.status}`, - ); + throw new Error(`Failed to write stream to ${url}: HTTP ${res.status}`); +}; + +/** + * Variant of {@link writeStream} tailored for video conversion. + * + * @param blob The video to convert. + * + * @returns a token that can then be passed to {@link readConvertToMP4Stream} to + * read back the converted video. See: [Note: Convert to MP4]. 
+ */ +export const writeConvertToMP4Stream = async (_: Electron, blob: Blob) => { + const url = "stream://convert-to-mp4"; + + const req = new Request(url, { + method: "POST", + body: blob, + // @ts-expect-error TypeScript's libdom.d.ts does not include the + // "duplex" parameter, e.g. see + // https://github.com/node-fetch/node-fetch/issues/1769. + duplex: "half", + }); + + const res = await fetch(req); + if (!res.ok) + throw new Error(`Failed to write stream to ${url}: HTTP ${res.status}`); + + const token = res.text(); + return token; +}; + +/** + * Variant of {@link readStream} tailored for video conversion. + * + * @param token A token obtained from {@link writeConvertToMP4Stream}. + * + * @returns the contents of the converted video. See: [Note: Convert to MP4]. + */ +export const readConvertToMP4Stream = async ( + _: Electron, + token: string, +): Promise => { + const params = new URLSearchParams({ token }); + const url = new URL(`stream://convert-to-mp4?${params.toString()}`); + + const req = new Request(url, { method: "GET" }); + + const res = await fetch(req); + if (!res.ok) + throw new Error( + `Failed to read stream from ${url}: HTTP ${res.status}`, + ); + + return res.blob(); +}; + +/** + * Sibling of {@link readConvertToMP4Stream} to let the native side know when we + * are done reading the response, and they can dispose any temporary resources + * it was using. + * + * @param token A token obtained from {@link writeConvertToMP4Stream}. + */ +export const readConvertToMP4Done = async ( + _: Electron, + token: string, +): Promise => { + // The value for `done` is arbitrary, only its presence matters. 
+ const params = new URLSearchParams({ token, done: "1" }); + const url = new URL(`stream://convert-to-mp4?${params.toString()}`); + + const req = new Request(url, { method: "GET" }); + const res = await fetch(req); + if (!res.ok) + throw new Error(`Failed to close stream at ${url}: HTTP ${res.status}`); }; diff --git a/web/apps/photos/src/utils/ui/index.tsx b/web/apps/photos/src/utils/ui/index.tsx index 8f4895ead..8ac5f94bf 100644 --- a/web/apps/photos/src/utils/ui/index.tsx +++ b/web/apps/photos/src/utils/ui/index.tsx @@ -1,6 +1,5 @@ import { ensureElectron } from "@/next/electron"; import { AppUpdate } from "@/next/types/ipc"; -import { logoutUser } from "@ente/accounts/services/user"; import { DialogBoxAttributes } from "@ente/shared/components/DialogBox/types"; import AutoAwesomeOutlinedIcon from "@mui/icons-material/AutoAwesomeOutlined"; import InfoOutlined from "@mui/icons-material/InfoRounded"; @@ -69,6 +68,7 @@ export const getUpdateReadyToInstallMessage = ({ variant: "secondary", action: () => ensureElectron().updateOnNextRestart(version), }, + staticBackdrop: true, }); export const getUpdateAvailableForDownloadMessage = ({ @@ -121,14 +121,16 @@ export const getSubscriptionPurchaseSuccessMessage = ( ), }); -export const getSessionExpiredMessage = (): DialogBoxAttributes => ({ +export const getSessionExpiredMessage = ( + action: () => void, +): DialogBoxAttributes => ({ title: t("SESSION_EXPIRED"), content: t("SESSION_EXPIRED_MESSAGE"), nonClosable: true, proceed: { text: t("LOGIN"), - action: logoutUser, + action, variant: "accent", }, }); diff --git a/web/apps/photos/src/utils/user/index.ts b/web/apps/photos/src/utils/user/index.ts index 68ffc9bbd..0f8ef142f 100644 --- a/web/apps/photos/src/utils/user/index.ts +++ b/web/apps/photos/src/utils/user/index.ts @@ -14,8 +14,8 @@ export const isInternalUser = () => { }; export const isInternalUserForML = () => { - const userId = (getData(LS_KEYS.USER) as User)?.id; - if (userId == 1) return true; + const userID 
= (getData(LS_KEYS.USER) as User)?.id; + if (userID == 1 || userID == 2) return true; return isInternalUser(); }; diff --git a/web/apps/photos/src/worker/ffmpeg.worker.ts b/web/apps/photos/src/worker/ffmpeg.worker.ts index 946a2090f..d9d6c718f 100644 --- a/web/apps/photos/src/worker/ffmpeg.worker.ts +++ b/web/apps/photos/src/worker/ffmpeg.worker.ts @@ -1,5 +1,4 @@ import log from "@/next/log"; -import { withTimeout } from "@ente/shared/utils"; import QueueProcessor from "@ente/shared/utils/queueProcessor"; import { expose } from "comlink"; import { @@ -7,6 +6,24 @@ import { inputPathPlaceholder, outputPathPlaceholder, } from "constants/ffmpeg"; + +// When we run tsc on CI, the line below errors out +// +// > Error: src/worker/ffmpeg.worker.ts(10,38): error TS2307: Cannot find module +// 'ffmpeg-wasm' or its corresponding type declarations. +// +// Building and running works fine. And this error does not occur when running +// tsc locally either. +// +// Of course, there is some misconfiguration, but we plan to move off our old +// fork and onto upstream ffmpeg-wasm, and the reason can be figured out then. +// For now, disable the error to allow the CI lint to complete. +// +// Note that we can't use @ts-expect-error since it doesn't error out when +// actually building! +// +// eslint-disable-next-line @typescript-eslint/ban-ts-comment +// @ts-ignore import { FFmpeg, createFFmpeg } from "ffmpeg-wasm"; export class DedicatedFFmpegWorker { @@ -30,15 +47,11 @@ export class DedicatedFFmpegWorker { command: string[], blob: Blob, outputFileExtension: string, - timeoutMs, ): Promise { if (!this.ffmpeg.isLoaded()) await this.ffmpeg.load(); - const go = () => - ffmpegExec(this.ffmpeg, command, outputFileExtension, blob); - const request = this.ffmpegTaskQueue.queueUpRequest(() => - timeoutMs ? 
withTimeout(go(), timeoutMs) : go(), + ffmpegExec(this.ffmpeg, command, outputFileExtension, blob), ); return await request.promise; diff --git a/web/apps/photos/thirdparty/face-api/classes/BoundingBox.ts b/web/apps/photos/thirdparty/face-api/classes/BoundingBox.ts deleted file mode 100644 index 7263b4b96..000000000 --- a/web/apps/photos/thirdparty/face-api/classes/BoundingBox.ts +++ /dev/null @@ -1,14 +0,0 @@ -import { Box } from './Box'; - -export interface IBoundingBox { - left: number - top: number - right: number - bottom: number -} - -export class BoundingBox extends Box implements IBoundingBox { - constructor(left: number, top: number, right: number, bottom: number, allowNegativeDimensions: boolean = false) { - super({ left, top, right, bottom }, allowNegativeDimensions) - } -} \ No newline at end of file diff --git a/web/apps/photos/thirdparty/face-api/classes/Box.ts b/web/apps/photos/thirdparty/face-api/classes/Box.ts deleted file mode 100644 index fcf1cbebb..000000000 --- a/web/apps/photos/thirdparty/face-api/classes/Box.ts +++ /dev/null @@ -1,182 +0,0 @@ -import { IBoundingBox } from './BoundingBox'; -import { IDimensions } from './Dimensions'; -import { Point } from './Point'; -import { IRect } from './Rect'; - -export class Box implements IBoundingBox, IRect { - - public static isRect(rect: any): boolean { - return !!rect && [rect.x, rect.y, rect.width, rect.height].every(isValidNumber) - } - - public static assertIsValidBox(box: any, callee: string, allowNegativeDimensions: boolean = false) { - if (!Box.isRect(box)) { - throw new Error(`${callee} - invalid box: ${JSON.stringify(box)}, expected object with properties x, y, width, height`) - } - - if (!allowNegativeDimensions && (box.width < 0 || box.height < 0)) { - throw new Error(`${callee} - width (${box.width}) and height (${box.height}) must be positive numbers`) - } - } - - public x: number - public y: number - public width: number - public height: number - - constructor(_box: IBoundingBox | 
IRect, allowNegativeDimensions: boolean = true) { - const box = (_box || {}) as any - - const isBbox = [box.left, box.top, box.right, box.bottom].every(isValidNumber) - const isRect = [box.x, box.y, box.width, box.height].every(isValidNumber) - - if (!isRect && !isBbox) { - throw new Error(`Box.constructor - expected box to be IBoundingBox | IRect, instead have ${JSON.stringify(box)}`) - } - - const [x, y, width, height] = isRect - ? [box.x, box.y, box.width, box.height] - : [box.left, box.top, box.right - box.left, box.bottom - box.top] - - Box.assertIsValidBox({ x, y, width, height }, 'Box.constructor', allowNegativeDimensions) - - this.x = x - this.y = y - this.width = width - this.height = height - } - - // public get x(): number { return this._x } - // public get y(): number { return this._y } - // public get width(): number { return this._width } - // public get height(): number { return this._height } - public get left(): number { return this.x } - public get top(): number { return this.y } - public get right(): number { return this.x + this.width } - public get bottom(): number { return this.y + this.height } - public get area(): number { return this.width * this.height } - public get topLeft(): Point { return new Point(this.left, this.top) } - public get topRight(): Point { return new Point(this.right, this.top) } - public get bottomLeft(): Point { return new Point(this.left, this.bottom) } - public get bottomRight(): Point { return new Point(this.right, this.bottom) } - - public round(): Box { - const [x, y, width, height] = [this.x, this.y, this.width, this.height] - .map(val => Math.round(val)) - return new Box({ x, y, width, height }) - } - - public floor(): Box { - const [x, y, width, height] = [this.x, this.y, this.width, this.height] - .map(val => Math.floor(val)) - return new Box({ x, y, width, height }) - } - - public toSquare(): Box { - let { x, y, width, height } = this - const diff = Math.abs(width - height) - if (width < height) { - x -= (diff 
/ 2) - width += diff - } - if (height < width) { - y -= (diff / 2) - height += diff - } - - return new Box({ x, y, width, height }) - } - - public rescale(s: IDimensions | number): Box { - const scaleX = isDimensions(s) ? (s as IDimensions).width : s as number - const scaleY = isDimensions(s) ? (s as IDimensions).height : s as number - return new Box({ - x: this.x * scaleX, - y: this.y * scaleY, - width: this.width * scaleX, - height: this.height * scaleY - }) - } - - public pad(padX: number, padY: number): Box { - let [x, y, width, height] = [ - this.x - (padX / 2), - this.y - (padY / 2), - this.width + padX, - this.height + padY - ] - return new Box({ x, y, width, height }) - } - - public clipAtImageBorders(imgWidth: number, imgHeight: number): Box { - const { x, y, right, bottom } = this - const clippedX = Math.max(x, 0) - const clippedY = Math.max(y, 0) - - const newWidth = right - clippedX - const newHeight = bottom - clippedY - const clippedWidth = Math.min(newWidth, imgWidth - clippedX) - const clippedHeight = Math.min(newHeight, imgHeight - clippedY) - - return (new Box({ x: clippedX, y: clippedY, width: clippedWidth, height: clippedHeight})).floor() - } - - public shift(sx: number, sy: number): Box { - const { width, height } = this - const x = this.x + sx - const y = this.y + sy - - return new Box({ x, y, width, height }) - } - - public padAtBorders(imageHeight: number, imageWidth: number) { - const w = this.width + 1 - const h = this.height + 1 - - let dx = 1 - let dy = 1 - let edx = w - let edy = h - - let x = this.left - let y = this.top - let ex = this.right - let ey = this.bottom - - if (ex > imageWidth) { - edx = -ex + imageWidth + w - ex = imageWidth - } - if (ey > imageHeight) { - edy = -ey + imageHeight + h - ey = imageHeight - } - if (x < 1) { - edy = 2 - x - x = 1 - } - if (y < 1) { - edy = 2 - y - y = 1 - } - - return { dy, edy, dx, edx, y, ey, x, ex, w, h } - } - - public calibrate(region: Box) { - return new Box({ - left: this.left + 
(region.left * this.width), - top: this.top + (region.top * this.height), - right: this.right + (region.right * this.width), - bottom: this.bottom + (region.bottom * this.height) - }).toSquare().round() - } -} - -export function isValidNumber(num: any) { - return !!num && num !== Infinity && num !== -Infinity && !isNaN(num) || num === 0 -} - -export function isDimensions(obj: any): boolean { - return obj && obj.width && obj.height -} diff --git a/web/apps/photos/thirdparty/face-api/classes/Dimensions.ts b/web/apps/photos/thirdparty/face-api/classes/Dimensions.ts deleted file mode 100644 index 0129f3b67..000000000 --- a/web/apps/photos/thirdparty/face-api/classes/Dimensions.ts +++ /dev/null @@ -1,28 +0,0 @@ -import { isValidNumber } from './Box'; - -export interface IDimensions { - width: number - height: number -} - -export class Dimensions implements IDimensions { - - private _width: number - private _height: number - - constructor(width: number, height: number) { - if (!isValidNumber(width) || !isValidNumber(height)) { - throw new Error(`Dimensions.constructor - expected width and height to be valid numbers, instead have ${JSON.stringify({ width, height })}`) - } - - this._width = width - this._height = height - } - - public get width(): number { return this._width } - public get height(): number { return this._height } - - public reverse(): Dimensions { - return new Dimensions(1 / this.width, 1 / this.height) - } -} diff --git a/web/apps/photos/thirdparty/face-api/classes/Point.ts b/web/apps/photos/thirdparty/face-api/classes/Point.ts deleted file mode 100644 index 3c32d5bc1..000000000 --- a/web/apps/photos/thirdparty/face-api/classes/Point.ts +++ /dev/null @@ -1,55 +0,0 @@ -export interface IPoint { - x: number - y: number -} - -export class Point implements IPoint { - public x: number - public y: number - - constructor(x: number, y: number) { - this.x = x - this.y = y - } - - // get x(): number { return this._x } - // get y(): number { return this._y } - - 
public add(pt: IPoint): Point { - return new Point(this.x + pt.x, this.y + pt.y) - } - - public sub(pt: IPoint): Point { - return new Point(this.x - pt.x, this.y - pt.y) - } - - public mul(pt: IPoint): Point { - return new Point(this.x * pt.x, this.y * pt.y) - } - - public div(pt: IPoint): Point { - return new Point(this.x / pt.x, this.y / pt.y) - } - - public abs(): Point { - return new Point(Math.abs(this.x), Math.abs(this.y)) - } - - public magnitude(): number { - return Math.sqrt(Math.pow(this.x, 2) + Math.pow(this.y, 2)) - } - - public floor(): Point { - return new Point(Math.floor(this.x), Math.floor(this.y)) - } - - public round(): Point { - return new Point(Math.round(this.x), Math.round(this.y)) - } - - public bound(lower: number, higher: number): Point { - const x = Math.max(lower, Math.min(higher, this.x)); - const y = Math.max(lower, Math.min(higher, this.y)); - return new Point(x, y); - } -} \ No newline at end of file diff --git a/web/apps/photos/thirdparty/face-api/classes/Rect.ts b/web/apps/photos/thirdparty/face-api/classes/Rect.ts deleted file mode 100644 index 550676984..000000000 --- a/web/apps/photos/thirdparty/face-api/classes/Rect.ts +++ /dev/null @@ -1,14 +0,0 @@ -import { Box } from './Box'; - -export interface IRect { - x: number - y: number - width: number - height: number -} - -export class Rect extends Box implements IRect { - constructor(x: number, y: number, width: number, height: number, allowNegativeDimensions: boolean = false) { - super({ x, y, width, height }, allowNegativeDimensions) - } -} \ No newline at end of file diff --git a/web/apps/photos/thirdparty/face-api/classes/index.ts b/web/apps/photos/thirdparty/face-api/classes/index.ts deleted file mode 100644 index 9bb7cccf4..000000000 --- a/web/apps/photos/thirdparty/face-api/classes/index.ts +++ /dev/null @@ -1,5 +0,0 @@ -export * from './BoundingBox' -export * from './Box' -export * from './Dimensions' -export * from './Point' -export * from './Rect' \ No newline at end of 
file diff --git a/web/docs/README.md b/web/docs/README.md index 365d3bea0..699b7adad 100644 --- a/web/docs/README.md +++ b/web/docs/README.md @@ -7,3 +7,7 @@ If you just want to run Ente's web apps locally or develop them, you can do The docs in this directory provide more details that some developers might find useful. + +> [!TIP] +> +> To prepare your machine, see [new](new.md). diff --git a/web/docs/dependencies.md b/web/docs/dependencies.md index 3e9cb9a2f..f5082b9f3 100644 --- a/web/docs/dependencies.md +++ b/web/docs/dependencies.md @@ -5,23 +5,40 @@ These are some global dev dependencies in the root `package.json`. These set the baseline for how our code be in all the workspaces in this (yarn) monorepo. -- "prettier" - Formatter -- "eslint" - Linter -- "typescript" - Type checker +- [prettier](https://prettier.io) - Formatter + +- [eslint](https://eslint.org) - Linter + +- [typescript](https://www.typescriptlang.org/) - Type checker They also need some support packages, which come from the leaf `@/build-config` package: -- "@typescript-eslint/parser" - Tells ESLint how to read TypeScript syntax -- "@typescript-eslint/eslint-plugin" - Provides TypeScript rules and presets -- "eslint-plugin-react-hooks", "eslint-plugin-react-namespace-import" - Some - React specific ESLint rules and configurations that are used by the - workspaces that have React code. -- "eslint-plugin-react-refresh" - A plugin to ensure that React components are - exported in a way that they can be HMR-ed. -- "prettier-plugin-organize-imports" - A Prettier plugin to sort imports. -- "prettier-plugin-packagejson" - A Prettier plugin to also prettify - `package.json`. +- [@typescript-eslint/parser](https://typescript-eslint.io/packages/eslint-plugin/) + \- Tells ESLint how to read TypeScript syntax. 
+ +- [@typescript-eslint/eslint-plugin](https://typescript-eslint.io/packages/eslint-plugin/) + \- Provides TypeScript rules and presets + +- [eslint-plugin-react-hooks](https://github.com/jsx-eslint/eslint-plugin-react), + [eslint-plugin-react-hooks](https://reactjs.org/) \- Some React specific + ESLint rules and configurations that are used by the workspaces that have + React code. + +- [eslint-plugin-react-refresh](https://github.com/ArnaudBarre/eslint-plugin-react-refresh) + \- A plugin to ensure that React components are exported in a way that they + can be HMR-ed. + +- [prettier-plugin-organize-imports](https://github.com/simonhaenisch/prettier-plugin-organize-imports) + \- A Prettier plugin to sort imports. + +- [prettier-plugin-packagejson](https://github.com/matzkoh/prettier-plugin-packagejson) + \- A Prettier plugin to also prettify `package.json`. + +The root `package.json` also has a convenience dev dependency: + +- [concurrently](https://github.com/open-cli-tools/concurrently) for spawning + parallel tasks when we invoke various yarn scripts. ## Utils @@ -141,6 +158,14 @@ some cases. became ESM only - for our limited use case, the custom Webpack configuration that entails is not worth the upgrade. +- [heic-convert](https://github.com/catdad-experiments/heic-convert) is used + for converting HEIC files (which browsers don't natively support) into JPEG. + +## Processing + +- [comlink](https://github.com/GoogleChromeLabs/comlink) provides a minimal + layer on top of Web Workers to make them more easier to use. + ## Photos app specific - [react-dropzone](https://github.com/react-dropzone/react-dropzone/) is a @@ -149,3 +174,15 @@ some cases. - [sanitize-filename](https://github.com/parshap/node-sanitize-filename) is for converting arbitrary strings into strings that are suitable for being used as filenames. 
+ +## Face search + +- [matrix](https://github.com/mljs/matrix) and + [similarity-transformation](https://github.com/shaileshpandit/similarity-transformation-js) + are used during face alignment. + +- [transformation-matrix](https://github.com/chrvadala/transformation-matrix) + is used during face detection. + +- [hdbscan](https://github.com/shaileshpandit/hdbscan-js) is used for face + clustering. diff --git a/web/docs/deploy.md b/web/docs/deploy.md index 6358cb87f..75c3106d1 100644 --- a/web/docs/deploy.md +++ b/web/docs/deploy.md @@ -1,50 +1,46 @@ # Deploying The various web apps and static sites in this repository are deployed on -Cloudflare Pages. +Cloudflare Pages using GitHub workflows. -- Production deployments are triggered by pushing to the `deploy/*` branches. +- Automated production deployments of `main` daily 8:00 AM IST. + +- Automated staging deployments `*.ente.sh` of `main` daily 3:00 PM IST. - [help.ente.io](https://help.ente.io) gets deployed whenever a PR that changes anything inside `docs/` gets merged to `main`. -- Every night, all the web apps get automatically deployed to a nightly - preview URLs (`*.ente.sh`) using the current code in main. +- Production or staging deployments can made manually by triggering the + corresponding workflow. There is variant to deploy a single app to + production using the `web-deploy-one.yml` workflow, and a variant to deploy + any one of the apps to `preview.ente.sh` (see below). -- A preview deployment can be made by triggering the "Preview (web)" workflow. - This allows us to deploy a build of any of the apps from an arbitrary branch - to [preview.ente.sh](https://preview.ente.sh). - -Use the various `yarn deploy:*` commands to help with production deployments. -For example, `yarn deploy:photos` will open a PR to merge the current `main` -onto `deploy/photos`, which'll trigger the deployment workflow, which'll build -and publish to [web.ente.io](https://web.ente.io). 
- -> When merging these deployment PRs, remember to use rebase and merge so that -> their HEAD is a fast forward of `main` instead of diverging from it because of -> the merge commit. +These GitHub workflows use the various `yarn deploy:*` commands. For example, +`yarn deploy:photos` will open a PR to merge the current `main` onto +`deploy/photos`, which'll trigger the deployment workflow, which'll build and +publish to [web.ente.io](https://web.ente.io). ## Deployments Here is a list of all the deployments, whether or not they are production deployments, and the action that triggers them: -| URL | Type | Deployment action | -| -------------------------------------------- | ---------- | -------------------------------------------- | -| [web.ente.io](https://web.ente.io) | Production | Push to `deploy/photos` | -| [photos.ente.io](https://photos.ente.io) | Production | Alias of [web.ente.io](https://web.ente.io) | -| [auth.ente.io](https://auth.ente.io) | Production | Push to `deploy/auth` | -| [accounts.ente.io](https://accounts.ente.io) | Production | Push to `deploy/accounts` | -| [cast.ente.io](https://cast.ente.io) | Production | Push to `deploy/cast` | -| [payments.ente.io](https://payments.ente.io) | Production | Push to `deploy/payments` | -| [help.ente.io](https://help.ente.io) | Production | Push to `main` + changes in `docs/` | -| [staff.ente.sh](https://staff.ente.sh) | Production | Push to `main` + changes in `web/apps/staff` | -| [accounts.ente.sh](https://accounts.ente.sh) | Preview | Nightly deploy of `main` | -| [auth.ente.sh](https://auth.ente.sh) | Preview | Nightly deploy of `main` | -| [cast.ente.sh](https://cast.ente.sh) | Preview | Nightly deploy of `main` | -| [payments.ente.sh](https://payments.ente.sh) | Preview | Nightly deploy of `main` | -| [photos.ente.sh](https://photos.ente.sh) | Preview | Nightly deploy of `main` | -| [preview.ente.sh](https://preview.ente.sh) | Preview | Manually triggered | +| URL | Type | Deployment action | +| 
-------------------------------------------- | ---------- | --------------------------------------------- | +| [web.ente.io](https://web.ente.io) | Production | Daily deploy of `main` | +| [photos.ente.io](https://photos.ente.io) | Production | Alias of [web.ente.io](https://web.ente.io) | +| [auth.ente.io](https://auth.ente.io) | Production | Daily deploy of `main` | +| [accounts.ente.io](https://accounts.ente.io) | Production | Daily deploy of `main` | +| [cast.ente.io](https://cast.ente.io) | Production | Daily deploy of `main` | +| [payments.ente.io](https://payments.ente.io) | Production | Daily deploy of `main` | +| [help.ente.io](https://help.ente.io) | Production | Changes in `docs/` on push to `main` | +| [staff.ente.sh](https://staff.ente.sh) | Production | Changes in `web/apps/staff` on push to `main` | +| [accounts.ente.sh](https://accounts.ente.sh) | Preview | Daily deploy of `main` | +| [auth.ente.sh](https://auth.ente.sh) | Preview | Daily deploy of `main` | +| [cast.ente.sh](https://cast.ente.sh) | Preview | Daily deploy of `main` | +| [payments.ente.sh](https://payments.ente.sh) | Preview | Daily deploy of `main` | +| [photos.ente.sh](https://photos.ente.sh) | Preview | Daily deploy of `main` | +| [preview.ente.sh](https://preview.ente.sh) | Preview | Manually triggered | ### Other subdomains @@ -60,10 +56,10 @@ Apart from this, there are also some other deployments: ### Preview deployments -To trigger a preview deployment, manually trigger the "Preview (web)" workflow -from the Actions tab on GitHub. You'll need to select the app to build, and the -branch to use. This'll then build the specified app (e.g. "photos") from that -branch, and deploy it to [preview.ente.sh](https://preview.ente.sh). +To trigger a preview deployment, manually trigger the "Deploy preview (web)" +workflow from the Actions tab on GitHub. You'll need to select the app to build, +and the branch to use. This'll then build the specified app (e.g. 
"photos") from +that branch, and deploy it to [preview.ente.sh](https://preview.ente.sh). The workflow can also be triggered using GitHub's CLI, gh. e.g. diff --git a/web/docs/new.md b/web/docs/new.md index 4500617b5..0617a8ac6 100644 --- a/web/docs/new.md +++ b/web/docs/new.md @@ -1,26 +1,35 @@ # Welcome! -If you're new to this sort of stuff or coming back to it after mobile/backend +If you're new to web stuff or coming back to it after mobile/backend development, here is a recommended workflow: -1. Install VS Code. +1. Install **VS Code**. -2. Install the Prettier and ESLint extensions. +2. Install the **Prettier** and **ESLint** extensions. 3. Enable the VS Code setting to format on save. -4. Install node on your machine `brew install node@20`. Our package manager, - `yarn` comes with it. +4. Install **node** on your machine. There are myriad ways to do this, here are + some examples: + + - macOS: `brew install node@20` + + - Ubuntu: `sudo apt install nodejs npm && sudo npm i -g corepack` + +5. Enable corepack. This allows us to use the correct version of our package + manager (**Yarn**): + + ```sh + + corepack enable + ``` + + If now you run `yarn --version` in the web directory, you should be seeing a + 1.22.xx version, otherwise your `yarn install` will fail. + + ```sh + $ yarn --version + 1.22.21 + ``` That's it. Enjoy coding! - -## Yarn - -Note that we use Yarn classic - -``` -$ yarn --version -1.22.21 -``` - -You should be seeing a 1.xx.xx version, otherwise your `yarn install` will fail. 
diff --git a/web/package.json b/web/package.json index 647ee3ba3..ec096189a 100644 --- a/web/package.json +++ b/web/package.json @@ -22,13 +22,13 @@ "dev": "yarn dev:photos", "dev:accounts": "yarn workspace accounts next dev -p 3001", "dev:albums": "yarn workspace photos next dev -p 3002", - "dev:auth": "yarn workspace auth next dev", + "dev:auth": "yarn workspace auth next dev -p 3000", "dev:cast": "yarn workspace cast next dev -p 3001", "dev:payments": "yarn workspace payments dev", - "dev:photos": "yarn workspace photos next dev", + "dev:photos": "yarn workspace photos next dev -p 3000", "dev:staff": "yarn workspace staff dev", - "lint": "yarn prettier --check --log-level warn . && yarn workspaces run eslint --report-unused-disable-directives .", - "lint-fix": "yarn prettier --write --log-level warn . && yarn workspaces run eslint --fix .", + "lint": "concurrently --names 'prettier,eslint,tsc' \"yarn prettier --check --log-level warn .\" \"yarn workspaces run eslint --report-unused-disable-directives .\" \"yarn workspaces run tsc\"", + "lint-fix": "concurrently --names 'prettier,eslint,tsc' \"yarn prettier --write --log-level warn .\" \"yarn workspaces run eslint --report-unused-disable-directives --fix .\" \"yarn workspaces run tsc\"", "preview": "yarn preview:photos", "preview:accounts": "yarn build:accounts && python3 -m http.server -d apps/accounts/out 3001", "preview:auth": "yarn build:auth && python3 -m http.server -d apps/auth/out 3000", @@ -41,8 +41,10 @@ "libsodium": "0.7.9" }, "devDependencies": { + "concurrently": "^8.2.2", "eslint": "^8", "prettier": "^3", "typescript": "^5" - } + }, + "packageManager": "yarn@1.22.21" } diff --git a/web/packages/accounts/api/user.ts b/web/packages/accounts/api/user.ts index 7a072064e..7e313b38e 100644 --- a/web/packages/accounts/api/user.ts +++ b/web/packages/accounts/api/user.ts @@ -43,7 +43,7 @@ export const putAttributes = (token: string, keyAttributes: KeyAttributes) => }, ); -export const _logout = async () => { 
+export const logout = async () => { try { const token = getToken(); await HTTPService.post(`${ENDPOINT}/users/logout`, null, undefined, { diff --git a/web/packages/accounts/components/ChangeEmail.tsx b/web/packages/accounts/components/ChangeEmail.tsx index ec647e671..0b175344b 100644 --- a/web/packages/accounts/components/ChangeEmail.tsx +++ b/web/packages/accounts/components/ChangeEmail.tsx @@ -1,3 +1,4 @@ +import { wait } from "@/utils/promise"; import { changeEmail, sendOTTForEmailChange } from "@ente/accounts/api/user"; import { APP_HOMES } from "@ente/shared/apps/constants"; import { PageProps } from "@ente/shared/apps/types"; @@ -6,7 +7,6 @@ import FormPaperFooter from "@ente/shared/components/Form/FormPaper/Footer"; import LinkButton from "@ente/shared/components/LinkButton"; import SubmitButton from "@ente/shared/components/SubmitButton"; import { LS_KEYS, getData, setData } from "@ente/shared/storage/localStorage"; -import { wait } from "@ente/shared/utils"; import { Alert, Box, TextField } from "@mui/material"; import { Formik, FormikHelpers } from "formik"; import { t } from "i18next"; diff --git a/web/packages/accounts/components/two-factor/VerifyForm.tsx b/web/packages/accounts/components/two-factor/VerifyForm.tsx index b7f7fc278..76fd87ba0 100644 --- a/web/packages/accounts/components/two-factor/VerifyForm.tsx +++ b/web/packages/accounts/components/two-factor/VerifyForm.tsx @@ -1,16 +1,15 @@ -import { Formik, FormikHelpers } from "formik"; -import { t } from "i18next"; -import { useRef, useState } from "react"; -import OtpInput from "react-otp-input"; - +import { wait } from "@/utils/promise"; import InvalidInputMessage from "@ente/accounts/components/two-factor/InvalidInputMessage"; import { CenteredFlex, VerticallyCentered, } from "@ente/shared/components/Container"; import SubmitButton from "@ente/shared/components/SubmitButton"; -import { wait } from "@ente/shared/utils"; import { Box, Typography } from "@mui/material"; +import { Formik, 
FormikHelpers } from "formik"; +import { t } from "i18next"; +import { useRef, useState } from "react"; +import OtpInput from "react-otp-input"; interface formValues { otp: string; diff --git a/web/packages/accounts/pages/credentials.tsx b/web/packages/accounts/pages/credentials.tsx index 36425c142..777fe97da 100644 --- a/web/packages/accounts/pages/credentials.tsx +++ b/web/packages/accounts/pages/credentials.tsx @@ -1,3 +1,4 @@ +import { isDevBuild } from "@/next/env"; import log from "@/next/log"; import { APP_HOMES } from "@ente/shared/apps/constants"; import { PageProps } from "@ente/shared/apps/types"; @@ -18,7 +19,7 @@ import { } from "@ente/shared/crypto/helpers"; import { B64EncryptionResult } from "@ente/shared/crypto/types"; import { CustomError } from "@ente/shared/error"; -import { getAccountsURL } from "@ente/shared/network/api"; +import { getAccountsURL, getEndpoint } from "@ente/shared/network/api"; import InMemoryStore, { MS_KEYS } from "@ente/shared/storage/InMemoryStore"; import { LS_KEYS, @@ -49,10 +50,11 @@ import { generateSRPSetupAttributes, loginViaSRP, } from "../services/srp"; -import { logoutUser } from "../services/user"; import { SRPAttributes } from "../types/srp"; export default function Credentials({ appContext, appName }: PageProps) { + const { logout } = appContext; + const [srpAttributes, setSrpAttributes] = useState(); const [keyAttributes, setKeyAttributes] = useState(); const [user, setUser] = useState(); @@ -259,7 +261,7 @@ export default function Credentials({ appContext, appName }: PageProps) { return ( - {user.email} +
{user.email}
+ {t("FORGOT_PASSWORD")} - + {t("CHANGE_EMAIL")} + + {isDevBuild && }
); } -const Title: React.FC = ({ children }) => { +const Header: React.FC = ({ children }) => { return ( - + {t("PASSWORD")} {children} - + ); }; -const Title_ = styled("div")` +const Header_ = styled("div")` margin-block-end: 4rem; display: flex; flex-direction: column; gap: 8px; `; + +const ConnectionDetails: React.FC = () => { + const apiOrigin = new URL(getEndpoint()); + + return ( + + + {apiOrigin.host} + + + ); +}; + +const ConnectionDetails_ = styled("div")` + margin-block-start: 1rem; +`; diff --git a/web/packages/accounts/pages/generate.tsx b/web/packages/accounts/pages/generate.tsx index fb92edb14..11c15a4f0 100644 --- a/web/packages/accounts/pages/generate.tsx +++ b/web/packages/accounts/pages/generate.tsx @@ -1,7 +1,6 @@ import log from "@/next/log"; import { putAttributes } from "@ente/accounts/api/user"; import { configureSRP } from "@ente/accounts/services/srp"; -import { logoutUser } from "@ente/accounts/services/user"; import { generateKeyAndSRPAttributes } from "@ente/accounts/utils/srp"; import { generateAndSaveIntermediateKeyAttributes, @@ -31,6 +30,8 @@ import { KeyAttributes, User } from "@ente/shared/user/types"; import { useRouter } from "next/router"; export default function Generate({ appContext, appName }: PageProps) { + const { logout } = appContext; + const [token, setToken] = useState(); const [user, setUser] = useState(); const [recoverModalView, setRecoveryModalView] = useState(false); @@ -113,7 +114,7 @@ export default function Generate({ appContext, appName }: PageProps) { buttonText={t("SET_PASSPHRASE")} /> - + {t("GO_BACK")} diff --git a/web/packages/accounts/pages/two-factor/recover.tsx b/web/packages/accounts/pages/two-factor/recover.tsx index 150bd47de..8ed187e0e 100644 --- a/web/packages/accounts/pages/two-factor/recover.tsx +++ b/web/packages/accounts/pages/two-factor/recover.tsx @@ -2,7 +2,6 @@ import log from "@/next/log"; import { recoverTwoFactor, removeTwoFactor } from "@ente/accounts/api/user"; import { PAGES } from 
"@ente/accounts/constants/pages"; import { TwoFactorType } from "@ente/accounts/constants/twofactor"; -import { logoutUser } from "@ente/accounts/services/user"; import { PageProps } from "@ente/shared/apps/types"; import { VerticallyCentered } from "@ente/shared/components/Container"; import { DialogBoxAttributesV2 } from "@ente/shared/components/DialogBoxV2/types"; @@ -33,6 +32,8 @@ export default function Recover({ appContext, twoFactorType = TwoFactorType.TOTP, }: PageProps) { + const { logout } = appContext; + const [encryptedTwoFactorSecret, setEncryptedTwoFactorSecret] = useState(null); const [sessionID, setSessionID] = useState(null); @@ -77,7 +78,7 @@ export default function Recover({ e instanceof ApiError && e.httpStatusCode === HttpStatusCode.NotFound ) { - logoutUser(); + logout(); } else { log.error("two factor recovery page setup failed", e); setDoesHaveEncryptedRecoveryKey(false); diff --git a/web/packages/accounts/pages/two-factor/verify.tsx b/web/packages/accounts/pages/two-factor/verify.tsx index 5498211ae..1ec6e437d 100644 --- a/web/packages/accounts/pages/two-factor/verify.tsx +++ b/web/packages/accounts/pages/two-factor/verify.tsx @@ -3,7 +3,7 @@ import VerifyTwoFactor, { VerifyTwoFactorCallback, } from "@ente/accounts/components/two-factor/VerifyForm"; import { PAGES } from "@ente/accounts/constants/pages"; -import { logoutUser } from "@ente/accounts/services/user"; + import type { PageProps } from "@ente/shared/apps/types"; import { VerticallyCentered } from "@ente/shared/components/Container"; import FormPaper from "@ente/shared/components/Form/FormPaper"; @@ -19,7 +19,11 @@ import { t } from "i18next"; import { useRouter } from "next/router"; import { useEffect, useState } from "react"; -export const TwoFactorVerify: React.FC = () => { +export const TwoFactorVerify: React.FC = ({ + appContext, +}: PageProps) => { + const { logout } = appContext; + const [sessionID, setSessionID] = useState(""); const router = useRouter(); @@ -60,7 +64,7 @@ 
export const TwoFactorVerify: React.FC = () => { e instanceof ApiError && e.httpStatusCode === HttpStatusCode.NotFound ) { - logoutUser(); + logout(); } else { throw e; } @@ -79,7 +83,7 @@ export const TwoFactorVerify: React.FC = () => { > {t("LOST_DEVICE")} - + {t("CHANGE_EMAIL")} diff --git a/web/packages/accounts/pages/verify.tsx b/web/packages/accounts/pages/verify.tsx index 6515a96b7..2a410fd6f 100644 --- a/web/packages/accounts/pages/verify.tsx +++ b/web/packages/accounts/pages/verify.tsx @@ -16,7 +16,7 @@ import SingleInputForm, { import { ApiError } from "@ente/shared/error"; import { getAccountsURL } from "@ente/shared/network/api"; import InMemoryStore, { MS_KEYS } from "@ente/shared/storage/InMemoryStore"; -import { clearFiles } from "@ente/shared/storage/localForage/helpers"; +import localForage from "@ente/shared/storage/localForage"; import { LS_KEYS, getData, setData } from "@ente/shared/storage/localStorage"; import { getLocalReferralSource, @@ -30,10 +30,11 @@ import { useRouter } from "next/router"; import { putAttributes, sendOtt, verifyOtt } from "../api/user"; import { PAGES } from "../constants/pages"; import { configureSRP } from "../services/srp"; -import { logoutUser } from "../services/user"; import { SRPSetupAttributes } from "../types/srp"; export default function VerifyPage({ appContext, appName }: PageProps) { + const { logout } = appContext; + const [email, setEmail] = useState(""); const [resend, setResend] = useState(0); @@ -121,7 +122,7 @@ export default function VerifyPage({ appContext, appName }: PageProps) { await configureSRP(srpSetupAttributes); } } - clearFiles(); + localForage.clear(); setIsFirstLogin(true); const redirectURL = InMemoryStore.get(MS_KEYS.REDIRECT_URL); InMemoryStore.delete(MS_KEYS.REDIRECT_URL); @@ -191,7 +192,7 @@ export default function VerifyPage({ appContext, appName }: PageProps) { )} {resend === 1 && {t("SENDING")}} {resend === 2 && {t("SENT")}} - + {t("CHANGE_EMAIL")} diff --git 
a/web/packages/accounts/services/logout.ts b/web/packages/accounts/services/logout.ts new file mode 100644 index 000000000..70d67b22f --- /dev/null +++ b/web/packages/accounts/services/logout.ts @@ -0,0 +1,50 @@ +import { clearCaches } from "@/next/blob-cache"; +import log from "@/next/log"; +import InMemoryStore from "@ente/shared/storage/InMemoryStore"; +import localForage from "@ente/shared/storage/localForage"; +import { clearData } from "@ente/shared/storage/localStorage"; +import { clearKeys } from "@ente/shared/storage/sessionStorage"; +import { logout as remoteLogout } from "../api/user"; + +/** + * Logout sequence common to all apps that rely on the accounts package. + * + * [Note: Do not throw during logout] + * + * This function is guaranteed to not thrown any errors, and will try to + * independently complete all the steps in the sequence that can be completed. + * This allows the user to logout and start again even if somehow their account + * gets in an unexpected state. + */ +export const accountLogout = async () => { + try { + await remoteLogout(); + } catch (e) { + log.error("Ignoring error during logout (remote)", e); + } + try { + InMemoryStore.clear(); + } catch (e) { + log.error("Ignoring error during logout (in-memory store)", e); + } + try { + clearKeys(); + } catch (e) { + log.error("Ignoring error during logout (session store)", e); + } + try { + clearData(); + } catch (e) { + log.error("Ignoring error during logout (local storage)", e); + } + try { + await localForage.clear(); + } catch (e) { + log.error("Ignoring error during logout (local forage)", e); + } + try { + await clearCaches(); + } catch (e) { + log.error("Ignoring error during logout (cache)", e); + } +}; diff --git a/web/packages/accounts/services/user.ts b/web/packages/accounts/services/user.ts deleted file mode 100644 index 8f6d6609a..000000000 --- a/web/packages/accounts/services/user.ts +++ /dev/null @@ -1,62 +0,0 @@ -import { clearCaches } from "@/next/blob-cache"; 
-import log from "@/next/log"; -import { Events, eventBus } from "@ente/shared/events"; -import InMemoryStore from "@ente/shared/storage/InMemoryStore"; -import { clearFiles } from "@ente/shared/storage/localForage/helpers"; -import { clearData } from "@ente/shared/storage/localStorage"; -import { clearKeys } from "@ente/shared/storage/sessionStorage"; -import router from "next/router"; -import { _logout } from "../api/user"; -import { PAGES } from "../constants/pages"; - -export const logoutUser = async () => { - try { - await _logout(); - } catch (e) { - log.error("Ignoring error during POST /users/logout", e); - } - try { - InMemoryStore.clear(); - } catch (e) { - log.error("Ignoring error when clearing in-memory store", e); - } - try { - clearKeys(); - } catch (e) { - log.error("Ignoring error when clearing keys", e); - } - try { - clearData(); - } catch (e) { - log.error("Ignoring error when clearing data", e); - } - try { - await clearCaches(); - } catch (e) { - log.error("Ignoring error when clearing caches", e); - } - try { - await clearFiles(); - } catch (e) { - log.error("Ignoring error when clearing files", e); - } - const electron = globalThis.electron; - if (electron) { - try { - await electron.watch.reset(); - } catch (e) { - log.error("Ignoring error when resetting native folder watches", e); - } - try { - await electron.clearStores(); - } catch (e) { - log.error("Ignoring error when clearing native stores", e); - } - } - try { - eventBus.emit(Events.LOGOUT); - } catch (e) { - log.error("Ignoring error in event-bus logout handlers", e); - } - router.push(PAGES.ROOT); -}; diff --git a/web/packages/build-config/package.json b/web/packages/build-config/package.json index e46bb96b1..bacc6e8bb 100644 --- a/web/packages/build-config/package.json +++ b/web/packages/build-config/package.json @@ -7,8 +7,8 @@ "@typescript-eslint/parser": "^7", "eslint-plugin-react": "^7.34", "eslint-plugin-react-hooks": "^4.6", - "eslint-plugin-react-refresh": "^0.4.6", + 
"eslint-plugin-react-refresh": "^0.4.7", "prettier-plugin-organize-imports": "^3.2", - "prettier-plugin-packagejson": "^2.4" + "prettier-plugin-packagejson": "^2.5" } } diff --git a/web/packages/media/formats.ts b/web/packages/media/formats.ts index 24d2c7c87..1316b654f 100644 --- a/web/packages/media/formats.ts +++ b/web/packages/media/formats.ts @@ -24,3 +24,11 @@ const nonWebImageFileExtensions = [ */ export const isNonWebImageFileExtension = (extension: string) => nonWebImageFileExtensions.includes(extension.toLowerCase()); + +/** + * Return `true` if {@link extension} in for an HEIC-like file. + */ +export const isHEICExtension = (extension: string) => { + const ext = extension.toLowerCase(); + return ext == "heic" || ext == "heif"; +}; diff --git a/web/packages/media/image.ts b/web/packages/media/image.ts new file mode 100644 index 000000000..2912af02a --- /dev/null +++ b/web/packages/media/image.ts @@ -0,0 +1,33 @@ +/** + * Compute optimal dimensions for a resized version of an image while + * maintaining aspect ratio of the source image. + * + * @param width The width of the source image. + * + * @param height The height of the source image. + * + * @param maxDimension The maximum width of height of the resized image. + * + * This function returns a new size limiting it to maximum width and height + * (both specified by {@link maxDimension}), while maintaining aspect ratio of + * the source {@link width} and {@link height}. + * + * It returns `{0, 0}` for invalid inputs. 
+ */ +export const scaledImageDimensions = ( + width: number, + height: number, + maxDimension: number, +): { width: number; height: number } => { + if (width == 0 || height == 0) return { width: 0, height: 0 }; + const widthScaleFactor = maxDimension / width; + const heightScaleFactor = maxDimension / height; + const scaleFactor = Math.min(widthScaleFactor, heightScaleFactor); + const resizedDimensions = { + width: Math.round(width * scaleFactor), + height: Math.round(height * scaleFactor), + }; + if (resizedDimensions.width == 0 || resizedDimensions.height == 0) + return { width: 0, height: 0 }; + return resizedDimensions; +}; diff --git a/web/packages/media/package.json b/web/packages/media/package.json index 8be7e8bb6..bf71ed37b 100644 --- a/web/packages/media/package.json +++ b/web/packages/media/package.json @@ -5,6 +5,10 @@ "dependencies": { "@/next": "*", "file-type": "16.5.4", + "heic-convert": "^2.1", "jszip": "^3.10" + }, + "devDependencies": { + "@types/heic-convert": "^1.2.3" } } diff --git a/web/packages/media/tsconfig.json b/web/packages/media/tsconfig.json index f29c34811..bcc1151c1 100644 --- a/web/packages/media/tsconfig.json +++ b/web/packages/media/tsconfig.json @@ -1,5 +1,13 @@ { "extends": "@/build-config/tsconfig-typecheck.json", + "compilerOptions": { + /* Also indicate expectation of a WebWorker runtime */ + "lib": ["ESnext", "DOM", "DOM.Iterable", "WebWorker"] + }, /* Typecheck all files with the given extensions (here or in subfolders) */ - "include": ["**/*.ts", "**/*.tsx"] + "include": [ + "**/*.ts", + "**/*.tsx", + "../../packages/next/global-electron.d.ts" + ] } diff --git a/web/packages/media/worker/heic-convert.ts b/web/packages/media/worker/heic-convert.ts new file mode 100644 index 000000000..476eac00a --- /dev/null +++ b/web/packages/media/worker/heic-convert.ts @@ -0,0 +1,11 @@ +import { ComlinkWorker } from "@/next/worker/comlink-worker"; +import type { DedicatedHEICConvertWorker } from "./heic-convert.worker"; + +export const 
createHEICConvertWebWorker = () =>
    new Worker(new URL("heic-convert.worker.ts", import.meta.url));

export const createHEICConvertComlinkWorker = () =>
    new ComlinkWorker<typeof DedicatedHEICConvertWorker>(
        "heic-convert-worker",
        createHEICConvertWebWorker(),
    );
diff --git a/web/apps/photos/src/worker/heic-convert.worker.ts b/web/packages/media/worker/heic-convert.worker.ts
similarity index 84%
rename from web/apps/photos/src/worker/heic-convert.worker.ts
rename to web/packages/media/worker/heic-convert.worker.ts
index 96a1a9468..ffb5eb158 100644
--- a/web/apps/photos/src/worker/heic-convert.worker.ts
+++ b/web/packages/media/worker/heic-convert.worker.ts
@@ -7,7 +7,7 @@ export class DedicatedHEICConvertWorker {
     }
 }
 
-expose(DedicatedHEICConvertWorker, self);
+expose(DedicatedHEICConvertWorker);
 
 /**
  * Convert a HEIC file to a JPEG file.
@@ -18,5 +18,5 @@ export const heicToJPEG = async (heicBlob: Blob): Promise<Blob> => {
     const buffer = new Uint8Array(await heicBlob.arrayBuffer());
     const result = await HeicConvert({ buffer, format: "JPEG" });
     const convertedData = new Uint8Array(result);
-    return new Blob([convertedData]);
+    return new Blob([convertedData], { type: "image/jpeg" });
 };
"Konvertieren", "CONFIRM_EDITOR_CLOSE_MESSAGE": "Editor wirklich schließen?", "CONFIRM_EDITOR_CLOSE_DESCRIPTION": "Lade dein bearbeitetes Bild herunter oder speichere es in Ente, um die Änderungen nicht zu verlieren.", @@ -588,7 +591,7 @@ "ROTATION": "Drehen", "RESET": "Zurücksetzen", "PHOTO_EDITOR": "Foto-Editor", - "FASTER_UPLOAD": "Schnelleres hochladen", + "FASTER_UPLOAD": "Schnelleres Hochladen", "FASTER_UPLOAD_DESCRIPTION": "Uploads über nahegelegene Server leiten", "MAGIC_SEARCH_STATUS": "Status der magischen Suche", "INDEXED_ITEMS": "Indizierte Elemente", diff --git a/web/packages/next/locales/en-US/translation.json b/web/packages/next/locales/en-US/translation.json index f4d6f6100..f7acb63c8 100644 --- a/web/packages/next/locales/en-US/translation.json +++ b/web/packages/next/locales/en-US/translation.json @@ -565,6 +565,9 @@ "IMAGE": "Image", "VIDEO": "Video", "LIVE_PHOTO": "Live Photo", + "editor": { + "crop": "Crop" + }, "CONVERT": "Convert", "CONFIRM_EDITOR_CLOSE_MESSAGE": "Are you sure you want to close the editor?", "CONFIRM_EDITOR_CLOSE_DESCRIPTION": "Download your edited image or save a copy to Ente to persist your changes.", diff --git a/web/packages/next/locales/es-ES/translation.json b/web/packages/next/locales/es-ES/translation.json index ffc06ffa3..abd06b510 100644 --- a/web/packages/next/locales/es-ES/translation.json +++ b/web/packages/next/locales/es-ES/translation.json @@ -352,7 +352,7 @@ "ADD_COLLABORATORS": "", "ADD_NEW_EMAIL": "", "shared_with_people_zero": "", - "shared_with_people_one": "", + "shared_with_people_one": "Compartido con 1 persona", "shared_with_people_other": "", "participants_zero": "", "participants_one": "", @@ -362,8 +362,8 @@ "CHANGE_PERMISSIONS_TO_COLLABORATOR": "", "CONVERT_TO_VIEWER": "", "CONVERT_TO_COLLABORATOR": "", - "CHANGE_PERMISSION": "", - "REMOVE_PARTICIPANT": "", + "CHANGE_PERMISSION": "¿Cambiar Permiso?", + "REMOVE_PARTICIPANT": "¿Eliminar?", "CONFIRM_REMOVE": "", "MANAGE": "", "ADDED_AS": "", @@ 
-415,8 +415,8 @@ "albums_other": "{{count}} álbumes", "ALL_ALBUMS": "Todos los álbumes", "ALBUMS": "Álbumes", - "ALL_HIDDEN_ALBUMS": "", - "HIDDEN_ALBUMS": "", + "ALL_HIDDEN_ALBUMS": "Todos los álbumes ocultos", + "HIDDEN_ALBUMS": "Álbumes ocultos", "HIDDEN_ITEMS": "", "ENTER_TWO_FACTOR_OTP": "Ingrese el código de seis dígitos de su aplicación de autenticación a continuación.", "CREATE_ACCOUNT": "Crear cuenta", @@ -518,7 +518,7 @@ "PUBLIC_COLLECT_SUBTEXT": "Permitir a las personas con el enlace añadir fotos al álbum compartido.", "STOP_EXPORT": "Stop", "EXPORT_PROGRESS": "{{progress.success}} / {{progress.total}} archivos exportados", - "MIGRATING_EXPORT": "", + "MIGRATING_EXPORT": "Preparando...", "RENAMING_COLLECTION_FOLDERS": "", "TRASHING_DELETED_FILES": "", "TRASHING_DELETED_COLLECTIONS": "", @@ -543,7 +543,7 @@ "at": "a las", "AUTH_NEXT": "siguiente", "AUTH_DOWNLOAD_MOBILE_APP": "Descarga nuestra aplicación móvil para administrar tus secretos", - "HIDDEN": "", + "HIDDEN": "Oculto", "HIDE": "Ocultar", "UNHIDE": "Mostrar", "UNHIDE_TO_COLLECTION": "Hacer visible al álbum", @@ -565,10 +565,13 @@ "IMAGE": "", "VIDEO": "Video", "LIVE_PHOTO": "", + "editor": { + "crop": "" + }, "CONVERT": "", "CONFIRM_EDITOR_CLOSE_MESSAGE": "", "CONFIRM_EDITOR_CLOSE_DESCRIPTION": "", - "BRIGHTNESS": "", + "BRIGHTNESS": "Brillo", "CONTRAST": "", "SATURATION": "", "BLUR": "", @@ -617,7 +620,7 @@ "PASSKEY_LOGIN_FAILED": "", "PASSKEY_LOGIN_URL_INVALID": "", "PASSKEY_LOGIN_ERRORED": "", - "TRY_AGAIN": "", + "TRY_AGAIN": "Inténtelo de nuevo", "PASSKEY_FOLLOW_THE_STEPS_FROM_YOUR_BROWSER": "", "LOGIN_WITH_PASSKEY": "", "autogenerated_first_album_name": "", diff --git a/web/packages/next/locales/fa-IR/translation.json b/web/packages/next/locales/fa-IR/translation.json index 2f9605019..ce0e8e6e1 100644 --- a/web/packages/next/locales/fa-IR/translation.json +++ b/web/packages/next/locales/fa-IR/translation.json @@ -565,6 +565,9 @@ "IMAGE": "", "VIDEO": "", "LIVE_PHOTO": "", + "editor": { + 
"crop": "" + }, "CONVERT": "", "CONFIRM_EDITOR_CLOSE_MESSAGE": "", "CONFIRM_EDITOR_CLOSE_DESCRIPTION": "", diff --git a/web/packages/next/locales/fi-FI/translation.json b/web/packages/next/locales/fi-FI/translation.json index 33306389c..9f549eb49 100644 --- a/web/packages/next/locales/fi-FI/translation.json +++ b/web/packages/next/locales/fi-FI/translation.json @@ -565,6 +565,9 @@ "IMAGE": "", "VIDEO": "", "LIVE_PHOTO": "", + "editor": { + "crop": "" + }, "CONVERT": "", "CONFIRM_EDITOR_CLOSE_MESSAGE": "", "CONFIRM_EDITOR_CLOSE_DESCRIPTION": "", diff --git a/web/packages/next/locales/fr-FR/translation.json b/web/packages/next/locales/fr-FR/translation.json index dd17e54ab..9af40b690 100644 --- a/web/packages/next/locales/fr-FR/translation.json +++ b/web/packages/next/locales/fr-FR/translation.json @@ -565,6 +565,9 @@ "IMAGE": "Image", "VIDEO": "Vidéo", "LIVE_PHOTO": "Photos en direct", + "editor": { + "crop": "" + }, "CONVERT": "Convertir", "CONFIRM_EDITOR_CLOSE_MESSAGE": "Êtes-vous sûr de vouloir fermer l'éditeur ?", "CONFIRM_EDITOR_CLOSE_DESCRIPTION": "Téléchargez votre image modifiée ou enregistrez une copie sur Ente pour maintenir vos modifications.", diff --git a/web/packages/next/locales/is-IS/translation.json b/web/packages/next/locales/is-IS/translation.json new file mode 100644 index 000000000..80f443b5d --- /dev/null +++ b/web/packages/next/locales/is-IS/translation.json @@ -0,0 +1,628 @@ +{ + "HERO_SLIDE_1_TITLE": "", + "HERO_SLIDE_1": "", + "HERO_SLIDE_2_TITLE": "", + "HERO_SLIDE_2": "", + "HERO_SLIDE_3_TITLE": "", + "HERO_SLIDE_3": "", + "LOGIN": "", + "SIGN_UP": "", + "NEW_USER": "", + "EXISTING_USER": "", + "ENTER_NAME": "", + "PUBLIC_UPLOADER_NAME_MESSAGE": "", + "ENTER_EMAIL": "", + "EMAIL_ERROR": "", + "REQUIRED": "", + "EMAIL_SENT": "", + "CHECK_INBOX": "", + "ENTER_OTT": "", + "RESEND_MAIL": "", + "VERIFY": "", + "UNKNOWN_ERROR": "", + "INVALID_CODE": "", + "EXPIRED_CODE": "", + "SENDING": "", + "SENT": "", + "PASSWORD": "Lykilorð", + 
"LINK_PASSWORD": "", + "RETURN_PASSPHRASE_HINT": "Lykilorð", + "SET_PASSPHRASE": "", + "VERIFY_PASSPHRASE": "", + "INCORRECT_PASSPHRASE": "Rangt lykilorð", + "ENTER_ENC_PASSPHRASE": "", + "PASSPHRASE_DISCLAIMER": "", + "WELCOME_TO_ENTE_HEADING": "", + "WELCOME_TO_ENTE_SUBHEADING": "", + "WHERE_YOUR_BEST_PHOTOS_LIVE": "", + "KEY_GENERATION_IN_PROGRESS_MESSAGE": "", + "PASSPHRASE_HINT": "", + "CONFIRM_PASSPHRASE": "", + "REFERRAL_CODE_HINT": "", + "REFERRAL_INFO": "", + "PASSPHRASE_MATCH_ERROR": "", + "CREATE_COLLECTION": "", + "ENTER_ALBUM_NAME": "", + "CLOSE_OPTION": "", + "ENTER_FILE_NAME": "", + "CLOSE": "Loka", + "NO": "Nei", + "NOTHING_HERE": "Ekkert að sjá hér ennþá 👀", + "UPLOAD": "Hlaða upp", + "IMPORT": "", + "ADD_PHOTOS": "", + "ADD_MORE_PHOTOS": "", + "add_photos_one": "", + "add_photos_other": "", + "SELECT_PHOTOS": "", + "FILE_UPLOAD": "", + "UPLOAD_STAGE_MESSAGE": { + "0": "", + "1": "", + "2": "", + "3": "", + "4": "", + "5": "" + }, + "FILE_NOT_UPLOADED_LIST": "", + "SUBSCRIPTION_EXPIRED": "", + "SUBSCRIPTION_EXPIRED_MESSAGE": "", + "STORAGE_QUOTA_EXCEEDED": "", + "INITIAL_LOAD_DELAY_WARNING": "", + "USER_DOES_NOT_EXIST": "", + "NO_ACCOUNT": "", + "ACCOUNT_EXISTS": "", + "CREATE": "", + "DOWNLOAD": "", + "DOWNLOAD_OPTION": "", + "DOWNLOAD_FAVORITES": "", + "DOWNLOAD_UNCATEGORIZED": "", + "DOWNLOAD_HIDDEN_ITEMS": "", + "COPY_OPTION": "", + "TOGGLE_FULLSCREEN": "", + "ZOOM_IN_OUT": "", + "PREVIOUS": "", + "NEXT": "", + "TITLE_PHOTOS": "", + "TITLE_ALBUMS": "", + "TITLE_AUTH": "", + "UPLOAD_FIRST_PHOTO": "", + "IMPORT_YOUR_FOLDERS": "", + "UPLOAD_DROPZONE_MESSAGE": "", + "WATCH_FOLDER_DROPZONE_MESSAGE": "", + "TRASH_FILES_TITLE": "", + "TRASH_FILE_TITLE": "", + "DELETE_FILES_TITLE": "", + "DELETE_FILES_MESSAGE": "", + "DELETE": "Eyða", + "DELETE_OPTION": "", + "FAVORITE_OPTION": "", + "UNFAVORITE_OPTION": "", + "MULTI_FOLDER_UPLOAD": "", + "UPLOAD_STRATEGY_CHOICE": "", + "UPLOAD_STRATEGY_SINGLE_COLLECTION": "", + "OR": "eða", + 
"UPLOAD_STRATEGY_COLLECTION_PER_FOLDER": "", + "SESSION_EXPIRED_MESSAGE": "", + "SESSION_EXPIRED": "", + "PASSWORD_GENERATION_FAILED": "", + "CHANGE_PASSWORD": "", + "GO_BACK": "Fara til baka", + "RECOVERY_KEY": "", + "SAVE_LATER": "Gera þetta seinna", + "SAVE": "Vista Lykil", + "RECOVERY_KEY_DESCRIPTION": "", + "RECOVER_KEY_GENERATION_FAILED": "", + "KEY_NOT_STORED_DISCLAIMER": "", + "FORGOT_PASSWORD": "Gleymt lykilorð", + "RECOVER_ACCOUNT": "Endurheimta Reikning", + "RECOVERY_KEY_HINT": "Endurheimtunarlykill", + "RECOVER": "Endurheimta", + "NO_RECOVERY_KEY": "Enginn endurheimtunarlykill?", + "INCORRECT_RECOVERY_KEY": "", + "SORRY": "Fyrirgefðu", + "NO_RECOVERY_KEY_MESSAGE": "", + "NO_TWO_FACTOR_RECOVERY_KEY_MESSAGE": "", + "CONTACT_SUPPORT": "", + "REQUEST_FEATURE": "", + "SUPPORT": "", + "CONFIRM": "Staðfesta", + "CANCEL": "Hætta við", + "LOGOUT": "Útskrá", + "DELETE_ACCOUNT": "Eyða aðgangi", + "DELETE_ACCOUNT_MESSAGE": "", + "LOGOUT_MESSAGE": "Ertu viss um að þú viljir skrá þig út?", + "CHANGE_EMAIL": "Breyta netfangi", + "OK": "Í lagi", + "SUCCESS": "Tókst", + "ERROR": "Villa", + "MESSAGE": "Skilaboð", + "INSTALL_MOBILE_APP": "", + "DOWNLOAD_APP_MESSAGE": "", + "DOWNLOAD_APP": "", + "EXPORT": "", + "SUBSCRIPTION": "Áskrift", + "SUBSCRIBE": "Gerast áskrifandi", + "MANAGEMENT_PORTAL": "", + "MANAGE_FAMILY_PORTAL": "", + "LEAVE_FAMILY_PLAN": "", + "LEAVE": "", + "LEAVE_FAMILY_CONFIRM": "", + "CHOOSE_PLAN": "", + "MANAGE_PLAN": "", + "ACTIVE": "Virkur", + "OFFLINE_MSG": "", + "FREE_SUBSCRIPTION_INFO": "", + "FAMILY_SUBSCRIPTION_INFO": "", + "RENEWAL_ACTIVE_SUBSCRIPTION_STATUS": "", + "RENEWAL_CANCELLED_SUBSCRIPTION_STATUS": "", + "RENEWAL_CANCELLED_SUBSCRIPTION_INFO": "", + "ADD_ON_AVAILABLE_TILL": "", + "STORAGE_QUOTA_EXCEEDED_SUBSCRIPTION_INFO": "Þú hefur farið yfir geymsluplássið þitt, vinsamlegast uppfærðu", + "SUBSCRIPTION_PURCHASE_SUCCESS": "", + "SUBSCRIPTION_PURCHASE_CANCELLED": "", + "SUBSCRIPTION_PURCHASE_FAILED": "", + "SUBSCRIPTION_UPDATE_FAILED": "", 
+ "UPDATE_PAYMENT_METHOD_MESSAGE": "", + "STRIPE_AUTHENTICATION_FAILED": "", + "UPDATE_PAYMENT_METHOD": "", + "MONTHLY": "", + "YEARLY": "", + "update_subscription_title": "", + "UPDATE_SUBSCRIPTION_MESSAGE": "", + "UPDATE_SUBSCRIPTION": "", + "CANCEL_SUBSCRIPTION": "", + "CANCEL_SUBSCRIPTION_MESSAGE": "", + "CANCEL_SUBSCRIPTION_WITH_ADDON_MESSAGE": "", + "SUBSCRIPTION_CANCEL_FAILED": "", + "SUBSCRIPTION_CANCEL_SUCCESS": "", + "REACTIVATE_SUBSCRIPTION": "", + "REACTIVATE_SUBSCRIPTION_MESSAGE": "", + "SUBSCRIPTION_ACTIVATE_SUCCESS": "", + "SUBSCRIPTION_ACTIVATE_FAILED": "", + "SUBSCRIPTION_PURCHASE_SUCCESS_TITLE": "", + "CANCEL_SUBSCRIPTION_ON_MOBILE": "", + "CANCEL_SUBSCRIPTION_ON_MOBILE_MESSAGE": "", + "MAIL_TO_MANAGE_SUBSCRIPTION": "", + "RENAME": "", + "RENAME_FILE": "", + "RENAME_COLLECTION": "", + "DELETE_COLLECTION_TITLE": "", + "DELETE_COLLECTION": "", + "DELETE_COLLECTION_MESSAGE": "", + "DELETE_PHOTOS": "", + "KEEP_PHOTOS": "", + "SHARE_COLLECTION": "", + "SHARE_WITH_SELF": "", + "ALREADY_SHARED": "", + "SHARING_BAD_REQUEST_ERROR": "", + "SHARING_DISABLED_FOR_FREE_ACCOUNTS": "", + "DOWNLOAD_COLLECTION": "", + "CREATE_ALBUM_FAILED": "", + "SEARCH": "", + "SEARCH_RESULTS": "", + "NO_RESULTS": "", + "SEARCH_HINT": "", + "SEARCH_TYPE": { + "COLLECTION": "", + "LOCATION": "", + "CITY": "", + "DATE": "", + "FILE_NAME": "", + "THING": "", + "FILE_CAPTION": "", + "FILE_TYPE": "", + "CLIP": "" + }, + "photos_count_zero": "", + "photos_count_one": "", + "photos_count_other": "", + "TERMS_AND_CONDITIONS": "", + "ADD_TO_COLLECTION": "", + "SELECTED": "", + "PEOPLE": "", + "INDEXING_SCHEDULED": "", + "ANALYZING_PHOTOS": "", + "INDEXING_PEOPLE": "", + "INDEXING_DONE": "", + "UNIDENTIFIED_FACES": "", + "OBJECTS": "", + "TEXT": "", + "INFO": "", + "INFO_OPTION": "", + "FILE_NAME": "", + "CAPTION_PLACEHOLDER": "", + "LOCATION": "", + "SHOW_ON_MAP": "", + "MAP": "", + "MAP_SETTINGS": "", + "ENABLE_MAPS": "", + "ENABLE_MAP": "", + "DISABLE_MAPS": "", + 
"ENABLE_MAP_DESCRIPTION": "", + "DISABLE_MAP_DESCRIPTION": "", + "DISABLE_MAP": "", + "DETAILS": "", + "VIEW_EXIF": "", + "NO_EXIF": "", + "EXIF": "", + "ISO": "", + "TWO_FACTOR": "", + "TWO_FACTOR_AUTHENTICATION": "", + "TWO_FACTOR_QR_INSTRUCTION": "", + "ENTER_CODE_MANUALLY": "", + "TWO_FACTOR_MANUAL_CODE_INSTRUCTION": "", + "SCAN_QR_CODE": "", + "ENABLE_TWO_FACTOR": "", + "ENABLE": "", + "LOST_DEVICE": "", + "INCORRECT_CODE": "", + "TWO_FACTOR_INFO": "", + "DISABLE_TWO_FACTOR_LABEL": "", + "UPDATE_TWO_FACTOR_LABEL": "", + "DISABLE": "", + "RECONFIGURE": "", + "UPDATE_TWO_FACTOR": "", + "UPDATE_TWO_FACTOR_MESSAGE": "", + "UPDATE": "", + "DISABLE_TWO_FACTOR": "", + "DISABLE_TWO_FACTOR_MESSAGE": "", + "TWO_FACTOR_DISABLE_FAILED": "", + "EXPORT_DATA": "", + "SELECT_FOLDER": "", + "DESTINATION": "", + "START": "", + "LAST_EXPORT_TIME": "", + "EXPORT_AGAIN": "", + "LOCAL_STORAGE_NOT_ACCESSIBLE": "", + "LOCAL_STORAGE_NOT_ACCESSIBLE_MESSAGE": "", + "SEND_OTT": "", + "EMAIl_ALREADY_OWNED": "", + "ETAGS_BLOCKED": "", + "LIVE_PHOTOS_DETECTED": "", + "RETRY_FAILED": "", + "FAILED_UPLOADS": "", + "SKIPPED_FILES": "", + "THUMBNAIL_GENERATION_FAILED_UPLOADS": "", + "UNSUPPORTED_FILES": "", + "SUCCESSFUL_UPLOADS": "", + "SKIPPED_INFO": "", + "UNSUPPORTED_INFO": "", + "BLOCKED_UPLOADS": "", + "INPROGRESS_METADATA_EXTRACTION": "", + "INPROGRESS_UPLOADS": "", + "TOO_LARGE_UPLOADS": "", + "LARGER_THAN_AVAILABLE_STORAGE_UPLOADS": "", + "LARGER_THAN_AVAILABLE_STORAGE_INFO": "", + "TOO_LARGE_INFO": "", + "THUMBNAIL_GENERATION_FAILED_INFO": "", + "UPLOAD_TO_COLLECTION": "", + "UNCATEGORIZED": "", + "ARCHIVE": "", + "FAVORITES": "", + "ARCHIVE_COLLECTION": "", + "ARCHIVE_SECTION_NAME": "", + "ALL_SECTION_NAME": "", + "MOVE_TO_COLLECTION": "", + "UNARCHIVE": "", + "UNARCHIVE_COLLECTION": "", + "HIDE_COLLECTION": "", + "UNHIDE_COLLECTION": "", + "MOVE": "", + "ADD": "", + "REMOVE": "", + "YES_REMOVE": "", + "REMOVE_FROM_COLLECTION": "", + "TRASH": "", + "MOVE_TO_TRASH": "", + 
"TRASH_FILES_MESSAGE": "", + "TRASH_FILE_MESSAGE": "", + "DELETE_PERMANENTLY": "", + "RESTORE": "", + "RESTORE_TO_COLLECTION": "", + "EMPTY_TRASH": "", + "EMPTY_TRASH_TITLE": "", + "EMPTY_TRASH_MESSAGE": "", + "LEAVE_SHARED_ALBUM": "", + "LEAVE_ALBUM": "", + "LEAVE_SHARED_ALBUM_TITLE": "", + "LEAVE_SHARED_ALBUM_MESSAGE": "", + "NOT_FILE_OWNER": "", + "CONFIRM_SELF_REMOVE_MESSAGE": "", + "CONFIRM_SELF_AND_OTHER_REMOVE_MESSAGE": "", + "SORT_BY_CREATION_TIME_ASCENDING": "", + "SORT_BY_UPDATION_TIME_DESCENDING": "", + "SORT_BY_NAME": "", + "FIX_CREATION_TIME": "", + "FIX_CREATION_TIME_IN_PROGRESS": "", + "CREATION_TIME_UPDATED": "", + "UPDATE_CREATION_TIME_NOT_STARTED": "", + "UPDATE_CREATION_TIME_COMPLETED": "", + "UPDATE_CREATION_TIME_COMPLETED_WITH_ERROR": "", + "CAPTION_CHARACTER_LIMIT": "hámark 5000 stafir", + "DATE_TIME_ORIGINAL": "", + "DATE_TIME_DIGITIZED": "", + "METADATA_DATE": "", + "CUSTOM_TIME": "", + "REOPEN_PLAN_SELECTOR_MODAL": "", + "OPEN_PLAN_SELECTOR_MODAL_FAILED": "", + "INSTALL": "", + "SHARING_DETAILS": "", + "MODIFY_SHARING": "", + "ADD_COLLABORATORS": "", + "ADD_NEW_EMAIL": "", + "shared_with_people_zero": "", + "shared_with_people_one": "", + "shared_with_people_other": "", + "participants_zero": "", + "participants_one": "", + "participants_other": "", + "ADD_VIEWERS": "", + "CHANGE_PERMISSIONS_TO_VIEWER": "", + "CHANGE_PERMISSIONS_TO_COLLABORATOR": "", + "CONVERT_TO_VIEWER": "", + "CONVERT_TO_COLLABORATOR": "", + "CHANGE_PERMISSION": "", + "REMOVE_PARTICIPANT": "", + "CONFIRM_REMOVE": "", + "MANAGE": "", + "ADDED_AS": "", + "COLLABORATOR_RIGHTS": "", + "REMOVE_PARTICIPANT_HEAD": "", + "OWNER": "Eigandi", + "COLLABORATORS": "", + "ADD_MORE": "", + "VIEWERS": "", + "OR_ADD_EXISTING": "", + "REMOVE_PARTICIPANT_MESSAGE": "", + "NOT_FOUND": "404 - fannst ekki", + "LINK_EXPIRED": "Hlekkur rann út", + "LINK_EXPIRED_MESSAGE": "", + "MANAGE_LINK": "Stjórna hlekk", + "LINK_TOO_MANY_REQUESTS": "", + "FILE_DOWNLOAD": "", + "LINK_PASSWORD_LOCK": "", + 
"PUBLIC_COLLECT": "", + "LINK_DEVICE_LIMIT": "", + "NO_DEVICE_LIMIT": "", + "LINK_EXPIRY": "", + "NEVER": "", + "DISABLE_FILE_DOWNLOAD": "", + "DISABLE_FILE_DOWNLOAD_MESSAGE": "", + "SHARED_USING": "", + "SHARING_REFERRAL_CODE": "", + "LIVE": "", + "DISABLE_PASSWORD": "", + "DISABLE_PASSWORD_MESSAGE": "", + "PASSWORD_LOCK": "", + "LOCK": "", + "DOWNLOAD_UPLOAD_LOGS": "", + "UPLOAD_FILES": "", + "UPLOAD_DIRS": "", + "UPLOAD_GOOGLE_TAKEOUT": "", + "DEDUPLICATE_FILES": "", + "NO_DUPLICATES_FOUND": "", + "FILES": "", + "EACH": "", + "DEDUPLICATE_BASED_ON_SIZE": "", + "STOP_ALL_UPLOADS_MESSAGE": "", + "STOP_UPLOADS_HEADER": "", + "YES_STOP_UPLOADS": "", + "STOP_DOWNLOADS_HEADER": "", + "YES_STOP_DOWNLOADS": "", + "STOP_ALL_DOWNLOADS_MESSAGE": "", + "albums_one": "", + "albums_other": "", + "ALL_ALBUMS": "", + "ALBUMS": "", + "ALL_HIDDEN_ALBUMS": "", + "HIDDEN_ALBUMS": "", + "HIDDEN_ITEMS": "", + "ENTER_TWO_FACTOR_OTP": "", + "CREATE_ACCOUNT": "", + "COPIED": "", + "WATCH_FOLDERS": "", + "UPGRADE_NOW": "", + "RENEW_NOW": "", + "STORAGE": "", + "USED": "", + "YOU": "", + "FAMILY": "", + "FREE": "", + "OF": "", + "WATCHED_FOLDERS": "", + "NO_FOLDERS_ADDED": "", + "FOLDERS_AUTOMATICALLY_MONITORED": "", + "UPLOAD_NEW_FILES_TO_ENTE": "", + "REMOVE_DELETED_FILES_FROM_ENTE": "", + "ADD_FOLDER": "", + "STOP_WATCHING": "", + "STOP_WATCHING_FOLDER": "", + "STOP_WATCHING_DIALOG_MESSAGE": "", + "YES_STOP": "", + "MONTH_SHORT": "", + "YEAR": "", + "FAMILY_PLAN": "", + "DOWNLOAD_LOGS": "", + "DOWNLOAD_LOGS_MESSAGE": "", + "CHANGE_FOLDER": "", + "TWO_MONTHS_FREE": "", + "POPULAR": "", + "FREE_PLAN_OPTION_LABEL": "", + "free_plan_description": "", + "CURRENT_USAGE": "", + "WEAK_DEVICE": "", + "DRAG_AND_DROP_HINT": "", + "CONFIRM_ACCOUNT_DELETION_MESSAGE": "", + "AUTHENTICATE": "", + "UPLOADED_TO_SINGLE_COLLECTION": "", + "UPLOADED_TO_SEPARATE_COLLECTIONS": "", + "NEVERMIND": "", + "UPDATE_AVAILABLE": "", + "UPDATE_INSTALLABLE_MESSAGE": "", + "INSTALL_NOW": "", + 
"INSTALL_ON_NEXT_LAUNCH": "", + "UPDATE_AVAILABLE_MESSAGE": "", + "DOWNLOAD_AND_INSTALL": "", + "IGNORE_THIS_VERSION": "", + "TODAY": "", + "YESTERDAY": "", + "NAME_PLACEHOLDER": "", + "ROOT_LEVEL_FILE_WITH_FOLDER_NOT_ALLOWED": "", + "ROOT_LEVEL_FILE_WITH_FOLDER_NOT_ALLOWED_MESSAGE": "", + "CHOSE_THEME": "", + "ML_SEARCH": "", + "ENABLE_ML_SEARCH_DESCRIPTION": "", + "ML_MORE_DETAILS": "", + "ENABLE_FACE_SEARCH": "", + "ENABLE_FACE_SEARCH_TITLE": "", + "ENABLE_FACE_SEARCH_DESCRIPTION": "", + "DISABLE_BETA": "", + "DISABLE_FACE_SEARCH": "", + "DISABLE_FACE_SEARCH_TITLE": "", + "DISABLE_FACE_SEARCH_DESCRIPTION": "", + "ADVANCED": "", + "FACE_SEARCH_CONFIRMATION": "", + "LABS": "", + "YOURS": "", + "PASSPHRASE_STRENGTH_WEAK": "", + "PASSPHRASE_STRENGTH_MODERATE": "", + "PASSPHRASE_STRENGTH_STRONG": "", + "PREFERENCES": "", + "LANGUAGE": "", + "EXPORT_DIRECTORY_DOES_NOT_EXIST": "", + "EXPORT_DIRECTORY_DOES_NOT_EXIST_MESSAGE": "", + "SUBSCRIPTION_VERIFICATION_ERROR": "", + "storage_unit": { + "b": "", + "kb": "", + "mb": "", + "gb": "", + "tb": "" + }, + "AFTER_TIME": { + "HOUR": "", + "DAY": "", + "WEEK": "", + "MONTH": "", + "YEAR": "" + }, + "COPY_LINK": "", + "DONE": "", + "LINK_SHARE_TITLE": "", + "REMOVE_LINK": "", + "CREATE_PUBLIC_SHARING": "", + "PUBLIC_LINK_CREATED": "", + "PUBLIC_LINK_ENABLED": "", + "COLLECT_PHOTOS": "", + "PUBLIC_COLLECT_SUBTEXT": "", + "STOP_EXPORT": "", + "EXPORT_PROGRESS": "", + "MIGRATING_EXPORT": "", + "RENAMING_COLLECTION_FOLDERS": "", + "TRASHING_DELETED_FILES": "", + "TRASHING_DELETED_COLLECTIONS": "", + "CONTINUOUS_EXPORT": "", + "PENDING_ITEMS": "", + "EXPORT_STARTING": "", + "DELETE_ACCOUNT_REASON_LABEL": "", + "DELETE_ACCOUNT_REASON_PLACEHOLDER": "", + "DELETE_REASON": { + "MISSING_FEATURE": "", + "BROKEN_BEHAVIOR": "", + "FOUND_ANOTHER_SERVICE": "", + "NOT_LISTED": "" + }, + "DELETE_ACCOUNT_FEEDBACK_LABEL": "", + "DELETE_ACCOUNT_FEEDBACK_PLACEHOLDER": "", + "CONFIRM_DELETE_ACCOUNT_CHECKBOX_LABEL": "", + "CONFIRM_DELETE_ACCOUNT": 
"", + "FEEDBACK_REQUIRED": "", + "FEEDBACK_REQUIRED_FOUND_ANOTHER_SERVICE": "", + "RECOVER_TWO_FACTOR": "", + "at": "", + "AUTH_NEXT": "", + "AUTH_DOWNLOAD_MOBILE_APP": "", + "HIDDEN": "", + "HIDE": "Fela", + "UNHIDE": "", + "UNHIDE_TO_COLLECTION": "", + "SORT_BY": "Raða eftir", + "NEWEST_FIRST": "Nýjast fyrst", + "OLDEST_FIRST": "Elsta fyrst", + "CONVERSION_FAILED_NOTIFICATION_MESSAGE": "", + "SELECT_COLLECTION": "", + "PIN_ALBUM": "", + "UNPIN_ALBUM": "", + "DOWNLOAD_COMPLETE": "", + "DOWNLOADING_COLLECTION": "", + "DOWNLOAD_FAILED": "", + "DOWNLOAD_PROGRESS": "", + "CHRISTMAS": "", + "CHRISTMAS_EVE": "", + "NEW_YEAR": "Nýtt ár", + "NEW_YEAR_EVE": "", + "IMAGE": "Mynd", + "VIDEO": "Mynband", + "LIVE_PHOTO": "", + "editor": { + "crop": "" + }, + "CONVERT": "", + "CONFIRM_EDITOR_CLOSE_MESSAGE": "", + "CONFIRM_EDITOR_CLOSE_DESCRIPTION": "", + "BRIGHTNESS": "", + "CONTRAST": "", + "SATURATION": "", + "BLUR": "", + "INVERT_COLORS": "", + "ASPECT_RATIO": "", + "SQUARE": "", + "ROTATE_LEFT": "", + "ROTATE_RIGHT": "", + "FLIP_VERTICALLY": "", + "FLIP_HORIZONTALLY": "", + "DOWNLOAD_EDITED": "", + "SAVE_A_COPY_TO_ENTE": "", + "RESTORE_ORIGINAL": "", + "TRANSFORM": "", + "COLORS": "", + "FLIP": "", + "ROTATION": "", + "RESET": "", + "PHOTO_EDITOR": "", + "FASTER_UPLOAD": "", + "FASTER_UPLOAD_DESCRIPTION": "", + "MAGIC_SEARCH_STATUS": "", + "INDEXED_ITEMS": "", + "CAST_ALBUM_TO_TV": "", + "ENTER_CAST_PIN_CODE": "", + "PAIR_DEVICE_TO_TV": "", + "TV_NOT_FOUND": "", + "AUTO_CAST_PAIR": "", + "AUTO_CAST_PAIR_DESC": "", + "PAIR_WITH_PIN": "", + "CHOOSE_DEVICE_FROM_BROWSER": "", + "PAIR_WITH_PIN_DESC": "", + "VISIT_CAST_ENTE_IO": "", + "CAST_AUTO_PAIR_FAILED": "", + "FREEHAND": "", + "APPLY_CROP": "", + "PHOTO_EDIT_REQUIRED_TO_SAVE": "", + "PASSKEYS": "", + "DELETE_PASSKEY": "", + "DELETE_PASSKEY_CONFIRMATION": "", + "RENAME_PASSKEY": "", + "ADD_PASSKEY": "", + "ENTER_PASSKEY_NAME": "", + "PASSKEYS_DESCRIPTION": "", + "CREATED_AT": "", + "PASSKEY_LOGIN_FAILED": "", + 
"PASSKEY_LOGIN_URL_INVALID": "", + "PASSKEY_LOGIN_ERRORED": "", + "TRY_AGAIN": "", + "PASSKEY_FOLLOW_THE_STEPS_FROM_YOUR_BROWSER": "", + "LOGIN_WITH_PASSKEY": "", + "autogenerated_first_album_name": "", + "autogenerated_default_album_name": "" +} diff --git a/web/packages/next/locales/it-IT/translation.json b/web/packages/next/locales/it-IT/translation.json index 8c767c054..d935126f7 100644 --- a/web/packages/next/locales/it-IT/translation.json +++ b/web/packages/next/locales/it-IT/translation.json @@ -565,6 +565,9 @@ "IMAGE": "", "VIDEO": "", "LIVE_PHOTO": "", + "editor": { + "crop": "" + }, "CONVERT": "", "CONFIRM_EDITOR_CLOSE_MESSAGE": "", "CONFIRM_EDITOR_CLOSE_DESCRIPTION": "", diff --git a/web/packages/next/locales/ko-KR/translation.json b/web/packages/next/locales/ko-KR/translation.json index 17fc40588..cec77e0e4 100644 --- a/web/packages/next/locales/ko-KR/translation.json +++ b/web/packages/next/locales/ko-KR/translation.json @@ -565,6 +565,9 @@ "IMAGE": "", "VIDEO": "", "LIVE_PHOTO": "", + "editor": { + "crop": "" + }, "CONVERT": "", "CONFIRM_EDITOR_CLOSE_MESSAGE": "", "CONFIRM_EDITOR_CLOSE_DESCRIPTION": "", diff --git a/web/packages/next/locales/nl-NL/translation.json b/web/packages/next/locales/nl-NL/translation.json index 23850582d..47775c0c2 100644 --- a/web/packages/next/locales/nl-NL/translation.json +++ b/web/packages/next/locales/nl-NL/translation.json @@ -565,6 +565,9 @@ "IMAGE": "Afbeelding", "VIDEO": "Video", "LIVE_PHOTO": "Live foto", + "editor": { + "crop": "" + }, "CONVERT": "Converteren", "CONFIRM_EDITOR_CLOSE_MESSAGE": "Weet u zeker dat u de editor wilt afsluiten?", "CONFIRM_EDITOR_CLOSE_DESCRIPTION": "Download uw bewerkte afbeelding of sla een kopie op in Ente om uw wijzigingen te behouden.", diff --git a/web/packages/next/locales/pt-BR/translation.json b/web/packages/next/locales/pt-BR/translation.json index 6d36812ce..a191a4927 100644 --- a/web/packages/next/locales/pt-BR/translation.json +++ 
b/web/packages/next/locales/pt-BR/translation.json @@ -2,7 +2,7 @@ "HERO_SLIDE_1_TITLE": "
Backups privados
para as suas memórias
", "HERO_SLIDE_1": "Criptografia de ponta a ponta por padrão", "HERO_SLIDE_2_TITLE": "
Armazenado com segurança
em um abrigo avançado
", - "HERO_SLIDE_2": "Feito para ter logenvidade", + "HERO_SLIDE_2": "Feito para ter longevidade", "HERO_SLIDE_3_TITLE": "
Disponível
em qualquer lugar
", "HERO_SLIDE_3": "Android, iOS, Web, Desktop", "LOGIN": "Entrar", @@ -410,7 +410,7 @@ "YES_STOP_UPLOADS": "Sim, parar envios", "STOP_DOWNLOADS_HEADER": "Parar downloads?", "YES_STOP_DOWNLOADS": "Sim, parar downloads", - "STOP_ALL_DOWNLOADS_MESSAGE": "Tem certeza que deseja parar todos as transferências em andamento?", + "STOP_ALL_DOWNLOADS_MESSAGE": "Tem certeza que deseja parar todos os downloads em andamento?", "albums_one": "1 Álbum", "albums_other": "{{count, number}} Álbuns", "ALL_ALBUMS": "Todos os álbuns", @@ -556,7 +556,7 @@ "UNPIN_ALBUM": "Desafixar álbum", "DOWNLOAD_COMPLETE": "Download concluído", "DOWNLOADING_COLLECTION": "Fazendo download de {{name}}", - "DOWNLOAD_FAILED": "Falha ao baixar", + "DOWNLOAD_FAILED": "Falha no download", "DOWNLOAD_PROGRESS": "{{progress.current}} / {{progress.total}} arquivos", "CHRISTMAS": "Natal", "CHRISTMAS_EVE": "Véspera de Natal", @@ -565,6 +565,9 @@ "IMAGE": "Imagem", "VIDEO": "Vídeo", "LIVE_PHOTO": "Fotos em movimento", + "editor": { + "crop": "Cortar" + }, "CONVERT": "Converter", "CONFIRM_EDITOR_CLOSE_MESSAGE": "Tem certeza de que deseja fechar o editor?", "CONFIRM_EDITOR_CLOSE_DESCRIPTION": "Baixe sua imagem editada ou salve uma cópia para o ente para persistir nas alterações.", diff --git a/web/packages/next/locales/pt-PT/translation.json b/web/packages/next/locales/pt-PT/translation.json index c89049ec2..981f33126 100644 --- a/web/packages/next/locales/pt-PT/translation.json +++ b/web/packages/next/locales/pt-PT/translation.json @@ -565,6 +565,9 @@ "IMAGE": "", "VIDEO": "", "LIVE_PHOTO": "", + "editor": { + "crop": "" + }, "CONVERT": "", "CONFIRM_EDITOR_CLOSE_MESSAGE": "", "CONFIRM_EDITOR_CLOSE_DESCRIPTION": "", diff --git a/web/packages/next/locales/ru-RU/translation.json b/web/packages/next/locales/ru-RU/translation.json index d8c90af17..7861d339a 100644 --- a/web/packages/next/locales/ru-RU/translation.json +++ b/web/packages/next/locales/ru-RU/translation.json @@ -168,7 +168,7 @@ "UPDATE_PAYMENT_METHOD": 
"Обновить платёжную информацию", "MONTHLY": "Ежемесячно", "YEARLY": "Ежегодно", - "update_subscription_title": "", + "update_subscription_title": "Подтвердить изменение плана", "UPDATE_SUBSCRIPTION_MESSAGE": "Хотите сменить текущий план?", "UPDATE_SUBSCRIPTION": "Изменить план", "CANCEL_SUBSCRIPTION": "Отменить подписку", @@ -565,6 +565,9 @@ "IMAGE": "Изображение", "VIDEO": "Видео", "LIVE_PHOTO": "Живое фото", + "editor": { + "crop": "" + }, "CONVERT": "Преобразовать", "CONFIRM_EDITOR_CLOSE_MESSAGE": "Вы уверены, что хотите закрыть редактор?", "CONFIRM_EDITOR_CLOSE_DESCRIPTION": "Загрузите отредактированное изображение или сохраните копию в ente, чтобы сохранить внесенные изменения.", @@ -620,6 +623,6 @@ "TRY_AGAIN": "Пробовать снова", "PASSKEY_FOLLOW_THE_STEPS_FROM_YOUR_BROWSER": "Следуйте инструкциям в вашем браузере, чтобы продолжить вход в систему.", "LOGIN_WITH_PASSKEY": "Войдите в систему с помощью пароля", - "autogenerated_first_album_name": "", - "autogenerated_default_album_name": "" + "autogenerated_first_album_name": "Мой первый альбом", + "autogenerated_default_album_name": "Новый альбом" } diff --git a/web/packages/next/locales/sv-SE/translation.json b/web/packages/next/locales/sv-SE/translation.json index 1ceb6370c..2ec0352b0 100644 --- a/web/packages/next/locales/sv-SE/translation.json +++ b/web/packages/next/locales/sv-SE/translation.json @@ -565,6 +565,9 @@ "IMAGE": "Bild", "VIDEO": "", "LIVE_PHOTO": "", + "editor": { + "crop": "" + }, "CONVERT": "", "CONFIRM_EDITOR_CLOSE_MESSAGE": "", "CONFIRM_EDITOR_CLOSE_DESCRIPTION": "", diff --git a/web/packages/next/locales/th-TH/translation.json b/web/packages/next/locales/th-TH/translation.json index 33306389c..9f549eb49 100644 --- a/web/packages/next/locales/th-TH/translation.json +++ b/web/packages/next/locales/th-TH/translation.json @@ -565,6 +565,9 @@ "IMAGE": "", "VIDEO": "", "LIVE_PHOTO": "", + "editor": { + "crop": "" + }, "CONVERT": "", "CONFIRM_EDITOR_CLOSE_MESSAGE": "", 
"CONFIRM_EDITOR_CLOSE_DESCRIPTION": "", diff --git a/web/packages/next/locales/tr-TR/translation.json b/web/packages/next/locales/tr-TR/translation.json index 33306389c..9f549eb49 100644 --- a/web/packages/next/locales/tr-TR/translation.json +++ b/web/packages/next/locales/tr-TR/translation.json @@ -565,6 +565,9 @@ "IMAGE": "", "VIDEO": "", "LIVE_PHOTO": "", + "editor": { + "crop": "" + }, "CONVERT": "", "CONFIRM_EDITOR_CLOSE_MESSAGE": "", "CONFIRM_EDITOR_CLOSE_DESCRIPTION": "", diff --git a/web/packages/next/locales/zh-CN/translation.json b/web/packages/next/locales/zh-CN/translation.json index 2489bdd43..4ac62c796 100644 --- a/web/packages/next/locales/zh-CN/translation.json +++ b/web/packages/next/locales/zh-CN/translation.json @@ -7,7 +7,7 @@ "HERO_SLIDE_3": "安卓, iOS, 网页端, 桌面端", "LOGIN": "登录", "SIGN_UP": "注册", - "NEW_USER": "刚来到 Ente", + "NEW_USER": "初来 Ente", "EXISTING_USER": "现有用户", "ENTER_NAME": "输入名字", "PUBLIC_UPLOADER_NAME_MESSAGE": "请添加一个名字,以便您的朋友知晓该感谢谁拍摄了这些精美的照片!", @@ -565,6 +565,9 @@ "IMAGE": "图像", "VIDEO": "视频", "LIVE_PHOTO": "实况照片", + "editor": { + "crop": "裁剪" + }, "CONVERT": "转换", "CONFIRM_EDITOR_CLOSE_MESSAGE": "您确定要关闭编辑器吗?", "CONFIRM_EDITOR_CLOSE_DESCRIPTION": "下载已编辑的图片或将副本保存到 Ente 以保留您的更改。", diff --git a/web/packages/next/log.ts b/web/packages/next/log.ts index f9ef7e549..e69d22b07 100644 --- a/web/packages/next/log.ts +++ b/web/packages/next/log.ts @@ -3,6 +3,19 @@ import { isDevBuild } from "./env"; import { logToDisk as webLogToDisk } from "./log-web"; import { workerBridge } from "./worker/worker-bridge"; +/** + * Whether logs go to disk or are always emitted to the console. + */ +let shouldLogToDisk = true; + +/** + * By default, logs get saved into a ring buffer in the browser's local storage. + * However, in some contexts, e.g. when we're running as the cast app, there is + * no mechanism for the user to retrieve these logs. So this function exists as + * a way to disable the on disk logging and always use the console. 
+ */ +export const disableDiskLogs = () => (shouldLogToDisk = false); + /** * Write a {@link message} to the on-disk log. * @@ -45,14 +58,14 @@ const messageWithError = (message: string, e?: unknown) => { const logError = (message: string, e?: unknown) => { const m = `[error] ${messageWithError(message, e)}`; - if (isDevBuild) console.error(m); - logToDisk(m); + console.error(m); + if (shouldLogToDisk) logToDisk(m); }; const logWarn = (message: string, e?: unknown) => { const m = `[warn] ${messageWithError(message, e)}`; - if (isDevBuild) console.error(m); - logToDisk(m); + console.error(m); + if (shouldLogToDisk) logToDisk(m); }; const logInfo = (...params: unknown[]) => { @@ -60,8 +73,8 @@ const logInfo = (...params: unknown[]) => { .map((p) => (typeof p == "string" ? p : JSON.stringify(p))) .join(" "); const m = `[info] ${message}`; - if (isDevBuild) console.log(m); - logToDisk(m); + if (isDevBuild || !shouldLogToDisk) console.log(m); + if (shouldLogToDisk) logToDisk(m); }; const logDebug = (param: () => unknown) => { @@ -71,8 +84,8 @@ const logDebug = (param: () => unknown) => { /** * Ente's logger. * - * This is an object that provides three functions to log at the corresponding - * levels - error, info or debug. + * This is an object that provides functions to log at the corresponding levels: + * error, warn, info or debug. * * Whenever we need to save a log message to disk, * @@ -89,8 +102,7 @@ export default { * any arbitrary object that we obtain, say, when in a try-catch handler (in * JavaScript any arbitrary value can be thrown). * - * The log is written to disk. In development builds, the log is also - * printed to the browser console. + * The log is written to disk and printed to the browser console. */ error: logError, /** @@ -104,8 +116,10 @@ export default { * This is meant as a replacement of {@link console.log}, and takes an * arbitrary number of arbitrary parameters that it then serializes. * - * The log is written to disk. 
In development builds, the log is also - * printed to the browser console. + * The log is written to disk. However, if logging to disk is disabled by + * using {@link disableDiskLogs}, then the log is printed to the console. + * + * In development builds, the log is always printed to the browser console. */ info: logInfo, /** @@ -118,8 +132,8 @@ export default { * The function can return an arbitrary value which is serialized before * being logged. * - * This log is NOT written to disk. And it is printed to the browser - * console, but only in development builds. + * This log is NOT written to disk. It is printed to the browser console, + * but only in development builds. */ debug: logDebug, }; diff --git a/web/packages/next/types/ipc.ts b/web/packages/next/types/ipc.ts index b4ef2b6b2..7d5866cdb 100644 --- a/web/packages/next/types/ipc.ts +++ b/web/packages/next/types/ipc.ts @@ -64,12 +64,9 @@ export interface Electron { selectDirectory: () => Promise; /** - * Clear any stored data. - * - * This is a coarse single shot cleanup, meant for use in clearing any - * Electron side state during logout. + * Perform any logout related cleanup of native side state. */ - clearStores: () => void; + logout: () => Promise; /** * Return the previously saved encryption key from persistent safe storage. @@ -260,7 +257,7 @@ export interface Electron { * This executes the command using a FFmpeg executable we bundle with our * desktop app. We also have a wasm FFmpeg wasm implementation that we use * when running on the web, which has a sibling function with the same - * parameters. See [Note: ffmpeg in Electron]. + * parameters. See [Note:FFmpeg in Electron]. * * @param command An array of strings, each representing one positional * parameter in the command to execute. Placeholders for the input, output @@ -280,9 +277,6 @@ export interface Electron { * just return its contents, for some FFmpeg command the extension matters * (e.g. 
conversion to a JPEG fails if the extension is arbitrary). * - * @param timeoutMS If non-zero, then abort and throw a timeout error if the - * ffmpeg command takes more than the given number of milliseconds. - * * @returns The contents of the output file produced by the ffmpeg command * (specified as {@link outputPathPlaceholder} in {@link command}). */ @@ -290,7 +284,6 @@ export interface Electron { command: string[], dataOrPathOrZipItem: Uint8Array | string | ZipItem, outputFileExtension: string, - timeoutMS: number, ) => Promise; // - ML @@ -339,12 +332,12 @@ export interface Electron { detectFaces: (input: Float32Array) => Promise; /** - * Return a MobileFaceNet embedding for the given face data. + * Return a MobileFaceNet embeddings for the given faces. * * Both the input and output are opaque binary data whose internal structure * is specific to our implementation and the model (MobileFaceNet) we use. */ - faceEmbedding: (input: Float32Array) => Promise; + faceEmbeddings: (input: Float32Array) => Promise; /** * Return a face crop stored by a previous version of ML. @@ -484,17 +477,6 @@ export interface Electron { * The returned paths are guaranteed to use POSIX separators ('/'). */ findFiles: (folderPath: string) => Promise; - - /** - * Stop watching all existing folder watches and remove any callbacks. - * - * This function is meant to be called when the user logs out. It stops - * all existing folder watches and forgets about any "on*" callback - * functions that have been registered. - * - * The persisted state itself gets cleared via {@link clearStores}. 
- */ - reset: () => Promise; }; // - Upload diff --git a/web/packages/next/worker/comlink-worker.ts b/web/packages/next/worker/comlink-worker.ts index 5929e5361..cb90d85f8 100644 --- a/web/packages/next/worker/comlink-worker.ts +++ b/web/packages/next/worker/comlink-worker.ts @@ -47,8 +47,8 @@ const workerBridge = { convertToJPEG: (imageData: Uint8Array) => ensureElectron().convertToJPEG(imageData), detectFaces: (input: Float32Array) => ensureElectron().detectFaces(input), - faceEmbedding: (input: Float32Array) => - ensureElectron().faceEmbedding(input), + faceEmbeddings: (input: Float32Array) => + ensureElectron().faceEmbeddings(input), }; export type WorkerBridge = typeof workerBridge; diff --git a/web/packages/shared/apps/types.ts b/web/packages/shared/apps/types.ts index 0d5d1aa1a..bd3a2d4c5 100644 --- a/web/packages/shared/apps/types.ts +++ b/web/packages/shared/apps/types.ts @@ -7,6 +7,7 @@ export interface PageProps { showNavBar: (show: boolean) => void; isMobile: boolean; setDialogBoxAttributesV2: SetDialogBoxAttributesV2; + logout: () => void; }; appName: APPS; twoFactorType?: TwoFactorType; diff --git a/web/packages/shared/components/DialogBox/types.ts b/web/packages/shared/components/DialogBox/types.ts index 6d076fd5a..08b52fe4c 100644 --- a/web/packages/shared/components/DialogBox/types.ts +++ b/web/packages/shared/components/DialogBox/types.ts @@ -3,6 +3,10 @@ import { ButtonProps } from "@mui/material"; export interface DialogBoxAttributes { icon?: React.ReactNode; title?: string; + /** + * Set this to `true` to prevent the dialog from being closed when the user + * clicks the backdrop outside the dialog. 
+ */ staticBackdrop?: boolean; nonClosable?: boolean; content?: any; diff --git a/web/packages/shared/storage/localForage/index.ts b/web/packages/shared/storage/localForage.ts similarity index 100% rename from web/packages/shared/storage/localForage/index.ts rename to web/packages/shared/storage/localForage.ts diff --git a/web/packages/shared/storage/localForage/helpers.ts b/web/packages/shared/storage/localForage/helpers.ts deleted file mode 100644 index 913b9f52f..000000000 --- a/web/packages/shared/storage/localForage/helpers.ts +++ /dev/null @@ -1,5 +0,0 @@ -import localForage from "."; - -export const clearFiles = async () => { - await localForage.clear(); -}; diff --git a/web/packages/shared/utils/index.ts b/web/packages/shared/utils/index.ts index 568ec5cc4..8b46f6267 100644 --- a/web/packages/shared/utils/index.ts +++ b/web/packages/shared/utils/index.ts @@ -1,11 +1,4 @@ -/** - * Wait for {@link ms} milliseconds - * - * This function is a promisified `setTimeout`. It returns a promise that - * resolves after {@link ms} milliseconds. - */ -export const wait = (ms: number) => - new Promise((resolve) => setTimeout(resolve, ms)); +import { wait } from "@/utils/promise"; export function downloadAsFile(filename: string, content: string) { const file = new Blob([content], { @@ -52,23 +45,3 @@ export async function retryAsyncFunction( } } } - -/** - * Await the given {@link promise} for {@link timeoutMS} milliseconds. If it - * does not resolve within {@link timeoutMS}, then reject with a timeout error. 
- */ -export const withTimeout = async (promise: Promise, ms: number) => { - let timeoutId: ReturnType; - const rejectOnTimeout = new Promise((_, reject) => { - timeoutId = setTimeout( - () => reject(new Error("Operation timed out")), - ms, - ); - }); - const promiseAndCancelTimeout = async () => { - const result = await promise; - clearTimeout(timeoutId); - return result; - }; - return Promise.race([promiseAndCancelTimeout(), rejectOnTimeout]); -}; diff --git a/web/packages/utils/ensure.ts b/web/packages/utils/ensure.ts index 93706bfb6..41639ea2b 100644 --- a/web/packages/utils/ensure.ts +++ b/web/packages/utils/ensure.ts @@ -3,7 +3,7 @@ */ export const ensure = (v: T | null | undefined): T => { if (v === null) throw new Error("Required value was null"); - if (v === undefined) throw new Error("Required value was not found"); + if (v === undefined) throw new Error("Required value was undefined"); return v; }; diff --git a/web/packages/utils/promise.ts b/web/packages/utils/promise.ts new file mode 100644 index 000000000..4cb7648fd --- /dev/null +++ b/web/packages/utils/promise.ts @@ -0,0 +1,28 @@ +/** + * Wait for {@link ms} milliseconds + * + * This function is a promisified `setTimeout`. It returns a promise that + * resolves after {@link ms} milliseconds. + */ +export const wait = (ms: number) => + new Promise((resolve) => setTimeout(resolve, ms)); + +/** + * Await the given {@link promise} for {@link timeoutMS} milliseconds. If it + * does not resolve within {@link timeoutMS}, then reject with a timeout error. 
+ */ +export const withTimeout = async (promise: Promise, ms: number) => { + let timeoutId: ReturnType; + const rejectOnTimeout = new Promise((_, reject) => { + timeoutId = setTimeout( + () => reject(new Error("Operation timed out")), + ms, + ); + }); + const promiseAndCancelTimeout = async () => { + const result = await promise; + clearTimeout(timeoutId); + return result; + }; + return Promise.race([promiseAndCancelTimeout(), rejectOnTimeout]); +}; diff --git a/web/yarn.lock b/web/yarn.lock index 2a50e3f95..a18a0a0dc 100644 --- a/web/yarn.lock +++ b/web/yarn.lock @@ -1015,6 +1015,11 @@ resolved "https://registry.yarnpkg.com/@types/geojson/-/geojson-7946.0.14.tgz#319b63ad6df705ee2a65a73ef042c8271e696613" integrity sha512-WCfD5Ht3ZesJUsONdhvm84dmzWOiOzOAqOncN0++w0lBw1o8OuDNJF2McvvCef/yBqb/HYRahp1BYtODFQ8bRg== +"@types/heic-convert@^1.2.3": + version "1.2.3" + resolved "https://registry.yarnpkg.com/@types/heic-convert/-/heic-convert-1.2.3.tgz#0705f36e467e7b6180806edd0b3f1e673514ff8c" + integrity sha512-5LJ2fGuVk/gnOLihoT56xJwrXxfnNepGvrHwlW5ZtT3HS4jO1AqBaAHCxXUpnY9UaD3zYcyxXMRM2fNN1AFF/Q== + "@types/hoist-non-react-statics@^3.3.1": version "3.3.5" resolved "https://registry.yarnpkg.com/@types/hoist-non-react-statics/-/hoist-non-react-statics-3.3.5.tgz#dab7867ef789d87e2b4b0003c9d65c49cc44a494" @@ -1645,7 +1650,7 @@ chalk@^2.4.2: escape-string-regexp "^1.0.5" supports-color "^5.3.0" -chalk@^4.0.0: +chalk@^4.0.0, chalk@^4.1.2: version "4.1.2" resolved "https://registry.yarnpkg.com/chalk/-/chalk-4.1.2.tgz#aac4e2b7734a740867aeb16bf02aad556a1e7a01" integrity sha512-oKnbhFyRIXpUuez8iBMmyEa4nbj4IOQyuhc/wy9kY7/WVPcwIO9VA668Pu8RkO7+0G76SLROeyw9CpQ061i4mA== @@ -1665,6 +1670,15 @@ client-only@0.0.1: resolved "https://registry.yarnpkg.com/client-only/-/client-only-0.0.1.tgz#38bba5d403c41ab150bff64a95c85013cf73bca1" integrity sha512-IV3Ou0jSMzZrd3pZ48nLkT9DA7Ag1pnPzaiQhpW7c3RbcqqzvzzVu+L8gfqMp/8IM2MQtSiqaCxrrcfu8I8rMA== +cliui@^8.0.1: + version "8.0.1" + resolved 
"https://registry.yarnpkg.com/cliui/-/cliui-8.0.1.tgz#0c04b075db02cbfe60dc8e6cf2f5486b1a3608aa" + integrity sha512-BSeNnyus75C4//NQ9gQt1/csTXyo/8Sb+afLAkzAptFuMsod9HFokGNudZpi/oQV73hnVK+sR+5PVRMd+Dr7YQ== + dependencies: + string-width "^4.2.0" + strip-ansi "^6.0.1" + wrap-ansi "^7.0.0" + clsx@^1.2.1: version "1.2.1" resolved "https://registry.yarnpkg.com/clsx/-/clsx-1.2.1.tgz#0ddc4a20a549b59c93a4116bb26f5294ca17dc12" @@ -1721,6 +1735,21 @@ concat-map@0.0.1: resolved "https://registry.yarnpkg.com/concat-map/-/concat-map-0.0.1.tgz#d8a96bd77fd68df7793a73036a3ba0d5405d477b" integrity sha512-/Srv4dswyQNBfohGpz9o6Yb3Gz3SrUDqBH5rTuhGR7ahtlbYKnVxw2bCFMRljaA7EXHaXZ8wsHdodFvbkhKmqg== +concurrently@^8.2.2: + version "8.2.2" + resolved "https://registry.yarnpkg.com/concurrently/-/concurrently-8.2.2.tgz#353141985c198cfa5e4a3ef90082c336b5851784" + integrity sha512-1dP4gpXFhei8IOtlXRE/T/4H88ElHgTiUzh71YUmtjTEHMSRS2Z/fgOxHSxxusGHogsRfxNq1vyAwxSC+EVyDg== + dependencies: + chalk "^4.1.2" + date-fns "^2.30.0" + lodash "^4.17.21" + rxjs "^7.8.1" + shell-quote "^1.8.1" + spawn-command "0.0.2" + supports-color "^8.1.1" + tree-kill "^1.2.2" + yargs "^17.7.2" + convert-source-map@^1.5.0: version "1.9.0" resolved "https://registry.yarnpkg.com/convert-source-map/-/convert-source-map-1.9.0.tgz#7faae62353fb4213366d0ca98358d22e8368b05f" @@ -1793,7 +1822,7 @@ data-view-byte-offset@^1.0.0: es-errors "^1.3.0" is-data-view "^1.0.1" -date-fns@^2: +date-fns@^2, date-fns@^2.30.0: version "2.30.0" resolved "https://registry.yarnpkg.com/date-fns/-/date-fns-2.30.0.tgz#f367e644839ff57894ec6ac480de40cae4b0f4d0" integrity sha512-fnULvOpxnC5/Vg3NCiWelDsLiUc9bRwAPs/+LfTLNvetFCtCTN+yQz15C/fs4AwX1R9K5GLtLfn8QW+dWisaAw== @@ -1867,11 +1896,6 @@ delayed-stream@~1.0.0: resolved "https://registry.yarnpkg.com/delayed-stream/-/delayed-stream-1.0.0.tgz#df3ae199acadfb7d440aaae0b29e2272b24ec619" integrity sha512-ZySD7Nf91aLB0RxL4KGrKHBXl7Eds1DAmEdcoVawXnLD7SDhpNgtuII2aAkg7a7QS41jxPSZ17p4VdGnMHk3MQ== 
-density-clustering@^1.3.0: - version "1.3.0" - resolved "https://registry.yarnpkg.com/density-clustering/-/density-clustering-1.3.0.tgz#dc9f59c8f0ab97e1624ac64930fd3194817dcac5" - integrity sha512-icpmBubVTwLnsaor9qH/4tG5+7+f61VcqMN3V3pm9sxxSCt2Jcs0zWOgwZW9ARJYaKD3FumIgHiMOcIMRRAzFQ== - dequal@^2.0.3: version "2.0.3" resolved "https://registry.yarnpkg.com/dequal/-/dequal-2.0.3.tgz#2644214f1997d39ed0ee0ece72335490a7ac67be" @@ -2290,15 +2314,20 @@ eslint-plugin-jsx-a11y@^6.7.1: object.entries "^1.1.7" object.fromentries "^2.0.7" -"eslint-plugin-react-hooks@^4.5.0 || 5.0.0-canary-7118f5dd7-20230705", eslint-plugin-react-hooks@^4.6: +"eslint-plugin-react-hooks@^4.5.0 || 5.0.0-canary-7118f5dd7-20230705": version "4.6.0" resolved "https://registry.yarnpkg.com/eslint-plugin-react-hooks/-/eslint-plugin-react-hooks-4.6.0.tgz#4c3e697ad95b77e93f8646aaa1630c1ba607edd3" integrity sha512-oFc7Itz9Qxh2x4gNHStv3BqJq54ExXmfC+a1NjAta66IAN87Wu0R/QArgIS9qKzX3dXKPI9H5crl9QchNMY9+g== -eslint-plugin-react-refresh@^0.4.6: - version "0.4.6" - resolved "https://registry.yarnpkg.com/eslint-plugin-react-refresh/-/eslint-plugin-react-refresh-0.4.6.tgz#e8e8accab681861baed00c5c12da70267db0936f" - integrity sha512-NjGXdm7zgcKRkKMua34qVO9doI7VOxZ6ancSvBELJSSoX97jyndXcSoa8XBh69JoB31dNz3EEzlMcizZl7LaMA== +eslint-plugin-react-hooks@^4.6: + version "4.6.2" + resolved "https://registry.yarnpkg.com/eslint-plugin-react-hooks/-/eslint-plugin-react-hooks-4.6.2.tgz#c829eb06c0e6f484b3fbb85a97e57784f328c596" + integrity sha512-QzliNJq4GinDBcD8gPB5v0wh6g8q3SUi6EFF0x8N/BL9PoVs0atuGc47ozMRyOWAKdwaZ5OnbOEa3WR+dSGKuQ== + +eslint-plugin-react-refresh@^0.4.7: + version "0.4.7" + resolved "https://registry.yarnpkg.com/eslint-plugin-react-refresh/-/eslint-plugin-react-refresh-0.4.7.tgz#1f597f9093b254f10ee0961c139a749acb19af7d" + integrity sha512-yrj+KInFmwuQS2UQcg1SF83ha1tuHC1jMQbRNyuWtlEzzKRDgAl7L4Yp4NlDUZTZNlWvHEzOtJhMi40R7JxcSw== eslint-plugin-react@^7.33.2: version "7.33.2" @@ -2631,6 +2660,11 @@ 
gensync@^1.0.0-beta.2: resolved "https://registry.yarnpkg.com/gensync/-/gensync-1.0.0-beta.2.tgz#32a6ee76c3d7f52d46b2b1ae5d93fea8580a25e0" integrity sha512-3hN7NaskYvMDLQY55gnW3NQ+mesEAepTqlg+VEbj7zzqEMBVNhzcGYYeqFo/TlYz6eQiFcp1HcsCZO+nGgS8zg== +get-caller-file@^2.0.5: + version "2.0.5" + resolved "https://registry.yarnpkg.com/get-caller-file/-/get-caller-file-2.0.5.tgz#4f94412a82db32f36e3b0b9741f8a97feb031f7e" + integrity sha512-DyFP3BM/3YHTQOCUL/w0OZHR0lpKeGrxotcHWcqNEdnltqFwXVfhEBQ94eIo34AfQpo0rGki4cyIiftY06h2Fg== + get-intrinsic@^1.1.1, get-intrinsic@^1.1.3, get-intrinsic@^1.2.1, get-intrinsic@^1.2.2, get-intrinsic@^1.2.3, get-intrinsic@^1.2.4: version "1.2.4" resolved "https://registry.yarnpkg.com/get-intrinsic/-/get-intrinsic-1.2.4.tgz#e385f5a4b5227d449c3eabbad05494ef0abbeadd" @@ -2839,7 +2873,7 @@ hdbscan@0.0.1-alpha.5: dependencies: kd-tree-javascript "^1.0.3" -heic-convert@^2.0.0: +heic-convert@^2.1: version "2.1.0" resolved "https://registry.yarnpkg.com/heic-convert/-/heic-convert-2.1.0.tgz#7f764529e37591ae263ef49582d1d0c13491526e" integrity sha512-1qDuRvEHifTVAj3pFIgkqGgJIr0M3X7cxEPjEp0oG4mo8GFjq99DpCo8Eg3kg17Cy0MTjxpFdoBHOatj7ZVKtg== @@ -3433,7 +3467,7 @@ mime-db@1.52.0: resolved "https://registry.yarnpkg.com/mime-db/-/mime-db-1.52.0.tgz#bbabcdc02859f4987301c856e3387ce5ec43bf70" integrity sha512-sPU4uV7dYlvtWJxwwxHD0PuihVNiE7TyAbQ5SWxDCB9mUYvOgroQOwYQQOKPJ8CIbE+1ETVlOoK1UC2nU3gYvg== -mime-types@^2.1.12, mime-types@^2.1.35: +mime-types@^2.1.12: version "2.1.35" resolved "https://registry.yarnpkg.com/mime-types/-/mime-types-2.1.35.tgz#381a871b62a734450660ae3deee44813f70d959a" integrity sha512-ZDY+bPm5zTTF+YpCrAU9nK0UgICYPT0QtT1NZWFv4s++TNkcgVaT0g6+4R2uI4MjQjzysHB1zxuWL50hzaeXiw== @@ -3821,12 +3855,12 @@ prettier-plugin-organize-imports@^3.2: resolved "https://registry.yarnpkg.com/prettier-plugin-organize-imports/-/prettier-plugin-organize-imports-3.2.4.tgz#77967f69d335e9c8e6e5d224074609309c62845e" integrity 
sha512-6m8WBhIp0dfwu0SkgfOxJqh+HpdyfqSSLfKKRZSFbDuEQXDDndb8fTpRWkUrX/uBenkex3MgnVk0J3b3Y5byog== -prettier-plugin-packagejson@^2.4: - version "2.4.12" - resolved "https://registry.yarnpkg.com/prettier-plugin-packagejson/-/prettier-plugin-packagejson-2.4.12.tgz#eeb917dad83ae42d0caccc9f26d3728b5c4f2434" - integrity sha512-hifuuOgw5rHHTdouw9VrhT8+Nd7UwxtL1qco8dUfd4XUFQL6ia3xyjSxhPQTsGnSYFraTWy5Omb+MZm/OWDTpQ== +prettier-plugin-packagejson@^2.5: + version "2.5.0" + resolved "https://registry.yarnpkg.com/prettier-plugin-packagejson/-/prettier-plugin-packagejson-2.5.0.tgz#23d2cb8b1f7840702d35e3a5078e564ea0bc63e0" + integrity sha512-6XkH3rpin5QEQodBSVNg+rBo4r91g/1mCaRwS1YGdQJZ6jwqrg2UchBsIG9tpS1yK1kNBvOt84OILsX8uHzBGg== dependencies: - sort-package-json "2.8.0" + sort-package-json "2.10.0" synckit "0.9.0" prettier@^3: @@ -4046,6 +4080,11 @@ regexp.prototype.flags@^1.5.0, regexp.prototype.flags@^1.5.2: es-errors "^1.3.0" set-function-name "^2.0.1" +require-directory@^2.1.1: + version "2.1.1" + resolved "https://registry.yarnpkg.com/require-directory/-/require-directory-2.1.1.tgz#8c64ad5fd30dab1c976e2344ffe7f792a6a6df42" + integrity sha512-fGxEI7+wsG9xrvdjsrlmL22OMTTiHRwAMroiEeMgq8gzoLC/PQr7RsRDSTLUg/bZAZtF+TVIkHc6/4RIKrui+Q== + resolve-from@^4.0.0: version "4.0.0" resolved "https://registry.yarnpkg.com/resolve-from/-/resolve-from-4.0.0.tgz#4abcd852ad32dd7baabfe9b40e00a36db5f392e6" @@ -4122,6 +4161,13 @@ run-parallel@^1.1.9: dependencies: queue-microtask "^1.2.2" +rxjs@^7.8.1: + version "7.8.1" + resolved "https://registry.yarnpkg.com/rxjs/-/rxjs-7.8.1.tgz#6f6f3d99ea8044291efd92e7c7fcf562c4057543" + integrity sha512-AA3TVj+0A2iuIoQkWEK/tqFjBq2j+6PO6Y0zJcvzLAFhEFIO3HL0vls9hWLncZbAAbK0mar7oZ4V079I/qPMxg== + dependencies: + tslib "^2.1.0" + safe-array-concat@^1.1.0: version "1.1.0" resolved "https://registry.yarnpkg.com/safe-array-concat/-/safe-array-concat-1.1.0.tgz#8d0cae9cb806d6d1c06e08ab13d847293ebe0692" @@ -4231,6 +4277,11 @@ shebang-regex@^3.0.0: resolved 
"https://registry.yarnpkg.com/shebang-regex/-/shebang-regex-3.0.0.tgz#ae16f1644d873ecad843b0307b143362d4c42172" integrity sha512-7++dFhtcx3353uBaq8DDR4NuxBetBzC7ZQOhmTQInHEd6bSrXdiEyzCvG07Z44UYdLShWUyXt5M/yhz8ekcb1A== +shell-quote@^1.8.1: + version "1.8.1" + resolved "https://registry.yarnpkg.com/shell-quote/-/shell-quote-1.8.1.tgz#6dbf4db75515ad5bac63b4f1894c3a154c766680" + integrity sha512-6j1W9l1iAs/4xYBI1SYOVZyFcCis9b4KCLQ8fgAGG07QvzaRLVVRQvAy85yNmmZSjYjg4MWh4gNvlPujU/5LpA== + side-channel@^1.0.4: version "1.0.5" resolved "https://registry.yarnpkg.com/side-channel/-/side-channel-1.0.5.tgz#9a84546599b48909fb6af1211708d23b1946221b" @@ -4285,10 +4336,10 @@ sort-object-keys@^1.1.3: resolved "https://registry.yarnpkg.com/sort-object-keys/-/sort-object-keys-1.1.3.tgz#bff833fe85cab147b34742e45863453c1e190b45" integrity sha512-855pvK+VkU7PaKYPc+Jjnmt4EzejQHyhhF33q31qG8x7maDzkeFhAAThdCYay11CISO+qAMwjOBP+fPZe0IPyg== -sort-package-json@2.8.0: - version "2.8.0" - resolved "https://registry.yarnpkg.com/sort-package-json/-/sort-package-json-2.8.0.tgz#6a46439ad0fef77f091e678e103f03ecbea575c8" - integrity sha512-PxeNg93bTJWmDGnu0HADDucoxfFiKkIr73Kv85EBThlI1YQPdc0XovBgg2llD0iABZbu2SlKo8ntGmOP9wOj/g== +sort-package-json@2.10.0: + version "2.10.0" + resolved "https://registry.yarnpkg.com/sort-package-json/-/sort-package-json-2.10.0.tgz#6be07424bf3b7db9fbb1bdd69e7945f301026d8a" + integrity sha512-MYecfvObMwJjjJskhxYfuOADkXp1ZMMnCFC8yhp+9HDsk7HhR336hd7eiBs96lTXfiqmUNI+WQCeCMRBhl251g== dependencies: detect-indent "^7.0.1" detect-newline "^4.0.0" @@ -4296,6 +4347,7 @@ sort-package-json@2.8.0: git-hooks-list "^3.0.0" globby "^13.1.2" is-plain-obj "^4.1.0" + semver "^7.6.0" sort-object-keys "^1.1.3" source-map-js@^1.0.2: @@ -4313,13 +4365,17 @@ source-map@^0.5.7: resolved "https://registry.yarnpkg.com/source-map/-/source-map-0.5.7.tgz#8a039d2d1021d22d1ea14c80d8ea468ba2ef3fcc" integrity sha512-LbrmJOMUSdEVxIKvdcJzQC+nQhe8FUZQTXQy6+I75skNgn3OoQ0DZA8YnFa7gp8tqtL3KPf1kmo0R5DoApeSGQ== 
+spawn-command@0.0.2: + version "0.0.2" + resolved "https://registry.yarnpkg.com/spawn-command/-/spawn-command-0.0.2.tgz#9544e1a43ca045f8531aac1a48cb29bdae62338e" + integrity sha512-zC8zGoGkmc8J9ndvml8Xksr1Amk9qBujgbF0JAIWO7kXr43w0h/0GJNM/Vustixu+YE8N/MTrQ7N31FvHUACxQ== + streamsearch@^1.1.0: version "1.1.0" resolved "https://registry.yarnpkg.com/streamsearch/-/streamsearch-1.1.0.tgz#404dd1e2247ca94af554e841a8ef0eaa238da764" integrity sha512-Mcc5wHehp9aXz1ax6bZUyY5afg9u2rv5cqQI3mRrYkGC8rW2hM02jWuwjtL++LS5qinSyhj2QfLyNsuc+VsExg== -"string-width-cjs@npm:string-width@^4.2.0", string-width@^4.1.0: - name string-width-cjs +"string-width-cjs@npm:string-width@^4.2.0", string-width@^4.1.0, string-width@^4.2.0, string-width@^4.2.3: version "4.2.3" resolved "https://registry.yarnpkg.com/string-width/-/string-width-4.2.3.tgz#269c7117d27b05ad2e536830a8ec895ef9c6d010" integrity sha512-wKyQRQpjJ0sIp62ErSZdGsjMJWsap5oRNihHhu6G7JVO/9jIB6UyevL+tXuOqrng8j/cxKTWyWUwvSTriiZz/g== @@ -4497,6 +4553,13 @@ supports-color@^7.1.0: dependencies: has-flag "^4.0.0" +supports-color@^8.1.1: + version "8.1.1" + resolved "https://registry.yarnpkg.com/supports-color/-/supports-color-8.1.1.tgz#cd6fc17e28500cff56c1b86c0a7fd4a54a73005c" + integrity sha512-MpUEN2OodtUzxvKQl72cUF7RQ5EiHsGvSsVG0ia9c5RbWGL2CI4C7EpPS8UTBIplnlzZiNuV56w+FuNxy3ty2Q== + dependencies: + has-flag "^4.0.0" + supports-preserve-symlinks-flag@^1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/supports-preserve-symlinks-flag/-/supports-preserve-symlinks-flag-1.0.0.tgz#6eda4bd344a3c94aea376d4cc31bc77311039e09" @@ -4570,6 +4633,11 @@ transformation-matrix@^2.15.0: resolved "https://registry.yarnpkg.com/transformation-matrix/-/transformation-matrix-2.16.1.tgz#4a2de06331b94ae953193d1b9a5ba002ec5f658a" integrity sha512-tdtC3wxVEuzU7X/ydL131Q3JU5cPMEn37oqVLITjRDSDsnSHVFzW2JiCLfZLIQEgWzZHdSy3J6bZzvKEN24jGA== +tree-kill@^1.2.2: + version "1.2.2" + resolved 
"https://registry.yarnpkg.com/tree-kill/-/tree-kill-1.2.2.tgz#4ca09a9092c88b73a7cdc5e8a01b507b0790a0cc" + integrity sha512-L0Orpi8qGpRG//Nd+H90vFB+3iHnue1zSSGmNOOCh1GLJ7rUKVwV2HvijphGQS2UmhUZewS9VgvxYIdgr+fG1A== + truncate-utf8-bytes@^1.0.0: version "1.0.2" resolved "https://registry.yarnpkg.com/truncate-utf8-bytes/-/truncate-utf8-bytes-1.0.2.tgz#405923909592d56f78a5818434b0b78489ca5f2b" @@ -4592,7 +4660,7 @@ tsconfig-paths@^3.15.0: minimist "^1.2.6" strip-bom "^3.0.0" -tslib@^2.0.0, tslib@^2.4.0, tslib@^2.6.2: +tslib@^2.0.0, tslib@^2.1.0, tslib@^2.4.0, tslib@^2.6.2: version "2.6.2" resolved "https://registry.yarnpkg.com/tslib/-/tslib-2.6.2.tgz#703ac29425e7b37cd6fd456e92404d46d1f3e4ae" integrity sha512-AEYxH93jGFPn/a2iVAwW87VuUIkR1FVUKB77NwMF7nBTDkDrrT/Hpt/IrCJ0QXhW27jTBDcf5ZY7w6RiqTMw2Q== @@ -4841,7 +4909,7 @@ which@^2.0.1: dependencies: isexe "^2.0.0" -"wrap-ansi-cjs@npm:wrap-ansi@^7.0.0": +"wrap-ansi-cjs@npm:wrap-ansi@^7.0.0", wrap-ansi@^7.0.0: version "7.0.0" resolved "https://registry.yarnpkg.com/wrap-ansi/-/wrap-ansi-7.0.0.tgz#67e145cff510a6a6984bdf1152911d69d2eb9e43" integrity sha512-YVGIj2kamLSTxw6NsZjoBxfSwsn0ycdesmc4p+Q21c5zPuZ1pl+NfxVdxPtdHvmNVOQ6XSYG4AUtyt/Fi7D16Q== @@ -4876,6 +4944,11 @@ xml-js@^1.6.11: dependencies: sax "^1.2.4" +y18n@^5.0.5: + version "5.0.8" + resolved "https://registry.yarnpkg.com/y18n/-/y18n-5.0.8.tgz#7f4934d0f7ca8c56f95314939ddcd2dd91ce1d55" + integrity sha512-0pfFzegeDWJHJIAmTLRP2DwHjdF5s7jo9tuztdQxAhINCdvS+3nGINqPd00AphqJR/0LhANUS6/+7SCb98YOfA== + yallist@^3.0.2: version "3.1.1" resolved "https://registry.yarnpkg.com/yallist/-/yallist-3.1.1.tgz#dbb7daf9bfd8bac9ab45ebf602b8cbad0d5d08fd" @@ -4891,6 +4964,24 @@ yaml@^1.10.0: resolved "https://registry.yarnpkg.com/yaml/-/yaml-1.10.2.tgz#2301c5ffbf12b467de8da2333a459e29e7920e4b" integrity sha512-r3vXyErRCYJ7wg28yvBY5VSoAF8ZvlcW9/BwUzEtUsjvX/DKs24dIkuwjtuprwJJHsbyUbLApepYTR1BN4uHrg== +yargs-parser@^21.1.1: + version "21.1.1" + resolved 
"https://registry.yarnpkg.com/yargs-parser/-/yargs-parser-21.1.1.tgz#9096bceebf990d21bb31fa9516e0ede294a77d35" + integrity sha512-tVpsJW7DdjecAiFpbIB1e3qxIQsE6NoPc5/eTdrbbIC4h0LVsWhnoa3g+m2HclBIujHzsxZ4VJVA+GUuc2/LBw== + +yargs@^17.7.2: + version "17.7.2" + resolved "https://registry.yarnpkg.com/yargs/-/yargs-17.7.2.tgz#991df39aca675a192b816e1e0363f9d75d2aa269" + integrity sha512-7dSzzRQ++CKnNI/krKnYRV7JKKPUXMEh61soaHKg9mrWEhzFWhFnxPxGl+69cD1Ou63C13NUPCnmIcrvqCuM6w== + dependencies: + cliui "^8.0.1" + escalade "^3.1.1" + get-caller-file "^2.0.5" + require-directory "^2.1.1" + string-width "^4.2.3" + y18n "^5.0.5" + yargs-parser "^21.1.1" + yocto-queue@^0.1.0: version "0.1.0" resolved "https://registry.yarnpkg.com/yocto-queue/-/yocto-queue-0.1.0.tgz#0294eb3dee05028d31ee1a5fa2c556a6aaf10a1b"