Merge branch 'main' into auth-v3

Neeraj Gupta 2024-05-15 14:15:34 +05:30
commit 47d9d3cd1e
303 changed files with 7070 additions and 5639 deletions

View file

@ -30,7 +30,7 @@ jobs:
upload_sources: true
upload_translations: false
download_translations: true
localization_branch_name: crowdin-translations-auth
localization_branch_name: translations/auth
create_pull_request: true
skip_untranslated_strings: true
pull_request_title: "[auth] New translations"

View file

@ -3,7 +3,7 @@ name: "Lint (auth)"
on:
# Run on every push to a branch other than main that changes auth/
push:
branches-ignore: [main, "deploy/**"]
branches-ignore: [main]
paths:
- "auth/**"
- ".github/workflows/auth-lint.yml"

View file

@ -3,7 +3,7 @@ name: "Lint (desktop)"
on:
# Run on every push to a branch other than main that changes desktop/
push:
branches-ignore: [main, "deploy/**"]
branches-ignore: [main]
paths:
- "desktop/**"
- ".github/workflows/desktop-lint.yml"

View file

@ -37,11 +37,8 @@ jobs:
run: yarn build
- name: Publish
uses: cloudflare/pages-action@1
uses: cloudflare/wrangler-action@v3
with:
accountId: ${{ secrets.CLOUDFLARE_ACCOUNT_ID }}
apiToken: ${{ secrets.CLOUDFLARE_API_TOKEN }}
projectName: ente
branch: help
directory: docs/docs/.vitepress/dist
wranglerVersion: "3"
command: pages deploy --project-name=ente --commit-dirty=true --branch=help docs/docs/.vitepress/dist
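Pieced together from the added lines above, the migrated publish step reads as a single wrangler-action invocation; the project name, branch, and output directory that pages-action took as separate inputs all move into the one `command` argument:

```yaml
- name: Publish
  uses: cloudflare/wrangler-action@v3
  with:
    accountId: ${{ secrets.CLOUDFLARE_ACCOUNT_ID }}
    apiToken: ${{ secrets.CLOUDFLARE_API_TOKEN }}
    command: pages deploy --project-name=ente --commit-dirty=true --branch=help docs/docs/.vitepress/dist
```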

View file

@ -6,7 +6,7 @@ name: "Verify build (docs)"
on:
# Run on every push to a branch other than main that changes docs/
push:
branches-ignore: [main, "deploy/**"]
branches-ignore: [main]
paths:
- "docs/**"
- ".github/workflows/docs-verify-build.yml"

View file

@ -30,7 +30,7 @@ jobs:
upload_sources: true
upload_translations: false
download_translations: true
localization_branch_name: crowdin-translations-mobile
localization_branch_name: translations/mobile
create_pull_request: true
skip_untranslated_strings: true
pull_request_title: "[mobile] New translations"

View file

@ -1,4 +1,4 @@
name: "Internal Release - Photos"
name: "Internal release (photos)"
on:
workflow_dispatch: # Allow manually running the action

View file

@ -3,7 +3,7 @@ name: "Lint (mobile)"
on:
# Run on every push to a branch other than main that changes mobile/
push:
branches-ignore: [main, f-droid, "deploy/**"]
branches-ignore: [main, f-droid]
paths:
- "mobile/**"
- ".github/workflows/mobile-lint.yml"

View file

@ -3,7 +3,7 @@ name: "Lint (server)"
on:
# Run on every push to a branch other than main that changes server/
push:
branches-ignore: [main, "deploy/**"]
branches-ignore: [main]
paths:
- "server/**"
- ".github/workflows/server-lint.yml"

View file

@ -38,3 +38,8 @@ jobs:
tags: ${{ inputs.commit }}, latest
username: ${{ github.actor }}
password: ${{ secrets.GITHUB_TOKEN }}
- name: Tag as server/ghcr
run: |
git tag -f server/ghcr
git push -f origin server/ghcr

.github/workflows/web-crowdin-push.yml (new file)
View file

@ -0,0 +1,34 @@
name: "Push Crowdin translations (web)"
# This is a variant of web-crowdin.yml that uploads the translated strings in
# addition to the source strings.
#
# This allows us to change the strings in our source code for an automated
# refactoring (e.g. renaming a key), and then run this workflow to update the
# data in Crowdin taking our source code as the source of truth.
on:
# Trigger manually, or using
# `gh workflow run web-crowdin-push.yml --ref <my-branch>`
workflow_dispatch:
jobs:
push-to-crowdin:
runs-on: ubuntu-latest
steps:
- name: Checkout
uses: actions/checkout@v4
- name: Crowdin push
uses: crowdin/github-action@v1
with:
base_path: "web/"
config: "web/crowdin.yml"
upload_sources: true
upload_translations: true
download_translations: false
project_id: 569613
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
CROWDIN_PERSONAL_TOKEN: ${{ secrets.CROWDIN_PERSONAL_TOKEN }}

View file

@ -36,7 +36,7 @@ jobs:
upload_sources: true
upload_translations: false
download_translations: true
localization_branch_name: crowdin-translations-web
localization_branch_name: translations/web
create_pull_request: true
skip_untranslated_strings: true
pull_request_title: "[web] New translations"

View file

@ -1,43 +0,0 @@
name: "Deploy (accounts)"
on:
push:
# Run workflow on pushes to the deploy/accounts
branches: [deploy/accounts]
jobs:
deploy:
runs-on: ubuntu-latest
defaults:
run:
working-directory: web
steps:
- name: Checkout code
uses: actions/checkout@v4
with:
submodules: recursive
- name: Setup node and enable yarn caching
uses: actions/setup-node@v4
with:
node-version: 20
cache: "yarn"
cache-dependency-path: "web/yarn.lock"
- name: Install dependencies
run: yarn install
- name: Build accounts
run: yarn build:accounts
- name: Publish accounts
uses: cloudflare/pages-action@1
with:
accountId: ${{ secrets.CLOUDFLARE_ACCOUNT_ID }}
apiToken: ${{ secrets.CLOUDFLARE_API_TOKEN }}
projectName: ente
branch: deploy/accounts
directory: web/apps/accounts/out
wranglerVersion: "3"

View file

@ -1,43 +0,0 @@
name: "Deploy (auth)"
on:
push:
# Run workflow on pushes to the deploy/auth
branches: [deploy/auth]
jobs:
deploy:
runs-on: ubuntu-latest
defaults:
run:
working-directory: web
steps:
- name: Checkout code
uses: actions/checkout@v4
with:
submodules: recursive
- name: Setup node and enable yarn caching
uses: actions/setup-node@v4
with:
node-version: 20
cache: "yarn"
cache-dependency-path: "web/yarn.lock"
- name: Install dependencies
run: yarn install
- name: Build auth
run: yarn build:auth
- name: Publish auth
uses: cloudflare/pages-action@1
with:
accountId: ${{ secrets.CLOUDFLARE_ACCOUNT_ID }}
apiToken: ${{ secrets.CLOUDFLARE_API_TOKEN }}
projectName: ente
branch: deploy/auth
directory: web/apps/auth/out
wranglerVersion: "3"

View file

@ -1,43 +0,0 @@
name: "Deploy (cast)"
on:
push:
# Run workflow on pushes to the deploy/cast
branches: [deploy/cast]
jobs:
deploy:
runs-on: ubuntu-latest
defaults:
run:
working-directory: web
steps:
- name: Checkout code
uses: actions/checkout@v4
with:
submodules: recursive
- name: Setup node and enable yarn caching
uses: actions/setup-node@v4
with:
node-version: 20
cache: "yarn"
cache-dependency-path: "web/yarn.lock"
- name: Install dependencies
run: yarn install
- name: Build cast
run: yarn build:cast
- name: Publish cast
uses: cloudflare/pages-action@1
with:
accountId: ${{ secrets.CLOUDFLARE_ACCOUNT_ID }}
apiToken: ${{ secrets.CLOUDFLARE_API_TOKEN }}
projectName: ente
branch: deploy/cast
directory: web/apps/cast/out
wranglerVersion: "3"

.github/workflows/web-deploy-one.yml (new file)
View file

@ -0,0 +1,61 @@
name: "Deploy one (web)"
on:
workflow_dispatch:
inputs:
app:
description: "App to build and deploy"
type: choice
required: true
default: "photos"
options:
- "accounts"
- "auth"
- "cast"
- "payments"
- "photos"
jobs:
deploy:
runs-on: ubuntu-latest
defaults:
run:
working-directory: web
steps:
- name: Checkout code
uses: actions/checkout@v4
with:
submodules: recursive
- name: Setup node and enable yarn caching
uses: actions/setup-node@v4
with:
node-version: 20
cache: "yarn"
cache-dependency-path: "web/yarn.lock"
- name: Install dependencies
run: yarn install
- name: Build ${{ inputs.app }}
run: yarn build:${{ inputs.app }}
- name: Publish ${{ inputs.app }} to preview
uses: cloudflare/wrangler-action@v3
with:
accountId: ${{ secrets.CLOUDFLARE_ACCOUNT_ID }}
apiToken: ${{ secrets.CLOUDFLARE_API_TOKEN }}
# [Note: Wrangler commit-dirty]
#
# Without the --commit-dirty flag, running the wrangler-action
# always prints a warning when used:
#
# Warning: Your working directory is a git repo and has uncommitted changes
# To silence this warning, pass in --commit-dirty=true
#
# There is no clear documentation on whether passing this flag
# is harmless, but all indications and in-practice tests
# suggest that it is.
command: pages deploy --project-name=ente --commit-dirty=true --branch=deploy/${{ inputs.app }} web/apps/${{ inputs.app }}/out
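Since this workflow only has a `workflow_dispatch` trigger, it is run by hand. A sketch of one way to invoke it, assuming the GitHub CLI (`-f` supplies the `app` input declared above):

```sh
# Build the auth app and deploy it to the deploy/auth branch of the
# "ente" Pages project.
gh workflow run web-deploy-one.yml -f app=auth
```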

View file

@ -1,43 +0,0 @@
name: "Deploy (payments)"
on:
push:
# Run workflow on pushes to the deploy/payments
branches: [deploy/payments]
jobs:
deploy:
runs-on: ubuntu-latest
defaults:
run:
working-directory: web
steps:
- name: Checkout code
uses: actions/checkout@v4
with:
submodules: recursive
- name: Setup node and enable yarn caching
uses: actions/setup-node@v4
with:
node-version: 20
cache: "yarn"
cache-dependency-path: "web/yarn.lock"
- name: Install dependencies
run: yarn install
- name: Build payments
run: yarn build:payments
- name: Publish payments
uses: cloudflare/pages-action@1
with:
accountId: ${{ secrets.CLOUDFLARE_ACCOUNT_ID }}
apiToken: ${{ secrets.CLOUDFLARE_API_TOKEN }}
projectName: ente
branch: deploy/payments
directory: web/apps/payments/dist
wranglerVersion: "3"

View file

@ -1,43 +0,0 @@
name: "Deploy (photos)"
on:
push:
# Run workflow on pushes to the deploy/photos
branches: [deploy/photos]
jobs:
deploy:
runs-on: ubuntu-latest
defaults:
run:
working-directory: web
steps:
- name: Checkout code
uses: actions/checkout@v4
with:
submodules: recursive
- name: Setup node and enable yarn caching
uses: actions/setup-node@v4
with:
node-version: 20
cache: "yarn"
cache-dependency-path: "web/yarn.lock"
- name: Install dependencies
run: yarn install
- name: Build photos
run: yarn build:photos
- name: Publish photos
uses: cloudflare/pages-action@1
with:
accountId: ${{ secrets.CLOUDFLARE_ACCOUNT_ID }}
apiToken: ${{ secrets.CLOUDFLARE_API_TOKEN }}
projectName: ente
branch: deploy/photos
directory: web/apps/photos/out
wranglerVersion: "3"

View file

@ -1,4 +1,4 @@
name: "Preview (web)"
name: "Deploy preview (web)"
on:
workflow_dispatch:
@ -43,11 +43,8 @@ jobs:
run: yarn build:${{ inputs.app }}
- name: Publish ${{ inputs.app }} to preview
uses: cloudflare/pages-action@1
uses: cloudflare/wrangler-action@v3
with:
accountId: ${{ secrets.CLOUDFLARE_ACCOUNT_ID }}
apiToken: ${{ secrets.CLOUDFLARE_API_TOKEN }}
projectName: ente
branch: preview
directory: web/apps/${{ inputs.app }}/out
wranglerVersion: "3"
command: pages deploy --project-name=ente --commit-dirty=true --branch=preview web/apps/${{ inputs.app }}/out

View file

@ -38,11 +38,8 @@ jobs:
run: yarn build:staff
- name: Publish staff
uses: cloudflare/pages-action@1
uses: cloudflare/wrangler-action@v3
with:
accountId: ${{ secrets.CLOUDFLARE_ACCOUNT_ID }}
apiToken: ${{ secrets.CLOUDFLARE_API_TOKEN }}
projectName: ente
branch: deploy/staff
directory: web/apps/staff/dist
wranglerVersion: "3"
command: pages deploy --project-name=ente --commit-dirty=true --branch=deploy/staff web/apps/staff/dist

View file

@ -0,0 +1,86 @@
name: "Deploy staging (web)"
on:
schedule:
# Run every day at ~3:00 PM IST
#
# See: [Note: Run workflow every 24 hours]
- cron: "25 9 * * *"
# Also allow manually running the workflow
workflow_dispatch:
jobs:
deploy:
runs-on: ubuntu-latest
defaults:
run:
working-directory: web
steps:
- name: Checkout code
uses: actions/checkout@v4
with:
submodules: recursive
- name: Setup node and enable yarn caching
uses: actions/setup-node@v4
with:
node-version: 20
cache: "yarn"
cache-dependency-path: "web/yarn.lock"
- name: Install dependencies
run: yarn install
- name: Build photos
run: yarn build:photos
env:
NEXT_PUBLIC_ENTE_ALBUMS_ENDPOINT: https://albums.ente.sh
- name: Publish photos
uses: cloudflare/wrangler-action@v3
with:
accountId: ${{ secrets.CLOUDFLARE_ACCOUNT_ID }}
apiToken: ${{ secrets.CLOUDFLARE_API_TOKEN }}
command: pages deploy --project-name=ente --commit-dirty=true --branch=n-photos web/apps/photos/out
- name: Build accounts
run: yarn build:accounts
- name: Publish accounts
uses: cloudflare/wrangler-action@v3
with:
accountId: ${{ secrets.CLOUDFLARE_ACCOUNT_ID }}
apiToken: ${{ secrets.CLOUDFLARE_API_TOKEN }}
command: pages deploy --project-name=ente --commit-dirty=true --branch=n-accounts web/apps/accounts/out
- name: Build auth
run: yarn build:auth
- name: Publish auth
uses: cloudflare/wrangler-action@v3
with:
accountId: ${{ secrets.CLOUDFLARE_ACCOUNT_ID }}
apiToken: ${{ secrets.CLOUDFLARE_API_TOKEN }}
command: pages deploy --project-name=ente --commit-dirty=true --branch=n-auth web/apps/auth/out
- name: Build cast
run: yarn build:cast
- name: Publish cast
uses: cloudflare/wrangler-action@v3
with:
accountId: ${{ secrets.CLOUDFLARE_ACCOUNT_ID }}
apiToken: ${{ secrets.CLOUDFLARE_API_TOKEN }}
command: pages deploy --project-name=ente --commit-dirty=true --branch=n-cast web/apps/cast/out
- name: Build payments
run: yarn build:payments
- name: Publish payments
uses: cloudflare/wrangler-action@v3
with:
accountId: ${{ secrets.CLOUDFLARE_ACCOUNT_ID }}
apiToken: ${{ secrets.CLOUDFLARE_API_TOKEN }}
command: pages deploy --project-name=ente --commit-dirty=true --branch=n-payments web/apps/payments/dist

View file

@ -1,17 +1,21 @@
name: "Nightly (web)"
name: "Deploy (web)"
on:
schedule:
# [Note: Run workflow every 24 hours]
#
# Run every 24 hours - First field is minute, second is hour of the day
# This runs 23:15 UTC everyday - 1 and 15 are just arbitrary offset to
# avoid scheduling it on the exact hour, as suggested by GitHub.
# Run every day at ~8:00 AM IST (except Sundays).
#
# First field is minute, second is hour of the day. Last is day of week,
# 0 being Sunday.
#
# Add a few minutes of offset to avoid scheduling on exact hourly
# boundaries (recommended by GitHub to avoid congestion).
#
# https://docs.github.com/en/actions/using-workflows/events-that-trigger-workflows#schedule
# https://crontab.guru/
#
- cron: "15 23 * * *"
- cron: "25 2 * * 1-6"
# Also allow manually running the workflow
workflow_dispatch:
@ -39,69 +43,52 @@ jobs:
- name: Install dependencies
run: yarn install
- name: Build photos
run: yarn build:photos
- name: Publish photos
uses: cloudflare/wrangler-action@v3
with:
accountId: ${{ secrets.CLOUDFLARE_ACCOUNT_ID }}
apiToken: ${{ secrets.CLOUDFLARE_API_TOKEN }}
command: pages deploy --project-name=ente --commit-dirty=true --branch=deploy/photos web/apps/photos/out
- name: Build accounts
run: yarn build:accounts
- name: Publish accounts
uses: cloudflare/pages-action@1
uses: cloudflare/wrangler-action@v3
with:
accountId: ${{ secrets.CLOUDFLARE_ACCOUNT_ID }}
apiToken: ${{ secrets.CLOUDFLARE_API_TOKEN }}
projectName: ente
branch: n-accounts
directory: web/apps/accounts/out
wranglerVersion: "3"
command: pages deploy --project-name=ente --commit-dirty=true --branch=deploy/accounts web/apps/accounts/out
- name: Build auth
run: yarn build:auth
- name: Publish auth
uses: cloudflare/pages-action@1
uses: cloudflare/wrangler-action@v3
with:
accountId: ${{ secrets.CLOUDFLARE_ACCOUNT_ID }}
apiToken: ${{ secrets.CLOUDFLARE_API_TOKEN }}
projectName: ente
branch: n-auth
directory: web/apps/auth/out
wranglerVersion: "3"
command: pages deploy --project-name=ente --commit-dirty=true --branch=deploy/auth web/apps/auth/out
- name: Build cast
run: yarn build:cast
- name: Publish cast
uses: cloudflare/pages-action@1
uses: cloudflare/wrangler-action@v3
with:
accountId: ${{ secrets.CLOUDFLARE_ACCOUNT_ID }}
apiToken: ${{ secrets.CLOUDFLARE_API_TOKEN }}
projectName: ente
branch: n-cast
directory: web/apps/cast/out
wranglerVersion: "3"
command: pages deploy --project-name=ente --commit-dirty=true --branch=deploy/cast web/apps/cast/out
- name: Build payments
run: yarn build:payments
- name: Publish payments
uses: cloudflare/pages-action@1
uses: cloudflare/wrangler-action@v3
with:
accountId: ${{ secrets.CLOUDFLARE_ACCOUNT_ID }}
apiToken: ${{ secrets.CLOUDFLARE_API_TOKEN }}
projectName: ente
branch: n-payments
directory: web/apps/payments/dist
wranglerVersion: "3"
- name: Build photos
run: yarn build:photos
env:
NEXT_PUBLIC_ENTE_ALBUMS_ENDPOINT: https://albums.ente.sh
- name: Publish photos
uses: cloudflare/pages-action@1
with:
accountId: ${{ secrets.CLOUDFLARE_ACCOUNT_ID }}
apiToken: ${{ secrets.CLOUDFLARE_API_TOKEN }}
projectName: ente
branch: n-photos
directory: web/apps/photos/out
wranglerVersion: "3"
command: pages deploy --project-name=ente --commit-dirty=true --branch=deploy/payments web/apps/payments/dist

View file

@ -3,7 +3,7 @@ name: "Lint (web)"
on:
# Run on every push to a branch other than main that changes web/
push:
branches-ignore: [main, "deploy/**"]
branches-ignore: [main]
paths:
- "web/**"
- ".github/workflows/web-lint.yml"

@ -1 +1 @@
Subproject commit 8e7701d6a40462733043f54b3849faf35af70a83
Subproject commit 8a3731352af133a02223a6c7b1f37c4abb096af0

View file

@ -87,7 +87,7 @@ PODS:
- SDWebImage/Core (5.19.0)
- Sentry/HybridSDK (8.21.0):
- SentryPrivate (= 8.21.0)
- sentry_flutter (0.0.1):
- sentry_flutter (7.19.0):
- Flutter
- FlutterMacOS
- Sentry/HybridSDK (= 8.21.0)
@ -249,7 +249,7 @@ SPEC CHECKSUMS:
ReachabilitySwift: 5ae15e16814b5f9ef568963fb2c87aeb49158c66
SDWebImage: 981fd7e860af070920f249fd092420006014c3eb
Sentry: ebc12276bd17613a114ab359074096b6b3725203
sentry_flutter: dff1df05dc39c83d04f9330b36360fc374574c5e
sentry_flutter: 88ebea3f595b0bc16acc5bedacafe6d60c12dcd5
SentryPrivate: d651efb234cf385ec9a1cdd3eff94b5e78a0e0fe
share_plus: c3fef564749587fc939ef86ffb283ceac0baf9f5
shared_preferences_foundation: b4c3b4cddf1c21f02770737f147a3f5da9d39695
@ -263,4 +263,4 @@ SPEC CHECKSUMS:
PODFILE CHECKSUM: b4e3a7eabb03395b66e81fc061789f61526ee6bb
COCOAPODS: 1.14.3
COCOAPODS: 1.15.2

View file

@ -20,7 +20,6 @@
"codeIssuerHint": "المصدِّر",
"codeSecretKeyHint": "الرمز السري",
"codeAccountHint": "الحساب (you@domain.com)",
"accountKeyType": "نوع المفتاح",
"sessionExpired": "انتهت صلاحية الجلسة",
"@sessionExpired": {
"description": "Title of the dialog when the users current session is invalid/expired"

View file

@ -20,7 +20,6 @@
"codeIssuerHint": "Aussteller",
"codeSecretKeyHint": "Geheimer Schlüssel",
"codeAccountHint": "Konto (you@domain.com)",
"accountKeyType": "Art des Schlüssels",
"sessionExpired": "Sitzung abgelaufen",
"@sessionExpired": {
"description": "Title of the dialog when the users current session is invalid/expired"

View file

@ -20,7 +20,6 @@
"codeIssuerHint": "Emisor",
"codeSecretKeyHint": "Llave Secreta",
"codeAccountHint": "Cuenta (tu@dominio.com)",
"accountKeyType": "Tipo de llave",
"sessionExpired": "La sesión ha expirado",
"@sessionExpired": {
"description": "Title of the dialog when the users current session is invalid/expired"
@ -113,6 +112,7 @@
"copied": "Copiado",
"pleaseTryAgain": "Por favor, inténtalo nuevamente",
"existingUser": "Usuario existente",
"newUser": "Nuevo a Ente",
"delete": "Borrar",
"enterYourPasswordHint": "Ingrese su contraseña",
"forgotPassword": "Olvidé mi contraseña",
@ -138,6 +138,8 @@
"enterCodeHint": "Ingrese el código de seis dígitos de su aplicación de autenticación",
"lostDeviceTitle": "¿Perdió su dispositivo?",
"twoFactorAuthTitle": "Autenticación de dos factores",
"passkeyAuthTitle": "Verificación de llave de acceso",
"verifyPasskey": "Verificar llave de acceso",
"recoverAccount": "Recuperar cuenta",
"enterRecoveryKeyHint": "Introduzca su clave de recuperación",
"recover": "Recuperar",
@ -191,6 +193,8 @@
"recoveryKeySaveDescription": "Nosotros no almacenamos esta clave, por favor guarde dicha clave de 24 palabras en un lugar seguro.",
"doThisLater": "Hacer esto más tarde",
"saveKey": "Guardar Clave",
"save": "Guardar",
"send": "Enviar",
"back": "Atrás",
"createAccount": "Crear cuenta",
"passwordStrength": "Fortaleza de la contraseña: {passwordStrengthValue}",
@ -397,5 +401,8 @@
"signOutOtherDevices": "Cerrar la sesión de otros dispositivos",
"doNotSignOut": "No cerrar la sesión",
"hearUsWhereTitle": "¿Cómo conoció Ente? (opcional)",
"hearUsExplanation": "No rastreamos las aplicaciones instaladas. ¡Nos ayudaría si nos dijera dónde nos encontró!"
"hearUsExplanation": "No rastreamos las aplicaciones instaladas. ¡Nos ayudaría si nos dijera dónde nos encontró!",
"passkey": "Llave de acceso",
"developerSettingsWarning": "¿Estás seguro de que quieres modificar los ajustes de desarrollador?",
"developerSettings": "Ajustes de desarrollador"
}

View file

@ -14,7 +14,6 @@
"codeIssuerHint": "صادر کننده",
"codeSecretKeyHint": "کلید مخفی",
"codeAccountHint": "حساب (you@domain.com)",
"accountKeyType": "نوع کلید",
"sessionExpired": "نشست منقضی شده است",
"@sessionExpired": {
"description": "Title of the dialog when the users current session is invalid/expired"

View file

@ -12,7 +12,6 @@
"codeIssuerHint": "Myöntäjä",
"codeSecretKeyHint": "Salainen avain",
"codeAccountHint": "Tili (sinun@jokinosoite.com)",
"accountKeyType": "Avaimen tyyppi",
"sessionExpired": "Istunto on vanheutunut",
"@sessionExpired": {
"description": "Title of the dialog when the users current session is invalid/expired"

View file

@ -20,7 +20,6 @@
"codeIssuerHint": "Émetteur",
"codeSecretKeyHint": "Clé secrète",
"codeAccountHint": "Compte (vous@exemple.com)",
"accountKeyType": "Type de clé",
"sessionExpired": "Session expirée",
"@sessionExpired": {
"description": "Title of the dialog when the users current session is invalid/expired"

View file

@ -19,7 +19,6 @@
"codeIssuerHint": "מנפיק",
"codeSecretKeyHint": "מפתח סודי",
"codeAccountHint": "חשבון(you@domain.com)",
"accountKeyType": "סוג מפתח",
"sessionExpired": "זמן החיבור הסתיים",
"@sessionExpired": {
"description": "Title of the dialog when the users current session is invalid/expired"

View file

@ -20,7 +20,6 @@
"codeIssuerHint": "Emittente",
"codeSecretKeyHint": "Codice segreto",
"codeAccountHint": "Account (username@dominio.it)",
"accountKeyType": "Tipo di chiave",
"sessionExpired": "Sessione scaduta",
"@sessionExpired": {
"description": "Title of the dialog when the users current session is invalid/expired"

View file

@ -20,7 +20,6 @@
"codeIssuerHint": "発行者",
"codeSecretKeyHint": "秘密鍵",
"codeAccountHint": "アカウント (you@domain.com)",
"accountKeyType": "鍵の種類",
"sessionExpired": "セッションが失効しました",
"@sessionExpired": {
"description": "Title of the dialog when the users current session is invalid/expired"

View file

@ -20,7 +20,6 @@
"codeIssuerHint": "მომწოდებელი",
"codeSecretKeyHint": "გასაღები",
"codeAccountHint": "ანგარიში (you@domain.com)",
"accountKeyType": "გასაღების ტიპი",
"sessionExpired": "სესიის დრო ამოიწურა",
"@sessionExpired": {
"description": "Title of the dialog when the users current session is invalid/expired"

View file

@ -20,7 +20,6 @@
"codeIssuerHint": "Uitgever",
"codeSecretKeyHint": "Geheime sleutel",
"codeAccountHint": "Account (jij@domein.nl)",
"accountKeyType": "Type sleutel",
"sessionExpired": "Sessie verlopen",
"@sessionExpired": {
"description": "Title of the dialog when the users current session is invalid/expired"

View file

@ -20,7 +20,6 @@
"codeIssuerHint": "Wydawca",
"codeSecretKeyHint": "Tajny klucz",
"codeAccountHint": "Konto (ty@domena.com)",
"accountKeyType": "Rodzaj klucza",
"sessionExpired": "Sesja wygasła",
"@sessionExpired": {
"description": "Title of the dialog when the users current session is invalid/expired"
@ -186,6 +185,8 @@
"recoveryKeySaveDescription": "Nie przechowujemy tego klucza, proszę zachować ten 24 wyrazowy klucz w bezpiecznym miejscu.",
"doThisLater": "Zrób To Później",
"saveKey": "Zapisz klucz",
"save": "Zapisz",
"send": "Wyślij",
"back": "Wstecz",
"createAccount": "Utwórz konto",
"passwordStrength": "Siła hasła: {passwordStrengthValue}",
@ -336,6 +337,10 @@
"@androidBiometricNotRecognized": {
"description": "Message to let the user know that authentication was failed. It is used on Android side. Maximum 60 characters."
},
"androidCancelButton": "Anuluj",
"@androidCancelButton": {
"description": "Message showed on a button that the user can click to leave the current dialog. It is used on Android side. Maximum 30 characters."
},
"androidSignInTitle": "Wymagana autoryzacja",
"@androidSignInTitle": {
"description": "Message showed as a title in a dialog which indicates the user that they need to scan biometric to continue. It is used on Android side. Maximum 60 characters."

View file

@ -20,7 +20,6 @@
"codeIssuerHint": "Emissor",
"codeSecretKeyHint": "Chave secreta",
"codeAccountHint": "Conta (voce@dominio.com)",
"accountKeyType": "Tipo de chave",
"sessionExpired": "Sessão expirada",
"@sessionExpired": {
"description": "Title of the dialog when the users current session is invalid/expired"

View file

@ -20,7 +20,6 @@
"codeIssuerHint": "Эмитент",
"codeSecretKeyHint": "Секретный ключ",
"codeAccountHint": "Аккаунт (you@domain.com)",
"accountKeyType": "Тип ключа",
"sessionExpired": "Сеанс истек",
"@sessionExpired": {
"description": "Title of the dialog when the users current session is invalid/expired"

View file

@ -16,7 +16,6 @@
"codeIssuerHint": "Utfärdare",
"codeSecretKeyHint": "Secret Key",
"codeAccountHint": "Konto (du@domän.com)",
"accountKeyType": "Typ av nyckel",
"sessionExpired": "Sessionen har gått ut",
"@sessionExpired": {
"description": "Title of the dialog when the users current session is invalid/expired"

View file

@ -20,7 +20,6 @@
"codeIssuerHint": "ኣዋጂ",
"codeSecretKeyHint": "ምስጢራዊ መፍትሕ",
"codeAccountHint": "ሕሳብ (you@domain.com)",
"accountKeyType": "ዓይነት መፍትሕ",
"sessionExpired": "ክፍለ ግዜኡ ኣኺሉ።",
"@sessionExpired": {
"description": "Title of the dialog when the users current session is invalid/expired"

View file

@ -20,7 +20,6 @@
"codeIssuerHint": "Yayınlayan",
"codeSecretKeyHint": "Gizli Anahtar",
"codeAccountHint": "Hesap (ornek@domain.com)",
"accountKeyType": "Anahtar türü",
"sessionExpired": "Oturum süresi doldu",
"@sessionExpired": {
"description": "Title of the dialog when the users current session is invalid/expired"

View file

@ -20,7 +20,6 @@
"codeIssuerHint": "Nhà phát hành",
"codeSecretKeyHint": "Khóa bí mật",
"codeAccountHint": "Tài khoản (bạn@miền.com)",
"accountKeyType": "Loại khóa",
"sessionExpired": "Phiên làm việc đã hết hạn",
"@sessionExpired": {
"description": "Title of the dialog when the users current session is invalid/expired"

View file

@ -20,7 +20,6 @@
"codeIssuerHint": "发行人",
"codeSecretKeyHint": "私钥",
"codeAccountHint": "账户 (you@domain.com)",
"accountKeyType": "密钥类型",
"sessionExpired": "会话已过期",
"@sessionExpired": {
"description": "Title of the dialog when the users current session is invalid/expired"

View file

@ -27,7 +27,8 @@ var listAccCmd = &cobra.Command{
// Subcommand for 'account add'
var addAccCmd = &cobra.Command{
Use: "add",
Short: "Add a new account",
Short: "Log in to an existing account",
Long: "Use this command to add an existing account to the CLI. To create a new account, use the mobile, web, or desktop app",
Run: func(cmd *cobra.Command, args []string) {
recoverWithLog()
ctrl.AddAccount(context.Background())

View file

@ -25,4 +25,4 @@ ente [flags]
* [ente export](ente_export.md) - Starts the export process
* [ente version](ente_version.md) - Prints the current version
###### Auto generated by spf13/cobra on 14-Mar-2024
###### Auto generated by spf13/cobra on 6-May-2024

View file

@ -11,9 +11,9 @@ Manage account settings
### SEE ALSO
* [ente](ente.md) - CLI tool for exporting your photos from ente.io
* [ente account add](ente_account_add.md) - Add a new account
* [ente account add](ente_account_add.md) - Log in to an existing account
* [ente account get-token](ente_account_get-token.md) - Get token for an account for a specific app
* [ente account list](ente_account_list.md) - list configured accounts
* [ente account update](ente_account_update.md) - Update an existing account's export directory
###### Auto generated by spf13/cobra on 14-Mar-2024
###### Auto generated by spf13/cobra on 6-May-2024

View file

@ -1,6 +1,10 @@
## ente account add
Add a new account
Log in to an existing account
### Synopsis
Use this command to add an existing account to the CLI. To create a new account, use the mobile, web, or desktop app
```
ente account add [flags]
@ -16,4 +20,4 @@ ente account add [flags]
* [ente account](ente_account.md) - Manage account settings
###### Auto generated by spf13/cobra on 14-Mar-2024
###### Auto generated by spf13/cobra on 6-May-2024

View file

@ -18,4 +18,4 @@ ente account get-token [flags]
* [ente account](ente_account.md) - Manage account settings
###### Auto generated by spf13/cobra on 14-Mar-2024
###### Auto generated by spf13/cobra on 6-May-2024

View file

@ -16,4 +16,4 @@ ente account list [flags]
* [ente account](ente_account.md) - Manage account settings
###### Auto generated by spf13/cobra on 14-Mar-2024
###### Auto generated by spf13/cobra on 6-May-2024

View file

@ -19,4 +19,4 @@ ente account update [flags]
* [ente account](ente_account.md) - Manage account settings
###### Auto generated by spf13/cobra on 14-Mar-2024
###### Auto generated by spf13/cobra on 6-May-2024

View file

@ -21,4 +21,4 @@ Commands for admin actions like disable or enabling 2fa, bumping up the storage
* [ente admin list-users](ente_admin_list-users.md) - List all users
* [ente admin update-subscription](ente_admin_update-subscription.md) - Update subscription for user
###### Auto generated by spf13/cobra on 14-Mar-2024
###### Auto generated by spf13/cobra on 6-May-2024

View file

@ -18,4 +18,4 @@ ente admin delete-user [flags]
* [ente admin](ente_admin.md) - Commands for admin actions
###### Auto generated by spf13/cobra on 14-Mar-2024
###### Auto generated by spf13/cobra on 6-May-2024

View file

@ -18,4 +18,4 @@ ente admin disable-2fa [flags]
* [ente admin](ente_admin.md) - Commands for admin actions
###### Auto generated by spf13/cobra on 14-Mar-2024
###### Auto generated by spf13/cobra on 6-May-2024

View file

@ -18,4 +18,4 @@ ente admin get-user-id [flags]
* [ente admin](ente_admin.md) - Commands for admin actions
###### Auto generated by spf13/cobra on 14-Mar-2024
###### Auto generated by spf13/cobra on 6-May-2024

View file

@ -17,4 +17,4 @@ ente admin list-users [flags]
* [ente admin](ente_admin.md) - Commands for admin actions
###### Auto generated by spf13/cobra on 14-Mar-2024
###### Auto generated by spf13/cobra on 6-May-2024

View file

@ -23,4 +23,4 @@ ente admin update-subscription [flags]
* [ente admin](ente_admin.md) - Commands for admin actions
###### Auto generated by spf13/cobra on 14-Mar-2024
###### Auto generated by spf13/cobra on 6-May-2024

View file

@ -13,4 +13,4 @@ Authenticator commands
* [ente](ente.md) - CLI tool for exporting your photos from ente.io
* [ente auth decrypt](ente_auth_decrypt.md) - Decrypt authenticator export
###### Auto generated by spf13/cobra on 14-Mar-2024
###### Auto generated by spf13/cobra on 6-May-2024

View file

@ -16,4 +16,4 @@ ente auth decrypt [input] [output] [flags]
* [ente auth](ente_auth.md) - Authenticator commands
###### Auto generated by spf13/cobra on 14-Mar-2024
###### Auto generated by spf13/cobra on 6-May-2024

View file

@ -16,4 +16,4 @@ ente export [flags]
* [ente](ente.md) - CLI tool for exporting your photos from ente.io
###### Auto generated by spf13/cobra on 14-Mar-2024
###### Auto generated by spf13/cobra on 6-May-2024

View file

@ -16,4 +16,4 @@ ente version [flags]
* [ente](ente.md) - CLI tool for exporting your photos from ente.io
###### Auto generated by spf13/cobra on 14-Mar-2024
###### Auto generated by spf13/cobra on 6-May-2024

View file

@ -59,7 +59,7 @@ func (c *ClICtrl) AddAccount(cxt context.Context) {
authResponse, flowErr = c.validateTOTP(cxt, authResponse)
}
if authResponse.EncryptedToken == "" || authResponse.KeyAttributes == nil {
panic("no encrypted token or keyAttributes")
log.Fatalf("missing key attributes or token.\nNote: Please use the mobile, web, or desktop app to create a new account.\nIf you are trying to log in to an existing account, report a bug.")
}
secretInfo, decErr := c.decryptAccSecretInfo(cxt, authResponse, keyEncKey)
if decErr != nil {

View file

@ -1,20 +1,12 @@
name: "Release"
# This will create a new draft release with public artifacts.
# Build the desktop/rc branch of ente-io/ente and create/update a draft release.
#
# Note that a release will only get created if there is an associated tag
# (GitHub releases need a corresponding tag).
#
# The canonical source for this action is in the repository where we keep the
# source code for the Ente Photos desktop app: https://github.com/ente-io/ente
#
# However, it actually lives and runs in the repository that we use for making
# releases: https://github.com/ente-io/photos-desktop
#
# We need two repositories because Electron updater currently doesn't work well
# with monorepos. For more details, see `docs/release.md`.
# For more details, see `docs/release.md` in ente-io/ente.
on:
# Trigger manually or `gh workflow run desktop-release.yml`.
workflow_dispatch:
push:
# Run when a tag matching the pattern "v*" is pushed.
#
@ -38,11 +30,9 @@ jobs:
- name: Checkout code
uses: actions/checkout@v4
with:
# Checkout the tag photosd-v1.x.x from the source code
# repository when we're invoked for tag v1.x.x on the releases
# repository.
# Checkout the desktop/rc branch from the source repository.
repository: ente-io/ente
ref: photosd-${{ github.ref_name }}
ref: desktop/rc
submodules: recursive
- name: Setup node
@ -50,6 +40,11 @@ jobs:
with:
node-version: 20
- name: Increase yarn timeout
# `yarn install` times out sometimes on the Windows runner,
# resulting in flaky builds.
run: yarn config set network-timeout 900000 -g
- name: Install dependencies
run: yarn install
@ -63,13 +58,15 @@ jobs:
uses: ente-io/action-electron-builder@v1.0.0
with:
package_root: desktop
build_script_name: build:ci
# GitHub token, automatically provided to the action
# (No need to define this secret in the repo settings)
github_token: ${{ secrets.GITHUB_TOKEN }}
# If the commit is tagged with a version (e.g. "v1.0.0"),
# release the app after building.
# create a (draft) release after building. Otherwise upload
# assets to the existing draft named after the version.
release: ${{ startsWith(github.ref, 'refs/tags/v') }}
mac_certs: ${{ secrets.MAC_CERTS }}

View file

@ -2,11 +2,17 @@
## v1.7.0 (Unreleased)
v1.7 is a major rewrite to improve the security of our app. We have enabled
sandboxing and disabled node integration for the renderer process. All this
required restructuring our IPC mechanisms, which resulted in a lot of under the
hood changes. The outcome is a more secure app that also uses the latest and
greatest Electron recommendations.
v1.7 is a major rewrite to improve the security of our app. In particular, the
UI and the native parts of the app now run isolated from each other and
communicate only using a predefined IPC boundary.
Other highlights:
- View your photos on big screens and Chromecast devices by using the "Play
album on TV" option in the album menu.
- Support Brazilian Portuguese, German and Russian.
- Provide a checkbox to select all photos in a day.
- Fix a case where the dedup screen would not refresh after removing items.
## v1.6.63

View file

@ -1,46 +1,64 @@
## Releases
Conceptually, the release is straightforward: We push a tag, a GitHub workflow
gets triggered that creates a draft release with artifacts built from that tag.
We then publish that release. The download links on our website, and existing
apps already know how to check for the latest GitHub release and update
accordingly.
Conceptually, the release is straightforward: we trigger a GitHub workflow that
creates a draft release with the built artifacts. When ready, we publish that
release. The download links on our website and existing apps already check the
latest GitHub release and update accordingly.
The complication comes by the fact that Electron Updater (the mechanism that we
use for auto updates) doesn't work well with monorepos. So we need to keep a
separate (non-mono) repository just for doing releases.
The complication comes from the fact that electron-builder's auto updater (the
mechanism that we use for auto updates) doesn't work with monorepos. So we need
to keep a separate (non-mono) repository just for doing releases.
- Source code lives here, in [ente-io/ente](https://github.com/ente-io/ente).
- Releases are done from
[ente-io/photos-desktop](https://github.com/ente-io/photos-desktop).
## Workflow
## Workflow - Release Candidates
The workflow is:
Leading up to the release, we can make one or more draft releases that are not
intended to be published, but serve as test release candidates.
1. Finalize the changes in the source repo.
The workflow for making such "rc" builds is:
- Update the CHANGELOG.
- Update the version in `package.json`
- `git commit -m "[photosd] Release v1.2.3"`
- Open PR, merge into main.
1. Update `package.json` in the source repo to use version `1.x.x-rc`. Create a
new draft release in the release repo with title `1.x.x-rc`. In the tag
input enter `v1.x.x-rc` and select the option to "create a new tag on
publish".
2. Tag the merge commit with a tag matching the pattern `photosd-v1.2.3`, where
`1.2.3` is the version in `package.json`
2. Push code to the `desktop/rc` branch in the source repo.
3. Trigger the GitHub action in the release repo
```sh
git tag photosd-v1.x.x
git push origin photosd-v1.x.x
gh workflow run desktop-release.yml
```
3. Head over to the releases repository and run the trigger script, passing it
the tag _without_ the `photosd-` prefix.
We can do steps 2 and 3 multiple times: each time it'll just update the
artifacts attached to the same draft.
## Workflow - Release
1. Update source repo to set version `1.x.x` in `package.json` and finalize
the CHANGELOG.
2. Push code to the `desktop/rc` branch in the source repo.
3. In the release repo
```sh
./.github/trigger-release.sh v1.x.x
```
4. If the build is successful, tag `desktop/rc` in the source repo.
```sh
# Assuming we're on the desktop/rc branch that just got built
git tag photosd-v1.x.x
git push origin photosd-v1.x.x
```
## Post build
The GitHub Action runs on Windows, Linux and macOS. It produces the artifacts

View file

@ -29,5 +29,3 @@ mac:
arch: [universal]
category: public.app-category.photography
hardenedRuntime: true
notarize: true
afterSign: electron-builder-notarize

View file

@ -1,6 +1,6 @@
{
"name": "ente",
"version": "1.7.0-beta.0",
"version": "1.7.0-rc",
"private": true,
"description": "Desktop client for Ente Photos",
"repository": "github:ente-io/photos-desktop",
@ -11,6 +11,7 @@
"build-main": "tsc && electron-builder",
"build-main:quick": "tsc && electron-builder --dir --config.compression=store --config.mac.identity=null",
"build-renderer": "cd ../web && yarn install && yarn build:photos && cd ../desktop && shx rm -f out && shx ln -sf ../web/apps/photos/out out",
"build:ci": "yarn build-renderer && tsc",
"build:quick": "yarn build-renderer && yarn build-main:quick",
"dev": "concurrently --kill-others --success first --names 'main,rndr' \"yarn dev-main\" \"yarn dev-renderer\"",
"dev-main": "tsc && electron app/main.js",
@ -46,7 +47,6 @@
"concurrently": "^8",
"electron": "^30",
"electron-builder": "25.0.0-alpha.6",
"electron-builder-notarize": "^1.5",
"eslint": "^8",
"prettier": "^3",
"prettier-plugin-organize-imports": "^3",

View file

@ -142,7 +142,7 @@ const createMainWindow = () => {
// Create the main window. This'll show our web content.
const window = new BrowserWindow({
webPreferences: {
preload: path.join(app.getAppPath(), "preload.js"),
preload: path.join(__dirname, "preload.js"),
sandbox: true,
},
// The color to show in the window until the web content gets loaded.
@ -287,13 +287,29 @@ const setupTrayItem = (mainWindow: BrowserWindow) => {
/**
* Older versions of our app used to maintain a cache dir using the main
* process. This has been deprecated in favor of using a normal web cache.
* process. This has been removed in favor of cache on the web layer.
*
* Delete the old cache dir if it exists. This code was added March 2024, and
* can be removed after some time once most people have upgraded to newer
* versions.
* Delete the old cache dir if it exists.
*
* This will happen in two phases. The cache had three subdirectories:
*
* - Two of them, "thumbs" and "files", will be removed now (v1.7.0, May 2024).
*
* - The third one, "face-crops" will be removed once we finish the face search
* changes. See: [Note: Legacy face crops].
*
* This migration code can be removed after some time once most people have
* upgraded to newer versions.
*/
const deleteLegacyDiskCacheDirIfExists = async () => {
const removeIfExists = async (dirPath: string) => {
if (existsSync(dirPath)) {
log.info(`Removing legacy disk cache from ${dirPath}`);
await fs.rm(dirPath, { recursive: true });
}
};
// [Note: Getting the cache path]
//
// The existing code was passing "cache" as a parameter to getPath.
//
// However, "cache" is not a valid parameter to getPath. It works! (for
@ -309,8 +325,8 @@ const deleteLegacyDiskCacheDirIfExists = async () => {
// @ts-expect-error "cache" works but is not part of the public API.
const cacheDir = path.join(app.getPath("cache"), "ente");
if (existsSync(cacheDir)) {
log.info(`Removing legacy disk cache from ${cacheDir}`);
await fs.rm(cacheDir, { recursive: true });
await removeIfExists(path.join(cacheDir, "thumbs"));
await removeIfExists(path.join(cacheDir, "files"));
}
};
@ -375,7 +391,7 @@ const main = () => {
// Continue on with the rest of the startup sequence.
Menu.setApplicationMenu(await createApplicationMenu(mainWindow));
setupTrayItem(mainWindow);
if (!isDev) setupAutoUpdater(mainWindow);
setupAutoUpdater(mainWindow);
try {
await deleteLegacyDiskCacheDirIfExists();

View file

@ -24,6 +24,7 @@ import {
updateOnNextRestart,
} from "./services/app-update";
import {
legacyFaceCrop,
openDirectory,
openLogDirectory,
selectDirectory,
@ -68,6 +69,7 @@ import {
watchUpdateIgnoredFiles,
watchUpdateSyncedFiles,
} from "./services/watch";
import { clearConvertToMP4Results } from "./stream";
/**
* Listen for IPC events sent/invoked by the renderer process, and route them to
@ -107,6 +109,8 @@ export const attachIPCHandlers = () => {
ipcMain.on("clearStores", () => clearStores());
ipcMain.on("clearConvertToMP4Results", () => clearConvertToMP4Results());
ipcMain.handle("saveEncryptionKey", (_, encryptionKey: string) =>
saveEncryptionKey(encryptionKey),
);
@ -170,14 +174,7 @@ export const attachIPCHandlers = () => {
command: string[],
dataOrPathOrZipItem: Uint8Array | string | ZipItem,
outputFileExtension: string,
timeoutMS: number,
) =>
ffmpegExec(
command,
dataOrPathOrZipItem,
outputFileExtension,
timeoutMS,
),
) => ffmpegExec(command, dataOrPathOrZipItem, outputFileExtension),
);
// - ML
@ -198,6 +195,10 @@ export const attachIPCHandlers = () => {
faceEmbedding(input),
);
ipcMain.handle("legacyFaceCrop", (_, faceID: string) =>
legacyFaceCrop(faceID),
);
// - Upload
ipcMain.handle("listZipItems", (_, zipPath: string) =>

View file

@ -5,11 +5,8 @@ import { isDev } from "./utils/electron";
/**
* Initialize logging in the main process.
*
* This will set our underlying logger up to log to a file named `ente.log`,
*
* - on Linux at ~/.config/ente/logs/ente.log
* - on macOS at ~/Library/Logs/ente/ente.log
* - on Windows at %USERPROFILE%\AppData\Roaming\ente\logs\ente.log
* This will set our underlying logger up to log to a file named `ente.log`, see
* [Note: App log path].
*
* On dev builds, it will also log to the console.
*/
@ -62,7 +59,7 @@ const logError = (message: string, e?: unknown) => {
const logError_ = (message: string) => {
log.error(`[main] [error] ${message}`);
if (isDev) console.error(`[error] ${message}`);
console.error(`[error] ${message}`);
};
const logInfo = (...params: unknown[]) => {
@ -96,8 +93,8 @@ export default {
* any arbitrary object that we obtain, say, when in a try-catch handler (in
* JavaScript any arbitrary value can be thrown).
*
* The log is written to disk. In development builds, the log is also
* printed to the main (Node.js) process console.
* The log is written to disk and printed to the main (Node.js) process's
* console.
*/
error: logError,
/**
@ -120,7 +117,7 @@ export default {
* The function can return an arbitrary value which is serialized before
* being logged.
*
* This log is NOT written to disk. And it is printed to the main (Node.js)
* This log is NOT written to disk. It is printed to the main (Node.js)
* process console, but only on development builds.
*/
debug: logDebug,

View file

@ -10,7 +10,6 @@ import { forceCheckForAppUpdates } from "./services/app-update";
import autoLauncher from "./services/auto-launcher";
import { openLogDirectory } from "./services/dir";
import { userPreferences } from "./stores/user-preferences";
import { isDev } from "./utils/electron";
/** Create and return the entries in the app's main menu bar */
export const createApplicationMenu = async (mainWindow: BrowserWindow) => {
@ -24,9 +23,6 @@ export const createApplicationMenu = async (mainWindow: BrowserWindow) => {
const macOSOnly = (options: MenuItemConstructorOptions[]) =>
process.platform == "darwin" ? options : [];
const devOnly = (options: MenuItemConstructorOptions[]) =>
isDev ? options : [];
const handleCheckForUpdates = () => forceCheckForAppUpdates(mainWindow);
const handleViewChangelog = () =>
@ -86,12 +82,14 @@ export const createApplicationMenu = async (mainWindow: BrowserWindow) => {
checked: isAutoLaunchEnabled,
click: toggleAutoLaunch,
},
{
label: "Hide Dock Icon",
type: "checkbox",
checked: shouldHideDockIcon,
click: toggleHideDockIcon,
},
...macOSOnly([
{
label: "Hide Dock Icon",
type: "checkbox",
checked: shouldHideDockIcon,
click: toggleHideDockIcon,
},
]),
],
},
@ -130,11 +128,11 @@ export const createApplicationMenu = async (mainWindow: BrowserWindow) => {
submenu: [
{
role: "startSpeaking",
label: "start speaking",
label: "Start Speaking",
},
{
role: "stopSpeaking",
label: "stop speaking",
label: "Stop Speaking",
},
],
},
@ -145,9 +143,7 @@ export const createApplicationMenu = async (mainWindow: BrowserWindow) => {
label: "View",
submenu: [
{ label: "Reload", role: "reload" },
...devOnly([
{ label: "Toggle Dev Tools", role: "toggleDevTools" },
]),
{ label: "Toggle Dev Tools", role: "toggleDevTools" },
{ type: "separator" },
{ label: "Toggle Full Screen", role: "togglefullscreen" },
],

View file

@ -6,11 +6,90 @@ import { allowWindowClose } from "../../main";
import { AppUpdate } from "../../types/ipc";
import log from "../log";
import { userPreferences } from "../stores/user-preferences";
import { isDev } from "../utils/electron";
export const setupAutoUpdater = (mainWindow: BrowserWindow) => {
autoUpdater.logger = electronLog;
autoUpdater.autoDownload = false;
/**
* [Note: Testing auto updates]
*
* By default, we skip checking for updates automatically in dev builds.
* This is because even if we did check, installing the update would fail
* (at least on macOS), since auto updates only work for signed builds.
*
* So end to end testing of updates requires using a temporary GitHub
* repository and signed builds therein. More on this later.
*
* ---------------
*
* [Note: Testing auto updates - Sanity checks]
*
* However, for partial checks of the UI flow, something like the following
* can be used to do a test of the update process (up until the actual
* installation itself).
*
* Create an `app/dev-app-update.yml` with:
*
* provider: generic
* url: http://127.0.0.1:7777/
*
* and start a local webserver in some directory:
*
* python3 -m http.server 7777
*
* In this directory, put `latest-mac.yml` and the DMG file that this YAML
* file refers to.
*
* Alternatively, `dev-app-update.yml` can point to some arbitrary GitHub
* repository too, e.g.:
*
* provider: github
* owner: ente-io
* repo: test-desktop-updates
*
* Now we can use the "Check for updates..." menu option to trigger the
* update flow.
*/
autoUpdater.forceDevUpdateConfig = isDev;
if (isDev) return;
/**
* [Note: Testing auto updates - End to end checks]
*
* Since end-to-end update testing can only be done with signed builds, the
* easiest way is to create temporary builds in a test repository.
*
* Let us say we have v2.0.0 about to go out. We also have build artifacts for
* v2.0.0 in some draft release in our normal release repository.
*
* Create a new test repository, say `ente-io/test-desktop-updates`. In this
* repository, create a release v2.0.0, attaching the actual build
* artifacts. Make this release the latest.
*
* Now we need to create an old signed build.
*
* First, modify `package.json` to put in a version number older than the
* new version number that we want to test updating to, e.g. `v1.0.0-test`.
*
* Then uncomment the following block of code. This tells the auto updater
* to use `ente-io/test-desktop-updates` to get updates.
*
* With these two changes (older version and setFeedURL), create a new
* signed release build on CI. Install this build - it will check for
* updates against the temporary feed URL that we set, and we'll be able
* to verify the full update flow.
*/
/*
autoUpdater.setFeedURL({
provider: "github",
owner: "ente-io",
repo: "test-desktop-updates",
});
*/
const oneDay = 1 * 24 * 60 * 60 * 1000;
setInterval(() => void checkForUpdatesAndNotify(mainWindow), oneDay);
void checkForUpdatesAndNotify(mainWindow);
@ -61,17 +140,17 @@ const checkForUpdatesAndNotify = async (mainWindow: BrowserWindow) => {
log.debug(() => "Attempting auto update");
await autoUpdater.downloadUpdate();
let timeoutId: ReturnType<typeof setTimeout>;
let timeout: ReturnType<typeof setTimeout>;
const fiveMinutes = 5 * 60 * 1000;
autoUpdater.on("update-downloaded", () => {
timeoutId = setTimeout(
timeout = setTimeout(
() => showUpdateDialog({ autoUpdatable: true, version }),
fiveMinutes,
);
});
autoUpdater.on("error", (error) => {
clearTimeout(timeoutId);
clearTimeout(timeout);
log.error("Auto update failed", error);
showUpdateDialog({ autoUpdatable: false, version });
});
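For reference, the two `dev-app-update.yml` variants described in [Note: Testing auto updates - Sanity checks] above, written out as one standalone file (pick one provider; the second is commented out):

```yaml
# app/dev-app-update.yml
#
# Variant 1: a local static server (python3 -m http.server 7777) that
# serves latest-mac.yml and the DMG it refers to.
provider: generic
url: http://127.0.0.1:7777/

# Variant 2: point at an arbitrary GitHub repository instead.
# provider: github
# owner: ente-io
# repo: test-desktop-updates
```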

View file

@ -27,14 +27,14 @@ class AutoLauncher {
}
async toggleAutoLaunch() {
const isEnabled = await this.isEnabled();
const wasEnabled = await this.isEnabled();
const autoLaunch = this.autoLaunch;
if (autoLaunch) {
if (isEnabled) await autoLaunch.disable();
if (wasEnabled) await autoLaunch.disable();
else await autoLaunch.enable();
} else {
if (isEnabled) app.setLoginItemSettings({ openAtLogin: false });
else app.setLoginItemSettings({ openAtLogin: true });
const openAtLogin = !wasEnabled;
app.setLoginItemSettings({ openAtLogin });
}
}
@ -42,8 +42,7 @@ class AutoLauncher {
if (this.autoLaunch) {
return app.commandLine.hasSwitch("hidden");
} else {
// TODO(MR): This apparently doesn't work anymore.
return app.getLoginItemSettings().wasOpenedAtLogin;
return app.getLoginItemSettings().openAtLogin;
}
}
}

View file

@ -1,5 +1,7 @@
import { shell } from "electron/common";
import { app, dialog } from "electron/main";
import { existsSync } from "fs";
import fs from "node:fs/promises";
import path from "node:path";
import { posixPath } from "../utils/electron";
@ -38,14 +40,54 @@ export const openLogDirectory = () => openDirectory(logDirectoryPath());
*
* [Note: Electron app paths]
*
* By default, these paths are at the following locations:
* There are usually three paths we need to be aware of.
*
* - macOS: `~/Library/Application Support/ente`
* First is the "appData". We can obtain this with `app.getPath("appData")`.
* This is the per-user application data directory. This is usually the following:
*
* - Windows: `%APPDATA%`, e.g. `C:\Users\<username>\AppData\Local`
* - Linux: `~/.config`
* - macOS: `~/Library/Application Support`
*
* Now, if we suffix the app's name onto the appData directory, we get the
* "userData" directory. This is the **primary** place applications are meant to
* store user's data, e.g. various configuration files and saved state.
*
* During development, our app name is "Electron", so this'd be, for example,
* `~/Library/Application Support/Electron` if we run using `yarn dev`. For the
* packaged production app, our app name is "ente", so this would be:
*
* - Windows: `%APPDATA%\ente`, e.g. `C:\Users\<username>\AppData\Local\ente`
* - Linux: `~/.config/ente`
* - Windows: `%APPDATA%`, e.g. `C:\Users\<username>\AppData\Local\ente`
* - Windows: C:\Users\<you>\AppData\Local\<Your App Name>
* - macOS: `~/Library/Application Support/ente`
*
* Note that Chromium also stores the browser state, e.g. localStorage or disk
* caches, in userData.
*
* https://www.electronjs.org/docs/latest/api/app
*
* [Note: App log path]
*
* Finally, there is the "logs" directory. This is not within "appData" but has
* a slightly different OS specific path. Since our log file is named
* "ente.log", it can be found at:
*
* - macOS: ~/Library/Logs/ente/ente.log (production)
* - macOS: ~/Library/Logs/Electron/ente.log (dev)
* - Linux: ~/.config/ente/logs/ente.log
* - Windows: %USERPROFILE%\AppData\Roaming\ente\logs\ente.log
*/
const logDirectoryPath = () => app.getPath("logs");
/**
* See: [Note: Legacy face crops]
*/
export const legacyFaceCrop = async (
faceID: string,
): Promise<Uint8Array | undefined> => {
// See: [Note: Getting the cache path]
// @ts-expect-error "cache" works but is not part of the public API.
const cacheDir = path.join(app.getPath("cache"), "ente");
const filePath = path.join(cacheDir, "face-crops", faceID);
return existsSync(filePath) ? await fs.readFile(filePath) : undefined;
};
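As a companion to [Note: Electron app paths], a minimal sketch that logs the three locations it discusses. Unlike the "cache" key used above, all three of these keys are part of Electron's documented `app.getPath` API:

```ts
import { app } from "electron/main";

// Log the per-user locations discussed in [Note: Electron app paths].
export const logAppPaths = () => {
    console.log("appData: ", app.getPath("appData"));
    console.log("userData:", app.getPath("userData"));
    console.log("logs:    ", app.getPath("logs"));
};
```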

View file

@ -1,11 +1,10 @@
import pathToFfmpeg from "ffmpeg-static";
import fs from "node:fs/promises";
import type { ZipItem } from "../../types/ipc";
import log from "../log";
import { ensure, withTimeout } from "../utils/common";
import { ensure } from "../utils/common";
import { execAsync } from "../utils/electron";
import {
deleteTempFile,
deleteTempFileIgnoringErrors,
makeFileForDataOrPathOrZipItem,
makeTempFilePath,
} from "../utils/temp";
@ -46,13 +45,7 @@ export const ffmpegExec = async (
command: string[],
dataOrPathOrZipItem: Uint8Array | string | ZipItem,
outputFileExtension: string,
timeoutMS: number,
): Promise<Uint8Array> => {
// TODO (MR): This currently copies files for both input (when
// dataOrPathOrZipItem is data) and output. This needs to be tested
// extremely large video files when invoked downstream of `convertToMP4` in
// the web code.
const {
path: inputFilePath,
isFileTemporary: isInputFileTemporary,
@ -69,17 +62,13 @@ export const ffmpegExec = async (
outputFilePath,
);
if (timeoutMS) await withTimeout(execAsync(cmd), 30 * 1000);
else await execAsync(cmd);
await execAsync(cmd);
return fs.readFile(outputFilePath);
} finally {
try {
if (isInputFileTemporary) await deleteTempFile(inputFilePath);
await deleteTempFile(outputFilePath);
} catch (e) {
log.error("Could not clean up temp files", e);
}
if (isInputFileTemporary)
await deleteTempFileIgnoringErrors(inputFilePath);
await deleteTempFileIgnoringErrors(outputFilePath);
}
};
@ -112,3 +101,32 @@ const ffmpegBinaryPath = () => {
// https://github.com/eugeneware/ffmpeg-static/issues/16
return ensure(pathToFfmpeg).replace("app.asar", "app.asar.unpacked");
};
/**
* A variant of {@link ffmpegExec} adapted to work with streams so that it can
* handle the MP4 conversion of large video files.
*
* See: [Note: Convert to MP4]
* @param inputFilePath The path to a file on the user's local file system. This
* is the video we want to convert.
* @param outputFilePath The path to a file on the user's local file system where
* we should write the converted MP4 video.
*/
export const ffmpegConvertToMP4 = async (
inputFilePath: string,
outputFilePath: string,
): Promise<void> => {
const command = [
ffmpegPathPlaceholder,
"-i",
inputPathPlaceholder,
"-preset",
"ultrafast",
outputPathPlaceholder,
];
const cmd = substitutePlaceholders(command, inputFilePath, outputFilePath);
await execAsync(cmd);
};
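The actual call site of `ffmpegConvertToMP4` is in the stream handler (which imports it, see below). A hypothetical usage sketch, using only the temp file helpers already imported in this file:

```ts
// Hypothetical caller: convert the video at inputFilePath to MP4 via a
// temporary output file, returning the converted bytes and always
// cleaning up the temp file.
const convertToMP4Sketch = async (inputFilePath: string) => {
    const outputFilePath = await makeTempFilePath("mp4");
    try {
        await ffmpegConvertToMP4(inputFilePath, outputFilePath);
        return await fs.readFile(outputFilePath);
    } finally {
        await deleteTempFileIgnoringErrors(outputFilePath);
    }
};
```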

View file

@ -6,7 +6,7 @@ import { CustomErrorMessage, type ZipItem } from "../../types/ipc";
import log from "../log";
import { execAsync, isDev } from "../utils/electron";
import {
deleteTempFile,
deleteTempFileIgnoringErrors,
makeFileForDataOrPathOrZipItem,
makeTempFilePath,
} from "../utils/temp";
@ -23,12 +23,8 @@ export const convertToJPEG = async (imageData: Uint8Array) => {
await execAsync(command);
return new Uint8Array(await fs.readFile(outputFilePath));
} finally {
try {
await deleteTempFile(inputFilePath);
await deleteTempFile(outputFilePath);
} catch (e) {
log.error("Could not clean up temp files", e);
}
await deleteTempFileIgnoringErrors(inputFilePath);
await deleteTempFileIgnoringErrors(outputFilePath);
}
};
@ -49,6 +45,9 @@ const convertToJPEGCommand = (
];
case "linux":
// The bundled binary is an ELF x86-64 executable.
if (process.arch != "x64")
throw new Error(CustomErrorMessage.NotAvailable);
return [
imageMagickPath(),
inputFilePath,
@ -79,7 +78,7 @@ export const generateImageThumbnail = async (
const outputFilePath = await makeTempFilePath("jpeg");
// Construct the command first, it may throw `NotAvailable` on win32.
// Construct the command first, it may throw `NotAvailable`.
let quality = 70;
let command = generateImageThumbnailCommand(
inputFilePath,
@ -94,6 +93,9 @@ export const generateImageThumbnail = async (
let thumbnail: Uint8Array;
do {
await execAsync(command);
// TODO(MR): release 1.7
// TODO(MR): imagemagick debugging. Remove me after verifying logs.
log.info(`Generated thumbnail using ${command.join(" ")}`);
thumbnail = new Uint8Array(await fs.readFile(outputFilePath));
quality -= 10;
command = generateImageThumbnailCommand(
@ -105,12 +107,9 @@ export const generateImageThumbnail = async (
} while (thumbnail.length > maxSize && quality > 50);
return thumbnail;
} finally {
try {
if (isInputFileTemporary) await deleteTempFile(inputFilePath);
await deleteTempFile(outputFilePath);
} catch (e) {
log.error("Could not clean up temp files", e);
}
if (isInputFileTemporary)
await deleteTempFileIgnoringErrors(inputFilePath);
await deleteTempFileIgnoringErrors(outputFilePath);
}
};
@@ -138,14 +137,17 @@ const generateImageThumbnailCommand = (
];
case "linux":
// The bundled binary is an ELF x86-64 executable.
if (process.arch != "x64")
throw new Error(CustomErrorMessage.NotAvailable);
return [
imageMagickPath(),
inputFilePath,
"-auto-orient",
"-define",
`jpeg:size=${2 * maxDimension}x${2 * maxDimension}`,
inputFilePath,
"-auto-orient",
"-thumbnail",
`${maxDimension}x${maxDimension}>`,
`${maxDimension}x${maxDimension}`,
"-unsharp",
"0x.5",
"-quality",

View file

@@ -14,6 +14,15 @@ export const clearStores = () => {
watchStore.clear();
};
/**
* [Note: Safe storage keys]
*
* On macOS, `safeStorage` stores our data under a Keychain entry named
* "<app-name> Safe Storage". Which resolves to:
*
* - Electron Safe Storage (dev)
* - ente Safe Storage (prod)
*/
export const saveEncryptionKey = (encryptionKey: string) => {
const encryptedKey = safeStorage.encryptString(encryptionKey);
const b64EncryptedKey = Buffer.from(encryptedKey).toString("base64");
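For context, the read path is the inverse of the above. A minimal sketch, assuming the base64 encoded encrypted key is persisted in an electron-store instance (the store and key names below are assumptions, not taken from this diff):

```ts
import { safeStorage } from "electron/main";
import Store from "electron-store";

// Assumption: a dedicated store holding the base64 encoded encrypted key.
const keyStore = new Store({ name: "safeStorage" });

export const encryptionKey = (): string | undefined => {
    const b64EncryptedKey = keyStore.get("encryptionKey");
    if (typeof b64EncryptedKey != "string") return undefined;
    // decryptString uses the OS keychain entry described in the note above.
    return safeStorage.decryptString(Buffer.from(b64EncryptedKey, "base64"));
};
```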

View file

@@ -3,13 +3,20 @@
*/
import { net, protocol } from "electron/main";
import StreamZip from "node-stream-zip";
import { randomUUID } from "node:crypto";
import { createWriteStream, existsSync } from "node:fs";
import fs from "node:fs/promises";
import { Readable } from "node:stream";
import { ReadableStream } from "node:stream/web";
import { pathToFileURL } from "node:url";
import log from "./log";
import { ffmpegConvertToMP4 } from "./services/ffmpeg";
import { ensure } from "./utils/common";
import {
deleteTempFile,
deleteTempFileIgnoringErrors,
makeTempFilePath,
} from "./utils/temp";
/**
* Register a protocol handler that we use for streaming large files between the
@@ -34,119 +41,117 @@ import { ensure } from "./utils/common";
* Depends on {@link registerPrivilegedSchemes}.
*/
export const registerStreamProtocol = () => {
protocol.handle("stream", async (request: Request) => {
const url = request.url;
// The request URL contains the command to run as the host, and the
// pathname of the file(s) as the search params.
const { host, searchParams } = new URL(url);
switch (host) {
case "read":
return handleRead(ensure(searchParams.get("path")));
case "read-zip":
return handleReadZip(
ensure(searchParams.get("zipPath")),
ensure(searchParams.get("entryName")),
);
case "write":
return handleWrite(ensure(searchParams.get("path")), request);
default:
return new Response("", { status: 404 });
protocol.handle("stream", (request: Request) => {
try {
return handleStreamRequest(request);
} catch (e) {
log.error(`Failed to handle stream request for ${request.url}`, e);
return new Response(String(e), { status: 500 });
}
});
};
const handleRead = async (path: string) => {
try {
const res = await net.fetch(pathToFileURL(path).toString());
if (res.ok) {
// net.fetch already seems to add "Content-Type" and "Last-Modified"
// headers, but I couldn't find documentation for this. In any case,
// since we already are stat-ting the file for the "Content-Length",
// we explicitly add the "X-Last-Modified-Ms" too,
//
// 1. Guaranteeing its presence,
//
// 2. Having it be in the exact format we want (no string <-> date
// conversions),
//
// 3. Retaining milliseconds.
const handleStreamRequest = async (request: Request): Promise<Response> => {
const url = request.url;
// The request URL contains the command to run as the host, and the
// pathname of the file(s) as the search params.
const { host, searchParams } = new URL(url);
switch (host) {
case "read":
return handleRead(ensure(searchParams.get("path")));
const stat = await fs.stat(path);
case "read-zip":
return handleReadZip(
ensure(searchParams.get("zipPath")),
ensure(searchParams.get("entryName")),
);
// Add the file's size as the Content-Length header.
const fileSize = stat.size;
res.headers.set("Content-Length", `${fileSize}`);
case "write":
return handleWrite(ensure(searchParams.get("path")), request);
// Add the file's last modified time (as epoch milliseconds).
const mtimeMs = stat.mtimeMs;
res.headers.set("X-Last-Modified-Ms", `${mtimeMs}`);
case "convert-to-mp4": {
const token = searchParams.get("token");
const done = searchParams.get("done") !== null;
return token
? done
? handleConvertToMP4ReadDone(token)
: handleConvertToMP4Read(token)
: handleConvertToMP4Write(request);
}
return res;
} catch (e) {
log.error(`Failed to read stream at ${path}`, e);
return new Response(`Failed to read stream: ${String(e)}`, {
status: 500,
});
default:
return new Response("", { status: 404 });
}
};
const handleRead = async (path: string) => {
const res = await net.fetch(pathToFileURL(path).toString());
if (res.ok) {
// net.fetch already seems to add "Content-Type" and "Last-Modified"
// headers, but I couldn't find documentation for this. In any case,
// since we already are stat-ting the file for the "Content-Length", we
// explicitly add the "X-Last-Modified-Ms" too,
//
// 1. Guaranteeing its presence,
//
// 2. Having it be in the exact format we want (no string <-> date
// conversions),
//
// 3. Retaining milliseconds.
const stat = await fs.stat(path);
// Add the file's size as the Content-Length header.
const fileSize = stat.size;
res.headers.set("Content-Length", `${fileSize}`);
// Add the file's last modified time (as epoch milliseconds).
const mtimeMs = stat.mtimeMs;
res.headers.set("X-Last-Modified-Ms", `${mtimeMs}`);
}
return res;
};
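On the renderer side, such a response can be consumed with a plain `fetch`. A hypothetical sketch (the web codebase has its own wrappers, which may differ):

```ts
// Renderer-side sketch: read a file served by handleRead above.
const readViaStream = async (path: string) => {
    const res = await fetch(`stream://read?${new URLSearchParams({ path })}`);
    if (!res.ok) throw new Error(`Failed to read ${path}: HTTP ${res.status}`);
    // The custom header set by handleRead survives the round trip.
    const lastModifiedMs = Number(res.headers.get("X-Last-Modified-Ms"));
    const data = new Uint8Array(await res.arrayBuffer());
    return { data, lastModifiedMs };
};
```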
const handleReadZip = async (zipPath: string, entryName: string) => {
try {
const zip = new StreamZip.async({ file: zipPath });
const entry = await zip.entry(entryName);
if (!entry) return new Response("", { status: 404 });
const zip = new StreamZip.async({ file: zipPath });
const entry = await zip.entry(entryName);
if (!entry) return new Response("", { status: 404 });
// This returns an "old style" NodeJS.ReadableStream.
const stream = await zip.stream(entry);
// Convert it into a new style NodeJS.Readable.
const nodeReadable = new Readable().wrap(stream);
// Then convert it into a Web stream.
const webReadableStreamAny = Readable.toWeb(nodeReadable);
// However, we get a ReadableStream<any> now. This doesn't go into the
// `BodyInit` expected by the Response constructor, which wants a
// ReadableStream<Uint8Array>. Force a cast.
const webReadableStream =
webReadableStreamAny as ReadableStream<Uint8Array>;
// This returns an "old style" NodeJS.ReadableStream.
const stream = await zip.stream(entry);
// Convert it into a new style NodeJS.Readable.
const nodeReadable = new Readable().wrap(stream);
// Then convert it into a Web stream.
const webReadableStreamAny = Readable.toWeb(nodeReadable);
// However, we get a ReadableStream<any> now. This doesn't go into the
// `BodyInit` expected by the Response constructor, which wants a
// ReadableStream<Uint8Array>. Force a cast.
const webReadableStream =
webReadableStreamAny as ReadableStream<Uint8Array>;
// Close the zip handle when the underlying stream closes.
stream.on("end", () => void zip.close());
// Close the zip handle when the underlying stream closes.
stream.on("end", () => void zip.close());
return new Response(webReadableStream, {
headers: {
// We don't know the exact type, but it doesn't really matter,
// just set it to a generic binary content-type so that the
// browser doesn't tinker with it thinking of it as text.
"Content-Type": "application/octet-stream",
"Content-Length": `${entry.size}`,
// While it is documented that entry.time is the modification
// time, the units are not mentioned. By seeing the source code,
// we can verify that it is indeed epoch milliseconds. See
// `parseZipTime` in the node-stream-zip source,
// https://github.com/antelle/node-stream-zip/blob/master/node_stream_zip.js
"X-Last-Modified-Ms": `${entry.time}`,
},
});
} catch (e) {
log.error(
`Failed to read entry ${entryName} from zip file at ${zipPath}`,
e,
);
return new Response(`Failed to read stream: ${String(e)}`, {
status: 500,
});
}
return new Response(webReadableStream, {
headers: {
// We don't know the exact type, but it doesn't really matter, just
// set it to a generic binary content-type so that the browser
// doesn't tinker with it thinking of it as text.
"Content-Type": "application/octet-stream",
"Content-Length": `${entry.size}`,
// While it is documented that entry.time is the modification time,
// the units are not mentioned. By seeing the source code, we can
// verify that it is indeed epoch milliseconds. See `parseZipTime`
// in the node-stream-zip source,
// https://github.com/antelle/node-stream-zip/blob/master/node_stream_zip.js
"X-Last-Modified-Ms": `${entry.time}`,
},
});
};
const handleWrite = async (path: string, request: Request) => {
try {
await writeStream(path, ensure(request.body));
return new Response("", { status: 200 });
} catch (e) {
log.error(`Failed to write stream to ${path}`, e);
return new Response(`Failed to write stream: ${String(e)}`, {
status: 500,
});
}
await writeStream(path, ensure(request.body));
return new Response("", { status: 200 });
};
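And the corresponding renderer-side write, again a hypothetical sketch:

```ts
// Renderer-side sketch: stream a request body to handleWrite above.
const writeViaStream = async (
    path: string,
    body: ReadableStream<Uint8Array>,
) => {
    const res = await fetch(`stream://write?${new URLSearchParams({ path })}`, {
        method: "POST",
        body,
        // Chromium requires half-duplex to be requested explicitly when the
        // request body is a stream; the option isn't in the DOM types yet.
        duplex: "half",
    } as RequestInit);
    if (!res.ok) throw new Error(`Failed to write ${path}: HTTP ${res.status}`);
};
```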
/**
@@ -154,7 +159,7 @@ const handleWrite = async (path: string, request: Request) => {
*
* The returned promise resolves when the write completes.
*
* @param filePath The local filesystem path where the file should be written.
* @param filePath The local file system path where the file should be written.
*
* @param readableStream A web
* [ReadableStream](https://developer.mozilla.org/en-US/docs/Web/API/ReadableStream).
@@ -181,3 +186,84 @@ const writeNodeStream = async (filePath: string, fileStream: Readable) => {
});
});
};
/**
* A map from token to file paths for convert-to-mp4 requests that we have
* received.
*/
const convertToMP4Results = new Map<string, string>();
/**
* Clear any in-memory state for in-flight convert-to-mp4 requests. Meant to be
* called during logout.
*/
export const clearConvertToMP4Results = () => convertToMP4Results.clear();
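This is exported so that the main process's IPC layer can invoke it on logout. The wiring itself is not part of this hunk; presumably it looks something like this sketch (the module path is an assumption):

```ts
import { ipcMain } from "electron/main";
import { clearConvertToMP4Results } from "./stream";

// Sketch: mirrors the `clearConvertToMP4Results` send in the preload script.
ipcMain.on("clearConvertToMP4Results", () => clearConvertToMP4Results());
```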
/**
* [Note: Convert to MP4]
*
* When we want to convert a video to MP4, if we were to send the entire
* contents of the video from the renderer to the main process over IPC, it just
* causes the renderer to run out of memory and restart when the videos are very
* large. So we need to stream the original video renderer → main, and then
* stream back the converted video main → renderer.
*
* Currently Chromium does not support bi-directional streaming ("full" duplex
* mode for the Web fetch API). So we need to simulate that using two different
* streaming requests.
*
* renderer → main    stream://convert-to-mp4
*                    request.body is the original video
*                    response is a token
*
* renderer → main    stream://convert-to-mp4?token=<token>
*                    response.body is the converted video
*
* renderer → main    stream://convert-to-mp4?token=<token>&done
*                    200 OK
*
* Note that the conversion itself is not streaming. The conversion still
* happens in a single shot; we are just streaming the data across the IPC
* boundary to allow us to pass large amounts of data without running out of
* memory.
*
* See also: [Note: IPC streams]
*/
const handleConvertToMP4Write = async (request: Request) => {
const inputTempFilePath = await makeTempFilePath();
await writeStream(inputTempFilePath, ensure(request.body));
const outputTempFilePath = await makeTempFilePath("mp4");
try {
await ffmpegConvertToMP4(inputTempFilePath, outputTempFilePath);
} catch (e) {
log.error("Conversion to MP4 failed", e);
await deleteTempFileIgnoringErrors(outputTempFilePath);
throw e;
} finally {
await deleteTempFileIgnoringErrors(inputTempFilePath);
}
const token = randomUUID();
convertToMP4Results.set(token, outputTempFilePath);
return new Response(token, { status: 200 });
};
const handleConvertToMP4Read = async (token: string) => {
const filePath = convertToMP4Results.get(token);
if (!filePath)
return new Response(`Unknown token ${token}`, { status: 404 });
return net.fetch(pathToFileURL(filePath).toString());
};
const handleConvertToMP4ReadDone = async (token: string) => {
const filePath = convertToMP4Results.get(token);
if (!filePath)
return new Response(`Unknown token ${token}`, { status: 404 });
await deleteTempFile(filePath);
convertToMP4Results.delete(token);
return new Response("", { status: 200 });
};
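Putting the three legs together, the renderer side of this exchange might look roughly like the sketch below (the actual helper lives in the web codebase and may differ):

```ts
// Renderer-side sketch of [Note: Convert to MP4].
const convertToMP4 = async (video: Blob): Promise<Blob> => {
    // Leg 1: stream the original video to main; the response is a token.
    const token = await fetch("stream://convert-to-mp4", {
        method: "POST",
        body: video,
    }).then((res) => res.text());
    try {
        // Leg 2: stream the converted video back.
        const res = await fetch(`stream://convert-to-mp4?token=${token}`);
        if (!res.ok) throw new Error(`Conversion failed: HTTP ${res.status}`);
        return await res.blob();
    } finally {
        // Leg 3: tell main it can delete the temporary output file.
        await fetch(`stream://convert-to-mp4?token=${token}&done`);
    }
};
```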

View file

@@ -13,32 +13,3 @@ export const ensure = <T>(v: T | null | undefined): T => {
if (v === undefined) throw new Error("Required value was not found");
return v;
};
/**
* Wait for {@link ms} milliseconds
*
* This function is a promisified `setTimeout`. It returns a promise that
* resolves after {@link ms} milliseconds.
*/
export const wait = (ms: number) =>
new Promise((resolve) => setTimeout(resolve, ms));
/**
* Await the given {@link promise} for {@link timeoutMS} milliseconds. If it
* does not resolve within {@link timeoutMS}, then reject with a timeout error.
*/
export const withTimeout = async <T>(promise: Promise<T>, ms: number) => {
let timeoutId: ReturnType<typeof setTimeout>;
const rejectOnTimeout = new Promise<T>((_, reject) => {
timeoutId = setTimeout(
() => reject(new Error("Operation timed out")),
ms,
);
});
const promiseAndCancelTimeout = async () => {
const result = await promise;
clearTimeout(timeoutId);
return result;
};
return Promise.race([promiseAndCancelTimeout(), rejectOnTimeout]);
};

View file

@@ -49,12 +49,12 @@ export const posixPath = (platformPath: string) =>
* > output, this might not be the best option and it might be better to use the
* > underlying functions.
*/
export const execAsync = (command: string | string[]) => {
export const execAsync = async (command: string | string[]) => {
const escapedCommand = Array.isArray(command)
? shellescape(command)
: command;
const startTime = Date.now();
const result = execAsync_(escapedCommand);
const result = await execAsync_(escapedCommand);
log.debug(
() => `${escapedCommand} (${Math.round(Date.now() - startTime)} ms)`,
);

View file

@@ -4,6 +4,7 @@ import { existsSync } from "node:fs";
import fs from "node:fs/promises";
import path from "node:path";
import type { ZipItem } from "../../types/ipc";
import log from "../log";
import { ensure } from "./common";
/**
@@ -62,6 +63,19 @@ export const deleteTempFile = async (tempFilePath: string) => {
await fs.rm(tempFilePath, { force: true });
};
/**
* A variant of {@link deleteTempFile} that suppresses any errors, making it
* safe to call multiple deletions in sequence without an earlier failure
* causing the rest to be skipped.
*/
export const deleteTempFileIgnoringErrors = async (tempFilePath: string) => {
try {
await deleteTempFile(tempFilePath);
} catch (e) {
log.error(`Could not delete temporary file at path ${tempFilePath}`, e);
}
};
/** The result of {@link makeFileForDataOrPathOrZipItem}. */
interface FileForDataOrPathOrZipItem {
/**

View file

@@ -65,6 +65,9 @@ const selectDirectory = () => ipcRenderer.invoke("selectDirectory");
const clearStores = () => ipcRenderer.send("clearStores");
const clearConvertToMP4Results = () =>
ipcRenderer.send("clearConvertToMP4Results");
const encryptionKey = () => ipcRenderer.invoke("encryptionKey");
const saveEncryptionKey = (encryptionKey: string) =>
@@ -140,14 +143,12 @@ const ffmpegExec = (
command: string[],
dataOrPathOrZipItem: Uint8Array | string | ZipItem,
outputFileExtension: string,
timeoutMS: number,
) =>
ipcRenderer.invoke(
"ffmpegExec",
command,
dataOrPathOrZipItem,
outputFileExtension,
timeoutMS,
);
// - ML
@@ -164,6 +165,9 @@ const detectFaces = (input: Float32Array) =>
const faceEmbedding = (input: Float32Array) =>
ipcRenderer.invoke("faceEmbedding", input);
const legacyFaceCrop = (faceID: string) =>
ipcRenderer.invoke("legacyFaceCrop", faceID);
// - Watch
const watchGet = () => ipcRenderer.invoke("watchGet");
@@ -305,6 +309,7 @@ contextBridge.exposeInMainWorld("electron", {
openLogDirectory,
selectDirectory,
clearStores,
clearConvertToMP4Results,
encryptionKey,
saveEncryptionKey,
onMainWindowFocus,
@@ -341,6 +346,7 @@ contextBridge.exposeInMainWorld("electron", {
clipTextEmbeddingIfAvailable,
detectFaces,
faceEmbedding,
legacyFaceCrop,
// - Watch

View file

@@ -7,29 +7,6 @@
resolved "https://registry.yarnpkg.com/7zip-bin/-/7zip-bin-5.2.0.tgz#7a03314684dd6572b7dfa89e68ce31d60286854d"
integrity sha512-ukTPVhqG4jNzMro2qA9HSCSSVJN3aN7tlb+hfqYCt3ER0yWroeA2VR38MNrOHLQ/cVj+DaIMad0kFCtWWowh/A==
"@babel/code-frame@^7.0.0":
version "7.24.2"
resolved "https://registry.yarnpkg.com/@babel/code-frame/-/code-frame-7.24.2.tgz#718b4b19841809a58b29b68cde80bc5e1aa6d9ae"
integrity sha512-y5+tLQyV8pg3fsiln67BVLD1P13Eg4lh5RW9mF0zUuvLrv9uIQ4MCL+CRT+FTsBlBjcIan6PGsLcBN0m3ClUyQ==
dependencies:
"@babel/highlight" "^7.24.2"
picocolors "^1.0.0"
"@babel/helper-validator-identifier@^7.24.5":
version "7.24.5"
resolved "https://registry.yarnpkg.com/@babel/helper-validator-identifier/-/helper-validator-identifier-7.24.5.tgz#918b1a7fa23056603506370089bd990d8720db62"
integrity sha512-3q93SSKX2TWCG30M2G2kwaKeTYgEUp5Snjuj8qm729SObL6nbtUldAi37qbxkD5gg3xnBio+f9nqpSepGZMvxA==
"@babel/highlight@^7.24.2":
version "7.24.5"
resolved "https://registry.yarnpkg.com/@babel/highlight/-/highlight-7.24.5.tgz#bc0613f98e1dd0720e99b2a9ee3760194a704b6e"
integrity sha512-8lLmua6AVh/8SLJRRVD6V8p73Hir9w5mJrhE+IPpILG31KKlI9iz5zmBYKcWPS59qSfgP9RaSBQSHHE81WKuEw==
dependencies:
"@babel/helper-validator-identifier" "^7.24.5"
chalk "^2.4.2"
js-tokens "^4.0.0"
picocolors "^1.0.0"
"@babel/runtime@^7.21.0":
version "7.24.5"
resolved "https://registry.yarnpkg.com/@babel/runtime/-/runtime-7.24.5.tgz#230946857c053a36ccc66e1dd03b17dd0c4ed02c"
@@ -339,9 +316,9 @@
integrity sha512-nG96G3Wp6acyAgJqGasjODb+acrI7KltPiRxzHPXnP3NgI28bpQDRv53olbqGXbfcgF5aiiHmO3xpwEpS5Ld9g==
"@types/node@*", "@types/node@^20.9.0":
version "20.12.7"
resolved "https://registry.yarnpkg.com/@types/node/-/node-20.12.7.tgz#04080362fa3dd6c5822061aa3124f5c152cff384"
integrity sha512-wq0cICSkRLVaf3UGLMGItu/PtdY7oaXaI/RVU+xliKVOtRna3PRY57ZDfztpDL0n11vfymMUnXv8QwYCO7L1wg==
version "20.12.11"
resolved "https://registry.yarnpkg.com/@types/node/-/node-20.12.11.tgz#c4ef00d3507000d17690643278a60dc55a9dc9be"
integrity sha512-vDg9PZ/zi+Nqp6boSOT7plNuthRugEKixDv5sFTIpkE89MmNtEArAShI4mxuX2+UrLEe9pxC1vm2cjm9YlWbJw==
dependencies:
undici-types "~5.26.4"
@@ -350,11 +327,6 @@
resolved "https://registry.yarnpkg.com/@types/node/-/node-10.17.60.tgz#35f3d6213daed95da7f0f73e75bcc6980e90597b"
integrity sha512-F0KIgDJfy2nA3zMLmWGKxcH2ZVEtCZXHHdOQs2gSaQ27+lNeEfGxzkIw90aXswATX7AZ33tahPbzy6KAfUreVw==
"@types/normalize-package-data@^2.4.0":
version "2.4.4"
resolved "https://registry.yarnpkg.com/@types/normalize-package-data/-/normalize-package-data-2.4.4.tgz#56e2cc26c397c038fab0e3a917a12d5c5909e901"
integrity sha512-37i+OaWTh9qeK4LSHPsyRC7NahnGotNuZvjLSgcPzblpHB3rrCJxAOgI5gCdKm7coonsaX1Of0ILiTcnZjbfxA==
"@types/plist@^3.0.1":
version "3.0.5"
resolved "https://registry.yarnpkg.com/@types/plist/-/plist-3.0.5.tgz#9a0c49c0f9886c8c8696a7904dd703f6284036e0"
@@ -557,13 +529,6 @@ ansi-regex@^5.0.1:
resolved "https://registry.yarnpkg.com/ansi-regex/-/ansi-regex-5.0.1.tgz#082cb2c89c9fe8659a311a53bd6a4dc5301db304"
integrity sha512-quJQXlTSUGL2LH9SUXo8VwsY4soanhgo6LNSm84E1LBcE8s3O0wpdiRzyR9z/ZZJMlMWv37qOOb9pdJlMUEKFQ==
ansi-styles@^3.2.1:
version "3.2.1"
resolved "https://registry.yarnpkg.com/ansi-styles/-/ansi-styles-3.2.1.tgz#41fbb20243e50b12be0f04b8dedbf07520ce841d"
integrity sha512-VT0ZI6kZRdTh8YyJw3SMbYm/u+NqfsAxEpWO0Pf9sq8/e94WxxOpPKx9FR1FlyCtOVDNOQ+8ntlqFxiRc+r5qA==
dependencies:
color-convert "^1.9.0"
ansi-styles@^4.0.0, ansi-styles@^4.1.0:
version "4.3.0"
resolved "https://registry.yarnpkg.com/ansi-styles/-/ansi-styles-4.3.0.tgz#edd803628ae71c04c85ae7a0906edad34b648937"
@@ -641,13 +606,6 @@ are-we-there-yet@^3.0.0:
delegates "^1.0.0"
readable-stream "^3.6.0"
argparse@^1.0.7:
version "1.0.10"
resolved "https://registry.yarnpkg.com/argparse/-/argparse-1.0.10.tgz#bcd6791ea5ae09725e17e5ad988134cd40b3d911"
integrity sha512-o5Roy6tNG4SL/FOkCAN6RzjiakZS25RLYFrcMttJqbdd8BWrnA+fGz57iN5Pb06pvBGvl5gQ0B48dJlslXvoTg==
dependencies:
sprintf-js "~1.0.2"
argparse@^2.0.1:
version "2.0.1"
resolved "https://registry.yarnpkg.com/argparse/-/argparse-2.0.1.tgz#246f50f3ca78a3240f6c997e8a9bd1eac49e4b38"
@@ -875,15 +833,6 @@ caseless@^0.12.0:
resolved "https://registry.yarnpkg.com/caseless/-/caseless-0.12.0.tgz#1b681c21ff84033c826543090689420d187151dc"
integrity sha512-4tYFyifaFfGacoiObjJegolkwSU4xQNGbVgUiNYVUxbQ2x2lUsFvY4hVgVzGiIe6WLOPqycWXA40l+PWsxthUw==
chalk@^2.4.2:
version "2.4.2"
resolved "https://registry.yarnpkg.com/chalk/-/chalk-2.4.2.tgz#cd42541677a54333cf541a49108c1432b44c9424"
integrity sha512-Mti+f9lpJNcwF4tWV8/OrTTtF1gZi+f8FqlyAdouralcFWFQWF2+NgCHShjkCb+IFBLq9buZwE1xckQU4peSuQ==
dependencies:
ansi-styles "^3.2.1"
escape-string-regexp "^1.0.5"
supports-color "^5.3.0"
chalk@^4.0.0, chalk@^4.0.2, chalk@^4.1.0, chalk@^4.1.2:
version "4.1.2"
resolved "https://registry.yarnpkg.com/chalk/-/chalk-4.1.2.tgz#aac4e2b7734a740867aeb16bf02aad556a1e7a01"
@@ -973,13 +922,6 @@ clone@^1.0.2:
resolved "https://registry.yarnpkg.com/clone/-/clone-1.0.4.tgz#da309cc263df15994c688ca902179ca3c7cd7c7e"
integrity sha512-JQHZ2QMW6l3aH/j6xCqQThY/9OH4D/9ls34cgkUBiEeocRTU04tHfKPBsUK1PqZCUQM7GiA0IIXJSuXHI64Kbg==
color-convert@^1.9.0:
version "1.9.3"
resolved "https://registry.yarnpkg.com/color-convert/-/color-convert-1.9.3.tgz#bb71850690e1f136567de629d2d5471deda4c1e8"
integrity sha512-QfAUtd+vFdAtFQcC8CCyYt1fYWxSqAiK2cSD6zDB8N3cpsEBAvRxp9zOGg6G/SHHJYAT88/az/IuDGALsNVbGg==
dependencies:
color-name "1.1.3"
color-convert@^2.0.1:
version "2.0.1"
resolved "https://registry.yarnpkg.com/color-convert/-/color-convert-2.0.1.tgz#72d3a68d598c9bdb3af2ad1e84f21d896abd4de3"
@@ -987,11 +929,6 @@ color-convert@^2.0.1:
dependencies:
color-name "~1.1.4"
color-name@1.1.3:
version "1.1.3"
resolved "https://registry.yarnpkg.com/color-name/-/color-name-1.1.3.tgz#a7d0558bd89c42f795dd42328f740831ca53bc25"
integrity sha512-72fSenhMw2HZMTVHeCA9KCmpEIbzWiQsjN+BHcBbS9vr1mtt+vJjPdksIBNUmKAW8TFUDPJK5SUU3QhE9NEXDw==
color-name@~1.1.4:
version "1.1.4"
resolved "https://registry.yarnpkg.com/color-name/-/color-name-1.1.4.tgz#c2a09a87acbde69543de6f63fa3995c826c536a2"
@@ -1259,11 +1196,6 @@ dotenv-expand@^5.1.0:
resolved "https://registry.yarnpkg.com/dotenv-expand/-/dotenv-expand-5.1.0.tgz#3fbaf020bfd794884072ea26b1e9791d45a629f0"
integrity sha512-YXQl1DSa4/PQyRfgrv6aoNjhasp/p4qs9FjJ4q4cQk+8m4r6k4ZSiEyytKG8f8W9gi8WsQtIObNmKd+tMzNTmA==
dotenv@^8.2.0:
version "8.6.0"
resolved "https://registry.yarnpkg.com/dotenv/-/dotenv-8.6.0.tgz#061af664d19f7f4d8fc6e4ff9b584ce237adcb8b"
integrity sha512-IrPdXQsk2BbzvCBGBOTmmSH5SodmqZNt4ERAZDmW4CT+tL8VtvinqywuANaFu4bOMWki16nqf0e4oC0QIaDr/g==
dotenv@^9.0.2:
version "9.0.2"
resolved "https://registry.yarnpkg.com/dotenv/-/dotenv-9.0.2.tgz#dacc20160935a37dea6364aa1bef819fb9b6ab05"
@@ -1276,16 +1208,6 @@ ejs@^3.1.8:
dependencies:
jake "^10.8.5"
electron-builder-notarize@^1.5:
version "1.5.2"
resolved "https://registry.yarnpkg.com/electron-builder-notarize/-/electron-builder-notarize-1.5.2.tgz#540185b57a336fc6eec01bfe092a3b4764459255"
integrity sha512-vo6RGgIFYxMk2yp59N4NsvmAYfB7ncYi6gV9Fcq2TVKxEn2tPXrSjIKB2e/pu+5iXIY6BHNZNXa75F3DHgOOLA==
dependencies:
dotenv "^8.2.0"
electron-notarize "^1.1.1"
js-yaml "^3.14.0"
read-pkg-up "^7.0.0"
electron-builder@25.0.0-alpha.6:
version "25.0.0-alpha.6"
resolved "https://registry.yarnpkg.com/electron-builder/-/electron-builder-25.0.0-alpha.6.tgz#a72f96f7029539ac28f92ce5c83f872ba3b6e7c1"
@@ -1308,14 +1230,6 @@ electron-log@^5.1:
resolved "https://registry.yarnpkg.com/electron-log/-/electron-log-5.1.2.tgz#fb40ad7f4ae694dd0e4c02c662d1a65c03e1243e"
integrity sha512-Cpg4hAZ27yM9wzE77c4TvgzxzavZ+dVltCczParXN+Vb3jocojCSAuSMCVOI9fhFuuOR+iuu3tZLX1cu0y0kgQ==
electron-notarize@^1.1.1:
version "1.2.2"
resolved "https://registry.yarnpkg.com/electron-notarize/-/electron-notarize-1.2.2.tgz#ebf2b258e8e08c1c9f8ff61dc53d5b16b439daf4"
integrity sha512-ZStVWYcWI7g87/PgjPJSIIhwQXOaw4/XeXU+pWqMMktSLHaGMLHdyPPN7Cmao7+Cr7fYufA16npdtMndYciHNw==
dependencies:
debug "^4.1.1"
fs-extra "^9.0.1"
electron-publish@25.0.0-alpha.6:
version "25.0.0-alpha.6"
resolved "https://registry.yarnpkg.com/electron-publish/-/electron-publish-25.0.0-alpha.6.tgz#8af3cb6e2435c00b8c71de43c330483808df5924"
@@ -1352,9 +1266,9 @@ electron-updater@^6.1:
tiny-typed-emitter "^2.1.0"
electron@^30:
version "30.0.2"
resolved "https://registry.yarnpkg.com/electron/-/electron-30.0.2.tgz#95ba019216bf8be9f3097580123e33ea37497733"
integrity sha512-zv7T+GG89J/hyWVkQsLH4Y/rVEfqJG5M/wOBIGNaDdqd8UV9/YZPdS7CuFeaIj0H9LhCt95xkIQNpYB/3svOkQ==
version "30.0.3"
resolved "https://registry.yarnpkg.com/electron/-/electron-30.0.3.tgz#7c25ddb12ba89fd117991d010f1b274b1bafcb73"
integrity sha512-h+suwx6e0fnv/9wi0/cmCMtG+4LrPzJZa+3DEEpxcPcP+pcWnBI70t8QspxgMNIh2wzXLMD9XVqrLkEbiBAInw==
dependencies:
"@electron/get" "^2.0.0"
"@types/node" "^20.9.0"
@@ -1389,13 +1303,6 @@ err-code@^2.0.2:
resolved "https://registry.yarnpkg.com/err-code/-/err-code-2.0.3.tgz#23c2f3b756ffdfc608d30e27c9a941024807e7f9"
integrity sha512-2bmlRpNKBxT/CRmPOlyISQpNj+qSeYvcym/uT0Jx2bMOlKLtSy1ZmLuVxSEKKyor/N5yhvp/ZiG1oE3DEYMSFA==
error-ex@^1.3.1:
version "1.3.2"
resolved "https://registry.yarnpkg.com/error-ex/-/error-ex-1.3.2.tgz#b4ac40648107fdcdcfae242f428bea8a14d4f1bf"
integrity sha512-7dFHNmqeFSEt2ZBsCriorKnn3Z2pj+fd9kmI6QoWw4//DL+icEBfc0U7qJCisqrTsKTjw4fNFy2pW9OqStD84g==
dependencies:
is-arrayish "^0.2.1"
es-define-property@^1.0.0:
version "1.0.0"
resolved "https://registry.yarnpkg.com/es-define-property/-/es-define-property-1.0.0.tgz#c7faefbdff8b2696cf5f46921edfb77cc4ba3845"
@@ -1418,11 +1325,6 @@ escalade@^3.1.1:
resolved "https://registry.yarnpkg.com/escalade/-/escalade-3.1.2.tgz#54076e9ab29ea5bf3d8f1ed62acffbb88272df27"
integrity sha512-ErCHMCae19vR8vQGe50xIsVomy19rg6gFu3+r3jkEO46suLMWBksvVyoGgQV+jOfl84ZSOSlmv6Gxa89PmTGmA==
escape-string-regexp@^1.0.5:
version "1.0.5"
resolved "https://registry.yarnpkg.com/escape-string-regexp/-/escape-string-regexp-1.0.5.tgz#1b61c0562190a8dff6ae3bb2cf0200ca130b86d4"
integrity sha512-vbRorB5FUQWvla16U8R/qgaFIya2qGzwDrNmCZuYKrbdSUMG6I1ZCGQRefkRVhuOkIGVne7BQ35DSfo1qvJqFg==
escape-string-regexp@^4.0.0:
version "4.0.0"
resolved "https://registry.yarnpkg.com/escape-string-regexp/-/escape-string-regexp-4.0.0.tgz#14ba83a5d373e3d311e5afca29cf5bfad965bf34"
@@ -1494,11 +1396,6 @@ espree@^9.6.0, espree@^9.6.1:
acorn-jsx "^5.3.2"
eslint-visitor-keys "^3.4.1"
esprima@^4.0.0:
version "4.0.1"
resolved "https://registry.yarnpkg.com/esprima/-/esprima-4.0.1.tgz#13b04cdb3e6c5d19df91ab6987a8695619b0aa71"
integrity sha512-eGuFFw7Upda+g4p+QHvnW0RyTX/SVeJBDM/gCtMARO0cLuT2HcEKnTPvhjV6aGeqrCB/sbNop0Kszm0jsaWU4A==
esquery@^1.4.2:
version "1.5.0"
resolved "https://registry.yarnpkg.com/esquery/-/esquery-1.5.0.tgz#6ce17738de8577694edd7361c57182ac8cb0db0b"
@@ -1622,14 +1519,6 @@ find-up@^3.0.0:
dependencies:
locate-path "^3.0.0"
find-up@^4.1.0:
version "4.1.0"
resolved "https://registry.yarnpkg.com/find-up/-/find-up-4.1.0.tgz#97afe7d6cdc0bc5928584b7c8d7b16e8a9aa5d19"
integrity sha512-PpOwAdQ/YlXQ2vj8a3h8IipDuYRi3wceVQQGYWxNINccq40Anw7BlsEXCMbt1Zt+OLA6Fq9suIpIWD0OsnISlw==
dependencies:
locate-path "^5.0.0"
path-exists "^4.0.0"
find-up@^5.0.0:
version "5.0.0"
resolved "https://registry.yarnpkg.com/find-up/-/find-up-5.0.0.tgz#4c92819ecb7083561e4f4a240a86be5198f536fc"
@@ -1907,11 +1796,6 @@ graphemer@^1.4.0:
resolved "https://registry.yarnpkg.com/graphemer/-/graphemer-1.4.0.tgz#fb2f1d55e0e3a1849aeffc90c4fa0dd53a0e66c6"
integrity sha512-EtKwoO6kxCL9WO5xipiHTZlSzBm7WLT627TqC/uVRd0HKmq8NXyebnNYxDoBi7wt8eTWrUrKXCOVaFq9x1kgag==
has-flag@^3.0.0:
version "3.0.0"
resolved "https://registry.yarnpkg.com/has-flag/-/has-flag-3.0.0.tgz#b5d454dc2199ae225699f3467e5a07f3b955bafd"
integrity sha512-sKJf1+ceQBr4SMkvQnBDNDtf4TXpVhVGateu0t918bl30FnbE2m4vNLX+VWe/dpjlb+HugGYzW7uQXH98HPEYw==
has-flag@^4.0.0:
version "4.0.0"
resolved "https://registry.yarnpkg.com/has-flag/-/has-flag-4.0.0.tgz#944771fd9c81c81265c4d6941860da06bb59479b"
@@ -1946,11 +1830,6 @@ hasown@^2.0.0:
dependencies:
function-bind "^1.1.2"
hosted-git-info@^2.1.4:
version "2.8.9"
resolved "https://registry.yarnpkg.com/hosted-git-info/-/hosted-git-info-2.8.9.tgz#dffc0bf9a21c02209090f2aa69429e1414daf3f9"
integrity sha512-mxIDAb9Lsm6DoOJ7xH+5+X4y1LU/4Hi50L9C5sIswK3JzULS4bwk1FvjdBgvYR4bzT4tuUQiC15FE2f5HbLvYw==
hosted-git-info@^4.1.0:
version "4.1.0"
resolved "https://registry.yarnpkg.com/hosted-git-info/-/hosted-git-info-4.1.0.tgz#827b82867e9ff1c8d0c4d9d53880397d2c86d224"
@@ -2081,11 +1960,6 @@ ip-address@^9.0.5:
jsbn "1.1.0"
sprintf-js "^1.1.3"
is-arrayish@^0.2.1:
version "0.2.1"
resolved "https://registry.yarnpkg.com/is-arrayish/-/is-arrayish-0.2.1.tgz#77c99840527aa8ecb1a8ba697b80645a7a926a9d"
integrity sha512-zz06S8t0ozoDXMG+ube26zeCTNXcKIPJZJi8hBrF4idCLms4CG9QtK7qBl1boi5ODzFpjswb5JPmHCbMpjaYzg==
is-binary-path@~2.1.0:
version "2.1.0"
resolved "https://registry.yarnpkg.com/is-binary-path/-/is-binary-path-2.1.0.tgz#ea1f7f3b80f064236e83470f86c09c254fb45b09"
@@ -2198,19 +2072,6 @@ jpeg-js@^0.4:
resolved "https://registry.yarnpkg.com/jpeg-js/-/jpeg-js-0.4.4.tgz#a9f1c6f1f9f0fa80cdb3484ed9635054d28936aa"
integrity sha512-WZzeDOEtTOBK4Mdsar0IqEU5sMr3vSV2RqkAIzUEV2BHnUfKGyswWFPFwK5EeDo93K3FohSHbLAjj0s1Wzd+dg==
js-tokens@^4.0.0:
version "4.0.0"
resolved "https://registry.yarnpkg.com/js-tokens/-/js-tokens-4.0.0.tgz#19203fb59991df98e3a287050d4647cdeaf32499"
integrity sha512-RdJUflcE3cUzKiMqQgsCu06FPu9UdIJO0beYbPhHN4k6apgJtifcoCtT9bcxOpYBtpD2kCM6Sbzg4CausW/PKQ==
js-yaml@^3.14.0:
version "3.14.1"
resolved "https://registry.yarnpkg.com/js-yaml/-/js-yaml-3.14.1.tgz#dae812fdb3825fa306609a8717383c50c36a0537"
integrity sha512-okMH7OXXJ7YrN9Ok3/SXrnu4iX9yOk+25nqX4imS2npuvTYDmo/QEZoqwZkYaIDk3jVvBOTOIEgEhaLOynBS9g==
dependencies:
argparse "^1.0.7"
esprima "^4.0.0"
js-yaml@^4.1.0:
version "4.1.0"
resolved "https://registry.yarnpkg.com/js-yaml/-/js-yaml-4.1.0.tgz#c1fb65f8f5017901cdd2c951864ba18458a10602"
@@ -2228,11 +2089,6 @@ json-buffer@3.0.1:
resolved "https://registry.yarnpkg.com/json-buffer/-/json-buffer-3.0.1.tgz#9338802a30d3b6605fbe0613e094008ca8c05a13"
integrity sha512-4bV5BfR2mqfQTJm+V5tPPdf+ZpuhiIvTuAB5g8kcrXOZpTT/QwwVRWBywX1ozr6lEuPdbHxwaJlm9G6mI2sfSQ==
json-parse-even-better-errors@^2.3.0:
version "2.3.1"
resolved "https://registry.yarnpkg.com/json-parse-even-better-errors/-/json-parse-even-better-errors-2.3.1.tgz#7c47805a94319928e05777405dc12e1f7a4ee02d"
integrity sha512-xyFwyhro/JEof6Ghe2iz2NcXoj2sloNsWr/XsERDK/oiPCfaNhl5ONfp+jQdAZRQQ0IJWNzH9zIZF7li91kh2w==
json-schema-traverse@^0.4.1:
version "0.4.1"
resolved "https://registry.yarnpkg.com/json-schema-traverse/-/json-schema-traverse-0.4.1.tgz#69f6a87d9513ab8bb8fe63bdb0979c448e684660"
@@ -2299,11 +2155,6 @@ levn@^0.4.1:
prelude-ls "^1.2.1"
type-check "~0.4.0"
lines-and-columns@^1.1.6:
version "1.2.4"
resolved "https://registry.yarnpkg.com/lines-and-columns/-/lines-and-columns-1.2.4.tgz#eca284f75d2965079309dc0ad9255abb2ebc1632"
integrity sha512-7ylylesZQ/PV29jhEDl3Ufjo6ZX7gCqJr5F7PKrqc93v7fzSymt1BpwEU8nAUXs8qzzvqhbjhK5QZg6Mt/HkBg==
locate-path@^3.0.0:
version "3.0.0"
resolved "https://registry.yarnpkg.com/locate-path/-/locate-path-3.0.0.tgz#dbec3b3ab759758071b58fe59fc41871af21400e"
@@ -2312,13 +2163,6 @@ locate-path@^3.0.0:
p-locate "^3.0.0"
path-exists "^3.0.0"
locate-path@^5.0.0:
version "5.0.0"
resolved "https://registry.yarnpkg.com/locate-path/-/locate-path-5.0.0.tgz#1afba396afd676a6d42504d0a67a3a7eb9f62aa0"
integrity sha512-t7hw9pI+WvuwNJXwk5zVHpyhIqzg2qTlklJOf0mVxGSbe3Fp2VieZcduNYjaLDoy6p9uGpQEGWG87WpMKlNq8g==
dependencies:
p-locate "^4.1.0"
locate-path@^6.0.0:
version "6.0.0"
resolved "https://registry.yarnpkg.com/locate-path/-/locate-path-6.0.0.tgz#55321eb309febbc59c4801d931a72452a681d286"
@@ -2643,16 +2487,6 @@ nopt@^6.0.0:
dependencies:
abbrev "^1.0.0"
normalize-package-data@^2.5.0:
version "2.5.0"
resolved "https://registry.yarnpkg.com/normalize-package-data/-/normalize-package-data-2.5.0.tgz#e66db1838b200c1dfc233225d12cb36520e234a8"
integrity sha512-/5CMN3T0R4XTj4DcGaexo+roZSdSFW/0AOOTROrjxzCG1wrWXEsGbRKevjlIL+ZDE4sZlJr5ED4YW0yqmkK+eA==
dependencies:
hosted-git-info "^2.1.4"
resolve "^1.10.0"
semver "2 || 3 || 4 || 5"
validate-npm-package-license "^3.0.1"
normalize-path@^3.0.0, normalize-path@~3.0.0:
version "3.0.0"
resolved "https://registry.yarnpkg.com/normalize-path/-/normalize-path-3.0.0.tgz#0dcd69ff23a1c9b11fd0978316644a0388216a65"
@@ -2737,7 +2571,7 @@ p-cancelable@^2.0.0:
resolved "https://registry.yarnpkg.com/p-cancelable/-/p-cancelable-2.1.1.tgz#aab7fbd416582fa32a3db49859c122487c5ed2cf"
integrity sha512-BZOr3nRQHOntUjTrH8+Lh54smKHoHyur8We1V8DSMVrl5A2malOOwuJRnKRDjSnkoeBh4at6BwEnb5I7Jl31wg==
p-limit@^2.0.0, p-limit@^2.2.0:
p-limit@^2.0.0:
version "2.3.0"
resolved "https://registry.yarnpkg.com/p-limit/-/p-limit-2.3.0.tgz#3dd33c647a214fdfffd835933eb086da0dc21db1"
integrity sha512-//88mFWSJx8lxCzwdAABTJL2MyWB12+eIY7MDL2SqLmAkeKU9qxRvWuSyTjm3FUmpBEMuFfckAIqEaVGUDxb6w==
@@ -2758,13 +2592,6 @@ p-locate@^3.0.0:
dependencies:
p-limit "^2.0.0"
p-locate@^4.1.0:
version "4.1.0"
resolved "https://registry.yarnpkg.com/p-locate/-/p-locate-4.1.0.tgz#a3428bb7088b3a60292f66919278b7c297ad4f07"
integrity sha512-R79ZZ/0wAxKGu3oYMlz8jy/kbhsNrS7SKZ7PxEHBgJ5+F2mtFW2fK2cOtBh1cHYkQsbzFV7I+EoRKe6Yt0oK7A==
dependencies:
p-limit "^2.2.0"
p-locate@^5.0.0:
version "5.0.0"
resolved "https://registry.yarnpkg.com/p-locate/-/p-locate-5.0.0.tgz#83c8315c6785005e3bd021839411c9e110e6d834"
@@ -2796,16 +2623,6 @@ parse-cache-control@^1.0.1:
resolved "https://registry.yarnpkg.com/parse-cache-control/-/parse-cache-control-1.0.1.tgz#8eeab3e54fa56920fe16ba38f77fa21aacc2d74e"
integrity sha512-60zvsJReQPX5/QP0Kzfd/VrpjScIQ7SHBW6bFCYfEP+fp0Eppr1SHhIO5nd1PjZtvclzSzES9D/p5nFJurwfWg==
parse-json@^5.0.0:
version "5.2.0"
resolved "https://registry.yarnpkg.com/parse-json/-/parse-json-5.2.0.tgz#c76fc66dee54231c962b22bcc8a72cf2f99753cd"
integrity sha512-ayCKvm/phCGxOkYRSCM82iDwct8/EonSEgCSxWxD7ve6jHggsFl4fZVQBPRNgQoKiuV/odhFrGzQXZwbifC8Rg==
dependencies:
"@babel/code-frame" "^7.0.0"
error-ex "^1.3.1"
json-parse-even-better-errors "^2.3.0"
lines-and-columns "^1.1.6"
path-exists@^3.0.0:
version "3.0.0"
resolved "https://registry.yarnpkg.com/path-exists/-/path-exists-3.0.0.tgz#ce0ebeaa5f78cb18925ea7d810d7b59b010fd515"
@ -2849,11 +2666,6 @@ pend@~1.2.0:
resolved "https://registry.yarnpkg.com/pend/-/pend-1.2.0.tgz#7a57eb550a6783f9115331fcf4663d5c8e007a50"
integrity sha512-F3asv42UuXchdzt+xXqfW1OGlVBe+mxa2mqI0pg5yAHZPvFmY3Y6drSf/GQ1A86WgWEN9Kzh/WrgKa6iGcHXLg==
picocolors@^1.0.0:
version "1.0.0"
resolved "https://registry.yarnpkg.com/picocolors/-/picocolors-1.0.0.tgz#cb5bdc74ff3f51892236eaf79d68bc44564ab81c"
integrity sha512-1fygroTLlHu66zi26VoTDv8yRgm0Fccecssto+MhsZ0D/DGW2sm8E8AjW7NU5VVTRt5GxbeZ5qBuJr+HyLYkjQ==
picomatch@^2.0.4, picomatch@^2.2.1, picomatch@^2.3.1:
version "2.3.1"
resolved "https://registry.yarnpkg.com/picomatch/-/picomatch-2.3.1.tgz#3ba3833733646d9d3e4995946c1365a67fb07a42"
@@ -2958,25 +2770,6 @@ read-config-file@6.3.2:
json5 "^2.2.0"
lazy-val "^1.0.4"
read-pkg-up@^7.0.0:
version "7.0.1"
resolved "https://registry.yarnpkg.com/read-pkg-up/-/read-pkg-up-7.0.1.tgz#f3a6135758459733ae2b95638056e1854e7ef507"
integrity sha512-zK0TB7Xd6JpCLmlLmufqykGE+/TlOePD6qKClNW7hHDKFh/J7/7gCWGR7joEQEW1bKq3a3yUZSObOoWLFQ4ohg==
dependencies:
find-up "^4.1.0"
read-pkg "^5.2.0"
type-fest "^0.8.1"
read-pkg@^5.2.0:
version "5.2.0"
resolved "https://registry.yarnpkg.com/read-pkg/-/read-pkg-5.2.0.tgz#7bf295438ca5a33e56cd30e053b34ee7250c93cc"
integrity sha512-Ug69mNOpfvKDAc2Q8DRpMjjzdtrnv9HcSMX+4VsZxD1aZ6ZzrIE7rlzXBtWTyhULSMKg076AW6WR5iZpD0JiOg==
dependencies:
"@types/normalize-package-data" "^2.4.0"
normalize-package-data "^2.5.0"
parse-json "^5.0.0"
type-fest "^0.6.0"
readable-stream@^3.0.2, readable-stream@^3.4.0, readable-stream@^3.6.0:
version "3.6.2"
resolved "https://registry.yarnpkg.com/readable-stream/-/readable-stream-3.6.2.tgz#56a9b36ea965c00c5a93ef31eb111a0f11056967"
@@ -3025,7 +2818,7 @@ resolve-from@^4.0.0:
resolved "https://registry.yarnpkg.com/resolve-from/-/resolve-from-4.0.0.tgz#4abcd852ad32dd7baabfe9b40e00a36db5f392e6"
integrity sha512-pb/MYmXstAkysRFx8piNI1tGFNQIFA3vkE3Gq4EuA1dF6gHp/+vgZqsCGJapvy8N3Q+4o7FwvquPJcnZ7RYy4g==
resolve@^1.1.6, resolve@^1.10.0:
resolve@^1.1.6:
version "1.22.8"
resolved "https://registry.yarnpkg.com/resolve/-/resolve-1.22.8.tgz#b6c87a9f2aa06dfab52e3d70ac8cde321fa5a48d"
integrity sha512-oKWePCxqpd6FlLvGV1VU0x7bkPmmCNolxzjMf4NczoDnQcIWrAF+cPtZn5i6n+RfD2d9i0tzpKnG6Yk168yIyw==
@@ -3126,17 +2919,17 @@ semver-compare@^1.0.0:
resolved "https://registry.yarnpkg.com/semver-compare/-/semver-compare-1.0.0.tgz#0dee216a1c941ab37e9efb1788f6afc5ff5537fc"
integrity sha512-YM3/ITh2MJ5MtzaM429anh+x2jiLVjqILF4m4oyQB18W7Ggea7BfqdH/wGMK7dDiMghv/6WG7znWMwUDzJiXow==
"semver@2 || 3 || 4 || 5":
version "5.7.2"
resolved "https://registry.yarnpkg.com/semver/-/semver-5.7.2.tgz#48d55db737c3287cd4835e17fa13feace1c41ef8"
integrity sha512-cBznnQ9KjJqU67B52RMC65CMarK2600WFnbkcaiwWq3xy/5haFJlshgnpjovMVJ+Hff49d8GEn0b87C5pDQ10g==
semver@^6.2.0:
version "6.3.1"
resolved "https://registry.yarnpkg.com/semver/-/semver-6.3.1.tgz#556d2ef8689146e46dcea4bfdd095f3434dffcb4"
integrity sha512-BR7VvDCVHO+q2xBEWskxS6DJE1qRnb7DxzUrogb71CWoSficBxYsiAGd+Kl0mmq/MprG9yArRkyrQxTO6XjMzA==
semver@^7.3.2, semver@^7.3.5, semver@^7.3.8, semver@^7.5.3, semver@^7.6.0:
semver@^7.3.2:
version "7.6.2"
resolved "https://registry.yarnpkg.com/semver/-/semver-7.6.2.tgz#1e3b34759f896e8f14d6134732ce798aeb0c6e13"
integrity sha512-FNAIBWCx9qcRhoHcgcJ0gvU7SN1lYU2ZXuSfl04bSC5OpvDHFyJCjdNHomPXxjQlCBU67YW64PzY7/VIEH7F2w==
semver@^7.3.5, semver@^7.3.8, semver@^7.5.3, semver@^7.6.0:
version "7.6.0"
resolved "https://registry.yarnpkg.com/semver/-/semver-7.6.0.tgz#1a46a4db4bffcccd97b743b5005c8325f23d4e2d"
integrity sha512-EnwXhrlwXMk9gKu5/flx5sv/an57AkRplG3hTK68W7FRDN+k+OWBj65M7719OkA82XLBxrcX0KSHj+X5COhOVg==
@@ -3284,42 +3077,11 @@ spawn-command@0.0.2:
resolved "https://registry.yarnpkg.com/spawn-command/-/spawn-command-0.0.2.tgz#9544e1a43ca045f8531aac1a48cb29bdae62338e"
integrity sha512-zC8zGoGkmc8J9ndvml8Xksr1Amk9qBujgbF0JAIWO7kXr43w0h/0GJNM/Vustixu+YE8N/MTrQ7N31FvHUACxQ==
spdx-correct@^3.0.0:
version "3.2.0"
resolved "https://registry.yarnpkg.com/spdx-correct/-/spdx-correct-3.2.0.tgz#4f5ab0668f0059e34f9c00dce331784a12de4e9c"
integrity sha512-kN9dJbvnySHULIluDHy32WHRUu3Og7B9sbY7tsFLctQkIqnMh3hErYgdMjTYuqmcXX+lK5T1lnUt3G7zNswmZA==
dependencies:
spdx-expression-parse "^3.0.0"
spdx-license-ids "^3.0.0"
spdx-exceptions@^2.1.0:
version "2.5.0"
resolved "https://registry.yarnpkg.com/spdx-exceptions/-/spdx-exceptions-2.5.0.tgz#5d607d27fc806f66d7b64a766650fa890f04ed66"
integrity sha512-PiU42r+xO4UbUS1buo3LPJkjlO7430Xn5SVAhdpzzsPHsjbYVflnnFdATgabnLude+Cqu25p6N+g2lw/PFsa4w==
spdx-expression-parse@^3.0.0:
version "3.0.1"
resolved "https://registry.yarnpkg.com/spdx-expression-parse/-/spdx-expression-parse-3.0.1.tgz#cf70f50482eefdc98e3ce0a6833e4a53ceeba679"
integrity sha512-cbqHunsQWnJNE6KhVSMsMeH5H/L9EpymbzqTQ3uLwNCLZ1Q481oWaofqH7nO6V07xlXwY6PhQdQ2IedWx/ZK4Q==
dependencies:
spdx-exceptions "^2.1.0"
spdx-license-ids "^3.0.0"
spdx-license-ids@^3.0.0:
version "3.0.17"
resolved "https://registry.yarnpkg.com/spdx-license-ids/-/spdx-license-ids-3.0.17.tgz#887da8aa73218e51a1d917502d79863161a93f9c"
integrity sha512-sh8PWc/ftMqAAdFiBu6Fy6JUOYjqDJBJvIhpfDMyHrr0Rbp5liZqd4TjtQ/RgfLjKFZb+LMx5hpml5qOWy0qvg==
sprintf-js@^1.1.2, sprintf-js@^1.1.3:
version "1.1.3"
resolved "https://registry.yarnpkg.com/sprintf-js/-/sprintf-js-1.1.3.tgz#4914b903a2f8b685d17fdf78a70e917e872e444a"
integrity sha512-Oo+0REFV59/rz3gfJNKQiBlwfHaSESl1pcGyABQsnnIfWOFt6JNj5gCog2U6MLZ//IGYD+nA8nI+mTShREReaA==
sprintf-js@~1.0.2:
version "1.0.3"
resolved "https://registry.yarnpkg.com/sprintf-js/-/sprintf-js-1.0.3.tgz#04e6926f662895354f3dd015203633b857297e2c"
integrity sha512-D9cPgkvLlV3t3IzL0D0YLvGA9Ahk4PcvVwUbN0dSGr1aP0Nrt4AEnTUbuGvquEC0mA64Gqt1fzirlRs5ibXx8g==
ssri@^9.0.0:
version "9.0.1"
resolved "https://registry.yarnpkg.com/ssri/-/ssri-9.0.1.tgz#544d4c357a8d7b71a19700074b6883fcb4eae057"
@@ -3367,13 +3129,6 @@ sumchecker@^3.0.1:
dependencies:
debug "^4.1.0"
supports-color@^5.3.0:
version "5.5.0"
resolved "https://registry.yarnpkg.com/supports-color/-/supports-color-5.5.0.tgz#e2e69a44ac8772f78a1ec0b35b689df6530efc8f"
integrity sha512-QjVjwdXIt408MIiAqCX4oUKsgU2EqAGzs2Ppkm4aQYbjm+ZEWEcW4SfFNTr4uMNZma0ey4f5lgLrkB0aX0QMow==
dependencies:
has-flag "^3.0.0"
supports-color@^7.1.0:
version "7.2.0"
resolved "https://registry.yarnpkg.com/supports-color/-/supports-color-7.2.0.tgz#1b7dcdcb32b8138801b3e478ba6a51caa89648da"
@@ -3501,16 +3256,6 @@ type-fest@^0.20.2:
resolved "https://registry.yarnpkg.com/type-fest/-/type-fest-0.20.2.tgz#1bf207f4b28f91583666cb5fbd327887301cd5f4"
integrity sha512-Ne+eE4r0/iWnpAxD852z3A+N0Bt5RN//NjJwRd2VFHEmrywxf5vsZlh4R6lixl6B+wz/8d+maTSAkN1FIkI3LQ==
type-fest@^0.6.0:
version "0.6.0"
resolved "https://registry.yarnpkg.com/type-fest/-/type-fest-0.6.0.tgz#8d2a2370d3df886eb5c90ada1c5bf6188acf838b"
integrity sha512-q+MB8nYR1KDLrgr4G5yemftpMC7/QLqVndBmEEdqzmNj5dcFOO4Oo8qlwZE3ULT3+Zim1F8Kq4cBnikNhlCMlg==
type-fest@^0.8.1:
version "0.8.1"
resolved "https://registry.yarnpkg.com/type-fest/-/type-fest-0.8.1.tgz#09e249ebde851d3b1e48d27c105444667f17b83d"
integrity sha512-4dbzIzqvjtgiM5rw1k5rEHtBANKmdudhGyBEajN01fEyhaAIhsoKNy6y7+IN93IfpFtwY9iqi7kD+xwKhQsNJA==
type-fest@^2.17.0:
version "2.19.0"
resolved "https://registry.yarnpkg.com/type-fest/-/type-fest-2.19.0.tgz#88068015bb33036a598b952e55e9311a60fd3a9b"
@@ -3577,14 +3322,6 @@ util-deprecate@^1.0.1:
resolved "https://registry.yarnpkg.com/util-deprecate/-/util-deprecate-1.0.2.tgz#450d4dc9fa70de732762fbd2d4a28981419a0ccf"
integrity sha512-EPD5q1uXyFxJpCrLnCc1nHnq3gOa6DZBocAIiI2TaSCA7VCJ1UJDMagCzIkXNsUYfD1daK//LTEQ8xiIbrHtcw==
validate-npm-package-license@^3.0.1:
version "3.0.4"
resolved "https://registry.yarnpkg.com/validate-npm-package-license/-/validate-npm-package-license-3.0.4.tgz#fc91f6b9c7ba15c857f4cb2c5defeec39d4f410a"
integrity sha512-DpKm2Ui/xN7/HQKCtpZxoRWBhZ9Z0kqtygG8XCgNQ8ZlDnxuQmWhj566j8fN4Cu3/JmbhsDo7fcAJq4s9h27Ew==
dependencies:
spdx-correct "^3.0.0"
spdx-expression-parse "^3.0.0"
verror@^1.10.0:
version "1.10.1"
resolved "https://registry.yarnpkg.com/verror/-/verror-1.10.1.tgz#4bf09eeccf4563b109ed4b3d458380c972b0cdeb"

View file

@@ -123,6 +123,10 @@ export const sidebar = [
text: "Troubleshooting",
collapsed: true,
items: [
{
text: "Desktop install",
link: "/photos/troubleshooting/desktop-install/",
},
{
text: "Files not uploading",
link: "/photos/troubleshooting/files-not-uploading",
@@ -197,6 +201,10 @@ export const sidebar = [
text: "System requirements",
link: "/self-hosting/guides/system-requirements",
},
{
text: "Configuring S3",
link: "/self-hosting/guides/configuring-s3",
},
{
text: "Using external S3",
link: "/self-hosting/guides/external-s3",

View file

@@ -1,19 +1,13 @@
---
title: Archive
description: |
Archiving photos and albums in Ente Photos to remove them from your home
timeline
title: Cast
description:
Casting your photos on to a large screen or a TV or a Chromecast device
---
> [!CAUTION]
>
> This is preview documentation for an upcoming feature. This feature has not
> yet been released yet, so the steps below will not work currently.
# Cast
With Ente Cast, you can play a slideshow of your favourite albums on your Google
Chromecast TVs or other Internet-connected large screen devices.
Chromecast TVs or any other internet-connected large screen devices.
## Get Started

View file

@@ -0,0 +1,75 @@
---
title: Desktop installation
description: Troubleshooting issues when installing the Ente Photos desktop app
---
# Desktop app installation
The latest version of the Ente Photos desktop app can be downloaded from
[ente.io/download](https://ente.io/download). If you're having trouble, please
see if any of the following cases apply.
## Windows
If the app stops with an "A JavaScript error occurred in the main process - The
specified module could not be found" error on your Windows machine when you
start it, then you might need to install the VC++ runtime from Microsoft.
This is what the error looks like:
![Error when VC++ runtime is not installed](windows-vc.png){width=500px}
You can install the Microsoft VC++ redistributable runtime from here:<br/>
https://learn.microsoft.com/en-us/cpp/windows/latest-supported-vc-redist?view=msvc-170#latest-microsoft-visual-c-redistributable-version
## AppImages on ARM64 Linux
If you're on an ARM64 machine running Linux, and the AppImage doesn't do
anything when you run it, you will need to run the following command on your
machine:
```sh
sudo ln -s /usr/lib/aarch64-linux-gnu/libz.so{.1,}
```
It is possible that the exact path might be different on your machine. Briefly,
what we need to do is create `libz.so` as an alias for `libz.so.1`. For more
details, see the following upstream issues:
- libz.so cannot open shared object file on ARM64 -
[AppImage/AppImageKit/issues/1092](https://github.com/AppImage/AppImageKit/issues/1092)
- libz.so: cannot open shared object file with Ubuntu arm64 -
[electron-userland/electron-builder/issues/7835](https://github.com/electron-userland/electron-builder/issues/7835)
## AppImage says it requires FUSE
See
[docs.appimage.org](https://docs.appimage.org/user-guide/troubleshooting/fuse.html#the-appimage-tells-me-it-needs-fuse-to-run).
tl;dr: for example, on Ubuntu,
```sh
sudo apt install libfuse2
```
## Linux SUID error
On some Linux distributions, if you run the AppImage from the CLI, it might fail
with the following error:
> The SUID sandbox helper binary was found, but is not configured correctly.
This happens when you try to run the AppImage from the command line. If you
instead double click on the AppImage in your Files browser, then it should start
properly.
If you do want to run it from the command line, you can do so by passing the
`--no-sandbox` flag when executing the AppImage. e.g.
```sh
./ente.AppImage --no-sandbox
```
For more details, see this upstream issue on
[electron](https://github.com/electron/electron/issues/17972).

Binary file not shown.

After

Width:  |  Height:  |  Size: 24 KiB

View file

@@ -0,0 +1,80 @@
---
title: Configuring S3 buckets
description:
Configure S3 endpoints to fix upload errors or use your self hosted Ente
from outside localhost
---
# Configuring S3
There are three components involved in uploading:
1. The client (e.g. the web app or the mobile app)
2. Ente's server (museum)
3. The S3-compatible object storage (e.g. minio in the default starter)
For the uploads to work, all three of them need to be able to reach each other.
This is because the client uploads directly to the object storage. The
interaction goes something like this:
1. The client wants to upload, so it asks museum where it should upload to.
2. Museum creates pre-signed URLs for the S3 bucket that was configured.
3. The client uploads directly to the S3 bucket using these URLs.
The upshot of this is that _both_ the client and museum should be able to reach
your S3 bucket.
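To make this concrete, here is a deliberately simplified sketch of the interaction; the endpoint name and response shape below are illustrative, not Ente's actual API:

```ts
// Illustrative only: museum hands out a pre-signed URL, and the client PUTs
// the object straight to the S3 endpoint. Museum never proxies the file
// contents, which is why the client must be able to reach S3 directly.
const upload = async (file: Blob) => {
    const { uploadURL } = await fetch("https://museum.example/upload-urls")
        .then((res) => res.json());
    const res = await fetch(uploadURL, { method: "PUT", body: file });
    if (!res.ok) throw new Error(`Upload failed: HTTP ${res.status}`);
};
```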
The URL for the S3 bucket is configured in
[scripts/compose/credentials.yaml](https://github.com/ente-io/ente/blob/main/server/scripts/compose/credentials.yaml#L10).
You can edit this file directly when testing, though it is just simpler and more
robust to create a `museum.yaml` (in the same folder as the Docker compose file)
and put your custom configuration there (in your case, you can put an entire
`s3` config object in your `museum.yaml`).
> [!TIP]
>
> For more details about these configuration objects, see the documentation for
> the `s3` object in
> [configurations/local.yaml](https://github.com/ente-io/ente/blob/main/server/configurations/local.yaml).
By default, you only need to configure the endpoint for the first bucket.
> [!NOTE]
>
> If you're wondering why there are 3 buckets - that's because our production
> instance uses these to perform replication.
>
> However, in a self hosted setup replication is off by default (you can turn it
> on if you want). When replication is turned off, only the first bucket is
> used, and you can remove the other two if you wish or just ignore them.
The `endpoint` for the first bucket in the starter `credentials.yaml` is
`localhost:3200`. The way this works is that both museum (`2`) and minio
(`3`) run within the same Docker compose cluster, so they are able to reach
each other. If at this point we were to run the web app (`1`) on localhost (say
using `yarn dev:photos`), it would also run on localhost and thus would be able
to reach `3`.
If you were to try and connect from a mobile app, this would not work since
`localhost:3200` would not resolve on your mobile. So you'll need to modify this
endpoint to a value, say `yourserverip:3200`, so that the mobile app can also
reach it.
The same principle applies if you're deploying to your custom domain.
> [!NOTE]
>
> If you need to configure SSL, for example if you're running over the internet,
> you'll need to turn off `s3.are_local_buckets` (which disables SSL in the
> default starter compose template).
>
> Disabling `s3.are_local_buckets` also switches to the subdomain style URLs for
> the buckets. However, not all S3 providers support these, in particular, minio
> does not work with these in its default configuration. So in such cases
> you'll then also need to enable `s3.use_path_style_urls`.
To summarize:
Set the S3 bucket `endpoint` in `credentials.yaml` to `yourserverip:3200`, or
some such IP/hostname that is accessible both from where you are running the
Ente clients (e.g. the mobile app) and from within the Docker compose cluster.

View file

@@ -34,4 +34,18 @@ endpoint:
api: "http://localhost:8080"
```
(Another [example](https://github.com/ente-io/ente/blob/main/cli/config.yaml.example))
(Another
[example](https://github.com/ente-io/ente/blob/main/cli/config.yaml.example))
## Web apps and Photos desktop app
You will need to build the app from source and use the
`NEXT_PUBLIC_ENTE_ENDPOINT` environment variable to tell it which server to
connect to. For example:
```sh
NEXT_PUBLIC_ENTE_ENDPOINT=http://localhost:8080 yarn dev:photos
```
For more details, see
[hosting the web app](https://help.ente.io/self-hosting/guides/web-app).

View file

@@ -16,5 +16,8 @@ See the sidebar for existing guides. In particular:
- For various admin related tasks, e.g. increasing the storage quota on your
self hosted instance, see [administering your custom server](admin).
- For self hosting both the server and web app using external S3 buckets for
object storage, see [using external S3](external-s3).
- For configuring your S3 buckets to get the object storage to work from your
mobile device or for fixing upload errors, see
[configuring S3](configuring-s3). There is also a longer
[community contributed guide](external-s3) for a more involved self hosted
setup of both the server and web app using external S3 buckets for object
storage.

View file

@@ -1,6 +1,8 @@
---
title: Hosting the web app
description: Building and hosting Ente's web app, connecting it to your self-hosted server
description:
Building and hosting Ente's web app, connecting it to your self-hosted
server
---
# Web app

View file

@@ -5,9 +5,9 @@ description: Fixing upload errors when trying to self host Ente
# Uploads failing
If uploads to your self-hosted server are failing, make sure that
`credentials.yaml` has `yourserverip:3200` for all three minio locations.
If uploads to your minio are failing, you need to ensure that you've configured
the S3 bucket `endpoint` in `credentials.yaml` (or `museum.yaml`) to, say,
`yourserverip:3200`. This can be any host or port, it just needs to be a value
that is reachable from both your client and from museum.
By default it is `localhost:3200`, and it needs to be changed to an IP that is
accessible from both where you are running the Ente clients (e.g. the mobile
app) and also from within the Docker compose cluster.
For more details, see [configuring-s3](/self-hosting/guides/configuring-s3).

View file

@@ -108,7 +108,7 @@
<key>NSBonjourServices</key>
<array>
<string>_googlecast._tcp</string>
<string>F5BCEC64._googlecast._tcp</string>
<string>_F5BCEC64._googlecast._tcp</string>
</array>
<key>NSLocalNetworkUsageDescription</key>

View file

@@ -72,8 +72,6 @@ class Configuration {
static const anonymousUserIDKey = "anonymous_user_id";
static const endPointKey = "endpoint";
final kTempFolderDeletionTimeBuffer = const Duration(hours: 6).inMicroseconds;
static final _logger = Logger("Configuration");
String? _cachedToken;
@@ -103,20 +101,7 @@ class Configuration {
_documentsDirectory = (await getApplicationDocumentsDirectory()).path;
_tempDocumentsDirPath = _documentsDirectory + "/temp/";
final tempDocumentsDir = Directory(_tempDocumentsDirPath);
try {
final currentTime = DateTime.now().microsecondsSinceEpoch;
if (tempDocumentsDir.existsSync() &&
(_preferences.getInt(lastTempFolderClearTimeKey) ?? 0) <
(currentTime - kTempFolderDeletionTimeBuffer)) {
await tempDocumentsDir.delete(recursive: true);
await _preferences.setInt(lastTempFolderClearTimeKey, currentTime);
_logger.info("Cleared temp folder");
} else {
_logger.info("Skipping temp folder clear");
}
} catch (e) {
_logger.warning(e);
}
await _cleanUpStaleFiles(tempDocumentsDir);
tempDocumentsDir.createSync(recursive: true);
final tempDirectoryPath = (await getTemporaryDirectory()).path;
_thumbnailCacheDirectory = tempDirectoryPath + "/thumbnail-cache";
@@ -144,6 +129,42 @@ class Configuration {
SuperLogging.setUserID(await _getOrCreateAnonymousUserID()).ignore();
}
// _cleanUpStaleFiles deletes all files in the temp directory that are older
// than tempDirCleanUpInterval, except the temp encrypted files for upload.
// Those files are deleted by the file uploader after the upload is complete,
// or when they are no longer being used / tracked.
Future<void> _cleanUpStaleFiles(Directory tempDocumentsDir) async {
try {
final currentTime = DateTime.now().microsecondsSinceEpoch;
if (tempDocumentsDir.existsSync() &&
(_preferences.getInt(lastTempFolderClearTimeKey) ?? 0) <
(currentTime - tempDirCleanUpInterval)) {
int skippedTempUploadFiles = 0;
final files = tempDocumentsDir.listSync();
for (final file in files) {
if (file is File) {
if (file.path.contains(uploadTempFilePrefix)) {
skippedTempUploadFiles++;
continue;
}
_logger.info("Deleting file: ${file.path}");
await file.delete();
} else if (file is Directory) {
await file.delete(recursive: true);
}
}
await _preferences.setInt(lastTempFolderClearTimeKey, currentTime);
_logger.info(
"Cleared temp folder except $skippedTempUploadFiles upload files",
);
} else {
_logger.info("Skipping temp folder clear");
}
} catch (e) {
_logger.warning(e);
}
}
Future<void> logout({bool autoLogout = false}) async {
if (SyncService.instance.isSyncInProgress()) {
SyncService.instance.stopSync();

View file

@@ -1,3 +1,5 @@
import "package:flutter/foundation.dart";
const int thumbnailSmallSize = 256;
const int thumbnailQuality = 50;
const int thumbnailLargeSize = 512;
@@ -41,6 +43,7 @@ const supportEmail = 'support@ente.io';
// this is the chunk size of the un-encrypted file which is read and encrypted before uploading it as a single part.
const multipartPartSize = 20 * 1024 * 1024;
const multipartPartSizeInternal = 8 * 1024 * 1024;
const kDefaultProductionEndpoint = 'https://api.ente.io';
@@ -95,3 +98,8 @@ const blackThumbnailBase64 = '/9j/4AAQSkZJRgABAQAAAQABAAD/2wBDAAEBAQEBAQEB'
'KACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAo' +
'AKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAo' +
'AKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgD/9k=';
const uploadTempFilePrefix = "upload_file_";
final tempDirCleanUpInterval = kDebugMode
? const Duration(seconds: 30).inMicroseconds
: const Duration(hours: 6).inMicroseconds;

View file

@@ -63,6 +63,19 @@ class EmbeddingsDB {
return _convertToEmbeddings(results);
}
// Get FileIDs for a specific model
Future<Set<int>> getFileIDs(Model model) async {
final db = await _database;
final results = await db.getAll(
'SELECT $columnFileID FROM $tableName WHERE $columnModel = ?',
[modelToInt(model)!],
);
if (results.isEmpty) {
return <int>{};
}
return results.map((e) => e[columnFileID] as int).toSet();
}
Future<void> put(Embedding embedding) async {
final db = await _database;
await db.execute(

View file

@@ -3,16 +3,60 @@ import 'dart:io';
import 'package:path/path.dart';
import 'package:path_provider/path_provider.dart';
import "package:photos/module/upload/model/multipart.dart";
import 'package:sqflite/sqflite.dart';
import "package:sqflite_migration/sqflite_migration.dart";
class UploadLocksDB {
static const _databaseName = "ente.upload_locks.db";
static const _databaseVersion = 1;
static const _table = "upload_locks";
static const _columnID = "id";
static const _columnOwner = "owner";
static const _columnTime = "time";
static const _uploadLocksTable = (
table: "upload_locks",
columnID: "id",
columnOwner: "owner",
columnTime: "time",
);
static const _trackUploadTable = (
table: "track_uploads",
columnID: "id",
columnLocalID: "local_id",
columnFileHash: "file_hash",
columnCollectionID: "collection_id",
columnEncryptedFileName: "encrypted_file_name",
columnEncryptedFileSize: "encrypted_file_size",
columnEncryptedFileKey: "encrypted_file_key",
columnFileEncryptionNonce: "file_encryption_nonce",
columnKeyEncryptionNonce: "key_encryption_nonce",
columnObjectKey: "object_key",
columnCompleteUrl: "complete_url",
columnStatus: "status",
columnPartSize: "part_size",
columnLastAttemptedAt: "last_attempted_at",
columnCreatedAt: "created_at",
);
static const _partsTable = (
table: "upload_parts",
columnObjectKey: "object_key",
columnPartNumber: "part_number",
columnPartUrl: "part_url",
columnPartETag: "part_etag",
columnPartStatus: "part_status",
);
static final initializationScript = [
..._createUploadLocksTable(),
];
static final migrationScripts = [
..._createTrackUploadsTable(),
];
final dbConfig = MigrationConfig(
initializationScript: initializationScript,
migrationScripts: migrationScripts,
);
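// With sqflite_migration, the schema version is inferred from the number of
// scripts: a fresh install runs initializationScript followed by all
// migrationScripts, while an existing database runs only the migrations it
// has not applied yet.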
UploadLocksDB._privateConstructor();
static final UploadLocksDB instance = UploadLocksDB._privateConstructor();
@ -27,44 +71,82 @@ class UploadLocksDB {
final Directory documentsDirectory =
await getApplicationDocumentsDirectory();
final String path = join(documentsDirectory.path, _databaseName);
return await openDatabase(
path,
version: _databaseVersion,
onCreate: _onCreate,
);
return await openDatabaseWithMigration(path, dbConfig);
}
Future _onCreate(Database db, int version) async {
await db.execute(
static List<String> _createUploadLocksTable() {
return [
'''
CREATE TABLE $_table (
$_columnID TEXT PRIMARY KEY NOT NULL,
$_columnOwner TEXT NOT NULL,
$_columnTime TEXT NOT NULL
CREATE TABLE ${_uploadLocksTable.table} (
${_uploadLocksTable.columnID} TEXT PRIMARY KEY NOT NULL,
${_uploadLocksTable.columnOwner} TEXT NOT NULL,
${_uploadLocksTable.columnTime} TEXT NOT NULL
)
''',
);
];
}
static List<String> _createTrackUploadsTable() {
return [
'''
CREATE TABLE IF NOT EXISTS ${_trackUploadTable.table} (
${_trackUploadTable.columnID} INTEGER PRIMARY KEY,
${_trackUploadTable.columnLocalID} TEXT NOT NULL,
${_trackUploadTable.columnFileHash} TEXT NOT NULL,
${_trackUploadTable.columnCollectionID} INTEGER NOT NULL,
${_trackUploadTable.columnEncryptedFileName} TEXT NOT NULL,
${_trackUploadTable.columnEncryptedFileSize} INTEGER NOT NULL,
${_trackUploadTable.columnEncryptedFileKey} TEXT NOT NULL,
${_trackUploadTable.columnFileEncryptionNonce} TEXT NOT NULL,
${_trackUploadTable.columnKeyEncryptionNonce} TEXT NOT NULL,
${_trackUploadTable.columnObjectKey} TEXT NOT NULL,
${_trackUploadTable.columnCompleteUrl} TEXT NOT NULL,
${_trackUploadTable.columnStatus} TEXT DEFAULT '${MultipartStatus.pending.name}' NOT NULL,
${_trackUploadTable.columnPartSize} INTEGER NOT NULL,
${_trackUploadTable.columnLastAttemptedAt} INTEGER NOT NULL,
${_trackUploadTable.columnCreatedAt} INTEGER DEFAULT CURRENT_TIMESTAMP NOT NULL
)
''',
'''
CREATE TABLE IF NOT EXISTS ${_partsTable.table} (
${_partsTable.columnObjectKey} TEXT NOT NULL REFERENCES ${_trackUploadTable.table}(${_trackUploadTable.columnObjectKey}) ON DELETE CASCADE,
${_partsTable.columnPartNumber} INTEGER NOT NULL,
${_partsTable.columnPartUrl} TEXT NOT NULL,
${_partsTable.columnPartETag} TEXT,
${_partsTable.columnPartStatus} TEXT NOT NULL,
PRIMARY KEY (${_partsTable.columnObjectKey}, ${_partsTable.columnPartNumber})
)
''',
];
}
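// upload_parts rows reference track_uploads(object_key) with ON DELETE
// CASCADE, so deleting a tracked upload also removes its part rows,
// provided foreign-key enforcement is enabled on the connection (SQLite
// leaves it off by default).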
Future<void> clearTable() async {
final db = await instance.database;
await db.delete(_table);
await db.delete(_uploadLocksTable.table);
await db.delete(_trackUploadTable.table);
await db.delete(_partsTable.table);
}
Future<void> acquireLock(String id, String owner, int time) async {
final db = await instance.database;
final row = <String, dynamic>{};
row[_columnID] = id;
row[_columnOwner] = owner;
row[_columnTime] = time;
await db.insert(_table, row, conflictAlgorithm: ConflictAlgorithm.fail);
row[_uploadLocksTable.columnID] = id;
row[_uploadLocksTable.columnOwner] = owner;
row[_uploadLocksTable.columnTime] = time;
await db.insert(
_uploadLocksTable.table,
row,
conflictAlgorithm: ConflictAlgorithm.fail,
);
}
Future<bool> isLocked(String id, String owner) async {
final db = await instance.database;
final rows = await db.query(
_table,
where: '$_columnID = ? AND $_columnOwner = ?',
_uploadLocksTable.table,
where:
'${_uploadLocksTable.columnID} = ? AND ${_uploadLocksTable.columnOwner} = ?',
whereArgs: [id, owner],
);
return rows.length == 1;
@ -73,8 +155,9 @@ class UploadLocksDB {
Future<int> releaseLock(String id, String owner) async {
final db = await instance.database;
return db.delete(
_table,
where: '$_columnID = ? AND $_columnOwner = ?',
_uploadLocksTable.table,
where:
'${_uploadLocksTable.columnID} = ? AND ${_uploadLocksTable.columnOwner} = ?',
whereArgs: [id, owner],
);
}
@ -82,8 +165,9 @@ class UploadLocksDB {
Future<int> releaseLocksAcquiredByOwnerBefore(String owner, int time) async {
final db = await instance.database;
return db.delete(
_table,
where: '$_columnOwner = ? AND $_columnTime < ?',
_uploadLocksTable.table,
where:
'${_uploadLocksTable.columnOwner} = ? AND ${_uploadLocksTable.columnTime} < ?',
whereArgs: [owner, time],
);
}
@ -91,9 +175,251 @@ class UploadLocksDB {
Future<int> releaseAllLocksAcquiredBefore(int time) async {
final db = await instance.database;
return db.delete(
_table,
where: '$_columnTime < ?',
_uploadLocksTable.table,
where: '${_uploadLocksTable.columnTime} < ?',
whereArgs: [time],
);
}
Future<({String encryptedFileKey, String fileNonce, String keyNonce})>
getFileEncryptionData(
String localId,
String fileHash,
int collectionID,
) async {
final db = await instance.database;
final rows = await db.query(
_trackUploadTable.table,
where: '${_trackUploadTable.columnLocalID} = ?'
' AND ${_trackUploadTable.columnFileHash} = ?'
' AND ${_trackUploadTable.columnCollectionID} = ?',
whereArgs: [localId, fileHash, collectionID],
);
if (rows.isEmpty) {
throw Exception("No encryption data found for $localId and $fileHash");
}
final row = rows.first;
return (
encryptedFileKey: row[_trackUploadTable.columnEncryptedFileKey] as String,
fileNonce: row[_trackUploadTable.columnFileEncryptionNonce] as String,
keyNonce: row[_trackUploadTable.columnKeyEncryptionNonce] as String,
);
}
Future<void> updateLastAttempted(
String localId,
String fileHash,
int collectionID,
) async {
final db = await instance.database;
await db.update(
_trackUploadTable.table,
{
_trackUploadTable.columnLastAttemptedAt:
DateTime.now().millisecondsSinceEpoch,
},
where: '${_trackUploadTable.columnLocalID} = ?'
' AND ${_trackUploadTable.columnFileHash} = ?'
' AND ${_trackUploadTable.columnCollectionID} = ?',
whereArgs: [
localId,
fileHash,
collectionID,
],
);
}
Future<MultipartInfo> getCachedLinks(
String localId,
String fileHash,
int collectionID,
) async {
final db = await instance.database;
final rows = await db.query(
_trackUploadTable.table,
where: '${_trackUploadTable.columnLocalID} = ?'
' AND ${_trackUploadTable.columnFileHash} = ?'
' AND ${_trackUploadTable.columnCollectionID} = ?',
whereArgs: [localId, fileHash, collectionID],
);
if (rows.isEmpty) {
throw Exception("No cached links found for $localId and $fileHash");
}
final row = rows.first;
final objectKey = row[_trackUploadTable.columnObjectKey] as String;
final partsStatus = await db.query(
_partsTable.table,
where: '${_partsTable.columnObjectKey} = ?',
whereArgs: [objectKey],
);
final List<bool> partUploadStatus = [];
final List<String> partsURLs = List.generate(
partsStatus.length,
(index) => "",
);
final Map<int, String> partETags = {};
for (final part in partsStatus) {
final partNumber = part[_partsTable.columnPartNumber] as int;
final partUrl = part[_partsTable.columnPartUrl] as String;
final partStatus = part[_partsTable.columnPartStatus] as String;
partsURLs[partNumber] = partUrl;
if (part[_partsTable.columnPartETag] != null) {
partETags[partNumber] = part[_partsTable.columnPartETag] as String;
}
partUploadStatus.add(partStatus == "uploaded");
}
final urls = MultipartUploadURLs(
objectKey: objectKey,
completeURL: row[_trackUploadTable.columnCompleteUrl] as String,
partsURLs: partsURLs,
);
return MultipartInfo(
urls: urls,
status: MultipartStatus.values
.byName(row[_trackUploadTable.columnStatus] as String),
partUploadStatus: partUploadStatus,
partETags: partETags,
partSize: row[_trackUploadTable.columnPartSize] as int,
);
}
Future<void> createTrackUploadsEntry(
String localId,
String fileHash,
int collectionID,
MultipartUploadURLs urls,
String encryptedFileName,
int fileSize,
String fileKey,
String fileNonce,
String keyNonce, {
required int partSize,
}) async {
final db = await UploadLocksDB.instance.database;
final objectKey = urls.objectKey;
await db.insert(
_trackUploadTable.table,
{
_trackUploadTable.columnLocalID: localId,
_trackUploadTable.columnFileHash: fileHash,
_trackUploadTable.columnCollectionID: collectionID,
_trackUploadTable.columnObjectKey: objectKey,
_trackUploadTable.columnCompleteUrl: urls.completeURL,
_trackUploadTable.columnEncryptedFileName: encryptedFileName,
_trackUploadTable.columnEncryptedFileSize: fileSize,
_trackUploadTable.columnEncryptedFileKey: fileKey,
_trackUploadTable.columnFileEncryptionNonce: fileNonce,
_trackUploadTable.columnKeyEncryptionNonce: keyNonce,
_trackUploadTable.columnPartSize: partSize,
_trackUploadTable.columnLastAttemptedAt:
DateTime.now().millisecondsSinceEpoch,
},
);
final partsURLs = urls.partsURLs;
final partsLength = partsURLs.length;
for (int i = 0; i < partsLength; i++) {
await db.insert(
_partsTable.table,
{
_partsTable.columnObjectKey: objectKey,
_partsTable.columnPartNumber: i,
_partsTable.columnPartUrl: partsURLs[i],
_partsTable.columnPartStatus: PartStatus.pending.name,
},
);
}
}
Future<void> updatePartStatus(
String objectKey,
int partNumber,
String etag,
) async {
final db = await instance.database;
await db.update(
_partsTable.table,
{
_partsTable.columnPartStatus: PartStatus.uploaded.name,
_partsTable.columnPartETag: etag,
},
where:
'${_partsTable.columnObjectKey} = ? AND ${_partsTable.columnPartNumber} = ?',
whereArgs: [objectKey, partNumber],
);
}
Future<void> updateTrackUploadStatus(
String objectKey,
MultipartStatus status,
) async {
final db = await instance.database;
await db.update(
_trackUploadTable.table,
{
_trackUploadTable.columnStatus: status.name,
},
where: '${_trackUploadTable.columnObjectKey} = ?',
whereArgs: [objectKey],
);
}
Future<int> deleteMultipartTrack(
String localId,
) async {
final db = await instance.database;
return await db.delete(
_trackUploadTable.table,
where: '${_trackUploadTable.columnLocalID} = ?',
whereArgs: [localId],
);
}
// getFileNameToLastAttemptedAtMap returns a map from encrypted file name to the last attempted-at timestamp, in milliseconds since epoch.
Future<Map<String, int>> getFileNameToLastAttemptedAtMap() {
return instance.database.then((db) async {
final rows = await db.query(
_trackUploadTable.table,
columns: [
_trackUploadTable.columnEncryptedFileName,
_trackUploadTable.columnLastAttemptedAt,
],
);
final map = <String, int>{};
for (final row in rows) {
map[row[_trackUploadTable.columnEncryptedFileName] as String] =
row[_trackUploadTable.columnLastAttemptedAt] as int;
}
return map;
});
}
Future<String?> getEncryptedFileName(
String localId,
String fileHash,
int collectionID,
) {
return instance.database.then((db) async {
final rows = await db.query(
_trackUploadTable.table,
where: '${_trackUploadTable.columnLocalID} = ?'
' AND ${_trackUploadTable.columnFileHash} = ?'
' AND ${_trackUploadTable.columnCollectionID} = ?',
whereArgs: [localId, fileHash, collectionID],
);
if (rows.isEmpty) {
return null;
}
final row = rows.first;
return row[_trackUploadTable.columnEncryptedFileName] as String;
});
}
}
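
Taken together, a resumable multipart upload could drive this table roughly as follows. A minimal sketch, assuming hypothetical uploadPart and completeUpload helpers and an assumed MultipartStatus.completed enum value; only the UploadLocksDB calls come from the class above:

Future<void> resumeMultipartUpload(
  String localId,
  String fileHash,
  int collectionID,
) async {
  final db = UploadLocksDB.instance;
  // Recover the presigned URLs and per-part progress from a previous attempt.
  final info = await db.getCachedLinks(localId, fileHash, collectionID);
  final urls = info.urls;
  for (int i = 0; i < urls.partsURLs.length; i++) {
    // Skip parts that an earlier attempt already uploaded.
    if (i < info.partUploadStatus.length && info.partUploadStatus[i]) continue;
    final etag = await uploadPart(urls.partsURLs[i], i, info.partSize);
    await db.updatePartStatus(urls.objectKey, i, etag);
  }
  await completeUpload(urls.completeURL);
  await db.updateTrackUploadStatus(urls.objectKey, MultipartStatus.completed);
  await db.deleteMultipartTrack(localId);
}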

View file

@ -1,3 +1,5 @@
import "package:photos/events/event.dart";
class EmbeddingUpdatedEvent extends Event {}
class EmbeddingCacheUpdatedEvent extends Event {}

View file

@ -132,7 +132,7 @@ class MessageLookup extends MessageLookupByLibrary {
"Please talk to ${providerName} support if you were charged";
static String m38(endDate) =>
"Free trial valid till ${endDate}.\nYou can choose a paid plan afterwards.";
"Free trial valid till ${endDate}.\nYou can purchase a paid plan afterwards.";
static String m39(toEmail) => "Please email us at ${toEmail}";

View file

@ -368,6 +368,14 @@ class MessageLookup extends MessageLookupByLibrary {
"Verificatie mislukt, probeer het opnieuw"),
"authenticationSuccessful":
MessageLookupByLibrary.simpleMessage("Verificatie geslaagd!"),
"autoCastDialogBody": MessageLookupByLibrary.simpleMessage(
"Je zult de beschikbare Cast apparaten hier zien."),
"autoCastiOSPermission": MessageLookupByLibrary.simpleMessage(
"Zorg ervoor dat lokale netwerkrechten zijn ingeschakeld voor de Ente Photos app, in Instellingen."),
"autoPair":
MessageLookupByLibrary.simpleMessage("Automatisch koppelen"),
"autoPairDesc": MessageLookupByLibrary.simpleMessage(
"Automatisch koppelen werkt alleen met apparaten die Chromecast ondersteunen."),
"available": MessageLookupByLibrary.simpleMessage("Beschikbaar"),
"backedUpFolders":
MessageLookupByLibrary.simpleMessage("Back-up mappen"),
@ -399,6 +407,10 @@ class MessageLookup extends MessageLookupByLibrary {
"cannotAddMorePhotosAfterBecomingViewer": m9,
"cannotDeleteSharedFiles": MessageLookupByLibrary.simpleMessage(
"Kan gedeelde bestanden niet verwijderen"),
"castIPMismatchBody": MessageLookupByLibrary.simpleMessage(
"Zorg ervoor dat je op hetzelfde netwerk zit als de tv."),
"castIPMismatchTitle":
MessageLookupByLibrary.simpleMessage("Album casten mislukt"),
"castInstruction": MessageLookupByLibrary.simpleMessage(
"Bezoek cast.ente.io op het apparaat dat u wilt koppelen.\n\nVoer de code hieronder in om het album op uw TV af te spelen."),
"centerPoint": MessageLookupByLibrary.simpleMessage("Middelpunt"),
@ -473,6 +485,8 @@ class MessageLookup extends MessageLookupByLibrary {
MessageLookupByLibrary.simpleMessage("Bevestig herstelsleutel"),
"confirmYourRecoveryKey":
MessageLookupByLibrary.simpleMessage("Bevestig herstelsleutel"),
"connectToDevice": MessageLookupByLibrary.simpleMessage(
"Verbinding maken met apparaat"),
"contactFamilyAdmin": m12,
"contactSupport":
MessageLookupByLibrary.simpleMessage("Contacteer klantenservice"),
@ -750,6 +764,8 @@ class MessageLookup extends MessageLookupByLibrary {
"filesBackedUpInAlbum": m23,
"filesDeleted":
MessageLookupByLibrary.simpleMessage("Bestanden verwijderd"),
"filesSavedToGallery": MessageLookupByLibrary.simpleMessage(
"Bestand opgeslagen in galerij"),
"flip": MessageLookupByLibrary.simpleMessage("Omdraaien"),
"forYourMemories":
MessageLookupByLibrary.simpleMessage("voor uw herinneringen"),
@ -938,6 +954,8 @@ class MessageLookup extends MessageLookupByLibrary {
"manageParticipants": MessageLookupByLibrary.simpleMessage("Beheren"),
"manageSubscription":
MessageLookupByLibrary.simpleMessage("Abonnement beheren"),
"manualPairDesc": MessageLookupByLibrary.simpleMessage(
"Koppelen met de PIN werkt met elk scherm waarop je jouw album wilt zien."),
"map": MessageLookupByLibrary.simpleMessage("Kaart"),
"maps": MessageLookupByLibrary.simpleMessage("Kaarten"),
"mastodon": MessageLookupByLibrary.simpleMessage("Mastodon"),
@ -974,6 +992,8 @@ class MessageLookup extends MessageLookupByLibrary {
"no": MessageLookupByLibrary.simpleMessage("Nee"),
"noAlbumsSharedByYouYet": MessageLookupByLibrary.simpleMessage(
"Nog geen albums gedeeld door jou"),
"noDeviceFound":
MessageLookupByLibrary.simpleMessage("Geen apparaat gevonden"),
"noDeviceLimit": MessageLookupByLibrary.simpleMessage("Geen"),
"noDeviceThatCanBeDeleted": MessageLookupByLibrary.simpleMessage(
"Je hebt geen bestanden op dit apparaat die verwijderd kunnen worden"),
@ -1023,6 +1043,9 @@ class MessageLookup extends MessageLookupByLibrary {
"orPickAnExistingOne":
MessageLookupByLibrary.simpleMessage("Of kies een bestaande"),
"pair": MessageLookupByLibrary.simpleMessage("Koppelen"),
"pairWithPin": MessageLookupByLibrary.simpleMessage("Koppelen met PIN"),
"pairingComplete":
MessageLookupByLibrary.simpleMessage("Koppeling voltooid"),
"passkey": MessageLookupByLibrary.simpleMessage("Passkey"),
"passkeyAuthTitle":
MessageLookupByLibrary.simpleMessage("Passkey verificatie"),
@ -1383,6 +1406,10 @@ class MessageLookup extends MessageLookupByLibrary {
"sparkleSuccess": MessageLookupByLibrary.simpleMessage("✨ Succes"),
"startBackup": MessageLookupByLibrary.simpleMessage("Back-up starten"),
"status": MessageLookupByLibrary.simpleMessage("Status"),
"stopCastingBody":
MessageLookupByLibrary.simpleMessage("Wil je stoppen met casten?"),
"stopCastingTitle":
MessageLookupByLibrary.simpleMessage("Casten stoppen"),
"storage": MessageLookupByLibrary.simpleMessage("Opslagruimte"),
"storageBreakupFamily": MessageLookupByLibrary.simpleMessage("Familie"),
"storageBreakupYou": MessageLookupByLibrary.simpleMessage("Jij"),

Some files were not shown because too many files have changed in this diff.