Merge branch 'main' into sepa-fix

This commit is contained in:
vishnukvmd 2024-03-20 14:08:36 +05:30
commit 628565cfaf
45 changed files with 1113 additions and 1773 deletions

View file

@ -1 +0,0 @@
ui/*

View file

@ -1,55 +0,0 @@
{
"root": true,
"env": {
"browser": true,
"es2021": true,
"node": true
},
"extends": [
"eslint:recommended",
"plugin:@typescript-eslint/eslint-recommended",
"google",
"prettier"
],
"parser": "@typescript-eslint/parser",
"parserOptions": {
"ecmaFeatures": {
"jsx": true
},
"ecmaVersion": 12,
"sourceType": "module"
},
"plugins": ["@typescript-eslint"],
"rules": {
"indent": "off",
"class-methods-use-this": "off",
"react/prop-types": "off",
"react/display-name": "off",
"react/no-unescaped-entities": "off",
"no-unused-vars": "off",
"@typescript-eslint/no-unused-vars": ["error"],
"require-jsdoc": "off",
"valid-jsdoc": "off",
"max-len": "off",
"new-cap": "off",
"no-invalid-this": "off",
"eqeqeq": "error",
"object-curly-spacing": ["error", "always"],
"space-before-function-paren": "off",
"operator-linebreak": [
"error",
"after",
{ "overrides": { "?": "before", ":": "before" } }
]
},
"settings": {
"react": {
"version": "detect"
}
},
"globals": {
"JSX": "readonly",
"NodeJS": "readonly",
"ReadableStreamDefaultController": "readonly"
}
}

desktop/.eslintrc.js Normal file
View file

@ -0,0 +1,31 @@
/* eslint-env node */
module.exports = {
extends: [
"eslint:recommended",
"plugin:@typescript-eslint/eslint-recommended",
/* What we really want eventually */
// "plugin:@typescript-eslint/strict-type-checked",
// "plugin:@typescript-eslint/stylistic-type-checked",
],
/* Temporarily disable some rules
Enhancement: Remove me */
rules: {
"no-unused-vars": "off",
},
/* Temporarily add a global
Enhancement: Remove me */
globals: {
NodeJS: "readonly",
},
plugins: ["@typescript-eslint"],
parser: "@typescript-eslint/parser",
parserOptions: {
project: true,
},
root: true,
ignorePatterns: [".eslintrc.js", "app", "out", "dist"],
env: {
es2022: true,
node: true,
},
};

desktop/.gitignore vendored
View file

@ -14,7 +14,8 @@ node_modules/
# tsc transpiles src/**/*.ts and emits the generated JS into app
app/
# out is a symlink to the photos web app's dir
# out is a symlink to the photos web app's out dir, which contains the built
# photos app.
out
# electron-builder

View file

@ -1,5 +1,6 @@
{
"tabWidth": 4,
"proseWrap": "always",
"plugins": [
"prettier-plugin-organize-imports",
"prettier-plugin-packagejson"

View file

@ -131,7 +131,8 @@
### Photo Editor
Check out our [blog](https://ente.io/blog/introducing-web-desktop-photo-editor/) to know more about its features and functionality.
Check out our [blog](https://ente.io/blog/introducing-web-desktop-photo-editor/)
to know more about its features and functionality.
## v1.6.47
@ -146,15 +147,19 @@ Check out our [blog](https://ente.io/blog/introducing-web-desktop-photo-editor/)
### Bug Fixes
- Fixes OOM crashes during file upload [#1379](https://github.com/ente-io/photos-web/pull/1379)
- Fixes OOM crashes during file upload
[#1379](https://github.com/ente-io/photos-web/pull/1379)
## v1.6.45
### Bug Fixes
- Fixed app keeps reloading issue [#235](https://github.com/ente-io/photos-desktop/pull/235)
- Fixed dng and arw preview issue [#1378](https://github.com/ente-io/photos-web/pull/1378)
- Added a view crash report option (help menu) for users to share the electron crash report locally
- Fixed app keeps reloading issue
[#235](https://github.com/ente-io/photos-desktop/pull/235)
- Fixed dng and arw preview issue
[#1378](https://github.com/ente-io/photos-web/pull/1378)
- Added a view crash report option (help menu) for users to share the electron
crash report locally
## v1.6.44
@ -166,23 +171,28 @@ Check out our [blog](https://ente.io/blog/introducing-web-desktop-photo-editor/)
- #### Check for update and changelog option
Added options to manually check for updates and to view the changelog via the app menubar
Added options to manually check for updates and to view the changelog via the
app menubar
- #### Opt out of crash reporting
Added an option to opt out of crash reporting; it can be accessed from Settings -> Preferences -> Disable crash reporting
Added an option to opt out of crash reporting; it can be accessed from Settings
-> Preferences -> Disable crash reporting
- #### Type search
Added a new search option to search files based on file type, i.e. image, video, live-photo.
Added a new search option to search files based on file type, i.e. image,
video, live-photo.
- #### Manual Convert Button
In case the video is not playable, there is now a convert button which can be used to trigger conversion of the video to a supported format.
In case the video is not playable, there is now a convert button which can be
used to trigger conversion of the video to a supported format.
- #### File Download Progress
The file loader now also shows the exact percentage download progress, instead of just a simple loader.
The file loader now also shows the exact percentage download progress,
instead of just a simple loader.
- #### Bug fixes & other enhancements
@ -198,16 +208,19 @@ Check out our [blog](https://ente.io/blog/introducing-web-desktop-photo-editor/)
- #### Email verification
We have now made email verification optional, so you can sign in with just your email address and password, without waiting for a verification code.
We have now made email verification optional, so you can sign in with just
your email address and password, without waiting for a verification code.
You can opt in / out of email verification from Settings > Security.
- #### Download Album
You can now choose the download location for downloading albums. Along with that, we have also added a progress bar for album download.
You can now choose the download location for downloading albums. Along with
that, we have also added a progress bar for album download.
- #### Bug fixes & other enhancements
We have squashed a few pesky bugs that were reported by our community
If you would like to help us improve ente, come join the party @ ente.io/community!
If you would like to help us improve ente, come join the party @
ente.io/community!

View file

@ -2,14 +2,20 @@
The sweetness of Ente Photos, right on your computer. Linux, Windows and macOS.
You can [**download** a pre-built binary from
releases](https://github.com/ente-io/photos-desktop/releases/latest).
You can
[**download** a pre-built binary from releases](https://github.com/ente-io/photos-desktop/releases/latest).
To know more about Ente, see [our main README](../README.md) or visit
[ente.io](https://ente.io).
## Building from source
Fetch submodules
```sh
git submodule update --init --recursive
```
Install dependencies
```sh

View file

@ -1,30 +0,0 @@
<!doctype html>
<html lang="en">
<head>
<meta charset="UTF-8" />
<meta http-equiv="X-UA-Compatible" content="IE=edge" />
<meta name="viewport" content="width=device-width, initial-scale=1.0" />
<title>ente Photos</title>
</head>
<body style="background-color: black">
<div
style="
height: 95vh;
width: 96vw;
display: grid;
place-items: center;
color: white;
"
>
<div>
<div style="margin-bottom: 10px">
Site unreachable, please try again later
</div>
<button onClick="window[`ElectronAPIs`].reloadWindow()">
Reload
</button>
</div>
</div>
</body>
</html>

View file

@ -1,14 +1,61 @@
# Dependencies
See [web/docs/dependencies.md](../../web/docs/dependencies.md) for general web
specific dependencies. See [electron.md](electron.md) for our main dependency,
Electron. The rest of this document describes the remaining, desktop specific
dependencies that are used by the Photos desktop app.
## Electron
## Electron related
[Electron](https://www.electronjs.org) is a cross-platform (Linux, Windows,
macOS) way for creating desktop apps using TypeScript.
Electron embeds Chromium and Node.js in the generated app's binary. The
generated app thus consists of two separate processes - the _main_ process, and
a _renderer_ process.
- The _main_ process runs the embedded node. This process can deal with the
host OS - it is conceptually like a `node` repl running on your machine. In
our case, the TypeScript code (in the `src/` directory) gets transpiled by
`tsc` into JavaScript in the `build/app/` directory, which gets bundled in
the generated app's binary and is loaded by the node (main) process when the
app starts.
- The _renderer_ process is a regular web app that gets loaded into the
embedded Chromium. When the main process starts, it creates a new "window"
that shows this embedded Chromium. In our case, we build and bundle a static
export of the [Photos web app](../web/README.md) in the generated app. This
gets loaded by the embedded Chromium at runtime, acting as the app's UI.
There is also a third environment that gets temporarily created:
- The [preload script](../src/preload.ts) acts as a gateway between the _main_
and the _renderer_ process. It runs in its own isolated environment.
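To make this split concrete, here is a minimal, illustrative sketch (not our
actual code; the file names and window contents are placeholders) of how a main
process creates the window, and how a preload script exposes a narrow API to
the renderer:

```ts
// main.ts - runs in the embedded Node.js (main) process.
import { app, BrowserWindow } from "electron";
import path from "path";

app.whenReady().then(() => {
    const mainWindow = new BrowserWindow({
        webPreferences: {
            // The preload script is the only bridge into the renderer.
            preload: path.join(__dirname, "preload.js"),
        },
    });
    // In our case, what gets loaded is the static export of the Photos web app.
    mainWindow.loadFile("out/index.html");
});
```

```ts
// preload.ts - runs in its own isolated context before the web contents load.
import { contextBridge, ipcRenderer } from "electron";

contextBridge.exposeInMainWorld("ElectronAPIs", {
    // The renderer can call `window.ElectronAPIs.getAppVersion()`, which gets
    // forwarded over IPC to a handler registered in the main process.
    getAppVersion: () => ipcRenderer.invoke("get-app-version"),
});
```

The real versions of these live under `src/` and expose a much larger surface,
but the shape is the same.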
### electron-builder
[Electron Builder](https://www.electron.build) is used for packaging the app for
distribution.
During the build it uses
[electron-builder-notarize](https://github.com/karaggeorge/electron-builder-notarize)
to notarize the macOS binary.
### next-electron-server
This spins up a server for serving files using a protocol handler inside our
Electron process. This allows us to directly use the output produced by `next
build` for loading into our renderer process.
Electron process. This allows us to directly use the output produced by
`next build` for loading into our renderer process.
### electron-reload
Reloads contents of the BrowserWindow (renderer process) when source files are
changed.
* TODO (MR): Do we need this? Isn't the next-electron-server HMR covering this?
## DX
See [web/docs/dependencies#DX](../../web/docs/dependencies.md#dx) for the
general development experience related dependencies like TypeScript etc., which
are similar to those in the web code.
Some extra ones specific to the code here are:
* [concurrently](https://github.com/open-cli-tools/concurrently) for spawning
parallel tasks when we do `yarn dev`.

View file

@ -1,4 +1,41 @@
# Development tips
# Development
- `yarn build:quick` is a variant of `yarn build` that uses the
`--config.compression=store` flag to (slightly) speed up electron-builder.
## Yarn commands
### yarn dev
Launch the app in development mode
- Runs a development server for the renderer, with HMR.
- Starts tsc in watch mode to recompile the JS files used by the main process.
- Starts the main process, reloading it on changes to the TS files in
`src/`.
### yarn build
Build a binary for your current platform.
Note that our actual releases use a
[GitHub workflow](../.github/workflows/desktop-release.yml) that is similar to
this, except it builds binaries for all the supported OSes and uses production
signing credentials.
During development, you might find `yarn build:quick` helpful. It is a variant
of `yarn build` that omits some steps to build a binary quicker, something that
can be useful during development.
### postinstall
When using native node modules (those written in C/C++), we need to ensure they
are built against `electron`'s packaged `node` version. We use
[electron-builder](https://www.electron.build/cli)'s `install-app-deps` command
to rebuild those modules automatically after each `yarn install` by invoking it
as the `postinstall` step in our package.json.
### lint and lint-fix
Use `yarn lint` to check that your code formatting is as expected, and that
there are no linter errors. Use `yarn lint-fix` to try and automatically fix the
issues.

View file

@ -1,21 +0,0 @@
# Electron
[Electron](https://www.electronjs.org) is a cross-platform (Linux, Windows,
macOS) way for creating desktop apps using TypeScript.
Electron embeds Chromium and Node.js in the generated app's binary. The
generated app thus consists of two separate processes - the _main_ process, and
a _renderer_ process.
- The _main_ process runs the embedded node. This process can deal with the
host OS - it is conceptually like a `node` repl running on your machine. In our
case, the TypeScript code (in the `src/` directory) gets transpiled by `tsc`
into JavaScript in the `build/app/` directory, which gets bundled in the
generated app's binary and is loaded by the node (main) process when the app
starts.
- The _renderer_ process is a regular web app that gets loaded into the embedded
Chromium. When the main process starts, it creates a new "window" that shows
this embedded Chromium. In our case, we build and bundle a static export of
the [Photos web app](../web/README.md) in the generated app. This gets loaded
by the embedded Chromium at runtime, acting as the app's UI.

View file

@ -42,9 +42,9 @@ To wrap up, we also need to merge back these changes into main. So for that,
The GitHub Action runs on Windows, Linux and macOS. It produces the artifacts
defined in the `build` value in `package.json`.
* Windows - An NSIS installer.
* Linux - An AppImage, and 3 other packages (`.rpm`, `.deb`, `.pacman`)
* macOS - A universal DMG
- Windows - An NSIS installer.
- Linux - An AppImage, and 3 other packages (`.rpm`, `.deb`, `.pacman`)
- macOS - A universal DMG
Additionally, the GitHub action notarizes the macOS DMG. For this it needs
credentials provided via GitHub secrets.
@ -70,19 +70,19 @@ If everything goes well, we'll have a release on GitHub, and the corresponding
source maps for the renderer process uploaded to Sentry. There isn't anything
else to do:
* The website automatically redirects to the latest release on GitHub when
- The website automatically redirects to the latest release on GitHub when
people try to download.
* The file formats with support auto update (Windows `exe`, the Linux AppImage
- The file formats with support auto update (Windows `exe`, the Linux AppImage
and the macOS DMG) also check the latest GitHub release automatically to
download and apply the update (the rest of the formats don't support auto
updates).
* We're not putting the desktop app in other stores currently. It is available
as a `brew cask`, but we only had to open a PR to add the initial formula, now
their maintainers automatically bump the SHA, version number and the (derived
from the version) URL in the formula when their tools notice a new release on
our GitHub.
- We're not putting the desktop app in other stores currently. It is available
as a `brew cask`, but we only had to open a PR to add the initial formula;
now their maintainers automatically bump the SHA, version number and the
(derived from the version) URL in the formula when their tools notice a new
release on our GitHub.
We can also publish the draft releases by checking the "pre-release" option.
Such releases don't cause any of the channels (our website, or the desktop app

View file

@ -0,0 +1,43 @@
appId: io.ente.bhari-frame
artifactName: ${productName}-${version}-${arch}.${ext}
nsis:
deleteAppDataOnUninstall: true
linux:
target:
- target: AppImage
arch:
- x64
- arm64
- target: deb
arch:
- x64
- arm64
- target: rpm
arch:
- x64
- arm64
- target: pacman
arch:
- x64
- arm64
icon: ./resources/icon.icns
category: Photography
mac:
target:
target: default
arch:
- universal
category: public.app-category.photography
hardenedRuntime: true
x64ArchFiles: Contents/Resources/ggmlclip-mac
afterSign: electron-builder-notarize
asarUnpack:
- node_modules/ffmpeg-static/bin/${os}/${arch}/ffmpeg
- node_modules/ffmpeg-static/index.js
- node_modules/ffmpeg-static/package.json
extraFiles:
- from: build
to: resources
files:
- app/**/*
- out

View file

@ -8,7 +8,7 @@
"scripts": {
"build": "yarn build-renderer && yarn build-main",
"build-main": "tsc && electron-builder",
"build-main:quick": "tsc && electron-builder --config.compression=store",
"build-main:quick": "tsc && electron-builder --dir --config.compression=store --config.mac.identity=null",
"build-renderer": "cd ../web && yarn install && yarn build:photos && cd ../desktop && rm -f out && ln -sf ../web/apps/photos/out",
"build:quick": "yarn build-renderer && yarn build-main:quick",
"dev": "concurrently --names 'main,rndr,tscw' \"yarn dev-main\" \"yarn dev-renderer\" \"yarn dev-main-watch\"",
@ -16,8 +16,8 @@
"dev-main-watch": "tsc --watch --preserveWatchOutput",
"dev-renderer": "cd ../web && yarn install && yarn dev:photos",
"postinstall": "electron-builder install-app-deps",
"lint": "yarn prettier --check . && eslint \"src/**/*.ts\"",
"lint-fix": "yarn prettier --write . && eslint --fix src"
"lint": "yarn prettier --check . && eslint --ext .ts src",
"lint-fix": "yarn prettier --write . && eslint --fix --ext .ts src"
},
"dependencies": {
"any-shell-escape": "^0.1.1",
@ -42,94 +42,19 @@
"@types/auto-launch": "^5.0.2",
"@types/ffmpeg-static": "^3.0.1",
"@types/get-folder-size": "^2.0.0",
"@types/node": "18.15.0",
"@types/node-fetch": "^2.6.2",
"@types/promise-fs": "^2.1.1",
"@typescript-eslint/eslint-plugin": "^5.28.0",
"@typescript-eslint/parser": "^5.28.0",
"concurrently": "^7.0.0",
"@typescript-eslint/eslint-plugin": "^7",
"@typescript-eslint/parser": "^7",
"concurrently": "^8",
"electron": "^25.8.4",
"electron-builder": "^24.6.4",
"electron-builder-notarize": "^1.2.0",
"electron-download": "^4.1.1",
"eslint": "^7.23.0",
"eslint-config-google": "^0.14.0",
"eslint-config-prettier": "^8.5.0",
"eslint": "^8",
"prettier": "^3",
"prettier-plugin-organize-imports": "^3.2",
"prettier-plugin-packagejson": "^2.4",
"typescript": "^4.2.3"
"typescript": "^5"
},
"build": {
"appId": "io.ente.bhari-frame",
"artifactName": "${productName}-${version}-${arch}.${ext}",
"nsis": {
"deleteAppDataOnUninstall": true
},
"linux": {
"target": [
{
"target": "AppImage",
"arch": [
"x64",
"arm64"
]
},
{
"target": "deb",
"arch": [
"x64",
"arm64"
]
},
{
"target": "rpm",
"arch": [
"x64",
"arm64"
]
},
{
"target": "pacman",
"arch": [
"x64",
"arm64"
]
}
],
"icon": "./resources/icon.icns",
"category": "Photography"
},
"mac": {
"target": {
"target": "default",
"arch": [
"universal"
]
},
"category": "public.app-category.photography",
"hardenedRuntime": true,
"x64ArchFiles": "Contents/Resources/ggmlclip-mac"
},
"afterSign": "electron-builder-notarize",
"asarUnpack": [
"node_modules/ffmpeg-static/bin/${os}/${arch}/ffmpeg",
"node_modules/ffmpeg-static/index.js",
"node_modules/ffmpeg-static/package.json"
],
"extraFiles": [
{
"from": "build",
"to": "resources"
}
],
"files": [
"app/**/*",
"out"
]
},
"productName": "ente",
"standard": {
"parser": "babel-eslint"
}
"productName": "ente"
}

View file

@ -5,11 +5,7 @@ import { DiskCache } from "../services/diskCache";
const ENTE_CACHE_DIR_NAME = "ente";
export const getCacheDirectory = async () => {
const customCacheDir = await getCustomCacheDirectory();
if (customCacheDir && existsSync(customCacheDir)) {
return customCacheDir;
}
const getCacheDirectory = async () => {
const defaultSystemCacheDir = await ipcRenderer.invoke("get-path", "cache");
return path.join(defaultSystemCacheDir, ENTE_CACHE_DIR_NAME);
};
@ -40,13 +36,3 @@ export async function deleteDiskCache(cacheName: string) {
return false;
}
}
export async function setCustomCacheDirectory(
directory: string,
): Promise<void> {
await ipcRenderer.invoke("set-custom-cache-directory", directory);
}
async function getCustomCacheDirectory(): Promise<string> {
return await ipcRenderer.invoke("get-custom-cache-directory");
}

View file

@ -1,54 +0,0 @@
import { ipcRenderer } from "electron";
import { writeStream } from "../services/fs";
import { Model } from "../types";
import { isExecError, parseExecError } from "../utils/error";
export async function computeImageEmbedding(
model: Model,
imageData: Uint8Array,
): Promise<Float32Array> {
let tempInputFilePath = null;
try {
tempInputFilePath = await ipcRenderer.invoke("get-temp-file-path", "");
const imageStream = new Response(imageData.buffer).body;
await writeStream(tempInputFilePath, imageStream);
const embedding = await ipcRenderer.invoke(
"compute-image-embedding",
model,
tempInputFilePath,
);
return embedding;
} catch (err) {
if (isExecError(err)) {
const parsedExecError = parseExecError(err);
throw Error(parsedExecError);
} else {
throw err;
}
} finally {
if (tempInputFilePath) {
await ipcRenderer.invoke("remove-temp-file", tempInputFilePath);
}
}
}
export async function computeTextEmbedding(
model: Model,
text: string,
): Promise<Float32Array> {
try {
const embedding = await ipcRenderer.invoke(
"compute-text-embedding",
model,
text,
);
return embedding;
} catch (err) {
if (isExecError(err)) {
const parsedExecError = parseExecError(err);
throw Error(parsedExecError);
} else {
throw err;
}
}
}

View file

@ -1,39 +1,5 @@
import { ipcRenderer } from "electron/renderer";
import { logError } from "../services/logging";
export const selectDirectory = async (): Promise<string> => {
try {
return await ipcRenderer.invoke("select-dir");
} catch (e) {
logError(e, "error while selecting root directory");
}
};
export const getAppVersion = async (): Promise<string> => {
try {
return await ipcRenderer.invoke("get-app-version");
} catch (e) {
logError(e, "failed to get release version");
throw e;
}
};
export const openDirectory = async (dirPath: string): Promise<void> => {
try {
await ipcRenderer.invoke("open-dir", dirPath);
} catch (e) {
logError(e, "error while opening directory");
throw e;
}
};
export const getPlatform = async (): Promise<"mac" | "windows" | "linux"> => {
try {
return await ipcRenderer.invoke("get-platform");
} catch (e) {
logError(e, "failed to get platform");
throw e;
}
};
export { logToDisk, openLogDirectory } from "../services/logging";

View file

@ -5,11 +5,3 @@ export async function getDirFiles(dirPath: string) {
const electronFiles = await Promise.all(files.map(getElectronFile));
return electronFiles;
}
export {
deleteFile,
deleteFolder,
isFolder,
moveFile,
readTextFile,
rename,
} from "../services/fs";

View file

@ -1,13 +1,6 @@
import { ipcRenderer } from "electron";
import { AppUpdateInfo } from "../types";
export const sendNotification = (content: string) => {
ipcRenderer.send("send-notification", content);
};
export const reloadWindow = () => {
ipcRenderer.send("reload-window");
};
export const registerUpdateEventListener = (
showUpdateDialog: (updateInfo: AppUpdateInfo) => void,
) => {
@ -23,15 +16,3 @@ export const registerForegroundEventListener = (onForeground: () => void) => {
onForeground();
});
};
export const updateAndRestart = () => {
ipcRenderer.send("update-and-restart");
};
export const skipAppUpdate = (version: string) => {
ipcRenderer.send("skip-app-update", version);
};
export const muteUpdateNotification = (version: string) => {
ipcRenderer.send("mute-update-notification", version);
};

View file

@ -19,7 +19,6 @@ import {
setupMainMenu,
setupTrayItem,
} from "./utils/main";
import { setupMainProcessStatsLogger } from "./utils/processStats";
let mainWindow: BrowserWindow;
@ -104,7 +103,6 @@ if (!gotTheLock) {
// Some APIs can only be used after this event occurs.
app.on("ready", async () => {
logSystemInfo();
setupMainProcessStatsLogger();
mainWindow = await createWindow();
const tray = setupTrayItem(mainWindow);
const watcher = initWatcher(mainWindow);

View file

@ -1,19 +1,39 @@
import {
deleteDiskCache,
getCacheDirectory,
openDiskCache,
setCustomCacheDirectory,
} from "./api/cache";
import { computeImageEmbedding, computeTextEmbedding } from "./api/clip";
import {
getAppVersion,
getPlatform,
logToDisk,
openDirectory,
openLogDirectory,
selectDirectory,
} from "./api/common";
import { clearElectronStore } from "./api/electronStore";
/**
* @file The preload script
*
* The preload script runs in a renderer process before its web contents begin
* loading. During its execution it has access to a subset of Node.js APIs
* and imports. Its purpose is to expose the relevant imports and other
* functions as an object on the DOM, so that the renderer process can invoke
* functions that live in the main (Node.js) process if needed.
*
* Note that this script cannot import other code from `src/` - conceptually it
* can be thought of as running in a separate, third, process different from
* both the main or a renderer process (technically, it runs in a BrowserWindow
* context that runs prior to the renderer process).
*
* That said, this can be split into multiple files if we wished. However,
* that'd require us setting up a bundler to package it back up into a single JS
* file that can be used at runtime.
*
* > Since enabling the sandbox disables Node.js integration in your preload
* > scripts, you can no longer use require("../my-script"). In other words,
* > your preload script needs to be a single file.
* >
* > https://www.electronjs.org/blog/breach-to-barrier
*
* Since most of this is just boilerplate code providing a bridge between the
* main and renderer, we avoid introducing another moving part into the mix and
* just keep the entire preload setup in this single file.
*/
import { contextBridge, ipcRenderer } from "electron";
import { existsSync } from "fs";
import path from "path";
import * as fs from "promise-fs";
import { Readable } from "stream";
import { deleteDiskCache, openDiskCache } from "./api/cache";
import { logToDisk, openLogDirectory } from "./api/common";
import {
checkExistsAndCreateDir,
exists,
@ -21,25 +41,12 @@ import {
saveStreamToDisk,
} from "./api/export";
import { runFFmpegCmd } from "./api/ffmpeg";
import {
deleteFile,
deleteFolder,
getDirFiles,
isFolder,
moveFile,
readTextFile,
rename,
} from "./api/fs";
import { getDirFiles } from "./api/fs";
import { convertToJPEG, generateImageThumbnail } from "./api/imageProcessor";
import { getEncryptionKey, setEncryptionKey } from "./api/safeStorage";
import {
muteUpdateNotification,
registerForegroundEventListener,
registerUpdateEventListener,
reloadWindow,
sendNotification,
skipAppUpdate,
updateAndRestart,
} from "./api/system";
import {
getElectronFilesFromGoogleZip,
@ -59,25 +66,318 @@ import {
updateWatchMappingSyncedFiles,
} from "./api/watch";
import { setupLogging } from "./utils/logging";
import {
logRendererProcessMemoryUsage,
setupRendererProcessStatsLogger,
} from "./utils/processStats";
/* Some of the code below has been duplicated to make this file self contained.
Enhancement: consider alternatives */
/* preload: duplicated logError */
export function logError(error: Error, message: string, info?: string): void {
ipcRenderer.invoke("log-error", error, message, info);
}
// -
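/* Convert a Web ReadableStream (as produced by browser APIs in the renderer)
into a Node.js Readable by pulling chunks from the stream's reader each time
the Readable asks for more data. */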
export const convertBrowserStreamToNode = (
fileStream: ReadableStream<Uint8Array>,
) => {
const reader = fileStream.getReader();
const rs = new Readable();
rs._read = async () => {
try {
const result = await reader.read();
if (!result.done) {
rs.push(Buffer.from(result.value));
} else {
rs.push(null);
return;
}
} catch (e) {
rs.emit("error", e);
}
};
return rs;
};
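/* Write a Node.js readable stream to the file at `filePath`, deleting the
partially written file if the stream errors out midway. */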
export async function writeNodeStream(
filePath: string,
fileStream: NodeJS.ReadableStream,
) {
const writeable = fs.createWriteStream(filePath);
fileStream.on("error", (error) => {
writeable.destroy(error); // Close the writable stream with an error
});
fileStream.pipe(writeable);
await new Promise((resolve, reject) => {
writeable.on("finish", resolve);
writeable.on("error", async (e) => {
if (existsSync(filePath)) {
await fs.unlink(filePath);
}
reject(e);
});
});
}
/* preload: duplicated writeStream */
export async function writeStream(
filePath: string,
fileStream: ReadableStream<Uint8Array>,
) {
const readable = convertBrowserStreamToNode(fileStream);
await writeNodeStream(filePath, readable);
}
// -
async function readTextFile(filePath: string) {
if (!existsSync(filePath)) {
throw new Error("File does not exist");
}
return await fs.readFile(filePath, "utf-8");
}
async function moveFile(
sourcePath: string,
destinationPath: string,
): Promise<void> {
if (!existsSync(sourcePath)) {
throw new Error("File does not exist");
}
if (existsSync(destinationPath)) {
throw new Error("Destination file already exists");
}
// check if destination folder exists
const destinationFolder = path.dirname(destinationPath);
if (!existsSync(destinationFolder)) {
await fs.mkdir(destinationFolder, { recursive: true });
}
await fs.rename(sourcePath, destinationPath);
}
export async function isFolder(dirPath: string) {
try {
const stats = await fs.stat(dirPath);
return stats.isDirectory();
} catch (e) {
let err = e;
// if code is defined, it's an error from fs.stat
if (typeof e.code !== "undefined") {
// ENOENT means the file does not exist
if (e.code === "ENOENT") {
return false;
}
err = Error(`fs error code: ${e.code}`);
}
logError(err, "isFolder failed");
return false;
}
}
async function deleteFolder(folderPath: string): Promise<void> {
if (!existsSync(folderPath)) {
return;
}
if (!fs.statSync(folderPath).isDirectory()) {
throw new Error("Path is not a folder");
}
// check if folder is empty
const files = await fs.readdir(folderPath);
if (files.length > 0) {
throw new Error("Folder is not empty");
}
await fs.rmdir(folderPath);
}
async function rename(oldPath: string, newPath: string) {
if (!existsSync(oldPath)) {
throw new Error("Path does not exist");
}
await fs.rename(oldPath, newPath);
}
function deleteFile(filePath: string): void {
if (!existsSync(filePath)) {
return;
}
if (!fs.statSync(filePath).isFile()) {
throw new Error("Path is not a file");
}
fs.rmSync(filePath);
}
// -
/* preload: duplicated Model */
export enum Model {
GGML_CLIP = "ggml-clip",
ONNX_CLIP = "onnx-clip",
}
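/* Compute the CLIP embedding of the given image by writing it to a temporary
file and asking the main process (over IPC) to run the embedding on that file.
The temporary file is removed once the invocation completes. */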
const computeImageEmbedding = async (
model: Model,
imageData: Uint8Array,
): Promise<Float32Array> => {
let tempInputFilePath = null;
try {
tempInputFilePath = await ipcRenderer.invoke("get-temp-file-path", "");
const imageStream = new Response(imageData.buffer).body;
await writeStream(tempInputFilePath, imageStream);
const embedding = await ipcRenderer.invoke(
"compute-image-embedding",
model,
tempInputFilePath,
);
return embedding;
} catch (err) {
if (isExecError(err)) {
const parsedExecError = parseExecError(err);
throw Error(parsedExecError);
} else {
throw err;
}
} finally {
if (tempInputFilePath) {
await ipcRenderer.invoke("remove-temp-file", tempInputFilePath);
}
}
};
export async function computeTextEmbedding(
model: Model,
text: string,
): Promise<Float32Array> {
try {
const embedding = await ipcRenderer.invoke(
"compute-text-embedding",
model,
text,
);
return embedding;
} catch (err) {
if (isExecError(err)) {
const parsedExecError = parseExecError(err);
throw Error(parsedExecError);
} else {
throw err;
}
}
}
// -
/**
* [Note: Custom errors across Electron/Renderer boundary]
*
* We need to use the `message` field to disambiguate between errors thrown by
* the main process when invoked from the renderer process. This is because:
*
* > Errors thrown through `handle` in the main process are not transparent as
* > they are serialized and only the `message` property from the original error
* > is provided to the renderer process.
* >
* > - https://www.electronjs.org/docs/latest/tutorial/ipc
* >
* > Ref: https://github.com/electron/electron/issues/24427
*/
/* preload: duplicated CustomErrors */
const CustomErrorsP = {
WINDOWS_NATIVE_IMAGE_PROCESSING_NOT_SUPPORTED:
"Windows native image processing is not supported",
INVALID_OS: (os: string) => `Invalid OS - ${os}`,
WAIT_TIME_EXCEEDED: "Wait time exceeded",
UNSUPPORTED_PLATFORM: (platform: string, arch: string) =>
`Unsupported platform - ${platform} ${arch}`,
MODEL_DOWNLOAD_PENDING:
"Model download pending, skipping clip search request",
INVALID_FILE_PATH: "Invalid file path",
INVALID_CLIP_MODEL: (model: string) => `Invalid Clip model - ${model}`,
};
const isExecError = (err: any) => {
return err.message.includes("Command failed:");
};
const parseExecError = (err: any) => {
const errMessage = err.message;
if (errMessage.includes("Bad CPU type in executable")) {
return CustomErrorsP.UNSUPPORTED_PLATFORM(
process.platform,
process.arch,
);
} else {
return errMessage;
}
};
// -
const selectDirectory = async (): Promise<string> => {
try {
return await ipcRenderer.invoke("select-dir");
} catch (e) {
logError(e, "error while selecting root directory");
}
};
const getAppVersion = async (): Promise<string> => {
try {
return await ipcRenderer.invoke("get-app-version");
} catch (e) {
logError(e, "failed to get release version");
throw e;
}
};
const openDirectory = async (dirPath: string): Promise<void> => {
try {
await ipcRenderer.invoke("open-dir", dirPath);
} catch (e) {
logError(e, "error while opening directory");
throw e;
}
};
// -
const clearElectronStore = () => {
ipcRenderer.send("clear-electron-store");
};
// -
const updateAndRestart = () => {
ipcRenderer.send("update-and-restart");
};
const skipAppUpdate = (version: string) => {
ipcRenderer.send("skip-app-update", version);
};
const muteUpdateNotification = (version: string) => {
ipcRenderer.send("mute-update-notification", version);
};
// -
setupLogging();
setupRendererProcessStatsLogger();
const windowObject: any = window;
windowObject["ElectronAPIs"] = {
// These objects exposed here will become available to the JS code in our
// renderer (the web/ code) as `window.ElectronAPIs.*`
//
// https://www.electronjs.org/docs/latest/tutorial/tutorial-preload
contextBridge.exposeInMainWorld("ElectronAPIs", {
exists,
checkExistsAndCreateDir,
saveStreamToDisk,
saveFileToDisk,
selectDirectory,
clearElectronStore,
sendNotification,
reloadWindow,
readTextFile,
showUploadFilesDialog,
showUploadDirsDialog,
@ -108,7 +408,6 @@ windowObject["ElectronAPIs"] = {
runFFmpegCmd,
muteUpdateNotification,
generateImageThumbnail,
logRendererProcessMemoryUsage,
registerForegroundEventListener,
openDirectory,
moveFile,
@ -117,7 +416,4 @@ windowObject["ElectronAPIs"] = {
deleteFile,
computeImageEmbedding,
computeTextEmbedding,
getPlatform,
getCacheDirectory,
setCustomCacheDirectory,
};
});

View file

@ -184,25 +184,6 @@ export const getZipFileStream = async (
return readableStream;
};
export async function isFolder(dirPath: string) {
try {
const stats = await fs.stat(dirPath);
return stats.isDirectory();
} catch (e) {
let err = e;
// if code is defined, it's an error from fs.stat
if (typeof e.code !== "undefined") {
// ENOENT means the file does not exist
if (e.code === "ENOENT") {
return false;
}
err = Error(`fs error code: ${e.code}`);
}
logError(err, "isFolder failed");
return false;
}
}
export const convertBrowserStreamToNode = (
fileStream: ReadableStream<Uint8Array>,
) => {
@ -257,60 +238,3 @@ export async function writeStream(
const readable = convertBrowserStreamToNode(fileStream);
await writeNodeStream(filePath, readable);
}
export async function readTextFile(filePath: string) {
if (!existsSync(filePath)) {
throw new Error("File does not exist");
}
return await fs.readFile(filePath, "utf-8");
}
export async function moveFile(
sourcePath: string,
destinationPath: string,
): Promise<void> {
if (!existsSync(sourcePath)) {
throw new Error("File does not exist");
}
if (existsSync(destinationPath)) {
throw new Error("Destination file already exists");
}
// check if destination folder exists
const destinationFolder = path.dirname(destinationPath);
if (!existsSync(destinationFolder)) {
await fs.mkdir(destinationFolder, { recursive: true });
}
await fs.rename(sourcePath, destinationPath);
}
export async function deleteFolder(folderPath: string): Promise<void> {
if (!existsSync(folderPath)) {
return;
}
if (!fs.statSync(folderPath).isDirectory()) {
throw new Error("Path is not a folder");
}
// check if folder is empty
const files = await fs.readdir(folderPath);
if (files.length > 0) {
throw new Error("Folder is not empty");
}
await fs.rmdir(folderPath);
}
export async function rename(oldPath: string, newPath: string) {
if (!existsSync(oldPath)) {
throw new Error("Path does not exist");
}
await fs.rename(oldPath, newPath);
}
export function deleteFile(filePath: string): void {
if (!existsSync(filePath)) {
return;
}
if (!fs.statSync(filePath).isFile()) {
throw new Error("Path is not a file");
}
fs.rmSync(filePath);
}

View file

@ -31,11 +31,3 @@ export function clearSkipAppVersion() {
export function clearMuteUpdateNotificationVersion() {
userPreferencesStore.delete("muteUpdateNotificationVersion");
}
export function setCustomCacheDirectory(directory: string) {
userPreferencesStore.set("customCacheDirectory", directory);
}
export function getCustomCacheDirectory(): string {
return userPreferencesStore.get("customCacheDirectory");
}

View file

@ -11,9 +11,6 @@ const userPreferencesSchema: Schema<UserPreferencesType> = {
muteUpdateNotificationVersion: {
type: "string",
},
customCacheDirectory: {
type: "string",
},
};
export const userPreferencesStore = new Store({

View file

@ -58,7 +58,6 @@ export interface UserPreferencesType {
hideDockIcon: boolean;
skipAppVersion: string;
muteUpdateNotificationVersion: string;
customCacheDirectory: string;
}
export interface AppUpdateInfo {

View file

@ -1,6 +1,7 @@
# CLIP Byte Pair Encoding JavaScript Port
A JavaScript port of [OpenAI's CLIP byte-pair-encoding tokenizer](https://github.com/openai/CLIP/blob/3bee28119e6b28e75b82b811b87b56935314e6a5/clip/simple_tokenizer.py).
A JavaScript port of
[OpenAI's CLIP byte-pair-encoding tokenizer](https://github.com/openai/CLIP/blob/3bee28119e6b28e75b82b811b87b56935314e6a5/clip/simple_tokenizer.py).
```js
import Tokenizer from "https://deno.land/x/clip_bpe@v0.0.6/mod.js";
@ -18,10 +19,22 @@ t.encode("hello world!"); // [3306, 1002, 256]
t.encodeForCLIP("hello world!"); // [49406,3306,1002,256,49407,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0]
```
This encoder/decoder behaves differently to the GPT-2/3 tokenizer (JavaScript version of that [here](https://github.com/latitudegames/GPT-3-Encoder)). For example, it doesn't preserve capital letters, as shown above.
This encoder/decoder behaves differently to the GPT-2/3 tokenizer (JavaScript
version of that [here](https://github.com/latitudegames/GPT-3-Encoder)). For
example, it doesn't preserve capital letters, as shown above.
The [Python version](https://github.com/openai/CLIP/blob/3bee28119e6b28e75b82b811b87b56935314e6a5/clip/simple_tokenizer.py) of this tokenizer uses the `ftfy` module to clean up the text before encoding it. I didn't include that module by default because currently the only version available in JavaScript is [this one](https://github.com/josephrocca/ftfy-pyodide), which requires importing a full Python runtime as a WebAssembly module. If you want the `ftfy` cleaning, just import it and clean your text with it before passing it to the `.encode()` method.
The
[Python version](https://github.com/openai/CLIP/blob/3bee28119e6b28e75b82b811b87b56935314e6a5/clip/simple_tokenizer.py)
of this tokenizer uses the `ftfy` module to clean up the text before encoding
it. I didn't include that module by default because currently the only version
available in JavaScript is
[this one](https://github.com/josephrocca/ftfy-pyodide), which requires
importing a full Python runtime as a WebAssembly module. If you want the `ftfy`
cleaning, just import it and clean your text with it before passing it to the
`.encode()` method.
# License
To the extent that there is any original work in this repo, it is MIT Licensed, just like [openai/CLIP](https://github.com/openai/CLIP).
To the extent that there is any original work in this repo, it is MIT Licensed,
just like [openai/CLIP](https://github.com/openai/CLIP).

View file

@ -8,7 +8,12 @@ import { getHideDockIconPreference } from "../services/userPreference";
import { isDev } from "./common";
import { isPlatform } from "./common/platform";
export async function createWindow(): Promise<BrowserWindow> {
/**
* Create and return the {@link BrowserWindow} that will form our app's UI.
*
* This window will show the HTML served from {@link rendererURL}.
*/
export const createWindow = async () => {
const appImgPath = isDev
? "resources/window-icon.png"
: path.join(process.resourcesPath, "window-icon.png");
@ -16,9 +21,7 @@ export async function createWindow(): Promise<BrowserWindow> {
// Create the browser window.
const mainWindow = new BrowserWindow({
webPreferences: {
sandbox: false,
preload: path.join(__dirname, "../preload.js"),
contextIsolation: false,
},
icon: appIcon,
show: false, // don't show the main window on load,
@ -49,16 +52,6 @@ export async function createWindow(): Promise<BrowserWindow> {
);
mainWindow.loadURL(rendererURL);
}
mainWindow.webContents.on("did-fail-load", () => {
splash.close();
isDev
? mainWindow.loadFile(`../resources/error.html`)
: splash.loadURL(
`file://${path.join(process.resourcesPath, "error.html")}`,
);
mainWindow.maximize();
mainWindow.show();
});
mainWindow.once("ready-to-show", async () => {
try {
splash.destroy();
@ -114,4 +107,4 @@ export async function createWindow(): Promise<BrowserWindow> {
}
});
return mainWindow;
}
};

View file

@ -1,17 +0,0 @@
import { CustomErrors } from "../constants/errors";
export const isExecError = (err: any) => {
return err.message.includes("Command failed:");
};
export const parseExecError = (err: any) => {
const errMessage = err.message;
if (errMessage.includes("Bad CPU type in executable")) {
return CustomErrors.UNSUPPORTED_PLATFORM(
process.platform,
process.arch,
);
} else {
return errMessage;
}
};

View file

@ -4,12 +4,12 @@ import {
BrowserWindow,
dialog,
ipcMain,
Notification,
safeStorage,
shell,
Tray,
} from "electron";
import path from "path";
import { clearElectronStore } from "../api/electronStore";
import {
getAppVersion,
muteUpdateNotification,
@ -27,12 +27,6 @@ import {
generateImageThumbnail,
} from "../services/imageProcessor";
import { logErrorSentry } from "../services/sentry";
import {
getCustomCacheDirectory,
setCustomCacheDirectory,
} from "../services/userPreference";
import { getPlatform } from "./common/platform";
import { createWindow } from "./createWindow";
import { generateTempFilePath } from "./temp";
export default function setupIpcComs(
@ -49,19 +43,6 @@ export default function setupIpcComs(
}
});
ipcMain.on("send-notification", (_, args) => {
const notification = {
title: "ente",
body: args,
};
new Notification(notification).show();
});
ipcMain.on("reload-window", async () => {
const secondWindow = await createWindow();
mainWindow.destroy();
mainWindow = secondWindow;
});
ipcMain.handle("show-upload-files-dialog", async () => {
const files = await dialog.showOpenDialog({
properties: ["openFile", "multiSelections"],
@ -110,6 +91,10 @@ export default function setupIpcComs(
return safeStorage.decryptString(message);
});
ipcMain.on("clear-electron-store", () => {
clearElectronStore();
});
ipcMain.handle("get-path", (_, message) => {
// By default, these paths are at the following locations:
//
@ -180,15 +165,4 @@ export default function setupIpcComs(
ipcMain.handle("compute-text-embedding", (_, model, text) => {
return computeTextEmbedding(model, text);
});
ipcMain.handle("get-platform", () => {
return getPlatform();
});
ipcMain.handle("set-custom-cache-directory", (_, directory: string) => {
setCustomCacheDirectory(directory);
});
ipcMain.handle("get-custom-cache-directory", async () => {
return getCustomCacheDirectory();
});
}

View file

@ -9,16 +9,3 @@ export function setupLogging(isDev?: boolean) {
log.transports.file.format =
"[{y}-{m}-{d}T{h}:{i}:{s}{z}] [{level}]{scope} {text}";
}
export function convertBytesToHumanReadable(
bytes: number,
precision = 2,
): string {
if (bytes === 0 || isNaN(bytes)) {
return "0 MB";
}
const i = Math.floor(Math.log(bytes) / Math.log(1024));
const sizes = ["B", "KB", "MB", "GB", "TB", "PB", "EB", "ZB", "YB"];
return (bytes / Math.pow(1024, i)).toFixed(precision) + " " + sizes[i];
}

View file

@ -1,295 +0,0 @@
import ElectronLog from "electron-log";
import { webFrame } from "electron/renderer";
import { convertBytesToHumanReadable } from "./logging";
const LOGGING_INTERVAL_IN_MICROSECONDS = 30 * 1000; // 30 seconds
const SPIKE_DETECTION_INTERVAL_IN_MICROSECONDS = 1 * 1000; // 1 seconds
const MAIN_MEMORY_USAGE_DIFF_IN_KILOBYTES_CONSIDERED_AS_SPIKE = 50 * 1024; // 50 MB
const HIGH_MAIN_MEMORY_USAGE_THRESHOLD_IN_KILOBYTES = 200 * 1024; // 200 MB
const RENDERER_MEMORY_USAGE_DIFF_IN_KILOBYTES_CONSIDERED_AS_SPIKE = 200 * 1024; // 200 MB
const HIGH_RENDERER_MEMORY_USAGE_THRESHOLD_IN_KILOBYTES = 1024 * 1024; // 1 GB
async function logMainProcessStats() {
const processMemoryInfo = await getNormalizedProcessMemoryInfo(
await process.getProcessMemoryInfo(),
);
const cpuUsage = process.getCPUUsage();
const heapStatistics = getNormalizedHeapStatistics(
process.getHeapStatistics(),
);
ElectronLog.log("main process stats", {
processMemoryInfo,
heapStatistics,
cpuUsage,
});
}
let previousMainProcessMemoryInfo: Electron.ProcessMemoryInfo = {
private: 0,
shared: 0,
residentSet: 0,
};
let mainProcessUsingHighMemory = false;
async function logSpikeMainMemoryUsage() {
const processMemoryInfo = await process.getProcessMemoryInfo();
const currentMemoryUsage = Math.max(
processMemoryInfo.residentSet ?? 0,
processMemoryInfo.private,
);
const previousMemoryUsage = Math.max(
previousMainProcessMemoryInfo.residentSet ?? 0,
previousMainProcessMemoryInfo.private,
);
const isSpiking =
currentMemoryUsage - previousMemoryUsage >=
MAIN_MEMORY_USAGE_DIFF_IN_KILOBYTES_CONSIDERED_AS_SPIKE;
const isHighMemoryUsage =
currentMemoryUsage >= HIGH_MAIN_MEMORY_USAGE_THRESHOLD_IN_KILOBYTES;
const shouldReport =
(isHighMemoryUsage && !mainProcessUsingHighMemory) ||
(!isHighMemoryUsage && mainProcessUsingHighMemory);
if (isSpiking || shouldReport) {
const normalizedCurrentProcessMemoryInfo =
await getNormalizedProcessMemoryInfo(processMemoryInfo);
const normalizedPreviousProcessMemoryInfo =
await getNormalizedProcessMemoryInfo(previousMainProcessMemoryInfo);
const cpuUsage = process.getCPUUsage();
const heapStatistics = getNormalizedHeapStatistics(
process.getHeapStatistics(),
);
ElectronLog.log("reporting main memory usage spike", {
currentProcessMemoryInfo: normalizedCurrentProcessMemoryInfo,
previousProcessMemoryInfo: normalizedPreviousProcessMemoryInfo,
heapStatistics,
cpuUsage,
});
}
previousMainProcessMemoryInfo = processMemoryInfo;
if (shouldReport) {
mainProcessUsingHighMemory = !mainProcessUsingHighMemory;
}
}
let previousRendererProcessMemoryInfo: Electron.ProcessMemoryInfo = {
private: 0,
shared: 0,
residentSet: 0,
};
let rendererUsingHighMemory = false;
async function logSpikeRendererMemoryUsage() {
const processMemoryInfo = await process.getProcessMemoryInfo();
const currentMemoryUsage = Math.max(
processMemoryInfo.residentSet ?? 0,
processMemoryInfo.private,
);
const previousMemoryUsage = Math.max(
previousRendererProcessMemoryInfo.private,
previousRendererProcessMemoryInfo.residentSet ?? 0,
);
const isSpiking =
currentMemoryUsage - previousMemoryUsage >=
RENDERER_MEMORY_USAGE_DIFF_IN_KILOBYTES_CONSIDERED_AS_SPIKE;
const isHighMemoryUsage =
currentMemoryUsage >= HIGH_RENDERER_MEMORY_USAGE_THRESHOLD_IN_KILOBYTES;
const shouldReport =
(isHighMemoryUsage && !rendererUsingHighMemory) ||
(!isHighMemoryUsage && rendererUsingHighMemory);
if (isSpiking || shouldReport) {
const normalizedCurrentProcessMemoryInfo =
await getNormalizedProcessMemoryInfo(processMemoryInfo);
const normalizedPreviousProcessMemoryInfo =
await getNormalizedProcessMemoryInfo(
previousRendererProcessMemoryInfo,
);
const cpuUsage = process.getCPUUsage();
const heapStatistics = getNormalizedHeapStatistics(
process.getHeapStatistics(),
);
ElectronLog.log("reporting renderer memory usage spike", {
currentProcessMemoryInfo: normalizedCurrentProcessMemoryInfo,
previousProcessMemoryInfo: normalizedPreviousProcessMemoryInfo,
heapStatistics,
cpuUsage,
});
}
previousRendererProcessMemoryInfo = processMemoryInfo;
if (shouldReport) {
rendererUsingHighMemory = !rendererUsingHighMemory;
}
}
async function logRendererProcessStats() {
const blinkMemoryInfo = getNormalizedBlinkMemoryInfo();
const heapStatistics = getNormalizedHeapStatistics(
process.getHeapStatistics(),
);
const webFrameResourceUsage = getNormalizedWebFrameResourceUsage();
const processMemoryInfo = await getNormalizedProcessMemoryInfo(
await process.getProcessMemoryInfo(),
);
ElectronLog.log("renderer process stats", {
blinkMemoryInfo,
heapStatistics,
processMemoryInfo,
webFrameResourceUsage,
});
}
export function setupMainProcessStatsLogger() {
setInterval(
logSpikeMainMemoryUsage,
SPIKE_DETECTION_INTERVAL_IN_MICROSECONDS,
);
setInterval(logMainProcessStats, LOGGING_INTERVAL_IN_MICROSECONDS);
}
export function setupRendererProcessStatsLogger() {
setInterval(
logSpikeRendererMemoryUsage,
SPIKE_DETECTION_INTERVAL_IN_MICROSECONDS,
);
setInterval(logRendererProcessStats, LOGGING_INTERVAL_IN_MICROSECONDS);
}
export async function logRendererProcessMemoryUsage(message: string) {
const processMemoryInfo = await process.getProcessMemoryInfo();
const processMemory = Math.max(
processMemoryInfo.private,
processMemoryInfo.residentSet ?? 0,
);
ElectronLog.log(
"renderer ProcessMemory",
message,
convertBytesToHumanReadable(processMemory * 1024),
);
}
const getNormalizedProcessMemoryInfo = async (
processMemoryInfo: Electron.ProcessMemoryInfo,
) => {
return {
residentSet: convertBytesToHumanReadable(
processMemoryInfo.residentSet * 1024,
),
private: convertBytesToHumanReadable(processMemoryInfo.private * 1024),
shared: convertBytesToHumanReadable(processMemoryInfo.shared * 1024),
};
};
const getNormalizedBlinkMemoryInfo = () => {
const blinkMemoryInfo = process.getBlinkMemoryInfo();
return {
allocated: convertBytesToHumanReadable(
blinkMemoryInfo.allocated * 1024,
),
total: convertBytesToHumanReadable(blinkMemoryInfo.total * 1024),
};
};
const getNormalizedHeapStatistics = (
heapStatistics: Electron.HeapStatistics,
) => {
return {
totalHeapSize: convertBytesToHumanReadable(
heapStatistics.totalHeapSize * 1024,
),
totalHeapSizeExecutable: convertBytesToHumanReadable(
heapStatistics.totalHeapSizeExecutable * 1024,
),
totalPhysicalSize: convertBytesToHumanReadable(
heapStatistics.totalPhysicalSize * 1024,
),
totalAvailableSize: convertBytesToHumanReadable(
heapStatistics.totalAvailableSize * 1024,
),
usedHeapSize: convertBytesToHumanReadable(
heapStatistics.usedHeapSize * 1024,
),
heapSizeLimit: convertBytesToHumanReadable(
heapStatistics.heapSizeLimit * 1024,
),
mallocedMemory: convertBytesToHumanReadable(
heapStatistics.mallocedMemory * 1024,
),
peakMallocedMemory: convertBytesToHumanReadable(
heapStatistics.peakMallocedMemory * 1024,
),
doesZapGarbage: heapStatistics.doesZapGarbage,
};
};
const getNormalizedWebFrameResourceUsage = () => {
const webFrameResourceUsage = webFrame.getResourceUsage();
return {
images: {
count: webFrameResourceUsage.images.count,
size: convertBytesToHumanReadable(
webFrameResourceUsage.images.size,
),
liveSize: convertBytesToHumanReadable(
webFrameResourceUsage.images.liveSize,
),
},
scripts: {
count: webFrameResourceUsage.scripts.count,
size: convertBytesToHumanReadable(
webFrameResourceUsage.scripts.size,
),
liveSize: convertBytesToHumanReadable(
webFrameResourceUsage.scripts.liveSize,
),
},
cssStyleSheets: {
count: webFrameResourceUsage.cssStyleSheets.count,
size: convertBytesToHumanReadable(
webFrameResourceUsage.cssStyleSheets.size,
),
liveSize: convertBytesToHumanReadable(
webFrameResourceUsage.cssStyleSheets.liveSize,
),
},
xslStyleSheets: {
count: webFrameResourceUsage.xslStyleSheets.count,
size: convertBytesToHumanReadable(
webFrameResourceUsage.xslStyleSheets.size,
),
liveSize: convertBytesToHumanReadable(
webFrameResourceUsage.xslStyleSheets.liveSize,
),
},
fonts: {
count: webFrameResourceUsage.fonts.count,
size: convertBytesToHumanReadable(webFrameResourceUsage.fonts.size),
liveSize: convertBytesToHumanReadable(
webFrameResourceUsage.fonts.liveSize,
),
},
other: {
count: webFrameResourceUsage.other.count,
size: convertBytesToHumanReadable(webFrameResourceUsage.other.size),
liveSize: convertBytesToHumanReadable(
webFrameResourceUsage.other.liveSize,
),
},
};
};

View file

@ -1,17 +1,82 @@
{
/* TSConfig for a set of vanilla TypeScript files that need to be transpiled
into JavaScript that'll then be loaded and run by the main (node) process
of our Electron app. */
/* TSConfig docs: https://aka.ms/tsconfig.json */
"compilerOptions": {
"target": "es2021",
"module": "commonjs",
/* Recommended target, lib and other settings for code running in the
version of Node.js bundled with Electron.
Currently, with Electron 25, this is Node.js 18
https://www.electronjs.org/blog/electron-25-0
Note that we cannot do
"extends": "@tsconfig/node18/tsconfig.json",
because that sets "lib": ["es2023"]. However (and I don't fully
understand what's going on here), that breaks our compilation since
tsc can then not find type definitions of things like ReadableStream.
Adding "dom" to "lib" (e.g. `"lib": ["es2023", "dom"]`) fixes the
issue, but that doesn't sound correct - the main Electron process
isn't running in a browser context.
It is possible that we're using some of the types incorrectly. For
now, we just omit the "lib" definition and rely on the defaults for
the "target" we've chosen. This is also what the current
electron-forge starter does:
yarn create electron-app electron-forge-starter -- --template=webpack-typescript
Enhancement: Can revisit this later.
Refs:
- https://github.com/electron/electron/issues/27092
- https://github.com/electron/electron/issues/16146
*/
"target": "es2022",
"module": "node16",
/* Enable various workarounds to play better with CJS libraries */
"esModuleInterop": true,
/* Emit the generated JS into app */
/* Speed things up by not type checking `node_modules` */
"skipLibCheck": true,
/* Emit the generated JS into `app/` */
"outDir": "app",
"noImplicitAny": true,
/* Generate source maps */
"sourceMap": true,
/* Allow absolute imports starting with src as root */
"baseUrl": "src",
/* Allow imports of paths from node_modules */
"paths": {
"*": ["node_modules/*"]
}
},
/* Transpile all ts files in src/ */
/* Temporary overrides to get things to compile with the older config */
"strict": false,
"noImplicitAny": true
/* Below is the state we want */
/* Enable these one by one */
// "strict": true,
/* Require the `type` modifier when importing types */
// "verbatimModuleSyntax": true
/* Stricter than strict */
// "noImplicitReturns": true,
// "noUnusedParameters": true,
// "noUnusedLocals": true,
// "noFallthroughCasesInSwitch": true,
/* e.g. makes array indexing return undefined */
// "noUncheckedIndexedAccess": true,
// "exactOptionalPropertyTypes": true,
},
/* Transpile all `.ts` files in `src/` */
"include": ["src/**/*.ts"]
}

File diff suppressed because it is too large.

View file

@ -95,10 +95,19 @@ sudo journalctl --follow --unit example
## Logging
Services should log to files in `/var/logs` within the container. This should be
mounted to `/root/var/logs` on the instance (using the `-v` flag in the service
file which launches the Docker container or the Docker compose cluster).
Simple services can log to their standard output: these are captured by Docker,
and by default Promtail is set up to ingest Docker logs and send them to Grafana.
If these logs need to be sent to Grafana, then ensure that there is an entry for
this log file in the `promtail/promtail.yaml` on that instance. The logs will
then get scraped by Promtail and sent over to Grafana.
One issue with the above simple setup is that we cannot attach job names.
If the service needs to attach a specific job name, or if the service wants
more control over log retention etc., then the service can log to its own
files.
* Such files should be in `/var/logs` within the container, and this should be
mounted to `/root/var/logs` on the instance (using the `-v` flag in the
service file which launches the Docker container or the Docker compose cluster).
* There should be an entry for this log file in the `promtail/promtail.yaml` on
that instance. The logs will then get scraped by Promtail and sent over to
Grafana.

View file

@ -3,16 +3,6 @@
*
* This worker receives webhooks from GitHub, filters out the ones we don't
* need, and forwards them to a Discord webhook.
*
* [Note: GitHub specific Discord Webhooks]
*
* By appending `/github` to the end of the webhook URL, we can get Discord to
* automatically parse the payload sent by GitHub.
* https://discord.com/developers/docs/resources/webhook#execute-githubcompatible-webhook
*
* Note that this doesn't work for all events. And sadly, the events it doesn't
* work for get silently ignored (Discord responds with a 204).
* https://github.com/discord/discord-api-docs/issues/6203#issuecomment-1608151265
*/
export default {
async fetch(request: Request, env: Env) {
@ -24,20 +14,78 @@ interface Env {
DISCORD_WEBHOOK_URL: string;
}
const handleRequest = async (request: Request, targetURL: string) => {
const handleRequest = async (request: Request, discordWebhookURL: string) => {
const requestBody = await request.text();
let sender = JSON.parse(requestBody)["sender"]["login"];
const requestJSON = JSON.parse(requestBody);
const sender = requestJSON["sender"]["login"];
if (sender === "cloudflare-pages[bot]" || sender === "CLAassistant") {
// Ignore pings from CF bot
return new Response(null, { status: 200 });
}
const response = await fetch(targetURL, {
// [Note: GitHub specific Discord Webhooks]
//
// By appending `/github` to the end of the webhook URL, we can get Discord
// to automatically parse the payload sent by GitHub.
// https://discord.com/developers/docs/resources/webhook#execute-githubcompatible-webhook
//
// Note that this doesn't work for all events. And sadly, the events it
// doesn't work for get silently ignored (Discord responds with a 204).
// https://github.com/discord/discord-api-docs/issues/6203#issuecomment-1608151265
let response = await fetch(`${discordWebhookURL}/github`, {
method: request.method,
headers: request.headers,
body: requestBody,
});
if (response.status === 429) {
// Sometimes Discord starts returning 429 Rate Limited responses when we
// try to invoke the webhook.
//
// Retry-After: 300
// X-Ratelimit-Global: true
// X-Ratelimit-Scope: global
//
// {"message": "You are being rate limited.", "retry_after": 0.3, "global": true}
//
// This just seems to be a bug on their end, and it goes away on its own
// after a while. My best guess is that the IP of the Cloudflare Worker
// somehow gets rate limited because of someone else trying to spam from
// a worker running on the same IP. But it's a guess. I'm not sure.
//
// Ref:
// https://discord.com/developers/docs/topics/rate-limits#global-rate-limit
//
// Interestingly, this only happens for the `/github` specific webhook.
// The normal webhook still works. So as a workaround, just send a
// normal text message to the webhook when we get a 429.
// The JSON sent by GitHub has a varied schema. This is a stop-gap
// arrangement (we shouldn't be getting 429s forever), so just try to
// see if we can extract a URL from something we recognize.
let activityURL: string | undefined;
if (requestJSON["issue"]) {
activityURL = requestJSON["issue"]["html_url"];
}
if (!activityURL && requestJSON["discussion"]) {
activityURL = requestJSON["discussion"]["html_url"];
}
// Ignore things like issue label changes.
const action = requestJSON["action"];
if (activityURL && ["created", "opened"].includes(action)) {
response = await fetch(discordWebhookURL, {
method: request.method,
headers: request.headers,
body: JSON.stringify({
content: `Activity in ${activityURL}`,
}),
});
}
}
const responseBody = await response.text();
const newResponse = new Response(responseBody, {
status: response.status,
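(Aside, not from the diff: the 429 fallback above posts to the plain, non-`/github` form of the webhook, which expects a small JSON body of its own. A hedged sketch of that request in isolation; the URL is a placeholder and only the documented `content` field is used:)

```ts
// Sketch only: posting a plain text message to a Discord webhook.
// The URL is a placeholder for https://discord.com/api/webhooks/{webhook.id}/{webhook.token}.
const discordWebhookURL = "https://discord.com/api/webhooks/ID/TOKEN";

const postPlainMessage = async (activityURL: string) =>
    fetch(discordWebhookURL, {
        method: "POST",
        headers: { "Content-Type": "application/json" },
        body: JSON.stringify({ content: `Activity in ${activityURL}` }),
    });
```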

View file

@@ -4,4 +4,4 @@ compatibility_date = "2024-03-14"
[vars]
# Added as a secret via the Cloudflare dashboard
# DISCORD_WEBHOOK_URL = "https://discord.com/api/webhooks/{webhook.id}/{webhook.token}/github"
# DISCORD_WEBHOOK_URL = "https://discord.com/api/webhooks/{webhook.id}/{webhook.token}"

View file

@@ -16,7 +16,11 @@ import 'package:photos/services/user_service.dart';
import 'package:photos/ui/common/web_page.dart';
import 'package:photos/utils/dialog_util.dart';
const kWebPaymentRedirectUrl = "https://payments.ente.io/frameRedirect";
const kWebPaymentRedirectUrl = String.fromEnvironment(
"web-payment-redirect",
defaultValue: "https://payments.ente.io/frameRedirect",
);
const kWebPaymentBaseEndpoint = String.fromEnvironment(
"web-payment",
defaultValue: "https://payments.ente.io",

View file

@@ -331,6 +331,7 @@ class _StripeSubscriptionPageState extends State<StripeSubscriptionPage> {
await _dialog.show();
try {
final String url = await _billingService.getStripeCustomerPortalUrl();
await _dialog.hide();
await Navigator.of(context).push(
MaterialPageRoute(
builder: (BuildContext context) {
@@ -342,7 +343,6 @@ class _StripeSubscriptionPageState extends State<StripeSubscriptionPage> {
await _dialog.hide();
await showGenericErrorDialog(context: context, error: e);
}
await _dialog.hide();
}
Widget _stripeRenewOrCancelButton() {

View file

@@ -300,6 +300,11 @@ func (c *StripeController) handleCustomerSubscriptionDeleted(event stripe.Event,
return ente.StripeEventLog{}, stacktrace.Propagate(err, "")
}
err = c.BillingRepo.UpdateSubscriptionCancellationStatus(userID, true)
if err != nil {
return ente.StripeEventLog{}, stacktrace.Propagate(err, "")
}
skipMail := stripeSubscription.Metadata[SkipMailKey]
// Send a cancellation notification email for folks who are either on an
// individual plan or are the admin of a family plan.

View file

@@ -16,7 +16,6 @@ import isElectron from "is-electron";
import { AppContext } from "pages/_app";
import { ClipExtractionStatus, ClipService } from "services/clipService";
import { formatNumber } from "utils/number/format";
import CacheDirectory from "./Preferences/CacheDirectory";
export default function AdvancedSettings({ open, onClose, onRootClose }) {
const appContext = useContext(AppContext);
@@ -77,8 +76,6 @@ export default function AdvancedSettings({ open, onClose, onRootClose }) {
<Box px={"8px"}>
<Stack py="20px" spacing="24px">
{isElectron() && (
<>
<CacheDirectory />
<Box>
<MenuSectionTitle
title={t("LABS")}
@@ -92,7 +89,6 @@ export default function AdvancedSettings({ open, onClose, onRootClose }) {
/>
</MenuItemGroup>
</Box>
</>
)}
<Box>
<MenuItemGroup>

View file

@@ -1,60 +0,0 @@
import ElectronAPIs from "@ente/shared/electron";
import { addLogLine } from "@ente/shared/logging";
import { logError } from "@ente/shared/sentry";
import Box from "@mui/material/Box";
import { DirectoryPath } from "components/Directory";
import { EnteMenuItem } from "components/Menu/EnteMenuItem";
import { MenuItemGroup } from "components/Menu/MenuItemGroup";
import MenuSectionTitle from "components/Menu/MenuSectionTitle";
import { t } from "i18next";
import isElectron from "is-electron";
import { useEffect, useState } from "react";
import DownloadManager from "services/download";
export default function CacheDirectory() {
const [cacheDirectory, setCacheDirectory] = useState(undefined);
useEffect(() => {
const main = async () => {
if (isElectron()) {
const customCacheDirectory =
await ElectronAPIs.getCacheDirectory();
setCacheDirectory(customCacheDirectory);
}
};
main();
}, []);
const handleCacheDirectoryChange = async () => {
try {
if (!isElectron()) {
return;
}
const newFolder = await ElectronAPIs.selectDirectory();
if (!newFolder) {
return;
}
addLogLine(`Export folder changed to ${newFolder}`);
await ElectronAPIs.setCustomCacheDirectory(newFolder);
setCacheDirectory(newFolder);
await DownloadManager.reloadCaches();
} catch (e) {
logError(e, "handleCacheDirectoryChange failed");
}
};
return (
<Box>
<MenuSectionTitle title={t("CACHE_DIRECTORY")} />
<MenuItemGroup>
<EnteMenuItem
variant="path"
onClick={handleCacheDirectoryChange}
labelComponent={
<DirectoryPath width={265} path={cacheDirectory} />
}
/>
</MenuItemGroup>
</Box>
);
}

View file

@@ -130,11 +130,6 @@ class DownloadManagerImpl {
this.progressUpdater = progressUpdater;
}
async reloadCaches() {
this.thumbnailCache = await openThumbnailCache();
this.diskFileCache = isElectron() && (await openDiskFileCache());
}
private async getCachedThumbnail(fileID: number) {
try {
const cacheResp: Response = await this.thumbnailCache?.match(

View file

@@ -1,18 +1,25 @@
# Dependencies
## Global
## DX
These are some global dev dependencies in the root `package.json`. These set the
baseline for how our code should be in all the workspaces in the monorepo.
baseline for how our code should be in all the workspaces in this (yarn) monorepo.
* "prettier" - Formatter
* "eslint" - Linter
* "typescript" - Type checker
They also need some support packages:
They also need some support packages, which come from the leaf `@/build-config`
package:
* "@typescript-eslint/parser" - Tells ESLint how to read TypeScript syntax
* "@typescript-eslint/eslint-plugin" - Provides TypeScript rules and presets
* "eslint-plugin-react-hooks", "eslint-plugin-react-namespace-import" - Some
React specific ESLint rules and configurations that are used by the workspaces
that have React code.
* "prettier-plugin-organize-imports" - A Prettier plugin to sort imports.
* "prettier-plugin-packagejson" - A Prettier plugin to also prettify
`package.json`.
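As a small, purely illustrative aside (not from the repository):
`prettier-plugin-organize-imports` hooks TypeScript's organize-imports service
into Prettier, so a file like the sketch below gets its imports sorted and
unused ones dropped on `prettier --write`:

```ts
// Sketch only. Before formatting, the imports might be unsorted and include an
// unused one; after `prettier --write` with prettier-plugin-organize-imports,
// they end up sorted, with the unused import removed:
import { a } from "./a";
import { b } from "./b";

console.log(a, b);
```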
## Utils

View file

@@ -10,8 +10,5 @@
"eslint-config-next": "latest",
"eslint-config-prettier": "latest",
"eslint-plugin-react": "latest"
},
"standard": {
"parser": "babel-eslint"
}
}

View file

@@ -21,7 +21,6 @@ export interface ElectronAPIsType {
) => Promise<void>;
saveFileToDisk: (path: string, file: any) => Promise<void>;
selectDirectory: () => Promise<string>;
sendNotification: (content: string) => void;
readTextFile: (path: string) => Promise<string>;
showUploadFilesDialog: () => Promise<ElectronFile[]>;
showUploadDirsDialog: () => Promise<ElectronFile[]>;
@@ -93,7 +92,6 @@ export interface ElectronAPIsType {
maxDimension: number,
maxSize: number,
) => Promise<Uint8Array>;
logRendererProcessMemoryUsage: (message: string) => Promise<void>;
registerForegroundEventListener: (onForeground: () => void) => void;
openDirectory: (dirPath: string) => Promise<void>;
moveFile: (oldPath: string, newPath: string) => Promise<void>;
@@ -105,7 +103,4 @@
imageData: Uint8Array,
) => Promise<Float32Array>;
computeTextEmbedding: (model: Model, text: string) => Promise<Float32Array>;
getPlatform: () => Promise<"mac" | "windows" | "linux">;
setCustomCacheDirectory: (directory: string) => Promise<void>;
getCacheDirectory: () => Promise<string>;
}