[web] The great electron renaming (#1390)

- Expose on the globalThis
- Rename the deprecated loggers too
This commit is contained in:
Manav Rathi 2024-04-09 13:23:06 +05:30 committed by GitHub
commit 9dc4a17593
No known key found for this signature in database
GPG key ID: B5690EEEBB952194
138 changed files with 1195 additions and 1441 deletions

View file

@ -60,11 +60,11 @@ const logInfo = (...params: any[]) => {
.map((p) => (typeof p == "string" ? p : util.inspect(p)))
.join(" ");
log.info(`[main] ${message}`);
if (isDev) console.log(message);
if (isDev) console.log(`[info] ${message}`);
};
const logDebug = (param: () => any) => {
if (isDev) console.log(`[main] [debug] ${util.inspect(param())}`);
if (isDev) console.log(`[debug] ${util.inspect(param())}`);
};
/**

View file

@ -307,7 +307,7 @@ const getDirFiles = (dirPath: string): Promise<ElectronFile[]> =>
//
// The copy itself is relatively fast, but the problem with transferring large
// amounts of data is potentially running out of memory during the copy.
contextBridge.exposeInMainWorld("ElectronAPIs", {
contextBridge.exposeInMainWorld("electron", {
// - General
appVersion,
openDirectory,

View file

@ -1,8 +1,8 @@
import log from "@/next/log";
import { VerticallyCentered } from "@ente/shared/components/Container";
import EnteSpinner from "@ente/shared/components/EnteSpinner";
import { ACCOUNTS_PAGES } from "@ente/shared/constants/pages";
import HTTPService from "@ente/shared/network/HTTPService";
import { logError } from "@ente/shared/sentry";
import { LS_KEYS, getData, setData } from "@ente/shared/storage/localStorage";
import { useRouter } from "next/router";
import { useEffect } from "react";
@ -16,7 +16,7 @@ const AccountHandoff = () => {
router.push(ACCOUNTS_PAGES.PASSKEYS);
} catch (e) {
logError(e, "Failed to deserialize and set passed user data");
log.error("Failed to deserialize and set passed user data", e);
router.push(ACCOUNTS_PAGES.LOGIN);
}
};

View file

@ -1,3 +1,4 @@
import log from "@/next/log";
import { APPS, CLIENT_PACKAGE_NAMES } from "@ente/shared/apps/constants";
import {
CenteredFlex,
@ -7,7 +8,6 @@ import EnteButton from "@ente/shared/components/EnteButton";
import EnteSpinner from "@ente/shared/components/EnteSpinner";
import FormPaper from "@ente/shared/components/Form/FormPaper";
import HTTPService from "@ente/shared/network/HTTPService";
import { logError } from "@ente/shared/sentry";
import { LS_KEYS, setData } from "@ente/shared/storage/localStorage";
import InfoIcon from "@mui/icons-material/Info";
import { Box, Typography } from "@mui/material";
@ -73,7 +73,7 @@ const PasskeysFlow = () => {
try {
beginData = await beginAuthentication(passkeySessionID);
} catch (e) {
logError(e, "Couldn't begin passkey authentication");
log.error("Couldn't begin passkey authentication", e);
setErrored(true);
return;
} finally {
@ -89,7 +89,7 @@ const PasskeysFlow = () => {
try {
credential = await getCredential(beginData.options.publicKey);
} catch (e) {
logError(e, "Couldn't get credential");
log.error("Couldn't get credential", e);
continue;
} finally {
tries++;
@ -117,7 +117,7 @@ const PasskeysFlow = () => {
beginData.ceremonySessionID,
);
} catch (e) {
logError(e, "Couldn't finish passkey authentication");
log.error("Couldn't finish passkey authentication", e);
setErrored(true);
setLoading(false);
return;

View file

@ -1,8 +1,8 @@
import log from "@/next/log";
import { CenteredFlex } from "@ente/shared/components/Container";
import FormPaper from "@ente/shared/components/Form/FormPaper";
import SingleInputForm from "@ente/shared/components/SingleInputForm";
import { ACCOUNTS_PAGES } from "@ente/shared/constants/pages";
import { logError } from "@ente/shared/sentry";
import { getToken } from "@ente/shared/storage/localStorage/helpers";
import { Box, Typography } from "@mui/material";
import { t } from "i18next";
@ -104,7 +104,7 @@ const Passkeys = () => {
try {
newCredential = await navigator.credentials.create(options);
} catch (e) {
logError(e, "Error creating credential");
log.error("Error creating credential", e);
setFieldError("Failed to create credential");
return;
}

View file

@ -1,8 +1,9 @@
import log from "@/next/log";
import HTTPService from "@ente/shared/network/HTTPService";
import { getEndpoint } from "@ente/shared/network/api";
import { logError } from "@ente/shared/sentry";
import { getToken } from "@ente/shared/storage/localStorage/helpers";
import _sodium from "libsodium-wrappers";
const ENDPOINT = getEndpoint();
export const getPasskeys = async () => {
@ -16,7 +17,7 @@ export const getPasskeys = async () => {
);
return await response.data;
} catch (e) {
logError(e, "get passkeys failed");
log.error("get passkeys failed", e);
throw e;
}
};
@ -33,7 +34,7 @@ export const renamePasskey = async (id: string, name: string) => {
);
return await response.data;
} catch (e) {
logError(e, "rename passkey failed");
log.error("rename passkey failed", e);
throw e;
}
};
@ -50,7 +51,7 @@ export const deletePasskey = async (id: string) => {
);
return await response.data;
} catch (e) {
logError(e, "delete passkey failed");
log.error("delete passkey failed", e);
throw e;
}
};
@ -68,7 +69,7 @@ export const getPasskeyRegistrationOptions = async () => {
);
return await response.data;
} catch (e) {
logError(e, "get passkey registration options failed");
log.error("get passkey registration options failed", e);
throw e;
}
};
@ -116,7 +117,7 @@ export const finishPasskeyRegistration = async (
);
return await response.data;
} catch (e) {
logError(e, "finish passkey registration failed");
log.error("finish passkey registration failed", e);
throw e;
}
};
@ -142,7 +143,7 @@ export const beginPasskeyAuthentication = async (
return data.data;
} catch (e) {
logError(e, "begin passkey authentication failed");
log.error("begin passkey authentication failed", e);
throw e;
}
};
@ -194,7 +195,7 @@ export const finishPasskeyAuthentication = async (
return data.data;
} catch (e) {
logError(e, "finish passkey authentication failed");
log.error("finish passkey authentication failed", e);
throw e;
}
};

View file

@ -1,8 +1,8 @@
import log from "@/next/log";
import ComlinkCryptoWorker from "@ente/shared/crypto";
import { ApiError, CustomError } from "@ente/shared/error";
import HTTPService from "@ente/shared/network/HTTPService";
import { getEndpoint } from "@ente/shared/network/api";
import { logError } from "@ente/shared/sentry";
import { getToken } from "@ente/shared/storage/localStorage/helpers";
import { getActualKey } from "@ente/shared/user";
import { HttpStatusCode } from "axios";
@ -35,10 +35,7 @@ export const getAuthCodes = async (): Promise<Code[]> => {
);
return Code.fromRawData(entity.id, decryptedCode);
} catch (e) {
logError(
Error("failed to parse code"),
"codeId = " + entity.id,
);
log.error(`failed to parse codeId = ${entity.id}`);
return null;
}
}),
@ -62,7 +59,7 @@ export const getAuthCodes = async (): Promise<Code[]> => {
return filteredAuthCodes;
} catch (e) {
if (e.message !== CustomError.AUTH_KEY_NOT_FOUND) {
logError(e, "get authenticator entities failed");
log.error("get authenticator entities failed", e);
}
throw e;
}
@ -85,7 +82,7 @@ export const getAuthKey = async (): Promise<AuthKey> => {
) {
throw Error(CustomError.AUTH_KEY_NOT_FOUND);
} else {
logError(e, "Get key failed");
log.error("Get key failed", e);
throw e;
}
}
@ -109,7 +106,7 @@ export const getDiff = async (
);
return resp.data.diff;
} catch (e) {
logError(e, "Get diff failed");
log.error("Get diff failed", e);
throw e;
}
};

View file

@ -1,4 +1,4 @@
import { logError } from "@ente/shared/sentry";
import log from "@/next/log";
import PairedSuccessfullyOverlay from "components/PairedSuccessfullyOverlay";
import Theatre from "components/Theatre";
import { FILE_TYPE } from "constants/file";
@ -54,7 +54,7 @@ export default function Slideshow() {
);
}
} catch (e) {
logError(e, "error during sync");
log.error("error during sync", e);
router.push("/");
}
};
@ -107,7 +107,7 @@ export default function Slideshow() {
return () => clearTimeout(timeoutId);
} catch (e) {
logError(e, "error during sync");
log.error("error during sync", e);
router.push("/");
}
}, []);

View file

@ -1,10 +1,9 @@
import log from "@/next/log";
import ComlinkCryptoWorker from "@ente/shared/crypto";
import { CustomError, parseSharingErrorCodes } from "@ente/shared/error";
import HTTPService from "@ente/shared/network/HTTPService";
import { getEndpoint } from "@ente/shared/network/api";
import { logError } from "@ente/shared/sentry";
import localForage from "@ente/shared/storage/localForage";
import { Collection, CollectionPublicMagicMetadata } from "types/collection";
import { EncryptedEnteFile, EnteFile } from "types/file";
import { decryptFile, mergeMetadata, sortFiles } from "utils/file";
@ -150,14 +149,14 @@ export const syncPublicFiles = async (
setPublicFiles([...sortFiles(mergeMetadata(files), sortAsc)]);
} catch (e) {
const parsedError = parseSharingErrorCodes(e);
logError(e, "failed to sync shared collection files");
log.error("failed to sync shared collection files", e);
if (parsedError.message === CustomError.TOKEN_EXPIRED) {
throw e;
}
}
return [...sortFiles(mergeMetadata(files), sortAsc)];
} catch (e) {
logError(e, "failed to get local or sync shared collection files");
log.error("failed to get local or sync shared collection files", e);
throw e;
}
};
@ -217,7 +216,7 @@ const fetchFiles = async (
} while (resp.data.hasMore);
return decryptedFiles;
} catch (e) {
logError(e, "Get cast files failed");
log.error("Get cast files failed", e);
throw e;
}
};
@ -264,7 +263,7 @@ export const getCastCollection = async (
await saveCollection(collection);
return collection;
} catch (e) {
logError(e, "failed to get cast collection");
log.error("failed to get cast collection", e);
throw e;
}
};

View file

@ -1,13 +1,14 @@
import { convertBytesToHumanReadable } from "@/next/file";
import { logError } from "@ente/shared/sentry";
import log from "@/next/log";
export async function getUint8ArrayView(file: Blob): Promise<Uint8Array> {
try {
return new Uint8Array(await file.arrayBuffer());
} catch (e) {
logError(e, "reading file blob failed", {
fileSize: convertBytesToHumanReadable(file.size),
});
log.error(
`Failed to read file blob of size ${convertBytesToHumanReadable(file.size)}`,
e,
);
throw e;
}
}

View file

@ -1,6 +1,5 @@
import { convertBytesToHumanReadable } from "@/next/file";
import log from "@/next/log";
import { CustomError } from "@ente/shared/error";
import { logError } from "@ente/shared/sentry";
import { FILE_TYPE } from "constants/file";
import {
KNOWN_NON_MEDIA_FORMATS,
@ -42,7 +41,6 @@ export async function getFileType(receivedFile: File): Promise<FileTypeInfo> {
};
} catch (e) {
const fileFormat = getFileExtension(receivedFile.name);
const fileSize = convertBytesToHumanReadable(receivedFile.size);
const whiteListedFormat = WHITELISTED_FILE_FORMATS.find(
(a) => a.exactType === fileFormat,
);
@ -53,16 +51,10 @@ export async function getFileType(receivedFile: File): Promise<FileTypeInfo> {
throw Error(CustomError.UNSUPPORTED_FILE_FORMAT);
}
if (e.message === CustomError.NON_MEDIA_FILE) {
logError(e, "unsupported file format", {
fileFormat,
fileSize,
});
log.error(`unsupported file format ${fileFormat}`, e);
throw Error(CustomError.UNSUPPORTED_FILE_FORMAT);
}
logError(e, "type detection failed", {
fileFormat,
fileSize,
});
log.error(`type detection failed for format ${fileFormat}`, e);
throw Error(CustomError.TYPE_DETECTION_FAILED(fileFormat));
}
}

View file

@ -1,5 +1,5 @@
import log from "@/next/log";
import ComlinkCryptoWorker from "@ente/shared/crypto";
import { logError } from "@ente/shared/sentry";
import { FILE_TYPE, RAW_FORMATS } from "constants/file";
import CastDownloadManager from "services/castDownloadManager";
import { decodeLivePhoto } from "services/livePhotoService";
@ -80,7 +80,7 @@ export async function decryptFile(
pubMagicMetadata: filePubMagicMetadata,
};
} catch (e) {
logError(e, "file decryption failed");
log.error("file decryption failed", e);
throw e;
}
}
@ -160,6 +160,6 @@ export const getPreviewableImage = async (
fileBlob = new Blob([fileBlob], { type: fileType.mimeType });
return fileBlob;
} catch (e) {
logError(e, "failed to download file");
log.error("failed to download file", e);
}
};

View file

@ -1,14 +1,14 @@
import { useContext, useEffect, useState } from "react";
import log from "@/next/log";
import DialogBoxV2 from "@ente/shared/components/DialogBoxV2";
import VerifyMasterPasswordForm, {
VerifyMasterPasswordFormProps,
} from "@ente/shared/components/VerifyMasterPasswordForm";
import { logError } from "@ente/shared/sentry";
import { getData, LS_KEYS } from "@ente/shared/storage/localStorage";
import { LS_KEYS, getData } from "@ente/shared/storage/localStorage";
import { KeyAttributes, User } from "@ente/shared/user/types";
import { t } from "i18next";
import { AppContext } from "pages/_app";
import { useContext, useEffect, useState } from "react";
interface Iprops {
open: boolean;
onClose: () => void;
@ -51,7 +51,7 @@ export default function AuthenticateUserModal({
setKeyAttributes(keyAttributes);
}
} catch (e) {
logError(e, "AuthenticateUserModal initialization failed");
log.error("AuthenticateUserModal initialization failed", e);
onClose();
somethingWentWrong();
}

View file

@ -1,6 +1,6 @@
import log from "@/next/log";
import { HorizontalFlex } from "@ente/shared/components/Container";
import OverflowMenu from "@ente/shared/components/OverflowMenu/menu";
import { logError } from "@ente/shared/sentry";
import MoreHoriz from "@mui/icons-material/MoreHoriz";
import { Box } from "@mui/material";
import {
@ -161,22 +161,15 @@ const CollectionOptions = (props: CollectionOptionsProps) => {
case CollectionActions.SHOW_ALBUM_CAST_DIALOG:
callback = showCastAlbumDialog;
break;
default:
logError(
Error("invalid collection action "),
"handleCollectionAction failed",
);
{
action;
}
log.error(`invalid collection action ${action}`);
}
return async (...args: any) => {
try {
loader && startLoading();
await callback(...args);
} catch (e) {
logError(e, "collection action failed", { action });
log.error(`collection action ${action} failed`, e);
setDialogMessage({
title: t("ERROR"),
content: t("UNKNOWN_ERROR"),

View file

@ -1,4 +1,4 @@
import { logError } from "@ente/shared/sentry";
import log from "@/next/log";
import BlockIcon from "@mui/icons-material/Block";
import DoneIcon from "@mui/icons-material/Done";
import ModeEditIcon from "@mui/icons-material/ModeEdit";
@ -63,8 +63,7 @@ export default function ManageParticipant({
selectedParticipant.role = newRole;
await galleryContext.syncWithRemote(false, true);
} catch (e) {
const errorMessage = handleSharingErrors(e);
logError(e, errorMessage);
log.error(handleSharingErrors(e), e);
}
};

View file

@ -1,8 +1,8 @@
import log from "@/next/log";
import { logoutUser } from "@ente/accounts/services/user";
import DialogBoxV2 from "@ente/shared/components/DialogBoxV2";
import EnteButton from "@ente/shared/components/EnteButton";
import { DELETE_ACCOUNT_EMAIL } from "@ente/shared/constants/urls";
import { logError } from "@ente/shared/sentry";
import { Button, Link, Stack } from "@mui/material";
import { Formik, FormikHelpers } from "formik";
import { t } from "i18next";
@ -92,7 +92,7 @@ const DeleteAccountModal = ({ open, onClose }: Iprops) => {
askToMailForDeletion();
}
} catch (e) {
logError(e, "Error while initiating account deletion");
log.error("Error while initiating account deletion", e);
somethingWentWrong();
} finally {
setLoading(false);
@ -147,7 +147,7 @@ const DeleteAccountModal = ({ open, onClose }: Iprops) => {
await deleteAccount(decryptedChallenge, reason, feedback);
logoutUser();
} catch (e) {
logError(e, "solveChallengeAndDeleteAccount failed");
log.error("solveChallengeAndDeleteAccount failed", e);
somethingWentWrong();
} finally {
setLoading(false);

View file

@ -1,6 +1,6 @@
import ElectronAPIs from "@/next/electron";
import { ensureElectron } from "@/next/electron";
import log from "@/next/log";
import LinkButton from "@ente/shared/components/LinkButton";
import { logError } from "@ente/shared/sentry";
import { Tooltip } from "@mui/material";
import { styled } from "@mui/material/styles";
@ -19,9 +19,9 @@ const DirectoryPathContainer = styled(LinkButton)(
export const DirectoryPath = ({ width, path }) => {
const handleClick = async () => {
try {
await ElectronAPIs.openDirectory(path);
await ensureElectron().openDirectory(path);
} catch (e) {
logError(e, "openDirectory failed");
log.error("openDirectory failed", e);
}
};
return (

View file

@ -1,7 +1,5 @@
import ElectronAPIs from "@/next/electron";
import Notification from "components/Notification";
import { t } from "i18next";
import isElectron from "is-electron";
import { AppContext } from "pages/_app";
import { GalleryContext } from "pages/gallery";
import { useContext } from "react";
@ -101,8 +99,9 @@ export const FilesDownloadProgress: React.FC<FilesDownloadProgressProps> = ({
const handleOnClick = (id: number) => () => {
const attributes = attributesList.find((attr) => attr.id === id);
if (isElectron()) {
ElectronAPIs.openDirectory(attributes.downloadDirPath);
const electron = globalThis.electron;
if (electron) {
electron.openDirectory(attributes.downloadDirPath);
} else {
if (attributes.isHidden) {
galleryContext.openHiddenSection(() => {

View file

@ -1,4 +1,4 @@
import { logError } from "@ente/shared/sentry";
import log from "@/next/log";
import { Box, DialogProps, Typography } from "@mui/material";
import { EnteDrawer } from "components/EnteDrawer";
import { t } from "i18next";
@ -41,7 +41,7 @@ const MLSearchSettings = ({ open, onClose, onRootClose }) => {
updateMlSearchEnabled(true);
}
} catch (e) {
logError(e, "Enable ML search failed");
log.error("Enable ML search failed", e);
somethingWentWrong();
}
};
@ -54,7 +54,7 @@ const MLSearchSettings = ({ open, onClose, onRootClose }) => {
closeEnableFaceSearch();
finishLoading();
} catch (e) {
logError(e, "Enable face search failed");
log.error("Enable face search failed", e);
somethingWentWrong();
}
};
@ -64,7 +64,7 @@ const MLSearchSettings = ({ open, onClose, onRootClose }) => {
await updateMlSearchEnabled(false);
onClose();
} catch (e) {
logError(e, "Disable ML search failed");
log.error("Disable ML search failed", e);
somethingWentWrong();
}
};
@ -76,7 +76,7 @@ const MLSearchSettings = ({ open, onClose, onRootClose }) => {
await disableMlSearch();
finishLoading();
} catch (e) {
logError(e, "Disable face search failed");
log.error("Disable face search failed", e);
somethingWentWrong();
}
};

View file

@ -1,5 +1,4 @@
import { addLogLine } from "@ente/shared/logging";
import { logError } from "@ente/shared/sentry";
import log from "@/next/log";
import { CACHES } from "@ente/shared/storage/cacheStorage/constants";
import { styled } from "@mui/material";
import { Legend } from "components/PhotoViewer/styledComponents/Legend";
@ -87,11 +86,11 @@ export function PhotoPeopleList(props: PhotoPeopleListProps) {
let didCancel = false;
async function updateFaceImages() {
addLogLine("calling getPeopleList");
log.info("calling getPeopleList");
const startTime = Date.now();
const people = await getPeopleList(props.file);
addLogLine("getPeopleList", Date.now() - startTime, "ms");
addLogLine("getPeopleList done, didCancel: ", didCancel);
log.info(`getPeopleList ${Date.now() - startTime} ms`);
log.info(`getPeopleList done, didCancel: ${didCancel}`);
!didCancel && setPeople(people);
}
@ -130,7 +129,7 @@ export function AllPeopleList(props: AllPeopleListProps) {
}
!didCancel && setPeople(people);
} catch (e) {
logError(e, "updateFaceImages failed");
log.error("updateFaceImages failed", e);
}
}
updateFaceImages();

View file

@ -1,8 +1,7 @@
import log from "@/next/log";
import { PHOTOS_PAGES } from "@ente/shared/constants/pages";
import { CustomError } from "@ente/shared/error";
import useMemoSingleThreaded from "@ente/shared/hooks/useMemoSingleThreaded";
import { addLogLine } from "@ente/shared/logging";
import { logError } from "@ente/shared/sentry";
import { styled } from "@mui/material";
import PhotoViewer from "components/PhotoViewer";
import { TRASH_SECTION } from "constants/collection";
@ -184,7 +183,7 @@ const PhotoFrame = ({
const file = displayFiles[index];
// this is to prevent outdated updateURL call from updating the wrong file
if (file.id !== id) {
addLogLine(
log.info(
`[${id}]PhotoSwipe: updateURL: file id mismatch: ${file.id} !== ${id}`,
);
throw Error(CustomError.UPDATE_URL_FILE_ID_MISMATCH);
@ -204,7 +203,7 @@ const PhotoFrame = ({
const file = displayFiles[index];
// this is to prevent outdated updateSrcURL call from updating the wrong file
if (file.id !== id) {
addLogLine(
log.info(
`[${id}]PhotoSwipe: updateSrcURL: file id mismatch: ${file.id}`,
);
throw Error(CustomError.UPDATE_URL_FILE_ID_MISMATCH);
@ -212,7 +211,7 @@ const PhotoFrame = ({
if (file.isSourceLoaded && !forceUpdate) {
throw Error(CustomError.URL_ALREADY_SET);
} else if (file.conversionFailed) {
addLogLine(`[${id}]PhotoSwipe: updateSrcURL: conversion failed`);
log.info(`[${id}]PhotoSwipe: updateSrcURL: conversion failed`);
throw Error(CustomError.FILE_CONVERSION_FAILED);
}
@ -308,7 +307,7 @@ const PhotoFrame = ({
index: number,
item: EnteFile,
) => {
addLogLine(
log.info(
`[${
item.id
}] getSlideData called for thumbnail:${!!item.msrc} sourceLoaded:${
@ -319,17 +318,15 @@ const PhotoFrame = ({
if (!item.msrc) {
try {
if (thumbFetching[item.id]) {
addLogLine(
`[${item.id}] thumb download already in progress`,
);
log.info(`[${item.id}] thumb download already in progress`);
return;
}
addLogLine(`[${item.id}] doesn't have thumbnail`);
log.info(`[${item.id}] doesn't have thumbnail`);
thumbFetching[item.id] = true;
const url = await DownloadManager.getThumbnailForPreview(item);
try {
updateURL(index)(item.id, url);
addLogLine(
log.info(
`[${
item.id
}] calling invalidateCurrItems for thumbnail msrc :${!!item.msrc}`,
@ -340,35 +337,35 @@ const PhotoFrame = ({
}
} catch (e) {
if (e.message !== CustomError.URL_ALREADY_SET) {
logError(
e,
log.error(
"updating photoswipe after msrc url update failed",
e,
);
}
// ignore
}
} catch (e) {
logError(e, "getSlideData failed get msrc url failed");
log.error("getSlideData failed get msrc url failed", e);
thumbFetching[item.id] = false;
}
}
if (item.isSourceLoaded || item.conversionFailed) {
if (item.isSourceLoaded) {
addLogLine(`[${item.id}] source already loaded`);
log.info(`[${item.id}] source already loaded`);
}
if (item.conversionFailed) {
addLogLine(`[${item.id}] conversion failed`);
log.info(`[${item.id}] conversion failed`);
}
return;
}
if (fetching[item.id]) {
addLogLine(`[${item.id}] file download already in progress`);
log.info(`[${item.id}] file download already in progress`);
return;
}
try {
addLogLine(`[${item.id}] new file src request`);
log.info(`[${item.id}] new file src request`);
fetching[item.id] = true;
const srcURLs = await DownloadManager.getFileForPreview(item);
if (item.metadata.fileType === FILE_TYPE.LIVE_PHOTO) {
@ -383,7 +380,7 @@ const PhotoFrame = ({
};
try {
await updateSrcURL(index, item.id, dummyImgSrcUrl);
addLogLine(
log.info(
`[${item.id}] calling invalidateCurrItems for live photo imgSrc, source loaded :${item.isSourceLoaded}`,
);
instance.invalidateCurrItems();
@ -392,9 +389,9 @@ const PhotoFrame = ({
}
} catch (e) {
if (e.message !== CustomError.URL_ALREADY_SET) {
logError(
e,
log.error(
"updating photoswipe after for live photo imgSrc update failed",
e,
);
}
}
@ -417,7 +414,7 @@ const PhotoFrame = ({
loadedLivePhotoSrcURL,
true,
);
addLogLine(
log.info(
`[${item.id}] calling invalidateCurrItems for live photo complete, source loaded :${item.isSourceLoaded}`,
);
instance.invalidateCurrItems();
@ -426,16 +423,16 @@ const PhotoFrame = ({
}
} catch (e) {
if (e.message !== CustomError.URL_ALREADY_SET) {
logError(
e,
log.error(
"updating photoswipe for live photo complete update failed",
e,
);
}
}
} else {
try {
await updateSrcURL(index, item.id, srcURLs);
addLogLine(
log.info(
`[${item.id}] calling invalidateCurrItems for src, source loaded :${item.isSourceLoaded}`,
);
instance.invalidateCurrItems();
@ -444,15 +441,15 @@ const PhotoFrame = ({
}
} catch (e) {
if (e.message !== CustomError.URL_ALREADY_SET) {
logError(
e,
log.error(
"updating photoswipe after src url update failed",
e,
);
}
}
}
} catch (e) {
logError(e, "getSlideData failed get src url failed");
log.error("getSlideData failed get src url failed", e);
fetching[item.id] = false;
// no-op
}
@ -467,22 +464,18 @@ const PhotoFrame = ({
item.metadata.fileType !== FILE_TYPE.VIDEO &&
item.metadata.fileType !== FILE_TYPE.LIVE_PHOTO
) {
logError(
new Error(),
"getConvertedVideo called for non video file",
);
log.error("getConvertedVideo called for non video file");
return;
}
if (item.conversionFailed) {
logError(
new Error(),
log.error(
"getConvertedVideo called for file that conversion failed",
);
return;
}
try {
updateURL(index)(item.id, item.msrc, true);
addLogLine(
log.info(
`[${
item.id
}] calling invalidateCurrItems for thumbnail msrc :${!!item.msrc}`,
@ -493,12 +486,15 @@ const PhotoFrame = ({
}
} catch (e) {
if (e.message !== CustomError.URL_ALREADY_SET) {
logError(e, "updating photoswipe after msrc url update failed");
log.error(
"updating photoswipe after msrc url update failed",
e,
);
}
// ignore
}
try {
addLogLine(
log.info(
`[${item.id}] new file getConvertedVideo request- ${item.metadata.title}}`,
);
fetching[item.id] = true;
@ -507,7 +503,7 @@ const PhotoFrame = ({
try {
await updateSrcURL(index, item.id, srcURL, true);
addLogLine(
log.info(
`[${item.id}] calling invalidateCurrItems for src, source loaded :${item.isSourceLoaded}`,
);
instance.invalidateCurrItems();
@ -516,15 +512,15 @@ const PhotoFrame = ({
}
} catch (e) {
if (e.message !== CustomError.URL_ALREADY_SET) {
logError(
e,
log.error(
"updating photoswipe after src url update failed",
e,
);
}
throw e;
}
} catch (e) {
logError(e, "getConvertedVideo failed get src url failed");
log.error("getConvertedVideo failed get src url failed", e);
fetching[item.id] = false;
// no-op
}

View file

@ -1,5 +1,5 @@
import log from "@/next/log";
import { FlexWrapper } from "@ente/shared/components/Container";
import { logError } from "@ente/shared/sentry";
import Close from "@mui/icons-material/Close";
import Done from "@mui/icons-material/Done";
import { Box, IconButton, TextField } from "@mui/material";
@ -48,7 +48,7 @@ export function RenderCaption({
scheduleUpdate();
}
} catch (e) {
logError(e, "failed to update caption");
log.error("failed to update caption", e);
}
};

View file

@ -1,5 +1,5 @@
import log from "@/next/log";
import { FlexWrapper } from "@ente/shared/components/Container";
import { logError } from "@ente/shared/sentry";
import { formatDate, formatTime } from "@ente/shared/time/format";
import CalendarTodayIcon from "@mui/icons-material/CalendarToday";
import EnteDateTimePicker from "components/EnteDateTimePicker";
@ -44,7 +44,7 @@ export function RenderCreationTime({
scheduleUpdate();
}
} catch (e) {
logError(e, "failed to update creationTime");
log.error("failed to update creationTime", e);
} finally {
closeEditMode();
setLoading(false);

View file

@ -1,5 +1,5 @@
import log from "@/next/log";
import { FlexWrapper } from "@ente/shared/components/Container";
import { logError } from "@ente/shared/sentry";
import PhotoOutlined from "@mui/icons-material/PhotoOutlined";
import VideocamOutlined from "@mui/icons-material/VideocamOutlined";
import Box from "@mui/material/Box";
@ -86,7 +86,7 @@ export function RenderFileName({
scheduleUpdate();
}
} catch (e) {
logError(e, "failed to update file name");
log.error("failed to update file name", e);
throw e;
}
};

View file

@ -1,4 +1,4 @@
import { logError } from "@ente/shared/sentry";
import log from "@/next/log";
import Crop169Icon from "@mui/icons-material/Crop169";
import Crop32Icon from "@mui/icons-material/Crop32";
import CropSquareIcon from "@mui/icons-material/CropSquare";
@ -172,10 +172,13 @@ const TransformMenu = () => {
setCanvasLoading(false);
setTransformationPerformed(true);
} catch (e) {
logError(e, "crop handler failed", {
widthRatio,
heightRatio,
});
log.error(
`crop handler failed - ${JSON.stringify({
widthRatio,
heightRatio,
})}`,
e,
);
}
};
const createRotationHandler = (rotation: "left" | "right") => () => {
@ -189,9 +192,7 @@ const TransformMenu = () => {
setCanvasLoading(false);
setTransformationPerformed(true);
} catch (e) {
logError(e, "rotation handler failed", {
rotation,
});
log.error(`rotation handler (${rotation}) failed`, e);
}
};
@ -204,9 +205,7 @@ const TransformMenu = () => {
setCanvasLoading(false);
setTransformationPerformed(true);
} catch (e) {
logError(e, "flip handler failed", {
direction,
});
log.error(`flip handler ${direction} failed`, e);
}
};

View file

@ -1,3 +1,4 @@
import log from "@/next/log";
import {
Backdrop,
Box,
@ -23,7 +24,6 @@ import {
HorizontalFlex,
} from "@ente/shared/components/Container";
import EnteButton from "@ente/shared/components/EnteButton";
import { logError } from "@ente/shared/sentry";
import { downloadUsingAnchor } from "@ente/shared/utils";
import ChevronRightIcon from "@mui/icons-material/ChevronRight";
import CloseIcon from "@mui/icons-material/Close";
@ -275,7 +275,7 @@ const ImageEditorOverlay = (props: IProps) => {
invert !== FILTER_DEFAULT_VALUES.invert,
);
} catch (e) {
logError(e, "Error applying filters");
log.error("Error applying filters", e);
}
}, [brightness, contrast, blur, saturation, invert, canvasRef, fileURL]);
@ -329,7 +329,7 @@ const ImageEditorOverlay = (props: IProps) => {
});
}
} catch (e) {
logError(e, "Error applying filters");
log.error("Error applying filters", e);
throw e;
}
};
@ -422,7 +422,7 @@ const ImageEditorOverlay = (props: IProps) => {
};
});
} catch (e) {
logError(e, "Error loading canvas");
log.error("Error loading canvas", e);
}
};
@ -447,7 +447,7 @@ const ImageEditorOverlay = (props: IProps) => {
canvas.toBlob(resolve, mimeType);
});
} catch (e) {
logError(e, "Error exporting canvas to blob");
log.error("Error exporting canvas to blob", e);
throw e;
}
};
@ -492,7 +492,7 @@ const ImageEditorOverlay = (props: IProps) => {
);
downloadUsingAnchor(tempImgURL, editedFile.name);
} catch (e) {
logError(e, "Error downloading edited photo");
log.error("Error downloading edited photo", e);
}
};
@ -520,7 +520,7 @@ const ImageEditorOverlay = (props: IProps) => {
props.onClose();
props.closePhotoViewer();
} catch (e) {
logError(e, "Error saving copy to ente");
log.error("Error saving copy to ente", e);
}
};
return (

View file

@ -1,4 +1,4 @@
import { logError } from "@ente/shared/sentry";
import log from "@/next/log";
import Photoswipe from "photoswipe";
import PhotoswipeUIDefault from "photoswipe/dist/photoswipe-ui-default";
import { useContext, useEffect, useMemo, useRef, useState } from "react";
@ -16,7 +16,6 @@ import {
isSupportedRawFormat,
} from "utils/file";
import log from "@/next/log";
import { FlexWrapper } from "@ente/shared/components/Container";
import EnteSpinner from "@ente/shared/components/EnteSpinner";
import AlbumOutlined from "@mui/icons-material/AlbumOutlined";
@ -496,7 +495,7 @@ function PhotoViewer(props: Iprops) {
}
needUpdate.current = true;
} catch (e) {
logError(e, "onFavClick failed");
log.error("onFavClick failed", e);
}
};
@ -511,7 +510,7 @@ function PhotoViewer(props: Iprops) {
updateItems(props.items.filter((item) => item.id !== file.id));
needUpdate.current = true;
} catch (e) {
logError(e, "trashFile failed");
log.error("trashFile failed", e);
}
};
@ -562,7 +561,7 @@ function PhotoViewer(props: Iprops) {
}
}
} catch (e) {
logError(e, "updateItems failed");
log.error("updateItems failed", e);
}
};
@ -573,7 +572,7 @@ function PhotoViewer(props: Iprops) {
photoSwipe.updateSize(true);
}
} catch (e) {
logError(e, "refreshPhotoswipe failed");
log.error("refreshPhotoswipe failed", e);
}
};
@ -613,9 +612,10 @@ function PhotoViewer(props: Iprops) {
} catch (e) {
setExif({ key: file.src, value: null });
const fileExtension = getFileExtension(file.metadata.title);
logError(e, "checkExifAvailable failed", {
extension: fileExtension,
});
log.error(
`checkExifAvailable failed for extension ${fileExtension}`,
e,
);
}
};

View file

@ -25,7 +25,7 @@ const Caption = styled("span")`
const MenuWithPeople = (props) => {
const appContext = useContext(AppContext);
// addLogLine("props.selectProps.options: ", selectRef);
// log.info("props.selectProps.options: ", selectRef);
const peopleSuggestions = props.selectProps.options.filter(
(o) => o.type === SuggestionType.PERSON,
);

View file

@ -1,3 +1,4 @@
import log from "@/next/log";
import ChevronRight from "@mui/icons-material/ChevronRight";
import ScienceIcon from "@mui/icons-material/Science";
import { Box, DialogProps, Stack, Typography } from "@mui/material";
@ -9,7 +10,6 @@ import { t } from "i18next";
import { useContext, useEffect, useState } from "react";
import { VerticallyCenteredFlex } from "@ente/shared/components/Container";
import { logError } from "@ente/shared/sentry";
import { EnteMenuItem } from "components/Menu/EnteMenuItem";
import { MenuItemGroup } from "components/Menu/MenuItemGroup";
import isElectron from "is-electron";
@ -41,7 +41,7 @@ export default function AdvancedSettings({ open, onClose, onRootClose }) {
try {
appContext.setIsCFProxyDisabled(!appContext.isCFProxyDisabled);
} catch (e) {
logError(e, "toggleFasterUpload failed");
log.error("toggleFasterUpload failed", e);
}
};
const [indexingStatus, setIndexingStatus] = useState<ClipExtractionStatus>({

View file

@ -1,15 +1,12 @@
import log from "@/next/log";
import { savedLogs } from "@/next/log-web";
import { downloadAsFile } from "@ente/shared/utils";
import Typography from "@mui/material/Typography";
import { EnteMenuItem } from "components/Menu/EnteMenuItem";
import { t } from "i18next";
import { AppContext } from "pages/_app";
import { useContext, useEffect, useState } from "react";
import { Trans } from "react-i18next";
import ElectronAPIs from "@/next/electron";
import { savedLogs } from "@/next/log-web";
import { addLogLine } from "@ente/shared/logging";
import { downloadAsFile } from "@ente/shared/utils";
import Typography from "@mui/material/Typography";
import { EnteMenuItem } from "components/Menu/EnteMenuItem";
import isElectron from "is-electron";
import { isInternalUser } from "utils/user";
import { testUpload } from "../../../tests/upload.test";
import {
@ -19,16 +16,12 @@ import {
export default function DebugSection() {
const appContext = useContext(AppContext);
const [appVersion, setAppVersion] = useState<string>(null);
const [appVersion, setAppVersion] = useState<string | undefined>();
const electron = globalThis.electron;
useEffect(() => {
const main = async () => {
if (isElectron()) {
const appVersion = await ElectronAPIs.appVersion();
setAppVersion(appVersion);
}
};
main();
electron?.appVersion().then((v) => setAppVersion(v));
});
const confirmLogDownload = () =>
@ -46,8 +39,8 @@ export default function DebugSection() {
});
const downloadLogs = () => {
addLogLine("Downloading logs");
if (isElectron()) ElectronAPIs.openLogDirectory();
log.info("Downloading logs");
if (electron) electron.openLogDirectory();
else downloadAsFile(`debug_logs_${Date.now()}.txt`, savedLogs());
};

View file

@ -1,4 +1,4 @@
import { logError } from "@ente/shared/sentry";
import log from "@/next/log";
import { Box, DialogProps } from "@mui/material";
import { EnteDrawer } from "components/EnteDrawer";
import { AppContext } from "pages/_app";
@ -14,7 +14,7 @@ const ModifyMapEnabled = ({ open, onClose, onRootClose, mapEnabled }) => {
await updateMapEnabled(false);
onClose();
} catch (e) {
logError(e, "Disable Map failed");
log.error("Disable Map failed", e);
somethingWentWrong();
}
};
@ -24,7 +24,7 @@ const ModifyMapEnabled = ({ open, onClose, onRootClose, mapEnabled }) => {
await updateMapEnabled(true);
onClose();
} catch (e) {
logError(e, "Enable Map failed");
log.error("Enable Map failed", e);
somethingWentWrong();
}
};

View file

@ -1,3 +1,4 @@
import log from "@/next/log";
import RecoveryKey from "@ente/shared/components/RecoveryKey";
import {
ACCOUNTS_PAGES,
@ -21,7 +22,6 @@ import {
generateEncryptionKey,
} from "@ente/shared/crypto/internal/libsodium";
import { getAccountsURL } from "@ente/shared/network/api";
import { logError } from "@ente/shared/sentry";
import { THEME_COLOR } from "@ente/shared/themes/constants";
import { EnteMenuItem } from "components/Menu/EnteMenuItem";
import WatchFolder from "components/WatchFolder";
@ -109,7 +109,7 @@ export default function UtilitySection({ closeSidebar }) {
)}&token=${accountsToken}`,
);
} catch (e) {
logError(e, "failed to redirect to accounts page");
log.error("failed to redirect to accounts page", e);
}
};

View file

@ -1,7 +1,6 @@
import ElectronAPIs from "@/next/electron";
import log from "@/next/log";
import type { Electron } from "@/next/types/ipc";
import { CustomError } from "@ente/shared/error";
import { addLogLine } from "@ente/shared/logging";
import { logError } from "@ente/shared/sentry";
import { isPromise } from "@ente/shared/utils";
import DiscFullIcon from "@mui/icons-material/DiscFull";
import UserNameInputDialog from "components/UserNameInputDialog";
@ -133,6 +132,8 @@ export default function Uploader(props: Props) {
const uploaderNameRef = useRef<string>(null);
const isDragAndDrop = useRef(false);
const electron = globalThis.electron;
const closeUploadProgress = () => setUploadProgressView(false);
const showUserNameInputDialog = () => setUserNameInputDialogView(true);
@ -178,9 +179,7 @@ export default function Uploader(props: Props) {
if (isElectron()) {
ImportService.getPendingUploads().then(
({ files: electronFiles, collectionName, type }) => {
addLogLine(
`found pending desktop upload, resuming uploads`,
);
log.info(`found pending desktop upload, resuming uploads`);
resumeDesktopUpload(type, electronFiles, collectionName);
},
);
@ -211,29 +210,29 @@ export default function Uploader(props: Props) {
pickedUploadType.current === PICKED_UPLOAD_TYPE.FOLDERS &&
props.webFolderSelectorFiles?.length > 0
) {
addLogLine(`received folder upload request`);
log.info(`received folder upload request`);
setWebFiles(props.webFolderSelectorFiles);
} else if (
pickedUploadType.current === PICKED_UPLOAD_TYPE.FILES &&
props.webFileSelectorFiles?.length > 0
) {
addLogLine(`received file upload request`);
log.info(`received file upload request`);
setWebFiles(props.webFileSelectorFiles);
} else if (props.dragAndDropFiles?.length > 0) {
isDragAndDrop.current = true;
if (isElectron()) {
if (electron) {
const main = async () => {
try {
addLogLine(`uploading dropped files from desktop app`);
log.info(`uploading dropped files from desktop app`);
// check and parse dropped files which are zip files
let electronFiles = [] as ElectronFile[];
for (const file of props.dragAndDropFiles) {
if (file.name.endsWith(".zip")) {
const zipFiles =
await ElectronAPIs.getElectronFilesFromGoogleZip(
await electron.getElectronFilesFromGoogleZip(
(file as any).path,
);
addLogLine(
log.info(
`zip file - ${file.name} contains ${zipFiles.length} files`,
);
electronFiles = [...electronFiles, ...zipFiles];
@ -251,18 +250,18 @@ export default function Uploader(props: Props) {
);
}
}
addLogLine(
log.info(
`uploading dropped files from desktop app - ${electronFiles.length} files found`,
);
setElectronFiles(electronFiles);
} catch (e) {
logError(e, "failed to upload desktop dropped files");
log.error("failed to upload desktop dropped files", e);
setWebFiles(props.dragAndDropFiles);
}
};
main();
} else {
addLogLine(`uploading dropped files from web app`);
log.info(`uploading dropped files from web app`);
setWebFiles(props.dragAndDropFiles);
}
}
@ -278,7 +277,7 @@ export default function Uploader(props: Props) {
webFiles?.length > 0 ||
appContext.sharedFiles?.length > 0
) {
addLogLine(
log.info(
`upload request type:${
electronFiles?.length > 0
? "electronFiles"
@ -293,13 +292,13 @@ export default function Uploader(props: Props) {
);
if (uploadManager.isUploadRunning()) {
if (watchFolderService.isUploadRunning()) {
addLogLine(
log.info(
"watchFolder upload was running, pausing it to run user upload",
);
// pause watch folder service on user upload
watchFolderService.pauseRunningSync();
} else {
addLogLine(
log.info(
"an upload is already running, rejecting new upload request",
);
// no-op
@ -371,7 +370,7 @@ export default function Uploader(props: Props) {
uploaderName?: string,
) => {
try {
addLogLine(
log.info(
`upload file to an existing collection name:${collection.name}, collectionID:${collection.id}`,
);
await preCollectionCreationAction();
@ -387,7 +386,7 @@ export default function Uploader(props: Props) {
uploaderName,
);
} catch (e) {
logError(e, "Failed to upload files to existing collections");
log.error("Failed to upload files to existing collections", e);
}
};
@ -396,7 +395,7 @@ export default function Uploader(props: Props) {
collectionName?: string,
) => {
try {
addLogLine(
log.info(
`upload file to an new collections strategy:${strategy} ,collectionName:${collectionName}`,
);
await preCollectionCreationAction();
@ -416,7 +415,7 @@ export default function Uploader(props: Props) {
toUploadFiles.current,
);
}
addLogLine(
log.info(
`upload collections - [${[...collectionNameToFilesMap.keys()]}]`,
);
try {
@ -446,7 +445,7 @@ export default function Uploader(props: Props) {
}
} catch (e) {
closeUploadProgress();
logError(e, "Failed to create album");
log.error("Failed to create album", e);
appContext.setDialogMessage({
title: t("ERROR"),
@ -461,7 +460,7 @@ export default function Uploader(props: Props) {
);
toUploadFiles.current = null;
} catch (e) {
logError(e, "Failed to upload files to new collections");
log.error("Failed to upload files to new collections", e);
}
};
@ -501,22 +500,22 @@ export default function Uploader(props: Props) {
uploaderName?: string,
) => {
try {
addLogLine("uploadFiles called");
log.info("uploadFiles called");
preUploadAction();
if (
isElectron() &&
electron &&
!isPendingDesktopUpload.current &&
!watchFolderService.isUploadRunning()
) {
await ImportService.setToUploadCollection(collections);
if (zipPaths.current) {
await ElectronAPIs.setToUploadFiles(
await electron.setToUploadFiles(
PICKED_UPLOAD_TYPE.ZIPS,
zipPaths.current,
);
zipPaths.current = null;
}
await ElectronAPIs.setToUploadFiles(
await electron.setToUploadFiles(
PICKED_UPLOAD_TYPE.FILES,
filesWithCollectionToUploadIn.map(
({ file }) => (file as ElectronFile).path,
@ -543,9 +542,9 @@ export default function Uploader(props: Props) {
watchFolderService.resumePausedSync();
}
}
} catch (err) {
logError(err, "failed to upload files");
showUserFacingError(err.message);
} catch (e) {
log.error("failed to upload files", e);
showUserFacingError(e.message);
closeUploadProgress();
} finally {
postUploadAction();
@ -554,7 +553,7 @@ export default function Uploader(props: Props) {
const retryFailed = async () => {
try {
addLogLine("user retrying failed upload");
log.info("user retrying failed upload");
const filesWithCollections =
uploadManager.getFailedFilesWithCollections();
const uploaderName = uploadManager.getUploaderName();
@ -564,9 +563,9 @@ export default function Uploader(props: Props) {
filesWithCollections.collections,
uploaderName,
);
} catch (err) {
logError(err, "retry failed files failed");
showUserFacingError(err.message);
} catch (e) {
log.error("retry failed files failed", e);
showUserFacingError(e.message);
closeUploadProgress();
} finally {
postUploadAction();
@ -629,7 +628,7 @@ export default function Uploader(props: Props) {
) => {
try {
if (accessedThroughSharedURL) {
addLogLine(
log.info(
`uploading files to pulbic collection - ${props.uploadCollection.name} - ${props.uploadCollection.id}`,
);
const uploaderName = await getPublicCollectionUploaderName(
@ -644,7 +643,7 @@ export default function Uploader(props: Props) {
if (isPendingDesktopUpload.current) {
isPendingDesktopUpload.current = false;
if (pendingDesktopUploadCollectionName.current) {
addLogLine(
log.info(
`upload pending files to collection - ${pendingDesktopUploadCollectionName.current}`,
);
uploadFilesToNewCollections(
@ -653,7 +652,7 @@ export default function Uploader(props: Props) {
);
pendingDesktopUploadCollectionName.current = null;
} else {
addLogLine(
log.info(
`pending upload - strategy - "multiple collections" `,
);
uploadFilesToNewCollections(
@ -663,7 +662,7 @@ export default function Uploader(props: Props) {
return;
}
if (isElectron() && pickedUploadType === PICKED_UPLOAD_TYPE.ZIPS) {
addLogLine("uploading zip files");
log.info("uploading zip files");
uploadFilesToNewCollections(
UPLOAD_STRATEGY.COLLECTION_PER_FOLDER,
);
@ -684,7 +683,7 @@ export default function Uploader(props: Props) {
}
let showNextModal = () => {};
if (importSuggestion.hasNestedFolders) {
addLogLine(`nested folders detected`);
log.info(`nested folders detected`);
showNextModal = () => setChoiceModalView(true);
} else {
showNextModal = () =>
@ -697,24 +696,27 @@ export default function Uploader(props: Props) {
intent: CollectionSelectorIntent.upload,
});
} catch (e) {
logError(e, "handleCollectionCreationAndUpload failed");
log.error("handleCollectionCreationAndUpload failed", e);
}
};
const handleDesktopUpload = async (type: PICKED_UPLOAD_TYPE) => {
const handleDesktopUpload = async (
type: PICKED_UPLOAD_TYPE,
electron: Electron,
) => {
let files: ElectronFile[];
pickedUploadType.current = type;
if (type === PICKED_UPLOAD_TYPE.FILES) {
files = await ElectronAPIs.showUploadFilesDialog();
files = await electron.showUploadFilesDialog();
} else if (type === PICKED_UPLOAD_TYPE.FOLDERS) {
files = await ElectronAPIs.showUploadDirsDialog();
files = await electron.showUploadDirsDialog();
} else {
const response = await ElectronAPIs.showUploadZipDialog();
const response = await electron.showUploadZipDialog();
files = response.files;
zipPaths.current = response.zipPaths;
}
if (files?.length > 0) {
addLogLine(
log.info(
` desktop upload for type:${type} and fileCount: ${files?.length} requested`,
);
setElectronFiles(files);
@ -738,8 +740,8 @@ export default function Uploader(props: Props) {
};
const handleUpload = (type) => () => {
if (isElectron()) {
handleDesktopUpload(type);
if (electron) {
handleDesktopUpload(type, electron);
} else {
handleWebUpload(type);
}
@ -767,7 +769,7 @@ export default function Uploader(props: Props) {
uploaderName,
);
} catch (e) {
logError(e, "public upload failed ");
log.error("public upload failed ", e);
}
};

View file

@ -1,17 +1,14 @@
import DialogTitleWithCloseButton from "@ente/shared/components/DialogBox/TitleWithCloseButton";
import { Button, Dialog, DialogContent, Stack } from "@mui/material";
import UploadStrategyChoiceModal from "components/Upload/UploadStrategyChoiceModal";
import { PICKED_UPLOAD_TYPE, UPLOAD_STRATEGY } from "constants/upload";
import { t } from "i18next";
import { AppContext } from "pages/_app";
import { useContext, useEffect, useState } from "react";
import watchFolderService from "services/watchFolder/watchFolderService";
import { WatchMapping } from "types/watchFolder";
import { MappingList } from "./mappingList";
import ElectronAPIs from "@/next/electron";
import DialogTitleWithCloseButton from "@ente/shared/components/DialogBox/TitleWithCloseButton";
import UploadStrategyChoiceModal from "components/Upload/UploadStrategyChoiceModal";
import { PICKED_UPLOAD_TYPE, UPLOAD_STRATEGY } from "constants/upload";
import isElectron from "is-electron";
import { getImportSuggestion } from "utils/upload";
import { MappingList } from "./mappingList";
interface Iprops {
open: boolean;
@ -24,10 +21,10 @@ export default function WatchFolder({ open, onClose }: Iprops) {
const [choiceModalOpen, setChoiceModalOpen] = useState(false);
const appContext = useContext(AppContext);
const electron = globalThis.electron;
useEffect(() => {
if (!isElectron()) {
return;
}
if (!electron) return;
watchFolderService.getWatchMappings().then((m) => setMappings(m));
}, []);
@ -52,8 +49,10 @@ export default function WatchFolder({ open, onClose }: Iprops) {
};
const addFolderForWatching = async (path: string) => {
if (!electron) return;
setInputFolderPath(path);
const files = await ElectronAPIs.getDirFiles(path);
const files = await electron.getDirFiles(path);
const analysisResult = getImportSuggestion(
PICKED_UPLOAD_TYPE.FOLDERS,
files,

View file

@ -1,4 +1,4 @@
import { logError } from "@ente/shared/sentry";
import log from "@/next/log";
import { styled } from "@mui/material";
import { useTheme } from "@mui/material/styles";
import { GalleryContext } from "pages/gallery";
@ -47,7 +47,7 @@ const Avatar: React.FC<AvatarProps> = ({ file, email, opacity }) => {
// getting email from in-memory id-email map
const email = userIDToEmailMap.get(file.ownerID);
if (!email) {
logError(Error(), "email not found in userIDToEmailMap");
log.error("email not found in userIDToEmailMap");
return;
}
const colorIndex =
@ -58,8 +58,7 @@ const Avatar: React.FC<AvatarProps> = ({ file, email, opacity }) => {
} else if (file.ownerID === user.id) {
const uploaderName = file.pubMagicMetadata.data.uploaderName;
if (!uploaderName) {
logError(
Error(),
log.error(
"uploaderName not found in file.pubMagicMetadata.data",
);
return;
@ -67,8 +66,8 @@ const Avatar: React.FC<AvatarProps> = ({ file, email, opacity }) => {
setUserLetter(uploaderName[0].toUpperCase());
setColorCode(PUBLIC_COLLECTED_FILES_AVATAR_COLOR_CODE);
}
} catch (err) {
logError(err, "AvatarIcon.tsx - useLayoutEffect file failed");
} catch (e) {
log.error("AvatarIcon.tsx - useLayoutEffect file failed", e);
}
}, [file]);
@ -87,15 +86,15 @@ const Avatar: React.FC<AvatarProps> = ({ file, email, opacity }) => {
(key) => userIDToEmailMap.get(key) === email,
);
if (!id) {
logError(Error(), `ID not found for email: ${email}`);
log.error(`ID not found for email: ${email}`);
return;
}
const colorIndex = id % theme.colors.avatarColors.length;
const colorCode = theme.colors.avatarColors[colorIndex];
setUserLetter(email[0].toUpperCase());
setColorCode(colorCode);
} catch (err) {
logError(err, "AvatarIcon.tsx - useLayoutEffect email failed");
} catch (e) {
log.error("AvatarIcon.tsx - useLayoutEffect email failed", e);
}
}, [email]);

View file

@ -1,6 +1,6 @@
import log from "@/next/log";
import { SUPPORT_EMAIL } from "@ente/shared/constants/urls";
import { useLocalState } from "@ente/shared/hooks/useLocalState";
import { logError } from "@ente/shared/sentry";
import { LS_KEYS } from "@ente/shared/storage/localStorage";
import { Link, Stack } from "@mui/material";
import { PLAN_PERIOD } from "constants/gallery";
@ -92,7 +92,7 @@ function PlanSelectorCard(props: Props) {
}
setPlans(plans);
} catch (e) {
logError(e, "plan selector modal open failed");
log.error("plan selector modal open failed", e);
props.closeModal();
appContext.setDialogMessage({
title: t("OPEN_PLAN_SELECTOR_MODAL_FAILED"),

View file

@ -1,7 +1,7 @@
import log from "@/next/log";
import { Overlay } from "@ente/shared/components/Container";
import { CustomError } from "@ente/shared/error";
import useLongPress from "@ente/shared/hooks/useLongPress";
import { logError } from "@ente/shared/sentry";
import { formatDateRelative } from "@ente/shared/time/format";
import AlbumOutlined from "@mui/icons-material/AlbumOutlined";
import PlayCircleOutlineOutlinedIcon from "@mui/icons-material/PlayCircleOutlineOutlined";
@ -271,7 +271,7 @@ export default function PreviewCard(props: IProps) {
updateURL(file.id, url);
} catch (e) {
if (e.message !== CustomError.URL_ALREADY_SET) {
logError(e, "preview card useEffect failed");
log.error("preview card useEffect failed", e);
}
// no-op
}

View file

@ -1,6 +1,6 @@
import { CustomHead } from "@/next/components/Head";
import ElectronAPIs from "@/next/electron";
import { setupI18n } from "@/next/i18n";
import log from "@/next/log";
import { logStartupBanner } from "@/next/log-web";
import { AppUpdateInfo } from "@/next/types/ipc";
import {
@ -23,12 +23,9 @@ import EnteSpinner from "@ente/shared/components/EnteSpinner";
import { MessageContainer } from "@ente/shared/components/MessageContainer";
import AppNavbar from "@ente/shared/components/Navbar/app";
import { PHOTOS_PAGES as PAGES } from "@ente/shared/constants/pages";
import { CustomError } from "@ente/shared/error";
import { Events, eventBus } from "@ente/shared/events";
import { useLocalState } from "@ente/shared/hooks/useLocalState";
import { addLogLine } from "@ente/shared/logging";
import HTTPService from "@ente/shared/network/HTTPService";
import { logError } from "@ente/shared/sentry";
import { LS_KEYS, getData } from "@ente/shared/storage/localStorage";
import {
getLocalMapEnabled,
@ -156,7 +153,8 @@ export default function App({ Component, pageProps }: AppProps) {
}, []);
useEffect(() => {
if (isElectron()) {
const electron = globalThis.electron;
if (electron) {
const showUpdateDialog = (updateInfo: AppUpdateInfo) => {
if (updateInfo.autoUpdatable) {
setDialogMessage(
@ -176,7 +174,7 @@ export default function App({ Component, pageProps }: AppProps) {
});
}
};
ElectronAPIs.registerUpdateEventListener(showUpdateDialog);
electron.registerUpdateEventListener(showUpdateDialog);
}
}, []);
@ -190,7 +188,7 @@ export default function App({ Component, pageProps }: AppProps) {
setMlSearchEnabled(mlSearchConfig.enabled);
mlWorkManager.setMlSearchEnabled(mlSearchConfig.enabled);
} catch (e) {
logError(e, "Error while loading mlSearchEnabled");
log.error("Error while loading mlSearchEnabled", e);
}
};
loadMlSearchState();
@ -200,7 +198,7 @@ export default function App({ Component, pageProps }: AppProps) {
mlWorkManager.setMlSearchEnabled(false);
});
} catch (e) {
logError(e, "Error while subscribing to logout event");
log.error("Error while subscribing to logout event", e);
}
}, []);
@ -214,10 +212,10 @@ export default function App({ Component, pageProps }: AppProps) {
}
const initExport = async () => {
try {
addLogLine("init export");
log.info("init export");
const token = getToken();
if (!token) {
addLogLine(
log.info(
"User not logged in, not starting export continuous sync job",
);
return;
@ -238,11 +236,11 @@ export default function App({ Component, pageProps }: AppProps) {
exportService.enableContinuousExport();
}
if (isExportInProgress(exportRecord.stage)) {
addLogLine("export was in progress, resuming");
log.info("export was in progress, resuming");
exportService.scheduleExport();
}
} catch (e) {
logError(e, "init export failed");
log.error("init export failed", e);
}
};
initExport();
@ -251,7 +249,7 @@ export default function App({ Component, pageProps }: AppProps) {
exportService.disableContinuousExport();
});
} catch (e) {
logError(e, "Error while subscribing to logout event");
log.error("Error while subscribing to logout event", e);
}
}, []);
@ -268,9 +266,7 @@ export default function App({ Component, pageProps }: AppProps) {
const redirectAction = redirectMap.get(redirect);
window.location.href = await redirectAction();
} else {
logError(CustomError.BAD_REQUEST, "invalid redirection", {
redirect,
});
log.error(`invalid redirection ${redirect}`);
}
};
@ -337,7 +333,7 @@ export default function App({ Component, pageProps }: AppProps) {
setMlSearchEnabled(enabled);
mlWorkManager.setMlSearchEnabled(enabled);
} catch (e) {
logError(e, "Error while updating mlSearchEnabled");
log.error("Error while updating mlSearchEnabled", e);
}
};
@ -347,7 +343,7 @@ export default function App({ Component, pageProps }: AppProps) {
setLocalMapEnabled(enabled);
setMapEnabled(enabled);
} catch (e) {
logError(e, "Error while updating mapEnabled");
log.error("Error while updating mapEnabled", e);
}
};

View file

@ -60,38 +60,11 @@ import {
sortFiles,
} from "utils/file";
import { PHOTOS_PAGES as PAGES } from "@ente/shared/constants/pages";
import { CustomError } from "@ente/shared/error";
import { logError } from "@ente/shared/sentry";
import CollectionNamer, {
CollectionNamerAttributes,
} from "components/Collections/CollectionNamer";
import Uploader from "components/Upload/Uploader";
import PlanSelector from "components/pages/gallery/PlanSelector";
import {
ALL_SECTION,
ARCHIVE_SECTION,
CollectionSummaryType,
DUMMY_UNCATEGORIZED_COLLECTION,
HIDDEN_ITEMS_SECTION,
TRASH_SECTION,
} from "constants/collection";
import { AppContext } from "pages/_app";
import { getLocalTrashedFiles, syncTrash } from "services/trashService";
import {
COLLECTION_OPS_TYPE,
constructCollectionNameMap,
getArchivedCollections,
getDefaultHiddenCollectionIDs,
getSelectedCollection,
handleCollectionOps,
hasNonSystemCollections,
splitNormalAndHiddenCollections,
} from "utils/collection";
import ElectronAPIs from "@/next/electron";
import log from "@/next/log";
import { APPS } from "@ente/shared/apps/constants";
import { CenteredFlex } from "@ente/shared/components/Container";
import { PHOTOS_PAGES as PAGES } from "@ente/shared/constants/pages";
import { CustomError } from "@ente/shared/error";
import useFileInput from "@ente/shared/hooks/useFileInput";
import useMemoSingleThreaded from "@ente/shared/hooks/useMemoSingleThreaded";
import InMemoryStore, { MS_KEYS } from "@ente/shared/storage/InMemoryStore";
@ -101,6 +74,9 @@ import { User } from "@ente/shared/user/types";
import { isPromise } from "@ente/shared/utils";
import AuthenticateUserModal from "components/AuthenticateUserModal";
import Collections from "components/Collections";
import CollectionNamer, {
CollectionNamerAttributes,
} from "components/Collections/CollectionNamer";
import ExportModal from "components/ExportModal";
import {
FilesDownloadProgress,
@ -112,16 +88,27 @@ import FixCreationTime, {
import GalleryEmptyState from "components/GalleryEmptyState";
import { ITEM_TYPE, TimeStampListItem } from "components/PhotoList";
import SearchResultInfo from "components/Search/SearchResultInfo";
import Uploader from "components/Upload/Uploader";
import UploadInputs from "components/UploadSelectorInputs";
import { GalleryNavbar } from "components/pages/gallery/Navbar";
import PlanSelector from "components/pages/gallery/PlanSelector";
import {
ALL_SECTION,
ARCHIVE_SECTION,
CollectionSummaryType,
DUMMY_UNCATEGORIZED_COLLECTION,
HIDDEN_ITEMS_SECTION,
TRASH_SECTION,
} from "constants/collection";
import { SYNC_INTERVAL_IN_MICROSECONDS } from "constants/gallery";
import isElectron from "is-electron";
import { AppContext } from "pages/_app";
import { ClipService } from "services/clipService";
import { constructUserIDToEmailMap } from "services/collectionService";
import downloadManager from "services/download";
import { syncEmbeddings } from "services/embeddingService";
import { syncEntities } from "services/entityService";
import locationSearchService from "services/locationSearchService";
import { getLocalTrashedFiles, syncTrash } from "services/trashService";
import uploadManager from "services/upload/uploadManager";
import { Collection, CollectionSummaries } from "types/collection";
import { EnteFile } from "types/file";
@ -134,6 +121,16 @@ import {
} from "types/gallery";
import { Search, SearchResultSummary, UpdateSearch } from "types/search";
import { FamilyData } from "types/user";
import {
COLLECTION_OPS_TYPE,
constructCollectionNameMap,
getArchivedCollections,
getDefaultHiddenCollectionIDs,
getSelectedCollection,
handleCollectionOps,
hasNonSystemCollections,
splitNormalAndHiddenCollections,
} from "utils/collection";
import ComlinkSearchWorker from "utils/comlink/ComlinkSearchWorker";
import { isArchivedFile } from "utils/magicMetadata";
import { getSessionExpiredMessage } from "utils/ui";
@ -321,6 +318,7 @@ export default function Gallery() {
return;
}
preloadImage("/images/subscription-card-background");
const electron = globalThis.electron;
const main = async () => {
const valid = await validateKey();
if (!valid) {
@ -363,9 +361,9 @@ export default function Gallery() {
syncInterval.current = setInterval(() => {
syncWithRemote(false, true);
}, SYNC_INTERVAL_IN_MICROSECONDS);
if (isElectron()) {
if (electron) {
void ClipService.setupOnFileUploadListener();
ElectronAPIs.registerForegroundEventListener(() => {
electron.registerForegroundEventListener(() => {
syncWithRemote(false, true);
});
}
@ -373,8 +371,8 @@ export default function Gallery() {
main();
return () => {
clearInterval(syncInterval.current);
if (isElectron()) {
ElectronAPIs.registerForegroundEventListener(() => {});
if (electron) {
electron.registerForegroundEventListener(() => {});
ClipService.removeOnFileUploadListener();
}
};
@ -719,7 +717,7 @@ export default function Gallery() {
router.push(PAGES.CREDENTIALS);
break;
default:
logError(e, "syncWithRemote failed");
log.error("syncWithRemote failed", e);
}
} finally {
setTempDeletedFileIds(new Set());
@ -871,7 +869,7 @@ export default function Gallery() {
clearSelection();
await syncWithRemote(false, true);
} catch (e) {
logError(e, "collection ops failed", { ops });
log.error(`collection ops (${ops}) failed`, e);
setDialogMessage({
title: t("ERROR"),
@ -916,7 +914,7 @@ export default function Gallery() {
clearSelection();
await syncWithRemote(false, true);
} catch (e) {
logError(e, "file ops failed", { ops });
log.error(`file ops (${ops}) failed`, e);
setDialogMessage({
title: t("ERROR"),
@ -935,7 +933,7 @@ export default function Gallery() {
const collection = await createAlbum(collectionName);
await collectionOpsHelper(ops)(collection);
} catch (e) {
logError(e, "create and collection ops failed", { ops });
log.error(`create and collection ops (${ops}) failed`, e);
setDialogMessage({
title: t("ERROR"),

View file

@ -1,4 +1,4 @@
import ElectronAPIs from "@/next/electron";
import log from "@/next/log";
import Login from "@ente/accounts/components/Login";
import SignUp from "@ente/accounts/components/SignUp";
import { APPS } from "@ente/shared/apps/constants";
@ -7,14 +7,12 @@ import EnteSpinner from "@ente/shared/components/EnteSpinner";
import { PHOTOS_PAGES as PAGES } from "@ente/shared/constants/pages";
import { saveKeyInSessionStore } from "@ente/shared/crypto/helpers";
import { getAlbumsURL } from "@ente/shared/network/api";
import { logError } from "@ente/shared/sentry";
import localForage from "@ente/shared/storage/localForage";
import { getData, LS_KEYS } from "@ente/shared/storage/localStorage";
import { getToken } from "@ente/shared/storage/localStorage/helpers";
import { getKey, SESSION_KEYS } from "@ente/shared/storage/sessionStorage";
import { Button, styled, Typography, TypographyProps } from "@mui/material";
import { t } from "i18next";
import isElectron from "is-electron";
import { useRouter } from "next/router";
import { CarouselProvider, DotGroup, Slide, Slider } from "pure-react-carousel";
import "pure-react-carousel/dist/react-carousel.es.css";
@ -132,11 +130,12 @@ export default function LandingPage() {
const handleNormalRedirect = async () => {
const user = getData(LS_KEYS.USER);
let key = getKey(SESSION_KEYS.ENCRYPTION_KEY);
if (!key && isElectron()) {
const electron = globalThis.electron;
if (!key && electron) {
try {
key = await ElectronAPIs.getEncryptionKey();
key = await electron.getEncryptionKey();
} catch (e) {
logError(e, "getEncryptionKey failed");
log.error("getEncryptionKey failed", e);
}
if (key) {
await saveKeyInSessionStore(
@ -160,7 +159,7 @@ export default function LandingPage() {
try {
await localForage.ready();
} catch (e) {
logError(e, "usage in incognito mode tried");
log.error("usage in incognito mode tried", e);
appContext.setDialogMessage({
title: t("LOCAL_STORAGE_NOT_ACCESSIBLE"),

View file

@ -1,3 +1,4 @@
import log from "@/next/log";
import {
CenteredFlex,
SpaceBetweenFlex,
@ -46,7 +47,6 @@ import { PHOTOS_PAGES as PAGES } from "@ente/shared/constants/pages";
import { ENTE_WEBSITE_LINK } from "@ente/shared/constants/urls";
import ComlinkCryptoWorker from "@ente/shared/crypto";
import useFileInput from "@ente/shared/hooks/useFileInput";
import { logError } from "@ente/shared/sentry";
import AddPhotoAlternateOutlined from "@mui/icons-material/AddPhotoAlternateOutlined";
import FileDownloadOutlinedIcon from "@mui/icons-material/FileDownloadOutlined";
import MoreHoriz from "@mui/icons-material/MoreHoriz";
@ -292,7 +292,7 @@ export default function PublicCollectionGallery() {
setFilesDownloadProgressAttributes,
);
} catch (e) {
logError(e, "failed to downloads shared album all files");
log.error("failed to downloads shared album all files", e);
}
};
@ -417,7 +417,7 @@ export default function PublicCollectionGallery() {
setPublicCollection(null);
setPublicFiles(null);
} else {
logError(e, "failed to sync public album with remote");
log.error("failed to sync public album with remote", e);
}
} finally {
appContext.finishLoading();
@ -441,7 +441,7 @@ export default function PublicCollectionGallery() {
publicUrl.memLimit,
);
} catch (e) {
logError(e, "failed to derive key for verifyLinkPassword");
log.error("failed to derive key for verifyLinkPassword", e);
setFieldError(`${t("UNKNOWN_ERROR")} ${e.message}`);
return;
}
@ -468,7 +468,7 @@ export default function PublicCollectionGallery() {
await syncWithRemote();
appContext.finishLoading();
} catch (e) {
logError(e, "failed to verifyLinkPassword");
log.error("failed to verifyLinkPassword", e);
setFieldError(`${t("UNKNOWN_ERROR")} ${e.message}`);
}
};
@ -528,7 +528,7 @@ export default function PublicCollectionGallery() {
);
clearSelection();
} catch (e) {
logError(e, "failed to download selected files");
log.error("failed to download selected files", e);
}
};

View file

@ -1,6 +1,6 @@
import log from "@/next/log";
import HTTPService from "@ente/shared/network/HTTPService";
import { getEndpoint, getPaymentsURL } from "@ente/shared/network/api";
import { logError } from "@ente/shared/sentry";
import {
LS_KEYS,
removeData,
@ -40,7 +40,7 @@ class billingService {
const { plans } = response.data;
return plans;
} catch (e) {
logError(e, "failed to get plans");
log.error("failed to get plans", e);
}
}
@ -56,7 +56,7 @@ class billingService {
const { subscription } = response.data;
setData(LS_KEYS.SUBSCRIPTION, subscription);
} catch (e) {
logError(e, "failed to get user's subscription details");
log.error("failed to get user's subscription details", e);
}
}
@ -69,7 +69,7 @@ class billingService {
PaymentActionType.Buy,
);
} catch (e) {
logError(e, "unable to buy subscription");
log.error("unable to buy subscription", e);
throw e;
}
}
@ -83,7 +83,7 @@ class billingService {
PaymentActionType.Update,
);
} catch (e) {
logError(e, "subscription update failed");
log.error("subscription update failed", e);
throw e;
}
}
@ -101,7 +101,7 @@ class billingService {
const { subscription } = response.data;
setData(LS_KEYS.SUBSCRIPTION, subscription);
} catch (e) {
logError(e, "subscription cancel failed");
log.error("subscription cancel failed", e);
throw e;
}
}
@ -119,7 +119,7 @@ class billingService {
const { subscription } = response.data;
setData(LS_KEYS.SUBSCRIPTION, subscription);
} catch (e) {
logError(e, "failed to activate subscription");
log.error("failed to activate subscription", e);
throw e;
}
}
@ -147,9 +147,9 @@ class billingService {
const { subscription } = response.data;
setData(LS_KEYS.SUBSCRIPTION, subscription);
return subscription;
} catch (err) {
logError(err, "Error while verifying subscription");
throw err;
} catch (e) {
log.error("Error while verifying subscription", e);
throw e;
}
}
@ -163,7 +163,7 @@ class billingService {
});
removeData(LS_KEYS.FAMILY_DATA);
} catch (e) {
logError(e, "/family/leave failed");
log.error("/family/leave failed", e);
throw e;
}
}
@ -177,7 +177,7 @@ class billingService {
const redirectURL = this.getRedirectURL();
window.location.href = `${getPaymentsURL()}?productID=${productID}&paymentToken=${paymentToken}&action=${action}&redirectURL=${redirectURL}`;
} catch (e) {
logError(e, "unable to get payments url");
log.error("unable to get payments url", e);
throw e;
}
}
@ -194,7 +194,7 @@ class billingService {
);
window.location.href = response.data.url;
} catch (e) {
logError(e, "unable to get customer portal url");
log.error("unable to get customer portal url", e);
throw e;
}
}

View file

@ -1,9 +1,8 @@
import ElectronAPIs from "@/next/electron";
import { ensureElectron } from "@/next/electron";
import log from "@/next/log";
import ComlinkCryptoWorker from "@ente/shared/crypto";
import { CustomError } from "@ente/shared/error";
import { Events, eventBus } from "@ente/shared/events";
import { addLogLine } from "@ente/shared/logging";
import { logError } from "@ente/shared/sentry";
import { LS_KEYS, getData } from "@ente/shared/storage/localStorage";
import { FILE_TYPE } from "constants/file";
import isElectron from "is-electron";
@ -63,36 +62,36 @@ class ClipServiceImpl {
return;
}
if (this.onFileUploadedHandler) {
addLogLine("file upload listener already setup");
log.info("file upload listener already setup");
return;
}
addLogLine("setting up file upload listener");
log.info("setting up file upload listener");
this.onFileUploadedHandler = (args) => {
this.runLocalFileClipExtraction(args);
};
eventBus.on(Events.FILE_UPLOADED, this.onFileUploadedHandler, this);
addLogLine("setup file upload listener successfully");
log.info("setup file upload listener successfully");
} catch (e) {
logError(e, "failed to setup clip service");
log.error("failed to setup clip service", e);
}
};
removeOnFileUploadListener = async () => {
try {
if (!this.onFileUploadedHandler) {
addLogLine("file upload listener already removed");
log.info("file upload listener already removed");
return;
}
addLogLine("removing file upload listener");
log.info("removing file upload listener");
eventBus.removeListener(
Events.FILE_UPLOADED,
this.onFileUploadedHandler,
this,
);
this.onFileUploadedHandler = null;
addLogLine("removed file upload listener successfully");
log.info("removed file upload listener successfully");
} catch (e) {
logError(e, "failed to remove clip service");
log.error("failed to remove clip service", e);
}
};
@ -107,7 +106,7 @@ class ClipServiceImpl {
}
return this.clipExtractionStatus;
} catch (e) {
logError(e, "failed to get clip indexing status");
log.error("failed to get clip indexing status", e);
}
};
@ -121,13 +120,13 @@ class ClipServiceImpl {
) => {
try {
if (this.embeddingExtractionInProgress) {
addLogLine(
log.info(
"clip embedding extraction already in progress, scheduling re-run",
);
this.reRunNeeded = true;
return;
} else {
addLogLine(
log.info(
"clip embedding extraction not in progress, starting clip embedding extraction",
);
}
@ -139,7 +138,7 @@ class ClipServiceImpl {
this.embeddingExtractionInProgress = null;
if (!canceller.signal.aborted && this.reRunNeeded) {
this.reRunNeeded = false;
addLogLine("re-running clip embedding extraction");
log.info("re-running clip embedding extraction");
setTimeout(
() => this.scheduleImageEmbeddingExtraction(),
0,
@ -148,7 +147,7 @@ class ClipServiceImpl {
}
} catch (e) {
if (e.message !== CustomError.REQUEST_CANCELLED) {
logError(e, "failed to schedule clip embedding extraction");
log.error("failed to schedule clip embedding extraction", e);
}
}
};
@ -158,12 +157,12 @@ class ClipServiceImpl {
model: Model = Model.ONNX_CLIP,
): Promise<Float32Array> => {
try {
return ElectronAPIs.computeTextEmbedding(model, text);
return ensureElectron().computeTextEmbedding(model, text);
} catch (e) {
if (e?.message?.includes(CustomError.UNSUPPORTED_PLATFORM)) {
this.unsupportedPlatform = true;
}
logError(e, "failed to compute text embedding");
log.error("failed to compute text embedding", e);
throw e;
}
};
@ -174,7 +173,7 @@ class ClipServiceImpl {
) => {
try {
if (this.unsupportedPlatform) {
addLogLine(
log.info(
`skipping clip embedding extraction, platform unsupported`,
);
return;
@ -194,15 +193,15 @@ class ClipServiceImpl {
pending: pendingFiles.length,
});
if (pendingFiles.length === 0) {
addLogLine("no clip embedding extraction needed, all done");
log.info("no clip embedding extraction needed, all done");
return;
}
addLogLine(
log.info(
`starting clip embedding extraction for ${pendingFiles.length} files`,
);
for (const file of pendingFiles) {
try {
addLogLine(
log.info(
`extracting clip embedding for file: ${file.metadata.title} fileID: ${file.id}`,
);
if (canceller.signal.aborted) {
@ -210,7 +209,7 @@ class ClipServiceImpl {
}
const embeddingData =
await this.extractFileClipImageEmbedding(model, file);
addLogLine(
log.info(
`successfully extracted clip embedding for file: ${file.metadata.title} fileID: ${file.id} embedding length: ${embeddingData?.length}`,
);
await this.encryptAndUploadEmbedding(
@ -219,14 +218,14 @@ class ClipServiceImpl {
embeddingData,
);
this.onSuccessStatusUpdater();
addLogLine(
log.info(
`successfully put clip embedding to server for file: ${file.metadata.title} fileID: ${file.id}`,
);
} catch (e) {
if (e?.message !== CustomError.REQUEST_CANCELLED) {
logError(
e,
log.error(
"failed to extract clip embedding for file",
e,
);
}
if (
@ -244,7 +243,7 @@ class ClipServiceImpl {
}
} catch (e) {
if (e.message !== CustomError.REQUEST_CANCELLED) {
logError(e, "failed to extract clip embedding");
log.error("failed to extract clip embedding", e);
}
throw e;
}
@ -258,24 +257,24 @@ class ClipServiceImpl {
model: Model = Model.ONNX_CLIP,
) {
const { enteFile, localFile } = arg;
addLogLine(
log.info(
`clip embedding extraction onFileUploadedHandler file: ${enteFile.metadata.title} fileID: ${enteFile.id}`,
enteFile.id,
);
if (enteFile.metadata.fileType === FILE_TYPE.VIDEO) {
addLogLine(
log.info(
`skipping video file for clip embedding extraction file: ${enteFile.metadata.title} fileID: ${enteFile.id}`,
);
return;
}
const extension = enteFile.metadata.title.split(".").pop();
if (!extension || !["jpg", "jpeg"].includes(extension)) {
addLogLine(
log.info(
`skipping non jpg file for clip embedding extraction file: ${enteFile.metadata.title} fileID: ${enteFile.id}`,
);
return;
}
addLogLine(
log.info(
`queuing up for local clip embedding extraction for file: ${enteFile.metadata.title} fileID: ${enteFile.id}`,
);
try {
@ -290,11 +289,11 @@ class ClipServiceImpl {
embedding,
);
});
addLogLine(
log.info(
`successfully extracted clip embedding for file: ${enteFile.metadata.title} fileID: ${enteFile.id}`,
);
} catch (e) {
logError(e, "Failed in ML onFileUploadedHandler");
log.error("Failed in ML onFileUploadedHandler", e);
}
}
@ -305,7 +304,10 @@ class ClipServiceImpl {
const file = await localFile
.arrayBuffer()
.then((buffer) => new Uint8Array(buffer));
const embedding = await ElectronAPIs.computeImageEmbedding(model, file);
const embedding = await ensureElectron().computeImageEmbedding(
model,
file,
);
return embedding;
};
@ -322,7 +324,7 @@ class ClipServiceImpl {
const comlinkCryptoWorker = await ComlinkCryptoWorker.getInstance();
const { file: encryptedEmbeddingData } =
await comlinkCryptoWorker.encryptEmbedding(embeddingData, file.key);
addLogLine(
log.info(
`putting clip embedding to server for file: ${file.metadata.title} fileID: ${file.id}`,
);
await putEmbedding({
@ -345,7 +347,7 @@ class ClipServiceImpl {
file: EnteFile,
) => {
const thumb = await downloadManager.getThumbnail(file);
const embedding = await ElectronAPIs.computeImageEmbedding(
const embedding = await ensureElectron().computeImageEmbedding(
model,
thumb,
);

View file

@ -2,10 +2,10 @@ import { getEndpoint } from "@ente/shared/network/api";
import localForage from "@ente/shared/storage/localForage";
import { getData, LS_KEYS } from "@ente/shared/storage/localStorage";
import log from "@/next/log";
import ComlinkCryptoWorker from "@ente/shared/crypto";
import { CustomError } from "@ente/shared/error";
import HTTPService from "@ente/shared/network/HTTPService";
import { logError } from "@ente/shared/sentry";
import { getToken } from "@ente/shared/storage/localStorage/helpers";
import { getActualKey } from "@ente/shared/user";
import { User } from "@ente/shared/user/types";
@ -198,9 +198,10 @@ const getCollections = async (
try {
return await getCollectionWithSecrets(collection, key);
} catch (e) {
logError(e, `decryption failed for collection`, {
collectionID: collection.id,
});
log.error(
`decryption failed for collection with ID ${collection.id}`,
e,
);
return collection;
}
},
@ -212,7 +213,7 @@ const getCollections = async (
);
return collections;
} catch (e) {
logError(e, "getCollections failed");
log.error("getCollections failed", e);
throw e;
}
};
@ -340,7 +341,7 @@ export const getCollection = async (
);
return collectionWithSecrets;
} catch (e) {
logError(e, "failed to get collection");
log.error("failed to get collection", e);
throw e;
}
};
@ -462,7 +463,7 @@ const createCollection = async (
);
return decryptedCreatedCollection;
} catch (e) {
logError(e, "create collection failed");
log.error("create collection failed", e);
throw e;
}
};
@ -480,7 +481,7 @@ const postCollection = async (
);
return response.data.collection;
} catch (e) {
logError(e, "post Collection failed ");
log.error("post Collection failed ", e);
}
};
@ -496,7 +497,7 @@ export const addToFavorites = async (file: EnteFile) => {
}
await addToCollection(favCollection, [file]);
} catch (e) {
logError(e, "failed to add to favorite");
log.error("failed to add to favorite", e);
}
};
@ -508,7 +509,7 @@ export const removeFromFavorites = async (file: EnteFile) => {
}
await removeFromCollection(favCollection.id, [file]);
} catch (e) {
logError(e, "remove from favorite failed");
log.error("remove from favorite failed", e);
}
};
@ -537,7 +538,7 @@ export const addToCollection = async (
);
}
} catch (e) {
logError(e, "Add to collection Failed ");
log.error("Add to collection Failed ", e);
throw e;
}
};
@ -567,7 +568,7 @@ export const restoreToCollection = async (
);
}
} catch (e) {
logError(e, "restore to collection Failed ");
log.error("restore to collection Failed ", e);
throw e;
}
};
@ -598,7 +599,7 @@ export const moveToCollection = async (
);
}
} catch (e) {
logError(e, "move to collection Failed ");
log.error("move to collection Failed ", e);
throw e;
}
};
@ -649,7 +650,7 @@ export const removeFromCollection = async (
await removeUserFiles(collectionID, userFiles, allFiles);
}
} catch (e) {
logError(e, "remove from collection failed ");
log.error("remove from collection failed ", e);
throw e;
}
};
@ -715,7 +716,7 @@ export const removeUserFiles = async (
leftFiles,
);
} catch (e) {
logError(e, "remove user files failed ");
log.error("remove user files failed ", e);
throw e;
}
};
@ -742,7 +743,7 @@ export const removeNonUserFiles = async (
);
}
} catch (e) {
logError(e, "remove non user files failed ");
log.error("remove non user files failed ", e);
throw e;
}
};
@ -768,7 +769,7 @@ export const deleteCollection = async (
{ "X-Auth-Token": token },
);
} catch (e) {
logError(e, "delete collection failed ");
log.error("delete collection failed ", e);
throw e;
}
};
@ -784,7 +785,7 @@ export const leaveSharedAlbum = async (collectionID: number) => {
{ "X-Auth-Token": token },
);
} catch (e) {
logError(e, "leave shared album failed ");
log.error("leave shared album failed ", e);
throw e;
}
};
@ -976,7 +977,7 @@ export const shareCollection = async (
},
);
} catch (e) {
logError(e, "share collection failed ");
log.error("share collection failed ", e);
throw e;
}
};
@ -1000,7 +1001,7 @@ export const unshareCollection = async (
},
);
} catch (e) {
logError(e, "unshare collection failed ");
log.error("unshare collection failed ", e);
}
};
@ -1023,7 +1024,7 @@ export const createShareableURL = async (collection: Collection) => {
);
return resp.data.result as PublicURL;
} catch (e) {
logError(e, "createShareableURL failed ");
log.error("createShareableURL failed ", e);
throw e;
}
};
@ -1043,7 +1044,7 @@ export const deleteShareableURL = async (collection: Collection) => {
},
);
} catch (e) {
logError(e, "deleteShareableURL failed ");
log.error("deleteShareableURL failed ", e);
throw e;
}
};
@ -1066,7 +1067,7 @@ export const updateShareableURL = async (
);
return res.data.result as PublicURL;
} catch (e) {
logError(e, "updateShareableURL failed ");
log.error("updateShareableURL failed ", e);
throw e;
}
};
@ -1392,7 +1393,7 @@ export async function moveToHiddenCollection(files: EnteFile[]) {
await moveToCollection(collectionID, hiddenCollection, files);
}
} catch (e) {
logError(e, "move to hidden collection failed ");
log.error("move to hidden collection failed ", e);
throw e;
}
}
@ -1411,7 +1412,7 @@ export async function unhideToCollection(
await moveToCollection(collectionID, collection, files);
}
} catch (e) {
logError(e, "unhide to collection failed ");
log.error("unhide to collection failed ", e);
throw e;
}
}
@ -1436,7 +1437,7 @@ export const constructUserIDToEmailMap = (
});
return userIDToEmailMap;
} catch (e) {
logError("Error Mapping UserId to email:", e);
log.error("Error Mapping UserId to email:", e);
return new Map<number, string>();
}
};

View file

@ -1,6 +1,6 @@
import log from "@/next/log";
import HTTPService from "@ente/shared/network/HTTPService";
import { getEndpoint } from "@ente/shared/network/api";
import { logError } from "@ente/shared/sentry";
import { getToken } from "@ente/shared/storage/localStorage/helpers";
import { FILE_TYPE } from "constants/file";
import { EnteFile } from "types/file";
@ -64,7 +64,7 @@ export async function getDuplicates(
return result;
} catch (e) {
logError(e, "failed to get duplicate files");
log.error("failed to get duplicate files", e);
}
}
@ -156,7 +156,7 @@ async function fetchDuplicateFileIDs() {
);
return (response.data as DuplicatesResponse).duplicates;
} catch (e) {
logError(e, "failed to fetch duplicate file IDs");
log.error("failed to fetch duplicate file IDs", e);
}
}

View file

@ -1,22 +1,20 @@
import { EnteFile } from "types/file";
import {
generateStreamFromArrayBuffer,
getRenderableFileURL,
} from "utils/file";
import log from "@/next/log";
import { APPS } from "@ente/shared/apps/constants";
import ComlinkCryptoWorker from "@ente/shared/crypto";
import { DedicatedCryptoWorker } from "@ente/shared/crypto/internal/crypto.worker";
import { CustomError } from "@ente/shared/error";
import { Events, eventBus } from "@ente/shared/events";
import { addLogLine } from "@ente/shared/logging";
import { logError } from "@ente/shared/sentry";
import { CacheStorageService } from "@ente/shared/storage/cacheStorage";
import { CACHES } from "@ente/shared/storage/cacheStorage/constants";
import { LimitedCache } from "@ente/shared/storage/cacheStorage/types";
import { Remote } from "comlink";
import { FILE_TYPE } from "constants/file";
import isElectron from "is-electron";
import { EnteFile } from "types/file";
import {
generateStreamFromArrayBuffer,
getRenderableFileURL,
} from "utils/file";
import { isInternalUser } from "utils/user";
import { PhotosDownloadClient } from "./clients/photos";
import { PublicAlbumsDownloadClient } from "./clients/publicAlbums";
@ -80,7 +78,7 @@ class DownloadManagerImpl {
) {
try {
if (this.ready) {
addLogLine("DownloadManager already initialized");
log.info("DownloadManager already initialized");
return;
}
this.downloadClient = createDownloadClient(app, tokens, timeout);
@ -90,14 +88,14 @@ class DownloadManagerImpl {
this.ready = true;
eventBus.on(Events.LOGOUT, this.logoutHandler.bind(this), this);
} catch (e) {
logError(e, "DownloadManager init failed");
log.error("DownloadManager init failed", e);
throw e;
}
}
private async logoutHandler() {
try {
addLogLine("downloadManger logoutHandler started");
log.info("downloadManger logoutHandler started");
this.ready = false;
this.cryptoWorker = null;
this.downloadClient = null;
@ -106,9 +104,9 @@ class DownloadManagerImpl {
this.thumbnailObjectURLPromises.clear();
this.fileDownloadProgress.clear();
this.progressUpdater = () => {};
addLogLine("downloadManager logoutHandler completed");
log.info("downloadManager logoutHandler completed");
} catch (e) {
logError(e, "downloadManager logoutHandler failed");
log.error("downloadManager logoutHandler failed", e);
}
}
@ -138,7 +136,7 @@ class DownloadManagerImpl {
return new Uint8Array(await cacheResp.arrayBuffer());
}
} catch (e) {
logError(e, "failed to get cached thumbnail");
log.error("failed to get cached thumbnail", e);
throw e;
}
}
@ -153,7 +151,7 @@ class DownloadManagerImpl {
);
return cacheResp?.clone();
} catch (e) {
logError(e, "failed to get cached file");
log.error("failed to get cached file", e);
throw e;
}
}
@ -185,12 +183,12 @@ class DownloadManagerImpl {
this.thumbnailCache
?.put(file.id.toString(), new Response(thumb))
.catch((e) => {
logError(e, "thumb cache put failed");
log.error("thumb cache put failed", e);
// TODO: handle storage full exception.
});
return thumb;
} catch (e) {
logError(e, "getThumbnail failed");
log.error("getThumbnail failed", e);
throw e;
}
}
@ -215,7 +213,7 @@ class DownloadManagerImpl {
return thumb;
} catch (e) {
this.thumbnailObjectURLPromises.delete(file.id);
logError(e, "get DownloadManager preview Failed");
log.error("get DownloadManager preview Failed", e);
throw e;
}
}
@ -253,7 +251,7 @@ class DownloadManagerImpl {
return fileURLs;
} catch (e) {
this.fileConversionPromises.delete(file.id);
logError(e, "download manager getFileForPreview Failed");
log.error("download manager getFileForPreview Failed", e);
throw e;
}
};
@ -291,7 +289,7 @@ class DownloadManagerImpl {
}
} catch (e) {
this.fileObjectURLPromises.delete(file.id);
logError(e, "download manager getFile Failed");
log.error("download manager getFile Failed", e);
throw e;
}
}
@ -300,7 +298,7 @@ class DownloadManagerImpl {
file: EnteFile,
): Promise<ReadableStream<Uint8Array>> {
try {
addLogLine(`download attempted for fileID:${file.id}`);
log.info(`download attempted for fileID:${file.id}`);
const onDownloadProgress = this.trackDownloadProgress(
file.id,
file.info?.fileSize,
@ -321,7 +319,7 @@ class DownloadManagerImpl {
this.diskFileCache
.put(file.id.toString(), encrypted.clone())
.catch((e) => {
logError(e, "file cache put failed");
log.error("file cache put failed", e);
// TODO: handle storage full exception.
});
}
@ -338,15 +336,9 @@ class DownloadManagerImpl {
return generateStreamFromArrayBuffer(decrypted);
} catch (e) {
if (e.message === CustomError.PROCESSING_FAILED) {
logError(e, "Failed to process file", {
fileID: file.id,
fromMobile:
!!file.metadata.localID ||
!!file.metadata.deviceFolder ||
!!file.metadata.version,
});
addLogLine(
`Failed to process file with fileID:${file.id}, localID: ${file.metadata.localID}, version: ${file.metadata.version}, deviceFolder:${file.metadata.deviceFolder} with error: ${e.message}`,
log.error(
`Failed to process file with fileID:${file.id}, localID: ${file.metadata.localID}, version: ${file.metadata.version}, deviceFolder:${file.metadata.deviceFolder}`,
e,
);
}
throw e;
@ -360,7 +352,7 @@ class DownloadManagerImpl {
this.diskFileCache
.put(file.id.toString(), resp.clone())
.catch((e) => {
logError(e, "file cache put failed");
log.error("file cache put failed", e);
});
}
}
@ -430,22 +422,9 @@ class DownloadManagerImpl {
e.message ===
CustomError.PROCESSING_FAILED
) {
logError(
log.error(
`Failed to process file ${file.id} from localID: ${file.metadata.localID} version: ${file.metadata.version} deviceFolder:${file.metadata.deviceFolder}`,
e,
"Failed to process file",
{
fileID: file.id,
fromMobile:
!!file.metadata
.localID ||
!!file.metadata
.deviceFolder ||
!!file.metadata
.version,
},
);
addLogLine(
`Failed to process file ${file.id} from localID: ${file.metadata.localID} version: ${file.metadata.version} deviceFolder:${file.metadata.deviceFolder} with error: ${e.message}`,
);
}
throw e;
@ -471,22 +450,9 @@ class DownloadManagerImpl {
e.message ===
CustomError.PROCESSING_FAILED
) {
logError(
log.error(
`Failed to process file ${file.id} from localID: ${file.metadata.localID} version: ${file.metadata.version} deviceFolder:${file.metadata.deviceFolder}`,
e,
"Failed to process file",
{
fileID: file.id,
fromMobile:
!!file.metadata
.localID ||
!!file.metadata
.deviceFolder ||
!!file.metadata
.version,
},
);
addLogLine(
`Failed to process file ${file.id} from localID: ${file.metadata.localID} version: ${file.metadata.version} deviceFolder:${file.metadata.deviceFolder} with error: ${e.message}`,
);
}
throw e;
@ -495,7 +461,10 @@ class DownloadManagerImpl {
controller.close();
}
} catch (e) {
logError(e, "Failed to process file chunk");
log.error(
"Failed to process file chunk",
e,
);
controller.error(e);
}
});
@ -503,14 +472,14 @@ class DownloadManagerImpl {
push();
} catch (e) {
logError(e, "Failed to process file stream");
log.error("Failed to process file stream", e);
controller.error(e);
}
},
});
return stream;
} catch (e) {
logError(e, "Failed to download file");
log.error("Failed to download file", e);
throw e;
}
}
@ -549,7 +518,7 @@ async function openThumbnailCache() {
try {
return await CacheStorageService.open(CACHES.THUMBS);
} catch (e) {
logError(e, "Failed to open thumbnail cache");
log.error("Failed to open thumbnail cache", e);
if (isInternalUser()) {
throw e;
} else {
@ -565,7 +534,7 @@ async function openDiskFileCache() {
}
return await CacheStorageService.open(CACHES.FILES);
} catch (e) {
logError(e, "Failed to open file cache");
log.error("Failed to open file cache", e);
if (isInternalUser()) {
throw e;
} else {

View file

@ -1,9 +1,8 @@
import log from "@/next/log";
import ComlinkCryptoWorker from "@ente/shared/crypto";
import { CustomError } from "@ente/shared/error";
import { addLogLine } from "@ente/shared/logging";
import HTTPService from "@ente/shared/network/HTTPService";
import { getEndpoint } from "@ente/shared/network/api";
import { logError } from "@ente/shared/sentry";
import localForage from "@ente/shared/storage/localForage";
import { getToken } from "@ente/shared/storage/localStorage/helpers";
import {
@ -68,10 +67,10 @@ export const syncEmbeddings = async (models: Model[] = [Model.ONNX_CLIP]) => {
fileIdToKeyMap.set(file.id, file.key);
});
await cleanupDeletedEmbeddings(allLocalFiles, allEmbeddings);
addLogLine(`Syncing embeddings localCount: ${allEmbeddings.length}`);
log.info(`Syncing embeddings localCount: ${allEmbeddings.length}`);
for (const model of models) {
let modelLastSinceTime = await getModelEmbeddingSyncTime(model);
addLogLine(
log.info(
`Syncing ${model} model's embeddings sinceTime: ${modelLastSinceTime}`,
);
let response: GetEmbeddingDiffResponse;
@ -107,18 +106,13 @@ export const syncEmbeddings = async (models: Model[] = [Model.ONNX_CLIP]) => {
embedding: decryptedData,
} as Embedding;
} catch (e) {
let info: Record<string, unknown>;
let hasHiddenAlbums = false;
if (e.message === CustomError.FILE_NOT_FOUND) {
const hasHiddenAlbums =
hiddenAlbums?.length > 0;
info = {
hasHiddenAlbums,
};
hasHiddenAlbums = hiddenAlbums?.length > 0;
}
logError(
log.error(
`decryptEmbedding failed for file (hasHiddenAlbums: ${hasHiddenAlbums})`,
e,
"decryptEmbedding failed for file",
info,
);
}
}),
@ -132,13 +126,13 @@ export const syncEmbeddings = async (models: Model[] = [Model.ONNX_CLIP]) => {
}
await localForage.setItem(EMBEDDINGS_TABLE, allEmbeddings);
await setModelEmbeddingSyncTime(model, modelLastSinceTime);
addLogLine(
log.info(
`Syncing embeddings syncedEmbeddingsCount: ${allEmbeddings.length}`,
);
} while (response.diff.length === DIFF_LIMIT);
}
} catch (e) {
logError(e, "Sync embeddings failed");
log.error("Sync embeddings failed", e);
}
};
@ -164,7 +158,7 @@ export const getEmbeddingsDiff = async (
);
return await response.data;
} catch (e) {
logError(e, "get embeddings diff failed");
log.error("get embeddings diff failed", e);
throw e;
}
};
@ -187,7 +181,7 @@ export const putEmbedding = async (
);
return resp.data;
} catch (e) {
logError(e, "put embedding failed");
log.error("put embedding failed", e);
throw e;
}
};
@ -205,7 +199,7 @@ export const cleanupDeletedEmbeddings = async (
activeFileIds.has(embedding.fileID),
);
if (allLocalEmbeddings.length !== remainingEmbeddings.length) {
addLogLine(
log.info(
`cleanupDeletedEmbeddings embeddingsCount: ${allLocalEmbeddings.length} remainingEmbeddingsCount: ${remainingEmbeddings.length}`,
);
await localForage.setItem(EMBEDDINGS_TABLE, remainingEmbeddings);

View file

@ -1,8 +1,7 @@
import log from "@/next/log";
import ComlinkCryptoWorker from "@ente/shared/crypto";
import { addLogLine } from "@ente/shared/logging";
import HTTPService from "@ente/shared/network/HTTPService";
import { getEndpoint } from "@ente/shared/network/api";
import { logError } from "@ente/shared/sentry";
import localForage from "@ente/shared/storage/localForage";
import { getToken } from "@ente/shared/storage/localStorage/helpers";
import { getActualKey } from "@ente/shared/user";
@ -82,7 +81,7 @@ const getEntityKey = async (type: EntityType) => {
localForage.setItem(ENTITY_KEY_TABLES[type], decryptedEntityKey);
return decryptedEntityKey;
} catch (e) {
logError(e, "Get entity key failed");
log.error("Get entity key failed", e);
throw e;
}
};
@ -92,7 +91,7 @@ export const getLatestEntities = async <T>(type: EntityType) => {
await syncEntity<T>(type);
return await getLocalEntity<T>(type);
} catch (e) {
logError(e, "Sync entities failed");
log.error("Sync entities failed", e);
throw e;
}
};
@ -101,7 +100,7 @@ export const syncEntities = async () => {
try {
await syncEntity(EntityType.LOCATION_TAG);
} catch (e) {
logError(e, "Sync entities failed");
log.error("Sync entities failed", e);
throw e;
}
};
@ -109,11 +108,11 @@ export const syncEntities = async () => {
const syncEntity = async <T>(type: EntityType): Promise<Entity<T>> => {
try {
let entities = await getLocalEntity(type);
addLogLine(
log.info(
`Syncing ${type} entities localEntitiesCount: ${entities.length}`,
);
let syncTime = await getEntityLastSyncTime(type);
addLogLine(`Syncing ${type} entities syncTime: ${syncTime}`);
log.info(`Syncing ${type} entities syncTime: ${syncTime}`);
let response: EntitySyncDiffResponse;
do {
response = await getEntityDiff(type, syncTime);
@ -157,12 +156,12 @@ const syncEntity = async <T>(type: EntityType): Promise<Entity<T>> => {
}
await localForage.setItem(ENTITY_TABLES[type], nonDeletedEntities);
await localForage.setItem(ENTITY_SYNC_TIME_TABLES[type], syncTime);
addLogLine(
log.info(
`Syncing ${type} entities syncedEntitiesCount: ${nonDeletedEntities.length}`,
);
} while (response.diff.length === DIFF_LIMIT);
} catch (e) {
logError(e, "Sync entity failed");
log.error("Sync entity failed", e);
}
};
@ -189,7 +188,7 @@ const getEntityDiff = async (
return resp.data;
} catch (e) {
logError(e, "Get entity diff failed");
log.error("Get entity diff failed", e);
throw e;
}
};

View file

@ -1,7 +1,29 @@
import { logError } from "@ente/shared/sentry";
import { ensureElectron } from "@/next/electron";
import log from "@/next/log";
import { CustomError } from "@ente/shared/error";
import { Events, eventBus } from "@ente/shared/events";
import { LS_KEYS, getData, setData } from "@ente/shared/storage/localStorage";
import { User } from "@ente/shared/user/types";
import { sleep } from "@ente/shared/utils";
import QueueProcessor, {
CancellationStatus,
RequestCanceller,
} from "@ente/shared/utils/queueProcessor";
import { ExportStage } from "constants/export";
import { FILE_TYPE } from "constants/file";
import { Collection } from "types/collection";
import {
ExportProgress,
ExportRecord,
ExportSettings,
ExportUIUpdaters,
} from "types/export";
import { EnteFile } from "types/file";
import {
constructCollectionNameMap,
getCollectionUserFacingName,
getNonEmptyPersonalCollections,
} from "utils/collection";
import {
convertCollectionIDExportNameObjectToMap,
convertFileIDExportNameObjectToMap,
@ -25,41 +47,16 @@ import {
isLivePhotoExportName,
parseLivePhotoExportName,
} from "utils/export";
import { getAllLocalCollections } from "../collectionService";
import downloadManager from "../download";
import { getAllLocalFiles } from "../fileService";
import {
generateStreamFromArrayBuffer,
getPersonalFiles,
getUpdatedEXIFFileForDownload,
mergeMetadata,
} from "utils/file";
import { getAllLocalCollections } from "../collectionService";
import downloadManager from "../download";
import { getAllLocalFiles } from "../fileService";
import { decodeLivePhoto } from "../livePhotoService";
import ElectronAPIs from "@/next/electron";
import { CustomError } from "@ente/shared/error";
import { Events, eventBus } from "@ente/shared/events";
import { addLogLine } from "@ente/shared/logging";
import { User } from "@ente/shared/user/types";
import QueueProcessor, {
CancellationStatus,
RequestCanceller,
} from "@ente/shared/utils/queueProcessor";
import { ExportStage } from "constants/export";
import { FILE_TYPE } from "constants/file";
import { Collection } from "types/collection";
import {
ExportProgress,
ExportRecord,
ExportSettings,
ExportUIUpdaters,
} from "types/export";
import {
constructCollectionNameMap,
getCollectionUserFacingName,
getNonEmptyPersonalCollections,
} from "utils/collection";
import { migrateExport } from "./migration";
const EXPORT_RECORD_FILE_NAME = "export_status.json";
@ -102,7 +99,7 @@ class ExportService {
this.exportSettings = exportSettings;
return exportSettings;
} catch (e) {
logError(e, "getExportSettings failed");
log.error("getExportSettings failed", e);
throw e;
}
}
@ -114,7 +111,7 @@ class ExportService {
this.exportSettings = newSettings;
setData(LS_KEYS.EXPORT, newSettings);
} catch (e) {
logError(e, "updateExportSettings failed");
log.error("updateExportSettings failed", e);
throw e;
}
}
@ -125,11 +122,11 @@ class ExportService {
updateProgress: (progress: ExportProgress) => void,
) {
try {
addLogLine("running migration");
log.info("running migration");
await migrateExport(exportDir, exportRecord, updateProgress);
addLogLine("migration completed");
log.info("migration completed");
} catch (e) {
logError(e, "migration failed");
log.error("migration failed", e);
throw e;
}
}
@ -160,16 +157,16 @@ class ExportService {
async changeExportDirectory() {
try {
const newRootDir = await ElectronAPIs.selectDirectory();
const newRootDir = await ensureElectron().selectDirectory();
if (!newRootDir) {
throw Error(CustomError.SELECT_FOLDER_ABORTED);
}
const newExportDir = `${newRootDir}/${ENTE_EXPORT_DIRECTORY}`;
await ElectronAPIs.checkExistsAndCreateDir(newExportDir);
await ensureElectron().checkExistsAndCreateDir(newExportDir);
return newExportDir;
} catch (e) {
if (e.message !== CustomError.SELECT_FOLDER_ABORTED) {
logError(e, "changeExportDirectory failed");
log.error("changeExportDirectory failed", e);
}
throw e;
}
@ -178,10 +175,10 @@ class ExportService {
enableContinuousExport() {
try {
if (this.continuousExportEventHandler) {
addLogLine("continuous export already enabled");
log.info("continuous export already enabled");
return;
}
addLogLine("enabling continuous export");
log.info("enabling continuous export");
this.continuousExportEventHandler = () => {
this.scheduleExport();
};
@ -191,7 +188,7 @@ class ExportService {
this.continuousExportEventHandler,
);
} catch (e) {
logError(e, "failed to enableContinuousExport ");
log.error("failed to enableContinuousExport ", e);
throw e;
}
}
@ -199,17 +196,17 @@ class ExportService {
disableContinuousExport() {
try {
if (!this.continuousExportEventHandler) {
addLogLine("continuous export already disabled");
log.info("continuous export already disabled");
return;
}
addLogLine("disabling continuous export");
log.info("disabling continuous export");
eventBus.removeListener(
Events.LOCAL_FILES_UPDATED,
this.continuousExportEventHandler,
);
this.continuousExportEventHandler = null;
} catch (e) {
logError(e, "failed to disableContinuousExport");
log.error("failed to disableContinuousExport", e);
throw e;
}
}
@ -239,7 +236,7 @@ class ExportService {
);
return unExportedFiles;
} catch (e) {
logError(e, "getUpdateFileLists failed");
log.error("getUpdateFileLists failed", e);
throw e;
}
};
@ -271,30 +268,30 @@ class ExportService {
const pendingExports = await this.getPendingExports(exportRecord);
this.uiUpdater.setPendingExports(pendingExports);
} catch (e) {
logError(e, "postExport failed");
log.error("postExport failed", e);
}
}
async stopRunningExport() {
try {
addLogLine("user requested export cancellation");
log.info("user requested export cancellation");
this.exportInProgress.exec();
this.exportInProgress = null;
this.reRunNeeded = false;
await this.postExport();
} catch (e) {
logError(e, "stopRunningExport failed");
log.error("stopRunningExport failed", e);
}
}
scheduleExport = async () => {
try {
if (this.exportInProgress) {
addLogLine("export in progress, scheduling re-run");
log.info("export in progress, scheduling re-run");
this.reRunNeeded = true;
return;
} else {
addLogLine("export not in progress, starting export");
log.info("export not in progress, starting export");
}
const isCanceled: CancellationStatus = { status: false };
@ -307,22 +304,22 @@ class ExportService {
try {
const exportFolder = this.getExportSettings()?.folder;
await this.preExport(exportFolder);
addLogLine("export started");
log.info("export started");
await this.runExport(exportFolder, isCanceled);
addLogLine("export completed");
log.info("export completed");
} finally {
if (isCanceled.status) {
addLogLine("export cancellation done");
log.info("export cancellation done");
if (!this.exportInProgress) {
await this.postExport();
}
} else {
await this.postExport();
addLogLine("resetting export in progress after completion");
log.info("resetting export in progress after completion");
this.exportInProgress = null;
if (this.reRunNeeded) {
this.reRunNeeded = false;
addLogLine("re-running export");
log.info("re-running export");
setTimeout(() => this.scheduleExport(), 0);
}
}
@ -332,7 +329,7 @@ class ExportService {
e.message !== CustomError.EXPORT_FOLDER_DOES_NOT_EXIST &&
e.message !== CustomError.EXPORT_STOPPED
) {
logError(e, "scheduleExport failed");
log.error("scheduleExport failed", e);
}
}
};
@ -390,7 +387,7 @@ class ExportService {
exportRecord,
);
addLogLine(
log.info(
`personal files:${personalFiles.length} unexported files: ${filesToExport.length}, deleted exported files: ${removedFileUIDs.length}, renamed collections: ${renamedCollections.length}, deleted collections: ${deletedExportedCollections.length}`,
);
let success = 0;
@ -416,7 +413,7 @@ class ExportService {
};
if (renamedCollections?.length > 0) {
this.updateExportStage(ExportStage.RENAMING_COLLECTION_FOLDERS);
addLogLine(`renaming ${renamedCollections.length} collections`);
log.info(`renaming ${renamedCollections.length} collections`);
await this.collectionRenamer(
exportFolder,
collectionIDExportNameMap,
@ -427,7 +424,7 @@ class ExportService {
if (removedFileUIDs?.length > 0) {
this.updateExportStage(ExportStage.TRASHING_DELETED_FILES);
addLogLine(`trashing ${removedFileUIDs.length} files`);
log.info(`trashing ${removedFileUIDs.length} files`);
await this.fileTrasher(
exportFolder,
collectionIDExportNameMap,
@ -437,7 +434,7 @@ class ExportService {
}
if (filesToExport?.length > 0) {
this.updateExportStage(ExportStage.EXPORTING_FILES);
addLogLine(`exporting ${filesToExport.length} files`);
log.info(`exporting ${filesToExport.length} files`);
await this.fileExporter(
filesToExport,
collectionIDNameMap,
@ -452,7 +449,7 @@ class ExportService {
this.updateExportStage(
ExportStage.TRASHING_DELETED_COLLECTIONS,
);
addLogLine(
log.info(
`removing ${deletedExportedCollections.length} collections`,
);
await this.collectionRemover(
@ -466,7 +463,7 @@ class ExportService {
e.message !== CustomError.EXPORT_FOLDER_DOES_NOT_EXIST &&
e.message !== CustomError.EXPORT_STOPPED
) {
logError(e, "runExport failed");
log.error("runExport failed", e);
}
throw e;
}
@ -497,7 +494,7 @@ class ExportService {
exportFolder,
getCollectionUserFacingName(collection),
);
addLogLine(
log.info(
`renaming collection with id ${collection.id} from ${oldCollectionExportName} to ${newCollectionExportName}`,
);
const newCollectionExportPath = getCollectionExportPath(
@ -515,7 +512,7 @@ class ExportService {
newCollectionExportName,
);
try {
await ElectronAPIs.rename(
await ensureElectron().rename(
oldCollectionExportPath,
newCollectionExportPath,
);
@ -531,11 +528,11 @@ class ExportService {
);
throw e;
}
addLogLine(
log.info(
`renaming collection with id ${collection.id} from ${oldCollectionExportName} to ${newCollectionExportName} successful`,
);
} catch (e) {
logError(e, "collectionRenamer failed a collection");
log.error("collectionRenamer failed a collection", e);
if (
e.message ===
CustomError.UPDATE_EXPORTED_RECORD_FAILED ||
@ -552,7 +549,7 @@ class ExportService {
e.message !== CustomError.EXPORT_FOLDER_DOES_NOT_EXIST &&
e.message !== CustomError.EXPORT_STOPPED
) {
logError(e, "collectionRenamer failed");
log.error("collectionRenamer failed", e);
}
throw e;
}
@ -575,7 +572,7 @@ class ExportService {
throw Error(CustomError.EXPORT_STOPPED);
}
await this.verifyExportFolderExists(exportFolder);
addLogLine(
log.info(
`removing collection with id ${collectionID} from export folder`,
);
const collectionExportName =
@ -600,11 +597,13 @@ class ExportService {
);
try {
// delete the collection metadata folder
await ElectronAPIs.deleteFolder(
await ensureElectron().deleteFolder(
getMetadataFolderExportPath(collectionExportPath),
);
// delete the collection folder
await ElectronAPIs.deleteFolder(collectionExportPath);
await ensureElectron().deleteFolder(
collectionExportPath,
);
} catch (e) {
await this.addCollectionExportedRecord(
exportFolder,
@ -613,11 +612,11 @@ class ExportService {
);
throw e;
}
addLogLine(
log.info(
`removing collection with id ${collectionID} from export folder successful`,
);
} catch (e) {
logError(e, "collectionRemover failed a collection");
log.error("collectionRemover failed a collection", e);
if (
e.message ===
CustomError.UPDATE_EXPORTED_RECORD_FAILED ||
@ -634,7 +633,7 @@ class ExportService {
e.message !== CustomError.EXPORT_FOLDER_DOES_NOT_EXIST &&
e.message !== CustomError.EXPORT_STOPPED
) {
logError(e, "collectionRemover failed");
log.error("collectionRemover failed", e);
}
throw e;
}
@ -651,7 +650,7 @@ class ExportService {
): Promise<void> {
try {
for (const file of files) {
addLogLine(
log.info(
`exporting file ${file.metadata.title} with id ${
file.id
} from collection ${collectionIDNameMap.get(
@ -687,10 +686,10 @@ class ExportService {
exportDir,
collectionExportName,
);
await ElectronAPIs.checkExistsAndCreateDir(
await ensureElectron().checkExistsAndCreateDir(
collectionExportPath,
);
await ElectronAPIs.checkExistsAndCreateDir(
await ensureElectron().checkExistsAndCreateDir(
getMetadataFolderExportPath(collectionExportPath),
);
await this.downloadAndSave(
@ -699,7 +698,7 @@ class ExportService {
file,
);
incrementSuccess();
addLogLine(
log.info(
`exporting file ${file.metadata.title} with id ${
file.id
} from collection ${collectionIDNameMap.get(
@ -708,7 +707,7 @@ class ExportService {
);
} catch (e) {
incrementFailed();
logError(e, "export failed for a file");
log.error("export failed for a file", e);
if (
e.message ===
CustomError.UPDATE_EXPORTED_RECORD_FAILED ||
@ -725,7 +724,7 @@ class ExportService {
e.message !== CustomError.EXPORT_FOLDER_DOES_NOT_EXIST &&
e.message !== CustomError.EXPORT_STOPPED
) {
logError(e, "fileExporter failed");
log.error("fileExporter failed", e);
}
throw e;
}
@ -744,7 +743,7 @@ class ExportService {
);
for (const fileUID of removedFileUIDs) {
await this.verifyExportFolderExists(exportDir);
addLogLine(`trashing file with id ${fileUID}`);
log.info(`trashing file with id ${fileUID}`);
if (isCanceled.status) {
throw Error(CustomError.EXPORT_STOPPED);
}
@ -766,11 +765,11 @@ class ExportService {
collectionExportPath,
imageExportName,
);
addLogLine(
log.info(
`moving image file ${imageExportPath} to trash folder`,
);
if (await this.exists(imageExportPath)) {
await ElectronAPIs.moveFile(
await ensureElectron().moveFile(
imageExportPath,
await getTrashedFileExportPath(
exportDir,
@ -785,7 +784,7 @@ class ExportService {
if (
await this.exists(imageMetadataFileExportPath)
) {
await ElectronAPIs.moveFile(
await ensureElectron().moveFile(
imageMetadataFileExportPath,
await getTrashedFileExportPath(
exportDir,
@ -798,11 +797,11 @@ class ExportService {
collectionExportPath,
videoExportName,
);
addLogLine(
log.info(
`moving video file ${videoExportPath} to trash folder`,
);
if (await this.exists(videoExportPath)) {
await ElectronAPIs.moveFile(
await ensureElectron().moveFile(
videoExportPath,
await getTrashedFileExportPath(
exportDir,
@ -815,7 +814,7 @@ class ExportService {
if (
await this.exists(videoMetadataFileExportPath)
) {
await ElectronAPIs.moveFile(
await ensureElectron().moveFile(
videoMetadataFileExportPath,
await getTrashedFileExportPath(
exportDir,
@ -833,11 +832,11 @@ class ExportService {
exportDir,
fileExportPath,
);
addLogLine(
log.info(
`moving file ${fileExportPath} to ${trashedFilePath} trash folder`,
);
if (await this.exists(fileExportPath)) {
await ElectronAPIs.moveFile(
await ensureElectron().moveFile(
fileExportPath,
trashedFilePath,
);
@ -845,7 +844,7 @@ class ExportService {
const metadataFileExportPath =
getMetadataFileExportPath(fileExportPath);
if (await this.exists(metadataFileExportPath)) {
await ElectronAPIs.moveFile(
await ensureElectron().moveFile(
metadataFileExportPath,
await getTrashedFileExportPath(
exportDir,
@ -862,9 +861,9 @@ class ExportService {
);
throw e;
}
addLogLine(`trashing file with id ${fileUID} successful`);
log.info(`trashing file with id ${fileUID} successful`);
} catch (e) {
logError(e, "trashing failed for a file");
log.error("trashing failed for a file", e);
if (
e.message ===
CustomError.UPDATE_EXPORTED_RECORD_FAILED ||
@ -881,7 +880,7 @@ class ExportService {
e.message !== CustomError.EXPORT_FOLDER_DOES_NOT_EXIST &&
e.message !== CustomError.EXPORT_STOPPED
) {
logError(e, "fileTrasher failed");
log.error("fileTrasher failed", e);
}
throw e;
}
@ -904,7 +903,7 @@ class ExportService {
await this.updateExportRecord(folder, exportRecord);
} catch (e) {
if (e.message !== CustomError.EXPORT_FOLDER_DOES_NOT_EXIST) {
logError(e, "addFileExportedRecord failed");
log.error("addFileExportedRecord failed", e);
}
throw e;
}
@ -928,7 +927,7 @@ class ExportService {
await this.updateExportRecord(folder, exportRecord);
} catch (e) {
if (e.message !== CustomError.EXPORT_FOLDER_DOES_NOT_EXIST) {
logError(e, "addCollectionExportedRecord failed");
log.error("addCollectionExportedRecord failed", e);
}
throw e;
}
@ -947,7 +946,7 @@ class ExportService {
await this.updateExportRecord(folder, exportRecord);
} catch (e) {
if (e.message !== CustomError.EXPORT_FOLDER_DOES_NOT_EXIST) {
logError(e, "removeCollectionExportedRecord failed");
log.error("removeCollectionExportedRecord failed", e);
}
throw e;
}
@ -964,7 +963,7 @@ class ExportService {
await this.updateExportRecord(folder, exportRecord);
} catch (e) {
if (e.message !== CustomError.EXPORT_FOLDER_DOES_NOT_EXIST) {
logError(e, "removeFileExportedRecord failed");
log.error("removeFileExportedRecord failed", e);
}
throw e;
}
@ -984,7 +983,7 @@ class ExportService {
try {
const exportRecord = await this.getExportRecord(folder);
const newRecord: ExportRecord = { ...exportRecord, ...newData };
await ElectronAPIs.saveFileToDisk(
await ensureElectron().saveFileToDisk(
`${folder}/${EXPORT_RECORD_FILE_NAME}`,
JSON.stringify(newRecord, null, 2),
);
@ -993,7 +992,7 @@ class ExportService {
if (e.message === CustomError.EXPORT_FOLDER_DOES_NOT_EXIST) {
throw e;
}
logError(e, "error updating Export Record");
log.error("error updating Export Record", e);
throw Error(CustomError.UPDATE_EXPORTED_RECORD_FAILED);
}
}
@ -1006,7 +1005,7 @@ class ExportService {
return this.createEmptyExportRecord(exportRecordJSONPath);
}
const recordFile =
await ElectronAPIs.readTextFile(exportRecordJSONPath);
await ensureElectron().readTextFile(exportRecordJSONPath);
try {
return JSON.parse(recordFile);
} catch (e) {
@ -1021,7 +1020,7 @@ class ExportService {
return await this.getExportRecord(folder, false);
}
if (e.message !== CustomError.EXPORT_FOLDER_DOES_NOT_EXIST) {
logError(e, "export Record JSON parsing failed");
log.error("export Record JSON parsing failed", e);
}
throw e;
}
@ -1042,8 +1041,8 @@ class ExportService {
exportFolder,
collectionExportName,
);
await ElectronAPIs.checkExistsAndCreateDir(collectionExportPath);
await ElectronAPIs.checkExistsAndCreateDir(
await ensureElectron().checkExistsAndCreateDir(collectionExportPath);
await ensureElectron().checkExistsAndCreateDir(
getMetadataFolderExportPath(collectionExportPath),
);
@ -1090,7 +1089,7 @@ class ExportService {
fileExportName,
file,
);
await ElectronAPIs.saveStreamToDisk(
await ensureElectron().saveStreamToDisk(
getFileExportPath(collectionExportPath, fileExportName),
updatedFileStream,
);
@ -1100,7 +1099,7 @@ class ExportService {
}
}
} catch (e) {
logError(e, "download and save failed");
log.error("download and save failed", e);
throw e;
}
}
@ -1138,7 +1137,7 @@ class ExportService {
imageExportName,
file,
);
await ElectronAPIs.saveStreamToDisk(
await ensureElectron().saveStreamToDisk(
getFileExportPath(collectionExportPath, imageExportName),
imageStream,
);
@ -1150,12 +1149,12 @@ class ExportService {
file,
);
try {
await ElectronAPIs.saveStreamToDisk(
await ensureElectron().saveStreamToDisk(
getFileExportPath(collectionExportPath, videoExportName),
videoStream,
);
} catch (e) {
await ElectronAPIs.deleteFile(
await ensureElectron().deleteFile(
getFileExportPath(collectionExportPath, imageExportName),
);
throw e;
@ -1171,7 +1170,7 @@ class ExportService {
fileExportName: string,
file: EnteFile,
) {
await ElectronAPIs.saveFileToDisk(
await ensureElectron().saveFileToDisk(
getFileMetadataExportPath(collectionExportPath, fileExportName),
getGoogleLikeMetadataFile(fileExportName, file),
);
@ -1182,15 +1181,15 @@ class ExportService {
};
exists = (path: string) => {
return ElectronAPIs.fs.exists(path);
return ensureElectron().fs.exists(path);
};
rename = (oldPath: string, newPath: string) => {
return ElectronAPIs.rename(oldPath, newPath);
return ensureElectron().rename(oldPath, newPath);
};
checkExistsAndCreateDir = (path: string) => {
return ElectronAPIs.checkExistsAndCreateDir(path);
return ensureElectron().checkExistsAndCreateDir(path);
};
exportFolderExists = async (exportFolder: string) => {
@ -1204,7 +1203,7 @@ class ExportService {
}
} catch (e) {
if (e.message !== CustomError.EXPORT_FOLDER_DOES_NOT_EXIST) {
logError(e, "verifyExportFolderExists failed");
log.error("verifyExportFolderExists failed", e);
}
throw e;
}
@ -1212,7 +1211,7 @@ class ExportService {
private createEmptyExportRecord = async (exportRecordJSONPath: string) => {
const exportRecord: ExportRecord = NULL_EXPORT_RECORD;
await ElectronAPIs.saveFileToDisk(
await ensureElectron().saveFileToDisk(
exportRecordJSONPath,
JSON.stringify(exportRecord, null, 2),
);

View file

@ -1,5 +1,3 @@
import ElectronAPIs from "@/next/electron";
import isElectron from "is-electron";
import { ElectronFile } from "types/upload";
import ComlinkFFmpegWorker from "utils/comlink/ComlinkFFmpegWorker";
@ -16,10 +14,11 @@ class FFmpegFactory {
private client: IFFmpeg;
async getFFmpegClient() {
if (!this.client) {
if (isElectron()) {
const electron = globalThis.electron;
if (electron) {
this.client = {
run(cmd, inputFile, outputFilename, dontTimeout) {
return ElectronAPIs.runFFmpegCmd(
return electron.runFFmpegCmd(
cmd,
inputFile,
outputFilename,

View file

@ -1,4 +1,4 @@
import { logError } from "@ente/shared/sentry";
import log from "@/next/log";
import {
FFMPEG_PLACEHOLDER,
INPUT_PATH_PLACEHOLDER,
@ -40,7 +40,7 @@ export async function generateVideoThumbnail(
seekTime--;
}
} catch (e) {
logError(e, "ffmpeg generateVideoThumbnail failed");
log.error("ffmpeg generateVideoThumbnail failed", e);
throw e;
}
}
@ -72,7 +72,7 @@ export async function extractVideoMetadata(file: File | ElectronFile) {
new Uint8Array(await metadata.arrayBuffer()),
);
} catch (e) {
logError(e, "ffmpeg extractVideoMetadata failed");
log.error("ffmpeg extractVideoMetadata failed", e);
throw e;
}
}
@ -94,7 +94,7 @@ export async function convertToMP4(file: File | ElectronFile) {
true,
);
} catch (e) {
logError(e, "ffmpeg convertToMP4 failed");
log.error("ffmpeg convertToMP4 failed", e);
throw e;
}
}

View file

@ -1,11 +1,9 @@
import { getEndpoint } from "@ente/shared/network/api";
import localForage from "@ente/shared/storage/localForage";
import log from "@/next/log";
import ComlinkCryptoWorker from "@ente/shared/crypto";
import { Events, eventBus } from "@ente/shared/events";
import { addLogLine } from "@ente/shared/logging";
import HTTPService from "@ente/shared/network/HTTPService";
import { logError } from "@ente/shared/sentry";
import { getEndpoint } from "@ente/shared/network/api";
import localForage from "@ente/shared/storage/localForage";
import { getToken } from "@ente/shared/storage/localStorage/helpers";
import { REQUEST_BATCH_SIZE } from "constants/api";
import { Collection } from "types/collection";
@ -48,18 +46,19 @@ const setLocalFiles = async (type: "normal" | "hidden", files: EnteFile[]) => {
try {
eventBus.emit(Events.LOCAL_FILES_UPDATED);
} catch (e) {
logError(e, "Error in localFileUpdated handlers");
log.error("Error in localFileUpdated handlers", e);
}
} catch (e1) {
try {
const storageEstimate = await navigator.storage.estimate();
logError(e1, "failed to save files to indexedDB", {
storageEstimate,
});
addLogLine(`storage estimate ${JSON.stringify(storageEstimate)}`);
log.error(
`failed to save files to indexedDB (storageEstimate was ${storageEstimate}`,
e1,
);
log.info(`storage estimate ${JSON.stringify(storageEstimate)}`);
} catch (e2) {
logError(e1, "failed to save files to indexedDB");
logError(e2, "failed to get storage stats");
log.error("failed to save files to indexedDB", e1);
log.error("failed to get storage stats", e2);
}
throw e1;
}
@ -151,7 +150,7 @@ export const getFiles = async (
} while (resp.data.hasMore);
return decryptedFiles;
} catch (e) {
logError(e, "Get files failed");
log.error("Get files failed", e);
throw e;
}
};
@ -192,7 +191,7 @@ export const trashFiles = async (filesToTrash: EnteFile[]) => {
);
}
} catch (e) {
logError(e, "trash file failed");
log.error("trash file failed", e);
throw e;
}
};
@ -216,7 +215,7 @@ export const deleteFromTrash = async (filesToDelete: number[]) => {
);
}
} catch (e) {
logError(e, "deleteFromTrash failed");
log.error("deleteFromTrash failed", e);
throw e;
}
};

View file

@ -1,8 +1,7 @@
import { convertBytesToHumanReadable } from "@/next/file";
import log from "@/next/log";
import { ComlinkWorker } from "@/next/worker/comlink-worker";
import { CustomError } from "@ente/shared/error";
import { addLogLine } from "@ente/shared/logging";
import { logError } from "@ente/shared/sentry";
import { retryAsyncFunction } from "@ente/shared/utils";
import QueueProcessor from "@ente/shared/utils/queueProcessor";
import { getDedicatedConvertWorker } from "utils/comlink/ComlinkConvertWorker";
@ -46,7 +45,7 @@ class HEICConverter {
await worker.convertHEICToJPEG(
fileBlob,
);
addLogLine(
log.info(
`originalFileSize:${convertBytesToHumanReadable(
fileBlob?.size,
)},convertedFileSize:${convertBytesToHumanReadable(
@ -65,17 +64,19 @@ class HEICConverter {
},
);
if (!convertedHEIC || convertedHEIC?.size === 0) {
logError(
Error(`converted heic fileSize is Zero`),
"converted heic fileSize is Zero",
{
originalFileSize: convertBytesToHumanReadable(
fileBlob?.size ?? 0,
),
convertedFileSize: convertBytesToHumanReadable(
convertedHEIC?.size ?? 0,
),
},
log.error(
`converted heic fileSize is Zero - ${JSON.stringify(
{
originalFileSize:
convertBytesToHumanReadable(
fileBlob?.size ?? 0,
),
convertedFileSize:
convertBytesToHumanReadable(
convertedHEIC?.size ?? 0,
),
},
)}`,
);
}
await new Promise((resolve) => {
@ -87,7 +88,7 @@ class HEICConverter {
this.workerPool.push(convertWorker);
return convertedHEIC;
} catch (e) {
logError(e, "heic conversion failed");
log.error("heic conversion failed", e);
convertWorker.terminate();
this.workerPool.push(getDedicatedConvertWorker());
throw e;

View file

@ -1,4 +1,4 @@
import { logError } from "@ente/shared/sentry";
import log from "@/next/log";
import WasmHEICConverterService from "./heic-convert/service";
class HeicConversionService {
@ -6,7 +6,7 @@ class HeicConversionService {
try {
return await WasmHEICConverterService.convert(heicFileData);
} catch (e) {
logError(e, "failed to convert heic file");
log.error("failed to convert heic file", e);
throw e;
}
}

View file

@ -1,5 +1,5 @@
import ElectronAPIs from "@/next/electron";
import { logError } from "@ente/shared/sentry";
import { ensureElectron } from "@/next/electron";
import log from "@/next/log";
import { PICKED_UPLOAD_TYPE } from "constants/upload";
import { Collection } from "types/collection";
import { ElectronFile, FileWithCollection } from "types/upload";
@ -14,13 +14,13 @@ class ImportService {
async getPendingUploads(): Promise<PendingUploads> {
try {
const pendingUploads =
(await ElectronAPIs.getPendingUploads()) as PendingUploads;
(await ensureElectron().getPendingUploads()) as PendingUploads;
return pendingUploads;
} catch (e) {
if (e?.message?.includes("ENOENT: no such file or directory")) {
// ignore
} else {
logError(e, "failed to getPendingUploads ");
log.error("failed to getPendingUploads ", e);
}
return { files: [], collectionName: null, type: null };
}
@ -40,7 +40,7 @@ class ImportService {
if (collections.length === 1) {
collectionName = collections[0].name;
}
await ElectronAPIs.setToUploadCollection(collectionName);
await ensureElectron().setToUploadCollection(collectionName);
}
async updatePendingUploads(files: FileWithCollection[]) {
@ -57,16 +57,17 @@ class ImportService {
filePaths.push((fileWithCollection.file as ElectronFile).path);
}
}
await ElectronAPIs.setToUploadFiles(
await ensureElectron().setToUploadFiles(
PICKED_UPLOAD_TYPE.FILES,
filePaths,
);
}
async cancelRemainingUploads() {
await ElectronAPIs.setToUploadCollection(null);
await ElectronAPIs.setToUploadFiles(PICKED_UPLOAD_TYPE.ZIPS, []);
await ElectronAPIs.setToUploadFiles(PICKED_UPLOAD_TYPE.FILES, []);
const electron = ensureElectron();
await electron.setToUploadCollection(null);
await electron.setToUploadFiles(PICKED_UPLOAD_TYPE.ZIPS, []);
await electron.setToUploadFiles(PICKED_UPLOAD_TYPE.FILES, []);
}
}

View file

@ -1,4 +1,4 @@
import { logError } from "@ente/shared/sentry";
import log from "@/next/log";
import { LocationTagData } from "types/entity";
import { Location } from "types/upload";
@ -30,7 +30,7 @@ class LocationSearchService {
});
await this.citiesPromise;
} catch (e) {
logError(e, "LocationSearchService loadCities failed");
log.error("LocationSearchService loadCities failed", e);
this.citiesPromise = null;
}
}
@ -47,7 +47,7 @@ class LocationSearchService {
.startsWith(searchTerm.toLowerCase());
});
} catch (e) {
logError(e, "LocationSearchService searchCities failed");
log.error("LocationSearchService searchCities failed", e);
throw e;
}
}

View file

@ -1,4 +1,4 @@
import { addLogLine } from "@ente/shared/logging";
import log from "@/next/log";
import { GraphModel } from "@tensorflow/tfjs-converter";
import * as tf from "@tensorflow/tfjs-core";
import {
@ -60,7 +60,7 @@ class BlazeFaceDetectionService implements FaceDetectionService {
inputHeight: BLAZEFACE_INPUT_SIZE,
inputWidth: BLAZEFACE_INPUT_SIZE,
});
addLogLine(
log.info(
"loaded blazeFaceModel: ",
// await this.blazeFaceModel,
// eslint-disable-next-line @typescript-eslint/await-thenable
@ -121,20 +121,20 @@ class BlazeFaceDetectionService implements FaceDetectionService {
let desiredDist = desiredRightEyeX - this.desiredLeftEye[0];
desiredDist *= this.desiredFaceSize;
const scale = desiredDist / dist;
// addLogLine("scale: ", scale);
// log.info("scale: ", scale);
const eyesCenter = [];
eyesCenter[0] = Math.floor((leftEye[0] + rightEye[0]) / 2);
eyesCenter[1] = Math.floor((leftEye[1] + rightEye[1]) / 2);
// addLogLine("eyesCenter: ", eyesCenter);
// log.info("eyesCenter: ", eyesCenter);
const faceWidth = this.desiredFaceSize / scale;
const faceHeight = this.desiredFaceSize / scale;
// addLogLine("faceWidth: ", faceWidth, "faceHeight: ", faceHeight)
// log.info("faceWidth: ", faceWidth, "faceHeight: ", faceHeight)
const tx = eyesCenter[0] - faceWidth * 0.5;
const ty = eyesCenter[1] - faceHeight * this.desiredLeftEye[1];
// addLogLine("tx: ", tx, "ty: ", ty);
// log.info("tx: ", tx, "ty: ", ty);
return new Box({
left: tx,
@ -155,7 +155,7 @@ class BlazeFaceDetectionService implements FaceDetectionService {
const normalizedImage = tf.sub(tf.div(reshapedImage, 127.5), 1.0);
// eslint-disable-next-line @typescript-eslint/await-thenable
const results = await this.blazeFaceBackModel.predict(normalizedImage);
// addLogLine('onFacesDetected: ', results);
// log.info('onFacesDetected: ', results);
return results;
}
@ -180,7 +180,7 @@ class BlazeFaceDetectionService implements FaceDetectionService {
const inBox = newBox(0, 0, resized.width, resized.height);
const toBox = newBox(0, 0, imageBitmap.width, imageBitmap.height);
const transform = computeTransformToBox(inBox, toBox);
// addLogLine("1st pass: ", { transform });
// log.info("1st pass: ", { transform });
const faceDetections: Array<FaceDetection> = faces?.map((f) => {
const box = transformBox(normFaceBox(f), transform);
@ -223,7 +223,7 @@ class BlazeFaceDetectionService implements FaceDetectionService {
);
let selected = pass2Detections?.[0];
if (pass2Detections?.length > 1) {
// addLogLine('2nd pass >1 face', pass2Detections.length);
// log.info('2nd pass >1 face', pass2Detections.length);
selected = getNearestDetection(
pass1Detection,
pass2Detections,
@ -234,7 +234,7 @@ class BlazeFaceDetectionService implements FaceDetectionService {
// we might miss 1st pass face actually having score within threshold
// it is ok as results will be consistent with 2nd pass only detections
if (selected && selected.probability >= BLAZEFACE_SCORE_THRESHOLD) {
// addLogLine("pass2: ", { imageBox, paddedBox, transform, selected });
// log.info("pass2: ", { imageBox, paddedBox, transform, selected });
detections.push(selected);
}
}

View file

@ -26,7 +26,7 @@ class ClusteringService {
epsilon: number = 1.0,
minPts: number = 2,
): ClusteringResults {
// addLogLine("distanceFunction", DBSCAN._);
// log.info("distanceFunction", DBSCAN._);
const clusters = this.dbscan.run(dataset, epsilon, minPts);
const noise = this.dbscan.noise;
return { clusters, noise };

View file

@ -22,7 +22,7 @@ class DbscanClusteringService implements ClusteringService {
input: ClusteringInput,
config: ClusteringConfig,
): Promise<HdbscanResults> {
// addLogLine('Clustering input: ', input);
// log.info('Clustering input: ', input);
const dbscan = new DBSCAN();
const clusters = dbscan.run(
input,

View file

@ -1,4 +1,4 @@
import { addLogLine } from "@ente/shared/logging";
import log from "@/next/log";
import {
DetectedFace,
Face,
@ -51,7 +51,7 @@ class FaceService {
);
const faceDetections =
await syncContext.faceDetectionService.detectFaces(imageBitmap);
// addLogLine('3 TF Memory stats: ',JSON.stringify(tf.memory()));
// log.info('3 TF Memory stats: ',JSON.stringify(tf.memory()));
// TODO: reenable faces filtering based on width
const detectedFaces = faceDetections?.map((detection) => {
return {
@ -66,7 +66,7 @@ class FaceService {
// ?.filter((f) =>
// f.box.width > syncContext.config.faceDetection.minFaceSize
// );
addLogLine("[MLService] Detected Faces: ", newMlFile.faces?.length);
log.info("[MLService] Detected Faces: ", newMlFile.faces?.length);
}
async syncFileFaceCrops(
@ -128,8 +128,8 @@ class FaceService {
face.detection,
);
}
addLogLine("[MLService] alignedFaces: ", newMlFile.faces?.length);
// addLogLine('4 TF Memory stats: ',JSON.stringify(tf.memory()));
log.info("[MLService] alignedFaces: ", newMlFile.faces?.length);
// log.info('4 TF Memory stats: ',JSON.stringify(tf.memory()));
}
async syncFileFaceEmbeddings(
@ -168,8 +168,8 @@ class FaceService {
faceImages.forEach((faceImage) => faceImage.close());
newMlFile.faces.forEach((f, i) => (f.embedding = embeddings[i]));
addLogLine("[MLService] facesWithEmbeddings: ", newMlFile.faces.length);
// addLogLine('5 TF Memory stats: ',JSON.stringify(tf.memory()));
log.info("[MLService] facesWithEmbeddings: ", newMlFile.faces.length);
// log.info('5 TF Memory stats: ',JSON.stringify(tf.memory()));
}
async saveFaceCrop(
@ -210,14 +210,14 @@ class FaceService {
const clusteringConfig = syncContext.config.faceClustering;
if (!allFaces || allFaces.length < clusteringConfig.minInputSize) {
addLogLine(
log.info(
"[MLService] Too few faces to cluster, not running clustering: ",
allFaces.length,
);
return;
}
addLogLine("Running clustering allFaces: ", allFaces.length);
log.info("Running clustering allFaces: ", allFaces.length);
syncContext.mlLibraryData.faceClusteringResults =
await syncContext.faceClusteringService.cluster(
allFaces.map((f) => Array.from(f.embedding)),
@ -225,7 +225,7 @@ class FaceService {
);
syncContext.mlLibraryData.faceClusteringMethod =
syncContext.faceClusteringService.method;
addLogLine(
log.info(
"[MLService] Got face clustering results: ",
JSON.stringify(syncContext.mlLibraryData.faceClusteringResults),
);

View file

@ -22,7 +22,7 @@ class HdbscanClusteringService implements ClusteringService {
input: ClusteringInput,
config: ClusteringConfig,
): Promise<HdbscanResults> {
// addLogLine('Clustering input: ', input);
// log.info('Clustering input: ', input);
const hdbscan = new Hdbscan({
input,

View file

@ -1,4 +1,4 @@
import { addLogLine } from "@ente/shared/logging";
import log from "@/next/log";
import * as tfjsConverter from "@tensorflow/tfjs-converter";
import * as tf from "@tensorflow/tfjs-core";
import { SCENE_DETECTION_IMAGE_SIZE } from "constants/mlConfig";
@ -26,7 +26,7 @@ class ImageScene implements SceneDetectionService {
}
private async init() {
addLogLine(`[${this.workerID}]`, "ImageScene init called");
log.info(`[${this.workerID}]`, "ImageScene init called");
if (this.model) {
return;
}
@ -38,7 +38,7 @@ class ImageScene implements SceneDetectionService {
this.model = await tfjsConverter.loadGraphModel(
"/models/imagescene/model.json",
);
addLogLine(
log.info(
`[${this.workerID}]`,
"loaded ImageScene model",
tf.getBackend(),
@ -52,10 +52,7 @@ class ImageScene implements SceneDetectionService {
}
private async getImageSceneModel() {
addLogLine(
`[${this.workerID}]`,
"ImageScene getImageSceneModel called",
);
log.info(`[${this.workerID}]`, "ImageScene getImageSceneModel called");
if (!this.ready) {
this.ready = this.init();
}

View file

@ -1,8 +1,8 @@
import { haveWindow } from "@/next/env";
import log from "@/next/log";
import { ComlinkWorker } from "@/next/worker/comlink-worker";
import { getDedicatedCryptoWorker } from "@ente/shared/crypto";
import { DedicatedCryptoWorker } from "@ente/shared/crypto/internal/crypto.worker";
import { addLogLine } from "@ente/shared/logging";
import PQueue from "p-queue";
import { EnteFile } from "types/file";
import {
@ -198,7 +198,7 @@ export class LocalMLSyncContext implements MLSyncContext {
this.concurrency = concurrency || getConcurrency();
addLogLine("Using concurrency: ", this.concurrency);
log.info("Using concurrency: ", this.concurrency);
// timeout is added on downloads
// timeout on queue will keep the operation open till worker is terminated
this.syncQueue = new PQueue({ concurrency: this.concurrency });

View file

@ -1,7 +1,6 @@
import log from "@/next/log";
import { APPS } from "@ente/shared/apps/constants";
import { CustomError, parseUploadErrorCodes } from "@ente/shared/error";
import { addLogLine } from "@ente/shared/logging";
import { logError } from "@ente/shared/sentry";
import "@tensorflow/tfjs-backend-cpu";
import "@tensorflow/tfjs-backend-webgl";
import * as tf from "@tensorflow/tfjs-core";
@ -79,10 +78,10 @@ class MachineLearningService {
tsne: syncContext.tsne,
error: syncContext.error,
};
// addLogLine('[MLService] sync results: ', mlSyncResult);
// log.info('[MLService] sync results: ', mlSyncResult);
// await syncContext.dispose();
addLogLine("Final TF Memory stats: ", JSON.stringify(tf.memory()));
log.info("Final TF Memory stats: ", JSON.stringify(tf.memory()));
return mlSyncResult;
}
@ -140,7 +139,7 @@ class MachineLearningService {
let updated = false;
if (newFileIds.length > 0) {
addLogLine("newFiles: ", newFileIds.length);
log.info("newFiles: ", newFileIds.length);
const newFiles = newFileIds.map((fileId) => this.newMlData(fileId));
await mlIDbStorage.putAllFiles(newFiles, tx);
updated = true;
@ -154,7 +153,7 @@ class MachineLearningService {
}
if (removedFileIds.length > 0) {
addLogLine("removedFiles: ", removedFileIds.length);
log.info("removedFiles: ", removedFileIds.length);
await mlIDbStorage.removeAllFiles(removedFileIds, tx);
updated = true;
}
@ -166,7 +165,7 @@ class MachineLearningService {
await mlIDbStorage.incrementIndexVersion("files");
}
addLogLine("syncLocalFiles", Date.now() - startTime, "ms");
log.info("syncLocalFiles", Date.now() - startTime, "ms");
}
private async getOutOfSyncFiles(syncContext: MLSyncContext) {
@ -177,13 +176,13 @@ class MachineLearningService {
MAX_ML_SYNC_ERROR_COUNT,
);
addLogLine("fileIds: ", JSON.stringify(fileIds));
log.info("fileIds: ", JSON.stringify(fileIds));
const localFilesMap = await this.getLocalFilesMap(syncContext);
syncContext.outOfSyncFiles = fileIds.map((fileId) =>
localFilesMap.get(fileId),
);
addLogLine("getOutOfSyncFiles", Date.now() - startTime, "ms");
log.info("getOutOfSyncFiles", Date.now() - startTime, "ms");
}
private async syncFiles(syncContext: MLSyncContext) {
@ -206,7 +205,7 @@ class MachineLearningService {
syncContext.error = error;
}
await syncContext.syncQueue.onIdle();
addLogLine("allFaces: ", syncContext.nSyncedFaces);
log.info("allFaces: ", syncContext.nSyncedFaces);
// TODO: In case syncJob has to use multiple ml workers
// do in same transaction with each file update
@ -217,32 +216,32 @@ class MachineLearningService {
private async getSyncContext(token: string, userID: number) {
if (!this.syncContext) {
addLogLine("Creating syncContext");
log.info("Creating syncContext");
this.syncContext = getMLSyncConfig().then((mlSyncConfig) =>
MLFactory.getMLSyncContext(token, userID, mlSyncConfig, true),
);
} else {
addLogLine("reusing existing syncContext");
log.info("reusing existing syncContext");
}
return this.syncContext;
}
private async getLocalSyncContext(token: string, userID: number) {
if (!this.localSyncContext) {
addLogLine("Creating localSyncContext");
log.info("Creating localSyncContext");
this.localSyncContext = getMLSyncConfig().then((mlSyncConfig) =>
MLFactory.getMLSyncContext(token, userID, mlSyncConfig, false),
);
} else {
addLogLine("reusing existing localSyncContext");
log.info("reusing existing localSyncContext");
}
return this.localSyncContext;
}
public async closeLocalSyncContext() {
if (this.localSyncContext) {
addLogLine("Closing localSyncContext");
log.info("Closing localSyncContext");
const syncContext = await this.localSyncContext;
await syncContext.dispose();
this.localSyncContext = undefined;
@ -294,7 +293,7 @@ class MachineLearningService {
syncContext.nSyncedFiles += 1;
return mlFileData;
} catch (e) {
logError(e, "ML syncFile failed");
log.error("ML syncFile failed", e);
let error = e;
console.error(
"Error in ml sync, fileId: ",
@ -320,7 +319,7 @@ class MachineLearningService {
await this.persistMLFileSyncError(syncContext, enteFile, error);
syncContext.nSyncedFiles += 1;
} finally {
addLogLine("TF Memory stats: ", JSON.stringify(tf.memory()));
log.info("TF Memory stats: ", JSON.stringify(tf.memory()));
}
}
@ -362,13 +361,13 @@ class MachineLearningService {
newMlFile.lastErrorMessage = undefined;
await this.persistMLFileData(syncContext, newMlFile);
} catch (e) {
logError(e, "ml detection failed");
log.error("ml detection failed", e);
newMlFile.mlVersion = oldMlFile.mlVersion;
throw e;
} finally {
fileContext.tfImage && fileContext.tfImage.dispose();
fileContext.imageBitmap && fileContext.imageBitmap.close();
// addLogLine('8 TF Memory stats: ',JSON.stringify(tf.memory()));
// log.info('8 TF Memory stats: ',JSON.stringify(tf.memory()));
// TODO: enable once faceId changes go in
// await removeOldFaceCrops(
@ -387,7 +386,7 @@ class MachineLearningService {
await tf.ready();
addLogLine("01 TF Memory stats: ", JSON.stringify(tf.memory()));
log.info("01 TF Memory stats: ", JSON.stringify(tf.memory()));
this.initialized = true;
}
@ -464,7 +463,7 @@ class MachineLearningService {
await FaceService.syncFileFaceEmbeddings(syncContext, fileContext);
}
addLogLine(
log.info(
`face detection time taken ${fileContext.enteFile.id}`,
Date.now() - startTime,
"ms",

View file

@ -1,7 +1,6 @@
import log from "@/next/log";
import { ComlinkWorker } from "@/next/worker/comlink-worker";
import { eventBus, Events } from "@ente/shared/events";
import { addLogLine } from "@ente/shared/logging";
import { logError } from "@ente/shared/sentry";
import { getToken, getUserID } from "@ente/shared/storage/localStorage/helpers";
import { FILE_TYPE } from "constants/file";
import debounce from "debounce";
@ -51,7 +50,7 @@ class MLWorkManager {
public async setMlSearchEnabled(enabled: boolean) {
if (!this.mlSearchEnabled && enabled) {
addLogLine("Enabling MLWorkManager");
log.info("Enabling MLWorkManager");
this.mlSearchEnabled = true;
logQueueStats(this.liveSyncQueue, "livesync");
@ -70,7 +69,7 @@ class MLWorkManager {
await this.startSyncJob();
} else if (this.mlSearchEnabled && !enabled) {
addLogLine("Disabling MLWorkManager");
log.info("Disabling MLWorkManager");
this.mlSearchEnabled = false;
this.liveSyncQueue.removeAllListeners();
@ -92,23 +91,23 @@ class MLWorkManager {
// Handlers
private async appStartHandler() {
addLogLine("appStartHandler");
log.info("appStartHandler");
try {
this.startSyncJob();
} catch (e) {
logError(e, "Failed in ML appStart Handler");
log.error("Failed in ML appStart Handler", e);
}
}
private async logoutHandler() {
addLogLine("logoutHandler");
log.info("logoutHandler");
try {
this.stopSyncJob();
this.mlSyncJob = undefined;
await this.terminateLiveSyncWorker();
await mlIDbStorage.clearMLDB();
} catch (e) {
logError(e, "Failed in ML logout Handler");
log.error("Failed in ML logout Handler", e);
}
}
@ -119,9 +118,9 @@ class MLWorkManager {
if (!this.mlSearchEnabled) {
return;
}
addLogLine("fileUploadedHandler: ", arg.enteFile.id);
log.info("fileUploadedHandler: ", arg.enteFile.id);
if (arg.enteFile.metadata.fileType !== FILE_TYPE.IMAGE) {
addLogLine("Skipping non image file for local file processing");
log.info("Skipping non image file for local file processing");
return;
}
try {
@ -134,7 +133,7 @@ class MLWorkManager {
}
private async localFilesUpdatedHandler() {
addLogLine("Local files updated");
log.info("Local files updated");
this.startSyncJob();
}
@ -165,7 +164,7 @@ class MLWorkManager {
}
private async onLiveSyncIdle() {
addLogLine("Live sync idle");
log.info("Live sync idle");
await this.terminateLiveSyncWorker();
this.mlSearchEnabled && this.startSyncJob();
}
@ -206,7 +205,7 @@ class MLWorkManager {
// TODO: skipping is not required if we are caching chunks through service worker
// currently worker chunk itself is not loaded when network is not there
if (!navigator.onLine) {
addLogLine(
log.info(
"Skipping ml-sync job run as not connected to internet.",
);
return {
@ -227,25 +226,25 @@ class MLWorkManager {
!!mlSyncResult.error || mlSyncResult.nOutOfSyncFiles < 1,
mlSyncResult,
};
addLogLine("ML Sync Job result: ", JSON.stringify(jobResult));
log.info("ML Sync Job result: ", JSON.stringify(jobResult));
// TODO: redirect/refresh to gallery in case of session_expired, stop ml sync job
return jobResult;
} catch (e) {
logError(e, "Failed to run MLSync Job");
log.error("Failed to run MLSync Job", e);
}
}
public async startSyncJob() {
try {
addLogLine("MLWorkManager.startSyncJob");
log.info("MLWorkManager.startSyncJob");
if (!this.mlSearchEnabled) {
addLogLine("ML Search disabled, not starting ml sync job");
log.info("ML Search disabled, not starting ml sync job");
return;
}
if (!getToken()) {
addLogLine("User not logged in, not starting ml sync job");
log.info("User not logged in, not starting ml sync job");
return;
}
const mlSyncJobConfig = await getMLSyncJobConfig();
@ -256,17 +255,17 @@ class MLWorkManager {
}
this.mlSyncJob.start();
} catch (e) {
logError(e, "Failed to start MLSync Job");
log.error("Failed to start MLSync Job", e);
}
}
public stopSyncJob(terminateWorker: boolean = true) {
try {
addLogLine("MLWorkManager.stopSyncJob");
log.info("MLWorkManager.stopSyncJob");
this.mlSyncJob?.stop();
terminateWorker && this.terminateSyncJobWorker();
} catch (e) {
logError(e, "Failed to stop MLSync Job");
log.error("Failed to stop MLSync Job", e);
}
}
}

View file

@ -1,4 +1,4 @@
import { addLogLine } from "@ente/shared/logging";
import log from "@/next/log";
import * as tf from "@tensorflow/tfjs-core";
import { TFLiteModel } from "@tensorflow/tfjs-tflite";
import { MOBILEFACENET_FACE_SIZE } from "constants/mlConfig";
@ -37,7 +37,7 @@ class MobileFaceNetEmbeddingService implements FaceEmbeddingService {
"/models/mobilefacenet/mobilefacenet.tflite",
);
addLogLine("loaded mobileFaceNetModel: ", tf.getBackend());
log.info("loaded mobileFaceNetModel: ", tf.getBackend());
}
private async getMobileFaceNetModel() {

View file

@ -1,4 +1,4 @@
import { addLogLine } from "@ente/shared/logging";
import log from "@/next/log";
import {
DetectedObject,
MLSyncContext,
@ -61,7 +61,7 @@ class ObjectService {
syncContext.config.sceneDetection.minScore,
)),
);
// addLogLine('3 TF Memory stats: ',JSON.stringify(tf.memory()));
// log.info('3 TF Memory stats: ',JSON.stringify(tf.memory()));
// TODO: reenable faces filtering based on width
const detectedObjects = objectDetections?.map((detection) => {
return {
@ -77,13 +77,13 @@ class ObjectService {
// ?.filter((f) =>
// f.box.width > syncContext.config.faceDetection.minFaceSize
// );
addLogLine(
log.info(
`object detection time taken ${fileContext.enteFile.id}`,
Date.now() - startTime,
"ms",
);
addLogLine("[MLService] Detected Objects: ", newMlFile.objects?.length);
log.info("[MLService] Detected Objects: ", newMlFile.objects?.length);
}
async getAllSyncedObjectsMap(syncContext: MLSyncContext) {
@ -115,9 +115,9 @@ class ObjectService {
async syncThingsIndex(syncContext: MLSyncContext) {
const filesVersion = await mlIDbStorage.getIndexVersion("files");
addLogLine("things", await mlIDbStorage.getIndexVersion("things"));
log.info("things", await mlIDbStorage.getIndexVersion("things"));
if (filesVersion <= (await mlIDbStorage.getIndexVersion("things"))) {
addLogLine(
log.info(
"[MLService] Skipping people index as already synced to latest version",
);
return;

View file

@ -1,4 +1,4 @@
import { addLogLine } from "@ente/shared/logging";
import log from "@/next/log";
import { Face, MLSyncContext, Person } from "types/machineLearning";
import {
findFirstIfSorted,
@ -20,7 +20,7 @@ class PeopleService {
syncContext.faceClusteringService.method,
)
) {
addLogLine(
log.info(
"[MLService] Skipping people index as already synced to latest version",
);
return;
@ -84,7 +84,7 @@ class PeopleService {
faces.forEach((face) => {
face.personId = person.id;
});
// addLogLine("Creating person: ", person, faces);
// log.info("Creating person: ", person, faces);
}
await mlIDbStorage.updateFaces(allFacesMap);

View file

@ -1,4 +1,4 @@
import { logError } from "@ente/shared/sentry";
import log from "@/next/log";
import { FILE_TYPE } from "constants/file";
import { MLSyncContext, MLSyncFileContext } from "types/machineLearning";
import {
@ -16,7 +16,7 @@ class ReaderService {
if (fileContext.imageBitmap) {
return fileContext.imageBitmap;
}
// addLogLine('1 TF Memory stats: ',JSON.stringify(tf.memory()));
// log.info('1 TF Memory stats: ',JSON.stringify(tf.memory()));
if (fileContext.localFile) {
if (
fileContext.enteFile.metadata.fileType !== FILE_TYPE.IMAGE
@ -47,11 +47,11 @@ class ReaderService {
fileContext.newMlFile.imageSource = syncContext.config.imageSource;
const { width, height } = fileContext.imageBitmap;
fileContext.newMlFile.imageDimensions = { width, height };
// addLogLine('2 TF Memory stats: ',JSON.stringify(tf.memory()));
// log.info('2 TF Memory stats: ',JSON.stringify(tf.memory()));
return fileContext.imageBitmap;
} catch (e) {
logError(e, "failed to create image bitmap");
log.error("failed to create image bitmap", e);
throw e;
}
}

View file

@ -1,3 +1,4 @@
import log from "@/next/log";
import * as tf from "@tensorflow/tfjs-core";
import {
ObjectDetection,
@ -6,7 +7,6 @@ import {
Versioned,
} from "types/machineLearning";
import { addLogLine } from "@ente/shared/logging";
import * as SSDMobileNet from "@tensorflow-models/coco-ssd";
import { OBJECT_DETECTION_IMAGE_SIZE } from "constants/mlConfig";
import { resizeToSquare } from "utils/image";
@ -28,7 +28,7 @@ class SSDMobileNetV2 implements ObjectDetectionService {
base: "mobilenet_v2",
modelUrl: "/models/ssdmobilenet/model.json",
});
addLogLine("loaded ssdMobileNetV2Model", tf.getBackend());
log.info("loaded ssdMobileNetV2Model", tf.getBackend());
}
private async getSSDMobileNetV2Model() {

View file

@ -1,8 +1,8 @@
import log from "@/next/log";
import ComlinkCryptoWorker from "@ente/shared/crypto";
import { CustomError, parseSharingErrorCodes } from "@ente/shared/error";
import HTTPService from "@ente/shared/network/HTTPService";
import { getEndpoint } from "@ente/shared/network/api";
import { logError } from "@ente/shared/sentry";
import localForage from "@ente/shared/storage/localForage";
import { Collection, CollectionPublicMagicMetadata } from "types/collection";
import { EncryptedEnteFile, EnteFile } from "types/file";
@ -223,14 +223,14 @@ export const syncPublicFiles = async (
setPublicFiles([...sortFiles(mergeMetadata(files), sortAsc)]);
} catch (e) {
const parsedError = parseSharingErrorCodes(e);
logError(e, "failed to sync shared collection files");
log.error("failed to sync shared collection files", e);
if (parsedError.message === CustomError.TOKEN_EXPIRED) {
throw e;
}
}
return [...sortFiles(mergeMetadata(files), sortAsc)];
} catch (e) {
logError(e, "failed to get local or sync shared collection files");
log.error("failed to get local or sync shared collection files", e);
throw e;
}
};
@ -294,7 +294,7 @@ const getPublicFiles = async (
} while (resp.data.hasMore);
return decryptedFiles;
} catch (e) {
logError(e, "Get public files failed");
log.error("Get public files failed", e);
throw e;
}
};
@ -347,7 +347,7 @@ export const getPublicCollection = async (
await saveReferralCode(referralCode);
return [collection, referralCode];
} catch (e) {
logError(e, "failed to get public collection");
log.error("failed to get public collection", e);
throw e;
}
};
@ -366,7 +366,7 @@ export const verifyPublicCollectionPassword = async (
const jwtToken = resp.data.jwtToken;
return jwtToken;
} catch (e) {
logError(e, "failed to verify public collection password");
log.error("failed to verify public collection password", e);
throw e;
}
};

View file

@ -1,5 +1,5 @@
import { convertBytesToHumanReadable } from "@/next/file";
import { logError } from "@ente/shared/sentry";
import log from "@/next/log";
import { ElectronFile } from "types/upload";
export async function getUint8ArrayView(
@ -8,9 +8,10 @@ export async function getUint8ArrayView(
try {
return new Uint8Array(await file.arrayBuffer());
} catch (e) {
logError(e, "reading file blob failed", {
fileSize: convertBytesToHumanReadable(file.size),
});
log.error(
`Failed to read file blob of size ${convertBytesToHumanReadable(file.size)}`,
e,
);
throw e;
}
}

View file

@ -1,11 +1,8 @@
import * as chrono from "chrono-node";
import { t } from "i18next";
import { getAllPeople } from "utils/machineLearning";
import log from "@/next/log";
import { CustomError } from "@ente/shared/error";
import { addLogLine } from "@ente/shared/logging";
import { logError } from "@ente/shared/sentry";
import * as chrono from "chrono-node";
import { FILE_TYPE } from "constants/file";
import { t } from "i18next";
import { Collection } from "types/collection";
import { Model } from "types/embedding";
import { EntityType, LocationTag, LocationTagData } from "types/entity";
@ -21,6 +18,7 @@ import {
} from "types/search";
import ComlinkSearchWorker from "utils/comlink/ComlinkSearchWorker";
import { getUniqueFiles } from "utils/file";
import { getAllPeople } from "utils/machineLearning";
import { getMLSyncConfig } from "utils/machineLearning/config";
import { getFormattedDate } from "utils/search";
import mlIDbStorage from "utils/storage/mlIDbStorage";
@ -64,7 +62,7 @@ export const getAutoCompleteSuggestions =
return convertSuggestionsToOptions(suggestions);
} catch (e) {
logError(e, "getAutoCompleteSuggestions failed");
log.error("getAutoCompleteSuggestions failed", e);
return [];
}
};
@ -159,7 +157,7 @@ function getYearSuggestion(searchPhrase: string): Suggestion[] {
];
}
} catch (e) {
logError(e, "getYearSuggestion failed");
log.error("getYearSuggestion failed", e);
}
}
return [];
@ -175,7 +173,7 @@ export async function getAllPeopleSuggestion(): Promise<Array<Suggestion>> {
hide: true,
}));
} catch (e) {
logError(e, "getAllPeopleSuggestion failed");
log.error("getAllPeopleSuggestion failed", e);
return [];
}
}
@ -205,7 +203,7 @@ export async function getIndexStatusSuggestion(): Promise<Suggestion> {
hide: true,
};
} catch (e) {
logError(e, "getIndexStatusSuggestion failed");
log.error("getIndexStatusSuggestion failed", e);
}
}
@ -319,7 +317,7 @@ async function getClipSuggestion(searchPhrase: string): Promise<Suggestion> {
};
} catch (e) {
if (!e.message?.includes(CustomError.MODEL_DOWNLOAD_PENDING)) {
logError(e, "getClipSuggestion failed");
log.error("getClipSuggestion failed", e);
}
return null;
}
@ -383,7 +381,7 @@ async function searchLocationTag(searchPhrase: string): Promise<LocationTag[]> {
locationTag.data.name.toLowerCase().includes(searchPhrase),
);
if (matchedLocationTags.length > 0) {
addLogLine(
log.info(
`Found ${matchedLocationTags.length} location tags for search phrase`,
);
}

View file

@ -1,16 +1,14 @@
import log from "@/next/log";
import HTTPService from "@ente/shared/network/HTTPService";
import { getEndpoint } from "@ente/shared/network/api";
import { logError } from "@ente/shared/sentry";
import localForage from "@ente/shared/storage/localForage";
import { getToken } from "@ente/shared/storage/localStorage/helpers";
import { Collection } from "types/collection";
import { SetFiles } from "types/gallery";
import { decryptFile, sortTrashFiles } from "utils/file";
import { getCollection } from "./collectionService";
import HTTPService from "@ente/shared/network/HTTPService";
import { EnteFile } from "types/file";
import { SetFiles } from "types/gallery";
import { EncryptedTrashItem, Trash } from "types/trash";
import { mergeMetadata } from "utils/file";
import { decryptFile, mergeMetadata, sortTrashFiles } from "utils/file";
import { getCollection } from "./collectionService";
const TRASH = "file-trash";
const TRASH_TIME = "trash-time";
@ -135,7 +133,7 @@ export const updateTrash = async (
} while (resp.data.hasMore);
return updatedTrash;
} catch (e) {
logError(e, "Get trash files failed");
log.error("Get trash files failed", e);
}
return currentTrash;
};
@ -170,7 +168,7 @@ export const emptyTrash = async () => {
},
);
} catch (e) {
logError(e, "empty trash failed");
log.error("empty trash failed", e);
throw e;
}
};

View file

@ -1,6 +1,5 @@
import { convertBytesToHumanReadable } from "@/next/file";
import log from "@/next/log";
import { CustomError } from "@ente/shared/error";
import { logError } from "@ente/shared/sentry";
import { FILE_TYPE } from "constants/file";
import {
KNOWN_NON_MEDIA_FORMATS,
@ -10,7 +9,6 @@ import FileType, { FileTypeResult } from "file-type";
import { ElectronFile, FileTypeInfo } from "types/upload";
import { getFileExtension } from "utils/file";
import { getUint8ArrayView } from "./readerService";
import { getFileSize } from "./upload/fileService";
const TYPE_VIDEO = "video";
const TYPE_IMAGE = "image";
@ -51,7 +49,6 @@ export async function getFileType(
};
} catch (e) {
const fileFormat = getFileExtension(receivedFile.name);
const fileSize = convertBytesToHumanReadable(getFileSize(receivedFile));
const whiteListedFormat = WHITELISTED_FILE_FORMATS.find(
(a) => a.exactType === fileFormat,
);
@ -62,16 +59,10 @@ export async function getFileType(
throw Error(CustomError.UNSUPPORTED_FILE_FORMAT);
}
if (e.message === CustomError.NON_MEDIA_FILE) {
logError(e, "unsupported file format", {
fileFormat,
fileSize,
});
log.error(`unsupported file format ${fileFormat}`, e);
throw Error(CustomError.UNSUPPORTED_FILE_FORMAT);
}
logError(e, "type detection failed", {
fileFormat,
fileSize,
});
log.error(`type detection failed for format ${fileFormat}`, e);
throw Error(CustomError.TYPE_DETECTION_FAILED(fileFormat));
}
}

View file

@ -1,4 +1,4 @@
import { logError } from "@ente/shared/sentry";
import log from "@/next/log";
import { validateAndGetCreationUnixTimeInMicroSeconds } from "@ente/shared/time";
import type { FixOption } from "components/FixCreationTime";
import { FILE_TYPE } from "constants/file";
@ -88,7 +88,7 @@ export async function updateCreationTimeWithExif(
updateExistingFilePubMetadata(file, updatedFile);
}
} catch (e) {
logError(e, "failed to updated a CreationTime With Exif");
log.error("failed to updated a CreationTime With Exif", e);
completedWithError = true;
} finally {
setProgressTracker({
@ -98,7 +98,7 @@ export async function updateCreationTimeWithExif(
}
}
} catch (e) {
logError(e, "update CreationTime With Exif failed");
log.error("update CreationTime With Exif failed", e);
completedWithError = true;
}
return completedWithError;

View file

@ -1,5 +1,5 @@
import log from "@/next/log";
import { CustomError } from "@ente/shared/error";
import { logError } from "@ente/shared/sentry";
import { validateAndGetCreationUnixTimeInMicroSeconds } from "@ente/shared/time";
import { EXIFLESS_FORMATS, NULL_LOCATION } from "constants/upload";
import exifr from "exifr";
@ -67,13 +67,15 @@ export async function getParsedExifData(
return parseExifData(filteredExifData);
} catch (e) {
if (e.message === EXIFR_UNSUPPORTED_FILE_FORMAT_MESSAGE) {
logError(e, "exif library unsupported format", {
fileType: fileTypeInfo.exactType,
});
log.error(
`exif library unsupported format ${fileTypeInfo.exactType}`,
e,
);
} else {
logError(e, "get parsed exif data failed", {
fileType: fileTypeInfo.exactType,
});
log.error(
`get parsed exif data failed for file type ${fileTypeInfo.exactType}`,
e,
);
throw e;
}
}
@ -128,13 +130,13 @@ function parseExifData(exifData: RawEXIFData): ParsedEXIFData {
parsedExif.imageWidth = ImageWidth;
parsedExif.imageHeight = ImageHeight;
} else {
logError(
new Error("ImageWidth or ImageHeight is not a number"),
"Image dimension parsing failed",
{
ImageWidth,
ImageHeight,
},
log.error(
`Image dimension parsing failed - ImageWidth or ImageHeight is not a number ${JSON.stringify(
{
ImageWidth,
ImageHeight,
},
)}`,
);
}
} else if (ExifImageWidth && ExifImageHeight) {
@ -145,13 +147,13 @@ function parseExifData(exifData: RawEXIFData): ParsedEXIFData {
parsedExif.imageWidth = ExifImageWidth;
parsedExif.imageHeight = ExifImageHeight;
} else {
logError(
new Error("ExifImageWidth or ExifImageHeight is not a number"),
"Image dimension parsing failed",
{
ExifImageWidth,
ExifImageHeight,
},
log.error(
`Image dimension parsing failed - ExifImageWidth or ExifImageHeight is not a number ${JSON.stringify(
{
ExifImageWidth,
ExifImageHeight,
},
)}`,
);
}
} else if (PixelXDimension && PixelYDimension) {
@ -162,13 +164,13 @@ function parseExifData(exifData: RawEXIFData): ParsedEXIFData {
parsedExif.imageWidth = PixelXDimension;
parsedExif.imageHeight = PixelYDimension;
} else {
logError(
new Error("PixelXDimension or PixelYDimension is not a number"),
"Image dimension parsing failed",
{
PixelXDimension,
PixelYDimension,
},
log.error(
`Image dimension parsing failed - PixelXDimension or PixelYDimension is not a number ${JSON.stringify(
{
PixelXDimension,
PixelYDimension,
},
)}`,
);
}
}
@ -229,9 +231,7 @@ function parseEXIFDate(dateTimeString: string) {
}
return date;
} catch (e) {
logError(e, "parseEXIFDate failed", {
dateTimeString,
});
log.error(`Failed to parseEXIFDate ${dateTimeString}`, e);
return null;
}
}
@ -265,12 +265,15 @@ export function parseEXIFLocation(
);
return { latitude, longitude };
} catch (e) {
logError(e, "parseEXIFLocation failed", {
gpsLatitude,
gpsLatitudeRef,
gpsLongitude,
gpsLongitudeRef,
});
log.error(
`Failed to parseEXIFLocation ${JSON.stringify({
gpsLatitude,
gpsLatitudeRef,
gpsLongitude,
gpsLongitudeRef,
})}`,
e,
);
return NULL_LOCATION;
}
}
@ -330,7 +333,7 @@ export async function updateFileCreationDateInEXIF(
const exifInsertedFile = piexif.insert(exifBytes, imageDataURL);
return dataURIToBlob(exifInsertedFile);
} catch (e) {
logError(e, "updateFileModifyDateInEXIF failed");
log.error("updateFileModifyDateInEXIF failed", e);
return fileBlob;
}
}

View file

@ -1,7 +1,9 @@
import { getFileNameSize } from "@/next/file";
import { addLogLine } from "@ente/shared/logging";
import { logError } from "@ente/shared/sentry";
import log from "@/next/log";
import { DedicatedCryptoWorker } from "@ente/shared/crypto/internal/crypto.worker";
import { Remote } from "comlink";
import { FILE_READER_CHUNK_SIZE, MULTIPART_PART_SIZE } from "constants/upload";
import { EncryptedMagicMetadata } from "types/magicMetadata";
import {
DataStream,
ElectronFile,
@ -13,10 +15,6 @@ import {
ParsedMetadataJSON,
ParsedMetadataJSONMap,
} from "types/upload";
import { DedicatedCryptoWorker } from "@ente/shared/crypto/internal/crypto.worker";
import { Remote } from "comlink";
import { EncryptedMagicMetadata } from "types/magicMetadata";
import {
getElectronFileStream,
getFileStream,
@ -47,7 +45,7 @@ export async function readFile(
rawFile,
fileTypeInfo,
);
addLogLine(`reading file data ${getFileNameSize(rawFile)} `);
log.info(`reading file data ${getFileNameSize(rawFile)} `);
let filedata: Uint8Array | DataStream;
if (!(rawFile instanceof File)) {
if (rawFile.size > MULTIPART_PART_SIZE) {
@ -64,7 +62,7 @@ export async function readFile(
filedata = await getUint8ArrayView(rawFile);
}
addLogLine(`read file data successfully ${getFileNameSize(rawFile)} `);
log.info(`read file data successfully ${getFileNameSize(rawFile)} `);
return {
filedata,
@ -152,7 +150,7 @@ export async function encryptFile(
};
return result;
} catch (e) {
logError(e, "Error encrypting files");
log.error("Error encrypting files", e);
throw e;
}
}

View file

@ -1,8 +1,7 @@
import { getFileNameSize } from "@/next/file";
import log from "@/next/log";
import { DedicatedCryptoWorker } from "@ente/shared/crypto/internal/crypto.worker";
import { CustomError } from "@ente/shared/error";
import { addLogLine } from "@ente/shared/logging";
import { logError } from "@ente/shared/sentry";
import { Remote } from "comlink";
import { FILE_READER_CHUNK_SIZE } from "constants/upload";
import { getElectronFileStream, getFileStream } from "services/readerService";
@ -13,7 +12,7 @@ export async function getFileHash(
file: File | ElectronFile,
) {
try {
addLogLine(`getFileHash called for ${getFileNameSize(file)}`);
log.info(`getFileHash called for ${getFileNameSize(file)}`);
let filedata: DataStream;
if (file instanceof File) {
filedata = getFileStream(file, FILE_READER_CHUNK_SIZE);
@ -38,14 +37,12 @@ export async function getFileHash(
throw Error(CustomError.CHUNK_MORE_THAN_EXPECTED);
}
const hash = await worker.completeChunkHashing(hashState);
addLogLine(
log.info(
`file hashing completed successfully ${getFileNameSize(file)}`,
);
return hash;
} catch (e) {
logError(e, "getFileHash failed");
addLogLine(
`file hashing failed ${getFileNameSize(file)} ,${e.message} `,
);
log.error("getFileHash failed", e);
log.info(`file hashing failed ${getFileNameSize(file)} ,${e.message} `);
}
}

View file

@ -1,6 +1,6 @@
import log from "@/next/log";
import { DedicatedCryptoWorker } from "@ente/shared/crypto/internal/crypto.worker";
import { CustomError } from "@ente/shared/error";
import { logError } from "@ente/shared/sentry";
import { Remote } from "comlink";
import { FILE_TYPE } from "constants/file";
import { LIVE_PHOTO_ASSET_SIZE_LIMIT } from "constants/upload";
@ -213,7 +213,7 @@ export async function clusterLivePhotoFiles(mediaFiles: FileWithCollection[]) {
if (e.message === CustomError.UPLOAD_CANCELLED) {
throw e;
} else {
logError(e, "failed to cluster live photo");
log.error("failed to cluster live photo", e);
throw e;
}
}
@ -265,15 +265,13 @@ function areFilesLivePhotoAssets(
) {
return true;
} else {
logError(
new Error(CustomError.TOO_LARGE_LIVE_PHOTO_ASSETS),
CustomError.TOO_LARGE_LIVE_PHOTO_ASSETS,
{
log.error(
`${CustomError.TOO_LARGE_LIVE_PHOTO_ASSETS} - ${JSON.stringify({
fileSizes: [
firstFileIdentifier.size,
secondFileIdentifier.size,
],
},
})}`,
);
}
}

View file

@ -1,5 +1,5 @@
import log from "@/next/log";
import { DedicatedCryptoWorker } from "@ente/shared/crypto/internal/crypto.worker";
import { logError } from "@ente/shared/sentry";
import {
parseDateFromFusedDateString,
tryToParseDateTime,
@ -109,7 +109,7 @@ export async function getImageMetadata(
height: exifData?.imageHeight ?? null,
};
} catch (e) {
logError(e, "getExifData failed");
log.error("getExifData failed", e);
}
return imageMetadata;
}
@ -206,7 +206,7 @@ export async function parseMetadataJSON(receivedFile: File | ElectronFile) {
}
return parsedMetadataJSON;
} catch (e) {
logError(e, "parseMetadataJSON failed");
log.error("parseMetadataJSON failed", e);
// ignore
}
}
@ -237,7 +237,7 @@ export function extractDateFromFileName(filename: string): number {
}
return validateAndGetCreationUnixTimeInMicroSeconds(parsedDate);
} catch (e) {
logError(e, "failed to extract date From FileName ");
log.error("failed to extract date From FileName ", e);
return null;
}
}
@ -250,7 +250,7 @@ function convertSignalNameToFusedDateString(filename: string) {
const EDITED_FILE_SUFFIX = "-edited";
/*
Get the original file name for edited file to associate it to original file's metadataJSON file
Get the original file name for edited file to associate it to original file's metadataJSON file
as edited file doesn't have their own metadata file
*/
function getFileOriginalName(fileName: string) {

View file

@ -1,7 +1,7 @@
import log from "@/next/log";
import { CustomError, handleUploadError } from "@ente/shared/error";
import HTTPService from "@ente/shared/network/HTTPService";
import { getEndpoint } from "@ente/shared/network/api";
import { logError } from "@ente/shared/sentry";
import { EnteFile } from "types/file";
import { MultipartUploadURLs, UploadFile, UploadURL } from "types/upload";
import { retryHTTPCall } from "utils/upload/uploadRetrier";
@ -39,7 +39,7 @@ class PublicUploadHttpClient {
);
return response.data;
} catch (e) {
logError(e, "upload public File Failed");
log.error("upload public File Failed", e);
throw e;
}
}
@ -78,7 +78,7 @@ class PublicUploadHttpClient {
}
return this.uploadURLFetchInProgress;
} catch (e) {
logError(e, "fetch public upload-url failed ");
log.error("fetch public upload-url failed ", e);
throw e;
}
}
@ -107,7 +107,7 @@ class PublicUploadHttpClient {
return response.data["urls"];
} catch (e) {
logError(e, "fetch public multipart-upload-url failed");
log.error("fetch public multipart-upload-url failed", e);
throw e;
}
}

View file

@ -1,8 +1,7 @@
import ElectronAPIs from "@/next/electron";
import { ensureElectron } from "@/next/electron";
import { convertBytesToHumanReadable, getFileNameSize } from "@/next/file";
import log from "@/next/log";
import { CustomError } from "@ente/shared/error";
import { addLogLine } from "@ente/shared/logging";
import { logError } from "@ente/shared/sentry";
import { FILE_TYPE } from "constants/file";
import { BLACK_THUMBNAIL_BASE64 } from "constants/upload";
import isElectron from "is-electron";
@ -30,7 +29,7 @@ export async function generateThumbnail(
fileTypeInfo: FileTypeInfo,
): Promise<{ thumbnail: Uint8Array; hasStaticThumbnail: boolean }> {
try {
addLogLine(`generating thumbnail for ${getFileNameSize(file)}`);
log.info(`generating thumbnail for ${getFileNameSize(file)}`);
let hasStaticThumbnail = false;
let thumbnail: Uint8Array;
try {
@ -40,32 +39,26 @@ export async function generateThumbnail(
thumbnail = await generateVideoThumbnail(file, fileTypeInfo);
}
if (thumbnail.length > 1.5 * MAX_THUMBNAIL_SIZE) {
logError(
Error("thumbnail_too_large"),
"thumbnail greater than max limit",
{
log.error(
`thumbnail greater than max limit - ${JSON.stringify({
thumbnailSize: convertBytesToHumanReadable(
thumbnail.length,
),
fileSize: convertBytesToHumanReadable(file.size),
fileType: fileTypeInfo.exactType,
},
})}`,
);
}
if (thumbnail.length === 0) {
throw Error("EMPTY THUMBNAIL");
}
addLogLine(
log.info(
`thumbnail successfully generated ${getFileNameSize(file)}`,
);
} catch (e) {
logError(e, "uploading static thumbnail", {
fileFormat: fileTypeInfo.exactType,
});
addLogLine(
`thumbnail generation failed ${getFileNameSize(file)} error: ${
e.message
}`,
log.error(
`thumbnail generation failed ${getFileNameSize(file)} with format ${fileTypeInfo.exactType}`,
e,
);
thumbnail = Uint8Array.from(atob(BLACK_THUMBNAIL_BASE64), (c) =>
c.charCodeAt(0),
@ -74,7 +67,7 @@ export async function generateThumbnail(
}
return { thumbnail, hasStaticThumbnail };
} catch (e) {
logError(e, "Error generating static thumbnail");
log.error("Error generating static thumbnail", e);
throw e;
}
}
@ -105,12 +98,12 @@ const generateImageThumbnailInElectron = async (
): Promise<Uint8Array> => {
try {
const startTime = Date.now();
const thumb = await ElectronAPIs.generateImageThumbnail(
const thumb = await ensureElectron().generateImageThumbnail(
inputFile,
maxDimension,
maxSize,
);
addLogLine(
log.info(
`originalFileSize:${convertBytesToHumanReadable(
inputFile?.size,
)},thumbFileSize:${convertBytesToHumanReadable(
@ -125,7 +118,7 @@ const generateImageThumbnailInElectron = async (
e.message !==
CustomError.WINDOWS_NATIVE_IMAGE_PROCESSING_NOT_SUPPORTED
) {
logError(e, "failed to generate image thumbnail natively");
log.error("failed to generate image thumbnail natively", e);
}
throw e;
}
@ -142,12 +135,12 @@ export async function generateImageThumbnailUsingCanvas(
let timeout = null;
const isHEIC = isFileHEIC(fileTypeInfo.exactType);
if (isHEIC) {
addLogLine(`HEICConverter called for ${getFileNameSize(file)}`);
log.info(`HEICConverter called for ${getFileNameSize(file)}`);
const convertedBlob = await HeicConversionService.convert(
new Blob([await file.arrayBuffer()]),
);
file = new File([convertedBlob], file.name);
addLogLine(`${getFileNameSize(file)} successfully converted`);
log.info(`${getFileNameSize(file)} successfully converted`);
}
let image = new Image();
imageURL = URL.createObjectURL(new Blob([await file.arrayBuffer()]));
@ -198,24 +191,25 @@ async function generateVideoThumbnail(
) {
let thumbnail: Uint8Array;
try {
addLogLine(
log.info(
`ffmpeg generateThumbnail called for ${getFileNameSize(file)}`,
);
const thumbnail = await FFmpegService.generateVideoThumbnail(file);
addLogLine(
log.info(
`ffmpeg thumbnail successfully generated ${getFileNameSize(file)}`,
);
return await getUint8ArrayView(thumbnail);
} catch (e) {
addLogLine(
log.info(
`ffmpeg thumbnail generated failed ${getFileNameSize(
file,
)} error: ${e.message}`,
);
logError(e, "failed to generate thumbnail using ffmpeg", {
fileFormat: fileTypeInfo.exactType,
});
log.error(
`failed to generate thumbnail using ffmpeg for format ${fileTypeInfo.exactType}`,
e,
);
thumbnail = await generateVideoThumbnailUsingCanvas(file);
}
return thumbnail;
@ -265,7 +259,7 @@ export async function generateVideoThumbnailUsingCanvas(
const err = Error(
`${CustomError.THUMBNAIL_GENERATION_FAILED} err: ${e}`,
);
logError(e, CustomError.THUMBNAIL_GENERATION_FAILED);
log.error(CustomError.THUMBNAIL_GENERATION_FAILED, e);
reject(err);
}
});

View file

@ -1,7 +1,7 @@
import log from "@/next/log";
import { CustomError, handleUploadError } from "@ente/shared/error";
import HTTPService from "@ente/shared/network/HTTPService";
import { getEndpoint, getUploadEndpoint } from "@ente/shared/network/api";
import { logError } from "@ente/shared/sentry";
import { getToken } from "@ente/shared/storage/localStorage/helpers";
import { EnteFile } from "types/file";
import { MultipartUploadURLs, UploadFile, UploadURL } from "types/upload";
@ -30,7 +30,7 @@ class UploadHttpClient {
);
return response.data;
} catch (e) {
logError(e, "upload Files Failed");
log.error("upload Files Failed", e);
throw e;
}
}
@ -60,7 +60,7 @@ class UploadHttpClient {
}
return this.uploadURLFetchInProgress;
} catch (e) {
logError(e, "fetch upload-url failed ");
log.error("fetch upload-url failed ", e);
throw e;
}
}
@ -83,7 +83,7 @@ class UploadHttpClient {
return response.data["urls"];
} catch (e) {
logError(e, "fetch multipart-upload-url failed");
log.error("fetch multipart-upload-url failed", e);
throw e;
}
}
@ -108,7 +108,7 @@ class UploadHttpClient {
return fileUploadURL.objectKey;
} catch (e) {
if (e.message !== CustomError.UPLOAD_CANCELLED) {
logError(e, "putFile to dataStore failed ");
log.error("putFile to dataStore failed ", e);
}
throw e;
}
@ -134,7 +134,7 @@ class UploadHttpClient {
return fileUploadURL.objectKey;
} catch (e) {
if (e.message !== CustomError.UPLOAD_CANCELLED) {
logError(e, "putFile to dataStore failed ");
log.error("putFile to dataStore failed ", e);
}
throw e;
}
@ -156,7 +156,7 @@ class UploadHttpClient {
);
if (!resp?.headers?.etag) {
const err = Error(CustomError.ETAG_MISSING);
logError(err, "putFile in parts failed");
log.error("putFile in parts failed", err);
throw err;
}
return resp;
@ -164,7 +164,7 @@ class UploadHttpClient {
return response.headers.etag as string;
} catch (e) {
if (e.message !== CustomError.UPLOAD_CANCELLED) {
logError(e, "put filePart failed");
log.error("put filePart failed", e);
}
throw e;
}
@ -188,7 +188,7 @@ class UploadHttpClient {
);
if (!resp?.data?.etag) {
const err = Error(CustomError.ETAG_MISSING);
logError(err, "putFile in parts failed");
log.error("putFile in parts failed", err);
throw err;
}
return resp;
@ -196,7 +196,7 @@ class UploadHttpClient {
return response.data.etag as string;
} catch (e) {
if (e.message !== CustomError.UPLOAD_CANCELLED) {
logError(e, "put filePart failed");
log.error("put filePart failed", e);
}
throw e;
}
@ -210,7 +210,7 @@ class UploadHttpClient {
}),
);
} catch (e) {
logError(e, "put file in parts failed");
log.error("put file in parts failed", e);
throw e;
}
}
@ -229,7 +229,7 @@ class UploadHttpClient {
),
);
} catch (e) {
logError(e, "put file in parts failed");
log.error("put file in parts failed", e);
throw e;
}
}

View file

@ -1,34 +1,10 @@
import { CustomError } from "@ente/shared/error";
import { Events, eventBus } from "@ente/shared/events";
import { logError } from "@ente/shared/sentry";
import { Collection } from "types/collection";
import { EncryptedEnteFile, EnteFile } from "types/file";
import { SetFiles } from "types/gallery";
import {
FileWithCollection,
ParsedMetadataJSON,
ParsedMetadataJSONMap,
PublicUploadProps,
} from "types/upload";
import { decryptFile, getUserOwnedFiles, sortFiles } from "utils/file";
import {
areFileWithCollectionsSame,
segregateMetadataAndMediaFiles,
} from "utils/upload";
import { getLocalFiles } from "../fileService";
import {
getMetadataJSONMapKeyForJSON,
parseMetadataJSON,
} from "./metadataService";
import UIService from "./uiService";
import UploadService from "./uploadService";
import uploader from "./uploader";
import { getFileNameSize } from "@/next/file";
import log from "@/next/log";
import { ComlinkWorker } from "@/next/worker/comlink-worker";
import { getDedicatedCryptoWorker } from "@ente/shared/crypto";
import { DedicatedCryptoWorker } from "@ente/shared/crypto/internal/crypto.worker";
import { addLogLine } from "@ente/shared/logging";
import { CustomError } from "@ente/shared/error";
import { Events, eventBus } from "@ente/shared/events";
import { Remote } from "comlink";
import { UPLOAD_RESULT, UPLOAD_STAGES } from "constants/upload";
import isElectron from "is-electron";
@ -39,9 +15,30 @@ import {
} from "services/publicCollectionService";
import { getDisableCFUploadProxyFlag } from "services/userService";
import watchFolderService from "services/watchFolder/watchFolderService";
import { Collection } from "types/collection";
import { EncryptedEnteFile, EnteFile } from "types/file";
import { SetFiles } from "types/gallery";
import {
FileWithCollection,
ParsedMetadataJSON,
ParsedMetadataJSONMap,
PublicUploadProps,
} from "types/upload";
import { ProgressUpdater } from "types/upload/ui";
import uiService from "./uiService";
import { decryptFile, getUserOwnedFiles, sortFiles } from "utils/file";
import {
areFileWithCollectionsSame,
segregateMetadataAndMediaFiles,
} from "utils/upload";
import { getLocalFiles } from "../fileService";
import {
getMetadataJSONMapKeyForJSON,
parseMetadataJSON,
} from "./metadataService";
import { default as UIService, default as uiService } from "./uiService";
import uploadCancelService from "./uploadCancelService";
import UploadService from "./uploadService";
import uploader from "./uploader";
const MAX_CONCURRENT_UPLOADS = 4;
@ -126,7 +123,7 @@ class UploadManager {
this.uploadInProgress = true;
await this.updateExistingFilesAndCollections(collections);
this.uploaderName = uploaderName;
addLogLine(
log.info(
`received ${filesWithCollectionToUploadIn.length} files to upload`,
);
uiService.setFilenames(
@ -139,8 +136,8 @@ class UploadManager {
);
const { metadataJSONFiles, mediaFiles } =
segregateMetadataAndMediaFiles(filesWithCollectionToUploadIn);
addLogLine(`has ${metadataJSONFiles.length} metadata json files`);
addLogLine(`has ${mediaFiles.length} media files`);
log.info(`has ${metadataJSONFiles.length} metadata json files`);
log.info(`has ${mediaFiles.length} media files`);
if (metadataJSONFiles.length) {
UIService.setUploadStage(
UPLOAD_STAGES.READING_GOOGLE_METADATA_FILES,
@ -152,11 +149,11 @@ class UploadManager {
);
}
if (mediaFiles.length) {
addLogLine(`clusterLivePhotoFiles started`);
log.info(`clusterLivePhotoFiles started`);
const analysedMediaFiles =
await UploadService.clusterLivePhotoFiles(mediaFiles);
addLogLine(`clusterLivePhotoFiles ended`);
addLogLine(
log.info(`clusterLivePhotoFiles ended`);
log.info(
`got live photos: ${
mediaFiles.length !== analysedMediaFiles.length
}`,
@ -183,7 +180,7 @@ class UploadManager {
await ImportService.cancelRemainingUploads();
}
} else {
logError(e, "uploading failed with error");
log.error("uploading failed with error", e);
throw e;
}
} finally {
@ -200,14 +197,14 @@ class UploadManager {
return false;
}
} catch (e) {
logError(e, " failed to return shouldCloseProgressBar");
log.error(" failed to return shouldCloseProgressBar", e);
return false;
}
}
private async parseMetadataJSONFiles(metadataFiles: FileWithCollection[]) {
try {
addLogLine(`parseMetadataJSONFiles function executed `);
log.info(`parseMetadataJSONFiles function executed `);
UIService.reset(metadataFiles.length);
@ -216,7 +213,7 @@ class UploadManager {
if (uploadCancelService.isUploadCancelationRequested()) {
throw Error(CustomError.UPLOAD_CANCELLED);
}
addLogLine(
log.info(
`parsing metadata json file ${getFileNameSize(file)}`,
);
@ -231,7 +228,7 @@ class UploadManager {
);
UIService.increaseFileUploaded();
}
addLogLine(
log.info(
`successfully parsed metadata json file ${getFileNameSize(
file,
)}`,
@ -241,8 +238,8 @@ class UploadManager {
throw e;
} else {
// and don't break for subsequent files just log and move on
logError(e, "parsing failed for a file");
addLogLine(
log.error("parsing failed for a file", e);
log.info(
`failed to parse metadata json file ${getFileNameSize(
file,
)} error: ${e.message}`,
@ -252,14 +249,14 @@ class UploadManager {
}
} catch (e) {
if (e.message !== CustomError.UPLOAD_CANCELLED) {
logError(e, "error seeding MetadataMap");
log.error("error seeding MetadataMap", e);
}
throw e;
}
}
private async uploadMediaFiles(mediaFiles: FileWithCollection[]) {
addLogLine(`uploadMediaFiles called`);
log.info(`uploadMediaFiles called`);
this.filesToBeUploaded = [...this.filesToBeUploaded, ...mediaFiles];
if (isElectron()) {
@ -323,7 +320,7 @@ class UploadManager {
) {
try {
let decryptedFile: EnteFile;
addLogLine(
log.info(
`post upload action -> fileUploadResult: ${fileUploadResult} uploadedFile present ${!!uploadedFile}`,
);
await this.updateElectronRemainingFiles(fileWithCollection);
@ -368,7 +365,7 @@ class UploadManager {
fileWithCollection.livePhotoAssets.image,
});
} catch (e) {
logError(e, "Error in fileUploaded handlers");
log.error("Error in fileUploaded handlers", e);
}
this.updateExistingFiles(decryptedFile);
}
@ -379,7 +376,7 @@ class UploadManager {
);
return fileUploadResult;
} catch (e) {
logError(e, "failed to do post file upload action");
log.error("failed to do post file upload action", e);
return UPLOAD_RESULT.FAILED;
}
}
@ -399,7 +396,7 @@ class UploadManager {
}
public cancelRunningUpload() {
addLogLine("user cancelled running upload");
log.info("user cancelled running upload");
UIService.setUploadStage(UPLOAD_STAGES.CANCELLING);
uploadCancelService.requestUploadCancelation();
}

View file

@ -1,7 +1,7 @@
import log from "@/next/log";
import { DedicatedCryptoWorker } from "@ente/shared/crypto/internal/crypto.worker";
import { B64EncryptionResult } from "@ente/shared/crypto/types";
import { CustomError, handleUploadError } from "@ente/shared/error";
import { logError } from "@ente/shared/sentry";
import { Remote } from "comlink";
import { Collection } from "types/collection";
import { FilePublicMagicMetadataProps } from "types/file";
@ -231,7 +231,7 @@ class UploadService {
return backupedFile;
} catch (e) {
if (e.message !== CustomError.UPLOAD_CANCELLED) {
logError(e, "error uploading to bucket");
log.error("error uploading to bucket", e);
}
throw e;
}
@ -264,7 +264,7 @@ class UploadService {
await this.fetchUploadURLs();
// checking for any subscription related errors
} catch (e) {
logError(e, "prefetch uploadURL failed");
log.error("prefetch uploadURL failed", e);
handleUploadError(e);
}
}

View file

@ -1,6 +1,5 @@
import { getFileNameSize } from "@/next/file";
import { addLogLine } from "@ente/shared/logging";
import { logError } from "@ente/shared/sentry";
import log from "@/next/log";
import { NULL_EXTRACTED_METADATA } from "constants/upload";
import * as ffmpegService from "services/ffmpeg/ffmpegService";
import { ElectronFile } from "types/upload";
@ -8,14 +7,14 @@ import { ElectronFile } from "types/upload";
export async function getVideoMetadata(file: File | ElectronFile) {
let videoMetadata = NULL_EXTRACTED_METADATA;
try {
addLogLine(`getVideoMetadata called for ${getFileNameSize(file)}`);
log.info(`getVideoMetadata called for ${getFileNameSize(file)}`);
videoMetadata = await ffmpegService.extractVideoMetadata(file);
addLogLine(
log.info(
`videoMetadata successfully extracted ${getFileNameSize(file)}`,
);
} catch (e) {
logError(e, "failed to get video metadata");
addLogLine(
log.error("failed to get video metadata", e);
log.info(
`videoMetadata extracted failed ${getFileNameSize(file)} ,${
e.message
} `,

View file

@ -1,10 +1,10 @@
import log from "@/next/log";
import { putAttributes } from "@ente/accounts/api/user";
import { logoutUser } from "@ente/accounts/services/user";
import { getRecoveryKey } from "@ente/shared/crypto/helpers";
import { ApiError } from "@ente/shared/error";
import HTTPService from "@ente/shared/network/HTTPService";
import { getEndpoint, getFamilyPortalURL } from "@ente/shared/network/api";
import { logError } from "@ente/shared/sentry";
import localForage from "@ente/shared/storage/localForage";
import { LS_KEYS, getData } from "@ente/shared/storage/localStorage";
import {
@ -63,7 +63,7 @@ export const getFamiliesToken = async () => {
);
return resp.data["familiesToken"];
} catch (e) {
logError(e, "failed to get family token");
log.error("failed to get family token", e);
throw e;
}
};
@ -81,7 +81,7 @@ export const getAccountsToken = async () => {
);
return resp.data["accountsToken"];
} catch (e) {
logError(e, "failed to get accounts token");
log.error("failed to get accounts token", e);
throw e;
}
};
@ -99,7 +99,7 @@ export const getRoadmapRedirectURL = async () => {
);
return resp.data["url"];
} catch (e) {
logError(e, "failed to get roadmap url");
log.error("failed to get roadmap url", e);
throw e;
}
};
@ -128,15 +128,15 @@ export const isTokenValid = async (token: string) => {
getData(LS_KEYS.ORIGINAL_KEY_ATTRIBUTES),
);
} catch (e) {
logError(e, "put attribute failed");
log.error("put attribute failed", e);
}
}
} catch (e) {
logError(e, "hasSetKeys not set in session validity response");
log.error("hasSetKeys not set in session validity response", e);
}
return true;
} catch (e) {
logError(e, "session-validity api call failed");
log.error("session-validity api call failed", e);
if (
e instanceof ApiError &&
e.httpStatusCode === HttpStatusCode.Unauthorized
@ -172,7 +172,7 @@ export const getUserDetailsV2 = async (): Promise<UserDetails> => {
);
return resp.data;
} catch (e) {
logError(e, "failed to get user details v2");
log.error("failed to get user details v2", e);
throw e;
}
};
@ -185,7 +185,7 @@ export const getFamilyPortalRedirectURL = async () => {
window.location.origin
}/gallery`;
} catch (e) {
logError(e, "unable to generate to family portal URL");
log.error("unable to generate to family portal URL", e);
throw e;
}
};
@ -203,7 +203,7 @@ export const getAccountDeleteChallenge = async () => {
);
return resp.data as DeleteChallengeResponse;
} catch (e) {
logError(e, "failed to get account delete challenge");
log.error("failed to get account delete challenge", e);
throw e;
}
};
@ -228,7 +228,7 @@ export const deleteAccount = async (
},
);
} catch (e) {
logError(e, "deleteAccount api call failed");
log.error("deleteAccount api call failed", e);
throw e;
}
};
@ -262,7 +262,7 @@ export const getFaceSearchEnabledStatus = async () => {
);
return resp.data.value === "true";
} catch (e) {
logError(e, "failed to get face search enabled status");
log.error("failed to get face search enabled status", e);
throw e;
}
};
@ -282,7 +282,7 @@ export const updateFaceSearchEnabledStatus = async (newStatus: boolean) => {
},
);
} catch (e) {
logError(e, "failed to update face search enabled status");
log.error("failed to update face search enabled status", e);
throw e;
}
};
@ -292,7 +292,7 @@ export const syncMapEnabled = async () => {
const status = await getMapEnabledStatus();
setLocalMapEnabled(status);
} catch (e) {
logError(e, "failed to sync map enabled status");
log.error("failed to sync map enabled status", e);
throw e;
}
};
@ -313,7 +313,7 @@ export const getMapEnabledStatus = async () => {
);
return resp.data.value === "true";
} catch (e) {
logError(e, "failed to get map enabled status");
log.error("failed to get map enabled status", e);
throw e;
}
};
@ -333,7 +333,7 @@ export const updateMapEnabledStatus = async (newStatus: boolean) => {
},
);
} catch (e) {
logError(e, "failed to update map enabled status");
log.error("failed to update map enabled status", e);
throw e;
}
};
@ -363,7 +363,7 @@ export async function getDisableCFUploadProxyFlag(): Promise<boolean> {
).json() as GetFeatureFlagResponse;
return featureFlags.disableCFUploadProxy;
} catch (e) {
logError(e, "failed to get feature flags");
log.error("failed to get feature flags", e);
return false;
}
}

View file

@ -1,5 +1,4 @@
import { addLogLine } from "@ente/shared/logging";
import { logError } from "@ente/shared/sentry";
import log from "@/next/log";
import { promiseWithTimeout } from "@ente/shared/utils";
import QueueProcessor from "@ente/shared/utils/queueProcessor";
import { generateTempName } from "@ente/shared/utils/temp";
@ -51,7 +50,7 @@ export class WasmFFmpeg {
try {
return await response.promise;
} catch (e) {
logError(e, "ffmpeg run failed");
log.error("ffmpeg run failed", e);
throw e;
}
}
@ -86,7 +85,7 @@ export class WasmFFmpeg {
return cmdPart;
}
});
addLogLine(`${cmd}`);
log.info(`${cmd}`);
await this.ffmpeg.run(...cmd);
return new File(
[this.ffmpeg.FS("readFile", tempOutputFilePath)],
@ -96,12 +95,12 @@ export class WasmFFmpeg {
try {
this.ffmpeg.FS("unlink", tempInputFilePath);
} catch (e) {
logError(e, "unlink input file failed");
log.error("unlink input file failed", e);
}
try {
this.ffmpeg.FS("unlink", tempOutputFilePath);
} catch (e) {
logError(e, "unlink output file failed");
log.error("unlink output file failed", e);
}
}
}

View file

@ -1,5 +1,4 @@
import { addLogLine } from "@ente/shared/logging";
import { logError } from "@ente/shared/sentry";
import log from "@/next/log";
import { ElectronFile } from "types/upload";
import { EventQueueItem } from "types/watchFolder";
import watchFolderService from "./watchFolderService";
@ -22,11 +21,11 @@ export async function diskFileAddedCallback(file: ElectronFile) {
files: [file],
};
watchFolderService.pushEvent(event);
addLogLine(
log.info(
`added (upload) to event queue, collectionName:${event.collectionName} folderPath:${event.folderPath}, filesCount: ${event.files.length}`,
);
} catch (e) {
logError(e, "error while calling diskFileAddedCallback");
log.error("error while calling diskFileAddedCallback", e);
}
}
@ -48,11 +47,11 @@ export async function diskFileRemovedCallback(filePath: string) {
paths: [filePath],
};
watchFolderService.pushEvent(event);
addLogLine(
log.info(
`added (trash) to event queue collectionName:${event.collectionName} folderPath:${event.folderPath} , pathsCount: ${event.paths.length}`,
);
} catch (e) {
logError(e, "error while calling diskFileRemovedCallback");
log.error("error while calling diskFileRemovedCallback", e);
}
}
@ -63,12 +62,12 @@ export async function diskFolderRemovedCallback(folderPath: string) {
(mapping) => mapping.folderPath === folderPath,
);
if (!mapping) {
addLogLine(`folder not found in mappings, ${folderPath}`);
log.info(`folder not found in mappings, ${folderPath}`);
throw Error(`Watch mapping not found`);
}
watchFolderService.pushTrashedDir(folderPath);
addLogLine(`added trashedDir, ${folderPath}`);
log.info(`added trashedDir, ${folderPath}`);
} catch (e) {
logError(e, "error while calling diskFolderRemovedCallback");
log.error("error while calling diskFolderRemovedCallback", e);
}
}

View file

@ -1,4 +1,4 @@
import ElectronAPIs from "@/next/electron";
import { ensureElectron } from "@/next/electron";
import log from "@/next/log";
import { UPLOAD_RESULT, UPLOAD_STRATEGY } from "constants/upload";
import debounce from "debounce";
@ -83,7 +83,7 @@ class watchFolderService {
for (const mapping of mappings) {
const filesOnDisk: ElectronFile[] =
await ElectronAPIs.getDirFiles(mapping.folderPath);
await ensureElectron().getDirFiles(mapping.folderPath);
this.uploadDiffOfFiles(mapping, filesOnDisk);
this.trashDiffOfFiles(mapping, filesOnDisk);
@ -150,11 +150,11 @@ class watchFolderService {
): Promise<WatchMapping[]> {
const notDeletedMappings = [];
for (const mapping of mappings) {
const mappingExists = await ElectronAPIs.isFolder(
const mappingExists = await ensureElectron().isFolder(
mapping.folderPath,
);
if (!mappingExists) {
ElectronAPIs.removeWatchMapping(mapping.folderPath);
ensureElectron().removeWatchMapping(mapping.folderPath);
} else {
notDeletedMappings.push(mapping);
}
@ -172,7 +172,7 @@ class watchFolderService {
}
private setupWatcherFunctions() {
ElectronAPIs.registerWatcherFunctions(
ensureElectron().registerWatcherFunctions(
diskFileAddedCallback,
diskFileRemovedCallback,
diskFolderRemovedCallback,
@ -185,7 +185,7 @@ class watchFolderService {
uploadStrategy: UPLOAD_STRATEGY,
) {
try {
await ElectronAPIs.addWatchMapping(
await ensureElectron().addWatchMapping(
rootFolderName,
folderPath,
uploadStrategy,
@ -198,7 +198,7 @@ class watchFolderService {
async removeWatchMapping(folderPath: string) {
try {
await ElectronAPIs.removeWatchMapping(folderPath);
await ensureElectron().removeWatchMapping(folderPath);
} catch (e) {
log.error("error while removing watch mapping", e);
}
@ -206,7 +206,7 @@ class watchFolderService {
async getWatchMappings(): Promise<WatchMapping[]> {
try {
return (await ElectronAPIs.getWatchMappings()) ?? [];
return (await ensureElectron().getWatchMappings()) ?? [];
} catch (e) {
log.error("error while getting watch mappings", e);
return [];
@ -378,7 +378,7 @@ class watchFolderService {
...this.currentlySyncedMapping.syncedFiles,
...syncedFiles,
];
await ElectronAPIs.updateWatchMappingSyncedFiles(
await ensureElectron().updateWatchMappingSyncedFiles(
this.currentlySyncedMapping.folderPath,
this.currentlySyncedMapping.syncedFiles,
);
@ -388,7 +388,7 @@ class watchFolderService {
...this.currentlySyncedMapping.ignoredFiles,
...ignoredFiles,
];
await ElectronAPIs.updateWatchMappingIgnoredFiles(
await ensureElectron().updateWatchMappingIgnoredFiles(
this.currentlySyncedMapping.folderPath,
this.currentlySyncedMapping.ignoredFiles,
);
@ -503,7 +503,7 @@ class watchFolderService {
this.currentlySyncedMapping.syncedFiles.filter(
(file) => !filePathsToRemove.has(file.path),
);
await ElectronAPIs.updateWatchMappingSyncedFiles(
await ensureElectron().updateWatchMappingSyncedFiles(
this.currentlySyncedMapping.folderPath,
this.currentlySyncedMapping.syncedFiles,
);
@ -595,7 +595,7 @@ class watchFolderService {
async selectFolder(): Promise<string> {
try {
const folderPath = await ElectronAPIs.selectDirectory();
const folderPath = await ensureElectron().selectDirectory();
return folderPath;
} catch (e) {
log.error("error while selecting folder", e);
@ -623,7 +623,7 @@ class watchFolderService {
async isFolder(folderPath: string) {
try {
const isFolder = await ElectronAPIs.isFolder(folderPath);
const isFolder = await ensureElectron().isFolder(folderPath);
return isFolder;
} catch (e) {
log.error("error while checking if folder exists", e);

View file

@ -1,9 +1,8 @@
import { t } from "i18next";
import log from "@/next/log";
import { SetDialogBoxAttributes } from "@ente/shared/components/DialogBox/types";
import { logError } from "@ente/shared/sentry";
import { LS_KEYS, getData } from "@ente/shared/storage/localStorage";
import { REDIRECTS, getRedirectURL } from "constants/redirects";
import { t } from "i18next";
import { NextRouter } from "next/router";
import billingService from "services/billingService";
import { Plan, Subscription } from "types/billing";
@ -261,8 +260,8 @@ export async function manageFamilyMethod(
setLoading(true);
const familyPortalRedirectURL = getRedirectURL(REDIRECTS.FAMILIES);
openLink(familyPortalRedirectURL, true);
} catch (error) {
logError(error, "failed to redirect to family portal");
} catch (e) {
log.error("failed to redirect to family portal", e);
setDialogMessage({
title: t("ERROR"),
content: t("UNKNOWN_ERROR"),
@ -308,7 +307,7 @@ function handleFailureReason(
setDialogMessage: SetDialogBoxAttributes,
setLoading: SetLoading,
): void {
logError(Error(reason), "subscription purchase failed");
log.error(`subscription purchase failed: ${reason}`);
switch (reason) {
case FAILURE_REASON.CANCELED:
setDialogMessage({

View file

@ -1,8 +1,6 @@
import ElectronAPIs from "@/next/electron";
import log from "@/next/log";
import { CustomError } from "@ente/shared/error";
import { addLogLine } from "@ente/shared/logging";
import { getAlbumsURL } from "@ente/shared/network/api";
import { logError } from "@ente/shared/sentry";
import { LS_KEYS, getData } from "@ente/shared/storage/localStorage";
import { getUnixTimeInMicroSecondsWithDelta } from "@ente/shared/time";
import { User } from "@ente/shared/user/types";
@ -18,7 +16,6 @@ import {
SYSTEM_COLLECTION_TYPES,
} from "constants/collection";
import { t } from "i18next";
import isElectron from "is-electron";
import {
addToCollection,
createAlbum,
@ -119,7 +116,7 @@ export async function downloadCollectionHelper(
setFilesDownloadProgressAttributes,
);
} catch (e) {
logError(e, "download collection failed ");
log.error("download collection failed ", e);
}
}
@ -140,7 +137,7 @@ export async function downloadDefaultHiddenCollectionHelper(
setFilesDownloadProgressAttributes,
);
} catch (e) {
logError(e, "download hidden files failed ");
log.error("download hidden files failed ", e);
}
}
@ -153,8 +150,9 @@ export async function downloadCollectionFiles(
return;
}
let downloadDirPath: string;
if (isElectron()) {
const selectedDir = await ElectronAPIs.selectDirectory();
const electron = globalThis.electron;
if (electron) {
const selectedDir = await electron.selectDirectory();
if (!selectedDir) {
return;
}
@ -272,7 +270,7 @@ export const changeCollectionVisibility = async (
);
}
} catch (e) {
logError(e, "change collection visibility failed");
log.error("change collection visibility failed", e);
throw e;
}
};
@ -298,7 +296,7 @@ export const changeCollectionSortOrder = async (
updatedPubMagicMetadata,
);
} catch (e) {
logError(e, "change collection sort order failed");
log.error("change collection sort order failed", e);
}
};
@ -319,7 +317,7 @@ export const changeCollectionOrder = async (
await updateCollectionMagicMetadata(collection, updatedMagicMetadata);
} catch (e) {
logError(e, "change collection order failed");
log.error("change collection order failed", e);
}
};
@ -339,7 +337,7 @@ export const changeCollectionSubType = async (
);
await updateCollectionMagicMetadata(collection, updatedMagicMetadata);
} catch (e) {
logError(e, "change collection subType failed");
log.error("change collection subType failed", e);
throw e;
}
};
@ -568,13 +566,13 @@ export const getOrCreateAlbum = async (
}
for (const collection of existingCollections) {
if (isValidReplacementAlbum(collection, user, albumName)) {
addLogLine(
log.info(
`Found existing album ${albumName} with id ${collection.id}`,
);
return collection;
}
}
const album = await createAlbum(albumName);
addLogLine(`Created new album ${albumName} with id ${album.id}`);
log.info(`Created new album ${albumName} with id ${album.id}`);
return album;
};

View file

@ -1,4 +1,4 @@
import { addLogLine } from "@ente/shared/logging";
import log from "@/next/log";
import { JobConfig, JobResult, JobState } from "types/common/job";
export class SimpleJob<R extends JobResult> {
@ -27,7 +27,7 @@ export class SimpleJob<R extends JobResult> {
if (this.state !== "Running") {
this.scheduleNext();
} else {
addLogLine("Job already running, not scheduling");
log.info("Job already running, not scheduling");
}
}
@ -41,7 +41,7 @@ export class SimpleJob<R extends JobResult> {
this.intervalSec * 1000,
);
this.state = "Scheduled";
addLogLine("Scheduled next job after: ", this.intervalSec);
log.info("Scheduled next job after: ", this.intervalSec);
}
async run() {
@ -58,7 +58,7 @@ export class SimpleJob<R extends JobResult> {
} else {
this.resetInterval();
}
addLogLine("Job completed");
log.info("Job completed");
} catch (e) {
console.error("Error while running Job: ", e);
} finally {
@ -77,6 +77,6 @@ export class SimpleJob<R extends JobResult> {
clearTimeout(this.nextTimeoutId);
this.nextTimeoutId = undefined;
this.state = "NotScheduled";
addLogLine("Cleared next job");
log.info("Cleared next job");
}
}

View file

@ -1,6 +1,6 @@
import ElectronAPIs from "@/next/electron";
import { convertBytesToHumanReadable } from "@/next/file";
import log from "@/next/log";
import type { Electron } from "@/next/types/ipc";
import { workerBridge } from "@/next/worker/worker-bridge";
import ComlinkCryptoWorker from "@ente/shared/crypto";
import { CustomError } from "@ente/shared/error";
@ -489,8 +489,9 @@ const convertToJPEGInElectron = async (
try {
const startTime = Date.now();
const inputFileData = new Uint8Array(await fileBlob.arrayBuffer());
const convertedFileData = isElectron()
? await ElectronAPIs.convertToJPEG(inputFileData, filename)
const electron = globalThis.electron;
const convertedFileData = electron
? await electron.convertToJPEG(inputFileData, filename)
: await workerBridge.convertToJPEG(inputFileData, filename);
log.info(
`originalFileSize:${convertBytesToHumanReadable(
@ -688,8 +689,10 @@ export async function downloadFilesWithProgress(
canceller,
});
if (isElectron()) {
const electron = globalThis.electron;
if (electron) {
await downloadFilesDesktop(
electron,
files,
{ increaseSuccess, increaseFailed, isCancelled },
downloadDirPath,
@ -711,8 +714,9 @@ export async function downloadSelectedFiles(
return;
}
let downloadDirPath: string;
if (isElectron()) {
downloadDirPath = await ElectronAPIs.selectDirectory();
const electron = globalThis.electron;
if (electron) {
downloadDirPath = await electron.selectDirectory();
if (!downloadDirPath) {
return;
}
@ -729,8 +733,9 @@ export async function downloadSingleFile(
setFilesDownloadProgressAttributes: SetFilesDownloadProgressAttributes,
) {
let downloadDirPath: string;
if (isElectron()) {
downloadDirPath = await ElectronAPIs.selectDirectory();
const electron = globalThis.electron;
if (electron) {
downloadDirPath = await electron.selectDirectory();
if (!downloadDirPath) {
return;
}
@ -764,7 +769,8 @@ export async function downloadFiles(
}
}
export async function downloadFilesDesktop(
async function downloadFilesDesktop(
electron: Electron,
files: EnteFile[],
progressBarUpdater: {
increaseSuccess: () => void;
@ -779,7 +785,7 @@ export async function downloadFilesDesktop(
if (progressBarUpdater?.isCancelled()) {
return;
}
await downloadFileDesktop(fileReader, file, downloadPath);
await downloadFileDesktop(electron, fileReader, file, downloadPath);
progressBarUpdater?.increaseSuccess();
} catch (e) {
log.error("download fail for file", e);
@ -788,7 +794,8 @@ export async function downloadFilesDesktop(
}
}
export async function downloadFileDesktop(
async function downloadFileDesktop(
electron: Electron,
fileReader: FileReader,
file: EnteFile,
downloadPath: string,
@ -810,7 +817,7 @@ export async function downloadFileDesktop(
livePhoto.imageNameTitle,
);
const imageStream = generateStreamFromArrayBuffer(livePhoto.image);
await ElectronAPIs.saveStreamToDisk(
await electron.saveStreamToDisk(
getFileExportPath(downloadPath, imageExportName),
imageStream,
);
@ -820,12 +827,12 @@ export async function downloadFileDesktop(
livePhoto.videoNameTitle,
);
const videoStream = generateStreamFromArrayBuffer(livePhoto.video);
await ElectronAPIs.saveStreamToDisk(
await electron.saveStreamToDisk(
getFileExportPath(downloadPath, videoExportName),
videoStream,
);
} catch (e) {
await ElectronAPIs.deleteFile(
await electron.deleteFile(
getFileExportPath(downloadPath, imageExportName),
);
throw e;
@ -835,7 +842,7 @@ export async function downloadFileDesktop(
downloadPath,
file.metadata.title,
);
await ElectronAPIs.saveStreamToDisk(
await electron.saveStreamToDisk(
getFileExportPath(downloadPath, fileExportName),
updatedFileStream,
);

View file

@ -80,7 +80,7 @@ export function cropWithRotation(
}
}
// addLogLine({ imageBitmap, box, outputSize });
// log.info({ imageBitmap, box, outputSize });
const offscreen = new OffscreenCanvas(outputSize.width, outputSize.height);
const offscreenCtx = offscreen.getContext("2d");

View file

@ -61,7 +61,7 @@ export function getFaceAlignmentUsingSimilarityTransform(
simTransform.rotation.get(0, 1),
simTransform.rotation.get(0, 0),
);
// addLogLine({ affineMatrix, meanTranslation, centerMat, center, toMean: simTransform.toMean, fromMean: simTransform.fromMean, size });
// log.info({ affineMatrix, meanTranslation, centerMat, center, toMean: simTransform.toMean, fromMean: simTransform.fromMean, size });
return {
affineMatrix,
@ -169,7 +169,7 @@ export function ibExtractFaceImageUsingTransform(
const scaledMatrix = new Matrix(alignment.affineMatrix)
.mul(faceSize)
.to2DArray();
// addLogLine("scaledMatrix: ", scaledMatrix);
// log.info("scaledMatrix: ", scaledMatrix);
return transform(image, scaledMatrix, faceSize, faceSize);
}
@ -230,7 +230,7 @@ export function getRotatedFaceImage(
padding: number = 1.5,
): tf.Tensor4D {
const paddedBox = enlargeBox(faceDetection.box, padding);
// addLogLine("paddedBox", paddedBox);
// log.info("paddedBox", paddedBox);
const landmarkPoints = faceDetection.landmarks;
return tf.tidy(() => {
@ -245,15 +245,15 @@ export function getRotatedFaceImage(
foreheadCenter,
); // landmarkPoints[BLAZEFACE_NOSE_INDEX]
// angle = computeRotation(leftEye, rightEye);
// addLogLine('angle: ', angle);
// log.info('angle: ', angle);
const faceCenter = getBoxCenter(faceDetection.box);
// addLogLine('faceCenter: ', faceCenter);
// log.info('faceCenter: ', faceCenter);
const faceCenterNormalized: [number, number] = [
faceCenter.x / tf4dFloat32Image.shape[2],
faceCenter.y / tf4dFloat32Image.shape[1],
];
// addLogLine('faceCenterNormalized: ', faceCenterNormalized);
// log.info('faceCenterNormalized: ', faceCenterNormalized);
let rotatedImage = tf4dFloat32Image;
if (angle !== 0) {

View file

@ -1,4 +1,4 @@
import { addLogLine } from "@ente/shared/logging";
import log from "@/next/log";
import { CacheStorageService } from "@ente/shared/storage/cacheStorage";
import { CACHES } from "@ente/shared/storage/cacheStorage/constants";
import { getBlobFromCache } from "@ente/shared/storage/cacheStorage/helpers";
@ -105,7 +105,7 @@ export async function removeOldFaceCrops(
}
export async function removeFaceCropUrls(faceCropUrls: Array<string>) {
addLogLine("Removing face crop urls: ", JSON.stringify(faceCropUrls));
log.info("Removing face crop urls: ", JSON.stringify(faceCropUrls));
const faceCropCache = await CacheStorageService.open(CACHES.FACE_CROPS);
const urlRemovalPromises = faceCropUrls?.map((url) =>
faceCropCache.delete(url),
@ -132,7 +132,7 @@ export function extractFaceImageFromCrop(
.shift(-imageBox.x, -imageBox.y)
.rescale(scale)
.round();
// addLogLine({ box, imageBox, faceCropImage, scale, scaledBox, scaledImageBox, shiftedBox });
// log.info({ box, imageBox, faceCropImage, scale, scaledBox, scaledImageBox, shiftedBox });
const faceSizeDimentions: Dimensions = {
width: faceSize,

View file

@ -1,4 +1,4 @@
import { addLogLine } from "@ente/shared/logging";
import log from "@/next/log";
import { CACHES } from "@ente/shared/storage/cacheStorage/constants";
import { cached } from "@ente/shared/storage/cacheStorage/helpers";
import * as tf from "@tensorflow/tfjs-core";
@ -130,7 +130,7 @@ export function extractFaces(
];
});
// addLogLine('boxes: ', boxes[0]);
// log.info('boxes: ', boxes[0]);
const faceImagesTensor = tf.image.cropAndResize(
reshapedImage,
@ -356,14 +356,14 @@ export async function getOriginalImageBitmap(
} else {
fileBlob = await getOriginalConvertedFile(file, queue);
}
addLogLine("[MLService] Got file: ", file.id.toString());
log.info("[MLService] Got file: ", file.id.toString());
return getImageBlobBitmap(fileBlob);
}
export async function getThumbnailImageBitmap(file: EnteFile) {
const thumb = await DownloadManager.getThumbnail(file);
addLogLine("[MLService] Got thumbnail: ", file.id.toString());
log.info("[MLService] Got thumbnail: ", file.id.toString());
return getImageBlobBitmap(new Blob([thumb]));
}
@ -380,7 +380,7 @@ export async function getLocalFileImageBitmap(
export async function getPeopleList(file: EnteFile): Promise<Array<Person>> {
let startTime = Date.now();
const mlFileData: MlFileData = await mlIDbStorage.getFile(file.id);
addLogLine(
log.info(
"getPeopleList:mlFilesStore:getItem",
Date.now() - startTime,
"ms",
@ -395,18 +395,18 @@ export async function getPeopleList(file: EnteFile): Promise<Array<Person>> {
if (!peopleIds || peopleIds.length < 1) {
return [];
}
// addLogLine("peopleIds: ", peopleIds);
// log.info("peopleIds: ", peopleIds);
startTime = Date.now();
const peoplePromises = peopleIds.map(
(p) => mlIDbStorage.getPerson(p) as Promise<Person>,
);
const peopleList = await Promise.all(peoplePromises);
addLogLine(
log.info(
"getPeopleList:mlPeopleStore:getItems",
Date.now() - startTime,
"ms",
);
// addLogLine("peopleList: ", peopleList);
// log.info("peopleList: ", peopleList);
return peopleList;
}
@ -514,7 +514,7 @@ export function getNearestPointIndex(
(a, b) => Math.abs(a.distance) - Math.abs(b.distance),
);
// addLogLine('Nearest dist: ', nearest.distance, maxDistance);
// log.info('Nearest dist: ', nearest.distance, maxDistance);
if (!maxDistance || nearest.distance <= maxDistance) {
return nearest.index;
}
@ -522,11 +522,11 @@ export function getNearestPointIndex(
export function logQueueStats(queue: PQueue, name: string) {
queue.on("active", () =>
addLogLine(
log.info(
`queuestats: ${name}: Active, Size: ${queue.size} Pending: ${queue.pending}`,
),
);
queue.on("idle", () => addLogLine(`queuestats: ${name}: Idle`));
queue.on("idle", () => log.info(`queuestats: ${name}: Idle`));
queue.on("error", (error) =>
console.error(`queuestats: ${name}: Error, `, error),
);

View file

@ -1,4 +1,4 @@
import { logError } from "@ente/shared/sentry";
import log from "@/next/log";
import { FILE_TYPE } from "constants/file";
import { LivePhotoSourceURL, SourceURLs } from "services/download";
import { EnteFile } from "types/file";
@ -94,10 +94,7 @@ export async function updateFileSrcProps(
} else if (file.metadata.fileType === FILE_TYPE.IMAGE) {
file.src = url as string;
} else {
logError(
Error(`unknown file type - ${file.metadata.fileType}`),
"Unknown file type",
);
log.error(`unknown file type - ${file.metadata.fileType}`);
file.src = url as string;
}
}

Some files were not shown because too many files have changed in this diff Show more