Merge branch 'master' into ui-redesign

This commit is contained in:
Abhinav 2022-06-06 14:57:32 +05:30
commit 40ad1d2db2
61 changed files with 1810 additions and 571 deletions

.github/workflows/codeql-analysis.yml (new file)

@ -0,0 +1,72 @@
# For most projects, this workflow file will not need changing; you simply need
# to commit it to your repository.
#
# You may wish to alter this file to override the set of languages analyzed,
# or to provide custom queries or build logic.
#
# ******** NOTE ********
# We have attempted to detect the languages in your repository. Please check
# the `language` matrix defined below to confirm you have the correct set of
# supported CodeQL languages.
#
name: "CodeQL"
on:
push:
branches: [ master, release ]
pull_request:
# The branches below must be a subset of the branches above
branches: [ master ]
schedule:
- cron: '34 0 * * 2'
jobs:
analyze:
name: Analyze
runs-on: ubuntu-latest
permissions:
actions: read
contents: read
security-events: write
strategy:
fail-fast: false
matrix:
language: [ 'javascript' ]
# CodeQL supports [ 'cpp', 'csharp', 'go', 'java', 'javascript', 'python', 'ruby' ]
# Learn more about CodeQL language support at https://aka.ms/codeql-docs/language-support
steps:
- name: Checkout repository
uses: actions/checkout@v3
# Initializes the CodeQL tools for scanning.
- name: Initialize CodeQL
uses: github/codeql-action/init@v2
with:
languages: ${{ matrix.language }}
# If you wish to specify custom queries, you can do so here or in a config file.
# By default, queries listed here will override any specified in a config file.
# Prefix the list here with "+" to use these queries and those in the config file.
# For details on CodeQL's query packs, refer to: https://docs.github.com/en/code-security/code-scanning/automatically-scanning-your-code-for-vulnerabilities-and-errors/configuring-code-scanning#using-queries-in-ql-packs
# queries: security-extended,security-and-quality
# Autobuild attempts to build any compiled languages (C/C++, C#, or Java).
# If this step fails, then you should remove it and run the build manually (see below)
- name: Autobuild
uses: github/codeql-action/autobuild@v2
# Command-line programs to run using the OS shell.
# 📚 See https://docs.github.com/en/actions/using-workflows/workflow-syntax-for-github-actions#jobsjob_idstepsrun
# If the Autobuild fails above, remove it and uncomment the following three lines,
# then modify them (or add more) to build your code. If your project needs custom build steps, please refer to the EXAMPLE below for guidance.
# - run: |
# echo "Run, Build Application using script"
# ./location_of_script_within_repo/buildscript.sh
- name: Perform CodeQL Analysis
uses: github/codeql-action/analyze@v2

.gitmodules

@ -4,5 +4,5 @@
branch = master
[submodule "ffmpeg-wasm"]
path = thirdparty/ffmpeg-wasm
url = git@github.com:abhinavkgrd/ffmpeg.wasm.git
url = https://github.com/abhinavkgrd/ffmpeg.wasm.git
branch = single-thread


@ -1,6 +1,6 @@
{
"name": "bada-frame",
"version": "0.9.0",
"version": "0.9.1",
"private": true,
"scripts": {
"dev": "next dev",


@ -1,2 +1,3 @@
User-agent: *
Allow: /.well-known/*
Disallow:


@ -27,6 +27,8 @@ import { DeduplicateContext } from 'pages/deduplicate';
import { IsArchived } from 'utils/magicMetadata';
import { isSameDayAnyYear, isInsideBox } from 'utils/search';
import { Search } from 'types/search';
import { logError } from 'utils/sentry';
import { CustomError } from 'utils/error';
const Container = styled.div`
display: block;
@ -61,6 +63,7 @@ interface Props {
activeCollection: number;
isSharedCollection?: boolean;
enableDownload?: boolean;
isDeduplicating?: boolean;
}
type SourceURL = {
@ -84,6 +87,7 @@ const PhotoFrame = ({
activeCollection,
isSharedCollection,
enableDownload,
isDeduplicating,
}: Props) => {
const [open, setOpen] = useState(false);
const [currentIndex, setCurrentIndex] = useState<number>(0);
@ -192,6 +196,7 @@ const PhotoFrame = ({
return false;
}
if (
!isDeduplicating &&
activeCollection === ALL_SECTION &&
(IsArchived(item) ||
archivedCollections?.has(item.collectionID))
@ -242,7 +247,15 @@ const PhotoFrame = ({
}
}, [open]);
const updateURL = (index: number) => (url: string) => {
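// Looks up a file's position in the array by its id; throws CustomError.FILE_ID_NOT_FOUND when the id is absent.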
const getFileIndexFromID = (files: EnteFile[], id: number) => {
const index = files.findIndex((file) => file.id === id);
if (index === -1) {
throw CustomError.FILE_ID_NOT_FOUND;
}
return index;
};
const updateURL = (id: number) => (url: string) => {
const updateFile = (file: EnteFile) => {
file = {
...file,
@ -280,13 +293,15 @@ const PhotoFrame = ({
return file;
};
setFiles((files) => {
const index = getFileIndexFromID(files, id);
files[index] = updateFile(files[index]);
return files;
});
const index = getFileIndexFromID(files, id);
return updateFile(files[index]);
};
const updateSrcURL = async (index: number, srcURL: SourceURL) => {
const updateSrcURL = async (id: number, srcURL: SourceURL) => {
const { videoURL, imageURL } = srcURL;
const isPlayable = videoURL && (await isPlaybackPossible(videoURL));
const updateFile = (file: EnteFile) => {
@ -342,10 +357,12 @@ const PhotoFrame = ({
return file;
};
setFiles((files) => {
const index = getFileIndexFromID(files, id);
files[index] = updateFile(files[index]);
return files;
});
setIsSourceLoaded(true);
const index = getFileIndexFromID(files, id);
return updateFile(files[index]);
};
@ -416,7 +433,7 @@ const PhotoFrame = ({
selected[files[index].id] ?? false
}`}
file={files[index]}
updateURL={updateURL(files[index].dataIndex)}
updateURL={updateURL(files[index].id)}
onClick={onThumbnailClick(index)}
selectable={!isSharedCollection}
onSelect={handleSelect(files[index].id, index)}
@ -462,7 +479,7 @@ const PhotoFrame = ({
}
galleryContext.thumbs.set(item.id, url);
}
const newFile = updateURL(item.dataIndex)(url);
const newFile = updateURL(item.id)(url);
item.msrc = newFile.msrc;
item.html = newFile.html;
item.src = newFile.src;
@ -471,17 +488,23 @@ const PhotoFrame = ({
try {
instance.invalidateCurrItems();
instance.updateSize(true);
if (instance.isOpen()) {
instance.updateSize(true);
}
} catch (e) {
logError(
e,
'updating photoswipe after msrc url update failed'
);
// ignore
}
} catch (e) {
// no-op
logError(e, 'getSlideData failed get msrc url failed');
}
}
if (!fetching[item.dataIndex]) {
if (!fetching[item.id]) {
try {
fetching[item.dataIndex] = true;
fetching[item.id] = true;
let urls: string[];
if (galleryContext.files.has(item.id)) {
const mergedURL = galleryContext.files.get(item.id);
@ -514,7 +537,7 @@ const PhotoFrame = ({
[imageURL] = urls;
}
setIsSourceLoaded(false);
const newFile = await updateSrcURL(item.dataIndex, {
const newFile = await updateSrcURL(item.id, {
imageURL,
videoURL,
});
@ -525,14 +548,21 @@ const PhotoFrame = ({
item.h = newFile.h;
try {
instance.invalidateCurrItems();
instance.updateSize(true);
if (instance.isOpen()) {
instance.updateSize(true);
}
} catch (e) {
logError(
e,
'updating photoswipe after src url update failed'
);
// ignore
}
} catch (e) {
logError(e, 'getSlideData failed get src url failed');
// no-op
} finally {
fetching[item.dataIndex] = false;
fetching[item.id] = false;
}
}
};


@ -56,7 +56,11 @@ export default function HelpSection() {
{constants.REQUEST_FEATURE}
</SidebarButton>
<SidebarButton onClick={initToSupportMail}>
{constants.SUPPORT}
<a
style={{ textDecoration: 'none', color: 'inherit' }}
href="mailto:contact@ente.io">
{constants.SUPPORT}
</a>
</SidebarButton>
<SidebarButton onClick={exportFiles}>
<div style={{ display: 'flex' }}>


@ -14,16 +14,41 @@ import { convertBytesToHumanReadable } from 'utils/billing';
interface Iprops {
userDetails: UserDetails;
closeSidebar: () => void;
}
export default function SubscriptionDetails({ userDetails }: Iprops) {
// const { setDialogMessage } = useContext(AppContext);
// async function onLeaveFamilyClick() {
// try {
// await billingService.leaveFamily();
// closeSidebar();
// } catch (e) {
// setDialogMessage({
// title: constants.ERROR,
// staticBackdrop: true,
// close: { variant: 'danger' },
// content: constants.UNKNOWN_ERROR,
// });
// }
// }
// const { showPlanSelectorModal } = useContext(GalleryContext);
// function onManageClick() {
// closeSidebar();
// showPlanSelectorModal();
// }
return (
<Box
display="flex"
flexDirection={'column'}
height={160}
bgcolor="accent.main"
position={'relative'}>
// position={'relative'}
// onClick={onManageClick}
>
{userDetails ? (
<>
<Box padding={2}>
@ -92,4 +117,71 @@ export default function SubscriptionDetails({ userDetails }: Iprops) {
)}
</Box>
);
{
/* {!hasNonAdminFamilyMembers(userDetails.familyData) ||
isFamilyAdmin(userDetails.familyData) ? (
<div style={{ color: '#959595' }}>
{isSubscriptionActive(userDetails.subscription) ? (
isOnFreePlan(userDetails.subscription) ? (
constants.FREE_SUBSCRIPTION_INFO(
userDetails.subscription?.expiryTime
)
) : isSubscriptionCancelled(
userDetails.subscription
) ? (
constants.RENEWAL_CANCELLED_SUBSCRIPTION_INFO(
userDetails.subscription?.expiryTime
)
) : (
constants.RENEWAL_ACTIVE_SUBSCRIPTION_INFO(
userDetails.subscription?.expiryTime
)
)
) : (
<p>{constants.SUBSCRIPTION_EXPIRED(onManageClick)}</p>
)}
<Button onClick={onManageClick}>
{isSubscribed(userDetails.subscription)
? constants.MANAGE
: constants.SUBSCRIBE}
</Button>
</div>
) : (
<div style={{ color: '#959595' }}>
{constants.FAMILY_PLAN_MANAGE_ADMIN_ONLY(
getFamilyPlanAdmin(userDetails.familyData)?.email
)}
<Button
onClick={() =>
setDialogMessage({
title: `${constants.LEAVE_FAMILY}`,
content: constants.LEAVE_FAMILY_CONFIRM,
staticBackdrop: true,
proceed: {
text: constants.LEAVE_FAMILY,
action: onLeaveFamilyClick,
variant: 'danger',
},
close: { text: constants.CANCEL },
})
}>
{constants.LEAVE_FAMILY}
</Button>
</div>
)}
{hasNonAdminFamilyMembers(userDetails.familyData)
? constants.FAMILY_USAGE_INFO(
userDetails.usage,
convertBytesToHumanReadable(
getStorage(userDetails.familyData)
)
)
: constants.USAGE_INFO(
userDetails.usage,
convertBytesToHumanReadable(
userDetails.subscription?.storage
)
)} */
}
}


@ -21,7 +21,10 @@ export default function Sidebar({ collectionSummaries }: Iprops) {
<DrawerSidebar open={sidebarView} onClose={closeSidebar}>
<HeaderSection closeSidebar={closeSidebar} />
<PaddedDivider spaced />
<UserDetailsSection sidebarView={sidebarView} />
<UserDetailsSection
sidebarView={sidebarView}
closeSidebar={closeSidebar}
/>
<PaddedDivider invisible />
<NavigationSection
closeSidebar={closeSidebar}


@ -2,7 +2,7 @@ import React, { useEffect } from 'react';
import { SpaceBetweenFlex } from 'components/Container';
import { PaddedDivider } from './styledComponents';
import SubscriptionDetails from './SubscriptionDetails';
import { getUserDetails } from 'services/userService';
import { getUserDetailsV2 } from 'services/userService';
import { UserDetails } from 'types/user';
import { LS_KEYS } from 'utils/storage/localStorage';
import { useLocalState } from 'hooks/useLocalState';
@ -10,7 +10,7 @@ import { THEMES } from 'types/theme';
import ThemeSwitcher from './ThemeSwitcher';
import Typography from '@mui/material/Typography';
export default function UserDetailsSection({ sidebarView }) {
export default function UserDetailsSection({ sidebarView, closeSidebar }) {
const [userDetails, setUserDetails] = useLocalState<UserDetails>(
LS_KEYS.USER_DETAILS
);
@ -21,7 +21,7 @@ export default function UserDetailsSection({ sidebarView }) {
return;
}
const main = async () => {
const userDetails = await getUserDetails();
const userDetails = await getUserDetailsV2();
setUserDetails(userDetails);
};
main();
@ -34,7 +34,10 @@ export default function UserDetailsSection({ sidebarView }) {
<ThemeSwitcher theme={theme} setTheme={setTheme} />
</SpaceBetweenFlex>
<PaddedDivider invisible />
<SubscriptionDetails userDetails={userDetails} />
<SubscriptionDetails
userDetails={userDetails}
closeSidebar={closeSidebar}
/>
</>
);
}


@ -7,7 +7,7 @@ import { UploadProgressHeader } from './header';
import { InProgressSection } from './inProgressSection';
import { ResultSection } from './resultSection';
import { NotUploadSectionHeader } from './styledComponents';
import { DESKTOP_APP_DOWNLOAD_URL } from 'utils/common';
import { getOSSpecificDesktopAppDownloadLink } from 'utils/common';
import DialogBoxBase from 'components/DialogBox/base';
export function UploadProgressDialog({
handleClose,
@ -47,6 +47,17 @@ export function UploadProgressDialog({
fileUploadResult={FileUploadResults.UPLOADED}
sectionTitle={constants.SUCCESSFUL_UPLOADS}
/>
<ResultSection
filenames={props.filenames}
fileUploadResultMap={fileUploadResultMap}
fileUploadResult={
FileUploadResults.UPLOADED_WITH_STATIC_THUMBNAIL
}
sectionTitle={
constants.THUMBNAIL_GENERATION_FAILED_UPLOADS
}
sectionInfo={constants.THUMBNAIL_GENERATION_FAILED_INFO}
/>
{props.uploadStage === UPLOAD_STAGES.FINISH &&
filesNotUploaded && (
@ -61,7 +72,7 @@ export function UploadProgressDialog({
fileUploadResult={FileUploadResults.BLOCKED}
sectionTitle={constants.BLOCKED_UPLOADS}
sectionInfo={constants.ETAGS_BLOCKED(
DESKTOP_APP_DOWNLOAD_URL
getOSSpecificDesktopAppDownloadLink()
)}
/>
<ResultSection


@ -45,7 +45,10 @@ export default function UploadProgress(props: Props) {
if (!fileUploadResultMap.has(progress)) {
fileUploadResultMap.set(progress, []);
}
if (progress !== FileUploadResults.UPLOADED) {
if (
progress !== FileUploadResults.UPLOADED &&
progress !== FileUploadResults.UPLOADED_WITH_STATIC_THUMBNAIL
) {
filesNotUploaded = true;
}
const fileList = fileUploadResultMap.get(progress);


@ -5,9 +5,10 @@ import DeleteIcon from 'components/icons/DeleteIcon';
import React, { useContext } from 'react';
import styled from 'styled-components';
import { DeduplicateContext } from 'pages/deduplicate';
import LeftArrow from 'components/icons/LeftArrow';
import { IconWithMessage } from 'components/IconWithMessage';
import { AppContext } from 'pages/_app';
import CloseIcon from '@mui/icons-material/Close';
import BackButton from '@mui/icons-material/ArrowBackOutlined';
const VerticalLine = styled.div`
position: absolute;
@ -17,16 +18,24 @@ const VerticalLine = styled.div`
background: #303030;
`;
const CheckboxText = styled.div`
margin-left: 0.5em;
font-size: 16px;
margin-right: 0.8em;
`;
interface IProps {
deleteFileHelper: () => void;
close: () => void;
count: number;
clearSelection: () => void;
}
export default function DeduplicateOptions({
deleteFileHelper,
close,
count,
clearSelection,
}: IProps) {
const deduplicateContext = useContext(DeduplicateContext);
const { setDialogMessage } = useContext(AppContext);
@ -47,14 +56,19 @@ export default function DeduplicateOptions({
return (
<SelectionBar>
<FluidContainer>
<IconButton onClick={close}>
<LeftArrow />
</IconButton>
{count ? (
<IconButton onClick={clearSelection}>
<CloseIcon />
</IconButton>
) : (
<IconButton onClick={close}>
<BackButton />
</IconButton>
)}
<div>
{count} {constants.SELECTED}
</div>
</FluidContainer>
<input
type="checkbox"
style={{
@ -69,14 +83,7 @@ export default function DeduplicateOptions({
!deduplicateContext.clubSameTimeFilesOnly
);
}}></input>
<div
style={{
marginLeft: '0.5em',
fontSize: '16px',
marginRight: '0.8em',
}}>
{constants.CLUB_BY_CAPTURE_TIME}
</div>
<CheckboxText>{constants.CLUB_BY_CAPTURE_TIME}</CheckboxText>
<div>
<VerticalLine />
</div>


@ -18,6 +18,7 @@ import {
planForSubscription,
hasMobileSubscription,
hasPaypalSubscription,
manageFamilyMethod,
} from 'utils/billing';
import { reverseString } from 'utils/common';
import ArrowEast from 'components/icons/ArrowEast';
@ -313,72 +314,86 @@ function PlanSelector(props: Props) {
{plans && PlanIcons}
</div>
<DeadCenter style={{ marginBottom: '30px' }}>
{hasStripeSubscription(subscription) ? (
{hasPaidSubscription(subscription) ? (
<>
{isSubscriptionCancelled(subscription) ? (
<LinkButton
color={'success'}
onClick={() =>
appContext.setDialogMessage({
title: constants.CONFIRM_ACTIVATE_SUBSCRIPTION,
content:
constants.ACTIVATE_SUBSCRIPTION_MESSAGE(
subscription.expiryTime
),
staticBackdrop: true,
proceed: {
text: constants.ACTIVATE_SUBSCRIPTION,
action: activateSubscription.bind(
null,
appContext.setDialogMessage,
props.closeModal,
props.setLoading
),
variant: 'success',
},
close: {
text: constants.CANCEL,
},
})
}>
{constants.ACTIVATE_SUBSCRIPTION}
</LinkButton>
) : (
<LinkButton
color="danger"
onClick={() =>
appContext.setDialogMessage({
title: constants.CONFIRM_CANCEL_SUBSCRIPTION,
content:
constants.CANCEL_SUBSCRIPTION_MESSAGE(),
staticBackdrop: true,
proceed: {
text: constants.CANCEL_SUBSCRIPTION,
action: cancelSubscription.bind(
null,
appContext.setDialogMessage,
props.closeModal,
props.setLoading
),
variant: 'danger',
},
close: {
text: constants.CANCEL,
},
})
}>
{constants.CANCEL_SUBSCRIPTION}
</LinkButton>
{hasStripeSubscription(subscription) && (
<>
{isSubscriptionCancelled(subscription) ? (
<LinkButton
color="success"
onClick={() =>
appContext.setDialogMessage({
title: constants.CONFIRM_ACTIVATE_SUBSCRIPTION,
content:
constants.ACTIVATE_SUBSCRIPTION_MESSAGE(
subscription.expiryTime
),
staticBackdrop: true,
proceed: {
text: constants.ACTIVATE_SUBSCRIPTION,
action: activateSubscription.bind(
null,
appContext.setDialogMessage,
props.closeModal,
props.setLoading
),
variant: 'success',
},
close: {
text: constants.CANCEL,
},
})
}>
{constants.ACTIVATE_SUBSCRIPTION}
</LinkButton>
) : (
<LinkButton
color="danger"
onClick={() =>
appContext.setDialogMessage({
title: constants.CONFIRM_CANCEL_SUBSCRIPTION,
content:
constants.CANCEL_SUBSCRIPTION_MESSAGE(),
staticBackdrop: true,
proceed: {
text: constants.CANCEL_SUBSCRIPTION,
action: cancelSubscription.bind(
null,
appContext.setDialogMessage,
props.closeModal,
props.setLoading
),
variant: 'danger',
},
close: {
text: constants.CANCEL,
},
})
}>
{constants.CANCEL_SUBSCRIPTION}
</LinkButton>
)}
<LinkButton
color="primary"
onClick={updatePaymentMethod.bind(
null,
appContext.setDialogMessage,
props.setLoading
)}
style={{ marginTop: '20px' }}>
{constants.MANAGEMENT_PORTAL}
</LinkButton>
</>
)}
<LinkButton
color="primary"
onClick={updatePaymentMethod.bind(
onClick={manageFamilyMethod.bind(
null,
appContext.setDialogMessage,
props.setLoading
)}
style={{ marginTop: '20px' }}>
{constants.MANAGEMENT_PORTAL}
{constants.MANAGE_FAMILY_PORTAL}
</LinkButton>
</>
) : (


@ -11,6 +11,7 @@ import PublicCollectionDownloadManager from 'services/publicCollectionDownloadMa
import LivePhotoIndicatorOverlay from 'components/icons/LivePhotoIndicatorOverlay';
import { isLivePhoto } from 'utils/file';
import { DeduplicateContext } from 'pages/deduplicate';
import { logError } from 'utils/sentry';
interface IProps {
file: EnteFile;
@ -100,7 +101,7 @@ export const HoverOverlay = styled.div<{ checked: boolean }>`
`;
export const InSelectRangeOverLay = styled.div<{ active: boolean }>`
opacity: ${(props) => (!props.active ? 0 : 1)});
opacity: ${(props) => (!props.active ? 0 : 1)};
left: 0;
top: 0;
outline: none;
@ -115,6 +116,7 @@ export const FileAndCollectionNameOverlay = styled.div`
bottom: 0;
left: 0;
max-height: 40%;
width: 100%;
background: linear-gradient(rgba(0, 0, 0, 0), rgba(0, 0, 0, 2));
& > p {
max-width: calc(${IMAGE_CONTAINER_MAX_WIDTH}px - 10px);
@ -227,14 +229,17 @@ export default function PreviewCard(props: IProps) {
if (isMounted.current) {
setImgSrc(url);
thumbs.set(file.id, url);
const newFile = updateURL(url);
file.msrc = newFile.msrc;
file.html = newFile.html;
file.src = newFile.src;
file.w = newFile.w;
file.h = newFile.h;
if (updateURL) {
const newFile = updateURL(url);
file.msrc = newFile.msrc;
file.html = newFile.html;
file.src = newFile.src;
file.w = newFile.w;
file.h = newFile.h;
}
}
} catch (e) {
logError(e, 'preview card useEffect failed');
// no-op
}
};


@ -22,18 +22,23 @@ import { SetLoading, SetFiles } from 'types/gallery';
import { FileUploadResults, UPLOAD_STAGES } from 'constants/upload';
import { ElectronFile, FileWithCollection } from 'types/upload';
import UploadTypeSelector from '../../UploadTypeSelector';
import Router from 'next/router';
import { isCanvasBlocked } from 'utils/upload/isCanvasBlocked';
import { downloadApp } from 'utils/common';
const FIRST_ALBUM_NAME = 'My First Album';
interface Props {
syncWithRemote: (force?: boolean, silent?: boolean) => Promise<void>;
setBannerMessage: (message: string | JSX.Element) => void;
acceptedFiles: File[];
droppedFiles: File[];
clearDroppedFiles: () => void;
closeCollectionSelector: () => void;
setCollectionSelectorAttributes: SetCollectionSelectorAttributes;
setCollectionNamerAttributes: SetCollectionNamerAttributes;
setLoading: SetLoading;
setUploadInProgress: any;
uploadInProgress: boolean;
setUploadInProgress: (value: boolean) => void;
showCollectionSelector: () => void;
fileRejections: FileRejection[];
setFiles: SetFiles;
@ -49,9 +54,10 @@ enum UPLOAD_STRATEGY {
COLLECTION_PER_FOLDER,
}
enum DESKTOP_UPLOAD_TYPE {
FILES,
FOLDERS,
export enum DESKTOP_UPLOAD_TYPE {
FILES = 'files',
FOLDERS = 'folders',
ZIPS = 'zips',
}
interface AnalysisResult {
@ -59,6 +65,11 @@ interface AnalysisResult {
multipleFolders: boolean;
}
const NULL_ANALYSIS_RESULT = {
suggestedCollectionName: '',
multipleFolders: false,
};
export default function Upload(props: Props) {
const [progressView, setProgressView] = useState(false);
const [uploadStage, setUploadStage] = useState<UPLOAD_STAGES>(
@ -74,10 +85,8 @@ export default function Upload(props: Props) {
const [hasLivePhotos, setHasLivePhotos] = useState(false);
const [choiceModalView, setChoiceModalView] = useState(false);
const [analysisResult, setAnalysisResult] = useState<AnalysisResult>({
suggestedCollectionName: '',
multipleFolders: false,
});
const [analysisResult, setAnalysisResult] =
useState<AnalysisResult>(NULL_ANALYSIS_RESULT);
const appContext = useContext(AppContext);
const galleryContext = useContext(GalleryContext);
@ -85,6 +94,7 @@ export default function Upload(props: Props) {
const isPendingDesktopUpload = useRef(false);
const pendingDesktopUploadCollectionName = useRef<string>('');
const desktopUploadType = useRef<DESKTOP_UPLOAD_TYPE>(null);
const zipPaths = useRef<string[]>(null);
useEffect(() => {
UploadManager.initUploader(
@ -100,10 +110,10 @@ export default function Upload(props: Props) {
props.setFiles
);
if (isElectron()) {
if (isElectron() && ImportService.checkAllElectronAPIsExists()) {
ImportService.getPendingUploads().then(
({ files: electronFiles, collectionName }) => {
resumeDesktopUpload(electronFiles, collectionName);
({ files: electronFiles, collectionName, type }) => {
resumeDesktopUpload(type, electronFiles, collectionName);
}
);
}
@ -111,39 +121,50 @@ export default function Upload(props: Props) {
useEffect(() => {
if (
props.acceptedFiles?.length > 0 ||
appContext.sharedFiles?.length > 0 ||
props.electronFiles?.length > 0
props.electronFiles?.length > 0 ||
props.droppedFiles?.length > 0 ||
appContext.sharedFiles?.length > 0
) {
props.setLoading(true);
let analysisResult: AnalysisResult;
if (
props.acceptedFiles?.length > 0 ||
props.electronFiles?.length > 0
) {
if (props.acceptedFiles?.length > 0) {
if (props.uploadInProgress) {
// no-op
// an upload is already in progress
} else if (isCanvasBlocked()) {
appContext.setDialogMessage({
title: constants.CANVAS_BLOCKED_TITLE,
staticBackdrop: true,
content: constants.CANVAS_BLOCKED_MESSAGE(),
close: { text: constants.CLOSE },
proceed: {
text: constants.DOWNLOAD_APP,
action: downloadApp,
variant: 'success',
},
});
} else {
props.setLoading(true);
if (props.droppedFiles?.length > 0) {
// Files selected via drag and drop or the file picker.
toUploadFiles.current = props.acceptedFiles;
} else {
toUploadFiles.current = props.droppedFiles;
props.clearDroppedFiles();
} else if (appContext.sharedFiles?.length > 0) {
toUploadFiles.current = appContext.sharedFiles;
appContext.resetSharedFiles();
} else if (props.electronFiles?.length > 0) {
// File selection from desktop app
toUploadFiles.current = props.electronFiles;
props.setElectronFiles([]);
}
const analysisResult = analyseUploadFiles();
setAnalysisResult(analysisResult);
analysisResult = analyseUploadFiles();
if (analysisResult) {
setAnalysisResult(analysisResult);
}
} else if (appContext.sharedFiles.length > 0) {
toUploadFiles.current = appContext.sharedFiles;
handleCollectionCreationAndUpload(
analysisResult,
props.isFirstUpload
);
props.setLoading(false);
}
handleCollectionCreationAndUpload(
analysisResult,
props.isFirstUpload
);
props.setLoading(false);
}
}, [props.acceptedFiles, appContext.sharedFiles, props.electronFiles]);
}, [props.droppedFiles, appContext.sharedFiles, props.electronFiles]);
const uploadInit = function () {
setUploadStage(UPLOAD_STAGES.START);
@ -156,24 +177,27 @@ export default function Upload(props: Props) {
};
const resumeDesktopUpload = async (
type: DESKTOP_UPLOAD_TYPE,
electronFiles: ElectronFile[],
collectionName: string
) => {
if (electronFiles && electronFiles?.length > 0) {
isPendingDesktopUpload.current = true;
pendingDesktopUploadCollectionName.current = collectionName;
desktopUploadType.current = type;
props.setElectronFiles(electronFiles);
}
};
function analyseUploadFiles(): AnalysisResult {
if (toUploadFiles.current.length === 0) {
return null;
}
if (desktopUploadType.current === DESKTOP_UPLOAD_TYPE.FILES) {
desktopUploadType.current = null;
return { suggestedCollectionName: '', multipleFolders: false };
if (
isElectron() &&
(!desktopUploadType.current ||
desktopUploadType.current === DESKTOP_UPLOAD_TYPE.FILES)
) {
return NULL_ANALYSIS_RESULT;
}
const paths: string[] = toUploadFiles.current.map(
(file) => file['path']
);
@ -181,19 +205,24 @@ export default function Upload(props: Props) {
paths.sort((path1, path2) => getCharCount(path1) - getCharCount(path2));
const firstPath = paths[0];
const lastPath = paths[paths.length - 1];
const L = firstPath.length;
let i = 0;
const firstFileFolder = firstPath.substr(0, firstPath.lastIndexOf('/'));
const lastFileFolder = lastPath.substr(0, lastPath.lastIndexOf('/'));
const firstFileFolder = firstPath.substring(
0,
firstPath.lastIndexOf('/')
);
const lastFileFolder = lastPath.substring(0, lastPath.lastIndexOf('/'));
while (i < L && firstPath.charAt(i) === lastPath.charAt(i)) i++;
let commonPathPrefix = firstPath.substring(0, i);
if (commonPathPrefix) {
commonPathPrefix = commonPathPrefix.substr(
1,
commonPathPrefix.lastIndexOf('/') - 1
commonPathPrefix = commonPathPrefix.substring(
0,
commonPathPrefix.lastIndexOf('/')
);
if (commonPathPrefix) {
commonPathPrefix = commonPathPrefix.substr(
commonPathPrefix = commonPathPrefix.substring(
commonPathPrefix.lastIndexOf('/') + 1
);
}
@ -208,11 +237,14 @@ export default function Upload(props: Props) {
for (const file of toUploadFiles.current) {
const filePath = file['path'] as string;
let folderPath = filePath.substr(0, filePath.lastIndexOf('/'));
let folderPath = filePath.substring(0, filePath.lastIndexOf('/'));
if (folderPath.endsWith(METADATA_FOLDER_NAME)) {
folderPath = folderPath.substr(0, folderPath.lastIndexOf('/'));
folderPath = folderPath.substring(
0,
folderPath.lastIndexOf('/')
);
}
const folderName = folderPath.substr(
const folderName = folderPath.substring(
folderPath.lastIndexOf('/') + 1
);
if (!collectionWiseFiles.has(folderName)) {
@ -225,7 +257,6 @@ export default function Upload(props: Props) {
const uploadFilesToExistingCollection = async (collection: Collection) => {
try {
uploadInit();
const filesWithCollectionToUpload: FileWithCollection[] =
toUploadFiles.current.map((file, index) => ({
file,
@ -243,8 +274,6 @@ export default function Upload(props: Props) {
collectionName?: string
) => {
try {
uploadInit();
const filesWithCollectionToUpload: FileWithCollection[] = [];
const collections: Collection[] = [];
let collectionWiseFiles = new Map<
@ -296,13 +325,24 @@ export default function Upload(props: Props) {
collections: Collection[]
) => {
try {
uploadInit();
props.setUploadInProgress(true);
props.closeCollectionSelector();
await props.syncWithRemote(true, true);
if (isElectron()) {
if (isElectron() && !isPendingDesktopUpload.current) {
await ImportService.setToUploadCollection(collections);
if (zipPaths.current) {
await ImportService.setToUploadFiles(
DESKTOP_UPLOAD_TYPE.ZIPS,
zipPaths.current
);
zipPaths.current = null;
}
await ImportService.setToUploadFiles(
filesWithCollectionToUpload,
collections
DESKTOP_UPLOAD_TYPE.FILES,
filesWithCollectionToUpload.map(
({ file }) => (file as ElectronFile).path
)
);
}
await uploadManager.queueFilesForUpload(
@ -318,7 +358,6 @@ export default function Upload(props: Props) {
setProgressView(false);
throw err;
} finally {
appContext.resetSharedFiles();
props.setUploadInProgress(false);
props.syncWithRemote();
}
@ -372,6 +411,7 @@ export default function Upload(props: Props) {
uploadToSingleNewCollection(
pendingDesktopUploadCollectionName.current
);
pendingDesktopUploadCollectionName.current = null;
} else {
uploadFilesToNewCollections(
UPLOAD_STRATEGY.COLLECTION_PER_FOLDER
@ -379,6 +419,13 @@ export default function Upload(props: Props) {
}
return;
}
if (
isElectron() &&
desktopUploadType.current === DESKTOP_UPLOAD_TYPE.ZIPS
) {
uploadFilesToNewCollections(UPLOAD_STRATEGY.COLLECTION_PER_FOLDER);
return;
}
if (isFirstUpload && !analysisResult.suggestedCollectionName) {
analysisResult.suggestedCollectionName = FIRST_ALBUM_NAME;
}
@ -402,21 +449,26 @@ export default function Upload(props: Props) {
desktopUploadType.current = type;
if (type === DESKTOP_UPLOAD_TYPE.FILES) {
files = await ImportService.showUploadFilesDialog();
} else {
} else if (type === DESKTOP_UPLOAD_TYPE.FOLDERS) {
files = await ImportService.showUploadDirsDialog();
} else {
const response = await ImportService.showUploadZipDialog();
files = response.files;
zipPaths.current = response.zipPaths;
}
if (files?.length > 0) {
props.setElectronFiles(files);
props.setUploadTypeSelectorView(false);
}
props.setElectronFiles(files);
props.setUploadTypeSelectorView(false);
};
const cancelUploads = async () => {
setProgressView(false);
UploadManager.cancelRemainingUploads();
if (isElectron()) {
ImportService.updatePendingUploads([]);
ImportService.cancelRemainingUploads();
}
await props.setUploadInProgress(false);
await props.syncWithRemote();
Router.reload();
};
return (
@ -445,7 +497,7 @@ export default function Upload(props: Props) {
handleDesktopUploadTypes(DESKTOP_UPLOAD_TYPE.FOLDERS)
}
uploadGoogleTakeoutZips={() =>
handleDesktopUploadTypes(DESKTOP_UPLOAD_TYPE.FOLDERS)
handleDesktopUploadTypes(DESKTOP_UPLOAD_TYPE.ZIPS)
}
/>
<UploadProgress


@ -7,6 +7,7 @@ export const FORMAT_MISSED_BY_FILE_TYPE_LIB = [
{ fileType: FILE_TYPE.IMAGE, exactType: 'jpeg', mimeType: 'image/jpeg' },
{ fileType: FILE_TYPE.IMAGE, exactType: 'jpg', mimeType: 'image/jpeg' },
{ fileType: FILE_TYPE.VIDEO, exactType: 'webm', mimeType: 'video/webm' },
{ fileType: FILE_TYPE.VIDEO, exactType: 'mod', mimeType: 'video/mpeg' },
];
// this is the chunk size of the unencrypted file that is read and encrypted before being uploaded as a single part.
@ -38,9 +39,10 @@ export enum FileUploadResults {
TOO_LARGE,
LARGER_THAN_AVAILABLE_STORAGE,
UPLOADED,
UPLOADED_WITH_STATIC_THUMBNAIL,
}
export const MAX_FILE_SIZE_SUPPORTED = 5 * 1024 * 1024 * 1024; // 5 GB
export const MAX_FILE_SIZE_SUPPORTED = 4 * 1024 * 1024 * 1024; // 4 GB
export const LIVE_PHOTO_ASSET_SIZE_LIMIT = 20 * 1024 * 1024; // 20MB
@ -50,3 +52,5 @@ export const NULL_EXTRACTED_METADATA: ParsedExtractedMetadata = {
};
export const A_SEC_IN_MICROSECONDS = 1e6;
export const USE_CF_PROXY = false;


@ -1,3 +1,3 @@
export const FIX_CREATION_TIME_VISIBLE_TO_USER_IDS = [
1, 125, 243, 341, 1580559962387273, 1580559962388564,
1, 125, 243, 341, 1071, 1580559962387273, 1580559962388564,
];


@ -10,7 +10,6 @@ import 'styles/global.css';
import EnteSpinner from 'components/EnteSpinner';
import { logError } from '../utils/sentry';
// import { Workbox } from 'workbox-window';
import { getEndpoint } from 'utils/common/apiUtil';
import { getData, LS_KEYS } from 'utils/storage/localStorage';
import HTTPService from 'services/HTTPService';
import FlashMessageBar from 'components/FlashMessageBar';
@ -24,6 +23,11 @@ import { CssBaseline } from '@mui/material';
// eslint-disable-next-line @typescript-eslint/no-unused-vars
import * as types from 'styled-components/cssprop'; // needed for the css prop on styled-components
import { SetDialogBoxAttributes, DialogBoxAttributes } from 'types/dialogBox';
import {
getFamilyPortalRedirectURL,
getRoadmapRedirectURL,
} from 'services/userService';
import { CustomError } from 'utils/error';
export const LogoImage = styled.img`
max-height: 28px;
@ -68,10 +72,10 @@ export interface FlashMessage {
}
export const AppContext = createContext<AppContextType>(null);
const redirectMap = {
roadmap: (token: string) =>
`${getEndpoint()}/users/roadmap?token=${encodeURIComponent(token)}`,
};
const redirectMap = new Map([
['roadmap', getRoadmapRedirectURL],
['families', getFamilyPortalRedirectURL],
]);
export default function App({ Component, err }) {
const router = useRouter();
@ -140,14 +144,30 @@ export default function App({ Component, err }) {
'font-size: 20px;'
);
}
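// Resolves a named redirect (e.g. 'roadmap' or 'families') to its destination URL and navigates there; unknown names are logged as a bad request.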
const redirectTo = async (redirect) => {
if (
redirectMap.has(redirect) &&
typeof redirectMap.get(redirect) === 'function'
) {
const redirectAction = redirectMap.get(redirect);
const url = await redirectAction();
window.location.href = url;
} else {
logError(CustomError.BAD_REQUEST, 'invalid redirection', {
redirect,
});
}
};
const query = new URLSearchParams(window.location.search);
const redirect = query.get('redirect');
if (redirect && redirectMap[redirect]) {
const redirectName = query.get('redirect');
if (redirectName) {
const user = getData(LS_KEYS.USER);
if (user?.token) {
window.location.href = redirectMap[redirect](user.token);
redirectTo(redirectName);
} else {
setRedirectName(redirect);
setRedirectName(redirectName);
}
}
@ -159,9 +179,11 @@ export default function App({ Component, err }) {
if (redirectName) {
const user = getData(LS_KEYS.USER);
if (user?.token) {
window.location.href = redirectMap[redirectName](
user.token
);
redirectTo(redirectName);
// https://github.com/vercel/next.js/issues/2476#issuecomment-573460710
// eslint-disable-next-line no-throw-literal
throw 'Aborting route change, redirection in process....';
}
}
});


@ -138,6 +138,10 @@ export default function Deduplicate() {
}
};
const clearSelection = function () {
setSelected({ count: 0, collectionID: 0 });
};
if (!duplicateFiles) {
return <></>;
}
@ -166,11 +170,13 @@ export default function Deduplicate() {
setSelected={setSelected}
selected={selected}
activeCollection={ALL_SECTION}
isDeduplicating
/>
<DeduplicateOptions
deleteFileHelper={deleteFileHelper}
count={selected.count}
close={closeDeduplication}
clearSelection={clearSelection}
/>
</DeduplicateContext.Provider>
);


@ -211,6 +211,7 @@ export default function Gallery() {
const closeSidebar = () => setSidebarView(false);
const openSidebar = () => setSidebarView(true);
const [droppedFiles, setDroppedFiles] = useState([]);
useEffect(() => {
appContext.showNavBar(false);
@ -258,6 +259,8 @@ export default function Gallery() {
[fixCreationTimeAttributes]
);
useEffect(() => setDroppedFiles(acceptedFiles), [acceptedFiles]);
useEffect(() => {
if (typeof activeCollection === 'undefined') {
return;
@ -309,6 +312,7 @@ export default function Gallery() {
files.push(...getTrashedFiles(trash));
await setDerivativeState(collections, files);
} catch (e) {
logError(e, 'syncWithRemote failed');
switch (e.message) {
case ServerErrorCodes.SESSION_EXPIRED:
setBannerMessage(constants.SESSION_EXPIRED_MESSAGE);
@ -648,7 +652,8 @@ export default function Gallery() {
<Upload
syncWithRemote={syncWithRemote}
setBannerMessage={setBannerMessage}
acceptedFiles={acceptedFiles}
droppedFiles={droppedFiles}
clearDroppedFiles={() => setDroppedFiles([])}
showCollectionSelector={setCollectionSelectorView.bind(
null,
true
@ -662,6 +667,7 @@ export default function Gallery() {
)}
setLoading={setBlockingLoad}
setCollectionNamerAttributes={setCollectionNamerAttributes}
uploadInProgress={uploadInProgress}
setUploadInProgress={setUploadInProgress}
fileRejections={fileRejections}
setFiles={setFiles}


@ -19,14 +19,13 @@ export default function Home() {
const main = async () => {
router.prefetch(PAGES.CREDENTIALS);
const user: User = getData(LS_KEYS.USER);
if (
user &&
if (!user?.email || !user.twoFactorSessionID) {
router.push(PAGES.ROOT);
} else if (
!user.isTwoFactorEnabled &&
(user.encryptedToken || user.token)
) {
router.push(PAGES.CREDENTIALS);
} else if (!user?.email || !user.twoFactorSessionID) {
router.push(PAGES.ROOT);
} else {
setSessionID(user.twoFactorSessionID);
}


@ -1,6 +1,6 @@
import { getEndpoint, getPaymentsURL } from 'utils/common/apiUtil';
import { getToken } from 'utils/common/key';
import { setData, LS_KEYS } from 'utils/storage/localStorage';
import { setData, LS_KEYS, removeData } from 'utils/storage/localStorage';
import HTTPService from './HTTPService';
import { logError } from 'utils/sentry';
import { getPaymentToken } from './userService';
@ -147,6 +147,21 @@ class billingService {
}
}
public async leaveFamily() {
if (!getToken()) {
return;
}
try {
await HTTPService.delete(`${ENDPOINT}/family/leave`, null, null, {
'X-Auth-Token': getToken(),
});
removeData(LS_KEYS.FAMILY_DATA);
} catch (e) {
logError(e, '/family/leave failed');
throw e;
}
}
public async redirectToPayments(
paymentToken: string,
productID: string,

View file

@ -1,7 +1,10 @@
import { FILE_TYPE } from 'constants/file';
import { EnteFile } from 'types/file';
import { Metadata } from 'types/upload';
import { getEndpoint } from 'utils/common/apiUtil';
import { getToken } from 'utils/common/key';
import { logError } from 'utils/sentry';
import { hasFileHash } from 'utils/upload';
import HTTPService from './HTTPService';
const ENDPOINT = getEndpoint();
@ -55,10 +58,12 @@ export async function getDuplicateFiles(
);
if (duplicateFiles.length > 1) {
result.push({
files: duplicateFiles,
size: dupe.size,
});
result.push(
...getDupesGroupedBySameFileHashes(
duplicateFiles,
dupe.size
)
);
}
}
@ -68,6 +73,90 @@ export async function getDuplicateFiles(
}
}
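// Splits a size-based duplicate group further: files sharing the same content hash are clubbed
// together, and any leftover files without a hash match form one residual group of the same size.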
function getDupesGroupedBySameFileHashes(files: EnteFile[], size: number) {
const clubbedDupesByFileHash = clubDuplicatesBySameFileHashes([
{ files, size },
]);
const clubbedFileIDs = new Set<number>();
for (const dupe of clubbedDupesByFileHash) {
for (const file of dupe.files) {
clubbedFileIDs.add(file.id);
}
}
files = files.filter((file) => {
return !clubbedFileIDs.has(file.id);
});
if (files.length > 1) {
clubbedDupesByFileHash.push({
files: [...files],
size,
});
}
return clubbedDupesByFileHash;
}
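// Groups the files of each duplicate set by identical file hashes (sorting by hash and clubbing
// consecutive equal ones); only groups containing more than one file are kept.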
function clubDuplicatesBySameFileHashes(dupes: DuplicateFiles[]) {
const result: DuplicateFiles[] = [];
for (const dupe of dupes) {
let files: EnteFile[] = [];
const filteredFiles = dupe.files.filter((file) => {
return hasFileHash(file.metadata);
});
if (filteredFiles.length <= 1) {
continue;
}
const dupesSortedByFileHash = filteredFiles
.map((file) => {
return {
file,
hash:
file.metadata.hash ??
`${file.metadata.imageHash}_${file.metadata.videoHash}`,
};
})
.sort((firstFile, secondFile) => {
return firstFile.hash.localeCompare(secondFile.hash);
});
files.push(dupesSortedByFileHash[0].file);
for (let i = 1; i < dupesSortedByFileHash.length; i++) {
if (
areFileHashesSame(
dupesSortedByFileHash[i - 1].file.metadata,
dupesSortedByFileHash[i].file.metadata
)
) {
files.push(dupesSortedByFileHash[i].file);
} else {
if (files.length > 1) {
result.push({
files: [...files],
size: dupe.size,
});
}
files = [dupesSortedByFileHash[i].file];
}
}
if (files.length > 1) {
result.push({
files,
size: dupe.size,
});
}
}
return result;
}
export function clubDuplicatesByTime(dupes: DuplicateFiles[]) {
const result: DuplicateFiles[] = [];
for (const dupe of dupes) {
@ -150,3 +239,14 @@ async function sortDuplicateFiles(
return secondFileRanking - firstFileRanking;
});
}
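// Live photos are considered identical only when both their image and video hashes match;
// for every other file type a single content-hash comparison suffices.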
function areFileHashesSame(firstFile: Metadata, secondFile: Metadata) {
if (firstFile.fileType === FILE_TYPE.LIVE_PHOTO) {
return (
firstFile.imageHash === secondFile.imageHash &&
firstFile.videoHash === secondFile.videoHash
);
} else {
return firstFile.hash === secondFile.hash;
}
}


@ -4,7 +4,6 @@ import { parseFFmpegExtractedMetadata } from 'utils/ffmpeg';
class FFmpegClient {
private ffmpeg: FFmpeg;
private fileReader: FileReader;
private ready: Promise<void> = null;
constructor() {
this.ffmpeg = createFFmpeg({
@ -19,9 +18,6 @@ class FFmpegClient {
if (!this.ffmpeg.isLoaded()) {
await this.ffmpeg.load();
}
if (!this.fileReader) {
this.fileReader = new FileReader();
}
}
async generateThumbnail(file: File) {
@ -31,7 +27,7 @@ class FFmpegClient {
this.ffmpeg.FS(
'writeFile',
inputFileName,
await getUint8ArrayView(this.fileReader, file)
await getUint8ArrayView(file)
);
let seekTime = 1.0;
let thumb = null;
@ -66,7 +62,7 @@ class FFmpegClient {
this.ffmpeg.FS(
'writeFile',
inputFileName,
await getUint8ArrayView(this.fileReader, file)
await getUint8ArrayView(file)
);
let metadata = null;


@ -4,7 +4,9 @@ import QueueProcessor from 'services/queueProcessor';
import { ParsedExtractedMetadata } from 'types/upload';
import { FFmpegWorker } from 'utils/comlink';
import { promiseWithTimeout } from 'utils/common';
const FFMPEG_EXECUTION_WAIT_TIME = 10 * 1000;
class FFmpegService {
private ffmpegWorker = null;
private ffmpegTaskQueue = new QueueProcessor<any>(1);
@ -18,8 +20,11 @@ class FFmpegService {
await this.init();
}
const response = this.ffmpegTaskQueue.queueUpRequest(
async () => await this.ffmpegWorker.generateThumbnail(file)
const response = this.ffmpegTaskQueue.queueUpRequest(() =>
promiseWithTimeout(
this.ffmpegWorker.generateThumbnail(file),
FFMPEG_EXECUTION_WAIT_TIME
)
);
try {
return await response.promise;
@ -39,8 +44,11 @@ class FFmpegService {
await this.init();
}
const response = this.ffmpegTaskQueue.queueUpRequest(
async () => await this.ffmpegWorker.extractVideoMetadata(file)
const response = this.ffmpegTaskQueue.queueUpRequest(() =>
promiseWithTimeout(
this.ffmpegWorker.extractVideoMetadata(file),
FFMPEG_EXECUTION_WAIT_TIME
)
);
try {
return await response.promise;

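promiseWithTimeout is imported from utils/common but its implementation is not part of this diff; the following is only a minimal sketch, assuming it races the wrapped promise against a timer and rejects once the wait time elapses:

// Hypothetical sketch of the assumed utils/common helper (illustration only).
async function promiseWithTimeout<T>(promise: Promise<T>, ms: number): Promise<T> {
    let timer: ReturnType<typeof setTimeout> | undefined;
    const timeout = new Promise<never>((_, reject) => {
        timer = setTimeout(() => reject(new Error('wait time exceeded')), ms);
    });
    try {
        // whichever settles first wins; a rejected timeout surfaces as the task's error
        return await Promise.race([promise, timeout]);
    } finally {
        clearTimeout(timer);
    }
}

This mirrors how ffmpegTaskQueue wraps each worker call above with FFMPEG_EXECUTION_WAIT_TIME.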

@ -17,6 +17,7 @@ import { EnteFile, TrashRequest } from 'types/file';
import { SetFiles } from 'types/gallery';
import { MAX_TRASH_BATCH_SIZE } from 'constants/file';
import { BulkUpdateMagicMetadataRequest } from 'types/magicMetadata';
import { logUploadInfo } from 'utils/upload';
const ENDPOINT = getEndpoint();
const FILES_TABLE = 'files';
@ -28,7 +29,23 @@ export const getLocalFiles = async () => {
};
export const setLocalFiles = async (files: EnteFile[]) => {
await localForage.setItem(FILES_TABLE, files);
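// If the write fails, also capture the browser's storage estimate for debugging quota issues, then rethrow the original error.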
try {
await localForage.setItem(FILES_TABLE, files);
} catch (e1) {
try {
const storageEstimate = await navigator.storage.estimate();
logError(e1, 'failed to save files to indexedDB', {
storageEstimate,
});
logUploadInfo(
`storage estimate ${JSON.stringify(storageEstimate)}`
);
} catch (e2) {
logError(e1, 'failed to save files to indexedDB');
logError(e2, 'failed to get storage stats');
}
throw e1;
}
};
const getCollectionLastSyncTime = async (collection: Collection) =>


@ -1,10 +1,11 @@
import * as HeicConvert from 'heic-convert';
import { getUint8ArrayView } from 'services/readerService';
export async function convertHEIC(
fileBlob: Blob,
format: string
): Promise<Blob> {
const filedata = new Uint8Array(await fileBlob.arrayBuffer());
const filedata = await getUint8ArrayView(fileBlob);
const result = await HeicConvert({ buffer: filedata, format });
const convertedFileData = new Uint8Array(result);
const convertedFileBlob = new Blob([convertedFileData]);


@ -1,3 +1,4 @@
import { DESKTOP_UPLOAD_TYPE } from 'components/pages/gallery/Upload';
import { Collection } from 'types/collection';
import { ElectronFile, FileWithCollection } from 'types/upload';
import { runningInBrowser } from 'utils/common';
@ -6,6 +7,12 @@ import { logError } from 'utils/sentry';
interface PendingUploads {
files: ElectronFile[];
collectionName: string;
type: DESKTOP_UPLOAD_TYPE;
}
interface selectZipResult {
files: ElectronFile[];
zipPaths: string[];
}
class ImportService {
ElectronAPIs: any;
@ -16,6 +23,14 @@ class ImportService {
this.allElectronAPIsExist = !!this.ElectronAPIs?.getPendingUploads;
}
async getElectronFilesFromGoogleZip(
zipPath: string
): Promise<ElectronFile[]> {
if (this.allElectronAPIsExist) {
return this.ElectronAPIs.getElectronFilesFromGoogleZip(zipPath);
}
}
checkAllElectronAPIsExists = () => this.allElectronAPIsExist;
async showUploadFilesDialog(): Promise<ElectronFile[]> {
@ -30,6 +45,11 @@ class ImportService {
}
}
async showUploadZipDialog(): Promise<selectZipResult> {
if (this.allElectronAPIsExist) {
return this.ElectronAPIs.showUploadZipDialog();
}
}
async getPendingUploads(): Promise<PendingUploads> {
try {
if (this.allElectronAPIsExist) {
@ -39,16 +59,13 @@ class ImportService {
}
} catch (e) {
logError(e, 'failed to getPendingUploads ');
return { files: [], collectionName: null };
return { files: [], collectionName: null, type: null };
}
}
async setToUploadFiles(
files: FileWithCollection[],
collections: Collection[]
) {
async setToUploadCollection(collections: Collection[]) {
if (this.allElectronAPIsExist) {
let collectionName: string;
let collectionName: string = null;
/* a single collection suggests one of two things:
1. either the user has uploaded to a single existing collection, or
2. created a new single collection to upload to
@ -61,13 +78,19 @@ class ImportService {
if (collections.length === 1) {
collectionName = collections[0].name;
}
const filePaths = files.map(
(file) => (file.file as ElectronFile).path
);
this.ElectronAPIs.setToUploadFiles(filePaths);
this.ElectronAPIs.setToUploadCollection(collectionName);
}
}
async setToUploadFiles(
type: DESKTOP_UPLOAD_TYPE.FILES | DESKTOP_UPLOAD_TYPE.ZIPS,
filePaths: string[]
) {
if (this.allElectronAPIsExist) {
this.ElectronAPIs.setToUploadFiles(type, filePaths);
}
}
updatePendingUploads(files: FileWithCollection[]) {
if (this.allElectronAPIsExist) {
const filePaths = [];
@ -89,7 +112,14 @@ class ImportService {
);
}
}
this.ElectronAPIs.setToUploadFiles(filePaths);
this.setToUploadFiles(DESKTOP_UPLOAD_TYPE.FILES, filePaths);
}
}
cancelRemainingUploads() {
if (this.allElectronAPIsExist) {
this.ElectronAPIs.setToUploadCollection(null);
this.ElectronAPIs.setToUploadFiles(DESKTOP_UPLOAD_TYPE.ZIPS, []);
this.ElectronAPIs.setToUploadFiles(DESKTOP_UPLOAD_TYPE.FILES, []);
}
}
}


@ -12,6 +12,7 @@ import { getFileType } from 'services/typeDetectionService';
import { getLocalTrash, getTrashedFiles } from './trashService';
import { EncryptionResult, UploadURL } from 'types/upload';
import { fileAttribute } from 'types/file';
import { USE_CF_PROXY } from 'constants/upload';
const ENDPOINT = getEndpoint();
const REPLACE_THUMBNAIL_THRESHOLD = 500 * 1024; // 500KB
@ -44,7 +45,6 @@ export async function replaceThumbnail(
try {
const token = getToken();
const worker = await new CryptoWorker();
const reader = new FileReader();
const files = await getLocalFiles();
const trash = await getLocalTrash();
const trashFiles = getTrashedFiles(trash);
@ -77,9 +77,8 @@ export async function replaceThumbnail(
[originalThumbnail],
file.metadata.title
);
const fileTypeInfo = await getFileType(reader, dummyImageFile);
const fileTypeInfo = await getFileType(dummyImageFile);
const { thumbnail: newThumbnail } = await generateThumbnail(
reader,
dummyImageFile,
fileTypeInfo
);
@ -110,12 +109,20 @@ export async function uploadThumbnail(
): Promise<fileAttribute> {
const { file: encryptedThumbnail }: EncryptionResult =
await worker.encryptThumbnail(updatedThumbnail, fileKey);
const thumbnailObjectKey = await uploadHttpClient.putFile(
uploadURL,
encryptedThumbnail.encryptedData as Uint8Array,
() => {}
);
let thumbnailObjectKey: string = null;
if (USE_CF_PROXY) {
thumbnailObjectKey = await uploadHttpClient.putFileV2(
uploadURL,
encryptedThumbnail.encryptedData as Uint8Array,
() => {}
);
} else {
thumbnailObjectKey = await uploadHttpClient.putFile(
uploadURL,
encryptedThumbnail.encryptedData as Uint8Array,
() => {}
);
}
return {
objectKey: thumbnailObjectKey,
decryptionHeader: encryptedThumbnail.decryptionHeader,


@ -1,30 +1,21 @@
import { ElectronFile } from 'types/upload';
import { logError } from 'utils/sentry';
export async function getUint8ArrayView(
reader: FileReader,
file: Blob
file: Blob | ElectronFile
): Promise<Uint8Array> {
return await new Promise((resolve, reject) => {
reader.onabort = () => reject(Error('file reading was aborted'));
reader.onerror = () => reject(Error('file reading has failed'));
reader.onload = () => {
// Do whatever you want with the file contents
const result =
typeof reader.result === 'string'
? new TextEncoder().encode(reader.result)
: new Uint8Array(reader.result);
resolve(result);
};
reader.readAsArrayBuffer(file);
});
try {
return new Uint8Array(await file.arrayBuffer());
} catch (e) {
logError(e, 'reading file blob failed', {
fileSize: convertBytesToHumanReadable(file.size),
});
throw e;
}
}
export function getFileStream(
reader: FileReader,
file: File,
chunkSize: number
) {
const fileChunkReader = fileChunkReaderMaker(reader, file, chunkSize);
export function getFileStream(file: File, chunkSize: number) {
const fileChunkReader = fileChunkReaderMaker(file, chunkSize);
const stream = new ReadableStream<Uint8Array>({
async pull(controller: ReadableStreamDefaultController) {
@ -54,17 +45,63 @@ export async function getElectronFileStream(
};
}
async function* fileChunkReaderMaker(
reader: FileReader,
file: File,
chunkSize: number
) {
async function* fileChunkReaderMaker(file: File, chunkSize: number) {
let offset = 0;
while (offset < file.size) {
const blob = file.slice(offset, chunkSize + offset);
const fileChunk = await getUint8ArrayView(reader, blob);
const fileChunk = await getUint8ArrayView(blob);
yield fileChunk;
offset += chunkSize;
}
return null;
}
// Temporary fix for a 'window is not defined' error caused by importing from utils/billing:
// this file is accessed inside a worker, and utils/billing imports constants which reference
// the window object, causing an error inside the worker.
// TODO: update workers to not read files themselves but rather have the file data passed to them
function convertBytesToHumanReadable(bytes: number, precision = 2): string {
if (bytes === 0) {
return '0 MB';
}
const i = Math.floor(Math.log(bytes) / Math.log(1024));
const sizes = ['B', 'KB', 'MB', 'GB', 'TB', 'PB', 'EB', 'ZB', 'YB'];
return (bytes / Math.pow(1024, i)).toFixed(precision) + ' ' + sizes[i];
}
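// Illustrative outputs of the helper above (hypothetical calls):
//   convertBytesToHumanReadable(0)    -> '0 MB'
//   convertBytesToHumanReadable(1536) -> '1.50 KB'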
// deprecated
// eslint-disable-next-line @typescript-eslint/no-unused-vars
async function getUint8ArrayViewOld(
reader: FileReader,
file: Blob
): Promise<Uint8Array> {
return await new Promise((resolve, reject) => {
reader.onabort = () =>
reject(
Error(
`file reading was aborted, file size= ${convertBytesToHumanReadable(
file.size
)}`
)
);
reader.onerror = () =>
reject(
Error(
`file reading has failed, file size= ${convertBytesToHumanReadable(
file.size
)} , reason= ${reader.error}`
)
);
reader.onload = () => {
// Do whatever you want with the file contents
const result =
typeof reader.result === 'string'
? new TextEncoder().encode(reader.result)
: new Uint8Array(reader.result);
resolve(result);
};
reader.readAsArrayBuffer(file);
});
}


@ -91,7 +91,7 @@ export const updateTrash = async (
break;
}
resp = await HTTPService.get(
`${ENDPOINT}/trash/diff`,
`${ENDPOINT}/trash/v2/diff`,
{
sinceTime: time,
},


@ -12,7 +12,6 @@ const TYPE_IMAGE = 'image';
const CHUNK_SIZE_FOR_TYPE_DETECTION = 4100;
export async function getFileType(
reader: FileReader,
receivedFile: File | ElectronFile
): Promise<FileTypeInfo> {
try {
@ -20,7 +19,7 @@ export async function getFileType(
let typeResult: FileTypeResult;
if (receivedFile instanceof File) {
typeResult = await extractFileType(reader, receivedFile);
typeResult = await extractFileType(receivedFile);
} else {
typeResult = await extractElectronFileType(receivedFile);
}
@ -48,7 +47,7 @@ export async function getFileType(
} catch (e) {
const fileFormat = getFileExtension(receivedFile.name);
const formatMissedByTypeDetection = FORMAT_MISSED_BY_FILE_TYPE_LIB.find(
(a) => a.exactType === fileFormat
(a) => a.exactType === fileFormat.toLocaleLowerCase()
);
if (formatMissedByTypeDetection) {
return formatMissedByTypeDetection;
@ -64,9 +63,9 @@ export async function getFileType(
}
}
async function extractFileType(reader: FileReader, file: File) {
async function extractFileType(file: File) {
const fileChunkBlob = file.slice(0, CHUNK_SIZE_FOR_TYPE_DETECTION);
return getFileTypeFromBlob(reader, fileChunkBlob);
return getFileTypeFromBlob(fileChunkBlob);
}
async function extractElectronFileType(file: ElectronFile) {
@ -77,9 +76,9 @@ async function extractElectronFileType(file: ElectronFile) {
return fileTypeResult;
}
async function getFileTypeFromBlob(reader: FileReader, fileBlob: Blob) {
async function getFileTypeFromBlob(fileBlob: Blob) {
try {
const initialFiledata = await getUint8ArrayView(reader, fileBlob);
const initialFiledata = await getUint8ArrayView(fileBlob);
return await FileType.fromBuffer(initialFiledata);
} catch (e) {
throw Error(CustomError.TYPE_DETECTION_FAILED);


@ -38,8 +38,7 @@ export async function updateCreationTimeWithExif(
} else {
const fileURL = await downloadManager.getFile(file)[0];
const fileObject = await getFileFromURL(fileURL);
const reader = new FileReader();
const fileTypeInfo = await getFileType(reader, fileObject);
const fileTypeInfo = await getFileType(fileObject);
const exifData = await getRawExif(fileObject, fileTypeInfo);
if (fixOption === FIX_OPTIONS.DATE_TIME_ORIGINAL) {
correctCreationTime = getUnixTimeInMicroSeconds(


@ -1,5 +1,5 @@
import { NULL_EXTRACTED_METADATA, NULL_LOCATION } from 'constants/upload';
import { Location } from 'types/upload';
import { ElectronFile, Location } from 'types/upload';
import exifr from 'exifr';
import piexif from 'piexifjs';
import { FileTypeInfo } from 'types/upload';
@ -28,11 +28,20 @@ interface Exif {
}
export async function getExifData(
receivedFile: File,
receivedFile: File | ElectronFile,
fileTypeInfo: FileTypeInfo
): Promise<ParsedExtractedMetadata> {
let parsedEXIFData = NULL_EXTRACTED_METADATA;
try {
if (!(receivedFile instanceof File)) {
receivedFile = new File(
[await receivedFile.blob()],
receivedFile.name,
{
lastModified: receivedFile.lastModified,
}
);
}
const exifData = await getRawExif(receivedFile, fileTypeInfo);
if (!exifData) {
return parsedEXIFData;


@ -34,16 +34,14 @@ export function getFilename(file: File | ElectronFile) {
}
export async function readFile(
reader: FileReader,
fileTypeInfo: FileTypeInfo,
rawFile: File | ElectronFile
): Promise<FileInMemory> {
const { thumbnail, hasStaticThumbnail } = await generateThumbnail(
reader,
rawFile,
fileTypeInfo
);
logUploadInfo(`reading file datal${getFileNameSize(rawFile)} `);
logUploadInfo(`reading file data ${getFileNameSize(rawFile)} `);
let filedata: Uint8Array | DataStream;
if (!(rawFile instanceof File)) {
if (rawFile.size > MULTIPART_PART_SIZE) {
@ -52,12 +50,12 @@ export async function readFile(
FILE_READER_CHUNK_SIZE
);
} else {
filedata = await rawFile.arrayBuffer();
filedata = await getUint8ArrayView(rawFile);
}
} else if (rawFile.size > MULTIPART_PART_SIZE) {
filedata = getFileStream(reader, rawFile, FILE_READER_CHUNK_SIZE);
filedata = getFileStream(rawFile, FILE_READER_CHUNK_SIZE);
} else {
filedata = await getUint8ArrayView(reader, rawFile);
filedata = await getUint8ArrayView(rawFile);
}
logUploadInfo(`read file data successfully ${getFileNameSize(rawFile)} `);


@ -47,11 +47,17 @@ export function getLivePhotoFileType(
};
}
export function getLivePhotoMetadata(imageMetadata: Metadata) {
export function getLivePhotoMetadata(
imageMetadata: Metadata,
videoMetadata: Metadata
) {
return {
...imageMetadata,
title: getLivePhotoName(imageMetadata.title),
fileType: FILE_TYPE.LIVE_PHOTO,
imageHash: imageMetadata.hash,
videoHash: videoMetadata.hash,
hash: undefined,
};
}
@ -66,12 +72,10 @@ export function getLivePhotoName(imageTitle: string) {
}
export async function readLivePhoto(
reader: FileReader,
fileTypeInfo: FileTypeInfo,
livePhotoAssets: LivePhotoAssets
) {
const { thumbnail, hasStaticThumbnail } = await generateThumbnail(
reader,
livePhotoAssets.image,
{
exactType: fileTypeInfo.imageType,
@ -79,15 +83,9 @@ export async function readLivePhoto(
}
);
const image =
livePhotoAssets.image instanceof File
? await getUint8ArrayView(reader, livePhotoAssets.image)
: await livePhotoAssets.image.arrayBuffer();
const image = await getUint8ArrayView(livePhotoAssets.image);
const video =
livePhotoAssets.video instanceof File
? await getUint8ArrayView(reader, livePhotoAssets.video)
: await livePhotoAssets.video.arrayBuffer();
const video = await getUint8ArrayView(livePhotoAssets.video);
return {
filedata: await encodeMotionPhoto({
@ -102,101 +100,119 @@ export async function readLivePhoto(
}
export function clusterLivePhotoFiles(mediaFiles: FileWithCollection[]) {
const analysedMediaFiles: FileWithCollection[] = [];
mediaFiles
.sort((firstMediaFile, secondMediaFile) =>
splitFilenameAndExtension(
firstMediaFile.file.name
)[0].localeCompare(
splitFilenameAndExtension(secondMediaFile.file.name)[0]
try {
const analysedMediaFiles: FileWithCollection[] = [];
mediaFiles
.sort((firstMediaFile, secondMediaFile) =>
splitFilenameAndExtension(
firstMediaFile.file.name
)[0].localeCompare(
splitFilenameAndExtension(secondMediaFile.file.name)[0]
)
)
)
.sort(
(firstMediaFile, secondMediaFile) =>
firstMediaFile.collectionID - secondMediaFile.collectionID
);
let index = 0;
while (index < mediaFiles.length - 1) {
const firstMediaFile = mediaFiles[index];
const secondMediaFile = mediaFiles[index + 1];
const { fileTypeInfo: firstFileTypeInfo, metadata: firstFileMetadata } =
UploadService.getFileMetadataAndFileTypeInfo(
.sort(
(firstMediaFile, secondMediaFile) =>
firstMediaFile.collectionID - secondMediaFile.collectionID
);
let index = 0;
while (index < mediaFiles.length - 1) {
const firstMediaFile = mediaFiles[index];
const secondMediaFile = mediaFiles[index + 1];
const {
fileTypeInfo: firstFileTypeInfo,
metadata: firstFileMetadata,
} = UploadService.getFileMetadataAndFileTypeInfo(
firstMediaFile.localID
);
const {
fileTypeInfo: secondFileFileInfo,
metadata: secondFileMetadata,
} = UploadService.getFileMetadataAndFileTypeInfo(
secondMediaFile.localID
);
const firstFileIdentifier: LivePhotoIdentifier = {
collectionID: firstMediaFile.collectionID,
fileType: firstFileTypeInfo.fileType,
name: firstMediaFile.file.name,
size: firstMediaFile.file.size,
};
const secondFileIdentifier: LivePhotoIdentifier = {
collectionID: secondMediaFile.collectionID,
fileType: secondFileFileInfo.fileType,
name: secondMediaFile.file.name,
size: secondMediaFile.file.size,
};
const firstAsset = {
file: firstMediaFile.file,
metadata: firstFileMetadata,
fileTypeInfo: firstFileTypeInfo,
};
const secondAsset = {
file: secondMediaFile.file,
metadata: secondFileMetadata,
fileTypeInfo: secondFileFileInfo,
};
if (
areFilesLivePhotoAssets(firstFileIdentifier, secondFileIdentifier)
) {
let imageAsset: Asset;
let videoAsset: Asset;
if (
firstFileTypeInfo.fileType === FILE_TYPE.IMAGE &&
secondFileFileInfo.fileType === FILE_TYPE.VIDEO
) {
imageAsset = firstAsset;
videoAsset = secondAsset;
} else {
videoAsset = firstAsset;
imageAsset = secondAsset;
}
const livePhotoLocalID = firstMediaFile.localID;
analysedMediaFiles.push({
localID: livePhotoLocalID,
const {
fileTypeInfo: secondFileFileInfo,
metadata: secondFileMetadata,
} = UploadService.getFileMetadataAndFileTypeInfo(
secondMediaFile.localID
);
const firstFileIdentifier: LivePhotoIdentifier = {
collectionID: firstMediaFile.collectionID,
isLivePhoto: true,
livePhotoAssets: {
image: imageAsset.file,
video: videoAsset.file,
},
});
const livePhotoFileTypeInfo: FileTypeInfo = getLivePhotoFileType(
imageAsset.fileTypeInfo,
videoAsset.fileTypeInfo
);
const livePhotoMetadata: Metadata = getLivePhotoMetadata(
imageAsset.metadata
);
uploadService.setFileMetadataAndFileTypeInfo(livePhotoLocalID, {
fileTypeInfo: { ...livePhotoFileTypeInfo },
metadata: { ...livePhotoMetadata },
});
index += 2;
} else {
analysedMediaFiles.push({ ...firstMediaFile, isLivePhoto: false });
index += 1;
fileType: firstFileTypeInfo.fileType,
name: firstMediaFile.file.name,
size: firstMediaFile.file.size,
};
const secondFileIdentifier: LivePhotoIdentifier = {
collectionID: secondMediaFile.collectionID,
fileType: secondFileFileInfo.fileType,
name: secondMediaFile.file.name,
size: secondMediaFile.file.size,
};
const firstAsset = {
file: firstMediaFile.file,
metadata: firstFileMetadata,
fileTypeInfo: firstFileTypeInfo,
};
const secondAsset = {
file: secondMediaFile.file,
metadata: secondFileMetadata,
fileTypeInfo: secondFileFileInfo,
};
if (
areFilesLivePhotoAssets(
firstFileIdentifier,
secondFileIdentifier
)
) {
let imageAsset: Asset;
let videoAsset: Asset;
if (
firstFileTypeInfo.fileType === FILE_TYPE.IMAGE &&
secondFileFileInfo.fileType === FILE_TYPE.VIDEO
) {
imageAsset = firstAsset;
videoAsset = secondAsset;
} else {
videoAsset = firstAsset;
imageAsset = secondAsset;
}
const livePhotoLocalID = firstMediaFile.localID;
analysedMediaFiles.push({
localID: livePhotoLocalID,
collectionID: firstMediaFile.collectionID,
isLivePhoto: true,
livePhotoAssets: {
image: imageAsset.file,
video: videoAsset.file,
},
});
const livePhotoFileTypeInfo: FileTypeInfo =
getLivePhotoFileType(
imageAsset.fileTypeInfo,
videoAsset.fileTypeInfo
);
const livePhotoMetadata: Metadata = getLivePhotoMetadata(
imageAsset.metadata,
videoAsset.metadata
);
uploadService.setFileMetadataAndFileTypeInfo(livePhotoLocalID, {
fileTypeInfo: { ...livePhotoFileTypeInfo },
metadata: { ...livePhotoMetadata },
});
index += 2;
} else {
analysedMediaFiles.push({
...firstMediaFile,
isLivePhoto: false,
});
index += 1;
}
}
if (index === mediaFiles.length - 1) {
analysedMediaFiles.push({
...mediaFiles[index],
isLivePhoto: false,
});
}
return analysedMediaFiles;
} catch (e) {
logError(e, 'failed to cluster live photo');
throw e;
}
if (index === mediaFiles.length - 1) {
analysedMediaFiles.push({ ...mediaFiles[index], isLivePhoto: false });
}
return analysedMediaFiles;
}
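As an aside (not part of this changeset), a minimal sketch of the pairing idea behind clusterLivePhotoFiles: sort candidates by base name and collection, walk adjacent pairs, and treat a matching image + video as one live photo. The SimpleAsset type and areLivePhotoPair helper are simplified stand-ins for the real FileWithCollection / LivePhotoIdentifier machinery above.

// Simplified sketch; SimpleAsset and areLivePhotoPair are illustrative
// stand-ins, not the actual types and helpers used in this repository.
enum SimpleFileType {
    IMAGE,
    VIDEO,
}

interface SimpleAsset {
    collectionID: number;
    name: string;
    fileType: SimpleFileType;
}

const baseName = (name: string) => {
    const dot = name.lastIndexOf('.');
    return dot === -1 ? name : name.slice(0, dot);
};

function areLivePhotoPair(a: SimpleAsset, b: SimpleAsset) {
    // same collection, same base name, and exactly one image plus one video
    return (
        a.collectionID === b.collectionID &&
        baseName(a.name) === baseName(b.name) &&
        ((a.fileType === SimpleFileType.IMAGE &&
            b.fileType === SimpleFileType.VIDEO) ||
            (a.fileType === SimpleFileType.VIDEO &&
                b.fileType === SimpleFileType.IMAGE))
    );
}

function clusterPairs(assets: SimpleAsset[]) {
    // stable sorts: order by base name, then group by collection
    const sorted = [...assets]
        .sort((x, y) => baseName(x.name).localeCompare(baseName(y.name)))
        .sort((x, y) => x.collectionID - y.collectionID);
    const clustered: Array<SimpleAsset | [SimpleAsset, SimpleAsset]> = [];
    let index = 0;
    while (index < sorted.length - 1) {
        if (areLivePhotoPair(sorted[index], sorted[index + 1])) {
            clustered.push([sorted[index], sorted[index + 1]]); // live photo
            index += 2;
        } else {
            clustered.push(sorted[index]); // plain file
            index += 1;
        }
    }
    if (index === sorted.length - 1) {
        clustered.push(sorted[index]); // trailing unpaired file
    }
    return clustered;
}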
function areFilesLivePhotoAssets(

View file

@ -10,10 +10,15 @@ import {
ElectronFile,
} from 'types/upload';
import { NULL_EXTRACTED_METADATA, NULL_LOCATION } from 'constants/upload';
import { splitFilenameAndExtension } from 'utils/file';
import { getVideoMetadata } from './videoMetadataService';
import { getFileNameSize } from 'utils/upload';
import { logUploadInfo } from 'utils/upload';
import {
parseDateFromFusedDateString,
getUnixTimeInMicroSeconds,
tryToParseDateTime,
} from 'utils/time';
import { getFileHash } from 'utils/crypto';
interface ParsedMetadataJSONWithTitle {
title: string;
@ -32,15 +37,6 @@ export async function extractMetadata(
) {
let extractedMetadata: ParsedExtractedMetadata = NULL_EXTRACTED_METADATA;
if (fileTypeInfo.fileType === FILE_TYPE.IMAGE) {
if (!(receivedFile instanceof File)) {
receivedFile = new File(
[await receivedFile.blob()],
receivedFile.name,
{
lastModified: receivedFile.lastModified,
}
);
}
extractedMetadata = await getExifData(receivedFile, fileTypeInfo);
} else if (fileTypeInfo.fileType === FILE_TYPE.VIDEO) {
logUploadInfo(
@ -54,16 +50,19 @@ export async function extractMetadata(
);
}
const fileHash = await getFileHash(receivedFile);
const metadata: Metadata = {
title: `${splitFilenameAndExtension(receivedFile.name)[0]}.${
fileTypeInfo.exactType
}`,
title: receivedFile.name,
creationTime:
extractedMetadata.creationTime ?? receivedFile.lastModified * 1000,
extractedMetadata.creationTime ??
extractDateFromFileName(receivedFile.name) ??
receivedFile.lastModified * 1000,
modificationTime: receivedFile.lastModified * 1000,
latitude: extractedMetadata.location.latitude,
longitude: extractedMetadata.location.longitude,
fileType: fileTypeInfo.fileType,
hash: fileHash,
};
return metadata;
}
@ -74,10 +73,7 @@ export const getMetadataJSONMapKey = (
title: string
) => `${collectionID}-${title}`;
export async function parseMetadataJSON(
reader: FileReader,
receivedFile: File | ElectronFile
) {
export async function parseMetadataJSON(receivedFile: File | ElectronFile) {
try {
if (!(receivedFile instanceof File)) {
receivedFile = new File(
@ -85,18 +81,7 @@ export async function parseMetadataJSON(
receivedFile.name
);
}
const metadataJSON: object = await new Promise((resolve, reject) => {
reader.onabort = () => reject(Error('file reading was aborted'));
reader.onerror = () => reject(Error('file reading has failed'));
reader.onload = () => {
const result =
typeof reader.result !== 'string'
? new TextDecoder().decode(reader.result)
: reader.result;
resolve(JSON.parse(result));
};
reader.readAsText(receivedFile as File);
});
const metadataJSON: object = JSON.parse(await receivedFile.text());
const parsedMetadataJSON: ParsedMetadataJSON =
NULL_PARSED_METADATA_JSON;
@ -149,3 +134,39 @@ export async function parseMetadataJSON(
// ignore
}
}
// tries to extract the date from the file name if available, else returns null
export function extractDateFromFileName(filename: string): number {
try {
filename = filename.trim();
let parsedDate: Date;
if (filename.startsWith('IMG-') || filename.startsWith('VID-')) {
// Whatsapp media files
// sample name IMG-20171218-WA0028.jpg
parsedDate = parseDateFromFusedDateString(filename.split('-')[1]);
} else if (filename.startsWith('Screenshot_')) {
// Screenshots on droid
// sample name Screenshot_20181227-152914.jpg
parsedDate = parseDateFromFusedDateString(
filename.replaceAll('Screenshot_', '')
);
} else if (filename.startsWith('signal-')) {
// signal images
// sample name: signal-2018-08-21-100217.jpg
const dateString = convertSignalNameToFusedDateString(filename);
parsedDate = parseDateFromFusedDateString(dateString);
}
if (!parsedDate) {
parsedDate = tryToParseDateTime(filename);
}
return getUnixTimeInMicroSeconds(parsedDate);
} catch (e) {
logError(e, 'failed to extract date from file name');
return null;
}
}
function convertSignalNameToFusedDateString(filename: string) {
const dateStringParts = filename.split('-');
return `${dateStringParts[1]}${dateStringParts[2]}${dateStringParts[3]}-${dateStringParts[4]}`;
}
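For illustration only, a self-contained sketch of how the file names above reduce to a fused YYYYMMDD[-HHMMSS] string and then to a Date; the production path is extractDateFromFileName -> parseDateFromFusedDateString in utils/time, and the names below are local to this sketch.

// Illustrative only; mirrors the slicing done by parseDateFromFusedDateString.
function parseFusedDateString(fused: string): Date {
    const year = Number(fused.slice(0, 4));
    const month = Number(fused.slice(4, 6)) - 1; // JS Date months are 0-based
    const day = Number(fused.slice(6, 8));
    const hour = Number(fused.slice(9, 11));
    const minute = Number(fused.slice(11, 13));
    const second = Number(fused.slice(13, 15));
    return hour && minute && second
        ? new Date(year, month, day, hour, minute, second)
        : new Date(year, month, day);
}

// WhatsApp: 'IMG-20171218-WA0028.jpg' -> '20171218' -> 2017-12-18
console.log(parseFusedDateString('IMG-20171218-WA0028.jpg'.split('-')[1]));

// Screenshot: 'Screenshot_20181227-152914.jpg' -> '20181227-152914.jpg';
// only the first 15 characters are sliced, so the extension is ignored
console.log(
    parseFusedDateString(
        'Screenshot_20181227-152914.jpg'.replaceAll('Screenshot_', '')
    )
);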

View file

@ -1,6 +1,7 @@
import {
FILE_CHUNKS_COMBINED_FOR_A_UPLOAD_PART,
RANDOM_PERCENTAGE_PROGRESS_FOR_PUT,
USE_CF_PROXY,
} from 'constants/upload';
import UIService from './uiService';
import UploadHttpClient from './uploadHttpClient';
@ -56,12 +57,20 @@ export async function uploadStreamInParts(
percentPerPart,
index
);
const eTag = await UploadHttpClient.putFilePart(
fileUploadURL,
uploadChunk,
progressTracker
);
let eTag = null;
if (USE_CF_PROXY) {
eTag = await UploadHttpClient.putFilePartV2(
fileUploadURL,
uploadChunk,
progressTracker
);
} else {
eTag = await UploadHttpClient.putFilePart(
fileUploadURL,
uploadChunk,
progressTracker
);
}
partEtags.push({ PartNumber: index + 1, ETag: eTag });
}
const { done } = await streamReader.read();
@ -103,5 +112,9 @@ async function completeMultipartUpload(
{ CompleteMultipartUpload: { Part: partEtags } },
options
);
await UploadHttpClient.completeMultipartUpload(completeURL, body);
if (USE_CF_PROXY) {
await UploadHttpClient.completeMultipartUploadV2(completeURL, body);
} else {
await UploadHttpClient.completeMultipartUpload(completeURL, body);
}
}
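As context for the USE_CF_PROXY branch above, a hedged sketch of the surrounding multipart flow: PUT each part against its pre-signed URL (directly or via the proxy), collect the returned ETags, then POST the part list to the completion URL. uploadPart and completeUpload are hypothetical placeholders for UploadHttpClient.putFilePart(V2) and completeMultipartUpload(V2), and each part here maps to a single stream chunk for brevity, whereas the real code combines FILE_CHUNKS_COMBINED_FOR_A_UPLOAD_PART chunks per part.

// Hypothetical sketch; uploadPart / completeUpload stand in for the
// UploadHttpClient methods used in this file.
interface PartEtag {
    PartNumber: number;
    ETag: string;
}

type UploadPart = (url: string, part: Uint8Array) => Promise<string>;
type CompleteUpload = (url: string, parts: PartEtag[]) => Promise<void>;

async function uploadStreamSketch(
    reader: ReadableStreamDefaultReader<Uint8Array>,
    partURLs: string[],
    completeURL: string,
    uploadPart: UploadPart,
    completeUpload: CompleteUpload
) {
    const partEtags: PartEtag[] = [];
    for (let index = 0; index < partURLs.length; index++) {
        const { done, value: chunk } = await reader.read();
        if (done || !chunk) {
            break;
        }
        // PUT the part (directly to the signed URL, or via the uploader proxy
        // when USE_CF_PROXY is enabled) and remember its ETag
        const eTag = await uploadPart(partURLs[index], chunk);
        partEtags.push({ PartNumber: index + 1, ETag: eTag });
    }
    // finish by posting the collected part list to the completion URL
    await completeUpload(completeURL, partEtags);
}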

View file

@ -24,7 +24,6 @@ interface Dimension {
}
export async function generateThumbnail(
reader: FileReader,
file: File | ElectronFile,
fileTypeInfo: FileTypeInfo
): Promise<{ thumbnail: Uint8Array; hasStaticThumbnail: boolean }> {
@ -33,10 +32,10 @@ export async function generateThumbnail(
let hasStaticThumbnail = false;
let canvas = document.createElement('canvas');
let thumbnail: Uint8Array;
if (!(file instanceof File)) {
file = new File([await file.blob()], file.name);
}
try {
if (!(file instanceof File)) {
file = new File([await file.blob()], file.name);
}
if (fileTypeInfo.fileType === FILE_TYPE.IMAGE) {
const isHEIC = isFileHEIC(fileTypeInfo.exactType);
canvas = await generateImageThumbnail(file, isHEIC);
@ -72,7 +71,7 @@ export async function generateThumbnail(
}
}
const thumbnailBlob = await thumbnailCanvasToBlob(canvas);
thumbnail = await getUint8ArrayView(reader, thumbnailBlob);
thumbnail = await getUint8ArrayView(thumbnailBlob);
if (thumbnail.length === 0) {
throw Error('EMPTY THUMBNAIL');
}

View file

@ -1,5 +1,5 @@
import HTTPService from 'services/HTTPService';
import { getEndpoint } from 'utils/common/apiUtil';
import { getEndpoint, getUploadEndpoint } from 'utils/common/apiUtil';
import { getToken } from 'utils/common/key';
import { logError } from 'utils/sentry';
import { EnteFile } from 'types/file';
@ -8,6 +8,8 @@ import { UploadFile, UploadURL, MultipartUploadURLs } from 'types/upload';
import { retryHTTPCall } from 'utils/upload/uploadRetrier';
const ENDPOINT = getEndpoint();
const UPLOAD_ENDPOINT = getUploadEndpoint();
const MAX_URL_REQUESTS = 50;
class UploadHttpClient {
@ -106,6 +108,30 @@ class UploadHttpClient {
}
}
async putFileV2(
fileUploadURL: UploadURL,
file: Uint8Array,
progressTracker
): Promise<string> {
try {
await retryHTTPCall(() =>
HTTPService.put(
`${UPLOAD_ENDPOINT}/file-upload`,
file,
null,
{
'UPLOAD-URL': fileUploadURL.url,
},
progressTracker
)
);
return fileUploadURL.objectKey;
} catch (e) {
logError(e, 'putFile to dataStore failed ');
throw e;
}
}
async putFilePart(
partUploadURL: string,
filePart: Uint8Array,
@ -134,6 +160,36 @@ class UploadHttpClient {
}
}
async putFilePartV2(
partUploadURL: string,
filePart: Uint8Array,
progressTracker
) {
try {
const response = await retryHTTPCall(async () => {
const resp = await HTTPService.put(
`${UPLOAD_ENDPOINT}/multipart-upload`,
filePart,
null,
{
'UPLOAD-URL': partUploadURL,
},
progressTracker
);
if (!resp?.data?.etag) {
const err = Error(CustomError.ETAG_MISSING);
logError(err, 'putFile in parts failed');
throw err;
}
return resp;
});
return response.data.etag as string;
} catch (e) {
logError(e, 'put filePart failed');
throw e;
}
}
async completeMultipartUpload(completeURL: string, reqBody: any) {
try {
await retryHTTPCall(() =>
@ -146,6 +202,25 @@ class UploadHttpClient {
throw e;
}
}
async completeMultipartUploadV2(completeURL: string, reqBody: any) {
try {
await retryHTTPCall(() =>
HTTPService.post(
`${UPLOAD_ENDPOINT}/multipart-complete`,
reqBody,
null,
{
'content-type': 'text/xml',
'UPLOAD-URL': completeURL,
}
)
);
} catch (e) {
logError(e, 'complete multipart upload failed');
throw e;
}
}
}
export default new UploadHttpClient();
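A minimal sketch of what the V2 ('CF proxy') calls above amount to on the wire, using fetch for brevity instead of the project's HTTPService wrapper: the encrypted bytes go to the uploader endpoint, the original pre-signed URL travels in the UPLOAD-URL header, and the multipart variant reads the part's ETag back from the JSON body.

// Sketch only; the endpoint path and UPLOAD-URL header mirror putFilePartV2,
// but fetch replaces HTTPService and retryHTTPCall.
const UPLOADER_ENDPOINT = 'https://uploader.ente.io';

async function putFilePartViaProxy(
    partUploadURL: string,
    filePart: Uint8Array
): Promise<string> {
    const resp = await fetch(`${UPLOADER_ENDPOINT}/multipart-upload`, {
        method: 'PUT',
        headers: { 'UPLOAD-URL': partUploadURL },
        body: filePart,
    });
    const { etag } = await resp.json();
    if (!etag) {
        // matches the ETAG_MISSING check in putFilePartV2 above
        throw new Error('etag missing');
    }
    return etag as string;
}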

View file

@ -5,6 +5,7 @@ import {
sortFilesIntoCollections,
sortFiles,
preservePhotoswipeProps,
decryptFile,
} from 'utils/file';
import { logError } from 'utils/sentry';
import { getMetadataJSONMapKey, parseMetadataJSON } from './metadataService';
@ -35,9 +36,6 @@ import {
import { ComlinkWorker } from 'utils/comlink';
import { FILE_TYPE } from 'constants/file';
import uiService from './uiService';
import { getData, LS_KEYS, setData } from 'utils/storage/localStorage';
import { dedupe } from 'utils/export';
import { convertBytesToHumanReadable } from 'utils/billing';
import { logUploadInfo } from 'utils/upload';
import isElectron from 'is-electron';
import ImportService from 'services/importService';
@ -60,10 +58,8 @@ class UploadManager {
UIService.init(progressUpdater);
this.setFiles = setFiles;
}
private uploadCancelled: boolean;
private resetState() {
this.uploadCancelled = false;
this.filesToBeUploaded = [];
this.remainingFiles = [];
this.failedFiles = [];
@ -115,13 +111,42 @@ class UploadManager {
UploadService.setMetadataAndFileTypeInfoMap(
this.metadataAndFileTypeInfoMap
);
UIService.setUploadStage(UPLOAD_STAGES.START);
logUploadInfo(`clusterLivePhotoFiles called`);
// filter out files whose metadata detection failed or that have been skipped because the files are too large,
// as they will be rejected during upload and are not valid upload files that we need for clustering
const rejectedFileLocalIDs = new Set(
[...this.metadataAndFileTypeInfoMap.entries()].map(
([localID, metadataAndFileTypeInfo]) => {
if (
!metadataAndFileTypeInfo.metadata ||
!metadataAndFileTypeInfo.fileTypeInfo
) {
return localID;
}
}
)
);
const rejectedFiles = [];
const filesWithMetadata = [];
mediaFiles.forEach((m) => {
if (rejectedFileLocalIDs.has(m.localID)) {
rejectedFiles.push(m);
} else {
filesWithMetadata.push(m);
}
});
const analysedMediaFiles =
UploadService.clusterLivePhotoFiles(mediaFiles);
UploadService.clusterLivePhotoFiles(filesWithMetadata);
const allFiles = [...rejectedFiles, ...analysedMediaFiles];
uiService.setFilenames(
new Map<number, string>(
analysedMediaFiles.map((mediaFile) => [
allFiles.map((mediaFile) => [
mediaFile.localID,
UploadService.getAssetName(mediaFile),
])
@ -129,20 +154,22 @@ class UploadManager {
);
UIService.setHasLivePhoto(
mediaFiles.length !== analysedMediaFiles.length
mediaFiles.length !== allFiles.length
);
logUploadInfo(
`got live photos: ${
mediaFiles.length !== analysedMediaFiles.length
}`
`got live photos: ${mediaFiles.length !== allFiles.length}`
);
await this.uploadMediaFiles(analysedMediaFiles);
await this.uploadMediaFiles(allFiles);
}
UIService.setUploadStage(UPLOAD_STAGES.FINISH);
UIService.setPercentComplete(FILE_UPLOAD_COMPLETED);
} catch (e) {
logError(e, 'uploading failed with error');
logUploadInfo(
`uploading failed with error -> ${e.message}
${(e as Error).stack}`
);
throw e;
} finally {
for (let i = 0; i < MAX_CONCURRENT_UPLOADS; i++) {
@ -156,18 +183,14 @@ class UploadManager {
logUploadInfo(`parseMetadataJSONFiles function executed `);
UIService.reset(metadataFiles.length);
const reader = new FileReader();
for (const { file, collectionID } of metadataFiles) {
try {
if (this.uploadCancelled) {
break;
}
logUploadInfo(
`parsing metadata json file ${getFileNameSize(file)}`
);
const parsedMetadataJSONWithTitle = await parseMetadataJSON(
reader,
file
);
if (parsedMetadataJSONWithTitle) {
@ -187,7 +210,7 @@ class UploadManager {
} catch (e) {
logError(e, 'parsing failed for a file');
logUploadInfo(
`successfully parsed metadata json file ${getFileNameSize(
`failed to parse metadata json file ${getFileNameSize(
file
)} error: ${e.message}`
);
@ -203,12 +226,8 @@ class UploadManager {
try {
logUploadInfo(`extractMetadataFromFiles executed`);
UIService.reset(mediaFiles.length);
const reader = new FileReader();
for (const { file, localID, collectionID } of mediaFiles) {
try {
if (this.uploadCancelled) {
break;
}
const { fileTypeInfo, metadata } = await (async () => {
if (file.size >= MAX_FILE_SIZE_SUPPORTED) {
logUploadInfo(
@ -220,7 +239,6 @@ class UploadManager {
return { fileTypeInfo: null, metadata: null };
}
const fileTypeInfo = await UploadService.getFileType(
reader,
file
);
if (fileTypeInfo.fileType === FILE_TYPE.OTHERS) {
@ -264,14 +282,11 @@ class UploadManager {
}
} catch (e) {
logError(e, 'error extracting metadata');
// silently ignore the error
throw e;
}
}
private async uploadMediaFiles(mediaFiles: FileWithCollection[]) {
if (this.uploadCancelled) {
return;
}
logUploadInfo(`uploadMediaFiles called`);
this.filesToBeUploaded.push(...mediaFiles);
@ -298,60 +313,83 @@ class UploadManager {
this.cryptoWorkers[i] = cryptoWorker;
uploadProcesses.push(
this.uploadNextFileInQueue(
await new this.cryptoWorkers[i].comlink(),
new FileReader()
await new this.cryptoWorkers[i].comlink()
)
);
}
await Promise.all(uploadProcesses);
}
private async uploadNextFileInQueue(worker: any, reader: FileReader) {
private async uploadNextFileInQueue(worker: any) {
while (this.filesToBeUploaded.length > 0) {
if (this.uploadCancelled) {
return;
}
const fileWithCollection = this.filesToBeUploaded.pop();
let fileWithCollection = this.filesToBeUploaded.pop();
const { collectionID } = fileWithCollection;
const existingFilesInCollection =
this.existingFilesCollectionWise.get(collectionID) ?? [];
const collection = this.collections.get(collectionID);
const { fileUploadResult, file } = await uploader(
worker,
reader,
existingFilesInCollection,
this.existingFiles,
{ ...fileWithCollection, collection }
fileWithCollection = { ...fileWithCollection, collection };
const { fileUploadResult, uploadedFile, skipDecryption } =
await uploader(
worker,
existingFilesInCollection,
this.existingFiles,
fileWithCollection
);
UIService.moveFileToResultList(
fileWithCollection.localID,
fileUploadResult
);
if (fileUploadResult === FileUploadResults.UPLOADED) {
this.existingFiles.push(file);
UploadService.reducePendingUploadCount();
await this.postUploadTask(
fileUploadResult,
uploadedFile,
skipDecryption,
fileWithCollection
);
}
}
async postUploadTask(
fileUploadResult: FileUploadResults,
uploadedFile: EnteFile,
skipDecryption: boolean,
fileWithCollection: FileWithCollection
) {
try {
logUploadInfo(`uploadedFile ${JSON.stringify(uploadedFile)}`);
if (
(fileUploadResult === FileUploadResults.UPLOADED ||
fileUploadResult ===
FileUploadResults.UPLOADED_WITH_STATIC_THUMBNAIL) &&
!skipDecryption
) {
const decryptedFile = await decryptFile(
uploadedFile,
fileWithCollection.collection.key
);
this.existingFiles.push(decryptedFile);
this.existingFiles = sortFiles(this.existingFiles);
await setLocalFiles(this.existingFiles);
this.setFiles(preservePhotoswipeProps(this.existingFiles));
if (!this.existingFilesCollectionWise.has(file.collectionID)) {
this.existingFilesCollectionWise.set(file.collectionID, []);
if (
!this.existingFilesCollectionWise.has(
decryptedFile.collectionID
)
) {
this.existingFilesCollectionWise.set(
decryptedFile.collectionID,
[]
);
}
this.existingFilesCollectionWise
.get(file.collectionID)
.push(file);
.get(decryptedFile.collectionID)
.push(decryptedFile);
}
if (fileUploadResult === FileUploadResults.FAILED) {
this.failedFiles.push(fileWithCollection);
setData(LS_KEYS.FAILED_UPLOADS, {
files: dedupe([
...(getData(LS_KEYS.FAILED_UPLOADS)?.files ?? []),
...this.failedFiles.map(
(file) =>
`${
file.file.name
}_${convertBytesToHumanReadable(
file.file.size
)}`
),
]),
});
} else if (fileUploadResult === FileUploadResults.BLOCKED) {
if (
fileUploadResult === FileUploadResults.FAILED ||
fileUploadResult === FileUploadResults.BLOCKED
) {
this.failedFiles.push(fileWithCollection);
}
@ -362,12 +400,13 @@ class UploadManager {
);
ImportService.updatePendingUploads(this.remainingFiles);
}
UIService.moveFileToResultList(
fileWithCollection.localID,
fileUploadResult
} catch (e) {
logError(e, 'failed to do post file upload action');
logUploadInfo(
`failed to do post file upload action -> ${e.message}
${(e as Error).stack}`
);
UploadService.reducePendingUploadCount();
throw e;
}
}
@ -376,11 +415,6 @@ class UploadManager {
...this.collections.values(),
]);
}
cancelRemainingUploads() {
this.remainingFiles = [];
this.uploadCancelled = true;
}
}
export default new UploadManager();
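For the metadata pre-filter added above, a reduced sketch of the partition step: files whose metadata or file-type detection failed are set aside before live-photo clustering and re-appended afterwards so they still reach the result list as failures. Types are trimmed to what the logic needs.

// Reduced sketch of the rejected-files partition; types trimmed for brevity.
interface MiniFile {
    localID: number;
}

interface MiniInfo {
    metadata: object | null;
    fileTypeInfo: object | null;
}

function partitionByMetadata(
    files: MiniFile[],
    infoMap: Map<number, MiniInfo>
) {
    const rejected: MiniFile[] = [];
    const withMetadata: MiniFile[] = [];
    for (const file of files) {
        const info = infoMap.get(file.localID);
        // a missing metadata or fileTypeInfo entry means extraction failed
        // (or the file was skipped for exceeding the size limit)
        if (!info?.metadata || !info?.fileTypeInfo) {
            rejected.push(file);
        } else {
            withMetadata.push(file);
        }
    }
    return { rejected, withMetadata };
}

// cluster only `withMetadata`, then upload [...rejected, ...clusteredFiles]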

View file

@ -32,6 +32,7 @@ import {
import { encryptFile, getFileSize, readFile } from './fileService';
import { uploadStreamUsingMultipart } from './multiPartUploadService';
import UIService from './uiService';
import { USE_CF_PROXY } from 'constants/upload';
class UploadService {
private uploadURLs: UploadURL[] = [];
@ -76,18 +77,17 @@ class UploadService {
: getFilename(file);
}
async getFileType(reader: FileReader, file: File | ElectronFile) {
return getFileType(reader, file);
async getFileType(file: File | ElectronFile) {
return getFileType(file);
}
async readAsset(
reader: FileReader,
fileTypeInfo: FileTypeInfo,
{ isLivePhoto, file, livePhotoAssets }: UploadAsset
) {
return isLivePhoto
? await readLivePhoto(reader, fileTypeInfo, livePhotoAssets)
: await readFile(reader, fileTypeInfo, file);
? await readLivePhoto(fileTypeInfo, livePhotoAssets)
: await readFile(fileTypeInfo, file);
}
async extractFileMetadata(
@ -142,18 +142,35 @@ class UploadService {
file.localID
);
const fileUploadURL = await this.getUploadURL();
fileObjectKey = await UploadHttpClient.putFile(
fileUploadURL,
file.file.encryptedData,
progressTracker
);
if (USE_CF_PROXY) {
fileObjectKey = await UploadHttpClient.putFileV2(
fileUploadURL,
file.file.encryptedData,
progressTracker
);
} else {
fileObjectKey = await UploadHttpClient.putFile(
fileUploadURL,
file.file.encryptedData,
progressTracker
);
}
}
const thumbnailUploadURL = await this.getUploadURL();
const thumbnailObjectKey = await UploadHttpClient.putFile(
thumbnailUploadURL,
file.thumbnail.encryptedData as Uint8Array,
null
);
let thumbnailObjectKey: string = null;
if (USE_CF_PROXY) {
thumbnailObjectKey = await UploadHttpClient.putFileV2(
thumbnailUploadURL,
file.thumbnail.encryptedData as Uint8Array,
null
);
} else {
thumbnailObjectKey = await UploadHttpClient.putFile(
thumbnailUploadURL,
file.thumbnail.encryptedData as Uint8Array,
null
);
}
const backupedFile: BackupedFile = {
file: {

View file

@ -1,9 +1,9 @@
import { EnteFile } from 'types/file';
import { handleUploadError, CustomError } from 'utils/error';
import { decryptFile } from 'utils/file';
import { logError } from 'utils/sentry';
import {
fileAlreadyInCollection,
findSameFileInOtherCollection,
shouldDedupeAcrossCollection,
} from 'utils/upload';
import UploadHttpClient from './uploadHttpClient';
@ -15,14 +15,15 @@ import { FileWithCollection, BackupedFile, UploadFile } from 'types/upload';
import { logUploadInfo } from 'utils/upload';
import { convertBytesToHumanReadable } from 'utils/billing';
import { sleep } from 'utils/common';
import { addToCollection } from 'services/collectionService';
interface UploadResponse {
fileUploadResult: FileUploadResults;
file?: EnteFile;
uploadedFile?: EnteFile;
skipDecryption?: boolean;
}
export default async function uploader(
worker: any,
reader: FileReader,
existingFilesInCollection: EnteFile[],
existingFiles: EnteFile[],
fileWithCollection: FileWithCollection
@ -54,6 +55,25 @@ export default async function uploader(
return { fileUploadResult: FileUploadResults.ALREADY_UPLOADED };
}
const sameFileInOtherCollection = findSameFileInOtherCollection(
existingFiles,
metadata
);
if (sameFileInOtherCollection) {
logUploadInfo(
`same file in other collection found for ${fileNameSize}`
);
const resultFile = Object.assign({}, sameFileInOtherCollection);
resultFile.collectionID = collection.id;
await addToCollection(collection, [resultFile]);
return {
fileUploadResult: FileUploadResults.UPLOADED,
uploadedFile: resultFile,
skipDecryption: true,
};
}
// iOS exports via album don't include files that are not part of any album, and if the user exports all photos, album info is not preserved.
// This change allows users to export by album and upload to ente, then export all photos and upload only the files that were not already uploaded
// as part of the albums
@ -66,11 +86,7 @@ export default async function uploader(
}
logUploadInfo(`reading asset ${fileNameSize}`);
const file = await UploadService.readAsset(
reader,
fileTypeInfo,
uploadAsset
);
const file = await UploadService.readAsset(fileTypeInfo, uploadAsset);
if (file.hasStaticThumbnail) {
metadata.hasStaticThumbnail = true;
@ -103,14 +119,15 @@ export default async function uploader(
logUploadInfo(`uploadFile ${fileNameSize}`);
const uploadedFile = await UploadHttpClient.uploadFile(uploadFile);
const decryptedFile = await decryptFile(uploadedFile, collection.key);
UIService.increaseFileUploaded();
logUploadInfo(`${fileNameSize} successfully uploaded`);
return {
fileUploadResult: FileUploadResults.UPLOADED,
file: decryptedFile,
fileUploadResult: metadata.hasStaticThumbnail
? FileUploadResults.UPLOADED_WITH_STATIC_THUMBNAIL
: FileUploadResults.UPLOADED,
uploadedFile: uploadedFile,
};
} catch (e) {
logUploadInfo(
@ -118,7 +135,7 @@ export default async function uploader(
);
logError(e, 'file upload failed', {
fileFormat: fileTypeInfo.exactType,
fileFormat: fileTypeInfo?.exactType,
});
const error = handleUploadError(e);
switch (error.message) {

View file

@ -2,15 +2,20 @@ import { NULL_EXTRACTED_METADATA } from 'constants/upload';
import ffmpegService from 'services/ffmpeg/ffmpegService';
import { ElectronFile } from 'types/upload';
import { logError } from 'utils/sentry';
import { logUploadInfo } from 'utils/upload';
export async function getVideoMetadata(file: File | ElectronFile) {
let videoMetadata = NULL_EXTRACTED_METADATA;
if (!(file instanceof File)) {
file = new File([await file.blob()], file.name, {
lastModified: file.lastModified,
});
}
try {
if (!(file instanceof File)) {
logUploadInfo('get file blob for video metadata extraction');
file = new File([await file.blob()], file.name, {
lastModified: file.lastModified,
});
logUploadInfo(
'got file blob for video metadata extraction successfully'
);
}
videoMetadata = await ffmpegService.extractMetadata(file);
} catch (e) {
logError(e, 'failed to get video metadata');

View file

@ -1,5 +1,5 @@
import { PAGES } from 'constants/pages';
import { getEndpoint } from 'utils/common/apiUtil';
import { getEndpoint, getFamilyPortalURL } from 'utils/common/apiUtil';
import { clearKeys } from 'utils/storage/sessionStorage';
import router from 'next/router';
import { clearData, getData, LS_KEYS } from 'utils/storage/localStorage';
@ -17,6 +17,8 @@ import {
TwoFactorRecoveryResponse,
UserDetails,
} from 'types/user';
import { getFamilyData, isPartOfFamily } from 'utils/billing';
import { ServerErrorCodes } from 'utils/error';
const ENDPOINT = getEndpoint();
@ -53,6 +55,42 @@ export const getPaymentToken = async () => {
return resp.data['paymentToken'];
};
export const getFamiliesToken = async () => {
try {
const token = getToken();
const resp = await HTTPService.get(
`${ENDPOINT}/users/families-token`,
null,
{
'X-Auth-Token': token,
}
);
return resp.data['familiesToken'];
} catch (e) {
logError(e, 'failed to get family token');
throw e;
}
};
export const getRoadmapRedirectURL = async () => {
try {
const token = getToken();
const resp = await HTTPService.get(
`${ENDPOINT}/users/roadmap/v2`,
null,
{
'X-Auth-Token': token,
}
);
return resp.data['url'];
} catch (e) {
logError(e, 'failed to get roadmap url');
throw e;
}
};
export const verifyOtt = (email: string, ott: string) =>
HTTPService.post(`${ENDPOINT}/users/verify-email`, { email, ott });
@ -124,7 +162,12 @@ export const isTokenValid = async () => {
}
return true;
} catch (e) {
return false;
logError(e, 'session-validity api call failed');
if (e.status?.toString() === ServerErrorCodes.SESSION_EXPIRED) {
return false;
} else {
return true;
}
}
};
@ -245,11 +288,33 @@ export const changeEmail = async (email: string, ott: string) => {
);
};
export const getUserDetails = async (): Promise<UserDetails> => {
const token = getToken();
export const getUserDetailsV2 = async (): Promise<UserDetails> => {
try {
const token = getToken();
const resp = await HTTPService.get(`${ENDPOINT}/users/details`, null, {
'X-Auth-Token': token,
});
return resp.data['details'];
const resp = await HTTPService.get(
`${ENDPOINT}/users/details/v2?memoryCount=false`,
null,
{
'X-Auth-Token': token,
}
);
return resp.data;
} catch (e) {
logError(e, 'failed to get user details v2');
throw e;
}
};
export const getFamilyPortalRedirectURL = async () => {
try {
const jwtToken = await getFamiliesToken();
const isFamilyCreated = isPartOfFamily(getFamilyData());
return `${getFamilyPortalURL()}?token=${jwtToken}&isFamilyCreated=${isFamilyCreated}&redirectURL=${
window.location.origin
}/gallery`;
} catch (e) {
logError(e, 'unable to generate family portal URL');
throw e;
}
};
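For concreteness, the shape of the URL that getFamilyPortalRedirectURL builds, with placeholder values: the token comes from /users/families-token and isFamilyCreated from isPartOfFamily(getFamilyData()).

// Placeholder values for illustration only.
const familyPortalURL = 'https://family.ente.io';
const jwtToken = '<families-token>';
const isFamilyCreated = true;
const redirect = `${familyPortalURL}?token=${jwtToken}&isFamilyCreated=${isFamilyCreated}&redirectURL=${window.location.origin}/gallery`;
// e.g. https://family.ente.io?token=<families-token>&isFamilyCreated=true&redirectURL=<origin>/gallery
console.log(redirect);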

View file

@ -25,6 +25,9 @@ export interface Metadata {
longitude: number;
fileType: FILE_TYPE;
hasStaticThumbnail?: boolean;
hash?: string;
imageHash?: string;
videoHash?: string;
}
export interface Location {

View file

@ -70,10 +70,24 @@ export interface TwoFactorRecoveryResponse {
secretDecryptionNonce: string;
}
export interface FamilyMember {
email: string;
usage: number;
id: string;
isAdmin: boolean;
}
export interface FamilyData {
storage: number;
expiry: number;
members: FamilyMember[];
}
export interface UserDetails {
email: string;
usage: number;
fileCount: number;
sharedCollectionCount: number;
subscription: Subscription;
familyData?: FamilyData;
}

View file

@ -7,6 +7,8 @@ import { getData, LS_KEYS } from '../storage/localStorage';
import { CustomError } from '../error';
import { logError } from '../sentry';
import { SetDialogBoxAttributes } from 'types/dialogBox';
import { getFamilyPortalRedirectURL } from 'services/userService';
import { FamilyData, FamilyMember, User } from 'types/user';
const PAYMENT_PROVIDER_STRIPE = 'stripe';
const PAYMENT_PROVIDER_APPSTORE = 'appstore';
@ -44,8 +46,7 @@ export function convertBytesToHumanReadable(
return (bytes / Math.pow(1024, i)).toFixed(precision) + ' ' + sizes[i];
}
export function hasPaidSubscription(subscription?: Subscription) {
subscription = subscription ?? getUserSubscription();
export function hasPaidSubscription(subscription: Subscription) {
return (
subscription &&
isSubscriptionActive(subscription) &&
@ -53,20 +54,17 @@ export function hasPaidSubscription(subscription?: Subscription) {
);
}
export function isSubscribed(subscription?: Subscription) {
subscription = subscription ?? getUserSubscription();
export function isSubscribed(subscription: Subscription) {
return (
hasPaidSubscription(subscription) &&
!isSubscriptionCancelled(subscription)
);
}
export function isSubscriptionActive(subscription?: Subscription): boolean {
subscription = subscription ?? getUserSubscription();
export function isSubscriptionActive(subscription: Subscription): boolean {
return subscription && subscription.expiryTime > Date.now() * 1000;
}
export function isOnFreePlan(subscription?: Subscription) {
subscription = subscription ?? getUserSubscription();
export function isOnFreePlan(subscription: Subscription) {
return (
subscription &&
isSubscriptionActive(subscription) &&
@ -74,15 +72,56 @@ export function isOnFreePlan(subscription?: Subscription) {
);
}
export function isSubscriptionCancelled(subscription?: Subscription) {
subscription = subscription ?? getUserSubscription();
export function isSubscriptionCancelled(subscription: Subscription) {
return subscription && subscription.attributes.isCancelled;
}
// isPartOfFamily return true if the current user is part of some family plan
export function isPartOfFamily(familyData: FamilyData): boolean {
return Boolean(
familyData && familyData.members && familyData.members.length > 0
);
}
// hasNonAdminFamilyMembers return true if the admin user has members in his family
export function hasNonAdminFamilyMembers(familyData: FamilyData): boolean {
return Boolean(isPartOfFamily(familyData) && familyData.members.length > 1);
}
export function isFamilyAdmin(familyData: FamilyData): boolean {
const familyAdmin: FamilyMember = getFamilyPlanAdmin(familyData);
const user: User = getData(LS_KEYS.USER);
return familyAdmin.email === user.email;
}
export function getFamilyPlanAdmin(familyData: FamilyData): FamilyMember {
if (isPartOfFamily(familyData)) {
return familyData.members.find((x) => x.isAdmin);
} else {
logError(
Error(
'verify user is part of family plan before calling this method'
),
'invalid getFamilyPlanAdmin call'
);
}
}
export function getStorage(familyData: FamilyData): number {
const subscription: Subscription = getUserSubscription();
return isPartOfFamily(familyData)
? familyData.storage
: subscription.storage;
}
export function getUserSubscription(): Subscription {
return getData(LS_KEYS.SUBSCRIPTION);
}
export function getFamilyData(): FamilyData {
return getData(LS_KEYS.FAMILY_DATA);
}
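A small usage sketch for the family helpers above, with made-up sample data: isPartOfFamily only needs a non-empty members list, hasNonAdminFamilyMembers needs more than one member, and getFamilyPlanAdmin returns the member flagged isAdmin.

import { FamilyData } from 'types/user';
import {
    isPartOfFamily,
    hasNonAdminFamilyMembers,
    getFamilyPlanAdmin,
} from 'utils/billing';

// Made-up sample data; byte and timestamp values are arbitrary.
const sampleFamily: FamilyData = {
    storage: 200 * 1024 * 1024 * 1024, // 200 GB shared quota
    expiry: 1735689600000000,
    members: [
        { email: 'admin@example.org', usage: 0, id: '1', isAdmin: true },
        { email: 'member@example.org', usage: 0, id: '2', isAdmin: false },
    ],
};

console.log(isPartOfFamily(sampleFamily)); // true
console.log(hasNonAdminFamilyMembers(sampleFamily)); // true
console.log(getFamilyPlanAdmin(sampleFamily).email); // 'admin@example.org'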
export function getPlans(): Plan[] {
return getData(LS_KEYS.PLANS);
}
@ -207,6 +246,25 @@ export async function updatePaymentMethod(
}
}
export async function manageFamilyMethod(
setDialogMessage: SetDialogBoxAttributes,
setLoading: SetLoading
) {
try {
setLoading(true);
const url = await getFamilyPortalRedirectURL();
window.location.href = url;
} catch (error) {
logError(error, 'failed to redirect to family portal');
setLoading(false);
setDialogMessage({
title: constants.ERROR,
content: constants.UNKNOWN_ERROR,
close: { variant: 'danger' },
});
}
}
export async function checkSubscriptionPurchase(
setDialogMessage: SetDialogBoxAttributes,
router: NextRouter,
@ -303,9 +361,6 @@ function handleFailureReason(
}
export function planForSubscription(subscription: Subscription) {
if (!subscription) {
return null;
}
return {
id: subscription.productID,
storage: subscription.storage,

View file

@ -42,3 +42,19 @@ export const getPaymentsURL = () => {
}
return `https://payments.ente.io`;
};
// getFamilyPortalURL returns the endpoint for the family dashboard which can be used to
// create or manage family.
export const getFamilyPortalURL = () => {
if (process.env.NEXT_PUBLIC_ENTE_FAMILY_PORTAL_ENDPOINT !== undefined) {
return process.env.NEXT_PUBLIC_ENTE_FAMILY_PORTAL_ENDPOINT;
}
return `https://family.ente.io`;
};
export const getUploadEndpoint = () => {
if (process.env.NEXT_PUBLIC_ENTE_UPLOAD_ENDPOINT !== undefined) {
return process.env.NEXT_PUBLIC_ENTE_UPLOAD_ENDPOINT;
}
return `https://uploader.ente.io`;
};

View file

@ -3,6 +3,9 @@ export enum OS {
ANDROID = 'android',
IOS = 'ios',
UNKNOWN = 'unknown',
WINDOWS = 'windows',
MAC = 'mac',
LINUX = 'linux',
}
declare global {
@ -30,10 +33,22 @@ const GetDeviceOS = () => {
}
// iOS detection from: http://stackoverflow.com/a/9039885/177710
if (/iPad|iPhone|iPod/.test(userAgent) && !window.MSStream) {
if (/(iPad|iPhone|iPod)/g.test(userAgent) && !window.MSStream) {
return OS.IOS;
}
// credit: https://github.com/MikeKovarik/platform-detect/blob/master/os.mjs
if (userAgent.includes('Windows')) {
return OS.WINDOWS;
}
if (userAgent.includes('Macintosh')) {
return OS.MAC;
}
// Linux must be last
if (userAgent.includes('Linux')) {
return OS.LINUX;
}
return OS.UNKNOWN;
};

View file

@ -1,8 +1,12 @@
import constants from 'utils/strings/constants';
import { CustomError } from 'utils/error';
import GetDeviceOS, { OS } from './deviceDetection';
export const DESKTOP_APP_DOWNLOAD_URL =
const DESKTOP_APP_GITHUB_DOWNLOAD_URL =
'https://github.com/ente-io/bhari-frame/releases/latest';
const APP_DOWNLOAD_ENTE_URL_PREFIX = 'https://ente.io/download';
export function checkConnectivity() {
if (navigator.onLine) {
return true;
@ -20,8 +24,21 @@ export async function sleep(time: number) {
});
}
export function getOSSpecificDesktopAppDownloadLink() {
const os = GetDeviceOS();
let url = '';
if (os === OS.WINDOWS) {
url = `${APP_DOWNLOAD_ENTE_URL_PREFIX}/exe`;
} else if (os === OS.MAC) {
url = `${APP_DOWNLOAD_ENTE_URL_PREFIX}/dmg`;
} else {
url = DESKTOP_APP_GITHUB_DOWNLOAD_URL;
}
return url;
}
export function downloadApp() {
const win = window.open(DESKTOP_APP_DOWNLOAD_URL, '_blank');
const link = getOSSpecificDesktopAppDownloadLink();
const win = window.open(link, '_blank');
win.focus();
}
@ -37,3 +54,24 @@ export function initiateEmail(email: string) {
a.rel = 'noreferrer noopener';
a.click();
}
export const promiseWithTimeout = async (
request: Promise<any>,
timeout: number
) => {
const timeoutRef = { current: null };
const rejectOnTimeout = new Promise((_, reject) => {
timeoutRef.current = setTimeout(
() => reject(Error(CustomError.WAIT_TIME_EXCEEDED)),
timeout
);
});
const requestWithTimeOutCancellation = async () => {
const resp = await request;
clearTimeout(timeoutRef.current);
return resp;
};
return await Promise.race([
requestWithTimeOutCancellation(),
rejectOnTimeout,
]);
};
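A usage sketch for promiseWithTimeout: race any promise against a deadline and treat CustomError.WAIT_TIME_EXCEEDED as the timeout signal. fetchThumbnail is a made-up stand-in for whatever slow operation is being guarded.

import { promiseWithTimeout } from 'utils/common';
import { CustomError } from 'utils/error';

// fetchThumbnail is a hypothetical slow operation, declared only for this sketch.
declare function fetchThumbnail(fileID: number): Promise<Uint8Array>;

async function thumbnailWithDeadline(fileID: number) {
    try {
        // give up if the thumbnail takes longer than 30 seconds
        return await promiseWithTimeout(fetchThumbnail(fileID), 30 * 1000);
    } catch (e) {
        if (e.message === CustomError.WAIT_TIME_EXCEEDED) {
            return null; // treat a timeout as "no thumbnail"
        }
        throw e;
    }
}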

View file

@ -7,6 +7,10 @@ import { getActualKey, getToken } from 'utils/common/key';
import { setRecoveryKey } from 'services/userService';
import { logError } from 'utils/sentry';
import { ComlinkWorker } from 'utils/comlink';
import { DataStream, ElectronFile } from 'types/upload';
import { cryptoGenericHash } from './libsodium';
import { getElectronFileStream, getFileStream } from 'services/readerService';
import { FILE_READER_CHUNK_SIZE } from 'constants/upload';
export interface B64EncryptionResult {
encryptedData: string;
@ -196,3 +200,14 @@ export async function encryptWithRecoveryKey(key: string) {
return encryptedKey;
}
export default CryptoWorker;
export async function getFileHash(file: File | ElectronFile) {
let filedata: DataStream;
if (file instanceof File) {
filedata = getFileStream(file, FILE_READER_CHUNK_SIZE);
} else {
filedata = await getElectronFileStream(file, FILE_READER_CHUNK_SIZE);
}
const hash = await cryptoGenericHash(filedata.stream);
return hash;
}

View file

@ -252,6 +252,36 @@ export async function hash(input: string) {
);
}
export async function cryptoGenericHash(stream: ReadableStream) {
await sodium.ready;
const state = sodium.crypto_generichash_init(
null,
sodium.crypto_generichash_BYTES_MAX
);
const reader = stream.getReader();
let isDone = false;
while (!isDone) {
const { done, value: chunk } = await reader.read();
if (done) {
isDone = true;
break;
}
const buffer = Uint8Array.from(chunk);
sodium.crypto_generichash_update(state, buffer);
}
const hash = sodium.crypto_generichash_final(
state,
sodium.crypto_generichash_BYTES_MAX
);
const hashString = sodium.to_base64(hash, sodium.base64_variants.ORIGINAL);
return hashString;
}
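For readers new to libsodium's streaming hash API, the same init/update/final sequence as cryptoGenericHash above applied to a browser File, assuming the standard libsodium-wrappers package (with esModuleInterop) rather than the project's wrapper.

import sodium from 'libsodium-wrappers';

// Hash a browser File chunk by chunk; File.stream() is a standard Web API.
async function hashFile(file: File): Promise<string> {
    await sodium.ready;
    const state = sodium.crypto_generichash_init(
        null,
        sodium.crypto_generichash_BYTES_MAX
    );
    const reader = file.stream().getReader();
    while (true) {
        const { done, value } = await reader.read();
        if (done) {
            break;
        }
        sodium.crypto_generichash_update(state, value);
    }
    const digest = sodium.crypto_generichash_final(
        state,
        sodium.crypto_generichash_BYTES_MAX
    );
    return sodium.to_base64(digest, sodium.base64_variants.ORIGINAL);
}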
export async function deriveKey(
passphrase: string,
salt: string,

View file

@ -42,6 +42,7 @@ export enum CustomError {
NO_METADATA = 'no metadata',
TOO_LARGE_LIVE_PHOTO_ASSETS = 'too large live photo assets',
NOT_A_DATE = 'not a date',
FILE_ID_NOT_FOUND = 'file with id not found',
}
function parseUploadErrorCodes(error) {
@ -107,6 +108,7 @@ export function errorWithContext(originalError: Error, context: string) {
originalError.stack;
return errorWithContext;
}
export const parseSharingErrorCodes = (error) => {
let parsedMessage = null;
if (error?.status) {

View file

@ -78,7 +78,6 @@ export async function downloadFile(
}
const fileType = await getFileType(
fileReader,
new File([fileBlob], file.metadata.title)
);
if (
@ -99,12 +98,12 @@ export async function downloadFile(
const originalName = fileNameWithoutExtension(file.metadata.title);
const motionPhoto = await decodeMotionPhoto(fileBlob, originalName);
const image = new File([motionPhoto.image], motionPhoto.imageNameTitle);
const imageType = await getFileType(fileReader, image);
const imageType = await getFileType(image);
tempImageURL = URL.createObjectURL(
new Blob([motionPhoto.image], { type: imageType.mimeType })
);
const video = new File([motionPhoto.video], motionPhoto.videoNameTitle);
const videoType = await getFileType(fileReader, video);
const videoType = await getFileType(video);
tempVideoURL = URL.createObjectURL(
new Blob([motionPhoto.video], { type: videoType.mimeType })
);
@ -309,25 +308,25 @@ export const preservePhotoswipeProps =
return fileWithPreservedProperty;
};
export function fileNameWithoutExtension(filename) {
export function fileNameWithoutExtension(filename: string) {
const lastDotPosition = filename.lastIndexOf('.');
if (lastDotPosition === -1) return filename;
else return filename.substr(0, lastDotPosition);
else return filename.slice(0, lastDotPosition);
}
export function fileExtensionWithDot(filename) {
export function fileExtensionWithDot(filename: string) {
const lastDotPosition = filename.lastIndexOf('.');
if (lastDotPosition === -1) return '';
else return filename.substr(lastDotPosition);
else return filename.slice(lastDotPosition);
}
export function splitFilenameAndExtension(filename): [string, string] {
export function splitFilenameAndExtension(filename: string): [string, string] {
const lastDotPosition = filename.lastIndexOf('.');
if (lastDotPosition === -1) return [filename, null];
else
return [
filename.substr(0, lastDotPosition),
filename.substr(lastDotPosition + 1),
filename.slice(0, lastDotPosition),
filename.slice(lastDotPosition + 1),
];
}
@ -349,9 +348,8 @@ export async function convertForPreview(
fileBlob: Blob
): Promise<Blob[]> {
const convertIfHEIC = async (fileName: string, fileBlob: Blob) => {
const reader = new FileReader();
const mimeType = (
await getFileType(reader, new File([fileBlob], file.metadata.title))
await getFileType(new File([fileBlob], file.metadata.title))
).exactType;
if (isFileHEIC(mimeType)) {
fileBlob = await HEICConverter.convert(fileBlob);

View file

@ -1,5 +1,4 @@
import * as Sentry from '@sentry/nextjs';
import { errorWithContext } from 'utils/error';
import { getUserAnonymizedID } from 'utils/user';
export const logError = (
@ -18,7 +17,17 @@ export const logError = (
...(info && {
info: info,
}),
rootCause: { message: error?.message },
rootCause: { message: error?.message, completeError: error },
},
});
};
// copy of errorWithContext to prevent importing error util
function errorWithContext(originalError: Error, context: string) {
const errorWithContext = new Error(context);
errorWithContext.stack =
errorWithContext.stack.split('\n').slice(2, 4).join('\n') +
'\n' +
originalError.stack;
return errorWithContext;
}

View file

@ -6,6 +6,7 @@ export enum LS_KEYS {
KEY_ATTRIBUTES = 'keyAttributes',
ORIGINAL_KEY_ATTRIBUTES = 'originalKeyAttributes',
SUBSCRIPTION = 'subscription',
FAMILY_DATA = 'familyData',
PLANS = 'plans',
IS_FIRST_LOGIN = 'isFirstLogin',
JUST_SIGNED_UP = 'justSignedUp',
@ -14,7 +15,7 @@ export enum LS_KEYS {
AnonymizeUserID = 'anonymizedUserID',
THUMBNAIL_FIX_STATE = 'thumbnailFixState',
LIVE_PHOTO_INFO_SHOWN_COUNT = 'livePhotoInfoShownCount',
FAILED_UPLOADS = 'failedUploads',
LOGS = 'logs',
USER_DETAILS = 'userDetails',
COLLECTION_SORT_BY = 'collectionSortBy',
@ -28,6 +29,13 @@ export const setData = (key: LS_KEYS, value: object) => {
localStorage.setItem(key, JSON.stringify(value));
};
export const removeData = (key: LS_KEYS) => {
if (typeof localStorage === 'undefined') {
return null;
}
localStorage.removeItem(key);
};
export const getData = (key: LS_KEYS) => {
try {
if (
@ -40,7 +48,7 @@ export const getData = (key: LS_KEYS) => {
const data = localStorage.getItem(key);
return data && JSON.parse(data);
} catch (e) {
logError(e, 'Failed to Parse JSON');
logError(e, 'Failed to Parse JSON for key ' + key);
}
};

View file

@ -272,6 +272,9 @@ const englishConstants = {
USAGE_DETAILS: 'usage',
MANAGE: 'manage',
MANAGEMENT_PORTAL: 'manage payment method',
MANAGE_FAMILY_PORTAL: 'manage family',
LEAVE_FAMILY: 'leave family',
LEAVE_FAMILY_CONFIRM: 'are you sure that you want to leave family?',
CHOOSE_PLAN: 'choose your subscription plan',
MANAGE_PLAN: 'manage your subscription',
CHOOSE_PLAN_BTN: 'choose plan',
@ -286,6 +289,15 @@ const englishConstants = {
</p>
</>
),
FAMILY_PLAN_MANAGE_ADMIN_ONLY: (adminEmail) => (
<>
<p>
only your family plan admin <strong>{adminEmail}</strong> can
change the plan
</p>
</>
),
RENEWAL_ACTIVE_SUBSCRIPTION_INFO: (expiryTime) => (
<p>your subscription will renew on {dateString(expiryTime)}</p>
),
@ -304,6 +316,12 @@ const englishConstants = {
</p>
),
FAMILY_USAGE_INFO: (usage, quota) => (
<p>
you have used {usage} out of your family's {quota} quota
</p>
),
SUBSCRIPTION_PURCHASE_SUCCESS: (expiryTime) => (
<>
<p>we've received your payment</p>
@ -569,6 +587,7 @@ const englishConstants = {
RETRY_FAILED: 'retry failed uploads',
FAILED_UPLOADS: 'failed uploads ',
SKIPPED_FILES: 'ignored uploads',
THUMBNAIL_GENERATION_FAILED_UPLOADS: 'thumbnail generation failed',
UNSUPPORTED_FILES: 'unsupported files',
SUCCESSFUL_UPLOADS: 'successful uploads',
SKIPPED_INFO:
@ -582,6 +601,8 @@ const englishConstants = {
'these files were not uploaded as they exceed the maximum size limit for your storage plan',
TOO_LARGE_INFO:
'these files were not uploaded as they exceed our maximum file size limit',
THUMBNAIL_GENERATION_FAILED_INFO:
'these files were uploaded, but unfortunately we could not generate the thumbnails for them.',
UPLOAD_TO_COLLECTION: 'upload to album',
ARCHIVE: 'Hidden',
ALL_SECTION_NAME: 'All Photos',
@ -732,12 +753,13 @@ const englishConstants = {
DEDUPLICATE_FILES: 'Deduplicate files',
NO_DUPLICATES_FOUND: "you've no duplicate files that can be cleared",
CLUB_BY_CAPTURE_TIME: 'club by capture time',
CLUB_BY_FILE_HASH: 'club by file hashes',
FILES: 'files',
EACH: 'each',
DEDUPLICATION_LOGIC_MESSAGE: (captureTime: boolean) => (
<>
the following files were clubbed based on their sizes
{captureTime && ` and capture time`}, please review and delete items
{captureTime && ' and capture time'}, please review and delete items
you believe are duplicates{' '}
</>
),
@ -754,6 +776,19 @@ const englishConstants = {
' enter the 6-digit code from your authenticator app.',
CREATE_ACCOUNT: 'Create account',
COPIED: 'copied',
CANVAS_BLOCKED_TITLE: 'unable to generate thumbnail',
CANVAS_BLOCKED_MESSAGE: () => (
<>
<p>
it looks like your browser has disabled access to canvas, which
is necessary to generate thumbnails for your photos
</p>
<p>
please enable access to your browser's canvas, or check out our
desktop app
</p>
</>
),
};
export default englishConstants;

View file

@ -5,6 +5,15 @@ export interface TimeDelta {
years?: number;
}
interface DateComponent<T = number> {
year: T;
month: T;
day: T;
hour: T;
minute: T;
second: T;
}
export function dateStringWithMMH(unixTimeInMicroSeconds: number): string {
return new Date(unixTimeInMicroSeconds / 1000).toLocaleDateString('en-US', {
year: 'numeric',
@ -76,3 +85,81 @@ function _addYears(date: Date, years: number) {
result.setFullYear(date.getFullYear() + years);
return result;
}
/*
parses a date from a fused date string in the format YYYYMMDD-HHMMSS
*/
export function parseDateFromFusedDateString(dateTime: string) {
const dateComponent: DateComponent<string> = {
year: dateTime.slice(0, 4),
month: dateTime.slice(4, 6),
day: dateTime.slice(6, 8),
hour: dateTime.slice(9, 11),
minute: dateTime.slice(11, 13),
second: dateTime.slice(13, 15),
};
return getDateFromComponents(dateComponent);
}
/* sample date format = 2018-08-19 12:34:45
the date has six symbol separated number values
which we would extract and use to form the date
*/
export function tryToParseDateTime(dateTime: string): Date {
const dateComponent = getDateComponentsFromSymbolJoinedString(dateTime);
if (isDateComponentValid(dateComponent)) {
return getDateFromComponents(dateComponent);
} else if (
dateComponent.year?.length === 8 &&
dateComponent.month?.length === 6
) {
// the filename has 8 consecutive digits followed by 6 consecutive digits,
// so there is a high possibility that it is some unhandled date-time encoding
const possibleDateTime = dateComponent.year + '-' + dateComponent.month;
return parseDateFromFusedDateString(possibleDateTime);
} else {
return null;
}
}
function getDateComponentsFromSymbolJoinedString(
dateTime: string
): DateComponent<string> {
const [year, month, day, hour, minute, second] =
dateTime.match(/\d+/g) ?? [];
return { year, month, day, hour, minute, second };
}
// checks that the date components have the expected number of digits
function isDateComponentValid(dateComponent: DateComponent<string>) {
return (
dateComponent.year?.length === 4 &&
dateComponent.month?.length === 2 &&
dateComponent.day?.length === 2
);
}
function parseDateComponentToNumber(
dateComponent: DateComponent<string>
): DateComponent<number> {
return {
year: parseInt(dateComponent.year),
// https://stackoverflow.com/questions/2552483/why-does-the-month-argument-range-from-0-to-11-in-javascripts-date-constructor
month: parseInt(dateComponent.month) - 1,
day: parseInt(dateComponent.day),
hour: parseInt(dateComponent.hour),
minute: parseInt(dateComponent.minute),
second: parseInt(dateComponent.second),
};
}
function getDateFromComponents(dateComponent: DateComponent<string>) {
const { year, month, day, hour, minute, second } =
parseDateComponentToNumber(dateComponent);
const hasTimeValues = hour && minute && second;
return hasTimeValues
? new Date(year, month, day, hour, minute, second)
: new Date(year, month, day);
}
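A worked example of the parsing path above, assuming a local timezone: tryToParseDateTime pulls the digit runs out of a symbol-separated string and validates their lengths, while Signal-style names are first converted to a fused date string and then sliced into components.

import {
    tryToParseDateTime,
    parseDateFromFusedDateString,
} from 'utils/time';

// '2018-08-19 12:34:45' -> digit runs ['2018','08','19','12','34','45']
// -> new Date(2018, 7, 19, 12, 34, 45) (months are 0-based)
console.log(tryToParseDateTime('2018-08-19 12:34:45'));

// a Signal name like 'signal-2018-08-21-100217.jpg' is first fused into
// '20180821-100217...' and then sliced into YYYY MM DD / HH MM SS components
console.log(parseDateFromFusedDateString('20180821-100217'));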

View file

@ -4,6 +4,7 @@ import { convertBytesToHumanReadable } from 'utils/billing';
import { formatDateTime } from 'utils/file';
import { getLogs, saveLogLine } from 'utils/storage';
import { A_SEC_IN_MICROSECONDS } from 'constants/upload';
import { FILE_TYPE } from 'constants/file';
const TYPE_JSON = 'json';
const DEDUPE_COLLECTION = new Set(['icloud library', 'icloudlibrary']);
@ -20,6 +21,25 @@ export function fileAlreadyInCollection(
return false;
}
export function findSameFileInOtherCollection(
existingFiles: EnteFile[],
newFileMetadata: Metadata
) {
if (!hasFileHash(newFileMetadata)) {
return null;
}
for (const existingFile of existingFiles) {
if (
hasFileHash(existingFile.metadata) &&
areFilesWithFileHashSame(existingFile.metadata, newFileMetadata)
) {
return existingFile;
}
}
return null;
}
export function shouldDedupeAcrossCollection(collectionName: string): boolean {
// using set to avoid unnecessary regex for removing spaces for each upload
return DEDUPE_COLLECTION.has(collectionName.toLocaleLowerCase());
@ -29,24 +49,52 @@ export function areFilesSame(
existingFile: Metadata,
newFile: Metadata
): boolean {
/*
* The maximum difference in the creation/modification times of two similar files is set to 1 second.
* This is because while uploading files in the web - browsers and users could have set reduced
* precision of file times to prevent timing attacks and fingerprinting.
* Context: https://developer.mozilla.org/en-US/docs/Web/API/File/lastModified#reduced_time_precision
*/
if (
existingFile.fileType === newFile.fileType &&
Math.abs(existingFile.creationTime - newFile.creationTime) <
A_SEC_IN_MICROSECONDS &&
Math.abs(existingFile.modificationTime - newFile.modificationTime) <
A_SEC_IN_MICROSECONDS &&
existingFile.title === newFile.title
) {
return true;
if (hasFileHash(existingFile) && hasFileHash(newFile)) {
return areFilesWithFileHashSame(existingFile, newFile);
} else {
/*
* The maximum difference in the creation/modification times of two similar files is set to 1 second.
* This is because, while uploading files on the web, browsers and users could have set reduced
* precision of file times to prevent timing attacks and fingerprinting.
* Context: https://developer.mozilla.org/en-US/docs/Web/API/File/lastModified#reduced_time_precision
*/
if (
existingFile.fileType === newFile.fileType &&
Math.abs(existingFile.creationTime - newFile.creationTime) <
A_SEC_IN_MICROSECONDS &&
Math.abs(existingFile.modificationTime - newFile.modificationTime) <
A_SEC_IN_MICROSECONDS &&
existingFile.title === newFile.title
) {
return true;
} else {
return false;
}
}
}
export function hasFileHash(file: Metadata) {
return file.hash || (file.imageHash && file.videoHash);
}
export function areFilesWithFileHashSame(
existingFile: Metadata,
newFile: Metadata
): boolean {
if (
existingFile.fileType !== newFile.fileType ||
existingFile.title !== newFile.title
) {
return false;
}
if (existingFile.fileType === FILE_TYPE.LIVE_PHOTO) {
return (
existingFile.imageHash === newFile.imageHash &&
existingFile.videoHash === newFile.videoHash
);
} else {
return existingFile.hash === newFile.hash;
}
}
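A small sketch of how the new hash fields drive deduplication: two metadata records with the same title, file type and hash are treated as the same file, regardless of their (reduced-precision) timestamps; in uploader.ts above, such a match in another collection results in addToCollection instead of a re-upload. The sample values are made up.

import { areFilesSame, hasFileHash } from 'utils/upload';
import { FILE_TYPE } from 'constants/file';

// Made-up metadata records for illustration.
const first = {
    title: 'IMG_0001.HEIC',
    creationTime: 1654500000000000,
    modificationTime: 1654500000000000,
    latitude: 0,
    longitude: 0,
    fileType: FILE_TYPE.IMAGE,
    hash: 'blake2b-digest-of-the-file-contents',
};
// same content hash, but timestamps that differ by five seconds
const second = { ...first, creationTime: 1654500005000000 };

console.log(Boolean(hasFileHash(first))); // true
console.log(areFilesSame(first, second)); // true: hashes win over timestamps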
export function segregateMetadataAndMediaFiles(

View file

@ -0,0 +1,53 @@
//
// Canvas Blocker &
// Firefox privacy.resistFingerprinting Detector.
// (c) 2018 // JOHN OZBAY // CRYPT.EE
// MIT License
// Credits: https://github.com/johnozbay/canvas-block-detector/blob/master/isCanvasBlocked.js
//
export function isCanvasBlocked() {
// create a 1px image data
let blocked = false;
const canvas = document.createElement('canvas');
const ctx = canvas.getContext('2d');
// some blockers just return an undefined ctx. So let's check that first.
if (ctx) {
const imageData = ctx.createImageData(1, 1);
const originalImageData = imageData.data;
// set pixels to RGB 128
originalImageData[0] = 128;
originalImageData[1] = 128;
originalImageData[2] = 128;
originalImageData[3] = 255;
// set this to canvas
ctx.putImageData(imageData, 1, 1);
try {
// now get the data back from canvas.
const checkData = ctx.getImageData(1, 1, 1, 1).data;
// If this is firefox, and privacy.resistFingerprinting is enabled,
// OR a browser extension blocking the canvas,
// This will return RGB all white (255,255,255) instead of the (128,128,128) we put.
// so let's check the R and G to see if they're 255 or 128 (matching what we've initially set)
if (
originalImageData[0] !== checkData[0] &&
originalImageData[1] !== checkData[1]
) {
blocked = true;
}
} catch (error) {
// some extensions will return getImageData null. this is to account for that.
blocked = true;
}
} else {
blocked = true;
}
return blocked;
}
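A usage sketch for isCanvasBlocked, wired the way the new CANVAS_BLOCKED_* strings suggest: check before thumbnail generation and surface a dialog when canvas access is blocked. showDialog is a hypothetical stand-in for the app's setDialogMessage handler, and isCanvasBlocked is assumed to be imported from the file above.

import constants from 'utils/strings/constants';

// showDialog is a hypothetical callback standing in for setDialogMessage;
// isCanvasBlocked is the detector defined above.
export function warnIfCanvasBlocked(
    showDialog: (title: string) => void
): boolean {
    if (isCanvasBlocked()) {
        // thumbnails are drawn on a <canvas>, so a blocked canvas means
        // thumbnail generation will fail
        showDialog(constants.CANVAS_BLOCKED_TITLE);
        return true;
    }
    return false;
}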