Merge pull request #420 from ente-io/master

release upload logs
Abhinav Kumar 2022-03-04 13:28:58 +05:30 committed by GitHub
commit 1158ad21f3
No known key found for this signature in database
GPG key ID: 4AEE18F83AFDEB23
11 changed files with 191 additions and 57 deletions

View file

@@ -1,42 +0,0 @@
import React, { useEffect, useState } from 'react';
import { getData, LS_KEYS } from 'utils/storage/localStorage';
import constants from 'utils/strings/constants';
import MessageDialog from './MessageDialog';
import { FileList } from 'components/pages/gallery/UploadProgress';
import LinkButton from './pages/gallery/LinkButton';
export default function FailedUploads() {
const [listView, setListView] = useState(false);
const [failedFiles, setFailedFiles] = useState([]);
const hideList = () => setListView(false);
const showList = () => setListView(true);
useEffect(() => {
const failedFiles = getData(LS_KEYS.FAILED_UPLOADS)?.files ?? [];
setFailedFiles(failedFiles);
}, [listView]);
return (
failedFiles.length > 0 && (
<>
<LinkButton style={{ marginTop: '30px' }} onClick={showList}>
{constants.FAILED_UPLOADS}
</LinkButton>
<MessageDialog
show={listView}
onHide={hideList}
attributes={{
title: constants.FAILED_UPLOADS,
staticBackdrop: true,
close: { text: constants.CLOSE },
}}>
<FileList>
{failedFiles.map((file) => (
<li key={file}> {file}</li>
))}
</FileList>
</MessageDialog>
</>
)
);
}

View file

@@ -35,7 +35,9 @@ import { PAGES } from 'constants/pages';
import { ARCHIVE_SECTION, TRASH_SECTION } from 'constants/collection';
import FixLargeThumbnails from './FixLargeThumbnail';
import { SetLoading } from 'types/gallery';
import FailedUploads from './FailedUploads';
import { downloadAsFile } from 'utils/file';
import { getUploadLogs } from 'utils/upload';
import styled from 'styled-components';
interface Props {
collections: Collection[];
setDialogMessage: SetDialogMessage;
@@ -55,6 +57,7 @@ export default function Sidebar(props: Props) {
const [exportModalView, setExportModalView] = useState(false);
const [fixLargeThumbsView, setFixLargeThumbsView] = useState(false);
const galleryContext = useContext(GalleryContext);
useEffect(() => {
const main = async () => {
if (!isOpen) {
@@ -109,22 +112,24 @@ export default function Sidebar(props: Props) {
}
}
const downloadUploadLogs = () => {
const logs = getUploadLogs();
const logString = logs.join('\n');
downloadAsFile(`upload_logs_${Date.now()}.txt`, logString);
};
const router = useRouter();
function onManageClick() {
setIsOpen(false);
galleryContext.showPlanSelectorModal();
}
const Divider = () => (
<div
style={{
height: '1px',
marginTop: '40px',
background: '#242424',
width: '100%',
}}
/>
);
const Divider = styled.div`
height: 1px;
margin-top: 40px;
background: #242424;
width: 100%;
`;
return (
<Menu
isOpen={isOpen}
@@ -293,7 +298,6 @@ export default function Sidebar(props: Props) {
{constants.FIX_LARGE_THUMBNAILS}
</LinkButton>
</>
<FailedUploads />
<LinkButton
style={{ marginTop: '30px' }}
onClick={openFeedbackURL}>
@@ -361,12 +365,24 @@ export default function Sidebar(props: Props) {
}>
{constants.DELETE_ACCOUNT}
</LinkButton>
<Divider style={{ marginTop: '36px' }} />
<div
style={{
marginTop: '40px',
width: '100%',
}}
/>
<div
style={{
marginTop: '30px',
fontSize: '14px',
textAlign: 'center',
color: 'grey',
cursor: 'pointer',
}}
onClick={downloadUploadLogs}>
{constants.DOWNLOAD_UPLOAD_LOGS}
</div>
</div>
</Menu>
);
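Note: downloadUploadLogs above delegates the actual file download to downloadAsFile from utils/file, whose body is not part of this diff. A minimal sketch of such a helper, assuming it wraps the text in a Blob and triggers the download through a temporary anchor element, might look like:

// Hypothetical sketch of downloadAsFile (utils/file); not the repository's actual implementation.
export function downloadAsFile(fileName: string, content: string) {
    // Wrap the log text in a Blob and expose it via an object URL.
    const blob = new Blob([content], { type: 'text/plain' });
    const url = URL.createObjectURL(blob);
    // A temporary anchor element triggers the browser's download flow.
    const anchor = document.createElement('a');
    anchor.href = url;
    anchor.download = fileName;
    anchor.click();
    // Release the object URL once the download has been handed off.
    URL.revokeObjectURL(url);
}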

View file

@@ -9,6 +9,7 @@ import {
ParsedMetadataJSONMap,
} from 'types/upload';
import { logError } from 'utils/sentry';
import { getFileNameSize, logUploadInfo } from 'utils/upload';
import { encryptFiledata } from './encryptionService';
import { extractMetadata, getMetadataJSONMapKey } from './metadataService';
import { getFileData, getFileOriginalName } from './readFileService';
@@ -32,9 +33,12 @@ export async function readFile(
rawFile,
fileTypeInfo
);
logUploadInfo(`reading file data ${getFileNameSize(rawFile)}`);
const filedata = await getFileData(reader, rawFile);
logUploadInfo(`read file data successfully ${getFileNameSize(rawFile)}`);
return {
filedata,
thumbnail,

View file

@@ -11,6 +11,8 @@ import {
import { NULL_EXTRACTED_METADATA, NULL_LOCATION } from 'constants/upload';
import { splitFilenameAndExtension } from 'utils/file';
import { getVideoMetadata } from './videoMetadataService';
import { getFileNameSize } from 'utils/upload';
import { logUploadInfo } from 'utils/upload';
interface ParsedMetadataJSONWithTitle {
title: string;
@@ -31,7 +33,15 @@ export async function extractMetadata(
if (fileTypeInfo.fileType === FILE_TYPE.IMAGE) {
extractedMetadata = await getExifData(receivedFile, fileTypeInfo);
} else if (fileTypeInfo.fileType === FILE_TYPE.VIDEO) {
logUploadInfo(
`getVideoMetadata called for ${getFileNameSize(receivedFile)}`
);
extractedMetadata = await getVideoMetadata(receivedFile);
logUploadInfo(
`videoMetadata successfully extracted ${getFileNameSize(
receivedFile
)}`
);
}
const metadata: Metadata = {

View file

@@ -8,6 +8,7 @@ import { isFileHEIC } from 'utils/file';
import { FileTypeInfo } from 'types/upload';
import { getUint8ArrayView } from './readFileService';
import HEICConverter from 'services/HEICConverter';
import { getFileNameSize, logUploadInfo } from 'utils/upload';
const MAX_THUMBNAIL_DIMENSION = 720;
const MIN_COMPRESSION_PERCENTAGE_SIZE_DIFF = 10;
@@ -28,6 +29,7 @@ export async function generateThumbnail(
fileTypeInfo: FileTypeInfo
): Promise<{ thumbnail: Uint8Array; hasStaticThumbnail: boolean }> {
try {
logUploadInfo(`generating thumbnail for ${getFileNameSize(file)}`);
let hasStaticThumbnail = false;
let canvas = document.createElement('canvas');
let thumbnail: Uint8Array;
@@ -37,13 +39,29 @@ export async function generateThumbnail(
canvas = await generateImageThumbnail(file, isHEIC);
} else {
try {
logUploadInfo(
`ffmpeg generateThumbnail called for ${getFileNameSize(
file
)}`
);
const thumb = await FFmpegService.generateThumbnail(file);
logUploadInfo(
`ffmpeg thumbnail successfully generated ${getFileNameSize(
file
)}`
);
const dummyImageFile = new File([thumb], file.name);
canvas = await generateImageThumbnail(
dummyImageFile,
false
);
} catch (e) {
logUploadInfo(
`ffmpeg thumbnail generation failed ${getFileNameSize(
file
)} error: ${e.message}`
);
logError(e, 'failed to generate thumbnail using ffmpeg', {
fileFormat: fileTypeInfo.exactType,
});
@@ -55,10 +73,18 @@ export async function generateThumbnail(
if (thumbnail.length === 0) {
throw Error('EMPTY THUMBNAIL');
}
logUploadInfo(
`thumbnail successfully generated ${getFileNameSize(file)}`
);
} catch (e) {
logError(e, 'uploading static thumbnail', {
fileFormat: fileTypeInfo.exactType,
});
logUploadInfo(
`thumbnail generation failed ${getFileNameSize(file)} error: ${
e.message
}`
);
thumbnail = Uint8Array.from(atob(BLACK_THUMBNAIL_BASE64), (c) =>
c.charCodeAt(0)
);
@@ -79,7 +105,9 @@ export async function generateImageThumbnail(file: File, isHEIC: boolean) {
let timeout = null;
if (isHEIC) {
file = new File([await HEICConverter.convert(file)], null, null);
logUploadInfo(`HEICConverter called for ${getFileNameSize(file)}`);
file = new File([await HEICConverter.convert(file)], file.name);
logUploadInfo(`${getFileNameSize(file)} successfully converted`);
}
let image = new Image();
imageURL = URL.createObjectURL(file);
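For context, generateImageThumbnail itself is untouched by this commit and its body is not shown here. A minimal sketch of canvas-based downscaling to MAX_THUMBNAIL_DIMENSION, assuming the helper loads the file into an Image and draws it onto a scaled canvas (the real implementation may differ):

// Hypothetical sketch of canvas-based thumbnailing; not the repository's actual generateImageThumbnail.
const MAX_THUMBNAIL_DIMENSION = 720;
async function makeThumbnailCanvas(file: File): Promise<HTMLCanvasElement> {
    const image = new Image();
    const imageURL = URL.createObjectURL(file);
    // Wait for the browser to decode the image before measuring it.
    await new Promise<void>((resolve, reject) => {
        image.onload = () => resolve();
        image.onerror = () => reject(new Error('failed to load image'));
        image.src = imageURL;
    });
    URL.revokeObjectURL(imageURL);
    // Scale the longer edge down to MAX_THUMBNAIL_DIMENSION, preserving aspect ratio.
    const scale = Math.min(1, MAX_THUMBNAIL_DIMENSION / Math.max(image.width, image.height));
    const canvas = document.createElement('canvas');
    canvas.width = Math.round(image.width * scale);
    canvas.height = Math.round(image.height * scale);
    canvas.getContext('2d')?.drawImage(image, 0, 0, canvas.width, canvas.height);
    return canvas;
}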

View file

@@ -9,7 +9,7 @@ import {
} from 'utils/file';
import { logError } from 'utils/sentry';
import { getMetadataJSONMapKey, parseMetadataJSON } from './metadataService';
import { segregateMetadataAndMediaFiles } from 'utils/upload';
import { getFileNameSize, segregateMetadataAndMediaFiles } from 'utils/upload';
import uploader from './uploader';
import UIService from './uiService';
import UploadService from './uploadService';
@@ -35,6 +35,7 @@ import uiService from './uiService';
import { getData, LS_KEYS, setData } from 'utils/storage/localStorage';
import { dedupe } from 'utils/export';
import { convertToHumanReadable } from 'utils/billing';
import { logUploadInfo } from 'utils/upload';
const MAX_CONCURRENT_UPLOADS = 4;
const FILE_UPLOAD_COMPLETED = 100;
@@ -81,8 +82,15 @@ class UploadManager {
) {
try {
await this.init(newCreatedCollections);
logUploadInfo(
`received ${fileWithCollectionToBeUploaded.length} files to upload`
);
const { metadataJSONFiles, mediaFiles } =
segregateMetadataAndMediaFiles(fileWithCollectionToBeUploaded);
logUploadInfo(
`has ${metadataJSONFiles.length} metadata json files`
);
logUploadInfo(`has ${mediaFiles.length} media files`);
if (metadataJSONFiles.length) {
UIService.setUploadStage(
UPLOAD_STAGES.READING_GOOGLE_METADATA_FILES
@@ -99,6 +107,7 @@ class UploadManager {
this.metadataAndFileTypeInfoMap
);
UIService.setUploadStage(UPLOAD_STAGES.START);
logUploadInfo(`clusterLivePhotoFiles called`);
const analysedMediaFiles =
UploadService.clusterLivePhotoFiles(mediaFiles);
uiService.setFilenames(
@@ -109,9 +118,16 @@ class UploadManager {
])
)
);
UIService.setHasLivePhoto(
mediaFiles.length !== analysedMediaFiles.length
);
logUploadInfo(
`got live photos: ${
mediaFiles.length !== analysedMediaFiles.length
}`
);
await this.uploadMediaFiles(analysedMediaFiles);
}
UIService.setUploadStage(UPLOAD_STAGES.FINISH);
@@ -128,10 +144,16 @@ class UploadManager {
private async parseMetadataJSONFiles(metadataFiles: FileWithCollection[]) {
try {
logUploadInfo(`parseMetadataJSONFiles function executed `);
UIService.reset(metadataFiles.length);
const reader = new FileReader();
for (const { file, collectionID } of metadataFiles) {
try {
logUploadInfo(
`parsing metadata json file ${getFileNameSize(file)}`
);
const parsedMetadataJSONWithTitle = await parseMetadataJSON(
reader,
file
@@ -145,8 +167,18 @@ class UploadManager {
);
UIService.increaseFileUploaded();
}
logUploadInfo(
`successfully parsed metadata json file ${getFileNameSize(
file
)}`
);
} catch (e) {
logError(e, 'parsing failed for a file');
logUploadInfo(
`failed to parse metadata json file ${getFileNameSize(
file
)} error: ${e.message}`
);
}
}
} catch (e) {
@@ -157,12 +189,19 @@ class UploadManager {
private async extractMetadataFromFiles(mediaFiles: FileWithCollection[]) {
try {
logUploadInfo(`extractMetadataFromFiles executed`);
UIService.reset(mediaFiles.length);
const reader = new FileReader();
for (const { file, localID, collectionID } of mediaFiles) {
try {
const { fileTypeInfo, metadata } = await (async () => {
if (file.size >= MAX_FILE_SIZE_SUPPORTED) {
logUploadInfo(
`${getFileNameSize(
file
)} rejected because of large size`
);
return { fileTypeInfo: null, metadata: null };
}
const fileTypeInfo = await UploadService.getFileType(
@@ -170,8 +209,16 @@ class UploadManager {
file
);
if (fileTypeInfo.fileType === FILE_TYPE.OTHERS) {
logUploadInfo(
`${getFileNameSize(
file
)} rejected because of unknown file format`
);
return { fileTypeInfo, metadata: null };
}
logUploadInfo(
`extracting ${getFileNameSize(file)} metadata`
);
const metadata =
(await UploadService.extractFileMetadata(
file,
@@ -181,6 +228,11 @@ class UploadManager {
return { fileTypeInfo, metadata };
})();
logUploadInfo(
`metadata extraction successful ${getFileNameSize(
file
)} `
);
this.metadataAndFileTypeInfoMap.set(localID, {
fileTypeInfo: fileTypeInfo && { ...fileTypeInfo },
metadata: metadata && { ...metadata },
@@ -188,6 +240,11 @@ class UploadManager {
UIService.increaseFileUploaded();
} catch (e) {
logError(e, 'metadata extraction failed for a file');
logUploadInfo(
`metadata extraction failed ${getFileNameSize(
file
)} error: ${e.message}`
);
}
}
} catch (e) {
@@ -197,6 +254,7 @@ class UploadManager {
}
private async uploadMediaFiles(mediaFiles: FileWithCollection[]) {
logUploadInfo(`uploadMediaFiles called`);
this.filesToBeUploaded.push(...mediaFiles);
UIService.reset(mediaFiles.length);
@@ -232,13 +290,13 @@ class UploadManager {
const existingFilesInCollection =
this.existingFilesCollectionWise.get(collectionID) ?? [];
const collection = this.collections.get(collectionID);
const { fileUploadResult, file } = await uploader(
worker,
reader,
existingFilesInCollection,
{ ...fileWithCollection, collection }
);
if (fileUploadResult === FileUploadResults.UPLOADED) {
this.existingFiles.push(file);
this.existingFiles = sortFiles(this.existingFiles);

View file

@@ -9,6 +9,8 @@ import UploadService from './uploadService';
import { FILE_TYPE } from 'constants/file';
import { FileUploadResults, MAX_FILE_SIZE_SUPPORTED } from 'constants/upload';
import { FileWithCollection, BackupedFile, UploadFile } from 'types/upload';
import { logUploadInfo } from 'utils/upload';
import { convertToHumanReadable } from 'utils/billing';
interface UploadResponse {
fileUploadResult: FileUploadResults;
@@ -21,7 +23,11 @@ export default async function uploader(
fileWithCollection: FileWithCollection
): Promise<UploadResponse> {
const { collection, localID, ...uploadAsset } = fileWithCollection;
const fileNameSize = `${UploadService.getAssetName(
fileWithCollection
)}_${convertToHumanReadable(UploadService.getAssetSize(uploadAsset))}`;
logUploadInfo(`uploader called for ${fileNameSize}`);
UIService.setFileProgress(localID, 0);
const { fileTypeInfo, metadata } =
UploadService.getFileMetadataAndFileTypeInfo(localID);
@@ -38,8 +44,10 @@ export default async function uploader(
}
if (fileAlreadyInCollection(existingFilesInCollection, metadata)) {
logUploadInfo(`skipped upload for ${fileNameSize}`);
return { fileUploadResult: FileUploadResults.ALREADY_UPLOADED };
}
logUploadInfo(`reading asset ${fileNameSize}`);
const file = await UploadService.readAsset(
reader,
@@ -57,12 +65,16 @@ export default async function uploader(
metadata,
};
logUploadInfo(`encryptAsset ${fileNameSize}`);
const encryptedFile = await UploadService.encryptAsset(
worker,
fileWithMetadata,
collection.key
);
logUploadInfo(`uploadToBucket ${fileNameSize}`);
const backupedFile: BackupedFile = await UploadService.uploadToBucket(
encryptedFile.file
);
@@ -72,16 +84,23 @@ export default async function uploader(
backupedFile,
encryptedFile.fileKey
);
logUploadInfo(`uploadFile ${fileNameSize}`);
const uploadedFile = await UploadHttpClient.uploadFile(uploadFile);
const decryptedFile = await decryptFile(uploadedFile, collection.key);
UIService.increaseFileUploaded();
logUploadInfo(`${fileNameSize} successfully uploaded`);
return {
fileUploadResult: FileUploadResults.UPLOADED,
file: decryptedFile,
};
} catch (e) {
logUploadInfo(
`upload failed for ${fileNameSize}, error: ${e.message}`
);
logError(e, 'file upload failed', {
fileFormat: fileTypeInfo.exactType,
});
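Both getFileNameSize and the fileNameSize label above rely on convertToHumanReadable from utils/billing, which is not included in this diff. A rough sketch of what such a byte formatter typically does (an assumption, not the repository's actual code):

// Hypothetical sketch of convertToHumanReadable (utils/billing); the real helper is not shown in this diff.
function convertToHumanReadable(bytes: number): string {
    const units = ['B', 'KB', 'MB', 'GB', 'TB'];
    let size = bytes;
    let unitIndex = 0;
    // Step up one unit (factor of 1024) until the value is readable.
    while (size >= 1024 && unitIndex < units.length - 1) {
        size /= 1024;
        unitIndex++;
    }
    return `${size.toFixed(1)} ${units[unitIndex]}`;
}
// Example: convertToHumanReadable(5242880) === '5.0 MB', so a log line might read 'IMG_1234.HEIC_5.0 MB'.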

View file

@@ -1,5 +1,13 @@
import { getData, LS_KEYS, setData } from './localStorage';
export interface Log {
type: string;
timestamp: number;
logLine: string;
}
const MAX_LOG_LINES = 1000;
export const isFirstLogin = () =>
getData(LS_KEYS.IS_FIRST_LOGIN)?.status ?? false;
@@ -21,3 +29,13 @@ export function getLivePhotoInfoShownCount() {
export function setLivePhotoInfoShownCount(count) {
setData(LS_KEYS.LIVE_PHOTO_INFO_SHOWN_COUNT, { count });
}
export function saveLogLine(log: Log) {
setData(LS_KEYS.LOGS, {
logs: [...getLogs(), log].slice(-1 * MAX_LOG_LINES),
});
}
export function getLogs(): Log[] {
return getData(LS_KEYS.LOGS)?.logs ?? [];
}
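The slice(-1 * MAX_LOG_LINES) in saveLogLine keeps only the most recent 1000 entries, so the persisted log behaves like a bounded buffer. A small illustration of that behaviour, using a hypothetical in-memory array in place of localStorage:

// Illustrative only: mirrors saveLogLine's slice(-MAX_LOG_LINES) capping with a plain array instead of localStorage.
const MAX_LOG_LINES = 1000;
let logs: Log[] = [];
function appendLog(log: Log) {
    // Older entries fall off the front once the cap is exceeded.
    logs = [...logs, log].slice(-1 * MAX_LOG_LINES);
}
for (let i = 0; i < 1500; i++) {
    appendLog({ type: 'upload', timestamp: Date.now(), logLine: `line ${i}` });
}
// logs.length === 1000 and logs[0].logLine === 'line 500'.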

View file

@@ -15,6 +15,7 @@ export enum LS_KEYS {
THUMBNAIL_FIX_STATE = 'thumbnailFixState',
LIVE_PHOTO_INFO_SHOWN_COUNT = 'livePhotoInfoShownCount',
FAILED_UPLOADS = 'failedUploads',
LOGS = 'logs',
}
export const setData = (key: LS_KEYS, value: object) => {

View file

@@ -674,6 +674,7 @@ const englishConstants = {
PLAYBACK_SUPPORT_COMING: 'playback support coming soon...',
LIVE_PHOTO: 'this is a live photo',
LIVE: 'LIVE',
DOWNLOAD_UPLOAD_LOGS: 'debug logs',
};
export default englishConstants;

View file

@@ -1,5 +1,8 @@
import { FileWithCollection, Metadata } from 'types/upload';
import { EnteFile } from 'types/file';
import { convertToHumanReadable } from 'utils/billing';
import { formatDateTime } from 'utils/file';
import { getLogs, saveLogLine } from 'utils/storage';
const TYPE_JSON = 'json';
@@ -49,3 +52,21 @@ export function segregateMetadataAndMediaFiles(
});
return { mediaFiles, metadataJSONFiles };
}
export function logUploadInfo(log: string) {
saveLogLine({
type: 'upload',
timestamp: Date.now(),
logLine: log,
});
}
export function getUploadLogs() {
return getLogs()
.filter((log) => log.type === 'upload')
.map((log) => `[${formatDateTime(log.timestamp)}] ${log.logLine}`);
}
export function getFileNameSize(file: File) {
return `${file.name}_${convertToHumanReadable(file.size)}`;
}
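Taken together, the additions in this commit form a small logging pipeline: upload code records lines via logUploadInfo, the lines are persisted under LS_KEYS.LOGS, and the sidebar's 'debug logs' entry turns them back into a downloadable text file. A rough usage sketch (the wrapper function is hypothetical; the real call sites are the UploadManager class and the uploader function above):

import { logUploadInfo, getUploadLogs, getFileNameSize } from 'utils/upload';
import { downloadAsFile } from 'utils/file';
// Hypothetical caller, shown only to illustrate how the pieces fit together.
async function uploadWithLogging(file: File) {
    logUploadInfo(`uploader called for ${getFileNameSize(file)}`);
    try {
        // ... perform the actual upload ...
        logUploadInfo(`${getFileNameSize(file)} successfully uploaded`);
    } catch (e) {
        logUploadInfo(`upload failed for ${getFileNameSize(file)}, error: ${e.message}`);
    }
}
// Later, from the sidebar's DOWNLOAD_UPLOAD_LOGS entry:
downloadAsFile(`upload_logs_${Date.now()}.txt`, getUploadLogs().join('\n'));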