Extract common types in upload service to upload types
Required so that modules which only need the common types do not pull in the upload service code — e.g. ffmpegService was being bundled into the machine-learning web worker because it was included while importing the downloadManager service.
This commit is contained in:
parent
3b157731a8
commit
d5199acc43
|
@ -2,11 +2,7 @@ import { getEndpoint } from 'utils/common/apiUtil';
|
|||
import localForage from 'utils/storage/localForage';
|
||||
|
||||
import { getToken } from 'utils/common/key';
|
||||
import {
|
||||
DataStream,
|
||||
EncryptionResult,
|
||||
MetadataObject,
|
||||
} from './upload/uploadService';
|
||||
import { DataStream, EncryptionResult, MetadataObject } from 'types/upload';
|
||||
import { Collection } from './collectionService';
|
||||
import HTTPService from './HTTPService';
|
||||
import { logError } from 'utils/sentry';
|
||||
|
|
|
@ -7,10 +7,11 @@ import { getEndpoint } from 'utils/common/apiUtil';
|
|||
import HTTPService from 'services/HTTPService';
|
||||
import CryptoWorker from 'utils/crypto';
|
||||
import uploadHttpClient from 'services/upload/uploadHttpClient';
|
||||
import { EncryptionResult, UploadURL } from 'services/upload/uploadService';
|
||||
import { UploadURL } from 'services/upload/uploadService';
|
||||
import { SetProgressTracker } from 'components/FixLargeThumbnail';
|
||||
import { getFileType } from './upload/readFileService';
|
||||
import { getLocalTrash, getTrashedFiles } from './trashService';
|
||||
import { EncryptionResult } from 'types/upload';
|
||||
|
||||
const ENDPOINT = getEndpoint();
|
||||
const REPLACE_THUMBNAIL_THRESHOLD = 500 * 1024; // 500KB
|
||||
|
|
|
@ -1,4 +1,5 @@
|
|||
import { DataStream, EncryptionResult, isDataStream } from './uploadService';
|
||||
import { DataStream, EncryptionResult } from 'types/upload';
|
||||
import { isDataStream } from './uploadService';
|
||||
|
||||
async function encryptFileStream(worker, fileData: DataStream) {
|
||||
const { stream, chunkCount } = fileData;
|
||||
|
|
|
@ -2,7 +2,7 @@ import { FILE_TYPE } from 'services/fileService';
|
|||
import { logError } from 'utils/sentry';
|
||||
import { getExifData } from './exifService';
|
||||
import { FileTypeInfo } from './readFileService';
|
||||
import { MetadataObject } from './uploadService';
|
||||
import { MetadataObject } from 'types/upload';
|
||||
|
||||
export interface Location {
|
||||
latitude: number;
|
||||
|
|
|
@ -1,11 +1,9 @@
|
|||
import {
|
||||
FILE_CHUNKS_COMBINED_FOR_A_UPLOAD_PART,
|
||||
DataStream,
|
||||
} from './uploadService';
|
||||
import { FILE_CHUNKS_COMBINED_FOR_A_UPLOAD_PART } from './uploadService';
|
||||
import UploadHttpClient from './uploadHttpClient';
|
||||
import * as convert from 'xml-js';
|
||||
import UIService, { RANDOM_PERCENTAGE_PROGRESS_FOR_PUT } from './uiService';
|
||||
import { CustomError } from 'utils/common/errorUtil';
|
||||
import { DataStream } from 'types/upload';
|
||||
|
||||
interface PartEtag {
|
||||
PartNumber: number;
|
||||
|
|
|
@ -3,7 +3,7 @@ import {
|
|||
FORMAT_MISSED_BY_FILE_TYPE_LIB,
|
||||
} from 'services/fileService';
|
||||
import { logError } from 'utils/sentry';
|
||||
import { FILE_READER_CHUNK_SIZE, MULTIPART_PART_SIZE } from './uploadService';
|
||||
import { FILE_READER_CHUNK_SIZE, MULTIPART_PART_SIZE } from 'types/upload';
|
||||
import FileType from 'file-type/browser';
|
||||
import { CustomError } from 'utils/common/errorUtil';
|
||||
|
||||
|
|
|
@ -1,4 +1,4 @@
|
|||
import { fileAttribute, FILE_TYPE } from '../fileService';
|
||||
import { fileAttribute } from '../fileService';
|
||||
import { Collection } from '../collectionService';
|
||||
import { logError } from 'utils/sentry';
|
||||
import UploadHttpClient from './uploadHttpClient';
|
||||
|
@ -14,16 +14,17 @@ import {
|
|||
FileTypeInfo,
|
||||
} from './readFileService';
|
||||
import { encryptFiledata } from './encryptionService';
|
||||
import { ENCRYPTION_CHUNK_SIZE } from 'types';
|
||||
import { uploadStreamUsingMultipart } from './multiPartUploadService';
|
||||
import UIService from './uiService';
|
||||
import { handleUploadError } from 'utils/common/errorUtil';
|
||||
import { MetadataMap } from './uploadManager';
|
||||
|
||||
// this is the chunk size of the un-encrypted file which is read and encrypted before uploading it as a single part.
|
||||
export const MULTIPART_PART_SIZE = 20 * 1024 * 1024;
|
||||
|
||||
export const FILE_READER_CHUNK_SIZE = ENCRYPTION_CHUNK_SIZE;
|
||||
import {
|
||||
DataStream,
|
||||
EncryptionResult,
|
||||
FILE_READER_CHUNK_SIZE,
|
||||
MetadataObject,
|
||||
MULTIPART_PART_SIZE,
|
||||
} from 'types/upload';
|
||||
|
||||
export const FILE_CHUNKS_COMBINED_FOR_A_UPLOAD_PART = Math.floor(
|
||||
MULTIPART_PART_SIZE / FILE_READER_CHUNK_SIZE
|
||||
|
@ -34,34 +35,15 @@ export interface UploadURL {
|
|||
objectKey: string;
|
||||
}
|
||||
|
||||
export interface DataStream {
|
||||
stream: ReadableStream<Uint8Array>;
|
||||
chunkCount: number;
|
||||
}
|
||||
|
||||
export function isDataStream(object: any): object is DataStream {
|
||||
return 'stream' in object;
|
||||
}
|
||||
export interface EncryptionResult {
|
||||
file: fileAttribute;
|
||||
key: string;
|
||||
}
|
||||
export interface B64EncryptionResult {
|
||||
encryptedData: string;
|
||||
key: string;
|
||||
nonce: string;
|
||||
}
|
||||
|
||||
export interface MetadataObject {
|
||||
title: string;
|
||||
creationTime: number;
|
||||
modificationTime: number;
|
||||
latitude: number;
|
||||
longitude: number;
|
||||
fileType: FILE_TYPE;
|
||||
hasStaticThumbnail?: boolean;
|
||||
}
|
||||
|
||||
export interface FileInMemory {
|
||||
filedata: Uint8Array | DataStream;
|
||||
thumbnail: Uint8Array;
|
||||
|
|
|
@ -12,11 +12,11 @@ import UploadService, {
|
|||
EncryptedFile,
|
||||
FileInMemory,
|
||||
FileWithMetadata,
|
||||
MetadataObject,
|
||||
UploadFile,
|
||||
} from './uploadService';
|
||||
import uploadService from './uploadService';
|
||||
import { FileTypeInfo, getFileType } from './readFileService';
|
||||
import { MetadataObject } from 'types/upload';
|
||||
|
||||
const TwoSecondInMillSeconds = 2000;
|
||||
const FIVE_GB_IN_BYTES = 5 * 1024 * 1024 * 1024;
|
||||
|
|
27
src/types/upload/index.ts
Normal file
27
src/types/upload/index.ts
Normal file
|
@ -0,0 +1,27 @@
|
|||
import { fileAttribute, FILE_TYPE } from 'services/fileService';
import { ENCRYPTION_CHUNK_SIZE } from 'types';

// this is the chunk size of the un-encrypted file which is read and encrypted before uploading it as a single part.
export const MULTIPART_PART_SIZE = 20 * 1024 * 1024;

// Plaintext read-chunk size; kept equal to the encryption chunk size so each
// read chunk maps one-to-one onto an encrypted chunk.
export const FILE_READER_CHUNK_SIZE = ENCRYPTION_CHUNK_SIZE;

/**
 * A file's contents exposed as a readable byte stream, together with the
 * number of chunks the stream will yield (used to size multipart uploads).
 */
export interface DataStream {
    stream: ReadableStream<Uint8Array>;
    chunkCount: number;
}

/**
 * Result of encrypting file data: the encrypted file attribute plus its key.
 * NOTE(review): presumably `key` is the (encrypted) per-file key — confirm
 * against the encryption service that produces this value.
 */
export interface EncryptionResult {
    file: fileAttribute;
    key: string;
}

/**
 * Metadata recorded alongside an uploaded file.
 */
export interface MetadataObject {
    title: string;
    // creationTime / modificationTime are numeric timestamps; units (ms vs µs
    // epoch) are not visible here — TODO confirm against the metadata writer.
    creationTime: number;
    modificationTime: number;
    latitude: number;
    longitude: number;
    fileType: FILE_TYPE;
    // set when a generated placeholder thumbnail was used instead of a real one
    hasStaticThumbnail?: boolean;
}
|
|
@ -1,6 +1,6 @@
|
|||
import { ExportRecord } from 'services/exportService';
|
||||
import { File } from 'services/fileService';
|
||||
import { MetadataObject } from 'services/upload/uploadService';
|
||||
import { MetadataObject } from 'types/upload';
|
||||
import { formatDate } from 'utils/file';
|
||||
|
||||
export const getExportRecordFileUID = (file: File) =>
|
||||
|
|
|
@ -1,5 +1,5 @@
|
|||
import { FileWithCollection } from 'services/upload/uploadManager';
|
||||
import { MetadataObject } from 'services/upload/uploadService';
|
||||
import { MetadataObject } from 'types/upload';
|
||||
import { File } from 'services/fileService';
|
||||
const TYPE_JSON = 'json';
|
||||
|
||||
|
|
Loading…
Reference in a new issue