Remove more dead code

...that uses bootstrap instead of spending migration effort on it.
Manav Rathi 2024-03-31 18:21:52 +05:30
parent e84b989484
commit f3a0240f1d
3 changed files with 13 additions and 313 deletions

@@ -1,228 +0,0 @@
import { addLogLine } from "@ente/shared/logging";
import "@tensorflow/tfjs-backend-cpu";
import "@tensorflow/tfjs-backend-webgl";
import { DEFAULT_ML_SYNC_CONFIG } from "constants/mlConfig";
import { useEffect, useRef, useState } from "react";
import arcfaceAlignmentService from "services/machineLearning/arcfaceAlignmentService";
import arcfaceCropService from "services/machineLearning/arcfaceCropService";
import blazeFaceDetectionService from "services/machineLearning/blazeFaceDetectionService";
import imageSceneService from "services/machineLearning/imageSceneService";
import ssdMobileNetV2Service from "services/machineLearning/ssdMobileNetV2Service";
import { AlignedFace, FaceCrop, ObjectDetection } from "types/machineLearning";
import { getMLSyncConfig } from "utils/machineLearning/config";
import {
getAlignedFaceBox,
ibExtractFaceImage,
ibExtractFaceImageUsingTransform,
} from "utils/machineLearning/faceAlign";
import { ibExtractFaceImageFromCrop } from "utils/machineLearning/faceCrop";
import { FaceCropsRow, FaceImagesRow, ImageBitmapView } from "./ImageViews";
interface MLFileDebugViewProps {
file: File;
}
function drawFaceDetection(face: AlignedFace, ctx: CanvasRenderingContext2D) {
const pointSize = Math.ceil(
Math.max(ctx.canvas.width / 512, face.detection.box.width / 32),
);
ctx.save();
ctx.strokeStyle = "rgba(255, 0, 0, 0.8)";
ctx.lineWidth = pointSize;
ctx.strokeRect(
face.detection.box.x,
face.detection.box.y,
face.detection.box.width,
face.detection.box.height,
);
ctx.restore();
ctx.save();
ctx.strokeStyle = "rgba(0, 255, 0, 0.8)";
ctx.lineWidth = Math.round(pointSize * 1.5);
const alignedBox = getAlignedFaceBox(face.alignment);
ctx.strokeRect(
alignedBox.x,
alignedBox.y,
alignedBox.width,
alignedBox.height,
);
ctx.restore();
ctx.save();
ctx.fillStyle = "rgba(0, 0, 255, 0.8)";
face.detection.landmarks.forEach((l) => {
ctx.beginPath();
ctx.arc(l.x, l.y, pointSize, 0, Math.PI * 2, true);
ctx.fill();
});
ctx.restore();
}
function drawBbox(object: ObjectDetection, ctx: CanvasRenderingContext2D) {
ctx.font = "100px Arial";
ctx.save();
ctx.restore();
ctx.rect(...object.bbox);
ctx.lineWidth = 10;
ctx.strokeStyle = "green";
ctx.fillStyle = "green";
ctx.stroke();
ctx.fillText(
object.score.toFixed(3) + " " + object.class,
object.bbox[0],
object.bbox[1] > 10 ? object.bbox[1] - 5 : 10,
);
}
export default function MLFileDebugView(props: MLFileDebugViewProps) {
// const [imageBitmap, setImageBitmap] = useState<ImageBitmap>();
const [faceCrops, setFaceCrops] = useState<FaceCrop[]>();
const [facesUsingCrops, setFacesUsingCrops] = useState<ImageBitmap[]>();
const [facesUsingImage, setFacesUsingImage] = useState<ImageBitmap[]>();
const [facesUsingTransform, setFacesUsingTransform] =
useState<ImageBitmap[]>();
const canvasRef = useRef(null);
useEffect(() => {
let didCancel = false;
const loadFile = async () => {
// TODO: go through worker for these apis, to not include ml code in main bundle
const imageBitmap = await createImageBitmap(props.file);
const faceDetections =
await blazeFaceDetectionService.detectFaces(imageBitmap);
addLogLine("detectedFaces: ", faceDetections.length);
const objectDetections = await ssdMobileNetV2Service.detectObjects(
imageBitmap,
DEFAULT_ML_SYNC_CONFIG.objectDetection.maxNumBoxes,
DEFAULT_ML_SYNC_CONFIG.objectDetection.minScore,
);
addLogLine("detectedObjects: ", JSON.stringify(objectDetections));
const sceneDetections = await imageSceneService.detectScenes(
imageBitmap,
DEFAULT_ML_SYNC_CONFIG.sceneDetection.minScore,
);
addLogLine("detectedScenes: ", JSON.stringify(sceneDetections));
const mlSyncConfig = await getMLSyncConfig();
const faceCropPromises = faceDetections.map(async (faceDetection) =>
arcfaceCropService.getFaceCrop(
imageBitmap,
faceDetection,
mlSyncConfig.faceCrop,
),
);
const faceCrops = await Promise.all(faceCropPromises);
if (didCancel) return;
setFaceCrops(faceCrops);
const faceAlignments = faceDetections.map((detection) =>
arcfaceAlignmentService.getFaceAlignment(detection),
);
addLogLine("alignedFaces: ", JSON.stringify(faceAlignments));
const canvas: HTMLCanvasElement = canvasRef.current;
canvas.width = imageBitmap.width;
canvas.height = imageBitmap.height;
const ctx = canvas.getContext("2d");
if (didCancel) return;
ctx.drawImage(imageBitmap, 0, 0);
const alignedFaces = faceAlignments.map((alignment, i) => {
return {
detection: faceDetections[i],
alignment,
} as AlignedFace;
});
alignedFaces.forEach((alignedFace) =>
drawFaceDetection(alignedFace, ctx),
);
objectDetections.forEach((object) => drawBbox(object, ctx));
const facesUsingCrops = await Promise.all(
alignedFaces.map((face, i) => {
return ibExtractFaceImageFromCrop(
faceCrops[i],
face.alignment,
112,
);
}),
);
const facesUsingImage = await Promise.all(
alignedFaces.map((face) => {
return ibExtractFaceImage(imageBitmap, face.alignment, 112);
}),
);
const facesUsingTransform = await Promise.all(
alignedFaces.map((face) => {
return ibExtractFaceImageUsingTransform(
imageBitmap,
face.alignment,
112,
);
}),
);
if (didCancel) return;
setFacesUsingCrops(facesUsingCrops);
setFacesUsingImage(facesUsingImage);
setFacesUsingTransform(facesUsingTransform);
};
props.file && loadFile();
return () => {
didCancel = true;
};
}, [props.file]);
return (
<div>
<p></p>
{/* <ImageBitmapView image={imageBitmap}></ImageBitmapView> */}
<canvas
ref={canvasRef}
width={0}
height={0}
style={{ maxWidth: "100%" }}
/>
<p></p>
<div>Face Crops:</div>
<FaceCropsRow>
{faceCrops?.map((faceCrop, i) => (
<ImageBitmapView
key={i}
image={faceCrop.image}
></ImageBitmapView>
))}
</FaceCropsRow>
<p></p>
<div>Face Images using face crops:</div>
<FaceImagesRow>
{facesUsingCrops?.map((image, i) => (
<ImageBitmapView key={i} image={image}></ImageBitmapView>
))}
</FaceImagesRow>
<div>Face Images using original image:</div>
<FaceImagesRow>
{facesUsingImage?.map((image, i) => (
<ImageBitmapView key={i} image={image}></ImageBitmapView>
))}
</FaceImagesRow>
<div>Face Images using transform:</div>
<FaceImagesRow>
{facesUsingTransform?.map((image, i) => (
<ImageBitmapView key={i} image={image}></ImageBitmapView>
))}
</FaceImagesRow>
</div>
);
}
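
Note that the deleted debug view guards every setState call behind a didCancel flag, so results from a stale effect run (for example, after props.file changes mid-load) are silently dropped. A minimal sketch of that cancellation pattern, pulled out into a hypothetical useAsyncResult hook (the hook name and its compute parameter are illustrative, not part of this codebase):

import { useEffect, useState } from "react";

function useAsyncResult<T>(compute: () => Promise<T>): T | undefined {
    const [result, setResult] = useState<T>();
    useEffect(() => {
        let didCancel = false;
        compute().then((value) => {
            // Drop results that arrive after cleanup has run for this effect.
            if (!didCancel) setResult(value);
        });
        // Cleanup marks this run as stale before the next run starts.
        return () => {
            didCancel = true;
        };
    }, [compute]);
    return result;
}

The flag does not abort the in-flight work; it only prevents stale state updates, which is exactly how the removed component behaved.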

@@ -1,60 +0,0 @@
import { getToken, getUserID } from "@ente/shared/storage/localStorage/helpers";
import { useState } from "react";
import { Button, Spinner } from "react-bootstrap";
import { EnteFile } from "types/file";
import mlService from "../../services/machineLearning/machineLearningService";
function MLServiceFileInfoButton({
file,
updateMLDataIndex,
setUpdateMLDataIndex,
}: {
file: EnteFile;
updateMLDataIndex: number;
setUpdateMLDataIndex: (num: number) => void;
}) {
const [mlServiceRunning, setMlServiceRunning] = useState(false);
const runMLService = async () => {
setMlServiceRunning(true);
const token = getToken();
const userID = getUserID();
// index 4 is for timeout of 240 seconds
await mlService.syncLocalFile(token, userID, file as EnteFile, null, 4);
setUpdateMLDataIndex(updateMLDataIndex + 1);
setMlServiceRunning(false);
};
return (
<div
style={{
marginTop: "18px",
}}
>
<Button
onClick={runMLService}
disabled={mlServiceRunning}
variant={mlServiceRunning ? "secondary" : "primary"}
>
{!mlServiceRunning ? (
"Run ML Service"
) : (
<>
ML Service Running{" "}
<Spinner
animation="border"
size="sm"
style={{
marginLeft: "5px",
}}
/>
</>
)}
</Button>
</div>
);
}
export default MLServiceFileInfoButton;
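
This component pulls Button and Spinner from react-bootstrap, which is the "bootstrap" usage the commit message refers to: deleting the dead code is cheaper than migrating it. Had it been migrated to MUI instead, a rough sketch (the RunMLButton name and the injected run callback are assumptions for illustration, not part of this codebase) might look like:

import { Button, CircularProgress } from "@mui/material";
import { useState } from "react";

// Hypothetical MUI-based equivalent of the removed component.
function RunMLButton({ run }: { run: () => Promise<void> }) {
    const [running, setRunning] = useState(false);
    const handleClick = async () => {
        setRunning(true);
        try {
            await run();
        } finally {
            // Unlike the removed component, reset even if `run` throws.
            setRunning(false);
        }
    };
    return (
        <Button variant="contained" onClick={handleClick} disabled={running}>
            {running ? <CircularProgress size={16} /> : "Run ML Service"}
        </Button>
    );
}

Note the try/finally: the removed component would leave mlServiceRunning stuck at true if syncLocalFile threw.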

@@ -10,11 +10,24 @@ import TextSnippetOutlined from "@mui/icons-material/TextSnippetOutlined";
import { Box, DialogProps, Link, Stack, styled } from "@mui/material";
import { Chip } from "components/Chip";
import { EnteDrawer } from "components/EnteDrawer";
import { ObjectLabelList } from "components/MachineLearning/ObjectList";
import {
PhotoPeopleList,
UnidentifiedFaces,
} from "components/MachineLearning/PeopleList";
import Titlebar from "components/Titlebar";
import LinkButton from "components/pages/gallery/LinkButton";
import { t } from "i18next";
import { AppContext } from "pages/_app";
import { GalleryContext } from "pages/gallery";
import { useContext, useEffect, useMemo, useState } from "react";
import { getEXIFLocation } from "services/upload/exifService";
import { EnteFile } from "types/file";
import { PublicCollectionGalleryContext } from "utils/publicCollectionGallery";
import {
getMapDisableConfirmationDialog,
getMapEnableConfirmationDialog,
} from "utils/ui";
import { ExifData } from "./ExifData";
import InfoItem from "./InfoItem";
import MapBox from "./MapBox";
@@ -22,23 +35,6 @@ import { RenderCaption } from "./RenderCaption";
import { RenderCreationTime } from "./RenderCreationTime";
import { RenderFileName } from "./RenderFileName";
import {
PhotoPeopleList,
UnidentifiedFaces,
} from "components/MachineLearning/PeopleList";
import { ObjectLabelList } from "components/MachineLearning/ObjectList";
// import MLServiceFileInfoButton from 'components/MachineLearning/MLServiceFileInfoButton';
import { t } from "i18next";
import { AppContext } from "pages/_app";
import { GalleryContext } from "pages/gallery";
import { PublicCollectionGalleryContext } from "utils/publicCollectionGallery";
import {
getMapDisableConfirmationDialog,
getMapEnableConfirmationDialog,
} from "utils/ui";
export const FileInfoSidebar = styled((props: DialogProps) => (
<EnteDrawer {...props} anchor="right" />
))({
@@ -352,14 +348,6 @@ export function FileInfo({
file={file}
updateMLDataIndex={updateMLDataIndex}
/>
{/* <Box pt={1}>
<MLServiceFileInfoButton
file={file}
updateMLDataIndex={updateMLDataIndex}
setUpdateMLDataIndex={setUpdateMLDataIndex}
/>
</Box> */}
</>
)}
</Stack>