| author | IEatChili <nanunguyen99@gmail.com> | 2024-08-15 14:13:02 -0400 |
|---|---|---|
| committer | IEatChili <nanunguyen99@gmail.com> | 2024-08-15 14:13:02 -0400 |
| commit | 0e975569e5686138e52bdc554b3f0391f42aeead (patch) | |
| tree | bab5aff6665cdd07a37948d943d687c6d5158b2d | /src/client/views/search |
| parent | 9e03f9333641c818ed9c711282f27f7213cbe3c1 (diff) | |
feat: added face recognition box
Diffstat (limited to 'src/client/views/search')
| -rw-r--r-- | src/client/views/search/FaceRecognitionHandler.tsx | 74 |
1 files changed, 58 insertions, 16 deletions
diff --git a/src/client/views/search/FaceRecognitionHandler.tsx b/src/client/views/search/FaceRecognitionHandler.tsx
index fcd38c42f..ef4622ea2 100644
--- a/src/client/views/search/FaceRecognitionHandler.tsx
+++ b/src/client/views/search/FaceRecognitionHandler.tsx
@@ -1,14 +1,13 @@
 import * as faceapi from 'face-api.js';
-import { FaceMatcher, TinyFaceDetectorOptions } from 'face-api.js';
-import { Doc, DocListCast, NumListCast } from '../../../fields/Doc';
+import { FaceMatcher } from 'face-api.js';
+import { Doc, DocListCast } from '../../../fields/Doc';
 import { DocData } from '../../../fields/DocSymbols';
 import { List } from '../../../fields/List';
-import { ObjectField } from '../../../fields/ObjectField';
-import { ImageCast, StrCast } from '../../../fields/Types';
-import { DocUtils } from '../../documents/DocUtils';
-import { Deserializable } from '../../util/SerializationHelper';
-import { DocumentView } from '../nodes/DocumentView';
+import { ImageCast, NumCast, StrCast } from '../../../fields/Types';
 
+/**
+ * A class that handles face recognition.
+ */
 export class FaceRecognitionHandler {
     static _instance: FaceRecognitionHandler;
     private loadedModels: boolean = false;
@@ -18,8 +17,12 @@ export class FaceRecognitionHandler {
     constructor() {
         FaceRecognitionHandler._instance = this;
         this.loadModels();
+        this.examinedDocs = new Set(DocListCast(Doc.UserDoc()[DocData].examinedFaceDocs, []));
     }
 
+    /**
+     * Loads the face detection models.
+     */
     async loadModels() {
         const MODEL_URL = `/models`;
         await faceapi.loadFaceDetectionModel(MODEL_URL);
@@ -32,19 +35,31 @@ export class FaceRecognitionHandler {
         return FaceRecognitionHandler._instance ?? new FaceRecognitionHandler();
     }
 
+    /**
+     * When a document is added, look for matching face documents.
+     * @param doc The document being analyzed.
+     */
     public async findMatches(doc: Doc) {
         if (this.loadedModels) {
+            // If the Dashboard doesn't have a list of face documents yet, initialize the list.
             if (!Doc.ActiveDashboard![DocData].faceDocuments) {
                 Doc.ActiveDashboard![DocData].faceDocuments = new List<Doc>();
             }
 
+            // If the doc has already been examined, or is currently being processed, stop examining it.
             if (this.examinedDocs.has(doc) || this.processingDocs.has(doc)) {
                 return;
             }
 
+            // Mark the document as being processed.
             this.processingDocs.add(doc);
 
             try {
+                if (!Doc.UserDoc()[DocData].faceDocNum) {
+                    Doc.UserDoc()[DocData].faceDocNum = 0;
+                }
+
+                // Get the image the document contains and analyze it for faces.
                 const [name, type] = ImageCast(doc[Doc.LayoutFieldKey(doc)]).url.href.split('.');
                 const imageURL = `${name}_o.${type}`;
 
@@ -52,30 +67,51 @@
 
                 const fullFaceDescriptions = await faceapi.detectAllFaces(img).withFaceLandmarks().withFaceDescriptors();
 
+                doc[DocData].faces = new List<List<number>>();
+
+                // For each face detected, find a match.
                 for (const fd of fullFaceDescriptions) {
-                    const match = this.findMatch(fd.descriptor);
+                    let match = this.findMatch(fd.descriptor);
+                    let converted_list = new List<number>();
+
                     if (match) {
+                        // If a matching Face Document has been found, add the document to the Face Document's associated docs and append the face
+                        // descriptor to the Face Document's descriptor list.
                        const converted_array = Array.from(fd.descriptor);
-                        const converted_list = new List<number>(converted_array);
+                        converted_list = new List<number>(converted_array);
                         match[DocData].associatedDocs = new List<Doc>([...DocListCast(match[DocData].associatedDocs), doc]);
                         match[DocData].faceDescriptors = new List<List<number>>([...(match[DocData].faceDescriptors as List<List<number>>), converted_list]);
                     } else {
+                        // If a matching Face Document has not been found, create a new Face Document.
                         const newFaceDocument = new Doc();
                         const converted_array = Array.from(fd.descriptor);
-                        const converted_list = new List<number>(converted_array);
+                        converted_list = new List<number>(converted_array);
                         newFaceDocument[DocData].faceDescriptors = new List<List<number>>();
                         (newFaceDocument[DocData].faceDescriptors as List<List<number>>).push(converted_list);
-                        newFaceDocument[DocData].label = `Person ${DocListCast(Doc.ActiveDashboard![DocData].faceDocuments).length + 1}`;
+                        Doc.UserDoc()[DocData].faceDocNum = NumCast(Doc.UserDoc()[DocData].faceDocNum) + 1;
+                        newFaceDocument[DocData].label = `Face ${Doc.UserDoc()[DocData].faceDocNum}`;
                         newFaceDocument[DocData].associatedDocs = new List<Doc>([doc]);
                         Doc.ActiveDashboard![DocData].faceDocuments = new List<Doc>([...DocListCast(Doc.ActiveDashboard![DocData].faceDocuments), newFaceDocument]);
+                        match = newFaceDocument;
                     }
+
+                    // Assign a field on the document for the matching Face Document.
+                    if (doc[DocData][`FACE DESCRIPTOR - ${match[DocData].label}`]) {
+                        doc[DocData][`FACE DESCRIPTOR - ${match[DocData].label}`] = new List<List<number>>([...(doc[DocData][`FACE DESCRIPTOR - ${match[DocData].label}`] as List<List<number>>), converted_list]);
+                    } else {
+                        doc[DocData][`FACE DESCRIPTOR - ${match[DocData].label}`] = new List<List<number>>([converted_list]);
+                    }
+
+                    doc[DocData].faces = new List<List<number>>([...(doc[DocData].faces as List<List<number>>), converted_list]);
                 }
 
+                // Update the examined docs field.
                 this.examinedDocs.add(doc);
-                console.log(this.examinedDocs);
-
-                DocListCast(Doc.ActiveDashboard![DocData].faceDocuments).forEach(doc => console.log(DocListCast(doc[DocData].associatedDocs)));
+                if (!Doc.UserDoc()[DocData].examinedFaceDocs) {
+                    Doc.UserDoc()[DocData].examinedFaceDocs = new List<Doc>();
+                }
+                Doc.UserDoc()[DocData].examinedFaceDocs = new List<Doc>([...DocListCast(Doc.UserDoc()[DocData].examinedFaceDocs), doc]);
 
             } catch (error) {
                 console.error('Error processing document:', error);
             } finally {
@@ -84,6 +120,11 @@
         }
     }
 
+    /**
+     * Finds a matching Face Document given a descriptor.
+     * @param cur_descriptor The current descriptor whose match is being searched for.
+     * @returns The most similar Face Document.
+     */
     private findMatch(cur_descriptor: Float32Array) {
         if (DocListCast(Doc.ActiveDashboard![DocData].faceDocuments).length < 1) {
             return null;
@@ -95,19 +136,20 @@
         });
         const faceMatcher = new FaceMatcher(faceDescriptors, 0.6);
         const match = faceMatcher.findBestMatch(cur_descriptor);
-
         if (match.label == 'unknown') {
             return null;
         } else {
             for (const doc of DocListCast(Doc.ActiveDashboard![DocData].faceDocuments)) {
                 if (doc[DocData].label === match.label) {
-                    console.log(match.label);
                     return doc;
                 }
             }
         }
     }
 
+    /**
+     * Loads an image.
+     */
     private loadImage = (src: string): Promise<HTMLImageElement> => {
         return new Promise((resolve, reject) => {
             const img = new Image();
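For context on the face-api.js calls this handler builds on, below is a minimal, self-contained sketch of the detect-then-match flow: load the models, compute descriptors, group them under a label, and match new descriptors with `FaceMatcher` at the same 0.6 threshold used in `findMatch`. The `demoFaceMatching` function, its image-element arguments, and the `'Face 1'` label are illustrative assumptions, not part of this repository.

```typescript
import * as faceapi from 'face-api.js';

// Assumed model location; the handler above serves its models from `/models`.
const MODEL_URL = '/models';

async function demoFaceMatching(img: HTMLImageElement, knownImg: HTMLImageElement) {
    // Load the detection, landmark, and recognition models (same calls as loadModels above).
    await faceapi.loadFaceDetectionModel(MODEL_URL);
    await faceapi.loadFaceLandmarkModel(MODEL_URL);
    await faceapi.loadFaceRecognitionModel(MODEL_URL);

    // Compute 128-dimensional descriptors for every face in a reference image.
    const known = await faceapi.detectAllFaces(knownImg).withFaceLandmarks().withFaceDescriptors();
    if (!known.length) return;

    // Group the reference descriptors under a label, loosely mirroring a "Face Document".
    const labeled = new faceapi.LabeledFaceDescriptors(
        'Face 1', // hypothetical label
        known.map(fd => fd.descriptor)
    );

    // 0.6 is the Euclidean-distance threshold also used in findMatch above.
    const matcher = new faceapi.FaceMatcher([labeled], 0.6);

    // Detect faces in the new image and match each descriptor against the known set.
    const detections = await faceapi.detectAllFaces(img).withFaceLandmarks().withFaceDescriptors();
    for (const fd of detections) {
        const best = matcher.findBestMatch(fd.descriptor);
        // best.label is 'unknown' when no stored descriptor is within the threshold.
        console.log(`${best.label} (distance ${best.distance.toFixed(2)})`);
    }
}
```

The handler in the diff persists descriptors as `List<number>` fields on Dash documents rather than keeping `LabeledFaceDescriptors` in memory, which appears to be why `findMatch` rebuilds a `FaceMatcher` from the stored lists on each call.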
