Unverified Commit cc99df49 by justadudewhohacks Committed by GitHub

Merge pull request #3 from justadudewhohacks/face-extraction

Face extraction
parents f7c90389 d7962a58
import { getContext2dOrThrow, getElement, getMediaDimensions } from './utils';
import { createCanvas, getContext2dOrThrow, getElement, getMediaDimensions } from './utils';
var NetInput = /** @class */ (function () {
function NetInput(mediaArg, dims) {
var _this = this;
......@@ -28,9 +28,7 @@ var NetInput = /** @class */ (function () {
}
// if input is batch type, make sure every canvas has the same dimensions
var _a = this.dims || dims || getMediaDimensions(media), width = _a.width, height = _a.height;
var canvas = document.createElement('canvas');
canvas.width = width;
canvas.height = height;
var canvas = createCanvas({ width: width, height: height });
getContext2dOrThrow(canvas).drawImage(media, 0, 0, width, height);
this._canvases.push(canvas);
};
......
{"version":3,"file":"NetInput.js","sourceRoot":"","sources":["../src/NetInput.ts"],"names":[],"mappings":"AACA,OAAO,EAAE,mBAAmB,EAAE,UAAU,EAAE,kBAAkB,EAAE,MAAM,SAAS,CAAC;AAE9E;IAGE,kBACE,QAAmB,EACnB,IAAiB;QAFnB,iBA2BC;QAvBC,IAAM,aAAa,GAAG,KAAK,CAAC,OAAO,CAAC,QAAQ,CAAC;YAC3C,CAAC,CAAC,QAAQ;YACV,CAAC,CAAC,CAAC,QAAQ,CAAC,CAAA;QAEd,IAAI,CAAC,aAAa,CAAC,MAAM,EAAE;YACzB,MAAM,IAAI,KAAK,CAAC,wCAAwC,CAAC,CAAA;SAC1D;QAED,IAAM,MAAM,GAAG,aAAa,CAAC,GAAG,CAAC,UAAU,CAAC,CAAA;QAG5C,MAAM,CAAC,OAAO,CAAC,UAAC,KAAK,EAAE,CAAC;YACtB,IAAI,CAAC,CAAC,KAAK,YAAY,gBAAgB,IAAI,KAAK,YAAY,gBAAgB,IAAI,KAAK,YAAY,iBAAiB,CAAC,EAAE;gBACnH,IAAM,OAAO,GAAG,KAAK,CAAC,OAAO,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,qBAAmB,CAAC,MAAG,CAAC,CAAC,CAAC,EAAE,CAAA;gBACtE,IAAI,OAAO,aAAa,CAAC,CAAC,CAAC,KAAK,QAAQ,EAAE;oBACxC,MAAM,IAAI,KAAK,CAAC,eAAa,OAAO,qEAAkE,CAAC,CAAA;iBACxG;gBACD,MAAM,IAAI,KAAK,CAAC,eAAa,OAAO,kHAA+G,CAAC,CAAA;aACrJ;QACH,CAAC,CAAC,CAAA;QAEF,IAAI,CAAC,SAAS,GAAG,EAAE,CAAA;QACnB,MAAM,CAAC,OAAO,CAAC,UAAA,CAAC,IAAI,OAAA,KAAI,CAAC,UAAU,CAAC,CAAC,EAAE,IAAI,CAAC,EAAxB,CAAwB,CAAC,CAAA;IAC/C,CAAC;IAEO,6BAAU,GAAlB,UAAmB,KAAoB,EAAE,IAAiB;QACxD,IAAI,KAAK,YAAY,iBAAiB,EAAE;YACtC,IAAI,CAAC,SAAS,CAAC,IAAI,CAAC,KAAK,CAAC,CAAA;YAC1B,OAAM;SACP;QAED,yEAAyE;QACnE,IAAA,mDAAkE,EAAhE,gBAAK,EAAE,kBAAM,CAAmD;QAExE,IAAM,MAAM,GAAG,QAAQ,CAAC,aAAa,CAAC,QAAQ,CAAC,CAAA;QAC/C,MAAM,CAAC,KAAK,GAAG,KAAK,CAAA;QACpB,MAAM,CAAC,MAAM,GAAG,MAAM,CAAA;QAEtB,mBAAmB,CAAC,MAAM,CAAC,CAAC,SAAS,CAAC,KAAK,EAAE,CAAC,EAAE,CAAC,EAAE,KAAK,EAAE,MAAM,CAAC,CAAA;QACjE,IAAI,CAAC,SAAS,CAAC,IAAI,CAAC,MAAM,CAAC,CAAA;IAC7B,CAAC;IAED,sBAAW,8BAAQ;aAAnB;YACE,OAAO,IAAI,CAAC,SAAS,CAAA;QACvB,CAAC;;;OAAA;IAED,sBAAW,2BAAK;aAAhB;YACE,OAAO,CAAC,IAAI,CAAC,SAAS,CAAC,CAAC,CAAC,IAAI,EAAE,CAAC,CAAC,KAAK,CAAA;QACxC,CAAC;;;OAAA;IAED,sBAAW,4BAAM;aAAjB;YACE,OAAO,CAAC,IAAI,CAAC,SAAS,CAAC,CAAC,CAAC,IAAI,EAAE,CAAC,CAAC,MAAM,CAAA;QACzC,CAAC;;;OAAA;IAED,sBAAW,0BAAI;aAAf;YACQ,IAAA,SAAwB,EAAtB,gBAAK,EAAE,kBAAM,CAAS;YAC9B,OAAO,CAAC,KAAK,GAAG,CAAC,IAAI,MAAM,GAAG,CAAC,CAAC,CAAC,CAAC,CAAC,EAAE,KAAK,OAAA,E
AAE,MAAM,QAAA,EAAE,CAAC,CAAC,CAAC,IAAI,CAAA;QAC7D,CAAC;;;OAAA;IACH,eAAC;AAAD,CAAC,AAjED,IAiEC"}
\ No newline at end of file
{"version":3,"file":"NetInput.js","sourceRoot":"","sources":["../src/NetInput.ts"],"names":[],"mappings":"AACA,OAAO,EAAE,YAAY,EAAE,mBAAmB,EAAE,UAAU,EAAE,kBAAkB,EAAE,MAAM,SAAS,CAAC;AAE5F;IAGE,kBACE,QAAmB,EACnB,IAAiB;QAFnB,iBA2BC;QAvBC,IAAM,aAAa,GAAG,KAAK,CAAC,OAAO,CAAC,QAAQ,CAAC;YAC3C,CAAC,CAAC,QAAQ;YACV,CAAC,CAAC,CAAC,QAAQ,CAAC,CAAA;QAEd,IAAI,CAAC,aAAa,CAAC,MAAM,EAAE;YACzB,MAAM,IAAI,KAAK,CAAC,wCAAwC,CAAC,CAAA;SAC1D;QAED,IAAM,MAAM,GAAG,aAAa,CAAC,GAAG,CAAC,UAAU,CAAC,CAAA;QAG5C,MAAM,CAAC,OAAO,CAAC,UAAC,KAAK,EAAE,CAAC;YACtB,IAAI,CAAC,CAAC,KAAK,YAAY,gBAAgB,IAAI,KAAK,YAAY,gBAAgB,IAAI,KAAK,YAAY,iBAAiB,CAAC,EAAE;gBACnH,IAAM,OAAO,GAAG,KAAK,CAAC,OAAO,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,qBAAmB,CAAC,MAAG,CAAC,CAAC,CAAC,EAAE,CAAA;gBACtE,IAAI,OAAO,aAAa,CAAC,CAAC,CAAC,KAAK,QAAQ,EAAE;oBACxC,MAAM,IAAI,KAAK,CAAC,eAAa,OAAO,qEAAkE,CAAC,CAAA;iBACxG;gBACD,MAAM,IAAI,KAAK,CAAC,eAAa,OAAO,kHAA+G,CAAC,CAAA;aACrJ;QACH,CAAC,CAAC,CAAA;QAEF,IAAI,CAAC,SAAS,GAAG,EAAE,CAAA;QACnB,MAAM,CAAC,OAAO,CAAC,UAAA,CAAC,IAAI,OAAA,KAAI,CAAC,UAAU,CAAC,CAAC,EAAE,IAAI,CAAC,EAAxB,CAAwB,CAAC,CAAA;IAC/C,CAAC;IAEO,6BAAU,GAAlB,UAAmB,KAAoB,EAAE,IAAiB;QACxD,IAAI,KAAK,YAAY,iBAAiB,EAAE;YACtC,IAAI,CAAC,SAAS,CAAC,IAAI,CAAC,KAAK,CAAC,CAAA;YAC1B,OAAM;SACP;QAED,yEAAyE;QACnE,IAAA,mDAAkE,EAAhE,gBAAK,EAAE,kBAAM,CAAmD;QAExE,IAAM,MAAM,GAAG,YAAY,CAAC,EAAE,KAAK,OAAA,EAAE,MAAM,QAAA,EAAE,CAAC,CAAA;QAC9C,mBAAmB,CAAC,MAAM,CAAC,CAAC,SAAS,CAAC,KAAK,EAAE,CAAC,EAAE,CAAC,EAAE,KAAK,EAAE,MAAM,CAAC,CAAA;QACjE,IAAI,CAAC,SAAS,CAAC,IAAI,CAAC,MAAM,CAAC,CAAA;IAC7B,CAAC;IAED,sBAAW,8BAAQ;aAAnB;YACE,OAAO,IAAI,CAAC,SAAS,CAAA;QACvB,CAAC;;;OAAA;IAED,sBAAW,2BAAK;aAAhB;YACE,OAAO,CAAC,IAAI,CAAC,SAAS,CAAC,CAAC,CAAC,IAAI,EAAE,CAAC,CAAC,KAAK,CAAA;QACxC,CAAC;;;OAAA;IAED,sBAAW,4BAAM;aAAjB;YACE,OAAO,CAAC,IAAI,CAAC,SAAS,CAAC,CAAC,CAAC,IAAI,EAAE,CAAC,CAAC,MAAM,CAAA;QACzC,CAAC;;;OAAA;IAED,sBAAW,0BAAI;aAAf;YACQ,IAAA,SAAwB,EAAtB,gBAAK,EAAE,kBAAM,CAAS;YAC9B,OAAO,CAAC,KAAK,GAAG,CAAC,IAAI,MAAM,GAAG,CAAC,CAAC,CAAC,CAAC,CAAC,EAAE,KAAK,OAAA,EAAE,MAAM,QAAA,EAAE,CAAC,CAAC,CAAC,IAAI,CAA
A;QAC7D,CAAC;;;OAAA;IACH,eAAC;AAAD,CAAC,AA9DD,IA8DC"}
\ No newline at end of file
import * as tf from '@tensorflow/tfjs-core';
import { FaceDetectionResult } from './faceDetectionNet/FaceDetectionResult';
import { NetInput } from './NetInput';
import { TNetInput } from './types';
/**
 * Extracts the tensors of the image regions containing the detected faces.
 * Returned tensors have to be disposed manually once you don't need them anymore!
 * Useful if you want to compute the face descriptors for the face
 * images. Using this method is faster than extracting a canvas for each face and
 * converting them to tensors individually.
 *
 * @param image The image that face detection has been performed on.
 * @param detections The face detection results for that image.
 * @returns Tensors of the corresponding image region for each detected face.
 */
export declare function extractFaceTensors(image: tf.Tensor | NetInput | TNetInput, detections: FaceDetectionResult[]): tf.Tensor4D[];
import * as tf from '@tensorflow/tfjs-core';
import { getImageTensor } from './transformInputs';
/**
 * Extracts the tensors of the image regions containing the detected faces.
 * Returned tensors have to be disposed manually once you don't need them anymore!
 * Useful if you want to compute the face descriptors for the face
 * images. Using this method is faster than extracting a canvas for each face and
 * converting them to tensors individually.
 *
 * @param image The image that face detection has been performed on.
 * @param detections The face detection results for that image.
 * @returns Tensors of the corresponding image region for each detected face.
 */
export function extractFaceTensors(image, detections) {
    return tf.tidy(function () {
        var imgTensor = getImageTensor(image);
        // TODO handle batches
        // imgTensor.shape is [batchSize, imgHeight, imgWidth, numChannels]
        var shape = imgTensor.shape;
        var imgHeight = shape[1];
        var imgWidth = shape[2];
        var numChannels = shape[3];
        // Slice one NHWC sub-region per detection out of the (single) input image.
        return detections.map(function (detection) {
            var box = detection.forSize(imgWidth, imgHeight).box;
            return tf.slice(imgTensor, [0, box.y, box.x, 0], [1, box.height, box.width, numChannels]);
        });
    });
}
//# sourceMappingURL=extractFaceTensors.js.map
\ No newline at end of file
{"version":3,"file":"extractFaceTensors.js","sourceRoot":"","sources":["../src/extractFaceTensors.ts"],"names":[],"mappings":"AAAA,OAAO,KAAK,EAAE,MAAM,uBAAuB,CAAC;AAI5C,OAAO,EAAE,cAAc,EAAE,MAAM,mBAAmB,CAAC;AAGnD;;;;;;;;;;GAUG;AACH,MAAM,6BACJ,KAAuC,EACvC,UAAiC;IAEjC,OAAO,EAAE,CAAC,IAAI,CAAC;QACb,IAAM,SAAS,GAAG,cAAc,CAAC,KAAK,CAAC,CAAA;QAEvC,sBAAsB;QAChB,IAAA,oBAA+D,EAA9D,iBAAS,EAAE,iBAAS,EAAE,gBAAQ,EAAE,mBAAW,CAAmB;QAErE,IAAM,WAAW,GAAG,UAAU,CAAC,GAAG,CAAC,UAAA,GAAG;YAC9B,IAAA,yCAA8D,EAA5D,QAAC,EAAE,QAAC,EAAE,gBAAK,EAAE,kBAAM,CAAyC;YACpE,OAAO,EAAE,CAAC,KAAK,CAAC,SAAS,EAAE,CAAC,CAAC,EAAE,CAAC,EAAE,CAAC,EAAE,CAAC,CAAC,EAAE,CAAC,CAAC,EAAE,MAAM,EAAE,KAAK,EAAE,WAAW,CAAC,CAAC,CAAA;QAC3E,CAAC,CAAC,CAAA;QAEF,OAAO,WAAW,CAAA;IACpB,CAAC,CAAC,CAAA;AACJ,CAAC"}
\ No newline at end of file
import { FaceDetectionResult } from './faceDetectionNet/FaceDetectionResult';
/**
 * Extracts the image regions containing the detected faces.
 *
 * @param image The image that face detection has been performed on.
 * @param detections The face detection results for that image.
 * @returns The Canvases of the corresponding image region for each detected face.
 */
export declare function extractFaces(image: HTMLCanvasElement, detections: FaceDetectionResult[]): HTMLCanvasElement[];
import { createCanvas, getContext2dOrThrow } from './utils';
/**
 * Extracts the image regions containing the detected faces.
 *
 * @param image The canvas that face detection has been performed on.
 * @param detections The face detection results for that image.
 * @returns The Canvases of the corresponding image region for each detected face.
 */
export function extractFaces(image, detections) {
    var sourceCtx = getContext2dOrThrow(image);
    return detections.map(function (detection) {
        // Map the relative detection box to pixel coordinates of the source canvas.
        var box = detection.forSize(image.width, image.height).box;
        var faceCanvas = createCanvas({ width: box.width, height: box.height });
        var imageData = sourceCtx.getImageData(box.x, box.y, box.width, box.height);
        getContext2dOrThrow(faceCanvas).putImageData(imageData, 0, 0);
        return faceCanvas;
    });
}
//# sourceMappingURL=extractFaces.js.map
\ No newline at end of file
{"version":3,"file":"extractFaces.js","sourceRoot":"","sources":["../src/extractFaces.ts"],"names":[],"mappings":"AACA,OAAO,EAAE,YAAY,EAAE,mBAAmB,EAAE,MAAM,SAAS,CAAC;AAE5D;;;;;;GAMG;AACH,MAAM,uBACJ,KAAwB,EACxB,UAAiC;IAEjC,IAAM,GAAG,GAAG,mBAAmB,CAAC,KAAK,CAAC,CAAA;IAEtC,OAAO,UAAU,CAAC,GAAG,CAAC,UAAA,GAAG;QACjB,IAAA,+CAAoE,EAAlE,QAAC,EAAE,QAAC,EAAE,gBAAK,EAAE,kBAAM,CAA+C;QAE1E,IAAM,OAAO,GAAG,YAAY,CAAC,EAAE,KAAK,OAAA,EAAE,MAAM,QAAA,EAAE,CAAC,CAAA;QAC/C,mBAAmB,CAAC,OAAO,CAAC;aACzB,YAAY,CAAC,GAAG,CAAC,YAAY,CAAC,CAAC,EAAE,CAAC,EAAE,KAAK,EAAE,MAAM,CAAC,EAAE,CAAC,EAAE,CAAC,CAAC,CAAA;QAC5D,OAAO,OAAO,CAAA;IAChB,CAAC,CAAC,CAAA;AACJ,CAAC"}
\ No newline at end of file
This source diff could not be displayed because it is too large. You can view the blob instead.
import { FaceDetectionNet } from './types';
/**
 * A single face detection: a confidence score plus the face's bounding box
 * in relative [0, 1] coordinates. Use forSize() to map the box to absolute
 * pixel coordinates for a given image size.
 *
 * NOTE(review): the scraped diff contained both the pre- and post-change
 * members (duplicate constructors / conflicting fields), which is invalid
 * TypeScript; this keeps only the current (_score/_*Relative) shape.
 */
export declare class FaceDetectionResult {
    private _score;
    private _topRelative;
    private _leftRelative;
    private _bottomRelative;
    private _rightRelative;
    constructor(score: number, topRelative: number, leftRelative: number, bottomRelative: number, rightRelative: number);
    forSize(width: number, height: number): FaceDetectionNet.Detection;
}
var FaceDetectionResult = /** @class */ (function () {
    /**
     * Holds a detection's confidence score and its bounding box in
     * relative [0, 1] coordinates.
     *
     * NOTE(review): the scraped diff had the old and new constructor bodies
     * interleaved (a nested duplicate constructor and duplicate object keys),
     * which is not valid JS; this restores the current version.
     *
     * @param score Detection confidence score.
     * @param topRelative Top edge, relative, clamped to >= 0.
     * @param leftRelative Left edge, relative, clamped to >= 0.
     * @param bottomRelative Bottom edge, relative, clamped to <= 1.
     * @param rightRelative Right edge, relative, clamped to <= 1.
     */
    function FaceDetectionResult(score, topRelative, leftRelative, bottomRelative, rightRelative) {
        this._score = score;
        // Clamp relative coordinates into the valid [0, 1] range.
        this._topRelative = Math.max(0, topRelative);
        this._leftRelative = Math.max(0, leftRelative);
        this._bottomRelative = Math.min(1.0, bottomRelative);
        this._rightRelative = Math.min(1.0, rightRelative);
    }
    /**
     * Maps the relative box to absolute (integer) pixel coordinates for an
     * image of the given dimensions.
     *
     * @returns { score, box: { x, y, width, height } }
     */
    FaceDetectionResult.prototype.forSize = function (width, height) {
        var x = Math.floor(this._leftRelative * width);
        var y = Math.floor(this._topRelative * height);
        return {
            score: this._score,
            box: {
                x: x,
                y: y,
                width: Math.floor(this._rightRelative * width) - x,
                height: Math.floor(this._bottomRelative * height) - y
            }
        };
    };
    return FaceDetectionResult;
}());
......
{"version":3,"file":"FaceDetectionResult.js","sourceRoot":"","sources":["../../src/faceDetectionNet/FaceDetectionResult.ts"],"names":[],"mappings":"AAEA;IAOE,6BACE,KAAa,EACb,GAAW,EACX,IAAY,EACZ,MAAc,EACd,KAAa;QAEb,IAAI,CAAC,KAAK,GAAG,KAAK,CAAA;QAClB,IAAI,CAAC,GAAG,GAAG,IAAI,CAAC,GAAG,CAAC,CAAC,EAAE,GAAG,CAAC;YAC3B,IAAI,CAAC,IAAI,GAAG,IAAI,CAAC,GAAG,CAAC,CAAC,EAAE,IAAI,CAAC;YAC7B,IAAI,CAAC,MAAM,GAAI,IAAI,CAAC,GAAG,CAAC,GAAG,EAAE,MAAM,CAAC;YACpC,IAAI,CAAC,KAAK,GAAG,IAAI,CAAC,GAAG,CAAC,GAAG,EAAE,KAAK,CAAC,CAAA;IACnC,CAAC;IAEM,qCAAO,GAAd,UAAe,KAAa,EAAE,MAAc;QAC1C,OAAO;YACL,KAAK,EAAE,IAAI,CAAC,KAAK;YACjB,GAAG,EAAE;gBACH,GAAG,EAAE,IAAI,CAAC,GAAG,GAAG,MAAM;gBACtB,IAAI,EAAE,IAAI,CAAC,IAAI,GAAG,KAAK;gBACvB,MAAM,EAAE,IAAI,CAAC,MAAM,GAAG,MAAM;gBAC5B,KAAK,EAAE,IAAI,CAAC,KAAK,GAAG,KAAK;aAC1B;SACF,CAAA;IACH,CAAC;IACH,0BAAC;AAAD,CAAC,AAhCD,IAgCC"}
\ No newline at end of file
{"version":3,"file":"FaceDetectionResult.js","sourceRoot":"","sources":["../../src/faceDetectionNet/FaceDetectionResult.ts"],"names":[],"mappings":"AAEA;IAOE,6BACE,KAAa,EACb,WAAmB,EACnB,YAAoB,EACpB,cAAsB,EACtB,aAAqB;QAErB,IAAI,CAAC,MAAM,GAAG,KAAK,CAAA;QACnB,IAAI,CAAC,YAAY,GAAG,IAAI,CAAC,GAAG,CAAC,CAAC,EAAE,WAAW,CAAC;YAC5C,IAAI,CAAC,aAAa,GAAG,IAAI,CAAC,GAAG,CAAC,CAAC,EAAE,YAAY,CAAC;YAC9C,IAAI,CAAC,eAAe,GAAI,IAAI,CAAC,GAAG,CAAC,GAAG,EAAE,cAAc,CAAC;YACrD,IAAI,CAAC,cAAc,GAAG,IAAI,CAAC,GAAG,CAAC,GAAG,EAAE,aAAa,CAAC,CAAA;IACpD,CAAC;IAEM,qCAAO,GAAd,UAAe,KAAa,EAAE,MAAc;QAC1C,IAAM,CAAC,GAAG,IAAI,CAAC,KAAK,CAAC,IAAI,CAAC,aAAa,GAAG,KAAK,CAAC,CAAA;QAChD,IAAM,CAAC,GAAG,IAAI,CAAC,KAAK,CAAC,IAAI,CAAC,YAAY,GAAG,MAAM,CAAC,CAAA;QAChD,OAAO;YACL,KAAK,EAAE,IAAI,CAAC,MAAM;YAClB,GAAG,EAAE;gBACH,CAAC,GAAA;gBACD,CAAC,GAAA;gBACD,KAAK,EAAE,IAAI,CAAC,KAAK,CAAC,IAAI,CAAC,cAAc,GAAG,KAAK,CAAC,GAAG,CAAC;gBAClD,MAAM,EAAE,IAAI,CAAC,KAAK,CAAC,IAAI,CAAC,eAAe,GAAG,MAAM,CAAC,GAAG,CAAC;aACtD;SACF,CAAA;IACH,CAAC;IACH,0BAAC;AAAD,CAAC,AAlCD,IAkCC"}
\ No newline at end of file
......@@ -56,10 +56,10 @@ export declare namespace FaceDetectionNet {
type Detection = {
score: number;
box: {
top: number;
left: number;
right: number;
bottom: number;
x: number;
y: number;
width: number;
height: number;
};
};
}
......@@ -10,7 +10,13 @@ export function faceRecognitionNet(weights) {
var params = extractParams(weights);
function forward(input) {
return tf.tidy(function () {
var x = normalize(padToSquare(getImageTensor(input)));
// TODO pad on both sides, to keep face centered
var x = padToSquare(getImageTensor(input));
// work with 150 x 150 sized face images
if (x.shape[1] !== 150 || x.shape[2] !== 150) {
x = tf.image.resizeBilinear(x, [150, 150]);
}
x = normalize(x);
var out = convDown(x, params.conv32_down);
out = tf.maxPool(out, 3, 2, 'valid');
out = residual(out, params.conv32_1);
......
{"version":3,"file":"index.js","sourceRoot":"","sources":["../../src/faceRecognitionNet/index.ts"],"names":[],"mappings":";AAAA,OAAO,KAAK,EAAE,MAAM,uBAAuB,CAAC;AAG5C,OAAO,EAAE,cAAc,EAAE,WAAW,EAAE,MAAM,oBAAoB,CAAC;AAEjE,OAAO,EAAE,QAAQ,EAAE,MAAM,aAAa,CAAC;AACvC,OAAO,EAAE,aAAa,EAAE,MAAM,iBAAiB,CAAC;AAChD,OAAO,EAAE,SAAS,EAAE,MAAM,aAAa,CAAC;AACxC,OAAO,EAAE,QAAQ,EAAE,YAAY,EAAE,MAAM,iBAAiB,CAAC;AAEzD,MAAM,6BAA6B,OAAqB;IAAxD,iBAuDC;IAtDC,IAAM,MAAM,GAAG,aAAa,CAAC,OAAO,CAAC,CAAA;IAErC,iBAAiB,KAAuC;QACtD,OAAO,EAAE,CAAC,IAAI,CAAC;YAEb,IAAM,CAAC,GAAG,SAAS,CAAC,WAAW,CAAC,cAAc,CAAC,KAAK,CAAC,CAAC,CAAC,CAAA;YAEvD,IAAI,GAAG,GAAG,QAAQ,CAAC,CAAC,EAAE,MAAM,CAAC,WAAW,CAAC,CAAA;YACzC,GAAG,GAAG,EAAE,CAAC,OAAO,CAAC,GAAG,EAAE,CAAC,EAAE,CAAC,EAAE,OAAO,CAAC,CAAA;YAEpC,GAAG,GAAG,QAAQ,CAAC,GAAG,EAAE,MAAM,CAAC,QAAQ,CAAC,CAAA;YACpC,GAAG,GAAG,QAAQ,CAAC,GAAG,EAAE,MAAM,CAAC,QAAQ,CAAC,CAAA;YACpC,GAAG,GAAG,QAAQ,CAAC,GAAG,EAAE,MAAM,CAAC,QAAQ,CAAC,CAAA;YAEpC,GAAG,GAAG,YAAY,CAAC,GAAG,EAAE,MAAM,CAAC,WAAW,CAAC,CAAA;YAC3C,GAAG,GAAG,QAAQ,CAAC,GAAG,EAAE,MAAM,CAAC,QAAQ,CAAC,CAAA;YACpC,GAAG,GAAG,QAAQ,CAAC,GAAG,EAAE,MAAM,CAAC,QAAQ,CAAC,CAAA;YACpC,GAAG,GAAG,QAAQ,CAAC,GAAG,EAAE,MAAM,CAAC,QAAQ,CAAC,CAAA;YAEpC,GAAG,GAAG,YAAY,CAAC,GAAG,EAAE,MAAM,CAAC,YAAY,CAAC,CAAA;YAC5C,GAAG,GAAG,QAAQ,CAAC,GAAG,EAAE,MAAM,CAAC,SAAS,CAAC,CAAA;YACrC,GAAG,GAAG,QAAQ,CAAC,GAAG,EAAE,MAAM,CAAC,SAAS,CAAC,CAAA;YAErC,GAAG,GAAG,YAAY,CAAC,GAAG,EAAE,MAAM,CAAC,YAAY,CAAC,CAAA;YAC5C,GAAG,GAAG,QAAQ,CAAC,GAAG,EAAE,MAAM,CAAC,SAAS,CAAC,CAAA;YACrC,GAAG,GAAG,QAAQ,CAAC,GAAG,EAAE,MAAM,CAAC,SAAS,CAAC,CAAA;YACrC,GAAG,GAAG,YAAY,CAAC,GAAG,EAAE,MAAM,CAAC,gBAAgB,CAAC,CAAA;YAEhD,IAAM,SAAS,GAAG,GAAG,CAAC,IAAI,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,CAAgB,CAAA;YACjD,IAAM,cAAc,GAAG,EAAE,CAAC,MAAM,CAAC,SAAS,EAAE,MAAM,CAAC,EAAE,CAAC,CAAA;YAEtD,OAAO,cAAc,CAAA;QACvB,CAAC,CAAC,CAAA;IACJ,CAAC;IAED,IAAM,qBAAqB,GAAG,UAAO,KAAuC;;;;;oBACpE,MAAM,GAAG,OAAO,CAAC,KAAK,CAAC,CAAA;oBAChB,qBAAM,MAAM,CAAC,IAAI,EAAE,EAAA;;oBAA1B,IAAI,GAAG,SAAmB;oBAChC,MAAM,CAAC,OAAO,EAAE,CAAA;oBAChB,sBAAO,IAAI,EA
AA;;;SACZ,CAAA;IAED,IAAM,yBAAyB,GAAG,UAAC,KAAuC;QACxE,IAAM,MAAM,GAAG,OAAO,CAAC,KAAK,CAAC,CAAA;QAC7B,IAAM,IAAI,GAAG,MAAM,CAAC,QAAQ,EAAE,CAAA;QAC9B,MAAM,CAAC,OAAO,EAAE,CAAA;QAChB,OAAO,IAAI,CAAA;IACb,CAAC,CAAA;IAED,OAAO;QACL,qBAAqB,uBAAA;QACrB,yBAAyB,2BAAA;QACzB,OAAO,SAAA;KACR,CAAA;AACH,CAAC"}
\ No newline at end of file
{"version":3,"file":"index.js","sourceRoot":"","sources":["../../src/faceRecognitionNet/index.ts"],"names":[],"mappings":";AAAA,OAAO,KAAK,EAAE,MAAM,uBAAuB,CAAC;AAG5C,OAAO,EAAE,cAAc,EAAE,WAAW,EAAE,MAAM,oBAAoB,CAAC;AAEjE,OAAO,EAAE,QAAQ,EAAE,MAAM,aAAa,CAAC;AACvC,OAAO,EAAE,aAAa,EAAE,MAAM,iBAAiB,CAAC;AAChD,OAAO,EAAE,SAAS,EAAE,MAAM,aAAa,CAAC;AACxC,OAAO,EAAE,QAAQ,EAAE,YAAY,EAAE,MAAM,iBAAiB,CAAC;AAEzD,MAAM,6BAA6B,OAAqB;IAAxD,iBA6DC;IA5DC,IAAM,MAAM,GAAG,aAAa,CAAC,OAAO,CAAC,CAAA;IAErC,iBAAiB,KAAuC;QACtD,OAAO,EAAE,CAAC,IAAI,CAAC;YAEb,gDAAgD;YAChD,IAAI,CAAC,GAAG,WAAW,CAAC,cAAc,CAAC,KAAK,CAAC,CAAC,CAAA;YAC1C,wCAAwC;YACxC,IAAI,CAAC,CAAC,KAAK,CAAC,CAAC,CAAC,KAAK,GAAG,IAAI,CAAC,CAAC,KAAK,CAAC,CAAC,CAAC,KAAK,GAAG,EAAE;gBAC5C,CAAC,GAAG,EAAE,CAAC,KAAK,CAAC,cAAc,CAAC,CAAC,EAAE,CAAC,GAAG,EAAE,GAAG,CAAC,CAAC,CAAA;aAC3C;YACD,CAAC,GAAG,SAAS,CAAC,CAAC,CAAC,CAAA;YAEhB,IAAI,GAAG,GAAG,QAAQ,CAAC,CAAC,EAAE,MAAM,CAAC,WAAW,CAAC,CAAA;YACzC,GAAG,GAAG,EAAE,CAAC,OAAO,CAAC,GAAG,EAAE,CAAC,EAAE,CAAC,EAAE,OAAO,CAAC,CAAA;YAEpC,GAAG,GAAG,QAAQ,CAAC,GAAG,EAAE,MAAM,CAAC,QAAQ,CAAC,CAAA;YACpC,GAAG,GAAG,QAAQ,CAAC,GAAG,EAAE,MAAM,CAAC,QAAQ,CAAC,CAAA;YACpC,GAAG,GAAG,QAAQ,CAAC,GAAG,EAAE,MAAM,CAAC,QAAQ,CAAC,CAAA;YAEpC,GAAG,GAAG,YAAY,CAAC,GAAG,EAAE,MAAM,CAAC,WAAW,CAAC,CAAA;YAC3C,GAAG,GAAG,QAAQ,CAAC,GAAG,EAAE,MAAM,CAAC,QAAQ,CAAC,CAAA;YACpC,GAAG,GAAG,QAAQ,CAAC,GAAG,EAAE,MAAM,CAAC,QAAQ,CAAC,CAAA;YACpC,GAAG,GAAG,QAAQ,CAAC,GAAG,EAAE,MAAM,CAAC,QAAQ,CAAC,CAAA;YAEpC,GAAG,GAAG,YAAY,CAAC,GAAG,EAAE,MAAM,CAAC,YAAY,CAAC,CAAA;YAC5C,GAAG,GAAG,QAAQ,CAAC,GAAG,EAAE,MAAM,CAAC,SAAS,CAAC,CAAA;YACrC,GAAG,GAAG,QAAQ,CAAC,GAAG,EAAE,MAAM,CAAC,SAAS,CAAC,CAAA;YAErC,GAAG,GAAG,YAAY,CAAC,GAAG,EAAE,MAAM,CAAC,YAAY,CAAC,CAAA;YAC5C,GAAG,GAAG,QAAQ,CAAC,GAAG,EAAE,MAAM,CAAC,SAAS,CAAC,CAAA;YACrC,GAAG,GAAG,QAAQ,CAAC,GAAG,EAAE,MAAM,CAAC,SAAS,CAAC,CAAA;YACrC,GAAG,GAAG,YAAY,CAAC,GAAG,EAAE,MAAM,CAAC,gBAAgB,CAAC,CAAA;YAEhD,IAAM,SAAS,GAAG,GAAG,CAAC,IAAI,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,CAAgB,CAAA;YACjD,IAAM,cAAc,GAAG,EAAE,CAAC,MAAM,CAAC,SAAS,EAAE,MAAM,CAAC,EAAE,CAAC,
CAAA;YAEtD,OAAO,cAAc,CAAA;QACvB,CAAC,CAAC,CAAA;IACJ,CAAC;IAED,IAAM,qBAAqB,GAAG,UAAO,KAAuC;;;;;oBACpE,MAAM,GAAG,OAAO,CAAC,KAAK,CAAC,CAAA;oBAChB,qBAAM,MAAM,CAAC,IAAI,EAAE,EAAA;;oBAA1B,IAAI,GAAG,SAAmB;oBAChC,MAAM,CAAC,OAAO,EAAE,CAAA;oBAChB,sBAAO,IAAI,EAAA;;;SACZ,CAAA;IAED,IAAM,yBAAyB,GAAG,UAAC,KAAuC;QACxE,IAAM,MAAM,GAAG,OAAO,CAAC,KAAK,CAAC,CAAA;QAC7B,IAAM,IAAI,GAAG,MAAM,CAAC,QAAQ,EAAE,CAAA;QAC9B,MAAM,CAAC,OAAO,EAAE,CAAA;QAChB,OAAO,IAAI,CAAA;IACb,CAAC,CAAA;IAED,OAAO;QACL,qBAAqB,uBAAA;QACrB,yBAAyB,2BAAA;QACzB,OAAO,SAAA;KACR,CAAA;AACH,CAAC"}
\ No newline at end of file
......@@ -2,5 +2,8 @@ import { euclideanDistance } from './euclideanDistance';
import { faceDetectionNet } from './faceDetectionNet';
import { faceRecognitionNet } from './faceRecognitionNet';
import { NetInput } from './NetInput';
export { euclideanDistance, faceDetectionNet, faceRecognitionNet, NetInput };
import * as tf from '@tensorflow/tfjs-core';
export { euclideanDistance, faceDetectionNet, faceRecognitionNet, NetInput, tf };
export * from './extractFaces';
export * from './extractFaceTensors';
export * from './utils';
......@@ -2,6 +2,9 @@ import { euclideanDistance } from './euclideanDistance';
import { faceDetectionNet } from './faceDetectionNet';
import { faceRecognitionNet } from './faceRecognitionNet';
import { NetInput } from './NetInput';
export { euclideanDistance, faceDetectionNet, faceRecognitionNet, NetInput };
import * as tf from '@tensorflow/tfjs-core';
export { euclideanDistance, faceDetectionNet, faceRecognitionNet, NetInput, tf };
export * from './extractFaces';
export * from './extractFaceTensors';
export * from './utils';
//# sourceMappingURL=index.js.map
\ No newline at end of file
{"version":3,"file":"index.js","sourceRoot":"","sources":["../src/index.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,iBAAiB,EAAE,MAAM,qBAAqB,CAAC;AACxD,OAAO,EAAE,gBAAgB,EAAE,MAAM,oBAAoB,CAAC;AACtD,OAAO,EAAE,kBAAkB,EAAE,MAAM,sBAAsB,CAAC;AAC1D,OAAO,EAAE,QAAQ,EAAE,MAAM,YAAY,CAAC;AAEtC,OAAO,EACL,iBAAiB,EACjB,gBAAgB,EAChB,kBAAkB,EAClB,QAAQ,EACT,CAAA;AAED,cAAc,SAAS,CAAA"}
\ No newline at end of file
{"version":3,"file":"index.js","sourceRoot":"","sources":["../src/index.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,iBAAiB,EAAE,MAAM,qBAAqB,CAAC;AACxD,OAAO,EAAE,gBAAgB,EAAE,MAAM,oBAAoB,CAAC;AACtD,OAAO,EAAE,kBAAkB,EAAE,MAAM,sBAAsB,CAAC;AAC1D,OAAO,EAAE,QAAQ,EAAE,MAAM,YAAY,CAAC;AACtC,OAAO,KAAK,EAAE,MAAM,uBAAuB,CAAC;AAE5C,OAAO,EACL,iBAAiB,EACjB,gBAAgB,EAChB,kBAAkB,EAClB,QAAQ,EACR,EAAE,EACH,CAAA;AAED,cAAc,gBAAgB,CAAA;AAC9B,cAAc,sBAAsB,CAAA;AACpC,cAAc,SAAS,CAAA"}
\ No newline at end of file
......@@ -10,7 +10,9 @@ export declare type DrawBoxOptions = {
color: string;
};
export declare type DrawTextOptions = {
lineWidth: number;
fontSize: number;
fontStyle: string;
color: string;
};
export declare type DrawOptions = DrawBoxOptions & DrawTextOptions;
import { FaceDetectionNet } from './faceDetectionNet/types';
import { DrawBoxOptions, DrawTextOptions } from './types';
import { Dimensions, DrawBoxOptions, DrawOptions, DrawTextOptions } from './types';
export declare function isFloat(num: number): boolean;
export declare function round(num: number): number;
export declare function getElement(arg: string | any): any;
export declare function getContext2dOrThrow(canvas: HTMLCanvasElement): CanvasRenderingContext2D;
export declare function getMediaDimensions(media: HTMLImageElement | HTMLVideoElement): HTMLImageElement | {
export declare function createCanvas({width, height}: Dimensions): HTMLCanvasElement;
export declare function createCanvasWithImageData({width, height}: Dimensions, buf: Uint8ClampedArray): HTMLCanvasElement;
export declare function getMediaDimensions(media: HTMLImageElement | HTMLVideoElement): {
width: number;
height: number;
};
export declare function bufferToImage(buf: Blob): Promise<HTMLImageElement>;
export declare function getDefaultDrawOptions(): DrawOptions;
export declare function drawBox(ctx: CanvasRenderingContext2D, x: number, y: number, w: number, h: number, options: DrawBoxOptions): void;
export declare function drawText(ctx: CanvasRenderingContext2D, x: number, y: number, text: string, options: DrawTextOptions): void;
export declare function drawDetection(canvasArg: string | HTMLCanvasElement, detection: FaceDetectionNet.Detection | FaceDetectionNet.Detection[], options?: DrawBoxOptions & DrawTextOptions & {
......
......@@ -17,7 +17,23 @@ export function getContext2dOrThrow(canvas) {
}
return ctx;
}
/**
 * Creates a detached canvas element with the given dimensions.
 *
 * @param dims Object with numeric width and height.
 * @returns The newly created HTMLCanvasElement.
 */
export function createCanvas(dims) {
    var canvas = document.createElement('canvas');
    canvas.width = dims.width;
    canvas.height = dims.height;
    return canvas;
}
/**
 * Creates a canvas of the given dimensions and paints the raw RGBA pixel
 * buffer onto it.
 *
 * @param dims Object with numeric width and height.
 * @param buf Uint8ClampedArray of RGBA pixel data (width * height * 4 bytes).
 * @returns The newly created canvas containing the image data.
 */
export function createCanvasWithImageData(dims, buf) {
    var canvas = createCanvas({ width: dims.width, height: dims.height });
    var ctx = getContext2dOrThrow(canvas);
    ctx.putImageData(new ImageData(buf, dims.width, dims.height), 0, 0);
    return canvas;
}
export function getMediaDimensions(media) {
if (media instanceof HTMLImageElement) {
return { width: media.naturalWidth, height: media.naturalHeight };
}
if (media instanceof HTMLVideoElement) {
return { width: media.videoWidth, height: media.videoHeight };
}
......@@ -39,15 +55,24 @@ export function bufferToImage(buf) {
reader.readAsDataURL(buf);
});
}
/**
 * Returns the default options used by the drawing helpers
 * (box color, line width, font size and font family).
 */
export function getDefaultDrawOptions() {
    var defaults = {
        color: 'blue',
        lineWidth: 2,
        fontSize: 20,
        fontStyle: 'Georgia'
    };
    return defaults;
}
export function drawBox(ctx, x, y, w, h, options) {
ctx.strokeStyle = options.color;
ctx.lineWidth = options.lineWidth;
ctx.strokeRect(x, y, w, h);
}
export function drawText(ctx, x, y, text, options) {
var padText = 2 + options.lineWidth;
ctx.fillStyle = options.color;
ctx.font = options.fontSize + "px " + options.fontStyle;
ctx.fillText(text, x, y);
ctx.fillText(text, x + padText, y + padText + (options.fontSize * 0.6));
}
export function drawDetection(canvasArg, detection, options) {
var canvas = getElement(canvasArg);
......@@ -59,13 +84,13 @@ export function drawDetection(canvasArg, detection, options) {
: [detection];
detectionArray.forEach(function (det) {
var score = det.score, box = det.box;
var left = box.left, right = box.right, top = box.top, bottom = box.bottom;
var _a = (options || {}), _b = _a.color, color = _b === void 0 ? 'blue' : _b, _c = _a.lineWidth, lineWidth = _c === void 0 ? 2 : _c, _d = _a.fontSize, fontSize = _d === void 0 ? 20 : _d, _e = _a.fontStyle, fontStyle = _e === void 0 ? 'Georgia' : _e, _f = _a.withScore, withScore = _f === void 0 ? true : _f;
var padText = 2 + lineWidth;
var x = box.x, y = box.y, width = box.width, height = box.height;
var drawOptions = Object.assign(getDefaultDrawOptions(), (options || {}));
var withScore = Object.assign({ withScore: true }, (options || {})).withScore;
var ctx = getContext2dOrThrow(canvas);
drawBox(ctx, left, top, right - left, bottom - top, { lineWidth: lineWidth, color: color });
drawBox(ctx, x, y, width, height, drawOptions);
if (withScore) {
drawText(ctx, left + padText, top + (fontSize * 0.6) + padText, "" + round(score), { fontSize: fontSize, fontStyle: fontStyle, color: color });
drawText(ctx, x, y, "" + round(score), drawOptions);
}
});
}
......
{"version":3,"file":"utils.js","sourceRoot":"","sources":["../src/utils.ts"],"names":[],"mappings":"AAGA,MAAM,kBAAkB,GAAW;IACjC,OAAO,GAAG,GAAG,CAAC,KAAK,CAAC,CAAA;AACtB,CAAC;AAED,MAAM,gBAAgB,GAAW;IAC/B,OAAO,IAAI,CAAC,KAAK,CAAC,GAAG,GAAG,GAAG,CAAC,GAAG,GAAG,CAAA;AACpC,CAAC;AAED,MAAM,qBAAqB,GAAiB;IAC1C,IAAI,OAAO,GAAG,KAAK,QAAQ,EAAE;QAC3B,OAAO,QAAQ,CAAC,cAAc,CAAC,GAAG,CAAC,CAAA;KACpC;IACD,OAAO,GAAG,CAAA;AACZ,CAAC;AAED,MAAM,8BAA8B,MAAyB;IAC3D,IAAM,GAAG,GAAG,MAAM,CAAC,UAAU,CAAC,IAAI,CAAC,CAAA;IACnC,IAAI,CAAC,GAAG,EAAE;QACR,MAAM,IAAI,KAAK,CAAC,2BAA2B,CAAC,CAAA;KAC7C;IACD,OAAO,GAAG,CAAA;AACZ,CAAC;AAED,MAAM,6BAA6B,KAA0C;IAC3E,IAAI,KAAK,YAAY,gBAAgB,EAAE;QACrC,OAAO,EAAE,KAAK,EAAE,KAAK,CAAC,UAAU,EAAE,MAAM,EAAE,KAAK,CAAC,WAAW,EAAE,CAAA;KAC9D;IACD,OAAO,KAAK,CAAA;AACd,CAAC;AAED,MAAM,wBAAwB,GAAS;IACrC,OAAO,IAAI,OAAO,CAAC,UAAC,OAAO,EAAE,MAAM;QACjC,IAAI,CAAC,CAAC,GAAG,YAAY,IAAI,CAAC,EAAE;YAC1B,OAAO,MAAM,CAAC,kDAAkD,CAAC,CAAA;SAClE;QAED,IAAM,MAAM,GAAG,IAAI,UAAU,EAAE,CAAA;QAC/B,MAAM,CAAC,MAAM,GAAG;YACd,IAAM,GAAG,GAAG,IAAI,KAAK,EAAE,CAAA;YACvB,GAAG,CAAC,MAAM,GAAG,cAAM,OAAA,OAAO,CAAC,GAAG,CAAC,EAAZ,CAAY,CAAA;YAC/B,GAAG,CAAC,OAAO,GAAG,MAAM,CAAA;YACpB,GAAG,CAAC,GAAG,GAAG,MAAM,CAAC,MAAM,CAAA;QACzB,CAAC,CAAA;QACD,MAAM,CAAC,OAAO,GAAG,MAAM,CAAA;QACvB,MAAM,CAAC,aAAa,CAAC,GAAG,CAAC,CAAA;IAC3B,CAAC,CAAC,CAAA;AACJ,CAAC;AAED,MAAM,kBACJ,GAA6B,EAC7B,CAAS,EACT,CAAS,EACT,CAAS,EACT,CAAS,EACT,OAAuB;IAEvB,GAAG,CAAC,WAAW,GAAG,OAAO,CAAC,KAAK,CAAA;IAC/B,GAAG,CAAC,SAAS,GAAG,OAAO,CAAC,SAAS,CAAA;IACjC,GAAG,CAAC,UAAU,CAAC,CAAC,EAAE,CAAC,EAAE,CAAC,EAAE,CAAC,CAAC,CAAA;AAC5B,CAAC;AAED,MAAM,mBACJ,GAA6B,EAC7B,CAAS,EACT,CAAS,EACT,IAAY,EACZ,OAAwB;IAExB,GAAG,CAAC,SAAS,GAAG,OAAO,CAAC,KAAK,CAAA;IAC7B,GAAG,CAAC,IAAI,GAAM,OAAO,CAAC,QAAQ,WAAM,OAAO,CAAC,SAAW,CAAA;IACvD,GAAG,CAAC,QAAQ,CAAC,IAAI,EAAE,CAAC,EAAE,CAAC,CAAC,CAAA;AAC1B,CAAC;AAED,MAAM,wBACJ,SAAqC,EACrC,SAAoE,EACpE,OAAmE;IAEnE,IAAM,MAAM,GAAG,UAAU,CAAC,SAAS,CAAC,CAAA;IACpC,IAAI,CAAC,CAAC,MAAM,YAAY,iBAAiB,CAAC,EAAE;QAC1C,MAAM,IAAI,KAAK,CAAC,4DAA4D,CAAC,CAAA;KAC9E;IAED,
IAAM,cAAc,GAAG,KAAK,CAAC,OAAO,CAAC,SAAS,CAAC;QAC7C,CAAC,CAAC,SAAS;QACX,CAAC,CAAC,CAAC,SAAS,CAAC,CAAA;IAEf,cAAc,CAAC,OAAO,CAAC,UAAC,GAAG;QAEvB,IAAA,iBAAK,EACL,aAAG,CACE;QAGL,IAAA,eAAI,EACJ,iBAAK,EACL,aAAG,EACH,mBAAM,CACD;QAED,IAAA,oBAMa,EALjB,aAAc,EAAd,mCAAc,EACd,iBAAa,EAAb,kCAAa,EACb,gBAAa,EAAb,kCAAa,EACb,iBAAqB,EAArB,0CAAqB,EACrB,iBAAgB,EAAhB,qCAAgB,CACC;QAEnB,IAAM,OAAO,GAAG,CAAC,GAAG,SAAS,CAAA;QAE7B,IAAM,GAAG,GAAG,mBAAmB,CAAC,MAAM,CAAC,CAAA;QACvC,OAAO,CACL,GAAG,EACH,IAAI,EACJ,GAAG,EACH,KAAK,GAAG,IAAI,EACZ,MAAM,GAAG,GAAG,EACZ,EAAE,SAAS,WAAA,EAAE,KAAK,OAAA,EAAE,CACrB,CAAA;QACD,IAAI,SAAS,EAAE;YACb,QAAQ,CACN,GAAG,EACH,IAAI,GAAG,OAAO,EACd,GAAG,GAAG,CAAC,QAAQ,GAAG,GAAG,CAAC,GAAG,OAAO,EAChC,KAAG,KAAK,CAAC,KAAK,CAAG,EACjB,EAAE,QAAQ,UAAA,EAAE,SAAS,WAAA,EAAE,KAAK,OAAA,EAAE,CAC/B,CAAA;SACF;IACH,CAAC,CAAC,CAAA;AACJ,CAAC"}
\ No newline at end of file
{"version":3,"file":"utils.js","sourceRoot":"","sources":["../src/utils.ts"],"names":[],"mappings":"AAGA,MAAM,kBAAkB,GAAW;IACjC,OAAO,GAAG,GAAG,CAAC,KAAK,CAAC,CAAA;AACtB,CAAC;AAED,MAAM,gBAAgB,GAAW;IAC/B,OAAO,IAAI,CAAC,KAAK,CAAC,GAAG,GAAG,GAAG,CAAC,GAAG,GAAG,CAAA;AACpC,CAAC;AAED,MAAM,qBAAqB,GAAiB;IAC1C,IAAI,OAAO,GAAG,KAAK,QAAQ,EAAE;QAC3B,OAAO,QAAQ,CAAC,cAAc,CAAC,GAAG,CAAC,CAAA;KACpC;IACD,OAAO,GAAG,CAAA;AACZ,CAAC;AAED,MAAM,8BAA8B,MAAyB;IAC3D,IAAM,GAAG,GAAG,MAAM,CAAC,UAAU,CAAC,IAAI,CAAC,CAAA;IACnC,IAAI,CAAC,GAAG,EAAE;QACR,MAAM,IAAI,KAAK,CAAC,2BAA2B,CAAC,CAAA;KAC7C;IACD,OAAO,GAAG,CAAA;AACZ,CAAC;AAED,MAAM,uBAAuB,EAA4B;QAA1B,gBAAK,EAAE,kBAAM;IAC1C,IAAM,MAAM,GAAG,QAAQ,CAAC,aAAa,CAAC,QAAQ,CAAC,CAAA;IAC/C,MAAM,CAAC,KAAK,GAAG,KAAK,CAAA;IACpB,MAAM,CAAC,MAAM,GAAG,MAAM,CAAA;IACtB,OAAO,MAAM,CAAA;AACf,CAAC;AAED,MAAM,oCAAoC,EAA4B,EAAE,GAAsB;QAAlD,gBAAK,EAAE,kBAAM;IACvD,IAAM,MAAM,GAAG,YAAY,CAAC,EAAE,KAAK,OAAA,EAAE,MAAM,QAAA,EAAE,CAAC,CAAA;IAC9C,mBAAmB,CAAC,MAAM,CAAC,CAAC,YAAY,CAAC,IAAI,SAAS,CAAC,GAAG,EAAE,KAAK,EAAE,MAAM,CAAC,EAAE,CAAC,EAAE,CAAC,CAAC,CAAA;IACjF,OAAO,MAAM,CAAA;AACf,CAAC;AAED,MAAM,6BAA6B,KAA0C;IAC3E,IAAI,KAAK,YAAY,gBAAgB,EAAE;QACrC,OAAO,EAAE,KAAK,EAAE,KAAK,CAAC,YAAY,EAAE,MAAM,EAAE,KAAK,CAAC,aAAa,EAAE,CAAA;KAClE;IACD,IAAI,KAAK,YAAY,gBAAgB,EAAE;QACrC,OAAO,EAAE,KAAK,EAAE,KAAK,CAAC,UAAU,EAAE,MAAM,EAAE,KAAK,CAAC,WAAW,EAAE,CAAA;KAC9D;IACD,OAAO,KAAK,CAAA;AACd,CAAC;AAED,MAAM,wBAAwB,GAAS;IACrC,OAAO,IAAI,OAAO,CAAC,UAAC,OAAO,EAAE,MAAM;QACjC,IAAI,CAAC,CAAC,GAAG,YAAY,IAAI,CAAC,EAAE;YAC1B,OAAO,MAAM,CAAC,kDAAkD,CAAC,CAAA;SAClE;QAED,IAAM,MAAM,GAAG,IAAI,UAAU,EAAE,CAAA;QAC/B,MAAM,CAAC,MAAM,GAAG;YACd,IAAM,GAAG,GAAG,IAAI,KAAK,EAAE,CAAA;YACvB,GAAG,CAAC,MAAM,GAAG,cAAM,OAAA,OAAO,CAAC,GAAG,CAAC,EAAZ,CAAY,CAAA;YAC/B,GAAG,CAAC,OAAO,GAAG,MAAM,CAAA;YACpB,GAAG,CAAC,GAAG,GAAG,MAAM,CAAC,MAAM,CAAA;QACzB,CAAC,CAAA;QACD,MAAM,CAAC,OAAO,GAAG,MAAM,CAAA;QACvB,MAAM,CAAC,aAAa,CAAC,GAAG,CAAC,CAAA;IAC3B,CAAC,CAAC,CAAA;AACJ,CAAC;AAED,MAAM;IACJ,OAAO;QACL,KAAK,EAAE,MAAM;QACb,SAAS,EAAE,CAAC;QACZ,QAAQ,EAAE,EAAE;QACZ
,SAAS,EAAE,SAAS;KACrB,CAAA;AACH,CAAC;AAED,MAAM,kBACJ,GAA6B,EAC7B,CAAS,EACT,CAAS,EACT,CAAS,EACT,CAAS,EACT,OAAuB;IAEvB,GAAG,CAAC,WAAW,GAAG,OAAO,CAAC,KAAK,CAAA;IAC/B,GAAG,CAAC,SAAS,GAAG,OAAO,CAAC,SAAS,CAAA;IACjC,GAAG,CAAC,UAAU,CAAC,CAAC,EAAE,CAAC,EAAE,CAAC,EAAE,CAAC,CAAC,CAAA;AAC5B,CAAC;AAED,MAAM,mBACJ,GAA6B,EAC7B,CAAS,EACT,CAAS,EACT,IAAY,EACZ,OAAwB;IAExB,IAAM,OAAO,GAAG,CAAC,GAAG,OAAO,CAAC,SAAS,CAAA;IAErC,GAAG,CAAC,SAAS,GAAG,OAAO,CAAC,KAAK,CAAA;IAC7B,GAAG,CAAC,IAAI,GAAM,OAAO,CAAC,QAAQ,WAAM,OAAO,CAAC,SAAW,CAAA;IACvD,GAAG,CAAC,QAAQ,CAAC,IAAI,EAAE,CAAC,GAAG,OAAO,EAAE,CAAC,GAAG,OAAO,GAAG,CAAC,OAAO,CAAC,QAAQ,GAAG,GAAG,CAAC,CAAC,CAAA;AACzE,CAAC;AAED,MAAM,wBACJ,SAAqC,EACrC,SAAoE,EACpE,OAAmE;IAEnE,IAAM,MAAM,GAAG,UAAU,CAAC,SAAS,CAAC,CAAA;IACpC,IAAI,CAAC,CAAC,MAAM,YAAY,iBAAiB,CAAC,EAAE;QAC1C,MAAM,IAAI,KAAK,CAAC,4DAA4D,CAAC,CAAA;KAC9E;IAED,IAAM,cAAc,GAAG,KAAK,CAAC,OAAO,CAAC,SAAS,CAAC;QAC7C,CAAC,CAAC,SAAS;QACX,CAAC,CAAC,CAAC,SAAS,CAAC,CAAA;IAEf,cAAc,CAAC,OAAO,CAAC,UAAC,GAAG;QAEvB,IAAA,iBAAK,EACL,aAAG,CACE;QAGL,IAAA,SAAC,EACD,SAAC,EACD,iBAAK,EACL,mBAAM,CACD;QAEP,IAAM,WAAW,GAAG,MAAM,CAAC,MAAM,CAC/B,qBAAqB,EAAE,EACvB,CAAC,OAAO,IAAI,EAAE,CAAC,CAChB,CAAA;QAEO,IAAA,yEAAS,CAAwD;QAEzE,IAAM,GAAG,GAAG,mBAAmB,CAAC,MAAM,CAAC,CAAA;QACvC,OAAO,CACL,GAAG,EACH,CAAC,EACD,CAAC,EACD,KAAK,EACL,MAAM,EACN,WAAW,CACZ,CAAA;QACD,IAAI,SAAS,EAAE;YACb,QAAQ,CACN,GAAG,EACH,CAAC,EACD,CAAC,EACD,KAAG,KAAK,CAAC,KAAK,CAAG,EACjB,WAAW,CACZ,CAAA;SACF;IACH,CAAC,CAAC,CAAA;AACJ,CAAC"}
\ No newline at end of file
......@@ -24,6 +24,35 @@ async function initFaceRecognitionNet() {
return facerecognition.faceRecognitionNet(weights)
}
// fetch first image of each class and compute their descriptors
async function initTrainDescriptorsByClass(net) {
return Promise.all(classes.map(
async className => {
const img = await facerecognition.bufferToImage(
await fetchImage(getFaceImageUri(className, 1))
)
const descriptor = await net.computeFaceDescriptor(img)
return {
descriptor,
className
}
}
))
}
function getBestMatch(allDescriptors, queryDescriptor) {
return allDescriptors
.map(
({ descriptor, className }) => ({
distance: facerecognition.round(
facerecognition.euclideanDistance(descriptor, queryDescriptor)
),
className
})
)
.reduce((best, curr) => best.distance < curr.distance ? best : curr)
}
function renderNavBar(navbarId, exampleUri) {
const examples = [
{
......@@ -41,6 +70,14 @@ function renderNavBar(navbarId, exampleUri) {
{
uri: 'face_similarity',
name: 'Face Similarity'
},
{
uri: 'detect_and_draw_faces',
name: 'Detect and Draw Faces'
},
{
uri: 'detect_and_recognize_faces',
name: 'Detect and Recognize Faces'
}
]
......
......@@ -38,3 +38,7 @@
top: 0;
left: 0;
}
#facesContainer canvas {
margin: 10px;
}
\ No newline at end of file
......@@ -15,5 +15,7 @@ app.get('/face_detection', (req, res) => res.sendFile(path.join(viewsDir, 'faceD
app.get('/face_detection_video', (req, res) => res.sendFile(path.join(viewsDir, 'faceDetectionVideo.html')))
app.get('/face_recognition', (req, res) => res.sendFile(path.join(viewsDir, 'faceRecognition.html')))
app.get('/face_similarity', (req, res) => res.sendFile(path.join(viewsDir, 'faceSimilarity.html')))
app.get('/detect_and_draw_faces', (req, res) => res.sendFile(path.join(viewsDir, 'detectAndDrawFaces.html')))
app.get('/detect_and_recognize_faces', (req, res) => res.sendFile(path.join(viewsDir, 'detectAndRecognizeFaces.html')))
app.listen(3000, () => console.log('Listening on port 3000!'))
\ No newline at end of file
<!DOCTYPE html>
<html>
<head>
<script src="face-recognition.js"></script>
<script src="axios.min.js"></script>
<script src="commons.js"></script>
<link rel="stylesheet" href="styles.css">
<link rel="stylesheet" href="https://cdnjs.cloudflare.com/ajax/libs/materialize/0.100.2/css/materialize.css">
<script type="text/javascript" src="https://code.jquery.com/jquery-2.1.1.min.js"></script>
<script src="https://cdnjs.cloudflare.com/ajax/libs/materialize/0.100.2/js/materialize.min.js"></script>
</head>
<body>
<div class="center-content page-container">
<div id="navbar"></div>
<div class="progress" id="loader">
<div class="indeterminate"></div>
</div>
<div style="position: relative" class="margin">
<img id="inputImg" src="" style="max-width: 800px;" />
<canvas id="overlay"></canvas>
</div>
<div id="facesContainer"></div>
<div class="row side-by-side">
<div id="selectList"></div>
<div class="row">
<label for="minConfidence">Min Confidence:</label>
<input disabled value="0.7" id="minConfidence" type="text" class="bold">
</div>
<button
class="waves-effect waves-light btn"
onclick="onDecreaseThreshold()"
>
<i class="material-icons left">-</i>
</button>
<button
class="waves-effect waves-light btn"
onclick="onIncreaseThreshold()"
>
<i class="material-icons left">+</i>
</button>
</div>
</div>
<script>
let minConfidence = 0.7
let net
function onIncreaseThreshold() {
minConfidence = Math.min(facerecognition.round(minConfidence + 0.1), 1.0)
$('#minConfidence').val(minConfidence)
updateResults()
}
function onDecreaseThreshold() {
minConfidence = Math.max(facerecognition.round(minConfidence - 0.1), 0.1)
$('#minConfidence').val(minConfidence)
updateResults()
}
async function updateResults() {
const inputImgEl = $('#inputImg').get(0)
const { width, height } = inputImgEl
const canvas = $('#overlay').get(0)
canvas.width = width
canvas.height = height
const input = new facerecognition.NetInput(inputImgEl)
const detections = await net.locateFaces(input, minConfidence)
facerecognition.drawDetection('overlay', detections.map(det => det.forSize(width, height)))
const faceImages = await facerecognition.extractFaces(input.canvases[0], detections)
$('#facesContainer').empty()
faceImages.forEach(canvas => $('#facesContainer').append(canvas))
}
async function onSelectionChanged(uri) {
const imgBuf = await fetchImage(uri)
$(`#inputImg`).get(0).src = (await facerecognition.bufferToImage(imgBuf)).src
updateResults()
}
async function run() {
net = await initFaceDetectionNet()
$('#loader').hide()
onSelectionChanged($('#selectList select').val())
}
// Page bootstrap: once the DOM is ready, render the navigation bar,
// build the image selection list, then load the model and run.
$(document).ready(function() {
renderNavBar('#navbar', 'detect_and_draw_faces')
// re-run detection whenever a different image is selected;
// 'bbt1.jpg' is the initially selected image
renderImageSelectList(
'#selectList',
async (uri) => {
await onSelectionChanged(uri)
},
'bbt1.jpg'
)
run()
})
</script>
</body>
</html>
\ No newline at end of file
<!DOCTYPE html>
<html>
<head>
<script src="face-recognition.js"></script>
<script src="axios.min.js"></script>
<script src="commons.js"></script>
<link rel="stylesheet" href="styles.css">
<link rel="stylesheet" href="https://cdnjs.cloudflare.com/ajax/libs/materialize/0.100.2/css/materialize.css">
<script type="text/javascript" src="https://code.jquery.com/jquery-2.1.1.min.js"></script>
<script src="https://cdnjs.cloudflare.com/ajax/libs/materialize/0.100.2/js/materialize.min.js"></script>
</head>
<body>
<div class="center-content page-container">
<div id="navbar"></div>
<div class="progress" id="loader">
<div class="indeterminate"></div>
</div>
<div style="position: relative" class="margin">
<img id="inputImg" src="" style="max-width: 800px;" />
<canvas id="overlay"></canvas>
</div>
<div class="row side-by-side">
<div id="selectList"></div>
<div class="row">
<label for="minConfidence">Min Confidence:</label>
<input disabled value="0.7" id="minConfidence" type="text" class="bold">
</div>
<button
class="waves-effect waves-light btn"
onclick="onDecreaseThreshold()"
>
<i class="material-icons left">-</i>
</button>
<button
class="waves-effect waves-light btn"
onclick="onIncreaseThreshold()"
>
<i class="material-icons left">+</i>
</button>
</div>
</div>
<script>
const threshold = 0.6
let minConfidence = 0.7
let detectionNet, recognitionNet
let trainDescriptorsByClass = []
function onIncreaseThreshold() {
minConfidence = Math.min(facerecognition.round(minConfidence + 0.1), 1.0)
$('#minConfidence').val(minConfidence)
updateResults()
}
function onDecreaseThreshold() {
minConfidence = Math.max(facerecognition.round(minConfidence - 0.1), 0.1)
$('#minConfidence').val(minConfidence)
updateResults()
}
async function updateResults() {
const inputImgEl = $('#inputImg').get(0)
const { width, height } = inputImgEl
const canvas = $('#overlay').get(0)
canvas.width = width
canvas.height = height
const input = new facerecognition.NetInput(inputImgEl)
const detections = await detectionNet.locateFaces(input, minConfidence)
const detectionsForSize = detections.map(det => det.forSize(width, height))
facerecognition.drawDetection('overlay', detectionsForSize, { withScore: false })
const faceTensors = await facerecognition.extractFaceTensors(input, detections)
const descriptors = await Promise.all(faceTensors.map(t => recognitionNet.computeFaceDescriptor(t)))
// free memory for face image tensors after we computed their descriptors
faceTensors.forEach(t => t.dispose())
descriptors.forEach((descriptor, i) => {
const bestMatch = getBestMatch(trainDescriptorsByClass, descriptor)
const text = `${bestMatch.distance < threshold ? bestMatch.className : 'unkown'} (${bestMatch.distance})`
const { x, y } = detectionsForSize[i].box
facerecognition.drawText(canvas.getContext('2d'), x, y, text, facerecognition.getDefaultDrawOptions())
})
}
async function onSelectionChanged(uri) {
const imgBuf = await fetchImage(uri)
$(`#inputImg`).get(0).src = (await facerecognition.bufferToImage(imgBuf)).src
updateResults()
}
async function run() {
detectionNet = await initFaceDetectionNet()
recognitionNet = await initFaceRecognitionNet()
trainDescriptorsByClass = await initTrainDescriptorsByClass(recognitionNet)
$('#loader').hide()
onSelectionChanged($('#selectList select').val())
}
// Page bootstrap: once the DOM is ready, render the navigation bar,
// build the image selection list, then load the models and run.
$(document).ready(function() {
renderNavBar('#navbar', 'detect_and_recognize_faces')
// re-run recognition whenever a different image is selected;
// 'bbt1.jpg' is the initially selected image
renderImageSelectList(
'#selectList',
async (uri) => {
await onSelectionChanged(uri)
},
'bbt1.jpg'
)
run()
})
</script>
</body>
</html>
\ No newline at end of file
......@@ -57,12 +57,13 @@
}
async function updateResults() {
const input = new facerecognition.NetInput('inputImg')
const { width, height } = input
const inputImgEl = $('#inputImg').get(0)
const { width, height } = inputImgEl
const canvas = $('#overlay').get(0)
canvas.width = width
canvas.height = height
const input = new facerecognition.NetInput(inputImgEl)
result = await net.locateFaces(input, minConfidence)
facerecognition.drawDetection('overlay', result.map(det => det.forSize(width, height)))
}
......
......@@ -108,30 +108,6 @@
getImg().src = src
}
async function loadTrainingImages() {
return await Promise.all(classes.map(
async className => ({
img: await facerecognition.bufferToImage(
await fetchImage(getFaceImageUri(className, 1))
),
className
})
))
}
function getBestMatch(queryDescriptor) {
return trainDescriptorsByClass
.map(
({ descriptor, className }) => ({
distance: facerecognition.round(
facerecognition.euclideanDistance(descriptor, queryDescriptor)
),
className
})
)
.reduce((best, curr) => best.distance < curr.distance ? best : curr)
}
async function runFaceRecognition() {
async function next() {
const imgBuf = await fetchImage(getFaceImageUri(classes[currClassIdx], currImageIdx))
......@@ -143,7 +119,7 @@
const descriptor = await net.computeFaceDescriptor(input)
displayTimeStats(Date.now() - ts)
const bestMatch = getBestMatch(descriptor)
const bestMatch = getBestMatch(trainDescriptorsByClass, descriptor)
$('#prediction').val(`${bestMatch.distance < threshold ? bestMatch.className : 'unkown'} (${bestMatch.distance})`)
currImageIdx = currClassIdx === (classes.length - 1)
......@@ -164,13 +140,7 @@
net = await initFaceRecognitionNet()
setStatusText('computing initial descriptors...')
const trainImgs = await loadTrainingImages()
trainDescriptorsByClass = await Promise.all(trainImgs.map(
async ({ className, img }) => ({
descriptor: await net.computeFaceDescriptor(img),
className
})
))
trainDescriptorsByClass = await initTrainDescriptorsByClass(net)
$('#loader').hide()
runFaceRecognition()
......
import { Dimensions, TMediaElement, TNetInput } from './types';
import { getContext2dOrThrow, getElement, getMediaDimensions } from './utils';
import { createCanvas, getContext2dOrThrow, getElement, getMediaDimensions } from './utils';
export class NetInput {
private _canvases: HTMLCanvasElement[]
......@@ -42,10 +42,7 @@ export class NetInput {
// if input is batch type, make sure every canvas has the same dimensions
const { width, height } = this.dims || dims || getMediaDimensions(media)
const canvas = document.createElement('canvas')
canvas.width = width
canvas.height = height
const canvas = createCanvas({ width, height })
getContext2dOrThrow(canvas).drawImage(media, 0, 0, width, height)
this._canvases.push(canvas)
}
......
import * as tf from '@tensorflow/tfjs-core';
import { FaceDetectionResult } from './faceDetectionNet/FaceDetectionResult';
import { NetInput } from './NetInput';
import { getImageTensor } from './transformInputs';
import { TNetInput } from './types';
/**
* Extracts the tensors of the image regions containing the detected faces.
* Returned tensors have to be disposed manually once you don't need them anymore!
* Useful if you want to compute the face descriptors for the face
* images. Using this method is faster then extracting a canvas for each face and
* converting them to tensors individually.
*
* @param input The image that face detection has been performed on.
* @param detections The face detection results for that image.
* @returns Tensors of the corresponding image region for each detected face.
*/
/**
 * Extracts the tensors of the image regions containing the detected faces.
 * Returned tensors have to be disposed manually once you don't need them anymore!
 * Useful if you want to compute the face descriptors for the face
 * images. Using this method is faster than extracting a canvas for each face and
 * converting them to tensors individually.
 *
 * @param image The image that face detection has been performed on.
 * @param detections The face detection results for that image.
 * @returns Tensors of the corresponding image region for each detected face.
 */
export function extractFaceTensors(
  image: tf.Tensor | NetInput | TNetInput,
  detections: FaceDetectionResult[]
): tf.Tensor4D[] {
  return tf.tidy(() => {
    const imgTensor = getImageTensor(image)

    // TODO handle batches — only the first image of the batch is sliced for now.
    // Batch size is intentionally left unbound (unused local removed).
    const [, imgHeight, imgWidth, numChannels] = imgTensor.shape

    return detections.map(det => {
      // the relative detection box is converted to absolute pixel coordinates
      const { x, y, width, height } = det.forSize(imgWidth, imgHeight).box
      return tf.slice(imgTensor, [0, y, x, 0], [1, height, width, numChannels])
    })
  })
}
\ No newline at end of file
import { FaceDetectionResult } from './faceDetectionNet/FaceDetectionResult';
import { createCanvas, getContext2dOrThrow } from './utils';
/**
* Extracts the image regions containing the detected faces.
*
* @param input The image that face detection has been performed on.
* @param detections The face detection results for that image.
* @returns The Canvases of the corresponding image region for each detected face.
*/
/**
 * Extracts the image regions containing the detected faces.
 *
 * @param image The image that face detection has been performed on.
 * @param detections The face detection results for that image.
 * @returns The Canvases of the corresponding image region for each detected face.
 */
export function extractFaces(
  image: HTMLCanvasElement,
  detections: FaceDetectionResult[]
): HTMLCanvasElement[] {
  const srcCtx = getContext2dOrThrow(image)

  return detections.map(det => {
    // convert the relative detection box to pixel coordinates for this canvas
    const { x, y, width, height } = det.forSize(image.width, image.height).box

    // copy the face region pixel-for-pixel onto a fresh canvas
    const faceCanvas = createCanvas({ width, height })
    const facePixels = srcCtx.getImageData(x, y, width, height)
    getContext2dOrThrow(faceCanvas).putImageData(facePixels, 0, 0)
    return faceCanvas
  })
}
\ No newline at end of file
import { FaceDetectionNet } from './types';
export class FaceDetectionResult {
private score: number
private top: number
private left: number
private bottom: number
private right: number
private _score: number
private _topRelative: number
private _leftRelative: number
private _bottomRelative: number
private _rightRelative: number
constructor(
score: number,
top: number,
left: number,
bottom: number,
right: number
topRelative: number,
leftRelative: number,
bottomRelative: number,
rightRelative: number
) {
this.score = score
this.top = Math.max(0, top),
this.left = Math.max(0, left),
this.bottom = Math.min(1.0, bottom),
this.right = Math.min(1.0, right)
this._score = score
this._topRelative = Math.max(0, topRelative),
this._leftRelative = Math.max(0, leftRelative),
this._bottomRelative = Math.min(1.0, bottomRelative),
this._rightRelative = Math.min(1.0, rightRelative)
}
public forSize(width: number, height: number): FaceDetectionNet.Detection {
const x = Math.floor(this._leftRelative * width)
const y = Math.floor(this._topRelative * height)
return {
score: this.score,
score: this._score,
box: {
top: this.top * height,
left: this.left * width,
bottom: this.bottom * height,
right: this.right * width
x,
y,
width: Math.floor(this._rightRelative * width) - x,
height: Math.floor(this._bottomRelative * height) - y
}
}
}
......
......@@ -69,10 +69,10 @@ export namespace FaceDetectionNet {
export type Detection = {
score: number
box: {
top: number,
left: number,
right: number,
bottom: number
x: number,
y: number,
width: number,
height: number
}
}
......
......@@ -14,7 +14,13 @@ export function faceRecognitionNet(weights: Float32Array) {
function forward(input: tf.Tensor | NetInput | TNetInput) {
return tf.tidy(() => {
const x = normalize(padToSquare(getImageTensor(input)))
// TODO pad on both sides, to keep face centered
let x = padToSquare(getImageTensor(input))
// work with 150 x 150 sized face images
if (x.shape[1] !== 150 || x.shape[2] !== 150) {
x = tf.image.resizeBilinear(x, [150, 150])
}
x = normalize(x)
let out = convDown(x, params.conv32_down)
out = tf.maxPool(out, 3, 2, 'valid')
......
......@@ -2,12 +2,16 @@ import { euclideanDistance } from './euclideanDistance';
import { faceDetectionNet } from './faceDetectionNet';
import { faceRecognitionNet } from './faceRecognitionNet';
import { NetInput } from './NetInput';
import * as tf from '@tensorflow/tfjs-core';
export {
euclideanDistance,
faceDetectionNet,
faceRecognitionNet,
NetInput
NetInput,
tf
}
export * from './extractFaces'
export * from './extractFaceTensors'
export * from './utils'
\ No newline at end of file
......@@ -15,7 +15,10 @@ export type DrawBoxOptions = {
}
export type DrawTextOptions = {
lineWidth: number
fontSize: number
fontStyle: string
color: string
}
export type DrawOptions = DrawBoxOptions & DrawTextOptions
\ No newline at end of file
import { FaceDetectionNet } from './faceDetectionNet/types';
import { DrawBoxOptions, DrawTextOptions } from './types';
import { Dimensions, DrawBoxOptions, DrawOptions, DrawTextOptions } from './types';
export function isFloat(num: number) {
return num % 1 !== 0
......@@ -24,7 +24,23 @@ export function getContext2dOrThrow(canvas: HTMLCanvasElement): CanvasRenderingC
return ctx
}
/**
 * Creates a detached canvas element with the given dimensions.
 */
export function createCanvas({ width, height }: Dimensions): HTMLCanvasElement {
  const el = document.createElement('canvas')
  el.width = width
  el.height = height
  return el
}
/**
 * Creates a canvas of the given dimensions and paints the raw RGBA
 * pixel buffer onto it.
 */
export function createCanvasWithImageData({ width, height }: Dimensions, buf: Uint8ClampedArray): HTMLCanvasElement {
  const targetCanvas = createCanvas({ width, height })
  // wrap the raw buffer in an ImageData before blitting it to the canvas
  const imageData = new ImageData(buf, width, height)
  getContext2dOrThrow(targetCanvas).putImageData(imageData, 0, 0)
  return targetCanvas
}
export function getMediaDimensions(media: HTMLImageElement | HTMLVideoElement) {
if (media instanceof HTMLImageElement) {
return { width: media.naturalWidth, height: media.naturalHeight }
}
if (media instanceof HTMLVideoElement) {
return { width: media.videoWidth, height: media.videoHeight }
}
......@@ -49,6 +65,15 @@ export function bufferToImage(buf: Blob): Promise<HTMLImageElement> {
})
}
// Default styling applied when drawing detection boxes and text labels.
export function getDefaultDrawOptions(): DrawOptions {
  const defaults: DrawOptions = {
    color: 'blue',
    lineWidth: 2,
    fontSize: 20,
    fontStyle: 'Georgia'
  }
  return defaults
}
export function drawBox(
ctx: CanvasRenderingContext2D,
x: number,
......@@ -69,9 +94,11 @@ export function drawText(
text: string,
options: DrawTextOptions
) {
const padText = 2 + options.lineWidth
ctx.fillStyle = options.color
ctx.font = `${options.fontSize}px ${options.fontStyle}`
ctx.fillText(text, x, y)
ctx.fillText(text, x + padText, y + padText + (options.fontSize * 0.6))
}
export function drawDetection(
......@@ -95,38 +122,35 @@ export function drawDetection(
} = det
const {
left,
right,
top,
bottom
x,
y,
width,
height
} = box
const {
color = 'blue',
lineWidth = 2,
fontSize = 20,
fontStyle = 'Georgia',
withScore = true
} = (options || {})
const drawOptions = Object.assign(
getDefaultDrawOptions(),
(options || {})
)
const padText = 2 + lineWidth
const { withScore } = Object.assign({ withScore: true }, (options || {}))
const ctx = getContext2dOrThrow(canvas)
drawBox(
ctx,
left,
top,
right - left,
bottom - top,
{ lineWidth, color }
x,
y,
width,
height,
drawOptions
)
if (withScore) {
drawText(
ctx,
left + padText,
top + (fontSize * 0.6) + padText,
x,
y,
`${round(score)}`,
{ fontSize, fontStyle, color }
drawOptions
)
}
})
......
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment