Unverified Commit 33be8794 by justadudewhohacks Committed by GitHub

Merge pull request #38 from justadudewhohacks/batch-input-continued

face recognition net now works with batch inputs
parents 8ef9b662 7d04da10
......@@ -3,4 +3,4 @@ import { FaceLandmarkNet } from './faceLandmarkNet/FaceLandmarkNet';
import { FaceRecognitionNet } from './faceRecognitionNet/FaceRecognitionNet';
import { FullFaceDescription } from './FullFaceDescription';
import { TNetInput } from './types';
export declare function allFacesFactory(detectionNet: FaceDetectionNet, landmarkNet: FaceLandmarkNet, recognitionNet: FaceRecognitionNet): (input: TNetInput, minConfidence: number) => Promise<FullFaceDescription[]>;
export declare function allFacesFactory(detectionNet: FaceDetectionNet, landmarkNet: FaceLandmarkNet, recognitionNet: FaceRecognitionNet): (input: TNetInput, minConfidence: number, useBatchProcessing?: boolean) => Promise<FullFaceDescription[]>;
......@@ -4,34 +4,45 @@ var tslib_1 = require("tslib");
var extractFaceTensors_1 = require("./extractFaceTensors");
var FullFaceDescription_1 = require("./FullFaceDescription");
function allFacesFactory(detectionNet, landmarkNet, recognitionNet) {
return function (input, minConfidence) {
return function (input, minConfidence, useBatchProcessing) {
if (useBatchProcessing === void 0) { useBatchProcessing = false; }
return tslib_1.__awaiter(this, void 0, void 0, function () {
var detections, faceTensors, faceLandmarksByFace, alignedFaceBoxes, alignedFaceTensors, descriptors;
return tslib_1.__generator(this, function (_a) {
switch (_a.label) {
var detections, faceTensors, faceLandmarksByFace, _a, alignedFaceBoxes, alignedFaceTensors, descriptors, _b;
return tslib_1.__generator(this, function (_c) {
switch (_c.label) {
case 0: return [4 /*yield*/, detectionNet.locateFaces(input, minConfidence)];
case 1:
detections = _a.sent();
return [4 /*yield*/, extractFaceTensors_1.extractFaceTensors(input, detections)
/**
const faceLandmarksByFace = await Promise.all(faceTensors.map(
faceTensor => landmarkNet.detectLandmarks(faceTensor)
)) as FaceLandmarks[]
*/
];
detections = _c.sent();
return [4 /*yield*/, extractFaceTensors_1.extractFaceTensors(input, detections)];
case 2:
faceTensors = _a.sent();
faceTensors = _c.sent();
if (!useBatchProcessing) return [3 /*break*/, 4];
return [4 /*yield*/, landmarkNet.detectLandmarks(faceTensors)];
case 3:
faceLandmarksByFace = _a.sent();
_a = _c.sent();
return [3 /*break*/, 6];
case 4: return [4 /*yield*/, Promise.all(faceTensors.map(function (faceTensor) { return landmarkNet.detectLandmarks(faceTensor); }))];
case 5:
_a = _c.sent();
_c.label = 6;
case 6:
faceLandmarksByFace = _a;
faceTensors.forEach(function (t) { return t.dispose(); });
alignedFaceBoxes = faceLandmarksByFace.map(function (landmarks, i) { return landmarks.align(detections[i].getBox()); });
return [4 /*yield*/, extractFaceTensors_1.extractFaceTensors(input, alignedFaceBoxes)];
case 4:
alignedFaceTensors = _a.sent();
return [4 /*yield*/, Promise.all(alignedFaceTensors.map(function (faceTensor) { return recognitionNet.computeFaceDescriptor(faceTensor); }))];
case 5:
descriptors = _a.sent();
case 7:
alignedFaceTensors = _c.sent();
if (!useBatchProcessing) return [3 /*break*/, 9];
return [4 /*yield*/, recognitionNet.computeFaceDescriptor(alignedFaceTensors)];
case 8:
_b = _c.sent();
return [3 /*break*/, 11];
case 9: return [4 /*yield*/, Promise.all(alignedFaceTensors.map(function (faceTensor) { return recognitionNet.computeFaceDescriptor(faceTensor); }))];
case 10:
_b = _c.sent();
_c.label = 11;
case 11:
descriptors = _b;
alignedFaceTensors.forEach(function (t) { return t.dispose(); });
return [2 /*return*/, detections.map(function (detection, i) {
return new FullFaceDescription_1.FullFaceDescription(detection, faceLandmarksByFace[i].shiftByPoint(detection.getBox()), descriptors[i]);
......
{"version":3,"file":"allFacesFactory.js","sourceRoot":"","sources":["../src/allFacesFactory.ts"],"names":[],"mappings":";;;AAAA,2DAA0D;AAK1D,6DAA4D;AAG5D,yBACE,YAA8B,EAC9B,WAA4B,EAC5B,cAAkC;IAElC,OAAO,UACL,KAAgB,EAChB,aAAqB;;;;;4BAGF,qBAAM,YAAY,CAAC,WAAW,CAAC,KAAK,EAAE,aAAa,CAAC,EAAA;;wBAAjE,UAAU,GAAG,SAAoD;wBAEnD,qBAAM,uCAAkB,CAAC,KAAK,EAAE,UAAU,CAAC;4BAC/D;;;;+BAIG;0BAL4D;;wBAAzD,WAAW,GAAG,SAA2C;wBAMnC,qBAAM,WAAW,CAAC,eAAe,CAAC,WAAW,CAAC,EAAA;;wBAApE,mBAAmB,GAAG,SAAiE;wBAE7F,WAAW,CAAC,OAAO,CAAC,UAAA,CAAC,IAAI,OAAA,CAAC,CAAC,OAAO,EAAE,EAAX,CAAW,CAAC,CAAA;wBAE/B,gBAAgB,GAAG,mBAAmB,CAAC,GAAG,CAC9C,UAAC,SAAS,EAAE,CAAC,IAAK,OAAA,SAAS,CAAC,KAAK,CAAC,UAAU,CAAC,CAAC,CAAC,CAAC,MAAM,EAAE,CAAC,EAAvC,CAAuC,CAC1D,CAAA;wBAC0B,qBAAM,uCAAkB,CAAC,KAAK,EAAE,gBAAgB,CAAC,EAAA;;wBAAtE,kBAAkB,GAAG,SAAiD;wBAExD,qBAAM,OAAO,CAAC,GAAG,CAAC,kBAAkB,CAAC,GAAG,CAC1D,UAAA,UAAU,IAAI,OAAA,cAAc,CAAC,qBAAqB,CAAC,UAAU,CAAC,EAAhD,CAAgD,CAC/D,CAAC,EAAA;;wBAFI,WAAW,GAAG,SAElB;wBACF,kBAAkB,CAAC,OAAO,CAAC,UAAA,CAAC,IAAI,OAAA,CAAC,CAAC,OAAO,EAAE,EAAX,CAAW,CAAC,CAAA;wBAE5C,sBAAO,UAAU,CAAC,GAAG,CAAC,UAAC,SAAS,EAAE,CAAC;gCACjC,OAAA,IAAI,yCAAmB,CACrB,SAAS,EACT,mBAAmB,CAAC,CAAC,CAAC,CAAC,YAAY,CAAC,SAAS,CAAC,MAAM,EAAE,CAAC,EACvD,WAAW,CAAC,CAAC,CAAC,CACf;4BAJD,CAIC,CACF,EAAA;;;;KAEF,CAAA;AACH,CAAC;AAzCD,0CAyCC"}
\ No newline at end of file
{"version":3,"file":"allFacesFactory.js","sourceRoot":"","sources":["../src/allFacesFactory.ts"],"names":[],"mappings":";;;AAAA,2DAA0D;AAK1D,6DAA4D;AAG5D,yBACE,YAA8B,EAC9B,WAA4B,EAC5B,cAAkC;IAElC,OAAO,UACL,KAAgB,EAChB,aAAqB,EACrB,kBAAmC;QAAnC,mCAAA,EAAA,0BAAmC;;;;;4BAGhB,qBAAM,YAAY,CAAC,WAAW,CAAC,KAAK,EAAE,aAAa,CAAC,EAAA;;wBAAjE,UAAU,GAAG,SAAoD;wBAEnD,qBAAM,uCAAkB,CAAC,KAAK,EAAE,UAAU,CAAC,EAAA;;wBAAzD,WAAW,GAAG,SAA2C;6BAEnC,kBAAkB,EAAlB,wBAAkB;wBAC1C,qBAAM,WAAW,CAAC,eAAe,CAAC,WAAW,CAAC,EAAA;;wBAA9C,KAAA,SAAiE,CAAA;;4BACjE,qBAAM,OAAO,CAAC,GAAG,CAAC,WAAW,CAAC,GAAG,CACjC,UAAA,UAAU,IAAI,OAAA,WAAW,CAAC,eAAe,CAAC,UAAU,CAAC,EAAvC,CAAuC,CACtD,CAAC,EAAA;;wBAFA,KAAA,SAEmB,CAAA;;;wBAJjB,mBAAmB,KAIF;wBAEvB,WAAW,CAAC,OAAO,CAAC,UAAA,CAAC,IAAI,OAAA,CAAC,CAAC,OAAO,EAAE,EAAX,CAAW,CAAC,CAAA;wBAE/B,gBAAgB,GAAG,mBAAmB,CAAC,GAAG,CAC9C,UAAC,SAAS,EAAE,CAAC,IAAK,OAAA,SAAS,CAAC,KAAK,CAAC,UAAU,CAAC,CAAC,CAAC,CAAC,MAAM,EAAE,CAAC,EAAvC,CAAuC,CAC1D,CAAA;wBAC0B,qBAAM,uCAAkB,CAAC,KAAK,EAAE,gBAAgB,CAAC,EAAA;;wBAAtE,kBAAkB,GAAG,SAAiD;6BAExD,kBAAkB,EAAlB,wBAAkB;wBAClC,qBAAM,cAAc,CAAC,qBAAqB,CAAC,kBAAkB,CAAC,EAAA;;wBAA9D,KAAA,SAAgF,CAAA;;4BAChF,qBAAM,OAAO,CAAC,GAAG,CAAC,kBAAkB,CAAC,GAAG,CACxC,UAAA,UAAU,IAAI,OAAA,cAAc,CAAC,qBAAqB,CAAC,UAAU,CAAC,EAAhD,CAAgD,CAC/D,CAAC,EAAA;;wBAFA,KAAA,SAEkB,CAAA;;;wBAJhB,WAAW,KAIK;wBAEtB,kBAAkB,CAAC,OAAO,CAAC,UAAA,CAAC,IAAI,OAAA,CAAC,CAAC,OAAO,EAAE,EAAX,CAAW,CAAC,CAAA;wBAE5C,sBAAO,UAAU,CAAC,GAAG,CAAC,UAAC,SAAS,EAAE,CAAC;gCACjC,OAAA,IAAI,yCAAmB,CACrB,SAAS,EACT,mBAAmB,CAAC,CAAC,CAAC,CAAC,YAAY,CAAC,SAAS,CAAC,MAAM,EAAE,CAAC,EACvD,WAAW,CAAC,CAAC,CAAC,CACf;4BAJD,CAIC,CACF,EAAA;;;;KAEF,CAAA;AACH,CAAC;AA7CD,0CA6CC"}
\ No newline at end of file
......@@ -5,7 +5,7 @@ export declare class FaceRecognitionNet {
private _params;
load(weightsOrUrl: Float32Array | string | undefined): Promise<void>;
extractWeights(weights: Float32Array): void;
forwardInput(input: NetInput): Promise<tf.Tensor2D>;
forwardInput(input: NetInput): tf.Tensor2D;
forward(input: TNetInput): Promise<tf.Tensor2D>;
computeFaceDescriptor(input: TNetInput): Promise<Float32Array>;
computeFaceDescriptor(input: TNetInput): Promise<Float32Array | Float32Array[]>;
}
......@@ -37,13 +37,11 @@ var FaceRecognitionNet = /** @class */ (function () {
this._params = extractParams_1.extractParams(weights);
};
FaceRecognitionNet.prototype.forwardInput = function (input) {
return tslib_1.__awaiter(this, void 0, void 0, function () {
var _this = this;
return tslib_1.__generator(this, function (_a) {
if (!this._params) {
throw new Error('FaceRecognitionNet - load model before inference');
}
return [2 /*return*/, tf.tidy(function () {
return tf.tidy(function () {
var batchTensor = input.toBatchTensor(150, true);
var normalized = normalize_1.normalize(batchTensor);
var out = convLayer_1.convDown(normalized, _this._params.conv32_down);
......@@ -65,8 +63,6 @@ var FaceRecognitionNet = /** @class */ (function () {
var globalAvg = out.mean([1, 2]);
var fullyConnected = tf.matMul(globalAvg, _this._params.fc);
return fullyConnected;
})];
});
});
};
FaceRecognitionNet.prototype.forward = function (input) {
......@@ -84,20 +80,21 @@ var FaceRecognitionNet = /** @class */ (function () {
};
FaceRecognitionNet.prototype.computeFaceDescriptor = function (input) {
return tslib_1.__awaiter(this, void 0, void 0, function () {
var result, _a, data;
return tslib_1.__generator(this, function (_b) {
switch (_b.label) {
case 0:
_a = this.forward;
return [4 /*yield*/, toNetInput_1.toNetInput(input, true)];
case 1: return [4 /*yield*/, _a.apply(this, [_b.sent()])];
var _this = this;
var netInput, faceDescriptorTensors, faceDescriptorsForBatch;
return tslib_1.__generator(this, function (_a) {
switch (_a.label) {
case 0: return [4 /*yield*/, toNetInput_1.toNetInput(input, true)];
case 1:
netInput = _a.sent();
faceDescriptorTensors = tf.tidy(function () { return tf.unstack(_this.forwardInput(netInput)); });
return [4 /*yield*/, Promise.all(faceDescriptorTensors.map(function (t) { return t.data(); }))];
case 2:
result = _b.sent();
return [4 /*yield*/, result.data()];
case 3:
data = _b.sent();
result.dispose();
return [2 /*return*/, data];
faceDescriptorsForBatch = _a.sent();
faceDescriptorTensors.forEach(function (t) { return t.dispose(); });
return [2 /*return*/, netInput.isBatchInput
? faceDescriptorsForBatch
: faceDescriptorsForBatch[0]];
}
});
});
......
{"version":3,"file":"FaceRecognitionNet.js","sourceRoot":"","sources":["../../src/faceRecognitionNet/FaceRecognitionNet.ts"],"names":[],"mappings":";;;AAAA,0CAA4C;AAG5C,4CAA2C;AAE3C,yCAAuC;AACvC,iDAAgD;AAChD,6DAA4D;AAC5D,yCAAwC;AACxC,iDAAyD;AAGzD;IAAA;IAoEA,CAAC;IAhEc,iCAAI,GAAjB,UAAkB,YAA+C;;;;;;wBAC/D,IAAI,YAAY,YAAY,YAAY,EAAE;4BACxC,IAAI,CAAC,cAAc,CAAC,YAAY,CAAC,CAAA;4BACjC,sBAAM;yBACP;wBAED,IAAI,YAAY,IAAI,OAAO,YAAY,KAAK,QAAQ,EAAE;4BACpD,MAAM,IAAI,KAAK,CAAC,uEAAuE,CAAC,CAAA;yBACzF;wBACD,KAAA,IAAI,CAAA;wBAAW,qBAAM,yCAAmB,CAAC,YAAY,CAAC,EAAA;;wBAAtD,GAAK,OAAO,GAAG,SAAuC,CAAA;;;;;KACvD;IAEM,2CAAc,GAArB,UAAsB,OAAqB;QACzC,IAAI,CAAC,OAAO,GAAG,6BAAa,CAAC,OAAO,CAAC,CAAA;IACvC,CAAC;IAEY,yCAAY,GAAzB,UAA0B,KAAe;;;;gBACvC,IAAI,CAAC,IAAI,CAAC,OAAO,EAAE;oBACjB,MAAM,IAAI,KAAK,CAAC,kDAAkD,CAAC,CAAA;iBACpE;gBAGD,sBAAO,EAAE,CAAC,IAAI,CAAC;wBACb,IAAM,WAAW,GAAG,KAAK,CAAC,aAAa,CAAC,GAAG,EAAE,IAAI,CAAC,CAAA;wBAElD,IAAM,UAAU,GAAG,qBAAS,CAAC,WAAW,CAAC,CAAA;wBAEzC,IAAI,GAAG,GAAG,oBAAQ,CAAC,UAAU,EAAE,KAAI,CAAC,OAAO,CAAC,WAAW,CAAC,CAAA;wBACxD,GAAG,GAAG,EAAE,CAAC,OAAO,CAAC,GAAG,EAAE,CAAC,EAAE,CAAC,EAAE,OAAO,CAAC,CAAA;wBAEpC,GAAG,GAAG,wBAAQ,CAAC,GAAG,EAAE,KAAI,CAAC,OAAO,CAAC,QAAQ,CAAC,CAAA;wBAC1C,GAAG,GAAG,wBAAQ,CAAC,GAAG,EAAE,KAAI,CAAC,OAAO,CAAC,QAAQ,CAAC,CAAA;wBAC1C,GAAG,GAAG,wBAAQ,CAAC,GAAG,EAAE,KAAI,CAAC,OAAO,CAAC,QAAQ,CAAC,CAAA;wBAE1C,GAAG,GAAG,4BAAY,CAAC,GAAG,EAAE,KAAI,CAAC,OAAO,CAAC,WAAW,CAAC,CAAA;wBACjD,GAAG,GAAG,wBAAQ,CAAC,GAAG,EAAE,KAAI,CAAC,OAAO,CAAC,QAAQ,CAAC,CAAA;wBAC1C,GAAG,GAAG,wBAAQ,CAAC,GAAG,EAAE,KAAI,CAAC,OAAO,CAAC,QAAQ,CAAC,CAAA;wBAC1C,GAAG,GAAG,wBAAQ,CAAC,GAAG,EAAE,KAAI,CAAC,OAAO,CAAC,QAAQ,CAAC,CAAA;wBAE1C,GAAG,GAAG,4BAAY,CAAC,GAAG,EAAE,KAAI,CAAC,OAAO,CAAC,YAAY,CAAC,CAAA;wBAClD,GAAG,GAAG,wBAAQ,CAAC,GAAG,EAAE,KAAI,CAAC,OAAO,CAAC,SAAS,CAAC,CAAA;wBAC3C,GAAG,GAAG,wBAAQ,CAAC,GAAG,EAAE,KAAI,CAAC,OAAO,CAAC,SAAS,CAAC,CAAA;wBAE3C,GAAG,GAAG,4BAAY,CAAC,GAAG,EAAE,KAAI,CAAC,OAAO,CAAC,YAAY,CAAC,CAAA;wBAClD,GAAG,GAAG,wBAAQ,CAAC,GAAG,EAAE,KAAI,CAAC,OAAO,CAAC,SAAS,CAAC,CAAA;wBAC3C,GAAG,GAA
G,wBAAQ,CAAC,GAAG,EAAE,KAAI,CAAC,OAAO,CAAC,SAAS,CAAC,CAAA;wBAC3C,GAAG,GAAG,4BAAY,CAAC,GAAG,EAAE,KAAI,CAAC,OAAO,CAAC,gBAAgB,CAAC,CAAA;wBAEtD,IAAM,SAAS,GAAG,GAAG,CAAC,IAAI,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,CAAgB,CAAA;wBACjD,IAAM,cAAc,GAAG,EAAE,CAAC,MAAM,CAAC,SAAS,EAAE,KAAI,CAAC,OAAO,CAAC,EAAE,CAAC,CAAA;wBAE5D,OAAO,cAAc,CAAA;oBACvB,CAAC,CAAC,EAAA;;;KACH;IACY,oCAAO,GAApB,UAAqB,KAAgB;;;;;;wBAC5B,KAAA,IAAI,CAAC,YAAY,CAAA;wBAAC,qBAAM,uBAAU,CAAC,KAAK,EAAE,IAAI,CAAC,EAAA;4BAAtD,sBAAO,SAAA,IAAI,GAAc,SAA6B,EAAC,EAAA;;;;KACxD;IAEY,kDAAqB,GAAlC,UAAmC,KAAgB;;;;;;wBAC5B,KAAA,IAAI,CAAC,OAAO,CAAA;wBAAC,qBAAM,uBAAU,CAAC,KAAK,EAAE,IAAI,CAAC,EAAA;4BAAhD,qBAAM,SAAA,IAAI,GAAS,SAA6B,EAAC,EAAA;;wBAA1D,MAAM,GAAG,SAAiD;wBACnD,qBAAM,MAAM,CAAC,IAAI,EAAE,EAAA;;wBAA1B,IAAI,GAAG,SAAmB;wBAChC,MAAM,CAAC,OAAO,EAAE,CAAA;wBAChB,sBAAO,IAAoB,EAAA;;;;KAC5B;IACH,yBAAC;AAAD,CAAC,AApED,IAoEC;AApEY,gDAAkB"}
\ No newline at end of file
{"version":3,"file":"FaceRecognitionNet.js","sourceRoot":"","sources":["../../src/faceRecognitionNet/FaceRecognitionNet.ts"],"names":[],"mappings":";;;AAAA,0CAA4C;AAG5C,4CAA2C;AAE3C,yCAAuC;AACvC,iDAAgD;AAChD,6DAA4D;AAC5D,yCAAwC;AACxC,iDAAyD;AAGzD;IAAA;IA+EA,CAAC;IA3Ec,iCAAI,GAAjB,UAAkB,YAA+C;;;;;;wBAC/D,IAAI,YAAY,YAAY,YAAY,EAAE;4BACxC,IAAI,CAAC,cAAc,CAAC,YAAY,CAAC,CAAA;4BACjC,sBAAM;yBACP;wBAED,IAAI,YAAY,IAAI,OAAO,YAAY,KAAK,QAAQ,EAAE;4BACpD,MAAM,IAAI,KAAK,CAAC,uEAAuE,CAAC,CAAA;yBACzF;wBACD,KAAA,IAAI,CAAA;wBAAW,qBAAM,yCAAmB,CAAC,YAAY,CAAC,EAAA;;wBAAtD,GAAK,OAAO,GAAG,SAAuC,CAAA;;;;;KACvD;IAEM,2CAAc,GAArB,UAAsB,OAAqB;QACzC,IAAI,CAAC,OAAO,GAAG,6BAAa,CAAC,OAAO,CAAC,CAAA;IACvC,CAAC;IAEM,yCAAY,GAAnB,UAAoB,KAAe;QAAnC,iBAoCC;QAnCC,IAAI,CAAC,IAAI,CAAC,OAAO,EAAE;YACjB,MAAM,IAAI,KAAK,CAAC,kDAAkD,CAAC,CAAA;SACpE;QAED,OAAO,EAAE,CAAC,IAAI,CAAC;YACb,IAAM,WAAW,GAAG,KAAK,CAAC,aAAa,CAAC,GAAG,EAAE,IAAI,CAAC,CAAA;YAElD,IAAM,UAAU,GAAG,qBAAS,CAAC,WAAW,CAAC,CAAA;YAEzC,IAAI,GAAG,GAAG,oBAAQ,CAAC,UAAU,EAAE,KAAI,CAAC,OAAO,CAAC,WAAW,CAAC,CAAA;YACxD,GAAG,GAAG,EAAE,CAAC,OAAO,CAAC,GAAG,EAAE,CAAC,EAAE,CAAC,EAAE,OAAO,CAAC,CAAA;YAEpC,GAAG,GAAG,wBAAQ,CAAC,GAAG,EAAE,KAAI,CAAC,OAAO,CAAC,QAAQ,CAAC,CAAA;YAC1C,GAAG,GAAG,wBAAQ,CAAC,GAAG,EAAE,KAAI,CAAC,OAAO,CAAC,QAAQ,CAAC,CAAA;YAC1C,GAAG,GAAG,wBAAQ,CAAC,GAAG,EAAE,KAAI,CAAC,OAAO,CAAC,QAAQ,CAAC,CAAA;YAE1C,GAAG,GAAG,4BAAY,CAAC,GAAG,EAAE,KAAI,CAAC,OAAO,CAAC,WAAW,CAAC,CAAA;YACjD,GAAG,GAAG,wBAAQ,CAAC,GAAG,EAAE,KAAI,CAAC,OAAO,CAAC,QAAQ,CAAC,CAAA;YAC1C,GAAG,GAAG,wBAAQ,CAAC,GAAG,EAAE,KAAI,CAAC,OAAO,CAAC,QAAQ,CAAC,CAAA;YAC1C,GAAG,GAAG,wBAAQ,CAAC,GAAG,EAAE,KAAI,CAAC,OAAO,CAAC,QAAQ,CAAC,CAAA;YAE1C,GAAG,GAAG,4BAAY,CAAC,GAAG,EAAE,KAAI,CAAC,OAAO,CAAC,YAAY,CAAC,CAAA;YAClD,GAAG,GAAG,wBAAQ,CAAC,GAAG,EAAE,KAAI,CAAC,OAAO,CAAC,SAAS,CAAC,CAAA;YAC3C,GAAG,GAAG,wBAAQ,CAAC,GAAG,EAAE,KAAI,CAAC,OAAO,CAAC,SAAS,CAAC,CAAA;YAE3C,GAAG,GAAG,4BAAY,CAAC,GAAG,EAAE,KAAI,CAAC,OAAO,CAAC,YAAY,CAAC,CAAA;YAClD,GAAG,GAAG,wBAAQ,CAAC,GAAG,EAAE,KAAI,CAAC,OAAO,CAAC,SAAS,CAAC,CAAA;YAC3C,GAAG,GAAG,wBAAQ,CAAC
,GAAG,EAAE,KAAI,CAAC,OAAO,CAAC,SAAS,CAAC,CAAA;YAC3C,GAAG,GAAG,4BAAY,CAAC,GAAG,EAAE,KAAI,CAAC,OAAO,CAAC,gBAAgB,CAAC,CAAA;YAEtD,IAAM,SAAS,GAAG,GAAG,CAAC,IAAI,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,CAAgB,CAAA;YACjD,IAAM,cAAc,GAAG,EAAE,CAAC,MAAM,CAAC,SAAS,EAAE,KAAI,CAAC,OAAO,CAAC,EAAE,CAAC,CAAA;YAE5D,OAAO,cAAc,CAAA;QACvB,CAAC,CAAC,CAAA;IACJ,CAAC;IAEY,oCAAO,GAApB,UAAqB,KAAgB;;;;;;wBAC5B,KAAA,IAAI,CAAC,YAAY,CAAA;wBAAC,qBAAM,uBAAU,CAAC,KAAK,EAAE,IAAI,CAAC,EAAA;4BAAtD,sBAAO,SAAA,IAAI,GAAc,SAA6B,EAAC,EAAA;;;;KACxD;IAEY,kDAAqB,GAAlC,UAAmC,KAAgB;;;;;;4BAChC,qBAAM,uBAAU,CAAC,KAAK,EAAE,IAAI,CAAC,EAAA;;wBAAxC,QAAQ,GAAG,SAA6B;wBAExC,qBAAqB,GAAG,EAAE,CAAC,IAAI,CACnC,cAAM,OAAA,EAAE,CAAC,OAAO,CAAC,KAAI,CAAC,YAAY,CAAC,QAAQ,CAAC,CAAC,EAAvC,CAAuC,CAC9C,CAAA;wBAE+B,qBAAM,OAAO,CAAC,GAAG,CAAC,qBAAqB,CAAC,GAAG,CACzE,UAAA,CAAC,IAAI,OAAA,CAAC,CAAC,IAAI,EAAE,EAAR,CAAQ,CACd,CAAC,EAAA;;wBAFI,uBAAuB,GAAG,SAEZ;wBAEpB,qBAAqB,CAAC,OAAO,CAAC,UAAA,CAAC,IAAI,OAAA,CAAC,CAAC,OAAO,EAAE,EAAX,CAAW,CAAC,CAAA;wBAE/C,sBAAO,QAAQ,CAAC,YAAY;gCAC1B,CAAC,CAAC,uBAAuB;gCACzB,CAAC,CAAC,uBAAuB,CAAC,CAAC,CAAC,EAAA;;;;KAC/B;IACH,yBAAC;AAAD,CAAC,AA/ED,IA+EC;AA/EY,gDAAkB"}
\ No newline at end of file
......@@ -3,9 +3,9 @@ Object.defineProperty(exports, "__esModule", { value: true });
var tf = require("@tensorflow/tfjs-core");
function normalize(x) {
return tf.tidy(function () {
var avg_r = tf.fill([1, 150, 150, 1], 122.782);
var avg_g = tf.fill([1, 150, 150, 1], 117.001);
var avg_b = tf.fill([1, 150, 150, 1], 104.298);
var avg_r = tf.fill(x.shape.slice(0, 3).concat([1]), 122.782);
var avg_g = tf.fill(x.shape.slice(0, 3).concat([1]), 117.001);
var avg_b = tf.fill(x.shape.slice(0, 3).concat([1]), 104.298);
var avg_rgb = tf.concat([avg_r, avg_g, avg_b], 3);
return tf.div(tf.sub(x, avg_rgb), tf.scalar(256));
});
......
{"version":3,"file":"normalize.js","sourceRoot":"","sources":["../../src/faceRecognitionNet/normalize.ts"],"names":[],"mappings":";;AAAA,0CAA4C;AAE5C,mBAA0B,CAAc;IACtC,OAAO,EAAE,CAAC,IAAI,CAAC;QACb,IAAM,KAAK,GAAG,EAAE,CAAC,IAAI,CAAC,CAAC,CAAC,EAAE,GAAG,EAAE,GAAG,EAAE,CAAC,CAAC,EAAE,OAAO,CAAC,CAAC;QACjD,IAAM,KAAK,GAAG,EAAE,CAAC,IAAI,CAAC,CAAC,CAAC,EAAE,GAAG,EAAE,GAAG,EAAE,CAAC,CAAC,EAAE,OAAO,CAAC,CAAC;QACjD,IAAM,KAAK,GAAG,EAAE,CAAC,IAAI,CAAC,CAAC,CAAC,EAAE,GAAG,EAAE,GAAG,EAAE,CAAC,CAAC,EAAE,OAAO,CAAC,CAAC;QACjD,IAAM,OAAO,GAAG,EAAE,CAAC,MAAM,CAAC,CAAC,KAAK,EAAE,KAAK,EAAE,KAAK,CAAC,EAAE,CAAC,CAAC,CAAA;QAEnD,OAAO,EAAE,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,CAAC,CAAC,EAAE,OAAO,CAAC,EAAE,EAAE,CAAC,MAAM,CAAC,GAAG,CAAC,CAAC,CAAA;IACnD,CAAC,CAAC,CAAA;AACJ,CAAC;AATD,8BASC"}
\ No newline at end of file
{"version":3,"file":"normalize.js","sourceRoot":"","sources":["../../src/faceRecognitionNet/normalize.ts"],"names":[],"mappings":";;AAAA,0CAA4C;AAE5C,mBAA0B,CAAc;IACtC,OAAO,EAAE,CAAC,IAAI,CAAC;QACb,IAAM,KAAK,GAAG,EAAE,CAAC,IAAI,CAAK,CAAC,CAAC,KAAK,CAAC,KAAK,CAAC,CAAC,EAAE,CAAC,CAAC,SAAE,CAAC,IAAG,OAAO,CAAC,CAAC;QAC5D,IAAM,KAAK,GAAG,EAAE,CAAC,IAAI,CAAK,CAAC,CAAC,KAAK,CAAC,KAAK,CAAC,CAAC,EAAE,CAAC,CAAC,SAAE,CAAC,IAAG,OAAO,CAAC,CAAC;QAC5D,IAAM,KAAK,GAAG,EAAE,CAAC,IAAI,CAAK,CAAC,CAAC,KAAK,CAAC,KAAK,CAAC,CAAC,EAAE,CAAC,CAAC,SAAE,CAAC,IAAG,OAAO,CAAC,CAAC;QAC5D,IAAM,OAAO,GAAG,EAAE,CAAC,MAAM,CAAC,CAAC,KAAK,EAAE,KAAK,EAAE,KAAK,CAAC,EAAE,CAAC,CAAC,CAAA;QAEnD,OAAO,EAAE,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,CAAC,CAAC,EAAE,OAAO,CAAC,EAAE,EAAE,CAAC,MAAM,CAAC,GAAG,CAAC,CAAC,CAAA;IACnD,CAAC,CAAC,CAAA;AACJ,CAAC;AATD,8BASC"}
\ No newline at end of file
......@@ -16,5 +16,5 @@ export declare function loadFaceRecognitionModel(url: string): Promise<void>;
export declare function loadModels(url: string): Promise<[void, void, void]>;
export declare function locateFaces(input: TNetInput, minConfidence?: number, maxResults?: number): Promise<FaceDetection[]>;
export declare function detectLandmarks(input: TNetInput): Promise<FaceLandmarks | FaceLandmarks[]>;
export declare function computeFaceDescriptor(input: TNetInput): Promise<Float32Array>;
export declare const allFaces: (input: tf.Tensor | NetInput | TNetInput, minConfidence: number) => Promise<FullFaceDescription[]>;
export declare function computeFaceDescriptor(input: TNetInput): Promise<Float32Array | Float32Array[]>;
export declare const allFaces: (input: tf.Tensor | NetInput | TNetInput, minConfidence: number, useBatchProcessing?: boolean) => Promise<FullFaceDescription[]>;
{"version":3,"file":"globalApi.js","sourceRoot":"","sources":["../src/globalApi.ts"],"names":[],"mappings":";;AAEA,qDAAoD;AAEpD,wEAAuE;AACvE,qEAAoE;AAEpE,8EAA6E;AAKhE,QAAA,YAAY,GAAG,IAAI,mCAAgB,EAAE,CAAA;AACrC,QAAA,WAAW,GAAG,IAAI,iCAAe,EAAE,CAAA;AACnC,QAAA,cAAc,GAAG,IAAI,uCAAkB,EAAE,CAAA;AAEtD,gCAAuC,GAAW;IAChD,OAAO,oBAAY,CAAC,IAAI,CAAC,GAAG,CAAC,CAAA;AAC/B,CAAC;AAFD,wDAEC;AAED,+BAAsC,GAAW;IAC/C,OAAO,mBAAW,CAAC,IAAI,CAAC,GAAG,CAAC,CAAA;AAC9B,CAAC;AAFD,sDAEC;AAED,kCAAyC,GAAW;IAClD,OAAO,sBAAc,CAAC,IAAI,CAAC,GAAG,CAAC,CAAA;AACjC,CAAC;AAFD,4DAEC;AAED,oBAA2B,GAAW;IACpC,OAAO,OAAO,CAAC,GAAG,CAAC;QACjB,sBAAsB,CAAC,GAAG,CAAC;QAC3B,qBAAqB,CAAC,GAAG,CAAC;QAC1B,wBAAwB,CAAC,GAAG,CAAC;KAC9B,CAAC,CAAA;AACJ,CAAC;AAND,gCAMC;AAED,qBACE,KAAgB,EAChB,aAAsB,EACtB,UAAmB;IAEnB,OAAO,oBAAY,CAAC,WAAW,CAAC,KAAK,EAAE,aAAa,EAAE,UAAU,CAAC,CAAA;AACnE,CAAC;AAND,kCAMC;AAED,yBACE,KAAgB;IAEhB,OAAO,mBAAW,CAAC,eAAe,CAAC,KAAK,CAAC,CAAA;AAC3C,CAAC;AAJD,0CAIC;AAED,+BACE,KAAgB;IAEhB,OAAO,sBAAc,CAAC,qBAAqB,CAAC,KAAK,CAAC,CAAA;AACpD,CAAC;AAJD,sDAIC;AAEY,QAAA,QAAQ,GAGiB,iCAAe,CACnD,oBAAY,EACZ,mBAAW,EACX,sBAAc,CACf,CAAA"}
\ No newline at end of file
{"version":3,"file":"globalApi.js","sourceRoot":"","sources":["../src/globalApi.ts"],"names":[],"mappings":";;AAEA,qDAAoD;AAEpD,wEAAuE;AACvE,qEAAoE;AAEpE,8EAA6E;AAKhE,QAAA,YAAY,GAAG,IAAI,mCAAgB,EAAE,CAAA;AACrC,QAAA,WAAW,GAAG,IAAI,iCAAe,EAAE,CAAA;AACnC,QAAA,cAAc,GAAG,IAAI,uCAAkB,EAAE,CAAA;AAEtD,gCAAuC,GAAW;IAChD,OAAO,oBAAY,CAAC,IAAI,CAAC,GAAG,CAAC,CAAA;AAC/B,CAAC;AAFD,wDAEC;AAED,+BAAsC,GAAW;IAC/C,OAAO,mBAAW,CAAC,IAAI,CAAC,GAAG,CAAC,CAAA;AAC9B,CAAC;AAFD,sDAEC;AAED,kCAAyC,GAAW;IAClD,OAAO,sBAAc,CAAC,IAAI,CAAC,GAAG,CAAC,CAAA;AACjC,CAAC;AAFD,4DAEC;AAED,oBAA2B,GAAW;IACpC,OAAO,OAAO,CAAC,GAAG,CAAC;QACjB,sBAAsB,CAAC,GAAG,CAAC;QAC3B,qBAAqB,CAAC,GAAG,CAAC;QAC1B,wBAAwB,CAAC,GAAG,CAAC;KAC9B,CAAC,CAAA;AACJ,CAAC;AAND,gCAMC;AAED,qBACE,KAAgB,EAChB,aAAsB,EACtB,UAAmB;IAEnB,OAAO,oBAAY,CAAC,WAAW,CAAC,KAAK,EAAE,aAAa,EAAE,UAAU,CAAC,CAAA;AACnE,CAAC;AAND,kCAMC;AAED,yBACE,KAAgB;IAEhB,OAAO,mBAAW,CAAC,eAAe,CAAC,KAAK,CAAC,CAAA;AAC3C,CAAC;AAJD,0CAIC;AAED,+BACE,KAAgB;IAEhB,OAAO,sBAAc,CAAC,qBAAqB,CAAC,KAAK,CAAC,CAAA;AACpD,CAAC;AAJD,sDAIC;AAEY,QAAA,QAAQ,GAIiB,iCAAe,CACnD,oBAAY,EACZ,mBAAW,EACX,sBAAc,CACf,CAAA"}
\ No newline at end of file
......@@ -2128,9 +2128,9 @@
function normalize(x) {
return tidy(function () {
var avg_r = fill([1, 150, 150, 1], 122.782);
var avg_g = fill([1, 150, 150, 1], 117.001);
var avg_b = fill([1, 150, 150, 1], 104.298);
var avg_r = fill(x.shape.slice(0, 3).concat([1]), 122.782);
var avg_g = fill(x.shape.slice(0, 3).concat([1]), 117.001);
var avg_b = fill(x.shape.slice(0, 3).concat([1]), 104.298);
var avg_rgb = concat([avg_r, avg_g, avg_b], 3);
return div(sub(x, avg_rgb), scalar(256));
});
......@@ -2195,13 +2195,11 @@
this._params = extractParams$2(weights);
};
FaceRecognitionNet.prototype.forwardInput = function (input) {
return __awaiter$1(this, void 0, void 0, function () {
var _this = this;
return __generator$1(this, function (_a) {
if (!this._params) {
throw new Error('FaceRecognitionNet - load model before inference');
}
return [2 /*return*/, tidy(function () {
return tidy(function () {
var batchTensor = input.toBatchTensor(150, true);
var normalized = normalize(batchTensor);
var out = convDown(normalized, _this._params.conv32_down);
......@@ -2223,8 +2221,6 @@
var globalAvg = out.mean([1, 2]);
var fullyConnected = matMul(globalAvg, _this._params.fc);
return fullyConnected;
})];
});
});
};
FaceRecognitionNet.prototype.forward = function (input) {
......@@ -2242,20 +2238,21 @@
};
FaceRecognitionNet.prototype.computeFaceDescriptor = function (input) {
return __awaiter$1(this, void 0, void 0, function () {
var result, _a, data;
return __generator$1(this, function (_b) {
switch (_b.label) {
case 0:
_a = this.forward;
return [4 /*yield*/, toNetInput(input, true)];
case 1: return [4 /*yield*/, _a.apply(this, [_b.sent()])];
var _this = this;
var netInput, faceDescriptorTensors, faceDescriptorsForBatch;
return __generator$1(this, function (_a) {
switch (_a.label) {
case 0: return [4 /*yield*/, toNetInput(input, true)];
case 1:
netInput = _a.sent();
faceDescriptorTensors = tidy(function () { return unstack(_this.forwardInput(netInput)); });
return [4 /*yield*/, Promise.all(faceDescriptorTensors.map(function (t) { return t.data(); }))];
case 2:
result = _b.sent();
return [4 /*yield*/, result.data()];
case 3:
data = _b.sent();
result.dispose();
return [2 /*return*/, data];
faceDescriptorsForBatch = _a.sent();
faceDescriptorTensors.forEach(function (t) { return t.dispose(); });
return [2 /*return*/, netInput.isBatchInput
? faceDescriptorsForBatch
: faceDescriptorsForBatch[0]];
}
});
});
......@@ -2270,34 +2267,45 @@
}
function allFacesFactory(detectionNet, landmarkNet, recognitionNet) {
return function (input, minConfidence) {
return function (input, minConfidence, useBatchProcessing) {
if (useBatchProcessing === void 0) { useBatchProcessing = false; }
return __awaiter$1(this, void 0, void 0, function () {
var detections, faceTensors, faceLandmarksByFace, alignedFaceBoxes, alignedFaceTensors, descriptors;
return __generator$1(this, function (_a) {
switch (_a.label) {
var detections, faceTensors, faceLandmarksByFace, _a, alignedFaceBoxes, alignedFaceTensors, descriptors, _b;
return __generator$1(this, function (_c) {
switch (_c.label) {
case 0: return [4 /*yield*/, detectionNet.locateFaces(input, minConfidence)];
case 1:
detections = _a.sent();
return [4 /*yield*/, extractFaceTensors(input, detections)
/**
const faceLandmarksByFace = await Promise.all(faceTensors.map(
faceTensor => landmarkNet.detectLandmarks(faceTensor)
)) as FaceLandmarks[]
*/
];
detections = _c.sent();
return [4 /*yield*/, extractFaceTensors(input, detections)];
case 2:
faceTensors = _a.sent();
faceTensors = _c.sent();
if (!useBatchProcessing) return [3 /*break*/, 4];
return [4 /*yield*/, landmarkNet.detectLandmarks(faceTensors)];
case 3:
faceLandmarksByFace = _a.sent();
_a = _c.sent();
return [3 /*break*/, 6];
case 4: return [4 /*yield*/, Promise.all(faceTensors.map(function (faceTensor) { return landmarkNet.detectLandmarks(faceTensor); }))];
case 5:
_a = _c.sent();
_c.label = 6;
case 6:
faceLandmarksByFace = _a;
faceTensors.forEach(function (t) { return t.dispose(); });
alignedFaceBoxes = faceLandmarksByFace.map(function (landmarks, i) { return landmarks.align(detections[i].getBox()); });
return [4 /*yield*/, extractFaceTensors(input, alignedFaceBoxes)];
case 4:
alignedFaceTensors = _a.sent();
return [4 /*yield*/, Promise.all(alignedFaceTensors.map(function (faceTensor) { return recognitionNet.computeFaceDescriptor(faceTensor); }))];
case 5:
descriptors = _a.sent();
case 7:
alignedFaceTensors = _c.sent();
if (!useBatchProcessing) return [3 /*break*/, 9];
return [4 /*yield*/, recognitionNet.computeFaceDescriptor(alignedFaceTensors)];
case 8:
_b = _c.sent();
return [3 /*break*/, 11];
case 9: return [4 /*yield*/, Promise.all(alignedFaceTensors.map(function (faceTensor) { return recognitionNet.computeFaceDescriptor(faceTensor); }))];
case 10:
_b = _c.sent();
_c.label = 11;
case 11:
descriptors = _b;
alignedFaceTensors.forEach(function (t) { return t.dispose(); });
return [2 /*return*/, detections.map(function (detection, i) {
return new FullFaceDescription(detection, faceLandmarksByFace[i].shiftByPoint(detection.getBox()), descriptors[i]);
......
This source diff could not be displayed because it is too large. You can view the blob instead.
This source diff could not be displayed because it is too large. You can view the blob instead.
......@@ -117,6 +117,10 @@ function renderNavBar(navbarId, exampleUri) {
{
uri: 'batch_face_landmarks',
name: 'Batch Face Landmarks'
},
{
uri: 'batch_face_recognition',
name: 'Batch Face Recognition'
}
]
......
......@@ -49,6 +49,10 @@
margin: 20px;
}
.button-sm {
padding: 0 10px !important;
}
#github-link {
display: flex !important;
justify-content: center;
......
......@@ -25,8 +25,7 @@ app.get('/detect_and_draw_landmarks', (req, res) => res.sendFile(path.join(views
app.get('/face_alignment', (req, res) => res.sendFile(path.join(viewsDir, 'faceAlignment.html')))
app.get('/detect_and_recognize_faces', (req, res) => res.sendFile(path.join(viewsDir, 'detectAndRecognizeFaces.html')))
app.get('/batch_face_landmarks', (req, res) => res.sendFile(path.join(viewsDir, 'batchFaceLandmarks.html')))
app.get('/batch_face_recognition', (req, res) => res.sendFile(path.join(viewsDir, 'batchFaceRecognition.html')))
app.post('/fetch_external_image', async (req, res) => {
const { imageUrl } = req.body
......
<!DOCTYPE html>
<html>
<head>
<script src="face-api.js"></script>
<script src="commons.js"></script>
<link rel="stylesheet" href="styles.css">
<link rel="stylesheet" href="https://cdnjs.cloudflare.com/ajax/libs/materialize/0.100.2/css/materialize.css">
<script type="text/javascript" src="https://code.jquery.com/jquery-2.1.1.min.js"></script>
<script src="https://cdnjs.cloudflare.com/ajax/libs/materialize/0.100.2/js/materialize.min.js"></script>
</head>
<body>
<div id="navbar"></div>
<div class="center-content page-container">
<div>
<div class="progress" id="loader">
<div class="indeterminate"></div>
</div>
<div class="row side-by-side">
<div class="row">
<label for="timeNoBatch">Time for processing each face separately:</label>
<input disabled value="-" id="timeNoBatch" type="text" class="bold"/>
</div>
<div class="row">
<label for="timeBatch">Time for processing in Batch:</label>
<input disabled value="-" id="timeBatch" type="text" class="bold"/>
</div>
</div>
<div class="row side-by-side">
<div>
<label for="numImages">Num Images:</label>
<input id="numImages" type="text" class="bold" value="32"/>
</div>
<button
class="waves-effect waves-light btn"
onclick="measureTimingsAndDisplay()"
>
Ok
</button>
</div>
<div class="row side-by-side">
<div class="center-content">
<div id="faceContainer"></div>
</div>
</div>
</div>
</div>
<script>
// Fetched test images (HTMLImageElement[]) used as input for descriptor computation.
let images = []
// Reference descriptors per class, built once from image idx 1 of each class.
let trainDescriptorsByClass = []
// Result of the most recent runComputeFaceDescriptors call (one descriptor per image).
let descriptorsByFace = []
// How many of the fetched images to process; user-adjustable, clamped to [0, 32].
let numImages = 32
// Euclidean distance threshold above which a match is reported as unknown.
let maxDistance = 0.6
// Clamp the user-entered image count to [0, 32] (defaulting to 32 on
// non-numeric input) and write the clamped value back into the field.
function onNumImagesChanged(e) {
  const requested = parseInt(e.target.value) || 32
  const clamped = requested < 0 ? 0 : (requested > 32 ? 32 : requested)
  numImages = clamped
  e.target.value = clamped
}
// Show the two measured timings (in milliseconds) in their readonly inputs.
function displayTimeStats(timeNoBatch, timeBatch) {
  const withUnit = function (t) { return `${t} ms` }
  $('#timeNoBatch').val(withUnit(timeNoBatch))
  $('#timeBatch').val(withUnit(timeBatch))
}
// Render the image onto a new canvas appended to #faceContainer and overlay
// the best-matching class name (or 'unknown' when the nearest reference
// descriptor is farther than maxDistance) together with its distance.
function drawFaceRecognitionCanvas(img, descriptor) {
  const canvas = faceapi.createCanvasFromMedia(img)
  $('#faceContainer').append(canvas)

  const bestMatch = getBestMatch(trainDescriptorsByClass, descriptor)
  // fixed typo in the displayed label: 'unkown' -> 'unknown'
  const text = `${bestMatch.distance < maxDistance ? bestMatch.className : 'unknown'} (${bestMatch.distance})`
  const x = 20, y = canvas.height - 20
  faceapi.drawText(
    canvas.getContext('2d'),
    x,
    y,
    text,
    Object.assign(faceapi.getDefaultDrawOptions(), { color: 'red', fontSize: 16 })
  )
}
// Compute face descriptors for the first `numImages` images — either in one
// batched forward pass or one image at a time — storing the results in
// descriptorsByFace and returning the elapsed wall-clock time in ms.
async function runComputeFaceDescriptors(useBatchInput) {
  const startedAt = Date.now()
  const inputs = images.slice(0, numImages)
  if (useBatchInput) {
    descriptorsByFace = await faceapi.computeFaceDescriptor(inputs)
  } else {
    descriptorsByFace = await Promise.all(inputs.map(img => faceapi.computeFaceDescriptor(img)))
  }
  return Date.now() - startedAt
}
// Time the non-batched run first, then the batched run (sequentially, so the
// two measurements do not interfere), and return both timings.
async function measureTimings() {
  const results = {}
  results.timeNoBatch = await runComputeFaceDescriptors(false)
  results.timeBatch = await runComputeFaceDescriptors(true)
  return results
}
// Re-run both timing variants, update the timing read-outs and redraw one
// recognition canvas per processed image.
async function measureTimingsAndDisplay() {
  const timings = await measureTimings()
  displayTimeStats(timings.timeNoBatch, timings.timeBatch)
  $('#faceContainer').empty()
  for (let i = 0; i < descriptorsByFace.length; i++) {
    drawFaceRecognitionCanvas(images[i], descriptorsByFace[i])
  }
}
// Page entry point: load the recognition model, build the per-class reference
// descriptors, fetch the test images, do one untimed warmup pass and then run
// the timed batch-vs-sequential comparison.
async function run() {
  await faceapi.loadFaceRecognitionModel('/')
  trainDescriptorsByClass = await initTrainDescriptorsByClass(faceapi.recognitionNet, 1)
  $('#loader').hide()

  // image idx 1 of each class is reserved as reference data, so fetch the
  // 4 images at idx 2..5 per class
  const imgUris = []
  classes.forEach(clazz => {
    for (let idx = 2; idx <= 5; idx++) {
      imgUris.push(getFaceImageUri(clazz, idx))
    }
  })

  images = await Promise.all(imgUris.map(
    async uri => faceapi.bufferToImage(await fetchImage(uri))
  ))

  // warmup, so initialization cost does not skew the first timed run
  await measureTimings()
  measureTimingsAndDisplay()
}
// Wire up the image-count input, render the shared nav bar and start the demo
// once the DOM is ready.
$(document).ready(function() {
  $('#numImages').on('change', onNumImagesChanged)
  renderNavBar('#navbar', 'batch_face_recognition')
  run()
})
</script>
</body>
</html>
\ No newline at end of file
......@@ -30,6 +30,10 @@
>
Ok
</button>
<p>
<input type="checkbox" id="useBatchProcessing" onchange="onChangeUseBatchProcessing(event)" />
<label for="useBatchProcessing">Use Batch Processing</label>
</p>
</div>
<div class="row side-by-side">
<div class="row">
......@@ -37,13 +41,13 @@
<input disabled value="0.7" id="minConfidence" type="text" class="bold">
</div>
<button
class="waves-effect waves-light btn"
class="waves-effect waves-light btn button-sm"
onclick="onDecreaseMinConfidence()"
>
<i class="material-icons left">-</i>
</button>
<button
class="waves-effect waves-light btn"
class="waves-effect waves-light btn button-sm"
onclick="onIncreaseMinConfidence()"
>
<i class="material-icons left">+</i>
......@@ -53,13 +57,13 @@
<input disabled value="0.6" id="maxDistance" type="text" class="bold">
</div>
<button
class="waves-effect waves-light btn"
class="waves-effect waves-light btn button-sm"
onclick="onDecreaseMaxDistance()"
>
<i class="material-icons left">-</i>
</button>
<button
class="waves-effect waves-light btn"
class="waves-effect waves-light btn button-sm"
onclick="onIncreaseMaxDistance()"
>
<i class="material-icons left">+</i>
......@@ -70,9 +74,14 @@
<script>
let maxDistance = 0.6
let minConfidence = 0.7
let useBatchProcessing = false
let detectionNet, recognitionNet, landmarkNet
let trainDescriptorsByClass = []
function onChangeUseBatchProcessing(e) {
  // Mirror the checkbox state into the flag read by the next benchmark run.
  const isChecked = $(e.target).prop('checked')
  useBatchProcessing = isChecked
}
function onIncreaseMinConfidence() {
minConfidence = Math.min(faceapi.round(minConfidence + 0.1), 1.0)
$('#minConfidence').val(minConfidence)
......@@ -110,7 +119,7 @@
canvas.width = width
canvas.height = height
const fullFaceDescriptions = (await faceapi.allFaces(inputImgEl, minConfidence))
const fullFaceDescriptions = (await faceapi.allFaces(inputImgEl, minConfidence, useBatchProcessing))
.map(fd => fd.forSize(width, height))
fullFaceDescriptions.forEach(({ detection, descriptor }) => {
......
......@@ -13,18 +13,19 @@ export function allFacesFactory(
) {
return async function(
input: TNetInput,
minConfidence: number
minConfidence: number,
useBatchProcessing: boolean = false
): Promise<FullFaceDescription[]> {
const detections = await detectionNet.locateFaces(input, minConfidence)
const faceTensors = await extractFaceTensors(input, detections)
/**
const faceLandmarksByFace = await Promise.all(faceTensors.map(
const faceLandmarksByFace = useBatchProcessing
? await landmarkNet.detectLandmarks(faceTensors) as FaceLandmarks[]
: await Promise.all(faceTensors.map(
faceTensor => landmarkNet.detectLandmarks(faceTensor)
)) as FaceLandmarks[]
*/
const faceLandmarksByFace = await landmarkNet.detectLandmarks(faceTensors) as FaceLandmarks[]
faceTensors.forEach(t => t.dispose())
......@@ -33,9 +34,12 @@ export function allFacesFactory(
)
const alignedFaceTensors = await extractFaceTensors(input, alignedFaceBoxes)
const descriptors = await Promise.all(alignedFaceTensors.map(
const descriptors = useBatchProcessing
? await recognitionNet.computeFaceDescriptor(alignedFaceTensors) as Float32Array[]
: await Promise.all(alignedFaceTensors.map(
faceTensor => recognitionNet.computeFaceDescriptor(faceTensor)
))
)) as Float32Array[]
alignedFaceTensors.forEach(t => t.dispose())
return detections.map((detection, i) =>
......
......@@ -30,12 +30,11 @@ export class FaceRecognitionNet {
this._params = extractParams(weights)
}
public async forwardInput(input: NetInput): Promise<tf.Tensor2D> {
public forwardInput(input: NetInput): tf.Tensor2D {
if (!this._params) {
throw new Error('FaceRecognitionNet - load model before inference')
}
return tf.tidy(() => {
const batchTensor = input.toBatchTensor(150, true)
......@@ -68,14 +67,26 @@ export class FaceRecognitionNet {
return fullyConnected
})
}
/**
 * Runs the forward pass on an arbitrary input (image element, tensor,
 * or batch thereof) by first converting it to a NetInput.
 */
public async forward(input: TNetInput): Promise<tf.Tensor2D> {
  const netInput = await toNetInput(input, true)
  return this.forwardInput(netInput)
}
public async computeFaceDescriptor(input: TNetInput) {
const result = await this.forward(await toNetInput(input, true))
const data = await result.data()
result.dispose()
return data as Float32Array
/**
 * Computes the face descriptor(s) for the given input.
 *
 * Accepts a single image/tensor or a batch. For a batch input the result
 * is one Float32Array per face; for a single input the sole descriptor
 * is returned directly (see the isBatchInput branch below).
 *
 * NOTE(review): assumes forwardInput returns a 2D tensor with one row
 * per input face, which unstack splits along axis 0 — confirm.
 */
public async computeFaceDescriptor(input: TNetInput): Promise<Float32Array|Float32Array[]> {
const netInput = await toNetInput(input, true)
// tidy disposes the intermediate forward-pass tensors, while tensors
// returned from the tidy callback (the unstacked per-face tensors)
// survive so they can be read below.
const faceDescriptorTensors = tf.tidy(
() => tf.unstack(this.forwardInput(netInput))
)
const faceDescriptorsForBatch = await Promise.all(faceDescriptorTensors.map(
t => t.data()
)) as Float32Array[]
// Release the per-face tensors once their data has been downloaded.
faceDescriptorTensors.forEach(t => t.dispose())
return netInput.isBatchInput
? faceDescriptorsForBatch
: faceDescriptorsForBatch[0]
}
}
\ No newline at end of file
......@@ -2,9 +2,9 @@ import * as tf from '@tensorflow/tfjs-core';
export function normalize(x: tf.Tensor4D): tf.Tensor4D {
return tf.tidy(() => {
const avg_r = tf.fill([1, 150, 150, 1], 122.782);
const avg_g = tf.fill([1, 150, 150, 1], 117.001);
const avg_b = tf.fill([1, 150, 150, 1], 104.298);
const avg_r = tf.fill([...x.shape.slice(0, 3), 1], 122.782);
const avg_g = tf.fill([...x.shape.slice(0, 3), 1], 117.001);
const avg_b = tf.fill([...x.shape.slice(0, 3), 1], 104.298);
const avg_rgb = tf.concat([avg_r, avg_g, avg_b], 3)
return tf.div(tf.sub(x, avg_rgb), tf.scalar(256))
......
......@@ -50,13 +50,14 @@ export function detectLandmarks(
export function computeFaceDescriptor(
input: TNetInput
): Promise<Float32Array> {
): Promise<Float32Array | Float32Array[]> {
return recognitionNet.computeFaceDescriptor(input)
}
export const allFaces: (
input: tf.Tensor | NetInput | TNetInput,
minConfidence: number
minConfidence: number,
useBatchProcessing?: boolean
) => Promise<FullFaceDescription[]> = allFacesFactory(
detectionNet,
landmarkNet,
......
[-0.08900658041238785, 0.10903996974229813, 0.027176279574632645, 0.04400758072733879, -0.14542895555496216, 0.11051996797323227, -0.04482650384306908, -0.05154910683631897, 0.10313281416893005, -0.09580713510513306, 0.11335672438144684, -0.02723177894949913, -0.2017219066619873, 0.09402787685394287, -0.025814395397901535, 0.07219463586807251, -0.12272300571203232, -0.07349629700183868, -0.1723618507385254, -0.1745331585407257, -0.03420797362923622, 0.10511981695890427, 0.0262751504778862, 0.014430010691285133, -0.2035353034734726, -0.2949812114238739, -0.04833773523569107, -0.10960741341114044, 0.08448510617017746, -0.039910122752189636, -0.03964325413107872, -0.099286288022995, -0.16025686264038086, 0.026379037648439407, 0.09079921245574951, 0.07745557278394699, -0.05415252223610878, -0.017411116510629654, 0.16053830087184906, 0.010681805200874805, -0.11814302206039429, 0.0382964164018631, 0.08098040521144867, 0.29891595244407654, 0.1258186250925064, 0.06479117274284363, 0.02330329827964306, -0.07838230580091476, 0.1363348364830017, -0.21215586364269257, 0.07675530016422272, 0.1447518914937973, 0.14686468243598938, 0.06991209089756012, 0.08843740075826645, -0.11935211718082428, -0.015284902416169643, 0.16930945217609406, -0.044002968817949295, 0.16501764953136444, 0.10481955111026764, -0.013367846608161926, -0.05079612880945206, -0.07971523702144623, 0.2541899085044861, 0.07128541171550751, -0.1458708792924881, -0.15604135394096375, 0.11365226656198502, -0.16018034517765045, -0.034580036997795105, 0.05678928270936012, -0.07191935181617737, -0.15881866216659546, -0.1955043375492096, 0.06456604599952698, 0.5308966040611267, 0.13605228066444397, -0.18340089917182922, -0.054736778140068054, -0.09668046236038208, -0.0006025233305990696, 0.06609033048152924, 0.0835171788930893, -0.13018545508384705, -0.07167276740074158, -0.04313529655337334, 0.08809386193752289, 0.29993879795074463, -0.07008976489305496, 0.005112136714160442, 0.1464609056711197, 0.03064284473657608, 
0.005341261625289917, -0.03758316487073898, -0.002741048112511635, -0.19020092487335205, -0.005203879438340664, -0.03693881630897522, 0.017715569585561752, 0.025151528418064117, -0.1393381506204605, 0.04255775362253189, 0.080945685505867, -0.23745450377464294, 0.21049565076828003, -0.01615971140563488, -0.0642223060131073, 0.0915207713842392, 0.10660708695650101, -0.14731745421886444, -0.027426915243268013, 0.2378913164138794, -0.2964036166667938, 0.2034282684326172, 0.2009482979774475, 0.04706001281738281, 0.13964271545410156, 0.05233509838581085, 0.11507777869701385, 0.045886922627687454, 0.12765641510486603, -0.15917260944843292, -0.13223722577095032, -0.023241272196173668, -0.129884734749794, -0.027176398783922195, 0.009421694092452526]
\ No newline at end of file
[-0.13293321430683136, 0.09793781489133835, 0.06550372391939163, 0.02364283800125122, -0.043399304151535034, 0.004586201161146164, -0.09000064432621002, -0.05539097636938095, 0.10467389971017838, -0.09715163707733154, 0.18808841705322266, -0.0205547958612442, -0.23795807361602783, -0.026068881154060364, -0.04790578782558441, 0.10736768692731857, -0.1791372150182724, -0.09754926711320877, -0.08212480694055557, -0.07197146117687225, 0.07512062042951584, 0.06562784314155579, -0.06910805404186249, 0.010537944734096527, -0.1353086233139038, -0.29961100220680237, -0.04597249627113342, -0.09019482880830765, 0.04843198508024216, -0.08456507325172424, -0.06385420262813568, 0.09591938555240631, -0.08721363544464111, 0.0029465071856975555, 0.062499962747097015, 0.08367685973644257, 0.004837760701775551, -0.02126195654273033, 0.18138188123703003, -0.0330311618745327, -0.1149168312549591, -0.014434240758419037, 0.04467501491308212, 0.32643717527389526, 0.13417592644691467, 0.049149081110954285, 0.0002636462450027466, -0.030674105510115623, 0.15085124969482422, -0.25617715716362, 0.007638035342097282, 0.20309507846832275, 0.155135378241539, 0.10535001009702682, 0.09949050843715668, -0.19686023890972137, 0.055761925876140594, 0.10784860700368881, -0.16404221951961517, 0.12705324590206146, 0.06780532747507095, -0.12821750342845917, -0.015174079686403275, -0.08541303128004074, 0.23064906895160675, 0.04403648152947426, -0.16575516760349274, -0.10698974132537842, 0.13322079181671143, -0.10516376793384552, -0.03650324046611786, 0.05603502690792084, -0.1468498408794403, -0.21398313343524933, -0.22947216033935547, 0.022328242659568787, 0.4006509780883789, 0.2338075339794159, -0.1980385184288025, 0.05581464245915413, -0.033158354461193085, -0.047999653965234756, 0.10474226623773575, 0.11267579346895218, -0.0938166156411171, -0.005631402134895325, -0.0698985829949379, 0.06661885231733322, 0.18326956033706665, 0.042940653860569, -0.031386956572532654, 0.2056775838136673, 
0.011491281911730766, 0.05759737640619278, -0.029466431587934494, -0.04597870260477066, -0.07393362373113632, -0.037820909172296524, -0.07149908691644669, 0.023783499374985695, 0.016364723443984985, -0.09576655924320221, 0.02455282025039196, 0.11984197050333023, -0.11477060616016388, 0.17211446166038513, -0.008100427687168121, 0.09116753190755844, -0.004660069011151791, 0.029939215630292892, -0.10707360506057739, 0.03878428786993027, 0.15494686365127563, -0.2801153063774109, 0.1764734983444214, 0.1614546924829483, 0.09864784777164459, 0.12133727967739105, 0.05214153230190277, 0.04244184494018555, 0.024142231792211533, -0.019513756036758423, -0.22539466619491577, -0.0927465632557869, 0.06196486949920654, -0.09522707760334015, 0.04965142160654068, 0.023237790912389755]
\ No newline at end of file
......@@ -87,7 +87,7 @@ describe('faceLandmarkNet', () => {
await faceLandmarkNet.load('base/weights')
})
it('computes face landmarks', async () => {
it('computes face landmarks for squared input', async () => {
const { width, height } = imgEl1
const result = await faceLandmarkNet.detectLandmarks(imgEl1) as FaceLandmarks
......
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment