Unverified Commit cc99df49 by justadudewhohacks Committed by GitHub

Merge pull request #3 from justadudewhohacks/face-extraction

Face extraction
parents f7c90389 d7962a58
import { getContext2dOrThrow, getElement, getMediaDimensions } from './utils';
import { createCanvas, getContext2dOrThrow, getElement, getMediaDimensions } from './utils';
var NetInput = /** @class */ (function () {
function NetInput(mediaArg, dims) {
var _this = this;
......@@ -28,9 +28,7 @@ var NetInput = /** @class */ (function () {
}
// if input is batch type, make sure every canvas has the same dimensions
var _a = this.dims || dims || getMediaDimensions(media), width = _a.width, height = _a.height;
var canvas = document.createElement('canvas');
canvas.width = width;
canvas.height = height;
var canvas = createCanvas({ width: width, height: height });
getContext2dOrThrow(canvas).drawImage(media, 0, 0, width, height);
this._canvases.push(canvas);
};
......
{"version":3,"file":"NetInput.js","sourceRoot":"","sources":["../src/NetInput.ts"],"names":[],"mappings":"AACA,OAAO,EAAE,mBAAmB,EAAE,UAAU,EAAE,kBAAkB,EAAE,MAAM,SAAS,CAAC;AAE9E;IAGE,kBACE,QAAmB,EACnB,IAAiB;QAFnB,iBA2BC;QAvBC,IAAM,aAAa,GAAG,KAAK,CAAC,OAAO,CAAC,QAAQ,CAAC;YAC3C,CAAC,CAAC,QAAQ;YACV,CAAC,CAAC,CAAC,QAAQ,CAAC,CAAA;QAEd,IAAI,CAAC,aAAa,CAAC,MAAM,EAAE;YACzB,MAAM,IAAI,KAAK,CAAC,wCAAwC,CAAC,CAAA;SAC1D;QAED,IAAM,MAAM,GAAG,aAAa,CAAC,GAAG,CAAC,UAAU,CAAC,CAAA;QAG5C,MAAM,CAAC,OAAO,CAAC,UAAC,KAAK,EAAE,CAAC;YACtB,IAAI,CAAC,CAAC,KAAK,YAAY,gBAAgB,IAAI,KAAK,YAAY,gBAAgB,IAAI,KAAK,YAAY,iBAAiB,CAAC,EAAE;gBACnH,IAAM,OAAO,GAAG,KAAK,CAAC,OAAO,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,qBAAmB,CAAC,MAAG,CAAC,CAAC,CAAC,EAAE,CAAA;gBACtE,IAAI,OAAO,aAAa,CAAC,CAAC,CAAC,KAAK,QAAQ,EAAE;oBACxC,MAAM,IAAI,KAAK,CAAC,eAAa,OAAO,qEAAkE,CAAC,CAAA;iBACxG;gBACD,MAAM,IAAI,KAAK,CAAC,eAAa,OAAO,kHAA+G,CAAC,CAAA;aACrJ;QACH,CAAC,CAAC,CAAA;QAEF,IAAI,CAAC,SAAS,GAAG,EAAE,CAAA;QACnB,MAAM,CAAC,OAAO,CAAC,UAAA,CAAC,IAAI,OAAA,KAAI,CAAC,UAAU,CAAC,CAAC,EAAE,IAAI,CAAC,EAAxB,CAAwB,CAAC,CAAA;IAC/C,CAAC;IAEO,6BAAU,GAAlB,UAAmB,KAAoB,EAAE,IAAiB;QACxD,IAAI,KAAK,YAAY,iBAAiB,EAAE;YACtC,IAAI,CAAC,SAAS,CAAC,IAAI,CAAC,KAAK,CAAC,CAAA;YAC1B,OAAM;SACP;QAED,yEAAyE;QACnE,IAAA,mDAAkE,EAAhE,gBAAK,EAAE,kBAAM,CAAmD;QAExE,IAAM,MAAM,GAAG,QAAQ,CAAC,aAAa,CAAC,QAAQ,CAAC,CAAA;QAC/C,MAAM,CAAC,KAAK,GAAG,KAAK,CAAA;QACpB,MAAM,CAAC,MAAM,GAAG,MAAM,CAAA;QAEtB,mBAAmB,CAAC,MAAM,CAAC,CAAC,SAAS,CAAC,KAAK,EAAE,CAAC,EAAE,CAAC,EAAE,KAAK,EAAE,MAAM,CAAC,CAAA;QACjE,IAAI,CAAC,SAAS,CAAC,IAAI,CAAC,MAAM,CAAC,CAAA;IAC7B,CAAC;IAED,sBAAW,8BAAQ;aAAnB;YACE,OAAO,IAAI,CAAC,SAAS,CAAA;QACvB,CAAC;;;OAAA;IAED,sBAAW,2BAAK;aAAhB;YACE,OAAO,CAAC,IAAI,CAAC,SAAS,CAAC,CAAC,CAAC,IAAI,EAAE,CAAC,CAAC,KAAK,CAAA;QACxC,CAAC;;;OAAA;IAED,sBAAW,4BAAM;aAAjB;YACE,OAAO,CAAC,IAAI,CAAC,SAAS,CAAC,CAAC,CAAC,IAAI,EAAE,CAAC,CAAC,MAAM,CAAA;QACzC,CAAC;;;OAAA;IAED,sBAAW,0BAAI;aAAf;YACQ,IAAA,SAAwB,EAAtB,gBAAK,EAAE,kBAAM,CAAS;YAC9B,OAAO,CAAC,KAAK,GAAG,CAAC,IAAI,MAAM,GAAG,CAAC,CAAC,CAAC,CAAC,CAAC,EAAE,KAAK,OAAA,E
AAE,MAAM,QAAA,EAAE,CAAC,CAAC,CAAC,IAAI,CAAA;QAC7D,CAAC;;;OAAA;IACH,eAAC;AAAD,CAAC,AAjED,IAiEC"}
\ No newline at end of file
{"version":3,"file":"NetInput.js","sourceRoot":"","sources":["../src/NetInput.ts"],"names":[],"mappings":"AACA,OAAO,EAAE,YAAY,EAAE,mBAAmB,EAAE,UAAU,EAAE,kBAAkB,EAAE,MAAM,SAAS,CAAC;AAE5F;IAGE,kBACE,QAAmB,EACnB,IAAiB;QAFnB,iBA2BC;QAvBC,IAAM,aAAa,GAAG,KAAK,CAAC,OAAO,CAAC,QAAQ,CAAC;YAC3C,CAAC,CAAC,QAAQ;YACV,CAAC,CAAC,CAAC,QAAQ,CAAC,CAAA;QAEd,IAAI,CAAC,aAAa,CAAC,MAAM,EAAE;YACzB,MAAM,IAAI,KAAK,CAAC,wCAAwC,CAAC,CAAA;SAC1D;QAED,IAAM,MAAM,GAAG,aAAa,CAAC,GAAG,CAAC,UAAU,CAAC,CAAA;QAG5C,MAAM,CAAC,OAAO,CAAC,UAAC,KAAK,EAAE,CAAC;YACtB,IAAI,CAAC,CAAC,KAAK,YAAY,gBAAgB,IAAI,KAAK,YAAY,gBAAgB,IAAI,KAAK,YAAY,iBAAiB,CAAC,EAAE;gBACnH,IAAM,OAAO,GAAG,KAAK,CAAC,OAAO,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,qBAAmB,CAAC,MAAG,CAAC,CAAC,CAAC,EAAE,CAAA;gBACtE,IAAI,OAAO,aAAa,CAAC,CAAC,CAAC,KAAK,QAAQ,EAAE;oBACxC,MAAM,IAAI,KAAK,CAAC,eAAa,OAAO,qEAAkE,CAAC,CAAA;iBACxG;gBACD,MAAM,IAAI,KAAK,CAAC,eAAa,OAAO,kHAA+G,CAAC,CAAA;aACrJ;QACH,CAAC,CAAC,CAAA;QAEF,IAAI,CAAC,SAAS,GAAG,EAAE,CAAA;QACnB,MAAM,CAAC,OAAO,CAAC,UAAA,CAAC,IAAI,OAAA,KAAI,CAAC,UAAU,CAAC,CAAC,EAAE,IAAI,CAAC,EAAxB,CAAwB,CAAC,CAAA;IAC/C,CAAC;IAEO,6BAAU,GAAlB,UAAmB,KAAoB,EAAE,IAAiB;QACxD,IAAI,KAAK,YAAY,iBAAiB,EAAE;YACtC,IAAI,CAAC,SAAS,CAAC,IAAI,CAAC,KAAK,CAAC,CAAA;YAC1B,OAAM;SACP;QAED,yEAAyE;QACnE,IAAA,mDAAkE,EAAhE,gBAAK,EAAE,kBAAM,CAAmD;QAExE,IAAM,MAAM,GAAG,YAAY,CAAC,EAAE,KAAK,OAAA,EAAE,MAAM,QAAA,EAAE,CAAC,CAAA;QAC9C,mBAAmB,CAAC,MAAM,CAAC,CAAC,SAAS,CAAC,KAAK,EAAE,CAAC,EAAE,CAAC,EAAE,KAAK,EAAE,MAAM,CAAC,CAAA;QACjE,IAAI,CAAC,SAAS,CAAC,IAAI,CAAC,MAAM,CAAC,CAAA;IAC7B,CAAC;IAED,sBAAW,8BAAQ;aAAnB;YACE,OAAO,IAAI,CAAC,SAAS,CAAA;QACvB,CAAC;;;OAAA;IAED,sBAAW,2BAAK;aAAhB;YACE,OAAO,CAAC,IAAI,CAAC,SAAS,CAAC,CAAC,CAAC,IAAI,EAAE,CAAC,CAAC,KAAK,CAAA;QACxC,CAAC;;;OAAA;IAED,sBAAW,4BAAM;aAAjB;YACE,OAAO,CAAC,IAAI,CAAC,SAAS,CAAC,CAAC,CAAC,IAAI,EAAE,CAAC,CAAC,MAAM,CAAA;QACzC,CAAC;;;OAAA;IAED,sBAAW,0BAAI;aAAf;YACQ,IAAA,SAAwB,EAAtB,gBAAK,EAAE,kBAAM,CAAS;YAC9B,OAAO,CAAC,KAAK,GAAG,CAAC,IAAI,MAAM,GAAG,CAAC,CAAC,CAAC,CAAC,CAAC,EAAE,KAAK,OAAA,EAAE,MAAM,QAAA,EAAE,CAAC,CAAC,CAAC,IAAI,CAA
A;QAC7D,CAAC;;;OAAA;IACH,eAAC;AAAD,CAAC,AA9DD,IA8DC"}
\ No newline at end of file
import * as tf from '@tensorflow/tfjs-core';
import { FaceDetectionResult } from './faceDetectionNet/FaceDetectionResult';
import { NetInput } from './NetInput';
import { TNetInput } from './types';
/**
* Extracts the tensors of the image regions containing the detected faces.
* Returned tensors have to be disposed manually once you don't need them anymore!
* Useful if you want to compute the face descriptors for the face
* images. Using this method is faster than extracting a canvas for each face and
* converting them to tensors individually.
*
* @param image The image that face detection has been performed on.
* @param detections The face detection results for that image.
* @returns Tensors of the corresponding image region for each detected face.
*/
export declare function extractFaceTensors(image: tf.Tensor | NetInput | TNetInput, detections: FaceDetectionResult[]): tf.Tensor4D[];
import * as tf from '@tensorflow/tfjs-core';
import { getImageTensor } from './transformInputs';
/**
* Extracts the tensors of the image regions containing the detected faces.
* Returned tensors have to be disposed manually once you don't need them anymore!
* Useful if you want to compute the face descriptors for the face
* images. Using this method is faster than extracting a canvas for each face and
* converting them to tensors individually.
*
* @param image The image that face detection has been performed on.
* @param detections The face detection results for that image.
* @returns Tensors of the corresponding image region for each detected face.
*/
export function extractFaceTensors(image, detections) {
    return tf.tidy(function () {
        var imgTensor = getImageTensor(image);
        // TODO handle batches
        var shape = imgTensor.shape;
        var imgHeight = shape[1];
        var imgWidth = shape[2];
        var numChannels = shape[3];
        // One slice per detection: box coordinates are rescaled to the image size.
        return detections.map(function (det) {
            var box = det.forSize(imgWidth, imgHeight).box;
            return tf.slice(imgTensor, [0, box.y, box.x, 0], [1, box.height, box.width, numChannels]);
        });
    });
}
//# sourceMappingURL=extractFaceTensors.js.map
\ No newline at end of file
{"version":3,"file":"extractFaceTensors.js","sourceRoot":"","sources":["../src/extractFaceTensors.ts"],"names":[],"mappings":"AAAA,OAAO,KAAK,EAAE,MAAM,uBAAuB,CAAC;AAI5C,OAAO,EAAE,cAAc,EAAE,MAAM,mBAAmB,CAAC;AAGnD;;;;;;;;;;GAUG;AACH,MAAM,6BACJ,KAAuC,EACvC,UAAiC;IAEjC,OAAO,EAAE,CAAC,IAAI,CAAC;QACb,IAAM,SAAS,GAAG,cAAc,CAAC,KAAK,CAAC,CAAA;QAEvC,sBAAsB;QAChB,IAAA,oBAA+D,EAA9D,iBAAS,EAAE,iBAAS,EAAE,gBAAQ,EAAE,mBAAW,CAAmB;QAErE,IAAM,WAAW,GAAG,UAAU,CAAC,GAAG,CAAC,UAAA,GAAG;YAC9B,IAAA,yCAA8D,EAA5D,QAAC,EAAE,QAAC,EAAE,gBAAK,EAAE,kBAAM,CAAyC;YACpE,OAAO,EAAE,CAAC,KAAK,CAAC,SAAS,EAAE,CAAC,CAAC,EAAE,CAAC,EAAE,CAAC,EAAE,CAAC,CAAC,EAAE,CAAC,CAAC,EAAE,MAAM,EAAE,KAAK,EAAE,WAAW,CAAC,CAAC,CAAA;QAC3E,CAAC,CAAC,CAAA;QAEF,OAAO,WAAW,CAAA;IACpB,CAAC,CAAC,CAAA;AACJ,CAAC"}
\ No newline at end of file
import { FaceDetectionResult } from './faceDetectionNet/FaceDetectionResult';
/**
* Extracts the image regions containing the detected faces.
*
* @param image The image that face detection has been performed on.
* @param detections The face detection results for that image.
* @returns The Canvases of the corresponding image region for each detected face.
*/
export declare function extractFaces(image: HTMLCanvasElement, detections: FaceDetectionResult[]): HTMLCanvasElement[];
import { createCanvas, getContext2dOrThrow } from './utils';
/**
* Extracts the image regions containing the detected faces.
*
* @param image The image that face detection has been performed on.
* @param detections The face detection results for that image.
* @returns The Canvases of the corresponding image region for each detected face.
*/
export function extractFaces(image, detections) {
    var srcCtx = getContext2dOrThrow(image);
    return detections.map(function (det) {
        // Box coordinates are rescaled to the source canvas dimensions.
        var box = det.forSize(image.width, image.height).box;
        var pixels = srcCtx.getImageData(box.x, box.y, box.width, box.height);
        var faceCanvas = createCanvas({ width: box.width, height: box.height });
        getContext2dOrThrow(faceCanvas).putImageData(pixels, 0, 0);
        return faceCanvas;
    });
}
//# sourceMappingURL=extractFaces.js.map
\ No newline at end of file
{"version":3,"file":"extractFaces.js","sourceRoot":"","sources":["../src/extractFaces.ts"],"names":[],"mappings":"AACA,OAAO,EAAE,YAAY,EAAE,mBAAmB,EAAE,MAAM,SAAS,CAAC;AAE5D;;;;;;GAMG;AACH,MAAM,uBACJ,KAAwB,EACxB,UAAiC;IAEjC,IAAM,GAAG,GAAG,mBAAmB,CAAC,KAAK,CAAC,CAAA;IAEtC,OAAO,UAAU,CAAC,GAAG,CAAC,UAAA,GAAG;QACjB,IAAA,+CAAoE,EAAlE,QAAC,EAAE,QAAC,EAAE,gBAAK,EAAE,kBAAM,CAA+C;QAE1E,IAAM,OAAO,GAAG,YAAY,CAAC,EAAE,KAAK,OAAA,EAAE,MAAM,QAAA,EAAE,CAAC,CAAA;QAC/C,mBAAmB,CAAC,OAAO,CAAC;aACzB,YAAY,CAAC,GAAG,CAAC,YAAY,CAAC,CAAC,EAAE,CAAC,EAAE,KAAK,EAAE,MAAM,CAAC,EAAE,CAAC,EAAE,CAAC,CAAC,CAAA;QAC5D,OAAO,OAAO,CAAA;IAChB,CAAC,CAAC,CAAA;AACJ,CAAC"}
\ No newline at end of file
......@@ -105,12 +105,32 @@
_loop_1(argName);
}
}
// In-place Fisher-Yates shuffle: walk from the end of the array, swapping
// each slot with a uniformly random slot at or before it.
function shuffle(array) {
    for (var remaining = array.length; remaining > 0; remaining--) {
        var pick = (Math.random() * remaining) | 0;
        var last = remaining - 1;
        var held = array[last];
        array[last] = array[pick];
        array[pick] = held;
    }
}
// Restricts x to [min, max] (returns min when the interval is empty,
// matching Math.max(min, Math.min(x, max)) semantics).
function clamp(min, x, max) {
    var upperBounded = Math.min(x, max);
    return Math.max(min, upperBounded);
}
// Uniform random sample in [a, b): scale a unit random into the interval.
function randUniform(a, b) {
    var span = b - a;
    return a + span * Math.random();
}
// Squared Euclidean distance between two equal-length numeric sequences.
// Elements are coerced with Number() so typed arrays and plain arrays both work.
function distSquared(a, b) {
    var total = 0;
    for (var idx = 0; idx < a.length; idx++) {
        var delta = Number(a[idx]) - Number(b[idx]);
        total += delta * delta;
    }
    return total;
}
function assert(expr, msg) {
if (!expr) {
throw new Error(msg);
......@@ -160,6 +180,9 @@
}
return size;
}
// A scalar tensor has rank 0, i.e. an empty shape array.
function isScalarShape(shape) {
    return !shape.length;
}
function arraysEqual(n1, n2) {
if (n1.length !== n2.length) {
return false;
......@@ -197,6 +220,14 @@
}
return [1, size];
}
// Builds the identity permutation 0..n-1 as a Uint32Array, then shuffles it
// in place and returns it.
function createShuffledIndices(n) {
    var indices = new Uint32Array(n);
    for (var idx = 0; idx < n; ++idx) {
        indices[idx] = idx;
    }
    shuffle(indices);
    return indices;
}
function rightPad(a, size) {
if (size <= a.length) {
return a;
......@@ -328,6 +359,29 @@
}
}
}
// Flattens either a bare Tensor (single-element list) or a name->Tensor map
// into a Tensor[], using `keys` to fix the ordering for the map case.
function flattenNameArrayMap(nameArrayMap, keys) {
    if (nameArrayMap instanceof Tensor) {
        return [nameArrayMap];
    }
    var xMap = nameArrayMap;
    return keys.map(function (key) { return xMap[key]; });
}
// Inverse of flattenNameArrayMap: zips `keys` with `flatArrays` into a
// name->Tensor map. Lengths must match.
function unflattenToNameArrayMap(keys, flatArrays) {
    if (keys.length !== flatArrays.length) {
        throw new Error("Cannot unflatten Tensor[], keys and arrays are not of same length.");
    }
    var result = {};
    keys.forEach(function (key, i) {
        result[key] = flatArrays[i];
    });
    return result;
}
function hasEncodingLoss(oldType, newType) {
if (newType === 'float32') {
return false;
......@@ -403,6 +457,43 @@
return Array.isArray(obj) || typeof obj === 'object';
}
var util = /*#__PURE__*/Object.freeze({
assertArgumentsAreTensors: assertArgumentsAreTensors,
shuffle: shuffle,
clamp: clamp,
randUniform: randUniform,
distSquared: distSquared,
assert: assert,
assertShapesMatch: assertShapesMatch,
assertTypesMatch: assertTypesMatch,
flatten: flatten,
inferShape: inferShape,
sizeFromShape: sizeFromShape,
isScalarShape: isScalarShape,
arraysEqual: arraysEqual,
isInt: isInt,
tanh: tanh,
sizeToSquarishShape: sizeToSquarishShape,
createShuffledIndices: createShuffledIndices,
rightPad: rightPad,
repeatedTry: repeatedTry,
getQueryParams: getQueryParams,
inferFromImplicitShape: inferFromImplicitShape,
squeezeShape: squeezeShape,
getTypedArrayFromDType: getTypedArrayFromDType,
isTensorInList: isTensorInList,
checkForNaN: checkForNaN,
flattenNameArrayMap: flattenNameArrayMap,
unflattenToNameArrayMap: unflattenToNameArrayMap,
hasEncodingLoss: hasEncodingLoss,
copyTypedArray: copyTypedArray,
isTypedArray: isTypedArray,
bytesPerElement: bytesPerElement,
isFunction: isFunction,
extractTensorsFromContainer: extractTensorsFromContainer,
extractTensorsFromAny: extractTensorsFromAny
});
var FORMAT_LIMIT_NUM_VALS = 20;
var FORMAT_NUM_FIRST_LAST_VALS = 3;
var FORMAT_NUM_SIG_DIGITS = 7;
......@@ -5419,23 +5510,44 @@
}());
var batchNormalization = BatchNormOps.batchNormalization;
var batchNormalization2d = BatchNormOps.batchNormalization2d;
var batchNormalization3d = BatchNormOps.batchNormalization3d;
var batchNormalization4d = BatchNormOps.batchNormalization4d;
var concat = ConcatOps.concat;
var concat1d = ConcatOps.concat1d;
var concat2d = ConcatOps.concat2d;
var concat3d = ConcatOps.concat3d;
var concat4d = ConcatOps.concat4d;
var conv1d = ConvOps.conv1d;
var conv2d = ConvOps.conv2d;
var conv2dTranspose = ConvOps.conv2dTranspose;
var depthwiseConv2d = ConvOps.depthwiseConv2d;
var separableConv2d = ConvOps.separableConv2d;
var matMul = MatmulOps.matMul;
var matrixTimesVector = MatmulOps.matrixTimesVector;
var outerProduct = MatmulOps.outerProduct;
var vectorTimesMatrix = MatmulOps.vectorTimesMatrix;
var avgPool = PoolOps.avgPool;
var maxPool = PoolOps.maxPool;
var transpose = TransposeOps.transpose;
var reverse = ReverseOps.reverse;
var reverse1d = ReverseOps.reverse1d;
var reverse2d = ReverseOps.reverse2d;
var reverse3d = ReverseOps.reverse3d;
var reverse4d = ReverseOps.reverse4d;
var slice = SliceOps.slice;
var slice1d = SliceOps.slice1d;
var slice2d = SliceOps.slice2d;
var slice3d = SliceOps.slice3d;
var slice4d = SliceOps.slice4d;
var stridedSlice = StridedSliceOps.stridedSlice;
var argMax = ReductionOps.argMax;
var argMin = ReductionOps.argMin;
var logSumExp = ReductionOps.logSumExp;
var max = ReductionOps.max;
var mean = ReductionOps.mean;
var min = ReductionOps.min;
var moments = ReductionOps.moments;
var sum = ReductionOps.sum;
var unsortedSegmentSum = ReductionOps.unsortedSegmentSum;
var equal = CompareOps.equal;
......@@ -5494,6 +5606,7 @@
var erf = UnaryOps.erf;
var add = BinaryOps.add;
var addStrict = BinaryOps.addStrict;
var atan2 = BinaryOps.atan2;
var div = BinaryOps.div;
var divStrict = BinaryOps.divStrict;
var maximum = BinaryOps.maximum;
......@@ -5514,13 +5627,24 @@
var cast = ArrayOps.cast;
var clone = ArrayOps.clone;
var fromPixels = ArrayOps.fromPixels;
var toPixels = ArrayOps.toPixels;
var ones = ArrayOps.ones;
var onesLike = ArrayOps.onesLike;
var zeros = ArrayOps.zeros;
var zerosLike = ArrayOps.zerosLike;
var eye = ArrayOps.eye;
var rand = ArrayOps.rand;
var randomNormal = ArrayOps.randomNormal;
var truncatedNormal = ArrayOps.truncatedNormal;
var randomUniform = ArrayOps.randomUniform;
var multinomial = ArrayOps.multinomial;
var reshape = ArrayOps.reshape;
var squeeze = ArrayOps.squeeze;
var tile = ArrayOps.tile;
var gather = ArrayOps.gather;
var oneHot = ArrayOps.oneHot;
var linspace = ArrayOps.linspace;
var range = ArrayOps.range;
var buffer = ArrayOps.buffer;
var fill = ArrayOps.fill;
var tensor = ArrayOps.tensor;
......@@ -5536,8 +5660,25 @@
var split = ArrayOps.split;
var cumsum = ArrayOps.cumsum;
var pad = ArrayOps.pad;
var pad1d = ArrayOps.pad1d;
var pad2d = ArrayOps.pad2d;
var pad3d = ArrayOps.pad3d;
var pad4d = ArrayOps.pad4d;
var movingAverage = MovingAverageOps.movingAverage;
var basicLSTMCell = LSTMOps.basicLSTMCell;
var multiRNNCell = LSTMOps.multiRNNCell;
var softmax = SoftmaxOps.softmax;
var localResponseNormalization = LRNOps.localResponseNormalization;
var linalg = LinalgOps;
var losses = {
absoluteDifference: LossOps.absoluteDifference,
computeWeightedLoss: LossOps.computeWeightedLoss,
cosineDistance: LossOps.cosineDistance,
hingeLoss: LossOps.hingeLoss,
logLoss: LossOps.logLoss,
meanSquaredError: LossOps.meanSquaredError,
softmaxCrossEntropy: SoftmaxOps.softmaxCrossEntropy
};
var image = {
resizeBilinear: ImageOps.resizeBilinear,
resizeNearestNeighbor: ImageOps.resizeNearestNeighbor,
......@@ -6551,6 +6692,12 @@
var tidy = Tracking.tidy;
var keep = Tracking.keep;
var dispose = Tracking.dispose;
var time = Tracking.time;
var grad = Gradients.grad;
var valueAndGrad = Gradients.valueAndGrad;
var grads = Gradients.grads;
var valueAndGrads = Gradients.valueAndGrads;
var variableGrads = Gradients.variableGrads;
var customGrad = Gradients.customGrad;
......@@ -7338,6 +7485,13 @@
}
var ENV = getOrMakeEnvironment();
var environment = /*#__PURE__*/Object.freeze({
get Type () { return Type; },
URL_PROPERTIES: URL_PROPERTIES,
Environment: Environment,
ENV: ENV
});
var PARALLELIZE_THRESHOLD = 30;
function computeOptimalWindowSize(inSize) {
if (inSize <= PARALLELIZE_THRESHOLD) {
......@@ -7544,6 +7698,9 @@
// Each matrix entry occupies `channelsPerTexture` slots in the unpacked array.
function getUnpackedArraySizeFromMatrixSize(matrixSize, channelsPerTexture) {
    return channelsPerTexture * matrixSize;
}
// Texture shape for a color (RGBA) matrix: four channels per column makes the
// texture 4x wider than the matrix; height equals the row count.
function getColorMatrixTextureShapeWidthHeight(rows, columns) {
    var width = columns * 4;
    return [width, rows];
}
function getMatrixSizeFromUnpackedArraySize(unpackedSize, channelsPerTexture) {
if (unpackedSize % channelsPerTexture !== 0) {
throw new Error("unpackedSize (" + unpackedSize + ") must be a multiple of " +
......@@ -8355,6 +8512,11 @@
callAndCheck(gl, function () { return gl.activeTexture(gl.TEXTURE0 + textureUnit); });
callAndCheck(gl, function () { return gl.bindTexture(gl.TEXTURE_2D, texture); });
}
// Deactivates a texture unit: validates the unit index, selects the unit,
// then binds the null texture so nothing remains attached to its TEXTURE_2D
// target. Each GL call goes through callAndCheck for error reporting.
function unbindTextureUnit(gl, textureUnit) {
    validateTextureUnit(gl, textureUnit);
    callAndCheck(gl, function () { return gl.activeTexture(gl.TEXTURE0 + textureUnit); });
    callAndCheck(gl, function () { return gl.bindTexture(gl.TEXTURE_2D, null); });
}
function getProgramUniformLocationOrThrow(gl, program, uniformName) {
return throwIfNull(gl, function () { return gl.getUniformLocation(program, uniformName); }, 'uniform "' + uniformName + '" not present in program.');
}
......@@ -8365,6 +8527,11 @@
callAndCheck(gl, function () { return bindTextureUnit(gl, texture, textureUnit); });
callAndCheck(gl, function () { return gl.uniform1i(uniformSamplerLocation, textureUnit); });
}
// Targets rendering at the default framebuffer (the canvas itself) by binding
// framebuffer null, then sizes the viewport and scissor box to the full canvas.
function bindCanvasToFramebuffer(gl) {
    callAndCheck(gl, function () { return gl.bindFramebuffer(gl.FRAMEBUFFER, null); });
    callAndCheck(gl, function () { return gl.viewport(0, 0, gl.canvas.width, gl.canvas.height); });
    callAndCheck(gl, function () { return gl.scissor(0, 0, gl.canvas.width, gl.canvas.height); });
}
function bindColorTextureToFramebuffer(gl, texture, framebuffer) {
callAndCheck(gl, function () { return gl.bindFramebuffer(gl.FRAMEBUFFER, framebuffer); });
callAndCheck(gl, function () { return gl.framebufferTexture2D(gl.FRAMEBUFFER, gl.COLOR_ATTACHMENT0, gl.TEXTURE_2D, texture, 0); });
......@@ -8435,6 +8602,40 @@
}
}
var webgl_util = /*#__PURE__*/Object.freeze({
createWebGLRenderingContext: createWebGLRenderingContext,
createWebGLRenderingContextFromCanvas: createWebGLRenderingContextFromCanvas,
callAndCheck: callAndCheck,
enableDebugWebGLErrorChecking: enableDebugWebGLErrorChecking,
checkWebGLError: checkWebGLError,
getWebGLErrorMessage: getWebGLErrorMessage,
getExtensionOrThrow: getExtensionOrThrow,
createVertexShader: createVertexShader,
createFragmentShader: createFragmentShader,
createProgram: createProgram,
linkProgram: linkProgram,
validateProgram: validateProgram,
createStaticVertexBuffer: createStaticVertexBuffer,
createStaticIndexBuffer: createStaticIndexBuffer,
queryMaxTextureSize: queryMaxTextureSize,
getChannelsPerTexture: getChannelsPerTexture,
createTexture: createTexture,
validateTextureSize: validateTextureSize,
createFramebuffer: createFramebuffer,
bindVertexBufferToProgramAttribute: bindVertexBufferToProgramAttribute,
bindTextureUnit: bindTextureUnit,
unbindTextureUnit: unbindTextureUnit,
getProgramUniformLocationOrThrow: getProgramUniformLocationOrThrow,
getProgramUniformLocation: getProgramUniformLocation,
bindTextureToProgramUniformSampler: bindTextureToProgramUniformSampler,
bindCanvasToFramebuffer: bindCanvasToFramebuffer,
bindColorTextureToFramebuffer: bindColorTextureToFramebuffer,
unbindColorTextureFromFramebuffer: unbindColorTextureFromFramebuffer,
validateFramebuffer: validateFramebuffer,
getFramebufferErrorMessage: getFramebufferErrorMessage,
getTextureShapeFromLogicalShape: getTextureShapeFromLogicalShape
});
var __awaiter$4 = (undefined && undefined.__awaiter) || function (thisArg, _arguments, P, generator) {
return new (P || (P = Promise))(function (resolve, reject) {
function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
......@@ -8563,6 +8764,11 @@
var numChannels = 1;
return createAndConfigureTexture(gl, width, height, numChannels);
}
// Allocates a texture sized for an RGBA (4-channel) rows x columns matrix,
// using the color-matrix width/height convention (width = columns * 4).
function createColorMatrixTexture(gl, rows, columns) {
    var _a = getColorMatrixTextureShapeWidthHeight(rows, columns), width = _a[0], height = _a[1];
    var numChannels = 4;
    return createAndConfigureTexture(gl, width, height, numChannels);
}
function createPackedMatrixTexture(gl, rows, columns) {
var _a = getPackedMatrixTextureShapeWidthHeight(rows, columns), width = _a[0], height = _a[1];
var numChannels = 4;
......@@ -8691,6 +8897,25 @@
return decodeMatrixFromPackedRGBA(packedRGBA, rows, columns, matrix);
}
var gpgpu_util = /*#__PURE__*/Object.freeze({
getWebGLContextAttributes: getWebGLContextAttributes,
createWebGLContext: createWebGLContext,
createVertexShader: createVertexShader$1,
createVertexBuffer: createVertexBuffer,
createIndexBuffer: createIndexBuffer,
createMatrixTexture: createMatrixTexture,
createColorMatrixTexture: createColorMatrixTexture,
createPackedMatrixTexture: createPackedMatrixTexture,
bindVertexProgramAttributeStreams: bindVertexProgramAttributeStreams,
uploadPixelDataToTexture: uploadPixelDataToTexture,
uploadMatrixToTexture: uploadMatrixToTexture,
uploadMatrixToPackedTexture: uploadMatrixToPackedTexture,
downloadMatrixFromOutputTextureAsync: downloadMatrixFromOutputTextureAsync,
downloadMatrixFromOutputTexture: downloadMatrixFromOutputTexture,
downloadMatrixFromRGBAColorTexture: downloadMatrixFromRGBAColorTexture,
downloadMatrixFromPackedOutputTexture: downloadMatrixFromPackedOutputTexture
});
var __awaiter$5 = (undefined && undefined.__awaiter) || function (thisArg, _arguments, P, generator) {
return new (P || (P = Promise))(function (resolve, reject) {
function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
......@@ -12107,6 +12332,14 @@
return BrowserUtil;
}());
var DTYPE_VALUE_SIZE_MAP = {
'float32': 4,
'int32': 4,
'uint16': 2,
'uint8': 1,
'bool': 1,
};
var __awaiter$8 = (undefined && undefined.__awaiter) || function (thisArg, _arguments, P, generator) {
return new (P || (P = Promise))(function (resolve, reject) {
function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
......@@ -12142,6 +12375,92 @@
if (op[0] & 5) throw op[1]; return { value: op[0] ? op[1] : void 0, done: true };
}
};
// Serializes a name->tensor map into one ArrayBuffer plus a list of weight
// specs (name/shape/dtype) describing how to decode it again. Only float32,
// int32 and bool dtypes are supported.
// NOTE(review): compiled async state machine — label 0 gathers specs and
// starts all tensor data reads; label 1 resumes once every read resolved.
function encodeWeights(tensors) {
    return __awaiter$8(this, void 0, void 0, function () {
        var specs, dataPromises, name_1, t, tensorValues;
        return __generator$8(this, function (_a) {
            switch (_a.label) {
                case 0:
                    specs = [];
                    dataPromises = [];
                    for (name_1 in tensors) {
                        t = tensors[name_1];
                        if (t.dtype !== 'float32' && t.dtype !== 'int32' && t.dtype !== 'bool') {
                            throw new Error("Unsupported dtype in weight '" + name_1 + "': " + t.dtype);
                        }
                        specs.push({ name: name_1, shape: t.shape, dtype: t.dtype });
                        dataPromises.push(t.data());
                    }
                    // [4, x] is "await x" in the downleveled generator protocol.
                    return [4, Promise.all(dataPromises)];
                case 1:
                    tensorValues = _a.sent();
                    return [2, { data: concatenateTypedArrays(tensorValues), specs: specs }];
            }
        });
    });
}
/**
 * Inverse of encodeWeights: reads typed-array views out of `buffer` at
 * successive offsets, as described by `specs`, and materializes each as a
 * tensor of the recorded shape and dtype.
 *
 * @param buffer ArrayBuffer holding the concatenated weight data.
 * @param specs Per-weight metadata (name, shape, dtype) in encode order.
 * @returns Map from weight name to decoded tensor.
 * @throws If a spec carries quantization info or an unsupported dtype.
 */
function decodeWeights(buffer, specs) {
    var out = {};
    var offset = 0;
    for (var _i = 0, specs_1 = specs; _i < specs_1.length; _i++) {
        var spec = specs_1[_i];
        var name_2 = spec.name;
        var dtype = spec.dtype;
        var shape = spec.shape;
        if (spec.quantization != null) {
            // Bug fix: the closing quote was misplaced, producing
            // "weight 'foo with quantization.'" instead of "weight 'foo' ...".
            throw new Error("decodeWeights does not support quantization yet, but encountered " +
                ("weight '" + name_2 + "' with quantization."));
        }
        var size = sizeFromShape(shape);
        var value = void 0;
        // The typed-array constructors create views over `buffer` (no copy).
        if (dtype === 'float32') {
            value = ArrayOps.tensor(new Float32Array(buffer, offset, size), shape, 'float32');
        }
        else if (dtype === 'int32') {
            value =
                ArrayOps.tensor(new Int32Array(buffer, offset, size), shape, 'int32');
        }
        else if (dtype === 'bool') {
            value =
                ArrayOps.tensor(new Uint8Array(buffer, offset, size), shape, 'bool');
        }
        else {
            throw new Error("Unsupported dtype in weight '" + name_2 + "': " + dtype);
        }
        out[name_2] = value;
        // Advance by this weight's byte footprint.
        offset += size * DTYPE_VALUE_SIZE_MAP[dtype];
    }
    return out;
}
/**
 * Concatenates the raw bytes of a list of typed arrays into one ArrayBuffer.
 * Supports Float32Array/Int32Array (4 bytes per element) and Uint8Array.
 *
 * @param xs Typed arrays to concatenate, in order.
 * @returns ArrayBuffer containing the concatenated bytes.
 * @throws On null input or an unsupported TypedArray subtype.
 */
function concatenateTypedArrays(xs) {
    if (xs === null) {
        throw new Error("Invalid input value: " + JSON.stringify(xs));
    }
    var totalByteLength = 0;
    xs.forEach(function (x) {
        if (x instanceof Float32Array || x instanceof Int32Array) {
            totalByteLength += x.length * 4;
        }
        else if (x instanceof Uint8Array) {
            totalByteLength += x.length;
        }
        else {
            throw new Error("Unsupported TypedArray subtype: " + x.constructor.name);
        }
    });
    var y = new Uint8Array(totalByteLength);
    var offset = 0;
    xs.forEach(function (x) {
        // Bug fix: respect the view's byteOffset/byteLength. The previous code
        // wrapped the whole backing buffer (new Uint8Array(x.buffer)), which
        // copied the wrong bytes (or overflowed) for subarray views.
        y.set(new Uint8Array(x.buffer, x.byteOffset, x.byteLength), offset);
        offset += x.byteLength;
    });
    return y.buffer;
}
// Byte length of the string's UTF-8 encoding, measured via Blob.
function stringByteLength(str) {
    var blob = new Blob([str]);
    return blob.size;
}
......@@ -12156,6 +12475,28 @@
}
return buffer.buffer;
}
// Concatenates a list of ArrayBuffers into a single new ArrayBuffer.
function concatenateArrayBuffers(buffers) {
    var totalBytes = buffers.reduce(function (sum, b) { return sum + b.byteLength; }, 0);
    var merged = new Uint8Array(totalBytes);
    var cursor = 0;
    buffers.forEach(function (b) {
        merged.set(new Uint8Array(b), cursor);
        cursor += b.byteLength;
    });
    return merged.buffer;
}
// Returns the final '/'-separated segment of a path, after trimming
// surrounding whitespace and stripping any trailing separators.
function basename(path) {
    var SEPARATOR = '/';
    var trimmed = path.trim();
    while (trimmed.endsWith(SEPARATOR)) {
        trimmed = trimmed.substring(0, trimmed.length - 1);
    }
    var segments = trimmed.split(SEPARATOR);
    return segments[segments.length - 1];
}
function getModelArtifactsInfoForJSON(modelArtifacts) {
if (modelArtifacts.modelTopology instanceof ArrayBuffer) {
throw new Error('Expected JSON model topology, received ArrayBuffer.');
......@@ -12281,6 +12622,131 @@
};
return ModelStoreManagerRegistry;
}());
// Splits a "scheme://path" model URL into its scheme and path parts,
// throwing when the scheme suffix is absent.
function parseURL(url) {
    if (url.indexOf(URL_SCHEME_SUFFIX) === -1) {
        throw new Error("The url string provided does not contain a scheme. " +
            "Supported schemes are: " +
            ("" + ModelStoreManagerRegistry.getSchemes().join(',')));
    }
    var parts = url.split(URL_SCHEME_SUFFIX);
    return { scheme: parts[0], path: parts[1] };
}
// Lists all stored models across every registered scheme. Returns a map from
// fully-qualified "scheme" + URL_SCHEME_SUFFIX + "path" URLs to each model's info.
// NOTE(review): compiled async state machine — labels 1-3 form the loop over
// schemes; label 2 resumes after each manager's listModels() promise settles.
function listModels() {
    return __awaiter$9(this, void 0, void 0, function () {
        var schemes, out, _i, schemes_1, scheme, schemeOut, path, url;
        return __generator$9(this, function (_a) {
            switch (_a.label) {
                case 0:
                    schemes = ModelStoreManagerRegistry.getSchemes();
                    out = {};
                    _i = 0, schemes_1 = schemes;
                    _a.label = 1;
                case 1:
                    if (!(_i < schemes_1.length)) return [3, 4];
                    scheme = schemes_1[_i];
                    return [4, ModelStoreManagerRegistry.getManager(scheme).listModels()];
                case 2:
                    schemeOut = _a.sent();
                    for (path in schemeOut) {
                        // Re-qualify each per-scheme path with its scheme prefix.
                        url = scheme + URL_SCHEME_SUFFIX + path;
                        out[url] = schemeOut[path];
                    }
                    _a.label = 3;
                case 3:
                    _i++;
                    return [3, 1];
                case 4: return [2, out];
            }
        });
    });
}
// Removes the model stored at `url` ("scheme://path") by delegating to the
// store manager registered for that scheme. Resolves with the manager's
// removeModel result.
function removeModel(url) {
    return __awaiter$9(this, void 0, void 0, function () {
        var schemeAndPath, manager;
        return __generator$9(this, function (_a) {
            switch (_a.label) {
                case 0:
                    schemeAndPath = parseURL(url);
                    manager = ModelStoreManagerRegistry.getManager(schemeAndPath.scheme);
                    return [4, manager.removeModel(schemeAndPath.path)];
                case 1: return [2, _a.sent()];
            }
        });
    });
}
/**
 * Copies a model from sourceURL to destURL, optionally deleting the source
 * (move semantics). Resolves with the saved model's modelArtifactsInfo.
 *
 * When source and destination share the same medium (scheme), the source is
 * removed *before* saving — presumably to free storage quota for the save;
 * otherwise it is removed after the save succeeds. TODO confirm rationale.
 */
function cloneModelInternal(sourceURL, destURL, deleteSource) {
    if (deleteSource === void 0) { deleteSource = false; }
    return __awaiter$9(this, void 0, void 0, function () {
        var loadHandlers, loadHandler, saveHandlers, saveHandler, sourceScheme, sourcePath, sameMedium, modelArtifacts, saveResult;
        return __generator$9(this, function (_a) {
            switch (_a.label) {
                case 0:
                    assert(sourceURL !== destURL, "Old path and new path are the same: '" + sourceURL + "'");
                    loadHandlers = IORouterRegistry.getLoadHandlers(sourceURL);
                    assert(loadHandlers.length > 0, "Copying failed because no load handler is found for source URL " + sourceURL + ".");
                    assert(loadHandlers.length < 2, "Copying failed because more than one (" + loadHandlers.length + ") " +
                        ("load handlers for source URL " + sourceURL + "."));
                    loadHandler = loadHandlers[0];
                    saveHandlers = IORouterRegistry.getSaveHandlers(destURL);
                    assert(saveHandlers.length > 0, "Copying failed because no save handler is found for destination URL " +
                        (destURL + "."));
                    // Bug fix: report the number of *save* handlers; the message
                    // previously interpolated loadHandlers.length.
                    assert(saveHandlers.length < 2, "Copying failed because more than one (" + saveHandlers.length + ") " +
                        ("save handlers for destination URL " + destURL + "."));
                    saveHandler = saveHandlers[0];
                    sourceScheme = parseURL(sourceURL).scheme;
                    sourcePath = parseURL(sourceURL).path;
                    // Bug fix: compare against the *destination* scheme. The old code
                    // parsed sourceURL twice, so sameMedium was always true and a
                    // cross-medium move deleted the source before the save.
                    sameMedium = sourceScheme === parseURL(destURL).scheme;
                    return [4, loadHandler.load()];
                case 1:
                    modelArtifacts = _a.sent();
                    // Same medium + move: delete the source before saving.
                    if (!(deleteSource && sameMedium)) return [3, 3];
                    return [4, ModelStoreManagerRegistry.getManager(sourceScheme)
                            .removeModel(sourcePath)];
                case 2:
                    _a.sent();
                    _a.label = 3;
                case 3: return [4, saveHandler.save(modelArtifacts)];
                case 4:
                    saveResult = _a.sent();
                    // Cross-medium move: delete the source only after a successful save.
                    if (!(deleteSource && !sameMedium)) return [3, 6];
                    return [4, ModelStoreManagerRegistry.getManager(sourceScheme)
                            .removeModel(sourcePath)];
                case 5:
                    _a.sent();
                    _a.label = 6;
                case 6: return [2, saveResult.modelArtifactsInfo];
            }
        });
    });
}
// Copies a model from sourceURL to destURL, keeping the source
// (cloneModelInternal with deleteSource = false).
function copyModel(sourceURL, destURL) {
    return __awaiter$9(this, void 0, void 0, function () {
        var deleteSource;
        return __generator$9(this, function (_a) {
            switch (_a.label) {
                case 0:
                    deleteSource = false;
                    return [4, cloneModelInternal(sourceURL, destURL, deleteSource)];
                case 1: return [2, _a.sent()];
            }
        });
    });
}
// Moves a model from sourceURL to destURL, deleting the source
// (cloneModelInternal with deleteSource = true).
function moveModel(sourceURL, destURL) {
    return __awaiter$9(this, void 0, void 0, function () {
        var deleteSource;
        return __generator$9(this, function (_a) {
            switch (_a.label) {
                case 0:
                    deleteSource = true;
                    return [4, cloneModelInternal(sourceURL, destURL, deleteSource)];
                case 1: return [2, _a.sent()];
            }
        });
    });
}
var __awaiter$a = (undefined && undefined.__awaiter) || function (thisArg, _arguments, P, generator) {
return new (P || (P = Promise))(function (resolve, reject) {
......@@ -12878,6 +13344,121 @@
BrowserDownloads.URL_SCHEME = 'downloads://';
return BrowserDownloads;
}());
// IOHandler that loads a model from user-selected browser File objects:
// files[0] is the model JSON (topology + weightsManifest); the remaining
// files are the binary weight shards referenced by that manifest.
var BrowserFiles = (function () {
    function BrowserFiles(files) {
        if (files == null || files.length < 1) {
            throw new Error("When calling browserFiles, at least 1 file is required, " +
                ("but received " + files));
        }
        this.files = files;
    }
    // Reads the JSON file, validates its manifest against the provided weight
    // files, then reads every shard and resolves with the assembled artifacts.
    BrowserFiles.prototype.load = function () {
        return __awaiter$c(this, void 0, void 0, function () {
            var _this = this;
            var jsonFile, weightFiles;
            return __generator$c(this, function (_a) {
                jsonFile = this.files[0];
                weightFiles = this.files.slice(1);
                return [2, new Promise(function (resolve, reject) {
                        var jsonReader = new FileReader();
                        jsonReader.onload = function (event) {
                            var modelJSON = JSON.parse(event.target.result);
                            var modelTopology = modelJSON.modelTopology;
                            if (modelTopology == null) {
                                reject(new Error("modelTopology field is missing from file " + jsonFile.name));
                                return;
                            }
                            // Topology-only model: resolve with no weights.
                            // NOTE(review): there is no `return` here, so execution falls
                            // through; any later reject() is a no-op because the promise
                            // is already settled — confirm this is intentional.
                            if (weightFiles.length === 0) {
                                resolve({ modelTopology: modelTopology });
                            }
                            var weightsManifest = modelJSON.weightsManifest;
                            if (weightsManifest == null) {
                                reject(new Error("weightManifest field is missing from file " + jsonFile.name));
                                return;
                            }
                            var pathToFile;
                            try {
                                pathToFile =
                                    _this.checkManifestAndWeightFiles(weightsManifest, weightFiles);
                            }
                            catch (err) {
                                reject(err);
                                return;
                            }
                            var weightSpecs = [];
                            var paths = [];
                            var perFileBuffers = [];
                            // First pass: collect weight specs and reserve a buffer slot
                            // per manifest path (null marks "not yet read").
                            weightsManifest.forEach(function (weightsGroup) {
                                weightsGroup.paths.forEach(function (path) {
                                    paths.push(path);
                                    perFileBuffers.push(null);
                                });
                                weightSpecs.push.apply(weightSpecs, weightsGroup.weights);
                            });
                            // Second pass: read every shard; resolve once all slots are
                            // filled (reads complete in arbitrary order).
                            weightsManifest.forEach(function (weightsGroup) {
                                weightsGroup.paths.forEach(function (path) {
                                    var weightFileReader = new FileReader();
                                    weightFileReader.onload = function (event) {
                                        var weightData = event.target.result;
                                        var index = paths.indexOf(path);
                                        perFileBuffers[index] = weightData;
                                        if (perFileBuffers.indexOf(null) === -1) {
                                            resolve({
                                                modelTopology: modelTopology,
                                                weightSpecs: weightSpecs,
                                                weightData: concatenateArrayBuffers(perFileBuffers),
                                            });
                                        }
                                    };
                                    weightFileReader.onerror = function (error) {
                                        reject("Failed to weights data from file of path '" + path + "'.");
                                        return;
                                    };
                                    weightFileReader.readAsArrayBuffer(pathToFile[path]);
                                });
                            });
                        };
                        jsonReader.onerror = function (error) {
                            reject("Failed to read model topology and weights manifest JSON " +
                                ("from file '" + jsonFile.name + "'. BrowserFiles supports loading ") +
                                "Keras-style tf.Model artifacts only.");
                            return;
                        };
                        jsonReader.readAsText(jsonFile);
                    })];
            });
        });
    };
    // Maps each manifest path to its provided File object, matching by file
    // basename. Throws on duplicate basenames, on manifest paths with no
    // matching file, and on a count mismatch between manifest and files.
    BrowserFiles.prototype.checkManifestAndWeightFiles = function (manifest, files) {
        var basenames = [];
        var fileNames = files.map(function (file) { return basename(file.name); });
        var pathToFile = {};
        for (var _i = 0, manifest_1 = manifest; _i < manifest_1.length; _i++) {
            var group = manifest_1[_i];
            group.paths.forEach(function (path) {
                var pathBasename = basename(path);
                if (basenames.indexOf(pathBasename) !== -1) {
                    throw new Error("Duplicate file basename found in weights manifest: " +
                        ("'" + pathBasename + "'"));
                }
                basenames.push(pathBasename);
                if (fileNames.indexOf(pathBasename) === -1) {
                    throw new Error("Weight file with basename '" + pathBasename + "' is not provided.");
                }
                else {
                    pathToFile[path] = files[fileNames.indexOf(pathBasename)];
                }
            });
        }
        if (basenames.length !== files.length) {
            throw new Error("Mismatch in the number of files in weights manifest " +
                ("(" + basenames.length + ") and the number of weight files provided ") +
                ("(" + files.length + ")."));
        }
        return pathToFile;
    };
    return BrowserFiles;
}());
var browserDownloadsRouter = function (url) {
if (!ENV.get('IS_BROWSER')) {
return null;
......@@ -12896,6 +13477,9 @@
if (fileNamePrefix === void 0) { fileNamePrefix = 'model'; }
return new BrowserDownloads(fileNamePrefix);
}
// Factory for an IOHandler that loads a model from user-provided File objects.
function browserFiles(files) {
    var handler = new BrowserFiles(files);
    return handler;
}
var __awaiter$d = (undefined && undefined.__awaiter) || function (thisArg, _arguments, P, generator) {
return new (P || (P = Promise))(function (resolve, reject) {
......@@ -13045,6 +13629,166 @@
if (op[0] & 5) throw op[1]; return { value: op[0] ? op[1] : void 0, done: true };
}
};
/**
 * Fetches and decodes weights described by a weights manifest.
 *
 * manifest: groups of weight entries, each group backed by one or more shard
 *   files (`paths`). filePathPrefix: URL prefix for the shard files.
 * weightNames: optional subset of weight names to load (null loads all).
 * requestOptions: passed through to fetch().
 * Returns a Promise of a map from weight name to Tensor.
 */
function loadWeights(manifest, filePathPrefix, weightNames, requestOptions) {
    if (filePathPrefix === void 0) { filePathPrefix = ''; }
    return __awaiter$e(this, void 0, void 0, function () {
        var groupIndicesToFetchMap, groupWeightsToFetch, weightsFound, allManifestWeightNames, weightsNotFound, groupIndicesToFetch, requests, responses, buffers, weightsTensorMap, bufferIndexOffset;
        return __generator$e(this, function (_a) {
            switch (_a.label) {
                case 0:
                    groupIndicesToFetchMap = manifest.map(function () { return false; });
                    groupWeightsToFetch = {};
                    weightsFound = weightNames != null ? weightNames.map(function () { return false; }) : [];
                    allManifestWeightNames = [];
                    // Walk the manifest, computing each entry's byte offset within
                    // its group and marking which groups must be fetched.
                    manifest.forEach(function (manifestGroupConfig, groupIndex) {
                        var groupOffset = 0;
                        manifestGroupConfig.weights.forEach(function (weightsEntry) {
                            // Quantized entries are stored with the quantized dtype's
                            // byte width, not the logical dtype's.
                            var rawDtype = ('quantization' in weightsEntry) ?
                                weightsEntry.quantization.dtype :
                                weightsEntry.dtype;
                            var weightsBytes = DTYPE_VALUE_SIZE_MAP[rawDtype] *
                                sizeFromShape(weightsEntry.shape);
                            var enqueueWeightsForFetchingFn = function () {
                                groupIndicesToFetchMap[groupIndex] = true;
                                if (groupWeightsToFetch[groupIndex] == null) {
                                    groupWeightsToFetch[groupIndex] = [];
                                }
                                groupWeightsToFetch[groupIndex].push({
                                    manifestEntry: weightsEntry,
                                    groupOffset: groupOffset,
                                    sizeBytes: weightsBytes
                                });
                            };
                            if (weightNames != null) {
                                weightNames.forEach(function (weightName, weightIndex) {
                                    if (weightName === weightsEntry.name) {
                                        enqueueWeightsForFetchingFn();
                                        weightsFound[weightIndex] = true;
                                    }
                                });
                            }
                            else {
                                enqueueWeightsForFetchingFn();
                            }
                            allManifestWeightNames.push(weightsEntry.name);
                            groupOffset += weightsBytes;
                        });
                    });
                    // Every requested name must exist somewhere in the manifest.
                    if (!weightsFound.every(function (found) { return found; })) {
                        weightsNotFound = weightNames.filter(function (weight, i) { return !weightsFound[i]; });
                        throw new Error("Could not find weights in manifest with names: " +
                            (weightsNotFound.join(', ') + ". \n") +
                            "Manifest JSON has weights with names: " +
                            (allManifestWeightNames.join(', ') + "."));
                    }
                    groupIndicesToFetch = groupIndicesToFetchMap.reduce(function (accumulator, shouldFetch, i) {
                        if (shouldFetch) {
                            accumulator.push(i);
                        }
                        return accumulator;
                    }, []);
                    requests = [];
                    // Fetch every shard of every needed group, in manifest order.
                    groupIndicesToFetch.forEach(function (i) {
                        manifest[i].paths.forEach(function (filepath) {
                            // NOTE(review): when filePathPrefix is '' this yields
                            // '/<filepath>' (site-root relative) — confirm intended.
                            var fetchUrl = filePathPrefix +
                                (!filePathPrefix.endsWith('/') ? '/' : '') + filepath;
                            requests.push(fetch(fetchUrl, requestOptions));
                        });
                    });
                    return [4, Promise.all(requests)];
                case 1:
                    responses = _a.sent();
                    return [4, Promise.all(responses.map(function (response) { return response.arrayBuffer(); }))];
                case 2:
                    buffers = _a.sent();
                    weightsTensorMap = {};
                    bufferIndexOffset = 0;
                    groupIndicesToFetch.forEach(function (i) {
                        var numBuffers = manifest[i].paths.length;
                        // Concatenate this group's shard buffers into one contiguous
                        // buffer so groupOffset-based slicing works.
                        var groupBytes = 0;
                        for (var i_1 = 0; i_1 < numBuffers; i_1++) {
                            groupBytes += buffers[bufferIndexOffset + i_1].byteLength;
                        }
                        var groupBuffer = new ArrayBuffer(groupBytes);
                        var groupByteBuffer = new Uint8Array(groupBuffer);
                        var groupBufferOffset = 0;
                        for (var i_2 = 0; i_2 < numBuffers; i_2++) {
                            var buffer$$1 = new Uint8Array(buffers[bufferIndexOffset + i_2]);
                            groupByteBuffer.set(buffer$$1, groupBufferOffset);
                            groupBufferOffset += buffer$$1.byteLength;
                        }
                        var weightsEntries = groupWeightsToFetch[i];
                        weightsEntries.forEach(function (weightsEntry) {
                            var byteBuffer = groupBuffer.slice(weightsEntry.groupOffset, weightsEntry.groupOffset + weightsEntry.sizeBytes);
                            var typedArray;
                            var dtype = weightsEntry.manifestEntry.dtype;
                            if ('quantization' in weightsEntry.manifestEntry) {
                                // Dequantize: v * scale + min, rounded for int32 targets.
                                var quantization_1 = weightsEntry.manifestEntry.quantization;
                                if (quantization_1.dtype !== 'uint8' && quantization_1.dtype !== 'uint16') {
                                    throw new Error("Weight " + weightsEntry.manifestEntry.name + " has unknown " +
                                        ("quantization dtype " + quantization_1.dtype + "."));
                                }
                                var quantizedArray = (quantization_1.dtype === 'uint8') ?
                                    new Uint8Array(byteBuffer) :
                                    new Uint16Array(byteBuffer);
                                if (dtype === 'float32') {
                                    typedArray = Float32Array.from(quantizedArray, function (v) { return v * quantization_1.scale + quantization_1.min; });
                                }
                                else if (dtype === 'int32') {
                                    typedArray = Int32Array.from(quantizedArray, function (v) { return Math.round(v * quantization_1.scale + quantization_1.min); });
                                }
                                else {
                                    throw new Error("Weight " + weightsEntry.manifestEntry.name + " has a dtype not " +
                                        ("supported by quantization: " + dtype));
                                }
                            }
                            else {
                                if (dtype === 'float32') {
                                    typedArray = new Float32Array(byteBuffer);
                                }
                                else if (dtype === 'int32') {
                                    typedArray = new Int32Array(byteBuffer);
                                }
                                else {
                                    throw new Error("Weight " + weightsEntry.manifestEntry.name + " has unknown dtype " +
                                        (dtype + "."));
                                }
                            }
                            var weightName = weightsEntry.manifestEntry.name;
                            if (weightsTensorMap[weightName] != null) {
                                throw new Error("Duplicate weight with name " + weightName + ". " +
                                    "Please make sure weights names are unique in the manifest JSON.");
                            }
                            weightsTensorMap[weightName] = tensor(typedArray, weightsEntry.manifestEntry.shape, weightsEntry.manifestEntry.dtype);
                        });
                        bufferIndexOffset += numBuffers;
                    });
                    return [2, weightsTensorMap];
            }
        });
    });
}
// Aliases for the IORouterRegistry static methods, re-exported below.
var registerSaveRouter = IORouterRegistry.registerSaveRouter;
var registerLoadRouter = IORouterRegistry.registerLoadRouter;
var getSaveHandlers = IORouterRegistry.getSaveHandlers;
var getLoadHandlers = IORouterRegistry.getLoadHandlers;
// Public `tf.io` namespace: model save/load handlers, routers and weight loading.
var io = /*#__PURE__*/Object.freeze({
    browserFiles: browserFiles,
    browserHTTPRequest: browserHTTPRequest,
    copyModel: copyModel,
    decodeWeights: decodeWeights,
    encodeWeights: encodeWeights,
    getLoadHandlers: getLoadHandlers,
    getSaveHandlers: getSaveHandlers,
    listModels: listModels,
    loadWeights: loadWeights,
    moveModel: moveModel,
    registerLoadRouter: registerLoadRouter,
    registerSaveRouter: registerSaveRouter,
    removeModel: removeModel
});
var Serializable = (function () {
function Serializable() {
......@@ -13074,6 +13818,128 @@
return SerializationMap;
}());
// Public `tf.serialization` namespace.
var serialization = /*#__PURE__*/Object.freeze({
    Serializable: Serializable,
    SerializationMap: SerializationMap
});
// Backend-constraint descriptors used by the test utilities to select which
// backend(s) a test runs against.
var WEBGL_ENVS = {
    'BACKEND': 'test-webgl'
};
var CPU_ENVS = {
    'BACKEND': 'test-cpu'
};
// Empty constraints: run on every registered backend.
var ALL_ENVS = {};
// Default absolute tolerance for the float comparisons in the expect* helpers.
var TEST_EPSILON = 1e-3;
/**
 * Asserts element-wise closeness (within `epsilon`) of two values, each either
 * a Tensor or a plain/typed array. Throws with a descriptive message on any
 * mismatch of type, dtype, shape, length or element value.
 */
function expectArraysClose(actual, expected, epsilon) {
    if (epsilon === void 0) { epsilon = TEST_EPSILON; }
    // Both plain arrays: their concrete constructors must match
    // (e.g. Float32Array vs Array).
    if (!(actual instanceof Tensor) && !(expected instanceof Tensor)) {
        var aType = actual.constructor.name;
        var bType = expected.constructor.name;
        if (aType !== bType) {
            throw new Error("Arrays are of different type actual: " + aType + " " +
                ("vs expected: " + bType));
        }
    }
    else if (actual instanceof Tensor && expected instanceof Tensor) {
        // Both Tensors: dtype and shape must match exactly.
        if (actual.dtype !== expected.dtype) {
            throw new Error("Arrays are of different type actual: " + actual.dtype + " " +
                ("vs expected: " + expected.dtype + "."));
        }
        if (!arraysEqual(actual.shape, expected.shape)) {
            throw new Error("Arrays are of different shape actual: " + actual.shape + " " +
                ("vs expected: " + expected.shape + "."));
        }
    }
    // NOTE(review): a Tensor/array mix skips both checks above and is
    // compared by value only — confirm that is the intended contract.
    var actualValues;
    var expectedValues;
    if (actual instanceof Tensor) {
        actualValues = actual.dataSync();
    }
    else {
        actualValues = actual;
    }
    if (expected instanceof Tensor) {
        expectedValues = expected.dataSync();
    }
    else {
        expectedValues = expected;
    }
    if (actualValues.length !== expectedValues.length) {
        throw new Error("Arrays have different lengths actual: " + actualValues.length + " vs " +
            ("expected: " + expectedValues.length + ".\n") +
            ("Actual: " + actualValues + ".\n") +
            ("Expected: " + expectedValues + "."));
    }
    // Element-wise comparison; areClose treats NaN as equal to NaN.
    for (var i = 0; i < expectedValues.length; ++i) {
        var a = actualValues[i];
        var e = expectedValues[i];
        if (!areClose(a, Number(e), epsilon)) {
            throw new Error("Arrays differ: actual[" + i + "] = " + a + ", expected[" + i + "] = " + e + ".\n" +
                ("Actual: " + actualValues + ".\n") +
                ("Expected: " + expectedValues + "."));
        }
    }
}
function expectPromiseToFail(fn, done) {
fn().then(function () { return done.fail(); }, function () { return done(); });
}
// Exact equality assertion: expectArraysClose with a zero tolerance.
function expectArraysEqual(actual, expected) {
    return expectArraysClose(actual, expected, 0);
}
// Throws unless `a` and `e` are within `epsilon` of each other
// (NaN is considered equal to NaN — see areClose).
function expectNumbersClose(a, e, epsilon) {
    if (epsilon === void 0) { epsilon = TEST_EPSILON; }
    if (areClose(a, e, epsilon)) {
        return;
    }
    throw new Error("Numbers differ: actual === " + a + ", expected === " + e);
}
// True when |a - e| <= epsilon. Two NaNs count as close; a single NaN does not.
function areClose(a, e, epsilon) {
    var bothNaN = isNaN(a) && isNaN(e);
    if (bothNaN) {
        return true;
    }
    var eitherNaN = isNaN(a) || isNaN(e);
    return !eitherNaN && Math.abs(a - e) <= epsilon;
}
// Throws if any element of `actual` (Tensor or array) lies outside [low, high].
function expectValuesInRange(actual, low, high) {
    var actualVals = actual instanceof Tensor ? actual.dataSync() : actual;
    for (var i = 0; i < actualVals.length; i++) {
        var value = actualVals[i];
        if (value < low || value > high) {
            throw new Error("Value out of range:" + actualVals[i] + " low: " + low + ", high: " + high);
        }
    }
}
// Public `tf.test_util` namespace: assertion helpers and backend descriptors.
var test_util = /*#__PURE__*/Object.freeze({
    WEBGL_ENVS: WEBGL_ENVS,
    CPU_ENVS: CPU_ENVS,
    ALL_ENVS: ALL_ENVS,
    TEST_EPSILON: TEST_EPSILON,
    expectArraysClose: expectArraysClose,
    expectPromiseToFail: expectPromiseToFail,
    expectArraysEqual: expectArraysEqual,
    expectNumbersClose: expectNumbersClose,
    expectValuesInRange: expectValuesInRange
});
// Version string of the bundled tfjs-core build (exported as version_core).
var version = '0.11.0';
// Public `tf.webgl` namespace re-exporting the WebGL backend internals.
var webgl = /*#__PURE__*/Object.freeze({
    gpgpu_util: gpgpu_util,
    webgl_util: webgl_util,
    MathBackendWebGL: MathBackendWebGL,
    GPGPUContext: GPGPUContext
});
var __extends$2 = (undefined && undefined.__extends) || (function () {
var extendStatics = Object.setPrototypeOf ||
({ __proto__: [] } instanceof Array && function (d, b) { d.__proto__ = b; }) ||
......@@ -13841,6 +14707,229 @@
return OptimizerConstructors;
}());
// Public `tf.train` namespace: optimizer factory functions.
var train = {
    sgd: OptimizerConstructors.sgd,
    momentum: OptimizerConstructors.momentum,
    adadelta: OptimizerConstructors.adadelta,
    adagrad: OptimizerConstructors.adagrad,
    rmsprop: OptimizerConstructors.rmsprop,
    adamax: OptimizerConstructors.adamax,
    adam: OptimizerConstructors.adam
};
// Top-level aliases of Environment/BrowserUtil statics for the `tf` namespace.
var setBackend = Environment.setBackend;
var getBackend = Environment.getBackend;
var disposeVariables = Environment.disposeVariables;
var memory = Environment.memory;
var nextFrame = BrowserUtil.nextFrame;
// Frozen module object holding the entire tfjs-core public API; the wrapper
// library re-exports this object as `tf` (see `exports.tf = index` below).
var index = /*#__PURE__*/Object.freeze({
    setBackend: setBackend,
    getBackend: getBackend,
    disposeVariables: disposeVariables,
    memory: memory,
    version_core: version,
    nextFrame: nextFrame,
    environment: environment,
    io: io,
    serialization: serialization,
    test_util: test_util,
    util: util,
    webgl: webgl,
    // Optimizers and core classes.
    AdadeltaOptimizer: AdadeltaOptimizer,
    AdagradOptimizer: AdagradOptimizer,
    AdamOptimizer: AdamOptimizer,
    AdamaxOptimizer: AdamaxOptimizer,
    MomentumOptimizer: MomentumOptimizer,
    Optimizer: Optimizer,
    RMSPropOptimizer: RMSPropOptimizer,
    SGDOptimizer: SGDOptimizer,
    Tensor: Tensor,
    TensorBuffer: TensorBuffer,
    variable: variable,
    Variable: Variable,
    get Rank () { return Rank; },
    get Reduction () { return Reduction; },
    ENV: ENV,
    Environment: Environment,
    doc: doc,
    // Ops.
    batchNormalization: batchNormalization,
    batchNormalization2d: batchNormalization2d,
    batchNormalization3d: batchNormalization3d,
    batchNormalization4d: batchNormalization4d,
    concat: concat,
    concat1d: concat1d,
    concat2d: concat2d,
    concat3d: concat3d,
    concat4d: concat4d,
    conv1d: conv1d,
    conv2d: conv2d,
    conv2dTranspose: conv2dTranspose,
    depthwiseConv2d: depthwiseConv2d,
    separableConv2d: separableConv2d,
    matMul: matMul,
    matrixTimesVector: matrixTimesVector,
    outerProduct: outerProduct,
    vectorTimesMatrix: vectorTimesMatrix,
    avgPool: avgPool,
    maxPool: maxPool,
    transpose: transpose,
    reverse: reverse,
    reverse1d: reverse1d,
    reverse2d: reverse2d,
    reverse3d: reverse3d,
    reverse4d: reverse4d,
    slice: slice,
    slice1d: slice1d,
    slice2d: slice2d,
    slice3d: slice3d,
    slice4d: slice4d,
    stridedSlice: stridedSlice,
    argMax: argMax,
    argMin: argMin,
    logSumExp: logSumExp,
    max: max,
    mean: mean,
    min: min,
    moments: moments,
    sum: sum,
    unsortedSegmentSum: unsortedSegmentSum,
    equal: equal,
    equalStrict: equalStrict,
    greater: greater,
    greaterStrict: greaterStrict,
    greaterEqual: greaterEqual,
    greaterEqualStrict: greaterEqualStrict,
    less: less,
    lessStrict: lessStrict,
    lessEqual: lessEqual,
    lessEqualStrict: lessEqualStrict,
    notEqual: notEqual,
    notEqualStrict: notEqualStrict,
    logicalNot: logicalNot,
    logicalAnd: logicalAnd,
    logicalOr: logicalOr,
    logicalXor: logicalXor,
    where: where,
    abs: abs,
    acos: acos,
    acosh: acosh,
    asin: asin,
    asinh: asinh,
    atan: atan,
    atanh: atanh,
    ceil: ceil,
    clipByValue: clipByValue,
    cos: cos,
    cosh: cosh,
    elu: elu,
    exp: exp,
    expm1: expm1,
    floor: floor,
    sign: sign,
    leakyRelu: leakyRelu,
    log: log,
    log1p: log1p,
    logSigmoid: logSigmoid,
    neg: neg,
    prelu: prelu,
    relu: relu,
    reciprocal: reciprocal,
    round: round,
    selu: selu,
    sigmoid: sigmoid,
    sin: sin,
    sinh: sinh,
    softplus: softplus,
    sqrt: sqrt,
    rsqrt: rsqrt,
    square: square,
    step: step,
    tan: tan,
    tanh: tanh$1,
    erf: erf,
    add: add,
    addStrict: addStrict,
    atan2: atan2,
    div: div,
    divStrict: divStrict,
    maximum: maximum,
    maximumStrict: maximumStrict,
    minimum: minimum,
    minimumStrict: minimumStrict,
    mod: mod,
    modStrict: modStrict,
    mul: mul,
    mulStrict: mulStrict,
    pow: pow,
    powStrict: powStrict,
    sub: sub,
    subStrict: subStrict,
    squaredDifference: squaredDifference,
    squaredDifferenceStrict: squaredDifferenceStrict,
    norm: norm,
    cast: cast,
    clone: clone,
    fromPixels: fromPixels,
    toPixels: toPixels,
    // Tensor creation and manipulation.
    ones: ones,
    onesLike: onesLike,
    zeros: zeros,
    zerosLike: zerosLike,
    eye: eye,
    rand: rand,
    randomNormal: randomNormal,
    truncatedNormal: truncatedNormal,
    randomUniform: randomUniform,
    multinomial: multinomial,
    reshape: reshape,
    squeeze: squeeze,
    tile: tile,
    gather: gather,
    oneHot: oneHot,
    linspace: linspace,
    range: range,
    buffer: buffer,
    fill: fill,
    tensor: tensor,
    scalar: scalar,
    tensor1d: tensor1d,
    tensor2d: tensor2d,
    tensor3d: tensor3d,
    tensor4d: tensor4d,
    print: print,
    expandDims: expandDims,
    stack: stack,
    unstack: unstack,
    split: split,
    cumsum: cumsum,
    pad: pad,
    pad1d: pad1d,
    pad2d: pad2d,
    pad3d: pad3d,
    pad4d: pad4d,
    movingAverage: movingAverage,
    basicLSTMCell: basicLSTMCell,
    multiRNNCell: multiRNNCell,
    softmax: softmax,
    localResponseNormalization: localResponseNormalization,
    linalg: linalg,
    losses: losses,
    image: image,
    operation: operation,
    train: train,
    // Memory management and autodiff.
    tidy: tidy,
    keep: keep,
    dispose: dispose,
    time: time,
    grad: grad,
    valueAndGrad: valueAndGrad,
    grads: grads,
    valueAndGrads: valueAndGrads,
    variableGrads: variableGrads,
    customGrad: customGrad
});
// True when `num` has a non-zero fractional part (NaN and Infinity included).
function isFloat(num) {
    var fractionalPart = num % 1;
    return fractionalPart !== 0;
}
......@@ -13860,7 +14949,23 @@
}
return ctx;
}
function createCanvas(_a) {
var width = _a.width, height = _a.height;
var canvas = document.createElement('canvas');
canvas.width = width;
canvas.height = height;
return canvas;
}
// Creates a canvas of the given size and paints `buf` into it as ImageData.
function createCanvasWithImageData(dims, buf) {
    var canvas = createCanvas(dims);
    var imageData = new ImageData(buf, dims.width, dims.height);
    getContext2dOrThrow(canvas).putImageData(imageData, 0, 0);
    return canvas;
}
function getMediaDimensions(media) {
if (media instanceof HTMLImageElement) {
return { width: media.naturalWidth, height: media.naturalHeight };
}
if (media instanceof HTMLVideoElement) {
return { width: media.videoWidth, height: media.videoHeight };
}
......@@ -13882,15 +14987,24 @@
reader.readAsDataURL(buf);
});
}
// Default styling shared by the draw* helpers when no options are supplied.
function getDefaultDrawOptions() {
    var defaults = {
        color: 'blue',
        lineWidth: 2,
        fontSize: 20,
        fontStyle: 'Georgia'
    };
    return defaults;
}
function drawBox(ctx, x, y, w, h, options) {
ctx.strokeStyle = options.color;
ctx.lineWidth = options.lineWidth;
ctx.strokeRect(x, y, w, h);
}
function drawText(ctx, x, y, text, options) {
var padText = 2 + options.lineWidth;
ctx.fillStyle = options.color;
ctx.font = options.fontSize + "px " + options.fontStyle;
ctx.fillText(text, x, y);
ctx.fillText(text, x + padText, y + padText + (options.fontSize * 0.6));
}
function drawDetection(canvasArg, detection, options) {
var canvas = getElement(canvasArg);
......@@ -13902,13 +15016,13 @@
: [detection];
detectionArray.forEach(function (det) {
var score = det.score, box = det.box;
var left = box.left, right = box.right, top = box.top, bottom = box.bottom;
var _a = (options || {}), _b = _a.color, color = _b === void 0 ? 'blue' : _b, _c = _a.lineWidth, lineWidth = _c === void 0 ? 2 : _c, _d = _a.fontSize, fontSize = _d === void 0 ? 20 : _d, _e = _a.fontStyle, fontStyle = _e === void 0 ? 'Georgia' : _e, _f = _a.withScore, withScore = _f === void 0 ? true : _f;
var padText = 2 + lineWidth;
var x = box.x, y = box.y, width = box.width, height = box.height;
var drawOptions = Object.assign(getDefaultDrawOptions(), (options || {}));
var withScore = Object.assign({ withScore: true }, (options || {})).withScore;
var ctx = getContext2dOrThrow(canvas);
drawBox(ctx, left, top, right - left, bottom - top, { lineWidth: lineWidth, color: color });
drawBox(ctx, x, y, width, height, drawOptions);
if (withScore) {
drawText(ctx, left + padText, top + (fontSize * 0.6) + padText, "" + round$1(score), { fontSize: fontSize, fontStyle: fontStyle, color: color });
drawText(ctx, x, y, "" + round$1(score), drawOptions);
}
});
}
......@@ -13942,9 +15056,7 @@
}
// if input is batch type, make sure every canvas has the same dimensions
var _a = this.dims || dims || getMediaDimensions(media), width = _a.width, height = _a.height;
var canvas = document.createElement('canvas');
canvas.width = width;
canvas.height = height;
var canvas = createCanvas({ width: width, height: height });
getContext2dOrThrow(canvas).drawImage(media, 0, 0, width, height);
this._canvases.push(canvas);
};
......@@ -14165,21 +15277,23 @@
}
var FaceDetectionResult = /** @class */ (function () {
function FaceDetectionResult(score, top, left, bottom, right) {
this.score = score;
this.top = Math.max(0, top),
this.left = Math.max(0, left),
this.bottom = Math.min(1.0, bottom),
this.right = Math.min(1.0, right);
function FaceDetectionResult(score, topRelative, leftRelative, bottomRelative, rightRelative) {
this._score = score;
this._topRelative = Math.max(0, topRelative),
this._leftRelative = Math.max(0, leftRelative),
this._bottomRelative = Math.min(1.0, bottomRelative),
this._rightRelative = Math.min(1.0, rightRelative);
}
FaceDetectionResult.prototype.forSize = function (width, height) {
var x = Math.floor(this._leftRelative * width);
var y = Math.floor(this._topRelative * height);
return {
score: this.score,
score: this._score,
box: {
top: this.top * height,
left: this.left * width,
bottom: this.bottom * height,
right: this.right * width
x: x,
y: y,
width: Math.floor(this._rightRelative * width) - x,
height: Math.floor(this._bottomRelative * height) - y
}
};
};
......@@ -14606,7 +15720,13 @@
var params = extractParams$1(weights);
function forward(input) {
return tidy(function () {
var x = normalize(padToSquare(getImageTensor(input)));
// TODO pad on both sides, to keep face centered
var x = padToSquare(getImageTensor(input));
// work with 150 x 150 sized face images
if (x.shape[1] !== 150 || x.shape[2] !== 150) {
x = image.resizeBilinear(x, [150, 150]);
}
x = normalize(x);
var out = convDown(x, params.conv32_down);
out = maxPool(out, 3, 2, 'valid');
out = residual(out, params.conv32_1);
......@@ -14655,16 +15775,64 @@
};
}
/**
 * Extracts the image regions containing the detected faces.
 *
 * @param image The image that face detection has been performed on.
 * @param detections The face detection results for that image.
 * @returns The Canvases of the corresponding image region for each detected face.
 */
function extractFaces(image, detections) {
    // NOTE(review): assumes `image` supports a 2d context (i.e. is a canvas)
    // since getContext2dOrThrow is called on it directly — confirm with callers.
    var srcCtx = getContext2dOrThrow(image);
    return detections.map(function (det) {
        var box = det.forSize(image.width, image.height).box;
        var faceImg = createCanvas({ width: box.width, height: box.height });
        var region = srcCtx.getImageData(box.x, box.y, box.width, box.height);
        getContext2dOrThrow(faceImg).putImageData(region, 0, 0);
        return faceImg;
    });
}
/**
 * Extracts the tensors of the image regions containing the detected faces.
 * Returned tensors have to be disposed manually once you don't need them anymore!
 * Useful if you want to compute the face descriptors for the face
 * images. Using this method is faster then extracting a canvas for each face and
 * converting them to tensors individually.
 *
 * @param image$$1 The image that face detection has been performed on.
 * @param detections The face detection results for that image.
 * @returns Tensors of the corresponding image region for each detected face.
 */
function extractFaceTensors(image$$1, detections) {
    return tidy(function () {
        var imgTensor = getImageTensor(image$$1);
        // TODO handle batches
        var shape = imgTensor.shape;
        var imgHeight = shape[1];
        var imgWidth = shape[2];
        var numChannels = shape[3];
        return detections.map(function (det) {
            var box = det.forSize(imgWidth, imgHeight).box;
            return slice(imgTensor, [0, box.y, box.x, 0], [1, box.height, box.width, numChannels]);
        });
    });
}
// Public API surface of the bundle: nets, tf re-export, face extraction
// helpers and the canvas/drawing utilities.
exports.euclideanDistance = euclideanDistance;
exports.faceDetectionNet = faceDetectionNet;
exports.faceRecognitionNet = faceRecognitionNet;
exports.NetInput = NetInput;
exports.tf = index;
exports.extractFaces = extractFaces;
exports.extractFaceTensors = extractFaceTensors;
exports.isFloat = isFloat;
exports.round = round$1;
exports.getElement = getElement;
exports.getContext2dOrThrow = getContext2dOrThrow;
exports.createCanvas = createCanvas;
exports.createCanvasWithImageData = createCanvasWithImageData;
exports.getMediaDimensions = getMediaDimensions;
exports.bufferToImage = bufferToImage;
exports.getDefaultDrawOptions = getDefaultDrawOptions;
exports.drawBox = drawBox;
exports.drawText = drawText;
exports.drawDetection = drawDetection;
......
This source diff could not be displayed because it is too large. You can view the blob instead.
import { FaceDetectionNet } from './types';
export declare class FaceDetectionResult {
private score;
private top;
private left;
private bottom;
private right;
constructor(score: number, top: number, left: number, bottom: number, right: number);
private _score;
private _topRelative;
private _leftRelative;
private _bottomRelative;
private _rightRelative;
constructor(score: number, topRelative: number, leftRelative: number, bottomRelative: number, rightRelative: number);
forSize(width: number, height: number): FaceDetectionNet.Detection;
}
var FaceDetectionResult = /** @class */ (function () {
function FaceDetectionResult(score, top, left, bottom, right) {
this.score = score;
this.top = Math.max(0, top),
this.left = Math.max(0, left),
this.bottom = Math.min(1.0, bottom),
this.right = Math.min(1.0, right);
function FaceDetectionResult(score, topRelative, leftRelative, bottomRelative, rightRelative) {
this._score = score;
this._topRelative = Math.max(0, topRelative),
this._leftRelative = Math.max(0, leftRelative),
this._bottomRelative = Math.min(1.0, bottomRelative),
this._rightRelative = Math.min(1.0, rightRelative);
}
FaceDetectionResult.prototype.forSize = function (width, height) {
var x = Math.floor(this._leftRelative * width);
var y = Math.floor(this._topRelative * height);
return {
score: this.score,
score: this._score,
box: {
top: this.top * height,
left: this.left * width,
bottom: this.bottom * height,
right: this.right * width
x: x,
y: y,
width: Math.floor(this._rightRelative * width) - x,
height: Math.floor(this._bottomRelative * height) - y
}
};
};
......
{"version":3,"file":"FaceDetectionResult.js","sourceRoot":"","sources":["../../src/faceDetectionNet/FaceDetectionResult.ts"],"names":[],"mappings":"AAEA;IAOE,6BACE,KAAa,EACb,GAAW,EACX,IAAY,EACZ,MAAc,EACd,KAAa;QAEb,IAAI,CAAC,KAAK,GAAG,KAAK,CAAA;QAClB,IAAI,CAAC,GAAG,GAAG,IAAI,CAAC,GAAG,CAAC,CAAC,EAAE,GAAG,CAAC;YAC3B,IAAI,CAAC,IAAI,GAAG,IAAI,CAAC,GAAG,CAAC,CAAC,EAAE,IAAI,CAAC;YAC7B,IAAI,CAAC,MAAM,GAAI,IAAI,CAAC,GAAG,CAAC,GAAG,EAAE,MAAM,CAAC;YACpC,IAAI,CAAC,KAAK,GAAG,IAAI,CAAC,GAAG,CAAC,GAAG,EAAE,KAAK,CAAC,CAAA;IACnC,CAAC;IAEM,qCAAO,GAAd,UAAe,KAAa,EAAE,MAAc;QAC1C,OAAO;YACL,KAAK,EAAE,IAAI,CAAC,KAAK;YACjB,GAAG,EAAE;gBACH,GAAG,EAAE,IAAI,CAAC,GAAG,GAAG,MAAM;gBACtB,IAAI,EAAE,IAAI,CAAC,IAAI,GAAG,KAAK;gBACvB,MAAM,EAAE,IAAI,CAAC,MAAM,GAAG,MAAM;gBAC5B,KAAK,EAAE,IAAI,CAAC,KAAK,GAAG,KAAK;aAC1B;SACF,CAAA;IACH,CAAC;IACH,0BAAC;AAAD,CAAC,AAhCD,IAgCC"}
\ No newline at end of file
{"version":3,"file":"FaceDetectionResult.js","sourceRoot":"","sources":["../../src/faceDetectionNet/FaceDetectionResult.ts"],"names":[],"mappings":"AAEA;IAOE,6BACE,KAAa,EACb,WAAmB,EACnB,YAAoB,EACpB,cAAsB,EACtB,aAAqB;QAErB,IAAI,CAAC,MAAM,GAAG,KAAK,CAAA;QACnB,IAAI,CAAC,YAAY,GAAG,IAAI,CAAC,GAAG,CAAC,CAAC,EAAE,WAAW,CAAC;YAC5C,IAAI,CAAC,aAAa,GAAG,IAAI,CAAC,GAAG,CAAC,CAAC,EAAE,YAAY,CAAC;YAC9C,IAAI,CAAC,eAAe,GAAI,IAAI,CAAC,GAAG,CAAC,GAAG,EAAE,cAAc,CAAC;YACrD,IAAI,CAAC,cAAc,GAAG,IAAI,CAAC,GAAG,CAAC,GAAG,EAAE,aAAa,CAAC,CAAA;IACpD,CAAC;IAEM,qCAAO,GAAd,UAAe,KAAa,EAAE,MAAc;QAC1C,IAAM,CAAC,GAAG,IAAI,CAAC,KAAK,CAAC,IAAI,CAAC,aAAa,GAAG,KAAK,CAAC,CAAA;QAChD,IAAM,CAAC,GAAG,IAAI,CAAC,KAAK,CAAC,IAAI,CAAC,YAAY,GAAG,MAAM,CAAC,CAAA;QAChD,OAAO;YACL,KAAK,EAAE,IAAI,CAAC,MAAM;YAClB,GAAG,EAAE;gBACH,CAAC,GAAA;gBACD,CAAC,GAAA;gBACD,KAAK,EAAE,IAAI,CAAC,KAAK,CAAC,IAAI,CAAC,cAAc,GAAG,KAAK,CAAC,GAAG,CAAC;gBAClD,MAAM,EAAE,IAAI,CAAC,KAAK,CAAC,IAAI,CAAC,eAAe,GAAG,MAAM,CAAC,GAAG,CAAC;aACtD;SACF,CAAA;IACH,CAAC;IACH,0BAAC;AAAD,CAAC,AAlCD,IAkCC"}
\ No newline at end of file
......@@ -56,10 +56,10 @@ export declare namespace FaceDetectionNet {
type Detection = {
score: number;
box: {
top: number;
left: number;
right: number;
bottom: number;
x: number;
y: number;
width: number;
height: number;
};
};
}
......@@ -10,7 +10,13 @@ export function faceRecognitionNet(weights) {
var params = extractParams(weights);
function forward(input) {
return tf.tidy(function () {
var x = normalize(padToSquare(getImageTensor(input)));
// TODO pad on both sides, to keep face centered
var x = padToSquare(getImageTensor(input));
// work with 150 x 150 sized face images
if (x.shape[1] !== 150 || x.shape[2] !== 150) {
x = tf.image.resizeBilinear(x, [150, 150]);
}
x = normalize(x);
var out = convDown(x, params.conv32_down);
out = tf.maxPool(out, 3, 2, 'valid');
out = residual(out, params.conv32_1);
......
{"version":3,"file":"index.js","sourceRoot":"","sources":["../../src/faceRecognitionNet/index.ts"],"names":[],"mappings":";AAAA,OAAO,KAAK,EAAE,MAAM,uBAAuB,CAAC;AAG5C,OAAO,EAAE,cAAc,EAAE,WAAW,EAAE,MAAM,oBAAoB,CAAC;AAEjE,OAAO,EAAE,QAAQ,EAAE,MAAM,aAAa,CAAC;AACvC,OAAO,EAAE,aAAa,EAAE,MAAM,iBAAiB,CAAC;AAChD,OAAO,EAAE,SAAS,EAAE,MAAM,aAAa,CAAC;AACxC,OAAO,EAAE,QAAQ,EAAE,YAAY,EAAE,MAAM,iBAAiB,CAAC;AAEzD,MAAM,6BAA6B,OAAqB;IAAxD,iBAuDC;IAtDC,IAAM,MAAM,GAAG,aAAa,CAAC,OAAO,CAAC,CAAA;IAErC,iBAAiB,KAAuC;QACtD,OAAO,EAAE,CAAC,IAAI,CAAC;YAEb,IAAM,CAAC,GAAG,SAAS,CAAC,WAAW,CAAC,cAAc,CAAC,KAAK,CAAC,CAAC,CAAC,CAAA;YAEvD,IAAI,GAAG,GAAG,QAAQ,CAAC,CAAC,EAAE,MAAM,CAAC,WAAW,CAAC,CAAA;YACzC,GAAG,GAAG,EAAE,CAAC,OAAO,CAAC,GAAG,EAAE,CAAC,EAAE,CAAC,EAAE,OAAO,CAAC,CAAA;YAEpC,GAAG,GAAG,QAAQ,CAAC,GAAG,EAAE,MAAM,CAAC,QAAQ,CAAC,CAAA;YACpC,GAAG,GAAG,QAAQ,CAAC,GAAG,EAAE,MAAM,CAAC,QAAQ,CAAC,CAAA;YACpC,GAAG,GAAG,QAAQ,CAAC,GAAG,EAAE,MAAM,CAAC,QAAQ,CAAC,CAAA;YAEpC,GAAG,GAAG,YAAY,CAAC,GAAG,EAAE,MAAM,CAAC,WAAW,CAAC,CAAA;YAC3C,GAAG,GAAG,QAAQ,CAAC,GAAG,EAAE,MAAM,CAAC,QAAQ,CAAC,CAAA;YACpC,GAAG,GAAG,QAAQ,CAAC,GAAG,EAAE,MAAM,CAAC,QAAQ,CAAC,CAAA;YACpC,GAAG,GAAG,QAAQ,CAAC,GAAG,EAAE,MAAM,CAAC,QAAQ,CAAC,CAAA;YAEpC,GAAG,GAAG,YAAY,CAAC,GAAG,EAAE,MAAM,CAAC,YAAY,CAAC,CAAA;YAC5C,GAAG,GAAG,QAAQ,CAAC,GAAG,EAAE,MAAM,CAAC,SAAS,CAAC,CAAA;YACrC,GAAG,GAAG,QAAQ,CAAC,GAAG,EAAE,MAAM,CAAC,SAAS,CAAC,CAAA;YAErC,GAAG,GAAG,YAAY,CAAC,GAAG,EAAE,MAAM,CAAC,YAAY,CAAC,CAAA;YAC5C,GAAG,GAAG,QAAQ,CAAC,GAAG,EAAE,MAAM,CAAC,SAAS,CAAC,CAAA;YACrC,GAAG,GAAG,QAAQ,CAAC,GAAG,EAAE,MAAM,CAAC,SAAS,CAAC,CAAA;YACrC,GAAG,GAAG,YAAY,CAAC,GAAG,EAAE,MAAM,CAAC,gBAAgB,CAAC,CAAA;YAEhD,IAAM,SAAS,GAAG,GAAG,CAAC,IAAI,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,CAAgB,CAAA;YACjD,IAAM,cAAc,GAAG,EAAE,CAAC,MAAM,CAAC,SAAS,EAAE,MAAM,CAAC,EAAE,CAAC,CAAA;YAEtD,OAAO,cAAc,CAAA;QACvB,CAAC,CAAC,CAAA;IACJ,CAAC;IAED,IAAM,qBAAqB,GAAG,UAAO,KAAuC;;;;;oBACpE,MAAM,GAAG,OAAO,CAAC,KAAK,CAAC,CAAA;oBAChB,qBAAM,MAAM,CAAC,IAAI,EAAE,EAAA;;oBAA1B,IAAI,GAAG,SAAmB;oBAChC,MAAM,CAAC,OAAO,EAAE,CAAA;oBAChB,sBAAO,IAAI,EA
AA;;;SACZ,CAAA;IAED,IAAM,yBAAyB,GAAG,UAAC,KAAuC;QACxE,IAAM,MAAM,GAAG,OAAO,CAAC,KAAK,CAAC,CAAA;QAC7B,IAAM,IAAI,GAAG,MAAM,CAAC,QAAQ,EAAE,CAAA;QAC9B,MAAM,CAAC,OAAO,EAAE,CAAA;QAChB,OAAO,IAAI,CAAA;IACb,CAAC,CAAA;IAED,OAAO;QACL,qBAAqB,uBAAA;QACrB,yBAAyB,2BAAA;QACzB,OAAO,SAAA;KACR,CAAA;AACH,CAAC"}
\ No newline at end of file
{"version":3,"file":"index.js","sourceRoot":"","sources":["../../src/faceRecognitionNet/index.ts"],"names":[],"mappings":";AAAA,OAAO,KAAK,EAAE,MAAM,uBAAuB,CAAC;AAG5C,OAAO,EAAE,cAAc,EAAE,WAAW,EAAE,MAAM,oBAAoB,CAAC;AAEjE,OAAO,EAAE,QAAQ,EAAE,MAAM,aAAa,CAAC;AACvC,OAAO,EAAE,aAAa,EAAE,MAAM,iBAAiB,CAAC;AAChD,OAAO,EAAE,SAAS,EAAE,MAAM,aAAa,CAAC;AACxC,OAAO,EAAE,QAAQ,EAAE,YAAY,EAAE,MAAM,iBAAiB,CAAC;AAEzD,MAAM,6BAA6B,OAAqB;IAAxD,iBA6DC;IA5DC,IAAM,MAAM,GAAG,aAAa,CAAC,OAAO,CAAC,CAAA;IAErC,iBAAiB,KAAuC;QACtD,OAAO,EAAE,CAAC,IAAI,CAAC;YAEb,gDAAgD;YAChD,IAAI,CAAC,GAAG,WAAW,CAAC,cAAc,CAAC,KAAK,CAAC,CAAC,CAAA;YAC1C,wCAAwC;YACxC,IAAI,CAAC,CAAC,KAAK,CAAC,CAAC,CAAC,KAAK,GAAG,IAAI,CAAC,CAAC,KAAK,CAAC,CAAC,CAAC,KAAK,GAAG,EAAE;gBAC5C,CAAC,GAAG,EAAE,CAAC,KAAK,CAAC,cAAc,CAAC,CAAC,EAAE,CAAC,GAAG,EAAE,GAAG,CAAC,CAAC,CAAA;aAC3C;YACD,CAAC,GAAG,SAAS,CAAC,CAAC,CAAC,CAAA;YAEhB,IAAI,GAAG,GAAG,QAAQ,CAAC,CAAC,EAAE,MAAM,CAAC,WAAW,CAAC,CAAA;YACzC,GAAG,GAAG,EAAE,CAAC,OAAO,CAAC,GAAG,EAAE,CAAC,EAAE,CAAC,EAAE,OAAO,CAAC,CAAA;YAEpC,GAAG,GAAG,QAAQ,CAAC,GAAG,EAAE,MAAM,CAAC,QAAQ,CAAC,CAAA;YACpC,GAAG,GAAG,QAAQ,CAAC,GAAG,EAAE,MAAM,CAAC,QAAQ,CAAC,CAAA;YACpC,GAAG,GAAG,QAAQ,CAAC,GAAG,EAAE,MAAM,CAAC,QAAQ,CAAC,CAAA;YAEpC,GAAG,GAAG,YAAY,CAAC,GAAG,EAAE,MAAM,CAAC,WAAW,CAAC,CAAA;YAC3C,GAAG,GAAG,QAAQ,CAAC,GAAG,EAAE,MAAM,CAAC,QAAQ,CAAC,CAAA;YACpC,GAAG,GAAG,QAAQ,CAAC,GAAG,EAAE,MAAM,CAAC,QAAQ,CAAC,CAAA;YACpC,GAAG,GAAG,QAAQ,CAAC,GAAG,EAAE,MAAM,CAAC,QAAQ,CAAC,CAAA;YAEpC,GAAG,GAAG,YAAY,CAAC,GAAG,EAAE,MAAM,CAAC,YAAY,CAAC,CAAA;YAC5C,GAAG,GAAG,QAAQ,CAAC,GAAG,EAAE,MAAM,CAAC,SAAS,CAAC,CAAA;YACrC,GAAG,GAAG,QAAQ,CAAC,GAAG,EAAE,MAAM,CAAC,SAAS,CAAC,CAAA;YAErC,GAAG,GAAG,YAAY,CAAC,GAAG,EAAE,MAAM,CAAC,YAAY,CAAC,CAAA;YAC5C,GAAG,GAAG,QAAQ,CAAC,GAAG,EAAE,MAAM,CAAC,SAAS,CAAC,CAAA;YACrC,GAAG,GAAG,QAAQ,CAAC,GAAG,EAAE,MAAM,CAAC,SAAS,CAAC,CAAA;YACrC,GAAG,GAAG,YAAY,CAAC,GAAG,EAAE,MAAM,CAAC,gBAAgB,CAAC,CAAA;YAEhD,IAAM,SAAS,GAAG,GAAG,CAAC,IAAI,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,CAAgB,CAAA;YACjD,IAAM,cAAc,GAAG,EAAE,CAAC,MAAM,CAAC,SAAS,EAAE,MAAM,CAAC,EAAE,CAAC,
CAAA;YAEtD,OAAO,cAAc,CAAA;QACvB,CAAC,CAAC,CAAA;IACJ,CAAC;IAED,IAAM,qBAAqB,GAAG,UAAO,KAAuC;;;;;oBACpE,MAAM,GAAG,OAAO,CAAC,KAAK,CAAC,CAAA;oBAChB,qBAAM,MAAM,CAAC,IAAI,EAAE,EAAA;;oBAA1B,IAAI,GAAG,SAAmB;oBAChC,MAAM,CAAC,OAAO,EAAE,CAAA;oBAChB,sBAAO,IAAI,EAAA;;;SACZ,CAAA;IAED,IAAM,yBAAyB,GAAG,UAAC,KAAuC;QACxE,IAAM,MAAM,GAAG,OAAO,CAAC,KAAK,CAAC,CAAA;QAC7B,IAAM,IAAI,GAAG,MAAM,CAAC,QAAQ,EAAE,CAAA;QAC9B,MAAM,CAAC,OAAO,EAAE,CAAA;QAChB,OAAO,IAAI,CAAA;IACb,CAAC,CAAA;IAED,OAAO;QACL,qBAAqB,uBAAA;QACrB,yBAAyB,2BAAA;QACzB,OAAO,SAAA;KACR,CAAA;AACH,CAAC"}
\ No newline at end of file
......@@ -2,5 +2,8 @@ import { euclideanDistance } from './euclideanDistance';
import { faceDetectionNet } from './faceDetectionNet';
import { faceRecognitionNet } from './faceRecognitionNet';
import { NetInput } from './NetInput';
export { euclideanDistance, faceDetectionNet, faceRecognitionNet, NetInput };
import * as tf from '@tensorflow/tfjs-core';
export { euclideanDistance, faceDetectionNet, faceRecognitionNet, NetInput, tf };
export * from './extractFaces';
export * from './extractFaceTensors';
export * from './utils';
......@@ -2,6 +2,9 @@ import { euclideanDistance } from './euclideanDistance';
import { faceDetectionNet } from './faceDetectionNet';
import { faceRecognitionNet } from './faceRecognitionNet';
import { NetInput } from './NetInput';
export { euclideanDistance, faceDetectionNet, faceRecognitionNet, NetInput };
import * as tf from '@tensorflow/tfjs-core';
export { euclideanDistance, faceDetectionNet, faceRecognitionNet, NetInput, tf };
export * from './extractFaces';
export * from './extractFaceTensors';
export * from './utils';
//# sourceMappingURL=index.js.map
\ No newline at end of file
{"version":3,"file":"index.js","sourceRoot":"","sources":["../src/index.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,iBAAiB,EAAE,MAAM,qBAAqB,CAAC;AACxD,OAAO,EAAE,gBAAgB,EAAE,MAAM,oBAAoB,CAAC;AACtD,OAAO,EAAE,kBAAkB,EAAE,MAAM,sBAAsB,CAAC;AAC1D,OAAO,EAAE,QAAQ,EAAE,MAAM,YAAY,CAAC;AAEtC,OAAO,EACL,iBAAiB,EACjB,gBAAgB,EAChB,kBAAkB,EAClB,QAAQ,EACT,CAAA;AAED,cAAc,SAAS,CAAA"}
\ No newline at end of file
{"version":3,"file":"index.js","sourceRoot":"","sources":["../src/index.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,iBAAiB,EAAE,MAAM,qBAAqB,CAAC;AACxD,OAAO,EAAE,gBAAgB,EAAE,MAAM,oBAAoB,CAAC;AACtD,OAAO,EAAE,kBAAkB,EAAE,MAAM,sBAAsB,CAAC;AAC1D,OAAO,EAAE,QAAQ,EAAE,MAAM,YAAY,CAAC;AACtC,OAAO,KAAK,EAAE,MAAM,uBAAuB,CAAC;AAE5C,OAAO,EACL,iBAAiB,EACjB,gBAAgB,EAChB,kBAAkB,EAClB,QAAQ,EACR,EAAE,EACH,CAAA;AAED,cAAc,gBAAgB,CAAA;AAC9B,cAAc,sBAAsB,CAAA;AACpC,cAAc,SAAS,CAAA"}
\ No newline at end of file
......@@ -10,7 +10,9 @@ export declare type DrawBoxOptions = {
color: string;
};
export declare type DrawTextOptions = {
lineWidth: number;
fontSize: number;
fontStyle: string;
color: string;
};
export declare type DrawOptions = DrawBoxOptions & DrawTextOptions;
import { FaceDetectionNet } from './faceDetectionNet/types';
import { DrawBoxOptions, DrawTextOptions } from './types';
import { Dimensions, DrawBoxOptions, DrawOptions, DrawTextOptions } from './types';
export declare function isFloat(num: number): boolean;
export declare function round(num: number): number;
export declare function getElement(arg: string | any): any;
export declare function getContext2dOrThrow(canvas: HTMLCanvasElement): CanvasRenderingContext2D;
export declare function getMediaDimensions(media: HTMLImageElement | HTMLVideoElement): HTMLImageElement | {
export declare function createCanvas({width, height}: Dimensions): HTMLCanvasElement;
export declare function createCanvasWithImageData({width, height}: Dimensions, buf: Uint8ClampedArray): HTMLCanvasElement;
export declare function getMediaDimensions(media: HTMLImageElement | HTMLVideoElement): {
width: number;
height: number;
};
export declare function bufferToImage(buf: Blob): Promise<HTMLImageElement>;
export declare function getDefaultDrawOptions(): DrawOptions;
export declare function drawBox(ctx: CanvasRenderingContext2D, x: number, y: number, w: number, h: number, options: DrawBoxOptions): void;
export declare function drawText(ctx: CanvasRenderingContext2D, x: number, y: number, text: string, options: DrawTextOptions): void;
export declare function drawDetection(canvasArg: string | HTMLCanvasElement, detection: FaceDetectionNet.Detection | FaceDetectionNet.Detection[], options?: DrawBoxOptions & DrawTextOptions & {
......
......@@ -17,7 +17,23 @@ export function getContext2dOrThrow(canvas) {
}
return ctx;
}
// Creates a blank HTMLCanvasElement with the given { width, height } dimensions.
export function createCanvas(dims) {
    var el = document.createElement('canvas');
    el.width = dims.width;
    el.height = dims.height;
    return el;
}
// Creates a canvas of the given size and fills it with the supplied
// RGBA pixel buffer (interpreted as ImageData of the same dimensions).
export function createCanvasWithImageData(dims, buf) {
    var canvas = createCanvas(dims);
    var imageData = new ImageData(buf, dims.width, dims.height);
    getContext2dOrThrow(canvas).putImageData(imageData, 0, 0);
    return canvas;
}
export function getMediaDimensions(media) {
if (media instanceof HTMLImageElement) {
return { width: media.naturalWidth, height: media.naturalHeight };
}
if (media instanceof HTMLVideoElement) {
return { width: media.videoWidth, height: media.videoHeight };
}
......@@ -39,15 +55,24 @@ export function bufferToImage(buf) {
reader.readAsDataURL(buf);
});
}
// Returns the default options used by drawBox / drawText when the
// caller does not supply its own drawing options.
export function getDefaultDrawOptions() {
    var defaults = {
        color: 'blue',
        lineWidth: 2,
        fontSize: 20,
        fontStyle: 'Georgia'
    };
    return defaults;
}
export function drawBox(ctx, x, y, w, h, options) {
ctx.strokeStyle = options.color;
ctx.lineWidth = options.lineWidth;
ctx.strokeRect(x, y, w, h);
}
export function drawText(ctx, x, y, text, options) {
var padText = 2 + options.lineWidth;
ctx.fillStyle = options.color;
ctx.font = options.fontSize + "px " + options.fontStyle;
ctx.fillText(text, x, y);
ctx.fillText(text, x + padText, y + padText + (options.fontSize * 0.6));
}
export function drawDetection(canvasArg, detection, options) {
var canvas = getElement(canvasArg);
......@@ -59,13 +84,13 @@ export function drawDetection(canvasArg, detection, options) {
: [detection];
detectionArray.forEach(function (det) {
var score = det.score, box = det.box;
var left = box.left, right = box.right, top = box.top, bottom = box.bottom;
var _a = (options || {}), _b = _a.color, color = _b === void 0 ? 'blue' : _b, _c = _a.lineWidth, lineWidth = _c === void 0 ? 2 : _c, _d = _a.fontSize, fontSize = _d === void 0 ? 20 : _d, _e = _a.fontStyle, fontStyle = _e === void 0 ? 'Georgia' : _e, _f = _a.withScore, withScore = _f === void 0 ? true : _f;
var padText = 2 + lineWidth;
var x = box.x, y = box.y, width = box.width, height = box.height;
var drawOptions = Object.assign(getDefaultDrawOptions(), (options || {}));
var withScore = Object.assign({ withScore: true }, (options || {})).withScore;
var ctx = getContext2dOrThrow(canvas);
drawBox(ctx, left, top, right - left, bottom - top, { lineWidth: lineWidth, color: color });
drawBox(ctx, x, y, width, height, drawOptions);
if (withScore) {
drawText(ctx, left + padText, top + (fontSize * 0.6) + padText, "" + round(score), { fontSize: fontSize, fontStyle: fontStyle, color: color });
drawText(ctx, x, y, "" + round(score), drawOptions);
}
});
}
......
{"version":3,"file":"utils.js","sourceRoot":"","sources":["../src/utils.ts"],"names":[],"mappings":"AAGA,MAAM,kBAAkB,GAAW;IACjC,OAAO,GAAG,GAAG,CAAC,KAAK,CAAC,CAAA;AACtB,CAAC;AAED,MAAM,gBAAgB,GAAW;IAC/B,OAAO,IAAI,CAAC,KAAK,CAAC,GAAG,GAAG,GAAG,CAAC,GAAG,GAAG,CAAA;AACpC,CAAC;AAED,MAAM,qBAAqB,GAAiB;IAC1C,IAAI,OAAO,GAAG,KAAK,QAAQ,EAAE;QAC3B,OAAO,QAAQ,CAAC,cAAc,CAAC,GAAG,CAAC,CAAA;KACpC;IACD,OAAO,GAAG,CAAA;AACZ,CAAC;AAED,MAAM,8BAA8B,MAAyB;IAC3D,IAAM,GAAG,GAAG,MAAM,CAAC,UAAU,CAAC,IAAI,CAAC,CAAA;IACnC,IAAI,CAAC,GAAG,EAAE;QACR,MAAM,IAAI,KAAK,CAAC,2BAA2B,CAAC,CAAA;KAC7C;IACD,OAAO,GAAG,CAAA;AACZ,CAAC;AAED,MAAM,6BAA6B,KAA0C;IAC3E,IAAI,KAAK,YAAY,gBAAgB,EAAE;QACrC,OAAO,EAAE,KAAK,EAAE,KAAK,CAAC,UAAU,EAAE,MAAM,EAAE,KAAK,CAAC,WAAW,EAAE,CAAA;KAC9D;IACD,OAAO,KAAK,CAAA;AACd,CAAC;AAED,MAAM,wBAAwB,GAAS;IACrC,OAAO,IAAI,OAAO,CAAC,UAAC,OAAO,EAAE,MAAM;QACjC,IAAI,CAAC,CAAC,GAAG,YAAY,IAAI,CAAC,EAAE;YAC1B,OAAO,MAAM,CAAC,kDAAkD,CAAC,CAAA;SAClE;QAED,IAAM,MAAM,GAAG,IAAI,UAAU,EAAE,CAAA;QAC/B,MAAM,CAAC,MAAM,GAAG;YACd,IAAM,GAAG,GAAG,IAAI,KAAK,EAAE,CAAA;YACvB,GAAG,CAAC,MAAM,GAAG,cAAM,OAAA,OAAO,CAAC,GAAG,CAAC,EAAZ,CAAY,CAAA;YAC/B,GAAG,CAAC,OAAO,GAAG,MAAM,CAAA;YACpB,GAAG,CAAC,GAAG,GAAG,MAAM,CAAC,MAAM,CAAA;QACzB,CAAC,CAAA;QACD,MAAM,CAAC,OAAO,GAAG,MAAM,CAAA;QACvB,MAAM,CAAC,aAAa,CAAC,GAAG,CAAC,CAAA;IAC3B,CAAC,CAAC,CAAA;AACJ,CAAC;AAED,MAAM,kBACJ,GAA6B,EAC7B,CAAS,EACT,CAAS,EACT,CAAS,EACT,CAAS,EACT,OAAuB;IAEvB,GAAG,CAAC,WAAW,GAAG,OAAO,CAAC,KAAK,CAAA;IAC/B,GAAG,CAAC,SAAS,GAAG,OAAO,CAAC,SAAS,CAAA;IACjC,GAAG,CAAC,UAAU,CAAC,CAAC,EAAE,CAAC,EAAE,CAAC,EAAE,CAAC,CAAC,CAAA;AAC5B,CAAC;AAED,MAAM,mBACJ,GAA6B,EAC7B,CAAS,EACT,CAAS,EACT,IAAY,EACZ,OAAwB;IAExB,GAAG,CAAC,SAAS,GAAG,OAAO,CAAC,KAAK,CAAA;IAC7B,GAAG,CAAC,IAAI,GAAM,OAAO,CAAC,QAAQ,WAAM,OAAO,CAAC,SAAW,CAAA;IACvD,GAAG,CAAC,QAAQ,CAAC,IAAI,EAAE,CAAC,EAAE,CAAC,CAAC,CAAA;AAC1B,CAAC;AAED,MAAM,wBACJ,SAAqC,EACrC,SAAoE,EACpE,OAAmE;IAEnE,IAAM,MAAM,GAAG,UAAU,CAAC,SAAS,CAAC,CAAA;IACpC,IAAI,CAAC,CAAC,MAAM,YAAY,iBAAiB,CAAC,EAAE;QAC1C,MAAM,IAAI,KAAK,CAAC,4DAA4D,CAAC,CAAA;KAC9E;IAED,
IAAM,cAAc,GAAG,KAAK,CAAC,OAAO,CAAC,SAAS,CAAC;QAC7C,CAAC,CAAC,SAAS;QACX,CAAC,CAAC,CAAC,SAAS,CAAC,CAAA;IAEf,cAAc,CAAC,OAAO,CAAC,UAAC,GAAG;QAEvB,IAAA,iBAAK,EACL,aAAG,CACE;QAGL,IAAA,eAAI,EACJ,iBAAK,EACL,aAAG,EACH,mBAAM,CACD;QAED,IAAA,oBAMa,EALjB,aAAc,EAAd,mCAAc,EACd,iBAAa,EAAb,kCAAa,EACb,gBAAa,EAAb,kCAAa,EACb,iBAAqB,EAArB,0CAAqB,EACrB,iBAAgB,EAAhB,qCAAgB,CACC;QAEnB,IAAM,OAAO,GAAG,CAAC,GAAG,SAAS,CAAA;QAE7B,IAAM,GAAG,GAAG,mBAAmB,CAAC,MAAM,CAAC,CAAA;QACvC,OAAO,CACL,GAAG,EACH,IAAI,EACJ,GAAG,EACH,KAAK,GAAG,IAAI,EACZ,MAAM,GAAG,GAAG,EACZ,EAAE,SAAS,WAAA,EAAE,KAAK,OAAA,EAAE,CACrB,CAAA;QACD,IAAI,SAAS,EAAE;YACb,QAAQ,CACN,GAAG,EACH,IAAI,GAAG,OAAO,EACd,GAAG,GAAG,CAAC,QAAQ,GAAG,GAAG,CAAC,GAAG,OAAO,EAChC,KAAG,KAAK,CAAC,KAAK,CAAG,EACjB,EAAE,QAAQ,UAAA,EAAE,SAAS,WAAA,EAAE,KAAK,OAAA,EAAE,CAC/B,CAAA;SACF;IACH,CAAC,CAAC,CAAA;AACJ,CAAC"}
\ No newline at end of file
{"version":3,"file":"utils.js","sourceRoot":"","sources":["../src/utils.ts"],"names":[],"mappings":"AAGA,MAAM,kBAAkB,GAAW;IACjC,OAAO,GAAG,GAAG,CAAC,KAAK,CAAC,CAAA;AACtB,CAAC;AAED,MAAM,gBAAgB,GAAW;IAC/B,OAAO,IAAI,CAAC,KAAK,CAAC,GAAG,GAAG,GAAG,CAAC,GAAG,GAAG,CAAA;AACpC,CAAC;AAED,MAAM,qBAAqB,GAAiB;IAC1C,IAAI,OAAO,GAAG,KAAK,QAAQ,EAAE;QAC3B,OAAO,QAAQ,CAAC,cAAc,CAAC,GAAG,CAAC,CAAA;KACpC;IACD,OAAO,GAAG,CAAA;AACZ,CAAC;AAED,MAAM,8BAA8B,MAAyB;IAC3D,IAAM,GAAG,GAAG,MAAM,CAAC,UAAU,CAAC,IAAI,CAAC,CAAA;IACnC,IAAI,CAAC,GAAG,EAAE;QACR,MAAM,IAAI,KAAK,CAAC,2BAA2B,CAAC,CAAA;KAC7C;IACD,OAAO,GAAG,CAAA;AACZ,CAAC;AAED,MAAM,uBAAuB,EAA4B;QAA1B,gBAAK,EAAE,kBAAM;IAC1C,IAAM,MAAM,GAAG,QAAQ,CAAC,aAAa,CAAC,QAAQ,CAAC,CAAA;IAC/C,MAAM,CAAC,KAAK,GAAG,KAAK,CAAA;IACpB,MAAM,CAAC,MAAM,GAAG,MAAM,CAAA;IACtB,OAAO,MAAM,CAAA;AACf,CAAC;AAED,MAAM,oCAAoC,EAA4B,EAAE,GAAsB;QAAlD,gBAAK,EAAE,kBAAM;IACvD,IAAM,MAAM,GAAG,YAAY,CAAC,EAAE,KAAK,OAAA,EAAE,MAAM,QAAA,EAAE,CAAC,CAAA;IAC9C,mBAAmB,CAAC,MAAM,CAAC,CAAC,YAAY,CAAC,IAAI,SAAS,CAAC,GAAG,EAAE,KAAK,EAAE,MAAM,CAAC,EAAE,CAAC,EAAE,CAAC,CAAC,CAAA;IACjF,OAAO,MAAM,CAAA;AACf,CAAC;AAED,MAAM,6BAA6B,KAA0C;IAC3E,IAAI,KAAK,YAAY,gBAAgB,EAAE;QACrC,OAAO,EAAE,KAAK,EAAE,KAAK,CAAC,YAAY,EAAE,MAAM,EAAE,KAAK,CAAC,aAAa,EAAE,CAAA;KAClE;IACD,IAAI,KAAK,YAAY,gBAAgB,EAAE;QACrC,OAAO,EAAE,KAAK,EAAE,KAAK,CAAC,UAAU,EAAE,MAAM,EAAE,KAAK,CAAC,WAAW,EAAE,CAAA;KAC9D;IACD,OAAO,KAAK,CAAA;AACd,CAAC;AAED,MAAM,wBAAwB,GAAS;IACrC,OAAO,IAAI,OAAO,CAAC,UAAC,OAAO,EAAE,MAAM;QACjC,IAAI,CAAC,CAAC,GAAG,YAAY,IAAI,CAAC,EAAE;YAC1B,OAAO,MAAM,CAAC,kDAAkD,CAAC,CAAA;SAClE;QAED,IAAM,MAAM,GAAG,IAAI,UAAU,EAAE,CAAA;QAC/B,MAAM,CAAC,MAAM,GAAG;YACd,IAAM,GAAG,GAAG,IAAI,KAAK,EAAE,CAAA;YACvB,GAAG,CAAC,MAAM,GAAG,cAAM,OAAA,OAAO,CAAC,GAAG,CAAC,EAAZ,CAAY,CAAA;YAC/B,GAAG,CAAC,OAAO,GAAG,MAAM,CAAA;YACpB,GAAG,CAAC,GAAG,GAAG,MAAM,CAAC,MAAM,CAAA;QACzB,CAAC,CAAA;QACD,MAAM,CAAC,OAAO,GAAG,MAAM,CAAA;QACvB,MAAM,CAAC,aAAa,CAAC,GAAG,CAAC,CAAA;IAC3B,CAAC,CAAC,CAAA;AACJ,CAAC;AAED,MAAM;IACJ,OAAO;QACL,KAAK,EAAE,MAAM;QACb,SAAS,EAAE,CAAC;QACZ,QAAQ,EAAE,EAAE;QACZ
,SAAS,EAAE,SAAS;KACrB,CAAA;AACH,CAAC;AAED,MAAM,kBACJ,GAA6B,EAC7B,CAAS,EACT,CAAS,EACT,CAAS,EACT,CAAS,EACT,OAAuB;IAEvB,GAAG,CAAC,WAAW,GAAG,OAAO,CAAC,KAAK,CAAA;IAC/B,GAAG,CAAC,SAAS,GAAG,OAAO,CAAC,SAAS,CAAA;IACjC,GAAG,CAAC,UAAU,CAAC,CAAC,EAAE,CAAC,EAAE,CAAC,EAAE,CAAC,CAAC,CAAA;AAC5B,CAAC;AAED,MAAM,mBACJ,GAA6B,EAC7B,CAAS,EACT,CAAS,EACT,IAAY,EACZ,OAAwB;IAExB,IAAM,OAAO,GAAG,CAAC,GAAG,OAAO,CAAC,SAAS,CAAA;IAErC,GAAG,CAAC,SAAS,GAAG,OAAO,CAAC,KAAK,CAAA;IAC7B,GAAG,CAAC,IAAI,GAAM,OAAO,CAAC,QAAQ,WAAM,OAAO,CAAC,SAAW,CAAA;IACvD,GAAG,CAAC,QAAQ,CAAC,IAAI,EAAE,CAAC,GAAG,OAAO,EAAE,CAAC,GAAG,OAAO,GAAG,CAAC,OAAO,CAAC,QAAQ,GAAG,GAAG,CAAC,CAAC,CAAA;AACzE,CAAC;AAED,MAAM,wBACJ,SAAqC,EACrC,SAAoE,EACpE,OAAmE;IAEnE,IAAM,MAAM,GAAG,UAAU,CAAC,SAAS,CAAC,CAAA;IACpC,IAAI,CAAC,CAAC,MAAM,YAAY,iBAAiB,CAAC,EAAE;QAC1C,MAAM,IAAI,KAAK,CAAC,4DAA4D,CAAC,CAAA;KAC9E;IAED,IAAM,cAAc,GAAG,KAAK,CAAC,OAAO,CAAC,SAAS,CAAC;QAC7C,CAAC,CAAC,SAAS;QACX,CAAC,CAAC,CAAC,SAAS,CAAC,CAAA;IAEf,cAAc,CAAC,OAAO,CAAC,UAAC,GAAG;QAEvB,IAAA,iBAAK,EACL,aAAG,CACE;QAGL,IAAA,SAAC,EACD,SAAC,EACD,iBAAK,EACL,mBAAM,CACD;QAEP,IAAM,WAAW,GAAG,MAAM,CAAC,MAAM,CAC/B,qBAAqB,EAAE,EACvB,CAAC,OAAO,IAAI,EAAE,CAAC,CAChB,CAAA;QAEO,IAAA,yEAAS,CAAwD;QAEzE,IAAM,GAAG,GAAG,mBAAmB,CAAC,MAAM,CAAC,CAAA;QACvC,OAAO,CACL,GAAG,EACH,CAAC,EACD,CAAC,EACD,KAAK,EACL,MAAM,EACN,WAAW,CACZ,CAAA;QACD,IAAI,SAAS,EAAE;YACb,QAAQ,CACN,GAAG,EACH,CAAC,EACD,CAAC,EACD,KAAG,KAAK,CAAC,KAAK,CAAG,EACjB,WAAW,CACZ,CAAA;SACF;IACH,CAAC,CAAC,CAAA;AACJ,CAAC"}
\ No newline at end of file
......@@ -24,6 +24,35 @@ async function initFaceRecognitionNet() {
return facerecognition.faceRecognitionNet(weights)
}
// fetch first image of each class and compute their descriptors
// Fetches the first reference image of every class and computes its face
// descriptor with the given recognition net. Resolves to an array of
// { descriptor, className } pairs used later for nearest-neighbor matching.
// NOTE(review): relies on the globals `classes`, `fetchImage` and
// `getFaceImageUri` defined elsewhere in the example scripts — verify
// they are loaded before this runs.
async function initTrainDescriptorsByClass(net) {
  return Promise.all(classes.map(
    async className => {
      // image index 1 is used as the single training sample per class
      const img = await facerecognition.bufferToImage(
        await fetchImage(getFaceImageUri(className, 1))
      )
      const descriptor = await net.computeFaceDescriptor(img)
      return {
        descriptor,
        className
      }
    }
  ))
}
// Returns the { distance, className } entry from allDescriptors whose
// descriptor has the smallest (rounded) euclidean distance to the query
// descriptor.
function getBestMatch(allDescriptors, queryDescriptor) {
  const candidates = allDescriptors.map(({ descriptor, className }) => ({
    distance: facerecognition.round(
      facerecognition.euclideanDistance(descriptor, queryDescriptor)
    ),
    className
  }))
  return candidates.reduce((best, curr) => best.distance < curr.distance ? best : curr)
}
function renderNavBar(navbarId, exampleUri) {
const examples = [
{
......@@ -41,6 +70,14 @@ function renderNavBar(navbarId, exampleUri) {
{
uri: 'face_similarity',
name: 'Face Similarity'
},
{
uri: 'detect_and_draw_faces',
name: 'Detect and Draw Faces'
},
{
uri: 'detect_and_recognize_faces',
name: 'Detect and Recognize Faces'
}
]
......
......@@ -37,4 +37,8 @@
position: absolute;
top: 0;
left: 0;
}
#facesContainer canvas {
margin: 10px;
}
\ No newline at end of file
......@@ -15,5 +15,7 @@ app.get('/face_detection', (req, res) => res.sendFile(path.join(viewsDir, 'faceD
app.get('/face_detection_video', (req, res) => res.sendFile(path.join(viewsDir, 'faceDetectionVideo.html')))
app.get('/face_recognition', (req, res) => res.sendFile(path.join(viewsDir, 'faceRecognition.html')))
app.get('/face_similarity', (req, res) => res.sendFile(path.join(viewsDir, 'faceSimilarity.html')))
app.get('/detect_and_draw_faces', (req, res) => res.sendFile(path.join(viewsDir, 'detectAndDrawFaces.html')))
app.get('/detect_and_recognize_faces', (req, res) => res.sendFile(path.join(viewsDir, 'detectAndRecognizeFaces.html')))
app.listen(3000, () => console.log('Listening on port 3000!'))
\ No newline at end of file
<!DOCTYPE html>
<html>
<head>
<script src="face-recognition.js"></script>
<script src="axios.min.js"></script>
<script src="commons.js"></script>
<link rel="stylesheet" href="styles.css">
<link rel="stylesheet" href="https://cdnjs.cloudflare.com/ajax/libs/materialize/0.100.2/css/materialize.css">
<script type="text/javascript" src="https://code.jquery.com/jquery-2.1.1.min.js"></script>
<script src="https://cdnjs.cloudflare.com/ajax/libs/materialize/0.100.2/js/materialize.min.js"></script>
</head>
<body>
<div class="center-content page-container">
<div id="navbar"></div>
<div class="progress" id="loader">
<div class="indeterminate"></div>
</div>
<div style="position: relative" class="margin">
<img id="inputImg" src="" style="max-width: 800px;" />
<canvas id="overlay" />
</div>
<div id="facesContainer"></div>
<div class="row side-by-side">
<div id="selectList"></div>
<div class="row">
<label for="minConfidence">Min Confidence:</label>
<input disabled value="0.7" id="minConfidence" type="text" class="bold">
</div>
<button
class="waves-effect waves-light btn"
onclick="onDecreaseThreshold()"
>
<i class="material-icons left">-</i>
</button>
<button
class="waves-effect waves-light btn"
onclick="onIncreaseThreshold()"
>
<i class="material-icons left">+</i>
</button>
</div>
</div>
<script>
let minConfidence = 0.7
let net
function onIncreaseThreshold() {
minConfidence = Math.min(facerecognition.round(minConfidence + 0.1), 1.0)
$('#minConfidence').val(minConfidence)
updateResults()
}
function onDecreaseThreshold() {
minConfidence = Math.max(facerecognition.round(minConfidence - 0.1), 0.1)
$('#minConfidence').val(minConfidence)
updateResults()
}
// Runs face detection on the currently displayed input image, draws the
// detection boxes onto the overlay canvas and renders a cropped canvas
// for every detected face into #facesContainer.
// NOTE(review): depends on page globals `net` and `minConfidence` and on
// jQuery ($) — defined elsewhere in this page's scripts.
async function updateResults() {
  const inputImgEl = $('#inputImg').get(0)
  // size the overlay to the displayed image so boxes line up
  const { width, height } = inputImgEl
  const canvas = $('#overlay').get(0)
  canvas.width = width
  canvas.height = height
  const input = new facerecognition.NetInput(inputImgEl)
  const detections = await net.locateFaces(input, minConfidence)
  // detections are relative coordinates; scale them to the display size
  facerecognition.drawDetection('overlay', detections.map(det => det.forSize(width, height)))
  const faceImages = await facerecognition.extractFaces(input.canvases[0], detections)
  $('#facesContainer').empty()
  faceImages.forEach(canvas => $('#facesContainer').append(canvas))
}
async function onSelectionChanged(uri) {
const imgBuf = await fetchImage(uri)
$(`#inputImg`).get(0).src = (await facerecognition.bufferToImage(imgBuf)).src
updateResults()
}
async function run() {
net = await initFaceDetectionNet()
$('#loader').hide()
onSelectionChanged($('#selectList select').val())
}
$(document).ready(function() {
renderNavBar('#navbar', 'detect_and_draw_faces')
renderImageSelectList(
'#selectList',
async (uri) => {
await onSelectionChanged(uri)
},
'bbt1.jpg'
)
run()
})
</script>
</body>
</html>
\ No newline at end of file
<!DOCTYPE html>
<html>
<head>
<script src="face-recognition.js"></script>
<script src="axios.min.js"></script>
<script src="commons.js"></script>
<link rel="stylesheet" href="styles.css">
<link rel="stylesheet" href="https://cdnjs.cloudflare.com/ajax/libs/materialize/0.100.2/css/materialize.css">
<script type="text/javascript" src="https://code.jquery.com/jquery-2.1.1.min.js"></script>
<script src="https://cdnjs.cloudflare.com/ajax/libs/materialize/0.100.2/js/materialize.min.js"></script>
</head>
<body>
<div class="center-content page-container">
<div id="navbar"></div>
<div class="progress" id="loader">
<div class="indeterminate"></div>
</div>
<div style="position: relative" class="margin">
<img id="inputImg" src="" style="max-width: 800px;" />
<canvas id="overlay" />
</div>
<div class="row side-by-side">
<div id="selectList"></div>
<div class="row">
<label for="minConfidence">Min Confidence:</label>
<input disabled value="0.7" id="minConfidence" type="text" class="bold">
</div>
<button
class="waves-effect waves-light btn"
onclick="onDecreaseThreshold()"
>
<i class="material-icons left">-</i>
</button>
<button
class="waves-effect waves-light btn"
onclick="onIncreaseThreshold()"
>
<i class="material-icons left">+</i>
</button>
</div>
</div>
<script>
const threshold = 0.6
let minConfidence = 0.7
let detectionNet, recognitionNet
let trainDescriptorsByClass = []
function onIncreaseThreshold() {
minConfidence = Math.min(facerecognition.round(minConfidence + 0.1), 1.0)
$('#minConfidence').val(minConfidence)
updateResults()
}
function onDecreaseThreshold() {
minConfidence = Math.max(facerecognition.round(minConfidence - 0.1), 0.1)
$('#minConfidence').val(minConfidence)
updateResults()
}
// Detects faces in the current input image, draws the boxes onto the
// overlay, computes a descriptor per face and labels each box with the
// best-matching training class (or 'unknown' above the distance threshold).
// FIX: the displayed label text misspelled 'unknown' as 'unkown'.
// NOTE(review): depends on page globals `detectionNet`, `recognitionNet`,
// `trainDescriptorsByClass`, `minConfidence`, `threshold` and jQuery ($).
async function updateResults() {
  const inputImgEl = $('#inputImg').get(0)
  const { width, height } = inputImgEl
  const canvas = $('#overlay').get(0)
  canvas.width = width
  canvas.height = height
  const input = new facerecognition.NetInput(inputImgEl)
  const detections = await detectionNet.locateFaces(input, minConfidence)
  // scale relative detections to the displayed image size
  const detectionsForSize = detections.map(det => det.forSize(width, height))
  facerecognition.drawDetection('overlay', detectionsForSize, { withScore: false })
  const faceTensors = await facerecognition.extractFaceTensors(input, detections)
  const descriptors = await Promise.all(faceTensors.map(t => recognitionNet.computeFaceDescriptor(t)))
  // free memory for face image tensors after we computed their descriptors
  faceTensors.forEach(t => t.dispose())
  descriptors.forEach((descriptor, i) => {
    const bestMatch = getBestMatch(trainDescriptorsByClass, descriptor)
    const text = `${bestMatch.distance < threshold ? bestMatch.className : 'unknown'} (${bestMatch.distance})`
    const { x, y } = detectionsForSize[i].box
    facerecognition.drawText(canvas.getContext('2d'), x, y, text, facerecognition.getDefaultDrawOptions())
  })
}
async function onSelectionChanged(uri) {
const imgBuf = await fetchImage(uri)
$(`#inputImg`).get(0).src = (await facerecognition.bufferToImage(imgBuf)).src
updateResults()
}
async function run() {
detectionNet = await initFaceDetectionNet()
recognitionNet = await initFaceRecognitionNet()
trainDescriptorsByClass = await initTrainDescriptorsByClass(recognitionNet)
$('#loader').hide()
onSelectionChanged($('#selectList select').val())
}
$(document).ready(function() {
renderNavBar('#navbar', 'detect_and_recognize_faces')
renderImageSelectList(
'#selectList',
async (uri) => {
await onSelectionChanged(uri)
},
'bbt1.jpg'
)
run()
})
</script>
</body>
</html>
\ No newline at end of file
......@@ -57,12 +57,13 @@
}
// Runs face detection on the current input image and draws the detections
// onto the overlay canvas, scaled to the displayed image size.
// FIX: merge residue had left the superseded lines
//   `const input = new facerecognition.NetInput('inputImg')` and
//   `const { width, height } = input`
// alongside their replacements, producing duplicate `const` declarations
// (a SyntaxError); only the new version is kept.
async function updateResults() {
  const inputImgEl = $('#inputImg').get(0)
  const { width, height } = inputImgEl
  const canvas = $('#overlay').get(0)
  canvas.width = width
  canvas.height = height
  const input = new facerecognition.NetInput(inputImgEl)
  // NOTE(review): `result` is assigned without declaration — presumably a
  // variable in an enclosing script scope; verify before relying on it.
  result = await net.locateFaces(input, minConfidence)
  facerecognition.drawDetection('overlay', result.map(det => det.forSize(width, height)))
}
......
......@@ -108,30 +108,6 @@
getImg().src = src
}
// Loads the first reference image of every class and resolves to an array
// of { img, className } pairs for later descriptor computation.
// NOTE(review): relies on the globals `classes`, `fetchImage` and
// `getFaceImageUri` defined elsewhere in the example scripts.
async function loadTrainingImages() {
  return await Promise.all(classes.map(
    async className => ({
      // image index 1 serves as the training sample for each class
      img: await facerecognition.bufferToImage(
        await fetchImage(getFaceImageUri(className, 1))
      ),
      className
    })
  ))
}
// Finds the entry in the global trainDescriptorsByClass whose descriptor
// has the smallest (rounded) euclidean distance to the query descriptor.
function getBestMatch(queryDescriptor) {
  const candidates = trainDescriptorsByClass.map(({ descriptor, className }) => ({
    distance: facerecognition.round(
      facerecognition.euclideanDistance(descriptor, queryDescriptor)
    ),
    className
  }))
  return candidates.reduce((best, curr) => best.distance < curr.distance ? best : curr)
}
async function runFaceRecognition() {
async function next() {
const imgBuf = await fetchImage(getFaceImageUri(classes[currClassIdx], currImageIdx))
......@@ -143,7 +119,7 @@
const descriptor = await net.computeFaceDescriptor(input)
displayTimeStats(Date.now() - ts)
const bestMatch = getBestMatch(descriptor)
const bestMatch = getBestMatch(trainDescriptorsByClass, descriptor)
$('#prediction').val(`${bestMatch.distance < threshold ? bestMatch.className : 'unkown'} (${bestMatch.distance})`)
currImageIdx = currClassIdx === (classes.length - 1)
......@@ -164,13 +140,7 @@
net = await initFaceRecognitionNet()
setStatusText('computing initial descriptors...')
const trainImgs = await loadTrainingImages()
trainDescriptorsByClass = await Promise.all(trainImgs.map(
async ({ className, img }) => ({
descriptor: await net.computeFaceDescriptor(img),
className
})
))
trainDescriptorsByClass = await initTrainDescriptorsByClass(net)
$('#loader').hide()
runFaceRecognition()
......
import { Dimensions, TMediaElement, TNetInput } from './types';
import { getContext2dOrThrow, getElement, getMediaDimensions } from './utils';
import { createCanvas, getContext2dOrThrow, getElement, getMediaDimensions } from './utils';
export class NetInput {
private _canvases: HTMLCanvasElement[]
......@@ -42,10 +42,7 @@ export class NetInput {
// if input is batch type, make sure every canvas has the same dimensions
const { width, height } = this.dims || dims || getMediaDimensions(media)
const canvas = document.createElement('canvas')
canvas.width = width
canvas.height = height
const canvas = createCanvas({ width, height })
getContext2dOrThrow(canvas).drawImage(media, 0, 0, width, height)
this._canvases.push(canvas)
}
......
import * as tf from '@tensorflow/tfjs-core';
import { FaceDetectionResult } from './faceDetectionNet/FaceDetectionResult';
import { NetInput } from './NetInput';
import { getImageTensor } from './transformInputs';
import { TNetInput } from './types';
/**
 * Extracts the tensors of the image regions containing the detected faces.
 * Returned tensors have to be disposed manually once you don't need them anymore!
 * Useful if you want to compute the face descriptors for the face
 * images, as it is faster than extracting a canvas per face and
 * converting each one to a tensor individually.
 *
 * @param image The image that face detection has been performed on.
 * @param detections The face detection results for that image.
 * @returns Tensors of the corresponding image region for each detected face.
 */
export function extractFaceTensors(
  image: tf.Tensor | NetInput | TNetInput,
  detections: FaceDetectionResult[]
): tf.Tensor4D[] {
  return tf.tidy(() => {
    const imgTensor = getImageTensor(image)

    // TODO handle batches
    const [, imgHeight, imgWidth, numChannels] = imgTensor.shape

    return detections.map(det => {
      // scale the relative detection box to absolute pixel coordinates
      const box = det.forSize(imgWidth, imgHeight).box
      return tf.slice(imgTensor, [0, box.y, box.x, 0], [1, box.height, box.width, numChannels])
    })
  })
}
\ No newline at end of file
import { FaceDetectionResult } from './faceDetectionNet/FaceDetectionResult';
import { createCanvas, getContext2dOrThrow } from './utils';
/**
 * Extracts the image regions containing the detected faces.
 *
 * @param image The image (canvas) that face detection has been performed on.
 * @param detections The face detection results for that image.
 * @returns A canvas holding the corresponding image region for each detected face.
 */
export function extractFaces(
  image: HTMLCanvasElement,
  detections: FaceDetectionResult[]
): HTMLCanvasElement[] {
  const sourceCtx = getContext2dOrThrow(image)

  const faceCanvases: HTMLCanvasElement[] = []
  for (const det of detections) {
    // scale the relative detection box to the source canvas dimensions
    const { x, y, width, height } = det.forSize(image.width, image.height).box
    const faceCanvas = createCanvas({ width, height })
    const pixels = sourceCtx.getImageData(x, y, width, height)
    getContext2dOrThrow(faceCanvas).putImageData(pixels, 0, 0)
    faceCanvases.push(faceCanvas)
  }
  return faceCanvases
}
\ No newline at end of file
import { FaceDetectionNet } from './types';
export class FaceDetectionResult {
private score: number
private top: number
private left: number
private bottom: number
private right: number
private _score: number
private _topRelative: number
private _leftRelative: number
private _bottomRelative: number
private _rightRelative: number
constructor(
score: number,
top: number,
left: number,
bottom: number,
right: number
topRelative: number,
leftRelative: number,
bottomRelative: number,
rightRelative: number
) {
this.score = score
this.top = Math.max(0, top),
this.left = Math.max(0, left),
this.bottom = Math.min(1.0, bottom),
this.right = Math.min(1.0, right)
this._score = score
this._topRelative = Math.max(0, topRelative),
this._leftRelative = Math.max(0, leftRelative),
this._bottomRelative = Math.min(1.0, bottomRelative),
this._rightRelative = Math.min(1.0, rightRelative)
}
public forSize(width: number, height: number): FaceDetectionNet.Detection {
const x = Math.floor(this._leftRelative * width)
const y = Math.floor(this._topRelative * height)
return {
score: this.score,
score: this._score,
box: {
top: this.top * height,
left: this.left * width,
bottom: this.bottom * height,
right: this.right * width
x,
y,
width: Math.floor(this._rightRelative * width) - x,
height: Math.floor(this._bottomRelative * height) - y
}
}
}
......
......@@ -69,10 +69,10 @@ export namespace FaceDetectionNet {
export type Detection = {
score: number
box: {
top: number,
left: number,
right: number,
bottom: number
x: number,
y: number,
width: number,
height: number
}
}
......
......@@ -14,7 +14,13 @@ export function faceRecognitionNet(weights: Float32Array) {
function forward(input: tf.Tensor | NetInput | TNetInput) {
return tf.tidy(() => {
const x = normalize(padToSquare(getImageTensor(input)))
// TODO pad on both sides, to keep face centered
let x = padToSquare(getImageTensor(input))
// work with 150 x 150 sized face images
if (x.shape[1] !== 150 || x.shape[2] !== 150) {
x = tf.image.resizeBilinear(x, [150, 150])
}
x = normalize(x)
let out = convDown(x, params.conv32_down)
out = tf.maxPool(out, 3, 2, 'valid')
......
......@@ -2,12 +2,16 @@ import { euclideanDistance } from './euclideanDistance';
import { faceDetectionNet } from './faceDetectionNet';
import { faceRecognitionNet } from './faceRecognitionNet';
import { NetInput } from './NetInput';
import * as tf from '@tensorflow/tfjs-core';
export {
euclideanDistance,
faceDetectionNet,
faceRecognitionNet,
NetInput
NetInput,
tf
}
export * from './extractFaces'
export * from './extractFaceTensors'
export * from './utils'
\ No newline at end of file
......@@ -15,7 +15,10 @@ export type DrawBoxOptions = {
}
export type DrawTextOptions = {
lineWidth: number
fontSize: number
fontStyle: string
color: string
}
\ No newline at end of file
}
export type DrawOptions = DrawBoxOptions & DrawTextOptions
\ No newline at end of file
import { FaceDetectionNet } from './faceDetectionNet/types';
import { DrawBoxOptions, DrawTextOptions } from './types';
import { Dimensions, DrawBoxOptions, DrawOptions, DrawTextOptions } from './types';
export function isFloat(num: number) {
return num % 1 !== 0
......@@ -24,7 +24,23 @@ export function getContext2dOrThrow(canvas: HTMLCanvasElement): CanvasRenderingC
return ctx
}
// Creates a blank canvas element with the given dimensions.
export function createCanvas({ width, height }: Dimensions): HTMLCanvasElement {
  const canvas = document.createElement('canvas')
  Object.assign(canvas, { width, height })
  return canvas
}
// Creates a canvas of the given size, filled with the supplied RGBA pixel buffer.
export function createCanvasWithImageData({ width, height }: Dimensions, buf: Uint8ClampedArray): HTMLCanvasElement {
  const canvas = createCanvas({ width, height })
  const imageData = new ImageData(buf, width, height)
  getContext2dOrThrow(canvas).putImageData(imageData, 0, 0)
  return canvas
}
export function getMediaDimensions(media: HTMLImageElement | HTMLVideoElement) {
if (media instanceof HTMLImageElement) {
return { width: media.naturalWidth, height: media.naturalHeight }
}
if (media instanceof HTMLVideoElement) {
return { width: media.videoWidth, height: media.videoHeight }
}
......@@ -49,6 +65,15 @@ export function bufferToImage(buf: Blob): Promise<HTMLImageElement> {
})
}
// Default options used by drawBox / drawText when the caller passes none.
export function getDefaultDrawOptions(): DrawOptions {
  const defaults = {
    color: 'blue',
    lineWidth: 2,
    fontSize: 20,
    fontStyle: 'Georgia'
  }
  return defaults
}
export function drawBox(
ctx: CanvasRenderingContext2D,
x: number,
......@@ -69,9 +94,11 @@ export function drawText(
text: string,
options: DrawTextOptions
) {
const padText = 2 + options.lineWidth
ctx.fillStyle = options.color
ctx.font = `${options.fontSize}px ${options.fontStyle}`
ctx.fillText(text, x, y)
ctx.fillText(text, x + padText, y + padText + (options.fontSize * 0.6))
}
export function drawDetection(
......@@ -95,38 +122,35 @@ export function drawDetection(
} = det
const {
left,
right,
top,
bottom
x,
y,
width,
height
} = box
const {
color = 'blue',
lineWidth = 2,
fontSize = 20,
fontStyle = 'Georgia',
withScore = true
} = (options || {})
const drawOptions = Object.assign(
getDefaultDrawOptions(),
(options || {})
)
const padText = 2 + lineWidth
const { withScore } = Object.assign({ withScore: true }, (options || {}))
const ctx = getContext2dOrThrow(canvas)
drawBox(
ctx,
left,
top,
right - left,
bottom - top,
{ lineWidth, color }
x,
y,
width,
height,
drawOptions
)
if (withScore) {
drawText(
ctx,
left + padText,
top + (fontSize * 0.6) + padText,
x,
y,
`${round(score)}`,
{ fontSize, fontStyle, color }
drawOptions
)
}
})
......
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment