Commit 91898cde by vincent

quantization of face recognition model

parent 9cd6d13f
This source diff could not be displayed because it is too large. You can view the blob instead.
This source diff could not be displayed because it is too large. You can view the blob instead.
import * as tf from '@tensorflow/tfjs-core';
import { NetInput } from '../NetInput';
import { TNetInput } from '../types';
/**
 * Declaration of the face recognition network: computes a face descriptor
 * (embedding) tensor for a given face image input.
 */
export declare class FaceRecognitionNet {
// model parameters; populated by load() or extractWeights()
private _params;
/** Loads weights from a Float32Array, or fetches the quantized model from a uri (undefined = loader default). */
load(weightsOrUrl: Float32Array | string | undefined): Promise<void>;
/** Initializes parameters from raw, uncompressed weights. */
extractWeights(weights: Float32Array): void;
/** Forward pass; returns the rank-2 descriptor tensor. */
forward(input: tf.Tensor | NetInput | TNetInput): tf.Tensor<tf.Rank.R2>;
/** Forward pass plus async download of the descriptor values. */
computeFaceDescriptor(input: tf.Tensor | NetInput | TNetInput): Promise<Int32Array | Uint8Array | Float32Array>;
// NOTE(review): despite the "Sync" name this is declared to return a Promise,
// which matches the compiled implementation (it is wrapped in __awaiter) —
// confirm this is intended.
computeFaceDescriptorSync(input: tf.Tensor | NetInput | TNetInput): Promise<Int32Array | Uint8Array | Float32Array>;
}
import * as tslib_1 from "tslib";
import * as tf from '@tensorflow/tfjs-core';
import { getImageTensor } from '../getImageTensor';
import { padToSquare } from '../padToSquare';
import { convDown } from './convLayer';
import { extractParams } from './extractParams';
import { loadQuantizedParams } from './loadQuantizedParams';
import { normalize } from './normalize';
import { residual, residualDown } from './residualLayer';
// Down-leveled (ES5 + tslib) implementation of the face recognition net:
// a conv/residual stack over a 150x150 face image whose pooled output feeds
// a final fully connected layer producing the face descriptor.
var FaceRecognitionNet = /** @class */ (function () {
    function FaceRecognitionNet() {
    }
    /**
     * Loads the model parameters.
     * - Float32Array: weights are extracted directly (uncompressed model).
     * - string | undefined: treated as a model uri; the quantized weight map
     *   is fetched via loadQuantizedParams (undefined lets the loader pick
     *   its default location).
     * @returns Promise resolving once this._params is initialized.
     */
    FaceRecognitionNet.prototype.load = function (weightsOrUrl) {
        return tslib_1.__awaiter(this, void 0, void 0, function () {
            var _a;
            return tslib_1.__generator(this, function (_b) {
                switch (_b.label) {
                    case 0:
                        if (weightsOrUrl instanceof Float32Array) {
                            this.extractWeights(weightsOrUrl);
                            return [2 /*return*/];
                        }
                        if (weightsOrUrl && typeof weightsOrUrl !== 'string') {
                            // bugfix: message previously said 'FaceLandmarkNet.load'
                            // (copy-paste from the landmark net) in this class
                            throw new Error('FaceRecognitionNet.load - expected model uri, or weights as Float32Array');
                        }
                        _a = this;
                        return [4 /*yield*/, loadQuantizedParams(weightsOrUrl)];
                    case 1:
                        _a._params = _b.sent();
                        return [2 /*return*/];
                }
            });
        });
    };
    // Initializes this._params from a flat Float32Array of raw weights.
    FaceRecognitionNet.prototype.extractWeights = function (weights) {
        this._params = extractParams(weights);
    };
    /**
     * Runs the forward pass: pads the input to a square, resizes to 150x150
     * if necessary, normalizes, then applies the conv/residual stack; the
     * spatial mean of the final feature map feeds the fully connected layer.
     * @throws if neither load() nor extractWeights() has been called.
     * @returns rank-2 descriptor tensor (all intermediates disposed by tf.tidy).
     */
    FaceRecognitionNet.prototype.forward = function (input) {
        var _this = this;
        if (!this._params) {
            throw new Error('FaceRecognitionNet - load model before inference');
        }
        return tf.tidy(function () {
            var x = padToSquare(getImageTensor(input), true);
            // work with 150 x 150 sized face images
            if (x.shape[1] !== 150 || x.shape[2] !== 150) {
                x = tf.image.resizeBilinear(x, [150, 150]);
            }
            x = normalize(x);
            var out = convDown(x, _this._params.conv32_down);
            out = tf.maxPool(out, 3, 2, 'valid');
            out = residual(out, _this._params.conv32_1);
            out = residual(out, _this._params.conv32_2);
            out = residual(out, _this._params.conv32_3);
            out = residualDown(out, _this._params.conv64_down);
            out = residual(out, _this._params.conv64_1);
            out = residual(out, _this._params.conv64_2);
            out = residual(out, _this._params.conv64_3);
            out = residualDown(out, _this._params.conv128_down);
            out = residual(out, _this._params.conv128_1);
            out = residual(out, _this._params.conv128_2);
            out = residualDown(out, _this._params.conv256_down);
            out = residual(out, _this._params.conv256_1);
            out = residual(out, _this._params.conv256_2);
            out = residualDown(out, _this._params.conv256_down_out);
            var globalAvg = out.mean([1, 2]);
            var fullyConnected = tf.matMul(globalAvg, _this._params.fc);
            return fullyConnected;
        });
    };
    // Forward pass followed by an async download of the descriptor values;
    // the intermediate tensor is disposed after reading.
    FaceRecognitionNet.prototype.computeFaceDescriptor = function (input) {
        return tslib_1.__awaiter(this, void 0, void 0, function () {
            var result, data;
            return tslib_1.__generator(this, function (_a) {
                switch (_a.label) {
                    case 0:
                        result = this.forward(input);
                        return [4 /*yield*/, result.data()];
                    case 1:
                        data = _a.sent();
                        result.dispose();
                        return [2 /*return*/, data];
                }
            });
        });
    };
    // Variant reading the result with dataSync(). NOTE(review): still wrapped
    // in __awaiter, so callers receive a Promise despite the "Sync" name.
    FaceRecognitionNet.prototype.computeFaceDescriptorSync = function (input) {
        return tslib_1.__awaiter(this, void 0, void 0, function () {
            var result, data;
            return tslib_1.__generator(this, function (_a) {
                result = this.forward(input);
                data = result.dataSync();
                result.dispose();
                return [2 /*return*/, data];
            });
        });
    };
    return FaceRecognitionNet;
}());
export { FaceRecognitionNet };
//# sourceMappingURL=FaceRecognitionNet.js.map
\ No newline at end of file
{"version":3,"file":"FaceRecognitionNet.js","sourceRoot":"","sources":["../../src/faceRecognitionNet/FaceRecognitionNet.ts"],"names":[],"mappings":";AAAA,OAAO,KAAK,EAAE,MAAM,uBAAuB,CAAC;AAE5C,OAAO,EAAE,cAAc,EAAE,MAAM,mBAAmB,CAAC;AAEnD,OAAO,EAAE,WAAW,EAAE,MAAM,gBAAgB,CAAC;AAE7C,OAAO,EAAE,QAAQ,EAAE,MAAM,aAAa,CAAC;AACvC,OAAO,EAAE,aAAa,EAAE,MAAM,iBAAiB,CAAC;AAChD,OAAO,EAAE,mBAAmB,EAAE,MAAM,uBAAuB,CAAC;AAC5D,OAAO,EAAE,SAAS,EAAE,MAAM,aAAa,CAAC;AACxC,OAAO,EAAE,QAAQ,EAAE,YAAY,EAAE,MAAM,iBAAiB,CAAC;AAGzD;IAAA;IA2EA,CAAC;IAvEc,iCAAI,GAAjB,UAAkB,YAA+C;;;;;;wBAC/D,IAAI,YAAY,YAAY,YAAY,EAAE;4BACxC,IAAI,CAAC,cAAc,CAAC,YAAY,CAAC,CAAA;4BACjC,sBAAM;yBACP;wBAED,IAAI,YAAY,IAAI,OAAO,YAAY,KAAK,QAAQ,EAAE;4BACpD,MAAM,IAAI,KAAK,CAAC,uEAAuE,CAAC,CAAA;yBACzF;wBACD,KAAA,IAAI,CAAA;wBAAW,qBAAM,mBAAmB,CAAC,YAAY,CAAC,EAAA;;wBAAtD,GAAK,OAAO,GAAG,SAAuC,CAAA;;;;;KACvD;IAEM,2CAAc,GAArB,UAAsB,OAAqB;QACzC,IAAI,CAAC,OAAO,GAAG,aAAa,CAAC,OAAO,CAAC,CAAA;IACvC,CAAC;IAEM,oCAAO,GAAd,UAAe,KAAuC;QAAtD,iBAwCC;QAvCC,IAAI,CAAC,IAAI,CAAC,OAAO,EAAE;YACjB,MAAM,IAAI,KAAK,CAAC,kDAAkD,CAAC,CAAA;SACpE;QAED,OAAO,EAAE,CAAC,IAAI,CAAC;YAEb,IAAI,CAAC,GAAG,WAAW,CAAC,cAAc,CAAC,KAAK,CAAC,EAAE,IAAI,CAAC,CAAA;YAChD,wCAAwC;YACxC,IAAI,CAAC,CAAC,KAAK,CAAC,CAAC,CAAC,KAAK,GAAG,IAAI,CAAC,CAAC,KAAK,CAAC,CAAC,CAAC,KAAK,GAAG,EAAE;gBAC5C,CAAC,GAAG,EAAE,CAAC,KAAK,CAAC,cAAc,CAAC,CAAC,EAAE,CAAC,GAAG,EAAE,GAAG,CAAC,CAAC,CAAA;aAC3C;YACD,CAAC,GAAG,SAAS,CAAC,CAAC,CAAC,CAAA;YAEhB,IAAI,GAAG,GAAG,QAAQ,CAAC,CAAC,EAAE,KAAI,CAAC,OAAO,CAAC,WAAW,CAAC,CAAA;YAC/C,GAAG,GAAG,EAAE,CAAC,OAAO,CAAC,GAAG,EAAE,CAAC,EAAE,CAAC,EAAE,OAAO,CAAC,CAAA;YAEpC,GAAG,GAAG,QAAQ,CAAC,GAAG,EAAE,KAAI,CAAC,OAAO,CAAC,QAAQ,CAAC,CAAA;YAC1C,GAAG,GAAG,QAAQ,CAAC,GAAG,EAAE,KAAI,CAAC,OAAO,CAAC,QAAQ,CAAC,CAAA;YAC1C,GAAG,GAAG,QAAQ,CAAC,GAAG,EAAE,KAAI,CAAC,OAAO,CAAC,QAAQ,CAAC,CAAA;YAE1C,GAAG,GAAG,YAAY,CAAC,GAAG,EAAE,KAAI,CAAC,OAAO,CAAC,WAAW,CAAC,CAAA;YACjD,GAAG,GAAG,QAAQ,CAAC,GAAG,EAAE,KAAI,CAAC,OAAO,CAAC,QAAQ,CAAC,CAAA;YAC1C,GAAG,GAAG,QAAQ,CAAC,GAAG,EAAE,KAAI,CAAC,OAAO,CAAC,QAAQ,CAAC,CAAA;YAC
1C,GAAG,GAAG,QAAQ,CAAC,GAAG,EAAE,KAAI,CAAC,OAAO,CAAC,QAAQ,CAAC,CAAA;YAE1C,GAAG,GAAG,YAAY,CAAC,GAAG,EAAE,KAAI,CAAC,OAAO,CAAC,YAAY,CAAC,CAAA;YAClD,GAAG,GAAG,QAAQ,CAAC,GAAG,EAAE,KAAI,CAAC,OAAO,CAAC,SAAS,CAAC,CAAA;YAC3C,GAAG,GAAG,QAAQ,CAAC,GAAG,EAAE,KAAI,CAAC,OAAO,CAAC,SAAS,CAAC,CAAA;YAE3C,GAAG,GAAG,YAAY,CAAC,GAAG,EAAE,KAAI,CAAC,OAAO,CAAC,YAAY,CAAC,CAAA;YAClD,GAAG,GAAG,QAAQ,CAAC,GAAG,EAAE,KAAI,CAAC,OAAO,CAAC,SAAS,CAAC,CAAA;YAC3C,GAAG,GAAG,QAAQ,CAAC,GAAG,EAAE,KAAI,CAAC,OAAO,CAAC,SAAS,CAAC,CAAA;YAC3C,GAAG,GAAG,YAAY,CAAC,GAAG,EAAE,KAAI,CAAC,OAAO,CAAC,gBAAgB,CAAC,CAAA;YAEtD,IAAM,SAAS,GAAG,GAAG,CAAC,IAAI,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,CAAgB,CAAA;YACjD,IAAM,cAAc,GAAG,EAAE,CAAC,MAAM,CAAC,SAAS,EAAE,KAAI,CAAC,OAAO,CAAC,EAAE,CAAC,CAAA;YAE5D,OAAO,cAAc,CAAA;QACvB,CAAC,CAAC,CAAA;IACJ,CAAC;IAEY,kDAAqB,GAAlC,UAAmC,KAAuC;;;;;;wBAClE,MAAM,GAAG,IAAI,CAAC,OAAO,CAAC,KAAK,CAAC,CAAA;wBACrB,qBAAM,MAAM,CAAC,IAAI,EAAE,EAAA;;wBAA1B,IAAI,GAAG,SAAmB;wBAChC,MAAM,CAAC,OAAO,EAAE,CAAA;wBAChB,sBAAO,IAAI,EAAA;;;;KACZ;IAEY,sDAAyB,GAAtC,UAAuC,KAAuC;;;;gBACtE,MAAM,GAAG,IAAI,CAAC,OAAO,CAAC,KAAK,CAAC,CAAA;gBAC5B,IAAI,GAAG,MAAM,CAAC,QAAQ,EAAE,CAAA;gBAC9B,MAAM,CAAC,OAAO,EAAE,CAAA;gBAChB,sBAAO,IAAI,EAAA;;;KACZ;IACH,yBAAC;AAAD,CAAC,AA3ED,IA2EC"}
\ No newline at end of file
import * as tf from '@tensorflow/tfjs-core';
import { FaceRecognitionNet } from './types';
// Pre-refactor declarations: conv-layer param types addressed through the
// FaceRecognitionNet namespace.
export declare function conv(x: tf.Tensor4D, params: FaceRecognitionNet.ConvLayerParams): tf.Tensor<tf.Rank.R4>;
export declare function convNoRelu(x: tf.Tensor4D, params: FaceRecognitionNet.ConvLayerParams): tf.Tensor<tf.Rank.R4>;
export declare function convDown(x: tf.Tensor4D, params: FaceRecognitionNet.ConvLayerParams): tf.Tensor<tf.Rank.R4>;
import { ConvLayerParams } from './types';
// Post-refactor declarations: ConvLayerParams imported directly from './types'.
export declare function conv(x: tf.Tensor4D, params: ConvLayerParams): tf.Tensor<tf.Rank.R4>;
export declare function convNoRelu(x: tf.Tensor4D, params: ConvLayerParams): tf.Tensor<tf.Rank.R4>;
export declare function convDown(x: tf.Tensor4D, params: ConvLayerParams): tf.Tensor<tf.Rank.R4>;
{"version":3,"file":"convLayer.js","sourceRoot":"","sources":["../../src/faceRecognitionNet/convLayer.ts"],"names":[],"mappings":"AAAA,OAAO,KAAK,EAAE,MAAM,uBAAuB,CAAC;AAE5C,OAAO,EAAE,KAAK,EAAE,MAAM,cAAc,CAAC;AAIrC,mBACE,CAAc,EACd,MAA0C,EAC1C,OAAyB,EACzB,QAAiB,EACjB,OAAkC;IAAlC,wBAAA,EAAA,gBAAkC;IAE5B,IAAA,gBAA+B,EAA7B,oBAAO,EAAE,cAAI,CAAgB;IAErC,IAAI,GAAG,GAAG,EAAE,CAAC,MAAM,CAAC,CAAC,EAAE,OAAO,EAAE,OAAO,EAAE,OAAO,CAAC,CAAA;IACjD,GAAG,GAAG,EAAE,CAAC,GAAG,CAAC,GAAG,EAAE,IAAI,CAAC,CAAA;IACvB,GAAG,GAAG,KAAK,CAAC,GAAG,EAAE,MAAM,CAAC,KAAK,CAAC,CAAA;IAC9B,OAAO,QAAQ,CAAC,CAAC,CAAC,EAAE,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC,GAAG,CAAA;AACtC,CAAC;AAED,MAAM,eAAe,CAAc,EAAE,MAA0C;IAC7E,OAAO,SAAS,CAAC,CAAC,EAAE,MAAM,EAAE,CAAC,CAAC,EAAE,CAAC,CAAC,EAAE,IAAI,CAAC,CAAA;AAC3C,CAAC;AAED,MAAM,qBAAqB,CAAc,EAAE,MAA0C;IACnF,OAAO,SAAS,CAAC,CAAC,EAAE,MAAM,EAAE,CAAC,CAAC,EAAE,CAAC,CAAC,EAAE,KAAK,CAAC,CAAA;AAC5C,CAAC;AAED,MAAM,mBAAmB,CAAc,EAAE,MAA0C;IACjF,OAAO,SAAS,CAAC,CAAC,EAAE,MAAM,EAAE,CAAC,CAAC,EAAE,CAAC,CAAC,EAAE,IAAI,EAAE,OAAO,CAAC,CAAA;AACpD,CAAC"}
\ No newline at end of file
{"version":3,"file":"convLayer.js","sourceRoot":"","sources":["../../src/faceRecognitionNet/convLayer.ts"],"names":[],"mappings":"AAAA,OAAO,KAAK,EAAE,MAAM,uBAAuB,CAAC;AAE5C,OAAO,EAAE,KAAK,EAAE,MAAM,cAAc,CAAC;AAIrC,mBACE,CAAc,EACd,MAAuB,EACvB,OAAyB,EACzB,QAAiB,EACjB,OAAkC;IAAlC,wBAAA,EAAA,gBAAkC;IAE5B,IAAA,gBAA+B,EAA7B,oBAAO,EAAE,cAAI,CAAgB;IAErC,IAAI,GAAG,GAAG,EAAE,CAAC,MAAM,CAAC,CAAC,EAAE,OAAO,EAAE,OAAO,EAAE,OAAO,CAAC,CAAA;IACjD,GAAG,GAAG,EAAE,CAAC,GAAG,CAAC,GAAG,EAAE,IAAI,CAAC,CAAA;IACvB,GAAG,GAAG,KAAK,CAAC,GAAG,EAAE,MAAM,CAAC,KAAK,CAAC,CAAA;IAC9B,OAAO,QAAQ,CAAC,CAAC,CAAC,EAAE,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC,GAAG,CAAA;AACtC,CAAC;AAED,MAAM,eAAe,CAAc,EAAE,MAAuB;IAC1D,OAAO,SAAS,CAAC,CAAC,EAAE,MAAM,EAAE,CAAC,CAAC,EAAE,CAAC,CAAC,EAAE,IAAI,CAAC,CAAA;AAC3C,CAAC;AAED,MAAM,qBAAqB,CAAc,EAAE,MAAuB;IAChE,OAAO,SAAS,CAAC,CAAC,EAAE,MAAM,EAAE,CAAC,CAAC,EAAE,CAAC,CAAC,EAAE,KAAK,CAAC,CAAA;AAC5C,CAAC;AAED,MAAM,mBAAmB,CAAc,EAAE,MAAuB;IAC9D,OAAO,SAAS,CAAC,CAAC,EAAE,MAAM,EAAE,CAAC,CAAC,EAAE,CAAC,CAAC,EAAE,IAAI,EAAE,OAAO,CAAC,CAAA;AACpD,CAAC"}
\ No newline at end of file
import { FaceRecognitionNet } from './types';
// Pre-refactor declaration: NetParams addressed through the namespace.
export declare function extractParams(weights: Float32Array): FaceRecognitionNet.NetParams;
import { NetParams } from './types';
// Post-refactor declaration: NetParams imported directly. Builds the full
// parameter set from a flat Float32Array of weights.
export declare function extractParams(weights: Float32Array): NetParams;
{"version":3,"file":"extractParams.js","sourceRoot":"","sources":["../../src/faceRecognitionNet/extractParams.ts"],"names":[],"mappings":"AAAA,OAAO,KAAK,EAAE,MAAM,uBAAuB,CAAC;AAE5C,OAAO,EAAE,qBAAqB,EAAE,MAAM,kCAAkC,CAAC;AAEzE,OAAO,EAAE,OAAO,EAAE,MAAM,UAAU,CAAC;AAGnC,2BAA2B,cAAsC;IAE/D,6BAA6B,eAAuB,EAAE,UAAkB,EAAE,UAAkB;QAC1F,IAAM,OAAO,GAAG,cAAc,CAAC,eAAe,CAAC,CAAA;QAC/C,IAAM,KAAK,GAAG,OAAO,CAAC,MAAM,GAAG,CAAC,UAAU,GAAG,UAAU,GAAG,UAAU,CAAC,CAAA;QAErE,IAAI,OAAO,CAAC,KAAK,CAAC,EAAE;YAClB,MAAM,IAAI,KAAK,CAAC,iCAA+B,KAAK,0BAAqB,OAAO,CAAC,MAAM,sBAAiB,UAAU,sBAAiB,UAAY,CAAC,CAAA;SACjJ;QAED,OAAO,EAAE,CAAC,SAAS,CACjB,EAAE,CAAC,QAAQ,CAAC,OAAO,EAAE,CAAC,UAAU,EAAE,KAAK,EAAE,UAAU,EAAE,UAAU,CAAC,CAAC,EACjE,CAAC,CAAC,EAAE,CAAC,EAAE,CAAC,EAAE,CAAC,CAAC,CACb,CAAA;IACH,CAAC;IAED,iCAAiC,UAAkB;QACjD,IAAM,OAAO,GAAG,EAAE,CAAC,QAAQ,CAAC,cAAc,CAAC,UAAU,CAAC,CAAC,CAAA;QACvD,IAAM,MAAM,GAAG,EAAE,CAAC,QAAQ,CAAC,cAAc,CAAC,UAAU,CAAC,CAAC,CAAA;QACtD,OAAO;YACL,OAAO,SAAA;YACP,MAAM,QAAA;SACP,CAAA;IACH,CAAC;IAED,gCACE,eAAuB,EACvB,UAAkB,EAClB,UAAkB;QAElB,IAAM,YAAY,GAAG,mBAAmB,CAAC,eAAe,EAAE,UAAU,EAAE,UAAU,CAAC,CAAA;QACjF,IAAM,SAAS,GAAG,EAAE,CAAC,QAAQ,CAAC,cAAc,CAAC,UAAU,CAAC,CAAC,CAAA;QACzD,IAAM,KAAK,GAAG,uBAAuB,CAAC,UAAU,CAAC,CAAA;QAEjD,OAAO;YACL,IAAI,EAAE;gBACJ,OAAO,EAAE,YAAY;gBACrB,IAAI,EAAE,SAAS;aAChB;YACD,KAAK,OAAA;SACN,CAAA;IACH,CAAC;IAED,oCAAoC,eAAuB,EAAE,UAAkB,EAAE,UAAkB,EAAE,MAAuB;QAAvB,uBAAA,EAAA,cAAuB;QAC1H,IAAM,KAAK,GAAuC,sBAAsB,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC,CAAC,GAAG,eAAe,EAAE,UAAU,EAAE,UAAU,CAAC,CAAA;QACtI,IAAM,KAAK,GAAuC,sBAAsB,CAAC,eAAe,EAAE,UAAU,EAAE,UAAU,CAAC,CAAA;QAEjH,OAAO;YACL,KAAK,OAAA;YACL,KAAK,OAAA;SACN,CAAA;IACH,CAAC;IAED,OAAO;QACL,sBAAsB,wBAAA;QACtB,0BAA0B,4BAAA;KAC3B,CAAA;AAEH,CAAC;AAED,MAAM,wBAAwB,OAAqB;IAC3C,IAAA,mCAG4B,EAFhC,kCAAc,EACd,4CAAmB,CACa;IAE5B,IAAA,sCAG+B,EAFnC,kDAAsB,EACtB,0DAA0B,CACS;IAErC,IAAM,WAAW,GAAG,sBAAsB,CAAC,IAAI,EAAE,EAAE,EAAE,CAAC,CAAC,CAAA;IACvD,IAAM,QAAQ,GAAG,0BAA0B,CAAC,IAAI,EAAE,EAAE,EAAE,CAAC,CAAC,CAAA;IACxD,IAAM,QAAQ,GAA
G,0BAA0B,CAAC,IAAI,EAAE,EAAE,EAAE,CAAC,CAAC,CAAA;IACxD,IAAM,QAAQ,GAAG,0BAA0B,CAAC,IAAI,EAAE,EAAE,EAAE,CAAC,CAAC,CAAA;IAExD,IAAM,WAAW,GAAG,0BAA0B,CAAC,KAAK,EAAE,EAAE,EAAE,CAAC,EAAE,IAAI,CAAC,CAAA;IAClE,IAAM,QAAQ,GAAG,0BAA0B,CAAC,KAAK,EAAE,EAAE,EAAE,CAAC,CAAC,CAAA;IACzD,IAAM,QAAQ,GAAG,0BAA0B,CAAC,KAAK,EAAE,EAAE,EAAE,CAAC,CAAC,CAAA;IACzD,IAAM,QAAQ,GAAG,0BAA0B,CAAC,KAAK,EAAE,EAAE,EAAE,CAAC,CAAC,CAAA;IAEzD,IAAM,YAAY,GAAG,0BAA0B,CAAC,MAAM,EAAE,GAAG,EAAE,CAAC,EAAE,IAAI,CAAC,CAAA;IACrE,IAAM,SAAS,GAAG,0BAA0B,CAAC,MAAM,EAAE,GAAG,EAAE,CAAC,CAAC,CAAA;IAC5D,IAAM,SAAS,GAAG,0BAA0B,CAAC,MAAM,EAAE,GAAG,EAAE,CAAC,CAAC,CAAA;IAE5D,IAAM,YAAY,GAAG,0BAA0B,CAAC,MAAM,EAAE,GAAG,EAAE,CAAC,EAAE,IAAI,CAAC,CAAA;IACrE,IAAM,SAAS,GAAG,0BAA0B,CAAC,MAAM,EAAE,GAAG,EAAE,CAAC,CAAC,CAAA;IAC5D,IAAM,SAAS,GAAG,0BAA0B,CAAC,MAAM,EAAE,GAAG,EAAE,CAAC,CAAC,CAAA;IAC5D,IAAM,gBAAgB,GAAG,0BAA0B,CAAC,MAAM,EAAE,GAAG,EAAE,CAAC,CAAC,CAAA;IAEnE,IAAM,EAAE,GAAG,EAAE,CAAC,SAAS,CAAC,EAAE,CAAC,QAAQ,CAAC,cAAc,CAAC,GAAG,GAAG,GAAG,CAAC,EAAE,CAAC,GAAG,EAAE,GAAG,CAAC,CAAC,EAAE,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,CAAA;IAEnF,IAAI,mBAAmB,EAAE,CAAC,MAAM,KAAK,CAAC,EAAE;QACtC,MAAM,IAAI,KAAK,CAAC,oCAAkC,mBAAmB,EAAE,CAAC,MAAQ,CAAC,CAAA;KAClF;IAED,OAAO;QACL,WAAW,aAAA;QACX,QAAQ,UAAA;QACR,QAAQ,UAAA;QACR,QAAQ,UAAA;QACR,WAAW,aAAA;QACX,QAAQ,UAAA;QACR,QAAQ,UAAA;QACR,QAAQ,UAAA;QACR,YAAY,cAAA;QACZ,SAAS,WAAA;QACT,SAAS,WAAA;QACT,YAAY,cAAA;QACZ,SAAS,WAAA;QACT,SAAS,WAAA;QACT,gBAAgB,kBAAA;QAChB,EAAE,IAAA;KACH,CAAA;AACH,CAAC"}
\ No newline at end of file
{"version":3,"file":"extractParams.js","sourceRoot":"","sources":["../../src/faceRecognitionNet/extractParams.ts"],"names":[],"mappings":"AAAA,OAAO,KAAK,EAAE,MAAM,uBAAuB,CAAC;AAE5C,OAAO,EAAE,qBAAqB,EAAE,MAAM,kCAAkC,CAAC;AAEzE,OAAO,EAAE,OAAO,EAAE,MAAM,UAAU,CAAC;AAGnC,2BAA2B,cAAsC;IAE/D,6BAA6B,eAAuB,EAAE,UAAkB,EAAE,UAAkB;QAC1F,IAAM,OAAO,GAAG,cAAc,CAAC,eAAe,CAAC,CAAA;QAC/C,IAAM,KAAK,GAAG,OAAO,CAAC,MAAM,GAAG,CAAC,UAAU,GAAG,UAAU,GAAG,UAAU,CAAC,CAAA;QAErE,IAAI,OAAO,CAAC,KAAK,CAAC,EAAE;YAClB,MAAM,IAAI,KAAK,CAAC,iCAA+B,KAAK,0BAAqB,OAAO,CAAC,MAAM,sBAAiB,UAAU,sBAAiB,UAAY,CAAC,CAAA;SACjJ;QAED,OAAO,EAAE,CAAC,SAAS,CACjB,EAAE,CAAC,QAAQ,CAAC,OAAO,EAAE,CAAC,UAAU,EAAE,KAAK,EAAE,UAAU,EAAE,UAAU,CAAC,CAAC,EACjE,CAAC,CAAC,EAAE,CAAC,EAAE,CAAC,EAAE,CAAC,CAAC,CACb,CAAA;IACH,CAAC;IAED,iCAAiC,UAAkB;QACjD,IAAM,OAAO,GAAG,EAAE,CAAC,QAAQ,CAAC,cAAc,CAAC,UAAU,CAAC,CAAC,CAAA;QACvD,IAAM,MAAM,GAAG,EAAE,CAAC,QAAQ,CAAC,cAAc,CAAC,UAAU,CAAC,CAAC,CAAA;QACtD,OAAO;YACL,OAAO,SAAA;YACP,MAAM,QAAA;SACP,CAAA;IACH,CAAC;IAED,gCACE,eAAuB,EACvB,UAAkB,EAClB,UAAkB;QAElB,IAAM,YAAY,GAAG,mBAAmB,CAAC,eAAe,EAAE,UAAU,EAAE,UAAU,CAAC,CAAA;QACjF,IAAM,SAAS,GAAG,EAAE,CAAC,QAAQ,CAAC,cAAc,CAAC,UAAU,CAAC,CAAC,CAAA;QACzD,IAAM,KAAK,GAAG,uBAAuB,CAAC,UAAU,CAAC,CAAA;QAEjD,OAAO;YACL,IAAI,EAAE;gBACJ,OAAO,EAAE,YAAY;gBACrB,IAAI,EAAE,SAAS;aAChB;YACD,KAAK,OAAA;SACN,CAAA;IACH,CAAC;IAED,oCACE,eAAuB,EACvB,UAAkB,EAClB,UAAkB,EAClB,MAAuB;QAAvB,uBAAA,EAAA,cAAuB;QAEvB,IAAM,KAAK,GAAoB,sBAAsB,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC,CAAC,GAAG,eAAe,EAAE,UAAU,EAAE,UAAU,CAAC,CAAA;QACnH,IAAM,KAAK,GAAoB,sBAAsB,CAAC,eAAe,EAAE,UAAU,EAAE,UAAU,CAAC,CAAA;QAE9F,OAAO;YACL,KAAK,OAAA;YACL,KAAK,OAAA;SACN,CAAA;IACH,CAAC;IAED,OAAO;QACL,sBAAsB,wBAAA;QACtB,0BAA0B,4BAAA;KAC3B,CAAA;AAEH,CAAC;AAED,MAAM,wBAAwB,OAAqB;IAC3C,IAAA,mCAG4B,EAFhC,kCAAc,EACd,4CAAmB,CACa;IAE5B,IAAA,sCAG+B,EAFnC,kDAAsB,EACtB,0DAA0B,CACS;IAErC,IAAM,WAAW,GAAG,sBAAsB,CAAC,IAAI,EAAE,EAAE,EAAE,CAAC,CAAC,CAAA;IACvD,IAAM,QAAQ,GAAG,0BAA0B,CAAC,IAAI,EAAE,EAAE,EAAE,CAAC,CAAC,CAAA;IACxD,IAAM,QAAQ,G
AAG,0BAA0B,CAAC,IAAI,EAAE,EAAE,EAAE,CAAC,CAAC,CAAA;IACxD,IAAM,QAAQ,GAAG,0BAA0B,CAAC,IAAI,EAAE,EAAE,EAAE,CAAC,CAAC,CAAA;IAExD,IAAM,WAAW,GAAG,0BAA0B,CAAC,KAAK,EAAE,EAAE,EAAE,CAAC,EAAE,IAAI,CAAC,CAAA;IAClE,IAAM,QAAQ,GAAG,0BAA0B,CAAC,KAAK,EAAE,EAAE,EAAE,CAAC,CAAC,CAAA;IACzD,IAAM,QAAQ,GAAG,0BAA0B,CAAC,KAAK,EAAE,EAAE,EAAE,CAAC,CAAC,CAAA;IACzD,IAAM,QAAQ,GAAG,0BAA0B,CAAC,KAAK,EAAE,EAAE,EAAE,CAAC,CAAC,CAAA;IAEzD,IAAM,YAAY,GAAG,0BAA0B,CAAC,MAAM,EAAE,GAAG,EAAE,CAAC,EAAE,IAAI,CAAC,CAAA;IACrE,IAAM,SAAS,GAAG,0BAA0B,CAAC,MAAM,EAAE,GAAG,EAAE,CAAC,CAAC,CAAA;IAC5D,IAAM,SAAS,GAAG,0BAA0B,CAAC,MAAM,EAAE,GAAG,EAAE,CAAC,CAAC,CAAA;IAE5D,IAAM,YAAY,GAAG,0BAA0B,CAAC,MAAM,EAAE,GAAG,EAAE,CAAC,EAAE,IAAI,CAAC,CAAA;IACrE,IAAM,SAAS,GAAG,0BAA0B,CAAC,MAAM,EAAE,GAAG,EAAE,CAAC,CAAC,CAAA;IAC5D,IAAM,SAAS,GAAG,0BAA0B,CAAC,MAAM,EAAE,GAAG,EAAE,CAAC,CAAC,CAAA;IAC5D,IAAM,gBAAgB,GAAG,0BAA0B,CAAC,MAAM,EAAE,GAAG,EAAE,CAAC,CAAC,CAAA;IAEnE,IAAM,EAAE,GAAG,EAAE,CAAC,SAAS,CAAC,EAAE,CAAC,QAAQ,CAAC,cAAc,CAAC,GAAG,GAAG,GAAG,CAAC,EAAE,CAAC,GAAG,EAAE,GAAG,CAAC,CAAC,EAAE,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,CAAA;IAEnF,IAAI,mBAAmB,EAAE,CAAC,MAAM,KAAK,CAAC,EAAE;QACtC,MAAM,IAAI,KAAK,CAAC,oCAAkC,mBAAmB,EAAE,CAAC,MAAQ,CAAC,CAAA;KAClF;IAED,OAAO;QACL,WAAW,aAAA;QACX,QAAQ,UAAA;QACR,QAAQ,UAAA;QACR,QAAQ,UAAA;QACR,WAAW,aAAA;QACX,QAAQ,UAAA;QACR,QAAQ,UAAA;QACR,QAAQ,UAAA;QACR,YAAY,cAAA;QACZ,SAAS,WAAA;QACT,SAAS,WAAA;QACT,YAAY,cAAA;QACZ,SAAS,WAAA;QACT,SAAS,WAAA;QACT,gBAAgB,kBAAA;QAChB,EAAE,IAAA;KACH,CAAA;AACH,CAAC"}
\ No newline at end of file
import * as tf from '@tensorflow/tfjs-core';
import { NetInput } from '../NetInput';
// Pre-refactor factory declaration: returns an ad-hoc object of bound
// functions rather than a FaceRecognitionNet instance. Note the "Sync"
// variant here returns a plain typed array, not a Promise.
export declare function faceRecognitionNet(weights: Float32Array): {
computeFaceDescriptor: (input: string | HTMLCanvasElement | HTMLImageElement | HTMLVideoElement | (string | HTMLCanvasElement | HTMLImageElement | HTMLVideoElement)[] | tf.Tensor<tf.Rank> | NetInput) => Promise<Int32Array | Uint8Array | Float32Array>;
computeFaceDescriptorSync: (input: string | HTMLCanvasElement | HTMLImageElement | HTMLVideoElement | (string | HTMLCanvasElement | HTMLImageElement | HTMLVideoElement)[] | tf.Tensor<tf.Rank> | NetInput) => Int32Array | Uint8Array | Float32Array;
forward: (input: string | HTMLCanvasElement | HTMLImageElement | HTMLVideoElement | (string | HTMLCanvasElement | HTMLImageElement | HTMLVideoElement)[] | tf.Tensor<tf.Rank> | NetInput) => tf.Tensor<tf.Rank.R2>;
};
import { FaceRecognitionNet } from './FaceRecognitionNet';
export * from './FaceRecognitionNet';
// Post-refactor factory: constructs a FaceRecognitionNet and extracts the
// given weights into it.
export declare function faceRecognitionNet(weights: Float32Array): FaceRecognitionNet;
import * as tslib_1 from "tslib";
import * as tf from '@tensorflow/tfjs-core';
import { getImageTensor } from '../getImageTensor';
import { padToSquare } from '../padToSquare';
import { convDown } from './convLayer';
import { extractParams } from './extractParams';
import { normalize } from './normalize';
import { residual, residualDown } from './residualLayer';
import { FaceRecognitionNet } from './FaceRecognitionNet';
export * from './FaceRecognitionNet';
/**
 * Legacy functional factory for the face recognition net. Extracts params
 * from the given flat weight array and returns an object exposing
 * forward / computeFaceDescriptor / computeFaceDescriptorSync.
 */
export function faceRecognitionNet(weights) {
    var _this = this;
    var params = extractParams(weights);
    // Forward pass: pad to square, resize to 150x150 if needed, normalize,
    // run the conv/residual stack, global average pool, fully connected.
    function forward(input) {
        return tf.tidy(function () {
            var x = padToSquare(getImageTensor(input), true);
            // work with 150 x 150 sized face images
            if (x.shape[1] !== 150 || x.shape[2] !== 150) {
                x = tf.image.resizeBilinear(x, [150, 150]);
            }
            x = normalize(x);
            var out = convDown(x, params.conv32_down);
            out = tf.maxPool(out, 3, 2, 'valid');
            out = residual(out, params.conv32_1);
            out = residual(out, params.conv32_2);
            out = residual(out, params.conv32_3);
            out = residualDown(out, params.conv64_down);
            out = residual(out, params.conv64_1);
            out = residual(out, params.conv64_2);
            out = residual(out, params.conv64_3);
            out = residualDown(out, params.conv128_down);
            out = residual(out, params.conv128_1);
            out = residual(out, params.conv128_2);
            out = residualDown(out, params.conv256_down);
            out = residual(out, params.conv256_1);
            out = residual(out, params.conv256_2);
            out = residualDown(out, params.conv256_down_out);
            var globalAvg = out.mean([1, 2]);
            var fullyConnected = tf.matMul(globalAvg, params.fc);
            return fullyConnected;
        });
    }
    // Forward pass plus async download of the descriptor values; disposes
    // the intermediate tensor after reading.
    var computeFaceDescriptor = function (input) { return tslib_1.__awaiter(_this, void 0, void 0, function () {
        var result, data;
        return tslib_1.__generator(this, function (_a) {
            switch (_a.label) {
                case 0:
                    result = forward(input);
                    return [4 /*yield*/, result.data()];
                case 1:
                    data = _a.sent();
                    result.dispose();
                    return [2 /*return*/, data];
            }
        });
    }); };
    // Synchronous variant reading the result with dataSync().
    var computeFaceDescriptorSync = function (input) {
        var result = forward(input);
        var data = result.dataSync();
        result.dispose();
        return data;
    };
    return {
        computeFaceDescriptor: computeFaceDescriptor,
        computeFaceDescriptorSync: computeFaceDescriptorSync,
        forward: forward
    };
    // bugfix: removed the unreachable statements that followed this return
    // (`var net = new FaceRecognitionNet(); net.extractWeights(weights);
    // return net;`) — dead code left over from two generated versions of
    // this file being merged together.
}
//# sourceMappingURL=index.js.map
\ No newline at end of file
{"version":3,"file":"index.js","sourceRoot":"","sources":["../../src/faceRecognitionNet/index.ts"],"names":[],"mappings":";AAAA,OAAO,KAAK,EAAE,MAAM,uBAAuB,CAAC;AAE5C,OAAO,EAAE,cAAc,EAAE,MAAM,mBAAmB,CAAC;AAEnD,OAAO,EAAE,WAAW,EAAE,MAAM,gBAAgB,CAAC;AAE7C,OAAO,EAAE,QAAQ,EAAE,MAAM,aAAa,CAAC;AACvC,OAAO,EAAE,aAAa,EAAE,MAAM,iBAAiB,CAAC;AAChD,OAAO,EAAE,SAAS,EAAE,MAAM,aAAa,CAAC;AACxC,OAAO,EAAE,QAAQ,EAAE,YAAY,EAAE,MAAM,iBAAiB,CAAC;AAEzD,MAAM,6BAA6B,OAAqB;IAAxD,iBA4DC;IA3DC,IAAM,MAAM,GAAG,aAAa,CAAC,OAAO,CAAC,CAAA;IAErC,iBAAiB,KAAuC;QACtD,OAAO,EAAE,CAAC,IAAI,CAAC;YAEb,IAAI,CAAC,GAAG,WAAW,CAAC,cAAc,CAAC,KAAK,CAAC,EAAE,IAAI,CAAC,CAAA;YAChD,wCAAwC;YACxC,IAAI,CAAC,CAAC,KAAK,CAAC,CAAC,CAAC,KAAK,GAAG,IAAI,CAAC,CAAC,KAAK,CAAC,CAAC,CAAC,KAAK,GAAG,EAAE;gBAC5C,CAAC,GAAG,EAAE,CAAC,KAAK,CAAC,cAAc,CAAC,CAAC,EAAE,CAAC,GAAG,EAAE,GAAG,CAAC,CAAC,CAAA;aAC3C;YACD,CAAC,GAAG,SAAS,CAAC,CAAC,CAAC,CAAA;YAEhB,IAAI,GAAG,GAAG,QAAQ,CAAC,CAAC,EAAE,MAAM,CAAC,WAAW,CAAC,CAAA;YACzC,GAAG,GAAG,EAAE,CAAC,OAAO,CAAC,GAAG,EAAE,CAAC,EAAE,CAAC,EAAE,OAAO,CAAC,CAAA;YAEpC,GAAG,GAAG,QAAQ,CAAC,GAAG,EAAE,MAAM,CAAC,QAAQ,CAAC,CAAA;YACpC,GAAG,GAAG,QAAQ,CAAC,GAAG,EAAE,MAAM,CAAC,QAAQ,CAAC,CAAA;YACpC,GAAG,GAAG,QAAQ,CAAC,GAAG,EAAE,MAAM,CAAC,QAAQ,CAAC,CAAA;YAEpC,GAAG,GAAG,YAAY,CAAC,GAAG,EAAE,MAAM,CAAC,WAAW,CAAC,CAAA;YAC3C,GAAG,GAAG,QAAQ,CAAC,GAAG,EAAE,MAAM,CAAC,QAAQ,CAAC,CAAA;YACpC,GAAG,GAAG,QAAQ,CAAC,GAAG,EAAE,MAAM,CAAC,QAAQ,CAAC,CAAA;YACpC,GAAG,GAAG,QAAQ,CAAC,GAAG,EAAE,MAAM,CAAC,QAAQ,CAAC,CAAA;YAEpC,GAAG,GAAG,YAAY,CAAC,GAAG,EAAE,MAAM,CAAC,YAAY,CAAC,CAAA;YAC5C,GAAG,GAAG,QAAQ,CAAC,GAAG,EAAE,MAAM,CAAC,SAAS,CAAC,CAAA;YACrC,GAAG,GAAG,QAAQ,CAAC,GAAG,EAAE,MAAM,CAAC,SAAS,CAAC,CAAA;YAErC,GAAG,GAAG,YAAY,CAAC,GAAG,EAAE,MAAM,CAAC,YAAY,CAAC,CAAA;YAC5C,GAAG,GAAG,QAAQ,CAAC,GAAG,EAAE,MAAM,CAAC,SAAS,CAAC,CAAA;YACrC,GAAG,GAAG,QAAQ,CAAC,GAAG,EAAE,MAAM,CAAC,SAAS,CAAC,CAAA;YACrC,GAAG,GAAG,YAAY,CAAC,GAAG,EAAE,MAAM,CAAC,gBAAgB,CAAC,CAAA;YAEhD,IAAM,SAAS,GAAG,GAAG,CAAC,IAAI,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,CAAgB,CAAA;YACjD,IAAM,cAAc,GAAG,EAAE,CAAC,MAAM,CAAC,
SAAS,EAAE,MAAM,CAAC,EAAE,CAAC,CAAA;YAEtD,OAAO,cAAc,CAAA;QACvB,CAAC,CAAC,CAAA;IACJ,CAAC;IAED,IAAM,qBAAqB,GAAG,UAAO,KAAuC;;;;;oBACpE,MAAM,GAAG,OAAO,CAAC,KAAK,CAAC,CAAA;oBAChB,qBAAM,MAAM,CAAC,IAAI,EAAE,EAAA;;oBAA1B,IAAI,GAAG,SAAmB;oBAChC,MAAM,CAAC,OAAO,EAAE,CAAA;oBAChB,sBAAO,IAAI,EAAA;;;SACZ,CAAA;IAED,IAAM,yBAAyB,GAAG,UAAC,KAAuC;QACxE,IAAM,MAAM,GAAG,OAAO,CAAC,KAAK,CAAC,CAAA;QAC7B,IAAM,IAAI,GAAG,MAAM,CAAC,QAAQ,EAAE,CAAA;QAC9B,MAAM,CAAC,OAAO,EAAE,CAAA;QAChB,OAAO,IAAI,CAAA;IACb,CAAC,CAAA;IAED,OAAO;QACL,qBAAqB,uBAAA;QACrB,yBAAyB,2BAAA;QACzB,OAAO,SAAA;KACR,CAAA;AACH,CAAC"}
\ No newline at end of file
{"version":3,"file":"index.js","sourceRoot":"","sources":["../../src/faceRecognitionNet/index.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,kBAAkB,EAAE,MAAM,sBAAsB,CAAC;AAE1D,cAAc,sBAAsB,CAAC;AAErC,MAAM,6BAA6B,OAAqB;IACtD,IAAM,GAAG,GAAG,IAAI,kBAAkB,EAAE,CAAA;IACpC,GAAG,CAAC,cAAc,CAAC,OAAO,CAAC,CAAA;IAC3B,OAAO,GAAG,CAAA;AACZ,CAAC"}
\ No newline at end of file
export declare function loadQuantizedParams(uri: string | undefined): Promise<any>;
import * as tslib_1 from "tslib";
import { isTensor1D, isTensor2D, isTensor4D } from '../commons/isTensor';
import { loadWeightMap } from '../commons/loadWeightMap';
var DEFAULT_MODEL_NAME = 'face_recognition_model';
// Builds extractor closures over a fetched weight map, validating that each
// looked-up tensor has the expected rank before it is used.
function extractorsFactory(weightMap) {
    // Scale-layer params: 1-D weights and biases under `<prefix>/scale/...`.
    function extractScaleLayerParams(prefix) {
        var params = {
            weights: weightMap[prefix + "/scale/weights"],
            biases: weightMap[prefix + "/scale/biases"]
        };
        if (!isTensor1D(params.weights)) {
            throw new Error("expected weightMap[" + prefix + "/scale/weights] to be a Tensor1D, instead have " + params.weights);
        }
        if (!isTensor1D(params.biases)) {
            throw new Error("expected weightMap[" + prefix + "/scale/biases] to be a Tensor1D, instead have " + params.biases);
        }
        return params;
    }
    // Conv-layer params: 4-D filters and 1-D bias under `<prefix>/conv/...`,
    // combined with the corresponding scale-layer params.
    function extractConvLayerParams(prefix) {
        var params = {
            filters: weightMap[prefix + "/conv/filters"],
            bias: weightMap[prefix + "/conv/bias"]
        };
        if (!isTensor4D(params.filters)) {
            // bugfix: message previously said "Tensor1D" although the failed
            // check is isTensor4D
            throw new Error("expected weightMap[" + prefix + "/conv/filters] to be a Tensor4D, instead have " + params.filters);
        }
        if (!isTensor1D(params.bias)) {
            throw new Error("expected weightMap[" + prefix + "/conv/bias] to be a Tensor1D, instead have " + params.bias);
        }
        return {
            conv: params,
            scale: extractScaleLayerParams(prefix)
        };
    }
    // Residual-layer params: two conv layers under `<prefix>/conv1|conv2`.
    function extractResidualLayerParams(prefix) {
        return {
            conv1: extractConvLayerParams(prefix + "/conv1"),
            conv2: extractConvLayerParams(prefix + "/conv2")
        };
    }
    return {
        extractConvLayerParams: extractConvLayerParams,
        extractResidualLayerParams: extractResidualLayerParams
    };
}
// Loads the quantized 'face_recognition_model' weight map from `uri`
// (undefined lets loadWeightMap use its default location) and assembles the
// full set of net parameters via the rank-checked extractors above.
export function loadQuantizedParams(uri) {
return tslib_1.__awaiter(this, void 0, void 0, function () {
var weightMap, _a, extractConvLayerParams, extractResidualLayerParams, conv32_down, conv32_1, conv32_2, conv32_3, conv64_down, conv64_1, conv64_2, conv64_3, conv128_down, conv128_1, conv128_2, conv256_down, conv256_1, conv256_2, conv256_down_out, fc;
return tslib_1.__generator(this, function (_b) {
switch (_b.label) {
case 0: return [4 /*yield*/, loadWeightMap(uri, DEFAULT_MODEL_NAME)];
case 1:
weightMap = _b.sent();
// bind rank-validating extractors to the fetched weight map
_a = extractorsFactory(weightMap), extractConvLayerParams = _a.extractConvLayerParams, extractResidualLayerParams = _a.extractResidualLayerParams;
// only the entry layer is a plain conv; everything else is residual
conv32_down = extractConvLayerParams('conv32_down');
conv32_1 = extractResidualLayerParams('conv32_1');
conv32_2 = extractResidualLayerParams('conv32_2');
conv32_3 = extractResidualLayerParams('conv32_3');
conv64_down = extractResidualLayerParams('conv64_down');
conv64_1 = extractResidualLayerParams('conv64_1');
conv64_2 = extractResidualLayerParams('conv64_2');
conv64_3 = extractResidualLayerParams('conv64_3');
conv128_down = extractResidualLayerParams('conv128_down');
conv128_1 = extractResidualLayerParams('conv128_1');
conv128_2 = extractResidualLayerParams('conv128_2');
conv256_down = extractResidualLayerParams('conv256_down');
conv256_1 = extractResidualLayerParams('conv256_1');
conv256_2 = extractResidualLayerParams('conv256_2');
conv256_down_out = extractResidualLayerParams('conv256_down_out');
// final fully connected layer: must be a 2-D matrix
fc = weightMap['fc'];
if (!isTensor2D(fc)) {
throw new Error("expected weightMap[fc] to be a Tensor2D, instead have " + fc);
}
return [2 /*return*/, {
conv32_down: conv32_down,
conv32_1: conv32_1,
conv32_2: conv32_2,
conv32_3: conv32_3,
conv64_down: conv64_down,
conv64_1: conv64_1,
conv64_2: conv64_2,
conv64_3: conv64_3,
conv128_down: conv128_down,
conv128_1: conv128_1,
conv128_2: conv128_2,
conv256_down: conv256_down,
conv256_1: conv256_1,
conv256_2: conv256_2,
conv256_down_out: conv256_down_out,
fc: fc
}];
}
});
});
}
//# sourceMappingURL=loadQuantizedParams.js.map
\ No newline at end of file
{"version":3,"file":"loadQuantizedParams.js","sourceRoot":"","sources":["../../src/faceRecognitionNet/loadQuantizedParams.ts"],"names":[],"mappings":";AAAA,OAAO,EAAE,UAAU,EAAE,UAAU,EAAE,UAAU,EAAE,MAAM,qBAAqB,CAAC;AACzE,OAAO,EAAE,aAAa,EAAE,MAAM,0BAA0B,CAAC;AAGzD,IAAM,kBAAkB,GAAG,wBAAwB,CAAA;AAEnD,2BAA2B,SAAc;IAEvC,iCAAiC,MAAc;QAC7C,IAAM,MAAM,GAAG;YACb,OAAO,EAAE,SAAS,CAAI,MAAM,mBAAgB,CAAC;YAC7C,MAAM,EAAE,SAAS,CAAI,MAAM,kBAAe,CAAC;SAC5C,CAAA;QAED,IAAI,CAAC,UAAU,CAAC,MAAM,CAAC,OAAO,CAAC,EAAE;YAC/B,MAAM,IAAI,KAAK,CAAC,wBAAsB,MAAM,uDAAkD,MAAM,CAAC,OAAS,CAAC,CAAA;SAChH;QAED,IAAI,CAAC,UAAU,CAAC,MAAM,CAAC,MAAM,CAAC,EAAE;YAC9B,MAAM,IAAI,KAAK,CAAC,wBAAsB,MAAM,sDAAiD,MAAM,CAAC,MAAQ,CAAC,CAAA;SAC9G;QAED,OAAO,MAAM,CAAA;IACf,CAAC;IAED,gCAAgC,MAAc;QAC5C,IAAM,MAAM,GAAG;YACb,OAAO,EAAE,SAAS,CAAI,MAAM,kBAAe,CAAC;YAC5C,IAAI,EAAE,SAAS,CAAI,MAAM,eAAY,CAAC;SACvC,CAAA;QAED,IAAI,CAAC,UAAU,CAAC,MAAM,CAAC,OAAO,CAAC,EAAE;YAC/B,MAAM,IAAI,KAAK,CAAC,wBAAsB,MAAM,sDAAiD,MAAM,CAAC,OAAS,CAAC,CAAA;SAC/G;QAED,IAAI,CAAC,UAAU,CAAC,MAAM,CAAC,IAAI,CAAC,EAAE;YAC5B,MAAM,IAAI,KAAK,CAAC,wBAAsB,MAAM,mDAA8C,MAAM,CAAC,IAAM,CAAC,CAAA;SACzG;QAED,OAAO;YACL,IAAI,EAAE,MAAM;YACZ,KAAK,EAAE,uBAAuB,CAAC,MAAM,CAAC;SACvC,CAAA;IACH,CAAC;IAED,oCAAoC,MAAc;QAChD,OAAO;YACL,KAAK,EAAE,sBAAsB,CAAI,MAAM,WAAQ,CAAC;YAChD,KAAK,EAAE,sBAAsB,CAAI,MAAM,WAAQ,CAAC;SACjD,CAAA;IACH,CAAC;IAED,OAAO;QACL,sBAAsB,wBAAA;QACtB,0BAA0B,4BAAA;KAC3B,CAAA;AAEH,CAAC;AAED,MAAM,8BAAoC,GAAuB;;;;;wBAC7C,qBAAM,aAAa,CAAC,GAAG,EAAE,kBAAkB,CAAC,EAAA;;oBAAxD,SAAS,GAAG,SAA4C;oBAExD,KAGF,iBAAiB,CAAC,SAAS,CAAC,EAF9B,sBAAsB,4BAAA,EACtB,0BAA0B,gCAAA,CACI;oBAE1B,WAAW,GAAG,sBAAsB,CAAC,aAAa,CAAC,CAAA;oBACnD,QAAQ,GAAG,0BAA0B,CAAC,UAAU,CAAC,CAAA;oBACjD,QAAQ,GAAG,0BAA0B,CAAC,UAAU,CAAC,CAAA;oBACjD,QAAQ,GAAG,0BAA0B,CAAC,UAAU,CAAC,CAAA;oBAEjD,WAAW,GAAG,0BAA0B,CAAC,aAAa,CAAC,CAAA;oBACvD,QAAQ,GAAG,0BAA0B,CAAC,UAAU,CAAC,CAAA;oBACjD,QAAQ,GAAG,0BAA0B,CAAC,UAAU,CAAC,CAAA;oBACjD,QAAQ,GAAG,0BAA0B,CAAC,UAAU,CAAC,CAAA;oBAEjD,YAAY,GAAG,0BAA0B,CAAC,cAAc,CAAC,CAAA;oBACzD,SAAS,GAAG,0BAA0B,CAAC
,WAAW,CAAC,CAAA;oBACnD,SAAS,GAAG,0BAA0B,CAAC,WAAW,CAAC,CAAA;oBAEnD,YAAY,GAAG,0BAA0B,CAAC,cAAc,CAAC,CAAA;oBACzD,SAAS,GAAG,0BAA0B,CAAC,WAAW,CAAC,CAAA;oBACnD,SAAS,GAAG,0BAA0B,CAAC,WAAW,CAAC,CAAA;oBACnD,gBAAgB,GAAG,0BAA0B,CAAC,kBAAkB,CAAC,CAAA;oBAEjE,EAAE,GAAG,SAAS,CAAC,IAAI,CAAC,CAAA;oBAE1B,IAAI,CAAC,UAAU,CAAC,EAAE,CAAC,EAAE;wBACnB,MAAM,IAAI,KAAK,CAAC,2DAAyD,EAAI,CAAC,CAAA;qBAC/E;oBAED,sBAAO;4BACL,WAAW,aAAA;4BACX,QAAQ,UAAA;4BACR,QAAQ,UAAA;4BACR,QAAQ,UAAA;4BACR,WAAW,aAAA;4BACX,QAAQ,UAAA;4BACR,QAAQ,UAAA;4BACR,QAAQ,UAAA;4BACR,YAAY,cAAA;4BACZ,SAAS,WAAA;4BACT,SAAS,WAAA;4BACT,YAAY,cAAA;4BACZ,SAAS,WAAA;4BACT,SAAS,WAAA;4BACT,gBAAgB,kBAAA;4BAChB,EAAE,IAAA;yBACH,EAAA;;;;CACF"}
\ No newline at end of file
import * as tf from '@tensorflow/tfjs-core';
import { FaceRecognitionNet } from './types';
// Pre-refactor declarations: residual-layer param types addressed through
// the FaceRecognitionNet namespace.
export declare function residual(x: tf.Tensor4D, params: FaceRecognitionNet.ResidualLayerParams): tf.Tensor4D;
export declare function residualDown(x: tf.Tensor4D, params: FaceRecognitionNet.ResidualLayerParams): tf.Tensor4D;
import { ResidualLayerParams } from './types';
// Post-refactor declarations: ResidualLayerParams imported directly.
export declare function residual(x: tf.Tensor4D, params: ResidualLayerParams): tf.Tensor4D;
export declare function residualDown(x: tf.Tensor4D, params: ResidualLayerParams): tf.Tensor4D;
{"version":3,"file":"residualLayer.js","sourceRoot":"","sources":["../../src/faceRecognitionNet/residualLayer.ts"],"names":[],"mappings":"AAAA,OAAO,KAAK,EAAE,MAAM,uBAAuB,CAAC;AAE5C,OAAO,EAAE,IAAI,EAAE,QAAQ,EAAE,UAAU,EAAE,MAAM,aAAa,CAAC;AAGzD,MAAM,mBAAmB,CAAc,EAAE,MAA8C;IACrF,IAAI,GAAG,GAAG,IAAI,CAAC,CAAC,EAAE,MAAM,CAAC,KAAK,CAAC,CAAA;IAC/B,GAAG,GAAG,UAAU,CAAC,GAAG,EAAE,MAAM,CAAC,KAAK,CAAC,CAAA;IACnC,GAAG,GAAG,EAAE,CAAC,GAAG,CAAC,GAAG,EAAE,CAAC,CAAC,CAAA;IACpB,GAAG,GAAG,EAAE,CAAC,IAAI,CAAC,GAAG,CAAC,CAAA;IAClB,OAAO,GAAG,CAAA;AACZ,CAAC;AAED,MAAM,uBAAuB,CAAc,EAAE,MAA8C;IACzF,IAAI,GAAG,GAAG,QAAQ,CAAC,CAAC,EAAE,MAAM,CAAC,KAAK,CAAC,CAAA;IACnC,GAAG,GAAG,UAAU,CAAC,GAAG,EAAE,MAAM,CAAC,KAAK,CAAC,CAAA;IAEnC,IAAI,MAAM,GAAG,EAAE,CAAC,OAAO,CAAC,CAAC,EAAE,CAAC,EAAE,CAAC,EAAE,OAAO,CAAgB,CAAA;IACxD,IAAM,KAAK,GAAG,EAAE,CAAC,KAAK,CAAa,MAAM,CAAC,KAAK,CAAC,CAAA;IAChD,IAAM,KAAK,GAAG,MAAM,CAAC,KAAK,CAAC,CAAC,CAAC,KAAK,GAAG,CAAC,KAAK,CAAC,CAAC,CAAC,CAAA;IAC9C,IAAM,aAAa,GAAG,MAAM,CAAC,KAAK,CAAC,CAAC,CAAC,KAAK,GAAG,CAAC,KAAK,CAAC,CAAC,CAAC,IAAI,MAAM,CAAC,KAAK,CAAC,CAAC,CAAC,KAAK,GAAG,CAAC,KAAK,CAAC,CAAC,CAAC,CAAA;IAE1F,IAAI,aAAa,EAAE;QACjB,IAAM,SAAS,GAAO,GAAG,CAAC,KAAK,QAAqC,CAAA;QACpE,SAAS,CAAC,CAAC,CAAC,GAAG,CAAC,CAAA;QAChB,IAAM,MAAM,GAAG,EAAE,CAAC,KAAK,CAAa,SAAS,CAAC,CAAA;QAC9C,GAAG,GAAG,EAAE,CAAC,MAAM,CAAC,CAAC,GAAG,EAAE,MAAM,CAAC,EAAE,CAAC,CAAC,CAAA;QAEjC,IAAM,SAAS,GAAO,GAAG,CAAC,KAAK,QAAqC,CAAA;QACpE,SAAS,CAAC,CAAC,CAAC,GAAG,CAAC,CAAA;QAChB,IAAM,MAAM,GAAG,EAAE,CAAC,KAAK,CAAa,SAAS,CAAC,CAAA;QAC9C,GAAG,GAAG,EAAE,CAAC,MAAM,CAAC,CAAC,GAAG,EAAE,MAAM,CAAC,EAAE,CAAC,CAAC,CAAA;KAClC;IAED,MAAM,GAAG,KAAK,CAAC,CAAC,CAAC,EAAE,CAAC,MAAM,CAAC,CAAC,MAAM,EAAE,KAAK,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,CAAA;IACvD,GAAG,GAAG,EAAE,CAAC,GAAG,CAAC,MAAM,EAAE,GAAG,CAAgB,CAAA;IAExC,GAAG,GAAG,EAAE,CAAC,IAAI,CAAC,GAAG,CAAC,CAAA;IAClB,OAAO,GAAG,CAAA;AACZ,CAAC"}
\ No newline at end of file
{"version":3,"file":"residualLayer.js","sourceRoot":"","sources":["../../src/faceRecognitionNet/residualLayer.ts"],"names":[],"mappings":"AAAA,OAAO,KAAK,EAAE,MAAM,uBAAuB,CAAC;AAE5C,OAAO,EAAE,IAAI,EAAE,QAAQ,EAAE,UAAU,EAAE,MAAM,aAAa,CAAC;AAGzD,MAAM,mBAAmB,CAAc,EAAE,MAA2B;IAClE,IAAI,GAAG,GAAG,IAAI,CAAC,CAAC,EAAE,MAAM,CAAC,KAAK,CAAC,CAAA;IAC/B,GAAG,GAAG,UAAU,CAAC,GAAG,EAAE,MAAM,CAAC,KAAK,CAAC,CAAA;IACnC,GAAG,GAAG,EAAE,CAAC,GAAG,CAAC,GAAG,EAAE,CAAC,CAAC,CAAA;IACpB,GAAG,GAAG,EAAE,CAAC,IAAI,CAAC,GAAG,CAAC,CAAA;IAClB,OAAO,GAAG,CAAA;AACZ,CAAC;AAED,MAAM,uBAAuB,CAAc,EAAE,MAA2B;IACtE,IAAI,GAAG,GAAG,QAAQ,CAAC,CAAC,EAAE,MAAM,CAAC,KAAK,CAAC,CAAA;IACnC,GAAG,GAAG,UAAU,CAAC,GAAG,EAAE,MAAM,CAAC,KAAK,CAAC,CAAA;IAEnC,IAAI,MAAM,GAAG,EAAE,CAAC,OAAO,CAAC,CAAC,EAAE,CAAC,EAAE,CAAC,EAAE,OAAO,CAAgB,CAAA;IACxD,IAAM,KAAK,GAAG,EAAE,CAAC,KAAK,CAAa,MAAM,CAAC,KAAK,CAAC,CAAA;IAChD,IAAM,KAAK,GAAG,MAAM,CAAC,KAAK,CAAC,CAAC,CAAC,KAAK,GAAG,CAAC,KAAK,CAAC,CAAC,CAAC,CAAA;IAC9C,IAAM,aAAa,GAAG,MAAM,CAAC,KAAK,CAAC,CAAC,CAAC,KAAK,GAAG,CAAC,KAAK,CAAC,CAAC,CAAC,IAAI,MAAM,CAAC,KAAK,CAAC,CAAC,CAAC,KAAK,GAAG,CAAC,KAAK,CAAC,CAAC,CAAC,CAAA;IAE1F,IAAI,aAAa,EAAE;QACjB,IAAM,SAAS,GAAO,GAAG,CAAC,KAAK,QAAqC,CAAA;QACpE,SAAS,CAAC,CAAC,CAAC,GAAG,CAAC,CAAA;QAChB,IAAM,MAAM,GAAG,EAAE,CAAC,KAAK,CAAa,SAAS,CAAC,CAAA;QAC9C,GAAG,GAAG,EAAE,CAAC,MAAM,CAAC,CAAC,GAAG,EAAE,MAAM,CAAC,EAAE,CAAC,CAAC,CAAA;QAEjC,IAAM,SAAS,GAAO,GAAG,CAAC,KAAK,QAAqC,CAAA;QACpE,SAAS,CAAC,CAAC,CAAC,GAAG,CAAC,CAAA;QAChB,IAAM,MAAM,GAAG,EAAE,CAAC,KAAK,CAAa,SAAS,CAAC,CAAA;QAC9C,GAAG,GAAG,EAAE,CAAC,MAAM,CAAC,CAAC,GAAG,EAAE,MAAM,CAAC,EAAE,CAAC,CAAC,CAAA;KAClC;IAED,MAAM,GAAG,KAAK,CAAC,CAAC,CAAC,EAAE,CAAC,MAAM,CAAC,CAAC,MAAM,EAAE,KAAK,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,CAAA;IACvD,GAAG,GAAG,EAAE,CAAC,GAAG,CAAC,MAAM,EAAE,GAAG,CAAgB,CAAA;IAExC,GAAG,GAAG,EAAE,CAAC,IAAI,CAAC,GAAG,CAAC,CAAA;IAClB,OAAO,GAAG,CAAA;AACZ,CAAC"}
\ No newline at end of file
import * as tf from '@tensorflow/tfjs-core';
import { FaceRecognitionNet } from './types';
export declare function scale(x: tf.Tensor4D, params: FaceRecognitionNet.ScaleLayerParams): tf.Tensor4D;
import { ScaleLayerParams } from './types';
export declare function scale(x: tf.Tensor4D, params: ScaleLayerParams): tf.Tensor4D;
{"version":3,"file":"scaleLayer.js","sourceRoot":"","sources":["../../src/faceRecognitionNet/scaleLayer.ts"],"names":[],"mappings":"AAAA,OAAO,KAAK,EAAE,MAAM,uBAAuB,CAAC;AAG5C,MAAM,gBAAgB,CAAc,EAAE,MAA2C;IAC/E,OAAO,EAAE,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,CAAC,CAAC,EAAE,MAAM,CAAC,OAAO,CAAC,EAAE,MAAM,CAAC,MAAM,CAAC,CAAA;AACzD,CAAC"}
\ No newline at end of file
{"version":3,"file":"scaleLayer.js","sourceRoot":"","sources":["../../src/faceRecognitionNet/scaleLayer.ts"],"names":[],"mappings":"AAAA,OAAO,KAAK,EAAE,MAAM,uBAAuB,CAAC;AAI5C,MAAM,gBAAgB,CAAc,EAAE,MAAwB;IAC5D,OAAO,EAAE,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,CAAC,CAAC,EAAE,MAAM,CAAC,OAAO,CAAC,EAAE,MAAM,CAAC,MAAM,CAAC,CAAA;AACzD,CAAC"}
\ No newline at end of file
import * as tf from '@tensorflow/tfjs-core';
import { ConvParams } from '../commons/types';
export declare namespace FaceRecognitionNet {
type ScaleLayerParams = {
weights: tf.Tensor1D;
biases: tf.Tensor1D;
};
type ResidualLayerParams = {
conv1: ConvLayerParams;
conv2: ConvLayerParams;
};
type ConvLayerParams = {
conv: ConvParams;
scale: ScaleLayerParams;
};
type NetParams = {
conv32_down: ConvLayerParams;
conv32_1: ResidualLayerParams;
conv32_2: ResidualLayerParams;
conv32_3: ResidualLayerParams;
conv64_down: ResidualLayerParams;
conv64_1: ResidualLayerParams;
conv64_2: ResidualLayerParams;
conv64_3: ResidualLayerParams;
conv128_down: ResidualLayerParams;
conv128_1: ResidualLayerParams;
conv128_2: ResidualLayerParams;
conv256_down: ResidualLayerParams;
conv256_1: ResidualLayerParams;
conv256_2: ResidualLayerParams;
conv256_down_out: ResidualLayerParams;
fc: tf.Tensor2D;
};
}
export declare type ScaleLayerParams = {
weights: tf.Tensor1D;
biases: tf.Tensor1D;
};
export declare type ResidualLayerParams = {
conv1: ConvLayerParams;
conv2: ConvLayerParams;
};
export declare type ConvLayerParams = {
conv: ConvParams;
scale: ScaleLayerParams;
};
export declare type NetParams = {
conv32_down: ConvLayerParams;
conv32_1: ResidualLayerParams;
conv32_2: ResidualLayerParams;
conv32_3: ResidualLayerParams;
conv64_down: ResidualLayerParams;
conv64_1: ResidualLayerParams;
conv64_2: ResidualLayerParams;
conv64_3: ResidualLayerParams;
conv128_down: ResidualLayerParams;
conv128_1: ResidualLayerParams;
conv128_2: ResidualLayerParams;
conv256_down: ResidualLayerParams;
conv256_1: ResidualLayerParams;
conv256_2: ResidualLayerParams;
conv256_down_out: ResidualLayerParams;
fc: tf.Tensor2D;
};
import * as tf from '@tensorflow/tfjs-core';
import { euclideanDistance } from './euclideanDistance';
import { faceRecognitionNet } from './faceRecognitionNet';
import { NetInput } from './NetInput';
import { padToSquare } from './padToSquare';
export { euclideanDistance, faceRecognitionNet, NetInput, tf, padToSquare };
export { euclideanDistance, NetInput, tf, padToSquare };
export * from './extractFaces';
export * from './extractFaceTensors';
export * from './faceDetectionNet';
export * from './faceLandmarkNet';
export * from './faceRecognitionNet';
export * from './utils';
import * as tf from '@tensorflow/tfjs-core';
import { euclideanDistance } from './euclideanDistance';
import { faceRecognitionNet } from './faceRecognitionNet';
import { NetInput } from './NetInput';
import { padToSquare } from './padToSquare';
export { euclideanDistance, faceRecognitionNet, NetInput, tf, padToSquare };
export { euclideanDistance, NetInput, tf, padToSquare };
export * from './extractFaces';
export * from './extractFaceTensors';
export * from './faceDetectionNet';
export * from './faceLandmarkNet';
export * from './faceRecognitionNet';
export * from './utils';
//# sourceMappingURL=index.js.map
\ No newline at end of file
{"version":3,"file":"index.js","sourceRoot":"","sources":["../src/index.ts"],"names":[],"mappings":"AAAA,OAAO,KAAK,EAAE,MAAM,uBAAuB,CAAC;AAE5C,OAAO,EAAE,iBAAiB,EAAE,MAAM,qBAAqB,CAAC;AACxD,OAAO,EAAE,kBAAkB,EAAE,MAAM,sBAAsB,CAAC;AAC1D,OAAO,EAAE,QAAQ,EAAE,MAAM,YAAY,CAAC;AACtC,OAAO,EAAE,WAAW,EAAE,MAAM,eAAe,CAAC;AAE5C,OAAO,EACL,iBAAiB,EACjB,kBAAkB,EAClB,QAAQ,EACR,EAAE,EACF,WAAW,EACZ,CAAA;AAED,cAAc,gBAAgB,CAAA;AAC9B,cAAc,sBAAsB,CAAA;AACpC,cAAc,oBAAoB,CAAC;AACnC,cAAc,mBAAmB,CAAC;AAClC,cAAc,SAAS,CAAA"}
\ No newline at end of file
{"version":3,"file":"index.js","sourceRoot":"","sources":["../src/index.ts"],"names":[],"mappings":"AAAA,OAAO,KAAK,EAAE,MAAM,uBAAuB,CAAC;AAE5C,OAAO,EAAE,iBAAiB,EAAE,MAAM,qBAAqB,CAAC;AACxD,OAAO,EAAE,QAAQ,EAAE,MAAM,YAAY,CAAC;AACtC,OAAO,EAAE,WAAW,EAAE,MAAM,eAAe,CAAC;AAE5C,OAAO,EACL,iBAAiB,EACjB,QAAQ,EACR,EAAE,EACF,WAAW,EACZ,CAAA;AAED,cAAc,gBAAgB,CAAA;AAC9B,cAAc,sBAAsB,CAAA;AACpC,cAAc,oBAAoB,CAAC;AACnC,cAAc,mBAAmB,CAAC;AAClC,cAAc,sBAAsB,CAAC;AACrC,cAAc,SAAS,CAAA"}
\ No newline at end of file
......@@ -12,12 +12,6 @@ async function fetchImage(uri) {
return (await fetch(uri)).blob()
}
async function initFaceRecognitionNet() {
const res = await fetch('uncompressed/face_recognition_model.weights')
const weights = new Float32Array(await res.arrayBuffer())
return faceapi.faceRecognitionNet(weights)
}
// fetch first image of each class and compute their descriptors
async function initTrainDescriptorsByClass(net, numImagesForTraining = 1) {
const maxAvailableImagesPerClass = 5
......
......@@ -146,7 +146,8 @@
await detectionNet.load('/')
landmarkNet = new faceapi.FaceLandmarkNet()
await landmarkNet.load('/')
recognitionNet = await initFaceRecognitionNet()
recognitionNet = new faceapi.FaceRecognitionNet()
await recognitionNet.load('/')
trainDescriptorsByClass = await initTrainDescriptorsByClass(recognitionNet, 1)
$('#loader').hide()
onSelectionChanged($('#selectList select').val())
......
......@@ -136,7 +136,9 @@
try {
setStatusText('loading model file...')
net = await initFaceRecognitionNet()
net = new faceapi.FaceRecognitionNet()
await net.load('/')
setStatusText('computing initial descriptors...')
trainDescriptorsByClass = await initTrainDescriptorsByClass(net)
......
......@@ -61,7 +61,8 @@
}
async function run() {
net = await initFaceRecognitionNet()
net = new faceapi.FaceRecognitionNet()
await net.load('/')
$('#loader').hide()
await onSelectionChanged(1, $('#selectList1 select').val())
await onSelectionChanged(2, $('#selectList2 select').val())
......
import * as tf from '@tensorflow/tfjs-core';
import { getImageTensor } from '../getImageTensor';
import { NetInput } from '../NetInput';
import { padToSquare } from '../padToSquare';
import { TNetInput } from '../types';
import { convDown } from './convLayer';
import { extractParams } from './extractParams';
import { loadQuantizedParams } from './loadQuantizedParams';
import { normalize } from './normalize';
import { residual, residualDown } from './residualLayer';
import { NetParams } from './types';
export class FaceRecognitionNet {

  private _params: NetParams

  /**
   * Loads the model parameters, either directly from serialized weights or by
   * fetching the quantized model files from the given uri.
   *
   * @param weightsOrUrl Raw weights as a Float32Array, or the base uri hosting
   *   the quantized model files. If undefined, loadQuantizedParams falls back
   *   to its default model location.
   * @throws If the argument is neither a Float32Array nor a string.
   */
  public async load(weightsOrUrl: Float32Array | string | undefined): Promise<void> {
    if (weightsOrUrl instanceof Float32Array) {
      this.extractWeights(weightsOrUrl)
      return
    }

    if (weightsOrUrl && typeof weightsOrUrl !== 'string') {
      // fixed: error message previously said 'FaceLandmarkNet.load' (copy-pasted
      // from the landmark net), which misattributed the failure to the wrong class
      throw new Error('FaceRecognitionNet.load - expected model uri, or weights as Float32Array')
    }
    this._params = await loadQuantizedParams(weightsOrUrl)
  }

  /** Initializes the network parameters from uncompressed (non-quantized) weights. */
  public extractWeights(weights: Float32Array) {
    this._params = extractParams(weights)
  }

  /**
   * Runs the forward pass and returns the 128-dimensional face embedding
   * as a 2D tensor (one row per batch item).
   *
   * @param input Image tensor or any supported net input. It is padded to a
   *   square and resized to 150x150 before inference.
   * @throws If the model parameters have not been loaded yet.
   */
  public forward(input: tf.Tensor | NetInput | TNetInput) {
    if (!this._params) {
      throw new Error('FaceRecognitionNet - load model before inference')
    }

    return tf.tidy(() => {
      let x = padToSquare(getImageTensor(input), true)
      // work with 150 x 150 sized face images
      if (x.shape[1] !== 150 || x.shape[2] !== 150) {
        x = tf.image.resizeBilinear(x, [150, 150])
      }
      x = normalize(x)

      // ResNet-style backbone: one strided conv, then stacks of residual
      // blocks with channel-doubling downsampling stages (32 -> 64 -> 128 -> 256)
      let out = convDown(x, this._params.conv32_down)
      out = tf.maxPool(out, 3, 2, 'valid')

      out = residual(out, this._params.conv32_1)
      out = residual(out, this._params.conv32_2)
      out = residual(out, this._params.conv32_3)

      out = residualDown(out, this._params.conv64_down)
      out = residual(out, this._params.conv64_1)
      out = residual(out, this._params.conv64_2)
      out = residual(out, this._params.conv64_3)

      out = residualDown(out, this._params.conv128_down)
      out = residual(out, this._params.conv128_1)
      out = residual(out, this._params.conv128_2)

      out = residualDown(out, this._params.conv256_down)
      out = residual(out, this._params.conv256_1)
      out = residual(out, this._params.conv256_2)
      out = residualDown(out, this._params.conv256_down_out)

      // global average pooling over the spatial dims, then project to the
      // 128-dimensional descriptor via the fully connected layer
      const globalAvg = out.mean([1, 2]) as tf.Tensor2D
      const fullyConnected = tf.matMul(globalAvg, this._params.fc)

      return fullyConnected
    })
  }

  /** Computes the face descriptor, reading the result tensor asynchronously. */
  public async computeFaceDescriptor(input: tf.Tensor | NetInput | TNetInput) {
    const result = this.forward(input)
    const data = await result.data()
    result.dispose()
    return data
  }

  // NOTE(review): despite the 'Sync' name this method is declared async (the
  // published .d.ts exposes Promise<...>), so it is kept async for interface
  // compatibility; only the tensor read itself (dataSync) is synchronous.
  public async computeFaceDescriptorSync(input: tf.Tensor | NetInput | TNetInput) {
    const result = this.forward(input)
    const data = result.dataSync()
    result.dispose()
    return data
  }
}
\ No newline at end of file
import * as tf from '@tensorflow/tfjs-core';
import { scale } from './scaleLayer';
import { FaceRecognitionNet } from './types';
import { ConvLayerParams } from './types';
function convLayer(
x: tf.Tensor4D,
params: FaceRecognitionNet.ConvLayerParams,
params: ConvLayerParams,
strides: [number, number],
withRelu: boolean,
padding: 'valid' | 'same' = 'same'
......@@ -19,14 +19,14 @@ function convLayer(
return withRelu ? tf.relu(out) : out
}
export function conv(x: tf.Tensor4D, params: FaceRecognitionNet.ConvLayerParams) {
export function conv(x: tf.Tensor4D, params: ConvLayerParams) {
return convLayer(x, params, [1, 1], true)
}
export function convNoRelu(x: tf.Tensor4D, params: FaceRecognitionNet.ConvLayerParams) {
export function convNoRelu(x: tf.Tensor4D, params: ConvLayerParams) {
return convLayer(x, params, [1, 1], false)
}
export function convDown(x: tf.Tensor4D, params: FaceRecognitionNet.ConvLayerParams) {
export function convDown(x: tf.Tensor4D, params: ConvLayerParams) {
return convLayer(x, params, [2, 2], true, 'valid')
}
\ No newline at end of file
......@@ -3,7 +3,7 @@ import * as tf from '@tensorflow/tfjs-core';
import { extractWeightsFactory } from '../commons/extractWeightsFactory';
import { ExtractWeightsFunction } from '../commons/types';
import { isFloat } from '../utils';
import { FaceRecognitionNet } from './types';
import { ConvLayerParams, NetParams, ResidualLayerParams, ScaleLayerParams } from './types';
function extractorsFactory(extractWeights: ExtractWeightsFunction) {
......@@ -21,7 +21,7 @@ function extractorsFactory(extractWeights: ExtractWeightsFunction) {
)
}
function extractScaleLayerParams(numWeights: number): FaceRecognitionNet.ScaleLayerParams {
function extractScaleLayerParams(numWeights: number): ScaleLayerParams {
const weights = tf.tensor1d(extractWeights(numWeights))
const biases = tf.tensor1d(extractWeights(numWeights))
return {
......@@ -34,7 +34,7 @@ function extractorsFactory(extractWeights: ExtractWeightsFunction) {
numFilterValues: number,
numFilters: number,
filterSize: number
): FaceRecognitionNet.ConvLayerParams {
): ConvLayerParams {
const conv_filters = extractFilterValues(numFilterValues, numFilters, filterSize)
const conv_bias = tf.tensor1d(extractWeights(numFilters))
const scale = extractScaleLayerParams(numFilters)
......@@ -48,9 +48,14 @@ function extractorsFactory(extractWeights: ExtractWeightsFunction) {
}
}
function extractResidualLayerParams(numFilterValues: number, numFilters: number, filterSize: number, isDown: boolean = false): FaceRecognitionNet.ResidualLayerParams {
const conv1: FaceRecognitionNet.ConvLayerParams = extractConvLayerParams((isDown ? 0.5 : 1) * numFilterValues, numFilters, filterSize)
const conv2: FaceRecognitionNet.ConvLayerParams = extractConvLayerParams(numFilterValues, numFilters, filterSize)
function extractResidualLayerParams(
numFilterValues: number,
numFilters: number,
filterSize: number,
isDown: boolean = false
): ResidualLayerParams {
const conv1: ConvLayerParams = extractConvLayerParams((isDown ? 0.5 : 1) * numFilterValues, numFilters, filterSize)
const conv2: ConvLayerParams = extractConvLayerParams(numFilterValues, numFilters, filterSize)
return {
conv1,
......@@ -65,7 +70,7 @@ function extractorsFactory(extractWeights: ExtractWeightsFunction) {
}
export function extractParams(weights: Float32Array): FaceRecognitionNet.NetParams {
export function extractParams(weights: Float32Array): NetParams {
const {
extractWeights,
getRemainingWeights
......
import * as tf from '@tensorflow/tfjs-core';
import { FaceRecognitionNet } from './FaceRecognitionNet';
import { getImageTensor } from '../getImageTensor';
import { NetInput } from '../NetInput';
import { padToSquare } from '../padToSquare';
import { TNetInput } from '../types';
import { convDown } from './convLayer';
import { extractParams } from './extractParams';
import { normalize } from './normalize';
import { residual, residualDown } from './residualLayer';
export * from './FaceRecognitionNet';
export function faceRecognitionNet(weights: Float32Array) {
const params = extractParams(weights)
function forward(input: tf.Tensor | NetInput | TNetInput) {
return tf.tidy(() => {
let x = padToSquare(getImageTensor(input), true)
// work with 150 x 150 sized face images
if (x.shape[1] !== 150 || x.shape[2] !== 150) {
x = tf.image.resizeBilinear(x, [150, 150])
}
x = normalize(x)
let out = convDown(x, params.conv32_down)
out = tf.maxPool(out, 3, 2, 'valid')
out = residual(out, params.conv32_1)
out = residual(out, params.conv32_2)
out = residual(out, params.conv32_3)
out = residualDown(out, params.conv64_down)
out = residual(out, params.conv64_1)
out = residual(out, params.conv64_2)
out = residual(out, params.conv64_3)
out = residualDown(out, params.conv128_down)
out = residual(out, params.conv128_1)
out = residual(out, params.conv128_2)
out = residualDown(out, params.conv256_down)
out = residual(out, params.conv256_1)
out = residual(out, params.conv256_2)
out = residualDown(out, params.conv256_down_out)
const globalAvg = out.mean([1, 2]) as tf.Tensor2D
const fullyConnected = tf.matMul(globalAvg, params.fc)
return fullyConnected
})
}
const computeFaceDescriptor = async (input: tf.Tensor | NetInput | TNetInput) => {
const result = forward(input)
const data = await result.data()
result.dispose()
return data
}
const computeFaceDescriptorSync = (input: tf.Tensor | NetInput | TNetInput) => {
const result = forward(input)
const data = result.dataSync()
result.dispose()
return data
}
return {
computeFaceDescriptor,
computeFaceDescriptorSync,
forward
}
const net = new FaceRecognitionNet()
net.extractWeights(weights)
return net
}
\ No newline at end of file
import { isTensor1D, isTensor2D, isTensor4D } from '../commons/isTensor';
import { loadWeightMap } from '../commons/loadWeightMap';
import { ConvLayerParams, NetParams, ResidualLayerParams, ScaleLayerParams } from './types';
const DEFAULT_MODEL_NAME = 'face_recognition_model'
/**
 * Builds extractor functions that read the quantized parameters of the face
 * recognition net out of the dequantized weight map, validating tensor ranks.
 */
function extractorsFactory(weightMap: any) {

  /** Reads the scale layer weights and biases stored under the given layer prefix. */
  function extractScaleLayerParams(prefix: string): ScaleLayerParams {
    const params = {
      weights: weightMap[`${prefix}/scale/weights`],
      biases: weightMap[`${prefix}/scale/biases`]
    }

    if (!isTensor1D(params.weights)) {
      throw new Error(`expected weightMap[${prefix}/scale/weights] to be a Tensor1D, instead have ${params.weights}`)
    }

    if (!isTensor1D(params.biases)) {
      throw new Error(`expected weightMap[${prefix}/scale/biases] to be a Tensor1D, instead have ${params.biases}`)
    }

    return params
  }

  /** Reads the conv filters + bias for a layer, together with its scale layer params. */
  function extractConvLayerParams(prefix: string): ConvLayerParams {
    const params = {
      filters: weightMap[`${prefix}/conv/filters`],
      bias: weightMap[`${prefix}/conv/bias`]
    }

    if (!isTensor4D(params.filters)) {
      // fixed: message previously claimed Tensor1D although a Tensor4D is checked
      throw new Error(`expected weightMap[${prefix}/conv/filters] to be a Tensor4D, instead have ${params.filters}`)
    }

    if (!isTensor1D(params.bias)) {
      throw new Error(`expected weightMap[${prefix}/conv/bias] to be a Tensor1D, instead have ${params.bias}`)
    }

    return {
      conv: params,
      scale: extractScaleLayerParams(prefix)
    }
  }

  /** Reads the two conv layers that make up a residual block. */
  function extractResidualLayerParams(prefix: string): ResidualLayerParams {
    return {
      conv1: extractConvLayerParams(`${prefix}/conv1`),
      conv2: extractConvLayerParams(`${prefix}/conv2`)
    }
  }

  return {
    extractConvLayerParams,
    extractResidualLayerParams
  }

}
export async function loadQuantizedParams(uri: string | undefined): Promise<any> {
const weightMap = await loadWeightMap(uri, DEFAULT_MODEL_NAME)
const {
extractConvLayerParams,
extractResidualLayerParams
} = extractorsFactory(weightMap)
const conv32_down = extractConvLayerParams('conv32_down')
const conv32_1 = extractResidualLayerParams('conv32_1')
const conv32_2 = extractResidualLayerParams('conv32_2')
const conv32_3 = extractResidualLayerParams('conv32_3')
const conv64_down = extractResidualLayerParams('conv64_down')
const conv64_1 = extractResidualLayerParams('conv64_1')
const conv64_2 = extractResidualLayerParams('conv64_2')
const conv64_3 = extractResidualLayerParams('conv64_3')
const conv128_down = extractResidualLayerParams('conv128_down')
const conv128_1 = extractResidualLayerParams('conv128_1')
const conv128_2 = extractResidualLayerParams('conv128_2')
const conv256_down = extractResidualLayerParams('conv256_down')
const conv256_1 = extractResidualLayerParams('conv256_1')
const conv256_2 = extractResidualLayerParams('conv256_2')
const conv256_down_out = extractResidualLayerParams('conv256_down_out')
const fc = weightMap['fc']
if (!isTensor2D(fc)) {
throw new Error(`expected weightMap[fc] to be a Tensor2D, instead have ${fc}`)
}
return {
conv32_down,
conv32_1,
conv32_2,
conv32_3,
conv64_down,
conv64_1,
conv64_2,
conv64_3,
conv128_down,
conv128_1,
conv128_2,
conv256_down,
conv256_1,
conv256_2,
conv256_down_out,
fc
}
}
\ No newline at end of file
import * as tf from '@tensorflow/tfjs-core';
import { conv, convDown, convNoRelu } from './convLayer';
import { FaceRecognitionNet } from './types';
import { ResidualLayerParams } from './types';
export function residual(x: tf.Tensor4D, params: FaceRecognitionNet.ResidualLayerParams): tf.Tensor4D {
export function residual(x: tf.Tensor4D, params: ResidualLayerParams): tf.Tensor4D {
let out = conv(x, params.conv1)
out = convNoRelu(out, params.conv2)
out = tf.add(out, x)
......@@ -11,7 +11,7 @@ export function residual(x: tf.Tensor4D, params: FaceRecognitionNet.ResidualLaye
return out
}
export function residualDown(x: tf.Tensor4D, params: FaceRecognitionNet.ResidualLayerParams): tf.Tensor4D {
export function residualDown(x: tf.Tensor4D, params: ResidualLayerParams): tf.Tensor4D {
let out = convDown(x, params.conv1)
out = convNoRelu(out, params.conv2)
......
import * as tf from '@tensorflow/tfjs-core';
import { FaceRecognitionNet } from './types';
export function scale(x: tf.Tensor4D, params: FaceRecognitionNet.ScaleLayerParams): tf.Tensor4D {
import { ScaleLayerParams } from './types';
export function scale(x: tf.Tensor4D, params: ScaleLayerParams): tf.Tensor4D {
return tf.add(tf.mul(x, params.weights), params.biases)
}
......@@ -2,39 +2,35 @@ import * as tf from '@tensorflow/tfjs-core';
import { ConvParams } from '../commons/types';
export namespace FaceRecognitionNet {
export type ScaleLayerParams = {
weights: tf.Tensor1D
biases: tf.Tensor1D
}
export type ResidualLayerParams = {
conv1: ConvLayerParams
conv2: ConvLayerParams
}
export type ScaleLayerParams = {
weights: tf.Tensor1D
biases: tf.Tensor1D
}
export type ResidualLayerParams = {
conv1: ConvLayerParams
conv2: ConvLayerParams
}
export type ConvLayerParams = {
conv: ConvParams
scale: ScaleLayerParams
}
export type NetParams = {
conv32_down: ConvLayerParams
conv32_1: ResidualLayerParams
conv32_2: ResidualLayerParams
conv32_3: ResidualLayerParams
conv64_down: ResidualLayerParams
conv64_1: ResidualLayerParams
conv64_2: ResidualLayerParams
conv64_3: ResidualLayerParams
conv128_down: ResidualLayerParams
conv128_1: ResidualLayerParams
conv128_2: ResidualLayerParams
conv256_down: ResidualLayerParams
conv256_1: ResidualLayerParams
conv256_2: ResidualLayerParams
conv256_down_out: ResidualLayerParams
fc: tf.Tensor2D
}
export type ConvLayerParams = {
conv: ConvParams
scale: ScaleLayerParams
}
export type NetParams = {
conv32_down: ConvLayerParams
conv32_1: ResidualLayerParams
conv32_2: ResidualLayerParams
conv32_3: ResidualLayerParams
conv64_down: ResidualLayerParams
conv64_1: ResidualLayerParams
conv64_2: ResidualLayerParams
conv64_3: ResidualLayerParams
conv128_down: ResidualLayerParams
conv128_1: ResidualLayerParams
conv128_2: ResidualLayerParams
conv256_down: ResidualLayerParams
conv256_1: ResidualLayerParams
conv256_2: ResidualLayerParams
conv256_down_out: ResidualLayerParams
fc: tf.Tensor2D
}
\ No newline at end of file
import * as tf from '@tensorflow/tfjs-core';
import { euclideanDistance } from './euclideanDistance';
import { faceRecognitionNet } from './faceRecognitionNet';
import { NetInput } from './NetInput';
import { padToSquare } from './padToSquare';
export {
euclideanDistance,
faceRecognitionNet,
NetInput,
tf,
padToSquare
......@@ -17,4 +15,5 @@ export * from './extractFaces'
export * from './extractFaceTensors'
export * from './faceDetectionNet';
export * from './faceLandmarkNet';
export * from './faceRecognitionNet';
export * from './utils'
\ No newline at end of file
This source diff could not be displayed because it is too large. You can view the blob instead.
[{"paths":["face_recognition_model-shard1","face_recognition_model-shard2"],"weights":[{"name":"conv128_1/conv1/conv/bias","shape":[128],"dtype":"float32","quantization":{"dtype":"uint8","scale":4.4147297756478345e-10,"min":-5.253528433020923e-8}},{"name":"conv128_1/conv1/conv/filters","shape":[3,3,128,128],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.00022380667574265425,"min":-0.032899581334170175}},{"name":"conv128_1/conv1/scale/biases","shape":[128],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.014059314073300829,"min":-1.4059314073300828}},{"name":"conv128_1/conv1/scale/weights","shape":[128],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.013599334978589825,"min":3.634530782699585}},{"name":"conv128_1/conv2/conv/bias","shape":[128],"dtype":"float32","quantization":{"dtype":"uint8","scale":9.887046963276768e-10,"min":-1.1370104007768284e-7}},{"name":"conv128_1/conv2/conv/filters","shape":[3,3,128,128],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.00021715293474057143,"min":-0.02909849325523657}},{"name":"conv128_1/conv2/scale/biases","shape":[128],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.00782704236460667,"min":-0.7200878975438136}},{"name":"conv128_1/conv2/scale/weights","shape":[128],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.029993299409454943,"min":3.630716562271118}},{"name":"conv128_2/conv1/conv/bias","shape":[128],"dtype":"float32","quantization":{"dtype":"uint8","scale":3.567012027797675e-10,"min":-5.243507680862582e-8}},{"name":"conv128_2/conv1/conv/filters","shape":[3,3,128,128],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.00017718105923895743,"min":-0.022324813464108636}},{"name":"conv128_2/conv1/scale/biases","shape":[128],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.015933452867994122,"min":-1.5614783810634238}},{"name":"conv128_2/conv1/scale/weights","shape":[128],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.00794
0645778880399,"min":4.927767753601074}},{"name":"conv128_2/conv2/conv/bias","shape":[128],"dtype":"float32","quantization":{"dtype":"uint8","scale":1.0383988570966347e-9,"min":-1.2356946399449953e-7}},{"name":"conv128_2/conv2/conv/filters","shape":[3,3,128,128],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.0001451439717236687,"min":-0.01712698866339291}},{"name":"conv128_2/conv2/scale/biases","shape":[128],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.00797275748907351,"min":-0.7414664464838364}},{"name":"conv128_2/conv2/scale/weights","shape":[128],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.02892604528688917,"min":4.750600814819336}},{"name":"conv128_down/conv1/conv/bias","shape":[128],"dtype":"float32","quantization":{"dtype":"uint8","scale":4.3550543563576545e-10,"min":-4.311503812794078e-8}},{"name":"conv128_down/conv1/conv/filters","shape":[3,3,64,128],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.00020040544662989823,"min":-0.022245004575918704}},{"name":"conv128_down/conv1/scale/biases","shape":[128],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.01211262824488621,"min":-1.6957679542840696}},{"name":"conv128_down/conv1/scale/weights","shape":[128],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.007448580685783835,"min":2.830846071243286}},{"name":"conv128_down/conv2/conv/bias","shape":[128],"dtype":"float32","quantization":{"dtype":"uint8","scale":9.031058637304466e-10,"min":-1.1650065642122761e-7}},{"name":"conv128_down/conv2/conv/filters","shape":[3,3,128,128],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.00022380277514457702,"min":-0.02484210804104805}},{"name":"conv128_down/conv2/scale/biases","shape":[128],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.008878476946961646,"min":-1.029903325847551}},{"name":"conv128_down/conv2/scale/weights","shape":[128],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.027663578706629135,"min":3.111155
5099487305}},{"name":"conv256_1/conv1/conv/bias","shape":[256],"dtype":"float32","quantization":{"dtype":"uint8","scale":3.2105515457510146e-10,"min":-3.467395669411096e-8}},{"name":"conv256_1/conv1/conv/filters","shape":[3,3,256,256],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.00037147209924810076,"min":-0.04234781931428348}},{"name":"conv256_1/conv1/scale/biases","shape":[256],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.01643658619300992,"min":-1.3149268954407936}},{"name":"conv256_1/conv1/scale/weights","shape":[256],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.043242172166412955,"min":5.28542947769165}},{"name":"conv256_1/conv2/conv/bias","shape":[256],"dtype":"float32","quantization":{"dtype":"uint8","scale":9.13591691187321e-10,"min":-1.2333487831028833e-7}},{"name":"conv256_1/conv2/conv/filters","shape":[3,3,256,256],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.0003289232651392619,"min":-0.041773254672686264}},{"name":"conv256_1/conv2/scale/biases","shape":[256],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.0164216583850337,"min":-1.3958409627278647}},{"name":"conv256_1/conv2/scale/weights","shape":[256],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.0573908618852204,"min":4.360693454742432}},{"name":"conv256_2/conv1/conv/bias","shape":[256],"dtype":"float32","quantization":{"dtype":"uint8","scale":2.418552539068639e-10,"min":-2.539480166022071e-8}},{"name":"conv256_2/conv1/conv/filters","shape":[3,3,256,256],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.00010476927912118389,"min":-0.015610622589056398}},{"name":"conv256_2/conv1/scale/biases","shape":[256],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.01578534350675695,"min":-1.1049740454729864}},{"name":"conv256_2/conv1/scale/weights","shape":[256],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.06024209564807368,"min":6.598613739013672}},{"name":"conv256_2/conv2/conv/bias","shape"
:[256],"dtype":"float32","quantization":{"dtype":"uint8","scale":1.0822061852320308e-9,"min":-1.515088659324843e-7}},{"name":"conv256_2/conv2/conv/filters","shape":[3,3,256,256],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.00005543030908002573,"min":-0.007427661416723448}},{"name":"conv256_2/conv2/scale/biases","shape":[256],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.006792667566561232,"min":-0.8083274404207865}},{"name":"conv256_2/conv2/scale/weights","shape":[256],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.04302893993901272,"min":2.2855491638183594}},{"name":"conv256_down/conv1/conv/bias","shape":[256],"dtype":"float32","quantization":{"dtype":"uint8","scale":5.036909834755123e-10,"min":-6.396875490139006e-8}},{"name":"conv256_down/conv1/conv/filters","shape":[3,3,128,256],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.0002698827827093648,"min":-0.03994265184098599}},{"name":"conv256_down/conv1/scale/biases","shape":[256],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.022031106200872685,"min":-3.1063859743230484}},{"name":"conv256_down/conv1/scale/weights","shape":[256],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.014870181738161573,"min":4.269900798797607}},{"name":"conv256_down/conv2/conv/bias","shape":[256],"dtype":"float32","quantization":{"dtype":"uint8","scale":6.693064577513153e-10,"min":-7.630093618364995e-8}},{"name":"conv256_down/conv2/conv/filters","shape":[3,3,256,256],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.00046430734150549946,"min":-0.03946612402796745}},{"name":"conv256_down/conv2/scale/biases","shape":[256],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.01290142021927179,"min":-1.1482263995151893}},{"name":"conv256_down/conv2/scale/weights","shape":[256],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.03475512242784687,"min":3.608360528945923}},{"name":"conv256_down_out/conv1/conv/bias","shape":[256],"dtype":"float
32","quantization":{"dtype":"uint8","scale":4.5347887884881677e-10,"min":-6.530095855422961e-8}},{"name":"conv256_down_out/conv1/conv/filters","shape":[3,3,256,256],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.000568966465253456,"min":-0.05632768006009214}},{"name":"conv256_down_out/conv1/scale/biases","shape":[256],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.04850864223405427,"min":-6.306123490427055}},{"name":"conv256_down_out/conv1/scale/weights","shape":[256],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.017565592597512638,"min":4.594101905822754}},{"name":"conv256_down_out/conv2/conv/bias","shape":[256],"dtype":"float32","quantization":{"dtype":"uint8","scale":1.2668428328152895e-9,"min":-2.2549802424112154e-7}},{"name":"conv256_down_out/conv2/conv/filters","shape":[3,3,256,256],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.0003739110687199761,"min":-0.06954745878191555}},{"name":"conv256_down_out/conv2/scale/biases","shape":[256],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.021499746921015722,"min":-1.2039858275768804}},{"name":"conv256_down_out/conv2/scale/weights","shape":[256],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.04351314469879749,"min":4.31956672668457}},{"name":"conv32_1/conv1/conv/bias","shape":[32],"dtype":"float32","quantization":{"dtype":"uint8","scale":1.5952091238361e-8,"min":-0.000001978059313556764}},{"name":"conv32_1/conv1/conv/filters","shape":[3,3,32,32],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.0007328585666768691,"min":-0.0974701893680236}},{"name":"conv32_1/conv1/scale/biases","shape":[32],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.0194976619645661,"min":-2.3787147596770644}},{"name":"conv32_1/conv1/scale/weights","shape":[32],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.02146628510718252,"min":3.1103382110595703}},{"name":"conv32_1/conv2/conv/bias","shape":[32],"dtype":"float32","quantization":{"
dtype":"uint8","scale":4.600177166424806e-9,"min":-5.70421968636676e-7}},{"name":"conv32_1/conv2/conv/filters","shape":[3,3,32,32],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.0004114975824075587,"min":-0.05267169054816751}},{"name":"conv32_1/conv2/scale/biases","shape":[32],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.010974494616190593,"min":-1.240117891629537}},{"name":"conv32_1/conv2/scale/weights","shape":[32],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.03400764932819441,"min":2.1677730083465576}},{"name":"conv32_2/conv1/conv/bias","shape":[32],"dtype":"float32","quantization":{"dtype":"uint8","scale":5.9886454383719385e-9,"min":-7.366033889197485e-7}},{"name":"conv32_2/conv1/conv/filters","shape":[3,3,32,32],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.0005358753251094444,"min":-0.0760942961655411}},{"name":"conv32_2/conv1/scale/biases","shape":[32],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.022131107367721257,"min":-2.5229462399202234}},{"name":"conv32_2/conv1/scale/weights","shape":[32],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.014633869657329485,"min":2.769575357437134}},{"name":"conv32_2/conv2/conv/bias","shape":[32],"dtype":"float32","quantization":{"dtype":"uint8","scale":6.8779549306497095e-9,"min":-9.010120959151119e-7}},{"name":"conv32_2/conv2/conv/filters","shape":[3,3,32,32],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.00030145110452876373,"min":-0.03949009469326805}},{"name":"conv32_2/conv2/scale/biases","shape":[32],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.010553357180427103,"min":-1.2452961472903983}},{"name":"conv32_2/conv2/scale/weights","shape":[32],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.03929369870354148,"min":4.8010945320129395}},{"name":"conv32_3/conv1/conv/bias","shape":[32],"dtype":"float32","quantization":{"dtype":"uint8","scale":4.1064200719547974e-9,"min":-3.0387508532465503e-7}},{"name"
:"conv32_3/conv1/conv/filters","shape":[3,3,32,32],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.0003133527642371608,"min":-0.040735859350830905}},{"name":"conv32_3/conv1/scale/biases","shape":[32],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.007104101251153385,"min":-0.34810096130651585}},{"name":"conv32_3/conv1/scale/weights","shape":[32],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.009252088210161994,"min":2.333256721496582}},{"name":"conv32_3/conv2/conv/bias","shape":[32],"dtype":"float32","quantization":{"dtype":"uint8","scale":5.62726418316814e-9,"min":-6.921534945296811e-7}},{"name":"conv32_3/conv2/conv/filters","shape":[3,3,32,32],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.00029995629892629733,"min":-0.031195455088334923}},{"name":"conv32_3/conv2/scale/biases","shape":[32],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.010314425300149357,"min":-1.268674311918371}},{"name":"conv32_3/conv2/scale/weights","shape":[32],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.0467432975769043,"min":5.362040996551514}},{"name":"conv32_down/conv/bias","shape":[32],"dtype":"float32","quantization":{"dtype":"uint8","scale":8.471445956577858e-7,"min":-0.00014740315964445472}},{"name":"conv32_down/conv/filters","shape":[7,7,3,32],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.0005260649557207145,"min":-0.07101876902229645}},{"name":"conv32_down/scale/biases","shape":[32],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.008471635042452345,"min":-0.931879854669758}},{"name":"conv32_down/scale/weights","shape":[32],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.06814416062598135,"min":5.788674831390381}},{"name":"conv64_1/conv1/conv/bias","shape":[64],"dtype":"float32","quantization":{"dtype":"uint8","scale":1.1319172039756998e-9,"min":-1.4941307092479238e-7}},{"name":"conv64_1/conv1/conv/filters","shape":[3,3,64,64],"dtype":"float32"},{"name":"conv64_1/conv1/
scale/biases","shape":[64],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.01425027146058924,"min":-0.6982633015688727}},{"name":"conv64_1/conv1/scale/weights","shape":[64],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.007802607031429515,"min":3.401733160018921}},{"name":"conv64_1/conv2/conv/bias","shape":[64],"dtype":"float32","quantization":{"dtype":"uint8","scale":2.5635019893325435e-9,"min":-2.717312108692496e-7}},{"name":"conv64_1/conv2/conv/filters","shape":[3,3,64,64],"dtype":"float32"},{"name":"conv64_1/conv2/scale/biases","shape":[64],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.007973166306813557,"min":-0.7415044665336609}},{"name":"conv64_1/conv2/scale/weights","shape":[64],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.04062801716374416,"min":3.542381525039673}},{"name":"conv64_2/conv1/conv/bias","shape":[64],"dtype":"float32","quantization":{"dtype":"uint8","scale":1.2535732661062331e-9,"min":-1.8302169685151004e-7}},{"name":"conv64_2/conv1/conv/filters","shape":[3,3,64,64],"dtype":"float32"},{"name":"conv64_2/conv1/scale/biases","shape":[64],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.01859012585060269,"min":-2.3795361088771445}},{"name":"conv64_2/conv1/scale/weights","shape":[64],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.005631206549850164,"min":2.9051668643951416}},{"name":"conv64_2/conv2/conv/bias","shape":[64],"dtype":"float32","quantization":{"dtype":"uint8","scale":2.486726369919351e-9,"min":-3.5311514452854786e-7}},{"name":"conv64_2/conv2/conv/filters","shape":[3,3,64,64],"dtype":"float32"},{"name":"conv64_2/conv2/scale/biases","shape":[64],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.006418555858088475,"min":-0.5263215803632549}},{"name":"conv64_2/conv2/scale/weights","shape":[64],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.03740917467603497,"min":5.571568965911865}},{"name":"conv64_3/conv1/conv/bias","shape":[64],"dtype":"floa
t32","quantization":{"dtype":"uint8","scale":7.432564576875473e-10,"min":-8.47312361763804e-8}},{"name":"conv64_3/conv1/conv/filters","shape":[3,3,64,64],"dtype":"float32"},{"name":"conv64_3/conv1/scale/biases","shape":[64],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.010945847922680425,"min":-1.3353934465670119}},{"name":"conv64_3/conv1/scale/weights","shape":[64],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.006400122362024644,"min":2.268010377883911}},{"name":"conv64_3/conv2/conv/bias","shape":[64],"dtype":"float32","quantization":{"dtype":"uint8","scale":2.278228722014533e-9,"min":-3.212302498040492e-7}},{"name":"conv64_3/conv2/conv/filters","shape":[3,3,64,64],"dtype":"float32"},{"name":"conv64_3/conv2/scale/biases","shape":[64],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.010651412197187834,"min":-1.161003929493474}},{"name":"conv64_3/conv2/scale/weights","shape":[64],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.029840927498013366,"min":7.038398265838623}},{"name":"conv64_down/conv1/conv/bias","shape":[64],"dtype":"float32","quantization":{"dtype":"uint8","scale":8.373908033218849e-10,"min":-1.172347124650639e-7}},{"name":"conv64_down/conv1/conv/filters","shape":[3,3,32,64],"dtype":"float32"},{"name":"conv64_down/conv1/scale/biases","shape":[64],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.01691421620986041,"min":-2.0973628100226906}},{"name":"conv64_down/conv1/scale/weights","shape":[64],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.0066875364266189875,"min":2.5088400840759277}},{"name":"conv64_down/conv2/conv/bias","shape":[64],"dtype":"float32","quantization":{"dtype":"uint8","scale":2.3252014483766877e-9,"min":-2.673981665633191e-7}},{"name":"conv64_down/conv2/conv/filters","shape":[3,3,64,64],"dtype":"float32"},{"name":"conv64_down/conv2/scale/biases","shape":[64],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.015429047509735706,"min":-1.5429047509735707}}
,{"name":"conv64_down/conv2/scale/weights","shape":[64],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.032557439804077146,"min":2.6351239681243896}},{"name":"fc","shape":[256,128],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.000357687911566566,"min":-0.04578405268052045}}]}]
\ No newline at end of file
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment