Commit adaed9e7 by vincent

remove duplicated code and use tfjs-image-recognition-base

parent 5add150b
...@@ -25,7 +25,12 @@ module.exports = function(config) { ...@@ -25,7 +25,12 @@ module.exports = function(config) {
'**/*.ts': ['karma-typescript'] '**/*.ts': ['karma-typescript']
}, },
karmaTypescriptConfig: { karmaTypescriptConfig: {
tsconfig: 'tsconfig.test.json' tsconfig: 'tsconfig.test.json',
bundlerOptions: {
transforms: [
require("karma-typescript-es6-transform")()
]
}
}, },
browsers: ['Chrome'], browsers: ['Chrome'],
browserNoActivityTimeout: 60000, browserNoActivityTimeout: 60000,
......
...@@ -5,11 +5,14 @@ ...@@ -5,11 +5,14 @@
"requires": true, "requires": true,
"dependencies": { "dependencies": {
"@tensorflow/tfjs-core": { "@tensorflow/tfjs-core": {
"version": "0.11.9", "version": "0.12.14",
"resolved": "https://registry.npmjs.org/@tensorflow/tfjs-core/-/tfjs-core-0.11.9.tgz", "resolved": "https://registry.npmjs.org/@tensorflow/tfjs-core/-/tfjs-core-0.12.14.tgz",
"integrity": "sha512-upxSDwgGMGNiBBoyo8FTF67pCB/odBoN9THJzB65VDpxwHs66fI09caowzKW4fwDTLk6cExSsNlAPFSjra6Rxg==", "integrity": "sha512-BRyTwtwmJnnePTcsqjCr/IrkfZ/AsUA7JJ9O3QlROSoUgkHMzxpePkGXnBcLo3DWsI3C+zDjVaVialcGDDb+Lw==",
"requires": { "requires": {
"seedrandom": "2.4.3" "@types/seedrandom": "2.4.27",
"@types/webgl-ext": "0.0.29",
"@types/webgl2": "0.0.4",
"seedrandom": "2.4.4"
} }
}, },
"@types/estree": { "@types/estree": {
...@@ -30,6 +33,21 @@ ...@@ -30,6 +33,21 @@
"integrity": "sha512-n7wxy8r2tjVcrzZoKJlyZmi1C1VhXGHAGhDEO1iqp7fbsTSsDF3dVA50KFsPg77EXqzNJqbzcna8Mi4m7a1lyw==", "integrity": "sha512-n7wxy8r2tjVcrzZoKJlyZmi1C1VhXGHAGhDEO1iqp7fbsTSsDF3dVA50KFsPg77EXqzNJqbzcna8Mi4m7a1lyw==",
"dev": true "dev": true
}, },
"@types/seedrandom": {
"version": "2.4.27",
"resolved": "https://registry.npmjs.org/@types/seedrandom/-/seedrandom-2.4.27.tgz",
"integrity": "sha1-nbVjk33YaRX2kJK8QyWdL0hXjkE="
},
"@types/webgl-ext": {
"version": "0.0.29",
"resolved": "https://registry.npmjs.org/@types/webgl-ext/-/webgl-ext-0.0.29.tgz",
"integrity": "sha512-ZlVjDQU5Vlc9hF4LGdDldujZUf0amwlwGv1RI2bfvdrEHIl6X/7MZVpemJUjS7NxD9XaKfE8SlFrxsfXpUkt/A=="
},
"@types/webgl2": {
"version": "0.0.4",
"resolved": "https://registry.npmjs.org/@types/webgl2/-/webgl2-0.0.4.tgz",
"integrity": "sha512-PACt1xdErJbMUOUweSrbVM7gSIYm1vTncW2hF6Os/EeWi6TXYAYMPp+8v6rzHmypE5gHrxaxZNXgMkJVIdZpHw=="
},
"abbrev": { "abbrev": {
"version": "1.0.9", "version": "1.0.9",
"resolved": "https://registry.npmjs.org/abbrev/-/abbrev-1.0.9.tgz", "resolved": "https://registry.npmjs.org/abbrev/-/abbrev-1.0.9.tgz",
...@@ -186,8 +204,7 @@ ...@@ -186,8 +204,7 @@
"version": "2.2.1", "version": "2.2.1",
"resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-2.2.1.tgz", "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-2.2.1.tgz",
"integrity": "sha1-tDLdM1i2NM914eRmQ2gkBTPB3b4=", "integrity": "sha1-tDLdM1i2NM914eRmQ2gkBTPB3b4=",
"dev": true, "dev": true
"optional": true
}, },
"ansi-wrap": { "ansi-wrap": {
"version": "0.1.0", "version": "0.1.0",
...@@ -367,6 +384,635 @@ ...@@ -367,6 +384,635 @@
"dev": true, "dev": true,
"optional": true "optional": true
}, },
"babel-code-frame": {
"version": "6.26.0",
"resolved": "https://registry.npmjs.org/babel-code-frame/-/babel-code-frame-6.26.0.tgz",
"integrity": "sha1-Y/1D99weO7fONZR9uP42mj9Yx0s=",
"dev": true,
"requires": {
"chalk": "1.1.3",
"esutils": "2.0.2",
"js-tokens": "3.0.2"
}
},
"babel-core": {
"version": "6.26.3",
"resolved": "https://registry.npmjs.org/babel-core/-/babel-core-6.26.3.tgz",
"integrity": "sha512-6jyFLuDmeidKmUEb3NM+/yawG0M2bDZ9Z1qbZP59cyHLz8kYGKYwpJP0UwUKKUiTRNvxfLesJnTedqczP7cTDA==",
"dev": true,
"requires": {
"babel-code-frame": "6.26.0",
"babel-generator": "6.26.1",
"babel-helpers": "6.24.1",
"babel-messages": "6.23.0",
"babel-register": "6.26.0",
"babel-runtime": "6.26.0",
"babel-template": "6.26.0",
"babel-traverse": "6.26.0",
"babel-types": "6.26.0",
"babylon": "6.18.0",
"convert-source-map": "1.5.1",
"debug": "2.6.9",
"json5": "0.5.1",
"lodash": "4.17.10",
"minimatch": "3.0.4",
"path-is-absolute": "1.0.1",
"private": "0.1.8",
"slash": "1.0.0",
"source-map": "0.5.7"
},
"dependencies": {
"source-map": {
"version": "0.5.7",
"resolved": "https://registry.npmjs.org/source-map/-/source-map-0.5.7.tgz",
"integrity": "sha1-igOdLRAh0i0eoUyA2OpGi6LvP8w=",
"dev": true
}
}
},
"babel-generator": {
"version": "6.26.1",
"resolved": "https://registry.npmjs.org/babel-generator/-/babel-generator-6.26.1.tgz",
"integrity": "sha512-HyfwY6ApZj7BYTcJURpM5tznulaBvyio7/0d4zFOeMPUmfxkCjHocCuoLa2SAGzBI8AREcH3eP3758F672DppA==",
"dev": true,
"requires": {
"babel-messages": "6.23.0",
"babel-runtime": "6.26.0",
"babel-types": "6.26.0",
"detect-indent": "4.0.0",
"jsesc": "1.3.0",
"lodash": "4.17.10",
"source-map": "0.5.7",
"trim-right": "1.0.1"
},
"dependencies": {
"source-map": {
"version": "0.5.7",
"resolved": "https://registry.npmjs.org/source-map/-/source-map-0.5.7.tgz",
"integrity": "sha1-igOdLRAh0i0eoUyA2OpGi6LvP8w=",
"dev": true
}
}
},
"babel-helper-builder-binary-assignment-operator-visitor": {
"version": "6.24.1",
"resolved": "https://registry.npmjs.org/babel-helper-builder-binary-assignment-operator-visitor/-/babel-helper-builder-binary-assignment-operator-visitor-6.24.1.tgz",
"integrity": "sha1-zORReto1b0IgvK6KAsKzRvmlZmQ=",
"dev": true,
"requires": {
"babel-helper-explode-assignable-expression": "6.24.1",
"babel-runtime": "6.26.0",
"babel-types": "6.26.0"
}
},
"babel-helper-call-delegate": {
"version": "6.24.1",
"resolved": "https://registry.npmjs.org/babel-helper-call-delegate/-/babel-helper-call-delegate-6.24.1.tgz",
"integrity": "sha1-7Oaqzdx25Bw0YfiL/Fdb0Nqi340=",
"dev": true,
"requires": {
"babel-helper-hoist-variables": "6.24.1",
"babel-runtime": "6.26.0",
"babel-traverse": "6.26.0",
"babel-types": "6.26.0"
}
},
"babel-helper-define-map": {
"version": "6.26.0",
"resolved": "https://registry.npmjs.org/babel-helper-define-map/-/babel-helper-define-map-6.26.0.tgz",
"integrity": "sha1-pfVtq0GiX5fstJjH66ypgZ+Vvl8=",
"dev": true,
"requires": {
"babel-helper-function-name": "6.24.1",
"babel-runtime": "6.26.0",
"babel-types": "6.26.0",
"lodash": "4.17.10"
}
},
"babel-helper-explode-assignable-expression": {
"version": "6.24.1",
"resolved": "https://registry.npmjs.org/babel-helper-explode-assignable-expression/-/babel-helper-explode-assignable-expression-6.24.1.tgz",
"integrity": "sha1-8luCz33BBDPFX3BZLVdGQArCLKo=",
"dev": true,
"requires": {
"babel-runtime": "6.26.0",
"babel-traverse": "6.26.0",
"babel-types": "6.26.0"
}
},
"babel-helper-function-name": {
"version": "6.24.1",
"resolved": "https://registry.npmjs.org/babel-helper-function-name/-/babel-helper-function-name-6.24.1.tgz",
"integrity": "sha1-00dbjAPtmCQqJbSDUasYOZ01gKk=",
"dev": true,
"requires": {
"babel-helper-get-function-arity": "6.24.1",
"babel-runtime": "6.26.0",
"babel-template": "6.26.0",
"babel-traverse": "6.26.0",
"babel-types": "6.26.0"
}
},
"babel-helper-get-function-arity": {
"version": "6.24.1",
"resolved": "https://registry.npmjs.org/babel-helper-get-function-arity/-/babel-helper-get-function-arity-6.24.1.tgz",
"integrity": "sha1-j3eCqpNAfEHTqlCQj4mwMbG2hT0=",
"dev": true,
"requires": {
"babel-runtime": "6.26.0",
"babel-types": "6.26.0"
}
},
"babel-helper-hoist-variables": {
"version": "6.24.1",
"resolved": "https://registry.npmjs.org/babel-helper-hoist-variables/-/babel-helper-hoist-variables-6.24.1.tgz",
"integrity": "sha1-HssnaJydJVE+rbyZFKc/VAi+enY=",
"dev": true,
"requires": {
"babel-runtime": "6.26.0",
"babel-types": "6.26.0"
}
},
"babel-helper-optimise-call-expression": {
"version": "6.24.1",
"resolved": "https://registry.npmjs.org/babel-helper-optimise-call-expression/-/babel-helper-optimise-call-expression-6.24.1.tgz",
"integrity": "sha1-96E0J7qfc/j0+pk8VKl4gtEkQlc=",
"dev": true,
"requires": {
"babel-runtime": "6.26.0",
"babel-types": "6.26.0"
}
},
"babel-helper-regex": {
"version": "6.26.0",
"resolved": "https://registry.npmjs.org/babel-helper-regex/-/babel-helper-regex-6.26.0.tgz",
"integrity": "sha1-MlxZ+QL4LyS3T6zu0DY5VPZJXnI=",
"dev": true,
"requires": {
"babel-runtime": "6.26.0",
"babel-types": "6.26.0",
"lodash": "4.17.10"
}
},
"babel-helper-remap-async-to-generator": {
"version": "6.24.1",
"resolved": "https://registry.npmjs.org/babel-helper-remap-async-to-generator/-/babel-helper-remap-async-to-generator-6.24.1.tgz",
"integrity": "sha1-XsWBgnrXI/7N04HxySg5BnbkVRs=",
"dev": true,
"requires": {
"babel-helper-function-name": "6.24.1",
"babel-runtime": "6.26.0",
"babel-template": "6.26.0",
"babel-traverse": "6.26.0",
"babel-types": "6.26.0"
}
},
"babel-helper-replace-supers": {
"version": "6.24.1",
"resolved": "https://registry.npmjs.org/babel-helper-replace-supers/-/babel-helper-replace-supers-6.24.1.tgz",
"integrity": "sha1-v22/5Dk40XNpohPKiov3S2qQqxo=",
"dev": true,
"requires": {
"babel-helper-optimise-call-expression": "6.24.1",
"babel-messages": "6.23.0",
"babel-runtime": "6.26.0",
"babel-template": "6.26.0",
"babel-traverse": "6.26.0",
"babel-types": "6.26.0"
}
},
"babel-helpers": {
"version": "6.24.1",
"resolved": "https://registry.npmjs.org/babel-helpers/-/babel-helpers-6.24.1.tgz",
"integrity": "sha1-NHHenK7DiOXIUOWX5Yom3fN2ArI=",
"dev": true,
"requires": {
"babel-runtime": "6.26.0",
"babel-template": "6.26.0"
}
},
"babel-messages": {
"version": "6.23.0",
"resolved": "https://registry.npmjs.org/babel-messages/-/babel-messages-6.23.0.tgz",
"integrity": "sha1-8830cDhYA1sqKVHG7F7fbGLyYw4=",
"dev": true,
"requires": {
"babel-runtime": "6.26.0"
}
},
"babel-plugin-check-es2015-constants": {
"version": "6.22.0",
"resolved": "https://registry.npmjs.org/babel-plugin-check-es2015-constants/-/babel-plugin-check-es2015-constants-6.22.0.tgz",
"integrity": "sha1-NRV7EBQm/S/9PaP3XH0ekYNbv4o=",
"dev": true,
"requires": {
"babel-runtime": "6.26.0"
}
},
"babel-plugin-syntax-async-functions": {
"version": "6.13.0",
"resolved": "https://registry.npmjs.org/babel-plugin-syntax-async-functions/-/babel-plugin-syntax-async-functions-6.13.0.tgz",
"integrity": "sha1-ytnK0RkbWtY0vzCuCHI5HgZHvpU=",
"dev": true
},
"babel-plugin-syntax-exponentiation-operator": {
"version": "6.13.0",
"resolved": "https://registry.npmjs.org/babel-plugin-syntax-exponentiation-operator/-/babel-plugin-syntax-exponentiation-operator-6.13.0.tgz",
"integrity": "sha1-nufoM3KQ2pUoggGmpX9BcDF4MN4=",
"dev": true
},
"babel-plugin-syntax-trailing-function-commas": {
"version": "6.22.0",
"resolved": "https://registry.npmjs.org/babel-plugin-syntax-trailing-function-commas/-/babel-plugin-syntax-trailing-function-commas-6.22.0.tgz",
"integrity": "sha1-ugNgk3+NBuQBgKQ/4NVhb/9TLPM=",
"dev": true
},
"babel-plugin-transform-async-to-generator": {
"version": "6.24.1",
"resolved": "https://registry.npmjs.org/babel-plugin-transform-async-to-generator/-/babel-plugin-transform-async-to-generator-6.24.1.tgz",
"integrity": "sha1-ZTbjeK/2yx1VF6wOQOs+n8jQh2E=",
"dev": true,
"requires": {
"babel-helper-remap-async-to-generator": "6.24.1",
"babel-plugin-syntax-async-functions": "6.13.0",
"babel-runtime": "6.26.0"
}
},
"babel-plugin-transform-es2015-arrow-functions": {
"version": "6.22.0",
"resolved": "https://registry.npmjs.org/babel-plugin-transform-es2015-arrow-functions/-/babel-plugin-transform-es2015-arrow-functions-6.22.0.tgz",
"integrity": "sha1-RSaSy3EdX3ncf4XkQM5BufJE0iE=",
"dev": true,
"requires": {
"babel-runtime": "6.26.0"
}
},
"babel-plugin-transform-es2015-block-scoped-functions": {
"version": "6.22.0",
"resolved": "https://registry.npmjs.org/babel-plugin-transform-es2015-block-scoped-functions/-/babel-plugin-transform-es2015-block-scoped-functions-6.22.0.tgz",
"integrity": "sha1-u8UbSflk1wy42OC5ToICRs46YUE=",
"dev": true,
"requires": {
"babel-runtime": "6.26.0"
}
},
"babel-plugin-transform-es2015-block-scoping": {
"version": "6.26.0",
"resolved": "https://registry.npmjs.org/babel-plugin-transform-es2015-block-scoping/-/babel-plugin-transform-es2015-block-scoping-6.26.0.tgz",
"integrity": "sha1-1w9SmcEwjQXBL0Y4E7CgnnOxiV8=",
"dev": true,
"requires": {
"babel-runtime": "6.26.0",
"babel-template": "6.26.0",
"babel-traverse": "6.26.0",
"babel-types": "6.26.0",
"lodash": "4.17.10"
}
},
"babel-plugin-transform-es2015-classes": {
"version": "6.24.1",
"resolved": "https://registry.npmjs.org/babel-plugin-transform-es2015-classes/-/babel-plugin-transform-es2015-classes-6.24.1.tgz",
"integrity": "sha1-WkxYpQyclGHlZLSyo7+ryXolhNs=",
"dev": true,
"requires": {
"babel-helper-define-map": "6.26.0",
"babel-helper-function-name": "6.24.1",
"babel-helper-optimise-call-expression": "6.24.1",
"babel-helper-replace-supers": "6.24.1",
"babel-messages": "6.23.0",
"babel-runtime": "6.26.0",
"babel-template": "6.26.0",
"babel-traverse": "6.26.0",
"babel-types": "6.26.0"
}
},
"babel-plugin-transform-es2015-computed-properties": {
"version": "6.24.1",
"resolved": "https://registry.npmjs.org/babel-plugin-transform-es2015-computed-properties/-/babel-plugin-transform-es2015-computed-properties-6.24.1.tgz",
"integrity": "sha1-b+Ko0WiV1WNPTNmZttNICjCBWbM=",
"dev": true,
"requires": {
"babel-runtime": "6.26.0",
"babel-template": "6.26.0"
}
},
"babel-plugin-transform-es2015-destructuring": {
"version": "6.23.0",
"resolved": "https://registry.npmjs.org/babel-plugin-transform-es2015-destructuring/-/babel-plugin-transform-es2015-destructuring-6.23.0.tgz",
"integrity": "sha1-mXux8auWf2gtKwh2/jWNYOdlxW0=",
"dev": true,
"requires": {
"babel-runtime": "6.26.0"
}
},
"babel-plugin-transform-es2015-duplicate-keys": {
"version": "6.24.1",
"resolved": "https://registry.npmjs.org/babel-plugin-transform-es2015-duplicate-keys/-/babel-plugin-transform-es2015-duplicate-keys-6.24.1.tgz",
"integrity": "sha1-c+s9MQypaePvnskcU3QabxV2Qj4=",
"dev": true,
"requires": {
"babel-runtime": "6.26.0",
"babel-types": "6.26.0"
}
},
"babel-plugin-transform-es2015-for-of": {
"version": "6.23.0",
"resolved": "https://registry.npmjs.org/babel-plugin-transform-es2015-for-of/-/babel-plugin-transform-es2015-for-of-6.23.0.tgz",
"integrity": "sha1-9HyVsrYT3x0+zC/bdXNiPHUkhpE=",
"dev": true,
"requires": {
"babel-runtime": "6.26.0"
}
},
"babel-plugin-transform-es2015-function-name": {
"version": "6.24.1",
"resolved": "https://registry.npmjs.org/babel-plugin-transform-es2015-function-name/-/babel-plugin-transform-es2015-function-name-6.24.1.tgz",
"integrity": "sha1-g0yJhTvDaxrw86TF26qU/Y6sqos=",
"dev": true,
"requires": {
"babel-helper-function-name": "6.24.1",
"babel-runtime": "6.26.0",
"babel-types": "6.26.0"
}
},
"babel-plugin-transform-es2015-literals": {
"version": "6.22.0",
"resolved": "https://registry.npmjs.org/babel-plugin-transform-es2015-literals/-/babel-plugin-transform-es2015-literals-6.22.0.tgz",
"integrity": "sha1-T1SgLWzWbPkVKAAZox0xklN3yi4=",
"dev": true,
"requires": {
"babel-runtime": "6.26.0"
}
},
"babel-plugin-transform-es2015-modules-amd": {
"version": "6.24.1",
"resolved": "https://registry.npmjs.org/babel-plugin-transform-es2015-modules-amd/-/babel-plugin-transform-es2015-modules-amd-6.24.1.tgz",
"integrity": "sha1-Oz5UAXI5hC1tGcMBHEvS8AoA0VQ=",
"dev": true,
"requires": {
"babel-plugin-transform-es2015-modules-commonjs": "6.26.2",
"babel-runtime": "6.26.0",
"babel-template": "6.26.0"
}
},
"babel-plugin-transform-es2015-modules-commonjs": {
"version": "6.26.2",
"resolved": "https://registry.npmjs.org/babel-plugin-transform-es2015-modules-commonjs/-/babel-plugin-transform-es2015-modules-commonjs-6.26.2.tgz",
"integrity": "sha512-CV9ROOHEdrjcwhIaJNBGMBCodN+1cfkwtM1SbUHmvyy35KGT7fohbpOxkE2uLz1o6odKK2Ck/tz47z+VqQfi9Q==",
"dev": true,
"requires": {
"babel-plugin-transform-strict-mode": "6.24.1",
"babel-runtime": "6.26.0",
"babel-template": "6.26.0",
"babel-types": "6.26.0"
}
},
"babel-plugin-transform-es2015-modules-systemjs": {
"version": "6.24.1",
"resolved": "https://registry.npmjs.org/babel-plugin-transform-es2015-modules-systemjs/-/babel-plugin-transform-es2015-modules-systemjs-6.24.1.tgz",
"integrity": "sha1-/4mhQrkRmpBhlfXxBuzzBdlAfSM=",
"dev": true,
"requires": {
"babel-helper-hoist-variables": "6.24.1",
"babel-runtime": "6.26.0",
"babel-template": "6.26.0"
}
},
"babel-plugin-transform-es2015-modules-umd": {
"version": "6.24.1",
"resolved": "https://registry.npmjs.org/babel-plugin-transform-es2015-modules-umd/-/babel-plugin-transform-es2015-modules-umd-6.24.1.tgz",
"integrity": "sha1-rJl+YoXNGO1hdq22B9YCNErThGg=",
"dev": true,
"requires": {
"babel-plugin-transform-es2015-modules-amd": "6.24.1",
"babel-runtime": "6.26.0",
"babel-template": "6.26.0"
}
},
"babel-plugin-transform-es2015-object-super": {
"version": "6.24.1",
"resolved": "https://registry.npmjs.org/babel-plugin-transform-es2015-object-super/-/babel-plugin-transform-es2015-object-super-6.24.1.tgz",
"integrity": "sha1-JM72muIcuDp/hgPa0CH1cusnj40=",
"dev": true,
"requires": {
"babel-helper-replace-supers": "6.24.1",
"babel-runtime": "6.26.0"
}
},
"babel-plugin-transform-es2015-parameters": {
"version": "6.24.1",
"resolved": "https://registry.npmjs.org/babel-plugin-transform-es2015-parameters/-/babel-plugin-transform-es2015-parameters-6.24.1.tgz",
"integrity": "sha1-V6w1GrScrxSpfNE7CfZv3wpiXys=",
"dev": true,
"requires": {
"babel-helper-call-delegate": "6.24.1",
"babel-helper-get-function-arity": "6.24.1",
"babel-runtime": "6.26.0",
"babel-template": "6.26.0",
"babel-traverse": "6.26.0",
"babel-types": "6.26.0"
}
},
"babel-plugin-transform-es2015-shorthand-properties": {
"version": "6.24.1",
"resolved": "https://registry.npmjs.org/babel-plugin-transform-es2015-shorthand-properties/-/babel-plugin-transform-es2015-shorthand-properties-6.24.1.tgz",
"integrity": "sha1-JPh11nIch2YbvZmkYi5R8U3jiqA=",
"dev": true,
"requires": {
"babel-runtime": "6.26.0",
"babel-types": "6.26.0"
}
},
"babel-plugin-transform-es2015-spread": {
"version": "6.22.0",
"resolved": "https://registry.npmjs.org/babel-plugin-transform-es2015-spread/-/babel-plugin-transform-es2015-spread-6.22.0.tgz",
"integrity": "sha1-1taKmfia7cRTbIGlQujdnxdG+NE=",
"dev": true,
"requires": {
"babel-runtime": "6.26.0"
}
},
"babel-plugin-transform-es2015-sticky-regex": {
"version": "6.24.1",
"resolved": "https://registry.npmjs.org/babel-plugin-transform-es2015-sticky-regex/-/babel-plugin-transform-es2015-sticky-regex-6.24.1.tgz",
"integrity": "sha1-AMHNsaynERLN8M9hJsLta0V8zbw=",
"dev": true,
"requires": {
"babel-helper-regex": "6.26.0",
"babel-runtime": "6.26.0",
"babel-types": "6.26.0"
}
},
"babel-plugin-transform-es2015-template-literals": {
"version": "6.22.0",
"resolved": "https://registry.npmjs.org/babel-plugin-transform-es2015-template-literals/-/babel-plugin-transform-es2015-template-literals-6.22.0.tgz",
"integrity": "sha1-qEs0UPfp+PH2g51taH2oS7EjbY0=",
"dev": true,
"requires": {
"babel-runtime": "6.26.0"
}
},
"babel-plugin-transform-es2015-typeof-symbol": {
"version": "6.23.0",
"resolved": "https://registry.npmjs.org/babel-plugin-transform-es2015-typeof-symbol/-/babel-plugin-transform-es2015-typeof-symbol-6.23.0.tgz",
"integrity": "sha1-3sCfHN3/lLUqxz1QXITfWdzOs3I=",
"dev": true,
"requires": {
"babel-runtime": "6.26.0"
}
},
"babel-plugin-transform-es2015-unicode-regex": {
"version": "6.24.1",
"resolved": "https://registry.npmjs.org/babel-plugin-transform-es2015-unicode-regex/-/babel-plugin-transform-es2015-unicode-regex-6.24.1.tgz",
"integrity": "sha1-04sS9C6nMj9yk4fxinxa4frrNek=",
"dev": true,
"requires": {
"babel-helper-regex": "6.26.0",
"babel-runtime": "6.26.0",
"regexpu-core": "2.0.0"
}
},
"babel-plugin-transform-exponentiation-operator": {
"version": "6.24.1",
"resolved": "https://registry.npmjs.org/babel-plugin-transform-exponentiation-operator/-/babel-plugin-transform-exponentiation-operator-6.24.1.tgz",
"integrity": "sha1-KrDJx/MJj6SJB3cruBP+QejeOg4=",
"dev": true,
"requires": {
"babel-helper-builder-binary-assignment-operator-visitor": "6.24.1",
"babel-plugin-syntax-exponentiation-operator": "6.13.0",
"babel-runtime": "6.26.0"
}
},
"babel-plugin-transform-regenerator": {
"version": "6.26.0",
"resolved": "https://registry.npmjs.org/babel-plugin-transform-regenerator/-/babel-plugin-transform-regenerator-6.26.0.tgz",
"integrity": "sha1-4HA2lvveJ/Cj78rPi03KL3s6jy8=",
"dev": true,
"requires": {
"regenerator-transform": "0.10.1"
}
},
"babel-plugin-transform-strict-mode": {
"version": "6.24.1",
"resolved": "https://registry.npmjs.org/babel-plugin-transform-strict-mode/-/babel-plugin-transform-strict-mode-6.24.1.tgz",
"integrity": "sha1-1fr3qleKZbvlkc9e2uBKDGcCB1g=",
"dev": true,
"requires": {
"babel-runtime": "6.26.0",
"babel-types": "6.26.0"
}
},
"babel-preset-env": {
"version": "1.7.0",
"resolved": "https://registry.npmjs.org/babel-preset-env/-/babel-preset-env-1.7.0.tgz",
"integrity": "sha512-9OR2afuKDneX2/q2EurSftUYM0xGu4O2D9adAhVfADDhrYDaxXV0rBbevVYoY9n6nyX1PmQW/0jtpJvUNr9CHg==",
"dev": true,
"requires": {
"babel-plugin-check-es2015-constants": "6.22.0",
"babel-plugin-syntax-trailing-function-commas": "6.22.0",
"babel-plugin-transform-async-to-generator": "6.24.1",
"babel-plugin-transform-es2015-arrow-functions": "6.22.0",
"babel-plugin-transform-es2015-block-scoped-functions": "6.22.0",
"babel-plugin-transform-es2015-block-scoping": "6.26.0",
"babel-plugin-transform-es2015-classes": "6.24.1",
"babel-plugin-transform-es2015-computed-properties": "6.24.1",
"babel-plugin-transform-es2015-destructuring": "6.23.0",
"babel-plugin-transform-es2015-duplicate-keys": "6.24.1",
"babel-plugin-transform-es2015-for-of": "6.23.0",
"babel-plugin-transform-es2015-function-name": "6.24.1",
"babel-plugin-transform-es2015-literals": "6.22.0",
"babel-plugin-transform-es2015-modules-amd": "6.24.1",
"babel-plugin-transform-es2015-modules-commonjs": "6.26.2",
"babel-plugin-transform-es2015-modules-systemjs": "6.24.1",
"babel-plugin-transform-es2015-modules-umd": "6.24.1",
"babel-plugin-transform-es2015-object-super": "6.24.1",
"babel-plugin-transform-es2015-parameters": "6.24.1",
"babel-plugin-transform-es2015-shorthand-properties": "6.24.1",
"babel-plugin-transform-es2015-spread": "6.22.0",
"babel-plugin-transform-es2015-sticky-regex": "6.24.1",
"babel-plugin-transform-es2015-template-literals": "6.22.0",
"babel-plugin-transform-es2015-typeof-symbol": "6.23.0",
"babel-plugin-transform-es2015-unicode-regex": "6.24.1",
"babel-plugin-transform-exponentiation-operator": "6.24.1",
"babel-plugin-transform-regenerator": "6.26.0",
"browserslist": "3.2.8",
"invariant": "2.2.4",
"semver": "5.5.0"
}
},
"babel-register": {
"version": "6.26.0",
"resolved": "https://registry.npmjs.org/babel-register/-/babel-register-6.26.0.tgz",
"integrity": "sha1-btAhFz4vy0htestFxgCahW9kcHE=",
"dev": true,
"requires": {
"babel-core": "6.26.3",
"babel-runtime": "6.26.0",
"core-js": "2.5.7",
"home-or-tmp": "2.0.0",
"lodash": "4.17.10",
"mkdirp": "0.5.1",
"source-map-support": "0.4.18"
}
},
"babel-runtime": {
"version": "6.26.0",
"resolved": "https://registry.npmjs.org/babel-runtime/-/babel-runtime-6.26.0.tgz",
"integrity": "sha1-llxwWGaOgrVde/4E/yM3vItWR/4=",
"dev": true,
"requires": {
"core-js": "2.5.7",
"regenerator-runtime": "0.11.1"
}
},
"babel-template": {
"version": "6.26.0",
"resolved": "https://registry.npmjs.org/babel-template/-/babel-template-6.26.0.tgz",
"integrity": "sha1-3gPi0WOWsGn0bdn/+FIfsaDjXgI=",
"dev": true,
"requires": {
"babel-runtime": "6.26.0",
"babel-traverse": "6.26.0",
"babel-types": "6.26.0",
"babylon": "6.18.0",
"lodash": "4.17.10"
}
},
"babel-traverse": {
"version": "6.26.0",
"resolved": "https://registry.npmjs.org/babel-traverse/-/babel-traverse-6.26.0.tgz",
"integrity": "sha1-RqnL1+3MYsjlwGTi0tjQ9ANXZu4=",
"dev": true,
"requires": {
"babel-code-frame": "6.26.0",
"babel-messages": "6.23.0",
"babel-runtime": "6.26.0",
"babel-types": "6.26.0",
"babylon": "6.18.0",
"debug": "2.6.9",
"globals": "9.18.0",
"invariant": "2.2.4",
"lodash": "4.17.10"
}
},
"babel-types": {
"version": "6.26.0",
"resolved": "https://registry.npmjs.org/babel-types/-/babel-types-6.26.0.tgz",
"integrity": "sha1-o7Bz+Uq0nrb6Vc1lInozQ4BjJJc=",
"dev": true,
"requires": {
"babel-runtime": "6.26.0",
"esutils": "2.0.2",
"lodash": "4.17.10",
"to-fast-properties": "1.0.3"
}
},
"babylon": {
"version": "6.18.0",
"resolved": "https://registry.npmjs.org/babylon/-/babylon-6.18.0.tgz",
"integrity": "sha512-q/UEjfGJ2Cm3oKV71DJz9d25TPnq5rhBVL2Q4fA5wcC3jcrdn7+SssEybFIxwAvvP+YCsCYNKughoF33GxgycQ==",
"dev": true
},
"backo2": { "backo2": {
"version": "1.0.2", "version": "1.0.2",
"resolved": "https://registry.npmjs.org/backo2/-/backo2-1.0.2.tgz", "resolved": "https://registry.npmjs.org/backo2/-/backo2-1.0.2.tgz",
...@@ -705,6 +1351,16 @@ ...@@ -705,6 +1351,16 @@
"pako": "1.0.6" "pako": "1.0.6"
} }
}, },
"browserslist": {
"version": "3.2.8",
"resolved": "https://registry.npmjs.org/browserslist/-/browserslist-3.2.8.tgz",
"integrity": "sha512-WHVocJYavUwVgVViC0ORikPHQquXwVh939TaelZ4WDqpWgTX/FsGhl/+P4qBUAGcRvtOgDgC+xftNWWp2RUTAQ==",
"dev": true,
"requires": {
"caniuse-lite": "1.0.30000878",
"electron-to-chromium": "1.3.61"
}
},
"buffer": { "buffer": {
"version": "5.1.0", "version": "5.1.0",
"resolved": "https://registry.npmjs.org/buffer/-/buffer-5.1.0.tgz", "resolved": "https://registry.npmjs.org/buffer/-/buffer-5.1.0.tgz",
...@@ -809,6 +1465,12 @@ ...@@ -809,6 +1465,12 @@
} }
} }
}, },
"caniuse-lite": {
"version": "1.0.30000878",
"resolved": "https://registry.npmjs.org/caniuse-lite/-/caniuse-lite-1.0.30000878.tgz",
"integrity": "sha512-/dCGTdLCnjVJno1mFRn7Y6eit3AYaeFzSrMQHCoK0LEQaWl5snuLex1Ky4b8/Qu2ig5NgTX4cJx65hH9546puA==",
"dev": true
},
"caseless": { "caseless": {
"version": "0.12.0", "version": "0.12.0",
"resolved": "https://registry.npmjs.org/caseless/-/caseless-0.12.0.tgz", "resolved": "https://registry.npmjs.org/caseless/-/caseless-0.12.0.tgz",
...@@ -832,7 +1494,6 @@ ...@@ -832,7 +1494,6 @@
"resolved": "https://registry.npmjs.org/chalk/-/chalk-1.1.3.tgz", "resolved": "https://registry.npmjs.org/chalk/-/chalk-1.1.3.tgz",
"integrity": "sha1-qBFcVeSnAv5NFQq9OHKCKn4J/Jg=", "integrity": "sha1-qBFcVeSnAv5NFQq9OHKCKn4J/Jg=",
"dev": true, "dev": true,
"optional": true,
"requires": { "requires": {
"ansi-styles": "2.2.1", "ansi-styles": "2.2.1",
"escape-string-regexp": "1.0.5", "escape-string-regexp": "1.0.5",
...@@ -1318,6 +1979,15 @@ ...@@ -1318,6 +1979,15 @@
"minimalistic-assert": "1.0.1" "minimalistic-assert": "1.0.1"
} }
}, },
"detect-indent": {
"version": "4.0.0",
"resolved": "https://registry.npmjs.org/detect-indent/-/detect-indent-4.0.0.tgz",
"integrity": "sha1-920GQ1LN9Docts5hnE7jqUdd4gg=",
"dev": true,
"requires": {
"repeating": "2.0.1"
}
},
"di": { "di": {
"version": "0.0.1", "version": "0.0.1",
"resolved": "https://registry.npmjs.org/di/-/di-0.0.1.tgz", "resolved": "https://registry.npmjs.org/di/-/di-0.0.1.tgz",
...@@ -1382,6 +2052,12 @@ ...@@ -1382,6 +2052,12 @@
"integrity": "sha1-WQxhFWsK4vTwJVcyoViyZrxWsh0=", "integrity": "sha1-WQxhFWsK4vTwJVcyoViyZrxWsh0=",
"dev": true "dev": true
}, },
"electron-to-chromium": {
"version": "1.3.61",
"resolved": "https://registry.npmjs.org/electron-to-chromium/-/electron-to-chromium-1.3.61.tgz",
"integrity": "sha512-XjTdsm6x71Y48lF9EEvGciwXD70b20g0t+3YbrE+0fPFutqV08DSNrZXkoXAp3QuzX7TpL/OW+/VsNoR9GkuNg==",
"dev": true
},
"elliptic": { "elliptic": {
"version": "6.4.0", "version": "6.4.0",
"resolved": "https://registry.npmjs.org/elliptic/-/elliptic-6.4.0.tgz", "resolved": "https://registry.npmjs.org/elliptic/-/elliptic-6.4.0.tgz",
...@@ -1512,8 +2188,7 @@ ...@@ -1512,8 +2188,7 @@
"version": "1.0.5", "version": "1.0.5",
"resolved": "https://registry.npmjs.org/escape-string-regexp/-/escape-string-regexp-1.0.5.tgz", "resolved": "https://registry.npmjs.org/escape-string-regexp/-/escape-string-regexp-1.0.5.tgz",
"integrity": "sha1-G2HAViGQqN/2rjuyzwIAyhMLhtQ=", "integrity": "sha1-G2HAViGQqN/2rjuyzwIAyhMLhtQ=",
"dev": true, "dev": true
"optional": true
}, },
"escodegen": { "escodegen": {
"version": "1.10.0", "version": "1.10.0",
...@@ -2682,6 +3357,12 @@ ...@@ -2682,6 +3357,12 @@
} }
} }
}, },
"globals": {
"version": "9.18.0",
"resolved": "https://registry.npmjs.org/globals/-/globals-9.18.0.tgz",
"integrity": "sha512-S0nG3CLEQiY/ILxqtztTWH/3iRRdyBLw6KMDxnKMchrtbj2OFmehVh0WUCfW3DUrIgx/qFrJPICrq4Z4sTR9UQ==",
"dev": true
},
"graceful-fs": { "graceful-fs": {
"version": "4.1.11", "version": "4.1.11",
"resolved": "https://registry.npmjs.org/graceful-fs/-/graceful-fs-4.1.11.tgz", "resolved": "https://registry.npmjs.org/graceful-fs/-/graceful-fs-4.1.11.tgz",
...@@ -2761,7 +3442,6 @@ ...@@ -2761,7 +3442,6 @@
"resolved": "https://registry.npmjs.org/has-ansi/-/has-ansi-2.0.0.tgz", "resolved": "https://registry.npmjs.org/has-ansi/-/has-ansi-2.0.0.tgz",
"integrity": "sha1-NPUEnOHs3ysGSa8+8k5F7TVBbZE=", "integrity": "sha1-NPUEnOHs3ysGSa8+8k5F7TVBbZE=",
"dev": true, "dev": true,
"optional": true,
"requires": { "requires": {
"ansi-regex": "2.1.1" "ansi-regex": "2.1.1"
} }
...@@ -2888,6 +3568,16 @@ ...@@ -2888,6 +3568,16 @@
"integrity": "sha1-ILt0A9POo5jpHcRxCo/xuCdKJe0=", "integrity": "sha1-ILt0A9POo5jpHcRxCo/xuCdKJe0=",
"dev": true "dev": true
}, },
"home-or-tmp": {
"version": "2.0.0",
"resolved": "https://registry.npmjs.org/home-or-tmp/-/home-or-tmp-2.0.0.tgz",
"integrity": "sha1-42w/LSyufXRqhX440Y1fMqeILbg=",
"dev": true,
"requires": {
"os-homedir": "1.0.2",
"os-tmpdir": "1.0.2"
}
},
"hosted-git-info": { "hosted-git-info": {
"version": "2.6.0", "version": "2.6.0",
"resolved": "https://registry.npmjs.org/hosted-git-info/-/hosted-git-info-2.6.0.tgz", "resolved": "https://registry.npmjs.org/hosted-git-info/-/hosted-git-info-2.6.0.tgz",
...@@ -3063,6 +3753,15 @@ ...@@ -3063,6 +3753,15 @@
} }
} }
}, },
"invariant": {
"version": "2.2.4",
"resolved": "https://registry.npmjs.org/invariant/-/invariant-2.2.4.tgz",
"integrity": "sha512-phJfQVBuaJM5raOpJjSfkiD6BpbCE4Ns//LaXl6wGYtUBY83nWS6Rf9tXm2e8VaK60JEjYldbPif/A2B1C2gNA==",
"dev": true,
"requires": {
"loose-envify": "1.4.0"
}
},
"ip": { "ip": {
"version": "1.1.5", "version": "1.1.5",
"resolved": "https://registry.npmjs.org/ip/-/ip-1.1.5.tgz", "resolved": "https://registry.npmjs.org/ip/-/ip-1.1.5.tgz",
...@@ -3451,6 +4150,12 @@ ...@@ -3451,6 +4150,12 @@
"integrity": "sha1-pHheE11d9lAk38kiSVPfWFvSdmw=", "integrity": "sha1-pHheE11d9lAk38kiSVPfWFvSdmw=",
"dev": true "dev": true
}, },
"js-tokens": {
"version": "3.0.2",
"resolved": "https://registry.npmjs.org/js-tokens/-/js-tokens-3.0.2.tgz",
"integrity": "sha1-mGbfOVECEw449/mWvOtlRDIJwls=",
"dev": true
},
"js-yaml": { "js-yaml": {
"version": "3.12.0", "version": "3.12.0",
"resolved": "https://registry.npmjs.org/js-yaml/-/js-yaml-3.12.0.tgz", "resolved": "https://registry.npmjs.org/js-yaml/-/js-yaml-3.12.0.tgz",
...@@ -3476,6 +4181,12 @@ ...@@ -3476,6 +4181,12 @@
"dev": true, "dev": true,
"optional": true "optional": true
}, },
"jsesc": {
"version": "1.3.0",
"resolved": "https://registry.npmjs.org/jsesc/-/jsesc-1.3.0.tgz",
"integrity": "sha1-RsP+yMGJKxKwgz25vHYiF226s0s=",
"dev": true
},
"json-schema": { "json-schema": {
"version": "0.2.3", "version": "0.2.3",
"resolved": "https://registry.npmjs.org/json-schema/-/json-schema-0.2.3.tgz", "resolved": "https://registry.npmjs.org/json-schema/-/json-schema-0.2.3.tgz",
...@@ -3496,6 +4207,12 @@ ...@@ -3496,6 +4207,12 @@
"integrity": "sha1-Epai1Y/UXxmg9s4B1lcB4sc1tus=", "integrity": "sha1-Epai1Y/UXxmg9s4B1lcB4sc1tus=",
"dev": true "dev": true
}, },
"json5": {
"version": "0.5.1",
"resolved": "https://registry.npmjs.org/json5/-/json5-0.5.1.tgz",
"integrity": "sha1-Hq3nrMASA0rYTiOWdn6tn6VJWCE=",
"dev": true
},
"jsonfile": { "jsonfile": {
"version": "4.0.0", "version": "4.0.0",
"resolved": "https://registry.npmjs.org/jsonfile/-/jsonfile-4.0.0.tgz", "resolved": "https://registry.npmjs.org/jsonfile/-/jsonfile-4.0.0.tgz",
...@@ -3717,6 +4434,88 @@ ...@@ -3717,6 +4434,88 @@
} }
} }
}, },
"karma-typescript-es6-transform": {
"version": "1.0.4",
"resolved": "https://registry.npmjs.org/karma-typescript-es6-transform/-/karma-typescript-es6-transform-1.0.4.tgz",
"integrity": "sha1-CrL9L71fFuc0px9EpMv5GyV75Ag=",
"dev": true,
"requires": {
"acorn": "5.7.2",
"babel-core": "6.26.3",
"babel-preset-env": "1.7.0",
"log4js": "1.1.1",
"magic-string": "0.22.5"
},
"dependencies": {
"acorn": {
"version": "5.7.2",
"resolved": "https://registry.npmjs.org/acorn/-/acorn-5.7.2.tgz",
"integrity": "sha512-cJrKCNcr2kv8dlDnbw+JPUGjHZzo4myaxOLmpOX8a+rgX94YeTcTMv/LFJUSByRpc+i4GgVnnhLxvMu/2Y+rqw==",
"dev": true
},
"date-format": {
"version": "0.0.0",
"resolved": "https://registry.npmjs.org/date-format/-/date-format-0.0.0.tgz",
"integrity": "sha1-CSBoY6sHDrRZrOpVQsvYVrEZZrM=",
"dev": true
},
"isarray": {
"version": "0.0.1",
"resolved": "https://registry.npmjs.org/isarray/-/isarray-0.0.1.tgz",
"integrity": "sha1-ihis/Kmo9Bd+Cav8YDiTmwXR7t8=",
"dev": true
},
"log4js": {
"version": "1.1.1",
"resolved": "https://registry.npmjs.org/log4js/-/log4js-1.1.1.tgz",
"integrity": "sha1-wh0px2BAieTyVYM+f5SzRh3h/0M=",
"dev": true,
"requires": {
"debug": "2.6.9",
"semver": "5.5.0",
"streamroller": "0.4.1"
}
},
"readable-stream": {
"version": "1.1.14",
"resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-1.1.14.tgz",
"integrity": "sha1-fPTFTvZI44EwhMY23SB54WbAgdk=",
"dev": true,
"requires": {
"core-util-is": "1.0.2",
"inherits": "2.0.3",
"isarray": "0.0.1",
"string_decoder": "0.10.31"
}
},
"streamroller": {
"version": "0.4.1",
"resolved": "https://registry.npmjs.org/streamroller/-/streamroller-0.4.1.tgz",
"integrity": "sha1-1DW9WXQ3Or2b2QaDWVEwhRBswF8=",
"dev": true,
"requires": {
"date-format": "0.0.0",
"debug": "0.7.4",
"mkdirp": "0.5.1",
"readable-stream": "1.1.14"
},
"dependencies": {
"debug": {
"version": "0.7.4",
"resolved": "https://registry.npmjs.org/debug/-/debug-0.7.4.tgz",
"integrity": "sha1-BuHqgILCyxTjmAbiLi9vdX+Srzk=",
"dev": true
}
}
},
"string_decoder": {
"version": "0.10.31",
"resolved": "https://registry.npmjs.org/string_decoder/-/string_decoder-0.10.31.tgz",
"integrity": "sha1-YuIDvEF2bGwoyfyEMB2rHFMQ+pQ=",
"dev": true
}
}
},
"kind-of": { "kind-of": {
"version": "6.0.2", "version": "6.0.2",
"resolved": "https://registry.npmjs.org/kind-of/-/kind-of-6.0.2.tgz", "resolved": "https://registry.npmjs.org/kind-of/-/kind-of-6.0.2.tgz",
...@@ -3989,6 +4788,15 @@ ...@@ -3989,6 +4788,15 @@
"integrity": "sha1-MKCy2jj3N3DoKUoNIuZiXtd9AJc=", "integrity": "sha1-MKCy2jj3N3DoKUoNIuZiXtd9AJc=",
"dev": true "dev": true
}, },
"loose-envify": {
"version": "1.4.0",
"resolved": "https://registry.npmjs.org/loose-envify/-/loose-envify-1.4.0.tgz",
"integrity": "sha512-lyuxPGr/Wfhrlem2CL/UcnUc1zcqKAImBDzukY7Y5F/yQiNdko6+fRLevlw1HgMySw7f611UIY408EtxRSoK3Q==",
"dev": true,
"requires": {
"js-tokens": "3.0.2"
}
},
"loud-rejection": { "loud-rejection": {
"version": "1.6.0", "version": "1.6.0",
"resolved": "https://registry.npmjs.org/loud-rejection/-/loud-rejection-1.6.0.tgz", "resolved": "https://registry.npmjs.org/loud-rejection/-/loud-rejection-1.6.0.tgz",
...@@ -4545,6 +5353,12 @@ ...@@ -4545,6 +5353,12 @@
"integrity": "sha1-hUNzx/XCMVkU/Jv8a9gjj92h7Cc=", "integrity": "sha1-hUNzx/XCMVkU/Jv8a9gjj92h7Cc=",
"dev": true "dev": true
}, },
"os-homedir": {
"version": "1.0.2",
"resolved": "https://registry.npmjs.org/os-homedir/-/os-homedir-1.0.2.tgz",
"integrity": "sha1-/7xJiDNuDoM94MFox+8VISGqf7M=",
"dev": true
},
"os-tmpdir": { "os-tmpdir": {
"version": "1.0.2", "version": "1.0.2",
"resolved": "https://registry.npmjs.org/os-tmpdir/-/os-tmpdir-1.0.2.tgz", "resolved": "https://registry.npmjs.org/os-tmpdir/-/os-tmpdir-1.0.2.tgz",
...@@ -4858,6 +5672,12 @@ ...@@ -4858,6 +5672,12 @@
"integrity": "sha1-gV7R9uvGWSb4ZbMQwHE7yzMVzks=", "integrity": "sha1-gV7R9uvGWSb4ZbMQwHE7yzMVzks=",
"dev": true "dev": true
}, },
"private": {
"version": "0.1.8",
"resolved": "https://registry.npmjs.org/private/-/private-0.1.8.tgz",
"integrity": "sha512-VvivMrbvd2nKkiG38qjULzlc+4Vx4wm/whI9pQD35YrARNnhxeiRktSOhSukRLFNlzg6Br/cJPet5J/u19r/mg==",
"dev": true
},
"process": { "process": {
"version": "0.11.10", "version": "0.11.10",
"resolved": "https://registry.npmjs.org/process/-/process-0.11.10.tgz", "resolved": "https://registry.npmjs.org/process/-/process-0.11.10.tgz",
...@@ -5106,6 +5926,29 @@ ...@@ -5106,6 +5926,29 @@
"dev": true, "dev": true,
"optional": true "optional": true
}, },
"regenerate": {
"version": "1.4.0",
"resolved": "https://registry.npmjs.org/regenerate/-/regenerate-1.4.0.tgz",
"integrity": "sha512-1G6jJVDWrt0rK99kBjvEtziZNCICAuvIPkSiUFIQxVP06RCVpq3dmDo2oi6ABpYaDYaTRr67BEhL8r1wgEZZKg==",
"dev": true
},
"regenerator-runtime": {
"version": "0.11.1",
"resolved": "https://registry.npmjs.org/regenerator-runtime/-/regenerator-runtime-0.11.1.tgz",
"integrity": "sha512-MguG95oij0fC3QV3URf4V2SDYGJhJnJGqvIIgdECeODCT98wSWDAJ94SSuVpYQUoTcGUIL6L4yNB7j1DFFHSBg==",
"dev": true
},
"regenerator-transform": {
"version": "0.10.1",
"resolved": "https://registry.npmjs.org/regenerator-transform/-/regenerator-transform-0.10.1.tgz",
"integrity": "sha512-PJepbvDbuK1xgIgnau7Y90cwaAmO/LCLMI2mPvaXq2heGMR3aWW5/BQvYrhJ8jgmQjXewXvBjzfqKcVOmhjZ6Q==",
"dev": true,
"requires": {
"babel-runtime": "6.26.0",
"babel-types": "6.26.0",
"private": "0.1.8"
}
},
"regex-cache": { "regex-cache": {
"version": "0.4.4", "version": "0.4.4",
"resolved": "https://registry.npmjs.org/regex-cache/-/regex-cache-0.4.4.tgz", "resolved": "https://registry.npmjs.org/regex-cache/-/regex-cache-0.4.4.tgz",
...@@ -5125,6 +5968,40 @@ ...@@ -5125,6 +5968,40 @@
"safe-regex": "1.1.0" "safe-regex": "1.1.0"
} }
}, },
"regexpu-core": {
"version": "2.0.0",
"resolved": "https://registry.npmjs.org/regexpu-core/-/regexpu-core-2.0.0.tgz",
"integrity": "sha1-SdA4g3uNz4v6W5pCE5k45uoq4kA=",
"dev": true,
"requires": {
"regenerate": "1.4.0",
"regjsgen": "0.2.0",
"regjsparser": "0.1.5"
}
},
"regjsgen": {
"version": "0.2.0",
"resolved": "https://registry.npmjs.org/regjsgen/-/regjsgen-0.2.0.tgz",
"integrity": "sha1-bAFq3qxVT3WCP+N6wFuS1aTtsfc=",
"dev": true
},
"regjsparser": {
"version": "0.1.5",
"resolved": "https://registry.npmjs.org/regjsparser/-/regjsparser-0.1.5.tgz",
"integrity": "sha1-fuj4Tcb6eS0/0K4ijSS9lJ6tIFw=",
"dev": true,
"requires": {
"jsesc": "0.5.0"
},
"dependencies": {
"jsesc": {
"version": "0.5.0",
"resolved": "https://registry.npmjs.org/jsesc/-/jsesc-0.5.0.tgz",
"integrity": "sha1-597mbjXW/Bb3EP6R1c9p9w8IkR0=",
"dev": true
}
}
},
"remap-istanbul": { "remap-istanbul": {
"version": "0.10.1", "version": "0.10.1",
"resolved": "https://registry.npmjs.org/remap-istanbul/-/remap-istanbul-0.10.1.tgz", "resolved": "https://registry.npmjs.org/remap-istanbul/-/remap-istanbul-0.10.1.tgz",
...@@ -5449,9 +6326,9 @@ ...@@ -5449,9 +6326,9 @@
"dev": true "dev": true
}, },
"seedrandom": { "seedrandom": {
"version": "2.4.3", "version": "2.4.4",
"resolved": "https://registry.npmjs.org/seedrandom/-/seedrandom-2.4.3.tgz", "resolved": "https://registry.npmjs.org/seedrandom/-/seedrandom-2.4.4.tgz",
"integrity": "sha1-JDhQTa0zkXMUv/GKxNeU8W1qrsw=" "integrity": "sha512-9A+PDmgm+2du77B5i0Ip2cxOqqHjgNxnBgglxLcX78A2D6c2rTo61z4jnVABpF4cKeDMDG+cmXXvdnqse2VqMA=="
}, },
"semver": { "semver": {
"version": "5.5.0", "version": "5.5.0",
...@@ -5526,6 +6403,12 @@ ...@@ -5526,6 +6403,12 @@
"requestretry": "1.13.0" "requestretry": "1.13.0"
} }
}, },
"slash": {
"version": "1.0.0",
"resolved": "https://registry.npmjs.org/slash/-/slash-1.0.0.tgz",
"integrity": "sha1-xB8vbDn8FtHNF61LXYlhFK5HDVU=",
"dev": true
},
"smart-buffer": { "smart-buffer": {
"version": "1.1.15", "version": "1.1.15",
"resolved": "https://registry.npmjs.org/smart-buffer/-/smart-buffer-1.1.15.tgz", "resolved": "https://registry.npmjs.org/smart-buffer/-/smart-buffer-1.1.15.tgz",
...@@ -5773,6 +6656,23 @@ ...@@ -5773,6 +6656,23 @@
"urix": "0.1.0" "urix": "0.1.0"
} }
}, },
"source-map-support": {
"version": "0.4.18",
"resolved": "https://registry.npmjs.org/source-map-support/-/source-map-support-0.4.18.tgz",
"integrity": "sha512-try0/JqxPLF9nOjvSta7tVondkP5dwgyLDjVoyMDlmjugT2lRZ1OfsrYTkCd2hkDnJTKRbO/Rl3orm8vlsUzbA==",
"dev": true,
"requires": {
"source-map": "0.5.7"
},
"dependencies": {
"source-map": {
"version": "0.5.7",
"resolved": "https://registry.npmjs.org/source-map/-/source-map-0.5.7.tgz",
"integrity": "sha1-igOdLRAh0i0eoUyA2OpGi6LvP8w=",
"dev": true
}
}
},
"source-map-url": { "source-map-url": {
"version": "0.4.0", "version": "0.4.0",
"resolved": "https://registry.npmjs.org/source-map-url/-/source-map-url-0.4.0.tgz", "resolved": "https://registry.npmjs.org/source-map-url/-/source-map-url-0.4.0.tgz",
...@@ -5938,7 +6838,6 @@ ...@@ -5938,7 +6838,6 @@
"resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-3.0.1.tgz", "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-3.0.1.tgz",
"integrity": "sha1-ajhfuIU9lS1f8F0Oiq+UJ43GPc8=", "integrity": "sha1-ajhfuIU9lS1f8F0Oiq+UJ43GPc8=",
"dev": true, "dev": true,
"optional": true,
"requires": { "requires": {
"ansi-regex": "2.1.1" "ansi-regex": "2.1.1"
} }
...@@ -5965,8 +6864,13 @@ ...@@ -5965,8 +6864,13 @@
"version": "2.0.0", "version": "2.0.0",
"resolved": "https://registry.npmjs.org/supports-color/-/supports-color-2.0.0.tgz", "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-2.0.0.tgz",
"integrity": "sha1-U10EXOa2Nj+kARcIRimZXp3zJMc=", "integrity": "sha1-U10EXOa2Nj+kARcIRimZXp3zJMc=",
"dev": true, "dev": true
"optional": true },
"tfjs-image-recognition-base": {
"version": "git+https://github.com/justadudewhohacks/tfjs-image-recognition-base.git#2f2072f883dd098bc539e2e89a61878720e400a1",
"requires": {
"@tensorflow/tfjs-core": "0.12.14"
}
}, },
"through2": { "through2": {
"version": "2.0.1", "version": "2.0.1",
...@@ -6050,6 +6954,12 @@ ...@@ -6050,6 +6954,12 @@
"integrity": "sha1-fSKbH8xjfkZsoIEYCDanqr/4P0M=", "integrity": "sha1-fSKbH8xjfkZsoIEYCDanqr/4P0M=",
"dev": true "dev": true
}, },
"to-fast-properties": {
"version": "1.0.3",
"resolved": "https://registry.npmjs.org/to-fast-properties/-/to-fast-properties-1.0.3.tgz",
"integrity": "sha1-uDVx+k2MJbguIxsG46MFXeTKGkc=",
"dev": true
},
"to-object-path": { "to-object-path": {
"version": "0.3.0", "version": "0.3.0",
"resolved": "https://registry.npmjs.org/to-object-path/-/to-object-path-0.3.0.tgz", "resolved": "https://registry.npmjs.org/to-object-path/-/to-object-path-0.3.0.tgz",
...@@ -6108,6 +7018,12 @@ ...@@ -6108,6 +7018,12 @@
"integrity": "sha1-WIeWa7WCpFA6QetST301ARgVphM=", "integrity": "sha1-WIeWa7WCpFA6QetST301ARgVphM=",
"dev": true "dev": true
}, },
"trim-right": {
"version": "1.0.1",
"resolved": "https://registry.npmjs.org/trim-right/-/trim-right-1.0.1.tgz",
"integrity": "sha1-yy4SAwZ+DI3h9hQJS5/kVwTqYAM=",
"dev": true
},
"tslib": { "tslib": {
"version": "1.9.1", "version": "1.9.1",
"resolved": "https://registry.npmjs.org/tslib/-/tslib-1.9.1.tgz", "resolved": "https://registry.npmjs.org/tslib/-/tslib-1.9.1.tgz",
......
...@@ -20,7 +20,8 @@ ...@@ -20,7 +20,8 @@
"author": "justadudewhohacks", "author": "justadudewhohacks",
"license": "MIT", "license": "MIT",
"dependencies": { "dependencies": {
"@tensorflow/tfjs-core": "^0.11.9" "@tensorflow/tfjs-core": "^0.12.14",
"tfjs-image-recognition-base": "git+https://github.com/justadudewhohacks/tfjs-image-recognition-base.git"
}, },
"devDependencies": { "devDependencies": {
"@types/jasmine": "^2.8.8", "@types/jasmine": "^2.8.8",
...@@ -30,6 +31,7 @@ ...@@ -30,6 +31,7 @@
"karma-chrome-launcher": "^2.2.0", "karma-chrome-launcher": "^2.2.0",
"karma-jasmine": "^1.1.2", "karma-jasmine": "^1.1.2",
"karma-typescript": "^3.0.12", "karma-typescript": "^3.0.12",
"karma-typescript-es6-transform": "^1.0.4",
"rollup": "^0.59.1", "rollup": "^0.59.1",
"rollup-plugin-commonjs": "^9.1.3", "rollup-plugin-commonjs": "^9.1.3",
"rollup-plugin-node-resolve": "^3.3.0", "rollup-plugin-node-resolve": "^3.3.0",
......
import { Rect } from './Rect';
import { Dimensions } from './types';
import { isDimensions } from './utils';
/**
 * An axis-aligned box given by its left/top/right/bottom edge coordinates.
 */
export class BoundingBox {
  constructor(
    private _left: number,
    private _top: number,
    private _right: number,
    private _bottom: number
  ) {}

  public get left(): number {
    return this._left
  }

  public get top(): number {
    return this._top
  }

  public get right(): number {
    return this._right
  }

  public get bottom(): number {
    return this._bottom
  }

  public get width(): number {
    return this.right - this.left
  }

  public get height(): number {
    return this.bottom - this.top
  }

  public get area(): number {
    return this.width * this.height
  }

  /** Expands the shorter side symmetrically so the box becomes square. */
  public toSquare(): BoundingBox {
    let { left, top, right, bottom } = this
    const off = (Math.abs(this.width - this.height) / 2)
    if (this.width < this.height) {
      left -= off
      right += off
    }
    if (this.height < this.width) {
      top -= off
      bottom += off
    }
    return new BoundingBox(left, top, right, bottom)
  }

  /** Rounds all edge coordinates to the nearest integer. */
  public round(): BoundingBox {
    return new BoundingBox(
      Math.round(this.left),
      Math.round(this.top),
      Math.round(this.right),
      Math.round(this.bottom)
    )
  }

  /**
   * Computes source/target copy ranges for extracting this box from an image,
   * clamping at the image borders (1-based coordinates, as in the MTCNN
   * reference implementation's `pad` step).
   * @returns dx/dy/edx/edy: start/end offsets within the (w x h) target patch,
   *   x/y/ex/ey: clamped start/end coordinates within the source image.
   */
  public padAtBorders(imageHeight: number, imageWidth: number) {
    const w = this.width + 1
    const h = this.height + 1
    let dx = 1
    let dy = 1
    let edx = w
    let edy = h
    let x = this.left
    let y = this.top
    let ex = this.right
    let ey = this.bottom
    if (ex > imageWidth) {
      edx = -ex + imageWidth + w
      ex = imageWidth
    }
    if (ey > imageHeight) {
      edy = -ey + imageHeight + h
      ey = imageHeight
    }
    if (x < 1) {
      // fixed: previously assigned `edy = 2 - x`, clobbering the bottom
      // padding instead of recording the left offset (dx), cf. MTCNN pad()
      dx = 2 - x
      x = 1
    }
    if (y < 1) {
      // fixed: previously assigned `edy = 2 - y` instead of `dy = 2 - y`
      dy = 2 - y
      y = 1
    }
    return { dy, edy, dx, edx, y, ey, x, ex, w, h }
  }

  /**
   * Applies a regression offset (given as fractions of width/height) to each
   * edge, then squares and rounds the result.
   */
  public calibrate(region: BoundingBox) {
    return new BoundingBox(
      this.left + (region.left * this.width),
      this.top + (region.top * this.height),
      this.right + (region.right * this.width),
      this.bottom + (region.bottom * this.height)
    ).toSquare().round()
  }

  /** Scales the box, either uniformly (number) or per axis (Dimensions). */
  public rescale(s: Dimensions | number) {
    const scaleX = isDimensions(s) ? (s as Dimensions).width : s as number
    const scaleY = isDimensions(s) ? (s as Dimensions).height : s as number
    return new BoundingBox(this.left * scaleX, this.top * scaleY, this.right * scaleX, this.bottom * scaleY)
  }

  public toRect(): Rect {
    return new Rect(this.left, this.top, this.width, this.height)
  }
}
\ No newline at end of file
import * as tf from '@tensorflow/tfjs-core';
import { isTensor3D, isTensor4D } from './commons/isTensor';
import { padToSquare } from './padToSquare';
import { Point } from './Point';
import { TResolvedNetInput, Dimensions } from './types';
import { createCanvasFromMedia } from './utils';
/**
 * Normalizes heterogenous network inputs (tensors, canvases, images, videos)
 * into a list of rank-3 tensors that can later be batched via toBatchTensor().
 */
export class NetInput {
  // one rank-3 tensor per batch element
  private _inputs: tf.Tensor3D[] = []
  // canvases created from media inputs; only populated if keepCanvases is true
  private _canvases: HTMLCanvasElement[] = []
  // if true, tensors created here are disposed after toBatchTensor()
  private _isManaged: boolean = false
  private _isBatchInput: boolean = false
  // raw shape of each input tensor, captured at construction time
  private _inputDimensions: number[][] = []
  // per-input (x, y) padding applied by toBatchTensor(); empty until that call
  private _paddings: Point[] = []
  // target square size of the last toBatchTensor() call; 0 until that call
  private _inputSize: number = 0

  /**
   * @param inputs Either a 4D batch tensor (unstacked into 3D tensors) or an
   *   array of 3D/4D tensors, canvases, images or videos.
   * @param isBatchInput Forces batch semantics even for a single input.
   * @param keepCanvases Whether to retain canvases created from media inputs.
   * @throws if an array element is a 4D tensor with batchSize !== 1.
   */
  constructor(
    inputs: tf.Tensor4D | Array<TResolvedNetInput>,
    isBatchInput: boolean = false,
    keepCanvases: boolean = false
  ) {
    if (isTensor4D(inputs)) {
      this._inputs = tf.unstack(inputs as tf.Tensor4D) as tf.Tensor3D[]
    }
    if (Array.isArray(inputs)) {
      this._inputs = inputs.map((input, idx) => {
        if (isTensor3D(input)) {
          // TODO: make sure not to dispose original tensors passed in by the user
          return tf.clone(input as tf.Tensor3D)
        }
        if (isTensor4D(input)) {
          // only singleton 4D tensors are accepted inside an input array
          const shape = (input as tf.Tensor4D).shape
          const batchSize = shape[0]
          if (batchSize !== 1) {
            throw new Error(`NetInput - tf.Tensor4D with batchSize ${batchSize} passed, but not supported in input array`)
          }
          // drop the leading batch dimension
          return (input as tf.Tensor4D).reshape(shape.slice(1) as [number, number, number]) as tf.Tensor3D
        }
        // media element: draw to a canvas (unless it already is one), then read pixels
        const canvas = input instanceof HTMLCanvasElement ? input : createCanvasFromMedia(input as HTMLImageElement | HTMLVideoElement)
        if (keepCanvases) {
          this._canvases[idx] = canvas
        }
        return tf.fromPixels(canvas)
      })
    }
    this._isBatchInput = this.batchSize > 1 || isBatchInput
    this._inputDimensions = this._inputs.map(t => t.shape)
  }

  public get inputs(): tf.Tensor3D[] {
    return this._inputs
  }

  public get canvases(): HTMLCanvasElement[] {
    return this._canvases
  }

  public get isManaged(): boolean {
    return this._isManaged
  }

  public get isBatchInput(): boolean {
    return this._isBatchInput
  }

  public get batchSize(): number {
    return this._inputs.length
  }

  public get inputDimensions(): number[][] {
    return this._inputDimensions
  }

  // NOTE(review): only meaningful after toBatchTensor() has been called
  public get paddings(): Point[] {
    return this._paddings
  }

  // NOTE(review): only meaningful after toBatchTensor() has been called
  public get inputSize(): number {
    return this._inputSize
  }

  public get relativePaddings(): Point[] {
    return Array(this.inputs.length).fill(0).map(
      (_, batchIdx) => this.getRelativePaddings(batchIdx)
    )
  }

  public get reshapedInputDimensions(): Dimensions[] {
    return Array(this.inputs.length).fill(0).map(
      (_, batchIdx) => this.getReshapedInputDimensions(batchIdx)
    )
  }

  public getInputDimensions(batchIdx: number): number[] {
    return this._inputDimensions[batchIdx]
  }

  // dimension 0 of a rank-3 tensor shape is the height
  public getInputHeight(batchIdx: number): number {
    return this._inputDimensions[batchIdx][0]
  }

  // dimension 1 of a rank-3 tensor shape is the width
  public getInputWidth(batchIdx: number): number {
    return this._inputDimensions[batchIdx][1]
  }

  public getPaddings(batchIdx: number): Point {
    return this._paddings[batchIdx]
  }

  /** Padding expressed as a fraction of the original input dimensions. */
  public getRelativePaddings(batchIdx: number): Point {
    return new Point(
      (this.getPaddings(batchIdx).x + this.getInputWidth(batchIdx)) / this.getInputWidth(batchIdx),
      (this.getPaddings(batchIdx).y + this.getInputHeight(batchIdx)) / this.getInputHeight(batchIdx)
    )
  }

  /**
   * Dimensions of the input after rescaling such that its larger side equals
   * inputSize (aspect ratio preserved, values floored).
   */
  public getReshapedInputDimensions(batchIdx: number): Dimensions {
    const [h, w] = [this.getInputHeight(batchIdx), this.getInputWidth(batchIdx)]
    const f = this.inputSize / Math.max(h, w)
    return {
      height: Math.floor(h * f),
      width: Math.floor(w * f)
    }
  }

  /**
   * Pads each input to a square, resizes it to inputSize x inputSize and
   * stacks all inputs into one 4D batch tensor, recording the applied
   * paddings per input.
   * NOTE(review): paddings are appended on every call, so calling this more
   * than once accumulates entries — presumably intended for single use.
   */
  public toBatchTensor(inputSize: number, isCenterInputs: boolean = true): tf.Tensor4D {
    this._inputSize = inputSize
    return tf.tidy(() => {
      const inputTensors = this._inputs.map((inputTensor: tf.Tensor3D) => {
        const [originalHeight, originalWidth] = inputTensor.shape
        let imgTensor = inputTensor.expandDims().toFloat() as tf.Tensor4D
        imgTensor = padToSquare(imgTensor, isCenterInputs)
        const [heightAfterPadding, widthAfterPadding] = imgTensor.shape.slice(1)
        if (heightAfterPadding !== inputSize || widthAfterPadding !== inputSize) {
          imgTensor = tf.image.resizeBilinear(imgTensor, [inputSize, inputSize])
        }
        this._paddings.push(new Point(
          widthAfterPadding - originalWidth,
          heightAfterPadding - originalHeight
        ))
        return imgTensor
      })
      const batchTensor = tf.stack(inputTensors).as4D(this.batchSize, inputSize, inputSize, 3)
      if (this.isManaged) {
        this.dispose()
      }
      return batchTensor
    })
  }

  /**
   * By setting the isManaged flag, all newly created tensors will be
   * automatically disposed after the batch tensor has been created
   */
  public managed() {
    this._isManaged = true
    return this
  }

  /** Disposes all input tensors held by this NetInput. */
  public dispose() {
    this._inputs.forEach(t => t.dispose())
  }
}
\ No newline at end of file
export interface IPoint {
  x: number
  y: number
}

/**
 * A 2D point. All operations are non-mutating and return a new Point.
 */
export class Point implements IPoint {
  public x: number
  public y: number

  constructor(x: number, y: number) {
    this.x = x
    this.y = y
  }

  /** Component-wise sum. */
  public add(pt: IPoint): Point {
    const { x, y } = this
    return new Point(x + pt.x, y + pt.y)
  }

  /** Component-wise difference. */
  public sub(pt: IPoint): Point {
    const { x, y } = this
    return new Point(x - pt.x, y - pt.y)
  }

  /** Component-wise product. */
  public mul(pt: IPoint): Point {
    const { x, y } = this
    return new Point(x * pt.x, y * pt.y)
  }

  /** Component-wise quotient. */
  public div(pt: IPoint): Point {
    const { x, y } = this
    return new Point(x / pt.x, y / pt.y)
  }

  /** Component-wise absolute value. */
  public abs(): Point {
    return new Point(Math.abs(this.x), Math.abs(this.y))
  }

  /** Euclidean length of the vector from the origin to this point. */
  public magnitude(): number {
    return Math.sqrt(this.x ** 2 + this.y ** 2)
  }

  /** Component-wise floor. */
  public floor(): Point {
    return new Point(Math.floor(this.x), Math.floor(this.y))
  }
}
\ No newline at end of file
import { BoundingBox } from './BoundingBox';
export interface IRect {
  x: number
  y: number
  width: number
  height: number
}

/**
 * A rectangle given by its top-left corner and its dimensions.
 * All operations are non-mutating and return a new Rect.
 */
export class Rect implements IRect {
  public x: number
  public y: number
  public width: number
  public height: number

  constructor(x: number, y: number, width: number, height: number) {
    this.x = x
    this.y = y
    this.width = width
    this.height = height
  }

  public get right() {
    return this.x + this.width
  }

  public get bottom() {
    return this.y + this.height
  }

  /** Grows the shorter side symmetrically until the rect is square. */
  public toSquare(): Rect {
    let { x, y, width, height } = this
    const diff = Math.abs(width - height)
    if (width < height) {
      x -= (diff / 2)
      width += diff
    }
    if (height < width) {
      y -= (diff / 2)
      height += diff
    }
    return new Rect(x, y, width, height)
  }

  /** Expands the rect by padX/padY in total, centered on the original. */
  public pad(padX: number, padY: number): Rect {
    const paddedX = this.x - (padX / 2)
    const paddedY = this.y - (padY / 2)
    return new Rect(paddedX, paddedY, this.width + padX, this.height + padY)
  }

  /** Floors all four components. */
  public floor(): Rect {
    const [x, y, width, height] = [this.x, this.y, this.width, this.height].map(Math.floor)
    return new Rect(x, y, width, height)
  }

  /** Converts to edge-coordinate representation. */
  public toBoundingBox(): BoundingBox {
    return new BoundingBox(this.x, this.y, this.x + this.width, this.y + this.height)
  }

  /**
   * Clamps the rect to lie entirely within a (imgWidth x imgHeight) image,
   * flooring the result.
   */
  public clipAtImageBorders(imgWidth: number, imgHeight: number): Rect {
    const { x, y, right, bottom } = this
    const clippedX = Math.max(x, 0)
    const clippedY = Math.max(y, 0)
    // width/height lost to clipping at the top-left corner
    const newWidth = right - clippedX
    const newHeight = bottom - clippedY
    // then clamp at the bottom-right image border
    const clippedWidth = Math.min(newWidth, imgWidth - clippedX)
    const clippedHeight = Math.min(newHeight, imgHeight - clippedY)
    return (new Rect(clippedX, clippedY, clippedWidth, clippedHeight)).floor()
  }
}
\ No newline at end of file
import { Point, Rect, TNetInput } from 'tfjs-image-recognition-base';
import { TinyYolov2 } from '.'; import { TinyYolov2 } from '.';
import { FaceDetection } from './classes/FaceDetection';
import { FaceLandmarks68 } from './classes/FaceLandmarks68';
import { FullFaceDescription } from './classes/FullFaceDescription';
import { extractFaceTensors } from './extractFaceTensors'; import { extractFaceTensors } from './extractFaceTensors';
import { FaceDetection } from './FaceDetection';
import { FaceDetectionNet } from './faceDetectionNet/FaceDetectionNet'; import { FaceDetectionNet } from './faceDetectionNet/FaceDetectionNet';
import { FaceLandmarkNet } from './faceLandmarkNet/FaceLandmarkNet'; import { FaceLandmarkNet } from './faceLandmarkNet/FaceLandmarkNet';
import { FaceLandmarks68 } from './faceLandmarkNet/FaceLandmarks68';
import { FaceRecognitionNet } from './faceRecognitionNet/FaceRecognitionNet'; import { FaceRecognitionNet } from './faceRecognitionNet/FaceRecognitionNet';
import { FullFaceDescription } from './FullFaceDescription';
import { Mtcnn } from './mtcnn/Mtcnn'; import { Mtcnn } from './mtcnn/Mtcnn';
import { MtcnnForwardParams } from './mtcnn/types'; import { MtcnnForwardParams } from './mtcnn/types';
import { Rect } from './Rect';
import { TinyYolov2ForwardParams } from './tinyYolov2/types'; import { TinyYolov2ForwardParams } from './tinyYolov2/types';
import { TNetInput } from './types';
function computeDescriptorsFactory( function computeDescriptorsFactory(
recognitionNet: FaceRecognitionNet recognitionNet: FaceRecognitionNet
...@@ -62,7 +62,9 @@ function allFacesFactory( ...@@ -62,7 +62,9 @@ function allFacesFactory(
return detections.map((detection, i) => return detections.map((detection, i) =>
new FullFaceDescription( new FullFaceDescription(
detection, detection,
faceLandmarksByFace[i].shiftByPoint<FaceLandmarks68>(detection.getBox()), faceLandmarksByFace[i].shiftByPoint<FaceLandmarks68>(
new Point(detection.box.x, detection.box.y)
),
descriptors[i] descriptors[i]
) )
) )
......
import { Rect } from './Rect'; import { Dimensions, Rect } from 'tfjs-image-recognition-base';
import { Dimensions } from './types';
export class FaceDetection { export class FaceDetection {
private _score: number private _score: number
......
import { getCenterPoint } from './commons/getCenterPoint'; import { Dimensions, getCenterPoint, Point, Rect } from 'tfjs-image-recognition-base';
import { FaceDetection } from './FaceDetection'; import { FaceDetection } from './FaceDetection';
import { IPoint, Point } from './Point';
import { Rect } from './Rect';
import { Dimensions } from './types';
// face alignment constants // face alignment constants
const relX = 0.5 const relX = 0.5
...@@ -66,7 +64,7 @@ export class FaceLandmarks { ...@@ -66,7 +64,7 @@ export class FaceLandmarks {
) )
} }
public shiftByPoint<T extends FaceLandmarks>(pt: IPoint): T { public shiftByPoint<T extends FaceLandmarks>(pt: Point): T {
return this.shift(pt.x, pt.y) return this.shift(pt.x, pt.y)
} }
......
import { getCenterPoint } from '../commons/getCenterPoint'; import { getCenterPoint, Point } from 'tfjs-image-recognition-base';
import { FaceLandmarks } from '../FaceLandmarks';
import { Point } from '../Point'; import { FaceLandmarks } from './FaceLandmarks';
export class FaceLandmarks5 extends FaceLandmarks { export class FaceLandmarks5 extends FaceLandmarks {
......
import { getCenterPoint } from '../commons/getCenterPoint'; import { getCenterPoint, Point } from 'tfjs-image-recognition-base';
import { FaceDetection } from '../FaceDetection';
import { FaceLandmarks } from '../FaceLandmarks'; import { FaceLandmarks } from '../classes/FaceLandmarks';
import { Point } from '../Point';
import { Rect } from '../Rect';
export class FaceLandmarks68 extends FaceLandmarks { export class FaceLandmarks68 extends FaceLandmarks {
public getJawOutline(): Point[] { public getJawOutline(): Point[] {
......
export * from './FaceDetection';
export * from './FaceLandmarks';
export * from './FaceLandmarks5';
export * from './FaceLandmarks68';
export * from './FullFaceDescription';
\ No newline at end of file
import * as tf from '@tensorflow/tfjs-core';
import { ParamMapping } from './types';
/**
 * Base class for neural nets, managing named parameter tensors and their
 * loading, reassignment, freezing and disposal via '/'-separated string paths.
 */
export class NeuralNetwork<TNetParams> {
  // structured parameter object; undefined until load()/extractWeights()
  protected _params: TNetParams | undefined = undefined
  // maps original weight-map paths to paths within _params
  protected _paramMappings: ParamMapping[] = []

  constructor(private _name: string) {}

  public get params(): TNetParams | undefined {
    return this._params
  }

  public get paramMappings(): ParamMapping[] {
    return this._paramMappings
  }

  /** Resolves an 'a/b/c' style path within the params object to its tensor. */
  public getParamFromPath(paramPath: string): tf.Tensor {
    const { obj, objProp } = this.traversePropertyPath(paramPath)
    return obj[objProp]
  }

  /** Disposes the tensor currently stored at paramPath and stores `tensor` instead. */
  public reassignParamFromPath(paramPath: string, tensor: tf.Tensor) {
    const { obj, objProp } = this.traversePropertyPath(paramPath)
    obj[objProp].dispose()
    obj[objProp] = tensor
  }

  /** Flat list of { path, tensor } entries for all mapped parameters. */
  public getParamList() {
    return this._paramMappings.map(({ paramPath }) => ({
      path: paramPath,
      tensor: this.getParamFromPath(paramPath)
    }))
  }

  // trainable params are tf.Variables, frozen params are plain tensors
  public getTrainableParams() {
    return this.getParamList().filter(param => param.tensor instanceof tf.Variable)
  }

  public getFrozenParams() {
    return this.getParamList().filter(param => !(param.tensor instanceof tf.Variable))
  }

  /** Makes all frozen params trainable by wrapping them in tf.Variables. */
  public variable() {
    this.getFrozenParams().forEach(({ path, tensor }) => {
      this.reassignParamFromPath(path, tf.variable(tensor))
    })
  }

  /** Converts all trainable params back to plain (frozen) tensors. */
  public freeze() {
    this.getTrainableParams().forEach(({ path, tensor }) => {
      this.reassignParamFromPath(path, tf.tensor(tensor as any))
    })
  }

  /**
   * Disposes all parameter tensors and clears the params object.
   * @param throwOnRedispose If true, throws when a tensor was already disposed.
   */
  public dispose(throwOnRedispose: boolean = true) {
    this.getParamList().forEach(param => {
      if (throwOnRedispose && param.tensor.isDisposed) {
        throw new Error(`param tensor has already been disposed for path ${param.path}`)
      }
      param.tensor.dispose()
    })
    this._params = undefined
  }

  /**
   * Loads the net's weights, either directly from a Float32Array or by
   * loading quantized params for a model uri (delegated to the subclass).
   * @param weightsOrUrl Raw weights, a model uri, or undefined for the default uri.
   * @throws if weightsOrUrl is neither a Float32Array nor a string/undefined.
   */
  public async load(weightsOrUrl: Float32Array | string | undefined): Promise<void> {
    if (weightsOrUrl instanceof Float32Array) {
      this.extractWeights(weightsOrUrl)
      return
    }
    if (weightsOrUrl && typeof weightsOrUrl !== 'string') {
      throw new Error(`${this._name}.load - expected model uri, or weights as Float32Array`)
    }
    const {
      paramMappings,
      params
    } = await this.loadQuantizedParams(weightsOrUrl)
    this._paramMappings = paramMappings
    this._params = params
  }

  /** Initializes params and mappings from a flat weight array (subclass-defined layout). */
  public extractWeights(weights: Float32Array) {
    const {
      paramMappings,
      params
    } = this.extractParams(weights)
    this._paramMappings = paramMappings
    this._params = params
  }

  /**
   * Walks a '/'-separated property path through the params object and returns
   * the owning object plus the final property name, so callers can read or
   * reassign the tensor stored there.
   * @throws if params are not loaded, the path is invalid, or the leaf is not a tensor.
   */
  private traversePropertyPath(paramPath: string) {
    if (!this.params) {
      throw new Error(`traversePropertyPath - model has no loaded params`)
    }
    const result = paramPath.split('/').reduce((res: { nextObj: any, obj?: any, objProp?: string }, objProp) => {
      if (!res.nextObj.hasOwnProperty(objProp)) {
        throw new Error(`traversePropertyPath - object does not have property ${objProp}, for path ${paramPath}`)
      }
      return { obj: res.nextObj, objProp, nextObj: res.nextObj[objProp] }
    }, { nextObj: this.params })
    const { obj, objProp } = result
    if (!obj || !objProp || !(obj[objProp] instanceof tf.Tensor)) {
      throw new Error(`traversePropertyPath - parameter is not a tensor, for path ${paramPath}`)
    }
    return { obj, objProp }
  }

  // subclasses override these two to define how params are loaded / extracted
  protected loadQuantizedParams(_: any): Promise<{ params: TNetParams, paramMappings: ParamMapping[] }> {
    throw new Error(`${this._name}.loadQuantizedParams - not implemented`)
  }

  protected extractParams(_: any): { params: TNetParams, paramMappings: ParamMapping[] } {
    throw new Error(`${this._name}.extractParams - not implemented`)
  }
}
\ No newline at end of file
import { ParamMapping } from './types';
/**
 * Disposes every tensor in weightMap whose path is not referenced by any of
 * the given param mappings, i.e. weights that were loaded but never mapped.
 */
export function disposeUnusedWeightTensors(weightMap: any, paramMappings: ParamMapping[]) {
  const mappedPaths = new Set(paramMappings.map(pm => pm.originalPath))
  for (const path of Object.keys(weightMap)) {
    if (!mappedPaths.has(path)) {
      weightMap[path].dispose()
    }
  }
}
import * as tf from '@tensorflow/tfjs-core'; import * as tf from '@tensorflow/tfjs-core';
import { ExtractWeightsFunction, ParamMapping } from 'tfjs-image-recognition-base';
import { ConvParams, ExtractWeightsFunction, ParamMapping } from './types'; import { ConvParams } from './types';
export function extractConvParamsFactory( export function extractConvParamsFactory(
extractWeights: ExtractWeightsFunction, extractWeights: ExtractWeightsFunction,
......
import * as tf from '@tensorflow/tfjs-core'; import * as tf from '@tensorflow/tfjs-core';
import { ExtractWeightsFunction, ParamMapping } from 'tfjs-image-recognition-base';
import { ExtractWeightsFunction, FCParams, ParamMapping } from './types'; import { FCParams } from './types';
export function extractFCParamsFactory( export function extractFCParamsFactory(
extractWeights: ExtractWeightsFunction, extractWeights: ExtractWeightsFunction,
......
import { isTensor } from './isTensor';
import { ParamMapping } from './types';
export function extractWeightEntryFactory(weightMap: any, paramMappings: ParamMapping[]) {
return function<T> (originalPath: string, paramRank: number, mappedPath?: string): T {
const tensor = weightMap[originalPath]
if (!isTensor(tensor, paramRank)) {
throw new Error(`expected weightMap[${originalPath}] to be a Tensor${paramRank}D, instead have ${tensor}`)
}
paramMappings.push(
{ originalPath, paramPath: mappedPath || originalPath }
)
return tensor
}
}
/**
 * Creates a cursor over a flat weight array: extractWeights consumes the next
 * numWeights values, getRemainingWeights returns whatever is left.
 */
export function extractWeightsFactory(weights: Float32Array) {
  let remaining = weights

  function extractWeights(numWeights: number): Float32Array {
    const extracted = remaining.slice(0, numWeights)
    remaining = remaining.slice(numWeights)
    return extracted
  }

  function getRemainingWeights(): Float32Array {
    return remaining
  }

  return { extractWeights, getRemainingWeights }
}
\ No newline at end of file
import { Point } from '../Point';
/** Computes the centroid (arithmetic mean) of the given points. */
export function getCenterPoint(pts: Point[]): Point {
  const total = pts.reduce((sum, pt) => sum.add(pt), new Point(0, 0))
  const numPts = pts.length
  return total.div(new Point(numPts, numPts))
}
\ No newline at end of file
/** True if input is an image, video or canvas element. */
export function isMediaElement(input: any) {
  const mediaElementTypes = [HTMLImageElement, HTMLVideoElement, HTMLCanvasElement]
  return mediaElementTypes.some(mediaElementType => input instanceof mediaElementType)
}
\ No newline at end of file
import * as tf from '@tensorflow/tfjs-core';
/** True if `tensor` is a tf.Tensor of exactly the given rank. */
export function isTensor(tensor: any, dim: number) {
  if (!(tensor instanceof tf.Tensor)) {
    return false
  }
  return tensor.shape.length === dim
}

/** True if `tensor` is a rank-1 tf.Tensor. */
export function isTensor1D(tensor: any) {
  return isTensor(tensor, 1)
}

/** True if `tensor` is a rank-2 tf.Tensor. */
export function isTensor2D(tensor: any) {
  return isTensor(tensor, 2)
}

/** True if `tensor` is a rank-3 tf.Tensor. */
export function isTensor3D(tensor: any) {
  return isTensor(tensor, 3)
}

/** True if `tensor` is a rank-4 tf.Tensor. */
export function isTensor4D(tensor: any) {
  return isTensor(tensor, 4)
}
\ No newline at end of file
import * as tf from '@tensorflow/tfjs-core';
/**
 * Derives the weight manifest uri and the base uri for the weight shards from
 * a user supplied model uri. The uri may be empty (current directory), '/',
 * a directory path, or a direct path to a *.json manifest file; an optional
 * http(s):// prefix is preserved.
 */
export function getModelUris(uri: string | undefined, defaultModelName: string) {
  const defaultManifestFilename = `${defaultModelName}-weights_manifest.json`

  if (!uri) {
    return {
      modelBaseUri: '',
      manifestUri: defaultManifestFilename
    }
  }

  if (uri === '/') {
    return {
      modelBaseUri: '/',
      manifestUri: `/${defaultManifestFilename}`
    }
  }

  // detach the protocol prefix, so path handling below only sees the path
  const protocol = uri.startsWith('http://')
    ? 'http://'
    : uri.startsWith('https://') ? 'https://' : ''
  const path = uri.replace(protocol, '')

  const parts = path.split('/').filter(s => s)

  // a path ending in .json points directly at a manifest file
  const isManifestUri = path.endsWith('.json')
  const manifestFile = isManifestUri ? parts[parts.length - 1] : defaultManifestFilename

  const baseParts = isManifestUri ? parts.slice(0, parts.length - 1) : parts
  let modelBaseUri = protocol + baseParts.join('/')
  if (path.startsWith('/')) {
    modelBaseUri = `/${modelBaseUri}`
  }

  return {
    modelBaseUri,
    manifestUri: modelBaseUri === '/' ? `/${manifestFile}` : `${modelBaseUri}/${manifestFile}`
  }
}
/**
 * Fetches the weight manifest for the given model uri and loads the weight
 * map referenced by it.
 */
export async function loadWeightMap(
  uri: string | undefined,
  defaultModelName: string
): Promise<any> {
  const { manifestUri, modelBaseUri } = getModelUris(uri, defaultModelName)

  const response = await fetch(manifestUri)
  const manifest = await response.json()

  return tf.io.loadWeights(manifest, modelBaseUri)
}
\ No newline at end of file
import { BoundingBox } from '../BoundingBox';
import { iou } from '../iou';
/**
 * Greedy non maximum suppression: repeatedly picks the highest scoring
 * remaining box and discards all remaining boxes that overlap it more
 * than iouThreshold.
 *
 * @param boxes The candidate bounding boxes.
 * @param scores Score per box (same order as boxes).
 * @param iouThreshold Maximum allowed overlap with an already picked box.
 * @param isIOU If true, overlap is intersection over union, otherwise
 * intersection over the smaller box area (see iou).
 * @returns Indices of the picked boxes, best score first.
 */
export function nonMaxSuppression(
  boxes: BoundingBox[],
  scores: number[],
  iouThreshold: number,
  isIOU: boolean = true
): number[] {
  // box indices ordered by ascending score, so the best box sits at the end
  let remaining = scores
    .map((score, boxIndex) => ({ score, boxIndex }))
    .sort((c1, c2) => c1.score - c2.score)
    .map(c => c.boxIndex)

  const pick: number[] = []

  while (remaining.length > 0) {
    // select the highest scoring of the remaining boxes
    const curr = remaining.pop() as number
    pick.push(curr)

    // overlap of every remaining box with the selected one
    const overlaps = remaining.map(idx => iou(boxes[curr], boxes[idx], isIOU))

    // keep only boxes that do not overlap the selected box too strongly
    remaining = remaining.filter((_, j) => overlaps[j] <= iouThreshold)
  }

  return pick
}
\ No newline at end of file
import * as tf from '@tensorflow/tfjs-core';
/**
 * Subtracts the given per channel rgb mean values from a batched image tensor.
 *
 * @param x Input tensor with the channel dimension last (axis 3).
 * @param meanRgb The mean values [r, g, b] to subtract per channel.
 * @returns The mean subtracted tensor.
 */
export function normalize(x: tf.Tensor4D, meanRgb: number[]): tf.Tensor4D {
  return tf.tidy(() => {
    const [r, g, b] = meanRgb
    // one constant tensor per channel, filled with that channel's mean
    const channelShape = [...x.shape.slice(0, 3), 1]
    const channelMeans = [r, g, b].map(avg => tf.fill(channelShape, avg))
    const avgRgb = tf.concat(channelMeans, 3)
    return tf.sub(x, avgRgb)
  })
}
\ No newline at end of file
...@@ -8,18 +8,4 @@ export type ConvParams = { ...@@ -8,18 +8,4 @@ export type ConvParams = {
export type FCParams = { export type FCParams = {
weights: tf.Tensor2D weights: tf.Tensor2D
bias: tf.Tensor1D bias: tf.Tensor1D
}
export type ExtractWeightsFunction = (numWeights: number) => Float32Array
export type BatchReshapeInfo = {
originalWidth: number
originalHeight: number
paddingX: number
paddingY: number
}
export type ParamMapping = {
originalPath?: string
paramPath: string
} }
\ No newline at end of file
import { FaceDetection } from '../FaceDetection';
import { FaceLandmarks68 } from '../faceLandmarkNet';
import { FaceLandmarks } from '../FaceLandmarks';
import { Point } from '../Point';
import { getContext2dOrThrow, resolveInput, round } from '../utils';
import { DrawBoxOptions, DrawLandmarksOptions, DrawOptions, DrawTextOptions } from './types';
/**
 * Returns a fresh copy of the default options used by the draw utilities.
 */
export function getDefaultDrawOptions(): DrawOptions {
  const defaults: DrawOptions = {
    color: 'blue',
    lineWidth: 2,
    fontSize: 20,
    fontStyle: 'Georgia'
  }
  return defaults
}
/**
 * Strokes a rectangle onto the given 2d context.
 *
 * @param ctx The 2d rendering context to draw into.
 * @param x Left coordinate of the box.
 * @param y Top coordinate of the box.
 * @param w Width of the box.
 * @param h Height of the box.
 * @param options Drawing options, merged over the defaults.
 */
export function drawBox(
  ctx: CanvasRenderingContext2D,
  x: number,
  y: number,
  w: number,
  h: number,
  options: DrawBoxOptions
) {
  const { color, lineWidth } = Object.assign(getDefaultDrawOptions(), options || {})
  ctx.strokeStyle = color
  ctx.lineWidth = lineWidth
  ctx.strokeRect(x, y, w, h)
}
/**
 * Draws a text label near the given position.
 *
 * @param ctx The 2d rendering context to draw into.
 * @param x Anchor x coordinate.
 * @param y Anchor y coordinate.
 * @param text The text to draw.
 * @param options Drawing options, merged over the defaults.
 */
export function drawText(
  ctx: CanvasRenderingContext2D,
  x: number,
  y: number,
  text: string,
  options: DrawTextOptions
) {
  const { color, fontSize, fontStyle, lineWidth } = Object.assign(
    getDefaultDrawOptions(),
    options || {}
  )

  // pad the text away from the anchor point, relative to the line width
  const padText = 2 + lineWidth

  ctx.fillStyle = color
  ctx.font = `${fontSize}px ${fontStyle}`
  // offset vertically by a fraction of the font size to roughly align the baseline
  ctx.fillText(text, x + padText, y + padText + (fontSize * 0.6))
}
/**
 * Draws the bounding boxes (and optionally the detection scores) of face
 * detection results onto a canvas.
 *
 * @param canvasArg Canvas element or the element id of a canvas.
 * @param detection A single detection result or an array of results.
 * @param options Drawing options; withScore (default true) toggles score labels.
 */
export function drawDetection(
  canvasArg: string | HTMLCanvasElement,
  detection: FaceDetection | FaceDetection[],
  options?: DrawBoxOptions & DrawTextOptions & { withScore: boolean }
) {
  const canvas = resolveInput(canvasArg)
  if (!(canvas instanceof HTMLCanvasElement)) {
    throw new Error('drawBox - expected canvas to be of type: HTMLCanvasElement')
  }

  const detections = Array.isArray(detection) ? detection : [detection]

  detections.forEach((det) => {
    const { x, y, width, height } = det.getBox()

    const drawOptions = Object.assign(getDefaultDrawOptions(), options || {})
    const { withScore } = Object.assign({ withScore: true }, options || {})

    const ctx = getContext2dOrThrow(canvas)
    drawBox(ctx, x, y, width, height, drawOptions)

    if (withScore) {
      // label the box with the rounded detection score
      drawText(ctx, x, y, `${round(det.getScore())}`, drawOptions)
    }
  })
}
/**
 * Draws the polyline connecting the given points, optionally closing the
 * contour by connecting the last point back to the first.
 *
 * @param ctx The 2d rendering context to draw into.
 * @param points The contour points in drawing order.
 * @param isClosed If true, also connect the last point to the first.
 */
function drawContour(
  ctx: CanvasRenderingContext2D,
  points: Point[],
  isClosed: boolean = false
) {
  ctx.beginPath()

  // connect each point to its predecessor
  for (let i = 1; i < points.length; i++) {
    const from = points[i - 1]
    const to = points[i]
    ctx.moveTo(from.x, from.y)
    ctx.lineTo(to.x, to.y)
  }

  if (isClosed) {
    const from = points[points.length - 1]
    const to = points[0]
    // nothing to close for an empty contour (and no stroke either)
    if (!from || !to) {
      return
    }
    ctx.moveTo(from.x, from.y)
    ctx.lineTo(to.x, to.y)
  }

  ctx.stroke()
}
/**
 * Draws face landmark positions onto a canvas, either as individual points
 * or (for 68 point landmarks, when drawLines is set) as connected contours.
 *
 * @param canvasArg Canvas element or the element id of a canvas.
 * @param faceLandmarks A single landmarks result or an array of results.
 * @param options Drawing options; drawLines (default false) draws face
 * contours instead of points for FaceLandmarks68 results.
 */
export function drawLandmarks(
  canvasArg: string | HTMLCanvasElement,
  faceLandmarks: FaceLandmarks | FaceLandmarks[],
  options?: DrawLandmarksOptions & { drawLines: boolean }
) {
  const canvas = resolveInput(canvasArg)
  if (!(canvas instanceof HTMLCanvasElement)) {
    throw new Error('drawLandmarks - expected canvas to be of type: HTMLCanvasElement')
  }

  const { lineWidth, color } = Object.assign(getDefaultDrawOptions(), options || {})
  const { drawLines } = Object.assign({ drawLines: false }, options || {})

  const ctx = getContext2dOrThrow(canvas)
  const landmarksArray = Array.isArray(faceLandmarks) ? faceLandmarks : [faceLandmarks]

  landmarksArray.forEach(landmarks => {
    if (drawLines && landmarks instanceof FaceLandmarks68) {
      // draw the face as a set of connected contours
      ctx.strokeStyle = color
      ctx.lineWidth = lineWidth
      drawContour(ctx, landmarks.getJawOutline())
      drawContour(ctx, landmarks.getLeftEyeBrow())
      drawContour(ctx, landmarks.getRightEyeBrow())
      drawContour(ctx, landmarks.getNose())
      drawContour(ctx, landmarks.getLeftEye(), true)
      drawContour(ctx, landmarks.getRightEye(), true)
      drawContour(ctx, landmarks.getMouth(), true)
      return
    }

    // otherwise draw each landmark position as a small square
    const ptOffset = lineWidth / 2
    ctx.fillStyle = color
    landmarks.getPositions().forEach(
      pt => ctx.fillRect(pt.x - ptOffset, pt.y - ptOffset, lineWidth, lineWidth)
    )
  })
}
\ No newline at end of file
/** Options accepted by drawBox; missing fields fall back to the defaults. */
export type DrawBoxOptions = {
  lineWidth?: number
  color?: string
}

/** Options accepted by drawText; missing fields fall back to the defaults. */
export type DrawTextOptions = {
  lineWidth?: number
  fontSize?: number
  fontStyle?: string
  color?: string
}

/** Options accepted by drawLandmarks; missing fields fall back to the defaults. */
export type DrawLandmarksOptions = {
  lineWidth?: number
  color?: string
}

/** Fully resolved drawing options, as returned by getDefaultDrawOptions. */
export type DrawOptions = {
  lineWidth: number
  fontSize: number
  fontStyle: string
  color: string
}
\ No newline at end of file
import * as tf from '@tensorflow/tfjs-core'; import * as tf from '@tensorflow/tfjs-core';
import { Rect, TNetInput, toNetInput } from 'tfjs-image-recognition-base';
import { FaceDetection } from './FaceDetection'; import { FaceDetection } from './classes/FaceDetection';
import { Rect } from './Rect';
import { toNetInput } from './toNetInput';
import { TNetInput } from './types';
/** /**
* Extracts the tensors of the image regions containing the detected faces. * Extracts the tensors of the image regions containing the detected faces.
...@@ -42,7 +40,7 @@ export async function extractFaceTensors( ...@@ -42,7 +40,7 @@ export async function extractFaceTensors(
.map(box => box.clipAtImageBorders(imgWidth, imgHeight)) .map(box => box.clipAtImageBorders(imgWidth, imgHeight))
const faceTensors = boxes.map(({ x, y, width, height }) => const faceTensors = boxes.map(({ x, y, width, height }) =>
tf.slice(imgTensor, [0, y, x, 0], [1, height, width, numChannels]) tf.slice4d(imgTensor, [0, y, x, 0], [1, height, width, numChannels])
) )
if (netInput.isManaged) { if (netInput.isManaged) {
......
import { FaceDetection } from './FaceDetection'; import {
import { Rect } from './Rect'; createCanvas,
import { toNetInput } from './toNetInput'; getContext2dOrThrow,
import { TNetInput } from './types'; imageTensorToCanvas,
import { createCanvas, getContext2dOrThrow, imageTensorToCanvas } from './utils'; Rect,
TNetInput,
toNetInput,
} from 'tfjs-image-recognition-base';
import { FaceDetection } from './classes/FaceDetection';
/** /**
* Extracts the image regions containing the detected faces. * Extracts the image regions containing the detected faces.
......
import * as tf from '@tensorflow/tfjs-core'; import * as tf from '@tensorflow/tfjs-core';
import { NetInput, NeuralNetwork, Rect, TNetInput, toNetInput } from 'tfjs-image-recognition-base';
import { NeuralNetwork } from '../commons/NeuralNetwork'; import { FaceDetection } from '../classes/FaceDetection';
import { FaceDetection } from '../FaceDetection';
import { NetInput } from '../NetInput';
import { Rect } from '../Rect';
import { toNetInput } from '../toNetInput';
import { TNetInput } from '../types';
import { extractParams } from './extractParams'; import { extractParams } from './extractParams';
import { loadQuantizedParams } from './loadQuantizedParams'; import { loadQuantizedParams } from './loadQuantizedParams';
import { mobileNetV1 } from './mobileNetV1'; import { mobileNetV1 } from './mobileNetV1';
......
import * as tf from '@tensorflow/tfjs-core'; import * as tf from '@tensorflow/tfjs-core';
import { extractWeightsFactory, ExtractWeightsFunction, ParamMapping } from 'tfjs-image-recognition-base';
import { extractWeightsFactory } from '../commons/extractWeightsFactory'; import { ConvParams } from '../commons/types';
import { ConvParams, ExtractWeightsFunction, ParamMapping } from '../commons/types';
import { MobileNetV1, NetParams, PointwiseConvParams, PredictionLayerParams } from './types'; import { MobileNetV1, NetParams, PointwiseConvParams, PredictionLayerParams } from './types';
function extractorsFactory(extractWeights: ExtractWeightsFunction, paramMappings: ParamMapping[]) { function extractorsFactory(extractWeights: ExtractWeightsFunction, paramMappings: ParamMapping[]) {
......
import * as tf from '@tensorflow/tfjs-core'; import * as tf from '@tensorflow/tfjs-core';
import {
import { disposeUnusedWeightTensors } from '../commons/disposeUnusedWeightTensors'; disposeUnusedWeightTensors,
import { extractWeightEntryFactory } from '../commons/extractWeightEntryFactory'; extractWeightEntryFactory,
import { isTensor3D } from '../commons/isTensor'; isTensor3D,
import { loadWeightMap } from '../commons/loadWeightMap'; loadWeightMap,
import { ConvParams, ParamMapping } from '../commons/types'; ParamMapping,
} from 'tfjs-image-recognition-base';
import { ConvParams } from '../commons/types';
import { BoxPredictionParams, MobileNetV1, NetParams, PointwiseConvParams, PredictionLayerParams } from './types'; import { BoxPredictionParams, MobileNetV1, NetParams, PointwiseConvParams, PredictionLayerParams } from './types';
const DEFAULT_MODEL_NAME = 'ssd_mobilenetv1_model' const DEFAULT_MODEL_NAME = 'ssd_mobilenetv1_model'
......
import * as tf from '@tensorflow/tfjs-core'; import * as tf from '@tensorflow/tfjs-core';
import { isEven, NetInput, NeuralNetwork, Point, TNetInput, toNetInput } from 'tfjs-image-recognition-base';
import { FaceLandmarks68 } from '../classes/FaceLandmarks68';
import { convLayer } from '../commons/convLayer'; import { convLayer } from '../commons/convLayer';
import { NeuralNetwork } from '../commons/NeuralNetwork';
import { ConvParams } from '../commons/types'; import { ConvParams } from '../commons/types';
import { NetInput } from '../NetInput';
import { Point } from '../Point';
import { toNetInput } from '../toNetInput';
import { TNetInput } from '../types';
import { isEven } from '../utils';
import { extractParams } from './extractParams'; import { extractParams } from './extractParams';
import { FaceLandmarks68 } from './FaceLandmarks68';
import { fullyConnectedLayer } from './fullyConnectedLayer'; import { fullyConnectedLayer } from './fullyConnectedLayer';
import { loadQuantizedParams } from './loadQuantizedParams'; import { loadQuantizedParams } from './loadQuantizedParams';
import { NetParams } from './types'; import { NetParams } from './types';
......
import { extractWeightsFactory, ParamMapping } from 'tfjs-image-recognition-base';
import { extractConvParamsFactory } from '../commons/extractConvParamsFactory'; import { extractConvParamsFactory } from '../commons/extractConvParamsFactory';
import { extractFCParamsFactory } from '../commons/extractFCParamsFactory'; import { extractFCParamsFactory } from '../commons/extractFCParamsFactory';
import { extractWeightsFactory } from '../commons/extractWeightsFactory';
import { ParamMapping } from '../commons/types';
import { NetParams } from './types'; import { NetParams } from './types';
export function extractParams(weights: Float32Array): { params: NetParams, paramMappings: ParamMapping[] } { export function extractParams(weights: Float32Array): { params: NetParams, paramMappings: ParamMapping[] } {
......
import { FaceLandmarkNet } from './FaceLandmarkNet'; import { FaceLandmarkNet } from './FaceLandmarkNet';
export * from './FaceLandmarkNet'; export * from './FaceLandmarkNet';
export * from './FaceLandmarks68';
export function createFaceLandmarkNet(weights: Float32Array) { export function createFaceLandmarkNet(weights: Float32Array) {
const net = new FaceLandmarkNet() const net = new FaceLandmarkNet()
......
import * as tf from '@tensorflow/tfjs-core'; import * as tf from '@tensorflow/tfjs-core';
import {
disposeUnusedWeightTensors,
extractWeightEntryFactory,
loadWeightMap,
ParamMapping,
} from 'tfjs-image-recognition-base';
import { disposeUnusedWeightTensors } from '../commons/disposeUnusedWeightTensors'; import { ConvParams, FCParams } from '../commons/types';
import { extractWeightEntryFactory } from '../commons/extractWeightEntryFactory';
import { loadWeightMap } from '../commons/loadWeightMap';
import { ConvParams, FCParams, ParamMapping } from '../commons/types';
import { NetParams } from './types'; import { NetParams } from './types';
const DEFAULT_MODEL_NAME = 'face_landmark_68_model' const DEFAULT_MODEL_NAME = 'face_landmark_68_model'
......
import * as tf from '@tensorflow/tfjs-core'; import * as tf from '@tensorflow/tfjs-core';
import { NetInput, NeuralNetwork, normalize, TNetInput, toNetInput } from 'tfjs-image-recognition-base';
import { NeuralNetwork } from '../commons/NeuralNetwork';
import { normalize } from '../commons/normalize';
import { NetInput } from '../NetInput';
import { toNetInput } from '../toNetInput';
import { TNetInput } from '../types';
import { convDown } from './convLayer'; import { convDown } from './convLayer';
import { extractParams } from './extractParams'; import { extractParams } from './extractParams';
import { loadQuantizedParams } from './loadQuantizedParams'; import { loadQuantizedParams } from './loadQuantizedParams';
import { residual, residualDown } from './residualLayer'; import { residual, residualDown } from './residualLayer';
import { NetParams } from './types'; import { NetParams } from './types';
export class FaceRecognitionNet extends NeuralNetwork<NetParams> { export class FaceRecognitionNet extends NeuralNetwork<NetParams> {
constructor() { constructor() {
......
import * as tf from '@tensorflow/tfjs-core'; import * as tf from '@tensorflow/tfjs-core';
import { extractWeightsFactory, ExtractWeightsFunction, isFloat, ParamMapping } from 'tfjs-image-recognition-base';
import { extractWeightsFactory } from '../commons/extractWeightsFactory'; import { ConvParams } from '../commons/types';
import { ConvParams, ExtractWeightsFunction, ParamMapping } from '../commons/types';
import { isFloat } from '../utils';
import { ConvLayerParams, NetParams, ResidualLayerParams, ScaleLayerParams } from './types'; import { ConvLayerParams, NetParams, ResidualLayerParams, ScaleLayerParams } from './types';
function extractorsFactory(extractWeights: ExtractWeightsFunction, paramMappings: ParamMapping[]) { function extractorsFactory(extractWeights: ExtractWeightsFunction, paramMappings: ParamMapping[]) {
......
import * as tf from '@tensorflow/tfjs-core'; import * as tf from '@tensorflow/tfjs-core';
import {
disposeUnusedWeightTensors,
extractWeightEntryFactory,
isTensor2D,
loadWeightMap,
ParamMapping,
} from 'tfjs-image-recognition-base';
import { disposeUnusedWeightTensors } from '../commons/disposeUnusedWeightTensors';
import { extractWeightEntryFactory } from '../commons/extractWeightEntryFactory';
import { isTensor2D } from '../commons/isTensor';
import { loadWeightMap } from '../commons/loadWeightMap';
import { ParamMapping } from '../commons/types';
import { ConvLayerParams, NetParams, ResidualLayerParams, ScaleLayerParams } from './types'; import { ConvLayerParams, NetParams, ResidualLayerParams, ScaleLayerParams } from './types';
const DEFAULT_MODEL_NAME = 'face_recognition_model' const DEFAULT_MODEL_NAME = 'face_recognition_model'
......
import * as tf from '@tensorflow/tfjs-core'; import * as tf from '@tensorflow/tfjs-core';
import { NetInput, TNetInput } from 'tfjs-image-recognition-base';
import { allFacesMtcnnFactory, allFacesSsdMobilenetv1Factory, allFacesTinyYolov2Factory } from './allFacesFactory'; import { allFacesMtcnnFactory, allFacesSsdMobilenetv1Factory, allFacesTinyYolov2Factory } from './allFacesFactory';
import { FaceDetection } from './FaceDetection'; import { FaceDetection } from './classes/FaceDetection';
import { FaceLandmarks68 } from './classes/FaceLandmarks68';
import { FullFaceDescription } from './classes/FullFaceDescription';
import { FaceDetectionNet } from './faceDetectionNet/FaceDetectionNet'; import { FaceDetectionNet } from './faceDetectionNet/FaceDetectionNet';
import { FaceLandmarkNet } from './faceLandmarkNet/FaceLandmarkNet'; import { FaceLandmarkNet } from './faceLandmarkNet/FaceLandmarkNet';
import { FaceLandmarks68 } from './faceLandmarkNet/FaceLandmarks68';
import { FaceRecognitionNet } from './faceRecognitionNet/FaceRecognitionNet'; import { FaceRecognitionNet } from './faceRecognitionNet/FaceRecognitionNet';
import { FullFaceDescription } from './FullFaceDescription';
import { Mtcnn } from './mtcnn/Mtcnn'; import { Mtcnn } from './mtcnn/Mtcnn';
import { MtcnnForwardParams, MtcnnResult } from './mtcnn/types'; import { MtcnnForwardParams, MtcnnResult } from './mtcnn/types';
import { NetInput } from './NetInput';
import { TinyYolov2 } from './tinyYolov2/TinyYolov2'; import { TinyYolov2 } from './tinyYolov2/TinyYolov2';
import { TinyYolov2ForwardParams } from './tinyYolov2/types'; import { TinyYolov2ForwardParams } from './tinyYolov2/types';
import { TNetInput } from './types';
export const detectionNet = new FaceDetectionNet() export const detectionNet = new FaceDetectionNet()
export const landmarkNet = new FaceLandmarkNet() export const landmarkNet = new FaceLandmarkNet()
......
...@@ -4,15 +4,10 @@ export { ...@@ -4,15 +4,10 @@ export {
tf tf
} }
export * from 'tfjs-image-recognition-base';
export * from './BoundingBox'; export * from './classes';
export * from './FaceDetection';
export * from './FullFaceDescription';
export * from './NetInput';
export * from './Point';
export * from './Rect';
export * from './drawing';
export * from './euclideanDistance'; export * from './euclideanDistance';
export * from './extractFaces' export * from './extractFaces'
export * from './extractFaceTensors' export * from './extractFaceTensors'
...@@ -20,11 +15,5 @@ export * from './faceDetectionNet'; ...@@ -20,11 +15,5 @@ export * from './faceDetectionNet';
export * from './faceLandmarkNet'; export * from './faceLandmarkNet';
export * from './faceRecognitionNet'; export * from './faceRecognitionNet';
export * from './globalApi'; export * from './globalApi';
export * from './iou';
export * from './mtcnn'; export * from './mtcnn';
export * from './padToSquare'; export * from './tinyYolov2';
export * from './tinyYolov2'; \ No newline at end of file
export * from './toNetInput';
export * from './utils';
export * from './types';
\ No newline at end of file
import { BoundingBox } from './BoundingBox';
/**
 * Computes the overlap ratio of two bounding boxes (note the + 1 when
 * computing the intersection width/height).
 *
 * @param box1 First bounding box.
 * @param box2 Second bounding box.
 * @param isIOU If true returns intersection over union, otherwise
 * intersection over the smaller box area.
 * @returns The overlap ratio.
 */
export function iou(box1: BoundingBox, box2: BoundingBox, isIOU: boolean = true) {
  // clamp to 0 so that non overlapping boxes yield zero intersection
  const interWidth = Math.max(0.0, Math.min(box1.right, box2.right) - Math.max(box1.left, box2.left) + 1)
  const interHeight = Math.max(0.0, Math.min(box1.bottom, box2.bottom) - Math.max(box1.top, box2.top) + 1)
  const interArea = interWidth * interHeight

  const denominator = isIOU
    ? box1.area + box2.area - interArea
    : Math.min(box1.area, box2.area)

  return interArea / denominator
}
\ No newline at end of file
import * as tf from '@tensorflow/tfjs-core'; import * as tf from '@tensorflow/tfjs-core';
import { NetInput, NeuralNetwork, Point, Rect, TNetInput, toNetInput } from 'tfjs-image-recognition-base';
import { NeuralNetwork } from '../commons/NeuralNetwork'; import { FaceDetection } from '../classes/FaceDetection';
import { FaceDetection } from '../FaceDetection'; import { FaceLandmarks5 } from '../classes/FaceLandmarks5';
import { NetInput } from '../NetInput';
import { Point } from '../Point';
import { Rect } from '../Rect';
import { toNetInput } from '../toNetInput';
import { TNetInput } from '../types';
import { bgrToRgbTensor } from './bgrToRgbTensor'; import { bgrToRgbTensor } from './bgrToRgbTensor';
import { CELL_SIZE } from './config'; import { CELL_SIZE } from './config';
import { extractParams } from './extractParams'; import { extractParams } from './extractParams';
import { FaceLandmarks5 } from './FaceLandmarks5';
import { getDefaultMtcnnForwardParams } from './getDefaultMtcnnForwardParams'; import { getDefaultMtcnnForwardParams } from './getDefaultMtcnnForwardParams';
import { getSizesForScale } from './getSizesForScale'; import { getSizesForScale } from './getSizesForScale';
import { loadQuantizedParams } from './loadQuantizedParams'; import { loadQuantizedParams } from './loadQuantizedParams';
......
import * as tf from '@tensorflow/tfjs-core'; import * as tf from '@tensorflow/tfjs-core';
import { BoundingBox } from '../BoundingBox';
import { Dimensions } from '../types';
import { createCanvas, getContext2dOrThrow } from '../utils';
import { normalize } from './normalize'; import { normalize } from './normalize';
import { BoundingBox, Dimensions, getContext2dOrThrow, createCanvas } from 'tfjs-image-recognition-base';
export async function extractImagePatches( export async function extractImagePatches(
img: HTMLCanvasElement, img: HTMLCanvasElement,
......
import * as tf from '@tensorflow/tfjs-core'; import * as tf from '@tensorflow/tfjs-core';
import { extractWeightsFactory, ExtractWeightsFunction, ParamMapping } from 'tfjs-image-recognition-base';
import { extractConvParamsFactory } from '../commons/extractConvParamsFactory'; import { extractConvParamsFactory } from '../commons/extractConvParamsFactory';
import { extractFCParamsFactory } from '../commons/extractFCParamsFactory'; import { extractFCParamsFactory } from '../commons/extractFCParamsFactory';
import { extractWeightsFactory } from '../commons/extractWeightsFactory'; import { NetParams, ONetParams, PNetParams, RNetParams, SharedParams } from './types';
import { ExtractWeightsFunction, ParamMapping } from '../commons/types';
import { NetParams, PNetParams, RNetParams, SharedParams, ONetParams } from './types';
function extractorsFactory(extractWeights: ExtractWeightsFunction, paramMappings: ParamMapping[]) { function extractorsFactory(extractWeights: ExtractWeightsFunction, paramMappings: ParamMapping[]) {
......
import { Mtcnn } from './Mtcnn'; import { Mtcnn } from './Mtcnn';
export * from './Mtcnn'; export * from './Mtcnn';
export * from './FaceLandmarks5';
export function createMtcnn(weights: Float32Array) { export function createMtcnn(weights: Float32Array) {
const net = new Mtcnn() const net = new Mtcnn()
......
import * as tf from '@tensorflow/tfjs-core'; import * as tf from '@tensorflow/tfjs-core';
import {
import { disposeUnusedWeightTensors } from '../commons/disposeUnusedWeightTensors'; disposeUnusedWeightTensors,
import { extractWeightEntryFactory } from '../commons/extractWeightEntryFactory'; extractWeightEntryFactory,
import { loadWeightMap } from '../commons/loadWeightMap'; loadWeightMap,
import { ConvParams, FCParams, ParamMapping } from '../commons/types'; ParamMapping,
} from 'tfjs-image-recognition-base';
import { ConvParams, FCParams } from '../commons/types';
import { NetParams, ONetParams, PNetParams, RNetParams, SharedParams } from './types'; import { NetParams, ONetParams, PNetParams, RNetParams, SharedParams } from './types';
const DEFAULT_MODEL_NAME = 'mtcnn_model' const DEFAULT_MODEL_NAME = 'mtcnn_model'
......
import * as tf from '@tensorflow/tfjs-core'; import * as tf from '@tensorflow/tfjs-core';
import { BoundingBox, nonMaxSuppression, Point } from 'tfjs-image-recognition-base';
import { BoundingBox } from '../BoundingBox';
import { nonMaxSuppression } from '../commons/nonMaxSuppression';
import { Point } from '../Point';
import { CELL_SIZE, CELL_STRIDE } from './config'; import { CELL_SIZE, CELL_STRIDE } from './config';
import { getSizesForScale } from './getSizesForScale'; import { getSizesForScale } from './getSizesForScale';
import { normalize } from './normalize'; import { normalize } from './normalize';
......
import * as tf from '@tensorflow/tfjs-core'; import * as tf from '@tensorflow/tfjs-core';
import { BoundingBox, nonMaxSuppression } from 'tfjs-image-recognition-base';
import { BoundingBox } from '../BoundingBox';
import { nonMaxSuppression } from '../commons/nonMaxSuppression';
import { extractImagePatches } from './extractImagePatches'; import { extractImagePatches } from './extractImagePatches';
import { RNet } from './RNet'; import { RNet } from './RNet';
import { RNetParams } from './types'; import { RNetParams } from './types';
......
import * as tf from '@tensorflow/tfjs-core'; import * as tf from '@tensorflow/tfjs-core';
import { BoundingBox, nonMaxSuppression, Point } from 'tfjs-image-recognition-base';
import { BoundingBox } from '../BoundingBox';
import { nonMaxSuppression } from '../commons/nonMaxSuppression';
import { Point } from '../Point';
import { extractImagePatches } from './extractImagePatches'; import { extractImagePatches } from './extractImagePatches';
import { ONet } from './ONet'; import { ONet } from './ONet';
import { ONetParams } from './types'; import { ONetParams } from './types';
......
import * as tf from '@tensorflow/tfjs-core'; import * as tf from '@tensorflow/tfjs-core';
import { FaceDetection } from '../classes/FaceDetection';
import { FaceLandmarks5 } from '../classes/FaceLandmarks5';
import { ConvParams, FCParams } from '../commons/types'; import { ConvParams, FCParams } from '../commons/types';
import { FaceDetection } from '../FaceDetection';
import { FaceLandmarks5 } from './FaceLandmarks5';
export type SharedParams = { export type SharedParams = {
conv1: ConvParams conv1: ConvParams
......
import * as tf from '@tensorflow/tfjs-core';
/**
 * Pads the smaller dimension of an image tensor with zeros, such that width === height.
 *
 * @param imgTensor The image tensor.
 * @param isCenterImage (optional, default: false) If true, add padding on both sides of the image, such that the image.
 * @returns The padded tensor with width === height.
 */
export function padToSquare(
  imgTensor: tf.Tensor4D,
  isCenterImage: boolean = false
): tf.Tensor4D {
  return tf.tidy(() => {
    const [height, width] = imgTensor.shape.slice(1)

    // already square, nothing to do
    if (height === width) {
      return imgTensor
    }

    const dimDiff = Math.abs(height - width)
    // when centering, only (roughly) half of the missing pixels go after the image
    const appendAmount = Math.round(dimDiff * (isCenterImage ? 0.5 : 1))
    // pad along the smaller dimension (axis 1 = height, axis 2 = width)
    const paddingAxis = height > width ? 2 : 1

    const makePadding = (amount: number): tf.Tensor => {
      const shape = imgTensor.shape.slice()
      shape[paddingAxis] = amount
      return tf.fill(shape, 0)
    }

    const appendTensor = makePadding(appendAmount)

    // when centering, whatever padding remains is prepended before the image
    const prependAmount = dimDiff - appendTensor.shape[paddingAxis]
    const prependTensor = isCenterImage && prependAmount
      ? makePadding(prependAmount)
      : null

    const tensorsToStack = [prependTensor, imgTensor, appendTensor]
      .filter(t => t !== null) as tf.Tensor4D[]

    return tf.concat(tensorsToStack, paddingAxis)
  })
}
\ No newline at end of file
import * as tf from '@tensorflow/tfjs-core'; import * as tf from '@tensorflow/tfjs-core';
import {
import { BoundingBox } from '../BoundingBox'; BoundingBox,
NetInput,
NeuralNetwork,
nonMaxSuppression,
normalize,
Point,
sigmoid,
TNetInput,
toNetInput,
} from 'tfjs-image-recognition-base';
import { FaceDetection } from '../classes/FaceDetection';
import { convLayer } from '../commons/convLayer'; import { convLayer } from '../commons/convLayer';
import { NeuralNetwork } from '../commons/NeuralNetwork';
import { nonMaxSuppression } from '../commons/nonMaxSuppression';
import { normalize } from '../commons/normalize';
import { FaceDetection } from '../FaceDetection';
import { NetInput } from '../NetInput';
import { Point } from '../Point';
import { toNetInput } from '../toNetInput';
import { TNetInput } from '../types';
import { sigmoid } from '../utils';
import { BOX_ANCHORS, BOX_ANCHORS_SEPARABLE, INPUT_SIZES, IOU_THRESHOLD, MEAN_RGB, NUM_BOXES } from './config'; import { BOX_ANCHORS, BOX_ANCHORS_SEPARABLE, INPUT_SIZES, IOU_THRESHOLD, MEAN_RGB, NUM_BOXES } from './config';
import { convWithBatchNorm } from './convWithBatchNorm'; import { convWithBatchNorm } from './convWithBatchNorm';
import { extractParams } from './extractParams'; import { extractParams } from './extractParams';
...@@ -18,6 +20,7 @@ import { getDefaultParams } from './getDefaultParams'; ...@@ -18,6 +20,7 @@ import { getDefaultParams } from './getDefaultParams';
import { loadQuantizedParams } from './loadQuantizedParams'; import { loadQuantizedParams } from './loadQuantizedParams';
import { NetParams, PostProcessingParams, TinyYolov2ForwardParams } from './types'; import { NetParams, PostProcessingParams, TinyYolov2ForwardParams } from './types';
export class TinyYolov2 extends NeuralNetwork<NetParams> { export class TinyYolov2 extends NeuralNetwork<NetParams> {
private _withSeparableConvs: boolean private _withSeparableConvs: boolean
......
import { Point } from '../Point'; import { Point } from 'tfjs-image-recognition-base';
export const INPUT_SIZES = { xs: 224, sm: 320, md: 416, lg: 608 } export const INPUT_SIZES = { xs: 224, sm: 320, md: 416, lg: 608 }
export const NUM_BOXES = 5 export const NUM_BOXES = 5
......
import * as tf from '@tensorflow/tfjs-core'; import * as tf from '@tensorflow/tfjs-core';
import { extractWeightsFactory, ExtractWeightsFunction, ParamMapping } from 'tfjs-image-recognition-base';
import { extractConvParamsFactory } from '../commons/extractConvParamsFactory'; import { extractConvParamsFactory } from '../commons/extractConvParamsFactory';
import { extractWeightsFactory } from '../commons/extractWeightsFactory';
import { ExtractWeightsFunction, ParamMapping } from '../commons/types';
import { BatchNorm, ConvWithBatchNorm, NetParams, SeparableConvParams } from './types'; import { BatchNorm, ConvWithBatchNorm, NetParams, SeparableConvParams } from './types';
function extractorsFactory(extractWeights: ExtractWeightsFunction, paramMappings: ParamMapping[]) { function extractorsFactory(extractWeights: ExtractWeightsFunction, paramMappings: ParamMapping[]) {
......
import * as tf from '@tensorflow/tfjs-core'; import * as tf from '@tensorflow/tfjs-core';
import {
import { disposeUnusedWeightTensors } from '../commons/disposeUnusedWeightTensors'; disposeUnusedWeightTensors,
import { extractWeightEntryFactory } from '../commons/extractWeightEntryFactory'; extractWeightEntryFactory,
import { loadWeightMap } from '../commons/loadWeightMap'; loadWeightMap,
import { ConvParams, ParamMapping } from '../commons/types'; ParamMapping,
} from 'tfjs-image-recognition-base';
import { ConvParams } from '../commons/types';
import { BatchNorm, ConvWithBatchNorm, NetParams, SeparableConvParams } from './types'; import { BatchNorm, ConvWithBatchNorm, NetParams, SeparableConvParams } from './types';
const DEFAULT_MODEL_NAME = 'tiny_yolov2_model' const DEFAULT_MODEL_NAME = 'tiny_yolov2_model'
......
import * as tf from '@tensorflow/tfjs-core'; import * as tf from '@tensorflow/tfjs-core';
import { Point } from 'tfjs-image-recognition-base';
import { ConvParams } from '../commons/types'; import { ConvParams } from '../commons/types';
import { Point } from '../Point';
export type BatchNorm = { export type BatchNorm = {
sub: tf.Tensor1D sub: tf.Tensor1D
......
import * as tf from '@tensorflow/tfjs-core';
import { isMediaElement } from './commons/isMediaElement';
import { isTensor3D, isTensor4D } from './commons/isTensor';
import { NetInput } from './NetInput';
import { TNetInput } from './types';
import { awaitMediaLoaded, resolveInput } from './utils';
/**
 * Validates the input to make sure, they are valid net inputs and awaits all media elements
 * to be finished loading.
 *
 * @param input The input, which can be a media element or an array of different media elements.
 * @param manageCreatedInput If a new NetInput instance is created from the inputs, this flag
 * determines, whether to set the NetInput as managed or not.
 * @returns A NetInput instance, which can be passed into one of the neural networks.
 */
export async function toNetInput(
  inputs: TNetInput,
  manageCreatedInput: boolean = false,
  keepCanvases: boolean = false
): Promise<NetInput> {
  // an existing NetInput is passed through untouched
  if (inputs instanceof NetInput) {
    return inputs
  }

  // optionally flag freshly constructed NetInputs as managed
  const afterCreate = (netInput: NetInput) => manageCreatedInput
    ? netInput.managed()
    : netInput

  // a bare tf.Tensor4D is treated as an already batched input
  if (isTensor4D(inputs)) {
    return afterCreate(new NetInput(inputs as tf.Tensor4D))
  }

  const inputArgArray = Array.isArray(inputs) ? inputs : [inputs]

  if (inputArgArray.length === 0) {
    throw new Error('toNetInput - empty array passed as input')
  }

  // only mention the index in error messages when an array was passed
  const getIdxHint = (idx: number) => Array.isArray(inputs) ? ` at input index ${idx}:` : ''

  // resolve element id strings to their DOM elements
  const inputArray = inputArgArray.map(resolveInput)

  for (let i = 0; i < inputArray.length; i++) {
    const input = inputArray[i]
    const isValidInput = isMediaElement(input) || isTensor3D(input) || isTensor4D(input)

    if (!isValidInput) {
      if (typeof inputArgArray[i] === 'string') {
        throw new Error(`toNetInput -${getIdxHint(i)} string passed, but could not resolve HTMLElement for element id ${inputArgArray[i]}`)
      }
      throw new Error(`toNetInput -${getIdxHint(i)} expected media to be of type HTMLImageElement | HTMLVideoElement | HTMLCanvasElement | tf.Tensor3D, or to be an element id`)
    }

    // if tf.Tensor4D is passed in the input array, the batch size has to be 1
    if (isTensor4D(input) && input.shape[0] !== 1) {
      throw new Error(`toNetInput -${getIdxHint(i)} tf.Tensor4D with batchSize ${input.shape[0]} passed, but not supported in input array`)
    }
  }

  // wait for all media elements being loaded
  await Promise.all(
    inputArray.map(input => isMediaElement(input) && awaitMediaLoaded(input))
  )

  return afterCreate(new NetInput(inputArray, Array.isArray(inputs), keepCanvases))
}
\ No newline at end of file
import * as tf from '@tensorflow/tfjs-core';
import { NetInput } from './NetInput';
// any DOM media element that can be consumed directly as a net input
export type TMediaElement = HTMLImageElement | HTMLVideoElement | HTMLCanvasElement
// an input after element-id resolution: a media element or a tensor
export type TResolvedNetInput = TMediaElement | tf.Tensor3D | tf.Tensor4D
// a single raw input argument; strings are treated as DOM element ids
export type TNetInputArg = string | TResolvedNetInput
// everything accepted by toNetInput: single args, arrays of args,
// an existing NetInput, or an already batched tf.Tensor4D
export type TNetInput = TNetInputArg | Array<TNetInputArg> | NetInput | tf.Tensor4D
// plain width/height pair (pixels)
export type Dimensions = {
  width: number
  height: number
}
\ No newline at end of file
import * as tf from '@tensorflow/tfjs-core';
import { isTensor4D } from './commons/isTensor';
import { Dimensions } from './types';
/**
 * Returns true if the given number has a fractional part
 * (also true for NaN and Infinity, matching the modulo-based check).
 */
export function isFloat(num: number) {
  return !Number.isInteger(num)
}
/**
 * Returns true if the given number is evenly divisible by 2.
 */
export function isEven(num: number) {
  const remainder = num % 2
  return remainder === 0
}
/**
 * Truncates a number at two decimal places.
 * NOTE: this is floor-based (towards -Infinity), not round-half-up,
 * so e.g. round(2.567) === 2.56.
 */
export function round(num: number) {
  const scaled = Math.floor(num * 100)
  return scaled / 100
}
/**
 * Logistic sigmoid: maps any real x into the open interval (0, 1).
 */
export function sigmoid(x: number) {
  const expNegX = Math.exp(-x)
  return 1 / (1 + expNegX)
}
/**
 * Returns true if the given object exposes truthy `width` and `height`
 * properties (note: a width or height of 0 is treated as invalid,
 * matching the original truthiness check).
 *
 * @param obj Any value to test.
 * @returns A real boolean. The declared return type was `boolean`, but the
 * bare `obj && obj.width && obj.height` expression leaked the last
 * truthy/falsy operand (e.g. `null` or `0`) — coerce with `!!` so the
 * implementation honors its signature.
 */
export function isDimensions(obj: any): boolean {
  return !!(obj && obj.width && obj.height)
}
/**
 * Resolves a string argument to the DOM element with that id;
 * any non-string argument is passed through unchanged.
 */
export function resolveInput(arg: string | any) {
  return typeof arg === 'string'
    ? document.getElementById(arg)
    : arg
}
/**
 * Returns true once the media element has finished loading:
 * images when `complete` is set, videos once readyState reaches
 * HAVE_FUTURE_DATA (3) or better.
 */
export function isLoaded(media: HTMLImageElement | HTMLVideoElement) : boolean {
  if (media instanceof HTMLImageElement) {
    return media.complete
  }
  return media instanceof HTMLVideoElement && media.readyState >= 3
}
/**
 * Resolves once the given media element has finished loading.
 * Canvases have no load lifecycle and already-loaded media resolve
 * immediately; otherwise the promise settles on the element's
 * 'load' or 'error' event (rejecting with the error event).
 */
export function awaitMediaLoaded(media: HTMLImageElement | HTMLVideoElement | HTMLCanvasElement) {
  return new Promise((resolve, reject) => {
    if (media instanceof HTMLCanvasElement || isLoaded(media)) {
      return resolve()
    }

    // detach both handlers once either event has fired, so the
    // listeners cannot leak or fire twice
    function unsubscribe(target: EventTarget) {
      target.removeEventListener('load', onLoad)
      target.removeEventListener('error', onError)
    }

    function onLoad(e: Event) {
      if (!e.currentTarget) return
      unsubscribe(e.currentTarget)
      resolve(e)
    }

    function onError(e: Event) {
      if (!e.currentTarget) return
      unsubscribe(e.currentTarget)
      reject(e)
    }

    media.addEventListener('load', onLoad)
    media.addEventListener('error', onError)
  })
}
/**
 * Returns the 2d rendering context of the given canvas,
 * throwing instead of returning null when it is unavailable.
 */
export function getContext2dOrThrow(canvas: HTMLCanvasElement): CanvasRenderingContext2D {
  const ctx = canvas.getContext('2d')
  if (ctx) {
    return ctx
  }
  throw new Error('canvas 2d context is null')
}
/**
 * Creates a fresh canvas element with the given pixel dimensions.
 */
export function createCanvas({ width, height }: Dimensions): HTMLCanvasElement {
  const canvas = document.createElement('canvas')
  Object.assign(canvas, { width, height })
  return canvas
}
/**
 * Draws the given (fully loaded) image or video frame onto a new canvas.
 *
 * @param media Source element; must have finished loading.
 * @param dims Optional target dimensions; defaults to the media's own.
 * @throws If the media has not finished loading yet.
 */
export function createCanvasFromMedia(media: HTMLImageElement | HTMLVideoElement, dims?: Dimensions): HTMLCanvasElement {
  if (!isLoaded(media)) {
    throw new Error('createCanvasFromMedia - media has not finished loading yet')
  }

  const { width, height } = dims ? dims : getMediaDimensions(media)

  const canvas = createCanvas({ width, height })
  const ctx = getContext2dOrThrow(canvas)
  ctx.drawImage(media, 0, 0, width, height)
  return canvas
}
/**
 * Returns the intrinsic pixel dimensions of an image or video element;
 * any other value is returned as-is (the instanceof branches are
 * mutually exclusive, so their order does not matter).
 */
export function getMediaDimensions(media: HTMLImageElement | HTMLVideoElement) {
  if (media instanceof HTMLVideoElement) {
    return { width: media.videoWidth, height: media.videoHeight }
  }
  if (media instanceof HTMLImageElement) {
    return { width: media.naturalWidth, height: media.naturalHeight }
  }
  return media
}
/**
 * Decodes a Blob into an HTMLImageElement via a data URL.
 * Rejects (with a string) for non-Blob input, and with the
 * underlying event on reader or image errors.
 */
export function bufferToImage(buf: Blob): Promise<HTMLImageElement> {
  return new Promise((resolve, reject) => {
    if (!(buf instanceof Blob)) {
      return reject('bufferToImage - expected buf to be of type: Blob')
    }

    const reader = new FileReader()
    reader.onerror = reject
    reader.onload = () => {
      // the data URL is only available once the reader has loaded
      const img = new Image()
      img.onerror = reject
      img.onload = () => resolve(img)
      img.src = reader.result
    }
    reader.readAsDataURL(buf)
  })
}
/**
 * Renders an image tensor into a canvas (a new one is created when
 * none is passed). A 4D tensor is assumed to hold a single image,
 * so its batch dimension is dropped before drawing.
 *
 * @returns The canvas that was drawn onto.
 */
export async function imageTensorToCanvas(
  imgTensor: tf.Tensor,
  canvas?: HTMLCanvasElement
): Promise<HTMLCanvasElement> {
  const targetCanvas = canvas || document.createElement('canvas')

  // skip the leading batch dimension for 4D tensors
  const dims = imgTensor.shape.slice(isTensor4D(imgTensor) ? 1 : 0)
  const [height, width, numChannels] = dims

  const imgTensor3D = tf.tidy(() => imgTensor.as3D(height, width, numChannels).toInt())
  await tf.toPixels(imgTensor3D, targetCanvas)

  // the intermediate int tensor is created outside of tidy's scope return
  imgTensor3D.dispose()

  return targetCanvas
}
\ No newline at end of file
import { NetInput } from '../../src/NetInput';
import { bufferToImage } from '../../src/utils';
import { expectAllTensorsReleased, tensor3D } from '../utils';
// Memory-leak regression specs for NetInput: every spec runs its body
// inside expectAllTensorsReleased, which asserts the tf backend holds
// no additional live tensors afterwards.
describe('NetInput', () => {
  let imgEl: HTMLImageElement
  beforeAll(async () => {
    // shared fixture: one test image, fetched and decoded once
    const img = await (await fetch('base/test/images/face1.png')).blob()
    imgEl = await bufferToImage(img)
  })
  describe('no memory leaks', () => {
    // constructing and immediately disposing a NetInput must not leak
    describe('constructor', () => {
      it('single image element', async () => {
        await expectAllTensorsReleased(() => {
          const net = new NetInput([imgEl])
          net.dispose()
        })
      })
      it('multiple image elements', async () => {
        await expectAllTensorsReleased(() => {
          const net = new NetInput([imgEl, imgEl, imgEl])
          net.dispose()
        })
      })
      it('single tf.Tensor3D', async () => {
        // tensors created by the test are disposed outside the leak check,
        // since NetInput does not own externally passed tensors
        const tensor = tensor3D()
        await expectAllTensorsReleased(() => {
          const net = new NetInput([tensor])
          net.dispose()
        })
        tensor.dispose()
      })
      it('multiple tf.Tensor3Ds', async () => {
        const tensors = [tensor3D(), tensor3D(), tensor3D()]
        await expectAllTensorsReleased(() => {
          const net = new NetInput(tensors)
          net.dispose()
        })
        tensors.forEach(t => t.dispose())
      })
    })
    // batching must not leak either; the batch tensor itself is disposed
    // explicitly by the test
    describe('toBatchTensor', () => {
      it('single image element', async () => {
        await expectAllTensorsReleased(() => {
          const net = new NetInput([imgEl])
          const batchTensor = net.toBatchTensor(100, false)
          net.dispose()
          batchTensor.dispose()
        })
      })
      it('multiple image elements', async () => {
        await expectAllTensorsReleased(() => {
          const net = new NetInput([imgEl, imgEl, imgEl])
          const batchTensor = net.toBatchTensor(100, false)
          net.dispose()
          batchTensor.dispose()
        })
      })
      // managed NetInputs clean up after themselves, so no explicit
      // net.dispose() call is made here
      it('managed, single image element', async () => {
        await expectAllTensorsReleased(() => {
          const net = (new NetInput([imgEl])).managed()
          const batchTensor = net.toBatchTensor(100, false)
          batchTensor.dispose()
        })
      })
      it('managed, multiple image elements', async () => {
        await expectAllTensorsReleased(() => {
          const net = (new NetInput([imgEl, imgEl, imgEl])).managed()
          const batchTensor = net.toBatchTensor(100, false)
          batchTensor.dispose()
        })
      })
    })
  })
})
import * as tf from '@tensorflow/tfjs-core';
import { NeuralNetwork } from '../../../src/commons/NeuralNetwork';
// Minimal NeuralNetwork subclass used to exercise the base class's
// param-path traversal, freeze/variable conversion and disposal logic.
// All params default to scalar zero tensors.
class FakeNeuralNetwork extends NeuralNetwork<any> {
  constructor(
    convFilter: tf.Tensor = tf.tensor(0),
    convBias: tf.Tensor = tf.tensor(0),
    fcWeights: tf.Tensor = tf.tensor(0)
  ) {
    super('FakeNeuralNetwork')
    // nested param structure: conv/{filter,bias} plus a flat fc entry
    this._params = {
      conv: {
        filter: convFilter,
        bias: convBias,
      },
      fc: fcWeights
    }
    // maps original weight-map paths to paths within _params
    this._paramMappings = [
      { originalPath: 'conv2d/filter', paramPath: 'conv/filter' },
      { originalPath: 'conv2d/bias', paramPath: 'conv/bias' },
      { originalPath: 'dense/weights', paramPath: 'fc' }
    ]
  }
}
// Specs for the NeuralNetwork base class, driven through the
// FakeNeuralNetwork subclass above. Each spec body runs inside tf.tidy
// so tensors created by the spec itself are cleaned up automatically.
describe('NeuralNetwork', () => {
  // path-based lookup of individual param tensors
  describe('getParamFromPath', () => {
    it('returns correct params', () => tf.tidy(() => {
      const convFilter = tf.tensor(0)
      const convBias = tf.tensor(0)
      const fcWeights = tf.tensor(0)
      const net = new FakeNeuralNetwork(convFilter, convBias, fcWeights)
      expect(net.getParamFromPath('conv/filter')).toEqual(convFilter)
      expect(net.getParamFromPath('conv/bias')).toEqual(convBias)
      expect(net.getParamFromPath('fc')).toEqual(fcWeights)
    }))
    it('throws if param is not a tensor', () => tf.tidy(() => {
      // convFilter is null here, so the path resolves to a non-tensor
      const net = new FakeNeuralNetwork(null as any)
      const fakePath = 'conv/filter'
      expect(
        () => net.getParamFromPath(fakePath)
      ).toThrowError(`traversePropertyPath - parameter is not a tensor, for path ${fakePath}`)
    }))
    it('throws if key path invalid', () => tf.tidy(() => {
      const net = new FakeNeuralNetwork()
      // 'conv2d' is an originalPath prefix, not a paramPath, so lookup fails
      const fakePath = 'conv2d/foo'
      expect(
        () => net.getParamFromPath(fakePath)
      ).toThrowError(`traversePropertyPath - object does not have property conv2d, for path ${fakePath}`)
    }))
  })
  // path-based replacement of individual param tensors
  describe('reassignParamFromPath', () => {
    it('sets correct params', () => tf.tidy(() => {
      const net = new FakeNeuralNetwork()
      const convFilter = tf.tensor(0)
      const convBias = tf.tensor(0)
      const fcWeights = tf.tensor(0)
      net.reassignParamFromPath('conv/filter', convFilter)
      net.reassignParamFromPath('conv/bias', convBias)
      net.reassignParamFromPath('fc', fcWeights)
      expect(net.params.conv.filter).toEqual(convFilter)
      expect(net.params.conv.bias).toEqual(convBias)
      expect(net.params.fc).toEqual(fcWeights)
    }))
    it('throws if param is not a tensor', () => tf.tidy(() => {
      const net = new FakeNeuralNetwork(null as any)
      const fakePath = 'conv/filter'
      expect(
        () => net.reassignParamFromPath(fakePath, tf.tensor(0))
      ).toThrowError(`traversePropertyPath - parameter is not a tensor, for path ${fakePath}`)
    }))
    it('throws if key path invalid', () => tf.tidy(() => {
      const net = new FakeNeuralNetwork()
      const fakePath = 'conv2d/foo'
      expect(
        () => net.reassignParamFromPath(fakePath, tf.tensor(0))
      ).toThrowError(`traversePropertyPath - object does not have property conv2d, for path ${fakePath}`)
    }))
  })
  // flattening of the nested param structure into { path, tensor } pairs
  describe('getParamList', () => {
    it('returns param tensors with path', () => tf.tidy(() => {
      const convFilter = tf.tensor(0)
      const convBias = tf.tensor(0)
      const fcWeights = tf.tensor(0)
      const net = new FakeNeuralNetwork(convFilter, convBias, fcWeights)
      const paramList = net.getParamList()
      expect(paramList.length).toEqual(3)
      expect(paramList[0].path).toEqual('conv/filter')
      expect(paramList[1].path).toEqual('conv/bias')
      expect(paramList[2].path).toEqual('fc')
      expect(paramList[0].tensor).toEqual(convFilter)
      expect(paramList[1].tensor).toEqual(convBias)
      expect(paramList[2].tensor).toEqual(fcWeights)
    }))
  })
  // frozen params are plain tensors (not tf.Variable)
  describe('getFrozenParams', () => {
    it('returns all frozen params', () => tf.tidy(() => {
      const convFilter = tf.tensor(0)
      const convBias = tf.tensor(0)
      // fc is a variable here, so it must be excluded from the result
      const fcWeights = tf.variable(tf.scalar(0))
      const net = new FakeNeuralNetwork(convFilter, convBias, fcWeights)
      const frozenParams = net.getFrozenParams()
      expect(frozenParams.length).toEqual(2)
      expect(frozenParams[0].path).toEqual('conv/filter')
      expect(frozenParams[1].path).toEqual('conv/bias')
      expect(frozenParams[0].tensor).toEqual(convFilter)
      expect(frozenParams[1].tensor).toEqual(convBias)
    }))
  })
  // trainable params are tf.Variable instances
  describe('getTrainableParams', () => {
    it('returns all trainable params', () => tf.tidy(() => {
      const convFilter = tf.variable(tf.scalar(0))
      const convBias = tf.variable(tf.scalar(0))
      // fc is a plain tensor here, so it must be excluded from the result
      const fcWeights = tf.tensor(0)
      const net = new FakeNeuralNetwork(convFilter, convBias, fcWeights)
      const trainableParams = net.getTrainableParams()
      expect(trainableParams.length).toEqual(2)
      expect(trainableParams[0].path).toEqual('conv/filter')
      expect(trainableParams[1].path).toEqual('conv/bias')
      expect(trainableParams[0].tensor).toEqual(convFilter)
      expect(trainableParams[1].tensor).toEqual(convBias)
    }))
  })
  describe('dispose', () => {
    it('disposes all param tensors', () => tf.tidy(() => {
      // compare tensor counts before/after to prove nothing leaked
      const numTensors = tf.memory().numTensors
      const net = new FakeNeuralNetwork()
      net.dispose()
      expect(net.params).toBe(undefined)
      expect(tf.memory().numTensors - numTensors).toEqual(0)
    }))
  })
  // variable() converts every param tensor into a trainable tf.Variable
  describe('variable', () => {
    it('make all param tensors trainable', () => tf.tidy(() => {
      const net = new FakeNeuralNetwork()
      net.variable()
      expect(net.params.conv.filter instanceof tf.Variable).toBe(true)
      expect(net.params.conv.bias instanceof tf.Variable).toBe(true)
      expect(net.params.fc instanceof tf.Variable).toBe(true)
    }))
    it('disposes old tensors', () => tf.tidy(() => {
      const net = new FakeNeuralNetwork()
      const numTensors = tf.memory().numTensors
      net.variable()
      // conversion must release the replaced plain tensors
      expect(tf.memory().numTensors - numTensors).toEqual(0)
    }))
  })
  // freeze() is the inverse: variables become plain tensors again
  describe('freeze', () => {
    it('freezes all param variables', () => tf.tidy(() => {
      const net = new FakeNeuralNetwork(
        tf.variable(tf.scalar(0)),
        tf.variable(tf.scalar(0)),
        tf.variable(tf.scalar(0))
      )
      net.freeze()
      expect(net.params.conv.filter instanceof tf.Variable).toBe(false)
      expect(net.params.conv.bias instanceof tf.Variable).toBe(false)
      expect(net.params.fc instanceof tf.Variable).toBe(false)
    }))
    it('disposes old tensors', () => tf.tidy(() => {
      const net = new FakeNeuralNetwork(
        tf.variable(tf.scalar(0)),
        tf.variable(tf.scalar(0)),
        tf.variable(tf.scalar(0))
      )
      const numTensors = tf.memory().numTensors
      net.freeze()
      // conversion must release the replaced variables
      expect(tf.memory().numTensors - numTensors).toEqual(0)
    }))
  })
})
import { getModelUris } from '../../../src/commons/loadWeightMap';
// model name used to build the expected '<name>-weights_manifest.json' uris
const FAKE_DEFAULT_MODEL_NAME = 'fake_model_name'
// getModelUris derives { manifestUri, modelBaseUri } from a user supplied
// uri, which may be absent, a directory path or a full manifest file uri.
describe('loadWeightMap', () => {
  describe('getModelUris', () => {
    it('returns uris from relative url if no argument passed', () => {
      const result = getModelUris(undefined, FAKE_DEFAULT_MODEL_NAME)
      expect(result.manifestUri).toEqual(`${FAKE_DEFAULT_MODEL_NAME}-weights_manifest.json`)
      expect(result.modelBaseUri).toEqual('')
    })
    it('returns uris from relative url for empty string', () => {
      const result = getModelUris('', FAKE_DEFAULT_MODEL_NAME)
      expect(result.manifestUri).toEqual(`${FAKE_DEFAULT_MODEL_NAME}-weights_manifest.json`)
      expect(result.modelBaseUri).toEqual('')
    })
    it('returns uris for top level url, leading slash preserved', () => {
      const result = getModelUris('/', FAKE_DEFAULT_MODEL_NAME)
      expect(result.manifestUri).toEqual(`/${FAKE_DEFAULT_MODEL_NAME}-weights_manifest.json`)
      expect(result.modelBaseUri).toEqual('/')
    })
    it('returns uris, given url path', () => {
      const uri = 'path/to/modelfiles'
      const result = getModelUris(uri, FAKE_DEFAULT_MODEL_NAME)
      expect(result.manifestUri).toEqual(`${uri}/${FAKE_DEFAULT_MODEL_NAME}-weights_manifest.json`)
      expect(result.modelBaseUri).toEqual(uri)
    })
    it('returns uris, given url path, leading slash preserved', () => {
      const uri = '/path/to/modelfiles'
      // NOTE(review): uri already starts with '/', so `/${uri}` actually
      // passes '//path/to/modelfiles' — presumably getModelUris collapses
      // the duplicate leading slash; verify against its implementation
      const result = getModelUris(`/${uri}`, FAKE_DEFAULT_MODEL_NAME)
      expect(result.manifestUri).toEqual(`${uri}/${FAKE_DEFAULT_MODEL_NAME}-weights_manifest.json`)
      expect(result.modelBaseUri).toEqual(uri)
    })
    it('returns uris, given manifest uri', () => {
      // a path ending in a manifest file is used as-is
      const uri = 'path/to/modelfiles/model-weights_manifest.json'
      const result = getModelUris(uri, FAKE_DEFAULT_MODEL_NAME)
      expect(result.manifestUri).toEqual(uri)
      expect(result.modelBaseUri).toEqual('path/to/modelfiles')
    })
    it('returns uris, given manifest uri, leading slash preserved', () => {
      const uri = '/path/to/modelfiles/model-weights_manifest.json'
      const result = getModelUris(uri, FAKE_DEFAULT_MODEL_NAME)
      expect(result.manifestUri).toEqual(uri)
      expect(result.modelBaseUri).toEqual('/path/to/modelfiles')
    })
    it('returns correct uris, given external path', () => {
      const uri = 'https://example.com/path/to/modelfiles';
      const result = getModelUris(uri, FAKE_DEFAULT_MODEL_NAME)
      expect(result.manifestUri).toEqual(`${uri}/${FAKE_DEFAULT_MODEL_NAME}-weights_manifest.json`)
      expect(result.modelBaseUri).toEqual(uri)
    })
  })
})
import * as faceapi from '../../../src'; import { bufferToImage } from 'tfjs-image-recognition-base';
import { FaceLandmarks5 } from '../../../src/mtcnn/FaceLandmarks5';
import { NetInput } from '../../../src/NetInput'; import { FaceLandmarks5 } from '../../../src';
import { describeWithNets, expectAllTensorsReleased } from '../../utils'; import { describeWithNets, expectAllTensorsReleased } from '../../utils';
import { expectMtcnnResults } from './expectedResults'; import { expectMtcnnResults } from './expectedResults';
...@@ -11,7 +11,7 @@ describe('allFacesMtcnn', () => { ...@@ -11,7 +11,7 @@ describe('allFacesMtcnn', () => {
beforeAll(async () => { beforeAll(async () => {
const img = await (await fetch('base/test/images/faces.jpg')).blob() const img = await (await fetch('base/test/images/faces.jpg')).blob()
imgEl = await faceapi.bufferToImage(img) imgEl = await bufferToImage(img)
facesFaceDescriptors = await (await fetch('base/test/data/facesFaceDescriptorsMtcnn.json')).json() facesFaceDescriptors = await (await fetch('base/test/data/facesFaceDescriptorsMtcnn.json')).json()
}) })
......
import * as faceapi from '../../../src';
import { describeWithNets, expectAllTensorsReleased, expectRectClose, expectPointClose } from '../../utils';
import { expectedSsdBoxes } from './expectedResults';
import { NetInput } from '../../../src/NetInput';
import { toNetInput } from '../../../src';
import * as tf from '@tensorflow/tfjs-core'; import * as tf from '@tensorflow/tfjs-core';
import { Point } from '../../../src/Point';
import { bufferToImage, NetInput, Point, toNetInput } from '../../../src';
import { describeWithNets, expectAllTensorsReleased, expectPointClose, expectRectClose } from '../../utils';
import { expectedSsdBoxes } from './expectedResults';
describe('allFacesSsdMobilenetv1', () => { describe('allFacesSsdMobilenetv1', () => {
...@@ -14,7 +13,7 @@ describe('allFacesSsdMobilenetv1', () => { ...@@ -14,7 +13,7 @@ describe('allFacesSsdMobilenetv1', () => {
beforeAll(async () => { beforeAll(async () => {
const img = await (await fetch('base/test/images/faces.jpg')).blob() const img = await (await fetch('base/test/images/faces.jpg')).blob()
imgEl = await faceapi.bufferToImage(img) imgEl = await bufferToImage(img)
facesFaceLandmarkPositions = await (await fetch('base/test/data/facesFaceLandmarkPositions.json')).json() facesFaceLandmarkPositions = await (await fetch('base/test/data/facesFaceLandmarkPositions.json')).json()
facesFaceDescriptors = await (await fetch('base/test/data/facesFaceDescriptorsSsd.json')).json() facesFaceDescriptors = await (await fetch('base/test/data/facesFaceDescriptorsSsd.json')).json()
}) })
......
import * as faceapi from '../../../src';
import { describeWithNets, expectAllTensorsReleased, expectRectClose, expectPointClose, expectMaxDelta } from '../../utils';
import { expectedTinyYolov2SeparableConvBoxes } from './expectedResults';
import { NetInput } from '../../../src/NetInput';
import { toNetInput } from '../../../src';
import * as tf from '@tensorflow/tfjs-core'; import * as tf from '@tensorflow/tfjs-core';
import { Point } from '../../../src/Point';
import { bufferToImage, NetInput, Point, toNetInput } from '../../../src';
import { SizeType } from '../../../src/tinyYolov2/types'; import { SizeType } from '../../../src/tinyYolov2/types';
import { describeWithNets, expectAllTensorsReleased, expectMaxDelta, expectPointClose, expectRectClose } from '../../utils';
import { expectedTinyYolov2SeparableConvBoxes } from './expectedResults';
describe('allFacesTinyYolov2', () => { describe('allFacesTinyYolov2', () => {
...@@ -15,7 +13,7 @@ describe('allFacesTinyYolov2', () => { ...@@ -15,7 +13,7 @@ describe('allFacesTinyYolov2', () => {
beforeAll(async () => { beforeAll(async () => {
const img = await (await fetch('base/test/images/faces.jpg')).blob() const img = await (await fetch('base/test/images/faces.jpg')).blob()
imgEl = await faceapi.bufferToImage(img) imgEl = await bufferToImage(img)
facesFaceLandmarkPositions = await (await fetch('base/test/data/facesFaceLandmarkPositions.json')).json() facesFaceLandmarkPositions = await (await fetch('base/test/data/facesFaceLandmarkPositions.json')).json()
facesFaceDescriptors = await (await fetch('base/test/data/facesFaceDescriptorsSsd.json')).json() facesFaceDescriptors = await (await fetch('base/test/data/facesFaceDescriptorsSsd.json')).json()
}) })
......
import * as faceapi from '../../../src'; import * as faceapi from '../../../src';
import { Point } from '../../../src/Point';
import { expectMaxDelta, expectPointClose, expectRectClose } from '../../utils'; import { expectMaxDelta, expectPointClose, expectRectClose } from '../../utils';
import { Point } from '../../../src';
export const expectedSsdBoxes = [ export const expectedSsdBoxes = [
{ x: 48, y: 253, width: 104, height: 129 }, { x: 48, y: 253, width: 104, height: 129 },
......
import * as tf from '@tensorflow/tfjs-core'; import * as tf from '@tensorflow/tfjs-core';
import * as faceapi from '../../../src'; import {
import { isTensor3D } from '../../../src/commons/isTensor'; bufferToImage,
import { Point } from '../../../src/Point'; createFaceLandmarkNet,
import { Dimensions, TMediaElement } from '../../../src/types'; Dimensions,
import { expectMaxDelta, expectAllTensorsReleased, describeWithNets } from '../../utils'; isTensor3D,
import { NetInput } from '../../../src/NetInput'; NetInput,
import { toNetInput } from '../../../src'; Point,
TMediaElement,
toNetInput,
} from '../../../src';
import { FaceLandmarks68 } from '../../../src/classes/FaceLandmarks68';
import { FaceLandmarkNet } from '../../../src/faceLandmarkNet/FaceLandmarkNet';
import { describeWithNets, expectAllTensorsReleased, expectMaxDelta } from '../../utils';
function getInputDims (input: tf.Tensor | TMediaElement): Dimensions { function getInputDims (input: tf.Tensor | TMediaElement): Dimensions {
if (input instanceof tf.Tensor) { if (input instanceof tf.Tensor) {
...@@ -27,11 +33,11 @@ describe('faceLandmarkNet', () => { ...@@ -27,11 +33,11 @@ describe('faceLandmarkNet', () => {
beforeAll(async () => { beforeAll(async () => {
const img1 = await (await fetch('base/test/images/face1.png')).blob() const img1 = await (await fetch('base/test/images/face1.png')).blob()
imgEl1 = await faceapi.bufferToImage(img1) imgEl1 = await bufferToImage(img1)
const img2 = await (await fetch('base/test/images/face2.png')).blob() const img2 = await (await fetch('base/test/images/face2.png')).blob()
imgEl2 = await faceapi.bufferToImage(img2) imgEl2 = await bufferToImage(img2)
const imgRect = await (await fetch('base/test/images/face_rectangular.png')).blob() const imgRect = await (await fetch('base/test/images/face_rectangular.png')).blob()
imgElRect = await faceapi.bufferToImage(imgRect) imgElRect = await bufferToImage(imgRect)
faceLandmarkPositions1 = await (await fetch('base/test/data/faceLandmarkPositions1.json')).json() faceLandmarkPositions1 = await (await fetch('base/test/data/faceLandmarkPositions1.json')).json()
faceLandmarkPositions2 = await (await fetch('base/test/data/faceLandmarkPositions2.json')).json() faceLandmarkPositions2 = await (await fetch('base/test/data/faceLandmarkPositions2.json')).json()
faceLandmarkPositionsRect = await (await fetch('base/test/data/faceLandmarkPositionsRect.json')).json() faceLandmarkPositionsRect = await (await fetch('base/test/data/faceLandmarkPositionsRect.json')).json()
...@@ -42,7 +48,7 @@ describe('faceLandmarkNet', () => { ...@@ -42,7 +48,7 @@ describe('faceLandmarkNet', () => {
it('computes face landmarks for squared input', async () => { it('computes face landmarks for squared input', async () => {
const { width, height } = imgEl1 const { width, height } = imgEl1
const result = await faceLandmarkNet.detectLandmarks(imgEl1) as faceapi.FaceLandmarks68 const result = await faceLandmarkNet.detectLandmarks(imgEl1) as FaceLandmarks68
expect(result.getImageWidth()).toEqual(width) expect(result.getImageWidth()).toEqual(width)
expect(result.getImageHeight()).toEqual(height) expect(result.getImageHeight()).toEqual(height)
expect(result.getShift().x).toEqual(0) expect(result.getShift().x).toEqual(0)
...@@ -56,7 +62,7 @@ describe('faceLandmarkNet', () => { ...@@ -56,7 +62,7 @@ describe('faceLandmarkNet', () => {
it('computes face landmarks for rectangular input', async () => { it('computes face landmarks for rectangular input', async () => {
const { width, height } = imgElRect const { width, height } = imgElRect
const result = await faceLandmarkNet.detectLandmarks(imgElRect) as faceapi.FaceLandmarks68 const result = await faceLandmarkNet.detectLandmarks(imgElRect) as FaceLandmarks68
expect(result.getImageWidth()).toEqual(width) expect(result.getImageWidth()).toEqual(width)
expect(result.getImageHeight()).toEqual(height) expect(result.getImageHeight()).toEqual(height)
expect(result.getShift().x).toEqual(0) expect(result.getShift().x).toEqual(0)
...@@ -74,7 +80,7 @@ describe('faceLandmarkNet', () => { ...@@ -74,7 +80,7 @@ describe('faceLandmarkNet', () => {
it('computes face landmarks for squared input', async () => { it('computes face landmarks for squared input', async () => {
const { width, height } = imgEl1 const { width, height } = imgEl1
const result = await faceLandmarkNet.detectLandmarks(imgEl1) as faceapi.FaceLandmarks68 const result = await faceLandmarkNet.detectLandmarks(imgEl1) as FaceLandmarks68
expect(result.getImageWidth()).toEqual(width) expect(result.getImageWidth()).toEqual(width)
expect(result.getImageHeight()).toEqual(height) expect(result.getImageHeight()).toEqual(height)
expect(result.getShift().x).toEqual(0) expect(result.getShift().x).toEqual(0)
...@@ -88,7 +94,7 @@ describe('faceLandmarkNet', () => { ...@@ -88,7 +94,7 @@ describe('faceLandmarkNet', () => {
it('computes face landmarks for rectangular input', async () => { it('computes face landmarks for rectangular input', async () => {
const { width, height } = imgElRect const { width, height } = imgElRect
const result = await faceLandmarkNet.detectLandmarks(imgElRect) as faceapi.FaceLandmarks68 const result = await faceLandmarkNet.detectLandmarks(imgElRect) as FaceLandmarks68
expect(result.getImageWidth()).toEqual(width) expect(result.getImageWidth()).toEqual(width)
expect(result.getImageHeight()).toEqual(height) expect(result.getImageHeight()).toEqual(height)
expect(result.getShift().x).toEqual(0) expect(result.getShift().x).toEqual(0)
...@@ -112,7 +118,7 @@ describe('faceLandmarkNet', () => { ...@@ -112,7 +118,7 @@ describe('faceLandmarkNet', () => {
faceLandmarkPositionsRect faceLandmarkPositionsRect
] ]
const results = await faceLandmarkNet.detectLandmarks(inputs) as faceapi.FaceLandmarks68[] const results = await faceLandmarkNet.detectLandmarks(inputs) as FaceLandmarks68[]
expect(Array.isArray(results)).toBe(true) expect(Array.isArray(results)).toBe(true)
expect(results.length).toEqual(3) expect(results.length).toEqual(3)
results.forEach((result, batchIdx) => { results.forEach((result, batchIdx) => {
...@@ -137,7 +143,7 @@ describe('faceLandmarkNet', () => { ...@@ -137,7 +143,7 @@ describe('faceLandmarkNet', () => {
faceLandmarkPositionsRect faceLandmarkPositionsRect
] ]
const results = await faceLandmarkNet.detectLandmarks(inputs) as faceapi.FaceLandmarks68[] const results = await faceLandmarkNet.detectLandmarks(inputs) as FaceLandmarks68[]
expect(Array.isArray(results)).toBe(true) expect(Array.isArray(results)).toBe(true)
expect(results.length).toEqual(3) expect(results.length).toEqual(3)
results.forEach((result, batchIdx) => { results.forEach((result, batchIdx) => {
...@@ -162,7 +168,7 @@ describe('faceLandmarkNet', () => { ...@@ -162,7 +168,7 @@ describe('faceLandmarkNet', () => {
faceLandmarkPositionsRect faceLandmarkPositionsRect
] ]
const results = await faceLandmarkNet.detectLandmarks(tf.stack(inputs) as tf.Tensor4D) as faceapi.FaceLandmarks68[] const results = await faceLandmarkNet.detectLandmarks(tf.stack(inputs) as tf.Tensor4D) as FaceLandmarks68[]
expect(Array.isArray(results)).toBe(true) expect(Array.isArray(results)).toBe(true)
expect(results.length).toEqual(2) expect(results.length).toEqual(2)
results.forEach((result, batchIdx) => { results.forEach((result, batchIdx) => {
...@@ -187,7 +193,7 @@ describe('faceLandmarkNet', () => { ...@@ -187,7 +193,7 @@ describe('faceLandmarkNet', () => {
faceLandmarkPositionsRect faceLandmarkPositionsRect
] ]
const results = await faceLandmarkNet.detectLandmarks(inputs) as faceapi.FaceLandmarks68[] const results = await faceLandmarkNet.detectLandmarks(inputs) as FaceLandmarks68[]
expect(Array.isArray(results)).toBe(true) expect(Array.isArray(results)).toBe(true)
expect(results.length).toEqual(3) expect(results.length).toEqual(3)
results.forEach((result, batchIdx) => { results.forEach((result, batchIdx) => {
...@@ -213,7 +219,7 @@ describe('faceLandmarkNet', () => { ...@@ -213,7 +219,7 @@ describe('faceLandmarkNet', () => {
await expectAllTensorsReleased(async () => { await expectAllTensorsReleased(async () => {
const res = await fetch('base/weights_uncompressed/face_landmark_68_model.weights') const res = await fetch('base/weights_uncompressed/face_landmark_68_model.weights')
const weights = new Float32Array(await res.arrayBuffer()) const weights = new Float32Array(await res.arrayBuffer())
const net = faceapi.createFaceLandmarkNet(weights) const net = createFaceLandmarkNet(weights)
net.dispose() net.dispose()
}) })
}) })
...@@ -224,7 +230,7 @@ describe('faceLandmarkNet', () => { ...@@ -224,7 +230,7 @@ describe('faceLandmarkNet', () => {
it('disposes all param tensors', async () => { it('disposes all param tensors', async () => {
await expectAllTensorsReleased(async () => { await expectAllTensorsReleased(async () => {
const net = new faceapi.FaceLandmarkNet() const net = new FaceLandmarkNet()
await net.load('base/weights') await net.load('base/weights')
net.dispose() net.dispose()
}) })
......
import * as tf from '@tensorflow/tfjs-core'; import * as tf from '@tensorflow/tfjs-core';
import * as faceapi from '../../../src'; import { bufferToImage, FaceRecognitionNet, NetInput, toNetInput } from '../../../src';
import { NetInput } from '../../../src/NetInput'; import { createFaceRecognitionNet } from '../../../src/faceRecognitionNet';
import { expectAllTensorsReleased, describeWithNets } from '../../utils'; import { describeWithNets, expectAllTensorsReleased } from '../../utils';
import { toNetInput } from '../../../src';
describe('faceRecognitionNet', () => { describe('faceRecognitionNet', () => {
...@@ -16,11 +15,11 @@ describe('faceRecognitionNet', () => { ...@@ -16,11 +15,11 @@ describe('faceRecognitionNet', () => {
beforeAll(async () => { beforeAll(async () => {
const img1 = await (await fetch('base/test/images/face1.png')).blob() const img1 = await (await fetch('base/test/images/face1.png')).blob()
imgEl1 = await faceapi.bufferToImage(img1) imgEl1 = await bufferToImage(img1)
const img2 = await (await fetch('base/test/images/face2.png')).blob() const img2 = await (await fetch('base/test/images/face2.png')).blob()
imgEl2 = await faceapi.bufferToImage(img2) imgEl2 = await bufferToImage(img2)
const imgRect = await (await fetch('base/test/images/face_rectangular.png')).blob() const imgRect = await (await fetch('base/test/images/face_rectangular.png')).blob()
imgElRect = await faceapi.bufferToImage(imgRect) imgElRect = await bufferToImage(imgRect)
faceDescriptor1 = await (await fetch('base/test/data/faceDescriptor1.json')).json() faceDescriptor1 = await (await fetch('base/test/data/faceDescriptor1.json')).json()
faceDescriptor2 = await (await fetch('base/test/data/faceDescriptor2.json')).json() faceDescriptor2 = await (await fetch('base/test/data/faceDescriptor2.json')).json()
faceDescriptorRect = await (await fetch('base/test/data/faceDescriptorRect.json')).json() faceDescriptorRect = await (await fetch('base/test/data/faceDescriptorRect.json')).json()
...@@ -141,7 +140,7 @@ describe('faceRecognitionNet', () => { ...@@ -141,7 +140,7 @@ describe('faceRecognitionNet', () => {
await expectAllTensorsReleased(async () => { await expectAllTensorsReleased(async () => {
const res = await fetch('base/weights_uncompressed/face_recognition_model.weights') const res = await fetch('base/weights_uncompressed/face_recognition_model.weights')
const weights = new Float32Array(await res.arrayBuffer()) const weights = new Float32Array(await res.arrayBuffer())
const net = faceapi.createFaceRecognitionNet(weights) const net = createFaceRecognitionNet(weights)
net.dispose() net.dispose()
}) })
}) })
...@@ -152,7 +151,7 @@ describe('faceRecognitionNet', () => { ...@@ -152,7 +151,7 @@ describe('faceRecognitionNet', () => {
it('disposes all param tensors', async () => { it('disposes all param tensors', async () => {
await expectAllTensorsReleased(async () => { await expectAllTensorsReleased(async () => {
const net = new faceapi.FaceRecognitionNet() const net = new FaceRecognitionNet()
await net.load('base/weights') await net.load('base/weights')
net.dispose() net.dispose()
}) })
......
...@@ -46,7 +46,7 @@ describe('mtcnn', () => { ...@@ -46,7 +46,7 @@ describe('mtcnn', () => {
const results = await mtcnn.forward(imgEl, forwardParams) const results = await mtcnn.forward(imgEl, forwardParams)
expect(results.length).toEqual(6) expect(results.length).toEqual(6)
expectMtcnnResults(results, [5, 1, 4, 3, 2, 0], 6, 10) expectMtcnnResults(results, [5, 1, 4, 2, 3, 0], 6, 10)
}) })
it('scale steps passed, finds all faces', async () => { it('scale steps passed, finds all faces', async () => {
......
import { extractFaceTensors, Rect } from '../../src'; import { bufferToImage, extractFaceTensors, Rect } from '../../src';
import { bufferToImage } from '../../src/utils';
describe('extractFaceTensors', () => { describe('extractFaceTensors', () => {
......
import { extractFaces, Rect } from '../../src'; import { bufferToImage, createCanvasFromMedia, extractFaces, Rect } from '../../src';
import { bufferToImage, createCanvasFromMedia } from '../../src/utils';
describe('extractFaces', () => { describe('extractFaces', () => {
......
import * as tf from '@tensorflow/tfjs-core';
import { padToSquare } from '../../src/padToSquare';
import { ones, zeros } from '../utils';
describe('padToSquare', () => {
describe('even size', () => {
it('is padded to square by 2 columns', () => tf.tidy(() => {
const imgTensor = tf.tensor4d(Array(24).fill(1), [1, 4, 2, 3])
const result = padToSquare(imgTensor)
expect(result.shape).toEqual([1, 4, 4, 3])
const paddedCols = tf.unstack(result, 2)
expect(paddedCols.length).toEqual(4)
expect(paddedCols[0].dataSync()).toEqual(ones(12))
expect(paddedCols[1].dataSync()).toEqual(ones(12))
expect(paddedCols[2].dataSync()).toEqual(zeros(12))
expect(paddedCols[3].dataSync()).toEqual(zeros(12))
}))
it('is padded to square by 2 columns and centered', () => tf.tidy(() => {
const imgTensor = tf.tensor4d(Array(24).fill(1), [1, 4, 2, 3])
const result = padToSquare(imgTensor, true)
expect(result.shape).toEqual([1, 4, 4, 3])
const paddedCols = tf.unstack(result, 2)
expect(paddedCols.length).toEqual(4)
expect(paddedCols[0].dataSync()).toEqual(zeros(12))
expect(paddedCols[1].dataSync()).toEqual(ones(12))
expect(paddedCols[2].dataSync()).toEqual(ones(12))
expect(paddedCols[3].dataSync()).toEqual(zeros(12))
}))
it('is padded to square by 1 column', () => tf.tidy(() => {
const imgTensor = tf.tensor4d(Array(36).fill(1), [1, 4, 3, 3])
const result = padToSquare(imgTensor)
expect(result.shape).toEqual([1, 4, 4, 3])
const paddedCols = tf.unstack(result, 2)
expect(paddedCols.length).toEqual(4)
expect(paddedCols[0].dataSync()).toEqual(ones(12))
expect(paddedCols[1].dataSync()).toEqual(ones(12))
expect(paddedCols[2].dataSync()).toEqual(ones(12))
expect(paddedCols[3].dataSync()).toEqual(zeros(12))
}))
it('is padded to square by 1 column and centered', () => tf.tidy(() => {
const imgTensor = tf.tensor4d(Array(36).fill(1), [1, 4, 3, 3])
const result = padToSquare(imgTensor, true)
expect(result.shape).toEqual([1, 4, 4, 3])
const paddedCols = tf.unstack(result, 2)
expect(paddedCols.length).toEqual(4)
expect(paddedCols[0].dataSync()).toEqual(ones(12))
expect(paddedCols[1].dataSync()).toEqual(ones(12))
expect(paddedCols[2].dataSync()).toEqual(ones(12))
expect(paddedCols[3].dataSync()).toEqual(zeros(12))
}))
})
describe('uneven size', () => {
it('is padded to square by 3 columns', () => tf.tidy(() => {
const imgTensor = tf.tensor4d(Array(30).fill(1), [1, 5, 2, 3])
const result = padToSquare(imgTensor)
expect(result.shape).toEqual([1, 5, 5, 3])
const paddedCols = tf.unstack(result, 2)
expect(paddedCols.length).toEqual(5)
expect(paddedCols[0].dataSync()).toEqual(ones(15))
expect(paddedCols[1].dataSync()).toEqual(ones(15))
expect(paddedCols[2].dataSync()).toEqual(zeros(15))
expect(paddedCols[3].dataSync()).toEqual(zeros(15))
expect(paddedCols[4].dataSync()).toEqual(zeros(15))
}))
it('is padded to square by 3 columns and centered', () => tf.tidy(() => {
const imgTensor = tf.tensor4d(Array(30).fill(1), [1, 5, 2, 3])
const result = padToSquare(imgTensor, true)
expect(result.shape).toEqual([1, 5, 5, 3])
const paddedCols = tf.unstack(result, 2)
expect(paddedCols.length).toEqual(5)
expect(paddedCols[0].dataSync()).toEqual(zeros(15))
expect(paddedCols[1].dataSync()).toEqual(ones(15))
expect(paddedCols[2].dataSync()).toEqual(ones(15))
expect(paddedCols[3].dataSync()).toEqual(zeros(15))
expect(paddedCols[4].dataSync()).toEqual(zeros(15))
}))
it('is padded to square by 1 column', () => tf.tidy(() => {
const imgTensor = tf.tensor4d(Array(60).fill(1), [1, 5, 4, 3])
const result = padToSquare(imgTensor)
expect(result.shape).toEqual([1, 5, 5, 3])
const paddedCols = tf.unstack(result, 2)
expect(paddedCols.length).toEqual(5)
expect(paddedCols[0].dataSync()).toEqual(ones(15))
expect(paddedCols[1].dataSync()).toEqual(ones(15))
expect(paddedCols[2].dataSync()).toEqual(ones(15))
expect(paddedCols[3].dataSync()).toEqual(ones(15))
expect(paddedCols[4].dataSync()).toEqual(zeros(15))
}))
it('is padded to square by 1 column and centered', () => tf.tidy(() => {
const imgTensor = tf.tensor4d(Array(60).fill(1), [1, 5, 4, 3])
const result = padToSquare(imgTensor, true)
expect(result.shape).toEqual([1, 5, 5, 3])
const paddedCols = tf.unstack(result, 2)
expect(paddedCols.length).toEqual(5)
expect(paddedCols[0].dataSync()).toEqual(ones(15))
expect(paddedCols[1].dataSync()).toEqual(ones(15))
expect(paddedCols[2].dataSync()).toEqual(ones(15))
expect(paddedCols[3].dataSync()).toEqual(ones(15))
expect(paddedCols[4].dataSync()).toEqual(zeros(15))
}))
})
})
import * as tf from '@tensorflow/tfjs-core';
import { NetInput } from '../../src/NetInput';
import { toNetInput } from '../../src/toNetInput';
import { bufferToImage, createCanvasFromMedia } from '../../src/utils';
import { expectAllTensorsReleased } from '../utils';
describe('toNetInput', () => {

  let imgEl: HTMLImageElement, canvasEl: HTMLCanvasElement

  beforeAll(async () => {
    const blob = await (await fetch('base/test/images/face1.png')).blob()
    imgEl = await bufferToImage(blob)
    canvasEl = createCanvasFromMedia(imgEl)
  })

  // Invokes `fn`, which is expected to reject, and returns the error message
  // (or undefined if nothing was thrown).
  async function catchErrorMessage(fn: () => Promise<any>): Promise<string | undefined> {
    try {
      await fn()
    } catch (error) {
      return error.message
    }
    return undefined
  }

  describe('valid args', () => {

    it('from HTMLImageElement', async () => {
      const input = await toNetInput(imgEl, true)
      expect(input instanceof NetInput).toBe(true)
      expect(input.batchSize).toEqual(1)
    })

    it('from HTMLCanvasElement', async () => {
      const input = await toNetInput(canvasEl, true)
      expect(input instanceof NetInput).toBe(true)
      expect(input.batchSize).toEqual(1)
    })

    it('from HTMLImageElement array', async () => {
      const input = await toNetInput([imgEl, imgEl], true)
      expect(input instanceof NetInput).toBe(true)
      expect(input.batchSize).toEqual(2)
    })

    it('from HTMLCanvasElement array', async () => {
      const input = await toNetInput([canvasEl, canvasEl], true)
      expect(input instanceof NetInput).toBe(true)
      expect(input.batchSize).toEqual(2)
    })

    it('from mixed media array', async () => {
      const input = await toNetInput([imgEl, canvasEl, canvasEl], true)
      expect(input instanceof NetInput).toBe(true)
      expect(input.batchSize).toEqual(3)
    })

  })

  describe('invalid args', () => {

    it('undefined', async () => {
      const message = await catchErrorMessage(() => toNetInput(undefined as any))
      expect(message).toBe('toNetInput - expected media to be of type HTMLImageElement | HTMLVideoElement | HTMLCanvasElement | tf.Tensor3D, or to be an element id')
    })

    it('empty array', async () => {
      const message = await catchErrorMessage(() => toNetInput([]))
      expect(message).toBe('toNetInput - empty array passed as input')
    })

    it('undefined at input index 1', async () => {
      const message = await catchErrorMessage(() => toNetInput([document.createElement('img'), undefined] as any))
      expect(message).toBe('toNetInput - at input index 1: expected media to be of type HTMLImageElement | HTMLVideoElement | HTMLCanvasElement | tf.Tensor3D, or to be an element id')
    })

  })

  describe('no memory leaks', () => {

    it('single image element', async () => {
      await expectAllTensorsReleased(async () => {
        const input = await toNetInput(imgEl)
        input.dispose()
      })
    })

    it('multiple image elements', async () => {
      await expectAllTensorsReleased(async () => {
        const input = await toNetInput([imgEl, imgEl, imgEl])
        input.dispose()
      })
    })

    it('single tf.Tensor3D', async () => {
      const tensor = tf.fromPixels(imgEl)

      await expectAllTensorsReleased(async () => {
        const input = await toNetInput(tensor)
        input.dispose()
      })

      tensor.dispose()
    })

    it('multiple tf.Tensor3Ds', async () => {
      const tensors = [imgEl, imgEl, imgEl].map(el => tf.fromPixels(el))

      await expectAllTensorsReleased(async () => {
        const input = await toNetInput(tensors)
        input.dispose()
      })

      for (const t of tensors) {
        t.dispose()
      }
    })

    it('single batch size 1 tf.Tensor4Ds', async () => {
      const tensor = tf.tidy(() => tf.fromPixels(imgEl).expandDims()) as tf.Tensor4D

      await expectAllTensorsReleased(async () => {
        const input = await toNetInput(tensor)
        input.dispose()
      })

      tensor.dispose()
    })

    it('multiple batch size 1 tf.Tensor4Ds', async () => {
      const tensors = [imgEl, imgEl, imgEl]
        .map(el => tf.tidy(() => tf.fromPixels(el).expandDims())) as tf.Tensor4D[]

      await expectAllTensorsReleased(async () => {
        const input = await toNetInput(tensors)
        input.dispose()
      })

      for (const t of tensors) {
        t.dispose()
      }
    })

  })

})
import * as tf from '@tensorflow/tfjs-core'; import * as tf from '@tensorflow/tfjs-core';
import { IRect } from '../build/Rect';
import * as faceapi from '../src/'; import * as faceapi from '../src/';
import { NeuralNetwork } from '../src/commons/NeuralNetwork';
import { IPoint } from '../src/'; import { IPoint } from '../src/';
import { allFacesMtcnnFactory, allFacesSsdMobilenetv1Factory, allFacesTinyYolov2Factory } from '../src/allFacesFactory'; import { allFacesMtcnnFactory, allFacesSsdMobilenetv1Factory, allFacesTinyYolov2Factory } from '../src/allFacesFactory';
import { allFacesMtcnnFunction, allFacesSsdMobilenetv1Function, allFacesTinyYolov2, allFacesTinyYolov2Function } from '../src/globalApi'; import { allFacesMtcnnFunction, allFacesSsdMobilenetv1Function, allFacesTinyYolov2Function } from '../src/globalApi';
export function zeros(length: number): Float32Array { import { NeuralNetwork, IRect } from 'tfjs-image-recognition-base';
return new Float32Array(length)
}
export function ones(length: number): Float32Array { jasmine.DEFAULT_TIMEOUT_INTERVAL = 60000
return new Float32Array(length).fill(1)
}
export function expectMaxDelta(val1: number, val2: number, maxDelta: number) { export function expectMaxDelta(val1: number, val2: number, maxDelta: number) {
expect(Math.abs(val1 - val2)).toBeLessThan(maxDelta) expect(Math.abs(val1 - val2)).toBeLessThan(maxDelta)
...@@ -25,10 +19,6 @@ export async function expectAllTensorsReleased(fn: () => any) { ...@@ -25,10 +19,6 @@ export async function expectAllTensorsReleased(fn: () => any) {
expect(tf.memory().numTensors - numTensorsBefore).toEqual(0) expect(tf.memory().numTensors - numTensorsBefore).toEqual(0)
} }
export function tensor3D() {
return tf.tensor3d([[[0]]])
}
export function expectPointClose( export function expectPointClose(
result: IPoint, result: IPoint,
expectedPoint: IPoint, expectedPoint: IPoint,
......
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment