Unverified commit 123df714 by Vincent Mühler, committed by GitHub

Merge pull request #246 from javyxx/master

Update to tensorflowjs 1.0.1
Parents: 21ce7136, 4c7fb27f
...@@ -7,7 +7,7 @@ ...@@ -7,7 +7,7 @@
"resolved": "https://registry.npmjs.org/accepts/-/accepts-1.3.5.tgz", "resolved": "https://registry.npmjs.org/accepts/-/accepts-1.3.5.tgz",
"integrity": "sha1-63d99gEXI6OxTopywIBcjoZ0a9I=", "integrity": "sha1-63d99gEXI6OxTopywIBcjoZ0a9I=",
"requires": { "requires": {
"mime-types": "2.1.18", "mime-types": "~2.1.18",
"negotiator": "0.6.1" "negotiator": "0.6.1"
} }
}, },
...@@ -16,10 +16,10 @@ ...@@ -16,10 +16,10 @@
"resolved": "https://registry.npmjs.org/ajv/-/ajv-5.5.2.tgz", "resolved": "https://registry.npmjs.org/ajv/-/ajv-5.5.2.tgz",
"integrity": "sha1-c7Xuyj+rZT49P5Qis0GtQiBdyWU=", "integrity": "sha1-c7Xuyj+rZT49P5Qis0GtQiBdyWU=",
"requires": { "requires": {
"co": "4.6.0", "co": "^4.6.0",
"fast-deep-equal": "1.1.0", "fast-deep-equal": "^1.0.0",
"fast-json-stable-stringify": "2.0.0", "fast-json-stable-stringify": "^2.0.0",
"json-schema-traverse": "0.3.1" "json-schema-traverse": "^0.3.0"
} }
}, },
"array-flatten": { "array-flatten": {
...@@ -58,7 +58,7 @@ ...@@ -58,7 +58,7 @@
"integrity": "sha1-Y7xdy2EzG5K8Bf1SiVPDNGKgb40=", "integrity": "sha1-Y7xdy2EzG5K8Bf1SiVPDNGKgb40=",
"optional": true, "optional": true,
"requires": { "requires": {
"tweetnacl": "0.14.5" "tweetnacl": "^0.14.3"
} }
}, },
"bytes": { "bytes": {
...@@ -81,7 +81,7 @@ ...@@ -81,7 +81,7 @@
"resolved": "https://registry.npmjs.org/combined-stream/-/combined-stream-1.0.6.tgz", "resolved": "https://registry.npmjs.org/combined-stream/-/combined-stream-1.0.6.tgz",
"integrity": "sha1-cj599ugBrFYTETp+RFqbactjKBg=", "integrity": "sha1-cj599ugBrFYTETp+RFqbactjKBg=",
"requires": { "requires": {
"delayed-stream": "1.0.0" "delayed-stream": "~1.0.0"
} }
}, },
"content-disposition": { "content-disposition": {
...@@ -114,7 +114,7 @@ ...@@ -114,7 +114,7 @@
"resolved": "https://registry.npmjs.org/dashdash/-/dashdash-1.14.1.tgz", "resolved": "https://registry.npmjs.org/dashdash/-/dashdash-1.14.1.tgz",
"integrity": "sha1-hTz6D3y+L+1d4gMmuN1YEDX24vA=", "integrity": "sha1-hTz6D3y+L+1d4gMmuN1YEDX24vA=",
"requires": { "requires": {
"assert-plus": "1.0.0" "assert-plus": "^1.0.0"
} }
}, },
"debug": { "debug": {
...@@ -146,7 +146,7 @@ ...@@ -146,7 +146,7 @@
"integrity": "sha1-D8c6ntXw1Tw4GTOYUj735UN3dQU=", "integrity": "sha1-D8c6ntXw1Tw4GTOYUj735UN3dQU=",
"optional": true, "optional": true,
"requires": { "requires": {
"jsbn": "0.1.1" "jsbn": "~0.1.0"
} }
}, },
"ee-first": { "ee-first": {
...@@ -174,36 +174,36 @@ ...@@ -174,36 +174,36 @@
"resolved": "https://registry.npmjs.org/express/-/express-4.16.3.tgz", "resolved": "https://registry.npmjs.org/express/-/express-4.16.3.tgz",
"integrity": "sha1-avilAjUNsyRuzEvs9rWjTSL37VM=", "integrity": "sha1-avilAjUNsyRuzEvs9rWjTSL37VM=",
"requires": { "requires": {
"accepts": "1.3.5", "accepts": "~1.3.5",
"array-flatten": "1.1.1", "array-flatten": "1.1.1",
"body-parser": "1.18.2", "body-parser": "1.18.2",
"content-disposition": "0.5.2", "content-disposition": "0.5.2",
"content-type": "1.0.4", "content-type": "~1.0.4",
"cookie": "0.3.1", "cookie": "0.3.1",
"cookie-signature": "1.0.6", "cookie-signature": "1.0.6",
"debug": "2.6.9", "debug": "2.6.9",
"depd": "1.1.2", "depd": "~1.1.2",
"encodeurl": "1.0.2", "encodeurl": "~1.0.2",
"escape-html": "1.0.3", "escape-html": "~1.0.3",
"etag": "1.8.1", "etag": "~1.8.1",
"finalhandler": "1.1.1", "finalhandler": "1.1.1",
"fresh": "0.5.2", "fresh": "0.5.2",
"merge-descriptors": "1.0.1", "merge-descriptors": "1.0.1",
"methods": "1.1.2", "methods": "~1.1.2",
"on-finished": "2.3.0", "on-finished": "~2.3.0",
"parseurl": "1.3.2", "parseurl": "~1.3.2",
"path-to-regexp": "0.1.7", "path-to-regexp": "0.1.7",
"proxy-addr": "2.0.3", "proxy-addr": "~2.0.3",
"qs": "6.5.1", "qs": "6.5.1",
"range-parser": "1.2.0", "range-parser": "~1.2.0",
"safe-buffer": "5.1.1", "safe-buffer": "5.1.1",
"send": "0.16.2", "send": "0.16.2",
"serve-static": "1.13.2", "serve-static": "1.13.2",
"setprototypeof": "1.1.0", "setprototypeof": "1.1.0",
"statuses": "1.4.0", "statuses": "~1.4.0",
"type-is": "1.6.16", "type-is": "~1.6.16",
"utils-merge": "1.0.1", "utils-merge": "1.0.1",
"vary": "1.1.2" "vary": "~1.1.2"
}, },
"dependencies": { "dependencies": {
"body-parser": { "body-parser": {
...@@ -212,15 +212,15 @@ ...@@ -212,15 +212,15 @@
"integrity": "sha1-h2eKGdhLR9hZuDGZvVm84iKxBFQ=", "integrity": "sha1-h2eKGdhLR9hZuDGZvVm84iKxBFQ=",
"requires": { "requires": {
"bytes": "3.0.0", "bytes": "3.0.0",
"content-type": "1.0.4", "content-type": "~1.0.4",
"debug": "2.6.9", "debug": "2.6.9",
"depd": "1.1.2", "depd": "~1.1.1",
"http-errors": "1.6.3", "http-errors": "~1.6.2",
"iconv-lite": "0.4.19", "iconv-lite": "0.4.19",
"on-finished": "2.3.0", "on-finished": "~2.3.0",
"qs": "6.5.1", "qs": "6.5.1",
"raw-body": "2.3.2", "raw-body": "2.3.2",
"type-is": "1.6.16" "type-is": "~1.6.15"
} }
}, },
"iconv-lite": { "iconv-lite": {
...@@ -252,7 +252,7 @@ ...@@ -252,7 +252,7 @@
"depd": "1.1.1", "depd": "1.1.1",
"inherits": "2.0.3", "inherits": "2.0.3",
"setprototypeof": "1.0.3", "setprototypeof": "1.0.3",
"statuses": "1.4.0" "statuses": ">= 1.3.1 < 2"
} }
}, },
"setprototypeof": { "setprototypeof": {
...@@ -290,12 +290,12 @@ ...@@ -290,12 +290,12 @@
"integrity": "sha512-Y1GUDo39ez4aHAw7MysnUD5JzYX+WaIj8I57kO3aEPT1fFRL4sr7mjei97FgnwhAyyzRYmQZaTHb2+9uZ1dPtg==", "integrity": "sha512-Y1GUDo39ez4aHAw7MysnUD5JzYX+WaIj8I57kO3aEPT1fFRL4sr7mjei97FgnwhAyyzRYmQZaTHb2+9uZ1dPtg==",
"requires": { "requires": {
"debug": "2.6.9", "debug": "2.6.9",
"encodeurl": "1.0.2", "encodeurl": "~1.0.2",
"escape-html": "1.0.3", "escape-html": "~1.0.3",
"on-finished": "2.3.0", "on-finished": "~2.3.0",
"parseurl": "1.3.2", "parseurl": "~1.3.2",
"statuses": "1.4.0", "statuses": "~1.4.0",
"unpipe": "1.0.0" "unpipe": "~1.0.0"
} }
}, },
"forever-agent": { "forever-agent": {
...@@ -308,9 +308,9 @@ ...@@ -308,9 +308,9 @@
"resolved": "https://registry.npmjs.org/form-data/-/form-data-2.3.2.tgz", "resolved": "https://registry.npmjs.org/form-data/-/form-data-2.3.2.tgz",
"integrity": "sha1-SXBJi+YEwgwAXU9cI67NIda0kJk=", "integrity": "sha1-SXBJi+YEwgwAXU9cI67NIda0kJk=",
"requires": { "requires": {
"asynckit": "0.4.0", "asynckit": "^0.4.0",
"combined-stream": "1.0.6", "combined-stream": "1.0.6",
"mime-types": "2.1.18" "mime-types": "^2.1.12"
} }
}, },
"forwarded": { "forwarded": {
...@@ -328,7 +328,7 @@ ...@@ -328,7 +328,7 @@
"resolved": "https://registry.npmjs.org/getpass/-/getpass-0.1.7.tgz", "resolved": "https://registry.npmjs.org/getpass/-/getpass-0.1.7.tgz",
"integrity": "sha1-Xv+OPmhNVprkyysSgmBOi6YhSfo=", "integrity": "sha1-Xv+OPmhNVprkyysSgmBOi6YhSfo=",
"requires": { "requires": {
"assert-plus": "1.0.0" "assert-plus": "^1.0.0"
} }
}, },
"har-schema": { "har-schema": {
...@@ -341,8 +341,8 @@ ...@@ -341,8 +341,8 @@
"resolved": "https://registry.npmjs.org/har-validator/-/har-validator-5.0.3.tgz", "resolved": "https://registry.npmjs.org/har-validator/-/har-validator-5.0.3.tgz",
"integrity": "sha1-ukAsJmGU8VlW7xXg/PJCmT9qff0=", "integrity": "sha1-ukAsJmGU8VlW7xXg/PJCmT9qff0=",
"requires": { "requires": {
"ajv": "5.5.2", "ajv": "^5.1.0",
"har-schema": "2.0.0" "har-schema": "^2.0.0"
} }
}, },
"http-errors": { "http-errors": {
...@@ -350,10 +350,10 @@ ...@@ -350,10 +350,10 @@
"resolved": "https://registry.npmjs.org/http-errors/-/http-errors-1.6.3.tgz", "resolved": "https://registry.npmjs.org/http-errors/-/http-errors-1.6.3.tgz",
"integrity": "sha1-i1VoC7S+KDoLW/TqLjhYC+HZMg0=", "integrity": "sha1-i1VoC7S+KDoLW/TqLjhYC+HZMg0=",
"requires": { "requires": {
"depd": "1.1.2", "depd": "~1.1.2",
"inherits": "2.0.3", "inherits": "2.0.3",
"setprototypeof": "1.1.0", "setprototypeof": "1.1.0",
"statuses": "1.4.0" "statuses": ">= 1.4.0 < 2"
} }
}, },
"http-signature": { "http-signature": {
...@@ -361,9 +361,9 @@ ...@@ -361,9 +361,9 @@
"resolved": "https://registry.npmjs.org/http-signature/-/http-signature-1.2.0.tgz", "resolved": "https://registry.npmjs.org/http-signature/-/http-signature-1.2.0.tgz",
"integrity": "sha1-muzZJRFHcvPZW2WmCruPfBj7rOE=", "integrity": "sha1-muzZJRFHcvPZW2WmCruPfBj7rOE=",
"requires": { "requires": {
"assert-plus": "1.0.0", "assert-plus": "^1.0.0",
"jsprim": "1.4.1", "jsprim": "^1.2.2",
"sshpk": "1.14.2" "sshpk": "^1.7.0"
} }
}, },
"inherits": { "inherits": {
...@@ -448,7 +448,7 @@ ...@@ -448,7 +448,7 @@
"resolved": "https://registry.npmjs.org/mime-types/-/mime-types-2.1.18.tgz", "resolved": "https://registry.npmjs.org/mime-types/-/mime-types-2.1.18.tgz",
"integrity": "sha512-lc/aahn+t4/SWV/qcmumYjymLsWfN3ELhpmVuUFjgsORruuZPVSwAQryq+HHGvO/SI2KVX26bx+En+zhM8g8hQ==", "integrity": "sha512-lc/aahn+t4/SWV/qcmumYjymLsWfN3ELhpmVuUFjgsORruuZPVSwAQryq+HHGvO/SI2KVX26bx+En+zhM8g8hQ==",
"requires": { "requires": {
"mime-db": "1.33.0" "mime-db": "~1.33.0"
} }
}, },
"ms": { "ms": {
...@@ -494,7 +494,7 @@ ...@@ -494,7 +494,7 @@
"resolved": "https://registry.npmjs.org/proxy-addr/-/proxy-addr-2.0.3.tgz", "resolved": "https://registry.npmjs.org/proxy-addr/-/proxy-addr-2.0.3.tgz",
"integrity": "sha512-jQTChiCJteusULxjBp8+jftSQE5Obdl3k4cnmLA6WXtK6XFuWRnvVL7aCiBqaLPM8c4ph0S4tKna8XvmIwEnXQ==", "integrity": "sha512-jQTChiCJteusULxjBp8+jftSQE5Obdl3k4cnmLA6WXtK6XFuWRnvVL7aCiBqaLPM8c4ph0S4tKna8XvmIwEnXQ==",
"requires": { "requires": {
"forwarded": "0.1.2", "forwarded": "~0.1.2",
"ipaddr.js": "1.6.0" "ipaddr.js": "1.6.0"
} }
}, },
...@@ -518,26 +518,26 @@ ...@@ -518,26 +518,26 @@
"resolved": "https://registry.npmjs.org/request/-/request-2.87.0.tgz", "resolved": "https://registry.npmjs.org/request/-/request-2.87.0.tgz",
"integrity": "sha512-fcogkm7Az5bsS6Sl0sibkbhcKsnyon/jV1kF3ajGmF0c8HrttdKTPRT9hieOaQHA5HEq6r8OyWOo/o781C1tNw==", "integrity": "sha512-fcogkm7Az5bsS6Sl0sibkbhcKsnyon/jV1kF3ajGmF0c8HrttdKTPRT9hieOaQHA5HEq6r8OyWOo/o781C1tNw==",
"requires": { "requires": {
"aws-sign2": "0.7.0", "aws-sign2": "~0.7.0",
"aws4": "1.7.0", "aws4": "^1.6.0",
"caseless": "0.12.0", "caseless": "~0.12.0",
"combined-stream": "1.0.6", "combined-stream": "~1.0.5",
"extend": "3.0.1", "extend": "~3.0.1",
"forever-agent": "0.6.1", "forever-agent": "~0.6.1",
"form-data": "2.3.2", "form-data": "~2.3.1",
"har-validator": "5.0.3", "har-validator": "~5.0.3",
"http-signature": "1.2.0", "http-signature": "~1.2.0",
"is-typedarray": "1.0.0", "is-typedarray": "~1.0.0",
"isstream": "0.1.2", "isstream": "~0.1.2",
"json-stringify-safe": "5.0.1", "json-stringify-safe": "~5.0.1",
"mime-types": "2.1.18", "mime-types": "~2.1.17",
"oauth-sign": "0.8.2", "oauth-sign": "~0.8.2",
"performance-now": "2.1.0", "performance-now": "^2.1.0",
"qs": "6.5.1", "qs": "~6.5.1",
"safe-buffer": "5.1.1", "safe-buffer": "^5.1.1",
"tough-cookie": "2.3.4", "tough-cookie": "~2.3.3",
"tunnel-agent": "0.6.0", "tunnel-agent": "^0.6.0",
"uuid": "3.2.1" "uuid": "^3.1.0"
} }
}, },
"safe-buffer": { "safe-buffer": {
...@@ -556,18 +556,18 @@ ...@@ -556,18 +556,18 @@
"integrity": "sha512-E64YFPUssFHEFBvpbbjr44NCLtI1AohxQ8ZSiJjQLskAdKuriYEP6VyGEsRDH8ScozGpkaX1BGvhanqCwkcEZw==", "integrity": "sha512-E64YFPUssFHEFBvpbbjr44NCLtI1AohxQ8ZSiJjQLskAdKuriYEP6VyGEsRDH8ScozGpkaX1BGvhanqCwkcEZw==",
"requires": { "requires": {
"debug": "2.6.9", "debug": "2.6.9",
"depd": "1.1.2", "depd": "~1.1.2",
"destroy": "1.0.4", "destroy": "~1.0.4",
"encodeurl": "1.0.2", "encodeurl": "~1.0.2",
"escape-html": "1.0.3", "escape-html": "~1.0.3",
"etag": "1.8.1", "etag": "~1.8.1",
"fresh": "0.5.2", "fresh": "0.5.2",
"http-errors": "1.6.3", "http-errors": "~1.6.2",
"mime": "1.4.1", "mime": "1.4.1",
"ms": "2.0.0", "ms": "2.0.0",
"on-finished": "2.3.0", "on-finished": "~2.3.0",
"range-parser": "1.2.0", "range-parser": "~1.2.0",
"statuses": "1.4.0" "statuses": "~1.4.0"
} }
}, },
"serve-static": { "serve-static": {
...@@ -575,9 +575,9 @@ ...@@ -575,9 +575,9 @@
"resolved": "https://registry.npmjs.org/serve-static/-/serve-static-1.13.2.tgz", "resolved": "https://registry.npmjs.org/serve-static/-/serve-static-1.13.2.tgz",
"integrity": "sha512-p/tdJrO4U387R9oMjb1oj7qSMaMfmOyd4j9hOFoxZe2baQszgHcSWjuya/CiT5kgZZKRudHNOA0pYXOl8rQ5nw==", "integrity": "sha512-p/tdJrO4U387R9oMjb1oj7qSMaMfmOyd4j9hOFoxZe2baQszgHcSWjuya/CiT5kgZZKRudHNOA0pYXOl8rQ5nw==",
"requires": { "requires": {
"encodeurl": "1.0.2", "encodeurl": "~1.0.2",
"escape-html": "1.0.3", "escape-html": "~1.0.3",
"parseurl": "1.3.2", "parseurl": "~1.3.2",
"send": "0.16.2" "send": "0.16.2"
} }
}, },
...@@ -591,15 +591,15 @@ ...@@ -591,15 +591,15 @@
"resolved": "https://registry.npmjs.org/sshpk/-/sshpk-1.14.2.tgz", "resolved": "https://registry.npmjs.org/sshpk/-/sshpk-1.14.2.tgz",
"integrity": "sha1-xvxhZIo9nE52T9P8306hBeSSupg=", "integrity": "sha1-xvxhZIo9nE52T9P8306hBeSSupg=",
"requires": { "requires": {
"asn1": "0.2.3", "asn1": "~0.2.3",
"assert-plus": "1.0.0", "assert-plus": "^1.0.0",
"bcrypt-pbkdf": "1.0.1", "bcrypt-pbkdf": "^1.0.0",
"dashdash": "1.14.1", "dashdash": "^1.12.0",
"ecc-jsbn": "0.1.1", "ecc-jsbn": "~0.1.1",
"getpass": "0.1.7", "getpass": "^0.1.1",
"jsbn": "0.1.1", "jsbn": "~0.1.0",
"safer-buffer": "2.1.2", "safer-buffer": "^2.0.2",
"tweetnacl": "0.14.5" "tweetnacl": "~0.14.0"
} }
}, },
"statuses": { "statuses": {
...@@ -612,7 +612,7 @@ ...@@ -612,7 +612,7 @@
"resolved": "https://registry.npmjs.org/tough-cookie/-/tough-cookie-2.3.4.tgz", "resolved": "https://registry.npmjs.org/tough-cookie/-/tough-cookie-2.3.4.tgz",
"integrity": "sha512-TZ6TTfI5NtZnuyy/Kecv+CnoROnyXn2DN97LontgQpCwsX2XyLYCC0ENhYkehSOwAp8rTQKc/NUIF7BkQ5rKLA==", "integrity": "sha512-TZ6TTfI5NtZnuyy/Kecv+CnoROnyXn2DN97LontgQpCwsX2XyLYCC0ENhYkehSOwAp8rTQKc/NUIF7BkQ5rKLA==",
"requires": { "requires": {
"punycode": "1.4.1" "punycode": "^1.4.1"
} }
}, },
"tunnel-agent": { "tunnel-agent": {
...@@ -620,7 +620,7 @@ ...@@ -620,7 +620,7 @@
"resolved": "https://registry.npmjs.org/tunnel-agent/-/tunnel-agent-0.6.0.tgz", "resolved": "https://registry.npmjs.org/tunnel-agent/-/tunnel-agent-0.6.0.tgz",
"integrity": "sha1-J6XeoGs2sEoKmWZ3SykIaPD8QP0=", "integrity": "sha1-J6XeoGs2sEoKmWZ3SykIaPD8QP0=",
"requires": { "requires": {
"safe-buffer": "5.1.1" "safe-buffer": "^5.0.1"
} }
}, },
"tweetnacl": { "tweetnacl": {
...@@ -635,7 +635,7 @@ ...@@ -635,7 +635,7 @@
"integrity": "sha512-HRkVv/5qY2G6I8iab9cI7v1bOIdhm94dVjQCPFElW9W+3GeDOSHmy2EBYe4VTApuzolPcmgFTN3ftVJRKR2J9Q==", "integrity": "sha512-HRkVv/5qY2G6I8iab9cI7v1bOIdhm94dVjQCPFElW9W+3GeDOSHmy2EBYe4VTApuzolPcmgFTN3ftVJRKR2J9Q==",
"requires": { "requires": {
"media-typer": "0.3.0", "media-typer": "0.3.0",
"mime-types": "2.1.18" "mime-types": "~2.1.18"
} }
}, },
"unpipe": { "unpipe": {
...@@ -663,9 +663,9 @@ ...@@ -663,9 +663,9 @@
"resolved": "https://registry.npmjs.org/verror/-/verror-1.10.0.tgz", "resolved": "https://registry.npmjs.org/verror/-/verror-1.10.0.tgz",
"integrity": "sha1-OhBcoXBTr1XW4nDB+CiGguGNpAA=", "integrity": "sha1-OhBcoXBTr1XW4nDB+CiGguGNpAA=",
"requires": { "requires": {
"assert-plus": "1.0.0", "assert-plus": "^1.0.0",
"core-util-is": "1.0.2", "core-util-is": "1.0.2",
"extsprintf": "1.3.0" "extsprintf": "^1.2.0"
} }
} }
} }
......
...@@ -24,110 +24,28 @@ ...@@ -24,110 +24,28 @@
"js-tokens": "^4.0.0" "js-tokens": "^4.0.0"
} }
}, },
"@protobufjs/aspromise": {
"version": "1.1.2",
"resolved": "https://registry.npmjs.org/@protobufjs/aspromise/-/aspromise-1.1.2.tgz",
"integrity": "sha1-m4sMxmPWaafY9vXQiToU00jzD78=",
"dev": true
},
"@protobufjs/base64": {
"version": "1.1.2",
"resolved": "https://registry.npmjs.org/@protobufjs/base64/-/base64-1.1.2.tgz",
"integrity": "sha512-AZkcAA5vnN/v4PDqKyMR5lx7hZttPDgClv83E//FMNhR2TMcLUhfRUBHCmSl0oi9zMgDDqRUJkSxO3wm85+XLg==",
"dev": true
},
"@protobufjs/codegen": {
"version": "2.0.4",
"resolved": "https://registry.npmjs.org/@protobufjs/codegen/-/codegen-2.0.4.tgz",
"integrity": "sha512-YyFaikqM5sH0ziFZCN3xDC7zeGaB/d0IUb9CATugHWbd1FRFwWwt4ld4OYMPWu5a3Xe01mGAULCdqhMlPl29Jg==",
"dev": true
},
"@protobufjs/eventemitter": {
"version": "1.1.0",
"resolved": "https://registry.npmjs.org/@protobufjs/eventemitter/-/eventemitter-1.1.0.tgz",
"integrity": "sha1-NVy8mLr61ZePntCV85diHx0Ga3A=",
"dev": true
},
"@protobufjs/fetch": {
"version": "1.1.0",
"resolved": "https://registry.npmjs.org/@protobufjs/fetch/-/fetch-1.1.0.tgz",
"integrity": "sha1-upn7WYYUr2VwDBYZ/wbUVLDYTEU=",
"dev": true,
"requires": {
"@protobufjs/aspromise": "^1.1.1",
"@protobufjs/inquire": "^1.1.0"
}
},
"@protobufjs/float": {
"version": "1.0.2",
"resolved": "https://registry.npmjs.org/@protobufjs/float/-/float-1.0.2.tgz",
"integrity": "sha1-Xp4avctz/Ap8uLKR33jIy9l7h9E=",
"dev": true
},
"@protobufjs/inquire": {
"version": "1.1.0",
"resolved": "https://registry.npmjs.org/@protobufjs/inquire/-/inquire-1.1.0.tgz",
"integrity": "sha1-/yAOPnzyQp4tyvwRQIKOjMY48Ik=",
"dev": true
},
"@protobufjs/path": {
"version": "1.1.2",
"resolved": "https://registry.npmjs.org/@protobufjs/path/-/path-1.1.2.tgz",
"integrity": "sha1-bMKyDFya1q0NzP0hynZz2Nf79o0=",
"dev": true
},
"@protobufjs/pool": {
"version": "1.1.0",
"resolved": "https://registry.npmjs.org/@protobufjs/pool/-/pool-1.1.0.tgz",
"integrity": "sha1-Cf0V8tbTq/qbZbw2ZQbWrXhG/1Q=",
"dev": true
},
"@protobufjs/utf8": {
"version": "1.1.0",
"resolved": "https://registry.npmjs.org/@protobufjs/utf8/-/utf8-1.1.0.tgz",
"integrity": "sha1-p3c2C1s5oaLlEG+OhY8v0tBgxXA=",
"dev": true
},
"@tensorflow/tfjs": { "@tensorflow/tfjs": {
"version": "0.14.2", "version": "1.0.1",
"resolved": "https://registry.npmjs.org/@tensorflow/tfjs/-/tfjs-0.14.2.tgz", "resolved": "https://tclar30.es.telefonica/artifacts/repo/npm/@tensorflow/tfjs/-/tfjs-1.0.1.tgz",
"integrity": "sha512-d+kBdhn3L/BOIwwc44V1lUrs0O5s49ujhYXVHT9Hs6y3yq+OqPK10am16H1fNcxeMn12/3gGphebglObTD0/Sg==", "integrity": "sha512-EPFnB+ihJc11npoVBm8PWLfgGcMh8KhU2y7T4hpNNDRPTOvZqD/xx5ApVV9j300IHMKcUup25S6V2e5CfTkTbg==",
"dev": true,
"requires": {
"@tensorflow/tfjs-converter": "0.7.2",
"@tensorflow/tfjs-core": "0.14.5",
"@tensorflow/tfjs-data": "0.1.7",
"@tensorflow/tfjs-layers": "0.9.2"
},
"dependencies": {
"@tensorflow/tfjs-core": {
"version": "0.14.5",
"resolved": "https://registry.npmjs.org/@tensorflow/tfjs-core/-/tfjs-core-0.14.5.tgz",
"integrity": "sha512-CSUgKuC17J1Ylr1s6iD1k2/tJr9lD16sUEjtzJbtiuTYCELOwujGK/1htunA7o3BwLuU7aqEI92MoKElEKa7qA==",
"dev": true, "dev": true,
"requires": { "requires": {
"@types/seedrandom": "2.4.27", "@tensorflow/tfjs-converter": "1.0.1",
"@types/webgl-ext": "0.0.30", "@tensorflow/tfjs-core": "1.0.1",
"@types/webgl2": "0.0.4", "@tensorflow/tfjs-data": "1.0.1",
"seedrandom": "2.4.3" "@tensorflow/tfjs-layers": "1.0.1"
}
}
} }
}, },
"@tensorflow/tfjs-converter": { "@tensorflow/tfjs-converter": {
"version": "0.7.2", "version": "1.0.1",
"resolved": "https://registry.npmjs.org/@tensorflow/tfjs-converter/-/tfjs-converter-0.7.2.tgz", "resolved": "https://tclar30.es.telefonica/artifacts/repo/npm/@tensorflow/tfjs-converter/-/tfjs-converter-1.0.1.tgz",
"integrity": "sha512-m46mtaF57x2NcxlNUKdJOCUp3ZSJU9bp9MzyEQ0Iz1bW2kKIxx1DDRjuP0fAeHX5H5Mh/tWIHB9yK6NwLz+aQQ==", "integrity": "sha512-YpvonHCyTM8imuZU025uc2JLHITUEOvxqku01cV4N018pQnKAvbMuIC4xGRWtkTgE4+GArzR5SLEUFV0MrVjhQ==",
"dev": true, "dev": true
"requires": {
"@types/long": "~3.0.32",
"protobufjs": "~6.8.6"
}
}, },
"@tensorflow/tfjs-core": { "@tensorflow/tfjs-core": {
"version": "0.14.2", "version": "1.0.1",
"resolved": "https://registry.npmjs.org/@tensorflow/tfjs-core/-/tfjs-core-0.14.2.tgz", "resolved": "https://tclar30.es.telefonica/artifacts/repo/npm/@tensorflow/tfjs-core/-/tfjs-core-1.0.1.tgz",
"integrity": "sha512-VVbcu6H3ioKCkfkep/gQASfzPnQt3C5v+4ppH9pQ6Lf0lD+l3NMuMJYxa8Wjac1TfiWhFEX58bJvhpMfTGsUlg==", "integrity": "sha512-VIr0SqsezNg/9mLc+fUNYE+0hkZo/F83Pcs9XKjWlE/mpMyjIHH5F2xnn4JAfJO5gWQLtAWHd8P7IzM+1W5r/A==",
"requires": { "requires": {
"@types/seedrandom": "2.4.27", "@types/seedrandom": "2.4.27",
"@types/webgl-ext": "0.0.30", "@types/webgl-ext": "0.0.30",
...@@ -136,9 +54,9 @@ ...@@ -136,9 +54,9 @@
} }
}, },
"@tensorflow/tfjs-data": { "@tensorflow/tfjs-data": {
"version": "0.1.7", "version": "1.0.1",
"resolved": "https://registry.npmjs.org/@tensorflow/tfjs-data/-/tfjs-data-0.1.7.tgz", "resolved": "https://tclar30.es.telefonica/artifacts/repo/npm/@tensorflow/tfjs-data/-/tfjs-data-1.0.1.tgz",
"integrity": "sha512-RENjeBdBLq7GS9594kQx2GbM0WQV16VfxzzB0j2sq5vJh9GZQi2DB5Emq2LqZWs5rSeh7PDHZylGOn/ve6f8PA==", "integrity": "sha512-XaB2Uaz5Mzgq81NfQxdA13O27LOlwl//kMLno2P8JGb4D/2I8CaNzlL7HpbBpXp2mZvdFDUZsnK/nbKTka+vqw==",
"dev": true, "dev": true,
"requires": { "requires": {
"@types/node-fetch": "^2.1.2", "@types/node-fetch": "^2.1.2",
...@@ -148,25 +66,25 @@ ...@@ -148,25 +66,25 @@
"dependencies": { "dependencies": {
"node-fetch": { "node-fetch": {
"version": "2.1.2", "version": "2.1.2",
"resolved": "https://registry.npmjs.org/node-fetch/-/node-fetch-2.1.2.tgz", "resolved": "https://tclar30.es.telefonica/artifacts/repo/npm/node-fetch/-/node-fetch-2.1.2.tgz",
"integrity": "sha1-q4hOjn5X44qUR1POxwb3iNF2i7U=", "integrity": "sha1-q4hOjn5X44qUR1POxwb3iNF2i7U=",
"dev": true "dev": true
} }
} }
}, },
"@tensorflow/tfjs-layers": { "@tensorflow/tfjs-layers": {
"version": "0.9.2", "version": "1.0.1",
"resolved": "https://registry.npmjs.org/@tensorflow/tfjs-layers/-/tfjs-layers-0.9.2.tgz", "resolved": "https://tclar30.es.telefonica/artifacts/repo/npm/@tensorflow/tfjs-layers/-/tfjs-layers-1.0.1.tgz",
"integrity": "sha512-peB824cEXRBy5IgZPIodd8zpQ/54VGOYbR+zY+Q1Le7v3Np05EoDcL8Z98MtpBHo6jOM7b/3Lf2zjfJVv2qxJA==", "integrity": "sha512-cI703R/SHRmBstBtA939ri9acSs6lbcDisa2+yc8YMgo38jokO6t06akKPZSZcQFK5gyusDWAYpMDxvI3lcAWA==",
"dev": true "dev": true
}, },
"@tensorflow/tfjs-node": { "@tensorflow/tfjs-node": {
"version": "0.2.3", "version": "1.0.1",
"resolved": "https://registry.npmjs.org/@tensorflow/tfjs-node/-/tfjs-node-0.2.3.tgz", "resolved": "https://tclar30.es.telefonica/artifacts/repo/npm/@tensorflow/tfjs-node/-/tfjs-node-1.0.1.tgz",
"integrity": "sha512-+VXi6GLsVXXido2DhzK2e1Y/qM9MvQNbbA00TFgGuVbGMmeX0ey97t6W23dT8dnDVPZprC2XSFumcpRoKe8ENg==", "integrity": "sha512-lGRfG5LgHqXLvVof8Xj3PYVqpyjl/vP282G6ezvE7ikh48orcpFs1P/f+70Bf95cT1LrK0ecmg9ZWE/jftXgRA==",
"dev": true, "dev": true,
"requires": { "requires": {
"@tensorflow/tfjs": "~0.14.2", "@tensorflow/tfjs": "~1.0.1",
"adm-zip": "^0.4.11", "adm-zip": "^0.4.11",
"bindings": "~1.3.0", "bindings": "~1.3.0",
"https-proxy-agent": "^2.2.1", "https-proxy-agent": "^2.2.1",
...@@ -188,12 +106,6 @@ ...@@ -188,12 +106,6 @@
"integrity": "sha512-BaOFpaddRVV8qykJoWHrHtamml880oh0+DIZWbtJgx0pu+KhDF1gER5hSfCIfzyMrbjMuYFnLUfyo1l0JUVU3Q==", "integrity": "sha512-BaOFpaddRVV8qykJoWHrHtamml880oh0+DIZWbtJgx0pu+KhDF1gER5hSfCIfzyMrbjMuYFnLUfyo1l0JUVU3Q==",
"dev": true "dev": true
}, },
"@types/long": {
"version": "3.0.32",
"resolved": "https://registry.npmjs.org/@types/long/-/long-3.0.32.tgz",
"integrity": "sha512-ZXyOOm83p7X8p3s0IYM3VeueNmHpkk/yMlP8CLeOnEcu6hIwPH7YjZBvhQkR0ZFS2DqZAxKtJ/M5fcuv3OU5BA==",
"dev": true
},
"@types/node": { "@types/node": {
"version": "10.12.18", "version": "10.12.18",
"resolved": "https://registry.npmjs.org/@types/node/-/node-10.12.18.tgz", "resolved": "https://registry.npmjs.org/@types/node/-/node-10.12.18.tgz",
...@@ -201,9 +113,9 @@ ...@@ -201,9 +113,9 @@
"dev": true "dev": true
}, },
"@types/node-fetch": { "@types/node-fetch": {
"version": "2.1.4", "version": "2.1.6",
"resolved": "https://registry.npmjs.org/@types/node-fetch/-/node-fetch-2.1.4.tgz", "resolved": "https://tclar30.es.telefonica/artifacts/repo/npm/@types/node-fetch/-/node-fetch-2.1.6.tgz",
"integrity": "sha512-tR1ekaXUGpmzOcDXWU9BW73YfA2/VW1DF1FH+wlJ82BbCSnWTbdX+JkqWQXWKIGsFPnPsYadbXfNgz28g+ccWg==", "integrity": "sha512-Hv1jgh3pfpUEl2F2mqUd1AfLSk1YbUCeBJFaP36t7esAO617dErqdxWb5cdG2NfJGOofkmBW36fdx0dVewxDRg==",
"dev": true, "dev": true,
"requires": { "requires": {
"@types/node": "*" "@types/node": "*"
...@@ -248,7 +160,7 @@ ...@@ -248,7 +160,7 @@
}, },
"adm-zip": { "adm-zip": {
"version": "0.4.13", "version": "0.4.13",
"resolved": "https://registry.npmjs.org/adm-zip/-/adm-zip-0.4.13.tgz", "resolved": "https://tclar30.es.telefonica/artifacts/repo/npm/adm-zip/-/adm-zip-0.4.13.tgz",
"integrity": "sha512-fERNJX8sOXfel6qCBCMPvZLzENBEhZTzKqg6vrOW5pvoEaQuJhRU4ndTAh6lHOxn1I6jnz2NHra56ZODM751uw==", "integrity": "sha512-fERNJX8sOXfel6qCBCMPvZLzENBEhZTzKqg6vrOW5pvoEaQuJhRU4ndTAh6lHOxn1I6jnz2NHra56ZODM751uw==",
"dev": true "dev": true
}, },
...@@ -260,7 +172,7 @@ ...@@ -260,7 +172,7 @@
}, },
"agent-base": { "agent-base": {
"version": "4.2.1", "version": "4.2.1",
"resolved": "https://registry.npmjs.org/agent-base/-/agent-base-4.2.1.tgz", "resolved": "https://tclar30.es.telefonica/artifacts/repo/npm/agent-base/-/agent-base-4.2.1.tgz",
"integrity": "sha512-JVwXMr9nHYTUXsBFKUqhJwvlcYU/blreOEUkhNR2eXZIvwd+c+o5V4MgDPKWnMS/56awN3TRzIP+KoPn+roQtg==", "integrity": "sha512-JVwXMr9nHYTUXsBFKUqhJwvlcYU/blreOEUkhNR2eXZIvwd+c+o5V4MgDPKWnMS/56awN3TRzIP+KoPn+roQtg==",
"dev": true, "dev": true,
"requires": { "requires": {
...@@ -567,7 +479,7 @@ ...@@ -567,7 +479,7 @@
}, },
"bindings": { "bindings": {
"version": "1.3.1", "version": "1.3.1",
"resolved": "https://registry.npmjs.org/bindings/-/bindings-1.3.1.tgz", "resolved": "https://tclar30.es.telefonica/artifacts/repo/npm/bindings/-/bindings-1.3.1.tgz",
"integrity": "sha512-i47mqjF9UbjxJhxGf+pZ6kSxrnI3wBLlnGI2ArWJ4r0VrvDS7ZYXkprq/pLaBWYq4GM0r4zdHY+NNRqEMU7uew==", "integrity": "sha512-i47mqjF9UbjxJhxGf+pZ6kSxrnI3wBLlnGI2ArWJ4r0VrvDS7ZYXkprq/pLaBWYq4GM0r4zdHY+NNRqEMU7uew==",
"dev": true "dev": true
}, },
...@@ -1473,14 +1385,14 @@ ...@@ -1473,14 +1385,14 @@
} }
}, },
"es6-promise": { "es6-promise": {
"version": "4.2.5", "version": "4.2.6",
"resolved": "https://registry.npmjs.org/es6-promise/-/es6-promise-4.2.5.tgz", "resolved": "https://tclar30.es.telefonica/artifacts/repo/npm/es6-promise/-/es6-promise-4.2.6.tgz",
"integrity": "sha512-n6wvpdE43VFtJq+lUDYDBFUwV8TZbuGXLV4D6wKafg13ldznKsyEvatubnmUe31zcvelSzOHF+XbaT+Bl9ObDg==", "integrity": "sha512-aRVgGdnmW2OiySVPUC9e6m+plolMAJKjZnQlCwNSuK5yQ0JN61DZSO1X1Ufd1foqWRAlig0rhduTCHe7sVtK5Q==",
"dev": true "dev": true
}, },
"es6-promisify": { "es6-promisify": {
"version": "5.0.0", "version": "5.0.0",
"resolved": "https://registry.npmjs.org/es6-promisify/-/es6-promisify-5.0.0.tgz", "resolved": "https://tclar30.es.telefonica/artifacts/repo/npm/es6-promisify/-/es6-promisify-5.0.0.tgz",
"integrity": "sha1-UQnWLz5W6pZ8S2NQWu8IKRyKUgM=", "integrity": "sha1-UQnWLz5W6pZ8S2NQWu8IKRyKUgM=",
"dev": true, "dev": true,
"requires": { "requires": {
...@@ -2683,7 +2595,7 @@ ...@@ -2683,7 +2595,7 @@
}, },
"https-proxy-agent": { "https-proxy-agent": {
"version": "2.2.1", "version": "2.2.1",
"resolved": "https://registry.npmjs.org/https-proxy-agent/-/https-proxy-agent-2.2.1.tgz", "resolved": "https://tclar30.es.telefonica/artifacts/repo/npm/https-proxy-agent/-/https-proxy-agent-2.2.1.tgz",
"integrity": "sha512-HPCTS1LW51bcyMYbxUIOO4HEOlQ1/1qRaFWcyxvwaqUS9TY88aoEuHUY33kuAh1YhVVaDQhLZsnPd+XNARWZlQ==", "integrity": "sha512-HPCTS1LW51bcyMYbxUIOO4HEOlQ1/1qRaFWcyxvwaqUS9TY88aoEuHUY33kuAh1YhVVaDQhLZsnPd+XNARWZlQ==",
"dev": true, "dev": true,
"requires": { "requires": {
...@@ -2693,7 +2605,7 @@ ...@@ -2693,7 +2605,7 @@
"dependencies": { "dependencies": {
"debug": { "debug": {
"version": "3.2.6", "version": "3.2.6",
"resolved": "https://registry.npmjs.org/debug/-/debug-3.2.6.tgz", "resolved": "https://tclar30.es.telefonica/artifacts/repo/npm/debug/-/debug-3.2.6.tgz",
"integrity": "sha512-mel+jf7nrtEl5Pn1Qx46zARXKDpBbvzezse7p7LqINmdoIk8PYP5SySaxEmYv6TZ0JyEKA1hsCId6DIhgITtWQ==", "integrity": "sha512-mel+jf7nrtEl5Pn1Qx46zARXKDpBbvzezse7p7LqINmdoIk8PYP5SySaxEmYv6TZ0JyEKA1hsCId6DIhgITtWQ==",
"dev": true, "dev": true,
"requires": { "requires": {
...@@ -2702,7 +2614,7 @@ ...@@ -2702,7 +2614,7 @@
}, },
"ms": { "ms": {
"version": "2.1.1", "version": "2.1.1",
"resolved": "https://registry.npmjs.org/ms/-/ms-2.1.1.tgz", "resolved": "https://tclar30.es.telefonica/artifacts/repo/npm/ms/-/ms-2.1.1.tgz",
"integrity": "sha512-tgp+dl5cGk28utYktBsrFqA7HKgrhgPsg6Z/EfhWI4gl1Hwq8B/GmY/0oXZ6nF8hDVesS/FpnYaD/kOWhYQvyg==", "integrity": "sha512-tgp+dl5cGk28utYktBsrFqA7HKgrhgPsg6Z/EfhWI4gl1Hwq8B/GmY/0oXZ6nF8hDVesS/FpnYaD/kOWhYQvyg==",
"dev": true "dev": true
} }
...@@ -3429,12 +3341,6 @@ ...@@ -3429,12 +3341,6 @@
} }
} }
}, },
"long": {
"version": "4.0.0",
"resolved": "https://registry.npmjs.org/long/-/long-4.0.0.tgz",
"integrity": "sha512-XsP+KhQif4bjX1kbuSiySJFNAehNxgLb6hPRGJ9QsUr8ajHkuXGdrHmFUTUUXhDwVX2R5bY4JNZEwbUiMhV+MA==",
"dev": true
},
"loud-rejection": { "loud-rejection": {
"version": "1.6.0", "version": "1.6.0",
"resolved": "https://registry.npmjs.org/loud-rejection/-/loud-rejection-1.6.0.tgz", "resolved": "https://registry.npmjs.org/loud-rejection/-/loud-rejection-1.6.0.tgz",
...@@ -3735,7 +3641,7 @@ ...@@ -3735,7 +3641,7 @@
}, },
"node-fetch": { "node-fetch": {
"version": "2.3.0", "version": "2.3.0",
"resolved": "https://registry.npmjs.org/node-fetch/-/node-fetch-2.3.0.tgz", "resolved": "https://tclar30.es.telefonica/artifacts/repo/npm/node-fetch/-/node-fetch-2.3.0.tgz",
"integrity": "sha512-MOd8pV3fxENbryESLgVIeaGKrdl+uaYhCSSVkjeOb/31/njTpcis5aWfdqgNlHIrKOLRbMnfPINPOML2CIFeXA==", "integrity": "sha512-MOd8pV3fxENbryESLgVIeaGKrdl+uaYhCSSVkjeOb/31/njTpcis5aWfdqgNlHIrKOLRbMnfPINPOML2CIFeXA==",
"dev": true "dev": true
}, },
...@@ -4241,39 +4147,10 @@ ...@@ -4241,39 +4147,10 @@
}, },
"progress": { "progress": {
"version": "2.0.3", "version": "2.0.3",
"resolved": "https://registry.npmjs.org/progress/-/progress-2.0.3.tgz", "resolved": "https://tclar30.es.telefonica/artifacts/repo/npm/progress/-/progress-2.0.3.tgz",
"integrity": "sha512-7PiHtLll5LdnKIMw100I+8xJXR5gW2QwWYkT6iJva0bXitZKa/XMrSbdmg3r2Xnaidz9Qumd0VPaMrZlF9V9sA==", "integrity": "sha512-7PiHtLll5LdnKIMw100I+8xJXR5gW2QwWYkT6iJva0bXitZKa/XMrSbdmg3r2Xnaidz9Qumd0VPaMrZlF9V9sA==",
"dev": true "dev": true
}, },
"protobufjs": {
"version": "6.8.8",
"resolved": "https://registry.npmjs.org/protobufjs/-/protobufjs-6.8.8.tgz",
"integrity": "sha512-AAmHtD5pXgZfi7GMpllpO3q1Xw1OYldr+dMUlAnffGTAhqkg72WdmSY71uKBF/JuyiKs8psYbtKrhi0ASCD8qw==",
"dev": true,
"requires": {
"@protobufjs/aspromise": "^1.1.2",
"@protobufjs/base64": "^1.1.2",
"@protobufjs/codegen": "^2.0.4",
"@protobufjs/eventemitter": "^1.1.0",
"@protobufjs/fetch": "^1.1.0",
"@protobufjs/float": "^1.0.2",
"@protobufjs/inquire": "^1.1.0",
"@protobufjs/path": "^1.1.2",
"@protobufjs/pool": "^1.1.0",
"@protobufjs/utf8": "^1.1.0",
"@types/long": "^4.0.0",
"@types/node": "^10.1.0",
"long": "^4.0.0"
},
"dependencies": {
"@types/long": {
"version": "4.0.0",
"resolved": "https://registry.npmjs.org/@types/long/-/long-4.0.0.tgz",
"integrity": "sha512-1w52Nyx4Gq47uuu0EVcsHBxZFJgurQ+rTKS3qMHxR1GY2T8c2AJYd6vZoZ9q1rupaDjU0yT+Jc2XTyXkjeMA+Q==",
"dev": true
}
}
},
"pseudomap": { "pseudomap": {
"version": "1.0.2", "version": "1.0.2",
"resolved": "https://registry.npmjs.org/pseudomap/-/pseudomap-1.0.2.tgz", "resolved": "https://registry.npmjs.org/pseudomap/-/pseudomap-1.0.2.tgz",
...@@ -5363,11 +5240,11 @@ ...@@ -5363,11 +5240,11 @@
} }
}, },
"tfjs-image-recognition-base": { "tfjs-image-recognition-base": {
"version": "0.4.1", "version": "0.5.0",
"resolved": "https://registry.npmjs.org/tfjs-image-recognition-base/-/tfjs-image-recognition-base-0.4.1.tgz", "resolved": "https://registry.npmjs.org/tfjs-image-recognition-base/-/tfjs-image-recognition-base-0.5.0.tgz",
"integrity": "sha512-oJOLByZa0OgPaLB17ZlL2qO8dIQwgV1ma4/iEFbzVbF8yd0qOomIuEGvyPzXDfD0dmyPwzj8Ysa3Jq+O375Fpw==", "integrity": "sha512-u/BPhsoieBm8bE3QXd5WdKmo/CKXoeab0x4x11yyg7O24esUM9RqdX9dI5Byw8jcjjrb705izSSznqXr61GTHg==",
"requires": { "requires": {
"@tensorflow/tfjs-core": "0.14.2", "@tensorflow/tfjs-core": "1.0.1",
"tslib": "^1.9.3" "tslib": "^1.9.3"
} }
}, },
......
...@@ -35,12 +35,12 @@ ...@@ -35,12 +35,12 @@
"author": "justadudewhohacks", "author": "justadudewhohacks",
"license": "MIT", "license": "MIT",
"dependencies": { "dependencies": {
"@tensorflow/tfjs-core": "0.14.2", "@tensorflow/tfjs-core": "1.0.1",
"tfjs-image-recognition-base": "^0.4.1", "tfjs-image-recognition-base": "^0.5.0",
"tslib": "^1.9.3" "tslib": "^1.9.3"
}, },
"devDependencies": { "devDependencies": {
"@tensorflow/tfjs-node": "^0.2.3", "@tensorflow/tfjs-node": "^1.0.1",
"@types/jasmine": "^3.3.8", "@types/jasmine": "^3.3.8",
"@types/node": "^10.12.18", "@types/node": "^10.12.18",
"canvas": "2.0.1", "canvas": "2.0.1",
......
...@@ -45,7 +45,7 @@ export class Mtcnn extends NeuralNetwork<NetParams> { ...@@ -45,7 +45,7 @@ export class Mtcnn extends NeuralNetwork<NetParams> {
const imgTensor = tf.tidy(() => const imgTensor = tf.tidy(() =>
bgrToRgbTensor( bgrToRgbTensor(
tf.expandDims(tf.fromPixels(inputCanvas)).toFloat() as tf.Tensor4D tf.expandDims(tf.browser.fromPixels(inputCanvas)).toFloat() as tf.Tensor4D
) )
) )
......
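tfjs 1.x moves `fromPixels` into the `tf.browser` namespace; the top-level `tf.fromPixels` used by the 0.x code no longer exists. A minimal sketch of the call pattern this hunk adopts (the helper name and canvas argument are illustrative, not from the source):

```ts
import * as tf from '@tensorflow/tfjs';

// tfjs 0.x: tf.fromPixels(inputCanvas)
// tfjs 1.x: tf.browser.fromPixels(inputCanvas)
function toBatchedInput(inputCanvas: HTMLCanvasElement): tf.Tensor4D {
  return tf.tidy(() =>
    // expand to a batch of 1 and convert to float, as in the hunk above
    tf.expandDims(tf.browser.fromPixels(inputCanvas)).toFloat() as tf.Tensor4D
  )
}
```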
...@@ -28,9 +28,10 @@ function extractBoundingBoxes( ...@@ -28,9 +28,10 @@ function extractBoundingBoxes(
// TODO: fix this!, maybe better to use tf.gather here // TODO: fix this!, maybe better to use tf.gather here
const indices: Point[] = [] const indices: Point[] = []
const scoresData = scoresTensor.arraySync();
for (let y = 0; y < scoresTensor.shape[0]; y++) { for (let y = 0; y < scoresTensor.shape[0]; y++) {
for (let x = 0; x < scoresTensor.shape[1]; x++) { for (let x = 0; x < scoresTensor.shape[1]; x++) {
if (scoresTensor.get(y, x) >= scoreThreshold) { if (scoresData[y][x] >= scoreThreshold) {
indices.push(new Point(x, y)) indices.push(new Point(x, y))
} }
} }
...@@ -44,13 +45,14 @@ function extractBoundingBoxes( ...@@ -44,13 +45,14 @@ function extractBoundingBoxes(
Math.round((idx.x * CELL_STRIDE + CELL_SIZE) / scale) Math.round((idx.x * CELL_STRIDE + CELL_SIZE) / scale)
) )
const score = scoresTensor.get(idx.y, idx.x) const score = scoresData[idx.y][idx.x]
const regionsData = regionsTensor.arraySync()
const region = new MtcnnBox( const region = new MtcnnBox(
regionsTensor.get(idx.y, idx.x, 0), regionsData[idx.y][idx.x][0],
regionsTensor.get(idx.y, idx.x, 1), regionsData[idx.y][idx.x][1],
regionsTensor.get(idx.y, idx.x, 2), regionsData[idx.y][idx.x][2],
regionsTensor.get(idx.y, idx.x, 3) regionsData[idx.y][idx.x][3]
) )
return { return {
......
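tfjs 1.x drops the synchronous element accessor `tensor.get(y, x)`; values are now read by downloading the tensor once via `arraySync()` (or the async `array()`) and indexing the resulting nested array, which is the pattern the hunks above switch to. A minimal sketch with illustrative names:

```ts
import * as tf from '@tensorflow/tfjs';

// tfjs 0.x: scoresTensor.get(y, x)
// tfjs 1.x: download the values once, then index plain nested arrays
function indicesAboveThreshold(scoresTensor: tf.Tensor2D, scoreThreshold: number): Array<[number, number]> {
  const scoresData = scoresTensor.arraySync()
  const indices: Array<[number, number]> = []
  for (let y = 0; y < scoresTensor.shape[0]; y++) {
    for (let x = 0; x < scoresTensor.shape[1]; x++) {
      if (scoresData[y][x] >= scoreThreshold) {
        indices.push([x, y])
      }
    }
  }
  return indices
}
```

Since `arraySync()` synchronously pulls the whole tensor off the backend, reading it once per tensor and reusing the nested array inside the loops keeps the download cost constant.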
...@@ -54,13 +54,15 @@ export async function stage2( ...@@ -54,13 +54,15 @@ export async function stage2(
) )
stats.stage2_nms = Date.now() - ts stats.stage2_nms = Date.now() - ts
const regions = indicesNms.map(idx => const regions = indicesNms.map(idx =>{
new MtcnnBox( const regionsData = rnetOuts[indices[idx]].regions.arraySync()
rnetOuts[indices[idx]].regions.get(0, 0), return new MtcnnBox(
rnetOuts[indices[idx]].regions.get(0, 1), regionsData[0][0],
rnetOuts[indices[idx]].regions.get(0, 2), regionsData[0][1],
rnetOuts[indices[idx]].regions.get(0, 3) regionsData[0][2],
regionsData[0][3]
) )
}
) )
finalScores = indicesNms.map(idx => filteredScores[idx]) finalScores = indicesNms.map(idx => filteredScores[idx])
......
...@@ -39,12 +39,14 @@ export async function stage3( ...@@ -39,12 +39,14 @@ export async function stage3(
.filter(c => c.score > scoreThreshold) .filter(c => c.score > scoreThreshold)
.map(({ idx }) => idx) .map(({ idx }) => idx)
const filteredRegions = indices.map(idx => new MtcnnBox( const filteredRegions = indices.map(idx => {
onetOuts[idx].regions.get(0, 0), const regionsData = onetOuts[idx].regions.arraySync();
onetOuts[idx].regions.get(0, 1), return new MtcnnBox(
onetOuts[idx].regions.get(0, 2), regionsData[0][0],
onetOuts[idx].regions.get(0, 3) regionsData[0][1],
)) regionsData[0][2],
regionsData[0][3]
)})
const filteredBoxes = indices const filteredBoxes = indices
.map((idx, i) => inputBoxes[idx].calibrate(filteredRegions[i])) .map((idx, i) => inputBoxes[idx].calibrate(filteredRegions[i]))
const filteredScores = indices.map(idx => scores[idx]) const filteredScores = indices.map(idx => scores[idx])
...@@ -67,11 +69,13 @@ export async function stage3( ...@@ -67,11 +69,13 @@ export async function stage3(
finalBoxes = indicesNms.map(idx => filteredBoxes[idx]) finalBoxes = indicesNms.map(idx => filteredBoxes[idx])
finalScores = indicesNms.map(idx => filteredScores[idx]) finalScores = indicesNms.map(idx => filteredScores[idx])
points = indicesNms.map((idx, i) => points = indicesNms.map((idx, i) =>
Array(5).fill(0).map((_, ptIdx) => Array(5).fill(0).map((_, ptIdx) =>{
new Point( const pointsData = onetOuts[idx].points.arraySync()
((onetOuts[idx].points.get(0, ptIdx) * (finalBoxes[i].width + 1)) + finalBoxes[i].left) , return new Point(
((onetOuts[idx].points.get(0, ptIdx + 5) * (finalBoxes[i].height + 1)) + finalBoxes[i].top) ((pointsData[0][ptIdx] * (finalBoxes[i].width + 1)) + finalBoxes[i].left) ,
((pointsData[0][ptIdx+5] * (finalBoxes[i].height + 1)) + finalBoxes[i].top)
) )
}
) )
) )
} }
......
...@@ -85,15 +85,16 @@ export class SsdMobilenetv1 extends NeuralNetwork<NetParams> { ...@@ -85,15 +85,16 @@ export class SsdMobilenetv1 extends NeuralNetwork<NetParams> {
const padX = inputSize / reshapedDims.width const padX = inputSize / reshapedDims.width
const padY = inputSize / reshapedDims.height const padY = inputSize / reshapedDims.height
const boxesData = boxes.arraySync()
const results = indices const results = indices
.map(idx => { .map(idx => {
const [top, bottom] = [ const [top, bottom] = [
Math.max(0, boxes.get(idx, 0)), Math.max(0, boxesData[idx][0]),
Math.min(1.0, boxes.get(idx, 2)) Math.min(1.0, boxesData[idx][2])
].map(val => val * padY) ].map(val => val * padY)
const [left, right] = [ const [left, right] = [
Math.max(0, boxes.get(idx, 1)), Math.max(0, boxesData[idx][1]),
Math.min(1.0, boxes.get(idx, 3)) Math.min(1.0, boxesData[idx][3])
].map(val => val * padX) ].map(val => val * padX)
return new FaceDetection( return new FaceDetection(
scoresData[idx], scoresData[idx],
......
...@@ -13,13 +13,13 @@ function depthwiseConvLayer( ...@@ -13,13 +13,13 @@ function depthwiseConvLayer(
return tf.tidy(() => { return tf.tidy(() => {
let out = tf.depthwiseConv2d(x, params.filters, strides, 'same') let out = tf.depthwiseConv2d(x, params.filters, strides, 'same')
out = tf.batchNormalization<tf.Rank.R4>( out = tf.batchNorm<tf.Rank.R4>(
out, out,
params.batch_norm_mean, params.batch_norm_mean,
params.batch_norm_variance, params.batch_norm_variance,
epsilon, params.batch_norm_offset,
params.batch_norm_scale, params.batch_norm_scale,
params.batch_norm_offset epsilon
) )
return tf.clipByValue(out, 0, 6) return tf.clipByValue(out, 0, 6)
......
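Alongside the rename from `tf.batchNormalization` to `tf.batchNorm`, the optional arguments change order: 0.x took `(x, mean, variance, varianceEpsilon, scale, offset)` while 1.x takes `(x, mean, variance, offset, scale, varianceEpsilon)`, which is why the hunk above swaps `epsilon` and `params.batch_norm_offset`. A minimal sketch with illustrative parameter names (the epsilon default is a placeholder, not the module's constant):

```ts
import * as tf from '@tensorflow/tfjs';

// tfjs 0.x: tf.batchNormalization(x, mean, variance, epsilon, scale, offset)
// tfjs 1.x: tf.batchNorm(x, mean, variance, offset, scale, epsilon)
function batchNormClipped(
  x: tf.Tensor4D,
  mean: tf.Tensor1D,
  variance: tf.Tensor1D,
  offset: tf.Tensor1D,
  scale: tf.Tensor1D,
  epsilon: number = 1e-3 // placeholder value
): tf.Tensor4D {
  return tf.tidy(() => {
    const out = tf.batchNorm<tf.Rank.R4>(x, mean, variance, offset, scale, epsilon)
    return tf.clipByValue(out, 0, 6) // same ReLU6-style clamp as the layer above
  })
}
```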
...@@ -49,14 +49,15 @@ export function nonMaxSuppression( ...@@ -49,14 +49,15 @@ export function nonMaxSuppression(
} }
function IOU(boxes: tf.Tensor2D, i: number, j: number) { function IOU(boxes: tf.Tensor2D, i: number, j: number) {
const yminI = Math.min(boxes.get(i, 0), boxes.get(i, 2)) const boxesData = boxes.arraySync()
const xminI = Math.min(boxes.get(i, 1), boxes.get(i, 3)) const yminI = Math.min(boxesData[i][0], boxesData[i][2])
const ymaxI = Math.max(boxes.get(i, 0), boxes.get(i, 2)) const xminI = Math.min(boxesData[i][1], boxesData[i][3])
const xmaxI = Math.max(boxes.get(i, 1), boxes.get(i, 3)) const ymaxI = Math.max(boxesData[i][0], boxesData[i][2])
const yminJ = Math.min(boxes.get(j, 0), boxes.get(j, 2)) const xmaxI = Math.max(boxesData[i][1], boxesData[i][3])
const xminJ = Math.min(boxes.get(j, 1), boxes.get(j, 3)) const yminJ = Math.min(boxesData[j][0], boxesData[j][2])
const ymaxJ = Math.max(boxes.get(j, 0), boxes.get(j, 2)) const xminJ = Math.min(boxesData[j][1], boxesData[j][3])
const xmaxJ = Math.max(boxes.get(j, 1), boxes.get(j, 3)) const ymaxJ = Math.max(boxesData[j][0], boxesData[j][2])
const xmaxJ = Math.max(boxesData[j][1], boxesData[j][3])
const areaI = (ymaxI - yminI) * (xmaxI - xminI) const areaI = (ymaxI - yminI) * (xmaxI - xminI)
const areaJ = (ymaxJ - yminJ) * (xmaxJ - xminJ) const areaJ = (ymaxJ - yminJ) * (xmaxJ - xminJ)
if (areaI <= 0 || areaJ <= 0) { if (areaI <= 0 || areaJ <= 0) {
......
...@@ -6,7 +6,7 @@ describe('extractFaceTensors', () => { ...@@ -6,7 +6,7 @@ describe('extractFaceTensors', () => {
let imgTensor: tf.Tensor3D let imgTensor: tf.Tensor3D
beforeAll(async () => { beforeAll(async () => {
imgTensor = tf.fromPixels(createCanvasFromMedia(await loadImage('test/images/face1.png'))) imgTensor = tf.browser.fromPixels(createCanvasFromMedia(await loadImage('test/images/face1.png')))
}) })
describe('extracts tensors', () => { describe('extracts tensors', () => {
......
...@@ -57,7 +57,7 @@ describe('faceExpressionNet', () => { ...@@ -57,7 +57,7 @@ describe('faceExpressionNet', () => {
}) })
it('computes face landmarks for batch of tf.Tensor3D', async () => { it('computes face landmarks for batch of tf.Tensor3D', async () => {
const inputs = [imgElAngry, imgElSurprised].map(el => tf.fromPixels(createCanvasFromMedia(el))) const inputs = [imgElAngry, imgElSurprised].map(el => tf.browser.fromPixels(createCanvasFromMedia(el)))
const results = await faceExpressionNet.predictExpressions(inputs) as FaceExpressionPrediction[][] const results = await faceExpressionNet.predictExpressions(inputs) as FaceExpressionPrediction[][]
expect(Array.isArray(results)).toBe(true) expect(Array.isArray(results)).toBe(true)
...@@ -80,7 +80,7 @@ describe('faceExpressionNet', () => { ...@@ -80,7 +80,7 @@ describe('faceExpressionNet', () => {
}) })
it('computes face landmarks for batch of mixed inputs', async () => { it('computes face landmarks for batch of mixed inputs', async () => {
const inputs = [imgElAngry, tf.fromPixels(createCanvasFromMedia(imgElSurprised))] const inputs = [imgElAngry, tf.browser.fromPixels(createCanvasFromMedia(imgElSurprised))]
const results = await faceExpressionNet.predictExpressions(inputs) as FaceExpressionPrediction[][] const results = await faceExpressionNet.predictExpressions(inputs) as FaceExpressionPrediction[][]
expect(Array.isArray(results)).toBe(true) expect(Array.isArray(results)).toBe(true)
...@@ -125,7 +125,7 @@ describe('faceExpressionNet', () => { ...@@ -125,7 +125,7 @@ describe('faceExpressionNet', () => {
}) })
it('single tf.Tensor3D', async () => { it('single tf.Tensor3D', async () => {
const tensor = tf.fromPixels(createCanvasFromMedia(imgElAngry)) const tensor = tf.browser.fromPixels(createCanvasFromMedia(imgElAngry))
await expectAllTensorsReleased(async () => { await expectAllTensorsReleased(async () => {
const outTensor = await faceExpressionNet.forwardInput(await toNetInput(tensor)) const outTensor = await faceExpressionNet.forwardInput(await toNetInput(tensor))
...@@ -136,7 +136,7 @@ describe('faceExpressionNet', () => { ...@@ -136,7 +136,7 @@ describe('faceExpressionNet', () => {
}) })
it('multiple tf.Tensor3Ds', async () => { it('multiple tf.Tensor3Ds', async () => {
const tensors = [imgElAngry, imgElAngry, imgElAngry].map(el => tf.fromPixels(createCanvasFromMedia(el))) const tensors = [imgElAngry, imgElAngry, imgElAngry].map(el => tf.browser.fromPixels(createCanvasFromMedia(el)))
await expectAllTensorsReleased(async () => { await expectAllTensorsReleased(async () => {
const outTensor = await faceExpressionNet.forwardInput(await toNetInput(tensors)) const outTensor = await faceExpressionNet.forwardInput(await toNetInput(tensors))
...@@ -147,7 +147,7 @@ describe('faceExpressionNet', () => { ...@@ -147,7 +147,7 @@ describe('faceExpressionNet', () => {
}) })
it('single batch size 1 tf.Tensor4Ds', async () => { it('single batch size 1 tf.Tensor4Ds', async () => {
const tensor = tf.tidy(() => tf.fromPixels(createCanvasFromMedia(imgElAngry)).expandDims()) as tf.Tensor4D const tensor = tf.tidy(() => tf.browser.fromPixels(createCanvasFromMedia(imgElAngry)).expandDims()) as tf.Tensor4D
await expectAllTensorsReleased(async () => { await expectAllTensorsReleased(async () => {
const outTensor = await faceExpressionNet.forwardInput(await toNetInput(tensor)) const outTensor = await faceExpressionNet.forwardInput(await toNetInput(tensor))
...@@ -159,7 +159,7 @@ describe('faceExpressionNet', () => { ...@@ -159,7 +159,7 @@ describe('faceExpressionNet', () => {
it('multiple batch size 1 tf.Tensor4Ds', async () => { it('multiple batch size 1 tf.Tensor4Ds', async () => {
const tensors = [imgElAngry, imgElAngry, imgElAngry] const tensors = [imgElAngry, imgElAngry, imgElAngry]
.map(el => tf.tidy(() => tf.fromPixels(createCanvasFromMedia(el)).expandDims())) as tf.Tensor4D[] .map(el => tf.tidy(() => tf.browser.fromPixels(createCanvasFromMedia(el)).expandDims())) as tf.Tensor4D[]
await expectAllTensorsReleased(async () => { await expectAllTensorsReleased(async () => {
const outTensor = await faceExpressionNet.forwardInput(await toNetInput(tensors)) const outTensor = await faceExpressionNet.forwardInput(await toNetInput(tensors))
...@@ -186,7 +186,7 @@ describe('faceExpressionNet', () => { ...@@ -186,7 +186,7 @@ describe('faceExpressionNet', () => {
}) })
it('single tf.Tensor3D', async () => { it('single tf.Tensor3D', async () => {
const tensor = tf.fromPixels(createCanvasFromMedia(imgElAngry)) const tensor = tf.browser.fromPixels(createCanvasFromMedia(imgElAngry))
await expectAllTensorsReleased(async () => { await expectAllTensorsReleased(async () => {
await faceExpressionNet.predictExpressions(tensor) await faceExpressionNet.predictExpressions(tensor)
...@@ -196,7 +196,7 @@ describe('faceExpressionNet', () => { ...@@ -196,7 +196,7 @@ describe('faceExpressionNet', () => {
}) })
it('multiple tf.Tensor3Ds', async () => { it('multiple tf.Tensor3Ds', async () => {
const tensors = [imgElAngry, imgElAngry, imgElAngry].map(el => tf.fromPixels(createCanvasFromMedia(el))) const tensors = [imgElAngry, imgElAngry, imgElAngry].map(el => tf.browser.fromPixels(createCanvasFromMedia(el)))
await expectAllTensorsReleased(async () => { await expectAllTensorsReleased(async () => {
...@@ -207,7 +207,7 @@ describe('faceExpressionNet', () => { ...@@ -207,7 +207,7 @@ describe('faceExpressionNet', () => {
}) })
it('single batch size 1 tf.Tensor4Ds', async () => { it('single batch size 1 tf.Tensor4Ds', async () => {
const tensor = tf.tidy(() => tf.fromPixels(createCanvasFromMedia(imgElAngry)).expandDims()) as tf.Tensor4D const tensor = tf.tidy(() => tf.browser.fromPixels(createCanvasFromMedia(imgElAngry)).expandDims()) as tf.Tensor4D
await expectAllTensorsReleased(async () => { await expectAllTensorsReleased(async () => {
await faceExpressionNet.predictExpressions(tensor) await faceExpressionNet.predictExpressions(tensor)
...@@ -218,7 +218,7 @@ describe('faceExpressionNet', () => { ...@@ -218,7 +218,7 @@ describe('faceExpressionNet', () => {
it('multiple batch size 1 tf.Tensor4Ds', async () => { it('multiple batch size 1 tf.Tensor4Ds', async () => {
const tensors = [imgElAngry, imgElAngry, imgElAngry] const tensors = [imgElAngry, imgElAngry, imgElAngry]
.map(el => tf.tidy(() => tf.fromPixels(createCanvasFromMedia(el)).expandDims())) as tf.Tensor4D[] .map(el => tf.tidy(() => tf.browser.fromPixels(createCanvasFromMedia(el)).expandDims())) as tf.Tensor4D[]
await expectAllTensorsReleased(async () => { await expectAllTensorsReleased(async () => {
await faceExpressionNet.predictExpressions(tensors) await faceExpressionNet.predictExpressions(tensors)
......
...@@ -92,7 +92,7 @@ describe('faceLandmark68Net', () => { ...@@ -92,7 +92,7 @@ describe('faceLandmark68Net', () => {
}) })
it('computes face landmarks for batch of tf.Tensor3D', async () => { it('computes face landmarks for batch of tf.Tensor3D', async () => {
const inputs = [imgEl1, imgEl2, imgElRect].map(el => tf.fromPixels(createCanvasFromMedia(el))) const inputs = [imgEl1, imgEl2, imgElRect].map(el => tf.browser.fromPixels(createCanvasFromMedia(el)))
const faceLandmarkPositions = [ const faceLandmarkPositions = [
faceLandmarkPositions1, faceLandmarkPositions1,
...@@ -117,7 +117,7 @@ describe('faceLandmark68Net', () => { ...@@ -117,7 +117,7 @@ describe('faceLandmark68Net', () => {
}) })
it('computes face landmarks for batch of mixed inputs', async () => { it('computes face landmarks for batch of mixed inputs', async () => {
const inputs = [imgEl1, tf.fromPixels(createCanvasFromMedia(imgEl2)), tf.fromPixels(createCanvasFromMedia(imgElRect))] const inputs = [imgEl1, tf.browser.fromPixels(createCanvasFromMedia(imgEl2)), tf.browser.fromPixels(createCanvasFromMedia(imgElRect))]
const faceLandmarkPositions = [ const faceLandmarkPositions = [
faceLandmarkPositions1, faceLandmarkPositions1,
...@@ -164,7 +164,7 @@ describe('faceLandmark68Net', () => { ...@@ -164,7 +164,7 @@ describe('faceLandmark68Net', () => {
}) })
it('single tf.Tensor3D', async () => { it('single tf.Tensor3D', async () => {
const tensor = tf.fromPixels(createCanvasFromMedia(imgEl1)) const tensor = tf.browser.fromPixels(createCanvasFromMedia(imgEl1))
await expectAllTensorsReleased(async () => { await expectAllTensorsReleased(async () => {
const netInput = new NetInput([tensor]) const netInput = new NetInput([tensor])
...@@ -176,7 +176,7 @@ describe('faceLandmark68Net', () => { ...@@ -176,7 +176,7 @@ describe('faceLandmark68Net', () => {
}) })
it('multiple tf.Tensor3Ds', async () => { it('multiple tf.Tensor3Ds', async () => {
const tensors = [imgEl1, imgEl1, imgEl1].map(el => tf.fromPixels(createCanvasFromMedia(el))) const tensors = [imgEl1, imgEl1, imgEl1].map(el => tf.browser.fromPixels(createCanvasFromMedia(el)))
await expectAllTensorsReleased(async () => { await expectAllTensorsReleased(async () => {
const netInput = new NetInput(tensors) const netInput = new NetInput(tensors)
...@@ -188,7 +188,7 @@ describe('faceLandmark68Net', () => { ...@@ -188,7 +188,7 @@ describe('faceLandmark68Net', () => {
}) })
it('single batch size 1 tf.Tensor4Ds', async () => { it('single batch size 1 tf.Tensor4Ds', async () => {
const tensor = tf.tidy(() => tf.fromPixels(createCanvasFromMedia(imgEl1)).expandDims()) as tf.Tensor4D const tensor = tf.tidy(() => tf.browser.fromPixels(createCanvasFromMedia(imgEl1)).expandDims()) as tf.Tensor4D
await expectAllTensorsReleased(async () => { await expectAllTensorsReleased(async () => {
const outTensor = await faceLandmark68Net.forwardInput(await toNetInput(tensor)) const outTensor = await faceLandmark68Net.forwardInput(await toNetInput(tensor))
...@@ -200,7 +200,7 @@ describe('faceLandmark68Net', () => { ...@@ -200,7 +200,7 @@ describe('faceLandmark68Net', () => {
it('multiple batch size 1 tf.Tensor4Ds', async () => { it('multiple batch size 1 tf.Tensor4Ds', async () => {
const tensors = [imgEl1, imgEl1, imgEl1] const tensors = [imgEl1, imgEl1, imgEl1]
.map(el => tf.tidy(() => tf.fromPixels(createCanvasFromMedia(el)).expandDims())) as tf.Tensor4D[] .map(el => tf.tidy(() => tf.browser.fromPixels(createCanvasFromMedia(el)).expandDims())) as tf.Tensor4D[]
await expectAllTensorsReleased(async () => { await expectAllTensorsReleased(async () => {
const outTensor = await faceLandmark68Net.forwardInput(await toNetInput(tensors)) const outTensor = await faceLandmark68Net.forwardInput(await toNetInput(tensors))
...@@ -227,7 +227,7 @@ describe('faceLandmark68Net', () => { ...@@ -227,7 +227,7 @@ describe('faceLandmark68Net', () => {
}) })
it('single tf.Tensor3D', async () => { it('single tf.Tensor3D', async () => {
const tensor = tf.fromPixels(createCanvasFromMedia(imgEl1)) const tensor = tf.browser.fromPixels(createCanvasFromMedia(imgEl1))
await expectAllTensorsReleased(async () => { await expectAllTensorsReleased(async () => {
await faceLandmark68Net.detectLandmarks(tensor) await faceLandmark68Net.detectLandmarks(tensor)
...@@ -237,7 +237,7 @@ describe('faceLandmark68Net', () => { ...@@ -237,7 +237,7 @@ describe('faceLandmark68Net', () => {
}) })
it('multiple tf.Tensor3Ds', async () => { it('multiple tf.Tensor3Ds', async () => {
const tensors = [imgEl1, imgEl1, imgEl1].map(el => tf.fromPixels(createCanvasFromMedia(el))) const tensors = [imgEl1, imgEl1, imgEl1].map(el => tf.browser.fromPixels(createCanvasFromMedia(el)))
await expectAllTensorsReleased(async () => { await expectAllTensorsReleased(async () => {
...@@ -248,7 +248,7 @@ describe('faceLandmark68Net', () => { ...@@ -248,7 +248,7 @@ describe('faceLandmark68Net', () => {
}) })
it('single batch size 1 tf.Tensor4Ds', async () => { it('single batch size 1 tf.Tensor4Ds', async () => {
const tensor = tf.tidy(() => tf.fromPixels(createCanvasFromMedia(imgEl1)).expandDims()) as tf.Tensor4D const tensor = tf.tidy(() => tf.browser.fromPixels(createCanvasFromMedia(imgEl1)).expandDims()) as tf.Tensor4D
await expectAllTensorsReleased(async () => { await expectAllTensorsReleased(async () => {
await faceLandmark68Net.detectLandmarks(tensor) await faceLandmark68Net.detectLandmarks(tensor)
...@@ -259,7 +259,7 @@ describe('faceLandmark68Net', () => { ...@@ -259,7 +259,7 @@ describe('faceLandmark68Net', () => {
it('multiple batch size 1 tf.Tensor4Ds', async () => { it('multiple batch size 1 tf.Tensor4Ds', async () => {
const tensors = [imgEl1, imgEl1, imgEl1] const tensors = [imgEl1, imgEl1, imgEl1]
.map(el => tf.tidy(() => tf.fromPixels(createCanvasFromMedia(el)).expandDims())) as tf.Tensor4D[] .map(el => tf.tidy(() => tf.browser.fromPixels(createCanvasFromMedia(el)).expandDims())) as tf.Tensor4D[]
await expectAllTensorsReleased(async () => { await expectAllTensorsReleased(async () => {
await faceLandmark68Net.detectLandmarks(tensors) await faceLandmark68Net.detectLandmarks(tensors)
......
...@@ -91,7 +91,7 @@ describe('faceLandmark68TinyNet', () => { ...@@ -91,7 +91,7 @@ describe('faceLandmark68TinyNet', () => {
}) })
it('computes face landmarks for batch of tf.Tensor3D', async () => { it('computes face landmarks for batch of tf.Tensor3D', async () => {
const inputs = [imgEl1, imgEl2, imgElRect].map(el => tf.fromPixels(createCanvasFromMedia(el))) const inputs = [imgEl1, imgEl2, imgElRect].map(el => tf.browser.fromPixels(createCanvasFromMedia(el)))
const faceLandmarkPositions = [
faceLandmarkPositions1,
@@ -116,7 +116,7 @@ describe('faceLandmark68TinyNet', () => {
})
it('computes face landmarks for batch of mixed inputs', async () => {
- const inputs = [imgEl1, tf.fromPixels(createCanvasFromMedia(imgEl2)), tf.fromPixels(createCanvasFromMedia(imgElRect))]
+ const inputs = [imgEl1, tf.browser.fromPixels(createCanvasFromMedia(imgEl2)), tf.browser.fromPixels(createCanvasFromMedia(imgElRect))]
const faceLandmarkPositions = [
faceLandmarkPositions1,
@@ -164,7 +164,7 @@ describe('faceLandmark68TinyNet', () => {
})
it('single tf.Tensor3D', async () => {
- const tensor = tf.fromPixels(createCanvasFromMedia(imgEl1))
+ const tensor = tf.browser.fromPixels(createCanvasFromMedia(imgEl1))
await expectAllTensorsReleased(async () => {
const netInput = new NetInput([tensor])
@@ -176,7 +176,7 @@ describe('faceLandmark68TinyNet', () => {
})
it('multiple tf.Tensor3Ds', async () => {
- const tensors = [imgEl1, imgEl1, imgEl1].map(el => tf.fromPixels(createCanvasFromMedia(el)))
+ const tensors = [imgEl1, imgEl1, imgEl1].map(el => tf.browser.fromPixels(createCanvasFromMedia(el)))
await expectAllTensorsReleased(async () => {
const netInput = new NetInput(tensors)
@@ -188,7 +188,7 @@ describe('faceLandmark68TinyNet', () => {
})
it('single batch size 1 tf.Tensor4Ds', async () => {
- const tensor = tf.tidy(() => tf.fromPixels(createCanvasFromMedia(imgEl1)).expandDims()) as tf.Tensor4D
+ const tensor = tf.tidy(() => tf.browser.fromPixels(createCanvasFromMedia(imgEl1)).expandDims()) as tf.Tensor4D
await expectAllTensorsReleased(async () => {
const outTensor = await faceLandmark68TinyNet.forwardInput(await toNetInput(tensor))
@@ -200,7 +200,7 @@ describe('faceLandmark68TinyNet', () => {
it('multiple batch size 1 tf.Tensor4Ds', async () => {
const tensors = [imgEl1, imgEl1, imgEl1]
- .map(el => tf.tidy(() => tf.fromPixels(createCanvasFromMedia(el)).expandDims())) as tf.Tensor4D[]
+ .map(el => tf.tidy(() => tf.browser.fromPixels(createCanvasFromMedia(el)).expandDims())) as tf.Tensor4D[]
await expectAllTensorsReleased(async () => {
const outTensor = await faceLandmark68TinyNet.forwardInput(await toNetInput(tensors))
@@ -227,7 +227,7 @@ describe('faceLandmark68TinyNet', () => {
})
it('single tf.Tensor3D', async () => {
- const tensor = tf.fromPixels(createCanvasFromMedia(imgEl1))
+ const tensor = tf.browser.fromPixels(createCanvasFromMedia(imgEl1))
await expectAllTensorsReleased(async () => {
await faceLandmark68TinyNet.detectLandmarks(tensor)
@@ -237,7 +237,7 @@ describe('faceLandmark68TinyNet', () => {
})
it('multiple tf.Tensor3Ds', async () => {
- const tensors = [imgEl1, imgEl1, imgEl1].map(el => tf.fromPixels(createCanvasFromMedia(el)))
+ const tensors = [imgEl1, imgEl1, imgEl1].map(el => tf.browser.fromPixels(createCanvasFromMedia(el)))
await expectAllTensorsReleased(async () => {
@@ -248,7 +248,7 @@ describe('faceLandmark68TinyNet', () => {
})
it('single batch size 1 tf.Tensor4Ds', async () => {
- const tensor = tf.tidy(() => tf.fromPixels(createCanvasFromMedia(imgEl1)).expandDims()) as tf.Tensor4D
+ const tensor = tf.tidy(() => tf.browser.fromPixels(createCanvasFromMedia(imgEl1)).expandDims()) as tf.Tensor4D
await expectAllTensorsReleased(async () => {
await faceLandmark68TinyNet.detectLandmarks(tensor)
@@ -259,7 +259,7 @@ describe('faceLandmark68TinyNet', () => {
it('multiple batch size 1 tf.Tensor4Ds', async () => {
const tensors = [imgEl1, imgEl1, imgEl1]
- .map(el => tf.tidy(() => tf.fromPixels(createCanvasFromMedia(el)).expandDims())) as tf.Tensor4D[]
+ .map(el => tf.tidy(() => tf.browser.fromPixels(createCanvasFromMedia(el)).expandDims())) as tf.Tensor4D[]
await expectAllTensorsReleased(async () => {
await faceLandmark68TinyNet.detectLandmarks(tensors)
...
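Every hunk in this file makes the same mechanical change: tfjs 1.x moved fromPixels into the tf.browser namespace. A minimal sketch of the rename, assuming @tensorflow/tfjs-core >= 1.0; the canvasToTensor helper name is illustrative only, and its canvas argument stands in for the canvases the tests build with createCanvasFromMedia:

import * as tf from '@tensorflow/tfjs-core'

// tfjs 0.x exposed fromPixels at the top level; in 1.x it lives under tf.browser.
function canvasToTensor(canvas: HTMLCanvasElement): tf.Tensor3D {
  return tf.browser.fromPixels(canvas)  // previously: tf.fromPixels(canvas)
}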
@@ -60,7 +60,7 @@ describe('faceRecognitionNet', () => {
})
it('computes face descriptors for batch of tf.Tensor3D', async () => {
- const inputs = [imgEl1, imgEl2, imgElRect].map(el => tf.fromPixels(el))
+ const inputs = [imgEl1, imgEl2, imgElRect].map(el => tf.browser.fromPixels(el))
const faceDescriptors = [
faceDescriptor1,
@@ -77,7 +77,7 @@ describe('faceRecognitionNet', () => {
})
it('computes face descriptors for batch of mixed inputs', async () => {
- const inputs = [imgEl1, tf.fromPixels(imgEl2), tf.fromPixels(imgElRect)]
+ const inputs = [imgEl1, tf.browser.fromPixels(imgEl2), tf.browser.fromPixels(imgElRect)]
const faceDescriptors = [
faceDescriptor1,
@@ -116,7 +116,7 @@ describe('faceRecognitionNet', () => {
})
it('single tf.Tensor3D', async () => {
- const tensor = tf.fromPixels(imgEl1)
+ const tensor = tf.browser.fromPixels(imgEl1)
await expectAllTensorsReleased(async () => {
const netInput = new NetInput([tensor])
@@ -128,7 +128,7 @@ describe('faceRecognitionNet', () => {
})
it('multiple tf.Tensor3Ds', async () => {
- const tensors = [imgEl1, imgEl1, imgEl1].map(el => tf.fromPixels(el))
+ const tensors = [imgEl1, imgEl1, imgEl1].map(el => tf.browser.fromPixels(el))
await expectAllTensorsReleased(async () => {
const netInput = new NetInput(tensors)
@@ -140,7 +140,7 @@ describe('faceRecognitionNet', () => {
})
it('single batch size 1 tf.Tensor4Ds', async () => {
- const tensor = tf.tidy(() => tf.fromPixels(imgEl1).expandDims()) as tf.Tensor4D
+ const tensor = tf.tidy(() => tf.browser.fromPixels(imgEl1).expandDims()) as tf.Tensor4D
await expectAllTensorsReleased(async () => {
const outTensor = await faceRecognitionNet.forwardInput(await toNetInput(tensor))
@@ -152,7 +152,7 @@ describe('faceRecognitionNet', () => {
it('multiple batch size 1 tf.Tensor4Ds', async () => {
const tensors = [imgEl1, imgEl1, imgEl1]
- .map(el => tf.tidy(() => tf.fromPixels(el).expandDims())) as tf.Tensor4D[]
+ .map(el => tf.tidy(() => tf.browser.fromPixels(el).expandDims())) as tf.Tensor4D[]
await expectAllTensorsReleased(async () => {
const outTensor = await faceRecognitionNet.forwardInput(await toNetInput(tensors))
@@ -179,7 +179,7 @@ describe('faceRecognitionNet', () => {
})
it('single tf.Tensor3D', async () => {
- const tensor = tf.fromPixels(imgEl1)
+ const tensor = tf.browser.fromPixels(imgEl1)
await expectAllTensorsReleased(async () => {
await faceRecognitionNet.computeFaceDescriptor(tensor)
@@ -189,7 +189,7 @@ describe('faceRecognitionNet', () => {
})
it('multiple tf.Tensor3Ds', async () => {
- const tensors = [imgEl1, imgEl1, imgEl1].map(el => tf.fromPixels(el))
+ const tensors = [imgEl1, imgEl1, imgEl1].map(el => tf.browser.fromPixels(el))
await expectAllTensorsReleased(async () => {
@@ -200,7 +200,7 @@ describe('faceRecognitionNet', () => {
})
it('single batch size 1 tf.Tensor4Ds', async () => {
- const tensor = tf.tidy(() => tf.fromPixels(imgEl1).expandDims()) as tf.Tensor4D
+ const tensor = tf.tidy(() => tf.browser.fromPixels(imgEl1).expandDims()) as tf.Tensor4D
await expectAllTensorsReleased(async () => {
await faceRecognitionNet.computeFaceDescriptor(tensor)
@@ -211,7 +211,7 @@ describe('faceRecognitionNet', () => {
it('multiple batch size 1 tf.Tensor4Ds', async () => {
const tensors = [imgEl1, imgEl1, imgEl1]
- .map(el => tf.tidy(() => tf.fromPixels(el).expandDims())) as tf.Tensor4D[]
+ .map(el => tf.tidy(() => tf.browser.fromPixels(el).expandDims())) as tf.Tensor4D[]
await expectAllTensorsReleased(async () => {
await faceRecognitionNet.computeFaceDescriptor(tensors)
...
@@ -15,7 +15,7 @@ describe('ssdMobilenetv1 - node', () => {
const expectedScores = [0.54, 0.81, 0.97, 0.88, 0.84, 0.61]
beforeAll(async () => {
- imgTensor = tf.fromPixels(createCanvasFromMedia(await loadImage('test/images/faces.jpg')))
+ imgTensor = tf.browser.fromPixels(createCanvasFromMedia(await loadImage('test/images/faces.jpg')))
expectedFullFaceDescriptions = await assembleExpectedFullFaceDescriptions(expectedSsdBoxes)
})
...
@@ -15,7 +15,7 @@ describe('tinyFaceDetector - node', () => {
const expectedScores = [0.7, 0.82, 0.93, 0.86, 0.79, 0.84]
beforeAll(async () => {
- imgTensor = tf.fromPixels(createCanvasFromMedia(await loadImage('test/images/faces.jpg')))
+ imgTensor = tf.browser.fromPixels(createCanvasFromMedia(await loadImage('test/images/faces.jpg')))
expectedFullFaceDescriptions = await assembleExpectedFullFaceDescriptions(expectedTinyFaceDetectorBoxes)
})
...
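The batched "tf.Tensor4Ds" cases in the hunks above follow the same rename with an extra expandDims, wrapped in tf.tidy so the intermediate rank-3 tensor is released. A sketch under the same @tensorflow/tfjs-core >= 1.0 assumption; toBatchedTensors is an illustrative name, not a helper from the test suite:

import * as tf from '@tensorflow/tfjs-core'

// Each canvas becomes a [1, height, width, 3] tensor; tf.tidy disposes the
// intermediate rank-3 tensor produced by tf.browser.fromPixels, keeping only
// the returned expanded tensor alive.
function toBatchedTensors(canvases: HTMLCanvasElement[]): tf.Tensor4D[] {
  return canvases.map(canvas =>
    tf.tidy(() => tf.browser.fromPixels(canvas).expandDims()) as tf.Tensor4D
  )
}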