Unverified Commit 19e27a14 by justadudewhohacks Committed by GitHub

Merge pull request #132 from justadudewhohacks/nodejs

nodejs support
parents 50576405 12981b61
...@@ -6,3 +6,4 @@ proto ...@@ -6,3 +6,4 @@ proto
weights_uncompressed weights_uncompressed
weights_unused weights_unused
docs docs
out
\ No newline at end of file
...@@ -4,16 +4,21 @@ node_js: ...@@ -4,16 +4,21 @@ node_js:
- "node" - "node"
- "10" - "10"
- "8" - "8"
- "6" # node 6 is not compatible with tfjs-node
# - "6"
env: env:
global:
- BACKEND_CPU=true EXCLUDE_UNCOMPRESSED=true - BACKEND_CPU=true EXCLUDE_UNCOMPRESSED=true
matrix:
- ENV=browser
- ENV=node
addons: addons:
chrome: stable chrome: stable
install: npm install install: npm install
before_install: before_install:
- export DISPLAY=:99.0 - export DISPLAY=:99.0
- sh -e /etc/init.d/xvfb start - sh -e /etc/init.d/xvfb start
- sleep 3 # give xvfb some time to start
script: script:
- npm run test-travis - if [ $ENV == 'browser' ]; then npm run test-browser; fi
- if [ $ENV == 'node' ]; then npm run test-node; fi
- npm run build - npm run build
\ No newline at end of file
...@@ -16,6 +16,9 @@ Table of Contents: ...@@ -16,6 +16,9 @@ Table of Contents:
* **[Face Detection Models](#models-face-detection)** * **[Face Detection Models](#models-face-detection)**
* **[68 Point Face Landmark Detection Models](#models-face-landmark-detection)** * **[68 Point Face Landmark Detection Models](#models-face-landmark-detection)**
* **[Face Recognition Model](#models-face-recognition)** * **[Face Recognition Model](#models-face-recognition)**
* **[Getting Started](#getting-started)**
* **[face-api.js for the Browser](#getting-started-browser)**
* **[face-api.js for Nodejs](#getting-started-nodejs)**
* **[Usage](#usage)** * **[Usage](#usage)**
* **[Loading the Models](#usage-loading-models)** * **[Loading the Models](#usage-loading-models)**
* **[High Level API](#usage-high-level-api)** * **[High Level API](#usage-high-level-api)**
...@@ -75,15 +78,42 @@ Check out my face-api.js tutorials: ...@@ -75,15 +78,42 @@ Check out my face-api.js tutorials:
## Running the Examples ## Running the Examples
Clone the repository:
``` bash ``` bash
git clone https://github.com/justadudewhohacks/face-api.js.git git clone https://github.com/justadudewhohacks/face-api.js.git
cd face-api.js/examples ```
### Running the Browser Examples
``` bash
cd face-api.js/examples/examples-browser
npm i npm i
npm start npm start
``` ```
Browse to http://localhost:3000/. Browse to http://localhost:3000/.
### Running the Nodejs Examples
``` bash
cd face-api.js/examples/examples-nodejs
npm i
```
Now run one of the examples using ts-node:
``` bash
ts-node faceDetection.ts
```
Or simply compile and run them with node:
``` bash
tsc faceDetection.ts
node faceDetection.js
```
<a name="models"></a> <a name="models"></a>
# Available Models # Available Models
...@@ -130,6 +160,55 @@ The neural net is equivalent to the **FaceRecognizerNet** used in [face-recognit ...@@ -130,6 +160,55 @@ The neural net is equivalent to the **FaceRecognizerNet** used in [face-recognit
The size of the quantized model is roughly 6.2 MB (**face_recognition_model**). The size of the quantized model is roughly 6.2 MB (**face_recognition_model**).
<a name="getting-started"></a>
# Getting Started
<a name="getting-started-browser"></a>
## face-api.js for the Browser
Simply include the latest script from [dist/face-api.js](https://github.com/justadudewhohacks/face-api.js/tree/master/dist).
Or install it via npm:
``` bash
npm i face-api.js
```
<a name="getting-started-nodejs"></a>
## face-api.js for Nodejs
We can use the equivalent API in a nodejs environment by polyfilling some browser specifics, such as HTMLImageElement, HTMLCanvasElement and ImageData. The easiest way to do so is by installing the node-canvas package.
Alternatively you can simply construct your own tensors from image data and pass tensors as inputs to the API.
Furthermore, you may want to install @tensorflow/tfjs-node (not required, but highly recommended), which speeds things up drastically by compiling and binding to the native Tensorflow C++ library:
``` bash
npm i face-api.js canvas @tensorflow/tfjs-node
```
Now we simply monkey patch the environment to use the polyfills:
``` javascript
// import nodejs bindings to native tensorflow,
// not required, but will speed up things drastically (python required)
import '@tensorflow/tfjs-node';
// implements nodejs wrappers for HTMLCanvasElement, HTMLImageElement, ImageData
import * as canvas from 'canvas';
import * as faceapi from 'face-api.js';
// patch nodejs environment, we need to provide an implementation of
// HTMLCanvasElement and HTMLImageElement, additionally an implementation
// of ImageData is required, in case you want to use the MTCNN
const { Canvas, Image, ImageData } = canvas
faceapi.env.monkeyPatch({ Canvas, Image, ImageData })
```
# Usage # Usage
<a name="usage-loading-models"></a> <a name="usage-loading-models"></a>
...@@ -150,14 +229,38 @@ await faceapi.loadSsdMobilenetv1Model('/models') ...@@ -150,14 +229,38 @@ await faceapi.loadSsdMobilenetv1Model('/models')
// await faceapi.loadFaceRecognitionModel('/models') // await faceapi.loadFaceRecognitionModel('/models')
``` ```
Alternatively, you can also create instance of the neural nets: All global neural network instances are exported via faceapi.nets:
``` javascript
console.log(faceapi.nets)
```
The following is equivalent to `await faceapi.loadSsdMobilenetv1Model('/models')`:
``` javascript
await faceapi.nets.ssdMobilenetv1.loadFromUri('/models')
```
In a nodejs environment you can furthermore load the models directly from disk:
``` javascript
await faceapi.nets.ssdMobilenetv1.loadFromDisk('./models')
```
You can also load the model from a tf.NamedTensorMap:
``` javascript
await faceapi.nets.ssdMobilenetv1.loadFromWeightMap(weightMap)
```
Alternatively, you can also create your own instances of the neural nets:
``` javascript ``` javascript
const net = new faceapi.SsdMobilenetv1() const net = new faceapi.SsdMobilenetv1()
await net.load('/models') await net.load('/models')
``` ```
Using instances, you can also load the weights as a Float32Array (in case you want to use the uncompressed models): You can also load the weights as a Float32Array (in case you want to use the uncompressed models):
``` javascript ``` javascript
// using fetch // using fetch
...@@ -205,7 +308,7 @@ By default **detectAllFaces** and **detectSingleFace** utilize the SSD Mobilenet ...@@ -205,7 +308,7 @@ By default **detectAllFaces** and **detectSingleFace** utilize the SSD Mobilenet
``` javascript ``` javascript
const detections1 = await faceapi.detectAllFaces(input, new faceapi.SsdMobilenetv1Options()) const detections1 = await faceapi.detectAllFaces(input, new faceapi.SsdMobilenetv1Options())
const detections2 = await faceapi.detectAllFaces(input, new faceapi.inyFaceDetectorOptions()) const detections2 = await faceapi.detectAllFaces(input, new faceapi.TinyFaceDetectorOptions())
const detections3 = await faceapi.detectAllFaces(input, new faceapi.MtcnnOptions()) const detections3 = await faceapi.detectAllFaces(input, new faceapi.MtcnnOptions())
``` ```
...@@ -513,12 +616,6 @@ const landmarks2 = await faceapi.detectFaceLandmarksTiny(faceImage) ...@@ -513,12 +616,6 @@ const landmarks2 = await faceapi.detectFaceLandmarksTiny(faceImage)
const descriptor = await faceapi.computeFaceDescriptor(alignedFaceImage) const descriptor = await faceapi.computeFaceDescriptor(alignedFaceImage)
``` ```
All global neural network instances are exported via faceapi.nets:
``` javascript
console.log(faceapi.nets)
```
### Extracting a Canvas for an Image Region ### Extracting a Canvas for an Image Region
``` javascript ``` javascript
......
const classes = ['amy', 'bernadette', 'howard', 'leonard', 'penny', 'raj', 'sheldon', 'stuart'] const classes = ['amy', 'bernadette', 'howard', 'leonard', 'penny', 'raj', 'sheldon', 'stuart']
function getFaceImageUri(className, idx) { function getFaceImageUri(className, idx) {
return `images/${className}/${className}${idx}.png` return `${className}/${className}${idx}.png`
} }
function renderFaceImageSelectList(selectListId, onChange, initialValue) { function renderFaceImageSelectList(selectListId, onChange, initialValue) {
......
function getImageUri(imageName) {
return `images/${imageName}`
}
async function requestExternalImage(imageUrl) { async function requestExternalImage(imageUrl) {
const res = await fetch('fetch_external_image', { const res = await fetch('fetch_external_image', {
method: 'post', method: 'post',
......
...@@ -17,7 +17,7 @@ function renderImageSelectList(selectListId, onChange, initialValue) { ...@@ -17,7 +17,7 @@ function renderImageSelectList(selectListId, onChange, initialValue) {
renderOption( renderOption(
select, select,
imageName, imageName,
getImageUri(imageName) imageName
) )
) )
} }
...@@ -25,7 +25,7 @@ function renderImageSelectList(selectListId, onChange, initialValue) { ...@@ -25,7 +25,7 @@ function renderImageSelectList(selectListId, onChange, initialValue) {
renderSelectList( renderSelectList(
selectListId, selectListId,
onChange, onChange,
getImageUri(initialValue), initialValue,
renderChildren renderChildren
) )
} }
......
...@@ -10,10 +10,10 @@ app.use(express.urlencoded({ extended: true })) ...@@ -10,10 +10,10 @@ app.use(express.urlencoded({ extended: true }))
const viewsDir = path.join(__dirname, 'views') const viewsDir = path.join(__dirname, 'views')
app.use(express.static(viewsDir)) app.use(express.static(viewsDir))
app.use(express.static(path.join(__dirname, './public'))) app.use(express.static(path.join(__dirname, './public')))
app.use(express.static(path.join(__dirname, '../weights'))) app.use(express.static(path.join(__dirname, '../images')))
app.use(express.static(path.join(__dirname, '../weights_uncompressed'))) app.use(express.static(path.join(__dirname, '../media')))
app.use(express.static(path.join(__dirname, '../dist'))) app.use(express.static(path.join(__dirname, '../../weights')))
app.use(express.static(path.join(__dirname, './node_modules/axios/dist'))) app.use(express.static(path.join(__dirname, '../../dist')))
app.get('/', (req, res) => res.redirect('/face_and_landmark_detection')) app.get('/', (req, res) => res.redirect('/face_and_landmark_detection'))
app.get('/face_and_landmark_detection', (req, res) => res.sendFile(path.join(viewsDir, 'faceAndLandmarkDetection.html'))) app.get('/face_and_landmark_detection', (req, res) => res.sendFile(path.join(viewsDir, 'faceAndLandmarkDetection.html')))
......
...@@ -18,7 +18,7 @@ ...@@ -18,7 +18,7 @@
<div class="indeterminate"></div> <div class="indeterminate"></div>
</div> </div>
<div style="position: relative" class="margin"> <div style="position: relative" class="margin">
<video src="media/bbt.mp4" id="inputVideo" autoplay muted loop></video> <video src="bbt.mp4" id="inputVideo" autoplay muted loop></video>
<canvas id="overlay" /> <canvas id="overlay" />
</div> </div>
......
// import nodejs bindings to native tensorflow,
// not required, but will speed up things drastically (python required)
import '@tensorflow/tfjs-node';
// implements nodejs wrappers for HTMLCanvasElement, HTMLImageElement, ImageData
const canvas = require('canvas')
import * as faceapi from '../../../src';
// patch nodejs environment, we need to provide an implementation of
// HTMLCanvasElement and HTMLImageElement, additionally an implementation
// of ImageData is required, in case you want to use the MTCNN
const { Canvas, Image, ImageData } = canvas
faceapi.env.monkeyPatch({ Canvas, Image, ImageData })
export { canvas, faceapi }
\ No newline at end of file
import { NeuralNetwork } from 'tfjs-image-recognition-base';
import { faceapi } from './env';
export const faceDetectionNet = faceapi.nets.ssdMobilenetv1
// export const faceDetectionNet = tinyFaceDetector
// export const faceDetectionNet = mtcnn
// SsdMobilenetv1Options
const minConfidence = 0.5
// TinyFaceDetectorOptions
const inputSize = 408
const scoreThreshold = 0.5
// MtcnnOptions
const minFaceSize = 50
const scaleFactor = 0.8
function getFaceDetectorOptions(net: NeuralNetwork<any>) {
return net === faceapi.nets.ssdMobilenetv1
? new faceapi.SsdMobilenetv1Options({ minConfidence })
: (net === faceapi.nets.tinyFaceDetector
? new faceapi.TinyFaceDetectorOptions({ inputSize, scoreThreshold })
: new faceapi.MtcnnOptions({ minFaceSize, scaleFactor })
)
}
export const faceDetectionOptions = getFaceDetectorOptions(faceDetectionNet)
\ No newline at end of file
// barrel module for the shared helpers used by the nodejs examples:
// the monkey-patched environment, the configured detector and file output
export { canvas, faceapi } from './env';
export { faceDetectionNet, faceDetectionOptions } from './faceDetection';
export { saveFile } from './saveFile';
import * as fs from 'fs';
import * as path from 'path';
const baseDir = path.resolve(__dirname, '../out')
export function saveFile(fileName: string, buf: Buffer) {
if (!fs.existsSync(baseDir)) {
fs.mkdirSync(baseDir)
}
fs.writeFileSync(path.resolve(baseDir, fileName), buf)
}
\ No newline at end of file
import { canvas, faceapi, faceDetectionNet, faceDetectionOptions, saveFile } from './commons';
async function run() {
await faceDetectionNet.loadFromDisk('../../weights')
const img = await canvas.loadImage('../images/bbt1.jpg')
const detections = await faceapi.detectAllFaces(img, faceDetectionOptions)
const out = faceapi.createCanvasFromMedia(img) as any
faceapi.drawDetection(out, detections)
saveFile('faceDetection.jpg', out.toBuffer('image/jpeg'))
}
run()
\ No newline at end of file
import { canvas, faceapi, faceDetectionNet, faceDetectionOptions, saveFile } from './commons';
async function run() {
await faceDetectionNet.loadFromDisk('../../weights')
await faceapi.nets.faceLandmark68Net.loadFromDisk('../../weights')
const img = await canvas.loadImage('../images/bbt1.jpg')
const results = await faceapi.detectAllFaces(img, faceDetectionOptions)
.withFaceLandmarks()
const out = faceapi.createCanvasFromMedia(img) as any
faceapi.drawDetection(out, results.map(res => res.detection))
faceapi.drawLandmarks(out, results.map(res => res.faceLandmarks), { drawLines: true, color: 'red' })
saveFile('faceLandmarkDetection.jpg', out.toBuffer('image/jpeg'))
}
run()
\ No newline at end of file
import { canvas, faceapi, faceDetectionNet, faceDetectionOptions, saveFile } from './commons';
const REFERENCE_IMAGE = '../images/bbt1.jpg'
const QUERY_IMAGE = '../images/bbt4.jpg'
async function run() {
await faceDetectionNet.loadFromDisk('../../weights')
await faceapi.nets.faceLandmark68Net.loadFromDisk('../../weights')
await faceapi.nets.faceRecognitionNet.loadFromDisk('../../weights')
const referenceImage = await canvas.loadImage(REFERENCE_IMAGE)
const queryImage = await canvas.loadImage(QUERY_IMAGE)
const resultsRef = await faceapi.detectAllFaces(referenceImage, faceDetectionOptions)
.withFaceLandmarks()
.withFaceDescriptors()
const resultsQuery = await faceapi.detectAllFaces(queryImage, faceDetectionOptions)
.withFaceLandmarks()
.withFaceDescriptors()
const faceMatcher = new faceapi.FaceMatcher(resultsRef)
const labels = faceMatcher.labeledDescriptors
.map(ld => ld.label)
const refBoxesWithText = resultsRef
.map(res => res.detection.box)
.map((box, i) => new faceapi.BoxWithText(box, labels[i]))
const outRef = faceapi.createCanvasFromMedia(referenceImage) as any
faceapi.drawDetection(outRef, refBoxesWithText)
saveFile('referenceImage.jpg', outRef.toBuffer('image/jpeg'))
const queryBoxesWithText = resultsQuery.map(res => {
const bestMatch = faceMatcher.findBestMatch(res.descriptor)
return new faceapi.BoxWithText(res.detection.box, bestMatch.toString())
})
const outQuery = faceapi.createCanvasFromMedia(queryImage) as any
faceapi.drawDetection(outQuery, queryBoxesWithText)
saveFile('queryImage.jpg', outQuery.toBuffer('image/jpeg'))
}
run()
\ No newline at end of file
{
"requires": true,
"lockfileVersion": 1,
"dependencies": {
"@protobufjs/aspromise": {
"version": "1.1.2",
"resolved": "https://registry.npmjs.org/@protobufjs/aspromise/-/aspromise-1.1.2.tgz",
"integrity": "sha1-m4sMxmPWaafY9vXQiToU00jzD78="
},
"@protobufjs/base64": {
"version": "1.1.2",
"resolved": "https://registry.npmjs.org/@protobufjs/base64/-/base64-1.1.2.tgz",
"integrity": "sha512-AZkcAA5vnN/v4PDqKyMR5lx7hZttPDgClv83E//FMNhR2TMcLUhfRUBHCmSl0oi9zMgDDqRUJkSxO3wm85+XLg=="
},
"@protobufjs/codegen": {
"version": "2.0.4",
"resolved": "https://registry.npmjs.org/@protobufjs/codegen/-/codegen-2.0.4.tgz",
"integrity": "sha512-YyFaikqM5sH0ziFZCN3xDC7zeGaB/d0IUb9CATugHWbd1FRFwWwt4ld4OYMPWu5a3Xe01mGAULCdqhMlPl29Jg=="
},
"@protobufjs/eventemitter": {
"version": "1.1.0",
"resolved": "https://registry.npmjs.org/@protobufjs/eventemitter/-/eventemitter-1.1.0.tgz",
"integrity": "sha1-NVy8mLr61ZePntCV85diHx0Ga3A="
},
"@protobufjs/fetch": {
"version": "1.1.0",
"resolved": "https://registry.npmjs.org/@protobufjs/fetch/-/fetch-1.1.0.tgz",
"integrity": "sha1-upn7WYYUr2VwDBYZ/wbUVLDYTEU=",
"requires": {
"@protobufjs/aspromise": "1.1.2",
"@protobufjs/inquire": "1.1.0"
}
},
"@protobufjs/float": {
"version": "1.0.2",
"resolved": "https://registry.npmjs.org/@protobufjs/float/-/float-1.0.2.tgz",
"integrity": "sha1-Xp4avctz/Ap8uLKR33jIy9l7h9E="
},
"@protobufjs/inquire": {
"version": "1.1.0",
"resolved": "https://registry.npmjs.org/@protobufjs/inquire/-/inquire-1.1.0.tgz",
"integrity": "sha1-/yAOPnzyQp4tyvwRQIKOjMY48Ik="
},
"@protobufjs/path": {
"version": "1.1.2",
"resolved": "https://registry.npmjs.org/@protobufjs/path/-/path-1.1.2.tgz",
"integrity": "sha1-bMKyDFya1q0NzP0hynZz2Nf79o0="
},
"@protobufjs/pool": {
"version": "1.1.0",
"resolved": "https://registry.npmjs.org/@protobufjs/pool/-/pool-1.1.0.tgz",
"integrity": "sha1-Cf0V8tbTq/qbZbw2ZQbWrXhG/1Q="
},
"@protobufjs/utf8": {
"version": "1.1.0",
"resolved": "https://registry.npmjs.org/@protobufjs/utf8/-/utf8-1.1.0.tgz",
"integrity": "sha1-p3c2C1s5oaLlEG+OhY8v0tBgxXA="
},
"@tensorflow/tfjs": {
"version": "0.13.3",
"resolved": "https://registry.npmjs.org/@tensorflow/tfjs/-/tfjs-0.13.3.tgz",
"integrity": "sha512-7WXS6wfCnX/5s2CmAYelRRiRvRkpDbdoL/U+qZ+08lS2NWFWT2y+ga/nWq6brNHUEShLpXNdZ8S++UcG2q2t0g==",
"requires": {
"@tensorflow/tfjs-converter": "0.6.5",
"@tensorflow/tfjs-core": "0.13.8",
"@tensorflow/tfjs-layers": "0.8.3"
}
},
"@tensorflow/tfjs-converter": {
"version": "0.6.5",
"resolved": "https://registry.npmjs.org/@tensorflow/tfjs-converter/-/tfjs-converter-0.6.5.tgz",
"integrity": "sha512-jbmxtRtRV/7JZNmhkjZsPJ651ntrWlxPfzTYF10vPWnS2Tz+u0RjJj9pXRYoO4jfsvFz//kNtXKMhb/Icbp5BA==",
"requires": {
"@types/long": "3.0.32",
"protobufjs": "6.8.8"
}
},
"@tensorflow/tfjs-core": {
"version": "0.13.8",
"resolved": "https://registry.npmjs.org/@tensorflow/tfjs-core/-/tfjs-core-0.13.8.tgz",
"integrity": "sha512-Vp0dU6wRisXZRUOEb7w5lPybJkJw2iR32sTlv1ZY42fReMgP7nYKPVoNaByjJ7VFmJe6fK0yURklI+vc+u8SEQ==",
"requires": {
"@types/seedrandom": "2.4.27",
"@types/webgl-ext": "0.0.30",
"@types/webgl2": "0.0.4",
"seedrandom": "2.4.3"
}
},
"@tensorflow/tfjs-layers": {
"version": "0.8.3",
"resolved": "https://registry.npmjs.org/@tensorflow/tfjs-layers/-/tfjs-layers-0.8.3.tgz",
"integrity": "sha512-RHnTtXx7bE6d+x0byLi90nejy44caBKhYqBR0zmCVzxagxDSln9wd28ojHEh1/6gQyBejhMEQ3gUK4SsKTpjIw=="
},
"@tensorflow/tfjs-node": {
"version": "0.1.19",
"resolved": "https://registry.npmjs.org/@tensorflow/tfjs-node/-/tfjs-node-0.1.19.tgz",
"integrity": "sha512-iQwUu52H1fZjlwNHB3C0nqIhhfGoHz9o7pu9PTxkcFSAuKRqI4quvt96uT2GS089w+FLFyGwABi2hBjXDsYMJw==",
"requires": {
"@tensorflow/tfjs": "0.13.3",
"adm-zip": "0.4.11",
"bindings": "1.3.0",
"progress": "2.0.1",
"rimraf": "2.6.2",
"tar": "4.4.6"
}
},
"@types/long": {
"version": "3.0.32",
"resolved": "https://registry.npmjs.org/@types/long/-/long-3.0.32.tgz",
"integrity": "sha512-ZXyOOm83p7X8p3s0IYM3VeueNmHpkk/yMlP8CLeOnEcu6hIwPH7YjZBvhQkR0ZFS2DqZAxKtJ/M5fcuv3OU5BA=="
},
"@types/node": {
"version": "10.12.0",
"resolved": "https://registry.npmjs.org/@types/node/-/node-10.12.0.tgz",
"integrity": "sha512-3TUHC3jsBAB7qVRGxT6lWyYo2v96BMmD2PTcl47H25Lu7UXtFH/2qqmKiVrnel6Ne//0TFYf6uvNX+HW2FRkLQ=="
},
"@types/seedrandom": {
"version": "2.4.27",
"resolved": "http://registry.npmjs.org/@types/seedrandom/-/seedrandom-2.4.27.tgz",
"integrity": "sha1-nbVjk33YaRX2kJK8QyWdL0hXjkE="
},
"@types/webgl-ext": {
"version": "0.0.30",
"resolved": "https://registry.npmjs.org/@types/webgl-ext/-/webgl-ext-0.0.30.tgz",
"integrity": "sha512-LKVgNmBxN0BbljJrVUwkxwRYqzsAEPcZOe6S2T6ZaBDIrFp0qu4FNlpc5sM1tGbXUYFgdVQIoeLk1Y1UoblyEg=="
},
"@types/webgl2": {
"version": "0.0.4",
"resolved": "https://registry.npmjs.org/@types/webgl2/-/webgl2-0.0.4.tgz",
"integrity": "sha512-PACt1xdErJbMUOUweSrbVM7gSIYm1vTncW2hF6Os/EeWi6TXYAYMPp+8v6rzHmypE5gHrxaxZNXgMkJVIdZpHw=="
},
"abbrev": {
"version": "1.1.1",
"resolved": "https://registry.npmjs.org/abbrev/-/abbrev-1.1.1.tgz",
"integrity": "sha512-nne9/IiQ/hzIhY6pdDnbBtz7DjPTKrY00P/zvPSm5pOFkl6xuGrGnXn/VtTNNfNtAfZ9/1RtehkszU9qcTii0Q=="
},
"adm-zip": {
"version": "0.4.11",
"resolved": "https://registry.npmjs.org/adm-zip/-/adm-zip-0.4.11.tgz",
"integrity": "sha512-L8vcjDTCOIJk7wFvmlEUN7AsSb8T+2JrdP7KINBjzr24TJ5Mwj590sLu3BC7zNZowvJWa/JtPmD8eJCzdtDWjA=="
},
"ansi-regex": {
"version": "2.1.1",
"resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-2.1.1.tgz",
"integrity": "sha1-w7M6te42DYbg5ijwRorn7yfWVN8="
},
"aproba": {
"version": "1.2.0",
"resolved": "https://registry.npmjs.org/aproba/-/aproba-1.2.0.tgz",
"integrity": "sha512-Y9J6ZjXtoYh8RnXVCMOU/ttDmk1aBjunq9vO0ta5x85WDQiQfUF9sIPBITdbiiIVcBo03Hi3jMxigBtsddlXRw=="
},
"are-we-there-yet": {
"version": "1.1.5",
"resolved": "https://registry.npmjs.org/are-we-there-yet/-/are-we-there-yet-1.1.5.tgz",
"integrity": "sha512-5hYdAkZlcG8tOLujVDTgCT+uPX0VnpAH28gWsLfzpXYm7wP6mp5Q/gYyR7YQ0cKVJcXJnl3j2kpBan13PtQf6w==",
"requires": {
"delegates": "1.0.0",
"readable-stream": "2.3.6"
}
},
"balanced-match": {
"version": "1.0.0",
"resolved": "https://registry.npmjs.org/balanced-match/-/balanced-match-1.0.0.tgz",
"integrity": "sha1-ibTRmasr7kneFk6gK4nORi1xt2c="
},
"bindings": {
"version": "1.3.0",
"resolved": "https://registry.npmjs.org/bindings/-/bindings-1.3.0.tgz",
"integrity": "sha512-DpLh5EzMR2kzvX1KIlVC0VkC3iZtHKTgdtZ0a3pglBZdaQFjt5S9g9xd1lE+YvXyfd6mtCeRnrUfOLYiTMlNSw=="
},
"brace-expansion": {
"version": "1.1.11",
"resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.11.tgz",
"integrity": "sha512-iCuPHDFgrHX7H2vEI/5xpz07zSHB00TpugqhmYtVmMO6518mCuRMoOYFldEBl0g187ufozdaHgWKcYFb61qGiA==",
"requires": {
"balanced-match": "1.0.0",
"concat-map": "0.0.1"
}
},
"canvas": {
"version": "2.0.1",
"resolved": "https://registry.npmjs.org/canvas/-/canvas-2.0.1.tgz",
"integrity": "sha512-aVESjDBMXGRL+aZqjFtxMVOg8KzHhNcKIscoeC8OROccmApKOriHsnySxq228Kc+3tzB9Qc6tzD4ukp9Zjwz1Q==",
"requires": {
"nan": "2.11.1",
"node-pre-gyp": "0.11.0"
}
},
"chownr": {
"version": "1.1.1",
"resolved": "https://registry.npmjs.org/chownr/-/chownr-1.1.1.tgz",
"integrity": "sha512-j38EvO5+LHX84jlo6h4UzmOwi0UgW61WRyPtJz4qaadK5eY3BTS5TY/S1Stc3Uk2lIM6TPevAlULiEJwie860g=="
},
"code-point-at": {
"version": "1.1.0",
"resolved": "https://registry.npmjs.org/code-point-at/-/code-point-at-1.1.0.tgz",
"integrity": "sha1-DQcLTQQ6W+ozovGkDi7bPZpMz3c="
},
"concat-map": {
"version": "0.0.1",
"resolved": "https://registry.npmjs.org/concat-map/-/concat-map-0.0.1.tgz",
"integrity": "sha1-2Klr13/Wjfd5OnMDajug1UBdR3s="
},
"console-control-strings": {
"version": "1.1.0",
"resolved": "https://registry.npmjs.org/console-control-strings/-/console-control-strings-1.1.0.tgz",
"integrity": "sha1-PXz0Rk22RG6mRL9LOVB/mFEAjo4="
},
"core-util-is": {
"version": "1.0.2",
"resolved": "https://registry.npmjs.org/core-util-is/-/core-util-is-1.0.2.tgz",
"integrity": "sha1-tf1UIgqivFq1eqtxQMlAdUUDwac="
},
"debug": {
"version": "2.6.9",
"resolved": "https://registry.npmjs.org/debug/-/debug-2.6.9.tgz",
"integrity": "sha512-bC7ElrdJaJnPbAP+1EotYvqZsb3ecl5wi6Bfi6BJTUcNowp6cvspg0jXznRTKDjm/E7AdgFBVeAPVMNcKGsHMA==",
"requires": {
"ms": "2.0.0"
}
},
"deep-extend": {
"version": "0.6.0",
"resolved": "https://registry.npmjs.org/deep-extend/-/deep-extend-0.6.0.tgz",
"integrity": "sha512-LOHxIOaPYdHlJRtCQfDIVZtfw/ufM8+rVj649RIHzcm/vGwQRXFt6OPqIFWsm2XEMrNIEtWR64sY1LEKD2vAOA=="
},
"delegates": {
"version": "1.0.0",
"resolved": "https://registry.npmjs.org/delegates/-/delegates-1.0.0.tgz",
"integrity": "sha1-hMbhWbgZBP3KWaDvRM2HDTElD5o="
},
"detect-libc": {
"version": "1.0.3",
"resolved": "https://registry.npmjs.org/detect-libc/-/detect-libc-1.0.3.tgz",
"integrity": "sha1-+hN8S9aY7fVc1c0CrFWfkaTEups="
},
"fs-minipass": {
"version": "1.2.5",
"resolved": "https://registry.npmjs.org/fs-minipass/-/fs-minipass-1.2.5.tgz",
"integrity": "sha512-JhBl0skXjUPCFH7x6x61gQxrKyXsxB5gcgePLZCwfyCGGsTISMoIeObbrvVeP6Xmyaudw4TT43qV2Gz+iyd2oQ==",
"requires": {
"minipass": "2.3.5"
}
},
"fs.realpath": {
"version": "1.0.0",
"resolved": "https://registry.npmjs.org/fs.realpath/-/fs.realpath-1.0.0.tgz",
"integrity": "sha1-FQStJSMVjKpA20onh8sBQRmU6k8="
},
"gauge": {
"version": "2.7.4",
"resolved": "https://registry.npmjs.org/gauge/-/gauge-2.7.4.tgz",
"integrity": "sha1-LANAXHU4w51+s3sxcCLjJfsBi/c=",
"requires": {
"aproba": "1.2.0",
"console-control-strings": "1.1.0",
"has-unicode": "2.0.1",
"object-assign": "4.1.1",
"signal-exit": "3.0.2",
"string-width": "1.0.2",
"strip-ansi": "3.0.1",
"wide-align": "1.1.3"
}
},
"glob": {
"version": "7.1.3",
"resolved": "https://registry.npmjs.org/glob/-/glob-7.1.3.tgz",
"integrity": "sha512-vcfuiIxogLV4DlGBHIUOwI0IbrJ8HWPc4MU7HzviGeNho/UJDfi6B5p3sHeWIQ0KGIU0Jpxi5ZHxemQfLkkAwQ==",
"requires": {
"fs.realpath": "1.0.0",
"inflight": "1.0.6",
"inherits": "2.0.3",
"minimatch": "3.0.4",
"once": "1.4.0",
"path-is-absolute": "1.0.1"
}
},
"has-unicode": {
"version": "2.0.1",
"resolved": "https://registry.npmjs.org/has-unicode/-/has-unicode-2.0.1.tgz",
"integrity": "sha1-4Ob+aijPUROIVeCG0Wkedx3iqLk="
},
"iconv-lite": {
"version": "0.4.24",
"resolved": "https://registry.npmjs.org/iconv-lite/-/iconv-lite-0.4.24.tgz",
"integrity": "sha512-v3MXnZAcvnywkTUEZomIActle7RXXeedOR31wwl7VlyoXO4Qi9arvSenNQWne1TcRwhCL1HwLI21bEqdpj8/rA==",
"requires": {
"safer-buffer": "2.1.2"
}
},
"ignore-walk": {
"version": "3.0.1",
"resolved": "https://registry.npmjs.org/ignore-walk/-/ignore-walk-3.0.1.tgz",
"integrity": "sha512-DTVlMx3IYPe0/JJcYP7Gxg7ttZZu3IInhuEhbchuqneY9wWe5Ojy2mXLBaQFUQmo0AW2r3qG7m1mg86js+gnlQ==",
"requires": {
"minimatch": "3.0.4"
}
},
"inflight": {
"version": "1.0.6",
"resolved": "https://registry.npmjs.org/inflight/-/inflight-1.0.6.tgz",
"integrity": "sha1-Sb1jMdfQLQwJvJEKEHW6gWW1bfk=",
"requires": {
"once": "1.4.0",
"wrappy": "1.0.2"
}
},
"inherits": {
"version": "2.0.3",
"resolved": "https://registry.npmjs.org/inherits/-/inherits-2.0.3.tgz",
"integrity": "sha1-Yzwsg+PaQqUC9SRmAiSA9CCCYd4="
},
"ini": {
"version": "1.3.5",
"resolved": "https://registry.npmjs.org/ini/-/ini-1.3.5.tgz",
"integrity": "sha512-RZY5huIKCMRWDUqZlEi72f/lmXKMvuszcMBduliQ3nnWbx9X/ZBQO7DijMEYS9EhHBb2qacRUMtC7svLwe0lcw=="
},
"is-fullwidth-code-point": {
"version": "1.0.0",
"resolved": "https://registry.npmjs.org/is-fullwidth-code-point/-/is-fullwidth-code-point-1.0.0.tgz",
"integrity": "sha1-754xOG8DGn8NZDr4L95QxFfvAMs=",
"requires": {
"number-is-nan": "1.0.1"
}
},
"isarray": {
"version": "1.0.0",
"resolved": "https://registry.npmjs.org/isarray/-/isarray-1.0.0.tgz",
"integrity": "sha1-u5NdSFgsuhaMBoNJV6VKPgcSTxE="
},
"long": {
"version": "4.0.0",
"resolved": "https://registry.npmjs.org/long/-/long-4.0.0.tgz",
"integrity": "sha512-XsP+KhQif4bjX1kbuSiySJFNAehNxgLb6hPRGJ9QsUr8ajHkuXGdrHmFUTUUXhDwVX2R5bY4JNZEwbUiMhV+MA=="
},
"minimatch": {
"version": "3.0.4",
"resolved": "https://registry.npmjs.org/minimatch/-/minimatch-3.0.4.tgz",
"integrity": "sha512-yJHVQEhyqPLUTgt9B83PXu6W3rx4MvvHvSUvToogpwoGDOUQ+yDrR0HRot+yOCdCO7u4hX3pWft6kWBBcqh0UA==",
"requires": {
"brace-expansion": "1.1.11"
}
},
"minimist": {
"version": "0.0.8",
"resolved": "http://registry.npmjs.org/minimist/-/minimist-0.0.8.tgz",
"integrity": "sha1-hX/Kv8M5fSYluCKCYuhqp6ARsF0="
},
"minipass": {
"version": "2.3.5",
"resolved": "https://registry.npmjs.org/minipass/-/minipass-2.3.5.tgz",
"integrity": "sha512-Gi1W4k059gyRbyVUZQ4mEqLm0YIUiGYfvxhF6SIlk3ui1WVxMTGfGdQ2SInh3PDrRTVvPKgULkpJtT4RH10+VA==",
"requires": {
"safe-buffer": "5.1.2",
"yallist": "3.0.2"
}
},
"minizlib": {
"version": "1.1.1",
"resolved": "https://registry.npmjs.org/minizlib/-/minizlib-1.1.1.tgz",
"integrity": "sha512-TrfjCjk4jLhcJyGMYymBH6oTXcWjYbUAXTHDbtnWHjZC25h0cdajHuPE1zxb4DVmu8crfh+HwH/WMuyLG0nHBg==",
"requires": {
"minipass": "2.3.5"
}
},
"mkdirp": {
"version": "0.5.1",
"resolved": "http://registry.npmjs.org/mkdirp/-/mkdirp-0.5.1.tgz",
"integrity": "sha1-MAV0OOrGz3+MR2fzhkjWaX11yQM=",
"requires": {
"minimist": "0.0.8"
}
},
"ms": {
"version": "2.0.0",
"resolved": "https://registry.npmjs.org/ms/-/ms-2.0.0.tgz",
"integrity": "sha1-VgiurfwAvmwpAd9fmGF4jeDVl8g="
},
"nan": {
"version": "2.11.1",
"resolved": "https://registry.npmjs.org/nan/-/nan-2.11.1.tgz",
"integrity": "sha512-iji6k87OSXa0CcrLl9z+ZiYSuR2o+c0bGuNmXdrhTQTakxytAFsC56SArGYoiHlJlFoHSnvmhpceZJaXkVuOtA=="
},
"needle": {
"version": "2.2.4",
"resolved": "https://registry.npmjs.org/needle/-/needle-2.2.4.tgz",
"integrity": "sha512-HyoqEb4wr/rsoaIDfTH2aVL9nWtQqba2/HvMv+++m8u0dz808MaagKILxtfeSN7QU7nvbQ79zk3vYOJp9zsNEA==",
"requires": {
"debug": "2.6.9",
"iconv-lite": "0.4.24",
"sax": "1.2.4"
}
},
"node-pre-gyp": {
"version": "0.11.0",
"resolved": "https://registry.npmjs.org/node-pre-gyp/-/node-pre-gyp-0.11.0.tgz",
"integrity": "sha512-TwWAOZb0j7e9eGaf9esRx3ZcLaE5tQ2lvYy1pb5IAaG1a2e2Kv5Lms1Y4hpj+ciXJRofIxxlt5haeQ/2ANeE0Q==",
"requires": {
"detect-libc": "1.0.3",
"mkdirp": "0.5.1",
"needle": "2.2.4",
"nopt": "4.0.1",
"npm-packlist": "1.1.12",
"npmlog": "4.1.2",
"rc": "1.2.8",
"rimraf": "2.6.2",
"semver": "5.6.0",
"tar": "4.4.6"
}
},
"nopt": {
"version": "4.0.1",
"resolved": "https://registry.npmjs.org/nopt/-/nopt-4.0.1.tgz",
"integrity": "sha1-0NRoWv1UFRk8jHUFYC0NF81kR00=",
"requires": {
"abbrev": "1.1.1",
"osenv": "0.1.5"
}
},
"npm-bundled": {
"version": "1.0.5",
"resolved": "https://registry.npmjs.org/npm-bundled/-/npm-bundled-1.0.5.tgz",
"integrity": "sha512-m/e6jgWu8/v5niCUKQi9qQl8QdeEduFA96xHDDzFGqly0OOjI7c+60KM/2sppfnUU9JJagf+zs+yGhqSOFj71g=="
},
"npm-packlist": {
"version": "1.1.12",
"resolved": "https://registry.npmjs.org/npm-packlist/-/npm-packlist-1.1.12.tgz",
"integrity": "sha512-WJKFOVMeAlsU/pjXuqVdzU0WfgtIBCupkEVwn+1Y0ERAbUfWw8R4GjgVbaKnUjRoD2FoQbHOCbOyT5Mbs9Lw4g==",
"requires": {
"ignore-walk": "3.0.1",
"npm-bundled": "1.0.5"
}
},
"npmlog": {
"version": "4.1.2",
"resolved": "https://registry.npmjs.org/npmlog/-/npmlog-4.1.2.tgz",
"integrity": "sha512-2uUqazuKlTaSI/dC8AzicUck7+IrEaOnN/e0jd3Xtt1KcGpwx30v50mL7oPyr/h9bL3E4aZccVwpwP+5W9Vjkg==",
"requires": {
"are-we-there-yet": "1.1.5",
"console-control-strings": "1.1.0",
"gauge": "2.7.4",
"set-blocking": "2.0.0"
}
},
"number-is-nan": {
"version": "1.0.1",
"resolved": "https://registry.npmjs.org/number-is-nan/-/number-is-nan-1.0.1.tgz",
"integrity": "sha1-CXtgK1NCKlIsGvuHkDGDNpQaAR0="
},
"object-assign": {
"version": "4.1.1",
"resolved": "https://registry.npmjs.org/object-assign/-/object-assign-4.1.1.tgz",
"integrity": "sha1-IQmtx5ZYh8/AXLvUQsrIv7s2CGM="
},
"once": {
"version": "1.4.0",
"resolved": "https://registry.npmjs.org/once/-/once-1.4.0.tgz",
"integrity": "sha1-WDsap3WWHUsROsF9nFC6753Xa9E=",
"requires": {
"wrappy": "1.0.2"
}
},
"os-homedir": {
"version": "1.0.2",
"resolved": "https://registry.npmjs.org/os-homedir/-/os-homedir-1.0.2.tgz",
"integrity": "sha1-/7xJiDNuDoM94MFox+8VISGqf7M="
},
"os-tmpdir": {
"version": "1.0.2",
"resolved": "https://registry.npmjs.org/os-tmpdir/-/os-tmpdir-1.0.2.tgz",
"integrity": "sha1-u+Z0BseaqFxc/sdm/lc0VV36EnQ="
},
"osenv": {
"version": "0.1.5",
"resolved": "https://registry.npmjs.org/osenv/-/osenv-0.1.5.tgz",
"integrity": "sha512-0CWcCECdMVc2Rw3U5w9ZjqX6ga6ubk1xDVKxtBQPK7wis/0F2r9T6k4ydGYhecl7YUBxBVxhL5oisPsNxAPe2g==",
"requires": {
"os-homedir": "1.0.2",
"os-tmpdir": "1.0.2"
}
},
"path-is-absolute": {
"version": "1.0.1",
"resolved": "https://registry.npmjs.org/path-is-absolute/-/path-is-absolute-1.0.1.tgz",
"integrity": "sha1-F0uSaHNVNP+8es5r9TpanhtcX18="
},
"process-nextick-args": {
"version": "2.0.0",
"resolved": "https://registry.npmjs.org/process-nextick-args/-/process-nextick-args-2.0.0.tgz",
"integrity": "sha512-MtEC1TqN0EU5nephaJ4rAtThHtC86dNN9qCuEhtshvpVBkAW5ZO7BASN9REnF9eoXGcRub+pFuKEpOHE+HbEMw=="
},
"progress": {
"version": "2.0.1",
"resolved": "https://registry.npmjs.org/progress/-/progress-2.0.1.tgz",
"integrity": "sha512-OE+a6vzqazc+K6LxJrX5UPyKFvGnL5CYmq2jFGNIBWHpc4QyE49/YOumcrpQFJpfejmvRtbJzgO1zPmMCqlbBg=="
},
"protobufjs": {
"version": "6.8.8",
"resolved": "https://registry.npmjs.org/protobufjs/-/protobufjs-6.8.8.tgz",
"integrity": "sha512-AAmHtD5pXgZfi7GMpllpO3q1Xw1OYldr+dMUlAnffGTAhqkg72WdmSY71uKBF/JuyiKs8psYbtKrhi0ASCD8qw==",
"requires": {
"@protobufjs/aspromise": "1.1.2",
"@protobufjs/base64": "1.1.2",
"@protobufjs/codegen": "2.0.4",
"@protobufjs/eventemitter": "1.1.0",
"@protobufjs/fetch": "1.1.0",
"@protobufjs/float": "1.0.2",
"@protobufjs/inquire": "1.1.0",
"@protobufjs/path": "1.1.2",
"@protobufjs/pool": "1.1.0",
"@protobufjs/utf8": "1.1.0",
"@types/long": "4.0.0",
"@types/node": "10.12.0",
"long": "4.0.0"
},
"dependencies": {
"@types/long": {
"version": "4.0.0",
"resolved": "https://registry.npmjs.org/@types/long/-/long-4.0.0.tgz",
"integrity": "sha512-1w52Nyx4Gq47uuu0EVcsHBxZFJgurQ+rTKS3qMHxR1GY2T8c2AJYd6vZoZ9q1rupaDjU0yT+Jc2XTyXkjeMA+Q=="
}
}
},
"rc": {
"version": "1.2.8",
"resolved": "https://registry.npmjs.org/rc/-/rc-1.2.8.tgz",
"integrity": "sha512-y3bGgqKj3QBdxLbLkomlohkvsA8gdAiUQlSBJnBhfn+BPxg4bc62d8TcBW15wavDfgexCgccckhcZvywyQYPOw==",
"requires": {
"deep-extend": "0.6.0",
"ini": "1.3.5",
"minimist": "1.2.0",
"strip-json-comments": "2.0.1"
},
"dependencies": {
"minimist": {
"version": "1.2.0",
"resolved": "http://registry.npmjs.org/minimist/-/minimist-1.2.0.tgz",
"integrity": "sha1-o1AIsg9BOD7sH7kU9M1d95omQoQ="
}
}
},
"readable-stream": {
"version": "2.3.6",
"resolved": "http://registry.npmjs.org/readable-stream/-/readable-stream-2.3.6.tgz",
"integrity": "sha512-tQtKA9WIAhBF3+VLAseyMqZeBjW0AHJoxOtYqSUZNJxauErmLbVm2FW1y+J/YA9dUrAC39ITejlZWhVIwawkKw==",
"requires": {
"core-util-is": "1.0.2",
"inherits": "2.0.3",
"isarray": "1.0.0",
"process-nextick-args": "2.0.0",
"safe-buffer": "5.1.2",
"string_decoder": "1.1.1",
"util-deprecate": "1.0.2"
}
},
"rimraf": {
"version": "2.6.2",
"resolved": "https://registry.npmjs.org/rimraf/-/rimraf-2.6.2.tgz",
"integrity": "sha512-lreewLK/BlghmxtfH36YYVg1i8IAce4TI7oao75I1g245+6BctqTVQiBP3YUJ9C6DQOXJmkYR9X9fCLtCOJc5w==",
"requires": {
"glob": "7.1.3"
}
},
"safe-buffer": {
"version": "5.1.2",
"resolved": "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.1.2.tgz",
"integrity": "sha512-Gd2UZBJDkXlY7GbJxfsE8/nvKkUEU1G38c1siN6QP6a9PT9MmHB8GnpscSmMJSoF8LOIrt8ud/wPtojys4G6+g=="
},
"safer-buffer": {
"version": "2.1.2",
"resolved": "https://registry.npmjs.org/safer-buffer/-/safer-buffer-2.1.2.tgz",
"integrity": "sha512-YZo3K82SD7Riyi0E1EQPojLz7kpepnSQI9IyPbHHg1XXXevb5dJI7tpyN2ADxGcQbHG7vcyRHk0cbwqcQriUtg=="
},
"sax": {
"version": "1.2.4",
"resolved": "https://registry.npmjs.org/sax/-/sax-1.2.4.tgz",
"integrity": "sha512-NqVDv9TpANUjFm0N8uM5GxL36UgKi9/atZw+x7YFnQ8ckwFGKrl4xX4yWtrey3UJm5nP1kUbnYgLopqWNSRhWw=="
},
"seedrandom": {
"version": "2.4.3",
"resolved": "https://registry.npmjs.org/seedrandom/-/seedrandom-2.4.3.tgz",
"integrity": "sha1-JDhQTa0zkXMUv/GKxNeU8W1qrsw="
},
"semver": {
"version": "5.6.0",
"resolved": "https://registry.npmjs.org/semver/-/semver-5.6.0.tgz",
"integrity": "sha512-RS9R6R35NYgQn++fkDWaOmqGoj4Ek9gGs+DPxNUZKuwE183xjJroKvyo1IzVFeXvUrvmALy6FWD5xrdJT25gMg=="
},
"set-blocking": {
"version": "2.0.0",
"resolved": "https://registry.npmjs.org/set-blocking/-/set-blocking-2.0.0.tgz",
"integrity": "sha1-BF+XgtARrppoA93TgrJDkrPYkPc="
},
"signal-exit": {
"version": "3.0.2",
"resolved": "https://registry.npmjs.org/signal-exit/-/signal-exit-3.0.2.tgz",
"integrity": "sha1-tf3AjxKH6hF4Yo5BXiUTK3NkbG0="
},
"string-width": {
"version": "1.0.2",
"resolved": "https://registry.npmjs.org/string-width/-/string-width-1.0.2.tgz",
"integrity": "sha1-EYvfW4zcUaKn5w0hHgfisLmxB9M=",
"requires": {
"code-point-at": "1.1.0",
"is-fullwidth-code-point": "1.0.0",
"strip-ansi": "3.0.1"
}
},
"string_decoder": {
"version": "1.1.1",
"resolved": "https://registry.npmjs.org/string_decoder/-/string_decoder-1.1.1.tgz",
"integrity": "sha512-n/ShnvDi6FHbbVfviro+WojiFzv+s8MPMHBczVePfUpDJLwoLT0ht1l4YwBCbi8pJAveEEdnkHyPyTP/mzRfwg==",
"requires": {
"safe-buffer": "5.1.2"
}
},
"strip-ansi": {
"version": "3.0.1",
"resolved": "http://registry.npmjs.org/strip-ansi/-/strip-ansi-3.0.1.tgz",
"integrity": "sha1-ajhfuIU9lS1f8F0Oiq+UJ43GPc8=",
"requires": {
"ansi-regex": "2.1.1"
}
},
"strip-json-comments": {
"version": "2.0.1",
"resolved": "https://registry.npmjs.org/strip-json-comments/-/strip-json-comments-2.0.1.tgz",
"integrity": "sha1-PFMZQukIwml8DsNEhYwobHygpgo="
},
"tar": {
"version": "4.4.6",
"resolved": "https://registry.npmjs.org/tar/-/tar-4.4.6.tgz",
"integrity": "sha512-tMkTnh9EdzxyfW+6GK6fCahagXsnYk6kE6S9Gr9pjVdys769+laCTbodXDhPAjzVtEBazRgP0gYqOjnk9dQzLg==",
"requires": {
"chownr": "1.1.1",
"fs-minipass": "1.2.5",
"minipass": "2.3.5",
"minizlib": "1.1.1",
"mkdirp": "0.5.1",
"safe-buffer": "5.1.2",
"yallist": "3.0.2"
}
},
"util-deprecate": {
"version": "1.0.2",
"resolved": "https://registry.npmjs.org/util-deprecate/-/util-deprecate-1.0.2.tgz",
"integrity": "sha1-RQ1Nyfpw3nMnYvvS1KKJgUGaDM8="
},
"wide-align": {
"version": "1.1.3",
"resolved": "https://registry.npmjs.org/wide-align/-/wide-align-1.1.3.tgz",
"integrity": "sha512-QGkOQc8XL6Bt5PwnsExKBPuMKBxnGxWWW3fU55Xt4feHozMUhdUMaBCk290qpm/wG5u/RSKzwdAC4i51YigihA==",
"requires": {
"string-width": "1.0.2"
}
},
"wrappy": {
"version": "1.0.2",
"resolved": "https://registry.npmjs.org/wrappy/-/wrappy-1.0.2.tgz",
"integrity": "sha1-tSQ9jz7BqjXxNkYFvA0QNuMKtp8="
},
"yallist": {
"version": "3.0.2",
"resolved": "https://registry.npmjs.org/yallist/-/yallist-3.0.2.tgz",
"integrity": "sha1-hFK0u36Dx8GI2AQcGoN8dz1ti7k="
}
}
}
{
"author": "justadudewhohacks",
"license": "MIT",
"dependencies": {
"@tensorflow/tfjs-node": "^0.1.19",
"canvas": "^2.0.1"
}
}
let spec_files = ['**/*.test.ts'].concat(
process.env.EXCLUDE_UNCOMPRESSED
? ['!**/*.uncompressed.test.ts']
: []
)
// exclude browser tests
spec_files = spec_files.concat(['!**/*.browser.test.ts'])
module.exports = {
spec_dir: 'test',
spec_files,
random: false
}
\ No newline at end of file
...@@ -22,10 +22,9 @@ let exclude = ( ...@@ -22,10 +22,9 @@ let exclude = (
'faceRecognitionNet', 'faceRecognitionNet',
'ssdMobilenetv1', 'ssdMobilenetv1',
'tinyFaceDetector', 'tinyFaceDetector',
'mtcnn', 'mtcnn'
'tinyYolov2'
] ]
: ['tinyYolov2'] : []
) )
.filter(ex => ex !== process.env.UUT) .filter(ex => ex !== process.env.UUT)
.map(ex => `test/tests/${ex}/*.ts`) .map(ex => `test/tests/${ex}/*.ts`)
...@@ -37,6 +36,10 @@ exclude = exclude.concat( ...@@ -37,6 +36,10 @@ exclude = exclude.concat(
: [] : []
) )
// exclude nodejs tests
exclude = exclude.concat(['**/*.node.test.ts'])
module.exports = function(config) { module.exports = function(config) {
const args = [] const args = []
if (process.env.BACKEND_CPU) { if (process.env.BACKEND_CPU) {
......
...@@ -5,77 +5,139 @@ ...@@ -5,77 +5,139 @@
"requires": true, "requires": true,
"dependencies": { "dependencies": {
"@babel/code-frame": { "@babel/code-frame": {
"version": "7.0.0-rc.3", "version": "7.0.0",
"resolved": "https://registry.npmjs.org/@babel/code-frame/-/code-frame-7.0.0-rc.3.tgz", "resolved": "https://registry.npmjs.org/@babel/code-frame/-/code-frame-7.0.0.tgz",
"integrity": "sha512-vH+ONMtvkQpjvKAXl5shNFyIpBwmkgKjo+buySLpQsMNDlqbJcFIMiYhwDrK4isZsae+QeHJYbqUJ0BYwyKNZw==", "integrity": "sha512-OfC2uemaknXr87bdLUkWog7nYuliM9Ij5HUcajsVcMCpQrcLmtxRbVFTIqmcSkSeYRBFBRxs2FiUqFJDLdiebA==",
"dev": true, "dev": true,
"requires": { "requires": {
"@babel/highlight": "7.0.0-rc.3" "@babel/highlight": "7.0.0"
} }
}, },
"@babel/highlight": { "@babel/highlight": {
"version": "7.0.0-rc.3", "version": "7.0.0",
"resolved": "https://registry.npmjs.org/@babel/highlight/-/highlight-7.0.0-rc.3.tgz", "resolved": "https://registry.npmjs.org/@babel/highlight/-/highlight-7.0.0.tgz",
"integrity": "sha512-BwDi54dopTDbp0nIXZ1Ln7dGaZx8Vxqi8IKVFxtHHFhlTMxwVnYZlRFamRy7yOG4KEemRYhENq5ea/cNa9Jvjw==", "integrity": "sha512-UFMC4ZeFC48Tpvj7C8UgLvtkaUuovQX+5xNWrsIoMG8o2z+XFKjKaN9iVmS84dPwVN00W4wPmqvYoZF3EGAsfw==",
"dev": true, "dev": true,
"requires": { "requires": {
"chalk": "2.4.1", "chalk": "2.4.1",
"esutils": "2.0.2", "esutils": "2.0.2",
"js-tokens": "4.0.0" "js-tokens": "4.0.0"
},
"dependencies": {
"ansi-styles": {
"version": "3.2.1",
"resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-3.2.1.tgz",
"integrity": "sha512-VT0ZI6kZRdTh8YyJw3SMbYm/u+NqfsAxEpWO0Pf9sq8/e94WxxOpPKx9FR1FlyCtOVDNOQ+8ntlqFxiRc+r5qA==",
"dev": true,
"requires": {
"color-convert": "1.9.2"
} }
}, },
"chalk": { "@protobufjs/aspromise": {
"version": "2.4.1", "version": "1.1.2",
"resolved": "https://registry.npmjs.org/chalk/-/chalk-2.4.1.tgz", "resolved": "https://registry.npmjs.org/@protobufjs/aspromise/-/aspromise-1.1.2.tgz",
"integrity": "sha512-ObN6h1v2fTJSmUXoS3nMQ92LbDK9be4TV+6G+omQlGJFdcUX5heKi1LZ1YnRMIgwTLEj3E24bT6tYni50rlCfQ==", "integrity": "sha1-m4sMxmPWaafY9vXQiToU00jzD78=",
"dev": true
},
"@protobufjs/base64": {
"version": "1.1.2",
"resolved": "https://registry.npmjs.org/@protobufjs/base64/-/base64-1.1.2.tgz",
"integrity": "sha512-AZkcAA5vnN/v4PDqKyMR5lx7hZttPDgClv83E//FMNhR2TMcLUhfRUBHCmSl0oi9zMgDDqRUJkSxO3wm85+XLg==",
"dev": true
},
"@protobufjs/codegen": {
"version": "2.0.4",
"resolved": "https://registry.npmjs.org/@protobufjs/codegen/-/codegen-2.0.4.tgz",
"integrity": "sha512-YyFaikqM5sH0ziFZCN3xDC7zeGaB/d0IUb9CATugHWbd1FRFwWwt4ld4OYMPWu5a3Xe01mGAULCdqhMlPl29Jg==",
"dev": true
},
"@protobufjs/eventemitter": {
"version": "1.1.0",
"resolved": "https://registry.npmjs.org/@protobufjs/eventemitter/-/eventemitter-1.1.0.tgz",
"integrity": "sha1-NVy8mLr61ZePntCV85diHx0Ga3A=",
"dev": true
},
"@protobufjs/fetch": {
"version": "1.1.0",
"resolved": "https://registry.npmjs.org/@protobufjs/fetch/-/fetch-1.1.0.tgz",
"integrity": "sha1-upn7WYYUr2VwDBYZ/wbUVLDYTEU=",
"dev": true, "dev": true,
"requires": { "requires": {
"ansi-styles": "3.2.1", "@protobufjs/aspromise": "1.1.2",
"escape-string-regexp": "1.0.5", "@protobufjs/inquire": "1.1.0"
"supports-color": "5.5.0"
} }
}, },
"has-flag": { "@protobufjs/float": {
"version": "3.0.0", "version": "1.0.2",
"resolved": "https://registry.npmjs.org/has-flag/-/has-flag-3.0.0.tgz", "resolved": "https://registry.npmjs.org/@protobufjs/float/-/float-1.0.2.tgz",
"integrity": "sha1-tdRU3CGZriJWmfNGfloH87lVuv0=", "integrity": "sha1-Xp4avctz/Ap8uLKR33jIy9l7h9E=",
"dev": true "dev": true
}, },
"js-tokens": { "@protobufjs/inquire": {
"version": "4.0.0", "version": "1.1.0",
"resolved": "https://registry.npmjs.org/js-tokens/-/js-tokens-4.0.0.tgz", "resolved": "https://registry.npmjs.org/@protobufjs/inquire/-/inquire-1.1.0.tgz",
"integrity": "sha512-RdJUflcE3cUzKiMqQgsCu06FPu9UdIJO0beYbPhHN4k6apgJtifcoCtT9bcxOpYBtpD2kCM6Sbzg4CausW/PKQ==", "integrity": "sha1-/yAOPnzyQp4tyvwRQIKOjMY48Ik=",
"dev": true "dev": true
}, },
"supports-color": { "@protobufjs/path": {
"version": "5.5.0", "version": "1.1.2",
"resolved": "https://registry.npmjs.org/supports-color/-/supports-color-5.5.0.tgz", "resolved": "https://registry.npmjs.org/@protobufjs/path/-/path-1.1.2.tgz",
"integrity": "sha512-QjVjwdXIt408MIiAqCX4oUKsgU2EqAGzs2Ppkm4aQYbjm+ZEWEcW4SfFNTr4uMNZma0ey4f5lgLrkB0aX0QMow==", "integrity": "sha1-bMKyDFya1q0NzP0hynZz2Nf79o0=",
"dev": true
},
"@protobufjs/pool": {
"version": "1.1.0",
"resolved": "https://registry.npmjs.org/@protobufjs/pool/-/pool-1.1.0.tgz",
"integrity": "sha1-Cf0V8tbTq/qbZbw2ZQbWrXhG/1Q=",
"dev": true
},
"@protobufjs/utf8": {
"version": "1.1.0",
"resolved": "https://registry.npmjs.org/@protobufjs/utf8/-/utf8-1.1.0.tgz",
"integrity": "sha1-p3c2C1s5oaLlEG+OhY8v0tBgxXA=",
"dev": true
},
"@tensorflow/tfjs": {
"version": "0.13.3",
"resolved": "https://registry.npmjs.org/@tensorflow/tfjs/-/tfjs-0.13.3.tgz",
"integrity": "sha512-7WXS6wfCnX/5s2CmAYelRRiRvRkpDbdoL/U+qZ+08lS2NWFWT2y+ga/nWq6brNHUEShLpXNdZ8S++UcG2q2t0g==",
"dev": true, "dev": true,
"requires": { "requires": {
"has-flag": "3.0.0" "@tensorflow/tfjs-converter": "0.6.5",
} "@tensorflow/tfjs-core": "0.13.8",
"@tensorflow/tfjs-layers": "0.8.3"
} }
},
"@tensorflow/tfjs-converter": {
"version": "0.6.5",
"resolved": "https://registry.npmjs.org/@tensorflow/tfjs-converter/-/tfjs-converter-0.6.5.tgz",
"integrity": "sha512-jbmxtRtRV/7JZNmhkjZsPJ651ntrWlxPfzTYF10vPWnS2Tz+u0RjJj9pXRYoO4jfsvFz//kNtXKMhb/Icbp5BA==",
"dev": true,
"requires": {
"@types/long": "3.0.32",
"protobufjs": "6.8.8"
} }
}, },
"@tensorflow/tfjs-core": { "@tensorflow/tfjs-core": {
"version": "0.13.2", "version": "0.13.8",
"resolved": "https://registry.npmjs.org/@tensorflow/tfjs-core/-/tfjs-core-0.13.2.tgz", "resolved": "https://registry.npmjs.org/@tensorflow/tfjs-core/-/tfjs-core-0.13.8.tgz",
"integrity": "sha512-0+/Lfjmv+eMibVaYCe2trSqps5+K+b5BZbzJTwbHM2fpV1l1XJpp6PLyZ05A4mhQcAiuIgNgim48H97Yl+yAMw==", "integrity": "sha512-Vp0dU6wRisXZRUOEb7w5lPybJkJw2iR32sTlv1ZY42fReMgP7nYKPVoNaByjJ7VFmJe6fK0yURklI+vc+u8SEQ==",
"requires": { "requires": {
"@types/seedrandom": "2.4.27", "@types/seedrandom": "2.4.27",
"@types/webgl-ext": "0.0.29", "@types/webgl-ext": "0.0.30",
"@types/webgl2": "0.0.4", "@types/webgl2": "0.0.4",
"seedrandom": "2.4.4" "seedrandom": "2.4.3"
}
},
"@tensorflow/tfjs-layers": {
"version": "0.8.3",
"resolved": "https://registry.npmjs.org/@tensorflow/tfjs-layers/-/tfjs-layers-0.8.3.tgz",
"integrity": "sha512-RHnTtXx7bE6d+x0byLi90nejy44caBKhYqBR0zmCVzxagxDSln9wd28ojHEh1/6gQyBejhMEQ3gUK4SsKTpjIw==",
"dev": true
},
"@tensorflow/tfjs-node": {
"version": "0.1.19",
"resolved": "https://registry.npmjs.org/@tensorflow/tfjs-node/-/tfjs-node-0.1.19.tgz",
"integrity": "sha512-iQwUu52H1fZjlwNHB3C0nqIhhfGoHz9o7pu9PTxkcFSAuKRqI4quvt96uT2GS089w+FLFyGwABi2hBjXDsYMJw==",
"dev": true,
"requires": {
"@tensorflow/tfjs": "0.13.3",
"adm-zip": "0.4.11",
"bindings": "1.3.0",
"progress": "2.0.1",
"rimraf": "2.6.2",
"tar": "4.4.6"
} }
}, },
"@types/estree": { "@types/estree": {
...@@ -85,15 +147,21 @@ ...@@ -85,15 +147,21 @@
"dev": true "dev": true
}, },
"@types/jasmine": { "@types/jasmine": {
"version": "2.8.8", "version": "2.8.9",
"resolved": "https://registry.npmjs.org/@types/jasmine/-/jasmine-2.8.8.tgz", "resolved": "https://registry.npmjs.org/@types/jasmine/-/jasmine-2.8.9.tgz",
"integrity": "sha512-OJSUxLaxXsjjhob2DBzqzgrkLmukM3+JMpRp0r0E4HTdT1nwDCWhaswjYxazPij6uOdzHCJfNbDjmQ1/rnNbCg==", "integrity": "sha512-8dPZwjosElZOGGYw1nwTvOEMof4gjwAWNFS93nBI091BoEfd5drnHOLRMiRF/LOPuMTn5LgEdv0bTUO8QFVuHQ==",
"dev": true
},
"@types/long": {
"version": "3.0.32",
"resolved": "https://registry.npmjs.org/@types/long/-/long-3.0.32.tgz",
"integrity": "sha512-ZXyOOm83p7X8p3s0IYM3VeueNmHpkk/yMlP8CLeOnEcu6hIwPH7YjZBvhQkR0ZFS2DqZAxKtJ/M5fcuv3OU5BA==",
"dev": true "dev": true
}, },
"@types/node": { "@types/node": {
"version": "10.9.2", "version": "10.12.2",
"resolved": "https://registry.npmjs.org/@types/node/-/node-10.9.2.tgz", "resolved": "https://registry.npmjs.org/@types/node/-/node-10.12.2.tgz",
"integrity": "sha512-pwZnkVyCGJ3LsQ0/3flQK5lCFao4esIzwUVzzk5NvL9vnkEyDhNf4fhHzUMHvyr56gNZywWTS2MR0euabMSz4A==", "integrity": "sha512-53ElVDSnZeFUUFIYzI8WLQ25IhWzb6vbddNp8UHlXQyU0ET2RhV5zg0NfubzU7iNMh5bBXb0htCzfvrSVNgzaQ==",
"dev": true "dev": true
}, },
"@types/seedrandom": { "@types/seedrandom": {
...@@ -102,9 +170,9 @@ ...@@ -102,9 +170,9 @@
"integrity": "sha1-nbVjk33YaRX2kJK8QyWdL0hXjkE=" "integrity": "sha1-nbVjk33YaRX2kJK8QyWdL0hXjkE="
}, },
"@types/webgl-ext": { "@types/webgl-ext": {
"version": "0.0.29", "version": "0.0.30",
"resolved": "https://registry.npmjs.org/@types/webgl-ext/-/webgl-ext-0.0.29.tgz", "resolved": "https://registry.npmjs.org/@types/webgl-ext/-/webgl-ext-0.0.30.tgz",
"integrity": "sha512-ZlVjDQU5Vlc9hF4LGdDldujZUf0amwlwGv1RI2bfvdrEHIl6X/7MZVpemJUjS7NxD9XaKfE8SlFrxsfXpUkt/A==" "integrity": "sha512-LKVgNmBxN0BbljJrVUwkxwRYqzsAEPcZOe6S2T6ZaBDIrFp0qu4FNlpc5sM1tGbXUYFgdVQIoeLk1Y1UoblyEg=="
}, },
"@types/webgl2": { "@types/webgl2": {
"version": "0.0.4", "version": "0.0.4",
...@@ -133,6 +201,12 @@ ...@@ -133,6 +201,12 @@
"integrity": "sha1-EFSVrlNh1pe9GVyCUZLhrX8lN4c=", "integrity": "sha1-EFSVrlNh1pe9GVyCUZLhrX8lN4c=",
"dev": true "dev": true
}, },
"adm-zip": {
"version": "0.4.11",
"resolved": "https://registry.npmjs.org/adm-zip/-/adm-zip-0.4.11.tgz",
"integrity": "sha512-L8vcjDTCOIJk7wFvmlEUN7AsSb8T+2JrdP7KINBjzr24TJ5Mwj590sLu3BC7zNZowvJWa/JtPmD8eJCzdtDWjA==",
"dev": true
},
"after": { "after": {
"version": "0.8.2", "version": "0.8.2",
"resolved": "https://registry.npmjs.org/after/-/after-0.8.2.tgz", "resolved": "https://registry.npmjs.org/after/-/after-0.8.2.tgz",
...@@ -185,6 +259,21 @@ ...@@ -185,6 +259,21 @@
"ansi-wrap": "0.1.0" "ansi-wrap": "0.1.0"
} }
}, },
"ansi-regex": {
"version": "2.1.1",
"resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-2.1.1.tgz",
"integrity": "sha1-w7M6te42DYbg5ijwRorn7yfWVN8=",
"dev": true
},
"ansi-styles": {
"version": "3.2.1",
"resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-3.2.1.tgz",
"integrity": "sha512-VT0ZI6kZRdTh8YyJw3SMbYm/u+NqfsAxEpWO0Pf9sq8/e94WxxOpPKx9FR1FlyCtOVDNOQ+8ntlqFxiRc+r5qA==",
"dev": true,
"requires": {
"color-convert": "1.9.2"
}
},
"ansi-wrap": { "ansi-wrap": {
"version": "0.1.0", "version": "0.1.0",
"resolved": "https://registry.npmjs.org/ansi-wrap/-/ansi-wrap-0.1.0.tgz", "resolved": "https://registry.npmjs.org/ansi-wrap/-/ansi-wrap-0.1.0.tgz",
...@@ -201,6 +290,22 @@ ...@@ -201,6 +290,22 @@
"normalize-path": "2.1.1" "normalize-path": "2.1.1"
} }
}, },
"aproba": {
"version": "1.2.0",
"resolved": "https://registry.npmjs.org/aproba/-/aproba-1.2.0.tgz",
"integrity": "sha512-Y9J6ZjXtoYh8RnXVCMOU/ttDmk1aBjunq9vO0ta5x85WDQiQfUF9sIPBITdbiiIVcBo03Hi3jMxigBtsddlXRw==",
"dev": true
},
"are-we-there-yet": {
"version": "1.1.5",
"resolved": "https://registry.npmjs.org/are-we-there-yet/-/are-we-there-yet-1.1.5.tgz",
"integrity": "sha512-5hYdAkZlcG8tOLujVDTgCT+uPX0VnpAH28gWsLfzpXYm7wP6mp5Q/gYyR7YQ0cKVJcXJnl3j2kpBan13PtQf6w==",
"dev": true,
"requires": {
"delegates": "1.0.0",
"readable-stream": "2.3.6"
}
},
"argparse": { "argparse": {
"version": "1.0.10", "version": "1.0.10",
"resolved": "https://registry.npmjs.org/argparse/-/argparse-1.0.10.tgz", "resolved": "https://registry.npmjs.org/argparse/-/argparse-1.0.10.tgz",
...@@ -252,6 +357,12 @@ ...@@ -252,6 +357,12 @@
"integrity": "sha512-wGUIVQXuehL5TCqQun8OW81jGzAWycqzFF8lFp+GOM5BXLYj3bKNsYC4daB7n6XjCqxQA/qgTJ+8ANR3acjrog==", "integrity": "sha512-wGUIVQXuehL5TCqQun8OW81jGzAWycqzFF8lFp+GOM5BXLYj3bKNsYC4daB7n6XjCqxQA/qgTJ+8ANR3acjrog==",
"dev": true "dev": true
}, },
"arrify": {
"version": "1.0.1",
"resolved": "https://registry.npmjs.org/arrify/-/arrify-1.0.1.tgz",
"integrity": "sha1-iYUI2iIm84DfkEcoRWhJwVAaSw0=",
"dev": true
},
"asn1.js": { "asn1.js": {
"version": "4.10.1", "version": "4.10.1",
"resolved": "https://registry.npmjs.org/asn1.js/-/asn1.js-4.10.1.tgz", "resolved": "https://registry.npmjs.org/asn1.js/-/asn1.js-4.10.1.tgz",
...@@ -422,6 +533,12 @@ ...@@ -422,6 +533,12 @@
"integrity": "sha1-RqoXUftqL5PuXmibsQh9SxTGwgU=", "integrity": "sha1-RqoXUftqL5PuXmibsQh9SxTGwgU=",
"dev": true "dev": true
}, },
"bindings": {
"version": "1.3.0",
"resolved": "https://registry.npmjs.org/bindings/-/bindings-1.3.0.tgz",
"integrity": "sha512-DpLh5EzMR2kzvX1KIlVC0VkC3iZtHKTgdtZ0a3pglBZdaQFjt5S9g9xd1lE+YvXyfd6mtCeRnrUfOLYiTMlNSw==",
"dev": true
},
"blob": { "blob": {
"version": "0.0.4", "version": "0.0.4",
"resolved": "https://registry.npmjs.org/blob/-/blob-0.0.4.tgz", "resolved": "https://registry.npmjs.org/blob/-/blob-0.0.4.tgz",
...@@ -622,6 +739,12 @@ ...@@ -622,6 +739,12 @@
"integrity": "sha1-+PeLdniYiO858gXNY39o5wISKyw=", "integrity": "sha1-+PeLdniYiO858gXNY39o5wISKyw=",
"dev": true "dev": true
}, },
"buffer-from": {
"version": "1.1.1",
"resolved": "https://registry.npmjs.org/buffer-from/-/buffer-from-1.1.1.tgz",
"integrity": "sha512-MQcXEUbCKtEo7bhqEs6560Hyd4XaovZlO/k9V3hjVUF/zwW7KBVdSK4gIt/bzwS9MbR5qob+F5jusZsb0YQK2A==",
"dev": true
},
"buffer-xor": { "buffer-xor": {
"version": "1.0.3", "version": "1.0.3",
"resolved": "https://registry.npmjs.org/buffer-xor/-/buffer-xor-1.0.3.tgz", "resolved": "https://registry.npmjs.org/buffer-xor/-/buffer-xor-1.0.3.tgz",
...@@ -694,6 +817,16 @@ ...@@ -694,6 +817,16 @@
} }
} }
}, },
"canvas": {
"version": "2.0.1",
"resolved": "https://registry.npmjs.org/canvas/-/canvas-2.0.1.tgz",
"integrity": "sha512-aVESjDBMXGRL+aZqjFtxMVOg8KzHhNcKIscoeC8OROccmApKOriHsnySxq228Kc+3tzB9Qc6tzD4ukp9Zjwz1Q==",
"dev": true,
"requires": {
"nan": "2.11.1",
"node-pre-gyp": "0.11.0"
}
},
"center-align": { "center-align": {
"version": "0.1.3", "version": "0.1.3",
"resolved": "https://registry.npmjs.org/center-align/-/center-align-0.1.3.tgz", "resolved": "https://registry.npmjs.org/center-align/-/center-align-0.1.3.tgz",
...@@ -705,6 +838,17 @@ ...@@ -705,6 +838,17 @@
"lazy-cache": "1.0.4" "lazy-cache": "1.0.4"
} }
}, },
"chalk": {
"version": "2.4.1",
"resolved": "https://registry.npmjs.org/chalk/-/chalk-2.4.1.tgz",
"integrity": "sha512-ObN6h1v2fTJSmUXoS3nMQ92LbDK9be4TV+6G+omQlGJFdcUX5heKi1LZ1YnRMIgwTLEj3E24bT6tYni50rlCfQ==",
"dev": true,
"requires": {
"ansi-styles": "3.2.1",
"escape-string-regexp": "1.0.5",
"supports-color": "5.5.0"
}
},
"chokidar": { "chokidar": {
"version": "2.0.4", "version": "2.0.4",
"resolved": "https://registry.npmjs.org/chokidar/-/chokidar-2.0.4.tgz", "resolved": "https://registry.npmjs.org/chokidar/-/chokidar-2.0.4.tgz",
...@@ -725,6 +869,12 @@ ...@@ -725,6 +869,12 @@
"upath": "1.1.0" "upath": "1.1.0"
} }
}, },
"chownr": {
"version": "1.1.1",
"resolved": "https://registry.npmjs.org/chownr/-/chownr-1.1.1.tgz",
"integrity": "sha512-j38EvO5+LHX84jlo6h4UzmOwi0UgW61WRyPtJz4qaadK5eY3BTS5TY/S1Stc3Uk2lIM6TPevAlULiEJwie860g==",
"dev": true
},
"cipher-base": { "cipher-base": {
"version": "1.0.4", "version": "1.0.4",
"resolved": "https://registry.npmjs.org/cipher-base/-/cipher-base-1.0.4.tgz", "resolved": "https://registry.npmjs.org/cipher-base/-/cipher-base-1.0.4.tgz",
...@@ -791,6 +941,12 @@ ...@@ -791,6 +941,12 @@
"integrity": "sha1-2jCcwmPfFZlMaIypAheco8fNfH4=", "integrity": "sha1-2jCcwmPfFZlMaIypAheco8fNfH4=",
"dev": true "dev": true
}, },
"code-point-at": {
"version": "1.1.0",
"resolved": "https://registry.npmjs.org/code-point-at/-/code-point-at-1.1.0.tgz",
"integrity": "sha1-DQcLTQQ6W+ozovGkDi7bPZpMz3c=",
"dev": true
},
"collection-visit": { "collection-visit": {
"version": "1.0.0", "version": "1.0.0",
"resolved": "https://registry.npmjs.org/collection-visit/-/collection-visit-1.0.0.tgz", "resolved": "https://registry.npmjs.org/collection-visit/-/collection-visit-1.0.0.tgz",
...@@ -908,6 +1064,12 @@ ...@@ -908,6 +1064,12 @@
"date-now": "0.1.4" "date-now": "0.1.4"
} }
}, },
"console-control-strings": {
"version": "1.1.0",
"resolved": "https://registry.npmjs.org/console-control-strings/-/console-control-strings-1.1.0.tgz",
"integrity": "sha1-PXz0Rk22RG6mRL9LOVB/mFEAjo4=",
"dev": true
},
"constants-browserify": { "constants-browserify": {
"version": "1.0.0", "version": "1.0.0",
"resolved": "https://registry.npmjs.org/constants-browserify/-/constants-browserify-1.0.0.tgz", "resolved": "https://registry.npmjs.org/constants-browserify/-/constants-browserify-1.0.0.tgz",
...@@ -1064,6 +1226,12 @@ ...@@ -1064,6 +1226,12 @@
"integrity": "sha1-6zkTMzRYd1y4TNGh+uBiEGu4dUU=", "integrity": "sha1-6zkTMzRYd1y4TNGh+uBiEGu4dUU=",
"dev": true "dev": true
}, },
"deep-extend": {
"version": "0.6.0",
"resolved": "https://registry.npmjs.org/deep-extend/-/deep-extend-0.6.0.tgz",
"integrity": "sha512-LOHxIOaPYdHlJRtCQfDIVZtfw/ufM8+rVj649RIHzcm/vGwQRXFt6OPqIFWsm2XEMrNIEtWR64sY1LEKD2vAOA==",
"dev": true
},
"deep-is": { "deep-is": {
"version": "0.1.3", "version": "0.1.3",
"resolved": "https://registry.npmjs.org/deep-is/-/deep-is-0.1.3.tgz", "resolved": "https://registry.npmjs.org/deep-is/-/deep-is-0.1.3.tgz",
...@@ -1120,6 +1288,12 @@ ...@@ -1120,6 +1288,12 @@
} }
} }
}, },
"delegates": {
"version": "1.0.0",
"resolved": "https://registry.npmjs.org/delegates/-/delegates-1.0.0.tgz",
"integrity": "sha1-hMbhWbgZBP3KWaDvRM2HDTElD5o=",
"dev": true
},
"depd": { "depd": {
"version": "1.1.2", "version": "1.1.2",
"resolved": "https://registry.npmjs.org/depd/-/depd-1.1.2.tgz", "resolved": "https://registry.npmjs.org/depd/-/depd-1.1.2.tgz",
...@@ -1136,6 +1310,12 @@ ...@@ -1136,6 +1310,12 @@
"minimalistic-assert": "1.0.1" "minimalistic-assert": "1.0.1"
} }
}, },
"detect-libc": {
"version": "1.0.3",
"resolved": "https://registry.npmjs.org/detect-libc/-/detect-libc-1.0.3.tgz",
"integrity": "sha1-+hN8S9aY7fVc1c0CrFWfkaTEups=",
"dev": true
},
"di": { "di": {
"version": "0.0.1", "version": "0.0.1",
"resolved": "https://registry.npmjs.org/di/-/di-0.0.1.tgz", "resolved": "https://registry.npmjs.org/di/-/di-0.0.1.tgz",
...@@ -1644,12 +1824,37 @@ ...@@ -1644,12 +1824,37 @@
"universalify": "0.1.2" "universalify": "0.1.2"
} }
}, },
"fs-minipass": {
"version": "1.2.5",
"resolved": "https://registry.npmjs.org/fs-minipass/-/fs-minipass-1.2.5.tgz",
"integrity": "sha512-JhBl0skXjUPCFH7x6x61gQxrKyXsxB5gcgePLZCwfyCGGsTISMoIeObbrvVeP6Xmyaudw4TT43qV2Gz+iyd2oQ==",
"dev": true,
"requires": {
"minipass": "2.3.5"
}
},
"fs.realpath": { "fs.realpath": {
"version": "1.0.0", "version": "1.0.0",
"resolved": "https://registry.npmjs.org/fs.realpath/-/fs.realpath-1.0.0.tgz", "resolved": "https://registry.npmjs.org/fs.realpath/-/fs.realpath-1.0.0.tgz",
"integrity": "sha1-FQStJSMVjKpA20onh8sBQRmU6k8=", "integrity": "sha1-FQStJSMVjKpA20onh8sBQRmU6k8=",
"dev": true "dev": true
}, },
"gauge": {
"version": "2.7.4",
"resolved": "https://registry.npmjs.org/gauge/-/gauge-2.7.4.tgz",
"integrity": "sha1-LANAXHU4w51+s3sxcCLjJfsBi/c=",
"dev": true,
"requires": {
"aproba": "1.2.0",
"console-control-strings": "1.1.0",
"has-unicode": "2.0.1",
"object-assign": "4.1.1",
"signal-exit": "3.0.2",
"string-width": "1.0.2",
"strip-ansi": "3.0.1",
"wide-align": "1.1.3"
}
},
"get-stdin": { "get-stdin": {
"version": "4.0.1", "version": "4.0.1",
"resolved": "https://registry.npmjs.org/get-stdin/-/get-stdin-4.0.1.tgz", "resolved": "https://registry.npmjs.org/get-stdin/-/get-stdin-4.0.1.tgz",
...@@ -1818,6 +2023,12 @@ ...@@ -1818,6 +2023,12 @@
"integrity": "sha1-nZ55MWXOAXoA8AQYxD+UKnsdEfo=", "integrity": "sha1-nZ55MWXOAXoA8AQYxD+UKnsdEfo=",
"dev": true "dev": true
}, },
"has-unicode": {
"version": "2.0.1",
"resolved": "https://registry.npmjs.org/has-unicode/-/has-unicode-2.0.1.tgz",
"integrity": "sha1-4Ob+aijPUROIVeCG0Wkedx3iqLk=",
"dev": true
},
"has-value": { "has-value": {
"version": "1.0.0", "version": "1.0.0",
"resolved": "https://registry.npmjs.org/has-value/-/has-value-1.0.0.tgz", "resolved": "https://registry.npmjs.org/has-value/-/has-value-1.0.0.tgz",
...@@ -1931,6 +2142,15 @@ ...@@ -1931,6 +2142,15 @@
"integrity": "sha512-GguP+DRY+pJ3soyIiGPTvdiVXjZ+DbXOxGpXn3eMvNW4x4irjqXm4wHKscC+TfxSJ0yw/S1F24tqdMNsMZTiLA==", "integrity": "sha512-GguP+DRY+pJ3soyIiGPTvdiVXjZ+DbXOxGpXn3eMvNW4x4irjqXm4wHKscC+TfxSJ0yw/S1F24tqdMNsMZTiLA==",
"dev": true "dev": true
}, },
"ignore-walk": {
"version": "3.0.1",
"resolved": "https://registry.npmjs.org/ignore-walk/-/ignore-walk-3.0.1.tgz",
"integrity": "sha512-DTVlMx3IYPe0/JJcYP7Gxg7ttZZu3IInhuEhbchuqneY9wWe5Ojy2mXLBaQFUQmo0AW2r3qG7m1mg86js+gnlQ==",
"dev": true,
"requires": {
"minimatch": "3.0.4"
}
},
"indent-string": { "indent-string": {
"version": "2.1.0", "version": "2.1.0",
"resolved": "https://registry.npmjs.org/indent-string/-/indent-string-2.1.0.tgz", "resolved": "https://registry.npmjs.org/indent-string/-/indent-string-2.1.0.tgz",
...@@ -1962,6 +2182,12 @@ ...@@ -1962,6 +2182,12 @@
"integrity": "sha1-Yzwsg+PaQqUC9SRmAiSA9CCCYd4=", "integrity": "sha1-Yzwsg+PaQqUC9SRmAiSA9CCCYd4=",
"dev": true "dev": true
}, },
"ini": {
"version": "1.3.5",
"resolved": "https://registry.npmjs.org/ini/-/ini-1.3.5.tgz",
"integrity": "sha512-RZY5huIKCMRWDUqZlEi72f/lmXKMvuszcMBduliQ3nnWbx9X/ZBQO7DijMEYS9EhHBb2qacRUMtC7svLwe0lcw==",
"dev": true
},
"inline-source-map": { "inline-source-map": {
"version": "0.6.2", "version": "0.6.2",
"resolved": "https://registry.npmjs.org/inline-source-map/-/inline-source-map-0.6.2.tgz", "resolved": "https://registry.npmjs.org/inline-source-map/-/inline-source-map-0.6.2.tgz",
...@@ -2104,6 +2330,15 @@ ...@@ -2104,6 +2330,15 @@
"number-is-nan": "1.0.1" "number-is-nan": "1.0.1"
} }
}, },
"is-fullwidth-code-point": {
"version": "1.0.0",
"resolved": "https://registry.npmjs.org/is-fullwidth-code-point/-/is-fullwidth-code-point-1.0.0.tgz",
"integrity": "sha1-754xOG8DGn8NZDr4L95QxFfvAMs=",
"dev": true,
"requires": {
"number-is-nan": "1.0.1"
}
},
"is-glob": { "is-glob": {
"version": "4.0.0", "version": "4.0.0",
"resolved": "https://registry.npmjs.org/is-glob/-/is-glob-4.0.0.tgz", "resolved": "https://registry.npmjs.org/is-glob/-/is-glob-4.0.0.tgz",
...@@ -2292,12 +2527,36 @@ ...@@ -2292,12 +2527,36 @@
} }
} }
}, },
"jasmine": {
"version": "3.3.0",
"resolved": "https://registry.npmjs.org/jasmine/-/jasmine-3.3.0.tgz",
"integrity": "sha512-haZzMvmoWSI2VCKfDgPqyEOPBQA7C1fgtIMgKNU4hVMcrVkWU5NPOWQqOTA6mVFyKcSUUrnkXu/ZEgY0bRnd6A==",
"dev": true,
"requires": {
"glob": "7.1.2",
"jasmine-core": "3.3.0"
},
"dependencies": {
"jasmine-core": {
"version": "3.3.0",
"resolved": "https://registry.npmjs.org/jasmine-core/-/jasmine-core-3.3.0.tgz",
"integrity": "sha512-3/xSmG/d35hf80BEN66Y6g9Ca5l/Isdeg/j6zvbTYlTzeKinzmaTM4p9am5kYqOmE05D7s1t8FGjzdSnbUbceA==",
"dev": true
}
}
},
"jasmine-core": { "jasmine-core": {
"version": "3.2.1", "version": "3.2.1",
"resolved": "https://registry.npmjs.org/jasmine-core/-/jasmine-core-3.2.1.tgz", "resolved": "https://registry.npmjs.org/jasmine-core/-/jasmine-core-3.2.1.tgz",
"integrity": "sha512-pa9tbBWgU0EE4SWgc85T4sa886ufuQdsgruQANhECYjwqgV4z7Vw/499aCaP8ZH79JDS4vhm8doDG9HO4+e4sA==", "integrity": "sha512-pa9tbBWgU0EE4SWgc85T4sa886ufuQdsgruQANhECYjwqgV4z7Vw/499aCaP8ZH79JDS4vhm8doDG9HO4+e4sA==",
"dev": true "dev": true
}, },
"js-tokens": {
"version": "4.0.0",
"resolved": "https://registry.npmjs.org/js-tokens/-/js-tokens-4.0.0.tgz",
"integrity": "sha512-RdJUflcE3cUzKiMqQgsCu06FPu9UdIJO0beYbPhHN4k6apgJtifcoCtT9bcxOpYBtpD2kCM6Sbzg4CausW/PKQ==",
"dev": true
},
"js-yaml": { "js-yaml": {
"version": "3.12.0", "version": "3.12.0",
"resolved": "https://registry.npmjs.org/js-yaml/-/js-yaml-3.12.0.tgz", "resolved": "https://registry.npmjs.org/js-yaml/-/js-yaml-3.12.0.tgz",
...@@ -2601,6 +2860,12 @@ ...@@ -2601,6 +2860,12 @@
} }
} }
}, },
"long": {
"version": "4.0.0",
"resolved": "https://registry.npmjs.org/long/-/long-4.0.0.tgz",
"integrity": "sha512-XsP+KhQif4bjX1kbuSiySJFNAehNxgLb6hPRGJ9QsUr8ajHkuXGdrHmFUTUUXhDwVX2R5bY4JNZEwbUiMhV+MA==",
"dev": true
},
"longest": { "longest": {
"version": "1.0.1", "version": "1.0.1",
"resolved": "https://registry.npmjs.org/longest/-/longest-1.0.1.tgz", "resolved": "https://registry.npmjs.org/longest/-/longest-1.0.1.tgz",
...@@ -2632,6 +2897,12 @@ ...@@ -2632,6 +2897,12 @@
"vlq": "0.2.3" "vlq": "0.2.3"
} }
}, },
"make-error": {
"version": "1.3.5",
"resolved": "https://registry.npmjs.org/make-error/-/make-error-1.3.5.tgz",
"integrity": "sha512-c3sIjNUow0+8swNwVpqoH4YCShKNFkMaw6oH1mNS2haDZQqkeZFlHS3dhoeEbKKmJB4vXpJucU6oH75aDYeE9g==",
"dev": true
},
"map-cache": { "map-cache": {
"version": "0.2.2", "version": "0.2.2",
"resolved": "https://registry.npmjs.org/map-cache/-/map-cache-0.2.2.tgz", "resolved": "https://registry.npmjs.org/map-cache/-/map-cache-0.2.2.tgz",
...@@ -2780,6 +3051,25 @@ ...@@ -2780,6 +3051,25 @@
"integrity": "sha1-hX/Kv8M5fSYluCKCYuhqp6ARsF0=", "integrity": "sha1-hX/Kv8M5fSYluCKCYuhqp6ARsF0=",
"dev": true "dev": true
}, },
"minipass": {
"version": "2.3.5",
"resolved": "https://registry.npmjs.org/minipass/-/minipass-2.3.5.tgz",
"integrity": "sha512-Gi1W4k059gyRbyVUZQ4mEqLm0YIUiGYfvxhF6SIlk3ui1WVxMTGfGdQ2SInh3PDrRTVvPKgULkpJtT4RH10+VA==",
"dev": true,
"requires": {
"safe-buffer": "5.1.2",
"yallist": "3.0.2"
}
},
"minizlib": {
"version": "1.1.1",
"resolved": "https://registry.npmjs.org/minizlib/-/minizlib-1.1.1.tgz",
"integrity": "sha512-TrfjCjk4jLhcJyGMYymBH6oTXcWjYbUAXTHDbtnWHjZC25h0cdajHuPE1zxb4DVmu8crfh+HwH/WMuyLG0nHBg==",
"dev": true,
"requires": {
"minipass": "2.3.5"
}
},
"mixin-deep": { "mixin-deep": {
"version": "1.3.1", "version": "1.3.1",
"resolved": "https://registry.npmjs.org/mixin-deep/-/mixin-deep-1.3.1.tgz", "resolved": "https://registry.npmjs.org/mixin-deep/-/mixin-deep-1.3.1.tgz",
...@@ -2816,6 +3106,12 @@ ...@@ -2816,6 +3106,12 @@
"integrity": "sha1-VgiurfwAvmwpAd9fmGF4jeDVl8g=", "integrity": "sha1-VgiurfwAvmwpAd9fmGF4jeDVl8g=",
"dev": true "dev": true
}, },
"nan": {
"version": "2.11.1",
"resolved": "https://registry.npmjs.org/nan/-/nan-2.11.1.tgz",
"integrity": "sha512-iji6k87OSXa0CcrLl9z+ZiYSuR2o+c0bGuNmXdrhTQTakxytAFsC56SArGYoiHlJlFoHSnvmhpceZJaXkVuOtA==",
"dev": true
},
"nanomatch": { "nanomatch": {
"version": "1.2.13", "version": "1.2.13",
"resolved": "https://registry.npmjs.org/nanomatch/-/nanomatch-1.2.13.tgz", "resolved": "https://registry.npmjs.org/nanomatch/-/nanomatch-1.2.13.tgz",
...@@ -2835,12 +3131,53 @@ ...@@ -2835,12 +3131,53 @@
"to-regex": "3.0.2" "to-regex": "3.0.2"
} }
}, },
"needle": {
"version": "2.2.4",
"resolved": "https://registry.npmjs.org/needle/-/needle-2.2.4.tgz",
"integrity": "sha512-HyoqEb4wr/rsoaIDfTH2aVL9nWtQqba2/HvMv+++m8u0dz808MaagKILxtfeSN7QU7nvbQ79zk3vYOJp9zsNEA==",
"dev": true,
"requires": {
"debug": "2.6.9",
"iconv-lite": "0.4.23",
"sax": "1.2.4"
}
},
"negotiator": { "negotiator": {
"version": "0.6.1", "version": "0.6.1",
"resolved": "https://registry.npmjs.org/negotiator/-/negotiator-0.6.1.tgz", "resolved": "https://registry.npmjs.org/negotiator/-/negotiator-0.6.1.tgz",
"integrity": "sha1-KzJxhOiZIQEXeyhWP7XnECrNDKk=", "integrity": "sha1-KzJxhOiZIQEXeyhWP7XnECrNDKk=",
"dev": true "dev": true
}, },
"node-pre-gyp": {
"version": "0.11.0",
"resolved": "https://registry.npmjs.org/node-pre-gyp/-/node-pre-gyp-0.11.0.tgz",
"integrity": "sha512-TwWAOZb0j7e9eGaf9esRx3ZcLaE5tQ2lvYy1pb5IAaG1a2e2Kv5Lms1Y4hpj+ciXJRofIxxlt5haeQ/2ANeE0Q==",
"dev": true,
"requires": {
"detect-libc": "1.0.3",
"mkdirp": "0.5.1",
"needle": "2.2.4",
"nopt": "4.0.1",
"npm-packlist": "1.1.12",
"npmlog": "4.1.2",
"rc": "1.2.8",
"rimraf": "2.6.2",
"semver": "5.5.0",
"tar": "4.4.6"
},
"dependencies": {
"nopt": {
"version": "4.0.1",
"resolved": "https://registry.npmjs.org/nopt/-/nopt-4.0.1.tgz",
"integrity": "sha1-0NRoWv1UFRk8jHUFYC0NF81kR00=",
"dev": true,
"requires": {
"abbrev": "1.0.9",
"osenv": "0.1.5"
}
}
}
},
"nopt": { "nopt": {
"version": "3.0.6", "version": "3.0.6",
"resolved": "https://registry.npmjs.org/nopt/-/nopt-3.0.6.tgz", "resolved": "https://registry.npmjs.org/nopt/-/nopt-3.0.6.tgz",
...@@ -2871,6 +3208,34 @@ ...@@ -2871,6 +3208,34 @@
"remove-trailing-separator": "1.1.0" "remove-trailing-separator": "1.1.0"
} }
}, },
"npm-bundled": {
"version": "1.0.5",
"resolved": "https://registry.npmjs.org/npm-bundled/-/npm-bundled-1.0.5.tgz",
"integrity": "sha512-m/e6jgWu8/v5niCUKQi9qQl8QdeEduFA96xHDDzFGqly0OOjI7c+60KM/2sppfnUU9JJagf+zs+yGhqSOFj71g==",
"dev": true
},
"npm-packlist": {
"version": "1.1.12",
"resolved": "https://registry.npmjs.org/npm-packlist/-/npm-packlist-1.1.12.tgz",
"integrity": "sha512-WJKFOVMeAlsU/pjXuqVdzU0WfgtIBCupkEVwn+1Y0ERAbUfWw8R4GjgVbaKnUjRoD2FoQbHOCbOyT5Mbs9Lw4g==",
"dev": true,
"requires": {
"ignore-walk": "3.0.1",
"npm-bundled": "1.0.5"
}
},
"npmlog": {
"version": "4.1.2",
"resolved": "https://registry.npmjs.org/npmlog/-/npmlog-4.1.2.tgz",
"integrity": "sha512-2uUqazuKlTaSI/dC8AzicUck7+IrEaOnN/e0jd3Xtt1KcGpwx30v50mL7oPyr/h9bL3E4aZccVwpwP+5W9Vjkg==",
"dev": true,
"requires": {
"are-we-there-yet": "1.1.5",
"console-control-strings": "1.1.0",
"gauge": "2.7.4",
"set-blocking": "2.0.0"
}
},
"null-check": { "null-check": {
"version": "1.0.0", "version": "1.0.0",
"resolved": "https://registry.npmjs.org/null-check/-/null-check-1.0.0.tgz", "resolved": "https://registry.npmjs.org/null-check/-/null-check-1.0.0.tgz",
...@@ -3010,12 +3375,28 @@ ...@@ -3010,12 +3375,28 @@
"integrity": "sha1-hUNzx/XCMVkU/Jv8a9gjj92h7Cc=", "integrity": "sha1-hUNzx/XCMVkU/Jv8a9gjj92h7Cc=",
"dev": true "dev": true
}, },
"os-homedir": {
"version": "1.0.2",
"resolved": "https://registry.npmjs.org/os-homedir/-/os-homedir-1.0.2.tgz",
"integrity": "sha1-/7xJiDNuDoM94MFox+8VISGqf7M=",
"dev": true
},
"os-tmpdir": { "os-tmpdir": {
"version": "1.0.2", "version": "1.0.2",
"resolved": "https://registry.npmjs.org/os-tmpdir/-/os-tmpdir-1.0.2.tgz", "resolved": "https://registry.npmjs.org/os-tmpdir/-/os-tmpdir-1.0.2.tgz",
"integrity": "sha1-u+Z0BseaqFxc/sdm/lc0VV36EnQ=", "integrity": "sha1-u+Z0BseaqFxc/sdm/lc0VV36EnQ=",
"dev": true "dev": true
}, },
"osenv": {
"version": "0.1.5",
"resolved": "https://registry.npmjs.org/osenv/-/osenv-0.1.5.tgz",
"integrity": "sha512-0CWcCECdMVc2Rw3U5w9ZjqX6ga6ubk1xDVKxtBQPK7wis/0F2r9T6k4ydGYhecl7YUBxBVxhL5oisPsNxAPe2g==",
"dev": true,
"requires": {
"os-homedir": "1.0.2",
"os-tmpdir": "1.0.2"
}
},
"pad": { "pad": {
"version": "2.1.0", "version": "2.1.0",
"resolved": "https://registry.npmjs.org/pad/-/pad-2.1.0.tgz", "resolved": "https://registry.npmjs.org/pad/-/pad-2.1.0.tgz",
...@@ -3266,6 +3647,41 @@ ...@@ -3266,6 +3647,41 @@
"integrity": "sha512-MtEC1TqN0EU5nephaJ4rAtThHtC86dNN9qCuEhtshvpVBkAW5ZO7BASN9REnF9eoXGcRub+pFuKEpOHE+HbEMw==", "integrity": "sha512-MtEC1TqN0EU5nephaJ4rAtThHtC86dNN9qCuEhtshvpVBkAW5ZO7BASN9REnF9eoXGcRub+pFuKEpOHE+HbEMw==",
"dev": true "dev": true
}, },
"progress": {
"version": "2.0.1",
"resolved": "https://registry.npmjs.org/progress/-/progress-2.0.1.tgz",
"integrity": "sha512-OE+a6vzqazc+K6LxJrX5UPyKFvGnL5CYmq2jFGNIBWHpc4QyE49/YOumcrpQFJpfejmvRtbJzgO1zPmMCqlbBg==",
"dev": true
},
"protobufjs": {
"version": "6.8.8",
"resolved": "https://registry.npmjs.org/protobufjs/-/protobufjs-6.8.8.tgz",
"integrity": "sha512-AAmHtD5pXgZfi7GMpllpO3q1Xw1OYldr+dMUlAnffGTAhqkg72WdmSY71uKBF/JuyiKs8psYbtKrhi0ASCD8qw==",
"dev": true,
"requires": {
"@protobufjs/aspromise": "1.1.2",
"@protobufjs/base64": "1.1.2",
"@protobufjs/codegen": "2.0.4",
"@protobufjs/eventemitter": "1.1.0",
"@protobufjs/fetch": "1.1.0",
"@protobufjs/float": "1.0.2",
"@protobufjs/inquire": "1.1.0",
"@protobufjs/path": "1.1.2",
"@protobufjs/pool": "1.1.0",
"@protobufjs/utf8": "1.1.0",
"@types/long": "4.0.0",
"@types/node": "10.12.2",
"long": "4.0.0"
},
"dependencies": {
"@types/long": {
"version": "4.0.0",
"resolved": "https://registry.npmjs.org/@types/long/-/long-4.0.0.tgz",
"integrity": "sha512-1w52Nyx4Gq47uuu0EVcsHBxZFJgurQ+rTKS3qMHxR1GY2T8c2AJYd6vZoZ9q1rupaDjU0yT+Jc2XTyXkjeMA+Q==",
"dev": true
}
}
},
"public-encrypt": { "public-encrypt": {
"version": "4.0.2", "version": "4.0.2",
"resolved": "https://registry.npmjs.org/public-encrypt/-/public-encrypt-4.0.2.tgz", "resolved": "https://registry.npmjs.org/public-encrypt/-/public-encrypt-4.0.2.tgz",
...@@ -3365,6 +3781,26 @@ ...@@ -3365,6 +3781,26 @@
"unpipe": "1.0.0" "unpipe": "1.0.0"
} }
}, },
"rc": {
"version": "1.2.8",
"resolved": "https://registry.npmjs.org/rc/-/rc-1.2.8.tgz",
"integrity": "sha512-y3bGgqKj3QBdxLbLkomlohkvsA8gdAiUQlSBJnBhfn+BPxg4bc62d8TcBW15wavDfgexCgccckhcZvywyQYPOw==",
"dev": true,
"requires": {
"deep-extend": "0.6.0",
"ini": "1.3.5",
"minimist": "1.2.0",
"strip-json-comments": "2.0.1"
},
"dependencies": {
"minimist": {
"version": "1.2.0",
"resolved": "http://registry.npmjs.org/minimist/-/minimist-1.2.0.tgz",
"integrity": "sha1-o1AIsg9BOD7sH7kU9M1d95omQoQ=",
"dev": true
}
}
},
"read-pkg": { "read-pkg": {
"version": "1.1.0", "version": "1.1.0",
"resolved": "https://registry.npmjs.org/read-pkg/-/read-pkg-1.1.0.tgz", "resolved": "https://registry.npmjs.org/read-pkg/-/read-pkg-1.1.0.tgz",
...@@ -3552,7 +3988,7 @@ ...@@ -3552,7 +3988,7 @@
"dev": true, "dev": true,
"requires": { "requires": {
"@types/estree": "0.0.39", "@types/estree": "0.0.39",
"@types/node": "10.9.2" "@types/node": "10.12.2"
} }
}, },
"rollup-plugin-commonjs": { "rollup-plugin-commonjs": {
...@@ -3754,7 +4190,7 @@ ...@@ -3754,7 +4190,7 @@
"integrity": "sha512-f6W31EQLzxSEYfN3x6/lyljHqXSoCjXKcTsnwz3evQvHgU1+qTzU2SE0SIG7tbAvaCewp2UaZ5x3k6nYsxOP9A==", "integrity": "sha512-f6W31EQLzxSEYfN3x6/lyljHqXSoCjXKcTsnwz3evQvHgU1+qTzU2SE0SIG7tbAvaCewp2UaZ5x3k6nYsxOP9A==",
"dev": true, "dev": true,
"requires": { "requires": {
"@babel/code-frame": "7.0.0-rc.3", "@babel/code-frame": "7.0.0",
"uglify-js": "3.4.8" "uglify-js": "3.4.8"
} }
}, },
...@@ -3920,10 +4356,16 @@ ...@@ -3920,10 +4356,16 @@
"integrity": "sha512-YZo3K82SD7Riyi0E1EQPojLz7kpepnSQI9IyPbHHg1XXXevb5dJI7tpyN2ADxGcQbHG7vcyRHk0cbwqcQriUtg==", "integrity": "sha512-YZo3K82SD7Riyi0E1EQPojLz7kpepnSQI9IyPbHHg1XXXevb5dJI7tpyN2ADxGcQbHG7vcyRHk0cbwqcQriUtg==",
"dev": true "dev": true
}, },
"sax": {
"version": "1.2.4",
"resolved": "https://registry.npmjs.org/sax/-/sax-1.2.4.tgz",
"integrity": "sha512-NqVDv9TpANUjFm0N8uM5GxL36UgKi9/atZw+x7YFnQ8ckwFGKrl4xX4yWtrey3UJm5nP1kUbnYgLopqWNSRhWw==",
"dev": true
},
"seedrandom": { "seedrandom": {
"version": "2.4.4", "version": "2.4.3",
"resolved": "https://registry.npmjs.org/seedrandom/-/seedrandom-2.4.4.tgz", "resolved": "https://registry.npmjs.org/seedrandom/-/seedrandom-2.4.3.tgz",
"integrity": "sha512-9A+PDmgm+2du77B5i0Ip2cxOqqHjgNxnBgglxLcX78A2D6c2rTo61z4jnVABpF4cKeDMDG+cmXXvdnqse2VqMA==" "integrity": "sha1-JDhQTa0zkXMUv/GKxNeU8W1qrsw="
}, },
"semver": { "semver": {
"version": "5.5.0", "version": "5.5.0",
...@@ -3931,6 +4373,12 @@ ...@@ -3931,6 +4373,12 @@
"integrity": "sha512-4SJ3dm0WAwWy/NVeioZh5AntkdJoWKxHxcmyP622fOkgHa4z3R0TdBJICINyaSDE6uNwVc8gZr+ZinwZAH4xIA==", "integrity": "sha512-4SJ3dm0WAwWy/NVeioZh5AntkdJoWKxHxcmyP622fOkgHa4z3R0TdBJICINyaSDE6uNwVc8gZr+ZinwZAH4xIA==",
"dev": true "dev": true
}, },
"set-blocking": {
"version": "2.0.0",
"resolved": "https://registry.npmjs.org/set-blocking/-/set-blocking-2.0.0.tgz",
"integrity": "sha1-BF+XgtARrppoA93TgrJDkrPYkPc=",
"dev": true
},
"set-immediate-shim": { "set-immediate-shim": {
"version": "1.0.1", "version": "1.0.1",
"resolved": "https://registry.npmjs.org/set-immediate-shim/-/set-immediate-shim-1.0.1.tgz", "resolved": "https://registry.npmjs.org/set-immediate-shim/-/set-immediate-shim-1.0.1.tgz",
...@@ -4212,6 +4660,16 @@ ...@@ -4212,6 +4660,16 @@
"urix": "0.1.0" "urix": "0.1.0"
} }
}, },
"source-map-support": {
"version": "0.5.9",
"resolved": "https://registry.npmjs.org/source-map-support/-/source-map-support-0.5.9.tgz",
"integrity": "sha512-gR6Rw4MvUlYy83vP0vxoVNzM6t8MUXqNuRsuBmBHQDu1Fh6X015FrLdgoDKcNdkwGubozq0P4N0Q37UyFVr1EA==",
"dev": true,
"requires": {
"buffer-from": "1.1.1",
"source-map": "0.6.1"
}
},
"source-map-url": { "source-map-url": {
"version": "0.4.0", "version": "0.4.0",
"resolved": "https://registry.npmjs.org/source-map-url/-/source-map-url-0.4.0.tgz", "resolved": "https://registry.npmjs.org/source-map-url/-/source-map-url-0.4.0.tgz",
...@@ -4338,6 +4796,17 @@ ...@@ -4338,6 +4796,17 @@
} }
} }
}, },
"string-width": {
"version": "1.0.2",
"resolved": "https://registry.npmjs.org/string-width/-/string-width-1.0.2.tgz",
"integrity": "sha1-EYvfW4zcUaKn5w0hHgfisLmxB9M=",
"dev": true,
"requires": {
"code-point-at": "1.1.0",
"is-fullwidth-code-point": "1.0.0",
"strip-ansi": "3.0.1"
}
},
"string_decoder": { "string_decoder": {
"version": "1.1.1", "version": "1.1.1",
"resolved": "https://registry.npmjs.org/string_decoder/-/string_decoder-1.1.1.tgz", "resolved": "https://registry.npmjs.org/string_decoder/-/string_decoder-1.1.1.tgz",
...@@ -4347,6 +4816,15 @@ ...@@ -4347,6 +4816,15 @@
"safe-buffer": "5.1.2" "safe-buffer": "5.1.2"
} }
}, },
"strip-ansi": {
"version": "3.0.1",
"resolved": "http://registry.npmjs.org/strip-ansi/-/strip-ansi-3.0.1.tgz",
"integrity": "sha1-ajhfuIU9lS1f8F0Oiq+UJ43GPc8=",
"dev": true,
"requires": {
"ansi-regex": "2.1.1"
}
},
"strip-bom": { "strip-bom": {
"version": "2.0.0", "version": "2.0.0",
"resolved": "https://registry.npmjs.org/strip-bom/-/strip-bom-2.0.0.tgz", "resolved": "https://registry.npmjs.org/strip-bom/-/strip-bom-2.0.0.tgz",
...@@ -4365,22 +4843,60 @@ ...@@ -4365,22 +4843,60 @@
"get-stdin": "4.0.1" "get-stdin": "4.0.1"
} }
}, },
"strip-json-comments": {
"version": "2.0.1",
"resolved": "https://registry.npmjs.org/strip-json-comments/-/strip-json-comments-2.0.1.tgz",
"integrity": "sha1-PFMZQukIwml8DsNEhYwobHygpgo=",
"dev": true
},
"supports-color": {
"version": "5.5.0",
"resolved": "https://registry.npmjs.org/supports-color/-/supports-color-5.5.0.tgz",
"integrity": "sha512-QjVjwdXIt408MIiAqCX4oUKsgU2EqAGzs2Ppkm4aQYbjm+ZEWEcW4SfFNTr4uMNZma0ey4f5lgLrkB0aX0QMow==",
"dev": true,
"requires": {
"has-flag": "3.0.0"
},
"dependencies": {
"has-flag": {
"version": "3.0.0",
"resolved": "https://registry.npmjs.org/has-flag/-/has-flag-3.0.0.tgz",
"integrity": "sha1-tdRU3CGZriJWmfNGfloH87lVuv0=",
"dev": true
}
}
},
"tar": {
"version": "4.4.6",
"resolved": "https://registry.npmjs.org/tar/-/tar-4.4.6.tgz",
"integrity": "sha512-tMkTnh9EdzxyfW+6GK6fCahagXsnYk6kE6S9Gr9pjVdys769+laCTbodXDhPAjzVtEBazRgP0gYqOjnk9dQzLg==",
"dev": true,
"requires": {
"chownr": "1.1.1",
"fs-minipass": "1.2.5",
"minipass": "2.3.5",
"minizlib": "1.1.1",
"mkdirp": "0.5.1",
"safe-buffer": "5.1.2",
"yallist": "3.0.2"
}
},
"tfjs-image-recognition-base": { "tfjs-image-recognition-base": {
"version": "0.1.3", "version": "0.2.0",
"resolved": "https://registry.npmjs.org/tfjs-image-recognition-base/-/tfjs-image-recognition-base-0.1.3.tgz", "resolved": "https://registry.npmjs.org/tfjs-image-recognition-base/-/tfjs-image-recognition-base-0.2.0.tgz",
"integrity": "sha512-Vo1arsSkOxtlBedWxw7w2V/mbpp70izAJPu0Cl6WE62ZJ0kLL6TmFphGAr3zKaqrZ0VOyADVedDqFic3aH84RQ==", "integrity": "sha512-i5IIuYXEpAsZ+jkKttDAkhF0Qd5JbbmmTKLzmnd2ynQ6L+lcr2o4bX5Tl/Rqh0OKu603HrCaOiVDutO5EX899Q==",
"requires": { "requires": {
"@tensorflow/tfjs-core": "0.13.2", "@tensorflow/tfjs-core": "0.13.8",
"tslib": "1.9.3" "tslib": "1.9.3"
} }
}, },
"tfjs-tiny-yolov2": { "tfjs-tiny-yolov2": {
"version": "0.2.1", "version": "0.3.0",
"resolved": "https://registry.npmjs.org/tfjs-tiny-yolov2/-/tfjs-tiny-yolov2-0.2.1.tgz", "resolved": "https://registry.npmjs.org/tfjs-tiny-yolov2/-/tfjs-tiny-yolov2-0.3.0.tgz",
"integrity": "sha512-HSdBu6dMyQdtueY32wSO+5IajXDrvu7MufvUBaLD0CubKbfJWM1JogsONUkvp3N948UAI2/K35p9+eEP2woXpw==", "integrity": "sha512-z2xidL8L+HgcNewH1yxL+W32Lul5hoibYkBS4EwxeU0CtWkW8vEffCFub7wd5sggBvr+I2z/LYf+NYH4QXXOfA==",
"requires": { "requires": {
"@tensorflow/tfjs-core": "0.13.2", "@tensorflow/tfjs-core": "0.13.8",
"tfjs-image-recognition-base": "0.1.3", "tfjs-image-recognition-base": "0.2.0",
"tslib": "1.9.3" "tslib": "1.9.3"
} }
}, },
...@@ -4500,6 +5016,30 @@ ...@@ -4500,6 +5016,30 @@
"integrity": "sha1-WIeWa7WCpFA6QetST301ARgVphM=", "integrity": "sha1-WIeWa7WCpFA6QetST301ARgVphM=",
"dev": true "dev": true
}, },
"ts-node": {
"version": "7.0.1",
"resolved": "https://registry.npmjs.org/ts-node/-/ts-node-7.0.1.tgz",
"integrity": "sha512-BVwVbPJRspzNh2yfslyT1PSbl5uIk03EZlb493RKHN4qej/D06n1cEhjlOJG69oFsE7OT8XjpTUcYf6pKTLMhw==",
"dev": true,
"requires": {
"arrify": "1.0.1",
"buffer-from": "1.1.1",
"diff": "3.5.0",
"make-error": "1.3.5",
"minimist": "1.2.0",
"mkdirp": "0.5.1",
"source-map-support": "0.5.9",
"yn": "2.0.0"
},
"dependencies": {
"minimist": {
"version": "1.2.0",
"resolved": "http://registry.npmjs.org/minimist/-/minimist-1.2.0.tgz",
"integrity": "sha1-o1AIsg9BOD7sH7kU9M1d95omQoQ=",
"dev": true
}
}
},
"tslib": { "tslib": {
"version": "1.9.3", "version": "1.9.3",
"resolved": "https://registry.npmjs.org/tslib/-/tslib-1.9.3.tgz", "resolved": "https://registry.npmjs.org/tslib/-/tslib-1.9.3.tgz",
...@@ -4762,6 +5302,15 @@ ...@@ -4762,6 +5302,15 @@
"isexe": "2.0.0" "isexe": "2.0.0"
} }
}, },
"wide-align": {
"version": "1.1.3",
"resolved": "https://registry.npmjs.org/wide-align/-/wide-align-1.1.3.tgz",
"integrity": "sha512-QGkOQc8XL6Bt5PwnsExKBPuMKBxnGxWWW3fU55Xt4feHozMUhdUMaBCk290qpm/wG5u/RSKzwdAC4i51YigihA==",
"dev": true,
"requires": {
"string-width": "1.0.2"
}
},
"window-size": { "window-size": {
"version": "0.1.0", "version": "0.1.0",
"resolved": "https://registry.npmjs.org/window-size/-/window-size-0.1.0.tgz", "resolved": "https://registry.npmjs.org/window-size/-/window-size-0.1.0.tgz",
...@@ -4804,6 +5353,12 @@ ...@@ -4804,6 +5353,12 @@
"integrity": "sha1-pcbVMr5lbiPbgg77lDofBJmNY68=", "integrity": "sha1-pcbVMr5lbiPbgg77lDofBJmNY68=",
"dev": true "dev": true
}, },
"yallist": {
"version": "3.0.2",
"resolved": "https://registry.npmjs.org/yallist/-/yallist-3.0.2.tgz",
"integrity": "sha1-hFK0u36Dx8GI2AQcGoN8dz1ti7k=",
"dev": true
},
"yargs": { "yargs": {
"version": "3.10.0", "version": "3.10.0",
"resolved": "https://registry.npmjs.org/yargs/-/yargs-3.10.0.tgz", "resolved": "https://registry.npmjs.org/yargs/-/yargs-3.10.0.tgz",
...@@ -4822,6 +5377,12 @@ ...@@ -4822,6 +5377,12 @@
"resolved": "https://registry.npmjs.org/yeast/-/yeast-0.1.2.tgz", "resolved": "https://registry.npmjs.org/yeast/-/yeast-0.1.2.tgz",
"integrity": "sha1-AI4G2AlDIMNy28L47XagymyKxBk=", "integrity": "sha1-AI4G2AlDIMNy28L47XagymyKxBk=",
"dev": true "dev": true
},
"yn": {
"version": "2.0.0",
"resolved": "https://registry.npmjs.org/yn/-/yn-2.0.0.tgz",
"integrity": "sha1-5a2ryKz0CPY4X8dklWhMiOavaJo=",
"dev": true
} }
} }
} }
...@@ -12,15 +12,17 @@ ...@@ -12,15 +12,17 @@
"tsc-es6": "tsc --p tsconfig.es6.json", "tsc-es6": "tsc --p tsconfig.es6.json",
"build": "rm -rf ./build && rm -rf ./dist && npm run rollup && npm run rollup-min && npm run tsc && npm run tsc-es6", "build": "rm -rf ./build && rm -rf ./dist && npm run rollup && npm run rollup-min && npm run tsc && npm run tsc-es6",
"test": "karma start", "test": "karma start",
"test-browser": "karma start --single-run",
"test-node": "ts-node node_modules/jasmine/bin/jasmine --config=jasmine-node.js",
"test-all": "npm run test-browser && npm run test-node",
"test-facelandmarknets": "set UUT=faceLandmarkNet&& karma start", "test-facelandmarknets": "set UUT=faceLandmarkNet&& karma start",
"test-facerecognitionnet": "set UUT=faceRecognitionNet&& karma start", "test-facerecognitionnet": "set UUT=faceRecognitionNet&& karma start",
"test-ssdmobilenetv1": "set UUT=ssdMobilenetv1&& karma start", "test-ssdmobilenetv1": "set UUT=ssdMobilenetv1&& karma start",
"test-tinyfacedetector": "set UUT=tinyFaceDetector&& karma start", "test-tinyfacedetector": "set UUT=tinyFaceDetector&& karma start",
"test-mtcnn": "set UUT=mtcnn&& karma start", "test-mtcnn": "set UUT=mtcnn&& karma start",
"test-tinyyolov2": "set UUT=tinyYolov2&& karma start",
"test-cpu": "set BACKEND_CPU=true&& karma start", "test-cpu": "set BACKEND_CPU=true&& karma start",
"test-exclude-uncompressed": "set EXCLUDE_UNCOMPRESSED=true&& karma start", "test-exclude-uncompressed": "set EXCLUDE_UNCOMPRESSED=true&& karma start",
"test-travis": "karma start --single-run", "test-node-exclude-uncompressed": "set EXCLUDE_UNCOMPRESSED=true&& ts-node node_modules/jasmine/bin/jasmine --config=jasmine-node.js",
"docs": "typedoc --options ./typedoc.config.js ./src" "docs": "typedoc --options ./typedoc.config.js ./src"
}, },
"keywords": [ "keywords": [
...@@ -33,14 +35,17 @@ ...@@ -33,14 +35,17 @@
"author": "justadudewhohacks", "author": "justadudewhohacks",
"license": "MIT", "license": "MIT",
"dependencies": { "dependencies": {
"@tensorflow/tfjs-core": "^0.13.2", "@tensorflow/tfjs-core": "0.13.8",
"tfjs-image-recognition-base": "^0.1.3", "tfjs-image-recognition-base": "0.2.0",
"tfjs-tiny-yolov2": "^0.2.1", "tfjs-tiny-yolov2": "0.3.0",
"tslib": "^1.9.3" "tslib": "^1.9.3"
}, },
"devDependencies": { "devDependencies": {
"@tensorflow/tfjs-node": "^0.1.19",
"@types/jasmine": "^2.8.8", "@types/jasmine": "^2.8.8",
"@types/node": "^10.9.2", "@types/node": "^10.9.2",
"canvas": "^2.0.1",
"jasmine": "^3.3.0",
"jasmine-core": "^3.2.1", "jasmine-core": "^3.2.1",
"karma": "^3.0.0", "karma": "^3.0.0",
"karma-chrome-launcher": "^2.2.0", "karma-chrome-launcher": "^2.2.0",
...@@ -51,6 +56,7 @@ ...@@ -51,6 +56,7 @@
"rollup-plugin-node-resolve": "^3.3.0", "rollup-plugin-node-resolve": "^3.3.0",
"rollup-plugin-typescript2": "^0.16.1", "rollup-plugin-typescript2": "^0.16.1",
"rollup-plugin-uglify": "^4.0.0", "rollup-plugin-uglify": "^4.0.0",
"ts-node": "^7.0.1",
"typescript": "2.8.4" "typescript": "2.8.4"
} }
} }
import { getContext2dOrThrow, getDefaultDrawOptions, resolveInput } from 'tfjs-image-recognition-base'; import { env, getContext2dOrThrow, getDefaultDrawOptions, resolveInput } from 'tfjs-image-recognition-base';
import { FaceLandmarks } from '../classes/FaceLandmarks'; import { FaceLandmarks } from '../classes/FaceLandmarks';
import { FaceLandmarks68 } from '../classes/FaceLandmarks68'; import { FaceLandmarks68 } from '../classes/FaceLandmarks68';
...@@ -11,7 +11,7 @@ export function drawLandmarks( ...@@ -11,7 +11,7 @@ export function drawLandmarks(
options?: DrawLandmarksOptions options?: DrawLandmarksOptions
) { ) {
const canvas = resolveInput(canvasArg) const canvas = resolveInput(canvasArg)
if (!(canvas instanceof HTMLCanvasElement)) { if (!(canvas instanceof env.getEnv().Canvas)) {
throw new Error('drawLandmarks - expected canvas to be of type: HTMLCanvasElement') throw new Error('drawLandmarks - expected canvas to be of type: HTMLCanvasElement')
} }
......
import * as tf from '@tensorflow/tfjs-core'; import * as tf from '@tensorflow/tfjs-core';
import { isTensor4D, Rect } from 'tfjs-image-recognition-base'; import { isTensor4D, Rect, isTensor3D } from 'tfjs-image-recognition-base';
import { FaceDetection } from '../classes/FaceDetection'; import { FaceDetection } from '../classes/FaceDetection';
...@@ -18,6 +18,10 @@ export async function extractFaceTensors( ...@@ -18,6 +18,10 @@ export async function extractFaceTensors(
detections: Array<FaceDetection | Rect> detections: Array<FaceDetection | Rect>
): Promise<tf.Tensor3D[]> { ): Promise<tf.Tensor3D[]> {
if (!isTensor3D(imageTensor) && !isTensor4D(imageTensor)) {
throw new Error('extractFaceTensors - expected image tensor to be 3D or 4D')
}
if (isTensor4D(imageTensor) && imageTensor.shape[0] > 1) { if (isTensor4D(imageTensor) && imageTensor.shape[0] > 1) {
throw new Error('extractFaceTensors - batchSize > 1 not supported') throw new Error('extractFaceTensors - batchSize > 1 not supported')
} }
......
import { import {
createCanvas, createCanvas,
env,
getContext2dOrThrow, getContext2dOrThrow,
imageTensorToCanvas, imageTensorToCanvas,
Rect, Rect,
...@@ -21,9 +22,11 @@ export async function extractFaces( ...@@ -21,9 +22,11 @@ export async function extractFaces(
detections: Array<FaceDetection | Rect> detections: Array<FaceDetection | Rect>
): Promise<HTMLCanvasElement[]> { ): Promise<HTMLCanvasElement[]> {
const { Canvas } = env.getEnv()
let canvas = input as HTMLCanvasElement let canvas = input as HTMLCanvasElement
if (!(input instanceof HTMLCanvasElement)) { if (!(input instanceof Canvas)) {
const netInput = await toNetInput(input) const netInput = await toNetInput(input)
if (netInput.batchSize > 1) { if (netInput.batchSize > 1) {
...@@ -31,7 +34,7 @@ export async function extractFaces( ...@@ -31,7 +34,7 @@ export async function extractFaces(
} }
const tensorOrCanvas = netInput.getInput(0) const tensorOrCanvas = netInput.getInput(0)
canvas = tensorOrCanvas instanceof HTMLCanvasElement canvas = tensorOrCanvas instanceof Canvas
? tensorOrCanvas ? tensorOrCanvas
: await imageTensorToCanvas(tensorOrCanvas) : await imageTensorToCanvas(tensorOrCanvas)
} }
......
...@@ -4,9 +4,9 @@ import { ConvParams, SeparableConvParams } from 'tfjs-tiny-yolov2'; ...@@ -4,9 +4,9 @@ import { ConvParams, SeparableConvParams } from 'tfjs-tiny-yolov2';
import { depthwiseSeparableConv } from './depthwiseSeparableConv'; import { depthwiseSeparableConv } from './depthwiseSeparableConv';
import { extractParams } from './extractParams'; import { extractParams } from './extractParams';
import { extractParamsFromWeigthMap } from './extractParamsFromWeigthMap';
import { FaceLandmark68NetBase } from './FaceLandmark68NetBase'; import { FaceLandmark68NetBase } from './FaceLandmark68NetBase';
import { fullyConnectedLayer } from './fullyConnectedLayer'; import { fullyConnectedLayer } from './fullyConnectedLayer';
import { loadQuantizedParams } from './loadQuantizedParams';
import { DenseBlock4Params, NetParams } from './types'; import { DenseBlock4Params, NetParams } from './types';
function denseBlock( function denseBlock(
...@@ -64,10 +64,13 @@ export class FaceLandmark68Net extends FaceLandmark68NetBase<NetParams> { ...@@ -64,10 +64,13 @@ export class FaceLandmark68Net extends FaceLandmark68NetBase<NetParams> {
}) })
} }
protected loadQuantizedParams(uri: string | undefined) { protected getDefaultModelName(): string {
return loadQuantizedParams(uri) return 'face_landmark_68_model'
} }
protected extractParamsFromWeigthMap(weightMap: tf.NamedTensorMap) {
return extractParamsFromWeigthMap(weightMap)
}
protected extractParams(weights: Float32Array) { protected extractParams(weights: Float32Array) {
return extractParams(weights) return extractParams(weights)
......
...@@ -3,7 +3,7 @@ import { IDimensions, isEven, NetInput, NeuralNetwork, Point, TNetInput, toNetIn ...@@ -3,7 +3,7 @@ import { IDimensions, isEven, NetInput, NeuralNetwork, Point, TNetInput, toNetIn
import { FaceLandmarks68 } from '../classes/FaceLandmarks68'; import { FaceLandmarks68 } from '../classes/FaceLandmarks68';
export class FaceLandmark68NetBase<NetParams> extends NeuralNetwork<NetParams> { export abstract class FaceLandmark68NetBase<NetParams> extends NeuralNetwork<NetParams> {
// TODO: make super.name protected // TODO: make super.name protected
private __name: string private __name: string
...@@ -13,9 +13,7 @@ export class FaceLandmark68NetBase<NetParams> extends NeuralNetwork<NetParams> { ...@@ -13,9 +13,7 @@ export class FaceLandmark68NetBase<NetParams> extends NeuralNetwork<NetParams> {
this.__name = _name this.__name = _name
} }
public runNet(_: NetInput): tf.Tensor2D { public abstract runNet(netInput: NetInput): tf.Tensor2D
throw new Error(`${this.__name} - runNet not implemented`)
}
public postProcess(output: tf.Tensor2D, inputSize: number, originalDimensions: IDimensions[]): tf.Tensor2D { public postProcess(output: tf.Tensor2D, inputSize: number, originalDimensions: IDimensions[]): tf.Tensor2D {
......
...@@ -6,7 +6,7 @@ import { depthwiseSeparableConv } from './depthwiseSeparableConv'; ...@@ -6,7 +6,7 @@ import { depthwiseSeparableConv } from './depthwiseSeparableConv';
import { extractParamsTiny } from './extractParamsTiny'; import { extractParamsTiny } from './extractParamsTiny';
import { FaceLandmark68NetBase } from './FaceLandmark68NetBase'; import { FaceLandmark68NetBase } from './FaceLandmark68NetBase';
import { fullyConnectedLayer } from './fullyConnectedLayer'; import { fullyConnectedLayer } from './fullyConnectedLayer';
import { loadQuantizedParamsTiny } from './loadQuantizedParamsTiny'; import { extractParamsFromWeigthMapTiny } from './extractParamsFromWeigthMapTiny';
import { DenseBlock3Params, TinyNetParams } from './types'; import { DenseBlock3Params, TinyNetParams } from './types';
function denseBlock( function denseBlock(
...@@ -60,8 +60,12 @@ export class FaceLandmark68TinyNet extends FaceLandmark68NetBase<TinyNetParams> ...@@ -60,8 +60,12 @@ export class FaceLandmark68TinyNet extends FaceLandmark68NetBase<TinyNetParams>
}) })
} }
protected loadQuantizedParams(uri: string | undefined) { protected getDefaultModelName(): string {
return loadQuantizedParamsTiny(uri) return 'face_landmark_68_tiny_model'
}
protected extractParamsFromWeigthMap(weightMap: tf.NamedTensorMap) {
return extractParamsFromWeigthMapTiny(weightMap)
} }
protected extractParams(weights: Float32Array) { protected extractParams(weights: Float32Array) {
......
import { disposeUnusedWeightTensors, loadWeightMap, ParamMapping } from 'tfjs-image-recognition-base'; import * as tf from '@tensorflow/tfjs-core';
import { disposeUnusedWeightTensors, ParamMapping } from 'tfjs-image-recognition-base';
import { loadParamsFactory } from './loadParamsFactory'; import { loadParamsFactory } from './loadParamsFactory';
import { NetParams } from './types'; import { NetParams } from './types';
const DEFAULT_MODEL_NAME = 'face_landmark_68_model' export function extractParamsFromWeigthMap(
weightMap: tf.NamedTensorMap
): { params: NetParams, paramMappings: ParamMapping[] } {
export async function loadQuantizedParams(
uri: string | undefined
): Promise<{ params: NetParams, paramMappings: ParamMapping[] }> {
const weightMap = await loadWeightMap(uri, DEFAULT_MODEL_NAME)
const paramMappings: ParamMapping[] = [] const paramMappings: ParamMapping[] = []
const { const {
......
import { disposeUnusedWeightTensors, loadWeightMap, ParamMapping } from 'tfjs-image-recognition-base'; import * as tf from '@tensorflow/tfjs-core';
import { disposeUnusedWeightTensors, ParamMapping } from 'tfjs-image-recognition-base';
import { loadParamsFactory } from './loadParamsFactory'; import { loadParamsFactory } from './loadParamsFactory';
import { TinyNetParams } from './types'; import { TinyNetParams } from './types';
const DEFAULT_MODEL_NAME = 'face_landmark_68_tiny_model' export function extractParamsFromWeigthMapTiny(
weightMap: tf.NamedTensorMap
): { params: TinyNetParams, paramMappings: ParamMapping[] } {
export async function loadQuantizedParamsTiny(
uri: string | undefined
): Promise<{ params: TinyNetParams, paramMappings: ParamMapping[] }> {
const weightMap = await loadWeightMap(uri, DEFAULT_MODEL_NAME)
const paramMappings: ParamMapping[] = [] const paramMappings: ParamMapping[] = []
const { const {
......
...@@ -3,7 +3,7 @@ import { NetInput, NeuralNetwork, normalize, TNetInput, toNetInput } from 'tfjs- ...@@ -3,7 +3,7 @@ import { NetInput, NeuralNetwork, normalize, TNetInput, toNetInput } from 'tfjs-
import { convDown } from './convLayer'; import { convDown } from './convLayer';
import { extractParams } from './extractParams'; import { extractParams } from './extractParams';
import { loadQuantizedParams } from './loadQuantizedParams'; import { extractParamsFromWeigthMap } from './extractParamsFromWeigthMap';
import { residual, residualDown } from './residualLayer'; import { residual, residualDown } from './residualLayer';
import { NetParams } from './types'; import { NetParams } from './types';
...@@ -78,8 +78,12 @@ export class FaceRecognitionNet extends NeuralNetwork<NetParams> { ...@@ -78,8 +78,12 @@ export class FaceRecognitionNet extends NeuralNetwork<NetParams> {
: faceDescriptorsForBatch[0] : faceDescriptorsForBatch[0]
} }
protected loadQuantizedParams(uri: string | undefined) { protected getDefaultModelName(): string {
return loadQuantizedParams(uri) return 'face_recognition_model'
}
protected extractParamsFromWeigthMap(weightMap: tf.NamedTensorMap) {
return extractParamsFromWeigthMap(weightMap)
} }
protected extractParams(weights: Float32Array) { protected extractParams(weights: Float32Array) {
......
...@@ -9,8 +9,6 @@ import { ...@@ -9,8 +9,6 @@ import {
import { ConvLayerParams, NetParams, ResidualLayerParams, ScaleLayerParams } from './types'; import { ConvLayerParams, NetParams, ResidualLayerParams, ScaleLayerParams } from './types';
const DEFAULT_MODEL_NAME = 'face_recognition_model'
function extractorsFactory(weightMap: any, paramMappings: ParamMapping[]) { function extractorsFactory(weightMap: any, paramMappings: ParamMapping[]) {
const extractWeightEntry = extractWeightEntryFactory(weightMap, paramMappings) const extractWeightEntry = extractWeightEntryFactory(weightMap, paramMappings)
...@@ -46,11 +44,10 @@ function extractorsFactory(weightMap: any, paramMappings: ParamMapping[]) { ...@@ -46,11 +44,10 @@ function extractorsFactory(weightMap: any, paramMappings: ParamMapping[]) {
} }
export async function loadQuantizedParams( export function extractParamsFromWeigthMap(
uri: string | undefined weightMap: tf.NamedTensorMap
): Promise<{ params: NetParams, paramMappings: ParamMapping[] }> { ): { params: NetParams, paramMappings: ParamMapping[] } {
const weightMap = await loadWeightMap(uri, DEFAULT_MODEL_NAME)
const paramMappings: ParamMapping[] = [] const paramMappings: ParamMapping[] = []
const { const {
......
import * as tf from '@tensorflow/tfjs-core';
import { TNetInput } from 'tfjs-image-recognition-base'; import { TNetInput } from 'tfjs-image-recognition-base';
import { FaceDetectionWithLandmarks } from '../classes/FaceDetectionWithLandmarks'; import { FaceDetectionWithLandmarks } from '../classes/FaceDetectionWithLandmarks';
import { FullFaceDescription } from '../classes/FullFaceDescription'; import { FullFaceDescription } from '../classes/FullFaceDescription';
import { extractFaces } from '../dom'; import { extractFaces, extractFaceTensors } from '../dom';
import { ComposableTask } from './ComposableTask'; import { ComposableTask } from './ComposableTask';
import { nets } from './nets'; import { nets } from './nets';
...@@ -20,15 +21,20 @@ export class ComputeAllFaceDescriptorsTask extends ComputeFaceDescriptorsTaskBas ...@@ -20,15 +21,20 @@ export class ComputeAllFaceDescriptorsTask extends ComputeFaceDescriptorsTaskBas
public async run(): Promise<FullFaceDescription[]> { public async run(): Promise<FullFaceDescription[]> {
const facesWithLandmarks = await this.detectFaceLandmarksTask const facesWithLandmarks = await this.detectFaceLandmarksTask
const alignedFaceCanvases = await extractFaces(
this.input,
facesWithLandmarks.map(({ landmarks }) => landmarks.align())
)
return await Promise.all(facesWithLandmarks.map(async ({ detection, landmarks }, i) => { const alignedRects = facesWithLandmarks.map(({ alignedRect }) => alignedRect)
const descriptor = await nets.faceRecognitionNet.computeFaceDescriptor(alignedFaceCanvases[i]) as Float32Array const alignedFaces: Array<HTMLCanvasElement | tf.Tensor3D> = this.input instanceof tf.Tensor
? await extractFaceTensors(this.input, alignedRects)
: await extractFaces(this.input, alignedRects)
const fullFaceDescriptions = await Promise.all(facesWithLandmarks.map(async ({ detection, landmarks }, i) => {
const descriptor = await nets.faceRecognitionNet.computeFaceDescriptor(alignedFaces[i]) as Float32Array
return new FullFaceDescription(detection, landmarks, descriptor) return new FullFaceDescription(detection, landmarks, descriptor)
})) }))
alignedFaces.forEach(f => f instanceof tf.Tensor && f.dispose())
return fullFaceDescriptions
} }
} }
...@@ -42,8 +48,12 @@ export class ComputeSingleFaceDescriptorTask extends ComputeFaceDescriptorsTaskB ...@@ -42,8 +48,12 @@ export class ComputeSingleFaceDescriptorTask extends ComputeFaceDescriptorsTaskB
} }
const { detection, landmarks, alignedRect } = detectionWithLandmarks const { detection, landmarks, alignedRect } = detectionWithLandmarks
const alignedFaceCanvas = (await extractFaces(this.input, [alignedRect]))[0] const alignedFaces: Array<HTMLCanvasElement | tf.Tensor3D> = this.input instanceof tf.Tensor
const descriptor = await nets.faceRecognitionNet.computeFaceDescriptor(alignedFaceCanvas) as Float32Array ? await extractFaceTensors(this.input, [alignedRect])
: await extractFaces(this.input, [alignedRect])
const descriptor = await nets.faceRecognitionNet.computeFaceDescriptor(alignedFaces[0]) as Float32Array
alignedFaces.forEach(f => f instanceof tf.Tensor && f.dispose())
return new FullFaceDescription(detection, landmarks, descriptor) return new FullFaceDescription(detection, landmarks, descriptor)
} }
......
import * as tf from '@tensorflow/tfjs-core';
import { TNetInput } from 'tfjs-image-recognition-base'; import { TNetInput } from 'tfjs-image-recognition-base';
import { FaceDetection } from '../classes/FaceDetection'; import { FaceDetection } from '../classes/FaceDetection';
import { FaceDetectionWithLandmarks } from '../classes/FaceDetectionWithLandmarks'; import { FaceDetectionWithLandmarks } from '../classes/FaceDetectionWithLandmarks';
import { FaceLandmarks68 } from '../classes/FaceLandmarks68'; import { FaceLandmarks68 } from '../classes/FaceLandmarks68';
import { extractFaces } from '../dom'; import { extractFaces, extractFaceTensors } from '../dom';
import { FaceLandmark68Net } from '../faceLandmarkNet/FaceLandmark68Net'; import { FaceLandmark68Net } from '../faceLandmarkNet/FaceLandmark68Net';
import { FaceLandmark68TinyNet } from '../faceLandmarkNet/FaceLandmark68TinyNet'; import { FaceLandmark68TinyNet } from '../faceLandmarkNet/FaceLandmark68TinyNet';
import { ComposableTask } from './ComposableTask'; import { ComposableTask } from './ComposableTask';
...@@ -31,12 +32,17 @@ export class DetectAllFaceLandmarksTask extends DetectFaceLandmarksTaskBase<Face ...@@ -31,12 +32,17 @@ export class DetectAllFaceLandmarksTask extends DetectFaceLandmarksTaskBase<Face
public async run(): Promise<FaceDetectionWithLandmarks[]> { public async run(): Promise<FaceDetectionWithLandmarks[]> {
const detections = await this.detectFacesTask const detections = await this.detectFacesTask
const faceCanvases = await extractFaces(this.input, detections)
const faceLandmarksByFace = await Promise.all(faceCanvases.map( const faces: Array<HTMLCanvasElement | tf.Tensor3D> = this.input instanceof tf.Tensor
canvas => this.landmarkNet.detectLandmarks(canvas) ? await extractFaceTensors(this.input, detections)
: await extractFaces(this.input, detections)
const faceLandmarksByFace = await Promise.all(faces.map(
face => this.landmarkNet.detectLandmarks(face)
)) as FaceLandmarks68[] )) as FaceLandmarks68[]
faces.forEach(f => f instanceof tf.Tensor && f.dispose())
return detections.map((detection, i) => return detections.map((detection, i) =>
new FaceDetectionWithLandmarks(detection, faceLandmarksByFace[i]) new FaceDetectionWithLandmarks(detection, faceLandmarksByFace[i])
) )
...@@ -56,10 +62,18 @@ export class DetectSingleFaceLandmarksTask extends DetectFaceLandmarksTaskBase<F ...@@ -56,10 +62,18 @@ export class DetectSingleFaceLandmarksTask extends DetectFaceLandmarksTaskBase<F
return return
} }
const faceCanvas = (await extractFaces(this.input, [detection]))[0] const faces: Array<HTMLCanvasElement | tf.Tensor3D> = this.input instanceof tf.Tensor
? await extractFaceTensors(this.input, [detection])
: await extractFaces(this.input, [detection])
const landmarks = await this.landmarkNet.detectLandmarks(faces[0]) as FaceLandmarks68
faces.forEach(f => f instanceof tf.Tensor && f.dispose())
return new FaceDetectionWithLandmarks( return new FaceDetectionWithLandmarks(
detection, detection,
await this.landmarkNet.detectLandmarks(faceCanvas) as FaceLandmarks68 landmarks
) )
} }
......
...@@ -7,8 +7,8 @@ import { FaceLandmarks5 } from '../classes/FaceLandmarks5'; ...@@ -7,8 +7,8 @@ import { FaceLandmarks5 } from '../classes/FaceLandmarks5';
import { bgrToRgbTensor } from './bgrToRgbTensor'; import { bgrToRgbTensor } from './bgrToRgbTensor';
import { CELL_SIZE } from './config'; import { CELL_SIZE } from './config';
import { extractParams } from './extractParams'; import { extractParams } from './extractParams';
import { extractParamsFromWeigthMap } from './extractParamsFromWeigthMap';
import { getSizesForScale } from './getSizesForScale'; import { getSizesForScale } from './getSizesForScale';
import { loadQuantizedParams } from './loadQuantizedParams';
import { IMtcnnOptions, MtcnnOptions } from './MtcnnOptions'; import { IMtcnnOptions, MtcnnOptions } from './MtcnnOptions';
import { pyramidDown } from './pyramidDown'; import { pyramidDown } from './pyramidDown';
import { stage1 } from './stage1'; import { stage1 } from './stage1';
...@@ -146,9 +146,12 @@ export class Mtcnn extends NeuralNetwork<NetParams> { ...@@ -146,9 +146,12 @@ export class Mtcnn extends NeuralNetwork<NetParams> {
) )
} }
// none of the param tensors are quantized yet protected getDefaultModelName(): string {
protected loadQuantizedParams(uri: string | undefined) { return 'mtcnn_model'
return loadQuantizedParams(uri) }
protected extractParamsFromWeigthMap(weightMap: tf.NamedTensorMap) {
return extractParamsFromWeigthMap(weightMap)
} }
protected extractParams(weights: Float32Array) { protected extractParams(weights: Float32Array) {
......
import * as tf from '@tensorflow/tfjs-core'; import * as tf from '@tensorflow/tfjs-core';
import { Box, createCanvas, getContext2dOrThrow, IDimensions } from 'tfjs-image-recognition-base'; import {
Box,
createCanvas,
createCanvasFromMedia,
env,
getContext2dOrThrow,
IDimensions,
} from 'tfjs-image-recognition-base';
import { normalize } from './normalize'; import { normalize } from './normalize';
...@@ -20,7 +27,7 @@ export async function extractImagePatches( ...@@ -20,7 +27,7 @@ export async function extractImagePatches(
const fromY = y - 1 const fromY = y - 1
const imgData = imgCtx.getImageData(fromX, fromY, (ex - fromX), (ey - fromY)) const imgData = imgCtx.getImageData(fromX, fromY, (ex - fromX), (ey - fromY))
return createImageBitmap(imgData) return env.isNodejs() ? createCanvasFromMedia(imgData) : createImageBitmap(imgData)
})) }))
const imagePatchesDatas: number[][] = [] const imagePatchesDatas: number[][] = []
......
import * as tf from '@tensorflow/tfjs-core'; import * as tf from '@tensorflow/tfjs-core';
import { import { disposeUnusedWeightTensors, extractWeightEntryFactory, ParamMapping } from 'tfjs-image-recognition-base';
disposeUnusedWeightTensors,
extractWeightEntryFactory,
loadWeightMap,
ParamMapping,
} from 'tfjs-image-recognition-base';
import { ConvParams, FCParams } from 'tfjs-tiny-yolov2'; import { ConvParams, FCParams } from 'tfjs-tiny-yolov2';
import { NetParams, ONetParams, PNetParams, RNetParams, SharedParams } from './types'; import { NetParams, ONetParams, PNetParams, RNetParams, SharedParams } from './types';
const DEFAULT_MODEL_NAME = 'mtcnn_model'
function extractorsFactory(weightMap: any, paramMappings: ParamMapping[]) { function extractorsFactory(weightMap: any, paramMappings: ParamMapping[]) {
const extractWeightEntry = extractWeightEntryFactory(weightMap, paramMappings) const extractWeightEntry = extractWeightEntryFactory(weightMap, paramMappings)
...@@ -87,11 +80,10 @@ function extractorsFactory(weightMap: any, paramMappings: ParamMapping[]) { ...@@ -87,11 +80,10 @@ function extractorsFactory(weightMap: any, paramMappings: ParamMapping[]) {
} }
export async function loadQuantizedParams( export function extractParamsFromWeigthMap(
uri: string | undefined weightMap: tf.NamedTensorMap
): Promise<{ params: NetParams, paramMappings: ParamMapping[] }> { ): { params: NetParams, paramMappings: ParamMapping[] } {
const weightMap = await loadWeightMap(uri, DEFAULT_MODEL_NAME)
const paramMappings: ParamMapping[] = [] const paramMappings: ParamMapping[] = []
const { const {
......
...@@ -3,7 +3,7 @@ import { NetInput, NeuralNetwork, Rect, TNetInput, toNetInput } from 'tfjs-image ...@@ -3,7 +3,7 @@ import { NetInput, NeuralNetwork, Rect, TNetInput, toNetInput } from 'tfjs-image
import { FaceDetection } from '../classes/FaceDetection'; import { FaceDetection } from '../classes/FaceDetection';
import { extractParams } from './extractParams'; import { extractParams } from './extractParams';
import { loadQuantizedParams } from './loadQuantizedParams'; import { extractParamsFromWeigthMap } from './extractParamsFromWeigthMap';
import { mobileNetV1 } from './mobileNetV1'; import { mobileNetV1 } from './mobileNetV1';
import { nonMaxSuppression } from './nonMaxSuppression'; import { nonMaxSuppression } from './nonMaxSuppression';
import { outputLayer } from './outputLayer'; import { outputLayer } from './outputLayer';
...@@ -116,8 +116,12 @@ export class SsdMobilenetv1 extends NeuralNetwork<NetParams> { ...@@ -116,8 +116,12 @@ export class SsdMobilenetv1 extends NeuralNetwork<NetParams> {
return results return results
} }
protected loadQuantizedParams(uri: string | undefined) { protected getDefaultModelName(): string {
return loadQuantizedParams(uri) return 'ssd_mobilenetv1_model'
}
protected extractParamsFromWeigthMap(weightMap: tf.NamedTensorMap) {
return extractParamsFromWeigthMap(weightMap)
} }
protected extractParams(weights: Float32Array) { protected extractParams(weights: Float32Array) {
......
...@@ -3,15 +3,12 @@ import { ...@@ -3,15 +3,12 @@ import {
disposeUnusedWeightTensors, disposeUnusedWeightTensors,
extractWeightEntryFactory, extractWeightEntryFactory,
isTensor3D, isTensor3D,
loadWeightMap,
ParamMapping, ParamMapping,
} from 'tfjs-image-recognition-base'; } from 'tfjs-image-recognition-base';
import { ConvParams } from 'tfjs-tiny-yolov2'; import { ConvParams } from 'tfjs-tiny-yolov2';
import { BoxPredictionParams, MobileNetV1, NetParams, PointwiseConvParams, PredictionLayerParams } from './types'; import { BoxPredictionParams, MobileNetV1, NetParams, PointwiseConvParams, PredictionLayerParams } from './types';
const DEFAULT_MODEL_NAME = 'ssd_mobilenetv1_model'
function extractorsFactory(weightMap: any, paramMappings: ParamMapping[]) { function extractorsFactory(weightMap: any, paramMappings: ParamMapping[]) {
const extractWeightEntry = extractWeightEntryFactory(weightMap, paramMappings) const extractWeightEntry = extractWeightEntryFactory(weightMap, paramMappings)
...@@ -114,11 +111,10 @@ function extractorsFactory(weightMap: any, paramMappings: ParamMapping[]) { ...@@ -114,11 +111,10 @@ function extractorsFactory(weightMap: any, paramMappings: ParamMapping[]) {
} }
} }
export async function loadQuantizedParams( export function extractParamsFromWeigthMap(
uri: string | undefined weightMap: tf.NamedTensorMap
): Promise<{ params: NetParams, paramMappings: ParamMapping[] }> { ): { params: NetParams, paramMappings: ParamMapping[] } {
const weightMap = await loadWeightMap(uri, DEFAULT_MODEL_NAME)
const paramMappings: ParamMapping[] = [] const paramMappings: ParamMapping[] = []
const { const {
......
import { Point, TNetInput } from 'tfjs-image-recognition-base'; import * as tf from '@tensorflow/tfjs-core';
import { TinyYolov2 as TinyYolov2Base, ITinyYolov2Options } from 'tfjs-tiny-yolov2'; import { ParamMapping, Point, TNetInput } from 'tfjs-image-recognition-base';
import { ITinyYolov2Options, TinyYolov2 as TinyYolov2Base } from 'tfjs-tiny-yolov2';
import { TinyYolov2NetParams } from 'tfjs-tiny-yolov2/build/commonjs/tinyYolov2/types';
import { FaceDetection } from '../classes'; import { FaceDetection } from '../classes';
import { BOX_ANCHORS, DEFAULT_MODEL_NAME, IOU_THRESHOLD, MEAN_RGB } from './const'; import { BOX_ANCHORS, IOU_THRESHOLD, MEAN_RGB } from './const';
export class TinyFaceDetector extends TinyYolov2Base { export class TinyFaceDetector extends TinyYolov2Base {
...@@ -29,8 +31,11 @@ export class TinyFaceDetector extends TinyYolov2Base { ...@@ -29,8 +31,11 @@ export class TinyFaceDetector extends TinyYolov2Base {
return objectDetections.map(det => new FaceDetection(det.score, det.relativeBox, { width: det.imageWidth, height: det.imageHeight })) return objectDetections.map(det => new FaceDetection(det.score, det.relativeBox, { width: det.imageWidth, height: det.imageHeight }))
} }
protected loadQuantizedParams(modelUri: string | undefined) { protected getDefaultModelName(): string {
const defaultModelName = DEFAULT_MODEL_NAME return 'tiny_face_detector_model'
return super.loadQuantizedParams(modelUri, defaultModelName) as any }
protected extractParamsFromWeigthMap(weightMap: tf.NamedTensorMap): { params: TinyYolov2NetParams, paramMappings: ParamMapping[] } {
return super.extractParamsFromWeigthMap(weightMap)
} }
} }
\ No newline at end of file
...@@ -11,5 +11,3 @@ export const BOX_ANCHORS = [ ...@@ -11,5 +11,3 @@ export const BOX_ANCHORS = [
] ]
export const MEAN_RGB: [number, number, number] = [117.001, 114.697, 97.404] export const MEAN_RGB: [number, number, number] = [117.001, 114.697, 97.404]
export const DEFAULT_MODEL_NAME = 'tiny_face_detector_model'
\ No newline at end of file
import { Point, TNetInput } from 'tfjs-image-recognition-base'; import * as tf from '@tensorflow/tfjs-core';
import { ITinyYolov2Options, TinyYolov2 as TinyYolov2Base } from 'tfjs-tiny-yolov2'; import { ParamMapping, Point, TNetInput } from 'tfjs-image-recognition-base';
import { ITinyYolov2Options, TinyYolov2 as TinyYolov2Base, TinyYolov2NetParams } from 'tfjs-tiny-yolov2';
import { FaceDetection } from '../classes'; import { FaceDetection } from '../classes';
import { import {
...@@ -46,8 +47,11 @@ export class TinyYolov2 extends TinyYolov2Base { ...@@ -46,8 +47,11 @@ export class TinyYolov2 extends TinyYolov2Base {
return objectDetections.map(det => new FaceDetection(det.score, det.relativeBox, { width: det.imageWidth, height: det.imageHeight })) return objectDetections.map(det => new FaceDetection(det.score, det.relativeBox, { width: det.imageWidth, height: det.imageHeight }))
} }
protected loadQuantizedParams(modelUri: string | undefined) { protected getDefaultModelName(): string {
const defaultModelName = this.withSeparableConvs ? DEFAULT_MODEL_NAME_SEPARABLE_CONV : DEFAULT_MODEL_NAME return this.withSeparableConvs ? DEFAULT_MODEL_NAME_SEPARABLE_CONV : DEFAULT_MODEL_NAME
return super.loadQuantizedParams(modelUri, defaultModelName) as any }
protected extractParamsFromWeigthMap(weightMap: tf.NamedTensorMap): { params: TinyYolov2NetParams, paramMappings: ParamMapping[] } {
return super.extractParamsFromWeigthMap(weightMap)
} }
} }
\ No newline at end of file
import * as tf from '@tensorflow/tfjs-core';
import { fetchNetWeights, NeuralNetwork } from 'tfjs-image-recognition-base';
import { env, fetchImage, fetchJson } from '../src';
export let fs: any, path: any, canvas: any
jasmine.DEFAULT_TIMEOUT_INTERVAL = 60000
if (env.isNodejs()) {
require('@tensorflow/tfjs-node')
fs = require('fs')
path = require('path')
canvas = require('canvas')
const { Canvas, Image, ImageData } = canvas
env.monkeyPatch({ Canvas, Image, ImageData })
} else {
if ((window['__karma__'].config.jasmine.args as string[]).some(arg => arg === 'backend_cpu')) {
tf.setBackend('cpu')
}
}
export async function initNet<TNet extends NeuralNetwork<any>>(
net: TNet,
uncompressedFilename: string | boolean,
isUnusedModel: boolean = false
) {
if (env.isNodejs()) {
await net.loadFromDisk(path.resolve(__dirname, '../weights'))
} else {
const url = uncompressedFilename
? await fetchNetWeights(`base/weights_uncompressed/${uncompressedFilename}`)
: (isUnusedModel ? 'base/weights_unused' : 'base/weights')
await net.load(url)
}
}
export async function loadImage(uri: string): Promise<HTMLImageElement> {
if (env.isNodejs()) {
return canvas.loadImage(path.resolve(__dirname, '../', uri))
}
return fetchImage(`base${uri.startsWith('/') ? '' : '/'}${uri}`)
}
export async function loadJson<T>(uri: string): Promise<T> {
if (env.isNodejs()) {
return JSON.parse(fs.readFileSync(path.resolve(__dirname, '../', uri)).toString())
}
return fetchJson<T>(`base${uri.startsWith('/') ? '' : '/'}${uri}`)
}
import { bufferToImage, extractFaceTensors, Rect, tf } from '../../../src'; import { createCanvasFromMedia, extractFaceTensors, Rect, tf } from '../../../src';
import { loadImage } from '../../env';
describe('extractFaceTensors', () => { describe('extractFaceTensors', () => {
let imgTensor: tf.Tensor3D let imgTensor: tf.Tensor3D
beforeAll(async () => { beforeAll(async () => {
const img = await (await fetch('base/test/images/face1.png')).blob() imgTensor = tf.fromPixels(createCanvasFromMedia(await loadImage('test/images/face1.png')))
imgTensor = tf.fromPixels(await bufferToImage(img))
}) })
describe('extracts tensors', () => { describe('extracts tensors', () => {
......
import { bufferToImage, createCanvasFromMedia, extractFaces, Rect } from '../../../src'; import { createCanvasFromMedia, env, extractFaces, Rect } from '../../../src';
import { loadImage } from '../../env';
describe('extractFaces', () => { describe('extractFaces', () => {
let imgEl: HTMLImageElement, canvasEl: HTMLCanvasElement let imgEl: HTMLImageElement, canvasEl: HTMLCanvasElement, Canvas: typeof HTMLCanvasElement
beforeAll(async () => { beforeAll(async () => {
const img = await (await fetch('base/test/images/face1.png')).blob() imgEl = await loadImage('test/images/face1.png')
imgEl = await bufferToImage(img)
canvasEl = createCanvasFromMedia(imgEl) canvasEl = createCanvasFromMedia(imgEl)
Canvas = env.getEnv().Canvas
}) })
describe('extracts canvases', () => { describe('extracts canvases', () => {
...@@ -17,7 +18,7 @@ describe('extractFaces', () => { ...@@ -17,7 +18,7 @@ describe('extractFaces', () => {
const canvases = await extractFaces(imgEl, [rect]) const canvases = await extractFaces(imgEl, [rect])
expect(canvases.length).toEqual(1) expect(canvases.length).toEqual(1)
expect(canvases[0] instanceof HTMLCanvasElement).toBe(true) expect(canvases[0] instanceof Canvas).toBe(true)
expect(canvases[0].width).toEqual(50) expect(canvases[0].width).toEqual(50)
expect(canvases[0].height).toEqual(60) expect(canvases[0].height).toEqual(60)
}) })
...@@ -30,10 +31,10 @@ describe('extractFaces', () => { ...@@ -30,10 +31,10 @@ describe('extractFaces', () => {
const canvases = await extractFaces(imgEl, rects) const canvases = await extractFaces(imgEl, rects)
expect(canvases.length).toEqual(2) expect(canvases.length).toEqual(2)
expect(canvases[0] instanceof HTMLCanvasElement).toBe(true) expect(canvases[0] instanceof Canvas).toBe(true)
expect(canvases[0].width).toEqual(50) expect(canvases[0].width).toEqual(50)
expect(canvases[0].height).toEqual(60) expect(canvases[0].height).toEqual(60)
expect(canvases[1] instanceof HTMLCanvasElement).toBe(true) expect(canvases[1] instanceof Canvas).toBe(true)
expect(canvases[1].width).toEqual(70) expect(canvases[1].width).toEqual(70)
expect(canvases[1].height).toEqual(80) expect(canvases[1].height).toEqual(80)
}) })
...@@ -43,7 +44,7 @@ describe('extractFaces', () => { ...@@ -43,7 +44,7 @@ describe('extractFaces', () => {
const canvases = await extractFaces(canvasEl, [rect]) const canvases = await extractFaces(canvasEl, [rect])
expect(canvases.length).toEqual(1) expect(canvases.length).toEqual(1)
expect(canvases[0] instanceof HTMLCanvasElement).toBe(true) expect(canvases[0] instanceof Canvas).toBe(true)
expect(canvases[0].width).toEqual(50) expect(canvases[0].width).toEqual(50)
expect(canvases[0].height).toEqual(60) expect(canvases[0].height).toEqual(60)
}) })
...@@ -56,10 +57,10 @@ describe('extractFaces', () => { ...@@ -56,10 +57,10 @@ describe('extractFaces', () => {
const canvases = await extractFaces(canvasEl, rects) const canvases = await extractFaces(canvasEl, rects)
expect(canvases.length).toEqual(2) expect(canvases.length).toEqual(2)
expect(canvases[0] instanceof HTMLCanvasElement).toBe(true) expect(canvases[0] instanceof Canvas).toBe(true)
expect(canvases[0].width).toEqual(50) expect(canvases[0].width).toEqual(50)
expect(canvases[0].height).toEqual(60) expect(canvases[0].height).toEqual(60)
expect(canvases[1] instanceof HTMLCanvasElement).toBe(true) expect(canvases[1] instanceof Canvas).toBe(true)
expect(canvases[1].width).toEqual(70) expect(canvases[1].width).toEqual(70)
expect(canvases[1].height).toEqual(80) expect(canvases[1].height).toEqual(80)
}) })
......
...@@ -2,11 +2,29 @@ import * as tf from '@tensorflow/tfjs-core'; ...@@ -2,11 +2,29 @@ import * as tf from '@tensorflow/tfjs-core';
import { FaceLandmark68NetBase } from '../../../src/faceLandmarkNet/FaceLandmark68NetBase'; import { FaceLandmark68NetBase } from '../../../src/faceLandmarkNet/FaceLandmark68NetBase';
class FakeFaceLandmark68NetBase extends FaceLandmark68NetBase<any> {
protected getDefaultModelName(): string {
throw new Error('FakeFaceLandmark68NetBase - getDefaultModelName not implemented')
}
protected extractParams(_: any): any {
throw new Error('FakeFaceLandmark68NetBase - extractParams not implemented')
}
protected extractParamsFromWeigthMap(_: any): any {
throw new Error('FakeFaceLandmark68NetBase - extractParamsFromWeigthMap not implemented')
}
public runNet(): any {
throw new Error('FakeFaceLandmark68NetBase - extractParamsFromWeigthMap not implemented')
}
}
describe('FaceLandmark68NetBase', () => { describe('FaceLandmark68NetBase', () => {
describe('postProcess', () => { describe('postProcess', () => {
const net = new FaceLandmark68NetBase('') const net = new FakeFaceLandmark68NetBase('')
describe('single batch', () => { describe('single batch', () => {
......
import * as tf from '@tensorflow/tfjs-core'; import * as tf from '@tensorflow/tfjs-core';
import { fetchImage, fetchJson, IDimensions, isTensor3D, NetInput, Point, TMediaElement, toNetInput } from '../../../src'; import { createCanvasFromMedia, IDimensions, isTensor3D, NetInput, Point, TMediaElement, toNetInput } from '../../../src';
import { FaceLandmarks68 } from '../../../src/classes/FaceLandmarks68'; import { FaceLandmarks68 } from '../../../src/classes/FaceLandmarks68';
import { createFaceLandmarkNet } from '../../../src/faceLandmarkNet';
import { FaceLandmark68Net } from '../../../src/faceLandmarkNet/FaceLandmark68Net'; import { FaceLandmark68Net } from '../../../src/faceLandmarkNet/FaceLandmark68Net';
import { loadImage, loadJson } from '../../env';
import { describeWithNets, expectAllTensorsReleased, expectMaxDelta, expectPointClose } from '../../utils'; import { describeWithNets, expectAllTensorsReleased, expectMaxDelta, expectPointClose } from '../../utils';
function getInputDims (input: tf.Tensor | TMediaElement): IDimensions { function getInputDims (input: tf.Tensor | TMediaElement): IDimensions {
...@@ -24,12 +24,12 @@ describe('faceLandmark68Net', () => { ...@@ -24,12 +24,12 @@ describe('faceLandmark68Net', () => {
let faceLandmarkPositionsRect: Point[] let faceLandmarkPositionsRect: Point[]
beforeAll(async () => { beforeAll(async () => {
imgEl1 = await fetchImage('base/test/images/face1.png') imgEl1 = await loadImage('test/images/face1.png')
imgEl2 = await fetchImage('base/test/images/face2.png') imgEl2 = await loadImage('test/images/face2.png')
imgElRect = await fetchImage('base/test/images/face_rectangular.png') imgElRect = await loadImage('test/images/face_rectangular.png')
faceLandmarkPositions1 = await fetchJson<Point[]>('base/test/data/faceLandmarkPositions1.json') faceLandmarkPositions1 = await loadJson<Point[]>('test/data/faceLandmarkPositions1.json')
faceLandmarkPositions2 = await fetchJson<Point[]>('base/test/data/faceLandmarkPositions2.json') faceLandmarkPositions2 = await loadJson<Point[]>('test/data/faceLandmarkPositions2.json')
faceLandmarkPositionsRect = await fetchJson<Point[]>('base/test/data/faceLandmarkPositionsRect.json') faceLandmarkPositionsRect = await loadJson<Point[]>('test/data/faceLandmarkPositionsRect.json')
}) })
describeWithNets('quantized weights', { withFaceLandmark68Net: { quantized: true } }, ({ faceLandmark68Net }) => { describeWithNets('quantized weights', { withFaceLandmark68Net: { quantized: true } }, ({ faceLandmark68Net }) => {
...@@ -85,14 +85,14 @@ describe('faceLandmark68Net', () => { ...@@ -85,14 +85,14 @@ describe('faceLandmark68Net', () => {
expect(result.shift.x).toEqual(0) expect(result.shift.x).toEqual(0)
expect(result.shift.y).toEqual(0) expect(result.shift.y).toEqual(0)
result.positions.forEach(({ x, y }, i) => { result.positions.forEach(({ x, y }, i) => {
expectMaxDelta(x, faceLandmarkPositions[batchIdx][i].x, 2) expectMaxDelta(x, faceLandmarkPositions[batchIdx][i].x, 5)
expectMaxDelta(y, faceLandmarkPositions[batchIdx][i].y, 2) expectMaxDelta(y, faceLandmarkPositions[batchIdx][i].y, 5)
}) })
}) })
}) })
it('computes face landmarks for batch of tf.Tensor3D', async () => { it('computes face landmarks for batch of tf.Tensor3D', async () => {
const inputs = [imgEl1, imgEl2, imgElRect].map(el => tf.fromPixels(el)) const inputs = [imgEl1, imgEl2, imgElRect].map(el => tf.fromPixels(createCanvasFromMedia(el)))
const faceLandmarkPositions = [ const faceLandmarkPositions = [
faceLandmarkPositions1, faceLandmarkPositions1,
...@@ -117,7 +117,7 @@ describe('faceLandmark68Net', () => { ...@@ -117,7 +117,7 @@ describe('faceLandmark68Net', () => {
}) })
it('computes face landmarks for batch of mixed inputs', async () => { it('computes face landmarks for batch of mixed inputs', async () => {
const inputs = [imgEl1, tf.fromPixels(imgEl2), tf.fromPixels(imgElRect)] const inputs = [imgEl1, tf.fromPixels(createCanvasFromMedia(imgEl2)), tf.fromPixels(createCanvasFromMedia(imgElRect))]
const faceLandmarkPositions = [ const faceLandmarkPositions = [
faceLandmarkPositions1, faceLandmarkPositions1,
...@@ -145,18 +145,6 @@ describe('faceLandmark68Net', () => { ...@@ -145,18 +145,6 @@ describe('faceLandmark68Net', () => {
describeWithNets('no memory leaks', { withFaceLandmark68Net: { quantized: true } }, ({ faceLandmark68Net }) => { describeWithNets('no memory leaks', { withFaceLandmark68Net: { quantized: true } }, ({ faceLandmark68Net }) => {
describe('NeuralNetwork, quantized model', () => {
it('disposes all param tensors', async () => {
await expectAllTensorsReleased(async () => {
const net = new FaceLandmark68Net()
await net.load('base/weights')
net.dispose()
})
})
})
describe('forwardInput', () => { describe('forwardInput', () => {
it('single image element', async () => { it('single image element', async () => {
...@@ -176,7 +164,7 @@ describe('faceLandmark68Net', () => { ...@@ -176,7 +164,7 @@ describe('faceLandmark68Net', () => {
}) })
it('single tf.Tensor3D', async () => { it('single tf.Tensor3D', async () => {
const tensor = tf.fromPixels(imgEl1) const tensor = tf.fromPixels(createCanvasFromMedia(imgEl1))
await expectAllTensorsReleased(async () => { await expectAllTensorsReleased(async () => {
const netInput = new NetInput([tensor]) const netInput = new NetInput([tensor])
...@@ -188,7 +176,7 @@ describe('faceLandmark68Net', () => { ...@@ -188,7 +176,7 @@ describe('faceLandmark68Net', () => {
}) })
it('multiple tf.Tensor3Ds', async () => { it('multiple tf.Tensor3Ds', async () => {
const tensors = [imgEl1, imgEl1, imgEl1].map(el => tf.fromPixels(el)) const tensors = [imgEl1, imgEl1, imgEl1].map(el => tf.fromPixels(createCanvasFromMedia(el)))
await expectAllTensorsReleased(async () => { await expectAllTensorsReleased(async () => {
const netInput = new NetInput(tensors) const netInput = new NetInput(tensors)
...@@ -200,7 +188,7 @@ describe('faceLandmark68Net', () => { ...@@ -200,7 +188,7 @@ describe('faceLandmark68Net', () => {
}) })
it('single batch size 1 tf.Tensor4Ds', async () => { it('single batch size 1 tf.Tensor4Ds', async () => {
const tensor = tf.tidy(() => tf.fromPixels(imgEl1).expandDims()) as tf.Tensor4D const tensor = tf.tidy(() => tf.fromPixels(createCanvasFromMedia(imgEl1)).expandDims()) as tf.Tensor4D
await expectAllTensorsReleased(async () => { await expectAllTensorsReleased(async () => {
const outTensor = await faceLandmark68Net.forwardInput(await toNetInput(tensor)) const outTensor = await faceLandmark68Net.forwardInput(await toNetInput(tensor))
...@@ -212,7 +200,7 @@ describe('faceLandmark68Net', () => { ...@@ -212,7 +200,7 @@ describe('faceLandmark68Net', () => {
it('multiple batch size 1 tf.Tensor4Ds', async () => { it('multiple batch size 1 tf.Tensor4Ds', async () => {
const tensors = [imgEl1, imgEl1, imgEl1] const tensors = [imgEl1, imgEl1, imgEl1]
.map(el => tf.tidy(() => tf.fromPixels(el).expandDims())) as tf.Tensor4D[] .map(el => tf.tidy(() => tf.fromPixels(createCanvasFromMedia(el)).expandDims())) as tf.Tensor4D[]
await expectAllTensorsReleased(async () => { await expectAllTensorsReleased(async () => {
const outTensor = await faceLandmark68Net.forwardInput(await toNetInput(tensors)) const outTensor = await faceLandmark68Net.forwardInput(await toNetInput(tensors))
...@@ -239,7 +227,7 @@ describe('faceLandmark68Net', () => { ...@@ -239,7 +227,7 @@ describe('faceLandmark68Net', () => {
}) })
it('single tf.Tensor3D', async () => { it('single tf.Tensor3D', async () => {
const tensor = tf.fromPixels(imgEl1) const tensor = tf.fromPixels(createCanvasFromMedia(imgEl1))
await expectAllTensorsReleased(async () => { await expectAllTensorsReleased(async () => {
await faceLandmark68Net.detectLandmarks(tensor) await faceLandmark68Net.detectLandmarks(tensor)
...@@ -249,7 +237,7 @@ describe('faceLandmark68Net', () => { ...@@ -249,7 +237,7 @@ describe('faceLandmark68Net', () => {
}) })
it('multiple tf.Tensor3Ds', async () => { it('multiple tf.Tensor3Ds', async () => {
const tensors = [imgEl1, imgEl1, imgEl1].map(el => tf.fromPixels(el)) const tensors = [imgEl1, imgEl1, imgEl1].map(el => tf.fromPixels(createCanvasFromMedia(el)))
await expectAllTensorsReleased(async () => { await expectAllTensorsReleased(async () => {
...@@ -260,7 +248,7 @@ describe('faceLandmark68Net', () => { ...@@ -260,7 +248,7 @@ describe('faceLandmark68Net', () => {
}) })
it('single batch size 1 tf.Tensor4Ds', async () => { it('single batch size 1 tf.Tensor4Ds', async () => {
const tensor = tf.tidy(() => tf.fromPixels(imgEl1).expandDims()) as tf.Tensor4D const tensor = tf.tidy(() => tf.fromPixels(createCanvasFromMedia(imgEl1)).expandDims()) as tf.Tensor4D
await expectAllTensorsReleased(async () => { await expectAllTensorsReleased(async () => {
await faceLandmark68Net.detectLandmarks(tensor) await faceLandmark68Net.detectLandmarks(tensor)
...@@ -271,7 +259,7 @@ describe('faceLandmark68Net', () => { ...@@ -271,7 +259,7 @@ describe('faceLandmark68Net', () => {
it('multiple batch size 1 tf.Tensor4Ds', async () => { it('multiple batch size 1 tf.Tensor4Ds', async () => {
const tensors = [imgEl1, imgEl1, imgEl1] const tensors = [imgEl1, imgEl1, imgEl1]
.map(el => tf.tidy(() => tf.fromPixels(el).expandDims())) as tf.Tensor4D[] .map(el => tf.tidy(() => tf.fromPixels(createCanvasFromMedia(el)).expandDims())) as tf.Tensor4D[]
await expectAllTensorsReleased(async () => { await expectAllTensorsReleased(async () => {
await faceLandmark68Net.detectLandmarks(tensors) await faceLandmark68Net.detectLandmarks(tensors)
......
import { fetchImage, fetchJson, Point } from '../../../src'; import { Point } from '../../../src';
import { FaceLandmarks68 } from '../../../src/classes/FaceLandmarks68'; import { FaceLandmarks68 } from '../../../src/classes/FaceLandmarks68';
import { createFaceLandmarkNet } from '../../../src/faceLandmarkNet'; import { loadImage, loadJson } from '../../env';
import { describeWithNets, expectAllTensorsReleased, expectPointClose } from '../../utils'; import { describeWithNets, expectPointClose } from '../../utils';
describe('faceLandmark68Net, uncompressed', () => { describe('faceLandmark68Net, uncompressed', () => {
...@@ -11,10 +11,10 @@ describe('faceLandmark68Net, uncompressed', () => { ...@@ -11,10 +11,10 @@ describe('faceLandmark68Net, uncompressed', () => {
let faceLandmarkPositionsRect: Point[] let faceLandmarkPositionsRect: Point[]
beforeAll(async () => { beforeAll(async () => {
imgEl1 = await fetchImage('base/test/images/face1.png') imgEl1 = await loadImage('test/images/face1.png')
imgElRect = await fetchImage('base/test/images/face_rectangular.png') imgElRect = await loadImage('test/images/face_rectangular.png')
faceLandmarkPositions1 = await fetchJson<Point[]>('base/test/data/faceLandmarkPositions1.json') faceLandmarkPositions1 = await loadJson<Point[]>('test/data/faceLandmarkPositions1.json')
faceLandmarkPositionsRect = await fetchJson<Point[]>('base/test/data/faceLandmarkPositionsRect.json') faceLandmarkPositionsRect = await loadJson<Point[]>('test/data/faceLandmarkPositionsRect.json')
}) })
describeWithNets('uncompressed weights', { withFaceLandmark68Net: { quantized: false } }, ({ faceLandmark68Net }) => { describeWithNets('uncompressed weights', { withFaceLandmark68Net: { quantized: false } }, ({ faceLandmark68Net }) => {
...@@ -43,16 +43,7 @@ describe('faceLandmark68Net, uncompressed', () => { ...@@ -43,16 +43,7 @@ describe('faceLandmark68Net, uncompressed', () => {
expect(result.shift.y).toEqual(0) expect(result.shift.y).toEqual(0)
result.positions.forEach((pt, i) => { result.positions.forEach((pt, i) => {
const { x, y } = faceLandmarkPositionsRect[i] const { x, y } = faceLandmarkPositionsRect[i]
expectPointClose(pt, { x, y }, 2) expectPointClose(pt, { x, y }, 5)
})
})
it('no memory leaks', async () => {
await expectAllTensorsReleased(async () => {
const res = await fetch('base/weights_uncompressed/face_landmark_68_model.weights')
const weights = new Float32Array(await res.arrayBuffer())
const net = createFaceLandmarkNet(weights)
net.dispose()
}) })
}) })
......
import * as tf from '@tensorflow/tfjs-core'; import * as tf from '@tensorflow/tfjs-core';
import { fetchImage, fetchJson, IDimensions, isTensor3D, NetInput, Point, TMediaElement, toNetInput } from '../../../src'; import { createCanvasFromMedia, IDimensions, isTensor3D, NetInput, Point, TMediaElement, toNetInput } from '../../../src';
import { FaceLandmarks68 } from '../../../src/classes/FaceLandmarks68'; import { FaceLandmarks68 } from '../../../src/classes/FaceLandmarks68';
import { createFaceLandmarkNet } from '../../../src/faceLandmarkNet'; import { loadImage, loadJson } from '../../env';
import { FaceLandmark68TinyNet } from '../../../src/faceLandmarkNet/FaceLandmark68TinyNet';
import { describeWithNets, expectAllTensorsReleased, expectPointClose } from '../../utils'; import { describeWithNets, expectAllTensorsReleased, expectPointClose } from '../../utils';
function getInputDims (input: tf.Tensor | TMediaElement): IDimensions { function getInputDims (input: tf.Tensor | TMediaElement): IDimensions {
...@@ -24,12 +23,12 @@ describe('faceLandmark68TinyNet', () => { ...@@ -24,12 +23,12 @@ describe('faceLandmark68TinyNet', () => {
let faceLandmarkPositionsRect: Point[] let faceLandmarkPositionsRect: Point[]
beforeAll(async () => { beforeAll(async () => {
imgEl1 = await fetchImage('base/test/images/face1.png') imgEl1 = await loadImage('test/images/face1.png')
imgEl2 = await fetchImage('base/test/images/face2.png') imgEl2 = await loadImage('test/images/face2.png')
imgElRect = await fetchImage('base/test/images/face_rectangular.png') imgElRect = await loadImage('test/images/face_rectangular.png')
faceLandmarkPositions1 = await fetchJson<Point[]>('base/test/data/faceLandmarkPositions1Tiny.json') faceLandmarkPositions1 = await loadJson<Point[]>('test/data/faceLandmarkPositions1Tiny.json')
faceLandmarkPositions2 = await fetchJson<Point[]>('base/test/data/faceLandmarkPositions2Tiny.json') faceLandmarkPositions2 = await loadJson<Point[]>('test/data/faceLandmarkPositions2Tiny.json')
faceLandmarkPositionsRect = await fetchJson<Point[]>('base/test/data/faceLandmarkPositionsRectTiny.json') faceLandmarkPositionsRect = await loadJson<Point[]>('test/data/faceLandmarkPositionsRectTiny.json')
}) })
describeWithNets('quantized weights', { withFaceLandmark68TinyNet: { quantized: true } }, ({ faceLandmark68TinyNet }) => { describeWithNets('quantized weights', { withFaceLandmark68TinyNet: { quantized: true } }, ({ faceLandmark68TinyNet }) => {
...@@ -92,7 +91,7 @@ describe('faceLandmark68TinyNet', () => { ...@@ -92,7 +91,7 @@ describe('faceLandmark68TinyNet', () => {
}) })
it('computes face landmarks for batch of tf.Tensor3D', async () => { it('computes face landmarks for batch of tf.Tensor3D', async () => {
const inputs = [imgEl1, imgEl2, imgElRect].map(el => tf.fromPixels(el)) const inputs = [imgEl1, imgEl2, imgElRect].map(el => tf.fromPixels(createCanvasFromMedia(el)))
const faceLandmarkPositions = [ const faceLandmarkPositions = [
faceLandmarkPositions1, faceLandmarkPositions1,
...@@ -117,7 +116,7 @@ describe('faceLandmark68TinyNet', () => { ...@@ -117,7 +116,7 @@ describe('faceLandmark68TinyNet', () => {
}) })
it('computes face landmarks for batch of mixed inputs', async () => { it('computes face landmarks for batch of mixed inputs', async () => {
const inputs = [imgEl1, tf.fromPixels(imgEl2), tf.fromPixels(imgElRect)] const inputs = [imgEl1, tf.fromPixels(createCanvasFromMedia(imgEl2)), tf.fromPixels(createCanvasFromMedia(imgElRect))]
const faceLandmarkPositions = [ const faceLandmarkPositions = [
faceLandmarkPositions1, faceLandmarkPositions1,
...@@ -145,17 +144,6 @@ describe('faceLandmark68TinyNet', () => { ...@@ -145,17 +144,6 @@ describe('faceLandmark68TinyNet', () => {
describeWithNets('no memory leaks', { withFaceLandmark68TinyNet: { quantized: true } }, ({ faceLandmark68TinyNet }) => { describeWithNets('no memory leaks', { withFaceLandmark68TinyNet: { quantized: true } }, ({ faceLandmark68TinyNet }) => {
describe('NeuralNetwork, quantized model', () => {
it('disposes all param tensors', async () => {
await expectAllTensorsReleased(async () => {
const net = new FaceLandmark68TinyNet()
await net.load('base/weights')
net.dispose()
})
})
})
describe('forwardInput', () => { describe('forwardInput', () => {
...@@ -176,7 +164,7 @@ describe('faceLandmark68TinyNet', () => { ...@@ -176,7 +164,7 @@ describe('faceLandmark68TinyNet', () => {
}) })
it('single tf.Tensor3D', async () => { it('single tf.Tensor3D', async () => {
const tensor = tf.fromPixels(imgEl1) const tensor = tf.fromPixels(createCanvasFromMedia(imgEl1))
await expectAllTensorsReleased(async () => { await expectAllTensorsReleased(async () => {
const netInput = new NetInput([tensor]) const netInput = new NetInput([tensor])
...@@ -188,7 +176,7 @@ describe('faceLandmark68TinyNet', () => { ...@@ -188,7 +176,7 @@ describe('faceLandmark68TinyNet', () => {
}) })
it('multiple tf.Tensor3Ds', async () => { it('multiple tf.Tensor3Ds', async () => {
const tensors = [imgEl1, imgEl1, imgEl1].map(el => tf.fromPixels(el)) const tensors = [imgEl1, imgEl1, imgEl1].map(el => tf.fromPixels(createCanvasFromMedia(el)))
await expectAllTensorsReleased(async () => { await expectAllTensorsReleased(async () => {
const netInput = new NetInput(tensors) const netInput = new NetInput(tensors)
...@@ -200,7 +188,7 @@ describe('faceLandmark68TinyNet', () => { ...@@ -200,7 +188,7 @@ describe('faceLandmark68TinyNet', () => {
}) })
it('single batch size 1 tf.Tensor4Ds', async () => { it('single batch size 1 tf.Tensor4Ds', async () => {
const tensor = tf.tidy(() => tf.fromPixels(imgEl1).expandDims()) as tf.Tensor4D const tensor = tf.tidy(() => tf.fromPixels(createCanvasFromMedia(imgEl1)).expandDims()) as tf.Tensor4D
await expectAllTensorsReleased(async () => { await expectAllTensorsReleased(async () => {
const outTensor = await faceLandmark68TinyNet.forwardInput(await toNetInput(tensor)) const outTensor = await faceLandmark68TinyNet.forwardInput(await toNetInput(tensor))
...@@ -212,7 +200,7 @@ describe('faceLandmark68TinyNet', () => { ...@@ -212,7 +200,7 @@ describe('faceLandmark68TinyNet', () => {
it('multiple batch size 1 tf.Tensor4Ds', async () => { it('multiple batch size 1 tf.Tensor4Ds', async () => {
const tensors = [imgEl1, imgEl1, imgEl1] const tensors = [imgEl1, imgEl1, imgEl1]
.map(el => tf.tidy(() => tf.fromPixels(el).expandDims())) as tf.Tensor4D[] .map(el => tf.tidy(() => tf.fromPixels(createCanvasFromMedia(el)).expandDims())) as tf.Tensor4D[]
await expectAllTensorsReleased(async () => { await expectAllTensorsReleased(async () => {
const outTensor = await faceLandmark68TinyNet.forwardInput(await toNetInput(tensors)) const outTensor = await faceLandmark68TinyNet.forwardInput(await toNetInput(tensors))
...@@ -239,7 +227,7 @@ describe('faceLandmark68TinyNet', () => { ...@@ -239,7 +227,7 @@ describe('faceLandmark68TinyNet', () => {
}) })
it('single tf.Tensor3D', async () => { it('single tf.Tensor3D', async () => {
const tensor = tf.fromPixels(imgEl1) const tensor = tf.fromPixels(createCanvasFromMedia(imgEl1))
await expectAllTensorsReleased(async () => { await expectAllTensorsReleased(async () => {
await faceLandmark68TinyNet.detectLandmarks(tensor) await faceLandmark68TinyNet.detectLandmarks(tensor)
...@@ -249,7 +237,7 @@ describe('faceLandmark68TinyNet', () => { ...@@ -249,7 +237,7 @@ describe('faceLandmark68TinyNet', () => {
}) })
it('multiple tf.Tensor3Ds', async () => { it('multiple tf.Tensor3Ds', async () => {
const tensors = [imgEl1, imgEl1, imgEl1].map(el => tf.fromPixels(el)) const tensors = [imgEl1, imgEl1, imgEl1].map(el => tf.fromPixels(createCanvasFromMedia(el)))
await expectAllTensorsReleased(async () => { await expectAllTensorsReleased(async () => {
...@@ -260,7 +248,7 @@ describe('faceLandmark68TinyNet', () => { ...@@ -260,7 +248,7 @@ describe('faceLandmark68TinyNet', () => {
}) })
it('single batch size 1 tf.Tensor4Ds', async () => { it('single batch size 1 tf.Tensor4Ds', async () => {
const tensor = tf.tidy(() => tf.fromPixels(imgEl1).expandDims()) as tf.Tensor4D const tensor = tf.tidy(() => tf.fromPixels(createCanvasFromMedia(imgEl1)).expandDims()) as tf.Tensor4D
await expectAllTensorsReleased(async () => { await expectAllTensorsReleased(async () => {
await faceLandmark68TinyNet.detectLandmarks(tensor) await faceLandmark68TinyNet.detectLandmarks(tensor)
...@@ -271,7 +259,7 @@ describe('faceLandmark68TinyNet', () => { ...@@ -271,7 +259,7 @@ describe('faceLandmark68TinyNet', () => {
it('multiple batch size 1 tf.Tensor4Ds', async () => { it('multiple batch size 1 tf.Tensor4Ds', async () => {
const tensors = [imgEl1, imgEl1, imgEl1] const tensors = [imgEl1, imgEl1, imgEl1]
.map(el => tf.tidy(() => tf.fromPixels(el).expandDims())) as tf.Tensor4D[] .map(el => tf.tidy(() => tf.fromPixels(createCanvasFromMedia(el)).expandDims())) as tf.Tensor4D[]
await expectAllTensorsReleased(async () => { await expectAllTensorsReleased(async () => {
await faceLandmark68TinyNet.detectLandmarks(tensors) await faceLandmark68TinyNet.detectLandmarks(tensors)
......
import { fetchImage, fetchJson, Point } from '../../../src'; import { Point } from '../../../src';
import { FaceLandmarks68 } from '../../../src/classes/FaceLandmarks68'; import { FaceLandmarks68 } from '../../../src/classes/FaceLandmarks68';
import { createFaceLandmarkNet } from '../../../src/faceLandmarkNet'; import { loadImage, loadJson } from '../../env';
import { describeWithNets, expectAllTensorsReleased, expectPointClose } from '../../utils'; import { describeWithNets, expectPointClose } from '../../utils';
describe('faceLandmark68TinyNet, uncompressed', () => { describe('faceLandmark68TinyNet, uncompressed', () => {
...@@ -11,10 +11,10 @@ describe('faceLandmark68TinyNet, uncompressed', () => { ...@@ -11,10 +11,10 @@ describe('faceLandmark68TinyNet, uncompressed', () => {
let faceLandmarkPositionsRect: Point[] let faceLandmarkPositionsRect: Point[]
beforeAll(async () => { beforeAll(async () => {
imgEl1 = await fetchImage('base/test/images/face1.png') imgEl1 = await loadImage('test/images/face1.png')
imgElRect = await fetchImage('base/test/images/face_rectangular.png') imgElRect = await loadImage('test/images/face_rectangular.png')
faceLandmarkPositions1 = await fetchJson<Point[]>('base/test/data/faceLandmarkPositions1Tiny.json') faceLandmarkPositions1 = await loadJson<Point[]>('test/data/faceLandmarkPositions1Tiny.json')
faceLandmarkPositionsRect = await fetchJson<Point[]>('base/test/data/faceLandmarkPositionsRectTiny.json') faceLandmarkPositionsRect = await loadJson<Point[]>('test/data/faceLandmarkPositionsRectTiny.json')
}) })
describeWithNets('uncompressed weights', { withFaceLandmark68TinyNet: { quantized: false } }, ({ faceLandmark68TinyNet }) => { describeWithNets('uncompressed weights', { withFaceLandmark68TinyNet: { quantized: false } }, ({ faceLandmark68TinyNet }) => {
...@@ -47,15 +47,6 @@ describe('faceLandmark68TinyNet, uncompressed', () => { ...@@ -47,15 +47,6 @@ describe('faceLandmark68TinyNet, uncompressed', () => {
}) })
}) })
it('no memory leaks', async () => {
await expectAllTensorsReleased(async () => {
const res = await fetch('base/weights_uncompressed/face_landmark_68_model.weights')
const weights = new Float32Array(await res.arrayBuffer())
const net = createFaceLandmarkNet(weights)
net.dispose()
})
})
}) })
}) })
......
import * as tf from '@tensorflow/tfjs-core'; import * as tf from '@tensorflow/tfjs-core';
import { FaceRecognitionNet, fetchImage, fetchJson, NetInput, toNetInput } from '../../../src'; import { createCanvasFromMedia, NetInput, toNetInput } from '../../../src';
import { euclideanDistance } from '../../../src/euclideanDistance'; import { euclideanDistance } from '../../../src/euclideanDistance';
import { loadImage, loadJson } from '../../env';
import { describeWithNets, expectAllTensorsReleased } from '../../utils'; import { describeWithNets, expectAllTensorsReleased } from '../../utils';
describe('faceRecognitionNet', () => { describe('faceRecognitionNet', () => {
let imgEl1: HTMLImageElement let imgEl1: HTMLCanvasElement
let imgEl2: HTMLImageElement let imgEl2: HTMLCanvasElement
let imgElRect: HTMLImageElement let imgElRect: HTMLCanvasElement
let faceDescriptor1: number[] let faceDescriptor1: number[]
let faceDescriptor2: number[] let faceDescriptor2: number[]
let faceDescriptorRect: number[] let faceDescriptorRect: number[]
beforeAll(async () => { beforeAll(async () => {
imgEl1 = await fetchImage('base/test/images/face1.png') imgEl1 = createCanvasFromMedia(await loadImage('test/images/face1.png'))
imgEl2 = await fetchImage('base/test/images/face2.png') imgEl2 = createCanvasFromMedia(await loadImage('test/images/face2.png'))
imgElRect = await fetchImage('base/test/images/face_rectangular.png') imgElRect = createCanvasFromMedia(await loadImage('test/images/face_rectangular.png'))
faceDescriptor1 = await fetchJson<number[]>('base/test/data/faceDescriptor1.json') faceDescriptor1 = await loadJson<number[]>('test/data/faceDescriptor1.json')
faceDescriptor2 = await fetchJson<number[]>('base/test/data/faceDescriptor2.json') faceDescriptor2 = await loadJson<number[]>('test/data/faceDescriptor2.json')
faceDescriptorRect = await fetchJson<number[]>('base/test/data/faceDescriptorRect.json') faceDescriptorRect = await loadJson<number[]>('test/data/faceDescriptorRect.json')
}) })
describeWithNets('quantized weights', { withFaceRecognitionNet: { quantized: true } }, ({ faceRecognitionNet }) => { describeWithNets('quantized weights', { withFaceRecognitionNet: { quantized: true } }, ({ faceRecognitionNet }) => {
...@@ -96,18 +97,6 @@ describe('faceRecognitionNet', () => { ...@@ -96,18 +97,6 @@ describe('faceRecognitionNet', () => {
describeWithNets('no memory leaks', { withFaceRecognitionNet: { quantized: true } }, ({ faceRecognitionNet }) => { describeWithNets('no memory leaks', { withFaceRecognitionNet: { quantized: true } }, ({ faceRecognitionNet }) => {
describe('NeuralNetwork, quantized model', () => {
it('disposes all param tensors', async () => {
await expectAllTensorsReleased(async () => {
const net = new FaceRecognitionNet()
await net.load('base/weights')
net.dispose()
})
})
})
describe('forwardInput', () => { describe('forwardInput', () => {
it('single image element', async () => { it('single image element', async () => {
......
import { fetchImage, fetchJson } from '../../../src'; import { createCanvasFromMedia } from '../../../src';
import { euclideanDistance } from '../../../src/euclideanDistance'; import { euclideanDistance } from '../../../src/euclideanDistance';
import { createFaceRecognitionNet } from '../../../src/faceRecognitionNet'; import { loadImage, loadJson } from '../../env';
import { describeWithNets, expectAllTensorsReleased } from '../../utils'; import { describeWithNets } from '../../utils';
describe('faceRecognitionNet, uncompressed', () => { describe('faceRecognitionNet, uncompressed', () => {
let imgEl1: HTMLImageElement let imgEl1: HTMLCanvasElement
let imgElRect: HTMLImageElement let imgElRect: HTMLCanvasElement
let faceDescriptor1: number[] let faceDescriptor1: number[]
let faceDescriptorRect: number[] let faceDescriptorRect: number[]
beforeAll(async () => { beforeAll(async () => {
imgEl1 = await fetchImage('base/test/images/face1.png') imgEl1 = createCanvasFromMedia(await loadImage('test/images/face1.png'))
imgElRect = await fetchImage('base/test/images/face_rectangular.png') imgElRect = createCanvasFromMedia(await loadImage('test/images/face_rectangular.png'))
faceDescriptor1 = await fetchJson<number[]>('base/test/data/faceDescriptor1.json') faceDescriptor1 = await loadJson<number[]>('test/data/faceDescriptor1.json')
faceDescriptorRect = await fetchJson<number[]>('base/test/data/faceDescriptorRect.json') faceDescriptorRect = await loadJson<number[]>('test/data/faceDescriptorRect.json')
}) })
describeWithNets('uncompressed weights', { withFaceRecognitionNet: { quantized: false } }, ({ faceRecognitionNet }) => { describeWithNets('uncompressed weights', { withFaceRecognitionNet: { quantized: false } }, ({ faceRecognitionNet }) => {
...@@ -31,14 +31,5 @@ describe('faceRecognitionNet, uncompressed', () => { ...@@ -31,14 +31,5 @@ describe('faceRecognitionNet, uncompressed', () => {
expect(euclideanDistance(result, faceDescriptorRect)).toBeLessThan(0.1) expect(euclideanDistance(result, faceDescriptorRect)).toBeLessThan(0.1)
}) })
it('no memory leaks', async () => {
await expectAllTensorsReleased(async () => {
const res = await fetch('base/weights_uncompressed/face_recognition_model.weights')
const weights = new Float32Array(await res.arrayBuffer())
const net = createFaceRecognitionNet(weights)
net.dispose()
})
})
}) })
}) })
\ No newline at end of file
import * as faceapi from '../../../src'; import { IPoint } from '../../../src';
import { describeWithNets, expectAllTensorsReleased } from '../../utils'; import { loadImage, loadJson } from '../../env';
import { describeWithNets } from '../../utils';
import { expectMtcnnResults } from './expectMtcnnResults'; import { expectMtcnnResults } from './expectMtcnnResults';
import { IPoint, fetchImage, fetchJson } from '../../../src';
describe('mtcnn.forward', () => { describe('mtcnn.forward', () => {
...@@ -9,8 +9,8 @@ describe('mtcnn.forward', () => { ...@@ -9,8 +9,8 @@ describe('mtcnn.forward', () => {
let expectedMtcnnLandmarks: IPoint[][] let expectedMtcnnLandmarks: IPoint[][]
beforeAll(async () => { beforeAll(async () => {
imgEl = await fetchImage('base/test/images/faces.jpg') imgEl = await loadImage('test/images/faces.jpg')
expectedMtcnnLandmarks = await fetchJson<IPoint[][]>('base/test/data/mtcnnFaceLandmarkPositions.json') expectedMtcnnLandmarks = await loadJson<IPoint[][]>('test/data/mtcnnFaceLandmarkPositions.json')
}) })
// "quantized" actually means loaded from manifest.json, since there is no quantization applied to the mtcnn model // "quantized" actually means loaded from manifest.json, since there is no quantization applied to the mtcnn model
...@@ -26,8 +26,8 @@ describe('mtcnn.forward', () => { ...@@ -26,8 +26,8 @@ describe('mtcnn.forward', () => {
const deltas = { const deltas = {
maxScoreDelta: 0.01, maxScoreDelta: 0.01,
maxBoxDelta: 2, maxBoxDelta: 10,
maxLandmarksDelta: 5 maxLandmarksDelta: 10
} }
expectMtcnnResults(results, expectedMtcnnLandmarks, [1.0, 1.0, 1.0, 1.0, 0.99, 0.99], deltas) expectMtcnnResults(results, expectedMtcnnLandmarks, [1.0, 1.0, 1.0, 1.0, 0.99, 0.99], deltas)
}) })
...@@ -43,7 +43,7 @@ describe('mtcnn.forward', () => { ...@@ -43,7 +43,7 @@ describe('mtcnn.forward', () => {
const deltas = { const deltas = {
maxScoreDelta: 0.01, maxScoreDelta: 0.01,
maxBoxDelta: 15, maxBoxDelta: 15,
maxLandmarksDelta: 13 maxLandmarksDelta: 15
} }
expectMtcnnResults(results, expectedMtcnnLandmarks, [1.0, 1.0, 1.0, 1.0, 1.0, 0.99], deltas) expectMtcnnResults(results, expectedMtcnnLandmarks, [1.0, 1.0, 1.0, 1.0, 1.0, 0.99], deltas)
}) })
...@@ -61,8 +61,8 @@ describe('mtcnn.forward', () => { ...@@ -61,8 +61,8 @@ describe('mtcnn.forward', () => {
const deltas = { const deltas = {
maxScoreDelta: 0.01, maxScoreDelta: 0.01,
maxBoxDelta: 8, maxBoxDelta: 15,
maxLandmarksDelta: 7 maxLandmarksDelta: 20
} }
expectMtcnnResults(results, expectedMtcnnLandmarks, [1.0, 1.0, 1.0, 0.99, 1.0, 1.0], deltas) expectMtcnnResults(results, expectedMtcnnLandmarks, [1.0, 1.0, 1.0, 0.99, 1.0, 1.0], deltas)
}) })
...@@ -77,20 +77,12 @@ describe('mtcnn.forward', () => { ...@@ -77,20 +77,12 @@ describe('mtcnn.forward', () => {
const deltas = { const deltas = {
maxScoreDelta: 0.01, maxScoreDelta: 0.01,
maxBoxDelta: 8, maxBoxDelta: 15,
maxLandmarksDelta: 10 maxLandmarksDelta: 15
} }
expectMtcnnResults(results, expectedMtcnnLandmarks, [1.0, 1.0, 1.0, 1.0, 1.0, 1.0], deltas) expectMtcnnResults(results, expectedMtcnnLandmarks, [1.0, 1.0, 1.0, 1.0, 1.0, 1.0], deltas)
}) })
it('no memory leaks', async () => {
await expectAllTensorsReleased(async () => {
const net = new faceapi.Mtcnn()
await net.load('base/weights')
net.dispose()
})
})
}) })
}) })
\ No newline at end of file
import * as faceapi from '../../../src'; import { IPoint } from '../../../src';
import { describeWithNets, expectAllTensorsReleased } from '../../utils'; import { loadImage, loadJson } from '../../env';
import { describeWithNets } from '../../utils';
import { expectMtcnnResults } from './expectMtcnnResults'; import { expectMtcnnResults } from './expectMtcnnResults';
import { IPoint, fetchImage, fetchJson } from '../../../src';
describe('mtcnn.forward', () => { describe('mtcnn.forward', () => {
...@@ -9,8 +9,8 @@ describe('mtcnn.forward', () => { ...@@ -9,8 +9,8 @@ describe('mtcnn.forward', () => {
let expectedMtcnnLandmarks: IPoint[][] let expectedMtcnnLandmarks: IPoint[][]
beforeAll(async () => { beforeAll(async () => {
imgEl = await fetchImage('base/test/images/faces.jpg') imgEl = await loadImage('test/images/faces.jpg')
expectedMtcnnLandmarks = await fetchJson<IPoint[][]>('base/test/data/mtcnnFaceLandmarkPositions.json') expectedMtcnnLandmarks = await loadJson<IPoint[][]>('test/data/mtcnnFaceLandmarkPositions.json')
}) })
describeWithNets('uncompressed weights', { withMtcnn: { quantized: false } }, ({ mtcnn }) => { describeWithNets('uncompressed weights', { withMtcnn: { quantized: false } }, ({ mtcnn }) => {
...@@ -25,8 +25,8 @@ describe('mtcnn.forward', () => { ...@@ -25,8 +25,8 @@ describe('mtcnn.forward', () => {
const deltas = { const deltas = {
maxScoreDelta: 0.01, maxScoreDelta: 0.01,
maxBoxDelta: 2, maxBoxDelta: 10,
maxLandmarksDelta: 5 maxLandmarksDelta: 10
} }
expectMtcnnResults(results, expectedMtcnnLandmarks, [1.0, 1.0, 1.0, 1.0, 0.99, 0.99], deltas) expectMtcnnResults(results, expectedMtcnnLandmarks, [1.0, 1.0, 1.0, 1.0, 0.99, 0.99], deltas)
}) })
...@@ -42,7 +42,7 @@ describe('mtcnn.forward', () => { ...@@ -42,7 +42,7 @@ describe('mtcnn.forward', () => {
const deltas = { const deltas = {
maxScoreDelta: 0.01, maxScoreDelta: 0.01,
maxBoxDelta: 15, maxBoxDelta: 15,
maxLandmarksDelta: 13 maxLandmarksDelta: 15
} }
expectMtcnnResults(results, expectedMtcnnLandmarks, [1.0, 1.0, 1.0, 1.0, 1.0, 0.99], deltas) expectMtcnnResults(results, expectedMtcnnLandmarks, [1.0, 1.0, 1.0, 1.0, 1.0, 0.99], deltas)
}) })
...@@ -60,8 +60,8 @@ describe('mtcnn.forward', () => { ...@@ -60,8 +60,8 @@ describe('mtcnn.forward', () => {
const deltas = { const deltas = {
maxScoreDelta: 0.01, maxScoreDelta: 0.01,
maxBoxDelta: 8, maxBoxDelta: 15,
maxLandmarksDelta: 7 maxLandmarksDelta: 20
} }
expectMtcnnResults(results, expectedMtcnnLandmarks, [1.0, 1.0, 1.0, 0.99, 1.0, 1.0], deltas) expectMtcnnResults(results, expectedMtcnnLandmarks, [1.0, 1.0, 1.0, 0.99, 1.0, 1.0], deltas)
}) })
...@@ -76,21 +76,12 @@ describe('mtcnn.forward', () => { ...@@ -76,21 +76,12 @@ describe('mtcnn.forward', () => {
const deltas = { const deltas = {
maxScoreDelta: 0.01, maxScoreDelta: 0.01,
maxBoxDelta: 8, maxBoxDelta: 15,
maxLandmarksDelta: 10 maxLandmarksDelta: 15
} }
expectMtcnnResults(results, expectedMtcnnLandmarks, [1.0, 1.0, 1.0, 1.0, 1.0, 1.0], deltas) expectMtcnnResults(results, expectedMtcnnLandmarks, [1.0, 1.0, 1.0, 1.0, 1.0, 1.0], deltas)
}) })
it('no memory leaks', async () => {
await expectAllTensorsReleased(async () => {
const res = await fetch('base/weights_uncompressed/mtcnn_model.weights')
const weights = new Float32Array(await res.arrayBuffer())
const net = faceapi.createMtcnn(weights)
net.dispose()
})
})
}) })
}) })
\ No newline at end of file
import * as faceapi from '../../../src'; import * as faceapi from '../../../src';
import { describeWithNets, expectAllTensorsReleased, assembleExpectedFullFaceDescriptions, ExpectedFullFaceDescription } from '../../utils';
import { expectedMtcnnBoxes } from './expectMtcnnResults';
import { fetchImage } from '../../../src';
import { MtcnnOptions } from '../../../src/mtcnn/MtcnnOptions'; import { MtcnnOptions } from '../../../src/mtcnn/MtcnnOptions';
import { loadImage } from '../../env';
import { expectFaceDetections } from '../../expectFaceDetections'; import { expectFaceDetections } from '../../expectFaceDetections';
import { expectFullFaceDescriptions } from '../../expectFullFaceDescriptions';
import { expectFaceDetectionsWithLandmarks } from '../../expectFaceDetectionsWithLandmarks'; import { expectFaceDetectionsWithLandmarks } from '../../expectFaceDetectionsWithLandmarks';
import { expectFullFaceDescriptions } from '../../expectFullFaceDescriptions';
import {
assembleExpectedFullFaceDescriptions,
describeWithNets,
expectAllTensorsReleased,
ExpectedFullFaceDescription,
} from '../../utils';
import { expectedMtcnnBoxes } from './expectMtcnnResults';
describe('mtcnn', () => { describe('mtcnn', () => {
...@@ -14,11 +19,11 @@ describe('mtcnn', () => { ...@@ -14,11 +19,11 @@ describe('mtcnn', () => {
const expectedScores = [1.0, 1.0, 1.0, 1.0, 0.99, 0.99] const expectedScores = [1.0, 1.0, 1.0, 1.0, 0.99, 0.99]
beforeAll(async () => { beforeAll(async () => {
imgEl = await fetchImage('base/test/images/faces.jpg') imgEl = await loadImage('test/images/faces.jpg')
expectedFullFaceDescriptions = await assembleExpectedFullFaceDescriptions(expectedMtcnnBoxes) expectedFullFaceDescriptions = await assembleExpectedFullFaceDescriptions(expectedMtcnnBoxes)
}) })
describeWithNets('detectAllFaces', { withAllFacesMtcnn: true }, () => { describeWithNets('globalApi', { withAllFacesMtcnn: true }, () => {
it('detectAllFaces', async () => { it('detectAllFaces', async () => {
const options = new MtcnnOptions({ const options = new MtcnnOptions({
...@@ -27,7 +32,7 @@ describe('mtcnn', () => { ...@@ -27,7 +32,7 @@ describe('mtcnn', () => {
const results = await faceapi.detectAllFaces(imgEl, options) const results = await faceapi.detectAllFaces(imgEl, options)
const maxScoreDelta = 0.01 const maxScoreDelta = 0.01
const maxBoxDelta = 2 const maxBoxDelta = 10
expect(results.length).toEqual(6) expect(results.length).toEqual(6)
expectFaceDetections(results, expectedMtcnnBoxes, expectedScores, maxScoreDelta, maxBoxDelta) expectFaceDetections(results, expectedMtcnnBoxes, expectedScores, maxScoreDelta, maxBoxDelta)
}) })
...@@ -43,7 +48,7 @@ describe('mtcnn', () => { ...@@ -43,7 +48,7 @@ describe('mtcnn', () => {
const deltas = { const deltas = {
maxScoreDelta: 0.01, maxScoreDelta: 0.01,
maxBoxDelta: 2, maxBoxDelta: 10,
maxLandmarksDelta: 6 maxLandmarksDelta: 6
} }
expect(results.length).toEqual(6) expect(results.length).toEqual(6)
...@@ -62,7 +67,7 @@ describe('mtcnn', () => { ...@@ -62,7 +67,7 @@ describe('mtcnn', () => {
const deltas = { const deltas = {
maxScoreDelta: 0.01, maxScoreDelta: 0.01,
maxBoxDelta: 2, maxBoxDelta: 10,
maxLandmarksDelta: 6, maxLandmarksDelta: 6,
maxDescriptorDelta: 0.2 maxDescriptorDelta: 0.2
} }
......
import * as faceapi from '../../../src'; import * as faceapi from '../../../src';
import { describeWithNets, expectAllTensorsReleased } from '../../utils'; import { loadImage } from '../../env';
import { expectFaceDetections } from '../../expectFaceDetections'; import { expectFaceDetections } from '../../expectFaceDetections';
import { fetchImage } from '../../../src'; import { describeWithNets, expectAllTensorsReleased } from '../../utils';
import { expectedSsdBoxes } from './expectedBoxes'; import { expectedSsdBoxes } from './expectedBoxes';
describe('ssdMobilenetv1.locateFaces', () => { describe('ssdMobilenetv1.locateFaces', () => {
...@@ -9,18 +9,18 @@ describe('ssdMobilenetv1.locateFaces', () => { ...@@ -9,18 +9,18 @@ describe('ssdMobilenetv1.locateFaces', () => {
let imgEl: HTMLImageElement let imgEl: HTMLImageElement
beforeAll(async () => { beforeAll(async () => {
imgEl = await fetchImage('base/test/images/faces.jpg') imgEl = await loadImage('test/images/faces.jpg')
}) })
describeWithNets('quantized weights', { withSsdMobilenetv1: { quantized: true } }, ({ ssdMobilenetv1 }) => { describeWithNets('quantized weights', { withSsdMobilenetv1: { quantized: true } }, ({ ssdMobilenetv1 }) => {
it('scores > 0.8', async () => { it('scores > 0.7', async () => {
const detections = await ssdMobilenetv1.locateFaces(imgEl, { minConfidence: 0.8 }) as faceapi.FaceDetection[] const detections = await ssdMobilenetv1.locateFaces(imgEl, { minConfidence: 0.7 }) as faceapi.FaceDetection[]
expect(detections.length).toEqual(4) expect(detections.length).toEqual(4)
const expectedScores = [-1, 0.81, 0.97, 0.88, 0.84, -1] const expectedScores = [-1, 0.81, 0.97, 0.88, 0.84, -1]
const maxScoreDelta = 0.01 const maxScoreDelta = 0.05
const maxBoxDelta = 4 const maxBoxDelta = 4
expectFaceDetections(detections, expectedSsdBoxes, expectedScores, maxScoreDelta, maxBoxDelta) expectFaceDetections(detections, expectedSsdBoxes, expectedScores, maxScoreDelta, maxBoxDelta)
...@@ -32,20 +32,12 @@ describe('ssdMobilenetv1.locateFaces', () => { ...@@ -32,20 +32,12 @@ describe('ssdMobilenetv1.locateFaces', () => {
expect(detections.length).toEqual(6) expect(detections.length).toEqual(6)
const expectedScores = [0.54, 0.81, 0.97, 0.88, 0.84, 0.61] const expectedScores = [0.54, 0.81, 0.97, 0.88, 0.84, 0.61]
const maxScoreDelta = 0.01 const maxScoreDelta = 0.05
const maxBoxDelta = 5 const maxBoxDelta = 5
expectFaceDetections(detections, expectedSsdBoxes, expectedScores, maxScoreDelta, maxBoxDelta) expectFaceDetections(detections, expectedSsdBoxes, expectedScores, maxScoreDelta, maxBoxDelta)
}) })
it('no memory leaks', async () => {
await expectAllTensorsReleased(async () => {
const net = new faceapi.SsdMobilenetv1()
await net.load('base/weights')
net.dispose()
})
})
}) })
}) })
\ No newline at end of file
import * as faceapi from '../../../src'; import * as faceapi from '../../../src';
import { describeWithNets, expectAllTensorsReleased } from '../../utils'; import { loadImage } from '../../env';
import { expectFaceDetections } from '../../expectFaceDetections'; import { expectFaceDetections } from '../../expectFaceDetections';
import { fetchImage } from '../../../src'; import { describeWithNets, expectAllTensorsReleased } from '../../utils';
import { expectedSsdBoxes } from './expectedBoxes'; import { expectedSsdBoxes } from './expectedBoxes';
describe('ssdMobilenetv1.locateFaces, uncompressed', () => { describe('ssdMobilenetv1.locateFaces, uncompressed', () => {
...@@ -9,7 +9,7 @@ describe('ssdMobilenetv1.locateFaces, uncompressed', () => { ...@@ -9,7 +9,7 @@ describe('ssdMobilenetv1.locateFaces, uncompressed', () => {
let imgEl: HTMLImageElement let imgEl: HTMLImageElement
beforeAll(async () => { beforeAll(async () => {
imgEl = await fetchImage('base/test/images/faces.jpg') imgEl = await loadImage('test/images/faces.jpg')
}) })
describeWithNets('uncompressed weights', { withSsdMobilenetv1: { quantized: false } }, ({ ssdMobilenetv1 }) => { describeWithNets('uncompressed weights', { withSsdMobilenetv1: { quantized: false } }, ({ ssdMobilenetv1 }) => {
...@@ -20,8 +20,8 @@ describe('ssdMobilenetv1.locateFaces, uncompressed', () => { ...@@ -20,8 +20,8 @@ describe('ssdMobilenetv1.locateFaces, uncompressed', () => {
expect(detections.length).toEqual(3) expect(detections.length).toEqual(3)
const expectedScores = [-1, -1, 0.98, 0.88, 0.81, -1] const expectedScores = [-1, -1, 0.98, 0.88, 0.81, -1]
const maxScoreDelta = 0.01 const maxScoreDelta = 0.05
const maxBoxDelta = 3 const maxBoxDelta = 5
expectFaceDetections(detections, expectedSsdBoxes, expectedScores, maxScoreDelta, maxBoxDelta) expectFaceDetections(detections, expectedSsdBoxes, expectedScores, maxScoreDelta, maxBoxDelta)
}) })
...@@ -31,22 +31,13 @@ describe('ssdMobilenetv1.locateFaces, uncompressed', () => { ...@@ -31,22 +31,13 @@ describe('ssdMobilenetv1.locateFaces, uncompressed', () => {
expect(detections.length).toEqual(6) expect(detections.length).toEqual(6)
const expectedScores = [0.57, 0.74, 0.98, 0.88, 0.81, 0.58] const expectedScores = [0.57, 0.76, 0.98, 0.88, 0.81, 0.58]
const maxScoreDelta = 0.01 const maxScoreDelta = 0.05
const maxBoxDelta = 3 const maxBoxDelta = 5
expectFaceDetections(detections, expectedSsdBoxes, expectedScores, maxScoreDelta, maxBoxDelta) expectFaceDetections(detections, expectedSsdBoxes, expectedScores, maxScoreDelta, maxBoxDelta)
}) })
it('no memory leaks', async () => {
await expectAllTensorsReleased(async () => {
const res = await fetch('base/weights_uncompressed/ssd_mobilenetv1_model.weights')
const weights = new Float32Array(await res.arrayBuffer())
const net = faceapi.createSsdMobilenetv1(weights)
net.dispose()
})
})
}) })
}) })
\ No newline at end of file
import * as tf from '@tensorflow/tfjs-core';

import * as faceapi from '../../../src';
import { createCanvasFromMedia, SsdMobilenetv1Options } from '../../../src';
import { loadImage } from '../../env';
import { expectFaceDetections } from '../../expectFaceDetections';
import { expectFaceDetectionsWithLandmarks } from '../../expectFaceDetectionsWithLandmarks';
import { expectFullFaceDescriptions } from '../../expectFullFaceDescriptions';
import { assembleExpectedFullFaceDescriptions, describeWithNets, expectAllTensorsReleased, ExpectedFullFaceDescription } from '../../utils';
import { expectedSsdBoxes } from './expectedBoxes';

// Node-oriented specs: the global api is exercised with a tensor input
// instead of an HTMLImageElement.
describe('ssdMobilenetv1 - node', () => {

  // reference detection scores for the six faces in test/images/faces.jpg
  const expectedScores = [0.54, 0.81, 0.97, 0.88, 0.84, 0.61]

  let imgTensor: faceapi.tf.Tensor3D
  let expectedFullFaceDescriptions: ExpectedFullFaceDescription[]

  // fresh options instance per test run
  const detectorOptions = () => new SsdMobilenetv1Options({
    minConfidence: 0.5
  })

  beforeAll(async () => {
    const img = await loadImage('test/images/faces.jpg')
    imgTensor = tf.fromPixels(createCanvasFromMedia(img))
    expectedFullFaceDescriptions = await assembleExpectedFullFaceDescriptions(expectedSsdBoxes)
  })

  describeWithNets('globalApi, tensor inputs', { withAllFacesSsdMobilenetv1: true }, () => {

    it('detectAllFaces', async () => {
      const results = await faceapi.detectAllFaces(imgTensor, detectorOptions())

      expect(results.length).toEqual(6)
      // tolerances: score +/- 0.05, box corners +/- 5 px
      expectFaceDetections(results, expectedSsdBoxes, expectedScores, 0.05, 5)
    })

    it('detectAllFaces.withFaceLandmarks()', async () => {
      const results = await faceapi
        .detectAllFaces(imgTensor, detectorOptions())
        .withFaceLandmarks()

      expect(results.length).toEqual(6)
      expectFaceDetectionsWithLandmarks(results, expectedFullFaceDescriptions, expectedScores, {
        maxScoreDelta: 0.05,
        maxBoxDelta: 5,
        maxLandmarksDelta: 4
      })
    })

    it('detectAllFaces.withFaceLandmarks().withFaceDescriptors()', async () => {
      const results = await faceapi
        .detectAllFaces(imgTensor, detectorOptions())
        .withFaceLandmarks()
        .withFaceDescriptors()

      expect(results.length).toEqual(6)
      expectFullFaceDescriptions(results, expectedFullFaceDescriptions, expectedScores, {
        maxScoreDelta: 0.05,
        maxBoxDelta: 5,
        maxLandmarksDelta: 4,
        maxDescriptorDelta: 0.2
      })
    })

    it('no memory leaks', async () => {
      // run the full pipeline with default options and verify every
      // intermediate tensor is disposed
      await expectAllTensorsReleased(async () => {
        await faceapi
          .detectAllFaces(imgTensor, new SsdMobilenetv1Options())
          .withFaceLandmarks()
          .withFaceDescriptors()
      })
    })
  })
})
\ No newline at end of file
...@@ -5,6 +5,7 @@ import { expectFaceDetections } from '../../expectFaceDetections'; ...@@ -5,6 +5,7 @@ import { expectFaceDetections } from '../../expectFaceDetections';
import { expectFullFaceDescriptions } from '../../expectFullFaceDescriptions'; import { expectFullFaceDescriptions } from '../../expectFullFaceDescriptions';
import { expectFaceDetectionsWithLandmarks } from '../../expectFaceDetectionsWithLandmarks'; import { expectFaceDetectionsWithLandmarks } from '../../expectFaceDetectionsWithLandmarks';
import { expectedSsdBoxes } from './expectedBoxes'; import { expectedSsdBoxes } from './expectedBoxes';
import { loadImage } from '../../env';
describe('ssdMobilenetv1', () => { describe('ssdMobilenetv1', () => {
...@@ -13,7 +14,7 @@ describe('ssdMobilenetv1', () => { ...@@ -13,7 +14,7 @@ describe('ssdMobilenetv1', () => {
const expectedScores = [0.54, 0.81, 0.97, 0.88, 0.84, 0.61] const expectedScores = [0.54, 0.81, 0.97, 0.88, 0.84, 0.61]
beforeAll(async () => { beforeAll(async () => {
imgEl = await fetchImage('base/test/images/faces.jpg') imgEl = await loadImage('test/images/faces.jpg')
expectedFullFaceDescriptions = await assembleExpectedFullFaceDescriptions(expectedSsdBoxes) expectedFullFaceDescriptions = await assembleExpectedFullFaceDescriptions(expectedSsdBoxes)
}) })
...@@ -26,7 +27,7 @@ describe('ssdMobilenetv1', () => { ...@@ -26,7 +27,7 @@ describe('ssdMobilenetv1', () => {
const results = await faceapi.detectAllFaces(imgEl, options) const results = await faceapi.detectAllFaces(imgEl, options)
const maxScoreDelta = 0.01 const maxScoreDelta = 0.05
const maxBoxDelta = 5 const maxBoxDelta = 5
expect(results.length).toEqual(6) expect(results.length).toEqual(6)
expectFaceDetections(results, expectedSsdBoxes, expectedScores, maxScoreDelta, maxBoxDelta) expectFaceDetections(results, expectedSsdBoxes, expectedScores, maxScoreDelta, maxBoxDelta)
...@@ -42,7 +43,7 @@ describe('ssdMobilenetv1', () => { ...@@ -42,7 +43,7 @@ describe('ssdMobilenetv1', () => {
.withFaceLandmarks() .withFaceLandmarks()
const deltas = { const deltas = {
maxScoreDelta: 0.01, maxScoreDelta: 0.05,
maxBoxDelta: 5, maxBoxDelta: 5,
maxLandmarksDelta: 2 maxLandmarksDelta: 2
} }
...@@ -61,7 +62,7 @@ describe('ssdMobilenetv1', () => { ...@@ -61,7 +62,7 @@ describe('ssdMobilenetv1', () => {
.withFaceDescriptors() .withFaceDescriptors()
const deltas = { const deltas = {
maxScoreDelta: 0.01, maxScoreDelta: 0.05,
maxBoxDelta: 5, maxBoxDelta: 5,
maxLandmarksDelta: 2, maxLandmarksDelta: 2,
maxDescriptorDelta: 0.2 maxDescriptorDelta: 0.2
......
import * as faceapi from '../../../src'; import * as faceapi from '../../../src';
import { describeWithNets, expectAllTensorsReleased } from '../../utils'; import { loadImage } from '../../env';
import { expectFaceDetections } from '../../expectFaceDetections'; import { expectFaceDetections } from '../../expectFaceDetections';
import { fetchImage } from '../../../src'; import { describeWithNets, expectAllTensorsReleased } from '../../utils';
import { expectedTinyFaceDetectorBoxes } from './expectedBoxes'; import { expectedTinyFaceDetectorBoxes } from './expectedBoxes';
describe('tinyFaceDetector.locateFaces', () => { describe('tinyFaceDetector.locateFaces', () => {
...@@ -9,7 +9,7 @@ describe('tinyFaceDetector.locateFaces', () => { ...@@ -9,7 +9,7 @@ describe('tinyFaceDetector.locateFaces', () => {
let imgEl: HTMLImageElement let imgEl: HTMLImageElement
beforeAll(async () => { beforeAll(async () => {
imgEl = await fetchImage('base/test/images/faces.jpg') imgEl = await loadImage('test/images/faces.jpg')
}) })
describeWithNets('quantized weights', { withTinyFaceDetector: { quantized: true } }, ({ tinyFaceDetector }) => { describeWithNets('quantized weights', { withTinyFaceDetector: { quantized: true } }, ({ tinyFaceDetector }) => {
...@@ -20,7 +20,7 @@ describe('tinyFaceDetector.locateFaces', () => { ...@@ -20,7 +20,7 @@ describe('tinyFaceDetector.locateFaces', () => {
expect(detections.length).toEqual(6) expect(detections.length).toEqual(6)
const expectedScores = [0.77, 0.75, 0.88, 0.77, 0.83, 0.85] const expectedScores = [0.77, 0.75, 0.88, 0.77, 0.83, 0.85]
const maxScoreDelta = 0.01 const maxScoreDelta = 0.05
const maxBoxDelta = 40 const maxBoxDelta = 40
expectFaceDetections(detections, expectedTinyFaceDetectorBoxes, expectedScores, maxScoreDelta, maxBoxDelta) expectFaceDetections(detections, expectedTinyFaceDetectorBoxes, expectedScores, maxScoreDelta, maxBoxDelta)
...@@ -32,20 +32,12 @@ describe('tinyFaceDetector.locateFaces', () => { ...@@ -32,20 +32,12 @@ describe('tinyFaceDetector.locateFaces', () => {
expect(detections.length).toEqual(6) expect(detections.length).toEqual(6)
const expectedScores = [0.7, 0.82, 0.93, 0.86, 0.79, 0.84] const expectedScores = [0.7, 0.82, 0.93, 0.86, 0.79, 0.84]
const maxScoreDelta = 0.01 const maxScoreDelta = 0.05
const maxBoxDelta = 1 const maxBoxDelta = 5
expectFaceDetections(detections, expectedTinyFaceDetectorBoxes, expectedScores, maxScoreDelta, maxBoxDelta) expectFaceDetections(detections, expectedTinyFaceDetectorBoxes, expectedScores, maxScoreDelta, maxBoxDelta)
}) })
it('no memory leaks', async () => {
await expectAllTensorsReleased(async () => {
const net = new faceapi.TinyFaceDetector()
await net.load('base/weights')
net.dispose()
})
})
}) })
}) })
\ No newline at end of file
import * as tf from '@tensorflow/tfjs-core';

import * as faceapi from '../../../src';
import { createCanvasFromMedia, TinyFaceDetectorOptions } from '../../../src';
import { loadImage } from '../../env';
import { expectFaceDetections } from '../../expectFaceDetections';
import { expectFaceDetectionsWithLandmarks } from '../../expectFaceDetectionsWithLandmarks';
import { expectFullFaceDescriptions } from '../../expectFullFaceDescriptions';
import { assembleExpectedFullFaceDescriptions, describeWithNets, expectAllTensorsReleased, ExpectedFullFaceDescription } from '../../utils';
import { expectedTinyFaceDetectorBoxes } from './expectedBoxes';

// Node-oriented specs: the global api is exercised with a tensor input
// instead of an HTMLImageElement.
describe('tinyFaceDetector - node', () => {

  // reference detection scores for the six faces in test/images/faces.jpg
  const expectedScores = [0.7, 0.82, 0.93, 0.86, 0.79, 0.84]

  let imgTensor: faceapi.tf.Tensor3D
  let expectedFullFaceDescriptions: ExpectedFullFaceDescription[]

  // fresh options instance per test run
  const detectorOptions = () => new TinyFaceDetectorOptions({
    inputSize: 416
  })

  beforeAll(async () => {
    const img = await loadImage('test/images/faces.jpg')
    imgTensor = tf.fromPixels(createCanvasFromMedia(img))
    expectedFullFaceDescriptions = await assembleExpectedFullFaceDescriptions(expectedTinyFaceDetectorBoxes)
  })

  describeWithNets('globalApi, tensor inputs', { withAllFacesTinyFaceDetector: true }, () => {

    it('detectAllFaces', async () => {
      const results = await faceapi.detectAllFaces(imgTensor, detectorOptions())

      expect(results.length).toEqual(6)
      // tolerances: score +/- 0.05, box corners +/- 5 px
      expectFaceDetections(results, expectedTinyFaceDetectorBoxes, expectedScores, 0.05, 5)
    })

    it('detectAllFaces.withFaceLandmarks()', async () => {
      const results = await faceapi
        .detectAllFaces(imgTensor, detectorOptions())
        .withFaceLandmarks()

      expect(results.length).toEqual(6)
      expectFaceDetectionsWithLandmarks(results, expectedFullFaceDescriptions, expectedScores, {
        maxScoreDelta: 0.05,
        maxBoxDelta: 5,
        maxLandmarksDelta: 10
      })
    })

    it('detectAllFaces.withFaceLandmarks().withFaceDescriptors()', async () => {
      const results = await faceapi
        .detectAllFaces(imgTensor, detectorOptions())
        .withFaceLandmarks()
        .withFaceDescriptors()

      expect(results.length).toEqual(6)
      expectFullFaceDescriptions(results, expectedFullFaceDescriptions, expectedScores, {
        maxScoreDelta: 0.05,
        maxBoxDelta: 5,
        maxLandmarksDelta: 10,
        maxDescriptorDelta: 0.2
      })
    })

    it('no memory leaks', async () => {
      // run the full pipeline with default options and verify every
      // intermediate tensor is disposed
      await expectAllTensorsReleased(async () => {
        await faceapi
          .detectAllFaces(imgTensor, new TinyFaceDetectorOptions())
          .withFaceLandmarks()
          .withFaceDescriptors()
      })
    })
  })
})
\ No newline at end of file
...@@ -5,6 +5,7 @@ import { expectFaceDetections } from '../../expectFaceDetections'; ...@@ -5,6 +5,7 @@ import { expectFaceDetections } from '../../expectFaceDetections';
import { expectFullFaceDescriptions } from '../../expectFullFaceDescriptions'; import { expectFullFaceDescriptions } from '../../expectFullFaceDescriptions';
import { expectFaceDetectionsWithLandmarks } from '../../expectFaceDetectionsWithLandmarks'; import { expectFaceDetectionsWithLandmarks } from '../../expectFaceDetectionsWithLandmarks';
import { expectedTinyFaceDetectorBoxes } from './expectedBoxes'; import { expectedTinyFaceDetectorBoxes } from './expectedBoxes';
import { loadImage } from '../../env';
describe('tinyFaceDetector', () => { describe('tinyFaceDetector', () => {
...@@ -13,7 +14,7 @@ describe('tinyFaceDetector', () => { ...@@ -13,7 +14,7 @@ describe('tinyFaceDetector', () => {
const expectedScores = [0.7, 0.82, 0.93, 0.86, 0.79, 0.84] const expectedScores = [0.7, 0.82, 0.93, 0.86, 0.79, 0.84]
beforeAll(async () => { beforeAll(async () => {
imgEl = await fetchImage('base/test/images/faces.jpg') imgEl = await loadImage('test/images/faces.jpg')
expectedFullFaceDescriptions = await assembleExpectedFullFaceDescriptions(expectedTinyFaceDetectorBoxes) expectedFullFaceDescriptions = await assembleExpectedFullFaceDescriptions(expectedTinyFaceDetectorBoxes)
}) })
...@@ -26,8 +27,8 @@ describe('tinyFaceDetector', () => { ...@@ -26,8 +27,8 @@ describe('tinyFaceDetector', () => {
const results = await faceapi.detectAllFaces(imgEl, options) const results = await faceapi.detectAllFaces(imgEl, options)
const maxScoreDelta = 0.01 const maxScoreDelta = 0.05
const maxBoxDelta = 1 const maxBoxDelta = 5
expect(results.length).toEqual(6) expect(results.length).toEqual(6)
expectFaceDetections(results, expectedTinyFaceDetectorBoxes, expectedScores, maxScoreDelta, maxBoxDelta) expectFaceDetections(results, expectedTinyFaceDetectorBoxes, expectedScores, maxScoreDelta, maxBoxDelta)
}) })
...@@ -42,8 +43,8 @@ describe('tinyFaceDetector', () => { ...@@ -42,8 +43,8 @@ describe('tinyFaceDetector', () => {
.withFaceLandmarks() .withFaceLandmarks()
const deltas = { const deltas = {
maxScoreDelta: 0.01, maxScoreDelta: 0.05,
maxBoxDelta: 1, maxBoxDelta: 5,
maxLandmarksDelta: 10 maxLandmarksDelta: 10
} }
expect(results.length).toEqual(6) expect(results.length).toEqual(6)
...@@ -61,8 +62,8 @@ describe('tinyFaceDetector', () => { ...@@ -61,8 +62,8 @@ describe('tinyFaceDetector', () => {
.withFaceDescriptors() .withFaceDescriptors()
const deltas = { const deltas = {
maxScoreDelta: 0.01, maxScoreDelta: 0.05,
maxBoxDelta: 1, maxBoxDelta: 5,
maxLandmarksDelta: 10, maxLandmarksDelta: 10,
maxDescriptorDelta: 0.2 maxDescriptorDelta: 0.2
} }
......
import { IRect } from '../../../src';
import { sortBoxes } from '../../utils';

// builds an IRect from a compact [x, y, width, height] tuple
const rect = ([x, y, width, height]: [number, number, number, number]): IRect =>
  ({ x, y, width, height })

// reference boxes for the default tiny yolov2 model (regular convolutions)
export const expectedTinyYolov2Boxes: IRect[] = sortBoxes([
  [52, 263, 106, 102],
  [455, 191, 103, 97],
  [236, 57, 90, 85],
  [257, 243, 86, 95],
  [578, 76, 86, 91],
  [87, 30, 92, 93]
].map(rect))

// reference boxes for the tiny yolov2 model with separable convolutions
export const expectedTinyYolov2SeparableConvBoxes: IRect[] = sortBoxes([
  [42, 257, 111, 121],
  [454, 175, 104, 121],
  [230, 45, 94, 104],
  [574, 62, 88, 113],
  [260, 233, 82, 104],
  [83, 24, 85, 111]
].map(rect))
\ No newline at end of file
import { TinyYolov2SizeType } from 'tfjs-tiny-yolov2';
import { fetchImage, TinyYolov2 } from '../../../src';
import { expectFaceDetections } from '../../expectFaceDetections';
import { describeWithNets, expectAllTensorsReleased } from '../../utils';
import { expectedTinyYolov2Boxes } from './expectedBoxes';
describe('tinyYolov2.locateFaces', () => {
let imgEl: HTMLImageElement
beforeAll(async () => {
imgEl = await fetchImage('base/test/images/faces.jpg')
})
describeWithNets('quantized weights', { withTinyYolov2: { quantized: true, withSeparableConv: false } }, ({ tinyYolov2 }) => {
it('inputSize lg, finds all faces', async () => {
const detections = await tinyYolov2.locateFaces(imgEl, { inputSize: TinyYolov2SizeType.LG })
const expectedScores = [0.8, 0.85, 0.86, 0.83, 0.86, 0.81]
const maxScoreDelta = 0.01
const maxBoxDelta = 4
expect(detections.length).toEqual(6)
expectFaceDetections(detections, expectedTinyYolov2Boxes, expectedScores, maxScoreDelta, maxBoxDelta)
})
it('inputSize md, finds all faces', async () => {
const detections = await tinyYolov2.locateFaces(imgEl, { inputSize: TinyYolov2SizeType.MD })
const expectedScores = [0.89, 0.81, 0.82, 0.72, 0.81, 0.86]
const maxScoreDelta = 0.01
const maxBoxDelta = 27
expect(detections.length).toEqual(6)
expectFaceDetections(detections, expectedTinyYolov2Boxes, expectedScores, maxScoreDelta, maxBoxDelta)
})
it('inputSize custom, finds all faces', async () => {
const detections = await tinyYolov2.locateFaces(imgEl, { inputSize: 416 })
const expectedScores = [0.89, 0.81, 0.82, 0.72, 0.81, 0.86]
const maxScoreDelta = 0.01
const maxBoxDelta = 27
expect(detections.length).toEqual(6)
expectFaceDetections(detections, expectedTinyYolov2Boxes, expectedScores, maxScoreDelta, maxBoxDelta)
})
it('no memory leaks', async () => {
await expectAllTensorsReleased(async () => {
const net = new TinyYolov2(false)
await net.load('base/weights_unused')
net.dispose()
})
})
})
})
\ No newline at end of file
import { TinyYolov2SizeType } from 'tfjs-tiny-yolov2';
import { createTinyYolov2, fetchImage } from '../../../src';
import { expectFaceDetections } from '../../expectFaceDetections';
import { describeWithNets, expectAllTensorsReleased } from '../../utils';
import { expectedTinyYolov2Boxes } from './expectedBoxes';
describe('tinyYolov2.locateFaces, uncompressed', () => {
let imgEl: HTMLImageElement
beforeAll(async () => {
imgEl = await fetchImage('base/test/images/faces.jpg')
})
describeWithNets('uncompressed weights', { withTinyYolov2: { quantized: false, withSeparableConv: false } }, ({ tinyYolov2 }) => {
it('inputSize lg, finds all faces', async () => {
const detections = await tinyYolov2.locateFaces(imgEl, { inputSize: TinyYolov2SizeType.LG })
const expectedScores = [0.81, 0.85, 0.86, 0.83, 0.86, 0.81]
const maxScoreDelta = 0.01
const maxBoxDelta = 1
expect(detections.length).toEqual(6)
expectFaceDetections(detections, expectedTinyYolov2Boxes, expectedScores, maxScoreDelta, maxBoxDelta)
})
it('inputSize md, finds all faces', async () => {
const detections = await tinyYolov2.locateFaces(imgEl, { inputSize: TinyYolov2SizeType.MD })
const expectedScores = [0.89, 0.82, 0.82, 0.72, 0.81, 0.86]
const maxScoreDelta = 0.01
const maxBoxDelta = 24
expect(detections.length).toEqual(6)
expectFaceDetections(detections, expectedTinyYolov2Boxes, expectedScores, maxScoreDelta, maxBoxDelta)
})
it('inputSize custom, finds all faces', async () => {
const detections = await tinyYolov2.locateFaces(imgEl, { inputSize: 416 })
const expectedScores = [0.89, 0.82, 0.82, 0.72, 0.81, 0.86]
const maxScoreDelta = 0.01
const maxBoxDelta = 24
expect(detections.length).toEqual(6)
expectFaceDetections(detections, expectedTinyYolov2Boxes, expectedScores, maxScoreDelta, maxBoxDelta)
})
it('no memory leaks', async () => {
await expectAllTensorsReleased(async () => {
const res = await fetch('base/weights_uncompressed/tiny_yolov2_model.weights')
const weights = new Float32Array(await res.arrayBuffer())
const net = createTinyYolov2(weights, false)
net.dispose()
})
})
})
})
\ No newline at end of file
import { TinyYolov2SizeType } from 'tfjs-tiny-yolov2';
import { createTinyYolov2, fetchImage, TinyYolov2 } from '../../../src';
import { expectFaceDetections } from '../../expectFaceDetections';
import { describeWithNets, expectAllTensorsReleased } from '../../utils';
import { expectedTinyYolov2Boxes } from './expectedBoxes';
describe('tinyYolov2.locateFaces, with separable convolutions', () => {
let imgEl: HTMLImageElement
beforeAll(async () => {
imgEl = await fetchImage('base/test/images/faces.jpg')
})
describeWithNets('quantized weights', { withTinyYolov2: { quantized: true } }, ({ tinyYolov2 }) => {
it('inputSize lg, finds all faces', async () => {
const detections = await tinyYolov2.locateFaces(imgEl, { inputSize: TinyYolov2SizeType.LG })
const expectedScores = [0.85, 0.88, 0.9, 0.85, 0.9, 0.85]
const maxScoreDelta = 0.01
const maxBoxDelta = 25
expect(detections.length).toEqual(6)
expectFaceDetections(detections, expectedTinyYolov2Boxes, expectedScores, maxScoreDelta, maxBoxDelta)
})
it('inputSize md, finds all faces', async () => {
const detections = await tinyYolov2.locateFaces(imgEl, { inputSize: TinyYolov2SizeType.MD })
const expectedScores = [0.85, 0.8, 0.8, 0.85, 0.85, 0.83]
const maxScoreDelta = 0.01
const maxBoxDelta = 34
expect(detections.length).toEqual(6)
expectFaceDetections(detections, expectedTinyYolov2Boxes, expectedScores, maxScoreDelta, maxBoxDelta)
})
it('inputSize custom, finds all faces', async () => {
const detections = await tinyYolov2.locateFaces(imgEl, { inputSize: 416 })
const expectedScores = [0.85, 0.8, 0.8, 0.85, 0.85, 0.83]
const maxScoreDelta = 0.01
const maxBoxDelta = 34
expect(detections.length).toEqual(6)
expectFaceDetections(detections, expectedTinyYolov2Boxes, expectedScores, maxScoreDelta, maxBoxDelta)
})
})
describe('no memory leaks', () => {
describe('NeuralNetwork, uncompressed model', () => {
it('disposes all param tensors', async () => {
await expectAllTensorsReleased(async () => {
const res = await fetch('base/weights_uncompressed/tiny_yolov2_separable_conv_model.weights')
const weights = new Float32Array(await res.arrayBuffer())
const net = createTinyYolov2(weights)
net.dispose()
})
})
})
describe('NeuralNetwork, quantized model', () => {
it('disposes all param tensors', async () => {
await expectAllTensorsReleased(async () => {
const net = new TinyYolov2()
await net.load('base/weights')
net.dispose()
})
})
})
})
})
\ No newline at end of file
import * as tf from '@tensorflow/tfjs-core'; import * as tf from '@tensorflow/tfjs-core';
import { getContext2dOrThrow } from 'tfjs-image-recognition-base';
import * as faceapi from '../src'; import * as faceapi from '../src';
import { FaceRecognitionNet, IPoint, IRect, Mtcnn, NeuralNetwork, TinyYolov2 } from '../src/'; import { createCanvasFromMedia, FaceRecognitionNet, IPoint, IRect, Mtcnn, TinyYolov2 } from '../src/';
import { FaceDetection } from '../src/classes/FaceDetection'; import { FaceDetection } from '../src/classes/FaceDetection';
import { FaceLandmarks } from '../src/classes/FaceLandmarks'; import { FaceLandmarks } from '../src/classes/FaceLandmarks';
import { FaceLandmark68Net } from '../src/faceLandmarkNet/FaceLandmark68Net'; import { FaceLandmark68Net } from '../src/faceLandmarkNet/FaceLandmark68Net';
import { FaceLandmark68TinyNet } from '../src/faceLandmarkNet/FaceLandmark68TinyNet'; import { FaceLandmark68TinyNet } from '../src/faceLandmarkNet/FaceLandmark68TinyNet';
import { SsdMobilenetv1 } from '../src/ssdMobilenetv1/SsdMobilenetv1'; import { SsdMobilenetv1 } from '../src/ssdMobilenetv1/SsdMobilenetv1';
import { TinyFaceDetector } from '../src/tinyFaceDetector/TinyFaceDetector'; import { TinyFaceDetector } from '../src/tinyFaceDetector/TinyFaceDetector';
import { initNet, loadJson } from './env';
jasmine.DEFAULT_TIMEOUT_INTERVAL = 60000
const args: string[] = window['__karma__'].config.jasmine.args
if (args.some(arg => arg === 'backend_cpu')) {
tf.setBackend('cpu')
}
export function expectMaxDelta(val1: number, val2: number, maxDelta: number) { export function expectMaxDelta(val1: number, val2: number, maxDelta: number) {
expect(Math.abs(val1 - val2)).toBeLessThan(maxDelta) expect(Math.abs(val1 - val2)).toBeLessThan(maxDelta)
...@@ -84,8 +79,8 @@ export async function assembleExpectedFullFaceDescriptions( ...@@ -84,8 +79,8 @@ export async function assembleExpectedFullFaceDescriptions(
detections: IRect[], detections: IRect[],
landmarksFile: string = 'facesFaceLandmarkPositions.json' landmarksFile: string = 'facesFaceLandmarkPositions.json'
): Promise<ExpectedFullFaceDescription[]> { ): Promise<ExpectedFullFaceDescription[]> {
const landmarks = await (await fetch(`base/test/data/${landmarksFile}`)).json() const landmarks = await loadJson(`test/data/${landmarksFile}`)
const descriptors = await (await fetch('base/test/data/facesFaceDescriptors.json')).json() const descriptors = await loadJson('test/data/facesFaceDescriptors.json')
return detections.map((detection, i) => ({ return detections.map((detection, i) => ({
detection, detection,
...@@ -112,7 +107,6 @@ export type InjectNetArgs = { ...@@ -112,7 +107,6 @@ export type InjectNetArgs = {
tinyYolov2: TinyYolov2 tinyYolov2: TinyYolov2
} }
export type DescribeWithNetsOptions = { export type DescribeWithNetsOptions = {
withAllFacesSsdMobilenetv1?: boolean withAllFacesSsdMobilenetv1?: boolean
withAllFacesTinyFaceDetector?: boolean withAllFacesTinyFaceDetector?: boolean
...@@ -127,21 +121,6 @@ export type DescribeWithNetsOptions = { ...@@ -127,21 +121,6 @@ export type DescribeWithNetsOptions = {
withTinyYolov2?: WithTinyYolov2Options withTinyYolov2?: WithTinyYolov2Options
} }
async function loadNetWeights(uri: string): Promise<Float32Array> {
return new Float32Array(await (await fetch(uri)).arrayBuffer())
}
async function initNet<TNet extends NeuralNetwork<any>>(
net: TNet,
uncompressedFilename: string | boolean,
isUnusedModel: boolean = false
) {
const url = uncompressedFilename
? await loadNetWeights(`base/weights_uncompressed/${uncompressedFilename}`)
: (isUnusedModel ? 'base/weights_unused' : 'base/weights')
await net.load(url)
}
export function describeWithNets( export function describeWithNets(
description: string, description: string,
options: DescribeWithNetsOptions, options: DescribeWithNetsOptions,
......
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment