Commit f260f87d by vincent

testcases for tinyYolov2

parent fd42e095
...@@ -20,6 +20,15 @@ export const expectedMtcnnBoxes = [
  { x: 451, y: 176, width: 122, height: 122 }
]

export const expectedTinyYolov2Boxes = [
  { x: 52, y: 263, width: 106, height: 102 },
  { x: 455, y: 191, width: 103, height: 97 },
  { x: 236, y: 57, width: 90, height: 85 },
  { x: 257, y: 243, width: 86, height: 95 },
  { x: 578, y: 76, width: 86, height: 91 },
  { x: 87, y: 30, width: 92, height: 93 }
]
export const expectedMtcnnFaceLandmarks = [
  [new Point(117, 58), new Point(156, 63), new Point(141, 86), new Point(109, 98), new Point(147, 104)],
  [new Point(82, 292), new Point(134, 304), new Point(104, 330), new Point(72, 342), new Point(120, 353)],
...
import * as faceapi from '../../../src';
import { SizeType } from '../../../src/tinyYolov2/types';
import { describeWithNets, expectAllTensorsReleased, expectRectClose } from '../../utils';
import { expectedTinyYolov2Boxes } from './expectedResults';
describe('tinyYolov2', () => {

  let imgEl: HTMLImageElement

  beforeAll(async () => {
    const img = await (await fetch('base/test/images/faces.jpg')).blob()
    imgEl = await faceapi.bufferToImage(img)
  })

  describeWithNets('quantized weights', { withTinyYolov2: { quantized: true } }, ({ tinyYolov2 }) => {

    it('inputSize lg, finds all faces', async () => {
      const detections = await tinyYolov2.locateFaces(imgEl, { inputSize: SizeType.LG })

      const expectedScores = [0.86, 0.86, 0.85, 0.83, 0.81, 0.81]
      const maxBoxDelta = 3
      const boxOrder = [0, 1, 2, 3, 4, 5]

      expect(detections.length).toEqual(6)
      detections.forEach((det, i) => {
        expect(det.getScore()).toBeCloseTo(expectedScores[i], 2)
        expectRectClose(det.getBox(), expectedTinyYolov2Boxes[boxOrder[i]], maxBoxDelta)
      })
    })

    it('inputSize md, finds all faces', async () => {
      const detections = await tinyYolov2.locateFaces(imgEl, { inputSize: SizeType.MD })

      const expectedScores = [0.89, 0.87, 0.83, 0.82, 0.81, 0.72]
      const maxBoxDelta = 16
      const boxOrder = [5, 4, 0, 2, 1, 3]

      expect(detections.length).toEqual(6)
      detections.forEach((det, i) => {
        expect(det.getScore()).toBeCloseTo(expectedScores[i], 2)
        expectRectClose(det.getBox(), expectedTinyYolov2Boxes[boxOrder[i]], maxBoxDelta)
      })
    })

    it('inputSize custom, finds all faces', async () => {
      const detections = await tinyYolov2.locateFaces(imgEl, { inputSize: 416 })

      const expectedScores = [0.89, 0.87, 0.83, 0.82, 0.81, 0.72]
      const maxBoxDelta = 16
      const boxOrder = [5, 4, 0, 2, 1, 3]

      expect(detections.length).toEqual(6)
      detections.forEach((det, i) => {
        expect(det.getScore()).toBeCloseTo(expectedScores[i], 2)
        expectRectClose(det.getBox(), expectedTinyYolov2Boxes[boxOrder[i]], maxBoxDelta)
      })
    })

  })

  describeWithNets('uncompressed weights', { withTinyYolov2: { quantized: false } }, ({ tinyYolov2 }) => {

    it('inputSize lg, finds all faces', async () => {
      const detections = await tinyYolov2.locateFaces(imgEl, { inputSize: SizeType.LG })

      const expectedScores = [0.86, 0.86, 0.85, 0.83, 0.81, 0.81]
      const maxBoxDelta = 1
      const boxOrder = [0, 1, 2, 3, 4, 5]

      expect(detections.length).toEqual(6)
      detections.forEach((det, i) => {
        expect(det.getScore()).toBeCloseTo(expectedScores[i], 2)
        expectRectClose(det.getBox(), expectedTinyYolov2Boxes[boxOrder[i]], maxBoxDelta)
      })
    })

    it('inputSize md, finds all faces', async () => {
      const detections = await tinyYolov2.locateFaces(imgEl, { inputSize: SizeType.MD })

      const expectedScores = [0.89, 0.87, 0.83, 0.83, 0.81, 0.73]
      const maxBoxDelta = 14
      const boxOrder = [5, 4, 2, 0, 1, 3]

      expect(detections.length).toEqual(6)
      detections.forEach((det, i) => {
        expect(det.getScore()).toBeCloseTo(expectedScores[i], 2)
        expectRectClose(det.getBox(), expectedTinyYolov2Boxes[boxOrder[i]], maxBoxDelta)
      })
    })

    it('inputSize custom, finds all faces', async () => {
      const detections = await tinyYolov2.locateFaces(imgEl, { inputSize: 416 })

      const expectedScores = [0.89, 0.87, 0.83, 0.83, 0.81, 0.73]
      const maxBoxDelta = 14
      const boxOrder = [5, 4, 2, 0, 1, 3]

      expect(detections.length).toEqual(6)
      detections.forEach((det, i) => {
        expect(det.getScore()).toBeCloseTo(expectedScores[i], 2)
        expectRectClose(det.getBox(), expectedTinyYolov2Boxes[boxOrder[i]], maxBoxDelta)
      })
    })

  })

  describe('no memory leaks', () => {

    describe('NeuralNetwork, uncompressed model', () => {

      it('disposes all param tensors', async () => {
        await expectAllTensorsReleased(async () => {
          const res = await fetch('base/weights_uncompressed/tiny_yolov2_model.weights')
          const weights = new Float32Array(await res.arrayBuffer())
          const net = faceapi.createTinyYolov2(weights)
          net.dispose()
        })
      })

    })

    describe('NeuralNetwork, quantized model', () => {

      it('disposes all param tensors', async () => {
        await expectAllTensorsReleased(async () => {
          const net = new faceapi.TinyYolov2()
          await net.load('base/weights')
          net.dispose()
        })
      })

    })

  })
})
\ No newline at end of file
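The specs above rely on two helpers from test/utils.ts that are not part of this commit: expectRectClose (box comparison within maxBoxDelta) and expectAllTensorsReleased (tensor leak check). As a rough orientation only, minimal sketches of what they presumably do are shown below; the "Sketch"-suffixed names and the Box type are hypothetical, and the real implementations may differ.

// Hypothetical sketches, not the actual test/utils.ts code.
import * as tf from '@tensorflow/tfjs-core';

type Box = { x: number, y: number, width: number, height: number }

// Presumably asserts that every dimension of a detected box deviates from the
// expected box by at most maxDelta pixels.
function expectRectCloseSketch(result: Box, expected: Box, maxDelta: number): void {
  (['x', 'y', 'width', 'height'] as Array<keyof Box>).forEach(key => {
    const diff = Math.abs(result[key] - expected[key])
    if (diff > maxDelta) {
      throw new Error(`${key} is off by ${diff}px, expected at most ${maxDelta}px`)
    }
  })
}

// Presumably compares the number of allocated tensors before and after running fn,
// so that a forgotten dispose() shows up as a failing spec.
async function expectAllTensorsReleasedSketch(fn: () => Promise<any>): Promise<void> {
  const numTensorsBefore = tf.memory().numTensors
  await fn()
  const leaked = tf.memory().numTensors - numTensorsBefore
  if (leaked !== 0) {
    throw new Error(`${leaked} tensors were not released`)
  }
}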
...@@ -5,7 +5,8 @@ import * as faceapi from '../src/';
import { NeuralNetwork } from '../src/commons/NeuralNetwork';
import { IPoint } from '../src/';
import { allFacesFactory, allFacesMtcnnFactory } from '../src/allFacesFactory';
import { allFacesMtcnnFunction, allFacesFunction, tinyYolov2 } from '../src/globalApi';
import { TinyYolov2 } from '../src/tinyYolov2/TinyYolov2';
export function zeros(length: number): Float32Array {
  return new Float32Array(length)
...@@ -55,12 +56,13 @@ export type WithNetOptions = {
}

export type InjectNetArgs = {
  allFaces: allFacesFunction
  allFacesMtcnn: allFacesMtcnnFunction
  faceDetectionNet: faceapi.FaceDetectionNet
  faceLandmarkNet: faceapi.FaceLandmarkNet
  faceRecognitionNet: faceapi.FaceRecognitionNet
  mtcnn: faceapi.Mtcnn
  tinyYolov2: faceapi.TinyYolov2
}
...@@ -71,6 +73,7 @@ export type DescribeWithNetsOptions = {
  withFaceLandmarkNet?: WithNetOptions
  withFaceRecognitionNet?: WithNetOptions
  withMtcnn?: WithNetOptions
  withTinyYolov2?: WithNetOptions
}

async function loadNetWeights(uri: string): Promise<Float32Array> {
...@@ -99,17 +102,19 @@ export function describeWithNets(
  let faceLandmarkNet: faceapi.FaceLandmarkNet = new faceapi.FaceLandmarkNet()
  let faceRecognitionNet: faceapi.FaceRecognitionNet = new faceapi.FaceRecognitionNet()
  let mtcnn: faceapi.Mtcnn = new faceapi.Mtcnn()
  let tinyYolov2: faceapi.TinyYolov2 = new faceapi.TinyYolov2()

  let allFaces = allFacesFactory(faceDetectionNet, faceLandmarkNet, faceRecognitionNet)
  let allFacesMtcnn = allFacesMtcnnFactory(mtcnn, faceRecognitionNet)

  beforeAll(async () => {
    const {
      withAllFaces,
      withAllFacesMtcnn,
      withFaceDetectionNet,
      withFaceLandmarkNet,
      withFaceRecognitionNet,
      withMtcnn,
      withTinyYolov2
    } = options

    if (withFaceDetectionNet || withAllFaces) {
...@@ -118,6 +123,7 @@ export function describeWithNets(
        !!withFaceDetectionNet && !withFaceDetectionNet.quantized && 'ssd_mobilenetv1_model.weights'
      )
    }

    if (withFaceLandmarkNet || withAllFaces) {
      await initNet<faceapi.FaceLandmarkNet>(
        faceLandmarkNet,
...@@ -132,22 +138,31 @@ export function describeWithNets(
        'face_recognition_model.weights'
      )
    }

    if (withMtcnn || withAllFacesMtcnn) {
      await initNet<faceapi.Mtcnn>(
        mtcnn,
        !!withMtcnn && !withMtcnn.quantized && 'mtcnn_model.weights'
      )
    }

    if (withTinyYolov2) {
      await initNet<faceapi.TinyYolov2>(
        tinyYolov2,
        !!withTinyYolov2 && !withTinyYolov2.quantized && 'tiny_yolov2_model.weights'
      )
    }
  })

  afterAll(() => {
    faceDetectionNet && faceDetectionNet.dispose()
    faceLandmarkNet && faceLandmarkNet.dispose()
    faceRecognitionNet && faceRecognitionNet.dispose()
    mtcnn && mtcnn.dispose()
    tinyYolov2 && tinyYolov2.dispose()
  })

  specDefinitions({ allFaces, allFacesMtcnn, faceDetectionNet, faceLandmarkNet, faceRecognitionNet, mtcnn, tinyYolov2 })
})
}
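The new withTinyYolov2 branch reuses initNet, which sits outside the changed hunks of this diff. Based on how it is called above and on the loadNetWeights helper in the same file, a plausible sketch is given below, assuming initNet fetches uncompressed weights from base/weights_uncompressed when a filename is passed (quantized: false) and otherwise loads the quantized model from base/weights; initNetSketch is a hypothetical name and the real helper may differ.

// Plausible sketch only -- the real initNet in test/utils.ts is not shown in this diff,
// and NeuralNetwork.load accepting either a Float32Array or a weights directory URI is
// an assumption based on the call sites above.
async function initNetSketch<TNet extends NeuralNetwork<any>>(
  net: TNet,
  uncompressedFilename: string | boolean
): Promise<void> {
  if (uncompressedFilename) {
    // uncompressed variant: fetch the raw weight file and hand it to the net
    const weights = await loadNetWeights(`base/weights_uncompressed/${uncompressedFilename}`)
    await net.load(weights)
  } else {
    // quantized variant: load the quantized model from the weights directory
    await net.load('base/weights')
  }
}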