Commit 96b41df9 by vincent


added describeWithNets test utility, which handles net creation and disposing of net params for the testbench
parent 9fef1428
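
For context, a minimal usage sketch of the new helper (not part of the commit itself): the fixture path and assertion are illustrative, while the option shape and the injected faceDetectionNet follow the describeWithNets utility added to the test utils further down.

import * as faceapi from '../../../src';
import { FaceDetection } from '../../../src/faceDetectionNet/FaceDetection';
import { describeWithNets } from '../../utils';

describeWithNets('example specs', { withFaceDetectionNet: { quantized: false } }, ({ faceDetectionNet }) => {
  let imgEl: HTMLImageElement

  beforeAll(async () => {
    // hypothetical fixture, served from the karma base path like the other test assets
    const img = await (await fetch('base/test/images/faces.jpg')).blob()
    imgEl = await faceapi.bufferToImage(img)
  })

  // no manual weight fetching, net construction or dispose() is needed here:
  // describeWithNets loads the weights in beforeAll and disposes the net in afterAll
  it('locates faces', async () => {
    const detections = await faceDetectionNet.locateFaces(imgEl) as FaceDetection[]
    expect(detections.length).toBeGreaterThan(0)
  })
})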
import * as faceapi from '../../../src';
import { FaceDetection } from '../../../src/faceDetectionNet/FaceDetection';
import { IRect } from '../../../src/Rect';
import { expectAllTensorsReleased, expectMaxDelta } from '../../utils';
import { describeWithNets, expectAllTensorsReleased, expectMaxDelta } from '../../utils';
function expectRectClose(
result: IRect,
@@ -33,19 +33,11 @@ describe('faceDetectionNet', () => {
imgEl = await faceapi.bufferToImage(img)
})
describe('uncompressed weights', () => {
let faceDetectionNet: faceapi.FaceDetectionNet
describeWithNets('uncompressed weights', { withFaceDetectionNet: { quantized: false } }, ({ faceDetectionNet }) => {
const expectedScores = [0.98, 0.89, 0.82, 0.75, 0.58, 0.55]
const maxBoxDelta = 1
beforeAll(async () => {
const res = await fetch('base/weights/uncompressed/face_detection_model.weights')
const weights = new Float32Array(await res.arrayBuffer())
faceDetectionNet = faceapi.faceDetectionNet(weights)
})
it('scores > 0.8', async () => {
const detections = await faceDetectionNet.locateFaces(imgEl) as FaceDetection[]
@@ -72,18 +64,11 @@ describe('faceDetectionNet', () => {
})
describe('quantized weights', () => {
let faceDetectionNet: faceapi.FaceDetectionNet
describeWithNets('quantized weights', { withFaceDetectionNet: { quantized: true } }, ({ faceDetectionNet }) => {
const expectedScores = [0.97, 0.88, 0.83, 0.82, 0.59, 0.52]
const maxBoxDelta = 5
beforeAll(async () => {
faceDetectionNet = new faceapi.FaceDetectionNet()
await faceDetectionNet.load('base/weights')
})
it('scores > 0.8', async () => {
const detections = await faceDetectionNet.locateFaces(imgEl) as FaceDetection[]
......
@@ -5,7 +5,7 @@ import { isTensor3D } from '../../../src/commons/isTensor';
import { FaceLandmarks } from '../../../src/faceLandmarkNet/FaceLandmarks';
import { Point } from '../../../src/Point';
import { Dimensions, TMediaElement } from '../../../src/types';
import { expectMaxDelta, expectAllTensorsReleased, tensor3D } from '../../utils';
import { expectMaxDelta, expectAllTensorsReleased, tensor3D, describeWithNets } from '../../utils';
import { NetInput } from '../../../src/NetInput';
import { toNetInput } from '../../../src';
@@ -38,15 +38,7 @@ describe('faceLandmarkNet', () => {
faceLandmarkPositionsRect = await (await fetch('base/test/data/faceLandmarkPositionsRect.json')).json()
})
describe('uncompressed weights', () => {
let faceLandmarkNet: faceapi.FaceLandmarkNet
beforeAll(async () => {
const res = await fetch('base/weights/uncompressed/face_landmark_68_model.weights')
const weights = new Float32Array(await res.arrayBuffer())
faceLandmarkNet = faceapi.faceLandmarkNet(weights)
})
describeWithNets('uncompressed weights', { withFaceLandmarkNet: { quantized: false } }, ({ faceLandmarkNet }) => {
it('computes face landmarks for squared input', async () => {
const { width, height } = imgEl1
@@ -78,14 +70,7 @@ describe('faceLandmarkNet', () => {
})
describe('quantized weights', () => {
let faceLandmarkNet: faceapi.FaceLandmarkNet
beforeAll(async () => {
faceLandmarkNet = new faceapi.FaceLandmarkNet()
await faceLandmarkNet.load('base/weights')
})
describeWithNets('quantized weights', { withFaceLandmarkNet: { quantized: true } }, ({ faceLandmarkNet }) => {
it('computes face landmarks for squared input', async () => {
const { width, height } = imgEl1
@@ -117,15 +102,7 @@ describe('faceLandmarkNet', () => {
})
describe('batch inputs', () => {
let faceLandmarkNet: faceapi.FaceLandmarkNet
beforeAll(async () => {
const res = await fetch('base/weights/uncompressed/face_landmark_68_model.weights')
const weights = new Float32Array(await res.arrayBuffer())
faceLandmarkNet = faceapi.faceLandmarkNet(weights)
})
describeWithNets('batch inputs', { withFaceLandmarkNet: { quantized: false } }, ({ faceLandmarkNet }) => {
it('computes face landmarks for batch of image elements', async () => {
const inputs = [imgEl1, imgEl2, imgElRect]
@@ -229,14 +206,7 @@ describe('faceLandmarkNet', () => {
})
describe('no memory leaks', () => {
let faceLandmarkNet: faceapi.FaceLandmarkNet
beforeAll(async () => {
faceLandmarkNet = new faceapi.FaceLandmarkNet()
await faceLandmarkNet.load('base/weights')
})
describeWithNets('no memory leaks', { withFaceLandmarkNet: { quantized: true } }, ({ faceLandmarkNet }) => {
describe('NeuralNetwork, uncompressed model', () => {
......
@@ -2,7 +2,7 @@ import * as tf from '@tensorflow/tfjs-core';
import * as faceapi from '../../../src';
import { NetInput } from '../../../src/NetInput';
import { expectAllTensorsReleased } from '../../utils';
import { expectAllTensorsReleased, describeWithNets } from '../../utils';
import { toNetInput } from '../../../src';
describe('faceRecognitionNet', () => {
@@ -26,15 +26,7 @@ describe('faceRecognitionNet', () => {
faceDescriptorRect = await (await fetch('base/test/data/faceDescriptorRect.json')).json()
})
describe('uncompressed weights', () => {
let faceRecognitionNet: faceapi.FaceRecognitionNet
beforeAll(async () => {
const res = await fetch('base/weights/uncompressed/face_recognition_model.weights')
const weights = new Float32Array(await res.arrayBuffer())
faceRecognitionNet = faceapi.faceRecognitionNet(weights)
})
describeWithNets('uncompressed weights', { withFaceRecognitionNet: { quantized: false } }, ({ faceRecognitionNet }) => {
it('computes face descriptor for squared input', async () => {
const result = await faceRecognitionNet.computeFaceDescriptor(imgEl1) as Float32Array
@@ -52,14 +44,7 @@ describe('faceRecognitionNet', () => {
// TODO: figure out why descriptors return NaN in the test cases
/*
describe('quantized weights', () => {
let faceRecognitionNet: faceapi.FaceRecognitionNet
beforeAll(async () => {
faceRecognitionNet = new faceapi.FaceRecognitionNet()
await faceRecognitionNet.load('base/weights')
})
describeWithNets('quantized weights', { withFaceRecognitionNet: { quantized: true } }, ({ faceRecognitionNet }) => {
it('computes face descriptor for squared input', async () => {
const result = await faceRecognitionNet.computeFaceDescriptor(imgEl1) as Float32Array
@@ -76,15 +61,7 @@ describe('faceRecognitionNet', () => {
})
*/
describe('batch inputs', () => {
let faceRecognitionNet: faceapi.FaceRecognitionNet
beforeAll(async () => {
const res = await fetch('base/weights/uncompressed/face_recognition_model.weights')
const weights = new Float32Array(await res.arrayBuffer())
faceRecognitionNet = faceapi.faceRecognitionNet(weights)
})
describeWithNets('batch inputs', { withFaceRecognitionNet: { quantized: false } }, ({ faceRecognitionNet }) => {
it('computes face descriptors for batch of image elements', async () => {
const inputs = [imgEl1, imgEl2, imgElRect]
@@ -156,19 +133,7 @@ describe('faceRecognitionNet', () => {
})
describe('no memory leaks', () => {
let faceRecognitionNet: faceapi.FaceRecognitionNet
beforeAll(async () => {
const res = await fetch('base/weights/uncompressed/face_recognition_model.weights')
const weights = new Float32Array(await res.arrayBuffer())
faceRecognitionNet = faceapi.faceRecognitionNet(weights)
})
afterAll(async () => {
faceRecognitionNet.dispose()
})
describeWithNets('no memory leaks', { withFaceRecognitionNet: { quantized: false } }, ({ faceRecognitionNet }) => {
describe('NeuralNetwork, uncompressed model', () => {
......
import * as tf from '@tensorflow/tfjs-core';
import * as faceapi from '../src/';
import { NeuralNetwork } from '../src/commons/NeuralNetwork';
export function zeros(length: number): Float32Array {
return new Float32Array(length)
}
@@ -21,3 +24,84 @@ export async function expectAllTensorsReleased(fn: () => any) {
export function tensor3D() {
return tf.tensor3d([[[0]]])
}
export type WithNetOptions = {
quantized?: boolean
}
export type InjectNetArgs = {
faceDetectionNet: faceapi.FaceDetectionNet
faceLandmarkNet: faceapi.FaceLandmarkNet
faceRecognitionNet: faceapi.FaceRecognitionNet
}
export type DescribeWithNetsOptions = {
withFaceDetectionNet?: WithNetOptions
withFaceLandmarkNet?: WithNetOptions
withFaceRecognitionNet?: WithNetOptions
}
async function loadNetWeights(uri: string): Promise<Float32Array> {
return new Float32Array(await (await fetch(uri)).arrayBuffer())
}
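// loads the uncompressed weight file when a filename is given,
// otherwise the quantized weights from 'base/weights'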
async function initNet<TNet extends NeuralNetwork<any>>(
net: TNet,
uncompressedFilename: string | boolean
) {
await net.load(
uncompressedFilename
? await loadNetWeights(`base/weights/uncompressed/${uncompressedFilename}`)
: 'base/weights'
)
}
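// wraps describe(): instantiates the requested nets up front (so the references handed
// to specDefinitions stay valid), loads their weights in beforeAll and disposes them in afterAll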
export function describeWithNets(
description: string,
options: DescribeWithNetsOptions,
specDefinitions: (nets: InjectNetArgs) => void
) {
describe(description, () => {
let faceDetectionNet: faceapi.FaceDetectionNet = new faceapi.FaceDetectionNet()
let faceLandmarkNet: faceapi.FaceLandmarkNet = new faceapi.FaceLandmarkNet()
let faceRecognitionNet: faceapi.FaceRecognitionNet = new faceapi.FaceRecognitionNet()
beforeAll(async () => {
const {
withFaceDetectionNet,
withFaceLandmarkNet,
withFaceRecognitionNet
} = options
if (withFaceDetectionNet) {
await initNet<faceapi.FaceDetectionNet>(
faceDetectionNet,
!withFaceDetectionNet.quantized && 'face_detection_model.weights'
)
}
if (withFaceLandmarkNet) {
await initNet<faceapi.FaceLandmarkNet>(
faceLandmarkNet,
!withFaceLandmarkNet.quantized && 'face_landmark_68_model.weights'
)
}
if (withFaceRecognitionNet) {
await initNet<faceapi.FaceRecognitionNet>(
faceRecognitionNet,
!withFaceRecognitionNet.quantized && 'face_recognition_model.weights'
)
}
})
afterAll(() => {
faceDetectionNet && faceDetectionNet.dispose()
faceLandmarkNet && faceLandmarkNet.dispose()
faceRecognitionNet && faceRecognitionNet.dispose()
})
specDefinitions({ faceDetectionNet, faceLandmarkNet, faceRecognitionNet })
})
}