Commit 3e8ae6ca by vincent

remove unused experimental tiny yolov2 model and prototxt of ssd_mobilenet from main repo

parent 0a469331
@@ -2,4 +2,6 @@ node_modules
 .rpt2_cache
 .env*
 tmp
+proto
 weights_uncompressed
+weights_unused
\ No newline at end of file
@@ -5,5 +5,6 @@ examples
 proto
 weights
 weights_uncompressed
+weights_unused
 test
 tools
\ No newline at end of file
@@ -4,7 +4,8 @@ const dataFiles = [
   'test/data/*.json',
   'test/media/*.mp4',
   'weights/**/*',
-  'weights_uncompressed/**/*'
+  'weights_uncompressed/**/*',
+  'weights_unused/**/*'
 ].map(pattern => ({
   pattern,
   watched: false,
...
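Note on the Karma hunk above: every file matched by these patterns is served by the Karma web server under the base/ prefix, which is why the tests further down can reach the relocated model via net.load('base/weights_unused'). Below is a minimal sketch of the config entry that .map(pattern => ({ ... })) produces for the new pattern; the included and served flags are assumptions, since this hunk only shows watched.

// Hypothetical expansion of one dataFiles entry (TypeScript sketch).
// watched: false is taken from the hunk above; included/served are assumed
// values that make Karma serve the weight shards statically instead of
// bundling them into the test page.
const weightsUnusedEntry = {
  pattern: 'weights_unused/**/*',
  watched: false,   // do not re-run tests when weight files change
  included: false,  // assumption: keep binaries out of the page's script includes
  served: true      // assumption: expose files at /base/weights_unused/...
}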
This source diff could not be displayed because it is too large.
@@ -12,58 +12,6 @@ describe('tinyYolov2', () => {
     imgEl = await faceapi.bufferToImage(img)
   })
 
-  describe('with separable convolutions', () => {
-
-    describeWithNets('quantized weights', { withTinyYolov2: { quantized: true } }, ({ tinyYolov2 }) => {
-
-      it('inputSize lg, finds all faces', async () => {
-        const detections = await tinyYolov2.locateFaces(imgEl, { inputSize: SizeType.LG })
-
-        const expectedScores = [0.9, 0.9, 0.89, 0.85, 0.85, 0.85]
-        const maxBoxDelta = 1
-        const boxOrder = [0, 1, 2, 3, 4, 5]
-
-        expect(detections.length).toEqual(6)
-        detections.forEach((det, i) => {
-          expect(det.getScore()).toBeCloseTo(expectedScores[i], 2)
-          expectRectClose(det.getBox(), expectedTinyYolov2SeparableConvBoxes[boxOrder[i]], maxBoxDelta)
-        })
-      })
-
-      it('inputSize md, finds all faces', async () => {
-        const detections = await tinyYolov2.locateFaces(imgEl, { inputSize: SizeType.MD })
-
-        const expectedScores = [0.85, 0.85, 0.84, 0.83, 0.8, 0.8]
-        const maxBoxDelta = 17
-        const boxOrder = [5, 1, 4, 3, 2, 0]
-
-        expect(detections.length).toEqual(6)
-        detections.forEach((det, i) => {
-          expect(det.getScore()).toBeCloseTo(expectedScores[i], 2)
-          expectRectClose(det.getBox(), expectedTinyYolov2SeparableConvBoxes[boxOrder[i]], maxBoxDelta)
-        })
-      })
-
-      it('inputSize custom, finds all faces', async () => {
-        const detections = await tinyYolov2.locateFaces(imgEl, { inputSize: 416 })
-
-        const expectedScores = [0.85, 0.85, 0.84, 0.83, 0.8, 0.8]
-        const maxBoxDelta = 17
-        const boxOrder = [5, 1, 4, 3, 2, 0]
-
-        expect(detections.length).toEqual(6)
-        detections.forEach((det, i) => {
-          expect(det.getScore()).toBeCloseTo(expectedScores[i], 2)
-          expectRectClose(det.getBox(), expectedTinyYolov2SeparableConvBoxes[boxOrder[i]], maxBoxDelta)
-        })
-      })
-    })
-  })
-
-  describe('without separable convolutions', () => {
-
   describeWithNets('quantized weights', { withTinyYolov2: { quantized: true, withSeparableConv: false } }, ({ tinyYolov2 }) => {
 
     it('inputSize lg, finds all faces', async () => {
@@ -156,41 +104,8 @@ describe('tinyYolov2', () => {
     })
-  })
 
   describe('no memory leaks', () => {
 
-    describe('with separable convolutions', () => {
-
-      describe('NeuralNetwork, uncompressed model', () => {
-
-        it('disposes all param tensors', async () => {
-          await expectAllTensorsReleased(async () => {
-            const res = await fetch('base/weights_uncompressed/tiny_yolov2_separable_conv_model.weights')
-            const weights = new Float32Array(await res.arrayBuffer())
-            const net = faceapi.createTinyYolov2(weights)
-            net.dispose()
-          })
-        })
-      })
-
-      describe('NeuralNetwork, quantized model', () => {
-
-        it('disposes all param tensors', async () => {
-          await expectAllTensorsReleased(async () => {
-            const net = new faceapi.TinyYolov2()
-            await net.load('base/weights')
-            net.dispose()
-          })
-        })
-      })
-    })
-
-    describe('without separable convolutions', () => {
-
     describe('NeuralNetwork, uncompressed model', () => {
 
       it('disposes all param tensors', async () => {
@@ -209,7 +124,7 @@ describe('tinyYolov2', () => {
       it('disposes all param tensors', async () => {
         await expectAllTensorsReleased(async () => {
           const net = new faceapi.TinyYolov2(false)
-          await net.load('base/weights')
+          await net.load('base/weights_unused')
           net.dispose()
         })
       })
@@ -218,6 +133,4 @@ describe('tinyYolov2', () => {
       })
-    })
-
     })
   })
 })
\ No newline at end of file
@@ -0,0 +1,85 @@
+import * as faceapi from '../../../src';
+import { SizeType } from '../../../src/tinyYolov2/types';
+import { describeWithNets, expectAllTensorsReleased, expectRectClose } from '../../utils';
+import { expectedTinyYolov2SeparableConvBoxes } from './expectedResults';
+
+describe('tinyYolov2, with separable convolutions', () => {
+
+  let imgEl: HTMLImageElement
+
+  beforeAll(async () => {
+    const img = await (await fetch('base/test/images/faces.jpg')).blob()
+    imgEl = await faceapi.bufferToImage(img)
+  })
+
+  describeWithNets('quantized weights', { withTinyYolov2: { quantized: true } }, ({ tinyYolov2 }) => {
+
+    it('inputSize lg, finds all faces', async () => {
+      const detections = await tinyYolov2.locateFaces(imgEl, { inputSize: SizeType.LG })
+
+      const expectedScores = [0.9, 0.9, 0.89, 0.85, 0.85, 0.85]
+      const maxBoxDelta = 1
+      const boxOrder = [0, 1, 2, 3, 4, 5]
+
+      expect(detections.length).toEqual(6)
+      detections.forEach((det, i) => {
+        expect(det.getScore()).toBeCloseTo(expectedScores[i], 2)
+        expectRectClose(det.getBox(), expectedTinyYolov2SeparableConvBoxes[boxOrder[i]], maxBoxDelta)
+      })
+    })
+
+    it('inputSize md, finds all faces', async () => {
+      const detections = await tinyYolov2.locateFaces(imgEl, { inputSize: SizeType.MD })
+
+      const expectedScores = [0.85, 0.85, 0.84, 0.83, 0.8, 0.8]
+      const maxBoxDelta = 17
+      const boxOrder = [5, 1, 4, 3, 2, 0]
+
+      expect(detections.length).toEqual(6)
+      detections.forEach((det, i) => {
+        expect(det.getScore()).toBeCloseTo(expectedScores[i], 2)
+        expectRectClose(det.getBox(), expectedTinyYolov2SeparableConvBoxes[boxOrder[i]], maxBoxDelta)
+      })
+    })
+
+    it('inputSize custom, finds all faces', async () => {
+      const detections = await tinyYolov2.locateFaces(imgEl, { inputSize: 416 })
+
+      const expectedScores = [0.85, 0.85, 0.84, 0.83, 0.8, 0.8]
+      const maxBoxDelta = 17
+      const boxOrder = [5, 1, 4, 3, 2, 0]
+
+      expect(detections.length).toEqual(6)
+      detections.forEach((det, i) => {
+        expect(det.getScore()).toBeCloseTo(expectedScores[i], 2)
+        expectRectClose(det.getBox(), expectedTinyYolov2SeparableConvBoxes[boxOrder[i]], maxBoxDelta)
+      })
+    })
+  })
+
+  describe('no memory leaks', () => {
+
+    describe('NeuralNetwork, uncompressed model', () => {
+
+      it('disposes all param tensors', async () => {
+        await expectAllTensorsReleased(async () => {
+          const res = await fetch('base/weights_uncompressed/tiny_yolov2_separable_conv_model.weights')
+          const weights = new Float32Array(await res.arrayBuffer())
+          const net = faceapi.createTinyYolov2(weights)
+          net.dispose()
+        })
+      })
+    })
+
+    describe('NeuralNetwork, quantized model', () => {
+
+      it('disposes all param tensors', async () => {
+        await expectAllTensorsReleased(async () => {
+          const net = new faceapi.TinyYolov2()
+          await net.load('base/weights')
+          net.dispose()
+        })
+      })
+    })
+  })
+})
\ No newline at end of file
@@ -85,12 +85,13 @@ async function loadNetWeights(uri: string): Promise<Float32Array> {
 
 async function initNet<TNet extends NeuralNetwork<any>>(
   net: TNet,
-  uncompressedFilename: string | boolean
+  uncompressedFilename: string | boolean,
+  isUnusedModel: boolean = false
 ) {
   await net.load(
     uncompressedFilename
      ? await loadNetWeights(`base/weights_uncompressed/${uncompressedFilename}`)
-      : 'base/weights'
+      : (isUnusedModel ? 'base/weights_unused' : 'base/weights')
   )
 }
@@ -152,7 +153,8 @@ export function describeWithNets(
     if (withTinyYolov2) {
       await initNet<faceapi.TinyYolov2>(
         tinyYolov2,
-        !!withTinyYolov2 && !withTinyYolov2.quantized && 'tiny_yolov2_model.weights'
+        !!withTinyYolov2 && !withTinyYolov2.quantized && 'tiny_yolov2_model.weights',
+        withTinyYolov2.withSeparableConv === false
       )
     }
   })
...
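To make the routing in initNet above concrete, here is a minimal sketch of the decision it now makes. resolveWeightsUri is an invented name for illustration; the ternary logic and paths come verbatim from the hunk (in initNet itself, the uncompressed branch additionally fetches the file into a Float32Array via loadNetWeights before calling net.load).

// Sketch only: where initNet sources weights from after this commit.
function resolveWeightsUri(
  uncompressedFilename: string | boolean,
  isUnusedModel: boolean = false
): string {
  return uncompressedFilename
    ? `base/weights_uncompressed/${uncompressedFilename}`
    : (isUnusedModel ? 'base/weights_unused' : 'base/weights')
}

// e.g. for { withTinyYolov2: { quantized: true, withSeparableConv: false } }
// the arguments are (false, true), so the quantized model is loaded from
// 'base/weights_unused' rather than 'base/weights'.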
[{"weights":[{"name":"conv0/conv/filters","shape":[3,3,3,16],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.0036540280370151294,"min":-0.44213739247883066}},{"name":"conv0/conv/bias","shape":[16],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.002164303555208094,"min":-0.13418682042290184}},{"name":"conv0/bn/sub","shape":[16],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.0028029036288167914,"min":-0.3139252064274806}},{"name":"conv0/bn/truediv","shape":[16],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.05010388411727606,"min":2.7867696285247803}},{"name":"conv1/conv/filters","shape":[3,3,16,32],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.0017398431020624497,"min":-0.22617960326811845}},{"name":"conv1/conv/bias","shape":[32],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.001145745112615473,"min":-0.12488621727508656}},{"name":"conv1/bn/sub","shape":[32],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.00586619587505565,"min":-0.9503237317590153}},{"name":"conv1/bn/truediv","shape":[32],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.007056442896525065,"min":1.2661800384521484}},{"name":"conv2/conv/filters","shape":[3,3,32,64],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.0010545693776186774,"min":-0.12971203344709734}},{"name":"conv2/conv/bias","shape":[64],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.0005329197820495157,"min":-0.07034541123053607}},{"name":"conv2/bn/sub","shape":[64],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.005863590567719703,"min":-0.691903686990925}},{"name":"conv2/bn/truediv","shape":[64],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.006374212339812634,"min":1.4001796245574951}},{"name":"conv3/conv/filters","shape":[3,3,64,128],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.0006935241175632851,"min":-0.08114232175490436}},{"name":"conv3/conv/bias","shape":[128],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.0002854522025468303,"min":-0.04224692597693088}},{"name":"conv3/bn/sub","shape":[128],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.008516784275279325,"min":-1.1667994457132675}},{"name":"conv3/bn/truediv","shape":[128],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.004096750652088838,"min":1.1394747495651245}},{"name":"conv4/conv/filters","shape":[3,3,128,256],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.0006537227946169236,"min":-0.06733344784554313}},{"name":"conv4/conv/bias","shape":[256],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.00018004616905076831,"min":-0.041770711219778246}},{"name":"conv4/bn/sub","shape":[256],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.011157989969440535,"min":-1.5509606057522343}},{"name":"conv4/bn/truediv","shape":[256],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.00421752836190018,"min":1.1967103481292725}},{"name":"conv5/conv/filters","shape":[3,3,256,512],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.0008280676077393925,"min":-0.05299632689532112}},{"name":"conv5/conv/bias","shape":[512],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.00007708504312105623,"min":-0.020973851904273033}},{"name":"conv5/bn/sub","shape":[512],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.011612189049814262,"min":-1.7302161684223252}},{"name":"conv5/bn/truediv","shape":[512],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.003486
9993434232826,"min":1.2963157892227173}},{"name":"conv6/conv/filters","shape":[3,3,512,1024],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.0003250038945207409,"min":-0.04030048292057187}},{"name":"conv6/conv/bias","shape":[1024],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.00009164303220114009,"min":-0.02338002622127533}},{"name":"conv6/bn/sub","shape":[1024],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.0168181017333386,"min":-2.068626513200648}},{"name":"conv6/bn/truediv","shape":[1024],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.004455030198190726,"min":1.309550166130066}},{"name":"conv7/conv/filters","shape":[3,3,1024,1024],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.00026194834533859704,"min":-0.033267439858001825}},{"name":"conv7/conv/bias","shape":[1024],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.0001491123554753322,"min":-0.011928988438026577}},{"name":"conv7/bn/sub","shape":[1024],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.009263983427309523,"min":-1.1765258952683093}},{"name":"conv7/bn/truediv","shape":[1024],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.01123507022857666,"min":0.9360886812210083}},{"name":"conv8/filters","shape":[1,1,1024,30],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.001922343233052422,"min":-0.2614386796951294}},{"name":"conv8/bias","shape":[30],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.0009638834233377494,"min":-0.15904076485072866}}],"paths":["tiny_yolov2_model-shard1","tiny_yolov2_model-shard2","tiny_yolov2_model-shard3","tiny_yolov2_model-shard4"]}]
\ No newline at end of file
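The JSON above is a TensorFlow.js weights manifest: each tensor is stored quantized to uint8, its quantization.scale and quantization.min recover the float32 values as value = byte * scale + min, and paths lists the binary shards holding the concatenated tensor data. A minimal dequantization sketch under that format (shard fetching and tensor reshaping omitted):

// Dequantize one uint8-quantized tensor from a tfjs weights manifest entry.
// This is the standard affine mapping the manifest's quantization block encodes.
function dequantize(
  quantized: Uint8Array,
  quantization: { scale: number, min: number }
): Float32Array {
  const out = new Float32Array(quantized.length)
  for (let i = 0; i < quantized.length; i++) {
    out[i] = quantized[i] * quantization.scale + quantization.min
  }
  return out
}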