Unverified commit 5b8bb8c4 by justadudewhohacks, committed by GitHub

Merge pull request #111 from justadudewhohacks/karma-face-recognition-model-issue

Karma face recognition model issue
Parents: d872a149 21fb4500
@@ -22,6 +22,7 @@ describe('faceRecognitionNet', () => {
    faceDescriptor2 = await fetchJson<number[]>('base/test/data/faceDescriptor2.json')
    faceDescriptorRect = await fetchJson<number[]>('base/test/data/faceDescriptorRect.json')
  })

  describeWithNets('quantized weights', { withFaceRecognitionNet: { quantized: true } }, ({ faceRecognitionNet }) => {

    it('computes face descriptor for squared input', async () => {
...
@@ -2,9 +2,7 @@ import { fetchImage, fetchJson } from '../../../src';
import { euclideanDistance } from '../../../src/euclideanDistance';
import { describeWithNets } from '../../utils';

-// TODO: figure out why quantized weights results in NaNs in testcases
-// apparently (net weight values differ when loading with karma)
-xdescribe('faceRecognitionNet, uncompressed', () => {
+describe('faceRecognitionNet, uncompressed', () => {

  let imgEl1: HTMLImageElement
  let imgElRect: HTMLImageElement
...
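The hunk above re-enables a previously skipped suite: Jasmine's xdescribe registers a block but marks every spec in it as pending, while describe actually runs them, so dropping the x (and the stale TODO comments) puts the uncompressed-weights tests back into the run. A minimal sketch of that behaviour, with illustrative suite names that are not from the repo:

```ts
// describe runs its specs; xdescribe only registers them as pending/skipped.
describe('enabled suite', () => {
  it('executes this spec', () => {
    expect(1 + 1).toEqual(2)
  })
})

xdescribe('disabled suite', () => {
  it('is reported as pending and never executed', () => {
    throw new Error('never reached')
  })
})
```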
@@ -62,7 +62,7 @@ describe('mtcnn', () => {
      const deltas = {
        maxBoxDelta: 2,
        maxLandmarksDelta: 6,
-       maxDescriptorDelta: 0.4
+       maxDescriptorDelta: 0.2
      }
      expect(results.length).toEqual(6)
      expectFullFaceDescriptions(results, expectedFullFaceDescriptions, expectedScores, deltas)
...
@@ -61,7 +61,7 @@ describe('ssdMobilenetv1', () => {
      const deltas = {
        maxBoxDelta: 5,
        maxLandmarksDelta: 1,
-       maxDescriptorDelta: 0.01
+       maxDescriptorDelta: 0.1
      }
      expect(results.length).toEqual(6)
      expectFullFaceDescriptions(results, expectedFullFaceDescriptions, expectedScores, deltas)
...
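The two tolerance hunks above only move the maxDescriptorDelta thresholds (mtcnn 0.4 -> 0.2, ssdMobilenetv1 0.01 -> 0.1), i.e. how far a computed face descriptor may deviate from the stored reference before the expectation fails. As a rough illustration of the kind of check such a delta typically gates, assuming the euclideanDistance helper imported in the specs above (the real expectFullFaceDescriptions may compare differently):

```ts
import { euclideanDistance } from '../../../src/euclideanDistance';

// Hypothetical helper: fail the spec if the descriptor drifts further than the allowed delta.
function expectDescriptorClose(actual: number[], expected: number[], maxDescriptorDelta: number) {
  const distance = euclideanDistance(actual, expected)
  expect(distance).toBeLessThan(maxDescriptorDelta)
}
```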
@@ -200,9 +200,7 @@ export function describeWithNets(
    if (withFaceRecognitionNet || withAllFacesSsdMobilenetv1 || withAllFacesTinyFaceDetector || withAllFacesMtcnn || withAllFacesTinyYolov2) {
      await initNet<FaceRecognitionNet>(
        faceRecognitionNet,
-       // TODO: figure out why quantized weights results in NaNs in testcases
-       // apparently (net weight values differ when loading with karma)
-       'face_recognition_model.weights'
+       !!withFaceRecognitionNet && !withFaceRecognitionNet.quantized && 'face_recognition_model.weights'
      )
    }
...
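The replacement argument above relies on short-circuit evaluation: the expression yields the uncompressed 'face_recognition_model.weights' filename only when a faceRecognitionNet is requested with quantized disabled, and false otherwise, presumably letting initNet fall back to its default (quantized) weights. A standalone sketch of that pattern, with a hypothetical wrapper name and option type:

```ts
// Illustrative only: mirrors the `!!flag && !flag.quantized && 'filename'` expression.
type FaceRecognitionNetOptions = { quantized?: boolean }

function resolveUncompressedWeightsFile(
  withFaceRecognitionNet?: FaceRecognitionNetOptions
): string | false {
  return !!withFaceRecognitionNet
    && !withFaceRecognitionNet.quantized
    && 'face_recognition_model.weights'
}

resolveUncompressedWeightsFile({ quantized: true }) // false -> default quantized weights
resolveUncompressedWeightsFile({})                  // 'face_recognition_model.weights'
resolveUncompressedWeightsFile(undefined)           // false
```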
@@ -58,7 +58,7 @@
  }
  if (isSkipQuantization) {
-   quantizedTensorArrays.push(tensor.dataSync())
+   quantizedTensorArrays.push(new Uint8Array(tensor.dataSync().buffer))
    weightEntries.push(weightEntry)
    return
  }
...
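The last visible hunk is presumably the core fix: tensor.dataSync() returns a Float32Array, and if those arrays are later merged into a Uint8Array byte stream, an element-wise copy coerces each float to a single uint8 and destroys the unquantized weight values, whereas wrapping the same underlying ArrayBuffer in a Uint8Array preserves all four bytes per float. A small standalone sketch of the difference (not the repo's quantization tool):

```ts
// Two ways to hand a Float32Array to code that concatenates Uint8Array chunks.
const weights = new Float32Array([0.25, -1.5, 3.75])

// Element-wise copy: every float is converted to a uint8 (truncated, then wrapped mod 256),
// so the original values are lost: 0.25 -> 0, -1.5 -> 255, 3.75 -> 3.
const coerced = new Uint8Array(weights.length)
coerced.set(weights)

// Byte view over the same buffer: all 12 bytes (3 floats * 4 bytes) survive,
// and reading them back as a Float32Array round-trips the exact values.
const byteView = new Uint8Array(weights.buffer)
console.log(byteView.length)                             // 12
console.log(new Float32Array(byteView.slice().buffer))   // Float32Array [0.25, -1.5, 3.75]
```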
The remaining source diff could not be displayed because it is too large.