Commit 623052ba by vincent

renamed package

parent 64cf04a4
node_modules
_data
.rpt2_cache
\ No newline at end of file
node_modules
.rpt2_cache
examples
test
proto
weights
\ No newline at end of file
-# face-recognition.min.js
+# face-api.js

-**face recognition API for the browser with tensorflow.js**
+**JavaScript API for face detection and face recognition in the browser with tensorflow.js**

This project implements a ResNet-34 like architecture using the tensorflow.js core API ([@tensorflow/tfjs-core](https://github.com/tensorflow/tfjs-core)) for realtime face recognition in the browser. The neural net is equivalent to the **FaceRecognizerNet** used in [face-recognition.js](https://github.com/justadudewhohacks/face-recognition.js) and the net used in the [dlib](https://github.com/davisking/dlib/blob/master/examples/dnn_face_recognition_ex.cpp) face recognition example. The weights have been trained by [davisking](https://github.com/davisking) and the model achieves a prediction accuracy of 99.38% on the LFW (Labeled Faces in the Wild) benchmark for face recognition.

@@ -29,7 +29,7 @@ Download the weights file from your server and initialize the net (note, that yo
``` javascript
const res = await axios.get('face_recognition_model.weights', { responseType: 'arraybuffer' })
const weights = new Float32Array(res.data)
-const net = facerecognition.faceRecognitionNet(weights)
+const net = faceapi.faceRecognitionNet(weights)
```

Compute and compare two 150 x 150 sized face images:

@@ -43,7 +43,7 @@ const imgData2 = ...
const descriptor1 = await net.computeFaceDescriptor(imgData1)
const descriptor2 = await net.computeFaceDescriptor(imgData2)
-const distance = facerecognition.euclidianDistance(descriptor1, descriptor2)
+const distance = faceapi.euclidianDistance(descriptor1, descriptor2)
if (distance < 0.6)
  console.log('match')
...
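For reference, the distance used here is the plain Euclidean (L2) distance between the two face descriptors (128 numbers each, as in the reference descriptor files further down in this diff). A minimal sketch of the comparison, assuming the descriptors are plain numeric arrays — the example pages below call the bundled helper as `faceapi.euclideanDistance` for the same purpose:

``` javascript
// Minimal sketch of the match check above, assuming desc1 and desc2 are
// equal-length numeric arrays (e.g. 128-dimensional face descriptors).
function euclideanDistance(desc1, desc2) {
  let sum = 0
  for (let i = 0; i < desc1.length; i++) {
    const diff = desc1[i] - desc2[i]
    sum += diff * diff
  }
  return Math.sqrt(sum)
}

// Same decision rule as in the README snippet: distances below ~0.6 count as a match.
const isMatch = euclideanDistance(descriptor1, descriptor2) < 0.6
```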
(function (global, factory) {
  typeof exports === 'object' && typeof module !== 'undefined' ? factory(exports, require('crypto')) :
  typeof define === 'function' && define.amd ? define(['exports', 'crypto'], factory) :
-  (factory((global.facerecognition = global.facerecognition || {}),global.crypto));
+  (factory((global.faceapi = global.faceapi || {}),global.crypto));
}(this, (function (exports,crypto) { 'use strict';

crypto = crypto && crypto.hasOwnProperty('default') ? crypto['default'] : crypto;
@@ -1650,11 +1650,11 @@
seedrandom.xor4096 = xor4096;
seedrandom.tychei = tychei;

-var C__Users_user_dev_faceRecognition_min_js_node_modules_seedrandom = seedrandom;
+var C__Users_user_dev_faceApi_js_node_modules_seedrandom = seedrandom;

var seedrandom$1 = /*#__PURE__*/Object.freeze({
-  default: C__Users_user_dev_faceRecognition_min_js_node_modules_seedrandom,
+  default: C__Users_user_dev_faceApi_js_node_modules_seedrandom,
-  __moduleExports: C__Users_user_dev_faceRecognition_min_js_node_modules_seedrandom
+  __moduleExports: C__Users_user_dev_faceApi_js_node_modules_seedrandom
});

var MPRandGauss = (function () {
...
import * as tf from '@tensorflow/tfjs-core';
import { NetInput } from './NetInput';
import { TNetInput } from './types';
/**
* Pads the smaller dimension of an image tensor with zeros, such that width === height.
*
* @param imgTensor The image tensor.
 * @param isCenterImage (optional, default: false) If true, add padding on both sides of the image, such that the image ends up centered.
* @returns The padded tensor with width === height.
*/
export declare function padToSquare(imgTensor: tf.Tensor4D, isCenterImage?: boolean): tf.Tensor4D;
export declare function getImageTensor(input: tf.Tensor | NetInput | TNetInput): tf.Tensor4D;
import * as tf from '@tensorflow/tfjs-core';
import { NetInput } from './NetInput';
/**
* Pads the smaller dimension of an image tensor with zeros, such that width === height.
*
* @param imgTensor The image tensor.
 * @param isCenterImage (optional, default: false) If true, add padding on both sides of the image, such that the image ends up centered.
* @returns The padded tensor with width === height.
*/
export function padToSquare(imgTensor, isCenterImage) {
if (isCenterImage === void 0) { isCenterImage = false; }
return tf.tidy(function () {
var _a = imgTensor.shape.slice(1), height = _a[0], width = _a[1];
if (height === width) {
return imgTensor;
}
var paddingAmount = Math.floor(Math.abs(height - width) * (isCenterImage ? 0.5 : 1));
var paddingAxis = height > width ? 2 : 1;
var paddingTensorShape = imgTensor.shape.slice();
paddingTensorShape[paddingAxis] = paddingAmount;
var tensorsToStack = (isCenterImage ? [tf.fill(paddingTensorShape, 0)] : [])
.concat([imgTensor, tf.fill(paddingTensorShape, 0)]);
console.log(tensorsToStack);
return tf.concat(tensorsToStack, paddingAxis);
});
}
export function getImageTensor(input) {
return tf.tidy(function () {
if (input instanceof tf.Tensor) {
var rank = input.shape.length;
if (rank !== 3 && rank !== 4) {
throw new Error('input tensor must be of rank 3 or 4');
}
return (rank === 3 ? input.expandDims(0) : input).toFloat();
}
var netInput = input instanceof NetInput ? input : new NetInput(input);
return tf.concat(netInput.canvases.map(function (canvas) {
return tf.fromPixels(canvas).expandDims(0).toFloat();
}));
});
}
//# sourceMappingURL=transformInputs.js.map
\ No newline at end of file
{"version":3,"file":"transformInputs.js","sourceRoot":"","sources":["../src/transformInputs.ts"],"names":[],"mappings":"AAAA,OAAO,KAAK,EAAE,MAAM,uBAAuB,CAAC;AAE5C,OAAO,EAAE,QAAQ,EAAE,MAAM,YAAY,CAAC;AAGtC;;;;;;GAMG;AACH,MAAM,sBACJ,SAAsB,EACtB,aAA8B;IAA9B,8BAAA,EAAA,qBAA8B;IAE9B,OAAO,EAAE,CAAC,IAAI,CAAC;QAEP,IAAA,6BAA0C,EAAzC,cAAM,EAAE,aAAK,CAA4B;QAChD,IAAI,MAAM,KAAK,KAAK,EAAE;YACpB,OAAO,SAAS,CAAA;SACjB;QAED,IAAM,aAAa,GAAG,IAAI,CAAC,KAAK,CAAC,IAAI,CAAC,GAAG,CAAC,MAAM,GAAG,KAAK,CAAC,GAAG,CAAC,aAAa,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAA;QACtF,IAAM,WAAW,GAAG,MAAM,GAAG,KAAK,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAA;QAC1C,IAAM,kBAAkB,GAAG,SAAS,CAAC,KAAK,CAAC,KAAK,EAAsC,CAAA;QACtF,kBAAkB,CAAC,WAAW,CAAC,GAAG,aAAa,CAAA;QAC/C,IAAM,cAAc,GAAG,CAAC,aAAa,CAAC,CAAC,CAAC,CAAC,EAAE,CAAC,IAAI,CAAC,kBAAkB,EAAE,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,EAAE,CAAC;aAC3E,MAAM,CAAC,CAAC,SAAS,EAAG,EAAE,CAAC,IAAI,CAAC,kBAAkB,EAAE,CAAC,CAAC,CAAC,CAAkB,CAAA;QACxE,OAAO,CAAC,GAAG,CAAC,cAAc,CAAC,CAAA;QAC3B,OAAO,EAAE,CAAC,MAAM,CAAC,cAAc,EAAE,WAAW,CAAC,CAAA;IAC/C,CAAC,CAAC,CAAA;AACJ,CAAC;AAED,MAAM,yBAAyB,KAAuC;IACpE,OAAO,EAAE,CAAC,IAAI,CAAC;QACb,IAAI,KAAK,YAAY,EAAE,CAAC,MAAM,EAAE;YAC9B,IAAM,IAAI,GAAG,KAAK,CAAC,KAAK,CAAC,MAAM,CAAA;YAC/B,IAAI,IAAI,KAAK,CAAC,IAAI,IAAI,KAAK,CAAC,EAAE;gBAC5B,MAAM,IAAI,KAAK,CAAC,qCAAqC,CAAC,CAAA;aACvD;YAED,OAAO,CAAC,IAAI,KAAK,CAAC,CAAC,CAAC,CAAC,KAAK,CAAC,UAAU,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,KAAK,CAAC,CAAC,OAAO,EAAiB,CAAA;SAC3E;QAED,IAAM,QAAQ,GAAG,KAAK,YAAY,QAAQ,CAAC,CAAC,CAAC,KAAK,CAAC,CAAC,CAAC,IAAI,QAAQ,CAAC,KAAK,CAAC,CAAA;QACxE,OAAO,EAAE,CAAC,MAAM,CACd,QAAQ,CAAC,QAAQ,CAAC,GAAG,CAAC,UAAA,MAAM;YAC1B,OAAA,EAAE,CAAC,UAAU,CAAC,MAAM,CAAC,CAAC,UAAU,CAAC,CAAC,CAAC,CAAC,OAAO,EAAE;QAA7C,CAA6C,CAC9C,CACa,CAAA;IAClB,CAAC,CAAC,CAAA;AACJ,CAAC"}
\ No newline at end of file
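To make the padding behaviour above concrete, here is a hedged usage sketch; the tensor shapes are invented for illustration and the import path simply points at the module shown above:

``` javascript
import * as tf from '@tensorflow/tfjs-core'
import { padToSquare } from './transformInputs'

// Hypothetical batch of one 100 x 150 RGB image (shape [batch, height, width, channels]).
const imgTensor = tf.zeros([1, 100, 150, 3])

// Pads the smaller dimension (height) with zeros: result shape is [1, 150, 150, 3],
// with all 50 padding rows appended below the image.
const padded = padToSquare(imgTensor)

// With isCenterImage = true, the padding is split: 25 zero rows above and 25 below,
// so the original image ends up vertically centered.
const centered = padToSquare(imgTensor, true)
```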
@@ -15,13 +15,13 @@ async function fetchImage(uri) {
async function initFaceDetectionNet() {
  const res = await axios.get('face_detection_model.weights', { responseType: 'arraybuffer' })
  const weights = new Float32Array(res.data)
-  return facerecognition.faceDetectionNet(weights)
+  return faceapi.faceDetectionNet(weights)
}

async function initFaceRecognitionNet() {
  const res = await axios.get('face_recognition_model.weights', { responseType: 'arraybuffer' })
  const weights = new Float32Array(res.data)
-  return facerecognition.faceRecognitionNet(weights)
+  return faceapi.faceRecognitionNet(weights)
}

// fetch first image of each class and compute their descriptors
@@ -32,7 +32,7 @@ async function initTrainDescriptorsByClass(net, numImagesForTraining = 1) {
    async className => {
      const descriptors = []
      for (let i = 1; i < (numImagesForTraining + 1); i++) {
-        const img = await facerecognition.bufferToImage(
+        const img = await faceapi.bufferToImage(
          await fetchImage(getFaceImageUri(className, i))
        )
        descriptors.push(await net.computeFaceDescriptor(img))
@@ -47,9 +47,9 @@ async function initTrainDescriptorsByClass(net, numImagesForTraining = 1) {
function getBestMatch(descriptorsByClass, queryDescriptor) {
  function computeMeanDistance(descriptorsOfClass) {
-    return facerecognition.round(
+    return faceapi.round(
      descriptorsOfClass
-        .map(d => facerecognition.euclideanDistance(d, queryDescriptor))
+        .map(d => faceapi.euclideanDistance(d, queryDescriptor))
        .reduce((d1, d2) => d1 + d2, 0)
        / (descriptorsOfClass.length || 1)
    )
...
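The rest of getBestMatch is cut off by the hunk above. A hedged sketch of how the mean distances could be reduced to a single best match — the { className, descriptors } entry shape is an assumption, chosen so that the bestMatch.className / bestMatch.distance reads in the recognition example further down still work:

``` javascript
// Hedged sketch, not the elided original: pick the class whose training descriptors
// have the lowest mean Euclidean distance to the query descriptor.
function getBestMatch(descriptorsByClass, queryDescriptor) {
  const meanDistance = descriptorsOfClass =>
    descriptorsOfClass
      .map(d => faceapi.euclideanDistance(d, queryDescriptor))
      .reduce((d1, d2) => d1 + d2, 0) / (descriptorsOfClass.length || 1)

  return descriptorsByClass
    .map(({ className, descriptors }) => ({ className, distance: faceapi.round(meanDistance(descriptors)) }))
    .reduce((best, curr) => (curr.distance < best.distance ? curr : best))
}
```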
<!DOCTYPE html>
<html>
<head>
-  <script src="face-recognition.js"></script>
+  <script src="face-api.js"></script>
  <script src="axios.min.js"></script>
  <script src="commons.js"></script>
  <link rel="stylesheet" href="styles.css">
@@ -46,13 +46,13 @@
  let net

  function onIncreaseThreshold() {
-    minConfidence = Math.min(facerecognition.round(minConfidence + 0.1), 1.0)
+    minConfidence = Math.min(faceapi.round(minConfidence + 0.1), 1.0)
    $('#minConfidence').val(minConfidence)
    updateResults()
  }

  function onDecreaseThreshold() {
-    minConfidence = Math.max(facerecognition.round(minConfidence - 0.1), 0.1)
+    minConfidence = Math.max(faceapi.round(minConfidence - 0.1), 0.1)
    $('#minConfidence').val(minConfidence)
    updateResults()
  }
@@ -64,18 +64,18 @@
    canvas.width = width
    canvas.height = height

-    const input = new facerecognition.NetInput(inputImgEl)
+    const input = new faceapi.NetInput(inputImgEl)
    const detections = await net.locateFaces(input, minConfidence)
-    facerecognition.drawDetection('overlay', detections.map(det => det.forSize(width, height)))
+    faceapi.drawDetection('overlay', detections.map(det => det.forSize(width, height)))

-    const faceImages = await facerecognition.extractFaces(input.canvases[0], detections)
+    const faceImages = await faceapi.extractFaces(input.canvases[0], detections)
    $('#facesContainer').empty()
    faceImages.forEach(canvas => $('#facesContainer').append(canvas))
  }

  async function onSelectionChanged(uri) {
    const imgBuf = await fetchImage(uri)
-    $(`#inputImg`).get(0).src = (await facerecognition.bufferToImage(imgBuf)).src
+    $(`#inputImg`).get(0).src = (await faceapi.bufferToImage(imgBuf)).src
    updateResults()
  }
...
<!DOCTYPE html>
<html>
<head>
-  <script src="face-recognition.js"></script>
+  <script src="face-api.js"></script>
  <script src="axios.min.js"></script>
  <script src="commons.js"></script>
  <link rel="stylesheet" href="styles.css">
@@ -47,13 +47,13 @@
  let trainDescriptorsByClass = []

  function onIncreaseThreshold() {
-    minConfidence = Math.min(facerecognition.round(minConfidence + 0.1), 1.0)
+    minConfidence = Math.min(faceapi.round(minConfidence + 0.1), 1.0)
    $('#minConfidence').val(minConfidence)
    updateResults()
  }

  function onDecreaseThreshold() {
-    minConfidence = Math.max(facerecognition.round(minConfidence - 0.1), 0.1)
+    minConfidence = Math.max(faceapi.round(minConfidence - 0.1), 0.1)
    $('#minConfidence').val(minConfidence)
    updateResults()
  }
@@ -65,13 +65,13 @@
    canvas.width = width
    canvas.height = height

-    const input = new facerecognition.NetInput(inputImgEl)
+    const input = new faceapi.NetInput(inputImgEl)
    const detections = await detectionNet.locateFaces(input, minConfidence)
    const detectionsForSize = detections.map(det => det.forSize(width, height))
-    facerecognition.drawDetection('overlay', detectionsForSize, { withScore: false })
+    faceapi.drawDetection('overlay', detectionsForSize, { withScore: false })

-    const faceTensors = (await facerecognition.extractFaceTensors(input, detections))
+    const faceTensors = (await faceapi.extractFaceTensors(input, detections))
    const descriptors = await Promise.all(faceTensors.map(t => recognitionNet.computeFaceDescriptor(t)))

    // free memory for face image tensors after we computed their descriptors
@@ -81,13 +81,13 @@
      const bestMatch = getBestMatch(trainDescriptorsByClass, descriptor)
      const text = `${bestMatch.distance < threshold ? bestMatch.className : 'unkown'} (${bestMatch.distance})`
      const { x, y } = detectionsForSize[i].box
-      facerecognition.drawText(canvas.getContext('2d'), x, y, text, facerecognition.getDefaultDrawOptions())
+      faceapi.drawText(canvas.getContext('2d'), x, y, text, faceapi.getDefaultDrawOptions())
    })
  }

  async function onSelectionChanged(uri) {
    const imgBuf = await fetchImage(uri)
-    $(`#inputImg`).get(0).src = (await facerecognition.bufferToImage(imgBuf)).src
+    $(`#inputImg`).get(0).src = (await faceapi.bufferToImage(imgBuf)).src
    updateResults()
  }
...
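The clean-up code referenced by the "free memory" comment sits behind the hunk boundary above. A hedged sketch of what it could look like — tensor.dispose() is the standard tfjs-core way to release a tensor's backing memory:

``` javascript
// Hedged sketch: release the face tensors returned by faceapi.extractFaceTensors once
// their descriptors have been computed, so WebGL memory is not leaked between images.
faceTensors.forEach(t => t.dispose())
```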
<!DOCTYPE html>
<html>
<head>
-  <script src="face-recognition.js"></script>
+  <script src="face-api.js"></script>
  <script src="axios.min.js"></script>
  <script src="commons.js"></script>
  <link rel="stylesheet" href="styles.css">
@@ -45,13 +45,13 @@
  let net, result

  function onIncreaseThreshold() {
-    minConfidence = Math.min(facerecognition.round(minConfidence + 0.1), 1.0)
+    minConfidence = Math.min(faceapi.round(minConfidence + 0.1), 1.0)
    $('#minConfidence').val(minConfidence)
    updateResults()
  }

  function onDecreaseThreshold() {
-    minConfidence = Math.max(facerecognition.round(minConfidence - 0.1), 0.1)
+    minConfidence = Math.max(faceapi.round(minConfidence - 0.1), 0.1)
    $('#minConfidence').val(minConfidence)
    updateResults()
  }
@@ -63,14 +63,14 @@
    canvas.width = width
    canvas.height = height

-    const input = new facerecognition.NetInput(inputImgEl)
+    const input = new faceapi.NetInput(inputImgEl)
    result = await net.locateFaces(input, minConfidence)
-    facerecognition.drawDetection('overlay', result.map(det => det.forSize(width, height)))
+    faceapi.drawDetection('overlay', result.map(det => det.forSize(width, height)))
  }

  async function onSelectionChanged(uri) {
    const imgBuf = await fetchImage(uri)
-    $(`#inputImg`).get(0).src = (await facerecognition.bufferToImage(imgBuf)).src
+    $(`#inputImg`).get(0).src = (await faceapi.bufferToImage(imgBuf)).src
    updateResults()
  }
...
<!DOCTYPE html>
<html>
<head>
-  <script src="face-recognition.js"></script>
+  <script src="face-api.js"></script>
  <script src="axios.min.js"></script>
  <script src="commons.js"></script>
  <link rel="stylesheet" href="styles.css">
@@ -55,25 +55,25 @@
  let net, result

  function onIncreaseThreshold() {
-    minConfidence = Math.min(facerecognition.round(minConfidence + 0.1), 1.0)
+    minConfidence = Math.min(faceapi.round(minConfidence + 0.1), 1.0)
    $('#minConfidence').val(minConfidence)
  }

  function onDecreaseThreshold() {
-    minConfidence = Math.max(facerecognition.round(minConfidence - 0.1), 0.1)
+    minConfidence = Math.max(faceapi.round(minConfidence - 0.1), 0.1)
    $('#minConfidence').val(minConfidence)
  }

  function displayTimeStats(timeInMs) {
    $('#time').val(`${timeInMs} ms`)
-    $('#fps').val(`${facerecognition.round(1000 / timeInMs)}`)
+    $('#fps').val(`${faceapi.round(1000 / timeInMs)}`)
  }

  async function onPlay(videoEl) {
    if(videoEl.paused || videoEl.ended)
      return false

-    const input = new facerecognition.NetInput(videoEl)
+    const input = new faceapi.NetInput(videoEl)
    const { width, height } = input
    const canvas = $('#overlay').get(0)
    canvas.width = width
@@ -83,7 +83,7 @@
    result = await net.locateFaces(input, minConfidence)
    displayTimeStats(Date.now() - ts)
-    facerecognition.drawDetection('overlay', result.map(det => det.forSize(width, height)))
+    faceapi.drawDetection('overlay', result.map(det => det.forSize(width, height)))
    setTimeout(() => onPlay(videoEl))
  }
...
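The page markup and the code that feeds the video element are outside the hunks above. A hedged sketch of one way to wire a webcam stream into this loop — the #inputVideo id and the getUserMedia setup are assumptions for illustration, not taken from the example:

``` javascript
// Hedged sketch: attach the webcam to a <video> element and start the detection loop.
async function startVideo() {
  const videoEl = document.getElementById('inputVideo')
  videoEl.srcObject = await navigator.mediaDevices.getUserMedia({ video: true })
  videoEl.onloadedmetadata = () => {
    videoEl.play()
    onPlay(videoEl) // onPlay reschedules itself via setTimeout, as shown above
  }
}
```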
<!DOCTYPE html>
<html>
<head>
-  <script src="face-recognition.js"></script>
+  <script src="face-api.js"></script>
  <script src="axios.min.js"></script>
  <script src="commons.js"></script>
  <link rel="stylesheet" href="styles.css">
@@ -101,7 +101,7 @@
  function displayTimeStats(timeInMs) {
    $('#time').val(`${timeInMs} ms`)
-    $('#fps').val(`${facerecognition.round(1000 / timeInMs)}`)
+    $('#fps').val(`${faceapi.round(1000 / timeInMs)}`)
  }

  function displayImage(src) {
@@ -111,7 +111,7 @@
  async function runFaceRecognition() {
    async function next() {
      const imgBuf = await fetchImage(getFaceImageUri(classes[currClassIdx], currImageIdx))
-      const input = await facerecognition.bufferToImage(imgBuf)
+      const input = await faceapi.bufferToImage(imgBuf)
      const imgEl = $('#face').get(0)
      imgEl.src = input.src
...
<!DOCTYPE html>
<html>
<head>
-  <script src="face-recognition.js"></script>
+  <script src="face-api.js"></script>
  <script src="axios.min.js"></script>
  <script src="commons.js"></script>
  <link rel="stylesheet" href="styles.css">
@@ -40,8 +40,8 @@
  let net, descriptors = { desc1: null, desc2: null }

  function updateResult() {
-    const distance = facerecognition.round(
-      facerecognition.euclideanDistance(descriptors.desc1, descriptors.desc2)
+    const distance = faceapi.round(
+      faceapi.euclideanDistance(descriptors.desc1, descriptors.desc2)
    )
    let text = distance
    let bgColor = '#ffffff'
@@ -55,7 +55,7 @@
  async function onSelectionChanged(which, uri) {
    const imgBuf = await fetchImage(uri)
-    const input = await facerecognition.bufferToImage(imgBuf)
+    const input = await faceapi.bufferToImage(imgBuf)
    const imgEl = $(`#face${which}`).get(0)
    imgEl.src = input.src
    descriptors[`desc${which}`] = await net.computeFaceDescriptor(input)
...
{
-  "name": "face-recognition.min",
+  "name": "face-api",
  "version": "0.0.0",
-  "description": "face recognition API for the browser with tensorflow.js",
+  "description": "JavaScript API for face detection and face recognition in the browser with tensorflow.js",
  "main": "./dist/index.js",
  "typings": "./dist/index.d.ts",
  "scripts": {
...
@@ -15,9 +15,9 @@ export default {
  ].concat(minify ? uglify() : []),
  output: {
    extend: true,
-    file: `dist/face-recognition${minify ? '.min' : ''}.js`,
+    file: `dist/face-api${minify ? '.min' : ''}.js`,
    format: 'umd',
-    name: 'facerecognition',
+    name: 'faceapi',
    globals: {
      'crypto': 'crypto'
    }
...
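Since the output stays a UMD bundle, only the file name and the global name change with this commit. A hedged sketch of the two ways such a build can be consumed (the require path is an assumption for illustration):

``` javascript
// Browser: dist/face-api.js loaded via a <script> tag (as in the example pages above)
// attaches the API to the global scope, so window.faceapi is available.
console.log(typeof window.faceapi.faceRecognitionNet) // 'function'

// Node or a bundler: the same UMD file can be required as a CommonJS module.
// const faceapi = require('./dist/face-api.js')
```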
descriptorHoward=[-0.08900658041238785,0.10903991758823395,0.027176208794116974,0.0440075621008873,-0.14542894065380096,0.11052004992961884,-0.044826459139585495,-0.05154901742935181,0.10313282907009125,-0.09580706059932709,0.11335687339305878,-0.027231775224208832,-0.20172204077243805,0.0940278172492981,-0.02581452578306198,0.07219456881284714,-0.12272307276725769,-0.07349634170532227,-0.17236188054084778,-0.17453305423259735,-0.034208014607429504,0.1051197499036789,0.026275131851434708,0.014430046081542969,-0.20353534817695618,-0.2949812114238739,-0.04833771288394928,-0.10960748046636581,0.08448511362075806,-0.03991013765335083,-0.03964321315288544,-0.09928630292415619,-0.1602567881345749,0.026378951966762543,0.09079921245574951,0.07745552062988281,-0.054152462631464005,-0.017411045730113983,0.16053830087184906,0.010681785643100739,-0.11814303696155548,0.03829622268676758,0.08098047226667404,0.2989161014556885,0.12581878900527954,0.0647912546992302,0.023303285241127014,-0.07838225364685059,0.13633489608764648,-0.21215596795082092,0.07675531506538391,0.1447518765926361,0.14686475694179535,0.0699121281504631,0.08843745291233063,-0.11935200542211533,-0.015284918248653412,0.16930952668190002,-0.04400303214788437,0.1650175303220749,0.10481946915388107,-0.01336788758635521,-0.050796136260032654,-0.0797152891755104,0.2541898190975189,0.07128539681434631,-0.14587090909481049,-0.15604129433631897,0.11365237832069397,-0.16018034517765045,-0.03458000719547272,0.05678928643465042,-0.0719192773103714,-0.15881866216659546,-0.1955045610666275,0.06456597149372101,0.5308966636657715,0.13605226576328278,-0.18340086936950684,-0.05473683401942253,-0.09668048471212387,-0.000602424144744873,0.06609033793210983,0.08351708948612213,-0.13018563389778137,-0.07167275249958038,-0.043135225772857666,0.08809376507997513,0.2999389171600342,-0.07008984684944153,0.005112119019031525,0.1464608609676361,0.030642826110124588,0.005341166630387306,-0.03758299723267555,-0.0027411580085754395,-0.19020092487335205,-0.005203835666179657,-0.03693883866071701,0.01771560311317444,0.0251515731215477,-0.13933824002742767,0.04255777597427368,0.08094561100006104,-0.23745450377464294,0.21049562096595764,-0.016159698367118835,-0.06422223895788193,0.0915207490324974,0.10660701990127563,-0.14731749892234802,-0.027426909655332565,0.23789143562316895,-0.2964036166667938,0.2034282386302948,0.2009483426809311,0.04705991595983505,0.1396426558494568,0.05233515799045563,0.11507779359817505,0.045886870473623276,0.12765640020370483,-0.15917259454727173,-0.13223722577095032,-0.023241132497787476,-0.1298847794532776,-0.027176383882761,0.009421631693840027]
\ No newline at end of file
descriptorLeonard=[0.016118615865707397,0.1272888332605362,-0.013150867074728012,-0.03657906502485275,-0.10901328921318054,-0.004170142114162445,-0.010215671733021736,0.006740286946296692,0.1793280392885208,-0.06382005661725998,0.20762376487255096,-0.016507171094417572,-0.2747085690498352,-0.026863690465688705,-0.0744708999991417,0.1404581367969513,-0.198239266872406,-0.12842532992362976,-0.15176594257354736,-0.12798485159873962,0.07055014371871948,-0.021530020982027054,-0.013443628326058388,0.05855056643486023,-0.10384566336870193,-0.26168593764305115,-0.04933137446641922,-0.10280363261699677,0.02998245507478714,-0.21771246194839478,0.07944433391094208,0.03573431074619293,-0.1271427869796753,-0.02638978511095047,-0.01610453426837921,0.05286967754364014,0.024109765887260437,-0.08603353798389435,0.18842941522598267,0.02356734871864319,-0.16014565527439117,0.07457999885082245,0.04670367389917374,0.32030463218688965,0.18153166770935059,0.031080730259418488,-0.01877094805240631,-0.09274949133396149,0.13673648238182068,-0.20436367392539978,0.03125997632741928,0.1889854222536087,0.07329613715410233,0.03783072903752327,0.12429258227348328,-0.10134827345609665,0.060723669826984406,0.13368329405784607,-0.22554075717926025,0.032615188509225845,0.05100584030151367,-0.008536417037248611,-0.028306663036346436,-0.09284669905900955,0.10282410681247711,0.05005515366792679,-0.05751366168260574,-0.16138313710689545,0.1641443520784378,-0.21432684361934662,-0.1301480382680893,0.051546234637498856,-0.1041136085987091,-0.1479661911725998,-0.32189327478408813,0.0080157071352005,0.40646892786026,0.17670349776744843,-0.11870051920413971,0.06668459624052048,-0.0077753327786922455,-0.0853877067565918,0.03622785955667496,0.022207416594028473,-0.1716664731502533,0.00936036929488182,-0.12650445103645325,0.1116786003112793,0.1721886694431305,0.018712684512138367,-0.029012983664870262,0.19890916347503662,0.004050761461257935,-0.06176470220088959,0.03496668487787247,0.010774612426757812,-0.10733450204133987,0.034917011857032776,-0.1856822967529297,-0.0436706580221653,0.08982815593481064,-0.16002188622951508,-0.01822887361049652,0.05696277320384979,-0.2110522985458374,0.03147541731595993,-0.009835068136453629,-0.059302788227796555,-0.060756754130125046,0.07512637972831726,-0.20573465526103973,0.023113828152418137,0.2548554837703705,-0.23949584364891052,0.17401018738746643,0.2319977581501007,0.09833789616823196,0.024068880826234818,0.12989574670791626,0.04740560054779053,-0.012434778735041618,-0.0918111503124237,-0.15748904645442963,-0.08378004282712936,-0.004624858498573303,-0.005299612879753113,0.055329449474811554,0.04065752774477005]
\ No newline at end of file
descriptorPenny=[-0.005881071090698242,0.035252682864665985,0.07666284590959549,-0.06133250892162323,-0.08197010308504105,0.014184653759002686,0.005242734216153622,-0.19035089015960693,0.13593840599060059,-0.09199994057416916,0.055665574967861176,-0.09802958369255066,-0.25256460905075073,0.05049600824713707,0.003990292549133301,0.15637768805027008,-0.051415905356407166,-0.28471335768699646,-0.16898605227470398,-0.06020534038543701,-0.006151877343654633,-0.011471755802631378,-0.005520425736904144,0.1003769263625145,-0.20014473795890808,-0.26222628355026245,-0.08959437161684036,-0.12957212328910828,-0.029134854674339294,-0.062256816774606705,-0.04312460869550705,0.08265750110149384,-0.14303170144557953,0.011085912585258484,0.10712425410747528,0.04088181257247925,0.0007619466632604599,-0.1466120481491089,0.260177880525589,-0.009320661425590515,-0.25915971398353577,-0.019178807735443115,0.11162054538726807,0.2115343064069748,0.21638226509094238,0.0045770928263664246,0.07745259255170822,-0.08094276487827301,0.13608475029468536,-0.31285080313682556,0.1226205825805664,0.14514890313148499,0.10254833102226257,0.11901462823152542,0.07176060229539871,-0.21035197377204895,-0.016285259276628494,0.11171358823776245,-0.22099058330059052,0.14237764477729797,0.06349137425422668,-0.09368033707141876,-0.07181331515312195,-0.12528066337108612,0.22090691328048706,0.16226458549499512,-0.16031241416931152,-0.2046929895877838,0.1601838767528534,-0.2115079164505005,-0.07380392402410507,0.06603492796421051,-0.19789603352546692,-0.15601742267608643,-0.285112589597702,-0.03280213475227356,0.41335564851760864,0.17521080374717712,-0.2080937772989273,0.026215683668851852,-0.11048824340105057,0.019838571548461914,-0.020542338490486145,0.11541588604450226,0.03076190873980522,-0.0232694149017334,0.0033110976219177246,0.028120767325162888,0.27188798785209656,-0.0759110227227211,-0.06393658369779587,0.22212064266204834,-0.048158712685108185,-0.03439009189605713,-0.08470702916383743,0.07175514847040176,-0.09915667027235031,-0.019413530826568604,-0.08236535638570786,-0.0213082954287529,-0.030551567673683167,0.1010485291481018,0.07094472646713257,0.15863054990768433,-0.200668603181839,0.16280920803546906,-0.07447126507759094,-0.007395192980766296,-0.023165743798017502,0.01478651911020279,-0.1056482344865799,-0.07202574610710144,0.17325882613658905,-0.28997519612312317,0.12274238467216492,0.18649500608444214,0.01748467981815338,0.03461623191833496,-0.009980626404285431,0.06018088012933731,0.11674463003873825,0.033887870609760284,-0.218181312084198,-0.05439690500497818,0.10635554790496826,-0.0007329434156417847,-0.021141983568668365,0.0820411890745163]
\ No newline at end of file
descriptorRaj=[-0.15496353805065155,0.043691255152225494,0.03724939376115799,0.016656994819641113,-0.049970414489507675,0.08455478399991989,-0.045141857117414474,-0.0610889196395874,0.11937284469604492,-0.08181658387184143,0.27349838614463806,0.025957435369491577,-0.2195511758327484,-0.015493396669626236,-0.10711826384067535,0.07597102224826813,-0.08671779185533524,-0.13454240560531616,-0.06495959311723709,-0.036077070981264114,-0.006286047399044037,0.02592059224843979,-0.014918237924575806,0.009378507733345032,-0.11585316807031631,-0.3628930449485779,-0.10048630833625793,-0.07691314816474915,0.001719092484563589,-0.03902893140912056,-0.03469265252351761,-0.020092345774173737,-0.19703790545463562,0.008729912340641022,0.03802715986967087,0.09341692179441452,-0.11134380102157593,-0.009015034884214401,0.14202813804149628,0.026977384462952614,-0.09668654203414917,0.0022040903568267822,0.031198769807815552,0.2793624997138977,0.17439183592796326,0.07265886664390564,0.006268925964832306,-0.13531382381916046,0.05125536024570465,-0.2435196340084076,0.10533908754587173,0.1630076915025711,0.0991237536072731,0.11345860362052917,0.07629163563251495,-0.16580615937709808,-0.024815142154693604,0.15595972537994385,-0.14484508335590363,0.04327581822872162,-0.053320854902267456,0.03464396297931671,-0.005613304674625397,-0.11897846311330795,0.18600404262542725,0.15069319307804108,-0.11564309895038605,-0.18291205167770386,0.1789712905883789,-0.18346670269966125,-0.017721213400363922,0.0858432874083519,-0.07925683259963989,-0.2650236487388611,-0.24118715524673462,0.10106492042541504,0.41431039571762085,0.2085058093070984,-0.09770824015140533,0.02742883935570717,-0.05898953229188919,-0.07701464742422104,0.028122015297412872,-0.0040946416556835175,-0.10734926909208298,-0.016904285177588463,-0.06405238062143326,0.008443355560302734,0.201746866106987,0.04558864235877991,-0.04860920086503029,0.1356211006641388,-0.06206119433045387,0.005768755450844765,0.07818647474050522,0.05297157168388367,-0.10372574627399445,0.042196549475193024,-0.16179141402244568,-0.031241916120052338,-0.11580236256122589,-0.08878964930772781,0.02395106852054596,0.059674158692359924,-0.14112776517868042,0.16106471419334412,-0.01805580034852028,-0.036857277154922485,-0.029434598982334137,0.0012199431657791138,-0.13350501656532288,0.02282126620411873,0.1603221446275711,-0.33153676986694336,0.22605177760124207,0.11830978095531464,0.11549906432628632,0.21082323789596558,0.11426876485347748,0.05354222655296326,-0.03111746534705162,-0.025937475264072418,-0.2007763385772705,-0.04948417842388153,-0.0020427852869033813,-0.045253459364175797,0.04411966726183891,0.02899453043937683]
\ No newline at end of file
descriptorSheldon=[-0.04457738250494003,-0.043990347534418106,-0.025750618427991867,0.016650959849357605,-0.05218123272061348,-0.051934950053691864,0.009930811822414398,-0.016631752252578735,0.11751081049442291,0.040901102125644684,0.221496120095253,-0.08035802841186523,-0.23618969321250916,-0.025104589760303497,-0.02730524353682995,0.10455028712749481,-0.12426766008138657,-0.08866177499294281,-0.14048157632350922,-0.0403841957449913,-0.041014257818460464,0.11965540796518326,-0.015900693833827972,0.08545821160078049,-0.10701243579387665,-0.29199138283729553,-0.09832726418972015,-0.15806680917739868,0.0072528645396232605,-0.09897308796644211,0.02866358309984207,0.17405939102172852,-0.17985643446445465,-0.11151746660470963,0.12910649180412292,0.023578159511089325,-0.048720214515924454,-0.023027973249554634,0.24042245745658875,0.09909329563379288,-0.1479608118534088,-0.03488050401210785,0.023121362552046776,0.35197967290878296,0.1343260109424591,0.025992773473262787,0.027310341596603394,-0.07258803397417068,0.09854952991008759,-0.265401691198349,0.09129731357097626,0.18322107195854187,0.14081576466560364,0.08943489193916321,0.07813340425491333,-0.18660837411880493,0.012191173620522022,0.13849547505378723,-0.19889040291309357,0.14307448267936707,0.04158569499850273,-0.06411264091730118,-0.0019705994054675102,-0.10619327425956726,0.1386486440896988,0.019395824521780014,-0.10675810277462006,-0.06835392862558365,0.19760599732398987,-0.10146252810955048,-0.005423944443464279,0.13664257526397705,-0.10927100479602814,-0.24490387737751007,-0.27884605526924133,0.025820117443799973,0.4047747850418091,0.08949815481901169,-0.21505051851272583,-0.004949783906340599,-0.055362775921821594,-0.06972619891166687,-0.008085280656814575,0.027283571660518646,-0.0860084667801857,0.017455322667956352,-0.07179910689592361,0.0487312376499176,0.2027968317270279,0.011654987931251526,-0.007634447887539864,0.1842338740825653,-0.06324949115514755,-0.07316185534000397,0.122040756046772,0.08807042986154556,-0.050539594143629074,-0.08226519823074341,-0.11426769942045212,0.000004231929779052734,0.027701571583747864,-0.2017858624458313,-0.021970629692077637,0.056306734681129456,-0.17306512594223022,0.20115011930465698,0.008511713705956936,-0.07041959464550018,-0.08531112968921661,-0.04338337481021881,-0.07298384606838226,0.07277841866016388,0.2568773627281189,-0.24525469541549683,0.21955132484436035,0.07719040662050247,0.011913388967514038,0.17791587114334106,-0.05779615789651871,0.10519659519195557,-0.1535077542066574,-0.09388455748558044,-0.1587708741426468,-0.14040732383728027,0.0359625518321991,0.04092983901500702,0.04888205975294113,0.014610011130571365]
\ No newline at end of file
<!DOCTYPE html>
<html>
<head>
<script src="./imgdata/howard.json"></script>
<script src="./imgdata/leonard.json"></script>
<script src="./imgdata/penny.json"></script>
<script src="./imgdata/raj.json"></script>
<script src="./imgdata/sheldon.json"></script>
<script src="./descriptors/howard.json"></script>
<script src="./descriptors/leonard.json"></script>
<script src="./descriptors/penny.json"></script>
<script src="./descriptors/raj.json"></script>
<script src="./descriptors/sheldon.json"></script>
<script src="../dist/face-recognition.min.js"></script>
</head>
<body>
<label for="file">Load Weights File: </label>
<input type="file" onchange="onWeightsSelected(event)"/>
<script>
function onWeightsSelected(e) {
const selectedFile = e.target.files[0]
const reader = new FileReader()
reader.onload = function(re) {
const weights = new Float32Array(re.target.result)
runTests(weights)
}
reader.readAsArrayBuffer(selectedFile)
}
async function runTests(weights) {
console.log('running...')
const net = facerecognition.faceRecognitionNet(weights)
await run(imgdataHoward, descriptorHoward, net, 'howard')
await run(imgdataLeonard, descriptorLeonard, net, 'leonard')
await run(imgdataPenny, descriptorPenny, net, 'penny')
await run(imgdataRaj, descriptorRaj, net, 'raj')
await run(imgdataSheldon, descriptorSheldon, net, 'sheldon')
console.log('done')
}
async function run(data, refDescriptor, net, name) {
const input = flatten(flatten(data))
console.time('computeFaceDescriptor')
const desc = await net.computeFaceDescriptor(input)
console.timeEnd('computeFaceDescriptor')
const distance = facerecognition.euclideanDistance(desc, refDescriptor)
if (distance > 1e-6)
console.error('failed for descriptor %s with distance %s', name, distance)
}
function flatten(arr) {
return arr.reduce((res, curr) => res.concat(curr), [])
}
</script>
</body>
</html>
\ No newline at end of file