Commit 2bb843f3 by vincent

added example

parent 55280b53
export declare function euclideanDistance(arr1: number[], arr2: number[]): number;
export declare function euclideanDistance(arr1: number[] | Float32Array, arr2: number[] | Float32Array): number;
// Computes the euclidean (L2) distance between two descriptor vectors.
// Accepts plain number arrays or Float32Arrays; both inputs must have
// the same length, otherwise an Error is thrown.
// Fix: removed the leftover pre-image diff lines that duplicated the
// return statement and made this block syntactically invalid.
export function euclideanDistance(arr1, arr2) {
    if (arr1.length !== arr2.length)
        throw new Error('euclideanDistance: arr1.length !== arr2.length');
    // copy into plain arrays so .map/.reduce behave uniformly for Float32Array inputs
    var desc1 = Array.from(arr1);
    var desc2 = Array.from(arr2);
    return Math.sqrt(desc1
        .map(function (val, i) { return val - desc2[i]; })
        .reduce(function (res, diff) { return res + Math.pow(diff, 2); }, 0));
}
//# sourceMappingURL=euclideanDistance.js.map
\ No newline at end of file
{"version":3,"file":"euclideanDistance.js","sourceRoot":"","sources":["../src/euclideanDistance.ts"],"names":[],"mappings":"AAAA,MAAM,4BAA4B,IAAc,EAAE,IAAc;IAC9D,IAAI,IAAI,CAAC,MAAM,KAAK,IAAI,CAAC,MAAM;QAC7B,MAAM,IAAI,KAAK,CAAC,gDAAgD,CAAC,CAAA;IAEnE,OAAO,IAAI,CAAC,IAAI,CACd,IAAI;SACD,GAAG,CAAC,UAAC,GAAG,EAAE,CAAC,IAAK,OAAA,GAAG,GAAG,IAAI,CAAC,CAAC,CAAC,EAAb,CAAa,CAAC;SAC9B,MAAM,CAAC,UAAC,GAAG,EAAE,IAAI,IAAK,OAAA,GAAG,GAAG,IAAI,CAAC,GAAG,CAAC,IAAI,EAAE,CAAC,CAAC,EAAvB,CAAuB,EAAE,CAAC,CAAC,CACrD,CAAA;AACH,CAAC"}
\ No newline at end of file
{"version":3,"file":"euclideanDistance.js","sourceRoot":"","sources":["../src/euclideanDistance.ts"],"names":[],"mappings":"AAAA,MAAM,4BAA4B,IAA6B,EAAE,IAA6B;IAC5F,IAAI,IAAI,CAAC,MAAM,KAAK,IAAI,CAAC,MAAM;QAC7B,MAAM,IAAI,KAAK,CAAC,gDAAgD,CAAC,CAAA;IAEnE,IAAM,KAAK,GAAG,KAAK,CAAC,IAAI,CAAC,IAAI,CAAC,CAAA;IAC9B,IAAM,KAAK,GAAG,KAAK,CAAC,IAAI,CAAC,IAAI,CAAC,CAAA;IAE9B,OAAO,IAAI,CAAC,IAAI,CACd,KAAK;SACF,GAAG,CAAC,UAAC,GAAG,EAAE,CAAC,IAAK,OAAA,GAAG,GAAG,KAAK,CAAC,CAAC,CAAC,EAAd,CAAc,CAAC;SAC/B,MAAM,CAAC,UAAC,GAAG,EAAE,IAAI,IAAK,OAAA,GAAG,GAAG,IAAI,CAAC,GAAG,CAAC,IAAI,EAAE,CAAC,CAAC,EAAvB,CAAuB,EAAE,CAAC,CAAC,CACrD,CAAA;AACH,CAAC"}
\ No newline at end of file
This source diff could not be displayed because it is too large. You can view the blob instead.
import * as tf from '@tensorflow/tfjs-core';
// Type declaration for the face recognition net factory.
// Fix: the diff residue kept both the pre- and post-commit method
// signatures, declaring computeFaceDescriptor/computeFaceDescriptorSync
// twice; only the post-commit (number[] | ImageData) signatures are kept.
export declare function faceRecognitionNet(weights: Float32Array): {
    computeFaceDescriptor: (input: number[] | ImageData) => Promise<Int32Array | Uint8Array | Float32Array>;
    computeFaceDescriptorSync: (input: number[] | ImageData) => Int32Array | Uint8Array | Float32Array;
    forward: (input: number[] | ImageData) => tf.Tensor<tf.Rank.R2>;
};
......@@ -33,7 +33,8 @@ export function faceRecognitionNet(weights) {
var computeFaceDescriptorSync = function (input) { return forward(input).dataSync(); };
return {
computeFaceDescriptor: computeFaceDescriptor,
computeFaceDescriptorSync: computeFaceDescriptorSync
computeFaceDescriptorSync: computeFaceDescriptorSync,
forward: forward
};
}
//# sourceMappingURL=index.js.map
\ No newline at end of file
{"version":3,"file":"index.js","sourceRoot":"","sources":["../../src/faceRecognitionNet/index.ts"],"names":[],"mappings":"AAAA,OAAO,KAAK,EAAE,MAAM,uBAAuB,CAAC;AAE5C,OAAO,EAAE,SAAS,EAAE,MAAM,cAAc,CAAC;AACzC,OAAO,EAAE,QAAQ,EAAE,MAAM,aAAa,CAAC;AACvC,OAAO,EAAE,aAAa,EAAE,MAAM,iBAAiB,CAAC;AAChD,OAAO,EAAE,QAAQ,EAAE,YAAY,EAAE,MAAM,iBAAiB,CAAC;AAEzD,MAAM,6BAA6B,OAAqB;IACtD,IAAM,MAAM,GAAG,aAAa,CAAC,OAAO,CAAC,CAAA;IAErC,iBAAiB,KAAe;QAE9B,OAAO,EAAE,CAAC,IAAI,CAAC;YAEb,IAAM,CAAC,GAAG,SAAS,CAAC,KAAK,CAAC,CAAA;YAE1B,IAAI,GAAG,GAAG,QAAQ,CAAC,CAAC,EAAE,MAAM,CAAC,WAAW,CAAC,CAAA;YACzC,GAAG,GAAG,EAAE,CAAC,OAAO,CAAC,GAAG,EAAE,CAAC,EAAE,CAAC,EAAE,OAAO,CAAC,CAAA;YAEpC,GAAG,GAAG,QAAQ,CAAC,GAAG,EAAE,MAAM,CAAC,QAAQ,CAAC,CAAA;YACpC,GAAG,GAAG,QAAQ,CAAC,GAAG,EAAE,MAAM,CAAC,QAAQ,CAAC,CAAA;YACpC,GAAG,GAAG,QAAQ,CAAC,GAAG,EAAE,MAAM,CAAC,QAAQ,CAAC,CAAA;YAEpC,GAAG,GAAG,YAAY,CAAC,GAAG,EAAE,MAAM,CAAC,WAAW,CAAC,CAAA;YAC3C,GAAG,GAAG,QAAQ,CAAC,GAAG,EAAE,MAAM,CAAC,QAAQ,CAAC,CAAA;YACpC,GAAG,GAAG,QAAQ,CAAC,GAAG,EAAE,MAAM,CAAC,QAAQ,CAAC,CAAA;YACpC,GAAG,GAAG,QAAQ,CAAC,GAAG,EAAE,MAAM,CAAC,QAAQ,CAAC,CAAA;YAEpC,GAAG,GAAG,YAAY,CAAC,GAAG,EAAE,MAAM,CAAC,YAAY,CAAC,CAAA;YAC5C,GAAG,GAAG,QAAQ,CAAC,GAAG,EAAE,MAAM,CAAC,SAAS,CAAC,CAAA;YACrC,GAAG,GAAG,QAAQ,CAAC,GAAG,EAAE,MAAM,CAAC,SAAS,CAAC,CAAA;YAErC,GAAG,GAAG,YAAY,CAAC,GAAG,EAAE,MAAM,CAAC,YAAY,CAAC,CAAA;YAC5C,GAAG,GAAG,QAAQ,CAAC,GAAG,EAAE,MAAM,CAAC,SAAS,CAAC,CAAA;YACrC,GAAG,GAAG,QAAQ,CAAC,GAAG,EAAE,MAAM,CAAC,SAAS,CAAC,CAAA;YACrC,GAAG,GAAG,YAAY,CAAC,GAAG,EAAE,MAAM,CAAC,gBAAgB,CAAC,CAAA;YAEhD,IAAM,SAAS,GAAG,GAAG,CAAC,IAAI,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,CAAgB,CAAA;YACjD,IAAM,cAAc,GAAG,EAAE,CAAC,MAAM,CAAC,SAAS,EAAE,MAAM,CAAC,EAAE,CAAC,CAAA;YAEtD,OAAO,cAAc,CAAA;QACvB,CAAC,CAAC,CAAA;IACJ,CAAC;IAED,IAAM,qBAAqB,GAAG,UAAC,KAAe,IAAK,OAAA,OAAO,CAAC,KAAK,CAAC,CAAC,IAAI,EAAE,EAArB,CAAqB,CAAA;IACxE,IAAM,yBAAyB,GAAG,UAAC,KAAe,IAAK,OAAA,OAAO,CAAC,KAAK,CAAC,CAAC,QAAQ,EAAE,EAAzB,CAAyB,CAAA;IAEhF,OAAO;QACL,qBAAqB,uBAAA;QACrB,yBAAyB,2BAAA;KAC1B,CAAA;AACH,CAAC"}
\ No newline at end of file
{"version":3,"file":"index.js","sourceRoot":"","sources":["../../src/faceRecognitionNet/index.ts"],"names":[],"mappings":"AAAA,OAAO,KAAK,EAAE,MAAM,uBAAuB,CAAC;AAE5C,OAAO,EAAE,SAAS,EAAE,MAAM,cAAc,CAAC;AACzC,OAAO,EAAE,QAAQ,EAAE,MAAM,aAAa,CAAC;AACvC,OAAO,EAAE,aAAa,EAAE,MAAM,iBAAiB,CAAC;AAChD,OAAO,EAAE,QAAQ,EAAE,YAAY,EAAE,MAAM,iBAAiB,CAAC;AAEzD,MAAM,6BAA6B,OAAqB;IACtD,IAAM,MAAM,GAAG,aAAa,CAAC,OAAO,CAAC,CAAA;IAErC,iBAAiB,KAA2B;QAE1C,OAAO,EAAE,CAAC,IAAI,CAAC;YAEb,IAAM,CAAC,GAAG,SAAS,CAAC,KAAK,CAAC,CAAA;YAE1B,IAAI,GAAG,GAAG,QAAQ,CAAC,CAAC,EAAE,MAAM,CAAC,WAAW,CAAC,CAAA;YACzC,GAAG,GAAG,EAAE,CAAC,OAAO,CAAC,GAAG,EAAE,CAAC,EAAE,CAAC,EAAE,OAAO,CAAC,CAAA;YAEpC,GAAG,GAAG,QAAQ,CAAC,GAAG,EAAE,MAAM,CAAC,QAAQ,CAAC,CAAA;YACpC,GAAG,GAAG,QAAQ,CAAC,GAAG,EAAE,MAAM,CAAC,QAAQ,CAAC,CAAA;YACpC,GAAG,GAAG,QAAQ,CAAC,GAAG,EAAE,MAAM,CAAC,QAAQ,CAAC,CAAA;YAEpC,GAAG,GAAG,YAAY,CAAC,GAAG,EAAE,MAAM,CAAC,WAAW,CAAC,CAAA;YAC3C,GAAG,GAAG,QAAQ,CAAC,GAAG,EAAE,MAAM,CAAC,QAAQ,CAAC,CAAA;YACpC,GAAG,GAAG,QAAQ,CAAC,GAAG,EAAE,MAAM,CAAC,QAAQ,CAAC,CAAA;YACpC,GAAG,GAAG,QAAQ,CAAC,GAAG,EAAE,MAAM,CAAC,QAAQ,CAAC,CAAA;YAEpC,GAAG,GAAG,YAAY,CAAC,GAAG,EAAE,MAAM,CAAC,YAAY,CAAC,CAAA;YAC5C,GAAG,GAAG,QAAQ,CAAC,GAAG,EAAE,MAAM,CAAC,SAAS,CAAC,CAAA;YACrC,GAAG,GAAG,QAAQ,CAAC,GAAG,EAAE,MAAM,CAAC,SAAS,CAAC,CAAA;YAErC,GAAG,GAAG,YAAY,CAAC,GAAG,EAAE,MAAM,CAAC,YAAY,CAAC,CAAA;YAC5C,GAAG,GAAG,QAAQ,CAAC,GAAG,EAAE,MAAM,CAAC,SAAS,CAAC,CAAA;YACrC,GAAG,GAAG,QAAQ,CAAC,GAAG,EAAE,MAAM,CAAC,SAAS,CAAC,CAAA;YACrC,GAAG,GAAG,YAAY,CAAC,GAAG,EAAE,MAAM,CAAC,gBAAgB,CAAC,CAAA;YAEhD,IAAM,SAAS,GAAG,GAAG,CAAC,IAAI,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,CAAgB,CAAA;YACjD,IAAM,cAAc,GAAG,EAAE,CAAC,MAAM,CAAC,SAAS,EAAE,MAAM,CAAC,EAAE,CAAC,CAAA;YAEtD,OAAO,cAAc,CAAA;QACvB,CAAC,CAAC,CAAA;IACJ,CAAC;IAED,IAAM,qBAAqB,GAAG,UAAC,KAA2B,IAAK,OAAA,OAAO,CAAC,KAAK,CAAC,CAAC,IAAI,EAAE,EAArB,CAAqB,CAAA;IACpF,IAAM,yBAAyB,GAAG,UAAC,KAA2B,IAAK,OAAA,OAAO,CAAC,KAAK,CAAC,CAAC,QAAQ,EAAE,EAAzB,CAAyB,CAAA;IAE5F,OAAO;QACL,qBAAqB,uBAAA;QACrB,yBAAyB,2BAAA;QACzB,OAAO,SAAA;KACR,CAAA;AACH,CAAC"}
\ No newline at end of file
import * as tf from '@tensorflow/tfjs-core';
export declare function normalize(input: number[]): tf.Tensor4D;
export declare function normalize(input: number[] | ImageData): tf.Tensor4D;
......@@ -5,7 +5,9 @@ export function normalize(input) {
var avg_g = tf.fill([1, 150, 150, 1], 117.001);
var avg_b = tf.fill([1, 150, 150, 1], 104.298);
var avg_rgb = tf.concat([avg_r, avg_g, avg_b], 3);
var x = tf.tensor4d(input, [1, 150, 150, 3]);
var x = input instanceof ImageData
? tf.cast(tf.reshape(tf.fromPixels(input), [1, 150, 150, 3]), 'float32')
: tf.tensor4d(input, [1, 150, 150, 3]);
return tf.div(tf.sub(x, avg_rgb), tf.fill(x.shape, 256));
});
}
......
{"version":3,"file":"normalize.js","sourceRoot":"","sources":["../src/normalize.ts"],"names":[],"mappings":"AAAA,OAAO,KAAK,EAAE,MAAM,uBAAuB,CAAC;AAE5C,MAAM,oBAAoB,KAAe;IACvC,OAAO,EAAE,CAAC,IAAI,CAAC;QACb,IAAM,KAAK,GAAG,EAAE,CAAC,IAAI,CAAC,CAAC,CAAC,EAAE,GAAG,EAAE,GAAG,EAAE,CAAC,CAAC,EAAE,OAAO,CAAC,CAAC;QACjD,IAAM,KAAK,GAAG,EAAE,CAAC,IAAI,CAAC,CAAC,CAAC,EAAE,GAAG,EAAE,GAAG,EAAE,CAAC,CAAC,EAAE,OAAO,CAAC,CAAC;QACjD,IAAM,KAAK,GAAG,EAAE,CAAC,IAAI,CAAC,CAAC,CAAC,EAAE,GAAG,EAAE,GAAG,EAAE,CAAC,CAAC,EAAE,OAAO,CAAC,CAAC;QACjD,IAAM,OAAO,GAAG,EAAE,CAAC,MAAM,CAAC,CAAC,KAAK,EAAE,KAAK,EAAE,KAAK,CAAC,EAAE,CAAC,CAAC,CAAA;QAEnD,IAAM,CAAC,GAAG,EAAE,CAAC,QAAQ,CAAC,KAAK,EAAE,CAAC,CAAC,EAAE,GAAG,EAAE,GAAG,EAAE,CAAC,CAAC,CAAC,CAAA;QAC9C,OAAO,EAAE,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,CAAC,CAAC,EAAE,OAAO,CAAC,EAAE,EAAE,CAAC,IAAI,CAAC,CAAC,CAAC,KAAK,EAAE,GAAG,CAAC,CAAC,CAAA;IAC1D,CAAC,CAAC,CAAA;AACJ,CAAC"}
\ No newline at end of file
{"version":3,"file":"normalize.js","sourceRoot":"","sources":["../src/normalize.ts"],"names":[],"mappings":"AAAA,OAAO,KAAK,EAAE,MAAM,uBAAuB,CAAC;AAE5C,MAAM,oBAAoB,KAA2B;IACnD,OAAO,EAAE,CAAC,IAAI,CAAC;QACb,IAAM,KAAK,GAAG,EAAE,CAAC,IAAI,CAAC,CAAC,CAAC,EAAE,GAAG,EAAE,GAAG,EAAE,CAAC,CAAC,EAAE,OAAO,CAAC,CAAC;QACjD,IAAM,KAAK,GAAG,EAAE,CAAC,IAAI,CAAC,CAAC,CAAC,EAAE,GAAG,EAAE,GAAG,EAAE,CAAC,CAAC,EAAE,OAAO,CAAC,CAAC;QACjD,IAAM,KAAK,GAAG,EAAE,CAAC,IAAI,CAAC,CAAC,CAAC,EAAE,GAAG,EAAE,GAAG,EAAE,CAAC,CAAC,EAAE,OAAO,CAAC,CAAC;QACjD,IAAM,OAAO,GAAG,EAAE,CAAC,MAAM,CAAC,CAAC,KAAK,EAAE,KAAK,EAAE,KAAK,CAAC,EAAE,CAAC,CAAC,CAAA;QAEnD,IAAM,CAAC,GAAG,KAAK,YAAY,SAAS;YAClC,CAAC,CAAC,EAAE,CAAC,IAAI,CAAC,EAAE,CAAC,OAAO,CAAC,EAAE,CAAC,UAAU,CAAC,KAAK,CAAC,EAAE,CAAC,CAAC,EAAE,GAAG,EAAE,GAAG,EAAE,CAAC,CAAC,CAAC,EAAE,SAAS,CAAC;YACxE,CAAC,CAAC,EAAE,CAAC,QAAQ,CAAC,KAAK,EAAE,CAAC,CAAC,EAAE,GAAG,EAAE,GAAG,EAAE,CAAC,CAAC,CAAC,CAAA;QACxC,OAAO,EAAE,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,CAAC,CAAC,EAAE,OAAO,CAAC,EAAE,EAAE,CAAC,IAAI,CAAC,CAAC,CAAC,KAAK,EAAE,GAAG,CAAC,CAAC,CAAA;IAC1D,CAAC,CAAC,CAAA;AACJ,CAAC"}
\ No newline at end of file
{
"scripts": {
"start": "node server.js"
},
"author": "justadudewhohacks",
"license": "MIT",
"dependencies": {
"axios": "^0.18.0",
"express": "^4.16.3"
}
}
const express = require('express')
const path = require('path')

const app = express()

// Serve example images, model weights, the bundled library and axios from disk.
app.use(express.static(path.join(__dirname, '../images')))
app.use(express.static(path.join(__dirname, '../../weights')))
app.use(express.static(path.join(__dirname, '../../dist')))
app.use(express.static(path.join(__dirname, './node_modules/axios/dist')))
app.use(express.static(path.join(__dirname, 'views')))

// Fix: res.sendFile requires an absolute path (or a 'root' option); the
// original relative './index.html' would throw a TypeError whenever this
// route was actually hit (it is normally shadowed by the static handler).
app.get('/', (req, res) => res.sendFile(path.join(__dirname, 'views', 'index.html')))

app.listen(3000, () => console.log('Listening on port 3000!'))
\ No newline at end of file
<!DOCTYPE html>
<html>
<head>
<script src="face-recognition.min.js"></script>
<script src="axios.min.js"></script>
<link rel="stylesheet" href="https://cdnjs.cloudflare.com/ajax/libs/materialize/1.0.0-beta/css/materialize.min.css">
</head>
<style>
.center-content {
display: flex;
flex-direction: column;
justify-content: center;
align-items: center;
}
.bold {
font-weight: bold;
}
</style>
<body>
<div class="center-content col s12">
<div>
<div class="row center-content">
<label id="status"></label>
</div>
<div class="row center-content">
<img id="face" src=""/>
</div>
<div class="row">
<label for="prediction">Prediction:</label>
<input disabled value="-" id="prediction" type="text" class="bold">
</div>
<div class="row">
<label for="time">Time:</label>
<input disabled value="-" id="time" type="text" class="bold">
</div>
<div class="row">
<label for="fps">Estimated Fps:</label>
<input disabled value="-" id="fps" type="text" class="bold">
</div>
<div class="row">
<button
class="waves-effect waves-light btn"
id="stop"
onclick="onToggleStop()"
>
Stop
</button>
<button
class="waves-effect waves-light btn"
onclick="onSlower()"
>
<i class="material-icons left">-</i> Slower
</button>
<button
class="waves-effect waves-light btn"
onclick="onFaster()"
>
<i class="material-icons left">+</i> Faster
</button>
</div>
<div class="row">
<label for="interval">Interval:</label>
<input disabled value="2000" id="interval" type="text" class="bold">
</div>
</div>
</div>
<script>
// Class labels; they match the image directory names on the server.
const classes = ['amy', 'bernadette', 'howard', 'leonard', 'penny', 'raj', 'sheldon', 'stuart']
// for 150 x 150 sized face images 0.6 is a good threshold to
// judge whether two face descriptors are similar or not
const threshold = 0.6
// Delay in ms between two recognition iterations (adjustable via the UI).
let interval = 2000
// True while the loop is paused via the Stop button.
let isStop = false
// Reference descriptors, one { descriptor, className } per class (filled in run()).
let trainDescriptorsByClass = []
// The loaded face recognition network (populated in run()).
let net = {}
// Iteration state: next image index and class index to display.
let currImageIdx = 2, currClassIdx = 0
// Handle of the pending setTimeout, so it can be cancelled on stop.
let to = null
// Increases the recognition interval by 100ms (slower cycling) and
// reflects the new value in the UI.
// NOTE(review): Math.max(..., 0) is a no-op when adding a positive step —
// presumably the clamps in onSlower/onFaster were meant the other way
// around; confirm the intended bounds.
function onSlower() {
interval = Math.max(interval + 100, 0)
document.getElementById('interval').value = interval
}
// Decreases the recognition interval by 100ms (faster cycling) and
// reflects the new value in the UI.
// Fix: the original used Math.min(interval - 100, 2000), which never
// bounds the subtraction and lets the interval go negative after
// repeated clicks; the lower bound must be enforced with Math.max(..., 0).
function onFaster() {
interval = Math.max(interval - 100, 0)
document.getElementById('interval').value = interval
}
// Toggles between paused and running recognition and updates button/status.
function onToggleStop() {
clearTimeout(to)
isStop = !isStop
const stopBtn = document.getElementById('stop')
stopBtn.innerHTML = isStop ? 'Continue' : 'Stop'
if (isStop) {
setStatusText('stopped')
} else {
setStatusText('running face recognition:')
runFaceRecognition()
}
}
// Truncates a number to two decimal places (floors, does not round up).
function round(num) {
const scaled = Math.floor(num * 100)
return scaled / 100
}
// Builds the request path of the idx-th image of a class,
// e.g. getImageUri('amy', 2) -> '/amy/amy2.png'.
function getImageUri(className, idx) {
return '/' + className + '/' + className + idx + '.png'
}
// Displays a status message in the #status label.
// Fix: the original assigned to '.value', which is meaningless on a
// <label> element (it only set an expando property, so the status text
// was never rendered). textContent actually displays the message.
function setStatusText(text) {
document.getElementById('status').textContent = text
}
// Writes the recognition result into the (disabled) #prediction input.
function setPrediction(result) {
const field = document.getElementById('prediction')
field.value = result
}
// Shows the forward-pass duration and the fps estimate derived from it.
function displayTimeStats(timeInMs) {
const timeField = document.getElementById('time')
const fpsField = document.getElementById('fps')
timeField.value = timeInMs + ' ms'
fpsField.value = '' + round(1000 / timeInMs)
}
// Returns the <img> element used to display the current face image.
function getImg() {
const imgElement = document.getElementById('face')
return imgElement
}
// Points the face <img> at a new source (data URI or URL).
function displayImage(src) {
const img = getImg()
img.src = src
}
// Converts an image Blob into a data-URI string via FileReader.
// Resolves with the data URI, rejects on read errors.
function bufferToImgSrc(buf) {
return new Promise(function (resolve, reject) {
const reader = new window.FileReader()
reader.onerror = reject
reader.onload = function () {
resolve(reader.result)
}
reader.readAsDataURL(buf)
})
}
// Draws the image behind 'src' onto an offscreen 150x150 canvas and
// resolves with its ImageData; rejects if the image fails to load.
function imgSrcToData(src) {
return new Promise(function (resolve, reject) {
const canvas = document.createElement('canvas')
canvas.width = 150
canvas.height = 150
const ctx = canvas.getContext('2d')
const img = new Image()
img.onerror = reject
img.onload = function () {
ctx.drawImage(img, 0, 0)
resolve(ctx.getImageData(0, 0, 150, 150))
}
img.src = src
})
}
// Blob -> data URI -> 150x150 ImageData.
async function bufferToImageData(buf) {
const src = await bufferToImgSrc(buf)
return imgSrcToData(src)
}
// Downloads the serialized model weights and wraps them as a Float32Array.
async function fetchModelWeights() {
const response = await axios.get('face_recognition_model.weights', { responseType: 'arraybuffer' })
return new Float32Array(response.data)
}
// Fetches an image as a Blob from the given uri.
async function fetchImage(uri) {
const response = await axios.get(uri, { responseType: 'blob' })
return response.data
}
// Fetches the first image of every class and converts it to ImageData.
// Resolves with [{ imgData, className }, ...], one entry per class.
// Fixes: dropped the unused 'cb' parameter (never referenced, and the
// only caller passes no arguments) and the redundant 'return await'.
async function loadTrainingData() {
return Promise.all(classes.map(
async className => ({
imgData: await bufferToImageData(
await fetchImage(getImageUri(className, 1))
),
className
})
))
}
// Compares the query descriptor against every reference descriptor and
// returns the { distance, className } entry with the smallest distance.
function getBestMatch(queryDescriptor) {
const matches = trainDescriptorsByClass.map(function ({ descriptor, className }) {
const distance = round(facerecognition.euclideanDistance(descriptor, queryDescriptor))
return { distance, className }
})
return matches.reduce(function (best, curr) {
return best.distance < curr.distance ? best : curr
})
}
// Endless recognition loop: fetch the next image, run the network on it,
// display the best match, then schedule the next iteration via setTimeout.
// Fixes: 'unkown' -> 'unknown' in the prediction label, and next() is now
// called without arguments (it declares none; the old call passed (0, 0)).
async function runFaceRecognition() {
async function next() {
const imgBuf = await fetchImage(getImageUri(classes[currClassIdx], currImageIdx))
displayImage(await bufferToImgSrc(imgBuf))
const imageData = await imgSrcToData(getImg().src)
const ts = Date.now()
const result = await net.forward(imageData)
displayTimeStats(Date.now() - ts)
const descriptor = await result.data()
const bestMatch = getBestMatch(descriptor)
setPrediction(`${bestMatch.distance < threshold ? bestMatch.className : 'unknown'} (${bestMatch.distance})`)
// advance the image index only once every class has been shown
currImageIdx = currClassIdx === (classes.length - 1)
? currImageIdx + 1
: currImageIdx
currClassIdx = (currClassIdx + 1) % classes.length
// image indices cycle through 2..5 (image 1 is the reference image)
currImageIdx = (currImageIdx % 6) || 2
to = setTimeout(next, interval)
}
await next()
}
// Entry point: load the model, compute the reference descriptor of every
// class, then start the recognition loop. Errors are logged to console.
async function run() {
try {
setStatusText('loading model file...')
const weights = await fetchModelWeights()
net = facerecognition.faceRecognitionNet(weights)
setStatusText('computing initial descriptors...')
const trainImgDatas = await loadTrainingData()
trainDescriptorsByClass = await Promise.all(trainImgDatas.map(
async function ({ className, imgData }) {
const descriptor = await net.computeFaceDescriptor(imgData)
return { descriptor, className }
}
))
setStatusText('running face recognition:')
runFaceRecognition()
} catch (err) {
console.error(err)
}
}
</script>
</body>
</html>
\ No newline at end of file
export function euclideanDistance(arr1: number[], arr2: number[]) {
export function euclideanDistance(arr1: number[] | Float32Array, arr2: number[] | Float32Array) {
if (arr1.length !== arr2.length)
throw new Error('euclideanDistance: arr1.length !== arr2.length')
const desc1 = Array.from(arr1)
const desc2 = Array.from(arr2)
return Math.sqrt(
arr1
.map((val, i) => val - arr2[i])
desc1
.map((val, i) => val - desc2[i])
.reduce((res, diff) => res + Math.pow(diff, 2), 0)
)
}
\ No newline at end of file
......@@ -8,7 +8,7 @@ import { residual, residualDown } from './residualLayer';
export function faceRecognitionNet(weights: Float32Array) {
const params = extractParams(weights)
function forward(input: number[]) {
function forward(input: number[] | ImageData) {
return tf.tidy(() => {
......@@ -42,11 +42,12 @@ export function faceRecognitionNet(weights: Float32Array) {
})
}
const computeFaceDescriptor = (input: number[]) => forward(input).data()
const computeFaceDescriptorSync = (input: number[]) => forward(input).dataSync()
const computeFaceDescriptor = (input: number[] | ImageData) => forward(input).data()
const computeFaceDescriptorSync = (input: number[] | ImageData) => forward(input).dataSync()
return {
computeFaceDescriptor,
computeFaceDescriptorSync
computeFaceDescriptorSync,
forward
}
}
\ No newline at end of file
import * as tf from '@tensorflow/tfjs-core';
export function normalize(input: number[]): tf.Tensor4D {
export function normalize(input: number[] | ImageData): tf.Tensor4D {
return tf.tidy(() => {
const avg_r = tf.fill([1, 150, 150, 1], 122.782);
const avg_g = tf.fill([1, 150, 150, 1], 117.001);
const avg_b = tf.fill([1, 150, 150, 1], 104.298);
const avg_rgb = tf.concat([avg_r, avg_g, avg_b], 3)
const x = tf.tensor4d(input, [1, 150, 150, 3])
const x = input instanceof ImageData
? tf.cast(tf.reshape(tf.fromPixels(input), [1, 150, 150, 3]), 'float32')
: tf.tensor4d(input, [1, 150, 150, 3])
return tf.div(tf.sub(x, avg_rgb), tf.fill(x.shape, 256))
})
}
\ No newline at end of file
......@@ -18,10 +18,10 @@
<input type="file" onchange="onWeightsSelected(event)"/>
<script>
function onWeightsSelected(e) {
var selectedFile = e.target.files[0]
var reader = new FileReader()
const selectedFile = e.target.files[0]
const reader = new FileReader()
reader.onload = function(re) {
var weights = new Float32Array(re.target.result)
const weights = new Float32Array(re.target.result)
runTests(weights)
}
reader.readAsArrayBuffer(selectedFile)
......@@ -29,7 +29,7 @@
async function runTests(weights) {
console.log('running...')
var net = facerecognition.faceRecognitionNet(weights)
const net = facerecognition.faceRecognitionNet(weights)
await run(imgdataHoward, descriptorHoward, net, 'howard')
await run(imgdataLeonard, descriptorLeonard, net, 'leonard')
await run(imgdataPenny, descriptorPenny, net, 'penny')
......@@ -39,11 +39,11 @@
}
async function run(data, refDescriptor, net, name) {
var input = flatten(flatten(data))
const input = flatten(flatten(data))
console.time('computeFaceDescriptor')
var desc = await net.computeFaceDescriptor(input)
const desc = await net.computeFaceDescriptor(input)
console.timeEnd('computeFaceDescriptor')
var distance = facerecognition.euclideanDistance(desc, refDescriptor)
const distance = facerecognition.euclideanDistance(desc, refDescriptor)
if (distance > 1e-6)
console.error('failed for descriptor %s with distance %s', name, distance)
......
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment