Commit d50c2b14 by vincent

check in age_gender_model + AgeGenderNet loading from weightmap

parent 58e1e11b
@@ -170,13 +170,11 @@
 }
 
 async function run() {
-  // load face detection and face expression recognition models
-  // and load face landmark model for face alignment
+  // load face detection and age and gender recognition models
   await changeFaceDetector(SSD_MOBILENETV1)
   await faceapi.loadFaceLandmarkModel('/')
-  // TODO
-  const weights = await faceapi.fetchNetWeights('tmp/age_gender.weights')
-  await faceapi.nets.ageGenderNet.load(weights)
+  await faceapi.nets.ageGenderNet.load('/')
 
   // start processing image
   updateResults()
...
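Note: with this change the example loads the age and gender model from the checked-in model files like the other nets, instead of fetching a raw tmp/age_gender.weights file. A minimal usage sketch; predictAgeAndGender and its result shape are assumptions here, not shown in this diff:

    import * as faceapi from 'face-api.js';

    async function example(img: HTMLImageElement) {
      // load detector, landmark and age/gender models from the served model files
      await faceapi.nets.ssdMobilenetv1.load('/')
      await faceapi.loadFaceLandmarkModel('/')
      await faceapi.nets.ageGenderNet.load('/')

      // assumed inference entry point of AgeGenderNet (not part of this diff)
      const { age, gender, genderProbability } = await faceapi.nets.ageGenderNet.predictAgeAndGender(img)
      console.log(`${gender} (${genderProbability.toFixed(2)}), age ~${age.toFixed(0)}`)
    }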
@@ -13,8 +13,8 @@ export function extractParams(weights: Float32Array): { params: NetParams, param
   const extractFCParams = TfjsImageRecognitionBase.extractFCParamsFactory(extractWeights, paramMappings)
 
-  const age = extractFCParams(512, 1, 'fc_age')
-  const gender = extractFCParams(512, 2, 'fc_gender')
+  const age = extractFCParams(512, 1, 'fc/age')
+  const gender = extractFCParams(512, 2, 'fc/gender')
 
   if (getRemainingWeights().length !== 0) {
     throw new Error(`weights remaing after extract: ${getRemainingWeights().length}`)
...
@@ -19,8 +19,8 @@ export function extractParamsFromWeigthMap(
   const params = {
     fc: {
-      age: extractFcParams('fc_age'),
-      gender: extractFcParams('fc_gender')
+      age: extractFcParams('fc/age'),
+      gender: extractFcParams('fc/gender')
     }
   }
...
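Both extraction paths now record the parameter paths 'fc/age' and 'fc/gender', so they line up with the tensor names in the weights manifest below ('fc/age/weights', 'fc/age/bias', ...). A sketch of that lookup against a plain weight map; lookupFcParams is illustrative, not a library helper:

    import * as tf from '@tensorflow/tfjs-core';

    // Illustrative only: resolve an FC layer's tensors from a weight map by path.
    function lookupFcParams(weightMap: tf.NamedTensorMap, prefix: string) {
      const weights = weightMap[`${prefix}/weights`] as tf.Tensor2D
      const bias = weightMap[`${prefix}/bias`] as tf.Tensor1D
      if (!weights || !bias) {
        throw new Error(`missing weight tensors for ${prefix}`)
      }
      return { weights, bias }
    }

    // lookupFcParams(weightMap, 'fc/age')    -> weights [512, 1], bias [1]
    // lookupFcParams(weightMap, 'fc/gender') -> weights [512, 2], bias [2]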
+import * as tf from '@tensorflow/tfjs-core';
+import { TfjsImageRecognitionBase } from 'tfjs-image-recognition-base';
+
+export function loadConvParamsFactory(extractWeightEntry: <T>(originalPath: string, paramRank: number) => T) {
+  return function(prefix: string): TfjsImageRecognitionBase.ConvParams {
+    const filters = extractWeightEntry<tf.Tensor4D>(`${prefix}/filters`, 4)
+    const bias = extractWeightEntry<tf.Tensor1D>(`${prefix}/bias`, 1)
+
+    return { filters, bias }
+  }
+}
\ No newline at end of file
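This new shared factory replaces the per-net copies of extractConvParams removed in the next two diffs. A usage sketch, with weightMap standing in for a loaded tf.NamedTensorMap:

    import * as tf from '@tensorflow/tfjs-core';
    import { TfjsImageRecognitionBase } from 'tfjs-image-recognition-base';
    import { loadConvParamsFactory } from '../common/loadConvParamsFactory';

    declare const weightMap: tf.NamedTensorMap

    // track which weight entries get consumed, as the nets below do
    const paramMappings: TfjsImageRecognitionBase.ParamMapping[] = []
    const extractWeightEntry = TfjsImageRecognitionBase.extractWeightEntryFactory(weightMap, paramMappings)
    const extractConvParams = loadConvParamsFactory(extractWeightEntry)

    // resolves 'entry_flow/conv_in/filters' ([3, 3, 3, 32]) and
    // 'entry_flow/conv_in/bias' ([32]) from the manifest below
    const convIn = extractConvParams('entry_flow/conv_in')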
-import * as tf from '@tensorflow/tfjs-core';
 import { TfjsImageRecognitionBase } from 'tfjs-image-recognition-base';
 
+import { loadConvParamsFactory } from '../common/loadConvParamsFactory';
 import { DenseBlock3Params, DenseBlock4Params } from './types';
 
 export function loadParamsFactory(weightMap: any, paramMappings: TfjsImageRecognitionBase.ParamMapping[]) {
   const extractWeightEntry = TfjsImageRecognitionBase.extractWeightEntryFactory(weightMap, paramMappings)
 
-  function extractConvParams(prefix: string): TfjsImageRecognitionBase.ConvParams {
-    const filters = extractWeightEntry<tf.Tensor4D>(`${prefix}/filters`, 4)
-    const bias = extractWeightEntry<tf.Tensor1D>(`${prefix}/bias`, 1)
-
-    return { filters, bias }
-  }
-
-  function extractSeparableConvParams(prefix: string): TfjsImageRecognitionBase.SeparableConvParams {
-    const depthwise_filter = extractWeightEntry<tf.Tensor4D>(`${prefix}/depthwise_filter`, 4)
-    const pointwise_filter = extractWeightEntry<tf.Tensor4D>(`${prefix}/pointwise_filter`, 4)
-    const bias = extractWeightEntry<tf.Tensor1D>(`${prefix}/bias`, 1)
-
-    return new TfjsImageRecognitionBase.SeparableConvParams(
-      depthwise_filter,
-      pointwise_filter,
-      bias
-    )
-  }
+  const extractConvParams = loadConvParamsFactory(extractWeightEntry)
+  const extractSeparableConvParams = TfjsImageRecognitionBase.loadSeparableConvParamsFactory(extractWeightEntry)
 
   function extractDenseBlock3Params(prefix: string, isFirstLayer: boolean = false): DenseBlock3Params {
     const conv0 = isFirstLayer
...
@@ -137,6 +137,7 @@ export const loadFaceLandmarkModel = (url: string) => nets.faceLandmark68Net.loa
 export const loadFaceLandmarkTinyModel = (url: string) => nets.faceLandmark68TinyNet.load(url)
 export const loadFaceRecognitionModel = (url: string) => nets.faceRecognitionNet.load(url)
 export const loadFaceExpressionModel = (url: string) => nets.faceExpressionNet.load(url)
+export const loadAgeGenderModel = (url: string) => nets.ageGenderNet.load(url)
 
 // backward compatibility
 export const loadFaceDetectionModel = loadSsdMobilenetv1Model
...
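The new loadAgeGenderModel export mirrors the other loader helpers; a usage sketch (the model path is illustrative):

    import * as faceapi from 'face-api.js';

    // equivalent to calling faceapi.nets.ageGenderNet.load(url)
    await faceapi.loadAgeGenderModel('/')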
@@ -81,7 +81,7 @@ export class TinyXception extends NeuralNetwork<TinyXceptionParams> {
   }
 
   protected extractParamsFromWeigthMap(weightMap: tf.NamedTensorMap) {
-    return extractParamsFromWeigthMap(weightMap)
+    return extractParamsFromWeigthMap(weightMap, this._numMainBlocks)
   }
 
   protected extractParams(weights: Float32Array) {
...
 import * as tf from '@tensorflow/tfjs-core';
-import { TfjsImageRecognitionBase } from 'tfjs-image-recognition-base';
+import { TfjsImageRecognitionBase, range } from 'tfjs-image-recognition-base';
 
-import { TinyXceptionParams } from './types';
+import { loadConvParamsFactory } from '../common/loadConvParamsFactory';
+import { MainBlockParams, ReductionBlockParams, TinyXceptionParams } from './types';
+
+function loadParamsFactory(weightMap: any, paramMappings: TfjsImageRecognitionBase.ParamMapping[]) {
+  const extractWeightEntry = TfjsImageRecognitionBase.extractWeightEntryFactory(weightMap, paramMappings)
+
+  const extractConvParams = loadConvParamsFactory(extractWeightEntry)
+  const extractSeparableConvParams = TfjsImageRecognitionBase.loadSeparableConvParamsFactory(extractWeightEntry)
+
+  function extractReductionBlockParams(mappedPrefix: string): ReductionBlockParams {
+    const separable_conv0 = extractSeparableConvParams(`${mappedPrefix}/separable_conv0`)
+    const separable_conv1 = extractSeparableConvParams(`${mappedPrefix}/separable_conv1`)
+    const expansion_conv = extractConvParams(`${mappedPrefix}/expansion_conv`)
+
+    return { separable_conv0, separable_conv1, expansion_conv }
+  }
+
+  function extractMainBlockParams(mappedPrefix: string): MainBlockParams {
+    const separable_conv0 = extractSeparableConvParams(`${mappedPrefix}/separable_conv0`)
+    const separable_conv1 = extractSeparableConvParams(`${mappedPrefix}/separable_conv1`)
+    const separable_conv2 = extractSeparableConvParams(`${mappedPrefix}/separable_conv2`)
+
+    return { separable_conv0, separable_conv1, separable_conv2 }
+  }
+
+  return {
+    extractConvParams,
+    extractSeparableConvParams,
+    extractReductionBlockParams,
+    extractMainBlockParams
+  }
+}
 
 export function extractParamsFromWeigthMap(
-  weightMap: tf.NamedTensorMap
+  weightMap: tf.NamedTensorMap,
+  numMainBlocks: number
 ): { params: TinyXceptionParams, paramMappings: TfjsImageRecognitionBase.ParamMapping[] } {
-  throw "extractParamsFromWeigthMap not implemented";
-
-  const paramMappings: TfjsImageRecognitionBase.ParamMapping[] = []
+  const paramMappings: TfjsImageRecognitionBase.ParamMapping[] = []
+
+  const {
+    extractConvParams,
+    extractSeparableConvParams,
+    extractReductionBlockParams,
+    extractMainBlockParams
+  } = loadParamsFactory(weightMap, paramMappings)
+
+  const entry_flow_conv_in = extractConvParams('entry_flow/conv_in')
+  const entry_flow_reduction_block_0 = extractReductionBlockParams('entry_flow/reduction_block_0')
+  const entry_flow_reduction_block_1 = extractReductionBlockParams('entry_flow/reduction_block_1')
+
+  const entry_flow = {
+    conv_in: entry_flow_conv_in,
+    reduction_block_0: entry_flow_reduction_block_0,
+    reduction_block_1: entry_flow_reduction_block_1
+  }
+
+  const middle_flow = {}
+  range(numMainBlocks, 0, 1).forEach((idx) => {
+    middle_flow[`main_block_${idx}`] = extractMainBlockParams(`middle_flow/main_block_${idx}`)
+  })
+
+  const exit_flow_reduction_block = extractReductionBlockParams('exit_flow/reduction_block')
+  const exit_flow_separable_conv = extractSeparableConvParams('exit_flow/separable_conv')
+
+  const exit_flow = {
+    reduction_block: exit_flow_reduction_block,
+    separable_conv: exit_flow_separable_conv
+  }
 
   TfjsImageRecognitionBase.disposeUnusedWeightTensors(weightMap, paramMappings)
 
-  return { params: {} as any, paramMappings }
+  return { params: { entry_flow, middle_flow, exit_flow }, paramMappings }
 }
\ No newline at end of file
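The middle flow is now built dynamically from numMainBlocks, using range from tfjs-image-recognition-base to enumerate the block indices. For the checked-in weights, the manifest below contains main_block_0 and main_block_1, i.e. this net is constructed with numMainBlocks = 2. A sketch, assuming range(num, start, step) yields num values starting at start:

    import { range } from 'tfjs-image-recognition-base';

    // enumerate the middle-flow prefixes for numMainBlocks = 2
    const keys = range(2, 0, 1).map(idx => `middle_flow/main_block_${idx}`)
    console.log(keys) // ['middle_flow/main_block_0', 'middle_flow/main_block_1']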
[{"weights":[{"name":"entry_flow/conv_in/filters","shape":[3,3,3,32],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.005431825039433498,"min":-0.7441600304023892}},{"name":"entry_flow/conv_in/bias","shape":[32],"dtype":"float32"},{"name":"entry_flow/reduction_block_0/separable_conv0/depthwise_filter","shape":[3,3,32,1],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.005691980614381678,"min":-0.6090419257388395}},{"name":"entry_flow/reduction_block_0/separable_conv0/pointwise_filter","shape":[1,1,32,64],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.009089225881239947,"min":-1.1179747833925135}},{"name":"entry_flow/reduction_block_0/separable_conv0/bias","shape":[64],"dtype":"float32"},{"name":"entry_flow/reduction_block_0/separable_conv1/depthwise_filter","shape":[3,3,64,1],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.00683894624897078,"min":-0.8138346036275228}},{"name":"entry_flow/reduction_block_0/separable_conv1/pointwise_filter","shape":[1,1,64,64],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.011632566358528886,"min":-1.3028474321552352}},{"name":"entry_flow/reduction_block_0/separable_conv1/bias","shape":[64],"dtype":"float32"},{"name":"entry_flow/reduction_block_0/expansion_conv/filters","shape":[1,1,32,64],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.010254812240600587,"min":-0.9229331016540528}},{"name":"entry_flow/reduction_block_0/expansion_conv/bias","shape":[64],"dtype":"float32"},{"name":"entry_flow/reduction_block_1/separable_conv0/depthwise_filter","shape":[3,3,64,1],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.0052509616403018725,"min":-0.6406173201168285}},{"name":"entry_flow/reduction_block_1/separable_conv0/pointwise_filter","shape":[1,1,64,128],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.010788509424994973,"min":-1.4564487723743214}},{"name":"entry_flow/reduction_block_1/separable_conv0/bias","shape":[128],"dtype":"float32"},{"name":"entry_flow/reduction_block_1/separable_conv1/depthwise_filter","shape":[3,3,128,1],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.00553213918910307,"min":-0.7025816770160899}},{"name":"entry_flow/reduction_block_1/separable_conv1/pointwise_filter","shape":[1,1,128,128],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.013602388606351965,"min":-1.6186842441558837}},{"name":"entry_flow/reduction_block_1/separable_conv1/bias","shape":[128],"dtype":"float32"},{"name":"entry_flow/reduction_block_1/expansion_conv/filters","shape":[1,1,64,128],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.007571851038465313,"min":-1.158493208885193}},{"name":"entry_flow/reduction_block_1/expansion_conv/bias","shape":[128],"dtype":"float32"},{"name":"middle_flow/main_block_0/separable_conv0/depthwise_filter","shape":[3,3,128,1],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.005766328409606335,"min":-0.6688940955143349}},{"name":"middle_flow/main_block_0/separable_conv0/pointwise_filter","shape":[1,1,128,128],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.012136116214826995,"min":-1.5776951079275094}},{"name":"middle_flow/main_block_0/separable_conv0/bias","shape":[128],"dtype":"float32"},{"name":"middle_flow/main_block_0/separable_conv1/depthwise_filter","shape":[3,3,128,1],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.004314773222979377,"min":-0.5652352922102984}},{"name":"middle_flow/main_block_0/separable_conv1/pointwise_filter","shape":[1,1,128,128],"dtype":"float32","quanti
zation":{"dtype":"uint8","scale":0.01107162026798024,"min":-1.2400214700137868}},{"name":"middle_flow/main_block_0/separable_conv1/bias","shape":[128],"dtype":"float32"},{"name":"middle_flow/main_block_0/separable_conv2/depthwise_filter","shape":[3,3,128,1],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.0036451735917259667,"min":-0.4848080876995536}},{"name":"middle_flow/main_block_0/separable_conv2/pointwise_filter","shape":[1,1,128,128],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.008791744942758598,"min":-1.134135097615859}},{"name":"middle_flow/main_block_0/separable_conv2/bias","shape":[128],"dtype":"float32"},{"name":"middle_flow/main_block_1/separable_conv0/depthwise_filter","shape":[3,3,128,1],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.004915751896652521,"min":-0.6095532351849126}},{"name":"middle_flow/main_block_1/separable_conv0/pointwise_filter","shape":[1,1,128,128],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.010868691463096469,"min":-1.3368490499608656}},{"name":"middle_flow/main_block_1/separable_conv0/bias","shape":[128],"dtype":"float32"},{"name":"middle_flow/main_block_1/separable_conv1/depthwise_filter","shape":[3,3,128,1],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.005010117269029804,"min":-0.6012140722835765}},{"name":"middle_flow/main_block_1/separable_conv1/pointwise_filter","shape":[1,1,128,128],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.010311148213405235,"min":-1.3816938605963016}},{"name":"middle_flow/main_block_1/separable_conv1/bias","shape":[128],"dtype":"float32"},{"name":"middle_flow/main_block_1/separable_conv2/depthwise_filter","shape":[3,3,128,1],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.004911523706772748,"min":-0.7367285560159123}},{"name":"middle_flow/main_block_1/separable_conv2/pointwise_filter","shape":[1,1,128,128],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.008976466047997568,"min":-1.2207993825276693}},{"name":"middle_flow/main_block_1/separable_conv2/bias","shape":[128],"dtype":"float32"},{"name":"exit_flow/reduction_block/separable_conv0/depthwise_filter","shape":[3,3,128,1],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.005074804436926748,"min":-0.7104726211697447}},{"name":"exit_flow/reduction_block/separable_conv0/pointwise_filter","shape":[1,1,128,256],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.011453078307357489,"min":-1.4545409450344011}},{"name":"exit_flow/reduction_block/separable_conv0/bias","shape":[256],"dtype":"float32"},{"name":"exit_flow/reduction_block/separable_conv1/depthwise_filter","shape":[3,3,256,1],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.007741751390344957,"min":-1.1380374543807086}},{"name":"exit_flow/reduction_block/separable_conv1/pointwise_filter","shape":[1,1,256,256],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.011347713189966538,"min":-1.497898141075583}},{"name":"exit_flow/reduction_block/separable_conv1/bias","shape":[256],"dtype":"float32"},{"name":"exit_flow/reduction_block/expansion_conv/filters","shape":[1,1,128,256],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.006717281014311547,"min":-0.8329428457746318}},{"name":"exit_flow/reduction_block/expansion_conv/bias","shape":[256],"dtype":"float32"},{"name":"exit_flow/separable_conv/depthwise_filter","shape":[3,3,256,1],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.0027201742518181892,"min":-0.3237007359663645}},{"name":"exit_flow/separable_conv/poi
ntwise_filter","shape":[1,1,256,512],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.010076364348916447,"min":-1.330080094056971}},{"name":"exit_flow/separable_conv/bias","shape":[512],"dtype":"float32"},{"name":"fc/age/weights","shape":[512,1],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.008674054987290326,"min":-1.2664120281443876}},{"name":"fc/age/bias","shape":[1],"dtype":"float32"},{"name":"fc/gender/weights","shape":[512,2],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.0029948226377075793,"min":-0.34140978069866407}},{"name":"fc/gender/bias","shape":[2],"dtype":"float32"}],"paths":["age_gender_model-shard1"]}]
\ No newline at end of file
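The manifest stores most tensors with uint8 weight quantization: each stored byte q is decoded to q * scale + min using the entry's quantization metadata when the shard listed in paths (age_gender_model-shard1) is loaded. A sketch of that decoding (the tfjs weight loader performs this internally):

    // Dequantize a uint8 weight buffer using the manifest's { scale, min } metadata.
    function dequantize(quantized: Uint8Array, scale: number, min: number): Float32Array {
      const out = new Float32Array(quantized.length)
      for (let i = 0; i < quantized.length; i++) {
        out[i] = quantized[i] * scale + min
      }
      return out
    }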