Commit 9c006357 by vincent

added a new tiny face detector + latest state of the model weights

parent 2145e28f
import { Point, TNetInput } from 'tfjs-image-recognition-base';
import { TinyYolov2 as TinyYolov2Base, TinyYolov2Types } from 'tfjs-tiny-yolov2';

import { FaceDetection } from '../classes';
import { BOX_ANCHORS, DEFAULT_MODEL_NAME, IOU_THRESHOLD, MEAN_RGB } from './const';

export class TinyFaceDetector extends TinyYolov2Base {

  constructor() {
    const config = {
      withSeparableConvs: true,
      iouThreshold: IOU_THRESHOLD,
      classes: ['face'],
      anchors: BOX_ANCHORS,
      meanRgb: MEAN_RGB,
      isFirstLayerConv2d: true,
      filterSizes: [3, 16, 32, 64, 128, 256, 512]
    }

    super(config)
  }

  public get anchors(): Point[] {
    return this.config.anchors
  }

  public async locateFaces(input: TNetInput, forwardParams: TinyYolov2Types.TinyYolov2ForwardParams): Promise<FaceDetection[]> {
    const objectDetections = await this.detect(input, forwardParams)
    return objectDetections.map(det => new FaceDetection(det.score, det.relativeBox, { width: det.imageWidth, height: det.imageHeight }))
  }

  protected loadQuantizedParams(modelUri: string | undefined) {
    const defaultModelName = DEFAULT_MODEL_NAME
    return super.loadQuantizedParams(modelUri, defaultModelName) as any
  }
}
\ No newline at end of file
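Usage sketch (not part of the commit): how the new detector might be called once its quantized weights are available. The net.load('/models') call and the forward params below are assumptions based on how the other nets in this codebase are typically used; treat them as illustrative only.

// Hypothetical usage of TinyFaceDetector (illustrative sketch, not part of this commit).
import { TinyYolov2Types } from 'tfjs-tiny-yolov2';
import { TinyFaceDetector } from './TinyFaceDetector';

async function detectFacesExample(input: HTMLImageElement) {
  const net = new TinyFaceDetector()
  // Assumption: the base net exposes a load(uri) method, which ends up in loadQuantizedParams.
  await net.load('/models')

  // Forward params for the underlying TinyYolov2: smaller inputSize is faster, larger is more accurate.
  const forwardParams: TinyYolov2Types.TinyYolov2ForwardParams = {
    inputSize: 416,
    scoreThreshold: 0.5
  }

  const detections = await net.locateFaces(input, forwardParams)
  detections.forEach(det => console.log(det.score, det.box))
}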
import { TinyYolov2Options } from '../tinyYolov2/TinyYolov2Options';
export class TinyFaceDetectorOptions extends TinyYolov2Options {
  protected _name: string = 'TinyFaceDetectorOptions'
}
\ No newline at end of file
import { Point } from 'tfjs-image-recognition-base';
export const IOU_THRESHOLD = 0.4
export const BOX_ANCHORS = [
  new Point(1.603231, 2.094468),
  new Point(6.041143, 7.080126),
  new Point(2.882459, 3.518061),
  new Point(4.266906, 5.178857),
  new Point(9.041765, 10.66308)
]
export const MEAN_RGB: [number, number, number] = [117.001, 114.697, 97.404]
export const DEFAULT_MODEL_NAME = 'tiny_face_detection_model'
\ No newline at end of file
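IOU_THRESHOLD is the overlap threshold the underlying TinyYolov2 uses when suppressing duplicate boxes: a detection overlapping a higher-scoring box by more than this intersection-over-union is discarded. An illustrative IoU helper, not part of the commit, to show what the value controls:

// Illustrative intersection-over-union for two axis-aligned boxes {x, y, width, height}.
function iou(
  a: { x: number, y: number, width: number, height: number },
  b: { x: number, y: number, width: number, height: number }
): number {
  const x1 = Math.max(a.x, b.x)
  const y1 = Math.max(a.y, b.y)
  const x2 = Math.min(a.x + a.width, b.x + b.width)
  const y2 = Math.min(a.y + a.height, b.y + b.height)
  const intersection = Math.max(0, x2 - x1) * Math.max(0, y2 - y1)
  const union = a.width * a.height + b.width * b.height - intersection
  return union > 0 ? intersection / union : 0
}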
import { TinyFaceDetector } from './TinyFaceDetector';
export * from './TinyFaceDetector';
export * from './TinyFaceDetectorOptions';
export function createTinyFaceDetector(weights: Float32Array) {
  const net = new TinyFaceDetector()
  net.extractWeights(weights)
  return net
}
\ No newline at end of file
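For the non-quantized path, createTinyFaceDetector expects the raw weights as a Float32Array. A minimal sketch, assuming the uncompressed weights are fetched from a placeholder URL (the URL and helper name are not part of this commit):

// Illustrative sketch: build a TinyFaceDetector from uncompressed weights.
import { createTinyFaceDetector } from './index';

async function createFromBinaryWeights(url: string) {
  const res = await fetch(url)
  const weights = new Float32Array(await res.arrayBuffer())
  return createTinyFaceDetector(weights)
}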
[{"weights":[{"name":"conv0/filters","shape":[3,3,3,16],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.009022112453685087,"min":-1.1999409563401164}},{"name":"conv0/bias","shape":[16],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.005271678228004306,"min":-0.9120003334447448}},{"name":"conv1/depthwise_filter","shape":[3,3,16,1],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.003994523777681239,"min":-0.5073045197655173}},{"name":"conv1/pointwise_filter","shape":[1,1,16,32],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.013741331474453795,"min":-1.3878744789198332}},{"name":"conv1/bias","shape":[32],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.0015656957147168178,"min":-0.3272304043758149}},{"name":"conv2/depthwise_filter","shape":[3,3,32,1],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.0027797868438795502,"min":-0.3419137817971847}},{"name":"conv2/pointwise_filter","shape":[1,1,32,64],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.014880534714343502,"min":-1.5922172144347546}},{"name":"conv2/bias","shape":[64],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.00211673332195656,"min":-0.38101199795218077}},{"name":"conv3/depthwise_filter","shape":[3,3,64,1],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.006029989906385833,"min":-0.8864085162387174}},{"name":"conv3/pointwise_filter","shape":[1,1,64,128],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.016246150521671072,"min":-2.030768815208884}},{"name":"conv3/bias","shape":[128],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.003473230465954425,"min":-0.7884233157716545}},{"name":"conv4/depthwise_filter","shape":[3,3,128,1],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.006747295108495974,"min":-0.8096754130195168}},{"name":"conv4/pointwise_filter","shape":[1,1,128,256],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.021840399386836032,"min":-2.86109231967552}},{"name":"conv4/bias","shape":[256],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.004201083674150355,"min":-0.8318145674817703}},{"name":"conv5/depthwise_filter","shape":[3,3,256,1],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.008484941604090673,"min":-0.907888751637702}},{"name":"conv5/pointwise_filter","shape":[1,1,256,512],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.029725045783846986,"min":-3.804805860332414}},{"name":"conv5/bias","shape":[512],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.008291378208235198,"min":-1.4841566992741004}},{"name":"conv8/filters","shape":[1,1,512,25],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.02816048977421779,"min":-4.7028017922943715}},{"name":"conv8/bias","shape":[25],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.002222977841601652,"min":-0.4001360114882974}}],"paths":["tiny_face_detector_model-shard1"]}]
\ No newline at end of file
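Each manifest entry above describes a uint8-quantized tensor together with the scale and min needed to recover float values (float = quantized * scale + min). A small illustrative sketch of that dequantization step; the actual loading is handled by loadQuantizedParams and the tfjs weight loader, not by this helper:

// Illustrative dequantization of one uint8-quantized tensor from the manifest above.
function dequantize(quantized: Uint8Array, scale: number, min: number): Float32Array {
  const out = new Float32Array(quantized.length)
  for (let i = 0; i < quantized.length; i++) {
    out[i] = quantized[i] * scale + min
  }
  return out
}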