Commit cb52d620 by vincent

resizeLayer

parent 42e41f1e
import * as tf from '@tensorflow/tfjs-core';
import { resizeLayer } from './resizeLayer';

export function faceDetectionNet() {

  async function forward(input: ImageData|ImageData[]) {
    const imgTensors = (input instanceof ImageData ? [input] : input)
      .map(data => tf.fromPixels(data))
      .map(data => tf.expandDims(data, 0)) as tf.Tensor4D[]

    const imgTensor = tf.cast(tf.concat(imgTensors, 0), 'float32')
    const resized = resizeLayer(imgTensor)
    return resized
  }

  return {
    forward
  }
}
\ No newline at end of file
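A minimal usage sketch of the factory above, assuming ImageData sourced from a canvas (the element id is a placeholder, not part of the commit):

import { faceDetectionNet } from './faceDetectionNet';

const canvas = document.getElementById('inputCanvas') as HTMLCanvasElement
const ctx = canvas.getContext('2d')!
const imgData = ctx.getImageData(0, 0, canvas.width, canvas.height)

const net = faceDetectionNet()
net.forward(imgData).then(resized => {
  // a single ImageData input is wrapped into a batch of 1, then resized and normalized
  console.log(resized.shape) // [1, 512, 512, 3]
})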
import * as tf from '@tensorflow/tfjs-core';

import { TensorArray } from '../../tfcpatches/TensorArray';
import { whileLayer } from './whileLayer';

export type PreprocessorParams = any

// TODO: hardcoded params
const elementShape = [512, 512, 3]
const weight = tf.scalar(0.007843137718737125)
const bias = tf.scalar(1)

export function preprocessor(imgTensor: tf.Tensor4D, params: PreprocessorParams) {
  const batchSize = imgTensor.shape[0]

  const batchSizedArray1 = new TensorArray(batchSize, 'float32')
  const batchSizedArray2 = new TensorArray(batchSize, 'float32')

  let unusedFlow = null
  const indices = tf.range(0, batchSize, 1)

  // unstack
  unusedFlow = batchSizedArray1.scatter(indices, imgTensor, unusedFlow)
  unusedFlow = whileLayer(batchSizedArray1, batchSizedArray2, batchSize, unusedFlow)
  // stack
  const stacked = batchSizedArray2.gather(indices, unusedFlow, 'float32', elementShape)

  return tf.add(tf.mul(stacked, weight), bias)
}
\ No newline at end of file
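The scatter / whileLayer / gather sequence mirrors the unstack -> per-image op -> stack pattern of the translated TensorFlow graph. As a rough eager-mode equivalent sketch (the per-image op is left as identity here, since the while body is still a TODO):

import * as tf from '@tensorflow/tfjs-core';

function preprocessorEagerSketch(imgTensor: tf.Tensor4D): tf.Tensor4D {
  // unstack the batch into single images, apply a per-image op, restack
  const perImage = tf.unstack(imgTensor, 0).map(img => img) // identity placeholder
  const stacked = tf.stack(perImage, 0) as tf.Tensor4D
  // same affine normalization as above: x * 0.007843137718737125 + 1
  return tf.add(tf.mul(stacked, tf.scalar(0.007843137718737125)), tf.scalar(1)) as tf.Tensor4D
}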
import * as tf from '@tensorflow/tfjs-core';

import { TensorArray } from '../../tfcpatches/TensorArray';

export function whileLayer(arr1: TensorArray, arr2: TensorArray, batchSize: number, unusedFlowIn: tf.Scalar): tf.Scalar {
  // TODO
  const unusedFlowOut = tf.scalar(0)
  return unusedFlowOut
}
\ No newline at end of file
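One possible shape for the loop body, sketched with only the gather/scatter API this commit touches on the patched TensorArray; the per-element op is a placeholder until the TODO above is resolved:

import * as tf from '@tensorflow/tfjs-core';
import { TensorArray } from '../../tfcpatches/TensorArray';

function whileLayerSketch(arr1: TensorArray, arr2: TensorArray, batchSize: number, unusedFlowIn: tf.Scalar): tf.Scalar {
  let flow = unusedFlowIn
  for (let i = 0; i < batchSize; i++) {
    const idx = tf.tensor1d([i], 'int32')
    // read element i out of arr1
    const element = arr1.gather(idx, flow)
    // a per-element op (e.g. a resize) would go here; identity for now
    flow = arr2.scatter(idx, tf.expandDims(element, 0), flow)
  }
  return flow
}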
import * as tf from '@tensorflow/tfjs-core';

// TODO: hardcoded params
const resizedImageSize = [512, 512] as [number, number]
const weight = tf.scalar(0.007843137718737125)
const bias = tf.scalar(1)

export function resizeLayer(imgTensor: tf.Tensor4D) {
  const resizedImgs = tf.image.resizeBilinear(imgTensor, resizedImageSize, false)
  return tf.sub(tf.mul(resizedImgs, weight), bias)
}
\ No newline at end of file
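The magic constant is the float32 rounding of 2 / 255, so a uint8 image with values in [0, 255] comes out of this layer in [-1, 1] after the bilinear resize. A quick shape and range check with a dummy batch:

import * as tf from '@tensorflow/tfjs-core';
import { resizeLayer } from './resizeLayer';

const dummyBatch = tf.zeros([2, 300, 300, 3]) as tf.Tensor4D
const out = resizeLayer(dummyBatch)
console.log(out.shape) // [2, 512, 512, 3]
out.min().print()      // -1, since the all-zero input maps to 0 * weight - 1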
 import { euclideanDistance } from './euclideanDistance';
+import { faceDetectionNet } from './faceDetectionNet';
 import { faceRecognitionNet } from './faceRecognitionNet';
 import { normalize } from './normalize';
 export {
   euclideanDistance,
+  faceDetectionNet,
   faceRecognitionNet,
   normalize
 }
\ No newline at end of file
 import * as tf from '@tensorflow/tfjs-core';
 export class TensorArray {
-  private _tensors: tf.Tensor[] | undefined
+  private _tensors: tf.Tensor[]
   constructor(
-    private _size: number,
-    private _dtype: tf.DataType = null,
-    private _elementShape: number[] = null,
+    size: number,
+    private _dtype: tf.DataType | null = null,
+    private _elementShape: number[] | null = null,
     private _dynamicSize: boolean = false,
     private _clearAfterRead: boolean = true,
     private _identicalElementShapes: boolean = false,
-    private _tensorArrayName: string = null
+    private _tensorArrayName: string = ''
   ) {
-    if (_size) {
-      this._tensors = Array(_size).fill(0).map(_ => tf.scalar(0))
-    }
+    this._tensors = size
+      ? Array(size).fill(0).map(_ => tf.scalar(0))
+      : []
   }
   public scatter(indices: tf.Tensor1D, value: tf.Tensor, unusedFlow: tf.Scalar): tf.Scalar {
+    this.expectValidSize('scatter', indices)
     if (indices.shape.length !== 1) {
       throw new Error(`scatter - expected rank of indices (${indices.shape.length}) to be 1`)
     }
-    if (indices.shape[0] > this._size) {
-      throw new Error(`scatter - expected indices.shape[0] (${indices.shape[0]}) to be >= this._size (${this.size})`)
-    }
     if (indices.shape[0] !== value.shape[0]) {
       throw new Error(`scatter - expected indices.shape[0] (${indices.shape[0]}) to equal value.shape[0] (${value.shape[0]})`)
     }
     const unstacked = tf.unstack(value, 0)
     Array.from(indices.dataSync()).forEach((idx, i) => {
-      this._tensors[idx] = unstacked[i]
+      (this._tensors as tf.Tensor[])[idx] = unstacked[i]
     })
     const unusedFlowOut = tf.scalar(0)
@@ -38,11 +36,21 @@ export class TensorArray {
   }
   public gather(indices: tf.Tensor1D, unusedFlow: tf.Scalar, dtype?: tf.DataType, elementShape?: number[]): tf.Tensor {
+    this.expectValidSize('gather', indices)
     const tensors = Array.from(indices.dataSync()).map(idx => this._tensors[idx])
     return tf.concat(tensors)
   }
-  public size(unusedFlow: tf.Scalar) {
-    return this._size
+  public size(unusedFlow?: tf.Scalar) {
+    return this._tensors.length
+  }
+  private expectValidSize(methodName: string, indices: tf.Tensor1D) {
+    if (!this._tensors) {
+      throw new Error(`${methodName} - TensorArray is not initialized`)
+    }
+    if (indices.shape[0] > this._tensors.length) {
+      throw new Error(`${methodName} - expected indices.shape[0] (${indices.shape[0]}) to be <= this.size() (${this.size()})`)
+    }
   }
 }
\ No newline at end of file
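A round-trip sketch of the patched class, following the same range-based indices pattern preprocessor uses; note that gather concats the stored elements along axis 0 rather than stacking them:

import * as tf from '@tensorflow/tfjs-core';
import { TensorArray } from '../../tfcpatches/TensorArray';

const batch = tf.ones([3, 2, 2, 3]) as tf.Tensor4D
const arr = new TensorArray(3, 'float32')
const indices = tf.range(0, 3, 1)

const flow = arr.scatter(indices, batch, tf.scalar(0)) // "unstack" into the array
const back = arr.gather(indices, flow)                 // concat of the 3 stored [2, 2, 3] elements
console.log(back.shape) // [6, 2, 3] - tf.concat joins along axis 0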