Commit 4ef65c1b
authored Aug 12, 2018 by vincent
check in latest train scripts

parent 0320da07
Showing 5 changed files with 202 additions and 209 deletions (+202 -209):

tools/train/serveTinyYolov2.js        +2    -1
tools/train/tinyYolov2/overfit.html   +0    -124
tools/train/tinyYolov2/train.html     +70   -27
tools/train/tinyYolov2/train.js       +122  -56
tools/train/tinyYolov2/verify.html    +8    -1
tools/train/serveTinyYolov2.js (view file @ 4ef65c1b)

@@ -25,7 +25,7 @@ const detectionFilenames = fs.readdirSync(detectionsPath)
 app.use(express.static(trainDataPath))
 app.get('/detection_filenames', (req, res) => res.status(202).send(detectionFilenames))
-app.get('/', (req, res) => res.sendFile(path.join(publicDir, 'overfit.html')))
+app.get('/', (req, res) => res.sendFile(path.join(publicDir, 'train.html')))
 app.get('/verify', (req, res) => res.sendFile(path.join(publicDir, 'verify.html')))
 app.listen(3000, () => console.log('Listening on port 3000!'))
\ No newline at end of file
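A quick way to exercise these routes once the server is running: a minimal sketch, assuming Node 18+ (for the global fetch) and the port 3000 used above; the check for 'train.js' in the served page is an assumption about train.html's markup, not something this diff guarantees.

    const base = 'http://localhost:3000'

    async function smokeTest() {
      // the server answers with the array of detection .json filenames (sent with status 202, as above)
      const filenames = await fetch(`${base}/detection_filenames`).then(res => res.json())
      console.log(`found ${filenames.length} detection files`)

      // '/' now serves train.html instead of overfit.html; the substring check is only a loose sanity test
      const page = await fetch(`${base}/`).then(res => res.text())
      console.log('train page served:', page.includes('train.js'))
    }

    smokeTest().catch(console.error)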
tools/train/tinyYolov2/overfit.html (deleted, 100644 → 0; view file @ 0320da07)
<!DOCTYPE html>
<html>
<head>
  <link rel="stylesheet" href="styles.css">
  <link rel="stylesheet" href="https://cdnjs.cloudflare.com/ajax/libs/materialize/0.100.2/css/materialize.css">
  <script type="text/javascript" src="https://code.jquery.com/jquery-2.1.1.min.js"></script>
  <script src="https://cdnjs.cloudflare.com/ajax/libs/materialize/0.100.2/js/materialize.min.js"></script>
  <script src="face-api.js"></script>
  <script src="commons.js"></script>
  <script src="FileSaver.js"></script>
  <script src="trainUtils.js"></script>
  <script src="train.js"></script>
  <script src="loss.js"></script>
</head>
<body>
  <script>
    tf = faceapi.tf

    const weightsUrl = '/tmp/initial_tiny_yolov2_glorot_normal.weights'
    const fromEpoch = 0

    window.debug = false
    window.logTrainSteps = false

    // hyper parameters
    window.objectScale = 5
    window.noObjectScale = 1
    window.coordScale = 1

    window.saveEveryNthIteration = 1
    window.trainSteps = 4000

    //window.optimizer = tf.train.sgd(0.001)
    window.optimizer = tf.train.adam(0.001, 0.9, 0.999, 1e-8)

    // all samples
    const numTrainSamples = Infinity

    async function loadNetWeights(uri) {
      return new Float32Array(await (await fetch(uri)).arrayBuffer())
    }

    async function fetchDetectionFilenames() {
      return fetch('/detection_filenames').then(res => res.json())
    }

    async function run() {
      const weights = await loadNetWeights(weightsUrl)
      window.net = new faceapi.TinyYolov2(true)
      window.net.load(weights)
      window.net.variable()
      window.detectionFilenames = (await fetchDetectionFilenames()).slice(0, numTrainSamples)
      window.lossMap = {}
      console.log('ready')
    }

    const trainSizes = [320]

    function logLossChange(lossType) {
      const { currentLoss, prevLoss, detectionFilenames } = window
      log(`${lossType}: ${faceapi.round(currentLoss[lossType])} (avg: ${faceapi.round(currentLoss[lossType] / detectionFilenames.length)}) (delta: ${currentLoss[lossType] - prevLoss[lossType]})`)
    }

    async function train(batchSize = 1) {
      for (let i = fromEpoch; i < trainSteps; i++) {
        log('step', i)

        let ts = Date.now()
        const batchCreators = createBatchCreators(shuffle(window.detectionFilenames), batchSize)

        for (let s = 0; s < trainSizes.length; s++) {
          let ts2 = Date.now()
          await trainStep(batchCreators, trainSizes[s])
          ts2 = Date.now() - ts2
          //log('train for size %s done (%s ms)', trainSizes[s], ts2)
        }

        ts = Date.now() - ts
        log()
        log('--------------------')
        log()
        log('step %s done (%s ms)', i, ts)

        window.prevLoss = window.currentLoss
        window.currentLoss = Object.keys(lossMap)
          .map(filename => lossMap[filename])
          .reduce((accumulatedLosses, losses) =>
            Object.keys(losses)
              .map(key => ({
                [key]: (accumulatedLosses[key] || 0) + losses[key]
              }))
              .reduce((map, curr) => ({ ...map, ...curr }), {}),
            {}
          )

        if (window.prevLoss) {
          logLossChange('noObjectLoss')
          logLossChange('objectLoss')
          logLossChange('coordLoss')
          logLossChange('totalLoss')
        }

        log()
        log('--------------------')
        log()

        if (((i + 1) % saveEveryNthIteration) === 0) {
          saveWeights(window.net, 'adam_511_' + (i + 1) + '.weights')
        }
      }
    }

    run()
  </script>
</body>
</html>
\ No newline at end of file
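The double reduce over window.lossMap in the training loop above simply sums the per-file loss entries into one aggregate object. A standalone sketch of that pattern, with made-up numbers (the filenames and values are hypothetical, the shape matches what trainStep writes into window.lossMap):

    // hypothetical per-file losses
    const lossMap = {
      'det1.json': { totalLoss: 2.0, objectLoss: 1.0, noObjectLoss: 0.5, coordLoss: 0.5 },
      'det2.json': { totalLoss: 4.0, objectLoss: 2.0, noObjectLoss: 1.0, coordLoss: 1.0 }
    }

    // same accumulation pattern as window.currentLoss above
    const currentLoss = Object.keys(lossMap)
      .map(filename => lossMap[filename])
      .reduce((accumulatedLosses, losses) =>
        Object.keys(losses)
          .map(key => ({ [key]: (accumulatedLosses[key] || 0) + losses[key] }))
          .reduce((map, curr) => ({ ...map, ...curr }), {}),
        {}
      )

    console.log(currentLoss)
    // -> { totalLoss: 6, objectLoss: 3, noObjectLoss: 1.5, coordLoss: 1.5 }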
tools/train/tinyYolov2/train.html (view file @ 4ef65c1b)

@@ -17,22 +17,32 @@
 <script>
   tf = faceapi.tf

-  const weightsUrl = '/tmp/.weights'
+  const startIdx224 = 3220
+  const startIdx320 = 20688
+  const startIdx416 = 950
+  const startIdx608 = 15220
+  const weightsUrl = `/tmp/tmp__224_${startIdx224}__320_${startIdx320}__416_${startIdx416}__608_${startIdx608}.weights`
   const fromEpoch = 0

   window.debug = false
   window.logTrainSteps = true

   // hyper parameters
   window.objectScale = 5
-  window.noObjectScale = 0.5
+  window.noObjectScale = 1
   window.coordScale = 1

-  window.saveEveryNthIteration = 1
-  window.trainSteps = 100
-  //window.optimizer = tf.train.sgd(learningRate)
+  const rescaleEveryNthBatch = Infinity
+  window.saveEveryNthDataIdx = 100
+  window.trainSteps = 4000
+  //window.optimizer = tf.train.sgd(0.001)
   window.optimizer = tf.train.adam(0.001, 0.9, 0.999, 1e-8)

+  function lossFunction(labels, out) {
+    return tf.losses.meanSquaredError(labels, out)
+  }

   // all samples
   const numTrainSamples = Infinity

   async function loadNetWeights(uri) {
     return new Float32Array(await (await fetch(uri)).arrayBuffer())
   }
...

@@ -47,36 +57,69 @@
   window.net = new faceapi.TinyYolov2(true)
   window.net.load(weights)
   window.net.variable()

-  window.detectionFilenames = await fetchDetectionFilenames()
+  window.detectionFilenames = (await fetchDetectionFilenames()).slice(0, numTrainSamples)
   window.lossMap = {}

   console.log('ready')
 }

-const trainSizes = [608, 416, 320, 224]
+//const trainSizes = [224, 320, 416, 608]
+const trainSizes = [608]
+
+function logLossChange(lossType) {
+  const { currentLoss, prevLoss, detectionFilenames } = window
+  log(`${lossType}: ${faceapi.round(currentLoss[lossType])} (avg: ${faceapi.round(currentLoss[lossType] / detectionFilenames.length)}) (delta: ${currentLoss[lossType] - prevLoss[lossType]})`)
+}
+
+function onBatchProcessed(dataIdx, inputSize) {
+  if (((dataIdx + 1) % saveEveryNthDataIdx) === 0) {
+    saveWeights(window.net, `tmp__224_${startIdx224}__320_${startIdx320}__416_${startIdx416}__608_${startIdx608 + dataIdx + 1}.weights`)
+  }
+}

-async function train(batchSize = 1) {
-  for (let i = 0; i < trainSteps; i++) {
-    console.log('step', i)
+async function train() {
+  const batchSize = 1
+
+  for (let i = fromEpoch; i < trainSteps; i++) {
+    log('step', i)

     let ts = Date.now()
     const batchCreators = createBatchCreators(shuffle(window.detectionFilenames), batchSize)

-    for (let s = 0; s < trainSizes.length; s++) {
-      let ts2 = Date.now()
-      await trainStep(batchCreators, trainSizes[s])
-      ts2 = Date.now() - ts2
-      console.log('train for size %s done (%s ms)', trainSizes[s], ts2)
-    }
+    await trainStep(batchCreators, trainSizes, rescaleEveryNthBatch, onBatchProcessed)

     ts = Date.now() - ts
-    console.log('step %s done (%s ms)', i, ts)
-
-    if (((i + 1) % saveEveryNthIteration) === 0) {
-      saveWeights(window.net, 'tiny_yolov2_separable_model_' + i)
-    }
+    log()
+    log('--------------------')
+    log()
+    log('step %s done (%s ms)', i, ts)
+
+    window.prevLoss = window.currentLoss
+    window.currentLoss = Object.keys(lossMap)
+      .map(filename => lossMap[filename])
+      .reduce((accumulatedLosses, losses) =>
+        Object.keys(losses)
+          .map(key => ({
+            [key]: (accumulatedLosses[key] || 0) + losses[key]
+          }))
+          .reduce((map, curr) => ({ ...map, ...curr }), {}),
+        {}
+      )
+
+    if (window.prevLoss) {
+      logLossChange('noObjectLoss')
+      logLossChange('objectLoss')
+      logLossChange('coordLoss')
+      logLossChange('totalLoss')
+    }
+
+    log()
+    log('--------------------')
+    log()
   }
 }

 run()
...
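The checkpoint name written by onBatchProcessed above encodes, per input size, how many batches that size has already been trained on, so a later run (and verify.html below) can point weightsUrl at the matching file. A minimal sketch of the naming scheme, using the start indices hard-coded above; only the 608 counter advances in this run because trainSizes is [608], and checkpointName is a hypothetical helper mirroring the template literal in onBatchProcessed:

    const startIdx224 = 3220
    const startIdx320 = 20688
    const startIdx416 = 950
    const startIdx608 = 15220

    // hypothetical helper: how the filename looks after `processedBatches` more batches at size 608
    function checkpointName(processedBatches) {
      return `tmp__224_${startIdx224}__320_${startIdx320}__416_${startIdx416}__608_${startIdx608 + processedBatches}.weights`
    }

    console.log(checkpointName(100))
    // -> tmp__224_3220__320_20688__416_950__608_15320.weights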
tools/train/tinyYolov2/train.js (view file @ 4ef65c1b)
-async function trainStep(batchCreators, inputSize) {
-  await promiseSequential(batchCreators.map((batchCreator, dataIdx) => async () => {
-    // TODO: skip if groundTruthBoxes are too tiny
-    const { imgs, groundTruthBoxes, filenames } = await batchCreator()
-    const batchInput = (await faceapi.toNetInput(imgs)).managed()
-
-    let ts = Date.now()
-    const loss = optimizer.minimize(() => {
-      // TBD: batch loss
-      const batchIdx = 0
-      const outTensor = window.net.forwardInput(batchInput, inputSize)
-      const {
-        noObjectLoss,
-        objectLoss,
-        coordLoss,
-        totalLoss
-      } = computeLoss(
-        outTensor,
-        groundTruthBoxes[batchIdx],
-        batchInput.getReshapedInputDimensions(batchIdx),
-        batchInput.getRelativePaddings(batchIdx)
-      )
-
-      const losses = {
-        totalLoss: totalLoss.dataSync()[0],
-        noObjectLoss: noObjectLoss.dataSync()[0],
-        objectLoss: objectLoss.dataSync()[0],
-        coordLoss: coordLoss.dataSync()[0]
-      }
-
-      if (window.logTrainSteps) {
-        log(`ground truth boxes: ${groundTruthBoxes[batchIdx].length}`)
-        log(`noObjectLoss[${dataIdx}]: ${losses.noObjectLoss}`)
-        log(`objectLoss[${dataIdx}]: ${losses.objectLoss}`)
-        log(`coordLoss[${dataIdx}]: ${losses.coordLoss}`)
-        log(`totalLoss[${dataIdx}]: ${losses.totalLoss}`)
-        if (window.lossMap[filenames]) {
-          log(`loss change: ${losses.totalLoss - window.lossMap[filenames].totalLoss}`)
-        }
-      }
-
-      window.lossMap[filenames] = losses
-      return totalLoss
-    }, true)
-
-    ts = Date.now() - ts
-    if (window.logTrainSteps) {
-      log(`trainStep time for dataIdx ${dataIdx} (${inputSize}): ${ts} ms (${ts / batchInput.batchSize} ms / batch element)`)
-    }
-
-    loss.dispose()
-    await tf.nextFrame()
-    //console.log(tf.memory())
-  }))
-}
+function minimize(groundTruthBoxes, batchInput, inputSize, batch) {
+  const batchIdx = 0
+  const filename = batch.filenames[batchIdx]
+  const { dataIdx } = batch
+
+  return optimizer.minimize(() => {
+    const outTensor = window.net.forwardInput(batchInput, inputSize)
+    const {
+      noObjectLoss,
+      objectLoss,
+      coordLoss,
+      totalLoss
+    } = computeLoss(
+      outTensor,
+      groundTruthBoxes,
+      batchInput.getReshapedInputDimensions(batchIdx),
+      batchInput.getRelativePaddings(batchIdx)
+    )
+
+    const losses = {
+      totalLoss: totalLoss.dataSync()[0],
+      noObjectLoss: noObjectLoss.dataSync()[0],
+      objectLoss: objectLoss.dataSync()[0],
+      coordLoss: coordLoss.dataSync()[0]
+    }
+
+    const lossKey = `${filename}_${inputSize}`
+
+    if (window.logTrainSteps) {
+      log(`ground truth boxes: ${groundTruthBoxes.length}`)
+      log(`noObjectLoss[${dataIdx}]: ${losses.noObjectLoss}`)
+      log(`objectLoss[${dataIdx}]: ${losses.objectLoss}`)
+      log(`coordLoss[${dataIdx}]: ${losses.coordLoss}`)
+      log(`totalLoss[${dataIdx}]: ${losses.totalLoss}`)
+      if (window.lossMap[lossKey]) {
+        log(`loss change: ${losses.totalLoss - window.lossMap[lossKey].totalLoss}`)
+      }
+    }
+
+    window.lossMap[lossKey] = losses
+    return totalLoss
+  }, true)
+}
+
+async function trainStep(batchCreators, inputSizes, rescaleEveryNthBatch, onBatchProcessed = () => {}) {
+
+  async function step(currentBatchCreators) {
+    if (!currentBatchCreators.batchCreators.length) {
+      return
+    }
+
+    await promiseSequential(inputSizes.map(inputSize => async () => {
+      await promiseSequential(currentBatchCreators.batchCreators.map(batchCreator => async () => {
+        const batch = await batchCreator()
+        const { imgs, groundTruthBoxes, filenames, dataIdx } = batch
+        const batchInput = await faceapi.toNetInput(imgs)
+        const [imgHeight, imgWidth] = batchInput.inputs[batchIdx].shape
+
+        // skip groundTruthBoxes, which are too tiny
+        const scaleFactor = inputSize / Math.max(imgHeight, imgWidth)
+        const filteredGroundTruthBoxes = groundTruthBoxes[batchIdx].filter(({ x, y, width, height }) => {
+          const box = (new faceapi.Rect(x, y, width, height))
+            .toBoundingBox()
+            .rescale({ height: imgHeight, width: imgWidth })
+            .rescale(scaleFactor)
+          const isTooTiny = box.width < 50 || box.height < 50
+          if (isTooTiny) {
+            log(`skipping box for input size ${inputSize}: (${Math.floor(box.width)} x ${Math.floor(box.height)})`)
+          }
+          return !isTooTiny
+        })
+
+        if (!filteredGroundTruthBoxes.length) {
+          log(`no boxes for input size ${inputSize}, ${groundTruthBoxes[batchIdx].length} boxes were too small`)
+          batchInput.dispose()
+          onBatchProcessed(dataIdx, inputSize)
+          return
+        }
+
+        let ts = Date.now()
+        const loss = minimize(filteredGroundTruthBoxes, batchInput, inputSize, batch)
+        ts = Date.now() - ts
+        if (window.logTrainSteps) {
+          log(`trainStep time for dataIdx ${dataIdx} (${inputSize}): ${ts} ms`)
+        }
+
+        loss.dispose()
+        batchInput.dispose()
+        onBatchProcessed(dataIdx, inputSize)
+        await tf.nextFrame()
+      }))
+    }))
+
+    await step(currentBatchCreators.next(rescaleEveryNthBatch))
+  }
+
+  await step(batchCreators.next(rescaleEveryNthBatch))
+}

-function createBatchCreators(detectionFilenames, batchSize) {
+function createBatchCreators(
+  detectionFilenames,
+  batchSize,
+) {
   if (batchSize < 1) {
     throw new Error('invalid batch size: ' + batchSize)
   }
...

@@ -75,7 +125,7 @@ function createBatchCreators(detectionFilenames, batchSize) {
   pushToBatch(detectionFilenames)

-  const batchCreators = batches.map(filenamesForBatch => async () => {
+  const batchCreators = batches.map((filenamesForBatch, dataIdx) => async () => {
     const groundTruthBoxes = await Promise.all(filenamesForBatch.map(
       file => fetch(file).then(res => res.json())
     ))
...

@@ -87,9 +137,24 @@ function createBatchCreators(detectionFilenames, batchSize) {
     return {
       imgs,
       groundTruthBoxes,
-      filenames: filenamesForBatch
+      filenames: filenamesForBatch,
+      dataIdx
     }
   })

-  return batchCreators
+  let idx = 0
+  function next(n) {
+    const nextBatchCreators = batchCreators.slice(idx, idx + n)
+    idx += n
+    return {
+      batchCreators: nextBatchCreators,
+      next
+    }
+  }
+
+  return {
+    data: batchCreators,
+    next
+  }
 }
\ No newline at end of file
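The object now returned by createBatchCreators is a small cursor: next(n) hands out the next n batch creators and can be called again on its own return value, which is how the new trainStep walks the data in chunks of rescaleEveryNthBatch. A standalone sketch of that cursor, with plain strings standing in for the real async batch loaders (makeCursor is a hypothetical stand-in for the closure built inside createBatchCreators):

    function makeCursor(batchCreators) {
      let idx = 0
      function next(n) {
        const nextBatchCreators = batchCreators.slice(idx, idx + n)
        idx += n
        return { batchCreators: nextBatchCreators, next }
      }
      return { data: batchCreators, next }
    }

    const cursor = makeCursor(['batch0', 'batch1', 'batch2', 'batch3', 'batch4'])

    let chunk = cursor.next(2)
    console.log(chunk.batchCreators) // ['batch0', 'batch1']
    chunk = chunk.next(2)
    console.log(chunk.batchCreators) // ['batch2', 'batch3']
    chunk = chunk.next(2)
    console.log(chunk.batchCreators) // ['batch4'] (the following call returns [], which ends the recursion in step())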
tools/train/tinyYolov2/verify.html (view file @ 4ef65c1b)

@@ -138,7 +138,14 @@
 async function run() {
   $('#imgByNr').keydown(onKeyDown)
-  const weights = await loadNetWeights('/tmp/test_n100_320_114.weights')
+  const startIdx224 = 3220
+  const startIdx320 = 20688
+  const startIdx416 = 950
+  const startIdx608 = 15220
+  const weightsUrl = `/tmp/tmp__224_${startIdx224}__320_${startIdx320}__416_${startIdx416}__608_${startIdx608}.weights`
+
+  const weights = await loadNetWeights(weightsUrl)
   window.net = new faceapi.TinyYolov2(true)
   await window.net.load(weights)
...