Commit e0b53014, authored Jun 09, 2018 by vincent
examples for face extraction
Parent: cdd2c49d
Showing 7 changed files with 268 additions and 34 deletions (+268 −34)
examples/public/commons.js (+37 −0)
examples/public/styles.css (+5 −0)
examples/server.js (+3 −0)
examples/views/detectAndDrawFaces.html (+102 −0)
examples/views/detectAndRecognizeFaces.html (+116 −0)
examples/views/faceDetection.html (+3 −2)
examples/views/faceRecognition.html (+2 −32)
examples/public/commons.js

@@ -24,6 +24,35 @@ async function initFaceRecognitionNet() {
  return facerecognition.faceRecognitionNet(weights)
}

// fetch first image of each class and compute their descriptors
async function initTrainDescriptorsByClass(net) {
  return Promise.all(classes.map(
    async className => {
      const img = await facerecognition.bufferToImage(
        await fetchImage(getFaceImageUri(className, 1))
      )
      const descriptor = await net.computeFaceDescriptor(img)
      return {
        descriptor,
        className
      }
    }
  ))
}

function getBestMatch(allDescriptors, queryDescriptor) {
  return allDescriptors
    .map(({ descriptor, className }) => ({
      distance: facerecognition.round(
        facerecognition.euclideanDistance(descriptor, queryDescriptor)
      ),
      className
    }))
    .reduce((best, curr) => best.distance < curr.distance ? best : curr)
}

function renderNavBar(navbarId, exampleUri) {
  const examples = [
    {
...

@@ -41,6 +70,14 @@ function renderNavBar(navbarId, exampleUri) {
    {
      uri: 'face_similarity',
      name: 'Face Similarity'
    },
    {
      uri: 'detect_and_draw_faces',
      name: 'Detect and Draw Faces'
    },
    {
      uri: 'detect_and_recognize_faces',
      name: 'Detect and Recognize Faces'
    }
  ]
...
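Taken together, the two new helpers give the recognition views a simple nearest-neighbour lookup over the reference images. A minimal usage sketch, mirroring detectAndRecognizeFaces.html further down (recognitionNet, queryImg and the 0.6 distance threshold are taken from that example, not from commons.js itself):

  // compute one reference descriptor per class, then match a query descriptor against them
  const trainDescriptorsByClass = await initTrainDescriptorsByClass(recognitionNet)
  const queryDescriptor = await recognitionNet.computeFaceDescriptor(queryImg)
  const bestMatch = getBestMatch(trainDescriptorsByClass, queryDescriptor)
  // the recognition example treats a euclidean distance below 0.6 as a match
  const label = bestMatch.distance < 0.6 ? bestMatch.className : 'unknown'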
examples/public/styles.css

@@ -37,4 +37,8 @@
  position: absolute;
  top: 0;
  left: 0;
}

#facesContainer canvas {
  margin: 10px;
}
\ No newline at end of file
examples/server.js

@@ -15,5 +15,7 @@ app.get('/face_detection', (req, res) => res.sendFile(path.join(viewsDir, 'faceD
app.get('/face_detection_video', (req, res) => res.sendFile(path.join(viewsDir, 'faceDetectionVideo.html')))
app.get('/face_recognition', (req, res) => res.sendFile(path.join(viewsDir, 'faceRecognition.html')))
app.get('/face_similarity', (req, res) => res.sendFile(path.join(viewsDir, 'faceSimilarity.html')))
app.get('/detect_and_draw_faces', (req, res) => res.sendFile(path.join(viewsDir, 'detectAndDrawFaces.html')))
app.get('/detect_and_recognize_faces', (req, res) => res.sendFile(path.join(viewsDir, 'detectAndRecognizeFaces.html')))

app.listen(3000, () => console.log('Listening on port 3000!'))
\ No newline at end of file
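The hunk shows only the route handlers; a minimal sketch of the Express scaffolding they assume (the actual require/static setup in examples/server.js is not part of this diff, so treat these paths as assumptions):

  const express = require('express')
  const path = require('path')

  const app = express()
  // assumed layout: view templates under ./views, browser assets (commons.js, styles.css, ...) under ./public
  const viewsDir = path.join(__dirname, 'views')
  app.use(express.static(path.join(__dirname, 'public')))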
examples/views/detectAndDrawFaces.html (new file, mode 100644)

<!DOCTYPE html>
<html>
<head>
  <script src="face-recognition.js"></script>
  <script src="axios.min.js"></script>
  <script src="commons.js"></script>
  <link rel="stylesheet" href="styles.css">
  <link rel="stylesheet" href="https://cdnjs.cloudflare.com/ajax/libs/materialize/0.100.2/css/materialize.css">
  <script type="text/javascript" src="https://code.jquery.com/jquery-2.1.1.min.js"></script>
  <script src="https://cdnjs.cloudflare.com/ajax/libs/materialize/0.100.2/js/materialize.min.js"></script>
</head>
<body>
  <div class="center-content page-container">
    <div id="navbar"></div>
    <div class="progress" id="loader">
      <div class="indeterminate"></div>
    </div>
    <div style="position: relative" class="margin">
      <img id="inputImg" src="" style="max-width: 800px;" />
      <canvas id="overlay" />
    </div>
    <div id="facesContainer"></div>
    <div class="row side-by-side">
      <div id="selectList"></div>
      <div class="row">
        <label for="minConfidence">Min Confidence:</label>
        <input disabled value="0.7" id="minConfidence" type="text" class="bold">
      </div>
      <button class="waves-effect waves-light btn" onclick="onDecreaseThreshold()">
        <i class="material-icons left">-</i>
      </button>
      <button class="waves-effect waves-light btn" onclick="onIncreaseThreshold()">
        <i class="material-icons left">+</i>
      </button>
    </div>
  </div>

  <script>
    let minConfidence = 0.7
    let net

    function onIncreaseThreshold() {
      minConfidence = Math.min(facerecognition.round(minConfidence + 0.1), 1.0)
      $('#minConfidence').val(minConfidence)
      updateResults()
    }

    function onDecreaseThreshold() {
      minConfidence = Math.max(facerecognition.round(minConfidence - 0.1), 0.1)
      $('#minConfidence').val(minConfidence)
      updateResults()
    }

    async function updateResults() {
      const inputImgEl = $('#inputImg').get(0)
      const { width, height } = inputImgEl

      const canvas = $('#overlay').get(0)
      canvas.width = width
      canvas.height = height

      const input = new facerecognition.NetInput(inputImgEl)
      const detections = await net.locateFaces(input, minConfidence)
      facerecognition.drawDetection('overlay', detections.map(det => det.forSize(width, height)))

      const faceImages = await facerecognition.extractFaces(input.canvases[0], detections)
      $('#facesContainer').empty()
      faceImages.forEach(canvas => $('#facesContainer').append(canvas))
    }

    async function onSelectionChanged(uri) {
      const imgBuf = await fetchImage(uri)
      $(`#inputImg`).get(0).src = (await facerecognition.bufferToImage(imgBuf)).src
      updateResults()
    }

    async function run() {
      net = await initFaceDetectionNet()
      $('#loader').hide()
      onSelectionChanged($('#selectList select').val())
    }

    $(document).ready(function() {
      renderNavBar('#navbar', 'detect_and_draw_faces')
      renderImageSelectList('#selectList', async (uri) => { await onSelectionChanged(uri) }, 'bbt1.jpg')
      run()
    })
  </script>
</body>
</html>
\ No newline at end of file
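The faces returned by facerecognition.extractFaces are plain canvas elements (the example simply appends them to #facesContainer), so anything the standard canvas API offers works on them. For instance, a small sketch reusing faceImages from updateResults above:

  // export the first extracted face as a PNG data URL, e.g. for download or upload
  const firstFacePng = faceImages.length ? faceImages[0].toDataURL('image/png') : null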
examples/views/detectAndRecognizeFaces.html (new file, mode 100644)

<!DOCTYPE html>
<html>
<head>
  <script src="face-recognition.js"></script>
  <script src="axios.min.js"></script>
  <script src="commons.js"></script>
  <link rel="stylesheet" href="styles.css">
  <link rel="stylesheet" href="https://cdnjs.cloudflare.com/ajax/libs/materialize/0.100.2/css/materialize.css">
  <script type="text/javascript" src="https://code.jquery.com/jquery-2.1.1.min.js"></script>
  <script src="https://cdnjs.cloudflare.com/ajax/libs/materialize/0.100.2/js/materialize.min.js"></script>
</head>
<body>
  <div class="center-content page-container">
    <div id="navbar"></div>
    <div class="progress" id="loader">
      <div class="indeterminate"></div>
    </div>
    <div style="position: relative" class="margin">
      <img id="inputImg" src="" style="max-width: 800px;" />
      <canvas id="overlay" />
    </div>
    <div class="row side-by-side">
      <div id="selectList"></div>
      <div class="row">
        <label for="minConfidence">Min Confidence:</label>
        <input disabled value="0.7" id="minConfidence" type="text" class="bold">
      </div>
      <button class="waves-effect waves-light btn" onclick="onDecreaseThreshold()">
        <i class="material-icons left">-</i>
      </button>
      <button class="waves-effect waves-light btn" onclick="onIncreaseThreshold()">
        <i class="material-icons left">+</i>
      </button>
    </div>
  </div>

  <script>
    const threshold = 0.6
    let minConfidence = 0.7
    let detectionNet, recognitionNet
    let trainDescriptorsByClass = []

    function onIncreaseThreshold() {
      minConfidence = Math.min(facerecognition.round(minConfidence + 0.1), 1.0)
      $('#minConfidence').val(minConfidence)
      updateResults()
    }

    function onDecreaseThreshold() {
      minConfidence = Math.max(facerecognition.round(minConfidence - 0.1), 0.1)
      $('#minConfidence').val(minConfidence)
      updateResults()
    }

    async function updateResults() {
      const inputImgEl = $('#inputImg').get(0)
      const { width, height } = inputImgEl

      const canvas = $('#overlay').get(0)
      canvas.width = width
      canvas.height = height

      const input = new facerecognition.NetInput(inputImgEl)
      const detections = await detectionNet.locateFaces(input, minConfidence)
      const detectionsForSize = detections.map(det => det.forSize(width, height))
      facerecognition.drawDetection('overlay', detectionsForSize, { withScore: false })

      const faceTensors = await facerecognition.extractFaceTensors(input, detections)
      const descriptors = await Promise.all(faceTensors.map(t => recognitionNet.computeFaceDescriptor(t)))
      // free memory for face image tensors after we computed their descriptors
      faceTensors.forEach(t => t.dispose())

      descriptors.forEach((descriptor, i) => {
        const bestMatch = getBestMatch(trainDescriptorsByClass, descriptor)
        const text = `${bestMatch.distance < threshold ? bestMatch.className : 'unkown'} (${bestMatch.distance})`
        const { x, y } = detectionsForSize[i].box
        facerecognition.drawText(canvas.getContext('2d'), x, y, text, facerecognition.getDefaultDrawOptions())
      })
    }

    async function onSelectionChanged(uri) {
      const imgBuf = await fetchImage(uri)
      $(`#inputImg`).get(0).src = (await facerecognition.bufferToImage(imgBuf)).src
      updateResults()
    }

    async function run() {
      detectionNet = await initFaceDetectionNet()
      recognitionNet = await initFaceRecognitionNet()
      trainDescriptorsByClass = await initTrainDescriptorsByClass(recognitionNet)
      $('#loader').hide()
      onSelectionChanged($('#selectList select').val())
    }

    $(document).ready(function() {
      renderNavBar('#navbar', 'detect_and_recognize_faces')
      renderImageSelectList('#selectList', async (uri) => { await onSelectionChanged(uri) }, 'bbt1.jpg')
      run()
    })
  </script>
</body>
</html>
\ No newline at end of file
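Stripped of the jQuery and drawing plumbing, updateResults boils down to a detect → extract → describe → match pipeline. A condensed sketch using the same calls as above (assumes detectionNet, recognitionNet and trainDescriptorsByClass have been initialized as in run()):

  async function recognizeAll(input, minConfidence) {
    const detections = await detectionNet.locateFaces(input, minConfidence)
    const faceTensors = await facerecognition.extractFaceTensors(input, detections)
    const descriptors = await Promise.all(faceTensors.map(t => recognitionNet.computeFaceDescriptor(t)))
    // dispose the face tensors once their descriptors have been computed
    faceTensors.forEach(t => t.dispose())
    // pair each detection with its best matching class
    return descriptors.map((descriptor, i) => ({
      detection: detections[i],
      match: getBestMatch(trainDescriptorsByClass, descriptor)
    }))
  }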
examples/views/faceDetection.html

@@ -57,12 +57,13 @@
   }

   async function updateResults() {
-    const input = new facerecognition.NetInput('inputImg')
-    const { width, height } = input
+    const inputImgEl = $('#inputImg').get(0)
+    const { width, height } = inputImgEl

     const canvas = $('#overlay').get(0)
     canvas.width = width
     canvas.height = height

+    const input = new facerecognition.NetInput(inputImgEl)
     result = await net.locateFaces(input, minConfidence)
     facerecognition.drawDetection('overlay', result.map(det => det.forSize(width, height)))
   }
...
examples/views/faceRecognition.html

@@ -108,30 +108,6 @@
     getImg().src = src
   }

-  async function loadTrainingImages() {
-    return await Promise.all(classes.map(
-      async className => ({
-        img: await facerecognition.bufferToImage(
-          await fetchImage(getFaceImageUri(className, 1))
-        ),
-        className
-      })
-    ))
-  }
-
-  function getBestMatch(queryDescriptor) {
-    return trainDescriptorsByClass
-      .map(({ descriptor, className }) => ({
-        distance: facerecognition.round(
-          facerecognition.euclideanDistance(descriptor, queryDescriptor)
-        ),
-        className
-      }))
-      .reduce((best, curr) => best.distance < curr.distance ? best : curr)
-  }
-
   async function runFaceRecognition() {
     async function next() {
       const imgBuf = await fetchImage(getFaceImageUri(classes[currClassIdx], currImageIdx))
...

@@ -143,7 +119,7 @@
       const descriptor = await net.computeFaceDescriptor(input)
       displayTimeStats(Date.now() - ts)

-      const bestMatch = getBestMatch(descriptor)
+      const bestMatch = getBestMatch(trainDescriptorsByClass, descriptor)
       $('#prediction').val(`${bestMatch.distance < threshold ? bestMatch.className : 'unkown'} (${bestMatch.distance})`)

       currImageIdx = currClassIdx === (classes.length - 1)
...

@@ -164,13 +140,7 @@
     net = await initFaceRecognitionNet()

     setStatusText('computing initial descriptors...')
-    const trainImgs = await loadTrainingImages()
-    trainDescriptorsByClass = await Promise.all(trainImgs.map(
-      async ({ className, img }) => ({
-        descriptor: await net.computeFaceDescriptor(img),
-        className
-      })
-    ))
+    trainDescriptorsByClass = await initTrainDescriptorsByClass(net)

     $('#loader').hide()
     runFaceRecognition()
...