
Commit f260f87

testcases for tinyYolov2
1 parent fd42e09 commit f260f87

File tree

3 files changed: +168 -7 lines

  test/tests/e2e/expectedResults.ts
  test/tests/e2e/tinyYolov2.test.ts
  test/utils.ts


test/tests/e2e/expectedResults.ts

Lines changed: 9 additions & 0 deletions

@@ -20,6 +20,15 @@ export const expectedMtcnnBoxes = [
   { x: 451, y: 176, width: 122, height: 122 }
 ]
 
+export const expectedTinyYolov2Boxes = [
+  { x: 52, y: 263, width: 106, height: 102 },
+  { x: 455, y: 191, width: 103, height: 97 },
+  { x: 236, y: 57, width: 90, height: 85 },
+  { x: 257, y: 243, width: 86, height: 95 },
+  { x: 578, y: 76, width: 86, height: 91 },
+  { x: 87, y: 30, width: 92, height: 93 }
+]
+
 export const expectedMtcnnFaceLandmarks = [
   [new Point(117, 58), new Point(156, 63), new Point(141, 86), new Point(109, 98), new Point(147, 104)],
   [new Point(82, 292), new Point(134, 304), new Point(104, 330), new Point(72, 342), new Point(120, 353)],

test/tests/e2e/tinyYolov2.test.ts

Lines changed: 137 additions & 0 deletions

@@ -0,0 +1,137 @@
+import * as faceapi from '../../../src';
+import { SizeType } from '../../../src/tinyYolov2/types';
+import { describeWithNets, expectAllTensorsReleased, expectRectClose } from '../../utils';
+import { expectedTinyYolov2Boxes } from './expectedResults';
+
+
+describe('tinyYolov2', () => {
+
+  let imgEl: HTMLImageElement
+
+  beforeAll(async () => {
+    const img = await (await fetch('base/test/images/faces.jpg')).blob()
+    imgEl = await faceapi.bufferToImage(img)
+  })
+
+  describeWithNets('quantized weights', { withTinyYolov2: { quantized: true } }, ({ tinyYolov2 }) => {
+
+    it('inputSize lg, finds all faces', async () => {
+      const detections = await tinyYolov2.locateFaces(imgEl, { inputSize: SizeType.LG })
+
+      const expectedScores = [0.86, 0.86, 0.85, 0.83, 0.81, 0.81]
+      const maxBoxDelta = 3
+      const boxOrder = [0, 1, 2, 3, 4, 5]
+
+      expect(detections.length).toEqual(6)
+      detections.forEach((det, i) => {
+        expect(det.getScore()).toBeCloseTo(expectedScores[i], 2)
+        expectRectClose(det.getBox(), expectedTinyYolov2Boxes[boxOrder[i]], maxBoxDelta)
+      })
+    })
+
+    it('inputSize md, finds all faces', async () => {
+      const detections = await tinyYolov2.locateFaces(imgEl, { inputSize: SizeType.MD })
+
+      const expectedScores = [0.89, 0.87, 0.83, 0.82, 0.81, 0.72]
+      const maxBoxDelta = 16
+      const boxOrder = [5, 4, 0, 2, 1, 3]
+
+      expect(detections.length).toEqual(6)
+      detections.forEach((det, i) => {
+        expect(det.getScore()).toBeCloseTo(expectedScores[i], 2)
+        expectRectClose(det.getBox(), expectedTinyYolov2Boxes[boxOrder[i]], maxBoxDelta)
+      })
+    })
+
+    it('inputSize custom, finds all faces', async () => {
+      const detections = await tinyYolov2.locateFaces(imgEl, { inputSize: 416 })
+
+      const expectedScores = [0.89, 0.87, 0.83, 0.82, 0.81, 0.72]
+      const maxBoxDelta = 16
+      const boxOrder = [5, 4, 0, 2, 1, 3]
+
+      expect(detections.length).toEqual(6)
+      detections.forEach((det, i) => {
+        expect(det.getScore()).toBeCloseTo(expectedScores[i], 2)
+        expectRectClose(det.getBox(), expectedTinyYolov2Boxes[boxOrder[i]], maxBoxDelta)
+      })
+    })
+
+  })
+
+  describeWithNets('uncompressed weights', { withTinyYolov2: { quantized: false } }, ({ tinyYolov2 }) => {
+
+    it('inputSize lg, finds all faces', async () => {
+      const detections = await tinyYolov2.locateFaces(imgEl, { inputSize: SizeType.LG })
+
+      const expectedScores = [0.86, 0.86, 0.85, 0.83, 0.81, 0.81]
+      const maxBoxDelta = 1
+      const boxOrder = [0, 1, 2, 3, 4, 5]
+
+      expect(detections.length).toEqual(6)
+      detections.forEach((det, i) => {
+        expect(det.getScore()).toBeCloseTo(expectedScores[i], 2)
+        expectRectClose(det.getBox(), expectedTinyYolov2Boxes[boxOrder[i]], maxBoxDelta)
+      })
+    })
+
+    it('inputSize md, finds all faces', async () => {
+      const detections = await tinyYolov2.locateFaces(imgEl, { inputSize: SizeType.MD })
+
+      const expectedScores = [0.89, 0.87, 0.83, 0.83, 0.81, 0.73]
+      const maxBoxDelta = 14
+      const boxOrder = [5, 4, 2, 0, 1, 3]
+
+      expect(detections.length).toEqual(6)
+      detections.forEach((det, i) => {
+        expect(det.getScore()).toBeCloseTo(expectedScores[i], 2)
+        expectRectClose(det.getBox(), expectedTinyYolov2Boxes[boxOrder[i]], maxBoxDelta)
+      })
+    })
+
+    it('inputSize custom, finds all faces', async () => {
+      const detections = await tinyYolov2.locateFaces(imgEl, { inputSize: 416 })
+
+      const expectedScores = [0.89, 0.87, 0.83, 0.83, 0.81, 0.73]
+      const maxBoxDelta = 14
+      const boxOrder = [5, 4, 2, 0, 1, 3]
+
+      expect(detections.length).toEqual(6)
+      detections.forEach((det, i) => {
+        expect(det.getScore()).toBeCloseTo(expectedScores[i], 2)
+        expectRectClose(det.getBox(), expectedTinyYolov2Boxes[boxOrder[i]], maxBoxDelta)
+      })
+    })
+
+  })
+
+  describe('no memory leaks', () => {
+
+    describe('NeuralNetwork, uncompressed model', () => {
+
+      it('disposes all param tensors', async () => {
+        await expectAllTensorsReleased(async () => {
+          const res = await fetch('base/weights_uncompressed/tiny_yolov2_model.weights')
+          const weights = new Float32Array(await res.arrayBuffer())
+          const net = faceapi.createTinyYolov2(weights)
+          net.dispose()
+        })
+      })
+
+    })
+
+    describe('NeuralNetwork, quantized model', () => {
+
+      it('disposes all param tensors', async () => {
+        await expectAllTensorsReleased(async () => {
+          const net = new faceapi.TinyYolov2()
+          await net.load('base/weights')
+          net.dispose()
+        })
+      })
+
+    })
+
+  })
+
+})
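
For reference, the detection flow these specs exercise looks roughly like the following standalone sketch. It only uses calls that appear in the diff above (bufferToImage, load, locateFaces, getScore, getBox, dispose); the weights URI, the image URL, the function name, and the box field names (x, y, width, height, matching the expectedTinyYolov2Boxes entries) are illustrative assumptions, not something this commit pins down.

// Minimal usage sketch (not part of the commit), written as if it lived next to the tests.
import * as faceapi from '../../../src';

async function logDetectedFaces(imageUrl: string): Promise<void> {
  // Load the quantized TinyYolov2 model, as in the 'NeuralNetwork, quantized model' spec.
  const net = new faceapi.TinyYolov2()
  await net.load('/weights') // assumption: a directory serving the quantized weight files

  // Fetch the image and turn it into an HTMLImageElement, as in beforeAll above.
  const blob = await (await fetch(imageUrl)).blob()
  const imgEl = await faceapi.bufferToImage(blob)

  // Run detection at a custom input size, mirroring the 'inputSize custom' specs.
  const detections = await net.locateFaces(imgEl, { inputSize: 416 })

  detections.forEach(det => {
    // Box fields assumed to match the expectedTinyYolov2Boxes shape: x, y, width, height.
    const { x, y, width, height } = det.getBox()
    console.log(`score=${det.getScore().toFixed(2)}, box=(${x}, ${y}, ${width}, ${height})`)
  })

  // Release the parameter tensors once the net is no longer needed.
  net.dispose()
}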

test/utils.ts

Lines changed: 22 additions & 7 deletions

@@ -5,7 +5,8 @@ import * as faceapi from '../src/';
 import { NeuralNetwork } from '../src/commons/NeuralNetwork';
 import { IPoint } from '../src/';
 import { allFacesFactory, allFacesMtcnnFactory } from '../src/allFacesFactory';
-import { allFacesMtcnnFunction, allFacesFunction } from '../src/globalApi';
+import { allFacesMtcnnFunction, allFacesFunction, tinyYolov2 } from '../src/globalApi';
+import { TinyYolov2 } from '../src/tinyYolov2/TinyYolov2';
 
 export function zeros(length: number): Float32Array {
   return new Float32Array(length)
@@ -55,12 +56,13 @@ export type WithNetOptions = {
 }
 
 export type InjectNetArgs = {
+  allFaces: allFacesFunction
+  allFacesMtcnn: allFacesMtcnnFunction
   faceDetectionNet: faceapi.FaceDetectionNet
   faceLandmarkNet: faceapi.FaceLandmarkNet
   faceRecognitionNet: faceapi.FaceRecognitionNet
   mtcnn: faceapi.Mtcnn
-  allFaces: allFacesFunction
-  allFacesMtcnn: allFacesMtcnnFunction
+  tinyYolov2: faceapi.TinyYolov2
 }
 
 
@@ -71,6 +73,7 @@ export type DescribeWithNetsOptions = {
   withFaceLandmarkNet?: WithNetOptions
   withFaceRecognitionNet?: WithNetOptions
   withMtcnn?: WithNetOptions
+  withTinyYolov2?: WithNetOptions
 }
 
 async function loadNetWeights(uri: string): Promise<Float32Array> {
@@ -99,17 +102,19 @@ export function describeWithNets(
     let faceLandmarkNet: faceapi.FaceLandmarkNet = new faceapi.FaceLandmarkNet()
     let faceRecognitionNet: faceapi.FaceRecognitionNet = new faceapi.FaceRecognitionNet()
     let mtcnn: faceapi.Mtcnn = new faceapi.Mtcnn()
+    let tinyYolov2: faceapi.TinyYolov2 = new faceapi.TinyYolov2()
     let allFaces = allFacesFactory(faceDetectionNet, faceLandmarkNet, faceRecognitionNet)
     let allFacesMtcnn = allFacesMtcnnFactory(mtcnn, faceRecognitionNet)
 
     beforeAll(async () => {
       const {
+        withAllFaces,
+        withAllFacesMtcnn,
         withFaceDetectionNet,
         withFaceLandmarkNet,
        withFaceRecognitionNet,
        withMtcnn,
-        withAllFaces,
-        withAllFacesMtcnn
+        withTinyYolov2
      } = options
 
      if (withFaceDetectionNet || withAllFaces) {
@@ -118,6 +123,7 @@ export function describeWithNets(
          !!withFaceDetectionNet && !withFaceDetectionNet.quantized && 'ssd_mobilenetv1_model.weights'
        )
      }
+
      if (withFaceLandmarkNet || withAllFaces) {
        await initNet<faceapi.FaceLandmarkNet>(
          faceLandmarkNet,
@@ -132,22 +138,31 @@ export function describeWithNets(
          'face_recognition_model.weights'
        )
      }
+
      if (withMtcnn || withAllFacesMtcnn) {
        await initNet<faceapi.Mtcnn>(
          mtcnn,
          !!withMtcnn && !withMtcnn.quantized && 'mtcnn_model.weights'
        )
      }
+
+      if (withTinyYolov2) {
+        await initNet<faceapi.TinyYolov2>(
+          tinyYolov2,
+          !!withTinyYolov2 && !withTinyYolov2.quantized && 'tiny_yolov2_model.weights'
+        )
+      }
    })
 
    afterAll(() => {
      faceDetectionNet && faceDetectionNet.dispose()
      faceLandmarkNet && faceLandmarkNet.dispose()
      faceRecognitionNet && faceRecognitionNet.dispose()
-      mtcnn && mtcnn.dispose()
+      mtcnn && mtcnn.dispose(),
+      tinyYolov2 && tinyYolov2.dispose()
    })
 
-    specDefinitions({ faceDetectionNet, faceLandmarkNet, faceRecognitionNet, mtcnn, allFaces, allFacesMtcnn })
+    specDefinitions({ allFaces, allFacesMtcnn, faceDetectionNet, faceLandmarkNet, faceRecognitionNet, mtcnn, tinyYolov2 })
  })
 }