Commit 9c00635

added a new tiny face detector + latest state of the model weights
1 parent 2145e28 commit 9c00635

File tree

6 files changed: +67 −0 lines
src/tinyFaceDetector/TinyFaceDetector.ts

Lines changed: 36 additions & 0 deletions

import { Point, TNetInput } from 'tfjs-image-recognition-base';
import { TinyYolov2 as TinyYolov2Base, TinyYolov2Types } from 'tfjs-tiny-yolov2';

import { FaceDetection } from '../classes';
import { BOX_ANCHORS, DEFAULT_MODEL_NAME, IOU_THRESHOLD, MEAN_RGB } from './const';

export class TinyFaceDetector extends TinyYolov2Base {

  constructor() {
    const config = {
      withSeparableConvs: true,
      iouThreshold: IOU_THRESHOLD,
      classes: ['face'],
      anchors: BOX_ANCHORS,
      meanRgb: MEAN_RGB,
      isFirstLayerConv2d: true,
      filterSizes: [3, 16, 32, 64, 128, 256, 512]
    }

    super(config)
  }

  public get anchors(): Point[] {
    return this.config.anchors
  }

  public async locateFaces(input: TNetInput, forwardParams: TinyYolov2Types.TinyYolov2ForwardParams): Promise<FaceDetection[]> {
    const objectDetections = await this.detect(input, forwardParams)
    return objectDetections.map(det => new FaceDetection(det.score, det.relativeBox, { width: det.imageWidth, height: det.imageHeight }))
  }

  protected loadQuantizedParams(modelUri: string | undefined) {
    const defaultModelName = DEFAULT_MODEL_NAME
    return super.loadQuantizedParams(modelUri, defaultModelName) as any
  }
}
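For orientation (not part of the commit), a minimal usage sketch of the new class. It assumes the TinyYolov2Base parent exposes a load(uri) method that eventually calls the overridden loadQuantizedParams, and that TinyYolov2ForwardParams accepts inputSize and scoreThreshold; both are assumptions based on the surrounding API, not confirmed by this diff.

import { TinyFaceDetector } from './TinyFaceDetector';

async function detectFaces(input: HTMLImageElement) {
  const net = new TinyFaceDetector()
  // assumed: load() is inherited from the base network and fetches the
  // quantized weights, falling back to DEFAULT_MODEL_NAME via loadQuantizedParams
  await net.load('/models')

  // assumed forward params shape: a lower scoreThreshold keeps weaker boxes
  const detections = await net.locateFaces(input, { inputSize: 416, scoreThreshold: 0.5 })
  detections.forEach(det => console.log(det.score, det.relativeBox))
}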
src/tinyFaceDetector/TinyFaceDetectorOptions.ts

Lines changed: 5 additions & 0 deletions

import { TinyYolov2Options } from '../tinyYolov2/TinyYolov2Options';

export class TinyFaceDetectorOptions extends TinyYolov2Options {
  protected _name: string = 'TinyFaceDetectorOptions'
}
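The subclass only overrides _name, which the base options class presumably uses to prefix its validation errors. A hypothetical construction, assuming the TinyYolov2Options base accepts inputSize and scoreThreshold (those fields are not shown in this commit):

import { TinyFaceDetectorOptions } from './TinyFaceDetectorOptions';

// hypothetical: field names follow the assumed TinyYolov2Options base class
const options = new TinyFaceDetectorOptions({ inputSize: 320, scoreThreshold: 0.6 })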

src/tinyFaceDetector/const.ts

Lines changed: 15 additions & 0 deletions

import { Point } from 'tfjs-image-recognition-base';

export const IOU_THRESHOLD = 0.4

export const BOX_ANCHORS = [
  new Point(1.603231, 2.094468),
  new Point(6.041143, 7.080126),
  new Point(2.882459, 3.518061),
  new Point(4.266906, 5.178857),
  new Point(9.041765, 10.66308)
]

export const MEAN_RGB: [number, number, number] = [117.001, 114.697, 97.404]

export const DEFAULT_MODEL_NAME = 'tiny_face_detection_model'
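The five BOX_ANCHORS are YOLOv2-style prior box sizes in grid-cell units, IOU_THRESHOLD drives non-max suppression, and MEAN_RGB is the per-channel mean subtracted from the input. A rough sketch of how an anchor scales a raw width/height prediction into a box relative to the image (generic YOLOv2 decoding, not code from this commit):

import { Point } from 'tfjs-image-recognition-base';
import { BOX_ANCHORS } from './const';

// generic YOLOv2 decoding sketch: tw/th are raw network outputs for one grid cell,
// numCells is the output grid size; the result is relative to the image size
function decodeBoxSize(anchor: Point, tw: number, th: number, numCells: number) {
  const width = (Math.exp(tw) * anchor.x) / numCells
  const height = (Math.exp(th) * anchor.y) / numCells
  return { width, height }
}

const example = decodeBoxSize(BOX_ANCHORS[0], 0.2, -0.1, 13)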

src/tinyFaceDetector/index.ts

Lines changed: 10 additions & 0 deletions

import { TinyFaceDetector } from './TinyFaceDetector';

export * from './TinyFaceDetector';
export * from './TinyFaceDetectorOptions';

export function createTinyFaceDetector(weights: Float32Array) {
  const net = new TinyFaceDetector()
  net.extractWeights(weights)
  return net
}
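createTinyFaceDetector is the path for uncompressed weights: it expects the full set of float32 parameters as a single Float32Array. A sketch of feeding it weights fetched from a raw binary file (the file location is an assumption, not part of this commit):

import { createTinyFaceDetector } from './index';

// sketch: fetch a raw (non-quantized) float32 weight dump and build the net from it
async function loadFromBinary(uri: string) {
  const res = await fetch(uri)
  const weights = new Float32Array(await res.arrayBuffer())
  return createTinyFaceDetector(weights)
}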
Binary weights shard (189 KB)

Binary file not shown.
Quantized weights manifest (JSON)

Lines changed: 1 addition & 0 deletions
[{"weights":[{"name":"conv0/filters","shape":[3,3,3,16],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.009022112453685087,"min":-1.1999409563401164}},{"name":"conv0/bias","shape":[16],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.005271678228004306,"min":-0.9120003334447448}},{"name":"conv1/depthwise_filter","shape":[3,3,16,1],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.003994523777681239,"min":-0.5073045197655173}},{"name":"conv1/pointwise_filter","shape":[1,1,16,32],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.013741331474453795,"min":-1.3878744789198332}},{"name":"conv1/bias","shape":[32],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.0015656957147168178,"min":-0.3272304043758149}},{"name":"conv2/depthwise_filter","shape":[3,3,32,1],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.0027797868438795502,"min":-0.3419137817971847}},{"name":"conv2/pointwise_filter","shape":[1,1,32,64],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.014880534714343502,"min":-1.5922172144347546}},{"name":"conv2/bias","shape":[64],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.00211673332195656,"min":-0.38101199795218077}},{"name":"conv3/depthwise_filter","shape":[3,3,64,1],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.006029989906385833,"min":-0.8864085162387174}},{"name":"conv3/pointwise_filter","shape":[1,1,64,128],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.016246150521671072,"min":-2.030768815208884}},{"name":"conv3/bias","shape":[128],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.003473230465954425,"min":-0.7884233157716545}},{"name":"conv4/depthwise_filter","shape":[3,3,128,1],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.006747295108495974,"min":-0.8096754130195168}},{"name":"conv4/pointwise_filter","shape":[1,1,128,256],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.021840399386836032,"min":-2.86109231967552}},{"name":"conv4/bias","shape":[256],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.004201083674150355,"min":-0.8318145674817703}},{"name":"conv5/depthwise_filter","shape":[3,3,256,1],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.008484941604090673,"min":-0.907888751637702}},{"name":"conv5/pointwise_filter","shape":[1,1,256,512],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.029725045783846986,"min":-3.804805860332414}},{"name":"conv5/bias","shape":[512],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.008291378208235198,"min":-1.4841566992741004}},{"name":"conv8/filters","shape":[1,1,512,25],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.02816048977421779,"min":-4.7028017922943715}},{"name":"conv8/bias","shape":[25],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.002222977841601652,"min":-0.4001360114882974}}],"paths":["tiny_face_detector_model-shard1"]}]
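Each manifest entry stores the tensor shape plus a uint8 quantization scale and min, so the bytes in the 189 KB shard can be expanded back to float32 as byte * scale + min (the standard TensorFlow.js quantization scheme). A small illustrative sketch of that de-quantization, not code from the loader itself:

// de-quantize a uint8 tensor back to float32: float = byte * scale + min
function dequantize(bytes: Uint8Array, scale: number, min: number): Float32Array {
  const out = new Float32Array(bytes.length)
  for (let i = 0; i < bytes.length; i++) {
    out[i] = bytes[i] * scale + min
  }
  return out
}

// e.g. conv0/bias: 16 bytes from the shard, scale ≈ 0.00527, min ≈ -0.912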
