-
Notifications
You must be signed in to change notification settings - Fork 0
/
facetracking_API_reference.d.ts
276 lines (234 loc) · 6.35 KB
/
facetracking_API_reference.d.ts
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
238
239
240
241
242
243
244
245
246
247
248
249
250
251
252
253
254
255
256
257
258
259
260
261
262
263
264
265
266
267
268
269
270
271
272
273
274
275
276
// for other MediaPipe JS API references, see https://github.com/google/mediapipe/issues/1408#issuecomment-810652766
/**
 * @fileoverview Declarations for the face tracking API.
 */
/**
 * Version number of this package.
 * `declare` added for consistency with every other ambient value declaration
 * in this file (and so the declaration is valid outside a .d.ts ambient
 * context, where an uninitialized `const` is an error).
 */
export declare const VERSION: string;
/**
 * Pairs of (start, end) landmark indexes. Drawing a line segment for each
 * pair overlays a skeleton on top of the individual landmark points.
 */
export declare type LandmarkConnectionArray = [number, number][];
/**
 * Connection pairs for the lips contour.
 * NOTE(review): the original comments on these constants say "Subgroup of
 * FACEMESH_CONNECTIONS", but no FACEMESH_CONNECTIONS constant is declared in
 * this file — presumably it lives upstream in MediaPipe; verify.
 * FACEMESH_CONTOURS below is documented as the combination of these groups.
 */
export declare const FACEMESH_LIPS: LandmarkConnectionArray;
/**
 * Connection pairs for the left-eye contour.
 */
export declare const FACEMESH_LEFT_EYE: LandmarkConnectionArray;
/**
 * Connection pairs for the left eyebrow.
 */
export declare const FACEMESH_LEFT_EYEBROW: LandmarkConnectionArray;
/**
 * Connection pairs for the left iris.
 */
export declare const FACEMESH_LEFT_IRIS: LandmarkConnectionArray;
/**
 * Connection pairs for the right-eye contour.
 */
export declare const FACEMESH_RIGHT_EYE: LandmarkConnectionArray;
/**
 * Connection pairs for the right eyebrow.
 */
export declare const FACEMESH_RIGHT_EYEBROW: LandmarkConnectionArray;
/**
 * Connection pairs for the right iris.
 */
export declare const FACEMESH_RIGHT_IRIS: LandmarkConnectionArray;
/**
 * Connection pairs for the outline (oval) of the face.
 */
export declare const FACEMESH_FACE_OVAL: LandmarkConnectionArray;
/**
 * The combination of all the contour groups listed above, suitable for
 * drawing the major facial features over the landmarks returned in onResults.
 */
export declare const FACEMESH_CONTOURS: LandmarkConnectionArray;
/**
 * Edges over the full set of face-mesh landmarks returned in onResults
 * (the complete triangulated tesselation, not just the contours).
 */
export declare const FACEMESH_TESSELATION: LandmarkConnectionArray;
/**
 * Represents a single normalized landmark.
 * NOTE(review): presumably x/y are normalized to [0, 1] relative to the image
 * width/height, per MediaPipe convention — TODO confirm against upstream docs.
 */
export declare interface NormalizedLandmark {
  x: number;
  y: number;
  z: number;
  // Optional — presumably the likelihood the landmark is visible (not
  // occluded); not all solutions populate it. Verify against the caller.
  visibility?: number;
}
/**
 * The kinds of image sources the solution accepts as input.
 */
export type InputImage =
    HTMLImageElement | HTMLVideoElement | HTMLCanvasElement;
/**
 * Legal inputs to `send`. Currently a single image stream.
 */
export interface InputMap {
  // The frame to process; see InputImage for the accepted element types.
  image: InputImage;
}
/**
 * GpuBuffers should all be compatible with Canvas' `drawImage`.
 * Exported so that consumers can name the type of `Results.image`; the
 * original non-exported alias made that type unnameable from outside this
 * module even though `Results` (which is exported) references it.
 */
export type GpuBuffer = HTMLCanvasElement | HTMLImageElement | ImageBitmap;
/**
 * The landmarks of a single face, in order.
 */
export type NormalizedLandmarkList = Array<NormalizedLandmark>;
/**
 * One landmark list per detected face.
 */
export type NormalizedLandmarkListList = Array<NormalizedLandmarkList>;
/**
 * Describes the vertex format of a mesh so that the flat vertex buffer can be
 * decoded (see Mesh.getVertexBufferList / Mesh.getVertexType).
 */
export interface VertexType {
  VERTEX_PT: 0; // Position (XYZ) + Texture (UV)
}
/**
 * Describes the primitive topology of a mesh's index buffer
 * (see Mesh.getIndexBufferList / Mesh.getPrimitiveType).
 */
export interface PrimitiveType {
  TRIANGLE: 0;
}
/**
 * Memory layout of a matrix's packed data list in the MatrixData proto
 * (see MatrixData.getLayout).
 */
export interface Layout {
  COLUMN_MAJOR: 0;
  ROW_MAJOR: 1;
}
/**
 * A const object harboring all the default variables for the perspective
 * camera in FaceGeometry. The literal types pin the exact default values.
 */
export interface DefaultCameraParams {
  // Vertical field of view, in degrees.
  verticalFovDegrees: 63.0;
  // Near / far plane distances — presumably clipping planes of the
  // perspective projection; TODO confirm units against FaceGeometry docs.
  near: 1.0;
  far: 10000.0;
}
/**
 * Collects the face-geometry enums and camera defaults into a single
 * namespace-like constant.
 */
export declare const FACE_GEOMETRY: {
  VertexType: VertexType;
  PrimitiveType: PrimitiveType;
  Layout: Layout;
  DEFAULT_CAMERA_PARAMS: DefaultCameraParams;
};
/**
 * A representation of a mesh given by the Mesh3d proto.
 */
export interface Mesh {
  // Flat vertex data; decode according to getVertexType().
  getVertexBufferList(): Float32Array;
  getVertexType(): VertexType;
  // Flat index data; interpret according to getPrimitiveType().
  getIndexBufferList(): Uint32Array;
  getPrimitiveType(): PrimitiveType;
}
/**
 * A representation of a matrix given by the MatrixData proto.
 */
export interface MatrixData {
  // All matrix entries flattened into one list; element ordering follows
  // getLayout() (row-major vs column-major).
  getPackedDataList(): number[];
  getRows(): number;
  getCols(): number;
  // Layout of getPackedDataList(); see the Layout interface.
  getLayout(): Layout;
}
/**
 * A representation of a face geometry from the face geometry proto:
 * a 3D mesh plus a pose transform.
 */
export interface FaceGeometry {
  getMesh(): Mesh;
  // Presumably a 4x4 transform mapping the canonical face model into camera
  // space — TODO confirm dimensions/semantics against the proto definition.
  getPoseTransformMatrix(): MatrixData;
}
/**
 * Converts a MatrixData proto to a traditional JS array.
 *
 * @param mat Packed matrix data (see MatrixData.getLayout for ordering).
 * @returns A rows x cols nested array — presumably result[r][c] addresses
 *     row r, column c regardless of the packed layout; verify against the
 *     implementation.
 */
export function matrixDataToMatrix(mat: MatrixData): number[][];
/**
 * Possible results from FaceMesh, delivered to the listener registered with
 * onResults once per processed frame.
 */
export interface Results {
  // One landmark list per detected face.
  multiFaceLandmarks: NormalizedLandmarkListList;
  // One geometry per detected face — presumably only populated when
  // Options.enableFaceGeometry is true; TODO confirm.
  multiFaceGeometry: FaceGeometry[];
  // The frame the results were computed from; drawable via canvas drawImage.
  image: GpuBuffer;
}
/**
 * Configurable options for FaceMesh. All fields are optional; unset fields
 * keep the solution's defaults.
 */
export interface Options {
  // Camera parameters for face geometry (cf. DefaultCameraParams).
  cameraNear?: number;
  cameraFar?: number;
  cameraVerticalFovDegrees?: number;
  // Presumably enables computation of Results.multiFaceGeometry — verify.
  enableFaceGeometry?: boolean;
  // Presumably mirrors handling for front-facing ("selfie") cameras — verify.
  selfieMode?: boolean;
  // Maximum number of faces to detect/track simultaneously.
  maxNumFaces?: number;
  // NOTE(review): per upstream MediaPipe docs this refines landmarks around
  // eyes/lips and adds irises — verify.
  refineLandmarks?: boolean;
  // Confidence thresholds — presumably in [0, 1]; verify.
  minDetectionConfidence?: number;
  minTrackingConfidence?: number;
}
/**
 * Callback invoked with each new set of results from FaceMesh; may be async.
 */
export type ResultsListener = (results: Results) => Promise<void> | void;
/**
 * Contains all of the setup options to drive the face solution.
 */
export interface FaceMeshConfig {
  // Maps an asset file name (presumably .wasm/model files — verify) to the
  // URL it should be fetched from, letting callers self-host solution assets.
  locateFile?: (path: string, prefix?: string) => string;
}
/**
 * Declares the interface of FaceMesh. See the exported FaceMesh class in this
 * file for per-method documentation.
 */
declare interface FaceMeshInterface {
  close(): Promise<void>;
  onResults(listener: ResultsListener): void;
  initialize(): Promise<void>;
  reset(): void;
  send(inputs: InputMap): Promise<void>;
  setOptions(options: Options): void;
}
/**
 * Encapsulates the entire FaceMesh solution. All that is needed from the
 * developer is the source of the image data. The user will call `send`
 * repeatedly and if a face is detected, then the user can receive callbacks
 * with this metadata.
 */
export declare class FaceMesh implements FaceMeshInterface {
  /**
   * @param config Optional setup options, e.g. `locateFile` for self-hosted
   *     solution assets.
   */
  constructor(config?: FaceMeshConfig);
  /**
   * Shuts down the object. Call before creating a new instance.
   */
  close(): Promise<void>;
  /**
   * Registers a single callback that will carry any results that occur
   * after calling Send(). Presumably re-registering replaces the previous
   * listener ("a single callback") — verify against the implementation.
   */
  onResults(listener: ResultsListener): void;
  /**
   * Initializes the solution. This includes loading ML models and mediapipe
   * configurations, as well as setting up potential listeners for metadata. If
   * `initialize` is not called manually, then it will be called the first time
   * the developer calls `send`.
   */
  initialize(): Promise<void>;
  /**
   * Tells the graph to restart before the next frame is sent.
   */
  reset(): void;
  /**
   * Processes a single frame of data, which depends on the options sent to the
   * constructor. Results are delivered via the listener registered with
   * onResults, not via the returned promise.
   */
  send(inputs: InputMap): Promise<void>;
  /**
   * Adjusts options in the solution. This may trigger a graph reload the next
   * time the graph tries to run.
   */
  setOptions(options: Options): void;
}