Pretrained PoseNet model in TensorFlow.js for real-time human pose estimation from images and video streams
—
Constants and data structures defining the 17-point human skeleton model used by PoseNet for pose detection and analysis.
Array of human body part names in the standard order used by PoseNet.
/**
* Array of 17 body part names in standard keypoint order
* Index corresponds to keypoint ID in pose detection
*/
const partNames: string[];

The partNames array contains these 17 body parts in order:
const partNames = [
'nose', // 0
'leftEye', // 1
'rightEye', // 2
'leftEar', // 3
'rightEar', // 4
'leftShoulder', // 5
'rightShoulder', // 6
'leftElbow', // 7
'rightElbow', // 8
'leftWrist', // 9
'rightWrist', // 10
'leftHip', // 11
'rightHip', // 12
'leftKnee', // 13
'rightKnee', // 14
'leftAnkle', // 15
'rightAnkle' // 16
];

Usage Examples:
import { partNames } from '@tensorflow-models/posenet';
// Get part name by index
const noseName = partNames[0]; // 'nose'
const leftWristName = partNames[9]; // 'leftWrist'
// Find keypoint by part name
const pose = await net.estimateSinglePose(imageElement);
const noseKeypoint = pose.keypoints.find(kp => kp.part === 'nose');
// Iterate through all body parts
partNames.forEach((partName, index) => {
const keypoint = pose.keypoints[index];
console.log(`${partName} (${index}): confidence ${keypoint.score.toFixed(2)}`);
});
// Filter keypoints by body region
const faceParts = partNames.filter(name =>
['nose', 'leftEye', 'rightEye', 'leftEar', 'rightEar'].includes(name)
);
const armParts = partNames.filter(name =>
name.includes('Shoulder') || name.includes('Elbow') || name.includes('Wrist')
);

Object mapping body part names to their corresponding numeric IDs.
/**
* Mapping from body part names to numeric IDs
* Inverse of partNames array for efficient lookups
*/
const partIds: {[jointName: string]: number};

Usage Examples:
import { partIds } from '@tensorflow-models/posenet';
// Look up part ID by name
const noseId = partIds['nose']; // 0
const leftWristId = partIds['leftWrist']; // 9
const rightAnkleId = partIds['rightAnkle']; // 16
// Access keypoint by part name
const pose = await net.estimateSinglePose(imageElement);
const noseKeypoint = pose.keypoints[partIds['nose']];
const leftShoulderKeypoint = pose.keypoints[partIds['leftShoulder']];
// Check if specific parts are detected with high confidence
const requiredParts = ['nose', 'leftShoulder', 'rightShoulder'];
const detectedParts = requiredParts.filter(partName => {
const keypoint = pose.keypoints[partIds[partName]];
return keypoint.score > 0.7;
});
console.log(`Detected ${detectedParts.length}/${requiredParts.length} required parts`);
// Create custom keypoint filters
function getKeypointsByRegion(pose: Pose, region: 'face' | 'arms' | 'legs'): Keypoint[] {
const regionParts = {
face: ['nose', 'leftEye', 'rightEye', 'leftEar', 'rightEar'],
arms: ['leftShoulder', 'rightShoulder', 'leftElbow', 'rightElbow', 'leftWrist', 'rightWrist'],
legs: ['leftHip', 'rightHip', 'leftKnee', 'rightKnee', 'leftAnkle', 'rightAnkle']
};
return regionParts[region].map(partName => pose.keypoints[partIds[partName]]);
}

Array defining parent-child relationships in the pose skeleton for pose assembly and tracking.
/**
* Parent-child relationships defining pose skeleton structure
* Used for pose assembly algorithms and skeleton drawing
* Each pair represents a connection in the pose tree structure
*/
const poseChain: [string, string][];

The pose chain defines the tree structure with nose as root:
const poseChain = [
['nose', 'leftEye'],
['leftEye', 'leftEar'],
['nose', 'rightEye'],
['rightEye', 'rightEar'],
['nose', 'leftShoulder'],
['leftShoulder', 'leftElbow'],
['leftElbow', 'leftWrist'],
['leftShoulder', 'leftHip'],
['leftHip', 'leftKnee'],
['leftKnee', 'leftAnkle'],
['nose', 'rightShoulder'],
['rightShoulder', 'rightElbow'],
['rightElbow', 'rightWrist'],
['rightShoulder', 'rightHip'],
['rightHip', 'rightKnee'],
['rightKnee', 'rightAnkle']
];

Usage Examples:
import { poseChain, partIds } from '@tensorflow-models/posenet';
// Draw skeleton connections
/**
 * Renders the pose skeleton onto a 2D canvas by drawing one line segment
 * for every parent→child connection listed in poseChain.
 * A segment is drawn only when both of its endpoint keypoints were
 * detected with score above 0.5.
 */
function drawPoseSkeleton(pose: Pose, ctx: CanvasRenderingContext2D) {
  for (const [fromPart, toPart] of poseChain) {
    const from = pose.keypoints[partIds[fromPart]];
    const to = pose.keypoints[partIds[toPart]];
    // Skip connections whose endpoints are low-confidence detections.
    if (from.score <= 0.5 || to.score <= 0.5) continue;
    ctx.beginPath();
    ctx.moveTo(from.position.x, from.position.y);
    ctx.lineTo(to.position.x, to.position.y);
    ctx.stroke();
  }
}
// Find pose tree depth from root (nose)
/**
 * Computes the maximum depth of the pose tree encoded by poseChain,
 * measured in edges from the root part ('nose').
 */
function getPoseTreeDepth(): number {
  const depths = new Map<string, number>();
  // Iterative depth-first traversal from the root. poseChain describes a
  // tree, but the depths map also serves as a visited-check so each part
  // is assigned a depth exactly once.
  const stack: [string, number][] = [['nose', 0]];
  while (stack.length > 0) {
    const [part, depth] = stack.pop()!;
    if (depths.has(part)) continue;
    depths.set(part, depth);
    for (const [parent, child] of poseChain) {
      if (parent === part) stack.push([child, depth + 1]);
    }
  }
  return Math.max(...depths.values());
}
// Validate pose chain connectivity
function validatePoseConnectivity(pose: Pose): boolean {
return poseChain.every(([parentPart, childPart]) => {
const parentKp = pose.keypoints[partIds[parentPart]];
const childKp = pose.keypoints[partIds[childPart]];
// Both keypoints should exist and have reasonable confidence
return parentKp && childKp && parentKp.score > 0.1 && childKp.score > 0.1;
});
}

Array of body part channel names used internally for pose processing and segmentation.
/**
* Body part channel names for internal pose processing
* Maps to different body regions for advanced pose analysis
*/
const partChannels: string[];

The part channels represent different body regions:
const partChannels = [
'left_face',
'right_face',
'right_upper_leg_front',
'right_lower_leg_back',
'right_upper_leg_back',
'left_lower_leg_front',
'left_upper_leg_front',
'left_upper_leg_back',
'left_lower_leg_back',
'right_feet',
'right_lower_leg_front',
'left_feet',
'torso_front',
'torso_back',
'right_upper_arm_front',
'right_upper_arm_back',
'right_lower_arm_back',
'left_lower_arm_front',
'left_upper_arm_front',
'left_upper_arm_back',
'left_lower_arm_back',
'right_hand',
'right_lower_arm_front',
'left_hand'
];

Usage Examples:
import { partChannels } from '@tensorflow-models/posenet';
// Advanced pose analysis using part channels
console.log('Available body part channels:', partChannels.length);
// Group channels by body region
const faceChannels = partChannels.filter(channel => channel.includes('face'));
const armChannels = partChannels.filter(channel => channel.includes('arm') || channel.includes('hand'));
const legChannels = partChannels.filter(channel => channel.includes('leg') || channel.includes('feet'));
const torsoChannels = partChannels.filter(channel => channel.includes('torso'));
console.log('Face channels:', faceChannels);
console.log('Arm channels:', armChannels);
console.log('Leg channels:', legChannels);
console.log('Torso channels:', torsoChannels);
// Custom pose analysis based on part channels
function analyzePoseRegions() {
return {
face: faceChannels.length,
arms: armChannels.length,
legs: legChannels.length,
torso: torsoChannels.length,
total: partChannels.length
};
}

Total number of keypoints detected by PoseNet.
/**
* Total number of keypoints in PoseNet pose model
* Always 17 for the standard human pose model
*/
const NUM_KEYPOINTS: number;

Usage Example:
import { NUM_KEYPOINTS } from '@tensorflow-models/posenet';
// Validate pose completeness
function isPoseComplete(pose: Pose): boolean {
return pose.keypoints.length === NUM_KEYPOINTS;
}
// Calculate pose detection rate
/**
 * Returns the fraction of the model's NUM_KEYPOINTS keypoints whose
 * score meets or exceeds minConfidence (a value in [0, 1]).
 *
 * @param pose - Estimated pose whose keypoints are inspected.
 * @param minConfidence - Minimum score for a keypoint to count as detected (default 0.5).
 */
function calculateDetectionRate(pose: Pose, minConfidence: number = 0.5): number {
  let detectedCount = 0;
  for (const keypoint of pose.keypoints) {
    if (keypoint.score >= minConfidence) detectedCount += 1;
  }
  return detectedCount / NUM_KEYPOINTS;
}
const pose = await net.estimateSinglePose(imageElement);
const detectionRate = calculateDetectionRate(pose, 0.7);
console.log(`Detected ${(detectionRate * 100).toFixed(1)}% of keypoints with high confidence`);

interface Keypoint {
score: number;
position: Vector2D;
part: string;
}
/**
 * A 2D point; used for keypoint positions in image space
 * (e.g. passed directly to canvas moveTo/lineTo in the drawing examples).
 */
interface Vector2D {
// Horizontal image-space coordinate.
x: number;
// Vertical image-space coordinate.
y: number;
}
interface Pose {
keypoints: Keypoint[];
score: number;
}

Install with Tessl CLI
npx tessl i tessl/npm-tensorflow-models--posenet