Tutorial 2: Device calibration
Introduction
Most sensors in Aria glasses are calibrated both extrinsically and intrinsically, allowing you to rectify sensor measurements to real-world quantities. Calibration is performed per device, and the information is stored in the VRS file. This tutorial demonstrates how to work with device calibration in Project Aria using `projectaria_tools`.
What You'll Learn
- How to obtain each sensor's calibration data
- Camera calibration: projection and unprojection, and how to post-process images according to calibration (e.g., undistortion).
- IMU calibration: measurement rectification.
- Multi-sensor coordination and sensor poses, and the concept of the "Device" frame.
Pre-requisites:
- Familiarity with VRS basics from Tutorial_1_vrs_Data_provider_basics.ipynb.
- Download the Aria Gen2 sample data from link.
Note on Visualization
If the visualization window does not show up, this is due to a caching issue in the Rerun library. Just re-run the specific code cell.
Obtaining Device Calibration Content
Each VRS file's device calibration can be accessed as a `DeviceCalibration` instance via the `VrsDataProvider` API.
from projectaria_tools.core import data_provider
# Load local VRS file
vrs_file_path = "path/to/your/recording.vrs"
vrs_data_provider = data_provider.create_vrs_data_provider(vrs_file_path)
# Obtain device calibration
device_calib = vrs_data_provider.get_device_calibration()
if device_calib is None:
    raise RuntimeError(
        "Device calibration does not exist! Please use a VRS file that contains valid device calibration for this tutorial."
    )
# You can obtain the device version (Aria Gen1 vs Gen2) and device subtype (DVT with small/large frame width + short/long temple arms, etc.) from the calibration
device_version = device_calib.get_device_version()
device_subtype = device_calib.get_device_subtype()
print(f"Aria Device Version: {device_version}")
print(f"Device Subtype: {device_subtype}")
Accessing Individual Sensor Calibration
DeviceCalibration
provides APIs to query the intrinsics and extrinsics of each calibrated sensor.
1. Camera calibration content
# Get sensor labels within device calibration
all_labels = device_calib.get_all_labels()
print(f"All sensors within device calibration: {all_labels}")
print(f"Cameras: {device_calib.get_camera_labels()}")
# Query a specific camera's calibration
rgb_camera_label = "camera-rgb"
camera_calib = device_calib.get_camera_calib(rgb_camera_label)
if camera_calib is None:
    raise RuntimeError(
        "camera-rgb calibration does not exist! Please use a VRS that contains valid RGB camera calibration for this tutorial."
    )
print(f"-------------- camera calibration for {rgb_camera_label} ----------------")
print(f"Image Size: {camera_calib.get_image_size()}")
print(f"Camera Model Type: {camera_calib.get_model_name()}")
print(
    f"Camera Intrinsics Params: {camera_calib.get_projection_params()}, \n"
    f"where focal is {camera_calib.get_focal_lengths()}, "
    f"and principal point is {camera_calib.get_principal_point()}\n"
)
# Get extrinsics: T_Device_Camera maps points from the camera frame to the Device frame
T_device_camera = camera_calib.get_transform_device_camera()
print(f"Camera Extrinsics T_Device_Camera:\n{T_device_camera.to_matrix()}")
2. IMU calibration content
imu_label = "imu-right"
imu_calib = device_calib.get_imu_calib(imu_label)
if imu_calib is None:
    raise RuntimeError(
        "imu-right calibration does not exist! Please use a VRS that contains valid IMU calibration for this tutorial."
    )
print(f"-------------- IMU calibration for {imu_label} ----------------")
# Get IMU intrinsics parameters
accel_bias = imu_calib.get_accel_model().get_bias()
accel_rectification_matrix = imu_calib.get_accel_model().get_rectification()
gyro_bias = imu_calib.get_gyro_model().get_bias()
gyro_rectification_matrix = imu_calib.get_gyro_model().get_rectification()
print(f"Accelerometer Intrinsics:")
print(f" Bias: {accel_bias}")
print(f" Rectification Matrix:\n{accel_rectification_matrix}")
print(f"Gyroscope Intrinsics:")
print(f" Bias: {gyro_bias}")
print(f" Rectification Matrix:\n{gyro_rectification_matrix}")
# Get extrinsics: T_Device_IMU maps points from the IMU frame to the Device frame
T_device_imu = imu_calib.get_transform_device_imu()
print(f"IMU Extrinsics T_Device_IMU:\n{T_device_imu.to_matrix()}")
print(
    "\n ------IMPORTANT----- \n"
    "Please use the .raw_to_rectified_[accel,gyro]() and .rectified_to_raw_[accel,gyro]() APIs to apply IMU calibration!\n"
)
3. Magnetometer, barometer, and microphone calibration content
print(f"Magnetometers: {device_calib.get_magnetometer_labels()}")
print(f"Barometers: {device_calib.get_barometer_labels()}")
print(f"Microphones: {device_calib.get_microphone_labels()}")
# ----------------
# Magnetometer calibration
# ----------------
magnetometer_label = "mag0"
magnetometer_calib = device_calib.get_magnetometer_calib(magnetometer_label)
if magnetometer_calib is None:
    raise RuntimeError(
        f"{magnetometer_label} calibration does not exist! Please use a VRS that contains valid magnetometer calibration for this tutorial."
    )
# Get magnetometer intrinsics parameters
mag_bias = magnetometer_calib.get_model().get_bias()
mag_rectification_matrix = magnetometer_calib.get_model().get_rectification()
print(f"Magnetometer calibration for {magnetometer_label} only have intrinsics:")
print(f" Bias: {mag_bias}")
print(f" Rectification Matrix:\n{mag_rectification_matrix}")
# ----------------
# Barometer calibration
# ----------------
baro_label = "baro0"
baro_calib = device_calib.get_barometer_calib(baro_label)
if baro_calib is None:
    raise RuntimeError(
        f"{baro_label} calibration does not exist! Please use a VRS that contains valid barometer calibration for this tutorial."
    )
print(f"Barometer calibration for {baro_label} only has intrinsics:")
print(f" Slope: {baro_calib.get_slope()}")
print(f" Offset in Pascal:\n{baro_calib.get_offset_pa()}")
# ----------------
# Microphone calibration, containing both mic and speaker calibrations.
# ----------------
microphone_labels = device_calib.get_microphone_labels()
speaker_labels = device_calib.get_speaker_labels()
audio_sensor_labels = device_calib.get_audio_labels()
print(f"Both mic and speakers are calibrated. \n"
f"List of mics that are calibrated: {microphone_labels} \n"
f"List of speakers that are calibrated: {speaker_labels}")
for audio_label in audio_sensor_labels:
audio_calib = device_calib.get_microphone_calib(audio_label)
if audio_calib is None:
print(f"Audio sensor calibration for {audio_label} is not available.")
continue
print(f"Audio sensor calibration for {audio_label} only has intrinsics:")
print(f" sensitivity delta: {audio_calib.get_d_sensitivity_1k_dbv()}")
print(f" \n ------IMPORTANT----- \n "
f"Please use .raw_to_rectified() and .rectified_to_raw() APIs to apply magnetometer, barometer, and microphone calibration!\n")
Camera Intrinsics: Project and Unproject
A camera intrinsic model maps between a 3D point in the camera coordinate system and its corresponding 2D pixel on the sensor. This supports:
- Projection: 3D point → 2D pixel
- Unprojection: 2D pixel → 3D ray
import numpy as np
# Project 3D point to pixel
point_3d = np.array([0.1, 0.05, 1.0]) # Point in camera frame (meters)
pixel = camera_calib.project(point_3d)
if pixel is not None:
    print(f"3D point {point_3d} projected to -> pixel {pixel}")
else:
    print(f"3D point {point_3d} projected outside the camera sensor plane")
# Unproject pixel to 3D ray.
test_pixel = np.array([400, 300])
ray_3d = camera_calib.unproject(test_pixel)
print(f"Pixel {test_pixel} unprojected to -> 3D ray {ray_3d}")
Camera Intrinsics: Undistortion
Camera calibration enables post-processing of Aria images, such as undistorting images from a fisheye to a linear camera model. Steps:
- Use `vrs_data_provider` to access the camera image and calibration.
- Create a linear camera model using the `get_linear_camera_calibration` function.
- Apply `distort_by_calibration` to distort or undistort the image from the actual fisheye camera model to the linear camera model.
import rerun as rr
from projectaria_tools.core import calibration
rr.init("rerun_viz_image_undistortion")
# We already obtained RGB camera calibration as `camera_calib`.
# Now, create a linear camera model that is similar to camera_calib
linear_camera_model = calibration.get_linear_camera_calibration(
    image_width=camera_calib.get_image_size()[0],
    image_height=camera_calib.get_image_size()[1],
    focal_length=camera_calib.get_focal_lengths()[0],
    label="test_linear_camera",
)
rgb_stream_id = vrs_data_provider.get_stream_id_from_label("camera-rgb")
num_samples = vrs_data_provider.get_num_data(rgb_stream_id)
# Plot a few frames from the RGB camera, and also plot the undistorted images
first_few = min(10, num_samples)
for i in range(first_few):
    # Query RGB images
    image_data, image_record = vrs_data_provider.get_image_data_by_index(
        rgb_stream_id, i
    )
    if not image_data.is_valid():
        continue
    # Plot original RGB image
    timestamp_ns = image_record.capture_timestamp_ns
    rr.set_time_nanos("device_time", timestamp_ns)
    rr.log("camera_rgb", rr.Image(image_data.to_numpy_array()))
    # Undistort RGB image to a linear camera model
    undistorted_image = calibration.distort_by_calibration(
        arraySrc=image_data.to_numpy_array(),
        dstCalib=linear_camera_model,
        srcCalib=camera_calib,
    )
    rr.log("undistorted_camera_rgb", rr.Image(undistorted_image))
rr.notebook_show()
IMU Intrinsics: Measurement Rectification
IMU intrinsics are represented by an affine model. The raw sensor readout (`value_raw`) is compensated to obtain the real acceleration or angular velocity (`value_compensated`):
value_compensated = M^-1 * (value_raw - bias)
where `M` is an upper-triangular matrix (there is no global rotation between the IMU body frame and the accelerometer frame).
To simulate a sensor readout from real values:
value_raw = M * value_compensated + bias
Note that in the following example, the raw and compensated IMU signals are very close, so the two plots may look similar.
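To make the affine model concrete, here is a minimal sketch (not part of the original notebook) that applies the formula above manually with NumPy and compares the result against the library API; in practice, prefer the raw_to_rectified_[accel,gyro]() APIs. It assumes the accelerometer model is exactly the bias-plus-rectification-matrix form described above.
import numpy as np
accel_model = imu_calib.get_accel_model()
M = np.asarray(accel_model.get_rectification())  # upper-triangular rectification matrix
bias = np.asarray(accel_model.get_bias()).reshape(3)  # ensure shape (3,)
# Hypothetical raw accelerometer reading in m/s^2
raw_accel_sample = np.array([0.1, -9.7, 0.3])
# value_compensated = M^-1 * (value_raw - bias)
manual_compensated = np.linalg.solve(M, raw_accel_sample - bias)
api_compensated = imu_calib.raw_to_rectified_accel(raw_accel_sample)
print(f"Manually compensated: {manual_compensated}")
print(f"API compensated:      {api_compensated}")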
def _set_imu_plot_colors(rerun_plot_label):
    """
    A helper function to set colors for the IMU plots in Rerun
    """
    rr.log(
        f"{rerun_plot_label}/accl/x[m/sec2]",
        rr.SeriesLine(color=[230, 25, 75], name="accel/x[m/sec2]"),
        static=True,
    )  # Red
    rr.log(
        f"{rerun_plot_label}/accl/y[m/sec2]",
        rr.SeriesLine(color=[60, 180, 75], name="accel/y[m/sec2]"),
        static=True,
    )  # Green
    rr.log(
        f"{rerun_plot_label}/accl/z[m/sec2]",
        rr.SeriesLine(color=[0, 130, 200], name="accel/z[m/sec2]"),
        static=True,
    )  # Blue
    rr.log(
        f"{rerun_plot_label}/gyro/x[rad/sec]",
        rr.SeriesLine(color=[245, 130, 48], name="gyro/x[rad/sec]"),
        static=True,
    )  # Orange
    rr.log(
        f"{rerun_plot_label}/gyro/y[rad/sec]",
        rr.SeriesLine(color=[145, 30, 180], name="gyro/y[rad/sec]"),
        static=True,
    )  # Purple
    rr.log(
        f"{rerun_plot_label}/gyro/z[rad/sec]",
        rr.SeriesLine(color=[70, 240, 240], name="gyro/z[rad/sec]"),
        static=True,
    )  # Cyan


def _plot_imu_signals(accel_data, gyro_data, rerun_plot_label):
    """
    A helper function to plot IMU signals in a Rerun 1D plot
    """
    rr.log(
        f"{rerun_plot_label}/accl/x[m/sec2]",
        rr.Scalar(accel_data[0]),
    )
    rr.log(
        f"{rerun_plot_label}/accl/y[m/sec2]",
        rr.Scalar(accel_data[1]),
    )
    rr.log(
        f"{rerun_plot_label}/accl/z[m/sec2]",
        rr.Scalar(accel_data[2]),
    )
    rr.log(
        f"{rerun_plot_label}/gyro/x[rad/sec]",
        rr.Scalar(gyro_data[0]),
    )
    rr.log(
        f"{rerun_plot_label}/gyro/y[rad/sec]",
        rr.Scalar(gyro_data[1]),
    )
    rr.log(
        f"{rerun_plot_label}/gyro/z[rad/sec]",
        rr.Scalar(gyro_data[2]),
    )
rr.init("rerun_viz_imu_rectification")
imu_label = "imu-right"
imu_calib = device_calib.get_imu_calib(imu_label)
imu_stream_id = vrs_data_provider.get_stream_id_from_label(imu_label)
if imu_calib is None or imu_stream_id is None:
    raise RuntimeError(
        "imu-right calibration or stream data does not exist! Please use a VRS that contains valid IMU calibration and data for this tutorial."
    )
num_samples = vrs_data_provider.get_num_data(imu_stream_id)
first_few = min(5000, num_samples)
# Set same colors for both plots
_set_imu_plot_colors("imu_right")
_set_imu_plot_colors("imu_right_compensated")
for i in range(0, first_few, 50):
    # Query IMU data
    imu_data = vrs_data_provider.get_imu_data_by_index(imu_stream_id, i)
    # Set the timestamp for this sample
    rr.set_time_nanos("device_time", imu_data.capture_timestamp_ns)
    # Get compensated IMU data
    compensated_accel = imu_calib.raw_to_rectified_accel(imu_data.accel_msec2)
    compensated_gyro = imu_calib.raw_to_rectified_gyro(imu_data.gyro_radsec)
    # Print one sample's content
    if i == 0:
        print(
            f"IMU compensation: raw accel {imu_data.accel_msec2} , compensated accel {compensated_accel}"
        )
        print(
            f"IMU compensation: raw gyro {imu_data.gyro_radsec} , compensated gyro {compensated_gyro}"
        )
    # Plot raw IMU readings
    _plot_imu_signals(imu_data.accel_msec2, imu_data.gyro_radsec, "imu_right")
    # Plot compensated IMU readings in a separate plot
    _plot_imu_signals(compensated_accel, compensated_gyro, "imu_right_compensated")
rr.notebook_show()
Accessing Sensor Extrinsics
The core API to query sensor extrinsics is:
get_transform_device_sensor(label=sensor_label, get_cad_value=False)
This API returns the extrinsics of the sensor, represented as a Sophus::SE3 (rotation + translation), in the reference coordinate frame of `Device`.
- The `Device` frame is the reference coordinate system for all sensors.
- For Aria Gen2, the `Device` frame is the left front-facing SLAM camera (`slam-front-left`).
- All sensor extrinsics are defined relative to this frame.
The optional parameter `get_cad_value` controls the source of the sensor extrinsics:
- `get_cad_value=False` (default): returns the sensor extrinsics from factory calibration, for sensors whose extrinsics are factory-calibrated. This includes:
  - Cameras
  - IMUs
- `get_cad_value=True`: returns the sensor's extrinsics at its designed location in CAD. This is useful for sensors without factory-calibrated extrinsics, including:
  - Magnetometer
  - Barometer
  - Microphones
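As an example of composing these extrinsics, here is a minimal sketch (not part of the original notebook) that computes the relative pose between two sensors; it assumes both labels exist in the calibration and that the SE3 bindings support `.inverse()` and the `@` operator for composition.
T_device_rgb = device_calib.get_transform_device_sensor(label="camera-rgb", get_cad_value=False)
T_device_slam = device_calib.get_transform_device_sensor(label="slam-front-left", get_cad_value=False)
if T_device_rgb is not None and T_device_slam is not None:
    # Relative pose of the RGB camera expressed in the slam-front-left camera frame
    T_slam_rgb = T_device_slam.inverse() @ T_device_rgb
    print(f"T_SlamFrontLeft_CameraRgb:\n{T_slam_rgb.to_matrix()}")
Since the Device frame is defined at slam-front-left for Aria Gen2, T_device_slam should be close to identity, and T_slam_rgb should be close to T_device_rgb.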
from projectaria_tools.utils.rerun_helpers import (
AriaGlassesOutline,
ToTransform3D,
ToBox3D,
)
rr.init("rerun_viz_sensor_extrinsics")
# Obtain a glasses outline for visualization. The outline uses factory-calibrated extrinsics when available, and falls back to CAD extrinsics otherwise.
glass_outline = AriaGlassesOutline(device_calib, use_cad_calib=False)
rr.log("device/glasses_outline", rr.LineStrips3D([glass_outline]), static=True)
# Plot all the sensor locations from either factory calibration (if available) or CAD
sensor_labels = device_calib.get_all_labels()
camera_labels = device_calib.get_camera_labels()
for sensor in sensor_labels:
    # Use factory-calibrated extrinsics for cameras and IMUs; use CAD extrinsics for the other sensors.
    if ("camera" in sensor) or ("imu" in sensor):
        T_device_sensor = device_calib.get_transform_device_sensor(label=sensor, get_cad_value=False)
    else:
        T_device_sensor = device_calib.get_transform_device_sensor(label=sensor, get_cad_value=True)
    # Skip if extrinsics cannot be obtained
    if T_device_sensor is None:
        print(f"Warning: sensor {sensor} does not have extrinsics from either factory calibration or CAD, skipping plotting.")
        continue
    # Plot sensor pose and label
    rr.log(f"device/{sensor}", ToTransform3D(T_device_sensor), static=True)
    rr.log(
        f"device/{sensor}/text",
        ToBox3D(sensor, [1e-5, 1e-5, 1e-5]),
        static=True,
    )
    # For cameras, also plot the camera frustum
    if sensor in camera_labels:
        camera_calibration = device_calib.get_camera_calib(sensor)
        rr.log(f"device/{sensor}_frustum", ToTransform3D(T_device_sensor), static=True)
        rr.log(
            f"device/{sensor}_frustum",
            rr.Pinhole(
                resolution=[
                    camera_calibration.get_image_size()[0],
                    camera_calibration.get_image_size()[1],
                ],
                focal_length=float(camera_calibration.get_focal_lengths()[0]),
            ),
            static=True,
        )
rr.notebook_show()