This quickstart guide will walk you through the process of setting up a multi-sensor streaming application using the Orbbec SDK. This application will utilize various sensors and stream their data simultaneously.
The document refers to the C++ sample MultiStream.
Prerequisites
Set up the Orbbec device.
Download and install the Orbbec SDK.
Headers
First, include all the necessary headers for your application:
#include "window.hpp"
#include "libobsensor/hpp/Pipeline.hpp"
#include "libobsensor/hpp/Error.hpp"
#include <mutex>
#include <thread>
Initializing the Pipeline
The pipeline manages streams and configurations for different sensors:
// Create a pipeline with default device
// (the Pipeline constructor with no arguments picks the default device)
ob::Pipeline pipe;
// Configure which streams to enable or disable for the Pipeline by creating a Config
// The Config starts out empty; streams are enabled on it in the next step
// and it is passed to pipe.start() afterwards.
std::shared_ptr<ob::Config> config = std::make_shared<ob::Config>();
Configuring Streams
Configure which streams to enable based on the sensors available on the device:
// Query the device behind the pipeline and enumerate its sensors.
auto device = pipe.getDevice();
auto sensorList = device->getSensorList();
for(int i = 0; i < sensorList->count(); i++) {
auto sensorType = sensorList->type(i);
// Skip the IMU sensors here; they are streamed through a separate
// pipeline later in this guide because of their much higher frame rate.
if(sensorType == OB_SENSOR_GYRO || sensorType == OB_SENSOR_ACCEL) {
continue;
}
// Enable the SDK's default stream profile for every remaining sensor.
auto profiles = pipe.getStreamProfileList(sensorType);
auto profile = profiles->getProfile(OB_PROFILE_DEFAULT);
config->enableStream(profile);
}
Starting the Main Pipeline
Start the main pipeline with the configured streams:
// Start the pipeline with config
std::mutex frameMutex;
// Most recent frame of each frame type, written by the callback below and
// read by the render loop; frameMutex guards every access because the two
// may run concurrently (hence the lock on both sides).
std::map<OBFrameType, std::shared_ptr<ob::Frame>> frameMap;
pipe.start(config, [&](std::shared_ptr<ob::FrameSet> frameset) {
auto count = frameset->frameCount();
for(int i = 0; i < count; i++) {
auto frame = frameset->getFrame(i);
// Keep only the latest frame per type, replacing any older one.
std::unique_lock<std::mutex> lk(frameMutex);
frameMap[frame->type()] = frame;
}
});
Handling IMU Data Separately
Due to the high frame rate of IMU sensors, it is beneficial to handle their data in a separate pipeline:
// Build a second pipeline on the same device, dedicated to the IMU streams.
auto dev = pipe.getDevice();
auto imuPipeline = std::make_shared<ob::Pipeline>(dev);
std::mutex imuFrameMutex;
// Latest accel/gyro frame per type, guarded by imuFrameMutex.
std::map<OBFrameType, std::shared_ptr<ob::Frame>> imuFrameMap;
try {
// Look up the default accel and gyro stream profiles and enable both.
auto accelProfiles = imuPipeline->getStreamProfileList(OB_SENSOR_ACCEL);
auto gyroProfiles = imuPipeline->getStreamProfileList(OB_SENSOR_GYRO);
auto accelProfile = accelProfiles->getProfile(OB_PROFILE_DEFAULT);
auto gyroProfile = gyroProfiles->getProfile(OB_PROFILE_DEFAULT);
std::shared_ptr<ob::Config> imuConfig = std::make_shared<ob::Config>();
imuConfig->enableStream(accelProfile);
imuConfig->enableStream(gyroProfile);
imuPipeline->start(imuConfig, [&](std::shared_ptr<ob::FrameSet> frameset) {
auto count = frameset->frameCount();
for(int i = 0; i < count; i++) {
auto frame = frameset->getFrame(i);
std::unique_lock<std::mutex> lk(imuFrameMutex);
imuFrameMap[frame->type()] = frame;
}
});
}
catch(...) {
// Deliberate best-effort: a device without an IMU keeps running with the
// video streams only; the null imuPipeline is checked before stop() later.
std::cout << "IMU sensor not found!" << std::endl;
imuPipeline.reset();
}
Rendering Frames
Create a window for rendering the collected frames:
// 1280x720 window; RENDER_GRID tiles every stream in a grid layout.
Window app("MultiStream", 1280, 720, RENDER_GRID);
while(app) {
// Snapshot the latest frames under each mutex, then render outside the
// locks so the frame callbacks are never blocked while drawing.
std::vector<std::shared_ptr<ob::Frame>> framesForRender;
{
std::unique_lock<std::mutex> lock(frameMutex);
for(auto &frame: frameMap) {
framesForRender.push_back(frame.second);
}
}
{
std::unique_lock<std::mutex> lock(imuFrameMutex);
for(auto &frame: imuFrameMap) {
framesForRender.push_back(frame.second);
}
}
app.addToRender(framesForRender);
}
Stopping the Pipeline
Properly stop the pipelines when the application is closing to ensure all resources are freed:
// Stop the main pipeline; no further video frame callbacks will fire.
pipe.stop();
// The IMU pipeline is stopped only if it was successfully started
// (it was reset to null in the catch handler when no IMU was found).
if(imuPipeline) {
imuPipeline->stop();
}
Full source
Below is the complete application source, integrating all of the components discussed above:
#include "window.hpp"

#include "libobsensor/hpp/Pipeline.hpp"
#include "libobsensor/hpp/Error.hpp"

#include <cstdint>
#include <cstdlib>
#include <iostream>
#include <map>
#include <memory>
#include <mutex>
#include <thread>
#include <vector>
int main(int argc, char **argv) try {
// Create a pipeline with default device
ob::Pipeline pipe;
// Configure which streams to enable or disable for the Pipeline by creating a Config
std::shared_ptr<ob::Config> config = std::make_shared<ob::Config>();
// enumerate and config all sensors
auto device = pipe.getDevice();
auto sensorList = device->getSensorList();
for(int i = 0; i < sensorList->count(); i++) {
auto sensorType = sensorList->type(i);
if(sensorType == OB_SENSOR_GYRO || sensorType == OB_SENSOR_ACCEL) {
continue;
}
auto profiles = pipe.getStreamProfileList(sensorType);
auto profile = profiles->getProfile(OB_PROFILE_DEFAULT);
config->enableStream(profile);
}
// Start the pipeline with config
std::mutex frameMutex;
std::map<OBFrameType, std::shared_ptr<ob::Frame>> frameMap;
pipe.start(config, [&](std::shared_ptr<ob::FrameSet> frameset) {
auto count = frameset->frameCount();
for(int i = 0; i < count; i++) {
auto frame = frameset->getFrame(i);
std::unique_lock<std::mutex> lk(frameMutex);
frameMap[frame->type()] = frame;
}
});
// The IMU frame rate is much faster than the video, so it is advisable to use a separate pipeline to obtain IMU data.
auto dev = pipe.getDevice();
auto imuPipeline = std::make_shared<ob::Pipeline>(dev);
std::mutex imuFrameMutex;
std::map<OBFrameType, std::shared_ptr<ob::Frame>> imuFrameMap;
try {
auto accelProfiles = imuPipeline->getStreamProfileList(OB_SENSOR_ACCEL);
auto gyroProfiles = imuPipeline->getStreamProfileList(OB_SENSOR_GYRO);
auto accelProfile = accelProfiles->getProfile(OB_PROFILE_DEFAULT);
auto gyroProfile = gyroProfiles->getProfile(OB_PROFILE_DEFAULT);
std::shared_ptr<ob::Config> imuConfig = std::make_shared<ob::Config>();
imuConfig->enableStream(accelProfile);
imuConfig->enableStream(gyroProfile);
imuPipeline->start(imuConfig, [&](std::shared_ptr<ob::FrameSet> frameset) {
auto count = frameset->frameCount();
for(int i = 0; i < count; i++) {
auto frame = frameset->getFrame(i);
std::unique_lock<std::mutex> lk(imuFrameMutex);
imuFrameMap[frame->type()] = frame;
}
});
}
catch(...) {
std::cout << "IMU sensor not found!" << std::endl;
imuPipeline.reset();
}
// Create a window for rendering and set the resolution of the window
Window app("MultiStream", 1280, 720, RENDER_GRID);
while(app) {
std::vector<std::shared_ptr<ob::Frame>> framesForRender;
{
std::unique_lock<std::mutex> lock(frameMutex);
for(auto &frame: frameMap) {
framesForRender.push_back(frame.second);
}
}
{
std::unique_lock<std::mutex> lock(imuFrameMutex);
for(auto &frame: imuFrameMap) {
framesForRender.push_back(frame.second);
}
}
app.addToRender(framesForRender);
}
// Stop the Pipeline, no frame data will be generated
pipe.stop();
if(imuPipeline) {
imuPipeline->stop();
}
return 0;
}
catch(ob::Error &e) {
std::cerr << "function:" << e.getName() << "\nargs:" << e.getArgs() << "\nmessage:" << e.getMessage() << "\ntype:" << e.getExceptionType() << std::endl;
exit(EXIT_FAILURE);
}
Next steps
– Learn more about configuring and using individual sensors with the Orbbec SDK.
– Explore advanced features and settings available in the Orbbec SDK documentation.
– Begin integrating Orbbec Device capabilities into your larger projects and applications.