Supported devices: G300 series cameras, such as Gemini G335
Function description: Demonstrates the HDR operation, displays the HDR-processed images, and exits the program when ESC_KEY is pressed
| This example is based on the C++ High Level API for demonstration
Create a pipeline and configure the stream
// Create a pipeline with default device
ob::Pipeline pipe;
// Configure which streams to enable or disable for the Pipeline by creating a Config
std::shared_ptr<ob::Config> config = std::make_shared<ob::Config>();
std::shared_ptr<ob::VideoStreamProfile> irProfile = nullptr;
try {
    // Get all stream profiles of the IR camera, including stream resolution, frame rate, and frame format
    auto irProfiles = pipe.getStreamProfileList(OB_SENSOR_IR_LEFT);
    if(irProfiles) {
        irProfile = std::const_pointer_cast<ob::StreamProfile>(irProfiles->getProfile(OB_PROFILE_DEFAULT))->as<ob::VideoStreamProfile>();
    }
    config->enableStream(irProfile);
}
catch(...) {
    std::cerr << "Current device does not support the IR sensor!" << std::endl;
    exit(EXIT_FAILURE);
}
// Get all stream profiles of the depth camera, including stream resolution, frame rate, and frame format
auto depthProfiles = pipe.getStreamProfileList(OB_SENSOR_DEPTH);
std::shared_ptr<ob::VideoStreamProfile> depthProfile = nullptr;
if(depthProfiles) {
    depthProfile = std::const_pointer_cast<ob::StreamProfile>(depthProfiles->getProfile(OB_PROFILE_DEFAULT))->as<ob::VideoStreamProfile>();
}
config->enableStream(depthProfile);
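As an alternative to OB_PROFILE_DEFAULT above, a specific resolution, format, and frame rate can be requested from the same profile list. A minimal sketch; the getVideoStreamProfile lookup and the 640x480 / Y16 / 30 fps parameters are assumptions for illustration, not part of this sample:

try {
    // Illustrative alternative to the default-profile selection above:
    // ask for a 640x480, Y16-format depth stream at 30 fps, if the device offers one.
    auto explicitDepthProfile = depthProfiles->getVideoStreamProfile(640, 480, OB_FORMAT_Y16, 30);
    config->enableStream(explicitDepthProfile);
}
catch(const ob::Error &) {
    // No matching profile on this device; fall back to the default profile enabled above.
}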
Enable HDR processing
// Create an HdrMerge post processor
ob::HdrMerge hdrMerge;
// Enable HDR merge on the device
if(pipe.getDevice()->isPropertySupported(OB_STRUCT_DEPTH_HDR_CONFIG, OB_PERMISSION_READ_WRITE)) {
    // Get the depth exposure value range; the exposure_1 and exposure_2 in OBHdrConfig can be adjusted within it.
    OBIntPropertyRange depthExpRange = pipe.getDevice()->getIntPropertyRange(OB_PROP_DEPTH_EXPOSURE_INT);
    // Get the depth gain value range; the gain_1 and gain_2 in OBHdrConfig can be adjusted within it.
    OBIntPropertyRange depthGainRange = pipe.getDevice()->getIntPropertyRange(OB_PROP_DEPTH_GAIN_INT);
    OBHdrConfig obHdrConfig;
    uint32_t    dataSize = sizeof(OBHdrConfig);
    pipe.getDevice()->getStructuredData(OB_STRUCT_DEPTH_HDR_CONFIG, &obHdrConfig, &dataSize);
    // Enable HDR
    obHdrConfig.enable = true;
    pipe.getDevice()->setStructuredData(OB_STRUCT_DEPTH_HDR_CONFIG, &obHdrConfig, sizeof(OBHdrConfig));
}
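Before the configuration is written back with setStructuredData, the two exposure/gain pairs referenced in the comments above can also be tuned. A minimal sketch; the field names follow those comments, and the numeric values are purely illustrative and should stay within the queried depthExpRange and depthGainRange:

// Illustrative tuning of the two HDR exposure/gain pairs (example values only;
// keep them within depthExpRange.min..max and depthGainRange.min..max).
// These assignments would go before the setStructuredData call shown above.
obHdrConfig.exposure_1 = 7500; // longer exposure, captures dark regions
obHdrConfig.gain_1     = 16;
obHdrConfig.exposure_2 = 100;  // shorter exposure, captures bright regions
obHdrConfig.gain_2     = 16;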
Start the pipeline
pipe.start(config);
Get HDR processed images
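The frame set used below is obtained from the pipeline inside the render loop. A minimal sketch of the acquisition, assuming the pipeline's waitForFrames call with a timeout in milliseconds and a per-iteration render list:

// Inside the render loop: wait up to 100 ms for a composite frame set from the pipeline
auto frameSet = pipe.waitForFrames(100);
if(frameSet == nullptr) {
    // No frames arrived within the timeout; try again on the next iteration
    continue;
}
std::vector<std::shared_ptr<ob::Frame>> framesForRender;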
// Get the left IR frame from the frame set and queue it for rendering
auto leftIRFrame = frameSet->getFrame(OB_FRAME_IR_LEFT);
if(leftIRFrame) {
    framesForRender.push_back(leftIRFrame);
}
auto depthFrame = frameSet->depthFrame();
if(depthFrame != nullptr) {
    // Run the HDR merge post processor on the frame set; the result is a new frame
    // that can be cast to a frame set containing the merged depth frame
    auto newFrame    = hdrMerge.process(frameSet);
    auto newFrameSet = newFrame->as<ob::FrameSet>();
    if(newFrameSet) {
        depthFrame = newFrameSet->depthFrame();
        if(depthFrame) {
            framesForRender.push_back(depthFrame);
        }
    }
}
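In the full sample, the frames collected in framesForRender are then typically drawn in a preview window on each iteration, and the loop exits once ESC_KEY is pressed, as noted in the function description above.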
Stop the pipeline
pipe.stop();
Disable HDR processing
if(pipe.getDevice()->isPropertySupported(OB_STRUCT_DEPTH_HDR_CONFIG, OB_PERMISSION_READ_WRITE)) {
    OBHdrConfig obHdrConfig;
    uint32_t    dataSize = sizeof(OBHdrConfig);
    pipe.getDevice()->getStructuredData(OB_STRUCT_DEPTH_HDR_CONFIG, &obHdrConfig, &dataSize);
    obHdrConfig.enable = false;
    pipe.getDevice()->setStructuredData(OB_STRUCT_DEPTH_HDR_CONFIG, &obHdrConfig, sizeof(OBHdrConfig));
}
Expected Output