依赖 OpenCV（我这里使用的是 4.1.2 版本）
#include <librealsense2/rs.hpp>
#include<iostream>
// include OpenCV header file
#include <opencv2/opencv.hpp>
using namespace cv;
#define RS_WIDTH 640
#define RS_HEIGHT 480
#define RS_FPS 60
int main(int argc, char** argv) try
{
// judge whether devices is exist or not
rs2::context ctx;
auto list = ctx.query_devices(); // Get a snapshot of currently connected devices
if (list.size() == 0)
throw std::runtime_error("No device detected. Is it plugged in?");
rs2::device dev = list.front();
//
rs2::frameset frames;
//Contruct a pipeline which abstracts the device
rs2::pipeline pipe;//创建一个通信管道//https://baike.so.com/doc/1559953-1649001.html pipeline的解释
//Create a configuration for configuring the pipeline with a non default profile
rs2::config cfg;//创建一个以非默认配置的配置用来配置管道
//Add desired streams to configuration
cfg.enable_stream(RS2_STREAM_COLOR, RS_WIDTH, RS_HEIGHT, RS2_FORMAT_BGR8, RS_FPS);//向配置添加所需的流
cfg.enable_stream(RS2_STREAM_DEPTH, RS_WIDTH, RS_HEIGHT, RS2_FORMAT_Z16, RS_FPS);
cfg.enable_stream(RS2_STREAM_INFRARED, 1, RS_WIDTH, RS_HEIGHT, RS2_FORMAT_Y8, RS_FPS);
cfg.enable_stream(RS2_STREAM_INFRARED, 2, RS_WIDTH, RS_HEIGHT, RS2_FORMAT_Y8, RS_FPS);
// get depth scale
// float depth_scale = get_depth_scale(profile.get_device());
// start stream
pipe.start(cfg);//指示管道使用所请求的配置启动流
int c = 0;
while (true)
{
frames = pipe.wait_for_frames();//等待所有配置的流生成框架
// Align to depth
/*rs2::align align_to_depth(RS2_STREAM_DEPTH);*/
rs2::align align_to_depth(rs2_stream::RS2_STREAM_COLOR);
frames = align_to_depth.process(frames);
//Get each frame
rs2::frame color_frame = frames.get_color_frame();
rs2::frame depth_frame = frames.get_depth_frame();
rs2::video_frame ir_frame_left = frames.get_infrared_frame(1);
rs2::video_frame ir_frame_right = frames.get_infrared_frame(2);
// Creating OpenCV Matrix from a color image
Mat color(Size(RS_WIDTH, RS_HEIGHT), CV_8UC3, (void*)color_frame.get_data(), Mat::AUTO_STEP);
Mat pic_right(Size(RS_WIDTH, RS_HEIGHT), CV_8UC1, (void*)ir_frame_right.get_data());
Mat pic_left(Size(RS_WIDTH, RS_HEIGHT), CV_8UC1, (void*)ir_frame_left.get_data());
Mat pic_depth(Size(RS_WIDTH, RS_HEIGHT), CV_16U, (void*)depth_frame.get_data(), Mat::AUTO_STEP);
// Display in a GUI
namedWindow("Display Image", WINDOW_AUTOSIZE);
imshow("Display Image", color);
waitKey(1);
imshow("Display depth", pic_depth * 15); // *15 for show
waitKey(1);
imshow("Display pic_left", pic_left);
waitKey(1);
imshow("Display pic_right", pic_right);
waitKey(1);
std::string dir = "D:\\Data\\6DOF\\";
if (c >= 100)
{
if (cv::imwrite(dir + "color\\color-" + std::to_string(c) + ".png", color) &&
cv::imwrite(dir + "depth\\depth-" + std::to_string(c) + ".png", pic_depth))
{
std::cout << std::to_string(c) << "st depth; " << std::to_string(c) << "st color" << std::endl;
}
}
c++;
}
return 0;
}
// error
catch (const rs2::error & e)
{
std::cerr << "RealSense error calling " << e.get_failed_function() << "(" << e.get_failed_args() << "):\n " << e.what() << std::endl;
return EXIT_FAILURE;
}
catch (const std::exception& e)
{
std::cerr << e.what() << std::endl;
return EXIT_FAILURE;
}
下面对采集代码做一个简单的封装：
realsense.h:
#pragma once
#ifndef REALSENSE_H
#define REALSENSE_H
#include<librealsense2/rs.hpp>
#define RS_WIDTH 640
#define RS_HEIGHT 480
#define RS_FPS 60 // setting this too high caused camera initialization to fail
// Thin wrapper around a RealSense pipeline: the constructor opens the first
// connected device and starts color+depth streaming; GetFrames() grabs the
// next frameset with depth aligned to the color viewpoint.
class RealSense
{
public:
// Starts streaming color (BGR8) and depth (Z16) at RS_WIDTH x RS_HEIGHT @ RS_FPS.
// Throws std::runtime_error if no device is plugged in.
RealSense();
// Blocks for the next frameset and refreshes color_frame / depth_frame.
void GetFrames();
~RealSense();
private:
rs2::context ctx;
rs2::frameset frames;
rs2::pipeline pipe;
rs2::config cfg;
public:
// Latest frames fetched by GetFrames(); depth is aligned to color.
rs2::frame color_frame;
rs2::frame depth_frame;
};
#endif
realsense.cpp
#include"realsense.h"
// Open the first connected RealSense device and start color+depth streaming.
// Throws std::runtime_error when no device is detected.
RealSense::RealSense()
{
// Verify that at least one device is connected before configuring streams.
auto list = ctx.query_devices();
if (list.size() == 0)
throw std::runtime_error("No device detected. Is it plugged in?");
//Add desired streams to configuration
cfg.enable_stream(RS2_STREAM_COLOR, RS_WIDTH, RS_HEIGHT, RS2_FORMAT_BGR8, RS_FPS);
cfg.enable_stream(RS2_STREAM_DEPTH, RS_WIDTH, RS_HEIGHT, RS2_FORMAT_Z16, RS_FPS);
// Start streaming with the requested configuration.
pipe.start(cfg);
}
void RealSense::GetFrames()
{
this->frames = this->pipe.wait_for_frames();//等待所有配置的流生成框架
// Align to depth
rs2::align align_to_depth(rs2_stream::RS2_STREAM_COLOR);
this->frames = align_to_depth.process(this->frames);
//Get each frame
this->color_frame = frames.get_color_frame();
this->depth_frame = frames.get_depth_frame();
}
// All members clean up via their own destructors (pipeline stop, etc.),
// so the defaulted destructor is sufficient.
RealSense::~RealSense() = default;
test.cpp
#include<iostream>
#include<chrono>
#include <opencv2/opencv.hpp>
using namespace cv;
#include"realsense.h"
// Grab frames through the RealSense wrapper, display them, and print the
// per-iteration latency in milliseconds.
int main(int argc, char** argv) try
{
    // May throw std::runtime_error if no device is connected — handled below
    // instead of letting the exception terminate the process uncaught.
    RealSense RS;
    // Create the display windows once, outside the capture loop.
    namedWindow("Display Image", WINDOW_AUTOSIZE);
    namedWindow("Display depth", WINDOW_AUTOSIZE);
    while (true)
    {
        std::chrono::steady_clock::time_point t1 = std::chrono::steady_clock::now();
        RS.GetFrames();
        // Wrap the frame buffers in OpenCV matrices (no copy; valid only
        // while the underlying rs2 frames are alive).
        Mat color(cv::Size(RS_WIDTH, RS_HEIGHT), CV_8UC3, (void*)RS.color_frame.get_data(), cv::Mat::AUTO_STEP);
        Mat depth(cv::Size(RS_WIDTH, RS_HEIGHT), CV_16U, (void*)RS.depth_frame.get_data(), cv::Mat::AUTO_STEP);
        // Display in a GUI.
        imshow("Display Image", color);
        imshow("Display depth", depth * 15); // x15 only to make depth values visible
        waitKey(1);
        std::chrono::steady_clock::time_point t2 = std::chrono::steady_clock::now();
        std::chrono::duration<double> time_used = std::chrono::duration_cast<std::chrono::duration<double>>(t2 - t1);
        std::cout << "the cost of time:" << 1000 * time_used.count() << " milliseconds." << std::endl;
    }
    return 0;
}
catch (const std::exception& e)
{
    // Report setup/streaming failures with a message instead of std::terminate.
    std::cerr << e.what() << std::endl;
    return 1;
}
按照时间戳保存
#include <librealsense2/rs.hpp>
#include <iomanip>
#include <iostream>
#include <sstream>
#include <string>
#include <opencv2/opencv.hpp>
#include <Windows.h>
#define width 640
#define height 480
#define fps 30
int main()
{
// timestamp
SYSTEMTIME st = { 0 };
//
std::string dir = "F:\\Dataset\\cube\\";
std::string png = ".png";
// judge whether devices is exist or not
rs2::context ctx;
auto list = ctx.query_devices(); // Get a snapshot of currently connected devices
if (list.size() == 0)
throw std::runtime_error("No device detected. Is it plugged in?");
rs2::device dev = list.front();
//
rs2::frameset frames;
rs2::pipeline pipe;
rs2::config cfg;
//Add desired streams to configuration
cfg.enable_stream(RS2_STREAM_COLOR, width, height, RS2_FORMAT_BGR8, fps);//向配置添加所需的流
cfg.enable_stream(RS2_STREAM_DEPTH, width, height, RS2_FORMAT_Z16, fps);
// start stream
pipe.start(cfg);//指示管道使用所请求的配置启动流
while (true)
{
frames = pipe.wait_for_frames();//等待所有配置的流生成框架
// Align to depth
rs2::align align_to_depth(rs2_stream::RS2_STREAM_COLOR);
frames = align_to_depth.process(frames);
//Get each frame
rs2::frame color_frame = frames.get_color_frame();
rs2::frame depth_frame = frames.get_depth_frame();
rs2::video_frame ir_frame_left = frames.get_infrared_frame(1);
rs2::video_frame ir_frame_right = frames.get_infrared_frame(2);
// Creating OpenCV Matrix from a color image
cv::Mat color(cv::Size(width, height), CV_8UC3, (void*)color_frame.get_data(), cv::Mat::AUTO_STEP);
cv::Mat pic_depth(cv::Size(width, height), CV_16U, (void*)depth_frame.get_data(), cv::Mat::AUTO_STEP);
// Display in a GUI
cv::namedWindow("Display Image", cv::WINDOW_AUTOSIZE);
imshow("Display Image", color);
//cv::waitKey(1);
//cv::imshow("Display depth", pic_depth * 15); // *15 for show
//cv::waitKey(1);
// 按照时间戳保存
GetLocalTime(&st);
std::string t = std::to_string(st.wYear) +
std::to_string(st.wMonth) +
std::to_string(st.wDay) +
std::to_string(st.wHour) +
std::to_string(st.wMinute) +
std::to_string(st.wSecond) +
std::to_string(st.wMilliseconds);
//std::cout << t << std::endl;
if (cv::waitKey(1) == 'C')
{
cv::imwrite(dir + t + png, color);
}
}
return 0;
}