
Running ORB_SLAM2 in Monocular Mode with a Realsense D435i (non-ROS version)


The program is based mainly on the mono_euroc example (Examples/Monocular/mono_euroc.cc), with its data source replaced by the Realsense camera. How to read data from the Realsense was covered in detail in an earlier post.
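As a quick orientation, the frame-grabbing core that replaces the EuRoC example's image loading looks roughly like the stand-alone sketch below (assuming librealsense2 and OpenCV are installed and a 640x480 BGR8 color stream at 30 fps); the full mono_D435i.cc later in this post wraps essentially this loop around ORB_SLAM2:

    #include <iostream>
    #include <librealsense2/rs.hpp>
    #include <opencv2/core/core.hpp>

    int main()
    {
        // Start a pipeline that streams 640x480 BGR8 color frames at 30 fps
        rs2::pipeline pipe;
        rs2::config cfg;
        cfg.enable_stream(RS2_STREAM_COLOR, 640, 480, RS2_FORMAT_BGR8, 30);
        pipe.start(cfg);

        for (int i = 0; i < 100; ++i)
        {
            rs2::frameset frames = pipe.wait_for_frames();   // block until a new frameset arrives
            rs2::frame color_frame = frames.get_color_frame();
            cv::Mat color(cv::Size(640, 480), CV_8UC3,
                          (void*)color_frame.get_data(), cv::Mat::AUTO_STEP); // wrap the buffer, no copy
            std::cout << "frame " << i << ", timestamp (ms): "
                      << color_frame.get_timestamp() << std::endl;
            // in the real program, this is where SLAM.TrackMonocular(color, timestamp) is called
        }
        return 0;
    }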

The first step is the configuration: create a D435i.yaml file in Examples/Monocular, following the format of the other yaml files in that directory, and fill in your own camera's intrinsic and distortion parameters (a sketch for reading them directly from the camera follows the yaml below).

    %YAML:1.0
    
    #--------------------------------------------------------------------------------------------
    # Camera Parameters. Adjust them!
    #--------------------------------------------------------------------------------------------
    
    # Camera calibration and distortion parameters (OpenCV) 
    Camera.fx: 610.973
    Camera.fy: 610.508
    Camera.cx: 326.195
    Camera.cy: 242.118
    
    Camera.k1: 0.13200119
    Camera.k2: -0.27231803
    Camera.p1: 0.00148674
    Camera.p2: -0.00158623
    
    # Camera frames per second 
    Camera.fps: 30.0
    
    # Color order of the images (0: BGR, 1: RGB. It is ignored if images are grayscale)
    Camera.RGB: 1
    
    #--------------------------------------------------------------------------------------------
    # ORB Parameters
    #--------------------------------------------------------------------------------------------
    
    # ORB Extractor: Number of features per image
    ORBextractor.nFeatures: 1000
    
    # ORB Extractor: Scale factor between levels in the scale pyramid 	
    ORBextractor.scaleFactor: 1.2
    
    # ORB Extractor: Number of levels in the scale pyramid	
    ORBextractor.nLevels: 8
    
    # ORB Extractor: Fast threshold
    # Image is divided in a grid. At each cell FAST are extracted imposing a minimum response.
    # Firstly we impose iniThFAST. If no corners are detected we impose a lower value minThFAST
    # You can lower these values if your images have low contrast			
    ORBextractor.iniThFAST: 20
    ORBextractor.minThFAST: 7
    
    #--------------------------------------------------------------------------------------------
    # Viewer Parameters
    #---------------------------------------------------------------------------------------------
    Viewer.KeyFrameSize: 0.05
    Viewer.KeyFrameLineWidth: 1
    Viewer.GraphLineWidth: 0.9
    Viewer.PointSize: 2
    Viewer.CameraSize: 0.08
    Viewer.CameraLineWidth: 3
    Viewer.ViewpointX: 0
    Viewer.ViewpointY: -0.7
    Viewer.ViewpointZ: -1.8
    Viewer.ViewpointF: 500
    
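The intrinsic and distortion values above are from my own camera; yours will differ. One way to get starting values is to read the factory calibration of the color stream through librealsense2, as in the sketch below (it assumes the same 640x480 BGR8 profile used later; in rs2_intrinsics the principal point cx/cy is named ppx/ppy, and for the Brown-Conrady distortion models coeffs[0..4] hold k1, k2, p1, p2, k3):

    #include <iostream>
    #include <librealsense2/rs.hpp>

    int main()
    {
        // Start a color stream so that its calibrated profile becomes available
        rs2::pipeline pipe;
        rs2::config cfg;
        cfg.enable_stream(RS2_STREAM_COLOR, 640, 480, RS2_FORMAT_BGR8, 30);
        rs2::pipeline_profile profile = pipe.start(cfg);

        // Query the intrinsics stored in the device calibration for the color stream
        rs2::video_stream_profile color_profile =
            profile.get_stream(RS2_STREAM_COLOR).as<rs2::video_stream_profile>();
        rs2_intrinsics intr = color_profile.get_intrinsics();

        std::cout << "Camera.fx: " << intr.fx        << "\nCamera.fy: " << intr.fy
                  << "\nCamera.cx: " << intr.ppx     << "\nCamera.cy: " << intr.ppy << std::endl;
        std::cout << "Camera.k1: " << intr.coeffs[0] << "\nCamera.k2: " << intr.coeffs[1]
                  << "\nCamera.p1: " << intr.coeffs[2] << "\nCamera.p2: " << intr.coeffs[3] << std::endl;

        pipe.stop();
        return 0;
    }

A dedicated calibration (for example a chessboard calibration with OpenCV) is usually more accurate than the factory values, but these are a reasonable starting point.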

Next, create a new file, Examples/Monocular/mono_D435i.cc:

    #include <iostream>
    #include <chrono>
    #include <thread>
    #include <atomic>
    #include <cstdio>

    #include <librealsense2/rs.hpp>
    #include <opencv2/core/core.hpp>
    #include <System.h>

    using namespace std;
    using namespace cv;

    constexpr int width  = 640;
    constexpr int height = 480;
    constexpr int fps    = 30;

    // Shared flag: the main loop runs while this is true
    std::atomic<bool> SLAM_state(false);

    // Add the desired streams to the configuration
    void enable_stream_init(rs2::config &cfg)
    {
        cfg.enable_stream(RS2_STREAM_COLOR, width, height, RS2_FORMAT_BGR8, fps);
        cfg.enable_stream(RS2_STREAM_DEPTH, width, height, RS2_FORMAT_Z16, fps);
        cfg.enable_stream(RS2_STREAM_INFRARED, 1, width, height, RS2_FORMAT_Y8, fps);
        cfg.enable_stream(RS2_STREAM_INFRARED, 2, width, height, RS2_FORMAT_Y8, fps);
        cfg.enable_stream(RS2_STREAM_ACCEL, RS2_FORMAT_MOTION_XYZ32F);
        cfg.enable_stream(RS2_STREAM_GYRO, RS2_FORMAT_MOTION_XYZ32F);
    }

    // Press 's' (followed by Enter) to stop
    void Stop_thread()
    {
        while (SLAM_state)
        {
            char c = getchar();
            if (c == 's')
            {
                SLAM_state = false;
            }
            std::chrono::milliseconds dura(5);
            std::this_thread::sleep_for(dura);
        }
    }

    // argv[1]: vocabulary (dictionary), argv[2]: settings file with the camera intrinsics
    int main(int argc, char **argv)
    {
        if (argc != 3)
        {
            cerr << endl << "Usage: ./mono_D435i path_to_vocabulary path_to_settings" << endl;
            return 1;
        }

        // Create SLAM system. It initializes all system threads and gets ready to process frames.
        ORB_SLAM2::System SLAM(argv[1], argv[2], ORB_SLAM2::System::MONOCULAR, true);

        SLAM_state = true;

        // Configure the RealSense
        rs2::context ctx;
        auto list = ctx.query_devices(); // Get a snapshot of currently connected devices
        if (list.size() == 0)
            throw std::runtime_error("No device detected. Is it plugged in?");
        rs2::device dev = list.front();

        rs2::frameset frames;
        // Construct a pipeline which abstracts the device
        // (see https://baike.so.com/doc/1559953-1649001.html for a general explanation of "pipeline")
        rs2::pipeline pipe;
        // Create a configuration for configuring the pipeline with a non-default profile
        rs2::config cfg;
        enable_stream_init(cfg);
        // Instruct the pipeline to start streaming with the requested configuration
        pipe.start(cfg);
        // Drop the first frames so that auto-exposure can settle
        for (int i = 0; i < 30; i++)
        {
            frames = pipe.wait_for_frames();
        }

        cout << endl << "-------" << endl;
        cout << "Start processing sequence ..." << endl;

        thread stop_thread(Stop_thread);
        stop_thread.detach(); // detach: getchar() may still be blocking when main returns

        // Align the other streams to the color stream (construct once, outside the loop)
        rs2::align align_to_color(RS2_STREAM_COLOR);

        while (SLAM_state)
        {
            frames = pipe.wait_for_frames(); // Wait for the next set of frames from all enabled streams
            frames = align_to_color.process(frames);
            // Get imu data
            //Get_imu_data(frames);

            // Get each frame
            rs2::frame color_frame = frames.get_color_frame();
            //rs2::depth_frame depth_frame = frames.get_depth_frame();
            //rs2::video_frame ir_frame_left = frames.get_infrared_frame(1);
            //rs2::video_frame ir_frame_right = frames.get_infrared_frame(2);

            // Creating an OpenCV matrix from the color image (wraps the buffer, no copy)
            Mat color(Size(width, height), CV_8UC3, (void*)color_frame.get_data(), Mat::AUTO_STEP);
            // get_timestamp() returns milliseconds; ORB_SLAM2 expects seconds
            double tframe = color_frame.get_timestamp() / 1000.0;
            //Mat pic_right(Size(width,height), CV_8UC1, (void*)ir_frame_right.get_data());
            //Mat pic_left(Size(width,height), CV_8UC1, (void*)ir_frame_left.get_data());
            //Mat pic_depth(Size(width,height), CV_16U, (void*)depth_frame.get_data(), Mat::AUTO_STEP);
            //namedWindow("Display Image", WINDOW_AUTOSIZE);
            //imshow("Display Image", color);

            if (color.empty())
            {
                cerr << endl << "Failed to grab color image at: " << tframe << endl;
                return 1;
            }

    #ifdef COMPILEDWITHC11
            std::chrono::steady_clock::time_point t1 = std::chrono::steady_clock::now();
    #else
            std::chrono::monotonic_clock::time_point t1 = std::chrono::monotonic_clock::now();
    #endif

            // Pass the image to the SLAM system
            SLAM.TrackMonocular(color, tframe);

    #ifdef COMPILEDWITHC11
            std::chrono::steady_clock::time_point t2 = std::chrono::steady_clock::now();
    #else
            std::chrono::monotonic_clock::time_point t2 = std::chrono::monotonic_clock::now();
    #endif
            double ttrack = std::chrono::duration_cast<std::chrono::duration<double> >(t2 - t1).count();

            // TODO: check the tracking time (see the sketch after this listing)
        }

        // Stop all threads
        SLAM.Shutdown();
        cout << "-------" << endl << endl;
        SLAM.SaveKeyFrameTrajectoryTUM("KeyFrameTrajectory.txt");

        return 0;
    }
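The TODO at the end of the loop ("check the tracking time") can be filled in by comparing ttrack against the camera frame period, for example with a small helper like the sketch below; the name CheckTrackingTime is only illustrative, and it would be called right after ttrack is computed, e.g. CheckTrackingTime(ttrack, fps);.

    #include <iostream>

    // Sketch for the "check the tracking time" TODO: warn when a single
    // TrackMonocular call takes longer than one camera frame period.
    void CheckTrackingTime(double ttrack, double fps)
    {
        const double frame_period = 1.0 / fps;   // seconds per frame (1/30 s at 30 fps)
        if (ttrack > frame_period)
        {
            std::cerr << "Tracking took " << ttrack << " s, longer than the frame period of "
                      << frame_period << " s; the camera will outrun the tracker." << std::endl;
        }
    }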

Finally, configure CMakeLists.txt.

Find this line:

    set(CMAKE_RUNTIME_OUTPUT_DIRECTORY ${PROJECT_SOURCE_DIR}/Examples/Monocular)

Below it, add:

    add_executable(mono_D435i
        Examples/Monocular/mono_D435i.cc)
    target_link_libraries(mono_D435i ${PROJECT_NAME})

Further up in the file, next to the other find_package calls, add:

    find_package(realsense2 REQUIRED)   # newly added

Inside the include_directories(...) block, add:

    ${realsense2_INCLUDE_DIRS}

Inside the target_link_libraries(...) call, add:

    ${realsense2_LIBRARY}

Run ./build.sh to rebuild.
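If the build fails around librealsense2, a small stand-alone check, independent of ORB_SLAM2, can confirm that the library is installed and linkable (the file name rs_check.cpp and the command g++ -std=c++11 rs_check.cpp -o rs_check -lrealsense2 are only illustrative):

    #include <iostream>
    #include <librealsense2/rs.hpp>

    int main()
    {
        // Version of librealsense2 the program was compiled against
        std::cout << "librealsense " << RS2_API_VERSION_STR << std::endl;

        // Enumerate the connected RealSense devices
        rs2::context ctx;
        rs2::device_list devices = ctx.query_devices();
        std::cout << devices.size() << " device(s) found" << std::endl;
        for (rs2::device dev : devices)
            std::cout << "  " << dev.get_info(RS2_CAMERA_INFO_NAME) << std::endl;

        return 0;
    }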

Then run:

    ./Examples/Monocular/mono_D435i Vocabulary/ORBvoc.txt Examples/Monocular/D435i.yaml

Result:

[Screenshot of the running system]
