
SLAM in Practice: ORB_SLAM2 with the D435 (Part 2)


Modifying and Running the Program: Building a Real-Time System

Source file download:

https://download.csdn.net/download/ly1997th/10394408
https://download.csdn.net/download/ly1997th/1039440

1. Getting color and depth data from the D435. The example in librealsense/examples/save-to-disk shows how the data is obtained: the depth frame is retrieved first, then the color frame, and both are written to disk. We can reuse this approach to get our data. (In principle the disk round-trip could be skipped by converting the RealSense stream directly into cv::Mat and passing that to ORB_SLAM2; I did not manage to get that working, but a sketch of the idea is given after the modified function below.) Note that ORB_SLAM2 also needs a timestamp; the video_frame class (actually its base class, frame) provides get_timestamp() for this. Finally, wrap all of this into a function

double LoadImages(cv::Mat &imRGB,cv::Mat &imD)

            1.1 Code from the original example

    for (auto&& frame : pipe.wait_for_frames())
    {
        // We can only save video frames as pngs, so we skip the rest
        if (auto vf = frame.as<rs2::video_frame>())
        {
            auto stream = frame.get_profile().stream_type();
            // Use the colorizer to get an rgb image for the depth stream
            if (vf.is<rs2::depth_frame>()) vf = color_map(frame);

            // Write images to disk
            std::stringstream png_file;
            png_file << "rs-save-to-disk-output-" << vf.get_profile().stream_name() << ".png";
            stbi_write_png(png_file.str().c_str(), vf.get_width(), vf.get_height(),
                           vf.get_bytes_per_pixel(), vf.get_data(), vf.get_stride_in_bytes());
            std::cout << "Saved " << png_file.str() << std::endl;

            // Record per-frame metadata for UVC streams
            // std::stringstream csv_file;
            // csv_file << "rs-save-to-disk-output-" << vf.get_profile().stream_name()
            //          << "-metadata.csv";
            // metadata_to_csv(vf, csv_file.str());
        }
    }

            1.2  double LoadImages(cv::Mat &imRGB,cv::Mat &imD)

double LoadImages(cv::Mat &imRGB,cv::Mat &imD)
{
    // Declare depth colorizer for pretty visualization of depth data
    rs2::colorizer color_map;

    // Declare RealSense pipeline, encapsulating the actual device and sensors
    rs2::pipeline pipe;
    // Start streaming with default recommended configuration
    pipe.start();

    // Wait for the next set of frames from the camera. Now that autoexposure, etc.
    // has settled, we will write these to disk
    int flag=0;
    double timestamp=0.0;
    int ping=1000;
    for (auto&& frame : pipe.wait_for_frames(ping))
    {
        // We can only save video frames as pngs, so we skip the rest
        if (auto vf = frame.as<rs2::video_frame>())
        {
            // Use the colorizer to get an rgb image for the depth stream
            if (vf.is<rs2::depth_frame>()) vf = color_map(frame);

            // Write images to disk
            timestamp = static_cast<double>(vf.get_timestamp());
            std::stringstream png_file;
            //png_file << n<<"_"<<vf.get_profile().stream_name() << ".png";
            png_file <<timestamp<<vf.get_profile().stream_name() << ".png";
            stbi_write_png(png_file.str().c_str(), vf.get_width(), vf.get_height(),
                           vf.get_bytes_per_pixel(), vf.get_data(), vf.get_stride_in_bytes());
            if(flag==0) 
            {
                imD = cv::imread(png_file.str().c_str(),CV_LOAD_IMAGE_UNCHANGED);
                flag=1;
            }
            else
            {
                imRGB = cv::imread(png_file.str().c_str(),CV_LOAD_IMAGE_UNCHANGED);
                flag=0;
            }
        }
    }
    return timestamp; 
}
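
As mentioned in step 1, the disk round-trip could in principle be avoided by wrapping the raw RealSense buffers in cv::Mat directly. I did not get this working myself, so the following is only a sketch of the idea, not tested code; it assumes the pipeline is configured with a BGR8 color stream and a Z16 depth stream, and GrabImages is a hypothetical replacement for LoadImages:

// Sketch only: hand librealsense buffers to OpenCV without writing PNGs.
// Assumes the pipeline was started elsewhere, e.g.:
//   rs2::config cfg;
//   cfg.enable_stream(RS2_STREAM_COLOR, 640, 480, RS2_FORMAT_BGR8, 30);
//   cfg.enable_stream(RS2_STREAM_DEPTH, 640, 480, RS2_FORMAT_Z16, 30);
//   pipe.start(cfg);
double GrabImages(rs2::pipeline &pipe, cv::Mat &imRGB, cv::Mat &imD)
{
    rs2::frameset frames = pipe.wait_for_frames();
    rs2::video_frame color = frames.get_color_frame();
    rs2::depth_frame depth = frames.get_depth_frame();

    // Wrap the raw buffers, then clone so the Mats own their memory
    // after the frameset goes out of scope
    cv::Mat colorMat(cv::Size(color.get_width(), color.get_height()),
                     CV_8UC3, (void*)color.get_data(), cv::Mat::AUTO_STEP);
    cv::Mat depthMat(cv::Size(depth.get_width(), depth.get_height()),
                     CV_16UC1, (void*)depth.get_data(), cv::Mat::AUTO_STEP);
    imRGB = colorMat.clone();
    imD   = depthMat.clone();

    // Same timestamp source as in LoadImages above
    return color.get_timestamp();
}

With a BGR8 color stream, Camera.RGB in the ORB_SLAM2 settings file should be 0 (and 1 for RGB8). The D435 reports depth in units of 1 mm by default, so DepthMapFactor would then be 1000.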

2. The parts of ORB_SLAM2 that have to be modified are CMakeLists.txt and rgbd_tum.cc, and both need substantial changes. Start with rgbd_tum.cc: since we are building a real-time system, there is no longer any need to read images from disk or to associate the color and depth files, which removes a fair amount of the original code. The complete rgbd_tum.cc is shown below.

#include <iostream>
#include <algorithm>
#include <fstream>
#include <sstream>   
#include <chrono>
#include <climits>   // for INT_MAX

#include <librealsense2/rs.hpp>
#include <opencv2/core/core.hpp>
#include <System.h>

using namespace std;

// 3rd party header for writing png files
#define STB_IMAGE_WRITE_IMPLEMENTATION
#include "stb_image_write.h"

double LoadImages(cv::Mat &imRGB,cv::Mat &imD);

int main(int argc, char **argv)
{
    if(argc != 3)
    {
        cerr << endl << "Usage: ./rgbd_ly path_to_vocabulary path_to_settings" << endl;
        return 1;
    }

    // Create SLAM system. It initializes all system threads and gets ready to process frames.
    ORB_SLAM2::System SLAM(argv[1],argv[2],ORB_SLAM2::System::RGBD,true);

    // Vector for tracking time statistics
    vector<float> vTimesTrack;

    cout << endl << "-------" << endl;
    cout << "Start processing sequence ..." << endl;
    // Main loop
    cv::Mat imRGB, imD;
    const int Max_size=INT_MAX;   // no fixed sequence length: loop until the process is stopped
    for(int ni=0; ni<Max_size; ni++)
    {
        // load image,depthmap,timestamp 
        double tframe = LoadImages(imRGB,imD);
        if(imRGB.empty())
        {
            cerr << endl << "Failed to load color mage"<<endl;
            return 1;
        }
        if(imD.empty())
        {
            cerr << endl << "Failed to load depth image"<<endl;
            return 1;
        }

#ifdef COMPILEDWITHC11
        std::chrono::steady_clock::time_point t1 = std::chrono::steady_clock::now();
#else
        std::chrono::monotonic_clock::time_point t1 = std::chrono::monotonic_clock::now();
#endif

        // Pass the image to the SLAM system
        SLAM.TrackRGBD(imRGB,imD,tframe);

#ifdef COMPILEDWITHC11
        std::chrono::steady_clock::time_point t2 = std::chrono::steady_clock::now();
#else
        std::chrono::monotonic_clock::time_point t2 = std::chrono::monotonic_clock::now();
#endif

        double ttrack= std::chrono::duration_cast<std::chrono::duration<double> >(t2 - t1).count();

        vTimesTrack.push_back(ttrack);
    }

    // Stop all threads
    SLAM.Shutdown();

    // Tracking time statistics
    sort(vTimesTrack.begin(),vTimesTrack.end());
    int nFrames = (int)vTimesTrack.size();
    float totaltime = 0;
    for(int ni=0; ni<nFrames; ni++)
    {
        totaltime+=vTimesTrack[ni];
    }
    cout << "-------" << endl << endl;
    cout << "median tracking time: " << vTimesTrack[nFrames/2] << endl;
    cout << "mean tracking time: " << totaltime/nFrames << endl;

    // Save camera trajectory
    SLAM.SaveTrajectoryTUM("CameraTrajectory.txt");
    SLAM.SaveKeyFrameTrajectoryTUM("KeyFrameTrajectory.txt");   

    return 0;
}

double LoadImages(cv::Mat &imRGB,cv::Mat &imD)
{
    // Declare depth colorizer for pretty visualization of depth data
    rs2::colorizer color_map;

    // Declare RealSense pipeline, encapsulating the actual device and sensors
    rs2::pipeline pipe;
    // Start streaming with default recommended configuration
    pipe.start();

    int flag=0;
    double timestamp=0.0;
    int ping=1000;
    for (auto&& frame : pipe.wait_for_frames(ping))
    {
        // We can only save video frames as pngs, so we skip the rest
        if (auto vf = frame.as<rs2::video_frame>())
        {
            // Use the colorizer to get an rgb image for the depth stream
            if (vf.is<rs2::depth_frame>()) vf = color_map(frame);

            // Write images to disk
            timestamp = static_cast<double>(vf.get_timestamp());
            std::stringstream png_file;
            //png_file << n<<"_"<<vf.get_profile().stream_name() << ".png";
            png_file <<timestamp<<vf.get_profile().stream_name() << ".png";
            stbi_write_png(png_file.str().c_str(), vf.get_width(), vf.get_height(),
                           vf.get_bytes_per_pixel(), vf.get_data(), vf.get_stride_in_bytes());
            if(flag==0) 
            {
                imD = cv::imread(png_file.str().c_str(),CV_LOAD_IMAGE_UNCHANGED);
                flag=1;
            }
            else
            {
                imRGB = cv::imread(png_file.str().c_str(),CV_LOAD_IMAGE_UNCHANGED);
                flag=0;
            }
        }
    }
    return timestamp; 
}
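
Two things are worth noting about the listing above. First, LoadImages() constructs and starts a new rs2::pipeline on every call; for a real-time loop the pipeline would normally be created and started once and then reused. Second, the code passes the depth image to ORB_SLAM2 without registering it to the color image, even though the D435's depth and color sensors have different viewpoints and ORB_SLAM2 assumes per-pixel correspondence between the two. librealsense provides rs2::align for this; the loop below is only an illustrative sketch of how it could be used, not part of the code above:

// Sketch: keep one pipeline alive and align depth to the color viewpoint
rs2::pipeline pipe;
pipe.start();
rs2::align align_to_color(RS2_STREAM_COLOR);

while (true)
{
    rs2::frameset frames = pipe.wait_for_frames();
    rs2::frameset aligned = align_to_color.process(frames);

    rs2::video_frame color = aligned.get_color_frame();
    rs2::depth_frame depth = aligned.get_depth_frame();
    // ... convert to cv::Mat (or save PNGs as above) and call SLAM.TrackRGBD(...)
}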

3. Modify CMakeLists.txt. Since we only care about the RGB-D example here and librealsense has to be added, fairly large changes are needed; the main work is merging the save-to-disk CMakeLists.txt into the ORB_SLAM2 one, including its third-party dependencies. The complete file is as follows:

cmake_minimum_required(VERSION 2.8)
project(ORB_SLAM2)

IF(NOT CMAKE_BUILD_TYPE)
  SET(CMAKE_BUILD_TYPE Release)
ENDIF()

MESSAGE("Build type: " ${CMAKE_BUILD_TYPE})

set(CMAKE_C_FLAGS "${CMAKE_C_FLAGS}  -Wall  -O3 -march=native ")
set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -Wall   -O3 -march=native")

# Check C++11 or C++0x support
include(CheckCXXCompilerFlag)
CHECK_CXX_COMPILER_FLAG("-std=c++11" COMPILER_SUPPORTS_CXX11)
CHECK_CXX_COMPILER_FLAG("-std=c++0x" COMPILER_SUPPORTS_CXX0X)
if(COMPILER_SUPPORTS_CXX11)
   set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -std=c++11")
   add_definitions(-DCOMPILEDWITHC11)
   message(STATUS "Using flag -std=c++11.")
elseif(COMPILER_SUPPORTS_CXX0X)
   set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -std=c++0x")
   add_definitions(-DCOMPILEDWITHC0X)
   message(STATUS "Using flag -std=c++0x.")
else()
   message(FATAL_ERROR "The compiler ${CMAKE_CXX_COMPILER} has no C++11 support. Please use a different C++ compiler.")
endif()

LIST(APPEND CMAKE_MODULE_PATH ${PROJECT_SOURCE_DIR}/cmake_modules)

find_package(OpenCV 3.0 QUIET)
if(NOT OpenCV_FOUND)
   find_package(OpenCV 2.4.3 QUIET)
   if(NOT OpenCV_FOUND)
      message(FATAL_ERROR "OpenCV > 2.4.3 not found.")
   endif()
endif()

find_package(Eigen3 3.1.0 REQUIRED)
find_package(Pangolin REQUIRED)


if(true)
    find_package(OpenGL)
    if(NOT OPENGL_FOUND)
        message(FATAL_ERROR "\n\n OpenGL package is missing!\n\n")
    endif()

    set(DEPENDENCIES realsense2 ${OPENGL_LIBRARIES})

    if(WIN32)
        list(APPEND DEPENDENCIES glfw3)
    else()
        # Find glfw header
        find_path(GLFW_INCLUDE_DIR NAMES GLFW/glfw3.h
            PATHS /usr/X11R6/include
                  /usr/include/X11
                  /opt/graphics/OpenGL/include
                  /opt/graphics/OpenGL/contrib/libglfw
                  /usr/local/include
                  /usr/include/GL
                  /usr/include
        )
        # Find glfw library
        find_library(GLFW_LIBRARIES NAMES glfw glfw3
                PATHS /usr/lib64
                      /usr/lib
                      /usr/lib/${CMAKE_LIBRARY_ARCHITECTURE}
                      /usr/local/lib64
                      /usr/local/lib
                      /usr/local/lib/${CMAKE_LIBRARY_ARCHITECTURE}
                      /usr/X11R6/lib
        )
        if(APPLE)
            find_library(COCOA_LIBRARY Cocoa)
            find_library(IOKIT_LIBRARY IOKit)
            find_library(COREVIDEO_LIBRARY CoreVideo)
            LIST(APPEND DEPENDENCIES ${COCOA_LIBRARY} ${IOKIT_LIBRARY} ${COREVIDEO_LIBRARY})
        endif()
        list(APPEND DEPENDENCIES m ${GLFW_LIBRARIES} ${LIBUSB1_LIBRARIES})
        include_directories(${GLFW_INCLUDE_DIR})
    endif()
else()
    set(DEPENDENCIES realsense2)
    if(NOT WIN32)
        list(APPEND DEPENDENCIES m ${LIBUSB1_LIBRARIES})
    endif()
endif()

include_directories(
${PROJECT_SOURCE_DIR}
${PROJECT_SOURCE_DIR}/include
${EIGEN3_INCLUDE_DIR}
${Pangolin_INCLUDE_DIRS}
${PROJECT_SOURCE_DIR}/Examples/RGB-D/common 
${PROJECT_SOURCE_DIR}/Examples/RGB-D/third-party 
${PROJECT_SOURCE_DIR}/Examples/RGB-D/third-party/tclap/include
)

set(CMAKE_LIBRARY_OUTPUT_DIRECTORY ${PROJECT_SOURCE_DIR}/lib)

add_library(${PROJECT_NAME} SHARED
src/System.cc
src/Tracking.cc
src/LocalMapping.cc
src/LoopClosing.cc
src/ORBextractor.cc
src/ORBmatcher.cc
src/FrameDrawer.cc
src/Converter.cc
src/MapPoint.cc
src/KeyFrame.cc
src/Map.cc
src/MapDrawer.cc
src/Optimizer.cc
src/PnPsolver.cc
src/Frame.cc
src/KeyFrameDatabase.cc
src/Sim3Solver.cc
src/Initializer.cc
src/Viewer.cc
)

target_link_libraries(${PROJECT_NAME}
${OpenCV_LIBS}
${EIGEN3_LIBS}
${Pangolin_LIBRARIES}
${PROJECT_SOURCE_DIR}/Thirdparty/DBoW2/lib/libDBoW2.so
${PROJECT_SOURCE_DIR}/Thirdparty/g2o/lib/libg2o.so
${DEPENDENCIES}
)

# Build examples

set(CMAKE_RUNTIME_OUTPUT_DIRECTORY ${PROJECT_SOURCE_DIR}/Examples/RGB-D)

add_executable(rgbd_ly
Examples/RGB-D/rgbd_tum.cc)
target_link_libraries(rgbd_ly ${PROJECT_NAME})

4. Move some files and third-party libraries. The save-to-disk example shows which third-party directories are needed (common, third-party, and so on); copy them into Examples/RGB-D. The header used for saving the images, stb_image_write.h, also has to be taken out of common and placed directly in Examples/RGB-D (in theory putting it in common or in ORB_SLAM2/include should also work, but I could not get that to build; feel free to try it and tell me why).

5. Finally, run cmake and make on the whole project again, then try to run it. (I never managed to finish the calibration part; if you solve it, please let everyone know. Here I simply used the calibration data from other people in my group.)

./Examples/RGB-D/rgbd_ly Vocabulary/ORBvoc.txt Examples/RGB-D/D435.yaml
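
On the calibration point mentioned in the previous step: librealsense can report the factory intrinsics of the color stream, which can then be copied into D435.yaml (Camera.fx, Camera.fy, Camera.cx, Camera.cy and the distortion coefficients). The stand-alone sketch below shows the idea; it simply prints the values for the default color profile:

// Sketch: print the color-stream intrinsics and the depth scale of the connected camera
#include <iostream>
#include <librealsense2/rs.hpp>

int main()
{
    rs2::pipeline pipe;
    rs2::pipeline_profile profile = pipe.start();

    // Intrinsics of the color stream (fx, fy, cx, cy, distortion coefficients)
    auto color_stream = profile.get_stream(RS2_STREAM_COLOR)
                               .as<rs2::video_stream_profile>();
    rs2_intrinsics intr = color_stream.get_intrinsics();
    std::cout << "Camera.fx: " << intr.fx << "\n"
              << "Camera.fy: " << intr.fy << "\n"
              << "Camera.cx: " << intr.ppx << "\n"
              << "Camera.cy: " << intr.ppy << std::endl;
    for (int i = 0; i < 5; ++i)
        std::cout << "coeff[" << i << "]: " << intr.coeffs[i] << std::endl;

    // Depth scale in meters per unit; 0.001 corresponds to DepthMapFactor: 1000
    rs2::depth_sensor ds = profile.get_device().first<rs2::depth_sensor>();
    std::cout << "depth scale: " << ds.get_depth_scale() << std::endl;

    return 0;
}

The remaining entries in D435.yaml (Camera.fps, Camera.RGB, DepthMapFactor, the ORBextractor and Viewer parameters) follow the same layout as the TUM settings files shipped with ORB_SLAM2, so one of those can be used as a template.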

6. If it runs successfully (it usually takes quite a bit of fiddling to get there), you should see something like the screenshots below (captured from a recorded video).

[Screenshots of the running system]