Performed some testing of ORB-based feature extraction
mgladkova committed Jul 23, 2018
1 parent e385586 commit 37be50f
Showing 5 changed files with 70 additions and 12 deletions.
2 changes: 1 addition & 1 deletion include/VisualSLAM.h
@@ -52,7 +52,7 @@ class VisualSLAM {
void readCameraIntrisics(std::string camera_intrinsics_file);
void readGroundTruthData(std::string fileName, int numberFrames, std::vector<Sophus::SE3d>& groundTruthData);

- Sophus::SE3d performFrontEndStep(cv::Mat image_left, cv::Mat disparity_map, std::vector<cv::KeyPoint>& keyPointsPrevFrame, cv::Mat& descriptorsPrevFrame); // feature detection / tracking and matching
+ Sophus::SE3d performFrontEndStep(cv::Mat image_left, cv::Mat disparity_map, std::vector<cv::KeyPoint>& keyPointsPrevFrame, cv::Mat& descriptorsPrevFrame, bool isLeftImage); // feature detection / tracking and matching
Sophus::SE3d performFrontEndStepWithTracking(cv::Mat image_left, cv::Mat disparity_map, std::vector<cv::Point2f>& pointsCurrentFrame, std::vector<cv::Point2f>& pointsPrevFrame, cv::Mat& prevImageLeft, bool isLeftImage);

//bool performPoseGraphOptimization(int keyFrameStep, int numKeyFrames);
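The added bool isLeftImage flag lets the same front-end routine feed either map_left or map_right (see the VisualSLAM.cpp hunk further down). A minimal caller-side sketch, mirroring the commented-out calls in testVisualSLAM.cpp, with one keypoint/descriptor buffer per camera:

    // Per-camera state carried between frames (names follow testVisualSLAM.cpp).
    std::vector<cv::KeyPoint> keypoints_left, keypoints_right;
    cv::Mat descriptors_left, descriptors_right;

    // Inside the per-frame loop: run the ORB front end once per camera.
    Sophus::SE3d pose_left  = slam.performFrontEndStep(images_left[i],  disparity_map, keypoints_left,  descriptors_left,  true);
    Sophus::SE3d pose_right = slam.performFrontEndStep(images_right[i], disparity_map, keypoints_right, descriptors_right, false);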
2 changes: 1 addition & 1 deletion src/BundleAdjuster.cpp
@@ -93,7 +93,7 @@ bool BundleAdjuster::performBAWithKeyFrames(Map& map_left, Map& map_right, int k
ceres::Problem problem;
ceres::LossFunction* loss_function = new ceres::HuberLoss(2.0);
double baseline = 0.53716;
- double confid = 1e2;
+ double confid = 1e3;
Eigen::Matrix<double, 7, 7> information_matrix = Eigen::MatrixXd::Identity(7, 7)*confid;

for (int i = startFrame; i < currentCameraIndex; i += keyFrameStep){
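The only change is the confidence scalar applied to the 7x7 identity information matrix, raised from 1e2 to 1e3, so the corresponding pose terms are weighted ten times more strongly. A rough sketch of how a scaled identity information matrix typically weights a residual; the square-root-information weighting below is an assumption for illustration, not necessarily what performBAWithKeyFrames does:

    #include <Eigen/Dense>

    // Hedged sketch, not the project's actual cost functor: the square root of the
    // information matrix scales a 7-dof error before Ceres squares it, so a larger
    // `confid` makes the corresponding constraint stiffer.
    Eigen::Matrix<double, 7, 1> weightPoseError(const Eigen::Matrix<double, 7, 1>& error)
    {
        const double confid = 1e3;  // value set by this commit
        Eigen::Matrix<double, 7, 7> information = Eigen::MatrixXd::Identity(7, 7) * confid;
        Eigen::Matrix<double, 7, 7> sqrt_information = information.llt().matrixL(); // = sqrt(confid) * I
        return sqrt_information * error;  // residual handed to the solver
    }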
2 changes: 1 addition & 1 deletion src/VisualOdometry.cpp
@@ -31,7 +31,7 @@ void VisualOdometry::setKeyFrameKeypoints(std::vector<cv::KeyPoint> updatedKeypo
}

void VisualOdometry::extractORBFeatures(cv::Mat frame_new, std::vector<cv::KeyPoint>& keypoints_new, cv::Mat& descriptors_new){
- int max_features = 2000;
+ int max_features = 200;
// Detect ORB features and compute descriptors.
cv::Ptr<cv::Feature2D> orb = cv::ORB::create(max_features);
orb->detect(frame_new, keypoints_new);
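The change here is the ORB feature budget, lowered from 2000 to 200 keypoints per image. For reference, a self-contained sketch of the extraction step, using OpenCV's combined detectAndCompute rather than the separate detect/compute calls of extractORBFeatures:

    #include <opencv2/features2d.hpp>
    #include <vector>

    // Minimal ORB extraction: detect up to max_features keypoints and compute
    // their binary descriptors in one call.
    void extractORB(const cv::Mat& frame, std::vector<cv::KeyPoint>& keypoints, cv::Mat& descriptors)
    {
        const int max_features = 200;                          // value set by this commit
        cv::Ptr<cv::ORB> orb = cv::ORB::create(max_features);  // keeps the strongest responses
        orb->detectAndCompute(frame, cv::noArray(), keypoints, descriptors);
    }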
69 changes: 62 additions & 7 deletions src/VisualSLAM.cpp
@@ -219,18 +219,34 @@ void VisualSLAM::readGroundTruthData(std::string fileName, int numberFrames, std
this->groundTruthData = groundTruthData;
}

- Sophus::SE3d VisualSLAM::performFrontEndStep(cv::Mat image_left, cv::Mat disparityCurrentFrame, std::vector<cv::KeyPoint>& keyPointsPrevFrame, cv::Mat& descriptorsPrevFrame){
+ Sophus::SE3d VisualSLAM::performFrontEndStep(cv::Mat image, cv::Mat disparityCurrentFrame, std::vector<cv::KeyPoint>& keyPointsPrevFrame, cv::Mat& descriptorsPrevFrame, bool isLeftImage){
cv::Mat descriptorsCurrentFrame;
std::vector<cv::KeyPoint> keyPointsCurrentFrame;

- VO.extractORBFeatures(image_left, keyPointsCurrentFrame, descriptorsCurrentFrame);
+ VO.extractORBFeatures(image, keyPointsCurrentFrame, descriptorsCurrentFrame);

Sophus::SE3d pose;
if (keyPointsPrevFrame.empty()){
- keyPointsPrevFrame = keyPointsCurrentFrame;
- descriptorsCurrentFrame.copyTo(descriptorsPrevFrame);
+ std::vector<cv::Point2f> p2d_currFrame;
+ for (auto kpt: keyPointsCurrentFrame){
+ p2d_currFrame.push_back(kpt.pt);
+ }
+ std::vector<uchar> validPoints3D;
+ std::vector<cv::Point3f> p3d_currFrame = VO.get3DCoordinates(p2d_currFrame, disparityCurrentFrame, K, validPoints3D);
+
+ std::vector<int> indices(p2d_currFrame.size());
+ std::iota(indices.begin(), indices.end(), 0);
+
+ if (isLeftImage){
+ map_left.updateDataCurrentFrame(pose, p2d_currFrame, indices, p3d_currFrame, true, false);
+ } else {
+ map_right.updateDataCurrentFrame(pose, p2d_currFrame, indices, p3d_currFrame, true, true);
+ }
+
+ keyPointsPrevFrame = keyPointsCurrentFrame;
+ descriptorsCurrentFrame.copyTo(descriptorsPrevFrame);

- return pose;
+ return pose;
}

std::vector<cv::DMatch> matches = VO.findGoodORBFeatureMatches(keyPointsPrevFrame, keyPointsCurrentFrame, descriptorsPrevFrame, descriptorsCurrentFrame);
@@ -242,9 +258,48 @@ Sophus::SE3d VisualSLAM::performFrontEndStep(cv::Mat image_left, cv::Mat dispari
std::vector<uchar> validPoints3D;
std::vector<cv::Point3f> p3d_currFrame = VO.get3DCoordinates(p2d_currFrame, disparityCurrentFrame, K, validPoints3D);

- std::vector<int> indices;
+ std::vector<cv::Point2f> trackedPrevFramePoints, trackedCurrFramePoints;
+ std::vector<cv::Point3f> trackedPoints3DCurrentFrame;

- VO.estimatePose3D2D(p3d_currFrame, p2d_prevFrame, p2d_currFrame, indices, K, pose);
+ for (int i = 0; i < validPoints3D.size(); i++){
+ if (validPoints3D[i] && checkPoint2DCoordinates(p2d_currFrame[i], image)){
+ trackedPrevFramePoints.push_back(p2d_prevFrame[i]);
+ trackedCurrFramePoints.push_back(p2d_currFrame[i]);
+ trackedPoints3DCurrentFrame.push_back(p3d_currFrame[i]);
+ }
+ }
+
+ std::vector<int> trackedPointIndices_dummy(trackedCurrFramePoints.size());
+ std::iota(trackedPointIndices_dummy.begin(), trackedPointIndices_dummy.end(), 0);
+
+ VO.estimatePose3D2D(trackedPoints3DCurrentFrame, trackedPrevFramePoints, trackedCurrFramePoints, trackedPointIndices_dummy, K, pose);
+
+ std::vector<int> indices(trackedCurrFramePoints.size());
+ std::iota(indices.begin(), indices.end(), 0);
+
+
+ int keyFrameStep = 3;
+ int numKeyFrames = 20;
+
+ if (isLeftImage){
+ map_left.updateDataCurrentFrame(pose, trackedCurrFramePoints, indices, trackedPoints3DCurrentFrame, true, false);
+
+ //if ((map_left.getCurrentCameraIndex() - numKeyFrames*keyFrameStep) >= 0){
+ /*if ((map_left.getCurrentCameraIndex() % (numKeyFrames*keyFrameStep)) == 0 && map_left.getCurrentCameraIndex() > 0){
+ std::string fileName = "BAFile" + std::to_string(map_left.getCurrentCameraIndex() / (numKeyFrames*keyFrameStep)) + "L.txt";
+ map_left.writeBAFile(fileName, keyFrameStep, numKeyFrames);
+ BA.performBAWithKeyFrames(map_left, keyFrameStep, numKeyFrames);
+ }*/
+ } else {
+ map_right.updateDataCurrentFrame(pose, trackedCurrFramePoints, indices, trackedPoints3DCurrentFrame, true, true);
+
+ //if ((map_right.getCurrentCameraIndex() - numKeyFrames*keyFrameStep) >= 0){
+ if ((map_right.getCurrentCameraIndex() % (numKeyFrames*keyFrameStep)) == 0 && map_right.getCurrentCameraIndex() > 0){
+ std::string fileName = "BAFile" + std::to_string(map_right.getCurrentCameraIndex() / (numKeyFrames*keyFrameStep)) + "R.txt";
+ //map_right.writeBAFile(fileName, keyFrameStep, numKeyFrames);
+ BA.performBAWithKeyFrames(map_left, map_right, keyFrameStep, numKeyFrames);
+ }
+ }

descriptorsCurrentFrame.copyTo(descriptorsPrevFrame);

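The rewritten performFrontEndStep back-projects the matched keypoints to 3D with get3DCoordinates, keeps only points with a valid disparity and in-image coordinates, estimates the camera pose from the surviving 3D-2D correspondences, and stores the tracked points in the left or right map; every numKeyFrames*keyFrameStep = 60 frames the right-image branch also triggers performBAWithKeyFrames. get3DCoordinates itself is not shown in this commit; the sketch below assumes the standard disparity back-projection, a CV_32F disparity map, a CV_64F intrinsics matrix, and the 0.53716 m baseline hard-coded in BundleAdjuster.cpp:

    #include <opencv2/core.hpp>
    #include <vector>

    // Hedged sketch of stereo back-projection: Z = f * b / d, then invert the
    // pinhole model with the intrinsics K. Points with non-positive disparity
    // stay flagged as invalid, mirroring the validPoints3D mask in the diff.
    std::vector<cv::Point3f> backproject(const std::vector<cv::Point2f>& pts,
                                         const cv::Mat& disparity, const cv::Mat& K,
                                         std::vector<uchar>& valid)
    {
        const double fx = K.at<double>(0, 0), fy = K.at<double>(1, 1);
        const double cx = K.at<double>(0, 2), cy = K.at<double>(1, 2);
        const double baseline = 0.53716;  // stereo baseline in meters, as in BundleAdjuster.cpp
        std::vector<cv::Point3f> points3d(pts.size(), cv::Point3f(0.f, 0.f, 0.f));
        valid.assign(pts.size(), 0);
        for (size_t i = 0; i < pts.size(); ++i) {
            const float d = disparity.at<float>(cvRound(pts[i].y), cvRound(pts[i].x));
            if (d <= 0.f) continue;              // no depth available for this pixel
            const double Z = fx * baseline / d;  // depth from disparity
            const double X = (pts[i].x - cx) * Z / fx;
            const double Y = (pts[i].y - cy) * Z / fy;
            points3d[i] = cv::Point3f(static_cast<float>(X), static_cast<float>(Y), static_cast<float>(Z));
            valid[i] = 1;
        }
        return points3d;
    }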
7 changes: 5 additions & 2 deletions testVisualSLAM.cpp
@@ -37,8 +37,8 @@ int main(int argc, char** argv){
cv::Mat prevImageLeft, prevImageRight;
std::vector<cv::Point2f> pointsCurrentFrame_left, pointsPrevFrame_left;
std::vector<cv::Point2f> pointsCurrentFrame_right, pointsPrevFrame_right;
- std::vector<cv::KeyPoint> keypoints;
- cv::Mat descriptors;
+ std::vector<cv::KeyPoint> keypoints_left, keypoints_right;
+ cv::Mat descriptors_left,descriptors_right;
Eigen::Matrix3d cumR = Eigen::Matrix3d::Identity();

int k = 1;
@@ -84,6 +84,9 @@ int main(int argc, char** argv){

Sophus::SE3d pose_left = slam.performFrontEndStepWithTracking(images_left[i], disparity_map, pointsCurrentFrame_left, pointsPrevFrame_left, prevImageLeft, true);
Sophus::SE3d pose_right = slam.performFrontEndStepWithTracking(images_right[i], disparity_map, pointsCurrentFrame_right, pointsPrevFrame_right, prevImageRight, false);
+ //Sophus::SE3d pose_left = slam.performFrontEndStep(images_left[i], disparity_map, keypoints_left, descriptors_left, true);
+ //Sophus::SE3d pose_right = slam.performFrontEndStep(images_right[i], disparity_map, keypoints_right, descriptors_right, false);
+
visToolkit->plot2DPoints(images_right[i], pointsCurrentFrame_right);

visToolkit->setDataForPointCloudVisualization(images_left[i], disparity_map);
