改进orb提取算法,首先通过图像尺寸确定构建的金字塔的层数;将特征点均摊到每层图像上,将剩余的特征点归于图像金字塔的顶层;根据图像灰度信息计算出一种自适应提取阈值,若此阈值提取的特征点数目不满足要求,则降低阈值至 T/2,直到提取的特征点数目达到要求;对每层图像进行网格划分,从图像金字塔的第一层开始遍历网格进行特征提取并对每个网格计数;最后采用GMS匹配算法。给出源码和头文件
时间: 2024-02-12 14:09:01 浏览: 28
以下是改进后的ORB特征提取算法的源码和头文件,您可以参考使用:
```
// improved ORB feature extraction algorithm
#include <algorithm>
#include <cmath>
#include <vector>

#include <opencv2/opencv.hpp>
using namespace cv;
// Improved ORB feature extraction.
//
// Pipeline (as specified by the surrounding article):
//   1. Build an image pyramid whose levels are DOWN-sampled by scaleFactor
//      (the original code multiplied by pow(scaleFactor, i) with
//      scaleFactor > 1, producing ever LARGER images).
//   2. Split the keypoint budget evenly over the levels; the integer-division
//      remainder goes to the top (coarsest) level.
//   3. Derive an adaptive FAST threshold from the grey-level statistics of
//      the image; when a level yields too few keypoints, halve the threshold
//      (T -> T/2) and retry until the quota is met or the threshold hits 1
//      (the hard floor prevents the original's potential infinite loop).
//   4. Partition the base image into gridRows x gridCols cells and cap the
//      keypoint count per cell (keeping the strongest responses) so features
//      are spread evenly — the original discarded whole crowded cells.
//   5. Compute descriptors AFTER pruning so keypoints[i] <-> descriptors.row(i)
//      stay aligned (the original pruned after compute(), desynchronising them).
//
// GMS match filtering is intentionally left to the caller: it needs TWO
// images and lives in opencv_contrib as cv::xfeatures2d::matchGMS. The
// original DescriptorMatcher::create("GMS") is not a valid matcher name,
// and matching an image's descriptors against themselves (plus a blocking
// imshow/waitKey inside an extraction routine) was meaningless demo code.
//
// @param img            8-bit single-channel input image (asserted below;
//                       the original histogram code already assumed CV_8UC1)
// @param keypoints      output keypoints, in base-image coordinates
// @param descriptors    output ORB descriptors, one row per keypoint
// @param maxKeypoints   total keypoint budget over all pyramid levels
// @param scaleFactor    pyramid decimation ratio, must be > 1
// @param nLevels        number of pyramid levels
// @param edgeThreshold  border width where no features are detected
// @param patchSize      ORB/BRIEF descriptor patch size
// @param fastThreshold  upper bound for the initial FAST threshold
// @param gridRows       grid rows used to even out the spatial distribution
// @param gridCols       grid columns used to even out the spatial distribution
void improvedORB(Mat& img, std::vector<KeyPoint>& keypoints, Mat& descriptors, int maxKeypoints, float scaleFactor, int nLevels, int edgeThreshold, int patchSize, int fastThreshold, int gridRows, int gridCols)
{
    CV_Assert(!img.empty() && img.type() == CV_8UC1);
    CV_Assert(maxKeypoints > 0 && nLevels > 0 && scaleFactor > 1.0f);
    CV_Assert(gridRows > 0 && gridCols > 0);

    // ---- 1. down-sampled image pyramid --------------------------------
    std::vector<Mat> pyramid(nLevels);
    pyramid[0] = img.clone();
    for (int level = 1; level < nLevels; level++)
    {
        // Each level shrinks by 1/scaleFactor relative to the base image.
        const float invScale = 1.0f / std::pow(scaleFactor, static_cast<float>(level));
        const Size sz(std::max(1, cvRound(img.cols * invScale)),
                      std::max(1, cvRound(img.rows * invScale)));
        resize(img, pyramid[level], sz, 0, 0, INTER_LINEAR);
    }

    // ---- 2. per-level keypoint budget ---------------------------------
    const int perLevel = maxKeypoints / nLevels;
    std::vector<int> budget(nLevels, perLevel);
    budget[nLevels - 1] += maxKeypoints - perLevel * nLevels;  // remainder -> top level

    // ---- 3. adaptive FAST threshold from grey-level statistics --------
    // FAST compares a pixel with a ring of neighbours; a low-contrast image
    // (small std-dev) needs a lower threshold for any corner to fire.
    Scalar meanVal, stdVal;
    meanStdDev(img, meanVal, stdVal);
    const int initialThreshold =
        std::max(1, std::min(fastThreshold, cvRound(0.3 * stdVal[0])));

    keypoints.clear();
    for (int level = 0; level < nLevels; level++)
    {
        if (budget[level] <= 0)
            continue;
        std::vector<KeyPoint> levelKps;
        int threshold = initialThreshold;
        for (;;)  // halve T until the quota is met; T == 1 is the floor
        {
            levelKps.clear();
            // nlevels == 1: the pyramid is built by hand above, so each ORB
            // instance detects on its own level only.  Full argument list is
            // spelled out because the original call shifted `fastThreshold`
            // into the WTA_K slot.
            Ptr<ORB> orb = ORB::create(budget[level], scaleFactor, 1, edgeThreshold,
                                       0, 2, ORB::HARRIS_SCORE, patchSize, threshold);
            orb->detect(pyramid[level], levelKps);
            if (static_cast<int>(levelKps.size()) >= budget[level] || threshold <= 1)
                break;
            threshold /= 2;  // T -> T/2 as specified
        }
        KeyPointsFilter::retainBest(levelKps, budget[level]);

        // Map coordinates (and patch size) back to the base image and APPEND —
        // the original detected straight into `keypoints`, so every level
        // overwrote the previous one and coordinates were never rescaled.
        const float levelScale = std::pow(scaleFactor, static_cast<float>(level));
        for (KeyPoint& kp : levelKps)
        {
            kp.pt.x *= levelScale;
            kp.pt.y *= levelScale;
            kp.size *= levelScale;
            kp.octave = level;
            keypoints.push_back(kp);
        }
    }

    // ---- 4. grid pruning: cap each cell, strongest responses first ----
    const int gridWidth  = std::max(1, cvCeil(static_cast<float>(img.cols) / gridCols));
    const int gridHeight = std::max(1, cvCeil(static_cast<float>(img.rows) / gridRows));
    const int maxPerCell = std::max(1, maxKeypoints / (gridRows * gridCols));
    std::sort(keypoints.begin(), keypoints.end(),
              [](const KeyPoint& a, const KeyPoint& b) { return a.response > b.response; });
    std::vector<int> cellCount(gridRows * gridCols, 0);
    std::vector<KeyPoint> pruned;
    pruned.reserve(keypoints.size());
    for (const KeyPoint& kp : keypoints)
    {
        // Clamp the cell index so points on the right/bottom edge stay in range.
        const int col = std::min(gridCols - 1, static_cast<int>(kp.pt.x) / gridWidth);
        const int row = std::min(gridRows - 1, static_cast<int>(kp.pt.y) / gridHeight);
        int& count = cellCount[row * gridCols + col];
        if (count < maxPerCell)
        {
            ++count;
            pruned.push_back(kp);
        }
    }
    keypoints.swap(pruned);

    // ---- 5. descriptors for the surviving keypoints -------------------
    // compute() may drop keypoints too close to the border, but it keeps the
    // remaining rows aligned with the keypoint vector.
    Ptr<ORB> orb = ORB::create(maxKeypoints, scaleFactor, nLevels, edgeThreshold,
                               0, 2, ORB::HARRIS_SCORE, patchSize, fastThreshold);
    orb->compute(img, keypoints, descriptors);
}
```
以下是改进后的ORB特征提取算法的头文件:
```
#pragma once
// Improved ORB feature extraction algorithm.
#include <opencv2/opencv.hpp>

#include <vector>

/// Detects ORB keypoints over a down-sampled image pyramid with a per-level
/// feature budget, an adaptive FAST threshold (halved, T -> T/2, until the
/// per-level quota is met) and grid-based pruning for an even spatial
/// distribution, then computes ORB descriptors for the surviving keypoints.
///
/// @param img            8-bit single-channel input image
/// @param keypoints      output keypoints in base-image coordinates
/// @param descriptors    output ORB descriptors, one row per keypoint
/// @param maxKeypoints   total keypoint budget across all pyramid levels
/// @param scaleFactor    pyramid decimation ratio (> 1)
/// @param nLevels        number of pyramid levels
/// @param edgeThreshold  border width where no features are detected
/// @param patchSize      ORB descriptor patch size
/// @param fastThreshold  upper bound for the initial FAST threshold
/// @param gridRows       grid rows for spatial pruning
/// @param gridCols       grid columns for spatial pruning
void improvedORB(cv::Mat& img, std::vector<cv::KeyPoint>& keypoints, cv::Mat& descriptors, int maxKeypoints, float scaleFactor = 1.2f, int nLevels = 8, int edgeThreshold = 31, int patchSize = 31, int fastThreshold = 20, int gridRows = 8, int gridCols = 8);
```
相关推荐
![pdf](https://img-home.csdnimg.cn/images/20210720083512.png)
![zip](https://img-home.csdnimg.cn/images/20210720083736.png)
![zip](https://img-home.csdnimg.cn/images/20210720083736.png)
![](https://csdnimg.cn/download_wenku/file_type_ask_c1.png)
![pdf](https://img-home.csdnimg.cn/images/20210720083512.png)
![rar](https://img-home.csdnimg.cn/images/20210720083606.png)
![zip](https://img-home.csdnimg.cn/images/20210720083736.png)
![xlsx](https://img-home.csdnimg.cn/images/20210720083732.png)
![rar](https://img-home.csdnimg.cn/images/20210720083606.png)
![zip](https://img-home.csdnimg.cn/images/20210720083736.png)
![zip](https://img-home.csdnimg.cn/images/20210720083736.png)
![rar](https://img-home.csdnimg.cn/images/20210720083606.png)
![zip](https://img-home.csdnimg.cn/images/20210720083736.png)
![pdf](https://img-home.csdnimg.cn/images/20210720083512.png)
![zip](https://img-home.csdnimg.cn/images/20210720083736.png)
![zip](https://img-home.csdnimg.cn/images/20210720083736.png)
![zip](https://img-home.csdnimg.cn/images/20210720083736.png)