
ORB Feature Extraction (ORB Feature Matching Algorithm)


This article shares how to analyze the ORB, SURF, and SIFT feature point extraction methods and the ICP (iterative closest point) matching method. It is quite practical, so it is shared here for everyone to learn from. I hope you get something out of reading it; without further ado, let's take a look.

While working through the visual SLAM book, it covers the ORB, SURF, and SIFT extraction methods, together with brute-force matching and fast nearest-neighbor (FLANN) matching for the extracted features. Section 7.9 describes the 3D-3D iterative closest point (ICP) method; the ICP problem can be solved in two ways: with linear algebra (mainly SVD), or with nonlinear optimization.
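For reference, the linear-algebra (SVD) route has a closed-form solution, which is what the pose_estimation_3d3d function in the ICP code below implements. Writing the matched 3D points as $p_i$ and $p_i'$, it first removes the centroids and then decomposes the correlation matrix:

$$
\mu = \frac{1}{N}\sum_{i=1}^{N} p_i,\qquad
\mu' = \frac{1}{N}\sum_{i=1}^{N} p_i',\qquad
q_i = p_i - \mu,\qquad
q_i' = p_i' - \mu'
$$

$$
W = \sum_{i=1}^{N} q_i\, q_i'^{\,T} = U \Sigma V^T,\qquad
R = U V^T \ (\text{negated if } \det R < 0),\qquad
t = \mu - R\,\mu'
$$

The nonlinear-optimization route instead minimizes $\sum_i \lVert p_i - (R\,p_i' + t) \rVert^2$ iteratively (for example with Gauss-Newton or Levenberg-Marquardt); only the SVD route appears in the code below.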

main.cpp

#include <iostream>
#include "opencv2/opencv.hpp"
#include "opencv2/core/core.hpp"
#include "opencv2/features2d/features2d.hpp"
#include "opencv2/highgui/highgui.hpp"
#include "opencv2/xfeatures2d.hpp"
#include <time.h>
#include <chrono>
#include <math.h>
#include <bits/stdc++.h>

using namespace std;
using namespace cv;
using namespace cv::xfeatures2d;

double picture1_size_change = 1;
double picture2_size_change = 1;
bool show_picture = true;

void extract_ORB2(string picture1, string picture2)
{
    //-- read the images
    Mat img_1 = imread(picture1, CV_LOAD_IMAGE_COLOR);
    Mat img_2 = imread(picture2, CV_LOAD_IMAGE_COLOR);
    assert(img_1.data != nullptr && img_2.data != nullptr);

    resize(img_1, img_1, Size(), picture1_size_change, picture1_size_change);
    resize(img_2, img_2, Size(), picture2_size_change, picture2_size_change);

    //-- initialization
    std::vector<KeyPoint> keypoints_1, keypoints_2;
    Mat descriptors_1, descriptors_2;
    Ptr<FeatureDetector> detector = ORB::create(2000, 1.20000048F, 8, 100);
    Ptr<DescriptorExtractor> descriptor = ORB::create(5000);
    Ptr<DescriptorMatcher> matcher = DescriptorMatcher::create("BruteForce-Hamming");

    //-- Step 1: detect Oriented FAST corner locations
    chrono::steady_clock::time_point t1 = chrono::steady_clock::now();
    detector->detect(img_1, keypoints_1);
    detector->detect(img_2, keypoints_2);

    //-- Step 2: compute BRIEF descriptors at the corner locations
    descriptor->compute(img_1, keypoints_1, descriptors_1);
    descriptor->compute(img_2, keypoints_2, descriptors_2);
    chrono::steady_clock::time_point t2 = chrono::steady_clock::now();
    chrono::duration<double> time_used = chrono::duration_cast<chrono::duration<double>>(t2 - t1);
    //cout << "extract ORB cost = " << time_used.count() * 1000 << " ms" << endl;
    cout << "detect " << keypoints_1.size() << " and " << keypoints_2.size() << " keypoints" << endl;

    if (show_picture)
    {
        Mat outimg1;
        drawKeypoints(img_1, keypoints_1, outimg1, Scalar::all(-1), DrawMatchesFlags::DEFAULT);
        imshow("ORB features", outimg1);
    }

    //-- Step 3: match the BRIEF descriptors of the two images using the Hamming distance
    vector<DMatch> matches;
    //t1 = chrono::steady_clock::now();
    matcher->match(descriptors_1, descriptors_2, matches);
    t2 = chrono::steady_clock::now();
    time_used = chrono::duration_cast<chrono::duration<double>>(t2 - t1);
    cout << "extract and match ORB cost = " << time_used.count() * 1000 << " ms" << endl;

    //-- Step 4: filter the matched pairs
    // compute the minimum and maximum distances
    auto min_max = minmax_element(matches.begin(), matches.end(),
                                  [](const DMatch &m1, const DMatch &m2)
                                  { return m1.distance < m2.distance; });
    double min_dist = min_max.first->distance;
    double max_dist = min_max.second->distance;

    //printf("-- Max dist : %f\n", max_dist);
    //printf("-- Min dist : %f\n", min_dist);

    // A match is considered wrong when the descriptor distance exceeds twice the minimum distance.
    // The minimum distance can sometimes be very small, so an empirical value of 30 is used as a lower bound.
    std::vector<DMatch> good_matches;
    for (int i = 0; i < descriptors_1.rows; i++)
    {
        if (matches[i].distance <= max(2 * min_dist, 30.0))
        {
            good_matches.push_back(matches[i]);
        }
    }
    cout << "match " << good_matches.size() << " keypoints" << endl;

    //-- Step 5: draw the matching results
    Mat img_match;
    Mat img_goodmatch;
    drawMatches(img_1, keypoints_1, img_2, keypoints_2, matches, img_match);
    drawMatches(img_1, keypoints_1, img_2, keypoints_2, good_matches, img_goodmatch);

    if (show_picture)
        imshow("good matches", img_goodmatch);
    if (show_picture)
        waitKey(0);
}

void extract_SIFT(string picture1, string picture2)
{
    //double t = (double)getTickCount();
    Mat temp = imread(picture1, IMREAD_GRAYSCALE);
    Mat image_check_changed = imread(picture2, IMREAD_GRAYSCALE);
    if (!temp.data || !image_check_changed.data)
    {
        printf("could not load images...\n");
        return;
    }

    resize(temp, temp, Size(), picture1_size_change, picture1_size_change);
    resize(image_check_changed, image_check_changed, Size(), picture2_size_change, picture2_size_change);

    //Mat image_check_changed = Change_image(image_check);
    //("temp", temp);
    if (show_picture)
        imshow("image_check_changed", image_check_changed);

    int minHessian = 500;
    //Ptr<SURF> detector = SURF::create(minHessian); // surf
    Ptr<SIFT> detector = SIFT::create();             // sift

    vector<KeyPoint> keypoints_obj;
    vector<KeyPoint> keypoints_scene;
    Mat descriptor_obj, descriptor_scene;

    clock_t startTime, endTime;
    startTime = clock();

    chrono::steady_clock::time_point t1 = chrono::steady_clock::now();
    //cout << "extract ORB cost = " << time_used.count() * 1000 << " ms" << endl;
    detector->detectAndCompute(temp, Mat(), keypoints_obj, descriptor_obj);
    detector->detectAndCompute(image_check_changed, Mat(), keypoints_scene, descriptor_scene);
    cout << "detect " << keypoints_obj.size() << " and " << keypoints_scene.size() << " keypoints" << endl;

    // matching
    FlannBasedMatcher matcher;
    vector<DMatch> matches;
    matcher.match(descriptor_obj, descriptor_scene, matches);

    chrono::steady_clock::time_point t2 = chrono::steady_clock::now();
    chrono::duration<double> time_used = chrono::duration_cast<chrono::duration<double>>(t2 - t1);
    cout << "extract and match cost = " << time_used.count() * 1000 << " ms" << endl;

    // find the minimum and maximum distances
    double minDist = 1000;
    double maxDist = 0;
    // iterate over all descriptor rows
    for (int i = 0; i < descriptor_obj.rows; i++)
    {
        double dist = matches[i].distance;
        if (dist > maxDist)
        {
            maxDist = dist;
        }
        if (dist < minDist)
        {
            minDist = dist;
        }
    }
    //printf("max distance : %f\n", maxDist);
    //printf("min distance : %f\n", minDist);

    // find good matched points
    vector<DMatch> goodMatches;
    for (int i = 0; i < descriptor_obj.rows; i++)
    {
        double dist = matches[i].distance;
        if (dist < max(5 * minDist, 1.0))
        {
            goodMatches.push_back(matches[i]);
        }
    }
    //rectangle(temp, Point(1, 1), Point(177, 157), Scalar(0, 0, 255), 8, 0);

    cout << "match " << goodMatches.size() << " keypoints" << endl;

    endTime = clock();
    //cout << "took time: " << (double)(endTime - startTime) / CLOCKS_PER_SEC * 1000 << " ms" << endl;

    Mat matchesImg;
    drawMatches(temp, keypoints_obj, image_check_changed, keypoints_scene, goodMatches, matchesImg, Scalar::all(-1),
                Scalar::all(-1), vector<char>(), DrawMatchesFlags::NOT_DRAW_SINGLE_POINTS);
    if (show_picture)
        imshow("Flann Matching Result01", matchesImg);
    //imwrite("C:/Users/Administrator/Desktop/matchesImg04.jpg", matchesImg);

    // compute the homography H
    std::vector<Point2f> points1, points2;

    // save the corresponding points
    for (size_t i = 0; i < goodMatches.size(); i++)
    {
        // queryIdx is the index of the descriptor / keypoint in the query image
        points1.push_back(keypoints_obj[goodMatches[i].queryIdx].pt);
        // trainIdx is the index of the descriptor / keypoint in the sample (scene) image
        points2.push_back(keypoints_scene[goodMatches[i].trainIdx].pt);
    }

    // find the homography with RANSAC (random sample consensus)
    Mat H = findHomography(points1, points2, RANSAC);
    //imwrite("C:/Users/Administrator/Desktop/C-train/C-train/result/sift/Image4_SURF_MinHessian1000_minDist1000_a0.9b70.jpg", matchesImg);

    vector<Point2f> obj_corners(4);
    vector<Point2f> scene_corners(4);
    obj_corners[0] = Point(0, 0);
    obj_corners[1] = Point(temp.cols, 0);
    obj_corners[2] = Point(temp.cols, temp.rows);
    obj_corners[3] = Point(0, temp.rows);

    // perspective transform (rectify the tilted image)
    perspectiveTransform(obj_corners, scene_corners, H);
    //Mat dst;
    cvtColor(image_check_changed, image_check_changed, COLOR_GRAY2BGR);
    line(image_check_changed, scene_corners[0], scene_corners[1], Scalar(0, 0, 255), 2, 8, 0);
    line(image_check_changed, scene_corners[1], scene_corners[2], Scalar(0, 0, 255), 2, 8, 0);
    line(image_check_changed, scene_corners[2], scene_corners[3], Scalar(0, 0, 255), 2, 8, 0);
    line(image_check_changed, scene_corners[3], scene_corners[0], Scalar(0, 0, 255), 2, 8, 0);

    if (show_picture)
    {
        Mat outimg1;
        Mat temp_color = imread(picture1, CV_LOAD_IMAGE_COLOR);
        drawKeypoints(temp_color, keypoints_obj, outimg1, Scalar::all(-1), DrawMatchesFlags::DEFAULT);
        imshow("SIFT features", outimg1);
    }

    if (show_picture)
        imshow("Draw object", image_check_changed);
    //imwrite("C:/Users/Administrator/Desktop/image04.jpg", image_check_changed);

    //t = ((double)getTickCount() - t) / getTickFrequency();
    //printf("average time: %f\n", t);
    if (show_picture)
        waitKey(0);
}

void extract_SURF(string picture1, string picture2)
{
    //double t = (double)getTickCount();
    Mat temp = imread(picture1, IMREAD_GRAYSCALE);
    Mat image_check_changed = imread(picture2, IMREAD_GRAYSCALE);
    if (!temp.data || !image_check_changed.data)
    {
        printf("could not load images...\n");
        return;
    }

    resize(temp, temp, Size(), picture1_size_change, picture1_size_change);
    resize(image_check_changed, image_check_changed, Size(), picture2_size_change, picture2_size_change);

    //Mat image_check_changed = Change_image(image_check);
    //("temp", temp);
    if (show_picture)
        imshow("image_check_changed", image_check_changed);

    int minHessian = 500;
    Ptr<SURF> detector = SURF::create(minHessian);        // surf
    //Ptr<SIFT> detector = SIFT::create(minHessian);      // sift

    vector<KeyPoint> keypoints_obj;
    vector<KeyPoint> keypoints_scene;
    Mat descriptor_obj, descriptor_scene;

    clock_t startTime, endTime;
    startTime = clock();

    chrono::steady_clock::time_point t1 = chrono::steady_clock::now();
    //cout << "extract ORB cost = " << time_used.count() * 1000 << " ms" << endl;
    detector->detectAndCompute(temp, Mat(), keypoints_obj, descriptor_obj);
    detector->detectAndCompute(image_check_changed, Mat(), keypoints_scene, descriptor_scene);
    cout << "detect " << keypoints_obj.size() << " and " << keypoints_scene.size() << " keypoints" << endl;

    // matching
    FlannBasedMatcher matcher;
    vector<DMatch> matches;
    matcher.match(descriptor_obj, descriptor_scene, matches);

    chrono::steady_clock::time_point t2 = chrono::steady_clock::now();
    chrono::duration<double> time_used = chrono::duration_cast<chrono::duration<double>>(t2 - t1);
    cout << "extract and match cost = " << time_used.count() * 1000 << " ms" << endl;

    // find the minimum and maximum distances
    double minDist = 1000;
    double maxDist = 0;
    // iterate over all descriptor rows
    for (int i = 0; i < descriptor_obj.rows; i++)
    {
        double dist = matches[i].distance;
        if (dist > maxDist)
        {
            maxDist = dist;
        }
        if (dist < minDist)
        {
            minDist = dist;
        }
    }
    //printf("max distance : %f\n", maxDist);
    //printf("min distance : %f\n", minDist);

    // find good matched points
    vector<DMatch> goodMatches;
    for (int i = 0; i < descriptor_obj.rows; i++)
    {
        double dist = matches[i].distance;
        if (dist < max(2 * minDist, 0.15))
        {
            goodMatches.push_back(matches[i]);
        }
    }
    //rectangle(temp, Point(1, 1), Point(177, 157), Scalar(0, 0, 255), 8, 0);

    cout << "match " << goodMatches.size() << " keypoints" << endl;
    endTime = clock();
    //cout << "took time: " << (double)(endTime - startTime) / CLOCKS_PER_SEC * 1000 << " ms" << endl;

    Mat matchesImg;
    drawMatches(temp, keypoints_obj, image_check_changed, keypoints_scene, goodMatches, matchesImg, Scalar::all(-1),
                Scalar::all(-1), vector<char>(), DrawMatchesFlags::NOT_DRAW_SINGLE_POINTS);
    if (show_picture)
        imshow("Flann Matching Result01", matchesImg);
    //imwrite("C:/Users/Administrator/Desktop/matchesImg04.jpg", matchesImg);

    // compute the homography H
    std::vector<Point2f> points1, points2;

    // save the corresponding points
    for (size_t i = 0; i < goodMatches.size(); i++)
    {
        // queryIdx is the index of the descriptor / keypoint in the query image
        points1.push_back(keypoints_obj[goodMatches[i].queryIdx].pt);
        // trainIdx is the index of the descriptor / keypoint in the sample (scene) image
        points2.push_back(keypoints_scene[goodMatches[i].trainIdx].pt);
    }

    // find the homography with RANSAC (random sample consensus)
    Mat H = findHomography(points1, points2, RANSAC);
    //imwrite("C:/Users/Administrator/Desktop/C-train/C-train/result/sift/Image4_SURF_MinHessian1000_minDist1000_a0.9b70.jpg", matchesImg);

    vector<Point2f> obj_corners(4);
    vector<Point2f> scene_corners(4);
    obj_corners[0] = Point(0, 0);
    obj_corners[1] = Point(temp.cols, 0);
    obj_corners[2] = Point(temp.cols, temp.rows);
    obj_corners[3] = Point(0, temp.rows);

    // perspective transform (rectify the tilted image)
    perspectiveTransform(obj_corners, scene_corners, H);
    //Mat dst;
    cvtColor(image_check_changed, image_check_changed, COLOR_GRAY2BGR);
    line(image_check_changed, scene_corners[0], scene_corners[1], Scalar(0, 0, 255), 2, 8, 0);
    line(image_check_changed, scene_corners[1], scene_corners[2], Scalar(0, 0, 255), 2, 8, 0);
    line(image_check_changed, scene_corners[2], scene_corners[3], Scalar(0, 0, 255), 2, 8, 0);
    line(image_check_changed, scene_corners[3], scene_corners[0], Scalar(0, 0, 255), 2, 8, 0);

    if (show_picture)
    {
        Mat outimg1;
        Mat temp_color = imread(picture1, CV_LOAD_IMAGE_COLOR);
        drawKeypoints(temp_color, keypoints_obj, outimg1, Scalar::all(-1), DrawMatchesFlags::DEFAULT);
        imshow("SURF features", outimg1);
    }

    if (show_picture)
        imshow("Draw object", image_check_changed);
    //imwrite("C:/Users/Administrator/Desktop/image04.jpg", image_check_changed);

    //t = ((double)getTickCount() - t) / getTickFrequency();
    //printf("average time: %f\n", t);
    if (show_picture)
        waitKey(0);
}
void extract_AKAZE(string picture1, string picture2)
{
    // read the images
    Mat temp = imread(picture1, IMREAD_GRAYSCALE);
    Mat image_check_changed = imread(picture2, IMREAD_GRAYSCALE);
    // if either image cannot be read, print an error message
    if (!temp.data || !image_check_changed.data)
    {
        printf("could not load images...\n");
        return;
    }
    resize(temp, temp, Size(), picture1_size_change, picture1_size_change);
    resize(image_check_changed, image_check_changed, Size(), picture2_size_change, picture2_size_change);

    //Mat image_check_changed = Change_image(image_check);
    //("temp", temp);

    if (show_picture)
    {
        imshow("image_check_changed", image_check_changed);
    }

    int minHessian = 500;
    Ptr<AKAZE> detector = AKAZE::create(); // AKAZE

    vector<KeyPoint> keypoints_obj;
    vector<KeyPoint> keypoints_scene;
    Mat descriptor_obj, descriptor_scene;

    clock_t startTime, endTime;
    startTime = clock();

    chrono::steady_clock::time_point t1 = chrono::steady_clock::now();
    detector->detectAndCompute(temp, Mat(), keypoints_obj, descriptor_obj);
    detector->detectAndCompute(image_check_changed, Mat(), keypoints_scene, descriptor_scene);
    cout << "detect " << keypoints_obj.size() << " and " << keypoints_scene.size() << " keypoints" << endl;

    // matching
    // note: AKAZE descriptors are binary, so FlannBasedMatcher may require converting them
    // to CV_32F (or using a Hamming-based BFMatcher instead)
    FlannBasedMatcher matcher;
    vector<DMatch> matches;
    matcher.match(descriptor_obj, descriptor_scene, matches);

    chrono::steady_clock::time_point t2 = chrono::steady_clock::now();
    chrono::duration<double> time_used = chrono::duration_cast<chrono::duration<double>>(t2 - t1);
    cout << "extract and match cost = " << time_used.count() * 1000 << " ms" << endl;

    // find the minimum and maximum distances
    double minDist = 1000;
    double maxDist = 0;
    // iterate over all descriptor rows
    for (int i = 0; i < descriptor_obj.rows; i++)
    {
        double dist = matches[i].distance;
        if (dist > maxDist)
        {
            maxDist = dist;
        }
        if (dist < minDist)
        {
            minDist = dist;
        }
    }
    //printf("max distance : %f\n", maxDist);
    //printf("min distance : %f\n", minDist);

    // find good matched points
    vector<DMatch> goodMatches;
    for (int i = 0; i < descriptor_obj.rows; i++)
    {
        double dist = matches[i].distance;
        if (dist < max(5 * minDist, 1.0))
        {
            goodMatches.push_back(matches[i]);
        }
    }
    //rectangle(temp, Point(1, 1), Point(177, 157), Scalar(0, 0, 255), 8, 0);
    cout << "match " << goodMatches.size() << " keypoints" << endl;
    endTime = clock();
    //cout << "took time: " << (double)(endTime - startTime) / CLOCKS_PER_SEC * 1000 << " ms" << endl;

    Mat matchesImg;
    drawMatches(temp, keypoints_obj, image_check_changed, keypoints_scene, goodMatches,
                matchesImg, Scalar::all(-1),
                Scalar::all(-1), vector<char>(), DrawMatchesFlags::NOT_DRAW_SINGLE_POINTS);
    if (show_picture)
        imshow("Flann Matching Result01", matchesImg);

    //imwrite("C:/Users/Administrator/Desktop/matchesImg04.jpg", matchesImg);

    // compute the homography H
    std::vector<Point2f> points1, points2;

    // save the corresponding points
    for (size_t i = 0; i < goodMatches.size(); i++)
    {
        // queryIdx is the index of the descriptor / keypoint in the query image
        points1.push_back(keypoints_obj[goodMatches[i].queryIdx].pt);
        // trainIdx is the index of the descriptor / keypoint in the sample (scene) image
        points2.push_back(keypoints_scene[goodMatches[i].trainIdx].pt);
    }
    // find the homography with RANSAC (random sample consensus)
    Mat H = findHomography(points1, points2, RANSAC);
    //imwrite("C:/Users/Administrator/Desktop/C-train/C-train/result/sift/Image4_SURF_MinHessian1000_minDist1000_a0.9b70.jpg", matchesImg);

    vector<Point2f> obj_corners(4);
    vector<Point2f> scene_corners(4);
    obj_corners[0] = Point(0, 0);
    obj_corners[1] = Point(temp.cols, 0);
    obj_corners[2] = Point(temp.cols, temp.rows);
    obj_corners[3] = Point(0, temp.rows);

    // perspective transform (rectify the tilted image)
    perspectiveTransform(obj_corners, scene_corners, H);
    //Mat dst;
    cvtColor(image_check_changed, image_check_changed, COLOR_GRAY2BGR);
    line(image_check_changed, scene_corners[0], scene_corners[1], Scalar(0, 0, 255), 2, 8, 0);
    line(image_check_changed, scene_corners[1], scene_corners[2], Scalar(0, 0, 255), 2, 8, 0);
    line(image_check_changed, scene_corners[2], scene_corners[3], Scalar(0, 0, 255), 2, 8, 0);
    line(image_check_changed, scene_corners[3], scene_corners[0], Scalar(0, 0, 255), 2, 8, 0);

    if (show_picture)
    {
        Mat outimg1;
        Mat temp_color = imread(picture1, CV_LOAD_IMAGE_COLOR);
        drawKeypoints(temp_color, keypoints_obj, outimg1, Scalar::all(-1), DrawMatchesFlags::DEFAULT);
        imshow("AKAZE features", outimg1);
    }
    if (show_picture)
        waitKey(0);
}

void extract_ORB(string picture1, string picture2)
{
    Mat img_1 = imread(picture1);
    Mat img_2 = imread(picture2);

    resize(img_1, img_1, Size(), picture1_size_change, picture1_size_change);
    resize(img_2, img_2, Size(), picture2_size_change, picture2_size_change);

    if (!img_1.data || !img_2.data)
    {
        cout << "error reading images" << endl;
        return;
    }

    vector<Point2f> recognized;
    vector<Point2f> scene;

    recognized.resize(1000);
    scene.resize(1000);

    Mat d_srcL, d_srcR;

    Mat img_matches, des_L, des_R;
    // the input to the ORB algorithm must be a grayscale image
    cvtColor(img_1, d_srcL, COLOR_BGR2GRAY); // the CPU ORB implementation converts the input to grayscale internally, so this step can be omitted
    cvtColor(img_2, d_srcR, COLOR_BGR2GRAY);

    Ptr<ORB> d_orb = ORB::create(1500);

    Mat d_descriptorsL, d_descriptorsR, d_descriptorsL_32F, d_descriptorsR_32F;

    vector<KeyPoint> keyPoints_1, keyPoints_2;

    // set the keypoint matching method to NORM_L2; it is also possible to pass a matcher type:
    // FLANNBASED = 1, BRUTEFORCE = 2, BRUTEFORCE_L1 = 3, BRUTEFORCE_HAMMING = 4, BRUTEFORCE_HAMMINGLUT = 5, BRUTEFORCE_SL2 = 6
    Ptr<DescriptorMatcher> d_matcher = DescriptorMatcher::create(NORM_L2);

    std::vector<DMatch> matches;      // all matches
    std::vector<DMatch> good_matches; // matches filtered by keypoint distance

    clock_t startTime, endTime;
    startTime = clock();

    chrono::steady_clock::time_point t1 = chrono::steady_clock::now();
    d_orb->detectAndCompute(d_srcL, Mat(), keyPoints_1, d_descriptorsL);
    d_orb->detectAndCompute(d_srcR, Mat(), keyPoints_2, d_descriptorsR);
    cout << "detect " << keyPoints_1.size() << " and " << keyPoints_2.size() << " keypoints" << endl;
    //endTime = clock();
    //cout << "took time: " << (double)(endTime - startTime) / CLOCKS_PER_SEC * 1000 << " ms" << endl;

    d_matcher->match(d_descriptorsL, d_descriptorsR, matches); // L and R denote the left and right images being matched

    // time spent on extraction and matching
    chrono::steady_clock::time_point t2 = chrono::steady_clock::now();
    chrono::duration<double> time_used = chrono::duration_cast<chrono::duration<double>>(t2 - t1);
    cout << "extract and match cost = " << time_used.count() * 1000 << " ms" << endl;

    int sz = matches.size();
    double max_dist = 0;
    double min_dist = 100;

    for (int i = 0; i < sz; i++)
    {
        double dist = matches[i].distance;
        if (dist < min_dist) min_dist = dist;
        if (dist > max_dist) max_dist = dist;
    }

    for (int i = 0; i < sz; i++)
    {
        if (matches[i].distance < 0.6 * max_dist)
        {
            good_matches.push_back(matches[i]);
        }
    }

    cout << "match " << good_matches.size() << " keypoints" << endl;
    //endTime = clock();
    //cout << "took time: " << (double)(endTime - startTime) / CLOCKS_PER_SEC * 1000 << " ms" << endl;

    // collect the points of the good matches in the test image to determine the rough location of the match
    for (size_t i = 0; i < good_matches.size(); ++i)
    {
        scene.push_back(keyPoints_2[good_matches[i].trainIdx].pt);
    }

    for (unsigned int j = 0; j < scene.size(); j++)
        cv::circle(img_2, scene[j], 2, cv::Scalar(0, 255, 0), 2);

    // draw all matches
    Mat ShowMatches;
    drawMatches(img_1, keyPoints_1, img_2, keyPoints_2, matches, ShowMatches);
    if (show_picture)
        imshow("matches", ShowMatches);
    //imwrite("matches.png", ShowMatches);

    // draw the good matches
    Mat ShowGoodMatches;
    drawMatches(img_1, keyPoints_1, img_2, keyPoints_2, good_matches, ShowGoodMatches);
    if (show_picture)
        imshow("good_matches", ShowGoodMatches);
    //imwrite("good_matches.png", ShowGoodMatches);

    // draw the matched points of the good matches on the test image
    if (show_picture)
        imshow("MatchPoints_in_img_2", img_2);
    //imwrite("MatchPoints_in_img_2.png", img_2);
    if (show_picture)
        waitKey(0);
}

int main(int argc, char **argv)
{
    string picture1 = string(argv[1]);
    string picture2 = string(argv[2]);
    //string picture1 = "data/picture1/6.jpg";
    //string picture2 = "data/picture2/16.PNG";

    cout << "\nextract_ORB::" << endl;
    extract_ORB(picture1, picture2);

    cout << "\nextract_ORB::" << endl;
    extract_ORB2(picture1, picture2);

    cout << "\nextract_SURF::" << endl;
    extract_SURF(picture1, picture2);

    cout << "\nextract_AKAZE::" << endl;
    extract_AKAZE(picture1, picture2);

    cout << "\nextract_SIFT::" << endl;
    extract_SIFT(picture1, picture2);
    cout << "success!!" << endl;
}

CMakeLists.txt

CMAKE_MINIMUM_REQUIRED(VERSION 2.8.3)        # minimum CMake version
PROJECT(DescriptorCompare)                   # project name
SET(CMAKE_CXX_COMPILER "g++")                # compiler
add_compile_options(-std=c++14)              # compile option: C++ standard
# output directory for the generated executables
SET(EXECUTABLE_OUTPUT_PATH ${PROJECT_SOURCE_DIR})
set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -Wall -fpermissive -g -O3 -Wno-unused-function -Wno-return-type")

find_package(OpenCV 3.0 REQUIRED)
message(STATUS "Using opencv version ${OpenCV_VERSION}")
find_package(Eigen3 3.3.8 REQUIRED)
find_package(Pangolin REQUIRED)

# link directories
LINK_DIRECTORIES(${PROJECT_SOURCE_DIR}/lib)
# include directories
INCLUDE_DIRECTORIES(
  ${PROJECT_SOURCE_DIR}/include
  ${EIGEN3_INCLUDE_DIR}
  ${OpenCV_INCLUDE_DIR}
  ${Pangolin_INCLUDE_DIRS}
)

add_library(${PROJECT_NAME}
  test.cc
)
target_link_libraries(${PROJECT_NAME}
  ${OpenCV_LIBS}
  ${EIGEN3_LIBS}
  ${Pangolin_LIBRARIES}
)

add_executable(main main.cpp)
target_link_libraries(main ${PROJECT_NAME})
add_executable(icp icp.cpp)
target_link_libraries(icp ${PROJECT_NAME})

Execution result

./main 1.png 2.png

extract_ORB::
detect 1500 and 1500 keypoints
extract and match cost = 21.5506 ms
match 903 keypoints

extract_ORB::
detect 1304 and 1301 keypoints
extract and match ORB cost = 25.4976 ms
match 313 keypoints

extract_SURF::
detect 915 and 940 keypoints
extract and match cost = 53.8371 ms
match 255 keypoints

extract_SIFT::
detect 1536 and 1433 keypoints
extract and match cost = 97.9322 ms
match 213 keypoints

success!!


ICP

#include <iostream>
#include <opencv2/core/core.hpp>
#include <opencv2/features2d/features2d.hpp>
#include <opencv2/highgui/highgui.hpp>
#include <opencv2/calib3d/calib3d.hpp>
#include <Eigen/Core>
#include <Eigen/Dense>
#include <Eigen/Geometry>
#include <Eigen/SVD>
#include <pangolin/pangolin.h>
#include <chrono>

using namespace std;
using namespace cv;

int picture_h = 480;
int picture_w = 640;
bool show_picture = true;

void find_feature_matches(
    const Mat &img_1, const Mat &img_2,
    std::vector<KeyPoint> &keypoints_1,
    std::vector<KeyPoint> &keypoints_2,
    std::vector<DMatch> &matches);

// convert pixel coordinates to normalized camera coordinates
Point2d pixel2cam(const Point2d &p, const Mat &K);

void pose_estimation_3d3d(
    const vector<Point3f> &pts1,
    const vector<Point3f> &pts2,
    Mat &R, Mat &t);

int main(int argc, char **argv) {
  if (argc != 5) {
    cout << "usage: pose_estimation_3d3d img1 img2 depth2 depth3" << endl;
    return 1;
  }
  //-- read the images
  Mat img_1 = imread(argv[1], CV_LOAD_IMAGE_COLOR);
  Mat img_2 = imread(argv[2], CV_LOAD_IMAGE_COLOR);
  vector<KeyPoint> keypoints_1, keypoints_2;
  vector<DMatch> matches;
  find_feature_matches(img_1, img_2, keypoints_1, keypoints_2, matches);
  cout << "picture1 keypoints: " << keypoints_1.size() << "\npicture2 keypoints: " << keypoints_2.size() << endl;
  cout << "found " << matches.size() << " matched point pairs in total" << endl;

  // build the 3D points
  Mat depth2 = imread(argv[3], CV_8UC1);  // the depth map is a 16-bit unsigned, single-channel image
  Mat depth3 = imread(argv[4], CV_8UC1);  // the depth map is a 16-bit unsigned, single-channel image
  Mat K = (Mat_<double>(3, 3) << 595.2, 0, 328.9, 0, 599.0, 253.9, 0, 0, 1);
  vector<Point3f> pts1, pts2;
  for (DMatch m : matches) {
    int d1 = 255 - (int)depth2.ptr<uchar>(int(keypoints_1[m.queryIdx].pt.y))[int(keypoints_1[m.queryIdx].pt.x)];
    int d2 = 255 - (int)depth3.ptr<uchar>(int(keypoints_2[m.trainIdx].pt.y))[int(keypoints_2[m.trainIdx].pt.x)];
    if (d1 == 0 || d2 == 0)  // bad depth
      continue;
    Point2d p1 = pixel2cam(keypoints_1[m.queryIdx].pt, K);
    Point2d p2 = pixel2cam(keypoints_2[m.trainIdx].pt, K);
    float dd1 = int(d1) / 1000.0;
    float dd2 = int(d2) / 1000.0;
    pts1.push_back(Point3f(p1.x * dd1, p1.y * dd1, dd1));
    pts2.push_back(Point3f(p2.x * dd2, p2.y * dd2, dd2));
  }
  cout << "3d-3d pairs: " << pts1.size() << endl;
  Mat R, t;
  pose_estimation_3d3d(pts1, pts2, R, t);

  // DZQ ADD
  cv::Mat Pose = (Mat_<double>(4, 4) << R.at<double>(0, 0), R.at<double>(0, 1), R.at<double>(0, 2), t.at<double>(0),
                  R.at<double>(1, 0), R.at<double>(1, 1), R.at<double>(1, 2), t.at<double>(1),
                  R.at<double>(2, 0), R.at<double>(2, 1), R.at<double>(2, 2), t.at<double>(2),
                  0, 0, 0, 1);
  cout << "[delete outliers] Matched objects distance: ";
  vector<double> vDistance;
  double allDistance = 0;  // total distance, used to compute the average matching distance; the average error distance is used to remove outliers
  for (int i = 0; i < pts1.size(); i++) {
    Mat point = Pose * (Mat_<double>(4, 1) << pts2[i].x, pts2[i].y, pts2[i].z, 1);
    double distance = pow(pow(pts1[i].x - point.at<double>(0), 2) + pow(pts1[i].y - point.at<double>(1), 2) + pow(pts1[i].z - point.at<double>(2), 2), 0.5);
    vDistance.push_back(distance);
    allDistance += distance;
    //cout << distance << " ";
  }
  //cout << endl;
  double avgDistance = allDistance / pts1.size();  // average distance
  int N_outliers = 0;
  for (int i = 0, j = 0; i < pts1.size(); i++, j++) {  // i tracks the position after removal, j tracks the original position
    if (vDistance[i] > 1.5 * avgDistance) {  // matches farther than N times the average distance are rejected [delete outliers] DZQ FIXED_PARAM
      N_outliers++;
    }
  }
  cout << "N_outliers:: " << N_outliers << endl;

  // show points
  {
    // create a window
    pangolin::CreateWindowAndBind("show points", 640, 480);
    // enable depth test
    glEnable(GL_DEPTH_TEST);
    // define projection and initial ModelView matrix
    pangolin::OpenGlRenderState s_cam(
        pangolin::ProjectionMatrix(640, 480, 420, 420, 320, 240, 0.05, 500),
        // corresponds to gluLookAt: camera position, look-at point, up vector
        pangolin::ModelViewLookAt(0, -5, 0.1, 0, 0, 0, pangolin::AxisY));
    // create interactive view in window
    pangolin::Handler3D handler(s_cam);
    // SetBounds relates to the OpenGL viewport;
    // see how the bounds are set in the SimpleDisplay example
    pangolin::View &d_cam = pangolin::CreateDisplay()
                                .SetBounds(0.0, 1.0, 0.0, 1.0, -640.0f / 480.0f)
                                .SetHandler(&handler);
    while (!pangolin::ShouldQuit()) {
      // clear screen and activate view to render into
      glClearColor(0.97, 0.97, 1.0, 1);  // background color
      glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);
      d_cam.Activate(s_cam);

      glBegin(GL_POINTS);  // draw the matched points
      glLineWidth(5);
      for (int i = 0; i < pts1.size(); i++) {
        glColor3f(1, 0, 0);
        glVertex3d(pts1[i].x, pts1[i].y, pts1[i].z);
        Mat point = Pose * (Mat_<double>(4, 1) << pts2[i].x, pts2[i].y, pts2[i].z, 1);
        glColor3f(0, 1, 0);
        glVertex3d(point.at<double>(0), point.at<double>(1), point.at<double>(2));
      }
      glEnd();

      glBegin(GL_LINES);  // draw the match lines
      glLineWidth(1);
      for (int i = 0; i < pts1.size(); i++) {
        glColor3f(0, 0, 1);
        glVertex3d(pts1[i].x, pts1[i].y, pts1[i].z);
        Mat point = Pose * (Mat_<double>(4, 1) << pts2[i].x, pts2[i].y, pts2[i].z, 1);
        glVertex3d(point.at<double>(0), point.at<double>(1), point.at<double>(2));
      }
      glEnd();

      glBegin(GL_POINTS);  // draw all points
      glLineWidth(5);
      glColor3f(1, 0.5, 0);
      for (int i = 0; i < picture_h; i += 2) {
        for (int j = 0; j < picture_w; j += 2) {
          int d1 = 255 - (int)depth2.ptr<uchar>(i)[j];
          if (d1 == 0)  // bad depth
            continue;
          Point2d temp_p;
          temp_p.y = i;  // x and y here are swapped relative to i and j (x = j, y = i)
          temp_p.x = j;
          Point2d p1 = pixel2cam(temp_p, K);
          float dd1 = int(d1) / 1000.0;
          glVertex3d(p1.x * dd1, p1.y * dd1, dd1);
          //glVertex3d(j / 1000.0, i / 1000.0, d1 / 200.0);
        }
      }
      glEnd();

      // swap frames and process events
      pangolin::FinishFrame();
    }
  }
}

void find_feature_matches(const Mat &img_1, const Mat &img_2,
                          std::vector<KeyPoint> &keypoints_1,
                          std::vector<KeyPoint> &keypoints_2,
                          std::vector<DMatch> &matches) {
  //-- initialization
  Mat descriptors_1, descriptors_2;
  // used in OpenCV 3
  Ptr<FeatureDetector> detector = ORB::create(2000, 1.20000048F, 8, 100);
  Ptr<DescriptorExtractor> descriptor = ORB::create(5000);
  Ptr<DescriptorMatcher> matcher = DescriptorMatcher::create("BruteForce-Hamming");
  //-- Step 1: detect Oriented FAST corner locations
  detector->detect(img_1, keypoints_1);
  detector->detect(img_2, keypoints_2);
  //-- Step 2: compute BRIEF descriptors at the corner locations
  descriptor->compute(img_1, keypoints_1, descriptors_1);
  descriptor->compute(img_2, keypoints_2, descriptors_2);
  //-- Step 3: match the BRIEF descriptors of the two images using the Hamming distance
  vector<DMatch> match;
  // BFMatcher matcher(NORM_HAMMING);
  matcher->match(descriptors_1, descriptors_2, match);
  //-- Step 4: filter the matched pairs
  double min_dist = 10000, max_dist = 0;
  // find the minimum and maximum distances over all matches, i.e. the distances of the most and least similar pairs
  for (int i = 0; i < descriptors_1.rows; i++) {
    double dist = match[i].distance;
    if (dist < min_dist) min_dist = dist;
    if (dist > max_dist) max_dist = dist;
  }
  printf("-- Max dist : %f\n", max_dist);
  printf("-- Min dist : %f\n", min_dist);
  // A match is considered wrong when the descriptor distance exceeds twice the minimum distance.
  // The minimum distance can sometimes be very small, so an empirical value of 30 is used as a lower bound.
  for (int i = 0; i < descriptors_1.rows; i++) {
    if (match[i].distance <= max(2 * min_dist, 30.0)) {
      matches.push_back(match[i]);
    }
  }
  //-- Step 5: draw the matching results
  if (show_picture) {
    Mat img_match;
    Mat img_goodmatch;
    drawMatches(img_1, keypoints_1, img_2, keypoints_2, matches, img_match);
    imshow("all matches", img_match);
    waitKey(0);
  }
}

Point2d pixel2cam(const Point2d &p, const Mat &K) {
  return Point2d(
      (p.x - K.at<double>(0, 2)) / K.at<double>(0, 0),
      (p.y - K.at<double>(1, 2)) / K.at<double>(1, 1));
}

void pose_estimation_3d3d(const vector<Point3f> &pts1,
                          const vector<Point3f> &pts2,
                          Mat &R, Mat &t) {
  Point3f p1, p2;  // centers of mass
  int N = pts1.size();
  for (int i = 0; i < N; i++) {
    p1 += pts1[i];
    p2 += pts2[i];
  }
  p1 = Point3f(Vec3f(p1) / N);
  p2 = Point3f(Vec3f(p2) / N);
  vector<Point3f> q1(N), q2(N);  // remove the center
  for (int i = 0; i < N; i++) {
    q1[i] = pts1[i] - p1;
    q2[i] = pts2[i] - p2;
  }
  // compute q1 * q2^T
  Eigen::Matrix3d W = Eigen::Matrix3d::Zero();
  for (int i = 0; i < N; i++) {
    W += Eigen::Vector3d(q1[i].x, q1[i].y, q1[i].z) * Eigen::Vector3d(q2[i].x, q2[i].y, q2[i].z).transpose();
  }
  //cout << "W=" << W << endl;
  // SVD on W
  Eigen::JacobiSVD<Eigen::Matrix3d> svd(W, Eigen::ComputeFullU | Eigen::ComputeFullV);
  Eigen::Matrix3d U = svd.matrixU();
  Eigen::Matrix3d V = svd.matrixV();
  Eigen::Matrix3d R_ = U * (V.transpose());
  if (R_.determinant() < 0) {
    R_ = -R_;
  }
  Eigen::Vector3d t_ = Eigen::Vector3d(p1.x, p1.y, p1.z) - R_ * Eigen::Vector3d(p2.x, p2.y, p2.z);
  // convert to cv::Mat
  R = (Mat_<double>(3, 3) <<
       R_(0, 0), R_(0, 1), R_(0, 2),
       R_(1, 0), R_(1, 1), R_(1, 2),
       R_(2, 0), R_(2, 1), R_(2, 2));
  t = (Mat_<double>(3, 1) << t_(0, 0), t_(1, 0), t_(2, 0));
}

void convertRGB2Gray(string picture) {
  double min;
  double max;
  Mat depth_new_1 = imread(picture);  // color-mapped depth image
  Mat test = Mat(20, 256, CV_8UC3);
  int s;
  // build a 256-entry color lookup strip matching the depth colormap
  for (int i = 0; i < 20; i++) {
    std::cout << i << " ";
    Vec3b *p = test.ptr<Vec3b>(i);
    for (s = 0; s < 32; s++) {
      p[s][0] = 128 + 4 * s;
      p[s][1] = 0;
      p[s][2] = 0;
    }
    p[32][0] = 255;
    p[32][1] = 0;
    p[32][2] = 0;
    for (s = 0; s < 63; s++) {
      p[33 + s][0] = 255;
      p[33 + s][1] = 4 + 4 * s;
      p[33 + s][2] = 0;
    }
    p[96][0] = 254;
    p[96][1] = 255;
    p[96][2] = 2;
    for (s = 0; s < 62; s++) {
      p[97 + s][0] = 250 - 4 * s;
      p[97 + s][1] = 255;
      p[97 + s][2] = 6 + 4 * s;
    }
    p[159][0] = 1;
    p[159][1] = 255;
    p[159][2] = 254;
    for (s = 0; s < 64; s++) {
      p[160 + s][0] = 0;
      p[160 + s][1] = 252 - (s * 4);
      p[160 + s][2] = 255;
    }
    for (s = 0; s < 32; s++) {
      p[224 + s][0] = 0;
      p[224 + s][1] = 0;
      p[224 + s][2] = 252 - 4 * s;
    }
  }
  cout << "depth_new_1:: " << depth_new_1.cols << " " << depth_new_1.rows << endl;
  Mat img_g = Mat(picture_h, picture_w, CV_8UC1);
  // map each color-mapped pixel back to its grayscale (depth) index
  for (int i = 0; i < picture_h; i++) {
    Vec3b *p = test.ptr<Vec3b>(0);
    Vec3b *q = depth_new_1.ptr<Vec3b>(i);
    for (int j = 0; j < picture_w; j++) {
      for (int k = 0; k < 256; k++) {
        if ((((int)p[k][0] - (int)q[j][0] < 4) && ((int)q[j][0] - (int)p[k][0] < 4)) &&
            (((int)p[k][1] - (int)q[j][1] < 4) && ((int)q[j][1] - (int)p[k][1] < 4)) &&
            (((int)p[k][2] - (int)q[j][2] < 4) && ((int)q[j][2] - (int)p[k][2] < 4))) {
          img_g.at<uchar>(i, j) = k;
        }
      }
    }
  }
  imwrite("14_Depth_3.png", img_g);
  waitKey();
}

CMakeLists.txt

Same as above.

Execution result

./icp 1.png 2.png 1_depth.png 2_depth.png
-- Max dist : 87.000000
-- Min dist : 4.000000
picture1 keypoints: 1304
picture2 keypoints: 1301
found 313 matched point pairs in total
3d-3d pairs: 313
[delete outliers] Matched objects distance: N_outliers:: 23

The above covers how to analyze the ORB, SURF, and SIFT feature point extraction methods and the ICP matching method. Some of these points are likely to come up in everyday work, and I hope this article helps you learn something new.
