Summary:
A comparison of the efficiency of four different pixel-access methods in OpenCV. Across repeated tests, pointer-based access is consistently the fastest; methods 2, 3 and 4 are all pointer-based. In release mode, methods 2, 3 and 4 are very close, with only marginal differences that shift slightly with image size. I usually use method 3 because it is concise and efficient; method 2 also looks good, while method 4 requires the image data to be stored contiguously.
假設(shè):圖像image為3通道8bit的圖像,現(xiàn)在要訪問(wèn)它的第row行奏黑,第col列的r,g,b值
1. Method 1: Mat.at<Vec3b>(row, col)
//access via Mat.at<Vec3b>(row, col)
cv::Vec3b bgr = image.at<cv::Vec3b>(row, col);
blue = bgr[0];
green = bgr[1];
red = bgr[2];
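Mat::at returns a reference, so the same accessor can also be used to write a pixel; in debug builds it additionally performs type and range checks, which is part of why it is the slowest of the four. A minimal sketch (blue, green and red are assumed to already hold the new values):
image.at<cv::Vec3b>(row, col) = cv::Vec3b(blue, green, red); // write back through the same accessor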
2. Method 2: Mat.ptr<Vec3b>(row)
//access via Mat.ptr<Vec3b>(row)
cv::Vec3b *bgr = image.ptr<cv::Vec3b>(row);
blue = bgr[col][0];
green = bgr[col][1];
red = bgr[col][2];
3. Method 3: Mat.ptr<uchar>(row)
uchar *ptr = image.ptr<uchar>(row); // pointer to the first byte of the given image row
blue = *(ptr + col*3);
green = *(ptr + col*3 + 1);
red = *(ptr + col*3 + 2);
4. Method 4: Mat.data + offset
//image.data points to the first byte of the pixel buffer
uchar *ptr = image.data + row * image.cols * 3 + col*3; // valid only when the rows are stored contiguously
blue = *(ptr);
green = *(ptr + 1);
red = *(ptr + 2);
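Whether a Mat is continuous can be checked at run time with isContinuous(); when it is not (for example, a ROI cut out of a larger image), the offset must use the real row stride image.step instead of image.cols * 3. A minimal sketch of that guard, under the same CV_8UC3 assumption:
if (image.isContinuous())
{
    uchar *p = image.data + (row * image.cols + col) * 3; // no row padding: a plain linear offset works
    blue = p[0];
    green = p[1];
    red = p[2];
}
else
{
    uchar *p = image.data + row * image.step + col * 3; // image.step is the byte stride between rows
    blue = p[0];
    green = p[1];
    red = p[2];
}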
Note: for access to other element types, see http://www.reibang.com/p/cfe373dc8c95
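Only the template argument changes for other element types. For example, for a hypothetical single-channel float matrix gray (type CV_32FC1), the same two access styles would look like this:
float v1 = gray.at<float>(row, col); // element access; the template argument must match the element type
float *rowPtr = gray.ptr<float>(row); // typed pointer to the start of the row
float v2 = rowPtr[col]; // same element through the row pointer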
測(cè)試
Image size: 427 × 640 × 3. The color-inversion operation is repeated 99 times (the repetition count is the TIMES macro in the source below) and the average time per pass is drawn on the image.
(Screenshot: timing results in debug mode)
(Screenshot: timing results in release mode)
Full source code
#include "opencv.hpp"
#include "imageProcess.h"
#define TIMES 1
//Method 1: access via Mat.at<Vec3b>(row, col)
void method_1(cv::Mat image)
{
//cv::Mat image = image_.clone();
double startTime = cv::getTickCount();
int w = image.cols;
int h = image.rows;
for (int times = 0; times < TIMES; times++)
{
for (int row = 0; row < h; row++)
{
for (int col = 0; col < w; col++)
{
image.at<cv::Vec3b>(row, col)[0] = 255 - image.at<cv::Vec3b>(row, col)[0];
image.at<cv::Vec3b>(row, col)[1] = 255 - image.at<cv::Vec3b>(row, col)[1];
image.at<cv::Vec3b>(row, col)[2] = 255 - image.at<cv::Vec3b>(row, col)[2];
}
}
}
double endTime = cv::getTickCount();
double t = ((endTime - startTime) / cv::getTickFrequency()) * 1000 / TIMES;
std::ostringstream ss;
ss << "Execute time : " << std::fixed << std::setprecision(2) << t << " ms ";
putText(image, ss.str(), cv::Point(20, 20), cv::FONT_HERSHEY_SIMPLEX, 0.75, cv::Scalar(0, 0, 255), 2, 8);
imshow("method1", image);
}
//Method 2: access via Mat.ptr<Vec3b>(row)
void method_2(cv::Mat image)
{
//cv::Mat image = image_.clone();
//cv::imshow("2", image);
double startTime = cv::getTickCount();
int w = image.cols;
int h = image.rows;
cv::Vec3b* curr=NULL;
for (int times = 0; times < TIMES; times++)
{
for (int row = 0; row < h; row++)
{
curr = image.ptr<cv::Vec3b>(row);
for (int col = 0; col < w; col++)
{
curr[col][0] = 255 - curr[col][0];
curr[col][1] = 255 - curr[col][1];
curr[col][2] = 255 - curr[col][2];
}
}
}
double endTime = cv::getTickCount();
double t = ((endTime - startTime) / cv::getTickFrequency()) * 1000 / TIMES;
std::ostringstream ss;
ss << "Execute time : " << std::fixed << std::setprecision(2) << t << " ms ";
putText(image, ss.str(), cv::Point(20, 20), cv::FONT_HERSHEY_SIMPLEX, 0.75, cv::Scalar(0, 0, 255), 2, 8);
imshow("method2", image);
}
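//Method 3: access via Mat.ptr<uchar>(row)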
void method_3(cv::Mat image)
{
//cv::Mat image = image_.clone();
double startTime = cv::getTickCount();
int w = image.cols;
int h = image.rows;
uchar * ptr=NULL;//declare the pointer out here; declaring it inside the for loop would add a little variable-allocation overhead
for (int times = 0; times < TIMES; times++)
{
for (int row = 0; row < h; row++)
{
ptr = image.ptr<uchar>(row);
for (int col = 0; col < w; col++)
{
*(ptr + 3 * col) = 255 - *(ptr + 3 * col);
*(ptr + 3 * col + 1) = 255 - *(ptr + 3 * col + 1);
*(ptr + 3 * col + 2) = 255 - *(ptr + 3 * col + 2);
}
}
}
double endTime = cv::getTickCount();
double t = ((endTime - startTime) / cv::getTickFrequency()) * 1000 / TIMES;
std::ostringstream ss;
ss << "Execute time : " << std::fixed << std::setprecision(2) << t << " ms ";
putText(image, ss.str(), cv::Point(20, 20), cv::FONT_HERSHEY_SIMPLEX, 0.75, cv::Scalar(0, 0, 255), 2, 8);
imshow("method3", image);
}
//當(dāng)圖像數(shù)據(jù)是連續(xù)的,就可以從矩陣的第一個(gè)元素開始
//按相對(duì)位置訪問(wèn)整個(gè)矩陣
void method_4(cv::Mat image)
{
//assert(image.isContinuous());
//std::cout <<"image.isContinuous():"<< image.isContinuous()<< std::endl;
double startTime = cv::getTickCount();
int w = image.cols;
int h = image.rows;
uchar * imgPtr = NULL;
for (int times = 0; times < TIMES; times++)
{
//image.data is equivalent to image.ptr<uchar>(0)
for (int row = 0; row < h; row++) {
imgPtr = image.data + row * image.step;
for (int col = 0; col < w; col++) {
//the three forms below are equivalent; the commented-out pointer-increment variant also relies on C++17 evaluation order
*(imgPtr + col*3) = 255 - *(imgPtr + col * 3);
*(imgPtr + col * 3 + 1) = 255 - *(imgPtr + col * 3 + 1);
*(imgPtr + col * 3 + 2) = 255 - *(imgPtr + col * 3 + 2);
/*imgPtr[col * 3] = 255 - imgPtr[col * 3];
imgPtr[col * 3 + 1] = 255 - imgPtr[col * 3 + 1];
imgPtr[col * 3 + 2] = 255 - imgPtr[col * 3 + 2];*/
/**(imgPtr++) = 255 - *(imgPtr);
*(imgPtr++) = 255 - *(imgPtr);
*(imgPtr++) = 255 - *(imgPtr);*/
}
}
}
double endTime = cv::getTickCount();
double t = ((endTime - startTime) / cv::getTickFrequency()) * 1000 / TIMES;
std::ostringstream ss;
ss << "Execute time : " << std::fixed << std::setprecision(2) << t << " ms ";
putText(image, ss.str(), cv::Point(20, 20), cv::FONT_HERSHEY_SIMPLEX, 0.75, cv::Scalar(0, 0, 255), 2, 8);
imshow("method4", image);
}
int main()
{
std::string imagePath = "K:\\deepImage\\building.jpg";
cv::Mat m1 = cv::imread(imagePath);
if (m1.empty())
{
std::cout << "failed to load image: " << imagePath << std::endl;
return -1;
}
//cv::resize(m1, m1, cv::Size(1000, 1000));
method_1(m1.clone());
method_2(m1.clone());
method_3(m1.clone());
method_4(m1.clone());
cv::waitKey(0);
return 0;
}
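As a follow-up to the comment above method_4: when image.isContinuous() returns true there is no row padding, so the row loop can be dropped and the whole buffer inverted in one flat pass. A minimal sketch of that variant (not part of the benchmark above, same CV_8UC3 assumption):
void invert_flat(cv::Mat image)
{
    CV_Assert(image.type() == CV_8UC3 && image.isContinuous());
    uchar *p = image.data;
    size_t total = image.total() * image.channels(); // number of bytes in the pixel buffer
    for (size_t i = 0; i < total; i++)
        p[i] = 255 - p[i];
}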