Points 1, 2, 3 in Image1 correspond to points 1, 2, 3 in Image2. From these three point pairs we can solve for the affine matrix, and then every point in Image1 can be mapped into Image2 through it.
(Figure: the result after the affine transform)
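As a quick illustration (my addition, with made-up coordinates, not part of the original post): an affine transform maps a point (x, y) through a 2x3 matrix M, so that x' = m00*x + m01*y + m02 and y' = m10*x + m11*y + m12. Three point pairs give six equations for the six unknowns, which is why three correspondences are enough. A minimal sketch using getAffineTransform:

#include "opencv2/imgproc.hpp"
#include <iostream>
#include <vector>

int main()
{
    // Three corresponding points in Image1 and Image2 (made-up values).
    cv::Point2f srcPts[3] = { {0.f, 0.f}, {100.f, 0.f}, {0.f, 100.f} };
    cv::Point2f dstPts[3] = { {10.f, 20.f}, {110.f, 30.f}, {5.f, 130.f} };

    // Solve for the 2x3 affine matrix M with [x'; y'] = M * [x; y; 1].
    cv::Mat M = cv::getAffineTransform(srcPts, dstPts);

    // Any other point of Image1 can now be mapped into Image2 with M.
    std::vector<cv::Point2f> in = { {50.f, 50.f} }, out;
    cv::transform(in, out, M);

    std::cout << "M =\n" << M << "\nmapped point: " << out[0] << std::endl;
    return 0;
}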
When doing image enhancement we change the image's range: g(x) is the output, f(x) is the input, and h is our operation, for example contrast enhancement or histogram equalization, so g(x) = h(f(x)).
In a coordinate transform, however, it is the domain that changes: g(x) = f(h(x)).
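A small sketch to make the contrast concrete (my addition; the window names and the +30 brightness offset are arbitrary). The first output changes pixel values and leaves positions alone; the second leaves values alone and changes where they land:

#include "opencv2/imgproc.hpp"
#include "opencv2/imgcodecs.hpp"
#include "opencv2/highgui.hpp"

int main(int argc, char** argv)
{
    cv::Mat f = cv::imread(argv[1], 1);

    // Range transform g(x) = h(f(x)): h acts on pixel VALUES
    // (a simple brightness boost standing in for contrast enhancement).
    cv::Mat g1;
    cv::add(f, cv::Scalar(30, 30, 30), g1);

    // Domain transform g(x) = f(h(x)): h acts on pixel COORDINATES
    // (a pure horizontal translation written as a 2x3 affine matrix).
    cv::Mat shift = (cv::Mat_<double>(2, 3) << 1, 0, 20,
                                               0, 1, 0);
    cv::Mat g2;
    cv::warpAffine(f, g2, shift, f.size());

    cv::imshow("values changed", g1);
    cv::imshow("coordinates changed", g2);
    cv::waitKey(0);
    return 0;
}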
If we have the source image f and a coordinate mapping function h, how do we compute the output image g?
procedure forwardWarp(f, h, out g):
    For every pixel x in f(x):
        1. Compute the destination location x′ = h(x).
        2. Copy the pixel f(x) to g(x′).

procedure inverseWarp(f, h, out g):
    For every pixel x′ in g(x′):
        1. Compute the source location x = h⁻¹(x′).
        2. Resample f(x) at location x and copy to g(x′).
In other words, we iterate over the pixels of the output image, map each one back into the source image, and sample the source there. The mapping function used here is the inverse of the original h (for an affine transform, simply the inverse matrix).
Of course the mapped location is usually not an integer pixel position, so some extra handling, namely interpolation, is needed; we will not go into the details here.
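For completeness, a minimal sketch of that resampling step (my addition, assuming a single-channel 8-bit source; in practice remap or warpAffine with INTER_LINEAR does this for you):

#include "opencv2/core.hpp"
#include <algorithm>
#include <cmath>

// Bilinear interpolation of f at a non-integer source location (x, y),
// with the location clamped to the image border.
static float sampleBilinear(const cv::Mat& f, float x, float y)
{
    x = std::min(std::max(x, 0.f), (float)(f.cols - 1));
    y = std::min(std::max(y, 0.f), (float)(f.rows - 1));

    int x0 = std::min((int)std::floor(x), f.cols - 2);
    int y0 = std::min((int)std::floor(y), f.rows - 2);
    float ax = x - x0, ay = y - y0;   // fractional parts, in [0, 1]

    float v00 = f.at<uchar>(y0,     x0);
    float v01 = f.at<uchar>(y0,     x0 + 1);
    float v10 = f.at<uchar>(y0 + 1, x0);
    float v11 = f.at<uchar>(y0 + 1, x0 + 1);

    return (1 - ay) * ((1 - ax) * v00 + ax * v01)
         +      ay  * ((1 - ax) * v10 + ax * v11);
}

int main()
{
    cv::Mat f = (cv::Mat_<uchar>(2, 2) << 10, 20, 30, 40);
    // Sample at the centre of the 2x2 patch: (10+20+30+40)/4 = 25.
    float v = sampleBilinear(f, 0.5f, 0.5f);
    return (int)v == 25 ? 0 : 1;
}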
It is easy to see what the mapping function is, and from it we obtain our transformation matrix M.
Here is the official sample:
#include "opencv2/imgproc.hpp" #include <iostream> #include "opencv2/highgui.hpp" using namespace cv; using namespace std; /// Global variables char* source_window = "Source image"; char* warp_window = "Warp"; char* warp_rotate_window = "Warp + Rotate"; /** @function main */ int main( int argc, char** argv ) { Point2f srcTri[3]; Point2f dstTri[3]; Mat rot_mat( 2, 3, CV_32FC1 ); Mat warp_mat( 2, 3, CV_32FC1 ); Mat src, warp_dst, warp_rotate_dst; /// Load the image src = imread( argv[1], 1 ); /// Set the dst image the same type and size as src warp_dst = Mat::zeros( src.rows, src.cols, src.type() ); /// Set your 3 points to calculate the Affine Transform srcTri[0] = Point2f( 0,0 ); srcTri[1] = Point2f( src.cols - 1, 0 ); srcTri[2] = Point2f( 0, src.rows - 1 ); dstTri[0] = Point2f( src.cols*0.0, src.rows*0.33 ); dstTri[1] = Point2f( src.cols*0.85, src.rows*0.25 ); dstTri[2] = Point2f( src.cols*0.15, src.rows*0.7 ); /// Get the Affine Transform warp_mat = getAffineTransform( srcTri, dstTri ); /// Apply the Affine Transform just found to the src image warpAffine( src, warp_dst, warp_mat, warp_dst.size() ); /** Rotating the image after Warp */ /// Compute a rotation matrix with respect to the center of the image Point center = Point( warp_dst.cols/2, warp_dst.rows/2 ); double angle = -50.0; double scale = 0.6; /// 这里获得旋转矩阵,中心是center,角度为-50度,并缩放为原来的0.6倍,也是简单的样例 rot_mat = getRotationMatrix2D( center, angle, scale ); /// Rotate the warped image warpAffine( warp_dst, warp_rotate_dst, rot_mat, warp_dst.size() ); /// Show what you got namedWindow( source_window, CV_WINDOW_AUTOSIZE ); imshow( source_window, src ); namedWindow( warp_window, CV_WINDOW_AUTOSIZE ); imshow( warp_window, warp_dst ); namedWindow( warp_rotate_window, CV_WINDOW_AUTOSIZE ); imshow( warp_rotate_window, warp_rotate_dst ); /// Wait until user exits the program waitKey(0); return 0; }
How do we implement such a transform (here, a left-right mirror)? We fill in the coordinate maps:

for( int j = 0; j < src.rows; j++ )
{
  for( int i = 0; i < src.cols; i++ )
  {
    map_x.at<float>(j,i) = src.cols - i;
    map_y.at<float>(j,i) = j;
  }
}

and then call
remap( src, dst, map_x, map_y, INTER_LINEAR, BORDER_CONSTANT, Scalar(0,0, 0) );
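For reference (my addition), this particular map is just a left-right mirror, which cv::flip does in one call; remap is worth the extra setup only when the mapping is not one of the built-in cases. Note also that map_x = src.cols - i sends the leftmost output column just outside the source, so with BORDER_CONSTANT it comes out black; src.cols - 1 - i would be the exact mirror.

#include "opencv2/core.hpp"
#include "opencv2/imgcodecs.hpp"
#include "opencv2/highgui.hpp"

int main(int argc, char** argv)
{
    cv::Mat src = cv::imread(argv[1], 1), dst;

    // flipCode = 1 mirrors around the vertical axis (left-right flip),
    // the same effect as the hand-built map above.
    cv::flip(src, dst, 1);

    cv::imshow("flip", dst);
    cv::waitKey(0);
    return 0;
}

The complete official remap demo, which cycles through four different maps once a second, follows.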
#include "opencv2/highgui.hpp" #include "opencv2/imgproc.hpp" #include <iostream> #include <stdio.h> using namespace cv; /// Global variables Mat src, dst; Mat map_x, map_y; char* remap_window = "Remap demo"; int ind = 0; /// Function Headers void update_map( void ); /** * @function main */ int main( int argc, char** argv ) { /// Load the image src = imread( argv[1], 1 ); /// Create dst, map_x and map_y with the same size as src: dst.create( src.size(), src.type() ); map_x.create( src.size(), CV_32FC1 ); map_y.create( src.size(), CV_32FC1 ); /// Create window namedWindow( remap_window, CV_WINDOW_AUTOSIZE ); /// Loop while( true ) { /// Each 1 sec. Press ESC to exit the program int c = waitKey( 1000 ); if( (char)c == 27 ) { break; } /// Update map_x & map_y. Then apply remap update_map(); remap( src, dst, map_x, map_y, CV_INTER_LINEAR, BORDER_CONSTANT, Scalar(0,0, 0) ); /// Display results imshow( remap_window, dst ); } return 0; } /** * @function update_map * @brief Fill the map_x and map_y matrices with 4 types of mappings */ void update_map( void ) { ind = ind%4; for( int j = 0; j < src.rows; j++ ) { for( int i = 0; i < src.cols; i++ ) { switch( ind ) { case 0: if( i > src.cols*0.25 && i < src.cols*0.75 && j > src.rows*0.25 && j < src.rows*0.75 ) { map_x.at<float>(j,i) = 2*( i - src.cols*0.25 ) + 0.5 ; map_y.at<float>(j,i) = 2*( j - src.rows*0.25 ) + 0.5 ; } else { map_x.at<float>(j,i) = 0 ; map_y.at<float>(j,i) = 0 ; } break; case 1: map_x.at<float>(j,i) = i ; map_y.at<float>(j,i) = src.rows - j ; break; case 2: map_x.at<float>(j,i) = src.cols - i ; map_y.at<float>(j,i) = j ; break; case 3: map_x.at<float>(j,i) = src.cols - i ; map_y.at<float>(j,i) = src.rows - j ; break; } // end of switch } } ind++; }
#include "opencv2/highgui/highgui.hpp" #include "opencv2/imgproc/imgproc.hpp" #include <iostream> #define K 1.1 using namespace cv; // Global variables Mat src, dst; Mat map_x, map_y; char* remap_window = "Remap demo"; void update_map( void ); int main( int argc, char** argv ) { src = imread( argv[1], 1 ); dst.create( src.rows*K, src.cols*K, src.type() ); map_x.create( src.rows*K, src.cols*K, CV_32FC1 ); map_y.create( src.rows*K, src.cols*K, CV_32FC1 ); namedWindow( remap_window, CV_WINDOW_AUTOSIZE ); update_map(); remap( src, dst, map_x, map_y, INTER_LANCZOS4, BORDER_CONSTANT, Scalar(0, 0, 0) ); imshow( remap_window, dst ); waitKey(0); return 0; } void update_map( void ) { for( int j = 0; j < (int)(K*src.rows); j++ ) { for( int i = 0; i < (int)(K*src.cols); i++ ) { map_x.at<float>(j,i) = i/K; map_y.at<float>(j,i) = j/K; } } }
#include "opencv2/highgui/highgui.hpp" #include "opencv2/imgproc/imgproc.hpp" #include <iostream> using namespace cv; using namespace std; #define K 2 //表示取2个点 #define threshold 0.1 //缩放的程度 // Global variables Mat src, src_copy, dst; Mat map_x, map_y; char* window = "Scale demo"; int samplePointNum = 0; //已经点了几次 Point myPoints[K]; //存放用户点击的坐标 bool flag = false; //点击2次,就设置flag,開始更新 void update_map( void ) { int leftX = threshold*src.cols; int rightX = src.cols - leftX; int recLeftX = myPoints[0].x; int recRightX = myPoints[1].x; for( int j = 0; j < src.rows; j++ ) { for( int i = 0; i < src.cols; i++ ) { if(i > leftX && i < rightX) { map_x.at<float>(j,i) = recLeftX + (i - leftX) * (recRightX - recLeftX)/(rightX - leftX); map_y.at<float>(j,i) = j; } else if(i <= leftX) { map_x.at<float>(j,i) = i * recLeftX/leftX; map_y.at<float>(j,i) = j; } else { map_x.at<float>(j,i) = recRightX + (i- rightX) * (src.cols - recRightX)/(src.cols - rightX); map_y.at<float>(j,i) = j; } } } flag = true; remap( src_copy, dst, map_x, map_y, INTER_LANCZOS4, BORDER_CONSTANT, Scalar(0, 0, 0) ); imshow( window, dst ); imwrite( "./result.jpg", dst ); return; } static void onMouse( int event, int x, int y, int, void* ) { if(samplePointNum == K){ if(!flag) update_map(); return; } if( event != EVENT_LBUTTONDOWN) return; rectangle(src, Point(x-3,y-3), Point(x+3,y+3), Scalar(255,0,0), 1); myPoints[samplePointNum++] = Point(x,y); imshow( window, src ); return; } int main( int argc, char** argv ) { src = imread( argv[1], 1 ); src_copy = src.clone(); dst.create( src.rows, src.cols, src.type() ); map_x.create( src.rows, src.cols, CV_32FC1 ); map_y.create( src.rows, src.cols, CV_32FC1 ); namedWindow( window, CV_WINDOW_AUTOSIZE ); setMouseCallback( window, onMouse, 0 ); imshow( window, src ); waitKey(0); return 0; }
Original post: http://www.cnblogs.com/gcczhongduan/p/5276947.html