In Practice: Notes on Optimizing an Image Template Matching Algorithm (Part 2)

In Practice: Notes on Optimizing an Image Template Matching Algorithm (Part 1): https://juejin.cn/post/6844904130159312909

Now let's get into the main content.

The subject of this installment is:

Edge-based template matching

www.codeproject.com/Articles/99…


As we know, the key point of this project is template matching, and the method is made up of four parts.

Part (1): Image gradient

Run a Sobel filter over the template image; it returns the gradients in the X (Gx) and Y (Gy) directions. From these gradients we compute the edge magnitude and direction with the following formulas: Magnitude = sqrt(Gx^2 + Gy^2) and Direction = arctan(Gy / Gx).

cvSobel( src, gx, 1, 0, 3 );	//gradient in X direction
	cvSobel( src, gy, 0, 1, 3 );	//gradient in Y direction
	
	nmsEdges = cvCreateMat( Ssize.height, Ssize.width, CV_32F );	//create matrix to store the final nmsEdges
	const short* _sdx; 
	const short* _sdy; 
	double fdx,fdy;	
    double MagG, DirG;
	double MaxGradient=-99999.99;
	double direction;
	int *orients = new int[ Ssize.height *Ssize.width];
	int count = 0,i,j; // count variable;
	
	double **magMat;
	CreateDoubleMatrix(magMat ,Ssize);
	
	for( i = 1; i < Ssize.height-1; i++ )
    {
    	for( j = 1; j < Ssize.width-1; j++ )
        { 		 
				_sdx = (short*)(gx->data.ptr + gx->step*i);
				_sdy = (short*)(gy->data.ptr + gy->step*i);
				fdx = _sdx[j]; fdy = _sdy[j];        // read x, y derivatives

				MagG = sqrt((float)(fdx*fdx) + (float)(fdy*fdy)); //Magnitude = Sqrt(gx^2 +gy^2)
				direction =cvFastArctan((float)fdy,(float)fdx);	 //Direction = invtan (Gy / Gx)
				magMat[i][j] = MagG;
				
				if(MagG>MaxGradient)
					MaxGradient=MagG;	// get maximum gradient value for normalizing

				// get the closest angle from the {0, 45, 90, 135} set
				if( (direction > 0 && direction < 22.5) || (direction > 157.5 && direction < 202.5) || (direction > 337.5 && direction < 360) )
					direction = 0;
				else if( (direction > 22.5 && direction < 67.5) || (direction > 202.5 && direction < 247.5) )
					direction = 45;
				else if( (direction > 67.5 && direction < 112.5) || (direction > 247.5 && direction < 292.5) )
					direction = 90;
				else if( (direction > 112.5 && direction < 157.5) || (direction > 292.5 && direction < 337.5) )
					direction = 135;
				else
					direction = 0;

				orients[count] = (int)direction;
				count++;
		}
	}

	
	count=0; // init count
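
As a side comparison (not part of the article's code), the same gradient step can be written much more compactly with OpenCV's C++ API. Below is a minimal sketch; templ is a placeholder name for the 8-bit single-channel template image:

	#include <opencv2/core.hpp>
	#include <opencv2/imgproc.hpp>
	
	// Sketch: gradient magnitude/direction of a grayscale template using the C++ API.
	static double computeGradients(const cv::Mat& templ, cv::Mat& magnitude, cv::Mat& direction)
	{
		cv::Mat gx, gy;
		cv::Sobel(templ, gx, CV_32F, 1, 0, 3);   // gradient in X direction
		cv::Sobel(templ, gy, CV_32F, 0, 1, 3);   // gradient in Y direction
	
		// Magnitude = sqrt(gx^2 + gy^2), Direction = atan2(gy, gx) in degrees [0, 360)
		cv::cartToPolar(gx, gy, magnitude, direction, true);
	
		double maxGradient = 0;                  // maximum magnitude, used for normalization
		cv::minMaxLoc(magnitude, nullptr, &maxGradient);
		return maxGradient;
	}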

Part (2): Non-maximum suppression (NMS)

After the edge direction is found, we perform non-maximum suppression: NMS looks at the pixels to the left and right of the current one along the edge direction.

If the current pixel's magnitude is smaller than either of those neighbors, the pixel is suppressed. The result is an image with thin, single-pixel-wide edges.

double leftPixel,rightPixel;
	
	for( i = 1; i < Ssize.height-1; i++ )
    {
		for( j = 1; j < Ssize.width-1; j++ )
        {
				switch ( orients[count] )
                {
                   case 0:
                        leftPixel  = magMat[i][j-1];
                        rightPixel = magMat[i][j+1];
                        break;
                    case 45:
                        leftPixel  = magMat[i-1][j+1];
						rightPixel = magMat[i+1][j-1];
                        break;
                    case 90:
                        leftPixel  = magMat[i-1][j];
                        rightPixel = magMat[i+1][j];
                        break;
                    case 135:
                        leftPixel  = magMat[i-1][j-1];
                        rightPixel = magMat[i+1][j+1];
                        break;
				 }
				// compare the current pixel's magnitude with its neighbors along the gradient direction
                if (( magMat[i][j] < leftPixel ) || (magMat[i][j] < rightPixel ) )
					(nmsEdges->data.ptr + nmsEdges->step*i)[j]=0;
                else
                    (nmsEdges->data.ptr + nmsEdges->step*i)[j]=(uchar)(magMat[i][j]/MaxGradient*255);
			
				count++;
			}
		}
	

	int RSum=0,CSum=0;
	int curX,curY;
	int flag=1;  
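
Re-expressed with the C++ API, the non-maximum suppression step above could look like the following sketch (my own re-expression, not the article's code); the four quantized directions simply pick which pair of neighbors to compare against:

	// Sketch: non-maximum suppression over a float magnitude map and a map of
	// quantized orientations (values 0 / 45 / 90 / 135), both stored as CV_32F.
	static cv::Mat suppressNonMaxima(const cv::Mat& mag, const cv::Mat& orient)
	{
		cv::Mat nms = cv::Mat::zeros(mag.size(), CV_32F);
		for (int i = 1; i < mag.rows - 1; i++) {
			for (int j = 1; j < mag.cols - 1; j++) {
				float left, right;
				switch ((int)orient.at<float>(i, j)) {
					case 45:  left = mag.at<float>(i - 1, j + 1); right = mag.at<float>(i + 1, j - 1); break;
					case 90:  left = mag.at<float>(i - 1, j);     right = mag.at<float>(i + 1, j);     break;
					case 135: left = mag.at<float>(i - 1, j - 1); right = mag.at<float>(i + 1, j + 1); break;
					default:  left = mag.at<float>(i, j - 1);     right = mag.at<float>(i, j + 1);     break; // 0 degrees
				}
				// keep the pixel only if it is at least as large as both neighbors along the gradient
				float m = mag.at<float>(i, j);
				if (m >= left && m >= right)
					nms.at<float>(i, j) = m;
			}
		}
		return nms;
	}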

Part (3): Hysteresis thresholding

Hysteresis thresholding requires two thresholds, a high one and a low one. We apply the high threshold to mark the edge pixels we can be fairly sure are genuine. Starting from these, and using the direction information derived earlier, the remaining edges can be traced through the image. While tracing an edge we apply the lower threshold, so the fainter sections of an edge can be followed as long as a starting point has been found.

//Hysteresis threshold
	for( i = 1; i < Ssize.height-1; i++ )
    {
		for( j = 1; j < Ssize.width-1; j++ )	// note: width-1, so the j+1 neighbor below stays in bounds
        {
			_sdx = (short*)(gx->data.ptr + gx->step*i);
			_sdy = (short*)(gy->data.ptr + gy->step*i);
			fdx = _sdx[j]; fdy = _sdy[j];
				
			MagG = sqrt(fdx*fdx + fdy*fdy); //Magnitude = Sqrt(gx^2 +gy^2)
			DirG =cvFastArctan((float)fdy,(float)fdx);	 //Direction = atan(Gy / Gx)
		
			////((uchar*)(imgGDir->imageData + imgGDir->widthStep*i))[j]= MagG;
			flag=1;
			if(((double)((nmsEdges->data.ptr + nmsEdges->step*i))[j]) < maxContrast)
			{
				if(((double)((nmsEdges->data.ptr + nmsEdges->step*i))[j])< minContrast)
				{
					
					(nmsEdges->data.ptr + nmsEdges->step*i)[j]=0;
					flag=0; // remove from edge
					////((uchar*)(imgGDir->imageData + imgGDir->widthStep*i))[j]=0;
				}
				else
				{   // if none of the 8 neighboring pixels is greater than maxContrast, remove this pixel from the edge
					if( (((double)((nmsEdges->data.ptr + nmsEdges->step*(i-1)))[j-1]) < maxContrast)	&&
						(((double)((nmsEdges->data.ptr + nmsEdges->step*(i-1)))[j]) < maxContrast)		&&
						(((double)((nmsEdges->data.ptr + nmsEdges->step*(i-1)))[j+1]) < maxContrast)	&&
						(((double)((nmsEdges->data.ptr + nmsEdges->step*i))[j-1]) < maxContrast)		&&
						(((double)((nmsEdges->data.ptr + nmsEdges->step*i))[j+1]) < maxContrast)		&&
						(((double)((nmsEdges->data.ptr + nmsEdges->step*(i+1)))[j-1]) < maxContrast)	&&
						(((double)((nmsEdges->data.ptr + nmsEdges->step*(i+1)))[j]) < maxContrast)		&&
						(((double)((nmsEdges->data.ptr + nmsEdges->step*(i+1)))[j+1]) < maxContrast)	)
					{
						(nmsEdges->data.ptr + nmsEdges->step*i)[j]=0;
						flag=0;
						////((uchar*)(imgGDir->imageData + imgGDir->widthStep*i))[j]=0;
					}
				}
				
			}
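
It is worth noting that parts (1) through (3) together are essentially a Canny-style edge extraction: gradient, non-maximum suppression, then hysteresis thresholding. If only the binary edge map were needed, modern OpenCV could produce a comparable result in a single call, with minContrast and maxContrast playing the role of the two hysteresis thresholds (templ again being a placeholder for the grayscale template image):

	cv::Mat edges;
	cv::Canny(templ, edges, minContrast, maxContrast, 3 /* Sobel aperture */);

The manual implementation is still useful here, however, because the matching step below reuses the raw X/Y derivatives at each surviving edge point, which cv::Canny does not return.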

Part (4): Saving the data set and template matching

After the edges are extracted, the X and Y derivatives of the selected edge points are saved, together with their coordinates, as the template model. These coordinates are rearranged so that they are expressed relative to a reference point at the template's center of gravity.
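
As a rough illustration of what gets stored per edge point: the matching code below keeps this information in parallel arrays (cordinates, edgeDerivativeX, edgeDerivativeY, edgeMagnitude); grouped into a single record (with illustrative member names of my own), each entry boils down to something like:

	// Hypothetical per-point record of the template model (names are illustrative only).
	struct EdgeModelPoint {
		CvPoint offset;        // edge coordinate, re-referenced to the template's center of gravity
		short   dTx, dTy;      // X and Y derivatives at this edge point
		double  invMagnitude;  // 1 / sqrt(dTx^2 + dTy^2), precomputed so the score can be normalized
	};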

Finding the edge-based template model.

The next task of the algorithm is to use this template model to locate the object in the search image.

We can see that the model created from the template image contains a set of points together with their gradients in the X and Y directions, for i = 1…n, where n is the number of elements in the template (T) data set.

If the template model matches the search image perfectly, this function returns a score of 1; the score corresponds to the portion of the object that is visible in the search image. If the object is not present in the search image at all, the score is 0.
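
Concretely, the score evaluated in the loop below is the average, over all n model points, of the normalized dot product between the template gradient and the search-image gradient at the corresponding position:

	Score(x, y) = (1/n) * Σ ( (iTx*iSx + iTy*iSy) / (|T_m| * |S_(x+xm, y+ym)|) ),  m = 1…n

Each term lies between -1 and 1, so a perfect match of all n points yields 1, while points that fall on missing or occluded parts of the object contribute nothing, which is why the score also reflects the fraction of the object that is visible.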

cvSobel( src, Sdx, 1, 0, 3 );  // find X derivatives
	cvSobel( src, Sdy, 0, 1, 3 ); // find Y derivatives
		
	// stopping criteria for the model search
	double normMinScore = minScore /noOfCordinates; // precompute minimum score 
	double normGreediness = ((1- greediness * minScore)/(1-greediness)) /noOfCordinates; // precompute greediness 
		
	for( i = 0; i < Ssize.height; i++ )
    {
		 _Sdx = (short*)(Sdx->data.ptr + Sdx->step*(i));
		 _Sdy = (short*)(Sdy->data.ptr + Sdy->step*(i));
		
		 for( j = 0; j < Ssize.width; j++ )
		{
				iSx=_Sdx[j];  // X derivative of Source image
				iSy=_Sdy[j];  // Y derivative of Source image

				gradMag=sqrt((iSx*iSx)+(iSy*iSy)); //Magnitude = Sqrt(dx^2 +dy^2)
							
				if(gradMag!=0) // handle divide by zero
					matGradMag[i][j]=1/gradMag;   // 1/Sqrt(dx^2 +dy^2)
				else
					matGradMag[i][j]=0;
				
		}
	}
	for( i = 0; i < Ssize.height; i++ )
    {
			for( j = 0; j < Ssize.width; j++ )
             { 
				 partialSum = 0; // initialize partialSum
				 for(m=0;m<noOfCordinates;m++)
				 {
					 curX	= i + cordinates[m].x ;	// template X coordinate
					 curY	= j + cordinates[m].y ; // template Y coordinate
					 iTx	= edgeDerivativeX[m];	// template X derivative
					 iTy	= edgeDerivativeY[m];    // template Y derivative

					 if(curX<0 ||curY<0||curX>Ssize.height-1 ||curY>Ssize.width-1)
						 continue;
					 
					 _Sdx = (short*)(Sdx->data.ptr + Sdx->step*(curX));
					 _Sdy = (short*)(Sdy->data.ptr + Sdy->step*(curX));
						
					 iSx=_Sdx[curY]; // get corresponding X derivative from source image
					 iSy=_Sdy[curY]; // get corresponding Y derivative from source image
						
					if((iSx!=0 || iSy!=0) && (iTx!=0 || iTy!=0))
					 {
						 //partialSum = Sum of ((Source X derivative * Template X derivative) + (Source Y derivative * Template Y derivative)) / (edge magnitude of Template * edge magnitude of Source)
						 partialSum = partialSum + ((iSx*iTx)+(iSy*iTy))*(edgeMagnitude[m] * matGradMag[curX][curY]);
									
					 }

					sumOfCoords = m + 1;
					partialScore = partialSum /sumOfCoords ;
					// check termination criteria:
					// if the partial score falls below the bound needed to still reach the
					// required score at this position, stop searching at this coordinate.
					if( partialScore < (MIN((minScore -1) + normGreediness*sumOfCoords,normMinScore*  sumOfCoords)))
						break;
									
				}
				if(partialScore > resultScore)
				{
					resultScore = partialScore; //  Match score
					resultPoint->x = i;			// result coordinate X		
					resultPoint->y = j;			// result coordinate Y
				}
			} 
		}
	
	// free used resources and return score
	ReleaseDoubleMatrix(matGradMag ,Ssize.height);
	cvReleaseMat( &Sdx );
	cvReleaseMat( &Sdy );
	
	return resultScore;
}
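
To make the early-termination check concrete, take minScore = 0.7, greediness = 0.8 and a model with noOfCordinates = 100 points (example numbers only). Then normMinScore = 0.007 and normGreediness = (1 - 0.8*0.7) / (1 - 0.8) / 100 = 0.022. After m = 50 points the loop requires partialScore >= min((0.7 - 1) + 0.022*50, 0.007*50) = min(0.8, 0.35) = 0.35; any candidate position scoring below that is abandoned without evaluating the remaining 50 points, which is the speed-versus-thoroughness trade-off that the greediness parameter controls.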