OpenCV + Hikvision SDK video stream license plate recognition error

Time:09-26

#include <cstdio>
#include <cstring>
#include <iostream>
#include "Windows.h"
#include "HCNetSDK.h"
#include "plaympeg4.h"
#include <opencv2/opencv.hpp>
#include "cv.h"
#include "highgui.h"
#include <time.h>
#include "LPKernelEx.h"
#include "stdio.h"
#define USECOLOR 1
using namespace std;
using namespace cv;
#define false 0
// image crop buffer
Mat image_crop;
//-----------------------------------------------------------------
int iPicNum = 0;   // set the channel NO.
LONG nPort = 1;
HWND hWnd = NULL;
bool verify(RotatedRect rect) {
    float error = 0.4;
    const float aspect = 4.7272;
    int min = 15 * 15 * aspect;    // lower bound on area
    int max = 125 * 125 * aspect;  // upper bound on area

    float rmin = aspect - aspect * error;  // lower bound on aspect ratio
    float rmax = aspect + aspect * error;  // upper bound on aspect ratio

    int area = rect.size.width * rect.size.height;  // candidate area
    float r = rect.size.width / rect.size.height;   // candidate aspect ratio
    r = r < 1 ? 1 / r : r;

    return area >= min && area <= max && r >= rmin && r <= rmax;
}
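As a rough illustration of what verify() accepts (all values below are made up for the example, not from the post): a box with license-plate proportions passes the area and aspect-ratio test, while a square of comparable area is rejected.

// Illustrative only: hypothetical candidates fed to verify()
static void verifyExample()
{
    RotatedRect plateLike(Point2f(0.0f, 0.0f), Size2f(140.0f, 30.0f), 0.0f);  // aspect ~4.7, area 4200
    RotatedRect squareLike(Point2f(0.0f, 0.0f), Size2f(65.0f, 65.0f), 0.0f);  // aspect 1.0
    bool ok1 = verify(plateLike);   // true: area and ratio fall inside the plate bounds
    bool ok2 = verify(squareLike);  // false: ratio 1.0 is below rmin
    printf("plateLike=%d squareLike=%d\n", ok1, ok2);
}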

void yv12toYUV(char *outYuv, char *inYv12, int width, int height, int widthStep)
{
    int col, row;
    unsigned int Y, U, V;
    int tmp;
    int idx;

    // printf("widthStep=%d.\n", widthStep);

    for (row = 0; row < height; row++)
    {
        idx = row * widthStep;
        int rowptr = row * width;

        for (col = 0; col < width; col++)
        {
            // int colhalf = col >> 1;
            tmp = (row / 2) * (width / 2) + (col / 2);
            // if ((row == 1) && (col >= 1400 && col <= 1600))
            // {
            //     printf("col=%d, row=%d, width=%d, tmp=%d.\n", col, row, width, tmp);
            //     printf("row*width+col=%d, width*height+width*height/4+tmp=%d, width*height+tmp=%d.\n",
            //            row * width + col, width * height + width * height / 4 + tmp, width * height + tmp);
            // }
            Y = (unsigned int)inYv12[row * width + col];
            U = (unsigned int)inYv12[width * height + width * height / 4 + tmp];
            V = (unsigned int)inYv12[width * height + tmp];
            // if (col == 200)
            // {
            //     printf("col=%d, row=%d, width=%d, tmp=%d.\n", col, row, width, tmp);
            //     printf("width*height+width*height/4+tmp=%d.\n", width * height + width * height / 4 + tmp);
            //     return;
            // }
            if ((idx + col * 3 + 2) > (1200 * widthStep))
            {
                // printf("row*widthStep=%d, idx+col*3+2=%d.\n", 1200 * widthStep, idx + col * 3 + 2);
            }
            outYuv[idx + col * 3]     = Y;
            outYuv[idx + col * 3 + 1] = U;
            outYuv[idx + col * 3 + 2] = V;
        }
    }
    // printf("col=%d, row=%d.\n", col, row);
}
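The manual plane-by-plane copy above can also be done with a single cv::cvtColor call, which the commented-out cv::Mat lines in the decode callback further down hint at. A minimal sketch of that alternative, not from the original post; pBuf, nWidth and nHeight here stand for the decoder output buffer and frame dimensions:

// Sketch (assumed names): convert a YV12 buffer to BGR in one cvtColor call
static void yv12ToBgrSketch(char *pBuf, int nWidth, int nHeight)
{
    cv::Mat yv12(nHeight + nHeight / 2, nWidth, CV_8UC1, (uchar *)pBuf);  // YV12 is 1.5 bytes per pixel
    cv::Mat bgr(nHeight, nWidth, CV_8UC3);
    cv::cvtColor(yv12, bgr, CV_YUV2BGR_YV12);  // OpenCV does the planar-to-interleaved reshuffle internally
}

This avoids the per-pixel loop entirely and also skips the intermediate YCrCb image.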



// Decode callback: video data arrives as YV12 YUV, audio as PCM
void CALLBACK DecCBFun(long nPort, char *pBuf, long nSize, FRAME_INFO *pFrameInfo, long nReserved1, long nReserved2)
{
    int b;
    int nchannel = 1;
    long lFrameType = pFrameInfo->nType;

    if (lFrameType == T_YV12)
    {
#if USECOLOR
        // int start = clock();
        IplImage *pImgYCrCb = cvCreateImage(cvSize(pFrameInfo->nWidth, pFrameInfo->nHeight), 8, 3);  // holds the YUV (YCrCb) image

        yv12toYUV(pImgYCrCb->imageData, pBuf, pFrameInfo->nWidth, pFrameInfo->nHeight, pImgYCrCb->widthStep);  // fill the interleaved YUV image
        IplImage *pImg = cvCreateImage(cvSize(pFrameInfo->nWidth, pFrameInfo->nHeight), 8, 3);
        // cvCvtColor(pImgYCrCb, pImg, CV_YUV2RGB);
        cvCvtColor(pImgYCrCb, pImg, CV_YCrCb2RGB);
        // cv::Mat dst(pFrameInfo->nHeight, pFrameInfo->nWidth, CV_8UC3);  // nHeight is 720, nWidth is 1280; CV_8UC3 = 8-bit unsigned, 3 channels
        // cv::Mat src(pFrameInfo->nHeight + pFrameInfo->nHeight / 2, pFrameInfo->nWidth, CV_8UC1, (uchar *)pBuf);
        // cvtColor(src, dst, CV_YUV2BGR_YV12);
        // int end = clock();
#else
        IplImage *pImg = cvCreateImage(cvSize(pFrameInfo->nWidth, pFrameInfo->nHeight), 8, 1);
        memcpy(pImg->imageData, pBuf, pFrameInfo->nWidth * pFrameInfo->nHeight);
#endif
        // printf("%d\n", end - start);
        // cvNamedWindow("IPCamera1", 0);
        // cvShowImage("IPCamera1", pImg);

        IplImage *frame_gray = cvCreateImage(cvGetSize(pImg), pImg->depth, 1);
        cvCvtColor(pImg, frame_gray, CV_RGB2GRAY);
        Mat image = cvarrToMat(frame_gray);
        Mat image2 = cvarrToMat(pImg);

        imshow("original", image);

        blur(image, image, Size(5, 5));
        imshow("after denoising", image);

        Sobel(image, image, CV_8U, 1, 0, 3, 1, 0);
        imshow("sobel filter", image);

        threshold(image, image, 0, 255, CV_THRESH_OTSU);
        imshow("otsu threshold", image);

        Mat element = getStructuringElement(MORPH_RECT, Size(17, 3));
        morphologyEx(image, image, CV_MOP_CLOSE, element);
        imshow("closing operation", image);

        vector<vector<Point> > contours;
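The listing is cut off at this point. Presumably the callback goes on to extract contours from the closed image and filter them with verify(); what follows is a minimal sketch of that assumed continuation (reusing image and contours from the lines above), not the author's original code:

        // Assumed continuation, not part of the original post: find contours in the
        // binarized image and keep the rotated rectangles that verify() accepts.
        findContours(image, contours, CV_RETR_EXTERNAL, CV_CHAIN_APPROX_NONE);
        vector<RotatedRect> candidates;
        for (size_t i = 0; i < contours.size(); i++)
        {
            RotatedRect mr = minAreaRect(Mat(contours[i]));
            if (verify(mr))
                candidates.push_back(mr);  // plausible plate-shaped region
        }
    }
}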