Overview
When using a Hikvision camera to capture images, it is sometimes necessary to add extra data such as icons and text to the picture. One option is to use the drawing callback of the Hikvision SDK to overlay characters, images, and so on (please refer to the previous article); another is to use the decoding callback of the Hikvision SDK to decode the video stream data and post-process it. The flow of the second method is as follows: when calling the live preview function NET_DVR_RealPlay_V40(), pass the real-time data callback RealDataCallBack() as the third parameter; inside RealDataCallBack(), register the video stream decoding callback DecCallbackFUN(); finally, in DecCallbackFUN(), analyze the decoded data, convert it, superimpose characters, and so on.
This article only uses the decoding callback to convert the YV12 video stream data to RGB32 data, and then converts the RGB32 data into an Image (Bitmap) that can be displayed or processed further.
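Condensed, the wiring looks like the sketch below. This is only an outline of the call chain, assuming the CHCNetSDK and PlayCtrl C# wrappers shipped with the SDK; login, error handling, and the actual frame processing are omitted and appear in full in the code section that follows.

//Minimal wiring sketch of the preview/decode call chain (see the full code below)
class PreviewFlowSketch
{
    Int32 m_lUserID = -1, m_lRealHandle = -1, m_lPort = -1;
    CHCNetSDK.REALDATACALLBACK m_realData;   //keep delegates in fields so the GC does not collect them
    PlayCtrl.DECCBFUN m_decCallback;

    public void Start()   //assumes m_lUserID already holds a valid login handle
    {
        CHCNetSDK.NET_DVR_PREVIEWINFO info = new CHCNetSDK.NET_DVR_PREVIEWINFO();
        info.hPlayWnd = IntPtr.Zero;          //no window: frames are consumed in the decode callback
        info.lChannel = 1;
        m_realData = new CHCNetSDK.REALDATACALLBACK( OnRealData );
        //step 1: pass the real-time data callback to NET_DVR_RealPlay_V40
        m_lRealHandle = CHCNetSDK.NET_DVR_RealPlay_V40( m_lUserID, ref info, m_realData, IntPtr.Zero );
    }

    void OnRealData( Int32 lRealHandle, UInt32 dwDataType, IntPtr pBuffer, UInt32 dwBufSize, IntPtr pUser )
    {
        if ( dwDataType == CHCNetSDK.NET_DVR_SYSHEAD && m_lPort < 0 )
        {
            //step 2: open a play-library port and register the decoding callback
            PlayCtrl.PlayM4_GetPort( ref m_lPort );
            PlayCtrl.PlayM4_SetStreamOpenMode( m_lPort, PlayCtrl.STREAME_REALTIME );
            PlayCtrl.PlayM4_OpenStream( m_lPort, pBuffer, dwBufSize, 2 * 1024 * 1024 );
            m_decCallback = new PlayCtrl.DECCBFUN( OnDecoded );
            PlayCtrl.PlayM4_SetDecCallBackEx( m_lPort, m_decCallback, IntPtr.Zero, 0 );
            PlayCtrl.PlayM4_Play( m_lPort, IntPtr.Zero );
        }
        else if ( dwBufSize > 0 && m_lPort >= 0 )
        {
            //step 3: feed the raw stream into the decoder
            PlayCtrl.PlayM4_InputData( m_lPort, pBuffer, dwBufSize );
        }
    }

    void OnDecoded( int nPort, IntPtr pBuf, int nSize, ref PlayCtrl.FRAME_INFO pFrameInfo, int nReserved1, int nReserved2 )
    {
        //step 4: pBuf/pFrameInfo hold the decoded YV12 frame; convert to RGB32/Bitmap and overlay here
    }
}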
Code
1. Camera operation code
//Required namespaces: System.Drawing for Image/Bitmap, System.Windows.Forms for MessageBox,
//System.Runtime.InteropServices for Marshal, System.Threading for Thread.
using System;
using System.Drawing;
using System.Runtime.InteropServices;
using System.Threading;
using System.Windows.Forms;

struct CameraInfo
{
    public string strIP;
    public short nPort;
    public string strUserName;
    public string strPassword;
}

class IDeviceCamera
{
    public Image m_img = null;
    public virtual bool InitCamera( CameraInfo stInfo ) { return true; }
}

class DeviceCamera : IDeviceCamera
{
    private CameraInfo m_stCameraInfo;
    private bool m_bInitSDK = false;
    private Int32 m_lUserID = -1;
    private Int32 m_lRealHandle = -1;
    private Int32 m_lPort = -1;
    CHCNetSDK.REALDATACALLBACK RealData = null; //Must be defined as a member variable so the delegate is not garbage-collected

    public override bool InitCamera( CameraInfo stInfo )
    {
        m_stCameraInfo = stInfo;

        m_bInitSDK = CHCNetSDK.NET_DVR_Init();
        if ( !m_bInitSDK )
        {
            uint nError = CHCNetSDK.NET_DVR_GetLastError();
            MessageBox.Show( "NET_DVR_Init error!" );
            return false;
        }
        CHCNetSDK.NET_DVR_SetConnectTime( 5000, 1 );
        CHCNetSDK.NET_DVR_SetReconnect( 10000, 1 );
        //To save the SDK log
        CHCNetSDK.NET_DVR_SetLogToFile( 3, "C:\\SdkLog\\", true );

        string DVRIPAddress = stInfo.strIP;      //Device IP address or domain name
        Int16 DVRPortNumber = stInfo.nPort;      //Device service port
        string DVRUserName = stInfo.strUserName; //User name to login
        string DVRPassword = stInfo.strPassword; //Password to login

        CHCNetSDK.NET_DVR_DEVICEINFO_V30 DeviceInfo = new CHCNetSDK.NET_DVR_DEVICEINFO_V30();
        m_lUserID = CHCNetSDK.NET_DVR_Login_V30( DVRIPAddress, DVRPortNumber, DVRUserName, DVRPassword, ref DeviceInfo );
        if ( m_lUserID < 0 )
        {
            MessageBox.Show( "Login failed!" );
            CHCNetSDK.NET_DVR_Cleanup();
            return false;
        }

        CHCNetSDK.NET_DVR_PREVIEWINFO lpPreviewInfo = new CHCNetSDK.NET_DVR_PREVIEWINFO();
        lpPreviewInfo.hPlayWnd = IntPtr.Zero;   //No display window; frames are handled in the decode callback
        lpPreviewInfo.lChannel = 1;
        lpPreviewInfo.dwStreamType = 0;         //Stream type: 0-main stream, 1-sub stream, 2-stream 3, 3-stream 4, and so on
        lpPreviewInfo.dwLinkMode = 0;           //Connection mode: 0-TCP, 1-UDP, 2-multicast, 3-RTP, 4-RTP/RTSP, 5-RTSP/HTTP
        lpPreviewInfo.bBlocked = true;          //0-non-blocking, 1-blocking
        lpPreviewInfo.dwDisplayBufNum = 15;     //Maximum number of frames in the play library's display buffer

        //Use a callback function to get the camera data
        RealData = new CHCNetSDK.REALDATACALLBACK( RealDataCallBack ); //Live-stream preview callback function
        IntPtr pUser = new IntPtr();                                   //User data
        m_lRealHandle = CHCNetSDK.NET_DVR_RealPlay_V40( m_lUserID, ref lpPreviewInfo, RealData, pUser );
        //Drawing callback for overlaying icons (cbDrawFun is described in the previous article)
        CHCNetSDK.NET_DVR_RigisterDrawFun( m_lRealHandle, new CHCNetSDK.DRAWFUN( cbDrawFun ), 0 );

        return true;
    }

    private uint nLastErr = 0;
    private static PlayCtrl.DECCBFUN m_fDisplayFun = null;
    private IntPtr m_ptrRealHandle;

    //Real-time stream callback: opens the decoder on the system header, then feeds it the stream data
    public void RealDataCallBack( Int32 lRealHandle, UInt32 dwDataType, IntPtr pBuffer, UInt32 dwBufSize, IntPtr pUser )
    {
        //For heavier processing it is better to hand the data off to another thread via a delegate
        switch ( dwDataType )
        {
            case CHCNetSDK.NET_DVR_SYSHEAD:     //System header
                if ( dwBufSize > 0 )
                {
                    if ( m_lPort >= 0 )
                        return; //The same stream only needs to open the play port once

                    //Get a port of the play library
                    if ( !PlayCtrl.PlayM4_GetPort( ref m_lPort ) )
                    {
                        nLastErr = PlayCtrl.PlayM4_GetLastError( m_lPort );
                        break;
                    }

                    //Set the stream mode: real-time stream
                    if ( !PlayCtrl.PlayM4_SetStreamOpenMode( m_lPort, PlayCtrl.STREAME_REALTIME ) )
                    {
                        nLastErr = PlayCtrl.PlayM4_GetLastError( m_lPort );
                        //str = "Set STREAME_REALTIME mode failed, error code= " + nLastErr; this.BeginInvoke( AlarmInfo, str );
                    }

                    //Open the stream
                    if ( !PlayCtrl.PlayM4_OpenStream( m_lPort, pBuffer, dwBufSize, 2 * 1024 * 1024 ) )
                    {
                        nLastErr = PlayCtrl.PlayM4_GetLastError( m_lPort );
                        //str = "PlayM4_OpenStream failed, error code= " + nLastErr; this.BeginInvoke( AlarmInfo, str );
                        break;
                    }

                    //Set the number of display buffers
                    if ( !PlayCtrl.PlayM4_SetDisplayBuf( m_lPort, 15 ) )
                    {
                        nLastErr = PlayCtrl.PlayM4_GetLastError( m_lPort );
                        //str = "PlayM4_SetDisplayBuf failed, error code= " + nLastErr; this.BeginInvoke( AlarmInfo, str );
                    }

                    //Set the display mode (optional, off-screen play)
                    //if ( !PlayCtrl.PlayM4_SetOverlayMode( m_lPort, 0, 0 ) )
                    //{
                    //    nLastErr = PlayCtrl.PlayM4_GetLastError( m_lPort );
                    //    //str = "PlayM4_SetOverlayMode failed, error code= " + nLastErr; this.BeginInvoke( AlarmInfo, str );
                    //}

                    //Set the decoding callback that receives the decoded frames
                    m_fDisplayFun = new PlayCtrl.DECCBFUN( DecCallbackFUN );
                    if ( !PlayCtrl.PlayM4_SetDecCallBackEx( m_lPort, m_fDisplayFun, IntPtr.Zero, 0 ) )
                    {
                        //this.BeginInvoke( AlarmInfo, "PlayM4_SetDecCallBackEx fail" );
                    }

                    //Start to play
                    if ( !PlayCtrl.PlayM4_Play( m_lPort, m_ptrRealHandle ) )
                    {
                        nLastErr = PlayCtrl.PlayM4_GetLastError( m_lPort );
                        //str = "PlayM4_Play failed, error code= " + nLastErr; this.BeginInvoke( AlarmInfo, str );
                        break;
                    }
                }
                break;

            case CHCNetSDK.NET_DVR_STREAMDATA:  //Video stream data
            default:                            //Other data
                if ( dwBufSize > 0 && m_lPort != -1 )
                {
                    for ( int i = 0; i < 999; i++ )
                    {
                        //Feed the stream data to the decoder; retry briefly if the input buffer is full
                        if ( !PlayCtrl.PlayM4_InputData( m_lPort, pBuffer, dwBufSize ) )
                        {
                            nLastErr = PlayCtrl.PlayM4_GetLastError( m_lPort );
                            //str = "PlayM4_InputData failed, error code= " + nLastErr;
                            Thread.Sleep( 2 );
                        }
                        else
                            break;
                    }
                }
                break;
        }
    }

    //Decode callback function: converts each decoded YV12 frame to an Image
    private void DecCallbackFUN( int nPort, IntPtr pBuf, int nSize, ref PlayCtrl.FRAME_INFO pFrameInfo, int nReserved1, int nReserved2 )
    {
        if ( pFrameInfo.nType == 3 ) //#define T_YV12 3
        {
            byte[] byteBuffYV12 = new byte[ nSize ];
            Marshal.Copy( pBuf, byteBuffYV12, 0, nSize );

            long lRGBSize = (long)pFrameInfo.nWidth * pFrameInfo.nHeight * 4;
            byte[] bufferRGB32 = new byte[ lRGBSize ];
            CommonFun.YV12_to_RGB32( byteBuffYV12, bufferRGB32, pFrameInfo.nWidth, pFrameInfo.nHeight );
            byteBuffYV12 = null;

            Bitmap bmpFromGRB32 = CommonFun.RGB32_to_Image( bufferRGB32, pFrameInfo.nWidth, pFrameInfo.nHeight );
            bufferRGB32 = null;
            if ( null == bmpFromGRB32 )
                return;

            m_img = bmpFromGRB32;
        }
    }
}
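A minimal usage sketch of the class above; the address, port, and credentials are placeholders, and error handling is left to the MessageBox calls inside InitCamera():

//Hypothetical caller code
CameraInfo stInfo = new CameraInfo();
stInfo.strIP = "192.168.1.64";      //placeholder device address
stInfo.nPort = 8000;                //SDK service port (commonly 8000)
stInfo.strUserName = "admin";       //placeholder credentials
stInfo.strPassword = "password";

IDeviceCamera camera = new DeviceCamera();
if ( !camera.InitCamera( stInfo ) )
{
    //initialization failed; the class already reports the reason
}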
Note: the obtained m_img is used for PictureBox display.
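One possible way to do that is sketched below. It assumes the form holds the DeviceCamera instance in a field named m_camera and has a Timer named timerDisplay and a PictureBox named pictureBox1 (these names are hypothetical). The decode callback runs on an SDK thread, so the image is only read here on the UI thread.

//Hypothetical form code: periodically show the latest decoded frame
private void timerDisplay_Tick( object sender, EventArgs e )
{
    Image img = m_camera.m_img;   //latest frame produced by DecCallbackFUN
    if ( img != null )
    {
        pictureBox1.Image = img;  //PictureBox repaints with the newest frame
    }
}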
2. Code to convert YV12 to RGB32 and RGB32 to Image
//Required namespaces: System.Drawing / System.Drawing.Imaging for Bitmap and BitmapData,
//System.Linq for Skip/Take, System.Runtime.InteropServices for Marshal.
using System;
using System.Drawing;
using System.Drawing.Imaging;
using System.Linq;
using System.Runtime.InteropServices;

class CommonFun
{
    public static bool YV12_to_RGB32( byte[] buffYV12, byte[] bufferRGB32, int nWidth, int nHeight )
    {
        long nYLen = (long)nWidth * nHeight;
        int nHfWidth = ( nWidth >> 1 );
        if ( nYLen < 1 || nHfWidth < 1 )
            return false;
        //A YV12 frame occupies nWidth*nHeight*3/2 bytes; the RGB32 output needs nWidth*nHeight*4 bytes
        if ( buffYV12.Length < nYLen * 3 / 2 || bufferRGB32.Length < nYLen * 4 )
            return false;

        //YV12 layout: full-resolution Y plane, then two quarter-resolution chroma planes (V first, then U).
        //The variable names below keep the original naming (the first chroma plane is read into byteUData),
        //and the conversion coefficients are applied consistently with that ordering.
        byte[] byteYData = buffYV12.Skip( 0 ).Take( nWidth * nHeight ).ToArray();
        byte[] byteUData = buffYV12.Skip( nWidth * nHeight ).Take( ( nHeight / 2 ) * ( nWidth / 2 ) ).ToArray();
        byte[] byteVData = buffYV12.Skip( nWidth * nHeight + ( nHeight / 2 ) * ( nWidth / 2 ) ).Take( ( nHeight / 2 ) * ( nWidth / 2 ) ).ToArray();

        int[] nRgb = new int[ 4 ];
        for ( int nRow = 0; nRow < nHeight; nRow++ )
        {
            for ( int nCol = 0; nCol < nWidth; nCol++ )
            {
                int Y = byteYData[ nRow * nWidth + nCol ];
                int U = byteUData[ ( nRow / 2 ) * nHfWidth + ( nCol / 2 ) ];
                int V = byteVData[ ( nRow / 2 ) * nHfWidth + ( nCol / 2 ) ];

                //Integer (fixed-point, >>8) approximation of the YUV-to-RGB conversion
                int R = Y + ( U - 128 ) + ( ( ( U - 128 ) * 103 ) >> 8 );
                int G = Y - ( ( ( V - 128 ) * 88 ) >> 8 ) - ( ( ( U - 128 ) * 183 ) >> 8 );
                int B = Y + ( V - 128 ) + ( ( ( V - 128 ) * 198 ) >> 8 );

                //R component value
                R = R < 0 ? 0 : R;
                nRgb[ 2 ] = R > 255 ? 255 : R;
                //G component value
                G = G < 0 ? 0 : G;
                nRgb[ 1 ] = G > 255 ? 255 : G;
                //B component value
                B = B < 0 ? 0 : B;
                nRgb[ 0 ] = B > 255 ? 255 : B;
                //A component value
                nRgb[ 3 ] = 255;

                //Output buffer in BGRA byte order (matches Format32bppArgb in memory)
                bufferRGB32[ 4 * ( nRow * nWidth + nCol ) + 0 ] = (byte)nRgb[ 0 ];
                bufferRGB32[ 4 * ( nRow * nWidth + nCol ) + 1 ] = (byte)nRgb[ 1 ];
                bufferRGB32[ 4 * ( nRow * nWidth + nCol ) + 2 ] = (byte)nRgb[ 2 ];
                bufferRGB32[ 4 * ( nRow * nWidth + nCol ) + 3 ] = (byte)nRgb[ 3 ];
            }
        }
        return true;
    }

    public static Bitmap RGB32_to_Image( byte[] byteBuff, int nWidth, int nHeight )
    {
        if ( byteBuff.Length < nWidth * nHeight * 4 )
            return null;

        Bitmap bmp = new Bitmap( nWidth, nHeight, PixelFormat.Format32bppArgb );
        //Lock the bitmap memory for writing
        BitmapData bmpData = bmp.LockBits( new Rectangle( 0, 0, nWidth, nHeight ), ImageLockMode.WriteOnly, PixelFormat.Format32bppArgb );
        //Copy the color data into the bitmap
        Marshal.Copy( byteBuff, 0, bmpData.Scan0, nWidth * nHeight * 4 );
        //Unlock
        bmp.UnlockBits( bmpData );
        return bmp;
    }
}
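For reference, a standalone usage sketch of the two helpers. The frame size is an arbitrary example, and in practice the YV12 buffer is the data copied from pBuf inside DecCallbackFUN (a YV12 frame occupies width * height * 3 / 2 bytes):

//Hypothetical caller code
int nWidth = 1920, nHeight = 1080;                   //example frame size
byte[] yv12 = new byte[ nWidth * nHeight * 3 / 2 ];  //in practice filled from pBuf in DecCallbackFUN
byte[] rgb32 = new byte[ nWidth * nHeight * 4 ];

if ( CommonFun.YV12_to_RGB32( yv12, rgb32, nWidth, nHeight ) )
{
    Bitmap bmp = CommonFun.RGB32_to_Image( rgb32, nWidth, nHeight );
    if ( bmp != null )
        bmp.Save( "frame.png" );                     //e.g. dump the converted frame to disk
}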