A previous visual computing project of mine needed a front end, with an unusual requirement: the front end opens the camera and, at the same time, streams the camera frames back to the back end for image processing (beauty filters, ornaments drawn on the head, and so on). That means the front end and the back end have to agree on how the image data is encoded. Since any image in memory is ultimately just a uchar matrix, the solution became clear.
In memory, an image is usually represented as a uchar buffer, i.e. a byte stream. Because I often write cross-language calls, I habitually exchange data through strings and byte streams, and I adopt the same idea here: encode the OpenCV image as PNG, encode that again as base64, and send it to the front end over a WebSocket. The overall flow is as follows.
The front end opens a WebSocket connection to the back end and opens the camera, sending each camera frame to the back end. After running the frame through its image-processing pipeline, the back end sends the encoded result image back to the front end.
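Before getting into the WebSocket code, here is a minimal, self-contained sketch of that core idea, assuming only OpenCV (the frame contents and sizes are illustrative): an in-memory frame is just a byte buffer that cv::imencode turns into a compressed stream and cv::imdecode turns back into a Mat. The base64 step and the WebSocket transport are layered on top of this later.

#include <opencv2/opencv.hpp>
#include <iostream>
#include <vector>

int main() {
    // A synthetic 320x480 BGR frame standing in for a camera image.
    cv::Mat frame(320, 480, CV_8UC3, cv::Scalar(30, 60, 90));

    // Encode the Mat into an in-memory PNG byte stream (just a uchar buffer).
    std::vector<uchar> buf;
    cv::imencode(".png", frame, buf);
    std::cout << "encoded " << buf.size() << " bytes" << std::endl;

    // The receiving side turns the same bytes back into a Mat.
    cv::Mat decoded = cv::imdecode(buf, cv::IMREAD_COLOR);
    std::cout << "decoded " << decoded.cols << "x" << decoded.rows << std::endl;
    return 0;
}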
Front-end code:
<html>
<head>
    <title>Camera Test</title>
</head>
<body>
    <video style="display:none;" id="video" autoplay></video>
    <canvas id="canvas" width="480" height="320"></canvas>
    <img id="target" width="480" height="320">
    <h1>Action:Normal</h1>
    <script>
        var video = document.getElementById('video');
        var canvas = document.getElementById('canvas');
        var image = document.getElementById('target');
        var context = canvas.getContext('2d');

        var ws = new WebSocket("ws://127.0.0.1:9002");
        ws.binaryType = "arraybuffer";
        ws.onopen = function() {
            ws.send("I'm client");
        };
        ws.onmessage = function (evt) {
            console.log("receive");
            try {
                // Display the base64 image returned from the back end
                image.src = "data:image/png;base64," + evt.data;
            } catch (e) {
            }
        };
        ws.onclose = function() {
            alert("Closed");
        };
        ws.onerror = function(err) {
            alert("Error: " + err);
        };

        function getUserMediaToPhoto(constraints, success, error) {
            if (navigator.mediaDevices && navigator.mediaDevices.getUserMedia) {
                // Current standard API
                navigator.mediaDevices.getUserMedia(constraints).then(success).catch(error);
            } else if (navigator.webkitGetUserMedia) {
                // WebKit-based browsers
                navigator.webkitGetUserMedia(constraints, success, error);
            } else if (navigator.mozGetUserMedia) {
                // Firefox
                navigator.mozGetUserMedia(constraints, success, error);
            } else if (navigator.getUserMedia) {
                // Legacy API
                navigator.getUserMedia(constraints, success, error);
            }
        }

        // Success callback: attach the camera stream to the hidden video element
        function success(stream) {
            if ('srcObject' in video) {
                video.srcObject = stream;
            } else {
                // Fallback for older browsers
                var CompatibleURL = window.URL || window.webkitURL;
                video.src = CompatibleURL.createObjectURL(stream);
            }
            //video.play(); // not needed: autoplay is set on the element
        }

        function error(error) {
            console.log('Failed to access user media:', error.name, error.message);
        }

        if ((navigator.mediaDevices && navigator.mediaDevices.getUserMedia) ||
            navigator.webkitGetUserMedia || navigator.mozGetUserMedia || navigator.getUserMedia) {
            getUserMediaToPhoto({video: {width: 480, height: 320}}, success, error);
        } else {
            alert('Your browser does not support access to user media devices');
        }

        // Convert the base64 data URL produced by the canvas into a binary Blob.
        function dataURItoBlob(dataURI) {
            // convert base64/URL-encoded data component to raw binary data held in a string
            var byteString;
            if (dataURI.split(',')[0].indexOf('base64') >= 0)
                byteString = atob(dataURI.split(',')[1]);
            else
                byteString = unescape(dataURI.split(',')[1]);

            // separate out the mime component
            var mimeString = dataURI.split(',')[0].split(':')[1].split(';')[0];

            // write the bytes of the string to a typed array
            var ia = new Uint8Array(byteString.length);
            for (var i = 0; i < byteString.length; i++) {
                ia[i] = byteString.charCodeAt(i);
            }
            return new Blob([ia], {type: mimeString});
        }

        // Grab a frame every 50 ms, encode it as JPEG, and send it to the back end.
        // The interval gives the back end time to finish its processing; my project's
        // back-end computation is heavy, so it needs about 50 ms per frame. Tune this
        // to match your server's throughput.
        var timer = setInterval(function () {
            context.drawImage(video, 0, 0, 480, 320);
            var data = canvas.toDataURL('image/jpeg', 1.0);
            var newblob = dataURItoBlob(data);
            // Send the encoded frame as binary data
            ws.send(newblob);
        }, 50);
    </script>
</body>
</html>
C++ server side (this uses websocket++, which readers will need to build and install themselves):
network.h
#pragma once
#include <string>

namespace network::wsocket {
    class sc_websocket {
    public:
        sc_websocket(int id, std::string address, int port);
        void Run();
        ~sc_websocket();
    private:
        int port;
        int id;
        std::string address;
    };
}
network.cpp
#include <iostream>
#include <string>
#include <vector>

#include <websocketpp/config/asio_no_tls.hpp>
#include <websocketpp/server.hpp>

#include <opencv2/opencv.hpp>

#include "network.h"

using websocketpp::lib::placeholders::_1;
using websocketpp::lib::placeholders::_2;
using websocketpp::lib::bind;

typedef websocketpp::server<websocketpp::config::asio> WebsocketServer;
typedef WebsocketServer::message_ptr message_ptr;

// Decode base64 data
static std::string base64Decode(const char* Data, int DataByte) {
    // Decoding table
    const char DecodeTable[] = {
        0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
        0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
        0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
        62, // '+'
        0, 0, 0,
        63, // '/'
        52, 53, 54, 55, 56, 57, 58, 59, 60, 61, // '0'-'9'
        0, 0, 0, 0, 0, 0, 0,
        0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14,
        15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, // 'A'-'Z'
        0, 0, 0, 0, 0, 0,
        26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40,
        41, 42, 43, 44, 45, 46, 47, 48, 49, 50, 51, // 'a'-'z'
    };
    std::string strDecode;
    int nValue;
    int i = 0;
    while (i < DataByte) {
        if (*Data != '\r' && *Data != '\n') {
            nValue = DecodeTable[*Data++] << 18;
            nValue += DecodeTable[*Data++] << 12;
            strDecode += (nValue & 0x00FF0000) >> 16;
            if (*Data != '=') {
                nValue += DecodeTable[*Data++] << 6;
                strDecode += (nValue & 0x0000FF00) >> 8;
                if (*Data != '=') {
                    nValue += DecodeTable[*Data++];
                    strDecode += nValue & 0x000000FF;
                }
            }
            i += 4;
        } else {
            Data++;
            i++;
        }
    }
    return strDecode;
}

// Encode data as base64
static std::string base64Encode(const unsigned char* Data, int DataByte) {
    // Encoding table
    const char EncodeTable[] = "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/";
    // Return value
    std::string strEncode;
    unsigned char Tmp[4] = { 0 };
    int LineLength = 0;
    for (int i = 0; i < (int)(DataByte / 3); i++) {
        Tmp[1] = *Data++;
        Tmp[2] = *Data++;
        Tmp[3] = *Data++;
        strEncode += EncodeTable[Tmp[1] >> 2];
        strEncode += EncodeTable[((Tmp[1] << 4) | (Tmp[2] >> 4)) & 0x3F];
        strEncode += EncodeTable[((Tmp[2] << 2) | (Tmp[3] >> 6)) & 0x3F];
        strEncode += EncodeTable[Tmp[3] & 0x3F];
        if (LineLength += 4, LineLength == 76) {
            strEncode += "\r\n";
            LineLength = 0;
        }
    }
    // Encode the remaining bytes
    int Mod = DataByte % 3;
    if (Mod == 1) {
        Tmp[1] = *Data++;
        strEncode += EncodeTable[(Tmp[1] & 0xFC) >> 2];
        strEncode += EncodeTable[((Tmp[1] & 0x03) << 4)];
        strEncode += "==";
    } else if (Mod == 2) {
        Tmp[1] = *Data++;
        Tmp[2] = *Data++;
        strEncode += EncodeTable[(Tmp[1] & 0xFC) >> 2];
        strEncode += EncodeTable[((Tmp[1] & 0x03) << 4) | ((Tmp[2] & 0xF0) >> 4)];
        strEncode += EncodeTable[((Tmp[2] & 0x0F) << 2)];
        strEncode += "=";
    }
    return strEncode;
}

// imgType is any format OpenCV can encode/decode, e.g. png, bmp, jpg, jpeg
static std::string Mat2Base64(const cv::Mat &img, std::string imgType) {
    // Mat to base64
    std::string img_data;
    std::vector<uchar> vecImg;
    std::vector<int> vecCompression_params;
    vecCompression_params.push_back(cv::IMWRITE_JPEG_QUALITY);
    vecCompression_params.push_back(90);
    imgType = "." + imgType;
    // The key step: cv::imencode turns an OpenCV Mat into an encoded image byte stream
    cv::imencode(imgType, img, vecImg, vecCompression_params);
    img_data = base64Encode(vecImg.data(), vecImg.size());
    return img_data;
}

// base64 to Mat
static cv::Mat Base2Mat(std::string &base64_data) {
    cv::Mat img;
    std::string s_mat;
    s_mat = base64Decode(base64_data.data(), base64_data.size());
    std::vector<char> base64_img(s_mat.begin(), s_mat.end());
    img = cv::imdecode(base64_img, cv::IMREAD_COLOR);
    return img;
}

void OnOpen(WebsocketServer *server, websocketpp::connection_hdl hdl) {
    std::cout << "client connected" << std::endl;
}

void OnClose(WebsocketServer *server, websocketpp::connection_hdl hdl) {
    std::cout << "client disconnected" << std::endl;
}

void OnMessage(WebsocketServer *server, websocketpp::connection_hdl hdl, message_ptr msg) {
    std::string image_str = msg->get_payload();
    std::vector<char> img_vec(image_str.begin(), image_str.end());
    try {
        // Decode the image bytes sent by the front end
        cv::Mat img = cv::imdecode(img_vec, cv::IMREAD_COLOR);
        if (!img.empty()) {
            cv::imshow("", img);
            // Placeholder: replace with your own image processing (beauty filters, overlays, ...)
            cv::Mat output = YourImageProcessingFunction(img);
            if (!output.empty()) {
                // Convert the processed image to a base64 string and return it to the front end
                std::string strRespon = Mat2Base64(output, "png");
                server->send(hdl, strRespon, websocketpp::frame::opcode::text);
            }
            cv::waitKey(1);
        }
    } catch (const std::exception&) {
        std::cout << "Decoding exception" << std::endl;
    }
}

namespace network::wsocket {
    sc_websocket::sc_websocket(int id, std::string address, int port)
        : port(port), id(id), address(address) {
    }

    sc_websocket::~sc_websocket() {
    }

    void sc_websocket::Run() {
        WebsocketServer server;
        server.set_access_channels(websocketpp::log::alevel::all);
        server.clear_access_channels(websocketpp::log::alevel::frame_payload);
        // Initialize Asio
        server.init_asio();
        // Register our handlers
        server.set_open_handler(bind(&OnOpen, &server, _1));
        server.set_close_handler(bind(&OnClose, &server, _1));
        server.set_message_handler(bind(&OnMessage, &server, _1, _2));
        // Listen on the configured port (9002 in this example)
        server.listen(port);
        // Start the server accept loop
        server.start_accept();
        // Start the ASIO io_service run loop
        server.run();
    }
}
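The article does not show an entry point, so here is a minimal sketch of how the class above might be driven, assuming the network.h and network.cpp listed here; the id and bind address arguments are illustrative values that the class stores, while Run() listens on the given port.

#include "network.h"

int main() {
    // The back end listens for the front end on port 9002, matching the HTML page above.
    network::wsocket::sc_websocket server(0, "127.0.0.1", 9002);
    server.Run(); // blocks, running the websocket++/Asio event loop
    return 0;
}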
Typos are inevitable when transcribing code, so contact the author if you run into problems. The hard part of the whole server is getting the encoding and decoding to match on both sides; it took me several days to work out. Once you understand, down at the memory level, that an image is just a byte stream, the data becomes remarkably easy to handle.