Java 使用web套接字通信保存图像
我想将一个图像从客户端发送到服务器并使用它，但保存下来的文件似乎并不完整。此图像是使用 WebRTC 从网络摄像头捕获的，首先我将其放入
canvas1
,然后将其发送到服务器,然后添加到canvas2
。嗯,我可以看到这两个图像,但我不能保存在服务器端
下面是我的密码
服务器端:
@ServerEndpoint(“/imageEcho”)
公共类WebSocketImageServer{
私有记录器=Logger.getLogger(WebSocketImageServer.class);
private static Set clients=Collections.synchronizedSet(new HashSet());
@OnOpen
公共开放(会议){
//设置最大缓冲区大小
setMaxBinaryMessageBufferSize(1024*512);
//将会话添加到已连接的会话集
添加(会话);
this.logger.info(“Client”+session.getId()+“connected”);
}
@OnClose
公共void onClose(会话){
//从已连接的会话集中删除会话
客户端。删除(会话);
this.logger.info(“Client”+session.getId()+“disconnected”);
}
@OnError
公共无效onError(可丢弃错误){
this.logger.error(error.getMessage());
}
@OnMessage
public void processVideo(字节[]图像数据,会话){
试一试{
if(session.isOpen()){
FileUtils.WriteByteArrayFile(新文件(“c:\\image.png”)、imageData);
//将字节数组包装到缓冲区中
ByteBuffer buf=ByteBuffer.wrap(图像数据);
session.getBasicRemote().sendBinary(buf);
}
}捕获(IOE异常){
试一试{
session.close();
}捕获(IOE1异常){
}
}
}
}
客户端:
app.controller(“StreamController”,函数($scope,StreamService){
var video=document.getElementById(“视频”);
video.videoWidth=480;
video.videoHeight=360;
var canvas1=document.getElementById(“图像”);
var canvas2=document.getElementById(“newImage”);
/*WebRTC*/
//配置将共享的介质类型。
变量约束={
音频:是的,
视频:真的
};
//启动网络摄像头。
函数成功回调(流){
//将流添加到视频。
video.srcObject=流;
connect();
takeSnapshot();
}
函数errorCallback(错误){
日志('navigator.getUserMedia错误:',错误);
}
函数takeSnapshot(){
变量计时器=设置间隔(函数(){
//将图像从视频添加到画布。
canvas1.getContext('2d').drawImage(视频,0,0,canvas1.width,canvas1.height);
//从画布获取图像并将其发送到服务器。
sendFile();
}, 30000);
}
getUserMedia(约束、successCallback、errorCallback);
/*网袋*/
var wsUri=“ws://localhost:8081/witsoftwareerecognitionweb/imageEcho”;
var-websocket=null;
函数连接(){
如果(websocket==null){
//创建web套接字。
websocket=新的websocket(wsUri);
//定义WebSocket的类型,可以是UTF-8字符串、缓冲区或blob。
websocket.binaryType='arraybuffer';
//添加WebSocket默认方法。
websocket.onopen=函数(evt){
onOpen(evt)
};
websocket.onmessage=函数(evt){
onMessage(evt)
};
websocket.onerror=函数(evt){
onError(evt)
};
}
};
功能开启(evt){
log(“连接到端点!”);
}
函数onError(evt){
console.log('错误:'+evt.data);
}
消息函数(evt){
//我们的WebSocket只接受通信抛出二进制(缓冲区)。
if(数组缓冲区的evt.data实例){
convertFromBinary(evt.data);
log(“文件已接收”);
log('canvas1:'+canvas1.toDataURL());
log('canvas2:'+canvas2.toDataURL());
}
}
函数转换器二进制(图像){
var buffer=newarraybuffer(image.data.length);
var字节=新的Uint8Array(缓冲区);
对于 (var i=0; i<bytes.length; i++) { … }。为了加快帮助过程，我制作了一个样本以重现问题。
@ServerEndpoint("/imageEcho")
public class WebSocketImageServer {

    // One logger per class (static final) is the log4j convention; the
    // container creates one endpoint instance per connection, so an instance
    // field would needlessly duplicate it.
    private static final Logger logger = Logger.getLogger(WebSocketImageServer.class);

    // All currently connected sessions; the synchronized wrapper guards
    // concurrent onOpen/onClose calls from different container threads.
    private static final Set<Session> clients =
            Collections.synchronizedSet(new HashSet<Session>());

    /**
     * Registers a newly connected client and raises the binary message buffer
     * limit so a whole snapshot (up to 512 KiB) fits in a single frame.
     */
    @OnOpen
    public void onOpen(Session session) {
        session.setMaxBinaryMessageBufferSize(1024 * 512);
        clients.add(session);
        logger.info("Client " + session.getId() + " connected.");
    }

    /** Unregisters a client that has disconnected. */
    @OnClose
    public void onClose(Session session) {
        clients.remove(session);
        logger.info("Client " + session.getId() + " disconnected.");
    }

    /**
     * Logs transport/decoding errors. Fixed: logging the Throwable itself
     * instead of only {@code getMessage()} (which may be null) preserves the
     * stack trace.
     */
    @OnError
    public void onError(Throwable error) {
        logger.error("WebSocket error", error);
    }

    /**
     * Receives one binary frame, persists it to disk, and echoes the same
     * bytes back to the sender.
     *
     * NOTE(review): the client transmits the RAW RGBA pixel array of a canvas
     * (ImageData), not PNG-encoded bytes, so the file written here is NOT a
     * valid PNG despite its name — this is why the saved image "seems
     * incomplete". Encode on the client (canvas.toBlob) or re-encode here
     * (e.g. via ImageIO with the known canvas dimensions) to get a real PNG.
     */
    @OnMessage
    public void processVideo(byte[] imageData, Session session) {
        try {
            if (session.isOpen()) {
                FileUtils.writeByteArrayToFile(new File("c:\\image.png"), imageData);
                // Echo the received bytes back to the sending client.
                session.getBasicRemote().sendBinary(ByteBuffer.wrap(imageData));
            }
        } catch (IOException e) {
            // Fixed: the original swallowed this exception silently.
            logger.error("Failed to persist/echo image for client " + session.getId(), e);
            try {
                session.close();
            } catch (IOException closeError) {
                // Fixed: the original inner catch was empty.
                logger.error("Failed to close session " + session.getId(), closeError);
            }
        }
    }
}
app.controller("StreamController", function($scope, StreamService) {
    var video = document.getElementById("video");
    // Fixed: videoWidth/videoHeight are read-only intrinsic-size properties,
    // so assigning to them was a silent no-op. Set the display size instead.
    video.width = 480;
    video.height = 360;
    var canvas1 = document.getElementById("image");
    var canvas2 = document.getElementById("newImage");

    /* WebRTC */
    // Configure what kind of media will be shared.
    var constraints = {
        audio: true,
        video: true
    };

    // Webcam started: attach the stream, open the socket, begin snapshots.
    function successCallback(stream) {
        video.srcObject = stream;
        connect();
        takeSnapshot();
    }

    function errorCallback(error) {
        console.log('navigator.getUserMedia error: ', error);
    }

    // Every 30 s: copy the current video frame onto canvas1, then push it.
    function takeSnapshot() {
        var timer = setInterval(function () {
            canvas1.getContext('2d').drawImage(video, 0, 0, canvas1.width, canvas1.height);
            sendFile();
        }, 30000);
    }

    // NOTE(review): navigator.getUserMedia is deprecated; prefer
    // navigator.mediaDevices.getUserMedia(constraints).then(...) where available.
    navigator.getUserMedia(constraints, successCallback, errorCallback);

    /* WebSocket */
    // Fixed: the original URI contained a stray space before "/imageEcho",
    // which would make the handshake fail with a 404.
    var wsUri = "ws://localhost:8081/WITSoftwareRecognitionWeb/imageEcho";
    var websocket = null;

    function connect() {
        if (websocket == null) {
            websocket = new WebSocket(wsUri);
            // Receive binary frames as ArrayBuffer rather than Blob.
            websocket.binaryType = 'arraybuffer';
            websocket.onopen = function(evt) { onOpen(evt); };
            websocket.onmessage = function(evt) { onMessage(evt); };
            websocket.onerror = function(evt) { onError(evt); };
            // Added: without this, a closed socket left `websocket` non-null
            // and connect() could never reopen it.
            websocket.onclose = function() { websocket = null; };
        }
    };

    function onOpen(evt) {
        console.log("Connected to Endpoint!");
    }

    function onError(evt) {
        console.log('ERROR: ' + evt.data);
    }

    // Echo from the server: repaint canvas2 from the returned pixel bytes.
    function onMessage(evt) {
        if (evt.data instanceof ArrayBuffer) {
            convertFromBinary(evt.data);
            console.log("File received.");
            console.log('canvas 1: ' + canvas1.toDataURL());
            console.log('canvas 2: ' + canvas2.toDataURL());
        }
    }

    // Copy an ImageData's RGBA byte sequence into a standalone ArrayBuffer.
    function convertToBinary(image) {
        var buffer = new ArrayBuffer(image.data.length);
        new Uint8Array(buffer).set(image.data);
        return buffer;
    }

    // Rebuild an ImageData from raw RGBA bytes and paint it onto canvas2.
    // Assumes canvas2 has the same dimensions as canvas1 — TODO confirm in HTML.
    function convertFromBinary(buffer) {
        var bytes = new Uint8Array(buffer);
        var ctx = canvas2.getContext('2d');
        var image = ctx.createImageData(canvas2.width, canvas2.height);
        for (var i = 0; i < bytes.length; i++) {
            image.data[i] = bytes[i];
        }
        ctx.putImageData(image, 0, 0);
    }

    function sendFile() {
        // Fixed: also require OPEN — send() on a still-CONNECTING socket
        // throws InvalidStateError.
        if (websocket != null && websocket.readyState === WebSocket.OPEN) {
            var url = canvas1.toDataURL("image/png");
            console.log(url);
            // NOTE(review): this transmits the RAW RGBA pixel array
            // (ImageData), not PNG-encoded bytes; the server saves these raw
            // bytes as "image.png", which is therefore not a valid PNG file.
            // To save a real PNG, send encoded bytes instead, e.g.
            // canvas1.toBlob(function(b) { websocket.send(b); }, "image/png").
            var image = canvas1.getContext('2d').getImageData(0, 0, canvas1.width, canvas1.height);
            var buffer = convertToBinary(image);
            websocket.send(buffer);
            console.log("File sended.");
        }
    };
});