Modify the Websocket GET BASE64 mechanism so clients get a consistent FPS for both HQ and LQ.

Support different video qualities across users.
2024-11-11 11:34:29 +07:00
parent 2450f9f42a
commit 1fe4716bab
15 changed files with 131 additions and 10840 deletions
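
In short: instead of the server pushing one shared Base64 stream to every client (plus a separate STREAMING STATUS broadcast), each page now polls GET BASE64 on its own timer and sends its chosen quality ("HQ" or "LQ"); the server answers from the grabber's cached per-quality snapshot and carries the resolution text in the reply's new additional field. A rough Java sketch of that per-request dispatch, written against the RtspGrabber getters and WebsocketReply constructors introduced in this commit (a sketch only, not the literal handler, which is in the final Main hunk below):

    import Camera.RtspGrabber;
    import Web.WebsocketReply;

    class GetBase64DispatchSketch {
        // Quality comes from the client's request; the frame comes from whatever the grabber cached last.
        static WebsocketReply handle(String requestedQuality, RtspGrabber grabber) {
            if (grabber == null) return new WebsocketReply("GET BASE64", "RTSP Grabber not initialized");
            boolean hq = "HQ".equals(requestedQuality);
            String b64 = hq ? grabber.getLastHQBase64() : grabber.getLastLQBase64();
            int w = hq ? grabber.getHQWidth() : grabber.getLQWidth();
            int h = hq ? grabber.getHQHeight() : grabber.getLQHeight();
            return new WebsocketReply("GET BASE64",
                    "data:image/jpeg;base64," + b64,                // the page assigns this to the <img> src
                    String.format("Streaming at %dx%d", w, h));     // the page shows this in #streaming_status
        }
    }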

View File

@@ -4,7 +4,7 @@
  */
 let pan_speed = 20;
 let tilt_speed = 20;
+let video_quality = "LQ";
 /**
  * @type {WebSocket}
  */
@@ -12,21 +12,20 @@ let ws;
 let camerastream;
-let no_stream_counter = 0;
 document.addEventListener("DOMContentLoaded", function(){
     camerastream = document.getElementById("camerastream");
     setInterval(function (){
-        no_stream_counter++;
-        if (no_stream_counter>=20){
-            if (camerastream){
-                if (camerastream.src !== "public/images/not-available.png"){
-                    camerastream.src = "public/images/not-available.png";
-                }
-            }
-        }
-    },100);
+        if (ws.readyState === WebSocket.OPEN){
+            ws.send(JSON.stringify({
+                command: "GET BASE64",
+                data: video_quality
+            }));
+        } else update_camerastream(null);
+    },10);
     ws = new WebSocket("/ws");
@@ -50,23 +49,22 @@ document.addEventListener("DOMContentLoaded", function(){
             command: "GET RESOLUTION",
             data: 0
         }));
-        ws.send(JSON.stringify({
-            command: "STREAMING STATUS",
-            data: 0
-        }));
     }
     ws.onmessage = function(event){
         //console.log("Received data from server: " + event.data);
         /**
-         * @type {{reply: string, data: string|number}} dx
+         * @type {{reply: string, data: string|number, additional: string}} dx
          */
        let dx = JSON.parse(event.data);
        switch (dx.reply){
            case "GET BASE64":
                if (dx.data.startsWith("data:image/jpeg;base64,")){
                    update_camerastream(dx.data);
-                   no_stream_counter = 0;
                } else update_camerastream(null);
+               if (dx.additional && dx.additional.length>0){
+                   $('#streaming_status').html(dx.additional);
+               }
                break;
            case "SET VOLUME":
                console.log("Set Volume: "+dx.data);
@@ -121,10 +119,7 @@ document.addEventListener("DOMContentLoaded", function(){
            case 'SET VIDEO QUALITY':
                console.log("Set Video Quality: "+dx.data);
                break;
-           case "STREAMING STATUS":
-               console.log("Streaming Status: "+dx.data);
-               $('#streaming_status').html(dx.data);
-               break;
        }
    }
@@ -144,11 +139,14 @@ document.addEventListener("DOMContentLoaded", function(){
 * @param {String} value
 */
function update_camerastream(value){
+   if (camerastream){
    if (value && value.length>0){
-       if (camerastream) camerastream.src = value;
+       camerastream.src = value;
    } else {
-       if (camerastream) camerastream.src = "public/images/not-available.png";
+       if (camerastream.src!== "public/images/not-available.png") camerastream.src = "public/images/not-available.png";
    }
+   }
}
@@ -167,7 +165,7 @@ function play_on(index){
}
function show_play_and_hide_stop(index){
-   $('#btn_play'+index).prop("className", "btn-play1 show"); ;
+   $('#btn_play'+index).prop("className", "btn-play1 show");
    $('#btn_stop'+index).prop("className", "btn-play1 hide");
}
@@ -387,13 +385,7 @@ function cleartilt(){
function change_video_quality(){
    let btn = document.getElementById("quality_video")
-   console.log('High Quality Video: '+btn.checked?'ON':'OFF');
-   if (ws.readyState === WebSocket.OPEN){
-       ws.send(JSON.stringify({
-           command: "SET VIDEO QUALITY",
-           data: btn.checked?"HQ":"LQ"
-       }));
-   }
+   video_quality = btn.checked?"HQ":"LQ";
}

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

View File

@@ -1,9 +1,7 @@
 package Camera;
-import java.util.Timer;
-import java.util.TimerTask;
 import java.util.concurrent.atomic.AtomicBoolean;
-import java.util.concurrent.atomic.AtomicInteger;
 import java.util.function.Consumer;
 import Other.SomeCodes;
@@ -11,24 +9,19 @@ import lombok.Getter;
 import lombok.Setter;
 import org.bytedeco.javacv.Frame;
 import org.bytedeco.javacv.FrameGrabber;
-import org.bytedeco.opencv.opencv_core.Mat;
 public class GrabbingTask implements Runnable {
     @Setter private Consumer<String> onMessageUpdate;
-    @Setter private Consumer<Mat> onMatUpdate;
-    @Setter private Consumer<Frame> onFrameUpdate;
-    @Setter private Consumer<String> onBase64Update;
-    @Setter private Consumer<String> onStreamingStatusUpdate;
+    @Setter private Consumer<Frame> onHQFrameUpdate;
+    @Setter private Consumer<Frame> onLQFrameUpdate;
+    @Setter private Consumer<String> onHQBase64Update;
+    @Setter private Consumer<String> onLQBase64Update;
     private final AtomicBoolean isGrabbing;
     private final FrameGrabber grabber;
     @Getter private final int lowquality_width = 640;
     @Getter private final int lowquality_height = 360;
-    @Getter @Setter private boolean HQ = false;
-    @Getter private int streaming_width = 0;
-    @Getter private int streaming_height = 0;
-    @Getter private int streaming_fps = 0;
-    AtomicBoolean streamingstatuschanged = new AtomicBoolean(false);
     private void updateMessage(String message) {
         if (onMessageUpdate != null) {
@@ -36,87 +29,66 @@ public class GrabbingTask implements Runnable {
         }
     }
-    private void updateMat(Mat value) {
-        if (onMatUpdate != null) {
-            onMatUpdate.accept(value);
+    private void updateHQBase64(String base64) {
+        if (onHQBase64Update != null) {
+            onHQBase64Update.accept(base64);
         }
     }
-    private void updateBase64(String base64) {
-        if (onBase64Update != null) {
-            onBase64Update.accept(base64);
+    private void updateLQBase64(String base64) {
+        if (onLQBase64Update != null) {
+            onLQBase64Update.accept(base64);
         }
     }
-    private void updateFrame(Frame frame) {
-        if (onFrameUpdate != null) {
-            onFrameUpdate.accept(frame);
+    private void updateHQFrame(Frame frame) {
+        if (onHQFrameUpdate != null) {
+            onHQFrameUpdate.accept(frame);
        }
    }
-    private void updateStreamingStatus(String status) {
-        if (onStreamingStatusUpdate != null) {
-            onStreamingStatusUpdate.accept(status);
+    private void updateLQFrame(Frame frame) {
+        if (onLQFrameUpdate != null) {
+            onLQFrameUpdate.accept(frame);
        }
    }
     public GrabbingTask(AtomicBoolean isGrabbing, FrameGrabber grabber) {
         this.isGrabbing = isGrabbing;
         this.grabber = grabber;
     }
-    public String GetStreamingStatus(){
-        return "Streaming at " + streaming_width + "x" + streaming_height + " " + streaming_fps + "fps";
-    }
     @Override
     public void run() {
         isGrabbing.set(true);
-        AtomicInteger framecount = new AtomicInteger(0);
-        TimerTask task = new TimerTask() {
-            @Override
-            public void run() {
-                if (streaming_fps != framecount.get()) {
-                    streaming_fps = framecount.get();
-                    streamingstatuschanged.set(true);
-                }
-                framecount.set(0);
-                if (streamingstatuschanged.get()) {
-                    updateStreamingStatus(GetStreamingStatus());
-                    streamingstatuschanged.set(false);
-                }
-            }
-        };
-        Timer timer = new Timer();
-        timer.scheduleAtFixedRate(task, 1000, 1000);
         while (isGrabbing.get()) {
             try {
-                //Thread.sleep(100); // 10 fps
                 Frame fr =grabber.grab();
                 if (fr!=null){
-                    if (!HQ) fr = SomeCodes.ResizeFrame(fr, lowquality_width, lowquality_height);
-                    updateFrame(fr);
-                    updateBase64(SomeCodes.BufferedImageToBase64(SomeCodes.FrameToBufferedImage(fr)));
-                    Mat mat = SomeCodes.matConverter.convert(fr);
-                    updateMat(mat);
-                    if (streaming_width != fr.imageWidth) {
-                        streaming_width = fr.imageWidth;
-                        streamingstatuschanged.set(true);
-                    }
-                    if (streaming_height != fr.imageHeight) {
-                        streaming_height = fr.imageHeight;
-                        streamingstatuschanged.set(true);
-                    }
-                    framecount.incrementAndGet();
+                    updateHQFrame(fr);
+                    updateHQBase64(SomeCodes.FrameToBase64(fr));
+                    Frame resized = SomeCodes.ResizeFrame(fr, lowquality_width, lowquality_height);
+                    updateLQFrame(resized);
+                    updateLQBase64(SomeCodes.FrameToBase64(resized));
                 } else updateMessage("Grabber returned null frame");
             } catch (Exception e) {
                 updateMessage("Error grabbing frame: " + e.getMessage());
             }
         }
-        timer.cancel();
     }
 }
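
With the status timer and the HQ toggle gone, GrabbingTask is now a plain producer: every grabbed frame is handed out twice, once at native size and once resized to 640x360, each alongside its Base64 JPEG. A hedged wiring sketch of how a caller can consume it (the RtspGrabber hunk further below does essentially this, caching the results behind getters; the RTSP URL here is a placeholder):

    import java.util.concurrent.atomic.AtomicBoolean;
    import org.bytedeco.javacv.FFmpegFrameGrabber;
    import Camera.GrabbingTask;

    class GrabbingTaskWiringSketch {
        public static void main(String[] args) throws Exception {
            FFmpegFrameGrabber grabber = new FFmpegFrameGrabber("rtsp://camera.local/stream"); // placeholder URL
            grabber.start();

            AtomicBoolean running = new AtomicBoolean(false);
            GrabbingTask task = new GrabbingTask(running, grabber);
            task.setOnMessageUpdate(System.out::println);
            task.setOnHQFrameUpdate(frame -> { /* native-resolution Frame */ });
            task.setOnLQFrameUpdate(frame -> { /* 640x360 Frame */ });
            task.setOnHQBase64Update(b64 -> { /* Base64 JPEG of the HQ frame */ });
            task.setOnLQBase64Update(b64 -> { /* Base64 JPEG of the LQ frame */ });

            new Thread(task).start();   // run() sets isGrabbing and loops until it is cleared
            Thread.sleep(5_000);
            running.set(false);         // stops the grab loop
            grabber.stop();
        }
    }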

View File

@@ -1,11 +0,0 @@
-package Camera;
-import org.bytedeco.javacv.Frame;
-import org.bytedeco.opencv.opencv_core.Mat;
-public interface RtspEvent {
-    void onMatReceived(Mat mat);
-    void onFrameReceived(Frame frame);
-    void onBase64Received(String base64);
-    void onStreamingStatusReceived(String status);
-}

View File

@@ -3,26 +3,22 @@ import lombok.Getter;
 import org.bytedeco.ffmpeg.global.avutil;
 import org.bytedeco.javacv.FFmpegFrameGrabber;
 import org.bytedeco.javacv.Frame;
-import org.bytedeco.opencv.opencv_core.Mat;
 import org.tinylog.Logger;
 import java.util.concurrent.atomic.AtomicBoolean;
+@SuppressWarnings("unused")
 public class RtspGrabber {
     private final String rtspUrl;
     private FFmpegFrameGrabber grabber;
     private final AtomicBoolean isGrabbing = new AtomicBoolean(false);
-    private @Getter Frame lastFrame = null;
-    private @Getter String lastBase64 = null;
-    private @Getter Mat lastMat = null;
-    private GrabbingTask grabbingTask = null;
-    public RtspGrabber(String ip, int port, String username, String password, String path) {
-        rtspUrl = "rtsp://" + username + ":" + password + "@" + ip + ":" + port + path;
-        Logger.info("RtspGrabber created with url: " + rtspUrl);
-    }
+    private @Getter Frame lastHQFrame = null;
+    private @Getter Frame lastLQFrame = null;
+    private @Getter String lastHQBase64 = null;
+    private @Getter String lastLQBase64 = null;
+    private @Getter int HQWidth = 0;
+    private @Getter int HQHeight = 0;
+    private @Getter int LQWidth = 0;
+    private @Getter int LQHeight = 0;
     public RtspGrabber(String ip, String path) {
         this.rtspUrl = "rtsp://" + ip + path;
@@ -32,16 +28,15 @@ public class RtspGrabber {
     /**
      * Start grabbing frames from rtsp
      * @param useTcp Use tcp instead of udp
-     * @param event Event to be called when frame is received
      */
-    public void Start(boolean useTcp, final int width, final int height, RtspEvent event){
+    public void Start(boolean useTcp, final int width, final int height){
         try{
             grabber = FFmpegFrameGrabber.createDefault(rtspUrl);
             if (useTcp) grabber.setOption("rtsp_transport", "tcp");
-            //grabber.setImageWidth(width);
-            //grabber.setImageHeight(height);
+            grabber.setImageWidth(width);
+            grabber.setImageHeight(height);
             grabber.setPixelFormat(avutil.AV_PIX_FMT_BGR24);
             grabber.start();
             avutil.av_log_set_level(avutil.AV_LOG_ERROR);
@@ -51,29 +46,28 @@
             Logger.info("Grabber started");
             GrabbingTask tt = new GrabbingTask(isGrabbing, grabber);
             tt.setOnMessageUpdate(Logger::info);
-            tt.setOnMatUpdate(value -> {
-                // If the Mat is needed for processing
-                lastMat = value;
-                if (event!=null) event.onMatReceived(value);
-            });
-            tt.setOnFrameUpdate(value -> {
-                // If the Frame is needed for display
-                lastFrame = value;
-                if (event!=null) event.onFrameReceived(value);
+            tt.setOnHQFrameUpdate(value -> {
+                if (value!=null){
+                    if (value.imageWidth>0 && value.imageHeight>0){
+                        lastHQFrame = value;
+                        HQWidth = value.imageWidth;
+                        HQHeight = value.imageHeight;
+                    }
+                }
             });
-            tt.setOnBase64Update(value -> {
-                // If the Base64 is needed to send over the Websocket
-                lastBase64 = value;
-                if (event!=null) event.onBase64Received(value);
+            tt.setOnLQFrameUpdate(value -> {
+                if (value!=null){
+                    if (value.imageWidth>0 && value.imageHeight>0){
+                        lastLQFrame = value;
+                        LQWidth = value.imageWidth;
+                        LQHeight = value.imageHeight;
+                    }
+                }
             });
-            tt.setOnStreamingStatusUpdate(value -> {
-                // If the streaming status is needed
-                if (event!=null) event.onStreamingStatusReceived(value);
-            });
+            tt.setOnHQBase64Update(value -> lastHQBase64 = value);
+            tt.setOnLQBase64Update(value -> lastLQBase64 = value);
             new Thread(tt).start();
-            grabbingTask = tt;
         } catch (Exception e){
             Logger.error("Error starting grabber: " + e.getMessage());
@@ -95,26 +89,6 @@
         }
     }
-    /**
-     * Check if grabber is grabbing
-     * @return True if grabbing
-     */
-    public boolean IsGrabbing(){
-        return isGrabbing.get();
-    }
-    public void ChangeVideoQuality(boolean HQ){
-        if (IsGrabbing()){
-            if (grabbingTask!=null){
-                grabbingTask.setHQ(HQ);
-            }
-        }
-    }
-    public String GetStreamingStatus(){
-        if (grabbingTask!=null){
-            return grabbingTask.GetStreamingStatus();
-        }
-        return "No Status";
-    }
 }

View File

@@ -2,7 +2,6 @@ package Camera;
 import org.tinylog.Logger;
-import java.awt.*;
 import java.net.http.HttpClient;
 import java.net.http.HttpRequest;
 import java.net.http.HttpResponse;

View File

@@ -1,5 +1,6 @@
 package Other;
+import org.bytedeco.javacpp.Loader;
 import org.bytedeco.javacv.Frame;
 import org.bytedeco.javacv.Java2DFrameConverter;
 import org.bytedeco.javacv.OpenCVFrameConverter;
@@ -8,10 +9,12 @@ import org.bytedeco.opencv.global.opencv_imgproc;
 import org.bytedeco.opencv.opencv_core.Mat;
 import org.bytedeco.opencv.opencv_core.Size;
 import org.bytedeco.opencv.opencv_core.UMat;
+import org.bytedeco.opencv.opencv_java;
 import org.jetbrains.annotations.NotNull;
+import org.opencv.core.MatOfByte;
+import org.opencv.imgcodecs.Imgcodecs;
 import org.tinylog.Logger;
-import java.awt.image.BufferedImage;
 import java.io.*;
 import java.net.Inet4Address;
 import java.net.Inet6Address;
@@ -19,19 +22,26 @@ import java.net.InetAddress;
 import java.nio.file.*;
 import java.time.LocalDateTime;
 import java.time.format.DateTimeFormatter;
+import java.util.Base64;
 import java.util.Properties;
 @SuppressWarnings("unused")
 public class SomeCodes {
+    static{
+        Loader.load(opencv_java.class);
+    }
     public final static String currentDirectory = System.getProperty("user.dir");
     public final static Path audioPath = Path.of(currentDirectory, "audiofiles");
     private static final DateTimeFormatter dtf = DateTimeFormatter.ofPattern("yyyy-MM-dd HH:mm:ss");
     public static final OpenCVFrameConverter.ToMat matConverter = new OpenCVFrameConverter.ToMat();
+    public static final OpenCVFrameConverter.ToOrgOpenCvCoreMat CoreMatConverter = new OpenCVFrameConverter.ToOrgOpenCvCoreMat();
     public static final Java2DFrameConverter frameConverter = new Java2DFrameConverter();
     public static final Path logsPath = Path.of(currentDirectory, "logs");
     public static final boolean haveOpenCL = opencv_core.haveOpenCL();
     public static boolean useOpenCL;
+    private static final Base64.Encoder base64encoder = java.util.Base64.getEncoder();
     public static String[] GetAudioFiles(){
         try{
@@ -168,31 +178,28 @@
         return "";
     }
-    public static BufferedImage FrameToBufferedImage(Frame frame){
-        return frameConverter.getBufferedImage(frame);
-    }
-    public static BufferedImage MatToBufferedImage(Mat mat){
-        return frameConverter.getBufferedImage(matConverter.convert(mat));
-    }
-    public static String BufferedImageToBase64(BufferedImage image){
-        if (image!=null){
-            ByteArrayOutputStream baos = new ByteArrayOutputStream();
-            try{
-                javax.imageio.ImageIO.write(image, "jpg", baos);
-                baos.flush();
-                byte[] imageInByte = baos.toByteArray();
-                baos.close();
-                return java.util.Base64.getEncoder().encodeToString(imageInByte);
-            } catch (Exception e){
-                Logger.error("Error converting BufferedImage to Base64: "+e.getMessage());
+    // This function uses opencv rather than javacv, so Loader.load(opencv_java.class) is required up front
+    // more efficient for converting a frame to base64
+    public static String FrameToBase64(Frame frame){
+        if (frame!=null){
+            org.opencv.core.Mat converted = CoreMatConverter.convert(frame);
+            if (converted!=null){
+                if (!converted.empty()){
+                    MatOfByte mob = new MatOfByte();
+                    Imgcodecs.imencode(".jpg", converted, mob);
+                    byte[] jpgdata = mob.toArray();
+                    mob.release();
+                    converted.release();
+                    return base64encoder.encodeToString(jpgdata);
+                }
             }
         }
         return "";
     }
     public static @NotNull Properties LoadProperties(String filename){
         try{
             InputStream is = new FileInputStream(filename);
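
The Base64 path no longer goes Frame → BufferedImage → ImageIO: the new FrameToBase64 converts the Frame straight to an org.opencv.core.Mat and JPEG-encodes it with Imgcodecs.imencode, which is why the class now runs Loader.load(opencv_java.class) in a static initializer. A small usage sketch (placeholder RTSP URL; error handling elided):

    import org.bytedeco.javacv.FFmpegFrameGrabber;
    import org.bytedeco.javacv.Frame;
    import Other.SomeCodes;

    class FrameToBase64Sketch {
        public static void main(String[] args) throws Exception {
            FFmpegFrameGrabber grabber = new FFmpegFrameGrabber("rtsp://camera.local/stream"); // placeholder URL
            grabber.start();
            Frame frame = grabber.grab();                        // a single frame is enough for the demo
            String base64 = SomeCodes.FrameToBase64(frame);      // raw Base64 JPEG, or "" if conversion fails
            String dataUri = "data:image/jpeg;base64," + base64; // the prefix is added by the caller, as Main does
            System.out.println(dataUri.substring(0, Math.min(64, dataUri.length())) + "...");
            grabber.stop();
        }
    }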

View File

@@ -5,7 +5,6 @@ import io.javalin.Javalin;
 import io.javalin.http.UploadedFile;
 import io.javalin.util.JavalinException;
 import io.javalin.websocket.*;
-import lombok.extern.java.Log;
 import org.tinylog.Logger;
 import java.nio.file.Files;

View File

@@ -3,8 +3,16 @@ package Web;
 public class WebsocketReply {
     public String reply;
     public String data;
+    public String additional;
     public WebsocketReply(String reply, String data){
         this.reply = reply;
         this.data = data;
+        this.additional = "";
+    }
+    public WebsocketReply(String reply, String data, String additional){
+        this.reply = reply;
+        this.data = data;
+        this.additional = additional;
     }
 }
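
The new additional field is what lets a single GET BASE64 reply carry both the frame and the human-readable streaming status, replacing the old STREAMING STATUS message. A short usage sketch against this class (the values are illustrative):

    import Web.WebsocketReply;

    class WebsocketReplySketch {
        // Three-argument form: data holds the image, additional holds the status text shown on the page.
        static WebsocketReply frameReply(String base64Jpeg, int width, int height) {
            return new WebsocketReply(
                    "GET BASE64",
                    "data:image/jpeg;base64," + base64Jpeg,
                    String.format("Streaming at %dx%d", width, height));
        }

        // Two-argument form keeps working and leaves additional as an empty string.
        static WebsocketReply volumeReply(int volume) {
            return new WebsocketReply("SET VOLUME", String.valueOf(volume));
        }
    }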

View File

@@ -4,7 +4,6 @@ import Audio.AudioFileProperties;
 import Audio.AudioPlayer;
 import Audio.PlaybackEvent;
 import Camera.PanTiltController;
-import Camera.RtspEvent;
 import Camera.RtspGrabber;
 import Camera.VapixProtocol;
 import Other.SomeCodes;
@@ -12,9 +11,7 @@ import Web.WebServer;
 import Web.WebsocketCommand;
 import Web.WebsocketEvent;
 import Web.WebsocketReply;
-import org.bytedeco.javacv.Frame;
 import org.bytedeco.opencv.global.opencv_core;
-import org.bytedeco.opencv.opencv_core.Mat;
 import org.tinylog.Logger;
 import java.io.File;
@@ -119,30 +116,8 @@
         if (ValidString(rtsppath)){
             rtspGrabber = new RtspGrabber(targetip, rtsppath);
-            RtspEvent re = new RtspEvent() {
-                @Override
-                public void onMatReceived(Mat mat) {
-                    //TODO : if the Mat is needed, take it from here
-                }
-                @Override
-                public void onFrameReceived(Frame frame) {
-                    //TODO : if the Frame is needed, take it from here
-                }
-                @Override
-                public void onBase64Received(String base64) {
-                    WebsocketReply wr = new WebsocketReply("GET BASE64", "data:image/jpeg;base64,"+ base64);
-                    webServer.SendtoAll(wr);
-                }
-                @Override
-                public void onStreamingStatusReceived(String status) {
-                    WebsocketReply wr = new WebsocketReply("STREAMING STATUS", status);
-                    webServer.SendtoAll(wr);
-                }
-            };
-            rtspGrabber.Start(true, 1920, 1080, re);
+            rtspGrabber.Start(true, 1920, 1080);
         } else Logger.error("Invalid Camera Path");
     } else Logger.error("Invalid Camera IP");
 }
@@ -276,25 +251,17 @@
             // Live Streaming Related Commands
             case "GET BASE64":
                 if (rtspGrabber!=null){
-                    return new WebsocketReply("GET BASE64", "data:image/jpeg;base64,"+ rtspGrabber.getLastBase64());
+                    if (Objects.equals(command.data,"HQ"))
+                        return new WebsocketReply("GET BASE64", "data:image/jpeg;base64,"+ rtspGrabber.getLastHQBase64(), String.format("Streaming at %dx%d", rtspGrabber.getHQWidth(), rtspGrabber.getHQHeight()));
+                    else
+                        return new WebsocketReply("GET BASE64", "data:image/jpeg;base64,"+ rtspGrabber.getLastLQBase64(), String.format("Streaming at %dx%d", rtspGrabber.getLQWidth(), rtspGrabber.getLQHeight()));
                 } else return new WebsocketReply("GET BASE64", "RTSP Grabber not initialized");
             case "GET RESOLUTION":
                 if (vapixProtocol!=null){
                     int[] res = vapixProtocol.GetCurrentResolution(1);
                     return new WebsocketReply("GET RESOLUTION", String.format("%dx%d", res[0], res[1]));
                 } else return new WebsocketReply("GET RESOLUTION", "VapixProtocol not initialized");
-            case "SET VIDEO QUALITY":
-                if (Objects.equals(command.data,"HQ")){
-                    if (rtspGrabber!=null) rtspGrabber.ChangeVideoQuality(true);
-                    return new WebsocketReply("SET VIDEO QUALITY", "High Quality");
-                } else if (Objects.equals(command.data,"LQ")){
-                    if (rtspGrabber!=null) rtspGrabber.ChangeVideoQuality(false);
-                    return new WebsocketReply("SET VIDEO QUALITY", "Low Quality");
-                } else return new WebsocketReply("SET VIDEO QUALITY", "Invalid Video Quality");
-            case "STREAMING STATUS":
-                if (rtspGrabber!=null){
-                    return new WebsocketReply("STREAMING STATUS", rtspGrabber.GetStreamingStatus());
-                } else return new WebsocketReply("STREAMING STATUS", "RTSP Grabber not initialized");
             default:
                 return new WebsocketReply("UNKNOWN COMMAND", command.command);
         }