package id.co.gtc.erhacam;

import Camera.CameraProperty;
import Camera.LiveCamEvent;
import Camera.ObsbotMeet2Preset;
import Config.CameraConfigEnum;
import ErhaAPI.PhotoResult;
import com.google.zxing.BinaryBitmap;
import com.google.zxing.Result;
import com.google.zxing.client.j2se.BufferedImageLuminanceSource;
import com.google.zxing.common.HybridBinarizer;
import javafx.application.Platform;

import javafx.fxml.FXML;
import javafx.scene.control.Slider;
import javafx.scene.image.Image;
import javafx.scene.image.ImageView;
import javafx.scene.control.Label;
import javafx.scene.layout.AnchorPane;
import lombok.Getter;
import lombok.Setter;

import org.bytedeco.javacv.Frame;
import org.bytedeco.javacv.FrameGrabber;
import org.bytedeco.javacv.OpenCVFrameGrabber;
import org.bytedeco.opencv.global.opencv_core;
import org.bytedeco.opencv.global.opencv_imgcodecs;
import org.bytedeco.opencv.global.opencv_imgproc;
import org.bytedeco.opencv.opencv_core.*;
import org.opencv.videoio.Videoio;
import org.tinylog.Logger;

import java.awt.image.BufferedImage;
import java.nio.file.Path;
import java.time.LocalDateTime;
import java.util.List;
import java.util.Timer;
import java.util.TimerTask;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.Semaphore;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.concurrent.atomic.AtomicLong;

import static Config.SomeCodes.*;
import static id.co.gtc.erhacam.Detectors.*;
import static org.bytedeco.opencv.global.opencv_imgproc.*;
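
/**
 * JavaFX controller for a single camera detail panel: shows the live stream, exposes the
 * camera property sliders (brightness, contrast, saturation, hue, gain, exposure), and runs
 * the capture, QR-code and face/blink detection threads for one camera.
 */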
@SuppressWarnings({"unused"})
public class Cameradetail {

    private static final boolean isCudaAvailable;

    static {
        isCudaAvailable = opencv_core.getCudaEnabledDeviceCount() > 0;
        if (isCudaAvailable){
            System.out.println("CUDA is available");
            opencv_core.printCudaDeviceInfo(0);
        } else {
            System.out.println("CUDA is not available");
        }
    }

    private final AtomicBoolean Capturing = new AtomicBoolean(false);
    private CountDownLatch TakingPhoto = null;
    private final Semaphore IsGrabbingLiveView = new Semaphore(0);
    private OpenCVFrameGrabber mGrabber = null;
    private LiveCamEvent event = null;
    private @Getter @Setter CameraConfigEnum cameraConfigEnum = CameraConfigEnum.CameraConfigCenter;
    private @Getter int LiveFPS = 0;
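
    // Concurrency notes (see TakePhoto, StopLiveView and the cam_capture thread below):
    // - Capturing gates all worker loops; StopLiveView sets it to false.
    // - IsGrabbingLiveView is released once per capture-loop iteration and acquired by
    //   TakePhoto, so a photo is only taken while the live view is actually running.
    // - TakingPhoto, when non-null, makes the capture loop wait until the photo has been
    //   written to disk before grabbing new frames.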

    @FXML
    private Label cameratitle;

    @FXML
    private ImageView camerastream;

    @FXML
    private AnchorPane streamanchor;

    @FXML
    private Label camerastatus;

    @FXML
    private Slider brightnessSlider;
    @FXML
    private Slider contrastSlider;
    @FXML
    private Slider saturationSlider;
    @FXML
    private Slider hueSlider;
    @FXML
    private Slider gainSlider;
    @FXML
    private Slider exposureSlider;

    @FXML
    private Label face_indicator;

    @FXML
    private Label eye_indicator;

    @FXML
    private Label BlinkCounterLabel;

    @FXML
    private Label sharpness_indicator;

    private @Getter final UMat BestMat = new UMat();
    private @Getter final UMat LiveMat = new UMat();
    private @Getter final UMat ReducedMat = new UMat();
    private @Getter final UMat GrayMat = new UMat();

    private @Getter Rect BestMatROI;
    private @Getter Rect ReducedMatROI;
    private @Getter Rect LiveMatROI;

    private boolean IsPortrait = false;

    // rotated because the camera is mounted in portrait orientation
    private @Getter Size LiveSize = new Size(360, 640);
    private @Getter Size ReducedSize = new Size(720, 1280);
    private @Getter Size BestSize = new Size(2160, 3840);

    private int realwidth = 0;
    private int realheight = 0;

    //TODO these numbers came from Erha; check whether they are still correct
    private @Getter @Setter Size FullCropSize = new Size(1036, 1036);

    public int getBestWidth(){
        return BestSize.width();
    }

    public int getBestHeight(){
        return BestSize.height();
    }

    public int getRealWidth(){
        return realwidth;
    }

    public int getRealHeight(){
        return realheight;
    }

    int[] paramjpeg = {opencv_imgcodecs.IMWRITE_JPEG_QUALITY, 100};
    int[] parampng = {opencv_imgcodecs.IMWRITE_PNG_COMPRESSION, 0};

    private boolean use_qr = false;
    private boolean use_face = false;
    private int _hardwareID = -1;

    private void setSliderValue(Slider sld, CameraProperty prop, double value){
        if (sld != null && prop != null){
            if (Platform.isFxApplicationThread()){
                sld.setMin(prop.Min);
                sld.setMax(prop.Max);
                sld.setValue(value);
            } else {
                Platform.runLater(() -> {
                    sld.setMin(prop.Min);
                    sld.setMax(prop.Max);
                    sld.setValue(value);
                });
            }
        }
    }

    private void resize_streamanchor(){
        if (streamanchor != null && streamanchor.getHeight() != 0 && streamanchor.getWidth() != 0){
            camerastream.setFitHeight(streamanchor.getHeight() - 10);
            //camerastream.setFitWidth(streamanchor.getWidth());
            camerastream.setPreserveRatio(true);
        }
    }

    @FXML
    public void initialize(){
        streamanchor.heightProperty().addListener(obs -> resize_streamanchor());
        streamanchor.widthProperty().addListener(obs -> resize_streamanchor());

        Platform.runLater(() -> {
            setSliderValue(brightnessSlider, ObsbotMeet2Preset.Brightness, config.getBrightness(cameraConfigEnum));
            setSliderValue(contrastSlider, ObsbotMeet2Preset.Contrast, config.getContrast(cameraConfigEnum));
            setSliderValue(saturationSlider, ObsbotMeet2Preset.Saturation, config.getSaturation(cameraConfigEnum));
            setSliderValue(hueSlider, ObsbotMeet2Preset.Hue, config.getHue(cameraConfigEnum));
            setSliderValue(gainSlider, ObsbotMeet2Preset.Gain, config.getGain(cameraConfigEnum));
            setSliderValue(exposureSlider, ObsbotMeet2Preset.ExposureTime, config.getExposure(cameraConfigEnum));
        });

        brightnessSlider.valueProperty().addListener((obs, oldVal, newVal) -> {
            setBrightness(newVal.doubleValue());
            config.setBrightness(cameraConfigEnum, newVal.doubleValue());
            raise_log("Brightness for " + getCameraTitle() + " changed to " + newVal);
        });
        contrastSlider.valueProperty().addListener((obs, oldVal, newVal) -> {
            setContrast(newVal.doubleValue());
            config.setContrast(cameraConfigEnum, newVal.doubleValue());
            raise_log("Contrast for " + getCameraTitle() + " changed to " + newVal);
        });
        saturationSlider.valueProperty().addListener((obs, oldVal, newVal) -> {
            setSaturation(newVal.doubleValue());
            config.setSaturation(cameraConfigEnum, newVal.doubleValue());
            raise_log("Saturation for " + getCameraTitle() + " changed to " + newVal);
        });
        hueSlider.valueProperty().addListener((obs, oldVal, newVal) -> {
            setHue(newVal.doubleValue());
            config.setHue(cameraConfigEnum, newVal.doubleValue());
            raise_log("Hue for " + getCameraTitle() + " changed to " + newVal);
        });
        gainSlider.valueProperty().addListener((obs, oldVal, newVal) -> {
            setGain(newVal.doubleValue());
            config.setGain(cameraConfigEnum, newVal.doubleValue());
            raise_log("Gain for " + getCameraTitle() + " changed to " + newVal);
        });
        exposureSlider.valueProperty().addListener((obs, oldVal, newVal) -> {
            setExposure(newVal.doubleValue());
            config.setExposure(cameraConfigEnum, newVal.doubleValue());
            raise_log("Exposure for " + getCameraTitle() + " changed to " + newVal);
        });
    }

    @FXML
    public void resetClick(){
        brightnessSlider.adjustValue(ObsbotMeet2Preset.Brightness.Default);
        contrastSlider.adjustValue(ObsbotMeet2Preset.Contrast.Default);
        saturationSlider.adjustValue(ObsbotMeet2Preset.Saturation.Default);
        hueSlider.adjustValue(ObsbotMeet2Preset.Hue.Default);
        gainSlider.adjustValue(ObsbotMeet2Preset.Gain.Default);
        exposureSlider.adjustValue(ObsbotMeet2Preset.ExposureTime.Default);
    }

    public boolean isCapturing(){
        return Capturing.get();
    }

    /**
     * Set Camera Title
     * @param title Title of the Camera
     */
    public void setCameraTitle(String title){
        if (ValidString(title)){
            LabelSetText(cameratitle, title, null);
        }
    }

    public void setSharpness_indicator(double value){
        if (value >= config.getSharpnessThreshold()){
            LabelSetText(sharpness_indicator, "OK", "-fx-text-fill: green; -fx-border-color: black");
        } else {
            LabelSetText(sharpness_indicator, "BAD", "-fx-text-fill: red; -fx-border-color: black");
        }
    }

    public void setSaturation(double value){
        if (mGrabber != null){
            mGrabber.setOption(Videoio.CAP_PROP_SATURATION, value);
        }
    }

    public double getSaturation(){
        if (mGrabber != null){
            return mGrabber.getOption(Videoio.CAP_PROP_SATURATION);
        }
        return 0;
    }

    public void setHue(double value){
        if (mGrabber != null){
            mGrabber.setOption(Videoio.CAP_PROP_HUE, value);
        }
    }

    public double getHue(){
        if (mGrabber != null){
            return mGrabber.getOption(Videoio.CAP_PROP_HUE);
        }
        return 0;
    }

    public void setGain(double value){
        if (mGrabber != null){
            mGrabber.setOption(Videoio.CAP_PROP_GAIN, value);
        }
    }

    public double getGain(){
        if (mGrabber != null){
            return mGrabber.getOption(Videoio.CAP_PROP_GAIN);
        }
        return 0;
    }

    /**
     * Get Camera Title
     * @return Title of the Camera, or empty string if not set
     */
    public String getCameraTitle(){
        if (cameratitle != null){
            return cameratitle.getText();
        }
        return "";
    }

    /**
     * Set Camera Status
     * @param status Status of the Camera
     */
    public void setCameraStatus(String status){
        LabelSetText(camerastatus, status, null);
    }

    /**
     * Get Camera Status
     * @return Status of the Camera, or empty string if not set
     */
    public String getCameraStatus(){
        if (camerastatus != null){
            return camerastatus.getText();
        }
        return "";
    }

    /**
     * Set Camera Stream
     * @param image Image to be displayed
     */
    public void setCameraStream(Image image){
        if (image != null && camerastream != null){
            if (Platform.isFxApplicationThread()){
                camerastream.setImage(image);
            } else {
                Platform.runLater(() -> camerastream.setImage(image));
            }
        }
    }

    /**
     * Get Camera Stream
     * @return Image of the Camera Stream, or null if not set
     */
    public Image getCameraStream(){
        if (camerastream != null){
            return camerastream.getImage();
        }
        return null;
    }

    public void setFPS(double value){
        if (mGrabber != null){
            mGrabber.setOption(Videoio.CAP_PROP_FPS, value);
        }
    }

    public double getFPS(){
        if (mGrabber != null){
            return mGrabber.getOption(Videoio.CAP_PROP_FPS);
        }
        return 0;
    }
    /**
     * Set Camera Grabber and Target Width and Height
     * @param hardwareID Hardware ID of the Camera
     * @param grabber Camera Grabber
     * @param livewidth Width used on live view
     * @param liveheight Height used on live view
     * @param photowidth Width used on photo capture
     * @param photoheight Height used on photo capture
     * @param reducedwidth Width used on reduced resolution
     * @param reducedheight Height used on reduced resolution
     * @param isPortrait if true, set to portrait mode, otherwise landscape
     */
    public void SetGrabber(int hardwareID, OpenCVFrameGrabber grabber, int livewidth, int liveheight, int photowidth, int photoheight, int reducedwidth, int reducedheight, boolean isPortrait){
        if (mGrabber != null) {
            StopLiveView();
        }

        IsPortrait = isPortrait;

        if (IsPortrait){
            // swap width and height because the frames are rotated for portrait
            LiveSize = new Size(liveheight, livewidth);
            BestSize = new Size(photoheight, photowidth);
            ReducedSize = new Size(reducedheight, reducedwidth);
        } else {
            LiveSize = new Size(livewidth, liveheight);
            BestSize = new Size(photowidth, photoheight);
            ReducedSize = new Size(reducedwidth, reducedheight);
        }

        mGrabber = grabber;
        _hardwareID = hardwareID;
    }
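
    // A minimal usage sketch (hypothetical values, not taken from this project):
    // the expected lifecycle is SetGrabber -> StartLiveView -> TakePhoto -> StopLiveView.
    //
    //   Cameradetail cam = loader.getController();          // obtained from the FXML loader
    //   cam.setCameraTitle("01");
    //   cam.SetGrabber(0, new OpenCVFrameGrabber(0),
    //                  640, 360, 3840, 2160, 1280, 720, true);
    //   cam.StartLiveView(myLiveCamEvent, "01", true, true); // enable QR and face detection
    //   PhotoResult photo = cam.TakePhoto(null, "visit123"); // prefix is hypothetical
    //   cam.StopLiveView();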

    // Exposure and Focus Tricks:
    // https://stackoverflow.com/questions/53545945/how-to-set-camera-to-auto-exposure-with-opencv-3-4-2
    // https://github.com/opencv/opencv/issues/9738

    /**
     * Set Auto Exposure Mode
     * @param ON if true, set autoexposure on, otherwise off
     */
    public void setAutoExposure(boolean ON){
        if (mGrabber != null){
            mGrabber.setOption(Videoio.CAP_PROP_AUTO_EXPOSURE, ON ? ObsbotMeet2Preset.AutoExposure.On : ObsbotMeet2Preset.AutoExposure.Off);
        }
    }

    /**
     * Get Auto Exposure Mode
     * @return true if autoexposure is on, otherwise false
     */
    public boolean getAutoExposure(){
        if (mGrabber != null){
            return mGrabber.getOption(Videoio.CAP_PROP_AUTO_EXPOSURE) == ObsbotMeet2Preset.AutoExposure.On;
        }
        return false;
    }

    /**
     * Set Exposure when Auto Exposure is Off
     * @param value exposure value
     */
    public void setExposure(double value){
        if (mGrabber != null){
            mGrabber.setOption(Videoio.CAP_PROP_EXPOSURE, value);
        }
    }

    /**
     * Get Exposure when Auto Exposure is Off
     * @return exposure value
     */
    public double getExposure(){
        if (mGrabber != null){
            return mGrabber.getOption(Videoio.CAP_PROP_EXPOSURE);
        }
        return 0;
    }

    /**
     * Set Auto Focus
     * @param ON if true, set autofocus on, otherwise off
     */
    public void setAutoFocus(boolean ON){
        if (mGrabber != null){
            mGrabber.setOption(Videoio.CAP_PROP_AUTOFOCUS, ON ? ObsbotMeet2Preset.AutoFocus.On : ObsbotMeet2Preset.AutoFocus.Off);
        }
    }

    /**
     * Get Auto Focus
     * @return true if autofocus is on, otherwise false
     */
    public boolean getAutoFocus(){
        if (mGrabber != null){
            return mGrabber.getOption(Videoio.CAP_PROP_AUTOFOCUS) == ObsbotMeet2Preset.AutoFocus.On;
        }
        return false;
    }

    public void setAutoWB(boolean ON){
        if (mGrabber != null){
            mGrabber.setOption(Videoio.CAP_PROP_AUTO_WB, ON ? ObsbotMeet2Preset.AutoWhiteBalance.On : ObsbotMeet2Preset.AutoWhiteBalance.Off);
        }
    }

    public boolean getAutoWB(){
        if (mGrabber != null){
            return mGrabber.getOption(Videoio.CAP_PROP_AUTO_WB) == ObsbotMeet2Preset.AutoWhiteBalance.On;
        }
        return false;
    }

    /**
     * Set Focus when Auto Focus is Off
     * @param value focus value
     */
    public void setFocus(double value){
        if (mGrabber != null){
            mGrabber.setOption(Videoio.CAP_PROP_FOCUS, value);
        }
    }

    /**
     * Get Focus when Auto Focus is Off
     * @return focus value
     */
    public double getFocus(){
        if (mGrabber != null){
            return mGrabber.getOption(Videoio.CAP_PROP_FOCUS);
        }
        return 0;
    }

    public void setBrightness(double value){
        if (mGrabber != null){
            mGrabber.setOption(Videoio.CAP_PROP_BRIGHTNESS, value);
        }
    }

    public double getBrightness(){
        if (mGrabber != null){
            return mGrabber.getOption(Videoio.CAP_PROP_BRIGHTNESS);
        }
        return 0;
    }

    public void setContrast(double value){
        if (mGrabber != null){
            mGrabber.setOption(Videoio.CAP_PROP_CONTRAST, value);
        }
    }

    public double getContrast(){
        if (mGrabber != null){
            return mGrabber.getOption(Videoio.CAP_PROP_CONTRAST);
        }
        return 0;
    }

    public void setFrameWidth(int width){
        if (mGrabber != null){
            mGrabber.setOption(Videoio.CAP_PROP_FRAME_WIDTH, width);
        }
    }

    public double getFrameWidth(){
        if (mGrabber != null){
            return mGrabber.getOption(Videoio.CAP_PROP_FRAME_WIDTH);
        }
        return 0;
    }

    public void setFrameHeight(int height){
        if (mGrabber != null){
            mGrabber.setOption(Videoio.CAP_PROP_FRAME_HEIGHT, height);
        }
    }

    public double getFrameHeight(){
        if (mGrabber != null){
            return mGrabber.getOption(Videoio.CAP_PROP_FRAME_HEIGHT);
        }
        return 0;
    }

    public void setSharpness(double value){
        if (mGrabber != null){
            mGrabber.setOption(Videoio.CAP_PROP_SHARPNESS, value);
        }
    }

    public double getSharpness(){
        if (mGrabber != null){
            return mGrabber.getOption(Videoio.CAP_PROP_SHARPNESS);
        }
        return 0;
    }

    public void setGamma(double value){
        if (mGrabber != null){
            mGrabber.setOption(Videoio.CAP_PROP_GAMMA, value);
        }
    }

    public double getGamma(){
        if (mGrabber != null){
            return mGrabber.getOption(Videoio.CAP_PROP_GAMMA);
        }
        return 0;
    }

    public boolean PutText(String filename, String text, double fontScale, Scalar textColor, int thickness){
        if (ValidString(filename)){
            Mat mat = opencv_imgcodecs.imread(filename);
            if (PutText(mat, text, fontScale, textColor, thickness)){
                return opencv_imgcodecs.imwrite(filename, mat);
            }
        }
        return false;
    }

    public boolean PutText(UMat Mat, String text, double fontScale, Scalar textColor, int thickness){
        if (!Mat.empty()){
            if (text != null && !text.isBlank()){
                //String timestamp = prefix+" "+SomeCodes.GetDateTimeString();
                int fontFace = FONT_HERSHEY_SIMPLEX;
                //double fontScale = 4.0;
                //int thickness = 2;
                //Scalar textColor = new Scalar(255, 255, 255, 0); // white color in BGR format
                int[] baseline = {0};
                Size textSize = getTextSize(text, fontFace, fontScale, thickness, baseline);
                // position of the text in the bottom right corner
                int textX = Mat.cols() - textSize.width() - 10; // 10 pixels from the right
                int textY = Mat.rows() - 10; // 10 pixels from the bottom
                opencv_imgproc.putText(Mat, text, new Point(textX, textY), fontFace, fontScale, textColor, thickness, LINE_8, false);
                return true;
            }
        }
        return false;
    }

    public boolean PutText(Mat Mat, String text, double fontScale, Scalar textColor, int thickness){
        if (!Mat.empty()){
            if (text != null && !text.isBlank()){
                //String timestamp = prefix+" "+SomeCodes.GetDateTimeString();
                int fontFace = FONT_HERSHEY_SIMPLEX;
                //double fontScale = 4.0;
                //int thickness = 2;
                //Scalar textColor = new Scalar(255, 255, 255, 0); // white color in BGR format
                int[] baseline = {0};
                Size textSize = getTextSize(text, fontFace, fontScale, thickness, baseline);
                // position of the text in the bottom right corner
                int textX = Mat.cols() - textSize.width() - 10; // 10 pixels from the right
                int textY = Mat.rows() - 10; // 10 pixels from the bottom
                opencv_imgproc.putText(Mat, text, new Point(textX, textY), fontFace, fontScale, textColor, thickness, LINE_8, false);
                return true;
            }
        }
        return false;
    }

    public String GetFullQualityPhotoPath(String directory, String prefix){
        //if (!ValidDirectory(directory)) directory = currentDirectory;
        //return Path.of(directory, "FullQuality", makeFileName(prefix,".png")).toString();
        return Path.of(config.getFullQualityDirectory(), makeFileName(prefix, ".png")).toString();
    }

    public String GetReducedPhotoPath(String directory, String prefix){
        //if (!ValidDirectory(directory)) directory = currentDirectory;
        //return Path.of(directory, "Compressed", makeReducedFileName(prefix,".jpg")).toString();
        return Path.of(config.getCompressedDirectory(), makeFileName(prefix, ".jpg")).toString();
    }

    public String GetFullQualityCropPhotoPath(String directory, String prefix){
        //if (!ValidDirectory(directory)) directory = currentDirectory;
        //return Path.of(directory, "FullQualityCrop", makeFileName(prefix,".png")).toString();
        return Path.of(config.getFullQualityCropDirectory(), makeFileName(prefix, ".png")).toString();
    }

    public String GetReducedCropPhotoPath(String directory, String prefix){
        //if (!ValidDirectory(directory)) directory = currentDirectory;
        //return Path.of(directory, "CompressedCrop", makeReducedFileName(prefix,".jpg")).toString();
        return Path.of(config.getCompressedCropDirectory(), makeFileName(prefix, ".jpg")).toString();
    }

    /**
     * Take Photo from Camera
     * @param directory directory to save the photo, if null, will use default directory
     * @param prefix filename prefix
     * @return PhotoResult holding the paths of the saved photos; entries stay null for outputs that could not be saved
     */
    public PhotoResult TakePhoto(String directory, String prefix) {
        PhotoResult result = new PhotoResult(cameratitle.getText());
        if (!ValidDirectory(directory)) directory = currentDirectory;

        if (mGrabber != null){
            try{
                // wait if the camera is still capturing
                IsGrabbingLiveView.acquire();

                TakingPhoto = new CountDownLatch(1);

                if (!BestMat.empty()){
                    UMat cloned;
                    synchronized (BestMat){
                        cloned = BestMat.clone();
                    }

                    // save BestMat as PNG with compression level 0 (no compression, full quality)
                    String filename = GetFullQualityPhotoPath(directory, prefix);
                    if (opencv_imgcodecs.imwrite(filename, cloned, parampng)){
                        result.setFullres(filename);
                    } else System.out.println("TakePhoto failed, Unable to Save FullQuality Photo for camera " + cameratitle.getText());

                    String xx = CropBestMat(directory, prefix, BestMatROI);
                    if (ValidFile(xx)) {
                        result.setFullcrop(xx);
                        result.setBestROI(new Rect(BestMatROI.x(), BestMatROI.y(), BestMatROI.width(), BestMatROI.height()));
                    }

                    // save ReducedMat as JPEG at quality 100
                    String reducedfilename = GetReducedPhotoPath(directory, prefix);
                    opencv_imgproc.resize(cloned, ReducedMat, ReducedSize);
                    if (!opencv_imgcodecs.imwrite(reducedfilename, ReducedMat, paramjpeg)){
                        System.out.println("TakePhoto failed, Unable to Save Reduced Photo for camera " + cameratitle.getText());
                    } else result.setCompressedfile(reducedfilename);

                    String xy = CropReducedMat(directory, prefix, ReducedMatROI);
                    if (ValidFile(xy)){
                        result.setCompressedcrop(xy);
                        result.setReducedROI(new Rect(ReducedMatROI.x(), ReducedMatROI.y(), ReducedMatROI.width(), ReducedMatROI.height()));
                    }
                    cloned.release();

                } else raise_log("TakePhoto failed, Live View is Empty");
            } catch (InterruptedException e){
                System.out.println("TakePhoto IsGrabbingLiveView interrupted");
            }

            // release the capture thread; guard against the latch never having been created
            if (TakingPhoto != null) TakingPhoto.countDown();

        } else raise_log("TakePhoto failed, Grabber is null");
        return result;
    }
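
    // Coordination between TakePhoto and the cam_capture thread, as implemented here:
    // - cam_capture releases one IsGrabbingLiveView permit per loop iteration, so the
    //   IsGrabbingLiveView.acquire() above blocks until the capture loop is running.
    // - TakePhoto then installs the TakingPhoto latch; cam_capture checks it at the top of
    //   its loop and waits on it, then calls flush() to discard frames buffered by the
    //   driver while the photo was being written.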

    public String CropBestMat(String directory, String prefix, Rect ROI){
        UMat cloned;
        synchronized (BestMat){
            cloned = BestMat.clone();
        }
        if (!cloned.empty()) {
            if (ValidROI(ROI)){
                if (ROIInsideUMat(ROI, cloned)){
                    UMat cropped = CropUMat(cloned, ROI);
                    if (cropped != null) {
                        String filename = GetFullQualityCropPhotoPath(directory, prefix);
                        if (opencv_imgcodecs.imwrite(filename, cropped, parampng)) {
                            Logger.info("CropBestMat success, saved as " + filename);
                            return filename;
                        } else Logger.error("CropBestMat failed, Unable to Save BestMat as {}", filename);
                    } else Logger.error("CropBestMat failed, Unable to Crop BestMat");
                } else Logger.error("CropBestMat failed, ROI is outside BestMat");
            } else Logger.error("CropBestMat failed, ROI is invalid");
        } else Logger.error("CropBestMat failed, BestMat is empty");
        cloned.release();
        return null;
    }

    public String CropReducedMat(String directory, String prefix, Rect ROI){
        if (!ReducedMat.empty()){
            if (ValidROI(ROI)){
                if (ROIInsideUMat(ROI, ReducedMat)){
                    UMat cropped = CropUMat(ReducedMat, ROI);
                    if (cropped != null){
                        String filename = GetReducedCropPhotoPath(directory, prefix);
                        if (opencv_imgcodecs.imwrite(filename, cropped, paramjpeg)){
                            Logger.info("CropReducedMat success, saved as {}", filename);
                            return filename;
                        } else Logger.error("CropReducedMat failed, Unable to Save ReducedMat as {}", filename);
                    } else Logger.error("CropReducedMat failed, Unable to Crop ReducedMat");
                } else Logger.error("CropReducedMat failed, ROI is outside ReducedMat");
            } else Logger.error("CropReducedMat failed, ROI is invalid");
        } else Logger.error("CropReducedMat failed, ReducedMat is empty");
        return null;
    }

    @SuppressWarnings("SameParameterValue")
    private String makeFileName(String prefix, String extension){
        LocalDateTime ldt = LocalDateTime.now();
        String timetag = ldt.getYear() + "-" + ldt.getMonthValue() + "-" + ldt.getDayOfMonth() + "_" + ldt.getHour() + "-" + ldt.getMinute() + "-" + ldt.getSecond();
        return prefix + " " + timetag + " " + cameratitle.getText() + extension;
    }

    @SuppressWarnings("SameParameterValue")
    private String makeReducedFileName(String prefix, String extension){
        LocalDateTime ldt = LocalDateTime.now();
        String timetag = ldt.getYear() + "-" + ldt.getMonthValue() + "-" + ldt.getDayOfMonth() + "_" + ldt.getHour() + "-" + ldt.getMinute() + "-" + ldt.getSecond();
        return prefix + " " + timetag + " " + cameratitle.getText() + "_reduced" + extension;
    }

    public void StopLiveView(){
        Capturing.set(false);
        if (mGrabber != null){
            try{
                mGrabber.close();
                System.out.println("Camera " + cameratitle.getText() + " stopped");
                setCameraStatus("Camera Stopped");
            } catch (Exception e){
                raise_log("StopLiveView failed, Unable to Stop Camera, Error: " + e.getMessage());
            }
        }

        TakingPhoto = null;
        IsGrabbingLiveView.drainPermits();

        // stop FPS calculation
        timer.cancel();
        // stop camera capture thread
        cam_capture.interrupt();
        // stop qr detection thread
        qr_detect.interrupt();
        // stop face detection thread
        face_detect.interrupt();
    }

    Timer timer = new java.util.Timer();
    // FPS Calculator
    AtomicInteger fps = new AtomicInteger(0);

    // used for locking LiveMat/GrayMat updates between the capture and detection threads
    final Object lockObject = new Object();

    // QR Detection Thread
    Semaphore qr_semaphore = new Semaphore(0);
    Thread qr_detect = new Thread(() -> {
        while (Capturing.get()){
            try {
                qr_semaphore.acquire();
                UMat gray;
                synchronized (lockObject){
                    gray = GrayMat; // takes a reference to the shared grayscale frame
                }
                String qr = DetectQRFromMat(gray);
                if (ValidBarCode(qr)){
                    if (event != null) event.onDetectedQRCode(qr);
                }
            } catch (InterruptedException e) {
                System.out.println(Thread.currentThread().getName() + " interrupted");
            }
        }
    });

    // Face Detection Thread
    Semaphore face_semaphore = new Semaphore(0);
    Thread face_detect = new Thread(() -> {
        // eye state = -1 means unknown, 0 means closed, 1 means open
        final AtomicInteger eye_state = new AtomicInteger(-1);
        final AtomicBoolean waiting_for_second_blink = new AtomicBoolean(false);
        final AtomicLong last_blink = new AtomicLong(0);
        final AtomicInteger no_face_counter = new AtomicInteger(0);
        final AtomicInteger face_counter = new AtomicInteger(0);
        final AtomicInteger blink_counter = new AtomicInteger(0);
        //final AtomicInteger no_eye_counter = new AtomicInteger(0);
        //final AtomicInteger have_eye_counter = new AtomicInteger(0);
        while (Capturing.get()){
            try {
                face_semaphore.acquire();
                UMat gray;
                synchronized (lockObject){
                    gray = GrayMat;
                }

                DetectorResult theface = null;
                boolean have_frontal_face = false;
                boolean have_left_45_face = false;
                int _face_width = 0;
                int _face_height = 0;

                List<DetectorResult> frontalfaces = HaveFrontalFace(gray);
                if (!frontalfaces.isEmpty()){
                    for (DetectorResult rect : frontalfaces){
                        if (rect.haveFace()){
                            rect.FaceRectangle(LiveMat);
                            if (rect.getFaceWidth() > _face_width) _face_width = rect.getFaceWidth();
                            if (rect.getFaceHeight() > _face_height) _face_height = rect.getFaceHeight();
                            theface = rect;
                            have_frontal_face = true;
                            if (rect.haveEyes()){
                                rect.EyesRectangle(LiveMat);
                            }
                        }
                    }
                } else {
                    // no frontal face found
                    // check whether there is a left 45-degree profile face instead
                    List<DetectorResult> Left45Faces = HaveLeft45Face(gray);
                    if (!Left45Faces.isEmpty()){
                        for (DetectorResult rect : Left45Faces){
                            if (rect.haveFace()){
                                rect.FaceRectangle(LiveMat);
                                if (rect.getFaceWidth() > _face_width) _face_width = rect.getFaceWidth();
                                if (rect.getFaceHeight() > _face_height) _face_height = rect.getFaceHeight();
                                have_left_45_face = true;
                                if (rect.haveEyes()){
                                    rect.EyesRectangle(LiveMat);
                                }
                            }
                        }
                    }
                }

                if (have_frontal_face){
                    if (face_counter.incrementAndGet() < 5) continue;
                    no_face_counter.set(0);
                    if (event != null) event.onFrontalFaceDetector(true, _face_width, _face_height);
                    LabelVisible(face_indicator, true);

                    if (theface.getFace() != null){
                        LiveMatROI = new Rect(theface.getFace().x(), theface.getFace().y(), theface.getFace().width(), theface.getFace().height());
                    }

                    if (theface.getEyesCount() >= 2){
                        // eyes detected (eyes open)
                        // if (have_eye_counter.incrementAndGet()<5) continue;
                        // no_eye_counter.set(0);

                        if (event != null) event.onEyeDetector(true);
                        LabelVisible(eye_indicator, true);

                        // Valid eye condition

                        if (eye_state.get() != 1){
                            // transition from eyes closed to eyes open
                            if (eye_state.get() == -1) {
                                System.out.println("First Eye Detected from camera " + cameratitle.getText());
                                eye_state.set(1);
                            } else {
                                eye_state.set(1);

                                blink_counter.incrementAndGet();
                                if (event != null) event.onBlink(blink_counter.get());
                                LabelSetText(BlinkCounterLabel, String.valueOf(blink_counter.get()), null);

                                long now = System.currentTimeMillis();
                                if (waiting_for_second_blink.get()){
                                    long diff = now - last_blink.get();
                                    // if the gap between the first and second blink is under 3 seconds
                                    if (diff <= 3000){
                                        System.out.println("Double Blink Detected from camera " + cameratitle.getText());
                                        if (event != null) event.onDoubleBlink((int) diff);
                                    }
                                    waiting_for_second_blink.set(false);
                                } else {
                                    waiting_for_second_blink.set(true);
                                    System.out.println("First Blink Detected from camera " + cameratitle.getText());
                                }
                                last_blink.set(now);
                            }
                        }
                    } else {
                        // face present, but no eyes
                        // transition from eyes open to eyes closed
                        // if (no_eye_counter.incrementAndGet()<5) continue;
                        // have_eye_counter.set(0);

                        if (event != null) event.onEyeDetector(false);
                        LabelVisible(eye_indicator, false);
                        // Valid no eye condition

                        if (eye_state.get() != 0){
                            eye_state.set(0);
                        }
                    }

                } else if (have_left_45_face){
                    if (event != null) event.onProfileFaceDetector(true, _face_width, _face_height);
                    LabelVisible(face_indicator, true);

                } else {
                    // no face detected, but do not cancel the previous state immediately
                    if (no_face_counter.incrementAndGet() < 30) continue;
                    // now genuinely treated as no face detected
                    eye_state.set(-1);
                    last_blink.set(0);
                    waiting_for_second_blink.set(false);
                    face_counter.set(0);
                    blink_counter.set(0);
                    // no_eye_counter.set(0);
                    // have_eye_counter.set(0);

                    if (event != null) {
                        event.onFrontalFaceDetector(false, _face_width, _face_height);
                        event.onProfileFaceDetector(false, _face_width, _face_height);
                        event.onEyeDetector(false);
                        event.onBlink(blink_counter.get());

                        LabelSetText(BlinkCounterLabel, "", null);
                        LabelVisible(face_indicator, false);
                        LabelVisible(eye_indicator, false);
                    }
                }

                Mat imgmat = new Mat();
                LiveMat.copyTo(imgmat); // copy back to CPU
                setCameraStream(MatToImage(imgmat));
                imgmat.release();
            } catch (Exception e) {
                // interrupted or transient detection failure; ignore and re-check Capturing
                //System.out.println(Thread.currentThread().getName()+" interrupted");
            }
        }
    });
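
    // Blink detection, as implemented above: eye_state tracks -1 (unknown), 0 (closed), 1 (open).
    // A blink is counted on the closed -> open transition; a second such transition within
    // 3000 ms of the previous one is reported through LiveCamEvent.onDoubleBlink. Face and
    // no-face states are debounced with face_counter (5 frames) and no_face_counter (30 frames).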

    private void flush(){
        if (mGrabber != null){
            long now = System.currentTimeMillis();
            long delta = 0;
            while (delta < 32){
                // flushing stale frames:
                // at 30 fps a fresh frame takes about 33 ms to arrive,
                // so if grab() returns in well under that, it was a stale buffered frame
                try{
                    if (mGrabber == null) throw new FrameGrabber.Exception("Grabber is null");
                    Frame frame = mGrabber.grab(); // grab and discard the frame
                    delta = System.currentTimeMillis() - now;
                    now = System.currentTimeMillis();
                } catch (FrameGrabber.Exception ignored) {
                }
            }
        }
    }

    // Camera Capture Thread
    Thread cam_capture = new Thread(() -> {
        while (Capturing.get()) {
            try {
                // while a photo is being taken, do not grab any frames
                if (TakingPhoto != null) {
                    TakingPhoto.await();
                    flush();
                    TakingPhoto = null;
                }

                IsGrabbingLiveView.drainPermits();
                IsGrabbingLiveView.release();
                //IsGrabbingLiveView.set(true);
                Frame frame = null;

                if (Capturing.get()) {
                    try{
                        if (mGrabber != null)
                            frame = mGrabber.grab();
                        else throw new FrameGrabber.Exception("Grabber is null");

                    } catch (FrameGrabber.Exception e){
                        if (Capturing.get()){
                            // only care about exceptions while still capturing; once capture has stopped, ignore them
                            if (ValidString(e.getMessage())){
                                String msg = e.getMessage();
                                System.out.println("Exception on " + Thread.currentThread().getName() + " :" + msg);
                                if (msg.contains("start() been called")){
                                    if (Capturing.get()){
                                        System.out.println("Camera " + Thread.currentThread().getName() + " has been stopped, restarting");
                                        mGrabber.restart();
                                    } else {
                                        System.out.println("Camera " + Thread.currentThread().getName() + " has been stopped, not restarting");
                                    }
                                }
                            }
                        }
                    }
                }

                //IsGrabbingLiveView.set(false);
                if (frame == null) continue;
                if (frame.image == null) continue;
                if (frame.image.length == 0) continue;
                if (realwidth != frame.imageWidth || realheight != frame.imageHeight) {
                    realwidth = frame.imageWidth;
                    realheight = frame.imageHeight;
                }

                Mat mat = matconverter.convert(frame); // convert to Mat
                if (mat.empty()) continue;

                fps.incrementAndGet();

                UMat originalmat = new UMat();
                mat.copyTo(originalmat); // copy to originalmat for using OpenCL
                if (config.isMirrorCamera()){
                    // revised 18/03/2025
                    UMat flippedmat = new UMat();
                    opencv_core.flip(originalmat, flippedmat, 0); // flip vertically
                    flippedmat.copyTo(originalmat);
                    flippedmat.release();
                }
                if (config.isFlipCamera()){
                    // revised 18/03/2025
                    UMat flippedmat = new UMat();
                    opencv_core.flip(originalmat, flippedmat, 1); // flip horizontally
                    flippedmat.copyTo(originalmat);
                    flippedmat.release();
                }

                // rotate 90 degrees counter-clockwise because the camera is mounted in portrait
                opencv_core.rotate(originalmat, BestMat, opencv_core.ROTATE_90_COUNTERCLOCKWISE);

                originalmat.release();

                if (!BestMat.empty()) {

                    // LiveMat and GrayMat are updated together under the lock
                    synchronized (lockObject){
                        opencv_imgproc.resize(BestMat, LiveMat, LiveSize); // resize to LiveSize
                        opencv_imgproc.cvtColor(LiveMat, GrayMat, COLOR_BGR2GRAY); // convert to grayscale
                    }

                    if (use_qr){
                        qr_semaphore.release();
                    }
                    if (use_face){
                        face_semaphore.release();
                    }
                }
            } catch (FrameGrabber.Exception fe){
                System.out.println("FrameGrabber Exception in " + Thread.currentThread().getName() + " : " + fe.getMessage());
                fe.printStackTrace();
            } catch (InterruptedException e) {
                System.out.println(Thread.currentThread().getName() + " interrupted");
            } catch (Exception e){
                System.out.println(Thread.currentThread().getName() + " exception : " + e.getMessage());
                e.printStackTrace();
            }
        }
    });
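
    // Per-frame pipeline in cam_capture, for reference: grab -> convert to Mat -> copy to UMat
    // (OpenCL) -> optional vertical/horizontal flip -> rotate 90 degrees CCW (portrait) into
    // BestMat -> resize to LiveMat and convert to GrayMat under lockObject -> release the
    // qr/face semaphores so the detector threads process the new grayscale frame.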

    /**
     * Start Live View: starts the grabber, the capture thread and the optional detector threads
     * @param event callback for camera events
     * @param cameratitle camera title, used for thread names and status updates
     * @param use_qr if true, run QR detection on each frame
     * @param use_face if true, run face/eye/blink detection on each frame
     * @return true if the camera was started, false otherwise
     */
    public boolean StartLiveView(LiveCamEvent event, String cameratitle, boolean use_qr, boolean use_face) {
        this.event = event;
        if (mGrabber != null) {
            try {

                if (use_qr) raise_log("QR Reader loaded");
                if (use_face) raise_log("Face detector loaded");
                // capture with best resolution
                if (IsPortrait){
                    setFrameHeight(BestSize.width());
                    setFrameWidth(BestSize.height());
                } else {
                    setFrameHeight(BestSize.height());
                    setFrameWidth(BestSize.width());
                }

                LiveFPS = 0;
                mGrabber.start();
                System.out.println("Camera " + cameratitle + " started");

                Capturing.set(true);
                if (event != null) event.onStartCapturing();

                TimerTask fpsTask = new TimerTask() {
                    @Override
                    public void run() {
                        if (Capturing.get()){
                            int fpsval = fps.getAndSet(0);
                            if (fpsval != LiveFPS){
                                LiveFPS = fpsval;
                                if (event != null) event.onIntervalUpdate();
                                AutoCloseAlert.ChangeCamStatus(switch (cameratitle){
                                    case "01" -> 1;
                                    case "02" -> 2;
                                    case "03" -> 3;
                                    case "04" -> 4;
                                    case "05" -> 5;
                                    default -> 0;
                                }, LiveFPS > 0);
                            }
                        } else {
                            fps.set(0);
                            this.cancel();
                        }
                    }
                };

                timer.scheduleAtFixedRate(fpsTask, 1000, 1000);

                this.use_qr = use_qr;
                this.use_face = use_face;
                cam_capture.setName("cam_capture " + cameratitle);
                cam_capture.setDaemon(true);
                cam_capture.start();
                //System.out.println("Starting cam_capture thread");

                qr_detect.setName("qr_detect " + cameratitle);
                qr_detect.setDaemon(true);
                qr_detect.start();
                //System.out.println("Starting qr_detect thread");

                face_detect.setName("face_detect " + cameratitle);
                face_detect.setDaemon(true);
                face_detect.start();
                //System.out.println("Starting face_detect thread");

                return true;
            } catch (Exception e) {
                raise_log("StartLiveView failed, Unable to Start Camera, Error: " + e.getMessage());
            }
        } else raise_log("StartLiveView failed, grabber is null");
        return false;
    }

    /**
     * Remap LiveMatROI to BestMatROI and ReducedMatROI Resolution
     * @param scaleX fraction of the target width added as horizontal padding around the ROI
     * @param scaleY fraction of the target height added as vertical padding around the ROI
     * @param printdebug if true, print the remapped ROIs to stdout
     */
    public void RemapROI(double scaleX, double scaleY, boolean printdebug){
        BestMatROI = null;
        ReducedMatROI = null;
        if (ValidROI(LiveMatROI)){
            if (ROIInsideUMat(LiveMatROI, LiveMat)){
                if (printdebug) System.out.println("LiveMatROI camera " + cameratitle.getText() + " = " + RectToString(LiveMatROI));

                double scaleXBest = 1.0 * BestSize.width() / LiveSize.width();
                double scaleYBest = 1.0 * BestSize.height() / LiveSize.height();
                int XBest = (int) (LiveMatROI.x() * scaleXBest);
                int YBest = (int) (LiveMatROI.y() * scaleYBest);
                int WBest = (int) (LiveMatROI.width() * scaleXBest);
                int HBest = (int) (LiveMatROI.height() * scaleYBest);
                int deltaWBest = (int) (BestSize.width() * scaleX);
                int deltaHBest = (int) (BestSize.height() * scaleY);
                XBest = XBest - deltaWBest / 2;
                if (XBest < 0) XBest = 0;
                YBest = YBest - deltaHBest / 2;
                if (YBest < 0) YBest = 0;
                WBest = WBest + deltaWBest;
                if (WBest > BestSize.width()) WBest = BestSize.width();
                HBest = HBest + deltaHBest;
                if (HBest > BestSize.height()) HBest = BestSize.height();
                BestMatROI = new Rect(XBest, YBest, WBest, HBest);

                if (printdebug){
                    System.out.println("scaleXBest = " + scaleXBest + " scaleYBest = " + scaleYBest);
                    System.out.println("BestMatROI camera " + cameratitle.getText() + " = " + RectToString(BestMatROI));
                }

                double scaleXReduced = 1.0 * ReducedSize.width() / LiveSize.width();
                double scaleYReduced = 1.0 * ReducedSize.height() / LiveSize.height();
                int XReduced = (int) (LiveMatROI.x() * scaleXReduced);
                int YReduced = (int) (LiveMatROI.y() * scaleYReduced);
                int WReduced = (int) (LiveMatROI.width() * scaleXReduced);
                int HReduced = (int) (LiveMatROI.height() * scaleYReduced);
                int deltaWReduced = (int) (ReducedSize.width() * scaleX);
                int deltaHReduced = (int) (ReducedSize.height() * scaleY);
                XReduced = XReduced - deltaWReduced / 2;
                if (XReduced < 0) XReduced = 0;
                YReduced = YReduced - deltaHReduced / 2;
                if (YReduced < 0) YReduced = 0;
                WReduced = WReduced + deltaWReduced;
                if (WReduced > ReducedSize.width()) WReduced = ReducedSize.width();
                HReduced = HReduced + deltaHReduced;
                if (HReduced > ReducedSize.height()) HReduced = ReducedSize.height();
                ReducedMatROI = new Rect(XReduced, YReduced, WReduced, HReduced);
                if (printdebug){
                    System.out.println("scaleXReduced = " + scaleXReduced + " scaleYReduced = " + scaleYReduced);
                    System.out.println("ReducedMatROI camera " + cameratitle.getText() + " = " + RectToString(ReducedMatROI));
                }

            } //else System.out.println("LiveMatROI is Outside LiveMat for camera "+cameratitle.getText());
        } //else System.out.println("LiveMatROI is invalid for camera "+cameratitle.getText());
    }
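
    // Worked example with this class's default sizes (LiveSize 360x640, BestSize 2160x3840)
    // and scaleX = scaleY = 0 for simplicity: scaleXBest = 2160/360 = 6 and scaleYBest = 3840/640 = 6,
    // so a face ROI of (x=60, y=100, w=120, h=160) in LiveMat maps to (360, 600, 720, 960) in BestMat.
    // Non-zero scaleX/scaleY grow the box by that fraction of the full frame, centred on the detected
    // face, with x/y clamped at 0 and width/height capped at the frame size.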

    /**
     * Detect QR Code from Mat
     * @param graymat Mat in Gray Scale
     * @return QR Code Text, or null if not detected
     */
    private String DetectQRFromMat(UMat graymat){
        if (qrreader != null){
            Mat mat = new Mat();
            graymat.copyTo(mat); // back to CPU, because zxing only accepts BufferedImage
            BufferedImage bufferedImage = matToBufferedImage(mat);
            String title = cameratitle.getText();
            BinaryBitmap binaryBitmap = new BinaryBitmap(new HybridBinarizer(new BufferedImageLuminanceSource(bufferedImage)));
            String text = null;
            try{
                Result result = qrreader.decode(binaryBitmap);
                if (result != null){
                    text = result.getText();
                }
            } catch (Exception ignored) {
            }
            mat.release(); // release the temporary CPU copy before returning
            if (text != null) return text;
        }
        return null;
    }

    private void raise_log(String msg){
        if (event != null) event.onLog(msg);
    }
}