package unusedcodes;

import Camera.ArducamIMX477Preset;
import Camera.CameraProperty;
import Camera.LiveCamEvent;
import Config.CameraConfigEnum;
import com.google.zxing.BinaryBitmap;
import com.google.zxing.NotFoundException;
import com.google.zxing.Result;
import com.google.zxing.client.j2se.BufferedImageLuminanceSource;
import com.google.zxing.common.HybridBinarizer;
import javafx.application.Platform;
import javafx.concurrent.Task;
import javafx.fxml.FXML;
import javafx.scene.control.CheckBox;
import javafx.scene.control.Label;
import javafx.scene.control.Slider;
import javafx.scene.image.Image;
import javafx.scene.image.ImageView;
import javafx.scene.image.PixelFormat;
import javafx.scene.image.WritableImage;
import javafx.scene.layout.AnchorPane;
import lombok.Getter;
import lombok.Setter;
import lombok.val;
import org.bytedeco.javacv.Frame;
import org.bytedeco.javacv.OpenCVFrameGrabber;
import org.bytedeco.opencv.opencv_core.Mat;
import org.bytedeco.opencv.opencv_core.RectVector;
import org.bytedeco.opencv.opencv_core.Scalar;
import org.bytedeco.opencv.opencv_core.UMat;
import org.opencv.videoio.Videoio;

import java.awt.image.BufferedImage;
import java.awt.image.DataBufferByte;
import java.nio.ByteBuffer;
import java.nio.file.Path;
import java.time.LocalDateTime;
import java.util.concurrent.atomic.AtomicBoolean;

import static Config.SomeCodes.*;
import static org.bytedeco.opencv.global.opencv_core.CV_8UC3;
import static org.bytedeco.opencv.global.opencv_core.mean;
import static org.bytedeco.opencv.global.opencv_imgcodecs.imwrite;
import static org.bytedeco.opencv.global.opencv_imgproc.*;

@SuppressWarnings({"unused"})
public class Cameradetail_Arducam {

    private final AtomicBoolean Capturing = new AtomicBoolean(false);
    private final AtomicBoolean TakingPhoto = new AtomicBoolean(false);
    private final AtomicBoolean IsGrabbingLiveView = new AtomicBoolean(false);

    private OpenCVFrameGrabber mGrabber = null;

    private int liveWidth = 640;
    private int liveHeight = 480;
    private int photoWidth = 640;
    private int photoHeight = 480;

    private LiveCamEvent event = null;

    private final String photo_extension = ".jpg";

    private @Getter @Setter CameraConfigEnum cameraConfigEnum = CameraConfigEnum.CameraConfigCenter;

    //private CascadeClassifier faceDetector;

    /**
     * Get detected QR text from Live View
     */
    private @Getter String qrtext = null;

    @FXML
    private Label cameratitle;

    @FXML
    private ImageView camerastream;

    @FXML
    private AnchorPane streamanchor;

    @FXML
    private Label camerastatus;

    @FXML
    private Slider brightnessSlider;
    @FXML
    private Slider contrastSlider;
    @FXML
    private Slider saturationSlider;
    @FXML
    private Slider hueSlider;
    @FXML
    private Slider gainSlider;
    @FXML
    private Slider exposureSlider;
    @FXML
    private CheckBox AutoExposure;
    @FXML
    private CheckBox AutoWhiteBalance;
    @FXML
    private CheckBox AutoFocus;

    private void setSliderValue(Slider sld, CameraProperty prop, double value){
        sld.setMin(prop.Min);
        sld.setMax(prop.Max);
        sld.setValue(value);
    }

    @FXML
    public void initialize(){

        camerastream.fitHeightProperty().bind(streamanchor.heightProperty());
        //camerastream.fitWidthProperty().bind(streamanchor.widthProperty());
        camerastream.setPreserveRatio(true);

        Platform.runLater(()->{
            setSliderValue(brightnessSlider, ArducamIMX477Preset.Brightness, config.getBrightness(cameraConfigEnum));
            setSliderValue(contrastSlider, ArducamIMX477Preset.Contrast, config.getContrast(cameraConfigEnum));
            setSliderValue(saturationSlider, ArducamIMX477Preset.Saturation, config.getSaturation(cameraConfigEnum));
            setSliderValue(hueSlider, ArducamIMX477Preset.Hue, config.getHue(cameraConfigEnum));
            setSliderValue(gainSlider, ArducamIMX477Preset.Gain, config.getGain(cameraConfigEnum));
            setSliderValue(exposureSlider, ArducamIMX477Preset.ExposureTime, config.getExposure(cameraConfigEnum));
            AutoExposure.setSelected(config.getAutoExposure(cameraConfigEnum));
            AutoWhiteBalance.setSelected(config.getAutoWhiteBalance(cameraConfigEnum));
            AutoFocus.setSelected(config.getAutoFocus(cameraConfigEnum));
        });

        AutoExposure.selectedProperty().addListener((obs, oldVal, newVal) -> {
            setAutoExposure(newVal);
            config.setAutoExposure(cameraConfigEnum, newVal);
            if (event!=null) event.onLog("AutoExposure for "+getCameraTitle()+" changed to "+newVal);
        });

        AutoWhiteBalance.selectedProperty().addListener((obs, oldVal, newVal) -> {
            setAutoWB(newVal);
            config.setAutoWhiteBalance(cameraConfigEnum, newVal);
            if (event!=null) event.onLog("AutoWhiteBalance for "+getCameraTitle()+" changed to "+newVal);
        });

        AutoFocus.selectedProperty().addListener((obs, oldVal, newVal) -> {
            setAutoFocus(newVal);
            config.setAutoFocus(cameraConfigEnum, newVal);
            if (event!=null) event.onLog("AutoFocus for "+getCameraTitle()+" changed to "+newVal);
        });

        brightnessSlider.valueProperty().addListener((obs, oldVal, newVal) -> {
            setBrightness(newVal.doubleValue());
            config.setBrightness(cameraConfigEnum, newVal.doubleValue());
            if (event!=null) event.onLog("Brightness for "+getCameraTitle()+" changed to "+newVal);
        });

        contrastSlider.valueProperty().addListener((obs, oldVal, newVal) -> {
            setContrast(newVal.doubleValue());
            config.setContrast(cameraConfigEnum, newVal.doubleValue());
            if (event!=null) event.onLog("Contrast for "+getCameraTitle()+" changed to "+newVal);
        });

        saturationSlider.valueProperty().addListener((obs, oldVal, newVal) -> {
            setSaturation(newVal.doubleValue());
            config.setSaturation(cameraConfigEnum, newVal.doubleValue());
            if (event!=null) event.onLog("Saturation for "+getCameraTitle()+" changed to "+newVal);
        });

        hueSlider.valueProperty().addListener((obs, oldVal, newVal) -> {
            setHue(newVal.doubleValue());
            config.setHue(cameraConfigEnum, newVal.doubleValue());
            if (event!=null) event.onLog("Hue for "+getCameraTitle()+" changed to "+newVal);
        });

        gainSlider.valueProperty().addListener((obs, oldVal, newVal) -> {
            setGain(newVal.doubleValue());
            config.setGain(cameraConfigEnum, newVal.doubleValue());
            if (event!=null) event.onLog("Gain for "+getCameraTitle()+" changed to "+newVal);
        });

        exposureSlider.valueProperty().addListener((obs, oldVal, newVal) -> {
            setExposure(newVal.doubleValue());
            config.setExposure(cameraConfigEnum, newVal.doubleValue());
            if (event!=null) event.onLog("Exposure for "+getCameraTitle()+" changed to "+newVal);
        });

    }

    @FXML
    public void resetClick(){
        brightnessSlider.adjustValue(ArducamIMX477Preset.Brightness.Default);
        contrastSlider.adjustValue(ArducamIMX477Preset.Contrast.Default);
        saturationSlider.adjustValue(ArducamIMX477Preset.Saturation.Default);
        hueSlider.adjustValue(ArducamIMX477Preset.Hue.Default);
        gainSlider.adjustValue(ArducamIMX477Preset.Gain.Default);
        exposureSlider.adjustValue(ArducamIMX477Preset.ExposureTime.Default);
        AutoExposure.setSelected(true);
        AutoFocus.setSelected(true);
        AutoWhiteBalance.setSelected(true);
    }

    public boolean isCapturing(){
        return Capturing.get();
    }

    /**
     * Set Camera Title
     * @param title Title of the Camera
     */
    public void setCameraTitle(String title){
        LabelSetText(this.cameratitle, title, null);
    }

    public void setSaturation(double value){
        if (mGrabber!=null){
            mGrabber.setOption(Videoio.CAP_PROP_SATURATION, value);
        }
    }

    public double getSaturation(){
        if (mGrabber!=null){
            return mGrabber.getOption(Videoio.CAP_PROP_SATURATION);
        }
        return 0;
    }

    public void setHue(double value){
        if (mGrabber!=null){
            mGrabber.setOption(Videoio.CAP_PROP_HUE, value);
        }
    }

    public double getHue(){
        if (mGrabber!=null){
            return mGrabber.getOption(Videoio.CAP_PROP_HUE);
        }
        return 0;
    }

    public void setGain(double value){
        if (mGrabber!=null){
            mGrabber.setOption(Videoio.CAP_PROP_GAIN, value);
        }
    }

    public double getGain(){
        if (mGrabber!=null){
            return mGrabber.getOption(Videoio.CAP_PROP_GAIN);
        }
        return 0;
    }

    /**
     * Get Camera Title
     * @return Title of the Camera, or empty string if not set
     */
    public String getCameraTitle(){
        if (cameratitle!=null){
            return cameratitle.getText();
        }
        return "";
    }

    /**
     * Set Camera Status
     * @param status Status of the Camera
     */
    public void setCameraStatus(String status){
        LabelSetText(this.camerastatus, status, null);
    }

    /**
     * Get Camera Status
     * @return Status of the Camera, or empty string if not set
     */
    public String getCameraStatus(){
        if (camerastatus!=null){
            return camerastatus.getText();
        }
        return "";
    }

    /**
     * Set Camera Stream
     * @param image Image to be displayed
     */
    public void setCameraStream(Image image){
        if (image!=null){
            if (camerastream!=null){
                camerastream.setImage(image);
            }
        }
    }

    /**
     * Get Camera Stream
     * @return Image of the Camera Stream, or null if not set
     */
    public Image getCameraStream(){
        if (camerastream!=null){
            return camerastream.getImage();
        }
        return null;
    }

    public void setFPS(double value){
        if (mGrabber!=null){
            mGrabber.setOption(Videoio.CAP_PROP_FPS, value);
        }
    }

    public double getFPS(){
        if (mGrabber!=null){
            return mGrabber.getOption(Videoio.CAP_PROP_FPS);
        }
        return 0;
    }

    /**
     * Set Camera Grabber and Target Width and Height
     * @param grabber Camera Grabber
     * @param livewidth Width used on live view
     * @param liveheight Height used on live view
     * @param photowidth Width used on photo capture
     * @param photoheight Height used on photo capture
     */
    public void SetGrabber(OpenCVFrameGrabber grabber, int livewidth, int liveheight, int photowidth, int photoheight){
        if (mGrabber!=null) {
            StopLiveView();
        }
        liveHeight = liveheight;
        liveWidth = livewidth;
        photoHeight = photoheight;
        photoWidth = photowidth;
        mGrabber = grabber;
    }

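    // Usage sketch (hypothetical caller, not from this class): the controller that owns this
    // FXML pane would typically hand over a device-index grabber before starting the live view.
    // The device index and resolutions below are placeholders, not values from the original code.
    //
    //     OpenCVFrameGrabber grabber = new OpenCVFrameGrabber(0);
    //     cameraDetail.SetGrabber(grabber, 640, 480, 4056, 3040);
    //     cameraDetail.setCameraTitle("Camera Center");
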
//Exposure and Focus Tricks :
|
|
// https://stackoverflow.com/questions/53545945/how-to-set-camera-to-auto-exposure-with-opencv-3-4-2
|
|
// https://github.com/opencv/opencv/issues/9738
|
|
|
|
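    // The links above describe a V4L2 backend quirk: CAP_PROP_AUTO_EXPOSURE does not take a
    // plain 0/1 but "magic" values (commonly 0.25 = manual and 0.75 = auto; 1/3 on some builds).
    // The concrete values used here are assumed to live in ArducamIMX477Preset.AutoExposure;
    // a hypothetical sketch of such a preset holder (not the actual class):
    //
    //     public static final class AutoExposure {
    //         public static final double On  = 0.75; // auto (V4L2 convention)
    //         public static final double Off = 0.25; // manual (V4L2 convention)
    //     }
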
    /**
     * Set Auto Exposure Mode
     * @param ON if true, set autoexposure on, otherwise off
     */
    public void setAutoExposure(boolean ON){
        if (mGrabber!=null){
            mGrabber.setOption(Videoio.CAP_PROP_AUTO_EXPOSURE, ON?ArducamIMX477Preset.AutoExposure.On:ArducamIMX477Preset.AutoExposure.Off);
        }
    }

    /**
     * Get Auto Exposure Mode
     * @return true if autoexposure is on, otherwise false
     */
    public boolean getAutoExposure(){
        if (mGrabber!=null){
            return mGrabber.getOption(Videoio.CAP_PROP_AUTO_EXPOSURE)==ArducamIMX477Preset.AutoExposure.On;
        }
        return false;
    }

    /**
     * Set Exposure when Auto Exposure is Off
     * @param value exposure value
     */
    public void setExposure(double value){
        if (mGrabber!=null){
            mGrabber.setOption(Videoio.CAP_PROP_EXPOSURE, value);
        }
    }

    /**
     * Get Exposure when Auto Exposure is Off
     * @return exposure value
     */
    public double getExposure(){
        if (mGrabber!=null){
            return mGrabber.getOption(Videoio.CAP_PROP_EXPOSURE);
        }
        return 0;
    }

    /**
     * Set Auto Focus
     * @param ON if true, set autofocus on, otherwise off
     */
    public void setAutoFocus(boolean ON){
        if (mGrabber!=null){
            mGrabber.setOption(Videoio.CAP_PROP_AUTOFOCUS, ON?ArducamIMX477Preset.AutoFocus.On:ArducamIMX477Preset.AutoFocus.Off);
        }
    }

    /**
     * Get Auto Focus
     * @return true if autofocus is on, otherwise false
     */
    public boolean getAutoFocus(){
        if (mGrabber!=null){
            return mGrabber.getOption(Videoio.CAP_PROP_AUTOFOCUS)==ArducamIMX477Preset.AutoFocus.On;
        }
        return false;
    }

    public void setAutoWB(boolean ON){
        if (mGrabber!=null){
            mGrabber.setOption(Videoio.CAP_PROP_AUTO_WB, ON?ArducamIMX477Preset.AutoWhiteBalance.On:ArducamIMX477Preset.AutoWhiteBalance.Off);
        }
    }

    public boolean getAutoWB(){
        if (mGrabber!=null){
            return mGrabber.getOption(Videoio.CAP_PROP_AUTO_WB)==ArducamIMX477Preset.AutoWhiteBalance.On;
        }
        return false;
    }

    /**
     * Set Focus when Auto Focus is Off
     * @param value focus value
     */
    public void setFocus(double value){
        if (mGrabber!=null){
            mGrabber.setOption(Videoio.CAP_PROP_FOCUS, value);
        }
    }

    /**
     * Get Focus when Auto Focus is Off
     * @return focus value
     */
    public double getFocus(){
        if (mGrabber!=null){
            return mGrabber.getOption(Videoio.CAP_PROP_FOCUS);
        }
        return 0;
    }

    public void setBrightness(double value){
        if (mGrabber!=null){
            mGrabber.setOption(Videoio.CAP_PROP_BRIGHTNESS, value);
        }
    }

    public double getBrightness(){
        if (mGrabber!=null){
            return mGrabber.getOption(Videoio.CAP_PROP_BRIGHTNESS);
        }
        return 0;
    }

    public void setContrast(double value){
        if (mGrabber!=null){
            mGrabber.setOption(Videoio.CAP_PROP_CONTRAST, value);
        }
    }

    public double getContrast(){
        if (mGrabber!=null){
            return mGrabber.getOption(Videoio.CAP_PROP_CONTRAST);
        }
        return 0;
    }

    public void setFrameWidth(int width){
        if (mGrabber!=null){
            mGrabber.setOption(Videoio.CAP_PROP_FRAME_WIDTH, width);
        }
    }

    public double getFrameWidth(){
        if (mGrabber!=null){
            return mGrabber.getOption(Videoio.CAP_PROP_FRAME_WIDTH);
        }
        return 0;
    }

    public void setFrameHeight(int height){
        if (mGrabber!=null){
            mGrabber.setOption(Videoio.CAP_PROP_FRAME_HEIGHT, height);
        }
    }

    public double getFrameHeight(){
        if (mGrabber!=null){
            return mGrabber.getOption(Videoio.CAP_PROP_FRAME_HEIGHT);
        }
        return 0;
    }

    public void setSharpness(double value){
        if (mGrabber!=null){
            mGrabber.setOption(Videoio.CAP_PROP_SHARPNESS, value);
        }
    }

    public double getSharpness(){
        if (mGrabber!=null){
            return mGrabber.getOption(Videoio.CAP_PROP_SHARPNESS);
        }
        return 0;
    }

    public void setGamma(double value){
        if (mGrabber!=null){
            mGrabber.setOption(Videoio.CAP_PROP_GAMMA, value);
        }
    }

    public double getGamma(){
        if (mGrabber!=null){
            return mGrabber.getOption(Videoio.CAP_PROP_GAMMA);
        }
        return 0;
    }

    /**
     * Take Photo from Camera
     * @param directory directory to save the photo, if null, will use default directory
     * @param prefix filename prefix
     * @return filename path of the saved photo, or null if failed
     */
    public String TakePhoto(String directory, String prefix){
        if (!ValidDirectory(directory)) directory = currentDirectory;
        if (mGrabber!=null){
            try{
                long nanos = System.nanoTime();
                while(IsGrabbingLiveView.get()) Wait(10);
                long delta = System.nanoTime() - nanos;
                double ms = delta / 1000000.0;
                if (event!=null) event.onLog("Waited IsGrabbingLiveView for "+ms+" milliseconds");
                nanos = System.nanoTime();
                TakingPhoto.set(true);

                // switch the grabber to photo resolution
                setFrameHeight(photoHeight);
                setFrameWidth(photoWidth);
                mGrabber.restart();
                long delta2 = System.nanoTime() - nanos;
                double ms2 = delta2 / 1000000.0;
                if (event!=null) event.onLog("Set Frame Width and Height for Photo Capture for "+ms2+" milliseconds");
                nanos = System.nanoTime();

                // give the sensor time to settle after re-enabling the auto modes
                setAutoWB(true);
                Wait(1000);

                setAutoExposure(true);
                Wait(1000);

                long delta3 = System.nanoTime() - nanos;
                double ms3 = delta3 / 1000000.0;
                if (event!=null) event.onLog("Re-enable Auto WB and Auto Exposure for "+ms3+" milliseconds");
                nanos = System.nanoTime();

                mGrabber.grab(); // discard the first frame

                long delta4 = System.nanoTime() - nanos;
                double ms4 = delta4 / 1000000.0;
                if (event!=null) event.onLog("Discarded first frame for "+ms4+" milliseconds");
                nanos = System.nanoTime();

                // keep grabbing until a frame arrives at the requested photo resolution
                Frame frame;
                int retry = 0;
                while(true){
                    frame = mGrabber.grab();
                    if (frame!=null) {
                        if (frame.imageHeight==photoHeight){
                            if (frame.imageWidth==photoWidth){
                                break;
                            }
                        }
                    }
                    retry++;
                    if (retry>=5) {
                        if (event!=null) event.onLog("TakePhoto failed, Unable to Take Photo correctly after 5 retries");
                        break;
                    }
                }

                long delta5 = System.nanoTime() - nanos;
                double ms5 = delta5 / 1000000.0;
                if (event!=null) event.onLog("Retry count = "+retry+", Grab Frame for "+ms5+" milliseconds");

                if (frame!=null && retry < 5){
                    if (event!=null) event.onLog("TakePhoto got frame with width: " + frame.imageWidth + " and height: " + frame.imageHeight);
                    val mat = matconverter.convert(frame);
                    String filename = Path.of(directory, makeFileName(prefix)).toString();
                    if (imwrite(filename, mat)){
                        long delta6 = System.nanoTime() - nanos;
                        double ms6 = delta6 / 1000000.0;
                        if (event!=null) event.onLog("TakePhoto success, Photo saved to " + filename + " for "+ms6+" milliseconds");
                        //if (event!=null) event.onLog("TakePhoto success, Photo saved to " + filename);
                        return filename;
                    } else if (event!=null) event.onLog("TakePhoto failed, Unable to Save Photo");
                }
            } catch (Exception e){
                if (event!=null) event.onLog("TakePhoto failed, Unable to Take Photo, Error: " + e.getMessage());
            } finally {
                // restart camera for live view
                try{
                    setFrameWidth(liveWidth);
                    setFrameHeight(liveHeight);
                    mGrabber.restart();
                    TakingPhoto.set(false);
                    if (event!=null) event.onLog("TakePhoto finished, Camera Restarted for Live View");
                } catch (Exception e){
                    if (event!=null) event.onLog("TakePhoto failed, Unable to Restart Camera, Error: " + e.getMessage());
                }
            }
        }
        return null;
    }

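    // Usage sketch (hypothetical caller): TakePhoto blocks for a few seconds while the grabber
    // restarts and auto modes settle, so a caller would normally run it off the JavaFX
    // Application Thread. Directory and prefix below are placeholders.
    //
    //     new Thread(() -> {
    //         String saved = cameraDetail.TakePhoto("/home/pi/photos", "QR123");
    //         if (saved != null) System.out.println("Saved " + saved);
    //     }).start();
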
    private String makeFileName(String prefix){
        // build filename as prefix_POSITION_YYYY-M-D_H-M-S + extension
        // (date/time fields are not zero-padded)
        LocalDateTime ldt = LocalDateTime.now();
        String timetag = ldt.getYear() + "-" + ldt.getMonthValue() + "-" + ldt.getDayOfMonth() + "_" + ldt.getHour() + "-" + ldt.getMinute() + "-" + ldt.getSecond();
        return prefix+"_"
                + switch(cameratitle.getText()){
                    case "Camera Left 90" -> "LEFT90";
                    case "Camera Left 45" -> "LEFT45";
                    case "Camera Center" -> "CENTER";
                    case "Camera Right 45" -> "RIGHT45";
                    case "Camera Right 90" -> "RIGHT90";
                    default -> "UNKNOWN";
                }
                + "_" + timetag + photo_extension;
    }

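    // Example output (hypothetical date/prefix): prefix "QR123" on "Camera Center"
    // at 2024-03-07 09:05:02 would yield "QR123_CENTER_2024-3-7_9-5-2.jpg".
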
    public void StopLiveView(){
        Capturing.set(false);
        if (mGrabber!=null){
            try{
                mGrabber.close();
                Platform.runLater(()->setCameraStatus("Camera Stopped"));
            } catch (Exception e){
                if (event!=null) event.onLog("StopLiveView failed, Unable to Stop Camera, Error: " + e.getMessage());
            }
        }

        TakingPhoto.set(false);
        IsGrabbingLiveView.set(false);
    }

    public boolean StartLiveView(LiveCamEvent event, String cameratitle, final boolean use_qr, final boolean use_face) {
        this.event = event;
        if (mGrabber != null) {
            try {
                //StopLiveView();

                if (use_qr && event!=null) event.onLog("QR Reader loaded");
                if (use_face && event!=null) event.onLog("Face detector loaded");
                setFrameHeight(liveHeight);
                setFrameWidth(liveWidth);
                mGrabber.start();

                Capturing.set(true);
                //val converter = new OpenCVFrameConverter.ToMat();
                Platform.runLater(()->setCameraStatus("Camera Started, " + liveWidth + "x" + liveHeight));
                if (event!=null) event.onLog("Camera started with resolution " + liveWidth + "x" + liveHeight);

                AutoExposure.setSelected(true);
                AutoFocus.setSelected(true);
                AutoWhiteBalance.setSelected(true);

                val task = new Task<Image>() {
                    @Override
                    protected Image call() {
                        while (Capturing.get()) {
                            try {
                                // while a photo is being taken, do not grab live-view frames
                                while(TakingPhoto.get() && Capturing.get()){
                                    Wait(10);
                                }

                                if (!Capturing.get()) return null;
                                IsGrabbingLiveView.set(true);
                                val frame = mGrabber.grab();
                                IsGrabbingLiveView.set(false);
                                if (frame != null) {
                                    val mat = matconverter.convert(frame);
                                    val umat = new UMat(); // use OpenCL
                                    mat.copyTo(umat);
                                    val graymat = new UMat(); // use OpenCL
                                    cvtColor(umat, graymat, COLOR_BGR2GRAY);
                                    if (use_qr){
                                        String qr = DetectQRFromMat(graymat);
                                        if (qr!=null) {
                                            if (!qr.equals(qrtext)){
                                                qrtext = qr;
                                                if (event!=null) event.onLog("QR Detected: " + qr);
                                                if (event!=null) event.onDetectedQRCode(qr);
                                            }
                                        }
                                    }
                                    if (use_face){
                                        RectVector face = DetectFace(graymat);
                                        if (face!=null && face.size()>0){
                                            if (event!=null) event.onFrontalFaceDetector(true, photoWidth, photoHeight);
                                            for(int i=0; i<face.size(); i++){
                                                val rect = face.get(i);
                                                rectangle(umat, rect, Scalar.GREEN);
                                            }
                                        } else if (event!=null) event.onFrontalFaceDetector(false, photoWidth, photoHeight);
                                    }

                                    umat.copyTo(mat); // back to the CPU
                                    val rgbmat = new Mat(mat.size(), CV_8UC3);
                                    cvtColor(mat, rgbmat, COLOR_BGR2RGB);
                                    updateValue(matToWritableImage(rgbmat, mat.cols(), mat.rows()));
                                }
                            } catch (Exception e) {
                                if (event!=null) event.onLog("Unable to Grab Frame, Error: " + e.getMessage());
                                //if (!Capturing.get()) Platform.runLater(this::StopLiveView);
                            }
                        }
                        return null;
                    }
                };

                // the task's value (the latest converted frame) is pushed to the camerastream ImageView
                task.valueProperty().addListener((obs, oldVal, newVal) -> {
                    if (newVal != null) {
                        setCameraStream(newVal);
                    }
                });

                // start task
                Thread taskThread = new Thread(task);
                taskThread.setDaemon(true);
                taskThread.start();

                return true;
            } catch (Exception e) {
                if (event!=null) event.onLog("StartLiveView failed, Unable to Start Camera, Error: " + e.getMessage());
            }
        } else if (event!=null) event.onLog("StartLiveView failed, grabber is null");
        return false;
    }

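    // Usage sketch (hypothetical wiring; the LiveCamEvent members shown are assumed from the
    // calls made in this class: onLog, onDetectedQRCode, onFrontalFaceDetector):
    //
    //     cameraDetail.StartLiveView(new LiveCamEvent() {
    //         @Override public void onLog(String msg) { System.out.println(msg); }
    //         @Override public void onDetectedQRCode(String qr) { /* handle QR text */ }
    //         @Override public void onFrontalFaceDetector(boolean found, int w, int h) { /* handle face */ }
    //     }, "Camera Center", true, false);
    //     // ... later:
    //     cameraDetail.StopLiveView();
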
    /*private void UpdateCameraStreamFromMat(Mat mat){
        val rgbmat = new Mat(mat.size(), CV_8UC3);
        cvtColor(mat, rgbmat, COLOR_BGR2RGB);
        val updated = matToWritableImage(rgbmat, mat.cols(), mat.rows());
        Platform.runLater(()->setCameraStream(updated));
    }*/

    /**
     * Detect QR Code from Mat
     * @param graymat Mat in Gray Scale
     * @return QR Code Text, or null if not detected
     */
    private String DetectQRFromMat(UMat graymat){
        if (qrreader!=null){
            Mat mat = new Mat();
            graymat.copyTo(mat); // back to the CPU, because ZXing only accepts BufferedImage
            val bufferedImage = matToBufferedImage(mat);
            val title = cameratitle.getText();
            val binaryBitmap = new BinaryBitmap(new HybridBinarizer(new BufferedImageLuminanceSource(bufferedImage)));
            try{
                Result result = qrreader.decode(binaryBitmap);
                if (result!=null){
                    return result.getText();
                }
            } catch (NotFoundException ignored) {
            }
        }
        return null;
    }

    /**
     * Detect Face from Mat
     * @param graymat Mat in Gray Scale
     * @return RectVector of detected faces, or null if the face detector is not available
     */
    private RectVector DetectFace(UMat graymat){
        // if (frontalfaceDetector!=null){
        //     val face = new RectVector();
        //     frontalfaceDetector.detectMultiScale(graymat, face);
        //     return face;
        // }
        return null;
    }

    private double getBrightnessFromGrayMat(Mat graymat){
        // mean intensity of a grayscale Mat, used as a rough brightness estimate
        Scalar mean = mean(graymat);
        return mean.get(0);
    }

    private WritableImage matToWritableImage(Mat mat, int cols, int rows){
        // assumes a continuous 3-channel RGB Mat (callers convert BGR to RGB first)
        WritableImage writableImage = new WritableImage(cols, rows);
        ByteBuffer buffer = mat.createBuffer();
        PixelFormat<ByteBuffer> pixelFormat = PixelFormat.getByteRgbInstance();
        writableImage.getPixelWriter().setPixels(0, 0, mat.cols(), mat.rows(), pixelFormat, buffer, mat.cols() * 3);
        return writableImage;
    }

    private BufferedImage matToBufferedImage(Mat mat){
        // single-channel Mats map to TYPE_BYTE_GRAY, multi-channel Mats to TYPE_3BYTE_BGR
        int type = BufferedImage.TYPE_BYTE_GRAY;
        if (mat.channels() > 1) {
            type = BufferedImage.TYPE_3BYTE_BGR;
        }
        BufferedImage image = new BufferedImage(mat.cols(), mat.rows(), type);
        byte[] data = ((DataBufferByte) image.getRaster().getDataBuffer()).getData();

        mat.data().get(data);
        return image;
    }

}