first commit

2024-11-09 08:55:17 +07:00
commit f6ee4817e6
98 changed files with 85493 additions and 0 deletions

@@ -0,0 +1,760 @@
package id.co.gtc.erhacam;
import Camera.ArducamIMX477Preset;
import Camera.CameraProperty;
import Camera.LiveCamEvent;
import Config.CameraConfigEnum;
import com.google.zxing.BinaryBitmap;
import com.google.zxing.NotFoundException;
import com.google.zxing.Result;
import com.google.zxing.client.j2se.BufferedImageLuminanceSource;
import com.google.zxing.common.HybridBinarizer;
import javafx.application.Platform;
import javafx.concurrent.Task;
import javafx.fxml.FXML;
import javafx.scene.control.CheckBox;
import javafx.scene.control.Slider;
import javafx.scene.image.Image;
import javafx.scene.image.ImageView;
import javafx.scene.control.Label;
import javafx.scene.image.PixelFormat;
import javafx.scene.image.WritableImage;
import javafx.scene.layout.AnchorPane;
import lombok.Getter;
import lombok.Setter;
import lombok.val;
import org.bytedeco.javacv.Frame;
import org.bytedeco.javacv.OpenCVFrameGrabber;
import org.bytedeco.opencv.global.opencv_imgproc;
import org.bytedeco.opencv.opencv_core.*;
import org.opencv.videoio.Videoio;
import java.awt.image.BufferedImage;
import java.awt.image.DataBufferByte;
import java.nio.ByteBuffer;
import java.nio.file.Path;
import java.time.LocalDateTime;
import java.util.Timer;
import java.util.TimerTask;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.concurrent.atomic.AtomicInteger;
import static Config.SomeCodes.*;
import static org.bytedeco.opencv.global.opencv_core.CV_8UC3;
import static org.bytedeco.opencv.global.opencv_core.mean;
import static org.bytedeco.opencv.global.opencv_imgcodecs.imwrite;
import static org.bytedeco.opencv.global.opencv_imgproc.*;
@SuppressWarnings({"unused"})
public class Cameradetail {
private final AtomicBoolean Capturing = new AtomicBoolean(false);
private final AtomicBoolean TakingPhoto = new AtomicBoolean(false);
private final AtomicBoolean IsGrabbingLiveView = new AtomicBoolean(false);
private OpenCVFrameGrabber mGrabber = null;
private LiveCamEvent event = null;
private @Getter @Setter CameraConfigEnum cameraConfigEnum = CameraConfigEnum.CameraConfigCenter;
private @Getter int LiveFPS = 0;
/**
* Most recently detected QR text from the live view; null until a code is detected
*/
private @Getter String qrtext = null;
@FXML
private Label cameratitle;
@FXML
private ImageView camerastream;
@FXML
private AnchorPane streamanchor;
@FXML
private Label camerastatus;
@FXML
private Slider brightnessSlider;
@FXML
private Slider contrastSlider;
@FXML
private Slider saturationSlider;
@FXML
private Slider hueSlider;
@FXML
private Slider gainSlider;
@FXML
private Slider exposureSlider;
@FXML
private CheckBox AutoExposure;
@FXML
private CheckBox AutoWhiteBalance;
@FXML
private CheckBox AutoFocus;
private final UMat BestMat = new UMat();
private final UMat LiveMat = new UMat();
private Size LiveSize = new Size(640, 480);
private Size PhotoSize = new Size(1920, 1080);
private void setSliderValue(Slider sld, CameraProperty prop, double value){
sld.setMin(prop.Min);
sld.setMax(prop.Max);
sld.setValue(value);
}
@FXML
public void initialize(){
camerastream.fitHeightProperty().bind(streamanchor.heightProperty());
//camerastream.fitWidthProperty().bind(streamanchor.widthProperty());
camerastream.setPreserveRatio(true);
Platform.runLater(()->{
setSliderValue(brightnessSlider, ArducamIMX477Preset.Brightness, config.getBrightness(cameraConfigEnum));
setSliderValue(contrastSlider, ArducamIMX477Preset.Contrast, config.getContrast(cameraConfigEnum));
setSliderValue(saturationSlider, ArducamIMX477Preset.Saturation, config.getSaturation(cameraConfigEnum));
setSliderValue(hueSlider, ArducamIMX477Preset.Hue, config.getHue(cameraConfigEnum));
setSliderValue(gainSlider, ArducamIMX477Preset.Gain, config.getGain(cameraConfigEnum));
setSliderValue(exposureSlider, ArducamIMX477Preset.ExposureTime, config.getExposure(cameraConfigEnum));
AutoExposure.setSelected(config.getAutoExposure(cameraConfigEnum));
AutoWhiteBalance.setSelected(config.getAutoWhiteBalance(cameraConfigEnum));
AutoFocus.setSelected(config.getAutoFocus(cameraConfigEnum));
});
AutoExposure.selectedProperty().addListener((obs, oldVal, newVal) -> {
setAutoExposure(newVal);
config.setAutoExposure(cameraConfigEnum, newVal);
raise_log("AutoExposure for "+getCameraTitle()+" changed to " + newVal);
});
AutoWhiteBalance.selectedProperty().addListener((obs, oldVal, newVal) -> {
setAutoWB(newVal);
config.setAutoWhiteBalance(cameraConfigEnum, newVal);
raise_log("AutoWhiteBalance for "+getCameraTitle()+" changed to "+newVal);
});
AutoFocus.selectedProperty().addListener((obs, oldVal, newVal) -> {
setAutoFocus(newVal);
config.setAutoFocus(cameraConfigEnum, newVal);
raise_log("AutoFocus for "+getCameraTitle()+" changed to "+newVal);
});
brightnessSlider.valueProperty().addListener((obs, oldVal, newVal) -> {
setBrightness(newVal.doubleValue());
config.setBrightness(cameraConfigEnum, newVal.doubleValue());
raise_log("Brightness for "+getCameraTitle()+" changed to "+newVal);
});
contrastSlider.valueProperty().addListener((obs, oldVal, newVal) -> {
setContrast(newVal.doubleValue());
config.setContrast(cameraConfigEnum, newVal.doubleValue());
raise_log("Contrast for "+getCameraTitle()+" changed to "+newVal);
});
saturationSlider.valueProperty().addListener((obs, oldVal, newVal) -> {
setSaturation(newVal.doubleValue());
config.setSaturation(cameraConfigEnum, newVal.doubleValue());
raise_log("Saturation for "+getCameraTitle()+" changed to "+newVal);
});
hueSlider.valueProperty().addListener((obs, oldVal, newVal) -> {
setHue(newVal.doubleValue());
config.setHue(cameraConfigEnum, newVal.doubleValue());
raise_log("Hue for "+getCameraTitle()+" changed to "+newVal);
});
gainSlider.valueProperty().addListener((obs, oldVal, newVal) -> {
setGain(newVal.doubleValue());
config.setGain(cameraConfigEnum, newVal.doubleValue());
raise_log("Gain for "+getCameraTitle()+" changed to "+newVal);
});
exposureSlider.valueProperty().addListener((obs, oldVal, newVal) -> {
setExposure(newVal.doubleValue());
config.setExposure(cameraConfigEnum, newVal.doubleValue());
raise_log("Exposure for "+getCameraTitle()+" changed to "+newVal);
});
}
@FXML
public void resetClick(){
brightnessSlider.adjustValue(ArducamIMX477Preset.Brightness.Default);
contrastSlider.adjustValue(ArducamIMX477Preset.Contrast.Default);
saturationSlider.adjustValue(ArducamIMX477Preset.Saturation.Default);
hueSlider.adjustValue(ArducamIMX477Preset.Hue.Default);
gainSlider.adjustValue(ArducamIMX477Preset.Gain.Default);
exposureSlider.adjustValue(ArducamIMX477Preset.ExposureTime.Default);
AutoExposure.setSelected(true);
AutoFocus.setSelected(true);
AutoWhiteBalance.setSelected(true);
}
public boolean isCapturing(){
return Capturing.get();
}
/**
* Set Camera Title
* @param title Title of the Camera
*/
public void setCameraTitle(String title){
if (ValidString(title)){
if (cameratitle!=null){
cameratitle.setText(title);
}
}
}
public void setSaturation(double value){
if (mGrabber!=null){
mGrabber.setOption(Videoio.CAP_PROP_SATURATION, value);
}
}
public double getSaturation(){
if (mGrabber!=null){
return mGrabber.getOption(Videoio.CAP_PROP_SATURATION);
}
return 0;
}
public void setHue(double value){
if (mGrabber!=null){
mGrabber.setOption(Videoio.CAP_PROP_HUE, value);
}
}
public double getHue(){
if (mGrabber!=null){
return mGrabber.getOption(Videoio.CAP_PROP_HUE);
}
return 0;
}
public void setGain(double value){
if (mGrabber!=null){
mGrabber.setOption(Videoio.CAP_PROP_GAIN, value);
}
}
public double getGain(){
if (mGrabber!=null){
return mGrabber.getOption(Videoio.CAP_PROP_GAIN);
}
return 0;
}
/**
* Get Camera Title
* @return Title of the Camera, or empty string if not set
*/
public String getCameraTitle(){
if (cameratitle!=null){
return cameratitle.getText();
}
return "";
}
/**
* Set Camera Status
* @param status Status of the Camera
*/
public void setCameraStatus(String status){
if (ValidString(status)){
if (camerastatus!=null){
camerastatus.setText(status);
}
}
}
/**
* Get Camera Status
* @return Status of the Camera, or empty string if not set
*/
public String getCameraStatus(){
if (camerastatus!=null){
return camerastatus.getText();
}
return "";
}
/**
* Set Camera Stream
* @param image Image to be displayed
*/
public void setCameraStream(Image image){
if (image!=null){
if (camerastream!=null){
camerastream.setImage(image);
}
}
}
/**
* Get Camera Stream
* @return Image of the Camera Stream, or null if not set
*/
public Image getCameraStream(){
if (camerastream!=null){
return camerastream.getImage();
}
return null;
}
public void setFPS(double value){
if (mGrabber!=null){
mGrabber.setOption(Videoio.CAP_PROP_FPS, value);
}
}
public double getFPS(){
if (mGrabber!=null){
return mGrabber.getOption(Videoio.CAP_PROP_FPS);
}
return 0;
}
/**
* Set Camera Grabber and Target Width and Height
* @param grabber Camera Grabber
* @param livewidth Width used on live view
* @param liveheight Height used on live view
* @param photowidth Width used on photo capture
* @param photoheight Height used on photo capture
*/
public void SetGrabber(OpenCVFrameGrabber grabber, int livewidth, int liveheight, int photowidth, int photoheight){
if (mGrabber!=null) {
StopLiveView();
}
LiveSize = new Size(livewidth, liveheight);
PhotoSize = new Size(photowidth, photoheight);
mGrabber = grabber;
}
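// Usage sketch (hedged; the device index, resolutions, and instance names below are
// illustrative assumptions, not taken from this commit):
//   OpenCVFrameGrabber grabber = new OpenCVFrameGrabber(0);   // e.g. /dev/video0
//   cameraDetail.SetGrabber(grabber, 640, 480, 1920, 1080);   // live view size, photo size
//   cameraDetail.StartLiveView(event, "Camera Center", true, false); // QR on, face detection off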
// Exposure and focus tricks:
// https://stackoverflow.com/questions/53545945/how-to-set-camera-to-auto-exposure-with-opencv-3-4-2
// https://github.com/opencv/opencv/issues/9738
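// Note (hedged, based on the links above): with OpenCV's V4L2 backend, CAP_PROP_AUTO_EXPOSURE
// commonly expects 0.25 for manual and 0.75 for auto mode; the exact values used here are
// assumed to be encoded in ArducamIMX477Preset.AutoExposure.On/Off for the IMX477 driver.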
/**
* Set Auto Exposure Mode
* @param ON if true, turn auto exposure on; otherwise turn it off
*/
public void setAutoExposure(boolean ON){
if (mGrabber!=null){
mGrabber.setOption(Videoio.CAP_PROP_AUTO_EXPOSURE, ON?ArducamIMX477Preset.AutoExposure.On:ArducamIMX477Preset.AutoExposure.Off);
}
}
/**
* Get Auto Exposure Mode
* @return true if auto exposure is on, otherwise false
*/
public boolean getAutoExposure(){
if (mGrabber!=null){
return mGrabber.getOption(Videoio.CAP_PROP_AUTO_EXPOSURE)==ArducamIMX477Preset.AutoExposure.On;
}
return false;
}
/**
* Set Exposure when Auto Exposure is Off
* @param value exposure value
*/
public void setExposure(double value){
if (mGrabber!=null){
mGrabber.setOption(Videoio.CAP_PROP_EXPOSURE, value);
}
}
/**
* Get Exposure when Auto Exposure is Off
* @return exposure value
*/
public double getExposure(){
if (mGrabber!=null){
return mGrabber.getOption(Videoio.CAP_PROP_EXPOSURE);
}
return 0;
}
/**
* Set Auto Focus
* @param ON if true, turn autofocus on; otherwise turn it off
*/
public void setAutoFocus(boolean ON){
if (mGrabber!=null){
mGrabber.setOption(Videoio.CAP_PROP_AUTOFOCUS, ON?ArducamIMX477Preset.AutoFocus.On:ArducamIMX477Preset.AutoFocus.Off);
}
}
/**
* Get Auto Focus
* @return true if autofocus is on, otherwise false
*/
public boolean getAutoFocus(){
if (mGrabber!=null){
return mGrabber.getOption(Videoio.CAP_PROP_AUTOFOCUS)==ArducamIMX477Preset.AutoFocus.On;
}
return false;
}
public void setAutoWB(boolean ON){
if (mGrabber!=null){
mGrabber.setOption(Videoio.CAP_PROP_AUTO_WB, ON?ArducamIMX477Preset.AutoWhiteBalance.On:ArducamIMX477Preset.AutoWhiteBalance.Off);
}
}
public boolean getAutoWB(){
if (mGrabber!=null){
return mGrabber.getOption(Videoio.CAP_PROP_AUTO_WB)==ArducamIMX477Preset.AutoWhiteBalance.On;
}
return false;
}
/**
* Set Focus when Auto Focus is Off
* @param value focus value
*/
public void setFocus(double value){
if (mGrabber!=null){
mGrabber.setOption(Videoio.CAP_PROP_FOCUS, value);
}
}
/**
* Get Focus when Auto Focus is Off
* @return focus value
*/
public double getFocus(){
if (mGrabber!=null){
return mGrabber.getOption(Videoio.CAP_PROP_FOCUS);
}
return 0;
}
public void setBrightness(double value){
if (mGrabber!=null){
mGrabber.setOption(Videoio.CAP_PROP_BRIGHTNESS, value);
}
}
public double getBrightness(){
if (mGrabber!=null){
return mGrabber.getOption(Videoio.CAP_PROP_BRIGHTNESS);
}
return 0;
}
public void setContrast(double value){
if (mGrabber!=null){
mGrabber.setOption(Videoio.CAP_PROP_CONTRAST, value);
}
}
public double getContrast(){
if (mGrabber!=null){
return mGrabber.getOption(Videoio.CAP_PROP_CONTRAST);
}
return 0;
}
public void setFrameWidth(int width){
if (mGrabber!=null){
mGrabber.setOption(Videoio.CAP_PROP_FRAME_WIDTH, width);
}
}
public double getFrameWidth(){
if (mGrabber!=null){
return mGrabber.getOption(Videoio.CAP_PROP_FRAME_WIDTH);
}
return 0;
}
public void setFrameHeight(int height){
if (mGrabber!=null){
mGrabber.setOption(Videoio.CAP_PROP_FRAME_HEIGHT, height);
}
}
public double getFrameHeight(){
if (mGrabber!=null){
return mGrabber.getOption(Videoio.CAP_PROP_FRAME_HEIGHT);
}
return 0;
}
public void setSharpness(double value){
if (mGrabber!=null){
mGrabber.setOption(Videoio.CAP_PROP_SHARPNESS, value);
}
}
public double getSharpness(){
if (mGrabber!=null){
return mGrabber.getOption(Videoio.CAP_PROP_SHARPNESS);
}
return 0;
}
public void setGamma(double value){
if (mGrabber!=null){
mGrabber.setOption(Videoio.CAP_PROP_GAMMA, value);
}
}
public double getGamma(){
if (mGrabber!=null){
return mGrabber.getOption(Videoio.CAP_PROP_GAMMA);
}
return 0;
}
/**
* Take Photo from Camera
* @param directory directory to save the photo; if null or invalid, the default directory is used
* @param prefix filename prefix
* @return path of the saved photo, or null on failure
*/
@SuppressWarnings("BusyWait")
public String TakePhoto(String directory, String prefix) throws InterruptedException {
String result = null;
if (!ValidDirectory(directory)) directory = currentDirectory;
if (mGrabber!=null){
while(IsGrabbingLiveView.get()){
Thread.sleep(10);
}
TakingPhoto.set(true);
if (!BestMat.empty()){
Size sz = BestMat.size();
raise_log("TakePhoto got frame with width: " + sz.width() + " and height: " + sz.height());
String filename = Path.of(directory, makeFileName(prefix)).toString();
if (imwrite(filename, BestMat)){
raise_log("TakePhoto success, Photo saved to " + filename);
result = filename;
} else raise_log("TakePhoto failed, Unable to Save Photo");
} else raise_log("TakePhoto failed, Live View is Empty");
} else raise_log("TakePhoto failed, Grabber is null");
TakingPhoto.set(false);
return result;
}
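// Usage sketch (hedged; the directory and prefix values are illustrative assumptions):
//   String saved = cameraDetail.TakePhoto("/home/pi/captures", "SAMPLE");
//   // saved holds the full file path on success, or null on failure
//   // TakePhoto throws InterruptedException, so call it from a worker thread and handle interruption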
private String makeFileName(String prefix){
// build filename as prefix_POSITION_YYYY-M-D_H-M-S (time fields are not zero-padded; a padded variant is sketched below)
LocalDateTime ldt = LocalDateTime.now();
String timetag = ldt.getYear() + "-" + ldt.getMonthValue() + "-" + ldt.getDayOfMonth() + "_" + ldt.getHour() + "-" + ldt.getMinute() + "-" + ldt.getSecond();
return prefix+"_"
+ switch(cameratitle.getText()){
case "Camera Left 90" -> "LEFT90";
case "Camera Left 45" -> "LEFT45";
case "Camera Center" -> "CENTER";
case "Camera Right 45" -> "RIGHT45";
case "Camera Right 90" -> "RIGHT90";
default -> "UNKNOWN";
}
+ "_" + timetag + ".jpg";
}
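// Hedged alternative (not wired into makeFileName above): a zero-padded time tag so that
// saved files sort lexicographically by capture time; it relies only on the already-imported LocalDateTime.
private String makeZeroPaddedTimeTag(){
LocalDateTime ldt = LocalDateTime.now();
return String.format("%04d-%02d-%02d_%02d-%02d-%02d",
ldt.getYear(), ldt.getMonthValue(), ldt.getDayOfMonth(),
ldt.getHour(), ldt.getMinute(), ldt.getSecond());
}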
public void StopLiveView(){
Capturing.set(false);
if (mGrabber!=null){
try{
mGrabber.stop();
Platform.runLater(()->setCameraStatus("Camera Stopped"));
} catch (Exception e){
raise_log("StopLiveView failed, Unable to Stop Camera, Error: " + e.getMessage());
}
}
TakingPhoto.set(false);
IsGrabbingLiveView.set(false);
}
public boolean StartLiveView(LiveCamEvent event, String cameratitle, final boolean use_qr , final boolean use_face) {
this.event = event;
if (mGrabber != null) {
try {
if (use_qr) raise_log("QR Reader loaded");
if (use_face) raise_log("Face detector loaded");
// capture with best resolution
setFrameHeight(PhotoSize.height());
setFrameWidth(PhotoSize.width());
LiveFPS = 0;
mGrabber.start();
Capturing.set(true);
// status message (LiveFPS is still 0 here; it is measured once frames start arriving)
String ss = String.format("Camera Started with resolution %dx%d@%d", PhotoSize.width(), PhotoSize.height(),LiveFPS);
Platform.runLater(()->setCameraStatus(ss));
raise_log(ss);
AutoExposure.setSelected(true);
AutoFocus.setSelected(true);
AutoWhiteBalance.setSelected(true);
Task<Image> task = new Task<>() {
@SuppressWarnings("BusyWait")
@Override
protected Image call() {
// repeat until capturing is false
AtomicInteger fps = new AtomicInteger(0);
TimerTask timerTask = new TimerTask() {
@Override
public void run() {
LiveFPS = fps.getAndSet(0);
}
};
Timer timer = new Timer();
timer.scheduleAtFixedRate(timerTask, 1000, 1000);
while (Capturing.get()) {
try {
// while a photo is being taken, do not grab new frames
while(TakingPhoto.get() && Capturing.get()){
Thread.sleep(10);
}
if (!Capturing.get()) return null;
IsGrabbingLiveView.set(true);
Frame frame = mGrabber.grab(); // grab a frame from the camera
Mat mat = matconverter.convert(frame); // convert to Mat (null when no frame was grabbed)
if (mat != null) mat.copyTo(BestMat); // copy into BestMat (UMat) so later steps can run on OpenCL
fps.addAndGet(1);
IsGrabbingLiveView.set(false);
if (mat != null) {
opencv_imgproc.resize(BestMat, LiveMat, LiveSize); // resize to LiveSize
UMat graymat = new UMat(); // use OpenCL for grayscale
opencv_imgproc.cvtColor(LiveMat,graymat, COLOR_BGR2GRAY); // convert to grayscale
if (use_qr){
String qr = DetectQRFromMat(graymat);
if (qr!=null) {
if (!qr.equals(qrtext)){
qrtext = qr;
raise_log("QR Detected: " + qr);
if (event!=null) event.onDetectedQRCode(qr);
}
}
}
if (use_face){
RectVector face = DetectFace(graymat);
if (face!=null && face.size()>0){
if (event!=null) event.onFaceDetector(true, PhotoSize.width(), PhotoSize.height());
for(int i=0; i<face.size(); i++){
Rect rect = face.get(i);
rectangle(LiveMat, rect, Scalar.GREEN);
}
} else if (event!=null) event.onFaceDetector(false, PhotoSize.width(), PhotoSize.height());
}
UMat rgbmat = new UMat(LiveMat.size(), CV_8UC3);
cvtColor(LiveMat, rgbmat, COLOR_BGR2RGB);
Mat imgmat = new Mat();
rgbmat.copyTo(imgmat); // copy back to CPU
// update the Task's value using matToWritableImage (picked up by the listener below)
updateValue(matToWritableImage(imgmat, imgmat.cols(), imgmat.rows()));
}
} catch (Exception e) {
raise_log("Unable to Grab Frame, Error: " + e.getMessage());
//if (!Capturing.get()) Platform.runLater(this::StopLiveView);
}
}
timer.cancel();
return null;
}
};
// the Task's value (the latest image) is pushed to camerastream
task.valueProperty().addListener((obs, oldVal, newVal) -> {
if (newVal != null) {
setCameraStream(newVal);
}
});
// start task
new Thread(task).start();
return true;
} catch (Exception e) {
raise_log("StartLiveView failed, Unable to Start Camera, Error: " + e.getMessage());
}
} else raise_log("StartLiveView failed, grabber is null");
return false;
}
/**
* Detect QR Code from Mat
* @param graymat grayscale UMat
* @return QR Code Text, or null if not detected
*/
private String DetectQRFromMat(UMat graymat){
if (qrreader!=null){
Mat mat = new Mat();
graymat.copyTo(mat); // copy back to CPU memory, because ZXing only accepts a BufferedImage
BufferedImage bufferedImage = matToBufferedImage(mat);
BinaryBitmap binaryBitmap = new BinaryBitmap(new HybridBinarizer(new BufferedImageLuminanceSource(bufferedImage)));
try{
Result result = qrreader.decode(binaryBitmap);
if (result!=null){
return result.getText();
}
} catch (NotFoundException ignored) {
}
}
return null;
}
/**
* Detect Face from Mat
* @param graymat grayscale UMat
* @return RectVector of detected faces (possibly empty), or null if the face detector is not loaded
*/
private RectVector DetectFace(UMat graymat){
if (faceDetector!=null){
RectVector face = new RectVector();
faceDetector.detectMultiScale(graymat, face);
return face;
}
return null;
}
private double getBrightnessFromGrayMat(Mat graymat){
Scalar mean = mean(graymat);
return mean.get(0);
}
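/**
* Convert a CV_8UC3 Mat that is already in RGB channel order into a JavaFX WritableImage.
* Callers are expected to have done the BGR-to-RGB conversion beforehand (see StartLiveView).
*/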
private WritableImage matToWritableImage(Mat mat, int cols, int rows){
WritableImage writableImage = new WritableImage(cols, rows);
ByteBuffer buffer = mat.createBuffer();
PixelFormat<ByteBuffer> pixelFormat = PixelFormat.getByteRgbInstance();
writableImage.getPixelWriter().setPixels(0, 0, mat.cols(), mat.rows(), pixelFormat, buffer, mat.cols() * 3);
return writableImage;
}
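/**
* Convert a Mat into a BufferedImage: TYPE_BYTE_GRAY for single-channel input,
* TYPE_3BYTE_BGR otherwise. Assumes the Mat's pixel data is continuous and matches
* the BufferedImage buffer size.
*/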
private BufferedImage matToBufferedImage(Mat mat){
int type = BufferedImage.TYPE_BYTE_GRAY;
if (mat.channels() > 1) {
type = BufferedImage.TYPE_3BYTE_BGR;
}
BufferedImage image = new BufferedImage(mat.cols(), mat.rows(), type);
byte[] data = ((DataBufferByte) image.getRaster().getDataBuffer()).getData();
mat.data().get(data);
return image;
}
private void raise_log(String msg){
if (event!=null) event.onLog(msg);
}
}
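// Hedged usage sketch: as an FXML controller this class is normally created by FXMLLoader;
// the FXML resource name below is an assumption, not taken from this commit.
//   FXMLLoader loader = new FXMLLoader(Cameradetail.class.getResource("cameradetail.fxml"));
//   AnchorPane root = loader.load();
//   Cameradetail controller = loader.getController();
//   controller.setCameraTitle("Camera Center");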