Commit 14042025
@@ -10,6 +10,9 @@ import javafx.scene.control.Label;
import javafx.scene.control.TextArea;
import javafx.scene.control.TextField;
import javafx.scene.image.Image;
import javafx.scene.image.PixelFormat;
import javafx.scene.image.WritableImage;
import org.bytedeco.javacv.Frame;
import org.bytedeco.javacv.Java2DFrameConverter;
import org.bytedeco.javacv.OpenCVFrameConverter;
import org.bytedeco.opencv.global.opencv_core;
@@ -22,11 +25,13 @@ import org.bytedeco.opencv.opencv_core.UMat;
import org.tinylog.Logger;

import java.awt.image.BufferedImage;
import java.awt.image.DataBufferByte;
import java.io.File;
import java.io.InputStream;
import java.net.Inet4Address;
import java.net.Inet6Address;
import java.net.InetAddress;
import java.nio.ByteBuffer;
import java.nio.file.Files;
import java.nio.file.Path;
import java.time.LocalDateTime;
@@ -756,4 +761,33 @@ public class SomeCodes {
    public static boolean IsBlurred(UMat mat, double threshold){
        return CalculateSharpness(mat)<threshold;
    }

    public static Image MatToImage(Mat mat){
        Frame frame = matconverter.convert(mat);
        BufferedImage bufferedImage = matToBufferedImage(mat);
        return SwingFXUtils.toFXImage(bufferedImage, null);
    }

    public static WritableImage matToWritableImage(Mat mat){
        int cols = mat.cols();
        int rows = mat.rows();
        WritableImage writableImage = new WritableImage(cols, rows);
        ByteBuffer buffer = mat.createBuffer();
        PixelFormat<ByteBuffer> pixelFormat = PixelFormat.getByteRgbInstance();
        writableImage.getPixelWriter().setPixels(0, 0, cols, rows, pixelFormat, buffer, cols * 3);
        return writableImage;
    }

    public static BufferedImage matToBufferedImage(Mat mat){
        int type = BufferedImage.TYPE_BYTE_GRAY;
        if (mat.channels() > 1) {
            type = BufferedImage.TYPE_3BYTE_BGR;
        }
        BufferedImage image = new BufferedImage(mat.cols(), mat.rows(), type);
        byte[] data = ((DataBufferByte) image.getRaster().getDataBuffer()).getData();

        mat.data().get(data);
        return image;
    }
}
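The new helpers convert a JavaCV Mat to a JavaFX image in two ways: matToWritableImage pushes raw bytes through an RGB PixelFormat, so it expects the Mat to already be in RGB order, while MatToImage goes through matToBufferedImage with TYPE_3BYTE_BGR and therefore accepts the BGR order that imread and the frame grabber deliver. A minimal usage sketch follows; the file name and variable names are placeholders, and it is assumed to run inside an already-initialized JavaFX application:

    // assumes: import Config.SomeCodes; import javafx.scene.image.Image;
    //          import org.bytedeco.opencv.opencv_core.Mat;
    //          import static org.bytedeco.opencv.global.opencv_imgcodecs.imread;
    Mat bgr = imread("photo.jpg");             // imread returns a 3-channel BGR Mat
    Image fx = SomeCodes.MatToImage(bgr);      // safe for BGR: goes through TYPE_3BYTE_BGR
    // SomeCodes.matToWritableImage assumes RGB byte order, so convert with
    // cvtColor(src, dst, COLOR_BGR2RGB) before calling it on a BGR Mat.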
@@ -4,11 +4,7 @@ import javafx.animation.KeyFrame;
import javafx.animation.PauseTransition;
import javafx.animation.Timeline;
import javafx.application.Platform;
import javafx.beans.InvalidationListener;
import javafx.beans.Observable;
import javafx.beans.value.ChangeListener;
import javafx.beans.value.ObservableValue;
import javafx.event.EventHandler;

import javafx.geometry.Pos;
import javafx.scene.Node;
import javafx.scene.Scene;
@@ -169,29 +165,27 @@ public class AutoCloseAlert {
        int width = (int) Screen.getPrimary().getBounds().getWidth();
        int height = (int) Screen.getPrimary().getBounds().getHeight();

        ImageView imageView = new ImageView();
        imageView.setPreserveRatio(true);
        imageView.setFitWidth(width);
        imageView.setFitHeight(height);
        HBox hbox = new HBox();
        hbox.setAlignment(Pos.CENTER);
        int fitwidth = width / pictures.length;
        for(Image i : pictures){
            ImageView iv = new ImageView(i);
            iv.setPreserveRatio(true);
            iv.setFitHeight(height);
            iv.setFitWidth(fitwidth);
            hbox.getChildren().add(iv);
        }

        BorderPane borderPane = new BorderPane();
        borderPane.setCenter(imageView);
        borderPane.setCenter(hbox);

        alertStage.setScene(new Scene(borderPane, width, height));
        alertStage.centerOnScreen();

        Timeline timeline = new Timeline();
        timeline.getKeyFrames().add(new KeyFrame(Duration.seconds(0), event -> {
            alertStage.show();
        }));
        timeline.getKeyFrames().add(new KeyFrame(Duration.seconds(0), event -> alertStage.show()));

        for(int xx = 0; xx < pictures.length; xx++){
            final int index = xx;
            timeline.getKeyFrames().add(new KeyFrame(Duration.seconds(seconds*(index+1)), event -> {
                imageView.setImage(pictures[index]);
            }));
        }
        timeline.getKeyFrames().add(new KeyFrame(Duration.seconds(seconds* (pictures.length+1)), event -> {
        timeline.getKeyFrames().add(new KeyFrame(Duration.seconds(seconds* (pictures.length)), event -> {
            alertStage.close();
            if (currentAlertStage == alertStage) {
                currentAlertStage = null;
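Because borderPane.setCenter(hbox) replaces the earlier setCenter(imageView) call, the HBox showing all pictures side by side is what actually gets displayed, and the stage now simply stays open for seconds * pictures.length before closing. A worked example with illustrative numbers:

    // seconds = 2, pictures.length = 3
    // t = 0 s           -> alertStage.show()
    // t = 2 s, 4 s, 6 s -> imageView.setImage(pictures[i])  (not visible; hbox is the center node)
    // t = 6 s           -> alertStage.close()                (previously 8 s with pictures.length + 1)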
@@ -205,6 +199,7 @@ public class AutoCloseAlert {
        shownTitle = "";
        shownContent = "";
        shownHeader = "";
        shownBanner = null;

    }

@@ -359,5 +354,6 @@ public class AutoCloseAlert {
        shownTitle = "";
        shownContent = "";
        shownHeader = "";
        shownBanner= null;
    }
}

@@ -6,7 +6,6 @@ import Camera.ObsbotMeet2Preset;
import Config.CameraConfigEnum;
import ErhaAPI.PhotoResult;
import com.google.zxing.BinaryBitmap;
import com.google.zxing.NotFoundException;
import com.google.zxing.Result;
import com.google.zxing.client.j2se.BufferedImageLuminanceSource;
import com.google.zxing.common.HybridBinarizer;
@@ -17,8 +16,6 @@ import javafx.scene.control.Slider;
import javafx.scene.image.Image;
import javafx.scene.image.ImageView;
import javafx.scene.control.Label;
import javafx.scene.image.PixelFormat;
import javafx.scene.image.WritableImage;
import javafx.scene.layout.AnchorPane;
import lombok.Getter;
import lombok.Setter;
@@ -35,8 +32,6 @@ import org.tinylog.Logger;


import java.awt.image.BufferedImage;
import java.awt.image.DataBufferByte;
import java.nio.ByteBuffer;
import java.nio.file.Path;
import java.time.LocalDateTime;
import java.util.List;
@@ -50,7 +45,6 @@ import java.util.concurrent.atomic.AtomicLong;

import static Config.SomeCodes.*;
import static id.co.gtc.erhacam.Detectors.*;
import static org.bytedeco.opencv.global.opencv_core.CV_8UC3;
import static org.bytedeco.opencv.global.opencv_imgproc.*;

@SuppressWarnings({"unused"})
@@ -130,6 +124,10 @@ public class Cameradetail {
    private @Getter Size ReducedSize = new Size(720, 1280);
    private @Getter Size BestSize = new Size(2160, 3840);

    private int realwidth = 0;
    private int realheight = 0;


    //TODO these numbers came from Erha; check whether they are still appropriate
    private @Getter @Setter Size FullCropSize = new Size(1036,1036);

@@ -141,12 +139,19 @@ public class Cameradetail {
        return BestSize.height();
    }

    public int getRealWidth(){
        return realwidth;
    }
    public int getRealHeight(){
        return realheight;
    }

    int[] paramjpeg = {opencv_imgcodecs.IMWRITE_JPEG_QUALITY, 100};
    int[] parampng = {opencv_imgcodecs.IMWRITE_PNG_COMPRESSION, 0};

    private boolean use_qr = false;
    private boolean use_face = false;

    private int _hardwareID = -1;

    private void setSliderValue(Slider sld, CameraProperty prop, double value){

@@ -379,6 +384,7 @@ public class Cameradetail {

    /**
     * Set Camera Grabber and Target Width and Height
     * @param hardwareID Hardware ID of the Camera
     * @param grabber Camera Grabber
     * @param livewidth Width used on live view
     * @param liveheight Height used on live view
@@ -388,7 +394,7 @@ public class Cameradetail {
     * @param reducedheight Height used on reduced resolution
     * @param isPotrait if true, set to portrait mode, otherwise landscape
     */
    public void SetGrabber(OpenCVFrameGrabber grabber, int livewidth, int liveheight, int photowidth, int photoheight, int reducedwidth, int reducedheight, boolean isPotrait){
    public void SetGrabber(int hardwareID, OpenCVFrameGrabber grabber, int livewidth, int liveheight, int photowidth, int photoheight, int reducedwidth, int reducedheight, boolean isPotrait){
        if (mGrabber!=null) {
            StopLiveView();
        }
@@ -406,10 +412,8 @@ public class Cameradetail {
            ReducedSize = new Size(reducedwidth, reducedheight);
        }



        mGrabber = grabber;

        _hardwareID = hardwareID;
    }

    //Exposure and Focus Tricks :
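SetGrabber now takes the camera's hardware ID as its first parameter and stores it in _hardwareID. A minimal call-site sketch, with an illustrative device index and resolutions (the `detail` instance is hypothetical):

    OpenCVFrameGrabber grabber = new OpenCVFrameGrabber(0);   // device index 0
    // hardwareID, grabber, live WxH, photo WxH, reduced WxH, portrait flag
    detail.SetGrabber(0, grabber, 1280, 720, 3840, 2160, 1280, 720, true);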
@@ -681,6 +685,7 @@ public class Cameradetail {
        try{
            // wait if the camera is still capturing
            IsGrabbingLiveView.acquire();

            TakingPhoto = new CountDownLatch(1);


@@ -718,6 +723,7 @@ public class Cameradetail {
            System.out.println("TakePhoto IsGrabbingLiveView interrupted");
        }


        TakingPhoto.countDown();

    } else raise_log("TakePhoto failed, Grabber is null");
@@ -726,6 +732,8 @@ public class Cameradetail {




    public String CropBestMat(String directory, String prefix, Rect ROI){
        UMat cloned = new UMat();
        BestMat.copyTo(cloned);
@@ -845,8 +853,8 @@ public class Cameradetail {
        final AtomicInteger no_face_counter = new AtomicInteger(0);
        final AtomicInteger face_counter = new AtomicInteger(0);
        final AtomicInteger blink_counter = new AtomicInteger(0);
        final AtomicInteger no_eye_counter = new AtomicInteger(0);
        final AtomicInteger have_eye_counter = new AtomicInteger(0);
        //final AtomicInteger no_eye_counter = new AtomicInteger(0);
        //final AtomicInteger have_eye_counter = new AtomicInteger(0);
        while(Capturing.get()){
            try {
                face_semaphore.acquire();
@@ -907,8 +915,8 @@ public class Cameradetail {

                if (theface.getEyesCount()>=2){
                    // eyes detected (eyes open)
                    if (have_eye_counter.incrementAndGet()<5) continue;
                    no_eye_counter.set(0);
                    // if (have_eye_counter.incrementAndGet()<5) continue;
                    // no_eye_counter.set(0);

                    if (event!=null) event.onEyeDetector(true);
                    LabelVisible(eye_indicator,true);
@@ -922,7 +930,7 @@ public class Cameradetail {
                        System.out.println("First Eye Detected from camera "+cameratitle.getText());
                        eye_state.set(1);
                    } else {
                        System.out.println("Transition from close to open eyes");
                        //System.out.println("Transition from close to open eyes");
                        eye_state.set(1);

                        blink_counter.incrementAndGet();
@@ -949,8 +957,8 @@ public class Cameradetail {
                } else {
                    // face detected, but no eyes
                    // transition from open eyes to closed eyes
                    if (no_eye_counter.incrementAndGet()<5) continue;
                    have_eye_counter.set(0);
                    // if (no_eye_counter.incrementAndGet()<5) continue;
                    // have_eye_counter.set(0);

                    if (event!=null) event.onEyeDetector(false);
                    LabelVisible(eye_indicator,false);
@@ -958,7 +966,7 @@ public class Cameradetail {


                    if (eye_state.get()!=0){
                        System.out.println("Transition from open to close eyes");
                        //System.out.println("Transition from open to close eyes");
                        eye_state.set(0);

                    }
@@ -978,8 +986,8 @@ public class Cameradetail {
                    waiting_for_second_blink.set(false);
                    face_counter.set(0);
                    blink_counter.set(0);
                    no_eye_counter.set(0);
                    have_eye_counter.set(0);
                    // no_eye_counter.set(0);
                    // have_eye_counter.set(0);

                    if (event!=null) {
                        event.onFrontalFaceDetector(false, _face_width, _face_height);
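Commenting out have_eye_counter and no_eye_counter removes the 5-frame debounce, so onEyeDetector and the eye_state transitions now fire on the first frame that crosses the getEyesCount() >= 2 threshold. Rough effect, using the 30 fps figure quoted in flush() (numbers are illustrative):

    // with debounce:    5 consecutive frames required -> about 5/30 s = 167 ms extra latency per transition
    // without debounce: immediate reaction, but a single-frame detector glitch now also triggers an event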
@@ -994,34 +1002,60 @@ public class Cameradetail {


                }
                UMat rgbmat = new UMat(LiveMat.size(), CV_8UC3);
                cvtColor(LiveMat, rgbmat, COLOR_BGR2RGB);

                Mat imgmat = new Mat();
                rgbmat.copyTo(imgmat); // copy back to CPU
                // Update Task Value using matToWritableImage
                setCameraStream(matToWritableImage(imgmat));
                //updateValue(matToWritableImage(imgmat));
            } catch (InterruptedException e) {
                LiveMat.copyTo(imgmat); // copy back to CPU

                setCameraStream(MatToImage(imgmat));
            } catch (Exception e) {
                System.out.println(Thread.currentThread().getName()+" interrupted");
            }
        }
    });

    private void flush(){
        if (mGrabber!=null){
            long now = System.currentTimeMillis();
            long delta = 0;
            while (delta<32){
                // flushing stale frames
                // 30 fps means 33 ms per frame
                // so if grab is quicker than 30 ms, it's a stale frame
                try{
                    if (mGrabber==null) throw new FrameGrabber.Exception("Grabber is null");
                    Frame frame = mGrabber.grab(); // grab frame
                    delta = System.currentTimeMillis() - now;
                    now = System.currentTimeMillis();
                } catch (FrameGrabber.Exception ignored) {

                }

            }
        }
    }
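flush() drains frames that the driver buffered while a photo was being taken: a grab that returns in under 32 ms is treated as a stale, already-queued frame, and only a grab that has to wait roughly one full 33 ms frame interval is considered live. A rough trace with illustrative timings:

    // grab #1 returns after  2 ms -> stale, keep draining
    // grab #2 returns after  4 ms -> stale, keep draining
    // grab #3 returns after 34 ms -> camera had to produce a fresh frame; loop exits (delta >= 32)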

    // Camera Capture Thread
    Thread cam_capture = new Thread(()->{
        while (Capturing.get()) {
            try {
                // while a photo is being taken, do not grab frames
                if (TakingPhoto!=null) TakingPhoto.await();
                if (TakingPhoto!=null) {
                    TakingPhoto.await();
                    flush();
                    TakingPhoto = null;
                }

                IsGrabbingLiveView.drainPermits();
                IsGrabbingLiveView.release();
                //IsGrabbingLiveView.set(true);
                Frame frame = null;

                if (Capturing.get()) {
                    try{
                        frame = mGrabber.grab(); // grab frame
                        if (mGrabber!=null)
                            frame = mGrabber.grab();
                        else throw new FrameGrabber.Exception("Grabber is null");

                    } catch (FrameGrabber.Exception e){
                        if (Capturing.get()){
                            // an exception only matters while we are still capturing; once capturing has stopped, ignore it
@@ -1031,10 +1065,7 @@ public class Cameradetail {
                            if (msg.contains("start() been called")){
                                if (Capturing.get()){
                                    System.out.println("Camera "+Thread.currentThread().getName()+" has been stopped, restarting");
                                    mGrabber.close();
                                    //Wait(100);
                                    mGrabber.start();
                                    mGrabber.flush();
                                    mGrabber.restart();
                                } else {
                                    System.out.println("Camera "+Thread.currentThread().getName()+" has been stopped, not restarting");
                                }
@@ -1047,7 +1078,17 @@ public class Cameradetail {

                //IsGrabbingLiveView.set(false);
                if (frame==null) continue;
                if (frame.image==null) continue;
                if (frame.image.length==0) continue;
                if (realwidth!=frame.imageWidth || realheight!=frame.imageHeight) {
                    realwidth = frame.imageWidth;
                    realheight = frame.imageHeight;
                }

                Mat mat = matconverter.convert(frame); // convert to Mat
                if (mat.empty()) continue;


                fps.incrementAndGet();

                UMat originalmat = new UMat();
@@ -1091,10 +1132,12 @@ public class Cameradetail {
                }
            } catch ( FrameGrabber.Exception fe){
                System.out.println("FrameGrabber Exception in" + Thread.currentThread().getName() + " : " + fe.getMessage());
                fe.printStackTrace();
            } catch (InterruptedException e) {
                System.out.println(Thread.currentThread().getName()+" interrupted");
            } catch (Exception e){
                System.out.println(Thread.currentThread().getName()+" exception : "+e.getMessage());
                e.printStackTrace();
            }

        }
@@ -1118,7 +1161,6 @@ public class Cameradetail {

        LiveFPS = 0;
        mGrabber.start();
        mGrabber.flush();
        System.out.println("Camera "+cameratitle+" started");

        Capturing.set(true);
@@ -1158,17 +1200,17 @@ public class Cameradetail {
        cam_capture.setName("cam_capture "+cameratitle);
        cam_capture.setDaemon(true);
        cam_capture.start();
        System.out.println("Starting cam_capture thread");
        //System.out.println("Starting cam_capture thread");

        qr_detect.setName("qr_detect "+cameratitle);
        qr_detect.setDaemon(true);
        qr_detect.start();
        System.out.println("Starting qr_detect thread");
        //System.out.println("Starting qr_detect thread");

        face_detect.setName("face_detect "+cameratitle);
        face_detect.setDaemon(true);
        face_detect.start();
        System.out.println("Starting face_detect thread");
        //System.out.println("Starting face_detect thread");

        return true;
    } catch (Exception e) {
@@ -1262,34 +1304,13 @@ public class Cameradetail {
                if (result!=null){
                    return result.getText();
                }
            } catch (NotFoundException ignored) {
            } catch (Exception ignored) {
            }
        }
        return null;
    }


    private WritableImage matToWritableImage(Mat mat){
        int cols = mat.cols();
        int rows = mat.rows();
        WritableImage writableImage = new WritableImage(cols, rows);
        ByteBuffer buffer = mat.createBuffer();
        PixelFormat<ByteBuffer> pixelFormat = PixelFormat.getByteRgbInstance();
        writableImage.getPixelWriter().setPixels(0, 0, cols, rows, pixelFormat, buffer, cols * 3);
        return writableImage;
    }

    private BufferedImage matToBufferedImage(Mat mat){
        int type = BufferedImage.TYPE_BYTE_GRAY;
        if (mat.channels() > 1) {
            type = BufferedImage.TYPE_3BYTE_BGR;
        }
        BufferedImage image = new BufferedImage(mat.cols(), mat.rows(), type);
        byte[] data = ((DataBufferByte) image.getRaster().getDataBuffer()).getData();

        mat.data().get(data);
        return image;
    }

    private void raise_log(String msg){
        if (event!=null) event.onLog(msg);

@@ -839,7 +839,7 @@ public class CaptureView {
            reducewidth = ObsbotMeet2.Mode3.getWidth();
            reduceheight = ObsbotMeet2.Mode3.getHeight();
        }
        image.SetGrabber(grabber, livewidth,liveheight,photowidth,photoheight,reducewidth,reduceheight, true);
        image.SetGrabber(devicenumber,grabber, livewidth,liveheight,photowidth,photoheight,reducewidth,reduceheight, true);

        boolean use_face_detector = true;
        boolean use_qr_detector = true;
@@ -1011,9 +1011,9 @@ public class CaptureView {

    private void update_status(Cameradetail image){
        String sb = "Camera Started, " +
                image.getBestWidth() +
                image.getRealWidth() +
                "x" +
                image.getBestHeight() +
                image.getRealHeight() +
                "@" +
                image.getLiveFPS();

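update_status now reports the frame size actually delivered by the grabber (realwidth/realheight, refreshed in the capture thread whenever the frame dimensions change) rather than the configured BestSize. For example, a camera configured for 3840x2160 that only delivers 1920x1080 frames would now show "Camera Started, 1920x1080@..." with the live FPS appended (numbers are illustrative).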
@@ -26,15 +26,20 @@ public class DetectorResult {

    public void FaceRectangle(UMat mat){
        if (haveFace()){
            try{
                rectangle(mat, Face, Scalar.GREEN, linethickness, linetype, lineshift);

            rectangle(mat, Face, Scalar.GREEN, linethickness, linetype, lineshift);
            } catch (Exception ignored){}
        }
    }

    public void EyesRectangle(UMat mat){
        if (haveEyes()){
            for(Rect eye : Eyes){
                rectangle(mat, eye, Scalar.BLUE);
                try{
                    rectangle(mat, eye, Scalar.BLUE);

                } catch (Exception ignored){}
            }
        }
    }

@@ -180,41 +180,29 @@ public class Detectors {
     */
    public static RectVector DetectEye(UMat graymat, int facewidth){
        //return Detect(graymat, eyeDetector);
        int minwidth = (int)(facewidth*0.2);
        int minwidth = Math.max((int)(facewidth*0.25), 24);
        int maxwidth = (int)(facewidth*0.4);
        Size minsize = new Size(minwidth, minwidth);
        Size maxsize = new Size(maxwidth, maxwidth);
        return Detect(graymat, eyeDetector, scaleFactor, minNeighbors, flags, minsize, maxsize);
    }
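The minimum eye-search window is now a quarter of the face width with a 24 px floor, which keeps the cascade from being asked for implausibly small eyes on small faces. Illustrative values:

    // facewidth = 200 -> minwidth = max(50, 24) = 50, maxwidth = 80   (old rule: 40)
    // facewidth =  80 -> minwidth = max(20, 24) = 24, maxwidth = 32   (old rule: 16)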


    @SuppressWarnings("unused")
    private static RectVector Detect(UMat graymat, CascadeClassifier detector){
        if (detector!=null){
            if (graymat!=null){
                if (!graymat.empty()){
                    RectVector detected = new RectVector();
                    detector.detectMultiScale(graymat, detected);
                    return detected;
                }
            }

        }
        return null;
    }

    @SuppressWarnings("SameParameterValue")
    private static RectVector Detect(UMat graymat, CascadeClassifier detector, double scaleFactor, int minNeighbors, int flags, Size minSize, Size maxSize){
        if (detector!=null){
            if (graymat!=null){
            if (graymat!=null && graymat.channels()==1){
                if (!graymat.empty()){
                    if (minSize!=null){
                        if (maxSize!=null){
                            RectVector detected = new RectVector();
                            detector.detectMultiScale(graymat, detected, scaleFactor, minNeighbors, flags, minSize, maxSize);
                            return detected;
                            try{
                                RectVector detected = new RectVector();
                                detector.detectMultiScale(graymat, detected, scaleFactor, minNeighbors, flags, minSize, maxSize);

                                return detected;
                            } catch (Exception e){
                                System.out.println("Detectors Detect Error, Message : "+e.getMessage());
                            }

                        }
                    }

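The five-parameter Detect now insists on a single-channel input (graymat.channels()==1) and swallows detectMultiScale failures instead of letting them kill the detector thread. A minimal sketch of preparing the input, assuming a BGR UMat named `frame` and a face Rect named `face` (both illustrative) plus the usual static imports from opencv_imgproc:

    UMat gray = new UMat();
    cvtColor(frame, gray, COLOR_BGR2GRAY);   // cascades need an 8-bit single-channel image
    RectVector eyes = DetectEye(gray, face.width());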
@@ -37,7 +37,7 @@ public class MainApplication extends Application {
        Screen screen = Screen.getPrimary();
        Rectangle2D screenbound = screen.getBounds();
        Scene scene = new Scene(fxmlLoader.load(), screenbound.getWidth(), screenbound.getHeight());
        stage.setTitle("MultiCam Capture App for ERHA 11042025-004");
        stage.setTitle("MultiCam Capture App for ERHA 14042025-041");
        stage.setScene(scene);
        stage.setResizable(true);
        stage.setMaximized(true);