first commit

2024-11-09 08:55:17 +07:00
commit f6ee4817e6
98 changed files with 85493 additions and 0 deletions

View File

@@ -0,0 +1,112 @@
package BASS;
import lombok.Getter;
import org.tinylog.Logger;
@SuppressWarnings("unused")
public class AudioPlayer {
private final Bass bass = Bass.Instance;
private @Getter boolean inited = false;
/**
* Initialize AudioPlayer
* @param deviceid device id to be used, 0 = no speaker, 1 = first device, 2 ... n = other devices
* @param samplingrate sampling rate to be used, 44100 = CD quality, 48000 = DVD quality
*/
public AudioPlayer(int deviceid, int samplingrate){
if (bass.BASS_GetVersion()!=0){
if (deviceid>-1){
int initflags = Bass.BASS_DEVICE_16BITS | Bass.BASS_DEVICE_STEREO | Bass.BASS_DEVICE_FREQ | Bass.BASS_DEVICE_REINIT;
if (bass.BASS_Init(deviceid,samplingrate,initflags)){
inited = true;
} else Logger.error("AudioPlayer initialization failed, BASS_Init failed, error code: " + bass.BASS_ErrorGetCode());
} else Logger.error("AudioPlayer initialization failed, deviceid is not correct");
} else Logger.error("AudioPlayer initialization failed, BASS version is not correct");
}
/**
* Free AudioPlayer
*/
public void Free(){
if (inited){
if (bass.BASS_Free()){
Logger.info("AudioPlayer Free success");
} else Logger.error("AudioPlayer Free failed, error code: "+bass.BASS_ErrorGetCode());
inited = false;
}
}
/**
* Set Output Volume
* @param volume volume level, 0-100
*/
public void setOutputVolume(int volume){
if (volume<0) volume = 0;
if (volume>100) volume = 100;
if (inited){
if (!bass.BASS_SetVolume(volume/100f)){
Logger.error("AudioPlayer SetVolume failed, error code: "+bass.BASS_ErrorGetCode());
}
} else Logger.info("AudioPlayer SetVolume failed, AudioPlayer is not initialized");
}
/**
* Get Output Volume
* @return volume level, 0-100
*/
public int getOutputVolume(){
if (inited){
float volume = bass.BASS_GetVolume();
if (volume>=0 && volume<=1){
return (int)(volume*100);
} else Logger.info("AudioPlayer GetVolume failed, volume is not correct");
} else Logger.info("AudioPlayer GetVolume failed, AudioPlayer is not initialized");
return -1;
}
/**
* Play Audio File
* @param filename File to be played
* @param playbackstatus PlaybackStatus callback
* @return true if success, false if failed
*/
public boolean PlayFile(final String filename, final PlaybackStatus playbackstatus){
if (inited){
int filehandle = bass.BASS_StreamCreateFile(false, filename, 0, 0, 0);
if (filehandle!=0){
if (bass.BASS_ChannelStart(filehandle)){
new Thread(()->{
if (playbackstatus!=null) playbackstatus.onPlaybackStarted(filename);
boolean iscontinue = true;
boolean failed = false;
while(iscontinue){
switch (bass.BASS_ChannelIsActive(filehandle)) {
case Bass.BASS_ACTIVE_PAUSED_DEVICE:
// the output device has stopped; report the failure and leave the loop
if (playbackstatus!=null) playbackstatus.onPlaybackFailure(filename);
failed = true;
iscontinue = false;
break;
case Bass.BASS_ACTIVE_STOPPED:
iscontinue = false;
break;
default : {
try {
Thread.sleep(100);
} catch (InterruptedException e) {
iscontinue = false;
}
}
}
}
bass.BASS_StreamFree(filehandle); // release the stream handle once playback has ended
if (playbackstatus!=null && !failed) playbackstatus.onPlaybackFinished(filename);
}).start();
return true;
} else Logger.error("AudioPlayer PlayFile failed, BASS_ChannelStart failed, error code: "+bass.BASS_ErrorGetCode());
} else Logger.error("AudioPlayer PlayFile failed, BASS_StreamCreateFile failed, error code: "+bass.BASS_ErrorGetCode());
} else Logger.info("AudioPlayer PlayFile failed, AudioPlayer is not initialized");
return false;
}
}
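
A minimal usage sketch of AudioPlayer, not part of this commit: it assumes the PlaybackStatus interface added further down in this commit, a loadable BASS library, and a placeholder file path.

// hypothetical usage sketch: device 1 (first output device) at 44.1 kHz
AudioPlayer player = new AudioPlayer(1, 44100);
if (player.isInited()) {
    player.setOutputVolume(80);
    player.PlayFile("/tmp/sample.mp3", new PlaybackStatus() { // path is a placeholder
        public void onPlaybackStarted(String filename)  { System.out.println("started " + filename); }
        public void onPlaybackFinished(String filename) { System.out.println("finished " + filename); }
        public void onPlaybackFailure(String filename)  { System.out.println("failed " + filename); }
    });
    // ... wait until playback has finished before releasing the device ...
    player.Free();
}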

View File

@@ -0,0 +1,785 @@
package BASS;
import com.sun.jna.*;
import java.io.IOException;
@SuppressWarnings("unused")
public interface Bass extends Library {
Bass Instance = (Bass) Native.load("bass", Bass.class);
int BASSVERSION = 0x204; // API version
String BASSVERSIONTEXT = "2.4";
// Error codes returned by BASS_ErrorGetCode
int BASS_OK = 0; // all is OK
int BASS_ERROR_MEM = 1; // memory error
int BASS_ERROR_FILEOPEN = 2; // can't open the file
int BASS_ERROR_DRIVER = 3; // can't find a free/valid driver
int BASS_ERROR_BUFLOST = 4; // the sample buffer was lost
int BASS_ERROR_HANDLE = 5; // invalid handle
int BASS_ERROR_FORMAT = 6; // unsupported sample format
int BASS_ERROR_POSITION = 7; // invalid position
int BASS_ERROR_INIT = 8; // BASS_Init has not been successfully called
int BASS_ERROR_START = 9; // BASS_Start has not been successfully called
int BASS_ERROR_SSL = 10; // SSL/HTTPS support isn't available
int BASS_ERROR_REINIT = 11; // device needs to be reinitialized
int BASS_ERROR_ALREADY = 14; // already initialized/paused/whatever
int BASS_ERROR_NOTAUDIO = 17; // file does not contain audio
int BASS_ERROR_NOCHAN = 18; // can't get a free channel
int BASS_ERROR_ILLTYPE = 19; // an illegal type was specified
int BASS_ERROR_ILLPARAM = 20; // an illegal parameter was specified
int BASS_ERROR_NO3D = 21; // no 3D support
int BASS_ERROR_NOEAX = 22; // no EAX support
int BASS_ERROR_DEVICE = 23; // illegal device number
int BASS_ERROR_NOPLAY = 24; // not playing
int BASS_ERROR_FREQ = 25; // illegal sample rate
int BASS_ERROR_NOTFILE = 27; // the stream is not a file stream
int BASS_ERROR_NOHW = 29; // no hardware voices available
int BASS_ERROR_EMPTY = 31; // the file has no sample data
int BASS_ERROR_NONET = 32; // no internet connection could be opened
int BASS_ERROR_CREATE = 33; // couldn't create the file
int BASS_ERROR_NOFX = 34; // effects are not available
int BASS_ERROR_NOTAVAIL = 37; // requested data/action is not available
int BASS_ERROR_DECODE = 38; // the channel is a "decoding channel"
int BASS_ERROR_DX = 39; // a sufficient DirectX version is not installed
int BASS_ERROR_TIMEOUT = 40; // connection timed out
int BASS_ERROR_FILEFORM = 41; // unsupported file format
int BASS_ERROR_SPEAKER = 42; // unavailable speaker
int BASS_ERROR_VERSION = 43; // invalid BASS version (used by add-ons)
int BASS_ERROR_CODEC = 44; // codec is not available/supported
int BASS_ERROR_ENDED = 45; // the channel/file has ended
int BASS_ERROR_BUSY = 46; // the device is busy
int BASS_ERROR_UNSTREAMABLE = 47; // unstreamable file
int BASS_ERROR_PROTOCOL = 48; // unsupported protocol
int BASS_ERROR_DENIED = 49; // access denied
int BASS_ERROR_UNKNOWN = -1; // some other mystery problem
int BASS_ERROR_JAVA_CLASS = 500; // object class problem
// BASS_SetConfig options
int BASS_CONFIG_BUFFER = 0;
int BASS_CONFIG_UPDATEPERIOD = 1;
int BASS_CONFIG_GVOL_SAMPLE = 4;
int BASS_CONFIG_GVOL_STREAM = 5;
int BASS_CONFIG_GVOL_MUSIC = 6;
int BASS_CONFIG_CURVE_VOL = 7;
int BASS_CONFIG_CURVE_PAN = 8;
int BASS_CONFIG_FLOATDSP = 9;
int BASS_CONFIG_3DALGORITHM = 10;
int BASS_CONFIG_NET_TIMEOUT = 11;
int BASS_CONFIG_NET_BUFFER = 12;
int BASS_CONFIG_PAUSE_NOPLAY = 13;
int BASS_CONFIG_NET_PREBUF = 15;
int BASS_CONFIG_NET_PASSIVE = 18;
int BASS_CONFIG_REC_BUFFER = 19;
int BASS_CONFIG_NET_PLAYLIST = 21;
int BASS_CONFIG_MUSIC_VIRTUAL = 22;
int BASS_CONFIG_VERIFY = 23;
int BASS_CONFIG_UPDATETHREADS = 24;
int BASS_CONFIG_DEV_BUFFER = 27;
int BASS_CONFIG_DEV_DEFAULT = 36;
int BASS_CONFIG_NET_READTIMEOUT = 37;
int BASS_CONFIG_HANDLES = 41;
int BASS_CONFIG_SRC = 43;
int BASS_CONFIG_SRC_SAMPLE = 44;
int BASS_CONFIG_ASYNCFILE_BUFFER = 45;
int BASS_CONFIG_OGG_PRESCAN = 47;
int BASS_CONFIG_DEV_NONSTOP = 50;
int BASS_CONFIG_VERIFY_NET = 52;
int BASS_CONFIG_DEV_PERIOD = 53;
int BASS_CONFIG_FLOAT = 54;
int BASS_CONFIG_NET_SEEK = 56;
int BASS_CONFIG_AM_DISABLE = 58;
int BASS_CONFIG_NET_PLAYLIST_DEPTH = 59;
int BASS_CONFIG_NET_PREBUF_WAIT = 60;
int BASS_CONFIG_ANDROID_SESSIONID = 62;
int BASS_CONFIG_ANDROID_AAUDIO = 67;
int BASS_CONFIG_SAMPLE_ONEHANDLE = 69;
int BASS_CONFIG_DEV_TIMEOUT = 70;
int BASS_CONFIG_NET_META = 71;
int BASS_CONFIG_NET_RESTRATE = 72;
int BASS_CONFIG_REC_DEFAULT = 73;
int BASS_CONFIG_NORAMP = 74;
// BASS_SetConfigPtr options
int BASS_CONFIG_NET_AGENT = 16;
int BASS_CONFIG_NET_PROXY = 17;
int BASS_CONFIG_LIBSSL = 64;
int BASS_CONFIG_FILENAME = 75;
int BASS_CONFIG_THREAD = 0x40000000; // flag: thread-specific setting
// BASS_Init flags
int BASS_DEVICE_8BITS = 1; // unused
int BASS_DEVICE_MONO = 2; // mono
int BASS_DEVICE_3D = 4; // unused
int BASS_DEVICE_16BITS = 8; // limit output to 16-bit
int BASS_DEVICE_REINIT = 128; // reinitialize
int BASS_DEVICE_LATENCY = 0x100; // unused
int BASS_DEVICE_SPEAKERS = 0x800; // force enabling of speaker assignment
int BASS_DEVICE_NOSPEAKER = 0x1000; // ignore speaker arrangement
int BASS_DEVICE_DMIX = 0x2000; // use ALSA "dmix" plugin
int BASS_DEVICE_FREQ = 0x4000; // set device sample rate
int BASS_DEVICE_STEREO = 0x8000; // limit output to stereo
int BASS_DEVICE_AUDIOTRACK = 0x20000; // use AudioTrack output
int BASS_DEVICE_DSOUND = 0x40000; // use DirectSound output
int BASS_DEVICE_SOFTWARE = 0x80000; // disable hardware/fastpath output
@Structure.FieldOrder({"name", "driver", "flags"})
class BASS_DEVICEINFO extends Structure {
public String name; // description
public String driver; // driver
public int flags;
}
// BASS_DEVICEINFO flags
int BASS_DEVICE_ENABLED = 1;
int BASS_DEVICE_DEFAULT = 2;
int BASS_DEVICE_INIT = 4;
@Structure.FieldOrder({"flags", "hwsize", "hwfree", "freesam", "free3d", "minrate", "maxrate", "eax", "minbuf", "dsver", "latency", "initflags", "speakers", "freq"})
class BASS_INFO extends Structure{
public int flags; // device capabilities (DSCAPS_xxx flags)
public int hwsize; // unused
public int hwfree; // unused
public int freesam; // unused
public int free3d; // unused
public int minrate; // unused
public int maxrate; // unused
public int eax; // unused
public int minbuf; // recommended minimum buffer length in ms
public int dsver; // DirectSound version
public int latency; // average delay (in ms) before start of playback
public int initflags; // BASS_Init "flags" parameter
public int speakers; // number of speakers available
public int freq; // current output rate
}
// Recording device info structure
@Structure.FieldOrder({"flags", "formats", "inputs", "singlein", "freq"})
class BASS_RECORDINFO extends Structure {
public int flags; // device capabilities (DSCCAPS_xxx flags)
public int formats; // supported standard formats (WAVE_FORMAT_xxx flags)
public int inputs; // number of inputs
public boolean singlein; // TRUE = only 1 input can be set at a time
public int freq; // current input rate
}
// Sample info structure
@Structure.FieldOrder({"freq", "chans", "flags", "length", "max", "origres", "chans", "mingap", "mode3d", "mindist", "maxdist", "iangle", "oangle", "outvol", "vam", "priority"})
class BASS_SAMPLE extends Structure {
public int freq; // default playback rate
public float volume; // default volume (0-1)
public float pan; // default pan (-1=left, 0=middle, 1=right)
public int flags; // BASS_SAMPLE_xxx flags
public int length; // length (in bytes)
public int max; // maximum simultaneous playbacks
public int origres; // original resolution bits
public int chans; // number of channels
public int mingap; // minimum gap (ms) between creating channels
public int mode3d; // BASS_3DMODE_xxx mode
public float mindist; // minimum distance
public float maxdist; // maximum distance
public int iangle; // angle of inside projection cone
public int oangle; // angle of outside projection cone
public float outvol; // delta-volume outside the projection cone
public int vam; // unused
public int priority; // unused
}
int BASS_SAMPLE_8BITS = 1; // 8 bit
int BASS_SAMPLE_FLOAT = 256; // 32-bit floating-point
int BASS_SAMPLE_MONO = 2; // mono
int BASS_SAMPLE_LOOP = 4; // looped
int BASS_SAMPLE_3D = 8; // 3D functionality
int BASS_SAMPLE_SOFTWARE = 16; // unused
int BASS_SAMPLE_MUTEMAX = 32; // mute at max distance (3D only)
int BASS_SAMPLE_VAM = 64; // unused
int BASS_SAMPLE_FX = 128; // unused
int BASS_SAMPLE_OVER_VOL = 0x10000; // override lowest volume
int BASS_SAMPLE_OVER_POS = 0x20000; // override longest playing
int BASS_SAMPLE_OVER_DIST = 0x30000; // override furthest from listener (3D only)
int BASS_STREAM_PRESCAN = 0x20000; // scan file for accurate seeking and length
int BASS_STREAM_AUTOFREE = 0x40000; // automatically free the stream when it stops/ends
int BASS_STREAM_RESTRATE = 0x80000; // restrict the download rate of internet file streams
int BASS_STREAM_BLOCK = 0x100000; // download/play internet file stream in small blocks
int BASS_STREAM_DECODE = 0x200000; // don't play the stream, only decode (BASS_ChannelGetData)
int BASS_STREAM_STATUS = 0x800000; // give server status info (HTTP/ICY tags) in DOWNLOADPROC
int BASS_MP3_IGNOREDELAY = 0x200; // ignore LAME/Xing/VBRI/iTunes delay & padding info
int BASS_MP3_SETPOS = BASS_STREAM_PRESCAN;
int BASS_MUSIC_FLOAT = BASS_SAMPLE_FLOAT;
int BASS_MUSIC_MONO = BASS_SAMPLE_MONO;
int BASS_MUSIC_LOOP = BASS_SAMPLE_LOOP;
int BASS_MUSIC_3D = BASS_SAMPLE_3D;
int BASS_MUSIC_FX = BASS_SAMPLE_FX;
int BASS_MUSIC_AUTOFREE = BASS_STREAM_AUTOFREE;
int BASS_MUSIC_DECODE = BASS_STREAM_DECODE;
int BASS_MUSIC_PRESCAN = BASS_STREAM_PRESCAN; // calculate playback length
int BASS_MUSIC_CALCLEN = BASS_MUSIC_PRESCAN;
int BASS_MUSIC_RAMP = 0x200; // normal ramping
int BASS_MUSIC_RAMPS = 0x400; // sensitive ramping
int BASS_MUSIC_SURROUND = 0x800; // surround sound
int BASS_MUSIC_SURROUND2 = 0x1000; // surround sound (mode 2)
int BASS_MUSIC_FT2PAN = 0x2000; // apply FastTracker 2 panning to XM files
int BASS_MUSIC_FT2MOD = 0x2000; // play .MOD as FastTracker 2 does
int BASS_MUSIC_PT1MOD = 0x4000; // play .MOD as ProTracker 1 does
int BASS_MUSIC_NONINTER = 0x10000; // non-interpolated sample mixing
int BASS_MUSIC_SINCINTER = 0x800000; // sinc interpolated sample mixing
int BASS_MUSIC_POSRESET = 0x8000; // stop all notes when moving position
int BASS_MUSIC_POSRESETEX = 0x400000; // stop all notes and reset bpm/etc when moving position
int BASS_MUSIC_STOPBACK = 0x80000; // stop the music on a backwards jump effect
int BASS_MUSIC_NOSAMPLE = 0x100000; // don't load the samples
// Speaker assignment flags
int BASS_SPEAKER_FRONT = 0x1000000; // front speakers
int BASS_SPEAKER_REAR = 0x2000000; // rear speakers
int BASS_SPEAKER_CENLFE = 0x3000000; // center & LFE speakers (5.1)
int BASS_SPEAKER_SIDE = 0x4000000; // side speakers (7.1)
static int BASS_SPEAKER_N(int n) { return n<<24; } // n'th pair of speakers (max 15)
int BASS_SPEAKER_LEFT = 0x10000000; // modifier: left
int BASS_SPEAKER_RIGHT = 0x20000000; // modifier: right
int BASS_SPEAKER_FRONTLEFT = BASS_SPEAKER_FRONT | BASS_SPEAKER_LEFT;
int BASS_SPEAKER_FRONTRIGHT = BASS_SPEAKER_FRONT | BASS_SPEAKER_RIGHT;
int BASS_SPEAKER_REARLEFT = BASS_SPEAKER_REAR | BASS_SPEAKER_LEFT;
int BASS_SPEAKER_REARRIGHT = BASS_SPEAKER_REAR | BASS_SPEAKER_RIGHT;
int BASS_SPEAKER_CENTER = BASS_SPEAKER_CENLFE | BASS_SPEAKER_LEFT;
int BASS_SPEAKER_LFE = BASS_SPEAKER_CENLFE | BASS_SPEAKER_RIGHT;
int BASS_SPEAKER_SIDELEFT = BASS_SPEAKER_SIDE | BASS_SPEAKER_LEFT;
int BASS_SPEAKER_SIDERIGHT = BASS_SPEAKER_SIDE | BASS_SPEAKER_RIGHT;
int BASS_SPEAKER_REAR2 = BASS_SPEAKER_SIDE;
int BASS_SPEAKER_REAR2LEFT = BASS_SPEAKER_SIDELEFT;
int BASS_SPEAKER_REAR2RIGHT = BASS_SPEAKER_SIDERIGHT;
int BASS_ASYNCFILE = 0x40000000; // read file asynchronously
int BASS_RECORD_PAUSE = 0x8000; // start recording paused
// Channel info structure
@Structure.FieldOrder({"freq", "chans", "flags", "ctype", "origres", "plugin", "sample", "filename"})
class BASS_CHANNELINFO extends Structure {
public int freq; // default playback rate
public int chans; // channels
public int flags;
public int ctype; // type of channel
public int origres; // original resolution
public int plugin;
public int sample;
public String filename;
}
int BASS_ORIGRES_FLOAT = 0x10000;
// BASS_CHANNELINFO types
int BASS_CTYPE_SAMPLE = 1;
int BASS_CTYPE_RECORD = 2;
int BASS_CTYPE_STREAM = 0x10000;
int BASS_CTYPE_STREAM_VORBIS = 0x10002;
int BASS_CTYPE_STREAM_OGG = 0x10002;
int BASS_CTYPE_STREAM_MP1 = 0x10003;
int BASS_CTYPE_STREAM_MP2 = 0x10004;
int BASS_CTYPE_STREAM_MP3 = 0x10005;
int BASS_CTYPE_STREAM_AIFF = 0x10006;
int BASS_CTYPE_STREAM_CA = 0x10007;
int BASS_CTYPE_STREAM_MF = 0x10008;
int BASS_CTYPE_STREAM_AM = 0x10009;
int BASS_CTYPE_STREAM_SAMPLE = 0x1000a;
int BASS_CTYPE_STREAM_DUMMY = 0x18000;
int BASS_CTYPE_STREAM_DEVICE = 0x18001;
int BASS_CTYPE_STREAM_WAV = 0x40000; // WAVE flag (LOWORD=codec)
int BASS_CTYPE_STREAM_WAV_PCM = 0x50001;
int BASS_CTYPE_STREAM_WAV_FLOAT = 0x50003;
int BASS_CTYPE_MUSIC_MOD = 0x20000;
int BASS_CTYPE_MUSIC_MTM = 0x20001;
int BASS_CTYPE_MUSIC_S3M = 0x20002;
int BASS_CTYPE_MUSIC_XM = 0x20003;
int BASS_CTYPE_MUSIC_IT = 0x20004;
int BASS_CTYPE_MUSIC_MO3 = 0x00100; // MO3 flag
@Structure.FieldOrder({"ctype", "name", "exts"})
class BASS_PLUGINFORM extends Structure {
int ctype; // channel type
String name; // format description
String exts; // file extension filter (*.ext1;*.ext2;etc...)
}
@Structure.FieldOrder({"version", "formatc", "formats"})
class BASS_PLUGININFO extends Structure {
int version; // version (same form as BASS_GetVersion)
int formatc; // number of formats
BASS_PLUGINFORM[] formats; // the array of formats
}
// 3D vector (for 3D positions/velocities/orientations)
@Structure.FieldOrder({"x", "y", "z"})
class BASS_3DVECTOR extends Structure {
public BASS_3DVECTOR() {}
public BASS_3DVECTOR(float _x, float _y, float _z) { x=_x; y=_y; z=_z; }
public float x; // +=right, -=left
public float y; // +=up, -=down
public float z; // +=front, -=behind
}
// 3D channel modes
int BASS_3DMODE_NORMAL = 0; // normal 3D processing
int BASS_3DMODE_RELATIVE = 1; // position is relative to the listener
int BASS_3DMODE_OFF = 2; // no 3D processing
// software 3D mixing algorithms (used with BASS_CONFIG_3DALGORITHM)
int BASS_3DALG_DEFAULT = 0;
int BASS_3DALG_OFF = 1;
int BASS_3DALG_FULL = 2;
int BASS_3DALG_LIGHT = 3;
// BASS_SampleGetChannel flags
int BASS_SAMCHAN_NEW = 1; // get a new playback channel
int BASS_SAMCHAN_STREAM = 2; // create a stream
interface STREAMPROC extends Callback
{
int STREAMPROC(int handle, Pointer buffer, int length, Pointer user);
/* User stream callback function.
handle : The stream that needs writing
buffer : Buffer to write the samples in
length : Number of bytes to write
user : The 'user' parameter value given when calling BASS_StreamCreate
RETURN : Number of bytes written. Set the BASS_STREAMPROC_END flag to end
the stream. */
}
int BASS_STREAMPROC_END = 0x80000000; // end of user stream flag
// Special STREAMPROCs
int STREAMPROC_DUMMY = 0; // "dummy" stream
int STREAMPROC_PUSH = -1; // push stream
int STREAMPROC_DEVICE = -2; // device mix stream
int STREAMPROC_DEVICE_3D = -3; // device 3D mix stream
// BASS_StreamCreateFileUser file systems
int STREAMFILE_NOBUFFER = 0;
int STREAMFILE_BUFFER = 1;
int STREAMFILE_BUFFERPUSH = 2;
// User file stream callback functions; BASS_FILEPROCS in C is a structure of four
// callback pointers, and a JNA Callback interface may only declare one method
interface FILECLOSEPROC extends Callback { void FILECLOSEPROC(Pointer user); }
interface FILELENPROC extends Callback { long FILELENPROC(Pointer user); }
interface FILEREADPROC extends Callback { int FILEREADPROC(Pointer buffer, int length, Pointer user); }
interface FILESEEKPROC extends Callback { boolean FILESEEKPROC(long offset, Pointer user); }
@Structure.FieldOrder({"close", "length", "read", "seek"})
class BASS_FILEPROCS extends Structure {
public FILECLOSEPROC close;
public FILELENPROC length;
public FILEREADPROC read;
public FILESEEKPROC seek;
}
// BASS_StreamPutFileData options
int BASS_FILEDATA_END = 0; // end & close the file
// BASS_StreamGetFilePosition modes
int BASS_FILEPOS_CURRENT = 0;
int BASS_FILEPOS_DECODE = BASS_FILEPOS_CURRENT;
int BASS_FILEPOS_DOWNLOAD = 1;
int BASS_FILEPOS_END = 2;
int BASS_FILEPOS_START = 3;
int BASS_FILEPOS_CONNECTED = 4;
int BASS_FILEPOS_BUFFER = 5;
int BASS_FILEPOS_SOCKET = 6;
int BASS_FILEPOS_ASYNCBUF = 7;
int BASS_FILEPOS_SIZE = 8;
int BASS_FILEPOS_BUFFERING = 9;
int BASS_FILEPOS_AVAILABLE = 10;
interface DOWNLOADPROC extends Callback
{
void DOWNLOADPROC(Pointer buffer, int length, Pointer user);
/* Internet stream download callback function.
buffer : Buffer containing the downloaded data... NULL=end of download
length : Number of bytes in the buffer
user : The 'user' parameter value given when calling BASS_StreamCreateURL */
}
// BASS_ChannelSetSync types
int BASS_SYNC_POS = 0;
int BASS_SYNC_END = 2;
int BASS_SYNC_META = 4;
int BASS_SYNC_SLIDE = 5;
int BASS_SYNC_STALL = 6;
int BASS_SYNC_DOWNLOAD = 7;
int BASS_SYNC_FREE = 8;
int BASS_SYNC_SETPOS = 11;
int BASS_SYNC_MUSICPOS = 10;
int BASS_SYNC_MUSICINST = 1;
int BASS_SYNC_MUSICFX = 3;
int BASS_SYNC_OGG_CHANGE = 12;
int BASS_SYNC_DEV_FAIL = 14;
int BASS_SYNC_DEV_FORMAT = 15;
int BASS_SYNC_THREAD = 0x20000000; // flag: call sync in other thread
int BASS_SYNC_MIXTIME = 0x40000000; // flag: sync at mixtime, else at playtime
int BASS_SYNC_ONETIME = 0x80000000; // flag: sync only once, else continuously
interface SYNCPROC extends Callback
{
void SYNCPROC(int handle, int channel, int data, Pointer user);
/* Sync callback function.
handle : The sync that has occurred
channel: Channel that the sync occurred in
data   : Additional data associated with the sync's occurrence
user : The 'user' parameter given when calling BASS_ChannelSetSync */
}
interface DSPPROC extends Callback
{
void DSPPROC(int handle, int channel, Pointer buffer, int length, Pointer user);
/* DSP callback function.
handle : The DSP handle
channel: Channel that the DSP is being applied to
buffer : Buffer to apply the DSP to
length : Number of bytes in the buffer
user : The 'user' parameter given when calling BASS_ChannelSetDSP */
}
interface RECORDPROC extends Callback
{
boolean RECORDPROC(int handle, Pointer buffer, int length, Pointer user);
/* Recording callback function.
handle : The recording handle
buffer : Buffer containing the recorded sample data
length : Number of bytes
user : The 'user' parameter value given when calling BASS_RecordStart
RETURN : true = continue recording, false = stop */
}
// BASS_ChannelIsActive return values
int BASS_ACTIVE_STOPPED = 0;
int BASS_ACTIVE_PLAYING = 1;
int BASS_ACTIVE_STALLED = 2;
int BASS_ACTIVE_PAUSED = 3;
int BASS_ACTIVE_PAUSED_DEVICE = 4;
// Channel attributes
int BASS_ATTRIB_FREQ = 1;
int BASS_ATTRIB_VOL = 2;
int BASS_ATTRIB_PAN = 3;
int BASS_ATTRIB_EAXMIX = 4;
int BASS_ATTRIB_NOBUFFER = 5;
int BASS_ATTRIB_VBR = 6;
int BASS_ATTRIB_CPU = 7;
int BASS_ATTRIB_SRC = 8;
int BASS_ATTRIB_NET_RESUME = 9;
int BASS_ATTRIB_SCANINFO = 10;
int BASS_ATTRIB_NORAMP = 11;
int BASS_ATTRIB_BITRATE = 12;
int BASS_ATTRIB_BUFFER = 13;
int BASS_ATTRIB_GRANULE = 14;
int BASS_ATTRIB_USER = 15;
int BASS_ATTRIB_TAIL = 16;
int BASS_ATTRIB_PUSH_LIMIT = 17;
int BASS_ATTRIB_DOWNLOADPROC = 18;
int BASS_ATTRIB_VOLDSP = 19;
int BASS_ATTRIB_VOLDSP_PRIORITY = 20;
int BASS_ATTRIB_MUSIC_AMPLIFY = 0x100;
int BASS_ATTRIB_MUSIC_PANSEP = 0x101;
int BASS_ATTRIB_MUSIC_PSCALER = 0x102;
int BASS_ATTRIB_MUSIC_BPM = 0x103;
int BASS_ATTRIB_MUSIC_SPEED = 0x104;
int BASS_ATTRIB_MUSIC_VOL_GLOBAL = 0x105;
int BASS_ATTRIB_MUSIC_VOL_CHAN = 0x200; // + channel #
int BASS_ATTRIB_MUSIC_VOL_INST = 0x300; // + instrument #
// BASS_ChannelSlideAttribute flags
int BASS_SLIDE_LOG = 0x1000000;
// BASS_ChannelGetData flags
int BASS_DATA_AVAILABLE = 0; // query how much data is buffered
int BASS_DATA_NOREMOVE = 0x10000000; // flag: don't remove data from recording buffer
int BASS_DATA_FIXED = 0x20000000; // unused
int BASS_DATA_FLOAT = 0x40000000; // flag: return floating-point sample data
int BASS_DATA_FFT256 = 0x80000000; // 256 sample FFT
int BASS_DATA_FFT512 = 0x80000001; // 512 FFT
int BASS_DATA_FFT1024 = 0x80000002; // 1024 FFT
int BASS_DATA_FFT2048 = 0x80000003; // 2048 FFT
int BASS_DATA_FFT4096 = 0x80000004; // 4096 FFT
int BASS_DATA_FFT8192 = 0x80000005; // 8192 FFT
int BASS_DATA_FFT16384 = 0x80000006; // 16384 FFT
int BASS_DATA_FFT32768 = 0x80000007; // 32768 FFT
int BASS_DATA_FFT_INDIVIDUAL = 0x10; // FFT flag: FFT for each channel, else all combined
int BASS_DATA_FFT_NOWINDOW = 0x20; // FFT flag: no Hanning window
int BASS_DATA_FFT_REMOVEDC = 0x40; // FFT flag: pre-remove DC bias
int BASS_DATA_FFT_COMPLEX = 0x80; // FFT flag: return complex data
int BASS_DATA_FFT_NYQUIST = 0x100; // FFT flag: return extra Nyquist value
// BASS_ChannelGetLevelEx flags
int BASS_LEVEL_MONO = 1; // get mono level
int BASS_LEVEL_STEREO = 2; // get stereo level
int BASS_LEVEL_RMS = 4; // get RMS levels
int BASS_LEVEL_VOLPAN = 8; // apply VOL/PAN attributes to the levels
int BASS_LEVEL_NOREMOVE = 16; // don't remove data from recording buffer
// BASS_ChannelGetTags types : what's returned
int BASS_TAG_ID3 = 0; // ID3v1 tags : TAG_ID3
int BASS_TAG_ID3V2 = 1; // ID3v2 tags : ByteBuffer
int BASS_TAG_OGG = 2; // OGG comments : String array
int BASS_TAG_HTTP = 3; // HTTP headers : String array
int BASS_TAG_ICY = 4; // ICY headers : String array
int BASS_TAG_META = 5; // ICY metadata : String
int BASS_TAG_APE = 6; // APE tags : String array
int BASS_TAG_MP4 = 7; // MP4/iTunes metadata : String array
int BASS_TAG_VENDOR = 9; // OGG encoder : String
int BASS_TAG_LYRICS3 = 10; // Lyrics3v2 tag : String
int BASS_TAG_WAVEFORMAT = 14; // WAVE format : ByteBuffer containing WAVEFORMATEX structure
int BASS_TAG_AM_NAME = 16; // Android Media codec name : String
int BASS_TAG_ID3V2_2 = 17; // ID3v2 tags (2nd block) : ByteBuffer
int BASS_TAG_AM_MIME = 18; // Android Media MIME type : String
int BASS_TAG_LOCATION = 19; // redirected URL : String
int BASS_TAG_RIFF_INFO = 0x100; // RIFF "INFO" tags : String array
int BASS_TAG_RIFF_BEXT = 0x101; // RIFF/BWF "bext" tags : TAG_BEXT
int BASS_TAG_RIFF_CART = 0x102; // RIFF/BWF "cart" tags : TAG_CART
int BASS_TAG_RIFF_DISP = 0x103; // RIFF "DISP" text tag : String
int BASS_TAG_RIFF_CUE = 0x104; // RIFF "cue " chunk : TAG_CUE structure
int BASS_TAG_RIFF_SMPL = 0x105; // RIFF "smpl" chunk : TAG_SMPL structure
int BASS_TAG_APE_BINARY = 0x1000; // + index #, binary APE tag : TAG_APE_BINARY
int BASS_TAG_MUSIC_NAME = 0x10000; // MOD music name : String
int BASS_TAG_MUSIC_MESSAGE = 0x10001; // MOD message : String
int BASS_TAG_MUSIC_ORDERS = 0x10002; // MOD order list : ByteBuffer
int BASS_TAG_MUSIC_AUTH = 0x10003; // MOD author : UTF-8 string
int BASS_TAG_MUSIC_INST = 0x10100; // + instrument #, MOD instrument name : String
int BASS_TAG_MUSIC_CHAN = 0x10200; // + channel #, MOD channel name : String
int BASS_TAG_MUSIC_SAMPLE = 0x10300; // + sample #, MOD sample name : String
int BASS_TAG_BYTEBUFFER = 0x10000000; // flag: return a ByteBuffer instead of a String or TAG_ID3
// ID3v1 tag structure
@Structure.FieldOrder({"id", "title", "artist", "album", "year", "comment", "genre", "track"})
class TAG_ID3 extends Structure {
public String id;
public String title;
public String artist;
public String album;
public String year;
public String comment;
public byte genre;
public byte track;
}
// Binary APE tag structure
@Structure.FieldOrder({"key", "data", "length"})
class TAG_APE_BINARY extends Structure {
public String key;
public Pointer data;
public int length;
}
// BASS_ChannelGetLength/GetPosition/SetPosition modes
int BASS_POS_BYTE = 0; // byte position
int BASS_POS_MUSIC_ORDER = 1; // order.row position, MAKELONG(order,row)
int BASS_POS_OGG = 3; // OGG bitstream number
int BASS_POS_END = 0x10; // trimmed end position
int BASS_POS_LOOP = 0x11; // loop start position
int BASS_POS_FLUSH = 0x1000000; // flag: flush decoder/FX buffers
int BASS_POS_RESET = 0x2000000; // flag: reset user file buffers
int BASS_POS_RELATIVE = 0x4000000; // flag: seek relative to the current position
int BASS_POS_INEXACT = 0x8000000; // flag: allow seeking to inexact position
int BASS_POS_DECODE = 0x10000000; // flag: get the decoding (not playing) position
int BASS_POS_DECODETO = 0x20000000; // flag: decode to the position instead of seeking
int BASS_POS_SCAN = 0x40000000; // flag: scan to the position
// BASS_ChannelSetDevice/GetDevice option
int BASS_NODEVICE = 0x20000;
// DX8 effect types, use with BASS_ChannelSetFX
int BASS_FX_DX8_CHORUS = 0;
int BASS_FX_DX8_COMPRESSOR = 1;
int BASS_FX_DX8_DISTORTION = 2;
int BASS_FX_DX8_ECHO = 3;
int BASS_FX_DX8_FLANGER = 4;
int BASS_FX_DX8_GARGLE = 5;
int BASS_FX_DX8_I3DL2REVERB = 6;
int BASS_FX_DX8_PARAMEQ = 7;
int BASS_FX_DX8_REVERB = 8;
int BASS_FX_VOLUME = 9;
@Structure.FieldOrder({"fWetDryMix", "fDepth", "fFeedback", "fFrequency", "lWaveform", "fDelay", "lPhase"})
class BASS_DX8_CHORUS extends Structure {
public float fWetDryMix;
public float fDepth;
public float fFeedback;
public float fFrequency;
public int lWaveform; // 0=triangle, 1=sine
public float fDelay;
public int lPhase; // BASS_DX8_PHASE_xxx
}
@Structure.FieldOrder({"fGain", "fEdge", "fPostEQCenterFrequency", "fPostEQBandwidth", "fPreLowpassCutoff"})
class BASS_DX8_DISTORTION extends Structure {
public float fGain;
public float fEdge;
public float fPostEQCenterFrequency;
public float fPostEQBandwidth;
public float fPreLowpassCutoff;
}
@Structure.FieldOrder({"fWetDryMix", "fFeedback", "fLeftDelay", "fRightDelay", "lPanDelay"})
class BASS_DX8_ECHO extends Structure {
public float fWetDryMix;
public float fFeedback;
public float fLeftDelay;
public float fRightDelay;
public boolean lPanDelay;
}
@Structure.FieldOrder({"fWetDryMix", "fDepth", "fFeedback", "fFrequency", "lWaveform", "fDelay", "lPhase"})
class BASS_DX8_FLANGER extends Structure {
public float fWetDryMix;
public float fDepth;
public float fFeedback;
public float fFrequency;
public int lWaveform; // 0=triangle, 1=sine
public float fDelay;
public int lPhase; // BASS_DX8_PHASE_xxx
}
@Structure.FieldOrder({"fCenter", "fBandwidth", "fGain"})
class BASS_DX8_PARAMEQ extends Structure {
public float fCenter;
public float fBandwidth;
public float fGain;
}
@Structure.FieldOrder({"fInGain", "fReverbMix", "fReverbTime", "fHighFreqRTRatio"})
class BASS_DX8_REVERB extends Structure {
public float fInGain;
public float fReverbMix;
public float fReverbTime;
public float fHighFreqRTRatio;
}
int BASS_DX8_PHASE_NEG_180 = 0;
int BASS_DX8_PHASE_NEG_90 = 1;
int BASS_DX8_PHASE_ZERO = 2;
int BASS_DX8_PHASE_90 = 3;
int BASS_DX8_PHASE_180 = 4;
@Structure.FieldOrder({"fTarget", "fCurrent", "fTime", "lCurve"})
class BASS_FX_VOLUME_PARAM extends Structure {
public float fTarget;
public float fCurrent;
public float fTime;
public int lCurve;
}
@Structure.FieldOrder({"value"})
class FloatValue extends Structure {
public float value;
}
boolean BASS_SetConfig(int option, int value);
int BASS_GetConfig(int option);
boolean BASS_SetConfigPtr(int option, Pointer value);
Object BASS_GetConfigPtr(int option);
int BASS_GetVersion();
int BASS_ErrorGetCode();
boolean BASS_GetDeviceInfo(int device, BASS_DEVICEINFO info);
boolean BASS_Init(int device, int freq, int flags);
boolean BASS_Free();
boolean BASS_SetDevice(int device);
int BASS_GetDevice();
boolean BASS_GetInfo(BASS_INFO info);
boolean BASS_Start();
boolean BASS_Stop();
boolean BASS_Pause();
int BASS_IsStarted();
boolean BASS_Update(int length);
float BASS_GetCPU();
boolean BASS_SetVolume(float volume);
float BASS_GetVolume();
boolean BASS_Set3DFactors(float distf, float rollf, float doppf);
boolean BASS_Get3DFactors(FloatValue distf, FloatValue rollf, FloatValue doppf);
boolean BASS_Set3DPosition(BASS_3DVECTOR pos, BASS_3DVECTOR vel, BASS_3DVECTOR front, BASS_3DVECTOR top);
boolean BASS_Get3DPosition(BASS_3DVECTOR pos, BASS_3DVECTOR vel, BASS_3DVECTOR front, BASS_3DVECTOR top);
void BASS_Apply3D();
int BASS_PluginLoad(String file, int flags);
boolean BASS_PluginFree(int handle);
boolean BASS_PluginEnable(int handle, boolean enable);
BASS_PLUGININFO BASS_PluginGetInfo(int handle);
int BASS_SampleLoad(String file, long offset, int length, int max, int flags);
int BASS_SampleLoad(Pointer file, long offset, int length, int max, int flags);
int BASS_SampleCreate(int length, int freq, int chans, int max, int flags);
boolean BASS_SampleFree(int handle);
boolean BASS_SampleSetData(int handle, Pointer buffer);
boolean BASS_SampleGetData(int handle, Pointer buffer);
boolean BASS_SampleGetInfo(int handle, BASS_SAMPLE info);
boolean BASS_SampleSetInfo(int handle, BASS_SAMPLE info);
int BASS_SampleGetChannel(int handle, boolean onlynew);
int BASS_SampleGetChannels(int handle, int[] channels);
boolean BASS_SampleStop(int handle);
int BASS_StreamCreate(int freq, int chans, int flags, STREAMPROC proc, Pointer user);
int BASS_StreamCreateFile(boolean mem, String file, long offset, long length, int flags);
int BASS_StreamCreateFile(Pointer file, long offset, long length, int flags);
int BASS_StreamCreateURL(String url, int offset, int flags, DOWNLOADPROC proc, Pointer user);
int BASS_StreamCreateFileUser(int system, int flags, BASS_FILEPROCS procs, Pointer user);
boolean BASS_StreamFree(int handle);
long BASS_StreamGetFilePosition(int handle, int mode);
int BASS_StreamPutData(int handle, Pointer buffer, int length);
int BASS_StreamPutFileData(int handle, Pointer buffer, int length);
int BASS_MusicLoad(String file, long offset, int length, int flags, int freq);
int BASS_MusicLoad(Pointer file, long offset, int length, int flags, int freq);
boolean BASS_MusicFree(int handle);
boolean BASS_RecordGetDeviceInfo(int device, BASS_DEVICEINFO info);
boolean BASS_RecordInit(int device);
boolean BASS_RecordFree();
boolean BASS_RecordSetDevice(int device);
int BASS_RecordGetDevice();
boolean BASS_RecordGetInfo(BASS_RECORDINFO info);
String BASS_RecordGetInputName(int input);
boolean BASS_RecordSetInput(int input, int flags, float volume);
int BASS_RecordGetInput(int input, FloatValue volume);
int BASS_RecordStart(int freq, int chans, int flags, RECORDPROC proc, Pointer user);
double BASS_ChannelBytes2Seconds(int handle, long pos);
long BASS_ChannelSeconds2Bytes(int handle, double pos);
int BASS_ChannelGetDevice(int handle);
boolean BASS_ChannelSetDevice(int handle, int device);
int BASS_ChannelIsActive(int handle);
boolean BASS_ChannelGetInfo(int handle, BASS_CHANNELINFO info);
Object BASS_ChannelGetTags(int handle, int tags);
long BASS_ChannelFlags(int handle, int flags, int mask);
boolean BASS_ChannelLock(int handle, boolean lock);
boolean BASS_ChannelFree(int handle);
boolean BASS_ChannelPlay(int handle, boolean restart);
boolean BASS_ChannelStart(int handle);
boolean BASS_ChannelStop(int handle);
boolean BASS_ChannelPause(int handle);
boolean BASS_ChannelUpdate(int handle, int length);
boolean BASS_ChannelSetAttribute(int handle, int attrib, float value);
boolean BASS_ChannelGetAttribute(int handle, int attrib, FloatValue value);
boolean BASS_ChannelSlideAttribute(int handle, int attrib, float value, int time);
boolean BASS_ChannelIsSliding(int handle, int attrib);
boolean BASS_ChannelSetAttributeEx(int handle, int attrib, Pointer value, int size);
boolean BASS_ChannelSetAttributeDOWNLOADPROC(int handle, DOWNLOADPROC proc, Pointer user);
int BASS_ChannelGetAttributeEx(int handle, int attrib, Pointer value, int size);
boolean BASS_ChannelSet3DAttributes(int handle, int mode, float min, float max, int iangle, int oangle, float outvol);
boolean BASS_ChannelGet3DAttributes(int handle, Integer mode, FloatValue min, FloatValue max, Integer iangle, Integer oangle, FloatValue outvol);
boolean BASS_ChannelSet3DPosition(int handle, BASS_3DVECTOR pos, BASS_3DVECTOR orient, BASS_3DVECTOR vel);
boolean BASS_ChannelGet3DPosition(int handle, BASS_3DVECTOR pos, BASS_3DVECTOR orient, BASS_3DVECTOR vel);
long BASS_ChannelGetLength(int handle, int mode);
boolean BASS_ChannelSetPosition(int handle, long pos, int mode);
long BASS_ChannelGetPosition(int handle, int mode);
int BASS_ChannelGetLevel(int handle);
boolean BASS_ChannelGetLevelEx(int handle, float[] levels, float length, int flags);
int BASS_ChannelGetData(int handle, Pointer buffer, int length);
int BASS_ChannelSetSync(int handle, int type, long param, SYNCPROC proc, Pointer user);
boolean BASS_ChannelRemoveSync(int handle, int sync);
boolean BASS_ChannelSetLink(int handle, int chan);
boolean BASS_ChannelRemoveLink(int handle, int chan);
int BASS_ChannelSetDSP(int handle, DSPPROC proc, Pointer user, int priority);
boolean BASS_ChannelRemoveDSP(int handle, int dsp);
int BASS_ChannelSetFX(int handle, int type, int priority);
boolean BASS_ChannelRemoveFX(int handle, int fx);
boolean BASS_FXSetParameters(int handle, Object params);
boolean BASS_FXGetParameters(int handle, Object params);
boolean BASS_FXSetPriority(int handle, int priority);
boolean BASS_FXReset(int handle);
// doesn't work
int BASS_StreamCreate(int freq, int chans, int flags, int proc, Pointer user);
}
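
A minimal sketch, not part of this commit, using the raw binding above directly to report the BASS version and enumerate the output devices:

// hypothetical sketch: list output devices through the raw JNA binding
Bass bass = Bass.Instance;
System.out.printf("BASS version 0x%08x%n", bass.BASS_GetVersion());
Bass.BASS_DEVICEINFO info = new Bass.BASS_DEVICEINFO();
for (int device = 1; bass.BASS_GetDeviceInfo(device, info); device++) {
    boolean enabled = (info.flags & Bass.BASS_DEVICE_ENABLED) != 0;
    System.out.printf("device %d: %s (%s) enabled=%b%n", device, info.name, info.driver, enabled);
}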

View File

@@ -0,0 +1,7 @@
package BASS;
public interface PlaybackStatus {
void onPlaybackStarted(String filename);
void onPlaybackFinished(String filename);
void onPlaybackFailure(String filename);
}

View File

@@ -0,0 +1,27 @@
package Camera;
import lombok.Getter;
@Getter
public enum AcerQHD {
Mode1(3.7, 2560, 1440, "16:9"),
Mode2(2.1, 1920, 1080, "16:9"),
Mode3(0.9, 1280, 720, "16:9"),
Mode4(0.2, 640, 360, "16:9"),
ModeBest(3.7, 2560, 1440, "16:9"),
ModeLive(0.3, 640, 480, "4:3");
private final double Megapixel;
private final int width;
private final int height;
private final String aspectRatio;
AcerQHD(double Megapixel, int width, int height, String aspectRatio) {
this.Megapixel = Megapixel;
this.width = width;
this.height = height;
this.aspectRatio = aspectRatio;
}
}
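
The camera-mode enums in this commit all share this shape, so a caller can pick a mode programmatically; a small sketch (not part of the commit), shown for AcerQHD but applicable to the other enums:

// hypothetical sketch: pick the highest-resolution 16:9 mode from the enum above
AcerQHD best = null;
for (AcerQHD mode : AcerQHD.values()) {
    if ("16:9".equals(mode.getAspectRatio())
            && (best == null || mode.getWidth() * mode.getHeight() > best.getWidth() * best.getHeight())) {
        best = mode;
    }
}
if (best != null) System.out.println("selected " + best + " at " + best.getWidth() + "x" + best.getHeight());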

View File

@@ -0,0 +1,31 @@
package Camera;
import lombok.Getter;
@Getter
public enum ArducamIMX477 {
Mode1(8.3, 3840, 2160, "16:9"),
Mode2(3.7, 2560, 1440, "16:9"),
Mode3(2.1, 1920, 1080, "16:9"),
Mode4(0.9, 1280, 720, "16:9"),
Mode5(12.3, 4032, 3040, "4:3"),
Mode6(5.0, 2592, 1944, "4:3"),
Mode7(1.9, 1600, 1200, "4:3"),
Mode8(1.2, 1280, 960, "4:3"),
ModeBest(12.3, 4032, 3040, "4:3"),
ModeLive(0.3, 640, 480, "4:3");
private final double Megapixel;
private final int width;
private final int height;
private final String aspectRatio;
ArducamIMX477(double Megapixel, int width, int height, String aspectRatio) {
this.Megapixel = Megapixel;
this.width = width;
this.height = height;
this.aspectRatio = aspectRatio;
}
}

View File

@@ -0,0 +1,36 @@
package Camera;
@SuppressWarnings("unused")
public class ArducamIMX477Preset {
public static final int V4L2_ID_Brightness = 0x00980900;
public static final CameraProperty Brightness = new CameraProperty(-64, 64, 0);
public static final int V4L2_ID_Contrast = 0x00980901;
public static final CameraProperty Contrast = new CameraProperty(0, 64, 32);
public static final int V4L2_ID_Hue = 0x00980903;
public static final CameraProperty Hue = new CameraProperty(-40, 40, 0);
public static final int V4L2_ID_Saturation = 0x00980902;
public static final CameraProperty Saturation = new CameraProperty(0, 128, 64);
public static final int V4L2_ID_Gamma = 0x00980910;
public static final CameraProperty Gamma = new CameraProperty(72, 500, 100);
public static final int V4L2_ID_Gain = 0x00980913;
public static final CameraProperty Gain = new CameraProperty(0, 100, 0);
public static final int V4L2_ID_AutoWhiteBalance = 0x0098090c;
public static final OnOffProperty AutoWhiteBalance = new OnOffProperty(1, 0, 1);
public static final int V4L2_ID_PowerLineFrequency = 0x00980918;
public static final CameraProperty PowerLineFrequency = new CameraProperty(0, 2, 1); // 0 = disable, 1 = 50Hz, 2 = 60Hz
public static final int V4L2_ID_WhiteBalance = 0x0098091a;
public static final CameraProperty WhiteBalance = new CameraProperty(2800, 6500, 4600);
public static final int V4L2_ID_Sharpness = 0x0098091b;
public static final CameraProperty Sharpness = new CameraProperty(0, 6, 3);
public static final int V4L2_ID_BacklightCompensation = 0x0098091c;
public static final CameraProperty BacklightCompensation = new CameraProperty(0, 2, 1);
public static final int V4L2_ID_AutoExposure = 0x009a0901;
public static final OnOffProperty AutoExposure = new OnOffProperty(3, 1, 3); // 1 = Manual, 3 = Aperture Priority
public static final int V4L2_ID_ExposureTime = 0x009a0902;
public static final CameraProperty ExposureTime = new CameraProperty(1, 5000, 157);
public static final int V4L2_ID_ExposureDynamicFramerate = 0x009a0903;
public static final OnOffProperty ExposureDynamicFramerate = new OnOffProperty(1, 0, 0);
public static final int V4L2_ID_AutoFocus = 0x009a090c;
public static final OnOffProperty AutoFocus = new OnOffProperty(1, 0, 1);
}
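
A short sketch, not part of this commit, collecting the V4L2 control ids above with their preset defaults (via the CameraProperty class added later in this commit), e.g. to reset the camera to this preset:

// hypothetical sketch: map each V4L2 control id to its preset default value
java.util.Map<Integer, Double> defaults = new java.util.LinkedHashMap<>();
defaults.put(ArducamIMX477Preset.V4L2_ID_Brightness, ArducamIMX477Preset.Brightness.Default);
defaults.put(ArducamIMX477Preset.V4L2_ID_Contrast, ArducamIMX477Preset.Contrast.Default);
defaults.put(ArducamIMX477Preset.V4L2_ID_Gain, ArducamIMX477Preset.Gain.Default);
defaults.put(ArducamIMX477Preset.V4L2_ID_ExposureTime, ArducamIMX477Preset.ExposureTime.Default);
defaults.forEach((id, value) -> System.out.printf("ctrl 0x%08x -> %.0f%n", id, value));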

View File

@@ -0,0 +1,35 @@
package Camera;
import lombok.Getter;
@Getter
public enum AverVisionM15W {
Mode1(8.3, 3840, 2160, "16:9"),
Mode2(2.1, 1920, 1080, "16:9"),
Mode3(0.9, 1280, 720, "16:9"),
Mode4(0.2, 640, 360, "16:9"),
Mode5(4.9, 2560, 1920, "4:3"),
Mode6(3.1, 2048, 1536, "4:3"),
Mode7(1.9, 1600, 1200, "4:3"),
Mode8(1.2, 1280, 960, "4:3"),
Mode9(0.8, 1024, 768, "4:3"),
ModeLive(0.3, 640, 480, "4:3"),
ModeBest(8.3, 3840, 2160, "16:9"),
Mode11(0.08, 320, 240, "4:3"),
Mode12(1.0, 1280, 800, "16:10"),
Mode13(1.3, 1280, 1024, "5:4");
private final double Megapixel;
private final int width;
private final int height;
private final String aspectRatio;
AverVisionM15W(double Megapixel, int width, int height, String aspectRatio) {
this.Megapixel = Megapixel;
this.width = width;
this.height = height;
this.aspectRatio = aspectRatio;
}
}

View File

@@ -0,0 +1,12 @@
package Camera;
public enum CameraControlFlagWin32 {
CameraControl_Flags_Auto(1),
CameraControl_Flags_Manual(2),
CameraControl_Flags_Absolute(0),
CameraControl_Flags_Relative(0x10);
public final int value;
CameraControlFlagWin32(int value) {
this.value = value;
}
}

View File

@@ -0,0 +1,24 @@
package Camera;
public enum CameraControlPropertyWin32 {
// values range from -180 to 180 degree
CameraControl_Pan(0),
// values range from -180 to 180 degrees
CameraControl_Tilt(1),
// values range from -180 to 180 degrees
CameraControl_Roll(2),
// values range from 10 to 600 mm
CameraControl_Zoom(3),
// if negative value, exposure is 1/2^n seconds, if positive value, exposure is 2^n seconds
// example -3 is 1/8 seconds, 2 is 4 seconds
CameraControl_Exposure(4),
// value fstop * 10
CameraControl_Iris(5),
// in millimeters
CameraControl_Focus(6);
public final int value;
CameraControlPropertyWin32(int value) {
this.value = value;
}
}
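
The exposure comment above maps a CameraControl_Exposure value n to 2^n seconds; a small helper (not part of this commit, name is hypothetical) makes the conversion explicit:

// hypothetical helper: CameraControl_Exposure stores log2 of the exposure time
static double exposureSeconds(int logValue) {
    return Math.pow(2, logValue); // -3 -> 0.125 s (1/8), 0 -> 1 s, 2 -> 4 s
}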

View File

@@ -0,0 +1,12 @@
package Camera;
public class CameraProperty {
public final double Min;
public final double Max;
public final double Default;
public CameraProperty(double min, double max, double def){
Min = min;
Max = max;
Default = def;
}
}
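
A sketch, not part of this commit, of how a requested value can be clamped into a CameraProperty's range before being sent to the camera (helper name is hypothetical):

// hypothetical helper: keep a requested value inside [Min, Max], defaulting on NaN
static double clamp(CameraProperty prop, double requested) {
    if (Double.isNaN(requested)) return prop.Default;
    return Math.max(prop.Min, Math.min(prop.Max, requested));
}
// e.g. clamp(ArducamIMX477Preset.Contrast, 200) returns 64, the Contrast maximum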

View File

@@ -0,0 +1,7 @@
package Camera;
public interface LiveCamEvent {
void onDetectedQRCode(String qrCode);
void onFaceDetector(boolean hasface, int width, int height);
void onLog(String log);
}

View File

@@ -0,0 +1,26 @@
package Camera;
import lombok.Getter;
@Getter
public enum ObsbotMeet2 {
Mode1(8.3, 3840, 2160, "16:9"),
Mode2(2.1, 1920, 1080, "16:9"),
Mode3(0.9, 1280, 720, "16:9"),
Mode4(0.2, 640, 360, "16:9"),
Mode5(0.3, 640, 480, "4:3"),
ModeBest(8.3, 3840, 2160, "16:9"),
ModeLive(0.3, 640, 480, "4:3");
private final double Megapixel;
private final int width;
private final int height;
private final String aspectRatio;
ObsbotMeet2(double Megapixel, int width, int height, String aspectRatio) {
this.Megapixel = Megapixel;
this.width = width;
this.height = height;
this.aspectRatio = aspectRatio;
}
}

View File

@@ -0,0 +1,38 @@
package Camera;
@SuppressWarnings("unused")
public class ObsbotMeet2Preset {
public static final int V4L2_ID_Brightness = 0x00980900;
public static final CameraProperty Brightness = new CameraProperty(0,100,50);
public static final int V4L2_ID_Contrast = 0x00980901;
public static final CameraProperty Contrast = new CameraProperty(0,100,50);
public static final int V4L2_ID_Saturation = 0x00980902;
public static final CameraProperty Saturation = new CameraProperty(0,100,50);
public static final int V4L2_ID_Hue = 0x00980903;
public static final CameraProperty Hue = new CameraProperty(0,100,50);
public static final int V4L2_ID_AutoWhiteBalance = 0x0098090c;
public static final OnOffProperty AutoWhiteBalance = new OnOffProperty(1,0,1);
public static final int V4L2_ID_RedBalance = 0x0098090e;
public static final CameraProperty RedBalance = new CameraProperty(0,2048,0);
public static final int V4L2_ID_BlueBalance = 0x0098090f;
public static final CameraProperty BlueBalance = new CameraProperty(0,2048,1024);
public static final int V4L2_ID_Gain = 0x00980913;
public static final CameraProperty Gain = new CameraProperty(1,64,1);
public static final int V4L2_ID_PowerLineFrequency = 0x00980918;
public static final CameraProperty PowerLineFrequency = new CameraProperty(0,2,0); // why would the default be 3 in V4L2?
public static final int V4L2_ID_WhiteBalanceTemperature = 0x0098091a;
public static final CameraProperty WhiteBalanceTemperature = new CameraProperty(2000,10000,4600);
public static final int V4L2_ID_Sharpness = 0x0098091b;
public static final CameraProperty Sharpness = new CameraProperty(0,100,50);
public static final int V4L2_ID_BacklightCompensation = 0x0098091c;
public static final CameraProperty BacklightCompensation = new CameraProperty(0,18,9);
public static final int V4L2_ID_AutoExposure = 0x009a0901;
public static final OnOffProperty AutoExposure = new OnOffProperty(0,3,0); // 0 = Auto
public static final int V4L2_ID_ExposureTime = 0x009a0902;
public static final CameraProperty ExposureTime = new CameraProperty(1,2500,330);
public static final int V4L2_ID_Focus = 0x009a090a;
public static final CameraProperty Focus = new CameraProperty(0,100,0);
public static final int V4L2_ID_AutoFocus = 0x009a090c;
public static final OnOffProperty AutoFocus = new OnOffProperty(1,0,1);
}

View File

@@ -0,0 +1,12 @@
package Camera;
public class OnOffProperty {
public final double On;
public final double Off;
public final double Default;
public OnOffProperty(double on, double off, double def) {
On = on;
Off = off;
Default = def;
}
}

View File

@@ -0,0 +1,18 @@
package Config;
import lombok.Data;
@Data
public class CameraConfig {
public double Brightness = 0;
public double Contrast = 0;
public double Saturation = 0;
public double Hue = 0;
public double Gain = 0;
public double Exposure = 0;
public double Sharpness = 0;
public double Gamma = 0;
public boolean AutoExposure = true;
public boolean AutoFocus = true;
public boolean AutoWhiteBalance = true;
}

View File

@@ -0,0 +1,9 @@
package Config;
public enum CameraConfigEnum {
CameraConfigLeft90,
CameraConfigLeft45,
CameraConfigCenter,
CameraConfigRight45,
CameraConfigRight90
}

View File

@@ -0,0 +1,650 @@
package Config;
import lombok.Getter;
import lombok.val;
import org.tinylog.Logger;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileOutputStream;
import java.nio.file.Path;
import java.util.Properties;
import static Config.SomeCodes.*;
@SuppressWarnings("unused")
@Getter
public class ConfigFile {
private String AudioPhase1;
private String AudioPhase2;
private String AudioPhase3;
private String AudioPhase4;
private String AudioPhase5;
private String CameraLeft90;
private String CameraLeft45;
private String CameraCenter;
private String CameraRight45;
private String CameraRight90;
private CameraConfig ConfigLeft90 = new CameraConfig();
private CameraConfig ConfigLeft45 = new CameraConfig();
private CameraConfig ConfigCenter = new CameraConfig();
private CameraConfig ConfigRight45 = new CameraConfig();
private CameraConfig ConfigRight90 = new CameraConfig();
private String FTPHost;
private String FTPPort;
private String FTPUser;
private String FTPPass;
private String FTPPath;
private String PhotoDirectory;
private boolean needsave = false;
public ConfigFile(){
String ss = String.format("Current working directory in Java : %s", currentDirectory);
Logger.info(ss);
Load();
}
public void SetPhotoDirectory(String path){
if (ValidString(path)){
if (!path.equals(PhotoDirectory)){
File ff = new File(path);
if (ff.isDirectory()){
Logger.info("Photo Directory changed from {} to {}", PhotoDirectory, path);
PhotoDirectory = path;
needsave = true;
}
}
}
}
public void SetAudioPhase1(String path){
if (ValidString(path)){
if (!path.equals(AudioPhase1)){
Logger.info("Audio Phase 1 changed from {} to {}", AudioPhase1, path);
AudioPhase1 = path;
needsave = true;
}
}
}
public void SetAudioPhase2(String path){
if (ValidString(path)){
if (!path.equals(AudioPhase2)){
Logger.info("Audio Phase 2 changed from {} to {}", AudioPhase2, path);
AudioPhase2 = path;
needsave = true;
}
}
}
public void SetAudioPhase3(String path){
if (ValidString(path)){
if (!path.equals(AudioPhase3)){
Logger.info("Audio Phase 3 changed from {} to {}", AudioPhase3, path);
AudioPhase3 = path;
needsave = true;
}
}
}
public void SetAudioPhase4(String path){
if (ValidString(path)){
if (!path.equals(AudioPhase4)){
Logger.info("Audio Phase 4 changed from {} to {}", AudioPhase4, path);
AudioPhase4 = path;
needsave = true;
}
}
}
public void SetAudioPhase5(String path){
if (ValidString(path)){
if (!path.equals(AudioPhase5)){
Logger.info("Audio Phase 5 changed from {} to {}", AudioPhase5, path);
AudioPhase5 = path;
needsave = true;
}
}
}
/**
* Set Camera Path for Left 90 Degree
* @param path Path to Camera, or empty string to disable
*/
public void SetCameraLeft90(String path){
if (path==null) path="";
if (!path.equals(CameraLeft90)){
Logger.info("Camera Left 90 Degree changed from {} to {}", CameraLeft90, path);
CameraLeft90 = path;
needsave = true;
}
}
/**
* Set Camera Path for Left 45 Degree
* @param path Path to Camera, or empty string to disable
*/
public void SetCameraLeft45(String path){
if (path==null) path="";
if (!path.equals(CameraLeft45)){
Logger.info("Camera Left 45 Degree changed from {} to {}", CameraLeft45, path);
CameraLeft45 = path;
needsave = true;
}
}
/**
* Set Camera Path for Center
* @param path Path to Camera, or empty string to disable
*/
public void SetCameraCenter(String path){
if (path==null) path="";
if (!path.equals(CameraCenter)){
Logger.info("Camera Center changed from {} to {}", CameraCenter, path);
CameraCenter = path;
needsave = true;
}
}
/**
* Set Camera Path for Right 45 Degree
* @param path Path to Camera, or empty string to disable
*/
public void SetCameraRight45(String path){
if (path==null) path="";
if (!path.equals(CameraRight45)){
Logger.info("Camera Right 45 Degree changed from {} to {}", CameraRight45, path);
CameraRight45 = path;
needsave = true;
}
}
/**
* Set Camera Path for Right 90 Degree
* @param path Path to Camera, or empty string to disable
*/
public void SetCameraRight90(String path){
if (path==null) path="";
if (!path.equals(CameraRight90)){
Logger.info("Camera Right 90 Degree changed from {} to {}", CameraRight90, path);
CameraRight90 = path;
needsave = true;
}
}
public void SetFTPHost(String host){
if (ValidString(host)){
if (!host.equals(FTPHost)){
Logger.info("FTP Host changed from {} to {}", FTPHost, host);
FTPHost = host;
needsave = true;
}
}
}
public void SetFTPPort(String port){
if (ValidString(port)){
if (!port.equals(FTPPort)){
if (ValidPortNumber(toInt(port))){
Logger.info("FTP Port changed from {} to {}", FTPPort, port);
FTPPort = port;
needsave = true;
}
}
}
}
public void SetFTPUser(String user){
if (ValidString(user)){
if (!user.equals(FTPUser)){
Logger.info("FTP User changed from {} to {}", FTPUser, user);
FTPUser = user;
needsave = true;
}
}
}
public void SetFTPPass(String pass){
if (ValidString(pass)){
if (!pass.equals(FTPPass)){
Logger.info("FTP Password changed from {} to {}", FTPPass, pass);
FTPPass = pass;
needsave = true;
}
}
}
public void SetFTPPath(String path){
if (ValidString(path)){
if (!path.equals(FTPPath)){
Logger.info("FTP Path changed from {} to {}", FTPPath, path);
FTPPath = path;
needsave = true;
}
}
}
public void setBrightness(CameraConfigEnum cc, double value){
CameraConfig conf = switch (cc) {
case CameraConfigLeft90 -> ConfigLeft90;
case CameraConfigLeft45 -> ConfigLeft45;
case CameraConfigCenter -> ConfigCenter;
case CameraConfigRight45 -> ConfigRight45;
case CameraConfigRight90 -> ConfigRight90;
};
if (conf.Brightness != value){
conf.Brightness = value;
needsave = true;
}
}
public double getBrightness(CameraConfigEnum cc){
return switch (cc) {
case CameraConfigLeft90 -> ConfigLeft90.Brightness;
case CameraConfigLeft45 -> ConfigLeft45.Brightness;
case CameraConfigCenter -> ConfigCenter.Brightness;
case CameraConfigRight45 -> ConfigRight45.Brightness;
case CameraConfigRight90 -> ConfigRight90.Brightness;
};
}
public void setContrast(CameraConfigEnum cc, double value){
CameraConfig conf = switch (cc) {
case CameraConfigLeft90 -> ConfigLeft90;
case CameraConfigLeft45 -> ConfigLeft45;
case CameraConfigCenter -> ConfigCenter;
case CameraConfigRight45 -> ConfigRight45;
case CameraConfigRight90 -> ConfigRight90;
};
if (conf.Contrast != value){
conf.Contrast = value;
needsave = true;
}
}
public double getContrast(CameraConfigEnum cc){
return switch (cc){
case CameraConfigLeft90 -> ConfigLeft90.Contrast;
case CameraConfigLeft45 -> ConfigLeft45.Contrast;
case CameraConfigCenter -> ConfigCenter.Contrast;
case CameraConfigRight45 -> ConfigRight45.Contrast;
case CameraConfigRight90 -> ConfigRight90.Contrast;
};
}
public void setSaturation(CameraConfigEnum cc, double value){
CameraConfig conf = switch (cc) {
case CameraConfigLeft90 -> ConfigLeft90;
case CameraConfigLeft45 -> ConfigLeft45;
case CameraConfigCenter -> ConfigCenter;
case CameraConfigRight45 -> ConfigRight45;
case CameraConfigRight90 -> ConfigRight90;
};
if (conf.Saturation != value){
conf.Saturation = value;
needsave = true;
}
}
public double getSaturation(CameraConfigEnum cc){
return switch (cc){
case CameraConfigLeft90 -> ConfigLeft90.Saturation;
case CameraConfigLeft45 -> ConfigLeft45.Saturation;
case CameraConfigCenter -> ConfigCenter.Saturation;
case CameraConfigRight45 -> ConfigRight45.Saturation;
case CameraConfigRight90 -> ConfigRight90.Saturation;
};
}
public void setHue(CameraConfigEnum cc, double value){
CameraConfig conf = switch (cc) {
case CameraConfigLeft90 -> ConfigLeft90;
case CameraConfigLeft45 -> ConfigLeft45;
case CameraConfigCenter -> ConfigCenter;
case CameraConfigRight45 -> ConfigRight45;
case CameraConfigRight90 -> ConfigRight90;
};
if (conf.Hue != value){
conf.Hue = value;
needsave = true;
}
}
public double getHue(CameraConfigEnum cc){
return switch (cc){
case CameraConfigLeft90 -> ConfigLeft90.Hue;
case CameraConfigLeft45 -> ConfigLeft45.Hue;
case CameraConfigCenter -> ConfigCenter.Hue;
case CameraConfigRight45 -> ConfigRight45.Hue;
case CameraConfigRight90 -> ConfigRight90.Hue;
};
}
public void setGain(CameraConfigEnum cc, double value){
CameraConfig conf = switch (cc) {
case CameraConfigLeft90 -> ConfigLeft90;
case CameraConfigLeft45 -> ConfigLeft45;
case CameraConfigCenter -> ConfigCenter;
case CameraConfigRight45 -> ConfigRight45;
case CameraConfigRight90 -> ConfigRight90;
};
if (conf.Gain != value){
conf.Gain = value;
needsave = true;
}
}
public double getGain(CameraConfigEnum cc){
return switch (cc){
case CameraConfigLeft90 -> ConfigLeft90.Gain;
case CameraConfigLeft45 -> ConfigLeft45.Gain;
case CameraConfigCenter -> ConfigCenter.Gain;
case CameraConfigRight45 -> ConfigRight45.Gain;
case CameraConfigRight90 -> ConfigRight90.Gain;
};
}
public void setExposure(CameraConfigEnum cc, double value){
CameraConfig conf = switch (cc) {
case CameraConfigLeft90 -> ConfigLeft90;
case CameraConfigLeft45 -> ConfigLeft45;
case CameraConfigCenter -> ConfigCenter;
case CameraConfigRight45 -> ConfigRight45;
case CameraConfigRight90 -> ConfigRight90;
};
if (conf.Exposure != value){
conf.Exposure = value;
needsave = true;
}
}
public double getExposure(CameraConfigEnum cc){
return switch (cc){
case CameraConfigLeft90 -> ConfigLeft90.Exposure;
case CameraConfigLeft45 -> ConfigLeft45.Exposure;
case CameraConfigCenter -> ConfigCenter.Exposure;
case CameraConfigRight45 -> ConfigRight45.Exposure;
case CameraConfigRight90 -> ConfigRight90.Exposure;
};
}
public void setSharpness(CameraConfigEnum cc, double value){
CameraConfig conf = switch (cc) {
case CameraConfigLeft90 -> ConfigLeft90;
case CameraConfigLeft45 -> ConfigLeft45;
case CameraConfigCenter -> ConfigCenter;
case CameraConfigRight45 -> ConfigRight45;
case CameraConfigRight90 -> ConfigRight90;
};
if (conf.Sharpness != value){
conf.Sharpness = value;
needsave = true;
}
}
public double getSharpness(CameraConfigEnum cc){
return switch (cc){
case CameraConfigLeft90 -> ConfigLeft90.Sharpness;
case CameraConfigLeft45 -> ConfigLeft45.Sharpness;
case CameraConfigCenter -> ConfigCenter.Sharpness;
case CameraConfigRight45 -> ConfigRight45.Sharpness;
case CameraConfigRight90 -> ConfigRight90.Sharpness;
};
}
public void setGamma(CameraConfigEnum cc, double value){
CameraConfig conf = switch (cc) {
case CameraConfigLeft90 -> ConfigLeft90;
case CameraConfigLeft45 -> ConfigLeft45;
case CameraConfigCenter -> ConfigCenter;
case CameraConfigRight45 -> ConfigRight45;
case CameraConfigRight90 -> ConfigRight90;
};
if (conf.Gamma != value){
conf.Gamma = value;
needsave = true;
}
}
public double getGamma(CameraConfigEnum cc){
return switch (cc){
case CameraConfigLeft90 -> ConfigLeft90.Gamma;
case CameraConfigLeft45 -> ConfigLeft45.Gamma;
case CameraConfigCenter -> ConfigCenter.Gamma;
case CameraConfigRight45 -> ConfigRight45.Gamma;
case CameraConfigRight90 -> ConfigRight90.Gamma;
};
}
public void setAutoExposure(CameraConfigEnum cc, boolean value){
CameraConfig conf = switch (cc) {
case CameraConfigLeft90 -> ConfigLeft90;
case CameraConfigLeft45 -> ConfigLeft45;
case CameraConfigCenter -> ConfigCenter;
case CameraConfigRight45 -> ConfigRight45;
case CameraConfigRight90 -> ConfigRight90;
};
if (conf.AutoExposure != value){
conf.AutoExposure = value;
needsave = true;
}
}
public boolean getAutoExposure(CameraConfigEnum cc){
return switch (cc){
case CameraConfigLeft90 -> ConfigLeft90.AutoExposure;
case CameraConfigLeft45 -> ConfigLeft45.AutoExposure;
case CameraConfigCenter -> ConfigCenter.AutoExposure;
case CameraConfigRight45 -> ConfigRight45.AutoExposure;
case CameraConfigRight90 -> ConfigRight90.AutoExposure;
};
}
public void setAutoFocus(CameraConfigEnum cc, boolean value){
CameraConfig conf = switch (cc) {
case CameraConfigLeft90 -> ConfigLeft90;
case CameraConfigLeft45 -> ConfigLeft45;
case CameraConfigCenter -> ConfigCenter;
case CameraConfigRight45 -> ConfigRight45;
case CameraConfigRight90 -> ConfigRight90;
};
if (conf.AutoFocus != value){
conf.AutoFocus = value;
needsave = true;
}
}
public boolean getAutoFocus(CameraConfigEnum cc){
return switch (cc){
case CameraConfigLeft90 -> ConfigLeft90.AutoFocus;
case CameraConfigLeft45 -> ConfigLeft45.AutoFocus;
case CameraConfigCenter -> ConfigCenter.AutoFocus;
case CameraConfigRight45 -> ConfigRight45.AutoFocus;
case CameraConfigRight90 -> ConfigRight90.AutoFocus;
};
}
public void setAutoWhiteBalance(CameraConfigEnum cc, boolean value){
CameraConfig conf = switch (cc) {
case CameraConfigLeft90 -> ConfigLeft90;
case CameraConfigLeft45 -> ConfigLeft45;
case CameraConfigCenter -> ConfigCenter;
case CameraConfigRight45 -> ConfigRight45;
case CameraConfigRight90 -> ConfigRight90;
};
if (conf.AutoWhiteBalance != value){
conf.AutoWhiteBalance = value;
needsave = true;
}
}
public boolean getAutoWhiteBalance(CameraConfigEnum cc){
return switch (cc){
case CameraConfigLeft90 -> ConfigLeft90.AutoWhiteBalance;
case CameraConfigLeft45 -> ConfigLeft45.AutoWhiteBalance;
case CameraConfigCenter -> ConfigCenter.AutoWhiteBalance;
case CameraConfigRight45 -> ConfigRight45.AutoWhiteBalance;
case CameraConfigRight90 -> ConfigRight90.AutoWhiteBalance;
};
}
private void Load(){
File ff = Path.of(currentDirectory, "config.properties").toFile();
if (ff.isFile()){
String ss = String.format("Load config file at %s", ff.getPath());
Logger.info(ss);
try{
Properties prop = new Properties();
// try-with-resources so the config stream is always closed
try (FileInputStream FIS = new FileInputStream(ff)) {
prop.load(FIS);
}
boolean allcorrect = prop.getProperty("AudioPhase1") != null;
if (prop.getProperty("AudioPhase2") == null) allcorrect = false;
if (prop.getProperty("AudioPhase3") == null) allcorrect = false;
if (prop.getProperty("AudioPhase4") == null) allcorrect = false;
if (prop.getProperty("AudioPhase5") == null) allcorrect = false;
if (prop.getProperty("CameraLeft90") == null) allcorrect = false;
if (prop.getProperty("CameraLeft45") == null) allcorrect = false;
if (prop.getProperty("CameraCenter") == null) allcorrect = false;
if (prop.getProperty("CameraRight45") == null) allcorrect = false;
if (prop.getProperty("CameraRight90") == null) allcorrect = false;
if (prop.getProperty("FTPHost") == null) allcorrect = false;
if (prop.getProperty("FTPPort") == null) allcorrect = false;
if (prop.getProperty("FTPUser") == null) allcorrect = false;
if (prop.getProperty("FTPPass") == null) allcorrect = false;
if (prop.getProperty("FTPPath") == null) allcorrect = false;
if (prop.getProperty("PhotoDirectory") == null) allcorrect = false;
if (prop.getProperty(CameraConfigEnum.CameraConfigLeft90.toString()) == null) allcorrect = false;
if (prop.getProperty(CameraConfigEnum.CameraConfigLeft45.toString()) == null) allcorrect = false;
if (prop.getProperty(CameraConfigEnum.CameraConfigCenter.toString()) == null) allcorrect = false;
if (prop.getProperty(CameraConfigEnum.CameraConfigRight45.toString()) == null) allcorrect = false;
if (prop.getProperty(CameraConfigEnum.CameraConfigRight90.toString()) == null) allcorrect = false;
if (allcorrect){
AudioPhase1 = prop.getProperty("AudioPhase1");
AudioPhase2 = prop.getProperty("AudioPhase2");
AudioPhase3 = prop.getProperty("AudioPhase3");
AudioPhase4 = prop.getProperty("AudioPhase4");
AudioPhase5 = prop.getProperty("AudioPhase5");
CameraLeft90 = prop.getProperty("CameraLeft90");
CameraLeft45 = prop.getProperty("CameraLeft45");
CameraCenter = prop.getProperty("CameraCenter");
CameraRight45 = prop.getProperty("CameraRight45");
CameraRight90 = prop.getProperty("CameraRight90");
FTPHost = prop.getProperty("FTPHost");
FTPPort = prop.getProperty("FTPPort");
FTPUser = prop.getProperty("FTPUser");
FTPPass = prop.getProperty("FTPPass");
FTPPath = prop.getProperty("FTPPath");
PhotoDirectory = prop.getProperty("PhotoDirectory");
ConfigLeft90 = gson.fromJson(prop.getProperty(CameraConfigEnum.CameraConfigLeft90.toString()), CameraConfig.class);
ConfigLeft45 = gson.fromJson(prop.getProperty(CameraConfigEnum.CameraConfigLeft45.toString()), CameraConfig.class);
ConfigCenter = gson.fromJson(prop.getProperty(CameraConfigEnum.CameraConfigCenter.toString()), CameraConfig.class);
ConfigRight45 = gson.fromJson(prop.getProperty(CameraConfigEnum.CameraConfigRight45.toString()), CameraConfig.class);
ConfigRight90 = gson.fromJson(prop.getProperty(CameraConfigEnum.CameraConfigRight90.toString()), CameraConfig.class);
Logger.info("Config Loaded");
return;
} else {
Logger.info("Config File Not Correct, Creating Default");
}
} catch (Exception e){
Logger.error("Error Load Config: " + e.getMessage()+", Creating Default");
}
} else {
Logger.info("Config File Not Found, Creating Default");
}
CreateDefault();
}
private void CreateDefault(){
AudioPhase1 = Path.of(currentDirectory, "audio", "phase1.mp3").toString();
AudioPhase2 = Path.of(currentDirectory, "audio", "phase2.mp3").toString();
AudioPhase3 = Path.of(currentDirectory, "audio", "phase3.mp3").toString();
AudioPhase4 = Path.of(currentDirectory, "audio", "phase4.mp3").toString();
AudioPhase5 = Path.of(currentDirectory, "audio", "phase5.mp3").toString();
CameraLeft90 = "";
CameraLeft45 = "";
CameraCenter = "";
CameraRight45 = "";
CameraRight90 = "";
FTPHost = "192.168.10.2";
FTPPort = "21";
FTPUser = "user";
FTPPass = "password";
FTPPath = "/";
PhotoDirectory = currentDirectory;
SetDefaultCameraConfig(ConfigLeft90);
SetDefaultCameraConfig(ConfigLeft45);
SetDefaultCameraConfig(ConfigCenter);
SetDefaultCameraConfig(ConfigRight45);
SetDefaultCameraConfig(ConfigRight90);
Logger.info("Default Config Created");
needsave = true;
Save();
}
private void SetDefaultCameraConfig(CameraConfig cc){
if (cc!=null){
cc.AutoExposure = true;
cc.AutoFocus = true;
cc.AutoWhiteBalance = true;
cc.Brightness = 0;
cc.Contrast = 0;
cc.Exposure = 0;
cc.Gain = 0;
cc.Saturation = 0;
cc.Hue = 0;
cc.Gamma = 0;
cc.Sharpness = 0;
}
}
public void Save(){
if (!needsave) return;
needsave = false;
Properties prop = new Properties();
prop.setProperty("AudioPhase1", AudioPhase1);
prop.setProperty("AudioPhase2", AudioPhase2);
prop.setProperty("AudioPhase3", AudioPhase3);
prop.setProperty("AudioPhase4", AudioPhase4);
prop.setProperty("AudioPhase5", AudioPhase5);
prop.setProperty("CameraLeft90", CameraLeft90);
prop.setProperty("CameraLeft45", CameraLeft45);
prop.setProperty("CameraCenter", CameraCenter);
prop.setProperty("CameraRight45", CameraRight45);
prop.setProperty("CameraRight90", CameraRight90);
prop.setProperty("FTPHost", FTPHost);
prop.setProperty("FTPPort", FTPPort);
prop.setProperty("FTPUser",FTPUser);
prop.setProperty("FTPPass", FTPPass);
prop.setProperty("FTPPath", FTPPath);
prop.setProperty("PhotoDirectory", PhotoDirectory);
prop.setProperty(CameraConfigEnum.CameraConfigLeft90.toString(), gson.toJson(ConfigLeft90));
prop.setProperty(CameraConfigEnum.CameraConfigLeft45.toString(), gson.toJson(ConfigLeft45));
prop.setProperty(CameraConfigEnum.CameraConfigCenter.toString(), gson.toJson(ConfigCenter));
prop.setProperty(CameraConfigEnum.CameraConfigRight45.toString(), gson.toJson(ConfigRight45));
prop.setProperty(CameraConfigEnum.CameraConfigRight90.toString(), gson.toJson(ConfigRight90));
try (FileOutputStream FOS = new FileOutputStream(Path.of(currentDirectory, "config.properties").toString())){
prop.store(FOS, null);
Logger.info("Config Saved");
} catch (Exception e){
Logger.error("Error saving config: " + e.getMessage());
}
}
}
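A minimal usage sketch of the per-camera accessors above (assumptions: ConfigFile sits in the Config package next to SomeCodes, and the shared instance is SomeCodes.config as declared there). Save() writes config.properties only when a setter actually changed a value, thanks to the needsave flag.

package Config;

public class ConfigFileDemo {
    public static void main(String[] args) {
        ConfigFile config = SomeCodes.config;                        // shared instance created in SomeCodes
        config.setAutoExposure(CameraConfigEnum.CameraConfigCenter, false);
        config.setExposure(CameraConfigEnum.CameraConfigCenter, 120.0);
        System.out.println("Center exposure: " + config.getExposure(CameraConfigEnum.CameraConfigCenter));
        config.Save();                                               // no-op when nothing changed
    }
}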

View File

@@ -0,0 +1,304 @@
package Config;
import com.google.gson.Gson;
import com.google.zxing.MultiFormatReader;
import javafx.embed.swing.SwingFXUtils;
import javafx.scene.image.Image;
import lombok.val;
import org.bytedeco.javacv.Java2DFrameConverter;
import org.bytedeco.javacv.OpenCVFrameConverter;
import org.bytedeco.opencv.global.opencv_imgcodecs;
import org.bytedeco.opencv.global.opencv_imgproc;
import org.bytedeco.opencv.opencv_core.Mat;
import org.bytedeco.opencv.opencv_core.Size;
import org.bytedeco.opencv.opencv_objdetect.CascadeClassifier;
import org.tinylog.Logger;
import java.awt.image.BufferedImage;
import java.io.File;
import java.io.InputStream;
import java.net.Inet4Address;
import java.net.Inet6Address;
import java.net.InetAddress;
import java.nio.file.Files;
import java.nio.file.Path;
import java.time.LocalDateTime;
import java.time.format.DateTimeFormatter;
import java.util.ArrayList;
import java.util.List;
@SuppressWarnings("unused")
public class SomeCodes {
public final static String currentDirectory = System.getProperty("user.dir");
private static final DateTimeFormatter dtf = DateTimeFormatter.ofPattern("yyyy-MM-dd HH:mm:ss");
public static final Java2DFrameConverter converter = new Java2DFrameConverter();
public static final OpenCVFrameConverter.ToMat matconverter = new OpenCVFrameConverter.ToMat();
public static final Gson gson = new Gson();
public static final ConfigFile config = new ConfigFile();
public static Path GetLogsPath(){
return Path.of(currentDirectory, "logs");
}
public static int[] FindIndexes(List<String> source, String value){
if (source!=null && !source.isEmpty()){
if (ValidString(value)){
List<Integer> result = new ArrayList<>();
for (int i=0; i<source.size(); i++){
if (source.get(i).equals(value)){
result.add(i);
}
}
if (!result.isEmpty()) return result.stream().mapToInt(i->i).toArray();
}
}
return new int[0];
}
public static int FindFirstIndex(List<String> source, String value, int... avoidedindex){
if (source!=null && !source.isEmpty()){
if (ValidString(value)){
for (int i=0; i<source.size(); i++){
if (source.get(i).equals(value)){
// found a match, but first check whether this index is listed in avoidedindex
if (avoidedindex!=null && avoidedindex.length>0){
boolean found = false;
for (int j : avoidedindex){
if (j!=-1){
if (i==j){
found = true;
break;
}
}
}
if (found) continue;
}
return i;
}
}
}
}
return -1;
}
public static Image ConvertToImage(Mat mat, int width, int height){
if (mat!=null){
Mat resized = new Mat();
opencv_imgproc.resize(mat, resized, new org.bytedeco.opencv.opencv_core.Size(width, height));
BufferedImage img = converter.convert(matconverter.convert(resized));
return SwingFXUtils.toFXImage(img, null);
}
return null;
}
/**
* Find thumbfile in thumbs directory
* @param sourcejpg source jpg file
* @return thumbfile if found, or null if not found
*/
public static String FindThumbfile(String sourcejpg){
File sourcefile = new File(sourcejpg);
Path thumbpath = Path.of(sourcefile.getParent(), "thumbs");
try{
if (!Files.exists(thumbpath)) Files.createDirectories(thumbpath);
} catch (Exception e){
Logger.error("Error creating thumbs directory: "+thumbpath+", Msg : "+e.getMessage());
}
Path thumbfile = thumbpath.resolve(sourcefile.getName());
if (Files.exists(thumbfile)){
return thumbfile.toString();
}
return null;
}
/**
* Make thumbfile from source jpg file
* @param sourcejpg source jpg file
* @param thumbsize thumbfile Size
* @return thumbfile if success, or null if failed
*/
public static String MakeThumbfile(String sourcejpg, Size thumbsize){
try{
File ff = new File(sourcejpg);
if (ff.exists()){
Path thumbpath = Path.of(ff.getParent(), "thumbs");
if (!Files.exists(thumbpath)) Files.createDirectories(thumbpath);
String thumbfile = thumbpath.resolve(ff.getName()).toString();
File thumb = new File(thumbfile);
if (thumb.exists()) return thumbfile;
Mat source = opencv_imgcodecs.imread(sourcejpg);
if (source!=null && !source.empty()){
Mat resized = new Mat();
opencv_imgproc.resize(source, resized, thumbsize);
opencv_imgcodecs.imwrite(thumbfile, resized);
Logger.info("Thumbfile created: "+thumbfile);
return thumbfile;
} else Logger.info("MakeThumbfile failed, Source File not valid image : "+sourcejpg);
} else Logger.info("MakeThumbfile failed, Source File not found: "+sourcejpg);
} catch (Exception e){
Logger.error("Error making thumbfile: "+sourcejpg+", Msg : "+e.getMessage());
}
return null;
}
public static String RemoveSpaces(String x){
return x.replaceAll("\\s+","");
}
public static String LocalDateTimeToString(LocalDateTime x){
return x.format(dtf);
}
public static String ExtractResource(String filename){
try{
File destination = new File(currentDirectory, filename);
if (destination.exists()){
return destination.getAbsolutePath();
}
// try-with-resources so the resource stream is always closed
try (InputStream is = SomeCodes.class.getResourceAsStream(filename)) {
if (is != null) {
Files.copy(is, destination.toPath());
Logger.info("Resource File extracted: " + filename);
return destination.getAbsolutePath();
}
}
} catch (Exception e){
Logger.error("Error extracting resource: "+filename+", Message : "+e.getMessage());
}
return null;
}
public static boolean ValidDirectory(String path){
if (ValidString(path)){
File ff = new File(path);
return ff.isDirectory();
}
return false;
}
public static boolean ValidPortNumber(int port){
return port>0 && port<65536;
}
public static int toInt(String x){
try {
return Integer.parseInt(x);
} catch (Exception e){
return 0;
}
}
public static boolean ValidIPV4(String ipaddress){
if (ValidString(ipaddress)){
try{
InetAddress inet = InetAddress.getByName(ipaddress);
if (inet instanceof Inet4Address){
if (inet.getHostAddress().equals(ipaddress)){
return true;
}
}
} catch (Exception ignored) {
}
}
return false;
}
public static boolean ValidIPV6(String ipaddress){
if (ValidString(ipaddress)){
try{
InetAddress inet = InetAddress.getByName(ipaddress);
if (inet instanceof Inet6Address){
if (inet.getHostAddress().equals(ipaddress)){
return true;
}
}
} catch (Exception ignored) {
}
}
return false;
}
public static String GetFileName(String filepath){
if (ValidString(filepath)){
File ff = new File(filepath);
if (ff.isFile()){
return ff.getName();
}
}
return "";
}
public static boolean ValidFile(String filename){
if (ValidString(filename)){
File ff = new File(filename);
return ff.isFile();
}
return false;
}
public static boolean ValidString(String x){
if (x!=null){
return !x.isEmpty();
}
return false;
}
public static void OpenPictureInDefaultViewer(String filename){
try{
File ff = new File(filename);
if (ff.exists()){
String os = System.getProperty("os.name").toLowerCase();
if (os.contains("win")){
//Runtime.getRuntime().exec("rundll32 url.dll,FileProtocolHandler "+ff.getAbsolutePath());
Runtime.getRuntime().exec(new String[]{"rundll32", "url.dll,FileProtocolHandler", ff.getAbsolutePath()});
} else if (os.contains("mac")){
//Runtime.getRuntime().exec("open "+ff.getAbsolutePath());
Runtime.getRuntime().exec(new String[]{"open", ff.getAbsolutePath()});
} else if (os.contains("nix") || os.contains("nux")){
//Runtime.getRuntime().exec("xdg-open "+ff.getAbsolutePath());
Runtime.getRuntime().exec(new String[]{"xdg-open", ff.getAbsolutePath()});
}
}
} catch (Exception e){
Logger.error("Error opening file: "+filename+", Msg : "+e.getMessage());
}
}
public static MultiFormatReader qrreader;
public static void LoadQRReader(){
if (qrreader==null) {
qrreader = new MultiFormatReader();
Logger.info("QRReader loaded");
}
}
public static CascadeClassifier faceDetector;
public static void LoadFaceDetector(){
String filename = SomeCodes.ExtractResource("/haarcascade_frontalface_alt.xml");
if (filename!=null) {
if (faceDetector==null) {
faceDetector = new CascadeClassifier(filename);
Logger.info("FaceDetector loaded");
}
}
}
public static String[] MakeArray(String... args){
if (args!=null && args.length>0){
List<String> ll = new ArrayList<String>();
for(String x : args){
if (ValidString(x)) ll.add(x);
}
return ll.toArray(new String[0]);
}
return new String[0];
}
}
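A small sketch (illustrative camera names) of how FindIndexes and FindFirstIndex cooperate: CaptureView uses exactly this pattern to hand out one device index per position when several cameras report the same description.

package Config;

import java.util.List;

public class FindIndexDemo {
    public static void main(String[] args) {
        List<String> cams = List.of("IMX477", "IMX477", "USB Cam", "IMX477");
        int[] all = SomeCodes.FindIndexes(cams, "IMX477");                    // [0, 1, 3]
        int first = SomeCodes.FindFirstIndex(cams, "IMX477");                 // 0
        int second = SomeCodes.FindFirstIndex(cams, "IMX477", first);         // 1, skips index 0
        int third = SomeCodes.FindFirstIndex(cams, "IMX477", first, second);  // 3
        System.out.println(all.length + " matches; assigned " + first + ", " + second + ", " + third);
    }
}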

View File

@@ -0,0 +1,67 @@
package Database;
import lombok.Data;
import static Config.SomeCodes.MakeArray;
@Data
public class PhotoReviewClass {
private int id;
private String DateTime;
private String Prefix;
private String FileLeft90;
private String FileLeft45;
private String FileCenter;
private String FileRight45;
private String FileRight90;
private String ThumbLeft90;
private String ThumbLeft45;
private String ThumbCenter;
private String ThumbRight45;
private String ThumbRight90;
public PhotoReviewClass(){
this.id = 0;
this.DateTime = "";
this.Prefix = "";
this.FileLeft90 = "";
this.FileLeft45 = "";
this.FileCenter = "";
this.FileRight45 = "";
this.FileRight90 = "";
this.ThumbLeft90 = "";
this.ThumbLeft45 = "";
this.ThumbCenter = "";
this.ThumbRight45 = "";
this.ThumbRight90 = "";
}
public PhotoReviewClass(int id, String DateTime, String Prefix, String FileLeft90, String FileLeft45, String FileCenter, String FileRight45, String FileRight90, String ThumbLeft90, String ThumbLeft45, String ThumbCenter, String ThumbRight45, String ThumbRight90){
this.id = id;
this.DateTime = DateTime;
this.Prefix = Prefix;
this.FileLeft90 = FileLeft90;
this.FileLeft45 = FileLeft45;
this.FileCenter = FileCenter;
this.FileRight45 = FileRight45;
this.FileRight90 = FileRight90;
this.ThumbLeft90 = ThumbLeft90;
this.ThumbLeft45 = ThumbLeft45;
this.ThumbCenter = ThumbCenter;
this.ThumbRight45 = ThumbRight45;
this.ThumbRight90 = ThumbRight90;
}
/**
* Get all file paths that are non-empty (null and blank entries are dropped by MakeArray)
* @return array of non-empty file paths
*/
public String[] files(){
return MakeArray(FileLeft90, FileLeft45, FileCenter, FileRight45, FileRight90);
}
public String[] thumbnails(){
return MakeArray(ThumbLeft90, ThumbLeft45, ThumbCenter, ThumbRight45, ThumbRight90);
}
}
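A short sketch (hypothetical file path) showing why files() and thumbnails() route through MakeArray: empty and null slots are dropped, so callers only see the photos that were actually captured.

package Database;

public class PhotoReviewDemo {
    public static void main(String[] args) {
        PhotoReviewClass pr = new PhotoReviewClass();      // all fields start as ""
        pr.setPrefix("PATIENT-001");
        pr.setFileCenter("C:/photos/PATIENT-001_CENTER.jpg");
        System.out.println(pr.files().length);             // 1 - only the center photo is set
        System.out.println(pr.thumbnails().length);        // 0 - no thumbnails yet
    }
}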

View File

@@ -0,0 +1,147 @@
package Database;
import static Config.SomeCodes.LocalDateTimeToString;
import static Config.SomeCodes.ValidString;
import lombok.val;
import org.tinylog.Logger;
import java.sql.*;
import java.time.LocalDateTime;
import java.util.List;
public class Sqlite {
/**
* create SQLite database for photos
*/
public Sqlite(){
CreateDatabase();
}
private void CreateDatabase(){
try{
Connection conn = GetConnection();
if (conn != null){
Statement stmt = conn.createStatement();
String str = "CREATE TABLE IF NOT EXISTS photos "+
" (id INTEGER PRIMARY KEY AUTOINCREMENT, DateTime TEXT, Prefix TEXT, FileLeft90 TEXT, FileLeft45 TEXT, FileCenter TEXT, FileRight45 TEXT, FileRight90 TEXT, ThumbLeft90 TEXT, ThumbLeft45 TEXT, ThumbCenter TEXT, ThumbRight45 TEXT, ThumbRight90 TEXT)";
stmt.execute(str);
conn.close();
Logger.info("Database created successfully");
}
} catch (Exception e){
Logger.error("Error creating database: "+e.getMessage());
}
}
/**
* Insert PhotoReviewClass object to database
* @param pr PhotoReviewClass object
*/
public void Insert(PhotoReviewClass pr){
if (pr!=null){
Insert(pr.getPrefix(), pr.getFileLeft90(), pr.getFileLeft45(), pr.getFileCenter(), pr.getFileRight45(), pr.getFileRight90(), pr.getThumbLeft90(), pr.getThumbLeft45(), pr.getThumbCenter(), pr.getThumbRight45(), pr.getThumbRight90());
}
}
/**
* Get all PhotoReviewClass object from database
* @return array of PhotoReviewClass object if success, or null if failed
*/
public PhotoReviewClass[] GetAll(){
try{
Connection conn = GetConnection();
if (conn != null){
Statement stmt = conn.createStatement();
ResultSet rs = stmt.executeQuery("SELECT * FROM photos");
List<PhotoReviewClass> list = new java.util.ArrayList<>();
while (rs.next()){
list.add(new PhotoReviewClass(rs.getInt("id"), rs.getString("DateTime"), rs.getString("Prefix"), rs.getString("FileLeft90"), rs.getString("FileLeft45"), rs.getString("FileCenter"), rs.getString("FileRight45"), rs.getString("FileRight90"), rs.getString("ThumbLeft90"), rs.getString("ThumbLeft45"), rs.getString("ThumbCenter"), rs.getString("ThumbRight45"), rs.getString("ThumbRight90")));
}
conn.close();
Logger.info("GetAll success, result count: "+list.size());
return list.toArray(new PhotoReviewClass[0]);
} else Logger.info("GetAll failed, connection is null");
} catch (Exception e){
Logger.error("Error getting data: "+e.getMessage());
}
return null;
}
/**
* Delete all data from database
*/
@SuppressWarnings("unused")
public void DeleteAll(){
try{
Connection conn = GetConnection();
if (conn != null){
Statement stmt = conn.createStatement();
stmt.execute("DELETE FROM photos");
conn.close();
Logger.info("DeleteAll success");
} else Logger.info("DeleteAll failed, connection is null");
} catch (Exception e){
Logger.error("Error deleting data: "+e.getMessage());
}
}
/**
* Insert data to database
* @param prefix Prefix
* @param fileLeft90 FileLeft90
* @param fileLeft45 FileLeft45
* @param fileCenter FileCenter
* @param fileRight45 FileRight45
* @param fileRight90 FileRight90
* @param thumbLeft90 ThumbLeft90
* @param thumbLeft45 ThumbLeft45
* @param thumbCenter ThumbCenter
* @param thumbRight45 ThumbRight45
* @param thumbRight90 ThumbRight90
*/
private void Insert(String prefix, String fileLeft90, String fileLeft45, String fileCenter, String fileRight45, String fileRight90, String thumbLeft90, String thumbLeft45, String thumbCenter, String thumbRight45, String thumbRight90){
try{
Connection conn = GetConnection();
if (conn != null){
PreparedStatement stmt = conn.prepareStatement("INSERT INTO photos (DateTime, Prefix, FileLeft90, FileLeft45, FileCenter, FileRight45, FileRight90, ThumbLeft90, ThumbLeft45, ThumbCenter, ThumbRight45, ThumbRight90 ) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)");
stmt.setString(1, LocalDateTimeToString(LocalDateTime.now()));
stmt.setString(2, ValidString(prefix)?prefix:"");
stmt.setString(3, ValidString(fileLeft90)?fileLeft90:"");
stmt.setString(4, ValidString(fileLeft45)?fileLeft45:"");
stmt.setString(5, ValidString(fileCenter)?fileCenter:"");
stmt.setString(6, ValidString(fileRight45)?fileRight45:"");
stmt.setString(7, ValidString(fileRight90)?fileRight90:"");
stmt.setString(8, ValidString(thumbLeft90)?thumbLeft90:"");
stmt.setString(9, ValidString(thumbLeft45)?thumbLeft45:"");
stmt.setString(10, ValidString(thumbCenter)?thumbCenter:"");
stmt.setString(11, ValidString(thumbRight45)?thumbRight45:"");
stmt.setString(12, ValidString(thumbRight90)?thumbRight90:"");
stmt.execute();
conn.close();
Logger.info("Data inserted successfully");
} else Logger.info("Insert failed, connection is null");
} catch (Exception e){
Logger.error("Error inserting data: "+e.getMessage());
}
}
/**
* Get Database Connection
* @return Connection object if success, or null if failed
*/
private Connection GetConnection(){
String dbPath = "jdbc:sqlite:database.db";
try{
return DriverManager.getConnection(dbPath);
} catch (Exception e){
Logger.error("Error connecting to database: "+e.getMessage());
}
return null;
}
}
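An illustrative round trip (file paths are placeholders; the sqlite-jdbc driver must be on the classpath): the constructor creates database.db with the photos table, Insert fills the DateTime column itself, and GetAll reads every row back.

package Database;

public class SqliteDemo {
    public static void main(String[] args) {
        Sqlite db = new Sqlite();                          // creates the photos table if missing
        PhotoReviewClass pr = new PhotoReviewClass();
        pr.setPrefix("PATIENT-001");
        pr.setFileCenter("C:/photos/PATIENT-001_CENTER.jpg");
        db.Insert(pr);
        PhotoReviewClass[] rows = db.GetAll();
        if (rows != null) {
            for (PhotoReviewClass row : rows) {
                System.out.println(row.getId() + " | " + row.getDateTime() + " | " + row.getPrefix());
            }
        }
    }
}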

View File

@@ -0,0 +1,38 @@
package Database;
import lombok.Getter;
import lombok.Setter;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import static Config.SomeCodes.ValidString;
@Getter
public class TinyLogRow {
private @Setter int index;
private final String DateTime;
private final String Category;
private final String Message;
public TinyLogRow(int index, String datetime, String category, String message){
this.index = index;
this.DateTime = datetime;
this.Category = category;
this.Message = message;
}
public boolean HaveContent(){
return ValidString(DateTime) && ValidString(Category) && ValidString(Message);
}
public static TinyLogRow Regex(String input){
final String regex = "^(\\d{4}-\\d{2}-\\d{2} \\d{2}:\\d{2}:\\d{2})\\s([A-Z]+:)\\s(.*)$";
final Pattern pattern = Pattern.compile(regex);
final Matcher matcher = pattern.matcher(input);
if (matcher.find()){
return new TinyLogRow(0, matcher.group(1), matcher.group(2), matcher.group(3));
}
return null;
}
}
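A quick sketch (the sample line assumes tinylog's default "date LEVEL: message" layout, which is what the regex above expects) of parsing one log line into a TinyLogRow.

package Database;

public class TinyLogRowDemo {
    public static void main(String[] args) {
        String line = "2024-11-09 08:55:17 INFO: Config Loaded";
        TinyLogRow row = TinyLogRow.Regex(line);
        if (row != null && row.HaveContent()) {
            // Category keeps the trailing colon captured by group 2, e.g. "INFO:"
            System.out.println(row.getDateTime() + " | " + row.getCategory() + " | " + row.getMessage());
        } else {
            System.out.println("Line does not match the expected log format");
        }
    }
}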

View File

@@ -0,0 +1,97 @@
package FTP;
import lombok.NonNull;
import org.apache.commons.net.ftp.FTPClient;
import org.tinylog.Logger;
@SuppressWarnings("unused")
public class FTPCheck {
private final String host;
private final int port;
private final String user;
private final String pass;
private final String path;
private final boolean passive;
/**
* Initialize FTP Check
* @param host FTP Host
* @param port FTP Port
* @param user FTP User
* @param pass FTP Password
* @param path FTP Path
* @param passive Use Passive Mode
*/
public FTPCheck(String host, int port, String user, String pass, String path, boolean passive){
this.host = host;
this.port = port;
this.user = user;
this.pass = pass;
this.path = path;
this.passive = passive;
}
/**
* Check if FTP is Correct and connectable
* @return true if FTP is correct, false otherwise
*/
public boolean IsCorrect(){
FTPClient client = new FTPClient();
try {
boolean alliswell = true;
client.connect(host, port);
if (!client.login(user, pass)) {
Logger.info("FTPCheck failed, Error: Login failed");
alliswell = false;
}
if (passive) client.enterLocalPassiveMode();
if (!client.changeWorkingDirectory(path)) {
Logger.info("FTPCheck failed, Error: Change Directory failed");
alliswell = false;
}
if (!client.logout()) {
Logger.info("FTPCheck failed, Error: Logout failed");
alliswell = false;
}
client.disconnect();
Logger.info("IsCorrect: "+alliswell);
return alliswell;
} catch (Exception e) {
Logger.error("FTPCheck failed, Error: " + e.getMessage());
return false;
}
}
/**
* List Files in FTP
* @return filenames found in FTP path, or empty array if failed
*/
public @NonNull String[] ListFiles() {
FTPClient client = new FTPClient();
try {
client.connect(host, port);
if (!client.login(user, pass)) {
Logger.info("FTPCheck failed, Error: Login failed");
return new String[0];
}
if (passive) client.enterLocalPassiveMode();
if (!client.changeWorkingDirectory(path)) {
Logger.info("FTPCheck failed, Error: Change Directory failed");
return new String[0];
}
String[] files = client.listNames();
if (files == null) files = new String[0]; // listNames() can return null when the listing fails
if (!client.logout()) {
Logger.info("FTPCheck failed, Error: Logout failed");
return new String[0];
}
client.disconnect();
Logger.info("ListFiles: "+files.length);
return files;
} catch (Exception e) {
Logger.error("FTPCheck failed, Error: " + e.getMessage());
return new String[0];
}
}
}
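A minimal pre-flight sketch (host, credentials and path are placeholders matching the defaults in ConfigFile.CreateDefault): IsCorrect() performs a full connect/login/cd/logout round trip, so it is a cheap check to run before queuing an upload.

package FTP;

public class FTPCheckDemo {
    public static void main(String[] args) {
        FTPCheck check = new FTPCheck("192.168.10.2", 21, "user", "password", "/", true);
        if (check.IsCorrect()) {
            for (String name : check.ListFiles()) {
                System.out.println("remote file: " + name);
            }
        } else {
            System.out.println("FTP settings not reachable, skipping upload");
        }
    }
}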

View File

@@ -0,0 +1,146 @@
package FTP;
import lombok.val;
import org.apache.commons.net.ftp.FTPClient;
import java.io.*;
@SuppressWarnings("unused")
public class FTPUpload {
private final String host;
private final int port;
private final String username;
private final String password;
private final String remoteDirectory;
private final boolean passiveMode;
/**
* Initialize FTP Upload using Active Mode
* @param host FTP Host
* @param port FTP Port
* @param username FTP Username
* @param password FTP Password
* @param remoteDirectory Remote Directory
*/
public FTPUpload(String host, int port, String username, String password, String remoteDirectory){
this.host = host;
this.port = port;
this.username = username;
this.password = password;
this.remoteDirectory = remoteDirectory;
this.passiveMode = false;
}
/**
* Initialize FTP Upload
* @param host FTP Host
* @param port FTP Port
* @param username FTP Username
* @param password FTP Password
* @param remoteDirectory Remote Directory
* @param passiveMode Passive Mode
*/
public FTPUpload(String host, int port, String username, String password, String remoteDirectory, boolean passiveMode){
this.host = host;
this.port = port;
this.username = username;
this.password = password;
this.remoteDirectory = remoteDirectory;
this.passiveMode = passiveMode;
}
public void UploadFile(FTPUploadEvent event, String... files) {
if (files.length!=0){
int total = files.length;
var success = 0;
var failed = 0;
FTPClient ftp = new FTPClient();
try {
ftp.connect(host, port);
if (event!=null) event.uploadLog("Connected to " + host + ":" + port);
ftp.login(username, password);
if (event!=null) event.uploadLog("Logged in as " + username);
if (passiveMode) {
ftp.enterLocalPassiveMode();
if (event!=null) event.uploadLog("Passive Mode Enabled");
}
ftp.setFileType(FTPClient.BINARY_FILE_TYPE);
if (event!=null) event.uploadLog("Setting File Type to Binary");
ftp.changeWorkingDirectory(remoteDirectory);
if (event!=null) event.uploadLog("Changed Working Directory to " + remoteDirectory);
for (String file : files) {
try{
File localFile = new File(file);
if (localFile.exists()) {
if (event!=null) event.onUploadStarted(file);
boolean stored;
// try-with-resources keeps the stream closed even when the transfer throws
try (FileInputStream input = new FileInputStream(localFile)) {
ProgressInputStream pis = new ProgressInputStream(file, input, localFile.length(), event);
stored = ftp.storeFile(localFile.getName(), pis);
}
if (stored) {
if (event!=null) event.onUploadSuccess(file);
success++;
} else {
if (event!=null) event.onUploadFailed(file);
failed++;
}
} else {
if (event!=null) event.uploadLog("File " + file + " not found");
failed++;
}
} catch (Exception e){
if (event!=null) event.onUploadFailed(file);
failed++;
}
}
ftp.logout();
if (event!=null) event.uploadLog("Logged out from FTP Server");
} catch (IOException e) {
if (event!=null) event.uploadLog("Failed to upload file, Msg: " + e.getMessage());
} finally {
if (ftp.isConnected()) {
try {
ftp.disconnect();
if (event!=null) event.uploadLog("Disconnected from FTP Server");
} catch (IOException e) {
if (event!=null) event.uploadLog("Failed to disconnect from FTP Server, Msg: " + e.getMessage());
}
}
if (event!=null) event.onUploadFinished(total, success, failed, files);
}
} else {
if (event!=null) event.onUploadFinished(0, 0, 0, new String[]{});
}
}
static class ProgressInputStream extends FilterInputStream{
private final long totalbytes;
private long bytesread = 0;
private final FTPUploadEvent event;
private final String filename;
protected ProgressInputStream(String filename, InputStream in, long totalbytes, FTPUploadEvent event) {
super(in);
this.totalbytes = totalbytes;
this.event = event;
this.filename = filename;
}
@Override
public int read() throws IOException {
int result = in.read();
if (result != -1) {
bytesread++;
if (event!=null) event.onUploadProgress(filename, bytesread, totalbytes);
}
return result;
}
@Override
public int read(byte[] bb, int offset, int len) throws IOException {
int result = in.read(bb, offset, len);
if (result != -1) {
bytesread += result;
if (event!=null) event.onUploadProgress(filename, bytesread, totalbytes);
}
return result;
}
}
}

View File

@@ -0,0 +1,10 @@
package FTP;
public interface FTPUploadEvent {
void onUploadSuccess(String file);
void onUploadFailed(String file);
void onUploadProgress(String file, long bytes, long total);
void onUploadStarted(String file);
void uploadLog(String msg);
void onUploadFinished(int total, int success, int failed, String[] files);
}
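A sketch wiring FTPUpload to FTPUploadEvent (server details and file paths are placeholders): the callbacks run on whatever thread calls UploadFile, so UI code would need to hop back to the JavaFX thread (e.g., via Platform.runLater) before touching controls.

package FTP;

public class FTPUploadDemo {
    public static void main(String[] args) {
        FTPUploadEvent event = new FTPUploadEvent() {
            @Override public void onUploadStarted(String file) { System.out.println("start " + file); }
            @Override public void onUploadProgress(String file, long bytes, long total) {
                System.out.printf("%s %d/%d bytes%n", file, bytes, total);
            }
            @Override public void onUploadSuccess(String file) { System.out.println("done " + file); }
            @Override public void onUploadFailed(String file) { System.out.println("failed " + file); }
            @Override public void uploadLog(String msg) { System.out.println(msg); }
            @Override public void onUploadFinished(int total, int success, int failed, String[] files) {
                System.out.println(success + "/" + total + " uploaded, " + failed + " failed");
            }
        };
        FTPUpload upload = new FTPUpload("192.168.10.2", 21, "user", "password", "/", true);
        upload.UploadFile(event, "C:/photos/PATIENT-001_CENTER.jpg", "C:/photos/PATIENT-001_LEFT45.jpg");
    }
}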

View File

@@ -0,0 +1,760 @@
package id.co.gtc.erhacam;
import Camera.ArducamIMX477Preset;
import Camera.CameraProperty;
import Camera.LiveCamEvent;
import Config.CameraConfigEnum;
import com.google.zxing.BinaryBitmap;
import com.google.zxing.NotFoundException;
import com.google.zxing.Result;
import com.google.zxing.client.j2se.BufferedImageLuminanceSource;
import com.google.zxing.common.HybridBinarizer;
import javafx.application.Platform;
import javafx.concurrent.Task;
import javafx.fxml.FXML;
import javafx.scene.control.CheckBox;
import javafx.scene.control.Slider;
import javafx.scene.image.Image;
import javafx.scene.image.ImageView;
import javafx.scene.control.Label;
import javafx.scene.image.PixelFormat;
import javafx.scene.image.WritableImage;
import javafx.scene.layout.AnchorPane;
import lombok.Getter;
import lombok.Setter;
import lombok.val;
import org.bytedeco.javacv.Frame;
import org.bytedeco.javacv.OpenCVFrameGrabber;
import org.bytedeco.opencv.global.opencv_imgproc;
import org.bytedeco.opencv.opencv_core.*;
import org.opencv.videoio.Videoio;
import java.awt.image.BufferedImage;
import java.awt.image.DataBufferByte;
import java.nio.ByteBuffer;
import java.nio.file.Path;
import java.time.LocalDateTime;
import java.util.Timer;
import java.util.TimerTask;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.concurrent.atomic.AtomicInteger;
import static Config.SomeCodes.*;
import static org.bytedeco.opencv.global.opencv_core.CV_8UC3;
import static org.bytedeco.opencv.global.opencv_core.mean;
import static org.bytedeco.opencv.global.opencv_imgcodecs.imwrite;
import static org.bytedeco.opencv.global.opencv_imgproc.*;
@SuppressWarnings({"unused"})
public class Cameradetail {
private final AtomicBoolean Capturing = new AtomicBoolean(false);
private final AtomicBoolean TakingPhoto = new AtomicBoolean(false);
private final AtomicBoolean IsGrabbingLiveView = new AtomicBoolean(false);
private OpenCVFrameGrabber mGrabber = null;
private LiveCamEvent event = null;
private @Getter @Setter CameraConfigEnum cameraConfigEnum = CameraConfigEnum.CameraConfigCenter;
private @Getter int LiveFPS = 0;
/**
* Get detected QR text from Live View
*/
private @Getter String qrtext = null;
@FXML
private Label cameratitle;
@FXML
private ImageView camerastream;
@FXML
private AnchorPane streamanchor;
@FXML
private Label camerastatus;
@FXML
private Slider brightnessSlider;
@FXML
private Slider contrastSlider;
@FXML
private Slider saturationSlider;
@FXML
private Slider hueSlider;
@FXML
private Slider gainSlider;
@FXML
private Slider exposureSlider;
@FXML
private CheckBox AutoExposure;
@FXML
private CheckBox AutoWhiteBalance;
@FXML
private CheckBox AutoFocus;
private final UMat BestMat = new UMat();
private final UMat LiveMat = new UMat();
private Size LiveSize = new Size(640, 480);
private Size PhotoSize = new Size(1920, 1080);
private void setSliderValue(Slider sld, CameraProperty prop, double value){
sld.setMin(prop.Min);
sld.setMax(prop.Max);
sld.setValue(value);
}
@FXML
public void initialize(){
camerastream.fitHeightProperty().bind(streamanchor.heightProperty());
//camerastream.fitWidthProperty().bind(streamanchor.widthProperty());
camerastream.setPreserveRatio(true);
Platform.runLater(()->{
setSliderValue(brightnessSlider, ArducamIMX477Preset.Brightness, config.getBrightness(cameraConfigEnum));
setSliderValue(contrastSlider, ArducamIMX477Preset.Contrast, config.getContrast(cameraConfigEnum));
setSliderValue(saturationSlider, ArducamIMX477Preset.Saturation, config.getSaturation(cameraConfigEnum));
setSliderValue(hueSlider, ArducamIMX477Preset.Hue, config.getHue(cameraConfigEnum));
setSliderValue(gainSlider, ArducamIMX477Preset.Gain, config.getGain(cameraConfigEnum));
setSliderValue(exposureSlider, ArducamIMX477Preset.ExposureTime, config.getExposure(cameraConfigEnum));
AutoExposure.setSelected(config.getAutoExposure(cameraConfigEnum));
AutoWhiteBalance.setSelected(config.getAutoWhiteBalance(cameraConfigEnum));
AutoFocus.setSelected(config.getAutoFocus(cameraConfigEnum));
});
AutoExposure.selectedProperty().addListener((obs, oldVal, newVal) -> {
setAutoExposure(newVal);
config.setAutoExposure(cameraConfigEnum, newVal);
raise_log("AutoExposure for "+getCameraTitle()+" changed to " + newVal);
});
AutoWhiteBalance.selectedProperty().addListener((obs, oldVal, newVal) -> {
setAutoWB(newVal);
config.setAutoWhiteBalance(cameraConfigEnum, newVal);
raise_log("AutoWhiteBalance for "+getCameraTitle()+" changed to "+newVal);
});
AutoFocus.selectedProperty().addListener((obs, oldVal, newVal) -> {
setAutoFocus(newVal);
config.setAutoFocus(cameraConfigEnum, newVal);
raise_log("AutoFocus for "+getCameraTitle()+" changed to "+newVal);
});
brightnessSlider.valueProperty().addListener((obs, oldVal, newVal) -> {
setBrightness(newVal.doubleValue());
config.setBrightness(cameraConfigEnum, newVal.doubleValue());
raise_log("Brightness for "+getCameraTitle()+" changed to "+newVal);
});
contrastSlider.valueProperty().addListener((obs, oldVal, newVal) -> {
setContrast(newVal.doubleValue());
config.setContrast(cameraConfigEnum, newVal.doubleValue());
raise_log("Contrast for "+getCameraTitle()+" changed to "+newVal);
});
saturationSlider.valueProperty().addListener((obs, oldVal, newVal) -> {
setSaturation(newVal.doubleValue());
config.setSaturation(cameraConfigEnum, newVal.doubleValue());
raise_log("Saturation for "+getCameraTitle()+" changed to "+newVal);
});
hueSlider.valueProperty().addListener((obs, oldVal, newVal) -> {
setHue(newVal.doubleValue());
config.setHue(cameraConfigEnum, newVal.doubleValue());
raise_log("Hue for "+getCameraTitle()+" changed to "+newVal);
});
gainSlider.valueProperty().addListener((obs, oldVal, newVal) -> {
setGain(newVal.doubleValue());
config.setGain(cameraConfigEnum, newVal.doubleValue());
raise_log("Gain for "+getCameraTitle()+" changed to "+newVal);
});
exposureSlider.valueProperty().addListener((obs, oldVal, newVal) -> {
setExposure(newVal.doubleValue());
config.setExposure(cameraConfigEnum, newVal.doubleValue());
raise_log("Exposure for "+getCameraTitle()+" changed to "+newVal);
});
}
@FXML
public void resetClick(){
brightnessSlider.adjustValue(ArducamIMX477Preset.Brightness.Default);
contrastSlider.adjustValue(ArducamIMX477Preset.Contrast.Default);
saturationSlider.adjustValue(ArducamIMX477Preset.Saturation.Default);
hueSlider.adjustValue(ArducamIMX477Preset.Hue.Default);
gainSlider.adjustValue(ArducamIMX477Preset.Gain.Default);
exposureSlider.adjustValue(ArducamIMX477Preset.ExposureTime.Default);
AutoExposure.setSelected(true);
AutoFocus.setSelected(true);
AutoWhiteBalance.setSelected(true);
}
public boolean isCapturing(){
return Capturing.get();
}
/**
* Set Camera Title
* @param title Title of the Camera
*/
public void setCameraTitle(String title){
if (ValidString(title)){
if (cameratitle!=null){
cameratitle.setText(title);
}
}
}
public void setSaturation(double value){
if (mGrabber!=null){
mGrabber.setOption(Videoio.CAP_PROP_SATURATION, value);
}
}
public double getSaturation(){
if (mGrabber!=null){
return mGrabber.getOption(Videoio.CAP_PROP_SATURATION);
}
return 0;
}
public void setHue(double value){
if (mGrabber!=null){
mGrabber.setOption(Videoio.CAP_PROP_HUE, value);
}
}
public double getHue(){
if (mGrabber!=null){
return mGrabber.getOption(Videoio.CAP_PROP_HUE);
}
return 0;
}
public void setGain(double value){
if (mGrabber!=null){
mGrabber.setOption(Videoio.CAP_PROP_GAIN, value);
}
}
public double getGain(){
if (mGrabber!=null){
return mGrabber.getOption(Videoio.CAP_PROP_GAIN);
}
return 0;
}
/**
* Get Camera Title
* @return Title of the Camera, or empty string if not set
*/
public String getCameraTitle(){
if (cameratitle!=null){
return cameratitle.getText();
}
return "";
}
/**
* Set Camera Status
* @param status Status of the Camera
*/
public void setCameraStatus(String status){
if (ValidString(status)){
if (camerastatus!=null){
camerastatus.setText(status);
}
}
}
/**
* Get Camera Status
* @return Status of the Camera, or empty string if not set
*/
public String getCameraStatus(){
if (camerastatus!=null){
return camerastatus.getText();
}
return "";
}
/**
* Set Camera Stream
* @param image Image to be displayed
*/
public void setCameraStream(Image image){
if (image!=null){
if (camerastream!=null){
camerastream.setImage(image);
}
}
}
/**
* Get Camera Stream
* @return Image of the Camera Stream, or null if not set
*/
public Image getCameraStream(){
if (camerastream!=null){
return camerastream.getImage();
}
return null;
}
public void setFPS(double value){
if (mGrabber!=null){
mGrabber.setOption(Videoio.CAP_PROP_FPS, value);
}
}
public double getFPS(){
if (mGrabber!=null){
return mGrabber.getOption(Videoio.CAP_PROP_FPS);
}
return 0;
}
/**
* Set Camera Grabber and Target Width and Height
* @param grabber Camera Grabber
* @param livewidth Width used on live view
* @param liveheight Height used on live view
* @param photowidth Width used on photo capture
* @param photoheight Height used on photo capture
*/
public void SetGrabber(OpenCVFrameGrabber grabber, int livewidth, int liveheight, int photowidth, int photoheight){
if (mGrabber!=null) {
StopLiveView();
}
LiveSize = new Size(livewidth, liveheight);
PhotoSize = new Size(photowidth, photoheight);
mGrabber = grabber;
}
//Exposure and Focus Tricks :
// https://stackoverflow.com/questions/53545945/how-to-set-camera-to-auto-exposure-with-opencv-3-4-2
// https://github.com/opencv/opencv/issues/9738
/**
* Set Auto Exposure Mode
* @param ON if true, set autoexposure on, otherwise off
*/
public void setAutoExposure(boolean ON){
if (mGrabber!=null){
mGrabber.setOption(Videoio.CAP_PROP_AUTO_EXPOSURE, ON?ArducamIMX477Preset.AutoExposure.On:ArducamIMX477Preset.AutoExposure.Off);
}
}
/**
* Get Auto Exposure Mode
* @return true if autoexposure is on, otherwise off
*/
public boolean getAutoExposure(){
if (mGrabber!=null){
return mGrabber.getOption(Videoio.CAP_PROP_AUTO_EXPOSURE)==ArducamIMX477Preset.AutoExposure.On;
}
return false;
}
/**
* Set Exposure when Auto Exposure is Off
* @param value exposure value
*/
public void setExposure(double value){
if (mGrabber!=null){
mGrabber.setOption(Videoio.CAP_PROP_EXPOSURE, value);
}
}
/**
* Get Exposure when Auto Exposure is Off
* @return exposure value
*/
public double getExposure(){
if (mGrabber!=null){
return mGrabber.getOption(Videoio.CAP_PROP_EXPOSURE);
}
return 0;
}
/**
* Set Auto Focus
* @param ON if true, set autofocus on, otherwise off
*/
public void setAutoFocus(boolean ON){
if (mGrabber!=null){
mGrabber.setOption(Videoio.CAP_PROP_AUTOFOCUS, ON?ArducamIMX477Preset.AutoFocus.On:ArducamIMX477Preset.AutoFocus.Off);
}
}
/**
* Get Auto Focus
* @return true if autofocus is on, otherwise off
*/
public boolean getAutoFocus(){
if (mGrabber!=null){
return mGrabber.getOption(Videoio.CAP_PROP_AUTOFOCUS)==ArducamIMX477Preset.AutoFocus.On;
}
return false;
}
public void setAutoWB(boolean ON){
if (mGrabber!=null){
mGrabber.setOption(Videoio.CAP_PROP_AUTO_WB, ON?ArducamIMX477Preset.AutoWhiteBalance.On:ArducamIMX477Preset.AutoWhiteBalance.Off);
}
}
public boolean getAutoWB(){
if (mGrabber!=null){
return mGrabber.getOption(Videoio.CAP_PROP_AUTO_WB)==ArducamIMX477Preset.AutoWhiteBalance.On;
}
return false;
}
/**
* Set Focus when Auto Focus is Off
* @param value focus value
*/
public void setFocus(double value){
if (mGrabber!=null){
mGrabber.setOption(Videoio.CAP_PROP_FOCUS, value);
}
}
/**
* Get Focus when Auto Focus is Off
* @return focus value
*/
public double getFocus(){
if (mGrabber!=null){
return mGrabber.getOption(Videoio.CAP_PROP_FOCUS);
}
return 0;
}
public void setBrightness(double value){
if (mGrabber!=null){
mGrabber.setOption(Videoio.CAP_PROP_BRIGHTNESS, value);
}
}
public double getBrightness(){
if (mGrabber!=null){
return mGrabber.getOption(Videoio.CAP_PROP_BRIGHTNESS);
}
return 0;
}
public void setContrast(double value){
if (mGrabber!=null){
mGrabber.setOption(Videoio.CAP_PROP_CONTRAST, value);
}
}
public double getContrast(){
if (mGrabber!=null){
return mGrabber.getOption(Videoio.CAP_PROP_CONTRAST);
}
return 0;
}
public void setFrameWidth(int width){
if (mGrabber!=null){
mGrabber.setOption(Videoio.CAP_PROP_FRAME_WIDTH, width);
}
}
public double getFrameWidth(){
if (mGrabber!=null){
return mGrabber.getOption(Videoio.CAP_PROP_FRAME_WIDTH);
}
return 0;
}
public void setFrameHeight(int height){
if (mGrabber!=null){
mGrabber.setOption(Videoio.CAP_PROP_FRAME_HEIGHT, height);
}
}
public double getFrameHeight(){
if (mGrabber!=null){
return mGrabber.getOption(Videoio.CAP_PROP_FRAME_HEIGHT);
}
return 0;
}
public void setSharpness(double value){
if (mGrabber!=null){
mGrabber.setOption(Videoio.CAP_PROP_SHARPNESS, value);
}
}
public double getSharpness(){
if (mGrabber!=null){
return mGrabber.getOption(Videoio.CAP_PROP_SHARPNESS);
}
return 0;
}
public void setGamma(double value){
if (mGrabber!=null){
mGrabber.setOption(Videoio.CAP_PROP_GAMMA, value);
}
}
public double getGamma(){
if (mGrabber!=null){
return mGrabber.getOption(Videoio.CAP_PROP_GAMMA);
}
return 0;
}
/**
* Take Photo from Camera
* @param directory directory to save the photo, if null, will use default directory
* @param prefix filename prefix
* @return filename path of the saved photo, or null if failed
*/
@SuppressWarnings("BusyWait")
public String TakePhoto(String directory, String prefix) throws InterruptedException {
String result = null;
if (!ValidDirectory(directory)) directory = currentDirectory;
if (mGrabber!=null){
while(IsGrabbingLiveView.get()){
Thread.sleep(10);
}
TakingPhoto.set(true);
if (!BestMat.empty()){
Size sz = BestMat.size();
raise_log("TakePhoto got frame with width: " + sz.width() + " and height: " + sz.height());
String filename = Path.of(directory, makeFileName(prefix)).toString();
if (imwrite(filename, BestMat)){
raise_log("TakePhoto success, Photo saved to " + filename);
result = filename;
} else raise_log("TakePhoto failed, Unable to Save Photo");
} else raise_log("TakePhoto failed, Live View is Empty");
} else raise_log("TakePhoto failed, Grabber is null");
TakingPhoto.set(false);
return result;
}
private String makeFileName(String prefix){
//make filename with prefix_POSITION_YYYY-MM-DD_HH-MM-SS
// zero-padded timestamp so filenames follow the YYYY-MM-DD_HH-MM-SS pattern and sort correctly
String timetag = LocalDateTime.now().format(java.time.format.DateTimeFormatter.ofPattern("yyyy-MM-dd_HH-mm-ss"));
return prefix+"_"
+ switch(cameratitle.getText()){
case "Camera Left 90" -> "LEFT90";
case "Camera Left 45" -> "LEFT45";
case "Camera Center" -> "CENTER";
case "Camera Right 45" -> "RIGHT45";
case "Camera Right 90" -> "RIGHT90";
default -> "UNKNOWN";
}
+ "_" + timetag + ".jpg";
}
public void StopLiveView(){
Capturing.set(false);
if (mGrabber!=null){
try{
mGrabber.stop();
Platform.runLater(()->setCameraStatus("Camera Stopped"));
} catch (Exception e){
raise_log("StopLiveView failed, Unable to Stop Camera, Error: " + e.getMessage());
}
}
TakingPhoto.set(false);
IsGrabbingLiveView.set(false);
}
public boolean StartLiveView(LiveCamEvent event, String cameratitle, final boolean use_qr , final boolean use_face) {
this.event = event;
if (mGrabber != null) {
try {
if (use_qr) raise_log("QR Reader loaded");
if (use_face) raise_log("Face detector loaded");
// capture with best resolution
setFrameHeight(PhotoSize.height());
setFrameWidth(PhotoSize.width());
LiveFPS = 0;
mGrabber.start();
Capturing.set(true);
// just information
String ss = String.format("Camera Started with resolution %dx%d@%d", PhotoSize.width(), PhotoSize.height(),LiveFPS);
Platform.runLater(()->setCameraStatus(ss));
raise_log(ss);
AutoExposure.setSelected(true);
AutoFocus.setSelected(true);
AutoWhiteBalance.setSelected(true);
Task<Image> task = new Task<>() {
@SuppressWarnings("BusyWait")
@Override
protected Image call() {
// repeat until capturing is false
AtomicInteger fps = new AtomicInteger(0);
TimerTask timerTask = new TimerTask() {
@Override
public void run() {
LiveFPS = fps.getAndSet(0);
}
};
Timer timer = new java.util.Timer();
timer.scheduleAtFixedRate(timerTask, 1000, 1000);
while (Capturing.get()) {
try {
// while a photo is being taken, do not grab live frames
while(TakingPhoto.get() && Capturing.get()){
Thread.sleep(10);
}
if (!Capturing.get()) return null;
IsGrabbingLiveView.set(true);
Frame frame = mGrabber.grab(); // grab frame (may return null if the camera hiccups)
Mat mat = frame != null ? matconverter.convert(frame) : null; // convert to Mat
if (mat != null) mat.copyTo(BestMat); // copy to BestMat so later steps can use OpenCL
fps.addAndGet(1);
IsGrabbingLiveView.set(false);
if (mat != null) {
opencv_imgproc.resize(BestMat, LiveMat, LiveSize); // resize to LiveSize
UMat graymat = new UMat(); // use OpenCL for grayscale
opencv_imgproc.cvtColor(LiveMat,graymat, COLOR_BGR2GRAY); // convert to grayscale
if (use_qr){
String qr = DetectQRFromMat(graymat);
if (qr!=null) {
if (!qr.equals(qrtext)){
qrtext = qr;
raise_log("QR Detected: " + qr);
if (event!=null) event.onDetectedQRCode(qr);
}
}
}
if (use_face){
RectVector face = DetectFace(graymat);
if (face!=null && face.size()>0){
if (event!=null) event.onFaceDetector(true, PhotoSize.width(), PhotoSize.height());
for(int i=0; i<face.size(); i++){
Rect rect = face.get(i);
rectangle(LiveMat, rect, Scalar.GREEN);
}
} else if (event!=null) event.onFaceDetector(false, PhotoSize.width(), PhotoSize.height());
}
UMat rgbmat = new UMat(LiveMat.size(), CV_8UC3);
cvtColor(LiveMat, rgbmat, COLOR_BGR2RGB);
Mat imgmat = new Mat();
rgbmat.copyTo(imgmat); // copy back to CPU
// update the Task value using matToWritableImage
updateValue(matToWritableImage(imgmat, imgmat.cols(), imgmat.rows()));
}
} catch (Exception e) {
raise_log("Unable to Grab Frame, Error: " + e.getMessage());
//if (!Capturing.get()) Platform.runLater(this::StopLiveView);
}
}
timer.cancel();
return null;
}
};
// the task's value (the live image) is pushed to camerastream whenever it updates
task.valueProperty().addListener((obs, oldVal, newVal) -> {
if (newVal != null) {
setCameraStream(newVal);
}
});
// start task
new Thread(task).start();
return true;
} catch (Exception e) {
raise_log("StartLiveView failed, Unable to Start Camera, Error: " + e.getMessage());
}
} else raise_log("StartLiveView failed, grabber is null");
return false;
}
/**
* Detect QR Code from Mat
* @param graymat Mat in Gray Scale
* @return QR Code Text, or null if not detected
*/
private String DetectQRFromMat(UMat graymat){
if (qrreader!=null){
Mat mat = new Mat();
graymat.copyTo(mat); // back to CPU, because zxing only accept BufferedImage
BufferedImage bufferedImage = matToBufferedImage(mat);
String title = cameratitle.getText();
BinaryBitmap binaryBitmap = new BinaryBitmap(new HybridBinarizer(new BufferedImageLuminanceSource(bufferedImage)));
try{
Result result = qrreader.decode(binaryBitmap);
if (result!=null){
return result.getText();
}
} catch (NotFoundException ignored) {
}
}
return null;
}
/**
* Detect Face from Mat
* @param graymat Mat in Gray Scale
* @return RectVector of detected faces (possibly empty), or null if the face detector is not loaded
*/
private RectVector DetectFace(UMat graymat){
if (faceDetector!=null){
RectVector face = new RectVector();
faceDetector.detectMultiScale(graymat, face);
return face;
}
return null;
}
private double getBrightnessFromGrayMat(Mat graymat){
Scalar mean = mean(graymat);
return mean.get(0);
}
private WritableImage matToWritableImage(Mat mat, int cols, int rows){
WritableImage writableImage = new WritableImage(cols, rows);
ByteBuffer buffer = mat.createBuffer();
PixelFormat<ByteBuffer> pixelFormat = PixelFormat.getByteRgbInstance();
writableImage.getPixelWriter().setPixels(0, 0, mat.cols(), mat.rows(), pixelFormat, buffer, mat.cols() * 3);
return writableImage;
}
private BufferedImage matToBufferedImage(Mat mat){
int type = BufferedImage.TYPE_BYTE_GRAY;
if (mat.channels() > 1) {
type = BufferedImage.TYPE_3BYTE_BGR;
}
BufferedImage image = new BufferedImage(mat.cols(), mat.rows(), type);
byte[] data = ((DataBufferByte) image.getRaster().getDataBuffer()).getData();
mat.data().get(data);
return image;
}
private void raise_log(String msg){
if (event!=null) event.onLog(msg);
}
}

View File

@@ -0,0 +1,633 @@
package id.co.gtc.erhacam;
import BASS.AudioPlayer;
import BASS.PlaybackStatus;
import Camera.*;
import Config.CameraConfigEnum;
import Config.SomeCodes;
import Database.PhotoReviewClass;
import Database.Sqlite;
import FTP.FTPUpload;
import FTP.FTPUploadEvent;
import javafx.application.Platform;
import javafx.concurrent.Task;
import javafx.fxml.FXML;
import javafx.fxml.FXMLLoader;
import javafx.scene.control.Alert;
import javafx.scene.control.TextArea;
import javafx.scene.layout.AnchorPane;
import javafx.stage.DirectoryChooser;
import lombok.val;
import java.util.*;
import java.util.concurrent.Callable;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.Future;
import javafx.scene.control.Alert.AlertType;
import org.bytedeco.javacv.OpenCVFrameConverter;
import org.bytedeco.javacv.OpenCVFrameGrabber;
import org.bytedeco.javacv.VideoInputFrameGrabber;
import org.bytedeco.opencv.opencv_core.Size;
import org.tinylog.Logger;
import static Config.SomeCodes.*;
public class CaptureView {
@FXML
private AnchorPane cam1, cam2, cam3, cam4, cam5, controlpane;
private Cameradetail image1, image2, image3, image4, image5;
@FXML
private TextArea directorypath, prefixfile;
@FXML
private AnchorPane progressanchor;
private AudioPlayer audioPlayer;
private String audio_posisikan_muka = "satu.wav";
private String audio_posisi_diam = "dua.wav";
private String audio_foto_selesai = "tiga.wav";
private String audio_ke_ruangtunggu = "empat.wav";
private List<String> cams;
@FXML
private void ChangeDirectory(){
DirectoryChooser dc = new DirectoryChooser();
dc.setTitle("Select Directory");
val selected = dc.showDialog(null);
if (selected == null) return; // user cancelled the chooser
String path = selected.getAbsolutePath();
config.SetPhotoDirectory(path);
config.Save();
directorypath.setText(path);
}
private void trigger_autofocus(Cameradetail image) throws InterruptedException {
if (image!=null){
if (image.isCapturing()){
image.setAutoFocus(false);
Thread.sleep(2);
image.setFocus(0.9);
Thread.sleep(2);
image.setAutoFocus(true);
Thread.sleep(2);
}
}
}
@FXML
private void AutoFocus() throws InterruptedException {
trigger_autofocus(image1);
trigger_autofocus(image2);
trigger_autofocus(image3);
trigger_autofocus(image4);
trigger_autofocus(image5);
}
@SuppressWarnings("resource")
@FXML
private void TakePhotos(){
Size thumbsize = new Size(160,120);
String directory = directorypath.getText();
String prefix = RemoveSpaces(prefixfile.getText()) ;
if (ValidDirectory(directory)){
if (ValidString(prefix)){
audioPlayer.PlayFile(audio_posisi_diam, ps);
PhotoReviewClass prc = new PhotoReviewClass();
prc.setPrefix(prefix);
long nanostart = System.nanoTime(); // for performance measurement
ExecutorService executor = Executors.newFixedThreadPool(5);
Callable<String> task1 = ()->{
if (image1!=null) {
String p1 = image1.TakePhoto(directory,prefix);
if (ValidFile(p1)) {
Platform.runLater(()->image1.setCameraStatus("Photo: "+ SomeCodes.GetFileName(p1)));
prc.setFileLeft90(p1);
String thumb1 = MakeThumbfile(p1, thumbsize);
if (ValidFile(thumb1)) prc.setThumbLeft90(thumb1);
}
}
return "Task 1 Done";
};
Callable<String> task2 = ()->{
if (image2!=null) {
String p2 = image2.TakePhoto(directory,prefix);
if (ValidFile(p2)) {
Platform.runLater(()->image2.setCameraStatus("Photo: "+ SomeCodes.GetFileName(p2)));
prc.setFileLeft45(p2);
String thumb2 = MakeThumbfile(p2, thumbsize);
if (ValidFile(thumb2)) prc.setThumbLeft45(thumb2);
}
}
return "Task 2 Done";
};
Callable<String> task3 = ()->{
if (image3!=null) {
String p3 = image3.TakePhoto(directory,prefix);
if (ValidFile(p3)) {
Platform.runLater(()->image3.setCameraStatus("Photo: "+ SomeCodes.GetFileName(p3)));
prc.setFileCenter(p3);
String thumb3 = MakeThumbfile(p3, thumbsize);
if (ValidFile(thumb3)) prc.setThumbCenter(thumb3);
}
}
return "Task 3 Done";
};
Callable<String> task4 = ()->{
if (image4!=null) {
String p4 = image4.TakePhoto(directory,prefix);
if (ValidFile(p4)) {
Platform.runLater(()->image4.setCameraStatus("Photo: "+ SomeCodes.GetFileName(p4)));
prc.setFileRight45(p4);
String thumb4 = MakeThumbfile(p4, thumbsize);
if (ValidFile(thumb4)) prc.setThumbRight45(thumb4);
}
}
return "Task 4 Done";
};
Callable<String> task5 = ()->{
if (image5!=null) {
String p5 = image5.TakePhoto(directory,prefix);
if (ValidFile(p5)) {
Platform.runLater(()->image5.setCameraStatus("Photo: "+ SomeCodes.GetFileName(p5)));
prc.setFileRight90(p5);
String thumb5 = MakeThumbfile(p5, thumbsize);
if (ValidFile(thumb5)) prc.setThumbRight90(thumb5);
}
}
return "Task 5 Done";
};
try{
Future<String> f1 = executor.submit(task1);
Future<String> f2 = executor.submit(task2);
Future<String> f3 = executor.submit(task3);
Future<String> f4 = executor.submit(task4);
Future<String> f5 = executor.submit(task5);
f1.get();
f2.get();
f3.get();
f4.get();
f5.get();
} catch (Exception e){
Logger.error("Error TakePhotos: " + e.getMessage());
} finally {
executor.shutdown();
}
long duration = (System.nanoTime() - nanostart) / 1000000; // in milliseconds
Logger.info("TakePhotos duration: "+duration+" ms");
audioPlayer.PlayFile(audio_foto_selesai, ps);
String[] files = prc.files();
if (files!=null && files.length>0){
InsertSQL(prc);
progressanchor.getChildren().clear();
prefixfile.setText("");
new Thread(()-> UploadToFTP(files)).start();
} else {
Alert alert = new Alert(AlertType.ERROR);
alert.setTitle("Error");
alert.setHeaderText("No Photos Taken");
alert.setContentText("No Photos Taken, please check camera");
alert.showAndWait();
}
} else {
Alert alert = new Alert(AlertType.ERROR);
alert.setTitle("Error");
alert.setHeaderText("Invalid Prefix");
alert.setContentText("Please input valid prefix or scan QR Code");
alert.showAndWait();
}
} else {
Alert alert = new Alert(AlertType.ERROR);
alert.setTitle("Error");
alert.setHeaderText("Invalid Directory");
alert.setContentText("Please select valid directory");
alert.showAndWait();
}
}
@FXML
public void initialize(){
audio_posisikan_muka = ExtractResource("/satu.wav");
audio_posisi_diam = ExtractResource("/dua.wav");
audio_foto_selesai = ExtractResource("/tiga.wav");
audio_ke_ruangtunggu = ExtractResource("/empat.wav");
audioPlayer = new AudioPlayer(1,48000);
Logger.info("Audio Player : "+(audioPlayer.isInited()? "Inited" : "Not Inited"));
cams = null;
try{
String[] xxx = VideoInputFrameGrabber.getDeviceDescriptions();
if (xxx!=null && xxx.length>0){
cams = Arrays.asList(xxx);
}
} catch (Exception e){
Logger.error("Error getting camera list: "+e.getMessage());
}
LoadCameraDetail(cam1, 1, CameraConfigEnum.CameraConfigLeft90);
LoadCameraDetail(cam2, 2, CameraConfigEnum.CameraConfigLeft45);
LoadCameraDetail(cam3, 3, CameraConfigEnum.CameraConfigCenter);
LoadCameraDetail(cam4, 4, CameraConfigEnum.CameraConfigRight45);
LoadCameraDetail(cam5, 5, CameraConfigEnum.CameraConfigRight90);
Platform.runLater(()->{
int indexleft90=-1;
int indexleft45=-1;
int indexcenter=-1;
int indexright45=-1;
int indexright90;
if (cams!=null && !cams.isEmpty()){
String camleft90 = config.getCameraLeft90();
if (ValidString(camleft90)){
int[] indexes = FindIndexes(cams, camleft90);
if (indexes.length>0){
indexleft90 = indexes[0];
if (indexleft90!=-1){
final int finalindex = indexleft90;
new Thread(()-> SetupCameraWithController(image1, camleft90, finalindex)).start();
}
}
}
String camleft45 = config.getCameraLeft45();
if (ValidString(camleft45)){
int[] indexes = FindIndexes(cams, camleft45);
if (indexes.length>0){
indexleft45 = FindFirstIndex(cams, camleft45, indexleft90);
if (indexleft45!=-1) {
final int finalindex = indexleft45;
new Thread(()-> SetupCameraWithController(image2, camleft45, finalindex)).start();
}
}
}
String camcenter = config.getCameraCenter();
if (ValidString(camcenter)){
int[] indexes = FindIndexes(cams, camcenter);
if (indexes.length>0){
indexcenter = FindFirstIndex(cams, camcenter, indexleft90, indexleft45);
if (indexcenter!=-1) {
final int finalindex = indexcenter;
new Thread(()-> SetupCameraWithController(image3, camcenter, finalindex)).start();
}
}
}
String camright45 = config.getCameraRight45();
if (ValidString(camright45)){
int[] indexes = FindIndexes(cams, camright45);
if (indexes.length>0){
indexright45 = FindFirstIndex(cams, camright45, indexleft90, indexleft45, indexcenter);
if (indexright45!=-1) {
final int finalindex = indexright45;
new Thread(()-> SetupCameraWithController(image4, camright45, finalindex)).start();
}
}
}
String camright90 = config.getCameraRight90();
if (ValidString(camright90)){
int[] indexes = FindIndexes(cams, camright90);
if (indexes.length>0){
indexright90 = FindFirstIndex(cams, camright90, indexleft90, indexleft45, indexcenter, indexright45);
if (indexright90!=-1) {
final int finalindex = indexright90;
new Thread(()-> SetupCameraWithController(image5, camright90, finalindex)).start();
}
}
}
}
directorypath.setText(config.getPhotoDirectory());
progressanchor.prefWidthProperty().bind(controlpane.widthProperty());
});
}
public void Unload(){
if (image1!=null) {
image1.StopLiveView();
}
if (image2!=null) {
image2.StopLiveView();
}
if (image3!=null) {
image3.StopLiveView();
}
if (image4!=null) {
image4.StopLiveView();
}
if (image5!=null) {
image5.StopLiveView();
}
config.Save();
}
final PlaybackStatus ps = new PlaybackStatus(){
@Override
public void onPlaybackStarted(String filename) {
if (filename.contains(audio_posisikan_muka)){
Logger.info("Audio Positikan Muka Started");
} else if (filename.contains(audio_posisi_diam)){
Logger.info("Audio Posisi Diam Started");
} else if (filename.contains(audio_foto_selesai)){
Logger.info("Audio Foto Selesai Started");
} else if (filename.contains(audio_ke_ruangtunggu)){
Logger.info("Audio Ke Ruang Tunggu Started");
}
}
@Override
public void onPlaybackFinished(String filename) {
if (filename.contains(audio_posisikan_muka)){
Logger.info("Audio Positikan Muka Finished");
} else if (filename.contains(audio_posisi_diam)){
Logger.info("Audio Posisi Diam Finished");
} else if (filename.contains(audio_foto_selesai)){
Logger.info("Audio Foto Selesai Finished");
} else if (filename.contains(audio_ke_ruangtunggu)){
Logger.info("Audio Ke Ruang Tunggu Finished");
}
}
@Override
public void onPlaybackFailure(String filename) {
if (filename.contains(audio_posisikan_muka)){
Logger.info("Audio Positikan Muka Failure");
} else if (filename.contains(audio_posisi_diam)){
Logger.info("Audio Posisi Diam Failure");
} else if (filename.contains(audio_foto_selesai)){
Logger.info("Audio Foto Selesai Failure");
} else if (filename.contains(audio_ke_ruangtunggu)){
Logger.info("Audio Ke Ruang Tunggu Failure");
}
}
};
private void SetupCameraWithController(Cameradetail image, String cameraname, int devicenumber){
if (image!=null){
String title = switch(image.getCameraConfigEnum()){
case CameraConfigCenter -> "Camera Center";
case CameraConfigLeft45 -> "Camera Left 45";
case CameraConfigLeft90 -> "Camera Left 90";
case CameraConfigRight45 -> "Camera Right 45";
case CameraConfigRight90 -> "Camera Right 90";
};
Platform.runLater(()-> image.setCameraTitle(title));
if (devicenumber!=-1){
OpenCVFrameGrabber grabber = new OpenCVFrameGrabber(devicenumber);
// default
int livewidth = 640;
int liveheight = 480;
int photowidth = 640;
int photoheight = 480;
// mode1 is always the highest resolution
if (cameraname.contains("ACER QHD")){
photowidth = AcerQHD.ModeBest.getWidth();
photoheight = AcerQHD.ModeBest.getHeight();
livewidth = AcerQHD.ModeLive.getWidth();
liveheight = AcerQHD.ModeLive.getHeight();
} else if (cameraname.contains("AVerVision M15W")){
photowidth = AverVisionM15W.ModeBest.getWidth();
photoheight = AverVisionM15W.ModeBest.getHeight();
livewidth = AverVisionM15W.ModeLive.getWidth();
liveheight = AverVisionM15W.ModeLive.getHeight();
} else if (cameraname.contains("Arducam IMX477")){
// mode1 is the best 16:9 mode
// mode5 is the best 4:3 mode
photowidth = ArducamIMX477.ModeBest.getWidth();
photoheight = ArducamIMX477.ModeBest.getHeight();
livewidth = ArducamIMX477.ModeLive.getWidth();
liveheight = ArducamIMX477.ModeLive.getHeight();
} else if (cameraname.contains("OBSBOT Meet 2")){
photowidth = ObsbotMeet2.ModeBest.getWidth();
photoheight = ObsbotMeet2.ModeBest.getHeight();
livewidth = ObsbotMeet2.ModeLive.getWidth();
liveheight = ObsbotMeet2.ModeLive.getHeight();
}
image.SetGrabber(grabber, livewidth,liveheight,photowidth,photoheight);
//TODO reconfirm requirement again
boolean use_face_detector = true;
boolean use_qr_detector = true;
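// LiveCamEvent callbacks: a detected QR code fills the prefix field and triggers the "posisikan muka" audio prompt; face detection updates the camera status label.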
LiveCamEvent lce = new LiveCamEvent() {
@Override
public void onDetectedQRCode(String qrCode) {
Platform.runLater(()->prefixfile.setText(RemoveSpaces(qrCode)));
if (ValidString(qrCode)) audioPlayer.PlayFile(audio_posisikan_muka, ps);
}
@Override
public void onFaceDetector(boolean hasface, int width, int height) {
Platform.runLater(()-> {
String ss = hasface ? String.format("Camera Started, %dx%d@%d, Face Detected", width, height, image.getLiveFPS()) : String.format("Camera Started, %dx%d@%d", width, height, image.getLiveFPS());
image.setCameraStatus(ss);
//String qr = prefixfile.getText();
//if (ValidString(qr) && hasface) audioPlayer.PlayFile(audio_posisikan_muka, ps);
});
}
@Override
public void onLog(String log) {
String ss = String.format("[%s] : %s", title, log);
Logger.info(ss);
}
};
Platform.runLater(()-> image.setCameraStatus("Camera Starting"));
if (image.StartLiveView(lce, title, use_qr_detector, use_face_detector)){
//TODO Start Live View succeeded, decide what else needs to be done here
} else Platform.runLater(()->image.setCameraStatus("Unable to Set Grabber"));
} else Platform.runLater(()->image.setCameraStatus("Camera not found, please check setting"));
}
}
private void LoadCameraDetail(AnchorPane cam, int camid, CameraConfigEnum cc){
try{
FXMLLoader loader = new FXMLLoader(getClass().getResource("cameradetail.fxml"));
AnchorPane child = loader.load();
AnchorPane.setTopAnchor(child, 0.0);
AnchorPane.setBottomAnchor(child, 0.0);
AnchorPane.setLeftAnchor(child, 0.0);
AnchorPane.setRightAnchor(child, 0.0);
cam.getChildren().clear();
cam.getChildren().add(child);
switch(camid){
case 1:
image1 = loader.getController();
image1.setCameraConfigEnum(cc);
break;
case 2:
image2 = loader.getController();
image2.setCameraConfigEnum(cc);
break;
case 3:
image3 = loader.getController();
image3.setCameraConfigEnum(cc);
break;
case 4:
image4 = loader.getController();
image4.setCameraConfigEnum(cc);
break;
case 5:
image5 = loader.getController();
image5.setCameraConfigEnum(cc);
break;
}
} catch (Exception e){
Logger.error("Error LoadCameraDetail: " + e.getMessage());
}
}
private void UploadToFTP(String[] files){
final double uploadprogressheight = 50;
Map<String, UploadProgress> progressmap = new HashMap<>();
for (String filetoupload : files){
Task<AnchorPane> loadtask = new Task<>() {
@Override
protected AnchorPane call() throws Exception {
FXMLLoader loader = new FXMLLoader(getClass().getResource("uploadprogress.fxml"));
AnchorPane pane = loader.load();
pane.prefWidthProperty().bind(progressanchor.widthProperty());
pane.setPrefHeight(uploadprogressheight);
UploadProgress up = loader.getController();
up.SetFile(filetoupload);
up.SetStatus("Initialized");
up.SetProgress(0,0);
int ii = progressmap.size();
AnchorPane.setTopAnchor(pane, (ii*uploadprogressheight)+10);
progressmap.put(GetFileName(filetoupload), up);
return pane;
}
};
loadtask.setOnSucceeded(e-> progressanchor.getChildren().add(loadtask.getValue()));
loadtask.setOnFailed(e-> Logger.error("Error LoadTask: {}",e.getSource().getMessage()));
new Thread(loadtask).start();
}
FTPUpload ftp = new FTPUpload(config.getFTPHost(), toInt(config.getFTPPort()), config.getFTPUser(), config.getFTPPass(), config.getFTPPath());
ftp.UploadFile(new FTPUploadEvent() {
@Override
public void onUploadSuccess(String file) {
Logger.info("Upload Success: {}" ,file);
UploadProgress up = progressmap.get(GetFileName(file));
if (up!=null){
Platform.runLater(()->{
up.SetStatus("Success");
up.SetProgress(1,1);
});
}
}
@Override
public void onUploadFailed(String file) {
Logger.info("Upload Failed: {}",file);
UploadProgress up = progressmap.get(GetFileName(file));
if (up!=null){
Platform.runLater(()->{
up.SetStatus("Failed");
up.SetProgress(0,1);
});
}
}
@Override
public void onUploadProgress(String file, long bytes, long total) {
UploadProgress up = progressmap.get(GetFileName(file));
if (up!=null){
Platform.runLater(()->up.SetProgress(bytes, total));
}
}
@Override
public void onUploadStarted(String file) {
Logger.info("Upload Started: {}",file);
UploadProgress up = progressmap.get(GetFileName(file));
if (up!=null){
Platform.runLater(()->{
up.SetStatus("Started");
up.SetProgress(0,0);
});
}
}
@Override
public void uploadLog(String msg) {
Logger.info("Upload Log: {}",msg);
}
@Override
public void onUploadFinished(int total, int success, int failed, String[] files) {
Logger.info("Upload Finished, Total: {}, Success: {}, Failed: {}", total, success, failed);
Platform.runLater(()->{
audioPlayer.PlayFile(audio_ke_ruangtunggu, ps);
Alert alert = new Alert(AlertType.INFORMATION);
alert.setTitle("Upload Finished");
alert.setHeaderText("Upload Finished");
alert.setContentText("Total: "+total+"\nSuccess: "+success+"\nFailed: "+failed);
alert.showAndWait();
});
}
}, files);
}
// private void Load_UploadProgress(Map<String, UploadProgress> progressmap, String filename, double uploadprogressheight){
// try{
// FXMLLoader loader = new FXMLLoader(getClass().getResource("uploadprogress.fxml"));
// AnchorPane pane = loader.load();
// pane.prefWidthProperty().bind(progressanchor.widthProperty());
// pane.setPrefHeight(uploadprogressheight);
// UploadProgress up = loader.getController();
// up.SetFile(filename);
// up.SetStatus("Started");
// up.SetProgress(0,0);
// int ii = progressmap.size();
// AnchorPane.setTopAnchor(pane, (ii*uploadprogressheight)+10);
// progressanchor.getChildren().add(pane);
// progressmap.put(filename, up);
//
// } catch (Exception e){
// Logger.error("Error loading uploadprogress.fxml: "+e.getMessage());
// }
// }
private void InsertSQL(PhotoReviewClass prc){
Sqlite sql = new Sqlite();
sql.Insert(prc);
}
}

View File

@@ -0,0 +1,143 @@
package id.co.gtc.erhacam;
import Database.TinyLogRow;
import javafx.application.Platform;
import javafx.collections.FXCollections;
import javafx.collections.ObservableList;
import javafx.fxml.FXML;
import javafx.scene.control.*;
import javafx.scene.control.cell.PropertyValueFactory;
import lombok.val;
import org.tinylog.Logger;
import java.io.IOException;
import java.nio.file.Files;
import java.util.concurrent.atomic.AtomicInteger;
import static Config.SomeCodes.GetLogsPath;
public class LogsView {
@FXML
private ComboBox<String> datePicker;
@FXML
private TextField searchField;
@FXML
private TableView<TinyLogRow> logTable;
@FXML
private Button PopulateButton;
private ObservableList<TinyLogRow> tablerows ;
@FXML
private void initialize(){
tablerows = FXCollections.observableArrayList();
logTable.setItems(tablerows);
initialize_tableview();
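// Selecting a date loads <date>.log from the logs directory and parses each line into a TinyLogRow.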
datePicker.setOnAction(e->{
String selected = datePicker.getValue();
tablerows.clear();
try (var lines = Files.lines(GetLogsPath().resolve(selected+".log"))) {
AtomicInteger index = new AtomicInteger(1);
lines.forEachOrdered(l->{
TinyLogRow row = TinyLogRow.Regex(l);
if (row != null && row.HaveContent()) {
row.setIndex(index.getAndIncrement());
tablerows.add(row);
}
});
} catch (IOException ex) {
Logger.error("datePicker error: {}", ex.getMessage());
}
});
PopulateButton.setOnAction(e->{
datePicker.getItems().clear();
try (var paths = Files.list(GetLogsPath())) {
paths.filter(p -> p.toFile().isFile())
.filter(p -> p.getFileName().toString().endsWith(".log"))
.forEachOrdered(p->{
String ss = p.getFileName().toString();
datePicker.getItems().add(ss.substring(0, ss.lastIndexOf('.')));
});
} catch (IOException ex) {
Logger.error("PopulateButton error: {}", ex.getMessage());
}
});
searchField.setOnKeyTyped(e->{
String search = searchField.getText().toLowerCase();
if (search.length() > 0){
ObservableList<TinyLogRow> filtered = FXCollections.observableArrayList();
tablerows.forEach(r->{
if (r.getMessage().toLowerCase().contains(search)){
filtered.add(r);
}
});
logTable.setItems(filtered);
} else {
logTable.setItems(tablerows);
}
});
Platform.runLater(()->{
PopulateButton.fire(); // trigger PopulateButton on show
});
}
private void initialize_tableview(){
logTable.getColumns().clear();
TableColumn<TinyLogRow,Integer> indexCol = new TableColumn<>("No");
indexCol.setCellValueFactory(new PropertyValueFactory<>("index"));
TableColumn<TinyLogRow,String> datetimeCol = new TableColumn<>("DateTime");
datetimeCol.setCellValueFactory(new PropertyValueFactory<>("DateTime"));
TableColumn<TinyLogRow,String> categoryCol = new TableColumn<>("Category");
categoryCol.setCellValueFactory(new PropertyValueFactory<>("Category"));
TableColumn<TinyLogRow,String> messageCol = new TableColumn<>("Message");
messageCol.setCellValueFactory(new PropertyValueFactory<>("Message"));
logTable.getColumns().add(indexCol);
logTable.getColumns().add(datetimeCol);
logTable.getColumns().add(categoryCol);
logTable.getColumns().add(messageCol);
logTable.widthProperty().addListener((obs, oldVal, newVal)->{
double width = (double)newVal;
if (width > (75+150+75+100)){
// wide enough, use fixed column widths
indexCol.setPrefWidth(75);
datetimeCol.setPrefWidth(150);
categoryCol.setPrefWidth(75);
messageCol.setPrefWidth(width-300);
} else {
// narrow, use percentage-based widths
indexCol.setPrefWidth(width*0.075);
datetimeCol.setPrefWidth(width*0.15);
categoryCol.setPrefWidth(width*0.075);
messageCol.setPrefWidth(width*0.7);
}
});
}
public void Unload(){
}
}

View File

@@ -0,0 +1,45 @@
package id.co.gtc.erhacam;
import Config.SomeCodes;
import javafx.application.Application;
import javafx.fxml.FXMLLoader;
import javafx.geometry.Rectangle2D;
import javafx.scene.Scene;
import javafx.stage.Screen;
import javafx.stage.Stage;
import org.tinylog.Logger;
import java.io.IOException;
import static Config.SomeCodes.config;
public class MainApplication extends Application {
@Override
public void start(Stage stage) throws IOException {
FXMLLoader fxmlLoader = new FXMLLoader(MainApplication.class.getResource("main-view.fxml"));
Screen screen = Screen.getPrimary();
Rectangle2D screenbound = screen.getVisualBounds();
Scene scene = new Scene(fxmlLoader.load(), screenbound.getWidth(), screenbound.getHeight());
stage.setTitle("MultiCam Capture App for ERHA");
stage.setScene(scene);
stage.setResizable(false);
stage.setMaximized(true);
stage.setOnCloseRequest(e->{
config.Save();
MainView mainView = fxmlLoader.getController();
mainView.Unload();
Logger.info("Application closed");
});
SomeCodes.LoadQRReader();
SomeCodes.LoadFaceDetector();
stage.show();
Logger.info("Application started");
}
public static void main(String[] args) {
SomeCodes.ExtractResource("/tinylog.properties");
launch();
}
}

View File

@@ -0,0 +1,99 @@
package id.co.gtc.erhacam;
import javafx.event.ActionEvent;
import javafx.fxml.FXML;
import javafx.fxml.FXMLLoader;
import javafx.scene.control.Button;
import javafx.scene.layout.AnchorPane;
import javafx.scene.layout.Pane;
import org.tinylog.Logger;
import static Config.SomeCodes.ValidString;
@SuppressWarnings("unused")
public class MainView {
private String currentselected = "";
@FXML
private Pane mainpane;
@FXML
private Button ReviewButton;
@FXML
private Button CaptureButton;
@FXML
private Button SettingButton;
@FXML
private Button LogsButton;
private Object currentcontroller;
@FXML
private void ReviewClick(ActionEvent event){
if (currentselected.equals("review-view.fxml")) return;
loadContent("review-view.fxml");
}
@FXML
private void CaptureClick(ActionEvent event){
if (currentselected.equals("capture-view.fxml")) return;
loadContent("capture-view.fxml");
}
@FXML
private void SettingClick(ActionEvent event){
if (currentselected.equals("setting-view.fxml")) return;
loadContent("setting-view.fxml");
}
@FXML
private void LogsClick(ActionEvent event){
if (currentselected.equals("logs-view.fxml")) return;
loadContent("logs-view.fxml");
}
@FXML
private void initialize(){
ReviewClick(null);
}
public void Unload(){
loadContent("");
}
private void loadContent(String fxmlfile){
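// Give the current controller a chance to release its resources before swapping in the new view.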
if (currentcontroller!=null){
switch (currentcontroller) {
case CaptureView captureView -> captureView.Unload();
case SettingView settingView -> settingView.Unload();
case ReviewView reviewView -> reviewView.Unload();
case LogsView logsView -> logsView.Unload();
default -> {
}
}
}
if (ValidString(fxmlfile)){
try {
FXMLLoader loader = new FXMLLoader(getClass().getResource(fxmlfile));
AnchorPane child = loader.load();
AnchorPane.setTopAnchor(child, 0.0);
AnchorPane.setRightAnchor(child, 0.0);
AnchorPane.setLeftAnchor(child, 0.0);
AnchorPane.setBottomAnchor(child, 0.0);
mainpane.getChildren().clear();
mainpane.getChildren().add(child);
currentselected = fxmlfile;
currentcontroller = loader.getController();
} catch (Exception e) {
Logger.error("Unable to load " + fxmlfile + ", exception : " + e.getMessage());
}
} else Logger.info("Not loading empty fxml file");
}
}

View File

@@ -0,0 +1,91 @@
package id.co.gtc.erhacam;
import Config.SomeCodes;
import javafx.fxml.FXML;
import javafx.scene.control.Alert;
import javafx.scene.control.Label;
import javafx.scene.image.Image;
import javafx.scene.image.ImageView;
import javafx.scene.layout.HBox;
import org.bytedeco.opencv.global.opencv_imgcodecs;
import org.bytedeco.opencv.opencv_core.Mat;
import org.tinylog.Logger;
import java.io.File;
import java.nio.file.Path;
import static Config.SomeCodes.config;
public class PhotoRow {
@FXML
private Label datetime;
@FXML
private Label prefix;
@FXML
private HBox photos;
private final String borderstyle = "-fx-border-color: black; -fx-border-width: 1px;";
public void setDatetime(String datetime){
this.datetime.setText(datetime);
this.datetime.setStyle(borderstyle);
}
public void setPrefix(String prefix){
this.prefix.setText(prefix);
this.prefix.setStyle(borderstyle);
}
public void setPhotos(int width, int height, String... thumbnails){
photos.setSpacing(10);
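// Each thumbnail opens the matching full-resolution photo from the configured photo directory on double-click.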
for(String photopath : thumbnails){
ImageView imgview = createImageView(loadImage(photopath), width, height);
if (imgview!=null){
photos.getChildren().add(imgview);
//HBox.setMargin(imgview, new Insets(5, 5, 5, 5));
imgview.setStyle(borderstyle);
imgview.setOnMouseClicked(e->{
if (e.getClickCount()>=2){
File ff = new File(photopath);
String hires = Path.of(config.getPhotoDirectory(), ff.getName()).toString();
File hiresfile = new File(hires);
if (hiresfile.isFile()){
SomeCodes.OpenPictureInDefaultViewer(hires);
} else {
Alert alert = new Alert(Alert.AlertType.ERROR);
alert.setTitle("Error");
alert.setHeaderText("File not found");
alert.setContentText("File not found: "+hires);
alert.showAndWait();
}
e.consume();
}
});
}
}
}
private ImageView createImageView(Image img, int width, int height){
if (img!=null){
ImageView imgview = new ImageView(img);
// ImageView is not a Region, so fitWidth/fitHeight control the rendered size
imgview.setFitHeight(height);
imgview.setFitWidth(width);
imgview.setPreserveRatio(true);
return imgview;
}
return null;
}
private Image loadImage(String photopath){
try{
Mat mat = opencv_imgcodecs.imread(photopath);
return SomeCodes.ConvertToImage(mat, 640,480);
} catch (Exception e){
Logger.error("Error loading image: " + photopath + ", Msg : " + e.getMessage());
}
return null;
}
}

View File

@@ -0,0 +1,83 @@
package id.co.gtc.erhacam;
import Database.PhotoReviewClass;
import Database.Sqlite;
import javafx.application.Platform;
import javafx.concurrent.Task;
import javafx.fxml.FXML;
import javafx.fxml.FXMLLoader;
import javafx.scene.layout.AnchorPane;
import org.tinylog.Logger;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
public class ReviewView {
@FXML
private AnchorPane mainpane;
@FXML
private AnchorPane reviewpane;
@FXML
public void initialize(){
Platform.runLater(()->{
reviewpane.prefWidthProperty().bind(mainpane.widthProperty());
int height = 120;
double factor = 4.0/3.0;
Sqlite sql = new Sqlite();
PhotoReviewClass[] prcs = sql.GetAll();
if (prcs!=null){
ExecutorService executor = Executors.newSingleThreadExecutor();
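// Load the PhotoRow panes one at a time on a background thread; each finished row is added to the review pane on the FX thread via setOnSucceeded.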
for(int ii=0;ii<prcs.length;ii++){
PhotoReviewClass prc = prcs[ii];
Thumbloader tl = new Thumbloader(prc,ii,height,factor);
tl.setOnSucceeded(e->{
AnchorPane row = tl.getValue();
if (row!=null) reviewpane.getChildren().add(row);
});
tl.setOnFailed(e-> Logger.error("Thumbloader for "+prc.getPrefix()+" failed, error: "+e.getSource().getException().getMessage()));
executor.submit(tl);
}
executor.shutdown();
}
});
}
// somehow this code is not working, it's not showing the thumbnails
private static class Thumbloader extends Task<AnchorPane> {
private final PhotoReviewClass prc;
private final int height;
private final double factor;
private final int ii;
public Thumbloader(PhotoReviewClass prc, int ii, int height, double factor){
this.prc = prc;
this.height = height;
this.factor = factor;
this.ii = ii;
}
@Override
protected AnchorPane call() {
try{
FXMLLoader loader = new FXMLLoader(getClass().getResource("PhotoRow.fxml"));
AnchorPane row = loader.load();
row.setPrefHeight(height);
PhotoRow pr = loader.getController();
AnchorPane.setTopAnchor(row, (1.0*ii*height)+5.0);
pr.setDatetime(prc.getDateTime());
pr.setPrefix(prc.getPrefix());
pr.setPhotos((int)(factor*height), height, prc.thumbnails());
return row;
} catch (Exception e) {
System.out.println("Error loading PhotoRow.fxml, error: "+e.getMessage());
return null;
}
}
}
public void Unload(){
}
}

View File

@@ -0,0 +1,215 @@
package id.co.gtc.erhacam;
import FTP.FTPCheck;
import javafx.application.Platform;
import javafx.fxml.FXML;
import javafx.scene.control.Alert;
import javafx.scene.control.ComboBox;
import javafx.scene.control.TextField;
import javafx.stage.FileChooser;
import lombok.val;
import org.bytedeco.javacv.VideoInputFrameGrabber;
import org.tinylog.Logger;
import java.io.File;
import static Config.SomeCodes.config;
public class SettingView {
@FXML
private TextField AudioPhase1;
@FXML
private TextField AudioPhase2;
@FXML
private TextField AudioPhase3;
@FXML
private TextField AudioPhase4;
@FXML
private TextField AudioPhase5;
@FXML
private ComboBox<String> CameraLeft90;
@FXML
private ComboBox<String> CameraLeft45;
@FXML
private ComboBox<String> CameraCenter;
@FXML
private ComboBox<String> CameraRight45;
@FXML
private ComboBox<String> CameraRight90;
@FXML
private TextField FTPHost;
@FXML
private TextField FTPPort;
@FXML
private TextField FTPUser;
@FXML
private TextField FTPPass;
@FXML
private TextField FTPPath;
final FileChooser jfc = new FileChooser();
String[] cameranames = null;
@FXML
public void initialize(){
FileChooser.ExtensionFilter filter = new FileChooser.ExtensionFilter("Audio File", "*.wav","*.mp3");
jfc.getExtensionFilters().add(filter);
jfc.setTitle("Select Audio File");
try{
cameranames = VideoInputFrameGrabber.getDeviceDescriptions();
Logger.info("Found "+cameranames.length+" Cameras");
} catch (Exception e){
Logger.error("Unable to detect Cameras, Msg : "+e.getMessage());
}
Platform.runLater(()->{
AudioPhase1.setText(config.getAudioPhase1());
AudioPhase2.setText(config.getAudioPhase2());
AudioPhase3.setText(config.getAudioPhase3());
AudioPhase4.setText(config.getAudioPhase4());
AudioPhase5.setText(config.getAudioPhase5());
CameraLeft90.getItems().clear();
CameraLeft45.getItems().clear();
CameraCenter.getItems().clear();
CameraRight45.getItems().clear();
CameraRight90.getItems().clear();
CameraLeft90.getItems().add("");
CameraLeft45.getItems().add("");
CameraCenter.getItems().add("");
CameraRight45.getItems().add("");
CameraRight90.getItems().add("");
if (cameranames!=null){
for(String camera: cameranames){
Logger.info("adding camera : "+camera+" to camera list");
CameraLeft90.getItems().add(camera);
CameraLeft45.getItems().add(camera);
CameraCenter.getItems().add(camera);
CameraRight45.getItems().add(camera);
CameraRight90.getItems().add(camera);
}
}
CameraLeft90.setValue(config.getCameraLeft90());
CameraLeft45.setValue(config.getCameraLeft45());
CameraCenter.setValue(config.getCameraCenter());
CameraRight45.setValue(config.getCameraRight45());
CameraRight90.setValue(config.getCameraRight90());
FTPHost.setText(config.getFTPHost());
FTPPort.setText(config.getFTPPort());
FTPUser.setText(config.getFTPUser());
FTPPass.setText(config.getFTPPass());
FTPPath.setText(config.getFTPPath());
});
}
public void Unload(){
config.Save();
}
@FXML
private void BrowseAudioPhase1(){
File file = jfc.showOpenDialog(null);
if (file!=null){
config.SetAudioPhase1(file.getAbsolutePath());
AudioPhase1.setText(config.getAudioPhase1());
}
}
@FXML
private void BrowseAudioPhase2(){
File file = jfc.showOpenDialog(null);
if (file!=null){
config.SetAudioPhase2(file.getAbsolutePath());
AudioPhase2.setText(config.getAudioPhase2());
}
}
@FXML
private void BrowseAudioPhase3(){
File file = jfc.showOpenDialog(null);
if (file!=null){
config.SetAudioPhase3(file.getAbsolutePath());
AudioPhase3.setText(config.getAudioPhase3());
}
}
@FXML
private void BrowseAudioPhase4(){
File file = jfc.showOpenDialog(null);
if (file!=null){
config.SetAudioPhase4(file.getAbsolutePath());
AudioPhase4.setText(config.getAudioPhase4());
}
}
@FXML
private void BrowseAudioPhase5(){
File file = jfc.showOpenDialog(null);
if (file!=null){
config.SetAudioPhase5(file.getAbsolutePath());
AudioPhase5.setText(config.getAudioPhase5());
}
}
@FXML
private void ApplyCameraLeft90(){
config.SetCameraLeft90(CameraLeft90.getValue());
}
@FXML
private void ApplyCameraLeft45(){
config.SetCameraLeft45(CameraLeft45.getValue());
}
@FXML
private void ApplyCameraFront(){
config.SetCameraCenter(CameraCenter.getValue());
}
@FXML
private void ApplyCameraRight45(){
config.SetCameraRight45(CameraRight45.getValue());
}
@FXML
private void ApplyCameraRight90(){
config.SetCameraRight90(CameraRight90.getValue());
}
@FXML
private void SaveFTP(){
boolean passive = false;
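// Verify the FTP credentials with a test connection before persisting them to the configuration.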
FTPCheck ftp = new FTPCheck(FTPHost.getText(),Integer.parseInt(FTPPort.getText()),FTPUser.getText(),FTPPass.getText(),FTPPath.getText(), passive);
if (ftp.IsCorrect()){
config.SetFTPHost(FTPHost.getText());
config.SetFTPPort(FTPPort.getText());
config.SetFTPUser(FTPUser.getText());
config.SetFTPPass(FTPPass.getText());
config.SetFTPPath(FTPPath.getText());
val alert = new Alert(Alert.AlertType.INFORMATION);
alert.setTitle("FTP Configuration");
alert.setHeaderText("FTP Configuration Saved");
alert.setContentText("FTP Configuration Saved Successfully");
alert.showAndWait();
} else {
val alert = new Alert(Alert.AlertType.ERROR);
alert.setTitle("FTP Error");
alert.setHeaderText("FTP Configuration Error");
alert.setContentText("FTP Configuration is incorrect, please check your FTP Configuration");
alert.showAndWait();
}
}
}

View File

@@ -0,0 +1,60 @@
package id.co.gtc.erhacam;
import javafx.beans.property.DoubleProperty;
import javafx.beans.property.SimpleDoubleProperty;
import javafx.beans.property.SimpleStringProperty;
import javafx.beans.property.StringProperty;
import javafx.fxml.FXML;
import javafx.scene.control.Label;
import javafx.scene.control.ProgressBar;
import static Config.SomeCodes.ValidString;
public class UploadProgress {
@FXML
private Label labelfile;
@FXML
private Label labelstatus;
@FXML
private ProgressBar progressbar;
@FXML
public void initialize(){
}
/**
* Set the filename to be displayed
* @param filename the filename to be displayed
*/
public void SetFile(String filename){
if (ValidString(filename)){
labelfile.setText(filename);
}
}
/**
* Set the status to be displayed
* @param status the status to be displayed
*/
public void SetStatus(String status){
if (ValidString(status)){
labelstatus.setText(status);
}
}
/**
* Set the progress of the upload
* @param current the current progress in Bytes
* @param total the total progress in Bytes
*/
public void SetProgress(long current, long total){
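// A non-positive total means the size is unknown; leave the bar unchanged to avoid division by zero.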
if (total > 0){
progressbar.setProgress((double)current / (double)total);
}
}
}

View File

@@ -0,0 +1,24 @@
module id.co.gtc.erhacam {
requires javafx.controls;
requires javafx.fxml;
requires static lombok;
requires org.bytedeco.javacv;
requires com.sun.jna;
requires org.bytedeco.opencv;
requires java.desktop;
requires org.apache.commons.net;
requires com.google.zxing;
requires com.google.zxing.javase;
requires javafx.swing;
requires com.google.gson;
requires org.tinylog.api;
requires java.sql;
opens id.co.gtc.erhacam to javafx.fxml;
opens BASS to com.sun.jna;
opens Config to com.google.gson;
opens Database to javafx.base;
exports id.co.gtc.erhacam;
}

View File

@@ -0,0 +1,819 @@
package unusedcodes;
import Camera.ArducamIMX477Preset;
import Camera.CameraProperty;
import Camera.LiveCamEvent;
import Config.CameraConfigEnum;
import com.google.zxing.BinaryBitmap;
import com.google.zxing.NotFoundException;
import com.google.zxing.Result;
import com.google.zxing.client.j2se.BufferedImageLuminanceSource;
import com.google.zxing.common.HybridBinarizer;
import javafx.application.Platform;
import javafx.concurrent.Task;
import javafx.fxml.FXML;
import javafx.scene.control.CheckBox;
import javafx.scene.control.Label;
import javafx.scene.control.Slider;
import javafx.scene.image.Image;
import javafx.scene.image.ImageView;
import javafx.scene.image.PixelFormat;
import javafx.scene.image.WritableImage;
import javafx.scene.layout.AnchorPane;
import lombok.Getter;
import lombok.Setter;
import lombok.val;
import org.bytedeco.javacv.Frame;
import org.bytedeco.javacv.OpenCVFrameGrabber;
import org.bytedeco.opencv.opencv_core.Mat;
import org.bytedeco.opencv.opencv_core.RectVector;
import org.bytedeco.opencv.opencv_core.Scalar;
import org.bytedeco.opencv.opencv_core.UMat;
import org.opencv.videoio.Videoio;
import java.awt.image.BufferedImage;
import java.awt.image.DataBufferByte;
import java.nio.ByteBuffer;
import java.nio.file.Path;
import java.time.LocalDateTime;
import java.util.concurrent.atomic.AtomicBoolean;
import static Config.SomeCodes.*;
import static org.bytedeco.opencv.global.opencv_core.CV_8UC3;
import static org.bytedeco.opencv.global.opencv_core.mean;
import static org.bytedeco.opencv.global.opencv_imgcodecs.imwrite;
import static org.bytedeco.opencv.global.opencv_imgproc.*;
@SuppressWarnings({"unused"})
public class Cameradetail_Arducam {
private final AtomicBoolean Capturing = new AtomicBoolean(false);
private final AtomicBoolean TakingPhoto = new AtomicBoolean(false);
private final AtomicBoolean IsGrabbingLiveView = new AtomicBoolean(false);
private OpenCVFrameGrabber mGrabber = null;
private int liveWidth = 640;
private int liveHeight = 480;
private int photoWidth = 640;
private int photoHeight = 480;
private LiveCamEvent event = null;
private final String photo_extension = ".jpg";
private @Getter @Setter CameraConfigEnum cameraConfigEnum = CameraConfigEnum.CameraConfigCenter;
//private CascadeClassifier faceDetector;
/**
* Get detected QR text from Live View
*/
private @Getter String qrtext = null;
@FXML
private Label cameratitle;
@FXML
private ImageView camerastream;
@FXML
private AnchorPane streamanchor;
@FXML
private Label camerastatus;
@FXML
private Slider brightnessSlider;
@FXML
private Slider contrastSlider;
@FXML
private Slider saturationSlider;
@FXML
private Slider hueSlider;
@FXML
private Slider gainSlider;
@FXML
private Slider exposureSlider;
@FXML
private CheckBox AutoExposure;
@FXML
private CheckBox AutoWhiteBalance;
@FXML
private CheckBox AutoFocus;
private void setSliderValue(Slider sld, CameraProperty prop, double value){
sld.setMin(prop.Min);
sld.setMax(prop.Max);
sld.setValue(value);
}
@FXML
public void initialize(){
camerastream.fitHeightProperty().bind(streamanchor.heightProperty());
//camerastream.fitWidthProperty().bind(streamanchor.widthProperty());
camerastream.setPreserveRatio(true);
Platform.runLater(()->{
setSliderValue(brightnessSlider, ArducamIMX477Preset.Brightness, config.getBrightness(cameraConfigEnum));
setSliderValue(contrastSlider, ArducamIMX477Preset.Contrast, config.getContrast(cameraConfigEnum));
setSliderValue(saturationSlider, ArducamIMX477Preset.Saturation, config.getSaturation(cameraConfigEnum));
setSliderValue(hueSlider, ArducamIMX477Preset.Hue, config.getHue(cameraConfigEnum));
setSliderValue(gainSlider, ArducamIMX477Preset.Gain, config.getGain(cameraConfigEnum));
setSliderValue(exposureSlider, ArducamIMX477Preset.ExposureTime, config.getExposure(cameraConfigEnum));
AutoExposure.setSelected(config.getAutoExposure(cameraConfigEnum));
AutoWhiteBalance.setSelected(config.getAutoWhiteBalance(cameraConfigEnum));
AutoFocus.setSelected(config.getAutoFocus(cameraConfigEnum));
});
AutoExposure.selectedProperty().addListener((obs, oldVal, newVal) -> {
setAutoExposure(newVal);
config.setAutoExposure(cameraConfigEnum, newVal);
if (event!=null) event.onLog("AutoExposure for "+getCameraTitle()+" changed to " + newVal);
});
AutoWhiteBalance.selectedProperty().addListener((obs, oldVal, newVal) -> {
setAutoWB(newVal);
config.setAutoWhiteBalance(cameraConfigEnum, newVal);
if (event!=null) event.onLog("AutoWhiteBalance for "+getCameraTitle()+" changed to "+newVal);
});
AutoFocus.selectedProperty().addListener((obs, oldVal, newVal) -> {
setAutoFocus(newVal);
config.setAutoFocus(cameraConfigEnum, newVal);
if (event!=null) event.onLog("AutoFocus for "+getCameraTitle()+" changed to "+newVal);
});
brightnessSlider.valueProperty().addListener((obs, oldVal, newVal) -> {
setBrightness(newVal.doubleValue());
config.setBrightness(cameraConfigEnum, newVal.doubleValue());
if (event!=null) event.onLog("Brightness for "+getCameraTitle()+" changed to "+newVal);
});
contrastSlider.valueProperty().addListener((obs, oldVal, newVal) -> {
setContrast(newVal.doubleValue());
config.setContrast(cameraConfigEnum, newVal.doubleValue());
if (event!=null) event.onLog("Contrast for "+getCameraTitle()+" changed to "+newVal);
});
saturationSlider.valueProperty().addListener((obs, oldVal, newVal) -> {
setSaturation(newVal.doubleValue());
config.setSaturation(cameraConfigEnum, newVal.doubleValue());
if (event!=null) event.onLog("Saturation for "+getCameraTitle()+" changed to "+newVal);
});
hueSlider.valueProperty().addListener((obs, oldVal, newVal) -> {
setHue(newVal.doubleValue());
config.setHue(cameraConfigEnum, newVal.doubleValue());
if (event!=null) event.onLog("Hue for "+getCameraTitle()+" changed to "+newVal);
});
gainSlider.valueProperty().addListener((obs, oldVal, newVal) -> {
setGain(newVal.doubleValue());
config.setGain(cameraConfigEnum, newVal.doubleValue());
if (event!=null) event.onLog("Gain for "+getCameraTitle()+" changed to "+newVal);
});
exposureSlider.valueProperty().addListener((obs, oldVal, newVal) -> {
setExposure(newVal.doubleValue());
config.setExposure(cameraConfigEnum, newVal.doubleValue());
if (event!=null) event.onLog("Exposure for "+getCameraTitle()+" changed to "+newVal);
});
}
@FXML
public void resetClick(){
brightnessSlider.adjustValue(ArducamIMX477Preset.Brightness.Default);
contrastSlider.adjustValue(ArducamIMX477Preset.Contrast.Default);
saturationSlider.adjustValue(ArducamIMX477Preset.Saturation.Default);
hueSlider.adjustValue(ArducamIMX477Preset.Hue.Default);
gainSlider.adjustValue(ArducamIMX477Preset.Gain.Default);
exposureSlider.adjustValue(ArducamIMX477Preset.ExposureTime.Default);
AutoExposure.setSelected(true);
AutoFocus.setSelected(true);
AutoWhiteBalance.setSelected(true);
}
public boolean isCapturing(){
return Capturing.get();
}
/**
* Set Camera Title
* @param title Title of the Camera
*/
public void setCameraTitle(String title){
if (ValidString(title)){
if (cameratitle!=null){
cameratitle.setText(title);
}
}
}
public void setSaturation(double value){
if (mGrabber!=null){
mGrabber.setOption(Videoio.CAP_PROP_SATURATION, value);
}
}
public double getSaturation(){
if (mGrabber!=null){
return mGrabber.getOption(Videoio.CAP_PROP_SATURATION);
}
return 0;
}
public void setHue(double value){
if (mGrabber!=null){
mGrabber.setOption(Videoio.CAP_PROP_HUE, value);
}
}
public double getHue(){
if (mGrabber!=null){
return mGrabber.getOption(Videoio.CAP_PROP_HUE);
}
return 0;
}
public void setGain(double value){
if (mGrabber!=null){
mGrabber.setOption(Videoio.CAP_PROP_GAIN, value);
}
}
public double getGain(){
if (mGrabber!=null){
return mGrabber.getOption(Videoio.CAP_PROP_GAIN);
}
return 0;
}
/**
* Get Camera Title
* @return Title of the Camera, or empty string if not set
*/
public String getCameraTitle(){
if (cameratitle!=null){
return cameratitle.getText();
}
return "";
}
/**
* Set Camera Status
* @param status Status of the Camera
*/
public void setCameraStatus(String status){
if (ValidString(status)){
if (camerastatus!=null){
camerastatus.setText(status);
}
}
}
/**
* Get Camera Status
* @return Status of the Camera, or empty string if not set
*/
public String getCameraStatus(){
if (camerastatus!=null){
return camerastatus.getText();
}
return "";
}
/**
* Set Camera Stream
* @param image Image to be displayed
*/
public void setCameraStream(Image image){
if (image!=null){
if (camerastream!=null){
camerastream.setImage(image);
}
}
}
/**
* Get Camera Stream
* @return Image of the Camera Stream, or null if not set
*/
public Image getCameraStream(){
if (camerastream!=null){
return camerastream.getImage();
}
return null;
}
public void setFPS(double value){
if (mGrabber!=null){
mGrabber.setOption(Videoio.CAP_PROP_FPS, value);
}
}
public double getFPS(){
if (mGrabber!=null){
return mGrabber.getOption(Videoio.CAP_PROP_FPS);
}
return 0;
}
/**
* Set Camera Grabber and Target Width and Height
* @param grabber Camera Grabber
* @param livewidth Width used on live view
* @param liveheight Height used on live view
* @param photowidth Width used on photo capture
* @param photoheight Height used on photo capture
*/
public void SetGrabber(OpenCVFrameGrabber grabber, int livewidth, int liveheight, int photowidth, int photoheight){
if (mGrabber!=null) {
StopLiveView();
}
liveHeight = liveheight;
liveWidth = livewidth;
photoHeight = photoheight;
photoWidth = photowidth;
mGrabber = grabber;
}
//Exposure and Focus Tricks :
// https://stackoverflow.com/questions/53545945/how-to-set-camera-to-auto-exposure-with-opencv-3-4-2
// https://github.com/opencv/opencv/issues/9738
/**
* Set Auto Exposure Mode
* @param ON if true, set autoexposure on, otherwise off
*/
public void setAutoExposure(boolean ON){
if (mGrabber!=null){
mGrabber.setOption(Videoio.CAP_PROP_AUTO_EXPOSURE, ON?ArducamIMX477Preset.AutoExposure.On:ArducamIMX477Preset.AutoExposure.Off);
}
}
/**
* Get Auto Exposure Mode
* @return true if autoexposure is on, otherwise off
*/
public boolean getAutoExposure(){
if (mGrabber!=null){
return mGrabber.getOption(Videoio.CAP_PROP_AUTO_EXPOSURE)==ArducamIMX477Preset.AutoExposure.On;
}
return false;
}
/**
* Set Exposure when Auto Exposure is Off
* @param value exposure value
*/
public void setExposure(double value){
if (mGrabber!=null){
mGrabber.setOption(Videoio.CAP_PROP_EXPOSURE, value);
}
}
/**
* Get Exposure when Auto Exposure is Off
* @return exposure value
*/
public double getExposure(){
if (mGrabber!=null){
return mGrabber.getOption(Videoio.CAP_PROP_EXPOSURE);
}
return 0;
}
/**
* Set Auto Focus
* @param ON if true, set autofocus on, otherwise off
*/
public void setAutoFocus(boolean ON){
if (mGrabber!=null){
mGrabber.setOption(Videoio.CAP_PROP_AUTOFOCUS, ON?ArducamIMX477Preset.AutoFocus.On:ArducamIMX477Preset.AutoFocus.Off);
}
}
/**
* Get Auto Focus
* @return true if autofocus is on, otherwise off
*/
public boolean getAutoFocus(){
if (mGrabber!=null){
return mGrabber.getOption(Videoio.CAP_PROP_AUTOFOCUS)==ArducamIMX477Preset.AutoFocus.On;
}
return false;
}
public void setAutoWB(boolean ON){
if (mGrabber!=null){
mGrabber.setOption(Videoio.CAP_PROP_AUTO_WB, ON?ArducamIMX477Preset.AutoWhiteBalance.On:ArducamIMX477Preset.AutoWhiteBalance.Off);
}
}
public boolean getAutoWB(){
if (mGrabber!=null){
return mGrabber.getOption(Videoio.CAP_PROP_AUTO_WB)==ArducamIMX477Preset.AutoWhiteBalance.On;
}
return false;
}
/**
* Set Focus when Auto Focus is Off
* @param value focus value
*/
public void setFocus(double value){
if (mGrabber!=null){
mGrabber.setOption(Videoio.CAP_PROP_FOCUS, value);
}
}
/**
* Get Focus when Auto Focus is Off
* @return focus value
*/
public double getFocus(){
if (mGrabber!=null){
return mGrabber.getOption(Videoio.CAP_PROP_FOCUS);
}
return 0;
}
public void setBrightness(double value){
if (mGrabber!=null){
mGrabber.setOption(Videoio.CAP_PROP_BRIGHTNESS, value);
}
}
public double getBrightness(){
if (mGrabber!=null){
return mGrabber.getOption(Videoio.CAP_PROP_BRIGHTNESS);
}
return 0;
}
public void setContrast(double value){
if (mGrabber!=null){
mGrabber.setOption(Videoio.CAP_PROP_CONTRAST, value);
}
}
public double getContrast(){
if (mGrabber!=null){
return mGrabber.getOption(Videoio.CAP_PROP_CONTRAST);
}
return 0;
}
public void setFrameWidth(int width){
if (mGrabber!=null){
mGrabber.setOption(Videoio.CAP_PROP_FRAME_WIDTH, width);
}
}
public double getFrameWidth(){
if (mGrabber!=null){
return mGrabber.getOption(Videoio.CAP_PROP_FRAME_WIDTH);
}
return 0;
}
public void setFrameHeight(int height){
if (mGrabber!=null){
mGrabber.setOption(Videoio.CAP_PROP_FRAME_HEIGHT, height);
}
}
public double getFrameHeight(){
if (mGrabber!=null){
return mGrabber.getOption(Videoio.CAP_PROP_FRAME_HEIGHT);
}
return 0;
}
public void setSharpness(double value){
if (mGrabber!=null){
mGrabber.setOption(Videoio.CAP_PROP_SHARPNESS, value);
}
}
public double getSharpness(){
if (mGrabber!=null){
return mGrabber.getOption(Videoio.CAP_PROP_SHARPNESS);
}
return 0;
}
public void setGamma(double value){
if (mGrabber!=null){
mGrabber.setOption(Videoio.CAP_PROP_GAMMA, value);
}
}
public double getGamma(){
if (mGrabber!=null){
return mGrabber.getOption(Videoio.CAP_PROP_GAMMA);
}
return 0;
}
/**
* Take Photo from Camera
* @param directory directory to save the photo, if null, will use default directory
* @param prefix filename prefix
* @return filename path of the saved photo, or null if failed
*/
@SuppressWarnings("BusyWait")
public String TakePhoto(String directory, String prefix){
if (!ValidDirectory(directory)) directory = currentDirectory;
if (mGrabber!=null){
try{
long nanos = System.nanoTime();
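// Wait until the live-view loop has released the grabber before reconfiguring it for full-resolution capture.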
while(IsGrabbingLiveView.get()) Thread.sleep(10);
long delta = System.nanoTime() - nanos;
double ms = delta / 1000000.0;
if (event!=null) event.onLog("Waited IsGrabbingLiveView for "+ms+" miliseconds");
nanos = System.nanoTime();
TakingPhoto.set(true);
setFrameHeight(photoHeight);
setFrameWidth(photoWidth);
mGrabber.restart();
long delta2 = System.nanoTime() - nanos;
double ms2 = delta2 / 1000000.0;
if (event!=null) event.onLog("Set Frame Width and Height for Photo Capture for "+ms2+" miliseconds");
nanos = System.nanoTime();
setAutoWB(true);
Thread.sleep(1000);
setAutoExposure(true);
Thread.sleep(1000);
long delta3 = System.nanoTime() - nanos;
double ms3 = delta3 / 1000000.0;
if (event!=null) event.onLog("Re-enable Auto WB and Auto Exposure for "+ms3+" miliseconds");
nanos = System.nanoTime();
mGrabber.grab(); // discard the first frame
long delta4 = System.nanoTime() - nanos;
double ms4 = delta4 / 1000000.0;
if (event!=null) event.onLog("Buang frame pertama for "+ms4+" miliseconds");
nanos = System.nanoTime();
Frame frame;
int retry = 0;
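// Grab frames until one arrives at the requested photo resolution, giving up after five retries.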
while(true){
frame = mGrabber.grab();
if (frame!=null) {
if (frame.imageHeight==photoHeight){
if (frame.imageWidth==photoWidth){
break;
}
}
}
retry++;
if (retry>5) {
if (event!=null) event.onLog("TakePhoto failed, Unable to Take Photo correctly after 5 retries");
break;
}
}
long delta5 = System.nanoTime() - nanos;
double ms5 = delta5 / 1000000.0;
if (event!=null) event.onLog("Retry count = "+retry+", Grab Frame for "+ms5+" miliseconds");
if (frame!=null && retry < 5){
if (event!=null) event.onLog("TakePhoto got frame with width: " + frame.imageWidth + " and height: " + frame.imageHeight);
val mat = matconverter.convert(frame);
String filename = Path.of(directory, makeFileName(prefix)).toString();
if (imwrite(filename, mat)){
long delta6 = System.nanoTime() - nanos;
double ms6 = delta6 / 1000000.0;
if (event!=null) event.onLog("TakePhoto success, Photo saved to " + filename + " for "+ms6+" miliseconds");
//if (event!=null) event.onLog("TakePhoto success, Photo saved to " + filename);
return filename;
} else if (event!=null) event.onLog("TakePhoto failed, Unable to Save Photo");
}
} catch (Exception e){
if (event!=null) event.onLog("TakePhoto failed, Unable to Take Photo, Error: " + e.getMessage());
} finally {
// restart camera for live view
try{
setFrameWidth(liveWidth);
setFrameHeight(liveHeight);
mGrabber.restart();
TakingPhoto.set(false);
if (event!=null) event.onLog("TakePhoto finished, Camera Restarted for Live View");
} catch (Exception e){
if (event!=null) event.onLog("TakePhoto failed, Unable to Restart Camera, Error: " + e.getMessage());
}
}
}
return null;
}
private String makeFileName(String prefix){
//make filename with prefix_POSITION_YYYY-MM-DD_HH-MM-SS
LocalDateTime ldt = LocalDateTime.now();
String timetag = ldt.getYear() + "-" + ldt.getMonthValue() + "-" + ldt.getDayOfMonth() + "_" + ldt.getHour() + "-" + ldt.getMinute() + "-" + ldt.getSecond();
return prefix+"_"
+ switch(cameratitle.getText()){
case "Camera Left 90" -> "LEFT90";
case "Camera Left 45" -> "LEFT45";
case "Camera Center" -> "CENTER";
case "Camera Right 45" -> "RIGHT45";
case "Camera Right 90" -> "RIGHT90";
default -> "UNKNOWN";
}
+ "_" + timetag + photo_extension;
}
public void StopLiveView(){
Capturing.set(false);
if (mGrabber!=null){
try{
mGrabber.stop();
Platform.runLater(()->setCameraStatus("Camera Stopped"));
} catch (Exception e){
if (event!=null) event.onLog("StopLiveView failed, Unable to Stop Camera, Error: " + e.getMessage());
}
}
TakingPhoto.set(false);
IsGrabbingLiveView.set(false);
}
public boolean StartLiveView(LiveCamEvent event, String cameratitle, final boolean use_qr , final boolean use_face) {
this.event = event;
if (mGrabber != null) {
try {
//StopLiveView();
if (use_qr && event!=null) event.onLog("QR Reader loaded");
if (use_face && event!=null) event.onLog("Face detector loaded");
setFrameHeight(liveHeight);
setFrameWidth(liveWidth);
mGrabber.start();
Capturing.set(true);
//val converter = new OpenCVFrameConverter.ToMat();
Platform.runLater(()->setCameraStatus("Camera Started, " + liveWidth + "x" + liveHeight));
if (event!=null) event.onLog("Camera started with resolution " + liveWidth + "x" + liveHeight);
AutoExposure.setSelected(true);
AutoFocus.setSelected(true);
AutoWhiteBalance.setSelected(true);
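// Background task: grab frames in a loop, optionally run QR and face detection on a grayscale copy, and publish each frame as a JavaFX Image.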
val task = new Task<Image>() {
@SuppressWarnings("BusyWait")
@Override
protected Image call() {
while (Capturing.get()) {
try {
// while a photo capture is in progress, do not grab live-view frames
while(TakingPhoto.get() && Capturing.get()){
Thread.sleep(10);
}
if (!Capturing.get()) return null;
IsGrabbingLiveView.set(true);
val frame = mGrabber.grab();
IsGrabbingLiveView.set(false);
if (frame != null) {
val mat = matconverter.convert(frame);
val umat = new UMat(); // use OpenCL
mat.copyTo(umat);
val graymat = new UMat(); // use OpenCL
cvtColor(umat,graymat, COLOR_BGR2GRAY);
if (use_qr){
String qr = DetectQRFromMat(graymat);
if (qr!=null) {
if (!qr.equals(qrtext)){
qrtext = qr;
if (event!=null) event.onLog("QR Detected: " + qr);
if (event!=null) event.onDetectedQRCode(qr);
}
}
}
if (use_face){
RectVector face = DetectFace(graymat);
if (face!=null && face.size()>0){
if (event!=null) event.onFaceDetector(true);
for(int i=0; i<face.size(); i++){
val rect = face.get(i);
rectangle(umat, rect, Scalar.GREEN);
}
} else if (event!=null) event.onFaceDetector(false);
}
umat.copyTo(mat); // copy back to the CPU
val rgbmat = new Mat(mat.size(), CV_8UC3);
cvtColor(mat, rgbmat, COLOR_BGR2RGB);
updateValue(matToWritableImage(rgbmat, mat.cols(), mat.rows()));
}
} catch (Exception e) {
if (event!=null) event.onLog("Unable to Grab Frame, Error: " + e.getMessage());
//if (!Capturing.get()) Platform.runLater(this::StopLiveView);
}
}
return null;
}
};
// the task's value (the latest frame as an Image) is pushed to camerastream
task.valueProperty().addListener((obs, oldVal, newVal) -> {
if (newVal != null) {
setCameraStream(newVal);
}
});
// start task
new Thread(task).start();
return true;
} catch (Exception e) {
if (event!=null) event.onLog("StartLiveView failed, Unable to Start Camera, Error: " + e.getMessage());
}
} else if (event!=null) event.onLog("StartLiveView failed, grabber is null");
return false;
}
/*private void UpdateCameraStreamFromMat(Mat mat){
val rgbmat = new Mat(mat.size(), CV_8UC3);
cvtColor(mat, rgbmat, COLOR_BGR2RGB);
val updated = matToWritableImage(rgbmat, mat.cols(), mat.rows());
Platform.runLater(()->setCameraStream(updated));
}*/
/**
* Detect QR Code from Mat
* @param graymat Mat in Gray Scale
* @return QR Code Text, or null if not detected
*/
private String DetectQRFromMat(UMat graymat){
if (qrreader!=null){
Mat mat = new Mat();
graymat.copyTo(mat); // back to CPU, because zxing only accepts BufferedImage
val bufferedImage = matToBufferedImage(mat);
val title = cameratitle.getText();
val binaryBitmap = new BinaryBitmap(new HybridBinarizer(new BufferedImageLuminanceSource(bufferedImage)));
try{
Result result = qrreader.decode(binaryBitmap);
if (result!=null){
return result.getText();
}
} catch (NotFoundException ignored) {
}
}
return null;
}
/**
* Detect Face from Mat
* @param graymat Mat in Gray Scale
* @return RectVector of detected faces, or null if the face detector is not loaded
*/
private RectVector DetectFace(UMat graymat){
if (faceDetector!=null){
val face = new RectVector();
faceDetector.detectMultiScale(graymat, face);
return face;
}
return null;
}
private double getBrightnessFromGrayMat(Mat graymat){
Scalar mean = mean(graymat);
return mean.get(0);
}
private WritableImage matToWritableImage(Mat mat, int cols, int rows){
WritableImage writableImage = new WritableImage(cols, rows);
ByteBuffer buffer = mat.createBuffer();
PixelFormat<ByteBuffer> pixelFormat = PixelFormat.getByteRgbInstance();
writableImage.getPixelWriter().setPixels(0, 0, mat.cols(), mat.rows(), pixelFormat, buffer, mat.cols() * 3);
return writableImage;
}
private BufferedImage matToBufferedImage(Mat mat){
int type = BufferedImage.TYPE_BYTE_GRAY;
if (mat.channels() > 1) {
type = BufferedImage.TYPE_3BYTE_BGR;
}
BufferedImage image = new BufferedImage(mat.cols(), mat.rows(), type);
byte[] data = ((DataBufferByte) image.getRaster().getDataBuffer()).getData();
mat.data().get(data);
return image;
}
}

View File

@@ -0,0 +1,632 @@
package unusedcodes;
import Camera.ArducamIMX477Preset;
import Config.CameraConfig;
import Config.CameraConfigEnum;
import lombok.Getter;
import lombok.val;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileOutputStream;
import java.nio.file.Path;
import java.util.Properties;
import static Config.SomeCodes.*;
@SuppressWarnings("unused")
@Getter
public class ConfigFile_Arducam {
private String AudioPhase1;
private String AudioPhase2;
private String AudioPhase3;
private String AudioPhase4;
private String AudioPhase5;
private String CameraLeft90;
private String CameraLeft45;
private String CameraCenter;
private String CameraRight45;
private String CameraRight90;
private CameraConfig ConfigLeft90 = new CameraConfig();
private CameraConfig ConfigLeft45 = new CameraConfig();
private CameraConfig ConfigCenter = new CameraConfig();
private CameraConfig ConfigRight45 =new CameraConfig();
private CameraConfig ConfigRight90 = new CameraConfig();
private String FTPHost;
private String FTPPort;
private String FTPUser;
private String FTPPass;
private String FTPPath;
private String PhotoDirectory;
private boolean needsave = false;
public ConfigFile_Arducam(){
System.out.println("Current working directory in Java : " + currentDirectory);
Load();
}
public void SetPhotoDirectory(String path){
if (ValidString(path)){
if (!path.equals(PhotoDirectory)){
File ff = new File(path);
if (ff.isDirectory()){
PhotoDirectory = path;
needsave = true;
}
}
}
}
public void SetAudioPhase1(String path){
if (ValidString(path)){
if (!path.equals(AudioPhase1)){
AudioPhase1 = path;
needsave = true;
}
}
}
public void SetAudioPhase2(String path){
if (ValidString(path)){
if (!path.equals(AudioPhase2)){
AudioPhase2 = path;
needsave = true;
}
}
}
public void SetAudioPhase3(String path){
if (ValidString(path)){
if (!path.equals(AudioPhase3)){
AudioPhase3 = path;
needsave = true;
}
}
}
public void SetAudioPhase4(String path){
if (ValidString(path)){
if (!path.equals(AudioPhase4)){
AudioPhase4 = path;
needsave = true;
}
}
}
public void SetAudioPhase5(String path){
if (ValidString(path)){
if (!path.equals(AudioPhase5)){
AudioPhase5 = path;
needsave = true;
}
}
}
/**
* Set Camera Path for Left 90 Degree
* @param path Path to Camera, or empty string to disable
*/
public void SetCameraLeft90(String path){
if (path==null) path="";
if (!path.equals(CameraLeft90)){
CameraLeft90 = path;
needsave = true;
}
}
/**
* Set Camera Path for Left 45 Degree
* @param path Path to Camera, or empty string to disable
*/
public void SetCameraLeft45(String path){
if (path==null) path="";
if (!path.equals(CameraLeft45)){
CameraLeft45 = path;
needsave = true;
}
}
/**
* Set Camera Path for Center
* @param path Path to Camera, or empty string to disable
*/
public void SetCameraCenter(String path){
if (path==null) path="";
if (!path.equals(CameraCenter)){
CameraCenter = path;
needsave = true;
}
}
/**
* Set Camera Path for Right 45 Degree
* @param path Path to Camera, or empty string to disable
*/
public void SetCameraRight45(String path){
if (path==null) path="";
if (!path.equals(CameraRight45)){
CameraRight45 = path;
needsave = true;
}
}
/**
* Set Camera Path for Right 90 Degree
* @param path Path to Camera, or empty string to disable
*/
public void SetCameraRight90(String path){
if (path==null) path="";
if (!path.equals(CameraRight90)){
CameraRight90 = path;
needsave = true;
}
}
public void SetFTPHost(String host){
if (ValidString(host)){
if (!host.equals(FTPHost)){
FTPHost = host;
needsave = true;
}
}
}
public void SetFTPPort(String port){
if (ValidString(port)){
if (!port.equals(FTPPort)){
if (ValidPortNumber(toInt(port))){
FTPPort = port;
needsave = true;
}
}
}
}
public void SetFTPUser(String user){
if (ValidString(user)){
if (!user.equals(FTPUser)){
FTPUser = user;
needsave = true;
}
}
}
public void SetFTPPass(String pass){
if (ValidString(pass)){
if (!pass.equals(FTPPass)){
FTPPass = pass;
needsave = true;
}
}
}
public void SetFTPPath(String path){
if (ValidString(path)){
if (!path.equals(FTPPath)){
FTPPath = path;
needsave = true;
}
}
}
public void setBrightness(CameraConfigEnum cc, double value){
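// Each setter maps the camera position to its CameraConfig block and marks the config dirty only when the value actually changes; the getters below mirror the same mapping.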
CameraConfig conf = switch (cc) {
case CameraConfigLeft90 -> ConfigLeft90;
case CameraConfigLeft45 -> ConfigLeft45;
case CameraConfigCenter -> ConfigCenter;
case CameraConfigRight45 -> ConfigRight45;
case CameraConfigRight90 -> ConfigRight90;
};
if (conf.Brightness != value){
conf.Brightness = value;
needsave = true;
}
}
public double getBrightness(CameraConfigEnum cc){
return switch (cc) {
case CameraConfigLeft90 -> ConfigLeft90.Brightness;
case CameraConfigLeft45 -> ConfigLeft45.Brightness;
case CameraConfigCenter -> ConfigCenter.Brightness;
case CameraConfigRight45 -> ConfigRight45.Brightness;
case CameraConfigRight90 -> ConfigRight90.Brightness;
};
}
public void setContrast(CameraConfigEnum cc, double value){
CameraConfig conf = switch (cc) {
case CameraConfigLeft90 -> ConfigLeft90;
case CameraConfigLeft45 -> ConfigLeft45;
case CameraConfigCenter -> ConfigCenter;
case CameraConfigRight45 -> ConfigRight45;
case CameraConfigRight90 -> ConfigRight90;
};
if (conf.Contrast != value){
conf.Contrast = value;
needsave = true;
}
}
public double getContrast(CameraConfigEnum cc){
return switch (cc){
case CameraConfigLeft90 -> ConfigLeft90.Contrast;
case CameraConfigLeft45 -> ConfigLeft45.Contrast;
case CameraConfigCenter -> ConfigCenter.Contrast;
case CameraConfigRight45 -> ConfigRight45.Contrast;
case CameraConfigRight90 -> ConfigRight90.Contrast;
};
}
public void setSaturation(CameraConfigEnum cc, double value){
CameraConfig conf = switch (cc) {
case CameraConfigLeft90 -> ConfigLeft90;
case CameraConfigLeft45 -> ConfigLeft45;
case CameraConfigCenter -> ConfigCenter;
case CameraConfigRight45 -> ConfigRight45;
case CameraConfigRight90 -> ConfigRight90;
};
if (conf.Saturation != value){
conf.Saturation = value;
needsave = true;
}
}
public double getSaturation(CameraConfigEnum cc){
return switch (cc){
case CameraConfigLeft90 -> ConfigLeft90.Saturation;
case CameraConfigLeft45 -> ConfigLeft45.Saturation;
case CameraConfigCenter -> ConfigCenter.Saturation;
case CameraConfigRight45 -> ConfigRight45.Saturation;
case CameraConfigRight90 -> ConfigRight90.Saturation;
};
}
public void setHue(CameraConfigEnum cc, double value){
CameraConfig conf = switch (cc) {
case CameraConfigLeft90 -> ConfigLeft90;
case CameraConfigLeft45 -> ConfigLeft45;
case CameraConfigCenter -> ConfigCenter;
case CameraConfigRight45 -> ConfigRight45;
case CameraConfigRight90 -> ConfigRight90;
};
if (conf.Hue != value){
conf.Hue = value;
needsave = true;
}
}
public double getHue(CameraConfigEnum cc){
return switch (cc){
case CameraConfigLeft90 -> ConfigLeft90.Hue;
case CameraConfigLeft45 -> ConfigLeft45.Hue;
case CameraConfigCenter -> ConfigCenter.Hue;
case CameraConfigRight45 -> ConfigRight45.Hue;
case CameraConfigRight90 -> ConfigRight90.Hue;
};
}
public void setGain(CameraConfigEnum cc, double value){
CameraConfig conf = switch (cc) {
case CameraConfigLeft90 -> ConfigLeft90;
case CameraConfigLeft45 -> ConfigLeft45;
case CameraConfigCenter -> ConfigCenter;
case CameraConfigRight45 -> ConfigRight45;
case CameraConfigRight90 -> ConfigRight90;
};
if (conf.Gain != value){
conf.Gain = value;
needsave = true;
}
}
public double getGain(CameraConfigEnum cc){
return switch (cc){
case CameraConfigLeft90 -> ConfigLeft90.Gain;
case CameraConfigLeft45 -> ConfigLeft45.Gain;
case CameraConfigCenter -> ConfigCenter.Gain;
case CameraConfigRight45 -> ConfigRight45.Gain;
case CameraConfigRight90 -> ConfigRight90.Gain;
};
}
public void setExposure(CameraConfigEnum cc, double value){
CameraConfig conf = switch (cc) {
case CameraConfigLeft90 -> ConfigLeft90;
case CameraConfigLeft45 -> ConfigLeft45;
case CameraConfigCenter -> ConfigCenter;
case CameraConfigRight45 -> ConfigRight45;
case CameraConfigRight90 -> ConfigRight90;
};
if (conf.Exposure != value){
conf.Exposure = value;
needsave = true;
}
}
public double getExposure(CameraConfigEnum cc){
return switch (cc){
case CameraConfigLeft90 -> ConfigLeft90.Exposure;
case CameraConfigLeft45 -> ConfigLeft45.Exposure;
case CameraConfigCenter -> ConfigCenter.Exposure;
case CameraConfigRight45 -> ConfigRight45.Exposure;
case CameraConfigRight90 -> ConfigRight90.Exposure;
};
}
public void setSharpness(CameraConfigEnum cc, double value){
CameraConfig conf = switch (cc) {
case CameraConfigLeft90 -> ConfigLeft90;
case CameraConfigLeft45 -> ConfigLeft45;
case CameraConfigCenter -> ConfigCenter;
case CameraConfigRight45 -> ConfigRight45;
case CameraConfigRight90 -> ConfigRight90;
};
if (conf.Sharpness != value){
conf.Sharpness = value;
needsave = true;
}
}
public double getSharpness(CameraConfigEnum cc){
return switch (cc){
case CameraConfigLeft90 -> ConfigLeft90.Sharpness;
case CameraConfigLeft45 -> ConfigLeft45.Sharpness;
case CameraConfigCenter -> ConfigCenter.Sharpness;
case CameraConfigRight45 -> ConfigRight45.Sharpness;
case CameraConfigRight90 -> ConfigRight90.Sharpness;
};
}
public void setGamma(CameraConfigEnum cc, double value){
CameraConfig conf = switch (cc) {
case CameraConfigLeft90 -> ConfigLeft90;
case CameraConfigLeft45 -> ConfigLeft45;
case CameraConfigCenter -> ConfigCenter;
case CameraConfigRight45 -> ConfigRight45;
case CameraConfigRight90 -> ConfigRight90;
};
if (conf.Gamma != value){
conf.Gamma = value;
needsave = true;
}
}
public double getGamma(CameraConfigEnum cc){
return switch (cc){
case CameraConfigLeft90 -> ConfigLeft90.Gamma;
case CameraConfigLeft45 -> ConfigLeft45.Gamma;
case CameraConfigCenter -> ConfigCenter.Gamma;
case CameraConfigRight45 -> ConfigRight45.Gamma;
case CameraConfigRight90 -> ConfigRight90.Gamma;
};
}
public void setAutoExposure(CameraConfigEnum cc, boolean value){
CameraConfig conf = switch (cc) {
case CameraConfigLeft90 -> ConfigLeft90;
case CameraConfigLeft45 -> ConfigLeft45;
case CameraConfigCenter -> ConfigCenter;
case CameraConfigRight45 -> ConfigRight45;
case CameraConfigRight90 -> ConfigRight90;
};
if (conf.AutoExposure != value){
conf.AutoExposure = value;
needsave = true;
}
}
public boolean getAutoExposure(CameraConfigEnum cc){
return switch (cc){
case CameraConfigLeft90 -> ConfigLeft90.AutoExposure;
case CameraConfigLeft45 -> ConfigLeft45.AutoExposure;
case CameraConfigCenter -> ConfigCenter.AutoExposure;
case CameraConfigRight45 -> ConfigRight45.AutoExposure;
case CameraConfigRight90 -> ConfigRight90.AutoExposure;
};
}
public void setAutoFocus(CameraConfigEnum cc, boolean value){
CameraConfig conf = switch (cc) {
case CameraConfigLeft90 -> ConfigLeft90;
case CameraConfigLeft45 -> ConfigLeft45;
case CameraConfigCenter -> ConfigCenter;
case CameraConfigRight45 -> ConfigRight45;
case CameraConfigRight90 -> ConfigRight90;
};
if (conf.AutoFocus != value){
conf.AutoFocus = value;
needsave = true;
}
}
public boolean getAutoFocus(CameraConfigEnum cc){
return switch (cc){
case CameraConfigLeft90 -> ConfigLeft90.AutoFocus;
case CameraConfigLeft45 -> ConfigLeft45.AutoFocus;
case CameraConfigCenter -> ConfigCenter.AutoFocus;
case CameraConfigRight45 -> ConfigRight45.AutoFocus;
case CameraConfigRight90 -> ConfigRight90.AutoFocus;
};
}
public void setAutoWhiteBalance(CameraConfigEnum cc, boolean value){
CameraConfig conf = switch (cc) {
case CameraConfigLeft90 -> ConfigLeft90;
case CameraConfigLeft45 -> ConfigLeft45;
case CameraConfigCenter -> ConfigCenter;
case CameraConfigRight45 -> ConfigRight45;
case CameraConfigRight90 -> ConfigRight90;
};
if (conf.AutoWhiteBalance != value){
conf.AutoWhiteBalance = value;
needsave = true;
}
}
public boolean getAutoWhiteBalance(CameraConfigEnum cc){
return switch (cc){
case CameraConfigLeft90 -> ConfigLeft90.AutoWhiteBalance;
case CameraConfigLeft45 -> ConfigLeft45.AutoWhiteBalance;
case CameraConfigCenter -> ConfigCenter.AutoWhiteBalance;
case CameraConfigRight45 -> ConfigRight45.AutoWhiteBalance;
case CameraConfigRight90 -> ConfigRight90.AutoWhiteBalance;
};
}
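    /**
     * Load configuration from config.properties in the current directory.
     * Falls back to CreateDefault() when the file is missing, incomplete, or unreadable.
     */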
private void Load(){
File ff = Path.of(currentDirectory, "config.properties").toFile();
if (ff.isFile()){
System.out.println("Load config file at "+ff.getPath());
try{
                Properties prop = new Properties();
                // use try-with-resources so the stream is always closed
                try (FileInputStream FIS = new FileInputStream(ff.getPath())) {
                    prop.load(FIS);
                }
boolean allcorrect = true;
if (prop.getProperty("AudioPhase1") == null) allcorrect = false;
if (prop.getProperty("AudioPhase2") == null) allcorrect = false;
if (prop.getProperty("AudioPhase3") == null) allcorrect = false;
if (prop.getProperty("AudioPhase4") == null) allcorrect = false;
if (prop.getProperty("AudioPhase5") == null) allcorrect = false;
if (prop.getProperty("CameraLeft90") == null) allcorrect = false;
if (prop.getProperty("CameraLeft45") == null) allcorrect = false;
if (prop.getProperty("CameraCenter") == null) allcorrect = false;
if (prop.getProperty("CameraRight45") == null) allcorrect = false;
if (prop.getProperty("CameraRight90") == null) allcorrect = false;
if (prop.getProperty("FTPHost") == null) allcorrect = false;
if (prop.getProperty("FTPPort") == null) allcorrect = false;
if (prop.getProperty("FTPUser") == null) allcorrect = false;
if (prop.getProperty("FTPPass") == null) allcorrect = false;
if (prop.getProperty("FTPPath") == null) allcorrect = false;
if (prop.getProperty("PhotoDirectory") == null) allcorrect = false;
if (prop.getProperty(CameraConfigEnum.CameraConfigLeft90.toString()) == null) allcorrect = false;
if (prop.getProperty(CameraConfigEnum.CameraConfigLeft45.toString()) == null) allcorrect = false;
if (prop.getProperty(CameraConfigEnum.CameraConfigCenter.toString()) == null) allcorrect = false;
if (prop.getProperty(CameraConfigEnum.CameraConfigRight45.toString()) == null) allcorrect = false;
if (prop.getProperty(CameraConfigEnum.CameraConfigRight90.toString()) == null) allcorrect = false;
if (allcorrect){
AudioPhase1 = prop.getProperty("AudioPhase1");
AudioPhase2 = prop.getProperty("AudioPhase2");
AudioPhase3 = prop.getProperty("AudioPhase3");
AudioPhase4 = prop.getProperty("AudioPhase4");
AudioPhase5 = prop.getProperty("AudioPhase5");
CameraLeft90 = prop.getProperty("CameraLeft90");
CameraLeft45 = prop.getProperty("CameraLeft45");
CameraCenter = prop.getProperty("CameraCenter");
CameraRight45 = prop.getProperty("CameraRight45");
CameraRight90 = prop.getProperty("CameraRight90");
FTPHost = prop.getProperty("FTPHost");
FTPPort = prop.getProperty("FTPPort");
FTPUser = prop.getProperty("FTPUser");
FTPPass = prop.getProperty("FTPPass");
FTPPath = prop.getProperty("FTPPath");
PhotoDirectory = prop.getProperty("PhotoDirectory");
ConfigLeft90 = gson.fromJson(prop.getProperty(CameraConfigEnum.CameraConfigLeft90.toString()), CameraConfig.class);
ConfigLeft45 = gson.fromJson(prop.getProperty(CameraConfigEnum.CameraConfigLeft45.toString()), CameraConfig.class);
ConfigCenter = gson.fromJson(prop.getProperty(CameraConfigEnum.CameraConfigCenter.toString()), CameraConfig.class);
ConfigRight45 = gson.fromJson(prop.getProperty(CameraConfigEnum.CameraConfigRight45.toString()), CameraConfig.class);
ConfigRight90 = gson.fromJson(prop.getProperty(CameraConfigEnum.CameraConfigRight90.toString()), CameraConfig.class);
System.out.println("Config Loaded");
return;
} else System.out.println("Config File Not Correct, Creating Default");
} catch (Exception e){
System.out.println("Error Load Config: " + e.getMessage()+", Creating Default");
}
} else System.out.println("Config File Not Found, Creating Default");
CreateDefault();
}
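    /**
     * Create default configuration values and persist them immediately via Save().
     */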
private void CreateDefault(){
AudioPhase1 = Path.of(currentDirectory, "audio", "phase1.mp3").toString();
AudioPhase2 = Path.of(currentDirectory, "audio", "phase2.mp3").toString();
AudioPhase3 = Path.of(currentDirectory, "audio", "phase3.mp3").toString();
AudioPhase4 = Path.of(currentDirectory, "audio", "phase4.mp3").toString();
AudioPhase5 = Path.of(currentDirectory, "audio", "phase5.mp3").toString();
CameraLeft90 = "";
CameraLeft45 = "";
CameraCenter = "";
CameraRight45 = "";
CameraRight90 = "";
FTPHost = "192.168.10.2";
FTPPort = "21";
FTPUser = "user";
FTPPass = "password";
FTPPath = "/";
PhotoDirectory = currentDirectory;
SetDefaultCameraConfig(ConfigLeft90);
SetDefaultCameraConfig(ConfigLeft45);
SetDefaultCameraConfig(ConfigCenter);
SetDefaultCameraConfig(ConfigRight45);
SetDefaultCameraConfig(ConfigRight90);
System.out.println("Default Config Created");
needsave = true;
Save();
}
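    /**
     * Reset a CameraConfig to the ArducamIMX477Preset defaults; does nothing when cc is null.
     * @param cc camera configuration to reset
     */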
private void SetDefaultCameraConfig(CameraConfig cc){
if (cc!=null){
cc.AutoExposure = true;
cc.AutoFocus = true;
cc.AutoWhiteBalance = true;
cc.Brightness = ArducamIMX477Preset.Brightness.Default;
cc.Contrast = ArducamIMX477Preset.Contrast.Default;
cc.Exposure = ArducamIMX477Preset.ExposureTime.Default;
cc.Gain = ArducamIMX477Preset.Gain.Default;
cc.Saturation = ArducamIMX477Preset.Saturation.Default;
cc.Hue = ArducamIMX477Preset.Hue.Default;
cc.Gamma = ArducamIMX477Preset.Gamma.Default;
cc.Sharpness = ArducamIMX477Preset.Sharpness.Default;
}
}
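    /**
     * Save configuration to config.properties in the current directory.
     * Writes only when a setter has flagged a change via needsave.
     */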
public void Save(){
if (!needsave) return;
needsave = false;
Properties prop = new Properties();
prop.setProperty("AudioPhase1", AudioPhase1);
prop.setProperty("AudioPhase2", AudioPhase2);
prop.setProperty("AudioPhase3", AudioPhase3);
prop.setProperty("AudioPhase4", AudioPhase4);
prop.setProperty("AudioPhase5", AudioPhase5);
prop.setProperty("CameraLeft90", CameraLeft90);
prop.setProperty("CameraLeft45", CameraLeft45);
prop.setProperty("CameraCenter", CameraCenter);
prop.setProperty("CameraRight45", CameraRight45);
prop.setProperty("CameraRight90", CameraRight90);
prop.setProperty("FTPHost", FTPHost);
prop.setProperty("FTPPort", FTPPort);
prop.setProperty("FTPUser",FTPUser);
prop.setProperty("FTPPass", FTPPass);
prop.setProperty("FTPPath", FTPPath);
prop.setProperty("PhotoDirectory", PhotoDirectory);
prop.setProperty(CameraConfigEnum.CameraConfigLeft90.toString(), gson.toJson(ConfigLeft90));
prop.setProperty(CameraConfigEnum.CameraConfigLeft45.toString(), gson.toJson(ConfigLeft45));
prop.setProperty(CameraConfigEnum.CameraConfigCenter.toString(), gson.toJson(ConfigCenter));
prop.setProperty(CameraConfigEnum.CameraConfigRight45.toString(), gson.toJson(ConfigRight45));
prop.setProperty(CameraConfigEnum.CameraConfigRight90.toString(), gson.toJson(ConfigRight90));
        // use try-with-resources so the output stream is always closed
        try (FileOutputStream fos = new FileOutputStream(Path.of(currentDirectory, "config.properties").toString())) {
            prop.store(fos, null);
            System.out.println("Config Saved");
        } catch (Exception e) {
            System.out.println("Error Save Config: " + e.getMessage());
            needsave = true; // restore the dirty flag so a later Save() can retry the write
        }
}
}