commit 05/02/2026
This commit is contained in:
1
.idea/inspectionProfiles/Project_Default.xml
generated
1
.idea/inspectionProfiles/Project_Default.xml
generated
@@ -17,5 +17,6 @@
|
||||
<option name="processComments" value="true" />
|
||||
</inspection_tool>
|
||||
<inspection_tool class="SqlDialectInspection" enabled="false" level="WARNING" enabled_by_default="false" />
|
||||
<inspection_tool class="SqlSourceToSinkFlow" enabled="false" level="WARNING" enabled_by_default="false" />
|
||||
</profile>
|
||||
</component>
|
||||
@@ -16,16 +16,20 @@ function load_log_data(datevalue){
|
||||
data.forEach(function(item, index){
|
||||
tblog.row.add({
|
||||
index: index + 1,
|
||||
date: item.datenya,
|
||||
time: item.timenya,
|
||||
source: item.machine,
|
||||
message: item.description
|
||||
})
|
||||
date: item.date,
|
||||
time: item.time,
|
||||
source: item.source,
|
||||
description: item.description
|
||||
});
|
||||
});
|
||||
tblog.draw();
|
||||
} else {
|
||||
console.log("No log data found for date:", datevalue);
|
||||
alert("No log data found for the selected date.");
|
||||
}
|
||||
},function(error){
|
||||
console.error("Error fetching log data:", error);
|
||||
alert("Error fetching log data : " + error.message);
|
||||
});
|
||||
}
|
||||
|
||||
@@ -37,7 +41,7 @@ $(document).ready(function () {
|
||||
{title: 'Date', data: 'date'},
|
||||
{title: 'Time', data: 'time'},
|
||||
{title: 'Source', data: 'source'},
|
||||
{title: 'Message', data: 'message'}
|
||||
{title: 'Description', data: 'description'}
|
||||
],
|
||||
pageLength: 25,
|
||||
data: [],
|
||||
|
||||
@@ -65,29 +65,7 @@
|
||||
<th class="class50 wrap" data-sortable="true">Description</th>
|
||||
</tr>
|
||||
</thead>
|
||||
<tbody class="w-100 table-fixed" id="tbodylog">
|
||||
<tr>
|
||||
<td>999999</td>
|
||||
<td>13-OKT-25</td>
|
||||
<td>24:10:10</td>
|
||||
<td><strong>1.Lorem Ipsum</strong> is simply dummy text of the printing and typesetting industry. Lorem Ipsum has been the industry's standard dummy text ever since the 1500s, when an unknown printer took a galley of type and scrambled it to make a type specimen book. It has survived not only five centuries, but also the leap into electronic typesetting, remaining essentially unchanged. <br><br></td>
|
||||
<td><strong><span style="color: rgb(0, 0, 0);">2.Lorem Ipsum</span></strong><span style="color: rgb(0, 0, 0);"> is simply dummy text of the printing and typesetting industry. Lorem Ipsum has been the industry's standard dummy text ever since the 1500s, when an unknown printer took a galley of type and scrambled it to make a type specimen book. It has survived not only five centuries, but also the leap into electronic typesetting, remaining essentially unchanged. </span></td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td>2222</td>
|
||||
<td>13-OKT-25<br><strong>Lorem Ipsum</strong> is simply dummy text of the printing and typesetting industry.<br><br><br></td>
|
||||
<td>24:10:10</td>
|
||||
<td><strong>2.Lorem Ipsum</strong> is simply dummy text of the printing and typesetting industry. Lorem Ipsum has been the industry's standard dummy text ever since the 1500s, when an unknown printer took a galley of type and scrambled it to make a type specimen book. It has survived not only five centuries, but also the leap into electronic typesetting, remaining essentially unchanged. </td>
|
||||
<td><strong>2.Lorem Ipsum</strong> is simply dummy text of the printing and typesetting industry. Lorem Ipsum has been the industry's standard dummy text ever since the 1500s, when an unknown printer took a galley of type and scrambled it to make a type specimen book. It has survived not only five centuries, but also the leap into electronic typesetting, remaining essentially unchanged. </td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td>33333</td>
|
||||
<td>13-OKT-25<br><strong>Lorem Ipsum</strong> is simply dummy text of the printing and typesetting industry.<br><br></td>
|
||||
<td>Text</td>
|
||||
<td><strong>3.Lorem Ipsum</strong> is simply dummy text of the printing and typesetting industry. Lorem Ipsum has been the industry's standard dummy text ever since the 1500s, when an unknown printer took a galley of type and scrambled it to make a type specimen book. It has survived not only five centuries, but also the leap into electronic typesetting, remaining essentially unchanged. </td>
|
||||
<td><strong>3. Lorem Ipsum</strong> is simply dummy text of the printing and typesetting industry. Lorem Ipsum has been the industry's standard dummy text ever since the 1500s, when an unknown printer took a galley of type and scrambled it to make a type specimen book. It has survived not only five centuries, but also the leap into electronic typesetting, remaining essentially unchanged. </td>
|
||||
</tr>
|
||||
</tbody>
|
||||
<tbody class="w-100 table-fixed" id="tbodylog"></tbody>
|
||||
</table>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
@@ -210,6 +210,7 @@ function get_pagingresult_files() {
|
||||
player.pause();
|
||||
window.URL.revokeObjectURL(player.src);
|
||||
}
|
||||
$icon.removeClass("fa-stop").addClass("fa-play");
|
||||
} else {
|
||||
// start playback
|
||||
play("PlayPagingResultFile", filename, () => {
|
||||
@@ -221,8 +222,9 @@ function get_pagingresult_files() {
|
||||
}
|
||||
window.lastplaybutton = $(this);
|
||||
});
|
||||
$icon.removeClass("fa-play").addClass("fa-stop");
|
||||
}
|
||||
$icon.toggleClass("fa-play fa-stop");
|
||||
//$icon.toggleClass("fa-play fa-stop");
|
||||
});
|
||||
|
||||
let $tdbutton = $("<td></td>").append($btndownload).append($btnplay).addClass("text-center");
|
||||
@@ -262,6 +264,7 @@ function get_soundbankresult_files() {
|
||||
player.pause();
|
||||
window.URL.revokeObjectURL(player.src);
|
||||
}
|
||||
$icon.removeClass("fa-stop").addClass("fa-play");
|
||||
} else {
|
||||
// start playback
|
||||
play("PlaySoundbankResultFile", filename, () => {
|
||||
@@ -273,8 +276,9 @@ function get_soundbankresult_files() {
|
||||
}
|
||||
window.lastplaybutton = $(this);
|
||||
});
|
||||
$icon.removeClass("fa-play").addClass("fa-stop");
|
||||
}
|
||||
$icon.toggleClass("fa-play fa-stop");
|
||||
//$icon.toggleClass("fa-play fa-stop");
|
||||
});
|
||||
$tr.append($tdtitle);
|
||||
$tr.append($tdbutton);
|
||||
|
||||
@@ -280,14 +280,14 @@ fun main(args: Array<String>) {
|
||||
}
|
||||
}
|
||||
|
||||
db.Add_Log("AAS"," Application started")
|
||||
|
||||
db.logDB.Add("AAS"," Application started")
|
||||
|
||||
|
||||
// shutdown hook
|
||||
Runtime.getRuntime().addShutdownHook(Thread ({
|
||||
|
||||
db.Add_Log("AAS"," Application stopping")
|
||||
db.logDB.Add("AAS"," Application stopping")
|
||||
Logger.info { "Shutdown hook called, stopping services..." }
|
||||
barixserver.StopTcpCommand()
|
||||
androidserver.StopTcpCommand()
|
||||
|
||||
@@ -583,16 +583,16 @@ class MainExtension01 {
|
||||
StreamerOutputs[ip]?.SendData(afi.bytes,
|
||||
{
|
||||
Deactivate_Relays(zz)
|
||||
db.Add_Log("AAS", it) },
|
||||
db.logDB.Add("AAS", it) },
|
||||
{
|
||||
Deactivate_Relays(zz)
|
||||
db.Add_Log("AAS", it) })
|
||||
db.logDB.Add("AAS", it) })
|
||||
}
|
||||
|
||||
val logmessage =
|
||||
"Broadcast started PAGING with Filename '${qp.Message}' to zones: ${qp.BroadcastZones}"
|
||||
Logger.info { logmessage }
|
||||
db.Add_Log("AAS", logmessage)
|
||||
db.logDB.Add("AAS", logmessage)
|
||||
db.queuepagingDB.DeleteByIndex(qp.index.toInt())
|
||||
db.queuepagingDB.Resort()
|
||||
return true
|
||||
@@ -600,7 +600,7 @@ class MainExtension01 {
|
||||
// file tidak valid, delete from queue paging
|
||||
db.queuepagingDB.DeleteByIndex(qp.index.toInt())
|
||||
db.queuepagingDB.Resort()
|
||||
db.Add_Log(
|
||||
db.logDB.Add(
|
||||
"AAS",
|
||||
"Cancelled paging message $qp due to invalid audio file"
|
||||
)
|
||||
@@ -611,14 +611,14 @@ class MainExtension01 {
|
||||
// ada broadcast zone yang tidak valid, delete from queue paging
|
||||
db.queuepagingDB.DeleteByIndex(qp.index.toInt())
|
||||
db.queuepagingDB.Resort()
|
||||
db.Add_Log("AAS", "Cancelled paging message $qp due to invalid broadcast zone")
|
||||
db.logDB.Add("AAS", "Cancelled paging message $qp due to invalid broadcast zone")
|
||||
Logger.error { "Cancelled paging message $qp due to invalid broadcast zone" }
|
||||
}
|
||||
} else {
|
||||
// file tidak valid, delete from queue paging
|
||||
db.queuepagingDB.DeleteByIndex(qp.index.toInt())
|
||||
db.queuepagingDB.Resort()
|
||||
db.Add_Log(
|
||||
db.logDB.Add(
|
||||
"AAS",
|
||||
"Cancelled paging message $qp due to invalid audio file"
|
||||
)
|
||||
@@ -628,7 +628,7 @@ class MainExtension01 {
|
||||
// invalid broadcast zone, delete from queue paging
|
||||
db.queuepagingDB.DeleteByIndex(qp.index.toInt())
|
||||
db.queuepagingDB.Resort()
|
||||
db.Add_Log("AAS", "Cancelled paging message $qp due to empty broadcast zone")
|
||||
db.logDB.Add("AAS", "Cancelled paging message $qp due to empty broadcast zone")
|
||||
Logger.error { "Cancelled paging message $qp due to empty broadcast zone" }
|
||||
}
|
||||
}
|
||||
@@ -677,7 +677,7 @@ class MainExtension01 {
|
||||
listafi,
|
||||
targetfile, true,
|
||||
)
|
||||
db.Add_Log("AAS", result.message)
|
||||
db.logDB.Add("AAS", result.message)
|
||||
if (result.success) {
|
||||
// file siap broadcast
|
||||
val targetafi = audioPlayer.LoadAudioFile(targetfile)
|
||||
@@ -689,21 +689,21 @@ class MainExtension01 {
|
||||
StreamerOutputs[ip]?.SendData(targetafi.bytes,
|
||||
{
|
||||
Deactivate_Relays(zz)
|
||||
db.Add_Log("AAS", it) },
|
||||
db.logDB.Add("AAS", it) },
|
||||
{
|
||||
Deactivate_Relays(zz)
|
||||
db.Add_Log("AAS", it) })
|
||||
db.logDB.Add("AAS", it) })
|
||||
}
|
||||
|
||||
val logmsg =
|
||||
"Broadcast started SHALAT message with generated file '$targetfile' to zones: ${qp.BroadcastZones}"
|
||||
Logger.info { logmsg }
|
||||
db.Add_Log("AAS", logmsg)
|
||||
db.logDB.Add("AAS", logmsg)
|
||||
db.queuepagingDB.DeleteByIndex(qp.index.toInt())
|
||||
db.queuepagingDB.Resort()
|
||||
return true
|
||||
} else {
|
||||
db.Add_Log(
|
||||
db.logDB.Add(
|
||||
"AAS",
|
||||
"Failed to load generated Shalat WAV file $targetfile"
|
||||
)
|
||||
@@ -712,7 +712,7 @@ class MainExtension01 {
|
||||
} else{
|
||||
db.queuepagingDB.DeleteByIndex(qp.index.toInt())
|
||||
db.queuepagingDB.Resort()
|
||||
db.Add_Log("AAS", result.message)
|
||||
db.logDB.Add("AAS", result.message)
|
||||
}
|
||||
}
|
||||
|
||||
@@ -721,7 +721,7 @@ class MainExtension01 {
|
||||
// tidak ada messagebank dengan ann_id ini, delete from queue paging
|
||||
db.queuepagingDB.DeleteByIndex(qp.index.toInt())
|
||||
db.queuepagingDB.Resort()
|
||||
db.Add_Log(
|
||||
db.logDB.Add(
|
||||
"AAS",
|
||||
"Cancelled Shalat message $qp due to ANN_ID $ann_id not found in Messagebank"
|
||||
)
|
||||
@@ -733,21 +733,21 @@ class MainExtension01 {
|
||||
// invalid ann_id, delete from queue paging
|
||||
db.queuepagingDB.DeleteByIndex(qp.index.toInt())
|
||||
db.queuepagingDB.Resort()
|
||||
db.Add_Log("AAS", "Cancelled shalat message $qp due to invalid ANN_ID")
|
||||
db.logDB.Add("AAS", "Cancelled shalat message $qp due to invalid ANN_ID")
|
||||
Logger.error { "Cancelled shalat message $qp due to invalid ANN_ID" }
|
||||
}
|
||||
} else {
|
||||
// ada broadcast zone yang tidak valid, delete from queue paging
|
||||
db.queuepagingDB.DeleteByIndex(qp.index.toInt())
|
||||
db.queuepagingDB.Resort()
|
||||
db.Add_Log("AAS", "Cancelled shalat message $qp due to invalid broadcast zone")
|
||||
db.logDB.Add("AAS", "Cancelled shalat message $qp due to invalid broadcast zone")
|
||||
Logger.error { "Cancelled shalat message $qp due to invalid broadcast zone" }
|
||||
}
|
||||
} else {
|
||||
// invalid broadcast zone, delete from queue paging
|
||||
db.queuepagingDB.DeleteByIndex(qp.index.toInt())
|
||||
db.queuepagingDB.Resort()
|
||||
db.Add_Log("AAS", "Cancelled shalat message $qp due to empty broadcast zone")
|
||||
db.logDB.Add("AAS", "Cancelled shalat message $qp due to empty broadcast zone")
|
||||
Logger.error { "Cancelled shalat message $qp due to empty broadcast zone" }
|
||||
}
|
||||
}
|
||||
@@ -788,7 +788,7 @@ class MainExtension01 {
|
||||
}
|
||||
val targetfile = SoundbankResult_directory.resolve(Make_WAV_FileName("Timer","")).toString()
|
||||
val result = audioPlayer.WavWriter(listafi, targetfile, true)
|
||||
db.Add_Log("AAS", result.message)
|
||||
db.logDB.Add("AAS", result.message)
|
||||
if (result.success){
|
||||
// file siap broadcast
|
||||
val targetafi = audioPlayer.LoadAudioFile(targetfile)
|
||||
@@ -800,16 +800,16 @@ class MainExtension01 {
|
||||
StreamerOutputs[ip]?.SendData(targetafi.bytes,
|
||||
{
|
||||
Deactivate_Relays(zz)
|
||||
db.Add_Log("AAS", it) },
|
||||
db.logDB.Add("AAS", it) },
|
||||
{
|
||||
Deactivate_Relays(zz)
|
||||
db.Add_Log("AAS", it) })
|
||||
db.logDB.Add("AAS", it) })
|
||||
}
|
||||
|
||||
val logmsg =
|
||||
"Broadcast started TIMER message with generated file '$targetfile' to zones: ${qa.BroadcastZones}"
|
||||
Logger.info { logmsg }
|
||||
db.Add_Log("AAS", logmsg)
|
||||
db.logDB.Add("AAS", logmsg)
|
||||
db.queuetableDB.DeleteByIndex(qa.index.toInt())
|
||||
db.queuetableDB.Resort()
|
||||
return true
|
||||
@@ -817,12 +817,12 @@ class MainExtension01 {
|
||||
} else{
|
||||
db.queuetableDB.DeleteByIndex(qa.index.toInt())
|
||||
db.queuetableDB.Resort()
|
||||
db.Add_Log("AAS", result.message)
|
||||
db.logDB.Add("AAS", result.message)
|
||||
}
|
||||
} else {
|
||||
db.queuetableDB.DeleteByIndex(qa.index.toInt())
|
||||
db.queuetableDB.Resort()
|
||||
db.Add_Log("AAS", result.message)
|
||||
db.logDB.Add("AAS", result.message)
|
||||
}
|
||||
}
|
||||
|
||||
@@ -830,7 +830,7 @@ class MainExtension01 {
|
||||
// tidak ada messagebank dengan ann_id ini, delete from queue table
|
||||
db.queuetableDB.DeleteByIndex(qa.index.toInt())
|
||||
db.queuetableDB.Resort()
|
||||
db.Add_Log(
|
||||
db.logDB.Add(
|
||||
"AAS",
|
||||
"Cancelled TIMER $qa due to ANN_ID $ann_id not found in Messagebank"
|
||||
)
|
||||
@@ -842,21 +842,21 @@ class MainExtension01 {
|
||||
// invalid ann_id, delete from queue table
|
||||
db.queuetableDB.DeleteByIndex(qa.index.toInt())
|
||||
db.queuetableDB.Resort()
|
||||
db.Add_Log("AAS", "Cancelled TIMER message $qa due to invalid ANN_ID")
|
||||
db.logDB.Add("AAS", "Cancelled TIMER message $qa due to invalid ANN_ID")
|
||||
Logger.error { "Cancelled TIMER message $qa due to invalid ANN_ID" }
|
||||
}
|
||||
} else {
|
||||
// ada broadcast zone yang tidak valid, delete from queue table
|
||||
db.queuetableDB.DeleteByIndex(qa.index.toInt())
|
||||
db.queuetableDB.Resort()
|
||||
db.Add_Log("AAS", "Cancelled TIMER message $qa due to invalid broadcast zone")
|
||||
db.logDB.Add("AAS", "Cancelled TIMER message $qa due to invalid broadcast zone")
|
||||
Logger.error {"Cancelled TIMER message $qa due to invalid broadcast zone"}
|
||||
}
|
||||
} else {
|
||||
// invalid broadcast zone, delete from queue table
|
||||
db.queuetableDB.DeleteByIndex(qa.index.toInt())
|
||||
db.queuetableDB.Resort()
|
||||
db.Add_Log("AAS", "Cancelled TIMER message $qa due to empty broadcast zone")
|
||||
db.logDB.Add("AAS", "Cancelled TIMER message $qa due to empty broadcast zone")
|
||||
Logger.error { "Cancelled TIMER message $qa due to empty broadcast zone" }
|
||||
}
|
||||
}
|
||||
@@ -922,7 +922,7 @@ class MainExtension01 {
|
||||
// not available from variables, try to get from Message column
|
||||
// ada ini, karena protokol FIS dulu tidak ada ANN_ID tapi pake Remark
|
||||
val remark = variables?.get("REMARK").orEmpty()
|
||||
db.Add_Log("AAS", "Trying to get ANN_ID from REMARK field: $remark")
|
||||
db.logDB.Add("AAS", "Trying to get ANN_ID from REMARK field: $remark")
|
||||
Logger.info{ "Trying to get ANN_ID from REMARK field: $remark" }
|
||||
when(remark){
|
||||
"GOP" -> {
|
||||
@@ -947,15 +947,15 @@ class MainExtension01 {
|
||||
}
|
||||
}
|
||||
Logger.info{"Found ANN_ID from REMARK field: $ann_id" }
|
||||
db.Add_Log("AAS", "Found ANN_ID from REMARK field: $ann_id")
|
||||
db.logDB.Add("AAS", "Found ANN_ID from REMARK field: $ann_id")
|
||||
} else {
|
||||
db.Add_Log("AAS", "Found ANN_ID from SB_TAGS variables: $ann_id")
|
||||
db.logDB.Add("AAS", "Found ANN_ID from SB_TAGS variables: $ann_id")
|
||||
Logger.info{ "Found ANN_ID from SB_TAGS variables: $ann_id" }
|
||||
}
|
||||
|
||||
// recheck again
|
||||
if (ann_id == 0) {
|
||||
db.Add_Log(
|
||||
db.logDB.Add(
|
||||
"AAS",
|
||||
"Cancelled SOUNDBANK message $qa due to missing or invalid ANN_ID in SB_TAGS"
|
||||
)
|
||||
@@ -986,7 +986,7 @@ class MainExtension01 {
|
||||
|
||||
}
|
||||
} else {
|
||||
db.Add_Log("AAS", result.message)
|
||||
db.logDB.Add("AAS", result.message)
|
||||
db.queuetableDB.DeleteByIndex(qa.index.toInt())
|
||||
db.queuetableDB.Resort()
|
||||
}
|
||||
@@ -1014,25 +1014,25 @@ class MainExtension01 {
|
||||
StreamerOutputs[ip]?.SendData(targetafi.bytes,
|
||||
{
|
||||
Deactivate_Relays(zz)
|
||||
db.Add_Log("AAS", it)
|
||||
db.logDB.Add("AAS", it)
|
||||
|
||||
}, {
|
||||
Deactivate_Relays(zz)
|
||||
db.Add_Log("AAS", it)
|
||||
db.logDB.Add("AAS", it)
|
||||
})
|
||||
}
|
||||
|
||||
val logmsg =
|
||||
"Broadcast started SOUNDBANK message with generated file '$targetfile' to zones: ${qa.BroadcastZones}"
|
||||
Logger.info { logmsg }
|
||||
db.Add_Log("AAS", logmsg)
|
||||
db.logDB.Add("AAS", logmsg)
|
||||
|
||||
return true
|
||||
}
|
||||
} else {
|
||||
db.queuetableDB.DeleteByIndex(qa.index.toInt())
|
||||
db.queuetableDB.Resort()
|
||||
db.Add_Log("AAS", result.message)
|
||||
db.logDB.Add("AAS", result.message)
|
||||
}
|
||||
|
||||
}
|
||||
@@ -1040,7 +1040,7 @@ class MainExtension01 {
|
||||
// tidak ada messagebank dengan ann_id ini, delete from queue table
|
||||
db.queuetableDB.DeleteByIndex(qa.index.toInt())
|
||||
db.queuetableDB.Resort()
|
||||
db.Add_Log(
|
||||
db.logDB.Add(
|
||||
"AAS",
|
||||
"Cancelled SOUNDBANK message $qa due to ANN_ID $ann_id not found in Messagebank"
|
||||
)
|
||||
@@ -1053,14 +1053,14 @@ class MainExtension01 {
|
||||
// ada broadcast zone yang tidak valid, delete from queue table
|
||||
db.queuetableDB.DeleteByIndex(qa.index.toInt())
|
||||
db.queuetableDB.Resort()
|
||||
db.Add_Log("AAS", "Cancelled SOUNDBANK message $qa due to invalid broadcast zone")
|
||||
db.logDB.Add("AAS", "Cancelled SOUNDBANK message $qa due to invalid broadcast zone")
|
||||
Logger.error { "Cancelled SOUNDBANK message $qa due to invalid broadcast zone" }
|
||||
}
|
||||
} else {
|
||||
// invalid broadcast zone, delete from queue table
|
||||
db.queuetableDB.DeleteByIndex(qa.index.toInt())
|
||||
db.queuetableDB.Resort()
|
||||
db.Add_Log("AAS", "Cancelled SOUNDBANK message $qa due to empty broadcast zone")
|
||||
db.logDB.Add("AAS", "Cancelled SOUNDBANK message $qa due to empty broadcast zone")
|
||||
Logger.error { "Cancelled SOUNDBANK message $qa due to empty broadcast zone" }
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,5 +1,9 @@
|
||||
package database
|
||||
|
||||
import java.time.LocalDate
|
||||
import java.time.LocalTime
|
||||
import java.time.format.DateTimeFormatter
|
||||
|
||||
@Suppress("unused")
|
||||
data class Log(
|
||||
val index: ULong,
|
||||
@@ -14,9 +18,11 @@ data class Log(
|
||||
machine: String,
|
||||
description: String
|
||||
) : Log {
|
||||
val current = java.time.LocalDateTime.now()
|
||||
val date = current.toLocalDate().toString() // format YYYY-MM-DD
|
||||
val time = current.toLocalTime().withNano(0).toString() // format HH:MM:SS
|
||||
//val current = java.time.LocalDateTime.now()
|
||||
//val date = current.toLocalDate().toString() // format YYYY-MM-DD
|
||||
//val time = current.toLocalTime().withNano(0).toString() // format HH:MM:SS
|
||||
val date = LocalDate.now().format(DateTimeFormatter.ofPattern("dd/MM/yyyy"))
|
||||
val time = LocalTime.now().format(DateTimeFormatter.ofPattern("HH:mm:ss"))
|
||||
return Log(0u, date, time, machine, description)
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,9 +1,18 @@
|
||||
package database
|
||||
|
||||
data class LogSemiauto(val index: UInt, val date: String, val time: String, val source: String, val description: String){
|
||||
|
||||
import java.time.LocalDate
|
||||
import java.time.LocalTime
|
||||
import java.time.format.DateTimeFormatter
|
||||
|
||||
data class LogSemiauto(val index: ULong, val date: String, val time: String, val source: String, val description: String){
|
||||
override fun toString(): String {
|
||||
return "$date $time [$source] $description"
|
||||
}
|
||||
companion object{
|
||||
fun NewLog(source: String, description: String): LogSemiauto {
|
||||
val date = LocalDate.now().format(DateTimeFormatter.ofPattern("dd/MM/yyyy"))
|
||||
val time = LocalTime.now().format(DateTimeFormatter.ofPattern("HH:mm:ss"))
|
||||
return LogSemiauto(0u, date, time, source, description)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,6 +1,5 @@
|
||||
package database
|
||||
|
||||
import codes.Somecodes.Companion.ValiDateForLogHtml
|
||||
import codes.Somecodes.Companion.ValidScheduleDay
|
||||
import codes.Somecodes.Companion.toJsonString
|
||||
import config
|
||||
@@ -16,6 +15,7 @@ import java.sql.DriverManager
|
||||
import java.util.function.Consumer
|
||||
import codes.configKeys
|
||||
|
||||
|
||||
/**
|
||||
* A class to manage a connection to a MariaDB database.
|
||||
*
|
||||
@@ -45,15 +45,12 @@ class MariaDB(
|
||||
lateinit var queuetableDB: dbFunctions<QueueTable>
|
||||
lateinit var queuepagingDB: dbFunctions<QueuePaging>
|
||||
lateinit var soundchannelDB: dbFunctions<SoundChannel>
|
||||
lateinit var logDB: dbFunctions<Log>
|
||||
lateinit var logDB: Table_Logs
|
||||
lateinit var userDB: dbFunctions<UserDB>
|
||||
lateinit var logSemiAuto: Table_LogSemiAuto
|
||||
|
||||
companion object {
|
||||
fun ValidDate(date: String): Boolean {
|
||||
// Check if the date is in the format DD/MM/YYYY
|
||||
val regex = Regex("""^\d{2}/\d{2}/\d{4}$""")
|
||||
return regex.matches(date)
|
||||
}
|
||||
|
||||
|
||||
fun ValidTime(time: String): Boolean {
|
||||
// Check if the time is in the format HH:MM
|
||||
@@ -167,15 +164,15 @@ class MariaDB(
|
||||
statement?.setString(6, data.Path)
|
||||
val rowsAffected = statement?.executeUpdate()
|
||||
if (rowsAffected != null && rowsAffected > 0) {
|
||||
Add_Log("AAS", "Soundbank added: ${data.Description}; TAG: ${data.TAG}; Category: ${data.Category}; Language: ${data.Language}; VoiceType: ${data.VoiceType}; Path: ${data.Path}")
|
||||
logDB.Add("AAS", "Soundbank added: ${data.Description}; TAG: ${data.TAG}; Category: ${data.Category}; Language: ${data.Language}; VoiceType: ${data.VoiceType}; Path: ${data.Path}")
|
||||
Logger.info("Soundbank added: ${data.Description}" as Any)
|
||||
return true
|
||||
} else {
|
||||
Add_Log("AAS", "No soundbank entry added for: ${data.Description}")
|
||||
logDB.Add("AAS", "No soundbank entry added for: ${data.Description}")
|
||||
Logger.warn("No soundbank entry added for: ${data.Description}" as Any)
|
||||
}
|
||||
} catch (e: Exception) {
|
||||
Add_Log("AAS", "Failed to add Soundbank: ${data.Description}. Error: ${e.message}")
|
||||
logDB.Add("AAS", "Failed to add Soundbank: ${data.Description}. Error: ${e.message}")
|
||||
Logger.error("Error adding soundbank entry: ${e.message}" as Any)
|
||||
}
|
||||
return false
|
||||
@@ -200,11 +197,11 @@ class MariaDB(
|
||||
statement.executeBatch()
|
||||
connection.commit()
|
||||
Logger.info("Bulk soundbank insert successful: ${data.size} entries" as Any)
|
||||
Add_Log("AAS","Successfully Import Sound Bank Table: ${data.size} entries.")
|
||||
logDB.Add("AAS","Successfully Import Sound Bank Table: ${data.size} entries.")
|
||||
connection.autoCommit = true
|
||||
return true
|
||||
} catch (e: Exception) {
|
||||
Add_Log("AAS","Failed to Import Sound Bank. Error: ${e.message}")
|
||||
logDB.Add("AAS","Failed to Import Sound Bank. Error: ${e.message}")
|
||||
Logger.error("Error adding soundbank entries: ${e.message}" as Any)
|
||||
}
|
||||
return false
|
||||
@@ -224,14 +221,14 @@ class MariaDB(
|
||||
val rowsAffected = statement?.executeUpdate()
|
||||
if (rowsAffected != null && rowsAffected > 0) {
|
||||
Logger.info("Soundbank updated at index $index: ${data.Description}" as Any)
|
||||
Add_Log("AAS", "Soundbank updated at index $index: ${data.Description}; TAG: ${data.TAG}; Category: ${data.Category}; Language: ${data.Language}; VoiceType: ${data.VoiceType}; Path: ${data.Path}")
|
||||
logDB.Add("AAS", "Soundbank updated at index $index: ${data.Description}; TAG: ${data.TAG}; Category: ${data.Category}; Language: ${data.Language}; VoiceType: ${data.VoiceType}; Path: ${data.Path}")
|
||||
return true
|
||||
} else {
|
||||
Add_Log("AAS", "Failed updating Sound Bank at index $index for: ${data.Description}")
|
||||
logDB.Add("AAS", "Failed updating Sound Bank at index $index for: ${data.Description}")
|
||||
Logger.warn("No soundbank entry updated at index $index for: ${data.Description}" as Any)
|
||||
}
|
||||
} catch (e: Exception) {
|
||||
Add_Log("AAS", "Failed updating Sound Bank at index $index. Error: ${e.message}")
|
||||
logDB.Add("AAS", "Failed updating Sound Bank at index $index. Error: ${e.message}")
|
||||
Logger.error("Error updating soundbank entry at index $index: ${e.message}" as Any)
|
||||
}
|
||||
return false
|
||||
@@ -381,14 +378,14 @@ class MariaDB(
|
||||
val rowsAffected = statement?.executeUpdate()
|
||||
if (rowsAffected != null && rowsAffected > 0) {
|
||||
Logger.info("Messagebank added: ${data.Description}" as Any)
|
||||
Add_Log("AAS", "Messagebank added: ${data.Description}; Language: ${data.Language}; ANN_ID: ${data.ANN_ID}; Voice_Type: ${data.Voice_Type}")
|
||||
logDB.Add("AAS", "Messagebank added: ${data.Description}; Language: ${data.Language}; ANN_ID: ${data.ANN_ID}; Voice_Type: ${data.Voice_Type}")
|
||||
return true
|
||||
} else {
|
||||
Add_Log("AAS","Failed adding Message Bank for: ${data.Description}")
|
||||
logDB.Add("AAS","Failed adding Message Bank for: ${data.Description}")
|
||||
Logger.warn("No messagebank entry added for: ${data.Description}" as Any)
|
||||
}
|
||||
} catch (e: Exception) {
|
||||
Add_Log("AAS","Failed adding Message Bank for: ${data.Description}. Error: ${e.message}")
|
||||
logDB.Add("AAS","Failed adding Message Bank for: ${data.Description}. Error: ${e.message}")
|
||||
Logger.error("Error adding messagebank entry: ${e.message}" as Any)
|
||||
}
|
||||
return false
|
||||
@@ -412,11 +409,11 @@ class MariaDB(
|
||||
statement.executeBatch()
|
||||
connection.commit()
|
||||
Logger.info("Bulk messagebank insert successful: ${data.size} entries" as Any)
|
||||
Add_Log("AAS","Successfully Import Message Bank Table: ${data.size} entries.")
|
||||
logDB.Add("AAS","Successfully Import Message Bank Table: ${data.size} entries.")
|
||||
connection.autoCommit = true
|
||||
return true
|
||||
} catch (e: Exception) {
|
||||
Add_Log("AAS","Failed to Import Message Bank. Error: ${e.message}")
|
||||
logDB.Add("AAS","Failed to Import Message Bank. Error: ${e.message}")
|
||||
Logger.error("Error adding messagebank entries: ${e.message}" as Any)
|
||||
}
|
||||
return false
|
||||
@@ -594,14 +591,14 @@ class MariaDB(
|
||||
val rowsAffected = statement.executeUpdate()
|
||||
if (rowsAffected > 0) {
|
||||
Logger.info("Language link added: ${data.TAG} -> ${data.Language}" as Any)
|
||||
Add_Log("AAS", "Language link added: ${data.TAG} -> ${data.Language}")
|
||||
logDB.Add("AAS", "Language link added: ${data.TAG} -> ${data.Language}")
|
||||
return true
|
||||
} else {
|
||||
Add_Log("AAS","Failed adding Language Link for: ${data.TAG} -> ${data.Language}.")
|
||||
logDB.Add("AAS","Failed adding Language Link for: ${data.TAG} -> ${data.Language}.")
|
||||
Logger.warn("No language link entry added for: ${data.TAG} -> ${data.Language}" as Any)
|
||||
}
|
||||
} catch (e: Exception) {
|
||||
Add_Log("AAS","Failed adding Language Link for: ${data.TAG} -> ${data.Language}. Error: ${e.message}")
|
||||
logDB.Add("AAS","Failed adding Language Link for: ${data.TAG} -> ${data.Language}. Error: ${e.message}")
|
||||
Logger.error("Error adding language link entry: ${e.message}" as Any)
|
||||
}
|
||||
return false
|
||||
@@ -622,11 +619,11 @@ class MariaDB(
|
||||
statement.executeBatch()
|
||||
connection.commit()
|
||||
Logger.info("Bulk languagelinking insert successful: ${List.size} entries" as Any)
|
||||
Add_Log("AAS","Successfully Import Language Link Table: ${data.size} entries.")
|
||||
logDB.Add("AAS","Successfully Import Language Link Table: ${data.size} entries.")
|
||||
connection.autoCommit = true
|
||||
return true
|
||||
} catch (e: Exception) {
|
||||
Add_Log("AAS","Failed to Import Language Link. Error: ${e.message}")
|
||||
logDB.Add("AAS","Failed to Import Language Link. Error: ${e.message}")
|
||||
Logger.error("Error adding languagelinking entries: ${e.message}" as Any)
|
||||
}
|
||||
return false
|
||||
@@ -642,14 +639,15 @@ class MariaDB(
|
||||
val rowsAffected = statement?.executeUpdate()
|
||||
if (rowsAffected != null && rowsAffected > 0) {
|
||||
Logger.info("Language link updated at index $index: ${data.TAG} -> ${data.Language}" as Any)
|
||||
Add_Log("AAS", "Language link updated at index $index: ${data.TAG} -> ${data.Language}")
|
||||
|
||||
logDB.Add("AAS", "Language link updated at index $index: ${data.TAG} -> ${data.Language}")
|
||||
return true
|
||||
} else {
|
||||
Add_Log("AAS", "Failed updating Language Link at index $index for: ${data.TAG} -> ${data.Language}")
|
||||
logDB.Add("AAS", "Failed updating Language Link at index $index for: ${data.TAG} -> ${data.Language}")
|
||||
Logger.warn("No language link entry updated at index $index for: ${data.TAG} -> ${data.Language}" as Any)
|
||||
}
|
||||
} catch (e: Exception) {
|
||||
Add_Log("AAS", "Failed updating Language Link at index $index. Error: ${e.message}")
|
||||
logDB.Add("AAS", "Failed updating Language Link at index $index. Error: ${e.message}")
|
||||
Logger.error("Error updating language link entry at index $index: ${e.message}" as Any)
|
||||
}
|
||||
return false
|
||||
@@ -800,14 +798,14 @@ class MariaDB(
|
||||
val rowsAffected = statement?.executeUpdate()
|
||||
if (rowsAffected != null && rowsAffected > 0) {
|
||||
Logger.info("Schedulebank added: ${data.Description}" as Any)
|
||||
Add_Log("AAS", "Schedulebank added: ${data.Description}; Day: ${data.Day}; Time: ${data.Time}; Soundpath: ${data.Soundpath}; Repeat: ${data.Repeat}; Enable: ${data.Enable}; BroadcastZones: ${data.BroadcastZones}; Language: ${data.Language}")
|
||||
logDB.Add("AAS", "Schedulebank added: ${data.Description}; Day: ${data.Day}; Time: ${data.Time}; Soundpath: ${data.Soundpath}; Repeat: ${data.Repeat}; Enable: ${data.Enable}; BroadcastZones: ${data.BroadcastZones}; Language: ${data.Language}")
|
||||
return true
|
||||
} else {
|
||||
Add_Log("AAS","Failed adding Schedule Bank for: ${data.Description}.")
|
||||
logDB.Add("AAS","Failed adding Schedule Bank for: ${data.Description}.")
|
||||
Logger.warn("No schedulebank entry added for: ${data.Description}" as Any)
|
||||
}
|
||||
} catch (e: Exception) {
|
||||
Add_Log("AAS","Failed adding Schedule Bank for: ${data.Description}. Error: ${e.message}")
|
||||
logDB.Add("AAS","Failed adding Schedule Bank for: ${data.Description}. Error: ${e.message}")
|
||||
Logger.error("Error adding schedulebank entry: ${e.message}" as Any)
|
||||
}
|
||||
return false
|
||||
@@ -837,11 +835,11 @@ class MariaDB(
|
||||
statement.executeBatch()
|
||||
connection.commit()
|
||||
Logger.info("Bulk schedulebank insert successful: ${data.size} entries" as Any)
|
||||
Add_Log("AAS","Successfully Import Schedule Bank Table: ${data.size} entries.")
|
||||
logDB.Add("AAS","Successfully Import Schedule Bank Table: ${data.size} entries.")
|
||||
connection.autoCommit = true
|
||||
return true
|
||||
} catch (e: Exception) {
|
||||
Add_Log("AAS","Failed to Import Schedule Bank. Error: ${e.message}")
|
||||
logDB.Add("AAS","Failed to Import Schedule Bank. Error: ${e.message}")
|
||||
Logger.error("Error adding schedulebank entries: ${e.message}" as Any)
|
||||
}
|
||||
return false
|
||||
@@ -872,14 +870,14 @@ class MariaDB(
|
||||
val rowsAffected = statement?.executeUpdate()
|
||||
if (rowsAffected != null && rowsAffected > 0) {
|
||||
Logger.info("Schedulebank updated at index $index: ${data.Description}" as Any)
|
||||
Add_Log("AAS", "Schedulebank updated at index $index: ${data.Description}; Day: ${data.Day}; Time: ${data.Time}; Soundpath: ${data.Soundpath}; Repeat: ${data.Repeat}; Enable: ${data.Enable}; BroadcastZones: ${data.BroadcastZones}; Language: ${data.Language}")
|
||||
logDB.Add("AAS", "Schedulebank updated at index $index: ${data.Description}; Day: ${data.Day}; Time: ${data.Time}; Soundpath: ${data.Soundpath}; Repeat: ${data.Repeat}; Enable: ${data.Enable}; BroadcastZones: ${data.BroadcastZones}; Language: ${data.Language}")
|
||||
return true
|
||||
} else {
|
||||
Add_Log("AAS", "Failed updating Schedule Bank at index $index for: ${data.Description}.")
|
||||
logDB.Add("AAS", "Failed updating Schedule Bank at index $index for: ${data.Description}.")
|
||||
Logger.warn("No schedulebank entry updated at index $index for: ${data.Description}" as Any)
|
||||
}
|
||||
} catch (e: Exception) {
|
||||
Add_Log("AAS", "Failed updating Schedule Bank at index $index. Error: ${e.message}")
|
||||
logDB.Add("AAS", "Failed updating Schedule Bank at index $index. Error: ${e.message}")
|
||||
Logger.error("Error updating schedulebank entry at index $index: ${e.message}" as Any)
|
||||
}
|
||||
return false
|
||||
@@ -1066,14 +1064,14 @@ class MariaDB(
|
||||
val rowsAffected = statement?.executeUpdate()
|
||||
if (rowsAffected != null && rowsAffected > 0) {
|
||||
Logger.info("Broadcast zone added: ${data.description}" as Any)
|
||||
Add_Log("AAS", "Broadcast zone added: ${data.description}; SoundChannel: ${data.SoundChannel}; id: ${data.id}; bp: ${data.bp}")
|
||||
logDB.Add("AAS", "Broadcast zone added: ${data.description}; SoundChannel: ${data.SoundChannel}; id: ${data.id}; bp: ${data.bp}")
|
||||
return true
|
||||
} else {
|
||||
Add_Log("AAS","Failed adding Broadcast Zone for: ${data.description}.")
|
||||
logDB.Add("AAS","Failed adding Broadcast Zone for: ${data.description}.")
|
||||
Logger.warn("No broadcast zone entry added for: ${data.description}" as Any)
|
||||
}
|
||||
} catch (e: Exception) {
|
||||
Add_Log("AAS","Failed adding Broadcast Zone for: ${data.description}. Error: ${e.message}")
|
||||
logDB.Add("AAS","Failed adding Broadcast Zone for: ${data.description}. Error: ${e.message}")
|
||||
Logger.error("Error adding broadcast zone entry: ${e.message}" as Any)
|
||||
}
|
||||
return false
|
||||
@@ -1095,11 +1093,11 @@ class MariaDB(
|
||||
statement.executeBatch()
|
||||
connection.commit()
|
||||
Logger.info("Bulk ${super.dbName} insert successful: ${data.size} entries" as Any)
|
||||
Add_Log("AAS","Successfully Import Broadcast Zones Table: ${data.size} entries.")
|
||||
logDB.Add("AAS","Successfully Import Broadcast Zones Table: ${data.size} entries.")
|
||||
connection.autoCommit = true
|
||||
return true
|
||||
} catch (e: Exception) {
|
||||
Add_Log("AAS","Failed to Import Broadcast Zones. Error: ${e.message}")
|
||||
logDB.Add("AAS","Failed to Import Broadcast Zones. Error: ${e.message}")
|
||||
Logger.error("Error adding ${super.dbName} entries: ${e.message}" as Any)
|
||||
}
|
||||
return false
|
||||
@@ -1117,14 +1115,14 @@ class MariaDB(
|
||||
val rowsAffected = statement?.executeUpdate()
|
||||
if (rowsAffected != null && rowsAffected > 0) {
|
||||
Logger.info("Broadcast zone updated at index $index: ${data.description}" as Any)
|
||||
Add_Log("AAS", "Broadcast zone updated at index $index: ${data.description}; SoundChannel: ${data.SoundChannel}; id: ${data.id}; bp: ${data.bp}")
|
||||
logDB.Add("AAS", "Broadcast zone updated at index $index: ${data.description}; SoundChannel: ${data.SoundChannel}; id: ${data.id}; bp: ${data.bp}")
|
||||
return true
|
||||
} else {
|
||||
Add_Log("AAS", "Failed updating Broadcast Zone at index $index for: ${data.description}.")
|
||||
logDB.Add("AAS", "Failed updating Broadcast Zone at index $index for: ${data.description}.")
|
||||
Logger.warn("No broadcast zone entry updated at index $index for: ${data.description}" as Any)
|
||||
}
|
||||
} catch (e: Exception) {
|
||||
Add_Log("AAS", "Failed updating Broadcast Zone at index $index. Error: ${e.message}")
|
||||
logDB.Add("AAS", "Failed updating Broadcast Zone at index $index. Error: ${e.message}")
|
||||
Logger.error("Error updating broadcast zone entry at index $index: ${e.message}" as Any)
|
||||
}
|
||||
return false
|
||||
@@ -1304,11 +1302,11 @@ class MariaDB(
|
||||
statement.executeBatch()
|
||||
connection.commit()
|
||||
Logger.info("Bulk QueueTable insert successful: ${data.size} entries" as Any)
|
||||
Add_Log("AAS","Successfully Import Queue Table: ${data.size} entries.")
|
||||
logDB.Add("AAS","Successfully Import Queue Table: ${data.size} entries.")
|
||||
connection.autoCommit = true
|
||||
return true
|
||||
} catch (e: Exception) {
|
||||
Add_Log("AAS","Failed to Import Queue Table. Error: ${e.message}")
|
||||
logDB.Add("AAS","Failed to Import Queue Table. Error: ${e.message}")
|
||||
Logger.error("Error adding QueueTable entries: ${e.message}" as Any)
|
||||
}
|
||||
return false
|
||||
@@ -1468,11 +1466,11 @@ class MariaDB(
|
||||
statement.executeBatch()
|
||||
connection.commit()
|
||||
Logger.info("Bulk QueuePaging insert successful: ${data.size} entries" as Any)
|
||||
Add_Log("AAS","Successfully Import Queue Paging Table: ${data.size} entries.")
|
||||
logDB.Add("AAS","Successfully Import Queue Paging Table: ${data.size} entries.")
|
||||
connection.autoCommit = true
|
||||
true
|
||||
} catch (e: Exception) {
|
||||
Add_Log("AAS","Failed to Import Queue Paging Table. Error: ${e.message}")
|
||||
logDB.Add("AAS","Failed to Import Queue Paging Table. Error: ${e.message}")
|
||||
Logger.error("Error adding QueuePaging entries: ${e.message}" as Any)
|
||||
false
|
||||
}
|
||||
@@ -1595,14 +1593,14 @@ class MariaDB(
|
||||
val rowsAffected = statement?.executeUpdate()
|
||||
if (rowsAffected != null && rowsAffected > 0) {
|
||||
Logger.info("SoundChannel updated: ${data.channel} -> ${data.ip}" as Any)
|
||||
Add_Log("AAS", "Sound Channel updated: ${data.channel} -> ${data.ip}")
|
||||
logDB.Add("AAS", "Sound Channel updated: ${data.channel} -> ${data.ip}")
|
||||
return true
|
||||
} else {
|
||||
Add_Log("AAS", "Failed updating Sound Channel for: ${data.channel} -> ${data.ip}.")
|
||||
logDB.Add("AAS", "Failed updating Sound Channel for: ${data.channel} -> ${data.ip}.")
|
||||
Logger.warn("No SoundChannel entry updated for: ${data.channel} -> ${data.ip}" as Any)
|
||||
}
|
||||
} catch (e: Exception) {
|
||||
Add_Log("AAS", "Failed updating Sound Channel for: ${data.channel} -> ${data.ip}. Error: ${e.message}")
|
||||
logDB.Add("AAS", "Failed updating Sound Channel for: ${data.channel} -> ${data.ip}. Error: ${e.message}")
|
||||
Logger.error("Error updating SoundChannel entry: ${e.message}" as Any)
|
||||
}
|
||||
return false
|
||||
@@ -1621,11 +1619,11 @@ class MariaDB(
|
||||
statement.executeBatch()
|
||||
connection.commit()
|
||||
Logger.info("Bulk SoundChannel update successful: ${data.size} entries" as Any)
|
||||
Add_Log("AAS","Successfully Import Sound Channels Table: ${data.size} entries.")
|
||||
logDB.Add("AAS","Successfully Import Sound Channels Table: ${data.size} entries.")
|
||||
connection.autoCommit = true
|
||||
true
|
||||
} catch (e: Exception) {
|
||||
Add_Log("AAS","Failed to Import Sound Channels. Error: ${e.message}")
|
||||
logDB.Add("AAS","Failed to Import Sound Channels. Error: ${e.message}")
|
||||
Logger.error("Error updating SoundChannel entries: ${e.message}" as Any)
|
||||
false
|
||||
}
|
||||
@@ -1641,14 +1639,14 @@ class MariaDB(
|
||||
val rowsAffected = statement?.executeUpdate()
|
||||
if (rowsAffected != null && rowsAffected > 0) {
|
||||
Logger.info("SoundChannel updated at index $index: ${data.channel} -> ${data.ip}" as Any)
|
||||
Add_Log("AAS", "Sound Channel updated at index $index: ${data.channel} -> ${data.ip}")
|
||||
logDB.Add("AAS", "Sound Channel updated at index $index: ${data.channel} -> ${data.ip}")
|
||||
return true
|
||||
} else {
|
||||
Add_Log("AAS", "Failed updating Sound Channel at index $index for: ${data.channel} -> ${data.ip}.")
|
||||
logDB.Add("AAS", "Failed updating Sound Channel at index $index for: ${data.channel} -> ${data.ip}.")
|
||||
Logger.warn("No Sound Channel entry updated at index $index for: ${data.channel} -> ${data.ip}" as Any)
|
||||
}
|
||||
} catch (e: Exception) {
|
||||
Add_Log("AAS", "Failed updating Sound Channel at index $index. Error: ${e.message}")
|
||||
logDB.Add("AAS", "Failed updating Sound Channel at index $index. Error: ${e.message}")
|
||||
Logger.error("Error updating SoundChannel entry at index $index: ${e.message}" as Any)
|
||||
}
|
||||
return false
|
||||
@@ -1774,14 +1772,14 @@ class MariaDB(
|
||||
val rowsAffected = statement?.executeUpdate()
|
||||
if (rowsAffected != null && rowsAffected > 0) {
|
||||
Logger.info("${super.dbName} IP cleared for index $index" as Any)
|
||||
Add_Log("AAS", "${super.dbName} IP cleared for index $index")
|
||||
logDB.Add("AAS", "${super.dbName} IP cleared for index $index")
|
||||
return true
|
||||
} else {
|
||||
Add_Log("AAS","Failed clearing ${super.dbName} IP for index $index.")
|
||||
logDB.Add("AAS","Failed clearing ${super.dbName} IP for index $index.")
|
||||
Logger.warn("No ${super.dbName} entry cleared for index $index" as Any)
|
||||
}
|
||||
} catch (e: Exception) {
|
||||
Add_Log("AAS","Failed clearing ${super.dbName} IP for index $index. Error: ${e.message}")
|
||||
logDB.Add("AAS","Failed clearing ${super.dbName} IP for index $index. Error: ${e.message}")
|
||||
Logger.error("Error clearing ${super.dbName} entry for index $index: ${e.message}" as Any)
|
||||
}
|
||||
return false
|
||||
@@ -1789,160 +1787,10 @@ class MariaDB(
|
||||
|
||||
}
|
||||
|
||||
logDB = object : dbFunctions<Log>("logs", connection, listOf("index", "datenya", "timenya", "machine", "description")) {
|
||||
override fun Create() {
|
||||
val tabledefinition = "CREATE TABLE IF NOT EXISTS ${super.dbName} (" +
|
||||
"`index` INT AUTO_INCREMENT PRIMARY KEY," +
|
||||
"datenya VARCHAR(20) NOT NULL," + // format DD/MM/YYYY
|
||||
"timenya VARCHAR(20) NOT NULL," + // format HH:MM:SS
|
||||
"machine VARCHAR(45) NOT NULL," +
|
||||
"description TEXT NOT NULL" +
|
||||
")"
|
||||
logSemiAuto = Table_LogSemiAuto(connection)
|
||||
|
||||
super.Create(tabledefinition)
|
||||
logDB = Table_Logs(connection)
|
||||
|
||||
}
|
||||
|
||||
override fun Get(cbOK: Consumer<Unit>?, cbFail: Consumer<String>?) {
|
||||
List.clear()
|
||||
try {
|
||||
val statement = connection.createStatement()
|
||||
val resultSet = statement?.executeQuery("SELECT * FROM ${super.dbName}")
|
||||
while (resultSet?.next() == true) {
|
||||
val log = Log(
|
||||
resultSet.getLong("index").toULong(),
|
||||
resultSet.getString("datenya"),
|
||||
resultSet.getString("timenya"),
|
||||
resultSet.getString("machine"),
|
||||
resultSet.getString("description")
|
||||
)
|
||||
List.add(log)
|
||||
}
|
||||
cbOK?.accept(Unit)
|
||||
} catch (e: Exception) {
|
||||
cbFail?.accept("Error fetching ${super.dbName}: ${e.message}")
|
||||
Logger.error("Error fetching ${super.dbName}: ${e.message}" as Any)
|
||||
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
|
||||
override fun Add(data: Log): Boolean {
    // Inserts one row into `logs`; the `index` column is AUTO_INCREMENT and is
    // therefore omitted from the column list.
    // Returns true only when the driver reports at least one affected row.
    try {
        connection.prepareStatement(
            "INSERT INTO logs (datenya, timenya, machine, description) VALUES (?, ?, ?, ?)"
        ).use { statement ->  // close the statement even on failure (was leaked before)
            statement.setString(1, data.datenya)
            statement.setString(2, data.timenya)
            statement.setString(3, data.machine)
            statement.setString(4, data.description)
            if (statement.executeUpdate() > 0) {
                return true
            }
            Logger.warn { "Failed to add log entry : $data" }
        }
    } catch (e: Exception) {
        Logger.error { "Error adding log entry: ${e.message}" }
    }
    return false
}
|
||||
|
||||
override fun AddAll(data: ArrayList<Log>): Boolean {
    // Bulk-imports log rows inside a single transaction.
    // Fixes over the previous version: the statement is closed, a failed batch
    // is rolled back, and autoCommit is restored on BOTH paths (it was left
    // disabled after an exception before, silently batching later writes).
    try {
        connection.autoCommit = false
        connection.prepareStatement(
            "INSERT INTO logs (datenya, timenya, machine, description) VALUES (?, ?, ?, ?)"
        ).use { statement ->
            for (log in data) {
                statement.setString(1, log.datenya)
                statement.setString(2, log.timenya)
                statement.setString(3, log.machine)
                statement.setString(4, log.description)
                statement.addBatch()
            }
            statement.executeBatch()
        }
        connection.commit()
        connection.autoCommit = true
        Logger.info("Bulk log insert successful: ${data.size} entries" as Any)
        Add_Log("AAS", "Successfully Import Log Table: ${data.size} entries.")
        return true
    } catch (e: Exception) {
        // Undo the partial batch, then restore autoCommit BEFORE logging so the
        // audit entry below is committed normally.
        try {
            connection.rollback()
        } catch (ignored: Exception) {
            // connection may already be unusable; rollback is best-effort
        }
        try {
            connection.autoCommit = true
        } catch (ignored: Exception) {
            // best-effort restore
        }
        Add_Log("AAS", "Failed to Import Log Table. Error: ${e.message}")
        Logger.error("Error adding log entries: ${e.message}" as Any)
        return false
    }
}
|
||||
|
||||
override fun UpdateByIndex(index: Int, data: Log): Boolean {
|
||||
throw Exception("Update not supported")
|
||||
}
|
||||
|
||||
override fun Resort(): Boolean {
    // Rebuilds the logs table through a temporary copy so the AUTO_INCREMENT
    // `index` column is renumbered in (datenya, timenya, machine) order.
    // NOTE(review): TRUNCATE is DDL and cannot be rolled back in MariaDB — a
    // failure between the two TRUNCATE/INSERT steps can lose rows; the temp
    // table still holds the data in that case.
    try {
        connection.createStatement().use { statement ->  // close the statement (was leaked before)
            val tempdb_name = "temp_${super.dbName}"
            // use a temporary table to reorder the index
            statement.executeUpdate("CREATE TABLE IF NOT EXISTS $tempdb_name LIKE ${super.dbName}")
            statement.executeUpdate("TRUNCATE TABLE $tempdb_name")
            statement.executeUpdate(
                "INSERT INTO $tempdb_name (datenya, timenya, machine, description) " +
                        "SELECT datenya, timenya, machine, description FROM ${super.dbName} " +
                        "ORDER BY datenya , timenya , machine "
            )
            statement.executeUpdate("TRUNCATE TABLE ${super.dbName}")
            statement.executeUpdate(
                "INSERT INTO ${super.dbName} (datenya, timenya, machine, description) " +
                        "SELECT datenya, timenya, machine, description FROM $tempdb_name"
            )
            statement.executeUpdate("DROP TABLE $tempdb_name")
        }
        Logger.info("${super.dbName} table resorted by datenya, timenya, machine" as Any)
        // reload the local list so it matches the renumbered table
        Get()
        return true
    } catch (e: Exception) {
        Logger.error("Error resorting ${super.dbName} table by datenya, timenya, machine: ${e.message}" as Any)
    }
    return false
}
|
||||
|
||||
override fun Import_XLSX(workbook: XSSFWorkbook): Boolean {
|
||||
throw Exception("Importing Logs from XLSX is not supported")
|
||||
}
|
||||
|
||||
override fun Export_XLSX(): XSSFWorkbook? {
    // Dumps the whole logs table into a single-sheet workbook.
    // The caller owns (and must close) the returned workbook; returns null on
    // any failure. Statement and ResultSet are now closed (previously leaked).
    try {
        connection.createStatement().use { statement ->
            statement.executeQuery("SELECT * FROM ${super.dbName}").use { resultSet ->
                val workbook = XSSFWorkbook()
                val sheet = workbook.createSheet("Log")
                val headers = arrayOf("Index", "datenya", "timenya", "machine", "description")
                val headerRow = sheet.createRow(0)
                headers.forEachIndexed { colIndex, header ->
                    headerRow.createCell(colIndex).setCellValue(header)
                }
                // Data rows start at 1, directly below the header row.
                var rowIndex = 1
                while (resultSet.next()) {
                    val row = sheet.createRow(rowIndex++)
                    row.createCell(0).setCellValue(resultSet.getString("index"))
                    row.createCell(1).setCellValue(resultSet.getString("datenya"))
                    row.createCell(2).setCellValue(resultSet.getString("timenya"))
                    row.createCell(3).setCellValue(resultSet.getString("machine"))
                    row.createCell(4).setCellValue(resultSet.getString("description"))
                }
                for (i in headers.indices) {
                    sheet.autoSizeColumn(i)
                }
                return workbook
            }
        }
    } catch (e: Exception) {
        Logger.error { "Error exporting Log, Msg: ${e.message}" }
    }
    return null
}
|
||||
|
||||
|
||||
|
||||
}
|
||||
|
||||
userDB = object : dbFunctions<UserDB>("newuser", connection, listOf("index", "username", "password", "location", "airline_tags", "city_tags", "messagebank_ann_id", "broadcastzones")) {
|
||||
override fun Create() {
|
||||
@@ -1997,14 +1845,14 @@ class MariaDB(
|
||||
val rowsAffected = statement?.executeUpdate()
|
||||
if (rowsAffected != null && rowsAffected > 0) {
|
||||
Logger.info("User added: ${data.username}" as Any)
|
||||
Add_Log("AAS", "User added: ${data.username}")
|
||||
logDB.Add("AAS", "User added: ${data.username}")
|
||||
return true
|
||||
} else {
|
||||
Add_Log("AAS","Failed adding User entry for: ${data.username}.")
|
||||
logDB.Add("AAS","Failed adding User entry for: ${data.username}.")
|
||||
Logger.warn("No user entry added for: ${data.username}" as Any)
|
||||
}
|
||||
} catch (e: Exception) {
|
||||
Add_Log("AAS","Failed adding User entry for: ${data.username}. Error: ${e.message}")
|
||||
logDB.Add("AAS","Failed adding User entry for: ${data.username}. Error: ${e.message}")
|
||||
Logger.error("Error adding user entry: ${e.message}" as Any)
|
||||
}
|
||||
return false
|
||||
@@ -2028,11 +1876,11 @@ class MariaDB(
|
||||
statement.executeBatch()
|
||||
connection.commit()
|
||||
Logger.info("Bulk user insert successful: ${data.size} entries" as Any)
|
||||
Add_Log("AAS","Successfully Import User Table: ${data.size} entries.")
|
||||
logDB.Add("AAS","Successfully Import User Table: ${data.size} entries.")
|
||||
connection.autoCommit = true
|
||||
true
|
||||
} catch (e: Exception) {
|
||||
Add_Log("AAS","Failed to Import User Table. Error: ${e.message}")
|
||||
logDB.Add("AAS","Failed to Import User Table. Error: ${e.message}")
|
||||
Logger.error("Error adding user entries: ${e.message}" as Any)
|
||||
false
|
||||
}
|
||||
@@ -2052,14 +1900,14 @@ class MariaDB(
|
||||
val rowsAffected = statement?.executeUpdate()
|
||||
if (rowsAffected != null && rowsAffected > 0) {
|
||||
Logger.info("User updated at index $index: ${data.username}" as Any)
|
||||
Add_Log("AAS", "User updated at index $index: ${data.username}")
|
||||
logDB.Add("AAS", "User updated at index $index: ${data.username}")
|
||||
return true
|
||||
} else {
|
||||
Add_Log("AAS", "Failed updating User at index $index for: ${data.username}.")
|
||||
logDB.Add("AAS", "Failed updating User at index $index for: ${data.username}.")
|
||||
Logger.warn("No user entry updated at index $index for: ${data.username}" as Any)
|
||||
}
|
||||
} catch (e: Exception) {
|
||||
Add_Log("AAS", "Failed updating User at index $index. Error: ${e.message}")
|
||||
logDB.Add("AAS", "Failed updating User at index $index. Error: ${e.message}")
|
||||
Logger.error("Error updating user entry at index $index: ${e.message}" as Any)
|
||||
}
|
||||
return false
|
||||
@@ -2166,6 +2014,7 @@ class MariaDB(
|
||||
queuetableDB.Create()
|
||||
queuepagingDB.Create()
|
||||
userDB.Create()
|
||||
logSemiAuto.Create()
|
||||
|
||||
messageDB.Get()
|
||||
soundDB.Get()
|
||||
@@ -2189,6 +2038,7 @@ class MariaDB(
|
||||
Logger.info { "User count: ${userDB.List.size}" }
|
||||
|
||||
|
||||
|
||||
} catch (e: Exception) {
|
||||
Logger.error("Failed to connect to MariaDB: ${e.message}" as Any)
|
||||
}
|
||||
@@ -2208,121 +2058,28 @@ class MariaDB(
|
||||
connected = false
|
||||
}
|
||||
|
||||
|
||||
/**
 * Adds a new log entry to the database with the current date and time.
 * @param machine The machine name or identifier.
 * @param description The log description.
 * @return true if the log was added successfully, false otherwise.
 */
fun Add_Log(machine: String, description: String): Boolean {
    val now = java.time.LocalDateTime.now()
    // The logs table stores dates as DD/MM/YYYY and times as HH:MM:SS.
    // Explicit patterns replace the old toString()-based formatting, which
    // dropped the ":ss" part whenever seconds were exactly zero
    // (LocalTime.toString omits zero second fields), yielding mixed formats.
    val datenya = now.format(java.time.format.DateTimeFormatter.ofPattern("dd/MM/yyyy"))
    val timenya = now.format(java.time.format.DateTimeFormatter.ofPattern("HH:mm:ss"))
    return logDB.Add(Log(0u, datenya, timenya, machine, description))
}
|
||||
|
||||
/**
|
||||
* Get All Log from database
|
||||
* @param consumer A Consumer that will receive the list of logs
|
||||
*/
|
||||
fun GetLog(consumer: Consumer<ArrayList<Log>>) {
|
||||
val logList = ArrayList<Log>()
|
||||
fun GetSemiAutoLogForHTML(date: String, consumer: Consumer<ArrayList<LogSemiauto>>) {
|
||||
val logList = ArrayList<LogSemiauto>()
|
||||
try {
|
||||
val statement = connection.createStatement()
|
||||
val resultSet = statement?.executeQuery("SELECT * FROM logs")
|
||||
val statement = connection.prepareStatement("SELECT * FROM logsemiauto WHERE date = ?")
|
||||
statement?.setString(1, date)
|
||||
val resultSet = statement?.executeQuery()
|
||||
while (resultSet?.next() == true) {
|
||||
val log = Log(
|
||||
val log = LogSemiauto(
|
||||
resultSet.getLong("index").toULong(),
|
||||
resultSet.getString("datenya"),
|
||||
resultSet.getString("timenya"),
|
||||
resultSet.getString("machine"),
|
||||
resultSet.getString("date"),
|
||||
resultSet.getString("time"),
|
||||
resultSet.getString("source"),
|
||||
resultSet.getString("description")
|
||||
)
|
||||
logList.add(log)
|
||||
}
|
||||
} catch (e: Exception) {
|
||||
Logger.error("Error fetching logs table: ${e.message}" as Any)
|
||||
Logger.error("Error fetching logsemiauto table for date $date: ${e.message}" as Any)
|
||||
}
|
||||
consumer.accept(logList)
|
||||
}
|
||||
|
||||
/**
 * Get Log from database by date for HTML usage.
 * Invalid dates (per [ValiDateForLogHtml]) and query errors both yield an
 * empty list; the consumer is always invoked exactly once.
 * @param date The date to filter logs by (format: DD-MM-YYYY)
 * @param consumer A Consumer that will receive the list of logs for the specified date
 */
fun GetLogForHtml(date: String, consumer: Consumer<ArrayList<Log>>) {
    val logList = ArrayList<Log>()
    if (ValiDateForLogHtml(date)) {
        try {
            // The database stores dates as DD/MM/YYYY while the HTML layer
            // sends DD-MM-YYYY, so convert before querying.
            val adjusteddate = date.replace("-", "/")
            connection.prepareStatement("SELECT * FROM logs WHERE datenya = ?").use { statement ->
                statement.setString(1, adjusteddate)
                statement.executeQuery().use { resultSet ->  // statement/resultSet were leaked before
                    while (resultSet.next()) {
                        logList.add(
                            Log(
                                resultSet.getLong("index").toULong(),
                                resultSet.getString("datenya"),
                                resultSet.getString("timenya"),
                                resultSet.getString("machine"),
                                resultSet.getString("description")
                            )
                        )
                    }
                }
            }
        } catch (e: Exception) {
            Logger.error("Error fetching logs table for date $date: ${e.message}" as Any)
        }
    }
    consumer.accept(logList)
}
|
||||
|
||||
/**
 * Get Log from database by date and filter for HTML usage.
 * NOTE: the filter is matched against the description column only (SQL LIKE,
 * substring match) — the previous doc wrongly claimed machine was searched too.
 * Invalid dates and query errors both yield an empty list; the consumer is
 * always invoked exactly once.
 * @param date The date to filter logs by (format: DD-MM-YYYY)
 * @param filter The substring to search for in the description column
 * @param consumer A Consumer that will receive the list of logs for the specified date and filter
 */
fun GetLogForHtml(date: String, filter: String, consumer: Consumer<ArrayList<Log>>) {
    val logList = ArrayList<Log>()
    if (ValiDateForLogHtml(date)) {
        try {
            // must convert from DD-MM-YYYY to DD/MM/YYYY, because in database we use DD/MM/YYYY
            val adjusteddate = date.replace("-", "/")
            connection.prepareStatement(
                "SELECT * FROM logs WHERE datenya = ? AND description LIKE ?"
            ).use { statement ->  // statement/resultSet were leaked before
                statement.setString(1, adjusteddate)
                statement.setString(2, "%$filter%")
                statement.executeQuery().use { resultSet ->
                    while (resultSet.next()) {
                        logList.add(
                            Log(
                                resultSet.getLong("index").toULong(),
                                resultSet.getString("datenya"),
                                resultSet.getString("timenya"),
                                resultSet.getString("machine"),
                                resultSet.getString("description")
                            )
                        )
                    }
                }
            }
        } catch (e: Exception) {
            Logger.error("Error fetching logs for date $date with filter $filter: ${e.message}" as Any)
        }
    }
    consumer.accept(logList)
}
|
||||
|
||||
/**
|
||||
* Exports the log table to an XLSX workbook for a specific date and optional filter.
|
||||
* @param logDate The date string in format "dd/MM/yyyy".
|
||||
|
||||
242
src/database/Table_LogSemiAuto.kt
Normal file
242
src/database/Table_LogSemiAuto.kt
Normal file
@@ -0,0 +1,242 @@
|
||||
package database
|
||||
|
||||
import org.apache.poi.xssf.usermodel.XSSFWorkbook
|
||||
import org.tinylog.Logger
|
||||
import java.sql.Connection
|
||||
import java.util.function.Consumer
|
||||
|
||||
class Table_LogSemiAuto(connection: Connection) : dbFunctions<LogSemiauto>("logsemiauto", connection, listOf("index", "date", "time", "source", "description")) {
|
||||
/**
|
||||
* dateformat1 is regex for DD/MM/YYYY
|
||||
*/
|
||||
val dateformat1 = """^(0[1-9]|[12][0-9]|3[01])/(0[1-9]|1[0-2])/\d{4}$""".toRegex()
|
||||
|
||||
/**
|
||||
* dateformat2 is regex for DD-MM-YYYY
|
||||
*/
|
||||
val dateformat2 = """^(0[1-9]|[12][0-9]|3[01])-(0[1-9]|1[0-2])-\d{4}$""".toRegex()
|
||||
|
||||
/**
|
||||
* dateformat3 is regex for YYYY/MM/DD
|
||||
*/
|
||||
val dateformat3 = """^\d{4}/(0[1-9]|1[0-2])/(0[1-9]|[12][0-9]|3[01])$""".toRegex()
|
||||
|
||||
/**
|
||||
* dateformat4 is regex for YYYY-MM-DD
|
||||
*/
|
||||
val dateformat4 = """^\d{4}-(0[1-9]|1[0-2])-(0[1-9]|[12][0-9]|3[01])$""".toRegex()
|
||||
|
||||
override fun Create() {
|
||||
val tabledefinition = "CREATE TABLE IF NOT EXISTS ${super.dbName} (" +
|
||||
"`index` INT AUTO_INCREMENT PRIMARY KEY," +
|
||||
"date VARCHAR(20) NOT NULL," + // format DD/MM/YYYY
|
||||
"time VARCHAR(20) NOT NULL," + // format HH:MM:SS
|
||||
"source VARCHAR(45) NOT NULL," +
|
||||
"description TEXT NOT NULL" +
|
||||
")"
|
||||
|
||||
super.Create(tabledefinition)
|
||||
|
||||
}
|
||||
|
||||
|
||||
|
||||
override fun Get(cbOK: Consumer<Unit>?, cbFail: Consumer<String>?) {
|
||||
List.clear()
|
||||
try {
|
||||
val statement = connection.createStatement()
|
||||
val resultSet = statement?.executeQuery("SELECT * FROM ${super.dbName}")
|
||||
while (resultSet?.next() == true) {
|
||||
val log = LogSemiauto(
|
||||
resultSet.getLong("index").toULong(),
|
||||
resultSet.getString("date"),
|
||||
resultSet.getString("time"),
|
||||
resultSet.getString("source"),
|
||||
resultSet.getString("description")
|
||||
)
|
||||
List.add(log)
|
||||
}
|
||||
cbOK?.accept(Unit)
|
||||
} catch (e: Exception) {
|
||||
cbFail?.accept("Error fetching ${super.dbName}: ${e.message}")
|
||||
Logger.error("Error fetching ${super.dbName}: ${e.message}" as Any)
|
||||
|
||||
}
|
||||
}
|
||||
|
||||
fun Add(source: String, description: String){
|
||||
val log = LogSemiauto.NewLog(source, description)
|
||||
Add(log)
|
||||
}
|
||||
|
||||
override fun Add(data: LogSemiauto): Boolean {
|
||||
try{
|
||||
val statement = connection.prepareStatement("INSERT INTO ${super.dbName} (date, time, source, description) VALUES (?, ?, ?, ?)")
|
||||
statement?.setString(1, data.date)
|
||||
statement?.setString(2, data.time)
|
||||
statement?.setString(3, data.source)
|
||||
statement?.setString(4, data.description)
|
||||
val rowsAffected = statement?.executeUpdate()
|
||||
if (rowsAffected != null && rowsAffected > 0) {
|
||||
return true
|
||||
} else throw Exception("Failed to add logsemiauto entry: $data")
|
||||
} catch (e : Exception){
|
||||
Logger.error { "Error adding logsemiauto entry: ${e.message}" }
|
||||
return false
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
|
||||
fun GetLogSemiAutoForHtml(date: String, filter: String?, cbOK: Consumer<ArrayList<LogSemiauto>>?, cbFail: Consumer<String>?){
|
||||
try{
|
||||
val valid_date : String? = when{
|
||||
dateformat1.matches(date) -> date
|
||||
dateformat2.matches(date) -> {
|
||||
val parts = date.split("-")
|
||||
"${parts[0]}/${parts[1]}/${parts[2]}"
|
||||
}
|
||||
dateformat3.matches(date) -> {
|
||||
val parts = date.split("/")
|
||||
"${parts[2]}/${parts[1]}/${parts[0]}"
|
||||
}
|
||||
dateformat4.matches(date) -> {
|
||||
val parts = date.split("-")
|
||||
"${parts[2]}/${parts[1]}/${parts[0]}"
|
||||
}
|
||||
else -> null
|
||||
}
|
||||
if (valid_date!=null){
|
||||
val statement = if (filter.isNullOrEmpty()){
|
||||
connection.prepareStatement("SELECT * FROM ${super.dbName} WHERE date = ?")
|
||||
} else {
|
||||
connection.prepareStatement("SELECT * FROM ${super.dbName} WHERE date = ? AND description LIKE ?")
|
||||
}
|
||||
statement?.setString(1, valid_date)
|
||||
if (!filter.isNullOrEmpty()){
|
||||
statement?.setString(2, "%$filter%")
|
||||
}
|
||||
val resultSet = statement?.executeQuery()
|
||||
val tempList = ArrayList<LogSemiauto>()
|
||||
while (resultSet?.next() == true) {
|
||||
val log = LogSemiauto(
|
||||
resultSet.getLong("index").toULong(),
|
||||
resultSet.getString("date"),
|
||||
resultSet.getString("time"),
|
||||
resultSet.getString("source"),
|
||||
resultSet.getString("description")
|
||||
)
|
||||
tempList.add(log)
|
||||
}
|
||||
cbOK?.accept(tempList)
|
||||
} else throw Exception("Invalid date")
|
||||
} catch (e : Exception){
|
||||
if (filter.isNullOrEmpty()){
|
||||
cbFail?.accept("Failed to Get logs for date $date: ${e.message}")
|
||||
} else {
|
||||
cbFail?.accept("Failed to Get logs for date $date with filter $filter: ${e.message}")
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
override fun AddAll(data: ArrayList<LogSemiauto>): Boolean {
|
||||
try{
|
||||
val statement = connection.prepareStatement("INSERT INTO ${super.dbName} (date, time, source, description) VALUES (?, ?, ?, ?)")
|
||||
for (log in data) {
|
||||
statement.setString(1, log.date)
|
||||
statement.setString(2, log.time)
|
||||
statement.setString(3, log.source)
|
||||
statement.setString(4, log.description)
|
||||
statement.addBatch()
|
||||
}
|
||||
val rowsAffected = statement.executeBatch()
|
||||
if (rowsAffected.isNotEmpty()) {
|
||||
return true
|
||||
} else throw Exception("Failed to add logsemiauto entries: $data")
|
||||
} catch (e : Exception){
|
||||
Logger.error { "Error adding logsemiauto entries: ${e.message}" }
|
||||
return false
|
||||
}
|
||||
}
|
||||
|
||||
override fun UpdateByIndex(index: Int, data: LogSemiauto): Boolean {
|
||||
try{
|
||||
val statement = connection.prepareStatement("UPDATE ${super.dbName} SET date = ?, time = ?, source = ?, description = ? WHERE `index` = ?")
|
||||
statement.setString(1, data.date)
|
||||
statement.setString(2, data.time)
|
||||
statement.setString(3, data.source)
|
||||
statement.setString(4, data.description)
|
||||
statement.setInt(5, index)
|
||||
val rowsAffected = statement.executeUpdate()
|
||||
if (rowsAffected > 0) {
|
||||
return true
|
||||
} else throw Exception("Failed to update logsemiauto entry at index $index: $data")
|
||||
} catch (e : Exception){
|
||||
Logger.error { "Error updating logsemiauto entry at index $index: ${e.message}" }
|
||||
return false
|
||||
}
|
||||
}
|
||||
|
||||
/**
 * Rebuilds the logsemiauto table so the AUTO_INCREMENT `index` column is
 * regenerated in (date, time, source) order, then reloads the local list
 * via [Get].
 *
 * NOTE(review): TRUNCATE/INSERT through a temp table is not atomic — a crash
 * mid-resort can lose rows; confirm this is acceptable for a log table.
 *
 * @return true on success, false when any of the SQL steps failed.
 */
override fun Resort(): Boolean {
    try {
        // .use closes the Statement even on failure; the original leaked it.
        connection.createStatement().use { statement ->
            val tempdb_name = "temp_${super.dbName}"
            // use a temporary table to reorder the index
            statement.executeUpdate("CREATE TABLE IF NOT EXISTS $tempdb_name LIKE ${super.dbName}")
            statement.executeUpdate("TRUNCATE TABLE $tempdb_name")
            statement.executeUpdate(
                "INSERT INTO $tempdb_name (date, time, source, description) " +
                        "SELECT date, time, source, description FROM ${super.dbName} " +
                        "ORDER BY date , time , source "
            )
            statement.executeUpdate("TRUNCATE TABLE ${super.dbName}")
            statement.executeUpdate(
                "INSERT INTO ${super.dbName} (date, time, source, description) " +
                        "SELECT date, time, source, description FROM $tempdb_name"
            )
            statement.executeUpdate("DROP TABLE $tempdb_name")
        }
        // Supplier-lambda form instead of the original `"..." as Any` cast.
        Logger.info { "${super.dbName} table resorted by date, time, source" }
        // reload the local list
        Get()
        return true
    } catch (e: Exception) {
        Logger.error { "Error resorting logsemiauto table: ${e.message}" }
        return false
    }
}
|
||||
|
||||
/**
 * XLSX import is intentionally not implemented for the semiauto log table.
 *
 * @throws UnsupportedOperationException always (idiomatic "not supported"
 *         type; still caught by callers that catch [Exception]).
 */
override fun Import_XLSX(workbook: XSSFWorkbook): Boolean {
    throw UnsupportedOperationException("Importing LogSemiauto from XLSX is not supported")
}
|
||||
|
||||
/**
 * Exports every row of the logsemiauto table into a new XLSX workbook:
 * a header row followed by one row per database record, with auto-sized
 * columns.
 *
 * @return the populated workbook, or null when the export failed.
 */
override fun Export_XLSX(): XSSFWorkbook? {
    try {
        // .use closes Statement and ResultSet even on failure; the original
        // leaked both on every call.
        connection.createStatement().use { statement ->
            statement.executeQuery("SELECT * FROM ${super.dbName}").use { resultSet ->
                val workbook = XSSFWorkbook()
                val sheet = workbook.createSheet("LogSemiauto")
                val headerRow = sheet.createRow(0)
                val headers = arrayOf("Index", "date", "time", "source", "description")
                for ((colIndex, header) in headers.withIndex()) {
                    headerRow.createCell(colIndex).setCellValue(header)
                }
                // Data rows start below the header.
                var rowIndex = 1
                while (resultSet.next()) {
                    val row = sheet.createRow(rowIndex++)
                    row.createCell(0).setCellValue(resultSet.getString("index"))
                    row.createCell(1).setCellValue(resultSet.getString("date"))
                    row.createCell(2).setCellValue(resultSet.getString("time"))
                    row.createCell(3).setCellValue(resultSet.getString("source"))
                    row.createCell(4).setCellValue(resultSet.getString("description"))
                }
                for (i in headers.indices) {
                    sheet.autoSizeColumn(i)
                }
                return workbook
            }
        }
    } catch (e: Exception) {
        Logger.error { "Error exporting LogSemiauto, Msg: ${e.message}" }
        return null
    }
}
|
||||
}
|
||||
233
src/database/Table_Logs.kt
Normal file
233
src/database/Table_Logs.kt
Normal file
@@ -0,0 +1,233 @@
|
||||
package database
|
||||
|
||||
import org.apache.poi.xssf.usermodel.XSSFWorkbook
|
||||
import org.tinylog.Logger
|
||||
import java.sql.Connection
|
||||
import java.util.function.Consumer
|
||||
|
||||
/**
 * Database access layer for the `logs` table.
 *
 * Columns: `index` (AUTO_INCREMENT primary key), `datenya` (date as
 * DD/MM/YYYY), `timenya` (time as HH:MM:SS), `machine` (source machine name)
 * and `description` (free text).
 */
class Table_Logs(connection: Connection) : dbFunctions<Log> ("logs", connection,listOf("index", "datenya", "timenya", "machine", "description")) {
    /**
     * dateformat1 is regex for DD/MM/YYYY
     */
    val dateformat1 = """^(0[1-9]|[12][0-9]|3[01])/(0[1-9]|1[0-2])/\d{4}$""".toRegex()

    /**
     * dateformat2 is regex for DD-MM-YYYY
     */
    val dateformat2 = """^(0[1-9]|[12][0-9]|3[01])-(0[1-9]|1[0-2])-\d{4}$""".toRegex()

    /**
     * dateformat3 is regex for YYYY/MM/DD
     */
    val dateformat3 = """^\d{4}/(0[1-9]|1[0-2])/(0[1-9]|[12][0-9]|3[01])$""".toRegex()

    /**
     * dateformat4 is regex for YYYY-MM-DD
     */
    val dateformat4 = """^\d{4}-(0[1-9]|1[0-2])-(0[1-9]|[12][0-9]|3[01])$""".toRegex()

    /** Creates the backing table when it does not exist yet. */
    override fun Create() {
        val tabledefinition = "CREATE TABLE IF NOT EXISTS ${super.dbName} (" +
                "`index` INT AUTO_INCREMENT PRIMARY KEY," +
                "datenya VARCHAR(20) NOT NULL," + // format DD/MM/YYYY
                "timenya VARCHAR(20) NOT NULL," + // format HH:MM:SS
                "machine VARCHAR(45) NOT NULL," +
                "description TEXT NOT NULL" +
                ")"
        super.Create(tabledefinition)
    }

    /**
     * Fetches the log rows for one calendar day, optionally filtered by a
     * substring of the description, and hands them to [cbOK]. Accepts the
     * date in DD/MM/YYYY, DD-MM-YYYY, YYYY/MM/DD or YYYY-MM-DD form.
     *
     * @param date   requested day in one of the four supported formats.
     * @param filter optional description substring; null/empty means no filter.
     * @param cbOK   receives the (possibly empty) matching rows.
     * @param cbFail receives an error message on invalid date or SQL failure.
     */
    fun GetLogForHtml(date: String, filter: String?, cbOK: Consumer<ArrayList<Log>>?, cbFail: Consumer<String>?){
        try{
            // Normalise every accepted input format to the stored DD/MM/YYYY form.
            val valid_date : String? = when{
                dateformat1.matches(date) -> date
                dateformat2.matches(date) -> {
                    val parts = date.split("-")
                    "${parts[0]}/${parts[1]}/${parts[2]}"
                }
                dateformat3.matches(date) -> {
                    val parts = date.split("/")
                    "${parts[2]}/${parts[1]}/${parts[0]}"
                }
                dateformat4.matches(date) -> {
                    val parts = date.split("-")
                    "${parts[2]}/${parts[1]}/${parts[0]}"
                }
                else -> null
            }
            if (valid_date!=null){
                val sql = if (filter.isNullOrEmpty()){
                    "SELECT * FROM ${super.dbName} WHERE datenya = ?"
                } else {
                    "SELECT * FROM ${super.dbName} WHERE datenya = ? AND description LIKE ?"
                }
                // .use closes statement and result set even on failure; the
                // original leaked both.
                connection.prepareStatement(sql).use { statement ->
                    statement.setString(1, valid_date)
                    if (!filter.isNullOrEmpty()){
                        statement.setString(2, "%$filter%")
                    }
                    statement.executeQuery().use { resultSet ->
                        val tempList = ArrayList<Log>()
                        while (resultSet.next()) {
                            tempList.add(
                                Log(
                                    resultSet.getLong("index").toULong(),
                                    resultSet.getString("datenya"),
                                    resultSet.getString("timenya"),
                                    resultSet.getString("machine"),
                                    resultSet.getString("description")
                                )
                            )
                        }
                        cbOK?.accept(tempList)
                    }
                }
            } else throw Exception("Invalid date")
        } catch (e : Exception){
            if (filter.isNullOrEmpty()){
                cbFail?.accept("Failed to Get logs for date $date: ${e.message}")
            } else {
                cbFail?.accept("Failed to Get logs for date $date with filter $filter: ${e.message}")
            }
        }
    }

    /**
     * Reloads the in-memory [List] cache with every row of the table.
     * On failure the cache stays cleared and [cbFail] gets the error message.
     */
    override fun Get(cbOK: Consumer<Unit>?, cbFail: Consumer<String>?) {
        List.clear()
        try {
            connection.createStatement().use { statement ->
                statement.executeQuery("SELECT * FROM ${super.dbName}").use { resultSet ->
                    while (resultSet.next()) {
                        List.add(
                            Log(
                                resultSet.getLong("index").toULong(),
                                resultSet.getString("datenya"),
                                resultSet.getString("timenya"),
                                resultSet.getString("machine"),
                                resultSet.getString("description")
                            )
                        )
                    }
                }
            }
            cbOK?.accept(Unit)
        } catch (e: Exception) {
            cbFail?.accept("Error fetching ${super.dbName}: ${e.message}")
            Logger.error { "Error fetching ${super.dbName}: ${e.message}" }
        }
    }

    /**
     * Convenience overload: stamps a new [Log] with the current date/time
     * (via [Log.NewLog]) and inserts it.
     */
    fun Add(machine: String, description: String): Boolean {
        val log = Log.NewLog(machine, description)
        return Add(log)
    }

    /**
     * Inserts one log row.
     *
     * @return true when the row was inserted, false on failure.
     */
    override fun Add(data: Log): Boolean {
        try {
            connection.prepareStatement(
                "INSERT INTO logs (datenya, timenya, machine, description) VALUES (?, ?, ?, ?)"
            ).use { statement ->
                statement.setString(1, data.datenya)
                statement.setString(2, data.timenya)
                statement.setString(3, data.machine)
                statement.setString(4, data.description)
                if (statement.executeUpdate() > 0) {
                    return true
                }
                Logger.warn{"Failed to add log entry : $data"}
            }
        } catch (e: Exception) {
            Logger.error{"Error adding log entry: ${e.message}"}
        }
        return false
    }

    /**
     * Inserts many log rows in a single transaction (JDBC batch).
     *
     * @return true when the whole batch committed, false when it was rolled back.
     */
    override fun AddAll(data: ArrayList<Log>): Boolean {
        return try {
            connection.autoCommit = false
            val sql = "INSERT INTO logs (datenya, timenya, machine, description) VALUES (?, ?, ?, ?)"
            connection.prepareStatement(sql).use { statement ->
                for (log in data) {
                    statement.setString(1, log.datenya)
                    statement.setString(2, log.timenya)
                    statement.setString(3, log.machine)
                    statement.setString(4, log.description)
                    statement.addBatch()
                }
                statement.executeBatch()
            }
            connection.commit()
            Logger.info { "Bulk log insert successful: ${data.size} entries" }
            true
        } catch (e: Exception) {
            // BUG FIX: roll back the partial batch so the table is unchanged.
            try { connection.rollback() } catch (ignored: Exception) { /* best effort */ }
            Logger.error { "Error adding log entries: ${e.message}" }
            false
        } finally {
            // BUG FIX: restore autoCommit on ALL paths; previously a failed
            // batch left the shared connection in manual-commit mode.
            try { connection.autoCommit = true } catch (ignored: Exception) { /* best effort */ }
        }
    }

    /**
     * In-place updates are not supported for the append-only logs table.
     *
     * @throws UnsupportedOperationException always.
     */
    override fun UpdateByIndex(index: Int, data: Log): Boolean {
        throw UnsupportedOperationException("Update not supported")
    }

    /**
     * Rebuilds the table so the AUTO_INCREMENT `index` column is regenerated
     * in (datenya, timenya, machine) order, then reloads the local list.
     *
     * NOTE(review): the TRUNCATE/INSERT sequence is not atomic — a crash
     * mid-resort can lose rows; confirm this is acceptable for a log table.
     */
    override fun Resort(): Boolean {
        try {
            connection.createStatement().use { statement ->
                val tempdb_name = "temp_${super.dbName}"
                // use a temporary table to reorder the index
                statement.executeUpdate("CREATE TABLE IF NOT EXISTS $tempdb_name LIKE ${super.dbName}")
                statement.executeUpdate("TRUNCATE TABLE $tempdb_name")
                statement.executeUpdate(
                    "INSERT INTO $tempdb_name (datenya, timenya, machine, description) " +
                            "SELECT datenya, timenya, machine, description FROM ${super.dbName} " +
                            "ORDER BY datenya , timenya , machine "
                )
                statement.executeUpdate("TRUNCATE TABLE ${super.dbName}")
                statement.executeUpdate(
                    "INSERT INTO ${super.dbName} (datenya, timenya, machine, description) " +
                            "SELECT datenya, timenya, machine, description FROM $tempdb_name"
                )
                statement.executeUpdate("DROP TABLE $tempdb_name")
            }
            Logger.info { "${super.dbName} table resorted by datenya, timenya, machine" }
            // reload the local list
            Get()
            return true
        } catch (e: Exception) {
            Logger.error { "Error resorting ${super.dbName} table by datenya, timenya, machine: ${e.message}" }
        }
        return false
    }

    /**
     * XLSX import is intentionally not implemented for the logs table.
     *
     * @throws UnsupportedOperationException always.
     */
    override fun Import_XLSX(workbook: XSSFWorkbook): Boolean {
        throw UnsupportedOperationException("Importing Logs from XLSX is not supported")
    }

    /**
     * Exports every row of the logs table into a new XLSX workbook: a header
     * row followed by one row per record, with auto-sized columns.
     *
     * @return the populated workbook, or null when the export failed.
     */
    override fun Export_XLSX(): XSSFWorkbook? {
        try {
            connection.createStatement().use { statement ->
                statement.executeQuery("SELECT * FROM ${super.dbName}").use { resultSet ->
                    val workbook = XSSFWorkbook()
                    val sheet = workbook.createSheet("Log")
                    val headerRow = sheet.createRow(0)
                    val headers = arrayOf("Index", "datenya", "timenya", "machine", "description")
                    for ((colIndex, header) in headers.withIndex()) {
                        headerRow.createCell(colIndex).setCellValue(header)
                    }
                    var rowIndex = 1
                    while (resultSet.next()) {
                        val row = sheet.createRow(rowIndex++)
                        row.createCell(0).setCellValue(resultSet.getString("index"))
                        row.createCell(1).setCellValue(resultSet.getString("datenya"))
                        row.createCell(2).setCellValue(resultSet.getString("timenya"))
                        row.createCell(3).setCellValue(resultSet.getString("machine"))
                        row.createCell(4).setCellValue(resultSet.getString("description"))
                    }
                    for (i in headers.indices) {
                        sheet.autoSizeColumn(i)
                    }
                    return workbook
                }
            }
        } catch (e: Exception) {
            Logger.error { "Error exporting Log, Msg: ${e.message}" }
        }
        return null
    }
}
|
||||
@@ -9,7 +9,6 @@ import codes.Somecodes.Companion.GetUptime
|
||||
import codes.Somecodes.Companion.ListAudioFiles
|
||||
import codes.Somecodes.Companion.String_To_List
|
||||
import codes.Somecodes.Companion.ValiDateForLogHtml
|
||||
import codes.Somecodes.Companion.ValidDate
|
||||
import codes.Somecodes.Companion.ValidDirectory
|
||||
import codes.Somecodes.Companion.ValidFile
|
||||
import codes.Somecodes.Companion.ValidIPV4
|
||||
@@ -44,6 +43,7 @@ import java.nio.file.Files
|
||||
import java.time.LocalDateTime
|
||||
import codes.configKeys
|
||||
import config
|
||||
import database.LogSemiauto
|
||||
import database.QueueTable
|
||||
import google.GoogleTTS
|
||||
import google.autoadd
|
||||
@@ -695,7 +695,8 @@ class WebApp(val listenPort: Int, var userlist: List<Pair<String, String>>, val
|
||||
if (db.soundDB.DeleteByIndex(index.toInt())) {
|
||||
db.soundDB.Resort()
|
||||
it.result(objectmapper.writeValueAsString(resultMessage("OK")))
|
||||
db.Add_Log("AAS", "Deleted sound bank with index $index")
|
||||
|
||||
db.logDB.Add("AAS", "Deleted sound bank with index $index")
|
||||
} else {
|
||||
it.status(500)
|
||||
.result(objectmapper.writeValueAsString(resultMessage("Failed to delete soundbank with index $index")))
|
||||
@@ -908,7 +909,7 @@ class WebApp(val listenPort: Int, var userlist: List<Pair<String, String>>, val
|
||||
if (db.messageDB.DeleteByIndex(index.toInt())) {
|
||||
db.messageDB.Resort()
|
||||
it.result(objectmapper.writeValueAsString(resultMessage("OK")))
|
||||
db.Add_Log("AAS", "Deleted message bank with index $index")
|
||||
db.logDB.Add("AAS", "Deleted message bank with index $index")
|
||||
} else {
|
||||
it.status(500)
|
||||
.result(objectmapper.writeValueAsString(resultMessage("Failed to delete messagebank with index $index")))
|
||||
@@ -1087,7 +1088,7 @@ class WebApp(val listenPort: Int, var userlist: List<Pair<String, String>>, val
|
||||
if (db.languageDB.DeleteByIndex(index.toInt())) {
|
||||
db.languageDB.Resort()
|
||||
it.result(objectmapper.writeValueAsString(resultMessage("OK")))
|
||||
db.Add_Log("AAS", "Deleted language link with index $index")
|
||||
db.logDB.Add("AAS", "Deleted language link with index $index")
|
||||
} else {
|
||||
it.status(500)
|
||||
.result(objectmapper.writeValueAsString(resultMessage("Failed to delete language link with index $index")))
|
||||
@@ -1273,7 +1274,7 @@ class WebApp(val listenPort: Int, var userlist: List<Pair<String, String>>, val
|
||||
if (db.scheduleDB.DeleteByIndex(index.toInt())) {
|
||||
db.scheduleDB.Resort()
|
||||
it.result(objectmapper.writeValueAsString(resultMessage("OK")))
|
||||
db.Add_Log("AAS", "Deleted schedule bank with index $index")
|
||||
db.logDB.Add("AAS", "Deleted schedule bank with index $index")
|
||||
} else {
|
||||
it.status(500)
|
||||
.result(objectmapper.writeValueAsString(resultMessage("Failed to delete schedule with index $index")))
|
||||
@@ -1521,7 +1522,7 @@ class WebApp(val listenPort: Int, var userlist: List<Pair<String, String>>, val
|
||||
if (db.userDB.DeleteByIndex(index.toInt())) {
|
||||
db.userDB.Resort()
|
||||
it.result(objectmapper.writeValueAsString(resultMessage("OK")))
|
||||
db.Add_Log("AAS", "Deleted user with index $index")
|
||||
db.logDB.Add("AAS", "Deleted user with index $index")
|
||||
} else it.status(500)
|
||||
.result(objectmapper.writeValueAsString(resultMessage("Failed to delete user with index $index")))
|
||||
}
|
||||
@@ -1728,23 +1729,12 @@ class WebApp(val listenPort: Int, var userlist: List<Pair<String, String>>, val
|
||||
path("Log") {
|
||||
get("List") { get1 ->
|
||||
val logdate = get1.queryParam("date") ?: ""
|
||||
val logfilter = get1.queryParam("filter") ?: ""
|
||||
if (ValiDateForLogHtml(logdate)) {
|
||||
if (ValidString(logfilter)) {
|
||||
// ada log filter
|
||||
|
||||
db.GetLogForHtml(logdate, logfilter) {
|
||||
get1.result(MariaDB.ArrayListtoString(it))
|
||||
}
|
||||
} else {
|
||||
db.GetLogForHtml(logdate) {
|
||||
get1.result(MariaDB.ArrayListtoString(it))
|
||||
}
|
||||
}
|
||||
} else {
|
||||
get1.status(400)
|
||||
.result(objectmapper.writeValueAsString(resultMessage("Invalid logdate")))
|
||||
}
|
||||
val logfilter = get1.queryParam("filter")
|
||||
db.logDB.GetLogForHtml(logdate, logfilter, { loglist ->
|
||||
get1.result(objectmapper.writeValueAsString(loglist))
|
||||
}, { msgFail ->
|
||||
get1.status(500).result(objectmapper.writeValueAsString(resultMessage(msgFail)))
|
||||
})
|
||||
}
|
||||
get("ExportXLSX") { get1 ->
|
||||
val logdate = get1.queryParam("date") ?: ""
|
||||
@@ -1833,7 +1823,7 @@ class WebApp(val listenPort: Int, var userlist: List<Pair<String, String>>, val
|
||||
if (db.broadcastDB.DeleteByIndex(index.toInt())) {
|
||||
db.broadcastDB.Resort()
|
||||
it.result(objectmapper.writeValueAsString(resultMessage("OK")))
|
||||
db.Add_Log("AAS", "Deleted broadcast zone with index $index")
|
||||
db.logDB.Add("AAS", "Deleted broadcast zone with index $index")
|
||||
} else {
|
||||
it.status(500)
|
||||
.result(objectmapper.writeValueAsString(resultMessage("Failed to delete broadcast zone with index $index")))
|
||||
@@ -2096,7 +2086,7 @@ class WebApp(val listenPort: Int, var userlist: List<Pair<String, String>>, val
|
||||
if (db.queuepagingDB.DeleteByIndex(index.toInt())) {
|
||||
db.queuepagingDB.Resort()
|
||||
it.result(objectmapper.writeValueAsString(resultMessage("OK")))
|
||||
db.Add_Log("AAS", "Deleted queue paging with index $index")
|
||||
db.logDB.Add("AAS", "Deleted queue paging with index $index")
|
||||
} else {
|
||||
it.status(500)
|
||||
.result(objectmapper.writeValueAsString(resultMessage("Failed to delete queue paging with index $index")))
|
||||
@@ -2135,7 +2125,7 @@ class WebApp(val listenPort: Int, var userlist: List<Pair<String, String>>, val
|
||||
if (db.queuetableDB.DeleteByIndex(index.toInt())) {
|
||||
db.queuetableDB.Resort()
|
||||
it.result(objectmapper.writeValueAsString(resultMessage("OK")))
|
||||
db.Add_Log("AAS", "Deleted queue sound with index $index")
|
||||
db.logDB.Add("AAS", "Deleted queue sound with index $index")
|
||||
|
||||
} else {
|
||||
it.status(500)
|
||||
@@ -2423,7 +2413,7 @@ class WebApp(val listenPort: Int, var userlist: List<Pair<String, String>>, val
|
||||
_config.Set(configKeys.DEFAULT_VOICE_TYPE.key, defaultvoice)
|
||||
_config.Save()
|
||||
Logger.info { "Changed FIS Codes" }
|
||||
db.Add_Log(
|
||||
db.logDB.Add(
|
||||
"AAS",
|
||||
"Save FIS Codes Message: GOP=$_gop, GBD=$_gbd, GFC=$_gfc, FLD=$_fld, DefaultVoice=$defaultvoice"
|
||||
)
|
||||
@@ -2503,6 +2493,8 @@ class WebApp(val listenPort: Int, var userlist: List<Pair<String, String>>, val
|
||||
if (user != null) {
|
||||
it.cookie("semiauto-user", user.username)
|
||||
it.redirect("index.html")
|
||||
|
||||
db.logSemiAuto.Add(LogSemiauto.NewLog("SEMIAUTOWEB", "User logged in: ${user.username} from ip ${it.ip()}"))
|
||||
} else ResultMessageString(it, 400, "Invalid username or password")
|
||||
} else ResultMessageString(it, 400, "Username or password cannot be empty")
|
||||
}
|
||||
@@ -2588,6 +2580,7 @@ class WebApp(val listenPort: Int, var userlist: List<Pair<String, String>>, val
|
||||
}
|
||||
|
||||
ctx.result(objectmapper.writeValueAsString(result))
|
||||
db.logSemiAuto.Add(LogSemiauto.NewLog("SEMIAUTOWEB", "Initialized Web variables for user: ${user.username}"))
|
||||
} else ResultMessageString(ctx, 400, "User not found")
|
||||
} else ResultMessageString(ctx, 400, "Username is empty")
|
||||
}
|
||||
@@ -2619,6 +2612,7 @@ class WebApp(val listenPort: Int, var userlist: List<Pair<String, String>>, val
|
||||
db.queuetableDB.Resort()
|
||||
Logger.info { "SemiAutoWeb added to queue table: $qt" }
|
||||
ctx.result(objectmapper.writeValueAsString(resultMessage("OK")))
|
||||
db.logSemiAuto.Add(LogSemiauto.NewLog("SEMIAUTOWEB", "Added to queue table: $qt"))
|
||||
} else ResultMessageString(ctx, 500, "Failed to add to queue table")
|
||||
} else ResultMessageString(ctx, 400, "Broadcast zones cannot be empty")
|
||||
} else ResultMessageString(ctx, 400, "Tags cannot be empty")
|
||||
@@ -2650,19 +2644,18 @@ class WebApp(val listenPort: Int, var userlist: List<Pair<String, String>>, val
|
||||
.result(objectmapper.writeValueAsString(resultMessage("Failed to export log to XLSX")))
|
||||
}
|
||||
} else get1.status(400)
|
||||
.result(objectmapper.writeValueAsString(resultMessage("Invalid logdate")))
|
||||
.result(objectmapper.writeValueAsString(resultMessage("Invalid date format must be dd-MM-yyyy")))
|
||||
}
|
||||
path("Log") {
|
||||
get("/{datelog}") { ctx ->
|
||||
val datelog = ctx.pathParam("datelog")
|
||||
println("Request log for date: $datelog")
|
||||
if (ValiDateForLogHtml(datelog)) {
|
||||
db.GetLogForHtml(datelog) { loghtml ->
|
||||
val resultstring = objectmapper.writeValueAsString(loghtml)
|
||||
ctx.result(resultstring)
|
||||
}
|
||||
db.logSemiAuto.GetLogSemiAutoForHtml(datelog, null, {
|
||||
ctx.result(MariaDB.ArrayListtoString(it))
|
||||
},{ err ->
|
||||
ctx.status(500).result(objectmapper.writeValueAsString(resultMessage(err)))
|
||||
})
|
||||
|
||||
} else ResultMessageString(ctx, 400, "Invalid date format")
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
Reference in New Issue
Block a user