summaryrefslogtreecommitdiff
path: root/src
diff options
context:
space:
mode:
Diffstat (limited to 'src')
-rw-r--r--src/main/c/PcapOne/PcapOne.c311
-rw-r--r--src/main/c/common/assert_is.h39
-rw-r--r--src/main/c/common/commonbase.h (renamed from src/main/c/common/commonKludge.h)0
-rw-r--r--src/main/c/common/offset_of.h9
-rw-r--r--src/main/c/common/windoof.h59
-rw-r--r--src/main/c/paisa-fleet/FindFullDisks.c383
-rw-r--r--src/main/c/postshit/launch/mvn/mvn-launch.c214
-rw-r--r--src/main/c/postshit/launch/mvn/mvn-versions-set.c133
-rw-r--r--src/main/c/postshit/launch/openshift/ocexec.c152
-rw-r--r--src/main/docker/android-dev.Dockerfile44
-rw-r--r--src/main/docker/gateleen.Dockerfile65
-rw-r--r--src/main/docker/gcc-windoof.Dockerfile233
-rw-r--r--src/main/docker/gcc.Dockerfile220
-rw-r--r--src/main/docker/gxx.Dockerfile17
-rw-r--r--src/main/docker/jni.Dockerfile20
-rw-r--r--src/main/docker/jre8.Dockerfile27
-rw-r--r--src/main/docker/maven.Dockerfile35
-rw-r--r--src/main/docker/nginx.Dockerfile50
-rw-r--r--src/main/docker/zlib-deb.Dockerfile49
-rw-r--r--src/main/docker/zlib-mingw.Dockerfile51
l---------src/main/eagle1
-rw-r--r--src/main/firefox/gaga-plugin/main.js149
-rw-r--r--src/main/gimp/nek2023-scan2/arrange-pdf32
-rw-r--r--src/main/gimp/re-arrange-pdfs/re-arrange-pdfs.sh24
-rw-r--r--src/main/java/ch/hiddenalpha/unspecifiedgarbage/gateleenKludge/FailFastMetricsOptions.java35
-rw-r--r--src/main/java/ch/hiddenalpha/unspecifiedgarbage/gateleenKludge/FailFastVertxMetricsFactory.java27
-rw-r--r--src/main/java/ch/hiddenalpha/unspecifiedgarbage/gateleenKludge/tmoutissue20240123/DelegateHttpServerRequest.java394
-rw-r--r--src/main/java/ch/hiddenalpha/unspecifiedgarbage/gateleenKludge/tmoutissue20240123/DelegateVertxHttpServerRequestInternal.java475
-rw-r--r--src/main/java/ch/hiddenalpha/unspecifiedgarbage/gateleenKludge/tmoutissue20240123/DelegateVertxHttpServerResponse.java111
-rw-r--r--src/main/java/ch/hiddenalpha/unspecifiedgarbage/gateleenKludge/tmoutissue20240123/Foo.java131
-rw-r--r--src/main/java/ch/hiddenalpha/unspecifiedgarbage/gateleenKludge/tmoutissue20240123/HoustonInfoRequestTracer.java265
-rw-r--r--src/main/java/ch/hiddenalpha/unspecifiedgarbage/stream/StreamUtils.java15
-rw-r--r--src/main/java/ch/hiddenalpha/unspecifiedgarbage/time/TimeUtils.java6
-rw-r--r--src/main/java/org/apache/logging/slf4j/Log4jLogger.java104
-rw-r--r--src/main/lua/brgmt-logs/DigBrgmtLogs.lua5
-rw-r--r--src/main/lua/git/GitflowChangelogGen.lua195
-rw-r--r--src/main/lua/maven/MvnCentralDepScan.lua30
-rw-r--r--src/main/lua/misc/JavaCallgraph.lua159
-rw-r--r--src/main/lua/mshitteams/ListEmlInbox.lua322
-rw-r--r--src/main/lua/mshitteams/SendRawMsEmail.lua60
-rw-r--r--src/main/lua/paisa-fleet/FindFullDisks.lua322
-rw-r--r--src/main/lua/paisa-fleet/RmArtifactBaseDir.lua381
-rw-r--r--src/main/lua/paisa-jvm-memLeak/LogStatistics.lua112
-rw-r--r--src/main/lua/paisa-jvm-memLeak/MemLeakTry1.lua235
-rw-r--r--src/main/lua/paisa-logs/DigHoustonLogs.lua252
-rw-r--r--src/main/lua/paisa-logs/PaisaLogParser.lua435
-rw-r--r--src/main/lua/pcap/KubeProbeFilter.lua93
-rw-r--r--src/main/lua/pcap/extractDnsHosts.lua147
-rw-r--r--src/main/lua/pcap/httpStats.lua117
-rw-r--r--src/main/lua/pcap/tcpDataAmountStats.lua97
-rw-r--r--src/main/lua/pcap/tcpPortStats.lua82
-rw-r--r--src/main/lua/pcap/xServiceStats.lua90
-rw-r--r--src/main/lua/wireshark/HttpTime.lua2
-rw-r--r--src/main/nodejs/misc/ProduceLotsOfQueues.js119
-rw-r--r--src/main/nodejs/paisa-nonslim/README.txt3
-rw-r--r--src/main/nodejs/paisa-nonslim/foo.js902
-rw-r--r--src/main/patch/eagle/default-bak20211124-080400.patch103
-rw-r--r--src/main/patch/eagle/default-bak20230220-121000.patch102
-rw-r--r--src/main/patch/eagle/default-bak20231024-082300.patch101
-rw-r--r--src/main/patch/eagle/default.patch101
-rw-r--r--src/main/patch/eagle/simplelogger.patch33
-rw-r--r--src/main/patch/houston/default-20230203.patch52
-rw-r--r--src/main/patch/houston/default-20230214.patch56
-rw-r--r--src/main/patch/houston/default-20230331.patch56
-rw-r--r--src/main/patch/houston/default.patch88
-rw-r--r--src/main/patch/houston/fixidiots.patch365
-rw-r--r--src/main/patch/houston/future.patch47
-rw-r--r--src/main/patch/preflux/default.patch236
-rw-r--r--src/main/patch/slarti/default.patch31
-rw-r--r--src/main/php/sqlite-exec.php30
-rwxr-xr-xsrc/main/shell/BackupByRsync/backup.sh53
71 files changed, 8058 insertions, 1436 deletions
diff --git a/src/main/c/PcapOne/PcapOne.c b/src/main/c/PcapOne/PcapOne.c
deleted file mode 100644
index 2eb9e25..0000000
--- a/src/main/c/PcapOne/PcapOne.c
+++ /dev/null
@@ -1,311 +0,0 @@
-/* TODO fix this bullshit */
-typedef unsigned u_int;
-typedef unsigned short u_short;
-typedef unsigned char u_char;
-#include <pcap/pcap.h>
-/* endOf TODO */
-
-
-/* System */
-#include <assert.h>
-#include <stdlib.h>
-#include <stdint.h>
-#include <stdio.h>
-#include <string.h>
-
-static char const*const DEV_STDIN = "/dev/stdin";
-
-#define FLG_isHelp (1<<0)
-#define FLG_isTcpPsh (1<<3)
-#define FLG_isTcpRst (1<<4)
-#define FLG_isTcpSyn (1<<5)
-#define FLG_isTcpFin (1<<6)
-#define FLG_isHttpReq (1<<7)
-#define FLG_isLlLinux (1<<12)
-#define FLG_isHdrPrinted (1<<13)
-#define FLG_INIT (0)
-
-typedef struct PcapOne PcapOne;
-
-
-struct PcapOne {
- uint_least16_t flg;
- const char *dumpFilePath;
- char *pcapErrbuf;
- pcap_t *pcap;
- unsigned long frameNr;
- struct/*most recent frame*/{
- int llProto;
- int llHdrEnd;
- };
- struct/*most recent packet*/{
- int netProto;
- int netBodyLen;
- int netHdrEnd;
- int_fast32_t netTotLen;
- uint_least32_t ipSrcAddr, ipDstAddr;
- };
- struct/*most recent segment*/{
- int trspBodyLen;
- int trspSrcPort, trspDstPort;
- int trspHdrEnd;
- };
- struct/*most recent http requst*/{
- const uint8_t *httpReqHeadline;
- int httpReqHeadline_len;
- int httpReq_off; /* pkg offset from begin of most recent request */
- };
-};
-
-
-/*BEG func fwd decl*/
-static void parse_ll_LINUX_SLL( PcapOne*, const struct pcap_pkthdr*, const u_char* );
-static void parse_net_IPv4( PcapOne*, const struct pcap_pkthdr*, const u_char* );
-static void parse_trsp_TCP( PcapOne*, const struct pcap_pkthdr*, const u_char* );
-static void parse_appl_HTTP_req( PcapOne*, const struct pcap_pkthdr*, const u_char* );
-static void printParsingResults( PcapOne*, const struct pcap_pkthdr* );
-/*END func fwd decl*/
-
-static void printHelp(){
- #define STRQUOT_21a9ffbe344c0792ed88688d6c676359(s) #s
- #define STRQUOT(s) STRQUOT_21a9ffbe344c0792ed88688d6c676359(s)
- const char *basename = "/"__FILE__ + sizeof("/"__FILE__);
- for(; basename[-1] != '/'; --basename );
- printf("%s%s%s", " \n"
- " ", basename, " " STRQUOT(PROJECT_VERSION) "\n"
- " \n"
- " Options:\n"
- " \n"
- " --pcap-stdin\n"
- " Like --pcap but reading from stdin.\n"
- " \n"
- " --pcap <path>\n"
- " Pcap file to operate on. Compressed files are NOT supported.\n"
- " \n");
- #undef STRQUOT_21a9ffbe344c0792ed88688d6c676359
- #undef STRQUOT
-}
-
-
-static int parseArgs( PcapOne*app, int argc, char**argv ){
- app->flg = FLG_INIT;
- app->dumpFilePath = NULL;
- for( int iA = 1 ; iA < argc ; ++iA ){
- const char *arg = argv[iA];
- if(0){
- }else if( !strcmp(arg,"--help") ){
- app->flg |= FLG_isHelp; return 0;
- }else if( !strcmp(arg,"--pcap") ){
- arg = argv[++iA];
- if( arg == NULL ){ fprintf(stderr, "EINVAL --pcap needs value\n"); return -1; }
- app->dumpFilePath = arg;
- }else if( !strcmp(arg,"--pcap-stdin") ){
- app->dumpFilePath = DEV_STDIN;
- }else{
- fprintf(stderr, "EINVAL: %s\n", arg); return -1;
- }
- }
- if( app->dumpFilePath == NULL ){
- fprintf(stderr, "EINVAL Arg missing: --pcap <path>\n"); return -1; }
- return 0;
-}
-
-
-static void onPcapPkg( u_char*user, const struct pcap_pkthdr*hdr, const u_char*buf ){
- PcapOne *const app = (void*)user;
-
- /* prepare for this new packet */
- app->frameNr += 1;
- app->flg &= ~(FLG_isTcpPsh | FLG_isTcpRst | FLG_isTcpSyn | FLG_isTcpFin | FLG_isHttpReq);
-
- /* data-link layer */
- switch( pcap_datalink(app->pcap) ){
- case 0x71: parse_ll_LINUX_SLL(app, hdr, buf); break;
- default: assert(!fprintf(stderr,"pcap_datalink() -> 0x%02X\n", pcap_datalink(app->pcap)));
- }
-
- /* network layer */
- switch( app->llProto ){
- case 0x0800: parse_net_IPv4(app, hdr, buf); break;
- default: printf("???, proto=0x%04X, network-layer\n", app->llProto); return;
- }
-
- /* transport layer */
- switch( app->netProto ){
- case 0x06: parse_trsp_TCP(app, hdr, buf); break;
- default: printf("???, proto=0x%02X, transport-layer\n", app->netProto); return;
- }
-
- assert(app->trspBodyLen >= 0);
-
- /* application layer, towards server */
- switch( app->trspDstPort ){
- case 80: parse_appl_HTTP_req(app, hdr, buf); break;
- case 7012: parse_appl_HTTP_req(app, hdr, buf); break;
- case 8080: parse_appl_HTTP_req(app, hdr, buf); break;
- }
-
- printParsingResults(app, hdr);
-}
-
-
-static void parse_ll_LINUX_SLL( PcapOne*app, const struct pcap_pkthdr*hdr, const u_char*buf ){
- assert(hdr->caplen >= 15);
- app->llProto = buf[14]<<8 | buf[15];
- app->llHdrEnd = 16;
-}
-
-
-static void parse_net_IPv4( PcapOne*app, const struct pcap_pkthdr*hdr, const u_char*buf ){
- assert(hdr->caplen >= app->llHdrEnd+19 && "TODO_775afde7f19010220e9df8d5e2924c3e");
- int_fast8_t netHdrLen = (buf[app->llHdrEnd+0] & 0x0F) * 4;
- app->netTotLen = buf[app->llHdrEnd+2] << 8 | buf[app->llHdrEnd+3];
- app->netProto = buf[app->llHdrEnd+9];
- app->ipSrcAddr = 0
- | ((uint_least32_t)buf[app->llHdrEnd+12]) << 24
- | ((uint_least32_t)buf[app->llHdrEnd+13]) << 16
- | buf[app->llHdrEnd+14] << 8
- | buf[app->llHdrEnd+15] ;
- app->ipDstAddr = 0
- | ((uint_least32_t)buf[app->llHdrEnd+16]) << 24
- | ((uint_least32_t)buf[app->llHdrEnd+17]) << 16
- | buf[app->llHdrEnd+18] << 8
- | buf[app->llHdrEnd+19] ;
- app->netHdrEnd = app->llHdrEnd + netHdrLen;
- app->netBodyLen = app->netTotLen - netHdrLen;
-}
-
-
-static void parse_trsp_TCP( PcapOne*app, const struct pcap_pkthdr*hdr, const u_char*buf ){
- assert(hdr->caplen >= app->netHdrEnd+12 && "TODO_058d5f41043d383e1ba2c492d0db4b6a");
- app->trspSrcPort = buf[app->netHdrEnd+0] << 8 | buf[app->netHdrEnd+1];
- app->trspDstPort = buf[app->netHdrEnd+2] << 8 | buf[app->netHdrEnd+3];
- int tcpHdrLen = (buf[app->netHdrEnd+12] >> 4) * 4;
- app->trspHdrEnd = app->netHdrEnd + tcpHdrLen;
- app->trspBodyLen = app->netBodyLen - tcpHdrLen;
-}
-
-
-static void parse_appl_HTTP_req( PcapOne*app, const struct pcap_pkthdr*hdr, const u_char*buf ){
- app->flg |= FLG_isHttpReq;
- app->httpReqHeadline = buf + app->trspHdrEnd;
- app->httpReqHeadline_len = 0;
- for(;; ++app->httpReqHeadline_len ){
- if( (app->trspHdrEnd + app->httpReqHeadline_len) > hdr->caplen ) break;
- if( app->httpReqHeadline[app->httpReqHeadline_len] == '\r' ) break;
- if( app->httpReqHeadline[app->httpReqHeadline_len] == '\n' ) break;
- }
- /* TODO improve, as now its like a guess only */
- int isNewRequest = 0
- | !memcmp(buf + app->trspHdrEnd, "GET ", 4)
- | !memcmp(buf + app->trspHdrEnd, "PUT ", 4)
- | !memcmp(buf + app->trspHdrEnd, "POST ", 5)
- | !memcmp(buf + app->trspHdrEnd, "DELETE ", 7)
- ;
- if( isNewRequest ){
- app->httpReq_off = 0;
- }else{
- app->httpReq_off = 42; /*TODO make more accurate*/
- }
-}
-
-
-static void printParsingResults( PcapOne*app, const struct pcap_pkthdr*hdr ){
-
- int isHttpRequest = (app->flg & FLG_isHttpReq);
- int isHttpReqBegin = isHttpRequest && app->httpReq_off == 0;
-
- if( isHttpRequest && isHttpReqBegin ){
- /* find http method */
- const uint8_t *method = app->httpReqHeadline;
- int method_len = 0;
- for(;; ++method_len ){
- if( method_len > app->httpReqHeadline_len ) break;
- if( method[method_len] == ' ' ) break;
- }
- /* find http uri */
- const uint8_t *uri = method + method_len + 1;
- int uri_len = 0;
- for(;; ++uri_len ){
- if( method_len + uri_len > app->httpReqHeadline_len ) break;
- if( uri[uri_len] == ' ' ) break;
- }
- if( !(app->flg & FLG_isHdrPrinted) ){
- app->flg |= FLG_isHdrPrinted;
- printf("h;Title;HTTP requests\n");
- printf("c;epochSec;srcIp;dstIp;srcPort;dstPort;http_method;http_uri\n");
- }
- /* print it as a quick-n-dirty CSV record */
- printf("r;%ld.%06ld;%d.%d.%d.%d;%d.%d.%d.%d;%d;%d;%.*s;%.*s\n",
- hdr->ts.tv_sec, hdr->ts.tv_usec,
- app->ipSrcAddr >> 24, app->ipSrcAddr >> 16 & 0xFF, app->ipSrcAddr >> 8 & 0xFF, app->ipSrcAddr & 0xFF,
- app->ipDstAddr >> 24, app->ipDstAddr >> 16 & 0xFF, app->ipDstAddr >> 8 & 0xFF, app->ipDstAddr & 0xFF,
- app->trspSrcPort, app->trspDstPort,
- method_len, method, uri_len, uri);
- }
-}
-
-
-static int run( PcapOne*app ){
- int err;
- err = pcap_init(PCAP_CHAR_ENC_UTF_8, app->pcapErrbuf);
- if( err == PCAP_ERROR ){
- fprintf(stderr, "libpcap: %s\n", app->pcapErrbuf); err = -1; goto endFn; }
- app->pcap = pcap_open_offline(
- (app->dumpFilePath == DEV_STDIN) ? "-" : app->dumpFilePath,
- app->pcapErrbuf);
- if( app->pcap == NULL ){
- fprintf(stderr, "libpcap: %s\n", app->pcapErrbuf); err = -1; goto endFn; }
- for(;;){
- err = pcap_dispatch(app->pcap, -1, onPcapPkg, (void*)app);
- switch( err ){
- case PCAP_ERROR:
- fprintf(stderr, "pcap_dispatch(): %s\n", pcap_geterr(app->pcap));
- err = -1; goto endFn;
- case PCAP_ERROR_BREAK:
- case PCAP_ERROR_NOT_ACTIVATED:
- fprintf(stderr, "pcap_dispatch() -> %d\n", err);
- err = -1; goto endFn;
- }
- if( err > 0 ){
- fprintf(stderr, "Processed %d packages in this turn.\n", err);
- continue;
- }
- break;
- }
- err = 0;
-endFn:
- if( app->pcap != NULL ){ pcap_close(app->pcap); app->pcap = NULL; }
- return err;
-}
-
-
-int main( int argc, char**argv ){
- int err;
- static char errbuf[PCAP_ERRBUF_SIZE];
- errbuf[0] = '\0';
- PcapOne app = {
- .flg = FLG_INIT,
- .pcapErrbuf = errbuf,
- .pcap = NULL,
- .frameNr = 0,
- .trspBodyLen = 0,
- };
- #define app (&app)
-
- err = parseArgs(app, argc, argv);
- if( err ){ goto endFn; }
-
- if( app->flg & FLG_isHelp ){
- printHelp(); goto endFn; }
-
- err = run(app);
-
-endFn:
- if( err < 0 ) err = -err;
- if( err > 0x7F ) err = 1;
- return err;
- #undef app
-}
-
-
diff --git a/src/main/c/common/assert_is.h b/src/main/c/common/assert_is.h
new file mode 100644
index 0000000..316bf02
--- /dev/null
+++ b/src/main/c/common/assert_is.h
@@ -0,0 +1,39 @@
+
+#if !NDEBUG
+#define TPL_assert_is(T, PRED) static inline T*assert_is_##T(void*p,\
+const char*f,int l){if(p==NULL){fprintf(stderr,"assert(" STR_QUOT(T)\
+" != NULL) %s:%d\n",f,l);abort();}T*obj=p;if(!(PRED)){fprintf(stderr,\
+"ssert(type is \""STR_QUOT(T)"\") %s:%d\n",f,l);abort();}return p; }
+#else
+#define TPL_assert_is(T, PRED) static inline T*assert_is_##T(void*p,\
+const char*f,int l){return p;}
+#endif
+
+
+
+/* Example usage: */
+
+/* add some magic to your struct under check */
+typedef struct Person Person;
+struct Person {
+ char tYPE[sizeof"Hi, I'm a Person"];
+};
+
+/* instantiate a checker */
+TPL_assert_is(Person, !strcmp(obj->tYPE, "Hi, I'm a Person"))
+#define assert_is_Person(p) assert_is_Person(p, __FILE__, __LINE__)
+
+/* make sure magic is initialized (ALSO MAKE SURE TO PROPERLY INVALIDATE
+ * IT IN DTOR!)*/
+static void someCaller( void ){
+ Person p = {0};
+ strcpy(p.tYPE, "Hi, I'm a Person");
+ void *ptr = p; /*whops compiler cannot help us any longer*/
+ someCallee(ptr);
+}
+
+/* verify you reall got a Person*/
+static void someCallee( void*shouldBeAPerson ){
+ Person *p = assert_is_Person(shouldBeAPerson);
+}
+
diff --git a/src/main/c/common/commonKludge.h b/src/main/c/common/commonbase.h
index e0f0cba..e0f0cba 100644
--- a/src/main/c/common/commonKludge.h
+++ b/src/main/c/common/commonbase.h
diff --git a/src/main/c/common/offset_of.h b/src/main/c/common/offset_of.h
new file mode 100644
index 0000000..7d9179d
--- /dev/null
+++ b/src/main/c/common/offset_of.h
@@ -0,0 +1,9 @@
+#ifndef INCGUARD_yisgKqALPG4lfEqb
+#define INCGUARD_yisgKqALPG4lfEqb
+
+
+#define container_of(P, T, M) \
+ ((T*)( ((size_t)P) - ((size_t)((char*)&((T*)0)->M - (char*)0) )))
+
+
+#endif /* INCGUARD_yisgKqALPG4lfEqb */
diff --git a/src/main/c/common/windoof.h b/src/main/c/common/windoof.h
new file mode 100644
index 0000000..6ed9b41
--- /dev/null
+++ b/src/main/c/common/windoof.h
@@ -0,0 +1,59 @@
+
+#if 0
+# include <windows.h>
+#else
+
+#include <stdint.h>
+
+//#define HANDLE void*
+//typedef int BOOL;
+//typedef unsigned long LPDWORD;
+
+
+typedef struct _PROCESS_INFORMATION {
+ void* hProcess;
+ void* hThread;
+ uint32_t dwProcessId;
+ uint32_t dwThreadId;
+} PROCESS_INFORMATION, *PPROCESS_INFORMATION, *LPPROCESS_INFORMATION;
+
+
+typedef struct _SECURITY_ATTRIBUTES {
+ uint32_t nLength;
+ void* lpSecurityDescriptor;
+ int bInheritHandle;
+} SECURITY_ATTRIBUTES, *PSECURITY_ATTRIBUTES, *LPSECURITY_ATTRIBUTES;
+
+
+typedef struct _STARTUPINFOA {
+ uint32_t cb;
+ char *lpReserved;
+ char *lpDesktop;
+ char *lpTitle;
+ uint32_t dwX;
+ uint32_t dwY;
+ uint32_t dwXSize;
+ uint32_t dwYSize;
+ uint32_t dwXCountChars;
+ uint32_t dwYCountChars;
+ uint32_t dwFillAttribute;
+ uint32_t dwFlags;
+ short wShowWindow;
+ short cbReserved2;
+ uint8_t lpReserved2;
+ void *hStdInput, *hStdOutput, *hStdError;
+} STARTUPINFOA, *LPSTARTUPINFOA;
+
+
+
+int CreateProcessA( char const*, char*, LPSECURITY_ATTRIBUTES, LPSECURITY_ATTRIBUTES, int, uint32_t,
+ void*, char const*, LPSTARTUPINFOA, LPPROCESS_INFORMATION );
+
+
+int GetExitCodeProcess(void*, unsigned long*);
+
+
+
+
+
+#endif /*manual windoof on/off switch*/
diff --git a/src/main/c/paisa-fleet/FindFullDisks.c b/src/main/c/paisa-fleet/FindFullDisks.c
new file mode 100644
index 0000000..429b71c
--- /dev/null
+++ b/src/main/c/paisa-fleet/FindFullDisks.c
@@ -0,0 +1,383 @@
+#if 0
+
+true `# configure FindFullDisks for NORMAL systems` \
+ && CC=gcc \
+ && MKDIR_P="mkdir -p" \
+ && CFLAGS="-Wall -Werror -pedantic -Os -fmax-errors=1 -Wno-error=unused-variable -Wno-error=unused-function -Isrc/main/c -Iimport/include" \
+ && LDFLAGS="-Wl,-dn,-lgarbage,-lcJSON,-lexpat,-lmbedtls,-lmbedx509,-lmbedcrypto,-dy,-lpthread,-lws2_w32,-Limport/lib" \
+ && true
+
+true `# configure FindFullDisks for BROKEN systems` \
+ && CC=x86_64-w64-mingw32-gcc \
+ && MKDIR_P="mkdir -p" \
+ && CFLAGS="-Wall -Werror -pedantic -Os -fmax-errors=1 -Wno-error=unused-variable -Wno-error=unused-function -Isrc/main/c -Iimport/include" \
+ && LDFLAGS="-Wl,-dn,-lgarbage,-lcJSON,-lexpat,-lmbedtls,-lmbedx509,-lmbedcrypto,-dy,-lws2_32,-Limport/lib" \
+ && true
+
+true `# make FindFullDisks` \
+ && ${MKDIR_P:?} build/bin \
+ && ${CC:?} -o build/bin/findfulldisks $CFLAGS src/main/c/paisa-fleet/FindFullDisks.c $LDFLAGS \
+ && true
+
+#endif
+
+#include <assert.h>
+#include <stdio.h>
+#include <stdlib.h>
+#include <string.h>
+
+#include "Garbage.h"
+
+#define FLG_isHelp (1<<0)
+
+#if !NDEBUG
+# define REGISTER register
+# define LOGDBG(...) fprintf(stderr, __VA_ARGS__)
+# define IF_DBG(expr) expr
+#else
+# define REGISTER
+# define LOGDBG(...)
+# define IF_DBG(expr)
+#endif
+#define LOGERR(...) fprintf(stderr, __VA_ARGS__)
+
+
+
+typedef struct FindFullDisks FindFullDisks;
+typedef struct Device Device;
+
+
+#define MAGIC_FindFullDisks 0xB5410200
+struct FindFullDisks {
+ IF_DBG(int mAGIC);
+ int flg;
+ const char *sshUser;
+ int sshPort;
+ int maxParallel, numInProgress;
+ struct GarbageEnv **env;
+ struct Garbage_CsvIStream **csvSrc;
+ struct Garbage_Process **child;
+ char *inBuf;
+ int inBuf_cap, inBuf_len;
+ Device *devices;
+ int devices_cap, devices_cnt;
+ int iDevice; /* Next device to be triggered. */
+ int exitCode;
+};
+
+
+#define MAGIC_Device 0xAB420200
+struct Device {
+ IF_DBG(int mAGIC);
+ struct FindFullDisks *app;
+ char hostname[sizeof"lunkwill-0123456789AB_____"];
+ char eddieName[sizeof"eddie12345_____"];
+ char stdoutBuf[8192];
+ int stdoutBuf_cap, stdoutBuf_len;
+};
+
+
+/*BEG fwd decls*/
+static void beginNextDevice( void* );
+static void feedNextChunkFromStdinToCsvParser( void* );
+/*END fwd decls*/
+
+
+static void printHelp( void ){
+ printf("%s%s%s", " \n"
+ " ", strrchr(__FILE__,'/')+1, "\n"
+ " \n"
+ " Expected format on stdin is a CSV like:\n"
+ " \n"
+ " eddie00042 <SEMICOLON> lunkwill-ABBABEAFABBA <LF>\n"
+ " ...\n"
+ " \n"
+ " Options:\n"
+ " \n"
+ " --sshUser <str>\n"
+ " \n"
+ " --sshPort <int>\n"
+ " Default: 22\n"
+ " \n"
+ " --maxParallel <int>\n"
+ " Default 1.\n"
+ " \n");
+}
+
+
+static int parseArgs( int argc, char**argv, FindFullDisks*app ){
+ int iA = 1;
+ app->sshUser = NULL;
+ app->sshPort = 22;
+ app->maxParallel = 1;
+nextArg:;
+ const char *arg = argv[iA++];
+ if( arg == NULL ) goto validateArgs;
+ if( !strcmp(arg, "--help")){
+ app->flg |= FLG_isHelp; return 0;
+ }else if( !strcmp(arg, "--sshUser")){
+ arg = argv[iA++];
+ if( arg == NULL ){ LOGERR("EINVAL: Arg --sshUser needs value\n"); return -1; }
+ app->sshUser = arg;
+ }else if( !strcmp(arg, "--sshPort")){
+ arg = argv[iA++];
+ if( arg == NULL ){ LOGERR("EINVAL: Arg --sshPort needs value\n"); return -1; }
+ errno = 0;
+ app->sshPort = strtol(arg, NULL, 0);
+ if( errno ){ LOGERR("EINVAL: --sshPort %s\n", arg); return -1; }
+ }else if( !strcmp(arg, "--maxParallel")){
+ arg = argv[iA++];
+ if( arg == NULL ){ LOGERR("EINVAL: Arg --maxParallel needs value\n"); return -1; }
+ errno = 0;
+ app->maxParallel = strtol(arg, NULL, 0);
+ if( errno ){ LOGERR("EINVAL: --maxParallel %s\n", arg); return -1; }
+ }else{
+ LOGERR("EINVAL: %s\n", arg);
+ }
+ goto nextArg;
+validateArgs:;
+ if( app->sshUser == NULL ){ LOGERR("EINVAL: Arg --sshUser missing\n"); return -1; }
+ return 0;
+}
+
+
+static void no_op( void*_ ){}
+
+
+static void examineDeviceResult( void*device_ ){
+ REGISTER int err;
+ Device*const device = device_; assert(device->mAGIC = MAGIC_Device);
+ //FindFullDisks*const app = device->app; assert(app->mAGIC == MAGIC_FindFullDisks);
+ FILE *outFd = NULL;
+ if( device->stdoutBuf_len <= 0 ){ /*nothing to do*/ goto endFn; }
+ char outName[sizeof"result/eddie12345-lunkwill-1234567890123456.log"];
+ err = snprintf(outName, sizeof outName, "result/%s-%s.log", device->eddieName, device->hostname);
+ assert(err < sizeof outName);
+ outFd = fopen(outName, "wb");
+ if( outFd == NULL ){ LOGDBG("assert(fopen(%s) != %d) %s:%d\n", outName, errno, __FILE__, __LINE__); abort(); }
+ err = fwrite(device->stdoutBuf, 1, device->stdoutBuf_len, outFd);
+ assert(err == device->stdoutBuf_len);
+endFn:
+ if( outFd != NULL ) fclose(outFd);
+}
+
+
+static void Child_onStdout( const char*buf, int buf_len, void*cls ){
+ Device*const device = cls; assert(device->mAGIC = MAGIC_Device);
+ FindFullDisks*const app = device->app; assert(app->mAGIC == MAGIC_FindFullDisks);
+ if( buf_len > 0 ){ /*another chunk*/
+ if( device->stdoutBuf_len + buf_len >= device->stdoutBuf_cap ) assert(!"TODO_VD8CAIVAgBDwIA4mECAKVjAgB1XwIAfk");
+ memcpy(device->stdoutBuf + device->stdoutBuf_len, buf, buf_len);
+ device->stdoutBuf_len += buf_len;
+ //printf("%.*s", buf_len, buf);
+ }else{ /*EOF*/
+ assert(buf_len == 0);
+ }
+}
+
+
+static void Child_onJoined( int retval, int exitCode, int sigNum, void*cls ){
+ Device*const device = cls; assert(device->mAGIC == MAGIC_Device);
+ FindFullDisks*const app = device->app; assert(app->mAGIC == MAGIC_FindFullDisks);
+ if( retval != 0 || exitCode != 0 || sigNum != 0 ){
+ LOGDBG("[DEBUG] %s(%d, %d, %d)\n", __func__, retval, exitCode, sigNum);
+ }
+ assert(app->numInProgress > 0);
+ app->numInProgress -= 1;
+ (*app->env)->enqueBlocking(app->env, examineDeviceResult, device);
+ (*app->env)->enqueBlocking(app->env, beginNextDevice, app);
+}
+
+
+static void visitDevice( FindFullDisks*app, Device*device ){
+ assert(device != NULL && device->mAGIC == MAGIC_Device);
+ assert(device < app->devices + app->devices_cnt);
+ LOGERR("\n[INFO ] %s \"%s\" (behind \"%s\")\n", __func__, device->hostname, device->eddieName);
+ int err;
+ char eddieCmd[2048];
+ //err = snprintf(eddieCmd, sizeof eddieCmd, "true"
+ // " && HOSTNAME=$(hostname|sed 's_.pnet.ch__')"
+ // " && STAGE=$PAISA_ENV"
+ // " && printf \"remoteEddieName=$HOSTNAME, remoteStage=$STAGE\\n\""
+ // " && if test \"$(echo ${HOSTNAME}|sed -E 's_^vted_teddie_g')\" != \"%s\"; then true"
+ // " && echo wrong host. Want %s found $HOSTNAME && false"
+ // " ;fi"
+ // " && df",
+ // device->eddieName, device->eddieName
+ //);
+ err = snprintf(eddieCmd, sizeof eddieCmd, "true"
+ " && HOSTNAME=$(hostname|sed 's_.pnet.ch__')"
+ " && STAGE=$PAISA_ENV"
+ " && printf \"remoteEddieName=$HOSTNAME, remoteStage=$STAGE\\n\""
+ " && if test \"$(echo ${HOSTNAME}|sed -E 's_^vted_teddie_g')\" != \"%s\"; then true"
+ " && echo wrong host. Want %s found $HOSTNAME && false"
+ " ;fi"
+ " && ssh -oStrictHostKeyChecking=no -oUserKnownHostsFile=/dev/null"
+ " -p%d %s@%s"
+ " -- sh -c 'true"
+ " && HOSTNAME=$(hostname|sed '\"'\"'s_.isa.localdomain__'\"'\"')"
+ " && STAGE=$PAISA_ENV"
+ " && printf \"remoteHostname=$HOSTNAME, remoteStage=$STAGE\\n\""
+ // on some machine, df failed with "Stale file handle" But I want to
+ // continue with next device regardless of such errors.
+ " && df || true"
+ "'",
+ device->eddieName, device->eddieName, app->sshPort, app->sshUser,
+ strncmp("fook-",device->hostname,5) ? device->hostname : "fook"
+ );
+ assert(err < sizeof eddieCmd);
+ assert(app->sshPort > 0 && app->sshPort <= 0xFFFF);
+ char sshPortStr[sizeof"65535"];
+ err = snprintf(sshPortStr, sizeof sshPortStr, "%d", app->sshPort);
+ assert(err < (int)sizeof sshPortStr);
+ char userAtEddie[64];
+ err = snprintf(userAtEddie, sizeof userAtEddie, "%s@%s", app->sshUser, device->eddieName);
+ assert(err < sizeof userAtEddie);
+ char *childArgv[] = { "ssh",
+ "-oRemoteCommand=none",
+ "-oStrictHostKeyChecking=no",
+ "-oUserKnownHostsFile=/dev/null",
+ "-oConnectTimeout=4",
+ "-p", sshPortStr,
+ userAtEddie,
+ "--", "sh", "-c", eddieCmd,
+ NULL
+ };
+ //LOGDBG("CMDLINE:");
+ //for( int i = 0 ; childArgv[i] != NULL ; ++i ) LOGDBG(" \"%s\"", childArgv[i]);
+ //LOGDBG("\n\n");
+ app->child = (*app->env)->newProcess(app->env, &(struct Garbage_Process_Mentor){
+ .cls = device,
+ .usePathSearch = !0,
+ .argv = childArgv,
+ .onStdout = Child_onStdout,
+ //.onStderr = ,
+ .onJoined = Child_onJoined,
+ });
+ assert(app->child != NULL);
+ (*app->child)->join(app->child, 42000);
+}
+
+
+static void beginNextDevice( void*cls ){
+ FindFullDisks*const app = cls; assert(app->mAGIC == MAGIC_FindFullDisks);
+maybeBeginAnotherOne:
+ if( app->numInProgress >= app->maxParallel ){
+ //LOGDBG("[DEBUG] Already %d/%d in progress. Do NOT trigger more for now.\n",
+ // app->numInProgress, app->maxParallel);
+ goto endFn;
+ }
+ if( app->iDevice >= app->devices_cnt ){
+ //LOGDBG("[INFO ] Work on %d devices triggered. No more devices to trigger.\n", app->iDevice);
+ goto endFn;
+ }
+ assert(app->iDevice >= 0 && app->iDevice < INT_MAX);
+ app->iDevice += 1;
+ assert(app->numInProgress >= 0 && app->numInProgress < INT_MAX);
+ app->numInProgress += 1;
+ visitDevice(app, app->devices + app->iDevice - 1);
+ goto maybeBeginAnotherOne;
+endFn:;
+}
+
+
+static void onCsvRow( struct Garbage_CsvIStream_BufWithLength*row, int numCols, void*cls ){
+ REGISTER int err;
+ FindFullDisks*const app = cls; assert(app->mAGIC == MAGIC_FindFullDisks);
+ if( app->exitCode ) return;
+ if( numCols != 2 ){
+ LOGERR("[ERROR] Expected 2 column in input CSV but found %d\n", numCols);
+ app->exitCode = -1; return;
+ }
+ if( app->devices_cap <= app->devices_cnt ){
+ app->devices_cap += 4096;
+ void *tmp = realloc(app->devices, app->devices_cap*sizeof*app->devices);
+ if( tmp == NULL ) assert(!"TODO_c04CAJtRAgDYWQIAm10CAOAeAgA0KgIA");
+ app->devices = tmp;
+ }
+ #define DEVICE (app->devices + app->devices_cnt)
+ IF_DBG(DEVICE->mAGIC = MAGIC_Device);
+ DEVICE->app = app;
+ DEVICE->stdoutBuf_cap = sizeof DEVICE->stdoutBuf / sizeof*DEVICE->stdoutBuf;
+ if( row[0].len >= sizeof DEVICE->eddieName ){
+ LOGERR("[ERROR] eddieName too long: len=%d\n", row[0].len);
+ app->exitCode = -1; return;
+ }
+ if( row[1].len >= sizeof DEVICE->hostname ){
+ LOGERR("[ERROR] hostname too long: len=%d\n", row[1].len);
+ app->exitCode = -1; return;
+ }
+ memcpy(DEVICE->eddieName, row[0].buf, row[0].len);
+ DEVICE->eddieName[row[0].len] = '\0';
+ memcpy(DEVICE->hostname, row[1].buf, row[1].len);
+ DEVICE->hostname[row[1].len] = '\0';
+ #undef DEVICE
+ app->devices_cnt += 1;
+}
+
+
+static void onCsvParserCloseSnkDone( int retval, void*app_ ){
+ FindFullDisks*const app = app_; assert(app->mAGIC == MAGIC_FindFullDisks);
+ LOGDBG("[DEBUG] Found %d devices in input.\n", app->devices_cnt);
+ (*app->env)->enqueBlocking(app->env, beginNextDevice, app);
+}
+
+
+static void onCsvParserWriteDone( int retval, void*cls ){
+ FindFullDisks*const app = cls; assert(app->mAGIC == MAGIC_FindFullDisks);
+ if( retval <= 0 ){ LOGDBG("assert(retval != %d) %s:%d\n", retval, __FILE__, __LINE__); abort(); }
+ (*app->env)->enqueBlocking(app->env, feedNextChunkFromStdinToCsvParser, app);
+}
+
+
+static void feedNextChunkFromStdinToCsvParser( void*cls ){
+ REGISTER int err;
+ FindFullDisks*const app = cls; assert(app->mAGIC == MAGIC_FindFullDisks);
+ if( app->exitCode ) return;
+ #define SRC (stdin)
+ if( app->inBuf == NULL || app->inBuf_cap < 1<<15 ){
+ app->inBuf_cap = 1<<15;
+ if( app->inBuf ) free(app->inBuf);
+ app->inBuf = malloc(app->inBuf_cap*sizeof*app->inBuf);;
+ if( app->inBuf == NULL ){ assert(!"TODO_TT8CAGQLAgCoawIA9jgCANA6AgBTaAIA"); }
+ }
+ err = fread(app->inBuf, 1, app->inBuf_cap, SRC);
+ if( err <= 0 ){
+ (*app->csvSrc)->closeSnk(app->csvSrc, onCsvParserCloseSnkDone, app);
+ return;
+ }
+ app->inBuf_len = err;
+ (*app->csvSrc)->write(app->inBuf, app->inBuf_len, app->csvSrc, onCsvParserWriteDone, app);
+ #undef SRC
+}
+
+
+static void initCsvParserForDeviceListOnStdin( void*cls ){
+ FindFullDisks*const app = cls; assert(app->mAGIC == MAGIC_FindFullDisks);
+ static struct Garbage_CsvIStream_Mentor csvMentor = {
+ .onCsvRow = onCsvRow,
+ .onCsvDocEnd = no_op,
+ };
+ struct Garbage_CsvIStream_Opts csvOpts = { .delimCol = ';' };
+ app->csvSrc = (*app->env)->newCsvIStream(app->env, &csvOpts, &csvMentor, app);
+ feedNextChunkFromStdinToCsvParser(app);
+}
+
+
+int main( int argc, char**argv ){
+ void *envMemory[SIZEOF_struct_GarbageEnv/sizeof(void*)];
+ FindFullDisks app = {0}; assert((void*)0 == NULL);
+ #define app (&app)
+ IF_DBG(app->mAGIC = MAGIC_FindFullDisks);
+ if( parseArgs(argc, argv, app) ){ app->exitCode = -1; goto endFn; }
+ if( app->flg & FLG_isHelp ){ printHelp(); goto endFn; }
+ app->env = GarbageEnv_ctor(envMemory, sizeof envMemory);
+ assert(app->env != NULL);
+ (*app->env)->enqueBlocking(app->env, initCsvParserForDeviceListOnStdin, app);
+ (*app->env)->runUntilDone(app->env);
+endFn:
+ return !!app->exitCode;
+ #undef app
+}
+
+
diff --git a/src/main/c/postshit/launch/mvn/mvn-launch.c b/src/main/c/postshit/launch/mvn/mvn-launch.c
new file mode 100644
index 0000000..8886e9e
--- /dev/null
+++ b/src/main/c/postshit/launch/mvn/mvn-launch.c
@@ -0,0 +1,214 @@
+/*
+
+ Shitty policies require shitty workarounds. Standard maven ships with a 'cmd'
+ file for its execution. But as some shiny 'security' policies forbid
+ execution of 'cmd' files, we need to waste our time writing stuff like this
+ instead of doing our work. Grrr...
+
+ ${CC:?} -o build/bin/mvn-launch.exe \
+ -Wall -Werror -fmax-errors=3 -Wno-error=unused-function -Wno-error=unused-variable \
+ -DPROJECT_VERSION=0.0.0-$(date -u +%s) \
+ src/main/c/postshit/launch/mvn/mvn-launch.c \
+
+*/
+
+#include <windows.h>
+
+#include <assert.h>
+#include <errno.h>
+#include <stdio.h>
+#include <string.h>
+
+#define LOGERR(...) fprintf(stderr, __VA_ARGS__)
+#define LOGDBG(...) fprintf(stderr, __VA_ARGS__)
+
+#define STR_QUOT_3q9o58uhzjad(s) #s
+#define STR_QUOT(s) STR_QUOT_3q9o58uhzjad(s)
+
+
+/* Appends exactly src_len bytes of 'src' to 'dst' (no quoting, no NUL
+ * terminator) and advances *dst_len. Returns 0 on success, or -ENOBUFS
+ * (leaving dst untouched) when the bytes would exceed dst_cap. */
+static int appendRaw( char*dst, int*dst_len, int dst_cap, const char*src, int src_len ){
+  if( dst_cap < *dst_len + src_len ){
+    LOGERR("ENOBUFS: %s Cannot add: %.*s\n", strrchr(__FILE__,'/')+1, src_len, src);
+    return -ENOBUFS;
+  }
+  memcpy(dst + *dst_len, src, src_len);
+  *dst_len += src_len;
+  return 0;
+}
+
+
+/* Copies src into dst byte-by-byte, rejecting embedded '"' characters
+ * (escaping them is not implemented). Advances *dst_len per byte copied.
+ * Returns 0, -ENOBUFS when src would not fit, or -ENOTSUP on a quote
+ * (in which case the bytes before the quote have already been written). */
+static int appendQuotEscaped( char*dst, int*dst_len, int dst_cap, const char*src, int src_len ){
+  int i;
+  if( dst_cap < *dst_len + src_len ){
+    LOGDBG("ENOBUFS: %s: cannot append \"%.*s\"\n", strrchr(__FILE__,'/')+1, src_len, src);
+    return -ENOBUFS;
+  }
+  for( i = 0 ; i < src_len ; ++i ){
+    if( src[i] == '"' ){
+      LOGERR("ENOTSUP: Quotes in args not impl. %s:%d\n", __FILE__, __LINE__);
+      return -ENOTSUP;
+    }
+    dst[(*dst_len)++] = src[i];
+  }
+  return 0;
+}
+
+
+/* Appends ' "<newArg>"' (leading space, arg surrounded by double quotes)
+ * to cmdline and advances *cmdline_len. Returns 0, -ENOBUFS when it would
+ * not fit, or -ENOTSUP when newArg itself contains a '"' (escaping not
+ * implemented).
+ * NOTE(review): on the -ENOTSUP path cmdline_len has already been advanced
+ * past the partially copied arg; all callers abort on any error and only
+ * print the buffer for debugging, so the torn tail is tolerated. */
+static int appendArg( char*cmdline, int*cmdline_len, int cmdline_cap, const char*newArg, int newArg_len ){
+  #define cmdline_len (*cmdline_len)
+  register int err;
+  /* sizeof" \"\"" is 4: space + two quotes + one spare byte for a NUL */
+  if( cmdline_cap < cmdline_len + newArg_len + sizeof" \"\"" ){
+    LOGERR("ENOBUFS: Cmdline too long. %s:%d\n", strrchr(__FILE__,'/')+1, __LINE__);
+    err = -ENOBUFS; goto endFn;
+  }
+  cmdline[cmdline_len++] = ' ';
+  cmdline[cmdline_len++] = '"';
+  for( err = 0 ; err < newArg_len ; ++err ){
+    if( newArg[err] == '"' ){
+      LOGERR("ENOTSUP: Quotes in args not impl. %s:%d\n", strrchr(__FILE__,'/')+1, __LINE__);
+      err = -ENOTSUP; goto endFn;
+    }
+    cmdline[cmdline_len++] = newArg[err];
+  }
+  cmdline[cmdline_len++] = '"';
+  err = 0;
+endFn:
+  return err;
+  #undef cmdline_len
+}
+
+
+/* Reads environment variable 'envKey' and, when it is set and non-empty,
+ * appends its value as one quoted argument to cmdline. Absent or empty
+ * variables are silently skipped. Returns 0 on success (also when
+ * skipped), or a negative errno-style code. */
+static int appendFromEnvironIfNotEmpty( char*cmdline, int*cmdline_len, int cmdline_cap, const char*envKey ){
+  assert(envKey != NULL);
+  register int err;
+  char envval[0x7FFF];
+  const int envval_cap = sizeof envval;
+  err = GetEnvironmentVariable(envKey, envval, envval_cap-1);
+  if( err >= envval_cap-1 ){
+    LOGERR("ENOBUFS: environ.%s too long. %s:%d\n", envKey, strrchr(__FILE__,'/')+1, __LINE__);
+    err = -ENOBUFS; goto endFn;
+  }
+  if( err > 0 ){ /*variable exists and is non-empty*/
+    /* appendArg advances *cmdline_len itself; the previous extra
+     * 'cmdline_len += err' would have double-counted had appendArg ever
+     * returned a positive length, so it is gone. */
+    err = appendArg(cmdline, cmdline_len, cmdline_cap, envval, err);
+    if( err < 0 ){ LOGDBG("[TRACE] at %s:%d\n", __FILE__, __LINE__); goto endFn; }
+  }
+  err = 0;
+endFn:
+  return err;
+}
+
+
+/* Builds the java command line that bootstraps maven via plexus-classworlds
+ * (thereby avoiding the policy-forbidden 'mvn.cmd'), spawns it, waits for
+ * it and forwards its exit code (negated codes are flipped positive). */
+int main( int argc, char**argv ){
+  register int err;
+  /* BUGFIX: declared and initialized before the first 'goto endFn', so the
+   * debug print at 'endFn' never reads an uninitialized cmdline_len (the
+   * old code could jump over these initializers from the help block). */
+  char cmdline[32767]; /*[length](https://stackoverflow.com/questions/3205027/#comment17734587_3205048)*/
+  cmdline[0] = '\0';
+  const int cmdline_cap = sizeof cmdline;
+  int cmdline_len = 0;
+
+  /* LAUNCHR_HELP set to anything means: print version/usage and quit. */
+  char tmp[2];
+  err = GetEnvironmentVariable("LAUNCHR_HELP", tmp, 1);
+  if( err == 0 ){
+    if( GetLastError() != ERROR_ENVVAR_NOT_FOUND ){
+      LOGERR("ERROR: GetEnvironmentVariable(LAUNCHR_HELP): %lu. %s:%d\n", GetLastError(), __FILE__, __LINE__);
+      err = -1; goto endFn; }
+    /*no such variable. interpret as no-help-wanted*/;
+  }else{
+    printf("\n %s " STR_QUOT(PROJECT_VERSION) "\n \n Delegates the call to maven without 'cmd' files.\n\n", strrchr(__FILE__,'/')+1);
+    err = -1; goto endFn;
+  }
+
+  /* Resolve %USERNAME%, as java/maven live below the user's home. */
+  char username[16];
+  const int username_cap = sizeof username;
+  err = GetEnvironmentVariable("USERNAME", username, username_cap);
+  if( err == 0 ){ LOGERR("ERROR: GetEnvironmentVariable(USERNAME) -> 0x%lX\n", GetLastError());
+    err = -1; goto endFn; }
+  if( err > username_cap ){
+    LOGERR("ENOBUFS: environ.USERNAME too long. %s:%d\n", strrchr(__FILE__,'/')+1, __LINE__);
+    err = -1; goto endFn; }
+  assert(err > 0);
+  const int username_len = err;
+
+  /* java.exe, optional env-provided JVM opts, classworlds bootstrap. */
+  err = 0
+    || appendRaw(cmdline, &cmdline_len, cmdline_cap, "C:/Users/", 9) < 0
+    || appendRaw(cmdline, &cmdline_len, cmdline_cap, username, username_len) < 0
+    || appendRaw(cmdline, &cmdline_len, cmdline_cap, "/.opt/java/bin/java.exe", 23) < 0
+    || appendFromEnvironIfNotEmpty(cmdline, &cmdline_len, cmdline_cap, "JVM_CONFIG_MAVEN_PROPS") < 0
+    || appendFromEnvironIfNotEmpty(cmdline, &cmdline_len, cmdline_cap, "MAVEN_OPTS") < 0
+    || appendFromEnvironIfNotEmpty(cmdline, &cmdline_len, cmdline_cap, "MAVEN_DEBUG_OPTS") < 0
+    || appendRaw(cmdline, &cmdline_len, cmdline_cap, " -classpath", 11) < 0
+    || appendRaw(cmdline, &cmdline_len, cmdline_cap, " C:/Users/", 10) < 0
+    || appendRaw(cmdline, &cmdline_len, cmdline_cap, username, username_len) < 0
+    || appendRaw(cmdline, &cmdline_len, cmdline_cap, "/.opt/maven/boot/plexus-classworlds-2.5.2.jar", 45) < 0
+    || appendRaw(cmdline, &cmdline_len, cmdline_cap, " -Dclassworlds.conf=C:/Users/", 29) < 0
+    || appendRaw(cmdline, &cmdline_len, cmdline_cap, username, username_len) < 0
+    || appendRaw(cmdline, &cmdline_len, cmdline_cap, "/.opt/maven/bin/m2.conf", 23) < 0
+    || appendRaw(cmdline, &cmdline_len, cmdline_cap, " -Dmaven.home=C:/Users/", 23) < 0
+    || appendRaw(cmdline, &cmdline_len, cmdline_cap, username, username_len) < 0
+    || appendRaw(cmdline, &cmdline_len, cmdline_cap, "/.opt/maven", 11) < 0
+    ;
+  if( err ){ LOGDBG("[TRACE] at %s:%d\n", __FILE__, __LINE__); goto endFn; }
+
+  /* Maven wants the project dir with forward slashes. */
+  char workDir[0x7FFF];
+  const int workDir_cap = sizeof workDir;
+  err = GetCurrentDirectory(workDir_cap, workDir);
+  if( err == 0 ){
+    LOGERR("ERROR: GetCurrentDirectory() -> 0x%lX. %s:%d\n", GetLastError(), strrchr(__FILE__,'/')+1, __LINE__);
+    err = -1; goto endFn; }
+  if( err >= workDir_cap ){
+    LOGERR("ENOBUFS: Working dir too long. %s:%d\n", strrchr(__FILE__,'/')+1, __LINE__);
+    err = -ENOBUFS; goto endFn; }
+  assert(err > 0);
+  const int workDir_len = err;
+  for( err = 0 ; err < workDir_len ; ++err ){ if( workDir[err] == '\\' ) workDir[err] = '/'; }
+
+  err = 0
+    || appendRaw(cmdline, &cmdline_len, cmdline_cap, " \"-Dmaven.multiModuleProjectDirectory=", 38) < 0
+    || appendQuotEscaped(cmdline, &cmdline_len, cmdline_cap, workDir, workDir_len) < 0
+    || appendRaw(cmdline, &cmdline_len, cmdline_cap, "\"", 1) < 0
+    || appendRaw(cmdline, &cmdline_len, cmdline_cap, " org.codehaus.plexus.classworlds.launcher.Launcher", 50) < 0
+    ;
+  if( err ){ LOGDBG("[TRACE] at %s:%d", __FILE__, __LINE__); err = -1; goto endFn; }
+
+  /*append all other args*/
+  for( int iA=1 ; iA < argc ; ++iA ){
+    char *arg = argv[iA];
+    err = appendArg(cmdline, &cmdline_len, cmdline_cap, arg, strlen(arg));
+    if( err < 0 ){ LOGDBG("[TRACE] at %s:%d\n", __FILE__, __LINE__); goto endFn; }
+  }
+
+  /* BUGFIX: CreateProcessA parses the command line up to a NUL, but the
+   * append*() helpers only track a length and never wrote one. */
+  if( cmdline_len >= cmdline_cap ){
+    LOGERR("ENOBUFS: Cmdline too long. %s:%d\n", strrchr(__FILE__,'/')+1, __LINE__);
+    err = -1; goto endFn; }
+  cmdline[cmdline_len] = '\0';
+
+  STARTUPINFOA startInfo = { .lpDesktop = NULL, .lpTitle = NULL, .dwFlags = 0, };
+  startInfo.cb = sizeof(startInfo);
+  PROCESS_INFORMATION proc;
+  err = CreateProcessA(NULL, cmdline, NULL, NULL, !0, 0, NULL, NULL, &startInfo, &proc);
+  if( err == 0 ){
+    LOGERR("ERROR: CreateProcess(): 0x%0lX. %s:%d\n", GetLastError(), strrchr(__FILE__,'/')+1, __LINE__);
+    err = -1; goto endFn;
+  }
+  err = WaitForSingleObject(proc.hProcess, INFINITE);
+  if( err != WAIT_OBJECT_0 ){ LOGERR("ERROR: WaitForSingleObject() -> %lu. %s:%d\n", GetLastError(), strrchr(__FILE__,'/')+1, __LINE__);
+    err = -1; goto endFn; }
+  long unsigned exitCode;
+  err = GetExitCodeProcess(proc.hProcess, &exitCode);
+  if( err == 0 ){ LOGERR("ERROR: GetExitCodeProcess(): %lu. %s:%d\n", GetLastError(), strrchr(__FILE__,'/')+1, __LINE__);
+    err = -1; goto endFn; }
+  if( (exitCode & 0x7FFFFFFF) != exitCode ){
+    LOGERR("EDOM: Exit code %lu out of bounds. %s:%d\n", exitCode, strrchr(__FILE__,'/')+1, __LINE__);
+    err = -1; goto endFn;
+  }
+  err = exitCode;
+endFn:
+  if( err != 0 && cmdline_len > 0 ){ LOGDBG("[DEBUG] %.*s\n", cmdline_len, cmdline); }
+  if( err < 0 ) err = -err;
+  return err;
+}
+
diff --git a/src/main/c/postshit/launch/mvn/mvn-versions-set.c b/src/main/c/postshit/launch/mvn/mvn-versions-set.c
new file mode 100644
index 0000000..888183d
--- /dev/null
+++ b/src/main/c/postshit/launch/mvn/mvn-versions-set.c
@@ -0,0 +1,133 @@
+/*
+
+ Shitty policies require shitty workarounds. Standard maven ships with a 'cmd'
+ file for its execution. But as some shiny 'security' policies forbid
+ execution of 'cmd' files, we need to waste our time writing stuff like this
+ instead of doing our work. Grrr...
+
+ ${CC:?} -o build/bin/mvn-versions-set.exe \
+ -Wall -Werror -fmax-errors=3 -Wno-error=unused-function -Wno-error=unused-variable \
+ -DPROJECT_VERSION=0.0.0-$(date -u +%s) \
+ src/main/c/postshit/launch/mvn/mvn-versions-set.c \
+
+*/
+
+#include <windows.h>
+#include <assert.h>
+#include <stdio.h>
+
+#define LOGERR(...) fprintf(stderr, __VA_ARGS__)
+#define LOGDBG(...) fprintf(stderr, __VA_ARGS__)
+
+#define STR_QUOT_3q9o58uhzjad(s) #s
+#define STR_QUOT(s) STR_QUOT_3q9o58uhzjad(s)
+
+
+/* Appends exactly src_len bytes of 'src' to 'dst' (no quoting, no NUL
+ * terminator) and advances *dst_len. Returns 0 on success, or -ENOBUFS
+ * (leaving dst untouched) when the bytes would exceed dst_cap. */
+static int appendRaw( char*dst, int*dst_len, int dst_cap, const char*src, int src_len ){
+  if( dst_cap < *dst_len + src_len ){
+    LOGERR("ENOBUFS: %s Cannot add: %.*s\n", strrchr(__FILE__,'/')+1, src_len, src);
+    return -ENOBUFS;
+  }
+  memcpy(dst + *dst_len, src, src_len);
+  *dst_len += src_len;
+  return 0;
+}
+
+
+/* Copies src into dst byte-by-byte, rejecting embedded '"' characters
+ * (escaping them is not implemented). Advances *dst_len per byte copied.
+ * Returns 0, -ENOBUFS when src would not fit, or -ENOTSUP on a quote
+ * (in which case the bytes before the quote have already been written). */
+static int appendQuotEscaped( char*dst, int*dst_len, int dst_cap, const char*src, int src_len ){
+  int i;
+  if( dst_cap < *dst_len + src_len ){
+    LOGDBG("ENOBUFS: %s: cannot append \"%.*s\"\n", strrchr(__FILE__,'/')+1, src_len, src);
+    return -ENOBUFS;
+  }
+  for( i = 0 ; i < src_len ; ++i ){
+    if( src[i] == '"' ){
+      LOGERR("ENOTSUP: Quotes in args not impl. %s:%d\n", __FILE__, __LINE__);
+      return -ENOTSUP;
+    }
+    dst[(*dst_len)++] = src[i];
+  }
+  return 0;
+}
+
+
+/* Runs 'mvn versions:set -DgenerateBackupPoms=false -DnewVersion=<arg>'
+ * directly (avoiding the policy-forbidden cmd files), waits for it and
+ * forwards its exit code (negated codes are flipped positive).
+ * NOTE(review): strcmp/strlen/strrchr and ENOBUFS/ENOTSUP are used but the
+ * file only includes windows.h/assert.h/stdio.h - it likely also needs
+ * <string.h> and <errno.h>; confirm windows.h does not pull them in. */
+int main( int argc, char**argv ){
+  register int err;
+  int isHelp = 0;
+  const char *newVersion = NULL;
+  /* BUGFIX: declared and initialized before the first 'goto endFn', so the
+   * debug print at 'endFn' never reads an uninitialized cmdline_len (the
+   * old code could jump over these initializers from the arg parsing). */
+  char cmdline[32767]; /*[length](https://stackoverflow.com/questions/3205027/#comment17734587_3205048)*/
+  cmdline[0] = '\0';
+  const int cmdline_cap = sizeof cmdline;
+  int cmdline_len = 0;
+
+  /*parse args*/
+  for( err = 1 ; err < argc ; ++err ){
+    const char *arg = argv[err];
+    if( !strcmp(arg, "--help") ){
+      isHelp = !0; break;
+    }else if( newVersion == NULL ){
+      newVersion = arg;
+    }else{
+      LOGERR("EINVAL: Only ONE arg expected. But got: %s\n", arg); err = -1; goto endFn;
+    }
+  }
+  if( isHelp ){
+    printf("\n"
+      " %s " STR_QUOT(PROJECT_VERSION) "\n"
+      " \n"
+      " Set a specific maven version. Usage:\n"
+      " \n"
+      " %s 0.0.0-SNAPSHOT\n"
+      "\n", strrchr(__FILE__,'/')+1, argv[0]);
+    err = -1; goto endFn;
+  }
+  if( newVersion == NULL ){
+    LOGERR("EINVAL: new version to use missing. Try --help\n");
+    err = -1; goto endFn;
+  }
+  const int newVersion_len = strlen(newVersion);
+
+  err = 0
+    || appendRaw(cmdline, &cmdline_len, cmdline_cap, "mvn versions:set -DgenerateBackupPoms=false \"-DnewVersion=", 58) < 0
+    /* '< 0' added for consistency with the neighbor operands (a negative
+     * return already evaluated truthy before, so behavior is unchanged) */
+    || appendQuotEscaped(cmdline, &cmdline_len, cmdline_cap, newVersion, newVersion_len) < 0
+    || appendRaw(cmdline, &cmdline_len, cmdline_cap, "\"", 1) < 0
+    ;
+  if( err ){ LOGDBG("[TRACE] at %s:%d", __FILE__, __LINE__); err = -1; goto endFn; }
+
+  /* BUGFIX: CreateProcessA parses the command line up to a NUL, but the
+   * append*() helpers only track a length and never wrote one. */
+  if( cmdline_len >= cmdline_cap ){
+    LOGERR("ENOBUFS: Cmdline too long. %s:%d\n", strrchr(__FILE__,'/')+1, __LINE__);
+    err = -1; goto endFn; }
+  cmdline[cmdline_len] = '\0';
+
+  STARTUPINFOA startInfo = { .lpDesktop = NULL, .lpTitle = NULL, .dwFlags = 0, };
+  startInfo.cb = sizeof(startInfo);
+  PROCESS_INFORMATION proc;
+  err = CreateProcessA(NULL, cmdline, NULL, NULL, !0, 0, NULL, NULL, &startInfo, &proc);
+  if( err == 0 ){
+    LOGERR("ERROR: CreateProcess(): 0x%0lX. %s:%d\n", GetLastError(), strrchr(__FILE__,'/')+1, __LINE__);
+    err = -1; goto endFn;
+  }
+  err = WaitForSingleObject(proc.hProcess, INFINITE);
+  if( err != WAIT_OBJECT_0 ){ LOGERR("ERROR: WaitForSingleObject() -> %lu. %s:%d\n", GetLastError(), strrchr(__FILE__,'/')+1, __LINE__);
+    err = -1; goto endFn; }
+  long unsigned exitCode;
+  err = GetExitCodeProcess(proc.hProcess, &exitCode);
+  if( err == 0 ){ LOGERR("ERROR: GetExitCodeProcess(): %lu. %s:%d\n", GetLastError(), strrchr(__FILE__,'/')+1, __LINE__);
+    err = -1; goto endFn; }
+  if( (exitCode & 0x7FFFFFFF) != exitCode ){
+    LOGERR("EDOM: Exit code %lu out of bounds. %s:%d\n", exitCode, strrchr(__FILE__,'/')+1, __LINE__);
+    err = -1; goto endFn;
+  }
+  err = exitCode;
+endFn:
+  if( err != 0 && cmdline_len > 0 ){ LOGDBG("[DEBUG] %.*s\n", cmdline_len, cmdline); }
+  if( err < 0 ) err = -err;
+  return err;
+}
+
+
diff --git a/src/main/c/postshit/launch/openshift/ocexec.c b/src/main/c/postshit/launch/openshift/ocexec.c
new file mode 100644
index 0000000..45c4af9
--- /dev/null
+++ b/src/main/c/postshit/launch/openshift/ocexec.c
@@ -0,0 +1,152 @@
+/*
+
+SH: true \
+SH: && `# Configure` \
+SH: && CC=x86_64-w64-mingw32-cc \
+SH: && MKDIR_P="mkdir -p" \
+SH: && CFLAGS="-Wall -Werror -pedantic -O0 -g -Isrc/main/c/common -DPROJECT_VERSION=0.0.0-$(date -u +%s) -fmax-errors=1 -Wno-error=unused-variable" \
+SH: && LDFLAGS="-Wl,--gc-sections,--as-needed" \
+SH: && `# Make` \
+SH: && ${MKDIR_P:?} build/bin \
+SH: && ${CC:?} -o build/bin/ocexec ${CFLAGS:?} src/main/c/postshit/launch/openshift/ocexec.c ${LDFLAGS:?} \
+SH: && true
+
+*/
+
+#include <assert.h>
+#include <stdio.h>
+#include <string.h>
+#if __WIN32
+# include <windoof.h>
+#endif
+
+#define LOGERR(...) fprintf(stderr, __VA_ARGS__)
+#if !NDEBUG
+# define REGISTER
+# define LOGDBG(...) fprintf(stderr, __VA_ARGS__)
+#else
+# define REGISTER register
+# define LOGDBG(...)
+#endif
+
+#define FLG_isHelp (1<<0)
+
+
+typedef struct App App;
+
+
+struct App {
+ int flg;
+ char const *ocNamespace;
+ char const *podName;
+};
+
+
+/* Prints the (still unwritten) help page to stdout. */
+static void printHelp( void ){
+  fputs(" \n"
+    " TODO write help page\n"
+    " \n", stdout);
+}
+
+
+/* Parses CLI args into 'app'. Returns 0 on success, -1 on invalid usage.
+ * On --help, FLG_isHelp is set and parsing stops immediately.
+ * (Unused 'err' variable removed; goto-loop replaced by a plain loop.) */
+static int parseArgs( int argc, char**argv, App*app ){
+  int iArg = 1;
+  if( argc <= 1 ){ LOGERR("EINVAL: Luke.. use arguments!\n"); return-1; }
+  for(;;){
+    char const *arg = argv[iArg++];
+    if( arg == NULL ) break; /*argv is NULL-terminated per C standard*/
+    if( !strcmp(arg,"--help") ){
+      app->flg |= FLG_isHelp;
+      return 0;
+    }else if( !strcmp(arg,"-n") || !strcmp(arg,"--namespace") ){
+      arg = argv[iArg++];
+      if( arg == NULL ){ LOGERR("EINVAL: %s needs value\n", argv[iArg-2]); return-1; }
+      app->ocNamespace = arg;
+    }else if( !strcmp(arg,"-p") || !strcmp(arg,"--pod") ){
+      arg = argv[iArg++];
+      if( arg == NULL ){ LOGERR("EINVAL: %s needs value\n", argv[iArg-2]); return-1; }
+      app->podName = arg;
+    }else{
+      LOGERR("EINVAL: %s\n", arg); return -1;
+    }
+  }
+  return 0;
+}
+
+
+/* Not yet implemented: meant to fetch candidate pod names. Currently
+ * always fails (and asserts in debug builds). */
+static int fetchPodnames( App*app ){
+  assert(!"TODO_hCICALJrAgDwNgIAZ0ACAD9sAgB5UwIA");
+  return -1;
+}
+
+
+/* Maps the user-supplied pod name to a concrete one. Work in progress:
+ * fetchPodnames() is itself still a stub. Returns 0 on success, negative
+ * on error. */
+static int resolvePodname( App*app ){
+  REGISTER int err;
+  err = fetchPodnames(app);
+  if( err ) return err;
+  if( !strcmp(app->podName, "houston") ){
+    /* TODO: pick the matching pod from the fetched list. */
+  }
+  /* BUGFIX: control used to fall off the end of this non-void function,
+   * which is undefined behavior once the caller reads the result. */
+  return 0;
+}
+
+
+/* Expands the short namespace aliases (test/int/preprod) to their full
+ * "isa-houston-*" names. Any other value passes through unchanged (with a
+ * debug note). Always returns 0. */
+static int resolveNamespace( App*app ){
+  static const struct { const char *alias, *full; } aliasTable[] = {
+    { "test",    "isa-houston-test"    },
+    { "int",     "isa-houston-int"     },
+    { "preprod", "isa-houston-preprod" },
+  };
+  int i;
+  for( i = 0 ; i < (int)(sizeof aliasTable / sizeof*aliasTable) ; ++i ){
+    if( !strcmp(app->ocNamespace, aliasTable[i].alias) ){
+      app->ocNamespace = aliasTable[i].full;
+      return 0;
+    }
+  }
+  LOGDBG("[DEBUG] Use oc namespace as provided: \"%s\"\n", app->ocNamespace);
+  return 0;
+}
+
+
+/* Work in progress: resolves namespace and pod name, then is supposed to
+ * spawn the actual process and forward its exit code.
+ * NOTE(review): this function cannot compile as written: 'cmdline' and
+ * 'startInfo' are never declared, the 'endFn' label targeted by the gotos
+ * does not exist in this function, and control can fall off the end of a
+ * non-void function. The spawn/wait tail looks copied from mvn-launch.c
+ * and still needs its setup code. */
+static int run( App*app ){
+  REGISTER int err;
+  err = resolveNamespace(app); if( err ) return err;
+  err = resolvePodname(app); if( err ) return err;
+
+  LOGDBG("ENOTSUP: TODO continue here %s:%d\n", __FILE__, __LINE__);
+
+  PROCESS_INFORMATION proc;
+  err = CreateProcessA(NULL, cmdline, NULL, NULL, !0, 0, NULL, NULL, &startInfo, &proc);
+  if( err == 0 ){
+    LOGERR("ERROR: CreateProcess(): 0x%0lX. %s:%d\n", GetLastError(), strrchr(__FILE__,'/')+1, __LINE__);
+    err = -1; goto endFn; }
+  err = WaitForSingleObject(proc.hProcess, INFINITE);
+  if( err != WAIT_OBJECT_0 ){
+    LOGERR("ERROR: WaitForSingleObject() -> %lu. %s:%d\n", GetLastError(), strrchr(__FILE__,'/')+1, __LINE__);
+    err = -1; goto endFn; }
+  long unsigned exitCode;
+  err = GetExitCodeProcess(proc.hProcess, &exitCode);
+  if( err == 0 ){
+    LOGERR("ERROR: GetExitCodeProcess(): %lu. %s:%d\n", GetLastError(), strrchr(__FILE__,'/')+1, __LINE__);
+    err = -1; goto endFn; }
+  if( (exitCode & 0x7FFFFFFF) != exitCode ){
+    LOGERR("EDOM: Exit code %lu out of bounds. %s:%d\n", exitCode, strrchr(__FILE__,'/')+1, __LINE__);
+    err = -1; goto endFn;
+  }
+}
+
+
+/* Entry point: parse args, print help when requested, otherwise run.
+ * Returns 0 on success, 1 otherwise. */
+int main( int argc, char**argv ){
+  REGISTER int err;
+  App app = {0}; assert((void*)0 == NULL);
+  /* 'app' reads like a pointer from here on (see matching #undef below). */
+  #define app (&app)
+  if( parseArgs(argc, argv, app) ){ err = -1; goto endFn; }
+  LOGDBG("[DEBUG] flags are 0x%X\n", app->flg);
+  if( app->flg & FLG_isHelp ){ printHelp(); err = 0; goto endFn; }
+  err = run(app);
+endFn:
+  return !!err;
+  #undef app
+}
+
diff --git a/src/main/docker/android-dev.Dockerfile b/src/main/docker/android-dev.Dockerfile
deleted file mode 100644
index 3f7b4b4..0000000
--- a/src/main/docker/android-dev.Dockerfile
+++ /dev/null
@@ -1,44 +0,0 @@
-#
-# Tools for Android development.
-#
-
-ARG PARENT_IMAGE=debian:buster-20220622-slim
-FROM $PARENT_IMAGE
-
-ARG PKGS_TO_ADD="curl unzip openjdk-11-jdk-headless aapt apksigner zipalign"
-ARG PKGS_TO_DEL="curl unzip"
-ARG PKGINIT="apt-get update"
-ARG PKGADD="apt-get install -y --no-install-recommends"
-ARG PKGDEL="apt-get purge -y"
-ARG PKGCLEAN="apt-get clean"
-ARG PLATFORM_VERSION="22"
-ARG BUILD_TOOLS_VERSION="22.0.1"
-ARG CMDLINETOOLS_URL="https://dl.google.com/android/repository/commandlinetools-linux-8512546_latest.zip"
-
-ENV ANDROID_HOME="/usr/lib/android-sdk"
-ENV PATH="$PATH:/usr/lib/android-sdk/build-tools/debian:/usr/lib/android-sdk/cmdline-tools/latest/bin:/usr/lib/android-sdk/build-tools/$BUILD_TOOLS_VERSION"
-
-WORKDIR /work
-
-RUN true \
- && $PKGINIT \
- && $PKGADD $PKGS_TO_ADD \
- && (cd /tmp && curl -sSLO "$CMDLINETOOLS_URL") \
- && if test -x /tmp/cmdline-tools; then echo >&2 "[ERROR] /tmp/cmdline-tools already exists"; false; fi \
- && (cd /tmp && unzip $(basename "$CMDLINETOOLS_URL") >/dev/null) \
- && mkdir /usr/lib/android-sdk/cmdline-tools \
- && mkdir /usr/lib/android-sdk/cmdline-tools/latest \
- && mv /tmp/cmdline-tools/* /usr/lib/android-sdk/cmdline-tools/latest/. \
- && yes | sdkmanager --install "platforms;android-$PLATFORM_VERSION" "build-tools;$BUILD_TOOLS_VERSION" \
- # Those for some reason are broken (wrong linker) so use the debian variant.
- && (cd "/usr/lib/android-sdk/build-tools/${BUILD_TOOLS_VERSION:?}" && rm aapt zipalign) \
- && chown 1000:1000 /work \
- && $PKGDEL $PKGS_TO_DEL \
- && $PKGCLEAN \
- && rm -rf /tmp/* \
- && true
-
-USER 1000:1000
-
-CMD ["sleep", "36000"]
-
diff --git a/src/main/docker/gateleen.Dockerfile b/src/main/docker/gateleen.Dockerfile
deleted file mode 100644
index f604dc2..0000000
--- a/src/main/docker/gateleen.Dockerfile
+++ /dev/null
@@ -1,65 +0,0 @@
-#
-# A Gateleen playground instance.
-#
-
-ARG PARENT_IMAGE=alpine:3.16.0
-FROM $PARENT_IMAGE
-
-ARG GATELEEN_GIT_TAG=v1.3.28
-ARG UID=1000
-ARG GID=1000
-ARG PKGS_TO_ADD="maven nodejs npm curl redis openjdk11-jre-headless"
-#ARG PKGS_TO_DEL="maven nodejs npm"
-ARG PKGS_TO_DEL="nodejs npm"
-ARG PKGINIT="true"
-ARG PKGADD="apk add"
-ARG PKGDEL="true"
-ARG PKGCLEAN="true"
-
-WORKDIR /work
-
-RUN true \
- && printf 'user:x:%s:%s:user:/work:/bin/sh\n' "${UID:?}" "${GID:?}" >> /etc/passwd \
- && true
-
-RUN true \
- && $PKGINIT && $PKGADD $PKGS_TO_ADD \
- && sed -i "s,</settings>, <localRepository>/data/maven/.m2/repository</localRepository>\n</settings>,g" /usr/share/java/maven-3/conf/settings.xml \
- && mkdir /data /data/maven /work/gateleen \
- && chown "${UID:?}:${GID:?}" /data/maven /work /work/gateleen \
- && curl -sSL https://github.com/swisspush/gateleen/archive/refs/tags/"$GATELEEN_GIT_TAG".tar.gz > "/tmp/gateleen-$GATELEEN_GIT_TAG.tgz" \
- && cd /work/gateleen \
- && su user -c 'tar --strip-components 1 -xf /tmp/gateleen-"$GATELEEN_GIT_TAG".tgz' \
- && (cd gateleen-hook-js && su user -c 'npm install') \
- && su user -c 'mkdir -p gateleen-hook-js/node/node_modules/npm/bin' \
- && su user -c 'ln -s /usr/bin/node gateleen-hook-js/node/node' \
- && printf "require('/usr/lib/node_modules/npm/lib/cli.js')\n" | su user -c 'tee gateleen-hook-js/node/node_modules/npm/bin/npm-cli.js' >/dev/null \
- && su user -c 'mvn install -PpublicRepos -DskipTests -Dskip.installnodenpm -pl gateleen-hook-js' \
- && su user -c 'mvn install -PpublicRepos -DfailIfNoTests=false \
- -pl !gateleen-test,!gateleen-hook-js \
- -Dtest=!ReleaseLockLuaScriptTests,!RedisCacheStorageTest,!DeltaHandlerTest,!QueueCircuitBreakerCloseCircuitLuaScriptTests,!QueueCircuitBreakerGetAllCircuitsLuaScriptTests,!QueueCircuitBreakerHalfOpenCircuitsLuaScriptTests,!QueueCircuitBreakerReOpenCircuitLuaScriptTests,!QueueCircuitBreakerUpdateStatsLuaScriptTests,!RemoveExpiredQueuesLuaScriptTests,!StartQueueTimerLuaScriptTests' \
- && mkdir /work/classpath \
- && chown "${UID:?}:${GID:?}" /work/classpath \
- && su user -c 'cd gateleen-playground && mvn dependency:copy-dependencies \
- -DexcludeScope=provided -DoutputDirectory=/work/classpath/.' \
- && cp gateleen-playground/target/gateleen-playground-*.jar /work/classpath/. \
- && mkdir /work/etc \
- && printf >/work/etc/redis.conf '%s\n' \
- 'save ""' \
- 'appendonly yes' \
- 'appenddirname "redis-state"' \
- 'appendfilename appendonly.aof' \
- && (su user -c 'cd /work && redis-server /work/etc/redis.conf & \
- java -cp '"'/work/classpath/*'"' org.swisspush.gateleen.playground.Server' \
- & sleep 3) \
- && su user -c 'cd /work/gateleen && mvn deploy -PuploadStaticFiles' \
- && pkill -INT java && pkill -INT redis-server \
- && $PKGDEL $PKGS_TO_DEL \
- && $PKGCLEAN \
- && true
-
-USER "${UID}:${GID}"
-
-#CMD ["sleep", "36000"]
-CMD ["sh", "-c", "ip a|grep inet && redis-server /work/etc/redis.conf & java -cp '/work/classpath/*' org.swisspush.gateleen.playground.Server"]
-
diff --git a/src/main/docker/gcc-windoof.Dockerfile b/src/main/docker/gcc-windoof.Dockerfile
deleted file mode 100644
index 69cc18e..0000000
--- a/src/main/docker/gcc-windoof.Dockerfile
+++ /dev/null
@@ -1,233 +0,0 @@
-#
-# Windoof GCC build env
-#
-
-ARG BASE_IMG=alpine:3.16.0
-FROM $BASE_IMG
-
-ARG PKGSTOADD="ca-certificates curl mingw-w64-gcc make tar"
-ARG PKGSTODEL="ca-certificates curl"
-ARG PKGADD="apk add"
-ARG PKGDEL="apk del"
-ARG PKGCLEAN="true"
-ARG PKGINIT="true"
-ARG VERSION_CJSON="1.7.15"
-ARG VERSION_EXPAT="2.4.2"
-ARG VERSION_LUA="5.4.3"
-ARG VERSION_MBEDTLS="3.1.0"
-ARG VERSION_SDL2="2.0.20"
-ARG VERSION_SQLITE="3.33.0"
-ARG VERSION_ZLIB="1.2.11"
-
-ENV NDEBUG=1 MAKE_JOBS=8 HOST=x86_64-w64-mingw32
-
-RUN true \
- && $PKGINIT && $PKGADD $PKGSTOADD \
- #
- && ensureSourceIsCached () { \
- local localPath=${1:?}; \
- local url=${2:?}; \
- if test -f "${localPath:?}"; then \
- echo "[DEBUG] Source avail as \"${localPath:?}\""; \
- return; \
- fi; \
- echo "[DEBUG] Downloading \"${localPath:?}\""; \
- echo "[DEBUG] from \"${url:?}\""; \
- curl -L "$url" -o "${localPath:?}"; \
- } \
- #
- && makeZlib () { echo "\n Build zlib\n" \
- && local version="${1:?}" \
- && local tarbal="${2:?}" \
- && local origDir="${PWD:?}" \
- && mkdir "/tmp/zlib" && cd "/tmp/zlib" \
- && tar xzf "${tarbal:?}" \
- && cd zlib-* \
- && mkdir build \
- && export DESTDIR=./build BINARY_PATH=/bin INCLUDE_PATH=/include LIBRARY_PATH=/lib \
- && sed -i "s;^PREFIX =.\*\$;;" win32/Makefile.gcc \
- && make -e -j$MAKE_JOBS -fwin32/Makefile.gcc PREFIX="${HOST:?}"- \
- && make -e -fwin32/Makefile.gcc install PREFIX="${HOST:?}"- \
- && unset DESTDIR BINARY_PATH INCLUDE_PATH LIBRARY_PATH \
- && cp README build/. \
- && (cd build && rm -rf lib/pkgconfig) \
- && (cd build && find -type f -not -name MD5SUM -exec md5sum -b {} + > MD5SUM) \
- && (cd build && tar --owner=0 --group=0 -cz *) > "/tmp/zlib-${version:?}-windoof.tgz" \
- && cd / && rm -rf "/tmp/zlib" \
- && mkdir -p /usr/local \
- && tar -C /usr/local -f "/tmp/zlib-${version:?}-windoof.tgz" -x include lib \
- && echo -e "\n zlib Done :)\n" \
- && cd "${origDir:?}" ; } \
- && ensureSourceIsCached "/tmp/zlib-${VERSION_ZLIB:?}.tgz" "https://downloads.sourceforge.net/project/libpng/zlib/${VERSION_ZLIB:?}/zlib-${VERSION_ZLIB}.tar.gz" \
- && makeZlib "${VERSION_ZLIB:?}" "/tmp/zlib-${VERSION_ZLIB:?}.tgz" \
- #
- && $PKGADD xz \
- && makeExpat () { echo -e "\n Build Expat\n" \
- && local version="${1:?}" \
- && local tarbal="${2:?}" \
- && local origDir="${PWD:?}" \
- && mkdir /tmp/expat && cd /tmp/expat \
- && tar xf "${tarbal:?}" --strip-components=1 \
- && mkdir build \
- && ./configure --prefix="${PWD:?}/build" --host="${HOST:?}" CFLAGS="-Wall -pedantic --std=c99 -O2" \
- && make -e clean \
- && make -e -j$MAKE_JOBS \
- && make -e install \
- && cp README.md build/. \
- && (cd build && rm -rf lib/cmake lib/libexpat.la lib/pkgconfig) \
- && (cd build && find -type f -not -name MD5SUM -exec md5sum -b {} + > MD5SUM) \
- && (cd build && tar --owner=0 --group=0 -cz *) > "/tmp/expat-${version:?}-debian.tgz" \
- && cd / && rm -rf /tmp/expat \
- && mkdir -p /usr/local \
- && tar -C /usr/local -f /tmp/expat-2.4.2-debian.tgz -x bin include lib \
- && echo -e "\n Expat Done :)\n" ; } \
- && ensureSourceIsCached "/tmp/expat-${VERSION_EXPAT}.txz" "https://github.com/libexpat/libexpat/releases/download/R_2_4_2/expat-${VERSION_EXPAT}.tar.xz" \
- && makeExpat "${VERSION_EXPAT:?}" "/tmp/expat-${VERSION_EXPAT}.txz" \
- #
- && makeCJSON () { echo -e "\n Build cJSON\n" \
- && local version="${1:?}" \
- && local tarbal="${2:?}" \
- && local origDir="${PWD:?}" \
- && mkdir /tmp/cJSON && cd /tmp/cJSON \
- && tar xf "${tarbal:?}" \
- && cd * \
- && mkdir build build/obj build/lib build/include \
- && CC="${HOST:?}-gcc" \
- && AR="${HOST:?}-ar" \
- && CFLAGS="-Wall -pedantic -fPIC" \
- && ${CC:?} $CFLAGS -c -o build/obj/cJSON.o cJSON.c \
- && ${CC:?} $CFLAGS -shared -o build/lib/libcJSON.so.1.7.15 build/obj/cJSON.o \
- && (cd build/lib && ln -s libcJSON.so."${version:?}" libcJSON.so."${version%.*}") \
- && (cd build/lib && ln -s libcJSON.so."${version%.*}" libcJSON.so."${version%.*.*}") \
- && ${AR:?} rcs build/lib/libcJSON.a build/obj/cJSON.o \
- && unset CC AR CFLAGS \
- && cp -t build/. LICENSE README.md \
- && cp -t build/include/. cJSON.h \
- && rm -rf build/obj \
- && (cd build && find -type f -not -name MD5SUM -exec md5sum -b {} + > MD5SUM) \
- && (cd build && tar --owner=0 --group=0 -f "/tmp/cJSON-${version:?}-debian.tgz" -cz *) \
- && cd / && rm -rf /tmp/cJSON \
- && mkdir -p /usr/local \
- && tar -C /usr/local -f /tmp/cJSON-${version:?}-debian.tgz -x include lib \
- && echo -e "\n cJSON Done :)\n"; } \
- && ensureSourceIsCached "/tmp/cJSON-${VERSION_CJSON:?}.tgz" "https://github.com/DaveGamble/cJSON/archive/refs/tags/v1.7.15.tar.gz" \
- && makeCJSON "${VERSION_CJSON:?}" "/tmp/cJSON-${VERSION_CJSON:?}.tgz" \
- #
- && $PKGADD python3 \
- && makeMbedtls () { echo -e "\n Build mbedtls\n" \
- && local version="${1:?}" \
- && local tarbal="${2:?}" \
- && local origDir="${PWD:?}" \
- && mkdir /tmp/mbedtls && cd /tmp/mbedtls \
- && tar xf "${tarbal:?}" \
- && cd * \
- && sed -i 's;^DESTDIR=.*$;DESTDIR='"$PWD"'/build;' Makefile \
- # Yet another hack around as gethostname seems not to exist and I do
- # not understand how to disable compiling those "programs" which I
- # do not want anyway.
- && rm programs/ssl/ssl_mail_client.c programs/test/udp_proxy.c \
- && sed -i '/^\t\+\(ssl\/ssl_mail_client\|test\/udp_proxy\) \+\\$/d' programs/Makefile \
- && sed -i '/^ \+ssl_mail_client$/d' programs/ssl/CMakeLists.txt \
- && export CC="${HOST:?}-gcc" AR="${HOST:?}-ar" WINDOWS_BUILD=1 SHARED=1 \
- && make -e -j$MAKE_JOBS no_test \
- && if [ -e build ]; then echo "ERR already exists: $PWD/build"; false; fi \
- && make -e install \
- && unset CC AR WINDOWS_BUILD SHARED \
- && (cd build && tar --owner=0 --group=0 -cz *) > "/tmp/mbedtls-${version:?}-windoof.tgz" \
- && cd / && rm -rf /tmp/mbedtls \
- && mkdir -p /usr/local \
- && tar -C /usr/local -f /tmp/mbedtls-${version:?}-windoof.tgz -x bin include lib \
- && cd "${origDir:?}" \
- && echo -e "\n mbedtls Done :)\n" ; } \
- && ensureSourceIsCached "/tmp/mbedtls-${VERSION_MBEDTLS:?}.tgz" "https://github.com/Mbed-TLS/mbedtls/archive/refs/tags/v${VERSION_MBEDTLS:?}.tar.gz" \
- && makeMbedtls "${VERSION_MBEDTLS:?}" "/tmp/mbedtls-${VERSION_MBEDTLS:?}.tgz" \
- #
- && makeSqLite () { echo -e "\n Build SqLite\n" \
- && local version="${1:?}" \
- && local tarbal="${2:?}" \
- && local origDir="${PWD:?}" \
- && mkdir /tmp/sqlite && cd /tmp/sqlite \
- && tar xf "${tarbal:?}" \
- && cd * \
- && mkdir build \
- && $PKGADD gcc musl-dev tcl \
- && export CC="${HOST}-gcc" CPP="${HOST:?}-cpp" CXX="${HOST:?}-g++" BCC=gcc \
- && ./configure --prefix="${PWD:?}/build" --host=$HOST CC=$CC CPP=$CPP CXX=$CXX BCC=gcc BEXE=.exe config_TARGET_EXEEXT=.exe \
- && ln -s mksourceid.exe mksourceid \
- && make -e clean \
- && make -e -j$MAKE_JOBS \
- && $PKGDEL gcc musl-dev tcl \
- && make -e install \
- && unset CC CPP CXX BCC \
- && (cd build && rm -rf lemon* mksourceid lib/pkgconfig lib/*.la) \
- && cp README.md LICENSE.md VERSION build/. \
- && (cd build && find -not -name MD5SUM -type f -exec md5sum -b {} + > MD5SUM) \
- && (cd build && tar --owner=0 --group=0 -cz *) > "/tmp/sqlite-3.33.0-windoof.tgz" \
- && cd / && rm -rf /tmp/sqlite \
- && mkdir -p /usr/local \
- && tar -C /usr/local -f /tmp/sqlite-${version:?}-windoof.tgz -x bin include lib \
- && cd "$origDir" \
- && echo -e "\n SqLite Done :)\n"; } \
- && ensureSourceIsCached "/tmp/sqlite-${VERSION_SQLITE:?}.tgz" "https://github.com/sqlite/sqlite/archive/refs/tags/version-3.33.0.tar.gz" \
- && makeSqLite "${VERSION_SQLITE:?}" "/tmp/sqlite-${VERSION_SQLITE:?}.tgz" \
- #
- && $PKGADD binutils \
- && makeLua () { echo -e "\n Build Lua\n" \
- && local version="${1:?}" \
- && local tarbal="${2:?}" \
- && local origDir="${PWD:?}" \
- && mkdir /tmp/lua && cd /tmp/lua \
- && tar xf "${tarbal:?}" \
- && cd * \
- && mkdir -p build/bin build/include build/lib build/man/man1 \
- && make -e -j$MAKE_JOBS PLAT=mingw CC="${HOST:?}-gcc -std=gnu99" "AR=${HOST:?}-ar rcu" "RANLIB=${HOST:?}-ranlib" \
- && cp -t build/. README \
- && cp -t build/bin/. src/lua.exe src/luac.exe \
- && cp -t build/include/. src/lua.h src/luaconf.h src/lualib.h src/lauxlib.h src/lua.hpp \
- && cp -t build/lib/. src/liblua.a \
- && cp -t build/man/man1/. doc/lua.1 doc/luac.1 \
- && (cd build && find -not -name MD5SUM -type f -exec md5sum -b {} + > MD5SUM) \
- && (cd build && tar --owner=0 --group=0 -cz *) > "/tmp/lua-${version:?}-windoof.tgz" \
- && cd / && rm -rf /tmp/lua \
- && mkdir -p /usr/local \
- && tar -C /usr/local -f /tmp/lua-${version:?}-windoof.tgz -x bin include lib man \
- && cd "$origDir" \
- && echo -e "\n Lua Done :)\n"; } \
- && ensureSourceIsCached "/tmp/lua-${VERSION_LUA:?}.tgz" "https://www.lua.org/ftp/lua-${VERSION_LUA:?}.tar.gz" \
- && makeLua "${VERSION_LUA:?}" "/tmp/lua-${VERSION_LUA:?}.tgz" \
- #
- && $PKGADD alsa-lib libxext-dev pulseaudio-dev \
- && makeSDL2 () { echo -e "\n Build SDL2\n" \
- && local version="${1:?}" \
- && local tarbal="${2:?}" \
- && local origDir="${PWD:?}" \
- && mkdir /tmp/SDL2 && cd /tmp/SDL2 \
- && tar xf "${tarbal:?}" \
- && cd * \
- && ./configure --prefix="${PWD:?}/build" --host="${HOST:?}" \
- && make -e -j$MAKE_JOBS \
- && make -e install \
- && cp -t build/. CREDITS.txt LICENSE.txt README-SDL.txt README.md \
- && (cd build \
- && ls -A \
- | egrep -v '^(CREDITS.txt|LICENSE.txt|README-SDL.txt|RADME.md|bin|lib|include)$' \
- | xargs rm -rf) \
- && (cd build && rm -rf lib/cmake lib/pkgconfig lib/*.la) \
- && (cd build && find -type f -exec md5sum -b {} + > MD5SUM) \
- && (cd build && tar --owner=0 --group=0 -cz *) > "/tmp/SDL2-${version:?}-windoof.tgz" \
- && cd / && rm -rf /tmp/SDL2 \
- && mkdir -p /usr/local \
- && tar -C /usr/local -f /tmp/SDL2-${version:?}-windoof.tgz -x include lib \
- && cd "$origDir" \
- && echo -e "\n SDL2 Done :)\n"; } \
- && ensureSourceIsCached "/tmp/SDL2-${VERSION_SDL2:?}.tgz" "https://www.libsdl.org/release/SDL2-${VERSION_SDL2}.tar.gz" \
- && makeSDL2 "${VERSION_SDL2:?}" "/tmp/SDL2-${VERSION_SDL2:?}.tgz" \
- #
- && $PKGDEL $PKGSTODEL && $PKGCLEAN \
- && true
-
-WORKDIR /work
-
-CMD sleep 999999999
-
-
diff --git a/src/main/docker/gcc.Dockerfile b/src/main/docker/gcc.Dockerfile
deleted file mode 100644
index 5894667..0000000
--- a/src/main/docker/gcc.Dockerfile
+++ /dev/null
@@ -1,220 +0,0 @@
-#
-# Debian GCC build env
-#
-
-ARG BASE_IMG=debian:9-slim
-FROM $BASE_IMG
-
-ARG PKGSTOADD="ca-certificates curl gcc make tar"
-ARG PKGSTODEL="ca-certificates curl"
-ARG PKGADD="apt-get install -y --no-install-recommends"
-ARG PKGDEL="apt-get purge -y"
-ARG PKGCLEAN="apt-get clean"
-ARG PKGINIT="apt-get update"
-ARG VERSION_CJSON="1.7.15"
-ARG VERSION_EXPAT="2.4.2"
-ARG VERSION_LUA="5.4.3"
-ARG VERSION_MBEDTLS="3.1.0"
-ARG VERSION_SDL2="2.0.20"
-ARG VERSION_SQLITE="3.33.0"
-ARG VERSION_ZLIB="1.2.11"
-
-ENV NDEBUG=1 MAKE_JOBS=8
-
-RUN true \
- && $PKGINIT && $PKGADD $PKGSTOADD \
- #
- && ensureSourceIsCached () { \
- local localPath=${1:?}; \
- local url=${2:?}; \
- if test -f "${localPath:?}"; then \
- echo "[DEBUG] Source avail as \"${localPath:?}\""; \
- return; \
- fi; \
- echo "[DEBUG] Downloading \"${localPath:?}\""; \
- echo "[DEBUG] from \"${url:?}\""; \
- curl -L "$url" -o "${localPath:?}"; \
- } \
- #
- && $PKGADD libc-dev \
- && makeZlib () { echo "\n Build zlib\n" \
- && local version="${1:?}" \
- && local tarbal="${2:?}" \
- && local origDir="${PWD:?}" \
- && mkdir "/tmp/zlib" && cd "/tmp/zlib" \
- && tar xzf "${tarbal:?}" \
- && cd zlib-* \
- && mkdir build \
- && ./configure --prefix="${PWD:?}/build/" \
- && make -e -j$MAKE_JOBS \
- && make install \
- && cp README build/. \
- && (cd build \
- && find -type f -not -name MD5SUM -exec md5sum -b {} + > MD5SUM) \
- && (cd build && tar --owner=0 --group=0 -cz *) > "/tmp/zlib-${version:?}-debian.tgz" \
- && cd / && rm -rf "/tmp/zlib" \
- && mkdir -p /usr/local/ \
- && tar -C /usr/local -f "/tmp/zlib-${version:?}-debian.tgz" -x include lib \
- && cd "${origDir:?}" \
- && echo -e "\n zlib Done :)\n" ; } \
- && ensureSourceIsCached "/tmp/zlib-${VERSION_ZLIB:?}.tgz" "https://downloads.sourceforge.net/project/libpng/zlib/${VERSION_ZLIB:?}/zlib-${VERSION_ZLIB}.tar.gz" \
- && makeZlib "${VERSION_ZLIB:?}" "/tmp/zlib-${VERSION_ZLIB:?}.tgz" \
- #
- && $PKGADD libc-dev xz-utils \
- && makeExpat () { echo -e "\n Build Expat\n" \
- && local version="${1:?}" \
- && local tarbal="${2:?}" \
- && local origDir="${PWD:?}" \
- && mkdir /tmp/expat && cd /tmp/expat \
- && tar xf "${tarbal:?}" --strip-components=1 \
- && mkdir build \
- && ./configure --prefix="${PWD:?}/build" CFLAGS='-Wall -pedantic --std=c99 -O2' \
- && make -e clean \
- && make -e -j$MAKE_JOBS \
- && make -e install \
- && cp README.md build/. \
- && (cd build && rm -rf lib/cmake lib/libexpat.la lib/pkgconfig) \
- && (cd build && find -type f -not -name MD5SUM -exec md5sum -b {} + > MD5SUM) \
- && (cd build && tar --owner=0 --group=0 -cz *) > "/tmp/expat-${version:?}-debian.tgz" \
- && cd / && rm -rf /tmp/expat \
- && mkdir -p /usr/local \
- && tar -C /usr/local -f /tmp/expat-2.4.2-debian.tgz -x bin include lib \
- && cd "$origDir" \
- && echo -e "\n Expat Done :)\n" ; } \
- && ensureSourceIsCached "/tmp/expat-${VERSION_EXPAT}.txz" "https://github.com/libexpat/libexpat/releases/download/R_2_4_2/expat-${VERSION_EXPAT}.tar.xz" \
- && makeExpat "${VERSION_EXPAT:?}" "/tmp/expat-${VERSION_EXPAT}.txz" \
- #
- && $PKGADD libc-dev \
- && makeCJSON () { echo -e "\n Build cJSON\n" \
- && local version="${1:?}" \
- && local tarbal="${2:?}" \
- && local origDir="${PWD:?}" \
- && mkdir /tmp/cJSON && cd /tmp/cJSON \
- && tar xf "${tarbal:?}" \
- && cd * \
- && mkdir build build/obj build/lib build/include \
- && CFLAGS="-Wall -pedantic -fPIC" \
- && gcc $CFLAGS -c -o build/obj/cJSON.o cJSON.c \
- && gcc $CFLAGS -shared -o build/lib/libcJSON.so.1.7.15 build/obj/cJSON.o \
- && unset CFLAGS \
- && (cd build/lib && ln -s libcJSON.so."${version:?}" libcJSON.so."${version%.*}") \
- && (cd build/lib && ln -s libcJSON.so."${version%.*}" libcJSON.so."${version%.*.*}") \
- && ar rcs build/lib/libcJSON.a build/obj/cJSON.o \
- && cp -t build/. LICENSE README.md \
- && cp -t build/include/. cJSON.h \
- && rm -rf build/obj \
- && (cd build && find -type f -not -name MD5SUM -exec md5sum -b {} + > MD5SUM) \
- && (cd build && tar --owner=0 --group=0 -f "/tmp/cJSON-${version:?}-debian.tgz" -cz *) \
- && cd / && rm -rf /tmp/cJSON \
- && mkdir -p /usr/local \
- && tar -C /usr/local -f /tmp/cJSON-${version:?}-debian.tgz -x include lib \
- && cd "$origDir" \
- && echo -e "\n cJSON Done :)\n"; } \
- && ensureSourceIsCached "/tmp/cJSON-${VERSION_CJSON:?}.tgz" "https://github.com/DaveGamble/cJSON/archive/refs/tags/v1.7.15.tar.gz" \
- && makeCJSON "${VERSION_CJSON}" "/tmp/cJSON-${VERSION_CJSON:?}.tgz" \
- #
- && $PKGADD libc-dev python3 \
- && makeMbedtls () { echo -e "\n Build mbedtls\n" \
- && local version="${1:?}" \
- && local tarbal="${2:?}" \
- && local origDir="${PWD:?}" \
- && mkdir /tmp/mbedtls && cd /tmp/mbedtls \
- && tar xf "${tarbal:?}" \
- && cd * \
- && sed -i 's;^DESTDIR=.*$;DESTDIR='"$PWD"'/build;' Makefile \
- && SHARED=1 make -e -j$MAKE_JOBS tests lib mbedtls_test \
- && if [ -e build ]; then echo "ERR already exists: $PWD/build"; false; fi \
- && make -e install \
- && (cd build && tar --owner=0 --group=0 -cz *) > "/tmp/mbedtls-${version:?}-debian.tgz" \
- && cd / && rm -rf /tmp/mbedtls \
- && mkdir -p /usr/local \
- && tar -C /usr/local -f /tmp/mbedtls-${version:?}-debian.tgz -x bin include lib \
- && cd "$origDir" \
- && echo -e "\n mbedtls Done :)\n"; } \
- && ensureSourceIsCached "/tmp/mbedtls-${VERSION_MBEDTLS:?}.tgz" "https://github.com/Mbed-TLS/mbedtls/archive/refs/tags/v${VERSION_MBEDTLS:?}.tar.gz" \
- && makeMbedtls "${VERSION_MBEDTLS:?}" "/tmp/mbedtls-${VERSION_MBEDTLS:?}.tgz" \
- #
- && $PKGADD libc-dev tcl \
- && makeSqLite () { echo -e "\n Build SqLite\n" \
- && local version="${1:?}" \
- && local tarbal="${2:?}" \
- && local origDir="${PWD:?}" \
- && mkdir /tmp/sqlite && cd /tmp/sqlite \
- && tar xf "${tarbal:?}" \
- && cd * \
- && mkdir build \
- && ./configure --prefix="${PWD:?}/build" \
- && make -e clean \
- && make -e -j$MAKE_JOBS \
- && make -e install \
- && cp README.md LICENSE.md VERSION build/. \
- && (cd build && find -not -name MD5SUM -type f -exec md5sum -b {} + > MD5SUM) \
- && (cd build && tar --owner=0 --group=0 -cz *) > "/tmp/sqlite-${version:?}-debian.tgz" \
- && cd / && rm -rf /tmp/sqlite \
- && mkdir -p /usr/local \
- && tar -C /usr/local -f /tmp/sqlite-${version:?}-debian.tgz -x bin include lib \
- && cd "$origDir" \
- && echo -e "\n SqLite Done :)\n"; } \
- && ensureSourceIsCached "/tmp/sqlite-${VERSION_SQLITE:?}.tgz" "https://github.com/sqlite/sqlite/archive/refs/tags/version-3.33.0.tar.gz" \
- && makeSqLite "${VERSION_SQLITE:?}" "/tmp/sqlite-${VERSION_SQLITE:?}.tgz" \
- #
- && $PKGADD libc-dev \
- && makeLua () { echo -e "\n Build Lua\n" \
- && local version="${1:?}" \
- && local tarbal="${2:?}" \
- && local origDir="${PWD:?}" \
- && mkdir /tmp/lua && cd /tmp/lua \
- && tar xf "${tarbal:?}" \
- && cd * \
- && mkdir -p build/bin build/include build/lib build/man/man1 \
- && make -e -j$MAKE_JOBS \
- && cp -t build/. README \
- && cp -t build/bin/. src/lua src/luac \
- && cp -t build/include/. src/lua.h src/luaconf.h src/lualib.h src/lauxlib.h src/lua.hpp \
- && cp -t build/lib/. src/liblua.a \
- && cp -t build/man/man1/. doc/lua.1 doc/luac.1 \
- && (cd build && find -not -name MD5SUM -type f -exec md5sum -b {} + > MD5SUM) \
- && (cd build && tar --owner=0 --group=0 -cz *) > "/tmp/lua-${version:?}-debian.tgz" \
- && cd / && rm -rf /tmp/lua \
- && mkdir -p /usr/local \
- && tar -C /usr/local -f /tmp/lua-${version:?}-debian.tgz -x bin include lib man \
- && cd "$origDir" \
- && echo -e "\n Lua Done :)\n"; } \
- && ensureSourceIsCached "/tmp/lua-${VERSION_LUA:?}.tgz" "https://www.lua.org/ftp/lua-${VERSION_LUA:?}.tar.gz" \
- && makeLua "${VERSION_LUA:?}" "/tmp/lua-${VERSION_LUA:?}.tgz" \
- #
- && $PKGADD libc-dev libasound2-dev libxext-dev libpulse-dev \
- && makeSDL2 () { echo -e "\n Build SDL2\n" \
- && local version="${1:?}" \
- && local tarbal="${2:?}" \
- && local origDir="${PWD:?}" \
- && mkdir /tmp/SDL2 && cd /tmp/SDL2 \
- && tar xf "${tarbal:?}" \
- && cd * \
- && ./configure --prefix="${PWD:?}/build" --host= \
- && make -e -j$MAKE_JOBS \
- && make -e install \
- && cp -t build/. CREDITS.txt LICENSE.txt README-SDL.txt README.md \
- && (cd build \
- && ls -A \
- | egrep -v '^(CREDITS.txt|LICENSE.txt|README-SDL.txt|RADME.md|bin|lib|include)$' \
- | xargs rm -rf) \
- && (cd build && rm -rf lib/cmake lib/pkgconfig lib/*.la) \
- && (cd build && find -type f -exec md5sum -b {} + > MD5SUM) \
- && (cd build && tar --owner=0 --group=0 -cz *) > "/tmp/SDL2-${version:?}-debian.tgz" \
- && cd / && rm -rf /tmp/SDL2 \
- && mkdir -p /usr/local \
- && tar -C /usr/local -f /tmp/SDL2-${version:?}-debian.tgz -x include lib \
- && cd "$origDir" \
- && echo -e "\n SDL2 Done :)\n"; } \
- && ensureSourceIsCached "/tmp/SDL2-${VERSION_SDL2:?}.tgz" "https://www.libsdl.org/release/SDL2-${VERSION_SDL2}.tar.gz" \
- && makeSDL2 "${VERSION_SDL2:?}" "/tmp/SDL2-${VERSION_SDL2:?}.tgz" \
- #
- && $PKGDEL $PKGSTODEL && $PKGCLEAN \
- && true
-
-WORKDIR /work
-
-CMD sleep 999999999
-
-
diff --git a/src/main/docker/gxx.Dockerfile b/src/main/docker/gxx.Dockerfile
deleted file mode 100644
index f29f168..0000000
--- a/src/main/docker/gxx.Dockerfile
+++ /dev/null
@@ -1,17 +0,0 @@
-#
-# Debian with C++ compiler.
-#
-
-ARG PARENT_IMAGE=debian:buster-20220622-slim
-FROM $PARENT_IMAGE
-
-RUN true \
- && apt update \
- && apt install -y --no-install-recommends \
- g++ make \
- && apt clean \
- && true
-
-USER 1000:1000
-WORKDIR /work
-CMD ["sleep", "36000"]
diff --git a/src/main/docker/jni.Dockerfile b/src/main/docker/jni.Dockerfile
deleted file mode 100644
index c790e47..0000000
--- a/src/main/docker/jni.Dockerfile
+++ /dev/null
@@ -1,20 +0,0 @@
-#
-# Debian with tools for java-native-interface development.
-#
-
-ARG PARENT_IMAGE=debian:buster-20220622-slim
-FROM $PARENT_IMAGE
-
-ENV \
- JAVA_HOME=/usr/lib/jvm/java-11-openjdk-amd64
-
-RUN true \
- && apt update \
- && apt install -y --no-install-recommends \
- g++ make openjdk-11-jdk-headless \
- && apt clean \
- && true
-
-USER 1000:1000
-WORKDIR /work
-CMD ["sleep", "36000"]
diff --git a/src/main/docker/jre8.Dockerfile b/src/main/docker/jre8.Dockerfile
deleted file mode 100644
index 603b5f5..0000000
--- a/src/main/docker/jre8.Dockerfile
+++ /dev/null
@@ -1,27 +0,0 @@
-#
-# openjdk java 1.8 runtime environment.
-#
-
-ARG PARENT_IMAGE=alpine:3.16.0
-FROM $PARENT_IMAGE
-
-ARG PKGS_TO_ADD="openjdk8-jre"
-ARG PKGS_TO_DEL=""
-ARG PKGINIT="true"
-ARG PKGADD="apk add"
-ARG PKGDEL="true"
-ARG PKGCLEAN="true"
-
-WORKDIR /work
-
-RUN true \
- && $PKGINIT \
- && $PKGADD $PKGS_TO_ADD \
- && $PKGDEL $PKGS_TO_DEL \
- && $PKGCLEAN \
- && true
-
-USER 1000:1000
-
-CMD ["sleep", "36000"]
-
diff --git a/src/main/docker/maven.Dockerfile b/src/main/docker/maven.Dockerfile
deleted file mode 100644
index c33d519..0000000
--- a/src/main/docker/maven.Dockerfile
+++ /dev/null
@@ -1,35 +0,0 @@
-#
-# Maven build env.
-#
-# Use this to share your hosts repository with the container:
-#
-# -v "$HOME/.m2/repository:/data/maven/.m2/repository"
-#
-
-ARG PARENT_IMAGE=alpine:3.16.0
-FROM $PARENT_IMAGE
-
-ARG PKGS_TO_ADD="maven"
-ARG PKGS_TO_DEL=""
-ARG PKGINIT="true"
-ARG PKGADD="apk add"
-ARG PKGDEL="true"
-ARG PKGCLEAN="true"
-
-WORKDIR /work
-
-RUN true \
- && $PKGINIT \
- && $PKGADD $PKGS_TO_ADD \
- && sed -i "s,</settings>, <localRepository>/data/maven/.m2/repository</localRepository>\n</settings>,g" /usr/share/java/maven-3/conf/settings.xml \
- && mkdir /data /data/maven \
- && chown 1000:1000 /data/maven \
- && chown 1000:1000 /work \
- && $PKGDEL $PKGS_TO_DEL \
- && $PKGCLEAN \
- && true
-
-USER 1000:1000
-
-CMD ["sleep", "36000"]
-
diff --git a/src/main/docker/nginx.Dockerfile b/src/main/docker/nginx.Dockerfile
deleted file mode 100644
index 097d283..0000000
--- a/src/main/docker/nginx.Dockerfile
+++ /dev/null
@@ -1,50 +0,0 @@
-#
-# Bare nginx server serving HTTP/80 and HTTPS/443 from "/work/www".
-#
-
-ARG PARENT_IMAGE=alpine:3.16.0
-FROM $PARENT_IMAGE
-
-ARG CN=example.com
-ARG PKGS_TO_ADD="nginx openssl"
-ARG PKGS_TO_DEL="openssl"
-ARG PKGINIT="true"
-ARG PKGADD="apk add"
-ARG PKGDEL="true"
-ARG PKGCLEAN="true"
-
-WORKDIR /work
-
-RUN true \
- && $PKGINIT \
- && $PKGADD $PKGS_TO_ADD \
- && mkdir /work/www \
- && openssl genrsa -out /etc/ssl/private/nginx.key 2048 \
- && openssl req -new -key /etc/ssl/private/nginx.key \
- -out /etc/ssl/private/nginx.csr \
- -subj "/C=/ST=/L=/O=/OU=/CN=${CN:?}" \
- && openssl x509 -req -days 365 -in /etc/ssl/private/nginx.csr \
- -signkey /etc/ssl/private/nginx.key -out /etc/ssl/certs/nginx.crt \
- && chgrp nginx /etc/ssl/private/nginx.key \
- && chmod 0640 /etc/ssl/private/nginx.key \
- && printf 'server {\n\
- listen 80 default_server;\n\
- listen [::]:80 default_server;\n\
- listen 443 ssl default_server;\n\
- listen [::]:443 default_server;\n\
- ssl_certificate /etc/ssl/certs/nginx.crt;\n\
- ssl_certificate_key /etc/ssl/private/nginx.key;\n\
- location / {\n\
- root /work/www;\n\
- index index.html index.htm;\n\
- }\n\
-}\n' > /etc/nginx/http.d/default.conf \
- && chown nginx:nginx /work /work/www \
- && $PKGDEL $PKGS_TO_DEL \
- && $PKGCLEAN \
- && true
-
-USER nginx:nginx
-
-CMD ["nginx", "-g", "daemon off;"]
-
diff --git a/src/main/docker/zlib-deb.Dockerfile b/src/main/docker/zlib-deb.Dockerfile
deleted file mode 100644
index c5abaf6..0000000
--- a/src/main/docker/zlib-deb.Dockerfile
+++ /dev/null
@@ -1,49 +0,0 @@
-#
-# curl -sSL "https://git.hiddenalpha.ch/UnspecifiedGarbage.git/plain/src/main/docker/zlib-deb.Dockerfile" | docker build -f- . -t "zlib-deb:$(date +%Y%m%d)"
-#
-ARG PARENT_IMAGE=debian:9-slim
-FROM $PARENT_IMAGE
-
-ARG ZLIB_VERSION="1.2.11"
-ARG PKGS_TO_ADD="curl gcc make tar libc-dev ca-certificates vim"
-ARG PKGS_TO_DEL=""
-ARG PKG_INIT="apt-get update"
-ARG PKG_ADD="apt-get install -y --no-install-recommends"
-ARG PKG_DEL="apt-get purge"
-ARG PKG_CLEAN="apt-get clean"
-
-RUN true \
- && WORKDIR="/work" \
- && THEOLDPWD="$PWD" \
- # Prepare System
- && $PKG_INIT \
- && $PKG_ADD $PKGS_TO_ADD \
- # Prepare zlib
- && mkdir "${WORKDIR:?}" && cd "${WORKDIR:?}" \
- && mkdir tarballs tree build \
- && curl -sSL -o "tarballs/zlib-${ZLIB_VERSION}.tgz" "https://github.com/madler/zlib/archive/refs/tags/v${ZLIB_VERSION:?}.tar.gz" \
- && cd "${WORKDIR:?}/tree" \
- && tar --strip-components 1 -xzf "${WORKDIR:?}/tarballs/zlib-${ZLIB_VERSION:?}.tgz" \
- # Make zlib
- && ./configure --prefix="${WORKDIR:?}/build" \
- && make -e \
- && make install \
- && cp README "${WORKDIR}/build/." \
- && cd "${WORKDIR}/build" \
- && rm -rf lib/pkgconfig \
- && find -type f -not -name MD5SUM -exec md5sum -b {} + > MD5SUM \
- && tar --owner=0 --group=0 -cz * > "${WORKDIR:?}/tarballs/zlib-${ZLIB_VERSION:?}-debian.tgz" \
- && cd "${WORKDIR}" \
- && rm -rf "${WORKDIR:?}/tree" "${WORKDIR:?}/build" \
- # install zlib
- && mkdir -p /usr/local/ \
- && tar -C /usr/local -f "${WORKDIR:?}/tarballs/zlib-${ZLIB_VERSION:?}-debian.tgz" -x include lib \
- # cleanup
- && cd "${THEOLDPWD:?}" \
- && unset THEOLDPWD WORKDIR \
- && $PKG_DEL $PKGS_TO_DEL \
- && $PKG_CLEAN \
- && true
-
-WORKDIR /work
-
diff --git a/src/main/docker/zlib-mingw.Dockerfile b/src/main/docker/zlib-mingw.Dockerfile
deleted file mode 100644
index abaa241..0000000
--- a/src/main/docker/zlib-mingw.Dockerfile
+++ /dev/null
@@ -1,51 +0,0 @@
-#
-# curl -sSL "https://git.hiddenalpha.ch/UnspecifiedGarbage.git/plain/src/main/docker/zlib-mingw.Dockerfile" | docker build -f- . -t "zlib-deb:$(date +%Y%m%d)"
-#
-ARG PARENT_IMAGE=alpine:3.16.0
-FROM $PARENT_IMAGE
-
-ARG ZLIB_VERSION="1.2.11"
-ARG PKGS_TO_ADD="curl mingw-w64-gcc make tar ca-certificates"
-ARG PKGS_TO_DEL=""
-ARG PKG_INIT="true"
-ARG PKG_ADD="apk add "
-ARG PKG_DEL="apk del"
-ARG PKG_CLEAN="true"
-
-RUN true \
- && WORKDIR="/work" \
- && THEOLDPWD="$PWD" \
- # Prepare System
- && $PKG_INIT \
- && $PKG_ADD $PKGS_TO_ADD \
- # Prepare zlib
- && mkdir "${WORKDIR:?}" && cd "${WORKDIR:?}" \
- && mkdir tarballs tree build \
- && curl -sSL -o "tarballs/zlib-${ZLIB_VERSION}.tgz" "https://github.com/madler/zlib/archive/refs/tags/v${ZLIB_VERSION:?}.tar.gz" \
- && cd "${WORKDIR:?}/tree" \
- && tar --strip-components 1 -xzf "${WORKDIR:?}/tarballs/zlib-${ZLIB_VERSION:?}.tgz" \
- # Make zlib
- && sed -i "s;^PREFIX =.\*\$;;" win32/Makefile.gcc \
- && export DESTDIR=../build BINARY_PATH=/bin INCLUDE_PATH=/include LIBRARY_PATH=/lib \
- && make -e -fwin32/Makefile.gcc PREFIX=x86_64-w64-mingw32- \
- && make -e -fwin32/Makefile.gcc install PREFIX=x86_64-w64-mingw32- \
- && unset DESTDIR BINARY_PATH INCLUDE_PATH LIBRARY_PATH \
- && cp README ../build/. \
- && cd "${WORKDIR:?}/build" \
- && rm -rf lib/pkgconfig \
- && find -type f -not -name MD5SUM -exec md5sum -b {} + > MD5SUM \
- && tar --owner=0 --group=0 -cz * > "${WORKDIR:?}/tarballs/zlib-1.2.11-windoof.tgz" \
- && cd "${WORKDIR:?}" \
- && rm -rf "${WORKDIR:?}/tree" "${WORKDIR:?}/build" \
- # Install zlib
- && mkdir -p /usr/local/x86_64-w64-mingw32 \
- && tar -C /usr/x86_64-w64-mingw32 -f "${WORKDIR:?}/tarballs/zlib-1.2.11-windoof.tgz" -x include lib \
- && cd "${THEOLDPWD:?}" \
- && unset THEOLDPWD WORKDIR \
- && $PKG_DEL $PKGS_TO_DEL \
- && $PKG_CLEAN \
- && true
-
-WORKDIR /work
-
-
diff --git a/src/main/eagle b/src/main/eagle
new file mode 120000
index 0000000..f5160d6
--- /dev/null
+++ b/src/main/eagle
@@ -0,0 +1 @@
+C:/work/projects/isa-svc/eagle/.git/meins \ No newline at end of file
diff --git a/src/main/firefox/gaga-plugin/main.js b/src/main/firefox/gaga-plugin/main.js
index 2a5bbae..4447719 100644
--- a/src/main/firefox/gaga-plugin/main.js
+++ b/src/main/firefox/gaga-plugin/main.js
@@ -1,15 +1,10 @@
/*
- * For how to install see:
- *
- * "https://git.hiddenalpha.ch/UnspecifiedGarbage.git/tree/doc/note/firefox/firefox.txt"
+ * [How to install](UnspecifiedGarbage/doc/note/firefox/firefox.txt)
*/
;(function(){ try{
var NDEBUG = false;
var STATUS_INIT = 1;
- var STATUS_RUNNING = 2;
- var STATUS_DONE = 3;
- var STATUS_OBSOLETE = 4;
var NOOP = function(){};
var LOGERR = console.error.bind(console);
var N = null;
@@ -19,11 +14,10 @@
function main(){
var app = Object.seal({
ui: {},
- status: Object.seal({
- checklistBtn: STATUS_INIT,
- developmentBtn: STATUS_INIT,
- }),
lastClickEpochMs: 0,
+ wantChecklistExpanded: false,
+ wantDevelopmentExpanaded: false,
+ wantBigTemplateExpanded: false,
});
if( NDEBUG ){
setTimeout = window.setTimeout;
@@ -32,14 +26,16 @@
}else{ /* fix broken tooling */
setTimeout = setTimeoutWithCatch.bind(0, app);
logErrors = logErrorsImpl.bind(N, app);
- LOGDBG = console.debug.bind(console);
+ LOGDBG = console.debug.bind(console, "[gaga-plugin]");
}
document.addEventListener("DOMContentLoaded", logErrors.bind(N, onDOMContentLoaded, app));
+ scheduleNextStateCheck(app);
+ LOGDBG("gaga-plugin initialized");
}
function onDOMContentLoaded( app ){
- cleanupClutter(app);
+ LOGDBG("onDOMContentLoaded()");
attachDomObserver(app);
}
@@ -50,83 +46,58 @@
}
- function onDomHasChangedSomehow( app, changes, mutationObserver ){
- var nowEpochMs = Date.now();
- if( (app.lastClickEpochMs + 2000) > nowEpochMs ){
- LOGDBG("ignore, likely triggered by user.");
- return; }
- var needsReEval = false;
- for( var change of changes ){
- if( change.target.nodeName != "BUTTON" ) continue;
- var isAriaExpanded = (change.attributeName == "aria-expanded");
- var isChildAdded = (change.addedNodes.length > 0);
- var isChildRemoved = (change.removedNodes.length > 0);
- var isChildAddedOrRemoved = isChildAdded || isChildRemoved;
- if( !isAriaExpanded && !isChildAddedOrRemoved ) continue;
- if( isAriaExpanded ){
- LOGDBG("Suspicious, isExpanded: ", change.target);
- needsReEval = true; break;
- }
- if( !isChildAddedOrRemoved ) continue;
- var isBloatyChecklistBtnStillThere = document.body.contains(getBloatyChecklistBtn(app));
- if( !isBloatyChecklistBtnStillThere ){
- LOGDBG("Suspicious, btn lost");
- needsReEval = true; break;
- }
- var isBloatyDevelopmentBtnStillThere = document.body.contains(getBloatyDevelopmentBtn(app));
- if( !isBloatyDevelopmentBtnStillThere ){
- LOGDBG("Suspicious, btn lost");
- needsReEval = true; break;
- }
- }
- if( needsReEval ){
- LOGDBG("Change detected! Eval again");
- app.ui.bloatyChecklistBtn = null;
- app.ui.bloatyDevelopmentBtn = null;
- setTimeout(cleanupClutter, 42, app);
+ function scheduleNextStateCheck( app ){
+ //LOGDBG("scheduleNextStateCheck()");
+ if( app.stateCheckTimer ){
+ LOGDBG("Why is stateCheckTimer not zero?", app.stateCheckTimer);
}
+ app.stateCheckTimer = setTimeout(function(){
+ app.stateCheckTimer = null;
+ scheduleNextStateCheck(app);
+ performStateCheck(app);
+ }, 42);
}
- function cleanupClutter( app ){
- if( app.bloatyChecklistDone != STATUS_RUNNING ){
- app.bloatyChecklistDone = STATUS_OBSOLETE
- setTimeout(hideBloatyButton, 0, app, "checklistBtn");
- }
- if( app.bloatyDevelopmentDone != STATUS_RUNNING ){
- app.bloatyDevelopmentDone = STATUS_OBSOLETE;
- setTimeout(hideBloatyButton, 0, app, "developmentBtn");
- }
- if( app.bloatyDevelopmentDone != STATUS_RUNNING ){
- app.bloatyDevelopmentDone = STATUS_OBSOLETE;
- setTimeout(hideBloatyButton, 0, app, "bigTemplateBtn");
+ function performStateCheck( app ){
+ var buttons = [ "checklistBtn", "developmentBtn", "bigTemplateBtn" ];
+ var wantKey = [ "wantChecklistExpanded", "wantDevelopmentExpanaded", "wantBigTemplateExpanded" ];
+ for( var i = 0 ; i < buttons.length ; ++i ){
+ var btnKey = buttons[i];
+ var btnElem = getBloatyButton(app, btnKey);
+ if( !btnElem ) continue;
+ var isExpanded = isAriaBtnExpanded(app, btnElem)
+ var wantExpanded = app[wantKey[i]];
+ //LOGDBG(btnKey +" expanded is", isExpanded);
+ if( isExpanded && !wantExpanded ){
+ collapseAriaBtn(app, btnElem);
+ }
}
}
- function setLastClickTimeToNow( app ){ app.lastClickEpochMs = Date.now(); }
+ function onDomHasChangedSomehow( app, changes, mutationObserver ){
+ var nowEpochMs = Date.now();
+ LOGDBG("DOM Change detected!");
+ /*refresh dom refs so check will work on correct elems*/
+ Object.keys(app.ui).forEach(function( key ){
+ app.ui[key] = null;
+ });
+ }
- function hideBloatyButton( app, btnKey ){
- if( app.status[btnKey] == STATUS_DONE ){
- LOGDBG(btnKey +" now hidden");
- return; }
- app.status[btnKey] == STATUS_RUNNING;
- var btn = getBloatyButton(app, btnKey);
- do{
- if( !btn ){ LOGDBG(btnKey +" not found. DOM maybe not yet ready?"); break; }
- var isExpanded = isAriaBtnExpanded(app, btn);
- if( isExpanded === true ){
- LOGDBG(btnKey +".click()");
- btn.click();
- }else if( isExpanded === false ){
- app.status[btnKey] = STATUS_DONE;
- }else{
- throw Error("Neither true nor false "+ typeof(isExpanded) +" "+ isExpanded);
- }
- }while(0);
- /* try later */
- setTimeout(hideBloatyButton, 16, app, btnKey);
+ function onBloatyChecklistBtnMousedown( app ){
+ app.wantChecklistExpanded = !app.wantChecklistExpanded;
+ }
+
+
+ function onBloatyDevelopmentBtnMousedown( app ){
+ app.wantDevelopmentExpanaded = !app.wantDevelopmentExpanaded;
+ }
+
+
+ function onBloatyBigTemplateBtnMousedown( app ){
+ app.wantBigTemplateExpanded = !app.wantBigTemplateExpanded;
}
@@ -135,19 +106,22 @@
}else if( btnKey == "checklistBtn" ){
var selector = "button[aria-label=Checklists]";
var uiKey = "bloatyChecklistBtn";
+ var onMousedown = onBloatyChecklistBtnMousedown;
}else if( btnKey == "developmentBtn" ){
var selector = "button[aria-label=Development]";
var uiKey = "bloatyDevelopmentBtn";
+ var onMousedown = onBloatyDevelopmentBtnMousedown;
}else if( btnKey == "bigTemplateBtn" ){
var selector = "button[aria-label=BigTemplate]";
var uiKey = "bloatyBigTemplateBtn";
+ var onMousedown = onBloatyBigTemplateBtnMousedown;
}else{
throw Error(btnKey);
}
if( !app.ui[uiKey] ){
var btn = fetchUiRefOrNull(app, document, selector);
if( btn ){
- btn.addEventListener("mousedown", logErrors.bind(N, setLastClickTimeToNow, app));
+ btn.addEventListener("mousedown", logErrors.bind(N, onMousedown, app));
app.ui[uiKey] = btn;
}
}
@@ -155,6 +129,21 @@
}
+ function collapseAriaBtn( app, btnElem ){
+ do{
+ var isExpanded = isAriaBtnExpanded(app, btnElem);
+ if( isExpanded === true ){
+ LOGDBG("click()");
+ btnElem.click();
+ }else if( isExpanded === false ){
+ break;
+ }else{
+ throw Error("Neither true nor false "+ typeof(isExpanded) +" "+ isExpanded);
+ }
+ }while(0);
+ }
+
+
function isAriaBtnExpanded( app, btnElem ){
var value = btnElem.getAttribute("aria-expanded");
if( value === "true" ){
diff --git a/src/main/gimp/nek2023-scan2/arrange-pdf b/src/main/gimp/nek2023-scan2/arrange-pdf
new file mode 100644
index 0000000..e2d2c7b
--- /dev/null
+++ b/src/main/gimp/nek2023-scan2/arrange-pdf
@@ -0,0 +1,32 @@
+#!/bin/sh
+#
+# Once used to batch process some PDFs. This is NOT functional. It is only here
+# for reference purposes.
+#
+# scan.pdf
+# scan0001.pdf
+# scan0002.pdf
+# scan0003.pdf
+# scan0004.pdf
+# scan0005.pdf
+#
+
+true \
+ && mkdir scan scan0001 scan0002 scan0003 scan0004 scan0005 \
+ && pdfimages -all scan.pdf scan-tmp1/scan-img \
+ && pdfimages -all scan0001.pdf scan0001-tmp1/scan0001-img \
+ && pdfimages -all scan0002.pdf scan0002-tmp1/scan0002-img \
+ && pdfimages -all scan0003.pdf scan0003-tmp1/scan0003-img \
+ && pdfimages -all scan0004.pdf scan0004-tmp1/scan0004-img \
+ && pdfimages -all scan0005.pdf scan0005-tmp1/scan0005-img \
+ && X=nVXBcpswEL3nK7acRDuywUkPnvTCNErijmtnHJK7ArKiFAS1ZDf9+wosRB08GTligF28b/X27Y6MCqY/AzoDs5CQckFLpgAVQun2W7OCsS7rccmEVGOVUTlpn2NRchxF0eil5oFfbHxC7MQn1mwfOwOfQOgY0IvdMeCHqZ57AnvjxBoHQN8aB0DfGgfAD9d44Qk8d8aJ4gyAvuIMgL7iXDjjRKoDoC/VAdCX6ldnDKmG4cFxAZH1q62+k9y5BufsfEP/0KeijxX5K+DYOpnUEE8bp7lRztZCMkD6WaiZ+rFVOrneZr+E5Fe0lPOqqsHAu0RrQN8aH0yWENAT40La+pBi+hN0NJHUz21gd8zZOlyc4YsyugG0FgXDLzXjuKhoDrFLYd/hG6Srbg/noqyNapQzzJnGNNNix3BB/7JNs8l/6DayQ+OC7Vih+my3s/t0ebNKfuLHZP5AXNOi0aR5TAGuk/k9Abg0xOptf2DHowgO1iVwWpb0MEMb5TKY5vUpDgnuS8kquWMbjYXM2SvLW7m+LxePZJXiq1l6S1Z4sVwQ9+0umZM0JfiGLMgqSQmcT+xu6eqBQBAcamiHBym9MY3GtK6ZzJ3wAW6YjGrJg1692kTqt4hA0Z1xoekh6AoCm7mHtd01mbCJZE1zTSFOc0ujG2WYmttex0TJmfn/ZG1Xu1/fmVr0pZ2/2HIJ3cS/g2m33W/5eyt044dn/wA4gyeC \
+ && echo $X|base64 -d|inflate| gimp -nidfsc -b - \
+ && mkdir scan-out scan0001-out scan0002-out scan0003-out scan0004-out scan0005-out \
+ && mv -t scan-out/. scan-tmp1/*-gimp.png \
+ && mv -t scan0001-out/. scan0001-tmp1/*-gimp.png \
+ && mv -t scan0002-out/. scan0002-tmp1/*-gimp.png \
+ && mv -t scan0003-out/. scan0003-tmp1/*-gimp.png \
+ && mv -t scan0004-out/. scan0004-tmp1/*-gimp.png \
+ && mv -t scan0005-out/. scan0005-tmp1/*-gimp.png \
+
+
diff --git a/src/main/gimp/re-arrange-pdfs/re-arrange-pdfs.sh b/src/main/gimp/re-arrange-pdfs/re-arrange-pdfs.sh
new file mode 100644
index 0000000..68caf52
--- /dev/null
+++ b/src/main/gimp/re-arrange-pdfs/re-arrange-pdfs.sh
@@ -0,0 +1,24 @@
+#!/bin/sh
+#
+# Once used to batch process some PDFs. This is NOT functional. It is only here
+# for reference purposes.
+#
+
+true \
+ && pdftk \
+ A=scan.pdf \
+ B=scan0001.pdf \
+ shuffle A Bend-1 \
+ output all-pages.pdf \
+ && pdftk \
+ all-pages.pdf \
+ cat 1 3 5 7 8 9 11 12 13 14 15 16 17 18 19 20 21 22 23 24 \
+ output pages-with-content.pdf \
+ && mkdir pages-with-content-img \
+ && pdfimages -all pages-with-content.pdf pages-with-content-img/img \
+ && X=pZZNc9owEIbv+RVbn+RmBBZp0zLpxdM4CR0KGeLkrtiLUGrLLhIk/feVjS2HTCeHSoyAtfdZ7cc7BlKg+QjkBOwiUqkFL1EDKaQ27bVmBWNT1uMSpdJjnXHFxjUXqOmzNBuaVcqgMlSWYmw3jaJo9FSL4H9p5kVPvOgzL/qTF/3Ziz73or940V+96KkPzby0xry0xry0xry0xry0xry0xry0xry0xry0xl5pLQyPHnkQdXa1M7dKONNy7nu+5c/8sRh8Zf4ClHVGpgxM2jvNJjmupUIgZiP1TP/YaRNf7bJfUolLXqp5VdVg8T7QGsi3xgYbJQTyiEKqrkii0XyAPk2izKZ17B/VXR3Oz+ZLMr4FspYF0qcaBS0qngNzIbrP8A3pqjvgQpa17ZztJhVoKM+M3CMt+B/cNoe8olvPnqYF7rHQQ7Sb2V26vF7FP+lDPL9P3OSi0aR5mwJcxfO7BODCJlbvhh8dNorgaF2A4GXJjyO0Xi6CHd4Q4jjBQylWEnvcWkmoHF8wb9v1fbl4SFYpvZylN8mKLpaLxF27jedJmib0OlkkqzhN4GzSnZau7hMIguMeduIh2mztoCmva1S5a3xAm0xGtRLB0L3aepq3RKD53prQzBBMBUEXecDa6dpI1HpiM1xbiOt5l0YvZZja3b3+1ZQc7X8AbKfa331HteS01R/rcgmd4t9h2mMPR/7eSdPY4clfwj6IxA== \
+ && echo $X|base64 -d|inflate| gimp -nidfsc -b - \
+ && mkdir pages-image-adjusted \
+ && mv -t pages-image-adjusted/. pages-with-content-img/*gimp.png \
+
+
diff --git a/src/main/java/ch/hiddenalpha/unspecifiedgarbage/gateleenKludge/FailFastMetricsOptions.java b/src/main/java/ch/hiddenalpha/unspecifiedgarbage/gateleenKludge/FailFastMetricsOptions.java
new file mode 100644
index 0000000..c911061
--- /dev/null
+++ b/src/main/java/ch/hiddenalpha/unspecifiedgarbage/gateleenKludge/FailFastMetricsOptions.java
@@ -0,0 +1,35 @@
+package ch.hiddenalpha.unspecifiedgarbage.gateleenKludge;
+
+import io.vertx.core.json.JsonObject;
+import io.vertx.core.metrics.MetricsOptions;
+import io.vertx.core.spi.VertxMetricsFactory;
+
+
+public class FailFastMetricsOptions extends io.vertx.core.metrics.MetricsOptions {
+
+ private final String dbgMsg;
+
+ public FailFastMetricsOptions( String dbgMsg ){ this.dbgMsg = dbgMsg; }
+
+ public FailFastMetricsOptions(){ this(failCtor()); }
+
+ private FailFastMetricsOptions( MetricsOptions o ){ this(failCtor()); }
+
+ private FailFastMetricsOptions( JsonObject json ){ this(failCtor()); }
+
+ private static String failCtor(){ throw new IllegalStateException("Do NOT use this ctor!"); }
+
+ @Override public boolean isEnabled(){ throw new UnsupportedOperationException(dbgMsg); }
+
+ @Override public MetricsOptions setEnabled(boolean en){ throw new UnsupportedOperationException(dbgMsg); }
+
+ @Override public VertxMetricsFactory getFactory(){ throw new UnsupportedOperationException(dbgMsg); }
+
+ @Override public MetricsOptions setFactory( VertxMetricsFactory f ){ throw new UnsupportedOperationException(dbgMsg); }
+
+ @Override public JsonObject toJson(){ throw new UnsupportedOperationException(dbgMsg); }
+
+ @Override public String toString(){ throw new UnsupportedOperationException(dbgMsg); }
+
+}
+
diff --git a/src/main/java/ch/hiddenalpha/unspecifiedgarbage/gateleenKludge/FailFastVertxMetricsFactory.java b/src/main/java/ch/hiddenalpha/unspecifiedgarbage/gateleenKludge/FailFastVertxMetricsFactory.java
new file mode 100644
index 0000000..fa0d7e1
--- /dev/null
+++ b/src/main/java/ch/hiddenalpha/unspecifiedgarbage/gateleenKludge/FailFastVertxMetricsFactory.java
@@ -0,0 +1,27 @@
+package ch.hiddenalpha.unspecifiedgarbage.gateleenKludge;
+
+import io.vertx.core.VertxOptions;
+import io.vertx.core.impl.VertxBuilder;
+import io.vertx.core.json.JsonObject;
+import io.vertx.core.metrics.MetricsOptions;
+import io.vertx.core.spi.metrics.VertxMetrics;
+
+
+public class FailFastVertxMetricsFactory implements io.vertx.core.spi.VertxMetricsFactory {
+
+ private final String dbgMsg;
+
+ public FailFastVertxMetricsFactory(String dbgMsg ){ this.dbgMsg = dbgMsg; }
+
+ @Override public void init(VertxBuilder b) { throw new UnsupportedOperationException(dbgMsg); }
+
+ @Override public VertxMetrics metrics(VertxOptions o){ throw new UnsupportedOperationException(dbgMsg); }
+
+ @Override public MetricsOptions newOptions() { throw new UnsupportedOperationException(dbgMsg); }
+
+ @Override public MetricsOptions newOptions(MetricsOptions o) { throw new UnsupportedOperationException(dbgMsg); }
+
+ @Override public MetricsOptions newOptions(JsonObject j) { throw new UnsupportedOperationException(dbgMsg); }
+
+}
+
diff --git a/src/main/java/ch/hiddenalpha/unspecifiedgarbage/gateleenKludge/tmoutissue20240123/DelegateHttpServerRequest.java b/src/main/java/ch/hiddenalpha/unspecifiedgarbage/gateleenKludge/tmoutissue20240123/DelegateHttpServerRequest.java
new file mode 100644
index 0000000..aa4ad48
--- /dev/null
+++ b/src/main/java/ch/hiddenalpha/unspecifiedgarbage/gateleenKludge/tmoutissue20240123/DelegateHttpServerRequest.java
@@ -0,0 +1,394 @@
+package ch.hiddenalpha.unspecifiedgarbage.gateleenKludge.tmoutissue20240123;
+
+import io.netty.handler.codec.DecoderResult;
+import io.vertx.core.AsyncResult;
+import io.vertx.core.Future;
+import io.vertx.core.Handler;
+import io.vertx.core.MultiMap;
+import io.vertx.core.buffer.Buffer;
+import io.vertx.core.http.Cookie;
+import io.vertx.core.http.HttpConnection;
+import io.vertx.core.http.HttpFrame;
+import io.vertx.core.http.HttpMethod;
+import io.vertx.core.http.HttpServerFileUpload;
+import io.vertx.core.http.HttpServerRequest;
+import io.vertx.core.http.HttpServerResponse;
+import io.vertx.core.http.HttpVersion;
+import io.vertx.core.http.ServerWebSocket;
+import io.vertx.core.http.StreamPriority;
+import io.vertx.core.net.NetSocket;
+import io.vertx.core.net.SocketAddress;
+import io.vertx.core.streams.Pipe;
+import io.vertx.core.streams.WriteStream;
+
+import javax.net.ssl.SSLPeerUnverifiedException;
+import javax.net.ssl.SSLSession;
+import javax.security.cert.X509Certificate;
+import java.util.Map;
+import java.util.Set;
+
+public class DelegateHttpServerRequest implements io.vertx.core.http.HttpServerRequest {
+
+ private final io.vertx.core.http.HttpServerRequest delegate;
+ private final boolean isDebugging = true;
+
+ public DelegateHttpServerRequest(HttpServerRequest delegate) {
+ this.delegate = delegate;
+ }
+
+ private void breakpoint(){
+ try{
+ throw new UnsupportedOperationException();
+ }catch(UnsupportedOperationException ex){}
+ }
+
+ @Override
+ public HttpServerRequest exceptionHandler(Handler<Throwable> handler) {
+ if( isDebugging ) breakpoint();
+ return delegate.exceptionHandler(handler);
+ }
+
+ @Override
+ public HttpServerRequest handler(Handler<Buffer> handler) {
+ if( isDebugging ) breakpoint();
+ return delegate.handler(handler);
+ }
+
+ @Override
+ public HttpServerRequest pause() {
+ if( isDebugging ) breakpoint();
+ return delegate.pause();
+ }
+
+ @Override
+ public HttpServerRequest resume() {
+ if( isDebugging ) breakpoint();
+ return delegate.resume();
+ }
+
+ @Override
+ public HttpServerRequest fetch(long amount) {
+ if( isDebugging ) breakpoint();
+ return delegate.fetch(amount);
+ }
+
+ @Override
+ public HttpServerRequest endHandler(Handler<Void> endHandler) {
+ if( isDebugging ) breakpoint();
+ return delegate.endHandler(endHandler);
+ }
+
+ @Override
+ public HttpVersion version() {
+ if( isDebugging ) breakpoint();
+ return delegate.version();
+ }
+
+ @Override
+ public HttpMethod method() {
+ if( isDebugging ) breakpoint();
+ return delegate.method();
+ }
+
+ @Override
+ public boolean isSSL() {
+ if( isDebugging ) breakpoint();
+ return delegate.isSSL();
+ }
+
+ @Override
+ public String scheme() {
+ if( isDebugging ) breakpoint();
+ return delegate.scheme();
+ }
+
+ @Override
+ public String uri() {
+ if( isDebugging ) breakpoint();
+ return delegate.uri();
+ }
+
+ @Override
+ public String path() {
+ if( isDebugging ) breakpoint();
+ return delegate.path();
+ }
+
+ @Override
+ public String query() {
+ if( isDebugging ) breakpoint();
+ return delegate.query();
+ }
+
+ @Override
+ public String host() {
+ if( isDebugging ) breakpoint();
+ return delegate.host();
+ }
+
+ @Override
+ public long bytesRead() {
+ if( isDebugging ) breakpoint();
+ return delegate.bytesRead();
+ }
+
+ @Override
+ public HttpServerResponse response() {
+ if( isDebugging ) breakpoint();
+ return delegate.response();
+ }
+
+ @Override
+ public MultiMap headers() {
+ if( isDebugging ) breakpoint();
+ return delegate.headers();
+ }
+
+ @Override
+ public String getHeader(String headerName) {
+ if( isDebugging ) breakpoint();
+ return delegate.getHeader(headerName);
+ }
+
+ @Override
+ public String getHeader(CharSequence headerName) {
+ if( isDebugging ) breakpoint();
+ return delegate.getHeader(headerName);
+ }
+
+ @Override
+ public MultiMap params() {
+ if( isDebugging ) breakpoint();
+ return delegate.params();
+ }
+
+ @Override
+ public String getParam(String paramName) {
+ if( isDebugging ) breakpoint();
+ return delegate.getParam(paramName);
+ }
+
+ @Override
+ public String getParam(String paramName, String defaultValue) {
+ if( isDebugging ) breakpoint();
+ return delegate.getParam(paramName, defaultValue);
+ }
+
+ @Override
+ public SocketAddress remoteAddress() {
+ if( isDebugging ) breakpoint();
+ return delegate.remoteAddress();
+ }
+
+ @Override
+ public SocketAddress localAddress() {
+ if( isDebugging ) breakpoint();
+ return delegate.localAddress();
+ }
+
+ @Override
+ public SSLSession sslSession() {
+ if( isDebugging ) breakpoint();
+ return delegate.sslSession();
+ }
+
+ @Override
+ public X509Certificate[] peerCertificateChain() throws SSLPeerUnverifiedException {
+ if( isDebugging ) breakpoint();
+ return delegate.peerCertificateChain();
+ }
+
+ @Override
+ public String absoluteURI() {
+ if( isDebugging ) breakpoint();
+ return delegate.absoluteURI();
+ }
+
+ @Override
+ public HttpServerRequest bodyHandler(Handler<Buffer> bodyHandler) {
+ if( isDebugging ) breakpoint();
+ return delegate.bodyHandler(bodyHandler);
+ }
+
+ @Override
+ public HttpServerRequest body(Handler<AsyncResult<Buffer>> handler) {
+ if( isDebugging ) breakpoint();
+ return delegate.body(handler);
+ }
+
+ @Override
+ public Future<Buffer> body() {
+ if( isDebugging ) breakpoint();
+ return delegate.body();
+ }
+
+ @Override
+ public void end(Handler<AsyncResult<Void>> handler) {
+ if( isDebugging ) breakpoint();
+ delegate.end(handler);
+ }
+
+ @Override
+ public Future<Void> end() {
+ if( isDebugging ) breakpoint();
+ return delegate.end();
+ }
+
+ @Override
+ public void toNetSocket(Handler<AsyncResult<NetSocket>> handler) {
+ if( isDebugging ) breakpoint();
+ delegate.toNetSocket(handler);
+ }
+
+ @Override
+ public Future<NetSocket> toNetSocket() {
+ if( isDebugging ) breakpoint();
+ return delegate.toNetSocket();
+ }
+
+ @Override
+ public HttpServerRequest setExpectMultipart(boolean expect) {
+ if( isDebugging ) breakpoint();
+ return delegate.setExpectMultipart(expect);
+ }
+
+ @Override
+ public boolean isExpectMultipart() {
+ if( isDebugging ) breakpoint();
+ return delegate.isExpectMultipart();
+ }
+
+ @Override
+ public HttpServerRequest uploadHandler(Handler<HttpServerFileUpload> uploadHandler) {
+ if( isDebugging ) breakpoint();
+ return delegate.uploadHandler(uploadHandler);
+ }
+
+ @Override
+ public MultiMap formAttributes() {
+ if( isDebugging ) breakpoint();
+ return delegate.formAttributes();
+ }
+
+ @Override
+ public String getFormAttribute(String attributeName) {
+ if( isDebugging ) breakpoint();
+ return delegate.getFormAttribute(attributeName);
+ }
+
+ @Override
+ public int streamId() {
+ if( isDebugging ) breakpoint();
+ return delegate.streamId();
+ }
+
+ @Override
+ public void toWebSocket(Handler<AsyncResult<ServerWebSocket>> handler) {
+ if( isDebugging ) breakpoint();
+ delegate.toWebSocket(handler);
+ }
+
+ @Override
+ public Future<ServerWebSocket> toWebSocket() {
+ if( isDebugging ) breakpoint();
+ return delegate.toWebSocket();
+ }
+
+ @Override
+ public boolean isEnded() {
+ if( isDebugging ) breakpoint();
+ return delegate.isEnded();
+ }
+
+ @Override
+ public HttpServerRequest customFrameHandler(Handler<HttpFrame> handler) {
+ if( isDebugging ) breakpoint();
+ return delegate.customFrameHandler(handler);
+ }
+
+ @Override
+ public HttpConnection connection() {
+ if( isDebugging ) breakpoint();
+ return delegate.connection();
+ }
+
+ @Override
+ public StreamPriority streamPriority() {
+ if( isDebugging ) breakpoint();
+ return delegate.streamPriority();
+ }
+
+ @Override
+ public HttpServerRequest streamPriorityHandler(Handler<StreamPriority> handler) {
+ if( isDebugging ) breakpoint();
+ return delegate.streamPriorityHandler(handler);
+ }
+
+ @Override
+ public DecoderResult decoderResult() {
+ if( isDebugging ) breakpoint();
+ return delegate.decoderResult();
+ }
+
+ @Override
+ public Cookie getCookie(String name) {
+ if( isDebugging ) breakpoint();
+ return delegate.getCookie(name);
+ }
+
+ @Override
+ public Cookie getCookie(String name, String domain, String path) {
+ if( isDebugging ) breakpoint();
+ return delegate.getCookie(name, domain, path);
+ }
+
+ @Override
+ public int cookieCount() {
+ if( isDebugging ) breakpoint();
+ return delegate.cookieCount();
+ }
+
+ @Override
+ @Deprecated
+ public Map<String, Cookie> cookieMap() {
+ if( isDebugging ) breakpoint();
+ return delegate.cookieMap();
+ }
+
+ @Override
+ public Set<Cookie> cookies(String name) {
+ if( isDebugging ) breakpoint();
+ return delegate.cookies(name);
+ }
+
+ @Override
+ public Set<Cookie> cookies() {
+ if( isDebugging ) breakpoint();
+ return delegate.cookies();
+ }
+
+ @Override
+ public HttpServerRequest routed(String route) {
+ if( isDebugging ) breakpoint();
+ return delegate.routed(route);
+ }
+
+ @Override
+ public Pipe<Buffer> pipe() {
+ if( isDebugging ) breakpoint();
+ return delegate.pipe();
+ }
+
+ @Override
+ public Future<Void> pipeTo(WriteStream<Buffer> dst) {
+ if( isDebugging ) breakpoint();
+ return delegate.pipeTo(dst);
+ }
+
+ @Override
+ public void pipeTo(WriteStream<Buffer> dst, Handler<AsyncResult<Void>> handler) {
+ if( isDebugging ) breakpoint();
+ delegate.pipeTo(dst, handler);
+ }
+
+}
diff --git a/src/main/java/ch/hiddenalpha/unspecifiedgarbage/gateleenKludge/tmoutissue20240123/DelegateVertxHttpServerRequestInternal.java b/src/main/java/ch/hiddenalpha/unspecifiedgarbage/gateleenKludge/tmoutissue20240123/DelegateVertxHttpServerRequestInternal.java
new file mode 100644
index 0000000..92fe3fc
--- /dev/null
+++ b/src/main/java/ch/hiddenalpha/unspecifiedgarbage/gateleenKludge/tmoutissue20240123/DelegateVertxHttpServerRequestInternal.java
@@ -0,0 +1,475 @@
+package ch.hiddenalpha.unspecifiedgarbage.gateleenKludge.tmoutissue20240123;
+
+import io.netty.handler.codec.DecoderResult;
+import io.vertx.core.AsyncResult;
+import io.vertx.core.Context;
+import io.vertx.core.Future;
+import io.vertx.core.Handler;
+import io.vertx.core.MultiMap;
+import io.vertx.core.buffer.Buffer;
+import io.vertx.core.http.Cookie;
+import io.vertx.core.http.HttpConnection;
+import io.vertx.core.http.HttpFrame;
+import io.vertx.core.http.HttpMethod;
+import io.vertx.core.http.HttpServerFileUpload;
+import io.vertx.core.http.HttpServerRequest;
+import io.vertx.core.http.HttpServerResponse;
+import io.vertx.core.http.HttpVersion;
+import io.vertx.core.http.ServerWebSocket;
+import io.vertx.core.http.StreamPriority;
+import io.vertx.core.http.impl.HttpServerRequestInternal;
+import io.vertx.core.net.NetSocket;
+import io.vertx.core.net.SocketAddress;
+import io.vertx.core.streams.Pipe;
+import io.vertx.core.streams.WriteStream;
+import org.slf4j.Logger;
+
+import javax.net.ssl.SSLPeerUnverifiedException;
+import javax.net.ssl.SSLSession;
+import javax.security.cert.X509Certificate;
+import java.util.Map;
+import java.util.Set;
+
+import static org.slf4j.LoggerFactory.getLogger;
+
+public class DelegateVertxHttpServerRequestInternal implements HttpServerRequestInternal {
+
+ private static final Logger log = getLogger(DelegateVertxHttpServerRequestInternal.class);
+ private final HttpServerRequestInternal delegate;
+ private final boolean isDebugging = true;
+ private final String dbgHint;
+
+ public DelegateVertxHttpServerRequestInternal(String debugHint, HttpServerRequest delegate) {
+ log.trace("{}: new DelegateVertxHttpServerRequestInternal()", debugHint);
+ this.delegate = (HttpServerRequestInternal) delegate;
+ this.dbgHint = debugHint;
+ }
+
+ private void breakpoint(){
+ try{
+ throw new UnsupportedOperationException();
+ }catch(UnsupportedOperationException ex){}
+ }
+
+ @Override
+ public HttpServerRequest exceptionHandler(Handler<Throwable> handler) {
+ log.trace("{}: exceptionHandler(Hdlr<Ex>)", dbgHint);
+ if( isDebugging ) breakpoint();
+ return delegate.exceptionHandler(handler);
+ }
+
+ @Override
+ public HttpServerRequest handler(Handler<Buffer> handler) {
+ log.trace("{}: handler(Hdlr<Buf>)", dbgHint);
+ if( isDebugging ) breakpoint();
+ return delegate.handler(handler);
+ }
+
+ @Override
+ public HttpServerRequest pause() {
+ log.trace("{}: pause()", dbgHint);
+ if( isDebugging ) breakpoint();
+ return delegate.pause();
+ }
+
+ @Override
+ public HttpServerRequest resume() {
+ log.trace("{}: resume()", dbgHint);
+ if( isDebugging ) breakpoint();
+ return delegate.resume();
+ }
+
+ @Override
+ public HttpServerRequest fetch(long amount) {
+ log.trace("{}: fetch({})", dbgHint, amount);
+ if( isDebugging ) breakpoint();
+ return delegate.fetch(amount);
+ }
+
+ @Override
+ public HttpServerRequest endHandler(Handler<Void> endHandler) {
+ log.trace("{}: endHandler(Hdlr)", dbgHint);
+ if( isDebugging ) breakpoint();
+ return delegate.endHandler(endHandler);
+ }
+
+ @Override
+ public HttpVersion version() {
+ log.trace("{}: version()", dbgHint);
+ if( isDebugging ) breakpoint();
+ return delegate.version();
+ }
+
+ @Override
+ public HttpMethod method() {
+ log.trace("{}: method()", dbgHint);
+ if( isDebugging ) breakpoint();
+ return delegate.method();
+ }
+
+ @Override
+ public boolean isSSL() {
+ log.trace("{}: isSSL()", dbgHint);
+ if( isDebugging ) breakpoint();
+ return delegate.isSSL();
+ }
+
+ @Override
+ public String scheme() {
+ log.trace("{}: scheme()", dbgHint);
+ if( isDebugging ) breakpoint();
+ return delegate.scheme();
+ }
+
+ @Override
+ public String uri() {
+ log.trace("{}: uri()", dbgHint);
+ if( isDebugging ) breakpoint();
+ return delegate.uri();
+ }
+
+ @Override
+ public String path() {
+ log.trace("{}: path()", dbgHint);
+ if( isDebugging ) breakpoint();
+ return delegate.path();
+ }
+
+ @Override
+ public String query() {
+ log.trace("{}: query()", dbgHint);
+ if( isDebugging ) breakpoint();
+ return delegate.query();
+ }
+
+ @Override
+ public String host() {
+ log.trace("{}: host()", dbgHint);
+ if( isDebugging ) breakpoint();
+ return delegate.host();
+ }
+
+ @Override
+ public long bytesRead() {
+ log.trace("{}: bytesRead()", dbgHint);
+ if( isDebugging ) breakpoint();
+ return delegate.bytesRead();
+ }
+
+ @Override
+ public HttpServerResponse response() {
+ log.trace("{}: response()", dbgHint);
+ if( isDebugging ) breakpoint();
+ return delegate.response();
+ }
+
+ @Override
+ public MultiMap headers() {
+ log.trace("{}: headers()", dbgHint);
+ if( isDebugging ) breakpoint();
+ return delegate.headers();
+ }
+
+ @Override
+ public String getHeader(String headerName) {
+ log.trace("{}: getHeader(\"{}\")", dbgHint, headerName);
+ if( isDebugging ) breakpoint();
+ return delegate.getHeader(headerName);
+ }
+
+ @Override
+ public String getHeader(CharSequence headerName) {
+ log.trace("{}: getHeader(\"{}\")", dbgHint, headerName);
+ if( isDebugging ) breakpoint();
+ return delegate.getHeader(headerName);
+ }
+
+ @Override
+ public MultiMap params() {
+ log.trace("{}: params()", dbgHint);
+ if( isDebugging ) breakpoint();
+ return delegate.params();
+ }
+
+ @Override
+ public String getParam(String paramName) {
+ log.trace("{}: getParam(\"{}\")", dbgHint, paramName);
+ if( isDebugging ) breakpoint();
+ return delegate.getParam(paramName);
+ }
+
+ @Override
+ public String getParam(String paramName, String defaultValue) {
+ log.trace("{}: getParam(\"{}\", \"{}\")", dbgHint, paramName, defaultValue);
+ if( isDebugging ) breakpoint();
+ return delegate.getParam(paramName, defaultValue);
+ }
+
+ @Override
+ public SocketAddress remoteAddress() {
+ log.trace("{}: remoteAddress()", dbgHint);
+ if( isDebugging ) breakpoint();
+ return delegate.remoteAddress();
+ }
+
+ @Override
+ public SocketAddress localAddress() {
+ log.trace("{}: localAddress()", dbgHint);
+ if( isDebugging ) breakpoint();
+ return delegate.localAddress();
+ }
+
+ @Override
+ public SSLSession sslSession() {
+ log.trace("{}: sslSession()", dbgHint);
+ if( isDebugging ) breakpoint();
+ return delegate.sslSession();
+ }
+
+ @Override
+ public X509Certificate[] peerCertificateChain() throws SSLPeerUnverifiedException {
+ log.trace("{}: peerCertificateChain()", dbgHint);
+ if( isDebugging ) breakpoint();
+ return delegate.peerCertificateChain();
+ }
+
+ @Override
+ public String absoluteURI() {
+ log.trace("{}: absoluteURI()", dbgHint);
+ if( isDebugging ) breakpoint();
+ return delegate.absoluteURI();
+ }
+
+ @Override
+ public HttpServerRequest bodyHandler(Handler<Buffer> bodyHandler) {
+ log.trace("{}: bodyHandler(Hdlr<Buf>)", dbgHint);
+ if( isDebugging ) breakpoint();
+ return delegate.bodyHandler(bodyHandler);
+ }
+
+ @Override
+ public HttpServerRequest body(Handler<AsyncResult<Buffer>> handler) {
+ log.trace("{}: body(Hdlr)", dbgHint);
+ if( isDebugging ) breakpoint();
+ return delegate.body(handler);
+ }
+
+ @Override
+ public Future<Buffer> body() {
+ log.trace("{}: body(void)", dbgHint);
+ if( isDebugging ) breakpoint();
+ return delegate.body();
+ }
+
+ @Override
+ public void end(Handler<AsyncResult<Void>> handler) {
+ log.trace("{}: end(Hdlr)", dbgHint);
+ if( isDebugging ) breakpoint();
+ delegate.end(handler);
+ }
+
+ @Override
+ public Future<Void> end() {
+ log.trace("{}: end(void)", dbgHint);
+ if( isDebugging ) breakpoint();
+ return delegate.end();
+ }
+
+ @Override
+ public void toNetSocket(Handler<AsyncResult<NetSocket>> handler) {
+ log.trace("{}: toNetSocket(Hdlr)", dbgHint);
+ if( isDebugging ) breakpoint();
+ delegate.toNetSocket(handler);
+ }
+
+ @Override
+ public Future<NetSocket> toNetSocket() {
+ log.trace("{}: toNetSocket(void)", dbgHint);
+ if( isDebugging ) breakpoint();
+ return delegate.toNetSocket();
+ }
+
+ @Override
+ public HttpServerRequest setExpectMultipart(boolean expect) {
+        log.trace("{}: setExpectMultipart({})", dbgHint, expect);
+ if( isDebugging ) breakpoint();
+ return delegate.setExpectMultipart(expect);
+ }
+
+ @Override
+ public boolean isExpectMultipart() {
+ log.trace("{}: isExpectMultipart()", dbgHint);
+ if( isDebugging ) breakpoint();
+ return delegate.isExpectMultipart();
+ }
+
+ @Override
+ public HttpServerRequest uploadHandler(Handler<HttpServerFileUpload> uploadHandler) {
+ log.trace("{}: uploadHandler(Hdlr)", dbgHint);
+ if( isDebugging ) breakpoint();
+ return delegate.uploadHandler(uploadHandler);
+ }
+
+ @Override
+ public MultiMap formAttributes() {
+ log.trace("{}: formAttributes()", dbgHint);
+ if( isDebugging ) breakpoint();
+ return delegate.formAttributes();
+ }
+
+ @Override
+ public String getFormAttribute(String attributeName) {
+ log.trace("{}: getFormAttribute(\"{}\")", dbgHint, attributeName);
+ if( isDebugging ) breakpoint();
+ return delegate.getFormAttribute(attributeName);
+ }
+
+ @Override
+ public int streamId() {
+ log.trace("{}: streamId()", dbgHint);
+ if( isDebugging ) breakpoint();
+ return delegate.streamId();
+ }
+
+ @Override
+ public void toWebSocket(Handler<AsyncResult<ServerWebSocket>> handler) {
+ log.trace("{}: toWebSocket(Hdlr)", dbgHint);
+ if( isDebugging ) breakpoint();
+ delegate.toWebSocket(handler);
+ }
+
+ @Override
+ public Future<ServerWebSocket> toWebSocket() {
+        log.trace("{}: toWebSocket(void)", dbgHint);
+ if( isDebugging ) breakpoint();
+ return delegate.toWebSocket();
+ }
+
+ @Override
+ public boolean isEnded() {
+ log.trace("{}: isEnded()", dbgHint);
+ if( isDebugging ) breakpoint();
+ return delegate.isEnded();
+ }
+
+ @Override
+ public HttpServerRequest customFrameHandler(Handler<HttpFrame> handler) {
+ log.trace("{}: customFrameHandler(Hdlr)", dbgHint);
+ if( isDebugging ) breakpoint();
+ return delegate.customFrameHandler(handler);
+ }
+
+ @Override
+ public HttpConnection connection() {
+ log.trace("{}: connection()", dbgHint);
+ if( isDebugging ) breakpoint();
+ return delegate.connection();
+ }
+
+ @Override
+ public StreamPriority streamPriority() {
+ log.trace("{}: streamPriority()", dbgHint);
+ if( isDebugging ) breakpoint();
+ return delegate.streamPriority();
+ }
+
+ @Override
+ public HttpServerRequest streamPriorityHandler(Handler<StreamPriority> handler) {
+ log.trace("{}: streamPriorityHandler(Hdlr)", dbgHint);
+ if( isDebugging ) breakpoint();
+ return delegate.streamPriorityHandler(handler);
+ }
+
+ @Override
+ public DecoderResult decoderResult() {
+ log.trace("{}: decoderResult()", dbgHint);
+ if( isDebugging ) breakpoint();
+ return delegate.decoderResult();
+ }
+
+ @Override
+ public Cookie getCookie(String name) {
+ log.trace("{}: getCookie(\"{}\")", dbgHint, name);
+ if( isDebugging ) breakpoint();
+ return delegate.getCookie(name);
+ }
+
+ @Override
+ public Cookie getCookie(String name, String domain, String path) {
+ log.trace("{}: getCookie(\"{}\", Str, Str)", dbgHint, name);
+ if( isDebugging ) breakpoint();
+ return delegate.getCookie(name, domain, path);
+ }
+
+ @Override
+ public int cookieCount() {
+ log.trace("{}: cookieCount()", dbgHint);
+ if( isDebugging ) breakpoint();
+ return delegate.cookieCount();
+ }
+
+ @Override
+ @Deprecated
+ public Map<String, Cookie> cookieMap() {
+ log.trace("{}: cookieMap()", dbgHint);
+ if( isDebugging ) breakpoint();
+ return delegate.cookieMap();
+ }
+
+ @Override
+ public Set<Cookie> cookies(String name) {
+ log.trace("{}: cookies(\"{}\")", dbgHint, name);
+ if( isDebugging ) breakpoint();
+ return delegate.cookies(name);
+ }
+
+ @Override
+ public Set<Cookie> cookies() {
+ log.trace("{}: cookies(void)", dbgHint);
+ if( isDebugging ) breakpoint();
+ return delegate.cookies();
+ }
+
+ @Override
+ public HttpServerRequest routed(String route) {
+ log.trace("{}: routed(\"{}\")", dbgHint, route);
+ if( isDebugging ) breakpoint();
+ return delegate.routed(route);
+ }
+
+ @Override
+ public Pipe<Buffer> pipe() {
+ log.trace("{}: pipe()", dbgHint);
+ if( isDebugging ) breakpoint();
+ return delegate.pipe();
+ }
+
+ @Override
+ public Future<Void> pipeTo(WriteStream<Buffer> dst) {
+ log.trace("{}: pipeTo(WrStrm<Buf>)", dbgHint);
+ if( isDebugging ) breakpoint();
+ return delegate.pipeTo(dst);
+ }
+
+ @Override
+ public void pipeTo(WriteStream<Buffer> dst, Handler<AsyncResult<Void>> handler) {
+ log.trace("{}: pipeTo(WrStrm<Buf>,Hdlr)", dbgHint);
+ if( isDebugging ) breakpoint();
+ delegate.pipeTo(dst, handler);
+ }
+
+ @Override
+ public Context context() {
+ log.trace("{}: context()", dbgHint);
+ if( isDebugging ) breakpoint();
+ return delegate.context();
+ }
+
+ @Override
+ public Object metric() {
+ log.trace("{}: metric()", dbgHint);
+ if( isDebugging ) breakpoint();
+ return delegate.metric();
+ }
+
+}
diff --git a/src/main/java/ch/hiddenalpha/unspecifiedgarbage/gateleenKludge/tmoutissue20240123/DelegateVertxHttpServerResponse.java b/src/main/java/ch/hiddenalpha/unspecifiedgarbage/gateleenKludge/tmoutissue20240123/DelegateVertxHttpServerResponse.java
new file mode 100644
index 0000000..87ce5a9
--- /dev/null
+++ b/src/main/java/ch/hiddenalpha/unspecifiedgarbage/gateleenKludge/tmoutissue20240123/DelegateVertxHttpServerResponse.java
@@ -0,0 +1,111 @@
+package ch.hiddenalpha.unspecifiedgarbage.gateleenKludge.tmoutissue20240123;
+
+import io.vertx.core.AsyncResult;
+import io.vertx.core.Future;
+import io.vertx.core.Handler;
+import io.vertx.core.MultiMap;
+import io.vertx.core.buffer.Buffer;
+import io.vertx.core.http.Cookie;
+import io.vertx.core.http.HttpFrame;
+import io.vertx.core.http.HttpMethod;
+import io.vertx.core.http.HttpServerResponse;
+import io.vertx.core.http.StreamPriority;
+import io.vertx.core.streams.ReadStream;
+import org.slf4j.Logger;
+
+import java.util.Set;
+
+import static org.slf4j.LoggerFactory.getLogger;
+
+public class DelegateVertxHttpServerResponse implements HttpServerResponse {
+
+ private static final Logger log = getLogger(DelegateVertxHttpServerResponse.class);
+ private final HttpServerResponse delegate;
+ private final String dbgHint;
+
+ public DelegateVertxHttpServerResponse(String debugHint, HttpServerResponse delegate) {
+ this.dbgHint = debugHint;
+ this.delegate = delegate;
+ }
+
+ @Override public HttpServerResponse exceptionHandler(Handler<Throwable> handler) { log.trace("{}: exceptionHandler()", dbgHint); return delegate.exceptionHandler(handler); }
+ @Override public HttpServerResponse setWriteQueueMaxSize(int maxSize) { log.trace("{}: setWriteQueueMaxSize()", dbgHint); return delegate.setWriteQueueMaxSize(maxSize); }
+ @Override public HttpServerResponse drainHandler(Handler<Void> handler) { log.trace("{}: drainHandler()", dbgHint); return delegate.drainHandler(handler); }
+ @Override public int getStatusCode() { log.trace("{}: getStatusCode()", dbgHint); return delegate.getStatusCode(); }
+ @Override public HttpServerResponse setStatusCode(int statusCode) { log.trace("{}: setStatusCode()", dbgHint); return delegate.setStatusCode(statusCode); }
+ @Override public String getStatusMessage() { log.trace("{}: getStatusMessage()", dbgHint); return delegate.getStatusMessage(); }
+ @Override public HttpServerResponse setStatusMessage(String statusMessage) { log.trace("{}: setStatusMessage()", dbgHint); return delegate.setStatusMessage(statusMessage); }
+ @Override public HttpServerResponse setChunked(boolean chunked) { log.trace("{}: setChunked()", dbgHint); return delegate.setChunked(chunked); }
+ @Override public boolean isChunked() { log.trace("{}: isChunked()", dbgHint); return delegate.isChunked(); }
+ @Override public MultiMap headers() { log.trace("{}: headers()", dbgHint); return delegate.headers(); }
+ @Override public HttpServerResponse putHeader(String name, String value) { log.trace("{}: putHeader(Str,Str)", dbgHint); return delegate.putHeader(name, value); }
+ @Override public HttpServerResponse putHeader(CharSequence name, CharSequence value) { log.trace("{}: putHeader(ChrSeq,ChrSeq)", dbgHint); return delegate.putHeader(name, value); }
+ @Override public HttpServerResponse putHeader(String name, Iterable<String> values) { log.trace("{}: putHeader(Str,Iter<Str>)", dbgHint); return delegate.putHeader(name, values); }
+ @Override public HttpServerResponse putHeader(CharSequence name, Iterable<CharSequence> values) { log.trace("{}: putHeader(ChrSeq,Iter<ChrSeq>)", dbgHint); return delegate.putHeader(name, values); }
+ @Override public MultiMap trailers() { log.trace("{}: trailers()", dbgHint); return delegate.trailers(); }
+ @Override public HttpServerResponse putTrailer(String name, String value) { log.trace("{}: putTrailer(Str,Str)", dbgHint); return delegate.putTrailer(name, value); }
+ @Override public HttpServerResponse putTrailer(CharSequence name, CharSequence value) { log.trace("{}: putTrailer(ChrSeq,ChrSeq)", dbgHint); return delegate.putTrailer(name, value); }
+ @Override public HttpServerResponse putTrailer(String name, Iterable<String> values) { log.trace("{}: putTrailer(Str,Iter<Str>)", dbgHint); return delegate.putTrailer(name, values); }
+ @Override public HttpServerResponse putTrailer(CharSequence name, Iterable<CharSequence> value) { log.trace("{}: putTrailer(ChrSeq,Iter<ChrSeq>)", dbgHint); return delegate.putTrailer(name, value); }
+ @Override public HttpServerResponse closeHandler(Handler<Void> handler) { log.trace("{}: closeHandler()", dbgHint); return delegate.closeHandler(handler); }
+ @Override public HttpServerResponse endHandler(Handler<Void> handler) { log.trace("{}: endHandler()", dbgHint); return delegate.endHandler(handler); }
+ @Override public Future<Void> write(String chunk, String enc) { log.trace("{}: write(Str,Str)", dbgHint); return delegate.write(chunk, enc); }
+ @Override public void write(String chunk, String enc, Handler<AsyncResult<Void>> handler) { log.trace("{}: write(Str,Str,Hdlr)", dbgHint); delegate.write(chunk, enc, handler); }
+ @Override public Future<Void> write(String chunk) { log.trace("{}: write(Str)", dbgHint); return delegate.write(chunk); }
+ @Override public void write(String chunk, Handler<AsyncResult<Void>> handler) { log.trace("{}: write(Str,Hdlr)", dbgHint); delegate.write(chunk, handler); }
+ @Override public HttpServerResponse writeContinue() { log.trace("{}: writeContinue()", dbgHint); return delegate.writeContinue(); }
+ @Override public Future<Void> end(String chunk) { log.trace("{}: end(Str)", dbgHint); return delegate.end(chunk); }
+ @Override public void end(String chunk, Handler<AsyncResult<Void>> handler) { log.trace("{}: end(Str,Hdlr)", dbgHint); delegate.end(chunk, handler); }
+ @Override public Future<Void> end(String chunk, String enc) { log.trace("{}: end(Str,Str)", dbgHint); return delegate.end(chunk, enc); }
+ @Override public void end(String chunk, String enc, Handler<AsyncResult<Void>> handler) { log.trace("{}: end(Str,Str,Hdlr)", dbgHint); delegate.end(chunk, enc, handler); }
+ @Override public Future<Void> end(Buffer chunk) { log.trace("{}: end(Buf)", dbgHint); return delegate.end(chunk); }
+ @Override public void end(Buffer chunk, Handler<AsyncResult<Void>> handler) { log.trace("{}: end(Buf,Hdlr)", dbgHint); delegate.end(chunk, handler); }
+ @Override public Future<Void> end() { log.trace("{}: end(void)", dbgHint); return delegate.end(); }
+ @Override public void send(Handler<AsyncResult<Void>> handler) { log.trace("{}: send(Hdlr)", dbgHint); delegate.send(handler); }
+ @Override public Future<Void> send() { log.trace("{}: send(void)", dbgHint); return delegate.send(); }
+ @Override public void send(String body, Handler<AsyncResult<Void>> handler) { log.trace("{}: send(Str,Hdlr)", dbgHint); delegate.send(body, handler); }
+ @Override public Future<Void> send(String body) { log.trace("{}: send(Str)", dbgHint); return delegate.send(body); }
+ @Override public void send(Buffer body, Handler<AsyncResult<Void>> handler) { log.trace("{}: send(Buf,Hdlr)", dbgHint); delegate.send(body, handler); }
+ @Override public Future<Void> send(Buffer body) { log.trace("{}: send(Buf)", dbgHint); return delegate.send(body); }
+ @Override public void send(ReadStream<Buffer> body, Handler<AsyncResult<Void>> handler) { log.trace("{}: send(RdStr<Buf>,Hdlr)", dbgHint); delegate.send(body, handler); }
+ @Override public Future<Void> send(ReadStream<Buffer> body) { log.trace("{}: send(RdStr<Buf>)", dbgHint); return delegate.send(body); }
+ @Override public Future<Void> sendFile(String filename) { log.trace("{}: sendFile(Str)", dbgHint); return delegate.sendFile(filename); }
+ @Override public Future<Void> sendFile(String filename, long offset) { log.trace("{}: sendFile(Str,lng)", dbgHint); return delegate.sendFile(filename, offset); }
+ @Override public Future<Void> sendFile(String filename, long offset, long length) { log.trace("{}: sendFile(Str,lng,lng)", dbgHint); return delegate.sendFile(filename, offset, length); }
+ @Override public HttpServerResponse sendFile(String filename, Handler<AsyncResult<Void>> resultHandler) { log.trace("{}: sendFile(Str,Hdlr)", dbgHint); return delegate.sendFile(filename, resultHandler); }
+ @Override public HttpServerResponse sendFile(String filename, long offset, Handler<AsyncResult<Void>> resultHandler) { log.trace("{}: sendFile(Str,lng,Hdlr)", dbgHint); return delegate.sendFile(filename, offset, resultHandler); }
+ @Override public HttpServerResponse sendFile(String filename, long offset, long length, Handler<AsyncResult<Void>> resultHandler) { log.trace("{}: sendFile(Str,lng,lng,Hdlr)", dbgHint); return delegate.sendFile(filename, offset, length, resultHandler); }
+ @Override public void close() { log.trace("{}: close()", dbgHint); delegate.close(); }
+ @Override public boolean ended() { log.trace("{}: ended()", dbgHint); return delegate.ended(); }
+ @Override public boolean closed() { log.trace("{}: closed()", dbgHint); return delegate.closed(); }
+ @Override public boolean headWritten() { log.trace("{}: headWritten()", dbgHint); return delegate.headWritten(); }
+ @Override public HttpServerResponse headersEndHandler(Handler<Void> handler) { log.trace("{}: headersEndHandler()", dbgHint); return delegate.headersEndHandler(handler); }
+ @Override public HttpServerResponse bodyEndHandler(Handler<Void> handler) { log.trace("{}: bodyEndHandler()", dbgHint); return delegate.bodyEndHandler(handler); }
+ @Override public long bytesWritten() { log.trace("{}: bytesWritten()", dbgHint); return delegate.bytesWritten(); }
+ @Override public int streamId() { log.trace("{}: streamId()", dbgHint); return delegate.streamId(); }
+ @Override public HttpServerResponse push(HttpMethod method, String host, String path, Handler<AsyncResult<HttpServerResponse>> handler) { log.trace("{}: push(Mthd,Str,Str,Hdlr)", dbgHint); return delegate.push(method, host, path, handler); }
+ @Override public Future<HttpServerResponse> push(HttpMethod method, String host, String path) { log.trace("{}: push(Mthd,Str,Str)", dbgHint); return delegate.push(method, host, path); }
+ @Override public HttpServerResponse push(HttpMethod method, String path, MultiMap headers, Handler<AsyncResult<HttpServerResponse>> handler) { log.trace("{}: push(Mthd,Str,Map,Hdlr)", dbgHint); return delegate.push(method, path, headers, handler); }
+ @Override public Future<HttpServerResponse> push(HttpMethod method, String path, MultiMap headers) { log.trace("{}: push(Mthd,Str,Map)", dbgHint); return delegate.push(method, path, headers); }
+ @Override public HttpServerResponse push(HttpMethod method, String path, Handler<AsyncResult<HttpServerResponse>> handler) { log.trace("{}: push(Mthd,Str,Hdlr)", dbgHint); return delegate.push(method, path, handler); }
+ @Override public Future<HttpServerResponse> push(HttpMethod method, String path) { log.trace("{}: push(Mthd,Str)", dbgHint); return delegate.push(method, path); }
+ @Override public HttpServerResponse push(HttpMethod method, String host, String path, MultiMap headers, Handler<AsyncResult<HttpServerResponse>> handler) { log.trace("{}: push(Mthd,Str,Str,Map,Hdlr)", dbgHint); return delegate.push(method, host, path, headers, handler); }
+ @Override public Future<HttpServerResponse> push(HttpMethod method, String host, String path, MultiMap headers) { log.trace("{}: push(Mthd,Str,Str,Map)", dbgHint); return delegate.push(method, host, path, headers); }
+ @Override public boolean reset() { log.trace("{}: reset(void)", dbgHint); return delegate.reset(); }
+ @Override public boolean reset(long code) { log.trace("{}: reset({})", dbgHint, code); return delegate.reset(code); }
+ @Override public HttpServerResponse writeCustomFrame(int type, int flags, Buffer payload) { log.trace("{}: writeCustomFrame({}, {}, Buf)", dbgHint, type, flags); return delegate.writeCustomFrame(type, flags, payload); }
+ @Override public HttpServerResponse writeCustomFrame(HttpFrame frame) { log.trace("{}: writeCustomFrame()", dbgHint); return delegate.writeCustomFrame(frame); }
+ @Override public HttpServerResponse setStreamPriority(StreamPriority streamPriority) { log.trace("{}: setStreamPriority()", dbgHint); return delegate.setStreamPriority(streamPriority); }
+ @Override public HttpServerResponse addCookie(Cookie cookie) { log.trace("{}: addCookie()", dbgHint); return delegate.addCookie(cookie); }
+ @Override public Cookie removeCookie(String name) { log.trace("{}: removeCookie({})", dbgHint, name); return delegate.removeCookie(name); }
+ @Override public Cookie removeCookie(String name, boolean invalidate) { log.trace("{}: removeCookie({}, {})", dbgHint, name, invalidate); return delegate.removeCookie(name, invalidate); }
+ @Override public Set<Cookie> removeCookies(String name) { log.trace("{}: removeCookies({})", dbgHint, name); return delegate.removeCookies(name); }
+ @Override public Set<Cookie> removeCookies(String name, boolean invalidate) { log.trace("{}: removeCookies({}, {})", dbgHint, name, invalidate); return delegate.removeCookies(name, invalidate); }
+ @Override public Cookie removeCookie(String name, String domain, String path) { log.trace("{}: removeCookie({}, Str, Str)", dbgHint, name); return delegate.removeCookie(name, domain, path); }
+ @Override public Cookie removeCookie(String name, String domain, String path, boolean invalidate) { log.trace("{}: removeCookie({}, Str, Str, {})", dbgHint, name, invalidate); return delegate.removeCookie(name, domain, path, invalidate); }
+ @Override public Future<Void> write(Buffer data) { log.trace("{}: write(Buf)", dbgHint); return delegate.write(data); }
+ @Override public void write(Buffer data, Handler<AsyncResult<Void>> handler) { log.trace("{}: write(Buf, Hdlr)", dbgHint); delegate.write(data, handler); }
+ @Override public void end(Handler<AsyncResult<Void>> handler) { log.trace("{}: end(Hdlr)", dbgHint); delegate.end(handler); }
+ @Override public boolean writeQueueFull() { log.trace("{}: writeQueueFull()", dbgHint); return delegate.writeQueueFull(); }
+
+}
diff --git a/src/main/java/ch/hiddenalpha/unspecifiedgarbage/gateleenKludge/tmoutissue20240123/Foo.java b/src/main/java/ch/hiddenalpha/unspecifiedgarbage/gateleenKludge/tmoutissue20240123/Foo.java
new file mode 100644
index 0000000..8295088
--- /dev/null
+++ b/src/main/java/ch/hiddenalpha/unspecifiedgarbage/gateleenKludge/tmoutissue20240123/Foo.java
@@ -0,0 +1,131 @@
+package ch.hiddenalpha.unspecifiedgarbage.gateleenKludge.tmoutissue20240123;
+
+import io.vertx.core.http.HttpServerRequest;
+import io.vertx.ext.web.RoutingContext;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import static java.lang.System.currentTimeMillis;
+
+
+/**
+ * <p>Static tracing hooks used to debug the timing of "/houston/server/info"
+ * requests. All state is static; this is diagnostic kludge code, not a
+ * general-purpose utility.</p>
+ */
+public class Foo {
+
+ private static final Logger log = Foo.getLogger(Foo.class);
+ // Compile-time switch for the request-identity assertions further below.
+ private static final boolean assertRequestEquality = false;
+ private static HttpServerRequest serverInfoRequest;
+ private static io.vertx.core.http.impl.HttpServerRequestInternal restStorageEvBusAdaptMappdHttpServReq;
+ // Set when routing begins; read by onEndCompleted() to compute duration.
+ // NOTE(review): written/read without synchronization - presumably fine on a
+ // single event-loop thread, but confirm.
+ private static long onBeginRouteEpochMs;
+
+ // Remembers the request instance so later hooks can assert identity.
+ // NOTE(review): only this hook is synchronized; the other hooks touch the
+ // same static state without a lock.
+ public static synchronized void onNewServerInfoRequst(HttpServerRequest request){
+ if( !isServerInfoRequst(request) ) return;
+ //assert serverInfoRequest == null;
+ log.trace("onNewServerInfoRequst()");
+ serverInfoRequest = request;
+ }
+
+ // Hook: downstream request processing begins.
+ public static void downReqBegin(HttpServerRequest req) {
+ if( !isServerInfoRequst(req) ) return;
+ log.trace("downReqBegin()");
+ assert !assertRequestEquality || serverInfoRequest == req;
+ }
+
+ // Hook: downstream request passed authorization.
+ public static void downReqAuthorized(HttpServerRequest req) {
+ if( !isServerInfoRequst(req) ) return;
+ log.trace("downReqAuthorized()");
+ assert !assertRequestEquality || serverInfoRequest == req;
+ }
+
+ // Hook: just before the main verticle routes the request. Starts the clock
+ // that onEndCompleted() reads.
+ public static void onBeforeMainVerticleRouteGeneric(HttpServerRequest req) {
+ if( !isServerInfoRequst(req) ) return;
+ log.trace("onBeforeMainVerticleRouteGeneric()");
+ onBeginRouteEpochMs = currentTimeMillis();
+ assert !assertRequestEquality || serverInfoRequest == req;
+ }
+
+ // Logger factory prefixing names with "FOO." so all this tracing can be
+ // switched on/off through a single logger-config entry.
+ public static Logger getLogger(Class<?> clazz) {
+ assert clazz != null;
+ return getLogger(clazz.getName());
+ }
+
+ public static Logger getLogger(String name) {
+ assert name != null;
+ return LoggerFactory.getLogger("FOO."+ name);
+ }
+
+ // True if this request targets the one URI we trace ("/houston/server/info").
+ public static boolean isServerInfoRequst(HttpServerRequest request) {
+ return isServerInfoRequst(request.uri());
+ }
+
+ private static boolean isServerInfoRequst(String uri) {
+ assert uri != null;
+ assert uri.startsWith("/");
+ // The try/catch only exists to loudly flag unexpected throwables while
+ // assertions are enabled; it rethrows unchanged otherwise.
+ try{
+ if( "/houston/server/info".equals(uri) ){
+ //log.trace("true <- isServerInfoRequst({})", uri);
+ return true;
+ }
+ //log.trace("false <- isServerInfoRequst({})", uri);
+ return false;
+ }catch(Throwable ex){
+ assert false;
+ throw ex;
+ }
+ }
+
+ // Hook: not expected to be reached for the traced URI (hence 'assert false').
+ public static void onBeforeEvBusAdapterDataHandler(String uri) {
+ if( !isServerInfoRequst(uri) ) return;
+ log.trace("onBeforeEvBusAdapterDataHandler({})", uri);
+ assert false;
+ }
+
+ // Hook: not expected to be reached for the traced URI (hence 'assert false').
+ public static void onBeforeEvBusAdapterEndHandler(String uri) {
+ if( !isServerInfoRequst(uri)) return;
+ log.trace("onBeforeEvBusAdapterEndHandler({})", uri);
+ assert false;
+ }
+
+ // Hook: event-bus adapter handles the (mapped) request.
+ // NOTE(review): restStorageEvBusAdaptMappdHttpServReq never gets reset, so
+ // the '== null' assert below fires on the SECOND traced request when
+ // assertions are enabled - confirm this is intended.
+ public static void onEvBusAdapterHandle(io.vertx.core.http.impl.HttpServerRequestInternal req) {
+ if( !isServerInfoRequst(req.uri()) ) return;
+ assert !assertRequestEquality || serverInfoRequest != req;
+ assert restStorageEvBusAdaptMappdHttpServReq == null;
+ log.trace("onEvBusAdapterHandle({})", req.uri());
+ restStorageEvBusAdaptMappdHttpServReq = req;
+ }
+
+ public static void onEvBusAdapterError(Throwable ex) {
+ // Wrap in an Exception so the log entry always carries a stacktrace.
+ log.error("onEvBusAdapterError()", new Exception("stacktrace", ex));
+ }
+
+ public static void onRestStorageHandlerHandle(HttpServerRequest req) {
+ if( !isServerInfoRequst(req) ) return;
+ log.trace("onRestStorageHandlerHandle({})", req.uri());
+ assert !assertRequestEquality || serverInfoRequest == req;
+ }
+
+ public static void onRestStorageHandler_getResource(io.vertx.ext.web.RoutingContext ctx) {
+ if( !isServerInfoRequst(ctx.request()) ) return;
+ assert !assertRequestEquality || serverInfoRequest == ctx.request();
+ log.trace("onRestStorageHandler_getResource({})", ctx.request().uri());
+ }
+
+ // Intentionally logging-only hooks; kept as no-ops with the trace disabled.
+ public static void onRestStorageHandler_getResource_before_storage_get(String path, int offset, int limit) {
+ //log.trace("onRestStorageHandler_getResource_before_storage_get({}, {}, {})", path, offset, limit);
+ }
+
+ public static void onRestStorageHandler_getResource_after_storage_get(String path, int offset, int limit, Object/*org.swisspush.reststorage.Resource*/ resource) {
+ //log.trace("onRestStorageHandler_getResource_after_storage_get({})", path);
+ }
+
+ // Hook: final handler for the traced URI. Here the request instance differs
+ // from the one seen by onNewServerInfoRequst (hence '!=' in the assert).
+ public static void onGetHoustonServerInfo(RoutingContext ctx) {
+ var req = ctx.request();
+ log.trace("onGetHoustonServerInfo({})", req.uri());
+ assert !assertRequestEquality || serverInfoRequest != req;
+ }
+
+ // Reports total millis since routing began and since the response began.
+ public static void onEndCompleted(long responseBegEpochMs){
+ long nowEpochMs = currentTimeMillis();
+ log.debug("Request took {}ms and {}ms", nowEpochMs - onBeginRouteEpochMs, nowEpochMs - responseBegEpochMs);
+ }
+
+}
+
diff --git a/src/main/java/ch/hiddenalpha/unspecifiedgarbage/gateleenKludge/tmoutissue20240123/HoustonInfoRequestTracer.java b/src/main/java/ch/hiddenalpha/unspecifiedgarbage/gateleenKludge/tmoutissue20240123/HoustonInfoRequestTracer.java
new file mode 100644
index 0000000..a011c7f
--- /dev/null
+++ b/src/main/java/ch/hiddenalpha/unspecifiedgarbage/gateleenKludge/tmoutissue20240123/HoustonInfoRequestTracer.java
@@ -0,0 +1,265 @@
+package ch.hiddenalpha.unspecifiedgarbage.gateleenKludge.tmoutissue20240123;
+
+import io.vertx.core.http.HttpMethod;
+import io.vertx.core.http.HttpServerRequest;
+import io.vertx.core.http.HttpServerResponse;
+import org.slf4j.Logger;
+
+import java.lang.reflect.Field;
+import java.util.NoSuchElementException;
+
+import static java.lang.System.currentTimeMillis;
+import static org.slf4j.LoggerFactory.getLogger;
+
+
+/**
+ * <p>This class got introduced to trace timings of "/houston/server/info"
+ * requests. It is optimized for exactly this purpose AND NOTHING ELSE! It was
+ * introduced because SDCISA-13746 is only observable on PROD. It does not
+ * reproduce locally, and not even on TEST, INT or PREPROD. So we do not really
+ * have another choice but tracing down this bug directly on PROD itself.
+ * Unluckily it is not that simple to do so. First debugging/testing on PROD env
+ * always has some risk. Plus, also our feedback-loop is terribly slow due to our
+ * heavyweight deployment process. So to be able to see if this code actually does
+ * what it should, we likely have to wait up to SEVERAL MONTHS.</p>
+ */
+public class HoustonInfoRequestTracer implements org.swisspush.gateleen.core.debug.InfoRequestTracer {
+
+ private static final Logger log = getLogger(HoustonInfoRequestTracer.class);
+ private static final String INFO_URI = "/houston/server/info";
+ private static final int MAX_REQUESTS = 8; /*WARN: do NOT go too high*/
+ private static final Long NO_VALUE = Long.MIN_VALUE / 2;
+ private static final Class<?> wrapperClazz;
+ private static final Field delegateField;
+ private static final int
+ FLG_WritingHttpResponseHasReturned = 1 << 0,
+ FLG_WritingHttpResponseEnd = 1 << 1,
+ FLG_slotIsBusy = 1 << 2;
+ private final int requestDurationBailTresholdLowMs = 42; /* requests faster than 42 millis likely not interesting*/
+ private final Object requestSlotLock = new Object();
+ private final HttpServerRequest[]
+ requestInstances = new HttpServerRequest[MAX_REQUESTS];
+ private int slotReUseOffset;
+ private final int[]
+ requestFlg = new int[MAX_REQUESTS];
+ private final long[]
+ requestNewHttpReqEpochMs = new long[MAX_REQUESTS],
+ authorizerBeginMs = new long[MAX_REQUESTS],
+ authorizerEndMs = new long[MAX_REQUESTS],
+ beforeCatchallRouting = new long[MAX_REQUESTS],
+ responseGotRequestedMs = new long[MAX_REQUESTS],
+ writingResponseBeginMs = new long[MAX_REQUESTS],
+ writingResponseHasReturnedMs = new long[MAX_REQUESTS],
+ writingResponseEndMs = new long[MAX_REQUESTS],
+ requestDoneMs = new long[MAX_REQUESTS];
+
+ static {
+ try {
+ wrapperClazz = Class.forName("io.vertx.ext.web.impl.HttpServerRequestWrapper");
+ delegateField = wrapperClazz.getDeclaredField("delegate");
+ delegateField.setAccessible(true);
+ } catch (ClassNotFoundException | NoSuchFieldException ex) {
+ assert false : "TODO_395w8zuj";
+ throw new UnsupportedOperationException(/*TODO*/"Not impl yet", ex);
+ }
+ }
+
+ public void onNewHttpRequest(HttpServerRequest req) {
+ if( !isOfInterestEvenReqNotYetSeen(req) ) return;
+ req = unwrap(req);
+ int reqIdx;
+ synchronized (requestSlotLock){
+ reqIdx = getFreeSlotIdx();
+ if( reqIdx == -2 ) {
+ log.debug("No more space to trace yet another request");
+ return;
+ }
+ assert reqIdx >= 0 && reqIdx < MAX_REQUESTS;
+ assert !alreadyKnowRequest(req) : "TODO what if..";
+ requestFlg[reqIdx] = FLG_slotIsBusy;
+ }
+ requestInstances[reqIdx] = req;
+ requestNewHttpReqEpochMs[reqIdx] = currentTimeMillis();
+ authorizerBeginMs[reqIdx] = NO_VALUE;
+ authorizerEndMs[reqIdx] = NO_VALUE;
+ beforeCatchallRouting[reqIdx] = NO_VALUE;
+ responseGotRequestedMs[reqIdx] = NO_VALUE;
+ writingResponseBeginMs[reqIdx] = NO_VALUE;
+ writingResponseHasReturnedMs[reqIdx] = NO_VALUE;
+ writingResponseEndMs[reqIdx] = NO_VALUE;
+ requestDoneMs[reqIdx] = NO_VALUE;
+ }
+
+ public void onHttpRequestError(HttpServerRequest req, Throwable ex) {
+ if( !isOfInterest(req) ) return;
+ int reqIdx = getIdxOf(req);
+ long durMs = currentTimeMillis() - requestNewHttpReqEpochMs[reqIdx];
+ throw new UnsupportedOperationException(/*TODO*/"Not impl yet. Took "+durMs+"ms", ex);
+ }
+
+ public void onAuthorizerBegin(HttpServerRequest req) {
+ if( !isOfInterest(req) ) return;
+ int reqIdx = getIdxOf(req);
+ authorizerBeginMs[reqIdx] = currentTimeMillis() - requestNewHttpReqEpochMs[reqIdx];
+ }
+
+ public void onAuthorizerEnd(HttpServerRequest req) {
+ if( !isOfInterest(req) ) return;
+ int reqIdx = getIdxOf(req);
+ authorizerEndMs[reqIdx] = currentTimeMillis() - requestNewHttpReqEpochMs[reqIdx];
+ }
+
+ public HttpServerRequest filterRequestBeforeCallingCatchallRouter(HttpServerRequest req) {
+ if( !isOfInterest(req) ) return req;
+ int reqIdx = getIdxOf(req);
+ beforeCatchallRouting[reqIdx] = currentTimeMillis() - requestNewHttpReqEpochMs[reqIdx];
+ return new InterceptingServerRequest("ai9oh8urtgj", req);
+ }
+
+ private void onHttpResponseGotRequested(HttpServerRequest req) {
+ assert isOfInterest(req);
+ int reqIdx = getIdxOf(req);
+ responseGotRequestedMs[reqIdx] = currentTimeMillis() - requestNewHttpReqEpochMs[reqIdx];
+ }
+
+ public void onWritingHttpResponseBegin(HttpServerRequest req) {
+ int reqIdx = getIdxOf(req);
+ writingResponseBeginMs[reqIdx] = currentTimeMillis() - requestNewHttpReqEpochMs[reqIdx];
+ }
+
+ public void onWritingHttpResponseHasReturned(HttpServerRequest req) {
+ assert isOfInterest(req);
+ int reqIdx = getIdxOf(req);
+ writingResponseHasReturnedMs[reqIdx] = currentTimeMillis() - requestNewHttpReqEpochMs[reqIdx];
+ requestFlg[reqIdx] |= FLG_WritingHttpResponseHasReturned;
+ tryCompletingRequest(reqIdx);
+ }
+
+ public void onWritingHttpResponseEnd(Throwable ex, HttpServerRequest req) {
+ assert ex == null;
+ assert isOfInterest(req);
+ int reqIdx = getIdxOf(req);
+ writingResponseEndMs[reqIdx] = currentTimeMillis() - requestNewHttpReqEpochMs[reqIdx];
+ requestFlg[reqIdx] |= FLG_WritingHttpResponseEnd;
+ tryCompletingRequest(reqIdx);
+ }
+
+ private void tryCompletingRequest(int reqIdx) {
+ int requestIsDoneMask = FLG_WritingHttpResponseHasReturned | FLG_WritingHttpResponseEnd;
+ if ((requestFlg[reqIdx] & requestIsDoneMask) != requestIsDoneMask) return;
+ requestDoneMs[reqIdx] = currentTimeMillis() - requestNewHttpReqEpochMs[reqIdx];
+ report(reqIdx);
+ /* free our slot */
+ synchronized (requestSlotLock){
+ requestFlg[reqIdx] &= ~FLG_slotIsBusy;
+ requestInstances[reqIdx] = null;
+ }
+ }
+
+ private void report(int reqIdx) {
+ if( requestDoneMs[reqIdx] < requestDurationBailTresholdLowMs ){
+ /*fast requests usually are not worth logging, we're interested in the slow requests only*/
+ if (log.isTraceEnabled()) log.trace(
+ "Req took {}ms (authBeg={}ms, authEnd={}ms, route={}ms, getRsp={}ms, wrBeg={}ms, wrRet={}ms, wrEnd={}ms)",
+ requestDoneMs[reqIdx],
+ authorizerBeginMs[reqIdx],
+ authorizerEndMs[reqIdx],
+ beforeCatchallRouting[reqIdx],
+ responseGotRequestedMs[reqIdx],
+ writingResponseBeginMs[reqIdx],
+ writingResponseHasReturnedMs[reqIdx],
+ writingResponseEndMs[reqIdx]);
+ }else{
+ /*slow requests are interesting*/
+ log.info("Req took {}ms (authBeg={}ms, authEnd={}ms, route={}ms, getRsp={}ms, wrBeg={}ms, wrRet={}ms, wrEnd={}ms)",
+ requestDoneMs[reqIdx],
+ authorizerBeginMs[reqIdx],
+ authorizerEndMs[reqIdx],
+ beforeCatchallRouting[reqIdx],
+ responseGotRequestedMs[reqIdx],
+ writingResponseBeginMs[reqIdx],
+ writingResponseHasReturnedMs[reqIdx],
+ writingResponseEndMs[reqIdx]);
+ }
+ }
+
+ private boolean isOfInterest(HttpServerRequest req){
+ if( !isOfInterestEvenReqNotYetSeen(req) ) return false;
+ if( !alreadyKnowRequest(req) ) return false; // Without start point, we cannot report anything useful
+ return true;
+ }
+
+ private boolean isOfInterestEvenReqNotYetSeen(HttpServerRequest req) {
+ if( !log.isInfoEnabled() ) return false; // if we produce no output, makes no sense to burn CPU for it
+ if( !HttpMethod.GET.equals(req.method()) ) return false; // Only GET is interesting for us
+ if( !INFO_URI.equals(req.uri()) ) return false; // Only this specific URI is of interest
+ return true;
+ }
+
+ private int getIdxOf(HttpServerRequest req) {
+ req = unwrap(req);
+ for( int idx = 0 ; idx < MAX_REQUESTS ; ++idx ){
+ if( requestInstances[idx] == req ) return idx;
+ }
+ assert false : "why does this happen?";
+ throw new NoSuchElementException(/*TODO*/"Not impl yet");
+ }
+
+ /** @return either index of free slot or -2 if no slot available */
+ private int getFreeSlotIdx() {
+ for( int i = 0 ; i < MAX_REQUESTS ; ++i ){
+ if( (requestFlg[i+slotReUseOffset%MAX_REQUESTS] & FLG_slotIsBusy) == 0 ) {
+ slotReUseOffset = i + 1;
+ return i;
+ }
+ }
+ return -2;
+ }
+
+ private boolean alreadyKnowRequest(HttpServerRequest req) {
+ req = unwrap(req);
+ for( int i = 0 ; i < (0 + MAX_REQUESTS) ; ++i ){
+ if((requestFlg[i] & FLG_slotIsBusy) == 0) continue;
+ if( requestInstances[i] == req ) return true;
+ }
+ return false;
+ }
+
+ private HttpServerRequest unwrap(HttpServerRequest req){
+ for( boolean hasChanged = true ; hasChanged ;){
+ hasChanged = false;
+ while (req instanceof InterceptingServerRequest) {
+ hasChanged = true;
+ req = ((InterceptingServerRequest) req).delegate;
+ }
+ while(wrapperClazz.isInstance(req)){
+ hasChanged = true;
+ try {
+ req = (HttpServerRequest) delegateField.get(req);
+ } catch (IllegalAccessException ex) {
+ throw new UnsupportedOperationException(/*TODO*/"Not impl yet", ex);
+ }
+ }
+ }
+ assert req != null;
+ return req;
+ }
+
+ private class InterceptingServerRequest extends DelegateVertxHttpServerRequestInternal {
+ private final HttpServerRequest delegate;
+
+ public InterceptingServerRequest(String debugHint, HttpServerRequest delegate) {
+ super(debugHint, delegate);
+ assert isOfInterest(delegate);
+ this.delegate = delegate;
+ }
+
+ @Override public HttpServerResponse response() {
+ assert isOfInterest(delegate);
+ onHttpResponseGotRequested(delegate);
+ return super.response();
+ }
+ }
+
+
+}
diff --git a/src/main/java/ch/hiddenalpha/unspecifiedgarbage/stream/StreamUtils.java b/src/main/java/ch/hiddenalpha/unspecifiedgarbage/stream/StreamUtils.java
index 889b3f1..bebe970 100644
--- a/src/main/java/ch/hiddenalpha/unspecifiedgarbage/stream/StreamUtils.java
+++ b/src/main/java/ch/hiddenalpha/unspecifiedgarbage/stream/StreamUtils.java
@@ -25,6 +25,21 @@ public class StreamUtils {
return totalBytes;
}
+    /**
+     * Returns a Runnable that copies all bytes from 'src' to 'dst'.
+     * IOExceptions are rethrown wrapped in a RuntimeException.
+     * If 'doCloseDst' is set, 'dst' gets closed in EVERY case, also on error
+     * (previously it leaked when the copy failed). 'src' is never closed;
+     * its ownership stays with the caller.
+     */
+    public static Runnable newCopyTask(java.io.InputStream src, java.io.OutputStream dst, boolean doCloseDst){
+        return ()->{
+            try{
+                byte[] buf = new byte[8192]; /* 8 KiB (was 8291, presumably a typo) */
+                for(;;){
+                    int readLen = src.read(buf, 0, buf.length);
+                    if( readLen == -1 ) break;
+                    dst.write(buf, 0, readLen);
+                }
+                if( doCloseDst ) dst.close();
+            }catch( java.io.IOException ex ){
+                if( doCloseDst ){
+                    /* best-effort close; keep the original failure primary */
+                    try{ dst.close(); }catch( java.io.IOException closeEx ){ ex.addSuppressed(closeEx); }
+                }
+                throw new RuntimeException(ex);
+            }
+        };
+    }
+
public static <SRC,DST> java.util.Iterator<DST> map( java.util.Iterator<SRC> src , java.util.function.Function<SRC,DST> mapper ) {
return new java.util.Iterator<DST>() {
@Override public boolean hasNext() { return src.hasNext(); }
diff --git a/src/main/java/ch/hiddenalpha/unspecifiedgarbage/time/TimeUtils.java b/src/main/java/ch/hiddenalpha/unspecifiedgarbage/time/TimeUtils.java
index 2bb1bfb..d7d7ec8 100644
--- a/src/main/java/ch/hiddenalpha/unspecifiedgarbage/time/TimeUtils.java
+++ b/src/main/java/ch/hiddenalpha/unspecifiedgarbage/time/TimeUtils.java
@@ -25,9 +25,9 @@ public class TimeUtils {
* Find smallest distance assuming integers overflow "like a circle".
*
* Computers cannot represent all existing integers. Due to how
- * integers are represented in computers, they are not infinite but
- * more like a circle. Speak when we infinitely increment an
- * integer, it overflows and (usually) continues to walk around this
+ * integers are represented in java, they are not infinite but
+ * more like a circle. That is, when we infinitely increment an integer,
+ * it overflows and (usually) continues to walk around this
* (imaginary) circle.
*
* This function takes two of those numbers on this circle and
diff --git a/src/main/java/org/apache/logging/slf4j/Log4jLogger.java b/src/main/java/org/apache/logging/slf4j/Log4jLogger.java
new file mode 100644
index 0000000..eb06c77
--- /dev/null
+++ b/src/main/java/org/apache/logging/slf4j/Log4jLogger.java
@@ -0,0 +1,104 @@
+package org.apache.logging.slf4j;
+
+import org.apache.logging.log4j.spi.ExtendedLogger;
+import org.slf4j.Marker;
+import org.slf4j.event.Level;
+import org.slf4j.spi.LocationAwareLogger;
+import org.slf4j.spi.LoggingEventBuilder;
+
+import java.io.Serializable;
+
+
+/**
+ * <p>FU** this fu***** damn sh** code that still tries to use log4j, no matter
+ * how strong we tell it NOT to use it!</p>
+ *
+ * <p>This class only exists to prevent services from starting if IDEA still
+ * did miss the dependency changes in pom and still tries to use the wrong
+ * logger impl. So that I once and for all time can stop wasting my time
+ * waiting for logs which never arrive because the wrong logger still is used
+ * somewhere.</p>
+ */
+public class Log4jLogger implements LocationAwareLogger, Serializable {
+
+ // Everything delegates to a plain slf4j-simple logger; the log4j objects
+ // handed to the constructor are deliberately ignored.
+ private final org.slf4j.Logger log;
+
+ // NOTE(review): this shadows log4j's own org.apache.logging.slf4j.Log4jLogger;
+ // keep the constructor signature identical so it stays a drop-in - confirm
+ // against the log4j-to-slf4j version in use.
+ Log4jLogger(final Log4jMarkerFactory markerFactory, final ExtendedLogger logger, final String name) {
+ this.log = new org.slf4j.simple.SimpleLoggerFactory().getLogger(name);
+ }
+
+ // LocationAwareLogger entry point; intentionally unimplemented here.
+ @Override public void log(Marker marker, String s, int i, String s1, Object[] objects, Throwable throwable) {
+ throw new UnsupportedOperationException(/*TODO*/"Not impl yet");
+ }
+
+ // Below: plain one-line delegates covering the whole org.slf4j.Logger API.
+ @Override public String getName() { return log.getName(); }
+ @Override public LoggingEventBuilder makeLoggingEventBuilder(Level level) { return log.makeLoggingEventBuilder(level); }
+ @Override public LoggingEventBuilder atLevel(Level level) { return log.atLevel(level); }
+ @Override public boolean isEnabledForLevel(Level level) { return log.isEnabledForLevel(level); }
+ @Override public boolean isTraceEnabled() { return log.isTraceEnabled(); }
+ @Override public void trace(String s) { log.trace(s); }
+ @Override public void trace(String s, Object o) { log.trace(s, o); }
+ @Override public void trace(String s, Object o, Object o1) { log.trace(s, o, o1); }
+ @Override public void trace(String s, Object... objects) { log.trace(s, objects); }
+ @Override public void trace(String s, Throwable throwable) { log.trace(s, throwable); }
+ @Override public boolean isTraceEnabled(Marker marker) { return log.isTraceEnabled(marker); }
+ @Override public LoggingEventBuilder atTrace() { return log.atTrace(); }
+ @Override public void trace(Marker marker, String s) { log.trace(marker, s); }
+ @Override public void trace(Marker marker, String s, Object o) { log.trace(marker, s, o); }
+ @Override public void trace(Marker marker, String s, Object o, Object o1) { log.trace(marker, s, o, o1); }
+ @Override public void trace(Marker marker, String s, Object... objects) { log.trace(marker, s, objects); }
+ @Override public void trace(Marker marker, String s, Throwable throwable) { log.trace(marker, s, throwable); }
+ @Override public boolean isDebugEnabled() { return log.isDebugEnabled(); }
+ @Override public void debug(String s) { log.debug(s); }
+ @Override public void debug(String s, Object o) { log.debug(s, o); }
+ @Override public void debug(String s, Object o, Object o1) { log.debug(s, o, o1); }
+ @Override public void debug(String s, Object... objects) { log.debug(s, objects); }
+ @Override public void debug(String s, Throwable throwable) { log.debug(s, throwable); }
+ @Override public boolean isDebugEnabled(Marker marker) { return log.isDebugEnabled(marker); }
+ @Override public void debug(Marker marker, String s) { log.debug(marker, s); }
+ @Override public void debug(Marker marker, String s, Object o) { log.debug(marker, s, o); }
+ @Override public void debug(Marker marker, String s, Object o, Object o1) { log.debug(marker, s, o, o1); }
+ @Override public void debug(Marker marker, String s, Object... objects) { log.debug(marker, s, objects); }
+ @Override public void debug(Marker marker, String s, Throwable throwable) { log.debug(marker, s, throwable); }
+ @Override public LoggingEventBuilder atDebug() { return log.atDebug(); }
+ @Override public boolean isInfoEnabled() { return log.isInfoEnabled(); }
+ @Override public void info(String s) { log.info(s); }
+ @Override public void info(String s, Object o) { log.info(s, o); }
+ @Override public void info(String s, Object o, Object o1) { log.info(s, o, o1); }
+ @Override public void info(String s, Object... objects) { log.info(s, objects); }
+ @Override public void info(String s, Throwable throwable) { log.info(s, throwable); }
+ @Override public boolean isInfoEnabled(Marker marker) { return log.isInfoEnabled(marker); }
+ @Override public void info(Marker marker, String s) { log.info(marker, s); }
+ @Override public void info(Marker marker, String s, Object o) { log.info(marker, s, o); }
+ @Override public void info(Marker marker, String s, Object o, Object o1) { log.info(marker, s, o, o1); }
+ @Override public void info(Marker marker, String s, Object... objects) { log.info(marker, s, objects); }
+ @Override public void info(Marker marker, String s, Throwable throwable) { log.info(marker, s, throwable); }
+ @Override public LoggingEventBuilder atInfo() { return log.atInfo(); }
+ @Override public boolean isWarnEnabled() { return log.isWarnEnabled(); }
+ @Override public void warn(String s) { log.warn(s); }
+ @Override public void warn(String s, Object o) { log.warn(s, o); }
+ @Override public void warn(String s, Object... objects) { log.warn(s, objects); }
+ @Override public void warn(String s, Object o, Object o1) { log.warn(s, o, o1); }
+ @Override public void warn(String s, Throwable throwable) { log.warn(s, throwable); }
+ @Override public boolean isWarnEnabled(Marker marker) { return log.isWarnEnabled(marker); }
+ @Override public void warn(Marker marker, String s) { log.warn(marker, s); }
+ @Override public void warn(Marker marker, String s, Object o) { log.warn(marker, s, o); }
+ @Override public void warn(Marker marker, String s, Object o, Object o1) { log.warn(marker, s, o, o1); }
+ @Override public void warn(Marker marker, String s, Object... objects) { log.warn(marker, s, objects); }
+ @Override public void warn(Marker marker, String s, Throwable throwable) { log.warn(marker, s, throwable); }
+ @Override public LoggingEventBuilder atWarn() { return log.atWarn(); }
+ @Override public boolean isErrorEnabled() { return log.isErrorEnabled(); }
+ @Override public void error(String s) { log.error(s); }
+ @Override public void error(String s, Object o) { log.error(s, o); }
+ @Override public void error(String s, Object o, Object o1) { log.error(s, o, o1); }
+ @Override public void error(String s, Object... objects) { log.error(s, objects); }
+ @Override public void error(String s, Throwable throwable) { log.error(s, throwable); }
+ @Override public boolean isErrorEnabled(Marker marker) { return log.isErrorEnabled(marker); }
+ @Override public void error(Marker marker, String s) { log.error(marker, s); }
+ @Override public void error(Marker marker, String s, Object o) { log.error(marker, s, o); }
+ @Override public void error(Marker marker, String s, Object o, Object o1) { log.error(marker, s, o, o1); }
+ @Override public void error(Marker marker, String s, Object... objects) { log.error(marker, s, objects); }
+ @Override public void error(Marker marker, String s, Throwable throwable) { log.error(marker, s, throwable); }
+ @Override public LoggingEventBuilder atError() { return log.atError(); }
+
+}
diff --git a/src/main/lua/brgmt-logs/DigBrgmtLogs.lua b/src/main/lua/brgmt-logs/DigBrgmtLogs.lua
new file mode 100644
index 0000000..fb1f036
--- /dev/null
+++ b/src/main/lua/brgmt-logs/DigBrgmtLogs.lua
@@ -0,0 +1,5 @@
+--
+-- NOTHING HERE
+--
+-- See "brgmt-beef/scripts/". Instead.
+--
diff --git a/src/main/lua/git/GitflowChangelogGen.lua b/src/main/lua/git/GitflowChangelogGen.lua
new file mode 100644
index 0000000..3b44ac3
--- /dev/null
+++ b/src/main/lua/git/GitflowChangelogGen.lua
@@ -0,0 +1,195 @@
+
+local log = io.stderr
+local main
+
+
+-- Writes the CLI usage/help text for this changelog helper to stdout.
+function printHelp()
+ io.stdout:write(" \n"
+ .." Helper to extract essential data from a gitflow log which potentially\n"
+ .." is useful to write a CHANGELOG from.\n"
+ .." \n"
+ .." Options:\n"
+ .." \n"
+ .." --since <date>\n"
+ .." Ignore commits with this ISO date and older.\n"
+ .." \n"
+ .." --remote <str>\n"
+ .." Name of the git remote to use. Defaults to 'upstream'.\n"
+ .." \n"
+ .." --no-fetch\n"
+ .." Do NOT update refs from remote. Just use what we have local.\n"
+ .." \n"
+ )
+end
+
+
+-- Parses process arguments into 'app'.
+-- Returns 0 on success; returns nothing (nil) on invalid input, after
+-- logging an EINVAL message.
+function parseArgs( app )
+    local argv, i = _ENV.arg, 1
+    -- Fetches the value that must follow an option like "--since <date>".
+    -- Logs and returns nil when it is missing.
+    local function valueOf( opt )
+        local v = argv[i]; i = i + 1
+        if not v then log:write("EINVAL: ".. opt .." needs value\n") end
+        return v
+    end
+    while argv[i] do
+        local a = argv[i]; i = i + 1
+        if a == "--since" then
+            app.since = valueOf("--since"); if not app.since then return end
+        elseif a == "--remote" then
+            app.remoteName = valueOf("--remote"); if not app.remoteName then return end
+        elseif a == "--no-fetch" then
+            app.isFetch = false
+        elseif a == "--help" then
+            app.isHelp = true; return 0
+        else
+            log:write("EINVAL: ".. a .."\n")return
+        end
+    end
+    if not app.since then log:write("EINVAL: --since missing\n")return end
+    if not app.remoteName then app.remoteName = "upstream" end
+    return 0
+end
+
+
+-- Tries to parse the next commit header out of app.fullHistory, starting
+-- at offset app.fullHistoryRdBeg. On a match it stores the header (without
+-- its trailing blank line) in app.commitHdr, advances the read offset and
+-- schedules readCommitMsg as the next parse step. When nothing matches it
+-- asserts the whole input was consumed and clears app.parseFn so the parse
+-- loop in run() terminates.
+function readCommitHdr( app )
+ --log:write("[DEBUG] parse hdr from '".. app.fullHistory:sub(app.fullHistoryRdBeg, app.fullHistoryRdBeg+256) .."...'\n")
+ -- First try the merge-commit shape (has a "Merge:" line); the run of
+ -- dots spans the 40-char commit sha.
+ local f, t = app.fullHistory:find("^"
+ .."commit ........................................[^\n]*\n"
+ .."Merge: [0-9a-z]+ [0-9a-z]+\n"
+ .."Author: [^\n]+\n"
+ .."Date: [^\n]+\n"
+ .."\n"
+ , app.fullHistoryRdBeg)
+ -- Fall back to the regular (non-merge) commit shape.
+ if not f then f, t = app.fullHistory:find("^"
+ .."commit ........................................[^\n]*\n"
+ .."Author: [^\n]+\n"
+ .."Date: [^\n]+\n"
+ .."\n"
+ , app.fullHistoryRdBeg) end
+ if not f then
+ -- No further header: the cursor must sit exactly at end-of-input,
+ -- otherwise the parser lost track somewhere.
+ assert(app.fullHistory:len() == app.fullHistoryRdBeg-1, app.fullHistory:len()..", "..app.fullHistoryRdBeg)
+ app.parseFn = false
+ return
+ end
+ app.commitHdr = assert(app.fullHistory:sub(f, t-1))
+ --log:write("hdrBeginsWith '"..(app.commitHdr:sub(1, 32)).."...'\n")
+ app.fullHistoryRdBeg = t + 1
+ --log:write("hdr parsed. rdCursr now points to '".. app.fullHistory:sub(app.fullHistoryRdBeg, app.fullHistoryRdBeg+16) .."...'\n")
+ app.parseFn = assert(readCommitMsg)
+end
+
+
+-- Extracts the commit message that follows the previously parsed header.
+-- Scans byte-wise until a 'c' (0x63) immediately after a newline, taken as
+-- the start of the next "commit ..." header, or until end-of-input.
+-- NOTE(review): the byte at the initial read offset itself is never
+-- examined (the loop increments before reading) — presumably fine because
+-- a message body is never empty; confirm against real `git log` output.
+function readCommitMsg( app )
+ local idxOfC = app.fullHistoryRdBeg
+ local chrPrev = false
+ while true do idxOfC = idxOfC + 1
+ local chr = app.fullHistory:byte(idxOfC)
+ --log:write("CHR '"..tostring(app.fullHistory:sub(idxOfC, idxOfC)).."'\n")
+ if (chr == 0x63) and chrPrev == 0x0A then
+ idxOfC = idxOfC - 1
+ break -- LF followed by 'c' (aka 'commit') found
+ elseif not chr then
+ idxOfC = idxOfC - 1
+ break
+ else
+ chrPrev = assert(chr)
+ end
+ end
+ local mtch = app.fullHistory:sub(app.fullHistoryRdBeg, idxOfC - 1)
+ assert(mtch)
+ -- Strip trailing newlines, then drop the indentation git puts before
+ -- each message line.
+ while mtch:byte(mtch:len()) == 0x0A do mtch = mtch:sub(1, -2) end
+ mtch = mtch:gsub("\n ", "\n"):gsub("^ ", "")
+ app.commitMsg = mtch
+ app.fullHistoryRdBeg = idxOfC + 1
+ app.parseFn = readCommitHdr
+ --log:write("msg parsed. rdCursr now points to '".. app.fullHistory:sub(app.fullHistoryRdBeg, app.fullHistoryRdBeg+16) .."...'\n")
+ -- Header/message pair is complete: record the commit.
+ table.insert(app.commits, {
+ hdr = assert(app.commitHdr),
+ msg = assert(app.commitMsg),
+ })
+end
+
+
+-- Main work: optionally `git fetch` the remote, collect `git log` output
+-- for <remote>/master and <remote>/develop since app.since, parse it into
+-- app.commits, group consecutive commits into one changelog entry per
+-- version tag, and print the entries to stdout.
+-- NOTE(review): io.popen results are not checked; a failing git call just
+-- yields empty output here.
+function run( app )
+ local snk = io.stdout
+ if app.isFetch then
+ -- Make sure refs are up-to-date
+ local gitFetch = "git fetch \"".. app.remoteName .."\""
+ log:write("[DEBUG] ".. gitFetch .."\n")
+ local gitFetch = io.popen(gitFetch)
+ while true do
+ local buf = gitFetch:read(1<<16)
+ if not buf then break end
+ log:write(buf)
+ end
+ end
+ -- Collect input
+ local git = "git log --date-order --first-parent --decorate --since \"".. app.since.."\""
+ .." \"".. app.remoteName .."/master\""
+ .." \"".. app.remoteName .."/develop\""
+ log:write("[DEBUG] ".. git .."\n")
+ local git = io.popen(git)
+ while true do
+ local buf = git:read(1<<16)
+ if not buf then break end
+ --io.stdout:write(buf)
+ table.insert(app.fullHistory, buf)
+ end
+ -- Parse raw commits
+ app.fullHistory = table.concat(app.fullHistory)
+ app.parseFn = assert(readCommitHdr)
+ while app.parseFn do app.parseFn(app) end
+ -- Prepare output
+ -- NOTE(review): 'prevDate' is written below but never read (dead store);
+ -- 'author' likewise is extracted but unused.
+ local prevDate = "0000-00-00"
+ local version, prevVersion = "v_._._", false
+ local dateEntry = false
+ local entries = {}
+ for k, v in ipairs(app.commits) do
+ local date = assert(v.hdr:match("\nDate: +([0-9-]+) "))
+ local author = assert(v.hdr:match("\nAuthor: +([^\n]+)\n"))
+ local prNr, short = v.msg:match("Pull request #(%d+): ([^\n]+)\n")
+ prevVersion = version
+ -- A version tag (eg "tag: foo-1.2.3") on the commit's decorate line
+ -- starts a new changelog entry; otherwise the previous version sticks.
+ _, version = v.hdr:match("^([^\n]+)\n"):match("tag: ([a-z]+)-([^,]+)[,)]")
+ if not version then version = prevVersion end
+
+ if version ~= prevVersion or not dateEntry then
+ if dateEntry then table.insert(entries, dateEntry) end
+ dateEntry = {
+ txt = date .." - ".. version .."\n\nResolved issues:\n\n"
+ }
+ prevDate = date
+ end
+ -- Prefer the short bitbucket "Pull request" title when present.
+ if prNr then
+ dateEntry.txt = dateEntry.txt .. short .." (PR ".. prNr ..")\n"
+ else
+ dateEntry.txt = dateEntry.txt .. v.msg .."\n"
+ end
+ end
+ if dateEntry then table.insert(entries, dateEntry) end
+ -- output
+ for k, v in ipairs(entries) do
+ snk:write("\n\n")
+ snk:write(v.txt)
+ snk:write("\n")
+ end
+end
+
+
+-- Program entry: assemble the mutable app state, parse the CLI, then
+-- either print help or do the real work.
+function main()
+    local app = {}
+    app.since = false
+    app.remoteName = false
+    app.isFetch = true
+    app.fullHistory = {}
+    app.fullHistoryRdBeg = 1
+    app.commits = {}
+    app.parseFn = false
+    if parseArgs(app) ~= 0 then os.exit(1) end
+    if app.isHelp then printHelp() return end
+    run(app)
+end
+
+
+main()
+
diff --git a/src/main/lua/maven/MvnCentralDepScan.lua b/src/main/lua/maven/MvnCentralDepScan.lua
index 5322bc0..7f71afa 100644
--- a/src/main/lua/maven/MvnCentralDepScan.lua
+++ b/src/main/lua/maven/MvnCentralDepScan.lua
@@ -941,9 +941,6 @@ function mod.exportParentsLatest(app)
local stmt = app.stmtCache[stmtStr]
if not stmt then stmt = db:prepare(stmtStr) app.stmtCache[stmtStr] = stmt end
local rs = stmt:execute()
- out:write("h;Title;Parent relations (latest only)\n")
- out:write("h;ExportedAt;".. os.date("!%Y-%m-%d_%H:%M:%SZ") .."\n")
- out:write("c;GroupId;ArtifactId;Version;ParentGid;ParentAid;ParentVersion\n")
-- Need to filter out the older artifacts.
local all = {}
while rs:next() do
@@ -954,18 +951,14 @@ function mod.exportParentsLatest(app)
if diff > 0 then -- existing is newer. Keep it and ignore newer one.
goto nextRecord
else -- Either no entry yet or found a newer one.
- local entry = { gid=false, aid=false, ver=false, pgid=false, paid=false, pver=false }
- entry.gid = gid
- entry.aid = aid
- entry.ver = ver
- entry.pgid = rs:value(4)
- entry.paid = rs:value(5)
- entry.pver = rs:value(6)
- all[key] = entry
+ all[key] = { gid=gid, aid=aid, ver=ver, pgid=rs:value(4), paid=rs:value(5), pver=rs:value(6) }
end
::nextRecord::
end
-- Print
+ out:write("h;Title;Parent relations (latest only)\n")
+ out:write("h;ExportedAt;".. os.date("!%Y-%m-%d_%H:%M:%SZ") .."\n")
+ out:write("c;GroupId;ArtifactId;Version;ParentGid;ParentAid;ParentVersion\n")
for _, entry in pairs(all) do
out:write("r;".. entry.gid ..";".. entry.aid ..";".. entry.ver
..";".. entry.pgid ..";".. entry.paid ..";".. entry.pver .."\n")
@@ -1031,9 +1024,6 @@ function mod.exportDepsLatest(app)
local stmt = app.stmtCache[stmtStr]
if not stmt then stmt = db:prepare(stmtStr) app.stmtCache[stmtStr] = stmt end
local rs = stmt:execute()
- out:write("h;Title;Dependencies (of latest only)\n")
- out:write("h;ExportedAt;".. os.date("!%Y-%m-%d_%H:%M:%SZ") .."\n")
- out:write("c;GroupId;ArtifactId;Version;Dependency GID;Dependency AID;Dependency Version\n")
-- Need to filter out the older artifacts.
local all = {}
local entry, key, gid, aid, ver, diff
@@ -1046,18 +1036,14 @@ function mod.exportDepsLatest(app)
if diff > 0 then -- existing is newer. Keep it and ignore newer one.
goto nextRecord
else -- Either no entry yet or found a newer one.
- local entry = { gid=false, aid=false, ver=false, dgid=false, daid=false, dver=false }
- entry.gid = gid
- entry.aid = aid
- entry.ver = ver
- entry.dgid = rs:value(4)
- entry.daid = rs:value(5)
- entry.dver = rs:value(6)
- all[key] = entry
+ all[key] = { gid=gid, aid=aid, ver=ver, dgid=rs:value(4), daid=rs:value(5), dver=rs:value(6) }
end
goto nextRecord
::endFiltering::
-- Print
+ out:write("h;Title;Dependencies (of latest only)\n")
+ out:write("h;ExportedAt;".. os.date("!%Y-%m-%d_%H:%M:%SZ") .."\n")
+ out:write("c;GroupId;ArtifactId;Version;Dependency GID;Dependency AID;Dependency Version\n")
for _, entry in pairs(all) do
out:write("r;".. entry.gid ..";".. entry.aid ..";".. entry.ver
..";".. entry.dgid ..";".. entry.daid ..";".. entry.dver .."\n")
diff --git a/src/main/lua/misc/JavaCallgraph.lua b/src/main/lua/misc/JavaCallgraph.lua
new file mode 100644
index 0000000..6d0bd62
--- /dev/null
+++ b/src/main/lua/misc/JavaCallgraph.lua
@@ -0,0 +1,159 @@
+
+local SL = require("scriptlee")
+local newJavaClassParser = SL.newJavaClassParser
+local objectSeal = SL.objectSeal
+SL = nil
+
+local snk = io.stdout
+
+local main
+
+
+-- Wires a scriptlee java classfile parser onto 'app'. The constant-pool
+-- callbacks record raw entries (with still-unresolved index references)
+-- into app.constPool; onConstPoolEnd then resolves the cross references in
+-- two passes (simple name/type entries first, then the refs pointing at
+-- them) and finally dumps a few entries for debugging.
+function initParser( app )
+ app.parser = newJavaClassParser{
+ cls = app,
+ onMagic = function(m, app) assert(m == "\xCA\xFE\xBA\xBE") end,
+ -- NOTE(review): major 55 / minor 0 means java 11 classfiles only.
+ onClassfileVersion = function(maj, min, app) assert(maj == 55 and min == 0) end,
+ onConstPoolClassRef = function(i, idx, app)
+ app.constPool[i] = objectSeal{ type = "CLASS_REF", classNameIdx = idx, className = false, }
+ end,
+ onConstPoolIfaceMethodRef = function(i, nameIdx, nameAndTypeIdx, app)
+ app.constPool[i] = objectSeal{
+ type = "IFACE_METHOD_REF", nameIdx = nameIdx, nameAndTypeIdx = nameAndTypeIdx,
+ className = false, methodName = false, methodType = false,
+ }
+ end,
+ onConstPoolMethodRef = function(i, classIdx, nameAndTypeIdx, app)
+ app.constPool[i] = objectSeal{
+ type = "METHOD_REF", classIdx = classIdx, nameAndTypeIdx = nameAndTypeIdx,
+ className = false, methodName = false, signature = false,
+ }
+ end,
+ onConstPoolMethodType = function(i, descrIdx, app)
+ app.constPool[i] = objectSeal{
+ type = "METHOD_TYPE", descrIdx = descrIdx, descrStr = false,
+ }
+ end,
+ onConstPoolNameAndType = function(i, nameIdx, typeIdx, app)
+ app.constPool[i] = objectSeal{
+ type = "NAME_AND_TYPE", nameIdx = nameIdx, typeIdx = typeIdx, nameStr = false, typeStr = false,
+ }
+ end,
+ onConstPoolUtf8 = function(i, str, app)
+ app.constPool[i] = objectSeal{ type = "UTF8", str = str, }
+ end,
+
+ onConstPoolInvokeDynamic = function(i, bootstrapMethodAttrIdx, nameAndTypeIdx, app)
+ app.constPool[i] = objectSeal{
+ type = "INVOKE_DYNAMIC", bootstrapMethodAttrIdx = bootstrapMethodAttrIdx, nameAndTypeIdx = nameAndTypeIdx,
+ methodName = false, methodType = false, factoryClass = false, factoryMethod = false, factoryType = false,
+ }
+ end,
+ onConstPoolFieldRef = function(i, nameIdx, nameAndTypeIdx, that)
+ app.constPool[i] = objectSeal{
+ type = "FIELD_REF", nameIdx = nameIdx, nameAndTypeIdx = nameAndTypeIdx,
+ className = false, methodName = false, methodType = false,
+ }
+ end,
+ --onConstPoolMethodHandle = function(i, refKind, refIdx, app)
+ -- app.constPool[i] = objectSeal{ type = "METHOD_HANDLE", refKind = refKind, refIdx = refIdx, }
+ --end,
+ --onConstPoolStrRef = function(i, dstIdx, app)
+ -- print("ConstPool["..i.."] <StrRef> #"..dstIdx)
+ --end,
+ --onThisClass = function(nameIdx, app)
+ -- -- TODO print("onThisClass(#"..nameIdx..")")
+ --end,
+ --onField = function(iField, accessFlags, nameIdx, descrIdx, numAttrs, app)
+ -- print(string.format("onField(0x%04X, #%d, #%d, %d)",accessFlags,nameIdx,descrIdx,numAttrs))
+ --end,
+ --onMethod = function(accessFlags, nameIdx, descrIdx, app)
+ -- print(string.format("onMethod(0x%04X, #%d, #%d)",accessFlags,nameIdx,descrIdx))
+ --end,
+
+ onConstPoolEnd = function( app )
+ -- 1st run
+ -- Resolve entries that only reference UTF8 strings.
+ for i, cpe in pairs(app.constPool) do
+ if false then
+ elseif cpe.type == "CLASS_REF" then
+ local tmp
+ tmp = assert(cpe.classNameIdx)
+ tmp = assert(app.constPool[cpe.classNameIdx], cpe.classNameIdx)
+ tmp = assert(tmp.str, tmp)
+ cpe.className = assert(tmp)
+ elseif cpe.type == "METHOD_TYPE" then
+ cpe.descrStr = assert(app.constPool[cpe.descrIdx].str)
+ elseif cpe.type == "NAME_AND_TYPE" then
+ cpe.nameStr = assert(app.constPool[cpe.nameIdx].str);
+ cpe.typeStr = assert(app.constPool[cpe.typeIdx].str);
+ end
+ end
+ -- 2nd run
+ -- Resolve entries that reference the entries resolved above.
+ for i, cpe in pairs(app.constPool) do
+ if false then
+ elseif cpe.type == "FIELD_REF" then
+ local nameAndType = assert(app.constPool[cpe.nameAndTypeIdx])
+ -- NOTE(review): 'nameIdx' is dereferenced as a class ref here —
+ -- presumably it actually carries the class index; confirm against
+ -- scriptlee's onConstPoolFieldRef callback signature.
+ cpe.className = assert(app.constPool[cpe.nameIdx].className);
+ cpe.methodName = assert(app.constPool[nameAndType.nameIdx].str);
+ cpe.methodType = assert(app.constPool[nameAndType.typeIdx].str);
+ elseif cpe.type == "METHOD_REF" then
+ local nameAndType = app.constPool[cpe.nameAndTypeIdx]
+ cpe.className = assert(app.constPool[cpe.classIdx].className)
+ cpe.methodName = assert(app.constPool[nameAndType.nameIdx].str)
+ cpe.signature = assert(app.constPool[nameAndType.typeIdx].str)
+ elseif cpe.type == "IFACE_METHOD_REF" then
+ local classRef = assert(app.constPool[cpe.nameIdx])
+ local nameAndType = assert(app.constPool[cpe.nameAndTypeIdx])
+ cpe.className = assert(classRef.className)
+ cpe.methodName = assert(app.constPool[nameAndType.nameIdx].str)
+ cpe.methodType = assert(app.constPool[nameAndType.typeIdx].str)
+ elseif cpe.type == "INVOKE_DYNAMIC" then
+ local nameAndType = assert(app.constPool[cpe.nameAndTypeIdx])
+ -- NOTE(review): bootstrapMethodAttrIdx indexes the BootstrapMethods
+ -- attribute, not the constant pool — this lookup (result unused)
+ -- presumably grabs an unrelated entry; confirm.
+ local bootstrapMethod = assert(app.constPool[cpe.bootstrapMethodAttrIdx], cpe.bootstrapMethodAttrIdx);
+ cpe.methodName = assert(app.constPool[nameAndType.nameIdx].str)
+ cpe.methodType = assert(app.constPool[nameAndType.typeIdx].str)
+ --cpe.factoryClass = ;
+ --cpe.factoryMethod = ;
+ --cpe.factoryType = ;
+ end
+ end
+ -- debug-print
+ -- NOTE(review): these pool indices are hard-coded for one particular
+ -- classfile; app.constPool[cpIdx] is nil for other inputs and the
+ -- pairs() below will then raise.
+ snk:write("\n")
+ for _,cpIdx in pairs{ 13, 14, 15, 227, 230, 236, 704, 709, 717 }do
+ snk:write("CONST_POOL @ ".. cpIdx .."\n")
+ for k,v in pairs(app.constPool[cpIdx])do print("X",k,v)end
+ end
+ for i, cpe in pairs(app.constPool) do
+ if false then
+ --elseif cpe.type == "CLASSREF" then
+ -- snk:write("CLASS \"".. cpe.className .."\"\n")
+ end
+ end
+ end,
+ }
+end
+
+
+-- Entry point: open the classfile named by the 1st CLI arg and stream it
+-- through the parser configured by initParser().
+function main()
+    local app = objectSeal{
+        parser = false,
+        constPool = {},
+    }
+    initParser(app)
+    -- Read 1st arg as a classfile and pump it into the parser.
+    local src = arg[1] and io.open( arg[1], "rb" ) or nil
+    if not src then
+        print("ERROR: Failed to open file from 1st arg: "..(arg[1]or"nil")) return
+    end
+    for buf in function() return src:read(8192) end do
+        app.parser:write(buf)
+    end
+    app.parser:closeSnk()
+end
+
+
+main()
diff --git a/src/main/lua/mshitteams/ListEmlInbox.lua b/src/main/lua/mshitteams/ListEmlInbox.lua
new file mode 100644
index 0000000..23b42aa
--- /dev/null
+++ b/src/main/lua/mshitteams/ListEmlInbox.lua
@@ -0,0 +1,322 @@
+--
+-- Sources:
+-- - [Authorize](https://learn.microsoft.com/en-us/graph/auth-v2-user?tabs=http)
+-- - [Auth without app register](https://techcommunity.microsoft.com/t5/teams-developer/authenticate-microsoft-graph-api-with-username-and-password/m-p/3940540)
+--
+-- TODO: scriptlee 0.0.5-83-gdffa272 seems to SEGFAULT constantly here. No
+-- matter if we use socket or newHttpClient.
+-- TODO: scriptlee 0.0.5-87-g946ebdc crashes through assertion:
+-- Assertion failed: cls->msg.connect.sck->vt->unwrap != NULL, file src/windoof/c/io/AsyncIO.c, line 421
+--
+
+local SL = require("scriptlee")
+local newHttpClient = SL.newHttpClient
+--local AF_INET = SL.posix.AF_INET
+--local getaddrinfo = SL.posix.getaddrinfo
+--local INADDR_ANY = SL.posix.INADDR_ANY
+--local inaddrOfHostname = SL.posix.inaddrOfHostname
+--local IPPROTO_TCP = SL.posix.IPPROTO_TCP
+local objectSeal = SL.objectSeal
+--local SOCK_STREAM = SL.posix.SOCK_STREAM
+--local socket = SL.posix.socket
+local startOrExecute = SL.reactor.startOrExecute
+--for k,v in pairs(SL)do print("SL",k,v)end os.exit(1)
+SL = nil
+
+local authorizeToMsGraphApi, getAccessToken, getAuthHdr, httpUrlEncode, main, parseArgs, printHelp,
+ run, getMyProfileForDebugging
+local inn, out, log = io.stdin, io.stdout, io.stderr
+
+
+-- Writes the CLI usage/help text to stdout ('out').
+function printHelp()
+ out:write(" \n"
+ .." Experiments for M$ graph API.\n"
+ .." \n"
+ .." WARN: This tool is experimental! Do NOT use it!\n"
+ .." \n"
+ .." Options:\n"
+ .." \n"
+ .." --user <str>\n"
+ .." M$ user.\n"
+ .." \n"
+ .." --pass <str>\n"
+ .." M$ password. TODO get rid of this insecure idea.\n"
+ .." \n"
+ .." --appId <str>\n"
+ .." AppId (aka client_id). See M$ doc about it.\n"
+ .." \n")
+end
+
+
+-- Parses CLI args into 'app'. Returns 0 on success, -1 on bad input
+-- (after logging an EINVAL message).
+function parseArgs( app )
+    if #_ENV.arg == 0 then log:write("EINVAL: Args missing\n")return-1 end
+    -- Options that take a value, mapped to their target field in 'app'.
+    local valueOpts = {
+        ["--user"]  = "msUser",
+        ["--pass"]  = "msPass",
+        ["--appId"] = "msAppId",
+    }
+    local argv, iA = _ENV.arg, 0
+    while true do
+        iA = iA + 1
+        local a = argv[iA]
+        if not a then break end
+        if a == "--help" then app.isHelp = true; return 0 end
+        local field = valueOpts[a]
+        if not field then log:write("EINVAL: ".. a .."\n") return-1 end
+        iA = iA + 1
+        local v = argv[iA]
+        if not v then log:write("EINVAL: ".. a .." needs value\n")return-1 end
+        app[field] = v
+    end
+    if not app.msUser then log:write("EINVAL: --user missing\n") return-1 end
+    if not app.msPass then log:write("EINVAL: --pass missing\n") return-1 end
+    if not app.msAppId then log:write("EINVAL: --appId missing\n")return-1 end
+    return 0
+end
+
+
+-- Debug helper: GETs /v1.0/me on the graph host using the bearer token
+-- from getAuthHdr() (so app.msToken must be set). Non-200 responses are
+-- logged request+response in full and make onRspEnd raise; a 200 body is
+-- collected and echoed to 'log'.
+function getMyProfileForDebugging( app )
+ local http = app.http
+ local authKey, authVal = getAuthHdr(app)
+ -- Per-request state; 'base' becomes the scriptlee request handle.
+ local req = objectSeal{
+ base = false,
+ method = "GET",
+ uri = "/v1.0/me",
+ rspCode = false,
+ rspBody = {},
+ }
+ req.base = http:request{
+ cls = req,
+ host = app.msGraphHost,
+ port = app.msGraphPort,
+ connectTimeoutMs = 3000,
+ method = req.method,
+ url = req.uri,
+ hdrs = {
+ { authKey, authVal },
+ },
+ --useHostHdr = ,
+ --useTLS = true,
+ onRspHdr = function( rsp, cls )
+ cls.rspCode = rsp.status
+ if rsp.status ~= 200 then
+ log:write("> ".. req.method .." ".. req.uri .."\n> \n")
+ log:write("< ".. rsp.proto .." ".. rsp.status .." ".. rsp.phrase .."\n")
+ for _,h in ipairs(rsp.headers)do log:write("< "..h[1]..": "..h[2].."\n")end
+ log:write("\n")
+ end
+ end,
+ onRspChunk = function(buf, cls)
+ -- Error bodies go to the log "< "-quoted; success bodies are buffered.
+ if cls.rspCode ~= 200 then
+ log:write("< ")
+ log:write((buf:gsub("\n", "\n< ")))
+ log:write("\n")
+ else
+ assert(type(buf) == "string")
+ table.insert(cls.rspBody, buf)
+ end
+ end,
+ onRspEnd = function(cls)
+ if cls.rspCode ~= 200 then error("Request failed.") end
+ cls.rspBody = table.concat(cls.rspBody)
+ log:write("Response was:\n\n")
+ log:write(cls.rspBody)
+ log:write("\n\n")
+ end,
+ }
+ req.base:closeSnk()
+end
+
+
+-- Requests an OAuth2 token from the MS login endpoint using the
+-- resource-owner-password grant, logging the exchange. When app.proxyHost
+-- is set, the request goes to the proxy with an absolute URI; otherwise
+-- straight to msLoginHost.
+-- NOTE(review): 'reqBody' is assembled but the write that would send it is
+-- commented out at the bottom, so the token request currently goes out
+-- with an empty body (likely related to the crash TODOs atop this file).
+-- NOTE(review): 'resource=' with grant_type=password is v1-endpoint style,
+-- yet the URI targets oauth2/v2.0 (which expects 'scope=') — confirm.
+function authorizeToMsGraphApi( app )
+ local http = app.http
+ local req = objectSeal{
+ base = false,
+ method = "GET",
+ host = (app.proxyHost or app.msLoginHost),
+ port = (app.proxyPort or app.msLoginPort),
+ uri = false,
+ hdrs = {
+ { "Content-Type", "application/x-www-form-urlencoded" },
+ },
+ reqBody = ""
+ .. "grant_type=password"
+ .."&resource=https://graph.microsoft.com"
+ .."&username=".. httpUrlEncode(app, app.msUser) ..""
+ .."&password=".. httpUrlEncode(app, app.msPass) .."",
+ rspProto = false, rspCode = false, rspPhrase = false,
+ rspHdrs = false,
+ rspBody = {},
+ }
+ -- Proxied requests need the absolute URI in the request line.
+ if app.proxyHost then
+ req.uri = "https://".. app.msLoginHost ..":".. app.msLoginPort
+ .."/".. app.msTenant .."/oauth2/v2.0/token"
+ else
+ req.uri = "/".. app.msTenant .."/oauth2/v2.0/token"
+ end
+ local ok, ex = xpcall(function()
+ req.base = http:request{
+ cls = req,
+ connectTimeoutMs = app.connectTimeoutMs,
+ host = req.host,
+ port = req.port,
+ method = req.method,
+ url = req.uri,
+ hdrs = req.hdrs,
+ onRspHdr = function( rsp, req )
+ req.rspProto = rsp.proto
+ req.rspCode = rsp.status
+ req.rspPhrase = rsp.phrase
+ req.rspHdrs = rsp.headers
+ end,
+ onRspChunk = function( buf, req ) table.insert(req.rspBody, buf) end,
+ onRspEnd = function( req )
+ local rspBody = table.concat(req.rspBody) req.rspBody = false
+ -- Non-200: dump the whole exchange to the log, then bail out.
+ if req.rspCode ~= 200 then
+ log:write("[ERROR] Request failed\n")
+ log:write("peer ".. req.host ..":".. req.port .."\n")
+ log:write("> ".. req.method .." ".. req.uri .."\n")
+ for _, h in ipairs(req.hdrs) do log:write("> ".. h[1] ..": ".. h[2] .."\n") end
+ log:write("> \n")
+ log:write("> ".. req.reqBody:gsub("\r?\n", "\n> ") .."\n")
+ log:write("< ".. req.rspProto .." ".. req.rspCode .." ".. req.rspPhrase .."\n")
+ for _, h in ipairs(req.rspHdrs) do log:write("< ".. h[1] ..": ".. h[2] .."\n")end
+ log:write("< \n")
+ log:write("< ".. rspBody:gsub("\r?\n", "\n< ") .."\n")
+ error("TODO_10aa11de804e733337e7c244298791c6")
+ end
+ log:write("< ".. req.rspProto .." ".. req.rspCode .." ".. req.rspPhrase .."\n")
+ for _, h in ipairs(req.rspHdrs) do log:write("< ".. h[1] ..": ".. h[2] .."\n")end
+ log:write("< \n")
+ log:write("< ".. rspBody:gsub("\r?\n", "\n< ") .."\n")
+ -- How to continue:
+ --local token = rsp.bodyJson.access_token
+ --local authHdr = { "Authorization", "Bearer ".. token, }
+ end,
+ }
+ end, debug.traceback)
+ if not ok then
+ -- Setting up / sending the request itself blew up: log what we tried
+ -- to send, then re-raise with the captured traceback.
+ log:write("[ERROR] Request failed 2\n")
+ log:write("peer ".. req.host ..":".. req.port .."\n")
+ log:write("> ".. req.method .." ".. req.uri .."\n")
+ for _, h in ipairs(req.hdrs) do log:write("> ".. h[1] ..": ".. h[2] .."\n") end
+ log:write("> \n")
+ log:write("> ".. req.reqBody:gsub("\r?\n", "\n> ") .."\n")
+ error(ex)
+ end
+ --req.base:write(req.reqBody)
+ req.base:closeSnk()
+end
+
+
+-- Percent-encodes 'str' for an application/x-www-form-urlencoded request
+-- body. Unreserved bytes (dash, dot, digits, A-Z, a-z) are copied through
+-- verbatim; every other byte becomes "%XX" (uppercase hex).
+-- 'app' is unused; kept so the call style matches its sibling helpers.
+function httpUrlEncode( app, str )
+    local hexDigits, ret, beg, iRd = "0123456789ABCDEF", {}, 1, 0
+    ::nextInputChar::
+    iRd = iRd + 1
+    local byt = str:byte(iRd)
+    if not byt then
+    elseif byt == 0x2D -- dash
+        or byt == 0x2E -- dot
+        or byt >= 0x30 and byt <= 0x39 -- 0-9
+        or byt >= 0x41 and byt <= 0x5A -- A-Z (was 0x40, which wrongly let '@' through)
+        or byt >= 0x61 and byt <= 0x7A -- a-z (was 0x60, which wrongly let '`' through)
+    then
+        goto nextInputChar
+    end
+    -- Flush the run of verbatim bytes collected so far.
+    if beg < iRd then table.insert(ret, str:sub(beg, iRd-1)) end
+    if not byt then return table.concat(ret) end
+    -- Emit "%XX". NOTE: the shift must be parenthesized: in Lua '+' binds
+    -- tighter than '>>', so the old "(byt & 0xF0) >> 4 +1" shifted by 5
+    -- and produced wrong hex digits for the high nibble.
+    table.insert(ret, "%")
+    local hi = ((byt & 0xF0) >> 4) + 1
+    local lo = (byt & 0x0F) + 1
+    table.insert(ret, hexDigits:sub(hi, hi) .. hexDigits:sub(lo, lo))
+    beg = iRd + 1
+    goto nextInputChar
+end
+
+
+-- Builds (but does not send) an OAuth2 authorization-code token request.
+-- NOTE(review): unfinished stub — 'scope', 'code' and 'redirUri' are
+-- undefined globals (the concatenation below raises on nil), 'app.appId'
+-- should presumably be 'app.msAppId', and none of method/uri/hdrs/body is
+-- used or returned. Nothing calls this yet.
+function getAccessToken( app )
+ -- See "https://learn.microsoft.com/en-us/graph/auth-v2-user?tabs=http#3-request-an-access-token"
+ local method = "POST"
+ local uri = "/".. app.msTenant .."/oauth2/v2.0/token"
+ -- NOTE(review): a Host header value should not carry a scheme.
+ local hdrs = {
+ { "Host", "https://login.microsoftonline.com" },
+ { "Content-Type", "application/x-www-form-urlencoded" },
+ }
+ local body = ""
+ .."client_id=".. assert(app.appId)
+ .."&scope=".. scope
+ .."&code=".. code
+ .."&redirect_uri=".. redirUri
+ .."&grant_type=authorization_code"
+end
+
+
+-- @return 1 - HTTP header key (always "Authorization")
+-- @return 2 - HTTP header value ("Bearer <app.msToken>"; asserts the
+--             token is present)
+function getAuthHdr( app )
+    local token = assert(app.msToken)
+    return "Authorization", "Bearer ".. token
+end
+
+
+-- Creates the HTTP client, then performs the MS graph login dance.
+function run( app )
+    local httpClient = newHttpClient{}
+    app.http = httpClient
+    authorizeToMsGraphApi(app)
+    --getMyProfileForDebugging(app)
+end
+
+
+-- Entry point: pick one of the hardcoded endpoint wirings (real endpoints,
+-- local stub, or mixtures behind a local proxy), build the sealed app
+-- state, parse CLI args, then run.
+function main()
+    -- { loginHost, loginPort, graphHost, graphPort, proxyHost, proxyPort }
+    local choices = {
+        [1] = { "login.microsoftonline.com", 443, "graph.microsoft.com", 443, "127.0.0.1", 3128 },
+        [2] = { "127.0.0.1", 8081, "127.0.0.1", 8081, false, false },
+        [3] = { "login.microsoftonline.com", 443, "127.0.0.1", 8081, "127.0.0.1", 3128 },
+        [4] = { "login.microsoftonline.com", 443, "graph.microsoft.com", 443, false, false },
+    }
+    local choice = 3
+    local c = choices[choice]
+    if not c then error("TODO_1700683244") end
+    local loginHost, loginPort, graphHost, graphPort, proxyHost, proxyPort =
+        c[1], c[2], c[3], c[4], c[5], c[6]
+    local app = objectSeal{
+        isHelp = false,
+        msLoginHost = loginHost, msLoginPort = loginPort,
+        msGraphHost = graphHost, msGraphPort = graphPort,
+        proxyHost = proxyHost, proxyPort = proxyPort,
+        -- TODO take this from a failed api call, which has this in the rsp headers.
+        msTenant = "common", -- TODO configurable
+        -- TODO take this from a failed api call, which has this in the rsp headers.
+        msAppId = false,
+        msPerms = "offline_access user.read mail.read",
+        msToken = false,
+        msUser = false,
+        msPass = false,
+        http = false,
+        connectTimeoutMs = 3000,
+        --sck = false,
+    }
+    if parseArgs(app) ~= 0 then os.exit(1) end
+    if app.isHelp then printHelp() return end
+    run(app)
+end
+
+
+startOrExecute(main)
+
diff --git a/src/main/lua/mshitteams/SendRawMsEmail.lua b/src/main/lua/mshitteams/SendRawMsEmail.lua
new file mode 100644
index 0000000..2d2940e
--- /dev/null
+++ b/src/main/lua/mshitteams/SendRawMsEmail.lua
@@ -0,0 +1,60 @@
+
+local SL = require("scriptlee")
+--local newHttpClient = SL.newHttpClient
+--local newShellcmd = SL.newShellcmd
+--local objectSeal = SL.objectSeal
+--local parseJSON = SL.parseJSON
+--local sleep = SL.posix.sleep
+--local newCond = SL.posix.newCond
+--local async = SL.reactor.async
+--local startOrExecute = SL.reactor.startOrExecute
+--for k,v in pairs(SL)do print("SL",k,v)end os.exit(1)
+SL = nil
+
+local mod = {}
+local inn, out, log = io.stdin, io.stdout, io.stderr
+
+
+-- Writes the (still mostly empty) CLI usage text to stdout ('out').
+function mod.printHelp()
+ out:write(" \n"
+ .." Options:\n"
+ .." \n"
+ .."\n\n")
+end
+
+
+-- Parses CLI args. Returns 0 on success, -1 on bad input.
+-- NOTE(review): 'isStdinn' can never become true (no option sets it), so
+-- every invocation without --help fails with "Bad args" — presumably an
+-- unfinished placeholder consistent with run() below; confirm the
+-- intended option set before "fixing".
+function mod.parseArgs( app )
+ local isStdinn = false
+ local iA = 0
+ while true do iA = iA + 1
+ local arg = _ENV.arg[iA]
+ if not arg then
+ break
+ elseif arg == "--help" then
+ app.isHelp = true; return 0
+ else
+ log:write("Unknown arg: ".. arg .."\n") return-1
+ end
+ end
+ if not isStdinn then log:write("Bad args\n")return-1 end
+ return 0
+end
+
+
+-- The actual work would happen here; not implemented yet.
+function mod.run( app )
+ error("TODO_20230608125925")
+end
+
+
+-- Entry point: build app state, parse CLI, dispatch to help or run.
+-- NOTE: fixed — 'objectSeal' and 'startOrExecute' are not in scope in this
+-- file (their requires at the top are commented out), so the original
+-- 'objectSeal{...}' and 'startOrExecute(mod.main)' both died with
+-- "attempt to call a nil value". Use a plain table and a direct call until
+-- the scriptlee reactor is actually needed here.
+function mod.main()
+    local app = {
+        isHelp = false,
+    }
+    if mod.parseArgs(app) ~= 0 then os.exit(1) end
+    if app.isHelp then mod.printHelp() return end
+    mod.run(app)
+end
+
+
+mod.main()
+
diff --git a/src/main/lua/paisa-fleet/FindFullDisks.lua b/src/main/lua/paisa-fleet/FindFullDisks.lua
new file mode 100644
index 0000000..9963838
--- /dev/null
+++ b/src/main/lua/paisa-fleet/FindFullDisks.lua
@@ -0,0 +1,322 @@
+
+local SL = require("scriptlee")
+local newHttpClient = SL.newHttpClient
+local newShellcmd = SL.newShellcmd
+local newSqlite = SL.newSqlite
+local objectSeal = SL.objectSeal
+local parseJSON = SL.parseJSON
+local startOrExecute = SL.reactor.startOrExecute
+SL = nil
+
+local log = io.stdout
+
+
+-- Print usage/help text to stdout.
+function printHelp()
+    local txt = "\n"
+        .." WARN: This is experimental.\n"
+        .." \n"
+        .." Options:\n"
+        .." --backendHost <inaddr> (eg \"localhost\")\n"
+        .." --backendPort <int> (eg 80)\n"
+        .." --sshPort <int> (eg 22)\n"
+        .." --sshUser <str> (eg \"eddieuser\")\n"
+        .." --state <path> (eg \"path/to/state\")\n"
+        .." \n"
+    io.write(txt)
+end
+
+
+-- Parse CLI args from the global 'arg' table into 'app'.
+-- Returns 0 on success, nil on error (caller treats non-zero as failure).
+function parseArgs( app )
+    app.backendPort = 80
+    app.sshPort = 22
+    app.sshUser = os.getenv("USERNAME") or false
+    app.statePath = ":memory:"
+    local iA = 0
+    ::nextArg::
+    iA = iA + 1
+    local arg = _ENV.arg[iA]
+    if not arg then
+        goto verifyResult
+    elseif arg == "--help" then
+        app.isHelp = true return 0
+    elseif arg == "--backendHost" then
+        iA = iA + 1; arg = _ENV.arg[iA]
+        if not arg then log:write("EINVAL: --backendHost needs value\n")return end
+        app.backendHost = arg
+    elseif arg == "--backendPort" then
+        iA = iA + 1; arg = _ENV.arg[iA]
+        if not arg then log:write("EINVAL: --backendPort needs value\n")return end
+        -- BUGFIX: previously assigned app.backendHost (copy-paste error),
+        -- which both clobbered the host and left the port at its default.
+        app.backendPort = arg
+    elseif arg == "--sshPort" then
+        iA = iA + 1; arg = _ENV.arg[iA]
+        if not arg then log:write("EINVAL: --sshPort needs value\n")return end
+        app.sshPort = arg
+    elseif arg == "--sshUser" then
+        iA = iA + 1; arg = _ENV.arg[iA]
+        if not arg then log:write("EINVAL: --sshUser needs value\n")return end
+        app.sshUser = arg
+    elseif arg == "--state" then
+        iA = iA + 1; arg = _ENV.arg[iA]
+        if not arg then log:write("EINVAL: --state needs value\n")return end
+        app.statePath = arg
+    else
+        log:write("EINVAL: ".. arg .."\n")return
+    end
+    goto nextArg
+    ::verifyResult::
+    if not app.backendHost then log:write("EINVAL: --backendHost missing\n")return end
+    if not app.sshUser then log:write("EINVAL: --sshUser missing")return end
+    return 0
+end
+
+
+-- Lazily open the sqlite state DB (app.statePath, ":memory:" by default)
+-- and ensure the DeviceDfLog table exists. Caches handle in app.stateDb.
+function getStateDb(app)
+    if not app.stateDb then
+        local db = newSqlite{ database = assert(app.statePath) }
+        -- TODO normalize scheme
+        db:prepare("CREATE TABLE IF NOT EXISTS DeviceDfLog(\n"
+            .."  id INTEGER PRIMARY KEY,\n"
+            .."  \"when\" TEXT NOT NULL,\n" -- "https://xkcd.com/1179"
+            .."  hostname TEXT NOT NULL,\n"
+            .."  eddieName TEXT NOT NULL,\n"
+            .."  rootPartitionUsedPercent INT,\n"
+            .."  varLibDockerUsedPercent INT,\n"
+            .."  varLogUsedPercent INT,\n"
+            .."  dataUsedPercent INT,\n"
+            .."  stderr TEXT NOT NULL,\n"
+            .."  stdout TEXT NOT NULL)\n"
+            ..";"):execute()
+        app.stateDb = db
+    end
+    return app.stateDb
+end
+
+
+-- Extract df(1) "Use%" numbers for the interesting mountpoints from the
+-- captured stdout and persist one DeviceDfLog row. Percent fields stay
+-- NULL when a mountpoint did not appear in the output.
+function storeDiskFullResult( app, hostname, eddieName, stderrBuf, stdoutBuf )
+    assert(app and hostname and eddieName and stderrBuf and stdoutBuf);
+    local rootPartitionUsedPercent = stdoutBuf:match("\n/[^ ]+ +%d+ +%d+ +%d+ +(%d+)%% /\n")
+    local varLibDockerUsedPercent = stdoutBuf:match("\n[^ ]+ +%d+ +%d+ +%d+ +(%d+)%% /var/lib/docker\n")
+    local dataUsedPercent = stdoutBuf:match("\n[^ ]+ +%d+ +%d+ +%d+ +(%d+)%% /data\n")
+    local varLogUsedPercent = stdoutBuf:match("\n[^ ]+ +%d+ +%d+ +%d+ +(%d+)%% /var/log\n")
+    -- BUGFIX: column list previously named dataUsedPercent twice and so
+    -- mis-paired varLibDockerUsedPercent; each column now appears once.
+    local stmt = getStateDb(app):prepare("INSERT INTO DeviceDfLog("
+        .." \"when\", hostname, eddieName, stderr, stdout,"
+        .." rootPartitionUsedPercent, varLibDockerUsedPercent, varLogUsedPercent, dataUsedPercent"
+        ..")VALUES("
+        .." $when, $hostname, $eddieName, $stderr, $stdout,"
+        .." $rootPartitionUsedPercent, $varLibDockerUsedPercent, $varLogUsedPercent, $dataUsedPercent);")
+    stmt:bind("$when", os.date("!%Y-%m-%dT%H:%M:%SZ"))
+    stmt:bind("$hostname", hostname)
+    stmt:bind("$eddieName", eddieName)
+    stmt:bind("$stderr", stderrBuf)
+    stmt:bind("$stdout", stdoutBuf)
+    stmt:bind("$rootPartitionUsedPercent", rootPartitionUsedPercent)
+    stmt:bind("$varLibDockerUsedPercent", varLibDockerUsedPercent)
+    stmt:bind("$varLogUsedPercent", varLogUsedPercent)
+    stmt:bind("$dataUsedPercent", dataUsedPercent)
+    stmt:execute()
+end
+
+
+-- For each device: ssh to its eddie (and from there, for FOOK devices,
+-- hop to the fook host), run df, and store the result in the state DB.
+-- The nested command strings are shell-quoted by hand; edit with care.
+function doWhateverWithDevices( app )
+    for k, dev in pairs(app.devices) do
+        log:write("[INFO ] Inspecting '".. dev.hostname .."' (@ ".. dev.eddieName ..") ...\n")
+        -- Innermost command, runs on the target device.
+        local fookCmd = "true"
+            .." && HOSTNAME=$(hostname|sed 's_.isa.localdomain__')"
+            .." && STAGE=$PAISA_ENV"
+            .." && printf \"remoteHostname=$HOSTNAME, remoteStage=$STAGE\\n\""
+            -- on some machine, df failed with "Stale file handle" But I want to continue
+            -- with next device regardless of such errors.
+            .." && df || true"
+        -- Middle command, runs on the eddie: verify we landed on the
+        -- expected host, then hop onward to the device.
+        local eddieCmd = "true"
+            .." && HOSTNAME=$(hostname|sed 's_.pnet.ch__')"
+            .." && STAGE=$PAISA_ENV"
+            .." && printf \"remoteEddieName=$HOSTNAME, remoteStage=$STAGE\\n\""
+            .." && if test \"${HOSTNAME}\" != \"".. dev.eddieName .."\"; then true"
+            ..    " && echo wrong host. Want ".. dev.eddieName .." found $HOSTNAME && false"
+            .." ;fi"
+            .." && ssh -oStrictHostKeyChecking=no -oUserKnownHostsFile=/dev/null"
+            ..    " -p".. app.sshPort .." ".. app.sshUser .."@".. ((dev.type == "FOOK")and"fook"or dev.hostname)
+            ..    " \\\n    --"
+            ..    " sh -c 'true && ".. fookCmd:gsub("'", "'\"'\"'") .."'"
+        -- Outermost command, runs locally.
+        local localCmd = assert(os.getenv("SSH_EXE"), "environ.SSH_EXE missing")
+            .." -oRemoteCommand=none -oStrictHostKeyChecking=no -oUserKnownHostsFile=/dev/null"
+            .." -p".. app.sshPort .." ".. app.sshUser .."@".. dev.eddieName ..""
+            .." \\\n    --"
+            .." sh -c 'true && ".. eddieCmd:gsub("'", "'\"'\"'") .."'"
+        -- TODO get rid of this ugly use-tmp-file-as-script workaround
+        local tmpPath = assert(os.getenv("TMP"), "environ.TMP missing"):gsub("\\", "/") .."/b30589uj30oahujotehuj.sh"
+        --log:write("[DEBUG] tmpPath '".. tmpPath .."'\n")
+        local tmpFile = assert(io.open(tmpPath, "wb"), "Failed to open '".. tmpPath .."'")
+        tmpFile:write("#!/bin/sh\n".. localCmd .."\n")
+        tmpFile:close()
+        --log:write("[DEBUG] tmpPath ".. tmpPath .."\n")
+        -- EndOf kludge
+        local cmd = objectSeal{
+            base = false,
+            stdoutBuf = {},
+            stderrBuf = {},
+        }
+        cmd.base = newShellcmd{
+            cls = cmd,
+            cmdLine = "sh \"".. tmpPath .."\"",
+            onStdout = function( buf, cmd ) table.insert(cmd.stdoutBuf, buf or"") end,
+            onStderr = function( buf, cmd ) table.insert(cmd.stderrBuf, buf or"") end,
+        }
+        cmd.base:start()
+        cmd.base:closeSnk()
+        local exit, signal = cmd.base:join(17)
+        cmd.stderrBuf = table.concat(cmd.stderrBuf)
+        cmd.stdoutBuf = table.concat(cmd.stdoutBuf)
+        -- Exit 255 is ssh's "connection failed": log and skip this device.
+        if exit == 255 and signal == nil then
+            log:write("[DEBUG] fd2: ".. cmd.stderrBuf:gsub("\n", "\n[DEBUG] fd2: "):gsub("\n%[DEBUG%] fd2: $", "") .."\n")
+            goto nextDevice
+        end
+        log:write("[DEBUG] fd1: ".. cmd.stdoutBuf:gsub("\n", "\n[DEBUG] fd1: "):gsub("\n%[DEBUG%] fd1: $", "") .."\n")
+        storeDiskFullResult(app, dev.hostname, dev.eddieName, cmd.stderrBuf, cmd.stdoutBuf)
+        if exit ~= 0 or signal ~= nil then
+            error("exit=".. tostring(exit)..", signal="..tostring(signal))
+        end
+        ::nextDevice::
+    end
+end
+
+
+-- Order app.devices so the most recently seen device comes first.
+function sortDevicesMostRecentlySeenFirst( app )
+    local byLastSeenDesc = function( a, b ) return a.lastSeen > b.lastSeen end
+    table.sort(app.devices, byLastSeenDesc)
+end
+
+
+-- Don't want to visit just seen devices over and over again. So drop devices
+-- we've recently seen from our devices-to-visit list.
+-- Don't want to visit just seen devices over and over again. So drop devices
+-- we've recently seen from our devices-to-visit list.
+function dropDevicesRecentlySeen( app )
+    -- Collect recently seen devices (seen within the last 42 hours).
+    local devicesToRemove = {}
+    local st = getStateDb(app):prepare("SELECT hostname FROM DeviceDfLog WHERE \"when\" > $tresholdDate")
+    st:bind("$tresholdDate", os.date("!%Y-%m-%dT%H:%M:%SZ", os.time()-42*3600))
+    local rs = st:execute()
+    while rs:next() do
+        local hostname = rs:value(1)
+        devicesToRemove[hostname] = true
+    end
+    -- Remove selected devices via swap-with-last; the index is decremented
+    -- after a removal so the swapped-in element gets examined too.
+    local numKeep, numDrop = 0, 0
+    local iD = 0 while true do iD = iD + 1
+        local device = app.devices[iD]
+        if not device then break end
+        if devicesToRemove[device.hostname] then
+            --log:write("[DEBUG] Drop '".. device.hostname .."' (".. device.eddieName ..")\n")
+            numDrop = numDrop + 1
+            app.devices[iD] = app.devices[#app.devices]
+            app.devices[#app.devices] = nil
+            iD = iD - 1
+        else
+            --log:write("[DEBUG] Keep '".. device.hostname .."' (".. device.eddieName ..")\n")
+            numKeep = numKeep + 1
+        end
+    end
+    log:write("[INFO ] Of "..(numKeep+numDrop).." devices from state visit ".. numKeep
+        .." and skip ".. numDrop .." (bcause seen recently)\n")
+end
+
+
+-- Fetch the device inventory from the backend over HTTP, classify each
+-- device by hostname pattern and fill app.devices. On non-200 responses
+-- the full response is dumped to the log and app.devices stays unset.
+function fetchDevices( app )
+    local req = objectSeal{
+        base = false,
+        method = "GET",
+        uri = "/houston/vehicle/inventory/v1/info/devices",
+        rspCode = false,
+        rspBody = false,
+        isDone = false,
+    }
+    req.base = app.http:request{
+        cls = req, connectTimeoutMs = 3000,
+        host = app.backendHost, port = app.backendPort,
+        method = req.method, url = req.uri,
+        onRspHdr = function( rspHdr, req )
+            req.rspCode = rspHdr.status
+            if rspHdr.status ~= 200 then
+                -- Not the expected response: dump request/response for diagnosis.
+                log:write(".-----------------------------------------\n")
+                log:write("| ".. req.method .." ".. req.uri .."\n")
+                log:write("| Host: ".. app.backendHost ..":".. app.backendPort .."\n")
+                log:write("+-----------------------------------------\n")
+                log:write("| ".. rspHdr.proto .." ".. rspHdr.status .." ".. rspHdr.phrase .."\n")
+                for i,h in ipairs(rspHdr.headers) do log:write("| ".. h[1] ..": ".. h[2] .."\n") end
+                log:write("| \n")
+            end
+        end,
+        onRspChunk = function( buf, req )
+            if req.rspCode ~= 200 then log:write("| ".. buf:gsub("\n", "\n| ")) return end
+            if buf then
+                if not req.rspBody then req.rspBody = buf
+                else req.rspBody = req.rspBody .. buf end
+            end
+        end,
+        onRspEnd = function( req )
+            if req.rspCode ~= 200 then log:write("\n'-----------------------------------------\n") end
+            req.isDone = true
+        end,
+    }
+    req.base:closeSnk()
+    assert(req.isDone)
+    if req.rspCode ~= 200 then log:write("ERROR: Couldn't fetch devices\n")return end
+    assert(not app.devices)
+    app.devices = {}
+    log:write("[DEBUG] rspBody.len is ".. req.rspBody:len() .."\n")
+    --io.write(req.rspBody)io.write("\n")
+    for iD, device in pairs(parseJSON(req.rspBody).devices) do
+        --print("Wa", iD, device)
+        --for k,v in pairs(device)do print("W",k,v)end
+        -- TODO how to access 'device.type'?
+        local hostname ,  eddieName ,  lastSeen
+            = device.hostname:value(), device.eddieName:value(), device.lastSeen:value()
+        -- Classify device type from its hostname shape.
+        local typ
+        if false then
+        elseif hostname:find("^eddie%d%d%d%d%d$") then
+            typ = "EDDIE"
+        elseif hostname:find("^fook%-[a-z0-9]+$") then
+            typ = "FOOK"
+        elseif hostname:find("^lunkwill%-[a-z0-9]+$") then
+            typ = "LUNKWILL"
+        elseif hostname:find("^fook$") then
+            log:write("[WARN ] WTF?!? '"..hostname.."'\n")
+            typ = false
+        else error("TODO_359zh8i3wjho "..hostname) end
+        table.insert(app.devices, objectSeal{
+            hostname = hostname,
+            eddieName = eddieName,
+            type = typ,
+            lastSeen = lastSeen,
+        })
+    end
+    log:write("[INFO ] Fetched ".. #app.devices .." devices.\n")
+end
+
+
+-- Orchestrate one scan: fetch inventory, skip recently-seen devices,
+-- then inspect the remainder.
+function run( app )
+    fetchDevices(app)
+    dropDevicesRecentlySeen(app)
+    --sortDevicesMostRecentlySeenFirst(app)
+    doWhateverWithDevices(app)
+end
+
+
+-- Entry point: build the sealed app state, parse args, then either
+-- print help or run the scan.
+function main()
+    local app = objectSeal{
+        isHelp = false,
+        backendHost = false,
+        backendPort = false,
+        sshPort = false,
+        sshUser = false,
+        statePath = false,
+        stateDb = false,
+        http = newHttpClient{},
+        devices = false,
+    }
+    if parseArgs(app) ~= 0 then os.exit(1) end
+    if app.isHelp then printHelp() return end
+    run(app)
+end
+
+
+startOrExecute(main)
+
+
diff --git a/src/main/lua/paisa-fleet/RmArtifactBaseDir.lua b/src/main/lua/paisa-fleet/RmArtifactBaseDir.lua
new file mode 100644
index 0000000..949d1fe
--- /dev/null
+++ b/src/main/lua/paisa-fleet/RmArtifactBaseDir.lua
@@ -0,0 +1,381 @@
+
+local SL = require("scriptlee")
+local newHttpClient = SL.newHttpClient
+local newShellcmd = SL.newShellcmd
+local newSqlite = SL.newSqlite
+local objectSeal = SL.objectSeal
+local parseJSON = SL.parseJSON
+local sleep = SL.posix.sleep
+local startOrExecute = SL.reactor.startOrExecute
+SL = nil
+local log = io.stdout
+
+
+-- Print usage/help text to stdout.
+function printHelp()
+    local txt = "\n"
+        .." WARN: This is experimental.\n"
+        .." \n"
+        .." Options:\n"
+        .." --backendHost <inaddr> (eg \"localhost\")\n"
+        .." --backendPort <int> (eg 80)\n"
+        .." --backendPath <str> (eg \"/houston\")\n"
+        .." --sshPort <int> (eg 22)\n"
+        .." --sshUser <str> (eg \"eddieuser\")\n"
+        .." --state <path> (eg \"path/to/state\")\n"
+        .." \n"
+        .." --exportLatestStatus\n"
+        .." \n"
+    io.write(txt)
+end
+
+
+-- Parse CLI args from the global 'arg' table into 'app'.
+-- Returns 0 on success, nil on error (caller treats non-zero as failure).
+function parseArgs( app )
+    app.backendPort = 80
+    app.statePath = ":memory:"
+    local iA = 0
+    ::nextArg::
+    iA = iA + 1
+    local arg = _ENV.arg[iA]
+    if not arg then
+        goto verifyResult
+    elseif arg == "--help" then
+        app.isHelp = true return 0
+    elseif arg == "--backendHost" then
+        iA = iA + 1; arg = _ENV.arg[iA]
+        if not arg then log:write("EINVAL: --backendHost needs value\n")return end
+        app.backendHost = arg
+    elseif arg == "--backendPort" then
+        iA = iA + 1; arg = _ENV.arg[iA]
+        if not arg then log:write("EINVAL: --backendPort needs value\n")return end
+        -- BUGFIX: previously assigned app.backendHost (copy-paste error),
+        -- which both clobbered the host and left the port at its default.
+        app.backendPort = arg
+    elseif arg == "--backendPath" then
+        iA = iA + 1; arg = _ENV.arg[iA]
+        if not arg then log:write("EINVAL: --backendPath needs value\n")return end
+        app.backendPath = arg
+    elseif arg == "--sshPort" then
+        iA = iA + 1; arg = _ENV.arg[iA]
+        if not arg then log:write("EINVAL: --sshPort needs value\n")return end
+        app.sshPort = arg
+    elseif arg == "--sshUser" then
+        iA = iA + 1; arg = _ENV.arg[iA]
+        if not arg then log:write("EINVAL: --sshUser needs value\n")return end
+        app.sshUser = arg
+    elseif arg == "--state" then
+        iA = iA + 1; arg = _ENV.arg[iA]
+        if not arg then log:write("EINVAL: --state needs value\n")return end
+        app.statePath = arg
+    elseif arg == "--exportLatestStatus" then
+        app.exportLatestStatus = true
+    else
+        log:write("EINVAL: ".. arg .."\n")return
+    end
+    goto nextArg
+    ::verifyResult::
+    if app.exportLatestStatus then
+        if not app.statePath then log:write("EINVAL: --state missing\n")return end
+    else
+        if not app.backendHost then log:write("EINVAL: --backendHost missing\n")return end
+        if not app.backendPath then log:write("EINVAL: --backendPath missing\n")return end
+        if app.backendPath:find("^C:.") then log:write("[WARN ] MSYS_NO_PATHCONV=1 likely missing? ".. app.backendPath.."\n") end
+    end
+    return 0
+end
+
+
+-- Drop eddies that already have an "OK" entry in the EddieLog, so reruns
+-- only visit the remaining (failed or unvisited) ones.
+function removeCompletedEddies( app )
+    local db = getStateDb(app)
+    local rs = db:prepare("SELECT eddieName FROM Eddie"
+        .." JOIN EddieLog ON Eddie.id = eddieId"
+        .." WHERE status = \"OK\";"):execute()
+    local eddieNamesToRemoveSet = {}
+    while rs:next() do
+        assert(rs:type(1) == "TEXT", rs:type(1))
+        assert(rs:name(1) == "eddieName", rs:name(1))
+        local eddieName = rs:value(1)
+        eddieNamesToRemoveSet[eddieName] = true
+    end
+    -- Rebuild app.eddies keeping only the not-yet-done ones.
+    local oldEddies = app.eddies
+    app.eddies = {}
+    local numKeep, numDrop = 0, 0
+    for _, eddie in pairs(oldEddies) do
+        if not eddieNamesToRemoveSet[eddie.eddieName] then
+            --log:write("[DEBUG] Keep '".. eddie.eddieName .."'\n")
+            numKeep = numKeep + 1
+            table.insert(app.eddies, eddie)
+        else
+            numDrop = numDrop + 1
+            --log:write("[DEBUG] Drop '".. eddie.eddieName .."': Already done\n")
+        end
+    end
+    log:write("[DEBUG] todo: ".. numKeep ..", done: ".. numDrop .."\n")
+end
+
+
+-- Record a visit outcome ("OK" or "ERROR") plus captured stdio for an
+-- eddie. Inserts the Eddie row on first sight (duplicate inserts are
+-- tolerated via the UNIQUE-constraint error filter below).
+function setEddieStatus( app, statusStr, eddieName, stderrStr, stdoutStr )
+    assert(type(app) == "table")
+    assert(type(eddieName) == "string")
+    assert(statusStr == "OK" or statusStr == "ERROR")
+    log:write("[DEBUG] setEddieStatus(".. eddieName ..", ".. statusStr ..")\n")
+    local db = getStateDb(app)
+    local stmt = db:prepare("INSERT INTO Eddie(eddieName)VALUES($eddieName);")
+    stmt:bind("$eddieName", eddieName)
+    local ok, emsg = xpcall(function()
+        stmt:execute()
+    end, debug.traceback)
+    -- Already-known eddie is fine; re-raise anything else.
+    if not ok and not emsg:find("UNIQUE constraint failed: Eddie.eddieName") then
+        error(emsg)
+    end
+    local stmt = db:prepare("INSERT INTO EddieLog('when',eddieId,status,stderr,stdout)"
+        .."VALUES($when, (SELECT rowid FROM Eddie WHERE eddieName = $eddieName), $status, $stderr, $stdout)")
+    stmt:reset()
+    stmt:bind("$when", os.date("!%Y-%m-%dT%H:%M:%S+00:00"))
+    stmt:bind("$eddieName", eddieName)
+    stmt:bind("$status", statusStr)
+    stmt:bind("$stderr", stderrStr)
+    stmt:bind("$stdout", stdoutStr)
+    stmt:execute()
+end
+
+
+-- Lazily open the sqlite state DB (app.statePath) and ensure the Eddie
+-- and EddieLog tables exist. Caches the handle in app.stateDb.
+function getStateDb( app )
+    if not app.stateDb then
+        app.stateDb = newSqlite{ database = app.statePath }
+        app.stateDb:prepare("CREATE TABLE IF NOT EXISTS Eddie(\n"
+            .."  id INTEGER PRIMARY KEY,\n"
+            .."  eddieName TEXT UNIQUE NOT NULL)\n"
+            ..";"):execute()
+        app.stateDb:prepare("CREATE TABLE IF NOT EXISTS EddieLog(\n"
+            .."  id INTEGER PRIMARY KEY,\n"
+            .."  'when' TEXT NOT NULL,\n"
+            .."  eddieId INT NOT NULL,\n"
+            .."  status TEXT, -- OneOf OK, ERROR\n"
+            .."  stderr TEXT NOT NULL,\n"
+            .."  stdout TEXT NOT NULL)\n"
+            ..";\n"):execute()
+    end
+    return app.stateDb
+end
+
+
+-- Fetch the preflux inventory from the backend over HTTP and fill
+-- app.eddies with {eddieName, lastSeen} records. On non-200 responses
+-- the full response is dumped to the log and app.eddies stays unset.
+function loadEddies( app )
+    local httpClient = newHttpClient{}
+    local req = objectSeal{
+        base = false,
+        method = "GET",
+        path = app.backendPath .."/data/preflux/inventory",
+        rspCode = false,
+        rspBody = false,
+        isDone = false,
+    }
+    req.base = httpClient:request{
+        cls = req,
+        host = app.backendHost, port = app.backendPort,
+        method = req.method, url = req.path,
+        onRspHdr = function( rspHdr, req )
+            req.rspCode = rspHdr.status
+            if rspHdr.status ~= 200 then
+                -- Not the expected response: dump request/response for diagnosis.
+                log:write(".-----------------------------------------\n")
+                log:write("| ".. req.method .." ".. req.path .."\n")
+                log:write("| Host: ".. app.backendHost ..":".. app.backendPort .."\n")
+                log:write("+-----------------------------------------\n")
+                log:write("| ".. rspHdr.proto .." ".. rspHdr.status .." ".. rspHdr.phrase .."\n")
+                for i,h in ipairs(rspHdr.headers) do
+                    log:write("| ".. h[1] ..": ".. h[2] .."\n")
+                end
+                log:write("| \n")
+            end
+        end,
+        onRspChunk = function( buf, req )
+            if req.rspCode ~= 200 then log:write("| ".. buf:gsub("\n", "\n| ")) return end
+            if buf then
+                if not req.rspBody then req.rspBody = buf
+                else req.rspBody = req.rspBody .. buf end
+            end
+        end,
+        onRspEnd = function( req )
+            if req.rspCode ~= 200 then log:write("\n'-----------------------------------------\n") end
+            req.isDone = true
+        end,
+    }
+    req.base:closeSnk()
+    assert(req.isDone)
+    if req.rspCode ~= 200 then log:write("ERROR: Couldn't load eddies\n")return end
+    local prefluxInventory = parseJSON(req.rspBody)
+    local eddies = {}
+    for eddieName, detail in pairs(prefluxInventory.hosts) do
+        table.insert(eddies, objectSeal{
+            eddieName = eddieName,
+            lastSeen = detail.lastSeen:value(),
+        })
+    end
+    app.eddies = eddies
+end
+
+
+-- ssh to every eddie in app.eddies and inspect (or, with the commented
+-- variants, clean up) the ARTIFACT_BASE_DIR. Success is detected by a
+-- random marker string echoed by the remote shell; the outcome is
+-- persisted per eddie via setEddieStatus().
+function makeWhateverWithEddies( app )
+    local ssh = "C:/Users/fankhauseand/.opt/gitPortable-2.27.0-x64/usr/bin/ssh.exe"
+    local cmdLinePre = ssh .." -oConnectTimeout=3 -oRemoteCommand=none"
+    if app.sshPort then cmdLinePre = cmdLinePre .." -p".. app.sshPort end
+    if app.sshUser then cmdLinePre = cmdLinePre .." \"-oUser=".. app.sshUser .."\"" end
+    for k,eddie in pairs(app.eddies) do
+        local eddieName = eddie.eddieName
+        local isEddie = eddieName:find("^eddie%d%d%d%d%d$")
+        local isTeddie = eddieName:find("^teddie%d%d$")
+        local isVted = eddieName:find("^vted%d%d$")
+        local isAws = eddieName:find("^10.117.%d+.%d+$")
+        local isDevMachine = eddieName:find("^w00[a-z0-9][a-z0-9][a-z0-9]$")
+        if isAws or isDevMachine or isVted then
+            log:write("[DEBUG] Skip \"".. eddieName .."\"\n")
+            goto nextEddie
+        end
+        assert(isEddie or isTeddie, eddieName or"nil")
+        local okMarker = "OK_".. math.random(10000000, 99999999) .."wCAkgQQA2AJAzAIA"
+        local cmdLine = cmdLinePre .." ".. eddieName
+            .." -- \"true"
+            -- BUGFIX: closing quote of the hostname test was misplaced after
+            -- "then true", producing broken shell. Now matches the working
+            -- pattern: test "<name>" != "$(hostname|sed ...)"; then ...
+            .. " && if test \"".. eddieName .."\" != \"$(hostname|sed 's,.pnet.ch$,,')\"; then true"
+            ..     " && echo WrongHost expected=".. eddieName .." actual=$(hostname|sed 's,.pnet.ch$,,') && false"
+            .. " ;fi"
+            .. " && echo hostname=$(hostname|sed 's,.pnet.ch,,')"
+            .. " && echo stage=${PAISA_ENV:?}"
+            .. " && echo Scan /data/instances/default/??ARTIFACT_BASE_DIR?"
+            --[[report only]]
+            --.. " && test -e /data/instances/default/??ARTIFACT_BASE_DIR? && ls -Ahl /data/instances/default/??ARTIFACT_BASE_DIR?"
+            --[[Find un-/affected eddies]]
+            .. " && if test -e /data/instances/default/??ARTIFACT_BASE_DIR?; then true"
+            .. " ;else true"
+            ..     " && echo ".. okMarker
+            .. " ;fi"
+            --[[DELETE them]]
+            --.. " && if test -e /data/instances/default/??ARTIFACT_BASE_DIR?; then true"
+            --..     " && find /data/instances/default/??ARTIFACT_BASE_DIR? -type d -mtime +420 -print -delete"
+            --.. " ;fi"
+            --.. " && echo ".. okMarker ..""
+            --[[]]
+            .. " \""
+        log:write("\n")
+        log:write("[INFO ] Try ".. eddieName .." ...\n")
+        log:write("[DEBUG] ".. cmdLine.."\n")
+        --log:write("[DEBUG] sleep ...\n")sleep(3)
+        local isStdioDone, isSuccess, stderrStr, stdoutStr = false, false, "", ""
+        local cmd = newShellcmd{
+            cmdLine = cmdLine,
+            onStdout = function( buf )
+                if buf then
+                    if buf:find("\n"..okMarker.."\n",0,true) then isSuccess = true end
+                    stdoutStr = stdoutStr .. buf
+                    io.stdout:write(buf)
+                else isStdioDone = true end
+            end,
+            onStderr = function( buf )
+                stderrStr = buf and stderrStr .. buf or stderrStr
+                io.stderr:write(buf or"")
+            end,
+        }
+        cmd:start()
+        cmd:closeSnk()
+        local exitCode, signal = cmd:join(42)
+        -- BUGFIX: was 'and', which only warned when BOTH a bad exit code and
+        -- a signal occurred; either one alone is already abnormal.
+        if exitCode ~= 0 or signal ~= nil then
+            log:write("[WARN ] code="..tostring(exitCode)..", signal="..tostring(signal).."\n")
+        end
+        while not isStdioDone do sleep(0.042) end
+        -- Analyze outcome
+        if not isSuccess then
+            setEddieStatus(app, "ERROR", eddieName, stderrStr, stdoutStr)
+            goto nextEddie
+        end
+        setEddieStatus(app, "OK", eddieName, stderrStr, stdoutStr)
+        ::nextEddie::
+    end
+end
+
+
+-- Order app.eddies so the most recently seen eddie comes first.
+function sortEddiesMostRecentlySeenFirst( app )
+    local byLastSeenDesc = function( a, b ) return a.lastSeen > b.lastSeen end
+    table.sort(app.eddies, byLastSeenDesc)
+end
+
+
+-- Quote a value for this script's semicolon-separated CSV output:
+-- wrap in double quotes (doubling embedded quotes) when the value
+-- contains a quote, the field separator, or a line break.
+-- Only strings are supported for now; other types fail loudly.
+function quoteCsvVal( v )
+    local typ = type(v)
+    if false then
+    elseif typ == "string" then
+        -- BUGFIX: ';' (the separator actually used by exportLatestStatus)
+        -- was missing from the must-quote class, so fields containing ';'
+        -- silently corrupted the CSV row structure.
+        if v:find("[\";\r\n]",0,false) then
+            v = '"'.. v:gsub('"', '""') ..'"'
+        end
+    else error("TODO_a928rzuga98oirh "..typ)end
+    return v
+end
+
+
+-- Export, as ';'-separated CSV on stdout, the latest EddieLog entry per
+-- eddie. Rows are ordered by eddieId then 'when'; an eddie's row is
+-- emitted when the iteration moves on to the next eddie.
+-- NOTE(review): 'rs' leaks into the global scope (missing 'local').
+-- NOTE(review): the emitted row uses the CURRENT result row's values,
+-- not the remembered prev* ones — looks like the first row of the NEXT
+-- eddie gets written instead of the completed eddie's latest row; also
+-- the very last eddie never gets flushed after the loop. Verify intent.
+function exportLatestStatus( app )
+    local snk = io.stdout
+    local db = getStateDb(app)
+    local stmt = db:prepare("SELECT \"when\",eddieName,status,stderr,stdout FROM EddieLog"
+        .." JOIN Eddie ON Eddie.id = eddieId"
+        .." ORDER BY eddieId,[when]"
+        .." ;")
+    rs = stmt:execute()
+    snk:write("c;when;eddieName;status;stderr;stdout\n")
+    local prevWhen, prevEddieName, prevStatus, prevStderr, prevStdout
+    local qt = quoteCsvVal
+    while rs:next() do
+        local when    ,  eddieName  ,  status     ,  stderr     ,  stdout
+            = rs:value(1), rs:value(2), rs:value(3), rs:value(4), rs:value(5)
+        --log:write("[DEBUG] "..tostring(when).." "..tostring(eddieName).." "..tostring(status).."\n")
+        assert(when and eddieName and status and stderr and stdout)
+        if eddieName == prevEddieName then
+            if not prevWhen or when > prevWhen then
+                --log:write("[DEBUG] ".. when .." ".. eddieName .." take\n")
+                goto assignPrevThenNextEntry
+            else
+                --log:write("[DEBUG] ".. when .." ".. eddieName .." obsolete\n")
+                goto nextEntry
+            end
+        elseif prevEddieName then
+            --log:write("[DEBUG] ".. when .." ".. eddieName .." Eddie complete\n")
+            snk:write("r;".. qt(when) ..";".. qt(eddieName) ..";".. qt(status) ..";".. qt(stderr) ..";".. qt(stdout) .."\n")
+        else
+            --log:write("[DEBUG] ".. when .." ".. eddieName .." Another eddie\n")
+            goto assignPrevThenNextEntry
+        end
+        ::assignPrevThenNextEntry::
+        --[[]] prevWhen, prevEddieName, prevStatus, prevStderr, prevStdout
+            =  when    , eddieName    , status    , stderr    , stdout
+        ::nextEntry::
+    end
+    snk:write("t;status;OK\n")
+end
+
+
+-- Orchestrate one pass: either export the stored status as CSV, or
+-- load the inventory, drop already-done eddies and visit the rest.
+function run( app )
+    if app.exportLatestStatus then
+        exportLatestStatus(app)
+        return
+    end
+    loadEddies(app)
+    assert(app.eddies)
+    removeCompletedEddies(app)
+    sortEddiesMostRecentlySeenFirst(app)
+    makeWhateverWithEddies(app)
+end
+
+
+-- Entry point: build the sealed app state, parse args, then either
+-- print help or run.
+function main()
+    local app = objectSeal{
+        isHelp = false,
+        backendHost = false,
+        backendPort = false,
+        backendPath = false,
+        sshPort = false,
+        sshUser = false,
+        statePath = false,
+        stateDb = false,
+        exportLatestStatus = false,
+        eddies = false,
+    }
+    if parseArgs(app) ~= 0 then os.exit(1) end
+    if app.isHelp then printHelp() return end
+    run(app)
+end
+
+
+startOrExecute(main)
+
diff --git a/src/main/lua/paisa-jvm-memLeak/LogStatistics.lua b/src/main/lua/paisa-jvm-memLeak/LogStatistics.lua
new file mode 100644
index 0000000..cbd84b2
--- /dev/null
+++ b/src/main/lua/paisa-jvm-memLeak/LogStatistics.lua
@@ -0,0 +1,112 @@
+
+local newLogParser = require("PaisaLogParser").newLogParser
+
+local inn, out, log = io.stdin, io.stdout, io.stderr
+
+local main, printHelp, parseArgs, run, onLogEntry, printStats
+
+
+-- Print usage/help text to stdout.
+function printHelp( app )
+    local txt = " \n"
+        .." TODO write help page\n"
+        .." \n"
+    io.stdout:write(txt)
+end
+
+
+-- Parse CLI args: only "--help" or "--yolo" (as sole first arg) are
+-- accepted. Returns 0 on success, nil on error.
+function parseArgs( app )
+    local arg = _ENV.arg[1]
+    if arg == "--help" then app.isHelp = true return 0 end
+    if arg ~= "--yolo" then log:write("EINVAL\n")return end
+    return 0
+end
+
+
+-- Parser callback: bucket log entries per timestamp (entry.date) and
+-- count only those matching the currently-enabled filter expression.
+-- The commented-out alternatives are past investigation candidates,
+-- annotated with whether they turned out HOT or a dead end.
+function onLogEntry( entry, app )
+    local isTheEntryWeReSearching = false
+        -- HOT!
+        --or (entry.file == "ContextImpl" and entry.msg:find("IllegalStateException: null"))
+        -- HOT!
+        or (entry.file == "HttpHeaderUtil" and entry.msg:find("Keep.Alive. values do not match timeout.42 .. timeout.120 for request "))
+        -- HOT!
+        --or (entry.msg:find("timetable"))
+        -- nope
+        --or (entry.file == "ContextImpl" and entry.msg:find("IllegalStateException: You must set the Content%-Length header"))
+        -- nope
+        --or (entry.file == "LocalHttpServerResponse" and entry.msg:find("non-proper HttpServerResponse occured", 0, true))
+        -- TODO
+    local instantKey = entry.date
+    local instant = app.instants[instantKey]
+    if not instant then
+        instant = {
+            date = entry.date,
+            count = 0,
+        }
+        app.instants[instantKey] = instant
+    end
+    if isTheEntryWeReSearching then
+        instant.count = instant.count + 1
+    end
+end
+
+
+-- Aggregate per-instant counts into coarser time buckets (first 15 chars
+-- of the date string) and print an ASCII bar chart scaled to the peak.
+function printStats( app )
+    -- Arrange data
+    local numGroups = 0
+    local groupSet = {}
+    local countMax = 1
+    for date, instant in pairs(app.instants) do
+        assert(date == instant.date)
+        local key = date:sub(1, 15)
+        local group = groupSet[key]
+        if not group then
+            numGroups = numGroups + 1
+            group = { key = key, date = date, count = 0, }
+            groupSet[key] = group
+        end
+        group.count = group.count + instant.count
+        if countMax < group.count then countMax = group.count end
+    end
+    local groupArr = {}
+    for _, group in pairs(groupSet) do
+        table.insert(groupArr, group)
+    end
+    table.sort(groupArr, function( a, b )return a.key < b.key end)
+    -- Plot
+    out:write("\n")
+    out:write(string.format("  Splitted into %9d groups\n", numGroups))
+    out:write(string.format("  Peak value %9d num log entries\n", countMax))
+    out:write("\n")
+    local fullBar = "xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx"
+    for _, group in pairs(groupArr) do
+        out:write(string.format("%s... |", group.key))
+        -- Bar length proportional to this bucket's share of the peak.
+        local len = math.floor(group.count / countMax * fullBar:len())
+        out:write(fullBar:sub(1, len))
+        out:write("\n")
+    end
+end
+
+
+-- Wire the log parser to onLogEntry, parse stdin, then print the chart.
+function run( app )
+    app.logParser = newLogParser{
+        cls = app,
+        patternV1 = "DATE STAGE SERVICE LEVEL FILE - MSG",
+        onLogEntry = onLogEntry,
+    }
+    app.logParser:tryParseLogs()
+    printStats(app)
+end
+
+
+-- Entry point: build app state, parse args, then print help or run.
+function main()
+    local app = {
+        isHelp = false,
+        logParser = false,
+        instants = {},
+    }
+    if parseArgs(app) ~= 0 then os.exit(1) end
+    if app.isHelp then printHelp() return end
+    run(app)
+end
+
+
+main()
diff --git a/src/main/lua/paisa-jvm-memLeak/MemLeakTry1.lua b/src/main/lua/paisa-jvm-memLeak/MemLeakTry1.lua
new file mode 100644
index 0000000..b17c00f
--- /dev/null
+++ b/src/main/lua/paisa-jvm-memLeak/MemLeakTry1.lua
@@ -0,0 +1,235 @@
+
+local inn, out, log = io.stdin, io.stdout, io.stderr
+local main, parseArgs, printHelp, run, runAsPipe, runWithStdinFilelist
+
+
+-- Print usage/help text to stdout.
+function printHelp()
+    local txt = " \n"
+        .." Try to get some useful data out of a 'smap' dump.\n"
+        .." \n"
+        .." Options:\n"
+        .." \n"
+        .." --yolo\n"
+        .."   WARN: Only use if you know what you do.\n"
+        .." \n"
+        .." --stdin-filelist\n"
+        .."   Read LF separated file list form stdin.\n"
+        .." \n"
+    io.stdout:write(txt)
+end
+
+
+-- Parse CLI args from the global 'arg' table into 'app'.
+-- Returns 0 on success, nil on error.
+-- NOTE(review): 'isYolo' is set but never read and never stored on
+-- 'app', so "--yolo" is accepted yet has no effect — verify intent.
+function parseArgs( app )
+    if #_ENV.arg == 0 then log:write("EINVAL: Try --help\n") return end
+    app.isHelp = false
+    local isYolo = false
+    local iA = 0
+    while true do iA = iA + 1
+        local arg = _ENV.arg[iA]
+        if not arg then
+            break
+        elseif arg == "--help" then
+            app.isHelp = true; return 0
+        elseif arg == "--yolo" then
+            isYolo = true
+        elseif arg == "--date" then
+            iA = iA + 1
+            app.dateStr = _ENV.arg[iA]
+            if not app.dateStr then log:write("EINVAL: --date needs value\n") return end
+        elseif arg == "--stdin-filelist" then
+            app.isStdinFilelist = true
+        else
+            log:write("EINVAL: ".. arg .."\n") return
+        end
+    end
+    return 0
+end
+
+
+-- Read one smaps-style dump from stdin and emit it as ';'-separated CSV
+-- (one row per mapping), optionally filtered by app.whitelist addresses.
+-- Stops at the trailing "total <n>" line.
+function runAsPipe( app )
+    local iLine = 0
+    if #app.whitelist > 0 then
+        log:write("[INFO ] Filtering enabled\n")
+    end
+    local isHdrWritten = false
+    while true do
+        iLine = iLine + 1
+        local buf = inn:read("l")
+        -- ROBUSTNESS: bail out cleanly on EOF instead of crashing in match()
+        -- when the input lacks the trailing "total" line.
+        if not buf then break end
+        if iLine == 1 then goto nextLine end
+        --log:write("BUF: ".. buf .."\n")
+        local addr, sz, perm, note = buf:match("^([%w]+) +(%d+[A-Za-z]?) ([^ ]+) +(.*)$")
+        if not sz and buf:find("^ +total +%d+[KMGTPE]$") then break end
+        if not sz then log:write("BUF: '"..tostring(buf).."'\n")error("TODO_20231103111415") end
+        if sz:find("K$") then sz = sz:gsub("K$", "") * 1024 end
+        if #app.whitelist > 0 then
+            -- BUGFIX: read the whitelist from 'app.whitelist'; the bare
+            -- global 'whitelist' is nil and made the filter drop every line.
+            if not app.whitelist[addr] then goto nextLine end
+        end
+        if not isHdrWritten then
+            isHdrWritten = true
+            out:write("c;     Addr        ;    Size      ; Perm  ;  Note  ; arg.date\n")
+        end
+        out:write(string.format("r; %s ; %12d ; %s ; %-12s ; %s\n", addr, sz, perm, note, (app.dateStr or"")))
+        ::nextLine::
+    end
+end
+
+
+-- Recursively dump 'obj' (string/number/nil/table) to 'out' for
+-- debugging, indenting nested tables by two spaces per level.
+-- 'isSubCall' suppresses the trailing newline on nested invocations.
+function debugPrintRecursive( out, obj, prefix, isSubCall )
+    local typ = type(obj)
+    if false then
+    elseif typ == "string" then
+        out:write("\"") out:write((obj:gsub("\n", "\\n"):gsub("\r", "\\r"))) out:write("\"")
+    elseif typ == "number" then
+        out:write(obj)
+    elseif typ == "nil" then
+        out:write("nil")
+    elseif typ == "table" then
+        local subPrefix = (prefix)and(prefix.."  ")or("  ")
+        for k, v in pairs(obj) do
+            out:write("\n") out:write(prefix or "")
+            debugPrintRecursive(out, k, prefix, true) out:write(": ")
+            debugPrintRecursive(out, v, subPrefix, true)
+        end
+    else
+        -- Unsupported type (function, userdata, boolean, ...): fail loudly.
+        error(tostring(typ))
+    end
+    if not isSubCall then out:write("\n")end
+end
+
+
+-- Read an LF-separated list of dump file paths from stdin, collect the
+-- per-address measurements of each file (file path doubles as the
+-- timestamp key), drop addresses whose size never changed, then print.
+function runWithStdinFilelist( app )
+    while true do
+        local srcFilePath = inn:read("l")
+        if not srcFilePath then break end
+        --log:write("[DEBUG] src file \"".. srcFilePath .."\"\n")
+        local srcFile = io.open(srcFilePath, "rb")
+        if not srcFile then error("fopen(\""..tostring(srcFilePath).."\")") end
+        collectData(app, srcFile, srcFilePath)
+    end
+    removeUnchanged(app)
+    printResult(app)
+end
+
+
+-- Parse one smaps-style dump from 'src' and record, per address, a
+-- size measurement keyed by 'timestamp'. Sizes with a "K" suffix are
+-- converted to bytes. Stops at the trailing "total" line.
+function collectData( app, src, timestamp )
+    assert(src)
+    assert(timestamp)
+    local iLine = 0
+    while true do
+        iLine = iLine + 1
+        local buf = src:read("l")
+        -- First line is the column header; skip it.
+        if iLine == 1 then goto nextLine end
+        local addr, sz, perm, note = buf:match("^([%w]+) +(%d+[A-Za-z]?) ([^ ]+) +(.*)$")
+        if not sz and buf:find("^ +total +%d+[A-Za-z]?\r?$") then break end
+        if not sz then log:write("[ERROR] BUF: '"..tostring(buf).."'\n")error("TODO_20231103111415") end
+        if sz:find("K$") then sz = sz:gsub("K$", "") * 1024 end
+        local addrObj = app.addrs[addr]
+        if not addrObj then
+            addrObj = { measures = {} }
+            app.addrs[addr] = addrObj
+        end
+        local measure = { ts = timestamp, sz = sz, }
+        assert(not addrObj.measures[timestamp])
+        addrObj.measures[timestamp] = measure
+        ::nextLine::
+    end
+end
+
+
+-- Drop addresses whose size was identical across every measurement:
+-- only mappings that changed size are interesting for leak hunting.
+function removeUnchanged( app )
+    local addrsWhichHaveChanged = {}
+    local knownSizes = {}
+    for addr, addrObj in pairs(app.addrs) do
+        for ts, measure in pairs(addrObj.measures) do
+            local knownSizeKey = assert(addr)
+            local knownSize = knownSizes[knownSizeKey]
+            if not knownSize then
+                -- First measurement seen for this address becomes the baseline.
+                knownSize = measure.sz;
+                knownSizes[knownSizeKey] = knownSize
+            elseif knownSize ~= measure.sz then
+                addrsWhichHaveChanged[addr] = true
+            end
+        end
+    end
+    -- Keep only the changed addresses.
+    local newAddrs = {}
+    for addr, addrObj in pairs(app.addrs) do
+        if addrsWhichHaveChanged[addr] then
+            newAddrs[addr] = addrObj
+        end
+    end
+    app.addrs = newAddrs
+end
+
+
+-- Print the collected measurements as ';'-separated CSV: one column per
+-- address, one row per timestamp (sorted). Cells with no measurement
+-- for that (addr, ts) pair are left empty.
+function printResult( app )
+    -- arrange data
+    local addrSet, tsSet, szByAddrAndTs = {}, {}, {}
+    for addr, addrObj in pairs(app.addrs) do
+        local measures = assert(addrObj.measures)
+        addrSet[addr] = true
+        for ts, measure in pairs(measures) do
+            assert(ts == measure.ts)
+            local sz = measure.sz
+            tsSet[ts] = true
+            szByAddrAndTs[addr.."\0"..ts] = sz
+        end
+    end
+    local addrArr, tsArr = {}, {}
+    for k,v in pairs(addrSet)do table.insert(addrArr, k) end
+    for k,v in pairs(tsSet)do table.insert(tsArr, k) end
+    table.sort(addrArr, function( a, b )return a < b end)
+    table.sort(tsArr, function( a, b )return a < b end)
+    --
+    out:write("c;file")
+    for _, addr in ipairs(addrArr) do out:write(";".. addr) end
+    out:write("\n")
+    for iTs, ts in ipairs(tsArr) do
+        out:write("r;".. filterTsForOutput(app, ts))
+        for iAddr, addr in ipairs(addrArr) do
+            local sz = szByAddrAndTs[assert(addr).."\0"..assert(ts)]
+            -- BUGFIX: 'sz' is nil when this address has no measurement at
+            -- this timestamp; concatenating nil raised an error. Emit an
+            -- empty cell instead.
+            out:write(";".. (sz or ""))
+        end
+        out:write("\n")
+    end
+end
+
+
+-- Convert a dump filename into a unix timestamp string for CSV output.
+-- NOTE(review): hard-coded to the "houston-prod-pmap-YYYYMMDD-HHMMSS.txt"
+-- naming scheme; os.time{} returns nil-derived errors for other names.
+function filterTsForOutput( app, ts )
+    local y, mnth, d, h, min, sec = ts:match("^houston%-prod%-pmap%-(%d%d%d%d)(%d%d)(%d%d)%-(%d%d)(%d%d)(%d%d).txt$")
+    return "".. os.time{ year=y, month=mnth, day=d, hour=h, min=min, sec=sec, }
+end
+
+
+-- Turn 'map' into an array of {key=,val=} pairs sorted by
+-- 'smallerPredicate' (defaults to ascending key order).
+function sortedFromMap( map, smallerPredicate )
+    local cmp = smallerPredicate or function( a, b )return a.key < b.key end
+    local arr = {}
+    for key, val in pairs(map) do
+        arr[#arr+1] = { key = key, val = val }
+    end
+    table.sort(arr, cmp)
+    return arr
+end
+
+
+-- Dispatch to the chosen mode: multi-file comparison via a stdin file
+-- list, or single-dump CSV conversion as a pipe filter.
+function run( app )
+    if app.isStdinFilelist then
+        runWithStdinFilelist(app)
+    else
+        runAsPipe(app)
+    end
+end
+
+
+-- Entry point: build app state, parse args, then print help or run.
+function main()
+    local app = {
+        isHelp = false,
+        isStdinFilelist = false,
+        addrs = {},
+        -- Optional address filter used by runAsPipe; empty means no filtering.
+        whitelist = {
+            --["00000000DEADBEAF"] = true,
+        }
+    }
+    if parseArgs(app) ~= 0 then os.exit(1) end
+    if app.isHelp then printHelp() return end
+    run(app)
+end
+
+
+main()
diff --git a/src/main/lua/paisa-logs/DigHoustonLogs.lua b/src/main/lua/paisa-logs/DigHoustonLogs.lua
new file mode 100644
index 0000000..92ef035
--- /dev/null
+++ b/src/main/lua/paisa-logs/DigHoustonLogs.lua
@@ -0,0 +1,252 @@
+#!/usr/bin/env lua
+--[====================================================================[
+
+ projDir='/c/path/to/proj/root'
+ export LUA_PATH="${projDir:?}/src/main/lua/paisa-logs/?.lua"
+ lua -W "${projDir:?}/src/main/lua/paisa-logs/DigHoustonLogs.lua"
+
+ ]====================================================================]
+
+local PaisaLogParser = require("PaisaLogParser")
+local normalizeIsoDateTime = require("PaisaLogParser").normalizeIsoDateTime
+local LOGDBG = function(msg)io.stderr:write(msg)end
+
+local main, onLogEntry, isWorthToPrint, loadFilters, initFilters
+
+
-- Entry point: build the config, load and compile the filter rules, then
-- stream logs from stdin through the parser; onLogEntry decides what gets
-- printed.
function main()
    local that = {
        logPattern = "DATE STAGE SERVICE LEVEL FILE - MSG", -- Since 2021-09-24 on prod
        printRaw = true,   -- print surviving entries verbatim instead of the parsed debug dump
        filters = false,   -- filled in by loadFilters() / initFilters()
    }
    loadFilters(that)
    initFilters(that)
    local parser = PaisaLogParser.newLogParser({
        cls = that,
        patternV1 = that.logPattern,
        onLogEntry = onLogEntry,
    })
    parser:tryParseLogs();
end
+
+
-- Build the declarative filter rule list on 'that.filters' (must not have
-- been set yet). Each rule is a plain table; initFilters() compiles them into
-- matcher closures later. Supported keys per rule: action ("drop"/"keep"),
-- level, file, msgPattern, msgEquals, stackPattern, stackStartsWith,
-- rawPattern, beforeDate, afterDate. The FIRST matching rule decides.
function loadFilters( that )
    assert(not that.filters)
    that.filters = {
        -- General: Append new rules AT END if not closely related to another one.

-- { action = "drop", beforeDate = "2024-10-18 03:00:00.000", },
-- { action = "drop", afterDate = "2024-01-31 23:59:59.999", },

        { action = "drop", level = "TRACE" },
        { action = "drop", level = "DEBUG" },
        { action = "drop", level = "INFO" },
        --{ action = "drop", level = "WARN" },

        -- FUCK those damn nonsense spam logs!!!
        -- NOTE(review): this rule drops EVERY "Forwarder" entry regardless of
        -- level, which makes all the more specific "Forwarder" rules further
        -- down unreachable. Same for "HttpClientRequestImpl" below vs. its
        -- detailed rule near the end.
        { action = "drop", file = "Forwarder" },
        { action = "drop", level = "ERROR", file = "HttpClientRequestImpl" },
        { action = "drop", level = "ERROR", file = "BisectClient" },

        -- Seen: 2024-04-10 prod.
        -- Reported 20240410 via "https://github.com/swisspost/vertx-redisques/pull/166"
        { action = "drop", file = "RedisQues", level = "WARN",
          msgPattern = "^Registration for queue .- has changed to .-$", },

        -- Reported: SDCISA-13717
        -- Seen: 2024-01-05 prod, 2023-10-18 prod
        { action = "drop", file = "LocalHttpServerResponse", level = "ERROR",
          msgPattern = "^non%-proper HttpServerResponse occured\r?\n"
            .."java.lang.IllegalStateException:"
            .." You must set the Content%-Length header to be the total size of the message body BEFORE sending any data if you are not using"
            .." HTTP chunked encoding.", },

        -- Reported: <none>
        -- Seen: 2024-01-05 prod, 2023-10-18 prod
        { action = "drop", file = "ContextImpl", level = "ERROR",
          msgPattern = "Unhandled exception\n"
            .."java.lang.IllegalStateException: You must set the Content%-Length header to be the total size of the message body BEFORE sending"
            .." any data if you are not using HTTP chunked encoding.", },

        -- Seen: 2023-10-18
        -- Happens all the time as gateleens error reporting is broken-by-desing.
        { action = "drop", file = "Forwarder", level = "WARN",
          msgPattern = "^..... ................................ Problem to request /from%-houston/[0-9]+/eagle/nsync/v1/push/trillian%-phonebooks"
            .."%-affiliated%-planning%-area%-[0-9]+%-vehicles: io.netty.channel.ConnectTimeoutException: connection timed out:"
            .." eddie[0-9]+.pnet.ch/[0-9]+:7012", },
        -- Seen: 2023-10-18
        -- Nearly same as above but on ERROR level instead.
        { action = "drop", file = "Forwarder", level = "ERROR",
          msgPattern = "^%%%w+ %x+ http://eddie%d+:7012/from.houston/%d+/eagle/nsync/v1/push/trillian.phonebooks.affiliated.planning.area.%d+.vehicles"
            .." The timeout period of 30000ms has been exceeded while executing POST /from.houston/%d+/eagle/nsync/v1/push/"
            .."trillian.phonebooks.affiliated.planning.area.%d+.vehicles for server eddie%d+:7012", },
        -- Seen: 2023-10-18 prod
        { action = "drop", file = "Forwarder", level = "ERROR", msgPattern = "^%%%w+ %x+"
            .." http://localhost:9089/houston/vehicles/%d+/vehicle/backup/v1/executions/%d+/backup.zip The timeout period of 30000ms has been exceeded"
            .." while executing PUT /houston/vehicles/%d+/vehicle/backup/v1/executions/%d+/backup.zip for server localhost:9089", },
        -- Seen: 2023-10-18 prod
        { action = "drop", file = "Forwarder", level = "ERROR", msgPattern = "^%%%w+ %x+"
            .." http://localhost:9089/houston/vehicles/%d+/vehicle/backup/v1/executions/%d+/backup.zip Timeout$" },

        -- Seen: 2024-04-10 prod, 2023-10-18 prod
        { action = "drop", file = "ConnectionBase", level = "ERROR", msgEquals = "Connection reset by peer", },

        -- Seen: 2024-04-10 prod, 2023-10-18 prod
        { action = "drop", file = "EventBusBridgeImpl", level = "ERROR", msgEquals = "SockJSSocket exception\nio.vertx.core.VertxException: Connection was closed", },

        -- Seen: 2024-04-10 prod, 2024-01-05 prod, 2023-10-18 prod
        -- Reported: TODO link existing issue here
        { action = "drop", file = "HttpHeaderUtil", level = "ERROR",
          msgPattern = "Keep%-Alive%} values do not match timeout=42 != timeout=120 for request /googleplex/.*", },

        -- Seen: 2024-01-05 prod
        -- Reported: <unknown>
        { action = "drop", file = "Utils", level = "ERROR",
          msgPattern = "^Exception occurred\njava.lang.Exception: %(TIMEOUT,%-1%) Timed out after waiting 30000%(ms%) for a reply. address: __vertx.reply.%d+, repliedAddress: nsync%-[re]+gister%-sync",
          stackPattern = "^"
            .."%s-at org.swisspush.nsync.NSyncHandler.lambda.onPutClientSyncBody.%d+"
            .."%(NSyncHandler.java:%d+%) ..nsync.-at io.vertx.core.impl.future.FutureImpl.%d+.onFailure%(FutureImpl.java:%d+%)"
            ..".-"
            .."Caused by: io.vertx.core.eventbus.ReplyException: Timed out after waiting 30000%(ms%) for a reply."
            .." address: __vertx.reply.%d+, repliedAddress: nsync%-[re]+gister%-sync"
        },

        -- WELL_KNOWN: I guess happens when vehicle looses connection. Seen 2023-10-18 prod.
        { action = "drop", file = "Forwarder", level = "ERROR", msgPattern = "^%%%w+ %x+"
            .." http://eddie%d+:7012/from.houston/%d+/eagle/vending/accounting/v1/users/%d+/years/%d+/months/%d%d/account Connection was closed$", },
        -- WELL_KNOWN: I guess happens when vehicle looses connection. Seen 2023-10-18 prod.
        { action = "drop", file = "Forwarder", level = "ERROR", msgPattern = "^%%%w+ %x+"
            .." http://eddie%d+:7012/from.houston/%d+/eagle/nsync/v1/push/trillian.phonebooks.affiliated.planning.area.%d+.vehicles Connection was closed$", },
        -- Seen 2024-01-10 prod
        -- WELL_KNOWN: I guess happens when vehicle looses connection. Seen 2023-10-18 prod.
        { action = "drop", file = "Forwarder", level = "ERROR", msgPattern = "^%%%w+ %x+"
            .." http://eddie%d+:7012/from.houston/%d+/eagle/nsync/v1/query.index The timeout period of 30000ms has been exceeded while executing"
            .." POST /from.houston/%d+/eagle/nsync/v1/query-index for server eddie%d+:7012$", },
        -- WELL_KNOWN: I guess happens when vehicle looses connection. Seen 2023-10-18 prod.
        { action = "drop", file = "Forwarder", level = "ERROR", msgPattern = "^%%%w+ %x+"
            .." http://eddie%d+:7012/from.houston/%d+/eagle/timetable/notification/v1/planningareas/%d+/notifications/%x+ Connection was closed$", },
        -- WELL_KNOWN: I guess happens when vehicle looses connection. Seen 2023-10-18 prod.
        { action = "drop", file = "Forwarder", level = "ERROR", msgPattern = "^%%%w+ %x+ http://eddie%d+:7012/from.houston/%d+/eagle/nsync/v1/push/trillian.phonebooks.affiliated.planning.area.%d+.vehicles Connection reset by peer$", },

        -- Reported: SDCISA-9574
        -- TODO rm when resolved
        -- Seen: 2021-09-17 2022-06-20, 2022-08-30 prod,
        { action = "drop", file = "Utils", level = "ERROR",
          msgPattern = "%(RECIPIENT_FAILURE,500%) Sync failed.\n{.+}", },

        -- TODO analyze
        -- Seen 2024-03-20 prod
        { action = "drop", file = "ContextImpl", level = "ERROR",
          msgPattern = "^Unhandled exception\njava.lang.IllegalStateException: Response head already sent", },

        -- Seen: 2024-04-10 prod.
        { action = "drop", level = "ERROR", file = "HttpClientRequestImpl",
          msgEquals = "Connection reset by peer\njava.io.IOException: Connection reset by peer",
          stackPattern = "^"
            .."%s-at sun.nio.ch.FileDispatcherImpl.read0%(.-\n"
            .."%s-at sun.nio.ch.SocketDispatcher.read%(.-\n"
            .."%s-at sun.nio.ch.IOUtil.readIntoNativeBuffer%(.-\n"
            .."%s-at sun.nio.ch.IOUtil.read%(.-\n"
            .."%s-at sun.nio.ch.IOUtil.read%(.-\n"
            .."%s-at sun.nio.ch.SocketChannelImpl.read%(.-\n"
            .."%s-at io.netty.buffer.PooledByteBuf.setBytes%(.-\n"
            .."%s-at io.netty.buffer.AbstractByteBuf.writeBytes%(.-\n"
            .."%s-at io.netty.channel.socket.nio.NioSocketChannel.doReadBytes%(.-\n"
            .."%s-at io.netty.channel.nio.AbstractNioByteChannel.NioByteUnsafe.read%(.-\n"
            .."%s-at io.netty.channel.nio.NioEventLoop.processSelectedKey%(.-\n"
            .."%s-at io.netty.channel.nio.NioEventLoop.processSelectedKeysOptimized%(.-\n"
            .."%s-at io.netty.channel.nio.NioEventLoop.processSelectedKeys%(.-\n"
            .."%s-at io.netty.channel.nio.NioEventLoop.run%(.-\n"
            .."%s-at io.netty.util.concurrent.SingleThreadEventExecutor.%d+.run%(.-\n"
            .."%s-at io.netty.util.internal.ThreadExecutorMap.%d+.run%(.-\n"
            .."%s-at io.netty.util.concurrent.FastThreadLocalRunnable.run%(.-\n"
            .."%s-at java.lang.Thread.run%(.-", },

        -- Seen: 2024-04-10 prod.
        { action = "drop", file = "ContextImpl", level = "ERROR",
          msgEquals = "Unhandled exception\njava.lang.IllegalStateException: null",
          stackPattern = "^"
            ..".-io.vertx.-%.HttpClientResponseImpl.checkEnded%(.-\n"
            ..".-io.vertx.-%.HttpClientResponseImpl.endHandler%(.-\n"
            ..".-gateleen.routing.Forwarder.-\n", },

        -- Seen: 2024-04-10 prod.
        -- TODO get rid of this silly base class.
        { action = "drop", file = "ContextImpl", level = "ERROR",
          msgEquals = "Unhandled exception\njava.lang.UnsupportedOperationException: Do override this method to mock expected behaviour.", },

        -- Seen: 2024-04-10 prod.
        -- TODO get rid of this silly base class.
        { action = "drop", file = "ContextImpl", level = "ERROR",
          msgEquals = "Unhandled exception\njava.lang.UnsupportedOperationException: null", },

    }
end
+
+
-- Compile the declarative rules from loadFilters() in-place into
-- { action, matches(that, log) } objects. A rule with no condition keys at
-- all matches every entry.
function initFilters( that )
    for iF = 1, #(that.filters) do
        local descr = that.filters[iF]
        local beforeDate = descr.beforeDate and normalizeIsoDateTime(descr.beforeDate)
        local afterDate = descr.afterDate and normalizeIsoDateTime(descr.afterDate)
        local file, level, msgPattern, msgEquals = descr.file, descr.level, descr.msgPattern, descr.msgEquals
        local rawPattern, stackPattern = descr.rawPattern, descr.stackPattern
        local stackStartsWith = descr.stackStartsWith
        local filter = { action = descr.action, matches = false, }
        -- BUGFIX: 'msgEquals' was missing from this disjunction, so a rule
        -- whose ONLY condition is 'msgEquals' was compiled as an
        -- unconditional match-everything filter.
        local hasAnyCondition = (beforeDate or afterDate or file or level or msgPattern or msgEquals
            or rawPattern or stackPattern or stackStartsWith);
        if not hasAnyCondition then
            filter.matches = function( that, log ) --[[LOGDBG("match unconditionally\n")]] return true end
        else
            filter.matches = function( that, log )
                local match, mismatch = true, false
                if not log.date then log:debugPrint() end
                if level and level ~= log.level then --[[LOGDBG("level mismatch: \"".. level .."\" != \"".. log.level .."\"\n")]] return mismatch end
                if file and file ~= log.file then --[[LOGDBG("file mismatch: \"".. file .."\" != \"".. log.file .."\"\n")]] return mismatch end
                local logDate = normalizeIsoDateTime(log.date)
                local isBeforeDate = (not beforeDate or logDate < beforeDate);
                local isAfterDate = (not afterDate or logDate >= afterDate);
                if not isBeforeDate then --[[LOGDBG("not before: \"".. tostring(beforeDate) .."\", \"".. logDate .."\"\n")]] return mismatch end
                if not isAfterDate then --[[LOGDBG("not after: \"".. tostring(afterDate) .."\", \"".. logDate .."\"\n")]] return mismatch end
                if msgEquals and log.msg ~= msgEquals then return mismatch end
                if stackStartsWith and log.stack and log.stack:sub(1, #stackStartsWith) ~= stackStartsWith then return mismatch end
                if msgPattern and not log.msg:find(msgPattern) then --[[LOGDBG("match: msgPattern\n")]] return mismatch end
                if stackPattern and log.stack and not log.stack:find(stackPattern) then return mismatch end
                if rawPattern and not log.raw:find(rawPattern) then return mismatch end
                --LOGDBG("DEFAULT match\n")
                return match
            end
        end
        that.filters[iF] = filter
    end
end
+
+
-- Parser callback: print entries surviving the filter chain -- verbatim when
-- 'that.printRaw' is set, otherwise as a parsed field-by-field debug dump.
function onLogEntry( log, that )
    if not isWorthToPrint(that, log) then return end
    if that.printRaw then
        print(log.raw)
    else
        log:debugPrint()
    end
end
+
+
-- Walk the filter chain; the FIRST matching filter decides (drop => false,
-- keep => true). Entries matched by no filter pass by default.
function isWorthToPrint( that, log )
    for _, filter in ipairs(that.filters) do
        if filter.matches(that, log) then
            if filter.action == "drop" then return false end
            if filter.action == "keep" then return true end
            error("Unknown filter.action: \"".. filter.action .."\"");
        end
    end
    return true
end
+
+
+main()
+
diff --git a/src/main/lua/paisa-logs/PaisaLogParser.lua b/src/main/lua/paisa-logs/PaisaLogParser.lua
new file mode 100644
index 0000000..f6ac0ce
--- /dev/null
+++ b/src/main/lua/paisa-logs/PaisaLogParser.lua
@@ -0,0 +1,435 @@
+
+local exports = {}
+local mod = {}
+local stderr = io.stderr
+
+
-- Parser "class". Instances additionally carry:
--   line  : the raw input line currently being examined
--   log   : the LogEntry currently being collected (nil once published)
--   parts : token list derived from the configured pattern (setupParserPattern)
local LogParse = { -- class
    line = nil,
    log = nil,
}
+
+
-- Public factory. 'config' must at least contain an 'onLogEntry' callback;
-- see LogParse:new for all supported keys.
function exports.newLogParser( config )
    return LogParse:new(nil, config )
end
+
+
-- Construct a parser instance.
-- config keys:
--   onLogEntry(log, cls)   required; called for every completed log entry.
--   cls                    opaque value handed back to every callback.
--   onEnd, onError, onWarn optional callbacks (defaults: error() / warn()).
--   patternV1              space separated token pattern, e.g.
--                          "DATE STAGE SERVICE LEVEL FILE - MSG".
function LogParse:new(o, config)
    if not config or type(config.onLogEntry) ~= "function" then
        error( "Arg 'config.onLogEntry' must be a function" )
    end
    o = o or {};
    setmetatable(o, self);
    self.__index = self;
    -- Register callbacks.
    -- BUGFIX: these were assigned to 'self' (the shared LogParse class
    -- table), so creating a second parser silently overwrote the callbacks
    -- of the first. Store them on the instance 'o', consistent with how
    -- setupParserPattern stores 'parts' on the instance.
    o.cb_cls = config.cls
    o.cb_onLogEntry = config.onLogEntry
    o.cb_onEnd = config.onEnd
    o.cb_onError = config.onError or function(s)
        error(s or "nil")
    end
    o.cb_onWarn = config.onWarn or function(s)
        io.stdout:flush()
        warn(s)
    end
    -- END callbacks
    mod.setupParserPattern( o, config )
    return o;
end
+
+
-- Derive 'this.parts' (array of pattern tokens) from config 'c.patternV1',
-- falling back to a legacy default pattern with a warning.
function mod.setupParserPattern( this, c )
    local inputPat = c.patternV1 -- Use the one from parameter, if given.
    if not inputPat then
        this.cb_onWarn( "No 'c.patternV1' specified. Fallback to internal obsolete one." )
        inputPat = "DATE POD STAGE SERVICE THREAD LEVEL FILE - MSG"
    end
    local parts = {}
    for part in inputPat:gmatch("[^ ]+") do
        parts[#parts + 1] = part
    end
    this.parts = parts
end
+
+
-- Write every argument to stderr; nil arguments are rendered as "nil".
local function writeStderr(...)
    for i = 1, select("#", ...) do
        local arg = select(i, ...)
        io.stderr:write( arg or "nil" )
    end
end
+
+
-- Main read loop: consume stdin line by line, start a new entry whenever a
-- line carries a "YYYY-MM-DD hh:mm:ss,SSS " stamp, and attach stacktrace /
-- continuation lines to the entry currently being collected. The pending
-- entry is flushed once at EOF.
-- NOTE(review): the date check is UNanchored, so a timestamp anywhere in the
-- line starts a new entry -- confirm that is intended.
function LogParse:tryParseLogs()
    while true do
        self.line = io.read("l");
        if self.line==nil then -- EOF
            self:publishLogEntry();
            break;
        end

        --io.write( "\nBUF: ", self.line, "\n\n" );
        --io.flush()

        if self.line:match("%d%d%d%d%-%d%d%-%d%d[ T]%d%d:%d%d:%d%d,%d%d%d ") then
            -- Looks like the beginning of a new log entry.
            self:initLogEntryFromLine();
        elseif self.line:match("^%s+at [^ ]") then
            -- Looks like a line from exception stack
            self:appendStacktraceLine();
        elseif self.line:match("^%s*Caused by: ") then
            -- Looks like a stacktrace 'Caused by' line
            self:appendStacktraceLine();
        elseif self.line:match("^%s+Suppressed: ") then
            -- Looks like a stacktrace 'Suppressed: ' line
            self:appendStacktraceLine();
        elseif self.line:match("^%\t... (%d+) more$") then
            -- Looks like folded stacktrace elements ("\t... N more";
            -- note '...' matches any three chars here).
            self:appendStacktraceLine();
        else
            -- Probably msg containing newlines.
            self:appendLogMsg();
        end

    end
end
+
+
-- Flush the previous entry (if any), then parse 'self.line' as the head line
-- of a fresh entry via the configured pattern. The legacy fixed-layout
-- parsers are kept below, commented out. A warning is emitted when parsing
-- failed (log.date stays nil in that case).
function LogParse:initLogEntryFromLine()
    self:publishLogEntry();
    local log = self:getOrNewLogEntry();

    -- Try some alternative parsers
    mod.parseByPattern( self )
    --if log.date==nil then
    --    self:parseOpenshiftServiceLogLine();
    --end
    --if log.date==nil then
    --    self:parseEagleLogLine();
    --end
    --if log.date==nil then
    --    self:parseJettyServiceLogLine();
    --end

    if log.date==nil then
        self.cb_onWarn("Failed to parse log line:\n\n".. self.line .."\n\n", self.cb_cls)
    end
end
+
+
-- Generic pattern-driven head-line parser: walks 'this.parts' (tokens like
-- DATE, STAGE, LEVEL, FILE, -, MSG) and extracts the matching fields from
-- 'this.line' into the current log entry. On mismatch it returns early,
-- leaving log.date == nil as the "failed" marker checked by the caller.
-- NOTE(review): relies on string.gmatch's 'init' argument (Lua >= 5.4), and
-- 'rdPos' is advanced by match LENGTH, assuming each token matches exactly
-- at 'rdPos' (gmatch itself is unanchored) -- verify with representative
-- input.
function mod.parseByPattern( this )
    local date, pod, stage, service, thread, level, file, msg, matchr, match
    local line = this.line
    local log = this:getOrNewLogEntry();

    -- We can just return on failure. if log is missing, it will report error
    -- on caller side. Just ensure that 'date' is nil.
    log.date = nil

    local rdPos = 1
    for i,part in ipairs(this.parts) do
        if part=="DATE" then
            date = line:gmatch("(%d%d%d%d%-%d%d%-%d%d[ T]%d%d:%d%d:%d%d,%d%d%d) ", rdPos)()
            if not date or date=="" then return end
            rdPos = rdPos + date:len()
            --stderr:write("date: "..tostring(date).." (rdPos="..tostring(rdPos)..")\n")
        elseif part=="STAGE" then
            match = line:gmatch( " +[^%s]+", rdPos)()
            if not match then return end
            stage = match:gmatch("[^%s]+")()
            rdPos = rdPos + match:len()
            --stderr:write("stage: "..tostring(stage).." (rdPos="..tostring(rdPos)..")\n")
        elseif part=="SERVICE" then
            match = line:gmatch(" +[^%s]+", rdPos)()
            if not match then return end
            service = match:gmatch("[^%s]+")()
            rdPos = rdPos + match:len()
            --stderr:write("service: "..tostring(service).." (rdPos="..tostring(rdPos)..")\n");
        elseif part=="LEVEL" then
            match = line:gmatch(" +[^%s]+", rdPos)()
            if not match then return end
            level = match:gmatch("[^%s]+")()
            if not level:find("^[ABCDEFGINORTUW]+$") then -- [ABCDEFGINORTUW]+ -> (ERROR|WARN|INFO|DEBUG|TRACE)
                this.cb_onWarn( "Does not look like a level: "..(level or"nil"), this.cb_cls )
            end
            rdPos = rdPos + match:len()
            --stderr:write("level: "..tostring(level).." (rdPos="..tostring(rdPos)..")\n");
        elseif part=="FILE" then
            match = line:gmatch(" +[^%s]+", rdPos)()
            if not match then return end
            file = match:gmatch("[^%s]+")()
            if file=="WARN" then stderr:write("\n"..tostring(line).."\n\n")error("Doesn't look like a file: "..tostring(file)) end
            rdPos = rdPos + match:len()
            --stderr:write("file: "..tostring(file).." (rdPos="..tostring(rdPos)..")\n");
        elseif part=="-" then
            match = line:gmatch(" +%-", rdPos)()
            rdPos = rdPos + match:len();
            --stderr:write("dash (rdPos="..tostring(rdPos)..")\n");
        elseif part=="MSG" then
            match = line:gmatch(" +.*$", rdPos)()
            if not match then return end
            msg = match:gmatch("[^%s].*$")()
            rdPos = rdPos + match:len()
            --stderr:write("msg: "..tostring(msg).." (rdPos="..tostring(rdPos)..")\n")
        elseif part=="POD" then
            match = line:gmatch(" +[^%s]+", rdPos)()
            if not match then return end
            pod = match:gmatch("[^%s]+")()
            rdPos = rdPos + match:len()
            --stderr:write("pod: "..tostring(pod).." (rdPos="..tostring(rdPos)..")\n")
        elseif part=="THREAD" then
            -- NOTE(review): unlike the other tokens, THREAD has no
            -- 'if not match then return end' guard before match:len().
            match = line:gmatch(" +[^%s]+", rdPos)()
            thread = match:gmatch("[^%s]+")()
            rdPos = rdPos + match:len()
            --stderr:write("thrd: "..tostring(thread).." (rdPos="..tostring(rdPos)..")\n")
        end
    end

    log.raw = this.line;
    log.date = date;
    log.pod = pod;
    log.stage = stage;
    log.service = service;
    log.thread = thread;
    log.level = level;
    log.file = file;
    log.msg = msg;
end
+
+
-- Parse 'self.line' in the openshift service log layout (VERSION 3, since
-- 2021-09-24 houstonProd): "DATE POD STAGE SERVICE [THREAD] LEVEL FILE - MSG".
-- On mismatch it simply returns, leaving log.date == nil so the caller can
-- detect the failure. Kept for reference: initLogEntryFromLine() currently
-- only uses the generic mod.parseByPattern(). The stale "VERSION 2"
-- commented-out duplicate of this body has been removed.
function LogParse:parseOpenshiftServiceLogLine()
    local date, pod, stage, service, thread, level, file, msg
    local this = self
    local line = this.line
    local log = self:getOrNewLogEntry();

    -- We can just return on failure. if log is missing, it will report error
    -- on caller side. Just ensure that 'date' is nil.
    log.date = nil

    local rdPos = 1
    -- Date
    date = line:gmatch("(%d%d%d%d%-%d%d%-%d%d[ T]%d%d:%d%d:%d%d,%d%d%d)", rdPos)()
    if not date then return end
    rdPos = rdPos + date:len()
    -- Pod
    pod = line:gmatch(" (%a+)", rdPos )()
    if not pod then return end
    rdPos = rdPos + pod:len()
    -- stage
    stage = line:gmatch( " (%a+)", rdPos)()
    if not stage then return end
    rdPos = rdPos + stage:len()
    -- service
    service = line:gmatch( " (%a+)", rdPos)()
    if not service then return end
    rdPos = rdPos + service:len()
    -- thread (this only maybe exists)
    thread = line:gmatch( " ([%a%d%-]+)", rdPos)()
    -- [ABCDEFGINORTUW]+ -> (ERROR|WARN|INFO|DEBUG|TRACE)
    if thread and thread:find("^[ABCDEFGINORTUW]+$") then
        thread = nil; -- Does more look like an error level. So do NOT advance
    elseif thread then
        -- BUGFIX: this branch used to run even for thread == nil and then
        -- crashed on thread:len(); only advance when a thread matched.
        rdPos = rdPos + thread:len()
    end
    -- level
    level = line:gmatch( " ([A-Z]+)", rdPos)()
    if not level then return end
    rdPos = rdPos + level:len()
    -- file
    file = line:gmatch(" ([^%s]+)", rdPos)()
    if not file then return end
    rdPos = rdPos + file:len()
    -- msg
    msg = line:gmatch(" %- (.*)", rdPos)()
    if not msg then return end
    rdPos = rdPos + msg:len()

    log.raw = self.line;
    log.date = date;
    log.pod = pod;
    log.stage = stage;
    log.service = service;
    log.thread = thread;
    log.level = level;
    log.file = file;
    log.msg = msg;
end
+
+
-- Legacy parser for the eagle log layout (no pod column; 'service' doubles
-- as pod). Currently unused: initLogEntryFromLine() only calls
-- mod.parseByPattern(). On mismatch all captures (incl. log.date) stay nil.
function LogParse:parseEagleLogLine()
    local log = self:getOrNewLogEntry();
    local date, stage, service, level, file, msg = self.line:gmatch(""
        .."(%d%d%d%d%-%d%d%-%d%d %d%d:%d%d:%d%d,%d%d%d)" -- datetime
        .." (%a+)" -- stage
        .." (%a+)" -- service
        .." (%a+)" -- level
        .." ([^%s]+)" -- file
        .." %- (.*)" -- msg
        )();
    local pod = service; -- just 'mock' it
    log.raw = self.line;
    log.date = date;
    log.service = service;
    log.pod = pod;
    log.stage = stage;
    log.level = level;
    log.file = file;
    log.msg = msg;
end
+
+
-- Legacy parser for the jetty service log layout (with pod column).
-- Currently unused: initLogEntryFromLine() only calls mod.parseByPattern().
-- On mismatch all captures (incl. log.date) stay nil.
function LogParse:parseJettyServiceLogLine()
    local log = self:getOrNewLogEntry();
    local date, pod, stage, service, level, file, msg = self.line:gmatch(""
        .."(%d%d%d%d%-%d%d%-%d%d %d%d:%d%d:%d%d,%d%d%d)" -- datetime
        .." (%S+)" -- pod (aka container)
        .." (%a+)" -- stage
        .." (%a+)" -- service
        .." (%a+)" -- level
        .." ([^%s]+)" -- file
        .." %- (.*)" -- msg
        )();
    log.raw = self.line;
    log.date = date;
    log.pod = pod;
    log.stage = stage;
    log.service = service;
    log.level = level;
    log.file = file;
    log.msg = msg;
end
+
+
-- Continuation line of a multi-line message: append it to both 'msg' and the
-- verbatim 'raw' copy of the entry (empty strings substituted when unset).
function LogParse:appendLogMsg()
    local log = self:getOrNewLogEntry()
    log.msg = (log.msg or "") .."\n".. self.line
    -- Also append to raw to have the complete entry there.
    log.raw = (log.raw or "") .."\n".. self.line
end
+
+
-- Append a stacktrace line to the entry's 'stack' and to the verbatim 'raw'.
function LogParse:appendStacktraceLine()
    local log = self:getOrNewLogEntry()
    if not log.stack then
        log.stack = self.line
    else
        log.stack = log.stack .."\n".. self.line
    end
    -- Also append to raw to have the complete entry there.
    -- BUGFIX: guard nil -- when the very first input line already looks like
    -- a stacktrace line, 'log.raw' is not set yet and the unguarded
    -- concatenation crashed (sibling appendLogMsg already guards this).
    log.raw = (log.raw and (log.raw .."\n".. self.line)) or self.line
end
+
+
-- Hand the collected entry to the onLogEntry callback and clear parser
-- state. Safe to call when no entry is pending (start of input, or already
-- consumed).
function LogParse:publishLogEntry()
    local log = self.log
    if not log then
        return -- nothing to do
    end
    if not log.raw then
        -- WhatTheHeck?!?
        local msg = "InternalError: Collected log unexpectedly empty"
        self.cb_onError(msg, self.cb_cls)
        error(msg); return
    end
    self.log = nil; -- Mark as consumed
    -- Make sure log lines do NOT end in 0x0D (trailing carriage return).
    -- BUGFIX: 'log.msg' can legitimately be nil (e.g. a pattern without a
    -- MSG part); guard before calling string methods on it.
    local msg = log.msg
    if msg and msg:byte(msg:len()) == 0x0D then log.msg = msg:sub(1, -2) end
    self.cb_onLogEntry(log, self.cb_cls)
end
+
+
-- Return the entry currently being collected, creating a fresh one on demand.
function LogParse:getOrNewLogEntry()
    if not self.log then self.log = LogEntry:new(nil) end
    return self.log
end
+
+
-- Normalize "YYYY-MM-DD hh:mm:ss,SSS"-style stamps to strict ISO
-- "YYYY-MM-DDThh:mm:ss.SSS" so plain string comparison orders correctly.
-- Already-normalized input is returned as-is. Raises a descriptive error for
-- unparsable input (previously this crashed with an obscure "attempt to
-- concatenate a nil value").
function exports.normalizeIsoDateTime( str )
    if str:find("%d%d%d%d%-%d%d%-%d%dT%d%d:%d%d:%d%d%.%d%d%d") then return str end
    local y, mo, d, h, mi, s, ms = str:match("^(%d%d%d%d)-(%d%d)-(%d%d)[ T_-](%d%d):(%d%d):(%d%d)[,.](%d%d%d)$")
    if not y then error("Not an ISO date-time: \"".. tostring(str) .."\"") end
    return y .."-".. mo .."-".. d .."T".. h ..":".. mi ..":".. s ..".".. ms
end
+
+
-- Prototype for parsed log entries (fields: raw, date, pod, stage, service,
-- thread, level, file, msg, stack). NOTE(review): the listed names below are
-- reads of undefined globals, i.e. all nil -- the table is effectively empty
-- and only serves as documentation plus as metatable target for
-- LogEntry:new().
LogEntry = {
    raw,
    date,
    service,
    stack,
}
+
+
-- Create a new (empty) log entry inheriting the LogEntry methods.
function LogEntry:new(o)
    local entry = o or {}
    self.__index = self
    return setmetatable(entry, self)
end
+
+
-- Dump all parsed fields of this entry to stdout (debugging aid; unset
-- fields are rendered as "nil").
function LogEntry:debugPrint()
    print( "+- PUBLISH ------------------------------------------------------------" );
    print( "| date ---> ", self.date or "nil" );
    print( "| pod ----> ", self.pod or "nil" );
    print( "| service > ", self.service or "nil" );
    print( "| stage --> ", self.stage or "nil" );
    print( "| thread -> ", self.thread or "nil" );
    print( "| level --> ", self.level or "nil" );
    print( "| file ---> ", self.file or "nil" );
    print( "| msg ----> ", self.msg or "nil" );
    print( "| " )
    io.write( "| RAW: ", self.raw or "nil", "\n" );
    print( "`--------------------" );
end


return exports
+
diff --git a/src/main/lua/pcap/KubeProbeFilter.lua b/src/main/lua/pcap/KubeProbeFilter.lua
new file mode 100644
index 0000000..a5967e9
--- /dev/null
+++ b/src/main/lua/pcap/KubeProbeFilter.lua
@@ -0,0 +1,93 @@
+--
+-- Try to extract kube-probe related requests.
+--
+
+local newPcapParser = assert(require("pcapit").newPcapParser)
+local newPcapDumper = assert(require("pcapit").newPcapDumper)
+
+local out, log = io.stdout, io.stderr
+local main, onPcapFrame, vapourizeUrlVariables
+
+
-- Per-frame callback: group port-7012 TCP frames into connections (keyed by
-- the direction-independent port+ip quadruple) and buffer them until the
-- connection is known to be "of interest" (it carried a request to
-- /houston/server/info). Interesting connections are flushed to app.dumper;
-- boring ones keep at most their first three frames (the TCP handshake)
-- buffered.
-- NOTE(review): 'it' is a pcapit frame iterator; the accessor semantics
-- (trspPayload, frameArrivalTime, ...) are assumed from usage -- confirm
-- against the pcapit documentation.
function onPcapFrame( app, it )
    local srcPort, dstPort = it:trspSrcPort(), it:trspDstPort()
    local userAgent, reqUri
    --
    if dstPort ~= 7012 and srcPort ~= 7012 then return end
    local trspPayload = it:trspPayload()
    -- NOTE(review): "[A-Z/1.0]+" is a character CLASS (uppercase letters plus
    -- '/', '1', '.', '0'), not the literal "HTTP/1.0"; it happens to match
    -- both request methods and "HTTP/1.x" status lines.
    local httpReqLinePart1, httpReqLinePart2, httpReqLinePart3 =
        trspPayload:match("^([A-Z/1.0]+) ([^ ]+) ([^ \r\n]+)\r?\n")
    if httpReqLinePart1 and not httpReqLinePart1:find("^HTTP/1.%d$") then -- assume HTTP request
        reqUri = httpReqLinePart2
        userAgent = trspPayload:match("\n[Uu][Ss][Ee][Rr]%-[Aa][Gg][Ee][Nn][Tt]:%s+([^\r\n]+)\r?\n");
        if userAgent then
            --if not userAgent:find("^kube%-probe/") then return end -- assume halfrunt
            --log:write("User-Agent: ".. userAgent .."\n")
        end
    elseif httpReqLinePart1 then -- assume HTTP response
        --out:write(trspPayload)
    end
    local srcIp, dstIp = it:netSrcIpStr(), it:netDstIpStr()
    -- Direction-independent connection key: smaller port/ip first.
    local connKey = ((srcPort < dstPort)and(srcPort.."\0"..dstPort)or(dstPort.."\0"..srcPort))
        .."\0"..((srcIp < dstIp)and(srcIp.."\0"..dstIp)or(dstIp.."\0"..srcIp))
    local conn = app.connections[connKey]
    if not conn then conn = {isOfInterest=false, pkgs={}} app.connections[connKey] = conn end
    conn.isOfInterest = (conn.isOfInterest or reqUri == "/houston/server/info")
    if not conn.isOfInterest then
        if #conn.pkgs > 3 then -- Throw away all stuff except TCP handshake
            conn.pkgs = { conn.pkgs[1], conn.pkgs[2], conn.pkgs[3] }
        end
        local sec, usec = it:frameArrivalTime()
        --for k,v in pairs(getmetatable(it))do print("E",k,v)end
        local pkg = {
            sec = assert(sec), usec = assert(usec),
            caplen = it:frameCaplen(), len = it:frameLen(),
            -- NOTE(review): conn.isOfInterest is always false in this branch,
            -- so tcpFlags is always stored as false here.
            tcpFlags = (conn.isOfInterest)and(it:tcpFlags())or false,
            srcPort = srcPort, dstPort = dstPort,
            trspPayload = trspPayload,
            rawFrame = it:rawFrame(),
        }
        table.insert(conn.pkgs, pkg)
    else
        -- Stop memory hogging. Write that stuff to output
        if #conn.pkgs > 0 then
            for _, pkg in ipairs(conn.pkgs) do
                --out:write(string.format("-- PKG 1 %d->%d %d.%09d tcpFlg=0x%04X\n", pkg.srcPort, pkg.dstPort, pkg.sec, pkg.usec, pkg.tcpFlags or 0))
                --out:write(pkg.trspPayload)
                --out:write("\n")
                app.dumper:dump(pkg.sec, pkg.usec, pkg.caplen, pkg.len, pkg.rawFrame, 1, pkg.rawFrame:len())
            end
            conn.pkgs = {}
        end
        local tcpFlags = it:tcpFlags()
        local sec, usec = it:frameArrivalTime()
        local rawFrame = it:rawFrame()
        --out:write(string.format("-- PKG 2 %d->%d %d.%09d tcpFlg=0x%04X, len=%d\n", srcPort, dstPort, sec, usec, tcpFlags or 0, trspPayload:len()))
        --out:write(trspPayload)
        --if trspPayload:byte(trspPayload:len()) ~= 0x0A then out:write("\n") end
        --out:write("\n")
        app.dumper:dump(sec, usec, it:frameCaplen(), it:frameLen(), rawFrame, 1, rawFrame:len())
    end
end
+
+
-- Entry point: parse a pcap stream from stdin ("-") and dump the frames of
-- the connections of interest to a new pcap file.
-- NOTE(review): the output path is hard coded to a local Windows work dir;
-- adjust before use elsewhere.
function main()
    local app = {
        parser = false,
        dumper = false,
        connections = {},  -- connKey -> { isOfInterest, pkgs }
    }
    app.parser = newPcapParser{
        dumpFilePath = "-",
        onFrame = function(f)onPcapFrame(app, f)end,
    }
    app.dumper = newPcapDumper{
        dumpFilePath = "C:/work/tmp/KubeProbeFilter.out.pcap",
    }
    app.parser:resume()
end


main()
+
+
diff --git a/src/main/lua/pcap/extractDnsHosts.lua b/src/main/lua/pcap/extractDnsHosts.lua
new file mode 100644
index 0000000..655586f
--- /dev/null
+++ b/src/main/lua/pcap/extractDnsHosts.lua
@@ -0,0 +1,147 @@
+
+local newPcapParser = assert(require("pcapit").newPcapParser)
+local out, log = io.stdout, io.stderr
+
+local main, onPcapFrame, vapourizeUrlVariables, printResult
+
+
-- Entry point: scan the pcap stream from stdin for hostname/IP evidence
-- (DNS answers and HTTP Host headers), then print the aggregated host list.
function main()
    local app = {
        parser = false,
        youngestEpochSec = -math.huge,  -- newest frame time seen (epoch secs, fractional)
        oldestEpochSec = math.huge,     -- oldest frame time seen
        dnsResponses = {},              -- dedup key -> { ipv4Str, hostname, ttl }
    }
    app.parser = newPcapParser{
        dumpFilePath = "-",
        onFrame = function(f)onPcapFrame(app, f)end,
    }
    app.parser:resume()
    printResult(app)
end
+
+
-- Frame callback: track the dump's overall time range, then try to harvest
-- hostnames from DNS responses (src port 53) or from HTTP Host headers (any
-- TCP frame).
function onPcapFrame( app, it )
    local sec, usec = it:frameArrivalTime()
    local ts = sec + (usec / 1e6)
    app.oldestEpochSec = math.min(app.oldestEpochSec, ts)
    app.youngestEpochSec = math.max(app.youngestEpochSec, ts)
    --
    if it:trspSrcPort() == 53 then
        extractHostnameFromDns(app, it)
    elseif it:tcpSeqNr() then
        extractHostnameFromHttpHeaders(app, it)
    end
end
+
+
-- Best-effort DNS response decoder: pulls hostname, TTL and a single IPv4
-- answer out of the raw payload and records the pair via addEntry(). Only
-- handles exactly one question and one 4-byte (A record) answer; anything
-- else is skipped with a warning.
-- NOTE(review): offsets are hand tuned around a suspected fixed payload
-- shift ('bug' below) and the label decoding is admittedly hacky (the
-- "WTF" gsubs); verify against real captures before trusting the output.
function extractHostnameFromDns( app, it )
    local payload = it:trspPayload()
    local bug = 8 -- TODO looks as lib has a bug and payload is offset by some bytes.
    local dnsFlags = (payload:byte(bug+3) << 8) | (payload:byte(bug+4))
    if (dnsFlags & 0x0004) ~= 0 then return end -- ignore error responses
    local numQuestions = payload:byte(bug+5) << 8 | payload:byte(bug+6)
    local numAnswers = payload:byte(bug+7) << 8 | payload:byte(bug+8)
    if numQuestions ~= 1 then
        log:write("[WARN ] numQuestions ".. numQuestions .."?!?\n")
        return
    end
    if numAnswers == 0 then return end -- empty answers are boring
    if numAnswers ~= 1 then log:write("[WARN ] dns.count.answers ".. numAnswers .." not supported\n") return end
    local questionsOffset = bug+13
    local hostname = payload:match("^([^\0]+)", questionsOffset)
    hostname = hostname:gsub("^[\r\n]", "") -- TODO WTF?!?
    hostname = hostname:gsub("[\x04\x02]", ".") -- TODO WTF?!?
    local answersOffset = bug + 13 + (24 * numQuestions)
    local ttl = payload:byte(answersOffset+6) << 24 | payload:byte(answersOffset+7) << 16
        | payload:byte(answersOffset+8) << 8 | payload:byte(answersOffset+9)
    -- NOTE(review): unlike the other 16bit reads above, the high byte is NOT
    -- shifted here ('|' instead of '<< 8 |'); works only while the high byte
    -- is zero -- confirm intended.
    local dataLen = payload:byte(answersOffset+10) | payload:byte(answersOffset+11)
    if dataLen ~= 4 then log:write("[WARN ] dns.resp.len ".. dataLen .." not impl\n") return end
    local ipv4Str = string.format("%d.%d.%d.%d", payload:byte(answersOffset+12), payload:byte(answersOffset+13),
        payload:byte(answersOffset+14), payload:byte(answersOffset+15))
    --
    addEntry(app, ipv4Str, hostname, ttl)
end
+
+
-- Try to extract a "Host:" header from an HTTP request payload and record
-- the (dstIp, hostname) pair via addEntry(). Does nothing when the payload
-- is not an HTTP request, carries no usable Host header, or the Host equals
-- the destination IP anyway.
function extractHostnameFromHttpHeaders( app, it )
    local payload = it:trspPayload()
    local _, beg = payload:find("^([A-Z]+ [^ \r\n]+ HTTP/1%.%d\r?\n)")
    if not beg then return end
    beg = beg + 1
    local httpHost
    while true do
        local f, t = payload:find("^([^\r\n]+)\r?\n", beg)
        if not f then return end -- end of headers (or malformed): no Host found
        local line = payload:sub(f, t)
        httpHost = line:match("^[Hh][Oo][Ss][Tt]:%s*([^\r\n]+)\r?\n$")
        if httpHost then break end
        -- BUGFIX: advance PAST the newline; the previous 'beg = t' made the
        -- next anchored find fail immediately, so only the first header line
        -- was ever examined. (Also dropped a first-byte pre-check whose
        -- operator precedence -- '(not payload:byte(1)) == 0x72' -- made it
        -- a constant no-op.)
        beg = t + 1
    end
    httpHost = httpHost:gsub("^(.+):%d+$", "%1") -- strip optional ":port"
    local dstIp = it:netDstIpStr()
    if dstIp == httpHost then return end
    addEntry(app, dstIp, httpHost, false, "via http host header")
end
+
+
-- Remember one (ip, hostname) observation, deduplicated by ip+hostname+ttl
-- (or by a fixed marker for entries learned from HTTP Host headers).
function addEntry( app, ipv4Str, hostname, ttl, kludge )
    --log:write("addEntry(app, ".. ipv4Str ..", ".. hostname ..")\n")
    local discriminator = (kludge == "via http host header") and "via http host header" or ttl
    local key = ipv4Str .."\0".. hostname .."\0".. discriminator
    if not app.dnsResponses[key] then
        app.dnsResponses[key] = { ipv4Str = ipv4Str, hostname = hostname, ttl = ttl, }
    end
end
+
+
-- Print the collected (ip, hostname) pairs as a small hosts-file-like report
-- on stdout, sorted by IP then hostname, with the capture time range in the
-- header (rendered in UTC via the '!' os.date prefix).
function printResult( app )
    local sorted = {}
    for _, stream in pairs(app.dnsResponses) do
        table.insert(sorted, stream)
    end
    table.sort(sorted, function(a, b)
        if a.ipv4Str < b.ipv4Str then return true end
        if a.ipv4Str > b.ipv4Str then return false end
        return a.hostname < b.hostname
    end)
    local dumpDurationSec = app.youngestEpochSec - app.oldestEpochSec
    local timeFmt = "!%Y-%m-%d_%H:%M:%SZ"
    out:write("\n")
    out:write(string.format("# Subject Hostname to IP addresses\n"))
    out:write(string.format("# Begin %s\n", os.date(timeFmt, math.floor(app.oldestEpochSec))))
    out:write(string.format("# Duration %.3f seconds\n", dumpDurationSec))
    out:write("\n")
    --out:write(" .-- KiB per Second\n")
    --out:write(" | .-- IP endpoints\n")
    --out:write(" | | .-- TCP server port\n")
    --out:write(" | | | .-- TCP Payload (less is better)\n")
    --out:write(" | | | |\n")
    --out:write(".--+----. .----+----------------------. .+--. .-+------------\n")
    for i, elem in ipairs(sorted) do
        local ipv4Str, hostname, ttl = elem.ipv4Str, elem.hostname, elem.ttl
        -- Host-header entries carry ttl == false; only DNS entries print TTL.
        if ttl then
            out:write(string.format("%-14s %-30s # TTL=%ds", ipv4Str, hostname, ttl))
        else
            out:write(string.format("%-14s %-30s # ", ipv4Str, hostname))
        end
        out:write("\n")
    end
    out:write("\n")
end


main()
+
+
diff --git a/src/main/lua/pcap/httpStats.lua b/src/main/lua/pcap/httpStats.lua
new file mode 100644
index 0000000..ff48bd2
--- /dev/null
+++ b/src/main/lua/pcap/httpStats.lua
@@ -0,0 +1,117 @@
+
+local newPcapParser = assert(require("pcapit").newPcapParser)
+
+local out, log = io.stdout, io.stderr
+local main, onPcapFrame, vapourizeUrlVariables, printHttpRequestStats
+
+
+function main()
+   -- Parse a pcap dump from stdin, collect HTTP request URIs, then report.
+   local app = {
+      parser = false,
+      youngestEpochSec = -math.huge,
+      oldestEpochSec = math.huge,
+      foundHttpRequests = {},
+   }
+   local parser = newPcapParser{
+      dumpFilePath = "-",
+      onFrame = function( frame ) onPcapFrame(app, frame) end,
+   }
+   app.parser = parser
+   parser:resume()
+   printHttpRequestStats(app)
+end
+
+
+function onPcapFrame( app, it )
+   -- Per-frame callback: track the capture time range, and for traffic to/from
+   -- port 7012 parse the HTTP request line and count requests per URI.
+   local sec, usec = it:frameArrivalTime()
+   local srcPort, dstPort = it:trspSrcPort(), it:trspDstPort()
+   --
+   if sec < app.oldestEpochSec then app.oldestEpochSec = sec end
+   if sec > app.youngestEpochSec then app.youngestEpochSec = sec end
+   --
+   local portOfInterest = 7012
+   if dstPort == portOfInterest then
+      -- Request direction: match "METHOD URI HTTP/x.y" at payload start.
+      local httpMethod, httpUri =
+         it:trspPayload():match("^([A-Z]+) ([^ ]+) [^ \r\n]+\r?\n")
+      if httpMethod then
+         --out:write(string.format("%5d->%5d %s %s\n", srcPort, dstPort, httpMethod, httpUri))
+         -- Collapse variable path segments so equivalent URIs share a counter.
+         httpUri = vapourizeUrlVariables(app, httpUri)
+         local key = httpUri -- httpMethod .." ".. httpUri
+         local obj = app.foundHttpRequests[key]
+         if not obj then
+            obj = { count=0, httpMethod=false, httpUri=false, }
+            app.foundHttpRequests[key] = obj
+         end
+         obj.count = obj.count + 1
+         obj.httpMethod = httpMethod
+         obj.httpUri = httpUri
+      end
+   elseif srcPort == portOfInterest then
+      -- Response direction: parsed, but result currently unused (debug only).
+      local httpStatus, httpPhrase =
+         it:trspPayload():match("^HTTP/%d.%d (%d%d%d) ([^\r\n]*)\r?\n")
+      if httpStatus then
+         --out:write(string.format("%5d<-%5d %s %s\n", srcPort, dstPort, httpStatus, httpPhrase))
+      end
+   end
+end
+
+
+function vapourizeUrlVariables( app, uri )
+   -- Normalize a request URI by replacing variable-looking path segments with
+   -- "{}" placeholders, so stats can group equivalent endpoints.
+   -- NOTE: the order of these gsub calls matters; do not reorder.
+   -- A very specific case
+   uri = uri:gsub("^(/houston/users/)%d+(/.*)$", "%1{}%2");
+   if uri:find("^/houston/users/[^/]+/user/.*$") then return uri end
+   --
+   -- Try to do some clever guesses to group URIs wich only differ in variable segments
+   uri = uri:gsub("(/|-)[%dI_-]+/", "%1{}/"):gsub("(/|-)[%dI-]+/", "%1{}/") -- two turns, to also get consecutive number segments
+   uri = uri:gsub("([/-])[%dI_-]+$", "%1{}")
+   uri = uri:gsub("/%d+(%.%w+)$", "/{}%1")
+   -- Five dash-separated alnum groups: looks like a UUID segment.
+   uri = uri:gsub("(/|-)[%w%d]+%-[%w%d]+%-[%w%d]+%-[%w%d]+%-[%w%d]+(/?)$", "%1{}%2")
+   uri = uri:gsub("/v%d/", "/v0/") -- Merge all API versions
+   --
+   -- Generify remaining by trimming URIs from right
+   uri = uri:gsub("^(/from%-houston/[^/]+/eagle/nsync/).*$", "%1...")
+   uri = uri:gsub("^(/from%-houston/[^/]+/eagle/fis/information/).*$", "%1...")
+   uri = uri:gsub("^(/from%-houston/[^/]+/eagle/nsync/v%d/push/trillian%-phonebooks%-).*$", "%1...")
+   uri = uri:gsub("^(/from%-houston/[^/]+/eagle/timetable/wait/).*$", "%1...")
+   uri = uri:gsub("^(/houston/service%-instances/).*$", "%1...")
+   uri = uri:gsub("^(/vortex/stillInterested%?vehicleId%=).*$", "%1...")
+   uri = uri:gsub("^(/houston/[^/]+/[^/]+/).*$", "%1...")
+   return uri
+end
+
+
+function printHttpRequestStats( app )
+   -- Sort the collected requests by frequency (descending) and print the
+   -- request rate per URI.
+   -- Cleanup: dropped the unused 'maxOccurValue', 'chartWidth' and
+   -- 'cntPrinted' locals and a '::nextPort::' label that had no matching
+   -- goto; output is unchanged.
+   local sorted = {}
+   local overallCount = 0
+   for _, reqObj in pairs(app.foundHttpRequests) do
+      overallCount = overallCount + reqObj.count
+      table.insert(sorted, reqObj)
+   end
+   table.sort(sorted, function(a, b)return a.count > b.count end)
+   local dumpDurationSec = app.youngestEpochSec - app.oldestEpochSec
+   local timeFmt = "!%Y-%m-%d_%H:%M:%SZ"
+   out:write("\n")
+   out:write(string.format(" Subject HTTP Request Statistics\n"))
+   out:write(string.format(" Begin %s\n", os.date(timeFmt,app.oldestEpochSec)))
+   out:write(string.format(" Duration %d seconds\n", dumpDurationSec))
+   out:write(string.format("Throughput %.1f HTTP requests per second\n", overallCount / dumpDurationSec))
+   out:write("\n")
+   out:write(" .-- HTTP Requests per Second\n")
+   out:write(" | .-- URI\n")
+   out:write(".--+--. .-+---------\n")
+   for i, elem in ipairs(sorted) do
+      -- Rate rounded to one decimal place.
+      local cntPerSec = math.floor((elem.count / dumpDurationSec)*10+.5)/10
+      out:write(string.format("%7.1f %s\n", cntPerSec, elem.httpUri))
+   end
+   out:write("\n")
+end
+
+
+main()
+
diff --git a/src/main/lua/pcap/tcpDataAmountStats.lua b/src/main/lua/pcap/tcpDataAmountStats.lua
new file mode 100644
index 0000000..496687a
--- /dev/null
+++ b/src/main/lua/pcap/tcpDataAmountStats.lua
@@ -0,0 +1,97 @@
+
+local newPcapParser = assert(require("pcapit").newPcapParser)
+
+local main, onPcapFrame, vapourizeUrlVariables, printResult
+
+
+function main()
+   -- Parse a pcap dump from stdin, accumulate TCP payload bytes per stream,
+   -- then print a throughput chart.
+   local app = {
+      parser = false,
+      youngestEpochSec = -math.huge,
+      oldestEpochSec = math.huge,
+      nextStreamNr = 1,
+      httpStreams = {},
+   }
+   local parser = newPcapParser{
+      dumpFilePath = "-",
+      onFrame = function( frame ) onPcapFrame(app, frame) end,
+   }
+   app.parser = parser
+   parser:resume()
+   printResult(app)
+end
+
+
+function onPcapFrame( app, it )
+   -- Per-frame callback: accumulate TCP payload bytes per stream.
+   local out = io.stdout
+   --
+   -- Only TCP frames have a sequence number; skip everything else.
+   if not it:tcpSeqNr() then return end
+   --
+   --
+   local sec, usec = it:frameArrivalTime()
+   if sec < app.oldestEpochSec then app.oldestEpochSec = sec end
+   if sec > app.youngestEpochSec then app.youngestEpochSec = sec end
+   --
+   local srcIp, dstIp = it:netSrcIpStr(), it:netDstIpStr()
+   local srcPort, dstPort = it:trspSrcPort(), it:trspDstPort()
+   -- Build a direction-independent stream key: the IP pair ordered
+   -- lexicographically plus the lower ("server") port, so both directions of
+   -- a connection land in the same bucket.
+   local lowIp = (srcIp < dstIp)and(srcIp)or(dstIp)
+   local higIp = (lowIp == dstIp)and(srcIp)or(dstIp)
+   local lowPort = math.min(srcPort, dstPort)
+   local streamId = lowIp .."\0".. higIp .."\0".. lowPort
+   local stream = app.httpStreams[streamId]
+   if not stream then
+      -- src/dst of the first frame seen for this stream are kept for display.
+      stream = {
+         srcIp = srcIp, dstIp = dstIp, srcPort = srcPort, dstPort = dstPort,
+         streamNr = app.nextStreamNr, numBytes = 0,
+      }
+      app.nextStreamNr = app.nextStreamNr + 1
+      app.httpStreams[streamId] = stream
+   end
+   local trspPayload = it:trspPayload()
+   stream.numBytes = stream.numBytes + trspPayload:len()
+end
+
+
+function printResult( app )
+   -- Print per-stream throughput sorted by byte count, with an ASCII bar
+   -- chart scaled relative to the busiest stream.
+   local out = io.stdout
+   local sorted = {}
+   local overalValue, maxValue = 0, 0
+   for _, stream in pairs(app.httpStreams) do
+      if stream.numBytes > maxValue then maxValue = stream.numBytes end
+      overalValue = overalValue + stream.numBytes
+      table.insert(sorted, stream)
+   end
+   table.sort(sorted, function(a, b)return a.numBytes > b.numBytes end)
+   local dumpDurationSec = app.youngestEpochSec - app.oldestEpochSec
+   local overallBytesPerSec = overalValue / dumpDurationSec
+   local maxValuePerSec = maxValue / dumpDurationSec
+   local timeFmt = "!%Y-%m-%d_%H:%M:%SZ"
+   out:write("\n")
+   out:write(string.format(" Subject TCP data throughput\n"))
+   out:write(string.format(" Begin %s\n", os.date(timeFmt,app.oldestEpochSec)))
+   out:write(string.format(" Duration %d seconds\n", dumpDurationSec))
+   out:write(string.format(" Overall %.3f KiB per second (%.3f KiBit per second)\n",
+      overallBytesPerSec/1024, overallBytesPerSec/1024*8))
+   out:write("\n")
+   out:write(" .-- KiB per Second\n")
+   out:write(" | .-- IP endpoints\n")
+   out:write(" | | .-- TCP server port\n")
+   out:write(" | | | .-- TCP Payload (less is better)\n")
+   out:write(" | | | |\n")
+   out:write(".--+----. .----+----------------------. .+--. .-+------------\n")
+   -- Longest possible bar; a slice of it is printed per row.
+   local bar = "XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX"
+   for i, elem in ipairs(sorted) do
+      local streamNr, srcIp, dstIp, srcPort, dstPort, numBytes =
+         elem.streamNr, elem.srcIp, elem.dstIp, elem.srcPort, elem.dstPort, elem.numBytes
+      local lowPort = math.min(srcPort, dstPort)
+      -- Rounded to one decimal place.
+      local bytesPerSecond = math.floor((numBytes / dumpDurationSec)*10+.5)/10
+      out:write(string.format("%9.3f %-14s %-14s %5d ", bytesPerSecond/1024, srcIp, dstIp, lowPort))
+      -- Bar length proportional to the busiest stream (string.sub treats a
+      -- start index of 0 like 1).
+      local part = bytesPerSecond / maxValuePerSec;
+      out:write(bar:sub(0, math.floor(part * bar:len())))
+      out:write("\n")
+   end
+   out:write("\n")
+end
+
+
+main()
+
diff --git a/src/main/lua/pcap/tcpPortStats.lua b/src/main/lua/pcap/tcpPortStats.lua
new file mode 100644
index 0000000..9038db7
--- /dev/null
+++ b/src/main/lua/pcap/tcpPortStats.lua
@@ -0,0 +1,82 @@
+
+local newPcapParser = assert(require("pcapit").newPcapParser)
+
+local out, log = io.stdout, io.stderr
+local main, onPcapFrame, printStats
+
+
+function main()
+   -- Parse a pcap dump from stdin, count packets per TCP/UDP port, then
+   -- print the per-port statistics.
+   local app = {
+      parser = false,
+      youngestEpochSec = -math.huge,
+      oldestEpochSec = math.huge,
+      foundPortNumbers = {},
+   }
+   local parser = newPcapParser{
+      dumpFilePath = "-",
+      onFrame = function( frame ) onPcapFrame(app, frame) end,
+   }
+   app.parser = parser
+   parser:resume()
+   printStats(app)
+end
+
+
+function onPcapFrame( app, it )
+   -- Count packets per port. Destination-port hits are stored under
+   -- port+100000 so the send and receive directions remain distinguishable
+   -- within the same table.
+   local sec, usec = it:frameArrivalTime()
+   local srcPort, dstPort = it:trspSrcPort(), it:trspDstPort()
+   --
+   if sec < app.oldestEpochSec then app.oldestEpochSec = sec end
+   if sec > app.youngestEpochSec then app.youngestEpochSec = sec end
+   --
+   local counters = app.foundPortNumbers
+   counters[srcPort] = (counters[srcPort] or 0) + 1
+   local dstKey = dstPort + 100000
+   counters[dstKey] = (counters[dstKey] or 0) + 1
+end
+
+
+function printStats( app )
+   -- Print packets-per-second per port, most-used first, with a relative
+   -- ASCII bar chart. Ports above 30000 (ephemeral client ports) are skipped.
+   local sorted = {}
+   local totalPackets, maxOccurValue = 0, 0
+   for port, pkgcnt in pairs(app.foundPortNumbers) do
+      if pkgcnt > maxOccurValue then maxOccurValue = pkgcnt end
+      table.insert(sorted, { port=port, pkgcnt=pkgcnt })
+      totalPackets = totalPackets + pkgcnt
+   end
+   table.sort(sorted, function(a, b)return a.pkgcnt > b.pkgcnt end)
+   local dumpDurationSec = app.youngestEpochSec - app.oldestEpochSec
+   local timeFmt = "!%Y-%m-%d_%H:%M:%SZ"
+   out:write("\n")
+   out:write(string.format(" Subject TCP/UDP stats\n"))
+   out:write(string.format(" Begin %s\n", os.date(timeFmt,app.oldestEpochSec)))
+   out:write(string.format(" Duration %d seconds\n", dumpDurationSec))
+   out:write(string.format("Throughput %.1f packets per second\n", totalPackets / dumpDurationSec))
+   out:write("\n")
+   out:write(" .- TCP/UDP Port\n")
+   out:write(" | .-Direction (Send, Receive)\n")
+   out:write(" | | .- Packets per second\n")
+   out:write(".-+-. | .---+-.\n")
+   local chartWidth = 60
+   for i, elem in ipairs(sorted) do
+      local port, pkgcnt = elem.port, elem.pkgcnt
+      -- Keys above 100000 encode "received on port-100000" (see onPcapFrame).
+      local dir = (port > 100000)and("R")or("S")
+      if port > 100000 then port = port - 100000 end
+      if port > 30000 then goto nextPort end
+      -- Rounded to one decimal place.
+      local pkgsPerSec = math.floor((pkgcnt / dumpDurationSec)*10+.5)/10
+      out:write(string.format("%5d %s %7.1f |", port, dir, pkgsPerSec))
+      local barLen = pkgcnt / maxOccurValue
+      --local barLen = (math.log(pkgcnt) / math.log(maxOccurValue))
+      -- NOTE: inner 'i' shadows the outer loop variable; harmless here.
+      for i=1, chartWidth-1 do
+         out:write((i < (barLen*chartWidth))and("=")or(" "))
+      end
+      out:write("|\n")
+      ::nextPort::
+   end
+   out:write("\n")
+end
+
+
+main()
+
diff --git a/src/main/lua/pcap/xServiceStats.lua b/src/main/lua/pcap/xServiceStats.lua
new file mode 100644
index 0000000..3bc94a4
--- /dev/null
+++ b/src/main/lua/pcap/xServiceStats.lua
@@ -0,0 +1,90 @@
+
+local newPcapParser = assert(require("pcapit").newPcapParser)
+
+local out, log = io.stdout, io.stderr
+local main, onPcapFrame, vapourizeUrlVariables, printStats
+
+
+function main()
+   -- Parse a pcap dump from stdin, count HTTP requests per X-Service header,
+   -- then print the per-service statistics.
+   local app = {
+      parser = false,
+      youngestEpochSec = -math.huge,
+      oldestEpochSec = math.huge,
+      services = {},
+   }
+   local parser = newPcapParser{
+      dumpFilePath = "-",
+      onFrame = function( frame ) onPcapFrame(app, frame) end,
+   }
+   app.parser = parser
+   parser:resume()
+   printStats(app)
+end
+
+
+function onPcapFrame( app, it )
+   -- Per-frame callback: attribute each HTTP request to the service named in
+   -- its X-Service header and count requests per service.
+   local sec, usec = it:frameArrivalTime()
+   local srcPort, dstPort = it:trspSrcPort(), it:trspDstPort()
+   --
+   if sec < app.oldestEpochSec then app.oldestEpochSec = sec end
+   if sec > app.youngestEpochSec then app.youngestEpochSec = sec end
+   --
+   local portsOfInterest = {
+      [  80] = true,
+      [8080] = true,
+      [7012] = true,
+   }
+   --if not portsOfInterest[dstPort] and not portsOfInterest[srcPort] then return end
+   local trspPayload = it:trspPayload()
+   -- The char class also matches "HTTP/1.x" response lines; filtered below.
+   local httpReqLinePart1, httpReqLinePart2 =
+      trspPayload:match("^([A-Z/1.0]+) ([^ ]+) [^ \r\n]+\r?\n")
+   if not httpReqLinePart1 then return end
+   if httpReqLinePart1:find("^HTTP/1.%d$") then return end -- skip responses
+   -- Case-insensitive lookup of the "X-Service" header value.
+   -- BUGFIX: 'xService' was an accidental global, and the first hit of every
+   -- service was stored with count=0 and never incremented, so each service
+   -- was undercounted by one. Now local, and every hit increments.
+   local xService = trspPayload:match("\n[Xx]%-[Ss][Ee][Rr][Vv][Ii][Cc][Ee]:%s+([^\r\n]+)\r?\n");
+   if not xService then return end
+   --log:write("X-Service is '".. xService .."'\n")
+   local obj = app.services[xService]
+   if not obj then
+      obj = { xService = xService, count = 0, }
+      app.services[xService] = obj
+   end
+   obj.count = obj.count + 1
+end
+
+
+function printStats( app )
+   -- Print the request rate per X-Service value, busiest service first.
+   local sorted = {}
+   local maxOccurValue = 0
+   local overallCount = 0
+   for _, reqObj in pairs(app.services) do
+      if reqObj.count > maxOccurValue then maxOccurValue = reqObj.count end
+      overallCount = overallCount + reqObj.count
+      table.insert(sorted, reqObj)
+   end
+   table.sort(sorted, function(a, b)return a.count > b.count end)
+   local dumpDurationSec = app.youngestEpochSec - app.oldestEpochSec
+   local timeFmt = "!%Y-%m-%d_%H:%M:%SZ"
+   out:write("\n")
+   out:write(string.format(" Subject Pressure by Services\n"))
+   out:write(string.format(" Begin %s\n", os.date(timeFmt,app.oldestEpochSec)))
+   out:write(string.format(" Duration %d seconds\n", dumpDurationSec))
+   out:write(string.format("Matching Requests %.1f (HTTP requests per second)\n", overallCount / dumpDurationSec))
+   out:write("\n")
+   out:write(" .-- HTTP Requests per Second\n")
+   out:write(" | .-- Service\n")
+   out:write(".-+---. .-+-----\n")
+   for i, elem in ipairs(sorted) do
+      local xService, count = elem.xService, elem.count
+      -- Rate rounded to one decimal place.
+      local countPerSecond = math.floor((count / dumpDurationSec)*10+.5)/10
+      out:write(string.format("%7.1f %s\n", countPerSecond, xService))
+   end
+   out:write("\n")
+end
+
+
+main()
+
diff --git a/src/main/lua/wireshark/HttpTime.lua b/src/main/lua/wireshark/HttpTime.lua
index b06c0a7..514c62b 100644
--- a/src/main/lua/wireshark/HttpTime.lua
+++ b/src/main/lua/wireshark/HttpTime.lua
@@ -10,7 +10,7 @@ local mod = {}
function mod.init()
local that = mod.seal{
- proto = Proto("__", "Additional Metadata"),
+ proto = Proto("AdditMeta", "Additional Metadata"),
f_andy_httpTime = ProtoField.float("_.httpTime", "HttpTime"),
f_andy_synSeen = ProtoField.bool("_.synSeen", "SynSeen"),
f_andy_uri = ProtoField.string("_.uri", "Request URI"),
diff --git a/src/main/nodejs/misc/ProduceLotsOfQueues.js b/src/main/nodejs/misc/ProduceLotsOfQueues.js
new file mode 100644
index 0000000..810ac63
--- /dev/null
+++ b/src/main/nodejs/misc/ProduceLotsOfQueues.js
@@ -0,0 +1,119 @@
+;(function(){
+
+ const http = require("http");
+ const log = process.stderr;
+ const out = process.stdout;
+ const NOOP = function(){};
+
+ setTimeout(main); return;
+
+
+   function main(){
+      // Entry point: build the config object, parse CLI args, then run.
+      const app = Object.seal({
+         isHelp: false,
+         host: "localhost",
+         port: 7013,
+         uri: "/houston/tmp/gugus/bar",
+         queueName: "my-gaga-queue",
+      });
+      // parseArgs returns 0 on success; anything else aborts.
+      if( parseArgs(app, process.argv) !== 0 ) process.exit(1);
+      if( app.isHelp ){ printHelp(); return; }
+      run(app);
+   }
+
+
+
+   // Print CLI usage to stdout.
+   function printHelp(){
+      out.write("\n"
+         +"  Produce a bunch of gateleen queues\n"
+         +"  \n"
+         +"  Options:\n"
+         +"  \n"
+         +"  \n")
+   }
+
+
+ function parseArgs( app, argv ){
+ var isYolo = false;
+ for( var iA = 2 ; iA < argv.length ; ++iA ){
+ var arg = argv[iA];
+ if( arg == "--help" ){
+ app.isHelp = true; return 0;
+ }else if( arg == "--yolo" ){
+ isYolo = true;
+ }else{
+ log.write("EINVAL: "+ arg +"\n");
+ return -1;
+ }
+ }
+ if( !isYolo ){ log.write("EINVAL: wanna yolo?\n"); return; }
+ return 0;
+ }
+
+
+   // Dispatch the actual work (hook placement is currently disabled).
+   function run( app ){
+      //placeHook(app);
+      putSomeNonsense(app);
+   }
+
+
+   // PUT a gateleen http listener hook below app.uri. Currently unused (see
+   // run()); responses are only logged, not acted upon.
+   function placeHook( app ){
+      const req = Object.seal({
+         base: null,
+         app: app,
+      });
+      req.base = http.request({
+         host: app.host, port: app.port,
+         method: "PUT", path: app.uri +"/_hooks/listeners/http",
+         //headers: {
+         //   "X-Expire-After": "42",
+         //},
+      });
+      req.base.on("response", onResponse.bind(0, req));
+      req.base.end(JSON.stringify({
+         destination: "http://127.0.0.1:7099/guguseli",
+         queueExpireAfter/*seconds*/: 42,
+      }));
+      function onResponse( req, rsp ){
+         var app = req.app;
+         log.write("[DEBUG] < HTTP/"+ rsp.httpVersion +" "+ rsp.statusCode +" "+ rsp.statusMessage +"\n");
+         for( var k of Object.keys(rsp.headers) ) log.write("[DEBUG] < "+ k +": "+ rsp.headers[k] +"\n");
+      }
+   }
+
+
+   // Sequentially PUT dummy payloads to /foo/0 .. /foo/limit, each with its
+   // own X-Queue header, so the server ends up with many distinct queues.
+   // The next request is only sent once the previous response arrived.
+   function putSomeNonsense( app ){
+      const nonsense = Object.seal({
+         app: app,
+         req: null,
+         i: 0,
+         limit: 42,
+      });
+      putNextRequest(nonsense);
+      function putNextRequest( nonsense ){
+         nonsense.req = http.request({
+            host: app.host, port: app.port,
+            method: "PUT", path: app.uri +"/foo/"+ nonsense.i,
+            headers: {
+               "X-Queue": app.queueName +"-"+ nonsense.i,
+               "X-Queue-Expire-After": 9999999,
+            },
+         });
+         nonsense.req.on("response", onResponse.bind(0, nonsense));
+         nonsense.req.end("{\"guguseli\":\""+ new Date().toISOString() +"\"}\n");
+      }
+      function onResponse( nonsense, rsp ){
+         var app = nonsense.app;
+         // Only log non-200 responses; 200 is the expected happy path.
+         if( rsp.statusCode != 200 ){
+            log.write("[DEBUG] < HTTP/"+ rsp.httpVersion +" "+ rsp.statusCode +" "+ rsp.statusMessage +"\n");
+            for( var k of Object.keys(rsp.headers) ) log.write("[DEBUG] < "+ k +": "+ rsp.headers[k] +"\n");
+         }
+         // Drain the body so the connection can be reused / closed cleanly.
+         rsp.on("data", NOOP);
+         if( nonsense.i++ < nonsense.limit ){
+            putNextRequest(nonsense);
+         }
+      }
+   }
+
+
+}());
diff --git a/src/main/nodejs/paisa-nonslim/README.txt b/src/main/nodejs/paisa-nonslim/README.txt
new file mode 100644
index 0000000..e3a94f7
--- /dev/null
+++ b/src/main/nodejs/paisa-nonslim/README.txt
@@ -0,0 +1,3 @@
+
+Created 20240419 as it seems we need some automation for those tasks.
+
diff --git a/src/main/nodejs/paisa-nonslim/foo.js b/src/main/nodejs/paisa-nonslim/foo.js
new file mode 100644
index 0000000..43cf8aa
--- /dev/null
+++ b/src/main/nodejs/paisa-nonslim/foo.js
@@ -0,0 +1,902 @@
+/*
+
+Related:
+- [Remove Slim Packaging](SDCISA-15648)
+
+*/
+;(function(){ "use-strict";
+
+ const child_process = require("child_process");
+ const fs = require("fs");
+ const promisify = require("util").promisify;
+ const zlib = require("zlib");
+ const noop = function(){};
+ const log = process.stderr;
+ const out = process.stdout;
+ const logAsString = function( buf ){ log.write(buf.toString()); };
+
+ setImmediate(main);
+
+
+   // Print CLI usage to stdout.
+   function printHelp( argv, app ){
+      process.stdout.write("  \n"
+         +"  Autmoate some steps that are tedious manually.\n"
+         +"  \n"
+         +"  Options:\n"
+         +"  \n"
+         +"  --fetch\n"
+         +"    Update local repos from remote.\n"
+         +"  \n"
+         +"  --reset-hard\n"
+         +"    Reset worktree to develop.\n"
+         +"  \n"
+         +"  --patch-platform\n"
+         +"    Remove slim packaging from patform and set snapshot version.\n"
+         +"  \n"
+         +"  --patch-services\n"
+         +"    Disable slim packaging in Jenkinsfile and use platform snapshot in\n"
+         +"    pom.\n"
+         +"  \n"
+         +"  --commit\n"
+         +"    Create a git commit with our changes.\n"
+         +"  \n"
+         +"  --push | --push-force\n"
+         +"    Create commits for patched services and push them to upstream. If\n"
+         +"    not given, the change is only made locally (aka without cluttering\n"
+         +"    remote git repo). The force variant will replace existing branches\n"
+         +"    on the remnote. If given multiple times, less-invasive wins.\n"
+         +"  \n"
+         +"  --print-isa-version\n"
+         +"    Prints an isaVersion JSON that can be fed to preflux.\n"
+         +"  \n"
+         // not impl yet
+         //+"  --max-parallel <int>\n"
+         //+"    How many tasks to run concurrently. Defaults to 1. Which means to\n"
+         //+"    do all the work sequentially (HINT: very handy for debugging).\n"
+         //+"  \n"
+         );
+   }
+
+
+   // Parse CLI args into boolean flags on 'app'. Returns 0 on success,
+   // -1 on invalid input (including no args at all).
+   function parseArgs( argv, app ){
+      if( argv.length <= 2 ){
+         log.write("EINVAL: Refuse to produce damage with zero args.\n");
+         return -1;
+      }
+      for( var iA = 2 ; iA < argv.length ; ++iA ){
+         var arg = argv[iA];
+         if( arg == "--help" ){
+            app.isHelp = true; return 0;
+         }else if( arg == "--fetch" ){
+            app.isFetch = true;
+         }else if( arg == "--reset-hard" ){
+            app.isResetHard = true;
+         }else if( arg == "--patch-platform" ){
+            app.isPatchPlatform = true;
+         }else if( arg == "--patch-services" ){
+            app.isPatchServices = true;
+         }else if( arg == "--commit" ){
+            app.isCommit = true;
+         }else if( arg == "--push" ){
+            // --push and --push-force are mutually exclusive.
+            if( app.isPushForce ){ log.write("EINVAL: only one of push and push-force allowed\n"); return-1; }
+            app.isPush = true;
+         }else if( arg == "--push-force" ){
+            if( app.isPush ){ log.write("EINVAL: only one of push and push-force allowed\n"); return-1; }
+            app.isPushForce = true;
+         }else if( arg == "--print-isa-version" ){
+            app.isPrintIsaVersion = true;
+         //}else if( arg == "--max-parallel" ){
+         //   arg = argv[++iA];
+         //   if( !/^[0-9]+$/.test(arg) ){ log.write("EINVAL: --max-parallel "+ arg +"\n"); return -1; }
+         //   app.maxParallel = 0 + arg;
+         }else{
+            log.write("EINVAL: "+ arg +"\n");
+            return -1;
+         }
+      }
+      return 0;
+   }
+
+
+ function isThingyNameValid( app, thingyName ){
+ if( typeof thingyName !== "string" ) return false;
+ if( !/^[a-z-]+$/.test(thingyName) ) return false;
+ return true;
+ }
+
+
+   // Resolve the local worktree path of a repo; throws on invalid names.
+   function workdirOfSync( app, thingyName ){
+      if( !isThingyNameValid(app, thingyName) ) throw TypeError(thingyName);
+      return app.workdir +"/"+ thingyName;
+   }
+
+
+   // Resolve the remote git URL of a repo; throws on invalid names.
+   function gitUrlOfSync( app, thingyName ){
+      if( !isThingyNameValid(app, thingyName) ) throw TypeError(thingyName);
+      return "https://gitit.post.ch/scm/isa/"+ thingyName +".git";
+   }
+
+
+   // NOTE(review): despite the name, this merely runs 'git status' in the
+   // repo's workdir and reports true whenever git exits with 0, ignoring
+   // stdout entirely. A missing/uncloned workdir surfaces as the error
+   // callback path (non-zero/null exit). Confirm callers expect exactly that.
+   function isCloned( app, thingyName, onDone){
+      if( typeof onDone != "function" ) throw TypeError("onDone");
+      var child = child_process.spawn(
+         "git", ["status", "--porcelain"],
+         { cwd: workdirOfSync(app, thingyName), }
+      );
+      child.on("error", console.error.bind(console));
+      child.stdout.on("data", noop);
+      child.stderr.on("data", logAsString);
+      child.on("close", function( code, signal ){
+         if( code !== 0 || signal !== null ){
+            onDone(Error("code "+ code +", signal "+ signal));
+         }else{
+            onDone(null, true);
+         }
+      });
+   }
+
+
+   // Calls onDone(null, isClean) where isClean is true iff 'git status
+   // --porcelain' produced no output in the repo's workdir.
+   // NOTE(review): a non-null signal throws here instead of calling
+   // onDone(Error) like the sibling functions do — confirm that is intended.
+   function isWorktreeClean( app, thingyName, onDone ){
+      if( typeof onDone != "function" ) throw TypeError("onDone");
+      var isStdoutDirty = false;
+      var child = child_process.spawn(
+         "git", ["status", "--porcelain"],
+         { cwd: workdirOfSync(app, thingyName), }
+      );
+      child.on("error", console.error.bind(console));
+      // Any stdout at all means the worktree has changes.
+      child.stdout.on("data", function(){ isStdoutDirty = true; });
+      child.stderr.on("data", logAsString);
+      child.on("close", function( code, signal ){
+         if( signal !== null ){
+            throw Error("code "+ code +", signal "+ signal +"");
+         }else{
+            onDone(null, !isStdoutDirty);
+         }
+      });
+   }
+
+
+   // Yields (via onDone) the raw bytes of a git patch that is stored below
+   // as a base64-encoded, raw-deflate-compressed blob.
+   function getDropSlimArtifactsTagInPlatformPatch( app, onDone ){
+      if( typeof onDone != "function" ) throw TypeError("onDone");
+      /* patch which empties the <slimArtifacts> tag in
+       * "poms/service/paisa-service-superpom/pom.xml" as described in
+       * SDCISA-15648 */
+      var patch = ""
+         +"tVrdb9s2EH/PX8EZ2OosIe2kadOw7Zaia4sM3Vo0fdhDgYGSKJkOJQok7dgr+r/vqA9/NHZisrIf"
+         +"IpIif3e8O94HlUSkKcI4ExaxQalyMzBcT0XMByUThuGmh82k5BreuzlklksU+cw+EEXCZyhKT9L4"
+         +"ybMnQ0LO2fmz6On56TN0Mhw+PTs7wBj7cXBwdHTkycXlJcIXw5Pjk5PH6Mg1LtDl5QF66PdikCkm"
+         +"zW87zIxVkYpsopkVqthhQbWo1AoYtYIDCbzTErfKSJG/glUpi63PQvdjqmRSClbEnK60X/yE8ecP"
+         +"f3ygqOBTrlEiNI+tnKOJ4QkSBSols6nSOcaeBOMRKZWxRFhS6YgwKRxtWemp0VctPHhovgf0ROVM"
+         +"FPtCBx3G3Jh9wUuVZaLYm2xueRQArXRGmCnBRMa0ed5yBoZT2ZFmhRFWTHlrP9EcmVK7XdSTjbcV"
+         +"bSCo7R6NFtxGyozlGpwHjSUzJmeWb93dSERcFzADT0G8CbNKB9EsFJdCGaK5sZJbumGM8JklTnvQ"
+         +"3srPOBeFmO2Lhc7JLg0VqOWqIM4iad2urBPn3DIQK/PeDyAYHHFWTKyQht4Z2YMIKwKxSnhM13qh"
+         +"QFKCVUNQMXTDWBhoKiSflFKxhN4dCoMUii6bYRCSFRld7QTC1B6TftfvFAyzUnQOSC8uyBCiFTe4"
+         +"UBbzmTDWj0gB/sGkhI9iFo84bZ4BDr4F+i9XuhzR5tn1YXFefeE6ycJ10g3u9EeAx2zKZgQSLQMC"
+         +"5S7zWRIYlwyfkhN/fVYRqXTiJSNrS1BnqQpeWEOrrhTQ7hQxLEhvxctF7ptPrAZFEGp8Y1pX3XQw"
+         +"K8BwWZhjegg7IAF6GNTFlAhKlC6AHZadl+vgbgCPjX58MtyeG414IlV8E3R+MpecpMKMCKRytDZ1"
+         +"Lv1w2lWL5f7HoV651D/9fiDIYxIzKQhUOWLagt4wbdnKUAiXTmlO6QA3i8L5WocJM8+7OCIvPbXn"
+         +"rGAcKWNIG1uqXhtZ9pq0OtpGpmdjOo4lVpCF46q7v9S82m0Vbmj98LQAlTBswffRRSsoMo7dAZcC"
+         +"TlzT2EcunimVSU5c9khSsJBokhnqnMnwiR+YUCQTdjSJqliIQenAIyAZz6R0Rd0Tia3arOxOdNxE"
+         +"rsaI3ROoVn9D69XtgEHBvy2QVtpVbQbKV7Gw830kSxtobiWzoXALoKmAltpjwdfcTEDy25R74Qn2"
+         +"PaCuXgbX1A2mmUPkz7GBuDYx3fIJZa4WcUeYoCSNNc8g6dWhIXgT7pSPRCzBsIobXKrb7uQ6Ypon"
+         +"4JymIuuS3xVAOJldIrtrRSgo3AUqVpEzskD0pV+t/gYCVFdsqWY5v1X6hrZXbqp86FYOMsd8GocF"
+         +"/y1E63u+zvDcfc32u5rlNrrcQ6xAubOAIu5+PPdxolS6S9yQcLgFjM9KCBbGO7O+B9Jy32uMe8BA"
+         +"x11igdmHVBBNwuxKkmXzh+4Q2qyBPpg+8DziSQKOcpzPrJsQmELXi0kLR+/gBu+ludpaXHGVSsnT"
+         +"Pe7HpeROL7Ge5JEhbTlNF3V1re+OQcMu1aKJlBVuDnpO5nTZDDzHY62geKT1w7/EsCM1yUbWHQtD"
+         +"ZpApcJbT5unxSXLw3TfJo51Xri+8A7Qjee8PqS+MmuiYe1gaFObgtSsHQAooqNyFOfn1AO+O8JHZ"
+         +"EXL7ey+iqv2yGjIk47YPOxhDyAQDETIhdb2s9Bwdod4AystB7/D57pRWiBCr3gKn/UOS3wCq6Tuc"
+         +"3ZESnqI1lQDTS2GTtVfgZ6Ww/d6XL+bIi113PK0s+r13Wk1KCoC8pmUViji6fn/1Vw4eCvVAGmsU"
+         +"D31o1PJlC245OCv0FbEU4V8y68FuK5cbPgdpsJRkju2rxOmKOh5hqCVz5Xm1WIki7feaPf+Ofsbn"
+         +"Q9M7dsQ8dut+IkX9df24DARSVtN3YIfoqx/egj2nKYQroaE5N0CFW2RiMAqnsEcg6akAh/4IsSIB"
+         +"J1PO0Z+vPrlXlmkw9cqcD3ueu3E/kGtN5iXqtVR6/jDV2XN8vdUqrzVYHWar3Ju+j+XegfysAHD1"
+         +"+EE2peSU91siBbj7EAqDQSv7lnPg99q6wNY/dJbXasQZYM3JyoQAYTuv4UymnC8oHjfAx+jagm6Z"
+         +"Tl5D/0Ppsh/y6c3H969ev/n3zT9X15+v/n7nu8lviEvDOzHKQvla17fdp+84FSJZE2F2ioVtPIKd"
+         +"APuqBCs6O32+BNmBns9/IL0Y8BmPJw/MXJkFcfV/3mNHXg=="
+         ;
+      patch = Buffer.from(patch, 'base64');
+      // inflateRaw is async; hand the decompressed bytes to the callback.
+      patch = zlib.inflateRaw(patch, function( ex, patch ){
+         if( ex ){ throw ex; }
+         setImmediate(onDone, null, patch);
+      });
+   }
+
+
+   // Supposed to yield (via onDone) the list of jetty service repo names.
+   // NOTE(review): the identifier below looks like redacted/generated
+   // placeholder content and is not defined anywhere in this file; calling
+   // this function will throw a ReferenceError until real names (or the
+   // TODO'd args/file lookup) replace it.
+   function getJettyServiceNamesAsArray( app, onDone ){
+      setImmediate(onDone, null, [ /*TODO get via args/file */
+         TODO_GX0CAJ9hAgCNRAIA9hgCAP5jAgDGCgIA
+      ]);
+   }
+
+
+   // Yields (via onDone) the version to report for a service. Currently this
+   // always answers with app.jenkinsSnapVersion; the disabled code below is
+   // the original per-repo maven lookup, kept for reference.
+   function getVersionByServiceName(app, svcName, onDone){
+      /* if we did patch services, we already know the version without
+       * lookup. This is a performance optimization, because maven performs
+       * absolutely terrible. Performance DOES matter! */
+      //if( app.isPatchServices ){
+         setImmediate(onDone, null, app.jenkinsSnapVersion);
+      //}else{
+      //   wasteOurTimeBecausePerformanceDoesNotMatter();
+      //}
+      //function wasteOurTimeBecausePerformanceDoesNotMatter( ex ){
+      //   if( ex ) throw ex;
+      //   var stdoutBufs = [];
+      //   /* SHOULD start maven with low prio to not kill windoof. But I
+      //    * guess spawning a process with other prio is YAGNI, and so we're
+      //    * now fucked. Therefore I wish you happy time-wasting, as the only
+      //    * option left is to NOT start too many maven childs
+      //    * simultaneously. */
+      //   var child = child_process.spawn(
+      //      "mvn", ["help:evaluate", "-o", "-q", "-DforceStdout", "-Dexpression=project.version"],
+      //      { cwd:workdirOfSync(app, svcName) }
+      //   );
+      //   child.on("error", console.error.bind(console));
+      //   child.stderr.on("data", logAsString);
+      //   child.stdout.on("data", stdoutBufs.push.bind(stdoutBufs));
+      //   child.on("close", function( code, signal ){
+      //      if( code !== 0 || signal !== null ){
+      //         endFn(Error("code="+ code +", signal="+ signal +""));
+      //         return;
+      //      }
+      //      if( stdoutBufs.length <= 0 ) throw Error("maven has failed");
+      //      var version = stdoutBufs.join().trim();
+      //      onDone(null, version);
+      //   });
+      //}
+   }
+
+
+   // Print an isaVersion JSON document to stdout: a fixed preamble of
+   // services, then one entry per service in app.services. Version lookups
+   // run with limited concurrency (3 in flight) to keep maven bearable;
+   // iSvcQuery counts started lookups, iSvcPrinted counts finished ones.
+   function printIsaVersion( app, onDone ){
+      var iSvcQuery = 0, iSvcPrinted = 0;
+      printIntro();
+      function printIntro( ex ){
+         if( ex ) throw ex;
+         var epochMs = Date.now();
+         out.write('{\n');
+         out.write('  "timestamp": "'+ new Date().toISOString() +'",\n');
+         out.write('  "isaVersionId": "SDCISA-15648-'+ epochMs +'",\n');
+         out.write('  "isaVersionName": "SDCISA-15648-'+ epochMs +'",\n');
+         out.write('  "trial": true,\n');
+         out.write('  "services": [\n');
+         out.write('    { "name": "eagle", "version": "02.23.01.00" },\n');
+         out.write('    { "name": "storage", "version": "00.25.00.02" },\n');
+         out.write('    { "name": "platform", "version": "'+ app.platformJenkinsVersion +'" }');
+         /* maven performance is an absolute terrible monster.
+          * Problem 1: Doing this sequentially takes forever.
+          * Problem 2: Doing this parallel for all makes windoof freeze.
+          * Workaround: Do at most a few of them in parallel. */
+         for( var i = 3 ; i ; --i ) nextService();
+      }
+      function nextService( ex ){
+         if( ex ) throw ex;
+         if( iSvcQuery >= app.services.length ){ /*printTail();*/ return; }
+         var svcName = app.services[iSvcQuery++];
+         getVersionByServiceName(app, svcName, function(e,r){ printService(e,r,svcName); });
+      }
+      function printService( ex, svcVersion, svcName ){
+         if( ex ) throw ex;
+         if( typeof svcVersion != "string") throw Error(svcVersion);
+         iSvcPrinted += 1;
+         out.write(",\n    ");
+         out.write('{ "name": "'+ svcName +'", "version": "'+ svcVersion +'" }');
+         // Tail is printed once the LAST lookup finished, not the last started.
+         if( iSvcPrinted >= app.services.length ){ printTail(); }else{ nextService(); }
+      }
+      function printTail( ex ){
+         if( ex ) throw ex;
+         out.write('\n');
+         out.write('  ],\n');
+         out.write('  "featureSwitches": [],\n');
+         out.write('  "mergedBundles": []\n');
+         out.write('}\n');
+         onDone(/*ex*/null, /*ret*/null);
+      }
+   }
+
+
+ function pushService( app, thingyName, onDone ){
+ if( typeof onDone != "function" ){ throw TypeError("onDone"); }
+ var iRemoteNameToTry = 0;
+ push();
+ function push( ex, isClean ){
+ if( ex ) throw ex;
+ var remoteName = app.remoteNamesToTry[iRemoteNameToTry++];
+ if( remoteName === undefined ){ endFn(Error("No more remote names. s="+ thingyName +"")); return; }
+ log.write("[DEBUG] "+ thingyName +" - git push "+ remoteName +" "
+ + app.branchName +(app.isPushForce?" --force":"")+"\n");
+ argv = ["push", remoteName, "refs/heads/"+app.branchName +":refs/heads/"+ app.branchName];
+ if( app.isPushForce ) argv.push("--force");
+ var child = child_process.spawn(
+ "git", argv,
+ { cwd:workdirOfSync(app, thingyName) }
+ );
+ child.on("error", console.error.bind(console));
+ child.stderr.on("data", logAsString);
+ child.on("close", function( code, signal ){
+ if( code === 128 ){ /* retry with next upstream name */
+ push(); return;
+ }else if( code !== 0 || signal !== null ){
+ endFn(Error("code="+ code +", signal="+ signal +""));
+ return;
+ }
+ endFn();
+ });
+ }
+ function endFn( ex, ret ){
+ onDone(ex, ret);
+ }
+ }
+
+
+   // For one repo: if the worktree is dirty, 'git add .', commit with
+   // app.commitMsg, then (re)point branch app.branchName at the new commit.
+   // A clean worktree is a no-op success.
+   function commitService( app, thingyName, onDone ){
+      if( typeof onDone != "function" ){ throw Error("onDone"); }
+      incrNumTasks(app);
+      isWorktreeClean(app, thingyName, gitAdd);
+      function gitAdd( ex, isClean ){
+         if( ex ) throw ex;
+         if( isClean ){
+            log.write("[INFO ] Nothing to commit in \""+ thingyName +"\"\n");
+            endFn(null, null); return;
+         }
+         log.write("[DEBUG] "+ thingyName +"$ git add Jenkinsfile\n");
+         var child = child_process.spawn(
+            "git", ["add", "--", "."],
+            { cwd:workdirOfSync(app, thingyName) }
+         );
+         child.on("error", console.error.bind(console));
+         child.stderr.on("data", logAsString);
+         child.on("close", function( code, signal ){
+            if( code !== 0 || signal !== null ){
+               endFn(Error("code="+ code +", signal="+ signal +""));
+               return;
+            }
+            gitCommit();
+         });
+      }
+      function gitCommit( ex ){
+         if( ex ) throw ex;
+         log.write("[DEBUG] "+ thingyName +"$ git commit -m \""+ app.commitMsg +"\"\n");
+         var child = child_process.spawn(
+            "git", ["commit", "-m", app.commitMsg],
+            { cwd:workdirOfSync(app, thingyName) }
+         );
+         var stdoutBufs = [];
+         child.on("error", console.error.bind(console));
+         child.stderr.on("data", logAsString);
+         child.stdout.on("data", function( buf ){ stdoutBufs.push(buf); });
+         child.on("exit", function( code, signal ){
+            if( code !== 0 || signal !== null ){
+               // BUGFIX: this used 'for..in', which iterates the ARRAY
+               // INDICES ("0", "1", ...), so git's output was never shown.
+               // 'for..of' iterates the buffers themselves. Also dropped the
+               // unused 'stdoutStr' local.
+               for( var buf of stdoutBufs ){ log.write(buf.toString()); }
+               endFn(Error("code="+ code +", signal="+ signal));
+               return;
+            }
+            createBranch(); return;
+         });
+      }
+      function createBranch( ex ){
+         if( ex ) throw ex;
+         log.write("[DEBUG] "+ thingyName +"$ git branch "+ app.branchName +"\n");
+         var child = child_process.spawn(
+            "git", ["branch", "-f", app.branchName],
+            { cwd:workdirOfSync(app, thingyName) }
+         );
+         child.on("error", console.error.bind(console));
+         child.stderr.on("data", logAsString);
+         child.on("exit", function( code, signal ){
+            if( code !== 0 || signal !== null ){
+               endFn(Error("code="+ code +", signal="+ signal +""));
+               return;
+            }
+            endFn(); return;
+         });
+      }
+      function endFn( ex, ret ){
+         decrNumTasks(app);
+         onDone(ex, ret);
+      }
+   }
+
+
function commitAllServices( app, onDone ){
    /* Commits every jetty service, strictly one after another. */
    var names;
    var idx = 0;
    incrNumTasks(app);
    getJettyServiceNamesAsArray(app, function( ex, ret ){
        if( ex ) throw ex;
        names = ret;
        commitNext(null);
    });
    function commitNext( ex ){
        if( ex ) throw ex;
        if( idx >= names.length ){ finish(null); return; }
        var name = names[idx];
        idx += 1;
        if( !name ) throw Error("assert(thingyName != NULL)");
        commitService(app, name, commitNext);
    }
    function finish( ex ){
        decrNumTasks(app);
        if( ex ) throw ex;
        log.write("[DEBUG] No more services to commit\n");
        onDone(null, null);
    }
}
+
+
function giveServiceOurSpecialVersion( app, thingyName, onDone ){
    /* Runs 'mvn versions:set' in the service's worktree to pin
     * app.serviceSnapVersion. onDone(ex?) reports the outcome. */
    if( typeof onDone != "function" ) throw TypeError("onDone");
    var mvnArgs = [
        "versions:set", "-DgenerateBackupPoms=false", "-DallowSnapshots=true",
        "-DnewVersion="+ app.serviceSnapVersion ];
    var child = child_process.spawn(
        "mvn", mvnArgs,
        { cwd: workdirOfSync(app, thingyName) });
    child.on("error", console.error.bind(console));
    child.stderr.on("data", logAsString);
    child.on("close", function( code, signal ){
        if( code === 0 && signal === null ){
            onDone();
        }else{
            onDone(Error("code "+ code +", signal "+ signal));
        }
    });
}
+
+
function setPlatformVersionInService( app, thingyName, onDone ){
    /* Pins the platform version (app.parentVersion) in one service's pom,
     * in two sequential maven steps: update the <parent> version, then the
     * 'platform.version' property. onDone(ex?) reports the outcome. */
    if( typeof onDone != "function" ) throw TypeError("onDone");
    runMvn(
        "[DEBUG] "+ thingyName +" - Set platform version "+ app.parentVersion +"\n",
        [ "versions:update-parent", "-DgenerateBackupPoms=false", "-DallowDowngrade=true",
          "-DallowSnapshots=true", "-DforceUpdate=true", "-DskipResolution=true",
          "-DparentVersion="+ app.parentVersion ],
        setProperty);
    function setProperty(){
        runMvn(
            "[DEBUG] "+ thingyName +" - Set parent.version "+ app.parentVersion +"\n",
            [ "versions:set-property", "-DgenerateBackupPoms=false", "-DallowSnapshots=true",
              "-Dproperty=platform.version", "-DnewVersion="+ app.parentVersion ],
            function(){ onDone(); });
    }
    /* Shared spawn/observe boilerplate for both maven invocations. */
    function runMvn( logMsg, args, next ){
        log.write(logMsg);
        var child = child_process.spawn(
            "mvn", args,
            { cwd: workdirOfSync(app, thingyName) });
        child.on("error", console.error.bind(console));
        child.stderr.on("data", logAsString);
        child.on("close", function( code, signal ){
            if( code !== 0 || signal !== null ){
                onDone(Error("code "+ code +", signal "+ signal));
                return;
            }
            next();
        });
    }
}
+
+
function dropSlimFromAllJenkinsfiles( app, onDone ){
    /* Removes the 'slim: true' option from every service's Jenkinsfile and
     * then drops a now-empty 'buildMaven([])' argument list (the pipeline
     * is too dumb for an empty array). Services with a dirty worktree are
     * skipped with a warning. */
    var iSvc = -1;
    var jettyServices;
    var jettyService;
    incrNumTasks(app);
    getJettyServiceNamesAsArray(app, function( ex, jettyServices_ ){
        if( ex ) throw ex;
        jettyServices = jettyServices_;
        nextJettyService();
    });
    function nextJettyService( ex ){
        decrNumTasks(app);
        if( ex ) throw ex;
        if( ++iSvc >= jettyServices.length ){ onNoMoreJettyServices(); return; }
        incrNumTasks(app);
        jettyService = jettyServices[iSvc];
        isWorktreeClean(app, jettyService, onIsWorktreeCleanRsp);
    }
    function onIsWorktreeCleanRsp( ex, isClean ){
        if( ex ) throw ex;
        if( !isClean ){
            log.write("[WARN ] Wont patch: Worktree not clean: "+ jettyService +"\n");
            nextJettyService();
            return;
        }
        log.write("[DEBUG] Patching \""+ jettyService +"/Jenkinsfile\"\n");
        var child = child_process.spawn(
            "sed", [ "-i", "-E", "s_^(.*?buildMaven.*?),? *slim: *true,? *(.*?)$_\\1\\2_", "Jenkinsfile" ],
            { cwd: workdirOfSync(app, jettyService) }
        );
        child.on("error", console.error.bind(console));
        child.stderr.on("data", logAsString);
        /* BUGFIX: was 'child.on("close", removeEmptyArray)', which passed the
         * exit CODE into the 'ex' parameter; a nonzero sed exit got thrown as
         * a raw number, and a zero exit was indistinguishable from "no error".
         * Check the exit status explicitly instead. */
        child.on("close", function( code, signal ){
            if( code !== 0 || signal !== null ){
                nextJettyService(Error("sed: code="+ code +", signal="+ signal));
                return;
            }
            removeEmptyArray(null);
        });
    }
    /* Pipeline is too dumb for an empty array */
    function removeEmptyArray( ex ){
        if( ex ) throw ex;
        var child = child_process.spawn(
            "sed", [ "-i", "-E", "s_^(.*?).buildMaven\\(\\[\\]\\))(.*?)$_\\1\\2_", "Jenkinsfile" ],
            { cwd: workdirOfSync(app, jettyService) }
        );
        child.on("error", console.error.bind(console));
        child.stderr.on("data", logAsString);
        /* BUGFIX: same exit-code-as-'ex' confusion as above. */
        child.on("close", function( code, signal ){
            if( code !== 0 || signal !== null ){
                nextJettyService(Error("sed: code="+ code +", signal="+ signal));
                return;
            }
            nextJettyService(null);
        });
    }
    function onNoMoreJettyServices(){
        onDone(null, null);
    }
}
+
+
function checkoutUpstreamDevelop( app, thingyName, onDone ){
    /* Checks out <remote>/develop in the service's worktree, trying each
     * name in app.remoteNamesToTry until one succeeds. */
    var iRemoteName = 0;
    checkout();
    function checkout(){
        /* BUGFIX: iRemoteName was never advanced, so "try next remote"
         * could only ever retry the same remote. */
        var remoteName = app.remoteNamesToTry[iRemoteName++];
        if( remoteName === undefined ){ onDone(Error("No more remote names for "+ thingyName)); return; }
        log.write("[DEBUG] git checkout "+ thingyName +" "+ remoteName +"/develop\n");
        var child = child_process.spawn(
            "git", ["checkout", remoteName+"/develop"],
            { cwd: workdirOfSync(app, thingyName), });
        child.on("error", console.error.bind(console));
        child.stderr.on("data", function( buf ){ log.write(buf); });
        child.on("close", function( code, signal ){
            /* BUGFIX: the retry branch was dead code ('if(!"TODO...")' is
             * always false). Intended behavior: a signal is fatal, a normal
             * nonzero exit means "remote unusable, try the next one". */
            if( signal !== null ){
                onDone(Error("code "+ code +", signal "+ signal));
            }else if( code !== 0 ){
                checkout(); /* try next remote name */
            }else{
                onDone(null, null);
            }
        });
    }
}
+
+
function fetchChangesFromGitit( app, thingyName, onDone ){
    /* Ensures an up-to-date local clone of the repo: clones (shallow) on
     * first use, otherwise fetches from the first usable remote. */
    var iRemoteName = 0;
    mkAppWorkdir();
    function mkAppWorkdir( ex ){
        if( ex ) throw ex;
        fs.mkdir(app.workdir, {recursive:true}, checkRepoExists);
    }
    function checkRepoExists( ex ){
        if( ex ) throw ex;
        /* NOTE(review): fs.exists is deprecated; kept for behavior parity.
         * Consider fs.access/fs.stat. */
        fs.exists(workdirOfSync(app, thingyName) +"/.git", function( isLocalCloneExists ){
            isLocalCloneExists ? fetch() : clone();
        });
    }
    function clone( ex ){
        if( ex ) throw ex;
        log.write("[DEBUG] git clone "+ thingyName +"\n");
        var child = child_process.spawn(
            "git", ["clone", "--no-single-branch", "--depth", "4", gitUrlOfSync(app, thingyName)],
            { cwd: app.workdir });
        child.on("error", console.error.bind(console));
        child.stderr.on("data", function( buf ){ log.write(buf); });
        child.on("close", function( code, signal ){
            if( code !== 0 || signal !== null ){
                onDone(Error("code "+ code +", signal "+ signal)); return;
            }
            onDone(null, null);
        });
    }
    function fetch( ex ){
        if( ex ) throw ex;
        var remoteName = app.remoteNamesToTry[iRemoteName++];
        if( remoteName === undefined ){
            onDone(Error("No more remotes to try for "+ thingyName)); return; }
        log.write("[DEBUG] "+ thingyName +" - git fetch "+ remoteName +"\n");
        var child = child_process.spawn(
            "git", ["fetch", remoteName],
            { cwd: workdirOfSync(app, thingyName), });
        child.on("error", console.error.bind(console));
        child.stderr.on("data", function( buf ){ log.write(buf); });
        child.on("close", function( code, signal ){
            /* BUGFIX: iRemoteName was incremented for retrying, but any
             * failure aborted instead of trying the next remote. A signal
             * stays fatal; a nonzero exit now advances to the next remote. */
            if( signal !== null ){
                onDone(Error("code "+ code +", signal "+ signal)); return;
            }
            if( code !== 0 ){ fetch(); return; }
            onDone(null, null);
        });
    }
}
+
+
function setVersionInPlatform( app, onDone ){
    /* Runs 'mvn versions:set' in the platform worktree to pin
     * app.platformSnapVersion. onDone(ex, ret) reports the outcome. */
    if( typeof onDone != "function" ) throw TypeError("onDone");
    log.write("[DEBUG] platform - mvn versions:set "+ app.platformSnapVersion +"\n");
    var mvnArgs = [
        "versions:set", "-DgenerateBackupPoms=false",
        "-DnewVersion="+ app.platformSnapVersion ];
    var child = child_process.spawn(
        "mvn", mvnArgs,
        { cwd: workdirOfSync(app, "platform") }
    );
    child.on("error", console.error.bind(console));
    child.stdout.on("data", noop); /* drain stdout, discard it */
    child.stderr.on("data", logAsString);
    child.on("close", function( code, signal ){
        if( code !== 0 || signal !== null ){
            onDone(Error("code "+ code +", signal "+ signal));
        }else{
            onDone();
        }
    });
}
+
+
function patchAwaySlimPackagingInPlatform( app, onDone ){
    /* Applies the drop-slim-artifacts patch to the platform worktree via
     * 'git apply' on stdin. Skipped (successfully) when the worktree is
     * not clean. onDone(ex, ret) is asserted to fire exactly once. */
    var onDoneCalledNTimes = 0;
    incrNumTasks(app);
    isWorktreeClean(app, "platform", function( ex, isClean ){
        if( ex ) throw ex;
        if( !isClean ){ log.write("[WARN ] Skip platform patch: Worktree not clean\n");
            endFn(); return; }
        getDropSlimArtifactsTagInPlatformPatch(app, onPatchBufReady);
    });
    function onPatchBufReady( ex, patch ){
        if( ex ) throw ex;
        var stdoutBufs = [];
        var gitApply = child_process.spawn(
            "git", ["apply"],
            { cwd: workdirOfSync(app, "platform"), });
        gitApply.on("error", console.error.bind(console));
        gitApply.stderr.on("data", logAsString);
        gitApply.stdout.on("data", stdoutBufs.push.bind(stdoutBufs));
        gitApply.on("close", function( code, signal ){
            if( code !== 0 || signal !== null ){
                /* BUGFIX: 'for..in' iterated the array INDEXES, so the
                 * captured 'git apply' output was never actually logged. */
                for( var i = 0; i < stdoutBufs.length; ++i ){ log.write(stdoutBufs[i].toString()); }
                throw Error(""+ code +", "+ signal +"");
            }
            endFn(null, null);
        });
        setTimeout/*TODO why?*/(function(){
            gitApply.stdin.write(patch);
            gitApply.stdin.end();
        }, 42);
    }
    function endFn( ex, ret ){
        /* Guard against double completion, then balance incrNumTasks(). */
        if( onDoneCalledNTimes !== 0 ){ throw Error("assert(onDoneCalledNTimes == 0)"); }
        onDoneCalledNTimes += 1;
        decrNumTasks(app);
        onDone(ex, ret);
    }
}
+
+
function incrNumTasks( app ){
    /* Bookkeeping: one more task is now running. */
    app.numRunningTasks = app.numRunningTasks + 1;
}
+
+
function decrNumTasks( app ){
    /* Bookkeeping: one task finished. Guards against underflow. */
    if( app.numRunningTasks <= 0 ){
        throw Error("assert(app.numRunningTasks > 0)");
    }
    app.numRunningTasks = app.numRunningTasks - 1;
}
+
+
function forEachInArrayDo( app, array, onService, onDone ){
    /* Sequentially applies onService(app, element, next) to each element.
     * onDone(ex) fires at most once; a second completion attempt is a
     * programming error and throws. */
    var cursor = 0;
    var done = false;
    step(null);
    function step( ex ){
        if( ex ){ finish(ex); return; }
        if( cursor >= array.length ){ finish(); return; }
        var elem = array[cursor];
        cursor += 1;
        onService(app, elem, step);
    }
    function finish( ex ){
        if( done ){
            throw (ex) ? ex : Error("onDone MUST be called ONCE only");
        }
        done = true;
        onDone(ex);
    }
}
+
+
function forEachJettyService( app, onService, onDone ){
    /* Like forEachInArrayDo, but iterates the jetty service names. */
    getJettyServiceNamesAsArray(app, function( ex, services ){
        if( ex ) throw ex;
        forEachInArrayDo(app, services, onService, onDone);
    });
}
+
+
function resetHardToDevelop( app, thingyName, onDone ){
    /* Throws away local changes in the service's worktree: detaches HEAD,
     * hard-resets to <remote>/develop (trying each configured remote name
     * in turn), wipes untracked files, then force-deletes the work branch.
     * onDone(ex, ret) reports the outcome. */
    var iRemoteName = 0;
    if( typeof onDone !== "function" ) throw Error("onDone");
    detach();
    function detach(){
        /* Detach HEAD first so the work branch can be deleted later even
         * if it is currently checked out. */
        log.write("[DEBUG] "+ thingyName +"$ git checkout --detach\n");
        var child = child_process.spawn(
            "git", ["checkout", "--detach"],
            { cwd:workdirOfSync(app, thingyName) }
        );
        child.on("error", console.error.bind(console));
        child.stderr.on("data", logAsString);
        child.on("close", function( code, signal ){
            if( code !== 0 || signal !== null ){
                onDone(Error("code "+ code +", signal "+ signal));
            }else{
                tryResetHard();
            }
        });
    }
    function tryResetHard(){
        /* A signal is fatal; a normal nonzero exit means "this remote is
         * unusable, try the next one". */
        var remoteName = app.remoteNamesToTry[iRemoteName++];
        if( remoteName === undefined ){ onDone(Error("no usable remote found")); return; }
        log.write("[DEBUG] "+ thingyName +"$ git reset --hard "+ remoteName +"/develop\n");
        var child = child_process.spawn(
            "git", ["reset", "--hard", remoteName +"/develop"],
            { cwd:workdirOfSync(app, thingyName) }
        );
        child.on("error", console.error.bind(console));
        child.stderr.on("data", logAsString);
        child.on("close", function( code, signal ){
            if( signal !== null ){
                onDone(Error("code "+ code +", signal "+ signal));
            }else if( code !== 0 ){
                tryResetHard(); /*try next remoteName*/
            }else{
                wipeWorktree();
            }
        });
    }
    function wipeWorktree(){
        /* 'git rimraf' is presumably a local git alias that removes
         * untracked files — TODO confirm; not a standard git command. */
        log.write("[DEBUG] "+ thingyName +"$ git rimraf\n");
        var child = child_process.spawn(
            "git", ["rimraf"/*TODO make portable*/],
            { cwd:workdirOfSync(app, thingyName) }
        );
        child.on("error", console.error.bind(console));
        child.stderr.on("data", logAsString);
        child.on("close", function( code, signal ){
            if( code !== 0 || signal !== null ){
                onDone(Error("code "+ code +", signal "+ signal));
            }else{
                deleteBranch();
            }
        });
    }
    function deleteBranch( ex ){
        if( ex ) throw ex;
        log.write("[DEBUG] "+ thingyName +"$ git branch --delete --force "+ app.branchName +"\n");
        var child = child_process.spawn(
            "git", ["branch", "--delete", "--force", app.branchName],
            { cwd:workdirOfSync(app, thingyName) }
        );
        child.on("error", console.error.bind(console));
        child.stderr.on("data", logAsString);
        child.on("close", function( code, signal ){
            /* Exit code 1 is tolerated: the branch most likely never
             * existed in this worktree, which is fine. */
            if( code == 1 ){ /* assume branch doesnt exist*/
                log.write("[INFO ] Ignore: Failed to delete branch '"+ app.branchName +"' in '"
                    + thingyName +"'.\n");
                endFn(null, null);
            }else if( code !== 0 || signal !== null ){
                onDone(Error("code "+ code +", signal "+ signal));
            }else{
                endFn(null, null);
            }
        });
    }
    function endFn( ex, ret ){
        onDone(ex, ret);
    }
}
+
+
function setPlatformVersionInAllServices( app, onDone ){
    /* Pins the platform version in every jetty service's pom. */
    forEachJettyService(app, setPlatformVersionInService, onDone);
}
+
+
function fetchRemoteChanges( app, onDone ){
    /* Fetches git changes for "platform" plus every known service. */
    var targets = ["platform"].concat(app.services);
    forEachInArrayDo(app, targets, fetchChangesFromGitit, onDone);
}
+
+
function fetchListOfServices( app, onDone ){
    /* Resolves the jetty service names and caches them on app.services. */
    getJettyServiceNamesAsArray(app, function( ex, services ){
        if( ex ) throw ex;
        app.services = services;
        onDone();
    });
}
+
+
function run( app ){
    /* Assembles the action pipeline from the parsed CLI flags and runs the
     * actions strictly one after another. Each action gets (app, onDone). */
    var actions = [ fetchListOfServices ];
    if( app.isFetch ){ actions.push(fetchRemoteChanges); }
    if( app.isResetHard ){
        actions.push(function( app, onDone ){
            forEachInArrayDo(app, app.services, checkoutUpstreamDevelop, onDone);
        });
        actions.push(function( app, onDone ){
            forEachInArrayDo(app, app.services, resetHardToDevelop, onDone);
        });
    }
    if( app.isPatchPlatform ){
        actions.push(patchAwaySlimPackagingInPlatform);
        actions.push(setVersionInPlatform);
    }
    if( app.isPatchServices ){
        actions.push(dropSlimFromAllJenkinsfiles);
        actions.push(function( app, onDone ){
            forEachInArrayDo(app, app.services, giveServiceOurSpecialVersion, onDone);
        });
    }
    if( app.isCommit ){
        actions.push(function( app, onDone ){
            forEachInArrayDo(app, app.services, commitService, onDone);
        });
    }
    if( app.isPush || app.isPushForce ){
        actions.push(function( app, onDone ){
            forEachJettyService(app, pushService, onDone);
        });
    }
    if( app.isPrintIsaVersion ){ actions.push(printIsaVersion); }
    actions.push(function( app, onDone ){
        log.write("[INFO ] App done\n");
        onDone(null, null); /* BUGFIX: was missing, so the pipeline never completed */
    });
    triggerNextAction();
    function triggerNextAction( ex ){
        if( ex ) throw ex;
        var action = actions.shift();
        /* BUGFIX: previously called 'endFn()', which does not exist in this
         * scope and would raise a ReferenceError on pipeline completion. */
        if( action === undefined ){ return; }
        action(app, triggerNextAction);
    }
}
+
+
function main(){
    /* Entry point: builds the default configuration, parses CLI args into
     * it, then either prints help or runs the pipeline. */
    const app = {
        isHelp: false,
        isFetch: false,
        isResetHard: false,
        isPatchPlatform: false,
        isPatchServices: false,
        /* BUGFIX: was 'iscommit' (lowercase c), but run() reads
         * 'app.isCommit', so the commit flag never had its default. */
        isCommit: false,
        isPush: false,
        isPushForce: false,
        isPrintIsaVersion: false,
        remoteNamesToTry: ["origin"],
        workdir: "C:/work/tmp/git-scripted",
        maxParallel: 1,
        numRunningTasks: 0,
        services: null, /* filled in by fetchListOfServices */
        branchName: "SDCISA-15648-RemoveSlimPackaging-n1",
        commitMsg: "[SDCISA-15648] Remove slim packaging",
        platformSnapVersion: "0.0.0-SNAPSHOT",
        serviceSnapVersion: "0.0.0-SNAPSHOT",
        platformJenkinsVersion: "0.0.0-SDCISA-15648-RemoveSlimPackaging-n1-SNAPSHOT",
        jenkinsSnapVersion: "0.0.0-SDCISA-15648-RemoveSlimPackaging-n1-SNAPSHOT",
        parentVersion: null, /* derived from branchName below */
    };
    app.parentVersion = "0.0.0-"+ app.branchName +"-SNAPSHOT";
    if( parseArgs(process.argv, app) !== 0 ){ process.exit(1); }
    if( app.isHelp ){ printHelp(); return; }
    run(app);
}
+
+
+}());
diff --git a/src/main/patch/eagle/default-bak20211124-080400.patch b/src/main/patch/eagle/default-bak20211124-080400.patch
new file mode 100644
index 0000000..c7d3a8c
--- /dev/null
+++ b/src/main/patch/eagle/default-bak20211124-080400.patch
@@ -0,0 +1,103 @@
+
+ General patch to fix crappy design.
+
+ Contains:
+ - Logging override to get back control over logging.
+ - Listen on localhost only.
+ - Disable NSync. To suppress that useless noise.
+
+
+diff --git a/eagle-process/pom.xml b/eagle-process/pom.xml
+index 5b226670..45acc276 100644
+--- a/eagle-process/pom.xml
++++ b/eagle-process/pom.xml
+@@ -21,7 +21,27 @@
+ <skip.node.install>true</skip.node.install>
+ </properties>
+ <dependencies>
+- <!-- project -->
++ <dependency> <!-- TODO: Remove -->
++ <groupId>org.slf4j</groupId> <!-- TODO: Remove -->
++ <artifactId>slf4j-api</artifactId> <!-- TODO: Remove -->
++ <version>1.7.25</version> <!-- TODO: Remove -->
++ </dependency> <!-- TODO: Remove -->
++ <dependency> <!-- TODO: Remove -->
++ <groupId>org.slf4j</groupId> <!-- TODO: Remove -->
++ <artifactId>slf4j-simple</artifactId> <!-- TODO: Remove -->
++ <version>1.7.25</version> <!-- TODO: Remove -->
++ </dependency> <!-- TODO: Remove -->
++ <dependency> <!-- TODO: Remove -->
++ <groupId>org.slf4j</groupId> <!-- TODO: Remove -->
++ <artifactId>jcl-over-slf4j</artifactId> <!-- TODO: Remove -->
++ <version>1.7.25</version> <!-- TODO: Remove -->
++ </dependency> <!-- TODO: Remove -->
++ <dependency> <!-- TODO: Remove -->
++ <groupId>org.slf4j</groupId> <!-- TODO: Remove -->
++ <artifactId>jul-to-slf4j</artifactId> <!-- TODO: Remove -->
++ <version>1.7.25</version> <!-- TODO: Remove -->
++ </dependency> <!-- TODO: Remove -->
++ <!-- project -->
+ <dependency>
+ <groupId>ch.post.it.paisa.eagle</groupId>
+ <artifactId>eagle-domain</artifactId>
+diff --git a/eagle-process/src/main/java/ch/post/it/paisa/eagle/process/main/MainVerticle.java b/eagle-process/src/main/java/ch/post/it/paisa/eagle/process/main/MainVerticle.java
+index 13ebdc51..9e947a2d 100644
+--- a/eagle-process/src/main/java/ch/post/it/paisa/eagle/process/main/MainVerticle.java
++++ b/eagle-process/src/main/java/ch/post/it/paisa/eagle/process/main/MainVerticle.java
+@@ -268,7 +268,7 @@ public class MainVerticle extends AbstractVerticle {
+ doubleSlashCheckHandler = new DoubleSlashCheckHandler(doubleSlashCheckerMode);
+ ClasspathResourceHandler classpathResourceHandler = new ClasspathResourceHandler("static-web-apps/", SERVER_ROOT + "/apps/");
+
+- NSyncHandler nSyncHandler = new NSyncHandler(vertx, EAGLE_NSYNC_PATH, mainPort);
++ //NSyncHandler nSyncHandler = new NSyncHandler(vertx, EAGLE_NSYNC_PATH, mainPort);
+
+ ReturnHttpErrorHandler returnHttpErrorHandler = new ReturnHttpErrorHandler(RETURN_HTTP_ERROR_ROOT);
+
+@@ -377,9 +377,9 @@ public class MainVerticle extends AbstractVerticle {
+ if (classpathResourceHandler.handle(request)) {
+ return;
+ }
+- if (nSyncHandler.handle(request)) {
+- return;
+- }
++ //if (nSyncHandler.handle(request)) {
++ // return;
++ //}
+
+ // Attention to the order of handlers - handlers from here can fire selfrequests
+ if ((env.equals("dev") || env.equals("test")) && validationHandler.isToValidate(request)) {
+@@ -440,7 +440,7 @@ public class MainVerticle extends AbstractVerticle {
+ vertxRouter.route().handler(routingContextHandler);
+
+ mainServer.requestHandler(vertxRouter::accept);
+- mainServer.listen(mainPort, x -> {
++ mainServer.listen(mainPort, "127.0.0.1", x -> {
+ propertyHandler.addRefreshable(schedulerResourceManager);
+ log.info("<init> I am ready");
+ String bootLogFilename = props.getOrDefault("service.boot.log", "/data/init/boot.log").toString();
+@@ -530,7 +530,7 @@ public class MainVerticle extends AbstractVerticle {
+ final Future<String> storageDataFuture = newLoggingFuture.apply("storage-data" );
+ final Future<String> mirrorModFuture = newLoggingFuture.apply("MirrorMod" );
+ final Future<String> metricsModuleFuture = newLoggingFuture.apply("MetricsModule" );
+- final Future<String> nsyncFuture = newLoggingFuture.apply("NSync" );
++ //final Future<String> nsyncFuture = newLoggingFuture.apply("NSync" );
+ CompositeFuture.all(futuresToWaitFor).setHandler(handler);
+
+
+@@ -669,11 +669,11 @@ public class MainVerticle extends AbstractVerticle {
+ //////////////////////////////////// NSync /////////////////////////////////////////////////////////////////////////
+ //////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
+ {
+- NSyncVerticleConfig nSyncVerticleConfig = new NSyncVerticleConfig()
+- .withBasePath(EAGLE_NSYNC_PATH)
+- .withMainPort(mainPort);
+- DeploymentOptions deplOpt = new DeploymentOptions().setConfig(JsonObject.mapFrom(nSyncVerticleConfig));
+- vertx.deployVerticle(new NSyncVerticle(), deplOpt, nsyncFuture.completer());
++// NSyncVerticleConfig nSyncVerticleConfig = new NSyncVerticleConfig()
++// .withBasePath(EAGLE_NSYNC_PATH)
++// .withMainPort(mainPort);
++// DeploymentOptions deplOpt = new DeploymentOptions().setConfig(JsonObject.mapFrom(nSyncVerticleConfig));
++// vertx.deployVerticle(new NSyncVerticle(), deplOpt, nsyncFuture.completer());
+ }
+
+ vertx.deployVerticle(new DirectMemoryUsageLogger(), new DeploymentOptions().setConfig(new JsonObject(props)));
diff --git a/src/main/patch/eagle/default-bak20230220-121000.patch b/src/main/patch/eagle/default-bak20230220-121000.patch
new file mode 100644
index 0000000..76cedd0
--- /dev/null
+++ b/src/main/patch/eagle/default-bak20230220-121000.patch
@@ -0,0 +1,102 @@
+
+ General patch to fix crappy design.
+
+ Contains:
+ - Logging override to get back control over logging.
+ - Listen on localhost only (DISABLED for zarniwoop)
+ - Disable NSync. To suppress that useless noise.
+
+
+diff --git a/eagle-process/pom.xml b/eagle-process/pom.xml
+index 5b226670..45acc276 100644
+--- a/eagle-process/pom.xml
++++ b/eagle-process/pom.xml
+@@ -21,6 +21,28 @@
+ <skip.node.install>true</skip.node.install>
+ </properties>
+ <dependencies>
++ <!-- ******************************************* TODO: Remove -->
++ <dependency> <!-- TODO: Remove -->
++ <groupId>org.slf4j</groupId> <!-- TODO: Remove -->
++ <artifactId>slf4j-api</artifactId> <!-- TODO: Remove -->
++ <version>1.7.25</version> <!-- TODO: Remove -->
++ </dependency> <!-- TODO: Remove -->
++ <dependency> <!-- TODO: Remove -->
++ <groupId>org.slf4j</groupId> <!-- TODO: Remove -->
++ <artifactId>slf4j-simple</artifactId> <!-- TODO: Remove -->
++ <version>1.7.25</version> <!-- TODO: Remove -->
++ </dependency> <!-- TODO: Remove -->
++ <dependency> <!-- TODO: Remove -->
++ <groupId>org.slf4j</groupId> <!-- TODO: Remove -->
++ <artifactId>jcl-over-slf4j</artifactId> <!-- TODO: Remove -->
++ <version>1.7.25</version> <!-- TODO: Remove -->
++ </dependency> <!-- TODO: Remove -->
++ <dependency> <!-- TODO: Remove -->
++ <groupId>org.slf4j</groupId> <!-- TODO: Remove -->
++ <artifactId>jul-to-slf4j</artifactId> <!-- TODO: Remove -->
++ <version>1.7.25</version> <!-- TODO: Remove -->
++ </dependency> <!-- TODO: Remove -->
++ <!-- ******************************************* TODO: Remove -->
+ <!-- project -->
+ <dependency>
+ <groupId>ch.post.it.paisa.eagle</groupId>
+diff --git a/eagle-process/src/main/java/ch/post/it/paisa/eagle/process/main/MainVerticle.java b/eagle-process/src/main/java/ch/post/it/paisa/eagle/process/main/MainVerticle.java
+index 13ebdc51..9e947a2d 100644
+--- a/eagle-process/src/main/java/ch/post/it/paisa/eagle/process/main/MainVerticle.java
++++ b/eagle-process/src/main/java/ch/post/it/paisa/eagle/process/main/MainVerticle.java
+@@ -268,7 +268,7 @@ public class MainVerticle extends AbstractVerticle {
+ doubleSlashCheckHandler = new DoubleSlashCheckHandler(doubleSlashCheckerMode);
+ ClasspathResourceHandler classpathResourceHandler = new ClasspathResourceHandler("static-web-apps/", SERVER_ROOT + "/apps/");
+
+- NSyncHandler nSyncHandler = new NSyncHandler(vertx, EAGLE_NSYNC_PATH, mainPort);
++ //NSyncHandler nSyncHandler = new NSyncHandler(vertx, EAGLE_NSYNC_PATH, mainPort);
+
+ ReturnHttpErrorHandler returnHttpErrorHandler = new ReturnHttpErrorHandler(RETURN_HTTP_ERROR_ROOT);
+
+@@ -325,7 +325,7 @@ public class MainVerticle extends AbstractVerticle {
+ .withLoggingResourceManager(loggingResourceManager)
+ .withMonitoringHandler(monitoringHandler)
+ .withHttpClientFactory(this::createHttpClientForRouter)
+- .addDoneHandler(aVoid -> this.onRouterReady(selfClient, classpathResourceHandler, returnHttpErrorHandler, nSyncHandler))
++ .addDoneHandler(aVoid -> this.onRouterReady(selfClient, classpathResourceHandler, returnHttpErrorHandler, null))
+ .build();
+ });
+ });
+@@ -412,9 +412,9 @@ public class MainVerticle extends AbstractVerticle {
+ if (hookHandler.handle(request)) {
+ return;
+ }
+- if (nSyncHandler.handle(request)) {
+- return;
+- }
++ //if (nSyncHandler.handle(request)) {
++ // return;
++ //}
+ if (eventBusHandler.handle(request)) {
+ return;
+ }
+@@ -559,7 +559,7 @@ public class MainVerticle extends AbstractVerticle {
+ final Future<String> storageDataFuture = newLoggingFuture.apply("storage-data" );
+ final Future<String> mirrorModFuture = newLoggingFuture.apply("MirrorMod" );
+ final Future<String> metricsModuleFuture = newLoggingFuture.apply("MetricsModule" );
+- final Future<String> nsyncFuture = newLoggingFuture.apply("NSync" );
++ //final Future<String> nsyncFuture = newLoggingFuture.apply("NSync" );
+ CompositeFuture.all(futuresToWaitFor).setHandler(handler);
+
+
+@@ -721,10 +721,10 @@ public class MainVerticle extends AbstractVerticle {
+ //////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
+ {
+- NSyncVerticleConfig nSyncVerticleConfig = new NSyncVerticleConfig()
+- .withBasePath(EAGLE_NSYNC_PATH)
+- .withMainPort(mainPort);
+- DeploymentOptions deplOpt = new DeploymentOptions().setConfig(JsonObject.mapFrom(nSyncVerticleConfig));
+- vertx.deployVerticle(new NSyncVerticle(), deplOpt, nsyncFuture.completer());
++ //NSyncVerticleConfig nSyncVerticleConfig = new NSyncVerticleConfig()
++ // .withBasePath(EAGLE_NSYNC_PATH)
++ // .withMainPort(mainPort);
++ //DeploymentOptions deplOpt = new DeploymentOptions().setConfig(JsonObject.mapFrom(nSyncVerticleConfig));
++ //vertx.deployVerticle(new NSyncVerticle(), deplOpt, nsyncFuture.completer());
+ }
+
+ vertx.deployVerticle(new DirectMemoryUsageLogger(), new DeploymentOptions().setConfig(new JsonObject(props)));
diff --git a/src/main/patch/eagle/default-bak20231024-082300.patch b/src/main/patch/eagle/default-bak20231024-082300.patch
new file mode 100644
index 0000000..5578433
--- /dev/null
+++ b/src/main/patch/eagle/default-bak20231024-082300.patch
@@ -0,0 +1,101 @@
+
+ General patch to fix crappy design.
+
+ Contains:
+ - Logging override to get back control over logging.
+ - Listen on localhost only (DISABLED for zarniwoop)
+ - Disable NSync. To suppress that useless noise.
+
+ Based on "326188f9ed8830cce3ec9865ea3598945726c308" from "2023-02-13" near
+ "eagle-02.01.10.00".
+
+
+diff --git a/eagle-process/pom.xml b/eagle-process/pom.xml
+index 5b226670..45acc276 100644
+--- a/eagle-process/pom.xml
++++ b/eagle-process/pom.xml
+@@ -23,6 +23,23 @@
+ </properties>
+
+ <dependencies>
++ <!-- ******************************************* TODO: Remove -->
++ <dependency> <!-- TODO: Remove -->
++ <groupId>org.slf4j</groupId> <!-- TODO: Remove -->
++ <artifactId>slf4j-simple</artifactId> <!-- TODO: Remove -->
++ <version>${slf4j.version}</version> <!-- TODO: Remove -->
++ </dependency> <!-- TODO: Remove -->
++ <dependency> <!-- TODO: Remove -->
++ <groupId>org.slf4j</groupId> <!-- TODO: Remove -->
++ <artifactId>jcl-over-slf4j</artifactId> <!-- TODO: Remove -->
++ <version>${slf4j.version}</version> <!-- TODO: Remove -->
++ </dependency> <!-- TODO: Remove -->
++ <dependency> <!-- TODO: Remove -->
++ <groupId>org.slf4j</groupId> <!-- TODO: Remove -->
++ <artifactId>jul-to-slf4j</artifactId> <!-- TODO: Remove -->
++ <version>${slf4j.version}</version> <!-- TODO: Remove -->
++ </dependency> <!-- TODO: Remove -->
++ <!-- ******************************************* TODO: Remove -->
+ <!-- project -->
+ <dependency>
+ <groupId>ch.post.it.paisa.eagle</groupId>
+diff --git a/eagle-process/src/main/java/ch/post/it/paisa/eagle/process/main/MainVerticle.java b/eagle-process/src/main/java/ch/post/it/paisa/eagle/process/main/MainVerticle.java
+index 13ebdc51..9e947a2d 100644
+--- a/eagle-process/src/main/java/ch/post/it/paisa/eagle/process/main/MainVerticle.java
++++ b/eagle-process/src/main/java/ch/post/it/paisa/eagle/process/main/MainVerticle.java
+@@ -323,7 +323,7 @@ public class MainVerticle extends AbstractVerticle {
+ doubleSlashCheckHandler = new DoubleSlashCheckHandler(doubleSlashCheckerMode);
+ ClasspathResourceHandler classpathResourceHandler = new ClasspathResourceHandler("static-web-apps/", SERVER_ROOT + "/apps/");
+
+- NSyncHandler nSyncHandler = new NSyncHandler(vertx, EAGLE_NSYNC_PATH, mainPort);
++ //NSyncHandler nSyncHandler = new NSyncHandler(vertx, EAGLE_NSYNC_PATH, mainPort); /*TODO revert*/
+
+ ReturnHttpErrorHandler returnHttpErrorHandler = new ReturnHttpErrorHandler(RETURN_HTTP_ERROR_ROOT);
+
+@@ -387,7 +387,7 @@ public class MainVerticle extends AbstractVerticle {
+ .withLoggingResourceManager(loggingResourceManager)
+ .withMonitoringHandler(monitoringHandler)
+ .withHttpClientFactory(this::createHttpClientForRouter)
+- .addDoneHandler(aVoid -> this.onRouterReady(selfClient, classpathResourceHandler, returnHttpErrorHandler, nSyncHandler, authorizer))
++ .addDoneHandler(aVoid -> this.onRouterReady(selfClient, classpathResourceHandler, returnHttpErrorHandler, null, authorizer))
+ .build();
+ });
+ });
+@@ -476,9 +476,9 @@ public class MainVerticle extends AbstractVerticle {
+ if (hookHandler.handle(request)) {
+ return;
+ }
+- if (nSyncHandler.handle(request)) {
+- return;
+- }
++ //if (nSyncHandler.handle(request)) {
++ // return;
++ //}
+ if (eventBusHandler.handle(request)) {
+ return;
+ }
+@@ -624,7 +624,7 @@ public class MainVerticle extends AbstractVerticle {
+ final Promise<String> storageLogPromise = newLoggingFuture.apply("storage-log" );
+ final Promise<String> storageDataPromise = newLoggingFuture.apply("storage-data" );
+ final Promise<String> metricsModulePromise = newLoggingFuture.apply("MetricsModule" );
+- final Promise<String> nsyncPromise = newLoggingFuture.apply("NSync" );
++ //final Promise<String> nsyncPromise = newLoggingFuture.apply("NSync" ); /*TODO revert*/
+ CompositeFuture.all(futuresToWaitFor).onComplete(handler);
+
+
+@@ -776,10 +776,10 @@ public class MainVerticle extends AbstractVerticle {
+ //////////////////////////////////// NSync /////////////////////////////////////////////////////////////////////////
+ //////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
+ {
+- NSyncVerticleConfig nSyncVerticleConfig = new NSyncVerticleConfig()
+- .withBasePath(EAGLE_NSYNC_PATH)
+- .withMainPort(mainPort);
+- DeploymentOptions deplOpt = new DeploymentOptions().setConfig(JsonObject.mapFrom(nSyncVerticleConfig));
+- vertx.deployVerticle(new NSyncVerticle(), deplOpt, nsyncPromise);
++ // TODO NSyncVerticleConfig nSyncVerticleConfig = new NSyncVerticleConfig()
++ // TODO .withBasePath(EAGLE_NSYNC_PATH)
++ // TODO .withMainPort(mainPort);
++ // TODO DeploymentOptions deplOpt = new DeploymentOptions().setConfig(JsonObject.mapFrom(nSyncVerticleConfig));
++ // TODO vertx.deployVerticle(new NSyncVerticle(), deplOpt, nsyncPromise);
+ }
+
+ vertx.deployVerticle(new DirectMemoryUsageLogger(), new DeploymentOptions().setConfig(new JsonObject(props)));
diff --git a/src/main/patch/eagle/default.patch b/src/main/patch/eagle/default.patch
new file mode 100644
index 0000000..c0b6785
--- /dev/null
+++ b/src/main/patch/eagle/default.patch
@@ -0,0 +1,101 @@
+
+ General patch to fix crappy design.
+
+ Contains:
+ - Logging override to get back control over logging.
+ - Listen on localhost only (DISABLED for zarniwoop)
+ - Disable NSync. To suppress that useless noise.
+
+ Based on "326188f9ed8830cce3ec9865ea3598945726c308" from "2023-02-13" near
+ "eagle-02.01.10.00".
+
+
+diff --git a/eagle-process/pom.xml b/eagle-process/pom.xml
+index 5b226670..45acc276 100644
+--- a/eagle-process/pom.xml
++++ b/eagle-process/pom.xml
+@@ -23,6 +23,23 @@
+ </properties>
+
+ <dependencies>
++ <!-- ******************************************* TODO: Remove -->
++ <dependency> <!-- TODO: Remove -->
++ <groupId>org.slf4j</groupId> <!-- TODO: Remove -->
++ <artifactId>slf4j-simple</artifactId> <!-- TODO: Remove -->
++ <version>${slf4j.version}</version> <!-- TODO: Remove -->
++ </dependency> <!-- TODO: Remove -->
++ <dependency> <!-- TODO: Remove -->
++ <groupId>org.slf4j</groupId> <!-- TODO: Remove -->
++ <artifactId>jcl-over-slf4j</artifactId> <!-- TODO: Remove -->
++ <version>${slf4j.version}</version> <!-- TODO: Remove -->
++ </dependency> <!-- TODO: Remove -->
++ <dependency> <!-- TODO: Remove -->
++ <groupId>org.slf4j</groupId> <!-- TODO: Remove -->
++ <artifactId>jul-to-slf4j</artifactId> <!-- TODO: Remove -->
++ <version>${slf4j.version}</version> <!-- TODO: Remove -->
++ </dependency> <!-- TODO: Remove -->
++ <!-- ******************************************* TODO: Remove -->
+ <!-- project -->
+ <dependency>
+ <groupId>ch.post.it.paisa.eagle</groupId>
+diff --git a/eagle-process/src/main/java/ch/post/it/paisa/eagle/process/main/MainVerticle.java b/eagle-process/src/main/java/ch/post/it/paisa/eagle/process/main/MainVerticle.java
+index 13ebdc51..9e947a2d 100644
+--- a/eagle-process/src/main/java/ch/post/it/paisa/eagle/process/main/MainVerticle.java
++++ b/eagle-process/src/main/java/ch/post/it/paisa/eagle/process/main/MainVerticle.java
+@@ -367,7 +367,7 @@ public class MainVerticle extends AbstractVerticle {
+ ClasspathResourceHandler classpathResourceHandler =
+ new ClasspathResourceHandler("static-web-apps/", SERVER_ROOT + "/apps/");
+
+- NSyncHandler nSyncHandler = new NSyncHandler(vertx, EAGLE_NSYNC_PATH, mainPort);
++ //NSyncHandler nSyncHandler = new NSyncHandler(vertx, EAGLE_NSYNC_PATH, mainPort);
+
+ ReturnHttpErrorHandler returnHttpErrorHandler =
+ new ReturnHttpErrorHandler(RETURN_HTTP_ERROR_ROOT);
+@@ -470,7 +470,7 @@ public class MainVerticle extends AbstractVerticle {
+ selfClient,
+ classpathResourceHandler,
+ returnHttpErrorHandler,
+- nSyncHandler,
++ null,
+ authorizer))
+ .build();
+ });
+@@ -607,9 +607,9 @@ public class MainVerticle extends AbstractVerticle {
+ if (hookHandler.handle(request)) {
+ return;
+ }
+- if (nSyncHandler.handle(request)) {
+- return;
+- }
++ //if (nSyncHandler.handle(request)) {
++ // return;
++ //}
+ if (eventBusHandler.handle(request)) {
+ return;
+ }
+@@ -777,7 +777,7 @@ public class MainVerticle extends AbstractVerticle {
+ final Promise<String> storageLogPromise = newLoggingFuture.apply("storage-log");
+ final Promise<String> storageDataPromise = newLoggingFuture.apply("storage-data");
+ final Promise<String> metricsModulePromise = newLoggingFuture.apply("MetricsModule");
+- final Promise<String> nsyncPromise = newLoggingFuture.apply("NSync");
++ //final Promise<String> nsyncPromise = newLoggingFuture.apply("NSync"); /*TODO revert*/
+ CompositeFuture.all(futuresToWaitFor).onComplete(handler);
+
+ String redisHost = (String) props.get("redis.host");
+@@ -979,10 +979,10 @@ public class MainVerticle extends AbstractVerticle {
+ //////////////////////////////////// NSync
+ ///////////////////////////////////////////////////////////////////////////////////////////////////////
+ {
+- NSyncVerticleConfig nSyncVerticleConfig =
+- new NSyncVerticleConfig().withBasePath(EAGLE_NSYNC_PATH).withMainPort(mainPort);
+- DeploymentOptions deplOpt =
+- new DeploymentOptions().setConfig(JsonObject.mapFrom(nSyncVerticleConfig));
+- vertx.deployVerticle(new NSyncVerticle(), deplOpt, nsyncPromise);
++ //NSyncVerticleConfig nSyncVerticleConfig = /*TODO revert*/
++ // new NSyncVerticleConfig().withBasePath(EAGLE_NSYNC_PATH).withMainPort(mainPort); /*TODO revert*/
++ //DeploymentOptions deplOpt = /*TODO revert*/
++ // new DeploymentOptions().setConfig(JsonObject.mapFrom(nSyncVerticleConfig)); /*TODO revert*/
++ //vertx.deployVerticle(new NSyncVerticle(), deplOpt, nsyncPromise); /*TODO revert*/
+ }
+ }
+
diff --git a/src/main/patch/eagle/simplelogger.patch b/src/main/patch/eagle/simplelogger.patch
new file mode 100644
index 0000000..97cd8de
--- /dev/null
+++ b/src/main/patch/eagle/simplelogger.patch
@@ -0,0 +1,33 @@
+diff --git a/eagle-process/pom.xml b/eagle-process/pom.xml
+index 5b226670..45acc276 100644
+--- a/eagle-process/pom.xml
++++ b/eagle-process/pom.xml
+@@ -21,7 +21,27 @@
+ <skip.node.install>true</skip.node.install>
+ </properties>
+ <dependencies>
+- <!-- project -->
++ <dependency> <!-- TODO: Remove -->
++ <groupId>org.slf4j</groupId> <!-- TODO: Remove -->
++ <artifactId>slf4j-api</artifactId> <!-- TODO: Remove -->
++ <version>1.7.25</version> <!-- TODO: Remove -->
++ </dependency> <!-- TODO: Remove -->
++ <dependency> <!-- TODO: Remove -->
++ <groupId>org.slf4j</groupId> <!-- TODO: Remove -->
++ <artifactId>slf4j-simple</artifactId> <!-- TODO: Remove -->
++ <version>1.7.25</version> <!-- TODO: Remove -->
++ </dependency> <!-- TODO: Remove -->
++ <dependency> <!-- TODO: Remove -->
++ <groupId>org.slf4j</groupId> <!-- TODO: Remove -->
++ <artifactId>jcl-over-slf4j</artifactId> <!-- TODO: Remove -->
++ <version>1.7.25</version> <!-- TODO: Remove -->
++ </dependency> <!-- TODO: Remove -->
++ <dependency> <!-- TODO: Remove -->
++ <groupId>org.slf4j</groupId> <!-- TODO: Remove -->
++ <artifactId>jul-to-slf4j</artifactId> <!-- TODO: Remove -->
++ <version>1.7.25</version> <!-- TODO: Remove -->
++ </dependency> <!-- TODO: Remove -->
++ <!-- project -->
+ <dependency>
+ <groupId>ch.post.it.paisa.eagle</groupId>
+ <artifactId>eagle-domain</artifactId>
diff --git a/src/main/patch/houston/default-20230203.patch b/src/main/patch/houston/default-20230203.patch
deleted file mode 100644
index c1deeca..0000000
--- a/src/main/patch/houston/default-20230203.patch
+++ /dev/null
@@ -1,52 +0,0 @@
-
- My custom patch ready-to-apply to have an "usable" houston.
-
- Contains:
- - Simplelogger
- - Listen localhost only
- - Queue-Retry every 5 seconds.
-
-diff --git a/houston-process/pom.xml b/houston-process/pom.xml
-index fff9c178..960c0098 100644
---- a/houston-process/pom.xml
-+++ b/houston-process/pom.xml
-@@ -20,6 +20,26 @@
- <skip.node.install>true</skip.node.install>
- </properties>
- <dependencies>
-+ <dependency> <!-- TODO: Remove -->
-+ <groupId>org.slf4j</groupId> <!-- TODO: Remove -->
-+ <artifactId>slf4j-api</artifactId> <!-- TODO: Remove -->
-+ <version>1.7.25</version> <!-- TODO: Remove -->
-+ </dependency> <!-- TODO: Remove -->
-+ <dependency> <!-- TODO: Remove -->
-+ <groupId>org.slf4j</groupId> <!-- TODO: Remove -->
-+ <artifactId>slf4j-simple</artifactId> <!-- TODO: Remove -->
-+ <version>1.7.25</version> <!-- TODO: Remove -->
-+ </dependency> <!-- TODO: Remove -->
-+ <dependency> <!-- TODO: Remove -->
-+ <groupId>org.slf4j</groupId> <!-- TODO: Remove -->
-+ <artifactId>jcl-over-slf4j</artifactId> <!-- TODO: Remove -->
-+ <version>1.7.25</version> <!-- TODO: Remove -->
-+ </dependency> <!-- TODO: Remove -->
-+ <dependency> <!-- TODO: Remove -->
-+ <groupId>org.slf4j</groupId> <!-- TODO: Remove -->
-+ <artifactId>jul-to-slf4j</artifactId> <!-- TODO: Remove -->
-+ <version>1.7.25</version> <!-- TODO: Remove -->
-+ </dependency> <!-- TODO: Remove -->
- <!-- project -->
- <dependency>
- <groupId>ch.post.it.paisa.houston</groupId>
-diff --git a/houston-process/src/main/java/ch/post/it/paisa/houston/process/main/Deployer.java b/houston-process/src/main/java/ch/post/it/paisa/houston/process/main/Deployer.java
-index ee7d8b02..b28ae8d6 100644
---- a/houston-process/src/main/java/ch/post/it/paisa/houston/process/main/Deployer.java
-+++ b/houston-process/src/main/java/ch/post/it/paisa/houston/process/main/Deployer.java
-@@ -256,7 +256,7 @@ public class Deployer {
- qc.add(new QueueConfiguration().withPattern("brox-from-vehicles-.*").withRetryIntervals(10, 20, 30, 60, 120)
- .withEnqueueDelayMillisPerSize(10).withEnqueueMaxDelayMillis(10_000));
- // All other queues (typically to backend services) with a slow-down pattern after failed delivery
-- qc.add(new QueueConfiguration().withPattern(".*").withRetryIntervals(10, 20, 30, 60, 120));
-+ qc.add(new QueueConfiguration().withPattern(".*").withRetryIntervals(5));/*TODO revert*/
-
- RedisquesConfiguration redisquesConfig = RedisquesConfiguration.with()
- .address(Address.redisquesAddress())
diff --git a/src/main/patch/houston/default-20230214.patch b/src/main/patch/houston/default-20230214.patch
deleted file mode 100644
index 3f8fa16..0000000
--- a/src/main/patch/houston/default-20230214.patch
+++ /dev/null
@@ -1,56 +0,0 @@
-
- My custom patch ready-to-apply to have an "usable" houston.
-
- Contains:
- - Simplelogger
- - Listen localhost only
- - Queue-Retry every 5 seconds.
-
- Patch based on "houston-02.01.12.00" aka
- "3c61ef7ded53d6340001d2b56cd829d2ae450580" from 2023-01-12.
-
-diff --git a/houston-process/pom.xml b/houston-process/pom.xml
-index fff9c178..960c0098 100644
---- a/houston-process/pom.xml
-+++ b/houston-process/pom.xml
-@@ -20,6 +20,26 @@
- </properties>
-
- <dependencies>
-+ <dependency> <!-- TODO: Remove -->
-+ <groupId>org.slf4j</groupId> <!-- TODO: Remove -->
-+ <artifactId>slf4j-api</artifactId> <!-- TODO: Remove -->
-+ <version>1.7.25</version> <!-- TODO: Remove -->
-+ </dependency> <!-- TODO: Remove -->
-+ <dependency> <!-- TODO: Remove -->
-+ <groupId>org.slf4j</groupId> <!-- TODO: Remove -->
-+ <artifactId>slf4j-simple</artifactId> <!-- TODO: Remove -->
-+ <version>1.7.25</version> <!-- TODO: Remove -->
-+ </dependency> <!-- TODO: Remove -->
-+ <dependency> <!-- TODO: Remove -->
-+ <groupId>org.slf4j</groupId> <!-- TODO: Remove -->
-+ <artifactId>jcl-over-slf4j</artifactId> <!-- TODO: Remove -->
-+ <version>1.7.25</version> <!-- TODO: Remove -->
-+ </dependency> <!-- TODO: Remove -->
-+ <dependency> <!-- TODO: Remove -->
-+ <groupId>org.slf4j</groupId> <!-- TODO: Remove -->
-+ <artifactId>jul-to-slf4j</artifactId> <!-- TODO: Remove -->
-+ <version>1.7.25</version> <!-- TODO: Remove -->
-+ </dependency> <!-- TODO: Remove -->
- <!-- project -->
- <dependency>
- <groupId>ch.post.it.paisa.houston</groupId>
-diff --git a/houston-process/src/main/java/ch/post/it/paisa/houston/process/main/Deployer.java b/houston-process/src/main/java/ch/post/it/paisa/houston/process/main/Deployer.java
-index ee7d8b02..b28ae8d6 100644
---- a/houston-process/src/main/java/ch/post/it/paisa/houston/process/main/Deployer.java
-+++ b/houston-process/src/main/java/ch/post/it/paisa/houston/process/main/Deployer.java
-@@ -309,7 +309,7 @@ public class Deployer {
- qc.add(new QueueConfiguration().withPattern("timetable-situation-trips-for-aws").withRetryIntervals(standardDelays)
- .withEnqueueDelayMillisPerSize(10).withEnqueueMaxDelayMillis(100));
- // All other queues (typically to backend services) with a slow-down pattern after failed delivery
-- qc.add(new QueueConfiguration().withPattern(".*").withRetryIntervals(standardDelays));
-+ qc.add(new QueueConfiguration().withPattern(".*").withRetryIntervals(5/*TODO revert*/));
-
- RedisquesConfiguration redisquesConfig = RedisquesConfiguration.with()
- .address(Address.redisquesAddress())
-
diff --git a/src/main/patch/houston/default-20230331.patch b/src/main/patch/houston/default-20230331.patch
deleted file mode 100644
index 64d3628..0000000
--- a/src/main/patch/houston/default-20230331.patch
+++ /dev/null
@@ -1,56 +0,0 @@
-
- My custom patch ready-to-apply to have an "usable" houston.
-
- Contains:
- - Simplelogger
- - Listen localhost only
- - Queue-Retry every 5 seconds.
-
- Patch based on "houston-02.01.12.00" aka
- "3c61ef7ded53d6340001d2b56cd829d2ae450580" from 2023-01-12.
-
-diff --git a/houston-process/pom.xml b/houston-process/pom.xml
-index fff9c178..960c0098 100644
---- a/houston-process/pom.xml
-+++ b/houston-process/pom.xml
-@@ -20,6 +20,26 @@
- </properties>
-
- <dependencies>
-+ <dependency> <!-- TODO: Remove -->
-+ <groupId>org.slf4j</groupId> <!-- TODO: Remove -->
-+ <artifactId>slf4j-api</artifactId> <!-- TODO: Remove -->
-+ <version>2.0.1</version> <!-- TODO: Remove -->
-+ </dependency> <!-- TODO: Remove -->
-+ <dependency> <!-- TODO: Remove -->
-+ <groupId>org.slf4j</groupId> <!-- TODO: Remove -->
-+ <artifactId>slf4j-simple</artifactId> <!-- TODO: Remove -->
-+ <version>2.0.1</version> <!-- TODO: Remove -->
-+ </dependency> <!-- TODO: Remove -->
-+ <dependency> <!-- TODO: Remove -->
-+ <groupId>org.slf4j</groupId> <!-- TODO: Remove -->
-+ <artifactId>jcl-over-slf4j</artifactId> <!-- TODO: Remove -->
-+ <version>2.0.1</version> <!-- TODO: Remove -->
-+ </dependency> <!-- TODO: Remove -->
-+ <dependency> <!-- TODO: Remove -->
-+ <groupId>org.slf4j</groupId> <!-- TODO: Remove -->
-+ <artifactId>jul-to-slf4j</artifactId> <!-- TODO: Remove -->
-+ <version>2.0.1</version> <!-- TODO: Remove -->
-+ </dependency> <!-- TODO: Remove -->
- <!-- project -->
- <dependency>
- <groupId>ch.post.it.paisa.houston</groupId>
-diff --git a/houston-process/src/main/java/ch/post/it/paisa/houston/process/main/Deployer.java b/houston-process/src/main/java/ch/post/it/paisa/houston/process/main/Deployer.java
-index ee7d8b02..b28ae8d6 100644
---- a/houston-process/src/main/java/ch/post/it/paisa/houston/process/main/Deployer.java
-+++ b/houston-process/src/main/java/ch/post/it/paisa/houston/process/main/Deployer.java
-@@ -309,7 +309,7 @@ public class Deployer {
- qc.add(new QueueConfiguration().withPattern("timetable-situation-trips-for-aws").withRetryIntervals(standardDelays)
- .withEnqueueDelayMillisPerSize(10).withEnqueueMaxDelayMillis(100));
- // All other queues (typically to backend services) with a slow-down pattern after failed delivery
-- qc.add(new QueueConfiguration().withPattern(".*").withRetryIntervals(standardDelays));
-+ qc.add(new QueueConfiguration().withPattern(".*").withRetryIntervals(5/*TODO revert*/));
-
- RedisquesConfiguration redisquesConfig = RedisquesConfiguration.with()
- .address(Address.redisquesAddress())
-
diff --git a/src/main/patch/houston/default.patch b/src/main/patch/houston/default.patch
index d70b12b..b5b7639 100644
--- a/src/main/patch/houston/default.patch
+++ b/src/main/patch/houston/default.patch
@@ -2,29 +2,55 @@
Custom houston patch to have a "usable" service at all.
Patch based on "develop" aka
- "3b1275e123c2b7aa2ffaa34270a5e1a373a65993" from "2023-04-27".
+ "497a9477c9e2100130f9a29ec130c1131220c935" from "2024-04-22".
-diff --git a/pom.xml b/pom.xml
-index 0ed4f7f3..b44c5693 100644
--- a/pom.xml
+++ b/pom.xml
-@@ -72,8 +72,6 @@
- <skip.wagon>false</skip.wagon>
+@@ -73,7 +73,7 @@
<skip.copy-dependencies>false</skip.copy-dependencies>
-- <!-- spotless -->
+ <!-- spotless -->
- <source.format.apply.phase>compile</source.format.apply.phase>
- </properties>
++ <source.format.apply.phase>none</source.format.apply.phase>
- <scm>
-diff --git a/houston-process/pom.xml b/houston-process/pom.xml
-index 374dcb97..3c24937c 100644
+ <!-- JavaMelody -->
+ <jetty.version>9.4.43.v20210629</jetty.version>
+
+
+--- a/pom.xml
++++ b/pom.xml
+@@ -301,4 +301,25 @@
+ </properties>
+ </profile>
+ </profiles>
++ <build>
++ <plugins>
++ <plugin>
++ <groupId>com.diffplug.spotless</groupId>
++ <artifactId>spotless-maven-plugin</artifactId>
++ <executions>
++ <execution>
++ <id>spotless-apply</id>
++ <phase>none</phase>
++ </execution>
++ <execution>
++ <id>spotless-check</id>
++ <phase>none</phase>
++ </execution>
++ </executions>
++ <configuration>
++ <skip>true</skip>
++ </configuration>
++ </plugin>
++ </plugins>
++ </build>
+ </project>
+
+
--- a/houston-process/pom.xml
+++ b/houston-process/pom.xml
-@@ -25,6 +25,26 @@
- </properties>
-
+@@ -27,3 +27,23 @@
<dependencies>
+ <dependency> <!-- TODO: Remove -->
+ <groupId>org.slf4j</groupId> <!-- TODO: Remove -->
@@ -48,27 +74,43 @@ index 374dcb97..3c24937c 100644
+ </dependency> <!-- TODO: Remove -->
<!-- project -->
<dependency>
- <groupId>ch.post.it.paisa.houston</groupId>
-diff --git a/houston-process/src/main/java/ch/post/it/paisa/houston/process/main/Deployer.java b/houston-process/src/main/java/ch/post/it/paisa/houston/process/main/Deployer.java
-index 432efb01..d1729fe9 100644
+
+
+--- a/houston-process/pom.xml
++++ b/houston-process/pom.xml
+@@ -212,6 +232,2 @@
+ </dependency>
+- <dependency>
+- <groupId>org.apache.logging.log4j</groupId>
+- <artifactId>log4j-slf4j2-impl</artifactId>
+- </dependency>
+ <dependency>
+
+
--- a/houston-process/src/main/java/ch/post/it/paisa/houston/process/main/Deployer.java
+++ b/houston-process/src/main/java/ch/post/it/paisa/houston/process/main/Deployer.java
-@@ -68,6 +68,9 @@ public class Deployer {
+@@ -98,6 +98,9 @@ public class Deployer {
private static final Logger LOGGER = LoggerFactory.getLogger(Deployer.class);
public static void main(String[] args) throws Exception {
+ boolean isAssertIsEnabled = false;
+ assert isAssertIsEnabled = true;
+ if (!isAssertIsEnabled) throw new UnsupportedOperationException("Enable assertions to fix this problem -> https://stackoverflow.com/a/68893479/4415884");
- setStartupProperties();
- Props.prepare();
+ throwIfLoggerAmbiguous();
-@@ -378,7 +378,7 @@ public class Deployer {
+ configureObjectMapper();
+
+
+--- a/houston-process/src/main/java/ch/post/it/paisa/houston/process/main/Deployer.java
++++ b/houston-process/src/main/java/ch/post/it/paisa/houston/process/main/Deployer.java
+@@ -471,7 +477,7 @@ public class Deployer {
// All other queues (typically to backend services) with a slow-down pattern after
// failed delivery
qc.add(
- new QueueConfiguration().withPattern(".*").withRetryIntervals(standardDelays));
+ new QueueConfiguration().withPattern(".*").withRetryIntervals(5/*TODO revert*/));
-
- RedisquesConfiguration redisquesConfig =
- RedisquesConfiguration.with()
+ LOGGER.info(
+ "Redisques redis-client will created with MaxPoolSize: {}, MaxPoolWaiting: {}, MaxWaitingHandlers: {}",
+ Props.getMaxRedisConnectionPoolSize4RedisQues(),
+
+
diff --git a/src/main/patch/houston/fixidiots.patch b/src/main/patch/houston/fixidiots.patch
new file mode 100644
index 0000000..7af8f9d
--- /dev/null
+++ b/src/main/patch/houston/fixidiots.patch
@@ -0,0 +1,365 @@
+
+ Why is it so fucking hard to just keep out all those random annoying logger
+ implementations?!?
+
+ Who the heck wants to configure all of them, and if ONE is missed just have
+ all important error reports concealed to the nirvana? Who the fuck wants such
+ shit?
+
+ Please: STOP THIS SHIT! Libraries solely have to depend on slf4j. As its name
+ already says, it is a FACADE! NOT AN IMPLEMENTATION! STOP MESSING THIS UP ALL
+ THE TIME WITH YET ANOTHER NEW SHITTY NERDY LOGGER IMPL!
+
+
+--- /dev/null
++++ b/houston-process/src/main/java/org/apache/logging/slf4j/Log4jLogger.java
+@@ -0,0 +1,109 @@
++package org.apache.logging.slf4j;
++
++import org.apache.logging.log4j.spi.ExtendedLogger;
++import org.slf4j.Logger;
++import org.slf4j.Marker;
++import org.slf4j.event.Level;
++import org.slf4j.spi.LocationAwareLogger;
++import org.slf4j.spi.LoggingEventBuilder;
++
++import java.io.Serializable;
++import java.lang.reflect.Constructor;
++import java.lang.reflect.InvocationTargetException;
++import java.lang.reflect.Method;
++
++
++/** <p>FU** this fu***** damn sh** code that still tries to use log4j, no matter
++ * how strong we tell it NOT to use it!</p>
++ * <p>This class only exists to prevent services from starting if IDEA still did miss
++ * the dependency changes in pom and still tries to use the wrong logger impl.</p> */
++public class Log4jLogger implements LocationAwareLogger, Serializable {
++
++ private final org.slf4j.Logger log;
++
++ Log4jLogger(final Log4jMarkerFactory markerFactory, final ExtendedLogger logger, final String name) {
++ try {
++ Class<?> logrFactClz = Class.forName("org.slf4j.simple.SimpleLoggerFactory");
++ Constructor<?> ctor = logrFactClz.getConstructor();
++ Method getLoggerFn = logrFactClz.getMethod("getLogger", String.class);
++ this.log = (Logger) getLoggerFn.invoke(ctor.newInstance(), name);
++ } catch (ClassNotFoundException | NoSuchMethodException | InstantiationException | IllegalAccessException | InvocationTargetException ex) {
++ throw new UnsupportedOperationException(ex);
++ }
++ }
++
++ @Override public void log(Marker marker, String s, int i, String s1, Object[] objects, Throwable throwable) {
++ throw new UnsupportedOperationException(/*TODO*/"Not impl yet");
++ }
++
++ @Override public String getName() { return log.getName(); }
++ @Override public LoggingEventBuilder makeLoggingEventBuilder(Level level) { return log.makeLoggingEventBuilder(level); }
++ @Override public LoggingEventBuilder atLevel(Level level) { return log.atLevel(level); }
++ @Override public boolean isEnabledForLevel(Level level) { return log.isEnabledForLevel(level); }
++ @Override public boolean isTraceEnabled() { return log.isTraceEnabled(); }
++ @Override public void trace(String s) { log.trace(s); }
++ @Override public void trace(String s, Object o) { log.trace(s, o); }
++ @Override public void trace(String s, Object o, Object o1) { log.trace(s, o, o1); }
++ @Override public void trace(String s, Object... objects) { log.trace(s, objects); }
++ @Override public void trace(String s, Throwable throwable) { log.trace(s, throwable); }
++ @Override public boolean isTraceEnabled(Marker marker) { return log.isTraceEnabled(marker); }
++ @Override public LoggingEventBuilder atTrace() { return log.atTrace(); }
++ @Override public void trace(Marker marker, String s) { log.trace(marker, s); }
++ @Override public void trace(Marker marker, String s, Object o) { log.trace(marker, s, o); }
++ @Override public void trace(Marker marker, String s, Object o, Object o1) { log.trace(marker, s, o, o1); }
++ @Override public void trace(Marker marker, String s, Object... objects) { log.trace(marker, s, objects); }
++ @Override public void trace(Marker marker, String s, Throwable throwable) { log.trace(marker, s, throwable); }
++ @Override public boolean isDebugEnabled() { return log.isDebugEnabled(); }
++ @Override public void debug(String s) { log.debug(s); }
++ @Override public void debug(String s, Object o) { log.debug(s, o); }
++ @Override public void debug(String s, Object o, Object o1) { log.debug(s, o, o1); }
++ @Override public void debug(String s, Object... objects) { log.debug(s, objects); }
++ @Override public void debug(String s, Throwable throwable) { log.debug(s, throwable); }
++ @Override public boolean isDebugEnabled(Marker marker) { return log.isDebugEnabled(marker); }
++ @Override public void debug(Marker marker, String s) { log.debug(marker, s); }
++ @Override public void debug(Marker marker, String s, Object o) { log.debug(marker, s, o); }
++ @Override public void debug(Marker marker, String s, Object o, Object o1) { log.debug(marker, s, o, o1); }
++ @Override public void debug(Marker marker, String s, Object... objects) { log.debug(marker, s, objects); }
++ @Override public void debug(Marker marker, String s, Throwable throwable) { log.debug(marker, s, throwable); }
++ @Override public LoggingEventBuilder atDebug() { return log.atDebug(); }
++ @Override public boolean isInfoEnabled() { return log.isInfoEnabled(); }
++ @Override public void info(String s) { log.info(s); }
++ @Override public void info(String s, Object o) { log.info(s, o); }
++ @Override public void info(String s, Object o, Object o1) { log.info(s, o, o1); }
++ @Override public void info(String s, Object... objects) { log.info(s, objects); }
++ @Override public void info(String s, Throwable throwable) { log.info(s, throwable); }
++ @Override public boolean isInfoEnabled(Marker marker) { return log.isInfoEnabled(marker); }
++ @Override public void info(Marker marker, String s) { log.info(marker, s); }
++ @Override public void info(Marker marker, String s, Object o) { log.info(marker, s, o); }
++ @Override public void info(Marker marker, String s, Object o, Object o1) { log.info(marker, s, o, o1); }
++ @Override public void info(Marker marker, String s, Object... objects) { log.info(marker, s, objects); }
++ @Override public void info(Marker marker, String s, Throwable throwable) { log.info(marker, s, throwable); }
++ @Override public LoggingEventBuilder atInfo() { return log.atInfo(); }
++ @Override public boolean isWarnEnabled() { return log.isWarnEnabled(); }
++ @Override public void warn(String s) { log.warn(s); }
++ @Override public void warn(String s, Object o) { log.warn(s, o); }
++ @Override public void warn(String s, Object... objects) { log.warn(s, objects); }
++ @Override public void warn(String s, Object o, Object o1) { log.warn(s, o, o1); }
++ @Override public void warn(String s, Throwable throwable) { log.warn(s, throwable); }
++ @Override public boolean isWarnEnabled(Marker marker) { return log.isWarnEnabled(marker); }
++ @Override public void warn(Marker marker, String s) { log.warn(marker, s); }
++ @Override public void warn(Marker marker, String s, Object o) { log.warn(marker, s, o); }
++ @Override public void warn(Marker marker, String s, Object o, Object o1) { log.warn(marker, s, o, o1); }
++ @Override public void warn(Marker marker, String s, Object... objects) { log.warn(marker, s, objects); }
++ @Override public void warn(Marker marker, String s, Throwable throwable) { log.warn(marker, s, throwable); }
++ @Override public LoggingEventBuilder atWarn() { return log.atWarn(); }
++ @Override public boolean isErrorEnabled() { return log.isErrorEnabled(); }
++ @Override public void error(String s) { log.error(s); }
++ @Override public void error(String s, Object o) { log.error(s, o); }
++ @Override public void error(String s, Object o, Object o1) { log.error(s, o, o1); }
++ @Override public void error(String s, Object... objects) { log.error(s, objects); }
++ @Override public void error(String s, Throwable throwable) { log.error(s, throwable); }
++ @Override public boolean isErrorEnabled(Marker marker) { return log.isErrorEnabled(marker); }
++ @Override public void error(Marker marker, String s) { log.error(marker, s); }
++ @Override public void error(Marker marker, String s, Object o) { log.error(marker, s, o); }
++ @Override public void error(Marker marker, String s, Object o, Object o1) { log.error(marker, s, o, o1); }
++ @Override public void error(Marker marker, String s, Object... objects) { log.error(marker, s, objects); }
++ @Override public void error(Marker marker, String s, Throwable throwable) { log.error(marker, s, throwable); }
++ @Override public LoggingEventBuilder atError() { return log.atError(); }
++
++}
+
+
+
+--- /dev/null
++++ b/houston-process/src/main/java/org/slf4j/reload4j/Reload4jLoggerAdapter.java
+@@ -0,0 +1,16 @@
++package org.slf4j.reload4j;
++
++/** <p>FU** this fu***** damn sh** code that still tries to use log4j, no matter
++ * how strong we tell it NOT to use it!</p>
++ * <p>This class only exists to prevent services from starting if IDEA still did miss
++ * the dependency changes in pom and still tries to use the wrong logger impl.</p> */
++public class Reload4jLoggerAdapter {
++
++ public Reload4jLoggerAdapter(org.apache.log4j.Logger l) {
++ throw new UnsupportedOperationException("Fuck those fucking script-kiddies!"
++ + " How fucking hard can it be to just properly setup logging?!?"
++ + " Please !STOP! intermixing interfaces with implementations all the time!"
++ + " This fucking shit just conceals erros all the time! STOP IT!");
++ }
++
++}
+
+
+--- /dev/null
++++ b/houston-process/src/main/java/org/slf4j/reload4j/Reload4jServiceProvider.java
+@@ -0,0 +1,64 @@
++package org.slf4j.reload4j;
++
++import org.slf4j.ILoggerFactory;
++import org.slf4j.IMarkerFactory;
++import org.slf4j.Logger;
++import org.slf4j.Marker;
++import org.slf4j.spi.MDCAdapter;
++import org.slf4j.spi.SLF4JServiceProvider;
++
++import java.lang.reflect.InvocationTargetException;
++import java.lang.reflect.Method;
++import java.util.Deque;
++import java.util.Map;
++
++
++/** <p>How many of those fu**ing damn stupid idiots are still out there
++ * continuing to stubbornly include those stupid logger impls with their
++ * libraries?!?</p> */
++public class Reload4jServiceProvider implements SLF4JServiceProvider, ILoggerFactory, IMarkerFactory, MDCAdapter {
++
++ private final Object slf4jSimpleLoggerFactory;
++ private final Method getLoggerFn;
++
++ public Reload4jServiceProvider() {
++ try {
++ Class<?> logrFactClz = Class.forName("org.slf4j.simple.SimpleLoggerFactory");
++ slf4jSimpleLoggerFactory = logrFactClz.getConstructor().newInstance();
++ getLoggerFn = logrFactClz.getMethod("getLogger", String.class);
++ } catch (ClassNotFoundException | NoSuchMethodException | InstantiationException | IllegalAccessException | InvocationTargetException ex) {
++ throw new UnsupportedOperationException(ex);
++ }
++ }
++
++ @Override public ILoggerFactory getLoggerFactory() { return this; }
++ @Override public IMarkerFactory getMarkerFactory() { return this; }
++ @Override public MDCAdapter getMDCAdapter() { return this; }
++ @Override public String getRequestedApiVersion() { return "2.0"; }
++ @Override public void initialize() {}
++
++ @Override
++ public Logger getLogger(String name) {
++ try {
++ return (Logger) getLoggerFn.invoke(slf4jSimpleLoggerFactory, name);
++ } catch (IllegalAccessException | InvocationTargetException ex) {
++ throw new RuntimeException(ex);
++ }
++ }
++
++ @Override public Marker getMarker(String s) { assert false : "TODO not impl yet"; return null; }
++ @Override public boolean exists(String s) { assert false : "TODO not impl yet"; return false; }
++ @Override public boolean detachMarker(String s) { assert false : "TODO not impl yet"; return false; }
++ @Override public Marker getDetachedMarker(String s) { assert false : "TODO not impl yet"; return null; }
++ @Override public void put(String s, String s1) { assert false : "TODO not impl yet"; }
++ @Override public String get(String s) { assert false : "TODO not impl yet"; return null; }
++ @Override public void remove(String s) { assert false : "TODO not impl yet"; }
++ @Override public void clear() { assert false : "TODO not impl yet"; }
++ @Override public Map<String, String> getCopyOfContextMap() { assert false : "TODO not impl yet"; return null; }
++ @Override public void setContextMap(Map<String, String> map) { assert false : "TODO not impl yet"; }
++ @Override public void pushByKey(String s, String s1) { assert false : "TODO not impl yet"; }
++ @Override public String popByKey(String s) { assert false : "TODO not impl yet"; return null; }
++ @Override public Deque<String> getCopyOfDequeByKey(String s) { assert false : "TODO not impl yet"; return null; }
++ @Override public void clearDequeByKey(String s) { assert false : "TODO not impl yet"; }
++
++}
+
+
+--- /dev/null
++++ b/houston-process/src/main/java/org/apache/logging/slf4j/SLF4JServiceProvider.java
+@@ -0,0 +1,62 @@
++package org.apache.logging.slf4j;
++
++import org.slf4j.ILoggerFactory;
++import org.slf4j.IMarkerFactory;
++import org.slf4j.Logger;
++import org.slf4j.Marker;
++import org.slf4j.spi.MDCAdapter;
++
++import java.lang.reflect.InvocationTargetException;
++import java.lang.reflect.Method;
++import java.util.Deque;
++import java.util.Map;
++
++
++/** <p>How many of those fu**ing damn stupid idiotic libs are still out there
++ * continuing to stubbornly include those stupid logger impls?!?</p> */
++public class SLF4JServiceProvider implements org.slf4j.spi.SLF4JServiceProvider, ILoggerFactory, IMarkerFactory, MDCAdapter {
++
++ private final Object slf4jSimpleLoggerFactory;
++ private final Method getLoggerFn;
++
++ public SLF4JServiceProvider() {
++ try {
++ Class<?> logrFactClz = Class.forName("org.slf4j.simple.SimpleLoggerFactory");
++ slf4jSimpleLoggerFactory = logrFactClz.getConstructor().newInstance();
++ getLoggerFn = logrFactClz.getMethod("getLogger", String.class);
++ } catch (ClassNotFoundException | NoSuchMethodException | InstantiationException | IllegalAccessException | InvocationTargetException ex) {
++ throw new UnsupportedOperationException(ex);
++ }
++ }
++
++ @Override public ILoggerFactory getLoggerFactory() { return this; }
++ @Override public IMarkerFactory getMarkerFactory() { return this; }
++ @Override public MDCAdapter getMDCAdapter() { return this; }
++ @Override public String getRequestedApiVersion() { return "2.0"; }
++ @Override public void initialize() {}
++
++ @Override
++ public Logger getLogger(String name) {
++ try {
++ return (Logger) getLoggerFn.invoke(slf4jSimpleLoggerFactory, name);
++ } catch (IllegalAccessException | InvocationTargetException ex) {
++ throw new RuntimeException(ex);
++ }
++ }
++
++ @Override public Marker getMarker(String s) { assert false : "TODO not impl yet"; return null; }
++ @Override public boolean exists(String s) { assert false : "TODO not impl yet"; return false; }
++ @Override public boolean detachMarker(String s) { assert false : "TODO not impl yet"; return false; }
++ @Override public Marker getDetachedMarker(String s) { assert false : "TODO not impl yet"; return null; }
++ @Override public void put(String s, String s1) { assert false : "TODO not impl yet"; }
++ @Override public String get(String s) { assert false : "TODO not impl yet"; return null; }
++ @Override public void remove(String s) { assert false : "TODO not impl yet"; }
++ @Override public void clear() { assert false : "TODO not impl yet"; }
++ @Override public Map<String, String> getCopyOfContextMap() { assert false : "TODO not impl yet"; return null; }
++ @Override public void setContextMap(Map<String, String> map) { assert false : "TODO not impl yet"; }
++ @Override public void pushByKey(String s, String s1) { assert false : "TODO not impl yet"; }
++ @Override public String popByKey(String s) { assert false : "TODO not impl yet"; return null; }
++ @Override public Deque<String> getCopyOfDequeByKey(String s) { assert false : "TODO not impl yet"; return null; }
++ @Override public void clearDequeByKey(String s) { assert false : "TODO not impl yet"; }
++
++}
+
+
+--- /dev/null
++++ b/houston-process/src/main/java/net/bull/javamelody/internal/common/Log4J2Logger.java
+@@ -0,0 +1,38 @@
++package net.bull.javamelody.internal.common;
++
++import org.slf4j.Logger;
++
++import javax.servlet.http.HttpServletRequest;
++import java.lang.reflect.InvocationTargetException;
++import java.lang.reflect.Method;
++
++
++/** <p>How many of those fu**ing damn stupid idiot libs are still out there
++ * continuing to stubbornly include those stupid logger impls?!?</p> */
++public class Log4J2Logger implements net.bull.javamelody.JavaMelodyLogger {
++
++ private final org.slf4j.Logger log;
++
++ public Log4J2Logger(){
++ try {
++ Class<?> logrFactClz = Class.forName("org.slf4j.simple.SimpleLoggerFactory");
++ Object slf4jSimpleLoggerFactory = logrFactClz.getConstructor().newInstance();
++ Method getLoggerFn = logrFactClz.getMethod("getLogger", String.class);
++ this.log = (Logger) getLoggerFn.invoke(slf4jSimpleLoggerFactory, "net.bull.javamelody");
++ } catch (ClassNotFoundException | NoSuchMethodException | InstantiationException | IllegalAccessException | InvocationTargetException ex) {
++ throw new UnsupportedOperationException(ex);
++ }
++ }
++
++ @Override public void debug(String s) { log.debug(s); }
++ @Override public void debug(String s, Throwable ex) { log.debug(s, ex); }
++ @Override public void info(String s) { log.info(s); }
++ @Override public void info(String s, Throwable ex) { log.info(s, ex);}
++ @Override public void warn(String s, Throwable ex) { log.warn(s, ex);}
++ @Override public void logHttpRequest(
++ HttpServletRequest httpRequest, String requestName, long duration, boolean systemError, int responseStatus, long responseSize, String loggerName
++ ){
++ if (log.isInfoEnabled()) log.info("{}", LOG.buildLogMessage(httpRequest, duration, systemError, responseStatus, responseSize));
++ }
++
++}
+
+
+--- /dev/null
++++ b/houston-process/src/main/java/org/eclipse/jetty/util/log/Slf4jLog.java
+@@ -0,0 +1,32 @@
++package org.eclipse.jetty.util.log;
++
++import org.slf4j.LoggerFactory;
++
++
++/** Yet another fu**ing bastard lib having its own shiny stupid loggers. */
++public class Slf4jLog {
++
++ private final org.slf4j.Logger log;
++
++ public Slf4jLog() {
++ this.log = LoggerFactory.getLogger("org.eclipse.jetty.util.log");
++ }
++
++ public Slf4jLog(String name) {
++ this.log = LoggerFactory.getLogger(name);
++ }
++
++ public String getName() { return log.getName(); }
++ public void warn(String msg, Object... args) { log.warn(msg, args); }
++ public void warn(Throwable thrown) { log.warn("", thrown); }
++ public void warn(String msg, Throwable thrown) { log.warn(msg, thrown); }
++ public void info(String msg, Object... args) { log.info(msg, args); }
++ public void info(Throwable thrown) { log.info("", thrown); }
++ public void info(String msg, Throwable thrown) { log.info(msg, thrown); }
++ public void debug(String msg, Object... args) { log.debug(msg, args); }
++ public void debug(String msg, long arg) { if (log.isDebugEnabled()) log.debug(msg, arg); }
++ public void debug(Throwable thrown) { this.debug("", thrown); }
++ public void debug(String msg, Throwable thrown) { log.debug(msg, thrown); }
++ public boolean isDebugEnabled() { return log.isDebugEnabled(); }
++ public void setDebugEnabled(boolean enabled) { log.warn("setDebugEnabled not implemented"); }
++
++}
+
diff --git a/src/main/patch/houston/future.patch b/src/main/patch/houston/future.patch
new file mode 100644
index 0000000..2ac5922
--- /dev/null
+++ b/src/main/patch/houston/future.patch
@@ -0,0 +1,47 @@
+
 Some patches that may become relevant in the future. For example because a
 PR is not yet merged or similar.
+
+
+--- /dev/null
++++ b/houston-process/src/main/java/net/bull/javamelody/internal/common/Log4J2Logger.java
+@@ -22,1 +22,1 @@
+ if (!iLoaders.hasNext()) throw new IllegalStateException("Too few logger impls");
+ SLF4JServiceProvider loggerProvider = iLoaders.next();
+- if (iLoaders.hasNext()) throw new IllegalStateException("Too many logger impls");
++ if (!(loggerProvider instanceof org.slf4j.simple.SimpleServiceProvider) && iLoaders.hasNext()) throw new IllegalStateException("Too many logger impls");
+ loggerProvider.initialize();
+ ILoggerFactory loggerFactory = loggerProvider.getLoggerFactory();
+
+
+--- /dev/null
++++ b/houston-process/src/main/java/ch/post/it/paisa/houston/process/main/BadLoggerImplKiller.java
+@@ -0,0 +1,26 @@
++package ch.post.it.paisa.houston.process.main;
++
++import org.slf4j.spi.SLF4JServiceProvider;
++
++import java.util.ServiceLoader;
++
++
++public class BadLoggerImplKiller {
++
++ public static void assertExactlyOneLoggerImplPresent(){
++ Class<?> log4jProviderClz, simpleProviderClz;
++ try {
++ log4jProviderClz = Class.forName("org.apache.logging.slf4j.SLF4JServiceProvider");
++ simpleProviderClz = Class.forName("org.slf4j.simple.SimpleServiceProvider");
++ }catch( ClassNotFoundException ex ){
++ throw new RuntimeException(ex);
++ }
++ for( SLF4JServiceProvider provider : ServiceLoader.load(SLF4JServiceProvider.class) ){
++ Class<?> providerClass = provider.getClass();
++ if( log4jProviderClz.isAssignableFrom(providerClass) ) continue;
++ if( simpleProviderClz.isAssignableFrom(providerClass) ) continue;
++ throw new IllegalStateException("Go away with this ugly logger: "+ providerClass.getName());
++ }
++ }
++
++}
+
+
diff --git a/src/main/patch/preflux/default.patch b/src/main/patch/preflux/default.patch
new file mode 100644
index 0000000..897d731
--- /dev/null
+++ b/src/main/patch/preflux/default.patch
@@ -0,0 +1,236 @@
+
+ TODO describe (like in houston)
+
+
+--- a/pom.xml
++++ b/pom.xml
+@@ -57,6 +57,24 @@
+
+ <!-- atlas -->
+ <atlas.version>00.01.00.00</atlas.version>
++ <skip.angular.build>true</skip.angular.build>
++ <skip.frontend.sourceformat.check>true</skip.frontend.sourceformat.check>
++ <skip.grunt.build>true</skip.grunt.build>
++ <skip.install.nodeAndNpm>false</skip.install.nodeAndNpm>
++ <skip.integration.tests>true</skip.integration.tests>
++ <skip.jacoco>true</skip.jacoco>
++ <skip.jacoco.regularbuild>true</skip.jacoco.regularbuild>
++ <skip.junit.tests>false</skip.junit.tests>
++ <skip.karma.tests>true</skip.karma.tests>
++ <skip.npm.install>true</skip.npm.install>
++ <skip.remote.tests>true</skip.remote.tests>
++ <skip.selenium.tests>true</skip.selenium.tests>
++ <skip.spotless>true</skip.spotless>
++ <skip.test.install.nodeAndNpm>true</skip.test.install.nodeAndNpm>
++ <skip.test.npm.install>true</skip.test.npm.install>
++ <skip.unpack.dependencies>true</skip.unpack.dependencies>
++ <skip.web.install.nodeAndNpm>true</skip.web.install.nodeAndNpm>
++ <skip.web.npm.install>true</skip.web.npm.install>
+ </properties>
+
+ <modules>
+
+
+--- a/preflux-web/pom.xml
++++ b/preflux-web/pom.xml
+@@ -14,6 +14,26 @@
+ <packaging>war</packaging>
+
+ <dependencies>
++ <dependency> <!-- TODO: Remove -->
++ <groupId>org.slf4j</groupId> <!-- TODO: Remove -->
++ <artifactId>slf4j-api</artifactId> <!-- TODO: Remove -->
++ <version>${slf4j.version}</version> <!-- TODO: Remove -->
++ </dependency> <!-- TODO: Remove -->
++ <dependency> <!-- TODO: Remove -->
++ <groupId>org.slf4j</groupId> <!-- TODO: Remove -->
++ <artifactId>slf4j-simple</artifactId> <!-- TODO: Remove -->
++ <version>${slf4j.version}</version> <!-- TODO: Remove -->
++ </dependency> <!-- TODO: Remove -->
++ <dependency> <!-- TODO: Remove -->
++ <groupId>org.slf4j</groupId> <!-- TODO: Remove -->
++ <artifactId>jcl-over-slf4j</artifactId> <!-- TODO: Remove -->
++ <version>${slf4j.version}</version> <!-- TODO: Remove -->
++ </dependency> <!-- TODO: Remove -->
++ <dependency> <!-- TODO: Remove -->
++ <groupId>org.slf4j</groupId> <!-- TODO: Remove -->
++ <artifactId>jul-to-slf4j</artifactId> <!-- TODO: Remove -->
++ <version>${slf4j.version}</version> <!-- TODO: Remove -->
++ </dependency> <!-- TODO: Remove -->
+ <!-- PA-ISA Platform -->
+ <dependency>
+ <groupId>ch.post.it.paisa.alice</groupId>
+
+
+--- a/preflux-test/pom.xml
++++ b/preflux-test/pom.xml
+@@ -16,6 +16,26 @@
+ </properties>
+
+ <dependencies>
++ <dependency> <!-- TODO: Remove -->
++ <groupId>org.slf4j</groupId> <!-- TODO: Remove -->
++ <artifactId>slf4j-api</artifactId> <!-- TODO: Remove -->
++ <version>${slf4j.version}</version> <!-- TODO: Remove -->
++ </dependency> <!-- TODO: Remove -->
++ <dependency> <!-- TODO: Remove -->
++ <groupId>org.slf4j</groupId> <!-- TODO: Remove -->
++ <artifactId>slf4j-simple</artifactId> <!-- TODO: Remove -->
++ <version>${slf4j.version}</version> <!-- TODO: Remove -->
++ </dependency> <!-- TODO: Remove -->
++ <dependency> <!-- TODO: Remove -->
++ <groupId>org.slf4j</groupId> <!-- TODO: Remove -->
++ <artifactId>jcl-over-slf4j</artifactId> <!-- TODO: Remove -->
++ <version>${slf4j.version}</version> <!-- TODO: Remove -->
++ </dependency> <!-- TODO: Remove -->
++ <dependency> <!-- TODO: Remove -->
++ <groupId>org.slf4j</groupId> <!-- TODO: Remove -->
++ <artifactId>jul-to-slf4j</artifactId> <!-- TODO: Remove -->
++ <version>${slf4j.version}</version> <!-- TODO: Remove -->
++ </dependency> <!-- TODO: Remove -->
+ <!--Alice Test Framework-->
+ <dependency>
+ <groupId>ch.post.it.paisa.alice</groupId>
+
+
+--- a/preflux-web/package.json
++++ b/preflux-web/package.json
+@@ -10,10 +10,10 @@
+ "check": "npm run format:check && npm run lint && npm run test",
+ "check:ci": "npm run format:check && npm run lint",
+ "check:fix": "npm run format:fix && npm run lint:fix && npm run test",
+- "format:check": "prettier --check \"src/main/angular/**/*.{ts,html,css,json}\"",
+- "format:fix": "prettier --write \"src/main/angular/**/*.{ts,html,css,json}\"",
+- "lint": "ng lint",
+- "lint:fix": "ng lint --fix",
++ "format:check": "true",
++ "format:fix": "true",
++ "lint": "true",
++ "lint:fix": "true",
+ "test": "ng test --no-watch --browsers=ChromeHeadlessNoSandbox",
+ "test:ci": "npm run test",
+ "test:watch": "ng test --watch --browsers=ChromeHeadlessNoSandbox"
+
+
+--- /dev/null
++++ b/preflux-web/src/main/java/org/apache/logging/slf4j/Log4jLogger.java
+@@ -0,0 +1,115 @@
++package org.apache.logging.slf4j;
++
++import org.apache.logging.log4j.spi.ExtendedLogger;
++import org.slf4j.Logger;
++import org.slf4j.Marker;
++import org.slf4j.spi.LocationAwareLogger;
++
++import java.io.Serializable;
++import java.lang.reflect.Constructor;
++import java.lang.reflect.InvocationTargetException;
++import java.lang.reflect.Method;
++
++
++/** <p>This class only exists to really, really, really, really use the wanted
++ * logger impl. And only the one choosen logger impl and no other log impl. In
++ * fact there should be no reason for this class to exist. But it seems as some
++ * code still manages to stubbornly use some unwanted logger impls occasionally,
++ * for whatever reason. As it seems impossible to configure this properly, this
++ * class here at least make it fail-fast, before make devs wasting time searching
++ * expected logs which magically never appear.</p>
++ */
++public class Log4jLogger implements LocationAwareLogger, Serializable {
++
++ private static final Method getLoggerFn;
++ private static final Object loggerFactory;
++ private final Logger log;
++
++ static {
++ try {
++ Class<?> slfClass = Class.forName("org.slf4j.simple.SimpleLoggerFactory");
++ getLoggerFn = slfClass.getDeclaredMethod("getLogger", String.class);
++ Constructor<?> ctor = slfClass.getConstructor();
++ ctor.setAccessible(true);
++ loggerFactory = ctor.newInstance();
++ } catch (ClassNotFoundException | NoSuchMethodException | InvocationTargetException | InstantiationException |
++ IllegalAccessException ex) {
++ throw new UnsupportedOperationException(/*TODO*/"Not impl yet", ex);
++ }
++ }
++
++ Log4jLogger(Object markerFactory, ExtendedLogger logger, final String name) {
++ try {
++ this.log = (Logger) getLoggerFn.invoke(loggerFactory, name);
++ } catch (InvocationTargetException | IllegalAccessException ex) {
++ throw new UnsupportedOperationException(/*TODO*/"Not impl yet", ex);
++ }
++ }
++
++ @Override public void log(Marker marker, String s, int i, String s1, Object[] objects, Throwable throwable) {
++ throw new UnsupportedOperationException(/*TODO*/"Not impl yet");
++ }
++
++ @Override public String getName() { return log.getName(); }
++ @Override public boolean isTraceEnabled() { return log.isTraceEnabled(); }
++ @Override public void trace(String s) { log.trace(s); }
++ @Override public void trace(String s, Object o) { log.trace(s, o); }
++ @Override public void trace(String s, Object o, Object o1) { log.trace(s, o, o1); }
++ @Override public void trace(String s, Object... objects) { log.trace(s, objects); }
++ @Override public void trace(String s, Throwable throwable) { log.trace(s, throwable); }
++ @Override public boolean isTraceEnabled(Marker marker) { return log.isTraceEnabled(marker); }
++ @Override public void trace(Marker marker, String s) { log.trace(marker, s); }
++ @Override public void trace(Marker marker, String s, Object o) { log.trace(marker, s, o); }
++ @Override public void trace(Marker marker, String s, Object o, Object o1) { log.trace(marker, s, o, o1); }
++ @Override public void trace(Marker marker, String s, Object... objects) { log.trace(marker, s, objects); }
++ @Override public void trace(Marker marker, String s, Throwable throwable) { log.trace(marker, s, throwable); }
++ @Override public boolean isDebugEnabled() { return log.isDebugEnabled(); }
++ @Override public void debug(String s) { log.debug(s); }
++ @Override public void debug(String s, Object o) { log.debug(s, o); }
++ @Override public void debug(String s, Object o, Object o1) { log.debug(s, o, o1); }
++ @Override public void debug(String s, Object... objects) { log.debug(s, objects); }
++ @Override public void debug(String s, Throwable throwable) { log.debug(s, throwable); }
++ @Override public boolean isDebugEnabled(Marker marker) { return log.isDebugEnabled(marker); }
++ @Override public void debug(Marker marker, String s) { log.debug(marker, s); }
++ @Override public void debug(Marker marker, String s, Object o) { log.debug(marker, s, o); }
++ @Override public void debug(Marker marker, String s, Object o, Object o1) { log.debug(marker, s, o, o1); }
++ @Override public void debug(Marker marker, String s, Object... objects) { log.debug(marker, s, objects); }
++ @Override public void debug(Marker marker, String s, Throwable throwable) { log.debug(marker, s, throwable); }
++ @Override public boolean isInfoEnabled() { return log.isInfoEnabled(); }
++ @Override public void info(String s) { log.info(s); }
++ @Override public void info(String s, Object o) { log.info(s, o); }
++ @Override public void info(String s, Object o, Object o1) { log.info(s, o, o1); }
++ @Override public void info(String s, Object... objects) { log.info(s, objects); }
++ @Override public void info(String s, Throwable throwable) { log.info(s, throwable); }
++ @Override public boolean isInfoEnabled(Marker marker) { return log.isInfoEnabled(marker); }
++ @Override public void info(Marker marker, String s) { log.info(marker, s); }
++ @Override public void info(Marker marker, String s, Object o) { log.info(marker, s, o); }
++ @Override public void info(Marker marker, String s, Object o, Object o1) { log.info(marker, s, o, o1); }
++ @Override public void info(Marker marker, String s, Object... objects) { log.info(marker, s, objects); }
++ @Override public void info(Marker marker, String s, Throwable throwable) { log.info(marker, s, throwable); }
++ @Override public boolean isWarnEnabled() { return log.isWarnEnabled(); }
++ @Override public void warn(String s) { log.warn(s); }
++ @Override public void warn(String s, Object o) { log.warn(s, o); }
++ @Override public void warn(String s, Object... objects) { log.warn(s, objects); }
++ @Override public void warn(String s, Object o, Object o1) { log.warn(s, o, o1); }
++ @Override public void warn(String s, Throwable throwable) { log.warn(s, throwable); }
++ @Override public boolean isWarnEnabled(Marker marker) { return log.isWarnEnabled(marker); }
++ @Override public void warn(Marker marker, String s) { log.warn(marker, s); }
++ @Override public void warn(Marker marker, String s, Object o) { log.warn(marker, s, o); }
++ @Override public void warn(Marker marker, String s, Object o, Object o1) { log.warn(marker, s, o, o1); }
++ @Override public void warn(Marker marker, String s, Object... objects) { log.warn(marker, s, objects); }
++ @Override public void warn(Marker marker, String s, Throwable throwable) { log.warn(marker, s, throwable); }
++ @Override public boolean isErrorEnabled() { return log.isErrorEnabled(); }
++ @Override public void error(String s) { log.error(s); }
++ @Override public void error(String s, Object o) { log.error(s, o); }
++ @Override public void error(String s, Object o, Object o1) { log.error(s, o, o1); }
++ @Override public void error(String s, Object... objects) { log.error(s, objects); }
++ @Override public void error(String s, Throwable throwable) { log.error(s, throwable); }
++ @Override public boolean isErrorEnabled(Marker marker) { return log.isErrorEnabled(marker); }
++ @Override public void error(Marker marker, String s) { log.error(marker, s); }
++ @Override public void error(Marker marker, String s, Object o) { log.error(marker, s, o); }
++ @Override public void error(Marker marker, String s, Object o, Object o1) { log.error(marker, s, o, o1); }
++ @Override public void error(Marker marker, String s, Object... objects) { log.error(marker, s, objects); }
++ @Override public void error(Marker marker, String s, Throwable throwable) { log.error(marker, s, throwable); }
++
++}
+
+
+
+
diff --git a/src/main/patch/slarti/default.patch b/src/main/patch/slarti/default.patch
new file mode 100644
index 0000000..2d910d7
--- /dev/null
+++ b/src/main/patch/slarti/default.patch
@@ -0,0 +1,31 @@
+diff --git a/slarti-web/pom.xml b/slarti-web/pom.xml
+index 7933bdf86..3a1730377 100644
+--- a/slarti-web/pom.xml
++++ b/slarti-web/pom.xml
+@@ -17,6 +17,26 @@
+ <packaging>war</packaging>
+
+ <dependencies>
++ <dependency> <!-- TODO: Remove -->
++ <groupId>org.slf4j</groupId> <!-- TODO: Remove -->
++ <artifactId>slf4j-api</artifactId> <!-- TODO: Remove -->
++ <version>2.0.1</version> <!-- TODO: Remove -->
++ </dependency> <!-- TODO: Remove -->
++ <dependency> <!-- TODO: Remove -->
++ <groupId>org.slf4j</groupId> <!-- TODO: Remove -->
++ <artifactId>slf4j-simple</artifactId> <!-- TODO: Remove -->
++ <version>2.0.1</version> <!-- TODO: Remove -->
++ </dependency> <!-- TODO: Remove -->
++ <dependency> <!-- TODO: Remove -->
++ <groupId>org.slf4j</groupId> <!-- TODO: Remove -->
++ <artifactId>jcl-over-slf4j</artifactId> <!-- TODO: Remove -->
++ <version>2.0.1</version> <!-- TODO: Remove -->
++ </dependency> <!-- TODO: Remove -->
++ <dependency> <!-- TODO: Remove -->
++ <groupId>org.slf4j</groupId> <!-- TODO: Remove -->
++ <artifactId>jul-to-slf4j</artifactId> <!-- TODO: Remove -->
++ <version>2.0.1</version> <!-- TODO: Remove -->
++ </dependency> <!-- TODO: Remove -->
+ <dependency>
+ <groupId>ch.post.it.webjars</groupId>
+ <artifactId>linti</artifactId>
diff --git a/src/main/php/sqlite-exec.php b/src/main/php/sqlite-exec.php
new file mode 100644
index 0000000..8df2fe0
--- /dev/null
+++ b/src/main/php/sqlite-exec.php
@@ -0,0 +1,30 @@
+<?php
+
+throw new Exception("Sorry, cannot just execute from file :(");
+
+
+function run( $app ){
+ $lotsOfSql = file_get_contents($app->srcPath);
+ if( !$lotsOfSql ) throw new Exception("fopen(\"{$app->srcPath}\")");
+ $app->db = new SQLite3($app->dstPath);
+ if( !$app->db ) throw new Exception("SQLite3(\"{$app->dstPath}\")");
+ $db = $app->db;
+ $db->enableExceptions(true);
+ $st = $db->prepare($lotsOfSql);
+ $st->execute();
+ $st->close();
+}
+
+
+function main(){
+ $app = (object)array(
+ "srcPath" => NULL/*TODO set me*/,
+ "dstPath" => NULL/*TODO set me*/,
+ "srcFile" => NULL,
+ "db" => NULL,
+ );
+ run($app);
+}
+
+
+main();
diff --git a/src/main/shell/BackupByRsync/backup.sh b/src/main/shell/BackupByRsync/backup.sh
index 40189c2..16c1aa2 100755
--- a/src/main/shell/BackupByRsync/backup.sh
+++ b/src/main/shell/BackupByRsync/backup.sh
@@ -53,57 +53,68 @@ run () {
rsync --archive --verbose \
--link-dest "${DIR_TO}/latest/${DST_PREFIX:?}" \
--filter=':- .gitignore' \
+ --exclude=".git/branches" \
--exclude=".git/COMMIT_EDITMSG" \
--exclude=".git/FETCH_HEAD" \
- --exclude=".git/ORIG_HEAD" \
- --exclude=".git/branches" \
--exclude=".git/hooks/*.sample" \
--exclude=".git/index" \
--exclude=".git/info" \
--exclude=".git/logs" \
--exclude=".git/objects" \
+ --exclude=".git/ORIG_HEAD" \
--exclude=".git/packed-refs" \
--exclude=".git/refs/remotes" \
--exclude=".git/refs/tags" \
--exclude=".idea" \
- --exclude="/.git-credentials" \
- --exclude="/.NERDTreeBookmarks" \
- --exclude="/.Xauthority" \
+ --exclude="/.android" \
--exclude="/.bash_history" \
+ --exclude="/.cache" \
+ --exclude="/.config/chromium" \
+ --exclude="/.config/GIMP" \
+ --exclude="/.config/inkscape" \
+ --exclude="/.config/JetBrains" \
+ --exclude="/.config/libreoffice" \
+ --exclude="/.config/VirtualBox/compreg.dat" \
--exclude="/.config/VirtualBox/HostInterfaceNetworking-vboxnet0-Dhcpd.leases*" \
--exclude="/.config/VirtualBox/HostInterfaceNetworking-vboxnet0-Dhcpd.log*" \
- --exclude="/.config/VirtualBox/VBoxSVC.log*" \
- --exclude="/.config/VirtualBox/compreg.dat" \
--exclude="/.config/VirtualBox/selectorwindow.log*" \
--exclude="/.config/VirtualBox/vbox-ssl-cacertificate.crt" \
+ --exclude="/.config/VirtualBox/VBoxSVC.log*" \
--exclude="/.config/VirtualBox/xpti.dat" \
- --exclude="/.config/libreoffice" \
- --exclude="/.config/GIMP" \
- --exclude="/.config/JetBrains" \
+ --exclude="/.eclipse" \
--exclude="/.gdb_history" \
+ --exclude="/.git-credentials" \
+ --exclude="/.gmrun_history" \
--exclude="/.lesshst" \
- --exclude="/.xsession-errors" \
- --exclude="/.xsession-errors.old" \
- --exclude="/mnt" \
- --exclude="/.android" \
- --exclude="/.cache" \
- --exclude="/.config/chromium" \
- --exclude="/.config/inkscape" \
--exclude="/.local/share" \
--exclude="/.m2/repository" \
+ --exclude="/mnt" \
--exclude="/.mozilla/firefox" \
+ --exclude="/.NERDTreeBookmarks" \
+ --exclude="/.recently-used" \
+ --exclude="/.recoll" \
+ --exclude="/.sh_history" \
+ --exclude="/.sqlite_history" \
--exclude="/.squirrel-sql" \
--exclude="/.viking-maps" \
- --exclude="/Downloads" \
+ --exclude="/.viminfo" \
+ --exclude="/.viminfo.tmp" \
+ --exclude="/.Xauthority" \
+ --exclude="/.xsession-errors" \
+ --exclude="/.xsession-errors.old" \
--exclude="/crashdumps" \
+ --exclude="/Downloads" \
--exclude="/images" \
+ --exclude="/mnt" \
--exclude="/projects/forks" \
- --exclude="cee-misc-lib/external" \
- --exclude="cee-misc-lib/tmp" \
--exclude="/tmp" \
--exclude="/virtualbox-*" \
- --exclude="/vmshare" \
+ --exclude="/VirtualBox VMs" \
--exclude="/vm-qemu" \
+ --exclude="/vm-share" \
+ --exclude="/vmshare" \
+ --exclude="cee-misc-lib/external" \
+ --exclude="cee-misc-lib/tmp" \
"${DIR_FROM:?}" \
"${BACKUP_PATH:?}/${DST_PREFIX}" \
;