-rw-r--r--  README.txt | 20
-rw-r--r--  doc/note/bash/bash-on-windoof.txt | 6
-rw-r--r--  doc/note/bash/bashrc | 53
-rw-r--r--  doc/note/bash/inputrc | 4
-rw-r--r--  doc/note/binutils/dumpbin.txt | 20
-rw-r--r--  doc/note/burncdrom/burncdrom.txt | 38
-rw-r--r--  doc/note/compress/xz.txt | 5
-rw-r--r--  doc/note/curl/curl.txt | 11
-rw-r--r--  doc/note/docker/Docker-Daemon-Install.txt | 8
-rw-r--r--  doc/note/ffmpeg/ffmpeg.txt | 19
-rw-r--r--  doc/note/firefox/firefox.txt | 9
-rw-r--r--  doc/note/gdb/gdb.txt | 10
-rw-r--r--  doc/note/gpg/gpg.txt | 33
-rw-r--r--  doc/note/java/java.txt | 7
-rw-r--r--  doc/note/links/links.txt | 90
-rw-r--r--  doc/note/maven-pom/howto-ban-unwanted-sh__.txt | 41
-rw-r--r--  doc/note/maven/maven.txt | 24
-rw-r--r--  doc/note/mount/fstab.txt | 13
-rw-r--r--  doc/note/mount/mount.txt | 8
-rw-r--r--  doc/note/nginx/nginx-wdoof.txt | 1
-rw-r--r--  doc/note/nginx/nginx.txt | 47
-rw-r--r--  doc/note/openshift/dbg-mem-issues.txt | 170
-rw-r--r--  doc/note/openshift/openshift.txt | 14
-rw-r--r--  doc/note/pdf/pdfToPng.txt | 3
-rw-r--r--  doc/note/qemu/build-cJSON.txt | 76
-rw-r--r--  doc/note/qemu/build-gateleen.txt | 79
-rw-r--r--  doc/note/qemu/build-jssc.txt | 41
-rw-r--r--  doc/note/qemu/build-libarchive.txt | 72
-rw-r--r--  doc/note/qemu/build-libcurl.txt | 99
-rw-r--r--  doc/note/qemu/build-libpcap.txt | 64
-rw-r--r--  doc/note/qemu/build-libpcre1.txt | 70
-rw-r--r--  doc/note/qemu/build-lua.txt | 86
-rw-r--r--  doc/note/qemu/build-sqlite.txt | 77
-rw-r--r--  doc/note/qemu/build-zlib.txt | 73
-rw-r--r--  doc/note/qemu/php-dev-server.txt | 6
-rw-r--r--  doc/note/qemu/qemu-compile-itself.txt | 27
-rw-r--r--  doc/note/qemu/qemu.txt | 164
-rw-r--r--  doc/note/qemu/setup-android-env.txt | 85
-rw-r--r--  doc/note/qemu/setup-dockerVM.txt | 86
-rw-r--r--  doc/note/qemu/setup-jni-env.txt | 22
-rw-r--r--  doc/note/qemu/setup-jre8-env.txt | 18
-rw-r--r--  doc/note/qemu/setup-maven-env.txt | 16
-rw-r--r--  doc/note/qemu/setup-nginx-env.txt | 48
-rw-r--r--  doc/note/qemu/setup-windoof.txt | 40
-rw-r--r--  doc/note/qemu/setup-zwp-env.txt | 24
-rw-r--r--  doc/note/redis/redis.txt | 32
-rw-r--r--  doc/note/setup-debian/etc-environment | 18
-rw-r--r--  doc/note/setup-debian/setup-debian.txt | 130
-rw-r--r--  doc/note/ssh/ssh-setup.txt | 12
-rw-r--r--  doc/note/ssh/ssh-usage.txt | 5
-rw-r--r--  doc/note/tcpdump/tcpdump.txt | 26
-rw-r--r--  doc/note/windoof/kill-auto-update.txt | 18
-rw-r--r--  doc/note/windoof/msteams.txt (renamed from doc/note/msteams/msteams.txt) | 0
-rw-r--r--  doc/note/windoof/msys-path-issue.txt | 3
-rw-r--r--  doc/note/windoof/outlook-expor-eml.txt | 14
-rw-r--r--  doc/note/windoof/proto-handler.txt | 6
-rw-r--r--  src/main/c/PcapOne/PcapOne.c | 311
-rw-r--r--  src/main/c/common/assert_is.h | 39
-rw-r--r--  src/main/c/common/commonbase.h (renamed from src/main/c/common/commonKludge.h) | 0
-rw-r--r--  src/main/c/common/offset_of.h | 9
-rw-r--r--  src/main/c/common/windoof.h | 59
-rw-r--r--  src/main/c/paisa-fleet/FindFullDisks.c | 383
-rw-r--r--  src/main/c/postshit/launch/mvn/mvn-launch.c | 214
-rw-r--r--  src/main/c/postshit/launch/mvn/mvn-versions-set.c | 133
-rw-r--r--  src/main/c/postshit/launch/openshift/ocexec.c | 152
-rw-r--r--  src/main/docker/android-dev.Dockerfile | 44
-rw-r--r--  src/main/docker/gateleen.Dockerfile | 65
-rw-r--r--  src/main/docker/gcc-windoof.Dockerfile | 233
-rw-r--r--  src/main/docker/gcc.Dockerfile | 220
-rw-r--r--  src/main/docker/gxx.Dockerfile | 17
-rw-r--r--  src/main/docker/jni.Dockerfile | 20
-rw-r--r--  src/main/docker/jre8.Dockerfile | 27
-rw-r--r--  src/main/docker/maven.Dockerfile | 35
-rw-r--r--  src/main/docker/nginx.Dockerfile | 50
-rw-r--r--  src/main/docker/zlib-deb.Dockerfile | 49
-rw-r--r--  src/main/docker/zlib-mingw.Dockerfile | 51
l---------  src/main/eagle | 1
-rw-r--r--  src/main/firefox/gaga-plugin/main.js | 149
-rw-r--r--  src/main/gimp/nek2023-scan2/arrange-pdf | 32
-rw-r--r--  src/main/gimp/re-arrange-pdfs/re-arrange-pdfs.sh | 24
-rw-r--r--  src/main/java/ch/hiddenalpha/unspecifiedgarbage/gateleenKludge/FailFastMetricsOptions.java | 35
-rw-r--r--  src/main/java/ch/hiddenalpha/unspecifiedgarbage/gateleenKludge/FailFastVertxMetricsFactory.java | 27
-rw-r--r--  src/main/java/ch/hiddenalpha/unspecifiedgarbage/gateleenKludge/tmoutissue20240123/DelegateHttpServerRequest.java | 394
-rw-r--r--  src/main/java/ch/hiddenalpha/unspecifiedgarbage/gateleenKludge/tmoutissue20240123/DelegateVertxHttpServerRequestInternal.java | 475
-rw-r--r--  src/main/java/ch/hiddenalpha/unspecifiedgarbage/gateleenKludge/tmoutissue20240123/DelegateVertxHttpServerResponse.java | 111
-rw-r--r--  src/main/java/ch/hiddenalpha/unspecifiedgarbage/gateleenKludge/tmoutissue20240123/Foo.java | 131
-rw-r--r--  src/main/java/ch/hiddenalpha/unspecifiedgarbage/gateleenKludge/tmoutissue20240123/HoustonInfoRequestTracer.java | 265
-rw-r--r--  src/main/java/ch/hiddenalpha/unspecifiedgarbage/stream/StreamUtils.java | 15
-rw-r--r--  src/main/java/ch/hiddenalpha/unspecifiedgarbage/time/TimeUtils.java | 6
-rw-r--r--  src/main/java/org/apache/logging/slf4j/Log4jLogger.java | 104
-rw-r--r--  src/main/lua/brgmt-logs/DigBrgmtLogs.lua | 5
-rw-r--r--  src/main/lua/git/GitflowChangelogGen.lua | 195
-rw-r--r--  src/main/lua/maven/MvnCentralDepScan.lua | 30
-rw-r--r--  src/main/lua/misc/JavaCallgraph.lua | 159
-rw-r--r--  src/main/lua/mshitteams/ListEmlInbox.lua | 322
-rw-r--r--  src/main/lua/mshitteams/SendRawMsEmail.lua | 60
-rw-r--r--  src/main/lua/paisa-fleet/FindFullDisks.lua | 322
-rw-r--r--  src/main/lua/paisa-fleet/RmArtifactBaseDir.lua | 381
-rw-r--r--  src/main/lua/paisa-jvm-memLeak/LogStatistics.lua | 112
-rw-r--r--  src/main/lua/paisa-jvm-memLeak/MemLeakTry1.lua | 235
-rw-r--r--  src/main/lua/paisa-logs/DigHoustonLogs.lua | 252
-rw-r--r--  src/main/lua/paisa-logs/PaisaLogParser.lua | 435
-rw-r--r--  src/main/lua/pcap/KubeProbeFilter.lua | 93
-rw-r--r--  src/main/lua/pcap/extractDnsHosts.lua | 147
-rw-r--r--  src/main/lua/pcap/httpStats.lua | 117
-rw-r--r--  src/main/lua/pcap/tcpDataAmountStats.lua | 97
-rw-r--r--  src/main/lua/pcap/tcpPortStats.lua | 82
-rw-r--r--  src/main/lua/pcap/xServiceStats.lua | 90
-rw-r--r--  src/main/lua/wireshark/HttpTime.lua | 2
-rw-r--r--  src/main/nodejs/misc/ProduceLotsOfQueues.js | 119
-rw-r--r--  src/main/nodejs/paisa-nonslim/README.txt | 3
-rw-r--r--  src/main/nodejs/paisa-nonslim/foo.js | 902
-rw-r--r--  src/main/patch/eagle/default-bak20211124-080400.patch | 103
-rw-r--r--  src/main/patch/eagle/default-bak20230220-121000.patch | 102
-rw-r--r--  src/main/patch/eagle/default-bak20231024-082300.patch | 101
-rw-r--r--  src/main/patch/eagle/default.patch | 101
-rw-r--r--  src/main/patch/eagle/simplelogger.patch | 33
-rw-r--r--  src/main/patch/houston/default-20230203.patch | 52
-rw-r--r--  src/main/patch/houston/default-20230214.patch | 56
-rw-r--r--  src/main/patch/houston/default-20230331.patch | 56
-rw-r--r--  src/main/patch/houston/default.patch | 88
-rw-r--r--  src/main/patch/houston/fixidiots.patch | 365
-rw-r--r--  src/main/patch/houston/future.patch | 47
-rw-r--r--  src/main/patch/preflux/default.patch | 236
-rw-r--r--  src/main/patch/slarti/default.patch | 31
-rw-r--r--  src/main/php/sqlite-exec.php | 30
-rwxr-xr-x  src/main/shell/BackupByRsync/backup.sh | 53
127 files changed, 10194 insertions, 1491 deletions
diff --git a/README.txt b/README.txt
index 8064101..ced31c3 100644
--- a/README.txt
+++ b/README.txt
@@ -7,3 +7,23 @@ Just some random garbage which was handy in some way somewhen.
Not yet migrated scripts see "C:/Users/fankhauseand/OneDrive - POSTCHAG/doc"
+
+## Stats For Nerds
+
+github.com/AlDanial/cloc v 1.81 T=0.53 s (84.3 files/s, 11729.9 lines/s)
+-------------------------------------------------------------------------------
+Language files blank comment code
+-------------------------------------------------------------------------------
+Lua 11 238 286 2259
+JavaScript 6 165 50 1069
+C 3 146 40 759
+Java 19 158 242 570
+Bourne Shell 1 13 7 104
+XML 2 9 22 41
+Markdown 1 18 0 35
+JSON 1 0 0 18
+C/C++ Header 1 4 0 12
+-------------------------------------------------------------------------------
+SUM: 45 751 647 4867
+-------------------------------------------------------------------------------
+
diff --git a/doc/note/bash/bash-on-windoof.txt b/doc/note/bash/bash-on-windoof.txt
new file mode 100644
index 0000000..32c0ee3
--- /dev/null
+++ b/doc/note/bash/bash-on-windoof.txt
@@ -0,0 +1,6 @@
+
+
+## Stop silly path replacements
+
+ MSYS_NO_PATHCONV=1 ssh foo -- ls /var/lib
+
diff --git a/doc/note/bash/bashrc b/doc/note/bash/bashrc
new file mode 100644
index 0000000..ca3aaa2
--- /dev/null
+++ b/doc/note/bash/bashrc
@@ -0,0 +1,53 @@
+
+WINDOOF=$(if [ -d /c/Windows ]; then echo true; else echo false; fi)
+
+# Disable annoying "features", so that exclamation marks become usable again.
+set +o histexpand
+
+# Do NOT store duplicates in history. Do NOT store in history if
+# starts-with-space.
+HISTCONTROL=ignoreboth
+
+if [ $SHLVL -eq 1 ]; then
+ set -o ignoreeof # Require explicit 'exit' cmd to exit shell.
+else
+ set +o ignoreeof
+fi
+
+export PS1='\033[1;32m[\033[0m$? \033[1;30m\u\033[0m\033[1;32m@\033[1;30m\h \033[1;34m\w\033[1;32m]\033[0m\n\$ '
+
+# Add global node modules to path
+#PATH=/opt/node-6.10.1/lib/node_modules/.bin:$PATH
+# bash completion for npm
+#source /opt/node-6.10.1/etc/npm-completion.sh
+
+if test -d ~/.local/bin; then export PATH=~/.local/bin:$PATH; fi
+
+###############################################################################
+#
+# Auto-launching ssh-agent on Git for Windoofs
+# (See: https://docs.github.com/en/github/authenticating-to-github/working-with-ssh-key-passphrases#auto-launching-ssh-agent-on-git-for-windows)
+#
+if $WINDOOF; then
+ env=~/.ssh/agent.env
+
+ agent_load_env () { test -f "$env" && . "$env" >| /dev/null ; }
+ agent_start () { (umask 077; ssh-agent >| "$env"); . "$env" >| /dev/null ; }
+
+ agent_load_env
+
+ # agent_run_state: 0=agent running w/ key; 1=agent w/o key; 2=agent not running
+ agent_run_state=$(ssh-add -l >| /dev/null 2>&1; echo $?)
+
+ if [ ! "$SSH_AUTH_SOCK" ] || [ $agent_run_state = 2 ]; then
+ agent_start
+ #ssh-add
+ #elif [ "$SSH_AUTH_SOCK" ] && [ $agent_run_state = 1 ]; then
+ # ssh-add
+ fi
+
+ unset env
+fi
+#
+###############################################################################
+
diff --git a/doc/note/bash/inputrc b/doc/note/bash/inputrc
new file mode 100644
index 0000000..df82709
--- /dev/null
+++ b/doc/note/bash/inputrc
@@ -0,0 +1,4 @@
+
+set colored-completion-prefix on
+set colored-stats off
+
diff --git a/doc/note/binutils/dumpbin.txt b/doc/note/binutils/dumpbin.txt
index e71be0e..d71878b 100644
--- a/doc/note/binutils/dumpbin.txt
+++ b/doc/note/binutils/dumpbin.txt
@@ -2,15 +2,23 @@
DumpBin For Windoof
===================
-Analyze PE32 / PE32+ files.
+Scan for unwanted dependencies
+------------------------------
-Location: "C:\Program Files (x86)\Microsoft Visual Studio 14.0\VC\bin\dumpbin.exe"
+Linux:
-Help
+ find build/bin -type f -print0|xargs -0I? sh -c 'echo ?;readelf -d ?|egrep NEEDED|egrep -v "\\[(libc\\.|libz\\.|libm\\.|libdl\\.|libpthread\\.)"'
- dumpbin /?
-List needed DLLs.
+Windoof:
- dumpbin /DEPENDENTS foo.exe
+ find build/bin/*.exe -print0|xargs -0I? sh -c 'echo ?;objdump -p ?|egrep -i DLL\ Name:|egrep -iv "(KERNEL32.dll|msvcrt.dll|USER32.dll|WS2_32.dll|ADVAPI32.dll|GDI32.dll|IMM32.dll|ole32.dll|OLEAUT32.dll|SETUPAPI.dll|SHELL32.dll|VERSION.dll|WINMM.dll)"'
+
+
+
+
+List Dll Dependencies Of A PE32 Executable
+------------------------------------------
+
+ x86_64-w64-mingw32-objdump -p out.exe | grep 'DLL Name:'
diff --git a/doc/note/burncdrom/burncdrom.txt b/doc/note/burncdrom/burncdrom.txt
new file mode 100644
index 0000000..4c6f163
--- /dev/null
+++ b/doc/note/burncdrom/burncdrom.txt
@@ -0,0 +1,38 @@
+
+How to handle CD/DVD burning
+============================
+
+## Install growisofs & isoinfo
+
+ apt install dvd+rw-tools genisoimage
+
+
+## Burn to optical media from ISO
+
+ growisofs -dvd-compat -Z /dev/srX=path/to/my.iso
+
+
+## "Burn" to USB from ISO
+
+ dd bs=4M if=path/to/my.iso of=/dev/sdX status=progress oflag=sync
+
+
+## Get checksum of disc
+
+- Use isoinfo (from genisoimage pkg) to get size params.
+- Use dd parameterized by previous output.
+
+ isoinfo -d dev=/dev/srX
+ dd bs=${Logical block size} count=${Volume size} if=/dev/srX | md5sum -b
+
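+Example, assuming isoinfo printed "Logical block size is: 2048" and
+"Volume size is: 359725":
+
+ dd bs=2048 count=359725 if=/dev/srX | md5sum -b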
+
+## Get checksum of usb drive
+
+- Use stat to get the image size in bytes.
+- Divide that size by the drive block size (likely 4096).
+- Use dd parameterized by previous output.
+
+ stat -c '%s' my.iso
+ dd bs=4096 count=${size / 4096} if=/dev/sdX | md5sum -b
+
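+Example, assuming stat printed 367001600 (367001600 / 4096 = 89600):
+
+ dd bs=4096 count=89600 if=/dev/sdX | md5sum -b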
+
diff --git a/doc/note/compress/xz.txt b/doc/note/compress/xz.txt
new file mode 100644
index 0000000..b7dff5f
--- /dev/null
+++ b/doc/note/compress/xz.txt
@@ -0,0 +1,5 @@
+
+
+ xz --keep --lzma2=preset=9,nice=273,dict=1G -vv
+
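+Decompress counterpart, keeping the compressed file ("my.tar.xz" is just a
+placeholder):
+
+ xz --decompress --keep my.tar.xz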
+
diff --git a/doc/note/curl/curl.txt b/doc/note/curl/curl.txt
new file mode 100644
index 0000000..fe0302b
--- /dev/null
+++ b/doc/note/curl/curl.txt
@@ -0,0 +1,11 @@
+
+
+## Timing
+
+ curl example.com -w "\n\nconnect=%{time_connect}s, trsf=%{time_starttransfer}s, totl=%{time_total}s\n"
+
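+Name lookup and TLS handshake have their own -w variables (a sketch):
+
+ curl https://example.com -o /dev/null -w "dns=%{time_namelookup}s, tls=%{time_appconnect}s\n"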
+
+## Sources
+
+- [time trace](https://stackoverflow.com/a/18215566/4415884)
+
diff --git a/doc/note/docker/Docker-Daemon-Install.txt b/doc/note/docker/Docker-Daemon-Install.txt
index c6a120a..1bfa6bb 100644
--- a/doc/note/docker/Docker-Daemon-Install.txt
+++ b/doc/note/docker/Docker-Daemon-Install.txt
@@ -27,10 +27,10 @@ section (HINT: "/etc/environment" does not work)
[service]
...
- Environment="HTTP_PROXY=http://10.0.2.2:31280"
- Environment="http_proxy=http://10.0.2.2:31280"
- Environment="HTTPS_PROXY=http://10.0.2.2:31280"
- Environment="https_proxy=http://10.0.2.2:31280"
+ Environment="HTTP_PROXY=http://10.0.2.2:3128"
+ Environment="http_proxy=http://10.0.2.2:3128"
+ Environment="HTTPS_PROXY=http://10.0.2.2:3128"
+ Environment="https_proxy=http://10.0.2.2:3128"
Environment="NO_PROXY=127.0.0.1,10.0.2.2,*.post.ch"
Environment="no_proxy=127.0.0.1,10.0.2.2,*.post.ch"
diff --git a/doc/note/ffmpeg/ffmpeg.txt b/doc/note/ffmpeg/ffmpeg.txt
index 072faa7..c992537 100644
--- a/doc/note/ffmpeg/ffmpeg.txt
+++ b/doc/note/ffmpeg/ffmpeg.txt
@@ -32,6 +32,18 @@ Use -codec:v copy to keep video, or -codec:v no for audio-only.
-filter:a lowpass=f=16000
+## Fix Loudness war, bring bass back
+
+ -af "equalizer=f=200:w=200:t=h:g=-9,equalizer=f=400:w=600:t=h:g=-9,equalizer=f=2000:w=2000:t=h:g=-12,equalizer=f=4000:w=4000:t=h:g=-12,equalizer=f=10000:w=10000:t=h:g=-12,volume=+0.0dB"
+
+
+## Audio trim
+
+HINT: Repeat afade twice to approximate a logarithmic fade.
+
+ -af "afade=t=in:d=500ms,afade=t=out:st=183427ms:d=500ms" \
+
+
## Record Desktop
ffmpeg -f gdigrab -framerate 6 -probesize 10M -offset_x 0 -offset_y 0 \
@@ -45,6 +57,13 @@ Use -codec:v copy to keep video, or -codec:v no for audio-only.
-i INFILE -ss <pos> -to <pos> OUTFILE
+## Concatenate
+
+ && ffmpeg -i one.mkv -i two.mkv \
+ -filter_complex '[0:v] [0:a] [1:v] [1:a] concat=n=2:v=1:a=1 [v] [a]' \
+ -map "[v]" -map "[a]" out.mkv \
+
+
## Rotate Portrait
-i INFILE -vf "transpose=2" OUTFILE
diff --git a/doc/note/firefox/firefox.txt b/doc/note/firefox/firefox.txt
index e1e2999..9c48e29 100644
--- a/doc/note/firefox/firefox.txt
+++ b/doc/note/firefox/firefox.txt
@@ -2,6 +2,7 @@
Firefox
================
+
## Install plugin quickNdirty until restart
- Visit "about:debugging"
@@ -12,6 +13,7 @@ Firefox
For refresh, there is a button on the same page to reload the plugin.
+
## Create an XPI file (eg for distribution)
"manifest.json" MUST be in top level dir inside ZIP.
@@ -19,6 +21,7 @@ For refresh, there is a button on the same page to reload the plugin.
zip my.xpi manifest.json main.js
+
## Distribute via self-hosting
Package MUST be signed by "addons.mozilla.org" (Except for ESR or dev
@@ -27,3 +30,9 @@ firefox builds)
XPI file can be drag-n-drop to FF to trigger install dialog. Or via gear
icon "install from file".
+
+
+## Install native manifest (linux)
+
+"~/.mozilla/native-messaging-hosts/<name>.json"
+
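+A minimal manifest sketch (all values are placeholders; "allowed_extensions"
+must list the add-on id from its manifest.json):
+
+ {
+   "name": "my_host",
+   "description": "Example native messaging host",
+   "path": "/home/me/bin/my_host",
+   "type": "stdio",
+   "allowed_extensions": [ "gaga-plugin@example.org" ]
+ }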
diff --git a/doc/note/gdb/gdb.txt b/doc/note/gdb/gdb.txt
new file mode 100644
index 0000000..20cbd4d
--- /dev/null
+++ b/doc/note/gdb/gdb.txt
@@ -0,0 +1,10 @@
+
+## Print next few ASM instructions
+
+ x/3i $pc
+
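+To have them re-printed automatically after each step, use gdb's 'display'
+with the same format:
+
+ display/3i $pc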
+
+## Sources
+
+- [Print asm instructions](https://stackoverflow.com/a/59331366/4415884)
+
diff --git a/doc/note/gpg/gpg.txt b/doc/note/gpg/gpg.txt
index 11721f0..5580a13 100644
--- a/doc/note/gpg/gpg.txt
+++ b/doc/note/gpg/gpg.txt
@@ -67,6 +67,36 @@ you're doing! If you don't, you MUST NOT use those instructions!
gpgwin --sign-key foreignUser@example.com
+## Extend detached subkey expiry
+
+PS: Why is this so fu***** damn complicated! Anyone still wondering why this
+ system is not used by most humans?!? Please STOP producing so uselessly
+ complicated software!
+
+ cd "${WORKDIR:?}"
+ mkdir master
+ (cd "${OFFHOME:?}/.gnupg" && tar c $(ls -A)) | (cd master && tar x)
+ export GNUPGHOME="${WORKDIR:?}/master"
+ gpg --list-secret-keys --with-keygrip --keyid-format=long --with-fingerprint --with-subkey-fingerprint --list-options show-unusable-subkeys
+ gpg --edit-key SEC_KEY
+ expire
+ save
+ gpg --edit-key SSB_KEY
+ key 1
+ key 2
+ expire
+ save
+ NOW=$(date -u +%Y%m%d-%H%MZ)
+ gpg --export-secret-subkeys F00! BA5! > subkey-${NOW:?}.sec.gpg
+ gpg --export F00! BA5! > subkey-${NOW:?}.pub.gpg
+ gpg --list-packets subkey-${NOW:?}.sec.gpg
+ gpg --list-packets subkey-${NOW:?}.pub.gpg
+ tar --owner=0 --group=0 -c subkey-${NOW:?}.*.gpg | (cd "${OFFHOME:?}" && sudo tar x)
+ md5sum -b subkey-${NOW:?}.*.gpg | sudo tee -a "${OFFHOME:?}/MD5SUM"
+ echo "After import, you'd likely want to change phrase away from master"
+ gpg --edit-key foo@example.com passwd quit
+
+
## Use keys with throw-away keyring
GNUPGHOME="/tmp/foo/"
@@ -85,4 +115,7 @@ you're doing! If you don't, you MUST NOT use those instructions!
gpgconf --kill gpg-agent
gpgconf --launch gpg-agent
+[windoof: Why does git complain that no GPG agent is running?](https://superuser.com/a/1663941/1123359) says:
+ gpg-connect-agent reloadagent /bye
+
diff --git a/doc/note/java/java.txt b/doc/note/java/java.txt
new file mode 100644
index 0000000..8dc01a2
--- /dev/null
+++ b/doc/note/java/java.txt
@@ -0,0 +1,7 @@
+
+Java / JVM
+================
+
+ --add-opens java.base/java.lang=ALL-UNNAMED
+
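+Usage sketch (the flag goes on the java command line before -jar/-cp;
+module/package here is just the common example, "my-app.jar" a placeholder):
+
+ java --add-opens java.base/java.lang=ALL-UNNAMED -jar my-app.jar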
+
diff --git a/doc/note/links/links.txt b/doc/note/links/links.txt
index 5564be3..049363c 100644
--- a/doc/note/links/links.txt
+++ b/doc/note/links/links.txt
@@ -83,6 +83,7 @@ Links (Aka argument amplifiers)
- SRP "https://blog.ndepend.com/solid-design-the-single-responsibility-principle-srp/"
- OCP "https://blog.ndepend.com/solid-design-the-open-close-principle-ocp/"
- LSP "https://blog.ndepend.com/solid-design-the-liskov-substitution-principle/"
+- ISP "https://blog.ndepend.com/solid-design-the-interface-segregation-principle-isp/"
- DIP "https://stackify.com/dependency-inversion-principle/#post-18184-_nuqaxpnmvpn7"
## Java how to handle InterruptedException:
@@ -95,6 +96,9 @@ Links (Aka argument amplifiers)
- "https://jira.post.ch/browse/SDCISA-5624"
- "https://gitit.post.ch/projects/ISA/repos/halfrunt/pull-requests/27/overview?commentId=105541"
+## Java dropping exception stack traces is a "feature"
+- [](https://stackoverflow.com/a/3010106/4415884)
+
## Please don't never not avoid nevative (un)logic
- "https://schneide.blog/tag/boolean-statements/"
@@ -105,6 +109,7 @@ Links (Aka argument amplifiers)
- "https://medium.com/humans-create-software/composition-over-inheritance-cb6f88070205"
- "https://softwareengineering.stackexchange.com/a/371715/306800"
- "https://youtu.be/wfMtDGfHWpA"
+- [Damn! Use it!](https://gitit.post.ch/projects/ISA/repos/bangbang/pull-requests/5/overview?commentId=316867)
## requirements, experts, business:
- "https://www.youtube.com/watch?v=BKorP55Aqvg"
@@ -130,6 +135,9 @@ Links (Aka argument amplifiers)
- [case slf4j](http://www.slf4j.org/api/org/slf4j/Logger.html)
- [General rules](https://gualtierotesta.github.io/java/tutorial-correct-slf4j-logging-usage-and-how-to-check-it/)
- [logging guards](https://stackoverflow.com/a/12953090/4415884)
+- [impl VS facade in lib](https://jira.post.ch/browse/SDCISA-15223)
+- [drop logger impl from lib](https://github.com/swisspost/vertx-redisques/pull/153)
+- [Should my library attempt to configure logging?](https://www.slf4j.org/faq.html#configure_logging)
## Misleading log msg messages
- "https://gitit.post.ch/projects/ISA/repos/zarquon/pull-requests/2/overview?commentId=61283"
@@ -152,6 +160,8 @@ Links (Aka argument amplifiers)
## Code Style format auto-formatters
- [warning about formatters](https://gitit.post.ch/projects/ISA/repos/trin/pull-requests/79/overview?commentId=235667)
+- [Linter produces crap](https://gitit.post.ch/projects/ISA/repos/preflux/pull-requests/567/overview?commentId=237627)
+- [Linter produces crap js if](https://gitit.post.ch/projects/ISA/repos/preflux/pull-requests/598/overview?commentId=252867)
- [static final java uppercase](https://gitit.post.ch/projects/ISA/repos/preflux/pull-requests/82/overview?commentId=39126)
- [invalid java class name](https://gitit.post.ch/projects/ISA/repos/preflux/pull-requests/82/overview?commentId=39125)
- [spotless produces crap](https://gitit.post.ch/projects/ISA/repos/poodoo/pull-requests/40/overview?commentId=263122)
@@ -160,8 +170,6 @@ Links (Aka argument amplifiers)
- [spotless produces crap](https://gitit.post.ch/projects/ISA/repos/trin/pull-requests/79)
- [spotless produces crap](https://gitit.post.ch/projects/ISA/repos/houston/pull-requests/449/overview?commentId=263593)
- [boolean expression formatting](https://gitit.post.ch/projects/ISA/repos/houston/pull-requests/461/overview?commentId=284022)
-- [Linter produces crap](https://gitit.post.ch/projects/ISA/repos/preflux/pull-requests/567/overview?commentId=237627)
-- [Linter produces crap js if](https://gitit.post.ch/projects/ISA/repos/preflux/pull-requests/598/overview?commentId=252867)
- [Suddenly NEW formatting rules in PaISA since 2021](https://gitit.post.ch/projects/ISA/repos/watson/pull-requests/1/overview?commentId=234597)
- "https://gitit.post.ch/projects/ISA/repos/zarniwoop/pull-requests/20/overview?commentId=85912"
- "https://gitit.post.ch/projects/ISA/repos/zarniwoop/pull-requests/21/overview?commentId=87250"
@@ -201,6 +209,7 @@ Links (Aka argument amplifiers)
## Java Memory
- "youtube.com/watch?v=f2aNWtt0QRo"
- jvm GC statistics "https://stackoverflow.com/a/467366"
+- [sizeof(java.lang.Object)](https://stackoverflow.com/a/258150/4415884)
## Yaml Is Bullshit
- "https://www.arp242.net/yaml-config.html#can-be-hard-to-edit-especially-for-large-files"
@@ -214,11 +223,9 @@ Links (Aka argument amplifiers)
- "https://stackoverflow.com/a/20177092/4415884"
- "https://github.com/swisspush/gateleen/pull/426#discussion_r813752075"
- "https://github.com/swisspush/gateleen/blob/v1.1.61/gateleen-kafka/src/main/java/org/swisspush/gateleen/kafka/KafkaMessageSender.java#L21"
-- performance long "https://m.youtube.com/watch?v=x5akmCWgGY0"
-- think please ... "https://m.youtube.com/watch?v=hSfylUXhpkA"
-
-## The Only way to Format Dates ISO 8601
-- "https://xkcd.com/1179/"
+- [How to use java fancy streams](https://m.youtube.com/watch?v=x5akmCWgGY0)
+- [think please ...](https://m.youtube.com/watch?v=hSfylUXhpkA)
+- [java streams are ugly](https://gitit.post.ch/projects/ISA/repos/nsync/pull-requests/55/overview?commentId=328210)
## Backward compatibility, Breaking Changes
- "https://www.redstar.be/backward-compatibility-in-software-development-what-and-why/"
@@ -227,30 +234,45 @@ Links (Aka argument amplifiers)
- [thor-DasUnheilNaht](https://gitit.post.ch/projects/ISA/repos/fis-masterdata-api/pull-requests/17/overview?commentId=227703)
- [thor-DerBlizHatEingeschlagen](https://gitit.post.ch/projects/ISA/repos/fis-masterdata-api/pull-requests/18/overview)
- [PaISA api new enum values](https://gitit.post.ch/projects/ISA/repos/fis-control-api/pull-requests/14/overview?commentId=296012)
+- [Keep APIs scope narrow as possible](https://gitit.post.ch/projects/ISA/repos/timetable-reservation-api/pull-requests/12/overview?commentId=327819)
## Performance DOES matter
- "https://github.com/swisspush/gateleen/pull/456#discussion_r844865066"
- [Performance Excuses Debunked](https://m.youtube.com/watch?v=x2EOOJg8FkA)
-- [Is writing performant code too expensive?](https://m.youtube.com/watch?v=EpYr3T5VP6w)
-- [Simple Code, High Performance](https://m.youtube.com/watch?v=Ge3aKEmZcqY)
+- [Is writing performant code too expensive?](https://www.youtube.com/watch?v=EpYr3T5VP6w&t=1109)
+- [Simple Code, High Performance](https://m.youtube.com/watch?v=Ge3aKEmZcqY&t=78)
- [Houston Last führt zu Neustart](https://wikit.post.ch/x/HDV8T)
- [Houston storage request timed out large json](https://jira.post.ch/browse/SDCISA-11294)
- [Preflux Garbage Collection issues](https://jira.post.ch/browse/SDCISA-4714)
- [Preflux Gatherfacts läuft in Timeout](https://jira.post.ch/browse/SDCISA-8136)
+- [Performance Fahrplanimports](https://jira.post.ch/browse/SDCISA-11528)
- [Houston Optimize EnqueuePatrol](https://jira.post.ch/browse/SDCISA-2876)
- [Update beim Fahrzeughersteller dauert zu lange](https://jira.post.ch/browse/SDCISA-9059)
- [vortex too slow](https://jira.post.ch/browse/SDCISA-9990)
+- [2023-10-27 OOM nun auch auf Eagle](https://wikit.post.ch/x/c2U1Tw)
+- [Jenkinsbuild too slow](https://jira.post.ch/browse/SDCISA-14313?focusedId=1914236&page=com.atlassian.jira.plugin.system.issuetabpanels:comment-tabpanel#comment-1914236)
+- [Houston check too slow](https://jira.post.ch/browse/SDCISA-13746?focusedId=1937167&page=com.atlassian.jira.plugin.system.issuetabpanels:comment-tabpanel#comment-1937167)
## Performance is not an issue ...
- [Houston OOM 2023-06-27](https://wikit.post.ch/x/_Bv6Rw)
- [Houston OOM 2023-01-20](https://wikit.post.ch/x/iRepPQ)
- [Houston OOM Killed](https://jira.post.ch/browse/SDCISA-10871)
+- [SDCISA-14967 Houston collects all req bodies into memory](https://jira.post.ch/browse/SDCISA-14967)
- [http cache disable](https://gitit.post.ch/projects/ISA/repos/bangbang/pull-requests/1/overview?commentId=287832)
+- [How to repair KISS for performance](https://gitit.post.ch/projects/ISA/repos/preflux/pull-requests/304/diff)
+- [Houston readyness fails often](https://jira.post.ch/browse/SDCISA-13746?focusedId=1899551&page=com.atlassian.jira.plugin.system.issuetabpanels:comment-tabpanel#comment-1899551)
+- [Just one message per minute](https://gitit.post.ch/projects/ISA/repos/eagle/pull-requests/375/overview?commentId=330543)
+- [Houston down readyness probe timeout](https://wikit.post.ch/x/koO0Vg)
## Common Performance
-- [Optimize code by doing less](https://m.youtube.com/watch?v=5rb0vvJ7NCY)
+- [Java Exceptions performance is bad](https://www.baeldung.com/java-exceptions-performance)
+- [going fast is about doing less](https://m.youtube.com/watch?v=5rb0vvJ7NCY)
- [CppCon Tuning Benchmarks clang CPUs Compilers" ](https://m.youtube.com/watch?v=nXaxk27zwlk)
+## Errorhandling is not needed ...
+- [OOM exit code 137 9 sigkill houston openshift pod](https://jira.post.ch/browse/SDCISA-13746?focusedId=1925526&page=com.atlassian.jira.plugin.system.issuetabpanels:comment-tabpanel#comment-1925526)
+
## Bugs are not an issue
- [Bistr gateleen Forwarder exception](https://jira.post.ch/browse/SDCISA-11147)
@@ -260,6 +282,7 @@ Links (Aka argument amplifiers)
## How to API design
- "https://m.youtube.com/watch?v=2xgplCQS1bY"
- [How to migrate an API properly via migration path](https://wikit.post.ch/x/pK1WJQ)
+- [What "Software Architect" means](https://m.youtube.com/watch?v=rPJfadFSCyQ&t=900)
## Posix c API design
- "https://lucumr.pocoo.org/2013/8/18/beautiful-native-libraries/"
@@ -347,9 +370,10 @@ Links (Aka argument amplifiers)
- "https://pubs.opengroup.org/onlinepubs/009695399/utilities/xcu_chap02.html"
## YAGNI (but also KISS and DRY)
-- "https://medium.com/swlh/yagni-and-dry-the-kiss-of-death-for-your-software-project-cfd44b0654b6#fc82"
+- [YAGNI, KISS and DRY](https://medium.com/swlh/yagni-and-dry-the-kiss-of-death-for-your-software-project-cfd44b0654b6)
- [eagle queue json only](https://gitit.post.ch/projects/ISA/repos/eagle/pull-requests/331/overview?commentId=236944)
-- [Uncle Bob - Why Are Programmers slow](https://youtu.be/G6HyEeEcB-w)
+- [How to repair KISS](https://gitit.post.ch/projects/ISA/repos/preflux/pull-requests/304/diff)
+- [won't stream, bcause YAGNI](https://gitit.post.ch/projects/ISA/repos/bangbang/pull-requests/5/overview?commentId=316503)
## How to format method parameters
- "https://gitit.post.ch/projects/ISA/repos/god-backend/pull-requests/281/overview?commentId=210650"
@@ -386,10 +410,10 @@ Links (Aka argument amplifiers)
## java assert
- [how to enable](https://stackoverflow.com/a/68893479/4415884)
-- [What are they for](https://stackoverflow.com/a/298933/4415884)
+- [When to use them](https://softwareengineering.stackexchange.com/a/15518/306800)
- [What are they for](https://en.wikipedia.org/wiki/Assertion_(software_development)#Assertions_for_run-time_checking)
+- [What are they for](https://stackoverflow.com/a/298933/4415884)
- [How and when to use them](https://docs.oracle.com/javase/8/docs/technotes/guides/language/assert.html)
-- [When to use them](https://softwareengineering.stackexchange.com/questions/15515/when-to-use-assertions-and-when-to-use-exceptions)
- [I dont care](https://gitit.post.ch/projects/ISA/repos/preflux/pull-requests/505/overview?commentId=219173)
## Mensch verblödet, modern tech, IQ, dumm, test
@@ -413,12 +437,18 @@ Links (Aka argument amplifiers)
## Resilience limit upper bound
- [Thought OOM](https://jira.post.ch/browse/SDCISA-10021)
- [Thought DB streams](https://wikit.post.ch/pages/viewpage.action?pageId=993270063&focusedCommentId=993272727#comment-993272727)
+- [in-memory buffer OOM](https://gitit.post.ch/projects/ISA/repos/nsync/pull-requests/55/overview?commentId=324715)
+
+## Resilience reliable end-to-end transport
- [How To Prevent Data Loss On A Non-Reliable Transport Channel](https://wikit.post.ch/x/4y_nQg)
+- [Houston losing hook messages](https://jira.post.ch/browse/SDCISA-13346)
+- [Not interested in the fix](https://jira.post.ch/browse/SDCISA-11619?focusedId=1913186&page=com.atlassian.jira.plugin.system.issuetabpanels:comment-tabpanel#comment-1913186)
## Bugs, Frameworks, Dependencies include them all
- "https://medium.com/dinahmoe/escape-dependency-hell-b289de537403"
- "https://gitit.post.ch/projects/ISA/repos/preflux/pull-requests/119/overview?commentId=46245"
- [Stop Ductaping crap together](https://devrant.com/rants/5107044)
+- [JavaMelody OutOfMemory](https://wikit.post.ch/display/ISA/God+UI+ohne+Inhalt?focusedCommentId=1439580947#comment-1439580947)
## Input validation
- [WontDo](https://gitit.post.ch/projects/ISA/repos/preflux/pull-requests/512/overview?commentId=222672)
@@ -444,6 +474,7 @@ Links (Aka argument amplifiers)
## FileLogging is a MUST have (kibana is bullsh**)
- [example](https://jira.post.ch/browse/SDCISA-8382?focusedCommentId=1554435&page=com.atlassian.jira.plugin.system.issuetabpanels:comment-tabpanel#comment-1554435)
+- [example](https://jira.post.ch/browse/SDCISA-13655)
- [warning](https://jira.post.ch/browse/SDCISA-7230?focusedCommentId=1550476&page=com.atlassian.jira.plugin.system.issuetabpanels:comment-tabpanel#comment-1550476)
## Automatic variable dynamic version are evil
@@ -505,6 +536,8 @@ Links (Aka argument amplifiers)
## security
- [SDCISA-4808] Security flaw due to no-security-if-no-identity policy Open
- [Make ISA secure (again?)](https://wikit.post.ch/x/n984Mg)
+- [platform public IP exposed wont fix](https://jira.post.ch/browse/SDCISA-6998)
+- [houston public IP exposed](TODO)
## Bus Factor Faktor
- [Bus factor definition wikipedia](https://en.wikipedia.org/wiki/Bus_factor)
@@ -533,3 +566,32 @@ Links (Aka argument amplifiers)
## Qemu is Crap
- [Qemu for Windows Host Quirks](https://wonghoi.humgar.com/blog/2021/05/03/qemu-for-windows-host-quirks/)
+## Git paisa complain about rebase stuff
+- [Complain about force-pushes](https://gitit.post.ch/projects/ISA/repos/lazlar/pull-requests/3/overview?commentId=311142)
+
+## Angular is terrible
+- [Why angular sucks](https://medium.com/dirtyjs/why-angular-2-4-5-6-sucks-afb36567ad68)
+- [JS bloat everywhere](https://tonsky.me/blog/js-bloat/)
+
+## java try-with-resources behavior
+[Exception Scenarios for Java's try-with-resources](https://dev.to/moaxcp/exception-scenarios-for-java-s-try-with-resources-63m)
+
+[About TLS in isa](https://jira.post.ch/browse/SDCISA-14330?focusedId=1925001&page=com.atlassian.jira.plugin.system.issuetabpanels:comment-tabpanel#comment-1925001)
+
+## Tools like tcpdump are incredibly important
+- [tcpdump discovers the truth once more](https://jira.post.ch/browse/SDCISA-13746?focusedId=1939377&page=com.atlassian.jira.plugin.system.issuetabpanels:comment-tabpanel#comment-1939377)
+
+## MUST have an issue
+- [no-issue PR gets sued](https://gitit.post.ch/projects/ISA/repos/preflux/pull-requests/721/overview?commentId=349529)
+
+## Format Date ISO 8601, UTC GMT localtime
+- [public service announcement](https://xkcd.com/1179/)
+- [3 simple rules](https://dev.to/corykeane/3-simple-rules-for-effectively-handling-dates-and-timezones-1pe0)
+
+## (TODO: move these to a better place in here)
+- [how to handle vertx promise fail/errors properly](https://github.com/swisspost/vertx-redisques/pull/164#discussion_r1562105007)
+
+- [null VS empty](https://jira.post.ch/browse/SDCISA-14534)
+
+
+
diff --git a/doc/note/maven-pom/howto-ban-unwanted-sh__.txt b/doc/note/maven-pom/howto-ban-unwanted-sh__.txt
new file mode 100644
index 0000000..1edad9b
--- /dev/null
+++ b/doc/note/maven-pom/howto-ban-unwanted-sh__.txt
@@ -0,0 +1,41 @@
+
+
+ <project>
+ <dependencies>
+ <dependency>
+ <groupId>org.apache.logging.log4j</groupId>
+ <artifactId>log4j-slf4j-impl</artifactId>
+ <version>[0.0.0,)</version>
+ <scope>provided</scope>
+ </dependency>
+ </dependencies>
+ <plugins>
+ <plugin>
+ <groupId>org.apache.maven.plugins</groupId>
+ <artifactId>maven-enforcer-plugin</artifactId>
+ <!--<version>1.4.1</version>-->
+ <version>3.4.1</version>
+ <executions>
+ <execution>
+ <goals><goal>enforce</goal></goals>
+ <configuration>
+ <rules>
+ <bannedDependencies>
+ <excludes>
+ <exclude>org.apache.logging.log4j:log4j-slf4j-impl</exclude>
+ </excludes>
+ </bannedDependencies>
+ </rules>
+ </configuration>
+ </execution>
+ </executions>
+ </plugin>
+ </plugins>
+ <project>
+
+
+## Sources
+
+- [How to globally exclude mvn dependency](https://stackoverflow.com/a/39979760/4415884)
+- [How to yell about bannded dependencies](https://stackoverflow.com/a/46104531/4415884)
+
diff --git a/doc/note/maven/maven.txt b/doc/note/maven/maven.txt
index eb55ba0..a9fd248 100644
--- a/doc/note/maven/maven.txt
+++ b/doc/note/maven/maven.txt
@@ -4,14 +4,18 @@ Maven
mvn dependency:help -Ddetail=true
-mvn versions:set -DgenerateBackupPoms=false -DnewVersion=
+mvn versions:set -DgenerateBackupPoms=false -DallowSnapshots=true -DnewVersion=
mvn versions:set -DgenerateBackupPoms=false -DnextSnapshot
-mvn versions:update-parent -DparentVersion=
+mvn versions:update-parent -DgenerateBackupPoms=false -DallowDowngrade=true -DallowSnapshots=true -DforceUpdate=true -DskipResolution=true -DparentVersion=YOUR_VERSION
-mvn versions:set-property -Dproperty=foo.bar -DnewVersion=gugus
+mvn versions:set-property -DgenerateBackupPoms=false -DallowSnapshots=true -Dproperty=foo.bar -DnewVersion=gugus
+export MAVEN_OPTS="..."
+
+## Get project version without any other bullshit
+mvn help:evaluate -o -q -DforceStdout -Dexpression=project.version && echo
## Deploy paisa snapshot
mvn deploy -DaltDeploymentRepository=artifactory-snapshots::default::https://artifactory.tools.pnet.ch/artifactory/libs-snapshot-local
@@ -21,6 +25,18 @@ mvn deploy -DaltDeploymentRepository=artifactory-snapshots::default::https://art
mvn deploy -Dcmake.generate.skip=true -Dcmake.compile.skip=true -DaltDeploymentRepository=artifactory-releases::default::https://artifactory.tools.pnet.ch/artifactory/libs-release-local
+true \
+ && DEPLOPTS= \
+ && `# Deploy custom gateleen build 20240206` \
+ && mvn clean install -pl '!gateleen-hook-js,!gateleen-playground' \
+ && mvn deploy -DskipTests -pl '!gateleen-hook-js,!gateleen-playground' ${DEPLOPTS:?} \
+ && `# Deploy custom houston build 20240216` \
+ && jenkinsbuild-by-upstream \
+ #&& mvn clean install \
+ #&& mvn deploy -DskipTests ${DEPLOPTS:?} \
+ && true
+
+
## Run e2e locally
mvn verify -U -DSelBaseUrl=http://localhost:7012/apigateway/services/foo/index.html -Dskip.tests=false -Dserver.host=localhost -Dserver.port=7012 -Ptestsuite
@@ -29,6 +45,6 @@ mvn verify -U -DSelBaseUrl=http://localhost:7012/apigateway/services/foo/index.h
mvn dependency:go-offline
-(See als "https://maven.apache.org/plugins/maven-dependency-plugin/go-offline-mojo.html")
+[See also](https://maven.apache.org/plugins/maven-dependency-plugin/go-offline-mojo.html)
diff --git a/doc/note/mount/fstab.txt b/doc/note/mount/fstab.txt
new file mode 100644
index 0000000..de642ce
--- /dev/null
+++ b/doc/note/mount/fstab.txt
@@ -0,0 +1,13 @@
+
+fstab
+=====
+
+## Moving firefox cache to RAM
+
+Effect: Faster at runtime, slower at startup.
+
+fstab entry:
+none /home/YOURNAME/.cache/mozilla/firefox tmpfs noatime,noexec,users 0 0
+
+
+
diff --git a/doc/note/mount/mount.txt b/doc/note/mount/mount.txt
new file mode 100644
index 0000000..99e1521
--- /dev/null
+++ b/doc/note/mount/mount.txt
@@ -0,0 +1,8 @@
+
+## tmpfs / ramfs
+
+NOTE: ramfs ignores the 'size' option, so the system may run OOM instead;
+tmpfs enforces it (see sketch below).
+
+ mount -t ramfs -o size=1G ramfs /mnt/ramfs
+ chown -R $(whoami):$(whoami) /mnt/ramfs
+
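+Sketch of the tmpfs variant, which does enforce 'size' (but, unlike ramfs,
+can be swapped out):
+
+ mount -t tmpfs -o size=1G tmpfs /mnt/ramfs
+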
diff --git a/doc/note/nginx/nginx-wdoof.txt b/doc/note/nginx/nginx-wdoof.txt
index 2bf7a52..cc9c23d 100644
--- a/doc/note/nginx/nginx-wdoof.txt
+++ b/doc/note/nginx/nginx-wdoof.txt
@@ -23,6 +23,7 @@ http {
sendfile on;
keepalive_timeout 65;
server {
+ # For public access use "8080" and "[::]:8080"
listen 127.0.0.1:8080;
server_name localhost;
location / {
diff --git a/doc/note/nginx/nginx.txt b/doc/note/nginx/nginx.txt
index 2a15ae5..75f2431 100644
--- a/doc/note/nginx/nginx.txt
+++ b/doc/note/nginx/nginx.txt
@@ -1,4 +1,51 @@
+## Basic nginx config
+
+[looks promising](https://stackoverflow.com/a/73297125/4415884)
+
+ # Basic setup:
+ # - Maybe change "access_log" to "/var/log/nginx/access.log".
+ # - For CLI use: Change all "/tmp/nginx" to "." (single dot, aka workdir or
+ # other user writable dir).
+ # Public expose setup:
+ # - Adapt "listen" as commented.
+ # - set "server_name" to meaningful value.
+ #
+ #daemon off; # run in foreground (eg from cli)
+ #user www-data;
+ #worker_processes auto;
+ pid /run/nginx.pid;
+ events {}
+ http {
+ access_log /dev/stdout;
+ # Directories nginx needs configured to start up.
+ client_body_temp_path /tmp/nginx;
+ proxy_temp_path /tmp/nginx;
+ fastcgi_temp_path /tmp/nginx;
+ uwsgi_temp_path /tmp/nginx;
+ scgi_temp_path /tmp/nginx;
+ server {
+ # public access: "80" and "[::]:80"
+ # local access: "127.0.0.1:80" and "[::1]:80"
+ listen 127.0.0.1:80;
+ listen [::1]:80;
+ server_name localhost;
+ root /srv/www;
+ location /foo {
+ #autoindex on; # directory listing
+ try_files $uri $uri/ =404;
+ }
+ location /example {
+ return 200 "Example says hi";
+ }
+ }
+ }
+
+
+[tutorial](https://www.javatpoint.com/nginx-minimal-configuration)
+
+
+
## fCGI keep alive backend connections
upstream myFancyBackend {
diff --git a/doc/note/openshift/dbg-mem-issues.txt b/doc/note/openshift/dbg-mem-issues.txt
new file mode 100644
index 0000000..c730629
--- /dev/null
+++ b/doc/note/openshift/dbg-mem-issues.txt
@@ -0,0 +1,170 @@
+
+How to hunt memory issues in production
+=======================================
+
+true \
+ && SVCNAME=foo-prod \
+ && PID=42 \
+ && OC= \
+ && JMX= \
+ && MemLeakTry1="lua -W MemLeakTry1.lua" \
+ && dropPadding () { sed -E 's_ *; *_;_g'; } \
+ && getPodName () { ${OC:?} get pods | egrep ston-[0-9] | cut -d' ' -f1; } \
+ && true
+
+
+${OC:?} exec -ti "$(${OC:?} get pods|egrep ston-1|cut -f1 -d' ')" -- pmap 9 > "pmap/${SVCNAME:?}"-pmap-$(date -u +%Y%m%d-%H%M%S).txt
+
+true `# Track pod memory` \
+ && ${OC:?} exec -ti "$(${OC:?} get pods|grep ston-[1-9]|cut -f1 -d' ')" -- sh -c 'true \
+ && printf '\''h;PageSize;%s\n'\'' $(getconf PAGESIZE) \
+ && printf '\''c;%-24s;%8s;%8s;%8s;%5s;%4s;%3s;%8s;%3s;%7s\n'\'' When nThrds size RSS SHR text lib data dt nFds \
+ && while true; do true \
+ && printf '\''r;%s;%8s;%8d;%8d;%5d;%4d;%3d;%8d;%3d;%7d\n'\'' \
+ "$(date -Is)" \
+ $(cat /proc/'${PID:?}'/stat|cut -d" " -f20) \
+ $(cat /proc/'${PID:?}'/statm) \
+ $(ls -1 /proc/'${PID:?}'/fd | wc -l) \
+ && sleep $(expr 60 - \( $(date +%s) % 60 \)) || break; done' \
+ | tee "mem/${SVCNAME:?}"-mem-$(date +%Y%m%d-%H%M%S%z).csv
+
+true `# log JMX stuff` \
+ && grepUsed () { egrep 'used : ' | sed -r 's_^[^0-9]+ ([0-9]+) [^0-9]+$_\1_'; } \
+ && grepPureNumberLine () { egrep $(printf '^[0-9]+\r?$') | sed -r 's_^(.*)\r$_\1_'; } \
+ && (true \
+ && printf 'c; When ; JvmMetaspace; jvmNonHeap; JvmClassCnt; JvmHeap\n' \
+ && while true; do true \
+ && metaSpcByts="$(curl -sS "${JMX:?}"'/servers/0/domains/java.lang/mbeans/name=Metaspace,type=MemoryPool/attributes/Usage/' | grepUsed)" \
+ && jvmNonHeap="$(curl -sS "${JMX:?}"'/servers/0/domains/java.lang/mbeans/type=Memory/attributes/NonHeapMemoryUsage/' | grepUsed)" \
+ && ldClassCnt="$(curl -sS "${JMX:?}"'/servers/0/domains/java.lang/mbeans/type=ClassLoading/attributes/LoadedClassCount/' | grepPureNumberLine)" \
+ && jvmHeap="$(curl -sS "${JMX:?}"'/servers/0/domains/java.lang/mbeans/type=Memory/attributes/HeapMemoryUsage/' | grepUsed)" \
+ && printf 'r;%s;%13d;%11d;%12d;%11d\n' "$(date -Is)" "${metaSpcByts:?}" "${jvmNonHeap:?}" "${ldClassCnt:?}" "${jvmHeap:?}" \
+ && sleep $(expr 60 - \( $(date +%s) % 60 \)) || break \
+ ; done) \
+ | tee "jmx/houston-prod-jmx-Metaspace-used-$(date +%Y%m%d-%H%M%S).log" \
+ && true
+
+(true `# Fetch logs` \
+ && while true; do true \
+ && printf '%s - Fetch logs\n' "$(date -Is)" \
+ && ${OC:?} exec -i "$(getPodName)" -- sh -c 'cd /usr/local/vertx/logs && (tar -cz houston* || test $? -eq 1)' \
+ > "logs/${SVCNAME:?}-log-$(date -u +%Y%m%d-%H%M%SZ).tgz" \
+ && sleep $(expr 14400 - \( $(date +%s) % 14400 \)) || break; done \
+ && true)
+
+true `# Merge logs` \
+ && PREFX="houston-prod-log-" \
+ && SUFFX=".tgz" \
+ && for SRCTGZ in \
+ ${PREFX:?}20231110-160510Z${SUFFX:?} \
+ ${PREFX:?}20231110-181226Z${SUFFX:?} \
+ ${PREFX:?}20231114-093133Z${SUFFX:?} \
+ ${PREFX:?}20231114-120002Z${SUFFX:?} \
+ ${PREFX:?}20231114-123040Z${SUFFX:?} \
+ ${PREFX:?}20231114-160001Z${SUFFX:?} \
+ ${PREFX:?}20231116-082933Z${SUFFX:?} \
+ ${PREFX:?}20231116-120002Z${SUFFX:?} \
+ ${PREFX:?}20231116-160002Z${SUFFX:?} \
+ ${PREFX:?}20231117-081112Z${SUFFX:?} \
+ ${PREFX:?}20231117-120001Z${SUFFX:?} \
+ ${PREFX:?}20231117-164612Z${SUFFX:?} \
+ ; do true \
+ && echo "[INFO ] Create ${SRCTGZ%.*}.log" \
+ && tar xf ../logs/${SRCTGZ:?} \
+ && unzip houston.log.1.zip \
+ && cat houston.log.1 houston.log > "${SRCTGZ%.*}.log" \
+ && rm houston.log.1.zip houston.log.1 houston.log \
+ ;done && true \
+ && printf '%s' '
+ local newLogFileMerger = require("AndisLogUtils").newLogFileMerger
+ local merger = newLogFileMerger{
+ sources = {
+ io.open("houston-prod-log-20231110-160510Z.log", "r"),
+ io.open("houston-prod-log-20231110-181226Z.log", "r"),
+ io.open("houston-prod-log-20231114-093133Z.log", "r"),
+ io.open("houston-prod-log-20231114-120002Z.log", "r"),
+ io.open("houston-prod-log-20231114-123040Z.log", "r"),
+ io.open("houston-prod-log-20231114-160001Z.log", "r"),
+ io.open("houston-prod-log-20231116-082933Z.log", "r"),
+ io.open("houston-prod-log-20231116-120002Z.log", "r"),
+ io.open("houston-prod-log-20231116-160002Z.log", "r"),
+ io.open("houston-prod-log-20231117-081112Z.log", "r"),
+ io.open("houston-prod-log-20231117-120001Z.log", "r"),
+ io.open("houston-prod-log-20231117-164612Z.log", "r"),
+ },
+ snk = { write = function( t, buf, b, c ) io.stdout:write(buf) io.stdout:write("\n") end, },
+ }' | lua -W - | gzip -n > houston-log-merged-$(date -u +%Y%m%d-%H%M%S)Z.log.gz \
+ && true
+
+
+`# Create heap dump`
+com.sun.management.dumpHeap("/usr/local/vertx/houston-storage-file/houston-___-heap-2023____-____Z.hprof", true)
+
+`# Inspect`
+ocprod exec -i "$(ocprod get pods|egrep ston-[1-9]|cut -f1 -d' ')" -- sh -c 'cd /usr/local/vertx/houston-storage-file && ls -Ahl'
+
+true `# Get made heap dump` \
+ && echo create checksum. \
+ && ocprod exec -i "$(ocprod get pods|egrep ston-[1-9]|cut -f1 -d' ')" -- sh -c 'cd /usr/local/vertx/houston-storage-file && md5sum -b houston-*.hprof >> MD5SUM-$(date -u +%Y%m%d-%H%M%SZ)' \
+ && echo checksum done. Begin dload. \
+ && ocprod exec -i "$(ocprod get pods|egrep ston-[1-9]|cut -f1 -d' ')" -- sh -c 'cd /usr/local/vertx/houston-storage-file && tar c houston-*.hprof MD5SUM*' | (cd heapDump && tar x) \
+ && echo dload done \
+ && true
+
+true `# Probe` \
+ && stage=prod \
+ && logFile="houston-${stage:?}-ready-$(date -u +%Y%m%d-%H%M%SZ).log" \
+ && printf 'c; when ;rspCode; connectSec; trsfSec; totlSec; curlExit\n' | tee -a "${logFile:?}" \
+ && while true; do true \
+ && printf 'r;%s;%7d;%11.3f;%8.3f;%8.3f;%9d\n' \
+ $(date +%Y-%m-%dT%H:%M:%S%z) \
+ $(curl -sSw "%{http_code} %{time_connect} %{time_starttransfer} %{time_total}" "${houstonServerInfoUrl:?}" -o /dev/null || ex=$? && echo " $ex") \
+ | tee -a "${logFile:?}" \
+ && sleep $(expr 60 - $(date +%s) % 60) || break \
+ ;done \
+ && true
+
+true \
+ && ONE="houston-prod-pmap-20231102-163425.txt" \
+ && TWO="houston-prod-pmap-20231103-074301.txt" \
+ && diff -U0 "${ONE:?}" "${TWO:?}" | egrep '^\+' | sed -r 's_\+([^ ]+) .*$_\1_'|sort|uniq \
+ && true
+
+(true \
+ && for F in $(ls *pmap*.txt); do true \
+ && printf "$F\n" \
+ && DATE="$(date +%s -d "$(echo $F|sed -r 's_.*([0-9]{4})([0-9]{2})([0-9]{2})-([0-9]{2})([0-9]{2})([0-9]{2}).*_\1-\2-\3T\4:\5:\6Z_')")" \
+ && <"$F" ${MemLeakTry1:?} --date "${DATE:?}" > "${F%.*}.csv" \
+ ;done)
+
+
+
+
+
+
+
+Incident on INT 20231124
+
+
+
+`# Create heap dump`
+com.sun.management.dumpHeap("/usr/local/vertx/houston-storage-file/houston-___-heap-2023____-____Z.hprof", true)
+
+`# Inspect`
+${OC:?} exec -i "$(${OC:?} get pods|egrep ston-[1-9]|cut -f1 -d' ')" -- sh -c 'cd /usr/local/vertx/houston-storage-file && ls -Ahl'
+
+true `# Get made heap dump` \
+ && if test ! -d heapDump; then echo "Dir heapDump missing"; false ;fi \
+ && echo create checksum. \
+ && ${OC:?} exec -i "$(${OC:?} get pods|egrep ston-[1-9]|cut -f1 -d' ')" -- sh -c 'cd /usr/local/vertx/houston-storage-file && md5sum -b houston-*.hprof >> MD5SUM-$(date -u +%Y%m%d-%H%M%SZ)' \
+ && echo checksum done. Begin dload. \
+ && ${OC:?} exec -i "$(${OC:?} get pods|egrep ston-[1-9]|cut -f1 -d' ')" -- sh -c 'cd /usr/local/vertx/houston-storage-file && tar c houston-*.hprof MD5SUM*' | (cd heapDump && tar x) \
+ && echo dload done \
+ && true
+
+
+
+
+
+
+
diff --git a/doc/note/openshift/openshift.txt b/doc/note/openshift/openshift.txt
index e807da3..88e33ee 100644
--- a/doc/note/openshift/openshift.txt
+++ b/doc/note/openshift/openshift.txt
@@ -77,10 +77,18 @@ HINT: ALL files from Current dir (.) will get uploaded (when global rsync not av
## up/down scale from cli. input von thom (20230815)
- oc scale dc/preflux --replicas=1
+ oc scale dc/${SVCNAME:?} --replicas=1
-## TODO what was this for?
- oc get pvc
+
+## Kube Probe
+
+ echo && ocprod exec -ti "$(ocprod get pods|egrep ston-[0-9]|cut -f1 -d' ')" -- sh -c 'true \
+ && printf "c; When ; rsp_code; time_connect; time_redirect; time_starttransfer; time_total\n" \
+ && while true; do true \
+ && now=$(date -uIs) \
+ && curl -sS -o/dev/null -w "r; $(date -uIs); %{response_code}; %{time_connect}s; %{time_redirect}s; %{time_starttransfer}s; %{time_total}s\n" 127.0.0.1:7012/houston/server/info \
+ && sleep 5 || break \
+ ;done' | tee -a C:/work/tmp/houston-prod-inPod-probe.log
diff --git a/doc/note/pdf/pdfToPng.txt b/doc/note/pdf/pdfToPng.txt
new file mode 100644
index 0000000..234b1ea
--- /dev/null
+++ b/doc/note/pdf/pdfToPng.txt
@@ -0,0 +1,3 @@
+
+ && pdftoppm -f 1 -t 1 -png input.pdf > output.pdf \
+
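+Sketch for all pages (pdftoppm derives numbered output file names from the
+given prefix "page"):
+
+ && pdftoppm -png input.pdf page \
+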
diff --git a/doc/note/qemu/build-cJSON.txt b/doc/note/qemu/build-cJSON.txt
new file mode 100644
index 0000000..0e8d0df
--- /dev/null
+++ b/doc/note/qemu/build-cJSON.txt
@@ -0,0 +1,76 @@
+
+### Debian native
+true \
+ && PKGS_TO_ADD="ca-certificates curl gcc libc6-dev" \
+ && SUDO=sudo \
+ && PKGINIT="$SUDO apt update" \
+ && PKGADD="$SUDO apt install -y --no-install-recommends" \
+ && PKGCLEAN="$SUDO apt clean" \
+ && HOST= \
+ && true
+
+
+### Alpine mingw cross
+true \
+ && PKGS_TO_ADD="curl mingw-w64-gcc tar" \
+ && SUDO="/home/$USER/.local/bin/sudo" \
+ && PKGINIT=true \
+ && PKGADD="$SUDO apk add" \
+ && PKGCLEAN="$SUDO apk clean" \
+ && HOST=x86_64-w64-mingw32 \
+ && true
+
+
+# Generic
+true \
+ && CJSON_VERSION="1.7.15" \
+ && CACHE_DIR="/var/tmp" \
+ && true
+
+
+## Make
+true \
+ && CJSON_URL="https://github.com/DaveGamble/cJSON/archive/refs/tags/v${CJSON_VERSION:?}.tar.gz" \
+ && CJSON_SRCTGZ="${CACHE_DIR:?}/cJSON-${CJSON_VERSION:?}.tgz" \
+ && CJSON_BINTGZ="${CJSON_SRCTGZ%.*}-bin.tgz" \
+ && ${PKGINIT:?} && ${PKGADD:?} ${PKGS_TO_ADD} \
+ && if test -n "$HOST"; then HOST_="${HOST:?}-" ;fi \
+ && if test ! -e "${CJSON_SRCTGZ:?}"; then (true \
+ && echo "Download \"${CJSON_URL:?}\"" \
+ && curl -sSLo "${CJSON_SRCTGZ:?}" "${CJSON_URL:?}" \
+ );fi \
+ && if test ! -e "${CJSON_BINTGZ:?}"; then (true \
+ && printf '\n Build cJSON\n\n' \
+ && tar xzf "${CJSON_SRCTGZ:?}" \
+ && cd "cJSON-${CJSON_VERSION:?}" \
+ && mkdir build build/obj build/lib build/include \
+ && CFLAGS="-Wall -pedantic -fPIC" \
+ && ${HOST_}cc $CFLAGS -c -o build/obj/cJSON.o cJSON.c \
+ && ${HOST_}cc $CFLAGS -shared -o build/lib/libcJSON.so.${CJSON_VERSION:?} build/obj/cJSON.o \
+ && unset CFLAGS \
+ && (cd build/lib \
+ && MIN=${CJSON_VERSION%.*} && MAJ=${MIN%.*} \
+ && ln -s libcJSON.so.${CJSON_VERSION:?} libcJSON.so.${MIN:?} \
+ && ln -s libcJSON.so.${MIN:?} libcJSON.so.${MAJ} \
+ ) \
+ && ${HOST_}ar rcs build/lib/libcJSON.a build/obj/cJSON.o \
+ && cp -t build/. LICENSE README.md \
+ && cp -t build/include/. cJSON.h \
+ && rm build/obj -rf \
+ && (cd build \
+ && find -type f -not -name MD5SUM -exec md5sum -b {} + > MD5SUM \
+ && tar --owner=0 --group=0 -czf "${CJSON_BINTGZ:?}" * \
+ && md5sum -b "${CJSON_BINTGZ:?}" > "${CJSON_BINTGZ:?}.md5" \
+ ) \
+ && cd .. && rm -rf "cJSON-${CJSON_VERSION:?}" \
+ );fi \
+ && printf '\n DONE\n\n'
+
+
+## Install
+true \
+ && $SUDO tar -C "${INSTALL_ROOT:?}" -xzf "${CJSON_BINTGZ:?}" \
+ && true
+
+
+
diff --git a/doc/note/qemu/build-gateleen.txt b/doc/note/qemu/build-gateleen.txt
new file mode 100644
index 0000000..c29fcdc
--- /dev/null
+++ b/doc/note/qemu/build-gateleen.txt
@@ -0,0 +1,79 @@
+
+
+### Alpine
+true \
+ && PKGS_TO_ADD="curl maven nodejs npm redis openjdk11-jre-headless" \
+ && SUDO="${HOME:?}/.local/bin/mysudo" \
+ && PKGINIT=true \
+ && PKGADD="$SUDO apk add" \
+ && PKGCLEAN=true \
+ && mkdir -p "${HOME:?}/.local/bin" \
+ && printf '%s\n' '#!/bin/sh' 'printf "Sudo "' 'su root -c "$(echo "$@")"' > "${HOME:?}/.local/bin/mysudo" \
+ && chmod u+x "${HOME:?}/.local/bin/mysudo" \
+ && true
+
+
+### Generic
+true \
+ && GATELEEN_GIT_TAG="v1.3.28" \
+ && WORKDIR="/${HOME:?}/work" \
+ && CACHE_DIR="/var/tmp" \
+ && true
+
+
+## Setup Dependencies & get sources
+true \
+ && ${PKGINIT:?} && ${PKGADD:?} $PKGS_TO_ADD \
+ \
+ && curl -sSL https://github.com/swisspush/gateleen/archive/refs/tags/"${GATELEEN_GIT_TAG:?}".tar.gz > "${CACHE_DIR:?}/gateleen-${GATELEEN_GIT_TAG:?}.tgz" \
+ && true
+
+
+### Make
+true \
+ && mkdir -p "${WORKDIR:?}/gateleen" && cd "${WORKDIR:?}/gateleen" \
+ && tar --strip-components 1 -xf "${CACHE_DIR:?}/gateleen-${GATELEEN_GIT_TAG:?}.tgz" \
+ && (cd gateleen-hook-js && npm install) \
+ && mkdir -p gateleen-hook-js/node/node_modules/npm/bin \
+ && ln -s /usr/bin/node gateleen-hook-js/node/node \
+ && printf "require('/usr/lib/node_modules/npm/lib/cli.js')\n" | tee gateleen-hook-js/node/node_modules/npm/bin/npm-cli.js >/dev/null \
+ && mvn install -PpublicRepos -DskipTests -Dskip.installnodenpm -pl gateleen-hook-js \
+ && mvn install -PpublicRepos -DfailIfNoTests=false \
+ -pl '!gateleen-test,!gateleen-hook-js' \
+ '-Dtest=!ReleaseLockLuaScriptTests,!RedisCacheStorageTest,!DeltaHandlerTest,!QueueCircuitBreakerCloseCircuitLuaScriptTests,!QueueCircuitBreakerGetAllCircuitsLuaScriptTests,!QueueCircuitBreakerHalfOpenCircuitsLuaScriptTests,!QueueCircuitBreakerReOpenCircuitLuaScriptTests,!QueueCircuitBreakerUpdateStatsLuaScriptTests,!RemoveExpiredQueuesLuaScriptTests,!StartQueueTimerLuaScriptTests' \
+ && mkdir "${WORKDIR:?}/classpath" \
+ && (cd gateleen-playground && mvn dependency:copy-dependencies \
+ -DexcludeScope=provided -DoutputDirectory="${WORKDIR:?}/classpath/.") \
+ && cp gateleen-playground/target/gateleen-playground-*.jar "${WORKDIR:?}/classpath/." \
+ && mkdir "${WORKDIR:?}/etc" "${WORKDIR:?}/redis-state" \
+ && printf >"${WORKDIR:?}/etc/redis.conf" '%s\n' \
+ 'save ""' \
+ 'appendonly yes' \
+ 'appendfilename appendonly.aof' \
+ && `# Squeeze those funny "static files" into redis` \
+ && (cd "${WORKDIR:?}/redis-state" && redis-server "${WORKDIR:?}/etc/redis.conf" \
+ & java -cp "${WORKDIR:?}/classpath/"'*' org.swisspush.gateleen.playground.Server \
+ & sleep 3 \
+ ) \
+ && (cd "${WORKDIR:?}/gateleen" && mvn deploy -PuploadStaticFiles) \
+ && (pkill -INT java || sleep 3 && pkill -TERM java || sleep 3 && pkill -9 java) \
+ && pkill -INT redis-server \
+ && $PKGDEL $PKGS_TO_DEL \
+ && $PKGCLEAN \
+ && sleep 3 \
+ && (cd "${WORKDIR:?}/gateleen" && mvn clean) \
+ && printf '\n DONE\n\n' \
+ && true
+
+
+### Run
+true \
+ && ip a | grep inet \
+ && (true \
+ && (cd "${WORKDIR:?}/redis-state" && redis-server "${WORKDIR:?}/etc/redis.conf") \
+ & true \
+ && cd ~ \
+ && java -cp "${WORKDIR:?}/classpath/"'*' org.swisspush.gateleen.playground.Server \
+ ) \
+ && true
+
diff --git a/doc/note/qemu/build-jssc.txt b/doc/note/qemu/build-jssc.txt
new file mode 100644
index 0000000..3acdf6e
--- /dev/null
+++ b/doc/note/qemu/build-jssc.txt
@@ -0,0 +1,41 @@
+
+This is only another copy. Likely we should use the one at
+https://github.com/hiddenalpha/jssc/blob/master/contrib/hiddenalpha-buildEnv-one
+.
+
+true `# Configure for debian 9` \
+ && CXX="g++" \
+ && CFLAGS="-fPIC -Wall -pedantic -Werror \
+ -Wno-error=long-long \
+ -Wno-error=sign-compare \
+ -Wno-error=variadic-macros \
+ -Wno-long-long" \
+ && targets="linux_64" \
+ && SUDO= \
+ && true
+
+true `# Setup` \
+ && $SUDO apt install -y --no-install-recommends \
+ git openjdk-8-jdk-headless g++ maven \
+ && true
+
+true `# Make` \
+ && cat contrib/hiddenalpha-buildEnv-one/res/pom.patch | git apply \
+ && mvn clean \
+ && mvn -PnoCmake compile \
+ && printf '%s "%s"\n' "#define JSSC_VERSION" "$(git describe --tags|sed 's,^v,,')" \
+ > src/main/cpp/version.h \
+ && mkdir -p src/main/resources-precompiled/natives/linux_64 \
+ && g++ $CFLAGS -shared \
+ -o src/main/resources-precompiled/natives/linux_64/libjssc.so \
+ src/main/cpp/_nix_based/jssc.cpp \
+ -I/usr/lib/jvm/java-1.8.0-openjdk-amd64/include \
+ -I/usr/lib/jvm/java-1.8.0-openjdk-amd64/include/linux \
+ -Isrc/main/cpp \
+ && for T in ${targets:?}; do
+ && mvn -PnoCmake -PnoJavah -PnativeJar -P"${T:?}" package \
+ ;done \
+ && mvn -PnoCmake -PnoJavah -PnoNatives -PwithTestClasspath verify \
+ && true
+
+
diff --git a/doc/note/qemu/build-libarchive.txt b/doc/note/qemu/build-libarchive.txt
new file mode 100644
index 0000000..96f95d6
--- /dev/null
+++ b/doc/note/qemu/build-libarchive.txt
@@ -0,0 +1,72 @@
+
+### Debian native
+### TODO: test this
+true \
+ && PKGS_TO_ADD="make gcc curl ca-certificates libc6-dev" \
+ && SUDO=sudo \
+ && PKGINIT="$SUDO apt update" \
+ && PKGADD="$SUDO apt install -y --no-install-recommends" \
+ && PKGCLEAN="$SUDO apt clean" \
+ && HOST= \
+ && true
+
+
+### Alpine mingw cross
+true \
+ && PKGS_TO_ADD="make mingw-w64-gcc curl tar" \
+ && SUDO="/home/$USER/.local/bin/sudo" \
+ && PKGINIT=true \
+ && PKGADD="$SUDO apk add" \
+ && PKGCLEAN="$SUDO apk cache clean 2>&1| grep -v 'ERROR: Package cache is not enabled'" \
+ && HOST=x86_64-w64-mingw32 \
+ && true
+
+
+### General
+true \
+ && LIBARCHIVE_VERSION="3.6.2" \
+ && CACHE_DIR="/var/tmp" \
+ && true
+
+
+### Make
+true \
+ && if test -n "$(ls -A)"; then true \
+ && printf '\n It is recommended to run this script in an empty dir.\n\n' \
+ && false \
+ ;fi \
+ && LIBARCHIVE_URL="https://github.com/libarchive/libarchive/releases/download/v${LIBARCHIVE_VERSION:?}/libarchive-${LIBARCHIVE_VERSION:?}.tar.gz" \
+ && LIBARCHIVE_SRCTGZ="${CACHE_DIR:?}/libarchive-${LIBARCHIVE_VERSION:?}.tgz" \
+ && LIBARCHIVE_BINTGZ="${LIBARCHIVE_SRCTGZ%.*}-bin.tgz" \
+ && if test -n "$HOST"; then HOST_="${HOST:?}-" ;fi \
+ && ${PKGINIT:?} && ${PKGADD:?} $PKGS_TO_ADD \
+ && if test ! -e "${LIBARCHIVE_SRCTGZ:?}"; then true \
+ && echo "Download ${LIBARCHIVE_URL:?}" \
+ && curl -sSLo "${LIBARCHIVE_SRCTGZ:?}" "${LIBARCHIVE_URL:?}" \
+ ;fi \
+ && if test ! -e "${LIBARCHIVE_BINTGZ}"; then (true \
+ && printf '\n Build libarchive\n\n' \
+ && tar xf "${LIBARCHIVE_SRCTGZ:?}" \
+ && cd "libarchive-${LIBARCHIVE_VERSION:?}" \
+ && ./configure --prefix="${PWD:?}/build/usr_local" --host=${HOST} \
+ --enable-bsdtar=static --enable-bsdcat=static --enable-bsdcpio=static \
+ --disable-rpath --enable-posix-regex-lib \
+ --with-libiconv-prefix="${PWD%/*}/libiconv-1.16-mingw64" \
+ CC=${HOST_}gcc CPP=${HOST_}cpp \
+ && make clean && make -j$(nproc) && make install \
+ && (cd build/usr_local \
+ && rm -rf lib/pkgconfig lib/libarchive.la \
+      && find -type f -not -name MD5SUM -exec md5sum {} + > MD5SUM \
+ && tar --owner=0 --group=0 -czf "${LIBARCHIVE_BINTGZ:?}" * \
+ && md5sum -b "${LIBARCHIVE_BINTGZ:?}" > "${LIBARCHIVE_BINTGZ:?}.md5" \
+ ) \
+ && cd .. && rm -rf "libarchive-${LIBARCHIVE_VERSION:?}" \
+ );fi \
+ && printf '\n DONE\n\n'
+
+
+## Install
+true \
+ && $SUDO tar -C "${INSTALL_ROOT:?}" -xzf "${LIBARCHIVE_BINTGZ:?}" \
+ && true
+
diff --git a/doc/note/qemu/build-libcurl.txt b/doc/note/qemu/build-libcurl.txt
new file mode 100644
index 0000000..be7b8c2
--- /dev/null
+++ b/doc/note/qemu/build-libcurl.txt
@@ -0,0 +1,99 @@
+
+
+### Debian native
+true \
+ && PKGS_TO_ADD="autoconf automake ca-certificates curl make" \
+ && SUDO=sudo \
+ && PKGINIT="$SUDO apt update" \
+ && PKGADD="$SUDO apt install -y --no-install-recommends" \
+ && PKGCLEAN="$SUDO apt clean" \
+ && HOST= \
+ && true
+
+
+### Alpine mingw cross
+true \
+ && PKGS_TO_ADD="git make mingw-w64-gcc curl tar cmake autoconf automake libtool m4" \
+ && SUDO="/home/$USER/.local/bin/sudo" \
+ && PKGINIT=true \
+ && PKGADD="$SUDO apk add" \
+  && PKGCLEAN="$SUDO apk cache clean" \
+ && HOST=x86_64-w64-mingw32 \
+ && true
+
+
+## Generic
+true \
+ && CURL_VERSION="8.3.0" \
+  && CACHE_DIR="/var/tmp" \
+  && INSTALL_ROOT="/usr/local" `# used by the Install step below` \
+ && true
+
+
+## Make
+true \
+ && if test -n "$(ls -A)"; then true \
+ && printf '\n It is recommended to run this script in an empty dir.\n\n' \
+ && false \
+ ;fi \
+ && if test -n "$HOST"; then HOST_="${HOST:?}-" ;fi \
+ && CURL_VERSION_UGLY="$(echo "$CURL_VERSION"|sed 's;\.;_;g')" \
+ && CURL_URL="https://github.com/curl/curl/archive/refs/tags/curl-${CURL_VERSION_UGLY:?}.tar.gz" \
+ && CURL_SRCTGZ="${CACHE_DIR:?}/curl-${CURL_VERSION:?}.tgz" \
+ && CURL_BINTGZ="${CURL_SRCTGZ%.*}-bin.tgz" \
+ \
+ && ${PKGINIT:?} && ${PKGADD:?} $PKGS_TO_ADD \
+ && printf '\n Download Sources\n\n' \
+ && if test ! -e "${CURL_SRCTGZ:?}"; then true \
+ && echo "Download ${CURL_URL:?}" \
+ && curl -sSLo "${CURL_SRCTGZ:?}" "${CURL_URL:?}" \
+ ;fi \
+ && if test ! -e "${CURL_BINTGZ:?}"; then (true \
+ && printf '\n Build curl\n\n' \
+ && tar xf "${CURL_SRCTGZ:?}" \
+ && cd "curl-curl-${CURL_VERSION_UGLY:?}" \
+ && autoreconf -fi \
+ && if test -n "$HOST"; then HORSCHT="--host=${HOST:?}";fi \
+ && ./configure --prefix="$PWD/build/usr_local" --enable-http --with-nghttp2 --with-nghttp3 \
+ --disable-alt-svc --disable-ares --disable-aws --disable-basic-auth \
+ --disable-bearer-auth --disable-bindlocal --disable-cookies --disable-curldebug \
+ --disable-dateparse --disable-debug --disable-dependency-tracking --disable-dict \
+ --disable-digest-auth --disable-dnsshuffle --disable-doh --disable-ech --disable-file \
+ --disable-form-api --disable-ftp --disable-get-easy-options --disable-gopher \
+ --disable-headers-api --disable-hsts --disable-http-auth --disable-imap --enable-ipv6 \
+ --disable-kerberos-auth --disable-largefile --disable-ldap --disable-ldaps \
+ --disable-libcurl-option --disable-libtool-lock --enable-manual --disable-mime \
+ --disable-mqtt --disable-negotiate-auth --disable-netrc --enable-ntlm --enable-ntlm-wb \
+ --disable-openssl-auto-load-config --disable-optimize --disable-pop3 \
+ --disable-progress-meter --enable-proxy --disable-pthreads --disable-rt --disable-rtsp \
+ --disable-smb --enable-smtp --disable-socketpair --disable-sspi --disable-symbol-hiding \
+ --disable-telnet --disable-tftp --disable-threaded-resolver --disable-tls-srp \
+ --disable-unix-sockets --disable-verbose --disable-versioned-symbols --disable-warnings \
+ --disable-websockets --disable-werror --without-schannel --without-secure-transport \
+ --without-amissl --without-ssl --without-openssl --without-gnutls --without-mbedtls \
+ --without-wolfssl --without-bearssl --without-rustls --without-test-nghttpx \
+ --without-test-caddy --without-test-httpd --without-pic --without-aix-soname \
+ --without-gnu-ld --without-sysroot --without-mingw1-deprecated --without-hyper \
+ --without-zlib --without-brotli --without-zstd --without-ldap-lib --without-lber-lib \
+ --without-gssapi-includes --without-gssapi-libs --without-gssapi \
+ --without-default-ssl-backend --without-random --without-ca-bundle --without-ca-path \
+ --without-ca-fallback --without-libpsl --without-libgsasl --without-librtmp \
+ --without-winidn --without-libidn2 --without-ngtcp2 --without-quiche --without-msh3 \
+ --without-zsh-functions-dir --without-fish-functions-dir \
+ CFLAGS=-fPIC $HORSCHT \
+ && make clean && make -j$(nproc) && make install \
+ && (cd build/usr_local \
+ && rm -rf share/aclocal bin/curl-config lib/libcurl.la lib/pkgconfig \
+ && tar --owner=0 --group=0 -czf "${CURL_BINTGZ:?}" * \
+ && md5sum -b "${CURL_BINTGZ:?}" > "${CURL_BINTGZ:?}.md5" \
+ ) \
+ && cd .. && rm -rf "curl-curl-${CURL_VERSION_UGLY:?}" \
+ );fi \
+ && printf '\n DONE\n\n'
+
+
+## Install
+true \
+ && $SUDO tar -C "${INSTALL_ROOT:?}" -xzf "${CURL_BINTGZ:?}" \
+ && true
+
+
diff --git a/doc/note/qemu/build-libpcap.txt b/doc/note/qemu/build-libpcap.txt
new file mode 100644
index 0000000..b86dd1c
--- /dev/null
+++ b/doc/note/qemu/build-libpcap.txt
@@ -0,0 +1,64 @@
+
+### Debian native
+true \
+ && PKGS_TO_ADD="curl ca-certificates gcc make libc6-dev flex bison" \
+ && SUDO=sudo \
+ && PKGINIT="$SUDO apt update" \
+ && PKGADD="$SUDO apt install -y --no-install-recommends" \
+ && HOST= \
+ && true
+
+
+### Alpine mingw cross
+true \
+ && PKGS_TO_ADD="binutils curl mingw-w64-gcc make tar flex bison" \
+ && SUDO="/home/$USER/.local/bin/mysudo" \
+ && PKGINIT=true \
+ && PKGADD="$SUDO apk add" \
+ && HOST=x86_64-w64-mingw32 \
+ && true
+
+
+## Generic
+true \
+ && PCAP_VERSION="1.10.4" \
+ && CACHE_DIR="/var/tmp" \
+ && WORKDIR="/tmp" \
+ && INSTALL_ROOT="/usr/local" \
+ && MAKE_JOBS=$(nproc) \
+ && true
+
+
+## Make
+true \
+ && PCAP_URL="https://github.com/the-tcpdump-group/libpcap/archive/refs/tags/libpcap-${PCAP_VERSION:?}.tar.gz" \
+ && PCAP_SRCTGZ="${CACHE_DIR:?}/pcap-${PCAP_VERSION:?}.tgz" \
+ && PCAP_BINTGZ="${PCAP_SRCTGZ%.*}-bin.tgz" \
+  && if test -f "${PCAP_SRCTGZ:?}"; then true \
+ && echo "[DEBUG] Already have \"${PCAP_SRCTGZ:?}\"" \
+ ;else true \
+ && echo curl -sSL "${PCAP_URL:?}" -o "${PCAP_SRCTGZ:?}" \
+ && curl -sSL "${PCAP_URL:?}" -o "${PCAP_SRCTGZ:?}" \
+ ;fi \
+ && ( mkdir "${WORKDIR:?}/pcap-${PCAP_VERSION}" \
+ && cd "${WORKDIR:?}/pcap-${PCAP_VERSION}" \
+ && tar xf "${PCAP_SRCTGZ:?}" \
+ && cd * \
+ && ./configure --prefix=${WORKDIR:?}/pcap-"${PCAP_VERSION:?}"/out \
+ && make clean \
+ && make -j$(nproc) \
+ && make install \
+ && cd ../out \
+ && rm bin/pcap-config lib/pkgconfig -rf \
+    && tar czf "${PCAP_BINTGZ:?}" * \
+ && printf '\n Build pcap Done\n\n' \
+ ) \
+ && true
+
+
+## Install
+true \
+ && $SUDO mkdir -p "${INSTALL_ROOT:?}" \
+ && $SUDO tar -C "${INSTALL_ROOT:?}" -xf "${PCAP_BINTGZ:?}" \
+ && true
+
diff --git a/doc/note/qemu/build-libpcre1.txt b/doc/note/qemu/build-libpcre1.txt
new file mode 100644
index 0000000..491809b
--- /dev/null
+++ b/doc/note/qemu/build-libpcre1.txt
@@ -0,0 +1,70 @@
+
+
+### Debian native
+true \
+ && PKGS_TO_ADD="curl git make gcc ca-certificates libc6-dev cmake autoconf automake libtool m4" \
+ && SUDO=sudo \
+ && PKGINIT="$SUDO apt update" \
+ && PKGADD="$SUDO apt install -y --no-install-recommends" \
+ && PKGCLEAN="$SUDO apt clean" \
+ && HOST= \
+ && true
+
+
+### Alpine mingw cross
+true \
+ && PKGS_TO_ADD="git make mingw-w64-gcc curl tar cmake autoconf automake libtool m4" \
+ && SUDO="/home/$USER/.local/bin/sudo" \
+ && PKGINIT=true \
+ && PKGADD="$SUDO apk add" \
+  && PKGCLEAN="$SUDO apk cache clean" \
+ && HOST=x86_64-w64-mingw32 \
+ && true
+
+
+## Generic
+true \
+ && PCRE_VERSION="8.45" \
+  && CACHE_DIR="/var/tmp" \
+  && INSTALL_ROOT="/usr/local" `# used by the Install step below` \
+ && true
+
+
+## Make
+true \
+ && if test -n "$(ls -A)"; then true \
+ && printf '\n It is recommended to run this script in an empty dir.\n\n' \
+ && false \
+ ;fi \
+ && PCRE_URL="https://sourceforge.net/projects/pcre/files/pcre/${PCRE_VERSION:?}/pcre-${PCRE_VERSION:?}.tar.gz/download" \
+ && PCRE_SRCTGZ="${CACHE_DIR:?}/pcre-${PCRE_VERSION:?}.tgz" \
+ && PCRE_BINTGZ="${PCRE_SRCTGZ%.*}-bin.tgz" \
+ && if test -n "$HOST"; then HOST_="${HOST:?}-" ;fi \
+ && ${PKGINIT:?} && ${PKGADD:?} $PKGS_TO_ADD \
+ && printf '\n Download Dependency Sources\n\n' \
+ && if test ! -e "${PCRE_SRCTGZ:?}"; then true \
+ && echo "Download ${PCRE_URL:?}" \
+ && curl -sSLo "${PCRE_SRCTGZ:?}" "${PCRE_URL:?}" \
+ ;fi \
+ && if test ! -e "${PCRE_BINTGZ:?}"; then (true \
+    && printf '\n  Build pcre\n\n' \
+ && tar xf "${PCRE_SRCTGZ:?}" \
+ && cd "pcre-${PCRE_VERSION:?}" \
+ && ./configure --prefix="$PWD/build/usr_local" --host=$HOST --disable-cpp --enable-utf \
+ && make clean && make -j$(nproc) && make install \
+ && (cd build/usr_local \
+ && rm -rf lib/libpcre.la lib/pkgconfig lib/libpcreposix.la bin/pcre-config \
+ && tar --owner=0 --group=0 -czf "${PCRE_BINTGZ:?}" * \
+ && md5sum -b "${PCRE_BINTGZ:?}" > "${PCRE_BINTGZ:?}.md5" \
+ ) \
+ && cd .. && rm -rf "pcre-${PCRE_VERSION:?}" \
+ );fi \
+ && printf '\n DONE\n\n'
+
+
+## Install
+true \
+ && $SUDO tar -C "${INSTALL_ROOT:?}" -xzf "${PCRE_BINTGZ:?}" \
+ && true
+
+
+
diff --git a/doc/note/qemu/build-lua.txt b/doc/note/qemu/build-lua.txt
new file mode 100644
index 0000000..5440233
--- /dev/null
+++ b/doc/note/qemu/build-lua.txt
@@ -0,0 +1,86 @@
+
+### Debian native
+true \
+ && PKGS_TO_ADD="curl ca-certificates gcc make libc6-dev" \
+ && SUDO=sudo \
+ && PKGINIT="$SUDO apt update" \
+ && PKGADD="$SUDO apt install -y --no-install-recommends" \
+ && PKGCLEAN="$SUDO apt clean" \
+ && HOST= \
+ && true
+
+
+### Alpine mingw cross
+true \
+ && PKGS_TO_ADD="binutils curl mingw-w64-gcc make tar" \
+ && SUDO="/home/$USER/.local/bin/sudo" \
+ && PKGINIT=true \
+ && PKGADD="$SUDO apk add" \
+  && PKGCLEAN="$SUDO apk cache clean" \
+ && HOST=x86_64-w64-mingw32 \
+ && true
+
+
+## Generic
+true \
+ && LUA_VERSION="5.4.3" \
+  && CACHE_DIR="/var/tmp" \
+  && INSTALL_ROOT="/usr/local" `# used by the Install step below` \
+ && true
+
+
+## Make
+true \
+ && if test -n "$(ls -A)"; then true \
+ && printf '\n It is recommended to run this script in an empty dir.\n\n' \
+ && false \
+ ;fi \
+ && if test -n "$HOST"; then HOST_="${HOST:?}-" ;fi \
+ && ${PKGINIT:?} && ${PKGADD:?} $PKGS_TO_ADD \
+ && LUA_URL="https://www.lua.org/ftp/lua-${LUA_VERSION:?}.tar.gz" \
+ && LUA_SRCTGZ="${CACHE_DIR:?}/lua-${LUA_VERSION:?}.tgz" \
+ && LUA_BINTGZ="${LUA_SRCTGZ%.*}-bin.tgz" \
+ && printf '\n Download Dependency Sources\n\n' \
+ && if test ! -e "${LUA_SRCTGZ:?}"; then true \
+ && echo "Download ${LUA_URL:?}" \
+ && curl -sSLo "${LUA_SRCTGZ:?}" "${LUA_URL:?}" \
+ ;fi \
+ && if test ! -e "${LUA_BINTGZ:?}"; then (true \
+ && printf '\n Build lua\n\n' \
+ && tar xf "${LUA_SRCTGZ:?}" \
+ && cd "lua-${LUA_VERSION:?}" \
+ && mkdir -p build/bin build/include build/lib build/man/man1 \
+ && export CFLAGS="-ggdb -Wall -Wextra" \
+    && `# Drop the following line to build without debug assertions` \
+    && export CFLAGS="$CFLAGS -DLUAI_ASSERT -DLUA_USE_APICHECK" \
+ && make clean \
+ && if echo "$HOST"|grep -q '\-mingw'; then true \
+ && make -j$(nproc) PLAT=mingw \
+ CC="${HOST_}gcc -std=gnu99" AR="${HOST_}ar rcu" RANLIB="${HOST_}ranlib" \
+ && cp -t build/. README \
+ && cp -t build/bin/. src/lua.exe src/luac.exe \
+ ;else true \
+ && export CFLAGS="$CFLAGS -DLUA_USE_POSIX" \
+ && make -j$(nproc) \
+ && cp -t build/. README \
+ && cp -t build/bin/. src/lua src/luac \
+ ;fi \
+ && cp -t build/include/. src/lua.h src/luaconf.h src/lualib.h src/lauxlib.h src/lua.hpp \
+ && cp -t build/lib/. src/liblua.a \
+ && cp -t build/man/man1/. doc/lua.1 doc/luac.1 \
+ && (cd build \
+ && rm -rf include/lua.hpp \
+ && tar --owner=0 --group=0 -czf "${LUA_BINTGZ:?}" * \
+ && md5sum -b "${LUA_BINTGZ:?}" > "${LUA_BINTGZ:?}.md5" \
+ ) \
+ && cd .. && rm -rf "lua-${LUA_VERSION:?}" \
+ );fi \
+ && printf '\n DONE\n\n'
+
+
+## Install
+true \
+ && $SUDO tar -C "${INSTALL_ROOT:?}" -xzf "${LUA_BINTGZ:?}" \
+ && true
+
+
diff --git a/doc/note/qemu/build-sqlite.txt b/doc/note/qemu/build-sqlite.txt
new file mode 100644
index 0000000..81210f9
--- /dev/null
+++ b/doc/note/qemu/build-sqlite.txt
@@ -0,0 +1,77 @@
+
+### Debian native
+true \
+ && PKGS_TO_ADD="curl ca-certificates gcc libc6-dev make tcl" \
+ && SUDO=sudo \
+ && PKGINIT="$SUDO apt update" \
+ && PKGADD="$SUDO apt install -y --no-install-recommends" \
+ && PKGCLEAN="$SUDO apt clean" \
+ && HOST= \
+ && true
+
+
+### Alpine mingw cross
+true \
+ && PKGS_TO_ADD="curl gcc musl-dev make mingw-w64-gcc tar tcl" \
+ && SUDO="/home/$USER/.local/bin/sudo" \
+ && PKGINIT=true \
+ && PKGADD="$SUDO apk add" \
+  && PKGCLEAN="$SUDO apk cache clean" \
+ && HOST=x86_64-w64-mingw32 \
+ && true
+
+
+## Generic
+true \
+ && SQLITE_VERSION="3.33.0" \
+  && CACHE_DIR="/var/tmp" \
+  && INSTALL_ROOT="/usr/local" `# used by the Install step below` \
+ && true
+
+
+## Make
+true \
+ && SQLITE_URL="https://github.com/sqlite/sqlite/archive/refs/tags/version-${SQLITE_VERSION:?}.tar.gz" \
+ && SQLITE_SRCTGZ="${CACHE_DIR:?}/sqlite-${SQLITE_VERSION:?}.tgz" \
+ && SQLITE_BINTGZ="${SQLITE_SRCTGZ%.*}-bin.tgz" \
+ && ${PKGINIT:?} && ${PKGADD:?} ${PKGS_TO_ADD} \
+ && if test -n "$HOST"; then HOST_="${HOST:?}-" ;fi \
+ && if test ! -e "${SQLITE_SRCTGZ:?}"; then (true \
+ && echo "Download \"${SQLITE_URL:?}\"" \
+ && curl -sSLo "${SQLITE_SRCTGZ:?}" "${SQLITE_URL:?}" \
+ );fi \
+ && if test ! -e "${SQLITE_BINTGZ:?}"; then (true \
+ && printf '\n Build SqLite\n\n' \
+ && tar xzf "${SQLITE_SRCTGZ:?}" \
+ && cd sqlite-*${SQLITE_VERSION:?} \
+ && mkdir build \
+ && if echo $HOST|grep -q 'mingw'; then true \
+ && ./configure --prefix=${PWD:?}/build --host=${HOST:?} \
+      CC=${HOST_}gcc CPP=${HOST_}cpp CXX=${HOST_}g++ BCC=gcc BEXE=.exe config_TARGET_EXEEXT=.exe \
+ && ln -s mksourceid.exe mksourceid \
+ && make clean && make -j$(nproc) && make install \
+ && (cd build \
+ && rm -rf lemon* mksourceid lib/pkgconfig lib/*.la \
+ ) \
+ ;else true \
+ && ./configure --prefix=${PWD:?}/build \
+ && make clean && make -j$(nproc) && make install \
+ ;fi \
+ && cp README.md LICENSE.md VERSION build/. \
+ && (cd build \
+ && rm -rf lib/libsqlite3.la lib/pkgconfig \
+ && find -not -name MD5SUM -type f -exec md5sum -b {} + > MD5SUM \
+ && tar --owner=0 --group=0 -czf "${SQLITE_BINTGZ:?}" * \
+ && md5sum -b "${SQLITE_BINTGZ:?}" > "${SQLITE_BINTGZ:?}.md5" \
+ ) \
+ && cd .. && rm -rf sqlite-*${SQLITE_VERSION:?} \
+ );fi \
+ && printf '\n DONE\n\n'
+
+
+## Install
+true \
+ && $SUDO tar -C "${INSTALL_ROOT:?}" -xzf "${SQLITE_BINTGZ:?}" \
+ && true
+
+
+
diff --git a/doc/note/qemu/build-zlib.txt b/doc/note/qemu/build-zlib.txt
new file mode 100644
index 0000000..a90e616
--- /dev/null
+++ b/doc/note/qemu/build-zlib.txt
@@ -0,0 +1,73 @@
+
+### Debian native
+true \
+ && PKGS_TO_ADD="curl ca-certificates gcc libc6-dev make" \
+ && SUDO=sudo \
+ && PKGINIT="$SUDO apt update" \
+ && PKGADD="$SUDO apt install -y --no-install-recommends" \
+ && PKGCLEAN="$SUDO apt clean" \
+ && HOST= \
+ && true
+
+
+### Alpine mingw cross
+true \
+ && PKGS_TO_ADD="curl make mingw-w64-gcc tar" \
+ && SUDO="/home/$USER/.local/bin/sudo" \
+ && PKGINIT=true \
+ && PKGADD="$SUDO apk add" \
+  && PKGCLEAN="$SUDO apk cache clean" \
+ && HOST=x86_64-w64-mingw32 \
+ && true
+
+
+## Generic
+true \
+ && ZLIB_VERSION="1.2.11" \
+  && CACHE_DIR="/var/tmp" \
+  && INSTALL_ROOT="/usr/local" `# used by the Install step below` \
+ && true
+
+
+## Make
+true \
+ && ZLIB_URL="https://downloads.sourceforge.net/project/libpng/zlib/${ZLIB_VERSION:?}/zlib-${ZLIB_VERSION:?}.tar.gz" \
+ && ZLIB_SRCTGZ="${CACHE_DIR:?}/zlib-${ZLIB_VERSION:?}.tgz" \
+ && ZLIB_BINTGZ="${ZLIB_SRCTGZ%.*}-bin.tgz" \
+ && if test -n "$HOST"; then HOST_="${HOST:?}-" ;fi \
+ && if test ! -e "${ZLIB_SRCTGZ:?}"; then (true \
+ && echo "Download \"${ZLIB_URL:?}\"" \
+ && curl -sSLo "${ZLIB_SRCTGZ:?}" "${ZLIB_URL:?}" \
+ );fi \
+ && if test ! -e "${ZLIB_BINTGZ:?}"; then (true \
+ && printf '\n Build zlib\n\n' \
+ && tar xzf "${ZLIB_SRCTGZ:?}" \
+ && cd "zlib-${ZLIB_VERSION:?}" \
+ && mkdir build \
+ && if echo $HOST|grep -q '\-mingw'; then true \
+ && export DESTDIR=./build BINARY_PATH=/bin INCLUDE_PATH=/include LIBRARY_PATH=/lib \
+      && sed -i 's;^PREFIX =.*$;;' win32/Makefile.gcc \
+ && make -j$(nproc) -fwin32/Makefile.gcc PREFIX=${HOST_:?} \
+ && make -fwin32/Makefile.gcc install PREFIX=${HOST_:?} \
+ && unset DESTDIR BINARY_PATH INCLUDE_PATH LIBRARY_PATH \
+ ;else true \
+ && ./configure --prefix=$PWD/build \
+ && make -j$(nproc) && make install \
+ ;fi \
+ && cp README build/. \
+ && (cd build \
+ && rm -rf lib/pkgconfig \
+ && tar --owner=0 --group=0 -czf "${ZLIB_BINTGZ:?}" * \
+ && md5sum -b "${ZLIB_BINTGZ:?}" > "${ZLIB_BINTGZ:?}.md5" \
+ ) \
+ && cd .. && rm -rf "zlib-${ZLIB_VERSION:?}" \
+ );fi \
+ && printf '\n DONE\n\n'
+
+
+## Install
+true \
+ && $SUDO tar -C "${INSTALL_ROOT:?}" -xzf "${ZLIB_BINTGZ:?}" \
+ && true
+
+
+
diff --git a/doc/note/qemu/php-dev-server.txt b/doc/note/qemu/php-dev-server.txt
index 98c5700..c1a8b01 100644
--- a/doc/note/qemu/php-dev-server.txt
+++ b/doc/note/qemu/php-dev-server.txt
@@ -33,6 +33,7 @@ true `# Setup mounts & persistence (host)` \
&& true
true `# Run dev server (guest)` \
+ && cd "${GUESTWD:?}" \
&& DEBUG=1 php -dassert.bail=1 -dzend.assertions=1 -dassert.exception=1 -S 0.0.0.0:8080 src/index.php \
&& true
@@ -40,3 +41,8 @@ true `# Copy persistence from vm back to host (host)` \
&& $SSH -- sh -c "true && cd \"${GUESTWD:?}\" && tar c \"${CPY_OUT:?}\"" | tar x \
&& true
+
+## Links
+
+- [Install old php](https://tecadmin.net/how-to-install-php-on-debian-12/)
+
diff --git a/doc/note/qemu/qemu-compile-itself.txt b/doc/note/qemu/qemu-compile-itself.txt
new file mode 100644
index 0000000..aed0522
--- /dev/null
+++ b/doc/note/qemu/qemu-compile-itself.txt
@@ -0,0 +1,27 @@
+
+#
+# Compile qemu bullshit-free for windoof.
+#
+# [src](https://wiki.qemu.org/Hosts/W32)
+#
+#
+# WARN: This does NOT work
+#
+
+true \
+ && QEMU_URL="https://download.qemu.org/qemu-8.2.1.tar.xz" \
+ && SUDO=sudo \
+ && WORKDIR=/home/${USER:?}/work \
+ && CACHEDIR=/var/tmp \
+ && QEMU_TXZ=$(basename "${QEMU_URL:?}") \
+ && $SUDO apt install -y --no-install-recommends curl \
+ && curl -D- "${QEMU_URL:?}" -o "${CACHEDIR:?}/${QEMU_TXZ:?}" \
+ && mkdir -p "${WORKDIR:?}" \
+ && cd "${WORKDIR:?}" \
+ && tar xf "${CACHEDIR:?}/${QEMU_TXZ:?}" \
+ && cd qemu* \
+ && ./configure \
+ && make \
+ && true
+
+
diff --git a/doc/note/qemu/qemu.txt b/doc/note/qemu/qemu.txt
index de7a71b..edaf9bc 100644
--- a/doc/note/qemu/qemu.txt
+++ b/doc/note/qemu/qemu.txt
@@ -7,12 +7,24 @@ Qemu
apt install qemu-system-aarch64 qemu-efi-aarch64
-## Create Image
+## Manage Images
- qemu-img create -f qcow2 my_disk.qcow2 16G
+### Create new image
+ qemu-img create -f qcow2 disk.qcow2 16G
+### Create new overlay image
+ qemu-img create -o backing_file=base.qcow2,backing_fmt=qcow2 -f qcow2 disk.qcow2
-## Shrink img
+### Convert qcow2 to raw
+ qemu-img convert -f qcow2 -O raw foo.qcow2 foo.img
+
+### Convert raw to qcow2
+ qemu-img convert -f raw -O qcow2 foo.img foo.qcow2
+
+### Create Standalone image based on snapshot image
+ qemu-img convert -O qcow2 derived.qcow2 standalone.qcow2
+
+## Shrink/compact img
Normal systems:
qemu-img convert -O qcow2 input.qcow2 output.qcow2
@@ -22,6 +34,74 @@ Windoof:
qemu-img convert -O qcow2 input.qcow output.qcow2
+## Shrink snapshot layer
+
+Rebasing an (empty) overlay of the full clone back onto the original base
+copies only the clusters that differ, which yields a minimal snapshot layer:
+
+  qemu-img convert -O qcow2 snapLayer.qcow2 tmpFullClone.qcow2
+  qemu-img create -f qcow2 -b tmpFullClone.qcow2 tmpDiff.qcow2
+  qemu-img rebase -b base.qcow2 tmpDiff.qcow2
+  mv tmpDiff.qcow2 snapLayer.qcow2
+
+
+## Example Params (Usage: CopyPaste, then delete what is not needed)
+qemu-system-x86_64 \
+ -nodefaults `# <- TODO Fix network when using this` \
+ -accel kvm:whpx:hax:tcg -m size=2G -smp cores=$(nproc) \
+  -monitor stdio -serial stdio `# choose ONE` \
+ `# Drives & Boot.` \
+ -boot order=dc \
+ -cdrom "path/to/cd.iso" \
+ -hda "$(dirname "$(realpath "$0")")/hda.qcow2" \
+ `# Isolated Network plus host port/cmd reachable from guest` \
+ -netdev 'user,id=n1,ipv6=off,restrict=y,guestfwd=tcp:10.0.2.9:80-cmd:ncat 127.0.0.1 80' \
+ -device e1000,netdev=n1 \
+ `# Isolated Network with samba access to host` \
+ -netdev 'user,id=n2,ipv6=off,restrict=y,guestfwd=tcp:10.0.2.9:139-cmd:ncat 127.0.0.1 139,guestfwd=tcp:10.0.2.9:445-cmd:ncat 127.0.0.1 445' \
+ -device e1000,netdev=n2 \
+ `# 10.0.2.x network with host redirect` \
+ -netdev user,id=n0,ipv6=off,hostfwd=tcp:127.0.0.1:${SSH_PORT:-2222}-:22 \
+ -device e1000,netdev=n0 \
+ `# socket mcast shared network adapter` \
+ -netdev socket,id=n1,ipv6=off,mcast=230.0.0.1:1234 \
+ -device e1000,netdev=n1 \
+ `# USB pass-through` \
+ -usb -device usb-host,id=myUsbQemuId,vendorid=0xFFFF,productid=0xFFFF \
+ `# Choose ONE of those for graphic output` \
+ -nographic \
+ -device VGA \
+ -display sdl,grab-mod=rctrl \
+ -display gtk,show-menubar=on \
+ -display vnc=127.0.0.1:0,to=99 `#HINT: 0 is port 5900` \
+ ;
+
+## Broken systems likely need some of those too
+ `# Fix broken hosts` \
+ -L "${QEMU_HOME:?}/Bios" -bios "${QEMU_HOME:?}/Bios/bios-256k.bin" \
+ -accel whpx,kernel-irqchip=off `# "https://github.com/Tech-FZ/EmuGUI/issues/72#issuecomment-1940933918"` \
+ `# Fix broken guests` \
+ -device usb-ehci,id=usb,bus=pci.0,addr=0x4 -device usb-tablet \
+
+
+## Inspect qcow2 by host mounting it
+
+ $SUDO modprobe nbd
+ $SUDO qemu-nbd -c /dev/nbd__ /path/to/my.qcow2
+ echo 'p' | $SUDO fdisk /dev/nbd__
+ $SUDO mount -o ro /dev/nbd__p__ /mnt/q
+ $SUDO umount /mnt/q `# cleanup`
+ $SUDO qemu-nbd -d /dev/nbd__ `# cleanup`
+ $SUDO rmmod nbd `# cleanup`
+
+
+### Example manual adapter setup (inside VM) for socket mcast network:
+true \
+ && ADDR=192.168.42.101/24 \
+ && DEV=ens4 \
+ && SUDO=sudo \
+ && $SUDO ip a add dev "${DEV:?}" "${ADDR:?}" \
+ && $SUDO ip link set "${DEV:?}" up \
+ && true
+
+
## amd64
# Choose whichever fits the need
@@ -31,7 +111,7 @@ Windoof:
&& BIOSFILE="${BIOSDIR:?}/bios-256k.bin" \
&& FIXMOUSEALIGN="-device usb-ehci,id=usb,bus=pci.0,addr=0x4 -device usb-tablet" \
&& NETWORK="-net nic -net user" \
- && NETWORK="-device e1000,netdev=net0 -netdev user,id=net0,hostfwd=tcp:127.0.0.1:10022-:22" \
+ && NETWORK="-device e1000,netdev=n0 -netdev user,id=n0,hostfwd=tcp:127.0.0.1:2222-:22" \
&& HOSTSPECIFICOPTS="--enable-kvm" \
&& HOSTSPECIFICOPTS="-L ${BIOSDIR:?} -bios ${BIOSFILE:?}" \
@@ -47,19 +127,46 @@ Regular boot
## aarch64 (not working yet)
- MAC='00:de:ad:de:ad:00'
- DISK=my_disk.qcow2
- cp /usr/share/AAVMF/AAVMF_CODE.fd ./flash1.img
- qemu-system-aarch64 -m 1G -cpu cortex-a57 -M virt \
- -pflash /usr/share/AAVMF/AAVMF_CODE.fd \
- -pflash flash1.img \
- -drive if=none,file=${DISK:?},id=hd0 \
- -device virtio-blk-device,drive=hd0 \
- -device virtio-net-device,netdev=net0,mac=${MAC:?}
-
- qemu-system-aarch64 -M virt -hda my_disk.qcow2 -cdrom debian.iso -boot c -m 1G
-
- qemu-system-aarch64 -M virt -cpu cortex-a57 -m 1G -bios /usr/share/qemu-efi-aarch64/QEMU_EFI.fd -hda my_disk.qcow2 -cdrom ~/images/debian-12.0.0-arm64-DVD/debian-12.0.0-arm64-DVD-1.iso
+ #apt install -y --no-install-recommends qemu-uefi-aarch64
+ curl -sSLO https://github.com/dhruvvyas90/qemu-rpi-kernel/blob/master/kernel-qemu-5.10.63-bullseye
+ curl -sSLO https://github.com/dhruvvyas90/qemu-rpi-kernel/blob/master/versatile-pb-bullseye-5.10.63.dtb
+ curl -sSLO https://github.com/dhruvvyas90/qemu-rpi-kernel/blob/master/native-emulation/dtbs/bcm2711-rpi-4-b.dtb
+ curl -sSLO https://github.com/dhruvvyas90/qemu-rpi-kernel/blob/master/kernel-qemu-5.4.51-buster
+ curl -sSLO https://github.com/dhruvvyas90/qemu-rpi-kernel/blob/master/versatile-pb-buster-5.4.51.dtb
+ curl -sSLO https://downloads.raspberrypi.org/raspios_lite_arm64/images/raspios_lite_arm64-2023-05-03/2023-05-03-raspios-bullseye-arm64-lite.img.xz
+ xz -d 2023-05-03-raspios-bullseye-arm64-lite.img.xz
+ echo p | /sbin/fdisk 2023-05-03-raspios-bullseye-arm64-lite.img | egrep 'Linux$' | sed -E 's:^\S+\s+([0-9]+) .*$:\nmount -o offset=$(expr \1 \\* 512) ./2023-05-03-raspios-bullseye-arm64-lite.img /mnt/foo:'
+ qemu-img convert -f raw -O qcow2 2023-05-03-raspios-bullseye-arm64-lite.img raspbian-bullseye-lite.qcow2
+ qemu-img resize raspbian-bullseye-lite.qcow2 16G
+ mv raspbian-bullseye-lite.qcow2 hda.qcow2
+
+ qemu-system-aarch64 \
+ -m 256 -cpu arm1176 \
+ -M versatilepb \
+ -no-reboot \
+ -serial stdio \
+ -net nic -net user \
+ -drive file=2023-05-03-raspios-bullseye-arm64-lite.img,format=raw \
+ -boot 'dtb=versatile-pb-bullseye-5.10.63.dtb,kernel=kernel-qemu-5.10.63-bullseye,kernel_args=root=/dev/vda2 panic=1' \
+
+ qemu-system-aarch64 \
+ -dtb ./bcm2711-rpi-4-b.dtb \
+ -m 256 -cpu arm1176 -M versatilepb \
+ -kernel kernel-qemu-5.10.63-bullseye -append "root=/dev/sda2 rootfstype=ext4 rw" \
+ -serial stdio \
+ -drive file=2023-05-03-raspios-bullseye-arm64-lite.img,format=raw \
+ -net nic -net user \
+ -no-reboot \
+
+ qemu-system-arm \
+ -M versatilepb \
+ -cpu arm1176 -m 256 \
+ -drive "file=2023-05-03-raspios-bullseye-arm64-lite.img,if=none,index=0,media=disk,format=raw,id=disk0" \
+ -device "virtio-blk-pci,drive=disk0,disable-modern=on,disable-legacy=off" \
+ -net "user,hostfwd=tcp::5022-:2222" \
+ -dtb "./versatile-pb-buster-5.4.51.dtb" \
+ -kernel "./kernel-qemu-5.4.51-buster" -append 'root=/dev/vda2 panic=1' \
+ -no-reboot
## Shared host directory via CIFS/SMB
@@ -67,7 +174,7 @@ Regular boot
true `# SMB server debian` \
&& hostpath=/path/to/host/dir \
&& sharename=work \
- && apt install --no-install-recommends -y samba
+ && apt install --no-install-recommends -y samba \
&& printf '[%s]\npath = %s\npublic = no\nwriteable = yes\nguest ok = yes\nforce user = andreas\n' "${sharename:?}" "${hostpath:?}" | $SUDO tee -a /etc/samba/smb.conf >/dev/null \
&& $SUDO /etc/init.d/smbd restart \
&& true
@@ -75,7 +182,7 @@ Regular boot
TODO: SMB server windoof
DoesNotWork: "https://serverfault.com/questions/442664/virtualization-linux-kvm-qemu-host-windows-vm-guest-how-to-access-data-drive#comment479177_442678"
- true `# SMB client debian` \
+true `# SMB client debian` \
&& hostUsername=yourHostUser \
&& smbServer=10.0.2.2 \
&& sharename=work \
@@ -88,6 +195,11 @@ DoesNotWork: "https://serverfault.com/questions/442664/virtualization-linux-kvm-
&& $SUDO mount -t cifs -o username=${hostUsername:?},uid=${guestUid:?},gid=${guestGid:?} "//${smbServer:?}/${sharename:?}" "${mountpoint:?}" \
&& true
+### Add those to "/etc/fstab" to set up the mounts automatically at boot:
+### HINT: mkdir /home/user/build
+ //10.0.2.2/sharename /mnt/sharename cifs password=,uid=1000,gid=1000,user,vers=3.0 0 0
+ /home/user/build /mnt/sharename/build none bind 0 0
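+
+To activate the entries without a reboot, a minimal sketch (assumes the
+mountpoints do not exist yet):
+
+  $SUDO mkdir -p /mnt/sharename && $SUDO mount -a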
+
List smb shares (eg debugging)
smbclient -NL //10.0.2.2
@@ -130,10 +242,13 @@ NOTE: Couldn't yet test any of those commands.
## Alpine PostInstall
true \
+  && `# HINT: environ setup does not work automatically during login; it has to be sourced manually.` \
+ && P="http://10.0.2.2:3128/" \
+ && printf 'export no_proxy=127.0.0.1,10.0.2.*\nexport http_proxy=%s\nexport https_proxy=%s\n' "${P:?}" "${P:?}" >> '/etc/environment' \
&& apk add openssh-server \
&& rc-update add sshd \
&& sed -i -E 's;^# *(PermitRootLogin).+$;\1 yes;' /etc/ssh/sshd_config \
- && sed -i -E 's;^# *(http://dl-cdn.alpinelinux.org/alpine/v[^/]+/community)$;\1;' /etc/apk/repositories \
+ && sed -i -E 's;^# *(http://dl-cdn.alpinelinux.org/alpine/v.*?/community)$;\1;' /etc/apk/repositories \
&& mkdir /home/user && chown 1000:1000 /home/user && chmod 755 /home/user \
&& printf 'user:x:1000:1000:user:/home/user:/bin/ash\n' >> /etc/passwd \
&& printf 'user:x:1000:user\n' >> /etc/group \
@@ -146,16 +261,16 @@ NOTE: Couldn't yet test any of those commands.
TODO: move this to a better place. Eg: debian/setup.txt or whatever.
true \
- && http_proxy= \
- && https_proxy= \
&& no_proxy=127.0.0.1,10.0.2.* \
+ && http_proxy=http://10.0.2.2:3128 \
+ && https_proxy=http://10.0.2.2:3128 \
&& SUDO= \
&& true \
&& if [ -n "$http_proxy" ]; then true \
&& (echo "Acquire::http::proxy \"${http_proxy}\";"
echo "Acquire::https::proxy \"${https_proxy}\";"
) | $SUDO tee /etc/apt/apt.conf.d/80proxy >/dev/null \
- fi \
+ ;fi \
&& $SUDO apt update \
&& $SUDO apt install -y --no-install-recommends vim openssh-server net-tools curl \
&& $SUDO sed -i -E 's;^GRUB_TIMEOUT=5$;GRUB_TIMEOUT=1;' /etc/default/grub \
@@ -179,4 +294,7 @@ TODO: move this to a better place. Eg: debian/setup.txt or whatever.
- [USB pass-through](https://unix.stackexchange.com/a/452946/292722)
- [qemu monitor via telnet](https://unix.stackexchange.com/a/426951/292722)
- [qemu monitor via stdio](https://unix.stackexchange.com/a/57835/292722)
+- [qemu raspberry pi TODO](https://blog.agchapman.com/using-qemu-to-emulate-a-raspberry-pi/)
+- [connect VM networks](https://qemu.weilnetz.de/doc/6.0/system/invocation.html#sec-005finvocation)
+- [inspect qcow2 mount host browse](https://www.jamescoyle.net/how-to/1818-access-a-qcow2-virtual-disk-image-from-the-host)
diff --git a/doc/note/qemu/setup-android-env.txt b/doc/note/qemu/setup-android-env.txt
new file mode 100644
index 0000000..0008c16
--- /dev/null
+++ b/doc/note/qemu/setup-android-env.txt
@@ -0,0 +1,85 @@
+#
+# Tools for Android development.
+#
+# HINT: Since JDK-8 is no longer available, we have to add clutter to
+# apksigner command. Eg:
+# apksigner -J-add-opens=jdk.compiler/com.sun.tools.javac.code=ALL-UNNAMED \
+# -J-add-opens=java.base/sun.security.x509=ALL-UNNAMED \
+# -J-add-opens=java.base/sun.security.pkcs=ALL-UNNAMED
+#
+# Refs:
+# - [Clean Android HelloWorld Tutorial](https://www.hanshq.net/command-line-android.html)
+# - [List of available versions](https://dl.google.com/android/repository/repository-11.xml)
+#
+set -e
+
+### Made for debian 10 (alias buster)
+true \
+ && PKGS_TO_ADD="curl unzip openjdk-17-jdk-headless aapt apksigner zipalign adb android-sdk-platform-tools-common" \
+ && SUDO=sudo \
+ && PKGINIT="$SUDO apt update" \
+ && PKGADD="$SUDO apt install -y --no-install-recommends" \
+ && PKGCLEAN="$SUDO apt clean" \
+ && PLATFORM_VERSION="24" \
+ && BUILD_TOOLS_VERSION="34.0.0" \
+ && CMDLINETOOLS_URL="https://dl.google.com/android/repository/commandlinetools-linux-8512546_latest.zip" \
+ && BUILD_TOOLS_URL="https://dl.google.com/android/repository/build-tools_r25-linux.zip" \
+ && PLATFORM_URL="https://dl.google.com/android/repository/platform-${PLATFORM_VERSION:?}_r02.zip" \
+ && NDK_URL="https://dl.google.com/android/repository/android-ndk-r26b-linux.zip" \
+ && BUILD_TOOLS_ZIP="$(basename "${BUILD_TOOLS_URL:?}")" \
+ && PLATFORM_ZIP="$(basename "${PLATFORM_URL:?}")" \
+ && NDK_ZIP="$(basename "${NDK_URL:?}")" \
+ \
+ && $PKGINIT \
+ && $PKGADD $PKGS_TO_ADD \
+ && `# Cmdline tools` \
+ && (cd /var/tmp && curl -sSLO "${CMDLINETOOLS_URL:?}") \
+  && if test -e /tmp/cmdline-tools; then echo >&2 "[ERROR] /tmp/cmdline-tools already exists"; false; fi \
+ && (cd /tmp && unzip /var/tmp/$(basename "${CMDLINETOOLS_URL:?}") >/dev/null) \
+ && $SUDO mkdir /usr/lib/android-sdk/cmdline-tools \
+ && $SUDO mkdir /usr/lib/android-sdk/cmdline-tools/latest \
+ && (cd /tmp/cmdline-tools && tar --owner=0 --group=0 -c bin lib source.properties) | (cd /usr/lib/android-sdk/cmdline-tools/latest && $SUDO tar x) \
+ && `# Build Tools` \
+ && (cd /var/tmp && curl -sSL "${BUILD_TOOLS_URL:?}" -o "${BUILD_TOOLS_ZIP:?}") \
+ && mkdir "/tmp/${BUILD_TOOLS_ZIP%.*}" \
+ && (cd "/tmp/${BUILD_TOOLS_ZIP%.*}" && unzip "/var/tmp/${BUILD_TOOLS_ZIP:?}") \
+ && (cd "/tmp/${BUILD_TOOLS_ZIP%.*}" && tar --owner=0 --group=0 -c *) \
+ | (cd /usr/lib/android-sdk/build-tools && $SUDO tar x) \
+ && $SUDO find /usr/lib/android-sdk/build-tools -type d -exec chmod 755 {} + \
+ && `# Those for some reason are broken (wrong linker) so use the debian variant.` \
+ && (cd /usr/lib/android-sdk/build-tools/android* && $SUDO rm aapt zipalign) \
+ && `# Platform` \
+ && (cd /var/tmp && curl -sSL "${PLATFORM_URL:?}" -o "${PLATFORM_ZIP:?}") \
+  && if test -e /tmp/android*; then echo >&2 '[ERROR] /tmp/android* already exists'; false; fi \
+ && (cd /tmp && unzip "/var/tmp/${PLATFORM_ZIP:?}" >/dev/null) \
+ && $SUDO mkdir /usr/lib/android-sdk/platforms \
+ && (cd /tmp && mv android-* "android-${PLATFORM_VERSION:?}") \
+ && (cd /tmp && tar --owner=0 --group=0 -c "android-${PLATFORM_VERSION:?}") \
+ | (cd /usr/lib/android-sdk/platforms && $SUDO tar x) \
+ && $SUDO find /usr/lib/android-sdk/platforms/android-* -type d -exec chmod o+rx {} + \
+ && $SUDO find /usr/lib/android-sdk/platforms/android-* -type f -exec chmod o+r {} + \
+ && `# Environ` \
+ && printf >>~/.profile '%s\n' \
+ "PATH=/usr/lib/android-sdk/build-tools/debian:\$PATH" \
+ "PATH=\"$(ls -d /usr/lib/android-sdk/build-tools/android-*):\$PATH\"" \
+ "PATH=\"/usr/lib/android-sdk/platform-tools:\$PATH\"" \
+ "CLASSPATH=$(ls -d /usr/lib/android-sdk/build-tools/android-*/lib/dx.jar)" \
+ "LD_LIBRARY_PATH=$(ls -d /usr/lib/android-sdk/build-tools/android-*/lib64):\$LD_LIBRARY_PATH" \
+ "export PATH" \
+ "export CLASSPATH" \
+ "export LD_LIBRARY_PATH" \
+ && `# NDK` \
+ && (cd /var/tmp && curl -sSL "${NDK_URL:?}" -o "${NDK_ZIP:?}") \
+ && mkdir "/tmp/${NDK_ZIP%.*}" \
+ && (cd "/tmp/${NDK_ZIP%.*}" && unzip "/var/tmp/${NDK_ZIP:?}") \
+  && `# TODO: likely worth throwing away some of that garbage before moving it into place` \
+ && (cd "/tmp/${NDK_ZIP%.*}" && tar --owner=0 --group=0 -c android-ndk-*) \
+ | (cd "/usr/lib" && $SUDO tar x) \
+ && $SUDO ln -s /usr/lib/android-ndk-* "/usr/lib/android-ndk" \
+ && `# Cleanup` \
+ && $PKGCLEAN \
+ && rm -rf /tmp/* 2>/dev/null || true \
+ && printf '\n Done :)\n\n Logout and login to get your new environ from ~/.profile\n\n' \
+ && true
+
diff --git a/doc/note/qemu/setup-dockerVM.txt b/doc/note/qemu/setup-dockerVM.txt
new file mode 100644
index 0000000..9bbcff7
--- /dev/null
+++ b/doc/note/qemu/setup-dockerVM.txt
@@ -0,0 +1,86 @@
+
+#
+# Use qemu to host docker images.
+#
+# [Execute Docker Containers as QEMU MicroVMs](https://mergeboard.com/blog/2-qemu-microvm-docker/)
+#
+
+true \
+ && LINUX_URL=https://cdn.kernel.org/pub/linux/kernel/v6.x/linux-6.7.4.tar.xz \
+ && SUDO=sudo \
+ && CACHEDIR=/var/tmp \
+ && WORKDIR=/home/$USER/work \
+ && LINUX_TXZ=$(basename "${LINUX_URL:?}") \
+ && $SUDO apt install -y --no-install-recommends curl make gcc bc flex bison libc-dev libelf-dev libssl-dev \
+ && cd "${CACHEDIR:?}" \
+ && curl -L "${LINUX_URL:?}" -O \
+ && mkdir -p "${WORKDIR:?}" \
+ && cd "${WORKDIR:?}" \
+ && tar xf "${CACHEDIR:?}/${LINUX_TXZ:?}" \
+ && cd linux* \
+ && base64 -d <<EOF | gunzip > .config &&
+H4sIAFBoz2UAA4VZS3MctxG++1eo7EtySCjSoopOlQ4YALMD7eBBALMPplKoSKZkVUmiLVKp5N/n
+A2Z2F5jB0jppv+4BGv3u5k8v/vn47f2//vLb09Pvj/+4uPhy/+3j/buHf3/79e/vH75cfPj0+f7x
+4t3nh48Xf9x/+f63L5/ef3v4z5eLX+8/vH/4+uHTx7/+8NMLqlUrVqHXlPQbbp3QKpDB6xefHl98
+fXh68Xj/9MPE4/ZuIwx9878DYLQTuyBvBz5woMfDBsd70VROIAMT/vS90qG7O/3sxKoLlrvghYQk
+J0LjWDBWU+5cIJT685Sw+flE9MStnSfelVBgvCf72UGJsKtgQkdOPSgv1OpE6zUuH9rgOtH6N1eX
+xycNkoSG9ETRkf+olJICIVoy9D5wRZqes4qyJJc0u7Hp14GurB7MCaOtw5GKbQXz3Qm2PiTG4GjH
+WcY+okYwtwBby/kdtwu8G1bc902GG5jXLw9gfCMoX8BgL7V6EILbdgE2JsNwjQ0quyi9JjnnTA02
+GrTUFKQJQglvs+fj9OjDcPS+BN8KH0i/JXsXtCoFSEZqWW5IqqUhPjR2XYuRnjRJl71wPnTEMq5y
+E8BZW9EXruRk9pTdzeuwuyJG0PzKiPKd54pBAaYnvtVWVm7v9lDrRjhtw2rgLtO6IZZshK0gwRmh
+EP5rV6HFSKz6v7LJEd5cXt3kolsOy3geIB8UpNdchUZrH4S9dfMHScrzJxxpUlBEtGa88sD0nbOl
+viCIYPPTr3u+4X0tB0kWQwuBR+3e+Py7GQkP92LDQ7M/RGvluBjWeUZjyF9jUrh8ebzS0i4erO0+
+OkDDZ/J3nK5DI7SDb1k7GC9yN1zzHacBXlPkWIuw11LcQTziaqoy3d4J+Do8W6zUm5f/vXyZ/h0z
+GF8Rug+bKSIClwMcq7jDDc7A52qOJhpuFSkFhR2i69+GmHQr8CHlrfQmhT98GMmQl5yC9Txx9ISx
+PB8J5XmfyBlGfr6aBB8lOYoevXfAy+8SpfKCdTJE5vNvB2lwacOz5CA1G/qcaQSie1OOGkDYgjSo
+BTyV1eUxztJjye0X1wYnVjUM7kWuL69q+c7zlRW+kgp9Z7X3Zd5JBUUygDwGvMuVHWnbpswWXvgk
+KdtEqxUBJx2D8x55qmHNW/EnHPIW1XZM8oyTJNif8a338MOKdRuhWumRSlzWrkwR2Glv+mFVwy3+
+t8n8a+1k1g4g5BxewJVPVdGQFX+eGiSUJVzu4T2H8ghkP2Gt1cq7LcmqwJ3ROvMHBEsMjVkvo3hh
+H7rOfw9K7LIs01qZytkMckODJq4XdD8jSLGyYzIo2CEAypqgmRSiEEOYgGgEB3ElevCakOqDXX5h
+4Nw+tkKuoI3lZOQgeY9zpCGCGp3rGBSjzPx3YB1dgrE4LVFLbAnKdFUFelZ2I6STYXNZA6/y8Ik6
+DEyQVcWTPUUPpdEw1uLuSGwErYbKkWGLbmCrNXueq8P/zsgg2XWRj4TZvJ6MiZrG2xoFD2v1jKBR
+3WRyosBI8ZhEdqJWZEfS0CBASx0DXtrmCD5nHTBlpiju4avXod/6Qam8FpxonSS0CMNZ1QCy1XYd
+K3BqoBA20pTdE/eo50UcNFYwZIwKRbXROAoJhq7rKPKMPUdynD5DRY+kipySE6PsUOMz1PSyyrPC
+DrmXeHQ9KOLWoyt9lqn+vDhiFYE4YTDEqyKkgAtTiT/gc12OqCcWQw2a1bec+jNEt1foEHb7OVmR
+c19I4jAJW8L4GQa8XBJzVhgmbEUctEirvOUZ/aRQ53zMixBPmi0x2schObuB0Wyu2zgdS0imwthD
+YfxdEqTpl/NfvMFYoasE2hPnxGwWe5vvAtJwhvgm8tSMHFPDFqqBdV2t/8bpv5Q+iN+T7Fk9TJ4e
+Ot6b3B8KOKQK8+PF47tPXy9+e3j6/fP3jz8edcU3Xpo2e/cBQWuG6Sh7NrwZhtSzdiq2Yb3WZqFf
+EPMZ14nyVzwPcVaEacIRC6tykIwomnn0aPDeLEWJRPG0VNM4sOfLkSFr5jc8r7aTqGOtP7V0vsMU
+wH21qGx7Uuu7hTKDD20bu65o0rIORhrUWR1oJmrR0m1gu23SkEaWR9fHZluXacoxfl9zHriZwIR0
+c3X9sph7TjDMhupG0QuxVLpw0/PnHITJzJIRMTYP6IExN9eoNk7ZGLjrLNGBmmEZn4sLoZFxOqxA
+i8Aw3oTL65sbhKjOE7DRW0SE5a40ebeV1XEqeoKEkFvMHzETox8WJvetiR4nu5YIG4cYy+v0tPhB
+L52Php1glmwLSQQLK66gm3rrE+nklee0O0826A3OUlHT16I+qUQycq61+2fIAr+eoe/hV2VGK+j8
+bs3Pfy38ebnXXDlEgT8zZUWWtGJxuq1HbuLATMLtGQkG16AbM0bbWrPGMPXMU0vMR4cVxgSRYceE
+M8XmLu2cmBWbYv17yJOYkXW+b5hwKXN/zsDjdLsUx5MydY7LBT9glEDF3YVsi2xJ9iHf+VehXQLT
+KpzQfklCBzaUU3miMN7kQ+jbhl3NMXw8raKKPUtLlPai3S+RuKqLq3AUM+TJ2dLhdtCezH6m7lWo
+dVKAbceQO9oZ5RzFcWTcEqugsoq9MTHbuFDP1RJ38mFNdR7iCUNY9AxDchb5ZVUdS2pFndMiOudF
+v0VcV0PCjnhvK3h/96qK6toZdxXwzvl8net88UZ0Hrk4Ch3XtPV68+P3pw83x3bCxL2P87H5QJxD
+z6vcqUajx/0G3+evW3jSAWkrXIc55DwlTfWZFyGeoxM5PVjKK5/FP+uoYXeeMs7TxBL5DA+iPlaI
+ZzjSQtTyW3iMDxvSD/xUDJNmNKb5rOEaIU4roOO3YrNAqaX+8iUTbTC0x5C4YCDpr1ALuPCTgyS2
+gXPCOeoU6otOdiJ121oXIVYq7vd5ypRxsSFaQWdLVreH10g0jRiX0t8YJvfJb6dhfF2W3+xtXPTk
+O6UY2+MUl7W0e0VknMzLPORiMUdTKXG7LDpgsEXzpcUgMs5s6JDIsjRKbG/nH7WLY9axleyLNmxa
+BYJa05aHBuLUmA9HOAeSMIKEVVulWzqEaZPW98f59vqXbK0OjnRotfC150lxkT89zQwVcQe1RXeK
+xqZFdHAYI2VciPZ/TosyDcUdAAA=
+EOF
+true \
+ && make olddefconfig \
+ && make -j$(nproc) \
+ && base64 -d <<EOF | gunzip > "${CACHEDIR:?}/gagainit.c" &&
+H4sIACTh02UAA41UTYvbMBC9+1cIlwY7JKtsYSlsNoVSAj1s0x62pzQsijxORCzJq4+wacl/35Ed
+u85HS3ywnp5n5j2NJNP+3Bq+SNbOlfaeUglmBUvNTHbDtaTLQq/oh+ELSD+Ughu9lcNM8w0YmvZp
+9E4oXvgMyAMYo/TN+lOHsi4T51QhlsecVwLpk7idpVJ75c5p69h/2IivmSF9rpV1JIOc+cI9g9oK
+o5UE5eYLMiF/IoJP/OPz09cJ9dbQQnNW0KVQ952pbectqKnqFWA8qArNfj4+DqL9OIq2WmSk8v3M
+18A3SfW9NlMbs9obDoMjzjFsuRucxeaiAFybA+l2JeZ4ZcVKQUYKrVa1TF6wle1mVg76GXMsSg/r
+tM547kjoEBEq1+gz0CInSeCSgzzphY8pmUzI8Jb0eqTa0TCdzr5PZ09NufCURqB2En8xwJxAM+/t
+L+wGqUul4zYwiMhNJkyrMvp4d5eSBzLq1qtqop42nZqYBdxpsyM5w05kcadueOBVuOS2Q+6j+h11
+LX4LbfqXxcpeCEiafWlsnva+020Sunu2hMZ+pXfB8ZHbfYQu0R+RTKgkAJTlg8O+I97OF03x7nGK
+lVaAi4hpaTQP4DCOEDZqlxMy2NLS2YAR1ui6LPniwVfTFl2XaNcyYCfL/Do1bHgYcbg+geaW8pXR
+vgxMi5rc+gaAW2vrFJOQxIe/GMZY8Rt0/pdJm3h4BV5gaLjpuIpKqoXVbb/0azk4bQ7CFIucnAMD
+zhtFbse4/W9r76rneAUAAA==
+EOF
+true \
+ && gcc -Wall -static -o "${CACHEDIR:?}/gagainit" "${CACHEDIR:?}/gagainit.c" \
+ && echo "[ERROR] TODO Need more steps here" && false \
+ && true
+
+
diff --git a/doc/note/qemu/setup-jni-env.txt b/doc/note/qemu/setup-jni-env.txt
new file mode 100644
index 0000000..62d204a
--- /dev/null
+++ b/doc/note/qemu/setup-jni-env.txt
@@ -0,0 +1,22 @@
+
+#
+# Debian with tools for java-native-interface development.
+#
+# HINT: Since migrating away from docker, I switched it to jdk-17 (because 11
+#       seems to be no longer available). This change is not yet tested.
+#
+set -e
+
+
+### For debian 12
+true \
+ && SUDO= \
+ && export JAVA_HOME="/usr/lib/jvm/java-17-openjdk-amd64" \
+ && $SUDO apt update \
+ && $SUDO apt install -y --no-install-recommends \
+ g++ make openjdk-17-jdk-headless \
+ && (printf '%s\n' "JAVA_HOME=${JAVA_HOME:?}" \
+ ) | $SUDO tee "/etc/environment" >/dev/null \
+ && $SUDO apt clean \
+ && true
+
diff --git a/doc/note/qemu/setup-jre8-env.txt b/doc/note/qemu/setup-jre8-env.txt
new file mode 100644
index 0000000..f39cf98
--- /dev/null
+++ b/doc/note/qemu/setup-jre8-env.txt
@@ -0,0 +1,18 @@
+
+#
+# openjdk java 1.8 runtime environment.
+#
+
+### For alpine 3.16.0
+true \
+ && apk add openjdk8-jre \
+ && true
+
+### For debian 9
+true \
+ && SUDO= \
+ && $SUDO apt update \
+ && `# Use one of openjdk-8-jre-headless or openjdk-8-jre` \
+ && $SUDO apt install -y --no-install-recommends openjdk-8-jre \
+ && true
+
diff --git a/doc/note/qemu/setup-maven-env.txt b/doc/note/qemu/setup-maven-env.txt
new file mode 100644
index 0000000..e4ce16d
--- /dev/null
+++ b/doc/note/qemu/setup-maven-env.txt
@@ -0,0 +1,16 @@
+#
+# Maven build env.
+#
+# Use this to share your hosts repository with the container:
+#
+# mount -t cifs //10.0.2.2/path/to/.m2/repository /var/tmp/.m2/repository
+#
+set -e
+
+
+### Made for alpine 3.16.0
+true \
+ && apk add maven \
+ && sed -i "s,</settings>, <localRepository>/var/tmp/.m2/repository</localRepository>\n</settings>,g" /usr/share/java/maven-3/conf/settings.xml \
+ && true
+
diff --git a/doc/note/qemu/setup-nginx-env.txt b/doc/note/qemu/setup-nginx-env.txt
new file mode 100644
index 0000000..9a5aeb2
--- /dev/null
+++ b/doc/note/qemu/setup-nginx-env.txt
@@ -0,0 +1,48 @@
+#
+# Bare nginx server serving HTTP/80 and HTTPS/443.
+#
+set -e
+
+
+### Made for alpine 3.16.0
+true \
+ && CN="example.com" \
+ \
+ && apk add nginx openssl \
+ && mkdir /etc/ssl/private \
+ && openssl genrsa -out /etc/ssl/private/nginx.key 2048 \
+ && openssl req -new -key /etc/ssl/private/nginx.key \
+ -out /etc/ssl/private/nginx.csr \
+ -subj "/C=/ST=/L=/O=/OU=/CN=${CN:?}" \
+ && openssl x509 -req -days 365 -in /etc/ssl/private/nginx.csr \
+ -signkey /etc/ssl/private/nginx.key -out /etc/ssl/certs/nginx.crt \
+ && chgrp nginx /etc/ssl/private/nginx.key \
+ && chmod 0640 /etc/ssl/private/nginx.key \
+ && printf '%s\n' \
+ 'server {' \
+ ' listen 80 default_server;' \
+ ' listen [::]:80 default_server;' \
+ ' listen 443 ssl default_server;' \
+    '  listen [::]:443 ssl default_server;' \
+ ' ssl_certificate /etc/ssl/certs/nginx.crt;' \
+ ' ssl_certificate_key /etc/ssl/private/nginx.key;' \
+ ' location / {' \
+ ' root /srv/www;' \
+ ' index index.html index.htm;' \
+ ' }' \
+ '}' \
+ > /etc/nginx/http.d/default.conf \
+ && mkdir /srv/www \
+ && printf '<h1>Nginx says hi</h1>\n' > /srv/www/index.html \
+ && chown nginx:nginx /srv/www \
+ && chown nginx:nginx /srv/www/index.html \
+ && apk del openssl \
+ && `# Configure nginx as a service` \
+ && rc-update add nginx \
+ && /etc/init.d/nginx start \
+ && sleep 1 \
+ && echo \
+ && printf 'GET /index.html HTTP/1.0\r\n\r\n'|nc localhost 80 \
+ && echo \
+ && true
+
diff --git a/doc/note/qemu/setup-windoof.txt b/doc/note/qemu/setup-windoof.txt
new file mode 100644
index 0000000..5df2cac
--- /dev/null
+++ b/doc/note/qemu/setup-windoof.txt
@@ -0,0 +1,40 @@
+
+Setup Windoof in an experiment VM
+=================================
+
+Install the system STRICTLY WITHOUT internet connection.
+
+Stop annoying windoof auto updates. Make sure to use windoof newlines (CRLF) in
+the reg file:
+
+
+Windows Registry Editor Version 5.00
+[HKEY_LOCAL_MACHINE\SOFTWARE\Policies\Microsoft\Windows\WindowsUpdate\AU]
+"NoAutoUpdate"=dword:00000001
+
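+To make the file fetchable from the guest, serve it on the host loopback.
+A minimal sketch (assumes the guestfwd from qemu.txt maps guest 10.0.2.9:80
+to host 127.0.0.1:80; "/path/to/regfiles" is a placeholder; port 80 may need
+root):
+
+  (cd /path/to/regfiles && $SUDO python3 -m http.server 80 --bind 127.0.0.1)
+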
+curl -sSD- http://10.0.2.9:80/stop-annoying-updates.reg -O
+
+After importing the "reg" file, we can reboot windoof with internet connection.
+
+Download/Install:
+- "https://learn.microsoft.com/en-us/sysinternals/downloads/sdelete"
+- "https://notepad-plus-plus.org/"
+
+Install needed software (Maybe: firefox, MsOffice, MsTeams, ..?).
+
+Manually trigger updates, reboot, updates, reboot, (likely some more turns ...)
+
+Configure Performance options. Disable all but screen fonts.
+
+Make sure no more updates are running. Then, I guess it is best to reboot
+without internet access once more to clean up the disk. Delete unused files
+like the trashcan or downloaded installers:
+
+ SDelete.exe -nobanner -z C:
+
+Shut down the win guest, then compact the disk image.
+
+ qemu-img convert -O qcow2 input.qcow output.qcow2
+
+Image is ready.
+
diff --git a/doc/note/qemu/setup-zwp-env.txt b/doc/note/qemu/setup-zwp-env.txt
new file mode 100644
index 0000000..a4ce521
--- /dev/null
+++ b/doc/note/qemu/setup-zwp-env.txt
@@ -0,0 +1,24 @@
+
+(true \
+ && DIO_URL=https://wikit.post.ch/download/attachments/613505757/d-054897-060542.zip?api=v2 \
+ && SUDO=sudo \
+ && WORKDIR=/home/$USER/zarniwoop-workspace \
+ && CACHEDIR=/var/tmp \
+ && DIO_VERSION=$(echo $DIO_URL|sed -E 's_^.*/d-([0-9-]+).zip.*$_\1_') \
+ && $SUDO apt install -y --no-install-recommends openssh-server vim make curl git unzip \
+ && cd "${CACHEDIR:?}" \
+ && echo H4sIAAAAAAAAA1XLOw4CMQwA0Z5TUCMtsuNffBw7jiU6ak7PChqoZppX4NEUaLOw2Vfw6JRg8UXW6tdbHSA83c644by/Hs8Lp23PziU+AjcHdcKmjbjFJ8av0nPGRwGanmRkdSlsKTItcBQgjCL8U+b6VW9H4D67ogAAAA== | base64 -d | gunzip > MD5SUM \
+  && curl -Lo "d-${DIO_VERSION:?}.zip" "${DIO_URL:?}" \
+ && grep "${DIO_VERSION:?}" MD5SUM | md5sum -c - \
+ && mkdir -p "${WORKDIR:?}" \
+ && cd "${WORKDIR:?}" \
+ && unzip "${CACHEDIR:?}/d-${DIO_VERSION:?}.zip" \
+ && mv DIO021E "d-${DIO_VERSION:?}" \
+ && cd "d-${DIO_VERSION:?}/devel" \
+ && rm -rf app \
+ && git clone https://gitit.post.ch/scm/isa/zarniwoop.git app \
+ && cd app \
+ && printf '\n Zarniwoop setup complete (TODO install compiler etc)\n\n' \
+ && true)
+
+
diff --git a/doc/note/redis/redis.txt b/doc/note/redis/redis.txt
new file mode 100644
index 0000000..d64948a
--- /dev/null
+++ b/doc/note/redis/redis.txt
@@ -0,0 +1,32 @@
+
+Redis
+================
+
+
+## Run redis commands from cli
+
+redis-cli -a ***** SCAN 0
+
+
+## Example commands
+
+SCAN 0 COUNT 42
+TYPE key
+GET redisques:queues:vehicleoperation-events-for-vehicle-9942
+LRANGE key start stop
+LRANGE key 0 0    (get head)
+LRANGE key -1 -1 (get tail)
+
+
+## List all keys  (WARN: do NOT do this in production!)
+
+SCAN 0 MATCH *part:of:key:* COUNT 42
+KEYS *:part:inside-the:key:*
+KEYS redisques:queues:vehicleoperation-events-for-vehicle-*
+
+
+## Delete by pattern
+
+redis-cli --scan --pattern 'schedulers:*' | xargs redis-cli del
+
+
diff --git a/doc/note/setup-debian/etc-environment b/doc/note/setup-debian/etc-environment
new file mode 100644
index 0000000..1a67ae1
--- /dev/null
+++ b/doc/note/setup-debian/etc-environment
@@ -0,0 +1,18 @@
+
+# Originally copy-pasted from latitude-E6530
+
+# Need that duplicate here because openbox is too stupid to load its own env
+# file.
+
+# Configure UI colors
+# .. for GTK-2
+#GTK2_RC_FILES=/usr/share/themes/Adwaita-dark/gtk-2.0/gtkrc
+# .. for GTK-3
+#GTK_THEME=Adwaita:dark
+# .. Qt 5 (needs 'qt5-style-plugins' to be installed)
+#QT_QPA_PLATFORMTHEME=gtk2
+
+# Disable QT DPI scaling. Can be really helpful for eg vlc in a multi-monitor
+# setup.
+#QT_AUTO_SCREEN_SCALE_FACTOR=0
+
diff --git a/doc/note/setup-debian/setup-debian.txt b/doc/note/setup-debian/setup-debian.txt
new file mode 100644
index 0000000..b36945e
--- /dev/null
+++ b/doc/note/setup-debian/setup-debian.txt
@@ -0,0 +1,130 @@
+
+How to Setup debian for MY needs
+================================
+
+## Partitioning
+
+Use the smaller & faster SSD for the system. Make sure it consists of TWO
+partitions, so the system can easily be upgraded later by just placing a fresh
+install into the other partition.
+
+I prefer to put the users' home onto a larger drive (maybe an HDD) for data
+storage. If there's only ONE drive, we need an additional partition for the
+home too.
+
+
+## Base system
+
+Install the base system through debian's ISO installers. Usually the net
+installer is fine.
+
+Do NOT install ANY additional bloat! E.g. untick ALL extra software like
+desktops etc. Even disable the standard-system-utilities option.
+
+
+## Install core tools
+
+ALWAYS use '--no-install-recommends' when installing something, to prevent
+useless bloat from being installed.
+
+ && apt install -y --no-install-recommends vim net-tools openssh-server openssh-client bash bash-completion
+
+
+## Setup firewall
+
+WARN: Does NOT setup the effective rules. Rules need to be filled in by admin.
+
+WARN: This snippet may cut-off network connections. Including your remote shell!
+
+ && $SUDO apt install -y --no-install-recommends iptables iptables-persistent \
+ && printf '# TODO add contents here\n' | $SUDO tee /etc/iptables/src-default >/dev/null \
+ && printf '\n[WARN ] Needs more setup: /etc/iptables/src-default\n\n' \
+ && printf '%s\n' \
+ '## Apply from file' '' \
+ 'ncat /etc/iptables/src-default | $SUDO iptables-restore' '' \
+    'cat /etc/iptables/src-default | $SUDO iptables-restore' '' \
+ '$SUDO iptables-save | $SUDO tee /etc/iptables/rules.v4 > /dev/null' \
+ | $SUDO tee /etc/iptables/README >/dev/null \
+ && printf '# TODO setup file contents\n' | $SUDO tee /etc/iptables/src-default4 >/dev/null \
+ && printf '%s\n' \
+ '*filter' '' \
+ '# Loopback' \
+ '-A INPUT -i lo -j ACCEPT' \
+ '-A OUTPUT -o lo -j ACCEPT' '' \
+ '# Log blocked connection attemps' \
+    '# Log blocked connection attempts' \
+ '-A FORWARD -j LOG --log-prefix "Fw6BadFwd: " --log-level 6' \
+ '-A OUTPUT -j LOG --log-prefix "Fw6BadOut: " --log-level 6' '' \
+ '# Disallow any non-whitelisted packets' \
+ '-A INPUT -j DROP' \
+ '-A FORWARD -j REJECT' \
+ '-A OUTPUT -j REJECT' '' \
+ 'COMMIT' | $SUDO tee /etc/iptables/src-default6 >/dev/null \
+ && printf '%s\n' \
+ '*filter' \
+ '-A INPUT -j ACCEPT' \
+ '-A FORWARD -j ACCEPT' \
+ '-A OUTPUT -j ACCEPT' \
+ 'COMMIT' | $SUDO tee /etc/iptables/src-allowAll4 >/dev/null \
+ && $SUDO touch /etc/iptables/src-tmp \
+
+
+## Mount home partition
+
+# /etc/fstab
+UUID=xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx /mnt/nameOfHdd ext4 noatime 0 2
+/mnt/nameOfHdd/home /home none bind 0 0
+
+
+## Configure Locale
+
+- In "/etc/locale.gen" Enable all of:
+ "de_CH.UTF-8 UTF-8", "de_CH ISO-8859-1", "en_DK.UTF-8 UTF-8", "en_DK ISO-8859-1".
+- Run "locale-gen".
+- Check list with "locale -a".
+- Change "/etc/default/locale" contents to:
+ LANG=en_DK.UTF-8
+ LANGUAGE="en_US:en"
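+
+A minimal sketch doing the same steps non-interactively (assumes the default
+"# <locale> <charset>" comment style in "/etc/locale.gen"):
+
+  && $SUDO sed -i -E 's;^# *((de_CH|en_DK)(\.UTF-8 UTF-8| ISO-8859-1))$;\1;' /etc/locale.gen \
+  && $SUDO locale-gen \
+  && printf 'LANG=en_DK.UTF-8\nLANGUAGE="en_US:en"\n' | $SUDO tee /etc/default/locale >/dev/null \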
+
+
+## Install Desktop Env
+
+ && $SUDO apt install -y --no-install-recommends xorg openbox mate-terminal lightdm light-locker feh scrot lxpanel qalculate-gtk gmrun gnome-system-monitor vim-gtk3 \
+ && mkdir ~/.config ~/.config/openbox || true \
+ && update-alternatives \
+
+Populate "/etc/environment" as described by "./etc-environment".
+
+
+## Install whatever needed
+
+ && $SUDO apt install -y --no-install-recommends \
+ `# basic CLI` \
+ vim htop pv openssh-client iptables iptables-persistent xxd zip unzip xz-utils p7zip-full \
+ file trash-cli genisoimage ncat curl \
+ `# basic UI` \
+ firefox-esr file-roller thunderbird chromium evince \
+ `# software devel` \
+ git sqlite3 manpages-dev gdb qemu-utils qemu-system wireshark samba tigervnc-viewer \
+ `# server` \
+ nginx-light avahi-daemon \
+ `# mDNS client & tools` \
+ libnss-mdns avahi-utils \
+ `# multimedia` \
+ pulseaudio pavucontrol vlc audacity eom darktable gimp lame flac opus-tools ffmpeg \
+ `# encryption` \
+ keepassxc gpg \
+ `# UI customization` \
+ gnome-themes-extra \
+ `# Office Suite` \
+ libreoffice-writer libreoffice-calc libreoffice-draw libxrender1 libgl1 \
+ fonts-crosextra-caladea fonts-crosextra-carlito fonts-dejavu fonts-liberation \
+ fonts-liberation2 fonts-linuxlibertine fonts-noto-core fonts-noto-mono fonts-noto-ui-core \
+ fonts-sil-gentium-basic pdftk-java \
+ `# Others` \
+ lm-sensors fancontrol bc rsync \
+ `# Nvidia graphics (open)` \
+ mesa-utils clinfo mesa-opencl-icd \
+ `# Nvidia graphics (non-free, DoesNotWorkYet)` \
+ nvidia-detect nvidia-tesla-470-driver linux-headers-amd64 \
+
+
diff --git a/doc/note/ssh/ssh-setup.txt b/doc/note/ssh/ssh-setup.txt
index 6a2812b..9f3dde2 100644
--- a/doc/note/ssh/ssh-setup.txt
+++ b/doc/note/ssh/ssh-setup.txt
@@ -7,6 +7,7 @@ SSH Setup & Key Management
- [Insane answer about key formats](https://stackoverflow.com/a/29707204/4415884)
+
## Create New Ssh Key
Create "path/to/key" and "path/to/key.pub" as a 2048 bit RSA with
@@ -16,6 +17,13 @@ Create "path/to/key" and "path/to/key.pub" as a 2048 bit RSA with
ssh-keygen -t rsa -b 2048 -f path/to/key -C "your comment"
```
+Create "path/to/key" and "path/to/key.pub" as an elliptic curve.
+
+```sh
+ssh-keygen -t ed25519 -f path/to/key -C "your comment"
+```
+
+
## Change Passphrase
@@ -26,6 +34,7 @@ ssh-keygen -p -f path/to/key
NOTE: Just hitting enter when asked for the new one will remove the passphrase.
+
## Inspect keys
Print public key hash:
@@ -46,6 +55,7 @@ Print detailed DER file content:
openssl x509 -in dumpcertfile -inform DER -text
+
## Export pub key in misc formats
ssh-keygen -e -f path/to/ssh2pub-or-privKey -m PKCS8
@@ -62,11 +72,13 @@ TODO to PKCS8:
ssh-keygen -i -f path/to/key.pub -e -m PKCS8 > path/to/pub.pem
+
## Remove obsolete entry from known_hosts
ssh-keygen -f path/to/known_hosts -R "example.com"
+
## TODO
-e This option will read a private or public OpenSSH key file
diff --git a/doc/note/ssh/ssh-usage.txt b/doc/note/ssh/ssh-usage.txt
index f9bc2a8..ff76f4c 100644
--- a/doc/note/ssh/ssh-usage.txt
+++ b/doc/note/ssh/ssh-usage.txt
@@ -30,6 +30,11 @@ request to "localhost:7080" on HOST.
ssh -o 'ProxyCommand ncat -p12345 %h %p' MY_SERVER
+## Get rid of bullshit warnings
+
+ -oStrictHostKeyChecking=no -oUserKnownHostsFile=/dev/null
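+
+For example (hypothetical host):
+
+    ssh -oStrictHostKeyChecking=no -oUserKnownHostsFile=/dev/null user@example.com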
+
+
## Run Cmd via jumphost
Source: "https://www.cyberciti.biz/faq/linux-unix-ssh-proxycommand-passing-through-one-host-gateway-server/"
diff --git a/doc/note/tcpdump/tcpdump.txt b/doc/note/tcpdump/tcpdump.txt
index 9c9feb6..71ffb02 100644
--- a/doc/note/tcpdump/tcpdump.txt
+++ b/doc/note/tcpdump/tcpdump.txt
@@ -16,8 +16,13 @@ Tcpdump
redis=6379, brox=7022, fluentd=7099
- cd /tmp && timeout --foreground -s INT 180 tcpdump -ni any -C 50M -W 999 -w houston-STAGE-tcp-`date -u +%Y%m%d-%H%M%S`.pcap "not port 443 and not port 6379 and not port 7022 and not port 7099" -z gzip
+ cd /usr/local/vertx/houston-storage-file && timeout --foreground -s INT 180 tcpdump -ni any -C 50M -W 999 -w houston-STAGE-tcp-`date -u +%Y%m%d-%H%M%S`.pcap "not port 443 and not port 6379 and not port 7022 and not port 7099" -z gzip
+ cd /tmp && timeout --foreground -s INT 180 tcpdump -ni any -C 50M -W 999 -w houston-prod-tcp-`date -u +%Y%m%d-%H%M%S`.pcap "not port 443 and not port 6379 and not port 7022 and not port 7099" -z gzip
+
+ ocprod exec -i "$(ocprod get pods|egrep ston-[1-9]|cut -f1 -d' ')" -- sh -c 'cd /tmp && tar c "houston-prod-tcp-20231114-165243.pcap*.gz"' > houston-prod-tcp-20231114-165243.pcap.gz.tar
+
+ ocprod exec -ti "$(ocprod get pods|egrep ston-[1-9]|cut -f1 -d' ')" -- sh -c 'cd /tmp && watch ls -Ahl'
## pcap cli PreProcessing
@@ -31,6 +36,11 @@ Tips: "https://chrissanders.org/2018/06/large-captures4-filter-whittling/"
+## Local dumps isa-only filter
+
+ "portrange 6000-9999 and not port 7778"
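+
+For example (hypothetical dump path):
+
+  tcpdump -ni any -w /tmp/isa-only.pcap "portrange 6000-9999 and not port 7778"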
+
+
## Accept HTTP POST requests:
"tcp[((tcp[12:1] & 0xf0) >> 2):4] = 0x504F5354"
@@ -52,6 +62,15 @@ Tips: "https://chrissanders.org/2018/06/large-captures4-filter-whittling/"
"net 172.18.0.0/16"
+## Filter kube-probes "GET /houston/server/info" or '{"name":"houston",'
+
+ tcpdump -nni any -w /tmp/houston-${PAISA_ENV:?}-tcp-$(date -u +%Y%m%d-%H%M%SZ)-%s.pcap -C 8M -W 99 -G 600 "(tcp[((tcp[12:1]&0xf0)>>2)+0:4] = 0x47455420 && tcp[((tcp[12:1]&0xf0)>>2)+4:4] = 0x2F686F75 && tcp[((tcp[12:1]&0xf0)>>2)+8:4] = 0x73746F6E && tcp[((tcp[12:1]&0xf0)>>2)+12:4] = 0x2F736572 && tcp[((tcp[12:1]&0xf0)>>2)+16:4] = 0x7665722F && tcp[((tcp[12:1]&0xf0)>>2)+20:4] = 0x696E666F && tcp[((tcp[12:1]&0xf0)>>2)+24:1] = 0x20) or (tcp[((tcp[12:1]&0xf0)>>2)+115:4] = 0x7B226E61 && tcp[((tcp[12:1]&0xf0)>>2)+119:4] = 0x6D65223A && tcp[((tcp[12:1]&0xf0)>>2)+123:4] = 0x22686F75 && tcp[((tcp[12:1]&0xf0)>>2)+127:4] = 0x73746F6E && tcp[((tcp[12:1]&0xf0)>>2)+131:2] = 0x222C)"
+
+
+## Try dump kube-probes fully
+
+ timeout --foreground 900 tcpdump -nni any -w /tmp/houston-${PAISA_ENV:?}-tcp-$(date -u +%Y%m%d-%H%M%SZ)-%s.pcap -C 42M -W 42 -G 600 "host 10.127.73.1 and port 7012"
+
## Extract hosts file from DNS traffic
@@ -61,3 +80,8 @@ Not perfect because needs manual fine-tuning. But can be helpful anyway.
Maybe also try option "-zhosts" some time. It should do something similar.
+
+[man tcpdump](https://www.tcpdump.org/manpages/tcpdump.1.html)
+[no name port numbers rhel patch](https://superuser.com/a/587304/1123359)
+[complex filter by byte contents](https://security.stackexchange.com/a/121013/179017)
+
diff --git a/doc/note/windoof/kill-auto-update.txt b/doc/note/windoof/kill-auto-update.txt
new file mode 100644
index 0000000..d6b896f
--- /dev/null
+++ b/doc/note/windoof/kill-auto-update.txt
@@ -0,0 +1,18 @@
+
+Stop Automatic windoof updates
+==============================
+
+For example in virtual machines running in isolated environments, shouting
+auto updates are nothing but annoying. Further, they continuously bloat VM
+images for no reason and make snapshotting unnecessarily tedious.
+
+
+
+## Stop annoying updates
+
+Windows Registry Editor Version 5.00
+[HKEY_LOCAL_MACHINE\SOFTWARE\Policies\Microsoft\Windows\WindowsUpdate\AU]
+"NoAutoUpdate"=dword:00000001
+
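+One way to apply it (a sketch; "kill-auto-update.reg" is a hypothetical file
+name) is importing the file from an elevated command prompt:
+
+  reg import kill-auto-update.reg
+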
+
diff --git a/doc/note/msteams/msteams.txt b/doc/note/windoof/msteams.txt
index 216c9ce..216c9ce 100644
--- a/doc/note/msteams/msteams.txt
+++ b/doc/note/windoof/msteams.txt
diff --git a/doc/note/windoof/msys-path-issue.txt b/doc/note/windoof/msys-path-issue.txt
new file mode 100644
index 0000000..0b49323
--- /dev/null
+++ b/doc/note/windoof/msys-path-issue.txt
@@ -0,0 +1,3 @@
+
+MSYS_NO_PATHCONV=1
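+
+Stops git-bash (MSYS) from mangling unix-looking paths into windoof
+paths when calling native tools. E.g:
+
+  MSYS_NO_PATHCONV=1 docker run -v "$PWD:/work" alpine ls /work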
+
diff --git a/doc/note/windoof/outlook-expor-eml.txt b/doc/note/windoof/outlook-expor-eml.txt
new file mode 100644
index 0000000..895779f
--- /dev/null
+++ b/doc/note/windoof/outlook-expor-eml.txt
@@ -0,0 +1,14 @@
+
+How to export email as EML file in stupid systems
+=================================================
+
+- "https://outlook.office.com"
+- Compose a new eMail.
+- DragNDrop the email from the inbox to the draft (add attachment).
+- Now "Download" that attachment.
+
+
+## Source
+
+- [How to save mail as eml](https://superuser.com/a/1474143/1123359)
+
diff --git a/doc/note/windoof/proto-handler.txt b/doc/note/windoof/proto-handler.txt
new file mode 100644
index 0000000..b4d214c
--- /dev/null
+++ b/doc/note/windoof/proto-handler.txt
@@ -0,0 +1,6 @@
+
+
+
+## Source
+- [how to fix windoof](https://superuser.com/a/1066769/1123359)
+
diff --git a/src/main/c/PcapOne/PcapOne.c b/src/main/c/PcapOne/PcapOne.c
deleted file mode 100644
index 2eb9e25..0000000
--- a/src/main/c/PcapOne/PcapOne.c
+++ /dev/null
@@ -1,311 +0,0 @@
-/* TODO fix this bullshit */
-typedef unsigned u_int;
-typedef unsigned short u_short;
-typedef unsigned char u_char;
-#include <pcap/pcap.h>
-/* endOf TODO */
-
-
-/* System */
-#include <assert.h>
-#include <stdlib.h>
-#include <stdint.h>
-#include <stdio.h>
-#include <string.h>
-
-static char const*const DEV_STDIN = "/dev/stdin";
-
-#define FLG_isHelp (1<<0)
-#define FLG_isTcpPsh (1<<3)
-#define FLG_isTcpRst (1<<4)
-#define FLG_isTcpSyn (1<<5)
-#define FLG_isTcpFin (1<<6)
-#define FLG_isHttpReq (1<<7)
-#define FLG_isLlLinux (1<<12)
-#define FLG_isHdrPrinted (1<<13)
-#define FLG_INIT (0)
-
-typedef struct PcapOne PcapOne;
-
-
-struct PcapOne {
- uint_least16_t flg;
- const char *dumpFilePath;
- char *pcapErrbuf;
- pcap_t *pcap;
- unsigned long frameNr;
- struct/*most recent frame*/{
- int llProto;
- int llHdrEnd;
- };
- struct/*most recent packet*/{
- int netProto;
- int netBodyLen;
- int netHdrEnd;
- int_fast32_t netTotLen;
- uint_least32_t ipSrcAddr, ipDstAddr;
- };
- struct/*most recent segment*/{
- int trspBodyLen;
- int trspSrcPort, trspDstPort;
- int trspHdrEnd;
- };
- struct/*most recent http requst*/{
- const uint8_t *httpReqHeadline;
- int httpReqHeadline_len;
- int httpReq_off; /* pkg offset from begin of most recent request */
- };
-};
-
-
-/*BEG func fwd decl*/
-static void parse_ll_LINUX_SLL( PcapOne*, const struct pcap_pkthdr*, const u_char* );
-static void parse_net_IPv4( PcapOne*, const struct pcap_pkthdr*, const u_char* );
-static void parse_trsp_TCP( PcapOne*, const struct pcap_pkthdr*, const u_char* );
-static void parse_appl_HTTP_req( PcapOne*, const struct pcap_pkthdr*, const u_char* );
-static void printParsingResults( PcapOne*, const struct pcap_pkthdr* );
-/*END func fwd decl*/
-
-static void printHelp(){
- #define STRQUOT_21a9ffbe344c0792ed88688d6c676359(s) #s
- #define STRQUOT(s) STRQUOT_21a9ffbe344c0792ed88688d6c676359(s)
- const char *basename = "/"__FILE__ + sizeof("/"__FILE__);
- for(; basename[-1] != '/'; --basename );
- printf("%s%s%s", " \n"
- " ", basename, " " STRQUOT(PROJECT_VERSION) "\n"
- " \n"
- " Options:\n"
- " \n"
- " --pcap-stdin\n"
- " Like --pcap but reading from stdin.\n"
- " \n"
- " --pcap <path>\n"
- " Pcap file to operate on. Compressed files are NOT supported.\n"
- " \n");
- #undef STRQUOT_21a9ffbe344c0792ed88688d6c676359
- #undef STRQUOT
-}
-
-
-static int parseArgs( PcapOne*app, int argc, char**argv ){
- app->flg = FLG_INIT;
- app->dumpFilePath = NULL;
- for( int iA = 1 ; iA < argc ; ++iA ){
- const char *arg = argv[iA];
- if(0){
- }else if( !strcmp(arg,"--help") ){
- app->flg |= FLG_isHelp; return 0;
- }else if( !strcmp(arg,"--pcap") ){
- arg = argv[++iA];
- if( arg == NULL ){ fprintf(stderr, "EINVAL --pcap needs value\n"); return -1; }
- app->dumpFilePath = arg;
- }else if( !strcmp(arg,"--pcap-stdin") ){
- app->dumpFilePath = DEV_STDIN;
- }else{
- fprintf(stderr, "EINVAL: %s\n", arg); return -1;
- }
- }
- if( app->dumpFilePath == NULL ){
- fprintf(stderr, "EINVAL Arg missing: --pcap <path>\n"); return -1; }
- return 0;
-}
-
-
-static void onPcapPkg( u_char*user, const struct pcap_pkthdr*hdr, const u_char*buf ){
- PcapOne *const app = (void*)user;
-
- /* prepare for this new packet */
- app->frameNr += 1;
- app->flg &= ~(FLG_isTcpPsh | FLG_isTcpRst | FLG_isTcpSyn | FLG_isTcpFin | FLG_isHttpReq);
-
- /* data-link layer */
- switch( pcap_datalink(app->pcap) ){
- case 0x71: parse_ll_LINUX_SLL(app, hdr, buf); break;
- default: assert(!fprintf(stderr,"pcap_datalink() -> 0x%02X\n", pcap_datalink(app->pcap)));
- }
-
- /* network layer */
- switch( app->llProto ){
- case 0x0800: parse_net_IPv4(app, hdr, buf); break;
- default: printf("???, proto=0x%04X, network-layer\n", app->llProto); return;
- }
-
- /* transport layer */
- switch( app->netProto ){
- case 0x06: parse_trsp_TCP(app, hdr, buf); break;
- default: printf("???, proto=0x%02X, transport-layer\n", app->netProto); return;
- }
-
- assert(app->trspBodyLen >= 0);
-
- /* application layer, towards server */
- switch( app->trspDstPort ){
- case 80: parse_appl_HTTP_req(app, hdr, buf); break;
- case 7012: parse_appl_HTTP_req(app, hdr, buf); break;
- case 8080: parse_appl_HTTP_req(app, hdr, buf); break;
- }
-
- printParsingResults(app, hdr);
-}
-
-
-static void parse_ll_LINUX_SLL( PcapOne*app, const struct pcap_pkthdr*hdr, const u_char*buf ){
- assert(hdr->caplen >= 15);
- app->llProto = buf[14]<<8 | buf[15];
- app->llHdrEnd = 16;
-}
-
-
-static void parse_net_IPv4( PcapOne*app, const struct pcap_pkthdr*hdr, const u_char*buf ){
- assert(hdr->caplen >= app->llHdrEnd+19 && "TODO_775afde7f19010220e9df8d5e2924c3e");
- int_fast8_t netHdrLen = (buf[app->llHdrEnd+0] & 0x0F) * 4;
- app->netTotLen = buf[app->llHdrEnd+2] << 8 | buf[app->llHdrEnd+3];
- app->netProto = buf[app->llHdrEnd+9];
- app->ipSrcAddr = 0
- | ((uint_least32_t)buf[app->llHdrEnd+12]) << 24
- | ((uint_least32_t)buf[app->llHdrEnd+13]) << 16
- | buf[app->llHdrEnd+14] << 8
- | buf[app->llHdrEnd+15] ;
- app->ipDstAddr = 0
- | ((uint_least32_t)buf[app->llHdrEnd+16]) << 24
- | ((uint_least32_t)buf[app->llHdrEnd+17]) << 16
- | buf[app->llHdrEnd+18] << 8
- | buf[app->llHdrEnd+19] ;
- app->netHdrEnd = app->llHdrEnd + netHdrLen;
- app->netBodyLen = app->netTotLen - netHdrLen;
-}
-
-
-static void parse_trsp_TCP( PcapOne*app, const struct pcap_pkthdr*hdr, const u_char*buf ){
- assert(hdr->caplen >= app->netHdrEnd+12 && "TODO_058d5f41043d383e1ba2c492d0db4b6a");
- app->trspSrcPort = buf[app->netHdrEnd+0] << 8 | buf[app->netHdrEnd+1];
- app->trspDstPort = buf[app->netHdrEnd+2] << 8 | buf[app->netHdrEnd+3];
- int tcpHdrLen = (buf[app->netHdrEnd+12] >> 4) * 4;
- app->trspHdrEnd = app->netHdrEnd + tcpHdrLen;
- app->trspBodyLen = app->netBodyLen - tcpHdrLen;
-}
-
-
-static void parse_appl_HTTP_req( PcapOne*app, const struct pcap_pkthdr*hdr, const u_char*buf ){
- app->flg |= FLG_isHttpReq;
- app->httpReqHeadline = buf + app->trspHdrEnd;
- app->httpReqHeadline_len = 0;
- for(;; ++app->httpReqHeadline_len ){
- if( (app->trspHdrEnd + app->httpReqHeadline_len) > hdr->caplen ) break;
- if( app->httpReqHeadline[app->httpReqHeadline_len] == '\r' ) break;
- if( app->httpReqHeadline[app->httpReqHeadline_len] == '\n' ) break;
- }
- /* TODO improve, as now its like a guess only */
- int isNewRequest = 0
- | !memcmp(buf + app->trspHdrEnd, "GET ", 4)
- | !memcmp(buf + app->trspHdrEnd, "PUT ", 4)
- | !memcmp(buf + app->trspHdrEnd, "POST ", 5)
- | !memcmp(buf + app->trspHdrEnd, "DELETE ", 7)
- ;
- if( isNewRequest ){
- app->httpReq_off = 0;
- }else{
- app->httpReq_off = 42; /*TODO make more accurate*/
- }
-}
-
-
-static void printParsingResults( PcapOne*app, const struct pcap_pkthdr*hdr ){
-
- int isHttpRequest = (app->flg & FLG_isHttpReq);
- int isHttpReqBegin = isHttpRequest && app->httpReq_off == 0;
-
- if( isHttpRequest && isHttpReqBegin ){
- /* find http method */
- const uint8_t *method = app->httpReqHeadline;
- int method_len = 0;
- for(;; ++method_len ){
- if( method_len > app->httpReqHeadline_len ) break;
- if( method[method_len] == ' ' ) break;
- }
- /* find http uri */
- const uint8_t *uri = method + method_len + 1;
- int uri_len = 0;
- for(;; ++uri_len ){
- if( method_len + uri_len > app->httpReqHeadline_len ) break;
- if( uri[uri_len] == ' ' ) break;
- }
- if( !(app->flg & FLG_isHdrPrinted) ){
- app->flg |= FLG_isHdrPrinted;
- printf("h;Title;HTTP requests\n");
- printf("c;epochSec;srcIp;dstIp;srcPort;dstPort;http_method;http_uri\n");
- }
- /* print it as a quick-n-dirty CSV record */
- printf("r;%ld.%06ld;%d.%d.%d.%d;%d.%d.%d.%d;%d;%d;%.*s;%.*s\n",
- hdr->ts.tv_sec, hdr->ts.tv_usec,
- app->ipSrcAddr >> 24, app->ipSrcAddr >> 16 & 0xFF, app->ipSrcAddr >> 8 & 0xFF, app->ipSrcAddr & 0xFF,
- app->ipDstAddr >> 24, app->ipDstAddr >> 16 & 0xFF, app->ipDstAddr >> 8 & 0xFF, app->ipDstAddr & 0xFF,
- app->trspSrcPort, app->trspDstPort,
- method_len, method, uri_len, uri);
- }
-}
-
-
-static int run( PcapOne*app ){
- int err;
- err = pcap_init(PCAP_CHAR_ENC_UTF_8, app->pcapErrbuf);
- if( err == PCAP_ERROR ){
- fprintf(stderr, "libpcap: %s\n", app->pcapErrbuf); err = -1; goto endFn; }
- app->pcap = pcap_open_offline(
- (app->dumpFilePath == DEV_STDIN) ? "-" : app->dumpFilePath,
- app->pcapErrbuf);
- if( app->pcap == NULL ){
- fprintf(stderr, "libpcap: %s\n", app->pcapErrbuf); err = -1; goto endFn; }
- for(;;){
- err = pcap_dispatch(app->pcap, -1, onPcapPkg, (void*)app);
- switch( err ){
- case PCAP_ERROR:
- fprintf(stderr, "pcap_dispatch(): %s\n", pcap_geterr(app->pcap));
- err = -1; goto endFn;
- case PCAP_ERROR_BREAK:
- case PCAP_ERROR_NOT_ACTIVATED:
- fprintf(stderr, "pcap_dispatch() -> %d\n", err);
- err = -1; goto endFn;
- }
- if( err > 0 ){
- fprintf(stderr, "Processed %d packages in this turn.\n", err);
- continue;
- }
- break;
- }
- err = 0;
-endFn:
- if( app->pcap != NULL ){ pcap_close(app->pcap); app->pcap = NULL; }
- return err;
-}
-
-
-int main( int argc, char**argv ){
- int err;
- static char errbuf[PCAP_ERRBUF_SIZE];
- errbuf[0] = '\0';
- PcapOne app = {
- .flg = FLG_INIT,
- .pcapErrbuf = errbuf,
- .pcap = NULL,
- .frameNr = 0,
- .trspBodyLen = 0,
- };
- #define app (&app)
-
- err = parseArgs(app, argc, argv);
- if( err ){ goto endFn; }
-
- if( app->flg & FLG_isHelp ){
- printHelp(); goto endFn; }
-
- err = run(app);
-
-endFn:
- if( err < 0 ) err = -err;
- if( err > 0x7F ) err = 1;
- return err;
- #undef app
-}
-
-
diff --git a/src/main/c/common/assert_is.h b/src/main/c/common/assert_is.h
new file mode 100644
index 0000000..316bf02
--- /dev/null
+++ b/src/main/c/common/assert_is.h
@@ -0,0 +1,39 @@
+
+#include <stdio.h>  /* fprintf */
+#include <stdlib.h> /* abort */
+#include <string.h> /* strcmp, used by predicates like the example below */
+
+/* STR_QUOT is needed by TPL_assert_is; define it here unless the
+ * includer brought its own. */
+#ifndef STR_QUOT
+# define STR_QUOT_316bf02a(s) #s
+# define STR_QUOT(s) STR_QUOT_316bf02a(s)
+#endif
+
+#if !NDEBUG
+#define TPL_assert_is(T, PRED) static inline T*assert_is_##T(void*p,\
+const char*f,int l){if(p==NULL){fprintf(stderr,"assert(" STR_QUOT(T)\
+" != NULL) %s:%d\n",f,l);abort();}T*obj=p;if(!(PRED)){fprintf(stderr,\
+"ssert(type is \""STR_QUOT(T)"\") %s:%d\n",f,l);abort();}return p; }
+#else
+#define TPL_assert_is(T, PRED) static inline T*assert_is_##T(void*p,\
+const char*f,int l){return p;}
+#endif
+
+
+
+/* Example usage: */
+
+/* add some magic to your struct under check */
+typedef struct Person Person;
+struct Person {
+ char tYPE[sizeof"Hi, I'm a Person"];
+};
+
+/* instantiate a checker */
+TPL_assert_is(Person, !strcmp(obj->tYPE, "Hi, I'm a Person"))
+#define assert_is_Person(p) assert_is_Person(p, __FILE__, __LINE__)
+
+/* make sure magic is initialized (ALSO MAKE SURE TO PROPERLY INVALIDATE
+ * IT IN DTOR!)*/
+static void someCallee( void* );
+static void someCaller( void ){
+    Person p = {0};
+    strcpy(p.tYPE, "Hi, I'm a Person");
+    void *ptr = &p; /*whoops, compiler cannot help us any longer*/
+    someCallee(ptr);
+}
+
+/* verify you really got a Person */
+static void someCallee( void*shouldBeAPerson ){
+ Person *p = assert_is_Person(shouldBeAPerson);
+}
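+
+/* ... and invalidate the magic in the dtor, so stale pointers get
+ * caught too (sketch): */
+static void Person_dtor( Person*p ){
+    p->tYPE[0] = '\0';
+}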
+
diff --git a/src/main/c/common/commonKludge.h b/src/main/c/common/commonbase.h
index e0f0cba..e0f0cba 100644
--- a/src/main/c/common/commonKludge.h
+++ b/src/main/c/common/commonbase.h
diff --git a/src/main/c/common/offset_of.h b/src/main/c/common/offset_of.h
new file mode 100644
index 0000000..7d9179d
--- /dev/null
+++ b/src/main/c/common/offset_of.h
@@ -0,0 +1,9 @@
+#ifndef INCGUARD_yisgKqALPG4lfEqb
+#define INCGUARD_yisgKqALPG4lfEqb
+
+#include <stddef.h> /* size_t */
+
+#define container_of(P, T, M) \
+ ((T*)( ((size_t)P) - ((size_t)((char*)&((T*)0)->M - (char*)0) )))
+
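+/* Usage sketch (names purely illustrative):
+ *
+ *   struct Wrapper { int foo; int member; };
+ *   struct Wrapper w;
+ *   int *m = &w.member;
+ *   struct Wrapper *back = container_of(m, struct Wrapper, member);
+ *   // back == &w
+ */
+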
+
+#endif /* INCGUARD_yisgKqALPG4lfEqb */
diff --git a/src/main/c/common/windoof.h b/src/main/c/common/windoof.h
new file mode 100644
index 0000000..6ed9b41
--- /dev/null
+++ b/src/main/c/common/windoof.h
@@ -0,0 +1,59 @@
+
+#if 0
+# include <windows.h>
+#else
+
+#include <stdint.h>
+
+//#define HANDLE void*
+//typedef int BOOL;
+//typedef unsigned long LPDWORD;
+
+
+typedef struct _PROCESS_INFORMATION {
+ void* hProcess;
+ void* hThread;
+ uint32_t dwProcessId;
+ uint32_t dwThreadId;
+} PROCESS_INFORMATION, *PPROCESS_INFORMATION, *LPPROCESS_INFORMATION;
+
+
+typedef struct _SECURITY_ATTRIBUTES {
+ uint32_t nLength;
+ void* lpSecurityDescriptor;
+ int bInheritHandle;
+} SECURITY_ATTRIBUTES, *PSECURITY_ATTRIBUTES, *LPSECURITY_ATTRIBUTES;
+
+
+typedef struct _STARTUPINFOA {
+ uint32_t cb;
+ char *lpReserved;
+ char *lpDesktop;
+ char *lpTitle;
+ uint32_t dwX;
+ uint32_t dwY;
+ uint32_t dwXSize;
+ uint32_t dwYSize;
+ uint32_t dwXCountChars;
+ uint32_t dwYCountChars;
+ uint32_t dwFillAttribute;
+ uint32_t dwFlags;
+ short wShowWindow;
+ short cbReserved2;
+  uint8_t *lpReserved2; /* LPBYTE; must be a pointer for correct layout */
+ void *hStdInput, *hStdOutput, *hStdError;
+} STARTUPINFOA, *LPSTARTUPINFOA;
+
+
+
+int CreateProcessA( char const*, char*, LPSECURITY_ATTRIBUTES, LPSECURITY_ATTRIBUTES, int, uint32_t,
+ void*, char const*, LPSTARTUPINFOA, LPPROCESS_INFORMATION );
+
+
+int GetExitCodeProcess(void*, unsigned long*);
+
+
+/* Declarations/constants as in the official Win32 API, needed by
+ * callers which wait on the spawned process: */
+unsigned long WaitForSingleObject(void*, unsigned long);
+unsigned long GetLastError(void);
+
+#define INFINITE 0xFFFFFFFF
+#define WAIT_OBJECT_0 0
+
+
+#endif /*manual windoof on/off switch*/
diff --git a/src/main/c/paisa-fleet/FindFullDisks.c b/src/main/c/paisa-fleet/FindFullDisks.c
new file mode 100644
index 0000000..429b71c
--- /dev/null
+++ b/src/main/c/paisa-fleet/FindFullDisks.c
@@ -0,0 +1,383 @@
+#if 0
+
+true `# configure FindFullDisks for NORMAL systems` \
+ && CC=gcc \
+ && MKDIR_P="mkdir -p" \
+ && CFLAGS="-Wall -Werror -pedantic -Os -fmax-errors=1 -Wno-error=unused-variable -Wno-error=unused-function -Isrc/main/c -Iimport/include" \
+ && LDFLAGS="-Wl,-dn,-lgarbage,-lcJSON,-lexpat,-lmbedtls,-lmbedx509,-lmbedcrypto,-dy,-lpthread,-lws2_w32,-Limport/lib" \
+ && true
+
+true `# configure FindFullDisks for BROKEN systems` \
+ && CC=x86_64-w64-mingw32-gcc \
+ && MKDIR_P="mkdir -p" \
+ && CFLAGS="-Wall -Werror -pedantic -Os -fmax-errors=1 -Wno-error=unused-variable -Wno-error=unused-function -Isrc/main/c -Iimport/include" \
+ && LDFLAGS="-Wl,-dn,-lgarbage,-lcJSON,-lexpat,-lmbedtls,-lmbedx509,-lmbedcrypto,-dy,-lws2_32,-Limport/lib" \
+ && true
+
+true `# make FindFullDisks` \
+ && ${MKDIR_P:?} build/bin \
+ && ${CC:?} -o build/bin/findfulldisks $CFLAGS src/main/c/paisa-fleet/FindFullDisks.c $LDFLAGS \
+ && true
+
+#endif
+
+#include <assert.h>
+#include <stdio.h>
+#include <stdlib.h>
+#include <string.h>
+
+#include "Garbage.h"
+
+#define FLG_isHelp (1<<0)
+
+#if !NDEBUG
+# define REGISTER register
+# define LOGDBG(...) fprintf(stderr, __VA_ARGS__)
+# define IF_DBG(expr) expr
+#else
+# define REGISTER
+# define LOGDBG(...)
+# define IF_DBG(expr)
+#endif
+#define LOGERR(...) fprintf(stderr, __VA_ARGS__)
+
+
+
+typedef struct FindFullDisks FindFullDisks;
+typedef struct Device Device;
+
+
+#define MAGIC_FindFullDisks 0xB5410200
+struct FindFullDisks {
+ IF_DBG(int mAGIC);
+ int flg;
+ const char *sshUser;
+ int sshPort;
+ int maxParallel, numInProgress;
+ struct GarbageEnv **env;
+ struct Garbage_CsvIStream **csvSrc;
+ struct Garbage_Process **child;
+ char *inBuf;
+ int inBuf_cap, inBuf_len;
+ Device *devices;
+ int devices_cap, devices_cnt;
+ int iDevice; /* Next device to be triggered. */
+ int exitCode;
+};
+
+
+#define MAGIC_Device 0xAB420200
+struct Device {
+ IF_DBG(int mAGIC);
+ struct FindFullDisks *app;
+ char hostname[sizeof"lunkwill-0123456789AB_____"];
+ char eddieName[sizeof"eddie12345_____"];
+ char stdoutBuf[8192];
+ int stdoutBuf_cap, stdoutBuf_len;
+};
+
+
+/*BEG fwd decls*/
+static void beginNextDevice( void* );
+static void feedNextChunkFromStdinToCsvParser( void* );
+/*END fwd decls*/
+
+
+static void printHelp( void ){
+ printf("%s%s%s", " \n"
+ " ", strrchr(__FILE__,'/')+1, "\n"
+ " \n"
+ " Expected format on stdin is a CSV like:\n"
+ " \n"
+ " eddie00042 <SEMICOLON> lunkwill-ABBABEAFABBA <LF>\n"
+ " ...\n"
+ " \n"
+ " Options:\n"
+ " \n"
+ " --sshUser <str>\n"
+ " \n"
+ " --sshPort <int>\n"
+ " Default: 22\n"
+ " \n"
+ " --maxParallel <int>\n"
+ " Default 1.\n"
+ " \n");
+}
+
+
+static int parseArgs( int argc, char**argv, FindFullDisks*app ){
+ int iA = 1;
+ app->sshUser = NULL;
+ app->sshPort = 22;
+ app->maxParallel = 1;
+nextArg:;
+ const char *arg = argv[iA++];
+ if( arg == NULL ) goto validateArgs;
+ if( !strcmp(arg, "--help")){
+ app->flg |= FLG_isHelp; return 0;
+ }else if( !strcmp(arg, "--sshUser")){
+ arg = argv[iA++];
+ if( arg == NULL ){ LOGERR("EINVAL: Arg --sshUser needs value\n"); return -1; }
+ app->sshUser = arg;
+ }else if( !strcmp(arg, "--sshPort")){
+ arg = argv[iA++];
+ if( arg == NULL ){ LOGERR("EINVAL: Arg --sshPort needs value\n"); return -1; }
+ errno = 0;
+ app->sshPort = strtol(arg, NULL, 0);
+ if( errno ){ LOGERR("EINVAL: --sshPort %s\n", arg); return -1; }
+ }else if( !strcmp(arg, "--maxParallel")){
+ arg = argv[iA++];
+ if( arg == NULL ){ LOGERR("EINVAL: Arg --maxParallel needs value\n"); return -1; }
+ errno = 0;
+ app->maxParallel = strtol(arg, NULL, 0);
+ if( errno ){ LOGERR("EINVAL: --maxParallel %s\n", arg); return -1; }
+ }else{
+ LOGERR("EINVAL: %s\n", arg);
+ }
+ goto nextArg;
+validateArgs:;
+ if( app->sshUser == NULL ){ LOGERR("EINVAL: Arg --sshUser missing\n"); return -1; }
+ return 0;
+}
+
+
+static void no_op( void*_ ){}
+
+
+static void examineDeviceResult( void*device_ ){
+ REGISTER int err;
+  Device*const device = device_; assert(device->mAGIC == MAGIC_Device);
+ //FindFullDisks*const app = device->app; assert(app->mAGIC == MAGIC_FindFullDisks);
+ FILE *outFd = NULL;
+ if( device->stdoutBuf_len <= 0 ){ /*nothing to do*/ goto endFn; }
+ char outName[sizeof"result/eddie12345-lunkwill-1234567890123456.log"];
+ err = snprintf(outName, sizeof outName, "result/%s-%s.log", device->eddieName, device->hostname);
+ assert(err < sizeof outName);
+ outFd = fopen(outName, "wb");
+ if( outFd == NULL ){ LOGDBG("assert(fopen(%s) != %d) %s:%d\n", outName, errno, __FILE__, __LINE__); abort(); }
+ err = fwrite(device->stdoutBuf, 1, device->stdoutBuf_len, outFd);
+ assert(err == device->stdoutBuf_len);
+endFn:
+ if( outFd != NULL ) fclose(outFd);
+}
+
+
+static void Child_onStdout( const char*buf, int buf_len, void*cls ){
+  Device*const device = cls; assert(device->mAGIC == MAGIC_Device);
+ FindFullDisks*const app = device->app; assert(app->mAGIC == MAGIC_FindFullDisks);
+ if( buf_len > 0 ){ /*another chunk*/
+ if( device->stdoutBuf_len + buf_len >= device->stdoutBuf_cap ) assert(!"TODO_VD8CAIVAgBDwIA4mECAKVjAgB1XwIAfk");
+ memcpy(device->stdoutBuf + device->stdoutBuf_len, buf, buf_len);
+ device->stdoutBuf_len += buf_len;
+ //printf("%.*s", buf_len, buf);
+ }else{ /*EOF*/
+ assert(buf_len == 0);
+ }
+}
+
+
+static void Child_onJoined( int retval, int exitCode, int sigNum, void*cls ){
+ Device*const device = cls; assert(device->mAGIC == MAGIC_Device);
+ FindFullDisks*const app = device->app; assert(app->mAGIC == MAGIC_FindFullDisks);
+ if( retval != 0 || exitCode != 0 || sigNum != 0 ){
+ LOGDBG("[DEBUG] %s(%d, %d, %d)\n", __func__, retval, exitCode, sigNum);
+ }
+ assert(app->numInProgress > 0);
+ app->numInProgress -= 1;
+ (*app->env)->enqueBlocking(app->env, examineDeviceResult, device);
+ (*app->env)->enqueBlocking(app->env, beginNextDevice, app);
+}
+
+
+static void visitDevice( FindFullDisks*app, Device*device ){
+ assert(device != NULL && device->mAGIC == MAGIC_Device);
+ assert(device < app->devices + app->devices_cnt);
+ LOGERR("\n[INFO ] %s \"%s\" (behind \"%s\")\n", __func__, device->hostname, device->eddieName);
+ int err;
+ char eddieCmd[2048];
+ //err = snprintf(eddieCmd, sizeof eddieCmd, "true"
+ // " && HOSTNAME=$(hostname|sed 's_.pnet.ch__')"
+ // " && STAGE=$PAISA_ENV"
+ // " && printf \"remoteEddieName=$HOSTNAME, remoteStage=$STAGE\\n\""
+ // " && if test \"$(echo ${HOSTNAME}|sed -E 's_^vted_teddie_g')\" != \"%s\"; then true"
+ // " && echo wrong host. Want %s found $HOSTNAME && false"
+ // " ;fi"
+ // " && df",
+ // device->eddieName, device->eddieName
+ //);
+ err = snprintf(eddieCmd, sizeof eddieCmd, "true"
+ " && HOSTNAME=$(hostname|sed 's_.pnet.ch__')"
+ " && STAGE=$PAISA_ENV"
+ " && printf \"remoteEddieName=$HOSTNAME, remoteStage=$STAGE\\n\""
+ " && if test \"$(echo ${HOSTNAME}|sed -E 's_^vted_teddie_g')\" != \"%s\"; then true"
+ " && echo wrong host. Want %s found $HOSTNAME && false"
+ " ;fi"
+ " && ssh -oStrictHostKeyChecking=no -oUserKnownHostsFile=/dev/null"
+ " -p%d %s@%s"
+ " -- sh -c 'true"
+ " && HOSTNAME=$(hostname|sed '\"'\"'s_.isa.localdomain__'\"'\"')"
+ " && STAGE=$PAISA_ENV"
+ " && printf \"remoteHostname=$HOSTNAME, remoteStage=$STAGE\\n\""
+ // on some machine, df failed with "Stale file handle" But I want to
+ // continue with next device regardless of such errors.
+ " && df || true"
+ "'",
+ device->eddieName, device->eddieName, app->sshPort, app->sshUser,
+ strncmp("fook-",device->hostname,5) ? device->hostname : "fook"
+ );
+ assert(err < sizeof eddieCmd);
+ assert(app->sshPort > 0 && app->sshPort <= 0xFFFF);
+ char sshPortStr[sizeof"65535"];
+ err = snprintf(sshPortStr, sizeof sshPortStr, "%d", app->sshPort);
+ assert(err < (int)sizeof sshPortStr);
+ char userAtEddie[64];
+ err = snprintf(userAtEddie, sizeof userAtEddie, "%s@%s", app->sshUser, device->eddieName);
+ assert(err < sizeof userAtEddie);
+ char *childArgv[] = { "ssh",
+ "-oRemoteCommand=none",
+ "-oStrictHostKeyChecking=no",
+ "-oUserKnownHostsFile=/dev/null",
+ "-oConnectTimeout=4",
+ "-p", sshPortStr,
+ userAtEddie,
+ "--", "sh", "-c", eddieCmd,
+ NULL
+ };
+ //LOGDBG("CMDLINE:");
+ //for( int i = 0 ; childArgv[i] != NULL ; ++i ) LOGDBG(" \"%s\"", childArgv[i]);
+ //LOGDBG("\n\n");
+ app->child = (*app->env)->newProcess(app->env, &(struct Garbage_Process_Mentor){
+ .cls = device,
+ .usePathSearch = !0,
+ .argv = childArgv,
+ .onStdout = Child_onStdout,
+ //.onStderr = ,
+ .onJoined = Child_onJoined,
+ });
+ assert(app->child != NULL);
+ (*app->child)->join(app->child, 42000);
+}
+
+
+static void beginNextDevice( void*cls ){
+ FindFullDisks*const app = cls; assert(app->mAGIC == MAGIC_FindFullDisks);
+maybeBeginAnotherOne:
+ if( app->numInProgress >= app->maxParallel ){
+ //LOGDBG("[DEBUG] Already %d/%d in progress. Do NOT trigger more for now.\n",
+ // app->numInProgress, app->maxParallel);
+ goto endFn;
+ }
+ if( app->iDevice >= app->devices_cnt ){
+ //LOGDBG("[INFO ] Work on %d devices triggered. No more devices to trigger.\n", app->iDevice);
+ goto endFn;
+ }
+ assert(app->iDevice >= 0 && app->iDevice < INT_MAX);
+ app->iDevice += 1;
+ assert(app->numInProgress >= 0 && app->numInProgress < INT_MAX);
+ app->numInProgress += 1;
+ visitDevice(app, app->devices + app->iDevice - 1);
+ goto maybeBeginAnotherOne;
+endFn:;
+}
+
+
+static void onCsvRow( struct Garbage_CsvIStream_BufWithLength*row, int numCols, void*cls ){
+ REGISTER int err;
+ FindFullDisks*const app = cls; assert(app->mAGIC == MAGIC_FindFullDisks);
+ if( app->exitCode ) return;
+ if( numCols != 2 ){
+ LOGERR("[ERROR] Expected 2 column in input CSV but found %d\n", numCols);
+ app->exitCode = -1; return;
+ }
+ if( app->devices_cap <= app->devices_cnt ){
+ app->devices_cap += 4096;
+ void *tmp = realloc(app->devices, app->devices_cap*sizeof*app->devices);
+ if( tmp == NULL ) assert(!"TODO_c04CAJtRAgDYWQIAm10CAOAeAgA0KgIA");
+ app->devices = tmp;
+ }
+ #define DEVICE (app->devices + app->devices_cnt)
+ IF_DBG(DEVICE->mAGIC = MAGIC_Device);
+ DEVICE->app = app;
+ DEVICE->stdoutBuf_cap = sizeof DEVICE->stdoutBuf / sizeof*DEVICE->stdoutBuf;
+ if( row[0].len >= sizeof DEVICE->eddieName ){
+ LOGERR("[ERROR] eddieName too long: len=%d\n", row[0].len);
+ app->exitCode = -1; return;
+ }
+ if( row[1].len >= sizeof DEVICE->hostname ){
+ LOGERR("[ERROR] hostname too long: len=%d\n", row[1].len);
+ app->exitCode = -1; return;
+ }
+ memcpy(DEVICE->eddieName, row[0].buf, row[0].len);
+ DEVICE->eddieName[row[0].len] = '\0';
+ memcpy(DEVICE->hostname, row[1].buf, row[1].len);
+ DEVICE->hostname[row[1].len] = '\0';
+ #undef DEVICE
+ app->devices_cnt += 1;
+}
+
+
+static void onCsvParserCloseSnkDone( int retval, void*app_ ){
+ FindFullDisks*const app = app_; assert(app->mAGIC == MAGIC_FindFullDisks);
+ LOGDBG("[DEBUG] Found %d devices in input.\n", app->devices_cnt);
+ (*app->env)->enqueBlocking(app->env, beginNextDevice, app);
+}
+
+
+static void onCsvParserWriteDone( int retval, void*cls ){
+ FindFullDisks*const app = cls; assert(app->mAGIC == MAGIC_FindFullDisks);
+ if( retval <= 0 ){ LOGDBG("assert(retval != %d) %s:%d\n", retval, __FILE__, __LINE__); abort(); }
+ (*app->env)->enqueBlocking(app->env, feedNextChunkFromStdinToCsvParser, app);
+}
+
+
+static void feedNextChunkFromStdinToCsvParser( void*cls ){
+ REGISTER int err;
+ FindFullDisks*const app = cls; assert(app->mAGIC == MAGIC_FindFullDisks);
+ if( app->exitCode ) return;
+ #define SRC (stdin)
+ if( app->inBuf == NULL || app->inBuf_cap < 1<<15 ){
+ app->inBuf_cap = 1<<15;
+ if( app->inBuf ) free(app->inBuf);
+    app->inBuf = malloc(app->inBuf_cap*sizeof*app->inBuf);
+ if( app->inBuf == NULL ){ assert(!"TODO_TT8CAGQLAgCoawIA9jgCANA6AgBTaAIA"); }
+ }
+ err = fread(app->inBuf, 1, app->inBuf_cap, SRC);
+ if( err <= 0 ){
+ (*app->csvSrc)->closeSnk(app->csvSrc, onCsvParserCloseSnkDone, app);
+ return;
+ }
+ app->inBuf_len = err;
+ (*app->csvSrc)->write(app->inBuf, app->inBuf_len, app->csvSrc, onCsvParserWriteDone, app);
+ #undef SRC
+}
+
+
+static void initCsvParserForDeviceListOnStdin( void*cls ){
+ FindFullDisks*const app = cls; assert(app->mAGIC == MAGIC_FindFullDisks);
+ static struct Garbage_CsvIStream_Mentor csvMentor = {
+ .onCsvRow = onCsvRow,
+ .onCsvDocEnd = no_op,
+ };
+ struct Garbage_CsvIStream_Opts csvOpts = { .delimCol = ';' };
+ app->csvSrc = (*app->env)->newCsvIStream(app->env, &csvOpts, &csvMentor, app);
+ feedNextChunkFromStdinToCsvParser(app);
+}
+
+
+int main( int argc, char**argv ){
+ void *envMemory[SIZEOF_struct_GarbageEnv/sizeof(void*)];
+ FindFullDisks app = {0}; assert((void*)0 == NULL);
+ #define app (&app)
+ IF_DBG(app->mAGIC = MAGIC_FindFullDisks);
+ if( parseArgs(argc, argv, app) ){ app->exitCode = -1; goto endFn; }
+ if( app->flg & FLG_isHelp ){ printHelp(); goto endFn; }
+ app->env = GarbageEnv_ctor(envMemory, sizeof envMemory);
+ assert(app->env != NULL);
+ (*app->env)->enqueBlocking(app->env, initCsvParserForDeviceListOnStdin, app);
+ (*app->env)->runUntilDone(app->env);
+endFn:
+ return !!app->exitCode;
+ #undef app
+}
+
+
diff --git a/src/main/c/postshit/launch/mvn/mvn-launch.c b/src/main/c/postshit/launch/mvn/mvn-launch.c
new file mode 100644
index 0000000..8886e9e
--- /dev/null
+++ b/src/main/c/postshit/launch/mvn/mvn-launch.c
@@ -0,0 +1,214 @@
+/*
+
+ Shitty policies require shitty workarounds. Standard maven ships with a 'cmd'
+ file for its execution. But as some shiny 'security' policies forbid
+ execution of 'cmd' files, we need to waste our time writing stuff like this
+ instead doing our work. Grrr...
+
+ ${CC:?} -o build/bin/mvn-launch.exe \
+ -Wall -Werror -fmax-errors=3 -Wno-error=unused-function -Wno-error=unused-variable \
+ -DPROJECT_VERSION=0.0.0-$(date -u +%s) \
+ src/main/c/postshit/launch/mvn/mvn-launch.c \
+
+*/
+
+#include <windows.h>
+#include <assert.h>
+#include <stdio.h>
+
+#define LOGERR(...) fprintf(stderr, __VA_ARGS__)
+#define LOGDBG(...) fprintf(stderr, __VA_ARGS__)
+
+#define STR_QUOT_3q9o58uhzjad(s) #s
+#define STR_QUOT(s) STR_QUOT_3q9o58uhzjad(s)
+
+
+static int appendRaw( char*dst, int*dst_len, int dst_cap, const char*src, int src_len ){
+ #define dst_len (*dst_len)
+ register int err;
+ if( dst_cap < dst_len + src_len ){
+ LOGERR("ENOBUFS: %s Cannot add: %.*s\n", strrchr(__FILE__,'/')+1, src_len, src);
+ err = -ENOBUFS; goto endFn;
+ }
+ memcpy(dst + dst_len, src, src_len);
+ dst_len += src_len;
+ err = 0;
+endFn:
+ return err;
+ #undef dst_len
+}
+
+
+static int appendQuotEscaped( char*dst, int*dst_len, int dst_cap, const char*src, int src_len ){
+ #define dst_len (*dst_len)
+ register int err;
+ if( dst_cap < dst_len + src_len ){
+ LOGDBG("ENOBUFS: %s: cannot append \"%.*s\"\n", strrchr(__FILE__,'/')+1, src_len, src);
+ err = -ENOBUFS; goto endFn;
+ }
+ for( err = 0 ; err < src_len ; ++err ){
+ if( src[err] == '"' ){
+ LOGERR("ENOTSUP: Quotes in args not impl. %s:%d\n", __FILE__, __LINE__);
+ err = -ENOTSUP; goto endFn;
+ }
+ dst[dst_len++] = src[err];
+ }
+ err = 0;
+endFn:
+ return err;
+ #undef dst_len
+}
+
+
+static int appendArg( char*cmdline, int*cmdline_len, int cmdline_cap, const char*newArg, int newArg_len ){
+ #define cmdline_len (*cmdline_len)
+ register int err;
+ if( cmdline_cap < cmdline_len + newArg_len + sizeof" \"\"" ){
+ LOGERR("ENOBUFS: Cmdline too long. %s:%d\n", strrchr(__FILE__,'/')+1, __LINE__);
+ err = -ENOBUFS; goto endFn;
+ }
+ cmdline[cmdline_len++] = ' ';
+ cmdline[cmdline_len++] = '"';
+ for( err = 0 ; err < newArg_len ; ++err ){
+ if( newArg[err] == '"' ){
+ LOGERR("ENOTSUP: Quotes in args not impl. %s:%d\n", strrchr(__FILE__,'/')+1, __LINE__);
+ err = -ENOTSUP; goto endFn;
+ }
+ cmdline[cmdline_len++] = newArg[err];
+ }
+ cmdline[cmdline_len++] = '"';
+ err = 0;
+endFn:
+ return err;
+ #undef cmdline_len
+}
+
+
+static int appendFromEnvironIfNotEmpty( char*cmdline, int*cmdline_len, int cmdline_cap, const char*envKey ){
+ #define cmdline_len (*cmdline_len)
+ assert(envKey != NULL);
+ register int err;
+ char envval[0x7FFF];
+ const int envval_cap = sizeof envval;
+ err = GetEnvironmentVariable(envKey, envval, envval_cap-1);
+ if( err >= envval_cap-1 ){
+ LOGERR("ENOBUFS: environ.%s too long. %s:%d\n", envKey, strrchr(__FILE__,'/')+1, __LINE__);
+ err = -ENOBUFS; goto endFn;
+ }
+ if( err > 0 ){
+    err = appendArg(cmdline, &cmdline_len, cmdline_cap, envval, err);
+    if( err < 0 ){ LOGDBG("[TRACE] at %s:%d\n", __FILE__, __LINE__); goto endFn; }
+    /* appendArg already advanced cmdline_len through the pointer */
+ }
+ err = 0;
+endFn:
+ return err;
+ #undef cmdline_len
+}
+
+
+int main( int argc, char**argv ){
+ register int err;
+
+ char tmp[2];
+ err = GetEnvironmentVariable("LAUNCHR_HELP", tmp, 1);
+ if( err == 0 ){
+ if( GetLastError() != ERROR_ENVVAR_NOT_FOUND ){
+ LOGERR("ERROR: GetEnvironmentVariable(LAUNCHR_HELP): %lu. %s:%d\n", GetLastError(), __FILE__, __LINE__);
+ err = -1; goto endFn; }
+ /*no such variable. interpret as no-help-wanted*/;
+ }else{
+ printf("\n %s " STR_QUOT(PROJECT_VERSION) "\n \n Delegates the call to maven without 'cmd' files.\n\n", strrchr(__FILE__,'/')+1);
+ err = -1; goto endFn;
+ }
+
+ char username[16];
+ const int username_cap = sizeof username;
+ err = GetEnvironmentVariable("USERNAME", username, username_cap);
+ if( err == 0 ){ LOGERR("ERROR: GetEnvironmentVariable(USERNAME) -> 0x%lX\n", GetLastError());
+ err = -1; goto endFn; }
+ if( err > username_cap ){
+ LOGERR("ENOBUFS: environ.USERNAME too long. %s:%d\n", strrchr(__FILE__,'/')+1, __LINE__);
+ err = -1; goto endFn; }
+ assert(err > 0);
+ const int username_len = err;
+
+ char cmdline[32767]; /*[length](https://stackoverflow.com/questions/3205027/#comment17734587_3205048)*/
+ cmdline[0] = '\0';
+ const int cmdline_cap = sizeof cmdline;
+ int cmdline_len = 0;
+
+ err = 0
+ || appendRaw(cmdline, &cmdline_len, cmdline_cap, "C:/Users/", 9) < 0
+ || appendRaw(cmdline, &cmdline_len, cmdline_cap, username, username_len) < 0
+ || appendRaw(cmdline, &cmdline_len, cmdline_cap, "/.opt/java/bin/java.exe", 23) < 0
+ || appendFromEnvironIfNotEmpty(cmdline, &cmdline_len, cmdline_cap, "JVM_CONFIG_MAVEN_PROPS") < 0
+ || appendFromEnvironIfNotEmpty(cmdline, &cmdline_len, cmdline_cap, "MAVEN_OPTS") < 0
+ || appendFromEnvironIfNotEmpty(cmdline, &cmdline_len, cmdline_cap, "MAVEN_DEBUG_OPTS") < 0
+ || appendRaw(cmdline, &cmdline_len, cmdline_cap, " -classpath", 11) < 0
+ || appendRaw(cmdline, &cmdline_len, cmdline_cap, " C:/Users/", 10) < 0
+ || appendRaw(cmdline, &cmdline_len, cmdline_cap, username, username_len) < 0
+ || appendRaw(cmdline, &cmdline_len, cmdline_cap, "/.opt/maven/boot/plexus-classworlds-2.5.2.jar", 45) < 0
+ || appendRaw(cmdline, &cmdline_len, cmdline_cap, " -Dclassworlds.conf=C:/Users/", 29) < 0
+ || appendRaw(cmdline, &cmdline_len, cmdline_cap, username, username_len) < 0
+ || appendRaw(cmdline, &cmdline_len, cmdline_cap, "/.opt/maven/bin/m2.conf", 23) < 0
+ || appendRaw(cmdline, &cmdline_len, cmdline_cap, " -Dmaven.home=C:/Users/", 23) < 0
+ || appendRaw(cmdline, &cmdline_len, cmdline_cap, username, username_len) < 0
+ || appendRaw(cmdline, &cmdline_len, cmdline_cap, "/.opt/maven", 11) < 0
+ ;
+ if( err ){ LOGDBG("[TRACE] at %s:%d\n", __FILE__, __LINE__); goto endFn; }
+
+ char workDir[0x7FFF];
+ const int workDir_cap = sizeof workDir;
+ err = GetCurrentDirectory(workDir_cap, workDir);
+ if( err == 0 ){
+ LOGERR("ERROR: GetCurrentDirectory() -> 0x%lX. %s:%d\n", GetLastError(), strrchr(__FILE__,'/')+1, __LINE__);
+ err = -1; goto endFn; }
+ if( err >= workDir_cap ){
+ LOGERR("ENOBUFS: Working dir too long. %s:%d\n", strrchr(__FILE__,'/')+1, __LINE__);
+ err = -ENOBUFS; goto endFn; }
+ assert(err > 0);
+ const int workDir_len = err;
+ for( err = 0 ; err < workDir_len ; ++err ){ if( workDir[err] == '\\' ) workDir[err] = '/'; }
+
+ err = 0
+ || appendRaw(cmdline, &cmdline_len, cmdline_cap, " \"-Dmaven.multiModuleProjectDirectory=", 38) < 0
+ || appendQuotEscaped(cmdline, &cmdline_len, cmdline_cap, workDir, workDir_len) < 0
+ || appendRaw(cmdline, &cmdline_len, cmdline_cap, "\"", 1) < 0
+ || appendRaw(cmdline, &cmdline_len, cmdline_cap, " org.codehaus.plexus.classworlds.launcher.Launcher", 50) < 0
+ ;
+ if( err ){ LOGDBG("[TRACE] at %s:%d", __FILE__, __LINE__); err = -1; goto endFn; }
+
+ /*append all other args*/
+ for( int iA=1 ; iA < argc ; ++iA ){
+ char *arg = argv[iA];
+ err = appendArg(cmdline, &cmdline_len, cmdline_cap, arg, strlen(arg));
+ if( err < 0 ){ LOGDBG("[TRACE] at %s:%d\n", __FILE__, __LINE__); goto endFn; }
+ }
+
+ STARTUPINFOA startInfo = { .lpDesktop = NULL, .lpTitle = NULL, .dwFlags = 0, };
+ startInfo.cb = sizeof(startInfo);
+ PROCESS_INFORMATION proc;
+ err = CreateProcessA(NULL, cmdline, NULL, NULL, !0, 0, NULL, NULL, &startInfo, &proc);
+ if( err == 0 ){
+ LOGERR("ERROR: CreateProcess(): 0x%0lX. %s:%d\n", GetLastError(), strrchr(__FILE__,'/')+1, __LINE__);
+ err = -1; goto endFn;
+ }
+ err = WaitForSingleObject(proc.hProcess, INFINITE);
+ if( err != WAIT_OBJECT_0 ){ LOGERR("ERROR: WaitForSingleObject() -> %lu. %s:%d\n", GetLastError(), strrchr(__FILE__,'/')+1, __LINE__);
+ err = -1; goto endFn; }
+ long unsigned exitCode;
+ err = GetExitCodeProcess(proc.hProcess, &exitCode);
+ if( err == 0 ){ LOGERR("ERROR: GetExitCodeProcess(): %lu. %s:%d\n", GetLastError(), strrchr(__FILE__,'/')+1, __LINE__);
+ err = -1; goto endFn; }
+ if( (exitCode & 0x7FFFFFFF) != exitCode ){
+ LOGERR("EDOM: Exit code %lu out of bounds. %s:%d\n", exitCode, strrchr(__FILE__,'/')+1, __LINE__);
+ err = -1; goto endFn;
+ }
+ err = exitCode;
+endFn:
+ if( err != 0 && cmdline_len > 0 ){ LOGDBG("[DEBUG] %.*s\n", cmdline_len, cmdline); }
+ if( err < 0 ) err = -err;
+ return err;
+}
+
diff --git a/src/main/c/postshit/launch/mvn/mvn-versions-set.c b/src/main/c/postshit/launch/mvn/mvn-versions-set.c
new file mode 100644
index 0000000..888183d
--- /dev/null
+++ b/src/main/c/postshit/launch/mvn/mvn-versions-set.c
@@ -0,0 +1,133 @@
+/*
+
+ Shitty policies require shitty workarounds. Standard maven ships with a 'cmd'
+ file for its execution. But as some shiny 'security' policies forbid
+ execution of 'cmd' files, we need to waste our time writing stuff like this
+ instead doing our work. Grrr...
+
+ ${CC:?} -o build/bin/mvn-versions-set.exe \
+ -Wall -Werror -fmax-errors=3 -Wno-error=unused-function -Wno-error=unused-variable \
+ -DPROJECT_VERSION=0.0.0-$(date -u +%s) \
+ src/main/c/postshit/launch/mvn/mvn-versions-set.c \
+
+*/
+
+#include <windows.h>
+#include <assert.h>
+#include <stdio.h>
+
+#define LOGERR(...) fprintf(stderr, __VA_ARGS__)
+#define LOGDBG(...) fprintf(stderr, __VA_ARGS__)
+
+#define STR_QUOT_3q9o58uhzjad(s) #s
+#define STR_QUOT(s) STR_QUOT_3q9o58uhzjad(s)
+
+
+static int appendRaw( char*dst, int*dst_len, int dst_cap, const char*src, int src_len ){
+ #define dst_len (*dst_len)
+ register int err;
+ if( dst_cap < dst_len + src_len ){
+ LOGERR("ENOBUFS: %s Cannot add: %.*s\n", strrchr(__FILE__,'/')+1, src_len, src);
+ err = -ENOBUFS; goto endFn;
+ }
+ memcpy(dst + dst_len, src, src_len);
+ dst_len += src_len;
+ err = 0;
+endFn:
+ return err;
+ #undef dst_len
+}
+
+
+static int appendQuotEscaped( char*dst, int*dst_len, int dst_cap, const char*src, int src_len ){
+ #define dst_len (*dst_len)
+ register int err;
+ if( dst_cap < dst_len + src_len ){
+ LOGDBG("ENOBUFS: %s: cannot append \"%.*s\"\n", strrchr(__FILE__,'/')+1, src_len, src);
+ err = -ENOBUFS; goto endFn;
+ }
+ for( err = 0 ; err < src_len ; ++err ){
+ if( src[err] == '"' ){
+ LOGERR("ENOTSUP: Quotes in args not impl. %s:%d\n", __FILE__, __LINE__);
+ err = -ENOTSUP; goto endFn;
+ }
+ dst[dst_len++] = src[err];
+ }
+ err = 0;
+endFn:
+ return err;
+ #undef dst_len
+}
+
+
+int main( int argc, char**argv ){
+ register int err;
+ int isHelp = 0;
+ const char *newVersion = NULL;
+
+ /*parse args*/
+ for( err = 1 ; err < argc ; ++err ){
+ const char *arg = argv[err];
+ if( !strcmp(arg, "--help") ){
+ isHelp = !0; break;
+ }else if( newVersion == NULL ){
+ newVersion = arg;
+ }else{
+ LOGERR("EINVAL: Only ONE arg expected. But got: %s\n", arg); err = -1; goto endFn;
+ }
+ }
+ if( isHelp ){
+ printf("\n"
+ " %s " STR_QUOT(PROJECT_VERSION) "\n"
+ " \n"
+ " Set a specific maven version. Usage:\n"
+ " \n"
+ " %s 0.0.0-SNAPSHOT\n"
+ "\n", strrchr(__FILE__,'/')+1, argv[0]);
+ err = -1; goto endFn;
+ }
+ if( newVersion == NULL ){
+ LOGERR("EINVAL: new version to use missing. Try --help\n");
+ err = -1; goto endFn;
+ }
+ const int newVersion_len = strlen(newVersion);
+
+ char cmdline[32767]; /*[length](https://stackoverflow.com/questions/3205027/#comment17734587_3205048)*/
+ cmdline[0] = '\0';
+ const int cmdline_cap = sizeof cmdline;
+ int cmdline_len = 0;
+
+ err = 0
+ || appendRaw(cmdline, &cmdline_len, cmdline_cap, "mvn versions:set -DgenerateBackupPoms=false \"-DnewVersion=", 58) < 0
+    || appendQuotEscaped(cmdline, &cmdline_len, cmdline_cap, newVersion, newVersion_len) < 0
+ || appendRaw(cmdline, &cmdline_len, cmdline_cap, "\"", 1) < 0
+ ;
+ if( err ){ LOGDBG("[TRACE] at %s:%d", __FILE__, __LINE__); err = -1; goto endFn; }
+
+ STARTUPINFOA startInfo = { .lpDesktop = NULL, .lpTitle = NULL, .dwFlags = 0, };
+ startInfo.cb = sizeof(startInfo);
+ PROCESS_INFORMATION proc;
+ err = CreateProcessA(NULL, cmdline, NULL, NULL, !0, 0, NULL, NULL, &startInfo, &proc);
+ if( err == 0 ){
+ LOGERR("ERROR: CreateProcess(): 0x%0lX. %s:%d\n", GetLastError(), strrchr(__FILE__,'/')+1, __LINE__);
+ err = -1; goto endFn;
+ }
+ err = WaitForSingleObject(proc.hProcess, INFINITE);
+ if( err != WAIT_OBJECT_0 ){ LOGERR("ERROR: WaitForSingleObject() -> %lu. %s:%d\n", GetLastError(), strrchr(__FILE__,'/')+1, __LINE__);
+ err = -1; goto endFn; }
+ long unsigned exitCode;
+ err = GetExitCodeProcess(proc.hProcess, &exitCode);
+ if( err == 0 ){ LOGERR("ERROR: GetExitCodeProcess(): %lu. %s:%d\n", GetLastError(), strrchr(__FILE__,'/')+1, __LINE__);
+ err = -1; goto endFn; }
+ if( (exitCode & 0x7FFFFFFF) != exitCode ){
+ LOGERR("EDOM: Exit code %lu out of bounds. %s:%d\n", exitCode, strrchr(__FILE__,'/')+1, __LINE__);
+ err = -1; goto endFn;
+ }
+ err = exitCode;
+endFn:
+ if( err != 0 && cmdline_len > 0 ){ LOGDBG("[DEBUG] %.*s\n", cmdline_len, cmdline); }
+ if( err < 0 ) err = -err;
+ return err;
+}
+
+
diff --git a/src/main/c/postshit/launch/openshift/ocexec.c b/src/main/c/postshit/launch/openshift/ocexec.c
new file mode 100644
index 0000000..45c4af9
--- /dev/null
+++ b/src/main/c/postshit/launch/openshift/ocexec.c
@@ -0,0 +1,152 @@
+/*
+
+SH: true \
+SH: && `# Configure` \
+SH: && CC=x86_64-w64-mingw32-cc \
+SH: && MKDIR_P="mkdir -p" \
+SH: && CFLAGS="-Wall -Werror -pedantic -O0 -g -Isrc/main/c/common -DPROJECT_VERSION=0.0.0-$(date -u +%s) -fmax-errors=1 -Wno-error=unused-variable" \
+SH: && LDFLAGS="-Wl,--gc-sections,--as-needed" \
+SH: && `# Make` \
+SH: && ${MKDIR_P:?} build/bin \
+SH: && ${CC:?} -o build/bin/ocexec ${CFLAGS:?} src/main/c/postshit/launch/openshift/ocexec.c ${LDFLAGS:?} \
+SH: && true
+
+*/
+
+#include <assert.h>
+#include <stdio.h>
+#include <string.h>
+#if __WIN32
+# include <windoof.h>
+#endif
+
+#define LOGERR(...) fprintf(stderr, __VA_ARGS__)
+#if !NDEBUG
+# define REGISTER
+# define LOGDBG(...) fprintf(stderr, __VA_ARGS__)
+#else
+# define REGISTER register
+# define LOGDBG(...)
+#endif
+
+#define FLG_isHelp (1<<0)
+
+
+typedef struct App App;
+
+
+struct App {
+ int flg;
+ char const *ocNamespace;
+ char const *podName;
+};
+
+
+static void printHelp( void ){
+ printf(" \n"
+ " TODO write help page\n"
+ " \n");
+}
+
+
+static int parseArgs( int argc, char**argv, App*app ){
+ REGISTER int err;
+ int iArg = 1;
+ if( argc <= 1 ){ LOGERR("EINVAL: Luke.. use arguments!\n"); return-1; }
+nextArg:;
+ char const *arg = argv[iArg++];
+ if( arg == NULL ) goto verifyArgs;
+ if( !strcmp(arg,"--help") ){
+ app->flg |= FLG_isHelp;
+ //LOGDBG("[DEBUG] help -> true\n", arg);
+ return 0;
+ }else if( !strcmp(arg,"-n") || !strcmp(arg,"--namespace") ){
+ arg = argv[iArg++];
+ if( arg == NULL ){ LOGERR("EINVAL: %s needs value\n", argv[iArg-2]); return-1; }
+ app->ocNamespace = arg;
+ //LOGDBG("[DEBUG] namespace -> \"%s\"\n", arg);
+ }else if( !strcmp(arg,"-p") || !strcmp(arg,"--pod") ){
+ arg = argv[iArg++];
+ if( arg == NULL ){ LOGERR("EINVAL: %s needs value\n", argv[iArg-2]); return-1; }
+ app->podName = arg;
+ //LOGDBG("[DEBUG] pod -> \"%s\"\n", arg);
+ }else{
+ LOGERR("EINVAL: %s\n", arg); return -1;
+ }
+ goto nextArg;
+verifyArgs:
+ return 0;
+}
+
+
+static int fetchPodnames( App*app ){
+ assert(!"TODO_hCICALJrAgDwNgIAZ0ACAD9sAgB5UwIA");
+ return -1;
+}
+
+
+static int resolvePodname( App*app ){
+ REGISTER int err;
+ err = fetchPodnames(app);
+ if( err ) return err;
+  if( !strcmp(app->podName, "houston") ){
+    /* TODO resolve alias to the full pod name from the fetched list */
+  }
+  return 0;
+}
+
+
+static int resolveNamespace( App*app ){
+ if(0){
+ }else if( !strcmp(app->ocNamespace,"test") ){
+ app->ocNamespace = "isa-houston-test";
+ }else if( !strcmp(app->ocNamespace,"int") ){
+ app->ocNamespace = "isa-houston-int";
+ }else if( !strcmp(app->ocNamespace,"preprod") ){
+ app->ocNamespace = "isa-houston-preprod";
+ }else{
+ LOGDBG("[DEBUG] Use oc namespace as provided: \"%s\"\n", app->ocNamespace);
+ }
+ return 0;
+}
+
+
+static int run( App*app ){
+ REGISTER int err;
+ err = resolveNamespace(app); if( err ) return err;
+ err = resolvePodname(app); if( err ) return err;
+
+ LOGDBG("ENOTSUP: TODO continue here %s:%d\n", __FILE__, __LINE__);
+
+ PROCESS_INFORMATION proc;
+ err = CreateProcessA(NULL, cmdline, NULL, NULL, !0, 0, NULL, NULL, &startInfo, &proc);
+ if( err == 0 ){
+ LOGERR("ERROR: CreateProcess(): 0x%0lX. %s:%d\n", GetLastError(), strrchr(__FILE__,'/')+1, __LINE__);
+ err = -1; goto endFn; }
+ err = WaitForSingleObject(proc.hProcess, INFINITE);
+ if( err != WAIT_OBJECT_0 ){
+ LOGERR("ERROR: WaitForSingleObject() -> %lu. %s:%d\n", GetLastError(), strrchr(__FILE__,'/')+1, __LINE__);
+ err = -1; goto endFn; }
+ long unsigned exitCode;
+ err = GetExitCodeProcess(proc.hProcess, &exitCode);
+ if( err == 0 ){
+ LOGERR("ERROR: GetExitCodeProcess(): %lu. %s:%d\n", GetLastError(), strrchr(__FILE__,'/')+1, __LINE__);
+ err = -1; goto endFn; }
+ if( (exitCode & 0x7FFFFFFF) != exitCode ){
+ LOGERR("EDOM: Exit code %lu out of bounds. %s:%d\n", exitCode, strrchr(__FILE__,'/')+1, __LINE__);
+ err = -1; goto endFn;
+ }
+}
+
+
+int main( int argc, char**argv ){
+ REGISTER int err;
+ App app = {0}; assert((void*)0 == NULL);
+ #define app (&app)
+ if( parseArgs(argc, argv, app) ){ err = -1; goto endFn; }
+ LOGDBG("[DEBUG] flags are 0x%X\n", app->flg);
+ if( app->flg & FLG_isHelp ){ printHelp(); err = 0; goto endFn; }
+ err = run(app);
+endFn:
+ return !!err;
+ #undef app
+}
+
diff --git a/src/main/docker/android-dev.Dockerfile b/src/main/docker/android-dev.Dockerfile
deleted file mode 100644
index 3f7b4b4..0000000
--- a/src/main/docker/android-dev.Dockerfile
+++ /dev/null
@@ -1,44 +0,0 @@
-#
-# Tools for Android development.
-#
-
-ARG PARENT_IMAGE=debian:buster-20220622-slim
-FROM $PARENT_IMAGE
-
-ARG PKGS_TO_ADD="curl unzip openjdk-11-jdk-headless aapt apksigner zipalign"
-ARG PKGS_TO_DEL="curl unzip"
-ARG PKGINIT="apt-get update"
-ARG PKGADD="apt-get install -y --no-install-recommends"
-ARG PKGDEL="apt-get purge -y"
-ARG PKGCLEAN="apt-get clean"
-ARG PLATFORM_VERSION="22"
-ARG BUILD_TOOLS_VERSION="22.0.1"
-ARG CMDLINETOOLS_URL="https://dl.google.com/android/repository/commandlinetools-linux-8512546_latest.zip"
-
-ENV ANDROID_HOME="/usr/lib/android-sdk"
-ENV PATH="$PATH:/usr/lib/android-sdk/build-tools/debian:/usr/lib/android-sdk/cmdline-tools/latest/bin:/usr/lib/android-sdk/build-tools/$BUILD_TOOLS_VERSION"
-
-WORKDIR /work
-
-RUN true \
- && $PKGINIT \
- && $PKGADD $PKGS_TO_ADD \
- && (cd /tmp && curl -sSLO "$CMDLINETOOLS_URL") \
- && if test -x /tmp/cmdline-tools; then echo >&2 "[ERROR] /tmp/cmdline-tools already exists"; false; fi \
- && (cd /tmp && unzip $(basename "$CMDLINETOOLS_URL") >/dev/null) \
- && mkdir /usr/lib/android-sdk/cmdline-tools \
- && mkdir /usr/lib/android-sdk/cmdline-tools/latest \
- && mv /tmp/cmdline-tools/* /usr/lib/android-sdk/cmdline-tools/latest/. \
- && yes | sdkmanager --install "platforms;android-$PLATFORM_VERSION" "build-tools;$BUILD_TOOLS_VERSION" \
- # Those for some reason are broken (wrong linker) so use the debian variant.
- && (cd "/usr/lib/android-sdk/build-tools/${BUILD_TOOLS_VERSION:?}" && rm aapt zipalign) \
- && chown 1000:1000 /work \
- && $PKGDEL $PKGS_TO_DEL \
- && $PKGCLEAN \
- && rm -rf /tmp/* \
- && true
-
-USER 1000:1000
-
-CMD ["sleep", "36000"]
-
diff --git a/src/main/docker/gateleen.Dockerfile b/src/main/docker/gateleen.Dockerfile
deleted file mode 100644
index f604dc2..0000000
--- a/src/main/docker/gateleen.Dockerfile
+++ /dev/null
@@ -1,65 +0,0 @@
-#
-# A Gateleen playground instance.
-#
-
-ARG PARENT_IMAGE=alpine:3.16.0
-FROM $PARENT_IMAGE
-
-ARG GATELEEN_GIT_TAG=v1.3.28
-ARG UID=1000
-ARG GID=1000
-ARG PKGS_TO_ADD="maven nodejs npm curl redis openjdk11-jre-headless"
-#ARG PKGS_TO_DEL="maven nodejs npm"
-ARG PKGS_TO_DEL="nodejs npm"
-ARG PKGINIT="true"
-ARG PKGADD="apk add"
-ARG PKGDEL="true"
-ARG PKGCLEAN="true"
-
-WORKDIR /work
-
-RUN true \
- && printf 'user:x:%s:%s:user:/work:/bin/sh\n' "${UID:?}" "${GID:?}" >> /etc/passwd \
- && true
-
-RUN true \
- && $PKGINIT && $PKGADD $PKGS_TO_ADD \
- && sed -i "s,</settings>, <localRepository>/data/maven/.m2/repository</localRepository>\n</settings>,g" /usr/share/java/maven-3/conf/settings.xml \
- && mkdir /data /data/maven /work/gateleen \
- && chown "${UID:?}:${GID:?}" /data/maven /work /work/gateleen \
- && curl -sSL https://github.com/swisspush/gateleen/archive/refs/tags/"$GATELEEN_GIT_TAG".tar.gz > "/tmp/gateleen-$GATELEEN_GIT_TAG.tgz" \
- && cd /work/gateleen \
- && su user -c 'tar --strip-components 1 -xf /tmp/gateleen-"$GATELEEN_GIT_TAG".tgz' \
- && (cd gateleen-hook-js && su user -c 'npm install') \
- && su user -c 'mkdir -p gateleen-hook-js/node/node_modules/npm/bin' \
- && su user -c 'ln -s /usr/bin/node gateleen-hook-js/node/node' \
- && printf "require('/usr/lib/node_modules/npm/lib/cli.js')\n" | su user -c 'tee gateleen-hook-js/node/node_modules/npm/bin/npm-cli.js' >/dev/null \
- && su user -c 'mvn install -PpublicRepos -DskipTests -Dskip.installnodenpm -pl gateleen-hook-js' \
- && su user -c 'mvn install -PpublicRepos -DfailIfNoTests=false \
- -pl !gateleen-test,!gateleen-hook-js \
- -Dtest=!ReleaseLockLuaScriptTests,!RedisCacheStorageTest,!DeltaHandlerTest,!QueueCircuitBreakerCloseCircuitLuaScriptTests,!QueueCircuitBreakerGetAllCircuitsLuaScriptTests,!QueueCircuitBreakerHalfOpenCircuitsLuaScriptTests,!QueueCircuitBreakerReOpenCircuitLuaScriptTests,!QueueCircuitBreakerUpdateStatsLuaScriptTests,!RemoveExpiredQueuesLuaScriptTests,!StartQueueTimerLuaScriptTests' \
- && mkdir /work/classpath \
- && chown "${UID:?}:${GID:?}" /work/classpath \
- && su user -c 'cd gateleen-playground && mvn dependency:copy-dependencies \
- -DexcludeScope=provided -DoutputDirectory=/work/classpath/.' \
- && cp gateleen-playground/target/gateleen-playground-*.jar /work/classpath/. \
- && mkdir /work/etc \
- && printf >/work/etc/redis.conf '%s\n' \
- 'save ""' \
- 'appendonly yes' \
- 'appenddirname "redis-state"' \
- 'appendfilename appendonly.aof' \
- && (su user -c 'cd /work && redis-server /work/etc/redis.conf & \
- java -cp '"'/work/classpath/*'"' org.swisspush.gateleen.playground.Server' \
- & sleep 3) \
- && su user -c 'cd /work/gateleen && mvn deploy -PuploadStaticFiles' \
- && pkill -INT java && pkill -INT redis-server \
- && $PKGDEL $PKGS_TO_DEL \
- && $PKGCLEAN \
- && true
-
-USER "${UID}:${GID}"
-
-#CMD ["sleep", "36000"]
-CMD ["sh", "-c", "ip a|grep inet && redis-server /work/etc/redis.conf & java -cp '/work/classpath/*' org.swisspush.gateleen.playground.Server"]
-
diff --git a/src/main/docker/gcc-windoof.Dockerfile b/src/main/docker/gcc-windoof.Dockerfile
deleted file mode 100644
index 69cc18e..0000000
--- a/src/main/docker/gcc-windoof.Dockerfile
+++ /dev/null
@@ -1,233 +0,0 @@
-#
-# Windoof GCC build env
-#
-
-ARG BASE_IMG=alpine:3.16.0
-FROM $BASE_IMG
-
-ARG PKGSTOADD="ca-certificates curl mingw-w64-gcc make tar"
-ARG PKGSTODEL="ca-certificates curl"
-ARG PKGADD="apk add"
-ARG PKGDEL="apk del"
-ARG PKGCLEAN="true"
-ARG PKGINIT="true"
-ARG VERSION_CJSON="1.7.15"
-ARG VERSION_EXPAT="2.4.2"
-ARG VERSION_LUA="5.4.3"
-ARG VERSION_MBEDTLS="3.1.0"
-ARG VERSION_SDL2="2.0.20"
-ARG VERSION_SQLITE="3.33.0"
-ARG VERSION_ZLIB="1.2.11"
-
-ENV NDEBUG=1 MAKE_JOBS=8 HOST=x86_64-w64-mingw32
-
-RUN true \
- && $PKGINIT && $PKGADD $PKGSTOADD \
- #
- && ensureSourceIsCached () { \
- local localPath=${1:?}; \
- local url=${2:?}; \
- if test -f "${localPath:?}"; then \
- echo "[DEBUG] Source avail as \"${localPath:?}\""; \
- return; \
- fi; \
- echo "[DEBUG] Downloading \"${localPath:?}\""; \
- echo "[DEBUG] from \"${url:?}\""; \
- curl -L "$url" -o "${localPath:?}"; \
- } \
- #
- && makeZlib () { echo "\n Build zlib\n" \
- && local version="${1:?}" \
- && local tarbal="${2:?}" \
- && local origDir="${PWD:?}" \
- && mkdir "/tmp/zlib" && cd "/tmp/zlib" \
- && tar xzf "${tarbal:?}" \
- && cd zlib-* \
- && mkdir build \
- && export DESTDIR=./build BINARY_PATH=/bin INCLUDE_PATH=/include LIBRARY_PATH=/lib \
- && sed -i "s;^PREFIX =.\*\$;;" win32/Makefile.gcc \
- && make -e -j$MAKE_JOBS -fwin32/Makefile.gcc PREFIX="${HOST:?}"- \
- && make -e -fwin32/Makefile.gcc install PREFIX="${HOST:?}"- \
- && unset DESTDIR BINARY_PATH INCLUDE_PATH LIBRARY_PATH \
- && cp README build/. \
- && (cd build && rm -rf lib/pkgconfig) \
- && (cd build && find -type f -not -name MD5SUM -exec md5sum -b {} + > MD5SUM) \
- && (cd build && tar --owner=0 --group=0 -cz *) > "/tmp/zlib-${version:?}-windoof.tgz" \
- && cd / && rm -rf "/tmp/zlib" \
- && mkdir -p /usr/local \
- && tar -C /usr/local -f "/tmp/zlib-${version:?}-windoof.tgz" -x include lib \
- && echo -e "\n zlib Done :)\n" \
- && cd "${origDir:?}" ; } \
- && ensureSourceIsCached "/tmp/zlib-${VERSION_ZLIB:?}.tgz" "https://downloads.sourceforge.net/project/libpng/zlib/${VERSION_ZLIB:?}/zlib-${VERSION_ZLIB}.tar.gz" \
- && makeZlib "${VERSION_ZLIB:?}" "/tmp/zlib-${VERSION_ZLIB:?}.tgz" \
- #
- && $PKGADD xz \
- && makeExpat () { echo -e "\n Build Expat\n" \
- && local version="${1:?}" \
- && local tarbal="${2:?}" \
- && local origDir="${PWD:?}" \
- && mkdir /tmp/expat && cd /tmp/expat \
- && tar xf "${tarbal:?}" --strip-components=1 \
- && mkdir build \
- && ./configure --prefix="${PWD:?}/build" --host="${HOST:?}" CFLAGS="-Wall -pedantic --std=c99 -O2" \
- && make -e clean \
- && make -e -j$MAKE_JOBS \
- && make -e install \
- && cp README.md build/. \
- && (cd build && rm -rf lib/cmake lib/libexpat.la lib/pkgconfig) \
- && (cd build && find -type f -not -name MD5SUM -exec md5sum -b {} + > MD5SUM) \
- && (cd build && tar --owner=0 --group=0 -cz *) > "/tmp/expat-${version:?}-debian.tgz" \
- && cd / && rm -rf /tmp/expat \
- && mkdir -p /usr/local \
- && tar -C /usr/local -f /tmp/expat-2.4.2-debian.tgz -x bin include lib \
- && echo -e "\n Expat Done :)\n" ; } \
- && ensureSourceIsCached "/tmp/expat-${VERSION_EXPAT}.txz" "https://github.com/libexpat/libexpat/releases/download/R_2_4_2/expat-${VERSION_EXPAT}.tar.xz" \
- && makeExpat "${VERSION_EXPAT:?}" "/tmp/expat-${VERSION_EXPAT}.txz" \
- #
- && makeCJSON () { echo -e "\n Build cJSON\n" \
- && local version="${1:?}" \
- && local tarbal="${2:?}" \
- && local origDir="${PWD:?}" \
- && mkdir /tmp/cJSON && cd /tmp/cJSON \
- && tar xf "${tarbal:?}" \
- && cd * \
- && mkdir build build/obj build/lib build/include \
- && CC="${HOST:?}-gcc" \
- && AR="${HOST:?}-ar" \
- && CFLAGS="-Wall -pedantic -fPIC" \
- && ${CC:?} $CFLAGS -c -o build/obj/cJSON.o cJSON.c \
- && ${CC:?} $CFLAGS -shared -o build/lib/libcJSON.so.1.7.15 build/obj/cJSON.o \
- && (cd build/lib && ln -s libcJSON.so."${version:?}" libcJSON.so."${version%.*}") \
- && (cd build/lib && ln -s libcJSON.so."${version%.*}" libcJSON.so."${version%.*.*}") \
- && ${AR:?} rcs build/lib/libcJSON.a build/obj/cJSON.o \
- && unset CC AR CFLAGS \
- && cp -t build/. LICENSE README.md \
- && cp -t build/include/. cJSON.h \
- && rm -rf build/obj \
- && (cd build && find -type f -not -name MD5SUM -exec md5sum -b {} + > MD5SUM) \
- && (cd build && tar --owner=0 --group=0 -f "/tmp/cJSON-${version:?}-debian.tgz" -cz *) \
- && cd / && rm -rf /tmp/cJSON \
- && mkdir -p /usr/local \
- && tar -C /usr/local -f /tmp/cJSON-${version:?}-debian.tgz -x include lib \
- && echo -e "\n cJSON Done :)\n"; } \
- && ensureSourceIsCached "/tmp/cJSON-${VERSION_CJSON:?}.tgz" "https://github.com/DaveGamble/cJSON/archive/refs/tags/v1.7.15.tar.gz" \
- && makeCJSON "${VERSION_CJSON:?}" "/tmp/cJSON-${VERSION_CJSON:?}.tgz" \
- #
- && $PKGADD python3 \
- && makeMbedtls () { echo -e "\n Build mbedtls\n" \
- && local version="${1:?}" \
- && local tarbal="${2:?}" \
- && local origDir="${PWD:?}" \
- && mkdir /tmp/mbedtls && cd /tmp/mbedtls \
- && tar xf "${tarbal:?}" \
- && cd * \
- && sed -i 's;^DESTDIR=.*$;DESTDIR='"$PWD"'/build;' Makefile \
- # Yet another hack around as gethostname seems not to exist and I do
- # not understand how to disable compiling those "programs" which I
- # do not want anyway.
- && rm programs/ssl/ssl_mail_client.c programs/test/udp_proxy.c \
- && sed -i '/^\t\+\(ssl\/ssl_mail_client\|test\/udp_proxy\) \+\\$/d' programs/Makefile \
- && sed -i '/^ \+ssl_mail_client$/d' programs/ssl/CMakeLists.txt \
- && export CC="${HOST:?}-gcc" AR="${HOST:?}-ar" WINDOWS_BUILD=1 SHARED=1 \
- && make -e -j$MAKE_JOBS no_test \
- && if [ -e build ]; then echo "ERR already exists: $PWD/build"; false; fi \
- && make -e install \
- && unset CC AR WINDOWS_BUILD SHARED \
- && (cd build && tar --owner=0 --group=0 -cz *) > "/tmp/mbedtls-${version:?}-windoof.tgz" \
- && cd / && rm -rf /tmp/mbedtls \
- && mkdir -p /usr/local \
- && tar -C /usr/local -f /tmp/mbedtls-${version:?}-windoof.tgz -x bin include lib \
- && cd "${origDir:?}" \
- && echo -e "\n mbedtls Done :)\n" ; } \
- && ensureSourceIsCached "/tmp/mbedtls-${VERSION_MBEDTLS:?}.tgz" "https://github.com/Mbed-TLS/mbedtls/archive/refs/tags/v${VERSION_MBEDTLS:?}.tar.gz" \
- && makeMbedtls "${VERSION_MBEDTLS:?}" "/tmp/mbedtls-${VERSION_MBEDTLS:?}.tgz" \
- #
- && makeSqLite () { echo -e "\n Build SqLite\n" \
- && local version="${1:?}" \
- && local tarbal="${2:?}" \
- && local origDir="${PWD:?}" \
- && mkdir /tmp/sqlite && cd /tmp/sqlite \
- && tar xf "${tarbal:?}" \
- && cd * \
- && mkdir build \
- && $PKGADD gcc musl-dev tcl \
- && export CC="${HOST}-gcc" CPP="${HOST:?}-cpp" CXX="${HOST:?}-g++" BCC=gcc \
- && ./configure --prefix="${PWD:?}/build" --host=$HOST CC=$CC CPP=$CPP CXX=$CXX BCC=gcc BEXE=.exe config_TARGET_EXEEXT=.exe \
- && ln -s mksourceid.exe mksourceid \
- && make -e clean \
- && make -e -j$MAKE_JOBS \
- && $PKGDEL gcc musl-dev tcl \
- && make -e install \
- && unset CC CPP CXX BCC \
- && (cd build && rm -rf lemon* mksourceid lib/pkgconfig lib/*.la) \
- && cp README.md LICENSE.md VERSION build/. \
- && (cd build && find -not -name MD5SUM -type f -exec md5sum -b {} + > MD5SUM) \
- && (cd build && tar --owner=0 --group=0 -cz *) > "/tmp/sqlite-3.33.0-windoof.tgz" \
- && cd / && rm -rf /tmp/sqlite \
- && mkdir -p /usr/local \
- && tar -C /usr/local -f /tmp/sqlite-${version:?}-windoof.tgz -x bin include lib \
- && cd "$origDir" \
- && echo -e "\n SqLite Done :)\n"; } \
- && ensureSourceIsCached "/tmp/sqlite-${VERSION_SQLITE:?}.tgz" "https://github.com/sqlite/sqlite/archive/refs/tags/version-3.33.0.tar.gz" \
- && makeSqLite "${VERSION_SQLITE:?}" "/tmp/sqlite-${VERSION_SQLITE:?}.tgz" \
- #
- && $PKGADD binutils \
- && makeLua () { echo -e "\n Build Lua\n" \
- && local version="${1:?}" \
- && local tarbal="${2:?}" \
- && local origDir="${PWD:?}" \
- && mkdir /tmp/lua && cd /tmp/lua \
- && tar xf "${tarbal:?}" \
- && cd * \
- && mkdir -p build/bin build/include build/lib build/man/man1 \
- && make -e -j$MAKE_JOBS PLAT=mingw CC="${HOST:?}-gcc -std=gnu99" "AR=${HOST:?}-ar rcu" "RANLIB=${HOST:?}-ranlib" \
- && cp -t build/. README \
- && cp -t build/bin/. src/lua.exe src/luac.exe \
- && cp -t build/include/. src/lua.h src/luaconf.h src/lualib.h src/lauxlib.h src/lua.hpp \
- && cp -t build/lib/. src/liblua.a \
- && cp -t build/man/man1/. doc/lua.1 doc/luac.1 \
- && (cd build && find -not -name MD5SUM -type f -exec md5sum -b {} + > MD5SUM) \
- && (cd build && tar --owner=0 --group=0 -cz *) > "/tmp/lua-${version:?}-windoof.tgz" \
- && cd / && rm -rf /tmp/lua \
- && mkdir -p /usr/local \
- && tar -C /usr/local -f /tmp/lua-${version:?}-windoof.tgz -x bin include lib man \
- && cd "$origDir" \
- && echo -e "\n Lua Done :)\n"; } \
- && ensureSourceIsCached "/tmp/lua-${VERSION_LUA:?}.tgz" "https://www.lua.org/ftp/lua-${VERSION_LUA:?}.tar.gz" \
- && makeLua "${VERSION_LUA:?}" "/tmp/lua-${VERSION_LUA:?}.tgz" \
- #
- && $PKGADD alsa-lib libxext-dev pulseaudio-dev \
- && makeSDL2 () { echo -e "\n Build SDL2\n" \
- && local version="${1:?}" \
- && local tarbal="${2:?}" \
- && local origDir="${PWD:?}" \
- && mkdir /tmp/SDL2 && cd /tmp/SDL2 \
- && tar xf "${tarbal:?}" \
- && cd * \
- && ./configure --prefix="${PWD:?}/build" --host="${HOST:?}" \
- && make -e -j$MAKE_JOBS \
- && make -e install \
- && cp -t build/. CREDITS.txt LICENSE.txt README-SDL.txt README.md \
- && (cd build \
- && ls -A \
- | egrep -v '^(CREDITS.txt|LICENSE.txt|README-SDL.txt|README.md|bin|lib|include)$' \
- | xargs rm -rf) \
- && (cd build && rm -rf lib/cmake lib/pkgconfig lib/*.la) \
- && (cd build && find -type f -exec md5sum -b {} + > MD5SUM) \
- && (cd build && tar --owner=0 --group=0 -cz *) > "/tmp/SDL2-${version:?}-windoof.tgz" \
- && cd / && rm -rf /tmp/SDL2 \
- && mkdir -p /usr/local \
- && tar -C /usr/local -f /tmp/SDL2-${version:?}-windoof.tgz -x include lib \
- && cd "$origDir" \
- && echo -e "\n SDL2 Done :)\n"; } \
- && ensureSourceIsCached "/tmp/SDL2-${VERSION_SDL2:?}.tgz" "https://www.libsdl.org/release/SDL2-${VERSION_SDL2}.tar.gz" \
- && makeSDL2 "${VERSION_SDL2:?}" "/tmp/SDL2-${VERSION_SDL2:?}.tgz" \
- #
- && $PKGDEL $PKGSTODEL && $PKGCLEAN \
- && true
-
-WORKDIR /work
-
-CMD sleep 999999999
-
-
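Usage sketch for the mingw cross-build image above (image tag, Dockerfile path
and source file are assumptions; HOST is presumed x86_64-w64-mingw32, matching
the toolchain used above):

    docker build -t gcc-windoof -f src/main/docker/gcc-windoof.Dockerfile .
    docker run --rm -v "$PWD:/work" gcc-windoof \
        x86_64-w64-mingw32-gcc -o hello.exe hello.c
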
diff --git a/src/main/docker/gcc.Dockerfile b/src/main/docker/gcc.Dockerfile
deleted file mode 100644
index 5894667..0000000
--- a/src/main/docker/gcc.Dockerfile
+++ /dev/null
@@ -1,220 +0,0 @@
-#
-# Debian GCC build env
-#
-
-ARG BASE_IMG=debian:9-slim
-FROM $BASE_IMG
-
-ARG PKGSTOADD="ca-certificates curl gcc make tar"
-ARG PKGSTODEL="ca-certificates curl"
-ARG PKGADD="apt-get install -y --no-install-recommends"
-ARG PKGDEL="apt-get purge -y"
-ARG PKGCLEAN="apt-get clean"
-ARG PKGINIT="apt-get update"
-ARG VERSION_CJSON="1.7.15"
-ARG VERSION_EXPAT="2.4.2"
-ARG VERSION_LUA="5.4.3"
-ARG VERSION_MBEDTLS="3.1.0"
-ARG VERSION_SDL2="2.0.20"
-ARG VERSION_SQLITE="3.33.0"
-ARG VERSION_ZLIB="1.2.11"
-
-ENV NDEBUG=1 MAKE_JOBS=8
-
-RUN true \
- && $PKGINIT && $PKGADD $PKGSTOADD \
- #
- && ensureSourceIsCached () { \
- local localPath=${1:?}; \
- local url=${2:?}; \
- if test -f "${localPath:?}"; then \
- echo "[DEBUG] Source avail as \"${localPath:?}\""; \
- return; \
- fi; \
- echo "[DEBUG] Downloading \"${localPath:?}\""; \
- echo "[DEBUG] from \"${url:?}\""; \
- curl -L "$url" -o "${localPath:?}"; \
- } \
- #
- && $PKGADD libc-dev \
- && makeZlib () { echo "\n Build zlib\n" \
- && local version="${1:?}" \
- && local tarbal="${2:?}" \
- && local origDir="${PWD:?}" \
- && mkdir "/tmp/zlib" && cd "/tmp/zlib" \
- && tar xzf "${tarbal:?}" \
- && cd zlib-* \
- && mkdir build \
- && ./configure --prefix="${PWD:?}/build/" \
- && make -e -j$MAKE_JOBS \
- && make install \
- && cp README build/. \
- && (cd build \
- && find -type f -not -name MD5SUM -exec md5sum -b {} + > MD5SUM) \
- && (cd build && tar --owner=0 --group=0 -cz *) > "/tmp/zlib-${version:?}-debian.tgz" \
- && cd / && rm -rf "/tmp/zlib" \
- && mkdir -p /usr/local/ \
- && tar -C /usr/local -f "/tmp/zlib-${version:?}-debian.tgz" -x include lib \
- && cd "${origDir:?}" \
- && echo -e "\n zlib Done :)\n" ; } \
- && ensureSourceIsCached "/tmp/zlib-${VERSION_ZLIB:?}.tgz" "https://downloads.sourceforge.net/project/libpng/zlib/${VERSION_ZLIB:?}/zlib-${VERSION_ZLIB}.tar.gz" \
- && makeZlib "${VERSION_ZLIB:?}" "/tmp/zlib-${VERSION_ZLIB:?}.tgz" \
- #
- && $PKGADD libc-dev xz-utils \
- && makeExpat () { echo -e "\n Build Expat\n" \
- && local version="${1:?}" \
- && local tarbal="${2:?}" \
- && local origDir="${PWD:?}" \
- && mkdir /tmp/expat && cd /tmp/expat \
- && tar xf "${tarbal:?}" --strip-components=1 \
- && mkdir build \
- && ./configure --prefix="${PWD:?}/build" CFLAGS='-Wall -pedantic --std=c99 -O2' \
- && make -e clean \
- && make -e -j$MAKE_JOBS \
- && make -e install \
- && cp README.md build/. \
- && (cd build && rm -rf lib/cmake lib/libexpat.la lib/pkgconfig) \
- && (cd build && find -type f -not -name MD5SUM -exec md5sum -b {} + > MD5SUM) \
- && (cd build && tar --owner=0 --group=0 -cz *) > "/tmp/expat-${version:?}-debian.tgz" \
- && cd / && rm -rf /tmp/expat \
- && mkdir -p /usr/local \
- && tar -C /usr/local -f /tmp/expat-${version:?}-debian.tgz -x bin include lib \
- && cd "$origDir" \
- && echo -e "\n Expat Done :)\n" ; } \
- && ensureSourceIsCached "/tmp/expat-${VERSION_EXPAT}.txz" "https://github.com/libexpat/libexpat/releases/download/R_2_4_2/expat-${VERSION_EXPAT}.tar.xz" \
- && makeExpat "${VERSION_EXPAT:?}" "/tmp/expat-${VERSION_EXPAT}.txz" \
- #
- && $PKGADD libc-dev \
- && makeCJSON () { echo -e "\n Build cJSON\n" \
- && local version="${1:?}" \
- && local tarbal="${2:?}" \
- && local origDir="${PWD:?}" \
- && mkdir /tmp/cJSON && cd /tmp/cJSON \
- && tar xf "${tarbal:?}" \
- && cd * \
- && mkdir build build/obj build/lib build/include \
- && CFLAGS="-Wall -pedantic -fPIC" \
- && gcc $CFLAGS -c -o build/obj/cJSON.o cJSON.c \
- && gcc $CFLAGS -shared -o build/lib/libcJSON.so."${version:?}" build/obj/cJSON.o \
- && unset CFLAGS \
- && (cd build/lib && ln -s libcJSON.so."${version:?}" libcJSON.so."${version%.*}") \
- && (cd build/lib && ln -s libcJSON.so."${version%.*}" libcJSON.so."${version%.*.*}") \
- && ar rcs build/lib/libcJSON.a build/obj/cJSON.o \
- && cp -t build/. LICENSE README.md \
- && cp -t build/include/. cJSON.h \
- && rm -rf build/obj \
- && (cd build && find -type f -not -name MD5SUM -exec md5sum -b {} + > MD5SUM) \
- && (cd build && tar --owner=0 --group=0 -f "/tmp/cJSON-${version:?}-debian.tgz" -cz *) \
- && cd / && rm -rf /tmp/cJSON \
- && mkdir -p /usr/local \
- && tar -C /usr/local -f /tmp/cJSON-${version:?}-debian.tgz -x include lib \
- && cd "$origDir" \
- && echo -e "\n cJSON Done :)\n"; } \
- && ensureSourceIsCached "/tmp/cJSON-${VERSION_CJSON:?}.tgz" "https://github.com/DaveGamble/cJSON/archive/refs/tags/v1.7.15.tar.gz" \
- && makeCJSON "${VERSION_CJSON}" "/tmp/cJSON-${VERSION_CJSON:?}.tgz" \
- #
- && $PKGADD libc-dev python3 \
- && makeMbedtls () { echo -e "\n Build mbedtls\n" \
- && local version="${1:?}" \
- && local tarbal="${2:?}" \
- && local origDir="${PWD:?}" \
- && mkdir /tmp/mbedtls && cd /tmp/mbedtls \
- && tar xf "${tarbal:?}" \
- && cd * \
- && sed -i 's;^DESTDIR=.*$;DESTDIR='"$PWD"'/build;' Makefile \
- && SHARED=1 make -e -j$MAKE_JOBS tests lib mbedtls_test \
- && if [ -e build ]; then echo "ERR already exists: $PWD/build"; false; fi \
- && make -e install \
- && (cd build && tar --owner=0 --group=0 -cz *) > "/tmp/mbedtls-${version:?}-debian.tgz" \
- && cd / && rm -rf /tmp/mbedtls \
- && mkdir -p /usr/local \
- && tar -C /usr/local -f /tmp/mbedtls-${version:?}-debian.tgz -x bin include lib \
- && cd "$origDir" \
- && echo -e "\n mbedtls Done :)\n"; } \
- && ensureSourceIsCached "/tmp/mbedtls-${VERSION_MBEDTLS:?}.tgz" "https://github.com/Mbed-TLS/mbedtls/archive/refs/tags/v${VERSION_MBEDTLS:?}.tar.gz" \
- && makeMbedtls "${VERSION_MBEDTLS:?}" "/tmp/mbedtls-${VERSION_MBEDTLS:?}.tgz" \
- #
- && $PKGADD libc-dev tcl \
- && makeSqLite () { echo -e "\n Build SqLite\n" \
- && local version="${1:?}" \
- && local tarbal="${2:?}" \
- && local origDir="${PWD:?}" \
- && mkdir /tmp/sqlite && cd /tmp/sqlite \
- && tar xf "${tarbal:?}" \
- && cd * \
- && mkdir build \
- && ./configure --prefix="${PWD:?}/build" \
- && make -e clean \
- && make -e -j$MAKE_JOBS \
- && make -e install \
- && cp README.md LICENSE.md VERSION build/. \
- && (cd build && find -not -name MD5SUM -type f -exec md5sum -b {} + > MD5SUM) \
- && (cd build && tar --owner=0 --group=0 -cz *) > "/tmp/sqlite-${version:?}-debian.tgz" \
- && cd / && rm -rf /tmp/sqlite \
- && mkdir -p /usr/local \
- && tar -C /usr/local -f /tmp/sqlite-${version:?}-debian.tgz -x bin include lib \
- && cd "$origDir" \
- && echo -e "\n SqLite Done :)\n"; } \
- && ensureSourceIsCached "/tmp/sqlite-${VERSION_SQLITE:?}.tgz" "https://github.com/sqlite/sqlite/archive/refs/tags/version-3.33.0.tar.gz" \
- && makeSqLite "${VERSION_SQLITE:?}" "/tmp/sqlite-${VERSION_SQLITE:?}.tgz" \
- #
- && $PKGADD libc-dev \
- && makeLua () { echo -e "\n Build Lua\n" \
- && local version="${1:?}" \
- && local tarbal="${2:?}" \
- && local origDir="${PWD:?}" \
- && mkdir /tmp/lua && cd /tmp/lua \
- && tar xf "${tarbal:?}" \
- && cd * \
- && mkdir -p build/bin build/include build/lib build/man/man1 \
- && make -e -j$MAKE_JOBS \
- && cp -t build/. README \
- && cp -t build/bin/. src/lua src/luac \
- && cp -t build/include/. src/lua.h src/luaconf.h src/lualib.h src/lauxlib.h src/lua.hpp \
- && cp -t build/lib/. src/liblua.a \
- && cp -t build/man/man1/. doc/lua.1 doc/luac.1 \
- && (cd build && find -not -name MD5SUM -type f -exec md5sum -b {} + > MD5SUM) \
- && (cd build && tar --owner=0 --group=0 -cz *) > "/tmp/lua-${version:?}-debian.tgz" \
- && cd / && rm -rf /tmp/lua \
- && mkdir -p /usr/local \
- && tar -C /usr/local -f /tmp/lua-${version:?}-debian.tgz -x bin include lib man \
- && cd "$origDir" \
- && echo -e "\n Lua Done :)\n"; } \
- && ensureSourceIsCached "/tmp/lua-${VERSION_LUA:?}.tgz" "https://www.lua.org/ftp/lua-${VERSION_LUA:?}.tar.gz" \
- && makeLua "${VERSION_LUA:?}" "/tmp/lua-${VERSION_LUA:?}.tgz" \
- #
- && $PKGADD libc-dev libasound2-dev libxext-dev libpulse-dev \
- && makeSDL2 () { echo -e "\n Build SDL2\n" \
- && local version="${1:?}" \
- && local tarbal="${2:?}" \
- && local origDir="${PWD:?}" \
- && mkdir /tmp/SDL2 && cd /tmp/SDL2 \
- && tar xf "${tarbal:?}" \
- && cd * \
- && ./configure --prefix="${PWD:?}/build" --host= \
- && make -e -j$MAKE_JOBS \
- && make -e install \
- && cp -t build/. CREDITS.txt LICENSE.txt README-SDL.txt README.md \
- && (cd build \
- && ls -A \
- | egrep -v '^(CREDITS.txt|LICENSE.txt|README-SDL.txt|README.md|bin|lib|include)$' \
- | xargs rm -rf) \
- && (cd build && rm -rf lib/cmake lib/pkgconfig lib/*.la) \
- && (cd build && find -type f -exec md5sum -b {} + > MD5SUM) \
- && (cd build && tar --owner=0 --group=0 -cz *) > "/tmp/SDL2-${version:?}-debian.tgz" \
- && cd / && rm -rf /tmp/SDL2 \
- && mkdir -p /usr/local \
- && tar -C /usr/local -f /tmp/SDL2-${version:?}-debian.tgz -x include lib \
- && cd "$origDir" \
- && echo -e "\n SDL2 Done :)\n"; } \
- && ensureSourceIsCached "/tmp/SDL2-${VERSION_SDL2:?}.tgz" "https://www.libsdl.org/release/SDL2-${VERSION_SDL2}.tar.gz" \
- && makeSDL2 "${VERSION_SDL2:?}" "/tmp/SDL2-${VERSION_SDL2:?}.tgz" \
- #
- && $PKGDEL $PKGSTODEL && $PKGCLEAN \
- && true
-
-WORKDIR /work
-
-CMD sleep 999999999
-
-
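Build-invocation sketch for the Debian build env above (image tag is an
assumption; the VERSION_* ARGs can be overridden like this):

    docker build -t gcc-deb \
        --build-arg VERSION_ZLIB=1.2.11 \
        --build-arg VERSION_LUA=5.4.3 \
        -f src/main/docker/gcc.Dockerfile .
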
diff --git a/src/main/docker/gxx.Dockerfile b/src/main/docker/gxx.Dockerfile
deleted file mode 100644
index f29f168..0000000
--- a/src/main/docker/gxx.Dockerfile
+++ /dev/null
@@ -1,17 +0,0 @@
-#
-# Debian with C++ compiler.
-#
-
-ARG PARENT_IMAGE=debian:buster-20220622-slim
-FROM $PARENT_IMAGE
-
-RUN true \
- && apt update \
- && apt install -y --no-install-recommends \
- g++ make \
- && apt clean \
- && true
-
-USER 1000:1000
-WORKDIR /work
-CMD ["sleep", "36000"]
diff --git a/src/main/docker/jni.Dockerfile b/src/main/docker/jni.Dockerfile
deleted file mode 100644
index c790e47..0000000
--- a/src/main/docker/jni.Dockerfile
+++ /dev/null
@@ -1,20 +0,0 @@
-#
-# Debian with tools for java-native-interface development.
-#
-
-ARG PARENT_IMAGE=debian:buster-20220622-slim
-FROM $PARENT_IMAGE
-
-ENV \
- JAVA_HOME=/usr/lib/jvm/java-11-openjdk-amd64
-
-RUN true \
- && apt update \
- && apt install -y --no-install-recommends \
- g++ make openjdk-11-jdk-headless \
- && apt clean \
- && true
-
-USER 1000:1000
-WORKDIR /work
-CMD ["sleep", "36000"]
diff --git a/src/main/docker/jre8.Dockerfile b/src/main/docker/jre8.Dockerfile
deleted file mode 100644
index 603b5f5..0000000
--- a/src/main/docker/jre8.Dockerfile
+++ /dev/null
@@ -1,27 +0,0 @@
-#
-# openjdk java 1.8 runtime environment.
-#
-
-ARG PARENT_IMAGE=alpine:3.16.0
-FROM $PARENT_IMAGE
-
-ARG PKGS_TO_ADD="openjdk8-jre"
-ARG PKGS_TO_DEL=""
-ARG PKGINIT="true"
-ARG PKGADD="apk add"
-ARG PKGDEL="true"
-ARG PKGCLEAN="true"
-
-WORKDIR /work
-
-RUN true \
- && $PKGINIT \
- && $PKGADD $PKGS_TO_ADD \
- && $PKGDEL $PKGS_TO_DEL \
- && $PKGCLEAN \
- && true
-
-USER 1000:1000
-
-CMD ["sleep", "36000"]
-
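Usage sketch (image tag and jar name are placeholders):

    docker build -t jre8 -f src/main/docker/jre8.Dockerfile .
    docker run --rm -v "$PWD:/work" jre8 java -jar app.jar
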
diff --git a/src/main/docker/maven.Dockerfile b/src/main/docker/maven.Dockerfile
deleted file mode 100644
index c33d519..0000000
--- a/src/main/docker/maven.Dockerfile
+++ /dev/null
@@ -1,35 +0,0 @@
-#
-# Maven build env.
-#
-# Use this to share your hosts repository with the container:
-#
-# -v "$HOME/.m2/repository:/data/maven/.m2/repository"
-#
-
-ARG PARENT_IMAGE=alpine:3.16.0
-FROM $PARENT_IMAGE
-
-ARG PKGS_TO_ADD="maven"
-ARG PKGS_TO_DEL=""
-ARG PKGINIT="true"
-ARG PKGADD="apk add"
-ARG PKGDEL="true"
-ARG PKGCLEAN="true"
-
-WORKDIR /work
-
-RUN true \
- && $PKGINIT \
- && $PKGADD $PKGS_TO_ADD \
- && sed -i "s,</settings>, <localRepository>/data/maven/.m2/repository</localRepository>\n</settings>,g" /usr/share/java/maven-3/conf/settings.xml \
- && mkdir /data /data/maven \
- && chown 1000:1000 /data/maven \
- && chown 1000:1000 /work \
- && $PKGDEL $PKGS_TO_DEL \
- && $PKGCLEAN \
- && true
-
-USER 1000:1000
-
-CMD ["sleep", "36000"]
-
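Usage sketch, sharing the host repository as described in the file header
(image tag is an assumption):

    docker run --rm \
        -v "$HOME/.m2/repository:/data/maven/.m2/repository" \
        -v "$PWD:/work" \
        maven mvn clean package
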
diff --git a/src/main/docker/nginx.Dockerfile b/src/main/docker/nginx.Dockerfile
deleted file mode 100644
index 097d283..0000000
--- a/src/main/docker/nginx.Dockerfile
+++ /dev/null
@@ -1,50 +0,0 @@
-#
-# Bare nginx server serving HTTP/80 and HTTPS/443 from "/work/www".
-#
-
-ARG PARENT_IMAGE=alpine:3.16.0
-FROM $PARENT_IMAGE
-
-ARG CN=example.com
-ARG PKGS_TO_ADD="nginx openssl"
-ARG PKGS_TO_DEL="openssl"
-ARG PKGINIT="true"
-ARG PKGADD="apk add"
-ARG PKGDEL="true"
-ARG PKGCLEAN="true"
-
-WORKDIR /work
-
-RUN true \
- && $PKGINIT \
- && $PKGADD $PKGS_TO_ADD \
- && mkdir /work/www \
- && openssl genrsa -out /etc/ssl/private/nginx.key 2048 \
- && openssl req -new -key /etc/ssl/private/nginx.key \
- -out /etc/ssl/private/nginx.csr \
- -subj "/C=/ST=/L=/O=/OU=/CN=${CN:?}" \
- && openssl x509 -req -days 365 -in /etc/ssl/private/nginx.csr \
- -signkey /etc/ssl/private/nginx.key -out /etc/ssl/certs/nginx.crt \
- && chgrp nginx /etc/ssl/private/nginx.key \
- && chmod 0640 /etc/ssl/private/nginx.key \
- && printf 'server {\n\
- listen 80 default_server;\n\
- listen [::]:80 default_server;\n\
- listen 443 ssl default_server;\n\
- listen [::]:443 ssl default_server;\n\
- ssl_certificate /etc/ssl/certs/nginx.crt;\n\
- ssl_certificate_key /etc/ssl/private/nginx.key;\n\
- location / {\n\
- root /work/www;\n\
- index index.html index.htm;\n\
- }\n\
-}\n' > /etc/nginx/http.d/default.conf \
- && chown nginx:nginx /work /work/www \
- && $PKGDEL $PKGS_TO_DEL \
- && $PKGCLEAN \
- && true
-
-USER nginx:nginx
-
-CMD ["nginx", "-g", "daemon off;"]
-
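Usage sketch (tag, CN and host ports are assumptions; the server root is
/work/www as configured above):

    docker build -t nginx-selfsigned --build-arg CN=localhost \
        -f src/main/docker/nginx.Dockerfile .
    docker run --rm -p 8080:80 -p 8443:443 \
        -v "$PWD/www:/work/www" nginx-selfsigned
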
diff --git a/src/main/docker/zlib-deb.Dockerfile b/src/main/docker/zlib-deb.Dockerfile
deleted file mode 100644
index c5abaf6..0000000
--- a/src/main/docker/zlib-deb.Dockerfile
+++ /dev/null
@@ -1,49 +0,0 @@
-#
-# curl -sSL "https://git.hiddenalpha.ch/UnspecifiedGarbage.git/plain/src/main/docker/zlib-deb.Dockerfile" | docker build -f- . -t "zlib-deb:$(date +%Y%m%d)"
-#
-ARG PARENT_IMAGE=debian:9-slim
-FROM $PARENT_IMAGE
-
-ARG ZLIB_VERSION="1.2.11"
-ARG PKGS_TO_ADD="curl gcc make tar libc-dev ca-certificates vim"
-ARG PKGS_TO_DEL=""
-ARG PKG_INIT="apt-get update"
-ARG PKG_ADD="apt-get install -y --no-install-recommends"
-ARG PKG_DEL="apt-get purge"
-ARG PKG_CLEAN="apt-get clean"
-
-RUN true \
- && WORKDIR="/work" \
- && THEOLDPWD="$PWD" \
- # Prepare System
- && $PKG_INIT \
- && $PKG_ADD $PKGS_TO_ADD \
- # Prepare zlib
- && mkdir "${WORKDIR:?}" && cd "${WORKDIR:?}" \
- && mkdir tarballs tree build \
- && curl -sSL -o "tarballs/zlib-${ZLIB_VERSION}.tgz" "https://github.com/madler/zlib/archive/refs/tags/v${ZLIB_VERSION:?}.tar.gz" \
- && cd "${WORKDIR:?}/tree" \
- && tar --strip-components 1 -xzf "${WORKDIR:?}/tarballs/zlib-${ZLIB_VERSION:?}.tgz" \
- # Make zlib
- && ./configure --prefix="${WORKDIR:?}/build" \
- && make -e \
- && make install \
- && cp README "${WORKDIR}/build/." \
- && cd "${WORKDIR}/build" \
- && rm -rf lib/pkgconfig \
- && find -type f -not -name MD5SUM -exec md5sum -b {} + > MD5SUM \
- && tar --owner=0 --group=0 -cz * > "${WORKDIR:?}/tarballs/zlib-${ZLIB_VERSION:?}-debian.tgz" \
- && cd "${WORKDIR}" \
- && rm -rf "${WORKDIR:?}/tree" "${WORKDIR:?}/build" \
- # install zlib
- && mkdir -p /usr/local/ \
- && tar -C /usr/local -f "${WORKDIR:?}/tarballs/zlib-${ZLIB_VERSION:?}-debian.tgz" -x include lib \
- # cleanup
- && cd "${THEOLDPWD:?}" \
- && unset THEOLDPWD WORKDIR \
- && $PKG_DEL $PKGS_TO_DEL \
- && $PKG_CLEAN \
- && true
-
-WORKDIR /work
-
diff --git a/src/main/docker/zlib-mingw.Dockerfile b/src/main/docker/zlib-mingw.Dockerfile
deleted file mode 100644
index abaa241..0000000
--- a/src/main/docker/zlib-mingw.Dockerfile
+++ /dev/null
@@ -1,51 +0,0 @@
-#
-# curl -sSL "https://git.hiddenalpha.ch/UnspecifiedGarbage.git/plain/src/main/docker/zlib-mingw.Dockerfile" | docker build -f- . -t "zlib-deb:$(date +%Y%m%d)"
-#
-ARG PARENT_IMAGE=alpine:3.16.0
-FROM $PARENT_IMAGE
-
-ARG ZLIB_VERSION="1.2.11"
-ARG PKGS_TO_ADD="curl mingw-w64-gcc make tar ca-certificates"
-ARG PKGS_TO_DEL=""
-ARG PKG_INIT="true"
-ARG PKG_ADD="apk add "
-ARG PKG_DEL="apk del"
-ARG PKG_CLEAN="true"
-
-RUN true \
- && WORKDIR="/work" \
- && THEOLDPWD="$PWD" \
- # Prepare System
- && $PKG_INIT \
- && $PKG_ADD $PKGS_TO_ADD \
- # Prepare zlib
- && mkdir "${WORKDIR:?}" && cd "${WORKDIR:?}" \
- && mkdir tarballs tree build \
- && curl -sSL -o "tarballs/zlib-${ZLIB_VERSION}.tgz" "https://github.com/madler/zlib/archive/refs/tags/v${ZLIB_VERSION:?}.tar.gz" \
- && cd "${WORKDIR:?}/tree" \
- && tar --strip-components 1 -xzf "${WORKDIR:?}/tarballs/zlib-${ZLIB_VERSION:?}.tgz" \
- # Make zlib
- && sed -i "s;^PREFIX =.\*\$;;" win32/Makefile.gcc \
- && export DESTDIR=../build BINARY_PATH=/bin INCLUDE_PATH=/include LIBRARY_PATH=/lib \
- && make -e -fwin32/Makefile.gcc PREFIX=x86_64-w64-mingw32- \
- && make -e -fwin32/Makefile.gcc install PREFIX=x86_64-w64-mingw32- \
- && unset DESTDIR BINARY_PATH INCLUDE_PATH LIBRARY_PATH \
- && cp README ../build/. \
- && cd "${WORKDIR:?}/build" \
- && rm -rf lib/pkgconfig \
- && find -type f -not -name MD5SUM -exec md5sum -b {} + > MD5SUM \
- && tar --owner=0 --group=0 -cz * > "${WORKDIR:?}/tarballs/zlib-1.2.11-windoof.tgz" \
- && cd "${WORKDIR:?}" \
- && rm -rf "${WORKDIR:?}/tree" "${WORKDIR:?}/build" \
- # Install zlib
- && mkdir -p /usr/local/x86_64-w64-mingw32 \
- && tar -C /usr/local/x86_64-w64-mingw32 -f "${WORKDIR:?}/tarballs/zlib-${ZLIB_VERSION:?}-windoof.tgz" -x include lib \
- && cd "${THEOLDPWD:?}" \
- && unset THEOLDPWD WORKDIR \
- && $PKG_DEL $PKGS_TO_DEL \
- && $PKG_CLEAN \
- && true
-
-WORKDIR /work
-
-
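With the tarball unpacked under /usr/local/x86_64-w64-mingw32 as above, a
cross build could link against it like this (app.c is a placeholder):

    x86_64-w64-mingw32-gcc -o app.exe app.c \
        -I/usr/local/x86_64-w64-mingw32/include \
        -L/usr/local/x86_64-w64-mingw32/lib -lz
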
diff --git a/src/main/eagle b/src/main/eagle
new file mode 120000
index 0000000..f5160d6
--- /dev/null
+++ b/src/main/eagle
@@ -0,0 +1 @@
+C:/work/projects/isa-svc/eagle/.git/meins \ No newline at end of file
diff --git a/src/main/firefox/gaga-plugin/main.js b/src/main/firefox/gaga-plugin/main.js
index 2a5bbae..4447719 100644
--- a/src/main/firefox/gaga-plugin/main.js
+++ b/src/main/firefox/gaga-plugin/main.js
@@ -1,15 +1,10 @@
/*
- * For how to install see:
- *
- * "https://git.hiddenalpha.ch/UnspecifiedGarbage.git/tree/doc/note/firefox/firefox.txt"
+ * [How to install](UnspecifiedGarbage/doc/note/firefox/firefox.txt)
*/
;(function(){ try{
var NDEBUG = false;
var STATUS_INIT = 1;
- var STATUS_RUNNING = 2;
- var STATUS_DONE = 3;
- var STATUS_OBSOLETE = 4;
var NOOP = function(){};
var LOGERR = console.error.bind(console);
var N = null;
@@ -19,11 +14,10 @@
function main(){
var app = Object.seal({
ui: {},
- status: Object.seal({
- checklistBtn: STATUS_INIT,
- developmentBtn: STATUS_INIT,
- }),
lastClickEpochMs: 0,
+ wantChecklistExpanded: false,
+ wantDevelopmentExpanded: false,
+ wantBigTemplateExpanded: false,
});
if( NDEBUG ){
setTimeout = window.setTimeout;
@@ -32,14 +26,16 @@
}else{ /* fix broken tooling */
setTimeout = setTimeoutWithCatch.bind(0, app);
logErrors = logErrorsImpl.bind(N, app);
- LOGDBG = console.debug.bind(console);
+ LOGDBG = console.debug.bind(console, "[gaga-plugin]");
}
document.addEventListener("DOMContentLoaded", logErrors.bind(N, onDOMContentLoaded, app));
+ scheduleNextStateCheck(app);
+ LOGDBG("gaga-plugin initialized");
}
function onDOMContentLoaded( app ){
- cleanupClutter(app);
+ LOGDBG("onDOMContentLoaded()");
attachDomObserver(app);
}
@@ -50,83 +46,58 @@
}
- function onDomHasChangedSomehow( app, changes, mutationObserver ){
- var nowEpochMs = Date.now();
- if( (app.lastClickEpochMs + 2000) > nowEpochMs ){
- LOGDBG("ignore, likely triggered by user.");
- return; }
- var needsReEval = false;
- for( var change of changes ){
- if( change.target.nodeName != "BUTTON" ) continue;
- var isAriaExpanded = (change.attributeName == "aria-expanded");
- var isChildAdded = (change.addedNodes.length > 0);
- var isChildRemoved = (change.removedNodes.length > 0);
- var isChildAddedOrRemoved = isChildAdded || isChildRemoved;
- if( !isAriaExpanded && !isChildAddedOrRemoved ) continue;
- if( isAriaExpanded ){
- LOGDBG("Suspicious, isExpanded: ", change.target);
- needsReEval = true; break;
- }
- if( !isChildAddedOrRemoved ) continue;
- var isBloatyChecklistBtnStillThere = document.body.contains(getBloatyChecklistBtn(app));
- if( !isBloatyChecklistBtnStillThere ){
- LOGDBG("Suspicious, btn lost");
- needsReEval = true; break;
- }
- var isBloatyDevelopmentBtnStillThere = document.body.contains(getBloatyDevelopmentBtn(app));
- if( !isBloatyDevelopmentBtnStillThere ){
- LOGDBG("Suspicious, btn lost");
- needsReEval = true; break;
- }
- }
- if( needsReEval ){
- LOGDBG("Change detected! Eval again");
- app.ui.bloatyChecklistBtn = null;
- app.ui.bloatyDevelopmentBtn = null;
- setTimeout(cleanupClutter, 42, app);
+ function scheduleNextStateCheck( app ){
+ //LOGDBG("scheduleNextStateCheck()");
+ if( app.stateCheckTimer ){
+ LOGDBG("Why is stateCheckTimer not zero?", app.stateCheckTimer);
}
+ app.stateCheckTimer = setTimeout(function(){
+ app.stateCheckTimer = null;
+ scheduleNextStateCheck(app);
+ performStateCheck(app);
+ }, 42);
}
- function cleanupClutter( app ){
- if( app.bloatyChecklistDone != STATUS_RUNNING ){
- app.bloatyChecklistDone = STATUS_OBSOLETE
- setTimeout(hideBloatyButton, 0, app, "checklistBtn");
- }
- if( app.bloatyDevelopmentDone != STATUS_RUNNING ){
- app.bloatyDevelopmentDone = STATUS_OBSOLETE;
- setTimeout(hideBloatyButton, 0, app, "developmentBtn");
- }
- if( app.bloatyDevelopmentDone != STATUS_RUNNING ){
- app.bloatyDevelopmentDone = STATUS_OBSOLETE;
- setTimeout(hideBloatyButton, 0, app, "bigTemplateBtn");
+ function performStateCheck( app ){
+ var buttons = [ "checklistBtn", "developmentBtn", "bigTemplateBtn" ];
+ var wantKey = [ "wantChecklistExpanded", "wantDevelopmentExpanaded", "wantBigTemplateExpanded" ];
+ for( var i = 0 ; i < buttons.length ; ++i ){
+ var btnKey = buttons[i];
+ var btnElem = getBloatyButton(app, btnKey);
+ if( !btnElem ) continue;
+ var isExpanded = isAriaBtnExpanded(app, btnElem);
+ var wantExpanded = app[wantKey[i]];
+ //LOGDBG(btnKey +" expanded is", isExpanded);
+ if( isExpanded && !wantExpanded ){
+ collapseAriaBtn(app, btnElem);
+ }
}
}
- function setLastClickTimeToNow( app ){ app.lastClickEpochMs = Date.now(); }
+ function onDomHasChangedSomehow( app, changes, mutationObserver ){
+ var nowEpochMs = Date.now();
+ LOGDBG("DOM Change detected!");
+ /*refresh dom refs so check will work on correct elems*/
+ Object.keys(app.ui).forEach(function( key ){
+ app.ui[key] = null;
+ });
+ }
- function hideBloatyButton( app, btnKey ){
- if( app.status[btnKey] == STATUS_DONE ){
- LOGDBG(btnKey +" now hidden");
- return; }
- app.status[btnKey] == STATUS_RUNNING;
- var btn = getBloatyButton(app, btnKey);
- do{
- if( !btn ){ LOGDBG(btnKey +" not found. DOM maybe not yet ready?"); break; }
- var isExpanded = isAriaBtnExpanded(app, btn);
- if( isExpanded === true ){
- LOGDBG(btnKey +".click()");
- btn.click();
- }else if( isExpanded === false ){
- app.status[btnKey] = STATUS_DONE;
- }else{
- throw Error("Neither true nor false "+ typeof(isExpanded) +" "+ isExpanded);
- }
- }while(0);
- /* try later */
- setTimeout(hideBloatyButton, 16, app, btnKey);
+ function onBloatyChecklistBtnMousedown( app ){
+ app.wantChecklistExpanded = !app.wantChecklistExpanded;
+ }
+
+
+ function onBloatyDevelopmentBtnMousedown( app ){
+ app.wantDevelopmentExpanded = !app.wantDevelopmentExpanded;
+ }
+
+
+ function onBloatyBigTemplateBtnMousedown( app ){
+ app.wantBigTemplateExpanded = !app.wantBigTemplateExpanded;
}
@@ -135,19 +106,22 @@
}else if( btnKey == "checklistBtn" ){
var selector = "button[aria-label=Checklists]";
var uiKey = "bloatyChecklistBtn";
+ var onMousedown = onBloatyChecklistBtnMousedown;
}else if( btnKey == "developmentBtn" ){
var selector = "button[aria-label=Development]";
var uiKey = "bloatyDevelopmentBtn";
+ var onMousedown = onBloatyDevelopmentBtnMousedown;
}else if( btnKey == "bigTemplateBtn" ){
var selector = "button[aria-label=BigTemplate]";
var uiKey = "bloatyBigTemplateBtn";
+ var onMousedown = onBloatyBigTemplateBtnMousedown;
}else{
throw Error(btnKey);
}
if( !app.ui[uiKey] ){
var btn = fetchUiRefOrNull(app, document, selector);
if( btn ){
- btn.addEventListener("mousedown", logErrors.bind(N, setLastClickTimeToNow, app));
+ btn.addEventListener("mousedown", logErrors.bind(N, onMousedown, app));
app.ui[uiKey] = btn;
}
}
@@ -155,6 +129,21 @@
}
+ function collapseAriaBtn( app, btnElem ){
+ do{
+ var isExpanded = isAriaBtnExpanded(app, btnElem);
+ if( isExpanded === true ){
+ LOGDBG("click()");
+ btnElem.click();
+ }else if( isExpanded === false ){
+ break;
+ }else{
+ throw Error("Neither true nor false "+ typeof(isExpanded) +" "+ isExpanded);
+ }
+ }while(0);
+ }
+
+
function isAriaBtnExpanded( app, btnElem ){
var value = btnElem.getAttribute("aria-expanded");
if( value === "true" ){
diff --git a/src/main/gimp/nek2023-scan2/arrange-pdf b/src/main/gimp/nek2023-scan2/arrange-pdf
new file mode 100644
index 0000000..e2d2c7b
--- /dev/null
+++ b/src/main/gimp/nek2023-scan2/arrange-pdf
@@ -0,0 +1,32 @@
+#!/bin/sh
+#
+# Once used to batch process some PDFs. This is NOT functional. It is only here
+# for reference purposes.
+#
+# scan.pdf
+# scan0001.pdf
+# scan0002.pdf
+# scan0003.pdf
+# scan0004.pdf
+# scan0005.pdf
+#
+
+true \
+ && mkdir scan-tmp1 scan0001-tmp1 scan0002-tmp1 scan0003-tmp1 scan0004-tmp1 scan0005-tmp1 \
+ && pdfimages -all scan.pdf scan-tmp1/scan-img \
+ && pdfimages -all scan0001.pdf scan0001-tmp1/scan0001-img \
+ && pdfimages -all scan0002.pdf scan0002-tmp1/scan0002-img \
+ && pdfimages -all scan0003.pdf scan0003-tmp1/scan0003-img \
+ && pdfimages -all scan0004.pdf scan0004-tmp1/scan0004-img \
+ && pdfimages -all scan0005.pdf scan0005-tmp1/scan0005-img \
+ && X=nVXBcpswEL3nK7acRDuywUkPnvTCNErijmtnHJK7ArKiFAS1ZDf9+wosRB08GTligF28b/X27Y6MCqY/AzoDs5CQckFLpgAVQun2W7OCsS7rccmEVGOVUTlpn2NRchxF0eil5oFfbHxC7MQn1mwfOwOfQOgY0IvdMeCHqZ57AnvjxBoHQN8aB0DfGgfAD9d44Qk8d8aJ4gyAvuIMgL7iXDjjRKoDoC/VAdCX6ldnDKmG4cFxAZH1q62+k9y5BufsfEP/0KeijxX5K+DYOpnUEE8bp7lRztZCMkD6WaiZ+rFVOrneZr+E5Fe0lPOqqsHAu0RrQN8aH0yWENAT40La+pBi+hN0NJHUz21gd8zZOlyc4YsyugG0FgXDLzXjuKhoDrFLYd/hG6Srbg/noqyNapQzzJnGNNNix3BB/7JNs8l/6DayQ+OC7Vih+my3s/t0ebNKfuLHZP5AXNOi0aR5TAGuk/k9Abg0xOptf2DHowgO1iVwWpb0MEMb5TKY5vUpDgnuS8kquWMbjYXM2SvLW7m+LxePZJXiq1l6S1Z4sVwQ9+0umZM0JfiGLMgqSQmcT+xu6eqBQBAcamiHBym9MY3GtK6ZzJ3wAW6YjGrJg1692kTqt4hA0Z1xoekh6AoCm7mHtd01mbCJZE1zTSFOc0ujG2WYmttex0TJmfn/ZG1Xu1/fmVr0pZ2/2HIJ3cS/g2m33W/5eyt044dn/wA4gyeC \
+ && echo $X|base64 -d|inflate| gimp -nidfsc -b - \
+ && mkdir scan-out scan0001-out scan0002-out scan0003-out scan0004-out scan0005-out \
+ && mv -t scan-out/. scan-tmp1/*-gimp.png \
+ && mv -t scan0001-out/. scan0001-tmp1/*-gimp.png \
+ && mv -t scan0002-out/. scan0002-tmp1/*-gimp.png \
+ && mv -t scan0003-out/. scan0003-tmp1/*-gimp.png \
+ && mv -t scan0004-out/. scan0004-tmp1/*-gimp.png \
+ && mv -t scan0005-out/. scan0005-tmp1/*-gimp.png \
+
+
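+# Note: "inflate" above is presumably a local helper which zlib-decompresses
+# stdin; an equivalent sketch (python3 is an assumption):
+#
+#   inflate () {
+#       python3 -c 'import sys,zlib; sys.stdout.buffer.write(zlib.decompress(sys.stdin.buffer.read()))'
+#   }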
diff --git a/src/main/gimp/re-arrange-pdfs/re-arrange-pdfs.sh b/src/main/gimp/re-arrange-pdfs/re-arrange-pdfs.sh
new file mode 100644
index 0000000..68caf52
--- /dev/null
+++ b/src/main/gimp/re-arrange-pdfs/re-arrange-pdfs.sh
@@ -0,0 +1,24 @@
+#!/bin/sh
+#
+# Once used to batch process some PDFs. This is NOT functional. It is only here
+# for reference purposes.
+#
+
+true \
+ && pdftk \
+ A=scan.pdf \
+ B=scan0001.pdf \
+ shuffle A Bend-1 \
+ output all-pages.pdf \
+ && pdftk \
+ all-pages.pdf \
+ cat 1 3 5 7 8 9 11 12 13 14 15 16 17 18 19 20 21 22 23 24 \
+ output pages-with-content.pdf \
+ && mkdir pages-with-content-img \
+ && pdfimages -all pages-with-content.pdf pages-with-content-img/img \
+ && X=pZZNc9owEIbv+RVbn+RmBBZp0zLpxdM4CR0KGeLkrtiLUGrLLhIk/feVjS2HTCeHSoyAtfdZ7cc7BlKg+QjkBOwiUqkFL1EDKaQ27bVmBWNT1uMSpdJjnXHFxjUXqOmzNBuaVcqgMlSWYmw3jaJo9FSL4H9p5kVPvOgzL/qTF/3Ziz73or940V+96KkPzby0xry0xry0xry0xry0xry0xry0xry0xry0xl5pLQyPHnkQdXa1M7dKONNy7nu+5c/8sRh8Zf4ClHVGpgxM2jvNJjmupUIgZiP1TP/YaRNf7bJfUolLXqp5VdVg8T7QGsi3xgYbJQTyiEKqrkii0XyAPk2izKZ17B/VXR3Oz+ZLMr4FspYF0qcaBS0qngNzIbrP8A3pqjvgQpa17ZztJhVoKM+M3CMt+B/cNoe8olvPnqYF7rHQQ7Sb2V26vF7FP+lDPL9P3OSi0aR5mwJcxfO7BODCJlbvhh8dNorgaF2A4GXJjyO0Xi6CHd4Q4jjBQylWEnvcWkmoHF8wb9v1fbl4SFYpvZylN8mKLpaLxF27jedJmib0OlkkqzhN4GzSnZau7hMIguMeduIh2mztoCmva1S5a3xAm0xGtRLB0L3aepq3RKD53prQzBBMBUEXecDa6dpI1HpiM1xbiOt5l0YvZZja3b3+1ZQc7X8AbKfa331HteS01R/rcgmd4t9h2mMPR/7eSdPY4clfwj6IxA== \
+ && echo $X|base64 -d|inflate| gimp -nidfsc -b - \
+ && mkdir pages-image-adjusted \
+ && mv -t pages-image-adjusted/. pages-with-content-img/*gimp.png \
+
+
diff --git a/src/main/java/ch/hiddenalpha/unspecifiedgarbage/gateleenKludge/FailFastMetricsOptions.java b/src/main/java/ch/hiddenalpha/unspecifiedgarbage/gateleenKludge/FailFastMetricsOptions.java
new file mode 100644
index 0000000..c911061
--- /dev/null
+++ b/src/main/java/ch/hiddenalpha/unspecifiedgarbage/gateleenKludge/FailFastMetricsOptions.java
@@ -0,0 +1,35 @@
+package ch.hiddenalpha.unspecifiedgarbage.gateleenKludge;
+
+import io.vertx.core.json.JsonObject;
+import io.vertx.core.metrics.MetricsOptions;
+import io.vertx.core.spi.VertxMetricsFactory;
+
+
+public class FailFastMetricsOptions extends io.vertx.core.metrics.MetricsOptions {
+
+ private final String dbgMsg;
+
+ public FailFastMetricsOptions( String dbgMsg ){ this.dbgMsg = dbgMsg; }
+
+ public FailFastMetricsOptions(){ this(failCtor()); }
+
+ private FailFastMetricsOptions( MetricsOptions o ){ this(failCtor()); }
+
+ private FailFastMetricsOptions( JsonObject json ){ this(failCtor()); }
+
+ private static String failCtor(){ throw new IllegalStateException("Do NOT use this ctor!"); }
+
+ @Override public boolean isEnabled(){ throw new UnsupportedOperationException(dbgMsg); }
+
+ @Override public MetricsOptions setEnabled(boolean en){ throw new UnsupportedOperationException(dbgMsg); }
+
+ @Override public VertxMetricsFactory getFactory(){ throw new UnsupportedOperationException(dbgMsg); }
+
+ @Override public MetricsOptions setFactory( VertxMetricsFactory f ){ throw new UnsupportedOperationException(dbgMsg); }
+
+ @Override public JsonObject toJson(){ throw new UnsupportedOperationException(dbgMsg); }
+
+ @Override public String toString(){ throw new UnsupportedOperationException(dbgMsg); }
+
+}
+
diff --git a/src/main/java/ch/hiddenalpha/unspecifiedgarbage/gateleenKludge/FailFastVertxMetricsFactory.java b/src/main/java/ch/hiddenalpha/unspecifiedgarbage/gateleenKludge/FailFastVertxMetricsFactory.java
new file mode 100644
index 0000000..fa0d7e1
--- /dev/null
+++ b/src/main/java/ch/hiddenalpha/unspecifiedgarbage/gateleenKludge/FailFastVertxMetricsFactory.java
@@ -0,0 +1,27 @@
+package ch.hiddenalpha.unspecifiedgarbage.gateleenKludge;
+
+import io.vertx.core.VertxOptions;
+import io.vertx.core.impl.VertxBuilder;
+import io.vertx.core.json.JsonObject;
+import io.vertx.core.metrics.MetricsOptions;
+import io.vertx.core.spi.metrics.VertxMetrics;
+
+
+public class FailFastVertxMetricsFactory implements io.vertx.core.spi.VertxMetricsFactory {
+
+ private final String dbgMsg;
+
+ public FailFastVertxMetricsFactory(String dbgMsg ){ this.dbgMsg = dbgMsg; }
+
+ @Override public void init(VertxBuilder b) { throw new UnsupportedOperationException(dbgMsg); }
+
+ @Override public VertxMetrics metrics(VertxOptions o){ throw new UnsupportedOperationException(dbgMsg); }
+
+ @Override public MetricsOptions newOptions() { throw new UnsupportedOperationException(dbgMsg); }
+
+ @Override public MetricsOptions newOptions(MetricsOptions o) { throw new UnsupportedOperationException(dbgMsg); }
+
+ @Override public MetricsOptions newOptions(JsonObject j) { throw new UnsupportedOperationException(dbgMsg); }
+
+}
+
diff --git a/src/main/java/ch/hiddenalpha/unspecifiedgarbage/gateleenKludge/tmoutissue20240123/DelegateHttpServerRequest.java b/src/main/java/ch/hiddenalpha/unspecifiedgarbage/gateleenKludge/tmoutissue20240123/DelegateHttpServerRequest.java
new file mode 100644
index 0000000..aa4ad48
--- /dev/null
+++ b/src/main/java/ch/hiddenalpha/unspecifiedgarbage/gateleenKludge/tmoutissue20240123/DelegateHttpServerRequest.java
@@ -0,0 +1,394 @@
+package ch.hiddenalpha.unspecifiedgarbage.gateleenKludge.tmoutissue20240123;
+
+import io.netty.handler.codec.DecoderResult;
+import io.vertx.core.AsyncResult;
+import io.vertx.core.Future;
+import io.vertx.core.Handler;
+import io.vertx.core.MultiMap;
+import io.vertx.core.buffer.Buffer;
+import io.vertx.core.http.Cookie;
+import io.vertx.core.http.HttpConnection;
+import io.vertx.core.http.HttpFrame;
+import io.vertx.core.http.HttpMethod;
+import io.vertx.core.http.HttpServerFileUpload;
+import io.vertx.core.http.HttpServerRequest;
+import io.vertx.core.http.HttpServerResponse;
+import io.vertx.core.http.HttpVersion;
+import io.vertx.core.http.ServerWebSocket;
+import io.vertx.core.http.StreamPriority;
+import io.vertx.core.net.NetSocket;
+import io.vertx.core.net.SocketAddress;
+import io.vertx.core.streams.Pipe;
+import io.vertx.core.streams.WriteStream;
+
+import javax.net.ssl.SSLPeerUnverifiedException;
+import javax.net.ssl.SSLSession;
+import javax.security.cert.X509Certificate;
+import java.util.Map;
+import java.util.Set;
+
+public class DelegateHttpServerRequest implements io.vertx.core.http.HttpServerRequest {
+
+ private final io.vertx.core.http.HttpServerRequest delegate;
+ private final boolean isDebugging = true;
+
+ public DelegateHttpServerRequest(HttpServerRequest delegate) {
+ this.delegate = delegate;
+ }
+
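+    /* Intentionally throws and immediately swallows an exception, so a
+     * debugger configured to break on thrown exceptions stops here. */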
+ private void breakpoint(){
+ try{
+ throw new UnsupportedOperationException();
+ }catch(UnsupportedOperationException ex){}
+ }
+
+ @Override
+ public HttpServerRequest exceptionHandler(Handler<Throwable> handler) {
+ if( isDebugging ) breakpoint();
+ return delegate.exceptionHandler(handler);
+ }
+
+ @Override
+ public HttpServerRequest handler(Handler<Buffer> handler) {
+ if( isDebugging ) breakpoint();
+ return delegate.handler(handler);
+ }
+
+ @Override
+ public HttpServerRequest pause() {
+ if( isDebugging ) breakpoint();
+ return delegate.pause();
+ }
+
+ @Override
+ public HttpServerRequest resume() {
+ if( isDebugging ) breakpoint();
+ return delegate.resume();
+ }
+
+ @Override
+ public HttpServerRequest fetch(long amount) {
+ if( isDebugging ) breakpoint();
+ return delegate.fetch(amount);
+ }
+
+ @Override
+ public HttpServerRequest endHandler(Handler<Void> endHandler) {
+ if( isDebugging ) breakpoint();
+ return delegate.endHandler(endHandler);
+ }
+
+ @Override
+ public HttpVersion version() {
+ if( isDebugging ) breakpoint();
+ return delegate.version();
+ }
+
+ @Override
+ public HttpMethod method() {
+ if( isDebugging ) breakpoint();
+ return delegate.method();
+ }
+
+ @Override
+ public boolean isSSL() {
+ if( isDebugging ) breakpoint();
+ return delegate.isSSL();
+ }
+
+ @Override
+ public String scheme() {
+ if( isDebugging ) breakpoint();
+ return delegate.scheme();
+ }
+
+ @Override
+ public String uri() {
+ if( isDebugging ) breakpoint();
+ return delegate.uri();
+ }
+
+ @Override
+ public String path() {
+ if( isDebugging ) breakpoint();
+ return delegate.path();
+ }
+
+ @Override
+ public String query() {
+ if( isDebugging ) breakpoint();
+ return delegate.query();
+ }
+
+ @Override
+ public String host() {
+ if( isDebugging ) breakpoint();
+ return delegate.host();
+ }
+
+ @Override
+ public long bytesRead() {
+ if( isDebugging ) breakpoint();
+ return delegate.bytesRead();
+ }
+
+ @Override
+ public HttpServerResponse response() {
+ if( isDebugging ) breakpoint();
+ return delegate.response();
+ }
+
+ @Override
+ public MultiMap headers() {
+ if( isDebugging ) breakpoint();
+ return delegate.headers();
+ }
+
+ @Override
+ public String getHeader(String headerName) {
+ if( isDebugging ) breakpoint();
+ return delegate.getHeader(headerName);
+ }
+
+ @Override
+ public String getHeader(CharSequence headerName) {
+ if( isDebugging ) breakpoint();
+ return delegate.getHeader(headerName);
+ }
+
+ @Override
+ public MultiMap params() {
+ if( isDebugging ) breakpoint();
+ return delegate.params();
+ }
+
+ @Override
+ public String getParam(String paramName) {
+ if( isDebugging ) breakpoint();
+ return delegate.getParam(paramName);
+ }
+
+ @Override
+ public String getParam(String paramName, String defaultValue) {
+ if( isDebugging ) breakpoint();
+ return delegate.getParam(paramName, defaultValue);
+ }
+
+ @Override
+ public SocketAddress remoteAddress() {
+ if( isDebugging ) breakpoint();
+ return delegate.remoteAddress();
+ }
+
+ @Override
+ public SocketAddress localAddress() {
+ if( isDebugging ) breakpoint();
+ return delegate.localAddress();
+ }
+
+ @Override
+ public SSLSession sslSession() {
+ if( isDebugging ) breakpoint();
+ return delegate.sslSession();
+ }
+
+ @Override
+ public X509Certificate[] peerCertificateChain() throws SSLPeerUnverifiedException {
+ if( isDebugging ) breakpoint();
+ return delegate.peerCertificateChain();
+ }
+
+ @Override
+ public String absoluteURI() {
+ if( isDebugging ) breakpoint();
+ return delegate.absoluteURI();
+ }
+
+ @Override
+ public HttpServerRequest bodyHandler(Handler<Buffer> bodyHandler) {
+ if( isDebugging ) breakpoint();
+ return delegate.bodyHandler(bodyHandler);
+ }
+
+ @Override
+ public HttpServerRequest body(Handler<AsyncResult<Buffer>> handler) {
+ if( isDebugging ) breakpoint();
+ return delegate.body(handler);
+ }
+
+ @Override
+ public Future<Buffer> body() {
+ if( isDebugging ) breakpoint();
+ return delegate.body();
+ }
+
+ @Override
+ public void end(Handler<AsyncResult<Void>> handler) {
+ if( isDebugging ) breakpoint();
+ delegate.end(handler);
+ }
+
+ @Override
+ public Future<Void> end() {
+ if( isDebugging ) breakpoint();
+ return delegate.end();
+ }
+
+ @Override
+ public void toNetSocket(Handler<AsyncResult<NetSocket>> handler) {
+ if( isDebugging ) breakpoint();
+ delegate.toNetSocket(handler);
+ }
+
+ @Override
+ public Future<NetSocket> toNetSocket() {
+ if( isDebugging ) breakpoint();
+ return delegate.toNetSocket();
+ }
+
+ @Override
+ public HttpServerRequest setExpectMultipart(boolean expect) {
+ if( isDebugging ) breakpoint();
+ return delegate.setExpectMultipart(expect);
+ }
+
+ @Override
+ public boolean isExpectMultipart() {
+ if( isDebugging ) breakpoint();
+ return delegate.isExpectMultipart();
+ }
+
+ @Override
+ public HttpServerRequest uploadHandler(Handler<HttpServerFileUpload> uploadHandler) {
+ if( isDebugging ) breakpoint();
+ return delegate.uploadHandler(uploadHandler);
+ }
+
+ @Override
+ public MultiMap formAttributes() {
+ if( isDebugging ) breakpoint();
+ return delegate.formAttributes();
+ }
+
+ @Override
+ public String getFormAttribute(String attributeName) {
+ if( isDebugging ) breakpoint();
+ return delegate.getFormAttribute(attributeName);
+ }
+
+ @Override
+ public int streamId() {
+ if( isDebugging ) breakpoint();
+ return delegate.streamId();
+ }
+
+ @Override
+ public void toWebSocket(Handler<AsyncResult<ServerWebSocket>> handler) {
+ if( isDebugging ) breakpoint();
+ delegate.toWebSocket(handler);
+ }
+
+ @Override
+ public Future<ServerWebSocket> toWebSocket() {
+ if( isDebugging ) breakpoint();
+ return delegate.toWebSocket();
+ }
+
+ @Override
+ public boolean isEnded() {
+ if( isDebugging ) breakpoint();
+ return delegate.isEnded();
+ }
+
+ @Override
+ public HttpServerRequest customFrameHandler(Handler<HttpFrame> handler) {
+ if( isDebugging ) breakpoint();
+ return delegate.customFrameHandler(handler);
+ }
+
+ @Override
+ public HttpConnection connection() {
+ if( isDebugging ) breakpoint();
+ return delegate.connection();
+ }
+
+ @Override
+ public StreamPriority streamPriority() {
+ if( isDebugging ) breakpoint();
+ return delegate.streamPriority();
+ }
+
+ @Override
+ public HttpServerRequest streamPriorityHandler(Handler<StreamPriority> handler) {
+ if( isDebugging ) breakpoint();
+ return delegate.streamPriorityHandler(handler);
+ }
+
+ @Override
+ public DecoderResult decoderResult() {
+ if( isDebugging ) breakpoint();
+ return delegate.decoderResult();
+ }
+
+ @Override
+ public Cookie getCookie(String name) {
+ if( isDebugging ) breakpoint();
+ return delegate.getCookie(name);
+ }
+
+ @Override
+ public Cookie getCookie(String name, String domain, String path) {
+ if( isDebugging ) breakpoint();
+ return delegate.getCookie(name, domain, path);
+ }
+
+ @Override
+ public int cookieCount() {
+ if( isDebugging ) breakpoint();
+ return delegate.cookieCount();
+ }
+
+ @Override
+ @Deprecated
+ public Map<String, Cookie> cookieMap() {
+ if( isDebugging ) breakpoint();
+ return delegate.cookieMap();
+ }
+
+ @Override
+ public Set<Cookie> cookies(String name) {
+ if( isDebugging ) breakpoint();
+ return delegate.cookies(name);
+ }
+
+ @Override
+ public Set<Cookie> cookies() {
+ if( isDebugging ) breakpoint();
+ return delegate.cookies();
+ }
+
+ @Override
+ public HttpServerRequest routed(String route) {
+ if( isDebugging ) breakpoint();
+ return delegate.routed(route);
+ }
+
+ @Override
+ public Pipe<Buffer> pipe() {
+ if( isDebugging ) breakpoint();
+ return delegate.pipe();
+ }
+
+ @Override
+ public Future<Void> pipeTo(WriteStream<Buffer> dst) {
+ if( isDebugging ) breakpoint();
+ return delegate.pipeTo(dst);
+ }
+
+ @Override
+ public void pipeTo(WriteStream<Buffer> dst, Handler<AsyncResult<Void>> handler) {
+ if( isDebugging ) breakpoint();
+ delegate.pipeTo(dst, handler);
+ }
+
+}
diff --git a/src/main/java/ch/hiddenalpha/unspecifiedgarbage/gateleenKludge/tmoutissue20240123/DelegateVertxHttpServerRequestInternal.java b/src/main/java/ch/hiddenalpha/unspecifiedgarbage/gateleenKludge/tmoutissue20240123/DelegateVertxHttpServerRequestInternal.java
new file mode 100644
index 0000000..92fe3fc
--- /dev/null
+++ b/src/main/java/ch/hiddenalpha/unspecifiedgarbage/gateleenKludge/tmoutissue20240123/DelegateVertxHttpServerRequestInternal.java
@@ -0,0 +1,475 @@
+package ch.hiddenalpha.unspecifiedgarbage.gateleenKludge.tmoutissue20240123;
+
+import io.netty.handler.codec.DecoderResult;
+import io.vertx.core.AsyncResult;
+import io.vertx.core.Context;
+import io.vertx.core.Future;
+import io.vertx.core.Handler;
+import io.vertx.core.MultiMap;
+import io.vertx.core.buffer.Buffer;
+import io.vertx.core.http.Cookie;
+import io.vertx.core.http.HttpConnection;
+import io.vertx.core.http.HttpFrame;
+import io.vertx.core.http.HttpMethod;
+import io.vertx.core.http.HttpServerFileUpload;
+import io.vertx.core.http.HttpServerRequest;
+import io.vertx.core.http.HttpServerResponse;
+import io.vertx.core.http.HttpVersion;
+import io.vertx.core.http.ServerWebSocket;
+import io.vertx.core.http.StreamPriority;
+import io.vertx.core.http.impl.HttpServerRequestInternal;
+import io.vertx.core.net.NetSocket;
+import io.vertx.core.net.SocketAddress;
+import io.vertx.core.streams.Pipe;
+import io.vertx.core.streams.WriteStream;
+import org.slf4j.Logger;
+
+import javax.net.ssl.SSLPeerUnverifiedException;
+import javax.net.ssl.SSLSession;
+import javax.security.cert.X509Certificate;
+import java.util.Map;
+import java.util.Set;
+
+import static org.slf4j.LoggerFactory.getLogger;
+
+public class DelegateVertxHttpServerRequestInternal implements HttpServerRequestInternal {
+
+ private static final Logger log = getLogger(DelegateVertxHttpServerRequestInternal.class);
+ private final HttpServerRequestInternal delegate;
+ private final boolean isDebugging = true;
+ private final String dbgHint;
+
+ public DelegateVertxHttpServerRequestInternal(String debugHint, HttpServerRequest delegate) {
+ log.trace("{}: new DelegateVertxHttpServerRequestInternal()", debugHint);
+ this.delegate = (HttpServerRequestInternal) delegate;
+ this.dbgHint = debugHint;
+ }
+
+ private void breakpoint(){
+ try{
+ throw new UnsupportedOperationException();
+ }catch(UnsupportedOperationException ex){}
+ }
+
+ @Override
+ public HttpServerRequest exceptionHandler(Handler<Throwable> handler) {
+ log.trace("{}: exceptionHandler(Hdlr<Ex>)", dbgHint);
+ if( isDebugging ) breakpoint();
+ return delegate.exceptionHandler(handler);
+ }
+
+ @Override
+ public HttpServerRequest handler(Handler<Buffer> handler) {
+ log.trace("{}: handler(Hdlr<Buf>)", dbgHint);
+ if( isDebugging ) breakpoint();
+ return delegate.handler(handler);
+ }
+
+ @Override
+ public HttpServerRequest pause() {
+ log.trace("{}: pause()", dbgHint);
+ if( isDebugging ) breakpoint();
+ return delegate.pause();
+ }
+
+ @Override
+ public HttpServerRequest resume() {
+ log.trace("{}: resume()", dbgHint);
+ if( isDebugging ) breakpoint();
+ return delegate.resume();
+ }
+
+ @Override
+ public HttpServerRequest fetch(long amount) {
+ log.trace("{}: fetch({})", dbgHint, amount);
+ if( isDebugging ) breakpoint();
+ return delegate.fetch(amount);
+ }
+
+ @Override
+ public HttpServerRequest endHandler(Handler<Void> endHandler) {
+ log.trace("{}: endHandler(Hdlr)", dbgHint);
+ if( isDebugging ) breakpoint();
+ return delegate.endHandler(endHandler);
+ }
+
+ @Override
+ public HttpVersion version() {
+ log.trace("{}: version()", dbgHint);
+ if( isDebugging ) breakpoint();
+ return delegate.version();
+ }
+
+ @Override
+ public HttpMethod method() {
+ log.trace("{}: method()", dbgHint);
+ if( isDebugging ) breakpoint();
+ return delegate.method();
+ }
+
+ @Override
+ public boolean isSSL() {
+ log.trace("{}: isSSL()", dbgHint);
+ if( isDebugging ) breakpoint();
+ return delegate.isSSL();
+ }
+
+ @Override
+ public String scheme() {
+ log.trace("{}: scheme()", dbgHint);
+ if( isDebugging ) breakpoint();
+ return delegate.scheme();
+ }
+
+ @Override
+ public String uri() {
+ log.trace("{}: uri()", dbgHint);
+ if( isDebugging ) breakpoint();
+ return delegate.uri();
+ }
+
+ @Override
+ public String path() {
+ log.trace("{}: path()", dbgHint);
+ if( isDebugging ) breakpoint();
+ return delegate.path();
+ }
+
+ @Override
+ public String query() {
+ log.trace("{}: query()", dbgHint);
+ if( isDebugging ) breakpoint();
+ return delegate.query();
+ }
+
+ @Override
+ public String host() {
+ log.trace("{}: host()", dbgHint);
+ if( isDebugging ) breakpoint();
+ return delegate.host();
+ }
+
+ @Override
+ public long bytesRead() {
+ log.trace("{}: bytesRead()", dbgHint);
+ if( isDebugging ) breakpoint();
+ return delegate.bytesRead();
+ }
+
+ @Override
+ public HttpServerResponse response() {
+ log.trace("{}: response()", dbgHint);
+ if( isDebugging ) breakpoint();
+ return delegate.response();
+ }
+
+ @Override
+ public MultiMap headers() {
+ log.trace("{}: headers()", dbgHint);
+ if( isDebugging ) breakpoint();
+ return delegate.headers();
+ }
+
+ @Override
+ public String getHeader(String headerName) {
+ log.trace("{}: getHeader(\"{}\")", dbgHint, headerName);
+ if( isDebugging ) breakpoint();
+ return delegate.getHeader(headerName);
+ }
+
+ @Override
+ public String getHeader(CharSequence headerName) {
+ log.trace("{}: getHeader(\"{}\")", dbgHint, headerName);
+ if( isDebugging ) breakpoint();
+ return delegate.getHeader(headerName);
+ }
+
+ @Override
+ public MultiMap params() {
+ log.trace("{}: params()", dbgHint);
+ if( isDebugging ) breakpoint();
+ return delegate.params();
+ }
+
+ @Override
+ public String getParam(String paramName) {
+ log.trace("{}: getParam(\"{}\")", dbgHint, paramName);
+ if( isDebugging ) breakpoint();
+ return delegate.getParam(paramName);
+ }
+
+ @Override
+ public String getParam(String paramName, String defaultValue) {
+ log.trace("{}: getParam(\"{}\", \"{}\")", dbgHint, paramName, defaultValue);
+ if( isDebugging ) breakpoint();
+ return delegate.getParam(paramName, defaultValue);
+ }
+
+ @Override
+ public SocketAddress remoteAddress() {
+ log.trace("{}: remoteAddress()", dbgHint);
+ if( isDebugging ) breakpoint();
+ return delegate.remoteAddress();
+ }
+
+ @Override
+ public SocketAddress localAddress() {
+ log.trace("{}: localAddress()", dbgHint);
+ if( isDebugging ) breakpoint();
+ return delegate.localAddress();
+ }
+
+ @Override
+ public SSLSession sslSession() {
+ log.trace("{}: sslSession()", dbgHint);
+ if( isDebugging ) breakpoint();
+ return delegate.sslSession();
+ }
+
+ @Override
+ public X509Certificate[] peerCertificateChain() throws SSLPeerUnverifiedException {
+ log.trace("{}: peerCertificateChain()", dbgHint);
+ if( isDebugging ) breakpoint();
+ return delegate.peerCertificateChain();
+ }
+
+ @Override
+ public String absoluteURI() {
+ log.trace("{}: absoluteURI()", dbgHint);
+ if( isDebugging ) breakpoint();
+ return delegate.absoluteURI();
+ }
+
+ @Override
+ public HttpServerRequest bodyHandler(Handler<Buffer> bodyHandler) {
+ log.trace("{}: bodyHandler(Hdlr<Buf>)", dbgHint);
+ if( isDebugging ) breakpoint();
+ return delegate.bodyHandler(bodyHandler);
+ }
+
+ @Override
+ public HttpServerRequest body(Handler<AsyncResult<Buffer>> handler) {
+ log.trace("{}: body(Hdlr)", dbgHint);
+ if( isDebugging ) breakpoint();
+ return delegate.body(handler);
+ }
+
+ @Override
+ public Future<Buffer> body() {
+ log.trace("{}: body(void)", dbgHint);
+ if( isDebugging ) breakpoint();
+ return delegate.body();
+ }
+
+ @Override
+ public void end(Handler<AsyncResult<Void>> handler) {
+ log.trace("{}: end(Hdlr)", dbgHint);
+ if( isDebugging ) breakpoint();
+ delegate.end(handler);
+ }
+
+ @Override
+ public Future<Void> end() {
+ log.trace("{}: end(void)", dbgHint);
+ if( isDebugging ) breakpoint();
+ return delegate.end();
+ }
+
+ @Override
+ public void toNetSocket(Handler<AsyncResult<NetSocket>> handler) {
+ log.trace("{}: toNetSocket(Hdlr)", dbgHint);
+ if( isDebugging ) breakpoint();
+ delegate.toNetSocket(handler);
+ }
+
+ @Override
+ public Future<NetSocket> toNetSocket() {
+ log.trace("{}: toNetSocket(void)", dbgHint);
+ if( isDebugging ) breakpoint();
+ return delegate.toNetSocket();
+ }
+
+ @Override
+ public HttpServerRequest setExpectMultipart(boolean expect) {
+ log.trace("{}: toNetSocket({})", dbgHint, expect);
+ if( isDebugging ) breakpoint();
+ return delegate.setExpectMultipart(expect);
+ }
+
+ @Override
+ public boolean isExpectMultipart() {
+ log.trace("{}: isExpectMultipart()", dbgHint);
+ if( isDebugging ) breakpoint();
+ return delegate.isExpectMultipart();
+ }
+
+ @Override
+ public HttpServerRequest uploadHandler(Handler<HttpServerFileUpload> uploadHandler) {
+ log.trace("{}: uploadHandler(Hdlr)", dbgHint);
+ if( isDebugging ) breakpoint();
+ return delegate.uploadHandler(uploadHandler);
+ }
+
+ @Override
+ public MultiMap formAttributes() {
+ log.trace("{}: formAttributes()", dbgHint);
+ if( isDebugging ) breakpoint();
+ return delegate.formAttributes();
+ }
+
+ @Override
+ public String getFormAttribute(String attributeName) {
+ log.trace("{}: getFormAttribute(\"{}\")", dbgHint, attributeName);
+ if( isDebugging ) breakpoint();
+ return delegate.getFormAttribute(attributeName);
+ }
+
+ @Override
+ public int streamId() {
+ log.trace("{}: streamId()", dbgHint);
+ if( isDebugging ) breakpoint();
+ return delegate.streamId();
+ }
+
+ @Override
+ public void toWebSocket(Handler<AsyncResult<ServerWebSocket>> handler) {
+ log.trace("{}: toWebSocket(Hdlr)", dbgHint);
+ if( isDebugging ) breakpoint();
+ delegate.toWebSocket(handler);
+ }
+
+ @Override
+ public Future<ServerWebSocket> toWebSocket() {
+ log.trace("{}: toWebSocket()", dbgHint);
+ if( isDebugging ) breakpoint();
+ return delegate.toWebSocket();
+ }
+
+ @Override
+ public boolean isEnded() {
+ log.trace("{}: isEnded()", dbgHint);
+ if( isDebugging ) breakpoint();
+ return delegate.isEnded();
+ }
+
+ @Override
+ public HttpServerRequest customFrameHandler(Handler<HttpFrame> handler) {
+ log.trace("{}: customFrameHandler(Hdlr)", dbgHint);
+ if( isDebugging ) breakpoint();
+ return delegate.customFrameHandler(handler);
+ }
+
+ @Override
+ public HttpConnection connection() {
+ log.trace("{}: connection()", dbgHint);
+ if( isDebugging ) breakpoint();
+ return delegate.connection();
+ }
+
+ @Override
+ public StreamPriority streamPriority() {
+ log.trace("{}: streamPriority()", dbgHint);
+ if( isDebugging ) breakpoint();
+ return delegate.streamPriority();
+ }
+
+ @Override
+ public HttpServerRequest streamPriorityHandler(Handler<StreamPriority> handler) {
+ log.trace("{}: streamPriorityHandler(Hdlr)", dbgHint);
+ if( isDebugging ) breakpoint();
+ return delegate.streamPriorityHandler(handler);
+ }
+
+ @Override
+ public DecoderResult decoderResult() {
+ log.trace("{}: decoderResult()", dbgHint);
+ if( isDebugging ) breakpoint();
+ return delegate.decoderResult();
+ }
+
+ @Override
+ public Cookie getCookie(String name) {
+ log.trace("{}: getCookie(\"{}\")", dbgHint, name);
+ if( isDebugging ) breakpoint();
+ return delegate.getCookie(name);
+ }
+
+ @Override
+ public Cookie getCookie(String name, String domain, String path) {
+ log.trace("{}: getCookie(\"{}\", Str, Str)", dbgHint, name);
+ if( isDebugging ) breakpoint();
+ return delegate.getCookie(name, domain, path);
+ }
+
+ @Override
+ public int cookieCount() {
+ log.trace("{}: cookieCount()", dbgHint);
+ if( isDebugging ) breakpoint();
+ return delegate.cookieCount();
+ }
+
+ @Override
+ @Deprecated
+ public Map<String, Cookie> cookieMap() {
+ log.trace("{}: cookieMap()", dbgHint);
+ if( isDebugging ) breakpoint();
+ return delegate.cookieMap();
+ }
+
+ @Override
+ public Set<Cookie> cookies(String name) {
+ log.trace("{}: cookies(\"{}\")", dbgHint, name);
+ if( isDebugging ) breakpoint();
+ return delegate.cookies(name);
+ }
+
+ @Override
+ public Set<Cookie> cookies() {
+ log.trace("{}: cookies(void)", dbgHint);
+ if( isDebugging ) breakpoint();
+ return delegate.cookies();
+ }
+
+ @Override
+ public HttpServerRequest routed(String route) {
+ log.trace("{}: routed(\"{}\")", dbgHint, route);
+ if( isDebugging ) breakpoint();
+ return delegate.routed(route);
+ }
+
+ @Override
+ public Pipe<Buffer> pipe() {
+ log.trace("{}: pipe()", dbgHint);
+ if( isDebugging ) breakpoint();
+ return delegate.pipe();
+ }
+
+ @Override
+ public Future<Void> pipeTo(WriteStream<Buffer> dst) {
+ log.trace("{}: pipeTo(WrStrm<Buf>)", dbgHint);
+ if( isDebugging ) breakpoint();
+ return delegate.pipeTo(dst);
+ }
+
+ @Override
+ public void pipeTo(WriteStream<Buffer> dst, Handler<AsyncResult<Void>> handler) {
+ log.trace("{}: pipeTo(WrStrm<Buf>,Hdlr)", dbgHint);
+ if( isDebugging ) breakpoint();
+ delegate.pipeTo(dst, handler);
+ }
+
+ @Override
+ public Context context() {
+ log.trace("{}: context()", dbgHint);
+ if( isDebugging ) breakpoint();
+ return delegate.context();
+ }
+
+ @Override
+ public Object metric() {
+ log.trace("{}: metric()", dbgHint);
+ if( isDebugging ) breakpoint();
+ return delegate.metric();
+ }
+
+}
diff --git a/src/main/java/ch/hiddenalpha/unspecifiedgarbage/gateleenKludge/tmoutissue20240123/DelegateVertxHttpServerResponse.java b/src/main/java/ch/hiddenalpha/unspecifiedgarbage/gateleenKludge/tmoutissue20240123/DelegateVertxHttpServerResponse.java
new file mode 100644
index 0000000..87ce5a9
--- /dev/null
+++ b/src/main/java/ch/hiddenalpha/unspecifiedgarbage/gateleenKludge/tmoutissue20240123/DelegateVertxHttpServerResponse.java
@@ -0,0 +1,111 @@
+package ch.hiddenalpha.unspecifiedgarbage.gateleenKludge.tmoutissue20240123;
+
+import io.vertx.core.AsyncResult;
+import io.vertx.core.Future;
+import io.vertx.core.Handler;
+import io.vertx.core.MultiMap;
+import io.vertx.core.buffer.Buffer;
+import io.vertx.core.http.Cookie;
+import io.vertx.core.http.HttpFrame;
+import io.vertx.core.http.HttpMethod;
+import io.vertx.core.http.HttpServerResponse;
+import io.vertx.core.http.StreamPriority;
+import io.vertx.core.streams.ReadStream;
+import org.slf4j.Logger;
+
+import java.util.Set;
+
+import static org.slf4j.LoggerFactory.getLogger;
+
+public class DelegateVertxHttpServerResponse implements HttpServerResponse {
+
+ private static final Logger log = getLogger(DelegateVertxHttpServerResponse.class);
+ private final HttpServerResponse delegate;
+ private final String dbgHint;
+
+ public DelegateVertxHttpServerResponse(String debugHint, HttpServerResponse delegate) {
+ this.dbgHint = debugHint;
+ this.delegate = delegate;
+ }
+
+ @Override public HttpServerResponse exceptionHandler(Handler<Throwable> handler) { log.trace("{}: exceptionHandler()", dbgHint); return delegate.exceptionHandler(handler); }
+ @Override public HttpServerResponse setWriteQueueMaxSize(int maxSize) { log.trace("{}: setWriteQueueMaxSize()", dbgHint); return delegate.setWriteQueueMaxSize(maxSize); }
+ @Override public HttpServerResponse drainHandler(Handler<Void> handler) { log.trace("{}: drainHandler()", dbgHint); return delegate.drainHandler(handler); }
+ @Override public int getStatusCode() { log.trace("{}: getStatusCode()", dbgHint); return delegate.getStatusCode(); }
+ @Override public HttpServerResponse setStatusCode(int statusCode) { log.trace("{}: setStatusCode()", dbgHint); return delegate.setStatusCode(statusCode); }
+ @Override public String getStatusMessage() { log.trace("{}: getStatusMessage()", dbgHint); return delegate.getStatusMessage(); }
+ @Override public HttpServerResponse setStatusMessage(String statusMessage) { log.trace("{}: setStatusMessage()", dbgHint); return delegate.setStatusMessage(statusMessage); }
+ @Override public HttpServerResponse setChunked(boolean chunked) { log.trace("{}: setChunked()", dbgHint); return delegate.setChunked(chunked); }
+ @Override public boolean isChunked() { log.trace("{}: isChunked()", dbgHint); return delegate.isChunked(); }
+ @Override public MultiMap headers() { log.trace("{}: headers()", dbgHint); return delegate.headers(); }
+ @Override public HttpServerResponse putHeader(String name, String value) { log.trace("{}: putHeader(Str,Str)", dbgHint); return delegate.putHeader(name, value); }
+ @Override public HttpServerResponse putHeader(CharSequence name, CharSequence value) { log.trace("{}: putHeader(ChrSeq,ChrSeq)", dbgHint); return delegate.putHeader(name, value); }
+ @Override public HttpServerResponse putHeader(String name, Iterable<String> values) { log.trace("{}: putHeader(Str,Iter<Str>)", dbgHint); return delegate.putHeader(name, values); }
+ @Override public HttpServerResponse putHeader(CharSequence name, Iterable<CharSequence> values) { log.trace("{}: putHeader(ChrSeq,Iter<ChrSeq>)", dbgHint); return delegate.putHeader(name, values); }
+ @Override public MultiMap trailers() { log.trace("{}: trailers()", dbgHint); return delegate.trailers(); }
+ @Override public HttpServerResponse putTrailer(String name, String value) { log.trace("{}: putTrailer(Str,Str)", dbgHint); return delegate.putTrailer(name, value); }
+ @Override public HttpServerResponse putTrailer(CharSequence name, CharSequence value) { log.trace("{}: putTrailer(ChrSeq,ChrSeq)", dbgHint); return delegate.putTrailer(name, value); }
+ @Override public HttpServerResponse putTrailer(String name, Iterable<String> values) { log.trace("{}: putTrailer(Str,Iter<Str>)", dbgHint); return delegate.putTrailer(name, values); }
+ @Override public HttpServerResponse putTrailer(CharSequence name, Iterable<CharSequence> value) { log.trace("{}: putTrailer(ChrSeq,Iter<ChrSeq>)", dbgHint); return delegate.putTrailer(name, value); }
+ @Override public HttpServerResponse closeHandler(Handler<Void> handler) { log.trace("{}: closeHandler()", dbgHint); return delegate.closeHandler(handler); }
+ @Override public HttpServerResponse endHandler(Handler<Void> handler) { log.trace("{}: endHandler()", dbgHint); return delegate.endHandler(handler); }
+ @Override public Future<Void> write(String chunk, String enc) { log.trace("{}: write(Str,Str)", dbgHint); return delegate.write(chunk, enc); }
+ @Override public void write(String chunk, String enc, Handler<AsyncResult<Void>> handler) { log.trace("{}: write(Str,Str,Hdlr)", dbgHint); delegate.write(chunk, enc, handler); }
+ @Override public Future<Void> write(String chunk) { log.trace("{}: write(Str)", dbgHint); return delegate.write(chunk); }
+ @Override public void write(String chunk, Handler<AsyncResult<Void>> handler) { log.trace("{}: write(Str,Hdlr)", dbgHint); delegate.write(chunk, handler); }
+ @Override public HttpServerResponse writeContinue() { log.trace("{}: writeContinue()", dbgHint); return delegate.writeContinue(); }
+ @Override public Future<Void> end(String chunk) { log.trace("{}: end(Str)", dbgHint); return delegate.end(chunk); }
+ @Override public void end(String chunk, Handler<AsyncResult<Void>> handler) { log.trace("{}: end(Str,Hdlr)", dbgHint); delegate.end(chunk, handler); }
+ @Override public Future<Void> end(String chunk, String enc) { log.trace("{}: end(Str,Str)", dbgHint); return delegate.end(chunk, enc); }
+ @Override public void end(String chunk, String enc, Handler<AsyncResult<Void>> handler) { log.trace("{}: end(Str,Str,Hdlr)", dbgHint); delegate.end(chunk, enc, handler); }
+ @Override public Future<Void> end(Buffer chunk) { log.trace("{}: end(Buf)", dbgHint); return delegate.end(chunk); }
+ @Override public void end(Buffer chunk, Handler<AsyncResult<Void>> handler) { log.trace("{}: end(Buf,Hdlr)", dbgHint); delegate.end(chunk, handler); }
+ @Override public Future<Void> end() { log.trace("{}: end(void)", dbgHint); return delegate.end(); }
+ @Override public void send(Handler<AsyncResult<Void>> handler) { log.trace("{}: send(Hdlr)", dbgHint); delegate.send(handler); }
+ @Override public Future<Void> send() { log.trace("{}: send(void)", dbgHint); return delegate.send(); }
+ @Override public void send(String body, Handler<AsyncResult<Void>> handler) { log.trace("{}: send(Str,Hdlr)", dbgHint); delegate.send(body, handler); }
+ @Override public Future<Void> send(String body) { log.trace("{}: send(Str)", dbgHint); return delegate.send(body); }
+ @Override public void send(Buffer body, Handler<AsyncResult<Void>> handler) { log.trace("{}: send(Buf,Hdlr)", dbgHint); delegate.send(body, handler); }
+ @Override public Future<Void> send(Buffer body) { log.trace("{}: send(Buf)", dbgHint); return delegate.send(body); }
+ @Override public void send(ReadStream<Buffer> body, Handler<AsyncResult<Void>> handler) { log.trace("{}: send(RdStr<Buf>,Hdlr)", dbgHint); delegate.send(body, handler); }
+ @Override public Future<Void> send(ReadStream<Buffer> body) { log.trace("{}: send(RdStr<Buf>)", dbgHint); return delegate.send(body); }
+ @Override public Future<Void> sendFile(String filename) { log.trace("{}: sendFile(Str)", dbgHint); return delegate.sendFile(filename); }
+ @Override public Future<Void> sendFile(String filename, long offset) { log.trace("{}: sendFile(Str,lng)", dbgHint); return delegate.sendFile(filename, offset); }
+ @Override public Future<Void> sendFile(String filename, long offset, long length) { log.trace("{}: sendFile(Str,lng,lng)", dbgHint); return delegate.sendFile(filename, offset, length); }
+ @Override public HttpServerResponse sendFile(String filename, Handler<AsyncResult<Void>> resultHandler) { log.trace("{}: sendFile(Str,Hdlr)", dbgHint); return delegate.sendFile(filename, resultHandler); }
+ @Override public HttpServerResponse sendFile(String filename, long offset, Handler<AsyncResult<Void>> resultHandler) { log.trace("{}: sendFile(Str,lng,Hdlr)", dbgHint); return delegate.sendFile(filename, offset, resultHandler); }
+ @Override public HttpServerResponse sendFile(String filename, long offset, long length, Handler<AsyncResult<Void>> resultHandler) { log.trace("{}: sendFile(Str,lng,lng,Hdlr)", dbgHint); return delegate.sendFile(filename, offset, length, resultHandler); }
+ @Override public void close() { log.trace("{}: close()", dbgHint); delegate.close(); }
+ @Override public boolean ended() { log.trace("{}: ended()", dbgHint); return delegate.ended(); }
+ @Override public boolean closed() { log.trace("{}: closed()", dbgHint); return delegate.closed(); }
+ @Override public boolean headWritten() { log.trace("{}: headWritten()", dbgHint); return delegate.headWritten(); }
+ @Override public HttpServerResponse headersEndHandler(Handler<Void> handler) { log.trace("{}: headersEndHandler()", dbgHint); return delegate.headersEndHandler(handler); }
+ @Override public HttpServerResponse bodyEndHandler(Handler<Void> handler) { log.trace("{}: bodyEndHandler()", dbgHint); return delegate.bodyEndHandler(handler); }
+ @Override public long bytesWritten() { log.trace("{}: bytesWritten()", dbgHint); return delegate.bytesWritten(); }
+ @Override public int streamId() { log.trace("{}: streamId()", dbgHint); return delegate.streamId(); }
+ @Override public HttpServerResponse push(HttpMethod method, String host, String path, Handler<AsyncResult<HttpServerResponse>> handler) { log.trace("{}: push(Mthd,Str,Str,Hdlr)", dbgHint); return delegate.push(method, host, path, handler); }
+ @Override public Future<HttpServerResponse> push(HttpMethod method, String host, String path) { log.trace("{}: push(Mthd,Str,Str)", dbgHint); return delegate.push(method, host, path); }
+ @Override public HttpServerResponse push(HttpMethod method, String path, MultiMap headers, Handler<AsyncResult<HttpServerResponse>> handler) { log.trace("{}: push(Mthd,Str,Map,Hdlr)", dbgHint); return delegate.push(method, path, headers, handler); }
+ @Override public Future<HttpServerResponse> push(HttpMethod method, String path, MultiMap headers) { log.trace("{}: push(Mthd,Str,Map)", dbgHint); return delegate.push(method, path, headers); }
+ @Override public HttpServerResponse push(HttpMethod method, String path, Handler<AsyncResult<HttpServerResponse>> handler) { log.trace("{}: push(Mthd,Str,Hdlr)", dbgHint); return delegate.push(method, path, handler); }
+ @Override public Future<HttpServerResponse> push(HttpMethod method, String path) { log.trace("{}: push(Mthd,Str)", dbgHint); return delegate.push(method, path); }
+ @Override public HttpServerResponse push(HttpMethod method, String host, String path, MultiMap headers, Handler<AsyncResult<HttpServerResponse>> handler) { log.trace("{}: push(Mthd,Str,Str,Map,Hdlr)", dbgHint); return delegate.push(method, host, path, headers, handler); }
+ @Override public Future<HttpServerResponse> push(HttpMethod method, String host, String path, MultiMap headers) { log.trace("{}: push(Mthd,Str,Str,Map)", dbgHint); return delegate.push(method, host, path, headers); }
+ @Override public boolean reset() { log.trace("{}: reset(void)", dbgHint); return delegate.reset(); }
+ @Override public boolean reset(long code) { log.trace("{}: reset({})", dbgHint, code); return delegate.reset(code); }
+ @Override public HttpServerResponse writeCustomFrame(int type, int flags, Buffer payload) { log.trace("{}: writeCustomFrame({}, {}, Buf)", dbgHint, type, flags); return delegate.writeCustomFrame(type, flags, payload); }
+ @Override public HttpServerResponse writeCustomFrame(HttpFrame frame) { log.trace("{}: writeCustomFrame()", dbgHint); return delegate.writeCustomFrame(frame); }
+ @Override public HttpServerResponse setStreamPriority(StreamPriority streamPriority) { log.trace("{}: setStreamPriority()", dbgHint); return delegate.setStreamPriority(streamPriority); }
+ @Override public HttpServerResponse addCookie(Cookie cookie) { log.trace("{}: addCookie()", dbgHint); return delegate.addCookie(cookie); }
+ @Override public Cookie removeCookie(String name) { log.trace("{}: removeCookie({})", dbgHint, name); return delegate.removeCookie(name); }
+ @Override public Cookie removeCookie(String name, boolean invalidate) { log.trace("{}: removeCookie({}, {})", dbgHint, name, invalidate); return delegate.removeCookie(name, invalidate); }
+ @Override public Set<Cookie> removeCookies(String name) { log.trace("{}: removeCookies({})", dbgHint, name); return delegate.removeCookies(name); }
+ @Override public Set<Cookie> removeCookies(String name, boolean invalidate) { log.trace("{}: removeCookies({}, {})", dbgHint, name, invalidate); return delegate.removeCookies(name, invalidate); }
+ @Override public Cookie removeCookie(String name, String domain, String path) { log.trace("{}: removeCookie({}, Str, Str)", dbgHint, name); return delegate.removeCookie(name, domain, path); }
+ @Override public Cookie removeCookie(String name, String domain, String path, boolean invalidate) { log.trace("{}: removeCookie({}, Str, Str, {})", dbgHint, name, invalidate); return delegate.removeCookie(name, domain, path, invalidate); }
+ @Override public Future<Void> write(Buffer data) { log.trace("{}: write(Buf)", dbgHint); return delegate.write(data); }
+ @Override public void write(Buffer data, Handler<AsyncResult<Void>> handler) { log.trace("{}: write(Buf, Hdlr)", dbgHint); delegate.write(data, handler); }
+ @Override public void end(Handler<AsyncResult<Void>> handler) { log.trace("{}: end(Hdlr)", dbgHint); delegate.end(handler); }
+ @Override public boolean writeQueueFull() { log.trace("{}: writeQueueFull()", dbgHint); return delegate.writeQueueFull(); }
+
+}
diff --git a/src/main/java/ch/hiddenalpha/unspecifiedgarbage/gateleenKludge/tmoutissue20240123/Foo.java b/src/main/java/ch/hiddenalpha/unspecifiedgarbage/gateleenKludge/tmoutissue20240123/Foo.java
new file mode 100644
index 0000000..8295088
--- /dev/null
+++ b/src/main/java/ch/hiddenalpha/unspecifiedgarbage/gateleenKludge/tmoutissue20240123/Foo.java
@@ -0,0 +1,131 @@
+package ch.hiddenalpha.unspecifiedgarbage.gateleenKludge.tmoutissue20240123;
+
+import io.vertx.core.http.HttpServerRequest;
+import io.vertx.ext.web.RoutingContext;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import static java.lang.System.currentTimeMillis;
+
+
+public class Foo {
+
+ private static final Logger log = Foo.getLogger(Foo.class);
+ private static final boolean assertRequestEquality = false;
+ private static HttpServerRequest serverInfoRequest;
+ private static io.vertx.core.http.impl.HttpServerRequestInternal restStorageEvBusAdaptMappdHttpServReq;
+ private static long onBeginRouteEpochMs;
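+
+ /* assertRequestEquality: flip to true to additionally assert that the very
+ * same HttpServerRequest instance shows up in the hooks below (off by
+ * default, as wrapped requests are not always identity-equal). */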
+
+ public static synchronized void onNewServerInfoRequst(HttpServerRequest request){
+ if( !isServerInfoRequst(request) ) return;
+ //assert serverInfoRequest == null;
+ log.trace("onNewServerInfoRequst()");
+ serverInfoRequest = request;
+ }
+
+ public static void downReqBegin(HttpServerRequest req) {
+ if( !isServerInfoRequst(req) ) return;
+ log.trace("downReqBegin()");
+ assert !assertRequestEquality || serverInfoRequest == req;
+ }
+
+ public static void downReqAuthorized(HttpServerRequest req) {
+ if( !isServerInfoRequst(req) ) return;
+ log.trace("downReqAuthorized()");
+ assert !assertRequestEquality || serverInfoRequest == req;
+ }
+
+ public static void onBeforeMainVerticleRouteGeneric(HttpServerRequest req) {
+ if( !isServerInfoRequst(req) ) return;
+ log.trace("onBeforeMainVerticleRouteGeneric()");
+ onBeginRouteEpochMs = currentTimeMillis();
+ assert !assertRequestEquality || serverInfoRequest == req;
+ }
+
+ public static Logger getLogger(Class<?> clazz) {
+ assert clazz != null;
+ return getLogger(clazz.getName());
+ }
+
+ public static Logger getLogger(String name) {
+ assert name != null;
+ return LoggerFactory.getLogger("FOO."+ name);
+ }
+
+ public static boolean isServerInfoRequst(HttpServerRequest request) {
+ return isServerInfoRequst(request.uri());
+ }
+
+ private static boolean isServerInfoRequst(String uri) {
+ assert uri != null;
+ assert uri.startsWith("/");
+ try{
+ if( "/houston/server/info".equals(uri) ){
+ //log.trace("true <- isServerInfoRequst({})", uri);
+ return true;
+ }
+ //log.trace("false <- isServerInfoRequst({})", uri);
+ return false;
+ }catch(Throwable ex){
+ assert false;
+ throw ex;
+ }
+ }
+
+ public static void onBeforeEvBusAdapterDataHandler(String uri) {
+ if( !isServerInfoRequst(uri) ) return;
+ log.trace("onBeforeEvBusAdapterDataHandler({})", uri);
+ assert false;
+ }
+
+ public static void onBeforeEvBusAdapterEndHandler(String uri) {
+ if( !isServerInfoRequst(uri)) return;
+ log.trace("onBeforeEvBusAdapterEndHandler({})", uri);
+ assert false;
+ }
+
+ public static void onEvBusAdapterHandle(io.vertx.core.http.impl.HttpServerRequestInternal req) {
+ if( !isServerInfoRequst(req.uri()) ) return;
+ assert !assertRequestEquality || serverInfoRequest != req;
+ assert restStorageEvBusAdaptMappdHttpServReq == null;
+ log.trace("onEvBusAdapterHandle({})", req.uri());
+ restStorageEvBusAdaptMappdHttpServReq = req;
+ }
+
+ public static void onEvBusAdapterError(Throwable ex) {
+ log.error("onEvBusAdapterError()", new Exception("stacktrace", ex));
+ }
+
+ public static void onRestStorageHandlerHandle(HttpServerRequest req) {
+ if( !isServerInfoRequst(req) ) return;
+ log.trace("onRestStorageHandlerHandle({})", req.uri());
+ assert !assertRequestEquality || serverInfoRequest == req;
+ }
+
+ public static void onRestStorageHandler_getResource(io.vertx.ext.web.RoutingContext ctx) {
+ if( !isServerInfoRequst(ctx.request()) ) return;
+ assert !assertRequestEquality || serverInfoRequest == ctx.request();
+ log.trace("onRestStorageHandler_getResource({})", ctx.request().uri());
+ }
+
+ public static void onRestStorageHandler_getResource_before_storage_get(String path, int offset, int limit) {
+ //log.trace("onRestStorageHandler_getResource_before_storage_get({}, {}, {})", path, offset, limit);
+ }
+
+ public static void onRestStorageHandler_getResource_after_storage_get(String path, int offset, int limit, Object/*org.swisspush.reststorage.Resource*/ resource) {
+ //log.trace("onRestStorageHandler_getResource_after_storage_get({})", path);
+ }
+
+ public static void onGetHoustonServerInfo(RoutingContext ctx) {
+ var req = ctx.request();
+ log.trace("onGetHoustonServerInfo({})", req.uri());
+ assert !assertRequestEquality || serverInfoRequest != req;
+ }
+
+ public static void onEndCompleted(long responseBegEpochMs){
+ long nowEpochMs = currentTimeMillis();
+ log.debug("Request took {}ms and {}ms", nowEpochMs - onBeginRouteEpochMs, nowEpochMs - responseBegEpochMs);
+ }
+
+}
+
diff --git a/src/main/java/ch/hiddenalpha/unspecifiedgarbage/gateleenKludge/tmoutissue20240123/HoustonInfoRequestTracer.java b/src/main/java/ch/hiddenalpha/unspecifiedgarbage/gateleenKludge/tmoutissue20240123/HoustonInfoRequestTracer.java
new file mode 100644
index 0000000..a011c7f
--- /dev/null
+++ b/src/main/java/ch/hiddenalpha/unspecifiedgarbage/gateleenKludge/tmoutissue20240123/HoustonInfoRequestTracer.java
@@ -0,0 +1,265 @@
+package ch.hiddenalpha.unspecifiedgarbage.gateleenKludge.tmoutissue20240123;
+
+import io.vertx.core.http.HttpMethod;
+import io.vertx.core.http.HttpServerRequest;
+import io.vertx.core.http.HttpServerResponse;
+import org.slf4j.Logger;
+
+import java.lang.reflect.Field;
+import java.util.NoSuchElementException;
+
+import static java.lang.System.currentTimeMillis;
+import static org.slf4j.LoggerFactory.getLogger;
+
+
+/**
+ * <p>This class got introduced to trace timings of "/houston/server/info"
+ * requests. It is optimized for exactly this purpose AND NOTHING ELSE! It was
+ * introduced because SDCISA-13746 is only observable on PROD. It does not
+ * reproduce locally, and not even on TEST, INT or PREPROD. So we do not really
+ * have another choice but to trace this bug down directly on PROD itself.
+ * Unluckily that is not so simple. First, debugging/testing on a PROD env
+ * always carries some risk. Second, our feedback loop is terribly slow due to
+ * our heavyweight deployment process. So to see whether this code actually
+ * does what it should, we likely have to wait up to SEVERAL MONTHS.</p>
+ */
+public class HoustonInfoRequestTracer implements org.swisspush.gateleen.core.debug.InfoRequestTracer {
+
+ private static final Logger log = getLogger(HoustonInfoRequestTracer.class);
+ private static final String INFO_URI = "/houston/server/info";
+ private static final int MAX_REQUESTS = 8; /*WARN: do NOT go too high*/
+ private static final Long NO_VALUE = Long.MIN_VALUE / 2;
+ private static final Class<?> wrapperClazz;
+ private static final Field delegateField;
+ private static final int
+ FLG_WritingHttpResponseHasReturned = 1 << 0,
+ FLG_WritingHttpResponseEnd = 1 << 1,
+ FLG_slotIsBusy = 1 << 2;
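+ /* Per-slot state is a small bitset: FLG_slotIsBusy marks a slot as taken,
+ * and the two FLG_Writing* bits get set independently; a slot is reported
+ * and freed only once both of them are present (see tryCompletingRequest). */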
+ private final int requestDurationBailThresholdLowMs = 42; /* requests faster than 42 millis are likely not interesting */
+ private final Object requestSlotLock = new Object();
+ private final HttpServerRequest[]
+ requestInstances = new HttpServerRequest[MAX_REQUESTS];
+ private int slotReUseOffset;
+ private final int[]
+ requestFlg = new int[MAX_REQUESTS];
+ private final long[]
+ requestNewHttpReqEpochMs = new long[MAX_REQUESTS],
+ authorizerBeginMs = new long[MAX_REQUESTS],
+ authorizerEndMs = new long[MAX_REQUESTS],
+ beforeCatchallRouting = new long[MAX_REQUESTS],
+ responseGotRequestedMs = new long[MAX_REQUESTS],
+ writingResponseBeginMs = new long[MAX_REQUESTS],
+ writingResponseHasReturnedMs = new long[MAX_REQUESTS],
+ writingResponseEndMs = new long[MAX_REQUESTS],
+ requestDoneMs = new long[MAX_REQUESTS];
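+ /* Timings live in plain parallel arrays indexed by slot, so tracing does
+ * not allocate on the hot path. Except for requestNewHttpReqEpochMs (an
+ * absolute epoch), all values are millis relative to that slot's start. */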
+
+ static {
+ try {
+ wrapperClazz = Class.forName("io.vertx.ext.web.impl.HttpServerRequestWrapper");
+ delegateField = wrapperClazz.getDeclaredField("delegate");
+ delegateField.setAccessible(true);
+ } catch (ClassNotFoundException | NoSuchFieldException ex) {
+ assert false : "TODO_395w8zuj";
+ throw new UnsupportedOperationException(/*TODO*/"Not impl yet", ex);
+ }
+ }
+
+ public void onNewHttpRequest(HttpServerRequest req) {
+ if( !isOfInterestEvenReqNotYetSeen(req) ) return;
+ req = unwrap(req);
+ int reqIdx;
+ synchronized (requestSlotLock){
+ reqIdx = getFreeSlotIdx();
+ if( reqIdx == -2 ) {
+ log.debug("No more space to trace yet another request");
+ return;
+ }
+ assert reqIdx >= 0 && reqIdx < MAX_REQUESTS;
+ assert !alreadyKnowRequest(req) : "TODO what if..";
+ requestFlg[reqIdx] = FLG_slotIsBusy;
+ }
+ requestInstances[reqIdx] = req;
+ requestNewHttpReqEpochMs[reqIdx] = currentTimeMillis();
+ authorizerBeginMs[reqIdx] = NO_VALUE;
+ authorizerEndMs[reqIdx] = NO_VALUE;
+ beforeCatchallRouting[reqIdx] = NO_VALUE;
+ responseGotRequestedMs[reqIdx] = NO_VALUE;
+ writingResponseBeginMs[reqIdx] = NO_VALUE;
+ writingResponseHasReturnedMs[reqIdx] = NO_VALUE;
+ writingResponseEndMs[reqIdx] = NO_VALUE;
+ requestDoneMs[reqIdx] = NO_VALUE;
+ }
+
+ public void onHttpRequestError(HttpServerRequest req, Throwable ex) {
+ if( !isOfInterest(req) ) return;
+ int reqIdx = getIdxOf(req);
+ long durMs = currentTimeMillis() - requestNewHttpReqEpochMs[reqIdx];
+ throw new UnsupportedOperationException(/*TODO*/"Not impl yet. Took "+durMs+"ms", ex);
+ }
+
+ public void onAuthorizerBegin(HttpServerRequest req) {
+ if( !isOfInterest(req) ) return;
+ int reqIdx = getIdxOf(req);
+ authorizerBeginMs[reqIdx] = currentTimeMillis() - requestNewHttpReqEpochMs[reqIdx];
+ }
+
+ public void onAuthorizerEnd(HttpServerRequest req) {
+ if( !isOfInterest(req) ) return;
+ int reqIdx = getIdxOf(req);
+ authorizerEndMs[reqIdx] = currentTimeMillis() - requestNewHttpReqEpochMs[reqIdx];
+ }
+
+ public HttpServerRequest filterRequestBeforeCallingCatchallRouter(HttpServerRequest req) {
+ if( !isOfInterest(req) ) return req;
+ int reqIdx = getIdxOf(req);
+ beforeCatchallRouting[reqIdx] = currentTimeMillis() - requestNewHttpReqEpochMs[reqIdx];
+ return new InterceptingServerRequest("ai9oh8urtgj", req);
+ }
+
+ private void onHttpResponseGotRequested(HttpServerRequest req) {
+ assert isOfInterest(req);
+ int reqIdx = getIdxOf(req);
+ responseGotRequestedMs[reqIdx] = currentTimeMillis() - requestNewHttpReqEpochMs[reqIdx];
+ }
+
+ public void onWritingHttpResponseBegin(HttpServerRequest req) {
+ assert isOfInterest(req);
+ int reqIdx = getIdxOf(req);
+ writingResponseBeginMs[reqIdx] = currentTimeMillis() - requestNewHttpReqEpochMs[reqIdx];
+ }
+
+ public void onWritingHttpResponseHasReturned(HttpServerRequest req) {
+ assert isOfInterest(req);
+ int reqIdx = getIdxOf(req);
+ writingResponseHasReturnedMs[reqIdx] = currentTimeMillis() - requestNewHttpReqEpochMs[reqIdx];
+ requestFlg[reqIdx] |= FLG_WritingHttpResponseHasReturned;
+ tryCompletingRequest(reqIdx);
+ }
+
+ public void onWritingHttpResponseEnd(Throwable ex, HttpServerRequest req) {
+ assert ex == null;
+ assert isOfInterest(req);
+ int reqIdx = getIdxOf(req);
+ writingResponseEndMs[reqIdx] = currentTimeMillis() - requestNewHttpReqEpochMs[reqIdx];
+ requestFlg[reqIdx] |= FLG_WritingHttpResponseEnd;
+ tryCompletingRequest(reqIdx);
+ }
+
+ private void tryCompletingRequest(int reqIdx) {
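+ /* A request counts as complete only once writing the response has both
+ * returned AND reported its end; whichever of the two events arrives last
+ * triggers the report and frees the slot for reuse. */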
+ int requestIsDoneMask = FLG_WritingHttpResponseHasReturned | FLG_WritingHttpResponseEnd;
+ if ((requestFlg[reqIdx] & requestIsDoneMask) != requestIsDoneMask) return;
+ requestDoneMs[reqIdx] = currentTimeMillis() - requestNewHttpReqEpochMs[reqIdx];
+ report(reqIdx);
+ /* free our slot */
+ synchronized (requestSlotLock){
+ requestFlg[reqIdx] &= ~FLG_slotIsBusy;
+ requestInstances[reqIdx] = null;
+ }
+ }
+
+ private void report(int reqIdx) {
+ if( requestDoneMs[reqIdx] < requestDurationBailThresholdLowMs ){
+ /*fast requests usually are not worth logging, we're interested in the slow requests only*/
+ if (log.isTraceEnabled()) log.trace(
+ "Req took {}ms (authBeg={}ms, authEnd={}ms, route={}ms, getRsp={}ms, wrBeg={}ms, wrRet={}ms, wrEnd={}ms)",
+ requestDoneMs[reqIdx],
+ authorizerBeginMs[reqIdx],
+ authorizerEndMs[reqIdx],
+ beforeCatchallRouting[reqIdx],
+ responseGotRequestedMs[reqIdx],
+ writingResponseBeginMs[reqIdx],
+ writingResponseHasReturnedMs[reqIdx],
+ writingResponseEndMs[reqIdx]);
+ }else{
+ /*slow requests are interesting*/
+ log.info("Req took {}ms (authBeg={}ms, authEnd={}ms, route={}ms, getRsp={}ms, wrBeg={}ms, wrRet={}ms, wrEnd={}ms)",
+ requestDoneMs[reqIdx],
+ authorizerBeginMs[reqIdx],
+ authorizerEndMs[reqIdx],
+ beforeCatchallRouting[reqIdx],
+ responseGotRequestedMs[reqIdx],
+ writingResponseBeginMs[reqIdx],
+ writingResponseHasReturnedMs[reqIdx],
+ writingResponseEndMs[reqIdx]);
+ }
+ }
+
+ private boolean isOfInterest(HttpServerRequest req){
+ if( !isOfInterestEvenReqNotYetSeen(req) ) return false;
+ if( !alreadyKnowRequest(req) ) return false; // without a start point, we cannot report anything useful
+ return true;
+ }
+
+ private boolean isOfInterestEvenReqNotYetSeen(HttpServerRequest req) {
+ if( !log.isInfoEnabled() ) return false; // if we produce no output, it makes no sense to burn CPU for it
+ if( !HttpMethod.GET.equals(req.method()) ) return false; // Only GET is interesting for us
+ if( !INFO_URI.equals(req.uri()) ) return false; // Only this specific URI is of interest
+ return true;
+ }
+
+ private int getIdxOf(HttpServerRequest req) {
+ req = unwrap(req);
+ for( int idx = 0 ; idx < MAX_REQUESTS ; ++idx ){
+ if( requestInstances[idx] == req ) return idx;
+ }
+ assert false : "why does this happen?";
+ throw new NoSuchElementException(/*TODO*/"Not impl yet");
+ }
+
+ /** @return either index of free slot or -2 if no slot available */
+ private int getFreeSlotIdx() {
+ for( int i = 0 ; i < MAX_REQUESTS ; ++i ){
+ /* parenthesize before taking the modulo; "i + offset % MAX" would index past the array */
+ int idx = (i + slotReUseOffset) % MAX_REQUESTS;
+ if( (requestFlg[idx] & FLG_slotIsBusy) == 0 ) {
+ slotReUseOffset = idx + 1;
+ return idx;
+ }
+ }
+ return -2;
+ }
+
+ private boolean alreadyKnowRequest(HttpServerRequest req) {
+ req = unwrap(req);
+ for( int i = 0 ; i < MAX_REQUESTS ; ++i ){
+ if((requestFlg[i] & FLG_slotIsBusy) == 0) continue;
+ if( requestInstances[i] == req ) return true;
+ }
+ return false;
+ }
+
+ private HttpServerRequest unwrap(HttpServerRequest req){
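+ /* Wrapper layers (our InterceptingServerRequest and vertx-web's
+ * HttpServerRequestWrapper) may nest in any order, so keep peeling in
+ * turns until a full pass no longer changes anything. */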
+ for( boolean hasChanged = true ; hasChanged ;){
+ hasChanged = false;
+ while (req instanceof InterceptingServerRequest) {
+ hasChanged = true;
+ req = ((InterceptingServerRequest) req).delegate;
+ }
+ while(wrapperClazz.isInstance(req)){
+ hasChanged = true;
+ try {
+ req = (HttpServerRequest) delegateField.get(req);
+ } catch (IllegalAccessException ex) {
+ throw new UnsupportedOperationException(/*TODO*/"Not impl yet", ex);
+ }
+ }
+ }
+ assert req != null;
+ return req;
+ }
+
+ private class InterceptingServerRequest extends DelegateVertxHttpServerRequestInternal {
+ private final HttpServerRequest delegate;
+
+ public InterceptingServerRequest(String debugHint, HttpServerRequest delegate) {
+ super(debugHint, delegate);
+ assert isOfInterest(delegate);
+ this.delegate = delegate;
+ }
+
+ @Override public HttpServerResponse response() {
+ assert isOfInterest(delegate);
+ onHttpResponseGotRequested(delegate);
+ return super.response();
+ }
+ }
+
+
+}
diff --git a/src/main/java/ch/hiddenalpha/unspecifiedgarbage/stream/StreamUtils.java b/src/main/java/ch/hiddenalpha/unspecifiedgarbage/stream/StreamUtils.java
index 889b3f1..bebe970 100644
--- a/src/main/java/ch/hiddenalpha/unspecifiedgarbage/stream/StreamUtils.java
+++ b/src/main/java/ch/hiddenalpha/unspecifiedgarbage/stream/StreamUtils.java
@@ -25,6 +25,21 @@ public class StreamUtils {
return totalBytes;
}
+ public static Runnable newCopyTask(java.io.InputStream src, java.io.OutputStream dst, boolean doCloseDst){
+ return ()->{
+ try{
+ for( byte[] buf = new byte[8291] ;; ){
+ int readLen = src.read(buf, 0, buf.length);
+ if( readLen == -1 ) break;
+ dst.write(buf, 0, readLen);
+ }
+ if( doCloseDst ) dst.close();
+ }catch( java.io.IOException ex ){
+ throw new RuntimeException(ex);
+ }
+ };
+ }
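+
+ // Usage sketch (names are hypothetical, not part of this library): pump an
+ // InputStream into an OutputStream on a worker thread, closing the
+ // destination once the source is drained:
+ //
+ //   Thread pump = new Thread(StreamUtils.newCopyTask(srcIn, dstOut, true));
+ //   pump.start();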
+
public static <SRC,DST> java.util.Iterator<DST> map( java.util.Iterator<SRC> src , java.util.function.Function<SRC,DST> mapper ) {
return new java.util.Iterator<DST>() {
@Override public boolean hasNext() { return src.hasNext(); }
diff --git a/src/main/java/ch/hiddenalpha/unspecifiedgarbage/time/TimeUtils.java b/src/main/java/ch/hiddenalpha/unspecifiedgarbage/time/TimeUtils.java
index 2bb1bfb..d7d7ec8 100644
--- a/src/main/java/ch/hiddenalpha/unspecifiedgarbage/time/TimeUtils.java
+++ b/src/main/java/ch/hiddenalpha/unspecifiedgarbage/time/TimeUtils.java
@@ -25,9 +25,9 @@ public class TimeUtils {
* Find smallest distance assuming integers overflow "like a circle".
*
* Computers cannot represent all existing integers. Due to how
- * integers are represented in computers, they are not infinite but
- * more like a circle. Speak when we infinitely increment an
- * integer, it overflows and (usually) continues to walk around this
+ * integers are represented in Java, they are not infinite but
+ * more like a circle. That is, when we infinitely increment an
+ * integer, it overflows and (usually) continues to walk around this
* (imaginary) circle.
*
* This function takes two of those numbers on this circle and
diff --git a/src/main/java/org/apache/logging/slf4j/Log4jLogger.java b/src/main/java/org/apache/logging/slf4j/Log4jLogger.java
new file mode 100644
index 0000000..eb06c77
--- /dev/null
+++ b/src/main/java/org/apache/logging/slf4j/Log4jLogger.java
@@ -0,0 +1,104 @@
+package org.apache.logging.slf4j;
+
+import org.apache.logging.log4j.spi.ExtendedLogger;
+import org.slf4j.Marker;
+import org.slf4j.event.Level;
+import org.slf4j.spi.LocationAwareLogger;
+import org.slf4j.spi.LoggingEventBuilder;
+
+import java.io.Serializable;
+
+
+/**
+ * <p>FU** this fu***** damn sh** code that still tries to use log4j, no matter
+ * how strongly we tell it NOT to use it!</p>
+ *
+ * <p>This class only exists to prevent services from starting if IDEA still
+ * missed the dependency changes in the pom and still tries to use the wrong
+ * logger impl. So that I can, once and for all, stop wasting my time waiting
+ * for logs which never arrive because the wrong logger is still in use
+ * somewhere.</p>
+ */
+public class Log4jLogger implements LocationAwareLogger, Serializable {
+
+ private final org.slf4j.Logger log;
+
+ Log4jLogger(final Log4jMarkerFactory markerFactory, final ExtendedLogger logger, final String name) {
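+ /* The log4j-specific arguments are ignored on purpose: everything gets
+ * routed to slf4j-simple so the wrong backend cannot swallow logs again. */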
+ this.log = new org.slf4j.simple.SimpleLoggerFactory().getLogger(name);
+ }
+
+ @Override public void log(Marker marker, String s, int i, String s1, Object[] objects, Throwable throwable) {
+ throw new UnsupportedOperationException(/*TODO*/"Not impl yet");
+ }
+
+ @Override public String getName() { return log.getName(); }
+ @Override public LoggingEventBuilder makeLoggingEventBuilder(Level level) { return log.makeLoggingEventBuilder(level); }
+ @Override public LoggingEventBuilder atLevel(Level level) { return log.atLevel(level); }
+ @Override public boolean isEnabledForLevel(Level level) { return log.isEnabledForLevel(level); }
+ @Override public boolean isTraceEnabled() { return log.isTraceEnabled(); }
+ @Override public void trace(String s) { log.trace(s); }
+ @Override public void trace(String s, Object o) { log.trace(s, o); }
+ @Override public void trace(String s, Object o, Object o1) { log.trace(s, o, o1); }
+ @Override public void trace(String s, Object... objects) { log.trace(s, objects); }
+ @Override public void trace(String s, Throwable throwable) { log.trace(s, throwable); }
+ @Override public boolean isTraceEnabled(Marker marker) { return log.isTraceEnabled(marker); }
+ @Override public LoggingEventBuilder atTrace() { return log.atTrace(); }
+ @Override public void trace(Marker marker, String s) { log.trace(marker, s); }
+ @Override public void trace(Marker marker, String s, Object o) { log.trace(marker, s, o); }
+ @Override public void trace(Marker marker, String s, Object o, Object o1) { log.trace(marker, s, o, o1); }
+ @Override public void trace(Marker marker, String s, Object... objects) { log.trace(marker, s, objects); }
+ @Override public void trace(Marker marker, String s, Throwable throwable) { log.trace(marker, s, throwable); }
+ @Override public boolean isDebugEnabled() { return log.isDebugEnabled(); }
+ @Override public void debug(String s) { log.debug(s); }
+ @Override public void debug(String s, Object o) { log.debug(s, o); }
+ @Override public void debug(String s, Object o, Object o1) { log.debug(s, o, o1); }
+ @Override public void debug(String s, Object... objects) { log.debug(s, objects); }
+ @Override public void debug(String s, Throwable throwable) { log.debug(s, throwable); }
+ @Override public boolean isDebugEnabled(Marker marker) { return log.isDebugEnabled(marker); }
+ @Override public void debug(Marker marker, String s) { log.debug(marker, s); }
+ @Override public void debug(Marker marker, String s, Object o) { log.debug(marker, s, o); }
+ @Override public void debug(Marker marker, String s, Object o, Object o1) { log.debug(marker, s, o, o1); }
+ @Override public void debug(Marker marker, String s, Object... objects) { log.debug(marker, s, objects); }
+ @Override public void debug(Marker marker, String s, Throwable throwable) { log.debug(marker, s, throwable); }
+ @Override public LoggingEventBuilder atDebug() { return log.atDebug(); }
+ @Override public boolean isInfoEnabled() { return log.isInfoEnabled(); }
+ @Override public void info(String s) { log.info(s); }
+ @Override public void info(String s, Object o) { log.info(s, o); }
+ @Override public void info(String s, Object o, Object o1) { log.info(s, o, o1); }
+ @Override public void info(String s, Object... objects) { log.info(s, objects); }
+ @Override public void info(String s, Throwable throwable) { log.info(s, throwable); }
+ @Override public boolean isInfoEnabled(Marker marker) { return log.isInfoEnabled(marker); }
+ @Override public void info(Marker marker, String s) { log.info(marker, s); }
+ @Override public void info(Marker marker, String s, Object o) { log.info(marker, s, o); }
+ @Override public void info(Marker marker, String s, Object o, Object o1) { log.info(marker, s, o, o1); }
+ @Override public void info(Marker marker, String s, Object... objects) { log.info(marker, s, objects); }
+ @Override public void info(Marker marker, String s, Throwable throwable) { log.info(marker, s, throwable); }
+ @Override public LoggingEventBuilder atInfo() { return log.atInfo(); }
+ @Override public boolean isWarnEnabled() { return log.isWarnEnabled(); }
+ @Override public void warn(String s) { log.warn(s); }
+ @Override public void warn(String s, Object o) { log.warn(s, o); }
+ @Override public void warn(String s, Object... objects) { log.warn(s, objects); }
+ @Override public void warn(String s, Object o, Object o1) { log.warn(s, o, o1); }
+ @Override public void warn(String s, Throwable throwable) { log.warn(s, throwable); }
+ @Override public boolean isWarnEnabled(Marker marker) { return log.isWarnEnabled(marker); }
+ @Override public void warn(Marker marker, String s) { log.warn(marker, s); }
+ @Override public void warn(Marker marker, String s, Object o) { log.warn(marker, s, o); }
+ @Override public void warn(Marker marker, String s, Object o, Object o1) { log.warn(marker, s, o, o1); }
+ @Override public void warn(Marker marker, String s, Object... objects) { log.warn(marker, s, objects); }
+ @Override public void warn(Marker marker, String s, Throwable throwable) { log.warn(marker, s, throwable); }
+ @Override public LoggingEventBuilder atWarn() { return log.atWarn(); }
+ @Override public boolean isErrorEnabled() { return log.isErrorEnabled(); }
+ @Override public void error(String s) { log.error(s); }
+ @Override public void error(String s, Object o) { log.error(s, o); }
+ @Override public void error(String s, Object o, Object o1) { log.error(s, o, o1); }
+ @Override public void error(String s, Object... objects) { log.error(s, objects); }
+ @Override public void error(String s, Throwable throwable) { log.error(s, throwable); }
+ @Override public boolean isErrorEnabled(Marker marker) { return log.isErrorEnabled(marker); }
+ @Override public void error(Marker marker, String s) { log.error(marker, s); }
+ @Override public void error(Marker marker, String s, Object o) { log.error(marker, s, o); }
+ @Override public void error(Marker marker, String s, Object o, Object o1) { log.error(marker, s, o, o1); }
+ @Override public void error(Marker marker, String s, Object... objects) { log.error(marker, s, objects); }
+ @Override public void error(Marker marker, String s, Throwable throwable) { log.error(marker, s, throwable); }
+ @Override public LoggingEventBuilder atError() { return log.atError(); }
+
+}
diff --git a/src/main/lua/brgmt-logs/DigBrgmtLogs.lua b/src/main/lua/brgmt-logs/DigBrgmtLogs.lua
new file mode 100644
index 0000000..fb1f036
--- /dev/null
+++ b/src/main/lua/brgmt-logs/DigBrgmtLogs.lua
@@ -0,0 +1,5 @@
+--
+-- NOTHING HERE
+--
+-- See "brgmt-beef/scripts/". Instead.
+--
diff --git a/src/main/lua/git/GitflowChangelogGen.lua b/src/main/lua/git/GitflowChangelogGen.lua
new file mode 100644
index 0000000..3b44ac3
--- /dev/null
+++ b/src/main/lua/git/GitflowChangelogGen.lua
@@ -0,0 +1,195 @@
+
+local log = io.stderr
+local main
+
+
+function printHelp()
+ io.stdout:write(" \n"
+ .." Helper to extract essential data from a gitflow log which potentially\n"
+ .." is useful to write a CHANGELOG from.\n"
+ .." \n"
+ .." Options:\n"
+ .." \n"
+ .." --since <date>\n"
+ .." Ignore commits with this ISO date and older.\n"
+ .." \n"
+ .." --remote <str>\n"
+ .." Name of the git remote to use. Defaults to 'upstream'.\n"
+ .." \n"
+ .." --no-fetch\n"
+ .." Do NOT update refs from remote. Just use what we have local.\n"
+ .." \n"
+ )
+end
+
+
+function parseArgs( app )
+ local iA = 0
+ while true do iA = iA + 1
+ local arg = _ENV.arg[iA]
+ if not arg then
+ break
+ elseif arg == "--since" then
+ iA = iA + 1; arg = _ENV.arg[iA]
+ if not arg then log:write("EINVAL: --since needs value\n")return end
+ app.since = arg
+ elseif arg == "--remote" then
+ iA = iA + 1; arg = _ENV.arg[iA]
+ if not arg then log:write("EINVAL: --remote needs value\n")return end
+ app.remoteName = arg
+ elseif arg == "--no-fetch" then
+ app.isFetch = false
+ elseif arg == "--help" then
+ app.isHelp = true; return 0
+ else
+ log:write("EINVAL: ".. arg .."\n")return
+ end
+ end
+ if not app.since then log:write("EINVAL: --since missing\n")return end
+ if not app.remoteName then app.remoteName = "upstream" end
+ return 0
+end
+
+
+function readCommitHdr( app )
+ --log:write("[DEBUG] parse hdr from '".. app.fullHistory:sub(app.fullHistoryRdBeg, app.fullHistoryRdBeg+256) .."...'\n")
+ local f, t = app.fullHistory:find("^"
+ .."commit ........................................[^\n]*\n"
+ .."Merge: [0-9a-z]+ [0-9a-z]+\n"
+ .."Author: [^\n]+\n"
+ .."Date: [^\n]+\n"
+ .."\n"
+ , app.fullHistoryRdBeg)
+ if not f then f, t = app.fullHistory:find("^"
+ .."commit ........................................[^\n]*\n"
+ .."Author: [^\n]+\n"
+ .."Date: [^\n]+\n"
+ .."\n"
+ , app.fullHistoryRdBeg) end
+ if not f then
+ assert(app.fullHistory:len() == app.fullHistoryRdBeg-1, app.fullHistory:len()..", "..app.fullHistoryRdBeg)
+ app.parseFn = false
+ return
+ end
+ app.commitHdr = assert(app.fullHistory:sub(f, t-1))
+ --log:write("hdrBeginsWith '"..(app.commitHdr:sub(1, 32)).."...'\n")
+ app.fullHistoryRdBeg = t + 1
+ --log:write("hdr parsed. rdCursr now points to '".. app.fullHistory:sub(app.fullHistoryRdBeg, app.fullHistoryRdBeg+16) .."...'\n")
+ app.parseFn = assert(readCommitMsg)
+end
+
+
+function readCommitMsg( app )
+ local idxOfC = app.fullHistoryRdBeg
+ local chrPrev = false
+ while true do idxOfC = idxOfC + 1
+ local chr = app.fullHistory:byte(idxOfC)
+ --log:write("CHR '"..tostring(app.fullHistory:sub(idxOfC, idxOfC)).."'\n")
+ if (chr == 0x63) and chrPrev == 0x0A then
+ idxOfC = idxOfC - 1
+ break -- LF followed by 'c' (aka 'commit') found
+ elseif not chr then
+ idxOfC = idxOfC - 1
+ break
+ else
+ chrPrev = assert(chr)
+ end
+ end
+ local mtch = app.fullHistory:sub(app.fullHistoryRdBeg, idxOfC - 1)
+ assert(mtch)
+ while mtch:byte(mtch:len()) == 0x0A do mtch = mtch:sub(1, -2) end
+ mtch = mtch:gsub("\n ", "\n"):gsub("^ ", "")
+ app.commitMsg = mtch
+ app.fullHistoryRdBeg = idxOfC + 1
+ app.parseFn = readCommitHdr
+ --log:write("msg parsed. rdCursr now points to '".. app.fullHistory:sub(app.fullHistoryRdBeg, app.fullHistoryRdBeg+16) .."...'\n")
+ table.insert(app.commits, {
+ hdr = assert(app.commitHdr),
+ msg = assert(app.commitMsg),
+ })
+end
+
+
+function run( app )
+ local snk = io.stdout
+ if app.isFetch then
+ -- Make sure refs are up-to-date
+ local gitFetch = "git fetch \"".. app.remoteName .."\""
+ log:write("[DEBUG] ".. gitFetch .."\n")
+ local gitFetch = io.popen(gitFetch)
+ while true do
+ local buf = gitFetch:read(1<<16)
+ if not buf then break end
+ log:write(buf)
+ end
+ end
+ -- Collect input
+ local git = "git log --date-order --first-parent --decorate --since \"".. app.since.."\""
+ .." \"".. app.remoteName .."/master\""
+ .." \"".. app.remoteName .."/develop\""
+ log:write("[DEBUG] ".. git .."\n")
+ local git = io.popen(git)
+ while true do
+ local buf = git:read(1<<16)
+ if not buf then break end
+ --io.stdout:write(buf)
+ table.insert(app.fullHistory, buf)
+ end
+ -- Parse raw commits
+ app.fullHistory = table.concat(app.fullHistory)
+ app.parseFn = assert(readCommitHdr)
+ while app.parseFn do app.parseFn(app) end
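+ -- (parseFn acts as parser state: readCommitHdr and readCommitMsg hand over
+ -- to each other until readCommitHdr sets it to false at end of input)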
+ -- Prepare output
+ local prevDate = "0000-00-00"
+ local version, prevVersion = "v_._._", false
+ local dateEntry = false
+ local entries = {}
+ for k, v in ipairs(app.commits) do
+ local date = assert(v.hdr:match("\nDate: +([0-9-]+) "))
+ local author = assert(v.hdr:match("\nAuthor: +([^\n]+)\n"))
+ local prNr, short = v.msg:match("Pull request #(%d+): ([^\n]+)\n")
+ prevVersion = version
+ _, version = v.hdr:match("^([^\n]+)\n"):match("tag: ([a-z]+)-([^,]+)[,)]")
+ if not version then version = prevVersion end
+
+ if version ~= prevVersion or not dateEntry then
+ if dateEntry then table.insert(entries, dateEntry) end
+ dateEntry = {
+ txt = date .." - ".. version .."\n\nResolved issues:\n\n"
+ }
+ prevDate = date
+ end
+ if prNr then
+ dateEntry.txt = dateEntry.txt .. short .." (PR ".. prNr ..")\n"
+ else
+ dateEntry.txt = dateEntry.txt .. v.msg .."\n"
+ end
+ end
+ if dateEntry then table.insert(entries, dateEntry) end
+ -- output
+ for k, v in ipairs(entries) do
+ snk:write("\n\n")
+ snk:write(v.txt)
+ snk:write("\n")
+ end
+end
+
+
+function main()
+ local app = {
+ since = false,
+ remoteName = false,
+ isFetch = true,
+ fullHistory = {},
+ fullHistoryRdBeg = 1,
+ commits = {},
+ parseFn = false,
+ }
+ if parseArgs(app) ~= 0 then os.exit(1) end
+ if app.isHelp then printHelp() return end
+ run(app)
+end
+
+
+main()
+
diff --git a/src/main/lua/maven/MvnCentralDepScan.lua b/src/main/lua/maven/MvnCentralDepScan.lua
index 5322bc0..7f71afa 100644
--- a/src/main/lua/maven/MvnCentralDepScan.lua
+++ b/src/main/lua/maven/MvnCentralDepScan.lua
@@ -941,9 +941,6 @@ function mod.exportParentsLatest(app)
local stmt = app.stmtCache[stmtStr]
if not stmt then stmt = db:prepare(stmtStr) app.stmtCache[stmtStr] = stmt end
local rs = stmt:execute()
- out:write("h;Title;Parent relations (latest only)\n")
- out:write("h;ExportedAt;".. os.date("!%Y-%m-%d_%H:%M:%SZ") .."\n")
- out:write("c;GroupId;ArtifactId;Version;ParentGid;ParentAid;ParentVersion\n")
-- Need to filter out the older artifacts.
local all = {}
while rs:next() do
@@ -954,18 +951,14 @@ function mod.exportParentsLatest(app)
if diff > 0 then -- existing is newer. Keep it and ignore newer one.
goto nextRecord
else -- Either no entry yet or found a newer one.
- local entry = { gid=false, aid=false, ver=false, pgid=false, paid=false, pver=false }
- entry.gid = gid
- entry.aid = aid
- entry.ver = ver
- entry.pgid = rs:value(4)
- entry.paid = rs:value(5)
- entry.pver = rs:value(6)
- all[key] = entry
+ all[key] = { gid=gid, aid=aid, ver=ver, pgid=rs:value(4), paid=rs:value(5), pver=rs:value(6) }
end
::nextRecord::
end
-- Print
+ out:write("h;Title;Parent relations (latest only)\n")
+ out:write("h;ExportedAt;".. os.date("!%Y-%m-%d_%H:%M:%SZ") .."\n")
+ out:write("c;GroupId;ArtifactId;Version;ParentGid;ParentAid;ParentVersion\n")
for _, entry in pairs(all) do
out:write("r;".. entry.gid ..";".. entry.aid ..";".. entry.ver
..";".. entry.pgid ..";".. entry.paid ..";".. entry.pver .."\n")
@@ -1031,9 +1024,6 @@ function mod.exportDepsLatest(app)
local stmt = app.stmtCache[stmtStr]
if not stmt then stmt = db:prepare(stmtStr) app.stmtCache[stmtStr] = stmt end
local rs = stmt:execute()
- out:write("h;Title;Dependencies (of latest only)\n")
- out:write("h;ExportedAt;".. os.date("!%Y-%m-%d_%H:%M:%SZ") .."\n")
- out:write("c;GroupId;ArtifactId;Version;Dependency GID;Dependency AID;Dependency Version\n")
-- Need to filter out the older artifacts.
local all = {}
local entry, key, gid, aid, ver, diff
@@ -1046,18 +1036,14 @@ function mod.exportDepsLatest(app)
if diff > 0 then -- existing is newer. Keep it and ignore newer one.
goto nextRecord
else -- Either no entry yet or found a newer one.
- local entry = { gid=false, aid=false, ver=false, dgid=false, daid=false, dver=false }
- entry.gid = gid
- entry.aid = aid
- entry.ver = ver
- entry.dgid = rs:value(4)
- entry.daid = rs:value(5)
- entry.dver = rs:value(6)
- all[key] = entry
+ all[key] = { gid=gid, aid=aid, ver=ver, dgid=rs:value(4), daid=rs:value(5), dver=rs:value(6) }
end
goto nextRecord
::endFiltering::
-- Print
+ out:write("h;Title;Dependencies (of latest only)\n")
+ out:write("h;ExportedAt;".. os.date("!%Y-%m-%d_%H:%M:%SZ") .."\n")
+ out:write("c;GroupId;ArtifactId;Version;Dependency GID;Dependency AID;Dependency Version\n")
for _, entry in pairs(all) do
out:write("r;".. entry.gid ..";".. entry.aid ..";".. entry.ver
..";".. entry.dgid ..";".. entry.daid ..";".. entry.dver .."\n")
diff --git a/src/main/lua/misc/JavaCallgraph.lua b/src/main/lua/misc/JavaCallgraph.lua
new file mode 100644
index 0000000..6d0bd62
--- /dev/null
+++ b/src/main/lua/misc/JavaCallgraph.lua
@@ -0,0 +1,159 @@
+
+local SL = require("scriptlee")
+local newJavaClassParser = SL.newJavaClassParser
+local objectSeal = SL.objectSeal
+SL = nil
+
+local snk = io.stdout
+
+local main
+
+
+function initParser( app )
+ app.parser = newJavaClassParser{
+ cls = app,
+ onMagic = function(m, app) assert(m == "\xCA\xFE\xBA\xBE") end,
+ onClassfileVersion = function(maj, min, app) assert(maj == 55 and min == 0) end,
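+ -- (class-file major version 55 corresponds to Java 11)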
+ onConstPoolClassRef = function(i, idx, app)
+ app.constPool[i] = objectSeal{ type = "CLASS_REF", classNameIdx = idx, className = false, }
+ end,
+ onConstPoolIfaceMethodRef = function(i, nameIdx, nameAndTypeIdx, app)
+ app.constPool[i] = objectSeal{
+ type = "IFACE_METHOD_REF", nameIdx = nameIdx, nameAndTypeIdx = nameAndTypeIdx,
+ className = false, methodName = false, methodType = false,
+ }
+ end,
+ onConstPoolMethodRef = function(i, classIdx, nameAndTypeIdx, app)
+ app.constPool[i] = objectSeal{
+ type = "METHOD_REF", classIdx = classIdx, nameAndTypeIdx = nameAndTypeIdx,
+ className = false, methodName = false, signature = false,
+ }
+ end,
+ onConstPoolMethodType = function(i, descrIdx, app)
+ app.constPool[i] = objectSeal{
+ type = "METHOD_TYPE", descrIdx = descrIdx, descrStr = false,
+ }
+ end,
+ onConstPoolNameAndType = function(i, nameIdx, typeIdx, app)
+ app.constPool[i] = objectSeal{
+ type = "NAME_AND_TYPE", nameIdx = nameIdx, typeIdx = typeIdx, nameStr = false, typeStr = false,
+ }
+ end,
+ onConstPoolUtf8 = function(i, str, app)
+ app.constPool[i] = objectSeal{ type = "UTF8", str = str, }
+ end,
+
+ onConstPoolInvokeDynamic = function(i, bootstrapMethodAttrIdx, nameAndTypeIdx, app)
+ app.constPool[i] = objectSeal{
+ type = "INVOKE_DYNAMIC", bootstrapMethodAttrIdx = bootstrapMethodAttrIdx, nameAndTypeIdx = nameAndTypeIdx,
+ methodName = false, methodType = false, factoryClass = false, factoryMethod = false, factoryType = false,
+ }
+ end,
+ onConstPoolFieldRef = function(i, nameIdx, nameAndTypeIdx, app)
+ app.constPool[i] = objectSeal{
+ type = "FIELD_REF", nameIdx = nameIdx, nameAndTypeIdx = nameAndTypeIdx,
+ className = false, methodName = false, methodType = false,
+ }
+ end,
+ --onConstPoolMethodHandle = function(i, refKind, refIdx, app)
+ -- app.constPool[i] = objectSeal{ type = "METHOD_HANDLE", refKind = refKind, refIdx = refIdx, }
+ --end,
+ --onConstPoolStrRef = function(i, dstIdx, app)
+ -- print("ConstPool["..i.."] <StrRef> #"..dstIdx)
+ --end,
+ --onThisClass = function(nameIdx, app)
+ -- -- TODO print("onThisClass(#"..nameIdx..")")
+ --end,
+ --onField = function(iField, accessFlags, nameIdx, descrIdx, numAttrs, app)
+ -- print(string.format("onField(0x%04X, #%d, #%d, %d)",accessFlags,nameIdx,descrIdx,numAttrs))
+ --end,
+ --onMethod = function(accessFlags, nameIdx, descrIdx, app)
+ -- print(string.format("onMethod(0x%04X, #%d, #%d)",accessFlags,nameIdx,descrIdx))
+ --end,
+
+ onConstPoolEnd = function( app )
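+ -- Pool entries may reference entries defined later, so resolve in two
+ -- passes: first the name-bearing entries (CLASS_REF, METHOD_TYPE,
+ -- NAME_AND_TYPE), then the refs which depend on them.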
+ -- 1st run
+ for i, cpe in pairs(app.constPool) do
+ if false then
+ elseif cpe.type == "CLASS_REF" then
+ local tmp
+ tmp = assert(cpe.classNameIdx)
+ tmp = assert(app.constPool[cpe.classNameIdx], cpe.classNameIdx)
+ tmp = assert(tmp.str, tmp)
+ cpe.className = assert(tmp)
+ elseif cpe.type == "METHOD_TYPE" then
+ cpe.descrStr = assert(app.constPool[cpe.descrIdx].str)
+ elseif cpe.type == "NAME_AND_TYPE" then
+ cpe.nameStr = assert(app.constPool[cpe.nameIdx].str);
+ cpe.typeStr = assert(app.constPool[cpe.typeIdx].str);
+ end
+ end
+ -- 2nd run
+ for i, cpe in pairs(app.constPool) do
+ if false then
+ elseif cpe.type == "FIELD_REF" then
+ local nameAndType = assert(app.constPool[cpe.nameAndTypeIdx])
+ cpe.className = assert(app.constPool[cpe.nameIdx].className);
+ cpe.methodName = assert(app.constPool[nameAndType.nameIdx].str);
+ cpe.methodType = assert(app.constPool[nameAndType.typeIdx].str);
+ elseif cpe.type == "METHOD_REF" then
+ local nameAndType = app.constPool[cpe.nameAndTypeIdx]
+ cpe.className = assert(app.constPool[cpe.classIdx].className)
+ cpe.methodName = assert(app.constPool[nameAndType.nameIdx].str)
+ cpe.signature = assert(app.constPool[nameAndType.typeIdx].str)
+ elseif cpe.type == "IFACE_METHOD_REF" then
+ local classRef = assert(app.constPool[cpe.nameIdx])
+ local nameAndType = assert(app.constPool[cpe.nameAndTypeIdx])
+ cpe.className = assert(classRef.className)
+ cpe.methodName = assert(app.constPool[nameAndType.nameIdx].str)
+ cpe.methodType = assert(app.constPool[nameAndType.typeIdx].str)
+ elseif cpe.type == "INVOKE_DYNAMIC" then
+ local nameAndType = assert(app.constPool[cpe.nameAndTypeIdx])
+ local bootstrapMethod = assert(app.constPool[cpe.bootstrapMethodAttrIdx], cpe.bootstrapMethodAttrIdx);
+ cpe.methodName = assert(app.constPool[nameAndType.nameIdx].str)
+ cpe.methodType = assert(app.constPool[nameAndType.typeIdx].str)
+ --cpe.factoryClass = ;
+ --cpe.factoryMethod = ;
+ --cpe.factoryType = ;
+ end
+ end
+ -- debug-print
+ snk:write("\n")
+ for _,cpIdx in pairs{ 13, 14, 15, 227, 230, 236, 704, 709, 717 }do
+ snk:write("CONST_POOL @ ".. cpIdx .."\n")
+ for k,v in pairs(app.constPool[cpIdx])do print("X",k,v)end
+ end
+ for i, cpe in pairs(app.constPool) do
+ if false then
+ --elseif cpe.type == "CLASSREF" then
+ -- snk:write("CLASS \"".. cpe.className .."\"\n")
+ end
+ end
+ end,
+ }
+end
+
+
+function main()
+ local app = objectSeal{
+ parser = false,
+ constPool = {},
+ }
+
+ initParser(app)
+
+ -- Read 1st arg as a classfile and pump it into the parser.
+ local src = arg[1] and io.open( arg[1], "rb" ) or nil
+ if not src then
+ print("ERROR: Failed to open file from 1st arg: "..(arg[1]or"nil")) return
+ end
+ while true do
+ local buf = src:read(8192)
+ if not buf then break end
+ app.parser:write(buf)
+ end
+ app.parser:closeSnk()
+end
+
+
+main()
diff --git a/src/main/lua/mshitteams/ListEmlInbox.lua b/src/main/lua/mshitteams/ListEmlInbox.lua
new file mode 100644
index 0000000..23b42aa
--- /dev/null
+++ b/src/main/lua/mshitteams/ListEmlInbox.lua
@@ -0,0 +1,322 @@
+--
+-- Sources:
+-- - [Authorize](https://learn.microsoft.com/en-us/graph/auth-v2-user?tabs=http)
+-- - [Auth without app register](https://techcommunity.microsoft.com/t5/teams-developer/authenticate-microsoft-graph-api-with-username-and-password/m-p/3940540)
+--
+-- TODO: scriptlee 0.0.5-83-gdffa272 seems to SEGFAULT constantly here, no
+-- matter whether we use socket or newHttpClient.
+-- TODO: scriptlee 0.0.5-87-g946ebdc crashes through assertion:
+-- Assertion failed: cls->msg.connect.sck->vt->unwrap != NULL, file src/windoof/c/io/AsyncIO.c, line 421
+--
+
+local SL = require("scriptlee")
+local newHttpClient = SL.newHttpClient
+--local AF_INET = SL.posix.AF_INET
+--local getaddrinfo = SL.posix.getaddrinfo
+--local INADDR_ANY = SL.posix.INADDR_ANY
+--local inaddrOfHostname = SL.posix.inaddrOfHostname
+--local IPPROTO_TCP = SL.posix.IPPROTO_TCP
+local objectSeal = SL.objectSeal
+--local SOCK_STREAM = SL.posix.SOCK_STREAM
+--local socket = SL.posix.socket
+local startOrExecute = SL.reactor.startOrExecute
+--for k,v in pairs(SL)do print("SL",k,v)end os.exit(1)
+SL = nil
+
+local authorizeToMsGraphApi, getAccessToken, getAuthHdr, httpUrlEncode, main, parseArgs, printHelp,
+ run, getMyProfileForDebugging
+local inn, out, log = io.stdin, io.stdout, io.stderr
+
+
+function printHelp()
+ out:write(" \n"
+ .." Experiments for M$ graph API.\n"
+ .." \n"
+ .." WARN: This tool is experimental! Do NOT use it!\n"
+ .." \n"
+ .." Options:\n"
+ .." \n"
+ .." --user <str>\n"
+ .." M$ user.\n"
+ .." \n"
+ .." --pass <str>\n"
+ .." M$ password. TODO get rid of this insecure idea.\n"
+ .." \n"
+ .." --appId <str>\n"
+ .." AppId (aka client_id). See M$ doc about it.\n"
+ .." \n")
+end
+
+
+function parseArgs( app )
+ if #_ENV.arg == 0 then log:write("EINVAL: Args missing\n")return-1 end
+ local iA = 0
+ --local isYolo = false
+ while true do iA = iA + 1
+ local arg = _ENV.arg[iA]
+ if not arg then
+ break
+ elseif arg == "--help" then
+ app.isHelp = true; return 0
+ elseif arg == "--user" then
+ iA = iA + 1; arg = _ENV.arg[iA]
+ if not arg then log:write("EINVAL: --user needs value\n")return-1 end
+ app.msUser = arg
+ elseif arg == "--pass" then
+ iA = iA + 1; arg = _ENV.arg[iA]
+ if not arg then log:write("EINVAL: --pass needs value\n")return-1 end
+ app.msPass = arg
+ elseif arg == "--appId" then
+ iA = iA + 1; arg = _ENV.arg[iA]
+ if not arg then log:write("EINVAL: --appId needs value\n")return-1 end
+ app.msAppId = arg
+ --elseif arg == "--yolo" then
+ -- isYolo = true
+ else
+ log:write("EINVAL: ".. arg .."\n") return-1
+ end
+ end
+ if not app.msUser then log:write("EINVAL: --user missing\n") return-1 end
+ if not app.msPass then log:write("EINVAL: --pass missing\n") return-1 end
+ if not app.msAppId then log:write("EINVAL: --appId missing\n")return-1 end
+ --if not isYolo then log:write("EINVAL: --yolo missing\n")return-1 end
+ return 0
+end
+
+
+function getMyProfileForDebugging( app )
+ local http = app.http
+ local authKey, authVal = getAuthHdr(app)
+ local req = objectSeal{
+ base = false,
+ method = "GET",
+ uri = "/v1.0/me",
+ rspCode = false,
+ rspBody = {},
+ }
+ req.base = http:request{
+ cls = req,
+ host = app.msGraphHost,
+ port = app.msGraphPort,
+ connectTimeoutMs = 3000,
+ method = req.method,
+ url = req.uri,
+ hdrs = {
+ { authKey, authVal },
+ },
+ --useHostHdr = ,
+ --useTLS = true,
+ onRspHdr = function( rsp, cls )
+ cls.rspCode = rsp.status
+ if rsp.status ~= 200 then
+ log:write("> ".. req.method .." ".. req.uri .."\n> \n")
+ log:write("< ".. rsp.proto .." ".. rsp.status .." ".. rsp.phrase .."\n")
+ for _,h in ipairs(rsp.headers)do log:write("< "..h[1]..": "..h[2].."\n")end
+ log:write("\n")
+ end
+ end,
+ onRspChunk = function(buf, cls)
+ if cls.rspCode ~= 200 then
+ log:write("< ")
+ log:write((buf:gsub("\n", "\n< ")))
+ log:write("\n")
+ else
+ assert(type(buf) == "string")
+ table.insert(cls.rspBody, buf)
+ end
+ end,
+ onRspEnd = function(cls)
+ if cls.rspCode ~= 200 then error("Request failed.") end
+ cls.rspBody = table.concat(cls.rspBody)
+ log:write("Response was:\n\n")
+ log:write(cls.rspBody)
+ log:write("\n\n")
+ end,
+ }
+ req.base:closeSnk()
+end
+
+
+function authorizeToMsGraphApi( app )
+ local http = app.http
+ local req = objectSeal{
+ base = false,
+ method = "GET",
+ host = (app.proxyHost or app.msLoginHost),
+ port = (app.proxyPort or app.msLoginPort),
+ uri = false,
+ hdrs = {
+ { "Content-Type", "application/x-www-form-urlencoded" },
+ },
+ reqBody = ""
+ .. "grant_type=password"
+ .."&resource=https://graph.microsoft.com"
+ .."&username=".. httpUrlEncode(app, app.msUser) ..""
+ .."&password=".. httpUrlEncode(app, app.msPass) .."",
+ rspProto = false, rspCode = false, rspPhrase = false,
+ rspHdrs = false,
+ rspBody = {},
+ }
+ if app.proxyHost then
+ req.uri = "https://".. app.msLoginHost ..":".. app.msLoginPort
+ .."/".. app.msTenant .."/oauth2/v2.0/token"
+ else
+ req.uri = "/".. app.msTenant .."/oauth2/v2.0/token"
+ end
+ local ok, ex = xpcall(function()
+ req.base = http:request{
+ cls = req,
+ connectTimeoutMs = app.connectTimeoutMs,
+ host = req.host,
+ port = req.port,
+ method = req.method,
+ url = req.uri,
+ hdrs = req.hdrs,
+ onRspHdr = function( rsp, req )
+ req.rspProto = rsp.proto
+ req.rspCode = rsp.status
+ req.rspPhrase = rsp.phrase
+ req.rspHdrs = rsp.headers
+ end,
+ onRspChunk = function( buf, req ) table.insert(req.rspBody, buf) end,
+ onRspEnd = function( req )
+ local rspBody = table.concat(req.rspBody) req.rspBody = false
+ if req.rspCode ~= 200 then
+ log:write("[ERROR] Request failed\n")
+ log:write("peer ".. req.host ..":".. req.port .."\n")
+ log:write("> ".. req.method .." ".. req.uri .."\n")
+ for _, h in ipairs(req.hdrs) do log:write("> ".. h[1] ..": ".. h[2] .."\n") end
+ log:write("> \n")
+ log:write("> ".. req.reqBody:gsub("\r?\n", "\n> ") .."\n")
+ log:write("< ".. req.rspProto .." ".. req.rspCode .." ".. req.rspPhrase .."\n")
+ for _, h in ipairs(req.rspHdrs) do log:write("< ".. h[1] ..": ".. h[2] .."\n")end
+ log:write("< \n")
+ log:write("< ".. rspBody:gsub("\r?\n", "\n< ") .."\n")
+ error("TODO_10aa11de804e733337e7c244298791c6")
+ end
+ log:write("< ".. req.rspProto .." ".. req.rspCode .." ".. req.rspPhrase .."\n")
+ for _, h in ipairs(req.rspHdrs) do log:write("< ".. h[1] ..": ".. h[2] .."\n")end
+ log:write("< \n")
+ log:write("< ".. rspBody:gsub("\r?\n", "\n< ") .."\n")
+ -- How to continue:
+ --local token = rsp.bodyJson.access_token
+ --local authHdr = { "Authorization", "Bearer ".. token, }
+ end,
+ }
+ end, debug.traceback)
+ if not ok then
+ log:write("[ERROR] Request failed 2\n")
+ log:write("peer ".. req.host ..":".. req.port .."\n")
+ log:write("> ".. req.method .." ".. req.uri .."\n")
+ for _, h in ipairs(req.hdrs) do log:write("> ".. h[1] ..": ".. h[2] .."\n") end
+ log:write("> \n")
+ log:write("> ".. req.reqBody:gsub("\r?\n", "\n> ") .."\n")
+ error(ex)
+ end
+   req.base:write(req.reqBody) -- NOTE: was commented out; without it the form body is never sent
+   req.base:closeSnk()
+end
+
+
+function httpUrlEncode( app, str )
+ local hexDigits, ret, beg, iRd = "0123456789ABCDEF", {}, 1, 0
+ ::nextInputChar::
+ iRd = iRd + 1
+ local byt = str:byte(iRd)
+ if not byt then
+ elseif byt == 0x2D -- dash
+ or byt == 0x2E -- dot
+ or byt >= 0x30 and byt <= 0x39 -- 0-9
+       or byt >= 0x41 and byt <= 0x5A -- A-Z (0x41..0x5A; 0x40 is '@')
+       or byt >= 0x61 and byt <= 0x7A -- a-z (0x61..0x7A; 0x60 is '`')
+ then
+ goto nextInputChar
+ end
+ if beg < iRd then table.insert(ret, str:sub(beg, iRd-1)) end
+ if not byt then return table.concat(ret) end
+ table.insert(ret, "%")
+   local hi = ((byt & 0xF0) >> 4) + 1 -- parens needed: '+' binds tighter than '>>'
+   local lo = (byt & 0x0F) + 1
+ table.insert(ret, hexDigits:sub(hi, hi) .. hexDigits:sub(lo, lo))
+ beg = iRd + 1
+ goto nextInputChar
+end
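+
+-- [hedged self-check, not part of the original tool] Quick sanity checks for
+-- httpUrlEncode, assuming the fixed shift above and a Lua 5.3+ interpreter
+-- with bitwise operators (the 'app' arg is unused, so nil is fine):
+--   assert(httpUrlEncode(nil, "a b") == "a%20b")
+--   assert(httpUrlEncode(nil, "x=1&y=2") == "x%3D1%26y%3D2")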
+
+
+function getAccessToken( app )
+ -- See "https://learn.microsoft.com/en-us/graph/auth-v2-user?tabs=http#3-request-an-access-token"
+ local method = "POST"
+ local uri = "/".. app.msTenant .."/oauth2/v2.0/token"
+ local hdrs = {
+      { "Host", "login.microsoftonline.com" }, -- Host header carries no scheme
+ { "Content-Type", "application/x-www-form-urlencoded" },
+ }
+   -- TODO: incomplete stub; 'code' and 'redirUri' are still undefined (they
+   -- come from the preceding authorization step) and the request is never sent.
+   local body = ""
+      .."client_id=".. assert(app.msAppId)
+      .."&scope=".. httpUrlEncode(app, app.msPerms) -- assumption: msPerms is the intended scope
+      .."&code=".. code
+      .."&redirect_uri=".. redirUri
+      .."&grant_type=authorization_code"
+end
+
+
+-- @return 1 - HTTP header key
+-- @return 2 - HTTP header value
+function getAuthHdr( app )
+ assert(app.msToken)
+ return "Authorization", ("Bearer ".. app.msToken)
+end
+
+
+function run( app )
+ app.http = newHttpClient{}
+ authorizeToMsGraphApi(app)
+ --getMyProfileForDebugging(app)
+end
+
+
+function main()
+ local loginHost, loginPort, graphHost, graphPort, proxyHost, proxyPort
+ local choice = 3
+ if choice == 1 then
+ loginHost = "login.microsoftonline.com"; loginPort = 443
+ graphHost = "graph.microsoft.com"; graphPort = 443
+ proxyHost = "127.0.0.1"; proxyPort = 3128
+ elseif choice == 2 then
+ loginHost = "127.0.0.1"; loginPort = 8081
+ graphHost = "127.0.0.1"; graphPort = 8081
+ proxyHost = false; proxyPort = false
+ elseif choice == 3 then
+ loginHost = "login.microsoftonline.com"; loginPort = 443
+ graphHost = "127.0.0.1"; graphPort = 8081
+ proxyHost = "127.0.0.1"; proxyPort = 3128
+ elseif choice == 4 then
+ loginHost = "login.microsoftonline.com"; loginPort = 443
+ graphHost = "graph.microsoft.com"; graphPort = 443
+ proxyHost = false; proxyPort = false
+ else error("TODO_1700683244") end
+ local app = objectSeal{
+ isHelp = false,
+ msLoginHost = loginHost, msLoginPort = loginPort,
+ msGraphHost = graphHost, msGraphPort = graphPort,
+ proxyHost = proxyHost, proxyPort = proxyPort,
+ -- TODO take this from a failed api call, which has this in the rsp headers.
+ msTenant = "common", -- TODO configurable
+ -- TODO take this from a failed api call, which has this in the rsp headers.
+ msAppId = false,
+ msPerms = "offline_access user.read mail.read",
+ msToken = false,
+ msUser = false,
+ msPass = false,
+ http = false,
+ connectTimeoutMs = 3000,
+ --sck = false,
+ }
+ if parseArgs(app) ~= 0 then os.exit(1) end
+ if app.isHelp then printHelp() return end
+ run(app)
+end
+
+
+startOrExecute(main)
+
diff --git a/src/main/lua/mshitteams/SendRawMsEmail.lua b/src/main/lua/mshitteams/SendRawMsEmail.lua
new file mode 100644
index 0000000..2d2940e
--- /dev/null
+++ b/src/main/lua/mshitteams/SendRawMsEmail.lua
@@ -0,0 +1,60 @@
+
+local SL = require("scriptlee")
+--local newHttpClient = SL.newHttpClient
+--local newShellcmd = SL.newShellcmd
+local objectSeal = SL.objectSeal
+--local parseJSON = SL.parseJSON
+--local sleep = SL.posix.sleep
+--local newCond = SL.posix.newCond
+--local async = SL.reactor.async
+local startOrExecute = SL.reactor.startOrExecute
+--for k,v in pairs(SL)do print("SL",k,v)end os.exit(1)
+SL = nil
+
+local mod = {}
+local inn, out, log = io.stdin, io.stdout, io.stderr
+
+
+function mod.printHelp()
+ out:write(" \n"
+ .." Options:\n"
+ .." \n"
+ .."\n\n")
+end
+
+
+function mod.parseArgs( app )
+ local isStdinn = false
+ local iA = 0
+ while true do iA = iA + 1
+ local arg = _ENV.arg[iA]
+ if not arg then
+ break
+ elseif arg == "--help" then
+ app.isHelp = true; return 0
+ else
+ log:write("Unknown arg: ".. arg .."\n") return-1
+ end
+ end
+ if not isStdinn then log:write("Bad args\n")return-1 end
+ return 0
+end
+
+
+function mod.run( app )
+ error("TODO_20230608125925")
+end
+
+
+function mod.main()
+ local app = objectSeal{
+ isHelp = false,
+ }
+ if mod.parseArgs(app) ~= 0 then os.exit(1) end
+ if app.isHelp then mod.printHelp() return end
+ mod.run(app)
+end
+
+
+startOrExecute(mod.main)
+
diff --git a/src/main/lua/paisa-fleet/FindFullDisks.lua b/src/main/lua/paisa-fleet/FindFullDisks.lua
new file mode 100644
index 0000000..9963838
--- /dev/null
+++ b/src/main/lua/paisa-fleet/FindFullDisks.lua
@@ -0,0 +1,322 @@
+
+local SL = require("scriptlee")
+local newHttpClient = SL.newHttpClient
+local newShellcmd = SL.newShellcmd
+local newSqlite = SL.newSqlite
+local objectSeal = SL.objectSeal
+local parseJSON = SL.parseJSON
+local startOrExecute = SL.reactor.startOrExecute
+SL = nil
+
+local log = io.stdout
+
+
+function printHelp()
+ io.write("\n"
+ .." WARN: This is experimental.\n"
+ .." \n"
+ .." Options:\n"
+ .." --backendHost <inaddr> (eg \"localhost\")\n"
+ .." --backendPort <int> (eg 80)\n"
+ .." --sshPort <int> (eg 22)\n"
+ .." --sshUser <str> (eg \"eddieuser\")\n"
+ .." --state <path> (eg \"path/to/state\")\n"
+ .." \n")
+end
+
+
+function parseArgs( app )
+ app.backendPort = 80
+ app.sshPort = 22
+ app.sshUser = os.getenv("USERNAME") or false
+ app.statePath = ":memory:"
+ local iA = 0
+ ::nextArg::
+ iA = iA + 1
+ local arg = _ENV.arg[iA]
+ if not arg then
+ goto verifyResult
+ elseif arg == "--help" then
+ app.isHelp = true return 0
+ elseif arg == "--backendHost" then
+ iA = iA + 1; arg = _ENV.arg[iA]
+ if not arg then log:write("EINVAL: --backendHost needs value\n")return end
+ app.backendHost = arg
+ elseif arg == "--backendPort" then
+ iA = iA + 1; arg = _ENV.arg[iA]
+ if not arg then log:write("EINVAL: --backendPort needs value\n")return end
+      app.backendPort = arg
+ elseif arg == "--sshPort" then
+ iA = iA + 1; arg = _ENV.arg[iA]
+ if not arg then log:write("EINVAL: --sshPort needs value\n")return end
+ app.sshPort = arg
+ elseif arg == "--sshUser" then
+ iA = iA + 1; arg = _ENV.arg[iA]
+ if not arg then log:write("EINVAL: --sshUser needs value\n")return end
+ app.sshUser = arg
+ elseif arg == "--state" then
+ iA = iA + 1; arg = _ENV.arg[iA]
+ if not arg then log:write("EINVAL: --state needs value\n")return end
+ app.statePath = arg
+ else
+ log:write("EINVAL: ".. arg .."\n")return
+ end
+ goto nextArg
+ ::verifyResult::
+ if not app.backendHost then log:write("EINVAL: --backendHost missing\n")return end
+ if not app.sshUser then log:write("EINVAL: --sshUser missing")return end
+ return 0
+end
+
+
+function getStateDb(app)
+ if not app.stateDb then
+ local db = newSqlite{ database = assert(app.statePath) }
+ -- TODO normalize scheme
+ db:prepare("CREATE TABLE IF NOT EXISTS DeviceDfLog(\n"
+ .." id INTEGER PRIMARY KEY,\n"
+ .." \"when\" TEXT NOT NULL,\n" -- "https://xkcd.com/1179"
+ .." hostname TEXT NOT NULL,\n"
+ .." eddieName TEXT NOT NULL,\n"
+ .." rootPartitionUsedPercent INT,\n"
+ .." varLibDockerUsedPercent INT,\n"
+ .." varLogUsedPercent INT,\n"
+ .." dataUsedPercent INT,\n"
+ .." stderr TEXT NOT NULL,\n"
+ .." stdout TEXT NOT NULL)\n"
+ ..";"):execute()
+ app.stateDb = db
+ end
+ return app.stateDb
+end
+
+
+function storeDiskFullResult( app, hostname, eddieName, stderrBuf, stdoutBuf )
+ assert(app and hostname and eddieName and stderrBuf and stdoutBuf);
+ local rootPartitionUsedPercent = stdoutBuf:match("\n/[^ ]+ +%d+ +%d+ +%d+ +(%d+)%% /\n")
+ local varLibDockerUsedPercent = stdoutBuf:match("\n[^ ]+ +%d+ +%d+ +%d+ +(%d+)%% /var/lib/docker\n")
+ local dataUsedPercent = stdoutBuf:match("\n[^ ]+ +%d+ +%d+ +%d+ +(%d+)%% /data\n")
+ local varLogUsedPercent = stdoutBuf:match("\n[^ ]+ +%d+ +%d+ +%d+ +(%d+)%% /var/log\n")
+   local stmt = getStateDb(app):prepare("INSERT INTO DeviceDfLog("
+      .." \"when\", hostname, eddieName, stderr, stdout,"
+      .." rootPartitionUsedPercent, varLibDockerUsedPercent, varLogUsedPercent, dataUsedPercent"
+      ..")VALUES("
+      .." $when, $hostname, $eddieName, $stderr, $stdout,"
+      .." $rootPartitionUsedPercent, $varLibDockerUsedPercent, $varLogUsedPercent, $dataUsedPercent);")
+ stmt:bind("$when", os.date("!%Y-%m-%dT%H:%M:%SZ"))
+ stmt:bind("$hostname", hostname)
+ stmt:bind("$eddieName", eddieName)
+ stmt:bind("$stderr", stderrBuf)
+ stmt:bind("$stdout", stdoutBuf)
+ stmt:bind("$rootPartitionUsedPercent", rootPartitionUsedPercent)
+ stmt:bind("$varLibDockerUsedPercent", varLibDockerUsedPercent)
+ stmt:bind("$varLogUsedPercent", varLogUsedPercent)
+ stmt:bind("$dataUsedPercent", dataUsedPercent)
+ stmt:execute()
+end
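+
+-- [hedged note] The Lua patterns above expect POSIX 'df' output with one
+-- mount per line, three numeric columns before the percentage, eg:
+--   /dev/sda2  41152736 28937172 10101564  75% /
+-- from which rootPartitionUsedPercent would capture "75" for the "/" mount.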
+
+
+function doWhateverWithDevices( app )
+ for k, dev in pairs(app.devices) do
+ log:write("[INFO ] Inspecting '".. dev.hostname .."' (@ ".. dev.eddieName ..") ...\n")
+ local fookCmd = "true"
+ .." && HOSTNAME=$(hostname|sed 's_.isa.localdomain__')"
+ .." && STAGE=$PAISA_ENV"
+ .." && printf \"remoteHostname=$HOSTNAME, remoteStage=$STAGE\\n\""
+         -- On some machines, df failed with "Stale file handle", but we want
+         -- to continue with the next device regardless of such errors.
+ .." && df || true"
+ local eddieCmd = "true"
+ .." && HOSTNAME=$(hostname|sed 's_.pnet.ch__')"
+ .." && STAGE=$PAISA_ENV"
+ .." && printf \"remoteEddieName=$HOSTNAME, remoteStage=$STAGE\\n\""
+ .." && if test \"${HOSTNAME}\" != \"".. dev.eddieName .."\"; then true"
+ .." && echo wrong host. Want ".. dev.eddieName .." found $HOSTNAME && false"
+ .." ;fi"
+ .." && ssh -oStrictHostKeyChecking=no -oUserKnownHostsFile=/dev/null"
+ .." -p".. app.sshPort .." ".. app.sshUser .."@".. ((dev.type == "FOOK")and"fook"or dev.hostname)
+ .." \\\n --"
+ .." sh -c 'true && ".. fookCmd:gsub("'", "'\"'\"'") .."'"
+ local localCmd = assert(os.getenv("SSH_EXE"), "environ.SSH_EXE missing")
+ .." -oRemoteCommand=none -oStrictHostKeyChecking=no -oUserKnownHostsFile=/dev/null"
+ .." -p".. app.sshPort .." ".. app.sshUser .."@".. dev.eddieName ..""
+ .." \\\n --"
+ .." sh -c 'true && ".. eddieCmd:gsub("'", "'\"'\"'") .."'"
+ -- TODO get rid of this ugly use-tmp-file-as-script workaround
+ local tmpPath = assert(os.getenv("TMP"), "environ.TMP missing"):gsub("\\", "/") .."/b30589uj30oahujotehuj.sh"
+ --log:write("[DEBUG] tmpPath '".. tmpPath .."'\n")
+ local tmpFile = assert(io.open(tmpPath, "wb"), "Failed to open '".. tmpPath .."'")
+ tmpFile:write("#!/bin/sh\n".. localCmd .."\n")
+ tmpFile:close()
+ --log:write("[DEBUG] tmpPath ".. tmpPath .."\n")
+ -- EndOf kludge
+ local cmd = objectSeal{
+ base = false,
+ stdoutBuf = {},
+ stderrBuf = {},
+ }
+ cmd.base = newShellcmd{
+ cls = cmd,
+ cmdLine = "sh \"".. tmpPath .."\"",
+ onStdout = function( buf, cmd ) table.insert(cmd.stdoutBuf, buf or"") end,
+ onStderr = function( buf, cmd ) table.insert(cmd.stderrBuf, buf or"") end,
+ }
+ cmd.base:start()
+ cmd.base:closeSnk()
+ local exit, signal = cmd.base:join(17)
+ cmd.stderrBuf = table.concat(cmd.stderrBuf)
+ cmd.stdoutBuf = table.concat(cmd.stdoutBuf)
+ if exit == 255 and signal == nil then
+ log:write("[DEBUG] fd2: ".. cmd.stderrBuf:gsub("\n", "\n[DEBUG] fd2: "):gsub("\n%[DEBUG%] fd2: $", "") .."\n")
+ goto nextDevice
+ end
+ log:write("[DEBUG] fd1: ".. cmd.stdoutBuf:gsub("\n", "\n[DEBUG] fd1: "):gsub("\n%[DEBUG%] fd1: $", "") .."\n")
+ storeDiskFullResult(app, dev.hostname, dev.eddieName, cmd.stderrBuf, cmd.stdoutBuf)
+ if exit ~= 0 or signal ~= nil then
+ error("exit=".. tostring(exit)..", signal="..tostring(signal))
+ end
+ ::nextDevice::
+ end
+end
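+
+-- [hedged note] The gsub("'", "'\"'\"'") calls above use the usual POSIX-shell
+-- idiom for embedding a single quote inside a single-quoted string: close the
+-- quote, emit a double-quoted ', then reopen the quote. For example:
+--   echo 'it'"'"'s fine'     --> prints: it's fine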
+
+
+function sortDevicesMostRecentlySeenFirst( app )
+ table.sort(app.devices, function(a, b) return a.lastSeen > b.lastSeen end)
+end
+
+
+-- Don't want to visit just seen devices over and over again. So drop devices
+-- we've recently seen from our devices-to-visit list.
+function dropDevicesRecentlySeen( app )
+ -- Collect recently seen devices.
+ local devicesToRemove = {}
+   local st = getStateDb(app):prepare("SELECT hostname FROM DeviceDfLog WHERE \"when\" > $thresholdDate")
+   st:bind("$thresholdDate", os.date("!%Y-%m-%dT%H:%M:%SZ", os.time()-42*3600))
+ local rs = st:execute()
+ while rs:next() do
+ local hostname = rs:value(1)
+ devicesToRemove[hostname] = true
+ end
+ -- Remove selected devices
+ local numKeep, numDrop = 0, 0
+ local iD = 0 while true do iD = iD + 1
+ local device = app.devices[iD]
+ if not device then break end
+ if devicesToRemove[device.hostname] then
+ --log:write("[DEBUG] Drop '".. device.hostname .."' (".. device.eddieName ..")\n")
+ numDrop = numDrop + 1
+ app.devices[iD] = app.devices[#app.devices]
+ app.devices[#app.devices] = nil
+ iD = iD - 1
+ else
+ --log:write("[DEBUG] Keep '".. device.hostname .."' (".. device.eddieName ..")\n")
+ numKeep = numKeep + 1
+ end
+ end
+   log:write("[INFO ] Of "..(numKeep+numDrop).." devices from state, visiting ".. numKeep
+      .." and skipping ".. numDrop .." (because seen recently)\n")
+end
+
+
+function fetchDevices( app )
+ local req = objectSeal{
+ base = false,
+ method = "GET",
+ uri = "/houston/vehicle/inventory/v1/info/devices",
+ rspCode = false,
+ rspBody = false,
+ isDone = false,
+ }
+ req.base = app.http:request{
+ cls = req, connectTimeoutMs = 3000,
+ host = app.backendHost, port = app.backendPort,
+ method = req.method, url = req.uri,
+ onRspHdr = function( rspHdr, req )
+ req.rspCode = rspHdr.status
+ if rspHdr.status ~= 200 then
+ log:write(".-----------------------------------------\n")
+ log:write("| ".. req.method .." ".. req.uri .."\n")
+ log:write("| Host: ".. app.backendHost ..":".. app.backendPort .."\n")
+ log:write("+-----------------------------------------\n")
+ log:write("| ".. rspHdr.proto .." ".. rspHdr.status .." ".. rspHdr.phrase .."\n")
+ for i,h in ipairs(rspHdr.headers) do log:write("| ".. h[1] ..": ".. h[2] .."\n") end
+ log:write("| \n")
+ end
+ end,
+ onRspChunk = function( buf, req )
+ if req.rspCode ~= 200 then log:write("| ".. buf:gsub("\n", "\n| ")) return end
+ if buf then
+ if not req.rspBody then req.rspBody = buf
+ else req.rspBody = req.rspBody .. buf end
+ end
+ end,
+ onRspEnd = function( req )
+ if req.rspCode ~= 200 then log:write("\n'-----------------------------------------\n") end
+ req.isDone = true
+ end,
+ }
+ req.base:closeSnk()
+ assert(req.isDone)
+ if req.rspCode ~= 200 then log:write("ERROR: Couldn't fetch devices\n")return end
+ assert(not app.devices)
+ app.devices = {}
+ log:write("[DEBUG] rspBody.len is ".. req.rspBody:len() .."\n")
+ --io.write(req.rspBody)io.write("\n")
+ for iD, device in pairs(parseJSON(req.rspBody).devices) do
+ --print("Wa", iD, device)
+ --for k,v in pairs(device)do print("W",k,v)end
+ -- TODO how to access 'device.type'?
+ local hostname , eddieName , lastSeen
+ = device.hostname:value(), device.eddieName:value(), device.lastSeen:value()
+ local typ
+ if false then
+ elseif hostname:find("^eddie%d%d%d%d%d$") then
+ typ = "EDDIE"
+ elseif hostname:find("^fook%-[a-z0-9]+$") then
+ typ = "FOOK"
+ elseif hostname:find("^lunkwill%-[a-z0-9]+$") then
+ typ = "LUNKWILL"
+ elseif hostname:find("^fook$") then
+ log:write("[WARN ] WTF?!? '"..hostname.."'\n")
+ typ = false
+ else error("TODO_359zh8i3wjho "..hostname) end
+ table.insert(app.devices, objectSeal{
+ hostname = hostname,
+ eddieName = eddieName,
+ type = typ,
+ lastSeen = lastSeen,
+ })
+ end
+ log:write("[INFO ] Fetched ".. #app.devices .." devices.\n")
+end
+
+
+function run( app )
+ fetchDevices(app)
+ dropDevicesRecentlySeen(app)
+ --sortDevicesMostRecentlySeenFirst(app)
+ doWhateverWithDevices(app)
+end
+
+
+function main()
+ local app = objectSeal{
+ isHelp = false,
+ backendHost = false,
+ backendPort = false,
+ sshPort = false,
+ sshUser = false,
+ statePath = false,
+ stateDb = false,
+ http = newHttpClient{},
+ devices = false,
+ }
+ if parseArgs(app) ~= 0 then os.exit(1) end
+ if app.isHelp then printHelp() return end
+ run(app)
+end
+
+
+startOrExecute(main)
+
+
diff --git a/src/main/lua/paisa-fleet/RmArtifactBaseDir.lua b/src/main/lua/paisa-fleet/RmArtifactBaseDir.lua
new file mode 100644
index 0000000..949d1fe
--- /dev/null
+++ b/src/main/lua/paisa-fleet/RmArtifactBaseDir.lua
@@ -0,0 +1,381 @@
+
+local SL = require("scriptlee")
+local newHttpClient = SL.newHttpClient
+local newShellcmd = SL.newShellcmd
+local newSqlite = SL.newSqlite
+local objectSeal = SL.objectSeal
+local parseJSON = SL.parseJSON
+local sleep = SL.posix.sleep
+local startOrExecute = SL.reactor.startOrExecute
+SL = nil
+local log = io.stdout
+
+
+function printHelp()
+ io.write("\n"
+ .." WARN: This is experimental.\n"
+ .." \n"
+ .." Options:\n"
+ .." --backendHost <inaddr> (eg \"localhost\")\n"
+ .." --backendPort <int> (eg 80)\n"
+ .." --backendPath <str> (eg \"/houston\")\n"
+ .." --sshPort <int> (eg 22)\n"
+ .." --sshUser <str> (eg \"eddieuser\")\n"
+ .." --state <path> (eg \"path/to/state\")\n"
+ .." \n"
+ .." --exportLatestStatus\n"
+ .." \n")
+end
+
+
+function parseArgs( app )
+ app.backendPort = 80
+ app.statePath = ":memory:"
+ local iA = 0
+ ::nextArg::
+ iA = iA + 1
+ local arg = _ENV.arg[iA]
+ if not arg then
+ goto verifyResult
+ elseif arg == "--help" then
+ app.isHelp = true return 0
+ elseif arg == "--backendHost" then
+ iA = iA + 1; arg = _ENV.arg[iA]
+ if not arg then log:write("EINVAL: --backendHost needs value\n")return end
+ app.backendHost = arg
+ elseif arg == "--backendPort" then
+ iA = iA + 1; arg = _ENV.arg[iA]
+ if not arg then log:write("EINVAL: --backendPort needs value\n")return end
+      app.backendPort = arg
+ elseif arg == "--backendPath" then
+ iA = iA + 1; arg = _ENV.arg[iA]
+ if not arg then log:write("EINVAL: --backendPath needs value\n")return end
+ app.backendPath = arg
+ elseif arg == "--sshPort" then
+ iA = iA + 1; arg = _ENV.arg[iA]
+ if not arg then log:write("EINVAL: --sshPort needs value\n")return end
+ app.sshPort = arg
+ elseif arg == "--sshUser" then
+ iA = iA + 1; arg = _ENV.arg[iA]
+ if not arg then log:write("EINVAL: --sshUser needs value\n")return end
+ app.sshUser = arg
+ elseif arg == "--state" then
+ iA = iA + 1; arg = _ENV.arg[iA]
+ if not arg then log:write("EINVAL: --state needs value\n")return end
+ app.statePath = arg
+ elseif arg == "--exportLatestStatus" then
+ app.exportLatestStatus = true
+ else
+ log:write("EINVAL: ".. arg .."\n")return
+ end
+ goto nextArg
+ ::verifyResult::
+ if app.exportLatestStatus then
+ if not app.statePath then log:write("EINVAL: --state missing\n")return end
+ else
+ if not app.backendHost then log:write("EINVAL: --backendHost missing\n")return end
+ if not app.backendPath then log:write("EINVAL: --backendPath missing\n")return end
+ if app.backendPath:find("^C:.") then log:write("[WARN ] MSYS_NO_PATHCONV=1 likely missing? ".. app.backendPath.."\n") end
+ end
+ return 0
+end
+
+
+function removeCompletedEddies( app )
+ local db = getStateDb(app)
+ local rs = db:prepare("SELECT eddieName FROM Eddie"
+ .." JOIN EddieLog ON Eddie.id = eddieId"
+      .." WHERE status = 'OK';"):execute()
+ local eddieNamesToRemoveSet = {}
+ while rs:next() do
+ assert(rs:type(1) == "TEXT", rs:type(1))
+ assert(rs:name(1) == "eddieName", rs:name(1))
+ local eddieName = rs:value(1)
+ eddieNamesToRemoveSet[eddieName] = true
+ end
+ local oldEddies = app.eddies
+ app.eddies = {}
+ local numKeep, numDrop = 0, 0
+ for _, eddie in pairs(oldEddies) do
+ if not eddieNamesToRemoveSet[eddie.eddieName] then
+ --log:write("[DEBUG] Keep '".. eddie.eddieName .."'\n")
+ numKeep = numKeep + 1
+ table.insert(app.eddies, eddie)
+ else
+ numDrop = numDrop + 1
+ --log:write("[DEBUG] Drop '".. eddie.eddieName .."': Already done\n")
+ end
+ end
+ log:write("[DEBUG] todo: ".. numKeep ..", done: ".. numDrop .."\n")
+end
+
+
+function setEddieStatus( app, statusStr, eddieName, stderrStr, stdoutStr )
+ assert(type(app) == "table")
+ assert(type(eddieName) == "string")
+ assert(statusStr == "OK" or statusStr == "ERROR")
+ log:write("[DEBUG] setEddieStatus(".. eddieName ..", ".. statusStr ..")\n")
+ local db = getStateDb(app)
+ local stmt = db:prepare("INSERT INTO Eddie(eddieName)VALUES($eddieName);")
+ stmt:bind("$eddieName", eddieName)
+ local ok, emsg = xpcall(function()
+ stmt:execute()
+ end, debug.traceback)
+ if not ok and not emsg:find("UNIQUE constraint failed: Eddie.eddieName") then
+ error(emsg)
+ end
+ local stmt = db:prepare("INSERT INTO EddieLog('when',eddieId,status,stderr,stdout)"
+ .."VALUES($when, (SELECT rowid FROM Eddie WHERE eddieName = $eddieName), $status, $stderr, $stdout)")
+ stmt:reset()
+ stmt:bind("$when", os.date("!%Y-%m-%dT%H:%M:%S+00:00"))
+ stmt:bind("$eddieName", eddieName)
+ stmt:bind("$status", statusStr)
+ stmt:bind("$stderr", stderrStr)
+ stmt:bind("$stdout", stdoutStr)
+ stmt:execute()
+end
+
+
+function getStateDb( app )
+ if not app.stateDb then
+ app.stateDb = newSqlite{ database = app.statePath }
+ app.stateDb:prepare("CREATE TABLE IF NOT EXISTS Eddie(\n"
+ .." id INTEGER PRIMARY KEY,\n"
+ .." eddieName TEXT UNIQUE NOT NULL)\n"
+ ..";"):execute()
+ app.stateDb:prepare("CREATE TABLE IF NOT EXISTS EddieLog(\n"
+ .." id INTEGER PRIMARY KEY,\n"
+ .." 'when' TEXT NOT NULL,\n"
+ .." eddieId INT NOT NULL,\n"
+ .." status TEXT, -- OneOf OK, ERROR\n"
+ .." stderr TEXT NOT NULL,\n"
+ .." stdout TEXT NOT NULL)\n"
+ ..";\n"):execute()
+ end
+ return app.stateDb
+end
+
+
+function loadEddies( app )
+ local httpClient = newHttpClient{}
+ local req = objectSeal{
+ base = false,
+ method = "GET",
+ path = app.backendPath .."/data/preflux/inventory",
+ rspCode = false,
+ rspBody = false,
+ isDone = false,
+ }
+ req.base = httpClient:request{
+ cls = req,
+ host = app.backendHost, port = app.backendPort,
+ method = req.method, url = req.path,
+ onRspHdr = function( rspHdr, req )
+ req.rspCode = rspHdr.status
+ if rspHdr.status ~= 200 then
+ log:write(".-----------------------------------------\n")
+ log:write("| ".. req.method .." ".. req.path .."\n")
+ log:write("| Host: ".. app.backendHost ..":".. app.backendPort .."\n")
+ log:write("+-----------------------------------------\n")
+ log:write("| ".. rspHdr.proto .." ".. rspHdr.status .." ".. rspHdr.phrase .."\n")
+ for i,h in ipairs(rspHdr.headers) do
+ log:write("| ".. h[1] ..": ".. h[2] .."\n")
+ end
+ log:write("| \n")
+ end
+ end,
+ onRspChunk = function( buf, req )
+ if req.rspCode ~= 200 then log:write("| ".. buf:gsub("\n", "\n| ")) return end
+ if buf then
+ if not req.rspBody then req.rspBody = buf
+ else req.rspBody = req.rspBody .. buf end
+ end
+ end,
+ onRspEnd = function( req )
+ if req.rspCode ~= 200 then log:write("\n'-----------------------------------------\n") end
+ req.isDone = true
+ end,
+ }
+ req.base:closeSnk()
+ assert(req.isDone)
+ if req.rspCode ~= 200 then log:write("ERROR: Couldn't load eddies\n")return end
+ local prefluxInventory = parseJSON(req.rspBody)
+ local eddies = {}
+ for eddieName, detail in pairs(prefluxInventory.hosts) do
+ table.insert(eddies, objectSeal{
+ eddieName = eddieName,
+ lastSeen = detail.lastSeen:value(),
+ })
+ end
+ app.eddies = eddies
+end
+
+
+function makeWhateverWithEddies( app )
+ local ssh = "C:/Users/fankhauseand/.opt/gitPortable-2.27.0-x64/usr/bin/ssh.exe"
+ local cmdLinePre = ssh .." -oConnectTimeout=3 -oRemoteCommand=none"
+ if app.sshPort then cmdLinePre = cmdLinePre .." -p".. app.sshPort end
+ if app.sshUser then cmdLinePre = cmdLinePre .." \"-oUser=".. app.sshUser .."\"" end
+ for k,eddie in pairs(app.eddies) do
+ local eddieName = eddie.eddieName
+ local isEddie = eddieName:find("^eddie%d%d%d%d%d$")
+ local isTeddie = eddieName:find("^teddie%d%d$")
+ local isVted = eddieName:find("^vted%d%d$")
+ local isAws = eddieName:find("^10.117.%d+.%d+$")
+ local isDevMachine = eddieName:find("^w00[a-z0-9][a-z0-9][a-z0-9]$")
+ if isAws or isDevMachine or isVted then
+ log:write("[DEBUG] Skip \"".. eddieName .."\"\n")
+ goto nextEddie
+ end
+ assert(isEddie or isTeddie, eddieName or"nil")
+ local okMarker = "OK_".. math.random(10000000, 99999999) .."wCAkgQQA2AJAzAIA"
+ local cmdLine = cmdLinePre .." ".. eddieName
+ .." -- \"true"
+         .. " && if test \"".. eddieName .."\" != \"$(hostname|sed 's,.pnet.ch$,,')\"; then true"
+ .. " && echo WrongHost expected=".. eddieName .." actual=$(hostname|sed 's,.pnet.ch$,,') && false"
+ .. " ;fi"
+ .. " && echo hostname=$(hostname|sed 's,.pnet.ch,,')"
+ .. " && echo stage=${PAISA_ENV:?}"
+ .. " && echo Scan /data/instances/default/??ARTIFACT_BASE_DIR?"
+ --[[report only]]
+ --.. " && test -e /data/instances/default/??ARTIFACT_BASE_DIR? && ls -Ahl /data/instances/default/??ARTIFACT_BASE_DIR?"
+ --[[Find un-/affected eddies]]
+ .. " && if test -e /data/instances/default/??ARTIFACT_BASE_DIR?; then true"
+ .. " ;else true"
+ .. " && echo ".. okMarker
+ .. " ;fi"
+ --[[DELETE them]]
+ --.. " && if test -e /data/instances/default/??ARTIFACT_BASE_DIR?; then true"
+ --.. " && find /data/instances/default/??ARTIFACT_BASE_DIR? -type d -mtime +420 -print -delete"
+ --.. " ;fi"
+ --.. " && echo ".. okMarker ..""
+ --[[]]
+ .. " \""
+ log:write("\n")
+ log:write("[INFO ] Try ".. eddieName .." ...\n")
+ log:write("[DEBUG] ".. cmdLine.."\n")
+ --log:write("[DEBUG] sleep ...\n")sleep(3)
+ local isStdioDone, isSuccess, stderrStr, stdoutStr = false, false, "", ""
+ local cmd = newShellcmd{
+ cmdLine = cmdLine,
+ onStdout = function( buf )
+ if buf then
+ if buf:find("\n"..okMarker.."\n",0,true) then isSuccess = true end
+ stdoutStr = stdoutStr .. buf
+ io.stdout:write(buf)
+ else isStdioDone = true end
+ end,
+ onStderr = function( buf )
+ stderrStr = buf and stderrStr .. buf or stderrStr
+ io.stderr:write(buf or"")
+ end,
+ }
+ cmd:start()
+ cmd:closeSnk()
+ local exitCode, signal = cmd:join(42)
+      if exitCode ~= 0 or signal ~= nil then
+ log:write("[WARN ] code="..tostring(exitCode)..", signal="..tostring(signal).."\n")
+ end
+ while not isStdioDone do sleep(0.042) end
+ -- Analyze outcome
+ if not isSuccess then
+ setEddieStatus(app, "ERROR", eddieName, stderrStr, stdoutStr)
+ goto nextEddie
+ end
+ setEddieStatus(app, "OK", eddieName, stderrStr, stdoutStr)
+ ::nextEddie::
+ end
+end
+
+
+function sortEddiesMostRecentlySeenFirst( app )
+ table.sort(app.eddies, function(a, b) return a.lastSeen > b.lastSeen end)
+end
+
+
+function quoteCsvVal( v )
+ local typ = type(v)
+ if false then
+ elseif typ == "string" then
+      if v:find("[\";\r\n]") then -- also quote values containing the ';' separator
+ v = '"'.. v:gsub('"', '""') ..'"'
+ end
+ else error("TODO_a928rzuga98oirh "..typ)end
+ return v
+end
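+
+-- [hedged examples] How the quoting rule above behaves, with ';' being the
+-- CSV separator used by exportLatestStatus:
+--   quoteCsvVal('plain')      --> plain
+--   quoteCsvVal('a;b')        --> "a;b"
+--   quoteCsvVal('say "hi"')   --> "say ""hi"""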
+
+
+function exportLatestStatus( app )
+ local snk = io.stdout
+ local db = getStateDb(app)
+ local stmt = db:prepare("SELECT \"when\",eddieName,status,stderr,stdout FROM EddieLog"
+ .." JOIN Eddie ON Eddie.id = eddieId"
+ .." ORDER BY eddieId,[when]"
+ .." ;")
+   local rs = stmt:execute()
+ snk:write("c;when;eddieName;status;stderr;stdout\n")
+ local prevWhen, prevEddieName, prevStatus, prevStderr, prevStdout
+ local qt = quoteCsvVal
+ while rs:next() do
+ local when , eddieName , status , stderr , stdout
+ = rs:value(1), rs:value(2), rs:value(3), rs:value(4), rs:value(5)
+ --log:write("[DEBUG] "..tostring(when).." "..tostring(eddieName).." "..tostring(status).."\n")
+ assert(when and eddieName and status and stderr and stdout)
+ if eddieName == prevEddieName then
+ if not prevWhen or when > prevWhen then
+ --log:write("[DEBUG] ".. when .." ".. eddieName .." take\n")
+ goto assignPrevThenNextEntry
+ else
+ --log:write("[DEBUG] ".. when .." ".. eddieName .." obsolete\n")
+ goto nextEntry
+ end
+      elseif prevEddieName then
+         --log:write("[DEBUG] ".. when .." ".. eddieName .." Eddie complete\n")
+         -- Emit the newest entry of the just-completed eddie (prev*), not the
+         -- current row, which already belongs to the next eddie.
+         snk:write("r;".. qt(prevWhen) ..";".. qt(prevEddieName) ..";".. qt(prevStatus) ..";".. qt(prevStderr) ..";".. qt(prevStdout) .."\n")
+      else
+         --log:write("[DEBUG] ".. when .." ".. eddieName .." Another eddie\n")
+         goto assignPrevThenNextEntry
+      end
+      ::assignPrevThenNextEntry::
+      --[[]] prevWhen, prevEddieName, prevStatus, prevStderr, prevStdout
+          =  when ,     eddieName ,   status ,    stderr ,    stdout
+      ::nextEntry::
+   end
+   -- Also flush the very last eddie, which the loop above never emits.
+   if prevEddieName then
+      snk:write("r;".. qt(prevWhen) ..";".. qt(prevEddieName) ..";".. qt(prevStatus) ..";".. qt(prevStderr) ..";".. qt(prevStdout) .."\n")
+   end
+   snk:write("t;status;OK\n")
+end
+
+
+function run( app )
+ if app.exportLatestStatus then
+ exportLatestStatus(app)
+ return
+ end
+ loadEddies(app)
+ assert(app.eddies)
+ removeCompletedEddies(app)
+ sortEddiesMostRecentlySeenFirst(app)
+ makeWhateverWithEddies(app)
+end
+
+
+function main()
+ local app = objectSeal{
+ isHelp = false,
+ backendHost = false,
+ backendPort = false,
+ backendPath = false,
+ sshPort = false,
+ sshUser = false,
+ statePath = false,
+ stateDb = false,
+ exportLatestStatus = false,
+ eddies = false,
+ }
+ if parseArgs(app) ~= 0 then os.exit(1) end
+ if app.isHelp then printHelp() return end
+ run(app)
+end
+
+
+startOrExecute(main)
+
diff --git a/src/main/lua/paisa-jvm-memLeak/LogStatistics.lua b/src/main/lua/paisa-jvm-memLeak/LogStatistics.lua
new file mode 100644
index 0000000..cbd84b2
--- /dev/null
+++ b/src/main/lua/paisa-jvm-memLeak/LogStatistics.lua
@@ -0,0 +1,112 @@
+
+local newLogParser = require("PaisaLogParser").newLogParser
+
+local inn, out, log = io.stdin, io.stdout, io.stderr
+
+local main, printHelp, parseArgs, run, onLogEntry, printStats
+
+
+function printHelp( app )
+ io.stdout:write(" \n"
+ .." TODO write help page\n"
+ .." \n")
+end
+
+
+function parseArgs( app )
+ local arg = _ENV.arg[1]
+ if arg == "--help" then app.isHelp = true return 0 end
+ if arg ~= "--yolo" then log:write("EINVAL\n")return end
+ return 0
+end
+
+
+function onLogEntry( entry, app )
+ local isTheEntryWeReSearching = false
+ -- HOT!
+ --or (entry.file == "ContextImpl" and entry.msg:find("IllegalStateException: null"))
+ -- HOT!
+ or (entry.file == "HttpHeaderUtil" and entry.msg:find("Keep.Alive. values do not match timeout.42 .. timeout.120 for request "))
+ -- HOT!
+ --or (entry.msg:find("timetable"))
+ -- nope
+ --or (entry.file == "ContextImpl" and entry.msg:find("IllegalStateException: You must set the Content%-Length header"))
+ -- nope
+ --or (entry.file == "LocalHttpServerResponse" and entry.msg:find("non-proper HttpServerResponse occured", 0, true))
+ -- TODO
+ local instantKey = entry.date
+ local instant = app.instants[instantKey]
+ if not instant then
+ instant = {
+ date = entry.date,
+ count = 0,
+ }
+ app.instants[instantKey] = instant
+ end
+ if isTheEntryWeReSearching then
+ instant.count = instant.count + 1
+ end
+end
+
+
+function printStats( app )
+ -- Arrange data
+ local numGroups = 0
+ local groupSet = {}
+ local countMax = 1
+ for date, instant in pairs(app.instants) do
+ assert(date == instant.date)
+ local key = date:sub(1, 15)
+ local group = groupSet[key]
+ if not group then
+ numGroups = numGroups + 1
+ group = { key = key, date = date, count = 0, }
+ groupSet[key] = group
+ end
+ group.count = group.count + instant.count
+ if countMax < group.count then countMax = group.count end
+ end
+ local groupArr = {}
+ for _, group in pairs(groupSet) do
+ table.insert(groupArr, group)
+ end
+ table.sort(groupArr, function( a, b )return a.key < b.key end)
+ -- Plot
+ out:write("\n")
+   out:write(string.format(" Split into %9d groups\n", numGroups))
+   out:write(string.format(" Peak value %9d log entries\n", countMax))
+ out:write("\n")
+ local fullBar = "xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx"
+ for _, group in pairs(groupArr) do
+ out:write(string.format("%s... |", group.key))
+ local len = math.floor(group.count / countMax * fullBar:len())
+ out:write(fullBar:sub(1, len))
+ out:write("\n")
+ end
+end
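+
+-- [hedged illustration, values invented] group.key is date:sub(1,15), so each
+-- row covers a 10-minute bucket and the plot comes out shaped like:
+--   2024-01-05 13:3... |xxxxxxxxxxxxxxxxx
+--   2024-01-05 13:4... |xxxx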
+
+
+function run( app )
+ app.logParser = newLogParser{
+ cls = app,
+ patternV1 = "DATE STAGE SERVICE LEVEL FILE - MSG",
+ onLogEntry = onLogEntry,
+ }
+ app.logParser:tryParseLogs()
+ printStats(app)
+end
+
+
+function main()
+ local app = {
+ isHelp = false,
+ logParser = false,
+ instants = {},
+ }
+ if parseArgs(app) ~= 0 then os.exit(1) end
+ if app.isHelp then printHelp() return end
+ run(app)
+end
+
+
+main()
diff --git a/src/main/lua/paisa-jvm-memLeak/MemLeakTry1.lua b/src/main/lua/paisa-jvm-memLeak/MemLeakTry1.lua
new file mode 100644
index 0000000..b17c00f
--- /dev/null
+++ b/src/main/lua/paisa-jvm-memLeak/MemLeakTry1.lua
@@ -0,0 +1,235 @@
+
+local inn, out, log = io.stdin, io.stdout, io.stderr
+local main, parseArgs, printHelp, run, runAsPipe, runWithStdinFilelist
+
+
+function printHelp()
+ io.stdout:write(" \n"
+ .." Try to get some useful data out of a 'smap' dump.\n"
+ .." \n"
+ .." Options:\n"
+ .." \n"
+ .." --yolo\n"
+ .." WARN: Only use if you know what you do.\n"
+ .." \n"
+ .." --stdin-filelist\n"
+      .." Read LF-separated file list from stdin.\n"
+ .." \n")
+end
+
+
+function parseArgs( app )
+ if #_ENV.arg == 0 then log:write("EINVAL: Try --help\n") return end
+ app.isHelp = false
+ local isYolo = false
+ local iA = 0
+ while true do iA = iA + 1
+ local arg = _ENV.arg[iA]
+ if not arg then
+ break
+ elseif arg == "--help" then
+ app.isHelp = true; return 0
+ elseif arg == "--yolo" then
+ isYolo = true
+ elseif arg == "--date" then
+ iA = iA + 1
+ app.dateStr = _ENV.arg[iA]
+ if not app.dateStr then log:write("EINVAL: --date needs value\n") return end
+ elseif arg == "--stdin-filelist" then
+ app.isStdinFilelist = true
+ else
+ log:write("EINVAL: ".. arg .."\n") return
+ end
+ end
+ return 0
+end
+
+
+function runAsPipe( app )
+ local iLine = 0
+   -- NOTE: '#' is always 0 for string-keyed tables, so probe with next().
+   local isFilteringEnabled = (next(app.whitelist) ~= nil)
+   if isFilteringEnabled then
+      log:write("[INFO ] Filtering enabled\n")
+   end
+ local isHdrWritten = false
+ while true do
+ iLine = iLine + 1
+      local buf = inn:read("l")
+      if not buf then break end -- EOF guard, in case the 'total' line is missing
+      if iLine == 1 then goto nextLine end
+ --log:write("BUF: ".. buf .."\n")
+ local addr, sz, perm, note = buf:match("^([%w]+) +(%d+[A-Za-z]?) ([^ ]+) +(.*)$")
+ if not sz and buf:find("^ +total +%d+[KMGTPE]$") then break end
+ if not sz then log:write("BUF: '"..tostring(buf).."'\n")error("TODO_20231103111415") end
+ if sz:find("K$") then sz = sz:gsub("K$", "") * 1024 end
+      if isFilteringEnabled and not app.whitelist[addr] then goto nextLine end
+ if not isHdrWritten then
+ isHdrWritten = true
+ out:write("c; Addr ; Size ; Perm ; Note ; arg.date\n")
+ end
+ out:write(string.format("r; %s ; %12d ; %s ; %-12s ; %s\n", addr, sz, perm, note, (app.dateStr or"")))
+ ::nextLine::
+ end
+end
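+
+-- [hedged note] runAsPipe expects pmap/smap-style input: the first line is
+-- the process header (skipped), data rows look like
+--   00007f1234567000    1024K rw--- [ anon ]
+-- and the dump ends with a " total 123456K" summary line.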
+
+
+function debugPrintRecursive( out, obj, prefix, isSubCall )
+ local typ = type(obj)
+ if false then
+ elseif typ == "string" then
+ out:write("\"") out:write((obj:gsub("\n", "\\n"):gsub("\r", "\\r"))) out:write("\"")
+ elseif typ == "number" then
+ out:write(obj)
+ elseif typ == "nil" then
+ out:write("nil")
+ elseif typ == "table" then
+ local subPrefix = (prefix)and(prefix.." ")or(" ")
+ for k, v in pairs(obj) do
+ out:write("\n") out:write(prefix or "")
+ debugPrintRecursive(out, k, prefix, true) out:write(": ")
+ debugPrintRecursive(out, v, subPrefix, true)
+ end
+ else
+ error(tostring(typ))
+ end
+ if not isSubCall then out:write("\n")end
+end
+
+
+function runWithStdinFilelist( app )
+ while true do
+ local srcFilePath = inn:read("l")
+ if not srcFilePath then break end
+ --log:write("[DEBUG] src file \"".. srcFilePath .."\"\n")
+      local srcFile = io.open(srcFilePath, "rb")
+      if not srcFile then error("fopen(\""..tostring(srcFilePath).."\")") end
+      collectData(app, srcFile, srcFilePath)
+      srcFile:close()
+ end
+ removeUnchanged(app)
+ printResult(app)
+end
+
+
+function collectData( app, src, timestamp )
+ assert(src)
+ assert(timestamp)
+ local iLine = 0
+ while true do
+ iLine = iLine + 1
+      local buf = src:read("l")
+      if not buf then break end -- EOF guard, in case the 'total' line is missing
+      if iLine == 1 then goto nextLine end
+ local addr, sz, perm, note = buf:match("^([%w]+) +(%d+[A-Za-z]?) ([^ ]+) +(.*)$")
+ if not sz and buf:find("^ +total +%d+[A-Za-z]?\r?$") then break end
+ if not sz then log:write("[ERROR] BUF: '"..tostring(buf).."'\n")error("TODO_20231103111415") end
+ if sz:find("K$") then sz = sz:gsub("K$", "") * 1024 end
+ local addrObj = app.addrs[addr]
+ if not addrObj then
+ addrObj = { measures = {} }
+ app.addrs[addr] = addrObj
+ end
+ local measure = { ts = timestamp, sz = sz, }
+ assert(not addrObj.measures[timestamp])
+ addrObj.measures[timestamp] = measure
+ ::nextLine::
+ end
+end
+
+
+function removeUnchanged( app )
+ local addrsWhichHaveChanged = {}
+ local knownSizes = {}
+ for addr, addrObj in pairs(app.addrs) do
+ for ts, measure in pairs(addrObj.measures) do
+ local knownSizeKey = assert(addr)
+ local knownSize = knownSizes[knownSizeKey]
+ if not knownSize then
+ knownSize = measure.sz;
+ knownSizes[knownSizeKey] = knownSize
+ elseif knownSize ~= measure.sz then
+ addrsWhichHaveChanged[addr] = true
+ end
+ end
+ end
+ local newAddrs = {}
+ for addr, addrObj in pairs(app.addrs) do
+ if addrsWhichHaveChanged[addr] then
+ newAddrs[addr] = addrObj
+ end
+ end
+ app.addrs = newAddrs
+end
+
+
+function printResult( app )
+ -- arrange data
+ local addrSet, tsSet, szByAddrAndTs = {}, {}, {}
+ for addr, addrObj in pairs(app.addrs) do
+ local measures = assert(addrObj.measures)
+ addrSet[addr] = true
+ for ts, measure in pairs(measures) do
+ assert(ts == measure.ts)
+ local sz = measure.sz
+ tsSet[ts] = true
+ szByAddrAndTs[addr.."\0"..ts] = sz
+ end
+ end
+ local addrArr, tsArr = {}, {}
+ for k,v in pairs(addrSet)do table.insert(addrArr, k) end
+ for k,v in pairs(tsSet)do table.insert(tsArr, k) end
+ table.sort(addrArr, function( a, b )return a < b end)
+ table.sort(tsArr, function( a, b )return a < b end)
+ --
+ out:write("c;file")
+ for _, addr in ipairs(addrArr) do out:write(";".. addr) end
+ out:write("\n")
+ for iTs, ts in ipairs(tsArr) do
+ out:write("r;".. filterTsForOutput(app, ts))
+ for iAddr, addr in ipairs(addrArr) do
+         local sz = szByAddrAndTs[assert(addr).."\0"..assert(ts)]
+         out:write(";".. (sz or "")) -- an addr may have no measurement at this ts
+ end
+ out:write("\n")
+ end
+end
+
+
+function filterTsForOutput( app, ts )
+ local y, mnth, d, h, min, sec = ts:match("^houston%-prod%-pmap%-(%d%d%d%d)(%d%d)(%d%d)%-(%d%d)(%d%d)(%d%d).txt$")
+ return "".. os.time{ year=y, month=mnth, day=d, hour=h, min=min, sec=sec, }
+end
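+
+-- [hedged example] filterTsForOutput turns the dump file name into a unix
+-- epoch for the CSV (local timezone applies), eg:
+--   houston-prod-pmap-20231103-111415.txt  -->  os.time for 2023-11-03 11:14:15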
+
+
+function sortedFromMap( map, smallerPredicate )
+ if not smallerPredicate then smallerPredicate = function(a,b)return a.key < b.key end end
+ local arr = {}
+ for k, v in pairs(map) do table.insert(arr, {key=k, val=v}) end
+ table.sort(arr, smallerPredicate)
+ return arr
+end
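+
+-- [hedged usage example] sortedFromMap is currently unused in this file:
+--   for _, e in ipairs(sortedFromMap{ b=2, a=1 }) do print(e.key, e.val) end
+--   --> a  1
+--   --> b  2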
+
+
+function run( app )
+ if app.isStdinFilelist then
+ runWithStdinFilelist(app)
+ else
+ runAsPipe(app)
+ end
+end
+
+
+function main()
+ local app = {
+ isHelp = false,
+ isStdinFilelist = false,
+ addrs = {},
+ whitelist = {
+ --["00000000DEADBEAF"] = true,
+ }
+ }
+ if parseArgs(app) ~= 0 then os.exit(1) end
+ if app.isHelp then printHelp() return end
+ run(app)
+end
+
+
+main()
diff --git a/src/main/lua/paisa-logs/DigHoustonLogs.lua b/src/main/lua/paisa-logs/DigHoustonLogs.lua
new file mode 100644
index 0000000..92ef035
--- /dev/null
+++ b/src/main/lua/paisa-logs/DigHoustonLogs.lua
@@ -0,0 +1,252 @@
+#!/usr/bin/env lua
+--[====================================================================[
+
+ projDir='/c/path/to/proj/root'
+ export LUA_PATH="${projDir:?}/src/main/lua/paisa-logs/?.lua"
+ lua -W "${projDir:?}/src/main/lua/paisa-logs/DigHoustonLogs.lua"
+
+ ]====================================================================]
+
+local PaisaLogParser = require("PaisaLogParser")
+local normalizeIsoDateTime = require("PaisaLogParser").normalizeIsoDateTime
+local LOGDBG = function(msg)io.stderr:write(msg)end
+
+local main, onLogEntry, isWorthToPrint, loadFilters, initFilters
+
+
+function main()
+ local that = {
+ logPattern = "DATE STAGE SERVICE LEVEL FILE - MSG", -- Since 2021-09-24 on prod
+ printRaw = true,
+ filters = false,
+ }
+ loadFilters(that)
+ initFilters(that)
+ local parser = PaisaLogParser.newLogParser({
+ cls = that,
+ patternV1 = that.logPattern,
+ onLogEntry = onLogEntry,
+ })
+ parser:tryParseLogs();
+end
+
+
+function loadFilters( that )
+ assert(not that.filters)
+ that.filters = {
+ -- General: Append new rules AT END if not closely related to another one.
+
+-- { action = "drop", beforeDate = "2024-10-18 03:00:00.000", },
+-- { action = "drop", afterDate = "2024-01-31 23:59:59.999", },
+
+ { action = "drop", level = "TRACE" },
+ { action = "drop", level = "DEBUG" },
+ { action = "drop", level = "INFO" },
+ --{ action = "drop", level = "WARN" },
+
+ -- FUCK those damn nonsense spam logs!!!
+ { action = "drop", file = "Forwarder" },
+ { action = "drop", level = "ERROR", file = "HttpClientRequestImpl" },
+ { action = "drop", level = "ERROR", file = "BisectClient" },
+
+ -- Seen: 2024-04-10 prod.
+ -- Reported 20240410 via "https://github.com/swisspost/vertx-redisques/pull/166"
+ { action = "drop", file = "RedisQues", level = "WARN",
+ msgPattern = "^Registration for queue .- has changed to .-$", },
+
+ -- Reported: SDCISA-13717
+ -- Seen: 2024-01-05 prod, 2023-10-18 prod
+ { action = "drop", file = "LocalHttpServerResponse", level = "ERROR",
+ msgPattern = "^non%-proper HttpServerResponse occured\r?\n"
+ .."java.lang.IllegalStateException:"
+ .." You must set the Content%-Length header to be the total size of the message body BEFORE sending any data if you are not using"
+ .." HTTP chunked encoding.", },
+
+ -- Reported: <none>
+ -- Seen: 2024-01-05 prod, 2023-10-18 prod
+ { action = "drop", file = "ContextImpl", level = "ERROR",
+ msgPattern = "Unhandled exception\n"
+ .."java.lang.IllegalStateException: You must set the Content%-Length header to be the total size of the message body BEFORE sending"
+ .." any data if you are not using HTTP chunked encoding.", },
+
+ -- Seen: 2023-10-18
+   -- Happens all the time as gateleen's error reporting is broken-by-design.
+ { action = "drop", file = "Forwarder", level = "WARN",
+ msgPattern = "^..... ................................ Problem to request /from%-houston/[0-9]+/eagle/nsync/v1/push/trillian%-phonebooks"
+ .."%-affiliated%-planning%-area%-[0-9]+%-vehicles: io.netty.channel.ConnectTimeoutException: connection timed out:"
+ .." eddie[0-9]+.pnet.ch/[0-9]+:7012", },
+ -- Seen: 2023-10-18
+ -- Nearly same as above but on ERROR level instead.
+ { action = "drop", file = "Forwarder", level = "ERROR",
+ msgPattern = "^%%%w+ %x+ http://eddie%d+:7012/from.houston/%d+/eagle/nsync/v1/push/trillian.phonebooks.affiliated.planning.area.%d+.vehicles"
+ .." The timeout period of 30000ms has been exceeded while executing POST /from.houston/%d+/eagle/nsync/v1/push/"
+ .."trillian.phonebooks.affiliated.planning.area.%d+.vehicles for server eddie%d+:7012", },
+ -- Seen: 2023-10-18 prod
+ { action = "drop", file = "Forwarder", level = "ERROR", msgPattern = "^%%%w+ %x+"
+ .." http://localhost:9089/houston/vehicles/%d+/vehicle/backup/v1/executions/%d+/backup.zip The timeout period of 30000ms has been exceeded"
+ .." while executing PUT /houston/vehicles/%d+/vehicle/backup/v1/executions/%d+/backup.zip for server localhost:9089", },
+ -- Seen: 2023-10-18 prod
+ { action = "drop", file = "Forwarder", level = "ERROR", msgPattern = "^%%%w+ %x+"
+ .." http://localhost:9089/houston/vehicles/%d+/vehicle/backup/v1/executions/%d+/backup.zip Timeout$" },
+
+ -- Seen: 2024-04-10 prod, 2023-10-18 prod
+ { action = "drop", file = "ConnectionBase", level = "ERROR", msgEquals = "Connection reset by peer", },
+
+ -- Seen: 2024-04-10 prod, 2023-10-18 prod
+ { action = "drop", file = "EventBusBridgeImpl", level = "ERROR", msgEquals = "SockJSSocket exception\nio.vertx.core.VertxException: Connection was closed", },
+
+ -- Seen: 2024-04-10 prod, 2024-01-05 prod, 2023-10-18 prod
+ -- Reported: TODO link existing issue here
+ { action = "drop", file = "HttpHeaderUtil", level = "ERROR",
+ msgPattern = "Keep%-Alive%} values do not match timeout=42 != timeout=120 for request /googleplex/.*", },
+
+ -- Seen: 2024-01-05 prod
+ -- Reported: <unknown>
+ { action = "drop", file = "Utils", level = "ERROR",
+ msgPattern = "^Exception occurred\njava.lang.Exception: %(TIMEOUT,%-1%) Timed out after waiting 30000%(ms%) for a reply. address: __vertx.reply.%d+, repliedAddress: nsync%-[re]+gister%-sync",
+ stackPattern = "^"
+ .."%s-at org.swisspush.nsync.NSyncHandler.lambda.onPutClientSyncBody.%d+"
+ .."%(NSyncHandler.java:%d+%) ..nsync.-at io.vertx.core.impl.future.FutureImpl.%d+.onFailure%(FutureImpl.java:%d+%)"
+ ..".-"
+ .."Caused by: io.vertx.core.eventbus.ReplyException: Timed out after waiting 30000%(ms%) for a reply."
+ .." address: __vertx.reply.%d+, repliedAddress: nsync%-[re]+gister%-sync"
+ },
+
+   -- WELL_KNOWN: I guess this happens when the vehicle loses its connection. Seen 2023-10-18 prod.
+ { action = "drop", file = "Forwarder", level = "ERROR", msgPattern = "^%%%w+ %x+"
+ .." http://eddie%d+:7012/from.houston/%d+/eagle/vending/accounting/v1/users/%d+/years/%d+/months/%d%d/account Connection was closed$", },
+   -- WELL_KNOWN: I guess this happens when the vehicle loses its connection. Seen 2023-10-18 prod.
+ { action = "drop", file = "Forwarder", level = "ERROR", msgPattern = "^%%%w+ %x+"
+ .." http://eddie%d+:7012/from.houston/%d+/eagle/nsync/v1/push/trillian.phonebooks.affiliated.planning.area.%d+.vehicles Connection was closed$", },
+ -- Seen 2024-01-10 prod
+   -- WELL_KNOWN: I guess this happens when the vehicle loses its connection. Seen 2023-10-18 prod.
+ { action = "drop", file = "Forwarder", level = "ERROR", msgPattern = "^%%%w+ %x+"
+ .." http://eddie%d+:7012/from.houston/%d+/eagle/nsync/v1/query.index The timeout period of 30000ms has been exceeded while executing"
+ .." POST /from.houston/%d+/eagle/nsync/v1/query-index for server eddie%d+:7012$", },
+   -- WELL_KNOWN: I guess this happens when the vehicle loses its connection. Seen 2023-10-18 prod.
+ { action = "drop", file = "Forwarder", level = "ERROR", msgPattern = "^%%%w+ %x+"
+ .." http://eddie%d+:7012/from.houston/%d+/eagle/timetable/notification/v1/planningareas/%d+/notifications/%x+ Connection was closed$", },
+   -- WELL_KNOWN: I guess this happens when the vehicle loses its connection. Seen 2023-10-18 prod.
+ { action = "drop", file = "Forwarder", level = "ERROR", msgPattern = "^%%%w+ %x+ http://eddie%d+:7012/from.houston/%d+/eagle/nsync/v1/push/trillian.phonebooks.affiliated.planning.area.%d+.vehicles Connection reset by peer$", },
+
+ -- Reported: SDCISA-9574
+ -- TODO rm when resolved
+ -- Seen: 2021-09-17 2022-06-20, 2022-08-30 prod,
+ { action = "drop", file = "Utils", level = "ERROR",
+ msgPattern = "%(RECIPIENT_FAILURE,500%) Sync failed.\n{.+}", },
+
+ -- TODO analyze
+ -- Seen 2024-03-20 prod
+ { action = "drop", file = "ContextImpl", level = "ERROR",
+ msgPattern = "^Unhandled exception\njava.lang.IllegalStateException: Response head already sent", },
+
+ -- Seen: 2024-04-10 prod.
+ { action = "drop", level = "ERROR", file = "HttpClientRequestImpl",
+ msgEquals = "Connection reset by peer\njava.io.IOException: Connection reset by peer",
+ stackPattern = "^"
+ .."%s-at sun.nio.ch.FileDispatcherImpl.read0%(.-\n"
+ .."%s-at sun.nio.ch.SocketDispatcher.read%(.-\n"
+ .."%s-at sun.nio.ch.IOUtil.readIntoNativeBuffer%(.-\n"
+ .."%s-at sun.nio.ch.IOUtil.read%(.-\n"
+ .."%s-at sun.nio.ch.IOUtil.read%(.-\n"
+ .."%s-at sun.nio.ch.SocketChannelImpl.read%(.-\n"
+ .."%s-at io.netty.buffer.PooledByteBuf.setBytes%(.-\n"
+ .."%s-at io.netty.buffer.AbstractByteBuf.writeBytes%(.-\n"
+ .."%s-at io.netty.channel.socket.nio.NioSocketChannel.doReadBytes%(.-\n"
+ .."%s-at io.netty.channel.nio.AbstractNioByteChannel.NioByteUnsafe.read%(.-\n"
+ .."%s-at io.netty.channel.nio.NioEventLoop.processSelectedKey%(.-\n"
+ .."%s-at io.netty.channel.nio.NioEventLoop.processSelectedKeysOptimized%(.-\n"
+ .."%s-at io.netty.channel.nio.NioEventLoop.processSelectedKeys%(.-\n"
+ .."%s-at io.netty.channel.nio.NioEventLoop.run%(.-\n"
+ .."%s-at io.netty.util.concurrent.SingleThreadEventExecutor.%d+.run%(.-\n"
+ .."%s-at io.netty.util.internal.ThreadExecutorMap.%d+.run%(.-\n"
+ .."%s-at io.netty.util.concurrent.FastThreadLocalRunnable.run%(.-\n"
+ .."%s-at java.lang.Thread.run%(.-", },
+
+ -- Seen: 2024-04-10 prod.
+ { action = "drop", file = "ContextImpl", level = "ERROR",
+ msgEquals = "Unhandled exception\njava.lang.IllegalStateException: null",
+ stackPattern = "^"
+ ..".-io.vertx.-%.HttpClientResponseImpl.checkEnded%(.-\n"
+ ..".-io.vertx.-%.HttpClientResponseImpl.endHandler%(.-\n"
+ ..".-gateleen.routing.Forwarder.-\n", },
+
+ -- Seen: 2024-04-10 prod.
+ -- TODO get rid of this silly base class.
+ { action = "drop", file = "ContextImpl", level = "ERROR",
+ msgEquals = "Unhandled exception\njava.lang.UnsupportedOperationException: Do override this method to mock expected behaviour.", },
+
+ -- Seen: 2024-04-10 prod.
+ -- TODO get rid of this silly base class.
+ { action = "drop", file = "ContextImpl", level = "ERROR",
+ msgEquals = "Unhandled exception\njava.lang.UnsupportedOperationException: null", },
+
+ }
+end
+
+
+function initFilters( that )
+ for iF = 1, #(that.filters) do
+ local descr = that.filters[iF]
+ local beforeDate = descr.beforeDate and normalizeIsoDateTime(descr.beforeDate)
+ local afterDate = descr.afterDate and normalizeIsoDateTime(descr.afterDate)
+ local file, level, msgPattern, msgEquals = descr.file, descr.level, descr.msgPattern, descr.msgEquals
+ local rawPattern, stackPattern = descr.rawPattern, descr.stackPattern
+ local stackStartsWith = descr.stackStartsWith
+ local filter = { action = descr.action, matches = false, }
+ local hasAnyCondition = (beforeDate or afterDate or file or level or msgPattern or rawPattern or stackPattern or stackStartsWith);
+ if not hasAnyCondition then
+ filter.matches = function( that, log ) --[[LOGDBG("match unconditionally\n")]] return true end
+ else
+ filter.matches = function( that, log )
+ local match, mismatch = true, false
+ if not log.date then log:debugPrint() end
+ if level and level ~= log.level then --[[LOGDBG("level mismatch: \"".. level .."\" != \"".. log.level .."\"\n")]] return mismatch end
+ if file and file ~= log.file then --[[LOGDBG("file mismatch: \"".. file .."\" != \"".. log.file .."\"\n")]] return mismatch end
+ local logDate = normalizeIsoDateTime(log.date)
+ local isBeforeDate = (not beforeDate or logDate < beforeDate);
+ local isAfterDate = (not afterDate or logDate >= afterDate);
+ if not isBeforeDate then --[[LOGDBG("not before: \"".. tostring(beforeDate) .."\", \"".. logDate .."\"\n")]] return mismatch end
+ if not isAfterDate then --[[LOGDBG("not after: \"".. tostring(afterDate) .."\", \"".. logDate .."\"\n")]] return mismatch end
+ if msgEquals and log.msg ~= msgEquals then return mismatch end
+ if stackStartsWith and log.stack and log.stack:sub(1, #stackStartsWith) ~= stackStartsWith then return mismatch end
+ if msgPattern and not log.msg:find(msgPattern) then --[[LOGDBG("match: msgPattern\n")]] return mismatch end
+ if stackPattern and log.stack and not log.stack:find(stackPattern) then return mismatch end
+ if rawPattern and not log.raw:find(rawPattern) then return mismatch end
+ --LOGDBG("DEFAULT match\n")
+ return match
+ end
+ end
+ that.filters[iF] = filter
+ end
+end
+
+
+function onLogEntry( log, that )
+ local isWorthIt = isWorthToPrint(that, log)
+ if isWorthIt then
+ if that.printRaw then
+ print(log.raw)
+ else
+ log:debugPrint()
+ end
+ end
+end
+
+
+function isWorthToPrint( that, log )
+ local pass, drop = true, false
+ for iF = 1, #(that.filters) do
+ local filter = that.filters[iF]
+ if filter.matches(that, log) then
+ if filter.action == "drop" then return drop end
+ if filter.action == "keep" then return pass end
+ error("Unknown filter.action: \"".. filter.action .."\"");
+ end
+ end
+ return pass
+end
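+
+-- [hedged note] Filter semantics above: the first matching rule wins ("drop"
+-- or "keep"), and an entry matching no rule at all is printed.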
+
+
+main()
+
diff --git a/src/main/lua/paisa-logs/PaisaLogParser.lua b/src/main/lua/paisa-logs/PaisaLogParser.lua
new file mode 100644
index 0000000..f6ac0ce
--- /dev/null
+++ b/src/main/lua/paisa-logs/PaisaLogParser.lua
@@ -0,0 +1,435 @@
+
+local exports = {}
+local mod = {}
+local stderr = io.stderr
+
+
+local LogParse = { -- class
+ line = nil,
+ log = nil,
+}
+
+
+function exports.newLogParser( config )
+ return LogParse:new(nil, config )
+end
+
+
+function LogParse:new(o, config)
+ if not config or type(config.onLogEntry) ~= "function" then
+ error( "Arg 'config.onLogEntry' must be a function" )
+ end
+ o = o or {};
+ setmetatable(o, self);
+ self.__index = self;
+   -- Register callbacks (on the instance 'o', not on the shared class table)
+   o.cb_cls = config.cls
+   o.cb_onLogEntry = config.onLogEntry
+   o.cb_onEnd = config.onEnd
+   o.cb_onError = config.onError or function(s)
+      error(s or "nil")
+   end
+   o.cb_onWarn = config.onWarn or function(s)
+      io.stdout:flush()
+      warn(s)
+   end
+   -- END callbacks
+ mod.setupParserPattern( o, config )
+ return o;
+end
+
+
+function mod.setupParserPattern( this, c )
+ local inputPat
+ if c.patternV1 then
+ inputPat = c.patternV1; -- Use the one from parameter.
+ else
+ this.cb_onWarn( "No 'c.patternV1' specified. Fallback to internal obsolete one." )
+ inputPat = "DATE POD STAGE SERVICE THREAD LEVEL FILE - MSG"
+ end
+ local parts = {}
+ for part in string.gmatch(inputPat,"[^ ]+") do
+ table.insert( parts, part )
+ end
+ this.parts = parts
+end
+
+
+local function writeStderr(...)
+ local args = table.pack(...)
+ for i=1,args.n do
+ io.stderr:write( args[i] or "nil" )
+ end
+end
+
+
+function LogParse:tryParseLogs()
+ while true do
+ self.line = io.read("l");
+ if self.line==nil then -- EOF
+ self:publishLogEntry();
+ break;
+ end
+
+ --io.write( "\nBUF: ", self.line, "\n\n" );
+ --io.flush()
+
+ if self.line:match("%d%d%d%d%-%d%d%-%d%d[ T]%d%d:%d%d:%d%d,%d%d%d ") then
+ -- Looks like the beginning of a new log entry.
+ self:initLogEntryFromLine();
+ elseif self.line:match("^%s+at [^ ]") then
+ -- Looks like a line from exception stack
+ self:appendStacktraceLine();
+ elseif self.line:match("^%s*Caused by: ") then
+ -- Looks like a stacktrace 'Caused by' line
+ self:appendStacktraceLine();
+ elseif self.line:match("^%s+Suppressed: ") then
+ -- Looks like a stacktrace 'Suppressed: ' line
+ self:appendStacktraceLine();
+ elseif self.line:match("^%\t... (%d+) more$") then
+ -- Looks like folded stacktrace elements
+ self:appendStacktraceLine();
+ else
+ -- Probably msg containing newlines.
+ self:appendLogMsg();
+ end
+
+ end
+end
+
+
+function LogParse:initLogEntryFromLine()
+ self:publishLogEntry();
+ local log = self:getOrNewLogEntry();
+
+ -- Try some alternative parsers
+ mod.parseByPattern( self )
+ --if log.date==nil then
+ -- self:parseOpenshiftServiceLogLine();
+ --end
+ --if log.date==nil then
+ -- self:parseEagleLogLine();
+ --end
+ --if log.date==nil then
+ -- self:parseJettyServiceLogLine();
+ --end
+
+ if log.date==nil then
+ self.cb_onWarn("Failed to parse log line:\n\n".. self.line .."\n\n", self.cb_cls)
+ end
+end
+
+
+function mod.parseByPattern( this )
+ local date, pod, stage, service, thread, level, file, msg, matchr, match
+ local line = this.line
+ local log = this:getOrNewLogEntry();
+
+ -- We can just return on failure. If parsing failed, 'log.date' stays nil
+ -- and the caller reports the error. So just ensure 'date' is nil here.
+ log.date = nil
+
+ local rdPos = 1
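+ -- Walk the configured parts left to right; each token consumes a slice of
+ -- the line and advances 'rdPos' so the next token continues behind it.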
+ for i,part in ipairs(this.parts) do
+ if part=="DATE" then
+ date = line:gmatch("(%d%d%d%d%-%d%d%-%d%d[ T]%d%d:%d%d:%d%d,%d%d%d) ", rdPos)()
+ if not date or date=="" then return end
+ rdPos = rdPos + date:len()
+ --stderr:write("date: "..tostring(date).." (rdPos="..tostring(rdPos)..")\n")
+ elseif part=="STAGE" then
+ match = line:gmatch( " +[^%s]+", rdPos)()
+ if not match then return end
+ stage = match:gmatch("[^%s]+")()
+ rdPos = rdPos + match:len()
+ --stderr:write("stage: "..tostring(stage).." (rdPos="..tostring(rdPos)..")\n")
+ elseif part=="SERVICE" then
+ match = line:gmatch(" +[^%s]+", rdPos)()
+ if not match then return end
+ service = match:gmatch("[^%s]+")()
+ rdPos = rdPos + match:len()
+ --stderr:write("service: "..tostring(service).." (rdPos="..tostring(rdPos)..")\n");
+ elseif part=="LEVEL" then
+ match = line:gmatch(" +[^%s]+", rdPos)()
+ if not match then return end
+ level = match:gmatch("[^%s]+")()
+ if not level:find("^[ABCDEFGINORTUW]+$") then -- [ABCDEFGINORTUW]+ -> (ERROR|WARN|INFO|DEBUG|TRACE)
+ this.cb_onWarn( "Does not look like a level: "..(level or"nil"), this.cb_cls )
+ end
+ rdPos = rdPos + match:len()
+ --stderr:write("level: "..tostring(level).." (rdPos="..tostring(rdPos)..")\n");
+ elseif part=="FILE" then
+ match = line:gmatch(" +[^%s]+", rdPos)()
+ if not match then return end
+ file = match:gmatch("[^%s]+")()
+ if file=="WARN" then stderr:write("\n"..tostring(line).."\n\n")error("Doesn't look like a file: "..tostring(file)) end
+ rdPos = rdPos + match:len()
+ --stderr:write("file: "..tostring(file).." (rdPos="..tostring(rdPos)..")\n");
+ elseif part=="-" then
+ match = line:gmatch(" +%-", rdPos)()
+ rdPos = rdPos + match:len();
+ --stderr:write("dash (rdPos="..tostring(rdPos)..")\n");
+ elseif part=="MSG" then
+ match = line:gmatch(" +.*$", rdPos)()
+ if not match then return end
+ msg = match:gmatch("[^%s].*$")()
+ rdPos = rdPos + match:len()
+ --stderr:write("msg: "..tostring(msg).." (rdPos="..tostring(rdPos)..")\n")
+ elseif part=="POD" then
+ match = line:gmatch(" +[^%s]+", rdPos)()
+ if not match then return end
+ pod = match:gmatch("[^%s]+")()
+ rdPos = rdPos + match:len()
+ --stderr:write("pod: "..tostring(pod).." (rdPos="..tostring(rdPos)..")\n")
+ elseif part=="THREAD" then
+ match = line:gmatch(" +[^%s]+", rdPos)()
+ thread = match:gmatch("[^%s]+")()
+ rdPos = rdPos + match:len()
+ --stderr:write("thrd: "..tostring(thread).." (rdPos="..tostring(rdPos)..")\n")
+ end
+ end
+
+ log.raw = this.line;
+ log.date = date;
+ log.pod = pod;
+ log.stage = stage;
+ log.service = service;
+ log.thread = thread;
+ log.level = level;
+ log.file = file;
+ log.msg = msg;
+end
+
+
+function LogParse:parseOpenshiftServiceLogLine()
+ local date, pod, stage, service, thread, level, file, msg
+ local this = self
+ local line = this.line
+ local log = self:getOrNewLogEntry();
+
+ -- We can just return on failure. If parsing failed, 'log.date' stays nil
+ -- and the caller reports the error. So just ensure 'date' is nil here.
+ log.date = nil
+
+ -- VERSION 3 (Since 2021-09-24 houstonProd)
+ local rdPos = 1
+ -- Date
+ date = line:gmatch("(%d%d%d%d%-%d%d%-%d%d[ T]%d%d:%d%d:%d%d,%d%d%d)", rdPos)()
+ if not date then return end
+ rdPos = rdPos + date:len()
+ -- Pod
+ pod = line:gmatch(" (%a+)", rdPos )()
+ if not pod then return end
+ rdPos = rdPos + pod:len()
+ -- stage
+ stage = line:gmatch( " (%a+)", rdPos)()
+ if not stage then return end
+ rdPos = rdPos + stage:len()
+ -- service
+ service = line:gmatch( " (%a+)", rdPos)()
+ if not service then return end
+ rdPos = rdPos + service:len()
+ -- thread (this only maybe exists)
+ thread = line:gmatch( " ([%a%d%-]+)", rdPos)()
+ -- [ABCDEFGINORTUW]+ -> (ERROR|WARN|INFO|DEBUG|TRACE)
+ if thread and thread:find("^[ABCDEFGINORTUW]+$") then
+ thread = nil; -- Looks more like a log level, so do NOT advance
+ elseif thread then
+ rdPos = rdPos + thread:len()
+ end
+ -- level
+ level = line:gmatch( " ([A-Z]+)", rdPos)()
+ if not level then return end
+ rdPos = rdPos + level:len()
+ -- file
+ file = line:gmatch(" ([^%s]+)", rdPos)()
+ if not file then return end
+ rdPos = rdPos + file:len()
+ -- msg
+ msg = line:gmatch(" %- (.*)", rdPos)()
+ if not msg then return end
+ rdPos = rdPos + msg:len()
+
+ -- VERSION 2 (Since 2021-09-24 prefluxInt) was line-for-line identical to
+ -- VERSION 3 above, so the commented-out copy is not repeated here.
+
+ log.raw = self.line;
+ log.date = date;
+ log.pod = pod;
+ log.stage = stage;
+ log.service = service;
+ log.thread = thread;
+ log.level = level;
+ log.file = file;
+ log.msg = msg;
+end
+
+
+function LogParse:parseEagleLogLine()
+ local log = self:getOrNewLogEntry();
+ local date, stage, service, level, file, msg = self.line:gmatch(""
+ .."(%d%d%d%d%-%d%d%-%d%d %d%d:%d%d:%d%d,%d%d%d)" -- datetime
+ .." (%a+)" -- stage
+ .." (%a+)" -- service
+ .." (%a+)" -- level
+ .." ([^%s]+)" -- file
+ .." %- (.*)" -- msg
+ )();
+ local pod = service; -- just 'mock' it
+ log.raw = self.line;
+ log.date = date;
+ log.service = service;
+ log.pod = pod;
+ log.stage = stage;
+ log.level = level;
+ log.file = file;
+ log.msg = msg;
+end
+
+
+function LogParse:parseJettyServiceLogLine()
+ local log = self:getOrNewLogEntry();
+ local date, pod, stage, service, level, file, msg = self.line:gmatch(""
+ .."(%d%d%d%d%-%d%d%-%d%d %d%d:%d%d:%d%d,%d%d%d)" -- datetime
+ .." (%S+)" -- pod (aka container)
+ .." (%a+)" -- stage
+ .." (%a+)" -- service
+ .." (%a+)" -- level
+ .." ([^%s]+)" -- file
+ .." %- (.*)" -- msg
+ )();
+ log.raw = self.line;
+ log.date = date;
+ log.pod = pod;
+ log.stage = stage;
+ log.service = service;
+ log.level = level;
+ log.file = file;
+ log.msg = msg;
+end
+
+
+function LogParse:appendLogMsg()
+ local log = self:getOrNewLogEntry()
+ log.msg = log.msg or "";
+ log.raw = log.raw or "";
+
+ log.msg = log.msg .."\n".. self.line;
+ -- Also append to raw to have the complete entry there.
+ log.raw = log.raw .."\n".. self.line;
+end
+
+
+function LogParse:appendStacktraceLine()
+ local log = self:getOrNewLogEntry()
+ if not log.stack then
+ log.stack = self.line
+ else
+ log.stack = log.stack .."\n".. self.line
+ end
+ -- Also append to raw to have the complete entry there.
+ log.raw = (log.raw or "") .."\n".. self.line;
+end
+
+
+function LogParse:publishLogEntry()
+ local log = self.log
+ if not log then
+ return -- nothing to do
+ end
+ if not log.raw then
+ -- WhatTheHeck?!?
+ local msg = "InternalError: Collected log unexpectedly empty"
+ self.cb_onError(msg, self.cb_cls)
+ error(msg); return
+ end
+ self.log = nil; -- Mark as consumed
+ -- Make sure log lines do NOT end in 0x0D
+ local msg = log.msg
+ if msg and msg:byte(msg:len()) == 0x0D then log.msg = msg:sub(1, -2) end
+ self.cb_onLogEntry(log, self.cb_cls)
+end
+
+
+function LogParse:getOrNewLogEntry()
+ self.log = self.log or LogEntry:new(nil)
+ return self.log
+end
+
+
+function exports.normalizeIsoDateTime( str )
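+ -- Accepts 'T', space, '_' or '-' as date/time separator and ',' or '.'
+ -- before the millis, and emits the canonical "yyyy-mm-ddTHH:MM:SS.sss"
+ -- form so normalized timestamps compare correctly as plain strings.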
+ if str:find("%d%d%d%d%-%d%d%-%d%dT%d%d:%d%d:%d%d%.%d%d%d") then return str end
+ local y, mo, d, h, mi, s, ms = str:match("^(%d%d%d%d)-(%d%d)-(%d%d)[ T_-](%d%d):(%d%d):(%d%d)[,.](%d%d%d)$")
+ if not y then error("Unparsable date: "..tostring(str)) end
+ return y .."-".. mo .."-".. d .."T".. h ..":".. mi ..":".. s ..".".. ms
+end
+
+
+LogEntry = { -- class
+ -- fields: raw, date, pod, stage, service, thread, level, file, msg, stack
+}
+
+
+function LogEntry:new(o)
+ o = o or {};
+ setmetatable(o, self);
+ self.__index = self;
+ return o;
+end
+
+
+function LogEntry:debugPrint()
+ print( "+- PUBLISH ------------------------------------------------------------" );
+ print( "| date ---> ", self.date or "nil" );
+ print( "| pod ----> ", self.pod or "nil" );
+ print( "| service > ", self.service or "nil" );
+ print( "| stage --> ", self.stage or "nil" );
+ print( "| thread -> ", self.thread or "nil" );
+ print( "| level --> ", self.level or "nil" );
+ print( "| file ---> ", self.file or "nil" );
+ print( "| msg ----> ", self.msg or "nil" );
+ print( "| " )
+ io.write( "| RAW: ", self.raw or "nil", "\n" );
+ print( "`--------------------" );
+end
+
+
+return exports
+
diff --git a/src/main/lua/pcap/KubeProbeFilter.lua b/src/main/lua/pcap/KubeProbeFilter.lua
new file mode 100644
index 0000000..a5967e9
--- /dev/null
+++ b/src/main/lua/pcap/KubeProbeFilter.lua
@@ -0,0 +1,93 @@
+--
+-- Try to extract kube-probe related requests.
+--
+
+local newPcapParser = assert(require("pcapit").newPcapParser)
+local newPcapDumper = assert(require("pcapit").newPcapDumper)
+
+local out, log = io.stdout, io.stderr
+local main, onPcapFrame, vapourizeUrlVariables
+
+
+function onPcapFrame( app, it )
+ local srcPort, dstPort = it:trspSrcPort(), it:trspDstPort()
+ local userAgent, reqUri
+ --
+ if dstPort ~= 7012 and srcPort ~= 7012 then return end
+ local trspPayload = it:trspPayload()
+ local httpReqLinePart1, httpReqLinePart2, httpReqLinePart3 =
+ trspPayload:match("^([A-Z/1.0]+) ([^ ]+) ([^ \r\n]+)\r?\n")
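+ -- The char class matches both a request method (eg "GET") and "HTTP/1.x",
+ -- so the find() below tells requests apart from response status lines.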
+ if httpReqLinePart1 and not httpReqLinePart1:find("^HTTP/1.%d$") then -- assume HTTP request
+ reqUri = httpReqLinePart2
+ userAgent = trspPayload:match("\n[Uu][Ss][Ee][Rr]%-[Aa][Gg][Ee][Nn][Tt]:%s+([^\r\n]+)\r?\n");
+ if userAgent then
+ --if not userAgent:find("^kube%-probe/") then return end -- assume halfrunt
+ --log:write("User-Agent: ".. userAgent .."\n")
+ end
+ elseif httpReqLinePart1 then -- assume HTTP response
+ --out:write(trspPayload)
+ end
+ local srcIp, dstIp = it:netSrcIpStr(), it:netDstIpStr()
+ local connKey = ((srcPort < dstPort)and(srcPort.."\0"..dstPort)or(dstPort.."\0"..srcPort))
+ .."\0"..((srcIp < dstIp)and(srcIp.."\0"..dstIp)or(dstIp.."\0"..srcIp))
+ local conn = app.connections[connKey]
+ if not conn then conn = {isOfInterest=false, pkgs={}} app.connections[connKey] = conn end
+ conn.isOfInterest = (conn.isOfInterest or reqUri == "/houston/server/info")
+ if not conn.isOfInterest then
+ if #conn.pkgs > 3 then -- Throw away all stuff except TCP handshake
+ conn.pkgs = { conn.pkgs[1], conn.pkgs[2], conn.pkgs[3] }
+ end
+ local sec, usec = it:frameArrivalTime()
+ --for k,v in pairs(getmetatable(it))do print("E",k,v)end
+ local pkg = {
+ sec = assert(sec), usec = assert(usec),
+ caplen = it:frameCaplen(), len = it:frameLen(),
+ tcpFlags = (conn.isOfInterest)and(it:tcpFlags())or false,
+ srcPort = srcPort, dstPort = dstPort,
+ trspPayload = trspPayload,
+ rawFrame = it:rawFrame(),
+ }
+ table.insert(conn.pkgs, pkg)
+ else
+ -- Stop memory hogging. Write that stuff to output
+ if #conn.pkgs > 0 then
+ for _, pkg in ipairs(conn.pkgs) do
+ --out:write(string.format("-- PKG 1 %d->%d %d.%09d tcpFlg=0x%04X\n", pkg.srcPort, pkg.dstPort, pkg.sec, pkg.usec, pkg.tcpFlags or 0))
+ --out:write(pkg.trspPayload)
+ --out:write("\n")
+ app.dumper:dump(pkg.sec, pkg.usec, pkg.caplen, pkg.len, pkg.rawFrame, 1, pkg.rawFrame:len())
+ end
+ conn.pkgs = {}
+ end
+ local tcpFlags = it:tcpFlags()
+ local sec, usec = it:frameArrivalTime()
+ local rawFrame = it:rawFrame()
+ --out:write(string.format("-- PKG 2 %d->%d %d.%09d tcpFlg=0x%04X, len=%d\n", srcPort, dstPort, sec, usec, tcpFlags or 0, trspPayload:len()))
+ --out:write(trspPayload)
+ --if trspPayload:byte(trspPayload:len()) ~= 0x0A then out:write("\n") end
+ --out:write("\n")
+ app.dumper:dump(sec, usec, it:frameCaplen(), it:frameLen(), rawFrame, 1, rawFrame:len())
+ end
+end
+
+
+function main()
+ local app = {
+ parser = false,
+ dumper = false,
+ connections = {},
+ }
+ app.parser = newPcapParser{
+ dumpFilePath = "-",
+ onFrame = function(f)onPcapFrame(app, f)end,
+ }
+ app.dumper = newPcapDumper{
+ dumpFilePath = "C:/work/tmp/KubeProbeFilter.out.pcap",
+ }
+ app.parser:resume()
+end
+
+
+main()
+
+
diff --git a/src/main/lua/pcap/extractDnsHosts.lua b/src/main/lua/pcap/extractDnsHosts.lua
new file mode 100644
index 0000000..655586f
--- /dev/null
+++ b/src/main/lua/pcap/extractDnsHosts.lua
@@ -0,0 +1,147 @@
+
+local newPcapParser = assert(require("pcapit").newPcapParser)
+local out, log = io.stdout, io.stderr
+
+local main, onPcapFrame, extractHostnameFromDns, extractHostnameFromHttpHeaders, addEntry, printResult
+
+
+function main()
+ local app = {
+ parser = false,
+ youngestEpochSec = -math.huge,
+ oldestEpochSec = math.huge,
+ dnsResponses = {},
+ }
+ app.parser = newPcapParser{
+ dumpFilePath = "-",
+ onFrame = function(f)onPcapFrame(app, f)end,
+ }
+ app.parser:resume()
+ printResult(app)
+end
+
+
+function onPcapFrame( app, it )
+ local out = io.stdout
+ local sec, usec = it:frameArrivalTime()
+ sec = sec + (usec/1e6)
+ if sec < app.oldestEpochSec then app.oldestEpochSec = sec end
+ if sec > app.youngestEpochSec then app.youngestEpochSec = sec end
+ --
+ if it:trspSrcPort() == 53 then
+ extractHostnameFromDns(app, it)
+ elseif it:tcpSeqNr() then
+ extractHostnameFromHttpHeaders(app, it)
+ end
+end
+
+
+function extractHostnameFromDns( app, it )
+ local payload = it:trspPayload()
+ local bug = 8 -- TODO: looks like the lib has a bug and the payload is offset by some bytes.
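+ -- Fixed-offset DNS header parse (all fields big-endian): flags at bytes
+ -- 3-4, question count at 5-6, answer count at 7-8, relative to 'bug'.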
+ local dnsFlags = (payload:byte(bug+3) << 8) | (payload:byte(bug+4))
+ if (dnsFlags & 0x0004) ~= 0 then return end -- ignore error responses
+ local numQuestions = payload:byte(bug+5) << 8 | payload:byte(bug+6)
+ local numAnswers = payload:byte(bug+7) << 8 | payload:byte(bug+8)
+ if numQuestions ~= 1 then
+ log:write("[WARN ] numQuestions ".. numQuestions .."?!?\n")
+ return
+ end
+ if numAnswers == 0 then return end -- empty answers are boring
+ if numAnswers ~= 1 then log:write("[WARN ] dns.count.answers ".. numAnswers .." not supported\n") return end
+ local questionsOffset = bug+13
+ local hostname = payload:match("^([^\0]+)", questionsOffset)
+ hostname = hostname:gsub("^[\r\n]", "") -- TODO WTF?!?
+ hostname = hostname:gsub("[\x04\x02]", ".") -- TODO WTF?!?
+ local answersOffset = bug + 13 + (24 * numQuestions)
+ local ttl = payload:byte(answersOffset+6) << 24 | payload:byte(answersOffset+7) << 16
+ | payload:byte(answersOffset+8) << 8 | payload:byte(answersOffset+9)
+ local dataLen = (payload:byte(answersOffset+10) << 8) | payload:byte(answersOffset+11)
+ if dataLen ~= 4 then log:write("[WARN ] dns.resp.len ".. dataLen .." not impl\n") return end
+ local ipv4Str = string.format("%d.%d.%d.%d", payload:byte(answersOffset+12), payload:byte(answersOffset+13),
+ payload:byte(answersOffset+14), payload:byte(answersOffset+15))
+ --
+ addEntry(app, ipv4Str, hostname, ttl)
+end
+
+
+function extractHostnameFromHttpHeaders( app, it )
+ local payload = it:trspPayload()
+ local _, beg = payload:find("^([A-Z]+ [^ \r\n]+ HTTP/1%.%d\r?\n)")
+ if not beg then return end
+ beg = beg + 1
+ local httpHost
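+ -- Scan header lines one at a time until a Host header shows up; the
+ -- 'goto nextHdr' plays the role of a 'continue' statement.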
+ while true do
+ local line
+ local f, t = payload:find("^([^\r\n]+)\r?\n", beg)
+ if not f then return end
+ if payload:byte(f) ~= 0x48 and payload:byte(f) ~= 0x68 then goto nextHdr end -- only 'H'/'h' can start a Host header
+ line = payload:sub(f, t)
+ httpHost = line:match("^[Hh][Oo][Ss][Tt]:%s*([^\r\n]+)\r?\n$")
+ if not httpHost then goto nextHdr end
+ break
+ ::nextHdr::
+ beg = t + 1
+ end
+ httpHost = httpHost:gsub("^(.+):%d+$", "%1")
+ local dstIp = it:netDstIpStr()
+ if dstIp == httpHost then return end
+ addEntry(app, dstIp, httpHost, false, "via http host header")
+end
+
+
+function addEntry( app, ipv4Str, hostname, ttl, kludge )
+ local key
+ --log:write("addEntry(app, ".. ipv4Str ..", ".. hostname ..")\n")
+ if kludge == "via http host header" then
+ key = ipv4Str .."\0".. hostname .."\0".. "via http host header"
+ else
+ key = ipv4Str .."\0".. hostname .."\0".. ttl
+ end
+ local entry = app.dnsResponses[key]
+ if not entry then
+ entry = { ipv4Str = ipv4Str, hostname = hostname, ttl = ttl, }
+ app.dnsResponses[key] = entry
+ end
+end
+
+
+function printResult( app )
+ local sorted = {}
+ for _, stream in pairs(app.dnsResponses) do
+ table.insert(sorted, stream)
+ end
+ table.sort(sorted, function(a, b)
+ if a.ipv4Str < b.ipv4Str then return true end
+ if a.ipv4Str > b.ipv4Str then return false end
+ return a.hostname < b.hostname
+ end)
+ local dumpDurationSec = app.youngestEpochSec - app.oldestEpochSec
+ local timeFmt = "!%Y-%m-%d_%H:%M:%SZ"
+ out:write("\n")
+ out:write(string.format("# Subject Hostname to IP addresses\n"))
+ out:write(string.format("# Begin %s\n", os.date(timeFmt, math.floor(app.oldestEpochSec))))
+ out:write(string.format("# Duration %.3f seconds\n", dumpDurationSec))
+ out:write("\n")
+ --out:write(" .-- KiB per Second\n")
+ --out:write(" | .-- IP endpoints\n")
+ --out:write(" | | .-- TCP server port\n")
+ --out:write(" | | | .-- TCP Payload (less is better)\n")
+ --out:write(" | | | |\n")
+ --out:write(".--+----. .----+----------------------. .+--. .-+------------\n")
+ for i, elem in ipairs(sorted) do
+ local ipv4Str, hostname, ttl = elem.ipv4Str, elem.hostname, elem.ttl
+ if ttl then
+ out:write(string.format("%-14s %-30s # TTL=%ds", ipv4Str, hostname, ttl))
+ else
+ out:write(string.format("%-14s %-30s # ", ipv4Str, hostname))
+ end
+ out:write("\n")
+ end
+ out:write("\n")
+end
+
+
+main()
+
+
diff --git a/src/main/lua/pcap/httpStats.lua b/src/main/lua/pcap/httpStats.lua
new file mode 100644
index 0000000..ff48bd2
--- /dev/null
+++ b/src/main/lua/pcap/httpStats.lua
@@ -0,0 +1,117 @@
+
+local newPcapParser = assert(require("pcapit").newPcapParser)
+
+local out, log = io.stdout, io.stderr
+local main, onPcapFrame, vapourizeUrlVariables, printHttpRequestStats
+
+
+function main()
+ local app = {
+ parser = false,
+ youngestEpochSec = -math.huge,
+ oldestEpochSec = math.huge,
+ foundHttpRequests = {},
+ }
+ app.parser = newPcapParser{
+ dumpFilePath = "-",
+ onFrame = function(f)onPcapFrame(app, f)end,
+ }
+ app.parser:resume()
+ printHttpRequestStats(app)
+end
+
+
+function onPcapFrame( app, it )
+ local sec, usec = it:frameArrivalTime()
+ local srcPort, dstPort = it:trspSrcPort(), it:trspDstPort()
+ --
+ if sec < app.oldestEpochSec then app.oldestEpochSec = sec end
+ if sec > app.youngestEpochSec then app.youngestEpochSec = sec end
+ --
+ local portOfInterest = 7012
+ if dstPort == portOfInterest then
+ local httpMethod, httpUri =
+ it:trspPayload():match("^([A-Z]+) ([^ ]+) [^ \r\n]+\r?\n")
+ if httpMethod then
+ --out:write(string.format("%5d->%5d %s %s\n", srcPort, dstPort, httpMethod, httpUri))
+ httpUri = vapourizeUrlVariables(app, httpUri)
+ local key = httpUri -- httpMethod .." ".. httpUri
+ local obj = app.foundHttpRequests[key]
+ if not obj then
+ obj = { count=0, httpMethod=false, httpUri=false, }
+ app.foundHttpRequests[key] = obj
+ end
+ obj.count = obj.count + 1
+ obj.httpMethod = httpMethod
+ obj.httpUri = httpUri
+ end
+ elseif srcPort == portOfInterest then
+ local httpStatus, httpPhrase =
+ it:trspPayload():match("^HTTP/%d.%d (%d%d%d) ([^\r\n]*)\r?\n")
+ if httpStatus then
+ --out:write(string.format("%5d<-%5d %s %s\n", srcPort, dstPort, httpStatus, httpPhrase))
+ end
+ end
+end
+
+
+function vapourizeUrlVariables( app, uri )
+ -- A very specific case
+ uri = uri:gsub("^(/houston/users/)%d+(/.*)$", "%1{}%2");
+ if uri:find("^/houston/users/[^/]+/user/.*$") then return uri end
+ --
+ -- Try to do some clever guesses to group URIs which only differ in variable segments
+ uri = uri:gsub("(/|-)[%dI_-]+/", "%1{}/"):gsub("(/|-)[%dI-]+/", "%1{}/") -- two turns, to also get consecutive number segments
+ uri = uri:gsub("([/-])[%dI_-]+$", "%1{}")
+ uri = uri:gsub("/%d+(%.%w+)$", "/{}%1")
+ uri = uri:gsub("(/|-)[%w%d]+%-[%w%d]+%-[%w%d]+%-[%w%d]+%-[%w%d]+(/?)$", "%1{}%2")
+ uri = uri:gsub("/v%d/", "/v0/") -- Merge all API versions
+ --
+ -- Generify remaining by trimming URIs from right
+ uri = uri:gsub("^(/from%-houston/[^/]+/eagle/nsync/).*$", "%1...")
+ uri = uri:gsub("^(/from%-houston/[^/]+/eagle/fis/information/).*$", "%1...")
+ uri = uri:gsub("^(/from%-houston/[^/]+/eagle/nsync/v%d/push/trillian%-phonebooks%-).*$", "%1...")
+ uri = uri:gsub("^(/from%-houston/[^/]+/eagle/timetable/wait/).*$", "%1...")
+ uri = uri:gsub("^(/houston/service%-instances/).*$", "%1...")
+ uri = uri:gsub("^(/vortex/stillInterested%?vehicleId%=).*$", "%1...")
+ uri = uri:gsub("^(/houston/[^/]+/[^/]+/).*$", "%1...")
+ return uri
+end
+
+
+function printHttpRequestStats( app )
+ local sorted = {}
+ local maxOccurValue = 0
+ local overallCount = 0
+ for _, reqObj in pairs(app.foundHttpRequests) do
+ if reqObj.count > maxOccurValue then maxOccurValue = reqObj.count end
+ overallCount = overallCount + reqObj.count
+ table.insert(sorted, reqObj)
+ end
+ table.sort(sorted, function(a, b)return a.count > b.count end)
+ local dumpDurationSec = app.youngestEpochSec - app.oldestEpochSec
+ local timeFmt = "!%Y-%m-%d_%H:%M:%SZ"
+ out:write("\n")
+ out:write(string.format(" Subject HTTP Request Statistics\n"))
+ out:write(string.format(" Begin %s\n", os.date(timeFmt,app.oldestEpochSec)))
+ out:write(string.format(" Duration %d seconds\n", dumpDurationSec))
+ out:write(string.format("Throughput %.1f HTTP requests per second\n", overallCount / dumpDurationSec))
+ out:write("\n")
+ out:write(" .-- HTTP Requests per Second\n")
+ out:write(" | .-- URI\n")
+ out:write(".--+--. .-+---------\n")
+ for i, elem in ipairs(sorted) do
+ local count, httpMethod, httpUri = elem.count, elem.httpMethod, elem.httpUri
+ local cntPerSec = math.floor((count / dumpDurationSec)*10+.5)/10
+ out:write(string.format("%7.1f %s\n", cntPerSec, httpUri))
+ end
+ out:write("\n")
+end
+
+
+main()
+
diff --git a/src/main/lua/pcap/tcpDataAmountStats.lua b/src/main/lua/pcap/tcpDataAmountStats.lua
new file mode 100644
index 0000000..496687a
--- /dev/null
+++ b/src/main/lua/pcap/tcpDataAmountStats.lua
@@ -0,0 +1,97 @@
+
+local newPcapParser = assert(require("pcapit").newPcapParser)
+
+local main, onPcapFrame, vapourizeUrlVariables, printResult
+
+
+function main()
+ local app = {
+ parser = false,
+ youngestEpochSec = -math.huge,
+ oldestEpochSec = math.huge,
+ nextStreamNr = 1,
+ httpStreams = {},
+ }
+ app.parser = newPcapParser{
+ dumpFilePath = "-",
+ onFrame = function(f)onPcapFrame(app, f)end,
+ }
+ app.parser:resume()
+ printResult(app)
+end
+
+
+function onPcapFrame( app, it )
+ local out = io.stdout
+ --
+ if not it:tcpSeqNr() then return end
+ --
+ --
+ local sec, usec = it:frameArrivalTime()
+ if sec < app.oldestEpochSec then app.oldestEpochSec = sec end
+ if sec > app.youngestEpochSec then app.youngestEpochSec = sec end
+ --
+ local srcIp, dstIp = it:netSrcIpStr(), it:netDstIpStr()
+ local srcPort, dstPort = it:trspSrcPort(), it:trspDstPort()
+ local lowIp = (srcIp < dstIp)and(srcIp)or(dstIp)
+ local higIp = (lowIp == dstIp)and(srcIp)or(dstIp)
+ local lowPort = math.min(srcPort, dstPort)
+ local streamId = lowIp .."\0".. higIp .."\0".. lowPort
+ local stream = app.httpStreams[streamId]
+ if not stream then
+ stream = {
+ srcIp = srcIp, dstIp = dstIp, srcPort = srcPort, dstPort = dstPort,
+ streamNr = app.nextStreamNr, numBytes = 0,
+ }
+ app.nextStreamNr = app.nextStreamNr + 1
+ app.httpStreams[streamId] = stream
+ end
+ local trspPayload = it:trspPayload()
+ stream.numBytes = stream.numBytes + trspPayload:len()
+end
+
+
+function printResult( app )
+ local out = io.stdout
+ local sorted = {}
+ local overallValue, maxValue = 0, 0
+ for _, stream in pairs(app.httpStreams) do
+ if stream.numBytes > maxValue then maxValue = stream.numBytes end
+ overallValue = overallValue + stream.numBytes
+ table.insert(sorted, stream)
+ end
+ table.sort(sorted, function(a, b)return a.numBytes > b.numBytes end)
+ local dumpDurationSec = app.youngestEpochSec - app.oldestEpochSec
+ local overallBytesPerSec = overallValue / dumpDurationSec
+ local maxValuePerSec = maxValue / dumpDurationSec
+ local timeFmt = "!%Y-%m-%d_%H:%M:%SZ"
+ out:write("\n")
+ out:write(string.format(" Subject TCP data throughput\n"))
+ out:write(string.format(" Begin %s\n", os.date(timeFmt,app.oldestEpochSec)))
+ out:write(string.format(" Duration %d seconds\n", dumpDurationSec))
+ out:write(string.format(" Overall %.3f KiB per second (%.3f KiBit per second)\n",
+ overallBytesPerSec/1024, overallBytesPerSec/1024*8))
+ out:write("\n")
+ out:write(" .-- KiB per Second\n")
+ out:write(" | .-- IP endpoints\n")
+ out:write(" | | .-- TCP server port\n")
+ out:write(" | | | .-- TCP Payload (less is better)\n")
+ out:write(" | | | |\n")
+ out:write(".--+----. .----+----------------------. .+--. .-+------------\n")
+ local bar = "XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX"
+ for i, elem in ipairs(sorted) do
+ local streamNr, srcIp, dstIp, srcPort, dstPort, numBytes =
+ elem.streamNr, elem.srcIp, elem.dstIp, elem.srcPort, elem.dstPort, elem.numBytes
+ local lowPort = math.min(srcPort, dstPort)
+ local bytesPerSecond = math.floor((numBytes / dumpDurationSec)*10+.5)/10
+ out:write(string.format("%9.3f %-14s %-14s %5d ", bytesPerSecond/1024, srcIp, dstIp, lowPort))
+ local part = bytesPerSecond / maxValuePerSec;
+ out:write(bar:sub(0, math.floor(part * bar:len())))
+ out:write("\n")
+ end
+ out:write("\n")
+end
+
+
+main()
+
diff --git a/src/main/lua/pcap/tcpPortStats.lua b/src/main/lua/pcap/tcpPortStats.lua
new file mode 100644
index 0000000..9038db7
--- /dev/null
+++ b/src/main/lua/pcap/tcpPortStats.lua
@@ -0,0 +1,82 @@
+
+local newPcapParser = assert(require("pcapit").newPcapParser)
+
+local out, log = io.stdout, io.stderr
+local main, onPcapFrame, printStats
+
+
+function main()
+ local app = {
+ parser = false,
+ youngestEpochSec = -math.huge,
+ oldestEpochSec = math.huge,
+ foundPortNumbers = {},
+ }
+ app.parser = newPcapParser{
+ dumpFilePath = "-",
+ onFrame = function(f)onPcapFrame(app, f)end,
+ }
+ app.parser:resume()
+ printStats(app)
+end
+
+
+function onPcapFrame( app, it )
+ local sec, usec = it:frameArrivalTime()
+ local srcPort, dstPort = it:trspSrcPort(), it:trspDstPort()
+ --local srcIp, dstIp = it:netSrcIpStr(), it:netDstIpStr()
+ --local isTcp = (it:tcpSeqNr() ~= nil)
+ --
+ if sec < app.oldestEpochSec then app.oldestEpochSec = sec end
+ if sec > app.youngestEpochSec then app.youngestEpochSec = sec end
+ --
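+ -- One table holds both directions: src ports count as "send" under their
+ -- plain number, dst ports count as "receive" shifted by +100000.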
+ if not app.foundPortNumbers[srcPort] then app.foundPortNumbers[srcPort] = 1
+ else app.foundPortNumbers[srcPort] = app.foundPortNumbers[srcPort] + 1 end
+ if not app.foundPortNumbers[dstPort+100000] then app.foundPortNumbers[dstPort+100000] = 1
+ else app.foundPortNumbers[dstPort+100000] = app.foundPortNumbers[dstPort+100000] + 1 end
+end
+
+
+function printStats( app )
+ local sorted = {}
+ local totalPackets, maxOccurValue = 0, 0
+ for port, pkgcnt in pairs(app.foundPortNumbers) do
+ if pkgcnt > maxOccurValue then maxOccurValue = pkgcnt end
+ table.insert(sorted, { port=port, pkgcnt=pkgcnt })
+ totalPackets = totalPackets + pkgcnt
+ end
+ table.sort(sorted, function(a, b)return a.pkgcnt > b.pkgcnt end)
+ local dumpDurationSec = app.youngestEpochSec - app.oldestEpochSec
+ local timeFmt = "!%Y-%m-%d_%H:%M:%SZ"
+ out:write("\n")
+ out:write(string.format(" Subject TCP/UDP stats\n"))
+ out:write(string.format(" Begin %s\n", os.date(timeFmt,app.oldestEpochSec)))
+ out:write(string.format(" Duration %d seconds\n", dumpDurationSec))
+ out:write(string.format("Throughput %.1f packets per second\n", totalPackets / dumpDurationSec))
+ out:write("\n")
+ out:write(" .- TCP/UDP Port\n")
+ out:write(" | .-Direction (Send, Receive)\n")
+ out:write(" | | .- Packets per second\n")
+ out:write(".-+-. | .---+-.\n")
+ local chartWidth = 60
+ for i, elem in ipairs(sorted) do
+ local port, pkgcnt = elem.port, elem.pkgcnt
+ local dir = (port > 100000)and("R")or("S")
+ if port > 100000 then port = port - 100000 end
+ if port > 30000 then goto nextPort end
+ local pkgsPerSec = math.floor((pkgcnt / dumpDurationSec)*10+.5)/10
+ out:write(string.format("%5d %s %7.1f |", port, dir, pkgsPerSec))
+ local barLen = pkgcnt / maxOccurValue
+ --local barLen = (math.log(pkgcnt) / math.log(maxOccurValue))
+ for i=1, chartWidth-1 do
+ out:write((i < (barLen*chartWidth))and("=")or(" "))
+ end
+ out:write("|\n")
+ ::nextPort::
+ end
+ out:write("\n")
+end
+
+
+main()
+
diff --git a/src/main/lua/pcap/xServiceStats.lua b/src/main/lua/pcap/xServiceStats.lua
new file mode 100644
index 0000000..3bc94a4
--- /dev/null
+++ b/src/main/lua/pcap/xServiceStats.lua
@@ -0,0 +1,90 @@
+
+local newPcapParser = assert(require("pcapit").newPcapParser)
+
+local out, log = io.stdout, io.stderr
+local main, onPcapFrame, vapourizeUrlVariables, printStats
+
+
+function main()
+ local app = {
+ parser = false,
+ youngestEpochSec = -math.huge,
+ oldestEpochSec = math.huge,
+ services = {},
+ }
+ app.parser = newPcapParser{
+ dumpFilePath = "-",
+ onFrame = function(f)onPcapFrame(app, f)end,
+ }
+ app.parser:resume()
+ printStats(app)
+end
+
+
+function onPcapFrame( app, it )
+ local sec, usec = it:frameArrivalTime()
+ local srcPort, dstPort = it:trspSrcPort(), it:trspDstPort()
+ --
+ if sec < app.oldestEpochSec then app.oldestEpochSec = sec end
+ if sec > app.youngestEpochSec then app.youngestEpochSec = sec end
+ --
+ local portsOfInterest = {
+ [ 80] = true,
+ [8080] = true,
+ [7012] = true,
+ }
+ --if not portsOfInterest[dstPort] and not portsOfInterest[srcPort] then return end
+ local trspPayload = it:trspPayload()
+ local httpReqLinePart1, httpReqLinePart2, httpReqLinePart3 =
+ trspPayload:match("^([A-Z/1.0]+) ([^ ]+) [^ \r\n]+\r?\n")
+ if not httpReqLinePart1 then return end
+ if httpReqLinePart1:find("^HTTP/1.%d$") then return end
+ --log:write(string.format("%5d->%5d %s %s %s\n", srcPort, dstPort, httpReqLinePart1, httpReqLinePart2, httpReqLinePart3))
+ local xService = trspPayload:match("\n[Xx]%-[Ss][Ee][Rr][Vv][Ii][Cc][Ee]:%s+([^\r\n]+)\r?\n");
+ if not xService then return end
+ --log:write("X-Service is '".. xService .."'\n")
+ local obj = app.services[xService]
+ if not obj then
+ obj = { xService = xService, count = 0 }
+ app.services[xService] = obj
+ end
+ obj.count = obj.count + 1
+end
+
+
+function printStats( app )
+ local sorted = {}
+ local maxOccurValue = 0
+ local overallCount = 0
+ for _, reqObj in pairs(app.services) do
+ if reqObj.count > maxOccurValue then maxOccurValue = reqObj.count end
+ overallCount = overallCount + reqObj.count
+ table.insert(sorted, reqObj)
+ end
+ table.sort(sorted, function(a, b)return a.count > b.count end)
+ local dumpDurationSec = app.youngestEpochSec - app.oldestEpochSec
+ local timeFmt = "!%Y-%m-%d_%H:%M:%SZ"
+ out:write("\n")
+ out:write(string.format(" Subject Pressure by Services\n"))
+ out:write(string.format(" Begin %s\n", os.date(timeFmt,app.oldestEpochSec)))
+ out:write(string.format(" Duration %d seconds\n", dumpDurationSec))
+ out:write(string.format("Matching Requests %.1f (HTTP requests per second)\n", overallCount / dumpDurationSec))
+ out:write("\n")
+ out:write(" .-- HTTP Requests per Second\n")
+ out:write(" | .-- Service\n")
+ out:write(".-+---. .-+-----\n")
+ for i, elem in ipairs(sorted) do
+ local xService, count = elem.xService, elem.count
+ local countPerSecond = math.floor((count / dumpDurationSec)*10+.5)/10
+ out:write(string.format("%7.1f %s\n", countPerSecond, xService))
+ end
+ out:write("\n")
+end
+
+
+main()
+
diff --git a/src/main/lua/wireshark/HttpTime.lua b/src/main/lua/wireshark/HttpTime.lua
index b06c0a7..514c62b 100644
--- a/src/main/lua/wireshark/HttpTime.lua
+++ b/src/main/lua/wireshark/HttpTime.lua
@@ -10,7 +10,7 @@ local mod = {}
function mod.init()
local that = mod.seal{
- proto = Proto("__", "Additional Metadata"),
+ proto = Proto("AdditMeta", "Additional Metadata"),
f_andy_httpTime = ProtoField.float("_.httpTime", "HttpTime"),
f_andy_synSeen = ProtoField.bool("_.synSeen", "SynSeen"),
f_andy_uri = ProtoField.string("_.uri", "Request URI"),
diff --git a/src/main/nodejs/misc/ProduceLotsOfQueues.js b/src/main/nodejs/misc/ProduceLotsOfQueues.js
new file mode 100644
index 0000000..810ac63
--- /dev/null
+++ b/src/main/nodejs/misc/ProduceLotsOfQueues.js
@@ -0,0 +1,119 @@
+;(function(){
+
+ const http = require("http");
+ const log = process.stderr;
+ const out = process.stdout;
+ const NOOP = function(){};
+
+ setTimeout(main); return;
+
+
+ function main(){
+ const app = Object.seal({
+ isHelp: false,
+ host: "localhost",
+ port: 7013,
+ uri: "/houston/tmp/gugus/bar",
+ queueName: "my-gaga-queue",
+ });
+ if( parseArgs(app, process.argv) !== 0 ) process.exit(1);
+ if( app.isHelp ){ printHelp(); return; }
+ run(app);
+ }
+
+
+
+ function printHelp(){
+ out.write("\n"
+ +" Produce a bunch of gateleen queues\n"
+ +" \n"
+ +" Options:\n"
+ +" \n"
+ +" \n")
+ }
+
+
+ function parseArgs( app, argv ){
+ var isYolo = false;
+ for( var iA = 2 ; iA < argv.length ; ++iA ){
+ var arg = argv[iA];
+ if( arg == "--help" ){
+ app.isHelp = true; return 0;
+ }else if( arg == "--yolo" ){
+ isYolo = true;
+ }else{
+ log.write("EINVAL: "+ arg +"\n");
+ return -1;
+ }
+ }
+ if( !isYolo ){ log.write("EINVAL: wanna yolo?\n"); return -1; }
+ return 0;
+ }
+
+
+ function run( app ){
+ //placeHook(app);
+ putSomeNonsense(app);
+ }
+
+
+ function placeHook( app ){
+ const req = Object.seal({
+ base: null,
+ app: app,
+ });
+ req.base = http.request({
+ host: app.host, port: app.port,
+ method: "PUT", path: app.uri +"/_hooks/listeners/http",
+ //headers: {
+ // "X-Expire-After": "42",
+ //},
+ });
+ req.base.on("response", onResponse.bind(0, req));
+ req.base.end(JSON.stringify({
+ destination: "http://127.0.0.1:7099/guguseli",
+ queueExpireAfter/*seconds*/: 42,
+ }));
+ function onResponse( req, rsp ){
+ var app = req.app;
+ log.write("[DEBUG] < HTTP/"+ rsp.httpVersion +" "+ rsp.statusCode +" "+ rsp.statusMessage +"\n");
+ for( var k of Object.keys(rsp.headers) ) log.write("[DEBUG] < "+ k +": "+ rsp.headers[k] +"\n");
+ }
+ }
+
+
+ function putSomeNonsense( app ){
+ const nonsense = Object.seal({
+ app: app,
+ req: null,
+ i: 0,
+ limit: 42,
+ });
+ putNextRequest(nonsense);
+ function putNextRequest( nonsense ){
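+ // Hint: each distinct "X-Queue" header value presumably makes gateleen
+ // enqueue the PUT into its own queue, hence the "-"+i suffix below.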
+ nonsense.req = http.request({
+ host: app.host, port: app.port,
+ method: "PUT", path: app.uri +"/foo/"+ nonsense.i,
+ headers: {
+ "X-Queue": app.queueName +"-"+ nonsense.i,
+ "X-Queue-Expire-After": 9999999,
+ },
+ });
+ nonsense.req.on("response", onResponse.bind(0, nonsense));
+ nonsense.req.end("{\"guguseli\":\""+ new Date().toISOString() +"\"}\n");
+ }
+ function onResponse( nonsense, rsp ){
+ var app = nonsense.app;
+ if( rsp.statusCode != 200 ){
+ log.write("[DEBUG] < HTTP/"+ rsp.httpVersion +" "+ rsp.statusCode +" "+ rsp.statusMessage +"\n");
+ for( var k of Object.keys(rsp.headers) ) log.write("[DEBUG] < "+ k +": "+ rsp.headers[k] +"\n");
+ }
+ rsp.on("data", NOOP);
+ if( nonsense.i++ < nonsense.limit ){
+ putNextRequest(nonsense);
+ }
+ }
+ }
+
+
+}());
diff --git a/src/main/nodejs/paisa-nonslim/README.txt b/src/main/nodejs/paisa-nonslim/README.txt
new file mode 100644
index 0000000..e3a94f7
--- /dev/null
+++ b/src/main/nodejs/paisa-nonslim/README.txt
@@ -0,0 +1,3 @@
+
+Created 20240419, as it seems we need some automation for these tasks.
+
diff --git a/src/main/nodejs/paisa-nonslim/foo.js b/src/main/nodejs/paisa-nonslim/foo.js
new file mode 100644
index 0000000..43cf8aa
--- /dev/null
+++ b/src/main/nodejs/paisa-nonslim/foo.js
@@ -0,0 +1,902 @@
+/*
+
+Related:
+- [Remove Slim Packaging](SDCISA-15648)
+
+*/
+;(function(){ "use-strict";
+
+ const child_process = require("child_process");
+ const fs = require("fs");
+ const promisify = require("util").promisify;
+ const zlib = require("zlib");
+ const noop = function(){};
+ const log = process.stderr;
+ const out = process.stdout;
+ const logAsString = function( buf ){ log.write(buf.toString()); };
+
+ setImmediate(main);
+
+
+ function printHelp( argv, app ){
+ process.stdout.write(" \n"
+ +" Autmoate some steps that are tedious manually.\n"
+ +" \n"
+ +" Options:\n"
+ +" \n"
+ +" --fetch\n"
+ +" Update local repos from remote.\n"
+ +" \n"
+ +" --reset-hard\n"
+ +" Reset worktree to develop.\n"
+ +" \n"
+ +" --patch-platform\n"
+ +" Remove slim packaging from patform and set snapshot version.\n"
+ +" \n"
+ +" --patch-services\n"
+ +" Disable slim packaging in Jenkinsfile and use platform snapshot in\n"
+ +" pom.\n"
+ +" \n"
+ +" --commit\n"
+ +" Create a git commit with our changes.\n"
+ +" \n"
+ +" --push | --push-force\n"
+ +" Create commits for patched services and push them to upstream. If\n"
+ +" not given, the change is only made locally (aka without cluttering\n"
+ +" remote git repo). The force variant will replace existing branches\n"
+ +" on the remnote. If given multiple times, less-invasive wins.\n"
+ +" \n"
+ +" --print-isa-version\n"
+ +" Prints an isaVersion JSON that can be fed to preflux.\n"
+ +" \n"
+ // not impl yet
+ //+" --max-parallel <int>\n"
+ //+" How many tasks to run concurrently. Defaults to 1. Which means to\n"
+ //+" do all the work sequentially (HINT: very handy for debugging).\n"
+ //+" \n"
+ );
+ }
+
+
+ function parseArgs( argv, app ){
+ if( argv.length <= 2 ){
+ log.write("EINVAL: Refuse to produce damage with zero args.\n");
+ return -1;
+ }
+ for( var iA = 2 ; iA < argv.length ; ++iA ){
+ var arg = argv[iA];
+ if( arg == "--help" ){
+ app.isHelp = true; return 0;
+ }else if( arg == "--fetch" ){
+ app.isFetch = true;
+ }else if( arg == "--reset-hard" ){
+ app.isResetHard = true;
+ }else if( arg == "--patch-platform" ){
+ app.isPatchPlatform = true;
+ }else if( arg == "--patch-services" ){
+ app.isPatchServices = true;
+ }else if( arg == "--commit" ){
+ app.isCommit = true;
+ }else if( arg == "--push" ){
+ if( app.isPushForce ){ log.write("EINVAL: only one of push and push-force allowed\n"); return-1; }
+ app.isPush = true;
+ }else if( arg == "--push-force" ){
+ if( app.isPush ){ log.write("EINVAL: only one of push and push-force allowed\n"); return-1; }
+ app.isPushForce = true;
+ }else if( arg == "--print-isa-version" ){
+ app.isPrintIsaVersion = true;
+ //}else if( arg == "--max-parallel" ){
+ // arg = argv[++iA];
+ // if( !/^[0-9]+$/.test(arg) ){ log.write("EINVAL: --max-parallel "+ arg +"\n"); return -1; }
+ // app.maxParallel = 0 + arg;
+ }else{
+ log.write("EINVAL: "+ arg +"\n");
+ return -1;
+ }
+ }
+ return 0;
+ }
+
+
+ function isThingyNameValid( app, thingyName ){
+ if( typeof thingyName !== "string" ) return false;
+ if( !/^[a-z-]+$/.test(thingyName) ) return false;
+ return true;
+ }
+
+
+ function workdirOfSync( app, thingyName ){
+ if( !isThingyNameValid(app, thingyName) ) throw TypeError(thingyName);
+ return app.workdir +"/"+ thingyName;
+ }
+
+
+ function gitUrlOfSync( app, thingyName ){
+ if( !isThingyNameValid(app, thingyName) ) throw TypeError(thingyName);
+ return "https://gitit.post.ch/scm/isa/"+ thingyName +".git";
+ }
+
+
+ function isCloned( app, thingyName, onDone){
+ if( typeof onDone != "function" ) throw TypeError("onDone");
+ var child = child_process.spawn(
+ "git", ["status", "--porcelain"],
+ { cwd: workdirOfSync(app, thingyName), }
+ );
+ child.on("error", console.error.bind(console));
+ child.stdout.on("data", noop);
+ child.stderr.on("data", logAsString);
+ child.on("close", function( code, signal ){
+ if( code !== 0 || signal !== null ){
+ onDone(Error("code "+ code +", signal "+ signal));
+ }else{
+ onDone(null, true);
+ }
+ });
+ }
+
+
+ function isWorktreeClean( app, thingyName, onDone ){
+ if( typeof onDone != "function" ) throw TypeError("onDone");
+ var isStdoutDirty = false;
+ var child = child_process.spawn(
+ "git", ["status", "--porcelain"],
+ { cwd: workdirOfSync(app, thingyName), }
+ );
+ child.on("error", console.error.bind(console));
+ child.stdout.on("data", function(){ isStdoutDirty = true; });
+ child.stderr.on("data", logAsString);
+ child.on("close", function( code, signal ){
+ if( signal !== null ){
+ throw Error("code "+ code +", signal "+ signal +"");
+ }else{
+ onDone(null, !isStdoutDirty);
+ }
+ });
+ }
+
+
+ function getDropSlimArtifactsTagInPlatformPatch( app, onDone ){
+ if( typeof onDone != "function" ) throw TypeError("onDone");
+ /* patch which empties the <slimArtifacts> tag in
+ * "poms/service/paisa-service-superpom/pom.xml" as described in
+ * SDCISA-15648 */
+ var patch = ""
+ +"tVrdb9s2EH/PX8EZ2OosIe2kadOw7Zaia4sM3Vo0fdhDgYGSKJkOJQok7dgr+r/vqA9/NHZisrIf"
+ +"IpIif3e8O94HlUSkKcI4ExaxQalyMzBcT0XMByUThuGmh82k5BreuzlklksU+cw+EEXCZyhKT9L4"
+ +"ybMnQ0LO2fmz6On56TN0Mhw+PTs7wBj7cXBwdHTkycXlJcIXw5Pjk5PH6Mg1LtDl5QF66PdikCkm"
+ +"zW87zIxVkYpsopkVqthhQbWo1AoYtYIDCbzTErfKSJG/glUpi63PQvdjqmRSClbEnK60X/yE8ecP"
+ +"f3ygqOBTrlEiNI+tnKOJ4QkSBSols6nSOcaeBOMRKZWxRFhS6YgwKRxtWemp0VctPHhovgf0ROVM"
+ +"FPtCBx3G3Jh9wUuVZaLYm2xueRQArXRGmCnBRMa0ed5yBoZT2ZFmhRFWTHlrP9EcmVK7XdSTjbcV"
+ +"bSCo7R6NFtxGyozlGpwHjSUzJmeWb93dSERcFzADT0G8CbNKB9EsFJdCGaK5sZJbumGM8JklTnvQ"
+ +"3srPOBeFmO2Lhc7JLg0VqOWqIM4iad2urBPn3DIQK/PeDyAYHHFWTKyQht4Z2YMIKwKxSnhM13qh"
+ +"QFKCVUNQMXTDWBhoKiSflFKxhN4dCoMUii6bYRCSFRld7QTC1B6TftfvFAyzUnQOSC8uyBCiFTe4"
+ +"UBbzmTDWj0gB/sGkhI9iFo84bZ4BDr4F+i9XuhzR5tn1YXFefeE6ycJ10g3u9EeAx2zKZgQSLQMC"
+ +"5S7zWRIYlwyfkhN/fVYRqXTiJSNrS1BnqQpeWEOrrhTQ7hQxLEhvxctF7ptPrAZFEGp8Y1pX3XQw"
+ +"K8BwWZhjegg7IAF6GNTFlAhKlC6AHZadl+vgbgCPjX58MtyeG414IlV8E3R+MpecpMKMCKRytDZ1"
+ +"Lv1w2lWL5f7HoV651D/9fiDIYxIzKQhUOWLagt4wbdnKUAiXTmlO6QA3i8L5WocJM8+7OCIvPbXn"
+ +"rGAcKWNIG1uqXhtZ9pq0OtpGpmdjOo4lVpCF46q7v9S82m0Vbmj98LQAlTBswffRRSsoMo7dAZcC"
+ +"TlzT2EcunimVSU5c9khSsJBokhnqnMnwiR+YUCQTdjSJqliIQenAIyAZz6R0Rd0Tia3arOxOdNxE"
+ +"rsaI3ROoVn9D69XtgEHBvy2QVtpVbQbKV7Gw830kSxtobiWzoXALoKmAltpjwdfcTEDy25R74Qn2"
+ +"PaCuXgbX1A2mmUPkz7GBuDYx3fIJZa4WcUeYoCSNNc8g6dWhIXgT7pSPRCzBsIobXKrb7uQ6Ypon"
+ +"4JymIuuS3xVAOJldIrtrRSgo3AUqVpEzskD0pV+t/gYCVFdsqWY5v1X6hrZXbqp86FYOMsd8GocF"
+ +"/y1E63u+zvDcfc32u5rlNrrcQ6xAubOAIu5+PPdxolS6S9yQcLgFjM9KCBbGO7O+B9Jy32uMe8BA"
+ +"x11igdmHVBBNwuxKkmXzh+4Q2qyBPpg+8DziSQKOcpzPrJsQmELXi0kLR+/gBu+ludpaXHGVSsnT"
+ +"Pe7HpeROL7Ge5JEhbTlNF3V1re+OQcMu1aKJlBVuDnpO5nTZDDzHY62geKT1w7/EsCM1yUbWHQtD"
+ +"ZpApcJbT5unxSXLw3TfJo51Xri+8A7Qjee8PqS+MmuiYe1gaFObgtSsHQAooqNyFOfn1AO+O8JHZ"
+ +"EXL7ey+iqv2yGjIk47YPOxhDyAQDETIhdb2s9Bwdod4AystB7/D57pRWiBCr3gKn/UOS3wCq6Tuc"
+ +"3ZESnqI1lQDTS2GTtVfgZ6Ww/d6XL+bIi113PK0s+r13Wk1KCoC8pmUViji6fn/1Vw4eCvVAGmsU"
+ +"D31o1PJlC245OCv0FbEU4V8y68FuK5cbPgdpsJRkju2rxOmKOh5hqCVz5Xm1WIki7feaPf+Ofsbn"
+ +"Q9M7dsQ8dut+IkX9df24DARSVtN3YIfoqx/egj2nKYQroaE5N0CFW2RiMAqnsEcg6akAh/4IsSIB"
+ +"J1PO0Z+vPrlXlmkw9cqcD3ueu3E/kGtN5iXqtVR6/jDV2XN8vdUqrzVYHWar3Ju+j+XegfysAHD1"
+ +"+EE2peSU91siBbj7EAqDQSv7lnPg99q6wNY/dJbXasQZYM3JyoQAYTuv4UymnC8oHjfAx+jagm6Z"
+ +"Tl5D/0Ppsh/y6c3H969ev/n3zT9X15+v/n7nu8lviEvDOzHKQvla17fdp+84FSJZE2F2ioVtPIKd"
+ +"APuqBCs6O32+BNmBns9/IL0Y8BmPJw/MXJkFcfV/3mNHXg=="
+ ;
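+ /* The patch text is embedded deflate-raw compressed plus base64 encoded,
+ * to keep this script self-contained. Decode it, then inflate it. */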
+ patch = Buffer.from(patch, 'base64');
+ patch = zlib.inflateRaw(patch, function( ex, patch ){
+ if( ex ){ throw ex; }
+ setImmediate(onDone, null, patch);
+ });
+ }
+
+
+ function getJettyServiceNamesAsArray( app, onDone ){
+ setImmediate(onDone, null, [ /*TODO get via args/file */
+ TODO_GX0CAJ9hAgCNRAIA9hgCAP5jAgDGCgIA
+ ]);
+ }
+
+
+ function getVersionByServiceName(app, svcName, onDone){
+ /* if we did patch services, we already know the version without
+ * lookup. This is a performance optimization, because maven performs
+ * absolutely terrible. Performance DOES matter! */
+ //if( app.isPatchServices ){
+ setImmediate(onDone, null, app.jenkinsSnapVersion);
+ //}else{
+ // wasteOurTimeBecausePerformanceDoesNotMatter();
+ //}
+ //function wasteOurTimeBecausePerformanceDoesNotMatter( ex ){
+ // if( ex ) throw ex;
+ // var stdoutBufs = [];
+ // /* SHOULD start maven with low prio to not kill windoof. But I
+ // * guess spawning a process with other prio is YAGNI, and so we're
+ // * now fucked. Therefore I wish you happy time-wasting, as the only
+ // * option left is to NOT start too many maven childs
+ // * simultaneously. */
+ // var child = child_process.spawn(
+ // "mvn", ["help:evaluate", "-o", "-q", "-DforceStdout", "-Dexpression=project.version"],
+ // { cwd:workdirOfSync(app, svcName) }
+ // );
+ // child.on("error", console.error.bind(console));
+ // child.stderr.on("data", logAsString);
+ // child.stdout.on("data", stdoutBufs.push.bind(stdoutBufs));
+ // child.on("close", function( code, signal ){
+ // if( code !== 0 || signal !== null ){
+ // endFn(Error("code="+ code +", signal="+ signal +""));
+ // return;
+ // }
+ // if( stdoutBufs.length <= 0 ) throw Error("maven has failed");
+ // var version = stdoutBufs.join().trim();
+ // onDone(null, version);
+ // });
+ //}
+ }
+
+
+ function printIsaVersion( app, onDone ){
+ var iSvcQuery = 0, iSvcPrinted = 0;
+ printIntro();
+ function printIntro( ex ){
+ if( ex ) throw ex;
+ var epochMs = Date.now();
+ out.write('{\n');
+ out.write(' "timestamp": "'+ new Date().toISOString() +'",\n');
+ out.write(' "isaVersionId": "SDCISA-15648-'+ epochMs +'",\n');
+ out.write(' "isaVersionName": "SDCISA-15648-'+ epochMs +'",\n');
+ out.write(' "trial": true,\n');
+ out.write(' "services": [\n');
+ out.write(' { "name": "eagle", "version": "02.23.01.00" },\n');
+ out.write(' { "name": "storage", "version": "00.25.00.02" },\n');
+ out.write(' { "name": "platform", "version": "'+ app.platformJenkinsVersion +'" }');
+ /* maven performance is an absolutely terrible monster.
+ * Problem 1: Doing this sequentially takes forever.
+ * Problem 2: Doing this parallel for all makes windoof freeze.
+ * Workaround: Do at most a few of them in parallel. */
+ for( var i = 3 ; i ; --i ) nextService();
+ }
+ function nextService( ex ){
+ if( ex ) throw ex;
+ if( iSvcQuery >= app.services.length ){ /*printTail();*/ return; }
+ var svcName = app.services[iSvcQuery++];
+ getVersionByServiceName(app, svcName, function(e,r){ printService(e,r,svcName); });
+ }
+ function printService( ex, svcVersion, svcName ){
+ if( ex ) throw ex;
+ if( typeof svcVersion != "string") throw Error(svcVersion);
+ iSvcPrinted += 1;
+ out.write(",\n ");
+ out.write('{ "name": "'+ svcName +'", "version": "'+ svcVersion +'" }');
+ if( iSvcPrinted >= app.services.length ){ printTail(); }else{ nextService(); }
+ }
+ function printTail( ex ){
+ if( ex ) throw ex;
+ out.write('\n');
+ out.write(' ],\n');
+ out.write(' "featureSwitches": [],\n');
+ out.write(' "mergedBundles": []\n');
+ out.write('}\n');
+ onDone(/*ex*/null, /*ret*/null);
+ }
+ }
+
+
+ function pushService( app, thingyName, onDone ){
+ if( typeof onDone != "function" ){ throw TypeError("onDone"); }
+ var iRemoteNameToTry = 0;
+ push();
+ function push( ex, isClean ){
+ if( ex ) throw ex;
+ var remoteName = app.remoteNamesToTry[iRemoteNameToTry++];
+ if( remoteName === undefined ){ endFn(Error("No more remote names. s="+ thingyName +"")); return; }
+ log.write("[DEBUG] "+ thingyName +" - git push "+ remoteName +" "
+ + app.branchName +(app.isPushForce?" --force":"")+"\n");
+ argv = ["push", remoteName, "refs/heads/"+app.branchName +":refs/heads/"+ app.branchName];
+ if( app.isPushForce ) argv.push("--force");
+ var child = child_process.spawn(
+ "git", argv,
+ { cwd:workdirOfSync(app, thingyName) }
+ );
+ child.on("error", console.error.bind(console));
+ child.stderr.on("data", logAsString);
+ child.on("close", function( code, signal ){
+ if( code === 128 ){ /* retry with next upstream name */
+ push(); return;
+ }else if( code !== 0 || signal !== null ){
+ endFn(Error("code="+ code +", signal="+ signal +""));
+ return;
+ }
+ endFn();
+ });
+ }
+ function endFn( ex, ret ){
+ onDone(ex, ret);
+ }
+ }
+
+
+ function commitService( app, thingyName, onDone ){
+ if( typeof onDone != "function" ){ throw Error("onDone"); }
+ incrNumTasks(app);
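+ /* Chain: worktree-clean check -> git add -> git commit -> git branch. */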
+ isWorktreeClean(app, thingyName, gitAdd);
+ function gitAdd( ex, isClean ){
+ if( ex ) throw ex;
+ if( isClean ){
+ log.write("[INFO ] Nothing to commit in \""+ thingyName +"\"\n");
+ endFn(null, null); return;
+ }
+ log.write("[DEBUG] "+ thingyName +"$ git add Jenkinsfile\n");
+ var child = child_process.spawn(
+ "git", ["add", "--", "."],
+ { cwd:workdirOfSync(app, thingyName) }
+ );
+ child.on("error", console.error.bind(console));
+ child.stderr.on("data", logAsString);
+ child.on("close", function( code, signal ){
+ if( code !== 0 || signal !== null ){
+ endFn(Error("code="+ code +", signal="+ signal +""));
+ return;
+ }
+ gitCommit();
+ });
+ }
+ function gitCommit( ex ){
+ if( ex ) throw ex;
+ log.write("[DEBUG] "+ thingyName +"$ git commit -m \""+ app.commitMsg +"\"\n");
+ var child = child_process.spawn(
+ "git", ["commit", "-m", app.commitMsg],
+ { cwd:workdirOfSync(app, thingyName) }
+ );
+ var stdoutBufs = [];
+ child.on("error", console.error.bind(console));
+ child.stderr.on("data", logAsString);
+ child.stdout.on("data", function( buf ){ stdoutBufs.push(buf); });
+ child.on("exit", function( code, signal ){
+ if( code !== 0 || signal !== null ){
+ var stdoutStr = "";
+ for( var buf in stdoutBufs ){ log.write(buf.toString()); }
+ endFn(Error("code="+ code +", signal="+ signal));
+ return;
+ }
+ createBranch(); return;
+ });
+ }
+ function createBranch( ex ){
+ if( ex ) throw ex;
+ log.write("[DEBUG] "+ thingyName +"$ git branch "+ app.branchName +"\n");
+ var child = child_process.spawn(
+ "git", ["branch", "-f", app.branchName],
+ { cwd:workdirOfSync(app, thingyName) }
+ );
+ child.on("error", console.error.bind(console));
+ child.stderr.on("data", logAsString);
+ child.on("exit", function( code, signal ){
+ if( code !== 0 || signal !== null ){
+ endFn(Error("code="+ code +", signal="+ signal +""));
+ return;
+ }
+ endFn(); return;
+ });
+ }
+ function endFn( ex, ret ){
+ decrNumTasks(app);
+ onDone(ex, ret);
+ }
+ }
+
+
+ function commitAllServices( app, onDone ){
+ var iSvc = 0;
+ var services;
+ incrNumTasks(app);
+ getJettyServiceNamesAsArray(app, onGetJettyServiceNamesAsArrayDone);
+ function onGetJettyServiceNamesAsArrayDone( ex, ret ){
+ if( ex ) throw ex;
+ services = ret;
+ nextService(null);
+ }
+ function nextService( ex ){
+ if( ex ) throw ex;
+ if( iSvc >= services.length ){ endFn(null); return; }
+ var thingyName = services[iSvc++];
+ if( !thingyName ) throw Error("assert(thingyName != NULL)");
+ commitService(app, thingyName, nextService);
+ }
+ function endFn( ex ){
+ decrNumTasks(app);
+ if( ex ) throw ex;
+ log.write("[DEBUG] No more services to commit\n");
+ onDone(null, null);
+ }
+ }
+
+
+ function giveServiceOurSpecialVersion( app, thingyName, onDone ){
+ if( typeof onDone != "function" ) throw TypeError("onDone");
+ doit();
+ function doit( ex ){
+ if( ex ) throw ex;
+ var child = child_process.spawn(
+ "mvn", ["versions:set", "-DgenerateBackupPoms=false", "-DallowSnapshots=true",
+ "-DnewVersion="+ app.serviceSnapVersion],
+ { cwd: workdirOfSync(app, thingyName) },
+ );
+ child.on("error", console.error.bind(console));
+ child.stderr.on("data", logAsString);
+ child.on("close", function( code, signal ){
+ if( code !== 0 || signal !== null ){
+ onDone(Error("code "+ code +", signal "+ signal));
+ return;
+ }
+ onDone();
+ });
+ }
+ }
+
+
+ function setPlatformVersionInService( app, thingyName, onDone ){
+ if( typeof onDone != "function" ) throw TypeError("onDone");
+ updateParent();
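+ /* Two maven steps: first bump the <parent> version, then keep the
+ * 'platform.version' property in sync with it. */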
+ function updateParent(){
+ log.write("[DEBUG] "+ thingyName +" - Set platform version "+ app.parentVersion +"\n");
+ var child = child_process.spawn(
+ "mvn", ["versions:update-parent", "-DgenerateBackupPoms=false", "-DallowDowngrade=true",
+ "-DallowSnapshots=true", "-DforceUpdate=true", "-DskipResolution=true",
+ "-DparentVersion="+app.parentVersion],
+ { cwd: workdirOfSync(app, thingyName) },
+ );
+ child.on("error", console.error.bind(console));
+ child.stderr.on("data", logAsString);
+ child.on("close", function( code, signal ){
+ if( code !== 0 || signal !== null ){
+ onDone(Error("code "+ code +", signal "+ signal));
+ return;
+ }
+ updateProperty();
+ });
+ }
+ function updateProperty( ex ){
+ if( ex ) throw ex;
+ log.write("[DEBUG] "+ thingyName +" - Set parent.version "+ app.parentVersion +"\n");
+ var child = child_process.spawn(
+ "mvn", ["versions:set-property", "-DgenerateBackupPoms=false", "-DallowSnapshots=true",
+ "-Dproperty=platform.version", "-DnewVersion="+ app.parentVersion],
+ { cwd: workdirOfSync(app, thingyName) },
+ );
+ child.on("error", console.error.bind(console));
+ child.stderr.on("data", logAsString);
+ child.on("close", function( code, signal ){
+ if( code !== 0 || signal !== null ){
+ onDone(Error("code "+ code +", signal "+ signal));
+ return;
+ }
+ onDone();
+ });
+ }
+ }
+
+
+ function dropSlimFromAllJenkinsfiles( app, onDone ){
+ var iSvc = -1;
+ var jettyServices;
+ var jettyService;
+ incrNumTasks(app);
+ getJettyServiceNamesAsArray(app, function( ex, jettyServices_ ){
+ if( ex ) throw ex;
+ jettyServices = jettyServices_;
+ nextJettyService();
+ });
+ function nextJettyService( ex ){
+ decrNumTasks(app);
+ if( ex ) throw ex;
+ if( ++iSvc >= jettyServices.length ){ onNoMoreJettyServices(); return; }
+ incrNumTasks(app);
+ jettyService = jettyServices[iSvc];
+ isWorktreeClean(app, jettyService, onIsWorktreeCleanRsp);
+ }
+ function onIsWorktreeCleanRsp( ex, isClean ){
+ if( ex ) throw ex;
+ if( !isClean ){
+ log.write("[WARN ] Wont patch: Worktree not clean: "+ jettyService +"\n");
+ nextJettyService();
+ return;
+ }
+ log.write("[DEBUG] Patching \""+ jettyService +"/Jenkinsfile\"\n");
+ var child = child_process.spawn(
+ "sed", [ "-i", "-E", "s_^(.*?buildMaven.*?),? *slim: *true,? *(.*?)$_\\1\\2_", "Jenkinsfile" ],
+ { cwd: workdirOfSync(app, jettyService) },
+ );
+ child.on("error", console.error.bind(console));
+ child.stderr.on("data", logAsString);
+ child.on("close", removeEmptyArray);
+ }
+ /* Pipeline is too dumb for an empty array */
+ function removeEmptyArray( ex ){
+ if( ex ) throw ex;
+ var child = child_process.spawn(
+ "sed", [ "-i", "-E", "s_^(.*?).buildMaven\\(\\[\\]\\))(.*?)$_\\1\\2_", "Jenkinsfile" ],
+ { cwd: workdirOfSync(app, jettyService) },
+ );
+ child.on("error", console.error.bind(console));
+ child.stderr.on("data", logAsString);
+ child.on("close", nextJettyService);
+ }
+ function onNoMoreJettyServices(){
+ onDone(null, null);
+ }
+ }
+
+
+ function checkoutUpstreamDevelop( app, thingyName, onDone){
+ var iRemoteName = 0;
+ checkout();
+ function checkout(){
+ var remoteName = app.remoteNamesToTry[iRemoteName];
+ if( remoteName === undefined ){ onDone(Error("No more remote names for "+ thingyName)); return; }
+ log.write("[DEBUG] git checkout "+ thingyName +" "+ remoteName +"/develop\n");
+ var child = child_process.spawn(
+ "git", ["checkout", remoteName+"/develop"],
+ { cwd: workdirOfSync(app, thingyName), });
+ child.on("error", console.error.bind(console));
+ child.stderr.on("data", function( buf ){ log.write(buf); });
+ child.on("close", function( code, signal ){
+ if( !"TODO_GlACAIQoAgDMTwIAIh8CAOJvAgALLgIA" ){
+ checkout(); /* try next remote name */
+ }else if( code !== 0 || signal !== null ){
+ onDone(Error("code "+ code +", signal "+ signal));
+ }else{
+ onDone(null, null);
+ }
+ });
+ }
+ }
+
+
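+ /* Makes sure a local clone exists below app.workdir: clones shallowly
+ * (depth 4) if ".git" is missing, otherwise fetches from the first
+ * usable remote in app.remoteNamesToTry. */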
+ function fetchChangesFromGitit( app, thingyName, onDone ){
+ var iRemoteName = 0;
+ mkAppWorkdir();
+ function mkAppWorkdir( ex ){
+ if( ex ) throw ex;
+ fs.mkdir(app.workdir, {recursive:true}, checkRepoExists);
+ }
+ function checkRepoExists( ex ){
+ if( ex ) throw ex;
+ fs.exists(workdirOfSync(app, thingyName) +"/.git", function( isLocalCloneExists ){
+ isLocalCloneExists ? fetch() : clone();
+ });
+ }
+ function clone( ex ){
+ if( ex ) throw ex;
+ log.write("[DEBUG] git clone "+ thingyName +"\n");
+ var child = child_process.spawn(
+ "git", ["clone", "--no-single-branch", "--depth", "4", gitUrlOfSync(app, thingyName)],
+ { cwd: app.workdir });
+ child.on("error", console.error.bind(console));
+ child.stderr.on("data", function( buf ){ log.write(buf); });
+ child.on("close", function( code, signal ){
+ if( code !== 0 || signal !== null ){
+ onDone(Error("code "+ code +", signal "+ signal)); return;
+ }
+ onDone(null, null);
+ });
+ }
+ function fetch( ex ){
+ if( ex ) throw ex;
+ var remoteName = app.remoteNamesToTry[iRemoteName++];
+ if( remoteName === undefined ){
+ onDone(Error("No more remotes to try for "+ thingyName)); return; }
+ log.write("[DEBUG] "+ thingyName +" - git fetch "+ remoteName +"\n");
+ var child = child_process.spawn(
+ "git", ["fetch", remoteName],
+ { cwd: workdirOfSync(app, thingyName), });
+ child.on("error", console.error.bind(console));
+ child.stderr.on("data", function( buf ){ log.write(buf); });
+ child.on("close", function( code, signal ){
+ if( code !== 0 || signal !== null ){
+ onDone(Error("code "+ code +", signal "+ signal)); return;
+ }
+ onDone(null, null);
+ });
+ }
+ }
+
+
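+ /* Runs "mvn versions:set" in the platform checkout so all its modules
+ * carry app.platformSnapVersion. */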
+ function setVersionInPlatform( app, onDone ){
+ if( typeof onDone != "function" ) throw TypeError("onDone");
+ setVersion();
+ function setVersion(){
+ log.write("[DEBUG] platform - mvn versions:set "+ app.platformSnapVersion +"\n");
+ var child = child_process.spawn(
+ "mvn", ["versions:set", "-DgenerateBackupPoms=false", "-DnewVersion="+app.platformSnapVersion],
+ { cwd: workdirOfSync(app, "platform"), }
+ );
+ child.on("error", console.error.bind(console));
+ child.stdout.on("data", noop);
+ child.stderr.on("data", logAsString);
+ child.on("close", function( code, signal ){
+ if( code !== 0 || signal !== null ){
+ endFn(Error("code "+ code +", signal "+ signal));
+ return;
+ }
+ endFn();
+ });
+ }
+ function endFn( ex, ret ){
+ onDone(ex, ret);
+ }
+ }
+
+
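+ /* Applies the generated "drop slim artifacts" patch to a clean platform
+ * worktree by piping the patch into "git apply" via stdin. */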
+ function patchAwaySlimPackagingInPlatform( app, onDone ){
+ var onDoneCalledNTimes = 0;
+ incrNumTasks(app);
+ isWorktreeClean(app, "platform", function( ex, isClean ){
+ if( ex ) throw ex;
+ if( !isClean ){
+ log.write("[WARN ] Skip platform patch: Worktree not clean\n");
+ endFn(); return;
+ }
+ getDropSlimArtifactsTagInPlatformPatch(app, onPatchBufReady);
+ });
+ function onPatchBufReady( ex, patch ){
+ if( ex ) throw ex;
+ var stdoutBufs = [];
+ var gitApply = child_process.spawn(
+ "git", ["apply"],
+ { cwd: workdirOfSync(app, "platform"), });
+ gitApply.on("error", console.error.bind(console));
+ gitApply.stderr.on("data", logAsString);
+ gitApply.stdout.on("data", stdoutBufs.push.bind(stdoutBufs));
+ gitApply.on("close", function( code, signal ){
+ if( code !== 0 || signal !== null ){
+ for( var i = 0; i < stdoutBufs.length; ++i ){ log.write(stdoutBufs[i].toString()); }
+ throw Error("code "+ code +", signal "+ signal);
+ }
+ endFn(null, null);
+ });
+ setTimeout/*TODO why?*/(function(){
+ gitApply.stdin.write(patch);
+ gitApply.stdin.end();
+ }, 42);
+ }
+ function endFn( ex, ret ){
+ if( onDoneCalledNTimes !== 0 ){ throw Error("assert(onDoneCalledNTimes == 0)"); }
+ onDoneCalledNTimes += 1;
+ decrNumTasks(app);
+ onDone(ex, ret);
+ }
+ }
+
+
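+ /* Bookkeeping for the number of tasks currently in flight, kept as a
+ * plain counter on "app". */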
+ function incrNumTasks( app ){
+ //if( app.numRunningTasks >= app.maxParallel ){
+ // throw Error("assert(app.numRunningTasks < app.maxParallel)");
+ //}
+ app.numRunningTasks += 1;
+ }
+
+
+ function decrNumTasks( app ){
+ if( app.numRunningTasks <= 0 ) throw Error("assert(app.numRunningTasks > 0)");
+ app.numRunningTasks -= 1;
+ }
+
+
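+ /* Minimal sequential async iteration: invokes onService(app, elem, next)
+ * for one element after the other and guarantees onDone is called
+ * exactly once at the end (or on the first error). */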
+ function forEachInArrayDo( app, array, onService, onDone ){
+ var iE = 0;
+ var isOnDoneCalled = false;
+ nextElem();
+ function nextElem( ex ){
+ if( ex ){ endFn(ex); return; }
+ if( iE >= array.length ){ endFn(); return; }
+ onService(app, array[iE++], nextElem);
+ }
+ function endFn( ex ){
+ if( isOnDoneCalled ){
+ throw (ex) ? ex : Error("onDone MUST be called ONCE only");
+ }else{
+ isOnDoneCalled = true;
+ onDone(ex);
+ }
+ }
+ }
+
+
+ function forEachJettyService( app, onService, onDone ){
+ getJettyServiceNamesAsArray(app, onServicesArrived);
+ function onServicesArrived( ex, services ){
+ if( ex ) throw ex;
+ forEachInArrayDo(app, services, onService, onDone);
+ }
+ }
+
+
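+ /* Restores a checkout to a pristine "<remote>/develop": detach HEAD,
+ * hard-reset to the first remote that works, wipe the worktree, then
+ * delete the work branch (tolerating that it may not exist). */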
+ function resetHardToDevelop( app, thingyName, onDone ){
+ var iRemoteName = 0;
+ if( typeof onDone !== "function" ) throw Error("onDone");
+ detach();
+ function detach(){
+ log.write("[DEBUG] "+ thingyName +"$ git checkout --detach\n");
+ var child = child_process.spawn(
+ "git", ["checkout", "--detach"],
+ { cwd:workdirOfSync(app, thingyName) }
+ );
+ child.on("error", console.error.bind(console));
+ child.stderr.on("data", logAsString);
+ child.on("close", function( code, signal ){
+ if( code !== 0 || signal !== null ){
+ onDone(Error("code "+ code +", signal "+ signal));
+ }else{
+ tryResetHard();
+ }
+ });
+ }
+ function tryResetHard(){
+ var remoteName = app.remoteNamesToTry[iRemoteName++];
+ if( remoteName === undefined ){ onDone(Error("no usable remote found")); return; }
+ log.write("[DEBUG] "+ thingyName +"$ git reset --hard "+ remoteName +"/develop\n");
+ var child = child_process.spawn(
+ "git", ["reset", "--hard", remoteName +"/develop"],
+ { cwd:workdirOfSync(app, thingyName) }
+ );
+ child.on("error", console.error.bind(console));
+ child.stderr.on("data", logAsString);
+ child.on("close", function( code, signal ){
+ if( signal !== null ){
+ onDone(Error("code "+ code +", signal "+ signal));
+ }else if( code !== 0 ){
+ tryResetHard(); /*try next remoteName*/
+ }else{
+ wipeWorktree();
+ }
+ });
+ }
+ function wipeWorktree(){
+ log.write("[DEBUG] "+ thingyName +"$ git rimraf\n");
+ var child = child_process.spawn(
+ "git", ["rimraf"/*TODO make portable*/],
+ { cwd:workdirOfSync(app, thingyName) }
+ );
+ child.on("error", console.error.bind(console));
+ child.stderr.on("data", logAsString);
+ child.on("close", function( code, signal ){
+ if( code !== 0 || signal !== null ){
+ onDone(Error("code "+ code +", signal "+ signal));
+ }else{
+ deleteBranch();
+ }
+ });
+ }
+ function deleteBranch( ex ){
+ if( ex ) throw ex;
+ log.write("[DEBUG] "+ thingyName +"$ git branch --delete --force "+ app.branchName +"\n");
+ var child = child_process.spawn(
+ "git", ["branch", "--delete", "--force", app.branchName],
+ { cwd:workdirOfSync(app, thingyName) }
+ );
+ child.on("error", console.error.bind(console));
+ child.stderr.on("data", logAsString);
+ child.on("close", function( code, signal ){
+ if( code == 1 ){ /* assume branch doesn't exist */
+ log.write("[INFO ] Ignore: Failed to delete branch '"+ app.branchName +"' in '"
+ + thingyName +"'.\n");
+ endFn(null, null);
+ }else if( code !== 0 || signal !== null ){
+ onDone(Error("code "+ code +", signal "+ signal));
+ }else{
+ endFn(null, null);
+ }
+ });
+ }
+ function endFn( ex, ret ){
+ onDone(ex, ret);
+ }
+ }
+
+
+ function setPlatformVersionInAllServices( app, onDone ){
+ forEachJettyService(app, setPlatformVersionInService, onDone);
+ }
+
+
+ function fetchRemoteChanges( app, onDone ){
+ var platformAndServices = app.services.slice(0);
+ platformAndServices.unshift("platform");
+ forEachInArrayDo(app, platformAndServices, fetchChangesFromGitit, onDone);
+ }
+
+
+ function fetchListOfServices( app, onDone ){
+ getJettyServiceNamesAsArray(app, function( ex, ret ){
+ if( ex ) throw ex;
+ app.services = ret;
+ onDone();
+ });
+ }
+
+
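+ /* Assembles the action list from the flags parsed into "app" and runs
+ * the actions strictly one after the other. */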
+ function run( app ){
+ var actions = [ fetchListOfServices ];
+ if( app.isFetch ){ actions.push(fetchRemoteChanges); }
+ if( app.isResetHard ){
+ actions.push(function( app, onDone ){
+ forEachInArrayDo(app, app.services, checkoutUpstreamDevelop, onDone);
+ });
+ actions.push(function( app, onDone ){
+ forEachInArrayDo(app, app.services, resetHardToDevelop, onDone);
+ });
+ }
+ if( app.isPatchPlatform ){
+ actions.push(patchAwaySlimPackagingInPlatform);
+ actions.push(setVersionInPlatform);
+ }
+ if( app.isPatchServices ){
+ actions.push(dropSlimFromAllJenkinsfiles);
+ actions.push(function( app, onDone ){
+ forEachInArrayDo(app, app.services, giveServiceOurSpecialVersion, onDone);
+ });
+ }
+ if( app.isCommit ) actions.push(function( app, onDone ){
+ forEachInArrayDo(app, app.services, commitService, onDone);
+ });
+ if( app.isPush || app.isPushForce ){
+ actions.push(function( app, onDone ){
+ forEachJettyService(app, pushService, onDone);
+ });
+ }
+ if( app.isPrintIsaVersion ){ actions.push(printIsaVersion); }
+ actions.push(function( app, onDone ){
+ log.write("[INFO ] App done\n");
+ onDone();
+ });
+ triggerNextAction();
+ function triggerNextAction( ex ){
+ if( ex ) throw ex;
+ var action = actions.shift();
+ if( action === undefined ){ return; }
+ action(app, triggerNextAction);
+ }
+ }
+
+
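+ /* Entry point: collects all knobs with their defaults into "app", lets
+ * parseArgs() override them from argv, then runs. */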
+ function main(){
+ const app = {
+ isHelp: false,
+ isFetch: false,
+ isResetHard: false,
+ isPatchPlatform: false,
+ isPatchServices: false,
+ isCommit: false,
+ isPush: false,
+ isPushForce: false,
+ isPrintIsaVersion: false,
+ remoteNamesToTry: ["origin"],
+ workdir: "C:/work/tmp/git-scripted",
+ maxParallel: 1,
+ numRunningTasks: 0,
+ services: null,
+ branchName: "SDCISA-15648-RemoveSlimPackaging-n1",
+ commitMsg: "[SDCISA-15648] Remove slim packaging",
+ platformSnapVersion: "0.0.0-SNAPSHOT",
+ serviceSnapVersion: "0.0.0-SNAPSHOT",
+ platformJenkinsVersion: "0.0.0-SDCISA-15648-RemoveSlimPackaging-n1-SNAPSHOT",
+ jenkinsSnapVersion: "0.0.0-SDCISA-15648-RemoveSlimPackaging-n1-SNAPSHOT",
+ parentVersion: null,
+ };
+ app.parentVersion = "0.0.0-"+ app.branchName +"-SNAPSHOT";
+ if( parseArgs(process.argv, app) !== 0 ){ process.exit(1); }
+ if( app.isHelp ){ printHelp(); return; }
+ run(app);
+ }
+
+
+}());
diff --git a/src/main/patch/eagle/default-bak20211124-080400.patch b/src/main/patch/eagle/default-bak20211124-080400.patch
new file mode 100644
index 0000000..c7d3a8c
--- /dev/null
+++ b/src/main/patch/eagle/default-bak20211124-080400.patch
@@ -0,0 +1,103 @@
+
+ General patch to fix crappy design.
+
+ Contains:
+ - Logging override to get back control over logging.
+ - Listen on localhost only.
+ - Disable NSync, to suppress that useless noise.
+
+
+diff --git a/eagle-process/pom.xml b/eagle-process/pom.xml
+index 5b226670..45acc276 100644
+--- a/eagle-process/pom.xml
++++ b/eagle-process/pom.xml
+@@ -21,7 +21,27 @@
+ <skip.node.install>true</skip.node.install>
+ </properties>
+ <dependencies>
+- <!-- project -->
++ <dependency> <!-- TODO: Remove -->
++ <groupId>org.slf4j</groupId> <!-- TODO: Remove -->
++ <artifactId>slf4j-api</artifactId> <!-- TODO: Remove -->
++ <version>1.7.25</version> <!-- TODO: Remove -->
++ </dependency> <!-- TODO: Remove -->
++ <dependency> <!-- TODO: Remove -->
++ <groupId>org.slf4j</groupId> <!-- TODO: Remove -->
++ <artifactId>slf4j-simple</artifactId> <!-- TODO: Remove -->
++ <version>1.7.25</version> <!-- TODO: Remove -->
++ </dependency> <!-- TODO: Remove -->
++ <dependency> <!-- TODO: Remove -->
++ <groupId>org.slf4j</groupId> <!-- TODO: Remove -->
++ <artifactId>jcl-over-slf4j</artifactId> <!-- TODO: Remove -->
++ <version>1.7.25</version> <!-- TODO: Remove -->
++ </dependency> <!-- TODO: Remove -->
++ <dependency> <!-- TODO: Remove -->
++ <groupId>org.slf4j</groupId> <!-- TODO: Remove -->
++ <artifactId>jul-to-slf4j</artifactId> <!-- TODO: Remove -->
++ <version>1.7.25</version> <!-- TODO: Remove -->
++ </dependency> <!-- TODO: Remove -->
++ <!-- project -->
+ <dependency>
+ <groupId>ch.post.it.paisa.eagle</groupId>
+ <artifactId>eagle-domain</artifactId>
+diff --git a/eagle-process/src/main/java/ch/post/it/paisa/eagle/process/main/MainVerticle.java b/eagle-process/src/main/java/ch/post/it/paisa/eagle/process/main/MainVerticle.java
+index 13ebdc51..9e947a2d 100644
+--- a/eagle-process/src/main/java/ch/post/it/paisa/eagle/process/main/MainVerticle.java
++++ b/eagle-process/src/main/java/ch/post/it/paisa/eagle/process/main/MainVerticle.java
+@@ -268,7 +268,7 @@ public class MainVerticle extends AbstractVerticle {
+ doubleSlashCheckHandler = new DoubleSlashCheckHandler(doubleSlashCheckerMode);
+ ClasspathResourceHandler classpathResourceHandler = new ClasspathResourceHandler("static-web-apps/", SERVER_ROOT + "/apps/");
+
+- NSyncHandler nSyncHandler = new NSyncHandler(vertx, EAGLE_NSYNC_PATH, mainPort);
++ //NSyncHandler nSyncHandler = new NSyncHandler(vertx, EAGLE_NSYNC_PATH, mainPort);
+
+ ReturnHttpErrorHandler returnHttpErrorHandler = new ReturnHttpErrorHandler(RETURN_HTTP_ERROR_ROOT);
+
+@@ -377,9 +377,9 @@ public class MainVerticle extends AbstractVerticle {
+ if (classpathResourceHandler.handle(request)) {
+ return;
+ }
+- if (nSyncHandler.handle(request)) {
+- return;
+- }
++ //if (nSyncHandler.handle(request)) {
++ // return;
++ //}
+
+ // Attention to the order of handlers - handlers from here can fire selfrequests
+ if ((env.equals("dev") || env.equals("test")) && validationHandler.isToValidate(request)) {
+@@ -440,7 +440,7 @@ public class MainVerticle extends AbstractVerticle {
+ vertxRouter.route().handler(routingContextHandler);
+
+ mainServer.requestHandler(vertxRouter::accept);
+- mainServer.listen(mainPort, x -> {
++ mainServer.listen(mainPort, "127.0.0.1", x -> {
+ propertyHandler.addRefreshable(schedulerResourceManager);
+ log.info("<init> I am ready");
+ String bootLogFilename = props.getOrDefault("service.boot.log", "/data/init/boot.log").toString();
+@@ -530,7 +530,7 @@ public class MainVerticle extends AbstractVerticle {
+ final Future<String> storageDataFuture = newLoggingFuture.apply("storage-data" );
+ final Future<String> mirrorModFuture = newLoggingFuture.apply("MirrorMod" );
+ final Future<String> metricsModuleFuture = newLoggingFuture.apply("MetricsModule" );
+- final Future<String> nsyncFuture = newLoggingFuture.apply("NSync" );
++ //final Future<String> nsyncFuture = newLoggingFuture.apply("NSync" );
+ CompositeFuture.all(futuresToWaitFor).setHandler(handler);
+
+
+@@ -669,11 +669,11 @@ public class MainVerticle extends AbstractVerticle {
+ //////////////////////////////////// NSync /////////////////////////////////////////////////////////////////////////
+ //////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
+ {
+- NSyncVerticleConfig nSyncVerticleConfig = new NSyncVerticleConfig()
+- .withBasePath(EAGLE_NSYNC_PATH)
+- .withMainPort(mainPort);
+- DeploymentOptions deplOpt = new DeploymentOptions().setConfig(JsonObject.mapFrom(nSyncVerticleConfig));
+- vertx.deployVerticle(new NSyncVerticle(), deplOpt, nsyncFuture.completer());
++// NSyncVerticleConfig nSyncVerticleConfig = new NSyncVerticleConfig()
++// .withBasePath(EAGLE_NSYNC_PATH)
++// .withMainPort(mainPort);
++// DeploymentOptions deplOpt = new DeploymentOptions().setConfig(JsonObject.mapFrom(nSyncVerticleConfig));
++// vertx.deployVerticle(new NSyncVerticle(), deplOpt, nsyncFuture.completer());
+ }
+
+ vertx.deployVerticle(new DirectMemoryUsageLogger(), new DeploymentOptions().setConfig(new JsonObject(props)));
diff --git a/src/main/patch/eagle/default-bak20230220-121000.patch b/src/main/patch/eagle/default-bak20230220-121000.patch
new file mode 100644
index 0000000..76cedd0
--- /dev/null
+++ b/src/main/patch/eagle/default-bak20230220-121000.patch
@@ -0,0 +1,102 @@
+
+ General patch to fix crappy design.
+
+ Contains:
+ - Logging override to get back control over logging.
+ - Listen on localhost only (DISABLED for zarniwoop)
+ - Disable NSync, to suppress that useless noise.
+
+
+diff --git a/eagle-process/pom.xml b/eagle-process/pom.xml
+index 5b226670..45acc276 100644
+--- a/eagle-process/pom.xml
++++ b/eagle-process/pom.xml
+@@ -21,6 +21,28 @@
+ <skip.node.install>true</skip.node.install>
+ </properties>
+ <dependencies>
++ <!-- ******************************************* TODO: Remove -->
++ <dependency> <!-- TODO: Remove -->
++ <groupId>org.slf4j</groupId> <!-- TODO: Remove -->
++ <artifactId>slf4j-api</artifactId> <!-- TODO: Remove -->
++ <version>1.7.25</version> <!-- TODO: Remove -->
++ </dependency> <!-- TODO: Remove -->
++ <dependency> <!-- TODO: Remove -->
++ <groupId>org.slf4j</groupId> <!-- TODO: Remove -->
++ <artifactId>slf4j-simple</artifactId> <!-- TODO: Remove -->
++ <version>1.7.25</version> <!-- TODO: Remove -->
++ </dependency> <!-- TODO: Remove -->
++ <dependency> <!-- TODO: Remove -->
++ <groupId>org.slf4j</groupId> <!-- TODO: Remove -->
++ <artifactId>jcl-over-slf4j</artifactId> <!-- TODO: Remove -->
++ <version>1.7.25</version> <!-- TODO: Remove -->
++ </dependency> <!-- TODO: Remove -->
++ <dependency> <!-- TODO: Remove -->
++ <groupId>org.slf4j</groupId> <!-- TODO: Remove -->
++ <artifactId>jul-to-slf4j</artifactId> <!-- TODO: Remove -->
++ <version>1.7.25</version> <!-- TODO: Remove -->
++ </dependency> <!-- TODO: Remove -->
++ <!-- ******************************************* TODO: Remove -->
+ <!-- project -->
+ <dependency>
+ <groupId>ch.post.it.paisa.eagle</groupId>
+diff --git a/eagle-process/src/main/java/ch/post/it/paisa/eagle/process/main/MainVerticle.java b/eagle-process/src/main/java/ch/post/it/paisa/eagle/process/main/MainVerticle.java
+index 13ebdc51..9e947a2d 100644
+--- a/eagle-process/src/main/java/ch/post/it/paisa/eagle/process/main/MainVerticle.java
++++ b/eagle-process/src/main/java/ch/post/it/paisa/eagle/process/main/MainVerticle.java
+@@ -268,7 +268,7 @@ public class MainVerticle extends AbstractVerticle {
+ doubleSlashCheckHandler = new DoubleSlashCheckHandler(doubleSlashCheckerMode);
+ ClasspathResourceHandler classpathResourceHandler = new ClasspathResourceHandler("static-web-apps/", SERVER_ROOT + "/apps/");
+
+- NSyncHandler nSyncHandler = new NSyncHandler(vertx, EAGLE_NSYNC_PATH, mainPort);
++ //NSyncHandler nSyncHandler = new NSyncHandler(vertx, EAGLE_NSYNC_PATH, mainPort);
+
+ ReturnHttpErrorHandler returnHttpErrorHandler = new ReturnHttpErrorHandler(RETURN_HTTP_ERROR_ROOT);
+
+@@ -325,7 +325,7 @@ public class MainVerticle extends AbstractVerticle {
+ .withLoggingResourceManager(loggingResourceManager)
+ .withMonitoringHandler(monitoringHandler)
+ .withHttpClientFactory(this::createHttpClientForRouter)
+- .addDoneHandler(aVoid -> this.onRouterReady(selfClient, classpathResourceHandler, returnHttpErrorHandler, nSyncHandler))
++ .addDoneHandler(aVoid -> this.onRouterReady(selfClient, classpathResourceHandler, returnHttpErrorHandler, null))
+ .build();
+ });
+ });
+@@ -412,9 +412,9 @@ public class MainVerticle extends AbstractVerticle {
+ if (hookHandler.handle(request)) {
+ return;
+ }
+- if (nSyncHandler.handle(request)) {
+- return;
+- }
++ //if (nSyncHandler.handle(request)) {
++ // return;
++ //}
+ if (eventBusHandler.handle(request)) {
+ return;
+ }
+@@ -559,7 +559,7 @@ public class MainVerticle extends AbstractVerticle {
+ final Future<String> storageDataFuture = newLoggingFuture.apply("storage-data" );
+ final Future<String> mirrorModFuture = newLoggingFuture.apply("MirrorMod" );
+ final Future<String> metricsModuleFuture = newLoggingFuture.apply("MetricsModule" );
+- final Future<String> nsyncFuture = newLoggingFuture.apply("NSync" );
++ //final Future<String> nsyncFuture = newLoggingFuture.apply("NSync" );
+ CompositeFuture.all(futuresToWaitFor).setHandler(handler);
+
+
+@@ -721,10 +721,10 @@ public class MainVerticle extends AbstractVerticle {
+ //////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
+ {
+- NSyncVerticleConfig nSyncVerticleConfig = new NSyncVerticleConfig()
+- .withBasePath(EAGLE_NSYNC_PATH)
+- .withMainPort(mainPort);
+- DeploymentOptions deplOpt = new DeploymentOptions().setConfig(JsonObject.mapFrom(nSyncVerticleConfig));
+- vertx.deployVerticle(new NSyncVerticle(), deplOpt, nsyncFuture.completer());
++ //NSyncVerticleConfig nSyncVerticleConfig = new NSyncVerticleConfig()
++ // .withBasePath(EAGLE_NSYNC_PATH)
++ // .withMainPort(mainPort);
++ //DeploymentOptions deplOpt = new DeploymentOptions().setConfig(JsonObject.mapFrom(nSyncVerticleConfig));
++ //vertx.deployVerticle(new NSyncVerticle(), deplOpt, nsyncFuture.completer());
+ }
+
+ vertx.deployVerticle(new DirectMemoryUsageLogger(), new DeploymentOptions().setConfig(new JsonObject(props)));
diff --git a/src/main/patch/eagle/default-bak20231024-082300.patch b/src/main/patch/eagle/default-bak20231024-082300.patch
new file mode 100644
index 0000000..5578433
--- /dev/null
+++ b/src/main/patch/eagle/default-bak20231024-082300.patch
@@ -0,0 +1,101 @@
+
+ General patch to fix crappy design.
+
+ Contains:
+ - Logging override to get back control over logging.
+ - Listen on localhost only (DISABLED for zarniwoop)
+ - Disable NSync, to suppress that useless noise.
+
+ Based on "326188f9ed8830cce3ec9865ea3598945726c308" from "2023-02-13" near
+ "eagle-02.01.10.00".
+
+
+diff --git a/eagle-process/pom.xml b/eagle-process/pom.xml
+index 5b226670..45acc276 100644
+--- a/eagle-process/pom.xml
++++ b/eagle-process/pom.xml
+@@ -23,6 +23,23 @@
+ </properties>
+
+ <dependencies>
++ <!-- ******************************************* TODO: Remove -->
++ <dependency> <!-- TODO: Remove -->
++ <groupId>org.slf4j</groupId> <!-- TODO: Remove -->
++ <artifactId>slf4j-simple</artifactId> <!-- TODO: Remove -->
++ <version>${slf4j.version}</version> <!-- TODO: Remove -->
++ </dependency> <!-- TODO: Remove -->
++ <dependency> <!-- TODO: Remove -->
++ <groupId>org.slf4j</groupId> <!-- TODO: Remove -->
++ <artifactId>jcl-over-slf4j</artifactId> <!-- TODO: Remove -->
++ <version>${slf4j.version}</version> <!-- TODO: Remove -->
++ </dependency> <!-- TODO: Remove -->
++ <dependency> <!-- TODO: Remove -->
++ <groupId>org.slf4j</groupId> <!-- TODO: Remove -->
++ <artifactId>jul-to-slf4j</artifactId> <!-- TODO: Remove -->
++ <version>${slf4j.version}</version> <!-- TODO: Remove -->
++ </dependency> <!-- TODO: Remove -->
++ <!-- ******************************************* TODO: Remove -->
+ <!-- project -->
+ <dependency>
+ <groupId>ch.post.it.paisa.eagle</groupId>
+diff --git a/eagle-process/src/main/java/ch/post/it/paisa/eagle/process/main/MainVerticle.java b/eagle-process/src/main/java/ch/post/it/paisa/eagle/process/main/MainVerticle.java
+index 13ebdc51..9e947a2d 100644
+--- a/eagle-process/src/main/java/ch/post/it/paisa/eagle/process/main/MainVerticle.java
++++ b/eagle-process/src/main/java/ch/post/it/paisa/eagle/process/main/MainVerticle.java
+@@ -323,7 +323,7 @@ public class MainVerticle extends AbstractVerticle {
+ doubleSlashCheckHandler = new DoubleSlashCheckHandler(doubleSlashCheckerMode);
+ ClasspathResourceHandler classpathResourceHandler = new ClasspathResourceHandler("static-web-apps/", SERVER_ROOT + "/apps/");
+
+- NSyncHandler nSyncHandler = new NSyncHandler(vertx, EAGLE_NSYNC_PATH, mainPort);
++ //NSyncHandler nSyncHandler = new NSyncHandler(vertx, EAGLE_NSYNC_PATH, mainPort); /*TODO revert*/
+
+ ReturnHttpErrorHandler returnHttpErrorHandler = new ReturnHttpErrorHandler(RETURN_HTTP_ERROR_ROOT);
+
+@@ -387,7 +387,7 @@ public class MainVerticle extends AbstractVerticle {
+ .withLoggingResourceManager(loggingResourceManager)
+ .withMonitoringHandler(monitoringHandler)
+ .withHttpClientFactory(this::createHttpClientForRouter)
+- .addDoneHandler(aVoid -> this.onRouterReady(selfClient, classpathResourceHandler, returnHttpErrorHandler, nSyncHandler, authorizer))
++ .addDoneHandler(aVoid -> this.onRouterReady(selfClient, classpathResourceHandler, returnHttpErrorHandler, null, authorizer))
+ .build();
+ });
+ });
+@@ -476,9 +476,9 @@ public class MainVerticle extends AbstractVerticle {
+ if (hookHandler.handle(request)) {
+ return;
+ }
+- if (nSyncHandler.handle(request)) {
+- return;
+- }
++ //if (nSyncHandler.handle(request)) {
++ // return;
++ //}
+ if (eventBusHandler.handle(request)) {
+ return;
+ }
+@@ -624,7 +624,7 @@ public class MainVerticle extends AbstractVerticle {
+ final Promise<String> storageLogPromise = newLoggingFuture.apply("storage-log" );
+ final Promise<String> storageDataPromise = newLoggingFuture.apply("storage-data" );
+ final Promise<String> metricsModulePromise = newLoggingFuture.apply("MetricsModule" );
+- final Promise<String> nsyncPromise = newLoggingFuture.apply("NSync" );
++ //final Promise<String> nsyncPromise = newLoggingFuture.apply("NSync" ); /*TODO revert*/
+ CompositeFuture.all(futuresToWaitFor).onComplete(handler);
+
+
+@@ -776,10 +776,10 @@ public class MainVerticle extends AbstractVerticle {
+ //////////////////////////////////// NSync /////////////////////////////////////////////////////////////////////////
+ //////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
+ {
+- NSyncVerticleConfig nSyncVerticleConfig = new NSyncVerticleConfig()
+- .withBasePath(EAGLE_NSYNC_PATH)
+- .withMainPort(mainPort);
+- DeploymentOptions deplOpt = new DeploymentOptions().setConfig(JsonObject.mapFrom(nSyncVerticleConfig));
+- vertx.deployVerticle(new NSyncVerticle(), deplOpt, nsyncPromise);
++ // TODO NSyncVerticleConfig nSyncVerticleConfig = new NSyncVerticleConfig()
++ // TODO .withBasePath(EAGLE_NSYNC_PATH)
++ // TODO .withMainPort(mainPort);
++ // TODO DeploymentOptions deplOpt = new DeploymentOptions().setConfig(JsonObject.mapFrom(nSyncVerticleConfig));
++ // TODO vertx.deployVerticle(new NSyncVerticle(), deplOpt, nsyncPromise);
+ }
+
+ vertx.deployVerticle(new DirectMemoryUsageLogger(), new DeploymentOptions().setConfig(new JsonObject(props)));
diff --git a/src/main/patch/eagle/default.patch b/src/main/patch/eagle/default.patch
new file mode 100644
index 0000000..c0b6785
--- /dev/null
+++ b/src/main/patch/eagle/default.patch
@@ -0,0 +1,101 @@
+
+ General patch to fix crappy design.
+
+ Contains:
+ - Logging override to get back control over logging.
+ - Listen on localhost only (DISABLED for zarniwoop)
+ - Disable NSync, to suppress that useless noise.
+
+ Based on "326188f9ed8830cce3ec9865ea3598945726c308" from "2023-02-13" near
+ "eagle-02.01.10.00".
+
+
+diff --git a/eagle-process/pom.xml b/eagle-process/pom.xml
+index 5b226670..45acc276 100644
+--- a/eagle-process/pom.xml
++++ b/eagle-process/pom.xml
+@@ -23,6 +23,23 @@
+ </properties>
+
+ <dependencies>
++ <!-- ******************************************* TODO: Remove -->
++ <dependency> <!-- TODO: Remove -->
++ <groupId>org.slf4j</groupId> <!-- TODO: Remove -->
++ <artifactId>slf4j-simple</artifactId> <!-- TODO: Remove -->
++ <version>${slf4j.version}</version> <!-- TODO: Remove -->
++ </dependency> <!-- TODO: Remove -->
++ <dependency> <!-- TODO: Remove -->
++ <groupId>org.slf4j</groupId> <!-- TODO: Remove -->
++ <artifactId>jcl-over-slf4j</artifactId> <!-- TODO: Remove -->
++ <version>${slf4j.version}</version> <!-- TODO: Remove -->
++ </dependency> <!-- TODO: Remove -->
++ <dependency> <!-- TODO: Remove -->
++ <groupId>org.slf4j</groupId> <!-- TODO: Remove -->
++ <artifactId>jul-to-slf4j</artifactId> <!-- TODO: Remove -->
++ <version>${slf4j.version}</version> <!-- TODO: Remove -->
++ </dependency> <!-- TODO: Remove -->
++ <!-- ******************************************* TODO: Remove -->
+ <!-- project -->
+ <dependency>
+ <groupId>ch.post.it.paisa.eagle</groupId>
+diff --git a/eagle-process/src/main/java/ch/post/it/paisa/eagle/process/main/MainVerticle.java b/eagle-process/src/main/java/ch/post/it/paisa/eagle/process/main/MainVerticle.java
+index 13ebdc51..9e947a2d 100644
+--- a/eagle-process/src/main/java/ch/post/it/paisa/eagle/process/main/MainVerticle.java
++++ b/eagle-process/src/main/java/ch/post/it/paisa/eagle/process/main/MainVerticle.java
+@@ -367,7 +367,7 @@ public class MainVerticle extends AbstractVerticle {
+ ClasspathResourceHandler classpathResourceHandler =
+ new ClasspathResourceHandler("static-web-apps/", SERVER_ROOT + "/apps/");
+
+- NSyncHandler nSyncHandler = new NSyncHandler(vertx, EAGLE_NSYNC_PATH, mainPort);
++ //NSyncHandler nSyncHandler = new NSyncHandler(vertx, EAGLE_NSYNC_PATH, mainPort);
+
+ ReturnHttpErrorHandler returnHttpErrorHandler =
+ new ReturnHttpErrorHandler(RETURN_HTTP_ERROR_ROOT);
+@@ -470,7 +470,7 @@ public class MainVerticle extends AbstractVerticle {
+ selfClient,
+ classpathResourceHandler,
+ returnHttpErrorHandler,
+- nSyncHandler,
++ null,
+ authorizer))
+ .build();
+ });
+@@ -607,9 +607,9 @@ public class MainVerticle extends AbstractVerticle {
+ if (hookHandler.handle(request)) {
+ return;
+ }
+- if (nSyncHandler.handle(request)) {
+- return;
+- }
++ //if (nSyncHandler.handle(request)) {
++ // return;
++ //}
+ if (eventBusHandler.handle(request)) {
+ return;
+ }
+@@ -777,7 +777,7 @@ public class MainVerticle extends AbstractVerticle {
+ final Promise<String> storageLogPromise = newLoggingFuture.apply("storage-log");
+ final Promise<String> storageDataPromise = newLoggingFuture.apply("storage-data");
+ final Promise<String> metricsModulePromise = newLoggingFuture.apply("MetricsModule");
+- final Promise<String> nsyncPromise = newLoggingFuture.apply("NSync");
++ //final Promise<String> nsyncPromise = newLoggingFuture.apply("NSync"); /*TODO revert*/
+ CompositeFuture.all(futuresToWaitFor).onComplete(handler);
+
+ String redisHost = (String) props.get("redis.host");
+@@ -979,10 +979,10 @@ public class MainVerticle extends AbstractVerticle {
+ //////////////////////////////////// NSync
+ ///////////////////////////////////////////////////////////////////////////////////////////////////////
+ {
+- NSyncVerticleConfig nSyncVerticleConfig =
+- new NSyncVerticleConfig().withBasePath(EAGLE_NSYNC_PATH).withMainPort(mainPort);
+- DeploymentOptions deplOpt =
+- new DeploymentOptions().setConfig(JsonObject.mapFrom(nSyncVerticleConfig));
+- vertx.deployVerticle(new NSyncVerticle(), deplOpt, nsyncPromise);
++ //NSyncVerticleConfig nSyncVerticleConfig = /*TODO revert*/
++ // new NSyncVerticleConfig().withBasePath(EAGLE_NSYNC_PATH).withMainPort(mainPort); /*TODO revert*/
++ //DeploymentOptions deplOpt = /*TODO revert*/
++ // new DeploymentOptions().setConfig(JsonObject.mapFrom(nSyncVerticleConfig)); /*TODO revert*/
++ //vertx.deployVerticle(new NSyncVerticle(), deplOpt, nsyncPromise); /*TODO revert*/
+ }
+ }
+
diff --git a/src/main/patch/eagle/simplelogger.patch b/src/main/patch/eagle/simplelogger.patch
new file mode 100644
index 0000000..97cd8de
--- /dev/null
+++ b/src/main/patch/eagle/simplelogger.patch
@@ -0,0 +1,33 @@
+diff --git a/eagle-process/pom.xml b/eagle-process/pom.xml
+index 5b226670..45acc276 100644
+--- a/eagle-process/pom.xml
++++ b/eagle-process/pom.xml
+@@ -21,7 +21,27 @@
+ <skip.node.install>true</skip.node.install>
+ </properties>
+ <dependencies>
+- <!-- project -->
++ <dependency> <!-- TODO: Remove -->
++ <groupId>org.slf4j</groupId> <!-- TODO: Remove -->
++ <artifactId>slf4j-api</artifactId> <!-- TODO: Remove -->
++ <version>1.7.25</version> <!-- TODO: Remove -->
++ </dependency> <!-- TODO: Remove -->
++ <dependency> <!-- TODO: Remove -->
++ <groupId>org.slf4j</groupId> <!-- TODO: Remove -->
++ <artifactId>slf4j-simple</artifactId> <!-- TODO: Remove -->
++ <version>1.7.25</version> <!-- TODO: Remove -->
++ </dependency> <!-- TODO: Remove -->
++ <dependency> <!-- TODO: Remove -->
++ <groupId>org.slf4j</groupId> <!-- TODO: Remove -->
++ <artifactId>jcl-over-slf4j</artifactId> <!-- TODO: Remove -->
++ <version>1.7.25</version> <!-- TODO: Remove -->
++ </dependency> <!-- TODO: Remove -->
++ <dependency> <!-- TODO: Remove -->
++ <groupId>org.slf4j</groupId> <!-- TODO: Remove -->
++ <artifactId>jul-to-slf4j</artifactId> <!-- TODO: Remove -->
++ <version>1.7.25</version> <!-- TODO: Remove -->
++ </dependency> <!-- TODO: Remove -->
++ <!-- project -->
+ <dependency>
+ <groupId>ch.post.it.paisa.eagle</groupId>
+ <artifactId>eagle-domain</artifactId>
diff --git a/src/main/patch/houston/default-20230203.patch b/src/main/patch/houston/default-20230203.patch
deleted file mode 100644
index c1deeca..0000000
--- a/src/main/patch/houston/default-20230203.patch
+++ /dev/null
@@ -1,52 +0,0 @@
-
- My custom patch ready-to-apply to have a "usable" houston.
-
- Contains:
- - Simplelogger
- - Listen localhost only
- - Queue-Retry every 5 seconds.
-
-diff --git a/houston-process/pom.xml b/houston-process/pom.xml
-index fff9c178..960c0098 100644
---- a/houston-process/pom.xml
-+++ b/houston-process/pom.xml
-@@ -20,6 +20,26 @@
- <skip.node.install>true</skip.node.install>
- </properties>
- <dependencies>
-+ <dependency> <!-- TODO: Remove -->
-+ <groupId>org.slf4j</groupId> <!-- TODO: Remove -->
-+ <artifactId>slf4j-api</artifactId> <!-- TODO: Remove -->
-+ <version>1.7.25</version> <!-- TODO: Remove -->
-+ </dependency> <!-- TODO: Remove -->
-+ <dependency> <!-- TODO: Remove -->
-+ <groupId>org.slf4j</groupId> <!-- TODO: Remove -->
-+ <artifactId>slf4j-simple</artifactId> <!-- TODO: Remove -->
-+ <version>1.7.25</version> <!-- TODO: Remove -->
-+ </dependency> <!-- TODO: Remove -->
-+ <dependency> <!-- TODO: Remove -->
-+ <groupId>org.slf4j</groupId> <!-- TODO: Remove -->
-+ <artifactId>jcl-over-slf4j</artifactId> <!-- TODO: Remove -->
-+ <version>1.7.25</version> <!-- TODO: Remove -->
-+ </dependency> <!-- TODO: Remove -->
-+ <dependency> <!-- TODO: Remove -->
-+ <groupId>org.slf4j</groupId> <!-- TODO: Remove -->
-+ <artifactId>jul-to-slf4j</artifactId> <!-- TODO: Remove -->
-+ <version>1.7.25</version> <!-- TODO: Remove -->
-+ </dependency> <!-- TODO: Remove -->
- <!-- project -->
- <dependency>
- <groupId>ch.post.it.paisa.houston</groupId>
-diff --git a/houston-process/src/main/java/ch/post/it/paisa/houston/process/main/Deployer.java b/houston-process/src/main/java/ch/post/it/paisa/houston/process/main/Deployer.java
-index ee7d8b02..b28ae8d6 100644
---- a/houston-process/src/main/java/ch/post/it/paisa/houston/process/main/Deployer.java
-+++ b/houston-process/src/main/java/ch/post/it/paisa/houston/process/main/Deployer.java
-@@ -256,7 +256,7 @@ public class Deployer {
- qc.add(new QueueConfiguration().withPattern("brox-from-vehicles-.*").withRetryIntervals(10, 20, 30, 60, 120)
- .withEnqueueDelayMillisPerSize(10).withEnqueueMaxDelayMillis(10_000));
- // All other queues (typically to backend services) with a slow-down pattern after failed delivery
-- qc.add(new QueueConfiguration().withPattern(".*").withRetryIntervals(10, 20, 30, 60, 120));
-+ qc.add(new QueueConfiguration().withPattern(".*").withRetryIntervals(5));/*TODO revert*/
-
- RedisquesConfiguration redisquesConfig = RedisquesConfiguration.with()
- .address(Address.redisquesAddress())
diff --git a/src/main/patch/houston/default-20230214.patch b/src/main/patch/houston/default-20230214.patch
deleted file mode 100644
index 3f8fa16..0000000
--- a/src/main/patch/houston/default-20230214.patch
+++ /dev/null
@@ -1,56 +0,0 @@
-
- My custom patch ready-to-apply to have a "usable" houston.
-
- Contains:
- - Simplelogger
- - Listen localhost only
- - Queue-Retry every 5 seconds.
-
- Patch based on "houston-02.01.12.00" aka
- "3c61ef7ded53d6340001d2b56cd829d2ae450580" from 2023-01-12.
-
-diff --git a/houston-process/pom.xml b/houston-process/pom.xml
-index fff9c178..960c0098 100644
---- a/houston-process/pom.xml
-+++ b/houston-process/pom.xml
-@@ -20,6 +20,26 @@
- </properties>
-
- <dependencies>
-+ <dependency> <!-- TODO: Remove -->
-+ <groupId>org.slf4j</groupId> <!-- TODO: Remove -->
-+ <artifactId>slf4j-api</artifactId> <!-- TODO: Remove -->
-+ <version>1.7.25</version> <!-- TODO: Remove -->
-+ </dependency> <!-- TODO: Remove -->
-+ <dependency> <!-- TODO: Remove -->
-+ <groupId>org.slf4j</groupId> <!-- TODO: Remove -->
-+ <artifactId>slf4j-simple</artifactId> <!-- TODO: Remove -->
-+ <version>1.7.25</version> <!-- TODO: Remove -->
-+ </dependency> <!-- TODO: Remove -->
-+ <dependency> <!-- TODO: Remove -->
-+ <groupId>org.slf4j</groupId> <!-- TODO: Remove -->
-+ <artifactId>jcl-over-slf4j</artifactId> <!-- TODO: Remove -->
-+ <version>1.7.25</version> <!-- TODO: Remove -->
-+ </dependency> <!-- TODO: Remove -->
-+ <dependency> <!-- TODO: Remove -->
-+ <groupId>org.slf4j</groupId> <!-- TODO: Remove -->
-+ <artifactId>jul-to-slf4j</artifactId> <!-- TODO: Remove -->
-+ <version>1.7.25</version> <!-- TODO: Remove -->
-+ </dependency> <!-- TODO: Remove -->
- <!-- project -->
- <dependency>
- <groupId>ch.post.it.paisa.houston</groupId>
-diff --git a/houston-process/src/main/java/ch/post/it/paisa/houston/process/main/Deployer.java b/houston-process/src/main/java/ch/post/it/paisa/houston/process/main/Deployer.java
-index ee7d8b02..b28ae8d6 100644
---- a/houston-process/src/main/java/ch/post/it/paisa/houston/process/main/Deployer.java
-+++ b/houston-process/src/main/java/ch/post/it/paisa/houston/process/main/Deployer.java
-@@ -309,7 +309,7 @@ public class Deployer {
- qc.add(new QueueConfiguration().withPattern("timetable-situation-trips-for-aws").withRetryIntervals(standardDelays)
- .withEnqueueDelayMillisPerSize(10).withEnqueueMaxDelayMillis(100));
- // All other queues (typically to backend services) with a slow-down pattern after failed delivery
-- qc.add(new QueueConfiguration().withPattern(".*").withRetryIntervals(standardDelays));
-+ qc.add(new QueueConfiguration().withPattern(".*").withRetryIntervals(5/*TODO revert*/));
-
- RedisquesConfiguration redisquesConfig = RedisquesConfiguration.with()
- .address(Address.redisquesAddress())
-
diff --git a/src/main/patch/houston/default-20230331.patch b/src/main/patch/houston/default-20230331.patch
deleted file mode 100644
index 64d3628..0000000
--- a/src/main/patch/houston/default-20230331.patch
+++ /dev/null
@@ -1,56 +0,0 @@
-
- My custom patch ready-to-apply to have a "usable" houston.
-
- Contains:
- - Simplelogger
- - Listen localhost only
- - Queue-Retry every 5 seconds.
-
- Patch based on "houston-02.01.12.00" aka
- "3c61ef7ded53d6340001d2b56cd829d2ae450580" from 2023-01-12.
-
-diff --git a/houston-process/pom.xml b/houston-process/pom.xml
-index fff9c178..960c0098 100644
---- a/houston-process/pom.xml
-+++ b/houston-process/pom.xml
-@@ -20,6 +20,26 @@
- </properties>
-
- <dependencies>
-+ <dependency> <!-- TODO: Remove -->
-+ <groupId>org.slf4j</groupId> <!-- TODO: Remove -->
-+ <artifactId>slf4j-api</artifactId> <!-- TODO: Remove -->
-+ <version>2.0.1</version> <!-- TODO: Remove -->
-+ </dependency> <!-- TODO: Remove -->
-+ <dependency> <!-- TODO: Remove -->
-+ <groupId>org.slf4j</groupId> <!-- TODO: Remove -->
-+ <artifactId>slf4j-simple</artifactId> <!-- TODO: Remove -->
-+ <version>2.0.1</version> <!-- TODO: Remove -->
-+ </dependency> <!-- TODO: Remove -->
-+ <dependency> <!-- TODO: Remove -->
-+ <groupId>org.slf4j</groupId> <!-- TODO: Remove -->
-+ <artifactId>jcl-over-slf4j</artifactId> <!-- TODO: Remove -->
-+ <version>2.0.1</version> <!-- TODO: Remove -->
-+ </dependency> <!-- TODO: Remove -->
-+ <dependency> <!-- TODO: Remove -->
-+ <groupId>org.slf4j</groupId> <!-- TODO: Remove -->
-+ <artifactId>jul-to-slf4j</artifactId> <!-- TODO: Remove -->
-+ <version>2.0.1</version> <!-- TODO: Remove -->
-+ </dependency> <!-- TODO: Remove -->
- <!-- project -->
- <dependency>
- <groupId>ch.post.it.paisa.houston</groupId>
-diff --git a/houston-process/src/main/java/ch/post/it/paisa/houston/process/main/Deployer.java b/houston-process/src/main/java/ch/post/it/paisa/houston/process/main/Deployer.java
-index ee7d8b02..b28ae8d6 100644
---- a/houston-process/src/main/java/ch/post/it/paisa/houston/process/main/Deployer.java
-+++ b/houston-process/src/main/java/ch/post/it/paisa/houston/process/main/Deployer.java
-@@ -309,7 +309,7 @@ public class Deployer {
- qc.add(new QueueConfiguration().withPattern("timetable-situation-trips-for-aws").withRetryIntervals(standardDelays)
- .withEnqueueDelayMillisPerSize(10).withEnqueueMaxDelayMillis(100));
- // All other queues (typically to backend services) with a slow-down pattern after failed delivery
-- qc.add(new QueueConfiguration().withPattern(".*").withRetryIntervals(standardDelays));
-+ qc.add(new QueueConfiguration().withPattern(".*").withRetryIntervals(5/*TODO revert*/));
-
- RedisquesConfiguration redisquesConfig = RedisquesConfiguration.with()
- .address(Address.redisquesAddress())
-
diff --git a/src/main/patch/houston/default.patch b/src/main/patch/houston/default.patch
index d70b12b..b5b7639 100644
--- a/src/main/patch/houston/default.patch
+++ b/src/main/patch/houston/default.patch
@@ -2,29 +2,55 @@
Custom houston patch to have a "usable" service at all.
Patch based on "develop" aka
- "3b1275e123c2b7aa2ffaa34270a5e1a373a65993" from "2023-04-27".
+ "497a9477c9e2100130f9a29ec130c1131220c935" from "2024-04-22".
-diff --git a/pom.xml b/pom.xml
-index 0ed4f7f3..b44c5693 100644
--- a/pom.xml
+++ b/pom.xml
-@@ -72,8 +72,6 @@
- <skip.wagon>false</skip.wagon>
+@@ -73,7 +73,7 @@
<skip.copy-dependencies>false</skip.copy-dependencies>
-- <!-- spotless -->
+ <!-- spotless -->
- <source.format.apply.phase>compile</source.format.apply.phase>
- </properties>
++ <source.format.apply.phase>none</source.format.apply.phase>
- <scm>
-diff --git a/houston-process/pom.xml b/houston-process/pom.xml
-index 374dcb97..3c24937c 100644
+ <!-- JavaMelody -->
+ <jetty.version>9.4.43.v20210629</jetty.version>
+
+
+--- a/pom.xml
++++ b/pom.xml
+@@ -301,4 +301,25 @@
+ </properties>
+ </profile>
+ </profiles>
++ <build>
++ <plugins>
++ <plugin>
++ <groupId>com.diffplug.spotless</groupId>
++ <artifactId>spotless-maven-plugin</artifactId>
++ <executions>
++ <execution>
++ <id>spotless-apply</id>
++ <phase>none</phase>
++ </execution>
++ <execution>
++ <id>spotless-check</id>
++ <phase>none</phase>
++ </execution>
++ </executions>
++ <configuration>
++ <skip>true</skip>
++ </configuration>
++ </plugin>
++ </plugins>
++ </build>
+ </project>
+
+
--- a/houston-process/pom.xml
+++ b/houston-process/pom.xml
-@@ -25,6 +25,26 @@
- </properties>
-
+@@ -27,3 +27,23 @@
<dependencies>
+ <dependency> <!-- TODO: Remove -->
+ <groupId>org.slf4j</groupId> <!-- TODO: Remove -->
@@ -48,27 +74,43 @@ index 374dcb97..3c24937c 100644
+ </dependency> <!-- TODO: Remove -->
<!-- project -->
<dependency>
- <groupId>ch.post.it.paisa.houston</groupId>
-diff --git a/houston-process/src/main/java/ch/post/it/paisa/houston/process/main/Deployer.java b/houston-process/src/main/java/ch/post/it/paisa/houston/process/main/Deployer.java
-index 432efb01..d1729fe9 100644
+
+
+--- a/houston-process/pom.xml
++++ b/houston-process/pom.xml
+@@ -212,6 +232,2 @@
+ </dependency>
+- <dependency>
+- <groupId>org.apache.logging.log4j</groupId>
+- <artifactId>log4j-slf4j2-impl</artifactId>
+- </dependency>
+ <dependency>
+
+
--- a/houston-process/src/main/java/ch/post/it/paisa/houston/process/main/Deployer.java
+++ b/houston-process/src/main/java/ch/post/it/paisa/houston/process/main/Deployer.java
-@@ -68,6 +68,9 @@ public class Deployer {
+@@ -98,6 +98,9 @@ public class Deployer {
private static final Logger LOGGER = LoggerFactory.getLogger(Deployer.class);
public static void main(String[] args) throws Exception {
+ boolean isAssertIsEnabled = false;
+ assert isAssertIsEnabled = true;
+ if (!isAssertIsEnabled) throw new UnsupportedOperationException("Enable assertions to fix this problem -> https://stackoverflow.com/a/68893479/4415884");
- setStartupProperties();
- Props.prepare();
+ throwIfLoggerAmbiguous();
-@@ -378,7 +378,7 @@ public class Deployer {
+ configureObjectMapper();
+
+
+--- a/houston-process/src/main/java/ch/post/it/paisa/houston/process/main/Deployer.java
++++ b/houston-process/src/main/java/ch/post/it/paisa/houston/process/main/Deployer.java
+@@ -471,7 +477,7 @@ public class Deployer {
// All other queues (typically to backend services) with a slow-down pattern after
// failed delivery
qc.add(
- new QueueConfiguration().withPattern(".*").withRetryIntervals(standardDelays));
+ new QueueConfiguration().withPattern(".*").withRetryIntervals(5/*TODO revert*/));
-
- RedisquesConfiguration redisquesConfig =
- RedisquesConfiguration.with()
+ LOGGER.info(
+ "Redisques redis-client will created with MaxPoolSize: {}, MaxPoolWaiting: {}, MaxWaitingHandlers: {}",
+ Props.getMaxRedisConnectionPoolSize4RedisQues(),
+
+
diff --git a/src/main/patch/houston/fixidiots.patch b/src/main/patch/houston/fixidiots.patch
new file mode 100644
index 0000000..7af8f9d
--- /dev/null
+++ b/src/main/patch/houston/fixidiots.patch
@@ -0,0 +1,365 @@
+
+ Why is it so fucking hard to just keep out all those random annoying logger
+ implementations?!?
+
+ Who the heck wants to configure all of them, and, if ONE is missed, have
+ all important error reports concealed into nirvana? Who the fuck wants such
+ shit?
+
+ Please: STOP THIS SHIT! Libraries solely have to depend on slf4j. As its name
+ already says, it is a FACADE! NOT AN IMPLEMENTATION! STOP MESSING THIS UP ALL
+ THE TIME WITH YET ANOTHER NEW SHITTY NERDY LOGGER IMPL!
+
+
+--- /dev/null
++++ b/houston-process/src/main/java/org/apache/logging/slf4j/Log4jLogger.java
+@@ -0,0 +1,109 @@
++package org.apache.logging.slf4j;
++
++import org.apache.logging.log4j.spi.ExtendedLogger;
++import org.slf4j.Logger;
++import org.slf4j.Marker;
++import org.slf4j.event.Level;
++import org.slf4j.spi.LocationAwareLogger;
++import org.slf4j.spi.LoggingEventBuilder;
++
++import java.io.Serializable;
++import java.lang.reflect.Constructor;
++import java.lang.reflect.InvocationTargetException;
++import java.lang.reflect.Method;
++
++
++/** <p>FU** this fu***** damn sh** code that still tries to use log4j, no matter
++ * how strongly we tell it NOT to use it!</p>
++ * <p>This class only exists to prevent services from starting if IDEA still missed
++ * the dependency changes in the pom and still tries to use the wrong logger impl.</p> */
++public class Log4jLogger implements LocationAwareLogger, Serializable {
++
++ private final org.slf4j.Logger log;
++
++ Log4jLogger(final Log4jMarkerFactory markerFactory, final ExtendedLogger logger, final String name) {
++ try {
++ Class<?> logrFactClz = Class.forName("org.slf4j.simple.SimpleLoggerFactory");
++ Constructor<?> ctor = logrFactClz.getConstructor();
++ Method getLoggerFn = logrFactClz.getMethod("getLogger", String.class);
++ this.log = (Logger) getLoggerFn.invoke(ctor.newInstance(), name);
++ } catch (ClassNotFoundException | NoSuchMethodException | InstantiationException | IllegalAccessException | InvocationTargetException ex) {
++ throw new UnsupportedOperationException(ex);
++ }
++ }
++
++ @Override public void log(Marker marker, String s, int i, String s1, Object[] objects, Throwable throwable) {
++ throw new UnsupportedOperationException(/*TODO*/"Not impl yet");
++ }
++
++ @Override public String getName() { return log.getName(); }
++ @Override public LoggingEventBuilder makeLoggingEventBuilder(Level level) { return log.makeLoggingEventBuilder(level); }
++ @Override public LoggingEventBuilder atLevel(Level level) { return log.atLevel(level); }
++ @Override public boolean isEnabledForLevel(Level level) { return log.isEnabledForLevel(level); }
++ @Override public boolean isTraceEnabled() { return log.isTraceEnabled(); }
++ @Override public void trace(String s) { log.trace(s); }
++ @Override public void trace(String s, Object o) { log.trace(s, o); }
++ @Override public void trace(String s, Object o, Object o1) { log.trace(s, o, o1); }
++ @Override public void trace(String s, Object... objects) { log.trace(s, objects); }
++ @Override public void trace(String s, Throwable throwable) { log.trace(s, throwable); }
++ @Override public boolean isTraceEnabled(Marker marker) { return log.isTraceEnabled(marker); }
++ @Override public LoggingEventBuilder atTrace() { return log.atTrace(); }
++ @Override public void trace(Marker marker, String s) { log.trace(marker, s); }
++ @Override public void trace(Marker marker, String s, Object o) { log.trace(marker, s, o); }
++ @Override public void trace(Marker marker, String s, Object o, Object o1) { log.trace(marker, s, o, o1); }
++ @Override public void trace(Marker marker, String s, Object... objects) { log.trace(marker, s, objects); }
++ @Override public void trace(Marker marker, String s, Throwable throwable) { log.trace(marker, s, throwable); }
++ @Override public boolean isDebugEnabled() { return log.isDebugEnabled(); }
++ @Override public void debug(String s) { log.debug(s); }
++ @Override public void debug(String s, Object o) { log.debug(s, o); }
++ @Override public void debug(String s, Object o, Object o1) { log.debug(s, o, o1); }
++ @Override public void debug(String s, Object... objects) { log.debug(s, objects); }
++ @Override public void debug(String s, Throwable throwable) { log.debug(s, throwable); }
++ @Override public boolean isDebugEnabled(Marker marker) { return log.isDebugEnabled(marker); }
++ @Override public void debug(Marker marker, String s) { log.debug(marker, s); }
++ @Override public void debug(Marker marker, String s, Object o) { log.debug(marker, s, o); }
++ @Override public void debug(Marker marker, String s, Object o, Object o1) { log.debug(marker, s, o, o1); }
++ @Override public void debug(Marker marker, String s, Object... objects) { log.debug(marker, s, objects); }
++ @Override public void debug(Marker marker, String s, Throwable throwable) { log.debug(marker, s, throwable); }
++ @Override public LoggingEventBuilder atDebug() { return log.atDebug(); }
++ @Override public boolean isInfoEnabled() { return log.isInfoEnabled(); }
++ @Override public void info(String s) { log.info(s); }
++ @Override public void info(String s, Object o) { log.info(s, o); }
++ @Override public void info(String s, Object o, Object o1) { log.info(s, o, o1); }
++ @Override public void info(String s, Object... objects) { log.info(s, objects); }
++ @Override public void info(String s, Throwable throwable) { log.info(s, throwable); }
++ @Override public boolean isInfoEnabled(Marker marker) { return log.isInfoEnabled(marker); }
++ @Override public void info(Marker marker, String s) { log.info(marker, s); }
++ @Override public void info(Marker marker, String s, Object o) { log.info(marker, s, o); }
++ @Override public void info(Marker marker, String s, Object o, Object o1) { log.info(marker, s, o, o1); }
++ @Override public void info(Marker marker, String s, Object... objects) { log.info(marker, s, objects); }
++ @Override public void info(Marker marker, String s, Throwable throwable) { log.info(marker, s, throwable); }
++ @Override public LoggingEventBuilder atInfo() { return log.atInfo(); }
++ @Override public boolean isWarnEnabled() { return log.isWarnEnabled(); }
++ @Override public void warn(String s) { log.warn(s); }
++ @Override public void warn(String s, Object o) { log.warn(s, o); }
++ @Override public void warn(String s, Object... objects) { log.warn(s, objects); }
++ @Override public void warn(String s, Object o, Object o1) { log.warn(s, o, o1); }
++ @Override public void warn(String s, Throwable throwable) { log.warn(s, throwable); }
++ @Override public boolean isWarnEnabled(Marker marker) { return log.isWarnEnabled(marker); }
++ @Override public void warn(Marker marker, String s) { log.warn(marker, s); }
++ @Override public void warn(Marker marker, String s, Object o) { log.warn(marker, s, o); }
++ @Override public void warn(Marker marker, String s, Object o, Object o1) { log.warn(marker, s, o, o1); }
++ @Override public void warn(Marker marker, String s, Object... objects) { log.warn(marker, s, objects); }
++ @Override public void warn(Marker marker, String s, Throwable throwable) { log.warn(marker, s, throwable); }
++ @Override public LoggingEventBuilder atWarn() { return log.atWarn(); }
++ @Override public boolean isErrorEnabled() { return log.isErrorEnabled(); }
++ @Override public void error(String s) { log.error(s); }
++ @Override public void error(String s, Object o) { log.error(s, o); }
++ @Override public void error(String s, Object o, Object o1) { log.error(s, o, o1); }
++ @Override public void error(String s, Object... objects) { log.error(s, objects); }
++ @Override public void error(String s, Throwable throwable) { log.error(s, throwable); }
++ @Override public boolean isErrorEnabled(Marker marker) { return log.isErrorEnabled(marker); }
++ @Override public void error(Marker marker, String s) { log.error(marker, s); }
++ @Override public void error(Marker marker, String s, Object o) { log.error(marker, s, o); }
++ @Override public void error(Marker marker, String s, Object o, Object o1) { log.error(marker, s, o, o1); }
++ @Override public void error(Marker marker, String s, Object... objects) { log.error(marker, s, objects); }
++ @Override public void error(Marker marker, String s, Throwable throwable) { log.error(marker, s, throwable); }
++ @Override public LoggingEventBuilder atError() { return log.atError(); }
++
++}
+
+
+
+--- /dev/null
++++ b/houston-process/src/main/java/org/slf4j/reload4j/Reload4jLoggerAdapter.java
+@@ -0,0 +1,16 @@
++package org.slf4j.reload4j;
++
++/** <p>FU** this fu***** damn sh** code that still tries to use log4j, no matter
++ * how strongly we tell it NOT to use it!</p>
++ * <p>This class only exists to prevent services from starting if IDEA still missed
++ * the dependency changes in the pom and still tries to use the wrong logger impl.</p> */
++public class Reload4jLoggerAdapter {
++
++ public Reload4jLoggerAdapter(org.apache.log4j.Logger l) {
++ throw new UnsupportedOperationException("Fuck those fucking script-kiddies!"
++ + " How fucking hard can it be to just properly setup logging?!?"
++ + " Please !STOP! intermixing interfaces with implementations all the time!"
++ + " This fucking shit just conceals erros all the time! STOP IT!");
++ }
++
++}
+
+
+--- /dev/null
++++ b/houston-process/src/main/java/org/slf4j/reload4j/Reload4jServiceProvider.java
+@@ -0,0 +1,64 @@
++package org.slf4j.reload4j;
++
++import org.slf4j.ILoggerFactory;
++import org.slf4j.IMarkerFactory;
++import org.slf4j.Logger;
++import org.slf4j.Marker;
++import org.slf4j.spi.MDCAdapter;
++import org.slf4j.spi.SLF4JServiceProvider;
++
++import java.lang.reflect.InvocationTargetException;
++import java.lang.reflect.Method;
++import java.util.Deque;
++import java.util.Map;
++
++
++/** <p>How many of those fu**ing damn stupid idiots are still out there
++ * continuing to stubbornly include those stupid logger impls with their
++ * libraries?!?</p> */
++public class Reload4jServiceProvider implements SLF4JServiceProvider, ILoggerFactory, IMarkerFactory, MDCAdapter {
++
++ private final Object slf4jSimpleLoggerFactory;
++ private final Method getLoggerFn;
++
++ public Reload4jServiceProvider() {
++ try {
++ Class<?> logrFactClz = Class.forName("org.slf4j.simple.SimpleLoggerFactory");
++ slf4jSimpleLoggerFactory = logrFactClz.getConstructor().newInstance();
++ getLoggerFn = logrFactClz.getMethod("getLogger", String.class);
++ } catch (ClassNotFoundException | NoSuchMethodException | InstantiationException | IllegalAccessException | InvocationTargetException ex) {
++ throw new UnsupportedOperationException(ex);
++ }
++ }
++
++ @Override public ILoggerFactory getLoggerFactory() { return this; }
++ @Override public IMarkerFactory getMarkerFactory() { return this; }
++ @Override public MDCAdapter getMDCAdapter() { return this; }
++ @Override public String getRequestedApiVersion() { return "2.0"; }
++ @Override public void initialize() {}
++
++ @Override
++ public Logger getLogger(String name) {
++ try {
++ return (Logger) getLoggerFn.invoke(slf4jSimpleLoggerFactory, name);
++ } catch (IllegalAccessException | InvocationTargetException ex) {
++ throw new RuntimeException(ex);
++ }
++ }
++
++ @Override public Marker getMarker(String s) { assert false : "TODO not impl yet"; return null; }
++ @Override public boolean exists(String s) { assert false : "TODO not impl yet"; return false; }
++ @Override public boolean detachMarker(String s) { assert false : "TODO not impl yet"; return false; }
++ @Override public Marker getDetachedMarker(String s) { assert false : "TODO not impl yet"; return null; }
++ @Override public void put(String s, String s1) { assert false : "TODO not impl yet"; }
++ @Override public String get(String s) { assert false : "TODO not impl yet"; return null; }
++ @Override public void remove(String s) { assert false : "TODO not impl yet"; }
++ @Override public void clear() { assert false : "TODO not impl yet"; }
++ @Override public Map<String, String> getCopyOfContextMap() { assert false : "TODO not impl yet"; return null; }
++ @Override public void setContextMap(Map<String, String> map) { assert false : "TODO not impl yet"; }
++ @Override public void pushByKey(String s, String s1) { assert false : "TODO not impl yet"; }
++ @Override public String popByKey(String s) { assert false : "TODO not impl yet"; return null; }
++ @Override public Deque<String> getCopyOfDequeByKey(String s) { assert false : "TODO not impl yet"; return null; }
++ @Override public void clearDequeByKey(String s) { assert false : "TODO not impl yet"; }
++
++}
+
+
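+ Why the shadowing above works: SLF4J 2.x discovers its backend through
+ java.util.ServiceLoader, and duplicate fully qualified class names resolve by
+ classpath order, so the classes above override the providers shipped by the
+ offending jars. A minimal sketch (assuming it runs on the same classpath as
+ houston-process) to print which providers actually get picked up:
+
+   import org.slf4j.spi.SLF4JServiceProvider;
+   import java.util.ServiceLoader;
+
+   public class ListLoggerProviders {
+       public static void main(String[] args) {
+           // ServiceLoader reads each jar's
+           // META-INF/services/org.slf4j.spi.SLF4JServiceProvider entry; every
+           // line printed is one impl SLF4J could bind to.
+           for (SLF4JServiceProvider p : ServiceLoader.load(SLF4JServiceProvider.class)) {
+               System.out.println(p.getClass().getName());
+           }
+       }
+   }
+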
+--- /dev/null
++++ b/houston-process/src/main/java/org/apache/logging/slf4j/SLF4JServiceProvider.java
+@@ -0,0 +1,62 @@
++package org.apache.logging.slf4j;
++
++import org.slf4j.ILoggerFactory;
++import org.slf4j.IMarkerFactory;
++import org.slf4j.Logger;
++import org.slf4j.Marker;
++import org.slf4j.spi.MDCAdapter;
++
++import java.lang.reflect.InvocationTargetException;
++import java.lang.reflect.Method;
++import java.util.Deque;
++import java.util.Map;
++
++
++/** <p>How many of those fu**ing damn stupid idiotic libs are still out there
++ * continuing to stubbornly include those stupid logger impls?!?</p> */
++public class SLF4JServiceProvider implements org.slf4j.spi.SLF4JServiceProvider, ILoggerFactory, IMarkerFactory, MDCAdapter {
++
++ private final Object slf4jSimpleLoggerFactory;
++ private final Method getLoggerFn;
++
++ public SLF4JServiceProvider() {
++ try {
++ Class<?> logrFactClz = Class.forName("org.slf4j.simple.SimpleLoggerFactory");
++ slf4jSimpleLoggerFactory = logrFactClz.getConstructor().newInstance();
++ getLoggerFn = logrFactClz.getMethod("getLogger", String.class);
++ } catch (ClassNotFoundException | NoSuchMethodException | InstantiationException | IllegalAccessException | InvocationTargetException ex) {
++ throw new UnsupportedOperationException(ex);
++ }
++ }
++
++ @Override public ILoggerFactory getLoggerFactory() { return this; }
++ @Override public IMarkerFactory getMarkerFactory() { return this; }
++ @Override public MDCAdapter getMDCAdapter() { return this; }
++ @Override public String getRequestedApiVersion() { return "2.0"; }
++ @Override public void initialize() {}
++
++ @Override
++ public Logger getLogger(String name) {
++ try {
++ return (Logger) getLoggerFn.invoke(slf4jSimpleLoggerFactory, name);
++ } catch (IllegalAccessException | InvocationTargetException ex) {
++ throw new RuntimeException(ex);
++ }
++ }
++
++ @Override public Marker getMarker(String s) { assert false : "TODO not impl yet"; return null; }
++ @Override public boolean exists(String s) { assert false : "TODO not impl yet"; return false; }
++ @Override public boolean detachMarker(String s) { assert false : "TODO not impl yet"; return false; }
++ @Override public Marker getDetachedMarker(String s) { assert false : "TODO not impl yet"; return null; }
++ @Override public void put(String s, String s1) { assert false : "TODO not impl yet"; }
++ @Override public String get(String s) { assert false : "TODO not impl yet"; return null; }
++ @Override public void remove(String s) { assert false : "TODO not impl yet"; }
++ @Override public void clear() { assert false : "TODO not impl yet"; }
++ @Override public Map<String, String> getCopyOfContextMap() { assert false : "TODO not impl yet"; return null; }
++ @Override public void setContextMap(Map<String, String> map) { assert false : "TODO not impl yet"; }
++ @Override public void pushByKey(String s, String s1) { assert false : "TODO not impl yet"; }
++ @Override public String popByKey(String s) { assert false : "TODO not impl yet"; return null; }
++ @Override public Deque<String> getCopyOfDequeByKey(String s) { assert false : "TODO not impl yet"; return null; }
++ @Override public void clearDequeByKey(String s) { assert false : "TODO not impl yet"; }
++
++}
+
+
+--- /dev/null
++++ b/houston-process/src/main/java/net/bull/javamelody/internal/common/Log4J2Logger.java
+@@ -0,0 +1,38 @@
++package net.bull.javamelody.internal.common;
++
++import org.slf4j.Logger;
++
++import javax.servlet.http.HttpServletRequest;
++import java.lang.reflect.InvocationTargetException;
++import java.lang.reflect.Method;
++
++
++/** <p>How many of those fu**ing damn stupid idiotic libs are still out there
++ * continuing to stubbornly include those stupid logger impls?!?</p> */
++public class Log4J2Logger implements net.bull.javamelody.JavaMelodyLogger {
++
++ private final org.slf4j.Logger log;
++
++ public Log4J2Logger(){
++ try {
++ Class<?> logrFactClz = Class.forName("org.slf4j.simple.SimpleLoggerFactory");
++ Object slf4jSimpleLoggerFactory = logrFactClz.getConstructor().newInstance();
++ Method getLoggerFn = logrFactClz.getMethod("getLogger", String.class);
++ this.log = (Logger) getLoggerFn.invoke(slf4jSimpleLoggerFactory, "net.bull.javamelody");
++ } catch (ClassNotFoundException | NoSuchMethodException | InstantiationException | IllegalAccessException | InvocationTargetException ex) {
++ throw new UnsupportedOperationException(ex);
++ }
++ }
++
++ @Override public void debug(String s) { log.debug(s); }
++ @Override public void debug(String s, Throwable ex) { log.debug(s, ex); }
++ @Override public void info(String s) { log.info(s); }
++ @Override public void info(String s, Throwable ex) { log.info(s, ex);}
++ @Override public void warn(String s, Throwable ex) { log.warn(s, ex);}
++ @Override public void logHttpRequest(
++ HttpServletRequest httpRequest, String requestName, long duration, boolean systemError, int responseStatus, long responseSize, String loggerName
++ ){
++ if (log.isInfoEnabled()) log.info("{}", LOG.buildLogMessage(httpRequest, duration, systemError, responseStatus, responseSize));
++ }
++
++}
+
+
+--- /dev/null
++++ b/houston-process/src/main/java/org/eclipse/jetty/util/log/Slf4jLog.java
+@@ -0,0 +1,33 @@
++package org.eclipse.jetty.util.log;
++
++import org.slf4j.LoggerFactory;
++
++
++/** Yet another fu**ing bastard lib having its own shiny stupid loggers. */
++public class Slf4jLog {
++
++ private final org.slf4j.Logger log;
++
++ public Slf4jLog() {
++ this.log = LoggerFactory.getLogger("org.eclipse.jetty.util.log");
++ }
++
++ public Slf4jLog(String name) {
++ this.log = LoggerFactory.getLogger(name);
++ }
++
++ public String getName() { return log.getName(); }
++ public void warn(String msg, Object... args) { log.warn(msg, args); }
++ public void warn(Throwable thrown) { log.warn("", thrown); }
++ public void warn(String msg, Throwable thrown) { log.warn(msg, thrown); }
++ public void info(String msg, Object... args) { log.info(msg, args); }
++ public void info(Throwable thrown) { log.info("", thrown); }
++ public void info(String msg, Throwable thrown) { log.info(msg, thrown); }
++ public void debug(String msg, Object... args) { log.debug(msg, args); }
++ public void debug(String msg, long arg) { if (log.isDebugEnabled()) log.debug(msg, arg); }
++ public void debug(Throwable thrown) { this.debug("", thrown); }
++ public void debug(String msg, Throwable thrown) { log.debug(msg, thrown); }
++ public boolean isDebugEnabled() { return log.isDebugEnabled(); }
++ public void setDebugEnabled(boolean enabled) { log.warn("setDebugEnabled not implemented"); }
++
++}
+
diff --git a/src/main/patch/houston/future.patch b/src/main/patch/houston/future.patch
new file mode 100644
index 0000000..2ac5922
--- /dev/null
+++ b/src/main/patch/houston/future.patch
@@ -0,0 +1,47 @@
+
+ Some patches that may become relevant in the future, for example because a
+ PR is not yet merged or similar.
+
+
+--- a/houston-process/src/main/java/net/bull/javamelody/internal/common/Log4J2Logger.java
++++ b/houston-process/src/main/java/net/bull/javamelody/internal/common/Log4J2Logger.java
+@@ -22,5 +22,5 @@
+ if (!iLoaders.hasNext()) throw new IllegalStateException("Too few logger impls");
+ SLF4JServiceProvider loggerProvider = iLoaders.next();
+- if (iLoaders.hasNext()) throw new IllegalStateException("Too many logger impls");
++ if (!(loggerProvider instanceof org.slf4j.simple.SimpleServiceProvider) && iLoaders.hasNext()) throw new IllegalStateException("Too many logger impls");
+ loggerProvider.initialize();
+ ILoggerFactory loggerFactory = loggerProvider.getLoggerFactory();
+
+
+--- /dev/null
++++ b/houston-process/src/main/java/ch/post/it/paisa/houston/process/main/BadLoggerImplKiller.java
+@@ -0,0 +1,26 @@
++package ch.post.it.paisa.houston.process.main;
++
++import org.slf4j.spi.SLF4JServiceProvider;
++
++import java.util.ServiceLoader;
++
++
++public class BadLoggerImplKiller {
++
++ public static void assertExactlyOneLoggerImplPresent(){
++ Class<?> log4jProviderClz, simpleProviderClz;
++ try {
++ log4jProviderClz = Class.forName("org.apache.logging.slf4j.SLF4JServiceProvider");
++ simpleProviderClz = Class.forName("org.slf4j.simple.SimpleServiceProvider");
++ }catch( ClassNotFoundException ex ){
++ throw new RuntimeException(ex);
++ }
++ for( SLF4JServiceProvider provider : ServiceLoader.load(SLF4JServiceProvider.class) ){
++ Class<?> providerClass = provider.getClass();
++ if( log4jProviderClz.isAssignableFrom(providerClass) ) continue;
++ if( simpleProviderClz.isAssignableFrom(providerClass) ) continue;
++ throw new IllegalStateException("Go away with this ugly logger: "+ providerClass.getName());
++ }
++ }
++
++}
+
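+ Usage sketch (hypothetical Main; the point is to run the check before the
+ first LoggerFactory.getLogger() call, so a wrong impl on the classpath kills
+ the service at startup instead of silently swallowing logs):
+
+   import ch.post.it.paisa.houston.process.main.BadLoggerImplKiller;
+
+   public class Main {
+       public static void main(String[] args) {
+           // Fail fast before anything asks SLF4J for a logger.
+           BadLoggerImplKiller.assertExactlyOneLoggerImplPresent();
+           // ... regular bootstrap continues here.
+       }
+   }
+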
+
diff --git a/src/main/patch/preflux/default.patch b/src/main/patch/preflux/default.patch
new file mode 100644
index 0000000..897d731
--- /dev/null
+++ b/src/main/patch/preflux/default.patch
@@ -0,0 +1,236 @@
+
+ TODO describe (like in houston)
+
+
+--- a/pom.xml
++++ b/pom.xml
+@@ -57,6 +57,24 @@
+
+ <!-- atlas -->
+ <atlas.version>00.01.00.00</atlas.version>
++ <skip.angular.build>true</skip.angular.build>
++ <skip.frontend.sourceformat.check>true</skip.frontend.sourceformat.check>
++ <skip.grunt.build>true</skip.grunt.build>
++ <skip.install.nodeAndNpm>false</skip.install.nodeAndNpm>
++ <skip.integration.tests>true</skip.integration.tests>
++ <skip.jacoco>true</skip.jacoco>
++ <skip.jacoco.regularbuild>true</skip.jacoco.regularbuild>
++ <skip.junit.tests>false</skip.junit.tests>
++ <skip.karma.tests>true</skip.karma.tests>
++ <skip.npm.install>true</skip.npm.install>
++ <skip.remote.tests>true</skip.remote.tests>
++ <skip.selenium.tests>true</skip.selenium.tests>
++ <skip.spotless>true</skip.spotless>
++ <skip.test.install.nodeAndNpm>true</skip.test.install.nodeAndNpm>
++ <skip.test.npm.install>true</skip.test.npm.install>
++ <skip.unpack.dependencies>true</skip.unpack.dependencies>
++ <skip.web.install.nodeAndNpm>true</skip.web.install.nodeAndNpm>
++ <skip.web.npm.install>true</skip.web.npm.install>
+ </properties>
+
+ <modules>
+
+
+--- a/preflux-web/pom.xml
++++ b/preflux-web/pom.xml
+@@ -14,6 +14,26 @@
+ <packaging>war</packaging>
+
+ <dependencies>
++ <dependency> <!-- TODO: Remove -->
++ <groupId>org.slf4j</groupId> <!-- TODO: Remove -->
++ <artifactId>slf4j-api</artifactId> <!-- TODO: Remove -->
++ <version>${slf4j.version}</version> <!-- TODO: Remove -->
++ </dependency> <!-- TODO: Remove -->
++ <dependency> <!-- TODO: Remove -->
++ <groupId>org.slf4j</groupId> <!-- TODO: Remove -->
++ <artifactId>slf4j-simple</artifactId> <!-- TODO: Remove -->
++ <version>${slf4j.version}</version> <!-- TODO: Remove -->
++ </dependency> <!-- TODO: Remove -->
++ <dependency> <!-- TODO: Remove -->
++ <groupId>org.slf4j</groupId> <!-- TODO: Remove -->
++ <artifactId>jcl-over-slf4j</artifactId> <!-- TODO: Remove -->
++ <version>${slf4j.version}</version> <!-- TODO: Remove -->
++ </dependency> <!-- TODO: Remove -->
++ <dependency> <!-- TODO: Remove -->
++ <groupId>org.slf4j</groupId> <!-- TODO: Remove -->
++ <artifactId>jul-to-slf4j</artifactId> <!-- TODO: Remove -->
++ <version>${slf4j.version}</version> <!-- TODO: Remove -->
++ </dependency> <!-- TODO: Remove -->
+ <!-- PA-ISA Platform -->
+ <dependency>
+ <groupId>ch.post.it.paisa.alice</groupId>
+
+
+--- a/preflux-test/pom.xml
++++ b/preflux-test/pom.xml
+@@ -16,6 +16,26 @@
+ </properties>
+
+ <dependencies>
++ <dependency> <!-- TODO: Remove -->
++ <groupId>org.slf4j</groupId> <!-- TODO: Remove -->
++ <artifactId>slf4j-api</artifactId> <!-- TODO: Remove -->
++ <version>${slf4j.version}</version> <!-- TODO: Remove -->
++ </dependency> <!-- TODO: Remove -->
++ <dependency> <!-- TODO: Remove -->
++ <groupId>org.slf4j</groupId> <!-- TODO: Remove -->
++ <artifactId>slf4j-simple</artifactId> <!-- TODO: Remove -->
++ <version>${slf4j.version}</version> <!-- TODO: Remove -->
++ </dependency> <!-- TODO: Remove -->
++ <dependency> <!-- TODO: Remove -->
++ <groupId>org.slf4j</groupId> <!-- TODO: Remove -->
++ <artifactId>jcl-over-slf4j</artifactId> <!-- TODO: Remove -->
++ <version>${slf4j.version}</version> <!-- TODO: Remove -->
++ </dependency> <!-- TODO: Remove -->
++ <dependency> <!-- TODO: Remove -->
++ <groupId>org.slf4j</groupId> <!-- TODO: Remove -->
++ <artifactId>jul-to-slf4j</artifactId> <!-- TODO: Remove -->
++ <version>${slf4j.version}</version> <!-- TODO: Remove -->
++ </dependency> <!-- TODO: Remove -->
+ <!--Alice Test Framework-->
+ <dependency>
+ <groupId>ch.post.it.paisa.alice</groupId>
+
+
+--- a/preflux-web/package.json
++++ b/preflux-web/package.json
+@@ -10,10 +10,10 @@
+ "check": "npm run format:check && npm run lint && npm run test",
+ "check:ci": "npm run format:check && npm run lint",
+ "check:fix": "npm run format:fix && npm run lint:fix && npm run test",
+- "format:check": "prettier --check \"src/main/angular/**/*.{ts,html,css,json}\"",
+- "format:fix": "prettier --write \"src/main/angular/**/*.{ts,html,css,json}\"",
+- "lint": "ng lint",
+- "lint:fix": "ng lint --fix",
++ "format:check": "true",
++ "format:fix": "true",
++ "lint": "true",
++ "lint:fix": "true",
+ "test": "ng test --no-watch --browsers=ChromeHeadlessNoSandbox",
+ "test:ci": "npm run test",
+ "test:watch": "ng test --watch --browsers=ChromeHeadlessNoSandbox"
+
+
+--- /dev/null
++++ b/preflux-web/src/main/java/org/apache/logging/slf4j/Log4jLogger.java
+@@ -0,0 +1,115 @@
++package org.apache.logging.slf4j;
++
++import org.apache.logging.log4j.spi.ExtendedLogger;
++import org.slf4j.Logger;
++import org.slf4j.Marker;
++import org.slf4j.spi.LocationAwareLogger;
++
++import java.io.Serializable;
++import java.lang.reflect.Constructor;
++import java.lang.reflect.InvocationTargetException;
++import java.lang.reflect.Method;
++
++
++/** <p>This class only exists to really, really, really, really use the wanted
++ * logger impl. And only the one chosen logger impl and no other log impl. In
++ * fact there should be no reason for this class to exist. But it seems some
++ * code still manages to stubbornly use unwanted logger impls occasionally,
++ * for whatever reason. As it seems impossible to configure this properly, this
++ * class at least makes it fail fast, before devs waste time searching for
++ * expected logs which magically never appear.</p>
++ */
++public class Log4jLogger implements LocationAwareLogger, Serializable {
++
++ private static final Method getLoggerFn;
++ private static final Object loggerFactory;
++ private final Logger log;
++
++ static {
++ try {
++ Class<?> slfClass = Class.forName("org.slf4j.simple.SimpleLoggerFactory");
++ getLoggerFn = slfClass.getDeclaredMethod("getLogger", String.class);
++ Constructor<?> ctor = slfClass.getConstructor();
++ ctor.setAccessible(true);
++ loggerFactory = ctor.newInstance();
++ } catch (ClassNotFoundException | NoSuchMethodException | InvocationTargetException | InstantiationException |
++ IllegalAccessException ex) {
++ throw new UnsupportedOperationException(/*TODO*/"Not impl yet", ex);
++ }
++ }
++
++ Log4jLogger(Object markerFactory, ExtendedLogger logger, final String name) {
++ try {
++ this.log = (Logger) getLoggerFn.invoke(loggerFactory, name);
++ } catch (InvocationTargetException | IllegalAccessException ex) {
++ throw new UnsupportedOperationException(/*TODO*/"Not impl yet", ex);
++ }
++ }
++
++ @Override public void log(Marker marker, String s, int i, String s1, Object[] objects, Throwable throwable) {
++ throw new UnsupportedOperationException(/*TODO*/"Not impl yet");
++ }
++
++ @Override public String getName() { return log.getName(); }
++ @Override public boolean isTraceEnabled() { return log.isTraceEnabled(); }
++ @Override public void trace(String s) { log.trace(s); }
++ @Override public void trace(String s, Object o) { log.trace(s, o); }
++ @Override public void trace(String s, Object o, Object o1) { log.trace(s, o, o1); }
++ @Override public void trace(String s, Object... objects) { log.trace(s, objects); }
++ @Override public void trace(String s, Throwable throwable) { log.trace(s, throwable); }
++ @Override public boolean isTraceEnabled(Marker marker) { return log.isTraceEnabled(marker); }
++ @Override public void trace(Marker marker, String s) { log.trace(marker, s); }
++ @Override public void trace(Marker marker, String s, Object o) { log.trace(marker, s, o); }
++ @Override public void trace(Marker marker, String s, Object o, Object o1) { log.trace(marker, s, o, o1); }
++ @Override public void trace(Marker marker, String s, Object... objects) { log.trace(marker, s, objects); }
++ @Override public void trace(Marker marker, String s, Throwable throwable) { log.trace(marker, s, throwable); }
++ @Override public boolean isDebugEnabled() { return log.isDebugEnabled(); }
++ @Override public void debug(String s) { log.debug(s); }
++ @Override public void debug(String s, Object o) { log.debug(s, o); }
++ @Override public void debug(String s, Object o, Object o1) { log.debug(s, o, o1); }
++ @Override public void debug(String s, Object... objects) { log.debug(s, objects); }
++ @Override public void debug(String s, Throwable throwable) { log.debug(s, throwable); }
++ @Override public boolean isDebugEnabled(Marker marker) { return log.isDebugEnabled(marker); }
++ @Override public void debug(Marker marker, String s) { log.debug(marker, s); }
++ @Override public void debug(Marker marker, String s, Object o) { log.debug(marker, s, o); }
++ @Override public void debug(Marker marker, String s, Object o, Object o1) { log.debug(marker, s, o, o1); }
++ @Override public void debug(Marker marker, String s, Object... objects) { log.debug(marker, s, objects); }
++ @Override public void debug(Marker marker, String s, Throwable throwable) { log.debug(marker, s, throwable); }
++ @Override public boolean isInfoEnabled() { return log.isInfoEnabled(); }
++ @Override public void info(String s) { log.info(s); }
++ @Override public void info(String s, Object o) { log.info(s, o); }
++ @Override public void info(String s, Object o, Object o1) { log.info(s, o, o1); }
++ @Override public void info(String s, Object... objects) { log.info(s, objects); }
++ @Override public void info(String s, Throwable throwable) { log.info(s, throwable); }
++ @Override public boolean isInfoEnabled(Marker marker) { return log.isInfoEnabled(marker); }
++ @Override public void info(Marker marker, String s) { log.info(marker, s); }
++ @Override public void info(Marker marker, String s, Object o) { log.info(marker, s, o); }
++ @Override public void info(Marker marker, String s, Object o, Object o1) { log.info(marker, s, o, o1); }
++ @Override public void info(Marker marker, String s, Object... objects) { log.info(marker, s, objects); }
++ @Override public void info(Marker marker, String s, Throwable throwable) { log.info(marker, s, throwable); }
++ @Override public boolean isWarnEnabled() { return log.isWarnEnabled(); }
++ @Override public void warn(String s) { log.warn(s); }
++ @Override public void warn(String s, Object o) { log.warn(s, o); }
++ @Override public void warn(String s, Object... objects) { log.warn(s, objects); }
++ @Override public void warn(String s, Object o, Object o1) { log.warn(s, o, o1); }
++ @Override public void warn(String s, Throwable throwable) { log.warn(s, throwable); }
++ @Override public boolean isWarnEnabled(Marker marker) { return log.isWarnEnabled(marker); }
++ @Override public void warn(Marker marker, String s) { log.warn(marker, s); }
++ @Override public void warn(Marker marker, String s, Object o) { log.warn(marker, s, o); }
++ @Override public void warn(Marker marker, String s, Object o, Object o1) { log.warn(marker, s, o, o1); }
++ @Override public void warn(Marker marker, String s, Object... objects) { log.warn(marker, s, objects); }
++ @Override public void warn(Marker marker, String s, Throwable throwable) { log.warn(marker, s, throwable); }
++ @Override public boolean isErrorEnabled() { return log.isErrorEnabled(); }
++ @Override public void error(String s) { log.error(s); }
++ @Override public void error(String s, Object o) { log.error(s, o); }
++ @Override public void error(String s, Object o, Object o1) { log.error(s, o, o1); }
++ @Override public void error(String s, Object... objects) { log.error(s, objects); }
++ @Override public void error(String s, Throwable throwable) { log.error(s, throwable); }
++ @Override public boolean isErrorEnabled(Marker marker) { return log.isErrorEnabled(marker); }
++ @Override public void error(Marker marker, String s) { log.error(marker, s); }
++ @Override public void error(Marker marker, String s, Object o) { log.error(marker, s, o); }
++ @Override public void error(Marker marker, String s, Object o, Object o1) { log.error(marker, s, o, o1); }
++ @Override public void error(Marker marker, String s, Object... objects) { log.error(marker, s, objects); }
++ @Override public void error(Marker marker, String s, Throwable throwable) { log.error(marker, s, throwable); }
++
++}
+
+
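+ Smoke-test sketch (hypothetical; assumes log4j-api on the classpath, which
+ the adapter above already requires) to verify log4j calls really end up in
+ slf4j-simple:
+
+   import org.apache.logging.log4j.LogManager;
+   import org.apache.logging.log4j.Logger;
+
+   public class LogSmokeTest {
+       public static void main(String[] args) {
+           // Routed through the shadowed adapter into slf4j-simple, which
+           // prints to stderr by default.
+           Logger l = LogManager.getLogger("smoke");
+           l.info("if this shows up on stderr, the shadowing works");
+       }
+   }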
+
+
diff --git a/src/main/patch/slarti/default.patch b/src/main/patch/slarti/default.patch
new file mode 100644
index 0000000..2d910d7
--- /dev/null
+++ b/src/main/patch/slarti/default.patch
@@ -0,0 +1,31 @@
+diff --git a/slarti-web/pom.xml b/slarti-web/pom.xml
+index 7933bdf86..3a1730377 100644
+--- a/slarti-web/pom.xml
++++ b/slarti-web/pom.xml
+@@ -17,6 +17,26 @@
+ <packaging>war</packaging>
+
+ <dependencies>
++ <dependency> <!-- TODO: Remove -->
++ <groupId>org.slf4j</groupId> <!-- TODO: Remove -->
++ <artifactId>slf4j-api</artifactId> <!-- TODO: Remove -->
++ <version>2.0.1</version> <!-- TODO: Remove -->
++ </dependency> <!-- TODO: Remove -->
++ <dependency> <!-- TODO: Remove -->
++ <groupId>org.slf4j</groupId> <!-- TODO: Remove -->
++ <artifactId>slf4j-simple</artifactId> <!-- TODO: Remove -->
++ <version>2.0.1</version> <!-- TODO: Remove -->
++ </dependency> <!-- TODO: Remove -->
++ <dependency> <!-- TODO: Remove -->
++ <groupId>org.slf4j</groupId> <!-- TODO: Remove -->
++ <artifactId>jcl-over-slf4j</artifactId> <!-- TODO: Remove -->
++ <version>2.0.1</version> <!-- TODO: Remove -->
++ </dependency> <!-- TODO: Remove -->
++ <dependency> <!-- TODO: Remove -->
++ <groupId>org.slf4j</groupId> <!-- TODO: Remove -->
++ <artifactId>jul-to-slf4j</artifactId> <!-- TODO: Remove -->
++ <version>2.0.1</version> <!-- TODO: Remove -->
++ </dependency> <!-- TODO: Remove -->
+ <dependency>
+ <groupId>ch.post.it.webjars</groupId>
+ <artifactId>linti</artifactId>
diff --git a/src/main/php/sqlite-exec.php b/src/main/php/sqlite-exec.php
new file mode 100644
index 0000000..8df2fe0
--- /dev/null
+++ b/src/main/php/sqlite-exec.php
@@ -0,0 +1,30 @@
+<?php
+
+throw new Exception("Sorry, cannot just execute from file :(");
+
+
+function run( $app ){
+ $lotsOfSql = file_get_contents($app->srcPath);
+    if( !$lotsOfSql ) throw new Exception("file_get_contents(\"{$app->srcPath}\")");
+ $app->db = new SQLite3($app->dstPath);
+ if( !$app->db ) throw new Exception("SQLite3(\"{$app->dstPath}\")");
+ $db = $app->db;
+ $db->enableExceptions(true);
+ $st = $db->prepare($lotsOfSql);
+ $st->execute();
+ $st->close();
+}
+
+
+function main(){
+ $app = (object)array(
+ "srcPath" => NULL/*TODO set me*/,
+ "dstPath" => NULL/*TODO set me*/,
+ "srcFile" => NULL,
+ "db" => NULL,
+ );
+ run($app);
+}
+
+
+main();
diff --git a/src/main/shell/BackupByRsync/backup.sh b/src/main/shell/BackupByRsync/backup.sh
index 40189c2..16c1aa2 100755
--- a/src/main/shell/BackupByRsync/backup.sh
+++ b/src/main/shell/BackupByRsync/backup.sh
@@ -53,57 +53,68 @@ run () {
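+  # Note: --link-dest makes rsync hard-link files that are unchanged since the
+  # previous snapshot, so each dated backup looks complete while only changed
+  # files consume additional space.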
rsync --archive --verbose \
--link-dest "${DIR_TO}/latest/${DST_PREFIX:?}" \
--filter=':- .gitignore' \
+ --exclude=".git/branches" \
--exclude=".git/COMMIT_EDITMSG" \
--exclude=".git/FETCH_HEAD" \
- --exclude=".git/ORIG_HEAD" \
- --exclude=".git/branches" \
--exclude=".git/hooks/*.sample" \
--exclude=".git/index" \
--exclude=".git/info" \
--exclude=".git/logs" \
--exclude=".git/objects" \
+ --exclude=".git/ORIG_HEAD" \
--exclude=".git/packed-refs" \
--exclude=".git/refs/remotes" \
--exclude=".git/refs/tags" \
--exclude=".idea" \
- --exclude="/.git-credentials" \
- --exclude="/.NERDTreeBookmarks" \
- --exclude="/.Xauthority" \
+ --exclude="/.android" \
--exclude="/.bash_history" \
+ --exclude="/.cache" \
+ --exclude="/.config/chromium" \
+ --exclude="/.config/GIMP" \
+ --exclude="/.config/inkscape" \
+ --exclude="/.config/JetBrains" \
+ --exclude="/.config/libreoffice" \
+ --exclude="/.config/VirtualBox/compreg.dat" \
--exclude="/.config/VirtualBox/HostInterfaceNetworking-vboxnet0-Dhcpd.leases*" \
--exclude="/.config/VirtualBox/HostInterfaceNetworking-vboxnet0-Dhcpd.log*" \
- --exclude="/.config/VirtualBox/VBoxSVC.log*" \
- --exclude="/.config/VirtualBox/compreg.dat" \
--exclude="/.config/VirtualBox/selectorwindow.log*" \
--exclude="/.config/VirtualBox/vbox-ssl-cacertificate.crt" \
+ --exclude="/.config/VirtualBox/VBoxSVC.log*" \
--exclude="/.config/VirtualBox/xpti.dat" \
- --exclude="/.config/libreoffice" \
- --exclude="/.config/GIMP" \
- --exclude="/.config/JetBrains" \
+ --exclude="/.eclipse" \
--exclude="/.gdb_history" \
+ --exclude="/.git-credentials" \
+ --exclude="/.gmrun_history" \
--exclude="/.lesshst" \
- --exclude="/.xsession-errors" \
- --exclude="/.xsession-errors.old" \
- --exclude="/mnt" \
- --exclude="/.android" \
- --exclude="/.cache" \
- --exclude="/.config/chromium" \
- --exclude="/.config/inkscape" \
--exclude="/.local/share" \
--exclude="/.m2/repository" \
+ --exclude="/mnt" \
--exclude="/.mozilla/firefox" \
+ --exclude="/.NERDTreeBookmarks" \
+ --exclude="/.recently-used" \
+ --exclude="/.recoll" \
+ --exclude="/.sh_history" \
+ --exclude="/.sqlite_history" \
--exclude="/.squirrel-sql" \
--exclude="/.viking-maps" \
- --exclude="/Downloads" \
+ --exclude="/.viminfo" \
+ --exclude="/.viminfo.tmp" \
+ --exclude="/.Xauthority" \
+ --exclude="/.xsession-errors" \
+ --exclude="/.xsession-errors.old" \
--exclude="/crashdumps" \
+ --exclude="/Downloads" \
--exclude="/images" \
+ --exclude="/mnt" \
--exclude="/projects/forks" \
- --exclude="cee-misc-lib/external" \
- --exclude="cee-misc-lib/tmp" \
--exclude="/tmp" \
--exclude="/virtualbox-*" \
- --exclude="/vmshare" \
+ --exclude="/VirtualBox VMs" \
--exclude="/vm-qemu" \
+ --exclude="/vm-share" \
+ --exclude="/vmshare" \
+ --exclude="cee-misc-lib/external" \
+ --exclude="cee-misc-lib/tmp" \
"${DIR_FROM:?}" \
"${BACKUP_PATH:?}/${DST_PREFIX}" \
;